Fixed bug 3362 - OpenGL renderer doesn't check if framebuffers are supported when creating target textures

Simon Hug

The GL_CreateTexture function doesn't have any checks for the case where the driver doesn't support the framebuffer object extension. For a target texture it will call into GL_GetFBO, which calls the non-existent glGenFramebuffersEXT.
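
For reference, a minimal reproduction sketch (the window setup and the 256x256 size are just placeholders, and it assumes SDL picks the OpenGL renderer on a driver without GL_EXT_framebuffer_object); the SDL_CreateTexture call below ends up in GL_GetFBO and crashes:

#include "SDL.h"

int main(int argc, char *argv[])
{
    SDL_Init(SDL_INIT_VIDEO);
    SDL_Window *window = SDL_CreateWindow("render target test",
                                          SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                                          640, 480, SDL_WINDOW_OPENGL);
    SDL_Renderer *renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);

    /* Requesting a target texture takes the GL_CreateTexture -> GL_GetFBO path.
       Without GL_EXT_framebuffer_object this crashes instead of failing cleanly. */
    SDL_Texture *target = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGBA8888,
                                            SDL_TEXTUREACCESS_TARGET, 256, 256);
    (void)target;

    SDL_Quit();
    return 0;
}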

Also, for some reason GL_CreateRenderer always sets the SDL_RENDERER_TARGETTEXTURE info flag, even if render targets are not supported. Changeset 6e6bd53feff0 [1] made this change, but doesn't explain why. It seems to me that the code would already have taken care of this [2].

The attached patch adds the missing checks and stops SDL from reporting render target support when there is none. The application can then properly inform the user instead of just crashing.
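
For example, with the patch the application can check the renderer info up front (a sketch; the helper name CanUseRenderTargets is just illustrative):

#include "SDL.h"

/* Returns SDL_TRUE if the renderer advertises render target support. */
static SDL_bool CanUseRenderTargets(SDL_Renderer *renderer)
{
    SDL_RendererInfo info;
    if (SDL_GetRendererInfo(renderer, &info) != 0) {
        return SDL_FALSE;
    }
    return (info.flags & SDL_RENDERER_TARGETTEXTURE) ? SDL_TRUE : SDL_FALSE;
}

Alternatively, with the added checks, creating an SDL_TEXTUREACCESS_TARGET texture simply fails and SDL_GetError() reports "Render targets not supported by OpenGL", which the application can show to the user.
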
diff --git a/src/render/opengl/SDL_render_gl.c b/src/render/opengl/SDL_render_gl.c
index 6a4fa3e..ab831e7 100644
--- a/src/render/opengl/SDL_render_gl.c
+++ b/src/render/opengl/SDL_render_gl.c
@@ -450,7 +450,7 @@
     renderer->GL_BindTexture = GL_BindTexture;
     renderer->GL_UnbindTexture = GL_UnbindTexture;
     renderer->info = GL_RenderDriver.info;
-    renderer->info.flags = (SDL_RENDERER_ACCELERATED | SDL_RENDERER_TARGETTEXTURE);
+    renderer->info.flags = SDL_RENDERER_ACCELERATED;
     renderer->driverdata = data;
     renderer->window = window;
 
@@ -664,6 +664,11 @@
 
     GL_ActivateRenderer(renderer);
 
+    if (texture->access == SDL_TEXTUREACCESS_TARGET &&
+        !renderdata->GL_EXT_framebuffer_object_supported) {
+        return SDL_SetError("Render targets not supported by OpenGL");
+    }
+
     if (!convert_format(renderdata, texture->format, &internalFormat,
                         &format, &type)) {
         return SDL_SetError("Texture format %s not supported by OpenGL",
@@ -980,6 +985,10 @@
 
     GL_ActivateRenderer(renderer);
 
+    if (!data->GL_EXT_framebuffer_object_supported) {
+        return SDL_SetError("Render targets not supported by OpenGL");
+    }
+
     if (texture == NULL) {
         data->glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
         return 0;