Render to 3D Texture with OpenGL on OSX (multi-layer framebuffer attachment)

I have an OpenGL 3.2 Core context set up on OS X 10.7.5 and am trying to render to a 3D texture using a layered rendering approach. The geometry shader feature gl_Layer is supported, but I cannot attach a GL_TEXTURE_3D to my framebuffer: glCheckFramebufferStatus returns GL_FRAMEBUFFER_UNSUPPORTED.
This is the card and driver version in my MBP:
AMD Radeon HD 6770M 1024 MB - OpenGL 3.2 CORE (ATI-7.32.12)
As far as I know, this feature does not relate directly to a specific extension.
Does anybody know how to figure out whether this is unsupported by the driver or hardware?
Thanks so much.
Below is the code to reproduce the issue. I use GLFW to set up the context:
// Initialize GLFW
if (!glfwInit())
    throw "Failed to initialize GLFW";
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 2);
glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwOpenWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
// Open a window and create its OpenGL context
if (!glfwOpenWindow(720, 480, 8, 8, 8, 8, 24, 8, GLFW_WINDOW))
    throw "Failed to open GLFW window";
//
// ...
//
GLuint framebuffer, texture;
GLenum status;
glGenFramebuffers(1, &framebuffer);
// Set up the FBO with one texture attachment
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, framebuffer);
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_3D, texture);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGBA8, 256, 256, 256, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, texture, 0);
status = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE)
    throw status;
//
// status is GL_FRAMEBUFFER_UNSUPPORTED here !!!
//
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glDeleteTextures(1, &texture);
glDeleteFramebuffers(1, &framebuffer);
exit(1);

Does anybody know how to figure out whether this is unsupported by the driver or hardware?
It just told you. That's what GL_FRAMEBUFFER_UNSUPPORTED means: the driver is exercising its veto power over any framebuffer attachment combination it doesn't like, for any reason whatsoever.
There's not much you can do when this happens except try other things, such as rendering to a 2D array texture instead (see the sketch below).
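For reference, a minimal sketch of the 2D array texture fallback. I'm assuming the same 256x256x256 size as the repro above; a GL_TEXTURE_2D_ARRAY attachment is also layered, so gl_Layer in the geometry shader keeps working:
GLuint fbo, tex;
glGenFramebuffers(1, &fbo);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D_ARRAY, tex);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// Array textures are allocated with glTexImage3D; the depth is the layer count.
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_RGBA8, 256, 256, 256, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
// Attach all layers at once; the geometry shader routes primitives via gl_Layer.
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, tex, 0);
GLenum arrayStatus = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER);
// The driver may still veto this combination, so check the status again.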

Related

get OpenGL error GL_INVALID_OPERATION when call glDrawArrays on macOS

I have a problem with OpenGL on macOS. The following sample works fine on my Windows system, but on macOS the calls to glPushAttrib, glDrawArrays, and glPopAttrib each raise GL_INVALID_OPERATION. Any idea about this?
I have read some topics about this issue, but the suggestions don't seem to work on macOS, in contrast to the Windows system.
My Mac runs OS X El Capitan 10.11.6, with Xcode version 6.4 as the compiler.
//gen a texture
glGenTextures(1, &FilteredTexture);
glBindTexture(GL_TEXTURE_2D, FilteredTexture);
printf("Start glTexImage2D FilteredTexture...");
glTexImage2D(GL_TEXTURE_2D, 0, TexInterFormat, cols, rows, 0, TexFormat, DataType, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_2D, 0);
//gen a Renderbuffer
glGenRenderbuffers(1, &GlRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, GlRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, cols, rows);
glBindRenderbuffer(GL_RENDERBUFFER, 0);
//gen a Framebuffer & attach
glGenFramebuffers(1, &GlFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, GlFramebuffer);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, FilteredTexture, 0);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, GlRenderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
//gen shader program (ignore details here)
program = loadProgramFromFile("empty.vert", "depth.frag", "quad.geom");
//do the computation
glBindFramebuffer(GL_FRAMEBUFFER, GlFramebuffer);
const static GLuint attachment_bufferss[] = { static_cast<GLuint>(GL_COLOR_ATTACHMENT0) };
glDrawBuffers(1, attachment_bufferss);
glPushAttrib(GL_VIEWPORT_BIT); //get GL_INVALID_OPERATION here
glViewport(0, 0, cols, rows);
glClearColor(0, 0, 0, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(1);
glDrawArrays(GL_POINTS, 0, 1); //get GL_INVALID_OPERATION here
glDrawBuffers(1, attachment_bufferss);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glUseProgram(0);
glPopAttrib(); //get GL_INVALID_OPERATION here
glFinish();
full code can be downloaded here: https://1drv.ms/u/s!Au9Donvb28A9hgeor0v94Q5hKc0e
The errors occur because you're trying to use deprecated functions (glPushAttrib, glPopAttrib) that don't exist in the Core profile. To fix this, replace the line
glPushAttrib(GL_VIEWPORT_BIT);
with
GLint vp[4];
glGetIntegerv(GL_VIEWPORT, vp);
and
glPopAttrib();
with
glViewport(vp[0], vp[1], vp[2], vp[3]);
The code first copies your current viewport into an array, then restores the viewport from the array.
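Putting both replacements into your render pass gives something like the sketch below. (One more observation, not part of the fix above: glUseProgram(1) assumes your program object happens to have handle 1; passing the program variable returned by loadProgramFromFile is presumably what you intended, and an invalid handle there would also explain the glDrawArrays error.)
GLint vp[4];
glGetIntegerv(GL_VIEWPORT, vp);                 // save the current viewport
glBindFramebuffer(GL_FRAMEBUFFER, GlFramebuffer);
const GLenum attachments[] = { GL_COLOR_ATTACHMENT0 };
glDrawBuffers(1, attachments);
glViewport(0, 0, cols, rows);
glClearColor(0, 0, 0, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(program);                          // the handle from loadProgramFromFile
glDrawArrays(GL_POINTS, 0, 1);
glUseProgram(0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(vp[0], vp[1], vp[2], vp[3]);         // restore the saved viewport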

How to render Image from file using the Qt OpenGL API

I'm trying to render an image from a *.png file onto the screen using the Qt OpenGL API. My implementation creates an OpenGLWindow class, which is a subclass of QOpenGLWindow and QOpenGLFunctions, and overrides two methods: initializeGL() and paintGL().
My code is below:
class OpenGLWindow : public QOpenGLWindow, protected QOpenGLFunctions
{
    Q_OBJECT
public:
    OpenGLWindow();
protected:
    void initializeGL();
    void paintGL() Q_DECL_OVERRIDE;
};

OpenGLWindow::OpenGLWindow()
    : QOpenGLWindow(QOpenGLWindow::NoPartialUpdate)
{
}

void OpenGLWindow::initializeGL()
{
    initializeOpenGLFunctions();
}
void OpenGLWindow::paintGL()
{
    // Clear buffer with blue color
    glClearColor(0, 0, 1, 0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // Load image from file
    QImage image;
    if (!image.load(":/side1.png"))
    {
        qDebug() << "load image fail";
        return;
    }
    // Draw Texture
    glEnable(GL_TEXTURE_2D); // Enable texturing
    glActiveTexture(GL_TEXTURE1);
    GLuint textureID;
    glGenTextures(1, &textureID); // Obtain an id for the texture
    glBindTexture(GL_TEXTURE_2D, textureID); // Set as the current texture
    QImage tex = QGLWidget::convertToGLFormat(image);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tex.width(), tex.height(), 0, GL_RGBA, GL_UNSIGNED_BYTE, tex.bits());
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glDisable(GL_TEXTURE_2D);
}
The result is that the window only displays a blue screen and the image is not displayed. I verified that the image was loaded successfully into the QImage. Have I done something wrong, or am I missing something? Can somebody help me?

Puzzles about glTexImage2D using GL_LUMINANCE

I ran into a problem using GL_LUMINANCE to define an FBO. Here is the call I used:
generateRenderToTexture(GL_RGBA, GL_RGBA, GL_UNSIGNED_BYTE, _maskTexture, _imageWidth, _imageHeight, false);
The related code is as follows:
TextureBuffer _maskTexture;
class TextureBuffer {
public:
    GLuint texture;
    GLuint frameBuffer;
    GLenum internalformat;
    GLenum format;
    GLenum type;
    int w, h;
    TextureBuffer() : texture(0), frameBuffer(0) {}
    void release()
    {
        if (texture)
        {
            glDeleteTextures(1, &texture);
            texture = 0;
        }
        if (frameBuffer)
        {
            glDeleteFramebuffers(1, &frameBuffer);
            frameBuffer = 0;
        }
    }
};
void generateRenderToTexture(GLint internalformat, GLenum format, GLenum type,
                             TextureBuffer &tb, int w, int h, bool linearInterp)
{
    glGenTextures(1, &tb.texture);
    glBindTexture(GL_TEXTURE_2D, tb.texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, linearInterp ? GL_LINEAR : GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, linearInterp ? GL_LINEAR : GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, internalformat, w, h, 0, format, type, NULL);
    glGenFramebuffers(1, &tb.frameBuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, tb.frameBuffer);
    glClear(_glClearBits);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tb.texture, 0);
    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (status != GL_FRAMEBUFFER_COMPLETE)
        printf("Framebuffer status: %x", (int)status);
    tb.internalformat = internalformat;
    tb.format = format;
    tb.type = type;
    tb.w = w;
    tb.h = h;
}
The question is this: when I use
generateRenderToTexture(GL_RGBA, GL_RGBA, GL_UNSIGNED_BYTE, _maskTexture, _imageWidth, _imageHeight, false);
the code works well. But if I use GL_LUMINANCE instead,
generateRenderToTexture(GL_LUMINANCE, GL_LUMINANCE, GL_UNSIGNED_BYTE, _maskTexture, _imageWidthOriginal,
I don't know why I can't use GL_LUMINANCE to define the FBO. Does anyone have useful suggestions to solve this?
The only formats that are guaranteed to work as color FBO attachments in ES 2.0 are, according to table 4.5 in the spec document:
GL_RGBA4
GL_RGB5_A1
GL_RGB565
Support for rendering to GL_RGBA, which works for you, is not required by the standard. Many implementations support it, though. The OES_rgb8_rgba8 extension adds support for GL_RGB8 and GL_RGBA8 formats as render targets.
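If you want to rely on that extension, a sketch of a run-time check (assuming an ES 2.0 context, where glGetString(GL_EXTENSIONS) returns a space-separated list; strstr needs <string.h>):
const char *ext = (const char *)glGetString(GL_EXTENSIONS);
if (ext != NULL && strstr(ext, "GL_OES_rgb8_rgba8") != NULL)
{
    // Safe to use GL_RGBA8_OES / GL_RGB8_OES as renderbuffer storage here.
}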
GL_LUMINANCE is not supported as a color-renderable format by the standard, and I can't find an extension for it either. It's possible that some implementations could support it, but you certainly can't count on it.
ES 3.0 lists GL_R8 as a color-renderable format. In ES 3.0, the RED/R formats replace the LUMINANCE/ALPHA formats from ES 2.0. So if you can move to ES 3.0, you have support for rendering to one-component texture formats, which with your helper is a one-line change, sketched below.
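A sketch of that call, assuming an ES 3.0 context (GL_R8 is the sized internal format, GL_RED the matching pixel format):
generateRenderToTexture(GL_R8, GL_RED, GL_UNSIGNED_BYTE, _maskTexture, _imageWidth, _imageHeight, false);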
You're using the non-extension FBO functions, which were introduced only with OpenGL 3. So unlike the FBO extension functions (the ones ending in EXT), these functions are available only with an OpenGL 3 context. In OpenGL 3 the GL_LUMINANCE and GL_ALPHA internal texture formats are deprecated and are not available in the core profile; they have been replaced by the GL_RED texture format. You can use an appropriately written shader, or texture swizzle parameters, to make a GL_RED texture work just like GL_LUMINANCE (swizzle dst.rgb = texture.r) or GL_ALPHA (swizzle dst.a = texture.r), as sketched below.
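A sketch of the swizzle route on desktop GL (texture swizzle is core since OpenGL 3.3, or available via the ARB_texture_swizzle extension):
// One-channel texture: GL_RED instead of the removed GL_LUMINANCE.
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, w, h, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
// Sampling now behaves like GL_LUMINANCE: rgb comes from .r, alpha is 1.
GLint swizzle[4] = { GL_RED, GL_RED, GL_RED, GL_ONE };
glTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_SWIZZLE_RGBA, swizzle);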
I have solved it by using GL_RG_EXT or GL_RED_EXT instead.

Render to texture using a frame buffer and GLKit's GLKBaseEffect

I'm trying to draw to a texture (and then use that texture on an object) with GLKit, but I'm receiving GL ERROR: 0x0502, which means GL_INVALID_OPERATION. The thing is, the error is fired somewhere inside the effect's prepareToDraw method. The vertex arrays seem to be set up correctly, since I can draw to the default framebuffer with no problem using the same setup. Is there something I'm missing?
GLint defaultFBO;
glGetIntegerv(GL_FRAMEBUFFER_BINDING, &defaultFBO);
GLenum status;
glBindFramebuffer(GL_FRAMEBUFFER, _boundsTextureFramebuffer);
glBindTexture(GL_TEXTURE_2D, self.backgroundTexture.name);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8_OES, 1024, 1024, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, self.backgroundTexture.name, 0);
status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE)
{
    NSLog(@"Failed to initialize the texture framebuffer");
}
effect.constantColor = [self.color vectorValue];
[effect prepareToDraw];
glBindVertexArrayOES(self.vertexArray);
glDrawElements(_data.mode, (GLsizei)self.data.indicesCount, GL_UNSIGNED_INT, (void*)0);
glBindVertexArrayOES(0);
glBindFramebuffer(GL_FRAMEBUFFER, (GLuint)defaultFBO);
You're missing one important thing from the code you've posted: the render buffer. Adding one might solve your problem. Here is an example off the top of my head:
GLint defaultFBO;
glGetIntegerv(GL_FRAMEBUFFER_BINDING, &defaultFBO);
GLint defaultRBO;
glGetIntegerv(GL_RENDERBUFFER_BINDING, &defaultRBO);
glGenFramebuffers(1, &_boundsTextureFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _boundsTextureFramebuffer);
glGenRenderbuffers(1, &_boundsTextureRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _boundsTextureRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, width, height);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _boundsTextureRenderbuffer);
...
glBindFramebuffer(GL_FRAMEBUFFER, (GLuint)defaultFBO);
glBindRenderbuffer(GL_RENDERBUFFER, (GLuint)defaultRBO);
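Note that the renderbuffer above backs the color attachment. If you instead keep your texture on GL_COLOR_ATTACHMENT0 and only need depth testing, a depth renderbuffer is the variant to try; a sketch using your identifiers, with GL_DEPTH_COMPONENT16 as the ES 2.0-safe depth format:
glGenRenderbuffers(1, &_boundsTextureRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _boundsTextureRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, 1024, 1024);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _boundsTextureRenderbuffer);
// The 1024x1024 texture stays attached as the color target:
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, self.backgroundTexture.name, 0);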

Opengl ES 2.0, FBOs, and Fragment Shaders

Here's my render loop:
1. Bind a custom FBO.
2. Bind a texture (previously associated with the FBO as COLOR_ATTACHMENT0).
3. Render using a custom fragment shader which chooses fragment colors on the basis of a fractal algorithm. (If a fragment is not used by the algorithm, it is assigned the color black.)
4. Rebind the window-provided framebuffer and renderbuffer (on iOS 5, this is the [view bindDrawable] method).
5. Clear the screen to white.
6. Render the FBO texture into a frame that is substantially smaller than the window itself.
Expected result:
The fractal should appear in the smaller frame. The frame should have a black background. The rest of the screen should be white.
Current result:
The entire screen is taken up by the fractal, as if I were rendering both to the window-provided FBO and to my custom FBO/texture.
I don't really know what I'm doing wrong, so I'd be grateful for any help.
EDIT:
Fragment Shader:
void main()
{
    highp vec2 pix = gl_FragCoord.xy;
    lowp vec4 color = vec4(0.0, 0.0, 0.0, 0.0);
    // determine if fragment is part of the fractal using the algorithm
    // if yes, change the value of the color vec4
    gl_FragColor = color;
}
FBO Initialization:
//First create and bind the frame buffer
glGenFramebuffers(1, &frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
//Create a texture
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
//Attach the texture to the framebuffer
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
    printf("\n Incomplete Frame Buffer!");
}
Render Loop:
{
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, texture);
    glViewport(0, 0, width, height);
    // Call the fractal render: basically a plane of 4 vertices drawn with GL_TRIANGLE_STRIP, which invokes the fragment shader above.
    [self.view bindDrawable];
    glClearColor(1.0, 1.0, 1.0, 1.0);
    matrix4x4 mvp = multiplyMatrices(projectionMatrix, modelView);
    glUseProgram(shaderId);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(positionLocation, 4, GL_FLOAT, GL_FALSE, 0, NULL);
    glBindBuffer(GL_ARRAY_BUFFER, texelBuffer);
    glVertexAttribPointer(texelLocation, 2, GL_FLOAT, GL_FALSE, 0, NULL);
    glUniformMatrix4fv(mvpLocation, 1, 0, mvp.val);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureId);
    glUniform1i(textureLocation, 0);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, tVertices);
}
I hope this helps. Please let me know if you'd like any more information on what I'm doing.
I also noticed something very strange happening.
If, after binding the FBO, I try
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(1.0,0.0,0.0,0.5);
...it is the window-provided framebuffer on which the clear actually happens.
Thank you guys for helping.
