Here's my render loop:
1. Bind a custom FBO.
2. Bind a texture (previously attached to the FBO as GL_COLOR_ATTACHMENT0).
3. Render using a custom fragment shader that chooses fragment colors based on a fractal algorithm. (If a fragment is not used by the algorithm, it is assigned the color black.)
4. Rebind the window-provided framebuffer and renderbuffer (on iOS 5, this is the [view bindDrawable] method).
5. Clear the screen to white.
6. Render the FBO texture into a frame that is substantially smaller than the window itself.
Expected result:
The fractal should appear in the smaller frame. The frame should have a black background. The rest of the screen should be white.
Current result:
The entire screen is taken up by the fractal, as if I were rendering both to the window-provided FBO and to my custom FBO/texture.
I don't really know what I'm doing wrong, so I'd be grateful for any help.
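To be concrete, here's the flow I'm aiming for (a rough sketch, not my actual code; the names match the code below):

// Pass 1: render the fractal into the FBO's texture
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
glViewport(0, 0, width, height); // texture dimensions
// ... draw the fractal quad with the fractal shader ...

// Pass 2: draw that texture into a small frame on screen
[self.view bindDrawable]; // back to the window framebuffer
glClearColor(1.0, 1.0, 1.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT); // white background
// ... draw a textured quad, scaled down by the model-view matrix ...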
EDIT:
Fragment Shader:
void main()
{
    highp vec2 pix = gl_FragCoord.xy;
    lowp vec4 color = vec4(0.0, 0.0, 0.0, 0.0);
    // Determine whether the fragment is part of the fractal using the algorithm;
    // if yes, change the value of the color vec4.
    gl_FragColor = color;
}
FBO Initialization:
//First create and bind the frame buffer
glGenFramebuffers(1, &frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
//Create a texture
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
//Attach the texture to the framebuffer
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
printf("\n Incomplete Frame Buffer!");
}
Render Loop:
{
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, texture);
    glViewport(0, 0, width, height);

    // Fractal render: a plane of 4 vertices drawn as a GL_TRIANGLE_STRIP,
    // shaded by the fragment shader above.

    [self.view bindDrawable];
    glClearColor(1.0, 1.0, 1.0, 1.0);

    matrix4x4 mvp = multiplyMatrices(projectionMatrix, modelView);
    glUseProgram(shaderId);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(positionLocation, 4, GL_FLOAT, GL_FALSE, 0, NULL);
    glBindBuffer(GL_ARRAY_BUFFER, texelBuffer);
    glVertexAttribPointer(texelLocation, 2, GL_FLOAT, GL_FALSE, 0, NULL);
    glUniformMatrix4fv(mvpLocation, 1, 0, mvp.val);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureId);
    glUniform1i(textureLocation, 0);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, tVertices);
}
I hope this helps. Please let me know if you'd like any more information on what I'm doing.
I also noticed something very strange happening.
If, after binding the FBO, I try
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(1.0,0.0,0.0,0.5);
...it is the window-provided framebuffer on which the clear actually happens.
Thank you guys for helping.
Related
I've got a problem with OpenGL on macOS. The following sample works fine on my Windows system, but on macOS the calls to glPushAttrib, glDrawArrays, and glPopAttrib each fail with GL_INVALID_OPERATION. Any idea what's going on?
I've read some topics about this issue, but nothing seems to work on macOS, in contrast with the Windows system.
My Mac runs OS X El Capitan 10.11.6, with Xcode version 6.4 as the compiler.
//gen a texture
glGenTextures(1, &FilteredTexture);
glBindTexture(GL_TEXTURE_2D, FilteredTexture);
printf("Start glTexImage2D FilteredTexture...");
glTexImage2D(GL_TEXTURE_2D, 0, TexInterFormat, cols, rows, 0, TexFormat, DataType, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_2D, 0);
//gen a Renderbuffer
glGenRenderbuffers(1, &GlRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, GlRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, cols, rows);
glBindRenderbuffer(GL_RENDERBUFFER, 0);
//gen a Framebuffer & attach
glGenFramebuffers(1, &GlFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, GlFramebuffer);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, FilteredTexture, 0);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, GlRenderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
// Generate the shader program (details omitted here)
program = loadProgramFromFile("empty.vert", "depth.frag", "quad.geom");
//do the computation
glBindFramebuffer(GL_FRAMEBUFFER, GlFramebuffer);
const static GLuint attachment_buffers[] = { static_cast<GLuint>(GL_COLOR_ATTACHMENT0) };
glDrawBuffers(1, attachment_buffers);
glPushAttrib(GL_VIEWPORT_BIT); //get GL_INVALID_OPERATION here
glViewport(0, 0, cols, rows);
glClearColor(0, 0, 0, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(1);
glDrawArrays(GL_POINTS, 0, 1); //get GL_INVALID_OPERATION here
glDrawBuffers(1, attachment_buffers);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glUseProgram(0);
glPopAttrib(); //get GL_INVALID_OPERATION here
glFinish();
full code can be downloaded here: https://1drv.ms/u/s!Au9Donvb28A9hgeor0v94Q5hKc0e
The glPushAttrib and glPopAttrib errors occur because you're trying to use deprecated functions in a Core profile context. To fix this, replace the line
glPushAttrib(GL_VIEWPORT_BIT);
with
GLint vp[4];
glGetIntegerv(GL_VIEWPORT, vp);
and
glPopAttrib();
with
glViewport(vp[0], vp[1], vp[2], vp[3]);
The code first copies your current viewport into an array, then restores the viewport from the array.
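Putting it together, a Core-profile-safe version of that section looks roughly like this (a sketch reusing the question's names, not tested code):

GLint vp[4];
glGetIntegerv(GL_VIEWPORT, vp); // save the current viewport
glBindFramebuffer(GL_FRAMEBUFFER, GlFramebuffer);
glViewport(0, 0, cols, rows); // viewport for the FBO pass
// ... clear and draw ...
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(vp[0], vp[1], vp[2], vp[3]); // restore the saved viewport

Note this only explains the glPushAttrib/glPopAttrib errors. The GL_INVALID_OPERATION from glDrawArrays is most likely a separate Core-profile issue: drawing with no vertex array object bound is invalid in Core profile, and glUseProgram(1) assumes the program handle happens to be 1, so glUseProgram(program) would be safer.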
I have been trying to get shadow maps to work, but I can't even get past the depth-buffer part of it. I've been searching the internet for hours now. Here is my code so far:
Making the FBO and the texture for depth:
GLuint framebuffer;
GLuint depth;
GLuint texture;
glGenFramebuffers(1, &framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 1280, 800, 0, GL_RGBA, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_2D, 0);
glGenTextures(1, &depth);
glBindTexture(GL_TEXTURE_2D, depth);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, 1280, 800, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_2D, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, depth, 0);
//glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
glDrawBuffer(GL_NONE);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    std::cout << "There was a problem with making the framebuffer\n";
glBindFramebuffer(GL_FRAMEBUFFER, 0);
return framebuffer;
Rendering the 'depth' to a quad for viewing:
glBindFramebuffer(GL_FRAMEBUFFER, FBO);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
for (int i = 0; i < objects.size(); i++)
{
    objects[i].renderObjectForDepth();
}
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, texture);
drawQuad();
And to me, everything looks fine, but it generates images like the picture attached. I've been ripping my hair out for days now, and any help would be appreciated.
EDIT: It was because I was rendering a blank texture, i.e. the color texture that never got written to. After drawing the depth texture and writing the color texture, it works fine. But how do I do it without a color texture?
Check that the following two states are set when rendering to your depth-only FBO:
glDrawBuffer(GL_NONE);
glReadBuffer(GL_NONE);
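For context, a depth-only FBO setup would include both calls right after attaching the depth texture, roughly like this (a sketch using the question's variable names):

glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, depth, 0);
glDrawBuffer(GL_NONE); // no color buffer is drawn to...
glReadBuffer(GL_NONE); // ...and no color buffer is read from
// glCheckFramebufferStatus(GL_FRAMEBUFFER) should now return GL_FRAMEBUFFER_COMPLETE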
I'm trying to draw to a texture (and then use that texture on an object) with GLKit, but I'm receiving GL ERROR: 0x0502, which is GL_INVALID_OPERATION. The thing is, the error is fired somewhere inside the effect's prepareToDraw method. The vertex arrays seem to be set up correctly, since I can draw on the default framebuffer with no problem using the same setup. Is there something I'm missing?
GLint defaultFBO;
glGetIntegerv(GL_FRAMEBUFFER_BINDING, &defaultFBO);
GLenum status;
glBindFramebuffer(GL_FRAMEBUFFER, _boundsTextureFramebuffer);
glBindTexture(GL_TEXTURE_2D, self.backgroundTexture.name);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8_OES, 1024, 1024, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, self.backgroundTexture.name, 0);
status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE)
{
    NSLog(@"Failed to initialize the texture framebuffer");
}
effect.constantColor = [self.color vectorValue];
[effect prepareToDraw];
glBindVertexArrayOES(self.vertexArray);
glDrawElements(_data.mode, (GLsizei)self.data.indicesCount, GL_UNSIGNED_INT, (void*)0);
glBindVertexArrayOES(0);
glBindFramebuffer(GL_FRAMEBUFFER, (GLuint)defaultFBO);
You're missing one important thing from the code you've posted: the render buffer. Adding one might solve your problem. Here is an example off the top of my head:
GLint defaultFBO;
glGetIntegerv(GL_FRAMEBUFFER_BINDING, &defaultFBO);
GLint defaultRBO;
glGetIntegerv(GL_RENDERBUFFER_BINDING, &defaultRBO);
glGenFramebuffers(1, &_boundsTextureFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _boundsTextureFramebuffer);
glGenRenderbuffers(1, &_boundsTextureRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _boundsTextureRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, width, height);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _boundsTextureRenderbuffer);
...
glBindFramebuffer(GL_FRAMEBUFFER, (GLuint)defaultFBO);
glBindRenderbuffer(GL_RENDERBUFFER, (GLuint)defaultRBO);
I am attempting to get my first shadow map up and running (from here), but I have run into a very strange problem. This is the code that causes the error:
@interface HSOpenGLView () {
    GLuint shadowFramebuffer;
    GLuint shadowTexture;
}
@end

@implementation HSOpenGLView
- (void) drawRect:(NSRect)dirtyRect {
}
#pragma mark - Init
- (void) prepareOpenGL {
    [super prepareOpenGL];

    glEnable(GL_TEXTURE_2D);

    glGenFramebuffers(1, &shadowFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, shadowFramebuffer);

    // Depth texture. Slower than a depth buffer, but you can sample it later in your shader.
    glGenTextures(1, &shadowTexture);
    glBindTexture(GL_TEXTURE_2D, shadowTexture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT16, 1024, 1024, 0, GL_DEPTH_COMPONENT, GL_FLOAT, 0);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, shadowTexture, 0);
    //glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, shadowTexture);

    glDrawBuffer(GL_NONE); // No color buffer is drawn to.

    // Always check that our framebuffer is ok
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        NSLog(@"Failed to create the shadow framebuffer! %u", glGetError());
    }
}
This class is a subclass of 'NSOpenGLView'. When it prints the error number it turns out to be '0'. What could possibly be causing it?
Add a call to glReadBuffer(GL_NONE) after your call to glDrawBuffer(GL_NONE). You need to indicate that your FBO doesn't have a color buffer source for read operations too.
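Applied to the code above, that would be (sketch):

glDrawBuffer(GL_NONE); // no color buffer is drawn to...
glReadBuffer(GL_NONE); // ...and no color buffer is read from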
I'm using C++ with Marmalade to make a game for iOS, but sometimes textures render corrupted.
Here is the source texture file:
Example of a corrupted texture:
I'm loading the texture with this code:
VGTexture2D* VGTextureLoader::loadImage(std::string imagefile)
{
    CIwImage img;
    img.LoadFromFile(imagefile.c_str());

    // Convert to an OpenGL ES native format
    CIwImage nativeImg;
    nativeImg.SetFormat(CIwImage::ABGR_8888);
    img.ConvertToImage(&nativeImg);

    // Generate texture object
    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);

    // Upload
    uint32 width = img.GetWidth();
    uint32 height = img.GetHeight();
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, nativeImg.GetTexels());
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Create and return texture
    VGTexture2D* tex = new VGTexture2D(texture, (float)width, (float)height);
    return tex;
}
Your textures aren't being corrupted, but the channels do seem to be flipped. Could it be because you are converting the image to ABGR_8888 but then uploading it as GL_RGBA?
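If so, converting to an RGBA byte order before uploading should fix it. Assuming CIwImage also exposes an RGBA_8888 format (an assumption on my part; check the Marmalade headers), the change would be roughly:

// Convert to a byte order that matches GL_RGBA / GL_UNSIGNED_BYTE
nativeImg.SetFormat(CIwImage::RGBA_8888); // hypothetical format name; verify against CIwImage
img.ConvertToImage(&nativeImg);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, nativeImg.GetTexels());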