Cutting out part of a texture - Xcode

I'm writing for iPhone with OpenGL ES.
How can I draw a quad with only part of a particular texture?
Right now I'm rendering like this:
static const GLfloat texCoords[] = {
    0.0, 0.0,
    10.0, 0.0,
    10.0, 10.0,
    0.0, 10.0
};
glBindTexture(GL_TEXTURE_2D, atlas_tex[0]);
GLfloat squareVertices[] = {
    0.0f, 0.0f,
    2000.5f, 0.0f,
    2000.5f, 2000.33f,
    0.0f, 2000.33f
};
glVertexPointer(2, GL_FLOAT, 0, squareVertices);
glEnableClientState(GL_VERTEX_ARRAY);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
But of course, that displays the whole texture. I don't know how to cut out just a part of it.

If I understood you correctly, you need to change the texCoords array accordingly.
For example, try something like this (it selects one quarter of the texture):
static const GLfloat texCoords[] = {
    0.0, 0.0,
    0.5, 0.0,
    0.5, 0.5,
    0.0, 0.5
};
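More generally, texture coordinates run from 0.0 to 1.0 across the whole texture, so you can select any sub-rectangle of an atlas by dividing pixel coordinates by the atlas dimensions. A minimal sketch, where the atlas size and the sub-rectangle are made-up example values:

// hypothetical 512x512 atlas and a 32x32 sub-rectangle at pixel (64, 128)
const GLfloat atlasSize = 512.0f;
const GLfloat x = 64.0f, y = 128.0f, w = 32.0f, h = 32.0f;
// normalize the pixel rectangle into [0, 1] texture space,
// in the same fan order as the squareVertices above
const GLfloat texCoords[] = {
    x / atlasSize,       y / atlasSize,
    (x + w) / atlasSize, y / atlasSize,
    (x + w) / atlasSize, (y + h) / atlasSize,
    x / atlasSize,       (y + h) / atlasSize
};
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);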

Related

OpenGL GLSL, stuck viewing pyramid from "behind"

It appears that my pyramid is smaller in the front and bigger in the back.
-(void)drawRect:(NSRect)dirtyRect
{
    // get the dimensions of the window
    NSSize dim = [self frame].size;
    // clear the background with color
    glClearColor(0.0, 0.0, 0.0, 0.4);
    glViewport(0, 0, dim.width, dim.height);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // pyramid position data: apex followed by the four base corners
    GLfloat vertices[] = {
          0.0, 100.0,   0.0,
        -50.0,   0.0,  50.0,
         50.0,   0.0,  50.0,
         50.0,   0.0, -50.0,
        -50.0,   0.0, -50.0
    };
    // pyramid index data: four sides plus the two-triangle base
    GLubyte indices[] = {0, 1, 2, 0, 2, 3, 0, 3, 4, 0, 4, 1, 1, 4, 3, 1, 3, 2};
    // pyramid color data, one RGBA color per vertex
    GLfloat color[] = {
        0.0, 1.0, 0.0, 1.0,
        0.0, 0.3, 0.8, 1.0,
        1.0, 0.0, 0.0, 1.0,
        0.5, 0.0, 0.8, 1.0,
        0.5, 0.6, 0.3, 1.0
    };
    // bind each array of data to separate buffers
    // bind pyramid position data to the first buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffers[0]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    // bind the pyramid color data to the second buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffers[1]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(color), color, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    // enable the shader program
    GLuint programID = [self loadShaders];
    glUseProgram(programID);
    // enable vertex attributes
    // enable pyramid position attributes
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffers[0]);
    glEnableVertexAttribArray(VERTEX_POS_INDEX);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    // enable pyramid color attributes
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffers[1]);
    glEnableVertexAttribArray(VERTEX_COLOR_INDEX);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    // point to the enabled attribute data
    glVertexAttribPointer(VERTEX_POS_INDEX, VERTEX_POS_SIZE, GL_FLOAT, GL_FALSE, 0, vertices);
    glVertexAttribPointer(VERTEX_COLOR_INDEX, VERTEX_COLOR_SIZE, GL_FLOAT, GL_FALSE, 0, color);
    GLfloat offset[] = {0.0, 0.0, -200.0};
    GLint offsetUnif = glGetUniformLocation(programID, "offset");
    GLint zNearUnif = glGetUniformLocation(programID, "zNear");
    GLint zFarUnif = glGetUniformLocation(programID, "zFar");
    GLint frustumScaleUnif = glGetUniformLocation(programID, "frustumScale");
    glUniform3fv(offsetUnif, 1, offset);
    glUniform1f(frustumScaleUnif, 1.0f);
    glUniform1f(zNearUnif, 0.1);
    glUniform1f(zFarUnif, 1000.0);
    // draw the elements
    glDrawElements(GL_TRIANGLES, sizeof(indices)/sizeof(GLubyte), GL_UNSIGNED_BYTE, indices);
    glDisableVertexAttribArray(VERTEX_POS_INDEX);
    glDisableVertexAttribArray(VERTEX_COLOR_INDEX);
    glUseProgram(0);
    // flush buffer
    glFlush();
    [[self openGLContext] flushBuffer];
}
Here is the vertex shader:
#version 120
attribute vec3 position;
attribute vec4 inColor;
uniform vec3 offset;
uniform float zNear;
uniform float zFar;
uniform float frustumScale;
varying vec4 outColor;
void main()
{
    vec4 cameraPos = vec4(position.x, position.y, -position.z, 1.0) + vec4(offset.x, offset.y, offset.z, 0.0);
    vec4 clipPos;
    clipPos.xy = cameraPos.xy * frustumScale;
    clipPos.z = cameraPos.z * (zNear + zFar) / (zNear - zFar);
    clipPos.z += 2 * zNear * zFar / (zNear - zFar);
    clipPos.w = -cameraPos.z;
    gl_Position = clipPos;
    outColor = inColor;
}
When I run the code I get this image; note that it looks as though the pyramid is bigger in the back than in the front:
I circled the "front" corners (though I should be seeing this from the other side, so they are actually the rear corners). Any thoughts or suggestions as to where the problem may lie?
It seems that you don't apply a perspective projection matrix within your shader; the missing view angle/depth makes it look like a sort of orthographic projection.
The matrices you'd want to apply within the shader are gl_ModelViewMatrix (object & camera transform), gl_ProjectionMatrix (projection), or gl_ModelViewProjectionMatrix (object & camera transform & projection). As these built-in uniforms are deprecated, you need to pass them in yourself.
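Since the built-ins are deprecated, one option consistent with the shader above is to build the projection on the CPU and upload it as a single mat4 uniform in place of the separate zNear/zFar/frustumScale uniforms. A minimal sketch, assuming the shader declares a hypothetical uniform mat4 projection and sets gl_Position = projection * cameraPos:

// build a column-major perspective matrix reproducing the shader's math,
// then hand it to a hypothetical "projection" mat4 uniform
GLfloat proj[16] = {0};
GLfloat zNear = 0.1f, zFar = 1000.0f, frustumScale = 1.0f;
proj[0]  = frustumScale;                          // x scale
proj[5]  = frustumScale;                          // y scale
proj[10] = (zNear + zFar) / (zNear - zFar);       // z remap
proj[14] = 2.0f * zNear * zFar / (zNear - zFar);  // z translation
proj[11] = -1.0f;                                 // copy -z into w

GLint projUnif = glGetUniformLocation(programID, "projection");
glUniformMatrix4fv(projUnif, 1, GL_FALSE, proj);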

OpenGL: set image in a texture

Here is my code:
-(void) mergeWithImage:(UIImage*)image {
    if (image == nil) {
        return;
    }
    glPushMatrix();
    glColor4f(256, 256, 256, 1.0);
    glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
    glGenTextures(1, &stampTexture);
    glBindTexture(GL_TEXTURE_2D, stampTexture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    GLuint imgwidth = CGImageGetWidth(image.CGImage);
    GLuint imgheight = CGImageGetHeight(image.CGImage);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    void *imageData = malloc(imgheight * imgwidth * 4);
    CGContextRef context2 = CGBitmapContextCreate(imageData, imgwidth, imgheight, 8, 4 * imgwidth, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGContextTranslateCTM(context2, 0, imgheight);
    CGContextScaleCTM(context2, 1.0, -1.0);
    CGColorSpaceRelease(colorSpace);
    CGContextClearRect(context2, CGRectMake(0, 0, imgwidth, imgheight));
    CGContextTranslateCTM(context2, 0, imgheight - imgheight);
    CGContextDrawImage(context2, CGRectMake(0, 0, imgwidth, imgheight), image.CGImage);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imgwidth, imgheight, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
    CGContextRelease(context2);
    free(imageData);
    static const GLfloat texCoords[] = {
        0.0, 1.0,
        1.0, 1.0,
        0.0, 0.0,
        1.0, 0.0
    };
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    /*
    These arrays would need to be changed if the size of the paintview changes. You must make sure that all image input is 64x64, 256x256, 512x512 or 1024x1024. Here we are using 512, but you can use 1024 with the numbers:
    {
        0.0, height, 0.0,
        1024, height, 0.0,
        0.0, height-1024, 0.0,
        1024, height-1024, 0.0
    }
    */
    static const GLfloat vertices[] = {
        0.0, 1024, 0.0,
        1024, 1024, 0.0,
        0.0, 0, 0.0,
        1024, 0, 0.0
    };
    static const GLfloat normals[] = {
        0.0, 0.0, 1024,
        0.0, 0.0, 1024,
        0.0, 0.0, 1024,
        0.0, 0.0, 1024
    };
    glBindTexture(GL_TEXTURE_2D, stampTexture);
    glVertexPointer(3, GL_FLOAT, 0, vertices);
    glNormalPointer(GL_FLOAT, 0, normals);
    glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glPopMatrix();
    glDeleteTextures(1, &stampTexture);
    // set back the brush
    glBindTexture(GL_TEXTURE_2D, brushTexture);
    glColor4f(lastSetRed, lastSetGreen, lastSetBlue, 1.0);
    // Display the buffer
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
    [context presentRenderbuffer:GL_RENDERBUFFER_OES];
It works fine if my image is 1024x1024, but if I have an image sized 1024x768, what values should I assign to the vertices and normals?
what values should I assign to the vertices and normals?
Those don't matter, as they don't (directly) interfere with the texture coordinates. By the way, normals should always be unit length, and you only need them at all if you apply lighting.
Texture coordinates for regular textures are always in the range [0, 1], no matter what the aspect ratio of your image is. The vertex positions should be chosen in accordance with the projection you use. You could, for example, use a
glOrtho(0, texture_width, 0, texture_height, …)
projection, and then your vertices would span {0, texture_width}×{0, texture_height} accordingly. There's no single definitive answer to your problem.
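To make that concrete for the 1024x768 case in the question: under a pixel-based orthographic projection, the texture coordinates stay at the full [0, 1] range and only the quad is sized to the image. A minimal sketch, assuming the same triangle-strip ordering as the code above (the width/height values here are just the question's example):

GLfloat w = 1024.0f, h = 768.0f;  // the image's dimensions (e.g. imgwidth/imgheight from above)
const GLfloat vertices[] = {
    0.0f, h,    0.0f,   // top left
    w,    h,    0.0f,   // top right
    0.0f, 0.0f, 0.0f,   // bottom left
    w,    0.0f, 0.0f    // bottom right
};
// normals (if used at all) should be unit length: {0, 0, 1} for each vertex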
Add these lines when you bind your texture:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); // IMPORTANT FOR NON-POWER-OF-2 TEXTURES
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

Using glDrawElements on OS X (OpenGL 3.2 core profile)

I'm trying to port some OpenGL 3.2 code from Windows to OS X 10.8 (using a GLFW core-profile context), but I get an INVALID_OPERATION error (via glGetError()) when I call glDrawElements. The glDrawArrays function works fine, so my shaders are initialized correctly.
The following snippet shows what I am doing. Any idea what I'm doing wrong?
struct Vertex {
    vec2 position;
    vec3 color;
};

void display() {
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(shaderProgram);
    mat4 projection = Ortho2D(-15.0f, 15.0f, -15.0f, 15.0f);
    glUniformMatrix4fv(projectionUniform, 1, GL_TRUE, projection);
    glBindVertexArray(shapeVertexArrayBuffer);
    mat4 modelView;
    // upper left
    modelView = Translate(-7, +7, 0);
    glUniformMatrix4fv(modelViewUniform, 1, GL_TRUE, modelView);
    glDrawArrays(GL_TRIANGLE_FAN, 0, rectangleSize); // renders correctly
    // upper right
    modelView = Translate(+7, +7, 0);
    glUniformMatrix4fv(modelViewUniform, 1, GL_TRUE, modelView);
    GLuint indices[6] = {0, 1, 2, 3, 4, 0};
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, indices); // INVALID_OPERATION
    glfwSwapBuffers();
}

void loadGeometry() {
    vec3 color(1.0f, 1.0f, 0.0f);
    Vertex rectangleData[rectangleSize] = {
        { vec2( 0.0,  0.0), color },
        { vec2( 5.0, -5.0), color },
        { vec2( 5.0,  0.0), color },
        { vec2( 0.0,  5.0), color },
        { vec2(-5.0,  0.0), color },
        { vec2(-5.0, -5.0), color }
    };
    shapeVertexArrayBuffer = loadBufferData(rectangleData, rectangleSize);
}

GLuint loadBufferData(Vertex* vertices, int vertexCount) {
    GLuint vertexArrayObject;
    glGenVertexArrays(1, &vertexArrayObject);
    glBindVertexArray(vertexArrayObject);
    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, vertexCount * sizeof(Vertex), vertices, GL_STATIC_DRAW);
    glEnableVertexAttribArray(positionAttribute);
    glEnableVertexAttribArray(colorAttribute);
    glVertexAttribPointer(positionAttribute, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *)0);
    glVertexAttribPointer(colorAttribute, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *)sizeof(vec2));
    return vertexArrayObject;
}
You are supposed to create an index buffer alongside your vertex buffer; the core profile does not allow client-side index arrays, so the indices have to live in a GL_ELEMENT_ARRAY_BUFFER:
GLuint elementBuffer;
glGenBuffers(1, &elementBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
Then you can call glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); note that the last argument is now a byte offset into the bound element buffer rather than a client-memory pointer.
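A detail that often bites on this exact setup: a VAO records the GL_ELEMENT_ARRAY_BUFFER binding, so a natural place to create the index buffer is inside loadBufferData while the VAO is still bound; glDrawElements then finds the indices automatically. A sketch of the reworked function, with the index array moved out of display() purely for illustration:

GLuint loadBufferData(Vertex* vertices, int vertexCount) {
    GLuint vertexArrayObject;
    glGenVertexArrays(1, &vertexArrayObject);
    glBindVertexArray(vertexArrayObject);
    // vertex buffer and attribute setup, as in the question
    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, vertexCount * sizeof(Vertex), vertices, GL_STATIC_DRAW);
    glEnableVertexAttribArray(positionAttribute);
    glEnableVertexAttribArray(colorAttribute);
    glVertexAttribPointer(positionAttribute, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *)0);
    glVertexAttribPointer(colorAttribute, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid *)sizeof(vec2));
    // create the index buffer while the VAO is still bound, so the
    // GL_ELEMENT_ARRAY_BUFFER binding is recorded in the VAO
    static const GLuint indices[6] = {0, 1, 2, 3, 4, 0};
    GLuint elementBuffer;
    glGenBuffers(1, &elementBuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementBuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
    return vertexArrayObject;
}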

OpenGL ES - using glDrawArrays()

I am porting some code from OpenGL to the ES version. I am using glDrawArrays() to draw a triangle, in conjunction with glVertexPointer(). However, it doesn't draw anything on the screen. The complete code is:
void init(void)
{
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glShadeModel(GL_FLAT);
}

void display(void)
{
    glEnableClientState(GL_COLOR_ARRAY);
    glClear(GL_COLOR_BUFFER_BIT);
    glColor4f(0.0, 0.0, 1.0, 1.0);
    glLoadIdentity();
    glTranslatef(0, 0, -20);
    const GLfloat triVertices[] = {
        0.0f,  1.0f, 0.0f,
       -1.0f, -1.0f, 0.0f,
        1.0f, -1.0f, 0.0f
    };
    glVertexPointer(3, GL_FLOAT, 0, triVertices);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableClientState(GL_VERTEX_ARRAY);
    glFlush();
}

void reshape(int w, int h)
{
    glViewport(0, 0, (GLsizei) w, (GLsizei) h);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glFrustum(-1.0, 1.0, -1.0, 1.0, 1.5, 20.0);
    glMatrixMode(GL_MODELVIEW);
}

int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
    glutInitWindowSize(400, 400);
    glutInitWindowPosition(100, 100);
    glutCreateWindow(argv[0]);
    init();
    glutDisplayFunc(display);
    glutReshapeFunc(reshape);
    glutMainLoop();
    return 0;
}
GLUT opens the window and clears it to black, but nothing else is drawn.
Can someone please spot what I am doing wrong? Thanks.
I see no call to glEnableClientState(GL_VERTEX_ARRAY);. I do see glEnableClientState(GL_COLOR_ARRAY);, but no matching call to glColorPointer(). Perhaps you wrote one when you meant the other?
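If that diagnosis is right, the fix is simply to enable the vertex array instead of the color array; a minimal sketch of the corrected display():

void display(void)
{
    glClear(GL_COLOR_BUFFER_BIT);
    glColor4f(0.0, 0.0, 1.0, 1.0);
    glLoadIdentity();
    glTranslatef(0, 0, -20);
    const GLfloat triVertices[] = {
        0.0f,  1.0f, 0.0f,
       -1.0f, -1.0f, 0.0f,
        1.0f, -1.0f, 0.0f
    };
    glEnableClientState(GL_VERTEX_ARRAY);  // enable the array actually being drawn
    glVertexPointer(3, GL_FLOAT, 0, triVertices);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glDisableClientState(GL_VERTEX_ARRAY);
    glFlush();
}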

IOS OpenGL ES - Rotate Camera around the scene

This is my code:
- (void)drawView {
    // Define the square for the flower's head (vertices)
    const GLfloat flowerHead[] = {
        -1.0,  1.0, 1.0,   // top left
        -1.0, -1.0, 1.0,   // bottom left
         1.0, -1.0, 1.0,   // bottom right
         1.0,  1.0, 1.0    // top right
    };
    // Define the texture coordinates to match the flowerHead vertices
    const GLshort flowerHeadTextureCoord[] = {
        0, 1,   // top left
        0, 0,   // bottom left
        1, 0,   // bottom right
        1, 1    // top right
    };
    rota = 1.0;
    [EAGLContext setCurrentContext:context];
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
    glViewport(0, 0, backingWidth, backingHeight);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glMatrixMode(GL_MODELVIEW);
    glTexCoordPointer(2, GL_SHORT, 0, flowerHeadTextureCoord);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    // flower #1
    glPushMatrix();
    {
        glEnable(GL_ALPHA_TEST);                     // enable the alpha test -> transparency
        glTranslatef(0.0, 0.0, -6.0);                // position the flower
        glRotatef(55.0, 1.0, 0.0, 0.0);              // rotate the flower on the x axis
        glVertexPointer(3, GL_FLOAT, 0, flowerHead); // point to the flowerHead vertex array
        glEnableClientState(GL_VERTEX_ARRAY);        // enable the vertex array state
        // Draw the front face
        // Bind texture
        glBindTexture(GL_TEXTURE_2D, textures[0]);
        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    }
    glPopMatrix();
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
    [context presentRenderbuffer:GL_RENDERBUFFER_OES];
    [self checkGLError:NO];
}
That's my drawing code; all I need now is to rotate the camera around the object (and not the object itself).
My setupView function:
const GLfloat zNear = 0.1, zFar = 1000.0, fieldOfView = 60.0;
GLfloat size;
// Set the general polygon properties - for transparency!!!
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glAlphaFunc(GL_GREATER, 0.1f);
glEnable(GL_DEPTH_TEST);
glMatrixMode(GL_PROJECTION);
size = zNear * tanf(DEGREES_TO_RADIANS(fieldOfView) / 2.0);
// This gives us the size of the iPhone display
CGRect rect = self.bounds;
glFrustumf(-size, size, -size / (rect.size.width / rect.size.height), size / (rect.size.width / rect.size.height), zNear, zFar);
glViewport(0, 0, rect.size.width, rect.size.height);
glClearColor(0.0f, 0.0f, 0.0f, 0.5f);
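One common approach in the fixed-function pipeline: there is no separate camera, so "rotating the camera around the scene" means prepending the inverse camera transform to the modelview matrix before any object is drawn. A minimal sketch, where cameraAngle and cameraDistance are hypothetical variables maintained and animated elsewhere:

// orbit the "camera" around the scene by applying the inverse camera
// transform at the top of the modelview stack each frame
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0.0f, 0.0f, -cameraDistance);  // back the camera away from the orbit center
glRotatef(cameraAngle, 0.0f, 1.0f, 0.0f);   // rotating the world one way orbits the camera the other
// ...then push, transform and draw each flower exactly as in drawView above...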
