Drawing Simple Triangles in 3D.. Only one shows up on Emulator, on real device - No Triangles - opengl-es

// Front face
fPyramid[0] = 0.0f; fPyramid[1] = 5.0f; fPyramid[2] = 0.0f;
fPyramid[3] = -3.0f; fPyramid[4] = 0.0f; fPyramid[5] = 3.0f;
fPyramid[6] = 3.0f; fPyramid[7] = 0.0f; fPyramid[8] = 3.0f;
// Left face
fPyramid[18] = 0.0f; fPyramid[19] = 5.0f; fPyramid[20] = 0.0f;
fPyramid[21] = -3.0f; fPyramid[22] = 0.0f; fPyramid[23] = -3.0f;
fPyramid[24] = -3.0f; fPyramid[25] = 0.0f; fPyramid[26] = 3.0f;
// Back face
fPyramid[9] = 0.0f; fPyramid[10] = 5.0f; fPyramid[11] = 0.0f;
fPyramid[12] = 3.0f; fPyramid[13] = 0.0f; fPyramid[14] = -3.0f;
fPyramid[15] = -3.0f; fPyramid[16] = 0.0f; fPyramid[17] = -3.0f;
// Right face
fPyramid[27] = 0.0f; fPyramid[28] = 5.0f; fPyramid[29] = 0.0f;
fPyramid[30] = 3.0f; fPyramid[31] = 0.0f; fPyramid[32] = 3.0f;
fPyramid[33] = 3.0f; fPyramid[34] = 0.0f; fPyramid[35] = -3.0f;
for(int i = 0; i < 4 ; i++)
{
fPyramidColor[i*9] = 1.0f; fPyramidColor[i*9+1] = 0.0f; fPyramidColor[i*9+2] = 0.0f;
if(i < 2)
{
fPyramidColor[i*9+1] = 0.0f; fPyramidColor[i*9+4] = 1.0f; fPyramidColor[i*9+5] = 0.0f;
fPyramidColor[i*9+2] = 0.0f; fPyramidColor[i*9+7] = 0.0f; fPyramidColor[i*9+8] = 1.0f;
}
else
{
fPyramidColor[i*9+2] = 0.0f; fPyramidColor[i*9+7] = 1.0f; fPyramidColor[i*9+8] = 0.0f;
fPyramidColor[i*9+1] = 0.0f; fPyramidColor[i*9+4] = 0.0f; fPyramidColor[i*9+5] = 1.0f;
}
}
...
glBindBuffer(GL_ARRAY_BUFFER, uiVBO[0]);
glBufferData(GL_ARRAY_BUFFER, 36*sizeof(float), fPyramid, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, uiVBO[1]);
glBufferData(GL_ARRAY_BUFFER, 36*sizeof(float), fPyramidColor, GL_STATIC_DRAW);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
****************************************************************
//Shaders
****************************************************************
char vShaderStr[] =
"uniform mat4 projectionMatrix; \n"
"uniform mat4 modelViewMatrix; \n"
"attribute vec3 vPosition; \n"
"attribute vec3 inColor; \n"
"varying vec3 theColor; \n"
"void main() \n"
"{ \n"
" gl_Position = projectionMatrix*modelViewMatrix*vec4(vPosition, 1.0); \n"
" theColor = inColor; \n"
"} \n";
char fShaderStr[] =
"varying vec3 theColor; \n"
"void main() \n"
"{ \n"
" gl_FragColor = vec4(theColor, 1.0); \n"
"} \n";
****************************************************************
//in drawframe() function
****************************************************************
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindBuffer(GL_ARRAY_BUFFER, uiVBO[0]);
glEnableVertexAttribArray(0);//0 is or position
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, uiVBO[1]);
glEnableVertexAttribArray(2);//2 is for colors
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
int iModelViewLoc = glGetUniformLocation(globalProgramObject, "modelViewMatrix");
int iProjectionLoc = glGetUniformLocation(globalProgramObject, "projectionMatrix");
glUniformMatrix4fv(iProjectionLoc, 1, GL_FALSE, getPerspectiveMatrixInFloat());
ESMatrix mModelView = ogmLookAt(0, 15, 40, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);
ESMatrix mCurrent = ogmRotate(&mModelView, fRotationAngle, 0.0f, 1.0f, 0.0f);
ogmTranslate(&mCurrent, 0, 13, 35);
glUniformMatrix4fv(iModelViewLoc, 1, GL_FALSE, getCurrentMatrixInFloat(mCurrent));
//and finally draw arrays
glDrawArrays(GL_TRIANGLE_STRIP, 0, 12);
fRotationAngle is incremented by 0.01 each frame.
this is the code, and the output is
This is the output on the emulator. On the real device I do not see anything — just a white screen (at least seeing it change from black to white gave me some peace of mind!).
I am out of ideas. I have tried changing the geometry to just 2 triangles, but I only get to see one. What is wrong with this code?

Your shaders should not compile on ES2. According to the GLES Shading Language 1.0.17 spec (which is relevant for ES2.0), there are no layout qualifiers, and there is no in/out (just like with GLSL <=1.20 on desktop GL), but you have to use attribute for vertex shader inputs and varying for communication between vertex and fragment shader.
You have to use glBindAttribLocation before linking the program to assign the attribute indices to your shader inputs (or query the ones the GL assigned if you do not specify them).
As a side note: currently, you try to assign index 1 for the color in the shader, but are using 2 in the client code.

Related

GLSL Vertex Shader gives wrong results if I do not mention gl_Vertex

I am writing a shader to draw lines with a width, as an alternative to glLineWidth, which doesn't work above 1.0 with ANGLE, and I'd like my lines to have the same thickness on Windows. I am running on desktop OpenGL for now, though.
The vertex shader source is as follows
// Wide-line vertex shader: expands a line segment (a_startPosition ->
// a_endPosition) into a screen-space quad u_width pixels thick. The same
// segment is submitted four times; a_choice selects the start (0.0) or end
// (non-zero) point and a_dir (+1 / -1) picks which side of the line the
// corner is pushed to.
attribute vec3 a_startPosition;
attribute vec3 a_endPosition;
attribute float a_choice;
attribute float a_dir;
uniform mat4 u_mvpMatrix;
uniform float u_width;
uniform vec2 u_viewDims;
void main()
{
// Transform both endpoints to clip space.
vec4 start = u_mvpMatrix*vec4(a_startPosition, 1.0);
vec4 end = u_mvpMatrix*vec4(a_endPosition, 1.0);
//gl_Vertex;
// Perpendicular of the screen-space line direction.
// NOTE(review): normalize() is undefined when start == end — degenerate
// segments should be filtered out by the caller; confirm.
vec2 slope = normalize(end.xy - start.xy);
slope = vec2(slope.y, -slope.x);
// Pixel width converted to NDC units (offset is multiplied by .w below to
// survive the perspective divide).
vec2 scale = u_width/u_viewDims;
// NOTE(review): exact float equality only works here because the client
// feeds a_choice the constants 0 and -1 verbatim; a_choice < 0.5 would be
// the safer test.
if (a_choice == 0.0)
gl_Position = vec4(start.xy + a_dir*scale*slope.xy*start.w, start.zw);
else
gl_Position = vec4(end.xy + a_dir*scale*slope.xy*end.w, end.zw);
}
See that I have gl_Vertex, unused, commented out.
int width, height;
glfwGetFramebufferSize(m_window, &width, &height);
glUseProgram(m_shaders[Shader_WideLine]->id());
GLint shaderid = m_shaders[Shader_WideLine]->id();
GLint coloc = glGetUniformLocation(shaderid, "Color");
GLint dimloc = glGetUniformLocation(shaderid, "u_viewDims");
GLint widthloc = glGetUniformLocation(shaderid, "u_width");
GLint mvploc = glGetUniformLocation(shaderid, "u_mvpMatrix");
GLint modelviewloc = glGetUniformLocation(shaderid, "u_modelview");
GLint projloc = glGetUniformLocation(shaderid, "u_projection");
GLint dirloc = glGetAttribLocation(shaderid, "a_dir");
GLint startloc = glGetAttribLocation(shaderid, "a_startPosition");
GLint endloc = glGetAttribLocation(shaderid, "a_endPosition");
GLint chloc = glGetAttribLocation(shaderid, "a_choice");
//////////
//Set Uniforms
//////////
glUniform1f(widthloc, 10);
glUniform2f(dimloc, width, height);
glUniform4f(coloc, 0.101f, 0.558f, 0.109f, 1.f);
glm::mat4 modelview;
glm::mat4 projection;
glGetFloatv(GL_MODELVIEW_MATRIX, glm::value_ptr(modelview));
glGetFloatv(GL_PROJECTION_MATRIX, glm::value_ptr(projection));
glm::mat4 mvp = projection * modelview;
glUniformMatrix4fv(mvploc, 1, GL_FALSE, glm::value_ptr(mvp));
int numpts = 4;
GLfloat v[4][3] = {
{0,1,0},
{0,0,0},
{1,0,0},
{1,1,0}
};
//////////
// Draw (attributes)
//////////
glBegin( GL_TRIANGLE_STRIP );
glNormal3d(0.0, 0.0, 1.0);
for(int i=0; i<numpts-1; i++)
{
glVertexAttrib3fv(startloc, v[i]);
glVertexAttrib3fv(endloc, v[i+1]);
glVertexAttrib1f(chloc, 0);
glVertexAttrib1f(dirloc, -1.0f);
glVertex3d(0,0,0);
glVertexAttrib1f(dirloc, 1.0f);
glVertex3d(0,0,0);
glVertexAttrib1f(chloc, -1);
glVertexAttrib1f(dirloc, -1.0f);
glVertex3d(0,0,0);
glVertexAttrib1f(dirloc, 1.0f);
glVertex3d(0,0,0);
}
glEnd();
glUseProgram(0);
So I am trying to draw lines from (0,1,0) to (0,0,0) to (1,0,0) to (1,1,0) with a width of 10 pixels. In the following images is a wire cube 2x2x2 centered on the origin for reference.
When called as presented I get the unexpected result of this
If I uncomment gl_Vertex; in the shader, so that it is unused but referenced, I get this expected result.
What is the reason that this could happen?
gl_ModelViewProjectionMatrix is not a valid ES 2.0 vertex shader built-in variable.
You'll have to pass in your MVP via uniform.

Upgrading code from OpenGL ES 1.0 to 2.0

I'm just working through the twee jump project which is written in cocos2d 1.0 found here
I'm having trouble converting this section of the code to OpenGL ES 2.0. I need at least 10 reputations to post a picture but a picture of the errors I get when I try to build and run can be found here:
// Draws a translucent black highlight bar behind the score row at
// currentScorePosition using the OpenGL ES 1.x fixed-function pipeline.
// NOTE(review): this is ES1-only code (glColor4f, glVertexPointer, client
// states); a port to ES 2.0 needs a shader program, vertex attributes and
// an explicit projection uniform instead of these calls.
- (void)draw {
[super draw];
// No row selected — nothing to highlight.
if(currentScorePosition < 0) return;
// 20%-alpha black tint; assumes blending is enabled by cocos2d — confirm.
glColor4f(0.0f, 0.0f, 0.0f, 0.2f);
// Bar geometry in points: full 320-wide row, 27 high; y decreases as the
// row index grows (rows are laid out top-down from y = 359).
float w = 320.0f;
float h = 27.0f;
float x = (320.0f - w) / 2.0f;
float y = 359.0f - currentScorePosition * h;
GLfloat vertices[4][2];
// Index order 0,1,3,2 converts the corner order below into a valid
// triangle strip.
GLubyte indices[4] = { 0, 1, 3, 2 };
vertices[0][0] = x; vertices[0][1] = y;
vertices[1][0] = x+w; vertices[1][1] = y;
vertices[2][0] = x+w; vertices[2][1] = y+h;
vertices[3][0] = x; vertices[3][1] = y+h;
// Texturing and the color/texcoord arrays are irrelevant for a flat
// colored quad: disable around the draw, restore afterwards so the rest
// of the cocos2d scene renders normally.
glDisable(GL_TEXTURE_2D);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_COLOR_ARRAY);
glVertexPointer(2, GL_FLOAT, 0, vertices);
glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_BYTE, indices);
glEnableClientState(GL_COLOR_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable(GL_TEXTURE_2D);
// Reset to opaque white so later nodes are not tinted.
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
}

How to convert world coordinates to screen coordinates in OpenGL ES 2.0

I am using following OpenGL ES 1.x code to set my projection coordinates.
glMatrixMode(GL_PROJECTION);
float width = 320;
float height = 480;
glOrthof(0.0, // Left
1.0, // Right
height / width, // Bottom
0.0, // Top
-1.0, // Near
1.0); // Far
glMatrixMode(GL_MODELVIEW);
What is the equivalent method to setup this in OpenGL ES 2.0 ?
What projection matrix should I pass to the vertex shader ?
I have tried following function to create the matrix but its not working:
// Fills m with an orthographic projection matrix equivalent to glOrthof().
//
// The matrix is stored in COLUMN-MAJOR order, which is what
// glUniformMatrix4fv expects when its `transpose` argument is GL_FALSE:
// m[0..3] is the first column, m[4..7] the second, and the translation
// lives in m[12..14].
//
// BUG FIX: the previous version filled the array row-major (tx/ty/tz in
// m[3]/m[7]/m[11]), so uploading it with transpose = GL_FALSE handed the
// GPU a transposed projection and nothing rendered where expected.
void SetOrtho (Matrix4x4& m, float left, float right, float bottom, float top, float near,
float far)
{
const float tx = - (right + left)/(right - left);
const float ty = - (top + bottom)/(top - bottom);
const float tz = - (far + near)/(far - near);
// Column 0: x scale
m.m[0] = 2.0f/(right-left);
m.m[1] = 0;
m.m[2] = 0;
m.m[3] = 0;
// Column 1: y scale
m.m[4] = 0;
m.m[5] = 2.0f/(top-bottom);
m.m[6] = 0;
m.m[7] = 0;
// Column 2: z scale
m.m[8] = 0;
m.m[9] = 0;
m.m[10] = -2.0f/(far-near);
m.m[11] = 0;
// Column 3: translation
m.m[12] = tx;
m.m[13] = ty;
m.m[14] = tz;
m.m[15] = 1;
}
Vertex Shader :
// Minimal ES 2.0 pass-through vertex shader: ES 2.0 has no built-in matrix
// stack, so the combined model-view-projection matrix is supplied as a
// uniform. Per-vertex color is forwarded to the fragment shader.
uniform mat4 u_mvpMatrix;
attribute vec4 a_position;
attribute vec4 a_color;
varying vec4 v_color;
void main()
{
gl_Position = u_mvpMatrix * a_position;
v_color = a_color;
}
Client Code (parameters to the vertex shader):
float min = 0.0f;
float max = 1.0f;
const GLfloat squareVertices[] = {
min, min,
min, max,
max, min,
max, max
};
const GLfloat squareColors[] = {
1, 1, 0, 1,
0, 1, 1, 1,
0, 0, 0, 1,
1, 0, 1, 1,
};
Matrix4x4 proj;
SetOrtho(proj, 0.0f, 1.0f, 480.0/320.0, 0.0f, -1.0f, 1.0f );
The output i am getting in the iPhone simulator:
Your transcription of the glOrtho formula looks correct.
Your Matrix4x4 class is custom, but is it possible that m.m ends up being loaded directly as a glUniformMatrix4fv? If so check that you're setting the transpose flag as GL_TRUE, since you're loading data in row major format and OpenGL expects column major (ie, standard rules are that index [0] is the top of the first column, [3] is at the bottom of the first column, [4] is at the top of the second column, etc).
It's possibly also worth checking that — assuming you've directly replicated the old fixed-function matrix stacks — you're applying modelview and projection in the correct order in your vertex shader, or else compositing them correctly on the CPU, whichever way around you're doing it.

help with drawing a wedge with opengl es

I'm trying to do some basic opengl es programming to get started on the basics.
I have a drawing function tries to draw a wedge of a circle. Something is going wrong because its actually just drawing a circle.
I'm still just trying to grasp the basics of opengl es here. Heres what I have so far.
// Draws a filled wedge (triangle fan: center + arc points) covering the
// 90°–180° quadrant of the unit circle.
//
// BUG FIX: cos()/sin() take RADIANS, but the previous version passed the
// angle in degrees, so the arc wrapped around many full turns and the
// result looked like a whole circle instead of a wedge.
- (void)drawView
{
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(0, 0, 60, 60);
int i;
float angle_start=90;
float angle_stop=180;
int segments=360;
float const angle_step = (angle_stop - angle_start)/segments;
// segments+1 arc points plus the fan center, 2 floats each.
GLfloat *arc_vertices;
arc_vertices = malloc(2*sizeof(GLfloat) * (segments+2));
if (arc_vertices == NULL) return; // allocation failed — nothing to draw
// Fan center at the origin.
arc_vertices[0] = arc_vertices[1] = 0.0;
for(i=0; i<segments+1; i++) {
// Convert degrees to radians before calling the trig functions.
float rads = (angle_start + i*angle_step) * (float)M_PI / 180.0f;
arc_vertices[2 + 2*i ] = cosf(rads);
arc_vertices[2 + 2*i + 1] = sinf(rads);
}
glVertexPointer(2, GL_FLOAT, 0, arc_vertices);
glEnableClientState(GL_VERTEX_ARRAY);
glColor4f(1.0f, 0.0f, 0.0f, 1.0f);
// Center + segments+1 arc points.
glDrawArrays(GL_TRIANGLE_FAN, 0, segments+2);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
free(arc_vertices);
}
sin() and cos() take radians as input:
float angle_start=90;
float angle_stop=180;
int segments=360;
float const angle_step = (angle_stop - angle_start)/segments;
GLfloat* verts = (GLfloat*)malloc(2*sizeof(GLfloat) * (segments+2));
unsigned int pos = 0;
verts[pos++] = 0;
verts[pos++] = 0;
float radius = 10;
for( unsigned int i = 0; i < segments; ++i )
{
float rads = (angle_start + i*angle_step) * (3.14159 / 180);
verts[pos++] = ( cos( rads ) * radius );
verts[pos++] = ( sin( rads ) * radius );
}
glVertexPointer(2, GL_FLOAT, 0, verts);
glEnableClientState(GL_VERTEX_ARRAY);
glColor4f(1.0f, 0.0f, 0.0f, 1.0f);
glDrawArrays(GL_TRIANGLE_FAN, 0, segments+1);
glDisableClientState(GL_VERTEX_ARRAY);
I see something wrong. You access vertices[i] and vertices[i+1], but i always increments by 1.
Try replacing
GLfloat vertices[720];
with
GLfloat vertices[2*720];
and replace
vertices[i]=p1;
vertices[i+1]=p2;
by
vertices[2*i]=p1;
vertices[2*i+1]=p2;
this works.
Anti-aliasing is horrible, but it works.
(Credit: adapted from an external source; the original link was lost in formatting.)
// Animates a growing pie slice: each call advances `amt` by 20° (wrapping
// at 360°) and draws a triangle fan from the center through the arc
// [0°, amt°].
//
// FIX: removed the dead second loop that copied verts[k] onto itself —
// it had no effect and only obscured the intent.
-(void)drawcircelofSlice2
{
amt+=20;
if(amt>360.0)
{
amt=0;
}
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
glViewport(20, 20, 50,50);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
// NOTE(review): left == right (30, 30) makes glOrthof divide by zero and
// yields a degenerate projection — this looks like a typo for
// (-30.0f, 30.0f, ...); confirm against the original source.
glOrthof(30.0f, 30.0f, -1.5f, 1.5f, -1.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
float x=0;
float y=0;
//float radius=20;
float lowAngle=0;
// (amt/360)*360 is just amt; kept for parity with the original.
float highAngle=(amt/360) *360;
// float highAngle=360;
float numcirclePts=360;
lowAngle=DEGREES_TO_RADIANS(lowAngle);
highAngle=DEGREES_TO_RADIANS(highAngle);
float res=numcirclePts;
float angle=lowAngle;
float anglerange=highAngle-lowAngle;
float angleAdder=anglerange/ res;
int k=0;
// 360 points * 2 floats each.
GLfloat verts[720];
for (int i = 0; i < numcirclePts; i++){
verts[k] = x + cos(angle) ;
verts[k+1] = y - sin(angle) ;
angle += angleAdder;
k+=2;
}
// Replace the first arc point with the fan center so the fan pivots on
// (x, y).
verts[0] = x;
verts[1] = y;
glVertexPointer(2, GL_FLOAT, 0, verts);
glEnableClientState(GL_VERTEX_ARRAY);
glColor4f(0.0f, 0.0f, 1.0f, 0.0f);
glDrawArrays(GL_TRIANGLE_FAN, 0, numcirclePts);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
glDisableClientState(GL_VERTEX_ARRAY);
[context presentRenderbuffer:GL_RENDERBUFFER_OES];
}

How to draw a dotted line using OpenGL ES 1?

To draw a dotted line in OpenGL I can use glLineStipple, but how do I achieve the same effect in OpenGL ES 1?
Lines can be textured, just like triangles. Enable alpha testing, apply an alpha texture, set up some texture coordinates, and enjoy.
Actually, I have implemented the dotted line and the dashed line using for loops, but it still makes little sense to hook this up as a line type in the drawing method. Here is the code of my dotted line and dashed line below:
doted line:
// Draws a dotted grid as GL_POINTS: three horizontal rows (a = -0.5, 0,
// 0.5) of dots spaced 0.025 apart along x (b).
//
// BUG FIX: the loops generate roughly 219 points (438 floats), but the
// previous version drew a hard-coded 648 points, so most of the draw read
// uninitialized stack memory. The draw count is now derived from the
// number of floats actually written, and writes are bounds-guarded against
// float-accumulation drift.
// NOTE(review): the method header has no leading '-'/'+' — presumably lost
// in transcription; confirm against the original source.
(void)drawVerticalDotedInternalGrid{
float a,b;
int drawCount =0;
GLfloat dotedInternalGrid[1296];
for (a = -0.5f; a <= 0.5f; a +=0.5f) {
for (b = -0.875f; b <=0.925f; b += 0.025f)
{
if (drawCount + 2 > 1296) break; // never overrun the buffer
dotedInternalGrid[drawCount] = b; // x coordinate
drawCount++;
dotedInternalGrid[drawCount] = a; // y coordinate
drawCount++;
}
}
glPointSize(1.0f);
glColor4f(0.863f,0.863f,0.863f,0.8f); //line color
glVertexPointer(2, GL_FLOAT, 0, dotedInternalGrid);
glEnableClientState(GL_VERTEX_ARRAY);
// Two floats per point — draw only what was generated (was: 648).
glDrawArrays(GL_POINTS, 0, drawCount / 2);
glDisableClientState(GL_VERTEX_ARRAY);
}
dashed line:
// Draws a dashed grid as GL_LINES: nine rows (a = -0.4 … 0.4, step 0.1) of
// short segments whose endpoints are spaced 0.025 apart along x (b).
//
// FIX: the draw count was hard-coded to 648 vertices, which only matches
// if both float-accumulation loops hit their exact nominal trip counts; a
// drift of one iteration would overflow the array or draw uninitialized
// data. Writes are now bounds-guarded and the draw count is computed from
// the floats actually written.
// NOTE(review): the method header has no leading '-'/'+' — presumably lost
// in transcription; confirm against the original source.
(void)drawVerticalDashedInternalGridH{
GLfloat dashedLine[1296];
float a,b;
int i =0;
//-0.4----0.4 // -0.875----0.900
for (a = -0.4f; a <= 0.4f; a +=0.1f) {
for (b =-0.825f; b <=0.950f; b+=0.025f) {
if (i + 2 > 1296) break; // never overrun the buffer
dashedLine[i] = b; // x coordinate
i++;
dashedLine[i] = a; // y coordinate
i++;
}
}
//glLineWidth(1.0f);
glColor4f(0.863f,0.863f,0.863f,1.f); //line color
glVertexPointer(2, GL_FLOAT, 0, dashedLine);
glEnableClientState(GL_VERTEX_ARRAY);
// Two floats per vertex — draw only what was generated (was: 648).
glDrawArrays(GL_LINES, 0, i / 2);
glDisableClientState(GL_VERTEX_ARRAY);
}
Of course, you can see that the code draws within a rectangular area at fixed coordinates; the bothersome part is how to size the dotedInternalGrid[1296] array dynamically for the draw method, and how to compute the number of points/lines to draw.
To explain it easily, I have put drawHorizontalDashedLine() first.
To understand, click this image.
I cannot put an image on this post because of my reputation.
Visualizing the Vertices
// Draws a horizontal dashed line from x1 to x2 at height y as GL_LINES
// (one 2-point segment per dash).
//
// BUG FIX: the second vertex of the first dash was computed as
// `(x1 < x2)? x1 : x2 + DASH_LENGTH` — because of ?: precedence,
// DASH_LENGTH was only added in the x2 branch, so with x1 < x2 the first
// dash was zero-length. Parenthesized so the dash length is always added.
// Also guards against a zero-dash line, which would declare a zero-size
// VLA and index past it.
+(void)drawHorizontalDashedLine:(GLfloat)x1 x2:(GLfloat)x2 y:(GLfloat)y {
//Parameters
GLfloat DASH_LENGTH = 4.0f;
GLfloat GAP_LENGTH = 2.0f;
GLfloat LINE_THICKNESS = 1.5f;
//Calculate how many dashes are required to draw the whole line
GLfloat fHorizontalLength = fabsf(x2-x1);
int nDashedLineCount = fHorizontalLength / (DASH_LENGTH + GAP_LENGTH);
if (nDashedLineCount < 1) return; // too short for even one dash
int nVerticesSize = nDashedLineCount * 4; //A dashed line has 4 values(2 points)
//Vertex
GLfloat vertices[nVerticesSize];
//The first dash starts at the leftmost endpoint
GLfloat xStart = (x1 < x2)? x1 : x2;
vertices[0] = xStart;
vertices[1] = y;
vertices[2] = xStart + DASH_LENGTH; // was the precedence bug — see above
vertices[3] = y;
//The other vertices of dashed lines
for (int nIndex=4; nIndex < nVerticesSize; nIndex=nIndex+4) {
vertices[nIndex] = vertices[nIndex-2] + GAP_LENGTH;
vertices[nIndex+1] = y;
vertices[nIndex+2] = vertices[nIndex] + DASH_LENGTH;
vertices[nIndex+3] = y;
//NSLog(@"Point1(%.2f, %.2f)", vertices[nIndex], vertices[nIndex+1]);
//NSLog(@"Point2(%.2f, %.2f)", vertices[nIndex+2], vertices[nIndex+3]);
}
//Draw the arrays (2 floats per vertex)
glPushMatrix();
glLineWidth(LINE_THICKNESS);
glVertexPointer (2, GL_FLOAT, 0, vertices);
glDrawArrays (GL_LINES, 0, nVerticesSize/2);
glPopMatrix();
}
drawDashedLine().
I used the trigonometric function to get lengths.
// Draws an arbitrarily-oriented dashed line from point1 to point2 as
// GL_LINES, using the segment's angle (atan2) to step dash/gap lengths
// along its direction.
//
// FIX: guards against a segment shorter than one dash+gap (including a
// zero-length line), which previously declared a zero-size VLA and drew
// nothing from it — now an explicit early return.
+(void)drawDashedLine:(CGPoint)point1 point2:(CGPoint)point2 {
//Parameters
GLfloat DASH_LENGTH = 3.0f;
GLfloat GAP_LENGTH = 1.0f;
GLfloat LINE_THICKNESS = 1.5f;
//Calculate how many dashes are required to draw the whole line
GLfloat fWidth = point2.x - point1.x;
GLfloat fHeight = point2.y - point1.y;
GLfloat fRadian = atan2(fHeight, fWidth);
float fLineLength = sqrtf(powf(fWidth, 2) + powf(fHeight, 2));
int nDashedLineCount = fabsf(fLineLength / (DASH_LENGTH + GAP_LENGTH));
if (nDashedLineCount < 1) return; // too short for even one dash
int nVerticesSize = nDashedLineCount * 4; //A dashed line has 4 values(2 points)
//Vertex
GLfloat vertices[nVerticesSize];
//The first dash starts at point1 and advances one dash along the line
vertices[0] = point1.x;
vertices[1] = point1.y;
vertices[2] = point1.x + cosf(fRadian) * DASH_LENGTH;
vertices[3] = point1.y + sinf(fRadian) * DASH_LENGTH;
//Each further dash: skip a gap from the previous dash end, then one dash
for (int nIndex=4; nIndex < nVerticesSize; nIndex=nIndex+4) {
vertices[nIndex] = vertices[nIndex-2] + cosf(fRadian) * GAP_LENGTH;
vertices[nIndex+1] = vertices[nIndex-1] + sinf(fRadian) * GAP_LENGTH;
vertices[nIndex+2] = vertices[nIndex] + cosf(fRadian) * DASH_LENGTH;
vertices[nIndex+3] = vertices[nIndex+1] + sinf(fRadian) * DASH_LENGTH;
//NSLog(@"DrawDash Point1(%.2f, %.2f)", vertices[nIndex], vertices[nIndex+1]);
//NSLog(@"DrawDash Point2(%.2f, %.2f)", vertices[nIndex+2], vertices[nIndex+3]);
}
//Draw the arrays (2 floats per vertex)
glPushMatrix();
glLineWidth(LINE_THICKNESS);
glVertexPointer (2, GL_FLOAT, 0, vertices);
glDrawArrays (GL_LINES, 0, nVerticesSize/2);
glPopMatrix();
}
// Desktop-GL dashed line via line stippling. NOTE(review): glLineStipple,
// glPushAttrib and glBegin/glEnd do NOT exist in OpenGL ES — this snippet
// answers the "how would desktop GL do it" angle only.
glPushAttrib(GL_ENABLE_BIT);
// glPushAttrib saves the enable flags so glPopAttrib restores them after drawing
glLineStipple(1, 0xAAAA); // 0xAAAA = alternating on/off bits -> dotted pattern
glEnable(GL_LINE_STIPPLE);
glBegin(GL_LINES);
glVertex3f(-.5,.5,-.5);
glVertex3f(.5,.5,-.5);
glEnd();
glPopAttrib();

Resources