For the love of god… why doesn't this display correctly? I know it has something to do with the indices, but I'm not sure what's wrong with what's given…
#include <GL/glut.h>

bool animate = true;
float yrot = 45.0f;

float verts[] = { -100.0f, -100.0f, 0.0f,
                   100.0f, -100.0f, 0.0f,
                  -100.0f,  100.0f, 0.0f,
                   100.0f,  100.0f, 0.0f };

unsigned int indices[] = { 2, 0, 3,
                           1, 3, 0 };

// One normal per vertex: the indices reference all four vertices,
// so four normals are needed here, not two.
float norms[] = { 0.0f, 0.0f, 1.0f,
                  0.0f, 0.0f, 1.0f,
                  0.0f, 0.0f, 1.0f,
                  0.0f, 0.0f, 1.0f };

void Init()
{
    glEnable(GL_LIGHT0);
    glEnable(GL_LIGHTING);
    glShadeModel(GL_FLAT);

    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glVertexPointer(3, GL_FLOAT, 0, verts);
    glNormalPointer(GL_FLOAT, 0, norms);
}
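Since the suspicion falls on the indices: with GL_CULL_FACE enabled and the default glFrontFace(GL_CCW), any triangle wound clockwise is silently culled. A minimal sanity check is the signed 2D cross product of each triangle's edge vectors (the quad lies in the z = 0 plane, so positive means CCW). signed_area2 and check_winding are hypothetical helpers for illustration, not part of the original program; with the index order above, both triangles come out CCW, so culling alone does not explain a blank screen.

#include <cstdio>

// Twice the signed area of triangle (a, b, c) in the z = 0 plane;
// positive means counter-clockwise winding as seen from +z.
static float signed_area2(const float *a, const float *b, const float *c)
{
    return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0]);
}

static void check_winding(void)
{
    for (int t = 0; t < 2; ++t) {
        const float *a = &verts[3 * indices[3 * t + 0]];
        const float *b = &verts[3 * indices[3 * t + 1]];
        const float *c = &verts[3 * indices[3 * t + 2]];
        printf("triangle %d: %s\n", t,
               signed_area2(a, b, c) > 0.0f ? "CCW (front)" : "CW (culled)");
    }
}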
void RenderScene()
{
    // OnDraw resets the modelview matrix each frame, so no push/pop needed.
    glTranslatef(0.0f, 0.0f, -300.0f);
    glRotatef(yrot, 0.0f, 1.0f, 0.0f);

    glEnable(GL_COLOR_MATERIAL); // without this, glColor is ignored under lighting
    glColor3f(1.0f, 0.0f, 1.0f);

    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, indices);
}
void OnDraw(void)
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glLoadIdentity();
    RenderScene();
    glutSwapBuffers(); // implies a flush in double-buffered mode
}
void OnKeyDown(unsigned char key, int x, int y)
{}
void OnAnimate(void)
{
    if (animate) {
        yrot += 0.01f;
        if (yrot > 360.0f)
            yrot = 0.0f;
        glutPostRedisplay();
    }
}
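Incrementing yrot by a fixed step per idle callback ties rotation speed to the frame rate. A frame-rate independent variant using glutGet(GLUT_ELAPSED_TIME) is sketched below; it would be registered via glutIdleFunc in place of OnAnimate, and degreesPerSecond is an assumed tuning constant, not something from the original code.

// Sketch: advance the angle by elapsed wall-clock time instead of a
// fixed step per idle call, so rotation speed is frame-rate independent.
void OnAnimateTimed(void)
{
    static int lastMs = 0;
    const float degreesPerSecond = 30.0f;

    int nowMs = glutGet(GLUT_ELAPSED_TIME);
    if (lastMs == 0)
        lastMs = nowMs;

    if (animate) {
        yrot += degreesPerSecond * (nowMs - lastMs) / 1000.0f;
        if (yrot > 360.0f)
            yrot -= 360.0f;
        glutPostRedisplay();
    }
    lastMs = nowMs;
}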
void OnResize(int w, int h)
{
    glViewport(0, 0, w, h);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();

    float fAspect;
    if (h > 0) // don't divide by zero, not that we should ever run into that
        fAspect = GLfloat(w) / GLfloat(h);
    else
        fAspect = 1.0f;

    float fNearPlane = 1.0f;
    float fFarPlane = 1000.0f;
    gluPerspective(45.0f, fAspect, fNearPlane, fFarPlane);

    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
}

int main(int argc, char **argv)
{
    glutInit(&argc, argv); // must come before any other GLUT call
    glutInitDisplayMode(GLUT_DEPTH | GLUT_RGBA | GLUT_DOUBLE);
    glutInitWindowSize(600, 400);
    glutCreateWindow("GLUT template");

    glutDisplayFunc(OnDraw);
    glutKeyboardFunc(OnKeyDown);
    glutReshapeFunc(OnResize);
    glutIdleFunc(OnAnimate);

    //glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
    glEnable(GL_DEPTH_TEST);
    glCullFace(GL_BACK);
    glEnable(GL_CULL_FACE);

    Init();
    glutMainLoop();
    return 0;
}
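For reference, on Linux with freeglut this builds as, e.g., g++ quad.cpp -o quad -lglut -lGLU -lGL (the file name is assumed); gluPerspective lives in GLU, hence the -lGLU.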