PDA

View Full Version : My code works in Gl 3.1, but not in 3.3? What's wrong? (core profile issues maybe?)



WIld Sage
12-20-2013, 07:16 PM
I'm using SDL to handle my windows and context creation. Whenever I request OpenGL version 3 (I get 3.1.0 according to glGetString), everything works fine. My objects are all drawn as they should be. However, when I request an OpenGL version 3.3 implementation, it renders nothing. glGetString shows that I do indeed have GL 3.3. My hardware isn't the problem, I'm using an Nvidia Geforce 560 TI, capable of running GL 4.4.

I suspect that some of the code I'm using is not compliant with the core profile. When SDL gives me a 3.3 context, it likely chooses core profile by default. I want to update my code so that it complies with the core profile, if that is indeed the problem.

Here's my implementation of my Geometry class, which handles about 90% of my program's rendering.


#include "Geometry.h"

Geometry::Geometry(void)
{
    /* Construct an empty geometry object: no GL buffers allocated,
       no vertex/index data recorded.  loadData() must be called
       before draw() will render anything. */
    vformat = VF_INVALID;
    iformat = 0;
    vbo     = 0;
    ibo     = 0;
    vcount  = 0;
    icount  = 0;
    prim    = 0;
}


/* Convenience constructor for non-indexed geometry: initializes all
   members to the empty state, then delegates to the matching
   loadData() overload. */
Geometry::Geometry(GLenum primitive, VertexFormat vertexFormat, int vertexCount, void* vertices, bool generateNormals)
    : vformat(VF_INVALID), iformat(0), vbo(0), ibo(0),
      vcount(0), icount(0), prim(0)
{
    loadData(primitive, vertexFormat, vertexCount, vertices, generateNormals);
}


/* Convenience constructor for indexed geometry: initializes all
   members to the empty state, then delegates to the indexed
   loadData() overload. */
Geometry::Geometry(GLenum primitive, VertexFormat vertexFormat, int vertexCount, void* vertices,
                   GLenum indexFormat, int indexCount, void* indices, bool generateNormals)
    : vformat(VF_INVALID), iformat(0), vbo(0), ibo(0),
      vcount(0), icount(0), prim(0)
{
    loadData(primitive, vertexFormat, vertexCount, vertices,
             indexFormat, indexCount, indices, generateNormals);
}


Geometry::~Geometry(void)
{
    clean();  /* release any GL buffer objects this geometry still owns */
}


/* Delete the vertex and index buffer objects (if any) and reset their
   handles to 0 so the object can be safely re-loaded or destroyed. */
void Geometry::clean(void)
{
    if (vbo)
    {
        glDeleteBuffers(1, &vbo);
        vbo = 0;
    }

    if (ibo)
    {
        glDeleteBuffers(1, &ibo);
        ibo = 0;
    }
}


/* Upload non-indexed vertex data into a fresh VBO and remember the
   layout for draw().
   NOTE(review): generateNormals is accepted but never used in this
   overload — presumably normals need index topology; confirm intent. */
void Geometry::loadData(GLenum primitive, VertexFormat vertexFormat, int vertexCount, void* vertices, bool generateNormals)
{
    /* Reject out-of-range formats.  VertexFormat enum values double as
       the vertex stride in bytes. */
    const bool invalidFormat = (vertexFormat == VF_INVALID) || (vertexFormat == VF_MAXTYPES);
    if (invalidFormat)
    {
        error("In Geometry::loadData():\n");
        errormore("Invalid vertexFormat enum!\n");
        return;
    }

    /* Drop buffers from any previous load before creating new ones. */
    clean();

    /* Create the vertex buffer and upload the data. */
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, (int)vertexFormat * vertexCount, vertices, GL_STATIC_DRAW);

    /* Record the layout so draw() knows how to interpret the buffer. */
    prim    = primitive;
    vformat = vertexFormat;
    vcount  = vertexCount;
}


/* Upload indexed vertex data into fresh VBO/IBO pair, optionally
   generating smooth per-vertex normals first, and remember the layout
   for draw().
   Parameters:
     primitive    - GL primitive type (e.g. GL_TRIANGLES), stored for draw().
     vertexFormat - VertexFormat enum; its value equals the vertex stride
                    in bytes.
     vertexCount  - number of vertices in 'vertices'.
     indexFormat  - GL_UNSIGNED_BYTE / GL_UNSIGNED_SHORT / GL_UNSIGNED_INT.
     indexCount   - number of indices in 'indices'.
     generateNormals - if true and the format is VF_32, compute
                    area-weighted face normals, accumulate them per vertex,
                    and normalize.  Normal generation assumes triangle
                    topology (indices are consumed three at a time). */
void Geometry::loadData(GLenum primitive, VertexFormat vertexFormat, int vertexCount, void* vertices,
GLenum indexFormat, int indexCount, void* indices, bool generateNormals)
{
    /* Check for invalid formats.  VertexFormat values double as the
       sizeof of the corresponding vertex struct. */
    if (vertexFormat == VF_INVALID || vertexFormat == VF_MAXTYPES)
    {
        error("In Geometry::loadData():\n");
        errormore("Invalid vertexFormat enum!\n");
        return;
    }
    if (indexSize(indexFormat) == -1)
    {
        error("In Geometry::loadData():\n");
        errormore("Invalid indexFormat enum!\n");
        return;
    }

    /* Clean up existing data. */
    clean();

    /* Generate normals (only supported for the VF_32 layout, which has
       a 'normal' member). */
    if (generateNormals == true)
    {
        if (vertexFormat == VF_32)
        {
            Vertex32* verts = (Vertex32*) vertices;

            /* Go through each triangle and accumulate its face normal
               onto the three vertices it touches.
               BUGFIX: the loop condition is 'i + 2 < indexCount' (was
               'i < indexCount'), so a trailing partial triangle can no
               longer read indices[i+1]/indices[i+2] past the end of the
               index array.
               NOTE(review): the accumulation adds onto whatever value
               verts[i].normal already holds — assumes callers pass
               zeroed normals; confirm. */
            for (int i = 0; i + 2 < indexCount; i += 3)
            {
                unsigned int index0 = 0, index1 = 0, index2 = 0;
                switch (indexFormat)
                {
                case GL_UNSIGNED_BYTE:
                    index0 = (unsigned int)(((unsigned char*)indices)[i]);
                    index1 = (unsigned int)(((unsigned char*)indices)[i+1]);
                    index2 = (unsigned int)(((unsigned char*)indices)[i+2]);
                    break;
                case GL_UNSIGNED_SHORT:
                    index0 = (unsigned int)(((unsigned short*)indices)[i]);
                    index1 = (unsigned int)(((unsigned short*)indices)[i+1]);
                    index2 = (unsigned int)(((unsigned short*)indices)[i+2]);
                    break;
                case GL_UNSIGNED_INT:
                    index0 = (unsigned int)(((unsigned int*)indices)[i]);
                    index1 = (unsigned int)(((unsigned int*)indices)[i+1]);
                    index2 = (unsigned int)(((unsigned int*)indices)[i+2]);
                    break;
                default:
                    /* Unreachable in practice: indexFormat already passed
                       the indexSize() check above. */
                    error("In Geometry::loadData():\n");
                    errormore("Could not generate normals:\n");
                    errormore("Invalid indexFormat enum!\n");
                    return;
                } // switch (indexFormat)

                /* Cross the two triangle edges to get the face normal
                   (magnitude is proportional to triangle area, giving an
                   area-weighted average after normalization). */
                vec3 v1 = verts[index1].pos - verts[index0].pos;
                vec3 v2 = verts[index2].pos - verts[index0].pos;
                vec3 normal = v1.cross(v2);

                verts[index0].normal += normal;
                verts[index1].normal += normal;
                verts[index2].normal += normal;
            } // for each triangle

            /* Normalize each accumulated vertex normal. */
            for (int i = 0; i < vertexCount; ++i)
            {
                verts[i].normal.normalize();
            }
        }
    }

    /* Create vertex buffer. */
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, (int)vertexFormat * vertexCount, vertices, GL_STATIC_DRAW);

    /* Create index buffer. */
    glGenBuffers(1, &ibo);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indexSize(indexFormat) * indexCount, indices, GL_STATIC_DRAW);

    /* Save variables for later usage by draw(). */
    prim    = primitive;
    vformat = vertexFormat;
    vcount  = vertexCount;
    iformat = indexFormat;
    icount  = indexCount;
}


/* Draw this geometry with whatever shader program is currently bound.
   Attribute locations are hard-wired: 0 = position, 1 = color/normal,
   2 = texture coordinates (per the vformat cases below).

   NOTE(review): no Vertex Array Object is bound anywhere in this
   function.  A GL 3.2+ core-profile context has no default VAO, so the
   glVertexAttribPointer and draw calls below raise GL_INVALID_OPERATION
   and render nothing unless the caller has bound a VAO first — this
   matches the 3.1-works / 3.3-fails symptom described in the thread. */
void Geometry::draw(void)
{
/* If the vertex buffer doesn't exist, draw nothing. */
if (vbo == 0)
return;


/* Bind our buffers.  (When ibo is 0 this unbinds the element array
   buffer, which is harmless.) */
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);


/* Set up attribute pointers for the stored vertex format.  The stride
   passed to glVertexAttribPointer equals the VertexFormat enum value
   (the sizeof of the vertex struct). */
switch (vformat)
{
case VF_BASIC:
/* VF_BASIC has only a position - a 3 component float vector. */
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0);
break;
case VF_COLORED:
/* VF_COLORED has a 3-component position (offset 0),
and a 3-component color (offset 12) */
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 24, (GLvoid*)0);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 24, (GLvoid*)12);
break;
case VF_32:
/* VF_32 has 3 components for position, 3 for a normal, and 2 for texture coordinates.
Position is offset 0, Normal is offset 12, and TextureCoordinates is offset 24. */
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(2);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 32, (GLvoid*)0);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 32, (GLvoid*)12);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 32, (GLvoid*)24);
break;
default:
/* Unknown format (e.g. VF_INVALID after a failed load): draw nothing. */
return;
break;
}


if (ibo == 0)
{
/* If the index buffer does not exist, draw non-indexed. */
glDrawArrays(prim, 0, vcount);
}
else
{
/* If the index buffer does exist, draw indexed; the final 0 is a
   byte offset into the bound GL_ELEMENT_ARRAY_BUFFER. */
glDrawElements(prim, icount, iformat, 0);
}


/* Loop through and disable each vertex attribute we may have used. */
for (int i = 0; i < 3; ++i)
{
glDisableVertexAttribArray(i);
}
}




The problem could also be in my Shader class, maybe? I guess it couldn't hurt to post that here too. It's messy, though..




/* Create an empty (not yet usable) shader: allocates the GL program
   object and registers this instance in the global shader list. */
Shader::Shader(void)
{
    /* No stage objects exist yet and the program hasn't linked. */
    vertProg = 0;
    fragProg = 0;
    geomProg = 0;
    tescProg = 0;
    teseProg = 0;
    ready    = false;

    shaderProg = glCreateProgram();
    id = addShaderToList(this);
}


/* Destroy all GL objects owned by this shader and release its slot in
   the global shader list. */
Shader::~Shader(void)
{
    /* Flag every stage object for deletion (glDeleteShader on a 0
       handle is silently ignored by GL). */
    unsigned int stages[5] = { vertProg, fragProg, geomProg, tescProg, teseProg };
    for (int i = 0; i < 5; ++i)
        glDeleteShader(stages[i]);

    /* Deleting the program detaches the flagged stages, letting GL
       free them. */
    glDeleteProgram(shaderProg);

    /* Mark this slot reusable in the global list. */
    shaderList[id] = NULL;
    hasEmpty = true;
}


/* Build a vertex+fragment program.  Returns false (with errors already
   reported by the helpers) as soon as any stage fails to compile or the
   program fails to link. */
bool Shader::load(const char* vertfile, const char* fragfile)
{
    return LoadShader(vertfile, GL_VERTEX_SHADER, &vertProg)
        && LoadShader(fragfile, GL_FRAGMENT_SHADER, &fragProg)
        && LinkShaders();
}


/* Build a vertex+fragment+geometry program.  Stops and returns false at
   the first stage that fails to compile, or if linking fails. */
bool Shader::load(const char* vertfile, const char* fragfile, const char* geomfile)
{
    return LoadShader(vertfile, GL_VERTEX_SHADER, &vertProg)
        && LoadShader(fragfile, GL_FRAGMENT_SHADER, &fragProg)
        && LoadShader(geomfile, GL_GEOMETRY_SHADER, &geomProg)
        && LinkShaders();
}


/* Build a vertex+fragment program with tessellation control/evaluation
   stages.  Stops and returns false at the first failure. */
bool Shader::load(const char* vertfile, const char* fragfile, const char* tescfile, const char* tesefile)
{
    return LoadShader(vertfile, GL_VERTEX_SHADER, &vertProg)
        && LoadShader(fragfile, GL_FRAGMENT_SHADER, &fragProg)
        && LoadShader(tescfile, GL_TESS_CONTROL_SHADER, &tescProg)
        && LoadShader(tesefile, GL_TESS_EVALUATION_SHADER, &teseProg)
        && LinkShaders();
}


/* Build a program with all five stages: vertex, fragment, geometry, and
   tessellation control/evaluation.  Stops and returns false at the
   first failure. */
bool Shader::load(const char* vertfile, const char* fragfile, const char* geomfile, const char* tescfile, const char* tesefile)
{
    return LoadShader(vertfile, GL_VERTEX_SHADER, &vertProg)
        && LoadShader(fragfile, GL_FRAGMENT_SHADER, &fragProg)
        && LoadShader(geomfile, GL_GEOMETRY_SHADER, &geomProg)
        && LoadShader(tescfile, GL_TESS_CONTROL_SHADER, &tescProg)
        && LoadShader(tesefile, GL_TESS_EVALUATION_SHADER, &teseProg)
        && LinkShaders();
}




/* Make this program current, but only if it linked successfully. */
void Shader::use(void)
{
    if (!ready)
        return;
    glUseProgram(shaderProg);
}


/* Thin wrapper around glGetUniformLocation for this program; per GL
   semantics the result is -1 when the uniform is not active. */
int Shader::getUniformLocation(const char* uniform)
{
    const int location = glGetUniformLocation(shaderProg, uniform);
    return location;
}


/* Read a GLSL source file, compile it as 'shaderType', and attach the
   resulting shader object to this program.
   Parameters:
     filename   - path of the shader source file.
     shaderType - GL_VERTEX_SHADER, GL_FRAGMENT_SHADER, etc.
     shader     - out: receives the new shader object handle.
   Returns true on success; on failure an error is reported via
   error()/errormore() and false is returned.
   BUGFIX: the file is now opened in binary mode and the buffer is
   NUL-terminated.  Previously the file was opened in text mode "r"
   while the buffer size came from ftell() after SEEK_END — on Windows
   CRLF translation makes fread() return fewer bytes than ftell()
   reported, and on any platform the buffer had no terminator even
   though glShaderSource() with a NULL length array requires
   NUL-terminated strings (reading past the allocation is undefined
   behavior). */
bool Shader::LoadShader(const char* filename, GLenum shaderType, unsigned int* shader)
{
    /* Open in binary mode so ftell()'s byte count matches what fread()
       will actually deliver. */
    FILE* F = fopen(filename, "rb");
    if (F == NULL)
    {
        error("In Shader::LoadShader():\n");
        errormore("Could not open \"%s\"\n", filename);
        return false;
    }

    /* Get the file length. */
    fseek(F, 0, SEEK_END);
    long fileLen = ftell(F);
    fseek(F, 0, SEEK_SET);
    if (fileLen <= 0)
    {
        error("In Shader::LoadShader():\n");
        errormore("Could not read \"%s\" (empty or unreadable)\n", filename);
        fclose(F);
        return false;
    }

    /* Read the whole file; the extra byte guarantees NUL termination,
       which glShaderSource() needs when no length array is passed. */
    char* fileText = new char[fileLen + 1];
    size_t bytesRead = fread(fileText, 1, (size_t)fileLen, F);
    fileText[bytesRead] = '\0';

    /* Create and compile the shader object. */
    *shader = glCreateShader(shaderType);
    glShaderSource(*shader, 1, (const GLchar**)&fileText, 0);
    glCompileShader(*shader);

    /* GL copies the source during glShaderSource(), so the buffer and
       file can be released regardless of the compile result. */
    delete[] fileText;
    fclose(F);

    /* Check for compile errors (GL_COMPILE_STATUS is GL_FALSE == 0 on
       failure). */
    int success = 0;
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &success);
    if (success == 0)
    {
        char infolog[1024];
        glGetShaderInfoLog(*shader, sizeof(infolog), NULL, infolog);
        error("Error compiling shader '%s':\n", filename);
        errormore("%s\n", infolog);
        return false;
    }

    /* Attach the compiled shader to the program for a later link. */
    glAttachShader(shaderProg, *shader);
    return true;
}


/* Link all attached shader stages into the program.  On success marks
   the shader 'ready' (use() refuses to bind an unlinked program); on
   failure reports the GL info log and returns false. */
bool Shader::LinkShaders(void)
{
    glLinkProgram(shaderProg);

    /* GL_LINK_STATUS is GL_FALSE (0) on failure. */
    int linkStatus = 0;
    glGetProgramiv(shaderProg, GL_LINK_STATUS, &linkStatus);
    if (!linkStatus)
    {
        char infolog[1024];
        glGetProgramInfoLog(shaderProg, sizeof(infolog), NULL, infolog);
        error("Error linking shader:\n");
        errormore("%s\n", infolog);
        return false;
    }

    /* And we're good to go! */
    ready = true;
    return true;
}

Huge thanks in advance to anybody that can point me in the right direction!

tonyo_au
12-20-2013, 10:52 PM
Try putting some glGetError calls in the code. It might give you a clue.

mbentrup
12-21-2013, 05:22 AM
When you request version 3.0 you will get a compatibility context (well, technically there are no profiles before version 3.2, but contexts <= 3.1 behave exactly like compatibility contexts), whereas when you request version 3.2 or higher you get a core profile by default. On a core profile you have to create and bind a VAO first, otherwise nothing will be drawn.

WIld Sage
12-21-2013, 03:21 PM
I looked into VAOs and I managed to convert most of my drawing to work with them. However, a new problem seems to have arisen - when I try to run the program in GL 3.3, I get a segfault at glGenVertexArrays(). I googled the problem and found a suggestion to set glewExperimental = GL_TRUE;, but that didn't fix it - I still get a segfault at the same place. Like before, it works in GL 3.1, except when I try to draw something that's indexed - glDrawArrays() works fine, but glDrawElements() causes a segfault.

I'm using GLEW 1.10.0 and SDL 2.0.1

EDIT: I fixed the problem with glDrawElements() - passing 'NULL' instead of '0' fixed the issue (I don't even...).

EDIT: And now, for some reason, it all works, regardless of the version of OpenGL I use. Works on 3.1, works on 3.3, works on 4.3, etc etc.