Use of VBO and shaders causes a black screen

Hi
I’m trying to implement VBOs in my application (which uses shaders).
But when I use the code below, I just get a black screen.
I guess I just mixed up some digits, but I didn’t find my mistake. Note that I do not want to use anything above the OpenGL 2.1 specification!
First of all I want to explain how this works (or should work)
I’m using a 3D array in which every element represents a cube and therefore contains an integer value that represents a specific texture. If that value is 0, then this cube represents air and mustn’t be rendered. (Also I want to render one extra cube.)
Based on this I’m setting up one VBO (which contains a position (xyz), some data (a value for the texture, which is selected in the shader, and the normal (its full components are calculated in the shader)) and the color (rgb) for each vertex in each cube (24 vertices/cube)).
So Therefore:

void Render::updateScene()
{
	//Getting the number of cubes to be rendered plus one extra cube
	renderSize = Blockmanager::getChunkSize(0)+1;
	int b = 0; //The Bufferoffset
	//Allocating mem for the vertices
	pvertex = (VERTEX*)malloc(sizeof(VERTEX)*24*(renderSize));
	//Allocating mem for the indices
	pindex = (GLshort*)malloc(sizeof(GLshort)*36*(renderSize));
	//pvertex is a pointer to a struct (VERTEX{float[3],float[3],int[2]})
	//pindex is a pointer to a GLshort

  //now I have to calculate the array which I will pass to glBufferData();
	//writing one extra cube to the array
	if(!texX==0) renderBlock(texX-1, offSetX, offSetY, offSetZ, 0);
	b++;
	//writing all other cubes to the array
	for(int i = 0; i < 1; i++){
	 for(int x = 0; x < CHUNKSIZE; x++){
	  for(int y = 0; y < CHUNKHEIGHT; y++){
	   for(int z = 0; z < CHUNKSIZE; z++){
		if(!((Blockmanager::chunk[x][y][z][i].BlockId == 0) ||
		(x-CHUNKSIZE/2==offSetX && y-CHUNKHEIGHT/2==offSetY
				 && z-CHUNKSIZE/2==offSetZ)))
		//checking if the cube represents air or
		//if the extra cube has the same coordinates as this cube
	 //(thought has been given to this in Blockmanager::getChunkSize(0);)
		{
			//writing the cube with offset b and the coordinates
			//through which the cube is defined in the array
			renderBlock(Blockmanager::chunk[x][y][z][i].BlockId-1,
			  x-CHUNKSIZE/2, y-CHUNKHEIGHT/2, z-CHUNKSIZE/2, b);
			b++;}}}}
}

still void Render::updateScene():


	//Now the VBO/IBO/... can be created
	//Create the IBO
	//16 bit indices
	glGenBuffers(1, &IBO);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IBO);
   glBufferData(GL_ELEMENT_ARRAY_BUFFER, (renderSize)*36*sizeof(GLushort),
			  pindex, GL_STATIC_DRAW);
		//36 indices per cube, number of cubes: rendersize,
		//each index has the size of sizeof(GLushort)
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

	//Create VBO
	glGenBuffers(1, &VBO);
	glBindBuffer(GL_ARRAY_BUFFER, VBO);
	glBufferData(GL_ARRAY_BUFFER, sizeof(VERTEX)*24*(renderSize),
			  pvertex, GL_STATIC_DRAW);
		//24 vertices per cube, number of cubes: rendersize,
		//each Vertex has the size of sizeof(VERTEX)

	//Just testing
	glBindBuffer(GL_ARRAY_BUFFER, 0);

	//VAO
	glGenVertexArrays(1, &VAO);
	glBindVertexArray(VAO);

	//Bind the VBO and setup pointers for the VAO
	glBindBuffer(GL_ARRAY_BUFFER, VBO);
	glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(VERTEX),
		   BUFFER_OFFSET(0)); // 3 floats in "position"
	glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(VERTEX),
		   BUFFER_OFFSET(sizeof(float)*3)); //3 floats in "color"
	glVertexAttribPointer(2, 2, GL_INT, GL_FALSE, sizeof(VERTEX),
		 BUFFER_OFFSET(sizeof(float)*6)); //2 integer values in "data"
	glEnableVertexAttribArray(0);
	glEnableVertexAttribArray(1);
	glEnableVertexAttribArray(2);

	//Bind the IBO for the VAO
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IBO);

	//Just testing
	glBindVertexArray(0);
	glDisableVertexAttribArray(0);
	glDisableVertexAttribArray(1);
	glDisableVertexAttribArray(2);

	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}

void Render::renderBlock(int texture, float x, float y, float z, int bufferOffSet)
// writes the vertices and indices of one cube (x, y, z, texture) to pvertex and pindex with an offset of bufferOffSet
{

Click to reveal.. ]
```

const float blockSize = 0.5f;
static GLfloat vert[24][3] = { //Vertex Coordinates of a cube
{ blockSize+x, -blockSize+y, -blockSize+z}, {-blockSize+x, -blockSize+y, -blockSize+z},
{-blockSize+x, blockSize+y, -blockSize+z}, { blockSize+x, blockSize+y, -blockSize+z},
{-blockSize+x, -blockSize+y, blockSize+z}, { blockSize+x, -blockSize+y, blockSize+z},
{ blockSize+x, blockSize+y, blockSize+z}, {-blockSize+x, blockSize+y, blockSize+z},
{-blockSize+x, -blockSize+y, -blockSize+z}, {-blockSize+x, -blockSize+y, blockSize+z},
{-blockSize+x, blockSize+y, blockSize+z}, {-blockSize+x, blockSize+y, -blockSize+z},
{ blockSize+x, -blockSize+y, blockSize+z}, { blockSize+x, -blockSize+y, -blockSize+z},
{ blockSize+x, blockSize+y, -blockSize+z}, { blockSize+x, blockSize+y, blockSize+z},
{ blockSize+x, blockSize+y, blockSize+z}, { blockSize+x, blockSize+y, -blockSize+z},
{-blockSize+x, blockSize+y, -blockSize+z}, {-blockSize+x, blockSize+y, blockSize+z},
{-blockSize+x, -blockSize+y, blockSize+z}, {-blockSize+x, -blockSize+y, -blockSize+z},
{ blockSize+x, -blockSize+y, -blockSize+z}, { blockSize+x, -blockSize+y, blockSize+z}};
GLfloat color0 = (texture == 0)? 0.55f:1.0f; // Color calculation
GLfloat color1 = (texture == 0)? 0.78f:1.0f;
GLfloat color2 = (texture == 0)? 0.24f:1.0f;
static GLfloat col[3] = { // colors of a cube
{color0, color1, color2}, {color0, color1, color2}};
int f = 0;
for(int n = 0; n < 24; n++) //writing the vertex-information for a cube (24)
{
for(int m = 0; m < 3; m++)
{
pvertex[bufferOffSet*24+n].col[m] = col[n]; //writing the rgb color
pvertex[bufferOffSet*24+n].pos[m] = vert[n][m]; //writing the xyz position
}
//tell the shader which side is rendered (for normal calculation)
pvertex[bufferOffSet*24+n].data[0] = (n-f)/4;
//tell the shader which texture should be used
pvertex[bufferOffSet*24+n].data[1] = texture;
f++;
if(f>3)f=0;
}
for(int n = 0; n < 6; n++) //writing the indices for a cube (36)
{
pindex[bufferOffSet*36+n*6+0] = 0;
pindex[bufferOffSet*36+n*6+1] = 1;
pindex[bufferOffSet*36+n*6+2] = 2;
pindex[bufferOffSet*36+n*6+3] = 2;
pindex[bufferOffSet*36+n*6+4] = 1;
pindex[bufferOffSet*36+n*6+5] = 3;
}
}
And here's how the scene is rendered

void Render::mainRender()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT
| GL_STENCIL_BUFFER_BIT);
glBindVertexArray(VAO);
glBindTexture(GL_TEXTURE_2D, TextureManager::terrain[1].texID);
//I’m still in the testing stage, so here just one texture is bound,
//later on I will bind a Layered Texture
//which contains all Textures for the cubes
//and the shader will select the right layer.
//Draw command
//The first to last vertex is 0 to 24*(renderSize)-1
//sizeof(GLshort)*36*(renderSize) indices will be used.
//The last parameter is the start address in the IBO => zero
glDrawRangeElements(GL_QUADS, 0, 24*(renderSize)-1,
sizeof(GLshort)*36*(renderSize), GL_UNSIGNED_SHORT, NULL);
usleep(1);
}


And for the sake of completeness, here is how I set up my shader and the shader code (I think the rest of my code is unrelevant for my problem):

void setShaders() {

//Compiling shaders... -> program p

glBindAttribLocation(p, 0, "inPosition");
glBindAttribLocation(p, 1, "inColor");
glBindAttribLocation(p, 2, "data");

//Linking program p...

}


vertexshader:

attribute ivec2 data;
attribute vec3 inPosition;
attribute vec3 inColor;
uniform mat4 proj_matrix;
uniform mat4 model_matrix;

void main()
{
gl_Position = proj_matrix * model_matrix * vec4(inPosition, 1.0);
gl_Color = vec4(1.0,1.0,1.0,1.0);//vec4(inColor, 1.0);
}


fragmentshader:

void main()
{
gl_FragColor = gl_Color;
}


Actually there's much more code in the shaders. But to identify my mistake I left out all of the code that isn't needed.
The most of the part of setting up the VBO/IBO/... is copied from [that tutorial](http://www.opengl.org/wiki/Tutorial1:_Rendering_shapes_with_glDrawRangeElements,_VAO,_VBO,_shaders_%28C%2B%2B_/_freeGLUT%29).
I used the php code tags to make it more clear, but actually I’m using C++, and I’m sorry for the bad code formatting, but the boxes are too small and with spoilers it would be even worse.
Thanks in advance!

oh, there’s a mistake!
where it says:

glDrawRangeElements(GL_QUADS, 0, 24*(renderSize)-1,
	sizeof(GLshort)*36*(renderSize), GL_UNSIGNED_SHORT, NULL);

it should be:

glDrawRangeElements(GL_QUADS, 0, 24*(renderSize)-1,
	36*(renderSize), GL_UNSIGNED_SHORT, NULL);

But, unfortunately, that doesn’t solve my problem.

Hi,
I dont know if this helps but just skimming through the code i notice that this line


glVertexAttribPointer(2, 2, GL_INT, GL_FALSE, sizeof(VERTEX), BUFFER_OFFSET(sizeof(float)*6));

shouldn’t it be this


glVertexAttribPointer(2, 2, GL_INT, GL_FALSE, 2*sizeof(GL_INT), BUFFER_OFFSET(sizeof(float)*6));

glVertexAttribPointer(2, 2, GL_INT, GL_FALSE, 2*sizeof(GL_INT), BUFFER_OFFSET(sizeof(float)*6));

I tried to do that, but it doesn’t change anything.
i got the code from here where it says:

glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(TVertex_VC), BUFFER_OFFSET(0));
glVertexAttribPointer(3, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(TVertex_VC), BUFFER_OFFSET(sizeof(float)*3));
[...]
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(TVertex_VNT), BUFFER_OFFSET(0));
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(TVertex_VNT), BUFFER_OFFSET(sizeof(float)*3));
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(TVertex_VNT), BUFFER_OFFSET(sizeof(float)*6));

^^here the full size of the vertex-struct is used, too. Is this wrong then?
regards freecraft

going by what you said originally,

Based on this i’m setting up one VBO (which contains a position (xyz),some data (a value for the texture which is selected in the shader and the normal (its full components are calculated in the shader)) and the color (rgb) for each Vertex in each cube (24 Vertices/cube)).

then a ‘VERTEX’ should be a struct containing
struct VERTEX {
glfloat[3] position
glint[2] data
glfloat[3] normal
glfloat[3] color}
I’m not sure about the order, but that does need to be well defined and consistent in your VBO setup code. Let’s assume this order for this post. I’ll also assume you send 3 elements for the normal - which is contradicting your blurb about reconstructing the full normal, and the fact your VBO pointers do not include the normal.

So, I make the ‘VERTEX’ structure to be:
3 * sizeof (glfloat) 12 bytes
2 * sizeof (glInt) 8 bytes
3 * sizeof (glfloat) 12 bytes
3 * sizeof (glfloat) 12 bytes

So, your first job is to check that STRUCT and determine its size in bytes (44 bytes?).

glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(VERTEX),
BUFFER_OFFSET(0)); // 3 floats in “position”
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(VERTEX),
BUFFER_OFFSET(sizeof(float)*3)); //3 floats in “color”
glVertexAttribPointer(2, 2, GL_INT, GL_FALSE, sizeof(VERTEX),
BUFFER_OFFSET(sizeof(float)*6)); //2 integer values in “data”

This seems to be along the right lines. The 5th argument is the stride (in bytes) between consecutive vertices. We know this to be 44 bytes, i.e. sizeof(VERTEX). However, the order presented is not the order you first mentioned the VERTEX struct to be: you have position, color, data where I have shown position, data, normal, color. Ignoring normal for now, that’s still swapping data and color.

On to your vertex shader.
You have not declared a version number (or not telling us anyway).

attribute ivec2 data;
attribute vec3 inPosition;
attribute vec3 inColor;
uniform mat4 proj_matrix;
uniform mat4 model_matrix;

void main()
{
gl_Position = proj_matrix * model_matrix * vec4(inPosition, 1.0);
gl_Color = vec4(1.0,1.0,1.0,1.0);//vec4(inColor, 1.0);
}

…because you have used gl_Color = vec4(1.0,1.0,1.0,1.0) then the shader will have removed the unused attributes data and color.
Are you saying that the cubes are not rendered properly? Garbage on the screen? What do you see?

Also, I’m not 100% sure, but I think you need #version 130 to use integer vertex attributes in shaders.

Finally, onto the bit where I see the most difficulty. I assume the above has not made any difference - so here goes…

//now I have to calculate the array which I will pass to glBufferData();
//writing one extra cube to the array
if(!texX==0) renderBlock(texX-1, offSetX, offSetY, offSetZ, 0);
b++;
//wrting all other cubes to the array
for(int i = 0; i < 1; i++){
for(int x = 0; x < CHUNKSIZE; x++){
for(int y = 0; y < CHUNKHEIGHT; y++){
for(int z = 0; z < CHUNKSIZE; z++){
if(!((Blockmanager::chunk[x][y][z][i].BlockId == 0) ||
(x-CHUNKSIZE/2==offSetX && y-CHUNKHEIGHT/2==offSetY
&& z-CHUNKSIZE/2==offSetZ)))
//checking if the cube represents air or
//if the extra cube has the same coordinates as this cube
//(thought has been given to this in Blockmanager::getChunkSize(0);)
{
//writing the cube with offset b and the coordinates
//through which the cube is defined in the array
renderBlock(Blockmanager::chunk[x][y][z][i].BlockId-1,
x-CHUNKSIZE/2, y-CHUNKHEIGHT/2, z-CHUNKSIZE/2, b);
b++;}}}}
}

I don’t know about you, but I can’t follow or debug that many nested levels.
Why not restructure the code and make it much more simple. You know a cube is 24 sets of ‘VERTEX’. So why not just create a linear (1-dimensional) array of ‘VERTEX’ and populate with 24 entries for 1 cube. Repeat for n cubes.
Much simpler and I guarantee you, it will work!

NOTE: This is post is just to complete the topic and to tell what was wrong (for someone who has the same error)! I’m not expecting an answer!

  1. my struct was this:
struct VERTEX {
glfloat[3] position
glfloat[3] color
glint[2] data}

^I guess this wasn’t wrong.
2)

So, your first job is to check that STRUCT and determine its size in bytes (44 bytes?).

I thought that you have to make the struct size a multiple of 32 bytes (because of compatibility with ATI cards)?
3)

You have not declared a version number (or not telling us anyway).

I don’t want to use anything above GLSL 1.20 (because of compatibility)
I guess this means:

#version 120

?
4)

…because you have used gl_Color = vec4(1.0,1.0,1.0,1.0) then the shader will have removed the unused attributes data and color.
Are you saying that the cubes are not rendered properly? Garbage on the screen? What do you see?

I just saw a black screen and because I didn’t know where the mistake is I wanted to draw triangles, the color doesn’t matter for debugging! I just wanted to be sure that none of the Vertices are rendered (or the Vertex data is broken).
5)

Also, I’m not 100% sure, but I think you need #version 130 to use integer vertex attributes in shaders.
That reminded me to look at my shaders error log (I had forgotten to look at it)
it said that I can’t use ivec’s, so I replaced them with vec’s. Also I found two other problems:

glDrawRangeElements(GL_QUADS, [...]);

On the screen I’m rendering QUADS, but actually I’m rendering Triangles (see my IBO), so GL_Triangles is the right flag.
In my initialisation code I set up wrong indices, it should be:

pindex[bufferOffSet*36+n*6+0] = bufferOffSet*24+1+n*4;
pindex[bufferOffSet*36+n*6+1] = bufferOffSet*24+0+n*4;
pindex[bufferOffSet*36+n*6+2] = bufferOffSet*24+2+n*4;
pindex[bufferOffSet*36+n*6+3] = bufferOffSet*24+2+n*4;
pindex[bufferOffSet*36+n*6+4] = bufferOffSet*24+0+n*4;
pindex[bufferOffSet*36+n*6+5] = bufferOffSet*24+3+n*4;

So the problem is solved for me.
Thank you for your posts