Ortho and Shader Vertices

Hi guys,

You helped me get things on the screen with shaders, but now I am trying to get the coordinates correct. Given the vertices below that I am passing in:

[b]0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    1.4013e-45,    2.8026e-45,    0,    0,    4.22246e-38,    0.0025,    0,    0,    0,    0,    -0.00333333,    0,    0,    0,    0,    -1,    0,    -1,    1,    -0,    1,    1,    0,    -0,    0,    -0,    1,    -0,    0,    0,    0,    1,    0,    -0,    -0,    -1[/b]

I get the following on screen:
[screenshot of the rendered result attached]

Here is the code for my matrix:

Window w/h is 800x600

int w, h;
SDL_GetWindowSize(mWindow, &w, &h);
projection = glm::ortho(0.0f, static_cast<float>(w), static_cast<float>(h), 0.0f, -1.0f, 1.0f);
view = glm::lookAt(
			glm::vec3(0,0,1), // Camera is at (0,0,1), in World Space
			glm::vec3(0,0,0), // and looks at the origin
			glm::vec3(0,1,0)  // Head is up (set to 0,-1,0 to look upside-down)
);
model = glm::mat4(1.0f);
ModelViewMatrix = projection * view * model;
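
For reference, here's a quick sanity check I sketched (assuming the same GLM setup as above; needs <glm/glm.hpp> and <glm/gtc/matrix_transform.hpp>) to see what this matrix does to pixel-space points. With these values the lookAt view only translates along z, and the ortho maps pixel coordinates straight to NDC:

glm::mat4 proj = glm::ortho(0.0f, 800.0f, 600.0f, 0.0f, -1.0f, 1.0f);
glm::mat4 view = glm::lookAt(glm::vec3(0,0,1), glm::vec3(0,0,0), glm::vec3(0,1,0)); // effectively a translate by (0,0,-1)

glm::vec4 topLeft     = proj * view * glm::vec4(0.0f,   0.0f,   0.0f, 1.0f); // expect (-1,  1, 1, 1)
glm::vec4 bottomRight = proj * view * glm::vec4(800.0f, 600.0f, 0.0f, 1.0f); // expect ( 1, -1, 1, 1)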

Vertex Data Fill Function:

/**
 * Used internally to fill the vertex array with quad vertex information.
 */
void OGL_Core_Renderer::fillVertexArray(GLfloat x, GLfloat y, GLfloat w, GLfloat h)
{
	cout << "Vertices Array:\n";
	for (int i = 0; i < sizeof(mVertexArrayData); i++) {
		cout << mVertexArrayData[i] << ",	";
	}
	cout << endl;
	mVertexArrayData[0] = static_cast<GLfloat>(x); mVertexArrayData[1] = static_cast<GLfloat>(y + h); mVertexArrayData[2] = 0;
	mVertexArrayData[3] = static_cast<GLfloat>(x); mVertexArrayData[4] = static_cast<GLfloat>(y); mVertexArrayData[5] = 0;
	mVertexArrayData[6] = static_cast<GLfloat>(x + w); mVertexArrayData[7] = static_cast<GLfloat>(y); mVertexArrayData[8] = 0;
	mVertexArrayData[9] = static_cast<GLfloat>(x + w);	mVertexArrayData[10] = static_cast<GLfloat>(y + h); mVertexArrayData[11] = 0;
	mVertexArrayData[12] = static_cast<GLfloat>(x);	mVertexArrayData[13] = static_cast<GLfloat>(y + h); mVertexArrayData[14] = 0;


	glBufferData(GL_ARRAY_BUFFER, 3 * 5 * sizeof(mVertexArrayData), mVertexArrayData, GL_DYNAMIC_DRAW);
	getError();
}
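
As a side note, mVertexArrayData is meant to hold 15 GLfloats (5 vertices x 3 components). Assuming it is declared as a plain C array, this is roughly how I'd expect the upload size to be computed; just a sketch, not the code above:

GLfloat mVertexArrayData[15]; // assumed declaration: 5 vertices, 3 floats each

// sizeof() on a plain array already gives the full byte count (15 * sizeof(GLfloat)),
// so no extra multiplier is needed:
glBufferData(GL_ARRAY_BUFFER, sizeof(mVertexArrayData), mVertexArrayData, GL_DYNAMIC_DRAW);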

and draw:

void OGL_Core_Renderer::drawVertexArray(GLuint textureId, bool defaultTextureCoords){
	clearScreen(0, 0, 0);


	glUseProgram(mShaderManager->getShaderProgram());
	getError();
	
	mModelViewMatrix = glGetUniformLocation(mShaderManager->getShaderProgram(), "mvp");
	getError();
	
	glUniformMatrix4fv(mModelViewMatrix, 1, GL_FALSE, glm::value_ptr(ModelViewMatrix));
	getError();
	
	glBindVertexArray(mVertexArray);
	getError();


	glDrawArrays(GL_TRIANGLE_STRIP, 0, 5);
	getError();


	glBindVertexArray(0);
	getError();
	glUseProgram(0);
	getError();
}
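
Side note to myself: since the shader program doesn't change between frames, I could probably look the uniform up once at initialization instead of on every draw. A rough sketch, using a hypothetical mMvpLocation member:

// once, after the shader program is linked (mMvpLocation is a hypothetical GLint member):
mMvpLocation = glGetUniformLocation(mShaderManager->getShaderProgram(), "mvp");

// then per frame, only the upload remains:
glUniformMatrix4fv(mMvpLocation, 1, GL_FALSE, glm::value_ptr(ModelViewMatrix));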

My shaders - Vert:

#version 150

in vec4 position;


uniform mat4 mvp;


void main()
{
    gl_Position = mvp * position;
}

and Frag:

#version 150

out vec3 fragColor;


void main()
{
    fragColor = vec3(0,0,1);
}

I have the frag hard coded for testing before I implement texturing. Any thoughts on where I have gone wrong? It seems to be trying to normalize the coordinates.

Could the issue be that I don't pad the position going into the vertex shader?

We need your vertex array creation code

Ahh I apologize for not including that. Here it is:

    glGenVertexArrays(1, &mVertexArray);
    getError();
    glBindVertexArray(mVertexArray);
    getError();


    glGenBuffers(1, &mVertexBufferObject);
    getError();
    glBindBuffer(GL_ARRAY_BUFFER, mVertexBufferObject);
    getError();
    glBufferData(GL_ARRAY_BUFFER, 4 * 6 * sizeof(GLfloat), DEFAULT_VERTEX_COORDS, GL_DYNAMIC_DRAW);
    getError();


    glGenBuffers(1, &mTextureBufferObject);
    getError();
    glBindBuffer(GL_ARRAY_BUFFER, mTextureBufferObject);
    getError();
    glBufferData(GL_ARRAY_BUFFER, 2 * sizeof(DEFAULT_TEXTURE_COORDS), DEFAULT_TEXTURE_COORDS, GL_STATIC_DRAW);
    getError();
    glEnableVertexAttribArray(0);
    getError();


    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
    getError();

This is during initialization, but the data is supposed to be replaced when I go to render something on screen, like an image. I pass the coordinates (0,0) into the fillVertexArray function along with an image width and height; for this example I am passing (0,0) and a width/height of 800x600, the same size as the viewport.
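
For reference, this is roughly the attribute setup I would expect to need if I keep three floats (x, y, z) per vertex the way fillVertexArray writes them; just a sketch of my understanding, not what is in my code above:

// three tightly packed floats per vertex, matching the x, y, z triples written in fillVertexArray
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);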

The texture part isn’t used yet, that is next.

DEFAULT_VERTEX_COORDS is this:

GLfloat DEFAULT_VERTEX_COORDS[8] =    { 1.0f, 1.0f,  1.0f, 32.0f,  32.0f, 32.0f,  32.0f, 0.0f };
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);

You seem to be defining your vertex position as a vec2, but in the shader it is a vec4, so it will be padded with 0,1 - is this what you want?

No, ideally I want to use just two coordinates since I only do 2D rendering, but I was told I needed to pad it…
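
Something like this is what I had in mind for the vertex shader if I stick with two floats per vertex - an untested sketch where the padding to vec4 is done explicitly in GLSL:

#version 150

in vec2 position;

uniform mat4 mvp;

void main()
{
    // pad the 2D position with z = 0, w = 1 before applying the matrix
    gl_Position = mvp * vec4(position, 0.0, 1.0);
}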

EDIT: If I remove the MVP multiplication in the shader I get a blue box in the upper right hand corner as if the screen was divided into 4 squares…

I think the issue is that my ortho projection is incorrect: the origin (0,0) should be at the top-left corner, and I believe that without the projection multiply, (0,0) currently ends up in the middle of the screen.

If all I am trying to do is render 2D objects, wouldn't all I need be:


int w, h;
SDL_GetWindowSize(mWindow, &w, &h);
projection = glm::ortho(0.0f, static_cast<float>(w), static_cast<float>(h), 0.0f);

right? I would just need to translate so (0,0) is in the upper left.
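
To convince myself, here is a quick check I sketched of where pixel coordinates land under that projection (assuming the 4-argument glm::ortho overload, which as far as I know behaves like near = -1, far = 1):

// needs <glm/glm.hpp> and <glm/gtc/matrix_transform.hpp>
glm::mat4 proj = glm::ortho(0.0f, 800.0f, 600.0f, 0.0f);

glm::vec4 origin = proj * glm::vec4(0.0f,   0.0f,   0.0f, 1.0f); // expect (-1,  1, 0, 1): top-left in NDC
glm::vec4 centre = proj * glm::vec4(400.0f, 300.0f, 0.0f, 1.0f); // expect ( 0,  0, 0, 1): middle of the screen

If that is right, would I even need the extra translation, since (0,0) already seems to end up in the upper left?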