Hi guys,
You helped me get things on screen with shaders and such, but now I am trying to actually get the coordinates correct. Given the vertices below that I am passing in:
[b]0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.4013e-45, 2.8026e-45, 0, 0, 4.22246e-38, 0.0025, 0, 0, 0, 0, -0.00333333, 0, 0, 0, 0, -1, 0, -1, 1, -0, 1, 1, 0, -0, 0, -0, 1, -0, 0, 0, 0, 1, 0, -0, -0, -1[/b]
I get the following on screen:
[ATTACH=CONFIG]365[/ATTACH]
Here is the code for my matrix:
Window w/h is 800x600
int w, h; SDL_GetWindowSize(mWindow, &w, &h);
projection = glm::ortho(0.0f, static_cast<float>(w), static_cast<float>(h), 0.0f, -1.0f, 1.0f);
view = glm::lookAt(
    glm::vec3(0,0,1), // Camera is at (0,0,1), in World Space
    glm::vec3(0,0,0), // and looks at the origin
    glm::vec3(0,1,0)  // Head is up (set to 0,-1,0 to look upside-down)
);
model = glm::mat4(1.0f);
ModelViewMatrix = projection * view * model;
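For reference, here is a minimal standalone sketch (assuming only the GLM headers and iostream) that pushes a pixel-space point through these same matrices, as a quick way to see what NDC values come out:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <iostream>

int main()
{
    glm::mat4 projection = glm::ortho(0.0f, 800.0f, 600.0f, 0.0f, -1.0f, 1.0f);
    glm::mat4 view = glm::lookAt(glm::vec3(0, 0, 1),  // camera position
                                 glm::vec3(0, 0, 0),  // look-at target
                                 glm::vec3(0, 1, 0)); // up vector
    glm::mat4 mvp = projection * view * glm::mat4(1.0f);

    // The centre of an 800x600 window should land at NDC (0, 0);
    // z comes out at 1 because the camera sits one unit in front of z = 0.
    glm::vec4 ndc = mvp * glm::vec4(400.0f, 300.0f, 0.0f, 1.0f);
    std::cout << ndc.x << ", " << ndc.y << ", " << ndc.z << std::endl; // 0, 0, 1
    return 0;
}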
Vertex Data Fill Function:
/**
 * Used internally to fill the vertex array with quad vertex information.
 */
void OGL_Core_Renderer::fillVertexArray(GLfloat x, GLfloat y, GLfloat w, GLfloat h)
{
cout << "Vertices Array:
";
for (int i = 0; i < sizeof(mVertexArrayData); i++) {
cout << mVertexArrayData[i] << ", ";
}
cout << endl;
mVertexArrayData[0] = x;     mVertexArrayData[1] = y + h;  mVertexArrayData[2] = 0;
mVertexArrayData[3] = x;     mVertexArrayData[4] = y;      mVertexArrayData[5] = 0;
mVertexArrayData[6] = x + w; mVertexArrayData[7] = y;      mVertexArrayData[8] = 0;
mVertexArrayData[9] = x + w; mVertexArrayData[10] = y + h; mVertexArrayData[11] = 0;
mVertexArrayData[12] = x;    mVertexArrayData[13] = y + h; mVertexArrayData[14] = 0;
// sizeof(mVertexArrayData) is already the full size of the array in bytes,
// so no extra multiplier is needed; 3 * 5 * sizeof(...) would read far past the end.
glBufferData(GL_ARRAY_BUFFER, sizeof(mVertexArrayData), mVertexArrayData, GL_DYNAMIC_DRAW);
getError();
}
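A hypothetical call (the numbers are made up purely for illustration) shows the intended pixel-space layout: a 100x100 quad at (10, 10).

fillVertexArray(10.0f, 10.0f, 100.0f, 100.0f);
// mVertexArrayData afterwards:
// 10, 110, 0,   10, 10, 0,   110, 10, 0,   110, 110, 0,   10, 110, 0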
and draw:
void OGL_Core_Renderer::drawVertexArray(GLuint textureId, bool defaultTextureCoords)
{
clearScreen(0, 0, 0);
glUseProgram(mShaderManager->getShaderProgram());
getError();
mModelViewMatrix = glGetUniformLocation(mShaderManager->getShaderProgram(), "mvp");
getError();
glUniformMatrix4fv(mModelViewMatrix, 1, GL_FALSE, glm::value_ptr(ModelViewMatrix));
getError();
glBindVertexArray(mVertexArray);
getError();
glDrawArrays(GL_TRIANGLE_STRIP, 0, 5);
getError();
glBindVertexArray(0);
getError();
glUseProgram(0);
getError();
}
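In case it matters, this is the one-time attribute setup drawVertexArray() assumes; mVertexBuffer is a hypothetical name for the quad's VBO member, and the attribute index is looked up rather than hard-coded:

// Sketch of the VAO setup the draw call relies on (mVertexBuffer is assumed).
glBindVertexArray(mVertexArray);
glBindBuffer(GL_ARRAY_BUFFER, mVertexBuffer);
GLint posAttrib = glGetAttribLocation(mShaderManager->getShaderProgram(), "position");
glEnableVertexAttribArray(posAttrib);
// Three floats per vertex; the shader's vec4 "position" input gets w = 1.0 automatically.
glVertexAttribPointer(posAttrib, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
glBindVertexArray(0);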
My shaders - Vert:
#version 150
in vec4 position;
uniform mat4 mvp;
void main()
{
gl_Position = mvp * position;
}
and Frag:
#version 150
out vec3 fragColor;
void main()
{
fragColor = vec3(0,0,1);
}
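One note on #version 150: with a single output in the fragment shader, fragColor defaults to color location 0, but it can also be bound explicitly; the call has to happen before the program is linked (program here stands for the program handle):

// Must be called before glLinkProgram() to take effect.
glBindFragDataLocation(program, 0, "fragColor");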
I have the frag shader hard-coded for testing before I implement texturing. Any thoughts on where I have gone wrong? It seems to be normalizing the coordinates.