Hellooo.
I’ve started programming this cube made out of triangles, and I decided to try compiling it halfway through applying all my vertices. The program compiles fine, but on execution the colour of my half-finished cube is a depressing white — even though I had specified a colour to be sent with every vertex :’( (that’s another thing I need help with, and I’ll explain it further on). Could someone please take a look at my code and help me understand why my fragment shader isn’t doing anything with the colours I’m sending to it? And also, can someone explain how to send colour values that are applied to each vertex? I saw the method in a tutorial, but I’m doing it slightly differently, and the tutorial doesn’t explain how the shaders actually read the data — it just tells you what to do. Thanks in advance :).
FYI, I think the problem is either that I’ve made my shaders wrong, or that the ShaderInfo structure — which I got from the OpenGL Red Book, and use around line 68 of my main file to load my shaders — doesn’t take colour values.
Here is my main file:
#include "main.h" //g++ main.cpp -lGLEW -lGL -lglfw -lGLU -lglut -o triangle
// Converts a byte offset into the pointer argument glVertexAttribPointer expects.
#define BUFFER_OFFSET(offset) ((void *)(offset))
using namespace std;
// Symbolic indices for VAOs, VBOs, and vertex attributes.
enum VAO_IDs {Triangles, NumVAOs};
enum Buffer_IDs {ArrayBuffer, cBuffer, NumBuffers};
// Attribute slots — these must match the layout(location = N) qualifiers
// in triangle.vert (vPosition -> 0, colour -> 1).
enum Attrib_IDs {vPosition = 0, colorPosition = 1};
GLuint VAOs[NumVAOs];
// NOTE(review): ColourObjects is never used anywhere visible in this file —
// the colour VBO lives in Buffers[cBuffer]; candidate for removal.
GLuint ColourObjects[NumVAOs];
GLuint Buffers[NumBuffers];
// Vertex count for the finished cube (6 faces * 2 triangles * 3 vertices).
// Only 12 vertices are actually filled in so far in init(); the rest of the
// array is zero-initialized.
const GLuint NumVertices = 36;
//assigning vertex data to buffer objects and preparing to send to
void init() //Vertex Shaders.
{
glewInit();
glGenVertexArrays(NumVAOs, VAOs);
glBindVertexArray(VAOs[Triangles]);
GLfloat vertices [NumVertices] [3] =
{
{ 0.30, 0.00, -0.30 }, //Triangle 1
{ 0.30, 0.00, 0.30 },
{ -0.30, 0.00, -0.30 },
{ -0.30, 0.00, -0.30 }, //Triangle 2
{ -0.30, 0.00, 0.30 },
{ 0.30, 0.00, -0.30 },
{ -0.30, 0.00, 0.30 }, //Triangle 3
{ 0.30, 0.00, 0.30 },
{ 0.30, 0.60, 0.30 },
{ -0.30, 0.60, 0.30 }, //Triangle 4
{ 0.30, 0.60, 0.30 },
{ -0.30, 0.00, 0.30 },
};
glGenBuffers(NumBuffers, Buffers);
glBindBuffer(GL_ARRAY_BUFFER, Buffers[ArrayBuffer]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(vPosition, 3, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(vPosition );
GLfloat g_color_buffer_data[NumVertices][3] =
{
{ 0.583f, 0.771f, 0.014f },
{ 0.609f, 0.115f, 0.436f },
{ 0.327f, 0.483f, 0.844f },
{ 0.822f, 0.569f, 0.201f },
{ 0.435f, 0.602f, 0.223f },
{ 0.310f, 0.747f, 0.185f },
{ 0.597f, 0.770f, 0.761f },
{ 0.559f, 0.436f, 0.730f },
{ 0.359f, 0.583f, 0.152f },
{ 0.483f, 0.596f, 0.789f },
{ 0.559f, 0.861f, 0.639f },
{ 0.195f, 0.548f, 0.859f },
};
glBindBuffer(GL_ARRAY_BUFFER, Buffers[cBuffer]);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_color_buffer_data), g_color_buffer_data, GL_STATIC_DRAW);
glVertexAttribPointer(colorPosition, 3, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(colorPosition);
glBindVertexArray(0);
ShaderInfo shaders[] =
{
{ GL_VERTEX_SHADER, "triangle.vert"},
{ GL_FRAGMENT_SHADER, "triangle.frag"},
{ GL_NONE, NULL}
};
}
// Redraws the scene: clears to orange, then draws the triangle list recorded
// in the Triangles VAO with whatever program is currently bound.
void display()
{
// Orange background; alpha 0 (presumably irrelevant for this window — confirm).
glClearColor(1,0.5f,0,0);
glClear(GL_COLOR_BUFFER_BIT);
// Re-bind the VAO set up in init() so the attribute pointers are active.
glBindVertexArray(VAOs[Triangles]); //?
// Draws all 36 slots even though only 12 vertices are filled in so far;
// the zero-filled remainder renders as degenerate triangles.
glDrawArrays(GL_TRIANGLES, 0, NumVertices);
// Single-buffered window: flush instead of swapping buffers.
glFlush();
}
// Creates a 400x400 single-buffered RGBA window with a 3.3 core-profile
// context, initializes GL state, and enters the GLUT event loop.
int main(int argc, char** argv)
{
const int width = 400, height = 400;
glutInit(&argc, argv);
glutInitWindowSize(width, height);
// GLUT_RGBA alone means a single-buffered framebuffer (no GLUT_DOUBLE).
glutInitDisplayMode(GLUT_RGBA);
// NOTE(review): context version/profile requests are freeglut extensions —
// confirm the linked GLUT is freeglut, or these calls won't exist.
glutInitContextVersion(3, 3);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow(argv[0]);
// Must run after glutCreateWindow: GL calls need a current context.
init();
glutDisplayFunc(display);
glutMainLoop();
return 0;
}
Here is my vertex shader:
#version 330 core

// Per-vertex inputs. The location qualifiers must match the attribute
// indices used with glVertexAttribPointer in the application
// (vPosition = 0, colorPosition = 1).
//
// BUG FIX: this attribute was declared as "vPostion" (typo) but read as
// "vPosition" in main(), so the shader failed to compile — with no valid
// program bound, everything rendered solid white.
layout(location = 0) in vec4 vPosition;
layout(location = 1) in vec3 vertexColor;

// Interpolated across each triangle and consumed by the fragment shader
// under the same name.
out vec3 fragmentColor;

void main()
{
    // The app supplies 3 floats per vertex; the missing w defaults to 1.0.
    gl_Position = vPosition;
    fragmentColor = vertexColor;
}
Here is my fragment shader:
#version 330 core

// Colour interpolated from the vertex shader's per-vertex "fragmentColor"
// output; the name and type must match the vertex-stage out declaration.
in vec3 fragmentColor;

// Fragment output written to colour attachment 0 of the default framebuffer.
out vec3 color;

void main()
{
    // Pass the interpolated per-vertex colour straight through.
    color = fragmentColor.rgb;
}
Here is the LoadShaders header file that I got from the OpenGL Red Book website (I don’t actually know how it works, and it could potentially be the problem):
//////////////////////////////////////////////////////////////////////////////
//
//  --- LoadShaders.h ---
//
//////////////////////////////////////////////////////////////////////////////
#ifndef __LOAD_SHADERS_H__
#define __LOAD_SHADERS_H__

#include <GL/gl.h>

#ifdef __cplusplus
extern "C" {
#endif  // __cplusplus

//----------------------------------------------------------------------------
//
//  LoadShaders() takes an array of ShaderFile structures, each of which
//    contains the type of the shader, and a pointer a C-style character
//    string (i.e., a NULL-terminated array of characters) containing the
//    entire shader source.
//
//  The array of structures is terminated by a final Shader with the
//    "type" field set to GL_NONE.
//
//  LoadShaders() returns the shader program value (as returned by
//    glCreateProgram()) on success, or zero on failure.
//

typedef struct {
    GLenum       type;      // GL_VERTEX_SHADER, GL_FRAGMENT_SHADER, ... or GL_NONE (list terminator)
    const char*  filename;  // path to the file containing the shader source
    GLuint       shader;    // filled in by LoadShaders() with the shader object name
} ShaderInfo;

//----------------------------------------------------------------------------
// BUG FIX: in the pasted version, this prototype had been swallowed into the
// separator comment above ("//----- GLuint LoadShaders( ShaderInfo* );"),
// so LoadShaders() was never declared and could not be called from main.cpp.
GLuint LoadShaders( ShaderInfo* );

#ifdef __cplusplus
}  // extern "C"
#endif  // __cplusplus

#endif  // __LOAD_SHADERS_H__
here is the result on execution:
[ATTACH=CONFIG]141[/ATTACH]