Hi there. I'm using Ubuntu 14.04 with GLEW and freeglut, and I have the latest NVIDIA drivers. I'm wondering why this code I wrote isn't working. It's in C.
Code :
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
 
#include <GL/glew.h>
#include <GL/freeglut.h>
 
GLuint vao, buff;
 
void init();
void display();
 
int main (int argc, char **argv)
{
	glutInit(&argc, argv);
	glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE);
	glutInitWindowSize(500, 500);
	glutInitContextVersion(4, 3);
	glutInitContextProfile(GLUT_CORE_PROFILE);

	glutCreateWindow("Test Programs");
 
	if (glewInit() != GLEW_OK)
	{
		printf("glew could not load");
		exit(1);
	}
 
	init();
 
	glutDisplayFunc(display);
 
	glutMainLoop();
}
 
void init()
{
	glGenVertexArrays(1, &vao);
	glBindVertexArray(vao);
 
 
	GLfloat vertices[][2] = {
		{-0.90, -0.90},
		{0.85, -0.90},
		{-0.90, 0.85},
		{0.90, -0.85},
		{0.90, 0.90},
		{-0.85, 0.90}
	};
 
	GLubyte colors[][4] = {
		{255, 0, 0, 255},
		{0, 255, 0, 255},
		{0, 0, 255, 255},
		{10, 10, 10, 255},
		{100, 100, 100, 255},
		{255, 255, 255, 255}
	};
 
	glGenBuffers(1, &buff);
	glBindBuffer(GL_ARRAY_BUFFER, buff);
	glBufferData(GL_ARRAY_BUFFER, sizeof(vertices) + sizeof(colors), NULL, GL_STATIC_DRAW);
	glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(vertices), vertices);
	glBufferSubData(GL_ARRAY_BUFFER, sizeof(vertices), sizeof(colors), colors);
 
	const char *vshader =
 
		"#version 430 core\n"
		"layout (location = 0) in vec2 vPosition;\n"
		"layout (location = 1) in vec4 vColor;\n"
		"out vec4 color;\n"
	    "void main()\n"
		"{\n"
		"	color = vColor;\n"
		"	gl_Position = vPosition;\n"
		"}\n";
 
	const char *fshader = 
		"#version 430 core\n"
		"in vec4 color;\n"
		"out vec4 fColor;\n"
		"void main()\n"
		"{\n"
		"	fColor = color;\n"
		"}\n";
 
	GLuint prog;
	prog = glCreateProgram();
 
	GLuint vert = glCreateShader(GL_VERTEX_SHADER);
	GLuint frag = glCreateShader(GL_FRAGMENT_SHADER);
 
	GLint vlen = (GLint)strlen(vshader);
	GLint flen = (GLint)strlen(fshader);
 
	glShaderSource(vert, 1, &vshader, &vlen);
	glShaderSource(frag, 1, &fshader, &flen);
	glCompileShader(vert);
	glCompileShader(frag);
 
 
	glAttachShader(prog, vert);
	glAttachShader(prog, frag);
	glLinkProgram(prog);
	glUseProgram(prog);
 
	glVertexAttribPointer(1, 4, GL_UNSIGNED_BYTE, GL_TRUE, 0, (void *)sizeof(vertices));
	glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, (void *)0);
 
	glEnableVertexAttribArray(1);
	glEnableVertexAttribArray(0);
}
 
void display()
{
	glClear(GL_COLOR_BUFFER_BIT);
	glBindVertexArray(vao);
	glDrawArrays(GL_TRIANGLES, 0 , 6);
	glutSwapBuffers();
}
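In case it helps, here's a check I could add right after glewInit() to confirm the 4.3 core context and the extension entry points actually loaded (a sketch only, not part of the program above; the glewExperimental line is something I've seen recommended for core-profile contexts but haven't verified myself):

	/* Sketch: would replace the glewInit() block in main(). */
	glewExperimental = GL_TRUE; /* reportedly needed by some GLEW versions on core profiles */
	GLenum err = glewInit();
	if (err != GLEW_OK)
	{
		fprintf(stderr, "glewInit failed: %s\n", glewGetErrorString(err));
		exit(1);
	}
	printf("GL_VERSION: %s\n", glGetString(GL_VERSION));
	printf("GLSL: %s\n", glGetString(GL_SHADING_LANGUAGE_VERSION));
	/* If this prints 0, the VAO entry point never resolved and calling
	   glGenVertexArrays() in init() would crash. */
	printf("glGenVertexArrays loaded: %d\n", glGenVertexArrays != NULL);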
It's my first time trying to enable multiple vertex attributes at the same time, so that I can get Gouraud shading working.
It compiles, but when I run it I get "Segmentation fault (core dumped)".
Is it something wrong with my shaders?
The array buffer?
The vertex attributes?
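For what it's worth, this is the kind of compile/link status check I could add after glCompileShader() and glLinkProgram() to rule the shaders out (a minimal sketch, not wired into init() above):

	/* Sketch: uses the vert, frag and prog handles from init(). */
	GLint ok = 0;
	char log[1024];

	glGetShaderiv(vert, GL_COMPILE_STATUS, &ok);
	if (!ok)
	{
		glGetShaderInfoLog(vert, sizeof(log), NULL, log);
		fprintf(stderr, "vertex shader: %s\n", log);
	}

	glGetShaderiv(frag, GL_COMPILE_STATUS, &ok);
	if (!ok)
	{
		glGetShaderInfoLog(frag, sizeof(log), NULL, log);
		fprintf(stderr, "fragment shader: %s\n", log);
	}

	glGetProgramiv(prog, GL_LINK_STATUS, &ok);
	if (!ok)
	{
		glGetProgramInfoLog(prog, sizeof(log), NULL, log);
		fprintf(stderr, "program link: %s\n", log);
	}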

Thanks