Part of the Khronos Group
OpenGL.org

The Industry's Foundation for High Performance Graphics

from games to virtual reality, mobile phones to supercomputers

Results 1 to 5 of 5

Thread: Help with drawing a triangle in 3.2 GLSL 1.4

  1. #1
    Junior Member Newbie
    Join Date
    Apr 2010
    Posts
    16

    Help with drawing a triangle in 3.2 GLSL 1.4

    Hello,

    I'm learning about OpenGL shaders, and today I decided to make a simple program that draws a triangle. I've been struggling all day and can't make it work. The program compiles and runs successfully, but the triangle is nowhere to be seen. Can you spot the problem?

    P.S. I'm using SDL 1.3 to manage contexts, windows, etc.

    Code :
    #define GL_GLEXT_PROTOTYPES
    #define GL3_PROTOTYPES 	1
     
    #include <GL3/gl3.h>
    #include "SDL.h"
    #include <iostream>
    #include <cmath>
     
    GLint vertexIndex;
    GLuint cacaProgram;//shader program
     
    int InitOpenGL(int,int); 
    void Draw();
    void Run();
    void setupGeometry();
    bool  setupShaders();
    int printShaderInfoLog(GLuint);
     
    bool quit=0;
     
    int width, height;
     
     
    int main(int argc, char **argv)
    {
      width=800;
      height=600; 
     
      if (!SDL_Init(SDL_INIT_VIDEO))
      {
      	std::cout<<"Some SDL Error: "<<SDL_GetError()<<std::endl;
      }
     
      SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
      SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION,2);
     
      SDL_SetVideoMode(width, height,0,SDL_OPENGL);
      if  (!InitOpenGL(width,height))
      {
      	std::cout<<"openGL couldn't be setup"<<std::endl;
      }
      Run();  
     
      return 1; 
    }
     
    int InitOpenGL(int w, int h)
    {
      if (!setupShaders())
      {
      	std::cout<<"shaders were not set correctly"<<std::endl;
    	return 0;
      }
     
      setupGeometry();
     
      glViewport(0,0,w,h); 
      glClearColor(1.0f,1.0f,1.0f,1.0f);   
      return 1;
    }
     
    void Run()
    {
      SDL_Event event;
      while(!quit)
      {
        	Draw();
        	while(SDL_PollEvent(&amp;event))
    	{
    		switch(event.type)
    	 	{
          			case SDL_KEYDOWN:		
    				switch(event.key.keysym.sym)
    				{			
    					case SDLK_ESCAPE: 
    					quit=1;
    					break;
    					default:
    					break;
    				}	
    				break;
           			case SDL_QUIT:
    			       quit=1;
    		        break;
    	 	}
       	}
     
      }
      SDL_Quit();
    }
     
    bool setupShaders()
    {
    	GLuint cacaVS, cacaFS;
    	GLint VSCompiled, FSCompiled, linked;
     
    	const char* VSCode = "#version 140\n\
    	in vec3 MCVertex;\
    	void main()\
    	{\
    		gl_Position = vec4(MCVertex,1.0);\
    	}\
    	";
     
    	const char* FSCode = "\
    	#version 140 \n\
    	out vec4 FragColor;\
    	void main()\
    	{\
    		FragColor = vec4(0.0,0.0,1.0,1.0);\
    	}\
    	";
    	cacaVS = glCreateShader(GL_VERTEX_SHADER);
    	cacaFS = glCreateShader(GL_FRAGMENT_SHADER);
    	glShaderSource(cacaVS, 1, &amp;VSCode, NULL);
    	glShaderSource(cacaFS, 1, &amp;FSCode,NULL);	
    	glCompileShader(cacaVS);
    	glGetShaderiv(cacaVS,GL_COMPILE_STATUS, &amp;VSCompiled);
    	printShaderInfoLog(cacaVS);
    	glCompileShader(cacaFS);
    	glGetShaderiv(cacaFS,GL_COMPILE_STATUS, &amp;FSCompiled);
    	printShaderInfoLog(cacaFS);
    	if (!VSCompiled || !FSCompiled)
    	{
    		std::cout<<"compile failed"<<std::endl;
    		return false;
    	}
     
    	cacaProgram = glCreateProgram();
    	glAttachShader(cacaProgram, cacaVS);
    	glAttachShader(cacaProgram, cacaFS);
     
    	//is this needed at all?
    	//how do I know the index is 0?
    	glBindFragDataLocation(cacaProgram,0,"FragColor");
     
     
    	glLinkProgram(cacaProgram);
    	glGetProgramiv(cacaProgram,GL_LINK_STATUS,&amp;linked);
     
    	if (!linked)
    	{
    		std::cout<<"link failed"<<std::endl;
    		return false;
    	}
     
    	vertexIndex = glGetAttribLocation(cacaProgram, "MCVertex");
    	glUseProgram(cacaProgram);
    	return true;
    }
    void setupGeometry()
    {
       GLfloat vertices[] = {  0.0f,  0.5f, 0.0f,  
       			   0.5f, -0.5f, 0.0f, 
                               -0.5f, -0.5f, 0.0f
    			   };
    	glVertexAttribPointer(vertexIndex,3,GL_FLOAT,GL_FALSE,0,vertices);
    	glEnableVertexAttribArray(vertexIndex);
    }
     
    void Draw()
    {
    	 // Render one frame: clear the color buffer (to the clear color set in
    	 // InitOpenGL), draw the 3-vertex triangle using the currently bound
    	 // program and the attribute array set up in setupGeometry, then
    	 // present the back buffer.
    	 glClear(GL_COLOR_BUFFER_BIT);
    	 glDrawArrays(GL_TRIANGLES,0,3);  
    	 SDL_GL_SwapBuffers();
    }
    GLX version: 1.4
    OpenGL: 3.2.0 NVIDIA 195.36.15
    OpenGL shading language: 1.50 NVIDIA
    Kubuntu Karmic

  2. #2
    Junior Member Newbie
    Join Date
    Apr 2010
    Posts
    16

    Re: Help with drawing a triangle in 3.2 GLSL 1.4

    ok, I guess this was too much code... I have no idea what the problem is but I'm wondering how does the program know which is the fragment color?

    In GLSL 1.3 there was a gl_FragColor built-in fragment shader variable, but now it's deprecated, and the glsl1.4 spec documentation says

    ...Both gl_FragColor and gl_FragData are deprecated; the preferred usage is to explicitly declare these outputs in the fragment shader using the out storage qualifier.
    So lets say we have this fragment shader:

    Code :
    // Hypothetical fragment shader with TWO user-declared outputs.  Which one
    // ends up in the framebuffer is the question being asked: it depends on
    // which output is bound to color number 0 (glBindFragDataLocation or the
    // linker's automatic assignment) — presumably; see the linked answer.
    const char* FSCode = "\
    	#version 140 \n\
    	out vec4 FragColorA;\
            out vec4 FragColorB;\
    	void main()\
    	{\
    		FragColorA = vec4(0.0,0.0,1.0,1.0);\
    		FragColorB = vec4(1.0,0.0,0.0,1.0);\
    	}\
    	";

    What color will be the fragment? Is there some way to tell the application which one is it? or how does this work?
    GLX version: 1.4
    OpenGL: 3.2.0 NVIDIA 195.36.15
    OpenGL shading language: 1.50 NVIDIA
    Kubuntu Karmic

  3. #3
    Member Regular Contributor Rosario Leonardi's Avatar
    Join Date
    Aug 2008
    Location
    Italy
    Posts
    355

    Re: Help with drawing a triangle in 3.2 GLSL 1.4

    I still have problem running SDL 1.3 correctly (if I run your program glCreateShader return 0) so I can't reproduce the bug.

    I have already asked your second question myself; here is a link to the answer.
    http://www.opengl.org/discussion_boa...072#Post245072

    For further detail you can read section 3.9.2, "Shader Outputs", of the OpenGL 3.3 specification.
    ~ ~ I tell you, realtime 3D is made of blood, sweat and screams! ~ ~

  4. #4
    Junior Member Newbie
    Join Date
    Apr 2010
    Posts
    16

    Re: Help with drawing a triangle in 3.2 GLSL 1.4

    Thanks, I read the documentation but unfortunately haven't been able to make it work yet. What I did find out is that if I change the render mode to GL_LINES, I sometimes get a line with the color specified in the fragment shader; the line appears or disappears randomly, and it's always either vertical or horizontal. It looks like the vertex data is not getting to the shader for some reason...
    GLX version: 1.4
    OpenGL: 3.2.0 NVIDIA 195.36.15
    OpenGL shading language: 1.50 NVIDIA
    Kubuntu Karmic

  5. #5
    Junior Member Newbie
    Join Date
    Apr 2010
    Posts
    16

    Re: Help with drawing a triangle in 3.2 GLSL 1.4

    I can't believe it took me 2 days to draw a triangle!
    But finally I made it work!!



    For some reason it started working after I added the vertex data to a Buffer Object.

    DOESN'T WORK:
    Code :
    // NOTE(review): this version fails because `vertices` is a LOCAL array —
    // glVertexAttribPointer only stores the pointer, and the array is gone by
    // the time glDrawArrays runs in Draw().  A 3.2 core context also forbids
    // client-side vertex arrays entirely, which is why the VBO version works.
    void setupGeometry()
    {
       GLfloat vertices[] = {  0.0f,  0.5f, 0.0f,  
                               -0.5f, -0.5f, 0.0f,
       			   0.5f, -0.5f, 0.0f
    			};
       // Dangling pointer after this function returns:
       glVertexAttribPointer(vertexIndex,3,GL_FLOAT,GL_FALSE,0,vertices);
      glEnableVertexAttribArray(vertexIndex);
    }

    WORKS:
    Code :
    void setupGeometry()
    {
       GLfloat vertices[] = {  0.0f,  0.5f, 0.0f,  
                               -0.5f, -0.5f, 0.0f,
       			   0.5f, -0.5f, 0.0f
    			};
    glGenBuffers(1,&amp;vbuff);
    glBindBuffer(GL_ARRAY_BUFFER,vbuff);
    glBufferData(GL_ARRAY_BUFFER,sizeof(vertices),vertices, GL_STATIC_DRAW);
     
    	glVertexAttribPointer(vertexIndex,3,GL_FLOAT,GL_FALSE,0,0);
    	glEnableVertexAttribArray(vertexIndex);
    }

    Anyone knows why this happens? Is the use of Buffers required in this context?
    GLX version: 1.4
    OpenGL: 3.2.0 NVIDIA 195.36.15
    OpenGL shading language: 1.50 NVIDIA
    Kubuntu Karmic

Posting Permissions

  • You may not post new threads
  • You may not post replies
  • You may not post attachments
  • You may not edit your posts
  •