The code display a triangle with a different color in each vertex. The GS should display a smaller, just orange, triangle.
As in macOSX we have a GeForce GT120 from 2009 and OpenGL 2.1, we use GLSL version 120.
These are the shaders:
Vertex
#version 120
varying float LightIntensity;
const vec3 LightPos = vec3( 0., 10., 10. );
void main()
{
vec3 normal = normalize( gl_NormalMatrix * gl_Normal );
gl_Position = gl_ModelViewMatrix * gl_Vertex;
LightIntensity = dot( normalize(LightPos - gl_Position.xyz), normal);
}
Geometry
#version 120
#extension GL_EXT_gpu_shader4: enable
#extension GL_EXT_geometry_shader4: enable
varying float LightIntensity;
//uniform
const float Shrink=0.5;
vec3 V[3];
vec3 CG;
void
ProduceVertex( int v )
{
LightIntensity = 0.8*LightIntensity;
gl_Position = gl_ProjectionMatrix * vec4( CG + Shrink * ( V[v] - CG ), 1. );
EmitVertex();
}
void main()
{
V[0] = gl_PositionIn[0].xyz;
V[1] = gl_PositionIn[1].xyz;
V[2] = gl_PositionIn[2].xyz;
CG = 0.33333 * ( V[0] + V[1] + V[2] );
ProduceVertex( 0 );
ProduceVertex( 1 );
ProduceVertex( 2 );
}
Fragment
#version 120
varying float LightIntensity;
void
main()
{
gl_FragColor = vec4( LightIntensity * vec3(1., 0.5, 0.), 1. );
}
To use it in the C++ program, I am using GLSLProgram class from Mike Baileys course, and GLEW. I have also tried including
just NVidia’s glext.h.
Without parameters, when I activate the shader, nothing happens, not even a black screen, just the same multicolored triangle. In Mac sometimes it works, most times it display just a black image.
With these parameters, it display just a black screen.
glProgramParameteriEXT(shaderpg->program_id(), GL_GEOMETRY_INPUT_TYPE_EXT, GL_TRIANGLES);
glProgramParameteriEXT(shaderpg->program_id(), GL_GEOMETRY_OUTPUT_TYPE_EXT, GL_TRIANGLE_STRIP);
int temp;
glGetIntegerv(GL_MAX_GEOMETRY_OUTPUT_VERTICES_EXT, &temp);
printf("Max out vertices %d
", temp);
glProgramParameteriEXT(shaderpg->program_id(), GL_GEOMETRY_VERTICES_OUT_EXT, temp);
Any idea?