glUniform1f() not working



Lazy Foo'
07-12-2010, 09:15 PM
I'm getting some very bizarre behavior here. I'm loading and creating a shader program (the vertex shader uses the projection matrix and the fragment shader sets the fragment color to magenta), and glGetError() reports no errors after glUseProgram().

When I call glGetUniformLocation(), it returns -1, yet glGetError() returns GL_NO_ERROR.

Yet the magenta cube still shows up.

Here's my shader program creation code along with the code that attempts to set the uniform:


//Shader Program
GLuint shaderProgram = 0;
shaderProgram = glCreateProgram();

//Vertex shader
GLuint vertexShader = glCreateShader( GL_VERTEX_SHADER );
GLchar* vertexSource = load_shader( "vertex.glsl" );
const GLchar** vShaderSource = (const GLchar**)&vertexSource;
glShaderSource( vertexShader, 1, vShaderSource, NULL );
glCompileShader( vertexShader );
glAttachShader( shaderProgram, vertexShader );
delete[] vertexSource; vertexSource = NULL;

//Fragment shader
GLuint fragmentShader = glCreateShader( GL_FRAGMENT_SHADER );
GLchar* fragmentSource = load_shader( "fragment.glsl" );
const GLchar** fShaderSource = (const GLchar**)&fragmentSource;
glShaderSource( fragmentShader, 1, fShaderSource, NULL );
glCompileShader( fragmentShader );
glAttachShader( shaderProgram, fragmentShader );
delete[] fragmentSource; fragmentSource = NULL;

//Link and use program
glLinkProgram( shaderProgram );
glUseProgram( shaderProgram );

GLenum err = glGetError();

if( err != GL_NO_ERROR )
{
    quit = true;
}

GLint uniform = glGetUniformLocation( shaderProgram, "test" );
std::cout << "Uniform: " << uniform << std::endl;

err = glGetError();
if( err != GL_NO_ERROR )
{
    quit = true;
}

glUniform1f( uniform, 1.f );
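
Side note: I'm not checking compile or link status anywhere in the code above. A rough sketch of what that check could look like (standard GL 2.0 calls, not code from my actual project):


GLint compiled = GL_FALSE;
glGetShaderiv( vertexShader, GL_COMPILE_STATUS, &compiled );
if( compiled != GL_TRUE )
{
    //Dump the compile log so a bad shader is visible
    GLchar infoLog[ 1024 ];
    glGetShaderInfoLog( vertexShader, sizeof( infoLog ), NULL, infoLog );
    std::cout << "Vertex shader compile failed:\n" << infoLog << std::endl;
}
//(the same GL_COMPILE_STATUS check would apply to fragmentShader)

GLint linked = GL_FALSE;
glGetProgramiv( shaderProgram, GL_LINK_STATUS, &linked );
if( linked != GL_TRUE )
{
    //Dump the link log so a failed glLinkProgram() is visible
    GLchar infoLog[ 1024 ];
    glGetProgramInfoLog( shaderProgram, sizeof( infoLog ), NULL, infoLog );
    std::cout << "Program link failed:\n" << infoLog << std::endl;
}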


Here's vertex.glsl


uniform mat4 LProj;
uniform mat4 LMView;
uniform float test;

void main()
{
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;

//Transforming The Vertex
//gl_Position = LProj * gl_Vertex;
}


Here's fragment.glsl


uniform float test;

void main()
{
// Setting each pixel to magenta
gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);
}


I'm on an ATI HD 5700. I'm starting to think it's haunted.

Lazy Foo'
07-12-2010, 09:47 PM
Things just got stranger. This is my log from glslDevil:



W! Program Start
| wglCreateContext(86011084)
| wglMakeCurrent(86011084, 00010000)
| wglGetExtensionsStringARB(86011084)
| wglChoosePixelFormatARB(86011084, 0028F980, 0028F96C, 1, 0028F964, 0028F968)
| wglMakeCurrent(00000000, 00000000)
| wglDeleteContext(00010000)
| wglCreateContext(02010F6E)
| wglMakeCurrent(02010F6E, 00020000)
| glGetString(GL_EXTENSIONS)
| wglMakeCurrent(02010F6E, 00020000)
| wglGetCurrentDC()
| wglGetExtensionsStringARB(02010F6E)
| glGetString(GL_EXTENSIONS)
| glGetString(GL_VERSION)
| glClearColor(1.000000, 1.000000, 1.000000, 1.000000)
| glClearDepth(1.000000)
| glMatrixMode(GL_PROJECTION)
| glLoadIdentity()
| glMultMatrixd(0028FBD8)
| glMatrixMode(GL_MODELVIEW)
| glLoadIdentity()
| glDisable(GL_BLEND)
| glEnable(GL_TEXTURE_2D)
| glEnable(GL_COLOR_MATERIAL)
| glEnable(GL_DEPTH_TEST)
| glDepthFunc(GL_LEQUAL)
| glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)
| glGetError()
| glCreateProgram()
| glCreateShader(GL_VERTEX_SHADER)
| glShaderSource(2, 1, 0028FD04, 00000000)
| glCompileShader(2)
| glAttachShader(1, 2)
| glCreateShader(GL_FRAGMENT_SHADER)
| glShaderSource(3, 1, 0028FCF8, 00000000)
| glCompileShader(3)
| glAttachShader(1, 3)
| glLinkProgram(1)
| glUseProgram(1)
W! OpenGL error GL_INVALID_OPERATION detected
| glGetError()
| glGetUniformLocation(1, 004750AA)
W! OpenGL error GL_INVALID_OPERATION detected
| glGetError()
| glUniform1f(-1, 1.000000)
W! OpenGL error GL_INVALID_OPERATION detected
| glClear(GL_DEPTH_BUFFER_BIT|GL_COLOR_BUFFER_BIT)
| glLoadIdentity()
| glTranslatef(0.000000, 0.000000, -20.000000)
| glRotatef(45.000000, 1.000000, 1.000000, 1.000000)
| glScalef(3.000000, 3.000000, 3.000000)
| glBegin(GL_QUADS)
| glColor3f(0.000000, 1.000000, 0.000000)
| glVertex3f(1.000000, 1.000000, -1.000000)
| glVertex3f(-1.000000, 1.000000, -1.000000)
| glVertex3f(-1.000000, 1.000000, 1.000000)
| glVertex3f(1.000000, 1.000000, 1.000000)
| glColor3f(1.000000, 0.500000, 0.000000)
| glVertex3f(1.000000, -1.000000, 1.000000)
| glVertex3f(-1.000000, -1.000000, 1.000000)
| glVertex3f(-1.000000, -1.000000, -1.000000)
| glVertex3f(1.000000, -1.000000, -1.000000)
| glColor3f(1.000000, 0.000000, 0.000000)
| glVertex3f(1.000000, 1.000000, 1.000000)
| glVertex3f(-1.000000, 1.000000, 1.000000)
| glVertex3f(-1.000000, -1.000000, 1.000000)
| glVertex3f(1.000000, -1.000000, 1.000000)
| glColor3f(0.000000, 0.000000, 0.000000)
| glVertex3f(1.000000, -1.000000, -1.000000)
| glVertex3f(-1.000000, -1.000000, -1.000000)
| glVertex3f(-1.000000, 1.000000, -1.000000)
| glVertex3f(1.000000, 1.000000, -1.000000)
| glColor3f(0.000000, 0.000000, 1.000000)
| glVertex3f(-1.000000, 1.000000, 1.000000)
| glVertex3f(-1.000000, 1.000000, -1.000000)
| glVertex3f(-1.000000, -1.000000, -1.000000)
| glVertex3f(-1.000000, -1.000000, 1.000000)
| glColor3f(1.000000, 0.000000, 1.000000)
| glVertex3f(1.000000, 1.000000, -1.000000)
| glVertex3f(1.000000, 1.000000, 1.000000)
| glVertex3f(1.000000, -1.000000, 1.000000)
| glVertex3f(1.000000, -1.000000, -1.000000)
| glEnd()
| SwapBuffers(02010F6E)
| wglMakeCurrent(00000000, 00000000)
| wglDeleteContext(00020000)
E! Child process exited
W! Program termination forced!


Apparently glUseProgram() is raising GL_INVALID_OPERATION (and so are the glGetUniformLocation() and glUniform1f() calls after it), yet my own glGetError() checks reported no such problem.
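
One thing I still need to try: glGetError() only returns a single error flag per call, so a drain loop along these lines (just a sketch, not code from my project) would make sure nothing is left queued from earlier calls:


//Sketch: drain every pending error flag, since several may be recorded
GLenum error;
while( ( error = glGetError() ) != GL_NO_ERROR )
{
    std::cout << "GL error: 0x" << std::hex << error << std::dec << std::endl;
}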

frank li
07-12-2010, 10:00 PM
You will certainly get -1 as the location, since "test" is an inactive uniform: it is declared in both shaders but never actually used, so the linker optimizes it away. What does seem strange is that glGetError() returns nothing while glslDevil reports GL_INVALID_OPERATION; glGetError() should return the correct error code there.
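
You can confirm which uniforms survived linking with something like this (a rough sketch, using your shaderProgram handle):


//Sketch: list the uniforms the linker kept as active
GLint count = 0;
glGetProgramiv( shaderProgram, GL_ACTIVE_UNIFORMS, &count );
for( GLint i = 0; i < count; ++i )
{
    GLchar name[ 256 ];
    GLint size = 0;
    GLenum type = 0;
    glGetActiveUniform( shaderProgram, i, sizeof( name ), NULL, &size, &type, name );
    std::cout << "Active uniform " << i << ": " << name << std::endl;
}


"test" will not show up in that list, because nothing in either shader actually reads it.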

Lazy Foo'
07-12-2010, 11:14 PM
You will certainly get -1 as the location, since "test" is an inactive uniform.

Well that's something simple I wish I didn't have to learn the hard way.