Hi all.
I have a weird problem with uniform blocks:
This is my test app
// Triangle_opengl_3_1
// A cross platform version of
// http://www.opengl.org/wiki/Tutorial:_OpenGL_3.1_The_First_Triangle_%28C%2B%2B/Win%29
// with some code from http://www.lighthouse3d.com/opengl/glsl/index.php?oglexample1
// and from the book OpenGL Shading Language 3rd Edition, p215-216
// Daniel Livingstone, October 2010
#include <GL/glew.h>
#define FREEGLUT_STATIC
#include <GL/freeglut.h>
#include <iostream>
#include <fstream>
#include <string>
using namespace std;
// loadFile - loads text file into char* fname
// allocates memory - so need to delete after use
// size of file returned in fSize
const char* loadFile(const char *fname, GLint &fSize)
{
ifstream::pos_type size;
char * memblock;
string text;
// file read based on example in cplusplus.com tutorial
ifstream file (fname, ios::in|ios::binary|ios::ate);
if (file.is_open())
{
size = file.tellg();
fSize = (GLuint) size;
memblock = new char [size];
file.seekg (0, ios::beg);
file.read (memblock, size);
file.close();
cout << "file " << fname << " loaded" << endl;
text.assign(memblock);
}
else
{
cout << "Unable to open file " << fname << endl;
exit(1);
}
return memblock;
}
// printShaderInfoLog
// From OpenGL Shading Language 3rd Edition, p215-216
// Queries the info log of the given shader object and prints it to
// stdout when the driver reports a non-empty log (e.g. after a failed
// or warning-producing compile).
void printShaderInfoLog(GLint shader)
{
    int logLength = 0;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
    // should additionally check for OpenGL errors here
    if (logLength <= 0)
        return;                       // nothing to report

    GLchar *log = new GLchar[logLength];
    // error check for fail to allocate memory omitted
    int written = 0;
    glGetShaderInfoLog(shader, logLength, &written, log);
    cout << "InfoLog:" << endl << log;
    delete [] log;
}
void reshape(int w, int h)
{
glViewport(0,0,(GLsizei)w,(GLsizei)h);
}
int main (int argc, char* argv[])
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA);
glutInitWindowSize(600,600);
glutCreateWindow("Triangle Test");
glewInit();
GLenum err = glewInit();
if (GLEW_OK != err)
{
/* Problem: glewInit failed, something is seriously wrong. */
cout << "glewInit failed, aborting." << endl;
exit (1);
}
cout << "Status: Using GLEW " << glewGetString(GLEW_VERSION) << endl;
cout << "OpenGL version " << glGetString(GL_VERSION) << " supported" << endl;
int l;
const char *s = loadFile("sh", l);
int fragmentShaderId = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShaderId, 1, &s, &l);
glCompileShader(fragmentShaderId);
int programId = glCreateProgram();
glAttachShader(programId, fragmentShaderId);
glLinkProgram(programId);
printShaderInfoLog(fragmentShaderId);
GLint linked;
glGetProgramiv(programId, GL_LINK_STATUS, &linked);
if (linked == GL_FALSE) {
// if a link error occured ...
GLint logLength;
glGetProgramiv(programId, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLsizei length;
char *log = new char[logLength];
glGetProgramInfoLog(programId, GLsizei(logLength), &length, log);
cout<<log<<endl;
delete[] log;
}
glDeleteProgram(programId);
programId = 0;
throw exception();
} else {
cout << "Linked
";
}
printf("%d
",glGetError());
GLfloat pixels[4];
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(programId);
glBegin(GL_QUADS);
glVertex2i(-1, -1);
glVertex2i(1, -1);
glVertex2i(1, 1);
glVertex2i(-1, 1);
glEnd();
glReadPixels(0, 0, 1, 1, GL_RGBA, GL_FLOAT, &pixels);
printf("
%f %f %f %f
",pixels[0],pixels[1],pixels[2],pixels[4]);
return 0;
}
this is my fragment shader (written in the external file ‘sh’):
#version 330
// NOTE(review): declaring this (otherwise unused) uniform block is what the
// post reports as triggering the all-zero readback on Catalyst 12.4 —
// presumably a driver issue, since member u is never referenced; verify on
// newer drivers or with an explicit binding.
uniform b { float u; };
layout(location=0) out vec4 color;
// Write a constant color so the expected readback value is known in advance.
void main() { color = vec4(0.1, 0.45, 0.5, 0.0); }
and this is the output i get from the app:
Status: Using GLEW 1.7.0
OpenGL version 3.3.11631 Compatibility Profile Context supported
file sh loaded
InfoLog:
Fragment shader was successfully compiled to run on hardware.
Linked
0
0.000000 0.000000 0.000000 0.000000
So the shader compiles and links fine, glGetError() returns 0, but glReadPixels gives me an empty (0,0,0,0) pixel instead of the value set in the shader.
However, if I comment out the “uniform b { float u; };” line in the shader, everything works as expected and the pixel is (0.098039, 0.450980, 0.501961, 0.000000), or whatever value I put in the shader.
I’m really clueless; I hope you can help me.
I have a Radeon HD 4850, on Linux with Catalyst 12.4. This is the output of my glxinfo.