Hello!
I’m posting this here in the hope that I missed something and my code is to blame; at the moment I’m stuck and suspect a bug in ATI’s driver.
The following code simply writes some values to a float texture and reads them back in, but the values read back are garbled: the last texel of each row comes back as exactly 255x the written values (255, 510, 765 instead of 1, 2, 3). If NUM_READ_CHANNELS is changed to 4, the output is correct (I’ve put a sketch of that readback variant after the code). I reproduced the problem on different cards, on both Windows and Linux. Any hints? Otherwise I’ll take this issue to ATI.
thx,
chris
Output:
1 2 3 1 2 3 1 2 3 255 510 765
1 2 3 1 2 3 1 2 3 255 510 765
1 2 3 1 2 3 1 2 3 255 510 765
1 2 3 1 2 3 1 2 3 255 510 765
#include <string>
#include <vector>
#include <iostream>
#include <cassert>
#include <GL/glew.h>
#include <GL/glut.h>
const unsigned TEX_SIZE = 4;           // texture is TEX_SIZE x TEX_SIZE texels
const unsigned NUM_READ_CHANNELS = 3;  // 3 shows the problem; 4 reads back correctly

int main( int argc, char **argv )
{
    glutInit ( &argc, argv );
    glutInitDisplayMode ( GLUT_RGB | GLUT_DOUBLE );
    glutInitWindowSize ( 500, 500 );
    glutCreateWindow ( "" );

    GLenum err = glewInit();
    assert(GLEW_OK == err);
    assert(glewIsSupported("GL_ATI_texture_float"));

    // Fill every texel with (1, 2, 3). The buffer is sized for 4 channels
    // so it can also hold the readback when NUM_READ_CHANNELS is 4.
    GLfloat init_values[TEX_SIZE*TEX_SIZE*4];
    for (unsigned i=0; i<TEX_SIZE; ++i)
    {
        for (unsigned j=0; j<TEX_SIZE; ++j)
        {
            init_values[3*(i+TEX_SIZE*j)  ] = 1.0f;
            init_values[3*(i+TEX_SIZE*j)+1] = 2.0f;
            init_values[3*(i+TEX_SIZE*j)+2] = 3.0f;
        }
    }

    // Upload as a 16-bit float RGB texture.
    GLuint tex;
    glGenTextures(1, &tex);
    assert(tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexImage2D(GL_TEXTURE_2D, 0,
                 GL_RGB_FLOAT16_ATI,
                 TEX_SIZE, TEX_SIZE, 0,
                 GL_RGB, GL_FLOAT,
                 init_values);

    // Read the texture straight back into the same buffer.
    glGetTexImage(GL_TEXTURE_2D, 0,
                  NUM_READ_CHANNELS == 3 ? GL_RGB : GL_RGBA,
                  GL_FLOAT,
                  init_values);

    // Print the values that came back, one row of texels per line.
    for (unsigned i=0; i<TEX_SIZE; ++i)
    {
        for (unsigned j=0; j<TEX_SIZE; ++j)
        {
            for (unsigned c=0; c<NUM_READ_CHANNELS; ++c)
            {
                std::cout << init_values[NUM_READ_CHANNELS*(j + i*TEX_SIZE)+c] << " ";
            }
        }
        std::cout << "\n";
    }
    std::cout << "\n";
    return 0;
}
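
For completeness, this is the four-channel readback variant that gives correct output for me, sketched here with a separate rgba buffer (the buffer name is mine; my actual test just sets NUM_READ_CHANNELS to 4 and reuses init_values):

// Variant readback: ask for GL_RGBA so each texel arrives as 4 floats.
GLfloat rgba[TEX_SIZE*TEX_SIZE*4];
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_FLOAT, rgba);
// Print only R, G, B per texel; rgba[4*t+3] is the (unused) alpha.
for (unsigned i=0; i<TEX_SIZE; ++i)
{
    for (unsigned j=0; j<TEX_SIZE; ++j)
    {
        for (unsigned c=0; c<3; ++c)
        {
            std::cout << rgba[4*(j + i*TEX_SIZE)+c] << " ";
        }
    }
    std::cout << "\n";
}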