glReadPixels bug with GL_LUMINANCE?

I found this funny behaviour of glReadPixels: if I use GL_LUMINANCE as the format, on SOME cards (for example GF6600GT, GF6800) but not on others (GF7800 or ATI X700/X600), the rightmost quarter of the screen is not read back from the buffer. If I use GL_RED instead, everything is OK on every card I tried.
The code below is a small application that shows this behaviour (press m to switch between GL_LUMINANCE and GL_RED). It renders a triangle covering half the screen, reads the buffer back and creates a texture from it, and then shows that texture for all subsequent frames. Note: the problem is not in the creation of the texture (I already checked that).

#define APP_NAME "glReadPixels(...) bug?"
#define SIZE 512
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include <stdbool.h> /* for bool when compiled as C */
#include <GL/glut.h>

int winW = SIZE; /* window width */
int winH = SIZE; /* window height */

bool modeRL = false;
bool load=true;
float * buf;

void glutDisplay (void)
{
if (!winH)
return;
glClear(GL_COLOR_BUFFER_BIT);
glViewport(0,0,winW,winH);
glMatrixMode(GL_TEXTURE);
glLoadIdentity();

glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0,1.0,0.0,1.0,0.0,2.0);

glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0);

glPolygonMode(GL_FRONT,GL_FILL);

if(load){ // first time: draw a triangle covering the upper left part of the screen,
          // then read back the buffer and load it as a texture
	if(modeRL) printf("GL_RED\n"); else printf("GL_LUMINANCE\n");
	glDisable(GL_TEXTURE_2D);
	glBegin(GL_TRIANGLES);
	glTexCoord2d(1.0,1.0);
	glVertex3f (1.0,1.0,0.0);
	glTexCoord2d(0.0,1.0);
	glVertex3f (0.0,1.0,0.0);
	glTexCoord2d(0.0,0.0);
	glVertex3f (0.0,0.0,0.0);
	glEnd();
	glFinish();

	glReadPixels(0,0,SIZE,SIZE,(modeRL)?GL_RED:GL_LUMINANCE,GL_FLOAT,buf);

	glBindTexture(GL_TEXTURE_2D,1);	
	glPixelStorei (GL_UNPACK_ALIGNMENT, 1);
	glPixelStorei (GL_PACK_ROW_LENGTH, SIZE);
	glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
	glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
	glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexEnvf (GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);

	glTexImage2D(	GL_TEXTURE_2D, 0, 
			1, 
			SIZE, SIZE,  
			0, 
			GL_LUMINANCE, 
			GL_FLOAT, 
			(GLvoid*)buf); 
	glEnable(GL_TEXTURE_2D);
	load = false;
	}
	else
	{// render the texture created the first time
	assert(glIsEnabled(GL_TEXTURE_2D));
	glBegin(GL_QUADS);
	glTexCoord2d(0.0,0.0);
	glVertex3f  (0.0,0.0,0.0);
	glTexCoord2d(1.0,0.0);
	glVertex3f  (1.0,0.0,0.0);
	glTexCoord2d(1.0,1.0);
	glVertex3f  (1.0,1.0,0.0);
	glTexCoord2d(0.0,1.0);
	glVertex3f  (0.0,1.0,0.0);
	glEnd();
	}

glutSwapBuffers();
glutPostRedisplay ();

}

void glutKeyboard (unsigned char key, int x, int y)
{
switch (key)
{
/* exit the program */
case 27:
case 'q':
case 'Q':
free(buf);
exit (1);
break;
case 'm': modeRL = !modeRL; load = true; break;
}
}

void glInit (void)
{
buf = (float*) malloc(SIZE*SIZE*sizeof(float));
glDisable(GL_DEPTH_TEST);

}
int main (int argc, char **argv)
{
glutInit (&argc, argv);
glutInitDisplayMode (GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH);
glutInitWindowSize (winW,winH);
glutCreateWindow (APP_NAME);
glutKeyboardFunc (glutKeyboard);
glutDisplayFunc (glutDisplay);
glInit ();
glutMainLoop();
return 0;
}

Originally posted by robilant:
I found this funny behaviour of glReadPixels: if I use GL_LUMINANCE as the format, on SOME cards (for example GF6600GT, GF6800) but not on others (GF7800 or ATI X700/X600), the rightmost quarter of the screen is not read back from the buffer. If I use GL_RED instead, everything is OK on every card I tried.

Wow, that's pretty bizarre. I can't see anything wrong with your code, so it's most likely a driver bug. I also found that if one uses GL_UNSIGNED_BYTE rather than GL_FLOAT it works, so it's presumably a bug in the conversion code. In most cases you'll want to use GL_UNSIGNED_BYTE anyway, unless you have a high-precision or floating-point framebuffer.
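
For reference, the byte read-back would look roughly like the sketch below. This is just an illustration, not tested against the bug; byteBuf is a buffer I made up for the example, while SIZE and modeRL reuse the names from your code.

/* Workaround sketch: read and upload as GL_UNSIGNED_BYTE instead of GL_FLOAT. */
GLubyte *byteBuf = (GLubyte*) malloc(SIZE*SIZE);  /* one byte per pixel, single channel */

glPixelStorei(GL_PACK_ALIGNMENT, 1);              /* rows tightly packed in byteBuf */
glReadPixels(0, 0, SIZE, SIZE,
             (modeRL) ? GL_RED : GL_LUMINANCE,
             GL_UNSIGNED_BYTE, byteBuf);          /* bytes instead of floats */

glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE,
             SIZE, SIZE, 0,
             GL_LUMINANCE, GL_UNSIGNED_BYTE, byteBuf);  /* re-upload as bytes */

free(byteBuf);

That sidesteps the float path completely, which seems to be where the conversion goes wrong.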

I guess you should submit it as a bug report to NVIDIA.