A really strange problem with NVIDIA's driver (probably)

For a reason too long to explain here, I have to use driver 70.41 (the Quadro FX beta driver) on my NVIDIA 6800 GT (for details see one of my past posts, in particular “strange OGLSL problem with 66.93 NVIDIA’s driver”, 12-03-2004, in this forum).

Now I have another really strange problem.

Please take a look at this code.

#include<iostream>
#include<cassert>	// needed for the assert() in display()

#include<GL/glew.h>
#include<GL/glut.h>

#include"pbuffer/pbuffer.h"
#include"texture.h"
#include"rendertexture.h"

#define DIMTEXT 1024

using namespace std;

const int halfwidth = 512;
const int halfheight = 512;

const float wortho = 1.0f;
const float hortho = 1.0f;

PBuffer pbuf;
GLuint name;

// t holds two rows of DIMTEXT RGBA float texels: the readback destination
GLfloat t[2][DIMTEXT*4];

void Init(void)
{	
	for(unsigned int i = 0;i < 2;++i)
		for(unsigned int j = 0; j < DIMTEXT*4;++j)
			t[i][j] = 0.0;

	glEnable(GL_TEXTURE_2D);
	glGenTextures(1,&name);
	glBindTexture(GL_TEXTURE_2D,name);
	glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
	glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_CLAMP);
	glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_CLAMP);
	glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);
	glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
	glClearColor(0.0,0.0,0.0,0);
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	glOrtho(0.0,wortho,0.0,hortho,-20,20);
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	glColor3f(1.0f,1.0f,1.0f);
	glDisable(GL_LIGHTING);
	glDisable(GL_LIGHT0);
	glBindTexture(GL_TEXTURE_2D,name);
	// use DIMTEXT (not a hard-coded 1024) so the texture matches t[2][DIMTEXT*4]
	glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA_FLOAT32_ATI,DIMTEXT,2,0,GL_RGBA,GL_FLOAT,t);
	pbuf.create(DIMTEXT,8,32,32,32,32,24,0);	// DIMTEXT x 8 float pbuffer
	glClearColor(0.0,1.0,0.0,1.0);
}

void display()
{
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	glOrtho(0.0,1.0,0.0,1.0,-20.0,20.0);
	static float lasttime;
	float dt;
	dt = glutGet(GLUT_ELAPSED_TIME) - lasttime;	// frame time: computed but unused in this cut-down test
	lasttime = glutGet(GLUT_ELAPSED_TIME);
	pbuf.MakeActiveRenderSurface();	// everything below renders into the pbuffer
	glBindTexture(GL_TEXTURE_2D,name);
	glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
	glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_CLAMP);
	glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_CLAMP);
	glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);
	glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	glOrtho(0.0,1.0,0.0,1.0,-20.0,20.0);
	glEnable(GL_TEXTURE_2D);
	glColor3f(0.2f,0.3f,0.4f);	// the quad color we expect to read back
	glViewport(0,0,DIMTEXT,2);
	glClearColor(0,0.5,0,1);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
	glDisable(GL_TEXTURE_2D);
	
	glBegin(GL_QUADS);
		glVertex2f(0.0f,0.0f);
		glVertex2f(1.0f,0.0f);
		glVertex2f(1.0f,1.0f);
		glVertex2f(0.0f,1.0f);
	glEnd();
	
	glFlush();
	glEnable(GL_TEXTURE_2D);

	glBindTexture(GL_TEXTURE_2D,name);
	// copy the first pbuffer row into row 0 of the texture, then read it back
	glCopyTexSubImage2D(GL_TEXTURE_2D,0,0,0,0,0,DIMTEXT,1);
	glGetTexImage(GL_TEXTURE_2D,0,GL_RGBA,GL_FLOAT,t);
	pbuf.Deactivate();

	glutSwapBuffers();
	glFlush();
	assert(glGetError() == GL_NO_ERROR);
}

void idle()
{
	glutPostRedisplay();
}

int main(int argc,char** argv)
{ 
	glutInit(&argc,argv);
	glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
	glutInitWindowSize(2 * halfwidth,2 * halfheight);
	glutCreateWindow("Springs Chain");
	
	glewInit();
	Init();
	glutDisplayFunc(display);
	glutIdleFunc(idle);
	glutMainLoop();
	return 0;
}

As you can see, this is just a simple application that renders a colored quad (glColor3f(0.2f, 0.3f, 0.4f)) to a texture. If I put a breakpoint on glGetTexImage(GL_TEXTURE_2D,0,GL_RGBA,GL_FLOAT,t); I expect to read back, in the first row of the texture t (a two-dimensional texture, t[2][DIMTEXT*4]), a long run of that same color. If DIMTEXT is < 1024 everything works fine, but if DIMTEXT is 1024, 2048 or 4096 the texture I read back is full of zeros…
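For reference, this is the kind of check I do at the breakpoint (a debug snippet, not part of the program above):

// After glGetTexImage, every texel of row 0 should hold the quad color.
for(int i = 0; i < 8; ++i)	// the first few texels are enough to see the pattern
	std::cout << t[0][i*4+0] << ' '	// R, expected 0.2
	          << t[0][i*4+1] << ' '	// G, expected 0.3
	          << t[0][i*4+2] << std::endl;	// B, expected 0.4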
I actually need a two-dimensional texture, but for debugging purposes I tried a 1D texture t[DIMTEXT]: with the 1D texture everything seems to work for every DIMTEXT…
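In case it is useful, the 1D variant I mean looks roughly like this (a sketch; name1d and the buffer size are my own choices, the actual test code may differ slightly):

GLfloat t1d[DIMTEXT*4];	// one row of RGBA float texels (assumed size)
glBindTexture(GL_TEXTURE_1D,name1d);
glTexImage1D(GL_TEXTURE_1D,0,GL_RGBA_FLOAT32_ATI,DIMTEXT,0,GL_RGBA,GL_FLOAT,t1d);
// ...render into the pbuffer as before, then:
glCopyTexSubImage1D(GL_TEXTURE_1D,0,0,0,0,DIMTEXT);
glGetTexImage(GL_TEXTURE_1D,0,GL_RGBA,GL_FLOAT,t1d);	// correct for any DIMTEXT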

The problem seems to be in glCopyTexSubImage2D and not in glGetTexImage: I proved it by doing a texture lookup from a vertex shader, and a glReadPixels call shows that the writing into the pbuffer also works fine…
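The glReadPixels check is done while the pbuffer is still the active render surface, more or less like this (row is just a temporary buffer for the test):

GLfloat row[DIMTEXT*4];
glReadPixels(0,0,DIMTEXT,1,GL_RGBA,GL_FLOAT,row);	// read the first pbuffer row
// row[] holds the quad color for every DIMTEXT, so the render itself is fine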

After days and days of fruitless attempts I found that if I replace GL_NEAREST with GL_LINEAR on either GL_TEXTURE_MIN_FILTER or GL_TEXTURE_MAG_FILTER (it doesn't matter which of the two), my application does the right thing even with DIMTEXT >= 1024…
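In other words, this single change is enough to work around the problem:

// Workaround: GL_LINEAR on just one of the two filters makes
// glCopyTexSubImage2D behave correctly even for DIMTEXT >= 1024.
glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);	// may stay GL_NEAREST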

The pixel format requested for the pbuffer is:

int piAttribIList[] = {
	WGL_DRAW_TO_PBUFFER_ARB, 1,
	WGL_RED_BITS_ARB,        r,
	WGL_GREEN_BITS_ARB,      g,
	WGL_BLUE_BITS_ARB,       b,
	WGL_ALPHA_BITS_ARB,      a,
	WGL_DEPTH_BITS_ARB,      depth,
	WGL_STENCIL_BITS_ARB,    stencil,
	WGL_FLOAT_COMPONENTS_NV, GL_TRUE,
	0, 0
};
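For completeness, a list like this is normally handed to wglChoosePixelFormatARB; the sketch below shows the usual pattern (hdc is the device context; PBuffer::create may of course differ in the details):

int format;
UINT numFormats;
// pick a float pixel format matching the attributes above
wglChoosePixelFormatARB(hdc, piAttribIList, NULL, 1, &format, &numFormats);
// the chosen format then goes to wglCreatePbufferARB(hdc, format, width, height, ...)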

I have also tried this little program on a GeForce FX 5700 (with driver 70.41) and on a GeForce Go 5650 (with driver 61.77), and there I haven't found any problem…

Any suggestions?