Help: Can't use 32-bit depth texture

Hi everyone, I am trying to use the GPU to do rasterization, so I wrote this:

glutInit (&argc, argv);
    glutCreateWindow("TEST1");
	glutInitDisplayString("depth=32 single rgba");
	int foo;
	glGetIntegerv(GL_DEPTH_BITS, &foo);
	printf("Native Depth : %d
",foo);

    glewInit();
    // viewport transform for 1:1 pixel=texel=data mapping
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    //gluOrtho2D(0.0,texSize,0.0,texSize);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glViewport(0,0,texSize,texSize);
    // create FBO and bind it (that is, use offscreen render target)
    GLuint fb;
    glGenFramebuffersEXT(1,&fb); 
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT,fb);

    // create texture
    GLuint tex[2];
    glGenTextures (2, tex);
    glBindTexture(GL_TEXTURE_RECTANGLE_ARB,tex[0]);
    // set texture parameters
    glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, 
                    GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, 
                    GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, 
                    GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, 
                    GL_TEXTURE_WRAP_T, GL_CLAMP);
    // define texture with floating point format
    glTexImage2D(GL_TEXTURE_RECTANGLE_ARB,0,GL_RGBA32F_ARB,
                 texSize,texSize,0,GL_RGBA,GL_FLOAT,0);

	glBindTexture(GL_TEXTURE_RECTANGLE_ARB,tex[1]);
	glTexImage2D(GL_TEXTURE_RECTANGLE_ARB,0,GL_DEPTH_COMPONENT32_ARB,
	             texSize,texSize,0,GL_DEPTH_COMPONENT,GL_UNSIGNED_INT,0);
    glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, 
                    GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, 
                    GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, 
                    GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, 
                    GL_TEXTURE_WRAP_T, GL_CLAMP);

	GLuint depth_rb;
	glGenRenderbuffersEXT(1, &depth_rb);
	glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, depth_rb);
	glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT32, texSize, texSize); 
	glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, depth_rb); 

	


    // attach texture
    glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, 
                              GL_COLOR_ATTACHMENT0_EXT, 
                              GL_TEXTURE_RECTANGLE_ARB,tex[0],0);
	//glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT,GL_DEPTH_ATTACHMENT_EXT,GL_TEXTURE_RECTANGLE_ARB,tex[1],0);


    // transfer data to texture
    //glTexSubImage2D(GL_TEXTURE_RECTANGLE_ARB,0,0,0,texSize,texSize,
    //                GL_RGBA,GL_FLOAT,data);

	glBindFramebufferEXT(GL_FRAMEBUFFER_EXT,fb);

	glGetIntegerv(GL_DEPTH_BITS, &foo);
	printf("FBO Depth : %d
",foo);
	CheckFramebufferStatus();

As you can see, I have manually set up both the depth texture and the renderbuffer with GL_DEPTH_COMPONENT32, but the program always prints 24 bits, and I do not know why. I am using the GeForce 7300 Go in my laptop, so I think this card should support a 32-bit depth buffer.
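
As a cross-check, the depth size the driver actually allocated for the renderbuffer can be queried directly through EXT_framebuffer_object. This is only a minimal sketch, assuming GLEW has loaded the extension as in the code above and that depth_rb is the renderbuffer created there:

    // Ask the driver how many depth bits it really allocated for the
    // currently bound renderbuffer, independent of GL_DEPTH_BITS.
    GLint rbDepthBits = 0;
    glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, depth_rb);
    glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT,
                                    GL_RENDERBUFFER_DEPTH_SIZE_EXT,
                                    &rbDepthBits);
    printf("Renderbuffer depth size : %d\n", rbDepthBits);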

Thanks !

Ah, I have just downloaded NeHe's OpenGL tutorial lesson 2 and added some code, like this:


	static	PIXELFORMATDESCRIPTOR pfd=				// pfd Tells Windows How We Want Things To Be
	{
		sizeof(PIXELFORMATDESCRIPTOR),				// Size Of This Pixel Format Descriptor
		1,											// Version Number
		PFD_DRAW_TO_WINDOW |						// Format Must Support Window
		PFD_SUPPORT_OPENGL |						// Format Must Support OpenGL
		PFD_DOUBLEBUFFER,							// Must Support Double Buffering
		PFD_TYPE_RGBA,								// Request An RGBA Format
		32,										// Select Our Color Depth
		0, 0, 0, 0, 0, 0,							// Color Bits Ignored
		0,											// No Alpha Buffer
		0,											// Shift Bit Ignored
		0,											// No Accumulation Buffer
		0, 0, 0, 0,									// Accumulation Bits Ignored
		0,											// Z-Buffer (Depth Buffer) Bits
		0,											// No Stencil Buffer
		0,											// No Auxiliary Buffer
		PFD_MAIN_PLANE,								// Main Drawing Layer
		0,											// Reserved
		0, 0, 0										// Layer Masks Ignored
	};

	int foo;
	glGetIntegerv(GL_DEPTH_BITS, &foo);
	char buf[10];
	itoa(foo,buf,10);
	MessageBox(NULL,buf,"Haha!",MB_OK|MB_ICONEXCLAMATION);

To my surprise, the box shows “24”!? But why?
Thanks very much!
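
One way to see which depth sizes the driver actually offers on Windows is to enumerate the pixel formats with DescribePixelFormat. Below is a minimal sketch (not taken from the NeHe code); hDC is assumed to be the window's device context:

	// Walk every pixel format on this DC and report the largest depth
	// buffer offered for an OpenGL-capable RGBA format.
	PIXELFORMATDESCRIPTOR desc;
	int count = DescribePixelFormat(hDC, 1, sizeof(desc), &desc);
	int maxDepth = 0;
	for (int i = 1; i <= count; i++)
	{
		DescribePixelFormat(hDC, i, sizeof(desc), &desc);
		if ((desc.dwFlags & PFD_SUPPORT_OPENGL) &&
			desc.iPixelType == PFD_TYPE_RGBA &&
			desc.cDepthBits > maxDepth)
			maxDepth = desc.cDepthBits;
	}
	char buf[10];
	itoa(maxDepth, buf, 10);
	MessageBox(NULL, buf, "Max depth bits", MB_OK | MB_ICONEXCLAMATION);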

The GeForce 7 series only supports 24-bit depth formats.
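
This can be confirmed from the first program by asking the driver what it actually allocated for the depth texture. A minimal sketch follows; GL_TEXTURE_DEPTH_SIZE comes from ARB_depth_texture / OpenGL 1.4, and tex[1] is the depth texture created above:

    // Query the depth resolution the driver actually gave the texture,
    // regardless of the GL_DEPTH_COMPONENT32_ARB internal format requested.
    GLint texDepthBits = 0;
    glBindTexture(GL_TEXTURE_RECTANGLE_ARB, tex[1]);
    glGetTexLevelParameteriv(GL_TEXTURE_RECTANGLE_ARB, 0,
                             GL_TEXTURE_DEPTH_SIZE, &texDepthBits);
    printf("Depth texture bits : %d\n", texDepthBits);

On a GeForce 7 this would be expected to report 24 even though 32 was requested, since the internal format is only a request that the driver is allowed to downgrade.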