Help : Can't use 32bit depth texture



Jedimaster
11-07-2007, 08:41 PM
Hi everyone, I am trying to use the GPU to do rasterization, so I wrote this:

glutInit(&argc, argv);
// the display string must be set before the window is created,
// otherwise it has no effect
glutInitDisplayString("depth=32 single rgba");
glutCreateWindow("TEST1");
int foo;
glGetIntegerv(GL_DEPTH_BITS, &foo);
printf("Native Depth : %d\n",foo);

glewInit();
// viewport transform for 1:1 pixel=texel=data mapping
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
//gluOrtho2D(0.0,texSize,0.0,texSize);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glViewport(0,0,texSize,texSize);
// create FBO and bind it (that is, use offscreen render target)
GLuint fb;
glGenFramebuffersEXT(1,&fb);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT,fb);

// create texture
GLuint tex[2];
glGenTextures (2, tex);
glBindTexture(GL_TEXTURE_RECTANGLE_ARB,tex[0]);
// set texture parameters
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB,
GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB,
GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB,
GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB,
GL_TEXTURE_WRAP_T, GL_CLAMP);
// define texture with floating point format
glTexImage2D(GL_TEXTURE_RECTANGLE_ARB,0,GL_RGBA32F_ARB,
texSize,texSize,0,GL_RGBA,GL_FLOAT,0);

glBindTexture(GL_TEXTURE_RECTANGLE_ARB,tex[1]);
glTexImage2D(GL_TEXTURE_RECTANGLE_ARB,0,GL_DEPTH_COMPONENT32_ARB,
texSize,texSize,0,GL_DEPTH_COMPONENT,GL_UNSIGNED_INT,0);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB,
GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB,
GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB,
GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_RECTANGLE_ARB,
GL_TEXTURE_WRAP_T, GL_CLAMP);

GLuint depth_rb;
glGenRenderbuffersEXT(1, &depth_rb);
glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, depth_rb);
glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT32, texSize, texSize);
glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, depth_rb);




// attach texture
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT,
GL_COLOR_ATTACHMENT0_EXT,
GL_TEXTURE_RECTANGLE_ARB,tex[0],0);
//glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT,GL_DEPTH_ATTACHMENT_EXT,GL_TEXTURE_RECTANGLE_ARB,tex[1],0);


// transfer data to texture
//glTexSubImage2D(GL_TEXTURE_RECTANGLE_ARB,0,0,0,texSize,texSize,
// GL_RGBA,GL_FLOAT,data);

glBindFramebufferEXT(GL_FRAMEBUFFER_EXT,fb);

glGetIntegerv(GL_DEPTH_BITS, &foo);
printf("FBO Depth : %d\n",foo);
CheckFramebufferStatus();

As you can see, I have manually set up both the depth texture and the renderbuffer with GL_DEPTH_COMPONENT32, but the program always prints 24 bits on the command line, and I do not know why. I am using the GeForce 7300 Go in my laptop, so I thought this card would support a 32-bit depth buffer.
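
For reference, the actual allocation can also be queried directly (a minimal sketch, assuming the depth_rb renderbuffer from the code above is still bound):

GLint rbDepthBits = 0;
// ask the renderbuffer what the driver really allocated;
// a sized internal format is only a request, so this may differ from 32
glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT,
GL_RENDERBUFFER_DEPTH_SIZE_EXT, &rbDepthBits);
printf("Renderbuffer depth size : %d\n", rbDepthBits);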

Thanks!

Jedimaster
11-07-2007, 09:26 PM
Ah, just now I downloaded lesson 2 of NeHe's OpenGL tutorial and added some code, like this:


static PIXELFORMATDESCRIPTOR pfd= // pfd Tells Windows How We Want Things To Be
{
sizeof(PIXELFORMATDESCRIPTOR), // Size Of This Pixel Format Descriptor
1, // Version Number
PFD_DRAW_TO_WINDOW | // Format Must Support Window
PFD_SUPPORT_OPENGL | // Format Must Support OpenGL
PFD_DOUBLEBUFFER, // Must Support Double Buffering
PFD_TYPE_RGBA, // Request An RGBA Format
32, // Select Our Color Depth
0, 0, 0, 0, 0, 0, // Color Bits Ignored
0, // No Alpha Buffer
0, // Shift Bit Ignored
0, // No Accumulation Buffer
0, 0, 0, 0, // Accumulation Bits Ignored
0, // Depth Buffer Bits (0 = Let The Driver Choose)
0, // No Stencil Buffer
0, // No Auxiliary Buffer
PFD_MAIN_PLANE, // Main Drawing Layer
0, // Reserved
0, 0, 0 // Layer Masks Ignored
};

int foo;
// query the depth bits once the GL context is current (e.g. at the end of InitGL)
glGetIntegerv(GL_DEPTH_BITS, &foo);
char buf[10];
itoa(foo, buf, 10);
MessageBox(NULL, buf, "Haha!", MB_OK | MB_ICONEXCLAMATION);


To my surprise, the box shows "24"!? But why?
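
One way to see what the driver can offer at all is to enumerate every pixel format and look at its depth bits. A rough sketch, assuming hDC is the device context from the NeHe framework:

int maxDepth = 0;
// called this way, DescribePixelFormat returns the number of available formats
int count = DescribePixelFormat(hDC, 1, sizeof(PIXELFORMATDESCRIPTOR), NULL);
for (int i = 1; i <= count; ++i)
{
    PIXELFORMATDESCRIPTOR p;
    DescribePixelFormat(hDC, i, sizeof(p), &p);
    // only consider formats that support OpenGL at all
    if ((p.dwFlags & PFD_SUPPORT_OPENGL) && p.cDepthBits > maxDepth)
        maxDepth = p.cDepthBits;
}
char buf[16];
itoa(maxDepth, buf, 10);
MessageBox(NULL, buf, "Max depth bits", MB_OK | MB_ICONINFORMATION);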
Thanks very much!

sqrt[-1]
11-08-2007, 12:48 AM
The GeForce 7 series only supports 24-bit depth formats. A sized internal format like GL_DEPTH_COMPONENT32 is only a request; the driver substitutes the closest format it actually supports, which is why you silently get 24 bits instead of an error.
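
If you want your code to notice this and degrade gracefully, allocate the renderbuffer, query what you actually got, and fall back. A quick sketch, assuming the renderbuffer from your first post is bound:

GLint bits = 0;
glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT32, texSize, texSize);
glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT,
GL_RENDERBUFFER_DEPTH_SIZE_EXT, &bits);
if (bits < 32)
{
    // the hardware cannot do 32-bit depth, so explicitly ask
    // for the 24-bit format it does support
    glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT24, texSize, texSize);
}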