Depth buffer to a texture

Hi, I’m a new OpenGL programmer

My aim is to attach a texture containing depth to an fbo to have access to the depth values of the triangle drawn thanks to the function display().
Everything was working fine and depth values into fbo were looking correct.

But now since I try to attach texture to fbo (glFramebufferTexture)
my code raises “Invalid framebuffer operation” (GL_INVALID_FRAMEBUFFER_OPERATION).
When I comment out the call to display() in that same function, the error does not appear.

I can’t understand why.
Is calling my display function like this the right way to render the depth map of what it constructs into my texture?

Here is my code :


glutWindow win;        /* window description (width/height, fov, near/far — read by initialize/make_texture) */
GLuint depthTexture;   /* depth texture meant to be the FBO's depth attachment (see make_texture/make_Fbo) */
GLuint fbo;            /* depth-only framebuffer object */
GLuint rb;             /* depth renderbuffer — NOTE(review): make_Fbo attaches both rb and depthTexture
                          to GL_DEPTH_ATTACHMENT; only one attachment point exists, so they conflict */



/* Draws one Gouraud-shaded triangle (per-vertex colours, all vertices at
 * z = 0.7 after a -3 translate on the modelview matrix) and swaps buffers.
 * NOTE(review): when called while the depth-only FBO is bound (make_Fbo),
 * GL_COLOR_BUFFER_BIT clears a colour buffer that does not exist there, and
 * glutSwapBuffers() swaps the window's default framebuffer, not the FBO —
 * confirm this is intended for off-screen rendering. */
void display() 
{
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);		     // Clear Screen and Depth Buffer
	glLoadIdentity();
	glTranslatef(0.0f,0.0f,-3.0f);			// move the triangle 3 units away from the camera

	glBegin(GL_TRIANGLES);
		glColor3f(0.0f,0.0f,1.0f);			// blue
		glVertex3f( 0.0f, 1.0f, 0.7f);
		glColor3f(0.0f,1.0f,0.0f);			// green
		glVertex3f(-1.0f,-1.0f, 0.7f);
		glColor3f(1.0f,0.0f,0.0f);			// red
		glVertex3f( 1.0f,-1.0f, 0.7f);
	glEnd();
	glutSwapBuffers();
}

/* One-time GL state setup: viewport, perspective projection from the window
 * description in `win`, smooth shading and LEQUAL depth testing.
 * Fix: the original called glMatrixMode(GL_PROJECTION) twice in a row
 * (before and after glViewport) — the duplicate call is removed; glViewport
 * is independent of the matrix mode anyway. */
void initialize () 
{
    glViewport(0, 0, win.width, win.height);	// map NDC to the full window

    glMatrixMode(GL_PROJECTION);		// build the projection matrix
    glLoadIdentity();
    GLfloat aspect = (GLfloat) win.width / win.height;
    gluPerspective(win.field_of_view_angle, aspect, win.z_near, win.z_far);

    glMatrixMode(GL_MODELVIEW);		// leave modelview current for rendering
    glShadeModel( GL_SMOOTH );
    glClearDepth( 1.0f );		// depth buffer clears to the far plane
    glEnable( GL_DEPTH_TEST );
    glDepthFunc( GL_LEQUAL );
    glHint( GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST );
    glClearColor(0.0, 0.0, 0.0, 1.0); // opaque black background
}

void make_Fbo()
{
	glGenFramebuffers(1, &fbo);
	glBindFramebuffer(GL_FRAMEBUFFER, fbo);
	//inform opengl that no color texture will be binded
	glDrawBuffer(GL_NONE);
	glReadBuffer(GL_NONE);

	glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, rb);
	glFramebufferTexture(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, depthTexture, 0); // attach depth texture to fbo
	display();
	check_gl_error("make_fbo");
}

void make_texture() // make texture to be attached to fbo, this texture contains depth values
{
    GLuint depthTexture;
    glGenTextures(1, &depthTexture);
    glBindTexture(GL_TEXTURE_2D, depthTexture);
    glTexImage2D(GL_TEXTURE_2D, 0,GL_DEPTH_COMPONENT24, win.width, win.height, 0,GL_DEPTH_COMPONENT, GL_FLOAT, 0); // depth texture
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    check_gl_error("make_texture");
}

void make_render_buffer()
{
 glGenRenderbuffers(1, &rb);
 glBindRenderbuffer(GL_RENDERBUFFER, rb);
 glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, win.width, win.height); 
 check_gl_error("make render_buffer");
}

/* Entry point. The elided part ([...]) presumably holds the GLUT window
 * setup and the call to initialize() — it is not shown here.
 * Setup order matters: the texture must exist before make_Fbo attaches it. */
int main(int argc, char **argv) 
{
        [...]
	make_render_buffer();	// depth renderbuffer (see note: conflicts with the texture attach)
	make_texture();		// must run before make_Fbo so depthTexture is valid
	make_Fbo();		// builds the FBO and renders into it once
	glutMainLoop();						// run GLUT mainloop
	return 0;
}

You should be checking framebuffer completeness with glCheckFramebufferStatus() before rendering into the FBO:

Also use glGetError() to get to help pin down the source of your problem.

Why are you trying to bind both a texture and a renderbuffer to GL_DEPTH_ATTACHMENT of the FBO?

Why are you trying to clear the color buffer when you don’t have one?

Have you checked your width and height?

Thank you for your answer,
the completeness was something I was missing.

anyway I’m now trying to do something else to display the depth map,

I wrote a simple fragment shader hoping it would dump my depth values into the colour channels,
but a white triangle appears, as if every gl_FragCoord.z = 1.


// Visualise depth as greyscale.
// Fixes vs. the original:
//  - gl_FragCoord.z is already in [0,1] and colour channels of a normalised
//    framebuffer are also [0,1]; multiplying by 255 pushed every value past
//    1.0, where it clamps — hence the all-white triangle;
//  - the original read gl_FragColor.w before ever writing gl_FragColor,
//    which is undefined; use an explicit opaque alpha instead.
void main()
{
    float z = gl_FragCoord.z;            // window-space depth, 0 = near, 1 = far
    gl_FragColor = vec4(z, z, z, 1.0);
}

If you have any idea why I get this value from gl_FragCoord, I would be pleased to know :slight_smile:

EDIT: I use orthographic projection.

For a normalised colour buffer, the components of gl_FragColor should be in the range 0…1, not 0…255.

In this case, most of them will be much larger than 1, and will be clamped to 1.

Also: I presume that gl_FragColor.w should have been gl_FragCoord.w?

Absolutely true I missed it :slight_smile:

Actually no, gl_FragColor.w corresponds to the alpha component, so I think I don’t need to change it.

anyways I found a solution following : https://www.opengl.org/wiki/Compute_eye_space_from_window_space

Here is my new fragment shader:


// Reconstruct depth for display, following the OpenGL wiki recipe
// ("Compute eye space from window space"):
// window-space gl_FragCoord.z -> NDC depth in [-1,1] via gl_DepthRange.
float ndcDepth =
    (2.0 * gl_FragCoord.z - gl_DepthRange.near - gl_DepthRange.far) /
    (gl_DepthRange.far - gl_DepthRange.near);

// gl_FragCoord.w holds 1/w_clip, so dividing by it multiplies by w_clip.
float clipDepth = ndcDepth / gl_FragCoord.w;

// Remap [-1,1] -> [0,1]; the *10.0 gain only brightens the visualisation.
float z = ((clipDepth * 0.5) + 0.5) * 10.0;

// Fix: the original read gl_FragColor.w before writing gl_FragColor, which
// is undefined in GLSL; write an explicit opaque alpha instead.
gl_FragColor = vec4(z, z, z, 1.0);