
Thread: textureCube always returns 0

  1. #1

    I'm trying to render part of a (depth) cubemap to a separate 2D texture, but the GLSL 'textureCube' function always returns 0.

    Here's my frame buffer / texture generation code:
    Code :
    unsigned int size = 4096;
    unsigned int frameBuffer;
    glGenFramebuffers(1,&frameBuffer);
     
    glBindFramebuffer(GL_FRAMEBUFFER,frameBuffer);
    unsigned int texture;
    glGenTextures(1,&texture);
     
    glBindTexture(GL_TEXTURE_CUBE_MAP,texture);
    glClearDepth(0.75); // Clear every face to a recognizable non-zero depth
    for(int i=0;i<6;i++)
    {
    	glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X +i,0,GL_DEPTH_COMPONENT,size,size,0,GL_DEPTH_COMPONENT,GL_FLOAT,0);
    	glFramebufferTexture2D(GL_FRAMEBUFFER,GL_DEPTH_ATTACHMENT,GL_TEXTURE_CUBE_MAP_POSITIVE_X +i,texture,0);
     
    	glClear(GL_DEPTH_BUFFER_BIT);
    }
    glClearDepth(1.f);
    glTexParameteri(GL_TEXTURE_CUBE_MAP,GL_DEPTH_TEXTURE_MODE,GL_LUMINANCE);
    glTexParameteri(GL_TEXTURE_CUBE_MAP,GL_TEXTURE_COMPARE_FUNC,GL_LEQUAL);
    glTexParameteri(GL_TEXTURE_CUBE_MAP,GL_TEXTURE_COMPARE_MODE,GL_COMPARE_R_TO_TEXTURE);
     
    glDrawBuffer(GL_NONE);
    glReadBuffer(GL_NONE);
    bool valid = (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
    if(valid)
    {
    	unsigned char *pixels = new unsigned char[size *size];
    	glReadPixels(0,0,size,size,GL_DEPTH_COMPONENT,GL_UNSIGNED_BYTE,&pixels[0]);
    	for(unsigned int i=1243;i<1495;i++) // Read out some random pixels to make sure the previous code is working
    		std::cout<<static_cast<unsigned int>(pixels[i])<<std::endl; // Prints '191' for each pixel (0.75 * 255 = ~191, matching the clear depth)
    	delete[] pixels;
    }
     
    glBindFramebuffer(GL_FRAMEBUFFER,0);
    glBindTexture(GL_TEXTURE_CUBE_MAP,0);

    The fragment shader:
    Code :
    #version 330 core
     
    in vec2 UV;
     
    out vec4 color;
     
    uniform samplerCube depthTexture;
     
    void main()
    {
    	// Map UV from [0,1] to [-1,1] and sample the +X cubemap face
    	float y = UV.y *2.0 -1.0;
    	float z = UV.x *2.0 -1.0;
    	float depth = textureCube(depthTexture,vec3(1.0,-y,z)).x;
    	color.r = depth;
    	color.g = depth;
    	color.b = depth;
    	color.a = 1.0;
    }
    'depth' is always 0.

    Binding the cubemap to the sampler uniform and rendering to a screen quad:
    Code :
    glUseProgram(shader);
    int locTexture = glGetUniformLocation(shader,"depthTexture");
     
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_CUBE_MAP,texture); // Same texture as generated earlier
     
    glUniform1i(locTexture,0);
     
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER,vertexBuffer); // Two triangles with the vertices for a quad
    glVertexAttribPointer(
    	0,        // attribute index
    	3,        // components per vertex
    	GL_FLOAT, // type
    	GL_FALSE, // normalized
    	0,        // stride
    	(void*)0  // offset
    );
    glDrawArrays(GL_TRIANGLES,0,6);
    glDisableVertexAttribArray(0);

    glGetError() reports no errors. What am I missing?

  2. #2
    Found the issue. I didn't set the mipmap count for the texture to 0. Can't believe how many times I've run into this problem; I really should've learned by now.
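
    In case it helps anyone else, here's a minimal sketch of the fix, assuming the texture setup from the first post (untested beyond that): with only mip level 0 allocated, the default GL_TEXTURE_MIN_FILTER of GL_NEAREST_MIPMAP_LINEAR leaves the cubemap mipmap-incomplete, and an incomplete texture samples as 0.
    Code :
    glBindTexture(GL_TEXTURE_CUBE_MAP,texture);
     
    // Restrict sampling to mip level 0, since no other levels exist
    glTexParameteri(GL_TEXTURE_CUBE_MAP,GL_TEXTURE_BASE_LEVEL,0);
    glTexParameteri(GL_TEXTURE_CUBE_MAP,GL_TEXTURE_MAX_LEVEL,0);
     
    // And/or use filters that don't reference mipmaps at all
    glTexParameteri(GL_TEXTURE_CUBE_MAP,GL_TEXTURE_MIN_FILTER,GL_NEAREST);
    glTexParameteri(GL_TEXTURE_CUBE_MAP,GL_TEXTURE_MAG_FILTER,GL_NEAREST);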
