
Thread: Compute shader write to image3D does not work

  1. #1
    Junior Member Newbie
    Join Date
    Mar 2008
    Posts
    25

    Compute shader write to image3D does not work

    I tried to modify a 3D texture in a compute shader with a fixed value, but it seems the texture never changes.

    Code :
    #version 430 compatibility
     
    // Image unit 3: level 0 of the R8 3D texture allocated below
    layout(r8) uniform image3D destTex;
    layout (local_size_x = 36, local_size_y = 36, local_size_z = 1) in;
     
    void main()
    {
        ivec3 storePos = ivec3(gl_GlobalInvocationID.xyz);
        // Store a fixed value (3/255) into the red channel of every texel
        imageStore(destTex, storePos, vec4(3.0/255.0));
    }
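
    One thing worth checking up front (my aside, not something raised in the thread): a 36 × 36 × 1 local size is 1296 invocations per work group, while GL 4.3 only guarantees 1024 via GL_MAX_COMPUTE_WORK_GROUP_INVOCATIONS, so the actual limits are worth querying:

    Code :
    // Sketch: query the compute limits the driver actually supports
    // before relying on a 36x36x1 (= 1296 invocations) local size.
    GLint maxInvocations = 0;
    glGetIntegerv(GL_MAX_COMPUTE_WORK_GROUP_INVOCATIONS, &maxInvocations);
    GLint maxSize[3];
    for (GLuint i = 0; i < 3; ++i)
        glGetIntegeri_v(GL_MAX_COMPUTE_WORK_GROUP_SIZE, i, &maxSize[i]);
    printf("max invocations: %d, max group size: %d x %d x %d\n",
           maxInvocations, maxSize[0], maxSize[1], maxSize[2]);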


    Code :
            // Allocate an 8-bit single-channel (R8) 3D texture with no initial data
            glActiveTexture(GL_TEXTURE3);
            glBindTexture(GL_TEXTURE_3D, m_uiMaskPoolTexID);
            glTexImage3D(GL_TEXTURE_3D,
                0,                   // level
                GL_R8,               // internal format
                m_aBrickPoolDim[0],
                m_aBrickPoolDim[1],
                m_aBrickPoolDim[2],
                0,                   // border (must be 0)
                GL_RED,
                GL_UNSIGNED_BYTE,
                nullptr);
            // Bind level 0 to image unit 3 for write-only access
            glBindImageTexture(3,
                m_uiMaskPoolTexID,
                0,                   // level
                GL_FALSE,            // layered
                0,                   // layer
                GL_WRITE_ONLY,
                GL_R8);
            glBindTexture(GL_TEXTURE_3D, 0);
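
    For what it's worth (my own note, not something raised in the thread): with layered = GL_FALSE, glBindImageTexture binds only the single slice of a 3D texture selected by the layer argument. Binding the whole texture as a layered image would look like this:

    Code :
    // Sketch: bind all slices of the 3D texture so image3D stores can
    // reach every z coordinate (layered = GL_TRUE, layer ignored).
    glBindImageTexture(3,
        m_uiMaskPoolTexID,
        0,                   // level
        GL_TRUE,             // layered: bind the entire 3D texture
        0,                   // layer (ignored when layered is GL_TRUE)
        GL_WRITE_ONLY,
        GL_R8);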

    Code :
     glEnable(GL_TEXTURE_3D);
     
                glActiveTexture(GL_TEXTURE3);
                glUseProgram(imaskProgram);
                glBindTexture(GL_TEXTURE_3D,iMask);
                glBindImageTexture(3,
                    iMask,
                    0,
                    GL_FALSE,
                    0,
                    GL_WRITE_ONLY,
                    GL_R8);
     
                int texp = glGetUniformLocation(imaskProgram, "destTex");
     
                glUniform1i(texp, 3);
                glDispatchCompute(aBrickPoolDim[0]/36,aBrickPoolDim[1]/36,aBrickPoolDim[2]);            
                glMemoryBarrier(GL_ALL_BARRIER_BITS);
                glBindTexture(GL_TEXTURE_3D,0);
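
    To check whether the stores actually landed, one option (my sketch, not from the post; assumes <vector> and <cstdio> are available on the C++ side) is to read the texture back after the barrier:

    Code :
    // Sketch: read the whole R8 texture back and inspect a texel.
    std::vector<GLubyte> texels(
        (size_t)aBrickPoolDim[0] * aBrickPoolDim[1] * aBrickPoolDim[2]);
    glBindTexture(GL_TEXTURE_3D, iMask);
    glGetTexImage(GL_TEXTURE_3D, 0, GL_RED, GL_UNSIGNED_BYTE, texels.data());
    glBindTexture(GL_TEXTURE_3D, 0);
    // Every texel covered by the dispatch should now be 3
    printf("texel[0] = %u\n", texels[0]);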

  2. #2
    Junior Member Newbie
    Join Date
    Jan 2013
    Posts
    11
    The code looks fine.
    I assume you are using NVIDIA hardware. There was a bug with compute shaders and textures, which has since been fixed. Have you tried the latest drivers? Release 310.90 contains the fix.
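
    A quick way to confirm which driver the context is actually running on (my sketch, not part of the original reply) is to print the GL strings; on NVIDIA the driver release shows up in GL_VERSION:

    Code :
    // Sketch: the NVIDIA driver release (e.g. "... NVIDIA 310.90")
    // is appended to the GL_VERSION string.
    printf("GL_RENDERER: %s\n", (const char*)glGetString(GL_RENDERER));
    printf("GL_VERSION:  %s\n", (const char*)glGetString(GL_VERSION));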

  3. #3
    Junior Member Newbie
    Join Date
    Mar 2008
    Posts
    25
    I'm using 310.70, and 2D textures work fine. I'll get the newest driver and try again.
