UBO Breaks Shader?

I converted some of the fragment shaders in my application to use UBOs, but now they do absolutely nothing, as if no fragments are ever drawn. Even if I multiply the value the shader is supposed to set the color to by something very small and then add some random color, I still get nothing. How is this possible?

Here is the shader, if that helps somehow:


// G buffer
uniform sampler2D gColor;
uniform sampler2D gPosition;
uniform sampler2D gNormal;
uniform vec2 gTexSize;


// Light source
layout(shared) uniform LightData_Point
{
    vec3 lightPosition;
    vec3 lightColor;
    float lightRange;
    float lightIntensity;
};


// Attenuation factors
uniform vec3 attenuation;


// Specularity info
uniform float shininess;


vec2 CalcGTexCoord()
{
    return gl_FragCoord.xy / gTexSize;
}


void main()
{
    vec2 gTexCoord = CalcGTexCoord();


    vec3 viewPos = texture2D(gPosition, gTexCoord).xyz;


    vec3 lightDir = lightPosition - viewPos;
    float dist = length(lightDir);


    //if(dist > lightRange)
    //    discard;
    
    lightDir /= dist;


    vec3 viewNormal = texture2D(gNormal, gTexCoord).xyz;


    float lambert = dot(lightDir, viewNormal);


    //if(lambert <= 0.0)
    //    discard;


    float fallOff = max(0.0, (lightRange - dist) / lightRange);


    float attenuation = clamp(fallOff * lightIntensity * (1.0 / (attenuation.x + attenuation.y * dist + attenuation.z * dist * dist)), 0.0, 1.0);


    // Specular
    vec3 lightRay = reflect(normalize(-lightDir), viewNormal);
    float specularIntensity = attenuation * pow(max(0.0, dot(lightRay, normalize(-viewPos))), shininess);
    specularIntensity = max(0.0, specularIntensity);


    vec4 color = texture2D(gColor, gTexCoord);


    gl_FragColor = vec4(1.0, 1.0, 0.0, 1.0) + 0.000001 * vec4(color.rgb * attenuation * lambert * lightColor + color.a * specularIntensity * lightColor, 1.0);
}

So really, it should’t even matter what the members of the UBO are set to! I could not uncomment the color calculations completely, since then it optimizes it out and errors.

Here is the wrapper class I made for the UBOs (well, an interface really, the UBO itself is just re-using a VBO wrapper class):


#include <Renderer/Shader/UBOShaderInterface.h>


#include <assert.h>


void UBOShaderInterface::Create(const std::string &uniformBlockName, Shader* shader, std::vector<std::string> &uniformNames)
{
    m_pShader = shader;


    m_blockIndex = glGetUniformBlockIndex(m_pShader->GetProgID(), uniformBlockName.c_str());


    assert(m_blockIndex != GL_INVALID_INDEX);


    glGetActiveUniformBlockiv(m_pShader->GetProgID(), m_blockIndex, GL_UNIFORM_BLOCK_DATA_SIZE, &m_blockSize);


    // Get block names
    std::vector<char const*> uniformNamesCString;


    const unsigned int numNames = uniformNames.size();


    for(unsigned int i = 0; i < numNames; i++)
        uniformNamesCString.push_back(uniformNames[i].c_str());


    unsigned int* indices = new unsigned int[numNames];


    glGetUniformIndices(m_pShader->GetProgID(), uniformNames.size(), &uniformNamesCString[0], indices);


    // Query offsets and associate names to the offsets
    int* offsets = new int[numNames];


    glGetActiveUniformsiv(m_pShader->GetProgID(), numNames, indices, GL_UNIFORM_OFFSET, offsets);


    delete[] indices;


    for(unsigned int i = 0; i < numNames; i++)
        m_uniformNameToOffset[uniformNames[i]] = offsets[i];


    delete[] offsets;
}


void UBOShaderInterface::SetUpBuffer(VBO &buffer)
{
    assert(!buffer.Created());


    buffer.Create();


    buffer.Bind(GL_UNIFORM_BUFFER);


    glBufferData(GL_UNIFORM_BUFFER, m_blockSize, NULL, GL_STREAM_DRAW);


    // Allocate VBO using this size
    buffer.Unbind();
}


void UBOShaderInterface::SetBindingIndex(unsigned int index)
{
    m_bufferBindIndex = index;


    glUniformBlockBinding(m_pShader->GetProgID(), m_blockIndex, m_bufferBindIndex);
}


int UBOShaderInterface::GetBlockSize() const
{
    return m_blockSize;
}


Shader* UBOShaderInterface::GetShader() const
{
    return m_pShader;
}


void UBOShaderInterface::BindBufferToSetIndex(VBO &buffer)
{
    glBindBufferBase(GL_UNIFORM_BUFFER, m_bufferBindIndex, buffer.GetID());
}


void UBOShaderInterface::UnbindSetIndex()
{
    glBindBufferBase(GL_UNIFORM_BUFFER, m_bufferBindIndex, 0);
}


void UBOShaderInterface::SetUniform(const std::string &name, GLintptr size, GLvoid* param)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], size, param);
}


void UBOShaderInterface::SetUniformf(const std::string &name, float param)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], sizeof(float), &param);
}


void UBOShaderInterface::SetUniformv2f(const std::string &name, const Vec2f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 2 * sizeof(float), &params);
}


void UBOShaderInterface::SetUniformv3f(const std::string &name, const Vec3f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 3 * sizeof(float), &params);
}


void UBOShaderInterface::SetUniformv3f(const std::string &name, const Color3f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 3 * sizeof(float), &params);
}


void UBOShaderInterface::SetUniformv4f(const std::string &name, const Vec4f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 4 * sizeof(float), &params);
}


void UBOShaderInterface::SetUniformv4f(const std::string &name, const Color4f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 4 * sizeof(float), &params);
}

The UBO interface is set up like this:


    // UBO interface setup    
   {
        std::vector<std::string> uniformNames;
        uniformNames.push_back("lightPosition");
        uniformNames.push_back("lightColor");
        uniformNames.push_back("lightRange");
        uniformNames.push_back("lightIntensity");


        m_pointLightUBOInterface.Create("LightData_Point", &m_pointLightEffectShader, uniformNames);
        m_pointLightUBOInterface.SetBindingIndex(0);
    }

But as I said before, I don’t think that stuff even matters, since no matter what the uniforms are set to it should still draw something.

The shader is definitely being run.

Thanks for any help!

Bumping…

Bumping…

Bumping…

Stop it!

Never just “Bump” - add information / what you have tried in the mean time etc.

PS. NaN * whatever + whatever = NaN, => Check for NaNs.
PS. OpenGl ignores invalid commands and i do not see any error queries - start there.
PS. Consider using std140 - significantly less annoying to use imho.

edit:
PS. Your source is the most inefficient way imaginable to do it - consider redoing it.

I remove the error checking from the sample code for clarity. I tried all UBO formats, nothing changed. A shader that is gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); should draw to the screen, no matter how values are inserted into the UBO! It works if I remove the UBO, but as soon as it is in the shader (just declared, not even used), all fragments are discarded somehow.

Yeah, the way it is set up is not very efficient, but right now I just want to get it working :stuck_out_tongue:

[QUOTE=cireneikual;1239461]It works if I remove the UBO, but as soon as it is in the shader (just declared, not even used), all fragments are discarded somehow.[/QUOTE]If it is not used then the compiler will remove it. I seriously doubt any of the fragments are discarded … like i said - check for NaNs (rgba8 can not represent NaNs, so they will all be converted to 0).

gl_FragColor = isnan(color.r + attenuation + lambert + lightColor.r + specularIntensity) ? vec4(1.0, 0.0, 0.0, 1.0) : vec4(0.0, 1.0, 0.0, 1.0);

If it is NaN then your UBO upload/usage is broken somewhere.

Alright, I tried that, but still no fragments are rendered. The shader is definitely being run - if I unbind the shader, the shadow geometry appears.