I converted some of the fragment shaders in my application to use UBOs, but now they do absolutely nothing, as if no fragments are ever drawn. Even if I multiply the value the shader is supposed to set the color to by something very small and then add some random color, I still get nothing. How is this possible?
Here is the shader, if that helps somehow:
// NOTE(review): no #version directive is visible in this paste — uniform
// interface blocks require #version 140 (GLSL 1.40) or newer; confirm the
// actual file declares one, otherwise the shader fails to compile and the
// program silently renders nothing.

// G buffer (view-space position/normal, albedo + spec factor in gColor)
uniform sampler2D gColor;
uniform sampler2D gPosition;
uniform sampler2D gNormal;
uniform vec2 gTexSize; // G-buffer dimensions in texels

// Light source — fed from a UBO. "shared" layout means member offsets are
// implementation-chosen and must be queried (the C++ side does this with
// glGetActiveUniformsiv/GL_UNIFORM_OFFSET).
layout(shared) uniform LightData_Point
{
    vec3 lightPosition;
    vec3 lightColor;
    float lightRange;
    float lightIntensity;
};

// Attenuation factors: x = constant, y = linear, z = quadratic
uniform vec3 attenuation;

// Specularity info
uniform float shininess;

// Map the fragment's window coordinate to a G-buffer texture coordinate.
vec2 CalcGTexCoord()
{
    return gl_FragCoord.xy / gTexSize;
}

void main()
{
    vec2 gTexCoord = CalcGTexCoord();

    vec3 viewPos = texture2D(gPosition, gTexCoord).xyz;

    vec3 lightDir = lightPosition - viewPos;
    float dist = length(lightDir);

    //if(dist > lightRange)
    //    discard;

    lightDir /= dist;

    vec3 viewNormal = texture2D(gNormal, gTexCoord).xyz;

    float lambert = dot(lightDir, viewNormal);

    //if(lambert <= 0.0)
    //    discard;

    float fallOff = max(0.0, (lightRange - dist) / lightRange);

    // FIX: the local was previously also named "attenuation", shadowing the
    // vec3 uniform of the same name. Per GLSL scoping the initializer still
    // reads the uniform while every later use reads the local scalar, but
    // the shadowing is fragile (some compilers warn or reject it) and it
    // hides the uniform for the rest of main(). Renamed to attenFactor.
    float attenFactor = clamp(fallOff * lightIntensity * (1.0 / (attenuation.x + attenuation.y * dist + attenuation.z * dist * dist)), 0.0, 1.0);

    // Specular
    vec3 lightRay = reflect(normalize(-lightDir), viewNormal);
    float specularIntensity = attenFactor * pow(max(0.0, dot(lightRay, normalize(-viewPos))), shininess);
    specularIntensity = max(0.0, specularIntensity);

    vec4 color = texture2D(gColor, gTexCoord);

    // Debug output: solid yellow plus a vanishingly small lighting term so
    // the uniforms are referenced and not optimized away.
    gl_FragColor = vec4(1.0, 1.0, 0.0, 1.0) + 0.000001 * vec4(color.rgb * attenFactor * lambert * lightColor + color.a * specularIntensity * lightColor, 1.0);
}
So really, it shouldn't even matter what the members of the UBO are set to! (I couldn't remove the color calculation entirely, because the compiler would then optimize the uniforms away and my code that queries them would error.)
Here is the wrapper class I made for the UBOs (well, an interface really, the UBO itself is just re-using a VBO wrapper class):
#include <Renderer/Shader/UBOShaderInterface.h>
#include <assert.h>
// Locate the named uniform block in the shader program, query its
// implementation-decided size ("shared" layout), and record the byte offset
// of each named member so SetUniform* can write into the buffer later.
// Asserts if the block name is not found in the linked program.
void UBOShaderInterface::Create(const std::string &uniformBlockName, Shader* shader, std::vector<std::string> &uniformNames)
{
    m_pShader = shader;

    m_blockIndex = glGetUniformBlockIndex(m_pShader->GetProgID(), uniformBlockName.c_str());
    assert(m_blockIndex != GL_INVALID_INDEX);

    glGetActiveUniformBlockiv(m_pShader->GetProgID(), m_blockIndex, GL_UNIFORM_BLOCK_DATA_SIZE, &m_blockSize);

    const unsigned int numNames = uniformNames.size();

    // Nothing to query — avoid indexing into empty vectors below.
    if (numNames == 0)
        return;

    // glGetUniformIndices wants an array of C strings.
    std::vector<const char*> uniformNamesCString;
    uniformNamesCString.reserve(numNames);
    for (unsigned int i = 0; i < numNames; i++)
        uniformNamesCString.push_back(uniformNames[i].c_str());

    // FIX: use RAII containers instead of raw new[]/delete[] — no leak if
    // anything between allocation and delete[] throws.
    std::vector<unsigned int> indices(numNames);
    glGetUniformIndices(m_pShader->GetProgID(), numNames, &uniformNamesCString[0], &indices[0]);

    // Query each member's byte offset within the block and remember it by name.
    std::vector<int> offsets(numNames);
    glGetActiveUniformsiv(m_pShader->GetProgID(), numNames, &indices[0], GL_UNIFORM_OFFSET, &offsets[0]);

    for (unsigned int i = 0; i < numNames; i++)
        m_uniformNameToOffset[uniformNames[i]] = offsets[i];
}
// Allocate GPU storage for one instance of this uniform block in the given
// buffer, sized to the block size queried in Create(). The buffer must not
// have been created yet; it is left unbound on return.
void UBOShaderInterface::SetUpBuffer(VBO &buffer)
{
assert(!buffer.Created());
buffer.Create();
buffer.Bind(GL_UNIFORM_BUFFER);
// Reserve m_blockSize bytes with no initial data; STREAM_DRAW since the
// contents are rewritten frequently via glBufferSubData.
glBufferData(GL_UNIFORM_BUFFER, m_blockSize, NULL, GL_STREAM_DRAW);
// Allocate VBO using this size
buffer.Unbind();
}
// Associate this shader's uniform block with an indexed UBO binding point.
// A buffer bound to the same index via glBindBufferBase then feeds the block.
void UBOShaderInterface::SetBindingIndex(unsigned int index)
{
m_bufferBindIndex = index;
glUniformBlockBinding(m_pShader->GetProgID(), m_blockIndex, m_bufferBindIndex);
}
// Size in bytes of the uniform block, as reported by the GL in Create().
int UBOShaderInterface::GetBlockSize() const
{
return m_blockSize;
}
// Non-owning pointer to the shader this interface was created against.
Shader* UBOShaderInterface::GetShader() const
{
return m_pShader;
}
// Bind the buffer to the indexed GL_UNIFORM_BUFFER binding point chosen in
// SetBindingIndex(). NOTE: per the GL spec, glBindBufferBase also binds the
// buffer to the generic GL_UNIFORM_BUFFER target, which is what the
// SetUniform* glBufferSubData calls rely on.
void UBOShaderInterface::BindBufferToSetIndex(VBO &buffer)
{
glBindBufferBase(GL_UNIFORM_BUFFER, m_bufferBindIndex, buffer.GetID());
}
// Release whatever buffer currently occupies this interface's binding point.
void UBOShaderInterface::UnbindSetIndex()
{
glBindBufferBase(GL_UNIFORM_BUFFER, m_bufferBindIndex, 0);
}
// Write `size` bytes from `param` into the UBO at the offset recorded for
// `name` in Create(). Assumes the target buffer is currently bound to
// GL_UNIFORM_BUFFER (e.g. via BindBufferToSetIndex).
// NOTE(review): operator[] silently inserts offset 0 for an unknown name,
// corrupting the block's first member — verify callers only pass names that
// were queried in Create().
void UBOShaderInterface::SetUniform(const std::string &name, GLintptr size, GLvoid* param)
{
glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], size, param);
}
// Write a single float into the UBO at the offset recorded for `name`.
// Assumes the target buffer is bound to GL_UNIFORM_BUFFER.
// FIX: the pasted source had the mojibake "¶m" where "&param" belongs
// ("&para" was rendered as the pilcrow character); restored valid C++.
void UBOShaderInterface::SetUniformf(const std::string &name, float param)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], sizeof(float), &param);
}
// Write a 2-component float vector into the UBO at the offset for `name`.
// Assumes the target buffer is bound to GL_UNIFORM_BUFFER.
// FIX: restored "&params" from the mojibake "¶ms" in both the signature
// and the glBufferSubData call.
void UBOShaderInterface::SetUniformv2f(const std::string &name, const Vec2f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 2 * sizeof(float), &params);
}
// Write a 3-component float vector into the UBO at the offset for `name`.
// Assumes the target buffer is bound to GL_UNIFORM_BUFFER.
// FIX: restored "&params" from the mojibake "¶ms" in signature and body.
void UBOShaderInterface::SetUniformv3f(const std::string &name, const Vec3f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 3 * sizeof(float), &params);
}
// Color3f overload: write three floats (RGB) into the UBO at `name`'s offset.
// Assumes the target buffer is bound to GL_UNIFORM_BUFFER.
// FIX: restored "&params" from the mojibake "¶ms" in signature and body.
void UBOShaderInterface::SetUniformv3f(const std::string &name, const Color3f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 3 * sizeof(float), &params);
}
// Write a 4-component float vector into the UBO at the offset for `name`.
// Assumes the target buffer is bound to GL_UNIFORM_BUFFER.
// FIX: restored "&params" from the mojibake "¶ms" in signature and body.
void UBOShaderInterface::SetUniformv4f(const std::string &name, const Vec4f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 4 * sizeof(float), &params);
}
// Color4f overload: write four floats (RGBA) into the UBO at `name`'s offset.
// Assumes the target buffer is bound to GL_UNIFORM_BUFFER.
// FIX: restored "&params" from the mojibake "¶ms" in signature and body.
void UBOShaderInterface::SetUniformv4f(const std::string &name, const Color4f &params)
{
    glBufferSubData(GL_UNIFORM_BUFFER, m_uniformNameToOffset[name], 4 * sizeof(float), &params);
}
The UBO interface is set up like this:
// UBO interface setup: wire the point-light shader's LightData_Point block
// to binding point 0, listing its members in declaration order.
{
    std::vector<std::string> blockMemberNames;
    blockMemberNames.push_back("lightPosition");
    blockMemberNames.push_back("lightColor");
    blockMemberNames.push_back("lightRange");
    blockMemberNames.push_back("lightIntensity");

    m_pointLightUBOInterface.Create("LightData_Point", &m_pointLightEffectShader, blockMemberNames);
    m_pointLightUBOInterface.SetBindingIndex(0);
}
But as I said before, I don’t think that stuff even matters, since no matter what the uniforms are set to it should still draw something.
The shader is definitely being run.
Thanks for any help!