Hello,
I am trying to render things to the same FBO using a GL_TEXTURE_2D_ARRAY. I have made the FBO in the following way, which I think was pretty straightforward:
// FRAMEBUFFER
glGenFramebuffers(1, &(this->dynamicRenderFBO));
glBindFramebuffer(GL_FRAMEBUFFER, this->dynamicRenderFBO);
glDrawBuffers(1, &(this->dynamicRenderFBO_Attachments));
// TEXTURE ARRAY
glGenTextures(1, &(this->dynamicRenderTextureID));
glBindTexture(GL_TEXTURE_2D_ARRAY, this->dynamicRenderTextureID);
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_RGBA, _MAX_DYNAMIC_RESOLUTION_, _MAX_DYNAMIC_RESOLUTION_, _MAX_DYNAMIC_LAYERS_, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// ATTACH TEXTURE TO FBO'S OUTPUT
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, this->dynamicRenderTextureID, 0);
However, assigning the proper layer index doesn’t work.
#version 450
// Writing gl_Layer from the VERTEX stage (instead of a geometry shader) needs
// an extension. GL_ARB_shader_viewport_layer_array is the vendor-neutral one
// (NV_viewport_array2 / AMD_vertex_shader_layer are the vendor variants).
// "require" makes unsupported drivers fail with a clear message instead of
// silently ignoring the declaration and then rejecting the gl_Layer write.
#extension GL_ARB_shader_viewport_layer_array : require
// Destination layer inside the layered GL_TEXTURE_2D_ARRAY color attachment.
uniform int outputIndex;
// LOCAL OBJECT (DISPLAY PLANE) SPACE
layout (location = 0) in vec3 vertexPos;
layout (location = 1) in vec2 texCoord;
// PASSED VARIABLES
out vec2 tex_coord;
///////////////////////////////////////////////////////////////////////////////////////////////////////////
void main() {
    tex_coord = texCoord;
    // Route this primitive to layer outputIndex of the layered framebuffer.
    gl_Layer = outputIndex;
    gl_Position = vec4(vertexPos[0], vertexPos[1], 0.1, 1.0);
}
#version 450
in vec2 tex_coord;
// TEXTURES
uniform sampler2D dynamicLayerColorMap;
layout(location = 0) out vec4 sceneColorMap;
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Sample the dynamic layer; fully transparent texels are discarded so they
// leave the existing scene color untouched.
void main() {
    vec4 sampled = texture(dynamicLayerColorMap, tex_coord);
    // Guard clause: drop the fragment when there is nothing to composite.
    if (sampled.a <= 0.0) {
        discard;
    }
    sceneColorMap = vec4(sampled.rgb, sampled.a);
}
This won't compile, and it gives an error I am not able to decode:
I’ve read a few articles on gl_Layer usage, but they all seem to target old OpenGL versions (like 1.4 or 1.5), while I use OpenGL 4.3+.
How do I get this to work?
Is there any way to assign the output layer index in the fragment shader?