I’m trying to use texture arrays in my shaders, and I’m having this really weird issue.
Here are my shaders:
Vertex Shader:
#version 430 core
// Per-vertex attributes.
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec2 aTexCoord;
// A mat4 attribute consumes 4 consecutive locations (2, 3, 4, 5).
layout (location = 2) in mat4 aModel;
// Integer attribute: on the CPU side this MUST be set up with
// glVertexAttribIPointer. glVertexAttribPointer converts the data to
// float, so the shader's `int` slot receives the float's raw bit
// pattern (1.0f -> 1065353216, 2.0f -> 1073741824, ...) — exactly the
// values observed in RenderDoc.
layout (location = 6) in int aTexIndex;

out vec2 texCoord;
flat out int texIndex; // flat: integer varyings cannot be interpolated

uniform int useInputModel; // 1 = use instanced aModel, 0 = use `model` uniform
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;

void main() {
    // BUG FIX: the original declared `mat4 transform;` and then read it
    // via `+=` — reading an uninitialized local is undefined in GLSL.
    // Select the model matrix explicitly instead; for useInputModel in
    // {0, 1} this matches the intended blend aModel*u + model*(1-u).
    mat4 transform = (useInputModel != 0) ? aModel : model;
    gl_Position = projection * view * transform * vec4(aPos, 1.0);
    texCoord = aTexCoord;
    texIndex = aTexIndex;
}
Fragment Shader:
#version 430 core

out vec4 fragColor;

in vec2 texCoord;
flat in int texIndex; // index into the sampler2DArray layer

uniform int useArray; // 0 = sample `useTexture`, 1 = sample `textures`
layout (binding = 0) uniform sampler2D useTexture;
layout (binding = 1) uniform sampler2DArray textures;
uniform vec2 texOffset;
uniform vec2 texScale;

void main() {
    // Shared scale/offset applied to the incoming UVs.
    vec2 uv = (texCoord * texScale) + texOffset;

    // Sample both sources unconditionally (texture() stays in uniform
    // control flow, keeping implicit derivatives well defined), then
    // select. mix(a, b, t) == a*(1-t) + b*t, so for useArray in {0, 1}
    // this is identical to the original weighted sum.
    vec4 flatColor    = texture(useTexture, uv);
    vec4 layeredColor = texture(textures, vec3(uv, texIndex));
    fragColor = mix(flatColor, layeredColor, float(useArray));
}
For some reason, the aTexIndex attribute is not getting passed to the fragment shader correctly. I’ve tested a few values and here are the results:
| aTexIndex | texIndex   |
|-----------|------------|
| 1         | 1065353216 |
| 2         | 1073741824 |
| 3         | 1077936128 |
| 4         | 1082130432 |
I am checking values with renderdoc.
Edit:
Those numbers are the IEEE-754 bit patterns of the corresponding float values, reinterpreted as integers (1.0f = 0x3F800000 = 1065353216, 2.0f = 0x40000000 = 1073741824, and so on). This happens when an integer vertex attribute is supplied with glVertexAttribPointer, which converts the data to float — integer attributes must be set up with glVertexAttribIPointer instead.