very strange… this is my fragment shader:
varying vec3 v;
uniform sampler2D tex;
uniform sampler2D bump;
const vec3 addvec = vec3(0.5, 0.5, 0.5); // normals in map are clamped 0-1
void main (void)
{
vec4 TexCol = texture2D(tex, vec2(gl_TexCoord[0]));
vec4 BumpCol = texture2D(bump, vec2(gl_TexCoord[1]));
vec3 N = normalize(gl_NormalMatrix * (BumpCol.xyz - addvec));
N.y = -N.y;
vec3 L = normalize(gl_LightSource[0].position.xyz - v);
vec3 E = normalize(-v); // we are in Eye Coordinates, so EyePos is (0,0,0)
vec3 R = normalize(-reflect(L,N));
vec4 MyColor = gl_FrontLightProduct[0].ambient * TexCol;
MyColor += gl_FrontLightProduct[0].diffuse * max(dot(N,L), 0.0) * TexCol;
MyColor += gl_FrontLightProduct[0].specular * pow(max(dot(R,E),0.0), 0.3 * gl_FrontMaterial.shininess);
gl_FragColor = MyColor;
}
and this is the code in my application:
if (!(NormalTexture = LoadTexture("texture.bmp")))
{MessageBox(NULL, "Could not load 'texture.bmp'!", "ERROR!", MB_OK|MB_ICONEXCLAMATION);}
if (!(BumpTexture = LoadTexture("bump.bmp")))
{MessageBox(NULL, "Could not load 'bump.bmp'!", "ERROR!", MB_OK|MB_ICONEXCLAMATION);}
InitExtentions();
glUseProgramObjectARB(g_programObj);
shader_uniform_tex = glGetUniformLocationARB(g_programObj, "tex");
if (shader_uniform_tex != -1)
{glUniform1iARB(shader_uniform_tex, NormalTexture);} else
{MessageBox(NULL, "Could not find uniform location for 'tex'!", "ERROR!", MB_OK|MB_ICONEXCLAMATION);}
shader_uniform_bump = glGetUniformLocationARB(g_programObj, "bump");
if (shader_uniform_bump != -1)
{glUniform1iARB(shader_uniform_bump, BumpTexture);} else
{MessageBox(NULL, "Could not find uniform location for 'bump'!", "ERROR!", MB_OK|MB_ICONEXCLAMATION);}
glUseProgramObjectARB(NULL);
With the fixed pipeline it all looks fine multitextured. With this i only get to see one texture and that is ‘bump.bmp’ which is stored in ‘uniform sampler2D tex’!!
Can’t be right…
Anyone know what’s wrong?
Thanx,
Marty