I have a shader pair:
vert:
Code :
#version 130	// "in"/"out" qualifiers need GLSL 1.30+; the gl_* built-in matrices need a compatibility context
 
uniform vec3 u_Camera;
uniform float u_WaterLevel;
uniform vec3 u_LightPosition;
 
in vec3 in_Vertex;
in vec3 in_Normal;
 
out vec3 vectorLightSource;
out vec3 transformedNormal;
 
void main (void) {
 
	vec3 out_Vertex = in_Vertex;
	// camera and vertex lie on opposite sides of the water surface
	if ( (in_Vertex.y - u_WaterLevel) * (u_Camera.y - u_WaterLevel) < 0.0 ) {
		float dist = distance(in_Vertex, u_Camera);
		// horizontal (xz-plane) distance between vertex and camera
		float lnt = distance(in_Vertex.xz, u_Camera.xz);
		float hgt = abs(u_Camera.y - in_Vertex.y);
 
		// elevation angle of the line of sight above the horizontal
		float sinAngleA = hgt / dist;
		float angleA = asin(sinAngleA);
		// horizontal run from the camera to where the sight line hits the water
		float waterHgt = abs(u_Camera.y - u_WaterLevel);
		float waterLnt = waterHgt / tan(angleA);
 
		// Snell's law with n = 1.333 (strictly it relates angles measured
		// from the surface normal, i.e. the complement of angleA)
		float angleB = asin(sinAngleA / 1.3330);
		// the horizontal run beyond the surface scales with the vertex's depth
		// below (or height above) the water, not with the water level itself
		float dLnt = abs(in_Vertex.y - u_WaterLevel) * tan(angleB);
 
		float imagineLnt = abs(waterLnt + dLnt);
 
		// rescale the horizontal offset from the camera so the vertex
		// appears at the refracted horizontal distance imagineLnt
		float dX = in_Vertex.x - u_Camera.x;
		float dZ = in_Vertex.z - u_Camera.z;
 
		dX = imagineLnt * dX / lnt;
		dZ = imagineLnt * dZ / lnt;
 
		out_Vertex = vec3(u_Camera.x + dX, in_Vertex.y, u_Camera.z + dZ);
	}
	vec3 vertexPos = (gl_ModelViewMatrix * vec4(out_Vertex, 1.0)).xyz;
	// gl_NormalMatrix transforms the normal into eye space, so the light
	// vector is built in eye space too (assuming u_LightPosition is given
	// in the same space as in_Vertex)
	vectorLightSource = normalize((gl_ModelViewMatrix * vec4(u_LightPosition, 1.0)).xyz - vertexPos);
	transformedNormal = normalize(gl_NormalMatrix * in_Normal);
	gl_Position = gl_ProjectionMatrix * gl_ModelViewMatrix * vec4(out_Vertex, 1.0);
 
}

frag:
Code :
 
#version 130	// "in" qualifiers need GLSL 1.30+; gl_FragColor is a compatibility feature
 
uniform vec3 u_Camera;
uniform float u_WaterLevel;
uniform vec3 u_LightPosition;
 
in vec3 transformedNormal;
in vec3 vectorLightSource;
 
void main (void) {
 
	vec3 diffColor = vec3(1.0, 1.0, 0.0);
	vec3 n2 = normalize(transformedNormal);
	vec3 l2 = normalize(vectorLightSource);
	gl_FragColor = vec4(diffColor * max(dot(n2, l2), 0.0), 1.0);
	gl_FragColor = vec4(transformedNormal, 1.0);	// debug output: overwrites the lit color above
 
}
in_Normal and u_LightPosition are used to compute transformedNormal and vectorLightSource in the vertex shader, and those are passed on to the fragment shader. The problem is that u_LightPosition and in_Normal get optimized out by the GLSL compiler: glGetUniformLocation returns -1 for u_LightPosition (and glGetAttribLocation returns -1 for in_Normal). But I really do use them. What is the magic here?
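
For reference, this is roughly how I query the locations after linking (a simplified sketch of my host code; "program" stands for my linked program object, and the names are the ones declared in the shaders above):

Code :
 
/* Simplified sketch of the host-side lookups; "program" is the linked
 * shader program. The first two calls are the ones that return -1. */
GLint lightPosLoc = glGetUniformLocation(program, "u_LightPosition"); /* -1 */
GLint normalLoc   = glGetAttribLocation (program, "in_Normal");       /* -1 */
GLint cameraLoc   = glGetUniformLocation(program, "u_Camera");        /* valid */
GLint waterLoc    = glGetUniformLocation(program, "u_WaterLevel");    /* valid */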