Hi there!
I’m currently implementing some fire effects using GLSL fragment shaders, and the following fragment shader works fine so far:
uniform float pscale; // common perturbation scale
uniform vec2 bounds; // texture bounds
uniform float lod; // some level of detail
uniform sampler2D flames; // flames texture
uniform sampler2D heat; // perturbation map (s offs, t offs, 0, offs scale)
uniform sampler2D scene; // scene texture
void main(void) {
// read perturbation value
vec4 p = texture2D(heat, gl_TexCoord[0].st / lod);
// scale perturbation by the fragment's depth
// p.a *= (1.0 - gl_FragCoord.z);
// compute perturbed texture coords
vec2 uv = gl_TexCoord[0].st + (p.xy - vec2(0.5, 0.5)) * p.a * pscale;
// clamp perturbed texture coords to the bounds
vec2 st = clamp(uv, vec2(0.0, 0.0), bounds);
// read flames & scene colors
vec4 fcol = texture2D(flames, st / lod);
vec4 scol = texture2D(scene, st);
// output mixed flames & scene colors
gl_FragColor = mix(scol, fcol, fcol.a);
}
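In case it matters, the host side feeds the shader roughly like this (just a sketch: the program handle, the parameter values and the texture unit assignments below are placeholders, not my exact code):

/* sketch of the host-side setup; "fireProgram" and the numeric values are placeholders */
glUseProgram(fireProgram);
glUniform1f(glGetUniformLocation(fireProgram, "pscale"), 0.05f);      /* common perturbation scale */
glUniform2f(glGetUniformLocation(fireProgram, "bounds"), 1.0f, 1.0f); /* texture bounds */
glUniform1f(glGetUniformLocation(fireProgram, "lod"), 1.0f);          /* level of detail */
glUniform1i(glGetUniformLocation(fireProgram, "flames"), 0);          /* flames texture on unit 0 */
glUniform1i(glGetUniformLocation(fireProgram, "heat"), 1);            /* heat/perturbation map on unit 1 */
glUniform1i(glGetUniformLocation(fireProgram, "scene"), 2);           /* scene texture on unit 2 */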
However, when I uncomment the scale-perturbation-by-gl_FragCoord.z line, OpenGL drops back to software rendering:
- it makes no difference whether GL_DEPTH_TEST is disabled or enabled with glDepthFunc(GL_ALWAYS) (see the sketch after this list)
- glDepthRange is left at its default (0.0 to 1.0, I think)
- glPolygonOffset is untouched
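To make the depth state explicit, the two cases I tested amount to roughly this (a sketch of the effective state, not my literal code):

/* case 1: depth test disabled entirely */
glDisable(GL_DEPTH_TEST);

/* case 2: depth test enabled, but always passing */
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_ALWAYS);

/* in both cases: glDepthRange stays at its default of (0.0, 1.0)
   and glPolygonOffset is never called */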
Env: ATI Radeon 9800 Pro w/ Catalyst 6.2, Win2k
Does anybody have any clue?