Hello there !

I'm sorry for my bad english, but I'm going do the maximum to you can understand a maximum of thing.

So, I begin with the screen space algorithm.
I want to realize a ambient occlusion with screen space.

When I search on the internet, I found only difficult paper (for me it's difficult, perhaps, one day I can read and understand everything ^_^) or algorithm with linear Depth and spaceView and normalView...

But, for my deferred light, I use a world space coordinate because when I move my camera, if I use, for the normal for example, normal = inverse(transpose(view * model)) * in_Normal, the result is not good (like lights move and normally they are static), therefore, I use normal = model * in_Normal.

So, I try to adapt this tutorial http://blog.evoserv.at/index.php/201...ng-openglglsl/
with my world space coordinate.

But the occlusionmap is not good

Click image for larger version. 

Name:	Capture d'écran de 2014-10-04 02:14:40.jpg 
Views:	153 
Size:	19.3 KB 
ID:	1460

and

Click image for larger version. 

Name:	Capture d'écran de 2014-10-04 02:14:52.jpg 
Views:	146 
Size:	19.1 KB 
ID:	1461

Maybe can you explain me these issues, or maybe can you explain to me how can I use normalView in my deferred lighting too.

Now, I post my differents codes for my shaders, and I begin with the most important : AmbientOcclusionShaderFragment.

After, you are going to see other shaders : gBuffer and light for exemple ^^

Code :
#version 440 core
 
layout(std140, binding = 0) readonly buffer DeviceSize
{
    uvec4 size;
};
 
layout(binding = 0) uniform sampler2D normalMap;
layout(binding = 1) uniform sampler2D positionMap;
 
const vec2 poisson16[] = vec2[](    // These are the Poisson Disk Samples
                                vec2( -0.94201624,  -0.39906216 ),
                                vec2(  0.94558609,  -0.76890725 ),
                                vec2( -0.094184101, -0.92938870 ),
                                vec2(  0.34495938,   0.29387760 ),
                                vec2( -0.91588581,   0.45771432 ),
                                vec2( -0.81544232,  -0.87912464 ),
                                vec2( -0.38277543,   0.27676845 ),
                                vec2(  0.97484398,   0.75648379 ),
                                vec2(  0.44323325,  -0.97511554 ),
                                vec2(  0.53742981,  -0.47373420 ),
                                vec2( -0.26496911,  -0.41893023 ),
                                vec2(  0.79197514,   0.19090188 ),
                                vec2( -0.24188840,   0.99706507 ),
                                vec2( -0.81409955,   0.91437590 ),
                                vec2(  0.19984126,   0.78641367 ),
                                vec2(  0.14383161,  -0.14100790 )
                               );
 
out vec4 out_Color;
 
void main(void)
{
    vec2 texCoord = gl_FragCoord.xy / size.xy;
    vec3 normal = texture(normalMap, texCoord).xyz;
    vec3 position = texture(positionMap, texCoord).xyz;
 
    float AO = 0.0;
 
    vec2 filterRadius = vec2(10.0 / float(size.x), 10.0 / float((size.y)));
    float distanceThreshold = 5.0;
 
    int i;
 
    for(i = 0; i < 16; ++i)
    {
        vec2 texCoordSample = texCoord + poisson16[i] * filterRadius;
 
        vec3 samplePos = texture(positionMap, texCoordSample).xyz;
        vec3 sampleDir = normalize(samplePos - position);
 
        float nDotS = max(dot(normal, sampleDir), 0.0);
        float dist = distance(position, samplePos);
 
        float a = 1.0 - smoothstep(distanceThreshold, distanceThreshold * 2, dist);
 
        AO += (a * nDotS);
    }
 
    out_Color = vec4(AO);
}

VertexShader : Model
Code :
#version 440 core
#extension GL_ARB_shader_draw_parameters : enable
 
struct MVPandM
{
    mat4 MVP;
    mat4 M;
};
 
layout(std140, binding = 1) readonly buffer MatrixMVPandM
{
    MVPandM mat[];
};
 
layout(location = 0) in vec4 in_Pos;
layout(location = 1) in vec4 in_Norm;
layout(location = 2) in vec4 in_TexCoord;
 
out Block
{
    vec4 texCoord;
    vec4 norm;
    vec4 pos;
    flat int drawID;
}Out;
 
void main(void)
{
    Out.drawID = gl_DrawIDARB; // glMultiDraw...
 
    Out.texCoord = in_TexCoord;
    Out.norm = mat[gl_InstanceID].M * vec4(in_Norm.xyz, 0.0); /* mat[gl_InstanceID].MV * vec4(in_Norm.xyz, 0.0) don't work*/
    Out.pos = mat[gl_InstanceID].M * vec4(in_Pos.xyz, 1.0);
 
    gl_Position = mat[gl_InstanceID].MVP * vec4(in_Pos.xyz, 1.0);
}

Fragment Shaders put diffuseTexture, normalize(norm) and pos in three textures.

M is the model matrix(own translation, rotation and scaling)
MVP is the Projection * View * Model matrix.

and lightFragment
Code :
#version 440 core
 
layout(std140, binding = 0) readonly buffer DeviceSize
{
    uvec4 size;
};
 
struct PointLightStruct
{
    vec4 posRadius;
    vec4 color;
};
 
layout(std140, binding = 3) readonly buffer PointLight
{
    PointLightStruct pl[];
};
 
flat in int drawID;
 
// layout(binding = 0) uniform sampler2D diffuseTex;
layout(binding = 1) uniform sampler2D normalTex;
layout(binding = 2) uniform sampler2D positionTex;
 
layout(location = 0) out vec3 fragColor;
 
void main(void)
{
    vec3 normal = texture(normalTex, gl_FragCoord.xy / size.zw).xyz;
    vec3 position = texture(positionTex, gl_FragCoord.xy / size.zw).xyz;
 
    vec3 vertexToLight = pl[drawID].posRadius.xyz - position;
    float attenuation = max(0.0, 1.0 - length(vertexToLight) / pl[drawID].posRadius.w);
 
    vertexToLight = normalize(vertexToLight);
    float lambertCoeff = max(0.0, dot(normal, vertexToLight));
 
    fragColor = lambertCoeff * attenuation * pl[drawID].color.rgb;
}


thank you very much :-)

And sorry again for my english...