enne87

08-02-2016, 11:03 AM

Hi folks,

I would like to implement an edge detection algorithm that is capable of finding the edges of arbitrary 3D meshes. I want to find the edges by detecting normal discontinuities. Furthermore, I want the edges to be one pixel wide.

I render the color values of my scene and the corresponding normal vectors to two different render buffer targets, i.e. to two different textures (screenTexture and normalTexture). The normal texture is initialized with vec3(1,1,1) vectors, and the color texture contains the color value of each fragment. I use the Sobel edge detection algorithm in my fragment shader:

#version 330 core

// Final fragment color written by this post-processing pass.
out vec4 color;

// Scene color from the first render pass (one texel per screen pixel).
uniform sampler2D screenTexture;

// First-pass normals, stored packed into [0,1] (unpacked via * 2 - 1 in main()).
uniform sampler2D normalTexture;

// NOTE(review): declared but never sampled anywhere in this shader.
uniform sampler2D depthTexture;

// Sobel kernel used for the gx gradient. GLSL mat3 is column-major,
// so each line below is one COLUMN of the matrix.
mat3 sx = mat3(

1.0, 2.0, 1.0,

0.0, 0.0, 0.0,

-1.0, -2.0, -1.0

);

// Sobel kernel used for the gy gradient (the transpose pattern of sx).
mat3 sy = mat3(

1.0, 0.0, -1.0,

2.0, 0.0, -2.0,

1.0, 0.0, -1.0

);

// Sobel edge detection on the normal buffer: the fragment is painted black
// where the surrounding normals change rapidly (a normal discontinuity),
// otherwise it keeps the scene color sampled from screenTexture.
void main()
{
    vec4 diffuse = texelFetch(screenTexture, ivec2(gl_FragCoord), 0);

    // Unpack the centre normal from its [0,1] storage back to the [-1, 1] domain.
    // (The original comment said "[-1, 0]", which is wrong: x * 2 - 1 maps [0,1] to [-1,1].)
    vec3 normal = texelFetch(normalTexture, ivec2(gl_FragCoord), 0).xyz * 2.0 - 1.0;

    // I[i][j] = dot(centre normal, normal at offset (i-1, j-1)):
    // a similarity field that is 1.0 on flat surfaces and drops across creases.
    vec3 I[3];
    for (int i = 0; i < 3; i++) {
        // The three samples vary in y, not x — the original names
        // (sampleValLeft/Middle/Right) were misleading.
        vec3 normTop    = texelFetch(normalTexture, ivec2(gl_FragCoord) + ivec2(i - 1, -1), 0).rgb * 2.0 - 1.0;
        vec3 normMiddle = texelFetch(normalTexture, ivec2(gl_FragCoord) + ivec2(i - 1,  0), 0).rgb * 2.0 - 1.0;
        vec3 normBottom = texelFetch(normalTexture, ivec2(gl_FragCoord) + ivec2(i - 1,  1), 0).rgb * 2.0 - 1.0;
        I[i] = vec3(dot(normal, normTop), dot(normal, normMiddle), dot(normal, normBottom));
    }

    // Sobel gradients of the similarity field.
    float gx = dot(sx[0], I[0]) + dot(sx[1], I[1]) + dot(sx[2], I[2]);
    float gy = dot(sy[0], I[0]) + dot(sy[1], I[1]) + dot(sy[2], I[2]);

    // BUG FIX: the original guard was
    //   (gx < 0 && gy < 0) || (gy < 0 && gx < 0)
    // — both disjuncts are byte-identical (copy/paste error), so the test only
    // ever filtered the both-negative quadrant. Keep a single copy of the
    // condition: it discards the second, negative-gradient response Sobel
    // produces on the far side of an edge, keeping the outline one pixel wide.
    if (gx < 0.0 && gy < 0.0) {
        color = diffuse;
        return;
    }

    // Gradient magnitude; length() replaces the hand-rolled
    // sqrt(pow(gx, 2.0) + pow(gy, 2.0)).
    float g = length(vec2(gx, gy));

    // Strong normal discontinuity -> draw the edge in black.
    if (g > 0.2) {
        color = vec4(0.0, 0.0, 0.0, 1.0);
    } else {
        color = diffuse;
    }
}

The results are ok, but the problem is that the shader produces edges that are not consistent in their width, as you can see here:

http://i.stack.imgur.com/XjsGs.png

The problem is that Sobel detects some edges twice: once with a positive dot product (gx > 0 || gy > 0) and once with a negative one (gx < 0 || gy < 0). I tried including the following if statement:

if((gx < 0 && gy < 0) || (gy < 0 && gx < 0) ) {

color = diffuse;

return;

}

But that leads to the result that some unwanted black pixels appear at the edges.

So is there a trick to reduce the line width to one pixel with Sobel?

Thanks!

I would like to implement an edge detection algorithm that is capable of finding the edges of arbitrary 3D meshes. I want to find the edges by detecting normal discontinuities. Furthermore, I want the edges to be one pixel wide.

I render the color values of my scene and the corresponding normal vectors to two different render buffer targets, i.e. to two different textures (screenTexture and normalTexture). The normal texture is initialized with vec3(1,1,1) vectors, and the color texture contains the color value of each fragment. I use the Sobel edge detection algorithm in my fragment shader:

#version 330 core

// Final fragment color written by this post-processing pass.
out vec4 color;

// Scene color from the first render pass (one texel per screen pixel).
uniform sampler2D screenTexture;

// First-pass normals, stored packed into [0,1] (unpacked via * 2 - 1 in main()).
uniform sampler2D normalTexture;

// NOTE(review): declared but never sampled anywhere in this shader.
uniform sampler2D depthTexture;

// Sobel kernel used for the gx gradient. GLSL mat3 is column-major,
// so each line below is one COLUMN of the matrix.
mat3 sx = mat3(

1.0, 2.0, 1.0,

0.0, 0.0, 0.0,

-1.0, -2.0, -1.0

);

// Sobel kernel used for the gy gradient (the transpose pattern of sx).
mat3 sy = mat3(

1.0, 0.0, -1.0,

2.0, 0.0, -2.0,

1.0, 0.0, -1.0

);

// Sobel edge detection on the normal buffer: the fragment is painted black
// where the surrounding normals change rapidly (a normal discontinuity),
// otherwise it keeps the scene color sampled from screenTexture.
void main()
{
    vec4 diffuse = texelFetch(screenTexture, ivec2(gl_FragCoord), 0);

    // Unpack the centre normal from its [0,1] storage back to the [-1, 1] domain.
    // (The original comment said "[-1, 0]", which is wrong: x * 2 - 1 maps [0,1] to [-1,1].)
    vec3 normal = texelFetch(normalTexture, ivec2(gl_FragCoord), 0).xyz * 2.0 - 1.0;

    // I[i][j] = dot(centre normal, normal at offset (i-1, j-1)):
    // a similarity field that is 1.0 on flat surfaces and drops across creases.
    vec3 I[3];
    for (int i = 0; i < 3; i++) {
        // The three samples vary in y, not x — the original names
        // (sampleValLeft/Middle/Right) were misleading.
        vec3 normTop    = texelFetch(normalTexture, ivec2(gl_FragCoord) + ivec2(i - 1, -1), 0).rgb * 2.0 - 1.0;
        vec3 normMiddle = texelFetch(normalTexture, ivec2(gl_FragCoord) + ivec2(i - 1,  0), 0).rgb * 2.0 - 1.0;
        vec3 normBottom = texelFetch(normalTexture, ivec2(gl_FragCoord) + ivec2(i - 1,  1), 0).rgb * 2.0 - 1.0;
        I[i] = vec3(dot(normal, normTop), dot(normal, normMiddle), dot(normal, normBottom));
    }

    // Sobel gradients of the similarity field.
    float gx = dot(sx[0], I[0]) + dot(sx[1], I[1]) + dot(sx[2], I[2]);
    float gy = dot(sy[0], I[0]) + dot(sy[1], I[1]) + dot(sy[2], I[2]);

    // BUG FIX: the original guard was
    //   (gx < 0 && gy < 0) || (gy < 0 && gx < 0)
    // — both disjuncts are byte-identical (copy/paste error), so the test only
    // ever filtered the both-negative quadrant. Keep a single copy of the
    // condition: it discards the second, negative-gradient response Sobel
    // produces on the far side of an edge, keeping the outline one pixel wide.
    if (gx < 0.0 && gy < 0.0) {
        color = diffuse;
        return;
    }

    // Gradient magnitude; length() replaces the hand-rolled
    // sqrt(pow(gx, 2.0) + pow(gy, 2.0)).
    float g = length(vec2(gx, gy));

    // Strong normal discontinuity -> draw the edge in black.
    if (g > 0.2) {
        color = vec4(0.0, 0.0, 0.0, 1.0);
    } else {
        color = diffuse;
    }
}

The results are ok, but the problem is that the shader produces edges that are not consistent in their width, as you can see here:

http://i.stack.imgur.com/XjsGs.png

The problem is that Sobel detects some edges twice: once with a positive dot product (gx > 0 || gy > 0) and once with a negative one (gx < 0 || gy < 0). I tried including the following if statement:

if((gx < 0 && gy < 0) || (gy < 0 && gx < 0) ) {

color = diffuse;

return;

}

But that leads to the result that some unwanted black pixels appear at the edges.

So is there a trick to reduce the line width to one pixel with Sobel?

Thanks!