passing GL_ALPHA to glTexImage2D

I have a shader for text rendering. I have been playing around trying to get a thin outline around the letters. I have come across something I don’t understand. I got the shader and atlas creation code from the web, and a line in the atlas creation code has me bothered.

glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, a->w, a->h, 0, GL_ALPHA, GL_UNSIGNED_BYTE, 0);

According to the reference pages (for OpenGL 4), GL_ALPHA is not a supported internal format.

When I just render regular text, everything works fine. I would have expected the texture to have a problem. Is this a legacy issue?

[QUOTE=advorak;1277882]According to the reference pages (for openGL 4), GL_ALPHA is not a supported internal format.

When I just render regular text, everything works fine. I would have expected the texture to have a problem. Is this a legacy issue?[/QUOTE]

GL_ALPHA is deprecated. It results in a texture where the red, green and blue components are all zero, while the alpha component is stored in the texture.

The modern approach is to use GL_RED and have the shader use the first (nominally red) component as alpha.

I have changed the shader to look for an edge of the text. It seems to outline the text OK, but I’m getting a side effect. It appears the unused space between the letters in the texture is not zero. When I force an extra pixel around the buffer returned by FreeType, I still get pixels being set around the edge of the character’s bounding box. I have tried to increase the space between the characters in the texture to no avail. Should I be loading a zero buffer into the texture before loading the glyphs? Below is the function to create the atlas, and the fragment shader.

/*
 * create_atlas - build one texture atlas holding glyphs for character
 * codes 32..176 (printable ASCII plus the start of Latin-1, which
 * includes the degree sign at 0xB0).
 *
 * a      - atlas record to fill in (GL texture id, dimensions, per-glyph metrics)
 * height - pixel size handed to FT_Set_Pixel_Sizes
 * Output - currently unused; kept for interface compatibility
 *
 * Relies on the file-scope FreeType `face` and an already-bound shader
 * program (glUniform1i on TEX_LOC).  Each glyph is stored with a 1-texel
 * transparent border so the outline shader can sample neighbours without
 * bleeding into the adjacent glyph.
 */
void create_atlas(Atlas *a, int height, int Output )
{
	FT_GlyphSlot g = face->glyph;
	int i;
	int roww = 0;
	int rowh = 0;
	int ox = 0;
	int oy = 0;
	unsigned char *zero;

	a->w = 0;
	a->h = 0;

	FT_Set_Pixel_Sizes(face, 0, height);

	memset(a->c, 0, sizeof a->c);

	/* Pass 1: measure the single-row texture needed for all glyphs.
	 * Budget width+3 per glyph to match the `ox += width + 3` advance in
	 * pass 2 -- budgeting only width+2 left the texture one texel short
	 * per glyph, so trailing glyphs (e.g. the degree sign) landed outside
	 * the texture and were silently rejected by glTexSubImage2D. */
	for (i = 32; i < 177; i++) {
		if (FT_Load_Char(face, i, FT_LOAD_RENDER)) {
			fprintf(stderr, "Loading character %c failed!\n", i);
			continue;
		}
		roww += g->bitmap.width + 3;
		rowh = MAX(rowh, g->bitmap.rows + 2);
	}

	a->w = MAX(a->w, roww);
	a->h += rowh;

	/* Create the texture that will hold all the glyphs. */
	glActiveTexture(GL_TEXTURE0);
	glGenTextures(1, &a->tex);
	glBindTexture(GL_TEXTURE_2D, a->tex);
	glUniform1i(TEX_LOC, 0);

	/* Upload a zero-filled image instead of passing NULL: with NULL the
	 * texel contents are undefined, and the garbage in the padding
	 * between glyphs showed up as stray outline pixels in the shader. */
	zero = calloc((size_t)a->w * (size_t)a->h, 1);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, a->w, a->h, 0, GL_ALPHA, GL_UNSIGNED_BYTE, zero);
	free(zero);

	/* 1-byte alignment: glyph rows are tightly packed 8-bit data. */
	glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

	/* Clamping to edges is important to prevent artifacts when scaling. */
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

	/* Pass 2: paste every glyph bitmap into the texture, recording the
	 * offsets and metrics needed at render time. */
	rowh = 0;

	for (i = 32; i < 177; i++) {
		if (FT_Load_Char(face, i, FT_LOAD_RENDER)) {
			fprintf(stderr, "Loading character %c failed!\n", i);
			continue;
		}

		/* +1 so the glyph sits inside its transparent 1-texel border. */
		glTexSubImage2D(GL_TEXTURE_2D, 0, ox + 1, oy + 1, g->bitmap.width, g->bitmap.rows, GL_ALPHA, GL_UNSIGNED_BYTE, g->bitmap.buffer);

		a->c[i].ax = g->advance.x >> 6;   /* advance is 26.6 fixed point */
		a->c[i].ay = g->advance.y >> 6;

		a->c[i].bw = g->bitmap.width + 2; /* cell size includes the border */
		a->c[i].bh = g->bitmap.rows + 2;

		a->c[i].bl = g->bitmap_left + 1;  /* shifted to match the border */
		a->c[i].bt = g->bitmap_top + 1;

		a->c[i].tx = ox / (float)a->w;    /* cell origin in texture coords */
		a->c[i].ty = oy / (float)a->h;

		rowh = MAX(rowh, g->bitmap.rows);
		ox += g->bitmap.width + 3;
	}

	fprintf(stderr, "Generated a %d x %d (%d kb) texture atlas\n", a->w, a->h, a->w * a->h / 1024);
}

In the shader below, inverse is never set. I’m just trying to get basic text outlined for now.

#version 450

layout (location = 0) in vec2 texpos;

layout (location = 1) uniform sampler2D tex;    // glyph atlas (alpha-only)
layout (location = 2) uniform vec4 color;       // text colour
layout (location = 3) uniform int inverse;      // debug mode: colour-code outline direction
layout (location = 4) uniform float width;      // atlas width in texels
layout (location = 5) uniform float height;     // atlas height in texels

// #version 450 core removed gl_FragColor; a user-declared output is required.
layout (location = 0) out vec4 fragColor;

/*
 * Text fragment shader with a one-texel outline.  For texels outside the
 * glyph (alpha == 0) it checks the four neighbours; if any neighbour is
 * sufficiently "inked" (> 0.392) it draws an outline texel.  When
 * inverse == 1 the outline is colour-coded by which neighbour triggered
 * (left = red, right = green, top = blue, bottom = black) for debugging.
 */
void main(void) {
    // One-texel step in normalized texture coordinates.
    float dx = 1.0 / width;
    float dy = 1.0 / height;

    // texture() replaces texture2D(), which was removed from core GLSL.
    float center = texture(tex, texpos).a;

    if (center == 0.0) {
        // Neighbour order matters: left, right, top, bottom (first hit wins),
        // matching the original if/else-if chain.
        vec2 offs[4] = vec2[4](vec2(-dx, 0.0), vec2(dx, 0.0),
                               vec2(0.0, dy), vec2(0.0, -dy));
        vec3 dbg[4] = vec3[4](vec3(1.0, 0.0, 0.0), vec3(0.0, 1.0, 0.0),
                              vec3(0.0, 0.0, 1.0), vec3(0.0, 0.0, 0.0));
        for (int i = 0; i < 4; i++) {
            float a = texture(tex, texpos + offs[i]).a;
            if (a > 0.392) {
                fragColor = vec4(inverse == 1 ? dbg[i] : vec3(0.0), a);
                return;
            }
        }
        // No inked neighbour: transparent (alpha is 0 here).
        fragColor = vec4(1.0, 1.0, 1.0, center) * color;
    } else if (center < 0.275) {
        // Faint anti-aliased fringe of the glyph.
        if (inverse == 1) {
            fragColor = vec4(vec3(0.1 * color), center);
        } else {
            fragColor = vec4(0.0, 0.0, 0.0, 1.0 - center);
        }
    } else {
        // Glyph interior.
        if (inverse == 1) {
            fragColor = vec4(0.1, 0.1, 0.1, center) * color;
        } else {
            fragColor = vec4(1.0, 1.0, 1.0, center) * color;
        }
    }
}

It appears initializing the texture to zero works. My characters no longer have extra pixels set at the bounding box edge. Yahoo!!! Now I just have to figure out what happened to my degree symbol. It seems to have disappeared.

BTW, I just want to thank the senior members of this thread for diligently responding to us newbies’ stupid questions time and again. You folks are royalty in my mind. :biggrin-new: