A 32x32 pixel PNG that I’m loading through LodePNG is either not being uploaded into a texture correctly, or something else in my code is preventing it from being applied to anything.
I’ve checked the image data before sending it to OpenGL and it seems valid: the decoder doesn’t return any errors, and I see a plausible spread of 0-255 values. glGetError() never returns anything besides GL_NO_ERROR at runtime.
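For reference, this is roughly the helper I poll glGetError with (checkGL is just my own name for it, nothing special):

void checkGL(const string& label)
{
    // Drain the GL error queue and report anything found,
    // tagged with a label for the call site.
    for (GLenum err = glGetError(); err != GL_NO_ERROR; err = glGetError())
        println("GL error " + toString((int)err) + " at " + label);
}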
Here’s my image loading and binding code. Texture is a class that holds an ID (an unsigned int) plus a width and height.
Texture::Texture(string pngPath)
{
    string path = getWorkDir() + pngPath;
    const char* filename = path.c_str();
    println("Loading '" + path + "'...");

    std::vector<unsigned char> rawImage;
    LodePNG::loadFile(rawImage, filename);

    LodePNG::Decoder decoder;
    std::vector<unsigned char> image;
    decoder.decode(image, rawImage.empty() ? 0 : &rawImage[0], (unsigned)rawImage.size());
    if (decoder.hasError())
        println("PNGDecoder: " + toString(decoder.getError()) + ": " + LodePNG_error_text(decoder.getError()));

    //
    // Flip the image vertically: the decoder produces rows top-down,
    // while OpenGL treats row 0 of the texture as the bottom.
    //
    unsigned char* imagePtr = &image[0];
    int halfTheHeightInPixels = decoder.getHeight() / 2;
    int heightInPixels = decoder.getHeight();
    // Assuming RGBA for 4 components per pixel.
    int numColorComponents = 4;
    // Assuming each color component is an unsigned char.
    int widthInChars = decoder.getWidth() * numColorComponents;
    unsigned char* top = NULL;
    unsigned char* bottom = NULL;
    unsigned char temp = 0;
    for (int h = 0; h < halfTheHeightInPixels; ++h)
    {
        top = imagePtr + h * widthInChars;
        bottom = imagePtr + (heightInPixels - h - 1) * widthInChars;
        for (int w = 0; w < widthInChars; ++w)
        {
            // Swap the bytes of the top and bottom rows in place.
            temp = *top;
            *top = *bottom;
            *bottom = temp;
            ++top;
            ++bottom;
        }
    }

    //
    // Create the OpenGL texture and fill it with our PNG image.
    //
    width = decoder.getWidth();
    height = decoder.getHeight();
    // Allocate one texture handle.
    glGenTextures(1, &ID);
    // Bind this texture handle so we can load the data into it.
    glBindTexture(GL_TEXTURE_2D, ID);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, &image[0]);
    println("loaded texture with id " + toString(ID) + " width: " + toString(width) + " height: " + toString(height));
}
void Texture::bind()
{
    glBindTexture(GL_TEXTURE_2D, ID);
}
And to display the image, I use this function. You can ignore the uScale/vScale vars; they don’t do anything yet.
void drawImage(int x, int y, Texture& image)
{
    float uScale = 1;
    float vScale = 1;
    int w = image.width;
    int h = image.height;

    glEnable(GL_TEXTURE_2D);
    glColor3f(1.0f, 1.0f, 1.0f); // draw with no tint (white)
    image.bind();

    glBegin(GL_QUADS);
        glTexCoord2f(0, 0);
        glVertex2i(x, y);
        glTexCoord2f(1 * uScale, 0);
        glVertex2i(x + w, y);
        glTexCoord2f(1 * uScale, 1 * vScale);
        glVertex2i(x + w, y + h);
        glTexCoord2f(0, 1 * vScale);
        glVertex2i(x, y + h);
    glEnd();

    glDisable(GL_TEXTURE_2D);
}
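For context, a typical call site looks something like this (the path and coordinates are just placeholders):

Texture grass("/textures/grass.png"); // hypothetical path, relative to getWorkDir()
drawImage(16, 16, grass);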
If there isn’t anything wrong with this code, is there some setting that could prevent textures from working (apart from GL_TEXTURE_2D being disabled, which it isn’t)? My program is pretty basic: all I do is draw a few primitives followed by this image, which shows up as a white rectangle.
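In case it matters, here’s how I verified that GL_TEXTURE_2D really is enabled at draw time (just a diagnostic sketch, run right before the quad is issued):

// glIsEnabled should report GL_TRUE, and boundTex should match the texture's ID.
GLint boundTex = 0;
glGetIntegerv(GL_TEXTURE_BINDING_2D, &boundTex);
println("GL_TEXTURE_2D enabled: " + toString((int)glIsEnabled(GL_TEXTURE_2D))
        + ", bound texture: " + toString(boundTex));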
EDIT: Also, here’s my OpenGL init function:
bool InitGL(GLvoid) // all setup for OpenGL goes here
{
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f); // black background
    glDisable(GL_DEPTH_TEST);

    // enable alpha blending
    //glEnable(GL_BLEND);
    //glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    // transparent images
    glAlphaFunc(GL_GREATER, 0.1f);
    glEnable(GL_ALPHA_TEST);

    glEnable(GL_TEXTURE_2D);
    glDisable(GL_LIGHTING);
    glDisable(GL_BLEND);
    glDisable(GL_COLOR_MATERIAL);

    return true; // initialization went OK
}
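And the render loop is basically this (simplified; drawPrimitives and someTexture stand in for my actual code):

glClear(GL_COLOR_BUFFER_BIT);
drawPrimitives();               // a few untextured primitives, these show up fine
drawImage(16, 16, someTexture); // this is the white rectangle
// ...swap buffers...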