texture mapping and sdl

Please help — I am trying to get texture mapping to work, but I can't seem to get the image to display onto the screen. All I get is a white box the size of the image.

#include <SDL/SDL.h>
#include <gl/gl.h>
#include <gl/glu.h>
#include <gl/glaux.h>

int main(int argc, char *argv[]){
SDL_Event event;
SDL_Init(SDL_INIT_VIDEO);
SDL_GL_SetAttribute( SDL_GL_RED_SIZE, 5 );
SDL_GL_SetAttribute( SDL_GL_GREEN_SIZE, 5 );
SDL_GL_SetAttribute( SDL_GL_BLUE_SIZE, 5 );
SDL_GL_SetAttribute( SDL_GL_DEPTH_SIZE, 16 );
SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );
SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );
SDL_SetVideoMode(640, 480, 0, SDL_OPENGL);

// Reset coordinate system
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, 640, 480, 0, -1, 1);

glClearColor(0.0f,0.0f,0.0f,1.0f); // color to clear framebuffer to
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear framebuffer

glEnable(GL_TEXTURE_2D);
GLuint textureName;
AUX_RGBImageRec *image;
image = auxDIBImageLoad(“link.bmp”);
glTexImage2D(GL_TEXTURE_2D, 0, 3, image->sizeX, image->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, image->data);
glGenTextures(1, &textureName);
glBindTexture(GL_TEXTURE_2D, textureName);
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0);
glVertex3f(0.0, 0.0, 0.0);
glTexCoord2f(0.0, 26.0);
glVertex3f(0.0, 26.0, 0.0);
glTexCoord2f(20.0, 26.0);
glVertex3f(20.0, 26.0, 0.0);
glTexCoord2f(20.0, 0.0);
glVertex3f(20.0, 0.0, 0.0);
glEnd();
SDL_GL_SwapBuffers();
glDisable(GL_TEXTURE_2D);

int done;
for(done = 0; !done;){
while ( SDL_PollEvent(&event) ) {
if ( event.type == SDL_QUIT ){
done = 1;
}
if(event.key.keysym.sym == SDLK_ESCAPE){
done = 1;
}
}
}
SDL_Quit();
return(0);
}

Perhaps you are using pixel units rather than normalized units (0.0 to 1.0) when you call glTexCoord2f().

Could that be the problem?

First of all omit the SDL_GL_SetAttribute calls (they are not needed; you will get some other defaults which may fit better than a 15bit color buffer with a 16bit depth buffer; you want 32 bit color and probably a 24 bit depth buffer, double buffering is the default, so you also don’t have to specify this either) and do SDL_SetVideoMode(640, 480, 32, SDL_OPENGL);

Also, you probably want to use normalized texture coordinates in the range [0, 1] in your glTexCoord calls, unless you want your texture to be tiled multiple times.

You are loading the image data before you even create a texture object which is wrong! Move your glTexImage downwards after you glBindTexture the texture. It should look like this:

glGenTextures(1, &textureName);
glBindTexture(GL_TEXTURE_2D, textureName);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, image->sizeX, image->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, image->data);

Maybe you should also check out GLIntercept which can be used to detect OpenGL errors.

[ www.trenki.net | vector_math (3d math library) | software renderer ]

FYI: Don't use GL_RGB as the third parameter (the internal format) to glTexImage2D — drivers give different precisions for unsized formats.

Always specify the precision you want — typically GL_RGB8. (Note that this applies only to the third parameter; the seventh parameter, the client data format, must remain an unsized enum such as GL_RGB.)

Trenki, I made the changes you suggested below, but I am still getting a white box. Any other suggestions?
#include <SDL/SDL.h>
#include <gl/gl.h>
#include <gl/glu.h>
#include <gl/glaux.h>

int main(int argc, char *argv[]){
SDL_Event event;
SDL_Init(SDL_INIT_VIDEO);
SDL_SetVideoMode(640, 480, 32, SDL_OPENGL);

// Reset coordinate system
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, 640, 480, 0, -1, 1);

glClearColor(0.0f,0.0f,0.0f,1.0f); // color to clear framebuffer to
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear framebuffer

GLuint textureName;
AUX_RGBImageRec *image;
image = auxDIBImageLoad(“image.bmp”);
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &textureName);
glBindTexture(GL_TEXTURE_2D, textureName);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, image->sizeX, image->sizeY, 0, GL_RGB8, GL_UNSIGNED_BYTE, image->data);
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 1.0f);
glVertex3f(0.0f, 0.0f, 0.0f);
glTexCoord2f(0.0f, 0.0f);
glVertex3f(0.0f, 100.0f, 0.0f);
glTexCoord2f(1.0f, 0.0f);
glVertex3f(100.0f, 100.0f, 0.0f);
glTexCoord2f(1.0f, 1.0f);
glVertex3f(100.0f, 0.0f, 0.0f);
glEnd();
SDL_GL_SwapBuffers();
glDisable(GL_TEXTURE_2D);

int done;
for(done = 0; !done;){
while ( SDL_PollEvent(&event) ) {
if ( event.type == SDL_QUIT ){
done = 1;
}
if(event.key.keysym.sym == SDLK_ESCAPE){
done = 1;
}
}
}
SDL_Quit();
return(0);
}

Set the texture parameters, specifically those for the minification filter. The minification filter is normally set to use mipmaps, but as you don't specify any, the texture isn't complete. Set the minification filter to GL_LINEAR.

You could also try to generate the mipmaps with gluBuild2DMipmaps or even better to set the GL_GENERATE_MIPMAP texture parameter to GL_TRUE before uploading the texture data.

[ www.trenki.net | vector_math (3d math library) | software renderer ]

Thanks Trenki for all your help.
I found out what my problem was: I needed to change the third parameter below to 3 for RGB colors. It works fine now. I also added glTexParameteri calls to scale my image.

glTexImage2D(GL_TEXTURE_2D, 0, 3, image->sizeX, image->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, image->data);

glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); // scale linearly when image bigger than texture
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); // scale linearly when image smaller than texture

If you are using mipmapping, use GL_LINEAR_MIPMAP_LINEAR for the GL_TEXTURE_MIN_FILTER for better results.

This causes it to sample from multiple levels of detail when interpolating.