I’m doing some 2d texture stuff and this texture:
www.tcnj.edu/~lombar26/brick.bmp
is supposed to tile across 2 large triangles. But instead it comes out like this:
www.tcnj.edu/~lombar26/screenshot.JPG
In another program with almost identical loading and drawing code it comes out looking fine. I’m in win32 using dev-cpp and glut. Drawing code is like…
glLoadIdentity( );
glEnable( GL_TEXTURE_2D );
glRotatef( angle, 0.0, 0.0, 1.0 );
// BUG FIX: z-scale was 0.0, which makes the modelview matrix singular and
// multiplies the -200 z translation below down to zero (matrix order is
// Rotate * Scale * Translate, so the translate happens in the scaled space).
// Use 1.0 for a pure 2D scale.
glScalef( scaleFactor, scaleFactor, 1.0 );
glTranslatef( -players[ myIndex ].position.x, -players[ myIndex ].position.y, -200.0f );
// One glBegin/glEnd pair per triangle because each triangle may bind a
// different texture; binding is illegal inside glBegin/glEnd.
for( int i = 0; i < numTris; i++ )
{
    glBindTexture( GL_TEXTURE_2D, texs[ tris[ i ].texIndex ] );
    glBegin( GL_TRIANGLES );
    glTexCoord2f( tris[ i ].texel[ 0 ].x, tris[ i ].texel[ 0 ].y );
    glVertex2f( tris[ i ].v[ 0 ].x, tris[ i ].v[ 0 ].y );
    glTexCoord2f( tris[ i ].texel[ 1 ].x, tris[ i ].texel[ 1 ].y );
    glVertex2f( tris[ i ].v[ 1 ].x, tris[ i ].v[ 1 ].y );
    glTexCoord2f( tris[ i ].texel[ 2 ].x, tris[ i ].texel[ 2 ].y );
    glVertex2f( tris[ i ].v[ 2 ].x, tris[ i ].v[ 2 ].y );
    glEnd( );
}
glDisable( GL_TEXTURE_2D );
loading code is like…
void GamePlay::loadTexs ( )
{
Bitmap24Bit bmp;
texs = new GLuint [ numTexs ];
glEnable( GL_TEXTURE_2D );
glGenTextures( numTexs, &texs[ 0 ] );
for( int i = 0; i < numTexs; i++ )
{
bmp.loadBMP( filenames[ i ] );
glBindTexture( GL_TEXTURE_2D, texs[ i ] );
//glPixelStorei( GL_UNPACK_ALIGNMENT, 1 );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
//glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
//glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
//glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, bmp.width, bmp.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, bmp.imageData );
bmp.releaseData();
}
}
The loading code in the program that actually works is an exact copy of the above, so I'm not sure what the problem is.