PDA

View Full Version : How can I use 8 bit texture without glColorTableEXT ????



gggggllll
05-28-2003, 01:05 AM
I usually use this code for 8-bit textures:


/* EXT_paletted_texture entry point, fetched at run time via WGL.
 * wglGetProcAddress returns NULL when the current driver does not export
 * the extension (the post below reports exactly that on a GeForce FX 5200),
 * so every use site must check the pointer before calling it. */
PFNGLCOLORTABLEEXTPROC glColorTableEXT;
glColorTableEXT = (PFNGLCOLORTABLEEXTPROC)wglGetProcAddress("glColorTableEXT");

I bought a GeForce FX 5200 yesterday and tried that code.

Man! glColorTableEXT is NULL !!

How can I use 8-bit textures?

I don't want to waste memory by converting the data to 24-bit.

I tried to use glPixelMap, but it doesn't work — maybe I don't know how to use it correctly.

/*
 * Load one 1024x1024 8-bit (palette-indexed) frame from m_szFileName and
 * upload it as GL texture nTexture.  File layout implied by the seek:
 * 8-byte header, 256*3-byte BGR palette (held in m_pDib), then consecutive
 * 1024*1024-byte index frames selected by nIndex.
 * Uses glColorTableEXT when the driver exposes EXT_paletted_texture,
 * otherwise falls back to the core GL_MAP_COLOR pixel-transfer path
 * (texture is then stored expanded to RGB on the GPU).
 */
fp = fopen(m_szFileName.c_str(), "rb");
if (!fp)
    return NULL;
/* SEEK_SET (== the literal 0 used before): skip header + palette, then
 * nIndex full frames. */
fseek(fp, 4 * 2 + 256 * 3 + 1024 * 1024 * nIndex, SEEK_SET);

pBits = (BYTE*)malloc(1024 * 1024);
if (!pBits) {                       /* BUG FIX: malloc result was never checked */
    fclose(fp);
    return NULL;
}
if (fread(pBits, 1024 * 1024, 1, fp) != 1) {  /* BUG FIX: short/failed read */
    free(pBits);
    fclose(fp);
    return NULL;
}
/* BUG FIX: fp was never closed in the original snippet (handle leak).
 * NOTE(review): remove this if some caller outside this fragment closes fp. */
fclose(fp);

glGenTextures(1, &nTexture);
glBindTexture(GL_TEXTURE_2D, nTexture);

glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

if (glColorTableEXT != NULL) {
    /* Paletted-texture path: GPU stores the 8-bit indices directly. */
    glColorTableEXT(GL_TEXTURE_2D, GL_RGB8, 256, GL_BGR_EXT, GL_UNSIGNED_BYTE, m_pDib);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_COLOR_INDEX8_EXT, 1024, 1024, 0,
                 GL_COLOR_INDEX, GL_UNSIGNED_BYTE, pBits);
}
else {
    /* Fallback: expand indices to RGB during upload via the index-to-RGBA
     * pixel maps.  Works on drivers that dropped EXT_paletted_texture. */
    int i;
    float red[256], green[256], blue[256];

    /* BUG FIX: m_pDib is BGR-ordered (see GL_BGR_EXT above), so red lives
     * at offset +2 and blue at +0 — the original sampled them swapped.
     * BUG FIX: divide by 255 so palette value 255 maps to exactly 1.0. */
    for (i = 0; i < 256; i++) {
        blue[i]  = (float)(m_pDib[3 * i]     / 255.0);
        green[i] = (float)(m_pDib[3 * i + 1] / 255.0);
        red[i]   = (float)(m_pDib[3 * i + 2] / 255.0);
    }

    glPixelTransferi(GL_MAP_COLOR, GL_TRUE);   /* was TRUE; use the GL constant */

    /* Arrays decay to float*; no (const float *)&array cast needed. */
    glPixelMapfv(GL_PIXEL_MAP_I_TO_R, 256, red);
    glPixelMapfv(GL_PIXEL_MAP_I_TO_G, 256, green);
    glPixelMapfv(GL_PIXEL_MAP_I_TO_B, 256, blue);

    /* BUG FIX: the frame read above is 1024x1024; uploading 256x256 used
     * only the first 64 KiB of indices (the commented-out 1024 line was
     * the intended one).  GL_RGB8 replaces the legacy literal 3. */
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, 1024, 1024, 0,
                 GL_COLOR_INDEX, GL_UNSIGNED_BYTE, pBits);

    /* Restore the default so later pixel uploads are not remapped. */
    glPixelTransferi(GL_MAP_COLOR, GL_FALSE);
}

free(pBits);


Is this the right way to use it?