Well… I’m using glReadPixels to take a screen capture of what I’ve drawn with OpenGL. The code is as follows:
GLuint* pixels;
Rect theRect;
theRect.top = 0;
theRect.bottom = frameHeight;
theRect.left = 0;
theRect.right = frameWidth;
// Allocate the buffer we'll read the pixels into
pixels = (GLuint*) malloc(frameWidth * frameHeight * sizeof(GLuint));
GLenum err = glGetError();  // read once: each glGetError call pops (and clears) the flag
if (err == GL_INVALID_ENUM)
    dprintf("Ack, format is not acceptable before taking the screen capture. Why? Beats me.\n");
if (err == GL_INVALID_OPERATION)
    dprintf("Ack, make sure this isn't between a glBegin/glEnd pair before taking the screen capture. Why? Beats me.\n");
// Read from the front buffer; rows packed to 4-byte alignment, no byte swapping
glReadBuffer(GL_FRONT);
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glPixelStorei(GL_PACK_SWAP_BYTES, 0);
glReadPixels(0, 0, frameWidth, frameHeight, GL_BGRA_EXT, GL_UNSIGNED_INT_8_8_8_8_REV, pixels);
err = glGetError();  // again, read once so the later checks don't miss the error
if (err == GL_INVALID_ENUM)
    dprintf("Ack, format is not acceptable after the screen capture. This should be reported so I can get the user's hardware specs.\n");
if (err == GL_INVALID_OPERATION)
    dprintf("Ack, make sure this isn't between a glBegin/glEnd pair. This should be reported so I can get the user's hardware specs.\n");
if (err == GL_INVALID_VALUE)
    dprintf("Ack, um, yeah, this error shouldn't really happen. This should be reported so I can get the user's hardware specs.\n");
SimpleGraphicsCompression::compress((unsigned char*)pixels, theRect, frameWidth * sizeof(GLuint), 32);
free(pixels);
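Side note on the error checks: since glGetError only pops one error flag per call, I've been meaning to replace the one-shot checks with a small drain loop. A sketch (checkGLErrors is just my own name for it, and I'm assuming my dprintf takes printf-style arguments):

static void checkGLErrors(const char* where)
{
    GLenum err;
    // glGetError returns (and clears) one flag per call, so loop until the queue is empty
    while ((err = glGetError()) != GL_NO_ERROR)
        dprintf("GL error 0x%x at %s\n", err, where);
}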
So like, this works great on my NVIDIA GeForce 2 MX, but not on my ATI Rage 128.
Any clues?
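One thing I'm tempted to try, in case the Rage 128 driver just doesn't advertise GL_EXT_bgra or the packed-pixel types (that's a guess on my part, not something I've confirmed): check the extension string and fall back to plain GL_RGBA / GL_UNSIGNED_BYTE, which any OpenGL 1.x driver has to support. Rough sketch:

#include <string.h>

const char* ext = (const char*) glGetString(GL_EXTENSIONS);
GLenum format = GL_RGBA;           // core format, safe everywhere
GLenum type   = GL_UNSIGNED_BYTE;  // core type, safe everywhere
// Crude substring test of the extension string; good enough for this name
if (ext != NULL && strstr(ext, "GL_EXT_bgra") != NULL)
{
    // Driver claims BGRA; the _REV packed type still needs GL 1.2
    // (or a packed-pixels extension), so this part is optimistic
    format = GL_BGRA_EXT;
    type   = GL_UNSIGNED_INT_8_8_8_8_REV;
}
glReadPixels(0, 0, frameWidth, frameHeight, format, type, pixels);

Of course, in the fallback case the buffer ends up in RGBA byte order instead of BGRA, so SimpleGraphicsCompression::compress would have to be told which layout it's getting.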