How to use texture format of GL_RGB10_A2UI?

Hi,all:
I want to write a demo whose texture format is GL_RGB10_A2UI.


/*
 * One RGBA texel as produced by makeCheckImages().
 *
 * NOTE(review): each field is a GLubyte (8 bits), but the target format
 * GL_RGB10_A2UI stores 10 bits per color channel and 2 bits of alpha.
 * Values above 255 cannot be represented here, so 8-bit data always ends
 * up in the low bits of each 10-bit channel.
 *
 * The original struct tag "__TEXTURE_" began with a double underscore,
 * which is reserved for the implementation (C11 7.1.3); the tag is
 * renamed, while the public typedef names are unchanged.
 */
typedef struct TEXTURE_RGB10A2UI_{
        GLubyte R;
        GLubyte G;
        GLubyte B;
        GLubyte A;
}TexRGB10A2UI,*pTexRGB10A2UI;
GLuint RGBA10_10_10_2_UI_CAST(const pTexRGB10A2UI texture)
{
        return (GLuint)((GLuint)(texture->R & 0x03FFu) << 22u |\
                        (GLuint)(texture->G & 0x03FFu) << 12u |\
                        (GLuint)(texture->B & 0x03FFu) <<  2u |\
                        (GLuint)(texture->A & 0x0003u) <<  0u);
}


static void makeCheckImages(void)
{
   int i, j, c;


   for (i = 0; i < checkImageHeight; i++) 
    {
      for (j = 0; j < checkImageWidth; j++) 
    {
         c = ((((i&0x8)==0)^((j&0x8)==0)))*255;
         checkImage[i][j][0] = (GLuint) c;
     checkImage[i][j][1] = (GLuint) c;
      checkImage[i][j][2] = (GLuint) c;
     checkImage[i][j][3] = (GLuint) 255;
    RGB10A2UIImage[i][j] = RGBA10_10_10_2_UI_CAST((pTexRGB10A2UI)checkImage[i][j]);
    }
     }    
}


/*
 * One-time GL setup: clear state, build the checker images, and upload
 * the packed data as a GL_RGB10_A2UI integer texture.
 *
 * NOTE(review): GL_RGB10_A2UI is an *integer* texture format. Sampling it
 * through the fixed-function pipeline (glEnable(GL_TEXTURE_2D) + glBegin)
 * yields undefined results — typically a white quad. A GLSL shader with a
 * usampler2D is required to read it; see the replies in this thread.
 */
static void init(void)
{
   glClearColor (0.0, 0.0, 1.0, 0.0);
   glShadeModel(GL_FLAT);
   glEnable(GL_DEPTH_TEST);

   makeCheckImages();
   glPixelStorei(GL_UNPACK_ALIGNMENT, 4);

   /* NOTE(review): glGenTextures(1, texName) implies texName is an array
      (or a pointer), yet glBindTexture is passed texName rather than
      texName[0] — confirm the declaration, which is outside this chunk. */
   glGenTextures(1, texName);
   glBindTexture(GL_TEXTURE_2D, texName);
   /* NOTE(review): GL_CLAMP is deprecated/removed in core GL 3.x; prefer
      GL_CLAMP_TO_EDGE if moving off the compatibility profile. Integer
      textures also require non-mipmapped NEAREST filtering, which the
      two filter settings below already satisfy. */
   glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
   glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
   glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,GL_NEAREST);
   glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,GL_NEAREST);
   /* Integer internal format must be paired with an _INTEGER client
      format; the packed type matches RGBA10_10_10_2_UI_CAST's layout. */
   glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2UI, checkImageWidth,
                checkImageHeight, 0, GL_RGBA_INTEGER, GL_UNSIGNED_INT_10_10_10_2,
                RGB10A2UIImage);

   /* Fixed: the format string contained a literal (raw) newline, which
      is not valid inside a C string literal — use "\n" instead. */
   printf("%s\n", gluErrorString(glGetError()));
   glEnable(GL_TEXTURE_2D);
}
static void display(void)
{
   glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
   glBindTexture(GL_TEXTURE_2D, texName);
   glPushMatrix();
       glBegin(GL_QUADS);
       glTexCoord2f(0.0, 0.0); glVertex3f(-2.0, -1.0, 0.0);
       glTexCoord2f(0.0, 1.0); glVertex3f(-2.0, 1.0, 0.0);
       glTexCoord2f(1.0, 1.0); glVertex3f(0.0, 1.0, 0.0);
       glTexCoord2f(1.0, 0.0); glVertex3f(0.0, -1.0, 0.0);
       glEnd();
    glPopMatrix();
   glFlush();
}


 When I use  
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, checkImageWidth,checkImageHeight, 0, GL_RGBA,GL_UNSIGNED_INT,checkImage); 

instead of

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2UI, checkImageWidth,checkImageHeight, 0, GL_RGBA_INTEGER,GL_UNSIGNED_INT_10_10_10_2,RGB10A2UIImage); 

, I got the right texture result: it shows a checkered image in the window.
When I use

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2UI, checkImageWidth,checkImageHeight, 0, GL_RGBA_INTEGER,GL_UNSIGNED_INT_10_10_10_2,RGB10A2UIImage); 

, although the demo reports “no error”, I get a white texture in the window.
so I am a little baffled why the demo can’t give the right texture result.

Integer textures are useless in fixed function. You need to upgrade to GLSL to make use of them.

Please don’t open a new thread when your old thread, concerning the exact same thing, is not even two weeks old: How to use GL_RGB10_A2UI in OpenGL3.3 or later? - OpenGL: Basic Coding - Khronos Forums

Didn’t you say you were using GL 3.3 in your other thread? You should be using shaders for ALL rendering.

ok,got it!

ok,got it!