How to render to an unsigned integer texture2D?

I want to render to an unsigned integer 2D texture and then read the data back with glReadPixels, but the output is weird.


In C++:

    // Create a texture2D
    GLuint tex;
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);

    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32UI,
                 w, h,
                 0, GL_RGBA_INTEGER, GL_UNSIGNED_INT,
                 NULL);

    glBindTexture(GL_TEXTURE_2D, 0);

    return tex;

    // ....

    // Read pixel data
    glBindFramebuffer(GL_READ_FRAMEBUFFER, m_fbo);
    glReadBuffer(GL_COLOR_ATTACHMENT0);

    GLint invert_y = m_viewport.w - m_clicked_pos.y;
    GLuint result[4];
    glReadPixels(x, y, 1, 1, GL_RGBA_INTEGER, GL_UNSIGNED_INT, result);

    //---------------- In the GLSL ----------------
    #version 450 core

    void main()
    {
        output_color = vec4(1, 2, 3, 4);
    }

After reading the data at a pixel (x, y), the result is (3846258, 10391808, 10391808, 803). Has anyone experienced this problem? Please help me.

[QUOTE=pnt1614;1289656]I want to render to an unsigned integer 2D texture and then read the data back with glReadPixels, but the output is weird.


	output_color = vec4(1, 2, 3, 4);

[/QUOTE]
You don’t show the declaration for [var]output_color[/var], but it should be a uvec4, not a vec4. When the color buffer has an integer format (GL_RGBA32UI here), writing a floating-point fragment output to it produces undefined values, which is why you’re reading back garbage.
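
As a minimal sketch, the fragment shader could look something like this (the output name and its location are assumptions, since the declaration isn’t shown):

    #version 450 core

    // Assumed declaration: unsigned-integer output bound to color attachment 0
    layout(location = 0) out uvec4 output_color;

    void main()
    {
        // Write unsigned integer values when the attachment is GL_RGBA32UI
        output_color = uvec4(1u, 2u, 3u, 4u);
    }

With that, glReadPixels with GL_RGBA_INTEGER / GL_UNSIGNED_INT should give you back (1, 2, 3, 4) at any pixel the geometry covers.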