I cannot send RGBA color as unsigned int

Hello there!

I know the glVertexAttribPointer function lets you specify the data type of each component in your vertex data, along with a normalized flag, so I want to send colors to OpenGL as a single unsigned int instead of 4 unsigned byte values.
I have this vertex data structure:


    struct Vertex
    {
        // ctor
        Vertex(float X, float Y, uint32_t ColorRGBA)
            : x(X), y(Y), color(ColorRGBA)
        {
        }

        // data
        float x, y;
        uint32_t color;
    };

The problem is that I can't get the right colors to show up:
I only see red or black on the screen. Here is the code, including the C++ and the shaders:


#include <cstdint>
#include <vector>
#include <iostream>
using std::cout;
using std::endl;

// SDL2
#include <SDL2/SDL.h>
#include <GL/glew.h>

struct TestData
{
    SDL_Window *window;
    SDL_GLContext glcontext;

    GLuint VAO_ID;
    GLuint vertexbuffer;
    GLuint vertexbuffer2;
    GLuint ProgramID;
};

// Pointer arithmetic: compute a byte offset for glVertexAttribPointer
template <typename T>
inline auto advancePtr(int num) -> const void*
{
    return reinterpret_cast<const void*>(num * sizeof(T));
}

// Helper functions used below (implementations omitted for brevity)
bool compileShader(GLuint shaderID, const char* source);
bool linkProgram(GLuint programID);
void cleaner(TestData& data);

// MAIN
int main(int argc, char *argv[])
{
    // Initialize data
    TestData data = { };
    // Initialize SDL2
    SDL_Init(SDL_INIT_VIDEO);

    // Context attributes (these must be set before creating the window)
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

    // Create window
    data.window = SDL_CreateWindow("An SDL2 window",
                                   SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                                   640, 480, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL);

    // Check that the window was successfully made
    if (data.window == nullptr)
    {
        // In the event that the window could not be made...
        cout << "Could not create window: " << SDL_GetError() << '
';
        SDL_Quit();
        return 1;
    }

    // Create OpenGL context
    data.glcontext = SDL_GL_CreateContext(data.window);

    // Initialize GLEW
    glewExperimental = GL_TRUE;
    glewInit();

    // OpenGL data
    // VAO
    glGenVertexArrays(1, &data.VAO_ID);
    glBindVertexArray(data.VAO_ID);

    struct Vertex
    {
        // ctor
        Vertex(float X, float Y, uint32_t ColorRGBA)
            : x(X), y(Y), color(ColorRGBA)
        {
        }

        // data
        float x, y;
        uint32_t color;
    };

    using vertexArray = std::vector<Vertex>;
    // Add some vertices to test
    vertexArray vertices;
    vertices.emplace_back(-1.0f, -1.0f, 0xFF0000FF);
    vertices.emplace_back(1.0f, -1.0f, 0x00FF00FF);
    vertices.emplace_back(0.0f,  1.0f, 0x0000FFFF);

    // Copy vertex data to GPU
    // Generate 1 buffer, put the resulting identifier in vertexbuffer
    glGenBuffers(1, &data.vertexbuffer);
    // The following commands will talk about our 'vertexbuffer' buffer
    glBindBuffer(GL_ARRAY_BUFFER, data.vertexbuffer);
    // Give our vertices to OpenGL
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), vertices.data(), GL_STATIC_DRAW);

    // Define and enable vertex attributes
    // Position
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), nullptr);
    glEnableVertexAttribArray(0);
    // Color
    glVertexAttribPointer(1, 1, GL_UNSIGNED_INT, GL_TRUE, sizeof(Vertex), advancePtr<float>(2));
    glEnableVertexAttribArray(1);

    // Load shaders code
    const char* vertexShaderCode = R"(#version 330 core
                                   layout(location = 0) in vec2 vertexPosition_modelspace;
                                   layout(location = 1) in vec4 vertexColor;

                                   smooth out vec4 fragColor;

                                   void main()
                                   {
                                       gl_Position.xy = vertexPosition_modelspace;
                                       gl_Position.z = 0.0;
                                       gl_Position.w = 1.0;

                                       fragColor = vertexColor;
                                   })";

    const char* fragShaderCode = R"(#version 330 core
                                 smooth in vec4 fragColor;
                                 out vec4 finalColor;

                                 void main()
                                 {
                                     finalColor = fragColor;
                                 })";

    // Create shaders
    GLuint VertexShaderID   = glCreateShader(GL_VERTEX_SHADER);
    GLuint FragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER);

    // Compile shaders
    if (!compileShader(VertexShaderID, vertexShaderCode) ||
        !compileShader(FragmentShaderID, fragShaderCode))
    {
        cleaner(data);
        return -1;
    }

    // Create shader program
    data.ProgramID = glCreateProgram();

    // Attach shaders to it
    glAttachShader(data.ProgramID, VertexShaderID);
    glAttachShader(data.ProgramID, FragmentShaderID);

    // Bind attributes (if needed)

    // Link the program object
    if (!linkProgram(data.ProgramID))
    {
        cleaner(data);
        return -1;
    }

    // Once the program is linked, we can detach the shaders
    glDetachShader(data.ProgramID, VertexShaderID);
    glDetachShader(data.ProgramID, FragmentShaderID);
    // And delete them
    glDeleteShader(VertexShaderID);
    glDeleteShader(FragmentShaderID);

    // Activate the shader program
    glUseProgram(data.ProgramID);


    // Main Loop
    bool close = false;

    while (!close)
    {
        // Process system events
        SDL_Event event;

        while (SDL_PollEvent(&event))
        {
            switch (event.type)
            {
                case SDL_QUIT:
                    close = true;
                    break;
            }
        } // event

        // draw
        glClear(GL_COLOR_BUFFER_BIT);

        // Draw the triangle
        glDrawArrays(GL_TRIANGLES, 0, 3);

        SDL_GL_SwapWindow(data.window);
    }

    // Clean
    cleaner(data);

    // Clean up SDL2 and exit the program
    SDL_Quit();
    return 0;
}

What am I doing wrong?
(I'm using the SDL2 library and OpenGL 3.3.)


glVertexAttribPointer(1, 1, GL_UNSIGNED_INT, GL_TRUE, sizeof(Vertex), advancePtr<float>(2));

layout(location = 1) in vec4 vertexColor;

Your vertex shader expects a vec4, not an unsigned int, but you are putting unsigned ints in your buffer.

Try using

layout(location = 1) in uint vertexColor;

and extract its byte components as RGBA values, dividing by 255.0 so they end up in the 0.0-1.0 range (the masks below match the 0xRRGGBBAA order of the literals in your code):

vec4 color = vec4((vertexColor & 0xFF000000u) >> 24,
                  (vertexColor & 0x00FF0000u) >> 16,
                  (vertexColor & 0x0000FF00u) >>  8,
                  (vertexColor & 0x000000FFu)) / 255.0;
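For reference, here is a minimal sketch of the whole vertex shader with that change applied. It assumes the attribute is supplied as an integer with glVertexAttribIPointer, as described in the reply below; with plain glVertexAttribPointer, what an integer shader input receives is undefined:

    #version 330 core
    layout(location = 0) in vec2 vertexPosition_modelspace;
    layout(location = 1) in uint vertexColor; // one packed 0xRRGGBBAA value

    smooth out vec4 fragColor;

    void main()
    {
        gl_Position = vec4(vertexPosition_modelspace, 0.0, 1.0);

        // unpack one byte per channel, then normalise to 0.0-1.0
        fragColor = vec4((vertexColor & 0xFF000000u) >> 24,
                         (vertexColor & 0x00FF0000u) >> 16,
                         (vertexColor & 0x0000FF00u) >>  8,
                         (vertexColor & 0x000000FFu)) / 255.0;
    }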

The glVertexAttribPointer call says that attribute 1 consists of a single unsigned int, which is treated as a normalised value (i.e. a value of 2^32 - 1 in the array will be passed to the shader as 1.0).

The shader declaration, on the other hand, says that attribute 1 is a vector of 4 floats. As the corresponding attribute array only supplies a single component, the other three components will default to 0.0, 0.0 and 1.0 respectively.
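That mismatch also explains the symptom: each colour ends up as a single normalised value in the .r component. For the three vertices above, 0xFF0000FF / (2^32 - 1) ≈ 0.996 is almost pure red, while 0x00FF00FF / (2^32 - 1) ≈ 0.004 and 0x0000FFFF / (2^32 - 1) ≈ 0.000015 are both essentially black, which is why the triangle shades from red to black.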

As for what you should use instead: that depends upon what you're trying to do.

If you want a single 32-bit unsigned integer to be passed to the shader as a single 32-bit unsigned integer, the shader needs:


layout(location = 1) in uint vertexColor;

while the application needs


glVertexAttribIPointer(1, 1, GL_UNSIGNED_INT, sizeof(Vertex), advancePtr<float>(2));

(note: glVertexAttribIPointer, not glVertexAttribPointer).

If you want the integer to be decomposed into normalised RGBA components (keeping the original in vec4 vertexColor declaration in the shader), you should use


glVertexAttribPointer(1, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(Vertex), advancePtr<float>(2));

and access the components using either vertexColor.rgba or vertexColor.abgr in the shader depending upon the order in which the components are packed into the integer. Unfortunately, the GL_UNSIGNED_INT_8_8_8_8 and GL_UNSIGNED_INT_8_8_8_8_REV types aren’t valid for attribute arrays, only for textures.
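To make that byte-order issue concrete, here is a minimal sketch; packRGBA is a hypothetical helper (not part of the code above) that stores the four channels in memory in R, G, B, A order, so the attribute reads back as vertexColor.rgba regardless of the host's endianness:

    #include <cstdint>
    #include <cstring>

    // Pack four 0-255 channels so that the first byte in memory is R.
    // GL_UNSIGNED_BYTE attributes are read in memory order, so with
    // glVertexAttribPointer(1, 4, GL_UNSIGNED_BYTE, GL_TRUE, ...) the
    // shader sees vertexColor.rgba on both little- and big-endian hosts.
    inline uint32_t packRGBA(uint8_t r, uint8_t g, uint8_t b, uint8_t a)
    {
        const uint8_t bytes[4] = { r, g, b, a };
        uint32_t packed;
        std::memcpy(&packed, bytes, sizeof(packed));
        return packed;
    }

With the Vertex struct from the question, the red corner would then be vertices.emplace_back(-1.0f, -1.0f, packRGBA(255, 0, 0, 255)). By contrast, a plain 0xRRGGBBAA literal such as 0x00FF00FF lands in memory in A, B, G, R byte order on a little-endian machine, which is exactly the vertexColor.abgr case mentioned above.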

Thanks for the answers 🙂