Trying to use modern opengl on linux

Hi, I’ve installed the latest version of the Mesa driver, but this one still seems to be experimental, and… I’m just wondering if I shouldn’t use older OpenGL versions on Linux.

I’m trying to display a single triangle: if I pass an array to glVertexAttribPointer it works, but if I pass an std::vector it doesn’t work anymore:


int main(int argc, char* argv[]) {
    odfaeg::RenderWindow window(sf::VideoMode(800, 600, 32), "Modern OpenGL",sf::Style::Default,sf::ContextSettings(24, 8, 4, 3, 3));

    odfaeg::Matrix4f matrix = window.getView().getProjMatrix().get3DMatrix() * window.getView().getViewMatrix().get3DMatrix();
    // Create and compile our GLSL program from the shaders
    /*GLuint programID = LoadShaders( "SimpleVertexShader.vertexshader", "SimpleFragmentShader.fragmentshader" );

    GLuint location = glGetUniformLocation(programID, "mvp");
    if (location == -1)
        std::cout<<"param not found in shader!"<<std::endl;*/

    /*glUseProgram(programID);
    glUniformMatrix4fv(location, 1, GL_TRUE,pm.get3DMatrix().toGlMatrix());*/
    odfaeg::Shader shader;
    shader.loadFromFile("SimpleVertexShader.vertexshader", "SimpleFragmentShader.fragmentshader");
    shader.setParameter("mvp", matrix);

    // An array of 3 vectors which represents 3 vertices
    std::vector<odfaeg::Vertex> vertices;
    vertices.push_back(odfaeg::Vertex(sf::Vector3f(0.0f, 300.0f, 0)));
    vertices.push_back(odfaeg::Vertex(sf::Vector3f(400.0f, -300.0f, 0)));
    vertices.push_back(odfaeg::Vertex(sf::Vector3f(-400.0f, -300.0f, 0)));
    /*static const GLfloat vertices[] = {
       0.0f, 300.0f, 0.0f,
       400.0f, -300.0f, 0.0f,
       -400.0f, -300.0f, 0.0f
    };*/
    // This will identify our vertex buffer
    GLuint vertexbuffer;

    // Generate 1 buffer, put the resulting identifier in vertexbuffer
    glGenBuffers(1, &vertexbuffer);

    // The following commands will talk about our 'vertexbuffer' buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);

    // Give our vertices to OpenGL.
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float), &vertices[0], GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    while (window.isOpen()) {

        window.clear(sf::Color::Blue);
        //rt.clear(sf::Color::Blue);

        // 1rst attribute buffer : vertices
        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        glVertexAttribPointer(
           0,                  // attribute 0. No particular reason for 0, but must match the layout in the shader.
           3,                  // size
           GL_FLOAT,           // type
           GL_FALSE,           // normalized?
           0,                  // stride
           (void*)0            // array buffer offset
        );
        // Use our shader
        //glUseProgram(programID);
        odfaeg::Shader::bind(&shader);
        // Draw the triangle !
        glDrawArrays(GL_TRIANGLES, 0, 3); // Starting from vertex 0; 3 vertices total -> 1 triangle

        glDisableVertexAttribArray(0);
        window.display();
        sf::Event event;
        while(window.pollEvent(event)) {
            if (event.type == sf::Event::Closed)
                window.close();
        }
    }
    return 0;
}

[QUOTE=Lolilolight;1260518]I’m trying to display a single triangle: if I pass an array to glVertexAttribPointer it works, but if I pass an std::vector it doesn’t work anymore:


    ...
    // An array of 3 vectors which represents 3 vertices
    std::vector<odfaeg::Vertex> vertices;
    ...
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float), &vertices[0], GL_STATIC_DRAW);

[/QUOTE]

The size of each element is sizeof(odfaeg::Vertex), not sizeof(float).

Hi, I’ve installed the latest version of the Mesa driver, but this one still seems to be experimental, and… I’m just wondering if I shouldn’t use older OpenGL versions on Linux.

Just install the latest vendor-provided OpenGL driver version for your GPU (NVidia, AMD/ATI, Intel, etc.)

Ok, it works with triangles but not with quads. :confused:



int main(int argc, char* argv[]) {
    odfaeg::RenderWindow window(sf::VideoMode(800, 600, 32), "Modern OpenGL",sf::Style::Default,sf::ContextSettings(24, 8, 4, 3, 3));

    odfaeg::Matrix4f matrix = window.getView().getProjMatrix().get3DMatrix() * window.getView().getViewMatrix().get3DMatrix();
    // Create and compile our GLSL program from the shaders
    odfaeg::Shader shader;
    shader.loadFromFile("SimpleVertexShader.vertexshader", "SimpleFragmentShader.fragmentshader");
    shader.setParameter("mvp", matrix);
    shader.setParameter("texture", odfaeg::Shader::CurrentTexture);
    // An array of 3 vectors which represents 3 vertices
    std::vector<odfaeg::Vertex> vertices;
    vertices.push_back(odfaeg::Vertex(sf::Vector3f(0.0f, 0.0f, 0), sf::Color::Red, sf::Vector2f(0, 0)));
    vertices.push_back(odfaeg::Vertex(sf::Vector3f(0.0f, 50.0f, 0), sf::Color::Red, sf::Vector2f(100, 0)));
    vertices.push_back(odfaeg::Vertex(sf::Vector3f(100.0f, 50.0f, 0), sf::Color::Red,sf::Vector2f(100, 50)));
    vertices.push_back(odfaeg::Vertex(sf::Vector3f(100.0f, 0.0f, 0), sf::Color::Red,sf::Vector2f(0, 50)));
    // This will identify our vertex buffer
    odfaeg::Texture tex;
    tex.loadFromFile("tilesets/herbe.png");
    GLuint vertexbuffer;

    // Generate 1 buffer, put the resulting identifier in vertexbuffer
    glGenBuffers(1, &vertexbuffer);

    // The following commands will talk about our 'vertexbuffer' buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);

    // Give our vertices to OpenGL.
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(odfaeg::Vertex), &vertices[0], GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    while (window.isOpen()) {

        window.clear(sf::Color::Blue);
        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        glVertexAttribPointer(
           0,                  // attribute 0. No particular reason for 0, but must match the layout in the shader.
           3,                  // size
           GL_FLOAT,           // type
           GL_FALSE,           // normalized?
           sizeof(odfaeg::Vertex),                  // stride
           (void*)0            // array buffer offset
        );
        glEnableVertexAttribArray(1);
        glVertexAttribPointer(
           1,                  // attribute 1. No particular reason for 0, but must match the layout in the shader.
           4,                  // size
           GL_UNSIGNED_BYTE,           // type
           GL_FALSE,           // normalized?
           sizeof(odfaeg::Vertex),                  // stride
           (void*)12            // array buffer offset
        );
        glEnableVertexAttribArray(2);
        glVertexAttribPointer(
           2,                  // attribute 2. No particular reason for 0, but must match the layout in the shader.
           2,                  // size
           GL_FLOAT,           // type
           GL_FALSE,           // normalized?
           sizeof(odfaeg::Vertex),                  // stride
           (void*)16            // array buffer offset
        );
        odfaeg::Texture::bind(&tex);
        odfaeg::Shader::bind(&shader);
        glDrawArrays(GL_QUADS, 0, 4);
        glDisableVertexAttribArray(0);
        glDisableVertexAttribArray(1);
        glDisableVertexAttribArray(2);
        window.display();
        sf::Event event;
        while(window.pollEvent(event)) {
            if (event.type == sf::Event::Closed)
                window.close();
        }
    }
    return 0;
}

Can I find a compatible driver on the AMD website? :o

I’ve tried to install the proprietary drivers from the AMD web site, but, as I expected, it tells me that they are not compatible with my graphics card. (Under Ubuntu 14.04)

[QUOTE=Lolilolight;1260526]Ok, it works with triangles but not with quads. :confused:
[/QUOTE]

Of course not with quads. Quads are not part of modern OpenGL: GL_QUADS was deprecated in OpenGL 3.0 and removed from the core profile in 3.1.

Woooowwwww, I didn’t know that; I’ve read the tutorials on your website but I must have missed something.

Ok, so I suppose that I have to use triangle strips instead.

But I’m surprised that they are deprecated.

Ok now I’m trying to bind textures but I’ve a strange result. :o

Here is the code :


#include <GL/glew.h>
#include <GL/gl.h>
#include <SFML/Window.hpp>
#include <iostream>
#include <fstream>
#include <vector>
#include "odfaeg/Graphics/renderWindow.h"
#include "odfaeg/Graphics/projMatrix.h"
#include "odfaeg/Graphics/vertexArray.h"
#include "odfaeg/Graphics/2D/tile.h"
int main(int argc, char* argv[]) {
    // 800x600 window with a 3.3 context (24-bit depth, 8-bit stencil, 4x AA).
    odfaeg::RenderWindow window(sf::VideoMode(800, 600, 32), "Modern OpenGL",sf::Style::Default,sf::ContextSettings(24, 8, 4, 3, 3));
    odfaeg::Texture tex;
    tex.loadFromFile("tilesets/herbe.png");
    // A textured tile at the origin; its vertices carry pixel-space texcoords.
    odfaeg::g2d::Tile tile (&tex, odfaeg::Vec3f(0, 0, 0), odfaeg::Vec3f(100, 50, 100), sf::IntRect(0, 0, 100, 50));
    // Combined model * view * projection matrix for the tile.
    // NOTE(review): multiplication order here is the reverse of the earlier
    // examples (proj * view) — confirm which convention odfaeg expects.
    odfaeg::Matrix4f matrix = tile.getTransform().get3DMatrix() * window.getView().getViewMatrix().get3DMatrix() * window.getView().getProjMatrix().get3DMatrix();
    odfaeg::Shader shader;
    shader.loadFromFile("SimpleVertexShader.vertexshader", "SimpleFragmentShader.fragmentshader");
    shader.setParameter("mvp", matrix);
    shader.setParameter("texture", odfaeg::Shader::CurrentTexture);
    // Texture matrix scaling pixel texcoords into the normalized [0,1] range.
    odfaeg::Matrix4f textureMatrix;
    textureMatrix.m11 = 1.f / tex.getSize().x;
    textureMatrix.m22 = 1.f / tex.getSize().y;
    // BUGFIX: the matrix was computed but never uploaded — the shader's "tm"
    // uniform then defaults to all zeros, so every texcoord collapses to (0,0)
    // and the whole quad samples a single texel (the "white texture" symptom).
    shader.setParameter("tm", textureMatrix);
    while (window.isOpen()) {
        window.clear(sf::Color::Blue);
        odfaeg::RenderStates states;
        states.shader = &shader;
        window.draw(tile, states);
        window.display();
        sf::Event event;
        while(window.pollEvent(event)) {
            if (event.type == sf::Event::Closed)
                window.close();
        }
    }
    return 0;
}

And here is the code which create my textures :


////////////////////////////////////////////////////////////
bool Texture::create(unsigned int width, unsigned int height)
{
    // Check if texture parameters are valid before creating it
    if ((width == 0) || (height == 0))
    {
        err() << "Failed to create texture, invalid size (" << width << "x" << height << ")" << std::endl;
        return false;
    }

    // Compute the internal texture dimensions depending on NPOT textures support
    Vector2u actualSize(getValidSize(width), getValidSize(height));

    // Check the maximum texture size
    unsigned int maxSize = getMaximumSize();
    if ((actualSize.x > maxSize) || (actualSize.y > maxSize))
    {
        err() << "Failed to create texture, its internal size is too high "
              << "(" << actualSize.x << "x" << actualSize.y << ", "
              << "maximum is " << maxSize << "x" << maxSize << ")"
              << std::endl;
        return false;
    }

    // All the validity checks passed, we can store the new texture settings
    m_size.x        = width;
    m_size.y        = height;
    m_actualSize    = actualSize;
    m_pixelsFlipped = false;

    ensureGlContext();

    // Create the OpenGL texture if it doesn't exist yet
    if (!m_texture)
    {
        GLuint texture;
        glCheck(glGenTextures(1, &texture));
        m_texture = static_cast<unsigned int>(texture);
    }

    // Make sure that the current texture binding will be preserved
    priv::TextureSaver save;

    // Initialize the texture
    glCheck(glBindTexture(GL_TEXTURE_2D, m_texture));
    glCheck(glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, m_actualSize.x, m_actualSize.y, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL));
    glCheck(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, m_isRepeated ? GL_REPEAT : GL_CLAMP_TO_EDGE));
    glCheck(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, m_isRepeated ? GL_REPEAT : GL_CLAMP_TO_EDGE));
    glCheck(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, m_isSmooth ? GL_LINEAR : GL_NEAREST));
    glCheck(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, m_isSmooth ? GL_LINEAR : GL_NEAREST));
    m_cacheId = getUniqueId();

    return true;
}

Is there anything changed since opengl 3.3 ?

There is my shaders


#version 330 core
// Vertex shader: transforms positions by the MVP matrix, texture
// coordinates by the texture matrix, and forwards the vertex color.
layout(location = 0) in vec3 vertex_position;
layout(location = 1) in vec4 vertex_color;
layout(location = 2) in vec2 vertex_texCoords0;
uniform mat4 mvp;   // model-view-projection matrix
uniform mat4 tm;    // texture matrix (pixel texcoords -> normalized [0,1])
out vec2 texCoords;
out vec4 front_color;
void main () {
	gl_Position = mvp * vec4(vertex_position.xyz, 1.0);
	// BUGFIX: use w = 1.0 so any translation stored in tm is applied
	// (w = 0.0 silently drops it); for a pure scale the result is identical.
	texCoords = (tm * vec4(vertex_texCoords0.xy, 0.0, 1.0)).xy;
	front_color = vertex_color;
}


#version 330 core
// Fragment shader: modulates the interpolated vertex color with a texel.
in vec2 texCoords;
in vec4 front_color;
out vec4 color;
// NOTE(review): naming a uniform "texture" shadows the built-in texture()
// lookup function in GLSL 3.30 — consider renaming it (together with the
// matching setParameter("texture", ...) call on the C++ side).
uniform sampler2D texture;
void main() {
    // NOTE(review): texture2D() is deprecated and not part of the 330 core
    // profile; strict core-profile compilers will reject this shader —
    // texture() is the replacement. Possibly related to the white-texture
    // symptom reported in this thread.
    vec4 texel = texture2D (texture, texCoords);
    color = front_color * texel;
}

And here is the code which draw everything :


 // NOTE(review): excerpt of the renderer's draw path — the enclosing function
 // is not shown, so the braces here are unbalanced by design of the paste.
 if (vertices && GLEW_ARB_vertex_buffer_object) {
            //In modern opengl we need to use the glVertexAttribPointer functions. (gl*Pointer is deprecated)
            // NOTE(review): this check misses e.g. version 4.0 (minor 0 < 3);
            // a combined check like major > 3 || (major == 3 && minor >= 3)
            // would be safer — confirm which versions must be accepted.
            if (majorVersion >= 3 && minorVersion >= 3) {
                // Attribute 0: position — 3 floats at offset 0 of each Vertex.
                glCheck(glBindBuffer(GL_ARRAY_BUFFER, states.vertexBufferId));
                glCheck(glEnableVertexAttribArray(0));
                glCheck(glVertexAttribPointer(0, 3,GL_FLOAT,GL_FALSE,sizeof(Vertex), (GLvoid*) 0));
                // Attribute 1: color — 4 unsigned bytes at offset 12.
                // NOTE(review): with GL_FALSE the 0..255 bytes are NOT scaled
                // to 0.0..1.0 in the shader; GL_TRUE is the usual choice for
                // byte colors — confirm what the shader expects.
                glCheck(glEnableVertexAttribArray(1));
                glCheck(glVertexAttribPointer(1, 4,GL_UNSIGNED_BYTE,GL_FALSE,sizeof(Vertex),(GLvoid*) 12));
                // Attribute 2: texcoords — 2 floats at offset 16.
                // NOTE(review): offsets 12/16 assume a specific Vertex layout
                // and padding; offsetof(Vertex, member) would be robust.
                glCheck(glEnableVertexAttribArray(2));
                glCheck(glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid*) 16));
                // Attribute 3: normals from a separate buffer.
                glCheck(glBindBuffer(GL_ARRAY_BUFFER, states.normalBufferId));
                glCheck(glEnableVertexAttribArray(3));
                glCheck(glVertexAttribPointer(3, 3,GL_FLOAT,GL_FALSE,sizeof(Vector3f), (GLvoid*) 0));
                // Map the engine's primitive-type enum to GL draw modes.
                // NOTE(review): GL_QUADS is invalid in a core profile context;
                // drawing with that entry will raise GL_INVALID_ENUM.
                static const GLenum modes[] = {GL_POINTS, GL_LINES, GL_LINE_STRIP, GL_TRIANGLES,
                                               GL_TRIANGLE_STRIP, GL_TRIANGLE_FAN, GL_QUADS};
                GLenum mode = modes[type];
                if (indexesCount == 0) {
                    // Non-indexed draw over the whole vertex buffer.
                    glCheck(glDrawArrays(mode, 0, vertexCount));
                } else if (indexesCount > 0 && (numIndexesCount == 0 || baseVerticesCount == 0 || baseIndexesCount == 0)) {
                    // Indexed draw with 32-bit indices from the bound element buffer.
                    glCheck(glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, states.indexBufferId));
                    glCheck(glDrawElements(mode, indexesCount,GL_UNSIGNED_INT,0));
                } 
}

But it gives me a white texture like this :

[ATTACH=CONFIG]685[/ATTACH]

This topic was automatically closed 183 days after the last reply. New replies are no longer allowed.