Desktop/laptop issue for vertex shaders

Hi

Can anyone explain why the JOGL program pasted at the bottom runs fine on my desktop but throws shader compile and link exceptions on my laptop?

My desktop is running WinXP-64 with an NVIDIA Quadro FX1400, which supports OpenGL2 but accepts the 330 shader version. The program throws no exceptions and runs as expected. I also still don’t fully understand how an OpenGL2 card can accept a shader version of 330, which appears to contradict the Table on Wikipedia: http://en.wikipedia.org/wiki/GLSL.

The program throws shader compile and program link exceptions on my Win7-64 laptop with an ATI Mobility Radeon HD 5470 [version 8.672.1.3000], which from the following spec website supports OpenGL 3.2:

http://www.amd.com/uk/products/notebook/graphics/ati-mobility-hd-5400/Pages/hd-5450-specs.aspx

On both machines I’m using the same jars/dlls from the most recent JOGL download at www.jogamp.org.

Thanks

Graham

===

package modern_gl_prog;

// java
import java.awt.EventQueue;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.border.EmptyBorder;
import java.awt.Canvas;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.charset.Charset;
// gl
import javax.media.opengl.GL;
import javax.media.opengl.GL2;
import javax.media.opengl.GL3;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLCapabilities;
import javax.media.opengl.GLEventListener;
import javax.media.opengl.GLProfile;
import javax.media.opengl.awt.GLCanvas;
import com.jogamp.common.nio.Buffers;

/**

  • Java port of JL McKesson’s tut1.cpp tutorial; Tutorial 0.3.7\Tut 01 Hello triangle

  • @author gmseed
    */
    public class tut1 extends JFrame
    {
    private JPanel contentPane;
    private Canvas canvas;

    static { GLProfile.initSingleton(true); } // recommended first call

    /**

    • Launch the application.
      */
      public static void main(String[] args)
      {
      EventQueue.invokeLater(new Runnable()
      {
      public void run()
      {
      try
      {
      tut1 frame = new tut1();
      frame.setVisible(true);
      }
      catch (Exception e)
      {
      e.printStackTrace();
      }
      }
      });
      }

    /**

    • Create the frame.
      */
      public tut1()
      {
      setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
      setBounds(100, 100, 545, 536);
      contentPane = new JPanel();
      contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
      setContentPane(contentPane);
      contentPane.setLayout(null);

      // OpenGL 2.1 profile
      GLProfile prof = GLProfile.get(GLProfile.GL2);
      GLCapabilities caps = new GLCapabilities(prof);
      canvas = new tut1Canvas(caps);
      canvas.setBounds(0, 0, 517, 390);
      contentPane.add(canvas);
      }
      } // class tut1

class tut1Canvas extends GLCanvas
implements GLEventListener
{
protected int mTheProgram; // handle to program object
protected int mPositionBufferObject; // handle to buffer object
protected int mvao; // vertex array object

protected int           mVertexCount = 3; 
protected FloatBuffer   mVertexPositions = null; 

public tut1Canvas() 
{ 
    super(); 
    this.addGLEventListener(this); 
} 

public tut1Canvas(GLCapabilities caps) 
{ 
    super(caps); 
    this.addGLEventListener(this); 
} 

private int createShader(GL2 gl, int eShaderType, String strShaderFile) 
{ 
    int shader = gl.glCreateShader(eShaderType); 
    String[] strings = new String[1]; strings[0] = strShaderFile; 
    gl.glShaderSource(shader, 1, strings,(int[])null,0); // glShaderSource(int shader, int count, java.lang.String[] string, int[] length, int length_offset); 

    gl.glCompileShader(shader); 

    int[] statusArray = new int[1]; 
    int status; 
    gl.glGetShaderiv(shader, GL2.GL_COMPILE_STATUS, statusArray,0); 
    status = statusArray[0]; 
    if (status == GL.GL_FALSE) 
    { 
        int[] infoLogLengthArray = new int[1]; 
        int infoLogLength; 
        gl.glGetShaderiv(shader, GL2.GL_INFO_LOG_LENGTH,infoLogLengthArray,0); 
        infoLogLength = infoLogLengthArray[0]; 
        IntBuffer intValue = IntBuffer.allocate(1); 
        ByteBuffer strInfoLog = Buffers.newDirectByteBuffer(infoLogLength); 
        gl.glGetShaderInfoLog(shader, infoLogLength, intValue, strInfoLog); 

        String strShaderType = null; 
        switch(eShaderType) 
        { 
            case GL2.GL_VERTEX_SHADER:   strShaderType = "vertex";   break; 
            case GL3.GL_GEOMETRY_SHADER: strShaderType = "geometry"; break; 
            case GL2.GL_FRAGMENT_SHADER: strShaderType = "fragment"; break; 
        } 
        
        // decode byte buffer 
        String      encoding    = System.getProperty("file.encoding"); 
        CharBuffer  charBuff    = Charset.forName(encoding).decode(strInfoLog); 
        String      buffString  = charBuff.toString(); 
        String      msg         = "Compile failure in " + strShaderType + " shader: " + buffString;   
        System.out.println(msg); 
    } 

    return shader; 
} 

private int createProgram(GL2 gl, int[] shaderList) 
{ 
    int program = gl.glCreateProgram(); 

    for(int iLoop = 0; iLoop < shaderList.length; iLoop++) 
    { 
        gl.glAttachShader(program, shaderList[iLoop]); 
    } 

    gl.glLinkProgram(program); 
    
    int[] statusArray = new int[1]; 
    int status; 
    gl.glGetProgramiv(program,GL2.GL_LINK_STATUS,statusArray,0); 
    status = statusArray[0]; 
    if (status == GL.GL_FALSE) 
    { 
        int[] infoLogLengthArray = new int[1]; 
        int infoLogLength; 
        gl.glGetShaderiv(program, GL2.GL_INFO_LOG_LENGTH,infoLogLengthArray,0); 
        infoLogLength = infoLogLengthArray[0]; 

        IntBuffer intValue = IntBuffer.allocate(1); 
        ByteBuffer strInfoLog = ByteBuffer.allocate(infoLogLength); 
        gl.glGetProgramInfoLog(program, infoLogLength, intValue, strInfoLog); 
        
        // decode byte buffer 
        String      encoding    = System.getProperty("file.encoding"); 
        CharBuffer  charBuff    = Charset.forName(encoding).decode(strInfoLog); 
        String      buffString  = charBuff.toString(); 
        String      msg         = "Linker failure: " + buffString;   
        System.out.println(msg);             
    } 

    for(int iLoop = 0; iLoop < shaderList.length; iLoop++) 
    { 
        gl.glDetachShader(program, shaderList[iLoop]); 
    } 

    return program; 
}     

public String strVertexShader() 
{ 
    String s = new String(); 
    s += "#version 340

";
s += "layout(location = 0) in vec4 position;
";
s += "void main()
";
s += "{
";
s += " gl_Position = position;
";
s += "}
";
return s;
}

public String strFragmentShader() 
{ 
    String s = new String(); 
    s += "#version 340

";
s += "out vec4 outputColor;
";
s += "void main()
";
s += "{
";
s += " outputColor = vec4(1.0f, 1.0f, 1.0f, 1.0f);
";
s += "}
";
return s;
};

void initializeProgram(GL2 gl) 
{ 
    String  vertexShaderString      = strVertexShader(); 
    String  fragmentShaderString    = strFragmentShader(); 
    int     vertexShader            = createShader(gl,GL2.GL_VERTEX_SHADER,vertexShaderString); 
    int     fragmentShader          = createShader(gl,GL2.GL_FRAGMENT_SHADER,fragmentShaderString); 
    int[] shaderList = new int[2]; 
    shaderList[0] = vertexShader; 
    shaderList[1] = fragmentShader; 
    
    mTheProgram = createProgram(gl,shaderList); 
} 

void initializeVertexBuffer(GL2 gl) 
{ 
    int[] array = new int[1]; 
    gl.glGenBuffers(1, array, 0); 
    mPositionBufferObject = array[0]; 

    // allocate vertex buffer [4 for (x,y,z,w) for each vertex] 
    mVertexPositions = Buffers.newDirectFloatBuffer(4*mVertexCount); 
    mVertexPositions.put(0.75f);  mVertexPositions.put(0.75f);  mVertexPositions.put(0.75f); mVertexPositions.put(1.0f); 
    mVertexPositions.put(0.75f);  mVertexPositions.put(-0.75f); mVertexPositions.put(0.0f);  mVertexPositions.put(1.0f); 
    mVertexPositions.put(-0.75f); mVertexPositions.put(-0.75f); mVertexPositions.put(0.0f);  mVertexPositions.put(1.0f); 
    // invoke this method to prepare for a sequence of channel-write or relative get operations 
    mVertexPositions.flip(); 
    
    gl.glBindBuffer(GL2.GL_ARRAY_BUFFER,mPositionBufferObject); 
    gl.glBufferData(GL2.GL_ARRAY_BUFFER,4*mVertexCount*Buffers.SIZEOF_FLOAT,mVertexPositions,GL2.GL_STATIC_DRAW); 
    gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, 0); 
} 

@Override 
public void init(GLAutoDrawable gLDrawable) 
{ 
    GL2 gl = gLDrawable.getGL().getGL2(); 
    
    initializeProgram(gl); 
    initializeVertexBuffer(gl); 

    // generate vertex array and bind 
    int[] array = new int[1]; 
    gl.glGenVertexArrays(1,array,0); 
    mvao = array[0]; 
    gl.glBindVertexArray(mvao); 
} 

@Override 
public void display(GLAutoDrawable gLDrawable) 
{ 
    final GL2 gl = gLDrawable.getGL().getGL2(); 
    gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 
    gl.glClear(GL.GL_COLOR_BUFFER_BIT); 

    gl.glUseProgram(mTheProgram); 

    gl.glBindBuffer(GL2.GL_ARRAY_BUFFER,mPositionBufferObject); 
    gl.glEnableVertexAttribArray(0); // GL2ES2 
    gl.glVertexAttribPointer(0, 4, GL.GL_FLOAT,false, 0, 0); 
    
    gl.glDrawArrays(GL.GL_TRIANGLES, 0, 3); 
    
    gl.glDisableVertexAttribArray(0); 
    gl.glUseProgram(0); 

    gLDrawable.swapBuffers(); 
} 

@Override 
public void reshape(GLAutoDrawable gLDrawable, int x, int y, int width, int height) 
{       
    GL2 gl = gLDrawable.getGL().getGL2(); 
    if (height <= 0) 
    { 
        height = 1; 
    } 
    gl.glViewport(0,0,width,height); 
} 

@Override 
public void dispose(GLAutoDrawable arg0) 
{ 
    // do nothing 
} 

} // class tut1Canvas

First, please learn how to properly format a post. Particularly code; with the mangled tab indentation in your post, it’s virtually impossible to read. Use proper code tags.

Second:

#version 340

There is no version “340”. If NVIDIA lets that compile, then it’s NVIDIA’s compiler that has the problem.

The program throws shader compile and program link exceptions

And those “exceptions” are… what? You have to tell us what errors you’re getting. Help us help you.

Hi

Thanks for your reply.

Maybe I missed it but I couldn’t see a “code” icon on the editor.

The version should have read:

#version 330

I was playing around with it and forgot to change it back before pasting it in this post.

The shader compile exceptions thrown are:

"
Compile failure in vertex shader: Vertex shader failed to compile with the following errors:
ERROR: 0:1: error(#106) Version number not supported by GL2
ERROR: 0:2: error(#279) Invalid layout qualifier ‘location’
ERROR: error(#273) 2 compilation errors. No code generated
"

Thanks

Graham

Your code specifically asks for GL 2.1 profile then it wants to use GLSL 330. This is not so good idea. I am not fully aware how to do this with jogl, but it would probably be a better idea to ask for “latest available” GL version instead of 2.1.

You can query for the supported shading language version with glGetString(GL_SHADING_LANGUAGE_VERSION), but first make sure you are not asking unnecessarily old GL version.

Edit: The code button is hidden behind the sharp sign (#).

Hi

Thanks for your reply.

I added a call to the following in my init():


    public static String GLSLVersion(GL gl)
    {
        String version = gl.glGetString(GL2.GL_SHADING_LANGUAGE_VERSION);
        return version;
    }

and it outputs for my desktop:

glslVersion: 1.20 NVIDIA via Cg compiler

which corresponds to OpenGL2.1.

Thus, I still don’t understand how the program runs fine on my desktop which is OpenGL 2.1/GLSL 1.2 with a call to 330 shading version but throws an exception on my laptop which is OpenGL3/GLSL3.

Thanks

Graham

You may have an old driver on your desktop which simply accepts more than it should. Make sure you have the latest driver (on both devices), and report also your GL versions.

Hi

I ran the GLSL language version query on my laptop and it comes back with 1.40.

Thus, although it’s a recently purchased Sony VAIO i5 with an ATI Mobility Radeon HD 5470 [version 8.672.1.3000], which from ATI’s spec website supports OpenGL 3.2, the driver is only supporting shader version 1.4!!

How can that be? Why would the manufacturer provide 3.2GL support but shader support of 1.4?

Thanks

Graham

Try querying the GL version from the driver directly via gl.glGetString(GL2.GL_VERSION), and see what you get. Spec sheets are somewhat notorious for reporting incorrect OpenGL versions (and they’re often out of date)…

My desktop has the settings:

glVersion : 2.1.2
glslVersion: 1.20 NVIDIA via Cg compiler

and runs the glsl shading version 330.

I’ll perform the same test on my laptop.

I updated the Nvidia display driver on my desktop from 258.49 to 276.28, with Nvidia stating on their website that the latest driver supports OpenGL4+. However, even after the update I get the same GL and GLSL versions as before the driver update.

Thanks

Graham

However, even after the update I get the same GL and GLSL versions as before the driver update

Possibly because you are not asking for the new style GL contexts?
If you use the ‘old’ wglCreateContext you get OpenGL 2.1.
Are you using wglCreateContextAttribsARB instead?

Hi

I commented out:


        // OpenGL profile
        //GLProfile prof = GLProfile.get(GLProfile.GL2);
        //GLCapabilities caps = new GLCapabilities(prof);
        //canvas = new tut1Canvas(caps);
        canvas = new tut1Canvas();

so I’m not requesting any context, assuming it would give me the default.

I don’t make any _ARB calls in the program.

By the way, I checked with the Nvidia console that the new driver is being used.

Thanks

Graham

According to http://en.wikipedia.org/wiki/Comparison_of_Nvidia_graphics_processing_units FX 1400 is OpenGL 2.1.

So it appears with (some) NVIDIA Quadro you can get GLSL 330 with OpenGL 2.1. In general however, you only get GLSL 120 with GL 2.1.

That’s right.

With the desktop NVIDIA Quadro [GL 2.1/GLSL 1.2] but the driver parses GLSL 3.3.

With my laptop ATI Radeon [GL/GLSL 1.4] but the driver throws an exception when parsing GLSL 3.3.

Graham

I updated the Nvidia display driver on my desktop from 258.49 to 276.28, with Nvidia stating on their website that the latest driver supports OpenGL4+. However, even after the update I get the same GL and GLSL versions as before the driver update.

That’s because your desktop hardware, the QuadroFX 1400, is only capable of OpenGL 2.1 (it has dedicated vertex and pixel shaders). The supported GL version is always the lesser of the hardware-supported version and the driver-supported version. The GLSL compiler should be throwing an error as soon as it sees a GLSL #version > 120, and it’s an Nvidia bug that it’s not.

I’d be interested to see what GL version your laptop reports though, if the spec sheet’s claiming GL 3.2 support but the driver’s only giving you GLSL 1.40.

With the desktop NVIDIA Quadro [GL 2.1/GLSL 1.2] but the driver parses GLSL 3.3.

That’s because, as I said before, the driver is doing something wrong. It should not parse 3.30 shaders. So if it is, that’s a sign of something wrong in the driver.

Just run the GL / GLSL version commands on my laptop:

glVersion: 3.1.9117
glslVersion: 1.40
Compile failure in vertex shader: Vertex shader failed to compile with the following errors:
ERROR: 0:1: error(#106) Version number not supported by GL2
ERROR: 0:2: error(#279) Invalid layout qualifier ‘location’
ERROR: error(#273) 2 compilation errors. No code generated

and appears to agree with the Wikipedia table [OpenGL Shading Language - Wikipedia] of GL 3.1 / GLSL 1.40.08

I suppose I’ve fallen victim to there being a big version difference of GLSL 1.4–>3.3 for GL versions of 3–>3.3.

Thus, if you want to do any up to date vertex buffer/shader GL programming then make sure you don’t buy a Sony Vaio Laptop that is shipped with an ATI Mobility Radeon graphics driver.

Graham

Have you tried to upgrade your graphics drivers?

I suppose I’ve fallen victim to there being a big version difference of GLSL 1.4–>3.3 for GL versions of 3–>3.3.

What are you talking about? All the ARB did was properly number GLSL versions after their OpenGL version counterparts, so that you don’t need a table to know what GLSL version 1.30 shipped with (GL 3.0, FYI).

The GLSL versions go 1.40, 1.50, 3.30, for GL versions 3.1, 3.2, and 3.3.

It’s not about a “big difference.” It’s about a difference; it doesn’t matter how big it is. You gave a 3.30 shader to a GL implementation that doesn’t support 3.30. It fails to compile. That is what is supposed to happen

@Alfonse

Thanks for your replies but I must comment on the slightly unnecessary tone you use.

For example, you have posted:

“And those “exceptions” are… what?”
“What are you talking about?”
“It’s not about a “big difference.” It’s about a difference”

I’m just asking questions, and trying to get to the bottom of a problem I have. I admit I am not an expert and this is the reason for posting questions, which may seem to certain people as yourself dumb, but it’s the purpose of a forum.

To help clarify. What I meant by “I suppose I’ve fallen victim to there being a big version difference of GLSL 1.4–>3.3 for GL versions of 3–>3.3.” is that, in my humble opinion and I hope I don’t get shouted at for saying, is that in going from minor increments of GL from 3.0–>3.3 there are major increments in GLSL from 1.4–>3.3. My card appears to support GL 3.3 but only GLSL 1.4. I do think it is a problem when trying to work with GLSL 3.3+.

The original post indicated that I was attempting to provide Java ports of the “Modern OpenGL Prog” tutorials, which all use GLSL version 330.

Graham

My card appears to support GL 3.3 but only GLSL 1.4.

You seem to not understand the difference between what your hardware can do, and what the current drivers installed on your machine allow it to do.

The Mobility HD-5xxx series is capable of OpenGL 4.2. But if your drivers are from 2+ years ago, then it will have less support, because the drivers are not prescient. They cannot implement an OpenGL version that didn’t exist when they were written.

Your drivers only expose GL 3.1. Therefore, unless you change those drivers, you’re not getting anything more than 3.1 support from that hardware.

So I ask again, “Have you tried to upgrade your graphics drivers?”

@Alfonse

You ask:

“So I ask again, “Have you tried to upgrade your graphics drivers?””

In post #306419 of the same thread I stated:

“I updated the Nvidia display driver on my desktop from 258.49 to 276.28, with Nvidia stating on their website that the latest driver supports OpenGL4+. However, even after the update I get the same GL and GLSL versions as before the driver update.”

I have been unable to update the driver on my laptop as I can’t find an updated driver on ATI’s website. If you know how I can get hold of a driver for the Mobility HD-5xxx that would realise OpenGL4.2 and GLSL3.3+ then I would much appreciate a reply.

Presumably, the recommended way is via the manufacturer’s website:

http://support.amd.com/us/gpudownload/Pages/index.aspx

Select: Notebook Graphics | Radeon HD Series | Mobility HD-5xxx | Windows 7 64-bit

I am then faced with a page titled “AMD Catalyst™ Display Driver”. Selecting the download button generates an error along the lines of “…software incompatible with machine…”, which is just rubbish.

Incidentally, I already have the Catalyst Suite installed on my laptop as it was shipped with the ATI card and the software was pre-installed. If I run the pre-installed Catalyst suite from the laptop and select the update option I am taken to the above website, and hence come full circle.

It states on ATI’s website [http://support.amd.com/us/kbarticles/Pages/737-28041SupportforATIMobility.aspx]:

"The laptop manufacturer is the exclusive provider of driver updates. If the required driver update is not available at the laptop manufacturers’ website, it is recommended to contact the manufacturer directly, to see if an updated driver will be made available in the future.

AMD cannot provide driver updates directly from AMD.com."

Thus, I’m retrying to obtain the latest driver from Sony’s support.

Graham