Part of the Khronos Group
OpenGL.org

The Industry's Foundation for High Performance Graphics

from games to virtual reality, mobile phones to supercomputers

Results 1 to 8 of 8

Thread: glutCreateWindow causes GLX 153 error

  1. #1
    Junior Member Newbie
    Join Date
    Apr 2016
    Posts
    14

    glutCreateWindow causes GLX 153 error

    Hi there,

    I have the following code to run some simple OpenGL applications from an ARM Cortex A9 running Ubuntu 12.04 LTS.

    When I compile and run the code I get the following GLX error:

    Code :
    X Error of failed request: BadRequest (Invalid request code or no such operation)
    Major opcode of failed request: 153 (GLX)
    Minor opcode of failed request: 34 ()
    Serial number of failed request: 36
    Current serial number in output stream: 35

    Running this with debug symbols on only adds the following line `[Inferior 1 (process 1569) exited with code 01]` which is not really helpful.

    Code for Main loop:

    Code :
    #include "CubeTexture.h"
     
    #ifdef _WIN32 
    #include <BasicEngine\Engine.h>
    #elif __linux__ 
    #include "../BasicEngine/Engine.h"
    #include <pthread.h>
    #endif
     
     
    using namespace BasicEngine;
     
    #ifdef __linux__ 
    void junk() {
    	int i;
    	i = pthread_getconcurrency();
    }
     
    #endif
     
    int main(int argc, char **argv)
    {
     
    	Engine* engine = new Engine();
    	engine->Init();
     
    	engine->GetShader_Manager()->CreateProgram("cubeShader", "./Shaders/Cube_Vertex_Shader.glsl", "./Shaders/Cube_Fragment_Shader.glsl");
     
    	CubeTexture* cube = new CubeTexture();
    	int program = engine->GetShader_Manager()->GetShader("cubeShader");
    	if (program != 0)
    	{
    		cube->SetProgram(program);
    		cube->Create();	
    	}
    	else
    	{
    		std::cout << "invalid program..."; std::cin.get();
    	}
     
    	unsigned int texture = engine->GetTexture_Loader()->LoadTexture("./Textures/Crate.bmp", 256, 256);
    	cube->SetTexture("Create", texture);
     
    	engine->GetModels_Manager()->SetModel("cube", cube);
     
    	engine->Run();
     
    	delete engine;
    	return (0);
    }


    Code to initialise GLUT and X Windows environment

    Code :
    #include "../Init/Init_GLUT.h"
     
    using namespace BasicEngine;
    using namespace Core::Init;
     
    Core::IListener* Init_GLUT::listener = NULL;
    Core::WindowInfo Init_GLUT::windowInformation;
     
    #ifdef __linux__ 
     
    Display					*dpy;
    Window					root;
    GLint					att[] = { GLX_RGBA, GLX_DEPTH_SIZE, 24, GLX_DOUBLEBUFFER, None };
    XVisualInfo				*vi;
    Colormap				cmap;
    XSetWindowAttributes	swa;
    Window					win;
    GLXContext				glc;
    XWindowAttributes		gwa;
    XEvent					xev;
     
     
    void Init_GLUT::linuxinit()
    {
     
    	std::cout << "Linux detected" << std::endl;
     
    	dpy = XOpenDisplay(NULL);
     
    	if (dpy == NULL)
    	{
    		std::cout << "\n\tcannont connect to X server\n\n" << std::endl;
    		exit(0);
    	}
     
    	root = DefaultRootWindow(dpy);
     
    	vi = glXChooseVisual(dpy, 0, att);
     
    	if (vi == NULL)
    	{
    		std::cout << "\n\tno appropriate visual found\n\n" << std::endl;
    		exit(0);
    	}
    	else
    	{
    		std::cout << "\n\tvisual %p selected\n" << (void*)vi->visualid << std::endl; //%p creates a hexadecimal output like in glxinfo
    	}
     
    	cmap = XCreateColormap(dpy, root, vi->visual, AllocNone);
     
    	swa.colormap = cmap;
    	swa.event_mask = ExposureMask | KeyPressMask;
     
    	std::cout << "end of Linux compiler directives" << std::endl;
     
     
     
    }
     
    #endif
     
    void Init_GLUT::init(const Core::WindowInfo& windowInfo,
    	const Core::ContextInfo& contextInfo,
    	const Core::FramebufferInfo& framebufferInfo)
    {
     
    	windowInformation = windowInfo;
     
    	//i need some fake things here
    	int fakeargc = 1;
    	char* fakeargv[] = { "fake", NULL };
    	glutInit(&fakeargc, fakeargv);
     
    	if (contextInfo.core)
    	{	
    		glutInitContextVersion(contextInfo.major_version,
    			contextInfo.minor_version);
    		glutInitContextProfile(GLUT_CORE_PROFILE);
    	}
    	else
    	{
    		//doesn't matter in compatibility mode 
    		glutInitContextProfile(GLUT_COMPATIBILITY_PROFILE);
    	}
     
    	//old functions from main.cpp now using info from the structures 
    	glutInitDisplayMode(framebufferInfo.flags);
    	glutInitWindowPosition(windowInfo.position_x, windowInfo.position_y);
    	glutInitWindowSize(windowInfo.width, windowInfo.height);
     
    	glutCreateWindow(windowInfo.name.c_str());
     
    	std::cout << "GLUT:initialised" << std::endl;
     
    #ifdef _WIN32
    	//Lets add some debug capability
    	glEnable(GL_DEBUG_OUTPUT);
    #endif
     
    	Core::Init::Init_GLEW::Init();
     
    #ifdef _WIN32
    	glDebugMessageCallback(DebugOutput::myCallback, NULL);
    	glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0, NULL, TRUE);
    #endif
     
    	//these call backs are used for rendering 
    	glutIdleFunc(idleCallback);
    	glutCloseFunc(closeCallback);
    	glutDisplayFunc(displayCallback);
    	glutReshapeFunc(reshapeCallback);
     
    	//init GLEW (can be called in main.cpp)
     
     
    	//clean up 
    	glutSetOption(GLUT_ACTION_ON_WINDOW_CLOSE, GLUT_ACTION_GLUTMAINLOOP_RETURNS);
     
    	//our method to display some info. Needs contextInfo and windowinfo 
    	printOpenGLInfo(windowInfo, contextInfo);
    }
     
    //starts the rendering loop 
    void Init_GLUT::run()
    {
    	std::cout << "GLUT:\t Start Running" << std::endl;
    	glutMainLoop();
    }
     
    void Init_GLUT::close()
    {
    	std::cout << "GLUT:\t Finished" << std::endl;
    	glutLeaveMainLoop;
    }
     
    void Init_GLUT::idleCallback(void)
    {
    	//do nothing, just redisplay
    	glutPostRedisplay();
    }
     
    void Init_GLUT::displayCallback()
    {
    	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    	glClearColor(0.0, 0.0, 0.0, 1);
    	glutSwapBuffers;
     
    	if (listener)
    	{	
    		listener->notifyBeginFrame();
    		listener->notifyDisplayFrame();
     
    		glutSwapBuffers();
     
    		listener->notifyEndFrame();
    	}
     
    }
     
    void Init_GLUT::reshapeCallback(int width, int height)
    {
    	if (windowInformation.isReshapable == true)
    	{
    		if (listener)
    		{
    			listener->notifyReshape(width, height, windowInformation.width, windowInformation.height);
    		}
    		windowInformation.width = width;
    		windowInformation.height = height;
    	}
    }
     
    void Init_GLUT::closeCallback()
    {
    	close();
    }
     
    void Init_GLUT::enterFullscreen()
    {
    	glutFullScreen();
    }
     
    void Init_GLUT::exitFullscreen()
    {
    	glutLeaveFullScreen();
    }
     
    void Init_GLUT::setListener(Core::IListener* iListener)
    {
    	listener = iListener;
    }
     
    void Init_GLUT::printOpenGLInfo(const Core::WindowInfo& windowInfo,
    	const Core::ContextInfo& contextInfo)
    {
    	const unsigned char* renderer = glGetString(GL_RENDERER);
    	const unsigned char* vendor = glGetString(GL_VENDOR);
    	const unsigned char* version = glGetString(GL_VERSION);
     
    	std::cout << "*******************************************************************************" << std::endl;
     
    	std::cout << "GLUT:\tVendor : " << vendor << std::endl;
    	std::cout << "GLUT:\tRenderer : " << renderer << std::endl;
    	std::cout << "GLUT:\tOpenGl version: " << version << std::endl;
    	std::cout << "GLUT:\tInitial window is '" << windowInfo.name << "', with dimensions (" << windowInfo.width
    		<< "X" << windowInfo.height;
    	std::cout << ") starts at (" << windowInfo.position_x << "X" << windowInfo.position_y;
    	std::cout << ") and " << ((windowInfo.isReshapable) ? "is" : "is not ") << " redimensionable" << std::endl;
    	std::cout << "GLUT:\tInitial Framebuffer contains double buffers for" << std::endl;
     
    	std::cout << "GLUT:\t OpenGL context is " << contextInfo.major_version << "." << contextInfo.minor_version;
    	std::cout << " and profile is " << ((contextInfo.core) ? "core" : "compatibility") << std::endl;
     
    	std::cout << "*****************************************************************" << std::endl;
    }

    Does anybody know what is causing the error? Or indeed what BadRequest, Major Opcode 153 (GLX) actually refers to?

    Cheers

  2. #2
    Senior Member OpenGL Guru
    Join Date
    Jun 2013
    Posts
    2,480
    Quote Originally Posted by LastHorizon View Post
    When I compile and run the code I get the following GLX error:
    Code :
    X Error of failed request: BadRequest (Invalid request code or no such operation)
    Major opcode of failed request: 153 (GLX)
    Minor opcode of failed request: 34 ()
    Minor opcode 34 is X_GLXCreateContextAttribsARB.

    Quote Originally Posted by LastHorizon View Post
    Code to initialise GLUT and X Windows environment

    Code :
    	if (contextInfo.core)
    	{	
    		glutInitContextVersion(contextInfo.major_version,
    			contextInfo.minor_version);
    		glutInitContextProfile(GLUT_CORE_PROFILE);
    	}
    	else
    	{
    		//doesn't matter in compatibility mode 
    		glutInitContextProfile(GLUT_COMPATIBILITY_PROFILE);
    	}
    It appears that your OpenGL implementation doesn't support glXCreateContextAttribsARB(), which is what GLUT uses if you request a specific version or profile.

    FreeGLUT doesn't actually check for the extension when using GLX (although it does for WGL). It checks that the function pointer is non-NULL, but that only tests whether the client library provides the function, not whether the server supports the operation.

    Quote Originally Posted by LastHorizon View Post
    Does anybody know what is causing the error? Or indeed what BadRequest, Major Opcode 153 (GLX) actually refers to?
    Major opcodes are dynamically assigned to X extensions. In this case, it tells you that 153 is assigned to GLX. Minor opcodes are determined by the extension and indicate the actual operation. For GLX, 34 is X_GLXCreateContextAttribsARB (you can find the symbolic names in GL/glxproto.h).

    In short, you'll need to add an option to control whether to use glutInitContextVersion() and glutInitContextProfile(). Otherwise it will fail in this manner if the X server doesn't support the extension.

  3. #3
    Junior Member Newbie
    Join Date
    Apr 2016
    Posts
    14
    Quote Originally Posted by GClements View Post

    In short, you'll need to add an option to control whether to use glutInitContextVersion() and glutInitContextProfile(). Otherwise it will fail in this manner if the X server doesn't support the extension.
    I tried forcing GLUT to initialise using `glutInitContextProfile(GLUT_COMPATIBILITY_MODE);` and I got the same error as before. I think I am misunderstanding your answer. What do I need to do when initialising glut to avoid returning this error. Apologies for my lack of understanding. I suppose the other way to avoid this error is to not use GLUT at all, but this does feel like re-inventing the wheel...

    Does the paid for version of GLUT offer more support in checking what is compatible and what isn't?

    ************EDIT**************

    it appears that OpenGL2.1 doesn't support glutInitContextVersion() and glutInitContextProfile(). What commands do I need to use instead of these that are compatible with OpenGL2.1?
    Last edited by LastHorizon; 05-25-2016 at 06:29 AM.

  4. #4
    Senior Member OpenGL Guru
    Join Date
    Jun 2013
    Posts
    2,480
    Quote Originally Posted by LastHorizon View Post
    I tried forcing GLUT to initialise using `glutInitContextProfile(GLUT_COMPATIBILITY_MODE);` and I got the same error as before. I think I am misunderstanding your answer. What do I need to do when initialising glut to avoid returning this error.
    Don't call glutInitContextProfile(). You'll just have to accept whichever version and profile you get by default.

    Quote Originally Posted by LastHorizon View Post
    Does the paid for version of GLUT offer more support in checking what is compatible and what isn't?
    There is no paid-for version of GLUT.

    The "Free" in "FreeGLUT" refers to the fact that its licence is less restrictive than that of the original GLUT library (which didn't permit distribution of modified versions).

  5. #5
    Junior Member Newbie
    Join Date
    Apr 2016
    Posts
    14
    Ah okay, I understand now. So by removing those calls I seem to manage to set up Open GL as 2.1. Here is the output from my terminal:

    Code :
    *******************************************************************************
    GLUT:	Vendor : Mesa Project
    GLUT:	Renderer : Software Rasterizer
    GLUT:	OpenGl version: 2.1 Mesa 8.0.2
    GLUT:	Initial window is 'in2gpu OpenGL Chapter 2 tutorial', with dimensions (800X600) starts at (400X200) and is redimensionable
    GLUT:	Initial Framebuffer contains double buffers for
    GLUT:	 OpenGL context is 2.1 and profile is compatibility
    *****************************************************************

    However my shaders now don't seem to compile. I started learning OpenGL recently so only really understand stuff that's supported in version 4.X

    I get the following errors from my shaders:

    Code :
    ERROR compiling shader:vertex shader0:2(1): error: syntax error, unexpected IDENTIFIER
     
    ERROR compiling shader:fragment shader0:4(1): error: syntax error, unexpected IDENTIFIER
     
    GLUT:	 Start Running
    Segmentation fault (core dumped)

    And my shader code looks like this

    Vertex Shader
    Code :
    #version 120
    layout(location = 0) in vec3 in_position;
    layout(location = 1) in vec2 in_texture;
     
    uniform mat4 projection_matrix, view_matrix;
    uniform vec3 rotation;
     
    out vec2 texcoord; 
     
    void main(void)
    {
    	texcoord = in_texture;
    	mat4 rotate_x, rotate_y, rotate_z;
     
     
        rotate_x = mat4(1.0, 0.0, 0.0, 0.0,
                        0.0, cos(rotation.x), sin(rotation.x), 0.0,
                        0.0, -sin(rotation.x), cos(rotation.x), 0.0,
                        0.0, 0.0, 0.0, 1.0);
     
        rotate_y = mat4(cos(rotation.y), 0.0, -sin(rotation.y), 0.0,
                        0.0, 1.0, 0.0, 0.0,
                        sin(rotation.y), 0.0, cos(rotation.y), 0.0,
                        0.0, 0.0, 0.0, 1.0);
     
        rotate_z = mat4(cos(rotation.z), -sin(rotation.z), 0.0, 0.0,
                        sin(rotation.z), cos(rotation.z), 0.0, 0.0,
                        0.0, 0.0, 1.0, 0.0,
                        0.0, 0.0, 0.0, 1.0);
     
        gl_Position = projection_matrix * view_matrix *
                          rotate_y * rotate_x *rotate_z * vec4(in_position, 1);
     
    }

    Fragment shader

    Code :
    //Cube_Fragment_Shader.glsl
    #version 120
     
    layout(location = 0) out vec4 out_color;
     
    uniform sampler2D texture1;
     
    in vec2 texcoord;
    void main()
    {
        vec4 color = texture(texture1, texcoord);
        out_color = color;
    }


    I guess the errors originate from the #version lines, i believe #version 120 is what i need for openGL 2.1? But does it need something else?

    Cheers

  6. #6
    Senior Member OpenGL Guru
    Join Date
    Jun 2013
    Posts
    2,480
    Quote Originally Posted by LastHorizon View Post
    Vertex Shader

    Code :
    #version 120
    layout(location = 0) in vec3 in_position;
    layout(location = 1) in vec2 in_texture;
     
    uniform mat4 projection_matrix, view_matrix;
    uniform vec3 rotation;
     
    out vec2 texcoord;
    Quote Originally Posted by LastHorizon View Post
    Fragment shader

    Code :
    //Cube_Fragment_Shader.glsl
    #version 120
     
    layout(location = 0) out vec4 out_color;
     
    uniform sampler2D texture1;
     
    in vec2 texcoord;
    The main differences between GLSL 1.2 and later versions are that 1.2

    - Doesn't support layout qualifiers. You need to either set the attribute location in the client code with glBindAttribLocation() or allow the implementation to choose locations and query them with glGetAttribLocation().
    - Uses "attribute" for vertex shader inputs rather than "in".
    - Uses "varying" for variables which communicate data between the vertex shader and fragment shader, rather than using "out" in the vertex shader and "in" in the fragment shader.
    - Doesn't support user-defined fragment shader outputs. For a single colour buffer use gl_FragColor; for multiple colour buffers use gl_FragData[n].

    Also, it doesn't support the overloaded texture() function; you need to use a variant whose name depends upon the sampler type, e.g. texture2D() for sampler2D.

  7. #7
    Junior Member Newbie
    Join Date
    Apr 2016
    Posts
    14
    Awesome! That has mostly worked!

    I haven't managed to get it to load a texture to all faces of my cube yet though, I guess that's a question of finding the correct texture loading command though. At the moment if I use texture2D() it loads a single 2D texture with the incorrect colouring. It should be a crate but instead I get a brown square.

    I'm curious as to whether the co-ordinate system has changed between 2.1 and 4.X, when I ran my code previously I had my cube rotating around the centre of the object, now it rotates around the bottom vertex of the object.

    The colour gradient is also a lot coarser than before, but I feel that is more to do with technology limitations than anything else. However if I'm barking up the wrong tree, please let me know!

  8. #8
    Senior Member OpenGL Guru
    Join Date
    Jun 2013
    Posts
    2,480
    Quote Originally Posted by LastHorizon View Post
    I haven't managed to get it to load a texture to all faces of my cube yet though, I guess that's a question of finding the correct texture loading command though. At the moment if I use texture2D() it loads a single 2D texture with the incorrect colouring. It should be a crate but instead I get a brown square.
    Are you using a 2D texture for each face, or a cube map?

    A cube map is created by calling glBindTexture(GL_TEXTURE_CUBE_MAP) then glTexImage2D(GL_TEXTURE_CUBE_MAP_*) for each of the six faces, and accessed in the shader via a samplerCube uniform and the textureCube() function (which takes the coordinates via a vec3).

    Quote Originally Posted by LastHorizon View Post
    I'm curious as to whether the co-ordinate system has changed between 2.1 and 4.X, when I ran my code previously I had my cube rotating around the centre of the object, now it rotates around the bottom vertex of the object.
    When using shaders, coordinate systems are entirely up to the program. Vertex attributes will be passed to the vertex shader as-is.

Tags for this Thread

Posting Permissions

  • You may not post new threads
  • You may not post replies
  • You may not post attachments
  • You may not edit your posts
  •