Single-buffered GLX window?

Hi,

I’m writing a GLX app. It works fine if I choose a double-buffered FBConfig, but fails with
X Error of failed request: BadMatch (invalid parameter attributes)
Major opcode of failed request: 1 (X_CreateWindow)
Serial number of failed request: 13
Current serial number in output stream: 15
for single-buffered configs. Here’s my code:

#include <iostream>
using namespace std;

#include <X11/Xlib.h>
#include <GL/glx.h>

Display* g_display;

int main(int argc, char* argv[])
{
  g_display = XOpenDisplay(NULL);
  if(g_display == NULL)
  {
    cerr << "Failed to open display" << endl;
    return 1;
  }

  int errorBase, eventBase;
  if(!glXQueryExtension(g_display, &errorBase, &eventBase))
  {
    cerr << "Failed to query glx" << endl;
    return 1;
  }

  int major, minor;
  if(!glXQueryVersion(g_display, &major, &minor)

Half the code was missing :~
int major, minor;
if(!glXQueryVersion(g_display, &major, &minor)

Hm, seems like this web interface has problems with my code :(

try this:

#include <X11/Xlib.h>
#include <GL/glx.h>
#include <GL/glu.h>
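
// Note: the globals below make Xlib/GLX calls during startup, which only
// works because this file is compiled as C++ (dynamic initialization of
// globals); there is no error checking on XOpenDisplay or glXChooseVisual.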

Display *dpy = XOpenDisplay(NULL);
Window root = DefaultRootWindow(dpy);
GLint att[] = {GLX_RGBA, None};
XVisualInfo *vi = glXChooseVisual(dpy, 0, att);
GLXContext glc = glXCreateContext(dpy, vi, NULL, False);
Visual *vis = DefaultVisual(dpy, 0);
Colormap cmap = XCreateColormap(dpy, root, vis, AllocNone);
unsigned int w = XDisplayWidth(dpy, 0) / 2;
unsigned int h = XDisplayHeight(dpy, 0) / 2;
int dep = DefaultDepth(dpy, 0);
int cmask = CWColormap | CWBorderPixel | CWEventMask;
int emask = ExposureMask;
XEvent xev;
XSetWindowAttributes swa;
XWindowAttributes gwa;
Window win;

int main(int argc, char *argv[]){

  swa.colormap = cmap;
  swa.border_pixel = 0;
  swa.event_mask = emask;
  win = XCreateWindow(dpy, root, 0, 0, 400, 400, 0, dep, InputOutput, vis, cmask, &swa);
  XStoreName(dpy, win, "SIMPLE GL QUAD");
  XMapWindow(dpy, win);

  glXMakeCurrent(dpy, win, glc);
  glClearColor(0.00, 0.00, 0.60, 1.00);

  glMatrixMode(GL_PROJECTION);
  glLoadIdentity();
  glOrtho(-1., 1., -1., 1., 1., 100.);

  glMatrixMode(GL_MODELVIEW);
  glLoadIdentity();
  gluLookAt(0., 0., 10., 0., 0., 0., 0., 1., 0.);

  while(1) {
    XNextEvent(dpy, &xev);

    if(xev.type == Expose) {
      XGetWindowAttributes(dpy, win, &gwa);
      w = gwa.width;
      h = gwa.height;

      glViewport(0, 0, w, h);
      glClear(GL_COLOR_BUFFER_BIT);

      glBegin(GL_QUADS);
        glColor3f(1., 0., 0.); glVertex3f(-.75, -.75, 0.);
        glColor3f(0., 1., 0.); glVertex3f( .75, -.75, 0.);
        glColor3f(0., 0., 1.); glVertex3f( .75,  .75, 0.);
        glColor3f(1., 1., 0.); glVertex3f(-.75,  .75, 0.);
      glEnd();

      glFlush();
    }
  }
}

//
// gcc -I/usr/X11R6/include/ -L/usr/X11R6/lib -o SimpleQuad SimpleQuad.cc -lX11 -lGL -lGLU
//

Hi, thanks, but the above code uses GLX 1.2 and I want to use 1.3. Here is my code again (this time hopefully complete):

#include <iostream>
using namespace std;

#include <X11/Xlib.h>
#include <GL/glx.h>

Display* g_display;

int main(int argc, char* argv[])
{
  g_display = XOpenDisplay(NULL);
  if(g_display == NULL)
  {
    cerr << "Failed to open display" << endl;
    return 1;
  }

  int errorBase, eventBase;
  if(!glXQueryExtension(g_display, &errorBase, &eventBase))
  {
    cerr << "Failed to query glx" << endl;
    return 1;
  }

  int major, minor;
  if(!glXQueryVersion(g_display, &major, &minor)
      || major < 1 || (major == 1 && minor < 3))
  {
    cerr << "glx 1.3 required, only " << major << "."
         << minor << " found." << endl;
    return 1;
  }

  int num;
  int attribs[] = { GLX_DRAWABLE_TYPE, GLX_WINDOW_BIT,
                    GLX_DOUBLEBUFFER, False,
                    //GLX_X_RENDERABLE, True,
                    //GLX_RED_SIZE, 1, GLX_GREEN_SIZE, 1, GLX_BLUE_SIZE, 1,
                    GLX_RENDER_TYPE, GLX_RGBA_BIT,
                    None };
  
  GLXFBConfig* configs = glXChooseFBConfig(g_display,
      XDefaultScreen(g_display), attribs, &num);
  if(configs == NULL || num < 1)
  {
    cerr << "No config found" << endl;
    return 1;
  }
  GLXFBConfig config = configs[0];
  XFree(configs);
    
  XVisualInfo* vis = glXGetVisualFromFBConfig(g_display, config);
  if(vis == NULL)
  {
    cerr << "Couldn't get visual" << endl;
    return 1;
  }


  int x = 100, y = 100, wid = 640, hyt = 150;
  XSetWindowAttributes swa;
  swa.event_mask = ExposureMask | StructureNotifyMask;
  Visual* t = XDefaultVisual(g_display, 0);
  Window win = XCreateWindow(g_display, XRootWindow(g_display, vis->screen),
      x, y, wid, hyt, 0, vis->depth, InputOutput, vis->visual,
      CWEventMask, &swa);

  GLXWindow glXWin = glXCreateWindow(g_display, config, win, NULL);
      
  GLXContext glXContext = glXCreateNewContext(g_display, config,
      GLX_RGBA_TYPE, NULL, True);
  if(glXContext == NULL)
  {
    cerr << "Failed to create glXContext" << endl;
    return 1;
  }
  

  XMapWindow(g_display, win);
  glXMakeContextCurrent(g_display, glXWin, glXWin, glXContext);

  Atom a = XInternAtom(g_display, "WM_DELETE_WINDOW", False);
  XSetWMProtocols(g_display, win, &a, 1);
  

  bool done = false;
  XEvent event;
  while(!done)
  {
    XNextEvent(g_display, &event);
    switch(event.type)
    {
      case Expose:
        if(event.xexpose.count > 1)
          break;
        glClear(GL_COLOR_BUFFER_BIT);
        glFlush();
        glXSwapBuffers(g_display, glXWin);
        break;
      case ClientMessage:
        done = true;
        break;
    }
  }

  glXMakeContextCurrent(g_display, None, None, NULL);
  glXDestroyContext(g_display, glXContext);
  glXDestroyWindow(g_display, glXWin);
  XCloseDisplay(g_display);
}

When I run the program, I get a BadMatch error from X.

At what point, exactly, do you get the BadMatch? I looked at your code pretty closely, and it looks correct. Of course, not that that helps any… :(
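
One way to pin it down (just a sketch, not tested against your code): make Xlib synchronous and install an error handler, so the BadMatch is reported at the exact call that triggers it instead of later in the output stream:

static int xErrorHandler(Display* d, XErrorEvent* e)
{
  char text[256];
  XGetErrorText(d, e->error_code, text, sizeof(text));
  cerr << "X error: " << text
       << " (request code " << (int)e->request_code << ")" << endl;
  return 0;
}

// right after XOpenDisplay succeeds:
XSetErrorHandler(xErrorHandler);
XSynchronize(g_display, True);  // errors now surface at the offending call

Request code 1 would confirm it’s the X_CreateWindow call, like your error output says.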

In your XCreateWindow call, replace vis->visual with t (the default visual).

By the way, it doesn’t make much sense to use glXSwapBuffers with a single-buffered context…
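
If you want one code path that handles both kinds of configs, you could ask the FBConfig whether it actually has a back buffer. A sketch, assuming the config and glXWin variables from your code above:

// at setup, after picking the config:
int dbuf = False;
glXGetFBConfigAttrib(g_display, config, GLX_DOUBLEBUFFER, &dbuf);

// in the Expose handler:
if(dbuf)
  glXSwapBuffers(g_display, glXWin);
else
  glFinish();  // single-buffered: just make sure rendering reaches the screen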

You are not creating a color map. Check your XCreateWindow attributes.

Hi,

thanks for the colormap tip, now it works (I inserted the following snippet into the code above). However, why can’t I simply use XDefaultColormap(g_display, vis->screen)? Why do I have to create a new colormap? And what is a colormap, anyway? I’m using 32-bit color, after all, so it can’t be some kind of palette.

  swa.colormap = XCreateColormap(g_display, XRootWindow(g_display, vis->screen),
      vis->visual, AllocNone); //XDefaultColormap(g_display, vis->screen);
  Visual* t = XDefaultVisual(g_display, 0);
  //Window win = XCreateWindow(g_display, XRootWindow(g_display, vis->screen),
  //    x, y, wid, hyt, 0, vis->depth, InputOutput, vis->visual,
  //    CWEventMask, &swa);
  Window win = XCreateWindow(g_display, XRootWindow(g_display, vis->screen),
      x, y, wid, hyt, 0, 24, InputOutput, vis->visual, //CopyFromParent,
      CWEventMask | CWColormap, &swa);

TIA,
Nico

That’s a good question. For a better explanation than I can give, go here: http://users.actcom.co.il/~choo/lupg/tutorials/xlib-programming/xlib-programming-2.html

Search for “color maps”.

I think the reason you can’t use the screen’s color map is access rights: you probably need read/write access. Whether color maps are still used internally even for true-color displays, I don’t know. Would be interesting to hear.
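
If you’re curious what kind of visual you actually got, you can check its class — a quick sketch using the vis pointer from your code above (the XVisualInfo field is named c_class when the headers are used from C++; in plain C it is just class):

if(vis->c_class == TrueColor)
  cerr << "TrueColor: pixel values encode RGB directly; "
       << "the colormap is effectively read-only" << endl;
else if(vis->c_class == PseudoColor)
  cerr << "PseudoColor: pixel values index a writable palette" << endl;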
