Quadro K600 X_GLXCreateContext issue

If I connect to another Linux machine via ssh -Y
and start any program that uses OpenGL contexts, I get an error like this:

> ssh -Y other
me@other:~$ glxinfo 
name of display: localhost:12.0
X Error of failed request:  BadValue (integer parameter out of range for operation)
  Major opcode of failed request:  154 (GLX)
  Minor opcode of failed request:  3 (X_GLXCreateContext)
  Value in failed request:  0x0
  Serial number of failed request:  27
  Current serial number in output stream:  30

If I use the Qt routine QGLWidget::renderPixmap in any program, I get the same error:

X Error: BadValue (integer parameter out of range for operation) 2
  Extension:    154 (Unknown extension)
  Minor opcode: 3 (Unknown request)
  Resource id:  0x

Is this a limitation of the card? I know that this works fine on other computers with other cards, but on this computer it did not work with any Linux distribution or driver version I tried.

nvidia-bug-report.log.gz (183 KB)

I found out that the tool glxpixmap produces the same error, so I modified it to print some information about the visual before the error occurs.

Here is the C code of glxpixmap from Debian mesa-utils:

/*
 * A demonstration of using the GLXPixmap functions.  This program is in
 * the public domain.
 *
 * Brian Paul
 */


#include <GL/gl.h>
#define GLX_GLXEXT_PROTOTYPES
#include <GL/glx.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>


static GLXContext ctx;
static XVisualInfo *visinfo;
static GC gc;



static Window make_rgb_window( Display *dpy,
                                  unsigned int width, unsigned int height )
{
   const int sbAttrib[] = { GLX_RGBA,
                            GLX_RED_SIZE, 1,
                            GLX_GREEN_SIZE, 1,
                            GLX_BLUE_SIZE, 1,
                            None };
   const int dbAttrib[] = { GLX_RGBA,
                            GLX_RED_SIZE, 1,
                            GLX_GREEN_SIZE, 1,
                            GLX_BLUE_SIZE, 1,
                            GLX_DOUBLEBUFFER,
                            None };
   int scrnum;
   XSetWindowAttributes attr;
   unsigned long mask;
   Window root;
   Window win;

   scrnum = DefaultScreen( dpy );
   root = RootWindow( dpy, scrnum );

   visinfo = glXChooseVisual( dpy, scrnum, (int *) sbAttrib );
   if (!visinfo) {
      visinfo = glXChooseVisual( dpy, scrnum, (int *) dbAttrib );
      if (!visinfo) {
         printf("Error: couldn't get an RGB visual\n");
         exit(1);
      }
   }

   /* window attributes */
   attr.background_pixel = 0;
   attr.border_pixel = 0;
   /* TODO: share root colormap if possible */
   attr.colormap = XCreateColormap( dpy, root, visinfo->visual, AllocNone);
   attr.event_mask = StructureNotifyMask | ExposureMask;
   mask = CWBackPixel | CWBorderPixel | CWColormap | CWEventMask;

   win = XCreateWindow( dpy, root, 0, 0, width, height,
                        0, visinfo->depth, InputOutput,
                        visinfo->visual, mask, &attr );

   /* make an X GC so we can do XCopyArea later */
   gc = XCreateGC( dpy, win, 0, NULL );
   printf("Vis %d %d %lx %lx %lx %d %d\n",
   visinfo->screen,
   visinfo->depth,
   visinfo->red_mask,
   visinfo->green_mask,
   visinfo->blue_mask,
   visinfo->colormap_size,
   visinfo->bits_per_rgb);
  printf("%d %p %p\n",__LINE__,dpy,visinfo);
   /* need indirect context */
   ctx = glXCreateContext( dpy, visinfo, NULL, False );
   if (!ctx) {
      printf("Error: glXCreateContext failed\n");
      exit(-1);
   }

   printf("Direct rendering: %s\n", glXIsDirect(dpy, ctx) ? "Yes" : "No");

   return win;
}


static GLXPixmap make_pixmap( Display *dpy, Window win,
                               unsigned int width, unsigned int height,
                               Pixmap *pixmap)
{
   Pixmap pm;
   GLXPixmap glxpm;
   XWindowAttributes attr;

   pm = XCreatePixmap( dpy, win, width, height, visinfo->depth );
   if (!pm) {
      printf("Error: XCreatePixmap failed\n");
      exit(-1);
   }

   XGetWindowAttributes( dpy, win, &attr );

   /*
    * IMPORTANT:
    *   Use the glXCreateGLXPixmapMESA function when using Mesa because
    *   Mesa needs to know the colormap associated with a pixmap in order
    *   to render correctly.  This is because Mesa allows RGB rendering
    *   into any kind of visual, not just TrueColor or DirectColor.
    */
#ifdef GLX_MESA_pixmap_colormap
   if (strstr(glXQueryExtensionsString(dpy, 0), "GLX_MESA_pixmap_colormap")) {
      /* stand-alone Mesa, specify the colormap */
      PFNGLXCREATEGLXPIXMAPMESAPROC glXCreateGLXPixmapMESA_func =
         (PFNGLXCREATEGLXPIXMAPMESAPROC)
         glXGetProcAddressARB((GLubyte *) "glXCreateGLXPixmapMESA");
      glxpm = glXCreateGLXPixmapMESA_func( dpy, visinfo, pm, attr.colormap );
   }
   else {
      glxpm = glXCreateGLXPixmap( dpy, visinfo, pm );
   }
#else
   /* This will work with Mesa too if the visual is TrueColor or DirectColor */
   glxpm = glXCreateGLXPixmap( dpy, visinfo, pm );
#endif

   if (!glxpm) {
      printf("Error: GLXCreateGLXPixmap failed\n");
      exit(-1);
   }

   *pixmap = pm;

   return glxpm;
}



static void event_loop( Display *dpy, GLXPixmap pm )
{
   XEvent event;

   while (1) {
      XNextEvent( dpy, &event );

      switch (event.type) {
         case Expose:
            printf("Redraw\n");
            /* copy the image from GLXPixmap to window */
            XCopyArea( dpy, pm, event.xany.window,  /* src, dest */
                       gc, 0, 0, 300, 300,          /* gc, src pos, size */
                       0, 0 );                      /* dest pos */
            break;
         case ConfigureNotify:
            /* nothing */
            break;
      }
   }
}



int main( int argc, char *argv[] )
{
   Display *dpy;
   Window win;
   Pixmap pm;
   GLXPixmap glxpm;
   printf("%d\n",__LINE__);
   dpy = XOpenDisplay(NULL);
   printf("%d %p\n",__LINE__,dpy);

   win = make_rgb_window( dpy, 300, 300 );
   printf("%d\n",__LINE__);
   glxpm = make_pixmap( dpy, win, 300, 300, &pm );
   printf("%d\n",__LINE__);

   glXMakeCurrent( dpy, glxpm, ctx );
   printf("%d\n",__LINE__);
   printf("GL_RENDERER = %s\n", (char *) glGetString(GL_RENDERER));

   /* Render an image into the pixmap */
   glShadeModel( GL_FLAT );
   glClearColor( 0.5, 0.5, 0.5, 1.0 );
   glClear( GL_COLOR_BUFFER_BIT );
   glViewport( 0, 0, 300, 300 );
   glOrtho( -1.0, 1.0, -1.0, 1.0, -1.0, 1.0 );
   glColor3f( 0.0, 1.0, 1.0 );
   glRectf( -0.75, -0.75, 0.75, 0.75 );
   glFlush();
   glXWaitGL();

   XMapWindow( dpy, win );

   event_loop( dpy, pm );
   return 0;
}
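
The modified program can be built with something like the following (exact link flags may vary by distribution):

gcc -o glxpixmap glxpixmap.c -lGL -lX11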

On my system with the Quadro K600, this is the output:

174
176 0x18f4ad0
Vis 0 24 ff0000 ff00 ff 256 11
79 0x18f4ad0 0x1909020
X Error of failed request:  BadValue (integer parameter out of range for operation)
  Major opcode of failed request:  154 (GLX)
  Minor opcode of failed request:  3 (X_GLXCreateContext)
  Value in failed request:  0x0
  Serial number of failed request:  20
  Current serial number in output stream:  21

Now I wonder about the “11” (the last number in the third line of output), which is the visual's bits_per_rgb. 11 is strange; it should be 8.
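
For comparison, here is a minimal standalone sketch (my own addition, not part of glxpixmap) that lists every visual on the default screen together with its bits_per_rgb, so the two machines can be compared directly:

/* listvis.c -- build with: gcc -o listvis listvis.c -lX11 */
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <stdio.h>

int main( void )
{
   Display *dpy = XOpenDisplay(NULL);
   XVisualInfo tmpl, *vis;
   int i, n;

   if (!dpy) {
      fprintf(stderr, "Error: cannot open display\n");
      return 1;
   }

   /* match every visual on the default screen */
   tmpl.screen = DefaultScreen(dpy);
   vis = XGetVisualInfo(dpy, VisualScreenMask, &tmpl, &n);

   for (i = 0; i < n; i++)
      printf("visual 0x%03lx  depth %2d  bits_per_rgb %d\n",
             vis[i].visualid, vis[i].depth, vis[i].bits_per_rgb);

   XFree(vis);
   XCloseDisplay(dpy);
   return 0;
}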

output on an older machine:

174
176 0x1e84950
Vis 0 24 ff0000 ff00 ff 256 8
79 0x1e84950 0x1e93e20
Direct rendering: No
179
181
184
GL_RENDERER = GeForce 9400 GT/PCIe/SSE2
Redraw
Redraw
Redraw

So is my Quadro K600 broken, or do all Quadro K600 cards have this issue?
Can someone please help me? Thanks.
chuebsch

Sorry, me again.

I figured out that

GLXContext glXCreateContext(Display *dpy, XVisualInfo *vis, GLXContext shareList, Bool direct);

works fine if direct is True, but produces the error above when it is set to False.
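
Because the failure is an asynchronous X protocol error rather than a NULL return value, the default Xlib error handler kills the client before it can react. Here is a minimal sketch (my own; try_indirect_context is a made-up name) of how a program could probe for indirect-context support without dying, by temporarily installing an X error handler:

#include <GL/glx.h>
#include <X11/Xlib.h>

static int ctx_error;  /* set by the handler below */

static int ignore_x_error( Display *dpy, XErrorEvent *ev )
{
   (void) dpy;
   (void) ev;
   ctx_error = 1;
   return 0;
}

/* Try to create an indirect context without letting the asynchronous
 * BadValue error terminate the process. Returns NULL if the server
 * refused the request. */
GLXContext try_indirect_context( Display *dpy, XVisualInfo *vi )
{
   GLXContext ctx;
   XErrorHandler old;

   ctx_error = 0;
   old = XSetErrorHandler( ignore_x_error );
   ctx = glXCreateContext( dpy, vi, NULL, False );  /* direct = False */
   XSync( dpy, False );      /* force the error (if any) to arrive now */
   XSetErrorHandler( old );

   if (ctx_error) {
      if (ctx)
         glXDestroyContext( dpy, ctx );
      return NULL;
   }
   return ctx;
}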

glxinfo -i dies like this:

glxinfo -i 
name of display: :0.0
X Error of failed request:  BadValue (integer parameter out of range for operation)
  Major opcode of failed request:  154 (GLX)
  Minor opcode of failed request:  24 (X_GLXCreateNewContext)
  Value in failed request:  0x0
  Serial number of failed request:  33
  Current serial number in output stream:  34

Without -i, the output looks like this:

direct.txt (71 KB)

What vendor’s libGL.so.1 is installed on the remote machine? The last time I tried it, bugs in the Mesa version caused problems like this. Please try using the NVIDIA driver on the remote machine as well as the one running the X server.

Thank you, Aaron, for your reply.

The result seems to be independent of the remote graphics setup.
Here is the output when the remote is openSUSE 12.2 (x86_64) with an NVIDIA Quadro NVS 290:

ctest@bach:~> glxinfo
name of display: localhost:10.0
X Error of failed request:  BadValue (integer parameter out of range for operation)
  Major opcode of failed request:  154 (GLX)
  Minor opcode of failed request:  3 (X_GLXCreateContext)
  Value in failed request:  0x0
  Serial number of failed request:  18
  Current serial number in output stream:  19
ctest@bach:~> ldd /usr/bin/X11/glxinfo 
        linux-vdso.so.1 (0x00007fffaefff000)
        libGL.so.1 => /usr/X11R6/lib64/libGL.so.1 (0x00007f6de2862000)
        libX11.so.6 => /usr/lib64/libX11.so.6 (0x00007f6de2526000)
        libc.so.6 => /lib64/libc.so.6 (0x00007f6de2181000)
        libnvidia-tls.so.331.38 => /usr/lib64/tls/libnvidia-tls.so.331.38 (0x00007f6de1f7e000)
        libnvidia-glcore.so.331.38 => /usr/lib64/libnvidia-glcore.so.331.38 (0x00007f6ddf770000)
        libXext.so.6 => /usr/lib64/libXext.so.6 (0x00007f6ddf55e000)
        libdl.so.2 => /lib64/libdl.so.2 (0x00007f6ddf35a000)
        libxcb.so.1 => /usr/lib64/libxcb.so.1 (0x00007f6ddf139000)
        /lib64/ld-linux-x86-64.so.2 (0x00007f6de2b95000)
        libm.so.6 => /lib64/libm.so.6 (0x00007f6ddee42000)
        libXau.so.6 => /usr/lib64/libXau.so.6 (0x00007f6ddec3e000)
ctest@bach:~> exit

And here is the output on a Debian 8.2 (x86_64) machine with a Quadro FX 4600:

chuebsch@ewald:~$ glxinfo 
name of display: localhost:10.0
X Error of failed request:  BadValue (integer parameter out of range for operation)
  Major opcode of failed request:  154 (GLX)
  Minor opcode of failed request:  24 (X_GLXCreateNewContext)
  Value in failed request:  0x0
  Serial number of failed request:  96
  Current serial number in output stream:  97
chuebsch@ewald:~$ ldd /usr/bin/X11/glxinfo
        linux-vdso.so.1 (0x00007ffecc549000)
        libGLEW.so.1.10 => /usr/lib/x86_64-linux-gnu/libGLEW.so.1.10 (0x00007feb99bf1000)
        libGLU.so.1 => /usr/lib/x86_64-linux-gnu/libGLU.so.1 (0x00007feb99983000)
        libGL.so.1 => /usr/lib/x86_64-linux-gnu/libGL.so.1 (0x00007feb99637000)
        libm.so.6 => /lib/x86_64-linux-gnu/libm.so.6 (0x00007feb99336000)
        libX11.so.6 => /usr/lib/x86_64-linux-gnu/libX11.so.6 (0x00007feb98ff3000)
        libXext.so.6 => /usr/lib/x86_64-linux-gnu/libXext.so.6 (0x00007feb98de1000)
        libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007feb98a38000)
        libXmu.so.6 => /usr/lib/x86_64-linux-gnu/libXmu.so.6 (0x00007feb9881f000)
        libXi.so.6 => /usr/lib/x86_64-linux-gnu/libXi.so.6 (0x00007feb9860f000)
        libstdc++.so.6 => /usr/lib/x86_64-linux-gnu/libstdc++.so.6 (0x00007feb98304000)
        libgcc_s.so.1 => /lib/x86_64-linux-gnu/libgcc_s.so.1 (0x00007feb980ee000)
        libnvidia-tls.so.340.65 => /usr/lib/x86_64-linux-gnu/tls/libnvidia-tls.so.340.65 (0x00007feb97eeb000)
        libnvidia-glcore.so.340.65 => /usr/lib/x86_64-linux-gnu/libnvidia-glcore.so.340.65 (0x00007feb952d7000)
        libdl.so.2 => /lib/x86_64-linux-gnu/libdl.so.2 (0x00007feb950d3000)
        libxcb.so.1 => /usr/lib/x86_64-linux-gnu/libxcb.so.1 (0x00007feb94eb1000)
        /lib64/ld-linux-x86-64.so.2 (0x00007feb99e7d000)
        libXt.so.6 => /usr/lib/x86_64-linux-gnu/libXt.so.6 (0x00007feb94c48000)
        libXau.so.6 => /usr/lib/x86_64-linux-gnu/libXau.so.6 (0x00007feb94a44000)
        libXdmcp.so.6 => /usr/lib/x86_64-linux-gnu/libXdmcp.so.6 (0x00007feb9483f000)
        libSM.so.6 => /usr/lib/x86_64-linux-gnu/libSM.so.6 (0x00007feb94637000)
        libICE.so.6 => /usr/lib/x86_64-linux-gnu/libICE.so.6 (0x00007feb9441a000)
        libuuid.so.1 => /lib/x86_64-linux-gnu/libuuid.so.1 (0x00007feb94215000)

I think the problem must be on my local machine (Debian stretch, x86_64, Quadro K600), as the glxinfo -i test in my post above was done locally.

What I find puzzling: I thought that indirect means the rendering is done by the software/driver on the CPU, while direct rendering is done by the GPU. So why does it fail only on this machine with the Quadro K600?

Oh! I completely forgot that newer X servers disable indirect GLX by default for security reasons. Does it work if you start your X server with the +iglx option?

Thank you Aaron!
That seems to do the trick!
I ran

startx startkde -- +iglx

from a tty and it works fine.
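
A quick way to verify that the server accepted the flag is to run glxinfo from the ssh -Y session again: it should now print the extension list instead of dying, and

glxinfo | grep "direct rendering"

should report "No" for the forwarded (indirect) connection.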

Then I added

Option "AllowIndirectGLXProtocol" "True"

to /etc/X11/xorg.conf and restarted my computer.
It does not work, but /var/log/Xorg.0.log said:

[ 15.010] (**) NVIDIA(0): Option "AllowIndirectGLXProtocol" "True"
[ 15.010] (**) NVIDIA(0): Enabling 2D acceleration

So where do I put this +iglx option so that it works without starting X from the command line?

Probably in your display manager's configuration file, if you are using one (check with ps aux | grep dm).
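
For example (purely illustrative; this thread uses xdm, and other display managers use different files and keys), LightDM can pass extra flags to the X server through its configuration file:

# /etc/lightdm/lightdm.conf -- hypothetical LightDM example
[Seat:*]
xserver-command=X +iglx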

OK! So I changed the line in
/etc/X11/xdm/Xservers
to
:0 local /usr/bin/X :0 vt7 -nolisten tcp +iglx

This works fine now!

Many thanks to all!

chuebsch
[solved]

That’s indeed a workaround, but it’s more than mildly annoying to have to do (gdm seemingly makes it much harder to modify than it should be), and there’s talk of indirect GLX being removed from Xorg entirely in the future, given that the stated reason for disabling it is that it’s insecure and nobody uses it.

Is there any way around this?

jh