I have created a class to initialize and tear down Xlib and GLX properly.
OpenGLContext::OpenGLContext()
    : m_display(nullptr)
    , m_context(nullptr)
    , m_vi(nullptr)
{
    memset(&m_cmap, 0, sizeof(Colormap));
    memset(&m_swa, 0, sizeof(XSetWindowAttributes));
    memset(&m_win, 0, sizeof(Window));

    m_display = XOpenDisplay(NULL);
    assert(m_display);

    // Double-buffered RGBA visual with a depth buffer.
    static int dblBuf[] = {GLX_RGBA, GLX_RED_SIZE, 1, GLX_GREEN_SIZE, 1, GLX_BLUE_SIZE, 1, GLX_DEPTH_SIZE, 12, GLX_DOUBLEBUFFER, None};
    m_vi = glXChooseVisual(m_display, DefaultScreen(m_display), dblBuf);
    assert(m_vi);

    m_context = glXCreateContext(m_display, m_vi, None, True);
    m_cmap = XCreateColormap(m_display, RootWindow(m_display, m_vi->screen), m_vi->visual, AllocNone);
    m_swa.colormap = m_cmap;

    m_win = XCreateWindow(
        m_display,
        RootWindow(m_display, m_vi->screen),
        0, 0, /* width */ 640, /* height */ 480, 0, m_vi->depth, InputOutput, m_vi->visual,
        CWBorderPixel | CWColormap | CWEventMask, &m_swa
    );

    char* dummy[] = { const_cast<char*>(""), nullptr };
    XSetStandardProperties(m_display, m_win, "glxsimple", "glxsimple", None, dummy, 0, NULL);

    glXMakeCurrent(m_display, m_win, m_context);
    XMapWindow(m_display, m_win);
}
OpenGLContext::~OpenGLContext()
{
    XUnmapWindow(m_display, m_win);
    glXMakeCurrent(m_display, None, NULL);
    XFreeColormap(m_display, m_cmap);
    XDestroyWindow(m_display, m_win);
    glXDestroyContext(m_display, m_context);
    XFree(m_vi);
    XCloseDisplay(m_display);
}
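For reference, the class is exercised by nothing more than a minimal driver along these lines (the header name here is an assumption on my part; the namespace matches the backtrace below):

#include "OpenGLContext.h" // assumed name of the header declaring Zion::Core::OpenGLContext

int main()
{
    // Constructing the object opens the display, picks a visual, and creates
    // the context and window; the destructor tears all of it down again.
    Zion::Core::OpenGLContext context;
    return 0;
}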
Unfortunately, valgrind reports a memory leak:
==28742== 12,796 (584 direct, 12,212 indirect) bytes in 1 blocks are definitely lost in loss record 631 of 637
==28742== at 0x4C29F5D: malloc (vg_replace_malloc.c:263)
==28742== by 0xBCD7E7C: driConcatConfigs (in /usr/lib64/mesa/swrastg_dri.so)
==28742== by 0xBCDBDFF: dri_init_screen_helper (in /usr/lib64/mesa/swrastg_dri.so)
==28742== by 0xBCDAF0D: drisw_init_screen (in /usr/lib64/mesa/swrastg_dri.so)
==28742== by 0xBCD8583: driCreateNewScreen (in /usr/lib64/mesa/swrastg_dri.so)
==28742== by 0x5295604: driswCreateScreen (in /usr/lib64/opengl/xorg-x11/lib/libGL.so.1.2)
==28742== by 0x527412B: __glXInitialize (in /usr/lib64/opengl/xorg-x11/lib/libGL.so.1.2)
==28742== by 0x5270154: glXGetFBConfigs (in /usr/lib64/opengl/xorg-x11/lib/libGL.so.1.2)
==28742== by 0x5270B57: glXChooseFBConfig (in /usr/lib64/opengl/xorg-x11/lib/libGL.so.1.2)
==28742== by 0x4E9A7CE: ??? (in /usr/lib64/librrfaker.so)
==28742== by 0x4E5B676: glXChooseVisual (in /usr/lib64/librrfaker.so)
==28742== by 0x46D23B: Zion::Core::OpenGLContext::OpenGLContext() (OpenGLContext.cpp:23)
Note that I’m using VirtualGL (which explains librrfaker.so). Is there something I did wrong, or should I assume this is a bug on VirtualGL’s side?