/* Chooses the real pixel format for the target window via
 * wglChoosePixelFormatARB.
 *
 * hdc:  device context of the target window
 * info: swap-chain init data; format/zsformat select the color, depth and
 *       stencil bit depths requested from the driver
 *
 * Returns the chosen pixel format index, or 0 on failure (0 is never a
 * valid pixel format index on Windows). */
static int gl_choose_pixel_format(HDC hdc, struct gs_init_data *info)
{
	struct darray attribs;
	int color_bits = get_color_format_bits(info->format);
	int depth_bits = get_depth_format_bits(info->zsformat);
	int stencil_bits = get_stencil_format_bits(info->zsformat);
	UINT num_formats;
	BOOL success;
	int format;

	if (!color_bits) {
		blog(LOG_ERROR, "gl_init_pixel_format: color format not "
		                "supported");
		/* was `return false` — this function returns an int pixel
		 * format index, so return 0 (the "no format" value) for
		 * consistency with the failure path below */
		return 0;
	}

	/* Build the zero-terminated WGL attribute list: a hardware-
	 * accelerated, double-buffered RGBA window format with the
	 * requested color/depth/stencil depths. */
	darray_init(&attribs);
	add_attrib(&attribs, WGL_DRAW_TO_WINDOW_ARB, GL_TRUE);
	add_attrib(&attribs, WGL_SUPPORT_OPENGL_ARB, GL_TRUE);
	add_attrib(&attribs, WGL_ACCELERATION_ARB, WGL_FULL_ACCELERATION_ARB);
	add_attrib(&attribs, WGL_DOUBLE_BUFFER_ARB, GL_TRUE);
	add_attrib(&attribs, WGL_PIXEL_TYPE_ARB, WGL_TYPE_RGBA_ARB);
	add_attrib(&attribs, WGL_COLOR_BITS_ARB, color_bits);
	add_attrib(&attribs, WGL_DEPTH_BITS_ARB, depth_bits);
	add_attrib(&attribs, WGL_STENCIL_BITS_ARB, stencil_bits);
	add_attrib(&attribs, 0, 0); /* list terminator */

	/* Ask for the single best matching format. */
	success = wglChoosePixelFormatARB(hdc, attribs.array, NULL, 1,
			&format, &num_formats);
	if (!success || !num_formats) {
		/* GetLastError() returns a DWORD (unsigned long), so the
		 * correct format specifier is %lu, not %u */
		blog(LOG_ERROR, "wglChoosePixelFormatARB failed, %lu",
				GetLastError());
		format = 0;
	}

	darray_free(&attribs);

	return format;
}
/* Initializes the platform (X11/GLX) side of a swap chain: creates an xcb
 * child window over the target window, with a GLX-selected visual and a
 * matching colormap, then records the FBConfig and window id in swap->wi.
 *
 * Returns true on success.  On failure, everything acquired up to that
 * point is released via the goto-cleanup ladder at the bottom. */
extern bool gl_platform_init_swapchain(struct gs_swap_chain *swap)
{
	Display *display = swap->device->plat->display;
	struct gs_init_data *info = &swap->info;
	xcb_connection_t *xcb_conn = XGetXCBConnection(display);
	xcb_window_t wid = xcb_generate_id(xcb_conn);
	xcb_window_t parent = swap->info.window.id;
	xcb_get_geometry_reply_t *geometry =
		get_window_geometry(xcb_conn, parent);
	bool status = false;
	int screen_num;
	int visual;
	GLXFBConfig *fb_config;

	if (!geometry)
		goto fail_geometry_request;

	/* The child window must live on the same screen as the parent. */
	screen_num = get_screen_num_from_root(xcb_conn, geometry->root);
	if (screen_num == -1) {
		goto fail_screen;
	}

	/* NOTE:
	 * GLX is odd: extensions can differ per screen, not just per video
	 * card or visual.  Because of this, we (re)load the GLX entry points
	 * every time we open a window, passing the correct screen number.
	 * On Windows this matters because entry points can genuinely change;
	 * here it virtually never will, but we do it anyway out of good
	 * practice. */
	if (!gladLoadGLX(display, screen_num)) {
		blog(LOG_ERROR, "Unable to load GLX entry functions.");
		goto fail_load_glx;
	}

	/* Define our FBConfig hints for GLX: a double-buffered,
	 * window-renderable config with the requested color/depth/stencil
	 * sizes. */
	const int fb_attribs[] = {
		GLX_STENCIL_SIZE, get_stencil_format_bits(info->zsformat),
		GLX_DEPTH_SIZE, get_depth_format_bits(info->zsformat),
		GLX_BUFFER_SIZE, get_color_format_bits(info->format),
		GLX_DOUBLEBUFFER, true,
		GLX_X_RENDERABLE, true,
		None
	};

	/* ...fetch the best match... */
	{
		int num_configs;
		fb_config = glXChooseFBConfig(display, screen_num,
				fb_attribs, &num_configs);

		if (!fb_config || !num_configs) {
			blog(LOG_ERROR, "Failed to find FBConfig!");
			goto fail_fb_config;
		}
	}

	/* ...then fetch the matching X visual id so xcb can use it. */
	{
		int error = glXGetFBConfigAttrib(display, fb_config[0],
				GLX_VISUAL_ID, &visual);

		if (error) {
			blog(LOG_ERROR, "Bad call to GetFBConfigAttrib!");
			goto fail_visual_id;
		}
	}

	/* A visual that differs from the parent's needs its own colormap,
	 * or window creation fails with BadMatch. */
	xcb_colormap_t colormap = xcb_generate_id(xcb_conn);
	uint32_t mask = XCB_CW_BORDER_PIXEL | XCB_CW_COLORMAP;
	/* NOTE(review): only two bits are set in `mask`, so the trailing 0
	 * in mask_values is unused — harmless but confirm it is not a
	 * leftover from a removed mask bit. */
	uint32_t mask_values[] = { 0, colormap, 0 };

	xcb_create_colormap(xcb_conn, XCB_COLORMAP_ALLOC_NONE, colormap,
			parent, visual);

	/* NOTE(review): depth is hardcoded to 24 and the class argument is
	 * 0 (XCB_WINDOW_CLASS_COPY_FROM_PARENT).  If the chosen visual's
	 * depth is not 24 this request can fail with BadMatch — TODO derive
	 * the depth from the visual instead of hardcoding it. */
	xcb_create_window(xcb_conn, 24 /* Hardcoded? */, wid, parent, 0, 0,
			geometry->width, geometry->height, 0, 0, visual,
			mask, mask_values);

	swap->wi->config = fb_config[0];
	swap->wi->window = wid;

	xcb_map_window(xcb_conn, wid);
	XFree(fb_config);

	status = true;
	goto success;

fail_visual_id:
	XFree(fb_config);
fail_fb_config:
fail_load_glx:
fail_screen:
fail_geometry_request:
success:
	/* free(NULL) is a no-op, so this is safe on the geometry-request
	 * failure path as well. */
	free(geometry);
	return status;
}