Example #1
void screen_init(struct rdp_config* config)
{
    // make window resizable for the user
    if (!fullscreen) {
        LONG style = GetWindowLong(gfx.hWnd, GWL_STYLE);
        style |= WS_SIZEBOX | WS_MAXIMIZEBOX;
        SetWindowLong(gfx.hWnd, GWL_STYLE, style);

        BOOL zoomed = IsZoomed(gfx.hWnd);

        if (zoomed) {
            ShowWindow(gfx.hWnd, SW_RESTORE);
        }

        // Fix client size after changing the window style, otherwise the PJ64
        // menu will be displayed incorrectly.
        // For some reason, this needs to be called twice, probably because the
        // style set above isn't applied immediately.
        for (int i = 0; i < 2; i++) {
            win32_client_resize(gfx.hWnd, gfx.hStatusBar, WINDOW_DEFAULT_WIDTH, WINDOW_DEFAULT_HEIGHT);
        }

        if (zoomed) {
            ShowWindow(gfx.hWnd, SW_MAXIMIZE);
        }
    }

    PIXELFORMATDESCRIPTOR win_pfd = {
        sizeof(PIXELFORMATDESCRIPTOR), 1,
        PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER, // Flags
        PFD_TYPE_RGBA, // The kind of framebuffer. RGBA or palette.
        32,            // Color depth of the framebuffer.
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        24, // Number of bits for the depth buffer
        8,  // Number of bits for the stencil buffer
        0,  // Number of Aux buffers in the framebuffer.
        PFD_MAIN_PLANE, 0, 0, 0, 0
    };

    dc = GetDC(gfx.hWnd);
    if (!dc) {
        msg_error("Can't get device context.");
    }

    int32_t win_pf = ChoosePixelFormat(dc, &win_pfd);
    if (!win_pf) {
        msg_error("Can't choose pixel format.");
    }
    if (!SetPixelFormat(dc, win_pf, &win_pfd)) {
        msg_error("Can't set pixel format.");
    }

    // create legacy context, required for wglGetProcAddress to work properly
    glrc = wglCreateContext(dc);
    if (!glrc || !wglMakeCurrent(dc, glrc)) {
        msg_error("Can't create OpenGL context.");
    }

    // load WGL extension functions
    wgl_LoadFunctions(dc);

    // attributes for a 3.3 core profile without all the legacy stuff
    GLint attribs[] = {
        WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
        WGL_CONTEXT_MINOR_VERSION_ARB, 3,
        WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
        0
    };

    // create the actual context
    glrc_core = wglCreateContextAttribsARB(dc, glrc, attribs);
    if (!glrc_core || !wglMakeCurrent(dc, glrc_core)) {
        // rendering probably still works with the legacy context, so just send
        // a warning
        msg_warning("Can't create OpenGL 3.3 core context.");
    }

    // enable vsync
    wglSwapIntervalEXT(1);

    gl_screen_init(config);
}
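
Note: win32_client_resize is project-internal and not shown in this example. A minimal sketch of what such a helper might look like, inferred from the call site above (the name, parameters, and status-bar handling are assumptions):

// Hypothetical sketch of the helper called in the resize loop above: grow the
// window so the client area, excluding the status bar, is exactly width x height.
static void win32_client_resize(HWND hWnd, HWND hStatusBar, int width, int height)
{
    RECT rect = { 0, 0, width, height };

    // account for the status bar docked at the bottom of the client area
    if (hStatusBar) {
        RECT status_rect;
        GetWindowRect(hStatusBar, &status_rect);
        rect.bottom += status_rect.bottom - status_rect.top;
    }

    // convert the desired client rectangle into a window rectangle for the
    // current style, then apply the new size without moving the window
    DWORD style = (DWORD)GetWindowLong(hWnd, GWL_STYLE);
    DWORD ex_style = (DWORD)GetWindowLong(hWnd, GWL_EXSTYLE);
    AdjustWindowRectEx(&rect, style, GetMenu(hWnd) != NULL, ex_style);

    SetWindowPos(hWnd, NULL, 0, 0, rect.right - rect.left, rect.bottom - rect.top,
        SWP_NOMOVE | SWP_NOZORDER | SWP_NOACTIVATE);
}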
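wgl_LoadFunctions appears to be a generated WGL loader. The "legacy context" comment above names the key constraint: wglGetProcAddress only returns usable pointers once a context is current. A hand-rolled equivalent for the two entry points used above could look like this sketch (the PFN typedefs are the standard ones from <GL/wglext.h>):

// Sketch: manual loading of the two WGL extension entry points used above.
// Must be called with a (legacy) context current, otherwise wglGetProcAddress
// returns NULL for most functions.
static PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB;
static PFNWGLSWAPINTERVALEXTPROC wglSwapIntervalEXT;

static void load_wgl_functions(void)
{
    wglCreateContextAttribsARB = (PFNWGLCREATECONTEXTATTRIBSARBPROC)
        wglGetProcAddress("wglCreateContextAttribsARB");
    wglSwapIntervalEXT = (PFNWGLSWAPINTERVALEXTPROC)
        wglGetProcAddress("wglSwapIntervalEXT");
}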
Example #2
  void graphicssystem_initialize()
  {
    #ifdef DEBUG_MODE
    glDebugMessageCallbackARB((GLDEBUGPROCARB)&DebugCallbackARB, 0);
    glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS_ARB);
    printf("OpenGL version supported by this platform: %s\n", (const char*)glGetString(GL_VERSION));

    GLuint other_ids[] = { 131185 };
    glDebugMessageControlARB(GL_DEBUG_SOURCE_API_ARB, GL_DEBUG_TYPE_OTHER_ARB, GL_DONT_CARE, 1, other_ids, GL_FALSE); //Disable some notifications shown below:
    //OpenGL: Buffer detailed info: Buffer object 1 (bound to GL_ELEMENT_ARRAY_BUFFER_ARB, usage hint is GL_STATIC_DRAW) will use VIDEO memory as the source for buffer object operations. [source=API type=OTHER severity=UNDEFINED (33387) id=131185]
    GLuint performance_ids[] = { 131218, 2 };
    glDebugMessageControlARB(GL_DEBUG_SOURCE_API_ARB, GL_DEBUG_TYPE_PERFORMANCE_ARB, GL_DONT_CARE, 2, performance_ids, GL_FALSE); //Disable some notifications shown below:
    //OpenGL: Program/shader state performance warning: Vertex shader in program 9 is being recompiled based on GL state. [source=API type=PERFORMANCE severity=MEDIUM id=131218] - NVIDIA-only and not very informative
    #endif

    gl_screen_init();

    glEnable(GL_SCISSOR_TEST); // constrain clear to viewport like D3D9
    glDepthFunc(GL_LEQUAL); // to match GM8's D3D8 default

    init_shaders();
    // read shaders into graphics system structure and compile and link them if needed
    for (size_t i = 0; i < shader_idmax; ++i) {
      ShaderStruct* shaderstruct = shaderdata[i];
      //TODO(harijs): If precompile == false or IDs are not defragged, there is a segfault because we try to access an invalid position in the shader vector
      if (shaderstruct->precompile == false) { continue; }

      int vshader_id = enigma_user::glsl_shader_create(enigma_user::sh_vertex);
      enigma_user::glsl_shader_load_string(vshader_id, shaderstruct->vertex);

      int fshader_id = enigma_user::glsl_shader_create(enigma_user::sh_fragment);
      enigma_user::glsl_shader_load_string(fshader_id, shaderstruct->fragment);

      int prog_id = enigma_user::glsl_program_create();
      enigma_user::glsl_program_set_name(prog_id, enigma_user::shader_get_name(i));

      enigma_user::glsl_shader_compile(vshader_id);
      enigma_user::glsl_shader_compile(fshader_id);

      enigma_user::glsl_program_attach(prog_id, vshader_id);
      enigma_user::glsl_program_attach(prog_id, fshader_id);
      enigma_user::glsl_program_link(prog_id);
      enigma_user::glsl_program_validate(prog_id);
    }

    //ADD DEFAULT SHADER (emulates FFP)
    int vshader_id = enigma_user::glsl_shader_create(enigma_user::sh_vertex);
    enigma_user::glsl_shader_load_string(vshader_id, getDefaultVertexShader());

    int fshader_id = enigma_user::glsl_shader_create(enigma_user::sh_fragment);
    enigma_user::glsl_shader_load_string(fshader_id, getDefaultFragmentShader());

    int prog_id = enigma_user::glsl_program_create();

    enigma_user::glsl_shader_compile(vshader_id);
    enigma_user::glsl_shader_compile(fshader_id);
    enigma_user::glsl_program_attach(prog_id, vshader_id);
    enigma_user::glsl_program_attach(prog_id, fshader_id);
    enigma_user::glsl_program_link(prog_id);
    enigma_user::glsl_program_validate(prog_id);
    enigma_user::glsl_program_set_name(prog_id, "DEFAULT_SHADER");

    default_shader = prog_id; //Default shader for FFP
    main_shader = default_shader; //Main shader used to override the default one

    enigma_user::glsl_program_reset(); //Set the default program
    //END DEFAULT SHADER

    //In the GL 3.3 core profile a VAO is mandatory, so we create one and never unbind it
    GLuint vertexArrayObject;
    glGenVertexArrays(1, &vertexArrayObject);
    glBindVertexArray(vertexArrayObject);
  }
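
DebugCallbackARB is defined elsewhere in the engine. A minimal callback matching the GLDEBUGPROCARB signature expected by glDebugMessageCallbackARB might look like this sketch (stderr logging is an assumption; the bracketed format mirrors the sample messages quoted in the comments above):

// Sketch of a GLDEBUGPROCARB-compatible callback. Because
// GL_DEBUG_OUTPUT_SYNCHRONOUS_ARB is enabled above, the driver invokes it
// synchronously on the thread that issued the offending GL command.
static void APIENTRY DebugCallbackARB(GLenum source, GLenum type, GLuint id,
    GLenum severity, GLsizei length, const GLchar* message, const void* userParam)
{
    (void)source; (void)length; (void)userParam;
    fprintf(stderr, "OpenGL: %s [type=0x%X severity=0x%X id=%u]\n",
        message, type, severity, id);
}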
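The sources returned by getDefaultVertexShader()/getDefaultFragmentShader() are not shown here. A minimal GLSL 330 pair covering only the transform, the vertex color, and one optional texture gives the flavor of the FFP emulation; this is a sketch, not ENIGMA's actual default shader (which also handles lighting, fog, and more), and all names in it are hypothetical:

// Hypothetical minimal FFP-style shader pair (GLSL 330 core), as C strings.
static const char* default_vertex_shader =
    "#version 330 core\n"
    "layout(location = 0) in vec3 in_Position;\n"
    "layout(location = 1) in vec4 in_Color;\n"
    "layout(location = 2) in vec2 in_TexCoord;\n"
    "uniform mat4 modelViewProjection;\n"
    "out vec4 v_Color;\n"
    "out vec2 v_TexCoord;\n"
    "void main() {\n"
    "    gl_Position = modelViewProjection * vec4(in_Position, 1.0);\n"
    "    v_Color = in_Color;\n"
    "    v_TexCoord = in_TexCoord;\n"
    "}\n";

static const char* default_fragment_shader =
    "#version 330 core\n"
    "in vec4 v_Color;\n"
    "in vec2 v_TexCoord;\n"
    "uniform sampler2D tex;\n"
    "uniform bool textureEnabled;\n"
    "out vec4 out_Color;\n"
    "void main() {\n"
    "    vec4 texel = textureEnabled ? texture(tex, v_TexCoord) : vec4(1.0);\n"
    "    out_Color = v_Color * texel;\n"
    "}\n";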