SimpleScene() {
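    // Query the user's interpupillary distance and player height from the
    // Rift user profile, falling back to the SDK defaults if none is set.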
    ipd = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
    eyeHeight = ovrHmd_GetFloat(hmd, OVR_KEY_PLAYER_HEIGHT, OVR_DEFAULT_PLAYER_HEIGHT);
    if (!ovrHmd_ConfigureTracking(hmd,
      ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
      SAY_ERR("Could not attach to sensor device");
    }
    for_each_eye([&](ovrEyeType eye){
      textureIds[eye] = 0;
    });

    memset(eyeTextures, 0, 2 * sizeof(ovrGLTexture));
    player = glm::inverse(glm::lookAt(
      glm::vec3(0, eyeHeight, 4),
      glm::vec3(0, eyeHeight, 0),
      glm::vec3(0, 1, 0)));

    for_each_eye([&](ovrEyeType eye){
      ovrSizei eyeTextureSize = ovrHmd_GetFovTextureSize(hmd, eye, hmd->MaxEyeFov[eye], 1.0f);

      ovrTextureHeader & eyeTextureHeader = eyeTextures[eye].Header;
      eyeTextureHeader.TextureSize = eyeTextureSize;
      eyeTextureHeader.RenderViewport.Size = eyeTextureSize;
      eyeTextureHeader.API = ovrRenderAPI_OpenGL;
    });

    resetCamera();
  }
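
These examples iterate the two eyes with a for_each_eye helper that the snippets themselves never define. A minimal sketch, assuming the 0.4-era SDK's ovrEyeType enumeration (ovrEye_Left == 0, ovrEye_Count == 2); a few of the later examples use an analogous StereoEye enumeration instead:

template <typename Function>
void for_each_eye(Function function) {
  // Visit the left then the right eye, in enumeration order.
  for (ovrEyeType eye = ovrEye_Left;
       eye < ovrEye_Count;
       eye = static_cast<ovrEyeType>(eye + 1)) {
    function(eye);
  }
}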
Example #2
  void initGl() {
    GlfwApp::initGl();

    ovrFovPort eyeFovPorts[2];
    for_each_eye([&](ovrEyeType eye){
      ovrTextureHeader & eyeTextureHeader = textures[eye].Header;
      eyeFovPorts[eye] = hmd->DefaultEyeFov[eye];
      eyeTextureHeader.TextureSize = ovrHmd_GetFovTextureSize(hmd, eye, hmd->DefaultEyeFov[eye], 1.0f);
      eyeTextureHeader.RenderViewport.Size = eyeTextureHeader.TextureSize;
      eyeTextureHeader.RenderViewport.Pos.x = 0;
      eyeTextureHeader.RenderViewport.Pos.y = 0;
      eyeTextureHeader.API = ovrRenderAPI_OpenGL;

      eyeFramebuffers[eye] = FramebufferWrapperPtr(new FramebufferWrapper());
      eyeFramebuffers[eye]->init(ovr::toGlm(eyeTextureHeader.TextureSize));
      ((ovrGLTexture&)textures[eye]).OGL.TexId = oglplus::GetName(eyeFramebuffers[eye]->color);
    });

    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(ovrGLConfig));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.Multisample = 1;

    /**
     * In the Direct3D examples in the Oculus SDK, they make the point that the
     * onscreen window size does not need to match the Rift resolution.  However
     * this doesn't currently work in OpenGL, so we have to create the window at
     * the full resolution of the Rift and ensure that we use the same
     * size here when setting the BackBufferSize.
     */
    cfg.OGL.Header.BackBufferSize = ovr::fromGlm(getSize());
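
    // Sanity check (a sketch added for illustration, assuming <cassert> and
    // that hmd->Resolution holds the full panel size): per the note above,
    // with OpenGL the back buffer must match the Rift panel.
    assert(cfg.OGL.Header.BackBufferSize.w == hmd->Resolution.w
        && cfg.OGL.Header.BackBufferSize.h == hmd->Resolution.h);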

    ON_LINUX([&]{
      cfg.OGL.Disp = (Display*)glfw::getNativeDisplay(getWindow());
    });

    int distortionCaps = 0
        | ovrDistortionCap_TimeWarp
        | ovrDistortionCap_Vignette;
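    // TimeWarp re-projects the rendered frame with a late head pose at
    // scanout; Vignette fades the edges of the view to mask the transition.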

    ON_LINUX([&]{
      distortionCaps |= ovrDistortionCap_LinuxDevFullscreen;
    });

    ovrEyeRenderDesc              eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg.Config,
        distortionCaps, eyeFovPorts, eyeRenderDescs);
    if (!configResult) {
      FAIL("Unable to configure SDK based distortion rendering");
    }

    for_each_eye([&](ovrEyeType eye){
      eyeOffsets[eye] = eyeRenderDescs[eye].HmdToEyeViewOffset;
      eyeProjections[eye] = ovr::toGlm(
          ovrMatrix4f_Projection(eyeFovPorts[eye], 0.01f, 1000.0f, true));
    });
  }
 void draw() {
     glClear(GL_COLOR_BUFFER_BIT);
     for_each_eye([&](StereoEye eye) {
         renderEye(eye);
     });
     GL_CHECK_ERROR;
 }
  virtual void initGl() {
    CubeScene::initGl();

    ovrRenderAPIConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.Header.API = ovrRenderAPI_OpenGL;
    cfg.Header.RTSize = hmd->Resolution;
    cfg.Header.Multisample = 1;

    int distortionCaps = ovrDistortionCap_Chromatic;
    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg,
      distortionCaps, hmd->DefaultEyeFov, eyeRenderDescs);
    if (!configResult) {
      FAIL("Unable to configure SDK based distortion rendering");
    }

    for_each_eye([&](ovrEyeType eye){
      PerEyeArg & eyeArg = eyes[eye];
      ovrFovPort fov = hmd->DefaultEyeFov[eye];
      ovrTextureHeader & textureHeader = textures[eye].Header;
      ovrSizei texSize = ovrHmd_GetFovTextureSize(hmd, eye, fov, 1.0f);
      textureHeader.API = ovrRenderAPI_OpenGL;
      textureHeader.TextureSize = texSize;
      textureHeader.RenderViewport.Size = texSize;
      textureHeader.RenderViewport.Pos.x = 0;
      textureHeader.RenderViewport.Pos.y = 0;
      eyeArg.frameBuffer.init(Rift::fromOvr(texSize));
      ((ovrGLTexture&)textures[eye]).OGL.TexId = eyeArg.frameBuffer.color->texture;

      ovrVector3f offset = eyeRenderDescs[eye].ViewAdjust;
      ovrMatrix4f projection = ovrMatrix4f_Projection(fov, 0.01f, 100, true);

      eyeArg.projection = Rift::fromOvr(projection);
      eyeArg.modelviewOffset = glm::translate(glm::mat4(), Rift::fromOvr(offset));
    });
  }
  void initGl() {
    RiftGlfwApp::initGl();
    for_each_eye([&](ovrEyeType eye){
      EyeArg & eyeArg = eyeArgs[eye];
      ovrFovPort fov = hmdDesc.DefaultEyeFov[eye];
      ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(hmd, eye, fov);

      // Set up the per-eye projection matrix
      eyeArg.projection = Rift::fromOvr(
        ovrMatrix4f_Projection(fov, 0.01f, 100000.0f, true));
      eyeArg.viewOffset = glm::translate(glm::mat4(), Rift::fromOvr(renderDesc.ViewAdjust));
      ovrRecti texRect;
      texRect.Size = ovrHmd_GetFovTextureSize(hmd, eye,
        hmdDesc.DefaultEyeFov[eye], 1.0f);
      texRect.Pos.x = texRect.Pos.y = 0;

      eyeArg.frameBuffer.init(Rift::fromOvr(texRect.Size));

      ovrVector2f scaleAndOffset[2];
      ovrHmd_GetRenderScaleAndOffset(fov, texRect.Size,
        texRect, scaleAndOffset);
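      // scaleAndOffset maps this eye's FOV onto the render target's UV
      // space; the pair is fed to the distortion shader as uniforms.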
      eyeArg.scale = Rift::fromOvr(scaleAndOffset[0]);
      eyeArg.offset = Rift::fromOvr(scaleAndOffset[1]);

      ovrHmd_CreateDistortionMesh(hmd, eye, fov, 0, &eyeArg.mesh);
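      // The SDK-generated mesh's vertex and index arrays are uploaded into
      // GL buffer objects below for the client-side distortion pass.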

      eyeArg.meshVao = gl::VertexArrayPtr(new gl::VertexArray());
      eyeArg.meshVao->bind();

      eyeArg.meshIbo = gl::IndexBufferPtr(new gl::IndexBuffer());
      eyeArg.meshIbo->bind();
      size_t bufferSize = eyeArg.mesh.IndexCount * sizeof(unsigned short);
      eyeArg.meshIbo->load(bufferSize, eyeArg.mesh.pIndexData);

      eyeArg.meshVbo = gl::VertexBufferPtr(new gl::VertexBuffer());
      eyeArg.meshVbo->bind();
      bufferSize = eyeArg.mesh.VertexCount * sizeof(ovrDistortionVertex);
      eyeArg.meshVbo->load(bufferSize, eyeArg.mesh.pVertexData);

      size_t stride = sizeof(ovrDistortionVertex);
      size_t offset = offsetof(ovrDistortionVertex, Pos);
      glEnableVertexAttribArray(gl::Attribute::Position);
      glVertexAttribPointer(gl::Attribute::Position, 2, GL_FLOAT, GL_FALSE,
        stride, (void*)offset);

      offset = offsetof(ovrDistortionVertex, TexG);
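      // Only the green-channel UV is bound as a texture coordinate, so this
      // sample does not apply per-channel (chromatic) correction.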
      glEnableVertexAttribArray(gl::Attribute::TexCoord0);
      glVertexAttribPointer(gl::Attribute::TexCoord0, 2, GL_FLOAT, GL_FALSE,
        stride, (void*)offset);

      gl::VertexArray::unbind();
      gl::Program::clear();
    });

    distortionProgram = GlUtils::getProgram(
      Resource::SHADERS_DISTORTION_VS,
      Resource::SHADERS_DISTORTION_FS
    );
  }
void RiftRenderingApp::initializeRiftRendering() {
    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.BackBufferSize = ovr::fromGlm(hmdNativeResolution);
    cfg.OGL.Header.Multisample = 1;

    ON_WINDOWS([&]{
      cfg.OGL.Window = (HWND)getNativeWindow();
    });

    int distortionCaps = 0
      | ovrDistortionCap_Vignette
      | ovrDistortionCap_Overdrive
      | ovrDistortionCap_TimeWarp;

    ON_LINUX([&]{
      distortionCaps |= ovrDistortionCap_LinuxDevFullscreen;
    });

    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg.Config,
      distortionCaps, hmd->MaxEyeFov, eyeRenderDescs);
    assert(configResult);

    for_each_eye([&](ovrEyeType eye){
      const ovrEyeRenderDesc & erd = eyeRenderDescs[eye];
      ovrMatrix4f ovrPerspectiveProjection = ovrMatrix4f_Projection(erd.Fov, 0.01f, 100000.0f, true);
      projections[eye] = ovr::toGlm(ovrPerspectiveProjection);
      eyeOffsets[eye] = erd.HmdToEyeViewOffset;
    });

    // Allocate the frameBuffer that will hold the scene, and then be
    // re-rendered to the screen with distortion
    glm::uvec2 frameBufferSize = ovr::toGlm(eyeTextures[0].Header.TextureSize);
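    // Note: eye 0's size is reused for both eyes, which assumes the
    // constructor configured identical texture sizes for left and right.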
    for_each_eye([&](ovrEyeType eye) {
      eyeFramebuffers[eye] = FramebufferWrapperPtr(new FramebufferWrapper());
      eyeFramebuffers[eye]->init(frameBufferSize);
      ((ovrGLTexture&)(eyeTextures[eye])).OGL.TexId =
        oglplus::GetName(eyeFramebuffers[eye]->color);
    });
  }
  void initGl() {
    RiftGlfwApp::initGl();

    Resource * sceneImages = SCENE_IMAGES_DK2;
    if (hmd->Type == ovrHmd_DK1) {
      sceneImages = SCENE_IMAGES_DK1;
    }

    for_each_eye([&](ovrEyeType eye){
      glm::uvec2 textureSize;
      GlUtils::getImageAsTexture(sceneTextures[eye],
        sceneImages[eye], textureSize);

      memset(eyeTextures + eye, 0,
        sizeof(eyeTextures[eye]));

      ovrTextureHeader & eyeTextureHeader =
        eyeTextures[eye].Header;

      eyeTextureHeader.TextureSize = Rift::toOvr(textureSize);
      eyeTextureHeader.RenderViewport.Size =
        eyeTextureHeader.TextureSize;

      eyeTextureHeader.API = ovrRenderAPI_OpenGL;

      ((ovrGLTextureData&)eyeTextures[eye]).TexId =
        sceneTextures[eye]->texture;
    });

    ovrRenderAPIConfig config;
    memset(&config, 0, sizeof(config));
    config.Header.API = ovrRenderAPI_OpenGL;
    config.Header.RTSize = Rift::toOvr(windowSize);
    config.Header.Multisample = 1;
#if defined(OVR_OS_WIN32)
    ((ovrGLConfigData&)config).Window = 0;
#elif defined(OVR_OS_LINUX)
    ((ovrGLConfigData&)config).Win = 0;
    ((ovrGLConfigData&)config).Disp = 0;
#endif

    int distortionCaps = 
      ovrDistortionCap_Vignette
      | ovrDistortionCap_Chromatic;

    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &config,
      distortionCaps, hmd->DefaultEyeFov, eyeRenderDescs);
    if (0 == configResult) {
      FAIL("Unable to configure rendering");
    }
    ovrhmd_EnableHSWDisplaySDKRender(hmd, false);
  }
    void draw() {
        static int frameIndex = 0;
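        // Open SDK frame timing; the SDK predicts the per-eye scanout times
        // used for the pose queries below, and EndFrameTiming closes the frame.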
        ovrFrameTiming timing = ovrHmd_BeginFrameTiming(hmd, frameIndex++);
        for (int i = 0; i < 2; ++i) {
            const ovrEyeType eye = hmdDesc.EyeRenderOrder[i];
            const EyeArg & eyeArg = eyeArgs[eye];
            // Set up the per-eye projection matrix
            gl::Stacks::projection().top() = eyeArg.projection;

            eyeArg.frameBuffer.activate();
            gl::MatrixStack & mv = gl::Stacks::modelview();
            gl::Stacks::with_push([&] {
                ovrSensorState ss = ovrHmd_GetSensorState(hmd, timing.EyeScanoutSeconds[eye]);
                // Set up the per-eye modelview matrix
                // Apply the head pose
                mv.preMultiply(glm::inverse(Rift::fromOvr(ss.Predicted.Pose)));
                // Apply the per-eye offset
                mv.preMultiply(eyeArg.viewOffset);
                renderScene();
            });
            eyeArg.frameBuffer.deactivate();
        }

        glClearColor(0.1f, 0.1f, 0.1f, 1);
        glClear(GL_COLOR_BUFFER_BIT);
        glDisable(GL_BLEND);
        glDisable(GL_CULL_FACE);
        glDisable(GL_DEPTH_TEST);

        gl::ProgramPtr distortionProgram = GlUtils::getProgram(
                                               Resource::SHADERS_DISTORTION_VS,
                                               Resource::SHADERS_DISTORTION_FS
                                           );
        distortionProgram->use();
        distortionProgram->setUniform("samples", enableSamples);

        glViewport(0, 0, windowSize.x, windowSize.y);
        for_each_eye([&](ovrEyeType eye) {
            const EyeArg & eyeArg = eyeArgs[eye];
            distortionProgram->setUniform(0, eyeArg.scale);
            distortionProgram->setUniform(1, eyeArg.offset);
            eyeArg.frameBuffer.color->bind();
            eyeArg.meshVao->bind();
            glDrawElements(GL_TRIANGLES, eyeArg.mesh.IndexCount,
                           GL_UNSIGNED_SHORT, nullptr);
        });
        gl::Texture2d::unbind();
        gl::Program::clear();
        ovrHmd_EndFrameTiming(hmd);
        glEnable(GL_CULL_FACE);
        glEnable(GL_DEPTH_TEST);
    }
    void initGl() {
        RiftGlfwApp::initGl();
        glDisable(GL_BLEND);
        glDisable(GL_DEPTH_TEST);
        glClearColor(0.1f, 0.1f, 0.1f, 1.0f);

        quadGeometry = GlUtils::getQuadGeometry();

        // Generate the lookup textures and load the scene textures
        for_each_eye([&](StereoEye eye) {
            GlUtils::getImageAsTexture(sceneTextures[eye], SCENE_IMAGES[eye]);
            createLookupTexture(lookupTextures[eye], eye);
        });
    }
RiftRenderingApp::RiftRenderingApp() {
    Platform::sleepMillis(200);
    if (!ovrHmd_ConfigureTracking(hmd,
      ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
      SAY_ERR("Could not attach to sensor device");
    }

    memset(eyeTextures, 0, 2 * sizeof(ovrGLTexture));

    for_each_eye([&](ovrEyeType eye){
      ovrSizei eyeTextureSize = ovrHmd_GetFovTextureSize(hmd, eye, hmd->MaxEyeFov[eye], 1.0f);
      ovrTextureHeader & eyeTextureHeader = eyeTextures[eye].Header;
      eyeTextureHeader.TextureSize = eyeTextureSize;
      eyeTextureHeader.RenderViewport.Size = eyeTextureSize;
      eyeTextureHeader.API = ovrRenderAPI_OpenGL;
    });
  }
  void draw() {
    auto frameTime = ovrHmd_BeginFrame(hmd, frameIndex++);
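    // The OVR lock is held only around BeginFrame/EndFrame; releasing it
    // here opens the window in which the render thread samples eye poses,
    // while this thread sleeps until just before the timewarp point (or
    // the next frame) so distortion composites the freshest textures.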
    ovrLock.unlock();

    if (0 != frameTime.TimewarpPointSeconds) {
      ovr_WaitTillTime(frameTime.TimewarpPointSeconds - 0.002);
    } else {
      ovr_WaitTillTime(frameTime.NextFrameSeconds - 0.008);
    }

    // Grab the most recent textures
    for_each_eye([&](ovrEyeType eye) {
      ((ovrGLTexture&)(eyeTextures[eye])).OGL.TexId =
        textureIds[eye];
    });

    ovrLock.lock();
    ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
  }
  void initGl() {
    RiftGlfwApp::initGl();

    Resource * sceneImages = SCENE_IMAGES_DK2;
    if (hmd->Type == ovrHmd_DK1) {
      sceneImages = SCENE_IMAGES_DK1;
    }

    for_each_eye([&](ovrEyeType eye){
      glm::uvec2 textureSize;
      sceneTextures[eye] = oria::load2dTexture(sceneImages[eye], textureSize);

      memset(eyeTextures + eye, 0, sizeof(eyeTextures[eye]));
      ovrTextureHeader & eyeTextureHeader = eyeTextures[eye].Header;
      eyeTextureHeader.TextureSize = ovr::fromGlm(textureSize);
      eyeTextureHeader.RenderViewport.Size = eyeTextureHeader.TextureSize;
      eyeTextureHeader.API = ovrRenderAPI_OpenGL;

      ((ovrGLTextureData&)eyeTextures[eye]).TexId = oglplus::GetName(*sceneTextures[eye]);
    });

    ovrRenderAPIConfig config;
    memset(&config, 0, sizeof(config));
    config.Header.API = ovrRenderAPI_OpenGL;
    config.Header.BackBufferSize = ovr::fromGlm(getSize());
    config.Header.Multisample = 1;
#if defined(OVR_OS_WIN32)
    ((ovrGLConfigData&)config).Window = 0;
#elif defined(OVR_OS_LINUX)
    ((ovrGLConfigData&)config).Disp = 0;
#endif

    int distortionCaps = ovrDistortionCap_Vignette;

    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &config,
        distortionCaps, hmd->DefaultEyeFov, eyeRenderDescs);
    if (0 == configResult) {
      FAIL("Unable to configure rendering");
    }
  }
  void initGl() {
    RiftGlfwApp::initGl();
    glDisable(GL_BLEND);
    glDisable(GL_DEPTH_TEST);
    glEnable(GL_PRIMITIVE_RESTART);
    glPrimitiveRestartIndex(UINT_MAX);
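    // Primitive restart lets the distortion mesh pack multiple triangle
    // strips into one index buffer, with UINT_MAX as the restart marker.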
    glClearColor(0.1f, 0.1f, 0.1f, 1.0f);

    program = GlUtils::getProgram(
        Resource::SHADERS_TEXTURED_VS,
        Resource::SHADERS_TEXTURED_FS);
    program->use();

    DistortionHelper distortionHelper(ovrHmdInfo);

    // Load scene textures and generate distortion meshes
    for_each_eye([&](StereoEye eye){
      GlUtils::getImageAsTexture(textures[eye], SCENE_IMAGES[eye]);
      distortionGeometry[eye] = distortionHelper.createDistortionMesh(glm::uvec2(64, 64), eye);
    });
  }
  void initGl() {
    RiftGlfwApp::initGl();

    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = Rift::toOvr(windowSize);
    cfg.OGL.Header.Multisample = 1;

    int distortionCaps = 0
      | ovrDistortionCap_Vignette
      | ovrDistortionCap_Chromatic
      | ovrDistortionCap_TimeWarp
      ;

    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg.Config,
      distortionCaps, hmd->MaxEyeFov, eyeRenderDescs);
    if (!configResult) {
      FAIL("Unable to configure SDK based distortion rendering");
    }

#ifdef _DEBUG
    ovrhmd_EnableHSWDisplaySDKRender(hmd, false);
#endif

    for_each_eye([&](ovrEyeType eye){
      const ovrEyeRenderDesc & erd = eyeRenderDescs[eye];
      ovrMatrix4f ovrPerspectiveProjection = ovrMatrix4f_Projection(erd.Fov, 0.01f, 100000.0f, true);
      projections[eye] = Rift::fromOvr(ovrPerspectiveProjection);
    });

    ///////////////////////////////////////////////////////////////////////////
    // Initialize OpenGL settings and variables
    glEnable(GL_BLEND);

    ovrLock.lock();
    renderWindow = glfwCreateWindow(100, 100, "Offscreen", nullptr, window);

    threadPtr = std::unique_ptr<std::thread>(new std::thread(&SimpleScene::runOvrThread, this));
    glfwMakeContextCurrent(window);
  }
  virtual void initGl() {
    RiftGlfwApp::initGl();

    ovrRenderAPIConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.Header.API = ovrRenderAPI_OpenGL;
    cfg.Header.BackBufferSize = ovr::fromGlm(getSize());
    cfg.Header.Multisample = 1;

    int distortionCaps = ovrDistortionCap_Vignette;
    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg,
        distortionCaps, hmd->DefaultEyeFov, eyeRenderDescs);
    if (!configResult) {
      FAIL("Unable to configure SDK based distortion rendering");
    }

    for_each_eye([&](ovrEyeType eye){
      PerEyeArg & eyeArgs = eyes[eye];
      ovrFovPort fov = hmd->DefaultEyeFov[eye];
      ovrSizei texSize = ovrHmd_GetFovTextureSize(hmd, eye, fov, 1.0f);
      eyeArgs.framebuffer = FramebufferWrapperPtr(new FramebufferWrapper());
      eyeArgs.framebuffer->init(ovr::toGlm(texSize));

      ovrTextureHeader & textureHeader = eyeTextures[eye].Header;
      textureHeader.API = ovrRenderAPI_OpenGL;
      textureHeader.TextureSize = texSize;
      textureHeader.RenderViewport.Size = texSize;
      textureHeader.RenderViewport.Pos.x = 0;
      textureHeader.RenderViewport.Pos.y = 0;
      ((ovrGLTextureData&)eyeTextures[eye]).TexId =
        oglplus::GetName(eyeArgs.framebuffer->color);

      eyeArgs.modelviewOffset = glm::translate(glm::mat4(), 
        ovr::toGlm(eyeRenderDescs[eye].HmdToEyeViewOffset));

      ovrMatrix4f projection = ovrMatrix4f_Projection(fov, 0.01f, 100, true);
      eyeArgs.projection = ovr::toGlm(projection);
    });
  }
Example #16
void Application::paintGL() {
    // Some plugins process message events, allowing paintGL to be called reentrantly.
    if (_aboutToQuit || _window->isMinimized()) {
        return;
    }

    _renderFrameCount++;
    _lastTimeRendered.start();

    auto lastPaintBegin = usecTimestampNow();
    PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
    PerformanceTimer perfTimer("paintGL");

    if (nullptr == _displayPlugin) {
        return;
    }

    DisplayPluginPointer displayPlugin;
    {
        PROFILE_RANGE(render, "/getActiveDisplayPlugin");
        displayPlugin = getActiveDisplayPlugin();
    }

    {
        PROFILE_RANGE(render, "/pluginBeginFrameRender");
        // If a display plugin loses its underlying support, it
        // needs to be able to signal us not to use it
        if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
            QMetaObject::invokeMethod(this, "updateDisplayMode");
            return;
        }
    }

    RenderArgs renderArgs;
    glm::mat4  HMDSensorPose;
    glm::mat4  eyeToWorld;
    glm::mat4  sensorToWorld;

    bool isStereo;
    glm::mat4  stereoEyeOffsets[2];
    glm::mat4  stereoEyeProjections[2];

    {
        QMutexLocker viewLocker(&_renderArgsMutex);
        renderArgs = _appRenderArgs._renderArgs;

        // don't render if there is no context.
        if (!_appRenderArgs._renderArgs._context) {
            return;
        }

        HMDSensorPose = _appRenderArgs._headPose;
        eyeToWorld = _appRenderArgs._eyeToWorld;
        sensorToWorld = _appRenderArgs._sensorToWorld;
        isStereo = _appRenderArgs._isStereo;
        for_each_eye([&](Eye eye) {
            stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
            stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
        });
    }

    {
        PROFILE_RANGE(render, "/gpuContextReset");
        _gpuContext->beginFrame(HMDSensorPose);
        // Reset the gpu::Context Stages
        // Back to the default framebuffer;
        gpu::doInBatch(_gpuContext, [&](gpu::Batch& batch) {
            batch.resetStages();
        });
    }


    {
        PROFILE_RANGE(render, "/renderOverlay");
        PerformanceTimer perfTimer("renderOverlay");
        // NOTE: There is no batch associated with this renderArgs;
        // the ApplicationOverlay class assumes its viewport is set up to be the device size
        renderArgs._viewport = glm::ivec4(0, 0, getDeviceSize());
        _applicationOverlay.renderOverlay(&renderArgs);
    }

    {
        PROFILE_RANGE(render, "/updateCompositor");
        getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
    }

    gpu::FramebufferPointer finalFramebuffer;
    QSize finalFramebufferSize;
    {
        PROFILE_RANGE(render, "/getOutputFramebuffer");
        // Primary rendering pass
        auto framebufferCache = DependencyManager::get<FramebufferCache>();
        finalFramebufferSize = framebufferCache->getFrameBufferSize();
        // Final framebuffer that will be handed to the display-plugin
        finalFramebuffer = framebufferCache->getFramebuffer();
    }

    {
        if (isStereo) {
            renderArgs._context->enableStereo(true);
            renderArgs._context->setStereoProjections(stereoEyeProjections);
            renderArgs._context->setStereoViews(stereoEyeOffsets);
        }

        renderArgs._hudOperator = displayPlugin->getHUDOperator();
        renderArgs._hudTexture = _applicationOverlay.getOverlayTexture();
        renderArgs._blitFramebuffer = finalFramebuffer;
        runRenderFrame(&renderArgs);
    }

    auto frame = _gpuContext->endFrame();
    frame->frameIndex = _renderFrameCount;
    frame->framebuffer = finalFramebuffer;
    frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
        DependencyManager::get<FramebufferCache>()->releaseFramebuffer(framebuffer);
    };
    // deliver final scene rendering commands to the display plugin
    {
        PROFILE_RANGE(render, "/pluginOutput");
        PerformanceTimer perfTimer("pluginOutput");
        _renderLoopCounter.increment();
        displayPlugin->submitFrame(frame);
    }

    // Reset the framebuffer and stereo state
    renderArgs._blitFramebuffer.reset();
    renderArgs._context->enableStereo(false);

    {
        Stats::getInstance()->setRenderDetails(renderArgs._details);
    }

    uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
    _frameTimingsScriptingInterface.addValue(lastPaintDuration);
}
Example #17
void GraphicsEngine::render_performFrame() {
    // Some plugins process message events, allowing paintGL to be called reentrantly.

    _renderFrameCount++;

    auto lastPaintBegin = usecTimestampNow();
    PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
    PerformanceTimer perfTimer("paintGL");

    DisplayPluginPointer displayPlugin;
    {
        PROFILE_RANGE(render, "/getActiveDisplayPlugin");
        displayPlugin = qApp->getActiveDisplayPlugin();
    }

    {
        PROFILE_RANGE(render, "/pluginBeginFrameRender");
        // If a display plugin loses its underlying support, it
        // needs to be able to signal us not to use it
        if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
            QMetaObject::invokeMethod(qApp, "updateDisplayMode");
            return;
        }
    }

    RenderArgs renderArgs;
    glm::mat4  HMDSensorPose;
    glm::mat4  eyeToWorld;
    glm::mat4  sensorToWorld;
    ViewFrustum viewFrustum;

    bool isStereo;
    glm::mat4  stereoEyeOffsets[2];
    glm::mat4  stereoEyeProjections[2];

    {
        QMutexLocker viewLocker(&_renderArgsMutex);
        renderArgs = _appRenderArgs._renderArgs;

        // don't render if there is no context.
        if (!_appRenderArgs._renderArgs._context) {
            return;
        }

        HMDSensorPose = _appRenderArgs._headPose;
        eyeToWorld = _appRenderArgs._eyeToWorld;
        sensorToWorld = _appRenderArgs._sensorToWorld;
        isStereo = _appRenderArgs._isStereo;
        for_each_eye([&](Eye eye) {
            stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
            stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
        });
        viewFrustum = _appRenderArgs._renderArgs.getViewFrustum();
    }

    {
        PROFILE_RANGE(render, "/gpuContextReset");
        getGPUContext()->beginFrame(_appRenderArgs._view, HMDSensorPose);
        // Reset the gpu::Context Stages
        // Back to the default framebuffer;
        gpu::doInBatch("Application_render::gpuContextReset", getGPUContext(), [&](gpu::Batch& batch) {
            batch.resetStages();
        });

        if (isStereo) {
            renderArgs._context->enableStereo(true);
            renderArgs._context->setStereoProjections(stereoEyeProjections);
            renderArgs._context->setStereoViews(stereoEyeOffsets);
        }
    }

    gpu::FramebufferPointer finalFramebuffer;
    QSize finalFramebufferSize;
    {
        PROFILE_RANGE(render, "/getOutputFramebuffer");
        // Primary rendering pass
        auto framebufferCache = DependencyManager::get<FramebufferCache>();
        finalFramebufferSize = framebufferCache->getFrameBufferSize();
        // Final framebuffer that will be handed to the display-plugin
        finalFramebuffer = framebufferCache->getFramebuffer();
    }

    if (!_programsCompiled.load()) {
        gpu::doInBatch("splashFrame", _gpuContext, [&](gpu::Batch& batch) {
            batch.setFramebuffer(finalFramebuffer);
            batch.enableSkybox(true);
            batch.enableStereo(isStereo);
            batch.setViewportTransform({ 0, 0, finalFramebuffer->getSize() });
            _splashScreen->render(batch, viewFrustum);
        });
    } else {
        {
            PROFILE_RANGE(render, "/renderOverlay");
            PerformanceTimer perfTimer("renderOverlay");
            // NOTE: There is no batch associated with this renderArgs;
            // the ApplicationOverlay class assumes its viewport is set up to be the device size
            renderArgs._viewport = glm::ivec4(0, 0, qApp->getDeviceSize());
            qApp->getApplicationOverlay().renderOverlay(&renderArgs);
        }

        {
            PROFILE_RANGE(render, "/updateCompositor");
            qApp->getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
        }

        {
            PROFILE_RANGE(render, "/runRenderFrame");
            renderArgs._hudOperator = displayPlugin->getHUDOperator();
            renderArgs._hudTexture = qApp->getApplicationOverlay().getOverlayTexture();
            renderArgs._blitFramebuffer = finalFramebuffer;
            render_runRenderFrame(&renderArgs);
        }
    }

    auto frame = getGPUContext()->endFrame();
    frame->frameIndex = _renderFrameCount;
    frame->framebuffer = finalFramebuffer;
    frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
        auto frameBufferCache = DependencyManager::get<FramebufferCache>();
        if (frameBufferCache) {
            frameBufferCache->releaseFramebuffer(framebuffer);
        }
    };
    // deliver final scene rendering commands to the display plugin
    {
        PROFILE_RANGE(render, "/pluginOutput");
        PerformanceTimer perfTimer("pluginOutput");
        _renderLoopCounter.increment();
        displayPlugin->submitFrame(frame);
    }

    // Reset the framebuffer and stereo state
    renderArgs._blitFramebuffer.reset();
    renderArgs._context->enableStereo(false);

#if !defined(DISABLE_QML)
    {
        auto stats = Stats::getInstance();
        if (stats) {
            stats->setRenderDetails(renderArgs._details);
        }
    }
#endif

    uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
    _frameTimingsScriptingInterface.addValue(lastPaintDuration);
}
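
The runOvrThread example below renders each eye on a background thread and guards shared OVR state with a withLock helper that is not shown. A minimal sketch, assuming any BasicLockable such as std::mutex:

#include <mutex>

template <typename Lockable, typename Function>
void withLock(Lockable & lock, Function function) {
  // Hold the lock for exactly the duration of the callback.
  std::lock_guard<Lockable> guard(lock);
  function();
}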
  void runOvrThread() {
    // Make the shared context current
    glfwMakeContextCurrent(renderWindow);
    // Each thread requires its own glewInit call.
    glewInit();

    // Synchronization to determine when a given eye's render commands have completed
    GLsync eyeFences[2]{0, 0};
    // The index of the current rendering target framebuffer.  
    int backBuffers[2]{0, 0};
    // The pose for each rendered framebuffer
    ovrPosef backPoses[2];

    // Offscreen rendering targets: two for each eye, one used for rendering
    // while the other is used for distortion.
    gl::FrameBufferWrapper frameBuffers[2][2];
    for_each_eye([&](ovrEyeType eye) {
      glm::uvec2 frameBufferSize = Rift::fromOvr(eyeTextures[0].Header.TextureSize);
      for (int i = 0; i < 2; ++i) {
        frameBuffers[i][eye].init(frameBufferSize);
      }
    });

    while (running) {
      for (int i = 0; i < 2; ++i) {
        for_each_eye([&](ovrEyeType eye) {
          if (0 != eyeFences[eye]) {
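            // Poll the fence (timeout 0) to see whether the GPU has finished
            // this eye; if so, publish the texture and flip the back buffer.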
            GLenum result = glClientWaitSync(eyeFences[eye], GL_SYNC_FLUSH_COMMANDS_BIT, 0);
            switch (result) {
            case GL_ALREADY_SIGNALED:
            case GL_CONDITION_SATISFIED:
              withLock(ovrLock, [&]{
                eyeFences[eye] = 0;
                int bufferIndex = backBuffers[eye];
                textureIds[eye] = frameBuffers[bufferIndex][eye].color->texture;
                backBuffers[eye] = (bufferIndex + 1) % 2;
                eyePoses[eye] = backPoses[eye];
              });
              break;
            }
          }
        });


        ovrEyeType eye = hmd->EyeRenderOrder[i];
        if (0 != eyeFences[eye]) {
          continue;
        }

        gl::MatrixStack & mv = gl::Stacks::modelview();
        gl::Stacks::projection().top() = projections[eye];
        gl::Stacks::with_push(mv, [&]{
          const ovrEyeRenderDesc & erd = eyeRenderDescs[eye];

          // We can only acquire an eye pose between BeginFrame and EndFrame,
          // so we've arranged for the lock to be open only at those points.
          // The main thread will spend most of its time in the wait.
          ::withLock(ovrLock, [&]{
            if (running) {
              backPoses[eye] = ovrHmd_GetEyePose(hmd, eye);
            }
          });

          {
            // Apply the head pose
            glm::mat4 m = Rift::fromOvr(backPoses[eye]);
            mv.preMultiply(glm::inverse(m));
            // Apply the per-eye offset
            glm::vec3 eyeOffset = Rift::fromOvr(erd.ViewAdjust);
            mv.preMultiply(glm::translate(glm::mat4(), eyeOffset));
          }

          int bufferIndex = backBuffers[eye];
          gl::FrameBufferWrapper & frameBuffer = frameBuffers[bufferIndex][eye];
          // Render the scene to an offscreen buffer
          frameBuffer.activate();
          renderScene();
          frameBuffer.deactivate();
          eyeFences[eye] = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
        });
      }
    }
  }