Code Example #1
///@brief Called once a GL context has been set up.
void initVR()
{
    const ovrHmdDesc& hmd = m_Hmd;

    for (int eye = 0; eye < 2; ++eye)
    {
        const ovrSizei& bufferSize = ovr_GetFovTextureSize(g_session, ovrEyeType(eye), hmd.DefaultEyeFov[eye], 1.f);
        LOG_INFO("Eye %d tex: %dx%d", eye, bufferSize.w, bufferSize.h);

        ovrTextureSwapChain textureSwapChain = 0;
        ovrTextureSwapChainDesc desc = {};
        desc.Type = ovrTexture_2D;
        desc.ArraySize = 1;
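        // sRGB format: pairs with glEnable(GL_FRAMEBUFFER_SRGB) at render time so the compositor receives gamma-correct pixels.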
        desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
        desc.Width = bufferSize.w;
        desc.Height = bufferSize.h;
        desc.MipLevels = 1;
        desc.SampleCount = 1;
        desc.StaticImage = ovrFalse;

        // Allocate the frameBuffer that will hold the scene, and then be
        // re-rendered to the screen with distortion
        ovrTextureSwapChain& chain = g_textureSwapChain[eye];
        if (ovr_CreateTextureSwapChainGL(g_session, &desc, &chain) == ovrSuccess)
        {
            int length = 0;
            ovr_GetTextureSwapChainLength(g_session, chain, &length);

            for (int i = 0; i < length; ++i)
            {
                GLuint chainTexId;
                ovr_GetTextureSwapChainBufferGL(g_session, chain, i, &chainTexId);
                glBindTexture(GL_TEXTURE_2D, chainTexId);
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            }
        }
        else
        {
            LOG_ERROR("Unable to create swap textures");
            return;
        }

        // Manually assemble swap FBO. The color attachment is re-bound to the
        // swap chain's current texture each frame in displayHMD(); attach the
        // chain's first texture here so the FBO starts out complete.
        FBO& swapfbo = m_swapFBO[eye];
        swapfbo.w = bufferSize.w;
        swapfbo.h = bufferSize.h;
        glGenFramebuffers(1, &swapfbo.id);
        glBindFramebuffer(GL_FRAMEBUFFER, swapfbo.id);
        ovr_GetTextureSwapChainBufferGL(g_session, chain, 0, &swapfbo.tex);
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, swapfbo.tex, 0);

        swapfbo.depth = 0;
        glGenRenderbuffers(1, &swapfbo.depth);
        glBindRenderbuffer(GL_RENDERBUFFER, swapfbo.depth);
        glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, bufferSize.w, bufferSize.h);
        glBindRenderbuffer(GL_RENDERBUFFER, 0);
        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, swapfbo.depth);

        // Check status
        const GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        if (status != GL_FRAMEBUFFER_COMPLETE)
        {
            LOG_ERROR("Framebuffer status incomplete: %d %x", status, status);
        }
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
    }

    // Initialize mirror texture
    ovrMirrorTextureDesc desc;
    memset(&desc, 0, sizeof(desc));
    desc.Width = g_mirrorWindowSz.x;
    desc.Height = g_mirrorWindowSz.y;
    desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;

    const ovrResult result = ovr_CreateMirrorTextureGL(g_session, &desc, &g_mirrorTexture);
    if (!OVR_SUCCESS(result))
    {
        LOG_ERROR("Unable to create mirror texture");
        return;
    }

    // Manually assemble mirror FBO
    m_mirrorFBO.w = g_mirrorWindowSz.x;
    m_mirrorFBO.h = g_mirrorWindowSz.y;
    glGenFramebuffers(1, &m_mirrorFBO.id);
    glBindFramebuffer(GL_FRAMEBUFFER, m_mirrorFBO.id);
    GLuint texId;
    ovr_GetMirrorTextureBufferGL(g_session, g_mirrorTexture, &texId);
    m_mirrorFBO.tex = texId;
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_mirrorFBO.tex, 0);

    const ovrSizei sz = { 600, 600 };
    g_tweakbarQuad.initGL(g_session, sz);

    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glBindTexture(GL_TEXTURE_2D, 0);

    g_hmdVisible = true;
}
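
A minimal teardown sketch for the resources created above. The function name exitVR() is hypothetical, but it reuses the same globals and FBO struct as Code Example #1; something along these lines is needed at shutdown or when handling ovrError_DisplayLost (see the @todo in Code Example #2).

///@brief Hypothetical counterpart to initVR(): releases per-eye and mirror resources.
void exitVR()
{
    for (int eye = 0; eye < 2; ++eye)
    {
        // The GL framebuffer and depth renderbuffer are owned by the app...
        FBO& swapfbo = m_swapFBO[eye];
        glDeleteRenderbuffers(1, &swapfbo.depth);
        glDeleteFramebuffers(1, &swapfbo.id);
        swapfbo.id = swapfbo.depth = swapfbo.tex = 0;

        // ...while the swap chain textures are owned by the OVR runtime.
        ovr_DestroyTextureSwapChain(g_session, g_textureSwapChain[eye]);
        g_textureSwapChain[eye] = nullptr;
    }

    ovr_DestroyMirrorTexture(g_session, g_mirrorTexture);
    g_mirrorTexture = nullptr;
    glDeleteFramebuffers(1, &m_mirrorFBO.id);
    m_mirrorFBO.id = 0;

    g_hmdVisible = false;
}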
Code Example #2
// Display to an HMD with OVR SDK backend.
void displayHMD()
{
    ovrSessionStatus sessionStatus;
    ovr_GetSessionStatus(g_session, &sessionStatus);

    if (sessionStatus.HmdPresent == false)
    {
        displayMonitor();
        return;
    }

    const ovrHmdDesc& hmdDesc = m_Hmd;
    double sensorSampleTime = 0.; // Fed into the layer's SensorSampleTime below; only written when the HMD is visible.
    if (g_hmdVisible)
    {
        // Call ovr_GetRenderDesc each frame to get the ovrEyeRenderDesc, as the returned values (e.g. HmdToEyeOffset) may change at runtime.
        ovrEyeRenderDesc eyeRenderDesc[2];
        eyeRenderDesc[0] = ovr_GetRenderDesc(g_session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
        eyeRenderDesc[1] = ovr_GetRenderDesc(g_session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);

        // Get eye poses, feeding in correct IPD offset
        ovrVector3f HmdToEyeOffset[2] = {
            eyeRenderDesc[0].HmdToEyeOffset,
            eyeRenderDesc[1].HmdToEyeOffset };
#if 0
        // Get both eye poses simultaneously, with IPD offset already included.
        double displayMidpointSeconds = ovr_GetPredictedDisplayTime(g_session, 0);
        ovrTrackingState hmdState = ovr_GetTrackingState(g_session, displayMidpointSeconds, ovrTrue);
        ovr_CalcEyePoses(hmdState.HeadPose.ThePose, HmdToEyeOffset, m_eyePoses);
#else
        ovr_GetEyePoses(g_session, g_frameIndex, ovrTrue, HmdToEyeOffset, m_eyePoses, &sensorSampleTime);
#endif
        storeHmdPose(m_eyePoses[0]);

        for (int eye = 0; eye < 2; ++eye)
        {
            const FBO& swapfbo = m_swapFBO[eye];
            const ovrTextureSwapChain& chain = g_textureSwapChain[eye];

            int curIndex;
            ovr_GetTextureSwapChainCurrentIndex(g_session, chain, &curIndex);
            GLuint curTexId;
            ovr_GetTextureSwapChainBufferGL(g_session, chain, curIndex, &curTexId);

            glBindFramebuffer(GL_FRAMEBUFFER, swapfbo.id);
            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, curTexId, 0);

            glViewport(0, 0, swapfbo.w, swapfbo.h);
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
            glEnable(GL_FRAMEBUFFER_SRGB);

            {
                glClearColor(0.3f, 0.3f, 0.3f, 0.f);
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

                const ovrSizei& downSize = ovr_GetFovTextureSize(g_session, ovrEyeType(eye), hmdDesc.DefaultEyeFov[eye], m_fboScale);
                ovrRecti vp = { 0, 0, downSize.w, downSize.h };
                const int texh = swapfbo.h;
                vp.Pos.y = (texh - vp.Size.h) / 2;
                glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);

                // Cinemascope - letterbox bars scissoring off pixels above and below vp center
                const float hc = .5f * m_cinemaScope;
                const int scisPx = static_cast<int>(hc * static_cast<float>(vp.Size.h));
                ovrRecti sp = vp;
                sp.Pos.y += scisPx;
                sp.Size.h -= 2 * scisPx;
                glScissor(sp.Pos.x, sp.Pos.y, sp.Size.w, sp.Size.h);
                glEnable(GL_SCISSOR_TEST);
                glEnable(GL_DEPTH_TEST);

                // Render the scene for the current eye
                const ovrPosef& eyePose = m_eyePoses[eye];
                const glm::mat4 mview =
                    makeWorldToChassisMatrix() *
                    makeMatrixFromPose(eyePose, m_headSize);
                const ovrMatrix4f ovrproj = ovrMatrix4f_Projection(hmdDesc.DefaultEyeFov[eye], 0.2f, 1000.0f, ovrProjection_None);
                const glm::mat4 proj = makeGlmMatrixFromOvrMatrix(ovrproj);
                g_pScene->RenderForOneEye(glm::value_ptr(glm::inverse(mview)), glm::value_ptr(proj));

                const ovrTextureSwapChain& chain = g_textureSwapChain[eye];
                const ovrResult commitres = ovr_CommitTextureSwapChain(g_session, chain);
                if (!OVR_SUCCESS(commitres))
                {
                    LOG_ERROR("ovr_CommitTextureSwapChain returned %d", commitres);
                    return;
                }
            }
            glDisable(GL_SCISSOR_TEST);

            // Grab a copy of the left eye's undistorted render output for presentation
            // to the desktop window instead of the barrel distorted mirror texture.
            // This blit, while cheap, could cost some framerate to the HMD.
            // An over-the-shoulder view is another option, at a greater performance cost.
            if (0)
            {
                if (eye == ovrEyeType::ovrEye_Left)
                {
                    BlitLeftEyeRenderToUndistortedMirrorTexture();
                }
            }

            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
            glBindFramebuffer(GL_FRAMEBUFFER, 0);
        }
    }

    // Do distortion rendering, Present and flush/sync.
    // Declare the layer structs outside the block below so the pointers pushed
    // into layerHeaders remain valid when ovr_SubmitFrame is called.
    ovrLayerEyeFov ld = {};
    ovrLayerQuad ql = {};
    std::vector<const ovrLayerHeader*> layerHeaders;
    {
        ld.Header.Type = ovrLayerType_EyeFov;
        ld.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft; // Because OpenGL.

        for (int eye = 0; eye < 2; ++eye)
        {
            const FBO& swapfbo = m_swapFBO[eye];
            const ovrTextureSwapChain& chain = g_textureSwapChain[eye];

            ld.ColorTexture[eye] = chain;

            const ovrSizei& downSize = ovr_GetFovTextureSize(g_session, ovrEyeType(eye), hmdDesc.DefaultEyeFov[eye], m_fboScale);
            ovrRecti vp = { 0, 0, downSize.w, downSize.h };
            const int texh = swapfbo.h;
            vp.Pos.y = (texh - vp.Size.h) / 2;

            ld.Viewport[eye] = vp;
            ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye];
            ld.RenderPose[eye] = m_eyePoses[eye];
            ld.SensorSampleTime = sensorSampleTime;
        }
        layerHeaders.push_back(&ld.Header);

        // Optionally add a second layer: an in-world quad for the tweakbar UI.
        if (g_tweakbarQuad.m_showQuadInWorld)
        {
            ql.Header.Type = ovrLayerType_Quad;
            ql.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft; // Because OpenGL.

            ql.ColorTexture = g_tweakbarQuad.m_swapChain;
            ovrRecti vp;
            vp.Pos.x = 0;
            vp.Pos.y = 0;
            vp.Size.w = 600; ///@todo
            vp.Size.h = 600; ///@todo
            ql.Viewport = vp;
            ql.QuadPoseCenter = g_tweakbarQuad.m_QuadPoseCenter;
            ql.QuadSize = { 1.f, 1.f }; ///@todo Pass in

            g_tweakbarQuad.SetHmdEyeRay(m_eyePoses[ovrEyeType::ovrEye_Left]); // Writes to m_layerQuad.QuadPoseCenter
            g_tweakbarQuad.DrawToQuad();
            layerHeaders.push_back(&ql.Header);
        }
    }

#if 0
    ovrViewScaleDesc viewScaleDesc;
    viewScaleDesc.HmdToEyeOffset[0] = m_eyeOffsets[0];
    viewScaleDesc.HmdToEyeOffset[1] = m_eyeOffsets[1];
    viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.f;
#endif

    const ovrResult result = ovr_SubmitFrame(g_session, g_frameIndex, nullptr, layerHeaders.data(), static_cast<unsigned int>(layerHeaders.size()));
    if (result == ovrSuccess)
    {
        g_hmdVisible = true;
    }
    else if (result == ovrSuccess_NotVisible)
    {
        g_hmdVisible = false;
        ///@todo Enter a lower-power, polling "no focus/HMD not worn" mode
    }
    else if (result == ovrError_DisplayLost)
    {
        LOG_INFO("ovr_SubmitFrame returned ovrError_DisplayLost");
        g_hmdVisible = false;
        ///@todo Tear down textures and session and re-create
    }
    else
    {
        LOG_INFO("ovr_SubmitFrame returned %d", result);
        //g_hmdVisible = false;
    }

    // Handle OVR session events
    ovr_GetSessionStatus(g_session, &sessionStatus);
    if (sessionStatus.ShouldQuit)
    {
        glfwSetWindowShouldClose(g_pMirrorWindow, 1);
    }
    if (sessionStatus.ShouldRecenter)
    {
        ovr_RecenterTrackingOrigin(g_session);
    }

    // Blit mirror texture to monitor window
    if (g_hmdVisible)
    {
        glViewport(0, 0, g_mirrorWindowSz.x, g_mirrorWindowSz.y);
        const FBO& srcFBO = m_mirrorFBO;
        glBindFramebuffer(GL_READ_FRAMEBUFFER, srcFBO.id);
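        // Source Y coordinates are swapped (srcFBO.h down to 0) so the blit flips the mirror image vertically.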
        glBlitFramebuffer(
            0, srcFBO.h, srcFBO.w, 0,
            0, 0, g_mirrorWindowSz.x, g_mirrorWindowSz.y,
            GL_COLOR_BUFFER_BIT, GL_NEAREST);
        glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
    }
    else
    {
        displayMonitor();
    }
    ++g_frameIndex;

#ifdef USE_ANTTWEAKBAR
    if (g_tweakbarQuad.m_showQuadInWorld)
    {
        TwDraw();
    }
#endif
}
Code Example #3
void OculusWindow::initialize_hmd_environment()
{
    try {

        auto result = ovr_Initialize(nullptr);

        if (!OVR_SUCCESS(result)) {
            Logger::LOG_WARNING << "Failed to initialize oculus environment! Error code: " << (int)result << std::endl;
        }

        ovrGraphicsLuid luid;
        result = ovr_Create(&hmd_session_, &luid);

        if (!OVR_SUCCESS(result)) {

            throw std::runtime_error("Unable to create HMD.");
        }

        hmd_desc_ = ovr_GetHmdDesc(hmd_session_);

        // get optimal texture size for rendering
        ovrSizei ideal_texture_size_left = ovr_GetFovTextureSize(hmd_session_, ovrEyeType(0), hmd_desc_.DefaultEyeFov[0], 1);
        ovrSizei ideal_texture_size_right = ovr_GetFovTextureSize(hmd_session_, ovrEyeType(1), hmd_desc_.DefaultEyeFov[1], 1);

        math::vec2ui window_size(ideal_texture_size_left.w + ideal_texture_size_right.w, std::max(ideal_texture_size_left.h, ideal_texture_size_right.h));

        // initialize window => resolution is independent of rendering resolution!
        config.set_size(window_size);

        config.set_left_resolution(math::vec2ui(ideal_texture_size_left.w, ideal_texture_size_left.h));
        config.set_left_position(math::vec2ui(0, 0));
        config.set_right_resolution(math::vec2ui(ideal_texture_size_right.w, ideal_texture_size_right.h));
        config.set_right_position(math::vec2ui(ideal_texture_size_left.w, 0));

        // Initialize VR structures, filling out description.
        ovrEyeRenderDesc eyeRenderDesc[2];
        ovrVector3f      hmdToEyeViewOffset[2];

        eyeRenderDesc[0] = ovr_GetRenderDesc(hmd_session_, ovrEye_Left, hmd_desc_.DefaultEyeFov[0]);
        eyeRenderDesc[1] = ovr_GetRenderDesc(hmd_session_, ovrEye_Right, hmd_desc_.DefaultEyeFov[1]);

        hmdToEyeViewOffset[0] = eyeRenderDesc[0].HmdToEyeOffset;
        hmdToEyeViewOffset[1] = eyeRenderDesc[1].HmdToEyeOffset;
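
        // These offsets are not used further here; at render time they are typically fed to
        // ovr_GetEyePoses / ovr_CalcEyePoses to compute per-eye camera poses (see Code Example #2).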

        // Initialize our single full screen Fov layer.
        color_layer_.Header.Type = ovrLayerType_EyeFov;
        color_layer_.Header.Flags = 0;
        color_layer_.Fov[0] = eyeRenderDesc[0].Fov;
        color_layer_.Fov[1] = eyeRenderDesc[1].Fov;

        ovrRecti left_viewport;
        left_viewport.Size = { int(config.left_resolution().x), int(config.left_resolution().y) };
        left_viewport.Pos = { int(config.left_position().x), int(config.left_position().y) };

        ovrRecti right_viewport;
        right_viewport.Size = { int(config.right_resolution().x), int(config.right_resolution().y) };
        right_viewport.Pos = { int(config.right_position().x), int(config.right_position().y) };

        color_layer_.Viewport[0] = left_viewport;
        color_layer_.Viewport[1] = right_viewport;

    }
    catch (std::exception& e) {
        gua::Logger::LOG_WARNING << "Failed to initialize oculus rift.\n" << e.what() << std::endl;
    }
}