Example 1
void Render(float dt)
{
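    // Note: frameIndex is incremented each frame but is never passed to
    // ovrHmd_BeginFrame (a literal 0 is used) or referenced again below.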
    static unsigned int frameIndex = 0;
    frameIndex++;
    ovrFrameTiming timing = ovrHmd_BeginFrame(s_hmd, 0);

    // ovrSensorState ss = ovrHmd_GetSensorState(s_hmd, timing.ScanoutMidpointSeconds);
    // TODO: Use this for head tracking (see the sketch after this example).
    // TODO: Use the player height from the SDK.

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    // render into fbo
    glBindFramebuffer(GL_FRAMEBUFFER, s_fbo);

    // TODO: revisit this render state (depth test, culling, blending) once the rendering gets more complex.
    glEnable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    static float t = 0.0f;
    t += dt;   // accumulated time; not used further in this snippet

    // clear render target
    glViewport(0, 0, s_renderTargetSize.w, s_renderTargetSize.h);
    glClearColor(s_clearColor.x, s_clearColor.y, s_clearColor.z, s_clearColor.w);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int i = 0; i < 2; i++)
    {
        ovrEyeType eye = s_hmdDesc.EyeRenderOrder[i];
        ovrPosef pose = ovrHmd_BeginEyeRender(s_hmd, eye);

        glViewport(s_eyeTexture[eye].Header.RenderViewport.Pos.x,
                   s_eyeTexture[eye].Header.RenderViewport.Pos.y,
                   s_eyeTexture[eye].Header.RenderViewport.Size.w,
                   s_eyeTexture[eye].Header.RenderViewport.Size.h);

        Quatf q(pose.Orientation.x, pose.Orientation.y, pose.Orientation.z, pose.Orientation.w);
        Vector3f p(pose.Position.x, pose.Position.y, pose.Position.z);

        Matrixf cameraMatrix = Matrixf::QuatTrans(q, s_cameraPos);
        Matrixf viewCenter = cameraMatrix.OrthoInverse();

        // Let libOVR compute the projection matrix; building it by hand is error-prone.
        ovrMatrix4f ovrProj = ovrMatrix4f_Projection(s_eyeRenderDesc[eye].Fov, 0.1f, 10000.0f, true);

        // Convert to an Abaci matrix.
        Matrixf projMatrix = Matrixf::Rows(Vector4f(ovrProj.M[0][0], ovrProj.M[0][1], ovrProj.M[0][2], ovrProj.M[0][3]),
                                           Vector4f(ovrProj.M[1][0], ovrProj.M[1][1], ovrProj.M[1][2], ovrProj.M[1][3]),
                                           Vector4f(ovrProj.M[2][0], ovrProj.M[2][1], ovrProj.M[2][2], ovrProj.M[2][3]),
                                           Vector4f(ovrProj.M[3][0], ovrProj.M[3][1], ovrProj.M[3][2], ovrProj.M[3][3]));

        // Apply the per-eye offset from EyeRenderDesc.ViewAdjust.
        Matrixf viewMatrix = viewCenter * Matrixf::Trans(Vector3f(s_eyeRenderDesc[eye].ViewAdjust.x,
                                                                  s_eyeRenderDesc[eye].ViewAdjust.y,
                                                                  s_eyeRenderDesc[eye].ViewAdjust.z));

        // compute model matrix for terminal
        const float kTermScale = 0.001f;
        const Vector3f termOrigin(-2 * kFeetToMeters, 6.75f * kFeetToMeters, -2.5 * kFeetToMeters);
        Matrixf modelMatrix = Matrixf::ScaleQuatTrans(Vector3f(kTermScale, -kTermScale, kTermScale),
                                                      Quatf::AxisAngle(Vector3f(0, 1, 0), 0),
                                                      termOrigin);
        RenderBegin();

        RenderFloor(projMatrix, viewMatrix, 0.0f);

        RenderTextBegin(projMatrix, viewMatrix, modelMatrix);
        for (int j = 0; j < win_get_text_count(); j++)
        {
            gb::Text* text = (gb::Text*)win_get_text(j);
            if (text)
            {
                RenderText(text->GetQuadVec());
            }
        }
        RenderTextEnd();

        RenderEnd();
        ovrHmd_EndEyeRender(s_hmd, eye, pose, &s_eyeTexture[eye]);
    }

    ovrHmd_EndFrame(s_hmd);
}
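
This example leaves head tracking half-finished (see the TODOs above): the per-eye pose returned by ovrHmd_BeginEyeRender already drives the camera orientation, but the position vector p is built and then discarded. A minimal sketch of closing that gap, assuming the Abaci Vector3f/Matrixf types used above support component-wise addition, folds p into the camera translation inside the eye loop:

        // Sketch only: inside the per-eye loop above.
        Quatf q(pose.Orientation.x, pose.Orientation.y, pose.Orientation.z, pose.Orientation.w);
        Vector3f p(pose.Position.x, pose.Position.y, pose.Position.z);

        // s_cameraPos stays the fixed player position; p adds the tracked head offset.
        Matrixf cameraMatrix = Matrixf::QuatTrans(q, s_cameraPos + p);
        Matrixf viewCenter = cameraMatrix.OrthoInverse();

The second TODO (player height from the SDK) could then be folded into s_cameraPos once at startup rather than recomputed every frame.
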
Example 2
void OculusWorldDemoApp::OnIdle()
{
    double curtime = ovr_GetTimeInSeconds();
    // If running slower than 10fps, clamp. Helps when debugging, because then dt can be minutes!
    float  dt      = Alg::Min<float>(float(curtime - LastUpdate), 0.1f);
    LastUpdate     = curtime;    


    Profiler.RecordSample(RenderProfiler::Sample_FrameStart);

    if (LoadingState == LoadingState_DoLoad)
    {
        PopulateScene(MainFilePath.ToCStr());
        LoadingState = LoadingState_Finished;
        return;
    }    

    if (HmdSettingsChanged)
    {
        CalculateHmdValues();        
        HmdSettingsChanged = false;
    }

    HmdFrameTiming = ovrHmd_BeginFrame(Hmd, 0);


    // Update gamepad.
    GamepadState gamepadState;
    if (GetPlatformCore()->GetGamepadManager()->GetGamepadState(0, &gamepadState))
    {
        GamepadStateChanged(gamepadState);
    }

    SensorState ss = ovrHmd_GetSensorState(Hmd, HmdFrameTiming.ScanoutMidpointSeconds);
    HmdStatus = ss.StatusFlags;

    // Update the popup message when the positional tracking status changes.
    bool hadVisionTracking = HaveVisionTracking;
    HaveVisionTracking = (ss.StatusFlags & Status_PositionTracked) != 0;
    if (HaveVisionTracking && !hadVisionTracking)
        Menu.SetPopupMessage("Vision Tracking Acquired");
    if (!HaveVisionTracking && hadVisionTracking)
        Menu.SetPopupMessage("Lost Vision Tracking");
    
    // Check if any new devices were connected.
    ProcessDeviceNotificationQueue();
    // FPS count and timing.
    UpdateFrameRateCounter(curtime);

    
    // Update the head pose from this frame's sensor prediction.
    ThePlayer.HeadPose = ss.Predicted.Pose;
    // Movement/rotation with the gamepad.
    ThePlayer.BodyYaw -= ThePlayer.GamepadRotate.x * dt;
    ThePlayer.HandleMovement(dt, &CollisionModels, &GroundCollisionModels, ShiftDown);


    // Record the profiler sample after game processing.
    Profiler.RecordSample(RenderProfiler::Sample_AfterGameProcessing);    


    // Determine if we are rendering this frame. Frame rendering may be
    // skipped based on FreezeEyeUpdate and Time-warp timing state.
    bool bupdateRenderedView = FrameNeedsRendering(curtime);
    
    if (bupdateRenderedView)
    {
        // If render texture size is changing, apply dynamic changes to viewport.
        ApplyDynamicResolutionScaling();

        pRender->BeginScene(PostProcess_None);

        if (ForceZeroIpd)
        {             
            // Zero-IPD eye rendering: draw into the left eye only and
            // re-use that texture for the right eye.
            pRender->SetRenderTarget(RenderTargets[Rendertarget_Left].pTex);
            pRender->Clear();
        
            ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, ovrEye_Left);
        
            View = CalculateViewFromPose(eyeRenderPose);
            RenderEyeView(ovrEye_Left);
            ovrHmd_EndEyeRender(Hmd, ovrEye_Left, eyeRenderPose, &EyeTexture[ovrEye_Left]);

            // Second eye gets the same texture (initialized to the same value above).
            ovrHmd_BeginEyeRender(Hmd, ovrEye_Right); 
            ovrHmd_EndEyeRender(Hmd, ovrEye_Right, eyeRenderPose, &EyeTexture[ovrEye_Right]);
        }

        else if (RendertargetIsSharedByBothEyes)
        {
            // Shared render target eye rendering; set up RT once for both eyes.
            pRender->SetRenderTarget(RenderTargets[Rendertarget_BothEyes].pTex);
            pRender->Clear();

            for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
            {      
                ovrEyeType eye = HmdDesc.EyeRenderOrder[eyeIndex];
                ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, eye);

                View = CalculateViewFromPose(eyeRenderPose);
                RenderEyeView(eye); 
                ovrHmd_EndEyeRender(Hmd, eye, eyeRenderPose, &EyeTexture[eye]);
            }
        }

        else
        {
            // Separate eye rendering - each eye gets its own render target.
            for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
            {      
                ovrEyeType eye = HmdDesc.EyeRenderOrder[eyeIndex];
                pRender->SetRenderTarget(
                    RenderTargets[(eye == ovrEye_Left) ? Rendertarget_Left : Rendertarget_Right].pTex);
                pRender->Clear();
            
                ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, eye);

                View = CalculateViewFromPose(eyeRenderPose);
                RenderEyeView(eye);            
                ovrHmd_EndEyeRender(Hmd, eye, eyeRenderPose, &EyeTexture[eye]);
            }
        }   

        pRender->SetDefaultRenderTarget();
        pRender->FinishScene();        
    }
        
    /*
    // Disabled busy-wait: stalls the CPU for roughly 17 ms per frame,
    // presumably left in for frame-timing experiments.
    double t = ovr_GetTimeInSeconds();
    while (ovr_GetTimeInSeconds() < (t + 0.017))
    {
    }
    */

    Profiler.RecordSample(RenderProfiler::Sample_AfterEyeRender);

    // TODO: These happen inside ovrHmd_EndFrame; need to hook into it.
    //Profiler.RecordSample(RenderProfiler::Sample_BeforeDistortion);
    ovrHmd_EndFrame(Hmd);
    Profiler.RecordSample(RenderProfiler::Sample_AfterPresent);    
}
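
Stripped of the application specifics, both examples bracket their per-frame work with the same SDK 0.3-era calls. A bare-bones sketch of that shared skeleton, where hmd, hmdDesc, and eyeTexture are placeholder names standing in for each example's own state:

ovrFrameTiming timing = ovrHmd_BeginFrame(hmd, 0);   // timing feeds sensor prediction

for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
{
    ovrEyeType eye  = hmdDesc.EyeRenderOrder[eyeIndex];
    ovrPosef   pose = ovrHmd_BeginEyeRender(hmd, eye);

    // ... build the view from pose and render the scene into eyeTexture[eye] ...

    ovrHmd_EndEyeRender(hmd, eye, pose, &eyeTexture[eye]);
}

ovrHmd_EndFrame(hmd);   // distortion and present happen here (see the TODO above)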