// Example #1
void OculusWorldDemoApp::OnIdle()
{
    double curtime = ovr_GetTimeInSeconds();
    // If running slower than 10fps, clamp. Helps when debugging, because then dt can be minutes!
    float  dt      = Alg::Min<float>(float(curtime - LastUpdate), 0.1f);
    LastUpdate     = curtime;    


    Profiler.RecordSample(RenderProfiler::Sample_FrameStart);

    if (LoadingState == LoadingState_DoLoad)
    {
        PopulateScene(MainFilePath.ToCStr());
        LoadingState = LoadingState_Finished;
        return;
    }    

    if (HmdSettingsChanged)
    {
        CalculateHmdValues();        
        HmdSettingsChanged = false;
    }

    HmdFrameTiming = ovrHmd_BeginFrame(Hmd, 0);


    // Update gamepad.
    GamepadState gamepadState;
    if (GetPlatformCore()->GetGamepadManager()->GetGamepadState(0, &gamepadState))
    {
        GamepadStateChanged(gamepadState);
    }

    SensorState ss = ovrHmd_GetSensorState(Hmd, HmdFrameTiming.ScanoutMidpointSeconds);
    HmdStatus = ss.StatusFlags;

    // Change message status around positional tracking.
	bool hadVisionTracking = HaveVisionTracking;
	HaveVisionTracking = (ss.StatusFlags & Status_PositionTracked) != 0;
	if (HaveVisionTracking && !hadVisionTracking)
		Menu.SetPopupMessage("Vision Tracking Acquired");
    if (!HaveVisionTracking && hadVisionTracking)
		Menu.SetPopupMessage("Lost Vision Tracking");
    
    // Check if any new devices were connected.
    ProcessDeviceNotificationQueue();
    // FPS count and timing.
    UpdateFrameRateCounter(curtime);

    
    // Update pose based on frame!
    ThePlayer.HeadPose = ss.Predicted.Pose;
    // Movement/rotation with the gamepad.
    ThePlayer.BodyYaw -= ThePlayer.GamepadRotate.x * dt;
    ThePlayer.HandleMovement(dt, &CollisionModels, &GroundCollisionModels, ShiftDown);


    // Record after processing time.
    Profiler.RecordSample(RenderProfiler::Sample_AfterGameProcessing);    


    // Determine if we are rendering this frame. Frame rendering may be
    // skipped based on FreezeEyeUpdate and Time-warp timing state.
    bool bupdateRenderedView = FrameNeedsRendering(curtime);
    
    if (bupdateRenderedView)
    {
        // If render texture size is changing, apply dynamic changes to viewport.
        ApplyDynamicResolutionScaling();

        pRender->BeginScene(PostProcess_None);

        if (ForceZeroIpd)
        {             
            // Zero IPD eye rendering: draw into left eye only,
            // re-use  texture for right eye.
            pRender->SetRenderTarget(RenderTargets[Rendertarget_Left].pTex);
            pRender->Clear();
        
            ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, ovrEye_Left);
        
            View = CalculateViewFromPose(eyeRenderPose);
            RenderEyeView(ovrEye_Left);
            ovrHmd_EndEyeRender(Hmd, ovrEye_Left, eyeRenderPose, &EyeTexture[ovrEye_Left]);

            // Second eye gets the same texture (initialized to same value above).
            ovrHmd_BeginEyeRender(Hmd, ovrEye_Right); 
            ovrHmd_EndEyeRender(Hmd, ovrEye_Right, eyeRenderPose, &EyeTexture[ovrEye_Right]);
        }

        else if (RendertargetIsSharedByBothEyes)
        {
            // Shared render target eye rendering; set up RT once for both eyes.
            pRender->SetRenderTarget(RenderTargets[Rendertarget_BothEyes].pTex);
            pRender->Clear();

            for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
            {      
                ovrEyeType eye = HmdDesc.EyeRenderOrder[eyeIndex];
                ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, eye);

                View = CalculateViewFromPose(eyeRenderPose);
                RenderEyeView(eye); 
                ovrHmd_EndEyeRender(Hmd, eye, eyeRenderPose, &EyeTexture[eye]);
            }
        }

        else
        {
            // Separate eye rendering - each eye gets its own render target.
            for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
            {      
                ovrEyeType eye = HmdDesc.EyeRenderOrder[eyeIndex];
                pRender->SetRenderTarget(
                    RenderTargets[(eye == 0) ? Rendertarget_Left : Rendertarget_Right].pTex);
                pRender->Clear();
            
                ovrPosef eyeRenderPose = ovrHmd_BeginEyeRender(Hmd, eye);

                View = CalculateViewFromPose(eyeRenderPose);
                RenderEyeView(eye);            
                ovrHmd_EndEyeRender(Hmd, eye, eyeRenderPose, &EyeTexture[eye]);
            }
        }   

        pRender->SetDefaultRenderTarget();
        pRender->FinishScene();        
    }
        
    /*
    double t= ovr_GetTimeInSeconds();
    while (ovr_GetTimeInSeconds() < (t + 0.017))
    {

    } */

    Profiler.RecordSample(RenderProfiler::Sample_AfterEyeRender);

    // TODO: These happen inside ovrHmd_EndFrame; need to hook into it.
    //Profiler.RecordSample(RenderProfiler::Sample_BeforeDistortion);
    ovrHmd_EndFrame(Hmd);
    Profiler.RecordSample(RenderProfiler::Sample_AfterPresent);    
}
// Per-tick debug update: refreshes the FPS/timing counters.
// NOTE(review): trackingState is not used by this overload — confirm whether
// the parameter exists only to satisfy the callback signature.
void OculusVRDebug::OnUpdate(const ovrTrackingState &trackingState)
{
    const double now = ovr_GetTimeInSeconds();
    UpdateFrameRateCounter(now);
}