예제 #1
0
// Requests dismissal of the Health and Safety Warning.
// Returns true when the warning was actually dismissed this call; returns
// false when dismissal is queued until the minimum display time has elapsed.
bool HSWDisplay::Dismiss()
{
    #if HSWDISPLAY_DEBUGGING
        // While scroll-lock is toggled on, refuse to dismiss so the display
        // stays up for debugging.
        if(GetKeyState(VK_SCROLL) & 0x0001)
            return false;
    #endif

    // Record whether this call is the first one to request dismissal.
    const bool firstRequest = !DismissRequested;
    if(firstRequest)
        DismissRequested = true;

    // Sanity check: the dismissible time must not exceed the first-dismissal window.
    OVR_ASSERT(DismissibleTime <= (ovr_GetTimeInSeconds() + HSWDISPLAY_FIRST_DISMISSAL_TIME));

    // Dismiss now only if the warning is showing and its minimum display time has passed.
    if (Displayed && (ovr_GetTimeInSeconds() >= DismissibleTime))
    {
        DismissInternal();
        Displayed        = false;
        DismissRequested = false;
        SDKRendered      = false;
        return true;
    }

    if(firstRequest)
        { HSWDISPLAY_LOG(("[HSWDisplay] Dismiss(): Not permitted yet. Queued for timeout in %.1f seconds.", DismissibleTime - ovr_GetTimeInSeconds())); }

    return false; // Cannot dismiss yet.
}
// Renders the distortion pass for the frame and optionally presents it.
// When timewarp is enabled (and spin-waits aren't suppressed), busy-waits
// until the timewarp point so distortion samples the freshest head pose.
void DistortionRenderer::EndFrame(bool swapBuffers)
{
	///QUESTION : Clear the screen? 
	///QUESTION : Ensure the screen is the render target

    // Don't spin if we are explicitly asked not to
    // (note: '&' binds tighter than '&&', so the first clause tests the TimeWarp cap bit).
    if (RState.DistortionCaps & ovrDistortionCap_TimeWarp &&
        !(RState.DistortionCaps & ovrDistortionCap_ProfileNoTimewarpSpinWaits))
    {
        if (!TimeManager.NeedDistortionTimeMeasurement())
        {
            // Wait for timewarp distortion if it is time and Gpu idle
            FlushGpuAndWaitTillTime(TimeManager.GetFrameTiming().TimewarpPointTime);

            renderEndFrame();
        }
        else
        {
            // If needed, measure distortion time so that TimeManager can better estimate
            // latency-reducing time-warp wait timing.
            WaitUntilGpuIdle();
            double  distortionStartTime = ovr_GetTimeInSeconds();

            renderEndFrame();

            WaitUntilGpuIdle();
            TimeManager.AddDistortionTimeMeasurement(ovr_GetTimeInSeconds() - distortionStartTime);
        }
    }
    else
    {
        // Timewarp disabled (or spin-waits suppressed): just render distortion now.
        renderEndFrame();
    }

    if (LatencyTestActive)
    {
        renderLatencyQuad(LatencyTestDrawColor);
    }

    if (swapBuffers)
    {
        // Present through the swap chain when one exists, otherwise directly
        // through the device.
        if (SwapChain)
        {
            SwapChain->Present(NULL, NULL, NULL, NULL, 0);
        }
        else
        {
		    Device->Present( NULL, NULL, NULL, NULL );
        }

        // Force GPU to flush the scene, resulting in the lowest possible latency.
        // It's critical that this flush is *after* present.
        // Doesn't need to be done if running through the Oculus driver.
        if (RState.OurHMDInfo.InCompatibilityMode &&
            !(RState.DistortionCaps & ovrDistortionCap_ProfileNoTimewarpSpinWaits))
        {
            WaitUntilGpuIdle();
        }
    }
}
예제 #3
0
// Handles a click on the scrub bar: cancels any active rw/ff seek, then (rate
// limited) seeks the media player to the clicked position.
void MoviePlayerView::ScrubBarClicked( const float progress )
{
	// A click while rw/ff'ing cancels the seek and resumes normal playback.
	if ( SeekSpeed != 0 )
	{
		SeekSpeed = 0;
		PlayMovie();
		SetSeekIcon( SeekSpeed );
		NextSeekTime = 0;
	}

	// Throttle: ignore clicks that arrive before the next allowed seek time.
	if ( ovr_GetTimeInSeconds() <= NextSeekTime )
	{
		return;
	}

	// Convert normalized progress into an absolute media position and apply it.
	const int newPosition = Cinema.SceneMgr.MovieDuration * progress;
	Native::SetPosition( Cinema.app, newPosition );
	ScrubBar.SetProgress( progress );

	// Allow at most one position change every 100ms.
	NextSeekTime = ovr_GetTimeInSeconds() + 0.1;
}
// Renders the distortion meshes for both eyes and optionally presents.
// Latency-tester rendering is stubbed out (see QUESTION/TODO below).
void DistortionRenderer::EndFrame(bool swapBuffers,
                                  unsigned char* latencyTesterDrawColor, unsigned char* latencyTester2DrawColor)
{
	// NOTE(review): both parameters marked unused here are in fact read below;
	// these OVR_UNUSED calls look stale.
	OVR_UNUSED(swapBuffers);
	OVR_UNUSED(latencyTesterDrawColor);

	///QUESTION : Should I be clearing the screen? 
	///QUESTION : Should I be ensuring the screen is the render target

	if (!TimeManager.NeedDistortionTimeMeasurement())
    {
		if (RState.DistortionCaps & ovrDistortionCap_TimeWarp)
		{
			// Wait for timewarp distortion if it is time and Gpu idle
			WaitTillTimeAndFlushGpu(TimeManager.GetFrameTiming().TimewarpPointTime);
		}

        RenderBothDistortionMeshes();
    }
    else
    {
        // If needed, measure distortion time so that TimeManager can better estimate
        // latency-reducing time-warp wait timing.
        WaitUntilGpuIdle();
        double  distortionStartTime = ovr_GetTimeInSeconds();

        RenderBothDistortionMeshes();
        WaitUntilGpuIdle();

        TimeManager.AddDistortionTimeMeasurement(ovr_GetTimeInSeconds() - distortionStartTime);
    }

    if(latencyTesterDrawColor)
    {
		///QUESTION : Is this still to be supported?
        ///renderLatencyQuad(latencyTesterDrawColor);
    }

    if(latencyTester2DrawColor)
    {
        // TODO: second latency tester rendering is not implemented.
    }

    if (swapBuffers)
    {
        // Present through the swap chain when one exists, otherwise directly
        // through the device.
        if (swapChain)
        {
            swapChain->Present(NULL, NULL, NULL, NULL, 0);
        }
        else
        {
		    device->Present( NULL, NULL, NULL, NULL );
        }

        // Force GPU to flush the scene, resulting in the lowest possible latency.
        // It's critical that this flush is *after* present.
        WaitUntilGpuIdle();
    }
}
예제 #5
0
// One-time initialization of the movie player view; logs how long setup took.
void MoviePlayerView::OneTimeInit( const char * launchIntent )
{
	LOG( "MoviePlayerView::OneTimeInit" );

	// Time the setup so slow startups show up in the log.
	const double startTime = ovr_GetTimeInSeconds();

	GazeUserId = Cinema.app->GetGazeCursor().GenerateUserId();
	CreateMenu( Cinema.app, Cinema.app->GetVRMenuMgr(), Cinema.app->GetDefaultFont() );

	LOG( "MoviePlayerView::OneTimeInit: %3.1f seconds", ovr_GetTimeInSeconds() - startTime );
}
예제 #6
0
// Demonstrates dynamic-resolution rendering by oscillating the render
// viewport between 50% and 100% of the full per-eye render size.
void OculusWorldDemoApp::ApplyDynamicResolutionScaling()
{
    if (!DynamicRezScalingEnabled)
    {
        // Restore viewport rectangle in case dynamic res scaling was enabled before.
        EyeTexture[0].Header.RenderViewport.Size = EyeRenderSize[0];
        EyeTexture[1].Header.RenderViewport.Size = EyeRenderSize[1];
        return;
    }
   
    // Demonstrate dynamic-resolution rendering.
    // This demo is too simple to actually have a framerate that varies that much, so we'll
    // just pretend this is trying to cope with highly dynamic rendering load.
    float dynamicRezScale = 1.0f;

    {
        // Hacky stuff to make up a scaling...
        // This produces value oscillating as follows: 0 -> 1 -> 0.        
        // The start time latches on the first call and is reset each full cycle.
        static double dynamicRezStartTime   = ovr_GetTimeInSeconds();
        float         dynamicRezPhase       = float ( ovr_GetTimeInSeconds() - dynamicRezStartTime );
        const float   dynamicRezTimeScale   = 4.0f;  // seconds per half-cycle

        dynamicRezPhase /= dynamicRezTimeScale;
        if ( dynamicRezPhase < 1.0f )
        {
            // Ramp up 0 -> 1 over the first half-cycle.
            dynamicRezScale = dynamicRezPhase;
        }
        else if ( dynamicRezPhase < 2.0f )
        {
            // Ramp down 1 -> 0 over the second half-cycle.
            dynamicRezScale = 2.0f - dynamicRezPhase;
        }
        else
        {
            // Reset it to prevent creep.
            dynamicRezStartTime = ovr_GetTimeInSeconds();
            dynamicRezScale     = 0.0f;
        }

        // Map oscillation: 0.5 -> 1.0 -> 0.5
        dynamicRezScale = dynamicRezScale * 0.5f + 0.5f;
    }

    Sizei sizeLeft  = EyeRenderSize[0];
    Sizei sizeRight = EyeRenderSize[1];
    
    // This viewport is used for rendering and passed into ovrHmd_EndEyeRender.
    EyeTexture[0].Header.RenderViewport.Size = Sizei(int(sizeLeft.w  * dynamicRezScale),
                                                     int(sizeLeft.h  * dynamicRezScale));
    EyeTexture[1].Header.RenderViewport.Size = Sizei(int(sizeRight.w * dynamicRezScale),
                                                     int(sizeRight.h * dynamicRezScale));
}
예제 #7
0
// Per-idle-tick VR housekeeping: enters/leaves VR mode based on dock state,
// feeds the predicted head orientation to the client while docked, and routes
// Android back-key events through the OVR KeyState machine.
void GVRInterface::idle() {
#if defined(ANDROID) && defined(HAVE_LIBOVR)
    if (!_inVRMode && ovr_IsHeadsetDocked()) {
        qDebug() << "The headset just got docked - enter VR mode.";
        enterVRMode();
    } else if (_inVRMode) {

        if (ovr_IsHeadsetDocked()) {
            static int counter = 0;

            // Get the latest head tracking state, predicted ahead to the midpoint of the time
            // it will be displayed.  It will always be corrected to the real values by
            // time warp, but the closer we get, the less black will be pulled in at the edges.
            const double now = ovr_GetTimeInSeconds();
            static double prev;  // zero-initialized; the first (huge) delta is clamped below
            const double rawDelta = now - prev;
            prev = now;
            // Never predict more than 100ms ahead, even after a frame hitch.
            const double clampedPrediction = std::min( 0.1, rawDelta * 2);
            ovrSensorState sensor = ovrHmd_GetSensorState(OvrHmd, now + clampedPrediction, true );

            auto ovrOrientation = sensor.Predicted.Pose.Orientation;
            glm::quat newOrientation(ovrOrientation.w, ovrOrientation.x, ovrOrientation.y, ovrOrientation.z);
            _client->setOrientation(newOrientation);

            // Periodic debug trace; the large modulus keeps it quiet at frame rate.
            if (counter++ % 100000 == 0) {
                qDebug() << "GetSensorState in frame" << counter << "-" 
                    << ovrOrientation.x <<  ovrOrientation.y <<  ovrOrientation.z <<  ovrOrientation.w;
            }
        } else {
            qDebug() << "The headset was undocked - leaving VR mode.";
            
            leaveVRMode();
        }
    } 
    
    OVR::KeyState& backKeyState = _mainWindow->getBackKeyState();
    auto backEvent = backKeyState.Update(ovr_GetTimeInSeconds());

    if (backEvent == OVR::KeyState::KEY_EVENT_LONG_PRESS) {
        qDebug() << "Attemping to start the Platform UI Activity.";
        ovr_StartPackageActivity(_ovr, PUI_CLASS_NAME, PUI_GLOBAL_MENU);
    } else if (backEvent == OVR::KeyState::KEY_EVENT_DOUBLE_TAP || backEvent == OVR::KeyState::KEY_EVENT_SHORT_PRESS) {
        qDebug() << "Got an event we should cancel for!";
    }
    // Fixed: the original had a third branch re-testing KEY_EVENT_DOUBLE_TAP
    // ("The button is down!"), unreachable because the branch above already
    // matches DOUBLE_TAP. Removed as dead code — if it was meant for a
    // different event (e.g. key-down), restore it with the intended constant.
#endif
}
예제 #8
0
//================================
// OvrAnimComponent::Play
void OvrAnimComponent::Play()
{
	// Rebase the animation clock at the current frame so resuming from a
	// pause continues playback instead of restarting from frame zero.
	BaseTime  = ovr_GetTimeInSeconds();
	BaseFrame = CurFrame;
	AnimState = ANIMSTATE_PLAYING;
}
예제 #9
0
void OculusInterface::oculusDisplayWarning()
{
  // Health and Safety Warning display state.
  ovrHSWDisplayState hswDisplayState;
  ovrHmd_GetHSWDisplayState(m_hmd, &hswDisplayState);
  if (hswDisplayState.Displayed)
  {
  // Dismiss the warning if the user pressed the appropriate key or if the user
  // is tapping the side of the HMD.
  // If the user has requested to dismiss the warning via keyboard or controller input...
  if (m_warningOff)
    ovrHmd_DismissHSWDisplay(m_hmd);
  else
  {
  // Detect a moderate tap on the side of the HMD.
  ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmd, ovr_GetTimeInSeconds());
  if (ts.StatusFlags & ovrStatus_OrientationTracked)
  {
  const OVR::Vector3f v(ts.RawSensorData.Accelerometer.x,
  ts.RawSensorData.Accelerometer.y,
  ts.RawSensorData.Accelerometer.z);
  // Arbitrary value and representing moderate tap on the side of the DK2 Rift.
  if (v.LengthSq() > 250.f)
  ovrHmd_DismissHSWDisplay(m_hmd);
  }
  }
}
}
예제 #10
0
// Handles a gamepad button press for the option menu.
// Returns true when the menu consumed the press; false lets the caller handle it.
bool OptionSelectionMenu::OnGamepad(UInt32 buttonMask)
{
    // Global shortcuts take precedence over everything else.
    String shortcutMessage = ProcessShortcutButton(buttonMask);
    if (!shortcutMessage.IsEmpty())
    {
        PopupMessage        = shortcutMessage;
        PopupMessageTimeout = ovr_GetTimeInSeconds() + 4.0f;
        return true;
    }

    // An open submenu gets the event next.
    if (GetSubmenu() != NULL)
        return GetSubmenu()->OnGamepad(buttonMask);

    // The menu-toggle button is always swallowed.
    if (ToggleShortcut.MatchGamepadButton(buttonMask))
        return true;

    // With the menu hidden there is nothing else to consume.
    if (DisplayState == Display_None)
        return false;

    // Navigation shortcuts are consumed while the menu is visible.
    for (int nav = 0; nav < Nav_LAST; nav++)
    {
        if (NavShortcuts[nav].MatchGamepadButton(buttonMask))
            return true;
    }

    // Let the caller process keystroke
    return false;
}
예제 #11
0
//==============================
// OvrSliderComponent::OnFrameUpdate
// Per-frame slider update: refreshes the value text while touched, triggers
// the timed fade-out of the value "bubble", and applies the fader's alpha to
// the bubble geometry and text colors. Always returns MSG_STATUS_ALIVE.
eMsgStatus OvrSliderComponent::OnFrameUpdate( App * app, VrFrame const & vrFrame, OvrVRMenuMgr & menuMgr, 
		VRMenuObject * self, VRMenuEvent const & event )
{
	// While the user is touching, keep both texts in sync with the value.
	if ( TouchDown )
	{
		UpdateText( menuMgr, self, BubbleId );
		UpdateText( menuMgr, self, TextId );
	}

	// A positive BubbleFadeOutTime is a pending fade-out deadline; once it
	// passes, start the fade and mark the deadline consumed with -1.0.
	if ( BubbleFadeOutTime > 0.0 )
	{
		if ( ovr_GetTimeInSeconds() >= BubbleFadeOutTime )
		{
			BubbleFadeOutTime = -1.0;
			BubbleFader.StartFadeOut();
		}
	}

	VRMenuObject * bubble = menuMgr.ToObject( self->ChildHandleForId( menuMgr, BubbleId ) );
	if ( bubble != NULL )
	{
		float const fadeTime = 0.5f;
		float const fadeRate = 1.0 / fadeTime;	// double literal, but the result is exactly 2.0f
		BubbleFader.Update( fadeRate, vrFrame.DeltaSeconds );

		// Apply the fader alpha to both the bubble geometry and its text.
		Vector4f color = bubble->GetColor();
		color.w = BubbleFader.GetFinalAlpha();
		bubble->SetColor( color );
		Vector4f textColor = bubble->GetTextColor();
		textColor.w = color.w;
		bubble->SetTextColor( textColor );
	}

	return MSG_STATUS_ALIVE;
}
예제 #12
0
// Returns the timing state for the given frame index. Uninitialized timing is
// seeded from "now"; a frame index ahead of the last sampled one is
// extrapolated forward by whole frame deltas.
FrameTimeManager::Timing FrameTimeManager::GetFrameTiming(unsigned frameIndex)
{
    Timing frameTiming = LocklessTiming.GetState();

    if (frameTiming.ThisFrameTime == 0.0)
    {
        // If timing hasn't been initialized, starting based on "now" is the best guess.
        // (Fixed: the original tested '!= 0.0', which contradicted this comment —
        // it re-seeded already-initialized timing from "now" and let the
        // uninitialized case fall through to the extrapolation branch below,
        // which reads an uninitialized NextFrameTime.)
        frameTiming.InitTimingFromInputs(frameTiming.Inputs, RenderInfo.Shutter.Type,
                                         ovr_GetTimeInSeconds(), frameIndex);
    }
    else if (frameIndex > frameTiming.FrameIndex)
    {
        // Extrapolate this frame's start time from the last known next-frame
        // time plus whole frame deltas.
        unsigned frameDelta    = frameIndex - frameTiming.FrameIndex;
        double   thisFrameTime = frameTiming.NextFrameTime +
                                 double(frameDelta-1) * frameTiming.Inputs.FrameDelta;
        // Don't run away too far into the future beyond rendering.
        OVR_ASSERT(frameDelta < 6);

        frameTiming.InitTimingFromInputs(frameTiming.Inputs, RenderInfo.Shutter.Type,
                                         thisFrameTime, frameIndex);
    }

    return frameTiming;
}
// Samples sensor time, predicted display time, and head/hand poses for the
// given frame, then publishes them to the presentation thread.
bool OculusBaseDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
    _currentRenderFrameInfo = FrameInfo();
    _currentRenderFrameInfo.sensorSampleTime = ovr_GetTimeInSeconds();  // fixed stray ';;'
    _currentRenderFrameInfo.predictedDisplayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
    auto trackingState = ovr_GetTrackingState(_session, _currentRenderFrameInfo.predictedDisplayTime, ovrTrue);
    _currentRenderFrameInfo.renderPose = toGlm(trackingState.HeadPose.ThePose);
    _currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;

    std::array<glm::mat4, 2> handPoses;
    // Make controller poses available to the presentation thread
    ovr_for_each_hand([&](ovrHandType hand) {
        // Require BOTH orientation and position tracking bits for this hand.
        // (Fixed: the original used '&' between the two distinct flag bits,
        // which evaluates to 0 and made this check a no-op, so poses were
        // produced even for untracked hands.)
        static const auto REQUIRED_HAND_STATUS = ovrStatus_OrientationTracked | ovrStatus_PositionTracked;
        if (REQUIRED_HAND_STATUS != (trackingState.HandStatusFlags[hand] & REQUIRED_HAND_STATUS)) {
            return;  // leave this hand's pose as identity
        }

        auto correctedPose = ovrControllerPoseToHandPose(hand, trackingState.HandPoses[hand]);
        static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
        handPoses[hand] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
    });

    // Publish this frame's data under the render-thread lock.
    withRenderThreadLock([&] {
        _uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
        _handPoses = handPoses;
        _frameInfos[frameIndex] = _currentRenderFrameInfo;
    });
    return Parent::beginFrameRender(frameIndex);
}
예제 #14
0
// Draws the OVR scene for one eye using the given modelview/projection
// matrices (pointers to 16 floats each).
void OVRScene::RenderForOneEye(const float* pMview, const float* pPersp) const
{
    printf("Rendering in OVRScene!\n"); // NOTE(review): per-eye, per-frame stdout spam — likely debug leftover; confirm before removing.
    if (m_bDraw == false)
        return;
    // Fixed: the original compared the matrix pointers against the bool
    // literal 'false'; use explicit NULL checks instead.
    if (pMview == NULL)
        return;
    if (pPersp == NULL)
        return;

    const glm::mat4 modelview = glm::make_mat4(pMview);
    const glm::mat4 projection = glm::make_mat4(pPersp);

    // Assemble modelview matrix to lock camera in with real world geometry:
    // We still have to use the assembled HMD stereo modelview matrices from OVRSDK05AppSkeleton,
    // but we undo the effects of chassis yaw and position so the frustum follows the viewer.
    if (m_pHmd != NULL)
    {
        const ovrTrackingState ts = ovrHmd_GetTrackingState(m_pHmd, ovr_GetTimeInSeconds());
        const ovrPosef& cp = ts.CameraPose;

        OVR::Matrix4f camMtx = OVR::Matrix4f();
        camMtx *= OVR::Matrix4f::Translation(cp.Position)
            * OVR::Matrix4f(OVR::Quatf(cp.Orientation));

        // OVR matrices are row-major; transpose for glm's column-major layout.
        glm::mat4 ogmat = glm::make_mat4(&camMtx.Transposed().M[0][0]);

        DrawScene(modelview * ogmat, projection);
    }
}
예제 #15
0
File: vr_ovr.c  Project: fourks/quake2vr
/* Configures Rift tracking (orientation + mag yaw correction + position,
 * requiring orientation) and logs which capabilities came up.
 * Returns the resulting enabled state. */
ovrBool VR_OVR_InitSensor()
{
	unsigned int sensorCaps =
		ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position;

	/* Reset any previous tracking session state before reconfiguring. */
	sensorEnabled = 0;

	sensorEnabled = ovrHmd_ConfigureTracking(hmd, sensorCaps, ovrTrackingCap_Orientation);
	if (sensorEnabled)
	{
		ovrTrackingState ss = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());
		Com_Printf("VR_OVR: Successfully initialized sensors!\n");

		/* Report which tracking features are actually active. */
		if (ss.StatusFlags & ovrStatus_PositionConnected)
			Com_Printf("...sensor has position tracking support\n");
		if (ss.StatusFlags & ovrStatus_OrientationTracked)
			Com_Printf("...orientation tracking enabled\n");
		if (ss.StatusFlags & ovrStatus_PositionTracked)
			Com_Printf("...position tracking enabled\n");
	}
	return sensorEnabled;
}
예제 #16
0
// Dumps the current head pose (orientation, velocities, accelerations) to
// stdout whenever either orientation or position tracking is active.
void OculusInterface::oculusPoseState()
{
  ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmd, ovr_GetTimeInSeconds());
  if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked))
  {
    ovrPoseStatef pose = ts.HeadPose;
    std::cout<<"--------------------------------------------------------------\n";
    std::cout<<"Time "<<pose.TimeInSeconds<<"\n";
    // NOTE(review): the "<<" inside the label below looks like a stray paste
    // artifact in the output text — confirm the intended label.
    std::cout<<"Orientation Quat <<"<< pose.ThePose.Orientation.x <<" "
              << pose.ThePose.Orientation.y <<" "
              << pose.ThePose.Orientation.z <<" "
              << pose.ThePose.Orientation.w <<"\n";
    std::cout << "Angular Velocity "<< pose.AngularVelocity.x <<" "
                 << pose.AngularVelocity.y <<" "
                 << pose.AngularVelocity.z <<"\n";
    std::cout << "Linear Velocity "<< pose.LinearVelocity.x <<" "
                 << pose.LinearVelocity.y <<" "
                 << pose.LinearVelocity.z <<"\n";
    // NOTE(review): the two labels below say "Velocity" but print accelerations.
    std::cout << "AngularAcceleration Velocity "<< pose.AngularAcceleration.x <<" "
                 << pose.AngularAcceleration.y <<" "
                 << pose.AngularAcceleration.z <<"\n";

    std::cout << "LinearAcceleration Velocity "<< pose.LinearAcceleration.x <<" "
                 << pose.LinearAcceleration.y <<" "
                 << pose.LinearAcceleration.z <<"\n";
    std::cout<<"--------------------------------------------------------------\n";


  }
}
예제 #17
0
//==============================
// OvrGazeCursorLocal::CancelTimer
void OvrGazeCursorLocal::CancelTimer()
{
	// Invalidate both timer timestamps; negative values mean "no timer active".
	double const curTime = ovr_GetTimeInSeconds();
	LOG( "(%.4f) Cancel Timer", curTime );
	TimerShowTime = -1.0;
	TimerEndTime  = -1.0;
}
예제 #18
0
//==============================
// OvrGazeCursorLocal::StartTimer
void OvrGazeCursorLocal::StartTimer( float const durationSeconds, float const timeBeforeShowingTimer )
{
	// Schedule the timer: it becomes visible after timeBeforeShowingTimer and
	// expires durationSeconds from now.
	double const curTime = ovr_GetTimeInSeconds();
	LOG( "(%.4f) StartTimer = %.2f", curTime, durationSeconds );
	TimerEndTime  = curTime + (double)durationSeconds;
	TimerShowTime = curTime + (double)timeBeforeShowingTimer;
}
예제 #19
0
//==============================
// OvrTextFade_Component::FocusLost
// When gaze focus leaves, schedule a fade-out after FADE_DELAY and cancel any
// pending fade-in.
eMsgStatus OvrTextFade_Component::FocusLost( App * app, VrFrame const & vrFrame, OvrVRMenuMgr & menuMgr,
												VRMenuObject * self, VRMenuEvent const & event )
{
	StartFadeInTime  = -1.0;
	StartFadeOutTime = ovr_GetTimeInSeconds() + FADE_DELAY;

	return MSG_STATUS_ALIVE;
}
예제 #20
0
// Draws the popup message box while its timeout has not yet expired.
void OptionSelectionMenu::renderShortcutChangeMessage(RenderDevice* prender)
{
    if (ovr_GetTimeInSeconds() >= PopupMessageTimeout)
        return;

    DrawTextBox(prender, 0, 120, 22.0f, PopupMessage.ToCStr(),
                DrawText_Center | (PopupMessageBorder ? DrawText_Border : 0));
}
// Logs a message with the current time and the delta since the previous call.
void LogTime(const char* msg)
{
    // Remembers the previous call's timestamp so each line carries a delta.
    static double prevCallTime = 0.0;
    const double now = ovr_GetTimeInSeconds();

    LogText("t=%.3f, dt=%.3f: %s\n", now, now - prevCallTime, msg);
    prevCallTime = now;
}
예제 #22
0
// Returns the current head position converted from the Rift's coordinate
// system into the engine's; an invalid sensor reports the origin.
Point3F OculusVRSensorDevice::getPosition()
{
    if(!mIsValid)
        return Point3F();

    const ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
    const OVR::Vector3f pos = ts.HeadPose.ThePose.Position;
    // Axis remap: Rift (x right, y up, z back) -> engine coordinates.
    return Point3F(-pos.x, pos.z, -pos.y);
}
예제 #23
0
// Sets up rendering for the left eye: binds the left half of the framebuffer
// as the viewport and samples the current head pose.
// NOTE(review): 'eye' and 'pose' are computed but never used, and the
// ovrHmd_GetEyePose call is commented out — this function looks unfinished;
// confirm intent before cleaning up.
void OculusInterface::setLeftEye()
{
	ovrEyeType eye = m_hmd->EyeRenderOrder[0];
	glViewport(0, 0, (m_fbWidth / 2), m_fbHeight);
	ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmd, ovr_GetTimeInSeconds());
	OVR::Posef pose = ts.HeadPose;
//	m_pose[0] = ovrHmd_GetEyePose(m_hmd, eye);

}
예제 #24
0
// Routes Android back-key releases into the OVR KeyState machine; all other
// keys follow the normal Qt path.
void GVRMainWindow::keyReleaseEvent(QKeyEvent* event) {
#ifdef ANDROID
    if (event->key() == Qt::Key_Back) {
        // release on the Android back key, hand off to OVR KeyState
        _backKeyState.HandleEvent(ovr_GetTimeInSeconds(), false, 0);
        _wasBackKeyDown = false;
    }
#endif
    // Note: unlike keyPressEvent, the base handler always runs here.
    QWidget::keyReleaseEvent(event);
}
// Per-frame camera update: dismisses the Oculus Health & Safety Warning on a
// tap of the HMD, samples the head pose (rotation application is currently
// commented out), and subscribes this component for rendering.
void OcculusCameraComponent::update( float dt )
{
	//Occulus warning
	// Health and Safety Warning display state.
	ovrHSWDisplayState hswDisplayState;
	ovrHmd_GetHSWDisplayState(hmd, &hswDisplayState);
	if (hswDisplayState.Displayed) 
	{ 
		// Dismiss the warning if the user pressed the appropriate key or if the user 
		// is tapping the side of the HMD. 
		// If the user has requested to dismiss the warning via keyboard or controller input... 
		//if (Util_GetAndResetHSWDismissedState()) 
		//	ovrHmd_DismissHSWDisplay(hmd); 
		//else 
		{ 
			// Detect a moderate tap on the side of the HMD. 
			ovrTrackingState ts = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());
			if (ts.StatusFlags & ovrStatus_OrientationTracked) 
			{ 
				const OVR::Vector3f v(ts.RawSensorData.Accelerometer.x, ts.RawSensorData.Accelerometer.y, ts.RawSensorData.Accelerometer.z);

				// Arbitrary value and representing moderate tap on the side of the DK2 Rift.
				if (v.LengthSq() > 250.f) ovrHmd_DismissHSWDisplay(hmd);
			}
		}
	}


	// NOTE(review): no-op statement; presumably silences an unused-parameter
	// warning for dt — confirm before removing.
	dt;
	// Query the HMD for the current tracking state.
	
	ovrTrackingState ts = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());
	if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked))
	{
		ovrPosef pose = ts.HeadPose.ThePose;

		//convert to mat4 if desired
		// NOTE(review): transformMatrix is computed but unused while the
		// setRotation line below remains commented out.
		glm::mat4 transformMatrix = glm::mat4_cast( fromOVR( pose.Orientation ) );

		//parent->gc<TransformComponent>()->setRotation( glm::mat3(transformMatrix) );
	}
	parent->getStage()->subscribeRender( this );
}
double DistortionRenderer::FlushGpuAndWaitTillTime(double absTime)
{
	double initialTime = ovr_GetTimeInSeconds();
	if (initialTime >= absTime)
		return 0.0;

	WaitUntilGpuIdle();

    return WaitTillTime(absTime);
}
예제 #27
0
// Returns the predicted mid-display time for the given eye.
double FrameTimeManager::GetEyePredictionTime(ovrEyeType eye)
{
    // With VSync, the per-eye render time was already computed for this frame.
    if (VsyncEnabled)
        return FrameTiming.EyeRenderTimes[eye];

    // No VSync: best guess is "now" plus the switching and scanout delays.
    return ovr_GetTimeInSeconds() + ScreenSwitchingDelay + NoVSyncToScanoutDelay;
}
예제 #28
0
//================================
// OvrAnimComponent::SetFrame
void OvrAnimComponent::SetFrame( VRMenuObject * self, int const frameNum )
{
	// Clamp to the valid frame range for this object.
	CurFrame = Alg::Clamp( frameNum, 0, GetNumFrames( self ) - 1 );
	// Rebase the animation clock at this frame so the frame calculation stays
	// correct while playing; when paused, the next Play() resumes from here.
	BaseTime  = ovr_GetTimeInSeconds();
	BaseFrame = frameNum;
	ForceVisibilityUpdate = true;	// make sure visibilities are set next frame update
}
예제 #29
0
// Intercepts Android back-key presses and feeds them to the OVR KeyState
// machine; all other keys follow the normal Qt path.
void GVRMainWindow::keyPressEvent(QKeyEvent* event) {
#ifdef ANDROID
    if (event->key() == Qt::Key_Back) {
        // got the Android back key, hand off to OVR KeyState
        _backKeyState.HandleEvent(ovr_GetTimeInSeconds(), true, (_wasBackKeyDown ? 1 : 0));
        _wasBackKeyDown = true;
        return;  // swallowed: Qt's default handling is skipped for this key
    }
#endif
    QWidget::keyPressEvent(event);
}
// Flushes the GPU, then busy-waits until absTime (seconds on the
// ovr_GetTimeInSeconds clock). Returns how long was actually waited, or 0.0
// if absTime had already passed on entry.
double DistortionRenderer::WaitTillTimeAndFlushGpu(double absTime)
{
	double       initialTime = ovr_GetTimeInSeconds();
	if (initialTime >= absTime)
		return 0.0;

	WaitUntilGpuIdle();

	double newTime   = initialTime;
	volatile int i;

	while (newTime < absTime)
	{
		// Short burst of writes to a volatile so the compiler can't optimize
		// the spin away and the timer isn't queried on every iteration.
		for (int j = 0; j < 50; j++)
			i = 0;
		newTime = ovr_GetTimeInSeconds();
	}

	// How long we waited
	return newTime - initialTime;
}