/**
 * Reads a pose into Value.
 *
 * @param Value  (out) Receives the pose. Written only when a valid pose is available.
 * @param Latest When true, query the OSVR client for the freshest pose state;
 *               when false, return the cached PoseState member.
 * @return For a fresh query: true iff osvrGetPoseState succeeded.
 *         Otherwise (cached path, or OSVR compiled out): HasPoseState().
 */
bool OSVRInterface::GetPose(FTransform& Value, bool Latest) const
{
	if (Latest)
	{
#if OSVR_ENABLED
		OSVR_TimeValue Time;
		OSVR_PoseState Pose;
		const OSVR_ReturnCode ReturnCode = osvrGetPoseState(OSVRClientInterface, &Time, &Pose);
		// BUGFIX: Pose is uninitialized when the call fails — previously its
		// garbage translation/rotation were still copied into Value. Only
		// touch the out-param on success.
		if (ReturnCode != OSVR_RETURN_SUCCESS)
		{
			return false;
		}
		Value.SetTranslation(OSVR2FVector(Pose.translation));
		Value.SetRotation(OSVR2FQuat(Pose.rotation));
		return true;
#endif // OSVR_ENABLED
		// With OSVR compiled out we fall through and report HasPoseState()
		// without writing Value (same as the original behavior).
	}
	else
	{
		// Cached path: hand back the last pose this interface stored.
		Value = PoseState;
	}
	return HasPoseState();
}
/** * Returns the calibration-space orientation of the requested controller's hand. * * @param ControllerIndex The Unreal controller (player) index of the contoller set * @param DeviceHand Which hand, within the controller set for the player, to get the orientation and position for * @param OutOrientation (out) If tracked, the orientation (in calibrated-space) of the controller in the specified hand * @param OutPosition (out) If tracked, the position (in calibrated-space) of the controller in the specified hand * @return True if the device requested is valid and tracked, false otherwise */ bool FOSVRInputDevice::GetControllerOrientationAndPosition(const int32 ControllerIndex, const EControllerHand DeviceHand, FRotator& OutOrientation, FVector& OutPosition) const { bool bRet = false; if (ControllerIndex == 0) { FScopeLock lock(contextMutex); if (osvrClientCheckStatus(context) == OSVR_RETURN_SUCCESS && osvrClientUpdate(context) == OSVR_RETURN_SUCCESS) { auto iface = DeviceHand == EControllerHand::Left ? leftHand : rightHand; OSVR_PoseState state; OSVR_TimeValue tvalue; if (osvrGetPoseState(iface, &tvalue, &state) == OSVR_RETURN_SUCCESS) { // @todo: how do we get the world to meters scale without the HMD? float worldToMetersScale = mOSVRHMD.IsValid() ? mOSVRHMD->GetWorldToMetersScale() : 100.0f; OutPosition = OSVR2FVector(state.translation, worldToMetersScale); OutOrientation = OSVR2FQuat(state.rotation).Rotator(); bRet = true; } } } return bRet; }
void QVRDevice::update() { if (config().processIndex() == QVRManager::processIndex()) { #ifdef HAVE_VRPN if (_internals->vrpnTrackerRemote) _internals->vrpnTrackerRemote->mainloop(); if (_internals->vrpnButtonRemote) _internals->vrpnButtonRemote->mainloop(); if (_internals->vrpnAnalogRemote) _internals->vrpnAnalogRemote->mainloop(); #endif #ifdef HAVE_OCULUS if (_internals->oculusTrackedEye >= 0) { const ovrPosef* p; if (_internals->oculusTrackedEye == 1) p = &(QVROculusRenderPoses[0]); else if (_internals->oculusTrackedEye == 2) p = &(QVROculusRenderPoses[1]); else p = &(QVROculusTrackingState.HeadPose.ThePose); // Note the position Y offset that moves the sitting user's eyes to a default standing height in // the virtual world. _position = QVector3D(p->Position.x, p->Position.y + QVRObserverConfig::defaultEyeHeight, p->Position.z); _orientation = QQuaternion(p->Orientation.w, p->Orientation.x, p->Orientation.y, p->Orientation.z); } #endif #ifdef HAVE_OPENVR if (_internals->openVrTrackedEntity >= 0) { _orientation = QVROpenVRTrackedOrientations[_internals->openVrTrackedEntity]; _position = QVROpenVRTrackedPositions[_internals->openVrTrackedEntity]; } if (_internals->openVrButtonsEntity >= 0) { _buttons[0] = QVROpenVRControllerStates[_internals->openVrButtonsEntity].rAxis[0].y > +0.5f; _buttons[1] = QVROpenVRControllerStates[_internals->openVrButtonsEntity].rAxis[0].y < -0.5f; _buttons[2] = QVROpenVRControllerStates[_internals->openVrButtonsEntity].rAxis[0].x < -0.5f; _buttons[3] = QVROpenVRControllerStates[_internals->openVrButtonsEntity].rAxis[0].x > +0.5f; unsigned long buttonPressed = QVROpenVRControllerStates[_internals->openVrButtonsEntity].ulButtonPressed; _buttons[4] = buttonPressed & vr::ButtonMaskFromId(vr::k_EButton_ApplicationMenu); _buttons[5] = buttonPressed & vr::ButtonMaskFromId(vr::k_EButton_Grip); } if (_internals->openVrAnalogsEntity >= 0) { _analogs[0] = QVROpenVRControllerStates[_internals->openVrAnalogsEntity].rAxis[0].y; _analogs[1] = 
QVROpenVRControllerStates[_internals->openVrAnalogsEntity].rAxis[0].x; _analogs[2] = QVROpenVRControllerStates[_internals->openVrAnalogsEntity].rAxis[1].x; } #endif #ifdef HAVE_OSVR if (_internals->osvrTrackedEye != -1 || _internals->osvrTrackingInterface) { OSVR_Pose3 pose; bool ok; if (_internals->osvrTrackedEye == 0) { // center eye ok = (osvrClientGetViewerPose(QVROsvrDisplayConfig, 0, &pose) == OSVR_RETURN_SUCCESS); } else if (_internals->osvrTrackedEye == 1) { // left eye ok = (osvrClientGetViewerEyePose(QVROsvrDisplayConfig, 0, 0, &pose) == OSVR_RETURN_SUCCESS); } else if (_internals->osvrTrackedEye == 2) { // right eye OSVR_EyeCount eyes; osvrClientGetNumEyesForViewer(QVROsvrDisplayConfig, 0, &eyes); int e = (eyes == 2 ? 1 : 0); ok = (osvrClientGetViewerEyePose(QVROsvrDisplayConfig, 0, e, &pose) == OSVR_RETURN_SUCCESS); } else { // _internals->osvrTrackingInterface struct OSVR_TimeValue timestamp; ok = (osvrGetPoseState(_internals->osvrTrackingInterface, ×tamp, &pose) == OSVR_RETURN_SUCCESS); } if (ok) { if (_internals->osvrTrackedEye >= 0 && pose.translation.data[1] < 1.1f) { // Assume the user wears a HMD and sits (i.e. no room-scale VR). // In this case, we apply an offset to a default standing observer, // just as we do for Oculus Rift. 
pose.translation.data[1] += QVRObserverConfig::defaultEyeHeight; } _position = QVector3D(pose.translation.data[0], pose.translation.data[1], pose.translation.data[2]); _orientation = QQuaternion(pose.rotation.data[0], pose.rotation.data[1], pose.rotation.data[2], pose.rotation.data[3]); } } if (_internals->osvrButtonsInterfaces.length() > 0) { OSVR_ButtonState state; struct OSVR_TimeValue timestamp; for (int i = 0; i < _buttons.length(); i++) { if (_internals->osvrButtonsInterfaces[i] && osvrGetButtonState(_internals->osvrButtonsInterfaces[i], ×tamp, &state) == OSVR_RETURN_SUCCESS) { _buttons[i] = state; } } } if (_internals->osvrAnalogsInterfaces.length() > 0) { OSVR_AnalogState state; struct OSVR_TimeValue timestamp; for (int i = 0; i < _analogs.length(); i++) { if (_internals->osvrAnalogsInterfaces[i] && osvrGetAnalogState(_internals->osvrAnalogsInterfaces[i], ×tamp, &state) == OSVR_RETURN_SUCCESS) { _analogs[i] = state; } } } #endif } }
/**
 * Handle OSVR tracking.
 *
 * Polls the OSVR head pose, converts the orientation quaternion to Euler
 * angles, derives angular velocity/acceleration from the previous frame,
 * and stores a predicted orientation plus the raw position in members.
 *
 * @param pThis/eD3D/eD3DInterface/eD3DMethod/dwNumberConnected/nProvokerIndex
 *        Unused here; presumably part of a generic provoker callback
 *        signature shared with other nodes — confirm against the caller.
 * @return Always nullptr.
 ***/
void* OSVR_Tracker::Provoke(void* pThis, int eD3D, int eD3DInterface, int eD3DMethod, DWORD dwNumberConnected, int& nProvokerIndex)
{
	// update game timer (DeltaTime() below depends on this tick)
	m_cGameTimer.Tick();

	// Skip the first 200 invocations before touching OSVR at all.
	static UINT unFrameSkip = 200;
	if (unFrameSkip > 0)
	{
		unFrameSkip--;
		return nullptr;
	}

	if ((!m_psOSVR_ClientContext) || (!m_psOSVR_ClientInterface))
	{
		// create client context handle (lazy init on first post-skip call;
		// tracking starts on the following call)
		m_psOSVR_ClientContext = osvrClientInit("com.mtbs3d.vireio.osvr.tracker", 0);

		// get client interface for the head tracker path
		osvrClientGetInterface(m_psOSVR_ClientContext, "/me/head", &m_psOSVR_ClientInterface);
	}
	else
	{
		// update the client context
		osvrClientUpdate(m_psOSVR_ClientContext);

		// let's read the tracker state.
		OSVR_ReturnCode cRet = osvrGetPoseState(m_psOSVR_ClientInterface, &m_sTimestamp, &m_sState);
		if (cRet != OSVR_RETURN_SUCCESS)
		{
			OutputDebugStringA("No pose state!\n");
		}
		else
		{
			m_bControlUpdate = true;

			// backup old euler angles and velocity (needed for the
			// finite-difference velocity/acceleration below)
			float afEulerOld[3];
			float afEulerVelocityOld[3];
			memcpy(&afEulerOld[0], &m_afEuler[0], sizeof(float)* 3);
			memcpy(&afEulerVelocityOld[0], &m_afEulerVelocity[0], sizeof(float)* 3);

			// quaternion -> euler angles
			// (data[0..3] read as w,x,y,z — matches the osvrQuatGetW/X/Y/Z
			// accessors used in the debug output below)
			const float w = (float)m_sState.rotation.data[0];
			const float x = (float)m_sState.rotation.data[1];
			const float y = (float)m_sState.rotation.data[2];
			const float z = (float)m_sState.rotation.data[3];

			float sqw = w*w;
			float sqx = x*x;
			float sqy = y*y;
			float sqz = z*z;

			// unit = squared norm; used to make the singularity test and asin
			// argument tolerant of a non-normalized quaternion
			float unit = sqx + sqy + sqz + sqw;
			float test = x*y + z*w;
			if (test > 0.499*unit)
			{
				// singularity at north pole (gimbal lock, pitch ~ +90°)
				m_afEuler[1] = 2 * atan2(x, w);
				m_afEuler[2] = FLOAT_PI / 2;
				m_afEuler[0] = 0;
			}
			else if (test < -0.499*unit)
			{
				// singularity at south pole (gimbal lock, pitch ~ -90°)
				m_afEuler[1] = -2 * atan2(x, w);
				m_afEuler[2] = -FLOAT_PI / 2;
				m_afEuler[0] = 0;
			}
			else
			{
				m_afEuler[1] = atan2(2 * y*w - 2 * x*z, sqx - sqy - sqz + sqw);
				m_afEuler[2] = asin(2 * test / unit);
				m_afEuler[0] = atan2(2 * x * w - 2 * y * z, -sqx + sqy - sqz + sqw);
			}
			// Alternative formulas (kept for reference):
			// PITCH = atan2(2.0 * (x * y + w * z), w * w + x * x - y * y - z * z);
			// ROLL = atan2(2 * y * w - 2 * x * z, 1 - 2 * y * y - 2 * z * z);

			// get euler velocity + acceleration by finite differences over the
			// frame delta time
			float afEulerAcceleration[3];
			for (UINT unI = 0; unI < 3; unI++)
			{
				// get the velocity
				m_afEulerVelocity[unI] = (m_afEuler[unI] - afEulerOld[unI]) / (float)m_cGameTimer.DeltaTime();

				// get the acceleration
				afEulerAcceleration[unI] = (m_afEulerVelocity[unI] - afEulerVelocityOld[unI]) / (float)m_cGameTimer.DeltaTime();
			}

			// get predicted euler: extrapolate one frame ahead with
			// angle + v*dt + 0.5*a*dt^2
			for (UINT unI = 0; unI < 3; unI++)
			{
				// compute predicted euler
				m_afEulerPredicted[unI] = (0.5f * afEulerAcceleration[unI] * ((float)m_cGameTimer.DeltaTime() * (float)m_cGameTimer.DeltaTime())) + (m_afEulerVelocity[unI] * (float)m_cGameTimer.DeltaTime()) + m_afEuler[unI];
			}

			// set position (raw, no prediction applied)
			m_afPosition[0] = (float)m_sState.translation.data[0];
			m_afPosition[1] = (float)m_sState.translation.data[1];
			m_afPosition[2] = (float)m_sState.translation.data[2];

#ifdef _DEBUG
			// output debug data
			std::wstringstream szPose;
			szPose << L"Got POSE state: Position = (" << m_sState.translation.data[0] << L", " << m_sState.translation.data[1] << L", " << m_sState.translation.data[2] << L"), orientation = (" << osvrQuatGetW(&(m_sState.rotation)) << L", " << osvrQuatGetX(&(m_sState.rotation)) << L", " << osvrQuatGetY(&(m_sState.rotation)) << L", " << osvrQuatGetZ(&(m_sState.rotation)) << L")";
			OutputDebugString(szPose.str().c_str());
#endif
		}
	}
	return nullptr;
}