void RiftRenderer::UpdateDeviceRotation()
{
    if (pSensor)
    {
        OVR::Quatf hmdOrient = SFusion->GetOrientation();

        OVR::Vector3f x_vec = hmdOrient.Rotate(OVR::Vector3f(1, 0, 0));
        OVR::Vector3f y_vec = hmdOrient.Rotate(OVR::Vector3f(0, 1, 0));
        OVR::Vector3f z_vec = hmdOrient.Rotate(OVR::Vector3f(0, 0, -1)); //OpenGL - forward is along negative z-axis

        right   = QVector3D(x_vec.x, x_vec.y, x_vec.z);
        up      = QVector3D(y_vec.x, y_vec.y, y_vec.z);
        forward = QVector3D(z_vec.x, z_vec.y, z_vec.z);
    }
    else
    {
        //otherwise we apply no extra rotation (identity)
        right   = QVector3D(1, 0, 0);
        up      = QVector3D(0, 1, 0);
        forward = QVector3D(0, 0, -1);
    }
}
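// A minimal usage sketch, not part of the original snippet: the basis vectors computed
// above can drive a Qt view matrix directly. cameraPos is a hypothetical member holding
// the viewer position.
QMatrix4x4 viewMatrix;
viewMatrix.setToIdentity();
viewMatrix.lookAt(cameraPos, cameraPos + forward, up);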
OVR_PUBLIC_FUNCTION(ovrTrackerPose) ovr_GetTrackerPose(ovrSession session, unsigned int trackerPoseIndex)
{
    ovrTrackerPose pose = { 0 };

    // Get the index for this tracker.
    vr::TrackedDeviceIndex_t trackers[vr::k_unMaxTrackedDeviceCount];
    g_VRSystem->GetSortedTrackedDeviceIndicesOfClass(vr::TrackedDeviceClass_TrackingReference, trackers, vr::k_unMaxTrackedDeviceCount);
    vr::TrackedDeviceIndex_t index = trackers[trackerPoseIndex];

    // Set the flags
    pose.TrackerFlags = 0;
    if (session->poses[index].bDeviceIsConnected)
        pose.TrackerFlags |= ovrTracker_Connected;
    if (session->poses[index].bPoseIsValid)
        pose.TrackerFlags |= ovrTracker_PoseTracked;

    // Convert the pose
    OVR::Matrix4f matrix;
    if (session->poses[index].bPoseIsValid)
        matrix = REV_HmdMatrixToOVRMatrix(session->poses[index].mDeviceToAbsoluteTracking);
    OVR::Quatf quat = OVR::Quatf(matrix);
    pose.Pose.Orientation = quat;
    pose.Pose.Position = matrix.GetTranslation();

    // Level the pose: keep only the yaw component of the orientation.
    // Note: OVR::Quatf::GetYawPitchRoll dereferences all three output pointers in
    // stock OVR_Math, so pass locals rather than nullptr.
    float yaw, pitch, roll;
    quat.GetYawPitchRoll(&yaw, &pitch, &roll);
    pose.LeveledPose.Orientation = OVR::Quatf(OVR::Axis_Y, yaw);
    pose.LeveledPose.Position = matrix.GetTranslation();

    return pose;
}
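// A minimal sketch of the conversion the function above assumes; REV_HmdMatrixToOVRMatrix
// itself is not shown in this snippet. vr::HmdMatrix34_t and OVR::Matrix4f are both
// row-major, so the 3x4 pose rows copy across directly and the bottom row stays (0, 0, 0, 1).
OVR::Matrix4f REV_HmdMatrixToOVRMatrix(const vr::HmdMatrix34_t& m)
{
    OVR::Matrix4f r; // default-constructed as identity
    for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 4; ++j)
            r.M[i][j] = m.m[i][j];
    return r;
}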
gua::math::mat4 const get_oculus_transform(OVR::SensorFusion* sensor)
{
    OVR::Quatf orient = sensor->GetPredictedOrientation();
    OVR::Matrix4f mat(orient.Inverted());
    return gua::math::mat4(mat.M[0][0], mat.M[0][1], mat.M[0][2], mat.M[0][3],
                           mat.M[1][0], mat.M[1][1], mat.M[1][2], mat.M[1][3],
                           mat.M[2][0], mat.M[2][1], mat.M[2][2], mat.M[2][3],
                           mat.M[3][0], mat.M[3][1], mat.M[3][2], mat.M[3][3]);
}
mat4 OVR_SDL2_app::view() const
{
    // Orientation of the head
    OVR::Quatf q = OVR::Quatf(pose[eye].Orientation);
    mat4 O = getMatrix4f(OVR::Matrix4f(q.Inverted()));

    // Offset of the head from the center of the world
    mat4 P = translation(vec3(-pose[eye].Position.x,
                              -pose[eye].Position.y,
                              -pose[eye].Position.z));

    // return E * O * P;
    return O * P;
}
void OculusHMD::prepareForDraw()
{
    OVR::Quatf ovrQuat = m_system->SFusion.GetOrientation();

    // Convert the OVR orientation to a glm quaternion via axis-angle.
    OVR::Vector3f OVRaxis;
    float angle;
    ovrQuat.GetAxisAngle(&OVRaxis, &angle);

    glm::vec3 axis = glm::vec3(OVRaxis.x, OVRaxis.y, OVRaxis.z);
    // glm::angleAxis is fed degrees here, which matches pre-0.9.6 GLM defaults;
    // GLM 0.9.6+ expects radians.
    glm::quat orientation = glm::angleAxis(glm::degrees(angle), glm::normalize(axis));

    glm::vec3 parentPos = glm::vec3(parentNode()->worldTransform() * glm::vec4(0, 0, 0, 1));

    m_boneTracker->setOrientation(glm::mat3_cast(orientation));

    RenderToTextureDisplay::prepareForDraw();
}
glm::quat fromOVR(OVR::Quatf quat)
{
    OVR::Vector3f OVRaxis;
    float angle;
    quat.GetAxisAngle(&OVRaxis, &angle);

    //convert axis type
    glm::vec3 axis = glm::vec3(OVRaxis.x, OVRaxis.y, OVRaxis.z);

    //construct glm orientation quaternion
    // Note: the angle is converted to degrees, which matches pre-0.9.6 GLM defaults;
    // GLM 0.9.6+ expects radians.
    glm::quat orientation = glm::angleAxis(glm::degrees(angle), glm::normalize(axis));
    return orientation;
}
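// A minimal alternative sketch, assuming a modern GLM build (0.9.6+ or GLM_FORCE_RADIANS)
// where the degrees conversion above would over-rotate. Both libraries use the same
// (w, x, y, z) quaternion convention, so a direct component copy avoids the axis-angle
// round trip entirely. The name fromOVRDirect is hypothetical.
glm::quat fromOVRDirect(OVR::Quatf quat)
{
    // glm::quat's constructor takes components in (w, x, y, z) order.
    return glm::quat(quat.w, quat.x, quat.y, quat.z);
}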
///@todo Even though this function shares most of its code with client rendering,
/// which appears to work fine, it is non-convergable. It appears that the projection
/// matrices for each eye are too far apart? Could be modelview...
void RiftAppSkeleton::display_stereo_undistorted() //const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //ovrFrameTiming hmdFrameTiming = ovrHmd_BeginFrameTiming(hmd, 0);

    bindFBO(m_renderBuffer, m_fboScale);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
    {
        ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        ovrPosef eyePose = ovrHmd_GetEyePose(hmd, eye);

        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        const ovrRecti rsc = {
            static_cast<int>(m_fboScale * rvp.Pos.x),
            static_cast<int>(m_fboScale * rvp.Pos.y),
            static_cast<int>(m_fboScale * rvp.Size.w),
            static_cast<int>(m_fboScale * rvp.Size.h)
        };
        glViewport(rsc.Pos.x, rsc.Pos.y, rsc.Size.w, rsc.Size.h);

        OVR::Quatf orientation = OVR::Quatf(eyePose.Orientation);
        OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        //m_EyeRenderDesc[eye].DistortedViewport;

        OVR::Vector3f EyePos = m_chassisPos;
        OVR::Matrix4f view = OVR::Matrix4f(orientation.Inverted())
            * OVR::Matrix4f::RotationY(m_chassisYaw)
            * OVR::Matrix4f::Translation(-EyePos);
        OVR::Matrix4f eyeview = OVR::Matrix4f::Translation(m_EyeRenderDesc[eye].ViewAdjust) * view;

        _resetGLState();

        _DrawScenes(&eyeview.Transposed().M[0][0], &proj.Transposed().M[0][0], rvp);
    }
    unbindFBO();

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    // Present FBO to screen
    const GLuint prog = m_presentFbo.prog();
    glUseProgram(prog);
    m_presentFbo.bindVAO();
    {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, m_renderBuffer.tex);
        glUniform1i(m_presentFbo.GetUniLoc("fboTex"), 0);

        // This is the only uniform that changes per-frame
        glUniform1f(m_presentFbo.GetUniLoc("fboScale"), m_fboScale);

        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    }
    glBindVertexArray(0);
    glUseProgram(0);

    ovrHmd_EndFrameTiming(hmd);
}