void HMDState::UpdateRenderProfile(Profile* profile)
{
    // Rebuild the render context state (render info + per-eye distortion)
    // from the supplied user profile.
    RenderState.OurProfileRenderInfo =
        GenerateProfileRenderInfoFromProfile(RenderState.OurHMDInfo, profile);
    RenderState.RenderInfo =
        GenerateHmdRenderInfoFromHmdInfo(RenderState.OurHMDInfo, RenderState.OurProfileRenderInfo);

    RenderState.Distortion[0] = CalculateDistortionRenderDesc(StereoEye_Left,  RenderState.RenderInfo, 0);
    RenderState.Distortion[1] = CalculateDistortionRenderDesc(StereoEye_Right, RenderState.RenderInfo, 0);

    if (pClient)
    {
        // Report the center-pupil depth derived from the render info to the service.
        const float pupilDepth = GetCenterPupilDepthFromRenderInfo(&RenderState.RenderInfo);
        pClient->SetNumberValue(GetNetId(), "CenterPupilDepth", pupilDepth);

        // Report the profile's neck model as a 3-component vector.
        const Vector3f neck = GetNeckModelFromProfile(profile);
        double neckValues[3] = { neck.x, neck.y, neck.z };
        pClient->SetNumberValues(GetNetId(), "NeckModelVector3f", neckValues, 3);

        // Camera position.
        // OVR_KEY_CAMERA_POSITION is actually the *inverse* of a camera position,
        // stored as 7 doubles (position + orientation quaternion).
        double poseValues[7];
        Posed centeredFromWorld;
        if (profile->GetDoubleValues(OVR_KEY_CAMERA_POSITION, poseValues, 7) == 7)
        {
            centeredFromWorld = Posed::FromArray(poseValues);
        }
        else
        {
            centeredFromWorld = TheTrackingStateReader.GetDefaultCenteredFromWorld();
        }

        // ComputeCenteredFromWorld wants a worldFromCpf pose, so invert it.
        // FIXME: The stored centeredFromWorld doesn't have a neck model offset
        // applied, but probably should.
        TheTrackingStateReader.ComputeCenteredFromWorld(centeredFromWorld.Inverted(), Vector3d(0, 0, 0));
    }
}
void HMDState::UpdateRenderProfile(Profile* profile)
{
    // Apply the given profile to generate a render context.
    RenderState.RenderInfo = GenerateHmdRenderInfoFromHmdInfo(RenderState.OurHMDInfo, profile);
    RenderState.Distortion[0] = CalculateDistortionRenderDesc(StereoEye_Left, RenderState.RenderInfo, 0);
    RenderState.Distortion[1] = CalculateDistortionRenderDesc(StereoEye_Right, RenderState.RenderInfo, 0);

    if (pClient)
    {
        // Center pupil depth: derived from the render info and pushed to the service.
        float centerPupilDepth = GetCenterPupilDepthFromRenderInfo(&RenderState.RenderInfo);
        pClient->SetNumberValue(GetNetId(), "CenterPupilDepth", centerPupilDepth);

        // Neck model: pushed to the service as a 3-component vector.
        Vector3f neckModel = GetNeckModelFromProfile(profile);
        double neckModelArray[3] = { neckModel.x, neckModel.y, neckModel.z };
        pClient->SetNumberValues(GetNetId(), "NeckModelVector3f", neckModelArray, 3);

        // Camera position, stored as 7 doubles (position + orientation quaternion).
        double camerastate[7];
        if (profile->GetDoubleValues(OVR_KEY_CAMERA_POSITION, camerastate, 7) == 0)
        {
            // No stored value, so load the default: zero offset with the
            // quaternion w component set to 1 (identity pose).
            for (int i = 0; i < 7; i++)
                camerastate[i] = 0;
            camerastate[3] = 1; // no offset; by default give quaternion w value 1
        }
        // BUG FIX: setCenteredFromWorld was previously only called when the
        // profile supplied a value, so the default identity pose computed above
        // was a dead store and never applied. Apply the pose on both paths.
        TheSensorStateReader.setCenteredFromWorld(OVR::Posed::FromArray(camerastate));
    }
}