void HMDState::UpdateRenderProfile(Profile* profile)
{
    // Rebuild the render context (render info + per-eye distortion) from the
    // supplied user profile, then push the derived values up to the service.
    RenderState.OurProfileRenderInfo = GenerateProfileRenderInfoFromProfile(RenderState.OurHMDInfo, profile);
    RenderState.RenderInfo = GenerateHmdRenderInfoFromHmdInfo(RenderState.OurHMDInfo, RenderState.OurProfileRenderInfo);

    RenderState.Distortion[0] = CalculateDistortionRenderDesc(StereoEye_Left, RenderState.RenderInfo, 0);
    RenderState.Distortion[1] = CalculateDistortionRenderDesc(StereoEye_Right, RenderState.RenderInfo, 0);

    if (pClient)
    {
        // Report the center pupil depth derived from the render info.
        const float pupilDepth = GetCenterPupilDepthFromRenderInfo(&RenderState.RenderInfo);
        pClient->SetNumberValue(GetNetId(), "CenterPupilDepth", pupilDepth);

        // Report the head-neck model vector as three doubles.
        const Vector3f hnm = GetNeckModelFromProfile(profile);
        double hnmArray[3] = { hnm.x, hnm.y, hnm.z };
        pClient->SetNumberValues(GetNetId(), "NeckModelVector3f", hnmArray, 3);

        // Camera position.
        // OVR_KEY_CAMERA_POSITION is actually the *inverse* of a camera position:
        // it stores a centered-from-world pose. Use the tracker's default when the
        // profile does not hold a complete 7-element pose.
        double poseData[7];
        Posed centeredFromWorld;
        if (profile->GetDoubleValues(OVR_KEY_CAMERA_POSITION, poseData, 7) == 7)
        {
            centeredFromWorld = Posed::FromArray(poseData);
        }
        else
        {
            centeredFromWorld = TheTrackingStateReader.GetDefaultCenteredFromWorld();
        }

        // ComputeCenteredFromWorld wants a worldFromCpf pose, so invert it.
        // FIXME: The stored centeredFromWorld doesn't have a neck model offset
        // applied, but probably should.
        TheTrackingStateReader.ComputeCenteredFromWorld(centeredFromWorld.Inverted(), Vector3d(0, 0, 0));
    }
}
void HMDState::UpdateRenderProfile(Profile* profile)
{
    // Apply the given profile to generate a render context.
    RenderState.RenderInfo = GenerateHmdRenderInfoFromHmdInfo(RenderState.OurHMDInfo, profile);
    RenderState.Distortion[0] = CalculateDistortionRenderDesc(StereoEye_Left, RenderState.RenderInfo, 0);
    RenderState.Distortion[1] = CalculateDistortionRenderDesc(StereoEye_Right, RenderState.RenderInfo, 0);

    if (pClient)
    {
        // Center pupil depth
        float centerPupilDepth = GetCenterPupilDepthFromRenderInfo(&RenderState.RenderInfo);
        pClient->SetNumberValue(GetNetId(), "CenterPupilDepth", centerPupilDepth);

        // Neck model
        Vector3f neckModel = GetNeckModelFromProfile(profile);
        double neckModelArray[3] = { neckModel.x, neckModel.y, neckModel.z };
        pClient->SetNumberValues(GetNetId(), "NeckModelVector3f", neckModelArray, 3);

        // Camera pose stored in the profile as 7 doubles.
        // BUG FIX: the original code built an identity default pose when the key was
        // absent but then never applied it (dead store) — only the profile-supplied
        // branch called setCenteredFromWorld. It also fed camerastate to FromArray
        // when GetDoubleValues returned a partial count (1-6), reading uninitialized
        // doubles. Now anything short of a full 7-element pose falls back to the
        // identity default, and the resulting pose is always applied.
        double camerastate[7];
        if (profile->GetDoubleValues(OVR_KEY_CAMERA_POSITION, camerastate, 7) != 7)
        {
            // There is no (complete) stored value, so load the identity default:
            // zero offset, with the quaternion w component set to 1 (per the
            // original code's layout for Posed::FromArray — element 3 is w).
            for (int i = 0; i < 7; i++)
            {
                camerastate[i] = 0;
            }
            camerastate[3] = 1;
        }
        TheSensorStateReader.setCenteredFromWorld(OVR::Posed::FromArray(camerastate));
    }
}
unsigned HMDState::getFloatArray(const char* propertyName, float values[], unsigned arraySize) { if (arraySize) { if (OVR_strcmp(propertyName, "ScreenSize") == 0) { float data[2] = { OurHMDInfo.ScreenSizeInMeters.w, OurHMDInfo.ScreenSizeInMeters.h }; return CopyFloatArrayWithLimit(values, arraySize, data, 2); } else if (OVR_strcmp(propertyName, "DistortionClearColor") == 0) { return CopyFloatArrayWithLimit(values, arraySize, RenderState.ClearColor, 4); } else if (OVR_strcmp(propertyName, "DK2Latency") == 0) { if (OurHMDInfo.HmdType < HmdType_DK2) { return 0; } OutputLatencyTimings timings; ScreenLatencyTracker.GetLatencyTimings(timings); if (arraySize > 0) { switch (arraySize) { default: values[4] = (float)timings.ErrorTimewarp; // Fall-thru case 4: values[3] = (float)timings.ErrorRender; // Fall-thru case 3: values[2] = (float)timings.LatencyPostPresent; // Fall-thru case 2: values[1] = (float)timings.LatencyTimewarp; // Fall-thru case 1: values[0] = (float)timings.LatencyRender; } } return arraySize > 5 ? 5 : arraySize; } else if (OVR_strcmp(propertyName, "NeckModelVector3f") == 0) { // Query the service to grab the HNM. double hnm[3] = {}; int count = NetClient::GetInstance()->GetNumberValues(GetNetId(), propertyName, hnm, (int)arraySize); // If the service is unavailable or returns zero data, if (count < 3 || (hnm[0] == 0.0 && hnm[1] == 0.0 && hnm[2] == 0.0)) { // These are the default values used if the server does not return any data, due to not // being reachable or other errors. OVR_ASSERT(pProfile.GetPtr()); if (pProfile.GetPtr()) { Vector3f neckModel = GetNeckModelFromProfile(pProfile); hnm[0] = neckModel.x; hnm[1] = neckModel.y; hnm[2] = neckModel.z; } } for (unsigned i = 0; i < 3 && i < arraySize; ++i) { values[i] = (float)hnm[i]; } return arraySize > 3 ? 
3 : arraySize; } else if (NetSessionCommon::IsServiceProperty(NetSessionCommon::EGetNumberValues, propertyName)) { // Convert floats to doubles double* da = new double[arraySize]; for (int i = 0; i < (int)arraySize; ++i) { da[i] = values[i]; } int count = NetClient::GetInstance()->GetNumberValues(GetNetId(), propertyName, da, (int)arraySize); for (int i = 0; i < count; ++i) { values[i] = (float)da[i]; } delete[] da; return count; } else if (pProfile) { // TBD: Not quite right. Should update profile interface, so that // we can return 0 in all conditions if property doesn't exist. return pProfile->GetFloatValues(propertyName, values, arraySize); } } return 0; }