// Generates an appropriate stereo ortho projection matrix. void HSWDisplay::GetOrthoProjection(const HMDRenderState& RenderState, Matrix4f OrthoProjection[2]) { Matrix4f perspectiveProjection[2]; perspectiveProjection[0] = ovrMatrix4f_Projection(RenderState.EyeRenderDesc[0].Fov, 0.01f, 10000.f, true); perspectiveProjection[1] = ovrMatrix4f_Projection(RenderState.EyeRenderDesc[1].Fov, 0.01f, 10000.f, true); const float orthoDistance = HSWDISPLAY_DISTANCE; // This is meters from the camera (viewer) that we place the ortho plane. const Vector2f orthoScale0 = Vector2f(1.f) / Vector2f(RenderState.EyeRenderDesc[0].PixelsPerTanAngleAtCenter); const Vector2f orthoScale1 = Vector2f(1.f) / Vector2f(RenderState.EyeRenderDesc[1].PixelsPerTanAngleAtCenter); OrthoProjection[0] = ovrMatrix4f_OrthoSubProjection(perspectiveProjection[0], orthoScale0, orthoDistance, RenderState.EyeRenderDesc[0].ViewAdjust.x); OrthoProjection[1] = ovrMatrix4f_OrthoSubProjection(perspectiveProjection[1], orthoScale1, orthoDistance, RenderState.EyeRenderDesc[1].ViewAdjust.x); }
// Returns the orthographic projection for the current eye, derived from the
// eye's perspective projection via the SDK's ortho sub-projection helper.
glm::mat4 getOrthographic() {
    const ovrEyeRenderDesc& eyeDesc = getEyeRenderDesc();

    // Perspective projection the ortho matrix is derived from.
    const ovrMatrix4f perspective =
        ovrMatrix4f_Projection(eyeDesc.Fov, 0.01f, 100000.0f, true);

    // Uniform pixel scale on both axes.
    ovrVector2f orthoScale;
    orthoScale.x = scaleFactor;
    orthoScale.y = scaleFactor;

    // NOTE(review): 100.8f is the ortho plane distance in meters — unusually
    // large; confirm it is intentional (comparable code places the plane at 0.8m).
    return ovr::toGlm(ovrMatrix4f_OrthoSubProjection(
        perspective, orthoScale, 100.8f, eyeDesc.HmdToEyeViewOffset.x));
}
// Returns the orthographic projection for the current eye, built from the
// eye's perspective projection with the SDK's ortho sub-projection helper.
glm::mat4 getOrthographic() {
    const ovrEyeRenderDesc& eyeDesc = getEyeRenderDesc();

    // Base perspective projection for this eye.
    const ovrMatrix4f perspective =
        ovrMatrix4f_Projection(eyeDesc.Fov, 0.01f, 100000.0f, true);

    // Uniform pixel scale on both axes.
    ovrVector2f orthoScale;
    orthoScale.x = scaleFactor;
    orthoScale.y = scaleFactor;

    // NOTE(review): 100.8f is the ortho plane distance in meters — unusually
    // large; confirm it is intentional (comparable code places the plane at 0.8m).
    return Rift::fromOvr(ovrMatrix4f_OrthoSubProjection(
        perspective, orthoScale, 100.8f, eyeDesc.ViewAdjust.x));
}
// Recomputes all HMD-derived rendering state: per-eye FOV (clamped), render
// target / viewport sizes, SDK rendering configuration, sensor capabilities,
// and the per-eye perspective and orthographic projection matrices.
// Called whenever a setting that affects any of these changes.
void OculusWorldDemoApp::CalculateHmdValues()
{
    // Initialize eye rendering information for ovrHmd_Configure.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2];
    eyeFov[0] = HmdDesc.DefaultEyeFov[0];
    eyeFov[1] = HmdDesc.DefaultEyeFov[1];

    // Clamp Fov based on our dynamically adjustable FovSideTanMax.
    // Most apps should use the default, but reducing Fov does reduce rendering cost.
    eyeFov[0] = FovPort::Min(eyeFov[0], FovPort(FovSideTanMax));
    eyeFov[1] = FovPort::Min(eyeFov[1], FovPort(FovSideTanMax));

    if (ForceZeroIpd)
    {
        // ForceZeroIpd does three things:
        //  1) Sets FOV to maximum symmetrical FOV based on both eyes
        //  2) Sets eye ViewAdjust values to 0.0 (effective IPD == 0)
        //  3) Uses only the Left texture for rendering.
        eyeFov[0] = FovPort::Max(eyeFov[0], eyeFov[1]);
        eyeFov[1] = eyeFov[0];

        Sizei recommenedTexSize = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left, eyeFov[0], DesiredPixelDensity);

        // The actual RT may be bigger than requested due to HW limits.
        Sizei textureSize = EnsureRendertargetAtLeastThisBig(Rendertarget_Left, recommenedTexSize);

        // Never render more than the recommended size.
        EyeRenderSize[0] = Sizei::Min(textureSize, recommenedTexSize);
        EyeRenderSize[1] = EyeRenderSize[0];

        // Store texture pointers that will be passed for rendering.
        EyeTexture[0] = RenderTargets[Rendertarget_Left].Tex;
        EyeTexture[0].Header.TextureSize    = textureSize;
        EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
        // Right eye is the same.
        EyeTexture[1] = EyeTexture[0];
    }
    else
    {
        // Configure Stereo settings. Default pixel density is 1.0f.
        Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left, eyeFov[0], DesiredPixelDensity);
        Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, eyeFov[1], DesiredPixelDensity);

        if (RendertargetIsSharedByBothEyes)
        {
            // Both eyes share one RT: widths add, height is the larger of the two.
            Sizei rtSize(recommenedTex0Size.w + recommenedTex1Size.w,
                         Alg::Max(recommenedTex0Size.h, recommenedTex1Size.h));

            // Use returned size as the actual RT size may be different due to HW limits.
            rtSize = EnsureRendertargetAtLeastThisBig(Rendertarget_BothEyes, rtSize);

            // Don't draw more then recommended size; this also ensures that resolution reported
            // in the overlay HUD size is updated correctly for FOV/pixel density change.
            EyeRenderSize[0] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommenedTex0Size);
            EyeRenderSize[1] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommenedTex1Size);

            // Store texture pointers that will be passed for rendering.
            // Same texture is used, but with different viewports.
            EyeTexture[0] = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[0].Header.TextureSize    = rtSize;
            EyeTexture[0].Header.RenderViewport = Recti(Vector2i(0), EyeRenderSize[0]);
            EyeTexture[1] = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[1].Header.TextureSize    = rtSize;
            // Right eye viewport starts at the (rounded-up) horizontal midpoint.
            EyeTexture[1].Header.RenderViewport = Recti(Vector2i((rtSize.w+1)/2, 0), EyeRenderSize[1]);
        }
        else
        {
            // One RT per eye.
            Sizei tex0Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommenedTex0Size);
            Sizei tex1Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Right, recommenedTex1Size);

            EyeRenderSize[0] = Sizei::Min(tex0Size, recommenedTex0Size);
            EyeRenderSize[1] = Sizei::Min(tex1Size, recommenedTex1Size);

            // Store texture pointers and viewports that will be passed for rendering.
            EyeTexture[0] = RenderTargets[Rendertarget_Left].Tex;
            EyeTexture[0].Header.TextureSize    = tex0Size;
            EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
            EyeTexture[1] = RenderTargets[Rendertarget_Right].Tex;
            EyeTexture[1].Header.TextureSize    = tex1Size;
            EyeTexture[1].Header.RenderViewport = Recti(EyeRenderSize[1]);
        }
    }

    // Hmd caps.
    unsigned hmdCaps = (VsyncEnabled ? 0 : ovrHmdCap_NoVSync) | ovrHmdCap_LatencyTest;
    if (IsLowPersistence)
        hmdCaps |= ovrHmdCap_LowPersistence;
    if (DynamicPrediction)
        hmdCaps |= ovrHmdCap_DynamicPrediction;

    ovrHmd_SetEnabledCaps(Hmd, hmdCaps);

    ovrRenderAPIConfig config = pRender->Get_ovrRenderAPIConfig();

    unsigned distortionCaps = ovrDistortionCap_Chromatic;
    if (TimewarpEnabled)
        distortionCaps |= ovrDistortionCap_TimeWarp;

    if (!ovrHmd_ConfigureRendering( Hmd, &config, distortionCaps, eyeFov, EyeRenderDesc ))
    {
        // Fail exit? TBD
        return;
    }

    if (ForceZeroIpd)
    {
        // Remove IPD adjust
        EyeRenderDesc[0].ViewAdjust = Vector3f(0);
        EyeRenderDesc[1].ViewAdjust = Vector3f(0);
    }

    // ovrHmdCap_LatencyTest - enables internal latency feedback
    unsigned sensorCaps = ovrSensorCap_Orientation|ovrSensorCap_YawCorrection;
    if (PositionTrackingEnabled)
        sensorCaps |= ovrSensorCap_Position;

    // Only (re)start the sensor when the requested caps actually changed.
    if (StartSensorCaps != sensorCaps)
    {
        ovrHmd_StartSensor(Hmd, sensorCaps, 0);
        StartSensorCaps = sensorCaps;
    }

    // Calculate projections
    Projection[0] = ovrMatrix4f_Projection(EyeRenderDesc[0].Fov, 0.01f, 10000.0f, true);
    Projection[1] = ovrMatrix4f_Projection(EyeRenderDesc[1].Fov, 0.01f, 10000.0f, true);

    float    orthoDistance = 0.8f; // 2D is 0.8 meter from camera
    // Reciprocal pixel density at the texture center yields a 1:1 pixel scale.
    Vector2f orthoScale0   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[0].PixelsPerTanAngleAtCenter);
    Vector2f orthoScale1   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[1].PixelsPerTanAngleAtCenter);

    OrthoProjection[0] = ovrMatrix4f_OrthoSubProjection(Projection[0], orthoScale0, orthoDistance,
                                                        EyeRenderDesc[0].ViewAdjust.x);
    OrthoProjection[1] = ovrMatrix4f_OrthoSubProjection(Projection[1], orthoScale1, orthoDistance,
                                                        EyeRenderDesc[1].ViewAdjust.x);
}
int OVRConfigureRenderer(int width, int height, float znear, float zfar, float ipd, float multisample, int lowpersistence, int dynamicprediction, int vsync, int chromatic, int timewarp, int vignette, int state, int flip, int srgb, int overdrive, int profile) { unsigned hmdCaps; unsigned int distortionCaps; ovrFovPort eyeFov[EYE_ALL] = { _OVRGlobals.HMD->DefaultEyeFov[EYE_LEFT], _OVRGlobals.HMD->DefaultEyeFov[EYE_RIGHT] }; float FovSideTanMax = OVR::FovPort::Max(_OVRGlobals.HMD->DefaultEyeFov[EYE_LEFT], _OVRGlobals.HMD->DefaultEyeFov[EYE_RIGHT]).GetMaxSideTan(); //float FovSideTanLimit = OVR::FovPort::Max(_OVRGlobals.HMD->MaxEyeFov[EYE_LEFT], _OVRGlobals.HMD->MaxEyeFov[EYE_RIGHT]).GetMaxSideTan(); ovrBool didSetIPD = 0; // generate the HMD and distortion caps hmdCaps = (lowpersistence ? ovrHmdCap_LowPersistence : 0) | (dynamicprediction ? ovrHmdCap_DynamicPrediction : 0) | (vsync ? 0 : ovrHmdCap_NoVSync); distortionCaps = (chromatic ? ovrDistortionCap_Chromatic : 0) | (timewarp ? ovrDistortionCap_TimeWarp : 0) | (vignette ? ovrDistortionCap_Vignette : 0) | (state ? 0 : ovrDistortionCap_NoRestore) | (flip ? ovrDistortionCap_FlipInput : 0) | (srgb ? ovrDistortionCap_SRGB : 0) | (overdrive ? ovrDistortionCap_Overdrive : 0) | (profile ? ovrDistortionCap_ProfileNoTimewarpSpinWaits : 0); didSetIPD = ovrHmd_SetFloat( _OVRGlobals.HMD, OVR_KEY_IPD, ipd * 0.001 ); ovrHmd_SetEnabledCaps( _OVRGlobals.HMD, hmdCaps ); ovrRenderAPIConfig config = ovrRenderAPIConfig(); config.Header.API = ovrRenderAPI_OpenGL; config.Header.RTSize.w = width; config.Header.RTSize.h = height; config.Header.Multisample = multisample > 1 ? 
1 : 0; // clamp fov eyeFov[EYE_LEFT] = OVR::FovPort::Min(eyeFov[EYE_LEFT], OVR::FovPort(FovSideTanMax)); eyeFov[EYE_RIGHT] = OVR::FovPort::Min(eyeFov[EYE_RIGHT], OVR::FovPort(FovSideTanMax)); if ( !ovrHmd_ConfigureRendering( _OVRGlobals.HMD, &config, distortionCaps, eyeFov, _OVRGlobals.EyeRenderDesc ) ) { return 0; } #ifdef DEBUG ovrhmd_EnableHSWDisplaySDKRender( _OVRGlobals.HMD, false ); #else ovrHmd_DismissHSWDisplay( _OVRGlobals.HMD ); #endif _OVRGlobals.IPD = ovrHmd_GetFloat( _OVRGlobals.HMD, OVR_KEY_IPD, ipd * 0.001 ); // create the projection _OVRGlobals.Eye[EYE_LEFT].Projection = ovrMatrix4f_Projection( _OVRGlobals.EyeRenderDesc[EYE_LEFT].Fov, znear, zfar, true ); _OVRGlobals.Eye[EYE_RIGHT].Projection = ovrMatrix4f_Projection( _OVRGlobals.EyeRenderDesc[EYE_RIGHT].Fov, znear, zfar, true ); // transpose the projection OVR::Matrix4 <float>transposeLeft = _OVRGlobals.Eye[EYE_LEFT].Projection; OVR::Matrix4 <float>transposeRight = _OVRGlobals.Eye[EYE_RIGHT].Projection; _OVRGlobals.Eye[EYE_LEFT].Projection = transposeLeft.Transposed(); _OVRGlobals.Eye[EYE_RIGHT].Projection = transposeRight.Transposed(); // TODO: ortho { float orthoDistance = 0.8f; // 2D is 0.8 meter from camera OVR::Vector2f orthoScale0 = OVR::Vector2f(1.0f) / OVR::Vector2f(_OVRGlobals.EyeRenderDesc[EYE_LEFT].PixelsPerTanAngleAtCenter); OVR::Vector2f orthoScale1 = OVR::Vector2f(1.0f) / OVR::Vector2f(_OVRGlobals.EyeRenderDesc[EYE_RIGHT].PixelsPerTanAngleAtCenter); _OVRGlobals.Eye[EYE_LEFT].OrthoProjection = ovrMatrix4f_OrthoSubProjection(_OVRGlobals.Eye[EYE_LEFT].Projection, orthoScale0, orthoDistance, _OVRGlobals.EyeRenderDesc[EYE_LEFT].ViewAdjust.x); _OVRGlobals.Eye[EYE_RIGHT].OrthoProjection = ovrMatrix4f_OrthoSubProjection(_OVRGlobals.Eye[EYE_RIGHT].Projection, orthoScale1, orthoDistance, _OVRGlobals.EyeRenderDesc[EYE_RIGHT].ViewAdjust.x); OVR::Matrix4 <float>transposeLeftOrtho = _OVRGlobals.Eye[EYE_LEFT].OrthoProjection; OVR::Matrix4 <float>transposeRightOrtho = 
_OVRGlobals.Eye[EYE_RIGHT].OrthoProjection; _OVRGlobals.Eye[EYE_LEFT].OrthoProjection = transposeLeftOrtho.Transposed(); _OVRGlobals.Eye[EYE_RIGHT].OrthoProjection = transposeRightOrtho.Transposed(); } return 1; }