void
VRDisplayHost::SubmitFrame(VRLayerParent* aLayer,
                           const int32_t& aInputFrameID,
                           PTextureParent* aTexture,
                           const gfx::Rect& aLeftEyeRect,
                           const gfx::Rect& aRightEyeRect)
{
  // aInputFrameID is no longer controlled by content with the WebVR 1.1 API
  // update; however, we will later use this code to enable asynchronous
  // submission of multiple layers to be composited.  This will enable us to
  // build browser UX that remains responsive even when content does not
  // consistently submit frames.
  int32_t inputFrameID = aInputFrameID;
  if (inputFrameID == 0) {
    inputFrameID = mInputFrameID;
  }
  if (inputFrameID < 0) {
    // Sanity check to prevent invalid memory access on builds with assertions
    // disabled.
    inputFrameID = 0;
  }

  VRHMDSensorState sensorState = mLastSensorState[inputFrameID % kMaxLatencyFrames];
  // It is possible to get a cache miss on mLastSensorState if latency is
  // longer than kMaxLatencyFrames.  An optimization would be to find a frame
  // that is closer than the one selected with the modulus.
  // If we hit this, however, latency is already so high that the site is
  // un-viewable and a more accurate pose prediction is not likely to
  // compensate.

  TextureHost* th = TextureHost::AsTextureHost(aTexture);
  // WebVR doesn't use the compositor to compose the frame, so use
  // AutoLockTextureHostWithoutCompositor here.
  AutoLockTextureHostWithoutCompositor autoLock(th);
  if (autoLock.Failed()) {
    NS_WARNING("Failed to lock the VR layer texture");
    return;
  }

  CompositableTextureSourceRef source;
  if (!th->BindTextureSource(source)) {
    NS_WARNING("The TextureHost was successfully locked but can't provide a "
               "TextureSource");
    return;
  }
  MOZ_ASSERT(source);

  IntSize texSize = source->GetSize();

  TextureSourceD3D11* sourceD3D11 = source->AsSourceD3D11();
  if (!sourceD3D11) {
    NS_WARNING("WebVR support currently only implemented for D3D11");
    return;
  }

  SubmitFrame(sourceD3D11, texSize, sensorState, aLeftEyeRect, aRightEyeRect);
}
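
// A minimal standalone sketch (not Gecko code) of the ring-buffer lookup used
// above: sensor states are recorded in a fixed-size array indexed by frame ID
// modulo the buffer size, so a submitted frame can be matched with the pose
// that was predicted when it was rendered.  PoseState, kPoseRingSize,
// RecordPose and LookupPose are hypothetical stand-ins for VRHMDSensorState,
// kMaxLatencyFrames and the surrounding Gecko machinery.
#include <array>
#include <cstdint>

struct PoseState {
  int32_t frameID = 0;
  float orientation[4] = {0.0f, 0.0f, 0.0f, 1.0f};
};

constexpr size_t kPoseRingSize = 16;
static std::array<PoseState, kPoseRingSize> sPoseRing;

// Store the pose sampled for a given frame.
void RecordPose(const PoseState& aPose) {
  sPoseRing[aPose.frameID % kPoseRingSize] = aPose;
}

// Look up the pose for a submitted frame.  If more than kPoseRingSize frames
// of latency have accumulated, the slot will have been overwritten by a newer
// frame -- the "cache miss" described in the comment above.
const PoseState& LookupPose(int32_t aFrameID) {
  return sPoseRing[aFrameID % kPoseRingSize];
}
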
void
VRDisplayHost::SubmitFrame(VRLayerParent* aLayer,
                           PTextureParent* aTexture,
                           const gfx::Rect& aLeftEyeRect,
                           const gfx::Rect& aRightEyeRect)
{
  if ((mDisplayInfo.mGroupMask & aLayer->GetGroup()) == 0) {
    // Suppress layers hidden by the group mask.
    return;
  }

  TextureHost* th = TextureHost::AsTextureHost(aTexture);
  // WebVR doesn't use the compositor to compose the frame, so use
  // AutoLockTextureHostWithoutCompositor here.
  AutoLockTextureHostWithoutCompositor autoLock(th);
  if (autoLock.Failed()) {
    NS_WARNING("Failed to lock the VR layer texture");
    return;
  }

  CompositableTextureSourceRef source;
  if (!th->BindTextureSource(source)) {
    NS_WARNING("The TextureHost was successfully locked but can't provide a "
               "TextureSource");
    return;
  }
  MOZ_ASSERT(source);

  IntSize texSize = source->GetSize();

  TextureSourceD3D11* sourceD3D11 = source->AsSourceD3D11();
  if (!sourceD3D11) {
    NS_WARNING("WebVR support currently only implemented for D3D11");
    return;
  }

  if (!SubmitFrame(sourceD3D11, texSize, aLeftEyeRect, aRightEyeRect)) {
    return;
  }

  /**
   * Trigger the next VSync immediately after we successfully submit a frame.
   * As SubmitFrame is responsible for throttling the render loop, we should
   * not call NotifyVRVsync unless submission succeeded, or the loop would
   * run unbounded.
   * If NotifyVRVsync is not called here due to SubmitFrame failing, the
   * fallback "watchdog" code in VRDisplayHost::NotifyVSync() will cause
   * frames to continue at a lower refresh rate until frame submission
   * succeeds again.
   */
  VRManager* vm = VRManager::Get();
  MOZ_ASSERT(vm);
  vm->NotifyVRVsync(mDisplayInfo.mDisplayID);
}
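
// A minimal standalone sketch (not Gecko code) of the throttling pattern
// described in the comment above: the next frame is kicked off immediately
// only when submission succeeds, while a lower-frequency "watchdog" keeps the
// render loop alive if submission keeps failing.  TrySubmit, NotifyVsync,
// RenderLoop and the timing values are hypothetical stand-ins for
// SubmitFrame, VRManager::NotifyVRVsync and VRDisplayHost::NotifyVSync.
#include <chrono>
#include <thread>

static int sFrameCounter = 0;

// Pretend frame submission: fails every third frame, for illustration only.
bool TrySubmit() { return (++sFrameCounter % 3) != 0; }

// Wakes the renderer for the next frame; in Gecko this role is played by
// VRManager::NotifyVRVsync.
void NotifyVsync() {}

void RenderLoop() {
  using namespace std::chrono;
  auto lastVsync = steady_clock::now();
  const auto watchdogInterval = milliseconds(100);  // reduced fallback rate

  while (true) {
    if (TrySubmit()) {
      // Successful submission already throttles the loop to the display's
      // refresh rate, so the next frame can be triggered immediately.
      NotifyVsync();
      lastVsync = steady_clock::now();
    } else if (steady_clock::now() - lastVsync >= watchdogInterval) {
      // Submission failed: let the watchdog drive frames at a lower rate
      // until submission succeeds again.
      NotifyVsync();
      lastVsync = steady_clock::now();
    }
    std::this_thread::sleep_for(milliseconds(1));
  }
}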