bool OctreeQueryNode::moveShouldDump() const {
    // if shutting down, return immediately
    if (_isShuttingDown) {
        return false;
    }

    QMutexLocker viewLocker(&_viewMutex);
    glm::vec3 oldPosition = _lastKnownViewFrustum.getPosition();
    glm::vec3 newPosition = _currentViewFrustum.getPosition();

    // theoretically we could make this slightly larger, but relative to avatar scale.
    const float MAXIMUM_MOVE_WITHOUT_DUMP = 0.0f;
    return glm::distance(newPosition, oldPosition) > MAXIMUM_MOVE_WITHOUT_DUMP;
}
void OctreeQueryNode::updateLastKnownViewFrustum() {
    // if shutting down, return immediately
    if (_isShuttingDown) {
        return;
    }

    {
        QMutexLocker viewLocker(&_viewMutex);
        bool frustumChanges = !_lastKnownViewFrustum.isVerySimilar(_currentViewFrustum);

        if (frustumChanges) {
            // save our currentViewFrustum into our lastKnownViewFrustum
            _lastKnownViewFrustum = _currentViewFrustum;
        }
    }

    // save that we know the view has been sent.
    setLastTimeBagEmpty();
}
void GraphicsEngine::render_performFrame() {
    // Some plugins process message events, allowing paintGL to be called reentrantly.

    _renderFrameCount++;

    auto lastPaintBegin = usecTimestampNow();
    PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
    PerformanceTimer perfTimer("paintGL");

    DisplayPluginPointer displayPlugin;
    {
        PROFILE_RANGE(render, "/getActiveDisplayPlugin");
        displayPlugin = qApp->getActiveDisplayPlugin();
    }

    {
        PROFILE_RANGE(render, "/pluginBeginFrameRender");
        // If a display plugin loses its underlying support, it
        // needs to be able to signal us to not use it
        if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
            QMetaObject::invokeMethod(qApp, "updateDisplayMode");
            return;
        }
    }

    RenderArgs renderArgs;
    glm::mat4 HMDSensorPose;
    glm::mat4 eyeToWorld;
    glm::mat4 sensorToWorld;
    ViewFrustum viewFrustum;

    bool isStereo;
    glm::mat4 stereoEyeOffsets[2];
    glm::mat4 stereoEyeProjections[2];

    {
        QMutexLocker viewLocker(&_renderArgsMutex);
        renderArgs = _appRenderArgs._renderArgs;

        // don't render if there is no context.
        if (!_appRenderArgs._renderArgs._context) {
            return;
        }

        HMDSensorPose = _appRenderArgs._headPose;
        eyeToWorld = _appRenderArgs._eyeToWorld;
        sensorToWorld = _appRenderArgs._sensorToWorld;
        isStereo = _appRenderArgs._isStereo;
        for_each_eye([&](Eye eye) {
            stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
            stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
        });
        viewFrustum = _appRenderArgs._renderArgs.getViewFrustum();
    }

    {
        PROFILE_RANGE(render, "/gpuContextReset");
        getGPUContext()->beginFrame(_appRenderArgs._view, HMDSensorPose);
        // Reset the gpu::Context Stages
        // Back to the default framebuffer
        gpu::doInBatch("Application_render::gpuContextReset", getGPUContext(), [&](gpu::Batch& batch) {
            batch.resetStages();
        });

        if (isStereo) {
            renderArgs._context->enableStereo(true);
            renderArgs._context->setStereoProjections(stereoEyeProjections);
            renderArgs._context->setStereoViews(stereoEyeOffsets);
        }
    }

    gpu::FramebufferPointer finalFramebuffer;
    QSize finalFramebufferSize;
    {
        PROFILE_RANGE(render, "/getOutputFramebuffer");
        // Primary rendering pass
        auto framebufferCache = DependencyManager::get<FramebufferCache>();
        finalFramebufferSize = framebufferCache->getFrameBufferSize();
        // Final framebuffer that will be handed to the display-plugin
        finalFramebuffer = framebufferCache->getFramebuffer();
    }

    if (!_programsCompiled.load()) {
        gpu::doInBatch("splashFrame", _gpuContext, [&](gpu::Batch& batch) {
            batch.setFramebuffer(finalFramebuffer);
            batch.enableSkybox(true);
            batch.enableStereo(isStereo);
            batch.setViewportTransform({ 0, 0, finalFramebuffer->getSize() });
            _splashScreen->render(batch, viewFrustum);
        });
    } else {
        {
            PROFILE_RANGE(render, "/renderOverlay");
            PerformanceTimer perfTimer("renderOverlay");
            // NOTE: There is no batch associated with this renderArgs;
            // the ApplicationOverlay class assumes its viewport is set up to be the device size
            renderArgs._viewport = glm::ivec4(0, 0, qApp->getDeviceSize());
            qApp->getApplicationOverlay().renderOverlay(&renderArgs);
        }

        {
            PROFILE_RANGE(render, "/updateCompositor");
            qApp->getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
        }

        {
            PROFILE_RANGE(render, "/runRenderFrame");
            renderArgs._hudOperator = displayPlugin->getHUDOperator();
            renderArgs._hudTexture = qApp->getApplicationOverlay().getOverlayTexture();
            renderArgs._blitFramebuffer = finalFramebuffer;
            render_runRenderFrame(&renderArgs);
        }
    }

    auto frame = getGPUContext()->endFrame();
    frame->frameIndex = _renderFrameCount;
    frame->framebuffer = finalFramebuffer;
    frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
        auto frameBufferCache = DependencyManager::get<FramebufferCache>();
        if (frameBufferCache) {
            frameBufferCache->releaseFramebuffer(framebuffer);
        }
    };

    // deliver final scene rendering commands to the display plugin
    {
        PROFILE_RANGE(render, "/pluginOutput");
        PerformanceTimer perfTimer("pluginOutput");
        _renderLoopCounter.increment();
        displayPlugin->submitFrame(frame);
    }

    // Reset the framebuffer and stereo state
    renderArgs._blitFramebuffer.reset();
    renderArgs._context->enableStereo(false);

#if !defined(DISABLE_QML)
    {
        auto stats = Stats::getInstance();
        if (stats) {
            stats->setRenderDetails(renderArgs._details);
        }
    }
#endif

    uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
    _frameTimingsScriptingInterface.addValue(lastPaintDuration);
}
void Application::paintGL() {
    // Some plugins process message events, allowing paintGL to be called reentrantly.
    if (_aboutToQuit || _window->isMinimized()) {
        return;
    }

    _renderFrameCount++;
    _lastTimeRendered.start();

    auto lastPaintBegin = usecTimestampNow();
    PROFILE_RANGE_EX(render, __FUNCTION__, 0xff0000ff, (uint64_t)_renderFrameCount);
    PerformanceTimer perfTimer("paintGL");

    if (nullptr == _displayPlugin) {
        return;
    }

    DisplayPluginPointer displayPlugin;
    {
        PROFILE_RANGE(render, "/getActiveDisplayPlugin");
        displayPlugin = getActiveDisplayPlugin();
    }

    {
        PROFILE_RANGE(render, "/pluginBeginFrameRender");
        // If a display plugin loses its underlying support, it
        // needs to be able to signal us to not use it
        if (!displayPlugin->beginFrameRender(_renderFrameCount)) {
            QMetaObject::invokeMethod(this, "updateDisplayMode");
            return;
        }
    }

    RenderArgs renderArgs;
    glm::mat4 HMDSensorPose;
    glm::mat4 eyeToWorld;
    glm::mat4 sensorToWorld;

    bool isStereo;
    glm::mat4 stereoEyeOffsets[2];
    glm::mat4 stereoEyeProjections[2];

    {
        QMutexLocker viewLocker(&_renderArgsMutex);
        renderArgs = _appRenderArgs._renderArgs;

        // don't render if there is no context.
        if (!_appRenderArgs._renderArgs._context) {
            return;
        }

        HMDSensorPose = _appRenderArgs._headPose;
        eyeToWorld = _appRenderArgs._eyeToWorld;
        sensorToWorld = _appRenderArgs._sensorToWorld;
        isStereo = _appRenderArgs._isStereo;
        for_each_eye([&](Eye eye) {
            stereoEyeOffsets[eye] = _appRenderArgs._eyeOffsets[eye];
            stereoEyeProjections[eye] = _appRenderArgs._eyeProjections[eye];
        });
    }

    {
        PROFILE_RANGE(render, "/gpuContextReset");
        _gpuContext->beginFrame(HMDSensorPose);
        // Reset the gpu::Context Stages
        // Back to the default framebuffer
        gpu::doInBatch(_gpuContext, [&](gpu::Batch& batch) {
            batch.resetStages();
        });
    }

    {
        PROFILE_RANGE(render, "/renderOverlay");
        PerformanceTimer perfTimer("renderOverlay");
        // NOTE: There is no batch associated with this renderArgs;
        // the ApplicationOverlay class assumes its viewport is set up to be the device size
        renderArgs._viewport = glm::ivec4(0, 0, getDeviceSize());
        _applicationOverlay.renderOverlay(&renderArgs);
    }

    {
        PROFILE_RANGE(render, "/updateCompositor");
        getApplicationCompositor().setFrameInfo(_renderFrameCount, eyeToWorld, sensorToWorld);
    }

    gpu::FramebufferPointer finalFramebuffer;
    QSize finalFramebufferSize;
    {
        PROFILE_RANGE(render, "/getOutputFramebuffer");
        // Primary rendering pass
        auto framebufferCache = DependencyManager::get<FramebufferCache>();
        finalFramebufferSize = framebufferCache->getFrameBufferSize();
        // Final framebuffer that will be handed to the display-plugin
        finalFramebuffer = framebufferCache->getFramebuffer();
    }

    {
        if (isStereo) {
            renderArgs._context->enableStereo(true);
            renderArgs._context->setStereoProjections(stereoEyeProjections);
            renderArgs._context->setStereoViews(stereoEyeOffsets);
        }

        renderArgs._hudOperator = displayPlugin->getHUDOperator();
        renderArgs._hudTexture = _applicationOverlay.getOverlayTexture();
        renderArgs._blitFramebuffer = finalFramebuffer;
        runRenderFrame(&renderArgs);
    }

    auto frame = _gpuContext->endFrame();
    frame->frameIndex = _renderFrameCount;
    frame->framebuffer = finalFramebuffer;
    frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
        DependencyManager::get<FramebufferCache>()->releaseFramebuffer(framebuffer);
    };

    // deliver final scene rendering commands to the display plugin
    {
        PROFILE_RANGE(render, "/pluginOutput");
        PerformanceTimer perfTimer("pluginOutput");
        _renderLoopCounter.increment();
        displayPlugin->submitFrame(frame);
    }

    // Reset the framebuffer and stereo state
    renderArgs._blitFramebuffer.reset();
    renderArgs._context->enableStereo(false);

    {
        Stats::getInstance()->setRenderDetails(renderArgs._details);
    }

    uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;
    _frameTimingsScriptingInterface.addValue(lastPaintDuration);
}
bool OctreeQueryNode::updateCurrentViewFrustum() {
    // if shutting down, return immediately
    if (_isShuttingDown) {
        return false;
    }

    if (!_usesFrustum) {
        // this client does not use a view frustum, so the view frustum for this query has not changed
        return false;
    } else {
        bool currentViewFrustumChanged = false;

        ViewFrustum newestViewFrustum;
        // get position and orientation details from the camera
        newestViewFrustum.setPosition(getCameraPosition());
        newestViewFrustum.setOrientation(getCameraOrientation());
        newestViewFrustum.setCenterRadius(getCameraCenterRadius());

        // Also make sure it's got the correct lens details from the camera
        float originalFOV = getCameraFov();
        float wideFOV = originalFOV + VIEW_FRUSTUM_FOV_OVERSEND;

        if (0.0f != getCameraAspectRatio() &&
            0.0f != getCameraNearClip() &&
            0.0f != getCameraFarClip() &&
            getCameraNearClip() != getCameraFarClip()) {
            newestViewFrustum.setProjection(glm::perspective(
                glm::radians(wideFOV), // hack
                getCameraAspectRatio(),
                getCameraNearClip(),
                getCameraFarClip()));
            newestViewFrustum.calculate();
        }

        { // if there has been a change, then recalculate
            QMutexLocker viewLocker(&_viewMutex);
            if (!newestViewFrustum.isVerySimilar(_currentViewFrustum)) {
                _currentViewFrustum = newestViewFrustum;
                currentViewFrustumChanged = true;
            }
        }

        // Also check for LOD changes from the client
        if (_lodInitialized) {
            if (_lastClientBoundaryLevelAdjust != getBoundaryLevelAdjust()) {
                _lastClientBoundaryLevelAdjust = getBoundaryLevelAdjust();
                _lodChanged = true;
            }
            if (_lastClientOctreeSizeScale != getOctreeSizeScale()) {
                _lastClientOctreeSizeScale = getOctreeSizeScale();
                _lodChanged = true;
            }
        } else {
            _lodInitialized = true;
            _lastClientOctreeSizeScale = getOctreeSizeScale();
            _lastClientBoundaryLevelAdjust = getBoundaryLevelAdjust();
            _lodChanged = false;
        }

        // When we first detect that the view stopped changing, we record this,
        // but we don't change it back to false until we've completely sent this scene.
        if (_viewFrustumChanging && !currentViewFrustumChanged) {
            _viewFrustumJustStoppedChanging = true;
        }
        _viewFrustumChanging = currentViewFrustumChanged;
        return currentViewFrustumChanged;
    }
}
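// --- Illustrative only: not part of the class above. ---
// A minimal sketch of how a per-client send pass might combine the methods in this
// section: refresh the query's view, decide whether queued scene content is stale
// because the viewer moved, and record the view that was actually serviced.
// exampleSendPass() and dumpOutOfViewContent() are hypothetical names for this sketch;
// only updateCurrentViewFrustum(), moveShouldDump(), and updateLastKnownViewFrustum()
// come from the code shown here.
void exampleSendPass(OctreeQueryNode& queryNode) {
    // Pull the latest camera state from the client's query into _currentViewFrustum.
    bool viewChanged = queryNode.updateCurrentViewFrustum();

    if (viewChanged && queryNode.moveShouldDump()) {
        // The viewer moved relative to the last serviced view, so queued content
        // targeting the old view is stale; a real sender would discard it here.
        // dumpOutOfViewContent(queryNode); // hypothetical helper
    }

    // ... encode and send octree packets for the (possibly new) view ...

    // Remember the view we just serviced so the next pass compares against it.
    queryNode.updateLastKnownViewFrustum();
}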
void OctreeQueryNode::copyLastKnownViewFrustum(ViewFrustum& viewOut) const {
    QMutexLocker viewLocker(&_viewMutex);
    viewOut = _lastKnownViewFrustum;
}
void OctreeQueryNode::copyCurrentViewFrustum(ViewFrustum& viewOut) const {
    QMutexLocker viewLocker(&_viewMutex);
    viewOut = _currentViewFrustum;
}
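// --- Illustrative only: not part of the class above. ---
// A minimal sketch of the intended use of the copy accessors: each call takes
// _viewMutex internally and returns a by-value snapshot, so callers on other threads
// can work with the copy without holding the lock. exampleViewDrift() is a
// hypothetical name; getPosition() and glm::distance() are the same calls used by
// moveShouldDump() above.
float exampleViewDrift(const OctreeQueryNode& queryNode) {
    ViewFrustum current;
    ViewFrustum lastKnown;
    queryNode.copyCurrentViewFrustum(current);      // snapshot taken under _viewMutex
    queryNode.copyLastKnownViewFrustum(lastKnown);  // second, independent snapshot

    // Safe to compute with the copies after both locks have been released.
    return glm::distance(current.getPosition(), lastKnown.getPosition());
}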