// Forward-rendering pipeline with guard-band support: culls and sorts surfaces,
// performs an early-Z pass, optionally fills the G-buffer velocity data for
// motion blur, computes AO, forward-shades, then applies depth-of-field and
// motion blur before film tone-mapping to the back buffer.
//
// @param rd          Active render device.
// @param allSurfaces All posed surfaces for this frame (visible and not).
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    // Bind the main frameBuffer
    rd->pushState(m_frameBuffer); {
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());

        // Keep the G-buffer sized to the current output; prepare clears it and
        // sets up camera/time parameters (negative time step = backwards velocity basis)
        m_gbuffer->resize(rd->width(), rd->height());
        m_gbuffer->prepare(rd, activeCamera(), 0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);

        // Restrict 2D clipping so post-processing ignores the guard band
        if (! m_settings.colorGuardBandThickness.isZero()) {
            rd->setGuardBandClip2D(m_settings.colorGuardBandThickness);
        }

        // Cull and sort
        Array<shared_ptr<Surface> > sortedVisibleSurfaces;
        Surface::cull(activeCamera()->frame(), activeCamera()->projection(), rd->viewport(), allSurfaces, sortedVisibleSurfaces);
        Surface::sortBackToFront(sortedVisibleSurfaces, activeCamera()->frame().lookVector());

        // Early-Z pass (transmissive surfaces intentionally excluded so they
        // do not occlude what is behind them)
        static const bool renderTransmissiveSurfaces = false;
        Surface::renderDepthOnly(rd, sortedVisibleSurfaces, CullFace::BACK, renderTransmissiveSurfaces);

        // Depth is now final; subsequent passes only test against it
        rd->setDepthWrite(false);

        // Render velocity buffer (if needed)
        if (activeCamera()->motionBlurSettings().enabled()) {
            Surface::renderIntoGBuffer(rd, sortedVisibleSurfaces, m_gbuffer, activeCamera()->previousFrame());
        }

        // Copy the environment so the AO pointer can be mutated locally
        LocalLightingEnvironment environment = m_scene->localLightingEnvironment();
        environment.ambientOcclusion = m_ambientOcclusion;

        // Compute AO
        m_ambientOcclusion->update(rd, environment.ambientOcclusionSettings, activeCamera(), m_depthBuffer);

        // Compute shadow maps and forward-render visible surfaces
        Surface::render(rd, activeCamera()->frame(), activeCamera()->projection(), sortedVisibleSurfaces, allSurfaces, environment);

        rd->setDepthWrite(true);

        // Call to make the App show the output of debugDraw(...)
        drawDebugShapes();

        // Post-process special effects; the thickness difference converts the
        // total guard band into the depth-only portion these passes may sample
        m_depthOfField->apply(rd, m_colorBuffer0, m_depthBuffer, activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        m_motionBlur->apply(rd, m_colorBuffer0, m_gbuffer->texture(GBuffer::Field::SS_POSITION_CHANGE), m_gbuffer->specification().encoding[GBuffer::Field::SS_POSITION_CHANGE], m_depthBuffer, activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
    } rd->popState();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    // NOTE(review): assumes m_colorBuffer0 is the color attachment of m_frameBuffer — confirm where it is bound
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_colorBuffer0, 1);

    // On-screen help text
    screenPrintf("WASD to move");
    screenPrintf("Mouse to turn");
    screenPrintf("Space to jump");
}
// Four-viewport debug renderer: the top-left quadrant receives a fully shaded
// view from the active camera; the other three quadrants receive fixed-axis
// wireframe views (top, front, side) of the same surfaces.
//
// @param rd        Active render device.
// @param surface3D All posed surfaces for this frame.
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& surface3D) {
    // Bind the main framebuffer
    rd->pushState(m_framebuffer); {
        rd->clear();

        // Copy so the AO pointer can be substituted locally
        LightingEnvironment env = m_scene->lighting();
        env.ambientOcclusion = m_ambientOcclusion;

        // Render full shading viewport (top-left quadrant)
        // NOTE: const-ref binding to a temporary Rect2D relies on C++ lifetime extension; legal but fragile
        const Rect2D& shadeViewport = Rect2D::xywh(0, 0, rd->width() / 2.0f, rd->height() / 2.0f);
        rd->setViewport(shadeViewport);
        Draw::skyBox(rd, env.environmentMapArray[0]);

        // Depth pre-pass so AO can read a complete depth buffer before shading
        Surface::renderDepthOnly(rd, surface3D, CullFace::BACK);
        m_ambientOcclusion->update(rd, env.ambientOcclusionSettings, activeCamera(), m_framebuffer->texture(Framebuffer::DEPTH));

        Array<shared_ptr<Surface> > sortedVisible;
        Surface::cull(activeCamera()->frame(), activeCamera()->projection(), rd->viewport(), surface3D, sortedVisible);
        Surface::sortBackToFront(sortedVisible, activeCamera()->frame().lookVector());

        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
        for (int i = 0; i < sortedVisible.size(); ++i) {
            sortedVisible[i]->render(rd, env, RenderPassType::OPAQUE_SAMPLES, "");
        }

        // Wireframe views: three axis-aligned cameras looking down -Y, -Z, -X
        shared_ptr<Camera> wireCamera[3] = {Camera::create(), Camera::create(), Camera::create()};
        wireCamera[0]->setFrame(CFrame::fromXYZYPRDegrees(0,40,0,0,-90));
        wireCamera[1]->setFrame(CFrame::fromXYZYPRDegrees(0,0,40,0,0));
        wireCamera[2]->setFrame(CFrame::fromXYZYPRDegrees(40,0,0,90,0));

        // Remaining three quadrants, derived by offsetting the shaded viewport
        Rect2D wireViewport[3];
        wireViewport[0] = shadeViewport + Vector2(rd->width() / 2.0f, 0.0f);
        wireViewport[1] = shadeViewport + Vector2(rd->width() / 2.0f, rd->height() / 2.0f);
        wireViewport[2] = shadeViewport + Vector2(0.0f, rd->height() / 2.0f);

        for (int i = 0; i < 3; ++i) {
            rd->setViewport(wireViewport[i]);
            rd->setProjectionAndCameraMatrix(wireCamera[i]->projection(), wireCamera[i]->frame());
            Surface::renderWireframe(rd, surface3D);
            Draw::axes(rd);
            // Call to make the GApp show the output of debugDraw calls
            drawDebugShapes();
        }
    } rd->popState();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_framebuffer->texture(0), 1);
}
// Legacy (Surface::Ref-era G3D) renderer for a ray/photon-tracing demo:
// draws the sky box, shades all surfaces, optionally overlays wireframe,
// then visualizes the photon map as colored points.
//
// @param rd        Active render device.
// @param surface3D All posed surfaces for this frame.
void App::onGraphics3D (RenderDevice* rd, Array<Surface::Ref>& surface3D) {
    // Nothing to draw before a scene is loaded
    if (_scene.isNull()) {
        return;
    }

    Draw::skyBox(rd, _scene->skyBoxTexture(), _scene->skyBoxConstant());
    Surface::sortAndRender(rd, defaultCamera, surface3D, _scene->lighting());

    // Optional wireframe overlay; state is isolated with push/popState
    if (_wireframe) {
        rd->pushState(); {
            rd->setRenderMode(RenderDevice::RENDER_WIREFRAME);
            rd->setLineWidth(2);
            rd->setColor(Color3::black());
            Surface::sendGeometry(rd, surface3D);
        } rd->popState();
    }

    // World-origin axes and light-source gizmos
    Draw::axes(CoordinateFrame(Vector3(0, 0, 0)), rd);
    Draw::lighting(_scene->lighting(), rd);

    // Visualize stored photons as points, colored by normalized power
    // NOTE(review): setPointSize is not wrapped in push/popState here, so the
    // value leaks into subsequent rendering — confirm that is intentional
    if (_renderSettings._usePhotonMap && _raytracer._photonMap) {
        rd->setPointSize(5);
        rd->beginPrimitive(PrimitiveType::POINTS);
        for (PhotonGrid::Iterator it = _raytracer._photonMap->begin(); it.hasMore(); ++it) {
            // Normalize by the max channel so the brightest channel saturates
            rd->setColor(it->power / it->power.max());
            rd->sendVertex(it->position);
        }
        rd->endPrimitive();
    }

    // Call to make the GApp show the output of debugDraw
    drawDebugShapes();
}
// Legacy (Surface::Ref-era G3D) renderer: sky box, sorted shaded surfaces with
// shadow mapping, optional wireframe/axes/light gizmos, debug shapes, and a
// final depth-of-field pass. Also demonstrates fixed-function light setup.
//
// @param rd        Active render device.
// @param surface3D All posed surfaces for this frame.
void App::onGraphics3D(RenderDevice* rd, Array<Surface::Ref>& surface3D) {
    // Bail out until a scene exists
    if (m_scene.isNull()) {
        return;
    }

    Draw::skyBox(rd, m_scene->skyBoxTexture(), m_scene->skyBoxConstant());

    // Render all objects. (Alternatively, call Surface methods on the elements
    // of surface3D directly to customize rendering; the shadow map argument
    // enables shadowed shading.)
    Surface::sortAndRender(rd, defaultCamera, surface3D, m_scene->lighting(), m_shadowMap);

    if (m_showWireframe) {
        Surface::renderWireframe(rd, surface3D);
    }

    //////////////////////////////////////////////////////
    // Sample immediate-mode rendering: bind every scene light plus a flat
    // ambient term for any fixed-function drawing below.
    rd->enableLighting();
    const int lightCount = m_scene->lighting()->lightArray.size();
    for (int lightIndex = 0; lightIndex < lightCount; ++lightIndex) {
        rd->setLight(lightIndex, m_scene->lighting()->lightArray[lightIndex]);
    }
    rd->setAmbientLightColor(Color3::white() * 0.5f);

    // Draw::sphere(Sphere(Vector3(2.5f, 0.5f, 0), 0.5f), rd, Color3::white(), Color4::clear());
    // Draw::box(AABox(Vector3(-2.0f, 0.0f, -0.5f), Vector3(-1.0f, 1.0f, 0.5f)), rd, Color4(Color3::orange(), 0.25f), Color3::black());

    if (m_showAxes) {
        Draw::axes(Point3(0, 0, 0), rd);
    }
    if (m_showLightSources) {
        Draw::lighting(m_scene->lighting(), rd);
    }

    // Make the GApp display any debugDraw() output
    drawDebugShapes();

    m_depthOfField->apply(rd, m_colorBuffer0, m_depthBuffer, defaultCamera);
}
// Deferred/forward hybrid frame driven by m_renderer, with an optional
// full-screen sparse-voxel-octree (SVO) cone-traced raycast pass, SVO debug
// visualization, and standard post-processing (DoF, motion blur, film).
//
// @param rd          Active render device.
// @param allSurfaces All posed surfaces for this frame.
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    // This implementation is equivalent to the default GApp's. It is repeated here to make it
    // easy to modify rendering. If you don't require custom rendering, just delete this
    // method from your application and rely on the base class.
    if (! scene()) {
        return;
    }

    m_gbuffer->setSpecification(m_gbufferSpecification);
    m_gbuffer->resize(m_framebuffer->width(), m_framebuffer->height());
    m_gbuffer->prepare(rd, activeCamera(), 0, -(float)previousSimTimeStep(), m_settings.hdrFramebuffer.depthGuardBandThickness, m_settings.hdrFramebuffer.colorGuardBandThickness);

    // Main scene pass (the renderer owns the shading strategy)
    m_renderer->render(rd, m_framebuffer, m_depthPeelFramebuffer, scene()->lightingEnvironment(), m_gbuffer, allSurfaces);

    // Debug visualizations and post-process effects
    rd->pushState(m_framebuffer); {
        if (m_enableSVO) {
            // Replace the rendered image with a full-screen SVO raycast
            rd->clear();
            //rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
            rd->push2D();

            // Guard band is the depth-only margin; colorRegionExtent is the
            // visible (non-guard-band) pixel region
            const Vector2int32 guardBand(m_settings.hdrFramebuffer.depthGuardBandThickness - m_settings.hdrFramebuffer.colorGuardBandThickness);
            const Vector2int32 colorRegionExtent = Vector2int32(m_framebuffer->vector2Bounds()) - guardBand * 2;

            Args args;
            rd->setGuardBandClip2D(Vector2int16(guardBand));
            args.setRect(rd->viewport());

            // proj[0][0] of the unit projection matrix is the focal length
            Matrix4 proj;
            activeCamera()->getProjectUnitMatrix(m_framebuffer->rect2DBounds(), proj);
            float focalLength = proj[0][0];

            m_svo->setCurSvoId(0);

            args.setUniform("guardBand", guardBand);
            args.setUniform("focalLength", focalLength);
            args.setUniform("renderRes", Vector2(colorRegionExtent));
            args.setUniform("renderResI", colorRegionExtent);
            args.setUniform("screenRatio", float(colorRegionExtent.y) / float(colorRegionExtent.x));

            // Bind the SVO for reading at full depth
            m_svo->connectToShader(args, Access::READ, m_svo->maxDepth(), m_svo->maxDepth());

            rd->setColorWrite(true);
            rd->setDepthWrite(false);

            // Camera-space -> voxel-space transform for ray generation
            const Matrix4& cameraToVoxelMatrix = Matrix4(m_svo->svoToWorldMatrix()).inverse() * activeCamera()->frame();
            args.setUniform("cameraToVoxelMatrix", cameraToVoxelMatrix);
            args.setUniform("voxelToWorldMatrix", m_svo->svoToWorldMatrix());
            args.setUniform("worldToVoxelMatrix", m_svo->worldToSVOMatrix());
            args.setUniform("wsCameraPos", activeCamera()->frame().translation);
            scene()->lightingEnvironment().setShaderArgs(args);
            args.setUniform("raycastingConeFactor", m_voxelConeAperture);

            rd->setDepthTest(RenderDevice::DEPTH_ALWAYS_PASS);
            // TODO: write gl_FragDepth and use a regular depth test here
            m_gbuffer->texture(GBuffer::Field::DEPTH_AND_STENCIL)->setShaderArgs(args, "depth_", Sampler::buffer());

            //rd->setBlendFunc(RenderDevice::BLEND_ONE, RenderDevice::BLEND_ONE_MINUS_SRC_ALPHA);

            LAUNCH_SHADER("raycast.pix", args);
            rd->pop2D();
        }

        // Call to make the App show the output of debugDraw(...)
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
        drawDebugShapes();

        // Highlight whatever entity is selected in the scene editor (if open)
        const shared_ptr<Entity>& selectedEntity = (notNull(developerWindow) && notNull(developerWindow->sceneEditorWindow)) ? developerWindow->sceneEditorWindow->selectedEntity() : shared_ptr<Entity>();
        scene()->visualize(rd, selectedEntity, allSurfaces, sceneVisualizationSettings(), activeCamera());

        // Negative polygon offset pulls debug geometry toward the camera so it
        // is not z-fought by the scene it annotates
        rd->setPolygonOffset(-0.2f);
        if (m_debugSVONodes) {
            m_svo->visualizeNodes(rd, m_debugSVONodeLevel);
        }
        if (m_debugSVOFragments) {
            m_svo->visualizeFragments(rd);
        }
        rd->setPolygonOffset(0.0f);

        // Post-process special effects
        m_depthOfField->apply(rd, m_framebuffer->texture(0), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.hdrFramebuffer.depthGuardBandThickness - m_settings.hdrFramebuffer.colorGuardBandThickness);
        m_motionBlur->apply(rd, m_framebuffer->texture(0), m_gbuffer->texture(GBuffer::Field::SS_EXPRESSIVE_MOTION), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.hdrFramebuffer.depthGuardBandThickness - m_settings.hdrFramebuffer.colorGuardBandThickness);
    } rd->popState();

    if ((submitToDisplayMode() == SubmitToDisplayMode::MAXIMIZE_THROUGHPUT) && (!renderDevice->swapBuffersAutomatically())) {
        // We're about to render to the actual back buffer, so swap the buffers now.
        // This call also allows the screenshot and video recording to capture the
        // previous frame just before it is displayed.
        swapBuffers();
    }

    // Clear the entire screen (needed even though we'll render over it, since
    // AFR uses clear() to detect that the buffer is not re-used.)
    rd->clear();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_framebuffer->texture(0));
}
// Server-side renderer for remote display: renders the frame normally, film
// tone-maps into m_finalFramebuffer, blits that to the local screen, then (if
// a websocket client requested a frame) JPEG-encodes and sends the image.
//
// @param rd          Active render device.
// @param allSurfaces All posed surfaces for this frame.
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    rd->clear();

    // Bind the main framebuffer
    rd->pushState(m_framebuffer); {
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
        m_gbuffer->resize(rd->width(), rd->height());
        m_gbuffer->prepare(rd, activeCamera(), 0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);
        rd->clear();

        // Cull and sort
        Array<shared_ptr<Surface> > sortedVisibleSurfaces;
        Surface::cull(activeCamera()->frame(), activeCamera()->projection(), rd->viewport(), allSurfaces, sortedVisibleSurfaces);
        Surface::sortBackToFront(sortedVisibleSurfaces, activeCamera()->frame().lookVector());

        // Depth pre-pass
        static const bool renderTransmissiveSurfaces = false;
        Surface::renderDepthOnly(rd, sortedVisibleSurfaces, CullFace::BACK, renderTransmissiveSurfaces);

        // Intentionally copy the lighting environment for mutation
        LightingEnvironment environment = scene()->lightingEnvironment();

        if (! m_settings.colorGuardBandThickness.isZero()) {
            rd->setGuardBandClip2D(m_settings.colorGuardBandThickness);
        }

        // Render G-buffer if needed. In this default implementation, it is needed if motion blur is enabled (for velocity) or
        // if face normals have been allocated and ambient occlusion is enabled.
        if (activeCamera()->motionBlurSettings().enabled() || (environment.ambientOcclusionSettings.enabled && notNull(m_gbuffer) && notNull(m_gbuffer->texture(GBuffer::Field::CS_FACE_NORMAL)))) {
            rd->setDepthWrite(false); {
                // We've already rendered the depth
                Surface::renderIntoGBuffer(rd, sortedVisibleSurfaces, m_gbuffer, activeCamera()->previousFrame(), activeCamera()->expressivePreviousFrame());
            } rd->setDepthWrite(true);
        }

        // Compute AO (no depth-peel buffer in this pipeline)
        environment.ambientOcclusionSettings.useDepthPeelBuffer = false;
        m_ambientOcclusion->update(rd, environment.ambientOcclusionSettings, activeCamera(), m_framebuffer->texture(Framebuffer::DEPTH), shared_ptr<Texture>(), m_gbuffer->texture(GBuffer::Field::CS_FACE_NORMAL), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

        // Compute shadow maps and forward-render visible surfaces
        environment.ambientOcclusion = m_ambientOcclusion;
        Surface::render(rd, activeCamera()->frame(), activeCamera()->projection(), sortedVisibleSurfaces, allSurfaces, environment);

        // Call to make the App show the output of debugDraw(...)
        drawDebugShapes();
        scene()->visualize(rd, shared_ptr<Entity>(), sceneVisualizationSettings());

        // Post-process special effects
        m_depthOfField->apply(rd, m_framebuffer->texture(0), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        m_motionBlur->apply(rd, m_framebuffer->texture(0), m_gbuffer->texture(GBuffer::Field::SS_POSITION_CHANGE), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
    } rd->popState();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    rd->push2D(m_finalFramebuffer); {
        rd->clear();
        m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_framebuffer->texture(0));
    } rd->pop2D();

    // Copy the final buffer to the server screen
    rd->push2D(); {
        Draw::rect2D(m_finalFramebuffer->texture(0)->rect2DBounds(), rd, Color3::white(), m_finalFramebuffer->texture(0));
    } rd->pop2D();

    // Network section: guarded by clientSetMutex since the websocket thread
    // mutates clientSet / clientWantsImage
    clientSetMutex.lock();
    screenPrintf("Number of clients: %d\n", clientSet.size());
    //screenPrintf("clientWantsImage: %d\n", clientWantsImage.value());
    if ((clientWantsImage.value() != 0) && (clientSet.size() > 0)) {
        // Send the image to the first client
        mg_connection* conn = *clientSet.begin();
        // JPEG encoding/decoding takes more time but substantially less bandwidth than PNG
        // NOTE(review): this reads m_finalBuffer while the frame was rendered into
        // m_finalFramebuffer — presumably m_finalBuffer aliases its color texture; verify
        mg_websocket_write_image(conn, m_finalBuffer->toImage(ImageFormat::RGB8()), Image::JPEG);
        clientWantsImage = 0;
    }
    clientSetMutex.unlock();
}
// G-buffer-first pipeline with optional depth peeling for ambient occlusion:
// the G-buffer pass doubles as the z-prepass (depth is shared with the forward
// framebuffer), shadow maps are regenerated only when lighting has changed,
// and DoF/motion blur/film finish the frame.
//
// @param rd          Active render device.
// @param allSurfaces All posed surfaces for this frame.
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    if (! scene()) {
        return;
    }

    m_gbuffer->setSpecification(m_gbufferSpecification);
    m_gbuffer->resize(m_framebuffer->width(), m_framebuffer->height());

    // Share the depth buffer with the forward-rendering pipeline
    m_framebuffer->set(Framebuffer::DEPTH, m_gbuffer->texture(GBuffer::Field::DEPTH_AND_STENCIL));
    m_depthPeelFramebuffer->resize(m_framebuffer->width(), m_framebuffer->height());

    // Bind the main framebuffer
    rd->pushState(m_framebuffer); {
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
        m_gbuffer->prepare(rd, activeCamera(), 0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);
        rd->clear();

        // Cull and sort
        Array<shared_ptr<Surface> > sortedVisibleSurfaces;
        Surface::cull(activeCamera()->frame(), activeCamera()->projection(), rd->viewport(), allSurfaces, sortedVisibleSurfaces);
        Surface::sortBackToFront(sortedVisibleSurfaces, activeCamera()->frame().lookVector());

        const bool renderTransmissiveSurfaces = false;

        // Intentionally copy the lighting environment for mutation
        LightingEnvironment environment = scene()->lightingEnvironment();
        environment.ambientOcclusion = m_ambientOcclusion;

        // Render z-prepass and G-buffer.
        Surface::renderIntoGBuffer(rd, sortedVisibleSurfaces, m_gbuffer, activeCamera()->previousFrame(), activeCamera()->expressivePreviousFrame(), renderTransmissiveSurfaces);

        // This could be the OR of several flags; the starter begins with only one motivating algorithm for depth peel
        const bool needDepthPeel = environment.ambientOcclusionSettings.useDepthPeelBuffer;
        if (needDepthPeel) {
            // Render a second depth layer, peeled past the primary depth by
            // the AO separation hint
            rd->pushState(m_depthPeelFramebuffer); {
                rd->clear();
                rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
                Surface::renderDepthOnly(rd, sortedVisibleSurfaces, CullFace::BACK, renderTransmissiveSurfaces, m_framebuffer->texture(Framebuffer::DEPTH), environment.ambientOcclusionSettings.depthPeelSeparationHint);
            } rd->popState();
        }

        if (! m_settings.colorGuardBandThickness.isZero()) {
            rd->setGuardBandClip2D(m_settings.colorGuardBandThickness);
        }

        // Compute AO
        m_ambientOcclusion->update(rd, environment.ambientOcclusionSettings, activeCamera(), m_framebuffer->texture(Framebuffer::DEPTH), m_depthPeelFramebuffer->texture(Framebuffer::DEPTH), m_gbuffer->texture(GBuffer::Field::CS_FACE_NORMAL), m_gbuffer->specification().encoding[GBuffer::Field::CS_FACE_NORMAL], m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

        // Only regenerate shadow maps when something that affects lighting
        // has changed since the last frame we updated them
        RealTime lightingChangeTime = max(scene()->lastEditingTime(), max(scene()->lastLightChangeTime(), scene()->lastVisibleChangeTime()));
        bool updateShadowMaps = false;
        if (lightingChangeTime > m_lastLightingChangeTime) {
            m_lastLightingChangeTime = lightingChangeTime;
            updateShadowMaps = true;
        }

        // No need to write depth, since it was covered by the gbuffer pass
        //rd->setDepthWrite(false);

        // Compute shadow maps and forward-render visible surfaces
        Surface::render(rd, activeCamera()->frame(), activeCamera()->projection(), sortedVisibleSurfaces, allSurfaces, environment, Surface::ALPHA_BINARY, updateShadowMaps, m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

        if (m_showWireframe) {
            Surface::renderWireframe(rd, sortedVisibleSurfaces);
        }

        // Call to make the App show the output of debugDraw(...)
        drawDebugShapes();
        scene()->visualize(rd, sceneVisualizationSettings());

        // Post-process special effects
        m_depthOfField->apply(rd, m_framebuffer->texture(0), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        m_motionBlur->apply(rd, m_framebuffer->texture(0), m_gbuffer->texture(GBuffer::Field::SS_EXPRESSIVE_MOTION), m_gbuffer->specification().encoding[GBuffer::Field::SS_EXPRESSIVE_MOTION], m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
    } rd->popState();

    // We're about to render to the actual back buffer, so swap the buffers now.
    // This call also allows the screenshot and video recording to capture the
    // previous frame just before it is displayed.
    swapBuffers();

    // Clear the entire screen (needed even though we'll render over it, since
    // AFR uses clear() to detect that the buffer is not re-used.)
    rd->clear();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_framebuffer->texture(0));
}
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) { // This implementation is equivalent to the default GApp's. It is repeated here to make it // easy to modify rendering. If you don't require custom rendering, just delete this // method from your application and rely on the base class. if (!scene()) { if ((submitToDisplayMode() == SubmitToDisplayMode::MAXIMIZE_THROUGHPUT) && (!rd->swapBuffersAutomatically())) { swapBuffers(); } rd->clear(); rd->pushState(); { rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame()); drawDebugShapes(); } rd->popState(); return; } updateAudioData(); if (System::time() - m_lastInterestingEventTime > 10.0) { if (Random::common().uniform() < 0.001f) { if (m_sonicSculpturePieces.size() > 0) { int index = Random::common().integer(0, m_sonicSculpturePieces.size() - 1); generatePlayPulse(m_sonicSculpturePieces[index]); m_lastInterestingEventTime = System::time(); } } } handlePlayPulses(); GBuffer::Specification gbufferSpec = m_gbufferSpecification; extendGBufferSpecification(gbufferSpec); m_gbuffer->setSpecification(gbufferSpec); m_gbuffer->resize(m_framebuffer->width(), m_framebuffer->height()); m_gbuffer->prepare(rd, activeCamera(), 0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness); m_renderer->render(rd, m_framebuffer, m_depthPeelFramebuffer, scene()->lightingEnvironment(), m_gbuffer, allSurfaces); // Debug visualizations and post-process effects rd->pushState(m_framebuffer); { // Call to make the App show the output of debugDraw(...) 
rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame()); for (auto& piece : m_sonicSculpturePieces) { piece->render(rd, scene()->lightingEnvironment()); } if (notNull(m_currentSonicSculpturePiece)) { m_currentSonicSculpturePiece->render(rd, scene()->lightingEnvironment()); } for (int i = m_playPlanes.size() - 1; i >= 0; --i) { const PlayPlane& pp = m_playPlanes[i]; if (pp.endWindowIndex < g_sampleWindowIndex) { m_playPlanes.remove(i); } Point3 point = pp.origin + (pp.direction * METERS_PER_SAMPLE_WINDOW * (g_sampleWindowIndex-pp.beginWindowIndex)); Color4 solidColor(1.0f, .02f, .03f, .15f); // Plane plane(point, pp.direction); // Draw::plane(plane, rd, solidColor, Color4::clear()); CFrame planeFrame = pp.frame; planeFrame.translation = point; Vector3 minP(finf(), finf(), finf()); Vector3 maxP(-finf(), -finf(), -finf()); for (auto& piece : m_sonicSculpturePieces) { piece->minMaxValue(planeFrame, minP, maxP); } Box b(Vector3(minP.xy()-Vector2(3,3), 0.0f), Vector3(maxP.xy() + Vector2(3, 3), 0.1f), planeFrame); Draw::box(b, rd, solidColor, Color4::clear()); } drawDebugShapes(); const shared_ptr<Entity>& selectedEntity = (notNull(developerWindow) && notNull(developerWindow->sceneEditorWindow)) ? 
developerWindow->sceneEditorWindow->selectedEntity() : shared_ptr<Entity>(); scene()->visualize(rd, selectedEntity, allSurfaces, sceneVisualizationSettings(), activeCamera()); // Post-process special effects m_depthOfField->apply(rd, m_framebuffer->texture(0), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness); m_motionBlur->apply(rd, m_framebuffer->texture(0), m_gbuffer->texture(GBuffer::Field::SS_EXPRESSIVE_MOTION), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness); } rd->popState(); rd->push2D(m_framebuffer); { rd->setBlendFunc(RenderDevice::BLEND_SRC_ALPHA, RenderDevice::BLEND_ONE_MINUS_SRC_ALPHA); Array<SoundInstance> soundInstances; Synthesizer::global->getSoundInstances(soundInstances); int xOffset = 10; Vector2 dim(256,100); for (int i = 0; i < soundInstances.size(); ++i) { int yOffset = rd->height() - 120 - (120 * i); Draw::rect2D(Rect2D::xywh(Point2(xOffset, yOffset), dim), rd, Color3::white(), soundInstances[i].displayTexture()); float playheadAlpha = ((float)soundInstances[i].currentPosition) / soundInstances[i].audioSample->buffer.size(); float playheadX = xOffset + (playheadAlpha * dim.x); Draw::rect2D(Rect2D::xywh(Point2(playheadX, yOffset), Vector2(1, dim.y)), rd, Color3::yellow()); } static shared_ptr<GFont> font = GFont::fromFile(System::findDataFile("arial.fnt")); float time = System::time() - m_initialTime; if (time < 10.0f) { float fontAlpha = time < 9.0f ? 1.0f : 10.0f - time; font->draw2D(rd, "Press Space to Sculpt", Vector2(rd->width()/2, rd->height()-100.0f), 30.0f, Color4(Color3::black(), fontAlpha), Color4(Color3::white()*0.6f, fontAlpha), GFont::XALIGN_CENTER); } } rd->pop2D(); if ((submitToDisplayMode() == SubmitToDisplayMode::MAXIMIZE_THROUGHPUT) && (!renderDevice->swapBuffersAutomatically())) { // We're about to render to the actual back buffer, so swap the buffers now. 
// This call also allows the screenshot and video recording to capture the // previous frame just before it is displayed. swapBuffers(); } // Clear the entire screen (needed even though we'll render over it, since // AFR uses clear() to detect that the buffer is not re-used.) rd->clear(); // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_framebuffer->texture(0)); }
// Forward pipeline with a combined z-prepass/G-buffer pass (no depth peel):
// cull/sort, G-buffer render, AO, shadowed forward shading, optional
// wireframe, debug shapes, then DoF/motion blur and film tone-mapping.
//
// @param rd          Active render device.
// @param allSurfaces All posed surfaces for this frame.
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    if (! scene()) {
        return;
    }

    // Bind the main frameBuffer
    rd->pushState(m_frameBuffer); {
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
        m_gbuffer->resize(rd->width(), rd->height());
        m_gbuffer->prepare(rd, activeCamera(), 0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);
        rd->clear();

        // Cull and sort
        Array<shared_ptr<Surface> > sortedVisibleSurfaces;
        Surface::cull(activeCamera()->frame(), activeCamera()->projection(), rd->viewport(), allSurfaces, sortedVisibleSurfaces);
        Surface::sortBackToFront(sortedVisibleSurfaces, activeCamera()->frame().lookVector());

        const bool renderTransmissiveSurfaces = false;

        // Intentionally copy the lighting environment for mutation
        LocalLightingEnvironment environment = scene()->localLightingEnvironment();
        environment.ambientOcclusion = m_ambientOcclusion;

        // Render z-prepass and G-buffer. In this default implementation, it is needed if motion blur is enabled (for velocity) or
        // if face normals have been allocated and ambient occlusion is enabled.
        Surface::renderIntoGBuffer(rd, sortedVisibleSurfaces, m_gbuffer, activeCamera()->previousFrame(), renderTransmissiveSurfaces);

        if (! m_settings.colorGuardBandThickness.isZero()) {
            rd->setGuardBandClip2D(m_settings.colorGuardBandThickness);
        }

        // Compute AO (no depth-peel texture supplied)
        m_ambientOcclusion->update(rd, environment.ambientOcclusionSettings, activeCamera(), m_frameBuffer->texture(Framebuffer::DEPTH), shared_ptr<Texture>(), m_gbuffer->texture(GBuffer::Field::CS_FACE_NORMAL), m_gbuffer->specification().encoding[GBuffer::Field::CS_FACE_NORMAL], m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

        // No need to write depth, since it was covered by the gbuffer pass
        //rd->setDepthWrite(false);

        // Compute shadow maps and forward-render visible surfaces
        // (shadow maps unconditionally updated every frame here)
        Surface::render(rd, activeCamera()->frame(), activeCamera()->projection(), sortedVisibleSurfaces, allSurfaces, environment, Surface::ALPHA_BINARY, true, m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

        if (m_showWireframe) {
            Surface::renderWireframe(rd, sortedVisibleSurfaces);
        }

        // Call to make the App show the output of debugDraw(...)
        drawDebugShapes();
        scene()->visualize(rd, sceneVisualizationSettings());

        // Post-process special effects
        // NOTE(review): assumes m_colorBuffer0/m_depthBuffer are the attachments
        // of m_frameBuffer — confirm where they are bound
        m_depthOfField->apply(rd, m_colorBuffer0, m_depthBuffer, activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        m_motionBlur->apply(rd, m_colorBuffer0, m_gbuffer->texture(GBuffer::Field::SS_POSITION_CHANGE), m_gbuffer->specification().encoding[GBuffer::Field::SS_POSITION_CHANGE], m_depthBuffer, activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
    } rd->popState();

    // We're about to render to the actual back buffer, so swap the buffers now.
    // This call also allows the screenshot and video recording to capture the
    // previous frame just before it is displayed.
    swapBuffers();

    // Clear the entire screen (needed even though we'll render over it, since
    // AFR uses clear() to detect that the buffer is not re-used.)
    rd->clear();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_colorBuffer0);
}
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& surface3D) { rd->setColorClearValue(Color3::white() * 0.3f); rd->clear(); // Draw the base geometry as gray with black wireframe rd->pushState(); rd->setPolygonOffset(0.2f); rd->setColor(Color3::white() * 0.10f); for (int i = 0; i < m_sceneGeometry.size(); ++i) { const shared_ptr<Surface>& surface = m_sceneGeometry[i]; CFrame cframe; surface->getCoordinateFrame(cframe); rd->setObjectToWorldMatrix(cframe); surface->sendGeometry(rd); } rd->popState(); rd->pushState(); rd->setColor(Color3::black()); rd->setRenderMode(RenderDevice::RENDER_WIREFRAME); for (int i = 0; i < m_sceneGeometry.size(); ++i) { const shared_ptr<Surface>& surface = m_sceneGeometry[i]; CFrame cframe; surface->getCoordinateFrame(cframe); rd->setObjectToWorldMatrix(cframe); surface->sendGeometry(rd); } rd->popState(); // Draw the extruded geometry as colored wireframe with "glass" interior rd->pushState(); rd->setBlendFunc(RenderDevice::BLEND_ONE, RenderDevice::BLEND_ONE); rd->setDepthWrite(false); Args args; args.setUniform("intensity", 0.1f); CFrame cframe; for (int i = 0; i < m_sceneGeometry.size(); ++i) { const shared_ptr<UniversalSurface>& surface = dynamic_pointer_cast<UniversalSurface>(m_sceneGeometry[i]); if (surface) { surface->getCoordinateFrame(cframe); args.setUniform("MVP", rd->invertYMatrix() * rd->projectionMatrix() * (rd->cameraToWorldMatrix().inverse() * cframe)); surface->gpuGeom()->setShaderArgs(args); LAUNCH_SHADER("extrude.*", args); } } rd->popState(); rd->pushState(); rd->setRenderMode(RenderDevice::RENDER_WIREFRAME); rd->setCullFace(CullFace::NONE); args.setUniform("intensity", 1.0f); for (int i = 0; i < m_sceneGeometry.size(); ++i) { const shared_ptr<UniversalSurface>& surface = dynamic_pointer_cast<UniversalSurface>(m_sceneGeometry[i]); if (notNull(surface)) { surface->getCoordinateFrame(cframe); args.setUniform("MVP", rd->invertYMatrix() * rd->projectionMatrix() * (rd->cameraToWorldMatrix().inverse() * 
cframe)); surface->gpuGeom()->setShaderArgs(args); LAUNCH_SHADER("extrude.*", args); } } rd->popState(); drawDebugShapes(); }