Example #1
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    // Bind the main frameBuffer
    rd->pushState(m_frameBuffer); {

        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());

        m_gbuffer->resize(rd->width(), rd->height());
        m_gbuffer->prepare(rd, activeCamera(),  0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);
        if (! m_settings.colorGuardBandThickness.isZero()) {
            rd->setGuardBandClip2D(m_settings.colorGuardBandThickness);
        }        
        
        // Cull and sort
        Array<shared_ptr<Surface> > sortedVisibleSurfaces;
        Surface::cull(activeCamera()->frame(), activeCamera()->projection(), rd->viewport(), allSurfaces, sortedVisibleSurfaces);
        Surface::sortBackToFront(sortedVisibleSurfaces, activeCamera()->frame().lookVector());
        
        // Early-Z pass
        static const bool renderTransmissiveSurfaces = false;
        Surface::renderDepthOnly(rd, sortedVisibleSurfaces, CullFace::BACK, renderTransmissiveSurfaces);
        rd->setDepthWrite(false);
        
        // Render velocity buffer (if needed)
        if (activeCamera()->motionBlurSettings().enabled()) {
            Surface::renderIntoGBuffer(rd, sortedVisibleSurfaces, m_gbuffer, activeCamera()->previousFrame());
        }
        LocalLightingEnvironment environment = m_scene->localLightingEnvironment();
        environment.ambientOcclusion = m_ambientOcclusion;

        // Compute AO
        m_ambientOcclusion->update(rd, environment.ambientOcclusionSettings, activeCamera(), m_depthBuffer);
        
        // Compute shadow maps and forward-render visible surfaces
        Surface::render(rd, activeCamera()->frame(), activeCamera()->projection(), sortedVisibleSurfaces, allSurfaces, environment);
        rd->setDepthWrite(true);
        
        // Call to make the App show the output of debugDraw(...)
        drawDebugShapes();
        
        // Post-process special effects
        m_depthOfField->apply(rd, m_colorBuffer0, m_depthBuffer, activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        
        m_motionBlur->apply(rd, m_colorBuffer0, m_gbuffer->texture(GBuffer::Field::SS_POSITION_CHANGE), 
                            m_gbuffer->specification().encoding[GBuffer::Field::SS_POSITION_CHANGE], m_depthBuffer, activeCamera(), 
                            m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

    } rd->popState();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_colorBuffer0, 1);

    screenPrintf("WASD to move");
    screenPrintf("Mouse to turn");
    screenPrintf("Space to jump");
}
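
Example #1 assumes that m_frameBuffer, m_colorBuffer0, and m_depthBuffer were created elsewhere, typically in App::onInitialize. A minimal sketch of how such attachments might be allocated in G3D is shown below; the helper name, texture formats, and buffer names are assumptions rather than code from the original project.

void App::allocateBuffers(int w, int h) {
    // Hypothetical helper: create an HDR color buffer and a depth buffer and
    // attach them to the framebuffer that onGraphics3D() renders into.
    m_colorBuffer0 = Texture::createEmpty("App::m_colorBuffer0", w, h, ImageFormat::RGBA16F(), Texture::DIM_2D);
    m_depthBuffer  = Texture::createEmpty("App::m_depthBuffer",  w, h, ImageFormat::DEPTH32F(), Texture::DIM_2D);

    m_frameBuffer = Framebuffer::create("App::m_frameBuffer");
    m_frameBuffer->set(Framebuffer::COLOR0, m_colorBuffer0);
    m_frameBuffer->set(Framebuffer::DEPTH,  m_depthBuffer);
}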
Example #2
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& surface3D) {

    m_gbuffer->setSpecification(m_gbufferSpecification);
    m_gbuffer->resize(m_framebuffer->width(), m_framebuffer->height());
    m_gbuffer->prepare(rd, activeCamera(), 0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);

    m_renderer->render(rd, m_framebuffer, m_depthPeelFramebuffer, scene()->lightingEnvironment(), m_gbuffer, surface3D);

    rd->pushState(m_framebuffer); {

        rd->setProjectionAndCameraMatrix(m_debugCamera->projection(), m_debugCamera->frame());
        Array< shared_ptr<Surface> > mySurfaces;
        // Pose our model based on the manipulator axes
        model->pose(mySurfaces, manipulator->frame());
            
        // Set up shared arguments
        Args args;
        configureShaderArgs(args);
            
        // Send model geometry to the graphics card
        CFrame cframe;
        for (int i = 0; i < mySurfaces.size(); ++i) {

            // Downcast to UniversalSurface to access its fields
            shared_ptr<UniversalSurface> surface = dynamic_pointer_cast<UniversalSurface>(mySurfaces[i]);
            if (notNull(surface)) {
                surface->getCoordinateFrame(cframe);
                rd->setObjectToWorldMatrix(cframe);
                surface->gpuGeom()->setShaderArgs(args);

                // (If you want to manually set the material properties and vertex attributes
                // for shader args, they can be accessed from the fields of the gpuGeom.)
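                // For instance (a hedged sketch; the member and uniform names below are
                // assumptions about UniversalSurface::GPUGeom and may differ across G3D versions):
                //   args.setAttributeArray("g3d_Vertex", surface->gpuGeom()->vertex);
                //   args.setAttributeArray("g3d_Normal", surface->gpuGeom()->normal);
                //   surface->material()->setShaderArgs(args, "material_");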
                LAUNCH_SHADER("phong.*", args);
            }
        }
    } rd->popState();

    swapBuffers();
    rd->clear();
    m_film->exposeAndRender(rd, m_debugCamera->filmSettings(), m_framebuffer->texture(0), 1);
}
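
Example #2 relies on a configureShaderArgs(args) helper that is not shown. A minimal sketch of what such a helper might set for the accompanying phong.* shader follows; the uniform names and light values are assumptions and must match whatever the shader actually declares.

void App::configureShaderArgs(Args& args) {
    // Hypothetical Phong uniforms (placeholder names and values).
    args.setUniform("wsLight",       Vector3(1.0f, 2.5f, 2.0f).direction());
    args.setUniform("lightColor",    Color3(5.0f));
    args.setUniform("ambient",       Color3(0.2f));
    args.setUniform("wsEyePosition", m_debugCamera->frame().translation);
}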
Example #3
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    // This implementation is equivalent to the default GApp's. It is repeated here to make it
    // easy to modify rendering. If you don't require custom rendering, just delete this
    // method from your application and rely on the base class.

    if (! scene()) {
        return;
    }
	
    m_gbuffer->setSpecification(m_gbufferSpecification);
    m_gbuffer->resize(m_framebuffer->width(), m_framebuffer->height());
    m_gbuffer->prepare(rd, activeCamera(), 0, -(float)previousSimTimeStep(), m_settings.hdrFramebuffer.depthGuardBandThickness, m_settings.hdrFramebuffer.colorGuardBandThickness);

	
    m_renderer->render(rd, m_framebuffer, m_depthPeelFramebuffer, scene()->lightingEnvironment(), m_gbuffer, allSurfaces);

    // Debug visualizations and post-process effects
    rd->pushState(m_framebuffer); {

        if (m_enableSVO) {
            rd->clear();
            //rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());

            rd->push2D();
            const Vector2int32 guardBand(m_settings.hdrFramebuffer.depthGuardBandThickness - m_settings.hdrFramebuffer.colorGuardBandThickness);
            const Vector2int32 colorRegionExtent = Vector2int32(m_framebuffer->vector2Bounds()) - guardBand * 2;

            Args args;
            rd->setGuardBandClip2D(Vector2int16(guardBand));
            args.setRect(rd->viewport());

            Matrix4 proj;
            activeCamera()->getProjectUnitMatrix(m_framebuffer->rect2DBounds(), proj);
            float focalLength = proj[0][0];

            m_svo->setCurSvoId(0);
            args.setUniform("guardBand", guardBand);

            args.setUniform("focalLength", focalLength);
            args.setUniform("renderRes", Vector2(colorRegionExtent));
            args.setUniform("renderResI", colorRegionExtent);
            args.setUniform("screenRatio", float(colorRegionExtent.y) / float(colorRegionExtent.x));

            m_svo->connectToShader(args, Access::READ, m_svo->maxDepth(), m_svo->maxDepth());

            rd->setColorWrite(true);
            rd->setDepthWrite(false);

            const Matrix4& cameraToVoxelMatrix = Matrix4(m_svo->svoToWorldMatrix()).inverse() * activeCamera()->frame();

            args.setUniform("cameraToVoxelMatrix", cameraToVoxelMatrix);
            args.setUniform("voxelToWorldMatrix", m_svo->svoToWorldMatrix());
            args.setUniform("worldToVoxelMatrix", m_svo->worldToSVOMatrix());
            args.setUniform("wsCameraPos", activeCamera()->frame().translation);
            scene()->lightingEnvironment().setShaderArgs(args);
            args.setUniform("raycastingConeFactor", m_voxelConeAperture);

            rd->setDepthTest(RenderDevice::DEPTH_ALWAYS_PASS); // TODO: write gl_FragDepth and use a regular depth test here
            m_gbuffer->texture(GBuffer::Field::DEPTH_AND_STENCIL)->setShaderArgs(args, "depth_", Sampler::buffer());
            //rd->setBlendFunc(RenderDevice::BLEND_ONE, RenderDevice::BLEND_ONE_MINUS_SRC_ALPHA);

            LAUNCH_SHADER("raycast.pix", args);
            rd->pop2D();
        }

        // Call to make the App show the output of debugDraw(...)
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
        drawDebugShapes();
        const shared_ptr<Entity>& selectedEntity = (notNull(developerWindow) && notNull(developerWindow->sceneEditorWindow)) ? developerWindow->sceneEditorWindow->selectedEntity() : shared_ptr<Entity>();
        scene()->visualize(rd, selectedEntity, allSurfaces, sceneVisualizationSettings(), activeCamera());

        rd->setPolygonOffset(-0.2f);
        if (m_debugSVONodes) {
            m_svo->visualizeNodes(rd, m_debugSVONodeLevel);
        }
        if (m_debugSVOFragments) {
            m_svo->visualizeFragments(rd);
        }
        rd->setPolygonOffset(0.0f);

        // Post-process special effects
        m_depthOfField->apply(rd, m_framebuffer->texture(0), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.hdrFramebuffer.depthGuardBandThickness - m_settings.hdrFramebuffer.colorGuardBandThickness);
        
        m_motionBlur->apply(rd, m_framebuffer->texture(0), m_gbuffer->texture(GBuffer::Field::SS_EXPRESSIVE_MOTION), 
                            m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), 
                            m_settings.hdrFramebuffer.depthGuardBandThickness - m_settings.hdrFramebuffer.colorGuardBandThickness);



    } rd->popState();

    if ((submitToDisplayMode() == SubmitToDisplayMode::MAXIMIZE_THROUGHPUT) && (!renderDevice->swapBuffersAutomatically())) {
        // We're about to render to the actual back buffer, so swap the buffers now.
        // This call also allows the screenshot and video recording to capture the
        // previous frame just before it is displayed.
        swapBuffers();
    }

    // Clear the entire screen (needed even though we'll render over it, since
    // AFR uses clear() to detect that the buffer is not re-used.)
    rd->clear();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_framebuffer->texture(0));
}
Example #4
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    rd->clear();

    // Bind the main framebuffer
    rd->pushState(m_framebuffer); {
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());

        m_gbuffer->resize(rd->width(), rd->height());
        m_gbuffer->prepare(rd, activeCamera(),  0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);
        rd->clear();
        
        // Cull and sort
        Array<shared_ptr<Surface> > sortedVisibleSurfaces;
        Surface::cull(activeCamera()->frame(), activeCamera()->projection(), rd->viewport(), allSurfaces, sortedVisibleSurfaces);
        Surface::sortBackToFront(sortedVisibleSurfaces, activeCamera()->frame().lookVector());
        
        // Depth pre-pass
        static const bool renderTransmissiveSurfaces = false;
        Surface::renderDepthOnly(rd, sortedVisibleSurfaces, CullFace::BACK, renderTransmissiveSurfaces);

        // Intentionally copy the lighting environment for mutation
        LightingEnvironment environment = scene()->lightingEnvironment();

        if (! m_settings.colorGuardBandThickness.isZero()) {
            rd->setGuardBandClip2D(m_settings.colorGuardBandThickness);
        }        
                
        // Render G-buffer if needed.  In this default implementation, it is needed if motion blur is enabled (for velocity) or
        // if face normals have been allocated and ambient occlusion is enabled.
        if (activeCamera()->motionBlurSettings().enabled() || 
            (environment.ambientOcclusionSettings.enabled && 
             notNull(m_gbuffer) && 
             notNull(m_gbuffer->texture(GBuffer::Field::CS_FACE_NORMAL)))) {

            rd->setDepthWrite(false); { 
                // We've already rendered the depth
                Surface::renderIntoGBuffer(rd, sortedVisibleSurfaces, m_gbuffer, activeCamera()->previousFrame(), activeCamera()->expressivePreviousFrame());
            } rd->setDepthWrite(true);
        }
        

        // Compute AO
        environment.ambientOcclusionSettings.useDepthPeelBuffer = false;
        m_ambientOcclusion->update(rd, environment.ambientOcclusionSettings, activeCamera(), m_framebuffer->texture(Framebuffer::DEPTH), shared_ptr<Texture>(), m_gbuffer->texture(GBuffer::Field::CS_FACE_NORMAL), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

        // Compute shadow maps and forward-render visible surfaces
        environment.ambientOcclusion = m_ambientOcclusion;
        Surface::render(rd, activeCamera()->frame(), activeCamera()->projection(), sortedVisibleSurfaces, allSurfaces, environment);
                
        // Call to make the App show the output of debugDraw(...)
        drawDebugShapes();
        scene()->visualize(rd, shared_ptr<Entity>(), sceneVisualizationSettings());

        // Post-process special effects
        m_depthOfField->apply(rd, m_framebuffer->texture(0), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        
        m_motionBlur->apply(rd, m_framebuffer->texture(0), m_gbuffer->texture(GBuffer::Field::SS_POSITION_CHANGE), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), 
                            m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
    } rd->popState();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    rd->push2D(m_finalFramebuffer); {
        rd->clear();
        m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_framebuffer->texture(0));
    } rd->pop2D();

    // Copy the final buffer to the server screen
    rd->push2D(); {
        Draw::rect2D(m_finalFramebuffer->texture(0)->rect2DBounds(), rd, Color3::white(), m_finalFramebuffer->texture(0));
    } rd->pop2D();

    clientSetMutex.lock();
    screenPrintf("Number of clients: %d\n", clientSet.size());
    //screenPrintf("clientWantsImage: %d\n", clientWantsImage.value());

    if ((clientWantsImage.value() != 0) && (clientSet.size() > 0)) {
        // Send the image to the first client
        mg_connection* conn = *clientSet.begin();

        // JPEG encoding/decoding takes more time but substantially less bandwidth than PNG
        mg_websocket_write_image(conn, m_finalBuffer->toImage(ImageFormat::RGB8()), Image::JPEG);
        clientWantsImage = 0;
    }
    clientSetMutex.unlock();

}
Example #5
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    if (! scene()) {
        return;
    }
    
    m_gbuffer->setSpecification(m_gbufferSpecification);
    m_gbuffer->resize(m_framebuffer->width(), m_framebuffer->height());

    // Share the depth buffer with the forward-rendering pipeline
    m_framebuffer->set(Framebuffer::DEPTH, m_gbuffer->texture(GBuffer::Field::DEPTH_AND_STENCIL));

    m_depthPeelFramebuffer->resize(m_framebuffer->width(), m_framebuffer->height());

    // Bind the main framebuffer
    rd->pushState(m_framebuffer); {
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());

        m_gbuffer->prepare(rd, activeCamera(),  0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);
        rd->clear();
        
        // Cull and sort
        Array<shared_ptr<Surface> > sortedVisibleSurfaces;
        Surface::cull(activeCamera()->frame(), activeCamera()->projection(), rd->viewport(), allSurfaces, sortedVisibleSurfaces);
        Surface::sortBackToFront(sortedVisibleSurfaces, activeCamera()->frame().lookVector());
        
        const bool renderTransmissiveSurfaces = false;

        // Intentionally copy the lighting environment for mutation
        LightingEnvironment environment = scene()->lightingEnvironment();
        environment.ambientOcclusion = m_ambientOcclusion;
       
        // Render z-prepass and G-buffer.
        Surface::renderIntoGBuffer(rd, sortedVisibleSurfaces, m_gbuffer, activeCamera()->previousFrame(), activeCamera()->expressivePreviousFrame(), renderTransmissiveSurfaces);

        // This could be the OR of several flags; the starter begins with only one motivating algorithm for depth peel
        const bool needDepthPeel = environment.ambientOcclusionSettings.useDepthPeelBuffer;
        if (needDepthPeel) {
            rd->pushState(m_depthPeelFramebuffer); {
                rd->clear();
                rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
                Surface::renderDepthOnly(rd, sortedVisibleSurfaces, CullFace::BACK, renderTransmissiveSurfaces, m_framebuffer->texture(Framebuffer::DEPTH), environment.ambientOcclusionSettings.depthPeelSeparationHint);
            } rd->popState();
        }
        

        if (! m_settings.colorGuardBandThickness.isZero()) {
            rd->setGuardBandClip2D(m_settings.colorGuardBandThickness);
        }        

        // Compute AO
        m_ambientOcclusion->update(rd, environment.ambientOcclusionSettings, activeCamera(), m_framebuffer->texture(Framebuffer::DEPTH), m_depthPeelFramebuffer->texture(Framebuffer::DEPTH), m_gbuffer->texture(GBuffer::Field::CS_FACE_NORMAL), m_gbuffer->specification().encoding[GBuffer::Field::CS_FACE_NORMAL], m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);


        RealTime lightingChangeTime = max(scene()->lastEditingTime(), max(scene()->lastLightChangeTime(), scene()->lastVisibleChangeTime()));
        bool updateShadowMaps = false;
        if (lightingChangeTime > m_lastLightingChangeTime) {
            m_lastLightingChangeTime = lightingChangeTime;
            updateShadowMaps = true;
        }
        // No need to write depth, since it was covered by the gbuffer pass
        //rd->setDepthWrite(false);
        // Compute shadow maps and forward-render visible surfaces
        Surface::render(rd, activeCamera()->frame(), activeCamera()->projection(), sortedVisibleSurfaces, allSurfaces, environment, Surface::ALPHA_BINARY, updateShadowMaps, m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        
        if (m_showWireframe) {
            Surface::renderWireframe(rd, sortedVisibleSurfaces);
        }
                
        // Call to make the App show the output of debugDraw(...)
        drawDebugShapes();
        scene()->visualize(rd, sceneVisualizationSettings());

        // Post-process special effects
        m_depthOfField->apply(rd, m_framebuffer->texture(0), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        
        m_motionBlur->apply(rd, m_framebuffer->texture(0), m_gbuffer->texture(GBuffer::Field::SS_EXPRESSIVE_MOTION), 
                            m_gbuffer->specification().encoding[GBuffer::Field::SS_EXPRESSIVE_MOTION], m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), 
                            m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

    } rd->popState();

    // We're about to render to the actual back buffer, so swap the buffers now.
    // This call also allows the screenshot and video recording to capture the
    // previous frame just before it is displayed.
    swapBuffers();

    // Clear the entire screen (needed even though we'll render over it, since
    // AFR uses clear() to detect that the buffer is not re-used.)
    rd->clear();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_framebuffer->texture(0));
}
Example #6
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    // This implementation is equivalent to the default GApp's. It is repeated here to make it
    // easy to modify rendering. If you don't require custom rendering, just delete this
    // method from your application and rely on the base class.

    if (!scene()) {
        if ((submitToDisplayMode() == SubmitToDisplayMode::MAXIMIZE_THROUGHPUT) && (!rd->swapBuffersAutomatically())) {
            swapBuffers();
        }
        rd->clear();
        rd->pushState(); {
            rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());
            drawDebugShapes();
        } rd->popState();
        return;
    }

    updateAudioData();

    if (System::time() - m_lastInterestingEventTime > 10.0) {
        if (Random::common().uniform() < 0.001f) {
            if (m_sonicSculpturePieces.size() > 0) {
                int index = Random::common().integer(0, m_sonicSculpturePieces.size() - 1);
                generatePlayPulse(m_sonicSculpturePieces[index]);
                m_lastInterestingEventTime = System::time();
            }
        }
    }
    handlePlayPulses();

    GBuffer::Specification gbufferSpec = m_gbufferSpecification;
    extendGBufferSpecification(gbufferSpec);
    m_gbuffer->setSpecification(gbufferSpec);
    m_gbuffer->resize(m_framebuffer->width(), m_framebuffer->height());
    m_gbuffer->prepare(rd, activeCamera(), 0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);

    m_renderer->render(rd, m_framebuffer, m_depthPeelFramebuffer, scene()->lightingEnvironment(), m_gbuffer, allSurfaces);

    // Debug visualizations and post-process effects
    rd->pushState(m_framebuffer); {
        // Call to make the App show the output of debugDraw(...)
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());

        for (auto& piece : m_sonicSculpturePieces) {
            piece->render(rd, scene()->lightingEnvironment());
        }
        if (notNull(m_currentSonicSculpturePiece)) {
            m_currentSonicSculpturePiece->render(rd, scene()->lightingEnvironment());
        }
        for (int i = m_playPlanes.size() - 1; i >= 0; --i) {
            const PlayPlane& pp = m_playPlanes[i];
            if (pp.endWindowIndex < g_sampleWindowIndex) {
                // The plane has expired; remove it and skip drawing so that the
                // reference pp is not used after removal.
                m_playPlanes.remove(i);
                continue;
            }

            Point3 point = pp.origin + (pp.direction * METERS_PER_SAMPLE_WINDOW * (g_sampleWindowIndex - pp.beginWindowIndex));

            Color4 solidColor(1.0f, .02f, .03f, .15f);
            //Plane plane(point, pp.direction);
            //Draw::plane(plane, rd, solidColor, Color4::clear());

            CFrame planeFrame = pp.frame;
            planeFrame.translation = point;
            Vector3 minP(finf(), finf(), finf());
            Vector3 maxP(-finf(), -finf(), -finf());
            for (auto& piece : m_sonicSculpturePieces) {
                piece->minMaxValue(planeFrame, minP, maxP);
            }
            Box b(Vector3(minP.xy()-Vector2(3,3), 0.0f), Vector3(maxP.xy() + Vector2(3, 3), 0.1f), planeFrame);

            Draw::box(b, rd, solidColor, Color4::clear());
        }


        drawDebugShapes();
        const shared_ptr<Entity>& selectedEntity = (notNull(developerWindow) && notNull(developerWindow->sceneEditorWindow)) ? developerWindow->sceneEditorWindow->selectedEntity() : shared_ptr<Entity>();
        scene()->visualize(rd, selectedEntity, allSurfaces, sceneVisualizationSettings(), activeCamera());

        // Post-process special effects
        m_depthOfField->apply(rd, m_framebuffer->texture(0), m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

        m_motionBlur->apply(rd, m_framebuffer->texture(0), m_gbuffer->texture(GBuffer::Field::SS_EXPRESSIVE_MOTION),
            m_framebuffer->texture(Framebuffer::DEPTH), activeCamera(),
            m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
    } rd->popState();

    rd->push2D(m_framebuffer); {
        rd->setBlendFunc(RenderDevice::BLEND_SRC_ALPHA, RenderDevice::BLEND_ONE_MINUS_SRC_ALPHA);
        Array<SoundInstance> soundInstances;
        Synthesizer::global->getSoundInstances(soundInstances);
        int xOffset = 10;
        Vector2 dim(256,100);
        for (int i = 0; i < soundInstances.size(); ++i) {
            int yOffset = rd->height() - 120 - (120 * i);
            Draw::rect2D(Rect2D::xywh(Point2(xOffset, yOffset), dim), rd, Color3::white(), soundInstances[i].displayTexture());
            float playheadAlpha = ((float)soundInstances[i].currentPosition) / soundInstances[i].audioSample->buffer.size();
            float playheadX = xOffset + (playheadAlpha * dim.x);
            Draw::rect2D(Rect2D::xywh(Point2(playheadX, yOffset), Vector2(1, dim.y)), rd, Color3::yellow());
        }


	
        static shared_ptr<GFont> font = GFont::fromFile(System::findDataFile("arial.fnt"));
        float time = System::time() - m_initialTime;
        if (time < 10.0f) {
            float fontAlpha = time < 9.0f ? 1.0f : 10.0f - time;
            font->draw2D(rd, "Press Space to Sculpt", Vector2(rd->width() / 2, rd->height() - 100.0f), 30.0f, Color4(Color3::black(), fontAlpha), Color4(Color3::white() * 0.6f, fontAlpha), GFont::XALIGN_CENTER);
        }
    } rd->pop2D();

    if ((submitToDisplayMode() == SubmitToDisplayMode::MAXIMIZE_THROUGHPUT) && (!renderDevice->swapBuffersAutomatically())) {
        // We're about to render to the actual back buffer, so swap the buffers now.
        // This call also allows the screenshot and video recording to capture the
        // previous frame just before it is displayed.
        swapBuffers();
    }

    // Clear the entire screen (needed even though we'll render over it, since
    // AFR uses clear() to detect that the buffer is not re-used.)
    rd->clear();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_framebuffer->texture(0));
}
Example #7
void App::onGraphics3D(RenderDevice* rd, Array<shared_ptr<Surface> >& allSurfaces) {
    if (! scene()) {
        return;
    }

    // Bind the main frameBuffer
    rd->pushState(m_frameBuffer); {
        rd->setProjectionAndCameraMatrix(activeCamera()->projection(), activeCamera()->frame());

        m_gbuffer->resize(rd->width(), rd->height());
        m_gbuffer->prepare(rd, activeCamera(),  0, -(float)previousSimTimeStep(), m_settings.depthGuardBandThickness, m_settings.colorGuardBandThickness);
        rd->clear();
        
        // Cull and sort
        Array<shared_ptr<Surface> > sortedVisibleSurfaces;
        Surface::cull(activeCamera()->frame(), activeCamera()->projection(), rd->viewport(), allSurfaces, sortedVisibleSurfaces);
        Surface::sortBackToFront(sortedVisibleSurfaces, activeCamera()->frame().lookVector());
        
        const bool renderTransmissiveSurfaces = false;

        // Intentionally copy the lighting environment for mutation
        LocalLightingEnvironment environment = scene()->localLightingEnvironment();
        environment.ambientOcclusion = m_ambientOcclusion;
       
        // Render z-prepass and G-buffer.  In this default implementation, it is needed if motion blur is enabled (for velocity) or
        // if face normals have been allocated and ambient occlusion is enabled.
        Surface::renderIntoGBuffer(rd, sortedVisibleSurfaces, m_gbuffer, activeCamera()->previousFrame(), renderTransmissiveSurfaces);

        if (! m_settings.colorGuardBandThickness.isZero()) {
            rd->setGuardBandClip2D(m_settings.colorGuardBandThickness);
        }        

        // Compute AO
        m_ambientOcclusion->update(rd, environment.ambientOcclusionSettings, activeCamera(), m_frameBuffer->texture(Framebuffer::DEPTH), shared_ptr<Texture>(), m_gbuffer->texture(GBuffer::Field::CS_FACE_NORMAL), m_gbuffer->specification().encoding[GBuffer::Field::CS_FACE_NORMAL], m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

        // No need to write depth, since it was covered by the gbuffer pass
        //rd->setDepthWrite(false);
        // Compute shadow maps and forward-render visible surfaces
        Surface::render(rd, activeCamera()->frame(), activeCamera()->projection(), sortedVisibleSurfaces, allSurfaces, environment, Surface::ALPHA_BINARY, true, m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        
        if (m_showWireframe) {
            Surface::renderWireframe(rd, sortedVisibleSurfaces);
        }
                
        // Call to make the App show the output of debugDraw(...)
        drawDebugShapes();
        scene()->visualize(rd, sceneVisualizationSettings());

        // Post-process special effects
        m_depthOfField->apply(rd, m_colorBuffer0, m_depthBuffer, activeCamera(), m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);
        
        m_motionBlur->apply(rd, m_colorBuffer0, m_gbuffer->texture(GBuffer::Field::SS_POSITION_CHANGE), 
                            m_gbuffer->specification().encoding[GBuffer::Field::SS_POSITION_CHANGE], m_depthBuffer, activeCamera(), 
                            m_settings.depthGuardBandThickness - m_settings.colorGuardBandThickness);

    } rd->popState();

    // We're about to render to the actual back buffer, so swap the buffers now.
    // This call also allows the screenshot and video recording to capture the
    // previous frame just before it is displayed.
    swapBuffers();

    // Clear the entire screen (needed even though we'll render over it, since
    // AFR uses clear() to detect that the buffer is not re-used.)
    rd->clear();

    // Perform gamma correction, bloom, and SSAA, and write to the native window frame buffer
    m_film->exposeAndRender(rd, activeCamera()->filmSettings(), m_colorBuffer0);
}