Example #1
0
// Convert a probe's HDR cubemaps to their MDR (medium dynamic range)
// counterparts by delegating to the lower-level colorConvert overload once
// per map: environment source, specular, and diffuse.
void
IBLRenderPass::colorConvert(Ctr::Scene* scene, Ctr::IBLProbe* probe)
{
    {
        // Convert the environment source map to MDR and save it
        // (small memory optimization).
        colorConvert(true, true, scene, probe->environmentCubeMapMDR(), probe->environmentCubeMap(), probe);

        // Convert specular to MDR
        colorConvert(true, true, scene, probe->specularCubeMapMDR(), probe->specularCubeMap(), probe);

        // Convert diffuse to MDR
        colorConvert(true, false, scene, probe->diffuseCubeMapMDR(), probe->diffuseCubeMap(), probe);
    }
}
Example #2
0
// Render-pass entry point for dynamic image-based lighting.
//
// For every IBL probe on the scene: skip it if cached; otherwise either
// (sampleOffset == 0) render the scene into the probe's environment cubemap
// mip chain and run the first refinement pass, or (sampleOffset != 0) run
// another refinement pass over the existing environment map. Both paths
// refine specular and diffuse cubemaps, convert the results to MDR, and
// update the probe's sample count.
//
// A change in either importance-sampling shader hash force-uncaches all
// probes so they are re-rendered with the new shaders.
//
// Device/camera state touched here (viewport, framebuffer, camera transform
// cache, depth/stencil/cull/blend modes) is restored before returning.
void
IBLRenderPass::render (Ctr::Scene* scene)
{
    Ctr::Camera* camera           = scene->camera();

    _deviceInterface->enableDepthWrite();
    _deviceInterface->enableZTest();

    // For each ibl set on the scene, render and process the dynamic ibl.

    _deviceInterface->disableAlphaBlending ();
    _deviceInterface->setCullMode (Ctr::CullNone);
    
    // Remember the incoming viewport so it can be restored at the end.
    Ctr::Viewport oldview;
    _deviceInterface->getViewport(&oldview);
    
    // Render all meshes that have an environment marker.
    const std::vector<Ctr::IBLProbe*>& probes = scene->probes();
    
    const Ctr::Brdf* brdf = scene->activeBrdf();
    if (!brdf)
    {
        THROW("Cannot find brdf to create IBL " << __FILE__ << " " << __LINE__);
    }

    // Detect uncache condition based on importance sampling shaders.
    // If either shader hash changed since the last render, cached probe
    // results are stale and every probe must be regenerated.
    bool forceUncache = false;
    if (_specularHash != brdf->specularImportanceSamplingShader()->hash() ||
        _diffuseHash != brdf->diffuseImportanceSamplingShader()->hash())
    {
        _specularHash = brdf->specularImportanceSamplingShader()->hash();
        _diffuseHash = brdf->diffuseImportanceSamplingShader()->hash();
        forceUncache = true;
    }

    // Save the camera transform cache; it is overridden per-probe below and
    // restored after the probe loop.
    Ctr::CameraTransformCachePtr cachedTransforms = scene->camera()->cameraTransformCache();
    for (auto it = probes.begin(); it != probes.end(); it++)
    {
        // Todo, cull probe by location and range.
        // if (probe->hasInfluence(scene->activeCamera()))
        IBLProbe * probe = *it;
        if (forceUncache)
            probe->uncache();

        bool cached = probe->isCached();
        if (cached)
        {
            continue;
        }
        else if (probe->sampleOffset() == 0)
        {
            // If sample offset is 0, we need to create the environment
            // map and perform a first set of samples.
            _deviceInterface->disableZTest();
            _deviceInterface->disableDepthWrite();
            _deviceInterface->disableStencilTest();
            _deviceInterface->setCullMode (Ctr::CullNone);

             {
				// The ibl probe could also have a znear and zfar.
				// In this example it is more expedient just to use the camera znear - zfar.
				float projNear = camera->zNear();
				float projFar = camera->zFar();
                // 90-degree FOV, square aspect: one face of the cubemap.
                Ctr::Matrix44f proj;
                Ctr::projectionPerspectiveMatrixLH (Ctr::BB_PI * 0.5f,
                                                    1.0, 
                                                    projNear, 
                                                    projFar,
                                                    &proj);
    
                // Setup view matrix for the environment source render.
                _environmentTransformCache->set(probe->basis(), proj, probe->basis(), probe->center(), projNear, projFar, -1);
    
                // Setup camera cache.
                camera->setCameraTransformCache(_environmentTransformCache);
    
                // Set framebuffer to cubemap.
                // Render to environment top level mip (highest resolution).
                size_t mipLevels = probe->environmentCubeMap()->resource()->mipLevels();

                Ctr::Vector2f mipSize = Ctr::Vector2f(float(probe->environmentCubeMap()->resource()->width()), 
                                                    float(probe->environmentCubeMap()->resource()->height()));

                // Render the scene once per mip level, halving the viewport
                // each iteration to match the mip dimensions.
                for (size_t mipId = 0; mipId < mipLevels; mipId++)
                {
                    Ctr::Viewport mipViewport (0.0f, 0.0f, (float)(mipSize.x), (float)(mipSize.y), 0.0f, 1.0f);

                    // Render to top level mip for both cubemaps. A better strategy would be to blit after the first render...
                    Ctr::FrameBuffer framebuffer(probe->environmentCubeMap()->surface(-1, (int32_t)(mipId)), nullptr);
                    _deviceInterface->bindFrameBuffer(framebuffer);
                    _deviceInterface->setViewport(&mipViewport);
                    _deviceInterface->clearSurfaces (0, Ctr::CLEAR_TARGET, 0, 0, 0, 1);
    
                    // Render the scene to cubemap (single pass).
                    //renderMeshes (_passName, scene);
                    // NOTE(review): this inner 'it' shadows the probe iterator
                    // declared in the outer loop above.
                    const std::vector<Ctr::Mesh*>& meshes = scene->meshesForPass(_passName);
                    for (auto it = meshes.begin(); it != meshes.end(); it++)
                    {
                        const Ctr::Mesh* mesh = (*it);
                        const Ctr::Material* material = mesh->material();
                        const Ctr::IShader* shader = material->shader();
                        const Ctr::GpuTechnique* technique = material->technique();
    
                        RenderRequest renderRequest (technique, scene, scene->camera(), mesh);
                        shader->renderMesh(renderRequest);
                    }

                    mipSize.x /= 2.0f;
                    mipSize.y /= 2.0f;
                }

                // Generate mip maps post rendering.
                probe->environmentCubeMap()->generateMipMaps();    
                refineSpecular(scene, probe);
                refineDiffuse(scene, probe);

                colorConvert(scene, probe);
                // Update the sample count
                probe->updateSamples();
            }
            _deviceInterface->enableZTest();
            _deviceInterface->enableDepthWrite();
            _deviceInterface->setCullMode (Ctr::CullNone);
        }
        else
        {
            // Refine samples.
            // If sample offset is 0, we need to create the environment
            // map and perform a first set of samples.
            _deviceInterface->disableZTest();
            _deviceInterface->disableDepthWrite();
            _deviceInterface->disableStencilTest();
            _deviceInterface->setCullMode (Ctr::CullNone);
    
            float projNear = camera->zNear();
            float projFar = camera->zFar();
            Ctr::Matrix44f proj;
            Ctr::projectionPerspectiveMatrixLH (Ctr::BB_PI * 0.5f,
                                                1.0, 
                                                projNear, 
                                                projFar,
                                                &proj);
    
    
            // Setup view matrix
            _environmentTransformCache->set(probe->basis(), proj, probe->basis(), probe->center(), projNear, projFar, -1);
    
            // Setup camera cache.
            scene->camera()->setCameraTransformCache(_environmentTransformCache);
    
            // The environment map already exists; just accumulate more
            // importance samples into the specular/diffuse maps.
            refineSpecular(scene, probe);
            refineDiffuse(scene, probe);

            // Update the sample count
            probe->updateSamples();
            colorConvert(scene, probe);

            _deviceInterface->enableZTest();
            _deviceInterface->enableDepthWrite();
            _deviceInterface->setCullMode (Ctr::CullNone);
        }
    }
    
    // Restore original camera transforms.
    scene->camera()->setCameraTransformCache(cachedTransforms);
    
    // Rebind the scene framebuffer and the viewport saved on entry.
    _deviceInterface->disableAlphaBlending ();
    _deviceInterface->bindFrameBuffer(_deviceInterface->postEffectsMgr()->sceneFrameBuffer());
    _deviceInterface->setViewport(&oldview);

    _deviceInterface->setCullMode (Ctr::CCW);
}
Example #3
0
// Entry point: decodes and plays back a hard-coded MP4 ("/test.mp4") with
// FFmpeg on 3DS, displaying each frame (GPU or software path) and printing
// per-stage timing statistics on exit.
//
// Returns 0 on success, -1 if the movie file could not be opened/set up.
int main(int argc, char *argv[])
{
//    char filename[]="/test400x240-mpeg4-witch.mp4";
//    char filename[]="/test400x240-witch.mp4";
//    char filename[]="/test800x400-witch-900kbps.mp4";
//    char filename[]="/test800x400-witch-1pass.mp4";
//    char filename[]="/test800x400-witch.mp4";
//    char filename[]="/test800x480-witch-mpeg4.mp4";
//    char filename[]="/test320x176-karanokyoukai.mp4";
    char filename[] = "/test.mp4";

    MovieState mvS;

    initServices();

    // Register all formats and codecs
    av_register_all();
    av_log_set_level(AV_LOG_INFO);


    printf("Press start to open the file\n");
    waitForStart();
    int ret = setup(&mvS, filename);
    if (ret)
    {
        waitForStartAndExit();
        return -1;
    }

    printf("Press start to decompress\n");
    waitForStart();

    int i = 0;         // Number of frames decoded and displayed so far.
    int frameFinished; // Set non-zero by avcodec_decode_video2 when a whole frame is ready.

    u64 timeBeginning, timeEnd;
    u64 timeBefore, timeAfter;
    u64 timeDecodeTotal = 0, timeScaleTotal = 0, timeDisplayTotal = 0;

    timeBefore = osGetTime();
    timeBeginning = timeBefore;
    bool stop = false;

    while (av_read_frame(mvS.pFormatCtx, &mvS.packet) >= 0 && !stop)
    {
        // Is this a packet from the video stream?
        if (mvS.packet.stream_index == mvS.videoStream)
        {

            /*********************
             * Decode video frame
             *********************/

            // avcodec_decode_video2 returns the number of bytes consumed
            // (>= 0) on success and a negative AVERROR on failure, so only
            // negative values are real decode errors.
            int err = avcodec_decode_video2(mvS.pCodecCtx, mvS.pFrame, &frameFinished, &mvS.packet);
            if (err < 0)
                printf("decode error\n");
            // Did we get a video frame?
            if (frameFinished)
            {
                err = av_frame_get_decode_error_flags(mvS.pFrame);
                if (err)
                {
                    // Report the error text instead of silently discarding it.
                    char buf[100];
                    av_strerror(err, buf, sizeof(buf));
                    printf("frame decode error flags %d: %s\n", err, buf);
                }
                timeAfter = osGetTime();
                // Note: this also includes av_read_frame time since the
                // previous frame, not just the decode call itself.
                timeDecodeTotal += timeAfter - timeBefore;

                /*******************************
                 * Conversion of decoded frame
                 *******************************/
                timeBefore = osGetTime();
                colorConvert(&mvS);
                timeAfter = osGetTime();

                /***********************
                 * Display of the frame
                 ***********************/
                timeScaleTotal += timeAfter - timeBefore;
                timeBefore = osGetTime();

                if (mvS.renderGpu)
                {
                    gpuRenderFrame(&mvS);
                    gpuEndFrame();
                }
                else display(mvS.outFrame);

                timeAfter = osGetTime();
                timeDisplayTotal += timeAfter - timeBefore;

                ++i;//New frame

                // Allow the user to abort playback with START.
                hidScanInput();
                u32 kDown = hidKeysDown();
                if (kDown & KEY_START)
                    stop = true; // break in order to return to hbmenu
                if (i % 50 == 0)printf("frame %d\n", i);
                timeBefore = osGetTime();
            }

        }

        // Free the packet that was allocated by av_read_frame
        av_free_packet(&mvS.packet);
    }
    timeEnd = timeBefore;

    tearup(&mvS);

    // Guard the statistics against division by zero when playback ended
    // before any frame was displayed (i == 0 or zero elapsed time).
    double elapsedSec = (timeEnd - timeBeginning) / 1000.0;
    if (i > 0 && elapsedSec > 0.0)
    {
        printf("Played %d frames in %f s (%f fps)\n",
               i, elapsedSec, i / elapsedSec);
        printf("\tdecode:\t%llu\t%f perframe"
               "\n\tscale:\t%llu\t%f perframe"
               "\n\tdisplay:\t%llu\t%f perframe\n",
               timeDecodeTotal, timeDecodeTotal / (double) i,
               timeScaleTotal, timeScaleTotal / (double) i,
               timeDisplayTotal, timeDisplayTotal / (double) i);
    }
    else
    {
        printf("No frames were played.\n");
    }

    waitForStartAndExit();
    return 0;
}