Code Example #1
File: voxelize.cpp Project: eile/Fivox
    void sample()
    {
        ::fivox::URI uri = getURI();

        // for compatibility
        if( _vm.count( "size" ))
            uri.addQuery( "size", std::to_string( _vm["size"].as< size_t >( )));

        const ::fivox::URIHandler params( uri );
        auto source = params.newImageSource< fivox::FloatVolume >();

        const fivox::Vector3f& extent( source->getSizeInMicrometer( ));
        const size_t size( std::ceil( source->getSizeInVoxel().find_max( )));

        const VolumeHandler volumeHandler( size, extent );
        VolumePtr output = source->GetOutput();

        output->SetRegions( volumeHandler.computeRegion( _decompose ));
        output->SetSpacing( volumeHandler.computeSpacing( ));
        const fivox::AABBf& bbox = source->getBoundingBox();
        output->SetOrigin( volumeHandler.computeOrigin( bbox.getCenter( )));

        ::fivox::EventSourcePtr loader = source->getEventSource();
        const fivox::Vector2ui frameRange( getFrameRange( loader->getDt( )));

        const std::string& datatype( _vm["datatype"].as< std::string >( ));
        if( datatype == "char" )
        {
            LBINFO << "Sampling volume as char (uint8_t) data" << std::endl;
            _sample< uint8_t >( source, frameRange, params, _outputFile );
        }
        else if( datatype == "short" )
        {
            LBINFO << "Sampling volume as short (uint16_t) data" << std::endl;
            _sample< uint16_t >( source, frameRange, params, _outputFile );
        }
        else if( datatype == "int" )
        {
            LBINFO << "Sampling volume as int (uint32_t) data" << std::endl;
            _sample< uint32_t >( source, frameRange, params, _outputFile );
        }
        else
        {
            LBINFO << "Sampling volume as floating point data" << std::endl;
            _sample< float >( source, frameRange, params, _outputFile );
        }
    }
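Note on the datatype dispatch at the end of this example: the command-line string picks which _sample< T > instantiation runs, i.e. the voxel type written out. Below is a minimal standalone sketch of that pattern only; writeAs() and dispatch() are hypothetical names, not Fivox API.

#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical stand-in for _sample< T >(): the string selected on the
// command line determines the template instantiation, i.e. the voxel type.
template< typename PixelT >
void writeAs() { std::cout << sizeof( PixelT ) << "-byte voxels\n"; }

void dispatch( const std::string& datatype )
{
    if( datatype == "char" )       writeAs< std::uint8_t >();
    else if( datatype == "short" ) writeAs< std::uint16_t >();
    else if( datatype == "int" )   writeAs< std::uint32_t >();
    else                           writeAs< float >();
}

int main() { dispatch( "short" ); } // prints "2-byte voxels"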
Code Example #2
std::string PointsFileSystem::getPointsFolder(int frame)
{
    QModelIndex root_index = index(rootPath());

    int start, end;
    getFrameRange(start, end);
    if (start < 0 || end < 0)
        return std::string();

    std::string folder;

    QString root_path = rootPath();
    if (root_path.contains("frame_"))
    {
        folder = root_path.toStdString();
    }
    else if (root_path.contains("points"))
    {
        // does not work??
        /*std::cout << root_path.toStdString() << std::endl;
        QModelIndex frame_index = index(frame-start, 0, root_index);
        folder = filePath(frame_index).toStdString();*/

        QString frame_name(QString("frame_%1").arg(frame, 5, 10, QChar('0')));
        QStringList root_entries = QDir(root_path).entryList();
        for (QStringList::const_iterator root_entries_it = root_entries.begin();
            root_entries_it != root_entries.end(); ++ root_entries_it)
        {
            if (root_entries_it->compare(frame_name) == 0)
            {
                folder = (root_path + QString("/%1").arg(*root_entries_it)).toStdString();
                break;
            }
        }
    }

    return folder;
}
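A quick standalone check of the "frame_%1" naming scheme the loop above searches for; the frame number 42 is only an example value, not taken from the project.

#include <QString>
#include <iostream>

int main()
{
    // arg( value, fieldWidth, base, fillChar ): width 5, base 10, zero-padded,
    // so frame 42 yields the folder name "frame_00042".
    const QString name = QString( "frame_%1" ).arg( 42, 5, 10, QChar( '0' ));
    std::cout << name.toStdString() << std::endl; // frame_00042
    return 0;
}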
Code Example #3
File: clip.cpp Project: JohanAberg/Ramen
void clip_t::getUnmappedFrameRange(double &unmappedStartFrame, double &unmappedEndFrame) const
{
    getFrameRange( unmappedStartFrame, unmappedEndFrame);
}
Code Example #4
File: lumaenc.cpp Project: PeterZs/lumahdrv
// Parse parameter options from command line
bool setParams(int argc, char* argv[], LumaEncoderParams *params, IOData *io)
{
    std::string frames;
    std::string ptf, ptfValues[] = {"PSI", "PQ", "LOG", "HDRVDP", "LINEAR"}; // valid ptf input values
    std::string cs, csValues[] = {"LUV", "RGB", "YCBCR", "XYZ"}; // valid color space input values
    unsigned int bdValues[] = {8, 10, 12}; // valid bit depths

    // Application usage info
    std::string info = std::string("lumaenc -- Compress a sequence of high dynamic range (HDR) frames into a Matroska (.mkv) HDR video\n\n") +
                       std::string("Usage: lumaenc --input <hdr_frames> \\\n") +
                       std::string("               --frames <start_frame:step:end_frame> \\\n") +
                       std::string("               --output <output>\n");
    std::string postInfo = std::string("\nExample: lumaenc -i hdr_frame_%05d.exr -f 1:100 -o hdr_video.mkv\n\n") +
                           std::string("See man page for more information.");
    ArgParser argHolder(info, postInfo);

    // Input arguments
    argHolder.add(&io->hdrFrames,            "--input",             "-i",   "Input HDR video sequence");
    argHolder.add(&io->outputFile,           "--output",            "-o",   "Output location of the compressed HDR video", 0);
    argHolder.add(&frames,                   "--frames",            "-f",   "Input frames, formatted as startframe:step:endframe");
    argHolder.add(&params->fps,              "--framerate",         "-fps", "Framerate of video stream, specified as frames/s");
    argHolder.add(&params->profile,          "--profile",           "-p",   "VP9 encoding profile", (unsigned int)(0), (unsigned int)(3));
    argHolder.add(&params->quantizerScale,   "--quantizer-scaling", "-q",   "Scaling of the encoding quantization", (unsigned int)(0), (unsigned int)(63));
    argHolder.add(&params->preScaling,       "--pre-scaling",       "-sc",  "Scaling of pixels to apply before transformation and encoding", 0.0f, 1e20f);
    argHolder.add(&params->ptfBitDepth,      "--ptf-bitdepth",      "-pb",  "Bit depth of the perceptual transfer function", (unsigned int)(0), (unsigned int)(16));
    argHolder.add(&params->colorBitDepth,    "--color-bitdepth",    "-cb",  "Bit depth of the color channels", (unsigned int)(0), (unsigned int)(16));
    argHolder.add(&ptf,                      "--transfer-function", "-ptf", "The perceptual transfer function used for encoding", ptfValues, 5);
    argHolder.add(&cs,                       "--color-space",       "-cs",  "Color space for encoding", csValues, 4);
    argHolder.add(&params->bitrate,          "--bitrate",           "-b",   "HDR video stream target bandwidth, in Kb/s", (unsigned int)(0), (unsigned int)(9999));
    argHolder.add(&params->keyframeInterval, "--keyframe-interval", "-k",   "Interval between keyframes. 0 for automatic keyframes", (unsigned int)(0), (unsigned int)(9999));
    argHolder.add(&params->bitDepth,         "--encoding-bitdepth", "-eb",  "Encoding at 8, 10 or 12 bits", bdValues, 3);
    argHolder.add(&params->lossLess,         "--lossless",          "-l",   "Enable lossless encoding mode");
    argHolder.add(&io->verbose,              "--verbose",           "-v",   "Verbose mode");

    // Parse arguments
    if (!argHolder.read(argc, argv))
        return false;
    
    // Check output format
    if (!hasExtension(io->outputFile.c_str(), ".mkv"))
        throw ParserException("Unsupported output format. HDR video should be stored as Matroska file (.mkv)");
    
    // Parse frame range
    if (frames.size() > 0 && !getFrameRange(frames, io->startFrame, io->stepFrame, io->endFrame))
        throw ParserException(std::string("Unable to parse frame range from '" + frames + "'. Valid format is startframe:step:endframe").c_str());
    
    // Valid frame range?
    if (io->endFrame < io->startFrame)
        throw ParserException(std::string("Invalid frame range '" + frames + "'. End frame should be >= start frame").c_str());

    // Translate input strings to enums
    if (!strcmp(ptf.c_str(), ptfValues[0].c_str()))
        params->ptf = LumaQuantizer::PTF_PSI;
    else if (!strcmp(ptf.c_str(), ptfValues[1].c_str()))
        params->ptf = LumaQuantizer::PTF_PQ;
    else if (!strcmp(ptf.c_str(), ptfValues[2].c_str()))
        params->ptf = LumaQuantizer::PTF_LOG;
    else if (!strcmp(ptf.c_str(), ptfValues[3].c_str()))
        params->ptf = LumaQuantizer::PTF_JND_HDRVDP;
    else if (!strcmp(ptf.c_str(), ptfValues[4].c_str()))
        params->ptf = LumaQuantizer::PTF_LINEAR;

    if (!strcmp(cs.c_str(), csValues[0].c_str()))
        params->colorSpace = LumaQuantizer::CS_LUV;
    else if (!strcmp(cs.c_str(), csValues[1].c_str()))
        params->colorSpace = LumaQuantizer::CS_RGB;
    else if (!strcmp(cs.c_str(), csValues[2].c_str()))
        params->colorSpace = LumaQuantizer::CS_YCBCR;
    else if (!strcmp(cs.c_str(), csValues[3].c_str()))
        params->colorSpace = LumaQuantizer::CS_XYZ;

    return true;
}
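The getFrameRange() called near the end of this example is declared elsewhere in lumahdrv and is not shown here. Below is only a sketch of a parser that would satisfy that call, assuming unsigned integer fields and an implied step of 1 when the string is just start:end (as in the "1:100" usage example above).

#include <cstdio>
#include <string>

// Hypothetical stand-in for the project's getFrameRange(): parses
// "start:step:end" or "start:end" and reports failure via the return value.
bool getFrameRange( const std::string& frames,
                    unsigned int& start, unsigned int& step, unsigned int& end )
{
    if( std::sscanf( frames.c_str(), "%u:%u:%u", &start, &step, &end ) == 3 )
        return true;
    if( std::sscanf( frames.c_str(), "%u:%u", &start, &end ) == 2 )
    {
        step = 1;
        return true;
    }
    return false;
}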
Code Example #5
File: computeVSD.cpp Project: BlueBrain/Fivox
    void sample()
    {
        ::fivox::URI uri = getURI();

        const size_t sensorRes( _vm["sensor-res"].as< size_t >( ));
        const size_t sensorDim( _vm["sensor-dim"].as< size_t >( ));
        const float resolution = (float)sensorDim / sensorRes;
        // the URI handler takes voxels/unit as resolution
        uri.addQuery( "resolution", std::to_string( 1 / resolution ));

        const ::fivox::URIHandler params( uri );
        ImageSourcePtr source = params.newImageSource< fivox::FloatVolume >();

        _eventSource = source->getEventSource();
        std::shared_ptr< fivox::VSDLoader > vsdLoader =
                std::static_pointer_cast< fivox::VSDLoader >( _eventSource );

        const float v0 = _vm["v0"].as< float >();
        const float g0 = _vm["g0"].as< float >();

        LBINFO << "VSD info: V0 = " << v0 << " mV; G0 = " << g0 << std::endl;
        vsdLoader->setRestingPotential( v0 );
        vsdLoader->setAreaMultiplier( g0 );

        if( _vm.count( "ap-threshold" ))
        {
            vsdLoader->setSpikeFilter( true );
            const float apThreshold = _vm["ap-threshold"].as< float >();
            LBINFO << "Action potential threshold set to " << apThreshold
                   << " mV." << std::endl;
            vsdLoader->setApThreshold( apThreshold );
        }

        if( _vm.count( "curve" ))
        {
            const std::string& curveFile = _vm["curve"].as< std::string >();
            const float depth = _vm["depth"].as< float >();

            const bool interpolate = _vm.count( "interpolate-attenuation" );
            LBINFO << "Using '" << curveFile << "' as the dye curve file; "
                   << "depth of " << depth << " micrometers. "
                   << "Attenuation values will" << (!interpolate ? " not " :" ")
                   << "be interpolated." << std::endl;

            const fivox::AttenuationCurve dye( curveFile, depth );
            vsdLoader->setCurve( dye );
            vsdLoader->setInterpolation( interpolate );
        }

        const size_t size( std::ceil( source->getSizeInVoxel().find_max( )));

        // crop the volume region to the specified sensor dimensions
        fivox::Vector3f extent( source->getSizeInMicrometer( ));
        extent[0] = sensorDim;
        extent[2] = sensorDim;

        const fivox::VolumeHandler volumeHandler( size, extent );
        fivox::FloatVolume::IndexType vIndex;
        vIndex.Fill(0);
        fivox::FloatVolume::SizeType vSize;
        vSize[0] = extent[0] / resolution;
        vSize[1] = extent[1] / resolution;
        vSize[2] = extent[2] / resolution;

        VolumePtr output = source->GetOutput();
        output->SetRegions( fivox::FloatVolume::RegionType( vIndex, vSize ));

        fivox::AABBf bboxSomas;
        const auto& somaPositions = vsdLoader->getSomaPositions();
        for( const auto& position : somaPositions )
            bboxSomas.merge( position );

        // pixel/voxel size
        const auto spacing = volumeHandler.computeSpacing();
        // left bottom corner of the image/volume
        const auto origin = volumeHandler.computeOrigin( bboxSomas.getCenter());

        if( _vm.count( "soma-pixels" ))
        {
            const auto& fileName = _vm["soma-pixels"].as< std::string >();
            std::ofstream file( fileName );
            if( !file.is_open( ))
                LBERROR << "File " << fileName << " could not be opened"
                        << std::endl;
            else
            {
                file << "# Soma position and corresponding pixel index for "
                        "each cell, in the following format:\n"
                     << "#     gid [ posX posY posZ ]: i j\n"
                     << "# File version: 1\n"
                     << "# Fivox version: " << fivox::Version::getString()
                     << std::endl;

                size_t i = 0;
                const auto& gids = vsdLoader->getGIDs();
                for( const auto& gid : gids )
                {
                    if( file.bad( ))
                        break;
                    const auto& pos = somaPositions[i++];
                    file << gid << " " << pos << ": "
                         << std::floor((pos[0] - origin[0]) / spacing[0]) << " "
                         << std::floor((pos[2] - origin[2]) / spacing[1])
                         << std::endl;
                }
            }
            if( file.good( ))
                LBINFO << "Soma positions written as " << fileName << std::endl;
            else
                LBERROR << "Error while writing to " << fileName << std::endl;
        }

        output->SetSpacing( spacing );
        output->SetOrigin( origin );

        VolumeWriter< float > writer( output, fivox::Vector2ui( ));

        const fivox::Vector2ui frameRange( getFrameRange( _eventSource->getDt( )));
        size_t numDigits = std::to_string( frameRange.y( )).length();
        if( _vm.count( "times" ))
        {
            const float endTime = _vm["times"].as< fivox::Vector2f >()[1];
            std::ostringstream s;
            s << std::fixed << std::setprecision(1) << endTime;
            numDigits = s.str().length();
        }

        if( _vm.count( "export-point-sprites" ))
        {
            _writePointSpriteHeader();
            _writePointSpritePositions();
        }

        for( uint32_t i = frameRange.x(); i < frameRange.y(); ++i )
        {
            std::string filename = _outputFile;
            if( frameRange.y() - frameRange.x() > 1 )
            {
                // append the frame number if --frames, timestamp otherwise
                std::ostringstream os;
                os << filename << std::setfill('0') << std::setw( numDigits );
                if( _vm.count( "times" ))
                    os << std::fixed << std::setprecision(1)
                       << i * vsdLoader->getDt();
                else
                    os << i;

                filename = os.str();
            }

            _eventSource->setFrame( i );
            source->Modified();

            if( _vm.count( "export-volume" ))
            {
                const std::string& volumeName = filename + ".mhd";
                writer->SetFileName( volumeName );
                source->Modified();
                writer->Update(); // Run pipeline to write volume
                LBINFO << "Volume written as " << volumeName << std::endl;
            }

            projectVSD( output, filename );

            if( _vm.count( "export-point-sprites" ))
                _writePointSpriteIntensities( filename );
        }
    }
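The soma-to-pixel indices written to the soma-pixels file above come from a plain world-to-grid conversion; a tiny numeric check with made-up values:

#include <cmath>
#include <iostream>

int main()
{
    // A soma at x = 123.4 um in a volume whose origin is at x = 100 um with
    // 4 um pixels lands in column floor( (123.4 - 100) / 4 ) = 5.
    const float pos = 123.4f, origin = 100.f, spacing = 4.f;
    std::cout << std::floor(( pos - origin ) / spacing ) << std::endl; // 5
    return 0;
}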
Code Example #6
File: OfxhClipImage.hpp Project: EfestoLab/TuttleOFX
	OfxRangeD getFrameRange() const
	{
		OfxRangeD frameRange;
		getFrameRange( frameRange.min, frameRange.max );
		return frameRange;
	}
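Caller side, the value-returning wrapper above can be used as sketched below; ClipT is a placeholder for any type exposing this overload, and the header name is assumed from the standard OpenFX headers.

#include <ofxCore.h> // defines OfxRangeD

// Hypothetical caller: the out-parameter overload fills min/max, the
// value-returning overload packages them into an OfxRangeD.
template< typename ClipT >
double clipLength( const ClipT& clip )
{
    const OfxRangeD range = clip.getFrameRange();
    return range.max - range.min;
}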