Code Example #1
Stage& PipelineManager::makeWriter(const std::string& outputFile,
    std::string driver, Stage& parent)
{
    Stage& writer = makeWriter(outputFile, driver);
    writer.setInput(parent);
    return writer;
}
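A minimal usage sketch of this overload, not taken from any of the listed projects; the file names and empty driver strings are placeholders (an empty driver lets PDAL infer the stage from the file extension):

#include <pdal/PipelineManager.hpp>
#include <pdal/PointTable.hpp>

// Sketch: chain a reader into a writer via the parent-taking overload above,
// then prepare and execute the pipeline rooted at the writer.
void copyPoints()
{
    pdal::PipelineManager mgr;

    pdal::Stage& reader = mgr.makeReader("input.las", "");
    pdal::Stage& writer = mgr.makeWriter("output.laz", "", reader);

    pdal::PointTable table;
    writer.prepare(table);
    writer.execute(table);
}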
Code Example #2
File: MergeKernel.cpp  Project: marops/PDAL
int MergeKernel::execute()
{
    PointTable table;

    MergeFilter filter;

    for (size_t i = 0; i < m_files.size(); ++i)
    {
        Options readerOpts;
        readerOpts.add("filename", m_files[i]);
        readerOpts.add("debug", isDebug());
        readerOpts.add("verbose", getVerboseLevel());

        Stage& reader = makeReader(m_files[i]);
        reader.setOptions(readerOpts);

        filter.setInput(reader);
    }

    Options writerOpts;

    Stage& writer = makeWriter(m_outputFile, filter);
    applyExtraStageOptionsRecursive(&writer);

    writer.prepare(table);
    writer.execute(table);
    return 0;
}
Code Example #3
File: GroundKernel.cpp  Project: EricAlex/PDAL
int GroundKernel::execute()
{
    PointTable table;

    Stage& readerStage(makeReader(m_inputFile, ""));

    Options groundOptions;
    groundOptions.add("max_window_size", m_maxWindowSize);
    groundOptions.add("slope", m_slope);
    groundOptions.add("max_distance", m_maxDistance);
    groundOptions.add("initial_distance", m_initialDistance);
    groundOptions.add("cell_size", m_cellSize);
    groundOptions.add("classify", m_classify);
    groundOptions.add("extract", m_extract);
    groundOptions.add("approximate", m_approximate);

    Stage& groundStage = makeFilter("filters.ground", readerStage);
    groundStage.addOptions(groundOptions);

    // set up the writer and write the results
    Stage& writer(makeWriter(m_outputFile, groundStage, ""));

    writer.prepare(table);

    // process the data, grabbing the PointViewSet for visualization of the
    // resulting PointView
    PointViewSet viewSetOut = writer.execute(table);

    if (isVisualize())
        visualize(*viewSetOut.begin());
    //visualize(*viewSetIn.begin(), *viewSetOut.begin());

    return 0;
}
Code Example #4
File: PipelineManager.cpp  Project: PDAL/PDAL
Stage& PipelineManager::makeWriter(const std::string& outputFile,
    std::string driver, Options options)
{
    StageCreationOptions ops { outputFile, driver, nullptr, options };

    return makeWriter(ops);
}
Code Example #5
File: PipelineManager.cpp  Project: PDAL/PDAL
Stage& PipelineManager::makeWriter(const std::string& outputFile,
    std::string driver, Stage& parent)
{
    StageCreationOptions ops { outputFile, driver, &parent };

    return makeWriter(ops);
}
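Both PipelineManager overloads above build a StageCreationOptions aggregate and forward it to the makeWriter(StageCreationOptions&) overload. A hedged sketch of calling that form directly; the function name, output file, driver, and option values are placeholders, not repository code:

#include <pdal/Options.hpp>
#include <pdal/PipelineManager.hpp>
#include <pdal/PointTable.hpp>

// Sketch only: field order mirrors the aggregate initialization above
// (output filename, driver, parent stage pointer, writer options).
void writeWithOptions(pdal::PipelineManager& mgr, pdal::Stage& parent)
{
    pdal::Options opts;
    opts.add("compression", true);   // placeholder writer option

    pdal::StageCreationOptions ops { "output.laz", "writers.las", &parent, opts };
    pdal::Stage& writer = mgr.makeWriter(ops);

    pdal::PointTable table;
    writer.prepare(table);
    writer.execute(table);
}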
Code Example #6
File: SplitKernel.cpp  Project: adam-erickson/PDAL
int SplitKernel::execute()
{
    PointTable table;

    Options readerOpts;
    readerOpts.add("filename", m_inputFile);
    readerOpts.add("debug", isDebug());
    readerOpts.add("verbose", getVerboseLevel());

    Stage& reader = makeReader(m_inputFile);
    reader.setOptions(readerOpts);

    std::unique_ptr<Stage> f;
    StageFactory factory;
    Options filterOpts;
    if (m_length)
    {
        f.reset(factory.createStage("filters.splitter"));
        filterOpts.add("length", m_length);
        filterOpts.add("origin_x", m_xOrigin);
        filterOpts.add("origin_y", m_yOrigin);
    }
    else
    {
        f.reset(factory.createStage("filters.chipper"));
        filterOpts.add("capacity", m_capacity);
    }
    f->setInput(reader);
    f->setOptions(filterOpts);

    f->prepare(table);
    PointViewSet pvSet = f->execute(table);

    int filenum = 1;
    for (auto& pvp : pvSet)
    {
        BufferReader reader;
        reader.addView(pvp);

        std::string filename = makeFilename(m_outputFile, filenum++);
        Stage& writer = makeWriter(filename, reader);

        writer.prepare(table);
        writer.execute(table);
    }
    return 0;
}
Code Example #7
int PCLKernel::execute()
{
    PointTable table;

    Stage& readerStage(makeReader(m_inputFile, ""));

    // go ahead and prepare/execute on reader stage only to grab input
    // PointViewSet, this makes the input PointView available to both the
    // processing pipeline and the visualizer
    readerStage.prepare(table);
    PointViewSet viewSetIn = readerStage.execute(table);

    // the input PointViewSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointViewPtr input_view = *viewSetIn.begin();
    std::shared_ptr<BufferReader> bufferReader(new BufferReader);
    bufferReader->addView(input_view);

    Options filterOptions({"filename", m_pclFile});
    Stage& pclStage = makeFilter("filters.pclblock", *bufferReader,
        filterOptions);

    // the PCLBlock stage consumes the BufferReader rather than the
    // readerStage

    Options writerOptions;
    if (m_bCompress)
        writerOptions.add<bool>("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);

    Stage& writer(makeWriter(m_outputFile, pclStage, "", writerOptions));

    writer.prepare(table);

    // process the data, grabbing the PointViewSet for visualization of the
    // resulting PointView
    PointViewSet viewSetOut = writer.execute(table);

    if (isVisualize())
        visualize(*viewSetOut.begin());
    //visualize(*viewSetIn.begin(), *viewSetOut.begin());

    return 0;
}
Code Example #8
File: SortKernel.cpp  Project: pblottiere/PDAL
int SortKernel::execute()
{
    Stage& readerStage = makeReader(m_inputFile, m_driverOverride);
    Stage& sortStage = makeFilter("filters.mortonorder", readerStage);

    Options writerOptions;
    if (m_bCompress)
        writerOptions.add("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);
    Stage& writer = makeWriter(m_outputFile, sortStage, "", writerOptions);

    PointTable table;
    writer.prepare(table);
    writer.execute(table);

    return 0;
}
Code Example #9
File: SortKernel.cpp  Project: EricAlex/PDAL
int SortKernel::execute()
{
    Stage& readerStage = makeReader(m_inputFile, "");

    // go ahead and prepare/execute on reader stage only to grab input
    // PointViewSet, this makes the input PointView available to both the
    // processing pipeline and the visualizer
    PointTable table;
    readerStage.prepare(table);
    PointViewSet viewSetIn = readerStage.execute(table);

    // the input PointViewSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointViewPtr inView = *viewSetIn.begin();

    BufferReader bufferReader;
    bufferReader.addView(inView);

    Stage& sortStage = makeFilter("filters.mortonorder", bufferReader);

    Stage& writer = makeWriter(m_outputFile, sortStage, "");
    Options writerOptions;
    if (m_bCompress)
        writerOptions.add("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);
    writer.addOptions(writerOptions);

    writer.prepare(table);

    // process the data, grabbing the PointViewSet for visualization of the
    // resulting PointView
    PointViewSet viewSetOut = writer.execute(table);

    if (isVisualize())
        visualize(*viewSetOut.begin());

    return 0;
}
Code Example #10
File: RandomKernel.cpp  Project: Rafaelaniemann/PDAL
int RandomKernel::execute()
{
    Options readerOptions;

    if (!m_bounds.empty())
        readerOptions.add("bounds", m_bounds);

    std::string distribution(Utils::tolower(m_distribution));
    if (distribution == "uniform")
        readerOptions.add("mode", "uniform");
    else if (distribution == "normal")
        readerOptions.add("mode", "normal");
    else if (distribution == "random")
        readerOptions.add("mode", "random");
    else
        throw pdal_error("invalid distribution: " + m_distribution);
    readerOptions.add("count", m_numPointsToWrite);

    Options writerOptions;
    if (m_bCompress)
        writerOptions.add("compression", true);

    Stage& reader = makeReader("", "readers.faux");
    reader.addOptions(readerOptions);

    Stage& writer = makeWriter(m_outputFile, reader, "");
    writer.addOptions(writerOptions);

    PointTable table;
    writer.prepare(table);
    PointViewSet viewSet = writer.execute(table);

    if (isVisualize())
        visualize(*viewSet.begin());

    return 0;
}
Code Example #11
File: SplitKernel.cpp  Project: jasonoverland/PDAL
int SplitKernel::execute()
{
    PointTable table;

    Stage& reader = makeReader(m_inputFile, m_driverOverride);

    Options filterOpts;
    std::string driver = (m_length ? "filters.splitter" : "filters.chipper");
    if (m_length)
    {
        filterOpts.add("length", m_length);
        filterOpts.add("origin_x", m_xOrigin);
        filterOpts.add("origin_y", m_yOrigin);
    }
    else
    {
        filterOpts.add("capacity", m_capacity);
    }
    Stage& f = makeFilter(driver, reader, filterOpts);
    f.prepare(table);
    PointViewSet pvSet = f.execute(table);

    int filenum = 1;
    for (auto& pvp : pvSet)
    {
        BufferReader reader;
        reader.addView(pvp);

        std::string filename = makeFilename(m_outputFile, filenum++);
        Stage& writer = makeWriter(filename, reader, "");

        writer.prepare(table);
        writer.execute(table);
    }
    return 0;
}
Code Example #12
int CpdKernel::execute()
{
    PointTable tableX;
    PointTable tableY;

    cpd::Matrix X = readFile(m_filex);
    cpd::Matrix Y = readFile(m_filey);

    if (X.rows() == 0 || Y.rows() == 0)
    {
        throw pdal_error("No points to process.");
    }

    cpd::Matrix result;
    if (m_method == "rigid") {
        cpd::Rigid rigid;
        rigid
            .set_tolerance(m_tolerance)
            .set_max_iterations(m_max_it)
            .set_outlier_weight(m_outliers);
        rigid
            .no_reflections(m_no_reflections)
            .allow_scaling(m_allow_scaling);
        if (m_sigma2 > 0) {
            result = rigid.compute(X, Y, m_sigma2).points;
        } else {
            result = rigid.compute(X, Y).points;
        }
    } else if (m_method == "nonrigid") {
        cpd::Nonrigid nonrigid;
        nonrigid
            .set_tolerance(m_tolerance)
            .set_max_iterations(m_max_it)
            .set_outlier_weight(m_outliers);
        nonrigid
            .set_beta(m_beta)
            .set_lambda(m_lambda);
        if (m_sigma2 > 0) {
            result = nonrigid.compute(X, Y, m_sigma2).points;
        } else {
            result = nonrigid.compute(X, Y).points;
        }
    } else {
        std::stringstream ss;
        ss << "Invalid cpd method: " << m_method << std::endl;
        throw pdal_error(ss.str());
    }

    PointTable outTable;
    PointLayoutPtr outLayout(outTable.layout());
    outLayout->registerDim(Dimension::Id::X);
    outLayout->registerDim(Dimension::Id::Y);
    outLayout->registerDim(Dimension::Id::Z);
    outLayout->registerDim(Dimension::Id::XVelocity);
    outLayout->registerDim(Dimension::Id::YVelocity);
    outLayout->registerDim(Dimension::Id::ZVelocity);
    PointViewPtr outView(new PointView(outTable));

    size_t M = Y.rows();
    for (size_t i = 0; i < M; ++i)
    {
        outView->setField<double>(Dimension::Id::X, i, result(i, 0));
        outView->setField<double>(Dimension::Id::Y, i, result(i, 1));
        outView->setField<double>(Dimension::Id::Z, i, result(i, 2));
        outView->setField<double>(Dimension::Id::XVelocity, i,
                                  Y(i, 0) - result(i, 0));
        outView->setField<double>(Dimension::Id::YVelocity, i,
                                  Y(i, 1) - result(i, 1));
        outView->setField<double>(Dimension::Id::ZVelocity, i,
                                  Y(i, 2) - result(i, 2));
    }

    BufferReader reader;
    reader.addView(outView);

    Options writerOpts;
    // apply text-writer-specific options when the output maps to writers.text
    if (StageFactory::inferWriterDriver(m_output) == "writers.text")
    {
        writerOpts.add("order", "X,Y,Z,XVelocity,YVelocity,ZVelocity");
        writerOpts.add("keep_unspecified", false);
    }
    Stage& writer = makeWriter(m_output, reader, "", writerOpts);
    writer.prepare(outTable);
    writer.execute(outTable);

    return 0;
}
Code Example #13
File: SortKernel.cpp  Project: devrimgunduz/PDAL
int SortKernel::execute()
{
    PointTable table;

    Options readerOptions;
    readerOptions.add("filename", m_inputFile);
    readerOptions.add("debug", isDebug());
    readerOptions.add("verbose", getVerboseLevel());

    Stage& readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on reader stage only to grab input
    // PointViewSet, this makes the input PointView available to both the
    // processing pipeline and the visualizer
    readerStage.prepare(table);
    PointViewSet viewSetIn = readerStage.execute(table);

    // the input PointViewSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointViewPtr inView = *viewSetIn.begin();

    BufferReader bufferReader;
    bufferReader.setOptions(readerOptions);
    bufferReader.addView(inView);

    Options sortOptions;
    sortOptions.add<bool>("debug", isDebug());
    sortOptions.add<uint32_t>("verbose", getVerboseLevel());

    StageFactory f;
    Stage& sortStage = ownStage(f.createStage("filters.mortonorder"));
    sortStage.setInput(bufferReader);
    sortStage.setOptions(sortOptions);

    Options writerOptions;
    writerOptions.add("filename", m_outputFile);
    setCommonOptions(writerOptions);

    if (m_bCompress)
        writerOptions.add("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);

    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback =
        cmd.size() ? (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();

    Stage& writer = makeWriter(m_outputFile, sortStage);

    // Some options are inferred by makeWriter based on filename
    // (compression, driver type, etc).
    writer.setOptions(writerOptions + writer.getOptions());
    writer.setUserCallback(callback);

    for (const auto& pi : getExtraStageOptions())
    {
        std::string name = pi.first;
        Options options = pi.second;
        //ABELL - Huh?
        std::vector<Stage *> stages = writer.findStage(name);
        for (const auto& s : stages)
        {
            Options opts = s->getOptions();
            for (const auto& o : options.getOptions())
                opts.add(o);
            s->setOptions(opts);
        }
    }
    writer.prepare(table);

    // process the data, grabbing the PointViewSet for visualization of the
    // resulting PointView
    PointViewSet viewSetOut = writer.execute(table);

    if (isVisualize())
        visualize(*viewSetOut.begin());

    return 0;
}
Code Example #14
void PackfileImportExport::exportData()
{
	hkPackfileWriter::Options options;

	ShapeListener shapeListener;
	hkArray<char> names;

	//
	// Write to two temporary files
	//

	const char* filenames[2][2] = {{"bodies.xml", "bodies.bin"}, {"shapes.xml", "shapes.bin"}};
	{
		hkPackfileWriter* writer = makeWriter( m_options.m_bodiesFormat );
		writer->setContents( m_physicsData, hkpPhysicsDataClass, &shapeListener );
		hkArray<const hkReferencedObject*>& array = shapeListener.m_shapes.m_array;
		names.reserve(array.getSize()*10);
		for( int i = 0; i < array.getSize(); ++i )
		{
			char* name = names.begin()+10*i;
			hkString::snprintf(name, 10, "shape_%0i", i);
			writer->addImport( array[i], name);
		}

		hkOstream out(filenames[0][m_options.m_bodiesFormat]);
		writer->save( out.getStreamWriter(), options );
		writer->removeReference();
	}

	{
		hkPackfileWriter* writer = makeWriter( m_options.m_shapesFormat );
		writer->setContents( &shapeListener.m_shapes, PackfileImportExportReferencedObjectArrayClass );
		hkArray<const hkReferencedObject*>& array = shapeListener.m_shapes.m_array;
		for( int i = 0; i < array.getSize(); ++i )
		{
			char* name = names.begin()+10*i;
			writer->addExport( array[i], name);
		}
		hkOstream out(filenames[1][m_options.m_shapesFormat]);
		writer->save( out.getStreamWriter(), options );
		writer->removeReference();
	}

	//
	// Destroy world, m_physicsData etc.
	//

	cleanup();

	//
	// Reload
	//

	{
		int formats[2];
		int bodiesFirst = m_options.m_loadOrder;
		formats[bodiesFirst^1] = m_options.m_bodiesFormat;
		formats[bodiesFirst  ] = m_options.m_shapesFormat;
		const char* filename[2];
		filename[bodiesFirst^1] = filenames[0][m_options.m_bodiesFormat];
		filename[bodiesFirst  ] = filenames[1][m_options.m_shapesFormat];

		for( int fileIndex = 0; fileIndex < 2; ++fileIndex )
		{
			hkIstream instream(filename[fileIndex]);
			hkPackfileReader* reader = makeReader( formats[fileIndex] );
			reader->loadEntireFile(instream.getStreamReader());
			reader->getPackfileData()->setName(filename[fileIndex]);
			m_linker.add( reader->getPackfileData() );

			if( fileIndex != bodiesFirst )
			{
				m_physicsData = (hkpPhysicsData*)reader->getContents("hkpPhysicsData");
			}
			else
			{
				/*void* unused = */ reader->getContents("PackfileImportExportReferencedObjectArray");
			}
			reader->removeReference();
		}
		
		HK_ASSERT(0, m_linker.m_dangling.getSize() == 0 );
	}

	//
	// Create new world from loaded physicsdata.
	//

	setup();
}