int DiffKernel::execute() { PointTable sourceTable; Stage& source = makeReader(m_sourceFile, m_driverOverride); source.prepare(sourceTable); PointViewSet sourceSet = source.execute(sourceTable); MetadataNode errors; PointTable candidateTable; Stage& candidate = makeReader(m_candidateFile, m_driverOverride); candidate.prepare(candidateTable); PointViewSet candidateSet = candidate.execute(candidateTable); assert(sourceSet.size() == 1); assert(candidateSet.size() == 1); PointViewPtr sourceView = *sourceSet.begin(); PointViewPtr candidateView = *candidateSet.begin(); if (candidateView->size() != sourceView->size()) { std::ostringstream oss; oss << "Source and candidate files do not have the same point count"; errors.add("count.error", oss.str()); errors.add("count.candidate", candidateView->size()); errors.add("count.source", sourceView->size()); } MetadataNode source_metadata = sourceTable.metadata(); MetadataNode candidate_metadata = candidateTable.metadata(); if (source_metadata != candidate_metadata) { std::ostringstream oss; oss << "Source and candidate files do not have the same metadata count"; errors.add("metadata.error", oss.str()); errors.add(source_metadata); errors.add(candidate_metadata); } if (candidateTable.layout()->dims().size() != sourceTable.layout()->dims().size()) { std::ostringstream oss; oss << "Source and candidate files do not have the same " "number of dimensions"; } return 0; }
int GroundKernel::execute() { PointTable table; Stage& readerStage(makeReader(m_inputFile, "")); Options groundOptions; groundOptions.add("max_window_size", m_maxWindowSize); groundOptions.add("slope", m_slope); groundOptions.add("max_distance", m_maxDistance); groundOptions.add("initial_distance", m_initialDistance); groundOptions.add("cell_size", m_cellSize); groundOptions.add("classify", m_classify); groundOptions.add("extract", m_extract); groundOptions.add("approximate", m_approximate); Stage& groundStage = makeFilter("filters.ground", readerStage); groundStage.addOptions(groundOptions); // setup the Writer and write the results Stage& writer(makeWriter(m_outputFile, groundStage, "")); writer.prepare(table); // process the data, grabbing the PointViewSet for visualization of the // resulting PointView PointViewSet viewSetOut = writer.execute(table); if (isVisualize()) visualize(*viewSetOut.begin()); //visualize(*viewSetIn.begin(), *viewSetOut.begin()); return 0; }
int MergeKernel::execute() { PointTable table; MergeFilter filter; for (size_t i = 0; i < m_files.size(); ++i) { Options readerOpts; readerOpts.add("filename", m_files[i]); readerOpts.add("debug", isDebug()); readerOpts.add("verbose", getVerboseLevel()); Stage& reader = makeReader(m_files[i]); reader.setOptions(readerOpts); filter.setInput(reader); } Options writerOpts; Stage& writer = makeWriter(m_outputFile, filter); applyExtraStageOptionsRecursive(&writer); writer.prepare(table); writer.execute(table); return 0; }
// Convenience overload: wrap the arguments in StageCreationOptions (no tag
// stage is supplied) and delegate to the main makeReader overload.
Stage& PipelineManager::makeReader(const std::string& inputFile,
    std::string driver, Options options)
{
    StageCreationOptions createOpts { inputFile, driver, nullptr, options };
    return makeReader(createOpts);
}
// Read 'filename' into 'table' and return the single resulting view.
PointViewPtr DeltaKernel::loadSet(const std::string& filename,
    PointTable& table)
{
    Stage& reader = makeReader(filename, m_driverOverride);
    reader.prepare(table);

    PointViewSet views = reader.execute(table);
    // A reader is expected to produce exactly one view.
    assert(views.size() == 1);
    return *views.begin();
}
int SmoothKernel::execute() { PointTable table; Stage& readerStage(makeReader(m_inputFile, "")); // go ahead and prepare/execute on reader stage only to grab input // PointViewSet, this makes the input PointView available to both the // processing pipeline and the visualizer readerStage.prepare(table); PointViewSet viewSetIn = readerStage.execute(table); // the input PointViewSet will be used to populate a BufferReader that is // consumed by the processing pipeline PointViewPtr input_view = *viewSetIn.begin(); PipelineManager manager; manager.commonOptions() = m_manager.commonOptions(); manager.stageOptions() = m_manager.stageOptions(); BufferReader& bufferReader = static_cast<BufferReader&>(manager.makeReader("", "readers.buffer")); bufferReader.addView(input_view); std::ostringstream ss; ss << "{"; ss << " \"pipeline\": {"; ss << " \"filters\": [{"; ss << " \"name\": \"MovingLeastSquares\""; ss << " }]"; ss << " }"; ss << "}"; Options smoothOptions; smoothOptions.add("json", ss.str()); Stage& smoothStage = manager.makeFilter("filters.pclblock", bufferReader); smoothStage.addOptions(smoothOptions); Stage& writer(Kernel::makeWriter(m_outputFile, smoothStage, "")); writer.prepare(table); // process the data, grabbing the PointViewSet for visualization of the // resulting PointView PointViewSet viewSetOut = writer.execute(table); if (isVisualize()) visualize(*viewSetOut.begin()); //visualize(*viewSetIn.begin(), *viewSetOut.begin()); return 0; }
// Load 'filename' (optionally cropped to m_bounds) and return its points as
// an Nx3 matrix of XYZ coordinates.
cpd::Matrix CpdKernel::readFile(const std::string& filename)
{
    Stage& reader = makeReader(filename, "");

    PointTable table;
    PointViewSet views;
    if (m_bounds.empty())
    {
        reader.prepare(table);
        views = reader.execute(table);
    }
    else
    {
        // Insert a crop filter between the reader and execution.
        Options cropOpts;
        cropOpts.add("bounds", m_bounds);
        Stage& crop = makeFilter("filters.crop", reader);
        crop.setOptions(cropOpts);
        crop.prepare(table);
        views = crop.execute(table);
    }

    // Append each view's points to the matrix, growing it as needed.
    cpd::Matrix matrix(0, 3);
    for (PointViewPtr view : views)
    {
        point_count_t row = matrix.rows();
        if (row == 0)
            matrix.resize(view->size(), 3);
        else
            matrix.conservativeResize(row + view->size(), 3);

        for (point_count_t idx = 0; idx < view->size(); ++idx, ++row)
        {
            matrix(row, 0) = view->getFieldAs<double>(Dimension::Id::X, idx);
            matrix(row, 1) = view->getFieldAs<double>(Dimension::Id::Y, idx);
            matrix(row, 2) = view->getFieldAs<double>(Dimension::Id::Z, idx);
        }
    }
    return matrix;
}
int SplitKernel::execute() { PointTable table; Options readerOpts; readerOpts.add("filename", m_inputFile); readerOpts.add("debug", isDebug()); readerOpts.add("verbose", getVerboseLevel()); Stage& reader = makeReader(m_inputFile); reader.setOptions(readerOpts); std::unique_ptr<Stage> f; StageFactory factory; Options filterOpts; if (m_length) { f.reset(factory.createStage("filters.splitter")); filterOpts.add("length", m_length); filterOpts.add("origin_x", m_xOrigin); filterOpts.add("origin_y", m_yOrigin); } else { f.reset(factory.createStage("filters.chipper")); filterOpts.add("capacity", m_capacity); } f->setInput(reader); f->setOptions(filterOpts); f->prepare(table); PointViewSet pvSet = f->execute(table); int filenum = 1; for (auto& pvp : pvSet) { BufferReader reader; reader.addView(pvp); std::string filename = makeFilename(m_outputFile, filenum++); Stage& writer = makeWriter(filename, reader); writer.prepare(table); writer.execute(table); } return 0; }
int PCLKernel::execute() { PointTable table; Stage& readerStage(makeReader(m_inputFile, "")); // go ahead and prepare/execute on reader stage only to grab input // PointViewSet, this makes the input PointView available to both the // processing pipeline and the visualizer readerStage.prepare(table); PointViewSet viewSetIn = readerStage.execute(table); // the input PointViewSet will be used to populate a BufferReader that is // consumed by the processing pipeline PointViewPtr input_view = *viewSetIn.begin(); std::shared_ptr<BufferReader> bufferReader(new BufferReader); bufferReader->addView(input_view); Options filterOptions({"filename", m_pclFile}); Stage& pclStage = makeFilter("filters.pclblock", *bufferReader, filterOptions); // the PCLBlock stage consumes the BufferReader rather than the // readerStage Options writerOptions; if (m_bCompress) writerOptions.add<bool>("compression", true); if (m_bForwardMetadata) writerOptions.add("forward_metadata", true); Stage& writer(makeWriter(m_outputFile, pclStage, "", writerOptions)); writer.prepare(table); // process the data, grabbing the PointViewSet for visualization of the // resulting PointView PointViewSet viewSetOut = writer.execute(table); if (isVisualize()) visualize(*viewSetOut.begin()); //visualize(*viewSetIn.begin(), *viewSetOut.begin()); return 0; }
int SortKernel::execute() { Stage& readerStage = makeReader(m_inputFile, m_driverOverride); Stage& sortStage = makeFilter("filters.mortonorder", readerStage); Options writerOptions; if (m_bCompress) writerOptions.add("compression", true); if (m_bForwardMetadata) writerOptions.add("forward_metadata", true); Stage& writer = makeWriter(m_outputFile, sortStage, "", writerOptions); PointTable table; writer.prepare(table); writer.execute(table); return 0; }
int SortKernel::execute() { Stage& readerStage = makeReader(m_inputFile, ""); // go ahead and prepare/execute on reader stage only to grab input // PointViewSet, this makes the input PointView available to both the // processing pipeline and the visualizer PointTable table; readerStage.prepare(table); PointViewSet viewSetIn = readerStage.execute(table); // the input PointViewSet will be used to populate a BufferReader that is // consumed by the processing pipeline PointViewPtr inView = *viewSetIn.begin(); BufferReader bufferReader; bufferReader.addView(inView); Stage& sortStage = makeFilter("filters.mortonorder", bufferReader); Stage& writer = makeWriter(m_outputFile, sortStage, ""); Options writerOptions; if (m_bCompress) writerOptions.add("compression", true); if (m_bForwardMetadata) writerOptions.add("forward_metadata", true); writer.addOptions(writerOptions); writer.prepare(table); // process the data, grabbing the PointViewSet for visualization of the PointViewSet viewSetOut = writer.execute(table); if (isVisualize()) visualize(*viewSetOut.begin()); return 0; }
int RandomKernel::execute() { Options readerOptions; if (!m_bounds.empty()) readerOptions.add("bounds", m_bounds); std::string distribution(Utils::tolower(m_distribution)); if (distribution == "uniform") readerOptions.add("mode", "uniform"); else if (distribution == "normal") readerOptions.add("mode", "normal"); else if (distribution == "random") readerOptions.add("mode", "random"); else throw pdal_error("invalid distribution: " + m_distribution); readerOptions.add("count", m_numPointsToWrite); Options writerOptions; if (m_bCompress) writerOptions.add("compression", true); Stage& reader = makeReader("", "readers.faux"); reader.addOptions(readerOptions); Stage& writer = makeWriter(m_outputFile, reader, ""); writer.addOptions(writerOptions); PointTable table; writer.prepare(table); PointViewSet viewSet = writer.execute(table); if (isVisualize()) visualize(*viewSet.begin()); return 0; }
int SplitKernel::execute() { PointTable table; Stage& reader = makeReader(m_inputFile, m_driverOverride); Options filterOpts; std::string driver = (m_length ? "filters.splitter" : "filters.chipper"); if (m_length) { filterOpts.add("length", m_length); filterOpts.add("origin_x", m_xOrigin); filterOpts.add("origin_y", m_yOrigin); } else { filterOpts.add("capacity", m_capacity); } Stage& f = makeFilter(driver, reader, filterOpts); f.prepare(table); PointViewSet pvSet = f.execute(table); int filenum = 1; for (auto& pvp : pvSet) { BufferReader reader; reader.addView(pvp); std::string filename = makeFilename(m_outputFile, filenum++); Stage& writer = makeWriter(filename, reader, ""); writer.prepare(table); writer.execute(table); } return 0; }
int Ground::execute() { PointContext ctx; Options readerOptions; readerOptions.add<std::string>("filename", m_inputFile); readerOptions.add<bool>("debug", isDebug()); readerOptions.add<boost::uint32_t>("verbose", getVerboseLevel()); std::unique_ptr<Stage> readerStage = makeReader(readerOptions); // go ahead and prepare/execute on reader stage only to grab input // PointBufferSet, this makes the input PointBuffer available to both the // processing pipeline and the visualizer readerStage->prepare(ctx); PointBufferSet pbSetIn = readerStage->execute(ctx); // the input PointBufferSet will be used to populate a BufferReader that is // consumed by the processing pipeline PointBufferPtr input_buffer = *pbSetIn.begin(); BufferReader bufferReader; bufferReader.setOptions(readerOptions); bufferReader.addBuffer(input_buffer); Options groundOptions; std::ostringstream ss; ss << "{"; ss << " \"pipeline\": {"; ss << " \"filters\": [{"; ss << " \"name\": \"ProgressiveMorphologicalFilter\","; ss << " \"setMaxWindowSize\": " << m_maxWindowSize << ","; ss << " \"setSlope\": " << m_slope << ","; ss << " \"setMaxDistance\": " << m_maxDistance << ","; ss << " \"setInitialDistance\": " << m_initialDistance << ","; ss << " \"setCellSize\": " << m_cellSize << ","; ss << " \"setBase\": " << m_base << ","; ss << " \"setExponential\": " << m_exponential; ss << " }]"; ss << " }"; ss << "}"; std::string json = ss.str(); groundOptions.add<std::string>("json", json); groundOptions.add<bool>("debug", isDebug()); groundOptions.add<boost::uint32_t>("verbose", getVerboseLevel()); std::unique_ptr<Stage> groundStage(new filters::PCLBlock()); groundStage->setInput(&bufferReader); groundStage->setOptions(groundOptions); // the PCLBlock groundStage consumes the BufferReader rather than the // readerStage groundStage->setInput(&bufferReader); Options writerOptions; writerOptions.add<std::string>("filename", m_outputFile); setCommonOptions(writerOptions); std::unique_ptr<Writer> 
writer(AppSupport::makeWriter(m_outputFile, groundStage.get())); writer->setOptions(writerOptions); std::vector<std::string> cmd = getProgressShellCommand(); UserCallback *callback = cmd.size() ? (UserCallback *)new ShellScriptCallback(cmd) : (UserCallback *)new HeartbeatCallback(); writer->setUserCallback(callback); for (auto pi: getExtraStageOptions()) { std::string name = pi.first; Options options = pi.second; std::vector<Stage*> stages = writer->findStage(name); for (auto s: stages) { Options opts = s->getOptions(); for (auto o: options.getOptions()) opts.add(o); s->setOptions(opts); } } writer->prepare(ctx); // process the data, grabbing the PointBufferSet for visualization of the // resulting PointBuffer PointBufferSet pbSetOut = writer->execute(ctx); if (isVisualize()) visualize(*pbSetOut.begin()); //visualize(*pbSetIn.begin(), *pbSetOut.begin()); return 0; }
int Random::execute() { Options readerOptions; { boost::char_separator<char> sep(SEPARATORS); std::vector<double> means; tokenizer mean_tokens(m_means, sep); for (tokenizer::iterator t = mean_tokens.begin(); t != mean_tokens.end(); ++t) { means.push_back(boost::lexical_cast<double>(*t)); } if (means.size()) { readerOptions.add<double >("mean_x", means[0]); readerOptions.add<double >("mean_y", means[1]); readerOptions.add<double >("mean_z", means[2]); } std::vector<double> stdevs; tokenizer stdev_tokens(m_stdevs, sep); for (tokenizer::iterator t = stdev_tokens.begin(); t != stdev_tokens.end(); ++t) { stdevs.push_back(boost::lexical_cast<double>(*t)); } if (stdevs.size()) { readerOptions.add<double >("stdev_x", stdevs[0]); readerOptions.add<double >("stdev_y", stdevs[1]); readerOptions.add<double >("stdev_z", stdevs[2]); } if (!m_bounds.empty()) readerOptions.add<BOX3D >("bounds", m_bounds); if (boost::iequals(m_distribution, "uniform")) readerOptions.add<std::string>("mode", "uniform"); else if (boost::iequals(m_distribution, "normal")) readerOptions.add<std::string>("mode", "normal"); else if (boost::iequals(m_distribution, "random")) readerOptions.add<std::string>("mode", "random"); else throw pdal_error("invalid distribution: " + m_distribution); readerOptions.add<int>("num_points", m_numPointsToWrite); readerOptions.add<bool>("debug", isDebug()); readerOptions.add<boost::uint32_t>("verbose", getVerboseLevel()); } Options writerOptions; { writerOptions.add<std::string>("filename", m_outputFile); setCommonOptions(writerOptions); if (m_bCompress) { writerOptions.add<bool>("compression", true); } } Stage* final_stage = makeReader(readerOptions); Writer* writer = AppSupport::makeWriter(m_outputFile, final_stage); writer->setOptions(writerOptions); PointContext ctx; UserCallback* callback; if (!getProgressShellCommand().size()) callback = static_cast<pdal::UserCallback*>(new PercentageCallback); else callback = static_cast<pdal::UserCallback*>(new 
ShellScriptCallback(getProgressShellCommand())); writer->setUserCallback(callback); writer->prepare(ctx); PointBufferSet pbSet = writer->execute(ctx); if (isVisualize()) visualize(*pbSet.begin()); delete writer; delete final_stage; return 0; }
// Round-trip the physics world through two packfiles: export bodies and
// shapes to separate files (XML or binary, per m_options), destroy the
// world, reload both files in the configured order, and rebuild the world.
void PackfileImportExport::exportData()
{
    hkPackfileWriter::Options options;
    ShapeListener shapeListener;
    hkArray<char> names;

    //
    // Write to two temporary files
    //
    const char* filenames[2][2] = {{"bodies.xml", "bodies.bin"},
        {"shapes.xml", "shapes.bin"}};
    {
        // Bodies file: the shape listener collects shapes referenced by the
        // physics data so they can be saved separately below.
        hkPackfileWriter* writer = makeWriter( m_options.m_bodiesFormat );
        writer->setContents( m_physicsData, hkpPhysicsDataClass,
            &shapeListener );
        hkArray<const hkReferencedObject*>& array =
            shapeListener.m_shapes.m_array;
        // Each import name occupies a fixed 10-char slot of 'names'.
        // NOTE(review): 10 chars fit "shape_999" plus NUL; snprintf would
        // truncate beyond that -- confirm shape counts stay below 1000.
        names.reserve(array.getSize()*10);
        for( int i = 0; i < array.getSize(); ++i )
        {
            char* name = names.begin()+10*i;
            hkString::snprintf(name, 10, "shape_%0i", i);
            writer->addImport( array[i], name);
        }
        hkOstream out(filenames[0][m_options.m_bodiesFormat]);
        writer->save( out.getStreamWriter(), options );
        writer->removeReference();
    }
    {
        // Shapes file: export the collected shapes under the same per-index
        // names that the bodies file imports them by.
        hkPackfileWriter* writer = makeWriter( m_options.m_shapesFormat );
        writer->setContents( &shapeListener.m_shapes,
            PackfileImportExportReferencedObjectArrayClass );
        hkArray<const hkReferencedObject*>& array =
            shapeListener.m_shapes.m_array;
        for( int i = 0; i < array.getSize(); ++i )
        {
            char* name = names.begin()+10*i;
            writer->addExport( array[i], name);
        }
        hkOstream out(filenames[1][m_options.m_shapesFormat]);
        writer->save( out.getStreamWriter(), options );
        writer->removeReference();
    }

    //
    // Destroy world, m_physicsData etc.
    //
    cleanup();

    //
    // Reload
    //
    {
        // m_loadOrder selects which file is loaded first; the XOR trick
        // places bodies/shapes at opposite indices of the two arrays.
        int formats[2];
        int bodiesFirst = m_options.m_loadOrder;
        formats[bodiesFirst^1] = m_options.m_bodiesFormat;
        formats[bodiesFirst  ] = m_options.m_shapesFormat;
        const char* filename[2];
        filename[bodiesFirst^1] = filenames[0][m_options.m_bodiesFormat];
        filename[bodiesFirst  ] = filenames[1][m_options.m_shapesFormat];
        for( int fileIndex = 0; fileIndex < 2; ++fileIndex )
        {
            hkIstream instream(filename[fileIndex]);
            hkPackfileReader* reader = makeReader( formats[fileIndex] );
            reader->loadEntireFile(instream.getStreamReader());
            reader->getPackfileData()->setName(filename[fileIndex]);
            // Register the loaded data so cross-file imports/exports can be
            // resolved by the linker.
            m_linker.add( reader->getPackfileData() );
            if( fileIndex != bodiesFirst )
            {
                m_physicsData =
                    (hkpPhysicsData*)reader->getContents("hkpPhysicsData");
            }
            else
            {
                /*void* unused = */ reader->getContents(
                    "PackfileImportExportReferencedObjectArray");
            }
            reader->removeReference();
        }
        // All inter-file references must have been resolved by now.
        HK_ASSERT(0, m_linker.m_dangling.getSize() == 0 );
    }

    //
    // Create new world from loaded physicsdata.
    //
    setup();
}
int Translate::execute() { Options readerOptions; { readerOptions.add<std::string>("filename", m_inputFile); readerOptions.add<bool>("debug", isDebug()); readerOptions.add<boost::uint32_t>("verbose", getVerboseLevel()); if (!m_input_srs.empty()) { readerOptions.add<std::string>("spatialreference", m_input_srs.getWKT()); } } Options writerOptions; { writerOptions.add<std::string>("filename", m_outputFile); writerOptions.add<bool>("debug", isDebug()); writerOptions.add<boost::uint32_t>("verbose", getVerboseLevel()); if (!m_input_srs.empty()) { writerOptions.add<std::string>("spatialreference", m_input_srs.getWKT()); } if (m_bCompress) { writerOptions.add<bool>("compression", true); } if (m_bForwardMetadata) { writerOptions.add<bool>("forward_metadata", true); } } Stage* final_stage = makeReader(readerOptions); Writer* writer = AppSupport::makeWriter(writerOptions, *final_stage); if (!m_output_srs.empty()) { writer->setSpatialReference(m_output_srs); } writer->initialize(); const boost::uint64_t numPointsToRead = final_stage->getNumPoints(); if (m_numPointsToWrite == 0) m_numPointsToWrite = numPointsToRead; std::cerr << "Requested to read " << numPointsToRead << " points" << std::endl; std::cerr << "Requested to write " << m_numPointsToWrite << " points" << std::endl; // std::cerr << "Buffer capacity is " << writer->getChunkSize() << std::endl; pdal::UserCallback* callback; if (!getProgressShellCommand().size()) if (m_numPointsToWrite == 0) callback = static_cast<pdal::UserCallback*>(new HeartbeatCallback); else callback = static_cast<pdal::UserCallback*>(new PercentageCallback); else callback = static_cast<pdal::UserCallback*>(new ShellScriptCallback(getProgressShellCommand())); writer->setUserCallback(callback); const boost::uint64_t numPointsRead = writer->write(m_numPointsToWrite, m_numSkipPoints); std::cerr << "Wrote " << numPointsRead << " points\n"; delete writer; delete final_stage; return 0; }
// Smooth the input cloud with PCL's MovingLeastSquares (old PointContext
// API): read, replay through a BufferReader, run filters.pclblock, write.
int SmoothKernel::execute()
{
    PointContext ctx;

    Options readerOptions;
    readerOptions.add("filename", m_inputFile);
    readerOptions.add("debug", isDebug());
    readerOptions.add("verbose", getVerboseLevel());
    std::unique_ptr<Stage> readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on reader stage only to grab input
    // PointBufferSet, this makes the input PointBuffer available to both the
    // processing pipeline and the visualizer
    readerStage->prepare(ctx);
    PointBufferSet pbSetIn = readerStage->execute(ctx);

    // the input PointBufferSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointBufferPtr input_buffer = *pbSetIn.begin();
    BufferReader bufferReader;
    bufferReader.setOptions(readerOptions);
    bufferReader.addBuffer(input_buffer);

    // Build the JSON description of the PCL pipeline: a single
    // MovingLeastSquares filter with default parameters.
    Options smoothOptions;
    std::ostringstream ss;
    ss << "{";
    ss << " \"pipeline\": {";
    ss << " \"filters\": [{";
    ss << " \"name\": \"MovingLeastSquares\"";
    ss << " }]";
    ss << " }";
    ss << "}";
    std::string json = ss.str();
    smoothOptions.add("json", json);
    smoothOptions.add("debug", isDebug());
    smoothOptions.add("verbose", getVerboseLevel());

    std::unique_ptr<Stage> smoothStage(new filters::PCLBlock());
    smoothStage->setOptions(smoothOptions);
    smoothStage->setInput(&bufferReader);

    Options writerOptions;
    writerOptions.add("filename", m_outputFile);
    setCommonOptions(writerOptions);

    WriterPtr writer(KernelSupport::makeWriter(m_outputFile,
        smoothStage.get()));
    writer->setOptions(writerOptions);

    // Progress reporting: a user-supplied shell script if given, else a
    // simple heartbeat.
    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback = cmd.size() ?
        (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();
    writer->setUserCallback(callback);

    // Merge user-supplied per-stage options into any matching stages of the
    // pipeline rooted at the writer.
    std::map<std::string, Options> extra_opts = getExtraStageOptions();
    std::map<std::string, Options>::iterator pi;
    for (pi = extra_opts.begin(); pi != extra_opts.end(); ++pi)
    {
        std::string name = pi->first;
        Options options = pi->second;
        std::vector<Stage*> stages = writer->findStage(name);
        std::vector<Stage*>::iterator s;
        for (s = stages.begin(); s != stages.end(); ++s)
        {
            Options opts = (*s)->getOptions();
            std::vector<Option>::iterator o;
            for (o = options.getOptions().begin();
                o != options.getOptions().end(); ++o)
                opts.add(*o);
            (*s)->setOptions(opts);
        }
    }

    writer->prepare(ctx);

    // process the data, grabbing the PointBufferSet for visualization of the
    // resulting PointBuffer
    PointBufferSet pbSetOut = writer->execute(ctx);

    if (isVisualize())
        visualize(*pbSetOut.begin());
    //visualize(*pbSetIn.begin(), *pbSetOut.begin());

    return 0;
}
// Run the input through a PCLBlock pipeline described by m_pclFile (old
// PointContext API) and write the result.
int PCLKernel::execute()
{
    PointContext ctx;

    Options readerOptions;
    readerOptions.add<std::string>("filename", m_inputFile);
    readerOptions.add<bool>("debug", isDebug());
    readerOptions.add<uint32_t>("verbose", getVerboseLevel());
    std::unique_ptr<Stage> readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on reader stage only to grab input
    // PointBufferSet, this makes the input PointBuffer available to both the
    // processing pipeline and the visualizer
    readerStage->prepare(ctx);
    PointBufferSet pbSetIn = readerStage->execute(ctx);

    // the input PointBufferSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointBufferPtr input_buffer = *pbSetIn.begin();
    BufferReader bufferReader;
    bufferReader.addBuffer(input_buffer);

    Options pclOptions;
    pclOptions.add<std::string>("filename", m_pclFile);
    pclOptions.add<bool>("debug", isDebug());
    pclOptions.add<uint32_t>("verbose", getVerboseLevel());

    std::unique_ptr<Stage> pclStage(new filters::PCLBlock());
    pclStage->setInput(&bufferReader);
    pclStage->setOptions(pclOptions);

    // the PCLBlock stage consumes the BufferReader rather than the
    // readerStage

    Options writerOptions;
    writerOptions.add<std::string>("filename", m_outputFile);
    setCommonOptions(writerOptions);
    if (m_bCompress)
        writerOptions.add<bool>("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);

    // Progress reporting: a user-supplied shell script if given, else a
    // simple heartbeat.
    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback = cmd.size() ?
        (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();

    WriterPtr writer(KernelSupport::makeWriter(m_outputFile, pclStage.get()));

    // Some options are inferred by makeWriter based on filename
    // (compression, driver type, etc).
    writer->setOptions(writerOptions+writer->getOptions());
    writer->setUserCallback(callback);

    // Merge user-supplied per-stage options into any matching stages of the
    // pipeline rooted at the writer.
    for (const auto& pi : getExtraStageOptions())
    {
        std::string name = pi.first;
        Options options = pi.second;
        std::vector<Stage*> stages = writer->findStage(name);
        for (const auto& s : stages)
        {
            Options opts = s->getOptions();
            for (const auto& o : options.getOptions())
                opts.add(o);
            s->setOptions(opts);
        }
    }
    writer->prepare(ctx);

    // process the data, grabbing the PointBufferSet for visualization of the
    // resulting PointBuffer
    PointBufferSet pbSetOut = writer->execute(ctx);

    if (isVisualize())
        visualize(*pbSetOut.begin());
    //visualize(*pbSetIn.begin(), *pbSetOut.begin());

    return 0;
}
// Morton-order the input and write it out (transitional API: PointTable
// with UserCallback-style progress reporting).
int SortKernel::execute()
{
    PointTable table;

    Options readerOptions;
    readerOptions.add("filename", m_inputFile);
    readerOptions.add("debug", isDebug());
    readerOptions.add("verbose", getVerboseLevel());
    Stage& readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on reader stage only to grab input
    // PointViewSet, this makes the input PointView available to both the
    // processing pipeline and the visualizer
    readerStage.prepare(table);
    PointViewSet viewSetIn = readerStage.execute(table);

    // the input PointViewSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointViewPtr inView = *viewSetIn.begin();
    BufferReader bufferReader;
    bufferReader.setOptions(readerOptions);
    bufferReader.addView(inView);

    Options sortOptions;
    sortOptions.add<bool>("debug", isDebug());
    sortOptions.add<uint32_t>("verbose", getVerboseLevel());

    StageFactory f;
    Stage& sortStage = ownStage(f.createStage("filters.mortonorder"));
    sortStage.setInput(bufferReader);
    sortStage.setOptions(sortOptions);

    Options writerOptions;
    writerOptions.add("filename", m_outputFile);
    setCommonOptions(writerOptions);
    if (m_bCompress)
        writerOptions.add("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);

    // Progress reporting: a user-supplied shell script if given, else a
    // simple heartbeat.
    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback = cmd.size() ?
        (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();

    Stage& writer = makeWriter(m_outputFile, sortStage);

    // Some options are inferred by makeWriter based on filename
    // (compression, driver type, etc).
    writer.setOptions(writerOptions + writer.getOptions());
    writer.setUserCallback(callback);

    // Merge user-supplied per-stage options into any matching stages of the
    // pipeline rooted at the writer.
    for (const auto& pi : getExtraStageOptions())
    {
        std::string name = pi.first;
        Options options = pi.second;

        //ABELL - Huh?
        std::vector<Stage *> stages = writer.findStage(name);
        for (const auto& s : stages)
        {
            Options opts = s->getOptions();
            for (const auto& o : options.getOptions())
                opts.add(o);
            s->setOptions(opts);
        }
    }
    writer.prepare(table);

    // process the data, grabbing the PointViewSet for visualization of the
    PointViewSet viewSetOut = writer.execute(table);

    if (isVisualize())
        visualize(*viewSetOut.begin());

    return 0;
}