int WaypointMessageHelper::readWaypointMessageForTarget (const void *pWaypointMsgPayload, uint32 ui32Offset, uint32 ui32TotalLen, PreviousMessageIds &previouMessagesSentToTargets, uint32 &ui32WaypointMsgPayloadLen) { ui32WaypointMsgPayloadLen = ui32TotalLen; BufferReader br (pWaypointMsgPayload, ui32TotalLen); br.setPosition (ui32Offset); uint32 ui32LatestMessageSentLen = 0; if (br.read32 (&ui32LatestMessageSentLen) < 0) { return -1; } ui32WaypointMsgPayloadLen -= 4; if (ui32LatestMessageSentLen > 0) { static const uint16 BUF_LEN = 1024; char buf[BUF_LEN]; if ((ui32LatestMessageSentLen + 1) > BUF_LEN) { return -2; } if (br.readBytes (buf, ui32LatestMessageSentLen) < 0) { return -3; } buf[ui32LatestMessageSentLen] = '\0'; previouMessagesSentToTargets = buf; ui32WaypointMsgPayloadLen -= ui32LatestMessageSentLen; } return 0; }
// Parses unsigned integers out of "1024 768\r798": the first two numbers are
// terminated by a delimiter; the trailing "798" must be reported incomplete
// because more digits could still arrive.
TEST(test_buffer, read_unsigned)
{
    err_code_t err;
    char str[] = "1024 768\r798";
    size_t len_str = 12;
    DataBlock::setMinCapacity(MIN_DATABLOCK_CAPACITY);
    BufferReader reader;
    reader.write(str, len_str);

    uint64_t val;
    TEST_READ_UNSIGNED_NO_THROW(val);
    ASSERT_EQ(val, 1024ULL);
    EXPECT_EQ(reader.peek(err, 0), ' ');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(1);

    TEST_READ_UNSIGNED_NO_THROW(val);
    ASSERT_EQ(val, 768ULL);
    EXPECT_EQ(reader.peek(err, 0), '\r');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(1);

    // A number at the very end of the buffer is ambiguous.
    reader.readUnsigned(err, val);
    ASSERT_EQ(err, RET_INCOMPLETE_BUFFER_ERR);
    TEST_SKIP_BYTES_NO_THROW(3);
}
// Requesting zero bytes from an empty reader is a successful no-op that
// yields an empty token list.
TEST(test_buffer, read_bytes_empty)
{
    DataBlock::setMinCapacity(5);
    BufferReader reader;
    err_code_t err;
    TokenData tokens;
    reader.readBytes(err, 0, tokens);
    ASSERT_EQ(err, RET_OK);
    ASSERT_EQ(tokens.size(), 0);
}
// Unpacks the index / id / gc fields, laid out back-to-back at the reader's
// current position, using memcpy so the reads are alignment-safe.
// NOTE(review): the wire layout depends on sizeof(size_t) and sizeof(bool),
// so it is not portable across 32/64-bit builds — confirm this is intended.
void Handle::deserialize(BufferReader& reader)
{
    const char* cursor = reader.getPointer();
    memcpy(&this->index, cursor, sizeof(size_t));
    cursor += sizeof(size_t);
    memcpy(&this->id, cursor, sizeof(unsigned int));
    cursor += sizeof(unsigned int);
    memcpy(&this->gc, cursor, sizeof(bool));
    // Tell the reader how many bytes were consumed.
    reader.finished(sizeof(size_t) + sizeof(unsigned int) + sizeof(bool));
}
// Deserializes the base game state, then the camera target and position
// from a DeathCameraData record at the reader's current position.
void DeathCamera::deserialize(BufferReader& buffer)
{
    this->gameState.deserialize(buffer);
    // fix: copy into a properly aligned local instead of reinterpret_cast'ing
    // the raw buffer pointer — the buffer offset is not guaranteed to satisfy
    // DeathCameraData's alignment, and reading through the cast pointer also
    // violates strict-aliasing rules.
    DeathCameraData data;
    memcpy(&data, buffer.getPointer(), sizeof(DeathCameraData));
    // w = 1.0 marks these as positions (points, not directions).
    this->target.set(data.target[0], data.target[1], data.target[2], 1.0f);
    this->position.set(data.position[0], data.position[1], data.position[2], 1.0f);
    buffer.finished(sizeof(DeathCameraData));
}
// Peeking at any offset of a reader that holds no data must report an
// incomplete buffer.
TEST(test_buffer, peek_empty)
{
    BufferReader reader;
    for (int offset = 0; offset <= 1; ++offset)
    {
        err_code_t err = RET_OK;
        reader.peek(err, offset);
        ASSERT_EQ(err, RET_INCOMPLETE_BUFFER_ERR);
    }
}
// Tokenizes "foo b baz" on ' ' delimiters and verifies token contents and
// offsets, and that a failed readUntil rolls the read cursor back so the
// pending data can still be consumed.
TEST(test_buffer, read_until)
{
    err_code_t err;
    char str[] = "foo b baz";
    size_t len_str = strlen(str);
    DataBlock::setMinCapacity(MIN_DATABLOCK_CAPACITY);
    BufferReader reader;
    DataBlock* dbPtr = NULL;
    reader.write(str, len_str);
    TokenData td, td2;
    // "foo": 3 bytes up to the first space
    ASSERT_EQ(reader.readUntil(err, ' ', td), 3);
    ASSERT_EQ(err, RET_OK);
    // bypass ' '
    EXPECT_EQ(reader.peek(err, 0), ' ');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(1);
    // "b": 1 byte up to the second space
    ASSERT_EQ(reader.readUntil(err, ' ', td2), 1);
    ASSERT_EQ(err, RET_OK);
    // first token: a single segment of size 3 at offset 0 of its block
    ASSERT_EQ(td.size(), 1);
    ASSERT_EQ(td.front().size, 3);
    ASSERT_EQ(td.front().offset, 0);
    dbPtr = &(*td.front().iterator);
    ASSERT_N_STREQ((*dbPtr)[td.front().offset], "foo", 3);
    freeTokenData(td);
    // second token: a single segment of size 1 at offset 4
    ASSERT_EQ(td2.size(), 1);
    ASSERT_EQ(td2.front().size, 1);
    ASSERT_EQ(td2.front().offset, 4);
    dbPtr = &(*td2.front().iterator);
    ASSERT_N_STREQ((*dbPtr)[td2.front().offset], "b", 1);
    freeTokenData(td2);
    // bypass ' '
    EXPECT_EQ(reader.peek(err, 0), ' ');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(1);
    // should rollback read cursor on error: no further ' ' exists, so the
    // call fails and "baz" must remain fully peekable from its start
    td2.clear();
    reader.readUntil(err, ' ', td2);
    ASSERT_EQ(err, RET_INCOMPLETE_BUFFER_ERR);
    ASSERT_EQ(reader.peek(err, 0), 'b');
    ASSERT_EQ(err, RET_OK);
    ASSERT_EQ(reader.peek(err, 1), 'a');
    ASSERT_EQ(err, RET_OK);
    ASSERT_EQ(reader.peek(err, 2), 'z');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(3);
}
// Renders the given point buffer by running it through a one-off pipeline
// that ends in the PCL visualizer writer.
void Kernel::visualize(PointBufferPtr buffer) const
{
    BufferReader source;
    source.addBuffer(buffer);

    StageFactory factory;
    WriterPtr writer(factory.createWriter("writers.pclvisualizer"));
    writer->setInput(&source);

    PointContext ctx;
    writer->prepare(ctx);
    writer->execute(ctx);
}
// readUntil must fail when the buffer is empty, and also when the delimiter
// never appears in the data written so far.
TEST(test_buffer, read_until_empty)
{
    err_code_t err;
    BufferReader reader;
    TokenData tokens;

    // completely empty buffer
    reader.readUntil(err, ' ', tokens);
    ASSERT_EQ(err, RET_INCOMPLETE_BUFFER_ERR);

    // one byte written, but no delimiter yet
    reader.write(CSTR("b"), 1);
    reader.readUntil(err, ' ', tokens);
    ASSERT_EQ(err, RET_INCOMPLETE_BUFFER_ERR);
    TEST_SKIP_BYTES_NO_THROW(1);
}
// Verifies that "offset_x"/"scale_x" set to "auto" make the LAS writer pick
// the minimum X value (74529.00) as the header offset, and that the points
// round-trip through the file within scale tolerance.
TEST(LasWriterTest, auto_offset)
{
    using namespace Dimension;

    const std::string FILENAME(Support::temppath("offset_test.las"));
    PointTable table;
    table.layout()->registerDim(Id::X);

    BufferReader bufferReader;
    PointViewPtr view(new PointView(table));
    view->setField(Id::X, 0, 125000.00);
    view->setField(Id::X, 1, 74529.00);
    view->setField(Id::X, 2, 523523.02);
    bufferReader.addView(view);

    Options writerOps;
    writerOps.add("filename", FILENAME);
    writerOps.add("offset_x", "auto");
    writerOps.add("scale_x", "auto");

    LasWriter writer;
    writer.setOptions(writerOps);
    writer.setInput(bufferReader);
    writer.prepare(table);
    writer.execute(table);

    // Read the file back and check header + point values.
    Options readerOps;
    readerOps.add("filename", FILENAME);
    PointTable readTable;
    LasReader reader;
    reader.setOptions(readerOps);
    reader.prepare(readTable);
    // auto offset should be the minimum X of the input
    EXPECT_DOUBLE_EQ(74529.00, reader.header().offsetX());
    PointViewSet viewSet = reader.execute(readTable);
    EXPECT_EQ(viewSet.size(), 1u);
    view = *viewSet.begin();
    EXPECT_EQ(view->size(), 3u);
    EXPECT_NEAR(125000.00, view->getFieldAs<double>(Id::X, 0), .0001);
    EXPECT_NEAR(74529.00, view->getFieldAs<double>(Id::X, 1), .0001);
    EXPECT_NEAR(523523.02, view->getFieldAs<double>(Id::X, 2), .0001);
    FileUtils::deleteFile(FILENAME);
}
// readUnsigned must report an incomplete buffer both when nothing has been
// written and when only a trailing digit (which more digits could extend)
// is available.
TEST(test_buffer, read_unsigned_empty)
{
    err_code_t err;
    uint64_t val;
    char str[] = "6";
    size_t len_str = strlen(str);
    DataBlock::setMinCapacity(MIN_DATABLOCK_CAPACITY);
    BufferReader reader;

    // nothing written yet
    reader.readUnsigned(err, val);
    ASSERT_EQ(err, RET_INCOMPLETE_BUFFER_ERR);

    // lone digit at the end of the buffer is still ambiguous
    reader.write(str, len_str);
    reader.readUnsigned(err, val);
    ASSERT_EQ(err, RET_INCOMPLETE_BUFFER_ERR);
    TEST_SKIP_BYTES_NO_THROW(len_str);
}
// Deserializes the base Event, then the action type and acting player's GUID
// from an ActionHeader record at the reader's current position.
void ActionEvent::deserialize( BufferReader& buffer )
{
    Event::deserialize( buffer );
    // fix: copy the header into an aligned local rather than reading through
    // a reinterpret_cast of the raw buffer pointer — the buffer offset is not
    // guaranteed to meet ActionHeader's alignment, and the cast read also
    // violates strict aliasing.
    struct ActionHeader actionHdr;
    memcpy( &actionHdr, buffer.getPointer(), sizeof( struct ActionHeader ) );
    this->actionType = actionHdr.actionType;
    this->playerGuid = actionHdr.playerGuid;
    buffer.finished( sizeof( struct ActionHeader ) );
}
int SplitKernel::execute() { PointTable table; Options readerOpts; readerOpts.add("filename", m_inputFile); readerOpts.add("debug", isDebug()); readerOpts.add("verbose", getVerboseLevel()); Stage& reader = makeReader(m_inputFile); reader.setOptions(readerOpts); std::unique_ptr<Stage> f; StageFactory factory; Options filterOpts; if (m_length) { f.reset(factory.createStage("filters.splitter")); filterOpts.add("length", m_length); filterOpts.add("origin_x", m_xOrigin); filterOpts.add("origin_y", m_yOrigin); } else { f.reset(factory.createStage("filters.chipper")); filterOpts.add("capacity", m_capacity); } f->setInput(reader); f->setOptions(filterOpts); f->prepare(table); PointViewSet pvSet = f->execute(table); int filenum = 1; for (auto& pvp : pvSet) { BufferReader reader; reader.addView(pvp); std::string filename = makeFilename(m_outputFile, filenum++); Stage& writer = makeWriter(filename, reader); writer.prepare(table); writer.execute(table); } return 0; }
// Constructs a sub-range view over src covering [pos, pos + size).
// The view shares src's data; only the offset/size bookkeeping differs.
BufferReader(BufferReader const & src, uint64_t pos, uint64_t size)
  : m_data(src.m_data)
{
  // fix: check the operands separately so that (pos + size) cannot wrap
  // around on uint64 overflow and slip past a single combined comparison.
  ASSERT_LESS_OR_EQUAL(pos, src.Size(), (pos, size));
  ASSERT_LESS_OR_EQUAL(size, src.Size() - pos, (pos, size));
  m_offset = static_cast<size_t>(src.m_offset + pos);
  m_size = static_cast<size_t>(size);
}
// Verifies that the ComputeRangeFilter registers a "Range" dimension and
// fills it with the sensor-to-point distance.  Point 1 is a 3-4-5 triangle
// from the sensor position, point 0 coincides with it.
// NOTE(review): the fields are set after prepare() but before execute() —
// this appears intentional, since prepare() only fixes the layout.
TEST(ComputeRangeFilterTest, compute)
{
    using namespace Dimension;

    PointTable table;
    PointLayoutPtr layout(table.layout());
    layout->registerDim(Id::X);
    layout->registerDim(Id::Y);
    layout->registerDim(Id::Z);
    // Sensor metadata dimensions used by the filter.
    Id pn = layout->registerOrAssignDim("Pixel Number", Type::Double);
    Id fn = layout->registerOrAssignDim("Frame Number", Type::Double);

    PointViewPtr view(new PointView(table));
    BufferReader r;
    r.addView(view);

    ComputeRangeFilter crop;
    crop.setInput(r);
    crop.prepare(table);

    view->setField(Id::X, 0, 0.0);
    view->setField(Id::Y, 0, 0.0);
    view->setField(Id::Z, 0, 0.0);
    view->setField(pn, 0, 0.0);
    view->setField(fn, 0, 0.0);

    view->setField(Id::X, 1, 0.0);
    view->setField(Id::Y, 1, 3.0);
    view->setField(Id::Z, 1, 4.0);
    view->setField(pn, 1, -5.0);
    view->setField(fn, 1, 0.0);

    PointViewSet s = crop.execute(table);
    EXPECT_EQ(1u, s.size());
    // The filter must have created the Range dimension.
    Id range = layout->findDim("Range");
    EXPECT_NE(Id::Unknown, range);
    PointViewPtr out = *s.begin();
    EXPECT_EQ(2u, out->size());
    // NOTE(review): "Range" at index 0 is 5.0 and index 1 is 0.0 — the
    // filter's output ordering apparently differs from the input ordering;
    // confirm against the filter implementation.
    EXPECT_EQ(5.0, out->getFieldAs<double>(range, 0));
    EXPECT_EQ(0.0, out->getFieldAs<double>(range, 1));
}
// Numbers that straddle DataBlock boundaries (min capacity 5) must still be
// parsed as single values: "123456 89 423 0 10\r\n" written in 5-byte chunks.
TEST(test_buffer, read_unsigned_across_block)
{
    err_code_t err;
    DataBlock::setMinCapacity(5);
    BufferReader reader;
    reader.write(CSTR("12345"), 5);
    reader.write(CSTR("6 89 "), 5);
    reader.write(CSTR("423 0"), 5);
    reader.write(CSTR(" 10\r\n"), 5);

    uint64_t val;

    // "123456" spans the first two blocks
    TEST_READ_UNSIGNED_NO_THROW(val);
    ASSERT_EQ(val, 123456ULL);
    EXPECT_EQ(reader.peek(err, 0), ' ');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(1);

    TEST_READ_UNSIGNED_NO_THROW(val);
    ASSERT_EQ(val, 89ULL);
    EXPECT_EQ(reader.peek(err, 0), ' ');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(1);

    TEST_READ_UNSIGNED_NO_THROW(val);
    ASSERT_EQ(val, 423ULL);
    EXPECT_EQ(reader.peek(err, 0), ' ');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(1);

    TEST_READ_UNSIGNED_NO_THROW(val);
    ASSERT_EQ(val, 0ULL);
    EXPECT_EQ(reader.peek(err, 0), ' ');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(1);

    TEST_READ_UNSIGNED_NO_THROW(val);
    ASSERT_EQ(val, 10ULL);

    // Next byte is '\r' — not a digit, so this is a caller error.
    reader.readUnsigned(err, val);
    ASSERT_EQ(err, RET_PROGRAMMING_ERR);
    TEST_SKIP_BYTES_NO_THROW(2);
}
// Reads the input file, routes the points through a Morton-order sort
// filter, and writes the result (optionally compressed / with forwarded
// metadata).  Returns 0 on success.
int SortKernel::execute()
{
    Stage& readerStage = makeReader(m_inputFile, "");

    // go ahead and prepare/execute on reader stage only to grab input
    // PointViewSet, this makes the input PointView available to both the
    // processing pipeline and the visualizer
    PointTable table;
    readerStage.prepare(table);
    PointViewSet viewSetIn = readerStage.execute(table);

    // the input PointViewSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointViewPtr inView = *viewSetIn.begin();
    BufferReader bufferReader;
    bufferReader.addView(inView);

    Stage& sortStage = makeFilter("filters.mortonorder", bufferReader);
    Stage& writer = makeWriter(m_outputFile, sortStage, "");

    Options writerOptions;
    if (m_bCompress)
        writerOptions.add("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);
    writer.addOptions(writerOptions);

    writer.prepare(table);
    // process the data, grabbing the PointViewSet for visualization of the
    PointViewSet viewSetOut = writer.execute(table);

    if (isVisualize())
        visualize(*viewSetOut.begin());

    return 0;
}
// Parses up to 12 face indices from the reader's current line using the
// given sscanf format, converts them from 1-based (OBJ convention) to
// 0-based, and writes them to the output buffer.
// Returns the number of indices written (0 if the line did not match).
inline unsigned process_linei(const string& format, BufferReader& in, BufferWriter& out)
{
    int face[12];
    auto ret = sscanf(in.read_raw(), format.data(), face + 0, face + 1, face + 2, face + 3,
                      face + 4, face + 5, face + 6, face + 7, face + 8, face + 9,
                      face + 10, face + 11);
    // fix: sscanf returns EOF (-1) on input failure; previously the negative
    // value was passed to write_elements() and wrapped through the unsigned
    // return type into a huge count.
    if (ret < 0)
        return 0;
    for (int x = 0; x < ret; ++x)
        face[x] -= 1; // OBJ indices are 1-based
    out.write_elements<int>(face, ret);
    return ret;
}
// The reader grows by whole DataBlocks (min capacity 3) as data is written
// and shrinks back to a single block when reset().
TEST(test_buffer, scalability)
{
    err_code_t err;
    DataBlock::setMinCapacity(3);
    BufferReader reader;

    reader.write(CSTR("012"), 3);
    reader.write(CSTR("345"), 3);
    reader.write(CSTR("678"), 3);

    ASSERT_EQ(reader.peek(err, 0), '0');
    ASSERT_EQ(err, RET_OK);
    TEST_SKIP_BYTES_NO_THROW(3);

    // After skipping the first block, reads continue in the second.
    ASSERT_EQ(reader.peek(err, 0), '3');
    ASSERT_EQ(err, RET_OK);

    // A fourth write forces a fourth block: capacity = 4 * 3.
    reader.write(CSTR("BCD"), 3);
    ASSERT_EQ(reader.capacity(), 12);

    TEST_SKIP_BYTES_NO_THROW(9);
    reader.reset();
    // reset() releases the extra blocks down to the minimum capacity.
    ASSERT_EQ(reader.capacity(), 3);
}
int SplitKernel::execute() { PointTable table; Stage& reader = makeReader(m_inputFile, m_driverOverride); Options filterOpts; std::string driver = (m_length ? "filters.splitter" : "filters.chipper"); if (m_length) { filterOpts.add("length", m_length); filterOpts.add("origin_x", m_xOrigin); filterOpts.add("origin_y", m_yOrigin); } else { filterOpts.add("capacity", m_capacity); } Stage& f = makeFilter(driver, reader, filterOpts); f.prepare(table); PointViewSet pvSet = f.execute(table); int filenum = 1; for (auto& pvp : pvSet) { BufferReader reader; reader.addView(pvp); std::string filename = makeFilename(m_outputFile, filenum++); Stage& writer = makeWriter(filename, reader, ""); writer.prepare(table); writer.execute(table); } return 0; }
inline unsigned process_linei(const string& format, BufferReader& in, BufferWriter& out, unsigned& face_count) { int face[12]; // vertex/uv/normal // TEST does atoi stop at the / auto ret = sscanf(in.read_raw(), format.data(), face + 0, face + 1, face + 2, face + 3, face + 4, face + 5, face + 6, face + 7, face + 8, face + 9, face + 10, face + 11); for (unsigned char x = 0; x < ret; ++x) *(face + x) -= 1; out.write_elements<int>(face, ret); ++face_count; return ret; }
// Handles a remote search request: parses the sequence id, group name and
// query out of pData, drops duplicate/old searches per sender, and returns
// the matching message IDs (NULL when the search must not be served).
// The returned list is produced by the information store; ownership follows
// that API's convention.
PtrLList<const char> * InformationPull::remoteSearchArrived (const void *pData, uint32 ui32DataLength, uint32 &ui32RcvdRemoteSeachQuery, const char *pszSenderNodeId)
{
    // Read the message: seq id, then two length-prefixed strings.
    BufferReader br (pData, ui32DataLength);
    br.read32 (&ui32RcvdRemoteSeachQuery);
    uint16 ui16Len;
    br.read16 (&ui16Len);
    char *pszGroupName = new char[ui16Len+1];
    br.readBytes (pszGroupName, ui16Len);
    pszGroupName[ui16Len] = '\0';
    br.read16 (&ui16Len);
    char *pszQuery = new char[ui16Len+1];
    br.readBytes (pszQuery, ui16Len);
    pszQuery[ui16Len] = '\0';

    // Check the search seq id
    uint32 *pUI32PrevRcvdRemoteSeachQuery = _latestSearchIdRcvdByPeer.get (pszSenderNodeId);
    if (pUI32PrevRcvdRemoteSeachQuery == NULL) {
        pUI32PrevRcvdRemoteSeachQuery = new uint32;
        (*pUI32PrevRcvdRemoteSeachQuery) = ui32RcvdRemoteSeachQuery;
        _latestSearchIdRcvdByPeer.put (pszSenderNodeId, pUI32PrevRcvdRemoteSeachQuery);
    }
    else {
        if (SequentialArithmetic::lessThanOrEqual (ui32RcvdRemoteSeachQuery, (*pUI32PrevRcvdRemoteSeachQuery))) {
            // This is either a duplicate search or an old search. Either way
            // it must not be served! (Actually the "equal" case should never
            // happen).
            // fix: pszGroupName/pszQuery were leaked on this early return.
            delete[] pszGroupName;
            delete[] pszQuery;
            return NULL;
        }
        else {
            (*pUI32PrevRcvdRemoteSeachQuery) = ui32RcvdRemoteSeachQuery;
        }
    }

    // Get the IDs of the messages matching the query.
    // fix: the old code malloc'd a ppszFilters array, never passed it to
    // getMessageIDs, and released it with "delete" (allocator mismatch —
    // undefined behavior).  The dead array has been removed.
    // TODO(review): confirm whether getMessageIDs was meant to take the
    // sender-id filter list so already-sent messages are excluded.
    PtrLList<const char> *pRet = _pInformationStore->getMessageIDs (pszGroupName, pszQuery);
    // fix: the parsed strings were leaked on the success path as well.
    delete[] pszGroupName;
    delete[] pszQuery;
    return pRet;
}
// Deserializes a scene-graph node from the buffer.
// Parent and child links are stored on the wire as 32-bit node ids; here
// those ids are smuggled into the pointer members as raw integer values.
// Presumably a later fix-up pass replaces them with real Node* addresses
// once every node has been loaded — TODO(review): confirm with the caller.
GCL::Node::Node( BufferReader &buffer )
{
    buffer.Read(mId);
    buffer.Read(mTransform);
    buffer.Read(mName);
    uint32_t parentNodeId;
    buffer.Read(parentNodeId);
    // id stored in the pointer slot; not a valid address yet
    mParentNode = (Node *)size_t(parentNodeId);
    size_t childCount;
    buffer.Read(childCount);
    for (size_t i=0; i<childCount; ++i)
    {
        uint32_t id;
        buffer.Read(id);
        // same id-in-pointer convention as mParentNode
        mChilds.push_back((Node*)size_t(id));
    }
}
// Identical to above, but writes each input view to a separate output file.
// The '#' in the output filename is expanded per view, producing
// offset_test1.las / offset_test2.las; each file gets its own auto-computed
// offsets and scales derived from that view's min/max values.
TEST(LasWriterTest, auto_offset2)
{
    using namespace Dimension;

    const std::string outname(Support::temppath("offset_test#.las"));
    const std::string inname1(Support::temppath("offset_test1.las"));
    const std::string inname2(Support::temppath("offset_test2.las"));
    PointTable table;
    table.layout()->registerDims({Id::X, Id::Y, Id::Z});

    BufferReader bufferReader;
    // First view: X min 74529, Z min -123.
    PointViewPtr view(new PointView(table));
    view->setField(Id::X, 0, 125000.00);
    view->setField(Id::X, 1, 74529.00);
    view->setField(Id::X, 2, 1000000.02);
    view->setField(Id::Y, 0, 0);
    view->setField(Id::Y, 1, 1);
    view->setField(Id::Y, 2, 2);
    view->setField(Id::Z, 0, -123);
    view->setField(Id::Z, 1, 456.78);
    view->setField(Id::Z, 2, 945.23);
    bufferReader.addView(view);

    // Second view: X min 25, Z min 1.5, Z max near INT_MAX to stress scale.
    view.reset(new PointView(table));
    view->setField(Id::X, 0, 25.00);
    view->setField(Id::X, 1, 74529.00);
    view->setField(Id::X, 2, 534252.35);
    view->setField(Id::Y, 0, 3);
    view->setField(Id::Y, 1, 4);
    view->setField(Id::Y, 2, 5);
    view->setField(Id::Z, 0, 1.5);
    view->setField(Id::Z, 1, 2147483524);
    view->setField(Id::Z, 2, 745.23);
    bufferReader.addView(view);

    Options writerOps;
    writerOps.add("filename", outname);
    writerOps.add("offset_x", "auto");
    writerOps.add("scale_x", "auto");
    writerOps.add("offset_z", "auto");
    writerOps.add("scale_z", "auto");

    LasWriter writer;
    writer.setOptions(writerOps);
    writer.setInput(bufferReader);
    writer.prepare(table);
    writer.execute(table);

    // Verify the first output file: offsets are the view's minima.
    {
        Options readerOps;
        readerOps.add("filename", inname1);
        PointTable readTable;
        LasReader reader;
        reader.setOptions(readerOps);
        reader.prepare(readTable);
        EXPECT_DOUBLE_EQ(74529.00, reader.header().offsetX());
        EXPECT_DOUBLE_EQ(0, reader.header().offsetY());
        EXPECT_DOUBLE_EQ(-123, reader.header().offsetZ());
        EXPECT_NEAR(4.30956e-4, reader.header().scaleX(), 1e-4);
        EXPECT_DOUBLE_EQ(.01, reader.header().scaleY());
        // (max - min) are chosen to yield std::numeric_limits<int>::max();
        EXPECT_NEAR(4.9743e-7, reader.header().scaleZ(), 1e-7);
        PointViewSet viewSet = reader.execute(readTable);
        EXPECT_EQ(viewSet.size(), 1u);
        view = *viewSet.begin();
        EXPECT_EQ(view->size(), 3u);
        EXPECT_NEAR(125000.00, view->getFieldAs<double>(Id::X, 0), .001);
        EXPECT_NEAR(74529.00, view->getFieldAs<double>(Id::X, 1), .001);
        EXPECT_NEAR(1000000.02, view->getFieldAs<double>(Id::X, 2), .0001);
    }

    // Verify the second output file.
    {
        Options readerOps;
        readerOps.add("filename", inname2);
        PointTable readTable;
        LasReader reader;
        reader.setOptions(readerOps);
        reader.prepare(readTable);
        EXPECT_DOUBLE_EQ(25.0, reader.header().offsetX());
        EXPECT_DOUBLE_EQ(0, reader.header().offsetY());
        EXPECT_DOUBLE_EQ(1.5, reader.header().offsetZ());
        EXPECT_NEAR(2.4876e-4, reader.header().scaleX(), 1e-7);
        EXPECT_DOUBLE_EQ(.01, reader.header().scaleY());
        EXPECT_NEAR(.99999, reader.header().scaleZ(), 1e-5);
        PointViewSet viewSet = reader.execute(readTable);
        EXPECT_EQ(viewSet.size(), 1u);
        view = *viewSet.begin();
        EXPECT_EQ(view->size(), 3u);
        EXPECT_NEAR(25.00, view->getFieldAs<double>(Id::X, 0), .0001);
        EXPECT_NEAR(74529.00, view->getFieldAs<double>(Id::X, 1), .001);
        EXPECT_NEAR(534252.35, view->getFieldAs<double>(Id::X, 2), .0001);
    }
    FileUtils::deleteFile(inname1);
    FileUtils::deleteFile(inname2);
}
// Reads the input file, runs the points through a PCL MovingLeastSquares
// smoothing block (configured via an inline JSON pipeline), and writes the
// result, reporting progress through a shell-script or heartbeat callback.
// Returns 0 on success.
int SmoothKernel::execute()
{
    PointContext ctx;

    Options readerOptions;
    readerOptions.add("filename", m_inputFile);
    readerOptions.add("debug", isDebug());
    readerOptions.add("verbose", getVerboseLevel());

    std::unique_ptr<Stage> readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on reader stage only to grab input
    // PointBufferSet, this makes the input PointBuffer available to both the
    // processing pipeline and the visualizer
    readerStage->prepare(ctx);
    PointBufferSet pbSetIn = readerStage->execute(ctx);

    // the input PointBufferSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointBufferPtr input_buffer = *pbSetIn.begin();
    BufferReader bufferReader;
    bufferReader.setOptions(readerOptions);
    bufferReader.addBuffer(input_buffer);

    // Build the JSON pipeline description consumed by the PCLBlock filter.
    Options smoothOptions;
    std::ostringstream ss;
    ss << "{";
    ss << " \"pipeline\": {";
    ss << " \"filters\": [{";
    ss << " \"name\": \"MovingLeastSquares\"";
    ss << " }]";
    ss << " }";
    ss << "}";
    std::string json = ss.str();
    smoothOptions.add("json", json);
    smoothOptions.add("debug", isDebug());
    smoothOptions.add("verbose", getVerboseLevel());

    std::unique_ptr<Stage> smoothStage(new filters::PCLBlock());
    smoothStage->setOptions(smoothOptions);
    smoothStage->setInput(&bufferReader);

    Options writerOptions;
    writerOptions.add("filename", m_outputFile);
    setCommonOptions(writerOptions);

    WriterPtr writer(KernelSupport::makeWriter(m_outputFile, smoothStage.get()));
    writer->setOptions(writerOptions);

    // Progress reporting: shell script if one was configured, else heartbeat.
    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback = cmd.size() ?
        (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();
    writer->setUserCallback(callback);

    // Apply any per-stage extra options supplied on the command line.
    std::map<std::string, Options> extra_opts = getExtraStageOptions();
    std::map<std::string, Options>::iterator pi;
    for (pi = extra_opts.begin(); pi != extra_opts.end(); ++pi)
    {
        std::string name = pi->first;
        Options options = pi->second;
        std::vector<Stage*> stages = writer->findStage(name);
        std::vector<Stage*>::iterator s;
        for (s = stages.begin(); s != stages.end(); ++s)
        {
            Options opts = (*s)->getOptions();
            std::vector<Option>::iterator o;
            for (o = options.getOptions().begin(); o != options.getOptions().end(); ++o)
                opts.add(*o);
            (*s)->setOptions(opts);
        }
    }

    writer->prepare(ctx);

    // process the data, grabbing the PointBufferSet for visualization of the
    // resulting PointBuffer
    PointBufferSet pbSetOut = writer->execute(ctx);

    if (isVisualize())
        visualize(*pbSetOut.begin());
    //visualize(*pbSetIn.begin(), *pbSetOut.begin());

    return 0;
}
// Runs Coherent Point Drift registration between the two input files
// (m_filex fixed, m_filey moving) using either the rigid or nonrigid
// method, then writes the registered points plus the per-point displacement
// (stored in the *Velocity dimensions) to the output.  Returns 0 on
// success; throws pdal_error on empty input or an unknown method.
int CpdKernel::execute()
{
    PointTable tableX;
    PointTable tableY;

    cpd::Matrix X = readFile(m_filex);
    cpd::Matrix Y = readFile(m_filey);

    if (X.rows() == 0 || Y.rows() == 0)
    {
        throw pdal_error("No points to process.");
    }

    cpd::Matrix result;
    if (m_method == "rigid")
    {
        cpd::Rigid rigid;
        rigid
            .set_tolerance(m_tolerance)
            .set_max_iterations(m_max_it)
            .set_outlier_weight(m_outliers);
        rigid
            .no_reflections(m_no_reflections)
            .allow_scaling(m_allow_scaling);
        // A positive sigma2 overrides the library's own bandwidth estimate.
        if (m_sigma2 > 0)
        {
            result = rigid.compute(X, Y, m_sigma2).points;
        }
        else
        {
            result = rigid.compute(X, Y).points;
        }
    }
    else if (m_method == "nonrigid")
    {
        cpd::Nonrigid nonrigid;
        nonrigid
            .set_tolerance(m_tolerance)
            .set_max_iterations(m_max_it)
            .set_outlier_weight(m_outliers);
        nonrigid
            .set_beta(m_beta)
            .set_lambda(m_lambda);
        if (m_sigma2 > 0)
        {
            result = nonrigid.compute(X, Y, m_sigma2).points;
        }
        else
        {
            result = nonrigid.compute(X, Y).points;
        }
    }
    else
    {
        std::stringstream ss;
        ss << "Invalid cpd method: " << m_method << std::endl;
        throw pdal_error(ss.str());
    }

    // Build the output view: registered coordinates plus the displacement of
    // each moving point, stored in the velocity dimensions.
    PointTable outTable;
    PointLayoutPtr outLayout(outTable.layout());
    outLayout->registerDim(Dimension::Id::X);
    outLayout->registerDim(Dimension::Id::Y);
    outLayout->registerDim(Dimension::Id::Z);
    outLayout->registerDim(Dimension::Id::XVelocity);
    outLayout->registerDim(Dimension::Id::YVelocity);
    outLayout->registerDim(Dimension::Id::ZVelocity);
    PointViewPtr outView(new PointView(outTable));

    size_t M = Y.rows();
    for (size_t i = 0; i < M; ++i)
    {
        outView->setField<double>(Dimension::Id::X, i, result(i, 0));
        outView->setField<double>(Dimension::Id::Y, i, result(i, 1));
        outView->setField<double>(Dimension::Id::Z, i, result(i, 2));
        // displacement = original position - registered position
        outView->setField<double>(Dimension::Id::XVelocity, i, Y(i, 0) - result(i, 0));
        outView->setField<double>(Dimension::Id::YVelocity, i, Y(i, 1) - result(i, 1));
        outView->setField<double>(Dimension::Id::ZVelocity, i, Y(i, 2) - result(i, 2));
    }

    BufferReader reader;
    reader.addView(outView);

    Options writerOpts;
    // NOTE(review): this tests the *reader* driver inferred from the output
    // name against "writers.text" — presumably to detect a text output;
    // confirm inferReaderDriver is the intended call here.
    if (StageFactory::inferReaderDriver(m_output) == "writers.text")
    {
        writerOpts.add("order", "X,Y,Z,XVelocity,YVelocity,ZVelocity");
        writerOpts.add("keep_unspecified", false);
    }
    Stage& writer = makeWriter(m_output, reader, "", writerOpts);
    writer.prepare(outTable);
    writer.execute(outTable);

    return 0;
}
int HeightAboveGroundKernel::execute() { // we require separate contexts for the input and ground files PointContextRef input_ctx; PointContextRef ground_ctx; // because we are appending HeightAboveGround to the input buffer, we must // register it's Dimension input_ctx.registerDim(Dimension::Id::HeightAboveGround); // StageFactory will be used to create required stages StageFactory f; // setup the reader, inferring driver type from the filename std::string reader_driver = f.inferReaderDriver(m_input_file); std::unique_ptr<Reader> input(f.createReader(reader_driver)); Options readerOptions; readerOptions.add("filename", m_input_file); input->setOptions(readerOptions); // go ahead and execute to get the PointBuffer input->prepare(input_ctx); PointBufferSet pbSetInput = input->execute(input_ctx); PointBufferPtr input_buf = *pbSetInput.begin(); PointBufferSet pbSetGround; PointBufferPtr ground_buf; if (m_use_classification) { // the user has indicated that the classification dimension exists, so // we will find all ground returns Option source("source", "import numpy as np\n" "def yow1(ins,outs):\n" " cls = ins['Classification']\n" " keep_classes = [2]\n" " keep = np.equal(cls, keep_classes[0])\n" " outs['Mask'] = keep\n" " return True\n" ); Option module("module", "MyModule"); Option function("function", "yow1"); Options opts; opts.add(source); opts.add(module); opts.add(function); // and create a PointBuffer of only ground returns std::unique_ptr<Filter> pred(f.createFilter("filters.predicate")); pred->setOptions(opts); pred->setInput(input.get()); pred->prepare(ground_ctx); pbSetGround = pred->execute(ground_ctx); ground_buf = *pbSetGround.begin(); } else { // the user has provided a file containing only ground returns, setup // the reader, inferring driver type from the filename std::string ground_driver = f.inferReaderDriver(m_ground_file); std::unique_ptr<Reader> ground(f.createReader(ground_driver)); Options ro; ro.add("filename", m_ground_file); 
ground->setOptions(ro); // go ahead and execute to get the PointBuffer ground->prepare(ground_ctx); pbSetGround = ground->execute(ground_ctx); ground_buf = *pbSetGround.begin(); } typedef pcl::PointXYZ PointT; typedef pcl::PointCloud<PointT> Cloud; typedef Cloud::Ptr CloudPtr; // convert the input PointBuffer to a PointCloud CloudPtr cloud(new Cloud); BOX3D const& bounds = input_buf->calculateBounds(); pclsupport::PDALtoPCD(*input_buf, *cloud, bounds); // convert the ground PointBuffer to a PointCloud CloudPtr cloud_g(new Cloud); // here, we offset the ground cloud by the input bounds so that the two are aligned pclsupport::PDALtoPCD(*ground_buf, *cloud_g, bounds); // create a set of planar coefficients with X=Y=0,Z=1 pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients()); coefficients->values.resize(4); coefficients->values[0] = coefficients->values[1] = 0; coefficients->values[2] = 1.0; coefficients->values[3] = 0; // create the filtering object and project ground returns into xy plane pcl::ProjectInliers<PointT> proj; proj.setModelType(pcl::SACMODEL_PLANE); proj.setInputCloud(cloud_g); proj.setModelCoefficients(coefficients); CloudPtr cloud_projected(new Cloud); proj.filter(*cloud_projected); // setup the KdTree pcl::KdTreeFLANN<PointT> tree; tree.setInputCloud(cloud_projected); // loop over all points in the input cloud, finding the nearest neighbor in // the ground returns (XY plane only), and calculating the difference in z int32_t k = 1; for (size_t idx = 0; idx < cloud->points.size(); ++idx) { // Search for nearesrt neighbor of the query point std::vector<int32_t> neighbors(k); std::vector<float> distances(k); PointT temp_pt = cloud->points[idx]; temp_pt.z = 0.0f; int num_neighbors = tree.nearestKSearch(temp_pt, k, neighbors, distances); double hag = cloud->points[idx].z - cloud_g->points[neighbors[0]].z; input_buf->setField(Dimension::Id::HeightAboveGround, idx, hag); } // populate BufferReader with the input PointBuffer, which now has the 
// HeightAboveGround dimension BufferReader bufferReader; bufferReader.addBuffer(input_buf); // we require that the output be BPF for now, to house our non-standard // dimension Options wo; wo.add("filename", m_output_file); std::unique_ptr<Writer> writer(f.createWriter("writers.bpf")); writer->setOptions(wo); writer->setInput(&bufferReader); writer->prepare(input_ctx); writer->execute(input_ctx); return 0; }
int Ground::execute() { PointContext ctx; Options readerOptions; readerOptions.add<std::string>("filename", m_inputFile); readerOptions.add<bool>("debug", isDebug()); readerOptions.add<boost::uint32_t>("verbose", getVerboseLevel()); std::unique_ptr<Stage> readerStage = makeReader(readerOptions); // go ahead and prepare/execute on reader stage only to grab input // PointBufferSet, this makes the input PointBuffer available to both the // processing pipeline and the visualizer readerStage->prepare(ctx); PointBufferSet pbSetIn = readerStage->execute(ctx); // the input PointBufferSet will be used to populate a BufferReader that is // consumed by the processing pipeline PointBufferPtr input_buffer = *pbSetIn.begin(); BufferReader bufferReader; bufferReader.setOptions(readerOptions); bufferReader.addBuffer(input_buffer); Options groundOptions; std::ostringstream ss; ss << "{"; ss << " \"pipeline\": {"; ss << " \"filters\": [{"; ss << " \"name\": \"ProgressiveMorphologicalFilter\","; ss << " \"setMaxWindowSize\": " << m_maxWindowSize << ","; ss << " \"setSlope\": " << m_slope << ","; ss << " \"setMaxDistance\": " << m_maxDistance << ","; ss << " \"setInitialDistance\": " << m_initialDistance << ","; ss << " \"setCellSize\": " << m_cellSize << ","; ss << " \"setBase\": " << m_base << ","; ss << " \"setExponential\": " << m_exponential; ss << " }]"; ss << " }"; ss << "}"; std::string json = ss.str(); groundOptions.add<std::string>("json", json); groundOptions.add<bool>("debug", isDebug()); groundOptions.add<boost::uint32_t>("verbose", getVerboseLevel()); std::unique_ptr<Stage> groundStage(new filters::PCLBlock()); groundStage->setInput(&bufferReader); groundStage->setOptions(groundOptions); // the PCLBlock groundStage consumes the BufferReader rather than the // readerStage groundStage->setInput(&bufferReader); Options writerOptions; writerOptions.add<std::string>("filename", m_outputFile); setCommonOptions(writerOptions); std::unique_ptr<Writer> 
writer(AppSupport::makeWriter(m_outputFile, groundStage.get())); writer->setOptions(writerOptions); std::vector<std::string> cmd = getProgressShellCommand(); UserCallback *callback = cmd.size() ? (UserCallback *)new ShellScriptCallback(cmd) : (UserCallback *)new HeartbeatCallback(); writer->setUserCallback(callback); for (auto pi: getExtraStageOptions()) { std::string name = pi.first; Options options = pi.second; std::vector<Stage*> stages = writer->findStage(name); for (auto s: stages) { Options opts = s->getOptions(); for (auto o: options.getOptions()) opts.add(o); s->setOptions(opts); } } writer->prepare(ctx); // process the data, grabbing the PointBufferSet for visualization of the // resulting PointBuffer PointBufferSet pbSetOut = writer->execute(ctx); if (isVisualize()) visualize(*pbSetOut.begin()); //visualize(*pbSetIn.begin(), *pbSetOut.begin()); return 0; }
// Reads the input file, sorts the points via the Morton-order filter, and
// writes the result with optional compression / metadata forwarding and a
// progress callback.  Returns 0 on success.
int SortKernel::execute()
{
    PointTable table;

    Options readerOptions;
    readerOptions.add("filename", m_inputFile);
    readerOptions.add("debug", isDebug());
    readerOptions.add("verbose", getVerboseLevel());

    Stage& readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on reader stage only to grab input
    // PointViewSet, this makes the input PointView available to both the
    // processing pipeline and the visualizer
    readerStage.prepare(table);
    PointViewSet viewSetIn = readerStage.execute(table);

    // the input PointViewSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointViewPtr inView = *viewSetIn.begin();
    BufferReader bufferReader;
    bufferReader.setOptions(readerOptions);
    bufferReader.addView(inView);

    Options sortOptions;
    sortOptions.add<bool>("debug", isDebug());
    sortOptions.add<uint32_t>("verbose", getVerboseLevel());

    StageFactory f;
    Stage& sortStage = ownStage(f.createStage("filters.mortonorder"));
    sortStage.setInput(bufferReader);
    sortStage.setOptions(sortOptions);

    Options writerOptions;
    writerOptions.add("filename", m_outputFile);
    setCommonOptions(writerOptions);
    if (m_bCompress)
        writerOptions.add("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);

    // Progress reporting: shell script if one was configured, else heartbeat.
    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback = cmd.size() ?
        (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();

    Stage& writer = makeWriter(m_outputFile, sortStage);

    // Some options are inferred by makeWriter based on filename
    // (compression, driver type, etc).
    writer.setOptions(writerOptions + writer.getOptions());
    writer.setUserCallback(callback);

    // Apply any per-stage extra options supplied on the command line.
    for (const auto& pi : getExtraStageOptions())
    {
        std::string name = pi.first;
        Options options = pi.second;
        //ABELL - Huh?
        std::vector<Stage *> stages = writer.findStage(name);
        for (const auto& s : stages)
        {
            Options opts = s->getOptions();
            for (const auto& o : options.getOptions())
                opts.add(o);
            s->setOptions(opts);
        }
    }

    writer.prepare(table);

    // process the data, grabbing the PointViewSet for visualization of the
    PointViewSet viewSetOut = writer.execute(table);

    if (isVisualize())
        visualize(*viewSetOut.begin());

    return 0;
}
// Reads the input file, runs it through the PCLBlock filter driven by the
// user-supplied PCL pipeline file (m_pclFile), and writes the result with
// optional compression / metadata forwarding and a progress callback.
// Returns 0 on success.
int PCLKernel::execute()
{
    PointContext ctx;

    Options readerOptions;
    readerOptions.add<std::string>("filename", m_inputFile);
    readerOptions.add<bool>("debug", isDebug());
    readerOptions.add<uint32_t>("verbose", getVerboseLevel());

    std::unique_ptr<Stage> readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on reader stage only to grab input
    // PointBufferSet, this makes the input PointBuffer available to both the
    // processing pipeline and the visualizer
    readerStage->prepare(ctx);
    PointBufferSet pbSetIn = readerStage->execute(ctx);

    // the input PointBufferSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointBufferPtr input_buffer = *pbSetIn.begin();
    BufferReader bufferReader;
    bufferReader.addBuffer(input_buffer);

    Options pclOptions;
    pclOptions.add<std::string>("filename", m_pclFile);
    pclOptions.add<bool>("debug", isDebug());
    pclOptions.add<uint32_t>("verbose", getVerboseLevel());

    std::unique_ptr<Stage> pclStage(new filters::PCLBlock());
    pclStage->setInput(&bufferReader);
    pclStage->setOptions(pclOptions);
    // the PCLBlock stage consumes the BufferReader rather than the
    // readerStage

    Options writerOptions;
    writerOptions.add<std::string>("filename", m_outputFile);
    setCommonOptions(writerOptions);
    if (m_bCompress)
        writerOptions.add<bool>("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);

    // Progress reporting: shell script if one was configured, else heartbeat.
    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback = cmd.size() ?
        (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();

    WriterPtr writer(KernelSupport::makeWriter(m_outputFile, pclStage.get()));

    // Some options are inferred by makeWriter based on filename
    // (compression, driver type, etc).
    writer->setOptions(writerOptions+writer->getOptions());
    writer->setUserCallback(callback);

    // Apply any per-stage extra options supplied on the command line.
    for (const auto& pi : getExtraStageOptions())
    {
        std::string name = pi.first;
        Options options = pi.second;
        std::vector<Stage*> stages = writer->findStage(name);
        for (const auto& s : stages)
        {
            Options opts = s->getOptions();
            for (const auto& o : options.getOptions())
                opts.add(o);
            s->setOptions(opts);
        }
    }

    writer->prepare(ctx);

    // process the data, grabbing the PointBufferSet for visualization of the
    // resulting PointBuffer
    PointBufferSet pbSetOut = writer->execute(ctx);

    if (isVisualize())
        visualize(*pbSetOut.begin());
    //visualize(*pbSetIn.begin(), *pbSetOut.begin());

    return 0;
}