// Locate a descendant node addressed by a colon-separated path of names
// (e.g. "a:b:c").  An empty path returns this node; if no descendant
// matches, an empty MetadataNode is returned.
MetadataNode findChild(std::string s) const
{
    // Strip the leading path component from 'path' and return it,
    // leaving the remainder (possibly empty) in 'path'.
    auto takeFirst = [](std::string& path) -> std::string
    {
        std::string head;
        size_t colon = path.find(':');
        if (colon == std::string::npos)
        {
            head = path;
            path.clear();
        }
        else
        {
            head = path.substr(0, colon);
            path = (colon == path.size() - 1) ? "" : path.substr(colon + 1);
        }
        return head;
    };

    if (s.empty())
        return *this;
    std::string lname = takeFirst(s);
    // Several children may share the name; return the first subtree that
    // resolves the rest of the path.
    for (auto& candidate : children(lname))
    {
        MetadataNode hit = candidate.findChild(s);
        if (!hit.empty())
            return hit;
    }
    return MetadataNode();
}
// Build a metadata tree holding one "delta" entry per source point: the
// point's id plus, for each requested dimension, the difference between
// the source value and the value of its nearest candidate neighbor.
MetadataNode DeltaKernel::dumpDetail(PointViewPtr& srcView,
    PointViewPtr& candView, KD3Index& index, DimIndexMap& dims)
{
    MetadataNode root;

    for (PointId id = 0; id < srcView->size(); ++id)
    {
        double x = srcView->getFieldAs<double>(Dimension::Id::X, id);
        double y = srcView->getFieldAs<double>(Dimension::Id::Y, id);
        double z = srcView->getFieldAs<double>(Dimension::Id::Z, id);

        // Nearest candidate point in 3D.
        PointId candId = index.neighbor(x, y, z);

        MetadataNode delta = root.add("delta");
        delta.add("i", id);
        for (auto& entry : dims)
        {
            DimIndex& d = entry.second;
            double srcVal = srcView->getFieldAs<double>(d.m_srcId, id);
            double candVal = candView->getFieldAs<double>(d.m_candId, candId);
            delta.add(d.m_name, srcVal - candVal);
        }
    }
    return root;
}
// Register 'moving' against 'fixed' using PCL's iterative closest point,
// record the resulting transform, convergence flag and fitness score in
// this stage's metadata, then write the aligned coordinates back into the
// moving view and return it.
PointViewPtr IcpFilter::icp(PointViewPtr fixed, PointViewPtr moving) const
{
    using Point = pcl::PointXYZ;
    using Cloud = pcl::PointCloud<Point>;

    Cloud::Ptr fixedCloud(new Cloud());
    pclsupport::PDALtoPCD(fixed, *fixedCloud);
    Cloud::Ptr movingCloud(new Cloud());
    pclsupport::PDALtoPCD(moving, *movingCloud);

    pcl::IterativeClosestPoint<Point, Point> icp;
    icp.setInputSource(movingCloud);
    icp.setInputTarget(fixedCloud);
    Cloud aligned;
    icp.align(aligned);

    MetadataNode root = getMetadata();
    // I couldn't figure out the template-fu to get
    // `MetadataNodeImpl::setValue` to work for all Eigen matrices with one
    // function, so I'm just brute-forcing the cast for now.
    root.add("transform",
        Eigen::MatrixXd(icp.getFinalTransformation().cast<double>()));
    root.add("converged", icp.hasConverged());
    root.add("fitness", icp.getFitnessScore());

    // ICP keeps a 1:1 correspondence between input and aligned points.
    assert(moving->size() == aligned.points.size());
    for (PointId i = 0; i < moving->size(); ++i)
    {
        const Point& p = aligned.points[i];
        moving->setField(Dimension::Id::X, i, p.x);
        moving->setField(Dimension::Id::Y, i, p.y);
        moving->setField(Dimension::Id::Z, i, p.z);
    }
    return moving;
}
// Make sure that we can forward the LAS_Spec/3 VLR TEST(LasWriterTest, forward_spec_3) { PointTable table; std::string infile(Support::datapath("las/spec_3.las")); std::string outfile(Support::temppath("out.las")); // remove file from earlier run, if needed FileUtils::deleteFile(outfile); Options readerOpts; readerOpts.add("filename", infile); Options writerOpts; writerOpts.add("forward", "all,vlr"); writerOpts.add("filename", outfile); LasReader reader; reader.setOptions(readerOpts); LasWriter writer; writer.setOptions(writerOpts); writer.setInput(reader); writer.prepare(table); writer.execute(table); PointTable t2; Options readerOpts2; readerOpts2.add("filename", outfile); LasReader reader2; reader2.setOptions(readerOpts2); reader2.prepare(t2); reader2.execute(t2); auto pred = [](MetadataNode temp) { auto recPred = [](MetadataNode n) { return n.name() == "record_id" && n.value() == "3"; }; auto userPred = [](MetadataNode n) { return n.name() == "user_id" && n.value() == "LASF_Spec"; }; return Utils::startsWith(temp.name(), "vlr_") && !temp.findChild(recPred).empty() && !temp.findChild(userPred).empty(); }; MetadataNode root = reader2.getMetadata(); MetadataNodeList nodes = root.findChildren(pred); EXPECT_EQ(nodes.size(), 1u); }
bool BpfReader::readUlemFiles() { BpfUlemFile file; while (file.read(m_stream)) { MetadataNode m = m_metadata.add("bundled_file"); m.addEncoded(file.m_filename, (const unsigned char *)file.m_buf.data(), file.m_len); } return (bool)m_stream; }
// Run filters.stats over constant-mode faux data and check the
// per-dimension summary nodes (average/minimum/maximum/count) exposed in
// the filter metadata.
TEST(Stats, metadata)
{
    BOX3D bounds(1.0, 2.0, 3.0, 101.0, 102.0, 103.0);
    Options ops;
    ops.add("bounds", bounds);
    ops.add("count", 1000);
    ops.add("mode", "constant");

    StageFactory f;
    std::unique_ptr<Stage> reader(f.createStage("readers.faux"));
    EXPECT_TRUE(reader.get());
    reader->setOptions(ops);

    // Only X and Z requested; the sloppy whitespace/commas are deliberate.
    Options filterOps;
    filterOps.add("dimensions", " , X, Z ");

    StatsFilter filter;
    filter.setInput(*reader);
    filter.setOptions(filterOps);

    PointTable table;
    filter.prepare(table);
    filter.execute(table);

    MetadataNode m = filter.getMetadata();
    std::vector<MetadataNode> children = m.children("statistic");

    // Locate a descendant whose name/value match the given pair.
    auto findNode = [](MetadataNode node, const std::string name,
        const std::string val)
    {
        auto sameNameVal = [name, val](MetadataNode n)
            { return (n.name() == name && n.value() == val); };
        return node.find(sameNameVal);
    };

    // Constant mode: every statistic of a dimension equals its minimum
    // bound, and all 1000 points are counted.
    auto checkStats = [](MetadataNode& stat, double expected)
    {
        EXPECT_DOUBLE_EQ(stat.findChild("average").value<double>(), expected);
        EXPECT_DOUBLE_EQ(stat.findChild("minimum").value<double>(), expected);
        EXPECT_DOUBLE_EQ(stat.findChild("maximum").value<double>(), expected);
        EXPECT_DOUBLE_EQ(stat.findChild("count").value<double>(), 1000.0);
    };

    for (auto& stat : children)
    {
        if (findNode(stat, "name", "X").valid())
            checkStats(stat, 1.0);
        if (findNode(stat, "name", "Z").valid())
            checkStats(stat, 3.0);
    }
}
// Depth-first search for the first node (this node included) satisfying
// predicate 'p'.  Returns an empty MetadataNode when nothing matches.
MetadataNode find(PREDICATE p) const
{
    if (p(*this))
        return *this;
    for (auto& child : children())
    {
        MetadataNode hit = child.find(p);
        if (!hit.empty())
            return hit;
    }
    return MetadataNode();
}
// Gather tile-index information for one input file: a boundary polygon
// (quick bounding box, or hexbin-derived when exact boundaries are
// wanted), the spatial reference as WKT, and filesystem timestamps.
TIndexKernel::FileInfo TIndexKernel::getFileInfo(KernelFactory& factory,
    const std::string& filename)
{
    FileInfo fileInfo;

    PipelineManager manager;
    manager.commonOptions() = m_manager.commonOptions();
    manager.stageOptions() = m_manager.stageOptions();

    // Need to make sure options get set.
    Stage& reader = manager.makeReader(filename, "");

    if (m_fastBoundary)
    {
        // Cheap path: the reader's preview supplies the bounding box,
        // which we emit as a closed five-vertex WKT polygon.
        QuickInfo qi = reader.preview();

        std::stringstream wkt;
        wkt << "POLYGON ((";
        wkt << qi.m_bounds.minx << " " << qi.m_bounds.miny;
        wkt << ", " << qi.m_bounds.maxx << " " << qi.m_bounds.miny;
        wkt << ", " << qi.m_bounds.maxx << " " << qi.m_bounds.maxy;
        wkt << ", " << qi.m_bounds.minx << " " << qi.m_bounds.maxy;
        wkt << ", " << qi.m_bounds.minx << " " << qi.m_bounds.miny;
        wkt << "))";
        fileInfo.m_boundary = wkt.str();
        if (!qi.m_srs.empty())
            fileInfo.m_srs = qi.m_srs.getWKT();
    }
    else
    {
        // Full read: run the points through filters.hexbin and pick up
        // the boundary it stores in the table metadata.
        Stage& hexer = manager.makeFilter("filters.hexbin", reader);

        PointTable table;
        hexer.prepare(table);
        PointViewSet set = hexer.execute(table);

        MetadataNode boundaryNode =
            table.metadata().findChild("filters.hexbin:boundary");
        fileInfo.m_boundary = boundaryNode.value();

        PointViewPtr v = *set.begin();
        if (!v->spatialReference().empty())
            fileInfo.m_srs = v->spatialReference().getWKT();
    }

    FileUtils::fileTimes(filename, &fileInfo.m_ctime, &fileInfo.m_mtime);
    fileInfo.m_filename = filename;
    return fileInfo;
}
// Verify that a Python filter script can publish a 'metadata' global
// (a dict tree) and that the entries surface under the filters.python
// node of the table metadata.
TEST_F(PythonFilterTest, metadata)
{
    StageFactory f;

    // Ten synthetic ramp points in the unit cube.
    BOX3D bounds(0.0, 0.0, 0.0, 1.0, 1.0, 1.0);
    Options ops;
    ops.add("bounds", bounds);
    ops.add("count", 10);
    ops.add("mode", "ramp");

    FauxReader reader;
    reader.setOptions(ops);

    // The script assigns a dict to the global 'metadata'; the filter is
    // expected to translate it into MetadataNode entries.
    Option source("source", "import numpy\n" "import sys\n" "import redirector\n" "def myfunc(ins,outs):\n" " global metadata\n" " #print('before', globals(), file=sys.stderr,)\n" " metadata = {'name': 'root', 'value': 'a string', 'type': 'string', 'description': 'a description', 'children': [{'name': 'filters.python', 'value': 52, 'type': 'integer', 'description': 'a filter description', 'children': []}, {'name': 'readers.faux', 'value': 'another string', 'type': 'string', 'description': 'a reader description', 'children': []}]}\n" " # print ('schema', schema, file=sys.stderr,)\n" " return True\n" );
    Option module("module", "MyModule");
    Option function("function", "myfunc");
    Options opts;
    opts.add(source);
    opts.add(module);
    opts.add(function);

    Stage* filter(f.createStage("filters.python"));
    filter->setOptions(opts);
    filter->setInput(reader);

    PointTable table;
    filter->prepare(table);
    PointViewSet viewSet = filter->execute(table);
    EXPECT_EQ(viewSet.size(), 1u);
    PointViewPtr view = *viewSet.begin();
    PointLayoutPtr layout(table.layout());

    // Three children are expected because the dict's 'children' entries
    // are attached to the parent node before the 'root' entry itself --
    // NOTE(review): this relies on the flattening behavior of the
    // filter's addMetadata(); confirm against plugins/python if it
    // changes.
    MetadataNode m = table.metadata();
    m = m.findChild("filters.python");
    MetadataNodeList l = m.children();
    EXPECT_EQ(l.size(), 3u);
    EXPECT_EQ(l[0].name(), "filters.python");
    EXPECT_EQ(l[0].value(), "52");
    EXPECT_EQ(l[0].description(), "a filter description");
}
// Round-trip an XML schema (plus attached metadata) through
// serialization and re-parsing, checking that dimension names and types
// survive the trip.
TEST(XMLSchemaTest, copy)
{
    using namespace pdal;

    std::string xml = ReadXML(TestConfig::dataPath() +
        "../../schemas/16-dim-schema.xml");
    std::string xsd = ReadXML(TestConfig::dataPath() + "../../schemas/LAS.xsd");
    XMLSchema s1(xml, xsd);

    // Register every schema dimension with a layout and remember the id.
    PointTable table;
    XMLDimList dims = s1.xmlDims();
    for (XMLDim& d : dims)
    {
        Dimension::Id id = table.layout()->registerOrAssignDim(
            d.m_name, d.m_dimType.m_type);
        s1.setId(d.m_name, id);
    }

    // Build a small metadata tree to carry along with the schema.
    MetadataNode m;
    MetadataNode m1 = m.add("m1", 1u);
    MetadataNode m2 = m.add("m2", 1);
    MetadataNode m1prime = m.add("m1prime", "Some other metadata");
    m1.add("uuid", Uuid());

    // Serialize and parse back.
    XMLSchema s2(s1.xmlDims(), m);
    std::string xml_output = s2.xml();
    XMLSchema s3(xml_output, xsd);
    XMLDimList dims3 = s3.xmlDims();

    EXPECT_EQ(dims.size(), dims3.size());

    auto di1 = dims.begin();
    auto di3 = dims3.begin();
    while (di1 != dims.end() && di3 != dims3.end())
    {
        EXPECT_EQ(di1->m_name, di3->m_name);
        EXPECT_EQ(di1->m_dimType.m_type, di3->m_dimType.m_type);
        ++di1;
        ++di3;
    }
}
// Serialize a metadata tree as JSON.  A nameless root dumps its subnodes
// as a bare object, an array-kind node becomes a JSON array, and any
// other node is wrapped in a single-entry object.
void toJSON(const MetadataNode& m, std::ostream& o)
{
    if (m.name().empty())
    {
        pdal::subnodesToJSON(m, o, 0);
    }
    else if (m.kind() == MetadataType::Array)
    {
        pdal::arrayToJSON(m.children(), o, 0);
    }
    else
    {
        o << "{" << std::endl;
        pdal::toJSON(m, o, 1);
        o << std::endl << "}";
    }
    o << std::endl;
}
/// Search for metadata associated with the provided recordId and userId. /// \param node - Top-level node to use for metadata search. /// \param recordId - Record ID to match. /// \param userId - User ID to match. MetadataNode LasWriter::findVlrMetadata(MetadataNode node, uint16_t recordId, const std::string& userId) { std::string sRecordId = std::to_string(recordId); // Find a node whose name starts with vlr and that has child nodes // with the name and recordId we're looking for. auto pred = [sRecordId,userId](MetadataNode n) { auto recPred = [sRecordId](MetadataNode n) { return n.name() == "record_id" && n.value() == sRecordId; }; auto userPred = [userId](MetadataNode n) { return n.name() == "user_id" && n.value() == userId; }; return (boost::algorithm::istarts_with(n.name(), "vlr") && !n.findChild(recPred).empty() && !n.findChild(userPred).empty()); }; return node.find(pred); }
// Convert a Python list of 5-tuples (name, value, type, description,
// children) into children of metadata node 'm', recursing into each
// tuple's child list.  Entries that are not well-formed 5-tuples are
// skipped; a non-list argument is ignored entirely.
void addMetadata(PyObject *list, MetadataNode m)
{
    if (!PyList_Check(list))
        return;

    Py_ssize_t count = PyList_Size(list);
    for (Py_ssize_t i = 0; i < count; ++i)
    {
        PyObject *tuple = PyList_GetItem(list, i);
        if (!PyTuple_Check(tuple) || PyTuple_Size(tuple) != 5)
            continue;

        std::string name = readPythonString(tuple, 0);
        std::string value = readPythonString(tuple, 1);
        std::string type = readPythonString(tuple, 2);
        // Fall back to inferring the type from the value's text.
        if (type.empty())
            type = Metadata::inferType(value);
        std::string description = readPythonString(tuple, 3);
        PyObject *submeta = PyTuple_GetItem(tuple, 4);

        MetadataNode child = m.addWithType(name, value, type, description);
        if (submeta)
            addMetadata(submeta, child);
    }
}
// Convert a Python dict ({name, value, type, description, children})
// into PDAL metadata attached under node 'm'.
//
// NOTE(review): the ordering here is unusual but appears to be relied
// upon by the python-filter metadata test: each entry of 'children' is
// added directly to the PARENT node 'm' (not to 'child'), and the node
// for this dict itself is created only AFTER its children -- and only
// when a 'children' key is present at all.  A dict lacking a 'children'
// key is therefore never added.  Confirm this flattening is intentional
// before restructuring.
void addMetadata(PyObject *dict, MetadataNode m)
{
    if (! dict)
    {
        return;
    }

    if (!PyDict_Check(dict) )
        throw pdal::pdal_error("'metadata' member must be a dictionary!");

    std::string name = readPythonString(dict, "name");
    std::string value = readPythonString(dict, "value");
    std::string type = readPythonString(dict, "type");
    // Infer the type from the value's text when none was supplied.
    if (type.empty())
        type = Metadata::inferType(value);
    std::string description = readPythonString(dict, "description");
    PyObject *submeta = PyDict_GetItemString(dict, "children");
    if (submeta)
    {
        if (!PyList_Check(submeta))
            throw pdal::pdal_error("'children' metadata member must be a list!");

        // Recurse: children become siblings on the parent node.
        for (Py_ssize_t i = 0; i < PyList_Size(submeta); ++i)
        {
            PyObject* p = PyList_GetItem(submeta, i);
            addMetadata(p, m);
        }
        MetadataNode child = m.addWithType(name, value, type, description);
    }
}
// Describe a table's point layout as metadata: one "dimensions" list
// entry per dimension, each recording its name, base type name and size
// in bytes.
inline MetadataNode toMetadata(PointTableRef table)
{
    const PointLayoutPtr layout(table.layout());

    MetadataNode root;
    for (const auto& id : layout->dims())
    {
        MetadataNode dimNode("dimensions");
        dimNode.add("name", layout->dimName(id));
        Dimension::Type::Enum t = layout->dimType(id);
        dimNode.add("type", Dimension::toName(Dimension::base(t)));
        dimNode.add("size", layout->dimSize(id));
        root.addList(dimNode);
    }
    return root;
}
// Dump the entire view to metadata: one child node per point (named by
// its index), each holding every dimension's value read as a double.
MetadataNode PointView::toMetadata() const
{
    MetadataNode node;

    const Dimension::IdList& dims = layout()->dims();
    for (PointId idx = 0; idx < size(); ++idx)
    {
        MetadataNode pointnode = node.add(std::to_string(idx));
        for (const auto& dimId : dims)
        {
            double v = getFieldAs<double>(dimId, idx);
            pointnode.add(layout()->dimName(dimId), v);
        }
    }
    return node;
}
// Fetch the block schema from the SDO_PC object, register its dimensions
// with the layout, note whether blocks are lazperf-compressed, and
// optionally dump the schema XML to the configured file.
void OciReader::addDimensions(PointLayoutPtr layout)
{
    log()->get(LogLevel::Debug) << "Fetching schema from SDO_PC object" <<
        std::endl;

    XMLSchema schema = fetchSchema(m_stmt, m_block);
    loadSchema(layout, schema);

    MetadataNode comp = schema.getMetadata().findChild("compression");
    m_compression = (comp.value() == "lazperf");

    if (m_schemaFile.size())
    {
        std::string xmlText = schema.xml();
        std::ostream *schemaOut = Utils::createFile(m_schemaFile);
        schemaOut->write(xmlText.c_str(), xmlText.size());
        FileUtils::closeFile(schemaOut);
    }
}
// Dump a point view to metadata: one child node per point (named by its
// index), each holding every dimension's value read as a double.
inline MetadataNode toMetadata(const PointViewPtr view)
{
    MetadataNode node;

    const Dimension::IdList& dims = view->dims();
    for (PointId idx = 0; idx < view->size(); ++idx)
    {
        MetadataNode pointnode = node.add(std::to_string(idx));
        for (const auto& dimId : dims)
        {
            double v = view->getFieldAs<double>(dimId, idx);
            pointnode.add(Dimension::name(dimId), v);
        }
    }
    return node;
}
// Verify that VLRs supplied via the writer's "vlrs" option (a JSON array
// of {description, record_id, user_id, data}) are written to the file
// and surface as vlr_* metadata nodes when the file is read back.
TEST(LasWriterTest, pdal_add_vlr)
{
    PointTable table;

    std::string infile(Support::datapath("las/1.2-with-color.las"));
    std::string outfile(Support::temppath("simple.las"));

    // remove file from earlier run, if needed
    FileUtils::deleteFile(outfile);

    Options readerOpts;
    readerOpts.add("filename", infile);

    // Two user VLRs; the "data" fields are base64 payloads.
    std::string vlr( " [ { \"description\": \"A description under 32 bytes\", \"record_id\": 42, \"user_id\": \"hobu\", \"data\": \"dGhpcyBpcyBzb21lIHRleHQ=\" }, { \"description\": \"A description under 32 bytes\", \"record_id\": 43, \"user_id\": \"hobu\", \"data\": \"dGhpcyBpcyBzb21lIG1vcmUgdGV4dA==\" } ]");

    Options writerOpts;
    writerOpts.add("vlrs", vlr);
    writerOpts.add("filename", outfile);

    LasReader reader;
    reader.setOptions(readerOpts);

    LasWriter writer;
    writer.setOptions(writerOpts);
    writer.setInput(reader);
    writer.prepare(table);
    writer.execute(table);

    // Re-read the output and count the vlr_* metadata nodes.
    PointTable t2;
    Options readerOpts2;
    readerOpts2.add("filename", outfile);
    LasReader reader2;
    reader2.setOptions(readerOpts2);

    reader2.prepare(t2);
    reader2.execute(t2);

    MetadataNode forward = reader2.getMetadata();

    auto pred = [](MetadataNode temp)
        { return Utils::startsWith(temp.name(), "vlr_"); };
    MetadataNodeList nodes = forward.findChildren(pred);
    EXPECT_EQ(nodes.size(), 2UL);
}
// Assemble the requested info sections under 'root' in a fixed order
// (schema, stats, points, query, metadata, boundary), and as side
// effects optionally write a point-cloud schema file and a pipeline
// file.  Sections assume the pipeline produced exactly one view.
void InfoKernel::dump(MetadataNode& root)
{
    if (m_showSchema)
        root.add(m_manager->pointTable().toMetadata().clone("schema"));

    if (m_PointCloudSchemaOutput.size() > 0)
    {
#ifdef PDAL_HAVE_LIBXML2
        // Write the layout as XML schema to the requested file.
        XMLSchema schema(m_manager->pointTable().layout());

        std::ostream *out = FileUtils::createFile(m_PointCloudSchemaOutput);
        std::string xml(schema.xml());
        out->write(xml.c_str(), xml.size());
        FileUtils::closeFile(out);
#else
        std::cerr << "libxml2 support not enabled, no schema is produced" << std::endl;
#endif
    }
    if (m_showStats)
        root.add(m_statsStage->getMetadata().clone("stats"));

    if (m_pipelineFile.size() > 0)
        PipelineWriter::writePipeline(m_manager->getStage(), m_pipelineFile);

    if (m_pointIndexes.size())
    {
        PointViewSet viewSet = m_manager->views();
        assert(viewSet.size() == 1);
        root.add(dumpPoints(*viewSet.begin()).clone("points"));
    }
    if (m_queryPoint.size())
    {
        PointViewSet viewSet = m_manager->views();
        assert(viewSet.size() == 1);
        root.add(dumpQuery(*viewSet.begin()));
    }
    if (m_showMetadata)
    {
        // If we have a reader cached, this means we
        // weren't reading a pipeline file directly. In that
        // case, use the metadata from the reader (old behavior).
        // Otherwise, return the full metadata of the entire pipeline
        if (m_reader)
            root.add(m_reader->getMetadata().clone("metadata"));
        else
            root.add(m_manager->getMetadata().clone("metadata"));
    }
    if (m_boundary)
    {
        PointViewSet viewSet = m_manager->views();
        assert(viewSet.size() == 1);
        root.add(m_hexbinStage->getMetadata().clone("boundary"));
    }
}
// A file with many VLRs: writing with "forward=vlr" should carry all 388
// forwardable VLRs into the private "lasforward" metadata of the table
// used to read the output back.
TEST(LasWriterTest, forwardvlr)
{
    std::string testfile = Support::temppath("tmp.las");
    FileUtils::deleteFile(testfile);

    LasReader r1;
    Options readerOps1;
    readerOps1.add("filename", Support::datapath("las/lots_of_vlr.las"));
    r1.addOptions(readerOps1);

    LasWriter w;
    Options writerOps;
    writerOps.add("forward", "vlr");
    writerOps.add("filename", testfile);
    w.setInput(r1);
    w.addOptions(writerOps);

    PointTable t;
    w.prepare(t);
    w.execute(t);

    LasReader r;
    Options readerOps;
    readerOps.add("filename", testfile);
    r.setOptions(readerOps);

    PointTable t2;
    r.prepare(t2);
    r.execute(t2);

    MetadataNode forward = t2.privateMetadata("lasforward");
    auto isVlrNode = [](MetadataNode n)
        { return Utils::startsWith(n.name(), "vlr_"); };
    MetadataNodeList nodes = forward.findChildren(isVlrNode);
    EXPECT_EQ(nodes.size(), 388UL);
}
int HausdorffKernel::execute() { PointTable srcTable; PointViewPtr srcView = loadSet(m_sourceFile, srcTable); PointTable candTable; PointViewPtr candView = loadSet(m_candidateFile, candTable); double hausdorff = Utils::computeHausdorff(srcView, candView); MetadataNode root; root.add("filenames", m_sourceFile); root.add("filenames", m_candidateFile); root.add("hausdorff", hausdorff); root.add("pdal_version", pdal::GetFullVersionString()); Utils::toJSON(root, std::cout); return 0; }
// Exercise the writer's "metadata" option group: explicit values are
// used verbatim, a "forward"/"FORWARD" spec falls back to the default
// when no metadata exists, and metadata overrides the default when
// present.
TEST(LasWriterTest, metadata_options)
{
    Options ops;

    Option metadataOp("metadata", "");
    Options metadataOps;
    metadataOps.add("format", 4);
    metadataOps.add("software_id", "MySoftwareId");
    metadataOps.add("system_id", "FORWARD");
    metadataOps.add("minor_version", "forward");
    metadataOp.setOptions(metadataOps);
    ops.add(metadataOp);
    ops.add("filename", Support::temppath("wontgetwritten"));

    LasWriter writer;
    writer.setOptions(ops);

    PointTable table;
    writer.prepare(table);

    // Provide metadata that a forwarded field can pick up.
    MetadataNode m = writer.getMetadata();
    m.add("minor_version", 56);

    // Explicit option values win.
    uint8_t format = (uint8_t)LasTester::headerVal<unsigned>(writer, "format");
    EXPECT_EQ(format, 4u);
    std::string softwareId =
        LasTester::headerVal<std::string>(writer, "software_id");
    EXPECT_EQ(softwareId, "MySoftwareId");

    // Since the option specifies forward and there is not associated
    // metadata, the value should be the default.
    std::string systemId =
        LasTester::headerVal<std::string>(writer, "system_id");
    LasHeader header;
    EXPECT_EQ(systemId, header.getSystemIdentifier());

    // In this case, we should have metadata to override the default.
    uint8_t minorVersion =
        (uint8_t)LasTester::headerVal<unsigned>(writer, "minor_version");
    EXPECT_EQ(minorVersion, 56u);
}
// The sample file carries 390 VLRs; each should surface as a metadata
// node named vlr_<i> with a non-empty value.
TEST(LasReaderTest, test_vlr)
{
    Options ops1;
    ops1.add("filename", Support::datapath("las/lots_of_vlr.las"));

    LasReader reader;
    reader.setOptions(ops1);

    PointTable table;
    reader.prepare(table);
    reader.execute(table);

    MetadataNode root = reader.getMetadata();
    for (size_t i = 0; i < 390; ++i)
    {
        std::string name = "vlr_" + std::to_string(i);
        MetadataNode m = root.findChild(name);
        EXPECT_FALSE(m.value().empty()) << "No node " << i;
    }
}
// Add a string value to the metadata tree.  If the string parses as
// JSON, delegate to the Json::Value overload so structure and native
// types are preserved; otherwise store it as a plain string.
static void add(
        MetadataNode& parent,
        const std::string& name,
        const std::string& value)
{
    Json::Reader jsonReader;
    Json::Value parsed;
    if (jsonReader.parse(value, parsed))
        add(parent, name, parsed);
    else
        parent.add(name, value);
}
// Regression test for three related LAS 1.4 translate issues:
//   #1063 - ClassFlags must survive the round trip,
//   #1064 - global_encoding must be preserved,
//   #1065 - a_srs must set the output SRS.
TEST(LasWriterTest, fix1063_1064_1065)
{
    std::string outfile = Support::temppath("out.las");
    std::string infile = Support::datapath("las/test1_4.las");

    FileUtils::deleteFile(outfile);

    // Translate via the command-line app so writer option plumbing is
    // exercised end to end.
    std::string cmd = "pdal translate --writers.las.forward=all " "--writers.las.a_srs=\"EPSG:4326\" " + infile + " " + outfile;
    std::string output;
    Utils::run_shell_command(Support::binpath(cmd), output);

    Options o;
    o.add("filename", outfile);

    LasReader r;
    r.setOptions(o);

    PointTable t;
    r.prepare(t);
    PointViewSet s = r.execute(t);
    EXPECT_EQ(s.size(), 1u);
    PointViewPtr v = *s.begin();
    EXPECT_EQ(v->size(), 1000u);

    // https://github.com/PDAL/PDAL/issues/1063
    for (PointId idx = 0; idx < v->size(); ++idx)
        EXPECT_EQ(8, v->getFieldAs<int>(Dimension::Id::ClassFlags, idx));

    // https://github.com/PDAL/PDAL/issues/1064
    MetadataNode m = r.getMetadata();
    m = m.findChild("global_encoding");
    EXPECT_EQ(17, m.value<int>());

    // https://github.com/PDAL/PDAL/issues/1065
    SpatialReference ref = v->spatialReference();
    std::string wkt = "GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",\"8901\"]],UNIT[\"degree\",0.0174532925199433,AUTHORITY[\"EPSG\",\"9122\"]],AUTHORITY[\"EPSG\",\"4326\"]]";
    EXPECT_EQ(ref.getWKT(), wkt);
}
// Map a JSON value onto metadata with the matching native type.  Objects
// recurse into a named subnode, arrays add one same-named sibling per
// element, and null becomes an empty string.
static void add(
        MetadataNode& parent,
        const std::string& name,
        const Json::Value& node)
{
    if (node.isNull())
        parent.add(name, "");
    else if (node.isBool())
        parent.add(name, node.asBool());
    else if (node.isInt())
        parent.add(name, node.asInt64());
    else if (node.isUInt())
        parent.add(name, node.asUInt64());
    else if (node.isDouble())
        parent.add(name, node.asDouble());
    else if (node.isString())
        parent.add(name, node.asString());
    else if (node.isObject())
    {
        MetadataNode object = parent.add(name);
        for (const std::string& memberName : node.getMemberNames())
            add(object, memberName, node[memberName]);
    }
    else if (node.isArray())
    {
        for (const Json::Value& item : node)
            add(parent, name, item);
    }
}
// Verify that "ignore_vlr" strips the matching VLRs: with "Merrick"
// ignored, vlr_2 .. vlr_389 must be absent from the reader metadata.
TEST(LasReaderTest, IgnoreVLRs)
{
    PointTable table;

    Options readOps;
    readOps.add("filename", Support::datapath("las/lots_of_vlr.las"));
    readOps.add("ignore_vlr", "Merrick");
    LasReader reader;
    reader.setOptions(readOps);

    reader.prepare(table);
    PointViewSet viewSet = reader.execute(table);

    // First two VLRs are SRS info, the other 388 would be
    // Merrick ones that we want to ignore/remove
    MetadataNode root = reader.getMetadata();
    for (size_t i = 2; i < 390; ++i)
    {
        std::string name("vlr_");
        name += std::to_string(i);
        MetadataNode m = root.findChild(name);
        // Fixed: was EXPECT_FALSE(!m.empty()) with failure messages that
        // read backwards ("No node") -- the test asserts the node is
        // ABSENT, so a failure means it was found.
        EXPECT_TRUE(m.empty()) << "Found node " << i;
        m = m.findChild("data");
        EXPECT_TRUE(m.empty()) << "Found value for node " << i;
    }
}
// Read a block (set of points) from the database. bool OciReader::readOci(Statement stmt, BlockPtr block) { if (!block->fetched()) { if (!stmt->Fetch()) { m_atEnd = true; return false; } block->setFetched(); } // Read the points from the blob in the row. readBlob(stmt, block); XMLSchema *s = findSchema(stmt, block); updateSchema(*s); MetadataNode comp = s->getMetadata().findChild("compression"); m_compression = (comp.value() == "lazperf"); block->reset(); block->clearFetched(); return true; }
/// Set VLRs from metadata for forwarded info, or from option-provided data /// otherwise. void LasWriter::setVlrsFromMetadata() { std::vector<uint8_t> data; for (auto oi = m_optionInfos.begin(); oi != m_optionInfos.end(); ++oi) { VlrOptionInfo& vlrInfo = *oi; if (vlrInfo.m_name == "FORWARD") { MetadataNode m = findVlrMetadata(m_metadata, vlrInfo.m_recordId, vlrInfo.m_userId); if (m.empty()) continue; data = Utils::base64_decode(m.value()); } else data = Utils::base64_decode(vlrInfo.m_value); addVlr(vlrInfo.m_userId, vlrInfo.m_recordId, vlrInfo.m_description, data); } }