TEST_F(PythonFilterTest, add_dimension)
{
    StageFactory f;

    BOX3D bounds(0.0, 0.0, 0.0, 1.0, 1.0, 1.0);

    Options ops;
    ops.add("bounds", bounds);
    ops.add("count", 10);
    ops.add("mode", "ramp");

    FauxReader reader;
    reader.setOptions(ops);

    // Note: the script imports "numpy" (no "np" alias), so the array calls
    // below must use the full module name.
    Option source("source",
        "import numpy\n"
        "def myfunc(ins,outs):\n"
        " outs['AddedIntensity'] = numpy.zeros(ins['X'].size, dtype=numpy.double) + 1\n"
        " outs['AddedPointSourceId'] = numpy.zeros(ins['X'].size, dtype=numpy.double) + 2\n"
        " return True\n"
    );
    Option module("module", "MyModule");
    Option function("function", "myfunc");
    Option intensity("add_dimension", "AddedIntensity");
    Option scanDirection("add_dimension", "AddedPointSourceId");
    Options opts;
    opts.add(source);
    opts.add(module);
    opts.add(function);
    opts.add(intensity);
    opts.add(scanDirection);

    Stage* filter(f.createStage("filters.python"));
    filter->setOptions(opts);
    filter->setInput(reader);

    PointTable table;
    filter->prepare(table);
    PointViewSet viewSet = filter->execute(table);
    EXPECT_EQ(viewSet.size(), 1u);
    PointViewPtr view = *viewSet.begin();

    PointLayoutPtr layout(table.layout());
    Dimension::Id int_id = layout->findDim("AddedIntensity");
    Dimension::Id psid_id = layout->findDim("AddedPointSourceId");
    for (unsigned int i = 0; i < view->size(); ++i)
    {
        EXPECT_EQ(view->getFieldAs<uint16_t>(int_id, i), 1);
        EXPECT_EQ(view->getFieldAs<uint16_t>(psid_id, i), 2);
    }
}
TEST(Stats, metadata)
{
    BOX3D bounds(1.0, 2.0, 3.0, 101.0, 102.0, 103.0);
    Options ops;
    ops.add("bounds", bounds);
    ops.add("count", 1000);
    ops.add("mode", "constant");

    StageFactory f;
    std::unique_ptr<Stage> reader(f.createStage("readers.faux"));
    EXPECT_TRUE(reader.get());
    reader->setOptions(ops);

    Options filterOps;
    filterOps.add("dimensions", " , X, Z ");

    StatsFilter filter;
    filter.setInput(*reader);
    filter.setOptions(filterOps);

    PointTable table;
    filter.prepare(table);
    filter.execute(table);

    MetadataNode m = filter.getMetadata();
    std::vector<MetadataNode> children = m.children("statistic");

    auto findNode = [](MetadataNode m,
        const std::string name, const std::string val)
    {
        auto findNameVal = [name, val](MetadataNode m)
            { return (m.name() == name && m.value() == val); };

        return m.find(findNameVal);
    };

    for (auto mi = children.begin(); mi != children.end(); ++mi)
    {
        if (findNode(*mi, "name", "X").valid())
        {
            EXPECT_DOUBLE_EQ(mi->findChild("average").value<double>(), 1.0);
            EXPECT_DOUBLE_EQ(mi->findChild("minimum").value<double>(), 1.0);
            EXPECT_DOUBLE_EQ(mi->findChild("maximum").value<double>(), 1.0);
            EXPECT_DOUBLE_EQ(mi->findChild("count").value<double>(), 1000.0);
        }
        if (findNode(*mi, "name", "Z").valid())
        {
            EXPECT_DOUBLE_EQ(mi->findChild("average").value<double>(), 3.0);
            EXPECT_DOUBLE_EQ(mi->findChild("minimum").value<double>(), 3.0);
            EXPECT_DOUBLE_EQ(mi->findChild("maximum").value<double>(), 3.0);
            EXPECT_DOUBLE_EQ(mi->findChild("count").value<double>(), 1000.0);
        }
    }
}
TEST_F(PythonFilterTest, metadata)
{
    StageFactory f;

    BOX3D bounds(0.0, 0.0, 0.0, 1.0, 1.0, 1.0);

    Options ops;
    ops.add("bounds", bounds);
    ops.add("count", 10);
    ops.add("mode", "ramp");

    FauxReader reader;
    reader.setOptions(ops);

    Option source("source",
        "import numpy\n"
        "import sys\n"
        "import redirector\n"
        "def myfunc(ins,outs):\n"
        " global metadata\n"
        " #print('before', globals(), file=sys.stderr,)\n"
        " metadata = {'name': 'root', 'value': 'a string', 'type': 'string', 'description': 'a description', 'children': [{'name': 'filters.python', 'value': 52, 'type': 'integer', 'description': 'a filter description', 'children': []}, {'name': 'readers.faux', 'value': 'another string', 'type': 'string', 'description': 'a reader description', 'children': []}]}\n"
        " # print ('schema', schema, file=sys.stderr,)\n"
        " return True\n"
    );
    Option module("module", "MyModule");
    Option function("function", "myfunc");
    Options opts;
    opts.add(source);
    opts.add(module);
    opts.add(function);

    Stage* filter(f.createStage("filters.python"));
    filter->setOptions(opts);
    filter->setInput(reader);

    PointTable table;
    filter->prepare(table);
    PointViewSet viewSet = filter->execute(table);
    EXPECT_EQ(viewSet.size(), 1u);
    PointViewPtr view = *viewSet.begin();
    PointLayoutPtr layout(table.layout());

    MetadataNode m = table.metadata();
    m = m.findChild("filters.python");
    MetadataNodeList l = m.children();
    EXPECT_EQ(l.size(), 3u);
    EXPECT_EQ(l[0].name(), "filters.python");
    EXPECT_EQ(l[0].value(), "52");
    EXPECT_EQ(l[0].description(), "a filter description");
}
TEST(NitfWriterTest, longFtitle)
{
    StageFactory f;

    Stage *r = f.createStage("readers.las");
    Options ro;
    ro.add("filename", Support::datapath("nitf/autzen-utm10.las"));
    r->setOptions(ro);

    Stage *w = f.createStage("writers.nitf");
    Options wo;
    wo.add("filename", "aaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbccccccccccccccccccccccddddddddddddddeeeeeeeeeeee");
    w->setOptions(wo);
    w->setInput(*r);

    PointTable t;
    w->prepare(t);
    EXPECT_THROW(w->execute(t), pdal_error);
}
TEST(OldPCLBlockTests, StatisticalOutliers2)
{
    StageFactory f;

    Options ro;
    ro.add("filename", Support::datapath("autzen/autzen-point-format-3.las"));
    Stage* r(f.createStage("readers.las"));
    EXPECT_TRUE(r);
    r->setOptions(ro);

    Options fo;
    fo.add("method", "statistical");
    fo.add("multiplier", 0.0);
    fo.add("mean_k", 5);
    Stage* outlier(f.createStage("filters.outlier"));
    EXPECT_TRUE(outlier);
    outlier->setOptions(fo);
    outlier->setInput(*r);

    Options rangeOpts;
    rangeOpts.add("limits", "Classification![7:7]");
    Stage* range(f.createStage("filters.range"));
    EXPECT_TRUE(range);
    range->setOptions(rangeOpts);
    range->setInput(*outlier);

    PointTable table;
    range->prepare(table);
    PointViewSet viewSet = range->execute(table);
    EXPECT_EQ(1u, viewSet.size());
    PointViewPtr view = *viewSet.begin();
    EXPECT_EQ(63u, view->size());
}
TEST(IcebridgeReaderTest, testRead)
{
    StageFactory f;
    Stage* reader(f.createStage("readers.icebridge"));
    EXPECT_TRUE(reader);

    Option filename("filename", getFilePath(), "");
    Options options(filename);
    reader->setOptions(options);

    PointTable table;
    reader->prepare(table);
    PointViewSet viewSet = reader->execute(table);
    EXPECT_EQ(viewSet.size(), 1u);
    PointViewPtr view = *viewSet.begin();
    EXPECT_EQ(view->size(), 2u);

    checkPoint(
        *view,
        0,
        141437548,      // time
        82.605319,      // latitude
        -58.593811,     // longitude
        18.678,         // elevation
        2408,           // xmtSig
        181,            // rcvSig
        49.91,          // azimuth
        -4.376,         // pitch
        0.608,          // roll
        2.9,            // gpsPdop
        20.0,           // pulseWidth
        0.0);           // relTime

    checkPoint(
        *view,
        1,
        141437548,      // time
        82.605287,      // latitude
        -58.593811,     // longitude
        18.688,         // elevation
        2642,           // xmtSig
        173,            // rcvSig
        52.006,         // azimuth
        -4.376,         // pitch
        0.609,          // roll
        2.9,            // gpsPdop
        17.0,           // pulseWidth
        0.0);           // relTime
}
void writeData(Orientation orient, bool scaling, bool compression = false)
{
    Options options;
    options.add("capacity", 10000);
    options.add("connection", std::string(connectString));
    options.add("debug", "true");
    options.add("block_table_name", blockTableName);
    options.add("base_table_name", baseTableName);
    options.add("cloud_column_name", "CLOUD");
    options.add("srid", 26910);
    options.add("disable_cloud_trigger", true);
    options.add("store_dimensional_orientation",
        orient == Orientation::DimensionMajor);
    if (scaling)
    {
        options.add("offset_x", "auto");
        options.add("offset_y", "auto");
        options.add("offset_z", "auto");
        options.add("scale_x", 1e-6);
        options.add("scale_y", 1e-6);
        options.add("scale_z", 1e-6);
    }
    if (compression)
        options.add("compression", true);

    PointTable table;

    Options readerOps;
    readerOps.add("filename", Support::datapath("autzen/autzen-utm.las"));

    StageFactory f;

    LasReader reader;
    reader.setOptions(readerOps);

    SplitFilter split;
    split.setInput(reader);

    Stage* writer(f.createStage("writers.oci"));
    EXPECT_TRUE(writer);
    writer->setOptions(options);
    writer->setInput(split);

    writer->prepare(table);
    writer->execute(table);
}
TEST_F(PythonFilterTest, pdalargs)
{
    StageFactory f;

    BOX3D bounds(0.0, 0.0, 0.0, 1.0, 1.0, 1.0);

    Options ops;
    ops.add("bounds", bounds);
    ops.add("count", 10);
    ops.add("mode", "ramp");

    FauxReader reader;
    reader.setOptions(ops);

    Option source("source",
        "import numpy\n"
        "import sys\n"
        "import redirector\n"
        "def myfunc(ins,outs):\n"
        " pdalargs['name']\n"
        "# print ('pdalargs', pdalargs, file=sys.stderr,)\n"
        " return True\n"
    );
    Option module("module", "MyModule");
    Option function("function", "myfunc");
    Option args("pdalargs",
        "{\"name\":\"Howard\",\"something\":42, \"another\": \"True\"}");
    Options opts;
    opts.add(source);
    opts.add(module);
    opts.add(function);
    opts.add(args);

    Stage* filter(f.createStage("filters.python"));
    filter->setOptions(opts);
    filter->setInput(reader);

    PointTable table;
    filter->prepare(table);
    PointViewSet viewSet = filter->execute(table);
    EXPECT_EQ(viewSet.size(), 1u);
    PointViewPtr view = *viewSet.begin();

    // Not throwing anything is success for now
}
TEST(Stats, simple)
{
    BOX3D bounds(1.0, 2.0, 3.0, 101.0, 102.0, 103.0);
    Options ops;
    ops.add("bounds", bounds);
    ops.add("count", 1000);
    ops.add("mode", "constant");

    StageFactory f;
    std::unique_ptr<Stage> reader(f.createStage("readers.faux"));
    EXPECT_TRUE(reader.get());
    reader->setOptions(ops);

    StatsFilter filter;
    filter.setInput(*reader);
    EXPECT_EQ(filter.getName(), "filters.stats");

    PointTable table;
    filter.prepare(table);
    filter.execute(table);

    const stats::Summary& statsX = filter.getStats(Dimension::Id::X);
    const stats::Summary& statsY = filter.getStats(Dimension::Id::Y);
    const stats::Summary& statsZ = filter.getStats(Dimension::Id::Z);

    EXPECT_EQ(statsX.count(), 1000u);
    EXPECT_EQ(statsY.count(), 1000u);
    EXPECT_EQ(statsZ.count(), 1000u);

    EXPECT_FLOAT_EQ(statsX.minimum(), 1.0);
    EXPECT_FLOAT_EQ(statsY.minimum(), 2.0);
    EXPECT_FLOAT_EQ(statsZ.minimum(), 3.0);

    EXPECT_FLOAT_EQ(statsX.maximum(), 1.0);
    EXPECT_FLOAT_EQ(statsY.maximum(), 2.0);
    EXPECT_FLOAT_EQ(statsZ.maximum(), 3.0);

    EXPECT_FLOAT_EQ(statsX.average(), 1.0);
    EXPECT_FLOAT_EQ(statsY.average(), 2.0);
    EXPECT_FLOAT_EQ(statsZ.average(), 3.0);
}
TEST(PredicateFilterTest, PredicateFilterTest_test5)
{
    StageFactory f;
    // test error handling if missing Mask

    BOX3D bounds(0.0, 0.0, 0.0, 2.0, 2.0, 2.0);

    Options readerOpts;
    readerOpts.add("bounds", bounds);
    readerOpts.add("num_points", 1000);
    readerOpts.add("mode", "ramp");

    FauxReader reader;
    reader.setOptions(readerOpts);

    const Option source("source",
        // "Y > 0.5"
        "import numpy as np\n"
        "def yow2(ins,outs):\n"
        " Y = ins['Y']\n"
        " Mask = np.greater(Y, 0.5)\n"
        " #print Mask\n"
        " outs['xxxMaskxxx'] = Mask # deliberately wrong\n"
        " return True\n"
    );
    const Option module("module", "MyModule1");
    const Option function("function", "yow2");
    Options opts;
    opts.add(source);
    opts.add(module);
    opts.add(function);

    std::unique_ptr<Stage> filter(f.createStage("filters.predicate"));
    filter->setOptions(opts);
    filter->setInput(reader);

    PointTable table;
    filter->prepare(table);
    ASSERT_THROW(filter->execute(table), plang::error);
}
TEST_F(PgpointcloudWriterTest, writeXYZ)
{
    if (shouldSkipTests())
    {
        return;
    }

    Options ops = getDbOptions();
    ops.add("output_dims", "X,Y,Z");
    optionsWrite(ops);

    PointTable table;
    StageFactory factory;
    Stage* reader(factory.createStage("readers.pgpointcloud"));
    reader->setOptions(getDbOptions());

    reader->prepare(table);
    Dimension::IdList dims = table.layout()->dims();
    EXPECT_EQ(dims.size(), (size_t)3);
    EXPECT_TRUE(Utils::contains(dims, Dimension::Id::X));
    EXPECT_TRUE(Utils::contains(dims, Dimension::Id::Y));
    EXPECT_TRUE(Utils::contains(dims, Dimension::Id::Z));
}
TEST_F(MatlabWriterTest, findStage)
{
    StageFactory factory;
    Stage* stage(factory.createStage("writers.matlab"));
    EXPECT_TRUE(stage);
}
TEST(NNDistanceTest, kdist)
{
    StageFactory f;
    Stage *reader(f.createStage("readers.faux"));
    Stage *filter(f.createStage("filters.nndistance"));

    // Make a 10x10x10 grid of points.
    Options rOpts;
    rOpts.add("mode", "grid");
    rOpts.add("bounds", "([0, 10],[0,10],[0,10])");
    rOpts.add("count", 1000);
    reader->setOptions(rOpts);

    // Kth distance with k == 3.  Every point in the unit-spaced grid has at
    // least three neighbors at distance 1.
    Options fOpts;
    fOpts.add("mode", "kth");
    fOpts.add("k", 3);
    filter->setOptions(fOpts);
    filter->setInput(*reader);

    PointTable t;
    filter->prepare(t);
    PointViewSet s = filter->execute(t);
    PointViewPtr v = *(s.begin());
    for (PointId i = 0; i < v->size(); ++i)
        EXPECT_EQ(v->getFieldAs<double>(Dimension::Id::NNDistance, i), 1);

    // Kth distance with k == 4
    Options opts2;
    opts2.add("k", 4);
    filter->setOptions(opts2);

    PointTable t2;
    filter->prepare(t2);
    s = filter->execute(t2);
    v = *(s.begin());
    for (PointId i = 0; i < v->size(); ++i)
    {
        double d = v->getFieldAs<double>(Dimension::Id::NNDistance, i);
        double x = v->getFieldAs<double>(Dimension::Id::X, i);
        double y = v->getFieldAs<double>(Dimension::Id::Y, i);
        double z = v->getFieldAs<double>(Dimension::Id::Z, i);

        // A corner point has only three neighbors at distance 1, so its
        // fourth-nearest neighbor lies across a face diagonal at sqrt(2).
        if ((x == 9 || x == 0) && (y == 9 || y == 0) && (z == 9 || z == 0))
            EXPECT_EQ(d, sqrt(2));
    }

    // Test for avg distance.
    Options opts3;
    opts3.add("mode", "avg");
    opts3.add("k", 6);
    filter->setOptions(opts3);

    PointTable t3;
    filter->prepare(t3);
    s = filter->execute(t3);
    v = *(s.begin());
    for (PointId i = 0; i < v->size(); ++i)
    {
        double d = v->getFieldAs<double>(Dimension::Id::NNDistance, i);
        double x = v->getFieldAs<double>(Dimension::Id::X, i);
        double y = v->getFieldAs<double>(Dimension::Id::Y, i);
        double z = v->getFieldAs<double>(Dimension::Id::Z, i);

        int xe = (x == 9 || x == 0) ? 1 : 0;
        int ye = (y == 9 || y == 0) ? 1 : 0;
        int ze = (z == 9 || z == 0) ? 1 : 0;
        bool corner = (xe + ye + ze == 3);
        bool edge = (xe + ye + ze == 2);
        bool face = (xe + ye + ze == 1);

        // Among the six nearest neighbors: a corner point has 3 at distance 1
        // and 3 at sqrt(2); an edge point has 4 at 1 and 2 at sqrt(2); a face
        // point has 5 at 1 and 1 at sqrt(2); an interior point has all 6 at 1.
        if (corner)
            EXPECT_EQ(d, (1 + std::sqrt(2)) / 2.0);
        else if (edge)
            EXPECT_EQ(d, (2 + std::sqrt(2)) / 3.0);
        else if (face)
            EXPECT_EQ(d, (5 + std::sqrt(2)) / 6.0);
        else
            EXPECT_EQ(d, 1);
    }
}
TEST(FerryFilterTest, create)
{
    StageFactory f;
    Stage* filter(f.createStage("filters.ferry"));
    EXPECT_TRUE(filter);
}
TEST_F(PythonFilterTest, PythonFilterTest_modify)
{
    StageFactory f;

    BOX3D bounds(0.0, 0.0, 0.0, 1.0, 1.0, 1.0);

    Options ops;
    ops.add("bounds", bounds);
    ops.add("count", 10);
    ops.add("mode", "ramp");

    FauxReader reader;
    reader.setOptions(ops);

    Option source("source",
        "import numpy as np\n"
        "def myfunc(ins,outs):\n"
        " X = ins['X']\n"
        " Y = ins['Y']\n"
        " Z = ins['Z']\n"
        " X = np.delete(X, (9), axis=0)\n"
        " Y = np.delete(Y, (9), axis=0)\n"
        " Z = np.delete(Z, (9), axis=0)\n"
        " Z = np.append(Z,100)\n"
        " Y = np.append(Y,200)\n"
        "# print (Z)\n"
        "# print (X)\n"
        " outs['Z'] = Z\n"
        " outs['Y'] = Y\n"
        " outs['X'] = X\n"
        "# print (len(X), len(Y), len(Z))\n"
        " return True\n"
    );
    Option module("module", "MyModule");
    Option function("function", "myfunc");
    Options opts;
    opts.add(source);
    opts.add(module);
    opts.add(function);

    Stage* filter(f.createStage("filters.python"));
    filter->setOptions(opts);
    filter->setInput(reader);

    std::unique_ptr<StatsFilter> stats(new StatsFilter);
    stats->setInput(*filter);

    PointTable table;
    stats->prepare(table);
    PointViewSet viewSet = stats->execute(table);
    EXPECT_EQ(viewSet.size(), 1u);
    PointViewPtr view = *viewSet.begin();

    const stats::Summary& statsX = stats->getStats(Dimension::Id::X);
    const stats::Summary& statsY = stats->getStats(Dimension::Id::Y);
    const stats::Summary& statsZ = stats->getStats(Dimension::Id::Z);

    EXPECT_EQ(view->size(), 10u);
    EXPECT_DOUBLE_EQ(statsX.minimum(), 0.0);
    EXPECT_DOUBLE_EQ(statsX.maximum(), 1.0);
    EXPECT_DOUBLE_EQ(statsY.minimum(), 0.0);
    EXPECT_DOUBLE_EQ(statsY.maximum(), 200.0);
    EXPECT_DOUBLE_EQ(statsZ.minimum(), 0.0);
    EXPECT_DOUBLE_EQ(statsZ.maximum(), 100);
}
TEST(PLangTest, log)
{
    // verify we can redirect the stdout inside the python script

    Options reader_opts;
    {
        BOX3D bounds(1.0, 2.0, 3.0, 101.0, 102.0, 103.0);
        Option opt1("bounds", bounds);
        Option opt2("count", 750);
        Option opt3("mode", "constant");

        reader_opts.add(opt1);
        reader_opts.add(opt2);
        reader_opts.add(opt3);

        Option optlog("log", Support::temppath("mylog_three.txt"));
        reader_opts.add(optlog);
    }

    Options xfilter_opts;
    {
        const Option source("source",
            "import numpy as np\n"
            "import sys\n"
            "def xfunc(ins,outs):\n"
            " X = ins['X']\n"
            " print (\"Testing log output through python script.\")\n"
            " X = X + 1.0\n"
            " outs['X'] = X\n"
            " sys.stdout.flush()\n"
            " return True\n"
        );
        const Option module("module", "xModule");
        const Option function("function", "xfunc");
        xfilter_opts.add("log", Support::temppath("mylog_three.txt"));
        xfilter_opts.add(source);
        xfilter_opts.add(module);
        xfilter_opts.add(function);
    }

    StageFactory f;
    {
        FauxReader reader;
        reader.setOptions(reader_opts);

        Stage* xfilter(f.createStage("filters.python"));
        xfilter->setOptions(xfilter_opts);
        xfilter->setInput(reader);

        PointTable table;
        xfilter->prepare(table);
        PointViewSet pvSet = xfilter->execute(table);
        EXPECT_EQ(pvSet.size(), 1u);
        PointViewPtr view = *pvSet.begin();
        EXPECT_EQ(view->size(), 750u);
    }

    bool ok = Support::compare_text_files(
        Support::temppath("mylog_three.txt"),
        Support::datapath("logs/log_py.txt"));

    // TODO: fails on Windows
    // unknown file: error: C++ exception with description
    // "pdalboost::filesystem::remove: The process cannot access the file
    // because it is being used by another process:
    // "C:/projects/pdal/test/data/../temp/mylog_three.txt"" thrown in the
    // test body.
    //if (ok)
    //    FileUtils::deleteFile(Support::temppath("mylog_three.txt"));

    EXPECT_TRUE(ok);
}
TEST_F(PythonFilterTest, PythonFilterTest_test1)
{
    StageFactory f;

    BOX3D bounds(0.0, 0.0, 0.0, 1.0, 1.0, 1.0);

    Options ops;
    ops.add("bounds", bounds);
    ops.add("count", 10);
    ops.add("mode", "ramp");

    FauxReader reader;
    reader.setOptions(ops);

    Option source("source",
        "import numpy as np\n"
        "def myfunc(ins,outs):\n"
        " X = ins['X']\n"
        " Y = ins['Y']\n"
        " Z = ins['Z']\n"
        " #print ins['X']\n"
        " X = X + 10.0\n"
        " # Y: leave as-is, don't export back out\n"
        " # Z: goofiness to make it a numpy array of a constant\n"
        " Z = np.zeros(X.size) + 3.14\n"
        " outs['X'] = X\n"
        " #print outs['X']\n"
        " outs['Z'] = Z\n"
        " return True\n"
    );
    Option module("module", "MyModule");
    Option function("function", "myfunc");
    Options opts;
    opts.add(source);
    opts.add(module);
    opts.add(function);

    Stage* filter(f.createStage("filters.python"));
    filter->setOptions(opts);
    filter->setInput(reader);

    std::unique_ptr<StatsFilter> stats(new StatsFilter);
    stats->setInput(*filter);

    PointTable table;
    stats->prepare(table);
    PointViewSet viewSet = stats->execute(table);
    EXPECT_EQ(viewSet.size(), 1u);
    PointViewPtr view = *viewSet.begin();

    const stats::Summary& statsX = stats->getStats(Dimension::Id::X);
    const stats::Summary& statsY = stats->getStats(Dimension::Id::Y);
    const stats::Summary& statsZ = stats->getStats(Dimension::Id::Z);

    EXPECT_DOUBLE_EQ(statsX.minimum(), 10.0);
    EXPECT_DOUBLE_EQ(statsX.maximum(), 11.0);
    EXPECT_DOUBLE_EQ(statsY.minimum(), 0.0);
    EXPECT_DOUBLE_EQ(statsY.maximum(), 1.0);
    EXPECT_DOUBLE_EQ(statsZ.minimum(), 3.14);
    EXPECT_DOUBLE_EQ(statsZ.maximum(), 3.14);
}
TEST(DecimationFilterTest, create)
{
    StageFactory f;
    Stage* filter(f.createStage("filters.decimation"));
    EXPECT_TRUE(filter);
}
TEST(TransformationMatrix, create)
{
    StageFactory f;
    Stage* filter(f.createStage("filters.transformation"));
    EXPECT_TRUE(filter);
}
TEST(NitfWriterTest, test1)
{
    StageFactory f;

    const std::string las_input(Support::datapath("las/1.2-with-color.las"));
    const std::string nitf_output(Support::temppath("temp_nitf.ntf"));
    const std::string reference_output(
        Support::datapath("nitf/write_test1.ntf"));

    FileUtils::deleteFile(nitf_output);

    //
    // write the NITF
    //
    {
        Options reader_opts;
        Option reader_opt1("filename", las_input);
        reader_opts.add(reader_opt1);

        Options writer_opts;
        Option writer_opt1("filename", nitf_output);
        Option debug("debug", true);
        Option verbose("verbose", 8);

        Option datetime("idatim", "20110516183337");
        writer_opts.add(datetime);

        Option cls("fsclas", "S");
        writer_opts.add(cls);

        Option phone("ophone", "5159664628");
        writer_opts.add(phone);

        Option name("oname", "Howard Butler");
        writer_opts.add(name);

        Option ftitle("ftitle", "LiDAR from somewhere");
        writer_opts.add(ftitle);

        // writer_opts.add(debug);
        // writer_opts.add(verbose);
        writer_opts.add(writer_opt1);

        Stage* reader(f.createStage("readers.las"));
        EXPECT_TRUE(reader);
        reader->setOptions(reader_opts);

        Stage* writer(f.createStage("writers.nitf"));
        EXPECT_TRUE(writer);
        writer->setOptions(writer_opts);
        writer->setInput(*reader);
        {
            // writer.setCompressed(false);
            // // writer.setDate(0, 0);
            // // writer.setPointFormat(::drivers::las::PointFormat3);
            // // writer.setSystemIdentifier("");
            // writer.setGeneratingSoftware("PDAL-NITF");
            // writer.setChunkSize(100);
        }

        PointTable table;
        writer->prepare(table);
        writer->execute(table);
    }

    //
    // check the generated NITF
    //

    //ABELL - This doesn't work and is probably broken because the reference
    //  file is out of date, but some method of comparing the NITF wrapper
    //  instead of a byte-by-byte file diff is probably in order.
    /**
    bool filesSame = Support::compare_files(nitf_output, reference_output);
    EXPECT_TRUE(filesSame);
    **/

    //
    // check the LAS contents against the source image
    //

    //ABELL - This tells us that the packaged file (LAS) is fine, but it
    //  doesn't tell us much about the NITF wrapper.
    compare_contents(las_input, nitf_output);

    // if (filesSame)
    //     FileUtils::deleteFile(Support::temppath(nitf_output));
}
TEST(ComputeRangeFilterTest, create)
{
    StageFactory f;
    Stage* filter(f.createStage("filters.computerange"));
    EXPECT_TRUE(filter);
}
int SortKernel::execute()
{
    PointTable table;

    Options readerOptions;
    readerOptions.add("filename", m_inputFile);
    readerOptions.add("debug", isDebug());
    readerOptions.add("verbose", getVerboseLevel());

    Stage& readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on reader stage only to grab input
    // PointViewSet, this makes the input PointView available to both the
    // processing pipeline and the visualizer
    readerStage.prepare(table);
    PointViewSet viewSetIn = readerStage.execute(table);

    // the input PointViewSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointViewPtr inView = *viewSetIn.begin();

    BufferReader bufferReader;
    bufferReader.setOptions(readerOptions);
    bufferReader.addView(inView);

    Options sortOptions;
    sortOptions.add<bool>("debug", isDebug());
    sortOptions.add<uint32_t>("verbose", getVerboseLevel());

    StageFactory f;
    Stage& sortStage = ownStage(f.createStage("filters.mortonorder"));
    sortStage.setInput(bufferReader);
    sortStage.setOptions(sortOptions);

    Options writerOptions;
    writerOptions.add("filename", m_outputFile);
    setCommonOptions(writerOptions);
    if (m_bCompress)
        writerOptions.add("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);

    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback =
        cmd.size() ? (UserCallback *)new ShellScriptCallback(cmd) :
            (UserCallback *)new HeartbeatCallback();

    Stage& writer = makeWriter(m_outputFile, sortStage);

    // Some options are inferred by makeWriter based on filename
    // (compression, driver type, etc).
    writer.setOptions(writerOptions + writer.getOptions());
    writer.setUserCallback(callback);

    for (const auto& pi : getExtraStageOptions())
    {
        std::string name = pi.first;
        Options options = pi.second;
        //ABELL - Huh?
        std::vector<Stage *> stages = writer.findStage(name);
        for (const auto& s : stages)
        {
            Options opts = s->getOptions();
            for (const auto& o : options.getOptions())
                opts.add(o);
            s->setOptions(opts);
        }
    }

    writer.prepare(table);

    // process the data, grabbing the PointViewSet for visualization of the
    // results
    PointViewSet viewSetOut = writer.execute(table);

    if (isVisualize())
        visualize(*viewSetOut.begin());

    return 0;
}
TEST(NitfReaderTest, test_one)
{
    StageFactory f;

    Options nitf_opts;
    nitf_opts.add("filename", Support::datapath("nitf/autzen-utm10.ntf"));
    nitf_opts.add("count", 750);

    PointTable table;

    Stage* nitf_reader(f.createStage("readers.nitf"));
    EXPECT_TRUE(nitf_reader);
    nitf_reader->setOptions(nitf_opts);
    nitf_reader->prepare(table);
    PointViewSet pbSet = nitf_reader->execute(table);
    EXPECT_EQ(pbSet.size(), 1u);
    PointViewPtr view = *pbSet.begin();

    // check metadata
    MetadataNode m = nitf_reader->getMetadata();
    MetadataNode n = m.findChild(
        [](MetadataNode& m){ return m.name() == "IM:0.IGEOLO"; }
    );
    EXPECT_EQ(n.value(),
        "440344N1230429W440344N1230346W440300N1230346W440300N1230429W");
    n = m.findChild("FH.FDT");
    EXPECT_EQ(n.value(), "20120323002946");

    //
    // read LAS
    //
    Options las_opts;
    las_opts.add("count", 750);
    las_opts.add("filename", Support::datapath("nitf/autzen-utm10.las"));

    PointTable table2;

    Stage* las_reader(f.createStage("readers.las"));
    EXPECT_TRUE(las_reader);
    las_reader->setOptions(las_opts);
    las_reader->prepare(table2);
    PointViewSet pbSet2 = las_reader->execute(table2);
    EXPECT_EQ(pbSet2.size(), 1u);
    // Compare against the LAS reader's view, not the NITF reader's.
    PointViewPtr view2 = *pbSet2.begin();

    //
    // compare the two views
    //
    EXPECT_EQ(view->size(), view2->size());

    for (PointId i = 0; i < view2->size(); i++)
    {
        int32_t nitf_x = view->getFieldAs<int32_t>(Dimension::Id::X, i);
        int32_t nitf_y = view->getFieldAs<int32_t>(Dimension::Id::Y, i);
        int32_t nitf_z = view->getFieldAs<int32_t>(Dimension::Id::Z, i);

        int32_t las_x = view2->getFieldAs<int32_t>(Dimension::Id::X, i);
        int32_t las_y = view2->getFieldAs<int32_t>(Dimension::Id::Y, i);
        int32_t las_z = view2->getFieldAs<int32_t>(Dimension::Id::Z, i);

        EXPECT_EQ(nitf_x, las_x);
        EXPECT_EQ(nitf_y, las_y);
        EXPECT_EQ(nitf_z, las_z);
    }
}
void testReadWrite(bool compression, bool scaling)
{
    // remove file from earlier run, if needed
    std::string tempFilename =
        getSQLITEOptions().getValueOrThrow<std::string>("connection");
    FileUtils::deleteFile(tempFilename);

    Options sqliteOptions = getSQLITEOptions();
    if (scaling)
    {
        sqliteOptions.add("scale_x", 0.01);
        sqliteOptions.add("scale_y", 0.01);
    }
    sqliteOptions.add("compression", compression, "");

    {
        // remove file from earlier run, if needed
        std::string temp_filename =
            sqliteOptions.getValueOrThrow<std::string>("connection");

        Options lasReadOpts;
        lasReadOpts.add("filename", Support::datapath("las/1.2-with-color.las"));
        lasReadOpts.add("count", 11);

        LasReader reader;
        reader.setOptions(lasReadOpts);

        StageFactory f;
        std::unique_ptr<Stage> sqliteWriter(f.createStage("writers.sqlite"));
        sqliteWriter->setOptions(sqliteOptions);
        sqliteWriter->setInput(reader);

        PointTable table;
        sqliteWriter->prepare(table);
        sqliteWriter->execute(table);
    }

    {
        // Done - now read back.
        StageFactory f;
        std::unique_ptr<Stage> sqliteReader(f.createStage("readers.sqlite"));
        sqliteReader->setOptions(sqliteOptions);

        PointTable table2;
        sqliteReader->prepare(table2);
        PointViewSet viewSet = sqliteReader->execute(table2);
        EXPECT_EQ(viewSet.size(), 1U);
        PointViewPtr view = *viewSet.begin();

        using namespace Dimension;
        uint16_t reds[] = {68, 54, 112, 178, 134, 99, 90, 106, 106, 100, 64};
        for (PointId idx = 0; idx < 11; idx++)
        {
            uint16_t r = view->getFieldAs<uint16_t>(Id::Red, idx);
            EXPECT_EQ(r, reds[idx]);
        }
        int32_t x = view->getFieldAs<int32_t>(Id::X, 10);
        EXPECT_EQ(x, 636038);
        double xd = view->getFieldAs<double>(Id::X, 10);
        EXPECT_FLOAT_EQ(xd, 636037.53);
    }

    FileUtils::deleteFile(tempFilename);
}
// Merge a couple of 1.4 LAS files with point formats 1 and 6. Write the
// output with version 3. Verify that you get point format 3 (default),
// LAS 1.3 output and other header data forwarded.
TEST(LasWriterTest, forward)
{
    Options readerOps1;
    readerOps1.add("filename", Support::datapath("las/4_1.las"));

    Options readerOps2;
    readerOps2.add("filename", Support::datapath("las/4_6.las"));

    LasReader r1;
    r1.addOptions(readerOps1);

    LasReader r2;
    r2.addOptions(readerOps2);

    StageFactory sf;
    Stage *m = sf.createStage("filters.merge");
    m->setInput(r1);
    m->setInput(r2);

    std::string testfile = Support::temppath("tmp.las");
    FileUtils::deleteFile(testfile);

    Options writerOps;
    writerOps.add("forward", "header");
    writerOps.add("minor_version", 3);
    writerOps.add("filename", testfile);

    LasWriter w;
    w.setInput(*m);
    w.addOptions(writerOps);

    PointTable t;
    w.prepare(t);
    w.execute(t);

    Options readerOps;
    readerOps.add("filename", testfile);

    LasReader r;
    r.setOptions(readerOps);

    PointTable t2;
    r.prepare(t2);
    r.execute(t2);

    MetadataNode n1 = r.getMetadata();
    EXPECT_EQ(n1.findChild("major_version").value<uint8_t>(), 1);
    EXPECT_EQ(n1.findChild("minor_version").value<uint8_t>(), 3);
    EXPECT_EQ(n1.findChild("dataformat_id").value<uint8_t>(), 3);
    EXPECT_EQ(n1.findChild("filesource_id").value<uint8_t>(), 0);
    // Global encoding doesn't match because 4_1.las has a bad value, so we
    // get the default.
    EXPECT_EQ(n1.findChild("global_encoding").value<uint16_t>(), 0);
    EXPECT_EQ(n1.findChild("project_id").value<Uuid>(), Uuid());
    EXPECT_EQ(n1.findChild("system_id").value(), "");
    EXPECT_EQ(n1.findChild("software_id").value(), "TerraScan");
    EXPECT_EQ(n1.findChild("creation_doy").value<uint16_t>(), 142);
    EXPECT_EQ(n1.findChild("creation_year").value<uint16_t>(), 2014);
}
TIndexKernel::FileInfo TIndexKernel::getFileInfo(KernelFactory& factory,
    const std::string& filename)
{
    FileInfo fileInfo;

    StageFactory f;

    std::string driverName = f.inferReaderDriver(filename);
    Stage *s = f.createStage(driverName);
    Options ops;
    ops.add("filename", filename);
    setCommonOptions(ops);
    s->setOptions(ops);
    applyExtraStageOptionsRecursive(s);

    if (m_fastBoundary)
    {
        QuickInfo qi = s->preview();

        std::stringstream polygon;
        polygon << "POLYGON ((";
        polygon << qi.m_bounds.minx << " " << qi.m_bounds.miny;
        polygon << ", " << qi.m_bounds.maxx << " " << qi.m_bounds.miny;
        polygon << ", " << qi.m_bounds.maxx << " " << qi.m_bounds.maxy;
        polygon << ", " << qi.m_bounds.minx << " " << qi.m_bounds.maxy;
        polygon << ", " << qi.m_bounds.minx << " " << qi.m_bounds.miny;
        polygon << "))";
        fileInfo.m_boundary = polygon.str();
        if (!qi.m_srs.empty())
            fileInfo.m_srs = qi.m_srs.getWKT();
    }
    else
    {
        PointTable table;

        Stage *hexer = f.createStage("filters.hexbin");
        if (!hexer)
        {
            std::ostringstream oss;
            oss << "Unable to create hexer stage to create boundaries. "
                << "Is PDAL_DRIVER_PATH environment variable set?";
            throw pdal_error(oss.str());
        }
        hexer->setInput(*s);

        hexer->prepare(table);
        PointViewSet set = hexer->execute(table);

        MetadataNode m = table.metadata();
        m = m.findChild("filters.hexbin:boundary");
        fileInfo.m_boundary = m.value();

        PointViewPtr v = *set.begin();
        if (!v->spatialReference().empty())
            fileInfo.m_srs = v->spatialReference().getWKT();
    }

    FileUtils::fileTimes(filename, &fileInfo.m_ctime, &fileInfo.m_mtime);
    fileInfo.m_filename = filename;

    return fileInfo;
}
// Test that data from three input views gets written to separate output files.
TEST(NitfWriterTest, flex)
{
    StageFactory f;

    std::array<std::string, 3> outname =
        {{ "test_1.ntf", "test_2.ntf", "test_3.ntf" }};

    Options readerOps;
    readerOps.add("filename", Support::datapath("nitf/autzen-utm10.ntf"));

    PointTable table;

    Stage* reader(f.createStage("readers.nitf"));
    reader->setOptions(readerOps);

    reader->prepare(table);
    PointViewSet views = reader->execute(table);
    PointViewPtr v = *(views.begin());

    PointViewPtr v1(new PointView(table));
    PointViewPtr v2(new PointView(table));
    PointViewPtr v3(new PointView(table));

    std::vector<PointViewPtr> vs;
    vs.push_back(v1);
    vs.push_back(v2);
    vs.push_back(v3);

    for (PointId i = 0; i < v->size(); ++i)
        vs[i % 3]->appendPoint(*v, i);

    for (size_t i = 0; i < outname.size(); ++i)
        FileUtils::deleteFile(Support::temppath(outname[i]));

    BufferReader reader2;
    reader2.addView(v1);
    reader2.addView(v2);
    reader2.addView(v3);

    Options writerOps;
    writerOps.add("filename", Support::temppath("test_#.ntf"));

    Stage* writer(f.createStage("writers.nitf"));
    writer->setOptions(writerOps);
    writer->setInput(reader2);

    writer->prepare(table);
    writer->execute(table);

    for (size_t i = 0; i < outname.size(); ++i)
    {
        std::string filename = Support::temppath(outname[i]);
        EXPECT_TRUE(FileUtils::fileExists(filename));

        Options ops;
        ops.add("filename", filename);

        Stage* r(f.createStage("readers.nitf"));
        r->setOptions(ops);
        EXPECT_EQ(r->preview().m_pointCount, vs[i]->size());
    }
}
TEST_F(PredicateFilterTest, PredicateFilterTest_test_programmable_3)
{
    StageFactory f;
    // can we make a pipeline with TWO python filters in it?

    BOX3D bounds(0.0, 0.0, 0.0, 2.0, 2.0, 2.0);

    Options readerOpts;
    readerOpts.add("bounds", bounds);
    readerOpts.add("count", 1000);
    readerOpts.add("mode", "ramp");

    FauxReader reader;
    reader.setOptions(readerOpts);

    // keep all points where x less than 1.0
    const Option source1("source",
        // "X < 1.0"
        "import numpy as np\n"
        "def yow1(ins,outs):\n"
        " X = ins['X']\n"
        " Mask = np.less(X, 1.0)\n"
        " #print X\n"
        " #print Mask\n"
        " outs['Mask'] = Mask\n"
        " return True\n"
    );
    const Option module1("module", "MyModule1");
    const Option function1("function", "yow1");
    Options opts1;
    opts1.add(source1);
    opts1.add(module1);
    opts1.add(function1);

    Stage* filter1(f.createStage("filters.python"));
    filter1->setOptions(opts1);
    filter1->setInput(reader);

    // keep all points where y greater than 0.5
    const Option source2("source",
        // "Y > 0.5"
        "import numpy as np\n"
        "def yow2(ins,outs):\n"
        " Y = ins['Y']\n"
        " Mask = np.greater(Y, 0.5)\n"
        " #print X\n"
        " #print Mask\n"
        " outs['Mask'] = Mask\n"
        " return True\n"
    );
    const Option module2("module", "MyModule2");
    const Option function2("function", "yow2");
    Options opts2;
    opts2.add(source2);
    opts2.add(module2);
    opts2.add(function2);

    Stage* filter2(f.createStage("filters.python"));
    filter2->setOptions(opts2);
    filter2->setInput(*filter1);

    Options statOpts;
    std::unique_ptr<StatsFilter> stats(new StatsFilter);
    stats->setOptions(statOpts);
    stats->setInput(*filter2);

    PointTable table;
    stats->prepare(table);
    stats->execute(table);

    const stats::Summary& statsX = stats->getStats(Dimension::Id::X);
    const stats::Summary& statsY = stats->getStats(Dimension::Id::Y);
    const stats::Summary& statsZ = stats->getStats(Dimension::Id::Z);

    EXPECT_TRUE(Utils::compare_approx(statsX.minimum(), 0.5, 0.01));
    EXPECT_TRUE(Utils::compare_approx(statsY.minimum(), 0.5, 0.01));
    EXPECT_TRUE(Utils::compare_approx(statsZ.minimum(), 0.5, 0.01));

    EXPECT_TRUE(Utils::compare_approx(statsX.maximum(), 1.0, 0.01));
    EXPECT_TRUE(Utils::compare_approx(statsY.maximum(), 1.0, 0.01));
    EXPECT_TRUE(Utils::compare_approx(statsZ.maximum(), 1.0, 0.01));
}
bool ossimPdalFileReader::open(const ossimFilename& fname)
{
    // PDAL opens the file here.
    m_inputFilename = fname;
    pdal::Stage* reader = 0;
    try
    {
        if (m_inputFilename.contains("fauxreader"))
        {
            // Debug generated image:
            reader = new FauxReader;
            m_pdalPipe = reader;
            m_pdalOptions.add("mode", "ramp");
            BOX3D bbox(-0.001, -0.001, -100.0, 0.001, 0.001, 100.0);
            m_pdalOptions.add("bounds", bbox);
            m_pdalOptions.add("num_points", "11");
            m_pdalPipe->setOptions(m_pdalOptions);
        }
        else
        {
            StageFactory factory;
            const string driver =
                factory.inferReaderDriver(m_inputFilename.string());
            if (driver == "")
                throw pdal_error("File type not supported by PDAL");
            reader = factory.createStage(driver);
            if (!reader)
                throw pdal_error("No reader created by PDAL");

            m_pdalOptions.add("filename", m_inputFilename.string());
            reader->setOptions(m_pdalOptions);

            // Stick a stats filter in the pipeline:
            m_pdalPipe = new StatsFilter();
            m_pdalPipe->setInput(*reader);
        }

        m_pointTable = PointTablePtr(new PointTable);
        m_pdalPipe->prepare(*m_pointTable);
        m_pvs = m_pdalPipe->execute(*m_pointTable);
        if (!m_pvs.empty())
        {
            m_currentPV = *(m_pvs.begin());

            // Determine available data fields:
            establishAvailableFields();
        }

        std::string wkt;
        const SpatialReference& srs = reader->getSpatialReference();
        wkt = srs.getWKT(SpatialReference::eCompoundOK, false);
        m_geometry = new ossimPointCloudGeometry(wkt);
    }
    catch (std::exception& e)
    {
        //ossimNotify() << e.what() << endl;
        return false;
    }

    // Establish the bounds (geographic & radiometric). These are stored in
    // two extreme point records maintained by the base class:
    establishMinMax();

    return true;
}
TEST(CropFilterTest, create)
{
    StageFactory f;
    std::unique_ptr<Stage> filter(f.createStage("filters.crop"));
    EXPECT_TRUE(filter.get());
}