Example #1
PointBufferSet Splitter::run(PointBufferPtr buf)
{
    PointBufferSet pbSet;
    if (!buf->size())
        return pbSet;

    CoordCompare compare;
    std::map<Coord, PointBufferPtr, CoordCompare> buffers(compare);

    // Use the location of the first point as the origin.
    double xOrigin = buf->getFieldAs<double>(Dimension::Id::X, 0);
    double yOrigin = buf->getFieldAs<double>(Dimension::Id::Y, 0);

    // Overlay a grid of squares on the points (sides of length m_length).
    // Each square corresponds to a new point buffer.  Place the points
    // falling in each square in the corresponding point buffer.
    for (PointId idx = 0; idx < buf->size(); idx++)
    {
        int xpos = (buf->getFieldAs<double>(Dimension::Id::X, idx) - xOrigin) /
            m_length;
        int ypos = (buf->getFieldAs<double>(Dimension::Id::Y, idx) - yOrigin) /
            m_length;
        Coord loc(xpos, ypos);
        PointBufferPtr& outbuf = buffers[loc];
        if (!outbuf)
            outbuf = buf->makeNew();
        outbuf->appendPoint(*buf, idx);
    }

    // Pull the buffers out of the map and stick them in the standard
    // output set.
    for (auto bi = buffers.begin(); bi != buffers.end(); ++bi)
        pbSet.insert(bi->second);
    return pbSet;
}
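
The map in Splitter::run is keyed on a Coord type ordered by a CoordCompare functor, neither of which appears in the snippet. A minimal sketch of what such helpers might look like, assuming Coord is just the integer (xpos, ypos) grid cell; these are hypothetical stand-ins, not the actual PDAL definitions:

#include <utility>

// Hypothetical stand-in for the Coord key used by Splitter::run: the integer
// grid-cell indices computed from the point coordinates.
typedef std::pair<int, int> Coord;

// Hypothetical comparator providing the strict weak ordering std::map needs.
struct CoordCompare
{
    bool operator()(const Coord& a, const Coord& b) const
    {
        // Order by x first, then y.
        return a.first < b.first ||
            (a.first == b.first && a.second < b.second);
    }
};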
Example #2
File: Merge.hpp Project: hobu/PDAL
    virtual PointBufferSet run(PointBufferPtr buf)
    {
        PointBufferSet pbSet;

        m_buf->append(*buf);
        pbSet.insert(m_buf);
        return pbSet;
    }
Example #3
PointBufferSet Crop::run(PointBufferPtr buffer)
{
    PointBufferSet pbSet;
    PointBufferPtr output = buffer->makeNew();
    crop(*buffer, *output);
    pbSet.insert(output);
    return pbSet;
}
Example #4
BOX3D PointBuffer::calculateBounds(const PointBufferSet& set, bool is3d)
{
    BOX3D out;
    for (auto iter = set.begin(); iter != set.end(); ++iter)
    {
        PointBufferPtr buf = *iter;
        out.grow(buf->calculateBounds(is3d));
    }
    return out;
}
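
A brief usage sketch for this overload, assuming it is static (as the free-standing signature above suggests) and reusing the reader pattern from the later examples; the filename is a placeholder:

PointContext ctx;

Options ops;
ops.add("filename", "input.las");   // placeholder path

LasReader reader;                   // configured as in Example #7
reader.setOptions(ops);
reader.prepare(ctx);

PointBufferSet pbSet = reader.execute(ctx);

// Grow a single BOX3D over every buffer in the set (3D bounds).
BOX3D box = PointBuffer::calculateBounds(pbSet, true);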
Example #5
TEST(IcebridgeReaderTest, testRead)
{
    StageFactory f;
    ReaderPtr reader(f.createReader("readers.icebridge"));
    EXPECT_TRUE(reader.get());

    Option filename("filename", getFilePath(), "");
    Options options(filename);
    reader->setOptions(options);

    PointContext ctx;
    reader->prepare(ctx);
    PointBufferSet pbSet = reader->execute(ctx);
    EXPECT_EQ(pbSet.size(), 1u);
    PointBufferPtr buf = *pbSet.begin();
    EXPECT_EQ(buf->size(), 2u);

    checkPoint(
            *buf,
            0,
            141437548,     // time
            82.605319,      // latitude
            301.406196,     // longitude
            18.678,         // elevation
            2408,           // xmtSig
            181,            // rcvSig
            49.91,          // azimuth
            -4.376,         // pitch
            0.608,          // roll
            2.9,            // gpsPdop
            20.0,           // pulseWidth
            0.0);           // relTime

    checkPoint(
            *buf,
            1,
            141437548,     // time
            82.605287,      // latitude
            301.404862,     // longitude
            18.688,         // elevation
            2642,           // xmtSig
            173,            // rcvSig
            52.006,         // azimuth
            -4.376,         // pitch
            0.609,          // roll
            2.9,            // gpsPdop
            17.0,           // pulseWidth
            0.0);           // relTime
}
Example #6
TEST(SortFilterTest, pipeline)
{
    PipelineManager mgr;
    PipelineReader reader(mgr);

    reader.readPipeline(Support::configuredpath("filters/sort.xml"));
    mgr.execute();

    PointBufferSet pbSet = mgr.buffers();

    EXPECT_EQ(pbSet.size(), 1u);
    PointBufferPtr buf = *pbSet.begin();

    for (PointId i = 1; i < buf->size(); ++i)
    {
        double d1 = buf->getFieldAs<double>(Dimension::Id::X, i - 1);
        double d2 = buf->getFieldAs<double>(Dimension::Id::X, i);
        EXPECT_TRUE(d1 <= d2);
    }
}
Example #7
TEST(LasReaderTest, test_sequential)
{
    PointContext ctx;

    Options ops1;
    ops1.add("filename", Support::datapath("las/1.2-with-color.las"));
    ops1.add("count", 103);
    LasReader reader;
    reader.setOptions(ops1);

    reader.prepare(ctx);
    PointBufferSet pbSet = reader.execute(ctx);
    EXPECT_EQ(pbSet.size(), 1u);
    PointBufferPtr buf = *pbSet.begin();
    Support::check_p0_p1_p2(*buf);
    PointBufferPtr buf2 = buf->makeNew();
    buf2->appendPoint(*buf, 100);
    buf2->appendPoint(*buf, 101);
    buf2->appendPoint(*buf, 102);
    Support::check_p100_p101_p102(*buf2);
}
Example #8
static void test_a_format(const std::string& file,
    boost::uint8_t majorVersion, boost::uint8_t minorVersion, int pointFormat,
    double xref, double yref, double zref, double tref,
    boost::uint16_t rref, boost::uint16_t gref, boost::uint16_t bref)
{
    PointContext ctx;

    Options ops1;
    ops1.add("filename", Support::datapath(file));
    ops1.add("count", 1);
    pdal::drivers::las::Reader reader;
    reader.setOptions(ops1);
    reader.prepare(ctx);

    BOOST_CHECK_EQUAL(reader.header().pointFormat(), pointFormat);
    BOOST_CHECK_EQUAL(reader.header().versionMajor(), majorVersion);
    BOOST_CHECK_EQUAL(reader.header().versionMinor(), minorVersion);

    PointBufferSet pbSet = reader.execute(ctx);
    BOOST_CHECK_EQUAL(pbSet.size(), 1);
    PointBufferPtr buf = *pbSet.begin();
    BOOST_CHECK_EQUAL(buf->size(), 1);

    Support::check_pN(*buf, 0, xref, yref, zref, tref, rref, gref, bref);
}
Example #9
TEST(NitfReaderTest, test_one)
{
    StageFactory f;

    Options nitf_opts;
    nitf_opts.add("filename", Support::datapath("nitf/autzen-utm10.ntf"));
    nitf_opts.add("count", 750);

    PointContext ctx;

    ReaderPtr nitf_reader(f.createReader("readers.nitf"));
    EXPECT_TRUE(nitf_reader.get());
    nitf_reader->setOptions(nitf_opts);
    nitf_reader->prepare(ctx);
    PointBufferSet pbSet = nitf_reader->execute(ctx);
    EXPECT_EQ(nitf_reader->getDescription(), "NITF Reader");
    EXPECT_EQ(pbSet.size(), 1u);
    PointBufferPtr buf = *pbSet.begin();

    // check metadata
//ABELL
/**
    {
        Metadata metadata = nitf_reader.getMetadata();
        /////////////////////////////////////////////////EXPECT_EQ(metadatums.size(), 80u);
        EXPECT_EQ(metadata.toPTree().get<std::string>("metadata.FH_FDT.value"), "20120323002946");
    }
**/

    //
    // read LAS
    //
    Options las_opts;
    las_opts.add("count", 750);
    las_opts.add("filename", Support::datapath("nitf/autzen-utm10.las"));

    PointContext ctx2;

    ReaderPtr las_reader(f.createReader("readers.las"));
    EXPECT_TRUE(las_reader.get());
    las_reader->setOptions(las_opts);
    las_reader->prepare(ctx2);
    PointBufferSet pbSet2 = las_reader->execute(ctx2);
    EXPECT_EQ(pbSet2.size(), 1u);
    PointBufferPtr buf2 = *pbSet2.begin();

    //
    // compare the two buffers
    //
    EXPECT_EQ(buf->size(), buf2->size());

    for (PointId i = 0; i < buf2->size(); i++)
    {
        int32_t nitf_x = buf->getFieldAs<int32_t>(Dimension::Id::X, i);
        int32_t nitf_y = buf->getFieldAs<int32_t>(Dimension::Id::Y, i);
        int32_t nitf_z = buf->getFieldAs<int32_t>(Dimension::Id::Z, i);

        int32_t las_x = buf2->getFieldAs<int32_t>(Dimension::Id::X, i);
        int32_t las_y = buf2->getFieldAs<int32_t>(Dimension::Id::Y, i);
        int32_t las_z = buf2->getFieldAs<int32_t>(Dimension::Id::Z, i);

        EXPECT_EQ(nitf_x, las_x);
        EXPECT_EQ(nitf_y, las_y);
        EXPECT_EQ(nitf_z, las_z);
    }
}
Example #10
int SmoothKernel::execute()
{
    PointContext ctx;

    Options readerOptions;
    readerOptions.add("filename", m_inputFile);
    readerOptions.add("debug", isDebug());
    readerOptions.add("verbose", getVerboseLevel());

    std::unique_ptr<Stage> readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on the reader stage only to grab the input
    // PointBufferSet; this makes the input PointBuffer available to both the
    // processing pipeline and the visualizer
    readerStage->prepare(ctx);
    PointBufferSet pbSetIn = readerStage->execute(ctx);

    // the input PointBufferSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointBufferPtr input_buffer = *pbSetIn.begin();
    BufferReader bufferReader;
    bufferReader.setOptions(readerOptions);
    bufferReader.addBuffer(input_buffer);

    Options smoothOptions;
    std::ostringstream ss;
    ss << "{";
    ss << "  \"pipeline\": {";
    ss << "    \"filters\": [{";
    ss << "      \"name\": \"MovingLeastSquares\"";
    ss << "      }]";
    ss << "    }";
    ss << "}";
    std::string json = ss.str();
    smoothOptions.add("json", json);
    smoothOptions.add("debug", isDebug());
    smoothOptions.add("verbose", getVerboseLevel());

    std::unique_ptr<Stage> smoothStage(new filters::PCLBlock());
    smoothStage->setOptions(smoothOptions);
    smoothStage->setInput(&bufferReader);

    Options writerOptions;
    writerOptions.add("filename", m_outputFile);
    setCommonOptions(writerOptions);

    WriterPtr writer(KernelSupport::makeWriter(m_outputFile, smoothStage.get()));
    writer->setOptions(writerOptions);

    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback =
        cmd.size() ? (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();

    writer->setUserCallback(callback);

    std::map<std::string, Options> extra_opts = getExtraStageOptions();
    std::map<std::string, Options>::iterator pi;
    for (pi = extra_opts.begin(); pi != extra_opts.end(); ++pi)
    {
        std::string name = pi->first;
        Options options = pi->second;
        std::vector<Stage*> stages = writer->findStage(name);
        std::vector<Stage*>::iterator s;
        for (s = stages.begin(); s != stages.end(); ++s)
        {
            Options opts = (*s)->getOptions();
            std::vector<Option>::iterator o;
            for (o = options.getOptions().begin(); o != options.getOptions().end(); ++o)
                opts.add(*o);
            (*s)->setOptions(opts);
        }
    }

    writer->prepare(ctx);

    // process the data, grabbing the PointBufferSet for visualization of the
    // resulting PointBuffer
    PointBufferSet pbSetOut = writer->execute(ctx);

    if (isVisualize())
        visualize(*pbSetOut.begin());
    //visualize(*pbSetIn.begin(), *pbSetOut.begin());

    return 0;
}
Example #11
int HeightAboveGroundKernel::execute()
{
    // we require separate contexts for the input and ground files
    PointContextRef input_ctx;
    PointContextRef ground_ctx;

    // because we are appending HeightAboveGround to the input buffer, we must
    // register its dimension
    input_ctx.registerDim(Dimension::Id::HeightAboveGround);

    // StageFactory will be used to create required stages
    StageFactory f;

    // setup the reader, inferring driver type from the filename
    std::string reader_driver = f.inferReaderDriver(m_input_file);
    std::unique_ptr<Reader> input(f.createReader(reader_driver));
    Options readerOptions;
    readerOptions.add("filename", m_input_file);
    input->setOptions(readerOptions);

    // go ahead and execute to get the PointBuffer
    input->prepare(input_ctx);
    PointBufferSet pbSetInput = input->execute(input_ctx);
    PointBufferPtr input_buf = *pbSetInput.begin();

    PointBufferSet pbSetGround;
    PointBufferPtr ground_buf;

    if (m_use_classification)
    {
        // the user has indicated that the classification dimension exists, so
        // we will find all ground returns
        Option source("source",
                      "import numpy as np\n"
                      "def yow1(ins,outs):\n"
                      "  cls = ins['Classification']\n"
                      "  keep_classes = [2]\n"
                      "  keep = np.equal(cls, keep_classes[0])\n"
                      "  outs['Mask'] = keep\n"
                      "  return True\n"
                     );
        Option module("module", "MyModule");
        Option function("function", "yow1");
        Options opts;
        opts.add(source);
        opts.add(module);
        opts.add(function);

        // and create a PointBuffer of only ground returns
        std::unique_ptr<Filter> pred(f.createFilter("filters.predicate"));
        pred->setOptions(opts);
        pred->setInput(input.get());
        pred->prepare(ground_ctx);
        pbSetGround = pred->execute(ground_ctx);
        ground_buf = *pbSetGround.begin();
    }
    else
    {
        // the user has provided a file containing only ground returns, setup
        // the reader, inferring driver type from the filename
        std::string ground_driver = f.inferReaderDriver(m_ground_file);
        std::unique_ptr<Reader> ground(f.createReader(ground_driver));
        Options ro;
        ro.add("filename", m_ground_file);
        ground->setOptions(ro);

        // go ahead and execute to get the PointBuffer
        ground->prepare(ground_ctx);
        pbSetGround = ground->execute(ground_ctx);
        ground_buf = *pbSetGround.begin();
    }

    typedef pcl::PointXYZ PointT;
    typedef pcl::PointCloud<PointT> Cloud;
    typedef Cloud::Ptr CloudPtr;

    // convert the input PointBuffer to a PointCloud
    CloudPtr cloud(new Cloud);
    BOX3D const& bounds = input_buf->calculateBounds();
    pclsupport::PDALtoPCD(*input_buf, *cloud, bounds);

    // convert the ground PointBuffer to a PointCloud
    CloudPtr cloud_g(new Cloud);
    // here, we offset the ground cloud by the input bounds so that the two are aligned
    pclsupport::PDALtoPCD(*ground_buf, *cloud_g, bounds);

    // create a set of planar coefficients with X=Y=0,Z=1
    pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients());
    coefficients->values.resize(4);
    coefficients->values[0] = coefficients->values[1] = 0;
    coefficients->values[2] = 1.0;
    coefficients->values[3] = 0;

    // create the filtering object and project ground returns into xy plane
    pcl::ProjectInliers<PointT> proj;
    proj.setModelType(pcl::SACMODEL_PLANE);
    proj.setInputCloud(cloud_g);
    proj.setModelCoefficients(coefficients);
    CloudPtr cloud_projected(new Cloud);
    proj.filter(*cloud_projected);

    // setup the KdTree
    pcl::KdTreeFLANN<PointT> tree;
    tree.setInputCloud(cloud_projected);

    // loop over all points in the input cloud, finding the nearest neighbor in
    // the ground returns (XY plane only), and calculating the difference in z
    int32_t k = 1;
    for (size_t idx = 0; idx < cloud->points.size(); ++idx)
    {
        // Search for the nearest neighbor of the query point
        std::vector<int32_t> neighbors(k);
        std::vector<float> distances(k);
        PointT temp_pt = cloud->points[idx];
        temp_pt.z = 0.0f;
        int num_neighbors = tree.nearestKSearch(temp_pt, k, neighbors, distances);

        double hag = cloud->points[idx].z - cloud_g->points[neighbors[0]].z;
        input_buf->setField(Dimension::Id::HeightAboveGround, idx, hag);
    }

    // populate BufferReader with the input PointBuffer, which now has the
    // HeightAboveGround dimension
    BufferReader bufferReader;
    bufferReader.addBuffer(input_buf);

    // we require that the output be BPF for now, to house our non-standard
    // dimension
    Options wo;
    wo.add("filename", m_output_file);
    std::unique_ptr<Writer> writer(f.createWriter("writers.bpf"));
    writer->setOptions(wo);
    writer->setInput(&bufferReader);
    writer->prepare(input_ctx);
    writer->execute(input_ctx);

    return 0;
}
Example #12
int Ground::execute()
{
    PointContext ctx;

    Options readerOptions;
    readerOptions.add<std::string>("filename", m_inputFile);
    readerOptions.add<bool>("debug", isDebug());
    readerOptions.add<boost::uint32_t>("verbose", getVerboseLevel());

    std::unique_ptr<Stage> readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on the reader stage only to grab the input
    // PointBufferSet; this makes the input PointBuffer available to both the
    // processing pipeline and the visualizer
    readerStage->prepare(ctx);
    PointBufferSet pbSetIn = readerStage->execute(ctx);

    // the input PointBufferSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointBufferPtr input_buffer = *pbSetIn.begin();
    BufferReader bufferReader;
    bufferReader.setOptions(readerOptions);
    bufferReader.addBuffer(input_buffer);

    Options groundOptions;
    std::ostringstream ss;
    ss << "{";
    ss << "  \"pipeline\": {";
    ss << "    \"filters\": [{";
    ss << "      \"name\": \"ProgressiveMorphologicalFilter\",";
    ss << "      \"setMaxWindowSize\": " << m_maxWindowSize << ",";
    ss << "      \"setSlope\": " << m_slope << ",";
    ss << "      \"setMaxDistance\": " << m_maxDistance << ",";
    ss << "      \"setInitialDistance\": " << m_initialDistance << ",";
    ss << "      \"setCellSize\": " << m_cellSize << ",";
    ss << "      \"setBase\": " << m_base << ",";
    ss << "      \"setExponential\": " << m_exponential;
    ss << "      }]";
    ss << "    }";
    ss << "}";
    std::string json = ss.str();
    groundOptions.add<std::string>("json", json);
    groundOptions.add<bool>("debug", isDebug());
    groundOptions.add<boost::uint32_t>("verbose", getVerboseLevel());

    std::unique_ptr<Stage> groundStage(new filters::PCLBlock());
    groundStage->setOptions(groundOptions);

    // the PCLBlock groundStage consumes the BufferReader rather than the
    // readerStage
    groundStage->setInput(&bufferReader);

    Options writerOptions;
    writerOptions.add<std::string>("filename", m_outputFile);
    setCommonOptions(writerOptions);

    std::unique_ptr<Writer> writer(AppSupport::makeWriter(m_outputFile, groundStage.get()));
    writer->setOptions(writerOptions);

    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback =
        cmd.size() ? (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();

    writer->setUserCallback(callback);

    for (auto pi: getExtraStageOptions())
    {
        std::string name = pi.first;
        Options options = pi.second;
        std::vector<Stage*> stages = writer->findStage(name);
        for (auto s: stages)
        {
            Options opts = s->getOptions();
            for (auto o: options.getOptions())
                opts.add(o);
            s->setOptions(opts);
        }
    }

    writer->prepare(ctx);

    // process the data, grabbing the PointBufferSet for visualization of the
    // resulting PointBuffer
    PointBufferSet pbSetOut = writer->execute(ctx);

    if (isVisualize())
        visualize(*pbSetOut.begin());
    //visualize(*pbSetIn.begin(), *pbSetOut.begin());

    return 0;
}
Example #13
int PCLKernel::execute()
{
    PointContext ctx;

    Options readerOptions;
    readerOptions.add<std::string>("filename", m_inputFile);
    readerOptions.add<bool>("debug", isDebug());
    readerOptions.add<uint32_t>("verbose", getVerboseLevel());

    std::unique_ptr<Stage> readerStage = makeReader(readerOptions);

    // go ahead and prepare/execute on the reader stage only to grab the input
    // PointBufferSet; this makes the input PointBuffer available to both the
    // processing pipeline and the visualizer
    readerStage->prepare(ctx);
    PointBufferSet pbSetIn = readerStage->execute(ctx);

    // the input PointBufferSet will be used to populate a BufferReader that is
    // consumed by the processing pipeline
    PointBufferPtr input_buffer = *pbSetIn.begin();
    BufferReader bufferReader;
    bufferReader.addBuffer(input_buffer);

    Options pclOptions;
    pclOptions.add<std::string>("filename", m_pclFile);
    pclOptions.add<bool>("debug", isDebug());
    pclOptions.add<uint32_t>("verbose", getVerboseLevel());

    std::unique_ptr<Stage> pclStage(new filters::PCLBlock());
    pclStage->setInput(&bufferReader);
    pclStage->setOptions(pclOptions);

    // the PCLBlock stage consumes the BufferReader rather than the
    // readerStage

    Options writerOptions;
    writerOptions.add<std::string>("filename", m_outputFile);
    setCommonOptions(writerOptions);

    if (m_bCompress)
        writerOptions.add<bool>("compression", true);
    if (m_bForwardMetadata)
        writerOptions.add("forward_metadata", true);

    std::vector<std::string> cmd = getProgressShellCommand();
    UserCallback *callback =
        cmd.size() ? (UserCallback *)new ShellScriptCallback(cmd) :
        (UserCallback *)new HeartbeatCallback();

    WriterPtr
        writer(KernelSupport::makeWriter(m_outputFile, pclStage.get()));

    // Some options are inferred by makeWriter based on filename
    // (compression, driver type, etc).
    writer->setOptions(writerOptions+writer->getOptions());

    writer->setUserCallback(callback);

    for (const auto& pi : getExtraStageOptions())
    {
        std::string name = pi.first;
        Options options = pi.second;
        std::vector<Stage*> stages = writer->findStage(name);
        for (const auto& s : stages)
        {
            Options opts = s->getOptions();
            for (const auto& o : options.getOptions())
                opts.add(o);
            s->setOptions(opts);
        }
    }

    writer->prepare(ctx);

    // process the data, grabbing the PointBufferSet for visualization of the
    // resulting PointBuffer
    PointBufferSet pbSetOut = writer->execute(ctx);

    if (isVisualize())
        visualize(*pbSetOut.begin());
    //visualize(*pbSetIn.begin(), *pbSetOut.begin());

    return 0;
}
Example #14
TEST(ColorizationFilterTest, ColorizationFilterTest_test_1)
{
    Options ops1;
    ops1.add("filename", Support::datapath("autzen/autzen-point-format-3.las"));
    LasReader reader;
    reader.setOptions(ops1);

    Options options;

    Option red("dimension", "Red", "");
    Option b0("band",1, "");
    Option s0("scale", 1.0f, "scale factor for this dimension");
    Options redO;
    redO.add(b0);
    redO.add(s0);
    red.setOptions(redO);

    Option green("dimension", "Green", "");
    Option b1("band",2, "");
    Option s1("scale", 1.0f, "scale factor for this dimension");
    Options greenO;
    greenO.add(b1);
    greenO.add(s1);
    green.setOptions(greenO);

    Option blue("dimension", "Blue", "");
    Option b2("band",3, "");
    Option s2("scale", 255.0f, "scale factor for this dimension");
    Options blueO;
    blueO.add(b2);
    blueO.add(s2);
    blue.setOptions(blueO);

    Option datasource("raster", Support::datapath("autzen/autzen.jpg"),
        "raster to read");

    Options reader_options;
    reader_options.add(red);
    reader_options.add(green);
    reader_options.add(blue);
    reader_options.add(datasource);

    ColorizationFilter filter;
    filter.setOptions(reader_options);
    filter.setInput(&reader);

    PointContext ctx;

    filter.prepare(ctx);
    PointBufferSet pbSet = filter.execute(ctx);
    EXPECT_EQ(pbSet.size(), 1u);
    PointBufferPtr buf = *pbSet.begin();

    uint16_t r = buf->getFieldAs<uint16_t>(Dimension::Id::Red, 0);
    uint16_t g = buf->getFieldAs<uint16_t>(Dimension::Id::Green, 0);
    uint16_t b = buf->getFieldAs<uint16_t>(Dimension::Id::Blue, 0);

    EXPECT_EQ(r, 210u);
    EXPECT_EQ(g, 205u);
    // We scaled this up to 16bit by multiplying by 255
    EXPECT_EQ(b, 47175u);
}
Example #15
int Random::execute()
{
    Options readerOptions;
    {
        boost::char_separator<char> sep(SEPARATORS);
        std::vector<double> means;
        tokenizer mean_tokens(m_means, sep);
        for (tokenizer::iterator t = mean_tokens.begin(); t != mean_tokens.end(); ++t)
        {
            means.push_back(boost::lexical_cast<double>(*t));
        }

        if (means.size() >= 3)
        {
            readerOptions.add<double>("mean_x", means[0]);
            readerOptions.add<double>("mean_y", means[1]);
            readerOptions.add<double>("mean_z", means[2]);
        }

        std::vector<double> stdevs;
        tokenizer stdev_tokens(m_stdevs, sep);
        for (tokenizer::iterator t = stdev_tokens.begin(); t != stdev_tokens.end(); ++t)
        {
            stdevs.push_back(boost::lexical_cast<double>(*t));
        }

        if (stdevs.size() >= 3)
        {
            readerOptions.add<double>("stdev_x", stdevs[0]);
            readerOptions.add<double>("stdev_y", stdevs[1]);
            readerOptions.add<double>("stdev_z", stdevs[2]);
        }

        if (!m_bounds.empty())
            readerOptions.add<BOX3D>("bounds", m_bounds);

        if (boost::iequals(m_distribution, "uniform"))
            readerOptions.add<std::string>("mode", "uniform");
        else if (boost::iequals(m_distribution, "normal"))
            readerOptions.add<std::string>("mode", "normal");
        else if (boost::iequals(m_distribution, "random"))
            readerOptions.add<std::string>("mode", "random");
        else
            throw pdal_error("invalid distribution: " + m_distribution);
        readerOptions.add<int>("num_points", m_numPointsToWrite);
        readerOptions.add<bool>("debug", isDebug());
        readerOptions.add<boost::uint32_t>("verbose", getVerboseLevel());
    }

    Options writerOptions;
    {
        writerOptions.add<std::string>("filename", m_outputFile);
        setCommonOptions(writerOptions);

        if (m_bCompress)
        {
            writerOptions.add<bool>("compression", true);
        }
    }

    Stage* final_stage = makeReader(readerOptions);

    Writer* writer = AppSupport::makeWriter(m_outputFile, final_stage);
    writer->setOptions(writerOptions);
    PointContext ctx;

    UserCallback* callback;
    if (!getProgressShellCommand().size())
        callback = static_cast<pdal::UserCallback*>(new PercentageCallback);
    else
        callback = static_cast<pdal::UserCallback*>(new ShellScriptCallback(getProgressShellCommand()));
    writer->setUserCallback(callback);
    writer->prepare(ctx);
    PointBufferSet pbSet = writer->execute(ctx);

    if (isVisualize())
        visualize(*pbSet.begin());

    delete writer;
    delete final_stage;

    return 0;
}
Example #16
int Diff::execute()
{
    PointContext sourceCtx;

    Options sourceOptions;
    {
        sourceOptions.add<std::string>("filename", m_sourceFile);
        sourceOptions.add<bool>("debug", isDebug());
        sourceOptions.add<boost::uint32_t>("verbose", getVerboseLevel());
    }
    std::unique_ptr<Stage> source(AppSupport::makeReader(m_sourceFile));
    source->setOptions(sourceOptions);
    source->prepare(sourceCtx);
    PointBufferSet sourceSet = source->execute(sourceCtx);

    ptree errors;

    PointContext candidateCtx;
    Options candidateOptions;
    {
        candidateOptions.add<std::string>("filename", m_candidateFile);
        candidateOptions.add<bool>("debug", isDebug());
        candidateOptions.add<boost::uint32_t>("verbose", getVerboseLevel());
    }

    std::unique_ptr<Stage> candidate(AppSupport::makeReader(m_candidateFile));
    candidate->setOptions(candidateOptions);
    candidate->prepare(candidateCtx);
    PointBufferSet candidateSet = candidate->execute(candidateCtx);

    assert(sourceSet.size() == 1);
    assert(candidateSet.size() == 1);
    PointBufferPtr sourceBuf = *sourceSet.begin();
    PointBufferPtr candidateBuf = *candidateSet.begin();
    if (candidateBuf->size() != sourceBuf->size())
    {
        std::ostringstream oss;

        oss << "Source and candidate files do not have the same point count";
        errors.put("count.error", oss.str());
        errors.put("count.candidate", candidateBuf->size());
        errors.put("count.source", sourceBuf->size());
    }

    MetadataNode source_metadata = sourceCtx.metadata();
    MetadataNode candidate_metadata = candidateCtx.metadata();
    if (source_metadata != candidate_metadata)
    {
        std::ostringstream oss;

        oss << "Source and candidate files do not have the same metadata count";
        errors.put("metadata.error", oss.str());
        errors.put_child("metadata.source", pdal::utils::toPTree(source_metadata));
        errors.put_child("metadata.candidate", pdal::utils::toPTree(candidate_metadata));
    }

    if (candidateCtx.dims().size() != sourceCtx.dims().size())
    {
        std::ostringstream oss;

        oss << "Source and candidate files do not have the same "
            "number of dimensions";
        errors.put<std::string>("schema.error", oss.str());
        //Need to "ptree" the PointContext dimension list in some way
        // errors.put_child("schema.source", sourceCtx.schema()->toPTree());
        // errors.put_child("schema.candidate",
        //     candidateCtx.schema()->toPTree());
    }

    if (errors.size())
    {
        write_json(std::cout, errors);
        return 1;
    }
    else
    {
        // If we made it this far with no errors, now we'll
        // check the points.
        checkPoints(*sourceBuf, *candidateBuf, errors);
        if (errors.size())
        {
            write_json(std::cout, errors);
            return 1;
        }
    }
    return 0;
}