Code example #1
File: qRANSAC_SD.cpp  Project: jkua/cloudcompare
void qRansacSD::doAction()
{
    assert(m_app);
    if (!m_app)
        return;

    const ccHObject::Container& selectedEntities = m_app->getSelectedEntities();
    size_t selNum = selectedEntities.size();
    if (selNum!=1)
    {
        m_app->dispToConsole("Select only one cloud!",ccMainAppInterface::ERR_CONSOLE_MESSAGE);
        return;
    }

    ccHObject* ent = selectedEntities[0];
    assert(ent);
    if (!ent || !ent->isA(CC_POINT_CLOUD))
    {
        m_app->dispToConsole("Select a real point cloud!",ccMainAppInterface::ERR_CONSOLE_MESSAGE);
        return;
    }

    ccPointCloud* pc = static_cast<ccPointCloud*>(ent);

    //input cloud
    size_t count = (size_t)pc->size();
    bool hasNorms = pc->hasNormals();
    PointCoordinateType bbMin[3],bbMax[3];
    pc->getBoundingBox(bbMin,bbMax);

    //Convert CC point cloud to RANSAC_SD type
    PointCloud cloud;
    {
        try
        {
            cloud.reserve(count);
        }
        catch(...)
        {
            m_app->dispToConsole("Not enough memory!",ccMainAppInterface::ERR_CONSOLE_MESSAGE);
            return;
        }

        //default point & normal
        Point Pt;
        Pt.normal[0] = 0.0;
        Pt.normal[1] = 0.0;
        Pt.normal[2] = 0.0;
        for (unsigned i=0; i<(unsigned)count; ++i)
        {
            const CCVector3* P = pc->getPoint(i);
            Pt.pos[0] = P->x;
            Pt.pos[1] = P->y;
            Pt.pos[2] = P->z;
            if (hasNorms)
            {
                const PointCoordinateType* N = pc->getPointNormal(i);
                Pt.normal[0] = N[0];
                Pt.normal[1] = N[1];
                Pt.normal[2] = N[2];
            }
            cloud.push_back(Pt);
        }

        //manually set bounding box!
        Vec3f cbbMin,cbbMax;
        cbbMin[0] = bbMin[0];
        cbbMin[1] = bbMin[1];
        cbbMin[2] = bbMin[2];
        cbbMax[0] = bbMax[0];
        cbbMax[1] = bbMax[1];
        cbbMax[2] = bbMax[2];
        cloud.setBBox(cbbMin,cbbMax);
    }

    //cloud scale (useful for setting several parameters)
    const float scale = cloud.getScale();

    //init dialog with default values
    ccRansacSDDlg rsdDlg(m_app->getMainWindow());
    rsdDlg.epsilonDoubleSpinBox->setValue(.01f * scale);		// set distance threshold to .01f of bounding box width
    // NOTE: Internally the distance threshold is taken as 3 * ransacOptions.m_epsilon!!!
    rsdDlg.bitmapEpsilonDoubleSpinBox->setValue(.02f * scale);	// set bitmap resolution to .02f of bounding box width
    // NOTE: This threshold is NOT multiplied internally!
    rsdDlg.supportPointsSpinBox->setValue(s_supportPoints);
    rsdDlg.normThreshDoubleSpinBox->setValue(s_normThresh);
    rsdDlg.probaDoubleSpinBox->setValue(s_proba);
    rsdDlg.planeCheckBox->setChecked(s_primEnabled[0]);
    rsdDlg.sphereCheckBox->setChecked(s_primEnabled[1]);
    rsdDlg.cylinderCheckBox->setChecked(s_primEnabled[2]);
    rsdDlg.coneCheckBox->setChecked(s_primEnabled[3]);
    rsdDlg.torusCheckBox->setChecked(s_primEnabled[4]);

    if (!rsdDlg.exec())
        return;

    //for parameters persistence
    {
        s_supportPoints = rsdDlg.supportPointsSpinBox->value();
        s_normThresh = rsdDlg.normThreshDoubleSpinBox->value();
        s_proba = rsdDlg.probaDoubleSpinBox->value();

        s_primEnabled[0] = rsdDlg.planeCheckBox->isChecked();
        s_primEnabled[1] = rsdDlg.sphereCheckBox->isChecked();
        s_primEnabled[2] = rsdDlg.cylinderCheckBox->isChecked();
        s_primEnabled[3] = rsdDlg.coneCheckBox->isChecked();
        s_primEnabled[4] = rsdDlg.torusCheckBox->isChecked();

        //consistency check (done after reading the checkboxes, otherwise we would test the previous selection)
        {
            unsigned char primCount = 0;
            for (unsigned char k=0; k<5; ++k)
                primCount += (s_primEnabled[k] ? 1 : 0);
            if (primCount==0)
            {
                m_app->dispToConsole("No primitive type selected!",ccMainAppInterface::ERR_CONSOLE_MESSAGE);
                return;
            }
        }
    }

    //import parameters from dialog
    RansacShapeDetector::Options ransacOptions;
    {
        ransacOptions.m_epsilon = rsdDlg.epsilonDoubleSpinBox->value();
        ransacOptions.m_bitmapEpsilon = rsdDlg.bitmapEpsilonDoubleSpinBox->value();
        ransacOptions.m_normalThresh = rsdDlg.normThreshDoubleSpinBox->value();
        ransacOptions.m_minSupport = rsdDlg.supportPointsSpinBox->value();
        ransacOptions.m_probability = rsdDlg.probaDoubleSpinBox->value();
    }

    //progress dialog
    ccProgressDialog progressCb(false,m_app->getMainWindow());
    progressCb.setRange(0,0);

    if (!hasNorms)
    {
        progressCb.setInfo("Computing normals (please wait)");
        progressCb.start();
        QApplication::processEvents();

        cloud.calcNormals(.01f * scale);

        if (pc->reserveTheNormsTable())
        {
            for (size_t i=0; i<count; ++i)
            {
                CCVector3 N(cloud[i].normal);
                N.normalize();
                pc->addNorm(N.u);
            }
            pc->showNormals(true);

            //currently selected entities appearance may have changed!
            pc->prepareDisplayForRefresh_recursive();
        }
        else
        {
            m_app->dispToConsole("Not enough memory to compute normals!",ccMainAppInterface::ERR_CONSOLE_MESSAGE);
            return;
        }
    }

    // set which primitives are to be detected by adding the respective constructors
    RansacShapeDetector detector(ransacOptions); // the detector object

    if (rsdDlg.planeCheckBox->isChecked())
        detector.Add(new PlanePrimitiveShapeConstructor());
    if (rsdDlg.sphereCheckBox->isChecked())
        detector.Add(new SpherePrimitiveShapeConstructor());
    if (rsdDlg.cylinderCheckBox->isChecked())
        detector.Add(new CylinderPrimitiveShapeConstructor());
    if (rsdDlg.coneCheckBox->isChecked())
        detector.Add(new ConePrimitiveShapeConstructor());
    if (rsdDlg.torusCheckBox->isChecked())
        detector.Add(new TorusPrimitiveShapeConstructor());

    size_t remaining = count;
    typedef std::pair< MiscLib::RefCountPtr< PrimitiveShape >, size_t > DetectedShape;
    MiscLib::Vector< DetectedShape > shapes; // stores the detected shapes

    // run detection
    // returns number of unassigned points
    // the array shapes is filled with pointers to the detected shapes
    // the second element per shapes gives the number of points assigned to that primitive (the support)
    // the points belonging to the first shape (shapes[0]) have been sorted to the end of pc,
    // i.e. into the range [ pc.size() - shapes[0].second, pc.size() )
    // the points of shape i are found in the range
    // [ pc.size() - \sum_{j=0..i} shapes[j].second, pc.size() - \sum_{j=0..i-1} shapes[j].second )

    {
        progressCb.setInfo("Operation in progress");
        progressCb.setMethodTitle("Ransac Shape Detection");
        progressCb.start();

        //run in a separate thread
        s_detector = &detector;
        s_shapes = &shapes;
        s_cloud = &cloud;
        QFuture<void> future = QtConcurrent::run(doDetection);

        unsigned progress = 0;
        while (!future.isFinished())
        {
#if defined(_WIN32) || defined(WIN32)
            ::Sleep(500);
#else
            usleep(500 * 1000); //POSIX usleep takes microseconds (sleep(500) would pause for 500 seconds)
#endif
            progressCb.update(++progress);
            //QtConcurrent::run can't be cancelled!
            /*if (progressCb.isCancelRequested())
            {
            	future.cancel();
            	future.waitForFinished();
            	s_remainingPoints = count;
            	break;
            }
            //*/
        }

        remaining = s_remainingPoints;

        progressCb.stop();
        QApplication::processEvents();
    }
    //else
    //{
    //	remaining = detector.Detect(cloud, 0, cloud.size(), &shapes);
    //}

#ifdef _DEBUG
    FILE* fp = fopen("RANS_SD_trace.txt","wt");

    fprintf(fp,"[Options]\n");
    fprintf(fp,"epsilon=%f\n",ransacOptions.m_epsilon);
    fprintf(fp,"bitmap epsilon=%f\n",ransacOptions.m_bitmapEpsilon);
    fprintf(fp,"normal thresh=%f\n",ransacOptions.m_normalThresh);
    fprintf(fp,"min support=%i\n",ransacOptions.m_minSupport);
    fprintf(fp,"probability=%f\n",ransacOptions.m_probability);

    fprintf(fp,"\n[Statistics]\n");
    fprintf(fp,"input points=%u\n",(unsigned)count);
    fprintf(fp,"segmented=%u\n",(unsigned)(count-remaining));
    fprintf(fp,"remaining=%u\n",(unsigned)remaining);

    if (shapes.size()>0)
    {
        fprintf(fp,"\n[Shapes]\n");
        for (unsigned i=0; i<shapes.size(); ++i)
        {
            PrimitiveShape* shape = shapes[i].first;
            unsigned shapePointsCount = shapes[i].second;

            std::string desc;
            shape->Description(&desc);
            fprintf(fp,"#%i - %s - %i points\n",i+1,desc.c_str(),shapePointsCount);
        }
    }
    fclose(fp);
#endif

    if (remaining == count)
    {
        m_app->dispToConsole("Segmentation failed...",ccMainAppInterface::ERR_CONSOLE_MESSAGE);
        return;
    }

    if (shapes.size() > 0)
    {
        ccHObject* group = 0;
        for (MiscLib::Vector<DetectedShape>::const_iterator it = shapes.begin(); it != shapes.end(); ++it)
        {
            const PrimitiveShape* shape = it->first;
            size_t shapePointsCount = it->second;

            //too many points?!
            if (shapePointsCount > count)
            {
                m_app->dispToConsole("Inconsistent result!",ccMainAppInterface::ERR_CONSOLE_MESSAGE);
                break;
            }

            std::string desc;
            shape->Description(&desc);

            //new cloud for sub-part
            ccPointCloud* pcShape = new ccPointCloud(desc.c_str());

            //we fill cloud with sub-part points
            if (!pcShape->reserve((unsigned)shapePointsCount))
            {
                m_app->dispToConsole("Not enough memory!",ccMainAppInterface::ERR_CONSOLE_MESSAGE);
                delete pcShape;
                break;
            }
            bool saveNormals = pcShape->reserveTheNormsTable();

            for (size_t j=0; j<shapePointsCount; ++j)
            {
                pcShape->addPoint(CCVector3(cloud[count-1-j].pos));
                if (saveNormals)
                    pcShape->addNorm(cloud[count-1-j].normal);
            }

            //random color
            unsigned char col[3]= { (unsigned char)(255.0*(float)rand()/(float)RAND_MAX),
                                    (unsigned char)(255.0*(float)rand()/(float)RAND_MAX),
                                    0
                                  };
            col[2] = 255 - (col[0]+col[1])/2; //blue channel complements the average of the first two channels
            pcShape->setRGBColor(col);
            pcShape->showColors(true);
            pcShape->showNormals(saveNormals);
            pcShape->setVisible(true);

            //convert detected primitive into a CC primitive type
            ccGenericPrimitive* prim = 0;
            switch(shape->Identifier())
            {
            case 0: //plane
            {
                const PlanePrimitiveShape* plane = static_cast<const PlanePrimitiveShape*>(shape);
                Vec3f G = plane->Internal().getPosition();
                Vec3f N = plane->Internal().getNormal();
                Vec3f X = plane->getXDim();
                Vec3f Y = plane->getYDim();

                //we look for real plane extents
                float minX,maxX,minY,maxY;
                for (size_t j=0; j<shapePointsCount; ++j)
                {
                    std::pair<float,float> param;
                    plane->Parameters(cloud[count-1-j].pos,&param);
                    if (j!=0)
                    {
                        if (param.first < minX)
                            minX = param.first;
                        else if (param.first > maxX)
                            maxX = param.first;
                        if (param.second < minY)
                            minY = param.second;
                        else if (param.second > maxY)
                            maxY = param.second;
                    }
                    else
                    {
                        minX=maxX=param.first;
                        minY=maxY=param.second;
                    }
                }

                //we recenter plane (as it is not always the case!)
                float dX = maxX-minX;
                float dY = maxY-minY;
                G += X * (minX+dX*0.5);
                G += Y * (minY+dY*0.5);

                //we build matrix from these vectors
                ccGLMatrix glMat(CCVector3(X.getValue()),
                                 CCVector3(Y.getValue()),
                                 CCVector3(N.getValue()),
                                 CCVector3(G.getValue()));

                //plane primitive
                prim = new ccPlane(dX,dY,&glMat);

            }
            break;

            case 1: //sphere
            {
                const SpherePrimitiveShape* sphere = static_cast<const SpherePrimitiveShape*>(shape);
                float radius = sphere->Internal().Radius();
                Vec3f CC = sphere->Internal().Center();

                pcShape->setName(QString("Sphere (r=%1)").arg(radius,0,'f'));

                //we build matrix from these vectors
                ccGLMatrix glMat;
                glMat.setTranslation(CCVector3(CC.getValue()));
                //sphere primitive
                prim = new ccSphere(radius,&glMat);
                prim->setEnabled(false);

            }
            break;

            case 2: //cylinder
            {
                const CylinderPrimitiveShape* cyl = static_cast<const CylinderPrimitiveShape*>(shape);
                Vec3f G = cyl->Internal().AxisPosition();
                Vec3f N = cyl->Internal().AxisDirection();
                Vec3f X = cyl->Internal().AngularDirection();
                Vec3f Y = N.cross(X);
                float r = cyl->Internal().Radius();
                float hMin = cyl->MinHeight();
                float hMax = cyl->MaxHeight();
                float h = hMax-hMin;
                G += N * (hMin+h*0.5);

                pcShape->setName(QString("Cylinder (r=%1/h=%2)").arg(r,0,'f').arg(h,0,'f'));

                //we build matrix from these vectors
                ccGLMatrix glMat(CCVector3(X.getValue()),
                                 CCVector3(Y.getValue()),
                                 CCVector3(N.getValue()),
                                 CCVector3(G.getValue()));

                //cylinder primitive
                prim = new ccCylinder(r,h,&glMat);
                prim->setEnabled(false);

            }
            break;

            case 3: //cone
            {
                const ConePrimitiveShape* cone = static_cast<const ConePrimitiveShape*>(shape);
                Vec3f CC = cone->Internal().Center();
                Vec3f CA = cone->Internal().AxisDirection();
                float alpha = cone->Internal().Angle();

                //compute max height
                CCVector3 maxP(CC.getValue());
                float maxHeight = 0;
                for (size_t j=0; j<shapePointsCount; ++j)
                {
                    float h = cone->Internal().Height(cloud[count-1-j].pos);
                    if (h>maxHeight)
                    {
                        maxHeight=h;
                        maxP = CCVector3(cloud[count-1-j].pos);
                    }
                }

                pcShape->setName(QString("Cone (alpha=%1/h=%2)").arg(alpha,0,'f').arg(maxHeight,0,'f'));

                float radius = tan(alpha)*maxHeight;
                CCVector3 Z = CCVector3(CA.getValue());
                CCVector3 C = CCVector3(CC.getValue()); //cone apex

                //construct remaining of base
                Z.normalize();
                CCVector3 X = maxP - (C + maxHeight * Z);
                X.normalize();
                CCVector3 Y = Z * X;

                //we build matrix from these vectors
                ccGLMatrix glMat(X,Y,Z,C+(maxHeight*0.5)*Z);

                //cone primitive
                prim = new ccCone(0,radius,maxHeight,0,0,&glMat);
                prim->setEnabled(false);

            }
            break;

            case 4: //torus
            {
                const TorusPrimitiveShape* torus = static_cast<const TorusPrimitiveShape*>(shape);
                if (torus->Internal().IsAppleShaped())
                {
                    m_app->dispToConsole("[qRansacSD] Apple-shaped tori are not handled by CloudCompare!",ccMainAppInterface::WRN_CONSOLE_MESSAGE);
                }
                else
                {
                    Vec3f CC = torus->Internal().Center();
                    Vec3f CA = torus->Internal().AxisDirection();
                    float minRadius = torus->Internal().MinorRadius();
                    float maxRadius = torus->Internal().MajorRadius();

                    pcShape->setName(QString("Torus (r=%1/R=%2)").arg(minRadius,0,'f').arg(maxRadius,0,'f'));

                    CCVector3 Z = CCVector3(CA.getValue());
                    CCVector3 C = CCVector3(CC.getValue());
                    //construct remaining of base
                    CCVector3 X = Z.orthogonal();
                    CCVector3 Y = Z * X;

                    //we build matrix from these vectors
                    ccGLMatrix glMat(X,Y,Z,C);

                    //torus primitive
                    prim = new ccTorus(maxRadius-minRadius,maxRadius+minRadius,M_PI*2.0,false,0,&glMat);
                    prim->setEnabled(false);
                }

            }
            break;
            }

            //is there a primitive to add to part cloud?
            if (prim)
            {
                prim->applyGLTransformation_recursive();
                pcShape->addChild(prim);
                prim->setDisplay(pcShape->getDisplay());
                prim->setColor(col);
                prim->showColors(true);
                prim->setVisible(true);
            }

            if (!group)
            {
                group = new ccHObject(QString("Ransac Detected Shapes (%1)").arg(ent->getName()));
                m_app->addToDB(group,true,0,false);
            }
            group->addChild(pcShape);
            m_app->addToDB(pcShape,true,0,false);

            count -= shapePointsCount;

            QApplication::processEvents();
        }

        if (group)
        {
            assert(group->getChildrenNumber()!=0);

            //we hide input cloud
            pc->setEnabled(false);
            m_app->dispToConsole("[qRansacSD] Input cloud has been automatically hidden!",ccMainAppInterface::WRN_CONSOLE_MESSAGE);

            //we add new group to DB/display
            group->setVisible(true);
            group->setDisplay_recursive(pc->getDisplay());
            group->prepareDisplayForRefresh_recursive();
            m_app->refreshAll();
        }
    }
}
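
The comment block before the detection call above describes how RansacShapeDetector sorts the assigned points to the end of the cloud. As a quick illustration of that layout (a standalone sketch with made-up counts, not part of the plugin), the per-shape [begin, end) index ranges can be recovered like this:

#include <cstddef>
#include <cstdio>
#include <vector>

// Sketch only: given the total point count and the per-shape support counts
// (shapes[i].second above), print the [begin, end) range that holds the points
// of each detected shape after RansacShapeDetector::Detect() has reordered the
// cloud (shape 0 sits at the very end, shape 1 just before it, and so on).
int main()
{
    std::size_t count = 1000;                             // hypothetical cloud size
    std::vector<std::size_t> support = { 400, 250, 100 }; // hypothetical supports

    std::size_t end = count;
    for (std::size_t i = 0; i < support.size(); ++i)
    {
        std::size_t begin = end - support[i];
        std::printf("shape #%zu -> [%zu, %zu)\n", i, begin, end);
        end = begin; // the next shape occupies the range just before this one
    }
    return 0;
}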
Code example #2
void optimized_shs(const PointCloud<PointXYZRGB> &cloud, float tolerance, PointIndices &indices_in, PointIndices &indices_out, float delta_hue)
{
    // Create a vector of per-point "processed" flags (stored as char), initialized to 0
    std::vector<char> processed (cloud.points.size (), 0);

    double squared_radius = tolerance * tolerance;

    // Process all points in the indices vector
    for (size_t k = 0; k < indices_in.indices.size (); ++k)
    {
        int i = indices_in.indices[k];
        if (processed[i])
            continue;

        processed[i] = 1;

        std::vector<int> seed_queue;
        int sq_idx = 0;
        seed_queue.push_back (i);

        PointXYZRGB  p;
        p = cloud.points[i];
        PointXYZHSV h;
        PointXYZRGBtoXYZHSV(p, h);

        while (sq_idx < (int)seed_queue.size ())
        {
            int index = seed_queue[sq_idx];
            int ycoo = index / cloud.width;
            int xcoo = index - cloud.width * ycoo;

            const int winsz = 20;
            const int winsz2 = winsz/2;

            int ybeg = max(0, ycoo - winsz2);
            int yend = min((int)cloud.height-1, ycoo + winsz2) + 1;
            int xend = min((int)cloud.width-1, xcoo + winsz2) + 1;

            for(int y = ybeg; y < yend; ++y)
                for(int x = max(0, xcoo - winsz2); x < xend; ++x)
                {
                    int idx = y * cloud.width + x;
                    if (!processed[idx] && (sqnorm(h, cloud.at(x,y)) < squared_radius))
                    {
                        PointXYZHSV h_l;
                        PointXYZRGB p_l = cloud.at(x, y);
                        PointXYZRGBtoXYZHSV(p_l, h_l);

                        if (fabs(h_l.h - h.h) < delta_hue)
                        {
                            seed_queue.push_back (idx);
                            processed[idx] = 1;
                        }
                    }
                }

            sq_idx++;
        }
        // Copy the seed queue into the output indices
        for (size_t l = 0; l < seed_queue.size (); ++l)
            indices_out.indices.push_back(seed_queue[l]);
    }
    // This is purely aesthetic and can be removed for speed
    std::sort (indices_out.indices.begin (), indices_out.indices.end ());
}
Code example #3
void optimized_shs3(const PointCloud<PointXYZRGB> &cloud, float tolerance, PointIndices &indices_in, PointIndices &indices_out, float delta_hue)
{
    cv::Mat huebuf(cloud.height, cloud.width, CV_32F);
    float *hue = huebuf.ptr<float>();

    for(size_t i = 0; i < cloud.points.size(); ++i)
    {
        PointXYZHSV h;
        PointXYZRGB p = cloud.points[i];
        PointXYZRGBtoXYZHSV(p, h);
        hue[i] = h.h;
    }

    // Create a vector of per-point "processed" flags (stored as char), initialized to 0
    std::vector<char> processed (cloud.points.size (), 0);

    SearchD search;
    search.setInputCloud(cloud.makeShared());

    vector< vector<int> > storage(100);

    // Process all points in the indices vector
#pragma omp parallel for
    for (size_t k = 0; k < indices_in.indices.size (); ++k)
    {
        int i = indices_in.indices[k];
        if (processed[i])
            continue;

        processed[i] = 1;

        std::vector<int> seed_queue;
        seed_queue.reserve(cloud.size());
        int sq_idx = 0;
        seed_queue.push_back (i);

        PointXYZRGB p = cloud.points[i];
        float h = hue[i];

        while (sq_idx < (int)seed_queue.size ())
        {
            int index = seed_queue[sq_idx];
            const PointXYZRGB& q = cloud.points[index];

            if(!isFinite (q))
            {
                sq_idx++; //still advance in the queue before skipping, otherwise this loops forever
                continue;
            }

            // search window
            unsigned left, right, top, bottom;
            double squared_radius = tolerance * tolerance;
            search.getProjectedRadiusSearchBox (q, squared_radius, left, right, top, bottom);

            unsigned yEnd  = (bottom + 1) * cloud.width + right + 1;
            unsigned idx  = top * cloud.width + left;
            unsigned skip = cloud.width - right + left - 1;
            unsigned xEnd = idx - left + right + 1;

            for (; xEnd != yEnd; idx += skip, xEnd += cloud.width)
            {
                for (; idx < xEnd; ++idx)
                {
                    if (processed[idx])
                        continue;

                    if (sqnorm(cloud.points[idx], q) <= squared_radius)
                    {
                        float h_l = hue[idx];

                        if (fabs(h_l - h) < delta_hue)
                        {
                            if(idx & 1)
                              seed_queue.push_back (idx);
                            processed[idx] = 1;
                        }

                    }
                }
            }
            sq_idx++;

        }
        // Copy the seed queue into the output indices
        int id = omp_get_thread_num();
        storage[id].insert(storage[id].end(), seed_queue.begin(), seed_queue.end());
    }

    indices_out.indices.clear();
    for(size_t i = 0; i < storage.size(); ++i)
        indices_out.indices.insert(indices_out.indices.begin(), storage[i].begin(), storage[i].end());

    std::sort (indices_out.indices.begin (), indices_out.indices.end ());
    indices_out.indices.erase(std::unique(indices_out.indices.begin (), indices_out.indices.end ()), indices_out.indices.end());
}
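
The flattened idx/skip/xEnd/yEnd traversal in optimized_shs3 is easy to misread. The sketch below (illustrative only; it assumes the inclusive box bounds returned by getProjectedRadiusSearchBox and the organized cloud width) enumerates the same row-major indices with a plain nested loop:

#include <vector>

// Sketch only: a nested-loop form of the flattened window walk used above.
// 'left', 'right', 'top', 'bottom' are the inclusive search-box bounds and
// 'width' is cloud.width; the returned values are the same row-major indices
// the idx/skip/xEnd/yEnd loop visits.
static std::vector<unsigned> windowIndices(unsigned left, unsigned right,
                                           unsigned top, unsigned bottom,
                                           unsigned width)
{
    std::vector<unsigned> out;
    for (unsigned y = top; y <= bottom; ++y)
        for (unsigned x = left; x <= right; ++x)
            out.push_back(y * width + x); // same index as cloud.points[idx]
    return out;
}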
Code example #4
File: EvaluatePCDMatch.cpp  Project: rvbust/Open3D
int main(int argc, char *argv[])
{
    using namespace three;

    if (argc <= 1 || ProgramOptionExists(argc, argv, "--help") ||
            ProgramOptionExists(argc, argv, "-h")) {
        PrintHelp();
        return 0;
    }
    int verbose = GetProgramOptionAsInt(argc, argv, "--verbose", 2);
    SetVerbosityLevel((VerbosityLevel)verbose);
    std::string log_filename = GetProgramOptionAsString(argc, argv, "--log");
    std::string gt_filename = GetProgramOptionAsString(argc, argv, "--gt");
    std::string pcd_dirname = GetProgramOptionAsString(argc, argv, "--dir");
    double threshold = GetProgramOptionAsDouble(argc, argv, "--threshold");
    double threshold_rmse = GetProgramOptionAsDouble(argc, argv,
            "--threshold_rmse", threshold * 2.0);
    if (pcd_dirname.empty()) {
        pcd_dirname = filesystem::GetFileParentDirectory(log_filename) +
                "pcds/";
    }
    double threshold2 = threshold * threshold;
    std::vector<std::string> pcd_names;
    filesystem::ListFilesInDirectoryWithExtension(pcd_dirname, "pcd",
            pcd_names);
    std::vector<PointCloud> pcds(pcd_names.size());
    std::vector<KDTreeFlann> kdtrees(pcd_names.size());
    for (auto i = 0; i < pcd_names.size(); i++) {
        ReadPointCloud(pcd_dirname + "cloud_bin_" + std::to_string(i) + ".pcd",
                pcds[i]);
        kdtrees[i].SetGeometry(pcds[i]);
    }

    std::vector<std::pair<int, int>> pair_ids;
    std::vector<Eigen::Matrix4d> transformations;
    ReadLogFile(log_filename, pair_ids, transformations);
    std::vector<Eigen::Matrix4d> gt_trans;
    ReadLogFile(gt_filename, pair_ids, gt_trans);

    double total_rmse = 0.0;
    int positive = 0;
    double positive_rmse = 0;
    for (auto k = 0; k < pair_ids.size(); k++) {
        PointCloud source = pcds[pair_ids[k].second];
        source.Transform(transformations[k]);
        PointCloud gtsource = pcds[pair_ids[k].second];
        gtsource.Transform(gt_trans[k]);
        std::vector<int> indices(1);
        std::vector<double> distance2(1);
        int correspondence_num = 0;
        double rmse = 0.0;
        for (auto i = 0; i < source.points_.size(); i++) {
            if (kdtrees[pair_ids[k].first].SearchKNN(gtsource.points_[i], 1,
                    indices, distance2) > 0) {
                if (distance2[0] < threshold2) {
                    correspondence_num++;
                    double new_dis = (source.points_[i] -
                            pcds[pair_ids[k].first].points_[indices[0]]
                            ).norm();
                    rmse += new_dis * new_dis;
                }
            }
        }
        rmse = std::sqrt(rmse / (double)correspondence_num);
        PrintInfo("#%d <-- #%d : rmse %.4f\n", pair_ids[k].first,
                pair_ids[k].second, rmse);
        total_rmse += rmse;
        if (rmse < threshold_rmse) {
            positive++;
            positive_rmse += rmse;
        }
    }
    PrintInfo("Average rmse %.8f (%.8f / %d)\n", total_rmse /
            (double)pair_ids.size(), total_rmse, (int)pair_ids.size());
    PrintInfo("Average rmse of positives %.8f (%.8f / %d)\n", positive_rmse /
            (double)positive, positive_rmse, positive);
    PrintInfo("Accuracy %.2f%% (%d / %d)\n", (double)positive * 100.0 /
            (double)pair_ids.size(), positive, (int)pair_ids.size());
    return 1;
}
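
For reference, the aggregation at the end of main() boils down to three numbers: the average RMSE over all pairs, the average RMSE over pairs below --threshold_rmse, and the fraction of such pairs. A minimal sketch of just that reporting step (hypothetical helper, not part of Open3D):

#include <cstdio>
#include <vector>

// Sketch only: given the per-pair RMSE values, print the average RMSE over
// all pairs, the average over "positive" pairs (rmse < threshold_rmse), and
// the percentage of positive pairs, mirroring the PrintInfo calls above.
static void reportMetrics(const std::vector<double> &rmse_per_pair,
                          double threshold_rmse)
{
    double total = 0.0, positive_total = 0.0;
    int positive = 0;
    for (double r : rmse_per_pair)
    {
        total += r;
        if (r < threshold_rmse) { positive++; positive_total += r; }
    }
    std::printf("Average rmse %.8f\n", total / (double)rmse_per_pair.size());
    std::printf("Average rmse of positives %.8f\n", positive_total / (double)positive);
    std::printf("Accuracy %.2f%%\n", 100.0 * positive / (double)rmse_per_pair.size());
}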
Code example #5
  TEST (PCL, SpinImageEstimationEigen)
  {
    // Estimate normals first
    double mr = 0.002;
    NormalEstimation<PointXYZ, Normal> n;
    PointCloud<Normal>::Ptr normals (new PointCloud<Normal> ());
    // set parameters
    n.setInputCloud (cloud.makeShared ());
    boost::shared_ptr<vector<int> > indicesptr (new vector<int> (indices));
    n.setIndices (indicesptr);
    n.setSearchMethod (tree);
    n.setRadiusSearch (20 * mr);
    n.compute (*normals);

    EXPECT_NEAR (normals->points[103].normal_x, 0.36683175, 1e-4);
    EXPECT_NEAR (normals->points[103].normal_y, -0.44696972, 1e-4);
    EXPECT_NEAR (normals->points[103].normal_z, -0.81587529, 1e-4);
    EXPECT_NEAR (normals->points[200].normal_x, -0.71414840, 1e-4);
    EXPECT_NEAR (normals->points[200].normal_y, -0.06002361, 1e-4);
    EXPECT_NEAR (normals->points[200].normal_z, -0.69741613, 1e-4);

    EXPECT_NEAR (normals->points[140].normal_x, -0.45109111, 1e-4);
    EXPECT_NEAR (normals->points[140].normal_y, -0.19499126, 1e-4);
    EXPECT_NEAR (normals->points[140].normal_z, -0.87091631, 1e-4);

    SpinImageEstimation<PointXYZ, Normal, Eigen::MatrixXf> spin_est (8, 0.5, 16);
    // set parameters
    //spin_est.setInputWithNormals (cloud.makeShared (), normals);
    spin_est.setInputCloud (cloud.makeShared ());
    spin_est.setInputNormals (normals);
    spin_est.setIndices (indicesptr);
    spin_est.setSearchMethod (tree);
    spin_est.setRadiusSearch (40*mr);

    // Object
    PointCloud<Eigen::MatrixXf>::Ptr spin_images (new PointCloud<Eigen::MatrixXf>);

    // radial SI
    spin_est.setRadialStructure ();

    // estimate
    spin_est.computeEigen (*spin_images);
    EXPECT_EQ (spin_images->points.rows (), indices.size ());

    EXPECT_NEAR (spin_images->points (100, 0), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 12), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 24), 0.00233226, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 36), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 48), 8.48662e-005, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 60), 0.0266387, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 72), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 84), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 96), 0.0414662, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 108), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 120), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 132), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 144), 0.0128513, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 0), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 12), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 24), 0.00932424, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 36), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 48), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 60), 0.0145733, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 72), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 84), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 96), 0.00034457, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 108), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 120), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 132), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 144), 0.0121195, 1e-4);


    // radial SI, angular spin-images
    spin_est.setAngularDomain ();

    // estimate
    spin_est.computeEigen (*spin_images);
    EXPECT_EQ (spin_images->points.rows (), indices.size ());

    EXPECT_NEAR (spin_images->points (100, 0), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 12), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 24), 0.13213, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 36), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 48), 0.908804, 1.1e-4);
    EXPECT_NEAR (spin_images->points (100, 60), 0.63875, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 72), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 84), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 96), 0.550392, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 108), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 120), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 132), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 144), 0.25713, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 0), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 12), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 24), 0.230605, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 36), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 48), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 60), 0.764872, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 72), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 84), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 96), 1.02824, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 108), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 120), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 132), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 144), 0.293567, 1e-4);


    // rectangular SI
    spin_est.setRadialStructure (false);
    spin_est.setAngularDomain (false);

    // estimate
    spin_est.computeEigen (*spin_images);
    EXPECT_EQ (spin_images->points.rows (), indices.size ());

    EXPECT_NEAR (spin_images->points (100, 0), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 12), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 24), 0.000889345, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 36), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 48), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 60), 0.0489534, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 72), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 84), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 96), 0.0747141, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 108), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 120), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 132), 0.0173423, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 144), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 0), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 12), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 24), 0.0267132, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 36), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 48), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 60), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 72), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 84), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 96), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 108), 0.0209709, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 120), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 132), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 144), 0.029372, 1e-4);

    // rectangular SI, angular spin-images
    spin_est.setAngularDomain ();

    // estimate
    spin_est.computeEigen (*spin_images);
    EXPECT_EQ (spin_images->points.rows (), indices.size ());

    EXPECT_NEAR (spin_images->points (100, 0), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 12), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 24), 0.132126, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 36), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 48), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 60), 0.388011, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 72), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 84), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 96), 0.468881, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 108), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 120), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 132), 0.678995, 1e-4);
    EXPECT_NEAR (spin_images->points (100, 144), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 0), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 12), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 24), 0.143845, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 36), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 48), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 60), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 72), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 84), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 96), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 108), 0.706084, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 120), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 132), 0, 1e-4);
    EXPECT_NEAR (spin_images->points (300, 144), 0.272542, 1e-4);
  }
Code example #6
TEST (SampleConsensusModelNormalParallelPlane, RANSAC)
{
  srand (0);

  // Use a custom point cloud for these tests until we need something better
  PointCloud<PointXYZ> cloud;
  PointCloud<Normal> normals;
  cloud.points.resize (10);
  normals.resize (10);

  for (unsigned idx = 0; idx < cloud.size (); ++idx)
  {
    cloud.points[idx].x = static_cast<float> ((rand () % 200) - 100);
    cloud.points[idx].y = static_cast<float> ((rand () % 200) - 100);
    cloud.points[idx].z = 0.0f;

    normals.points[idx].normal_x = 0.0f;
    normals.points[idx].normal_y = 0.0f;
    normals.points[idx].normal_z = 1.0f;
  }

  // Create a shared plane model pointer directly
  SampleConsensusModelNormalParallelPlanePtr model (new SampleConsensusModelNormalParallelPlane<PointXYZ, Normal> (cloud.makeShared ()));
  model->setInputNormals (normals.makeShared ());

  const float max_angle_rad = 0.01f;
  const float angle_eps = 0.001f;
  model->setEpsAngle (max_angle_rad);

  // Test true axis
  {
    model->setAxis (Eigen::Vector3f (0, 0, 1));

    RandomSampleConsensus<PointXYZ> sac (model, 0.03);
    sac.computeModel();

    std::vector<int> inliers;
    sac.getInliers (inliers);
    ASSERT_EQ (cloud.size (), inliers.size ());
  }

  // test axis slightly in valid range
  {
    model->setAxis (Eigen::Vector3f (0, sin (max_angle_rad * (1 - angle_eps)), cos (max_angle_rad * (1 - angle_eps))));
    RandomSampleConsensus<PointXYZ> sac (model, 0.03);
    sac.computeModel ();

    std::vector<int> inliers;
    sac.getInliers (inliers);
    ASSERT_EQ (cloud.size (), inliers.size ());
  }

  // test axis slightly out of valid range
  {
    model->setAxis (Eigen::Vector3f (0, sin (max_angle_rad * (1 + angle_eps)), cos (max_angle_rad * (1 + angle_eps))));
    RandomSampleConsensus<PointXYZ> sac (model, 0.03);
    sac.computeModel ();

    std::vector<int> inliers;
    sac.getInliers (inliers);
    ASSERT_EQ (0, inliers.size ());
  }
}
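
The two boundary cases in this test rely on the fact that an axis of the form (0, sin(a), cos(a)) makes exactly angle a with the plane normal (0, 0, 1), so a = max_angle_rad * (1 - angle_eps) lands just inside the eps-angle threshold while a = max_angle_rad * (1 + angle_eps) lands just outside it. A small standalone sketch of that computation (values taken from the test):

#include <cmath>
#include <cstdio>

// Sketch only: the angle between the test axis (0, sin a, cos a) and the
// plane normal (0, 0, 1) is acos of their dot product, i.e. exactly a.
int main()
{
    const double max_angle_rad = 0.01, angle_eps = 0.001;
    const double signs[2] = { -1.0, +1.0 };
    for (double sign : signs)
    {
        double a = max_angle_rad * (1.0 + sign * angle_eps);
        double dot = std::cos(a); // (0, sin a, cos a) . (0, 0, 1)
        std::printf("axis angle = %.6f rad -> %s threshold %.6f\n",
                    std::acos(dot), sign < 0 ? "inside" : "outside", max_angle_rad);
    }
    return 0;
}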
Code example #7
File: ar_kinect.cpp  Project: MorS25/dasl-ros-pkg
  /* 
   * One and only one callback, now takes cloud, does everything else needed. 
   */
  void ARPublisher::getTransformationCallback (const sensor_msgs::PointCloud2ConstPtr & msg)
  {
    sensor_msgs::ImagePtr image_msg(new sensor_msgs::Image);
    ARUint8 *dataPtr;
    ARMarkerInfo *marker_info;
    int marker_num;
    int i, k, j;

    /* do we need to initialize? */
    if(!configured_)
    {
      if(msg->width == 0 || msg->height == 0)
      {
        ROS_ERROR ("Deformed cloud! Size = %d, %d.", msg->width, msg->height);
        return;
      }

      cam_param_.xsize = msg->width;
      cam_param_.ysize = msg->height;

      cam_param_.dist_factor[0] = msg->width/2;         // x0 = cX from openCV calibration
      cam_param_.dist_factor[1] = msg->height/2;        // y0 = cY from openCV calibration
      cam_param_.dist_factor[2] = 0;                    // f = -100*k1 from CV. Note, we had to do mm^2 to m^2, hence 10^8->10^2
      cam_param_.dist_factor[3] = 1.0;                  // scale factor, should probably be >1, but who cares...
      
      arInit ();
    }

    /* convert cloud to PCL */
    PointCloud cloud;
    pcl::fromROSMsg(*msg, cloud);
 
    /* get an OpenCV image from the cloud */
    pcl::toROSMsg (cloud, *image_msg);

    cv_bridge::CvImagePtr cv_ptr;
    try
    {
        cv_ptr = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8);
    }
    catch (cv_bridge::Exception& e)
    {
      ROS_ERROR ("Could not convert from '%s' to 'bgr8'.", image_msg->encoding.c_str ());
      return; //cv_ptr is invalid here, so bail out before dereferencing it below
    }
    dataPtr = (ARUint8 *) cv_ptr->image.ptr();

    /* detect the markers in the video frame */
    if (arDetectMarkerLite (dataPtr, threshold_, &marker_info, &marker_num) < 0)
    {
      argCleanup ();
      return;
    }
 
    arPoseMarkers_.markers.clear ();
    /* check for known patterns */
    for (i = 0; i < objectnum; i++)
    {
      k = -1;
      for (j = 0; j < marker_num; j++)
      {
        if (object[i].id == marker_info[j].id)
        {
          if (k == -1)
            k = j;
          else                  // make sure you have the best pattern (highest confidence factor)
          if (marker_info[k].cf < marker_info[j].cf)
            k = j;
        }
      }
      if (k == -1)
      {
        object[i].visible = 0;
        continue;
      }
      
      /* create a cloud for marker corners */
      int d = marker_info[k].dir;
      PointCloud marker;
      marker.push_back( cloud.at( (int)marker_info[k].vertex[(4-d)%4][0], (int)marker_info[k].vertex[(4-d)%4][1] ) ); // upper left
      marker.push_back( cloud.at( (int)marker_info[k].vertex[(5-d)%4][0], (int)marker_info[k].vertex[(5-d)%4][1] ) ); // upper right
      marker.push_back( cloud.at( (int)marker_info[k].vertex[(6-d)%4][0], (int)marker_info[k].vertex[(6-d)%4][1] ) ); // lower right
      marker.push_back( cloud.at( (int)marker_info[k].vertex[(7-d)%4][0], (int)marker_info[k].vertex[(7-d)%4][1] ) ); // lower left

      /* create an ideal cloud */
      double w = object[i].marker_width;
      PointCloud ideal;
      ideal.push_back( makeRGBPoint(-w/2,w/2,0) );
      ideal.push_back( makeRGBPoint(w/2,w/2,0) );
      ideal.push_back( makeRGBPoint(w/2,-w/2,0) );
      ideal.push_back( makeRGBPoint(-w/2,-w/2,0) );

      /* get transformation */
      Eigen::Matrix4f t;
      TransformationEstimationSVD obj;
      obj.estimateRigidTransformation( marker, ideal, t );

      
      /* get final transformation */
      tf::Transform transform = tfFromEigen(t.inverse());
   
      // any(transform == nan)
      tf::Matrix3x3  m = transform.getBasis();
      tf::Vector3    p = transform.getOrigin();
      bool invalid = false;
      for(int i=0; i < 3; i++)
        for(int j=0; j < 3; j++)
          invalid = (invalid || isnan(m[i][j]) || fabs(m[i][j]) > 1.0);

      for(int i=0; i < 3; i++)
          invalid = (invalid || isnan(p[i]));
       

      if(invalid)
        continue; 

      /* publish the marker */
      ar_pose::ARMarker ar_pose_marker;
      ar_pose_marker.header.frame_id = msg->header.frame_id;
      ar_pose_marker.header.stamp = msg->header.stamp;
      ar_pose_marker.id = object[i].id;

      ar_pose_marker.pose.pose.position.x = transform.getOrigin().getX();
      ar_pose_marker.pose.pose.position.y = transform.getOrigin().getY();
      ar_pose_marker.pose.pose.position.z = transform.getOrigin().getZ();

      ar_pose_marker.pose.pose.orientation.x = transform.getRotation().getX(); //use the quaternion components (the rotation axis alone is not a valid orientation)
      ar_pose_marker.pose.pose.orientation.y = transform.getRotation().getY();
      ar_pose_marker.pose.pose.orientation.z = transform.getRotation().getZ();
      ar_pose_marker.pose.pose.orientation.w = transform.getRotation().getW();

      ar_pose_marker.confidence = marker_info[k].cf; //confidence of the best-matching marker
      arPoseMarkers_.markers.push_back (ar_pose_marker);

      /* publish transform */
      if (publishTf_)
      {
	    broadcaster_.sendTransform(tf::StampedTransform(transform, msg->header.stamp, msg->header.frame_id, object[i].name));
      }

      /* publish visual marker */

      if (publishVisualMarkers_)
      {
        tf::Vector3 markerOrigin (0, 0, 0.25 * object[i].marker_width * AR_TO_ROS);
        tf::Transform m (tf::Quaternion::getIdentity (), markerOrigin);
        tf::Transform markerPose = transform * m; // marker pose in the camera frame

        tf::poseTFToMsg (markerPose, rvizMarker_.pose);

        rvizMarker_.header.frame_id = msg->header.frame_id;
        rvizMarker_.header.stamp = msg->header.stamp;
        rvizMarker_.id = object[i].id;

        rvizMarker_.scale.x = 1.0 * object[i].marker_width * AR_TO_ROS;
        rvizMarker_.scale.y = 1.0 * object[i].marker_width * AR_TO_ROS;
        rvizMarker_.scale.z = 0.5 * object[i].marker_width * AR_TO_ROS;
        rvizMarker_.ns = "basic_shapes";
        rvizMarker_.type = visualization_msgs::Marker::CUBE;
        rvizMarker_.action = visualization_msgs::Marker::ADD;
        switch (i)
        {
          case 0:
            rvizMarker_.color.r = 0.0f;
            rvizMarker_.color.g = 0.0f;
            rvizMarker_.color.b = 1.0f;
            rvizMarker_.color.a = 1.0;
            break;
          case 1:
            rvizMarker_.color.r = 1.0f;
            rvizMarker_.color.g = 0.0f;
            rvizMarker_.color.b = 0.0f;
            rvizMarker_.color.a = 1.0;
            break;
          default:
            rvizMarker_.color.r = 0.0f;
            rvizMarker_.color.g = 1.0f;
            rvizMarker_.color.b = 0.0f;
            rvizMarker_.color.a = 1.0;
        }
        rvizMarker_.lifetime = ros::Duration ();

        rvizMarkerPub_.publish (rvizMarker_);
        ROS_DEBUG ("Published visual marker");
      }
    }
    arMarkerPub_.publish (arPoseMarkers_);
    ROS_DEBUG ("Published ar_multi markers");
  }
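
The (4-d)%4 through (7-d)%4 indices used when building the marker corner cloud rotate the ARToolKit vertex order by the detected direction d, so the corners are always pushed as upper-left, upper-right, lower-right, lower-left. A standalone sketch that just prints the mapping (illustration only):

#include <cstdio>

// Sketch only: show which ARToolKit vertex index ends up in each corner slot
// for every possible marker direction d (0..3), matching the expressions above.
int main()
{
    for (int d = 0; d < 4; ++d)
    {
        std::printf("dir=%d -> vertex order: %d %d %d %d\n",
                    d, (4 - d) % 4, (5 - d) % 4, (6 - d) % 4, (7 - d) % 4);
    }
    return 0;
}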
Code example #8
File: test_sampling.cpp  Project: 2php/pcl
TEST (RandomSample, Filters)
{
  // Test the PointCloud<PointT> method
  // Randomly sample 10 points from cloud
  RandomSample<PointXYZ> sample (true); // Extract removed indices
  sample.setInputCloud (cloud_walls);
  sample.setSample (10);

  // Indices
  std::vector<int> indices;
  sample.filter (indices);

  EXPECT_EQ (int (indices.size ()), 10);

  // Cloud
  PointCloud<PointXYZ> cloud_out;
  sample.filter(cloud_out);

  EXPECT_EQ (int (cloud_out.width), 10);
  EXPECT_EQ (int (indices.size ()), int (cloud_out.size ()));

  for (size_t i = 0; i < indices.size () - 1; ++i)
  {
    // Check that indices are sorted
    EXPECT_LT (indices[i], indices[i+1]);
    // Compare original points with sampled indices against sampled points
    EXPECT_NEAR (cloud_walls->points[indices[i]].x, cloud_out.points[i].x, 1e-4);
    EXPECT_NEAR (cloud_walls->points[indices[i]].y, cloud_out.points[i].y, 1e-4);
    EXPECT_NEAR (cloud_walls->points[indices[i]].z, cloud_out.points[i].z, 1e-4);
  }

  IndicesConstPtr removed = sample.getRemovedIndices ();
  EXPECT_EQ (removed->size (), cloud_walls->size () - 10);
  // Organized
  // (sdmiller) Removing for now, to debug the Linux 32-bit segfault offline
  sample.setKeepOrganized (true);
  sample.filter(cloud_out);
  removed = sample.getRemovedIndices ();
  EXPECT_EQ (int (removed->size ()), cloud_walls->size () - 10);
  for (size_t i = 0; i < removed->size (); ++i)
  {
    EXPECT_TRUE (pcl_isnan (cloud_out.at ((*removed)[i]).x));
    EXPECT_TRUE (pcl_isnan (cloud_out.at ((*removed)[i]).y));
    EXPECT_TRUE (pcl_isnan (cloud_out.at ((*removed)[i]).z));
  }

  EXPECT_EQ (cloud_out.width, cloud_walls->width);
  EXPECT_EQ (cloud_out.height, cloud_walls->height);
  // Negative
  sample.setKeepOrganized (false);
  sample.setNegative (true);
  sample.filter(cloud_out);
  removed = sample.getRemovedIndices ();
  EXPECT_EQ (int (removed->size ()), 10);
  EXPECT_EQ (int (cloud_out.size ()), int (cloud_walls->size () - 10));

  // Make sure sampling >N works
  sample.setSample (static_cast<unsigned int> (cloud_walls->size ()+10));
  sample.setNegative (false);
  sample.filter (cloud_out);
  EXPECT_EQ (cloud_out.size (), cloud_walls->size ());
  removed = sample.getRemovedIndices ();
  EXPECT_TRUE (removed->empty ());

  // Test the pcl::PCLPointCloud2 method
  // Randomly sample 10 points from cloud
  pcl::PCLPointCloud2::Ptr cloud_blob (new pcl::PCLPointCloud2 ());
  toPCLPointCloud2 (*cloud_walls, *cloud_blob);
  RandomSample<pcl::PCLPointCloud2> sample2;
  sample2.setInputCloud (cloud_blob);
  sample2.setSample (10);

  // Indices
  std::vector<int> indices2;
  sample2.filter (indices2);

  EXPECT_EQ (int (indices2.size ()), 10);

  // Cloud
  pcl::PCLPointCloud2 output_blob;
  sample2.filter (output_blob);

  fromPCLPointCloud2 (output_blob, cloud_out);

  EXPECT_EQ (int (cloud_out.width), 10);
  EXPECT_EQ (int (indices2.size ()), int (cloud_out.size ()));

  for (size_t i = 0; i < indices2.size () - 1; ++i)
  {
    // Check that indices are sorted
    EXPECT_LT (indices2[i], indices2[i+1]);
    // Compare original points with sampled indices against sampled points
    EXPECT_NEAR (cloud_walls->points[indices2[i]].x, cloud_out.points[i].x, 1e-4);
    EXPECT_NEAR (cloud_walls->points[indices2[i]].y, cloud_out.points[i].y, 1e-4);
    EXPECT_NEAR (cloud_walls->points[indices2[i]].z, cloud_out.points[i].z, 1e-4);
  }
}
Code example #9
ModelPtr MultiPointCloud::model( )
{

    // Count all points that need to be exported
    pc_attr_it it;
    size_t c = 0;
    for(it = m_clouds.begin(); it != m_clouds.end(); it++)
    {
        PointCloud* pc = it->second->cloud;
        if(pc->isActive())
        {
            vector<uColorVertex>::iterator p_it;
            for(p_it = pc->m_points.begin(); p_it != pc->m_points.end(); p_it++)
            {
                c++;
            }
        }
    }


    // Create a new model and save points
    PointBufferPtr pcBuffer( new PointBuffer);
    floatArr pointBuffer(new float[3 * c]);
    ucharArr colorBuffer(new unsigned char[3 * c]);
    c = 0;

    for(it = m_clouds.begin(); it != m_clouds.end(); it++)
    {
        PointCloud* pc = it->second->cloud;
        if(pc->isActive())
        {
            vector<uColorVertex>::iterator p_it;
            for(p_it = pc->m_points.begin(); p_it != pc->m_points.end(); p_it++)
            {
                size_t bufferPos = 3 * c;

                uColorVertex v = *p_it;
                pointBuffer[bufferPos    ] = v.x;
                pointBuffer[bufferPos + 1] = v.y;
                pointBuffer[bufferPos + 2] = v.z;

                colorBuffer[bufferPos    ] = v.r;
                colorBuffer[bufferPos + 1] = v.g;
                colorBuffer[bufferPos + 2] = v.b;

                c++;
            }
        }

    }

    pcBuffer->setPointArray(pointBuffer, c);
    pcBuffer->setPointColorArray(colorBuffer, c);

    ModelPtr modelPtr(new Model);
    modelPtr->m_pointCloud = pcBuffer;


    return modelPtr;

}
Code example #10
void Detector::ComputeFreespace(const Camera& camera,
                     Matrix<int>& occ_map_binary,
                     Matrix<int>& mat_2D_pos_x, Matrix<int>& mat_2D_pos_y,
                     const PointCloud &point_cloud)
{
    // Set Freespace Parameters
    double scale_z_ = Globals::freespace_scaleZ;
    double scale_x_ = Globals::freespace_scaleX;
    double min_x_ = Globals::freespace_minX;
    double min_z_ = Globals::freespace_minZ;
    double max_x_ = Globals::freespace_maxX;
    double max_z_ = Globals::freespace_maxZ;

    int x_bins = (int)round((max_x_ - min_x_)*scale_x_)+1;
    int z_bins = (int)round((max_z_ - min_z_)*scale_z_)+1;

    Matrix<double> occ_map(x_bins, z_bins, 0.0);
    occ_map_binary.set_size(x_bins, z_bins);
    double step_x = (max_x_ - min_x_)/double(x_bins-1);
    double step_z = (max_z_ - min_z_)/double(z_bins-1);

    Vector<double> plane_parameters = camera.get_GP();
    double plane_par_0 = plane_parameters(0);
    double plane_par_1 = plane_parameters(1);
    double plane_par_2 = plane_parameters(2);
    double plane_par_3 = plane_parameters(3)*Globals::WORLD_SCALE;

//    double log_2 = log(2);

    for(int j = 0; j < point_cloud.X.getSize(); j++)
    {
        double zj = point_cloud.Z(j);

        if(zj < Globals::freespace_max_depth_to_cons && zj >= 0.1)
        {
            double xj = point_cloud.X(j);
            double yj = point_cloud.Y(j);

            double distance_to_plane = plane_par_0*xj + plane_par_1*yj + plane_par_2*zj + plane_par_3;

            // Only accept points in upper_body height range (0.1m to 2.5m)
            if(distance_to_plane < 2.5 && distance_to_plane > 0.1)
            {
                double x = xj - min_x_;
                double z = zj - min_z_;

                int pos_x = (int)round(x / step_x);
                int pos_z = (int)round(z / step_z);

//                occ_map(pos_x, pos_z) += z*(log(z) / log_2);
                occ_map(pos_x, pos_z) += z;

                mat_2D_pos_x.data()[j] = pos_x;
                mat_2D_pos_y.data()[j] = pos_z;
            }
        }
    }
    
//    occ_map.WriteToTXT("before.txt");
    
    
     Vector<double> kernel = AncillaryMethods::getGaussian1D(2,3);
     occ_map = AncillaryMethods::conv1D(occ_map, kernel, false);
     occ_map = AncillaryMethods::conv1D(occ_map, kernel, true);

// occ_map.WriteToTXT("after.txt");
 
    int occ_map_length = x_bins*z_bins;
    for(int i = 0; i < occ_map_length; i++)
    {
        occ_map_binary.data()[i] = (occ_map.data()[i] < Globals::freespace_threshold) ? 0 : 1;
    }
}
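
The binning in ComputeFreespace maps a world coordinate in [min, max] to one of round((max - min) * scale) + 1 cells, with cell size step = (max - min) / (bins - 1). A tiny standalone sketch with made-up values (not the Globals used above):

#include <cmath>
#include <cstdio>

// Sketch only, hypothetical parameters: reproduce the bin computation used
// for the occupancy map, i.e. pos = round((v - min) / step).
int main()
{
    const double min_x = -5.0, max_x = 5.0, scale_x = 10.0; // hypothetical range/scale
    const int bins = (int)std::round((max_x - min_x) * scale_x) + 1;
    const double step = (max_x - min_x) / (double)(bins - 1);

    const double x = 1.37;                                  // hypothetical coordinate
    const int pos_x = (int)std::round((x - min_x) / step);  // bin index in 0..bins-1
    std::printf("bins=%d step=%.3f pos_x=%d\n", bins, step, pos_x);
    return 0;
}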
Code example #11
void Detector::ProcessFrame(const Camera &camera_origin, const Matrix<double> &depth_map, const PointCloud &point_cloud,
                            const Matrix<double> &upper_body_template, Vector<Vector<double> > &detected_bounding_boxes)
{
    int width = depth_map.x_size();
    int height = depth_map.y_size();

//    Matrix<int> labeledROIs;
    Matrix<int> mat_2D_pos_x(width, height, -1);
    Matrix<int> mat_2D_pos_y(width, height, -1);

    // Compute 2D_positions, and occ_Map matrices
    ComputeFreespace(camera_origin, labeledROIs, mat_2D_pos_x, mat_2D_pos_y, point_cloud);

    Vector<ROI> all_ROIs;
    PreprocessROIs(labeledROIs, all_ROIs);

//    Vector<Vector<Vector<double> > > points3D_in_ROIs(all_ROIs.getSize());
    ExtractPointsInROIs(/*points3D_in_ROIs*/all_ROIs, mat_2D_pos_x, mat_2D_pos_y, point_cloud, labeledROIs);

    double scaleZ = Globals::freespace_scaleZ;

    Vector<double> plane_in_camera = camera_origin.get_GP();


    Vector<Vector<double> > close_range_BBoxes;
    Vector<Vector<double > > distances;

    Vector<double> lower_point(3);

    for(int l = 0; l < all_ROIs.getSize(); l++)
    {
//        if(all_ROIs(l).getSize() > 0)
        if(all_ROIs(l).has_any_point)
        {
//            int min_y_index = 0;
//            AncillaryMethods::MinVecVecDim(points3D_in_ROIs(l), 1, min_y_index);
//            Vector<double> lower_point = points3D_in_ROIs(l)(min_y_index);
            lower_point(0) = point_cloud.X(all_ROIs(l).ind_min_y);
            lower_point(1) = point_cloud.Y(all_ROIs(l).ind_min_y);
            lower_point(2) = point_cloud.Z(all_ROIs(l).ind_min_y);

            double height = plane_in_camera(0)*lower_point(0) + plane_in_camera(1)*lower_point(1) +
                    plane_in_camera(2)*lower_point(2) + plane_in_camera(3)*Globals::WORLD_SCALE;
            ROS_DEBUG_STREAM("Checking height: " << Globals::min_height << " < " << height << " < " << Globals::max_height);
            ROS_DEBUG_STREAM("Checking distance: " << (all_ROIs(l).center_y()-all_ROIs(l).width()/2.0)/scaleZ << " < " << Globals::distance_range_accepted_detections);
            if((all_ROIs(l).center_y()-all_ROIs(l).width()/2.0)/scaleZ < Globals::distance_range_accepted_detections
                    && height > Globals::min_height && height < Globals::max_height)
            {
                Vector<double> bbox(4, 0.0);
                // if there is a valid bounding box
//                if( ROI::GetRegion2D(bbox, camera_origin, points3D_in_ROIs(l)) )
                if(all_ROIs(l).GetRegion2D(bbox, camera_origin, point_cloud) )
                {
                    close_range_BBoxes.pushBack(bbox);

                    Vector<double> distance(2);
                    distance(0) = all_ROIs(l).center_y()/scaleZ;
                    distance(1) = all_ROIs(l).height()/scaleZ;

                    distances.pushBack(distance);
                }
            }
        }
    }



    detected_bounding_boxes = EvaluateTemplate(upper_body_template, depth_map, close_range_BBoxes, distances);
}
Code example #12
File: test_recognition_cg.cpp  Project: 2php/pcl
/* ---[ */
int
main (int argc, char** argv)
{
  if (argc < 3)
  {
    cerr << "No test file given. Please download `milk.pcd` and `milk_cartoon_all_small_clorox.pcd` and pass their paths to the test." << endl;
    return (-1);
  }

  if (loadPCDFile (argv[1], *model_) < 0)
  {
    cerr << "Failed to read test file. Please download `milk.pcd` and pass its path to the test." << endl;
    return (-1);
  }

  if (loadPCDFile (argv[2], *scene_) < 0)
  {
    cerr << "Failed to read test file. Please download `milk_cartoon_all_small_clorox.pcd` and pass its path to the test." << endl;
    return (-1);
  }

  //Normals
  NormalEstimationOMP<PointType, NormalType> norm_est;
  norm_est.setKSearch (10);
  norm_est.setInputCloud (model_);
  norm_est.compute (*model_normals_);

  norm_est.setInputCloud (scene_);
  norm_est.compute (*scene_normals_);

  //Downsampling
  UniformSampling<PointType> uniform_sampling;
  uniform_sampling.setInputCloud (model_);
  uniform_sampling.setRadiusSearch (0.005);
  uniform_sampling.filter (*model_downsampled_);

  uniform_sampling.setInputCloud (scene_);
  uniform_sampling.setRadiusSearch (0.02);
  uniform_sampling.filter (*scene_downsampled_);

  //Descriptor
  SHOTEstimationOMP<PointType, NormalType, DescriptorType> descr_est;
  descr_est.setRadiusSearch (0.015);
  descr_est.setInputCloud (model_downsampled_);
  descr_est.setInputNormals (model_normals_);
  descr_est.setSearchSurface (model_);
  descr_est.compute (*model_descriptors_);

  descr_est.setInputCloud (scene_downsampled_);
  descr_est.setInputNormals (scene_normals_);
  descr_est.setSearchSurface (scene_);
  descr_est.compute (*scene_descriptors_);

  //Correspondences with KdTree
  KdTreeFLANN<DescriptorType> match_search;
  match_search.setInputCloud (model_descriptors_);

  for (size_t i = 0; i < scene_descriptors_->size (); ++i)
  {
    if ( pcl_isfinite( scene_descriptors_->at (i).descriptor[0] ) )
    {
      vector<int> neigh_indices (1);
      vector<float> neigh_sqr_dists (1);
      int found_neighs = match_search.nearestKSearch (scene_descriptors_->at (i), 1, neigh_indices, neigh_sqr_dists);
      if(found_neighs == 1 && neigh_sqr_dists[0] < 0.25f)
      {
        Correspondence corr (neigh_indices[0], static_cast<int> (i), neigh_sqr_dists[0]);
        model_scene_corrs_->push_back (corr);
      }
    }
  }

  testing::InitGoogleTest (&argc, argv);
  return (RUN_ALL_TESTS ());
}
Code example #13
0
File: basic.cpp Project: amoliu/robotarm
void drawCallback()
{
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	
	// Modelview matrix
	glLoadIdentity();
	glScalef(zoom, zoom, 1);	// Matrix transformations to alter where the object is rendered.
	glTranslatef(0, 0, -1000);
	glTranslatef(offset[0], offset[1], -offset[2]);
	glRotatef(rotangles[0], 1, 0, 0);
	glRotatef(rotangles[1], 0, 1, 0);
	glTranslatef(0, 0, 750);
	
	//show point cloud here
	glPointSize(1.0f);
	glBegin(GL_POINTS);
	
	for (unsigned int i = 0; i < cloud->size(); i++) {
		//glColor3ub(cloud->points[i].r, cloud->points[i].g, cloud->points[i].b);
		glColor3f(0.8, 0.8, 1.0);
		glVertex3f(cloud->points[i].x, -cloud->points[i].y, -cloud->points[i].z);
	}
	
	glEnd();
	
	if (done_ransac) {
		glEnable(GL_BLEND);
		glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
		
		//show ransac indices
		double boxSize = 600.0;
		double bl_x = -boxSize, bl_y = -boxSize;
		double tl_x = -boxSize, tl_y =  boxSize;
		double tr_x =  boxSize, tr_y =  boxSize;
		double br_x =  boxSize, br_y = -boxSize;
		// ax + by + cz + d = 0
		// ax + by + d = -cz
		// -(ax + by + d)/c = z
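		// Sanity check with hypothetical coefficients: for a=0, b=0, c=1, d=-500 the formula gives
		// z = -((0*x) + (0*y) + (-500))/1 = 500 at every corner, i.e. a horizontal plane 500 units away.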
		double bl_z = -((plane_coefficients[0]*bl_x) + (plane_coefficients[1]*bl_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
		double tl_z = -((plane_coefficients[0]*tl_x) + (plane_coefficients[1]*tl_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
		double tr_z = -((plane_coefficients[0]*tr_x) + (plane_coefficients[1]*tr_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
		double br_z = -((plane_coefficients[0]*br_x) + (plane_coefficients[1]*br_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
		
		switch (rotation) {
//				ABCD = 0,//normal
//				BACD,//ab
//				CBAD,//ac
//				DBCA,//ad
//				ACBD,//bc
//				ADCB,//bd
//				ABDC //cd
			case BACD:
				bl_z = -((plane_coefficients[1]*bl_x) + (plane_coefficients[0]*bl_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
				tl_z = -((plane_coefficients[1]*tl_x) + (plane_coefficients[0]*tl_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
				tr_z = -((plane_coefficients[1]*tr_x) + (plane_coefficients[0]*tr_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
				br_z = -((plane_coefficients[1]*br_x) + (plane_coefficients[0]*br_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
				break;
			case CBAD:
				bl_z = -((plane_coefficients[2]*bl_x) + (plane_coefficients[1]*bl_y) + plane_coefficients[0])/(plane_coefficients[2]*1.0);
				tl_z = -((plane_coefficients[2]*tl_x) + (plane_coefficients[1]*tl_y) + plane_coefficients[0])/(plane_coefficients[2]*1.0);
				tr_z = -((plane_coefficients[2]*tr_x) + (plane_coefficients[1]*tr_y) + plane_coefficients[0])/(plane_coefficients[2]*1.0);
				br_z = -((plane_coefficients[2]*br_x) + (plane_coefficients[1]*br_y) + plane_coefficients[0])/(plane_coefficients[2]*1.0);
				break;
			case DBCA:
				bl_z = -((plane_coefficients[3]*bl_x) + (plane_coefficients[1]*bl_y) + plane_coefficients[2])/(plane_coefficients[0]*1.0);
				tl_z = -((plane_coefficients[3]*tl_x) + (plane_coefficients[1]*tl_y) + plane_coefficients[2])/(plane_coefficients[0]*1.0);
				tr_z = -((plane_coefficients[3]*tr_x) + (plane_coefficients[1]*tr_y) + plane_coefficients[2])/(plane_coefficients[0]*1.0);
				br_z = -((plane_coefficients[3]*br_x) + (plane_coefficients[1]*br_y) + plane_coefficients[2])/(plane_coefficients[0]*1.0);
				break;
			case ACBD:
				bl_z = -((plane_coefficients[0]*bl_x) + (plane_coefficients[2]*bl_y) + plane_coefficients[1])/(plane_coefficients[3]*1.0);
				tl_z = -((plane_coefficients[0]*tl_x) + (plane_coefficients[2]*tl_y) + plane_coefficients[1])/(plane_coefficients[3]*1.0);
				tr_z = -((plane_coefficients[0]*tr_x) + (plane_coefficients[2]*tr_y) + plane_coefficients[1])/(plane_coefficients[3]*1.0);
				br_z = -((plane_coefficients[0]*br_x) + (plane_coefficients[2]*br_y) + plane_coefficients[1])/(plane_coefficients[3]*1.0);
				break;
			case ADCB:
				bl_z = -((plane_coefficients[0]*bl_x) + (plane_coefficients[3]*bl_y) + plane_coefficients[2])/(plane_coefficients[1]*1.0);
				tl_z = -((plane_coefficients[0]*tl_x) + (plane_coefficients[3]*tl_y) + plane_coefficients[2])/(plane_coefficients[1]*1.0);
				tr_z = -((plane_coefficients[0]*tr_x) + (plane_coefficients[3]*tr_y) + plane_coefficients[2])/(plane_coefficients[1]*1.0);
				br_z = -((plane_coefficients[0]*br_x) + (plane_coefficients[3]*br_y) + plane_coefficients[2])/(plane_coefficients[1]*1.0);
				break;
			case ABDC:
				bl_z = -((plane_coefficients[0]*bl_x) + (plane_coefficients[1]*bl_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
				tl_z = -((plane_coefficients[0]*tl_x) + (plane_coefficients[1]*tl_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
				tr_z = -((plane_coefficients[0]*tr_x) + (plane_coefficients[1]*tr_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
				br_z = -((plane_coefficients[0]*br_x) + (plane_coefficients[1]*br_y) + plane_coefficients[3])/(plane_coefficients[2]*1.0);
				break;
				
			default:
				//ABCD or Original
				break;
		}
		
		
		glBegin(GL_QUADS);
		glColor4f(.392156863, 1, .392156863, 0.3);
		// bl, tl, tr, br
		glVertex3d(bl_x, -bl_y, -bl_z);
		glVertex3d(tl_x, -tl_y, -tl_z);
		glVertex3d(tr_x, -tr_y, -tr_z);
		glVertex3d(br_x, -br_y, -br_z);
		
		
		glEnd();
		
		/*glPointSize(1.0f);
		glBegin(GL_POINTS);
		
		for (unsigned int i = 0; i < table_plane_cloud->size(); i++) {
			//glColor3ub(table_plane_cloud->points[i].r, table_plane_cloud->points[i].g, table_plane_cloud->points[i].b);
			glColor3f(1.0, 0.0, 0.0);
			glVertex3f(table_plane_cloud->points[i].x, -table_plane_cloud->points[i].y, -table_plane_cloud->points[i].z);
		}
		
		glEnd();*/
	}
	
	
	
	// end drawing cloud
	
	/*printf("\tDrawing Cloud - Mid point:: x:%.3f y:%.3f z:%.3f r:%d g:%d b:%d\n",
		   cloud->points[cloud->size()/2].x,
		   cloud->points[cloud->size()/2].y,
		   cloud->points[cloud->size()/2].z,
		   cloud->points[cloud->size()/2].r,
		   cloud->points[cloud->size()/2].g,
		   cloud->points[cloud->size()/2].b);
	printf("\tDrawing Cloud - Mid point:: x:%.3f y:%.3f z:%.3f r:%d g:%d b:%d    +10\n",
		   cloud->points[cloud->size()/2+10].x,
		   cloud->points[cloud->size()/2+10].y,
		   cloud->points[cloud->size()/2+10].z,
		   cloud->points[cloud->size()/2+10].r,
		   cloud->points[cloud->size()/2+10].g,
		   cloud->points[cloud->size()/2+10].b);
	printf("\n");*/
	
	glutSwapBuffers();
}
Code example #14
0
bool GoalDetection::execute() {

	getGoalPoles().clearList();

	vector<PointCloud*> cloudList;
	PointCloud *cloud = new PointCloud();

	int goalLastX = 0;
	int goalCloudMinY = 0;
	int goalCloudMaxY = 0;

	// go through all points and separate them to clouds (possible poles)
	list<Point>::const_iterator it = getGoalPoints().getCloud()->begin();
	for( ; it != getGoalPoints().getCloud()->end(); ++it) {
		int x = (*it).getX();
		int y = (*it).getY();
		if( x != goalLastX) {
			if( x > (goalLastX + 2*mMinCloudDistance)) {
				if( cloud->getSize() > 0) {
					cloudList.push_back(cloud);
					//lint -e{423}
					cloud = new PointCloud();
				}
				goalCloudMinY = y;
				goalCloudMaxY = y;
				//Debugger::DEBUG("Goal", "Last: %d, actual: %d (%d)", goalLastX, x, y);
			} else {
				int divisor = goalCloudMaxY - goalCloudMinY;
				if( divisor != 0) {
					double ratio = (double)(y - goalCloudMinY) / (double)divisor;
					//Debugger::DEBUG("Goal", "Ratio: %f (%d|%d) [%d]", ratio, x, y, (goalCloudMaxY - goalCloudMinY));
					if( ratio < mMinHeightRatio || ratio >= mMaxHeightRatio) {
						if( cloud->getSize() > 0) {
							cloudList.push_back(cloud);
							//lint -e{423}
							cloud = new PointCloud();
						}
						goalCloudMinY = y;
						goalCloudMaxY = y;
						//Debugger::DEBUG("Goal", "MinMax: %d", y);
					}
				}
			}
			goalLastX = x;
		}
		if( y > goalCloudMaxY) {
			goalCloudMaxY = y;
			//Debugger::DEBUG("Goal", "Max: %d", y);
		} else if( y < goalCloudMinY) {
			goalCloudMinY = y;
			//Debugger::DEBUG("Goal", "Min: %d", y);
		}

		cloud->addPoint( (*it));
	}

	if( cloud->getSize() > 0) {
		cloudList.push_back(cloud);
	} else {
		delete cloud;
		cloud = NULL;
	}

	double panAngle = DEGREE_TO_RADIAN * getBodyStatus().getPan();
	double tiltAngle = DEGREE_TO_RADIAN * getBodyStatus().getTilt();

	// go through all clouds and filter out wrong poles
	vector< pair<int, Object> > objectMap;
	vector<PointCloud*>::iterator cloudIt;
	for( cloudIt = cloudList.begin(); cloudIt != cloudList.end(); ) {
		//lint -e{423}
		cloud = (*cloudIt);
		//Debugger::INFO("GoalDetection", "Goal point cloud: %d", (*it)->getSize());
		if (cloud->getSize() >= mMinCloudPoints) {
			BoundingBox box = (*cloudIt)->calculateBoundingBox();

			//Debugger::INFO("GoalDetection", "Box: %d, %d (%d x %d)", box.topLeft.getX(), box.topLeft.getY(), box.width, box.height);

			if (box.width >= mMinPoleWidth && box.height >= mMinPoleHeight && filterFieldLines(box)) {
				int area = (box.width * box.height);
				if( area != 0) {
					double ratio = (double)cloud->getSize() / (double)area;
					if( ratio >= mMinPointsAreaRatio) {
						Object obj(box.topLeft.getX(), box.topLeft.getY(),
								box.width, box.height);
						obj.type = Object::GOAL_POLE_YELLOW;

						objectMap.push_back(make_pair(obj.lastImageTopLeftX, obj));
#ifdef _DEBUG
						if( mDebugDrawings >= 2) {
							DRAW_BOX("Goal", box.topLeft.getX(), box.topLeft.getY(),
									box.width, box.height, DebugDrawer::GREEN);
						}
#endif
					} else {
#ifdef _DEBUG
						if( mDebugDrawings >= 3) {
							DRAW_BOX("Goal", box.topLeft.getX(), box.topLeft.getY(),
									box.width, box.height, DebugDrawer::TEAL);
						}
#endif
					}
				}
#ifdef _DEBUG
				else {
					Debugger::WARN("GoalDetection", "Area is 0, this should be impossible!");
				}
#endif
			}
#ifdef _DEBUG
			else {
				if( mDebugDrawings >= 4) {
					DRAW_BOX("Goal", box.topLeft.getX(), box.topLeft.getY(),
						box.width, box.height, DebugDrawer::LIGHT_GRAY);
				}
			}
#endif

		}
#ifdef _DEBUG
		else if( cloud->getSize() > 0) {
			if( mDebugDrawings >= 5) {
				BoundingBox box = (*cloudIt)->calculateBoundingBox();
				DRAW_BOX("Goal", box.topLeft.getX(), box.topLeft.getY(),
					box.width, box.height, DebugDrawer::RED);
			}
		}
#endif
		delete cloud;
		cloud = NULL;
		cloudIt = cloudList.erase(cloudIt);
	}

	if( cloud != NULL) {
		delete cloud;
	}

	// sort objects downward
	//lint --e{864}
	sort( objectMap.begin(), objectMap.end(), poleCompare);

	// use only the outer objects (left and right)
	vector< pair<int,Object> >::iterator objIt = objectMap.begin();
	if( objIt != objectMap.end()) {
		Object obj = objIt->second;
		getGoalPoles().addObject(obj);

		objIt = objectMap.end();
		if( --objIt != objectMap.begin()) {
			obj = objIt->second;
			getGoalPoles().addObject(obj);
		}
	}

	return true;
}
Code example #15
0
File: test_transforms.cpp Project: 4ker/pcl
TEST (PCL, TransformCopyFields)
{
  Eigen::Affine3f transform;
  transform = Eigen::Translation3f (100, 0, 0);

  PointXYZRGBNormal empty_point;
  std::vector<int> indices (1);

  PointCloud<PointXYZRGBNormal> cloud (2, 1);
  cloud.points[0].rgba = 0xFF0000;
  cloud.points[1].rgba = 0x00FF00;

  // Preserve data in all fields
  {
    PointCloud<PointXYZRGBNormal> cloud_out;
    transformPointCloud (cloud, cloud_out, transform, true);
    ASSERT_EQ (cloud.size (), cloud_out.size ());
    EXPECT_RGBA_EQ (cloud.points[0], cloud_out.points[0]);
    EXPECT_RGBA_EQ (cloud.points[1], cloud_out.points[1]);
  }
  // Preserve data in all fields (with indices)
  {
    PointCloud<PointXYZRGBNormal> cloud_out;
    transformPointCloud (cloud, indices, cloud_out, transform, true);
    ASSERT_EQ (indices.size (), cloud_out.size ());
    EXPECT_RGBA_EQ (cloud.points[0], cloud_out.points[0]);
  }
  // Do not preserve data in all fields
  {
    PointCloud<PointXYZRGBNormal> cloud_out;
    transformPointCloud (cloud, cloud_out, transform, false);
    ASSERT_EQ (cloud.size (), cloud_out.size ());
    EXPECT_RGBA_EQ (empty_point, cloud_out.points[0]);
    EXPECT_RGBA_EQ (empty_point, cloud_out.points[1]);
  }
  // Do not preserve data in all fields (with indices)
  {
    PointCloud<PointXYZRGBNormal> cloud_out;
    transformPointCloud (cloud, indices, cloud_out, transform, false);
    ASSERT_EQ (indices.size (), cloud_out.size ());
    EXPECT_RGBA_EQ (empty_point, cloud_out.points[0]);
  }
  // Preserve data in all fields (with normals version)
  {
    PointCloud<PointXYZRGBNormal> cloud_out;
    transformPointCloudWithNormals (cloud, cloud_out, transform, true);
    ASSERT_EQ (cloud.size (), cloud_out.size ());
    EXPECT_RGBA_EQ (cloud.points[0], cloud_out.points[0]);
    EXPECT_RGBA_EQ (cloud.points[1], cloud_out.points[1]);
  }
  // Preserve data in all fields (with normals and indices version)
  {
    PointCloud<PointXYZRGBNormal> cloud_out;
    transformPointCloudWithNormals (cloud, indices, cloud_out, transform, true);
    ASSERT_EQ (indices.size (), cloud_out.size ());
    EXPECT_RGBA_EQ (cloud.points[0], cloud_out.points[0]);
  }
  // Do not preserve data in all fields (with normals version)
  {
    PointCloud<PointXYZRGBNormal> cloud_out;
    transformPointCloudWithNormals (cloud, cloud_out, transform, false);
    ASSERT_EQ (cloud.size (), cloud_out.size ());
    EXPECT_RGBA_EQ (empty_point, cloud_out.points[0]);
    EXPECT_RGBA_EQ (empty_point, cloud_out.points[1]);
  }
  // Do not preserve data in all fields (with normals and indices version)
  {
    PointCloud<PointXYZRGBNormal> cloud_out;
    transformPointCloudWithNormals (cloud, indices, cloud_out, transform, false);
    ASSERT_EQ (indices.size (), cloud_out.size ());
    EXPECT_RGBA_EQ (empty_point, cloud_out.points[0]);
  }
}
Code example #16
0
File: main.cpp Project: bzobl/sfm
bool initial_pose_estimation(Camera const &cam, ViewCombination &view, IPoseEstimation *poseestim,
                             bool show_points, bool debug = false)
{
    cv::Mat P_0, P_1;
    PointCloud pts;

    if (!poseestim->estimatePose(cam, view, P_0, P_1, pts)) {
        return false;
    }

    if (debug) {
        std::cout << "Initial Pose estimation successful" << std::endl;
        std::cout << "P_0" << std::endl << P_0 << std::endl;
        std::cout << "P_1" << std::endl << P_1 << std::endl;
        std::cout << "Triangulated " << pts.size() << " 3d points" << std::endl;
    }

    if (show_points) {
        pts.RunVisualization("Initial pose estimation");
    }

    std::vector<std::pair<size_t, double>> minimalErrors;
    for (size_t i = 0; i < pts.size(); i++) {
        minimalErrors.emplace_back(i, pts.getError(i));
    }
    std::sort(minimalErrors.begin(), minimalErrors.end(),
              [](std::pair<size_t, double> const &lhs, std::pair<size_t, double> const &rhs)
    {
        return lhs.second < rhs.second;
    });

    std::vector<cv::Point2f> img_points_left;
    std::vector<cv::Point2f> img_points_right;
    std::vector<cv::Point3f> points_3d;
    for (size_t i = 0;
            (i < minimalErrors.size()) && (minimalErrors[i].second <= 50.0);
            i++) {
        size_t idx = minimalErrors[i].first;

        if (pts.getPoint(idx)(3) <= 0) continue;

        img_points_left.emplace_back(view.getMatchingPointLeft(idx));
        img_points_right.emplace_back(view.getMatchingPointRight(idx));
        cv::Vec4d const &pt = pts.getPoint(idx);
        points_3d.emplace_back(pt(0), pt(1), pt(2));
    }

    if (debug) {
        std::cout << "found " << points_3d.size() << " points elegible for PnP" << std::endl;
    }

    if (points_3d.size() <= 6) {
        return false;
    }

    try {
        if (debug) {
            std::cout << "PnP of left view" << std::endl;
        }
        poseestim->estimatePose(cam, *view.getImageLeft(), points_3d, img_points_left);

        if (debug) {
            std::cout << "PnP of right view" << std::endl;
        }
        poseestim->estimatePose(cam, *view.getImageRight(), points_3d, img_points_right);
    } catch (std::exception const &e) {
        std::cerr << "PnP after initial pose estimation failed: " << e.what();
        return false;
    }

    return true;
}
Code example #17
0
			//Pointcloud and tf transform received
			void callback(const PointCloud::ConstPtr& cloud_in)
			{

				
				PointCloud cloudTransformed;
				sensor_msgs::LaserScanPtr output(new sensor_msgs::LaserScan());

				try
				{
					//Transform pointcloud to new reference frame
					result = pcl_ros::transformPointCloud(baseFrame, *cloud_in, cloudTransformed, tfListener);
				}
				catch (tf::TransformException& e)
				{
					NODELET_INFO("CloudToScanHoriz failed");
					std::cout << e.what();
					return;
				}
				NODELET_DEBUG("Got cloud");

				//Setup laserscan message
				output->header = cloud_in->header;
				output->header.frame_id = laserFrame;
				output->angle_min = -M_PI/2;
				output->angle_max = M_PI/2;
				output->angle_increment = M_PI/180.0/2.0;
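				//Half-degree angular resolution over the +/-90 degree fan (roughly 360 range bins)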
				output->time_increment = 0.0;
				output->scan_time = 1.0/30.0;
				output->range_min = 0.45;
				output->range_max = 10.0;

				uint32_t ranges_size = std::ceil((output->angle_max - output->angle_min) / output->angle_increment);
				output->ranges.assign(ranges_size, output->range_max + 1.0);
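				//Bins are initialised just past range_max so untouched directions read as "no return"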

				//"Thin" laser height from pointcloud
				for (PointCloud::const_iterator it = cloudTransformed.begin(); it != cloudTransformed.end(); ++it)
				{
					const float &x = it->x;
					const float &y = it->y;
					const float &z = it->z;

					if ( std::isnan(x) || std::isnan(y) || std::isnan(z) )
					{
						continue;  
					}
					if (z > max_height_ || z < min_height_)
					{
						continue;
					}
					double angle = atan2(y, x);
					if (angle < output->angle_min || angle > output->angle_max)
					{
						continue;
					}
					int index = (angle - output->angle_min) / output->angle_increment;
					//Calculate hypotenuse distance to point
					double range_sq = y*y+x*x;
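					//Comparing squared ranges defers the sqrt until the point is known to shorten the stored range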
					if (output->ranges[index] * output->ranges[index] > range_sq)
						output->ranges[index] = sqrt(range_sq);
				} //for it
				laserPublisher.publish(output);
			} //callback
Code example #18
0
File: main.cpp Project: bzobl/sfm
bool process_sfm(IFeatureDetect *fdetect, IPoseEstimation *poseestim, ITriangulation *triang,
                 Camera const &cam,
                 Frame *frame_last, Frame *frame_now, std::vector<Frame *> const &frames,
                 PointCloud &global_points, SfMTimes &times,
                 bool initial_pose_estimated, bool export_kp, bool show_matches, bool show_points)
{
    double const DISTANCE_THRESHOLD = 10;

    StopWatch fdsw;
    fdetect->findFeatures(frame_now);
    ViewCombination view = fdetect->matchFrames(frame_last, frame_now);

    if (export_kp) {
        export_keypoints(view);
    }
    if (show_matches) {
        test_show_features_colored(view, view.getMatchesSize(), true);
    }

    double average_distance = 0.0;
    for (size_t i = 0; i < view.getMatchesSize(); i++) {
        double distance = ITriangulation::norm(view.getMatchingPointLeft(i),
                                               view.getMatchingPointRight(i));
        average_distance += distance;
    }
    average_distance /= view.getMatchesSize();
    fdsw.stop();
    std::cout << "average_distance between " << view.getMatchesSize()
              << " matches: " << average_distance << std::endl;

    if (average_distance < DISTANCE_THRESHOLD) {
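        // A mean pixel displacement below DISTANCE_THRESHOLD is taken to mean the camera barely moved,
        // so triangulation from this view pair would be degenerate.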
        std::cerr << "camera has not moved" << std::endl;
        return false;
    }

    StopWatch pesw;
    if (!initial_pose_estimated) {
        std::cout << "Estimating initial pose" << std::endl;
        if (!initial_pose_estimation(cam, view, poseestim, show_points)) {
            std::cerr << "Pose estimation failed" << std::endl;
            return false;
        }
    } else {
        std::vector<cv::Point2f> points_2d;
        std::vector<cv::Point3f> points_3d;

        std::cout << "finding 2D 3D correspondences" << std::endl;
        if (!find_2D_3D_correspndences(frame_now, frames, global_points,
                                       fdetect, points_2d, points_3d)) {
            std::cerr << "could not find enough correspondences" << std::endl;
            return false;
        }

        std::cout << "Starting PnP" << std::endl;
        if (!poseestim->estimatePose(cam, *frame_now, points_3d, points_2d)) {
            std::cerr << "PnP failed" << std::endl;
            return false;
        }
    }
    pesw.stop();

    std::cout << "Starting triangulation of " << view.getMatchesSize() << " keypoints" << std::endl;
    StopWatch trsw;
    if (!triang->triangulate(cam, view, global_points)) {
        std::cerr << "Triangulation failed" << std::endl;
        return false;
    }
    trsw.stop();
    /*
    OpticalFlowFeatures of;
    std::cout << "starting of" << std::endl;
    ViewCombination vof = of.matchFrames(frame_last, frame_now);
    std::cout << "matches:  " << vof.getMatchesSize() << std::endl
              << "kp left:  " << frame_last->getKeypointsSize() << std::endl
              << "kp right: " << frame_now->getKeypointsSize() << std::endl;
    std::cout << "starting triangulating of" << std::endl;
    triang->triangulate(cam, vof,
                        view.getImageLeft()->getProjectionMatrix(),
                        view.getImageRight()->getProjectionMatrix(),
                        global_points);
    //of_pts.RunVisualization();
    */
    /*
    OpticalFlowFeatures of;
    std::cout << "starting of" << std::endl;
    Frame f1(img_last);
    Frame f2(img_now);
    ViewCombination vof = of.matchFrames(&f1, &f2);
    triang->triangulate(cam, vof,
                        frame_last->getProjectionMatrix(),
                        frame_now->getProjectionMatrix(),
                        global_points);
                        */

    if (show_points) {
        global_points.RunVisualization("Global points in sfm loop");
    }

    times.feature_detection = fdsw.getDurationMs();
    times.pose_estimation = pesw.getDurationMs();
    times.triangulation = trsw.getDurationMs();
    return true;
}
Code example #19
0
int _tmain(int argc, _TCHAR* argv[])
{
	viewer = util::rgbVisualizer("RANSAC - Dev");
	//testMainMS();
	testMainWS();
	return 0;

	source_pc.reset(new PointCloud<PointXYZ>);
	PointCloud<Normal>::Ptr source_n(new PointCloud<Normal>);

	pcl::io::loadPCDFile(dPath + FILENAME, *source_pc);
	
	if (reducePointCloud)
	{
		source_pc = util::resamplingPointcloud(source_pc, 2);
		minimumInliner = 100;
	}

	source_n = proc::normalsEstimate(source_pc);

	util::displayPC(viewer, source_pc, 1, 1, 1);
	//viewer->addPointCloudNormals<PointXYZ, Normal>(source_pc, source_n, 64, 0.05, "NN");

	std::vector<PlanarRegion<PointXYZ>, Eigen::aligned_allocator<PlanarRegion<PointXYZ> > > regions;
	std::vector<ModelCoefficients> model_coefficients;
	std::vector<PointIndices> inlier_indices;
	PointCloud<Label>::Ptr labels(new PointCloud<Label>);
	std::vector<PointIndices> label_indices;
	std::vector<PointIndices> boundary_indices;

	// Segment Planes
	int64 mps_start = cv::getTickCount();
	OrganizedMultiPlaneSegmentation<PointXYZ, Normal, Label> mps;
	mps.setAngularThreshold(pcl::deg2rad(10.0));
	mps.setMinInliers(minimumInliner);
	mps.setInputNormals(source_n);
	mps.setInputCloud(source_pc);
	
	//mps.segment(model_coefficients,inlier_indices);
	mps.segmentAndRefine(regions, model_coefficients, 
		inlier_indices, labels, 
		label_indices, boundary_indices);

	int64 mps_end = cv::getTickCount();
	PCL_WARN("MPS + Refine took: %f sec \n=== \n", double(mps_end - mps_start)/cv::getTickFrequency());

	for (int i = 0; i < inlier_indices.size();i++)
	{
		PointIndicesPtr idc(new PointIndices);
		idc->indices = inlier_indices[i].indices;

		PointCloud<PointXYZ>::Ptr planeCloud(new PointCloud<PointXYZ>);

		/*Create the filtering object*/
		pcl::ExtractIndices<PointXYZ> extractPoints;
		extractPoints.setInputCloud(source_pc);
		extractPoints.setIndices(idc);
		extractPoints.setNegative(false);
		extractPoints.filter(*planeCloud);

		util::displayPC(viewer, planeCloud);
	}

	/*viewer->spinOnce(100);
	boost::this_thread::sleep(boost::posix_time::microseconds(100000));*/

	while (!viewer->wasStopped())
	{
		viewer->spinOnce(100);
		boost::this_thread::sleep(boost::posix_time::microseconds(100000));
	}
	return 0;
}
Code example #20
0
TEST (PCL, NormalEstimation)
{
  Eigen::Vector4f plane_parameters;
  float curvature;

  NormalEstimation<PointXYZ, Normal> n;

  // computePointNormal (indices, Vector)
  computePointNormal (cloud, indices, plane_parameters, curvature);
  EXPECT_NEAR (fabs (plane_parameters[0]), 0.035592, 1e-4);
  EXPECT_NEAR (fabs (plane_parameters[1]), 0.369596, 1e-4);
  EXPECT_NEAR (fabs (plane_parameters[2]), 0.928511, 1e-4);
  EXPECT_NEAR (fabs (plane_parameters[3]), 0.0622552, 1e-4);
  EXPECT_NEAR (curvature, 0.0693136, 1e-4);

  float nx, ny, nz;
  // computePointNormal (indices)
  n.computePointNormal (cloud, indices, nx, ny, nz, curvature);
  EXPECT_NEAR (fabs (nx), 0.035592, 1e-4);
  EXPECT_NEAR (fabs (ny), 0.369596, 1e-4);
  EXPECT_NEAR (fabs (nz), 0.928511, 1e-4);
  EXPECT_NEAR (curvature, 0.0693136, 1e-4);

  // computePointNormal (Vector)
  computePointNormal (cloud, plane_parameters, curvature);
  EXPECT_NEAR (plane_parameters[0],  0.035592,  1e-4);
  EXPECT_NEAR (plane_parameters[1],  0.369596,  1e-4);
  EXPECT_NEAR (plane_parameters[2],  0.928511,  1e-4);
  EXPECT_NEAR (plane_parameters[3], -0.0622552, 1e-4);
  EXPECT_NEAR (curvature,            0.0693136, 1e-4);

  // flipNormalTowardsViewpoint (Vector)
  flipNormalTowardsViewpoint (cloud.points[0], 0, 0, 0, plane_parameters);
  EXPECT_NEAR (plane_parameters[0], -0.035592,  1e-4);
  EXPECT_NEAR (plane_parameters[1], -0.369596,  1e-4);
  EXPECT_NEAR (plane_parameters[2], -0.928511,  1e-4);
  EXPECT_NEAR (plane_parameters[3],  0.0799743, 1e-4);

  // flipNormalTowardsViewpoint
  flipNormalTowardsViewpoint (cloud.points[0], 0, 0, 0, nx, ny, nz);
  EXPECT_NEAR (nx, -0.035592, 1e-4);
  EXPECT_NEAR (ny, -0.369596, 1e-4);
  EXPECT_NEAR (nz, -0.928511, 1e-4);

  // Object
  PointCloud<Normal>::Ptr normals (new PointCloud<Normal> ());

  // set parameters
  PointCloud<PointXYZ>::Ptr cloudptr = cloud.makeShared ();
  n.setInputCloud (cloudptr);
  EXPECT_EQ (n.getInputCloud (), cloudptr);
  boost::shared_ptr<vector<int> > indicesptr (new vector<int> (indices));
  n.setIndices (indicesptr);
  EXPECT_EQ (n.getIndices (), indicesptr);
  n.setSearchMethod (tree);
  EXPECT_EQ (n.getSearchMethod (), tree);
  n.setKSearch (static_cast<int> (indices.size ()));

  // estimate
  n.compute (*normals);
  EXPECT_EQ (normals->points.size (), indices.size ());

  for (size_t i = 0; i < normals->points.size (); ++i)
  {
    EXPECT_NEAR (normals->points[i].normal[0], -0.035592, 1e-4);
    EXPECT_NEAR (normals->points[i].normal[1], -0.369596, 1e-4);
    EXPECT_NEAR (normals->points[i].normal[2], -0.928511, 1e-4);
    EXPECT_NEAR (normals->points[i].curvature, 0.0693136, 1e-4);
  }

  PointCloud<PointXYZ>::Ptr surfaceptr = cloudptr;
  n.setSearchSurface (surfaceptr);
  EXPECT_EQ (n.getSearchSurface (), surfaceptr);

  // Additional test for searchForNeigbhors
  surfaceptr.reset (new PointCloud<PointXYZ>);
  *surfaceptr = *cloudptr;
  surfaceptr->points.resize (640 * 480);
  surfaceptr->width = 640;
  surfaceptr->height = 480;
  EXPECT_EQ (surfaceptr->points.size (), surfaceptr->width * surfaceptr->height);
  n.setSearchSurface (surfaceptr);
  tree.reset ();
  n.setSearchMethod (tree);

  // estimate
  n.compute (*normals);
  EXPECT_EQ (normals->points.size (), indices.size ());
}
Code example #21
0
int main(int argc, char* argv[])
{
    ros::init(argc, argv, "bumbleBee");
    ros::NodeHandle nh;

    // Create the topic publishers for the pointclouds
    ros::Publisher whiteLinePcPublisher = nh.advertise<sensor_msgs::PointCloud2>("/vision3D/points", 1);
    ros::Publisher redflagPcPublisher = nh.advertise<sensor_msgs::PointCloud2>("/redflag/points", 1);
    ros::Publisher blueflagPcPublisher = nh.advertise<sensor_msgs::PointCloud2>("/blueflag/points", 1);
    // TODO use the ROS stereo_msgs::disparity message instead of an image message
    ros::Publisher disparityPublisher = nh.advertise<sensor_msgs::Image>("/triclops/disparity", 1);
    ros::Publisher rectifiedPublisher = nh.advertise<sensor_msgs::Image>("/triclops/rectified", 1);

    WhitelineFilter wl_filter; // The white line filter
    Vision3d i3d; // The point cloud creator
    Color_Filter cf; // The color filter for red and blue
    BumbleBeeCamera bb2; // The camera driver

    bb2.startCamera();

    ros::Rate loop_rate(15);
    while (ros::ok()) {

        // Get the disparity and rectified images.
        bb2.retrieveImages();
        TriclopsImage16 tri_disparityImage = bb2.getDisparityImage();
        TriclopsColorImage tri_rectifiedColorImage = bb2.getRectifiedColorImage();

        // Convert the images to opencv formats
        cv::Mat cv_rectifiedColorImage;
        cv::Mat cv_disparityImage;
        convertTriclops2Opencv(tri_rectifiedColorImage, cv_rectifiedColorImage);
        convertTriclops2Opencv(tri_disparityImage, cv_disparityImage);

        //Publish the images to ros
        sensor_msgs::ImagePtr outmsg = cv_bridge::CvImage(std_msgs::Header(), sensor_msgs::image_encodings::TYPE_8UC3, cv_rectifiedColorImage).toImageMsg();
        outmsg->header.frame_id = "bumblebee2";
        outmsg->header.stamp = ros::Time::now();
        rectifiedPublisher.publish(outmsg);

        outmsg = cv_bridge::CvImage(std_msgs::Header(), sensor_msgs::image_encodings::TYPE_16UC1, cv_disparityImage).toImageMsg();
        outmsg->header.frame_id = "bumblebee2";
        outmsg->header.stamp = ros::Time::now();
        disparityPublisher.publish(outmsg);

        if (debug) {
            cv::imshow("RectifiedImage", cv_rectifiedColorImage);
            cv::imshow("DisparityImage", cv_disparityImage);
        }
        cv::waitKey(3);

        // Find the whitelines and put them into a point cloud
        cv::Mat mask = wl_filter.findLines(cv_rectifiedColorImage);
        PointCloud whiteLinePoints;
        i3d.producePointCloud(cv_disparityImage, mask, bb2.getTriclopsContext(),
            whiteLinePoints);

        // Find the blue flags and put them into a point cloud
        cv::Mat blueBlob = cf.findBlueHsv(cv_rectifiedColorImage);
        PointCloud bluePoints;
        i3d.producePointCloud(cv_disparityImage, blueBlob, bb2.getTriclopsContext(),
            bluePoints);

        // Find the red flags and put them into a point cloud
        cv::Mat redBlob = cf.findRedHsv(cv_rectifiedColorImage);
        PointCloud redPoints;
        i3d.producePointCloud(cv_disparityImage, redBlob, bb2.getTriclopsContext(),
            redPoints);

        // Below code could be used to tune the vision. To find the HSV limits.
        //    cf.filterControl();
        //    cv::Mat hsvBlob =cf.findContorllerHsv(cv_rectifiedColorImage);
        //    cv::imshow("hsvBlob", hsvBlob );
        //    cv::waitKey(3);

        // Publish the point clouds
        whiteLinePoints.header.frame_id = "bumblebee2";
        redPoints.header.frame_id = "bumblebee2";
        bluePoints.header.frame_id = "bumblebee2";
        // returnedPoints.header.stamp = ros::Time::now().toNSec();
        //TODO fix the timestamp. The above code causes timing problems.
        whiteLinePcPublisher.publish(whiteLinePoints);
        redflagPcPublisher.publish(redPoints);
        blueflagPcPublisher.publish(bluePoints);
        whiteLinePoints.clear();

        if (debug) {
            wl_filter.displayCyan();
            wl_filter.displayThreshold();
        }
        ros::spinOnce();
        loop_rate.sleep();
    }
    cout << "Shutting Down!" << endl;
    bb2.shutdown();
    return 0;
}
Code example #22
0
File: invrender.cpp Project: kyzyx/empty-room
int main(int argc, char* argv[]) {
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB | GLUT_DEPTH);
    glutInitWindowSize(1,1);
    glutCreateWindow("");
    glutHideWindow();
    GLenum err = glewInit();
    if (err != GLEW_OK) {
        cerr << "Error:" << glewGetErrorString(err) << endl;
        return 1;
    }
    if (!parseargs(argc, argv)) return 1;
    PolygonMesh::Ptr mesh(new PolygonMesh());
    cout << "Loading mesh geometry...." << endl;
    io::loadPolygonFile(argv[1], *mesh);
    PointCloud<PointXYZRGB>::Ptr cloud(new PointCloud<PointXYZRGB>());
    {
        PointCloud<PointXYZ> tmp;
        fromPCLPointCloud2(mesh->cloud, tmp);
        cloud->points.resize(tmp.size());
        for (size_t i = 0; i < tmp.size(); ++i) {
            cloud->points[i].x = tmp[i].x;
            cloud->points[i].y = tmp[i].y;
            cloud->points[i].z = tmp[i].z;
        }
    }
    PlaneOrientationFinder of(mesh, resolution/2);
    of.computeNormals(ccw);
    Mesh m(mesh,true);
    cout << "Done loading mesh geometry" << endl;

    vector<int> wallindices;
    vector<int> floorindices;
    ColorHelper loader(image_flip_x, image_flip_y);
    WallFinder wf(&of, resolution);

    if (do_wallfinding) {
        cout << "===== WALLFINDING =====" << endl;
        if (wallinput) {
            cout << "Loading wall files..." << endl;
            wf.loadWalls(wallfile, m.types);
            cout << "Done loading wall files..." << endl;
            of.normalize();
        } else {
            cout << "Analyzing geometry..." << endl;
            if (!of.computeOrientation()) {
                cout << "Error computing orientation! Non-triangle mesh!" << endl;
            }
            cout << "Done analyzing geometry" << endl;
            of.normalize();
            cout << "Finding walls..." << endl;
            wf.findFloorAndCeiling(m.types, anglethreshold);
            wf.findWalls(m.types, wallthreshold, minlength, anglethreshold);
            cout << "Done finding walls" << endl;
        }
        if (flipfloorceiling) {
            for (int i = 0; i < m.types.size(); ++i) {
                if (m.types[i] == WallFinder::LABEL_CEILING) m.types[i] = WallFinder::LABEL_FLOOR;
                else if (m.types[i] == WallFinder::LABEL_FLOOR) m.types[i] = WallFinder::LABEL_CEILING;
            }
        }
        for (int i = 0; i < m.types.size(); ++i) {
            if (m.types[i] == WallFinder::LABEL_WALL) wallindices.push_back(i);
            else if (m.types[i] == WallFinder::LABEL_FLOOR) floorindices.push_back(i);
        }
        if (output_wall) wf.saveWalls(walloutfile, m.types);
        cout << "=======================" << endl;
    }

    int numlights = 0;
    if (do_reprojection) {
        cout << "===== REPROJECTING =====" << endl;
        if (input) {
            cout << "Loading reprojection input files..." << endl;
            loader.readCameraFile(camfile);
            cout << "Reading input files..." << endl;
            loader.load(camfile, ColorHelper::READ_COLOR);
            if (use_confidence_files) {
                loader.load(camfile, ColorHelper::READ_CONFIDENCE);
            }
            cout << "Done reading " << loader.size() << " color images" << endl;
            if (do_wallfinding) {
                vector<char> tmp;
                swap(tmp, m.types);
                numlights = m.readSamples(infile);
                swap (m.types, tmp);
            } else {
                numlights = m.readSamples(infile);
            }
            cout << "Done loading reprojection files" << endl;
            if (hdr_threshold > 0) {
                numlights = clusterLights(m, hdr_threshold, minlightsize);
                cout << "Done clustering " << numlights << " lights" << endl;
            }
        } else {
            if (hdr_threshold < 0) hdr_threshold = 10.0;
            if (all_project) {
                cout << "Reading input files..." << endl;
                loader.load(camfile, ColorHelper::READ_COLOR | ColorHelper::READ_DEPTH);
                if (use_confidence_files) {
                    loader.load(camfile, ColorHelper::READ_CONFIDENCE);
                }
                cout << "Done reading " << loader.size() << " color images" << endl;
                cout << "Reprojecting..." << endl;
                reproject(loader, m, hdr_threshold);
            } else {
                loader.load(camfile, 0);
                loader.load(project, ColorHelper::READ_COLOR | ColorHelper::READ_DEPTH);
                if (use_confidence_files) {
                    loader.load(project, ColorHelper::READ_CONFIDENCE);
                }
                cout << "Reprojecting..." << endl;
                reproject((const float*) loader.getImage(project),
                          loader.getConfidenceMap(project),
                          loader.getDepthMap(project),
                          loader.getCamera(project),
                          m, hdr_threshold);
            }
            cout << "Done reprojecting; clustering lights..." << endl;
            numlights = clusterLights(m, hdr_threshold, minlightsize);
            cout << "Done clustering " << numlights << " lights" << endl;
        }
        if (output_reprojection) m.writeSamples(outfile);
        if (coloredfile.length()) m.writeColoredMesh(coloredfile, displayscale);
        m.computeColorsOGL();
        hemicuberesolution = max(hemicuberesolution, loader.getCamera(0)->width);
        hemicuberesolution = max(hemicuberesolution, loader.getCamera(0)->height);
        cout << "========================" << endl;
    }
    InverseRender ir(&m, numlights, hemicuberesolution);
    vector<SampleData> walldata, floordata;
    // Only do inverse rendering with full reprojection and wall labels
    if (do_sampling && (input || all_project) && (wallinput || do_wallfinding)) {
        cout << "==== SAMPLING SCENE ====" << endl;
        if (read_eq) {
            ir.loadVariablesBinary(walldata, samplefile + ".walls");
            if (do_texture) {
                ir.loadVariablesBinary(floordata, samplefile + ".floors");
            }
        } else {
            ir.computeSamples(walldata, wallindices, numsamples, discardthreshold);
            if (do_texture) {
                ir.computeSamples(floordata, floorindices, numsamples, discardthreshold);
            }
            if (write_eq) {
                ir.writeVariablesBinary(walldata, sampleoutfile + ".walls");
                if (do_texture) {
                    ir.writeVariablesBinary(floordata, sampleoutfile + ".floors");
                }
            }
            if (write_matlab) {
                ir.writeVariablesMatlab(walldata, matlabsamplefile);
            }
        }
        cout << "========================" << endl;
        ir.setNumRansacIters(numRansacIters);
        ir.setMaxPercentErr(maxPercentErr);
        ir.solve(walldata);

        Texture tex;
        if (do_texture) {
            // Prepare floor plane
            Eigen::Matrix4f t = of.getNormalizationTransform().inverse();
            Eigen::Vector3f floornormal(0,flipfloorceiling?-1:1,0);
            floornormal = t.topLeftCorner(3,3)*floornormal;
            Eigen::Vector4f floorpoint(0, wf.floorplane, 0, 1);
            floorpoint = t*floorpoint;
            R3Plane floorplane(eigen2gaps(floorpoint.head(3)).Point(), eigen2gaps(floornormal));

            loader.load(camfile, use_confidence_files);

            cout << "Solving texture..." << endl;
            ir.solveTexture(floordata, &loader, floorplane, tex);
            cout << "Done solving texture..." << endl;
            if (tex.size > 0) {
                ColorHelper::writeExrImage(texfile, tex.texture, tex.size, tex.size);
            }
        }
        if (radfile != "") {
            outputRadianceFile(radfile, wf, m, ir);
        }
        if (plyfile != "") {
            outputPlyFile(plyfile, wf, m, ir);
        }
        if (pbrtfile != "") {
            outputPbrtFile(pbrtfile, wf, m, ir, tex, loader.getCamera(0), do_texture?texfile:"");
        }
    }
    cout << "DONE PROCESSING" << endl;

    if (display) {
        InvRenderVisualizer irv(cloud, loader, wf, ir);
        if (project_debug) irv.recalculateColors(InvRenderVisualizer::LABEL_REPROJECT_DEBUG);
        irv.visualizeCameras(camera);
        irv.visualizeWalls();
        irv.addSamples(walldata);
        irv.loop();
    }
    return 0;
}
Code example #23
0
TEST (PCL, SHOTLocalReferenceFrameEstimation)
{
  PointCloud<ReferenceFrame> bunny_LRF;

  boost::shared_ptr<vector<int> > indicesptr (new vector<int> (indices));

  // Compute SHOT LRF
  SHOTLocalReferenceFrameEstimation<PointXYZ, ReferenceFrame> lrf_estimator;

  float radius = 0.01f;

  lrf_estimator.setRadiusSearch (radius);

  lrf_estimator.setInputCloud (cloud.makeShared ());
  lrf_estimator.setSearchMethod (tree);
  lrf_estimator.setIndices (indicesptr);

  lrf_estimator.compute (bunny_LRF);

  // TESTS
  EXPECT_EQ (indices.size (), bunny_LRF.size ());

  EXPECT_FALSE (bunny_LRF.is_dense);

  // NaN result for point 24
  //EXPECT_EQ (numeric_limits<float>::max (), bunny_LRF.at (24).confidence);
  EXPECT_TRUE (pcl_isnan (bunny_LRF.at (24).x_axis.normal_x));

  // Expected Results
  // point 15: tangent disambiguation
  //float point_15_conf = 0;
  Eigen::Vector3f point_15_x (-0.849213f, 0.528016f, 0.00593846f);
  Eigen::Vector3f point_15_y (0.274564f, 0.451135f, -0.849171f);
  Eigen::Vector3f point_15_z (-0.451055f, -0.719497f, -0.528084f);

  //float point_45_conf = 0;
  Eigen::Vector3f point_45_x (0.950556f, 0.0673042f, 0.303171f);
  Eigen::Vector3f point_45_y (0.156242f, -0.947328f, -0.279569f);
  Eigen::Vector3f point_45_z (0.268386f, 0.313114f, -0.911004f);

  //float point_163_conf = 0;
  Eigen::Vector3f point_163_x (0.816369f, 0.309943f, -0.487317f);
  Eigen::Vector3f point_163_y (0.235273f, -0.949082f, -0.209498f);
  Eigen::Vector3f point_163_z (-0.527436f, 0.0563754f, -0.847722f);

  // point 311: normal disambiguation
  //float point_311_conf = 0;
  Eigen::Vector3f point_311_x (0.77608663f, -0.60673451f, 0.17193851f);
  Eigen::Vector3f point_311_y (0.49546647f, 0.75532055f, 0.42895663f);
  Eigen::Vector3f point_311_z (-0.39013144f, -0.24771771f, 0.88681078f);

  //Test Results
  //EXPECT_NEAR (point_15_conf,bunny_LRF.at (15).confidence, 1E-3);
  EXPECT_NEAR_VECTORS (point_15_x, bunny_LRF.at (15).x_axis.getNormalVector3fMap (), 1E-3);
  EXPECT_NEAR_VECTORS (point_15_y, bunny_LRF.at (15).y_axis.getNormalVector3fMap (), 1E-3);
  EXPECT_NEAR_VECTORS (point_15_z, bunny_LRF.at (15).z_axis.getNormalVector3fMap (), 1E-3);

  //EXPECT_NEAR (point_45_conf, bunny_LRF.at (45).confidence, 1E-3);
  EXPECT_NEAR_VECTORS (point_45_x, bunny_LRF.at (45).x_axis.getNormalVector3fMap (), 1E-3);
  EXPECT_NEAR_VECTORS (point_45_y, bunny_LRF.at (45).y_axis.getNormalVector3fMap (), 1E-3);
  EXPECT_NEAR_VECTORS (point_45_z, bunny_LRF.at (45).z_axis.getNormalVector3fMap (), 1E-3);

  //EXPECT_NEAR (point_163_conf, bunny_LRF.at (163).confidence, 1E-3);
  EXPECT_NEAR_VECTORS (point_163_x, bunny_LRF.at (163).x_axis.getNormalVector3fMap (), 1E-3);
  EXPECT_NEAR_VECTORS (point_163_y, bunny_LRF.at (163).y_axis.getNormalVector3fMap (), 1E-3);
  EXPECT_NEAR_VECTORS (point_163_z, bunny_LRF.at (163).z_axis.getNormalVector3fMap (), 1E-3);

  //EXPECT_NEAR (point_311_conf, bunny_LRF.at (311).confidence, 1E-3);
  EXPECT_NEAR_VECTORS (point_311_x, bunny_LRF.at (311).x_axis.getNormalVector3fMap (), 1E-3);
  EXPECT_NEAR_VECTORS (point_311_y, bunny_LRF.at (311).y_axis.getNormalVector3fMap (), 1E-3);
  EXPECT_NEAR_VECTORS (point_311_z, bunny_LRF.at (311).z_axis.getNormalVector3fMap (), 1E-3);
}
Code example #24
0
void cloudCallback(const sensor_msgs::PointCloud2ConstPtr& msg)
{
	PointCloud cloud;
	pcl::fromROSMsg(*msg, cloud);

	g_cloud_frame = cloud.header.frame_id;
	g_cloud_ready = true;

	if(!g_head_depth_ready) return;

	Mat img3D;
	img3D = Mat::zeros(cloud.height, cloud.width, CV_32FC3);
	//img3D.create(cloud.height, cloud.width, CV_32FC3);
	
	int yMin, xMin, yMax, xMax;
	yMin = 0; xMin = 0;
	yMax = img3D.rows; xMax = img3D.cols;

	//get 3D from depth
	for(int y = yMin ; y < img3D.rows; y++) {
		Vec3f* img3Di = img3D.ptr<Vec3f>(y);
	
		for(int x = xMin; x < img3D.cols; x++) {
			pcl::PointXYZ p = cloud.at(x,y);
			if((p.z>g_head_depth-0.2) && (p.z<g_head_depth+0.2)) {
				img3Di[x][0] = p.x*1000;
				img3Di[x][1] = p.y*1000;
				img3Di[x][2] = hypot(img3Di[x][0], p.z*1000); //they seem to have trained with incorrectly projected 3D points
				//img3Di[x][2] = p.z*1000;
			} else {
				img3Di[x] = 0;
			}
		}
	}
	
	g_means.clear();
	g_votes.clear();
	g_clusters.clear();
	
	//do the actual estimate
	estimator.estimate( 	img3D,
							g_means,
							g_clusters,
							g_votes,
							g_stride,
							g_maxv,
							g_prob_th,
							g_larger_radius_ratio,
							g_smaller_radius_ratio,
							false,
							g_th
						);
	
	//assuming there's only one head in the image!
	if(g_means.size() > 0) {	
		geometry_msgs::PoseStamped pose_msg;
		pose_msg.header.frame_id = msg->header.frame_id;
	
		cv::Vec<float,POSE_SIZE> pose(g_means[0]);
	
		KDL::Rotation r = KDL::Rotation::RPY(
											 from_degrees( pose[5]+180+g_roll_bias ), 
											 from_degrees(-pose[3]+180+g_pitch_bias),
											 from_degrees(-pose[4]+    g_yaw_bias  )
											);
		double qx, qy, qz, qw;
		r.GetQuaternion(qx, qy, qz, qw);
	
		geometry_msgs::PointStamped head_point;
		geometry_msgs::PointStamped head_point_transformed;
	
		head_point.header = pose_msg.header;

		head_point.point.x = pose[0]/1000;
		head_point.point.y = pose[1]/1000;
		head_point.point.z = pose[2]/1000;
	
		try {
			listener->waitForTransform(head_point.header.frame_id, g_head_target_frame, ros::Time::now(), ros::Duration(2.0));
			listener->transformPoint(g_head_target_frame, head_point, head_point_transformed);
		} catch(tf::TransformException ex) {
			ROS_WARN(
				"Transform exception, when transforming point from %s to %s\ndropping pose (don't worry, this is probably ok)",
				head_point.header.frame_id.c_str(), g_head_target_frame.c_str());
				ROS_WARN("Exception was %s", ex.what());
			return;
		}
		
		tf::Transform trans;
		
		pose_msg.pose.position = head_point_transformed.point;
		pose_msg.header.frame_id = head_point_transformed.header.frame_id;
	
		pose_msg.pose.orientation.x = qx;
		pose_msg.pose.orientation.y = qy;
		pose_msg.pose.orientation.z = qz;
		pose_msg.pose.orientation.w = qw;

		trans.setOrigin(tf::Vector3(
			pose_msg.pose.position.x, 
			pose_msg.pose.position.y, 
			pose_msg.pose.position.z
		));
		trans.setRotation(tf::Quaternion(qx, qy, qz, qw));
		g_transform = tf::StampedTransform(trans, pose_msg.header.stamp, pose_msg.header.frame_id, "head_origin");
		// broadcaster->sendTransform(tf::StampedTransform(trans, pose_msg.header.stamp, pose_msg.header.frame_id, "head_origin"));
		g_transform_ready = true;
		pose_msg.header.stamp = ros::Time::now();
		geometry_msgs::PoseStamped zero_pose;
		zero_pose.header.frame_id = "head_origin";
		zero_pose.header.stamp = ros::Time::now();
		zero_pose.pose.orientation.w = 1;
		//pose_pub.publish(pose_msg);
		pose_pub.publish(zero_pose);
	}
}
Code example #25
0
TEST (PCL, SpinImageEstimation)
{
  // Estimate normals first
  double mr = 0.002;
  NormalEstimation<PointXYZ, Normal> n;
  PointCloud<Normal>::Ptr normals (new PointCloud<Normal> ());
  // set parameters
  n.setInputCloud (cloud.makeShared ());
  boost::shared_ptr<vector<int> > indicesptr (new vector<int> (indices));
  n.setIndices (indicesptr);
  n.setSearchMethod (tree);
  n.setRadiusSearch (20 * mr);
  n.compute (*normals);

  EXPECT_NEAR (normals->points[103].normal_x, 0.36683175, 1e-4);
  EXPECT_NEAR (normals->points[103].normal_y, -0.44696972, 1e-4);
  EXPECT_NEAR (normals->points[103].normal_z, -0.81587529, 1e-4);
  EXPECT_NEAR (normals->points[200].normal_x, -0.71414840, 1e-4);
  EXPECT_NEAR (normals->points[200].normal_y, -0.06002361, 1e-4);
  EXPECT_NEAR (normals->points[200].normal_z, -0.69741613, 1e-4);

  EXPECT_NEAR (normals->points[140].normal_x, -0.45109111, 1e-4);
  EXPECT_NEAR (normals->points[140].normal_y, -0.19499126, 1e-4);
  EXPECT_NEAR (normals->points[140].normal_z, -0.87091631, 1e-4);

  typedef Histogram<153> SpinImage;
  SpinImageEstimation<PointXYZ, Normal, SpinImage> spin_est(8, 0.5, 16);
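  // Constructor arguments are assumed to follow pcl::SpinImageEstimation's signature:
  // image_width = 8, support_angle_cos = 0.5, min_pts_neighb = 16.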
  // set parameters
  //spin_est.setInputWithNormals (cloud.makeShared (), normals);
  spin_est.setInputCloud (cloud.makeShared ());
  spin_est.setInputNormals (normals);
  spin_est.setIndices (indicesptr);
  spin_est.setSearchMethod (tree);
  spin_est.setRadiusSearch (40*mr);

  // Object
  PointCloud<SpinImage>::Ptr spin_images (new PointCloud<SpinImage> ());


  // radial SI
  spin_est.setRadialStructure();

  // estimate
  spin_est.compute (*spin_images);
  EXPECT_EQ (spin_images->points.size (), indices.size ());

  EXPECT_NEAR (spin_images->points[100].histogram[0], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[12], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[24], 0.00233226, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[36], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[48], 8.48662e-005, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[60], 0.0266387, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[72], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[84], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[96], 0.0414662, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[108], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[120], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[132], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[144], 0.0128513, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[0], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[12], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[24], 0.00932424, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[36], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[48], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[60], 0.0145733, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[72], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[84], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[96], 0.00034457, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[108], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[120], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[132], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[144], 0.0121195, 1e-4);

  // radial SI, angular spin-images
  spin_est.setAngularDomain ();

  // estimate
  spin_est.compute (*spin_images);
  EXPECT_EQ (spin_images->points.size (), indices.size ());

  EXPECT_NEAR (spin_images->points[100].histogram[0], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[12], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[24], 0.132139, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[36], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[48], 0.908814, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[60], 0.63875, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[72], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[84], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[96], 0.550392, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[108], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[120], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[132], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[144], 0.257136, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[0], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[12], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[24], 0.230605, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[36], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[48], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[60], 0.764872, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[72], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[84], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[96], 1.02824, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[108], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[120], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[132], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[144], 0.293567, 1e-4);

  // rectangular SI
  spin_est.setRadialStructure (false);
  spin_est.setAngularDomain (false);

  // estimate
  spin_est.compute (*spin_images);
  EXPECT_EQ (spin_images->points.size (), indices.size ());

  EXPECT_NEAR (spin_images->points[100].histogram[0], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[12], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[24], 0.000889345, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[36], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[48], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[60], 0.0489534, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[72], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[84], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[96], 0.0747141, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[108], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[120], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[132], 0.0173423, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[144], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[0], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[12], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[24], 0.0267132, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[36], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[48], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[60], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[72], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[84], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[96], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[108], 0.0209709, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[120], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[132], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[144], 0.029372, 1e-4);

  // rectangular SI, angular spin-images
  spin_est.setAngularDomain ();

  // estimate
  spin_est.compute (*spin_images);
  EXPECT_EQ (spin_images->points.size (), indices.size ());

  EXPECT_NEAR (spin_images->points[100].histogram[0], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[12], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[24], 0.132139, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[36], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[48], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[60], 0.38800787925720215, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[72], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[84], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[96], 0.468881, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[108], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[120], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[132], 0.67901438474655151, 1e-4);
  EXPECT_NEAR (spin_images->points[100].histogram[144], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[0], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[12], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[24], 0.143845, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[36], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[48], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[60], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[72], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[84], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[96], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[108], 0.706084, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[120], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[132], 0, 1e-4);
  EXPECT_NEAR (spin_images->points[300].histogram[144], 0.272542, 1e-4);
}
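The assertions above presume a pcl::SpinImageEstimation object (spin_est) and its inputs that were configured earlier in the test, outside this excerpt. A rough, hypothetical sketch of such a setup follows; the cloud, normals, search tree and the 153-bin histogram type are assumptions for the sketch, not taken from the original test.

#include <pcl/point_types.h>
#include <pcl/features/spin_image.h>
#include <pcl/search/kdtree.h>

void
configureSpinImages (pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud,
                     pcl::PointCloud<pcl::Normal>::ConstPtr normals)
{
  typedef pcl::Histogram<153> SpinImage;

  // image width 8, support angle cosine 0.5, at least 16 neighbours per spin image
  pcl::SpinImageEstimation<pcl::PointXYZ, pcl::Normal, SpinImage> spin_est (8, 0.5, 16);
  spin_est.setInputCloud (cloud);
  spin_est.setInputNormals (normals);

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  spin_est.setSearchMethod (tree);
  spin_est.setRadiusSearch (0.2);

  pcl::PointCloud<SpinImage>::Ptr spin_images (new pcl::PointCloud<SpinImage>);
  // the radial/angular variants are toggled with setRadialStructure() / setAngularDomain(),
  // exactly as the test does between its blocks of assertions
  spin_est.compute (*spin_images);
}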
Code Example #26
0
void RobotThread::extractCurves(QList<LaserPoint> scan_data, std_msgs::Header scanTime)
{
    /** Feature extraction method:
     *
     * 1. For every range reading i, find how far the scan extends on both sides of i: K_f[i] (forward), K_b[i] (backward).
     *    K_f[i] is the largest offset that still satisfies d(i, i + K_f[i]) > l(i, i + K_f[i]) - 1.
     * 2. Compute the angle between the forward and backward vectors at i.
     * 3. Construct features (curves) from at least 10 consecutive range readings. **/

    QList<Shape> shapes;

    PointCloud current;
    bool addTo = true;

    for (int x = 2; x < scan_data.size(); x++)
    {
        if (!addTo)
        { shapes.push_back(Shape(current)); current.clear(); addTo = true; }//start over

        //Start at x = 2 so that readings on both sides of x are available
        int iter = x + 1;
        int k_f = x + 1;
        int k_b = x - 1;

        while (iter < scan_data.size())
        {
            /** Determine K_f **/
            double dist = getDistance(scan_data.at(x), scan_data.at(iter));
            double l = scan_data.at((x + iter) / 2).range - 1;

            if (iter >= scan_data.size() - 1 || dist < l)
            { k_f = iter; break; }

            iter++;
        }//end while

        iter = x - 1;

        while (iter > 0)
        {
            double dist = getDistance(scan_data.at(x), scan_data.at(iter));
            double l = scan_data.at((x + iter) / 2).range - 1;

            if (iter == 0 || dist < l)
            { k_b = iter; break; }
            iter--;
        }//end while

        if (x < scan_data.size() - 1)
        {
            double f_ix = scan_data.at(k_f).px - scan_data.at(x).px;
            double f_iy = scan_data.at(k_f).py - scan_data.at(x).py;

            double b_ix = scan_data.at(k_b).px - scan_data.at(x).px;
            double b_iy = scan_data.at(k_b).py - scan_data.at(x).py;

            double f_iDotb_i = f_ix * b_ix + f_iy * b_iy;
            double f_iTimesb_i = sqrt(pow(f_ix, 2) + pow(f_iy, 2)) * sqrt(pow(b_ix, 2) + pow(b_iy, 2));

            double theta_i = acos(f_iDotb_i / f_iTimesb_i);

            if (fabs(theta_i) > 0.1 && current.size() >= 10) // corner detected and enough support points: close the current feature
                addTo = false;

            else
            {
                pcl::PointXYZ toPush;
                toPush.x = scan_data.at(x).px;
                toPush.y = scan_data.at(x).py;
                toPush.z = 1.0; //set a standard height.

                current.push_back(toPush);
            }//end else
        }//end if
    }

    PointCloud final;

    publishRunData(shapes);

    for (int y = 0; y < shapes.size(); y++)
    {
        Shape current = shapes.at(y);
        PointCloud curRent = current.getCorrections();

        for (unsigned int z = 0; z < curRent.size(); z++)
        {

            pcl::PointXYZ toPush;
            toPush.x = -curRent.points.at(z).y;
            toPush.y = curRent.points.at(z).z;
            toPush.z = curRent.points.at(z).x;

            final.push_back(toPush);
        }//END FOR
    }
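extractCurves relies on a getDistance helper that is not part of this listing. A minimal sketch of what it presumably computes, judging from how px/py are used above; the LaserPointSketch stand-in is an assumption, not the project's real LaserPoint type.

#include <cmath>

struct LaserPointSketch { double px, py, range; };   // stand-in for the real LaserPoint type

static double
getDistance (const LaserPointSketch &a, const LaserPointSketch &b)
{
    // plain Euclidean distance between two projected scan points
    const double dx = a.px - b.px;
    const double dy = a.py - b.py;
    return std::sqrt (dx * dx + dy * dy);
}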
Code Example #27
0
void optimized_shs2(const PointCloud<PointXYZRGB> &cloud, float tolerance, PointIndices &indices_in, PointIndices &indices_out, float delta_hue)
{
    // Flags for already-processed points (vector<char> rather than vector<bool>), initialized to 0
    std::vector<char> processed (cloud.points.size (), 0);

    std::vector<int> nn_indices;
    std::vector<float> nn_distances;

    SearchD search;
    search.setInputCloud(cloud.makeShared());

    // Process all points in the indices vector
    for (size_t k = 0; k < indices_in.indices.size (); ++k)
    {
        int i = indices_in.indices[k];
        if (processed[i])
            continue;

        processed[i] = 1;

        std::vector<int> seed_queue;
        int sq_idx = 0;
        seed_queue.push_back (i);

        PointXYZRGB  p;
        p = cloud.points[i];
        PointXYZHSV h;
        PointXYZRGBtoXYZHSV(p, h);

        while (sq_idx < (int)seed_queue.size ())
        {
            int index = seed_queue[sq_idx];
            const PointXYZRGB& q = cloud.points[index];

            if (!isFinite (q))
            {
                // advance in the queue before skipping, otherwise the loop would never terminate
                sq_idx++;
                continue;
            }

            // search window
            unsigned left, right, top, bottom;
            double squared_radius = tolerance * tolerance;
            search.getProjectedRadiusSearchBox (q, squared_radius, left, right, top, bottom);

            // flattened indices for walking the search window row by row
            unsigned yEnd = (bottom + 1) * cloud.width + right + 1; // one past the end of the last row of the window
            unsigned idx  = top * cloud.width + left;               // start of the first row of the window
            unsigned skip = cloud.width - right + left - 1;         // jump from the end of one window row to the start of the next
            unsigned xEnd = idx - left + right + 1;                 // one past the end of the current window row

            for (; xEnd != yEnd; idx += skip, xEnd += cloud.width)
            {
                for (; idx < xEnd; ++idx)
                {
                    if (processed[idx])
                        continue;

                    if (sqnorm(cloud.points[idx], q) <= squared_radius)
                    {
                        PointXYZRGB  p_l;
                        p_l = cloud.points[idx];
                        PointXYZHSV h_l;
                        PointXYZRGBtoXYZHSV(p_l, h_l);

                        if (fabs(h_l.h - h.h) < delta_hue)
                        {
                            if (idx & 1)   // only points with odd indices are pushed as new seeds
                              seed_queue.push_back (idx);
                            processed[idx] = 1;
                        }

                    }
                }
            }
            sq_idx++;

        }
        // Copy the seed queue into the output indices
        for (size_t l = 0; l < seed_queue.size (); ++l)
            indices_out.indices.push_back(seed_queue[l]);
    }
    // Sorting is purely cosmetic and can be removed for speed
    std::sort (indices_out.indices.begin (), indices_out.indices.end ());
}
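Both optimized_shs2 and optimized_elec (below) call a sqnorm helper that is not shown in these listings. A plausible sketch, consistent with its result being compared against tolerance * tolerance so that no square root is needed:

#include <pcl/point_types.h>

inline float
sqnorm (const pcl::PointXYZRGB &a, const pcl::PointXYZRGB &b)
{
    // squared Euclidean distance, suitable for direct comparison with a squared radius
    const float dx = a.x - b.x;
    const float dy = a.y - b.y;
    const float dz = a.z - b.z;
    return dx * dx + dy * dy + dz * dz;
}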
Code Example #28
0
File: test_brisk.cpp Project: KoenBuys/pcl
TEST (PCL, BRISK_2D)
{
  // Compute BRISK keypoints 
  BriskKeypoint2D<PointT> brisk_keypoint_estimation;
  brisk_keypoint_estimation.setThreshold (60);
  brisk_keypoint_estimation.setOctaves (4);
  brisk_keypoint_estimation.setInputCloud (cloud_image);

  cloud_keypoints.reset (new PointCloud<KeyPointT>);
  brisk_keypoint_estimation.compute (*cloud_keypoints);

  //io::savePCDFileBinary ("brisk_keypoints.pcd", *cloud_keypoints);

  const int num_of_keypoints = int (cloud_keypoints->size ());
  const int num_of_keypoints_gt = int (cloud_keypoints_gt->size ());
  EXPECT_EQ (num_of_keypoints_gt, num_of_keypoints);


  for (size_t point_index = 0; point_index < cloud_keypoints->size (); ++point_index)
  {
    PointWithScale & point    = (*cloud_keypoints) [point_index];
    PointWithScale & point_gt = (*cloud_keypoints_gt) [point_index];

    // compare each detected keypoint against its ground-truth counterpart
    const float dx = point.x - point_gt.x;
    const float dy = point.y - point_gt.y;
    const float dz = point.z - point_gt.z;

    const float sqr_distance = (dx*dx + dy*dy + dz*dz);

    EXPECT_NEAR (0.0f, sqr_distance, 1e-4);
  }

  BRISK2DEstimation<PointT> brisk_descriptor_estimation;
  brisk_descriptor_estimation.setInputCloud (cloud_image);
  brisk_descriptor_estimation.setKeypoints (cloud_keypoints);


  cloud_descriptors.reset (new PointCloud<BRISKSignature512>);
  brisk_descriptor_estimation.compute (*cloud_descriptors);

  const int num_of_descriptors = int (cloud_descriptors->size ());
  const int num_of_descriptors_gt = int (cloud_descriptors_gt->size ());
  EXPECT_EQ (num_of_descriptors_gt, num_of_descriptors);


  //io::savePCDFileBinary ("brisk_descriptors.pcd", *cloud_descriptors);
  //for (size_t point_index = 0; point_index < cloud_keypoints->size (); ++point_index)
  for (size_t point_index = 0; point_index < cloud_descriptors->size (); ++point_index)
  {
    BRISKSignature512 & descriptor = (*cloud_descriptors) [point_index];
    BRISKSignature512 & descriptor_gt = (*cloud_descriptors_gt) [point_index];

    float sqr_dist = 0.0f;
    for (size_t index = 0; index < 33; ++index)
    {
      const float dist = float (descriptor.descriptor[index] - descriptor_gt.descriptor[index]);
      sqr_dist += dist * dist;
    }

    EXPECT_NEAR (0.0f, sqr_dist, 1e-4);
  }
}
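The per-descriptor comparison loop above could be factored into a small helper. A sketch only; the 33-element span simply mirrors the loop in the test and is not a claim about the full descriptor length.

#include <pcl/point_types.h>
#include <cstddef>

static float
descriptorSqrDistance (const pcl::BRISKSignature512 &a,
                       const pcl::BRISKSignature512 &b,
                       std::size_t length = 33)
{
  float sqr_dist = 0.0f;
  for (std::size_t i = 0; i < length; ++i)
  {
    // accumulate squared byte-wise differences between the two descriptors
    const float d = float (a.descriptor[i]) - float (b.descriptor[i]);
    sqr_dist += d * d;
  }
  return sqr_dist;
}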
Code Example #29
0
void optimized_elec(const PointCloud<pcl::PointXYZRGB> &cloud, const cv::Mat& src_labels, float tolerance,
                    std::vector<std::vector<PointIndices> > &labeled_clusters,
                    unsigned int min_pts_per_cluster, unsigned int max_pts_per_cluster, unsigned int num_parts,
                    bool brute_force_border, float radius_scale)
{
    typedef unsigned char uchar;

    Size sz = src_labels.size();

    cv::Mat dst_labels(sz, CV_32S, Scalar(-1));
    cv::Mat wavefront(sz, CV_32SC2);
    cv::Mat region_sizes = Mat::zeros(sz, CV_32S);

    Point2i *wf = wavefront.ptr<Point2i>();
    int *rsizes = region_sizes.ptr<int>();

    int cc = -1;

    double squared_radius = tolerance * tolerance;

    for(int j = 0; j < sz.height; ++j)
    {
        for(int i = 0; i < sz.width; ++i)
        {
            if (src_labels.at<uchar>(j, i) >= num_parts || dst_labels.at<int>(j, i) != -1) // invalid label, or already labeled
                continue;

            Point2i* ws = wf; // initialize wavefront
            Point2i p(i, j);  // current pixel

            cc++;	// next label

            dst_labels.at<int>(j, i) = cc;
            int count = 0;	// current region size

            // wavefront propagation
            while( ws >= wf ) // wavefront not empty
            {
                // put neighbors onto wavefront
                const uchar* sl = &src_labels.at<uchar>(p.y, p.x);
                int*         dl = &dst_labels.at<  int>(p.y, p.x);
                const pcl::PointXYZRGB *sp = &cloud.at(p.x, p.y);

                //right
                if( p.x < sz.width-1 && dl[+1] == -1 && sl[+1] == sl[0])
                    if (sqnorm(sp[0], sp[+1]) <= squared_radius)
                    {
                        dl[+1] = cc;
                        *ws++ = Point2i(p.x+1, p.y);
                    }

                //left
                if( p.x > 0 && dl[-1] == -1 && sl[-1] == sl[0])
                    if (sqnorm(sp[0], sp[-1]) <= squared_radius)
                    {
                        dl[-1] = cc;
                        *ws++ = Point2i(p.x-1, p.y);
                    }

                //bottom (next row)
                if( p.y < sz.height-1 && dl[+sz.width] == -1 && sl[+sz.width] == sl[0])
                    if (sqnorm(sp[0], sp[+sz.width]) <= squared_radius)
                    {
                        dl[+sz.width] = cc;
                        *ws++ = Point2i(p.x, p.y+1);
                    }


                //top
                if( p.y > 0 && dl[-sz.width] == -1 && sl[-sz.width] == sl[0])
                    if (sqnorm(sp[0], sp[-sz.width]) <= squared_radius)
                    {
                        dl[-sz.width] = cc;
                        *ws++ = Point2i(p.x, p.y-1);
                    }

                // pop the most recently pushed neighbour and continue from it
                // (when the wavefront is empty, the value read here is never used because the loop exits)
                p = *--ws;
                count++;
            }

            rsizes[cc] = count;
        } /* for(int i = 0; i < sz.width; ++i) */
    } /* for(int j = 0; j < sz.height; ++j) */

    Mat djset(sz, CV_32S);
    int* dj = djset.ptr<int>();
    yota(dj, dj + sz.area(), 0);    // iota-style fill: dj[k] = k, so every region starts as its own disjoint-set root


    if (brute_force_border)
    {
        pcl::ScopeTime time("border");

        SearchD search;
        search.setInputCloud(cloud.makeShared());

#define HT_USE_OMP

#ifdef HT_USE_OMP

        const int threads_num = 3;

        int range[threads_num + 1];
        for(int i = 0; i < threads_num+1; ++i)
            range[i] = i * num_parts/threads_num;

        #pragma omp parallel for num_threads(threads_num)
        for(int r = 0; r < threads_num; ++r)
        {
#endif
            for(int j = 1; j < sz.height-1; ++j)
            {
                int *dl = dst_labels.ptr<int>(j);

                for(int i = 1; i < sz.width-1; ++i)
                {
                    int cc = dl[i];

                    if (cc == -1)
                        continue;

#ifdef HT_USE_OMP
                    uchar lbl = src_labels.at<uchar>(j, i);
                    bool inRange = range[r] <= lbl && lbl < range[r+1];
                    if (!inRange)
                        continue;
#endif

                    if (cc == dl[i+1] && cc == dl[i-1] && cc == dl[i+sz.width] && cc == dl[i-sz.width])
                        continue; // inner point

                    int root = cc;
                    while(root != dj[root])
                        root = dj[root];

                    const pcl::PointXYZRGB& sp = cloud.at(i, j);
                    uchar sl = src_labels.at<uchar>(j, i);

                    if (!isFinite(sp))
                        continue;

                    unsigned left, right, top, bottom;
                    search.getProjectedRadiusSearchBox (sp, squared_radius, left, right, top, bottom);

                    if (radius_scale != 1.f)
                    {
                        left =  i - (i - left) * radius_scale;
                        right = i + (right - i) * radius_scale;
                        top =  j - (j - top) * radius_scale;
                        bottom = j + (bottom - j) * radius_scale;
                    }

                    for(int y = top; y < bottom + 1; ++y)
                    {
                        const uchar *sl1  = src_labels.ptr<uchar>(y);
                        const int   *dl1 = dst_labels.ptr<int>(y);
                        const pcl::PointXYZRGB* sp1 = &cloud.at(0, y);

                        for(int x = left; x < right + 1; ++x)
                        {
                            int cc1 = dl1[x];
                            // not the same cc, the same label, and in radius
                            if (cc1 != cc && sl1[x] == sl && sqnorm(sp, sp1[x]) <= squared_radius)
                            {
                                while(cc1 != dj[cc1])
                                    cc1 = dj[cc1];

                                dj[cc1] = root;
                            }
                        }
                    }
                }
            }
#ifdef HT_USE_OMP
        }
#endif

        for(int j = 0; j < sz.height; ++j)
        {
            int *dl = dst_labels.ptr<int>(j);

            for(int i = 0; i < sz.width; ++i)
            {
                int cc = dl[i];
                if (cc == -1)
                    continue;

                while(cc != dj[cc]) //disjoint set
                    cc = dj[cc];
                dl[i] = cc;
                rsizes[cc]++;
            }
        }

    } /* if (brute_force_border)*/

    //convert to output format
    labeled_clusters.clear();
    labeled_clusters.resize(num_parts);

    vector<int> remap(sz.area(), -1);

    for(int j = 0; j < sz.height; ++j)
    {
        const uchar *sl = src_labels.ptr<uchar>(j);
        const int   *dl = dst_labels.ptr<int>(j);

        for(int i = 0; i < sz.width; ++i)
        {
            int part = sl[i];
            int cc   = dl[i];

            if (cc == -1)
                continue;

            if (min_pts_per_cluster <= rsizes[cc] && rsizes[cc] <= max_pts_per_cluster)
            {

                int ccindex = remap[cc];
                if (ccindex == -1)
                {
                    ccindex = (int)labeled_clusters[part].size();
                    labeled_clusters[part].resize(ccindex + 1);
                    remap[cc] = ccindex;
                }

                labeled_clusters[part][ccindex].indices.push_back(j*sz.width + i);
            }
        }
    }
}
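The border-merging stage walks parent links in dj (a flat disjoint-set / union-find array) to find each component's root, but it never compresses paths. A standard find-with-path-compression helper, shown here only as a sketch of the underlying technique rather than code from the project, would keep those walks short:

#include <vector>

static int
findRoot (std::vector<int> &dj, int cc)
{
    int root = cc;
    while (root != dj[root])      // walk up to the representative, as optimized_elec does inline
        root = dj[root];

    while (cc != root)            // path compression: point every visited node directly at the root
    {
        int next = dj[cc];
        dj[cc] = root;
        cc = next;
    }
    return root;
}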
Code Example #30
0
File: test_search.cpp Project: BITVoyager/pcl
/* ---[ */
int
main (int argc, char** argv)
{
  if (argc < 2)
  {
    std::cout << "need path to table_scene_mug_stereo_textured.pcd file\n";
    return (-1);
  }

  pcl::io::loadPCDFile (argv [1], *organized_sparse_cloud);
  
  // create unorganized cloud
  unorganized_dense_cloud->resize (unorganized_point_count);
  unorganized_dense_cloud->height = 1;
  unorganized_dense_cloud->width = unorganized_point_count;
  unorganized_dense_cloud->is_dense = true;
  
  unorganized_sparse_cloud->resize (unorganized_point_count);
  unorganized_sparse_cloud->height = 1;
  unorganized_sparse_cloud->width = unorganized_point_count;
  unorganized_sparse_cloud->is_dense = false;

  PointXYZ point;
  for (unsigned pIdx = 0; pIdx < unorganized_point_count; ++pIdx)
  {
    point.x = rand_float ();
    point.y = rand_float ();
    point.z = rand_float ();

    unorganized_dense_cloud->points [pIdx] = point;
    
    if (rand_uint () == 0)
      unorganized_sparse_cloud->points [pIdx].x = unorganized_sparse_cloud->points [pIdx].y = unorganized_sparse_cloud->points [pIdx].z = std::numeric_limits<float>::quiet_NaN ();
    else
      unorganized_sparse_cloud->points [pIdx] = point;
  }
  
  unorganized_grid_cloud->reserve (1000);
  unorganized_grid_cloud->height = 1;
  unorganized_grid_cloud->width = 1000;
  unorganized_grid_cloud->is_dense = true;
  
  // values between 0 and 1
  for (unsigned xIdx = 0; xIdx < 10; ++xIdx)
  {
    for (unsigned yIdx = 0; yIdx < 10; ++yIdx)
    {
      for (unsigned zIdx = 0; zIdx < 10; ++zIdx)
      {
        point.x = 0.1f * static_cast<float>(xIdx);
        point.y = 0.1f * static_cast<float>(yIdx);
        point.z = 0.1f * static_cast<float>(zIdx);
        unorganized_grid_cloud->push_back (point);
      }
    }
  }
  
  createIndices (organized_input_indices, static_cast<unsigned> (organized_sparse_cloud->size () - 1));
  createIndices (unorganized_input_indices, unorganized_point_count - 1);
  
  brute_force.setSortedResults (true);
  KDTree.setSortedResults (true);
  octree_search.setSortedResults (true);
  organized.setSortedResults (true);
  
  unorganized_search_methods.push_back (&brute_force);
  unorganized_search_methods.push_back (&KDTree);
  unorganized_search_methods.push_back (&octree_search);
  
  organized_search_methods.push_back (&brute_force);
  organized_search_methods.push_back (&KDTree);
  organized_search_methods.push_back (&octree_search);
  organized_search_methods.push_back (&organized);
  
  createQueryIndices (unorganized_dense_cloud_query_indices, unorganized_dense_cloud, query_count);
  createQueryIndices (unorganized_sparse_cloud_query_indices, unorganized_sparse_cloud, query_count);
  createQueryIndices (organized_sparse_query_indices, organized_sparse_cloud, query_count);
  
  testing::InitGoogleTest (&argc, argv);
  return (RUN_ALL_TESTS ());
}
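main() relies on rand_float(), rand_uint(), createIndices() and createQueryIndices() helpers defined earlier in test_search.cpp and not reproduced here. A minimal stand-in for the two random helpers, consistent with how they are used above; the exact ranges are assumptions for this sketch.

#include <cstdlib>

// coordinates roughly in [0, 1]
static float
rand_float ()
{
  return static_cast<float> (rand ()) / static_cast<float> (RAND_MAX);
}

// a 0 result marks a point as NaN in the sparse cloud; the modulus value is a guess
static unsigned
rand_uint (unsigned range = 16)
{
  return static_cast<unsigned> (rand ()) % range;
}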