main (int argc, char** argv) { std::string fileName = argv[1]; std::cout << "Reading " << fileName << std::endl; pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>); if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) //* load the file { PCL_ERROR ("Couldn't read file"); return (-1); } std::cout << "Loaded " << cloud->points.size() << " points." << std::endl; // Compute the normals pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normalEstimation; normalEstimation.setInputCloud (cloud); pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>); normalEstimation.setSearchMethod (tree); pcl::PointCloud<pcl::Normal>::Ptr cloudWithNormals (new pcl::PointCloud<pcl::Normal>); normalEstimation.setRadiusSearch (0.03); normalEstimation.compute (*cloudWithNormals); // Setup the spin images computation typedef pcl::Histogram<153> SpinImage; pcl::SpinImageEstimation<pcl::PointXYZ, pcl::Normal, SpinImage> spinImageEstimation(8, 0.5, 16); // Provide the original point cloud (without normals) //spinImageEstimation.setInputCloud (cloud); // Provide the point cloud with normals //spinImageEstimation.setInputNormals(cloudWithNormals); spinImageEstimation.setInputWithNormals(cloud, cloudWithNormals); // Use the same KdTree from the normal estimation spinImageEstimation.setSearchMethod (tree); pcl::PointCloud<SpinImage>::Ptr spinImages(new pcl::PointCloud<SpinImage>); spinImageEstimation.setRadiusSearch (0.2); // Actually compute the spin images spinImageEstimation.compute (*spinImages); std::cout << "output points.size (): " << spinImages->points.size () << std::endl; // Display and retrieve the shape context descriptor vector for the 0th point. SpinImage descriptor = spinImages->points[0]; std::cout << descriptor << std::endl; return 0; }
// Computes a spin-image descriptor for every point of `input` and stores the
// resulting descriptor cloud in `output`.
//
// Fix: the original computed the descriptors into a local cloud that was
// discarded on return, leaving `output` untouched; results now go to *output.
//
// input  — XYZ point cloud to describe (must be non-null).
// output — pre-allocated descriptor cloud to fill (must be non-null).
void ComputeSpinImages(InputCloud::Ptr input, OutputCloud::Ptr output)
{
  // Compute the normals — they provide the reference axis for each spin image.
  std::cout << "Computing normals..." << std::endl;
  pcl::NormalEstimation<InputCloud::PointType, pcl::Normal> normalEstimation;
  normalEstimation.setInputCloud (input);

  pcl::search::KdTree<InputCloud::PointType>::Ptr tree (new pcl::search::KdTree<InputCloud::PointType>);
  normalEstimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::Normal>::Ptr cloudWithNormals (new pcl::PointCloud<pcl::Normal>);
  normalEstimation.setRadiusSearch (0.03);
  normalEstimation.compute (*cloudWithNormals);

  // Setup the spin-image computation
  // (8 image-width bins, 0.5 support-angle cosine, minimum 16 neighbours).
  pcl::SpinImageEstimation<InputCloud::PointType, pcl::Normal, OutputCloud::PointType> spinImageEstimation(8, 0.5, 16);

  // Provide the original cloud together with its normals in one call.
  spinImageEstimation.setInputWithNormals(input, cloudWithNormals);

  // Use the same KdTree from the normal estimation.
  spinImageEstimation.setSearchMethod (tree);

  // Use a fixed neighbour count instead of a radius: a radius search errors
  // out when fewer than the minimum point count falls inside the radius.
  spinImageEstimation.setKSearch(20);

  // Actually compute the spin images, directly into the caller's cloud.
  std::cout << "Computing spin images..." << std::endl;
  spinImageEstimation.compute (*output);
  std::cout << "output points.size (): " << output->points.size () << std::endl;
}
void Object_Recognition::callback(const sensor_msgs::PointCloud2ConstPtr& input) // A callback function acting on the raw input data and performing the object recognition { // Container for original & filtered data PointCloudColored::Ptr cloud(new PointCloudColored); pcl::PCLPointCloud2 *cloud2 = new pcl::PCLPointCloud2; // Convert to PCL data type pcl_conversions::toPCL(*input, *cloud2); pcl::fromPCLPointCloud2(*cloud2, *cloud); // Filter, segment and cluster the raw point cloud std::vector<PointCloudColored::Ptr> clusters; filterCloud.filterSegmentCluster(cloud, clusters); // Loop through all clusters and assigning classifications for (std::size_t i = 0; i < clusters.size(); i++) { float best = 0; // Initialize recognized object as unknown std::string object = "unknown"; // Loop through the library of for (std::size_t model = 0; model < cloudLibrary.size(); model++) { // Space for model spin images and extractor pcl::PointCloud<pcl::Histogram<histLength>>::Ptr modelSpinImages(new pcl::PointCloud<pcl::Histogram<histLength> >); // Extract spin features pcl::PointCloud<pcl::Histogram<histLength>>::Ptr spinImages(new pcl::PointCloud<pcl::Histogram<histLength>>); siExtractor.getSpinImage(clusters[i], spinImages); // Reduce dimensionality (optional) // -- If true, done also for the candidate spin images // Randomly pick N and M, or less points to compare from each std::random_shuffle(spinImages->begin(), spinImages->end()); std::random_shuffle(librarySpinImages[model]->begin(), librarySpinImages[model]->end()); std::size_t N = maxBlobSize; std::size_t M = maxModelBlobSize; std::size_t Nx = std::min(spinImages->size(), N); std::size_t Ny = std::min(librarySpinImages[model]->size(), M); // If cloud over crowded, continue if (spinImages->size()>Nx*2) continue; // Create matching // Returns // -- Candidates float totalCorrelation = 0; float pairWiseCorrelation; float totalCount = Nx*Ny; for (std::size_t x = 0; x < Nx; x++) { for (std::size_t y = 0; y < Ny; y++) { // Find 
correlation between current point cloud and reference point cloud Eigen::Map<Eigen::VectorXf> blobV(spinImages->at(x).histogram, histLength); Eigen::Map<Eigen::VectorXf> modelV(librarySpinImages[model]->at(y).histogram, histLength); pairWiseCorrelation = (blobV.dot(modelV) * histLength - blobV.sum() * modelV.sum()) / std::sqrt( (histLength * (blobV.dot(blobV)) - blobV.sum() * blobV.sum()) * (histLength * (modelV.dot(modelV)) - modelV.sum() * modelV.sum())); // Use only pairs with correlation greater than minimal accuracy if (std::abs(pairWiseCorrelation) < minAccuracy) totalCount--; pairWiseCorrelation = std::abs(pairWiseCorrelation) > minAccuracy ? pairWiseCorrelation : 0.0; totalCorrelation += pairWiseCorrelation; } } // Perform p1,p2,d1,d2 sensitivity procedure to filter out noisy point cloud blobs // Inspired by : http://www.slideshare.net/SameeraHorawalavithana/locality-sensitive-hashing if (totalCount < minCount) totalCorrelation = 0.0; totalCorrelation*=totalCount/cloudLibrary[model].myCloud->size(); clusters[i]->header.frame_id = camera_frame_name; clusterPub.publish(clusters[i]); std::cout << "Correlation with " << cloudLibrary[model].myName << " is " << totalCorrelation; if (totalCorrelation > best) { best = totalCorrelation; object = cloudLibrary[model].myName; } } std::cout << "The object is : " << object << std::endl; pcl::PCLPointCloud2::Ptr cloud2(new pcl::PCLPointCloud2); pcl::toPCLPointCloud2(*clusters[i],*cloud2); plotBoundingBox(cloud2,object); cloud2.reset(); } }