/*!
 * @brief Reads a point cloud and extracts color and depth values as .ppm image
 */
int main(int argc, char** argv)
{
  readOptions(argc, argv);

  PointCloud<PointXYZRGB>::Ptr p(new PointCloud<PointXYZRGB>());
  PCDReader r;
  if (r.read(file_i, *p) == -1) return(0);
  cout << "Loaded pointcloud with " << p->width << " x " << p->height << " points." << endl;

  cob_3d_mapping_common::PPMWriter w;
  if (w.writeRGB(file_o[0], *p) == -1) return(0);
  cout << "Extracted RGB image..." << endl;

  if (file_o[1] != "")
  {
    if (min_z != FLT_MAX) w.setMinZ(min_z);
    if (max_z != FLT_MIN) w.setMaxZ(max_z);
    if (w.writeDepth(file_o[1], *p) == -1) return(0);
    if (w.writeDepthLinear(file_o[1] + "_linear", *p) == -1) return(0);
    cout << "Extracted depth image..." << endl;
  }
  cout << "Done!" << endl;
  return(0);
}
int main (int argc, char **argv)
{
  if (argc != 2)
  {
    PCL_ERROR ("Syntax: ./multiscale_feature_persistence_example [path_to_cloud.pcl]\n");
    return -1;
  }

  PointCloud<PointXYZ>::Ptr cloud_scene (new PointCloud<PointXYZ> ());
  PCDReader reader;
  reader.read (argv[1], *cloud_scene);

  PointCloud<PointXYZ>::Ptr cloud_subsampled;
  PointCloud<Normal>::Ptr cloud_subsampled_normals;
  subsampleAndCalculateNormals (cloud_scene, cloud_subsampled, cloud_subsampled_normals);
  PCL_INFO ("STATS:\ninitial point cloud size: %u\nsubsampled point cloud size: %u\n",
            cloud_scene->points.size (), cloud_subsampled->points.size ());

  visualization::CloudViewer viewer ("Multiscale Feature Persistence Example Visualization");
  viewer.showCloud (cloud_scene, "scene");

  MultiscaleFeaturePersistence<PointXYZ, FPFHSignature33> feature_persistence;
  std::vector<float> scale_values;
  for (float x = 2.0f; x < 3.6f; x += 0.35f)
    scale_values.push_back (x / 100.0f);
  feature_persistence.setScalesVector (scale_values);
  feature_persistence.setAlpha (1.3f);

  FPFHEstimation<PointXYZ, Normal, FPFHSignature33>::Ptr fpfh_estimation (new FPFHEstimation<PointXYZ, Normal, FPFHSignature33> ());
  fpfh_estimation->setInputCloud (cloud_subsampled);
  fpfh_estimation->setInputNormals (cloud_subsampled_normals);
  pcl::search::KdTree<PointXYZ>::Ptr tree (new pcl::search::KdTree<PointXYZ> ());
  fpfh_estimation->setSearchMethod (tree);
  feature_persistence.setFeatureEstimator (fpfh_estimation);
  feature_persistence.setDistanceMetric (pcl::CS);

  PointCloud<FPFHSignature33>::Ptr output_features (new PointCloud<FPFHSignature33> ());
  boost::shared_ptr<std::vector<int> > output_indices (new std::vector<int> ());
  feature_persistence.determinePersistentFeatures (*output_features, output_indices);
  PCL_INFO ("persistent features cloud size: %u\n", output_features->points.size ());

  ExtractIndices<PointXYZ> extract_indices_filter;
  extract_indices_filter.setInputCloud (cloud_subsampled);
  extract_indices_filter.setIndices (output_indices);
  PointCloud<PointXYZ>::Ptr persistent_features_locations (new PointCloud<PointXYZ> ());
  extract_indices_filter.filter (*persistent_features_locations);

  viewer.showCloud (persistent_features_locations, "persistent features");
  PCL_INFO ("Persistent features have been computed. Waiting for the user to quit the visualization window.\n");

  // io::savePCDFileASCII ("persistent_features.pcd", *persistent_features_locations);
  // PCL_INFO ("\nPersistent feature locations written to persistent_features.pcd\n");

  while (!viewer.wasStopped (50));

  return 0;
}
void
pcl::PCDGrabberBase::PCDGrabberImpl::readAhead ()
{
  PCDReader reader;
  int pcd_version;

  // Check if we're still reading files from a TAR file
  if (tar_fd_ != -1)
  {
    if (!readTARHeader ())
      return;
    valid_ = (reader.read (tar_file_, next_cloud_, origin_, orientation_, pcd_version, tar_offset_) == 0);
    if (!valid_)
      closeTARFile ();
    else
    {
      tar_offset_ += (tar_header_.getFileSize ()) + (512 - tar_header_.getFileSize () % 512);
      int result = static_cast<int> (pcl_lseek (tar_fd_, tar_offset_, SEEK_SET));
      if (result < 0)
        closeTARFile ();
    }
  }
  // We're not still reading from a TAR file, so check if there are other PCD/TAR files in the list
  else
  {
    if (pcd_iterator_ != pcd_files_.end ())
    {
      // Try to read in the file as a PCD first
      valid_ = (reader.read (*pcd_iterator_, next_cloud_, origin_, orientation_, pcd_version) == 0);

      // Has an error occurred? Check if we can interpret the file as a TAR file first before going onto the next
      if (!valid_ && openTARFile (*pcd_iterator_) >= 0 && readTARHeader ())
      {
        tar_file_ = *pcd_iterator_;
        valid_ = (reader.read (tar_file_, next_cloud_, origin_, orientation_, pcd_version, tar_offset_) == 0);
        if (!valid_)
          closeTARFile ();
        else
        {
          tar_offset_ += (tar_header_.getFileSize ()) + (512 - tar_header_.getFileSize () % 512);
          int result = static_cast<int> (pcl_lseek (tar_fd_, tar_offset_, SEEK_SET));
          if (result < 0)
            closeTARFile ();
        }
      }

      if (++pcd_iterator_ == pcd_files_.end () && repeat_)
        pcd_iterator_ = pcd_files_.begin ();
    }
    else
      valid_ = false;
  }
}
DataSource(const std::string& file)
  : cloud(new PointCloud<PointXYZ>()),
    surface(new PointCloud<PointXYZ>()),
    indices(new std::vector<int>()),
    normals(new PointCloud<Normal>()),
    normals_surface(new PointCloud<Normal>())
{
  PCDReader pcd;
  pcd.read(file, *cloud);

  PointXYZ minp, maxp;
  pcl::getMinMax3D(*cloud, minp, maxp);
  float sz = (maxp.x - minp.x + maxp.y - minp.y + maxp.z - minp.z) / 3;
  radius = sz / 15;
}
int main (int, char **argv)
{
  PointCloud<PointXYZ>::Ptr cloud (new PointCloud<PointXYZ> ());
  PCDReader reader;
  reader.read (argv[1], *cloud);
  PCL_INFO ("Cloud read: %s\n", argv[1]);
  cerr << "cloud has #points: " << cloud->points.size () << endl;

  PointCloud<PointXYZ>::Ptr cloud_subsampled (new PointCloud<PointXYZ> ());
  VoxelGrid<PointXYZ> subsampling_filter;
  subsampling_filter.setInputCloud (cloud);
  subsampling_filter.setLeafSize (subsampling_leaf_size, subsampling_leaf_size, subsampling_leaf_size);
  subsampling_filter.filter (*cloud_subsampled);
  cerr << "subsampled cloud has #points: " << cloud_subsampled->points.size () << endl;

  StatisticalMultiscaleInterestRegionExtraction<PointXYZ> region_extraction;
  std::vector<float> scale_vector;
  PCL_INFO ("Scale values that will be used: ");
  float base_scale_aux = base_scale;
  for (size_t scales = 0; scales < 7; ++scales)
  {
    PCL_INFO ("%f ", base_scale_aux);
    scale_vector.push_back (base_scale_aux);
    base_scale_aux *= 1.6f;
  }
  PCL_INFO ("\n");
  region_extraction.setInputCloud (cloud_subsampled);
  region_extraction.setScalesVector (scale_vector);
  std::list<IndicesPtr> rois;
  region_extraction.computeRegionsOfInterest (rois);

  PCL_INFO ("Regions of interest found: %d\n", rois.size ());
  pcl::ExtractIndices<PointXYZ> extract_indices_filter;
  unsigned int roi_count = 0;
  for (std::list<IndicesPtr>::iterator l_it = rois.begin (); l_it != rois.end (); ++l_it)
  {
    PointCloud<PointXYZ> roi_points;
    extract_indices_filter.setInputCloud (cloud_subsampled);
    extract_indices_filter.setIndices (*l_it);
    extract_indices_filter.filter (roi_points);

    char filename[512];
    sprintf (filename, "roi_%03d.pcd", ++roi_count);
    io::savePCDFileASCII (filename, roi_points);
  }
  io::savePCDFileASCII ("subsampled_input.pcd", *cloud_subsampled);

  return 0;
}
/* ---[ */
int
main (int argc, char** argv)
{
  // Parse the command line arguments for .pcd files
  std::vector<int> p_file_indices;
  p_file_indices = parse_file_extension_argument (argc, argv, ".pcd");
  if (p_file_indices.empty ())
  {
    print_error (" Need at least an input PCD file (e.g. scene.pcd) to continue!\n\n");
    print_info ("Ideally, need an input file, and three output PCD files, e.g., object.pcd, plane.pcd, rest.pcd\n");
    return (-1);
  }

  string object_file = "object.pcd", plane_file = "plane.pcd", rest_file = "rest.pcd";
  if (p_file_indices.size () >= 4)
    rest_file = argv[p_file_indices[3]];
  if (p_file_indices.size () >= 3)
    plane_file = argv[p_file_indices[2]];
  if (p_file_indices.size () >= 2)
    object_file = argv[p_file_indices[1]];

  PCDReader reader;
  // Test the header
  pcl::PCLPointCloud2 dummy;
  reader.readHeader (argv[p_file_indices[0]], dummy);
  if (dummy.height != 1 && getFieldIndex (dummy, "rgba") != -1)
  {
    print_highlight ("Enabling 2D image viewer mode.\n");
    ObjectSelection<PointXYZRGBA> s;
    if (!s.load (argv[p_file_indices[0]])) return (-1);
    s.initGUI ();
    s.compute ();
    s.save (object_file, plane_file);
  }
  else
  {
    ObjectSelection<PointXYZ> s;
    if (!s.load (argv[p_file_indices[0]])) return (-1);
    s.initGUI ();
    s.compute ();
    s.save (object_file, plane_file);
  }

  return (0);
}
// TODO Enum for is_test_phase
PointCloud<Signature>::Ptr
Detector::obtainFeatures(Scene &scene, PointCloud<PointNormal>::Ptr keypoints, bool is_test_phase, bool cache)
{
  if (cache == false)
  {
    PointCloud<Signature>::Ptr features = computeFeatures(scene.cloud, keypoints);
    return features;
  }
  else
  {
    std::string name_str = std::string(feature_est_->name_) + scene.id;

    if (is_test_phase) {
      name_str += "_test";
    }
    else {
      name_str += "_train";
    }

    name_str += ".pcd";

    const char *name = name_str.c_str();

    if (ifstream(name))
    {
      PointCloud<Signature>::Ptr features (new PointCloud<Signature>());
      PCDReader r;
      r.readEigen(std::string(name), *features);
      //*features = *tmp;
      //io::loadPCDFile(name, *features);
      if (features->size() != keypoints->size())
        assert(false);
      cout << "got " << features->size() << " features from " << keypoints->size() << " points" << endl;
      return features;
    }
    else
    {
      PointCloud<Signature>::Ptr features = computeFeatures(scene.cloud, keypoints);
      PCDWriter w;
      w.writeASCIIEigen(std::string(name), *features);
      return features;
    }
  }
}
int main (int argc, char **argv)
{
  if (argc != 5)
  {
    PCL_ERROR ("./surfel_smoothing_test normal_search_radius surfel_scale source_cloud destination_cloud\n");
    return (-1);
  }

  PointCloud<PointXYZ>::Ptr cloud (new PointCloud<PointXYZ> ());
  PointCloud<Normal>::Ptr normals (new PointCloud<Normal> ());
  PCDReader reader;
  reader.read (argv[3], *cloud);
  PCL_INFO ("Cloud read: %s\n", argv[3]);

  double normal_search_radius = static_cast<double> (atof (argv[1]));
  double surfel_scale = static_cast<double> (atof (argv[2]));

  NormalEstimation<PointXYZ, Normal> normal_estimation;
  normal_estimation.setInputCloud (cloud);
  search::KdTree<PointXYZ>::Ptr search_tree (new search::KdTree<PointXYZ>);
  normal_estimation.setSearchMethod (search_tree);
  normal_estimation.setRadiusSearch (normal_search_radius);
  normal_estimation.compute (*normals);

  SurfelSmoothing<PointXYZ, Normal> surfel_smoothing (surfel_scale);
  surfel_smoothing.setInputCloud (cloud);
  surfel_smoothing.setInputNormals (normals);
  surfel_smoothing.setSearchMethod (search_tree);
  PointCloud<PointXYZ>::Ptr output_positions;
  PointCloud<Normal>::Ptr output_normals;
  surfel_smoothing.computeSmoothedCloud (output_positions, output_normals);

  PointCloud<PointNormal>::Ptr output_with_normals (new PointCloud<PointNormal> ());
  pcl::concatenateFields (*output_positions, *normals, *output_with_normals);
  io::savePCDFileASCII (argv[4], *output_with_normals);

  return (0);
}
int main (int argc, char** argv)
{
  if (argc != 3)
  {
    PCL_ERROR ("Syntax: ./ppf_object_recognition pcd_model_list pcd_scene\n");
    return -1;
  }

  /// read point clouds from HDD
  PCL_INFO ("Reading scene ...\n");
  PointCloud<PointXYZ>::Ptr cloud_scene (new PointCloud<PointXYZ> ());
  PCDReader reader;
  reader.read (argv[2], *cloud_scene);
  PCL_INFO ("Scene read: %s\n", argv[2]);

  PCL_INFO ("Reading models ...\n");
  vector<PointCloud<PointXYZ>::Ptr > cloud_models;
  ifstream pcd_file_list (argv[1]);
  while (!pcd_file_list.eof())
  {
    char str[512];
    pcd_file_list.getline (str, 512);
    PointCloud<PointXYZ>::Ptr cloud (new PointCloud<PointXYZ> ());
    reader.read (str, *cloud);
    cloud_models.push_back (cloud);
    PCL_INFO ("Model read: %s\n", str);
  }

  pcl::SACSegmentation<pcl::PointXYZ> seg;
  pcl::ExtractIndices<pcl::PointXYZ> extract;
  seg.setOptimizeCoefficients (true);
  seg.setModelType (pcl::SACMODEL_PLANE);
  seg.setMethodType (pcl::SAC_RANSAC);
  seg.setMaxIterations (1000);
  seg.setDistanceThreshold (0.05);
  extract.setNegative (true);
  pcl::ModelCoefficients::Ptr coefficients (new pcl::ModelCoefficients ());
  pcl::PointIndices::Ptr inliers (new pcl::PointIndices ());
  unsigned nr_points = unsigned (cloud_scene->points.size ());
  while (cloud_scene->points.size () > 0.3 * nr_points)
  {
    seg.setInputCloud (cloud_scene);
    seg.segment (*inliers, *coefficients);
    PCL_INFO ("Plane inliers: %u\n", inliers->indices.size ());
    if (inliers->indices.size () < 50000) break;

    extract.setInputCloud (cloud_scene);
    extract.setIndices (inliers);
    extract.filter (*cloud_scene);
  }

  PointCloud<PointNormal>::Ptr cloud_scene_input = subsampleAndCalculateNormals (cloud_scene);
  vector<PointCloud<PointNormal>::Ptr > cloud_models_with_normals;

  PCL_INFO ("Training models ...\n");
  vector<PPFHashMapSearch::Ptr> hashmap_search_vector;
  for (size_t model_i = 0; model_i < cloud_models.size (); ++model_i)
  {
    PointCloud<PointNormal>::Ptr cloud_model_input = subsampleAndCalculateNormals (cloud_models[model_i]);
    cloud_models_with_normals.push_back (cloud_model_input);

    PointCloud<PPFSignature>::Ptr cloud_model_ppf (new PointCloud<PPFSignature> ());
    PPFEstimation<PointNormal, PointNormal, PPFSignature> ppf_estimator;
    ppf_estimator.setInputCloud (cloud_model_input);
    ppf_estimator.setInputNormals (cloud_model_input);
    ppf_estimator.compute (*cloud_model_ppf);

    PPFHashMapSearch::Ptr hashmap_search (new PPFHashMapSearch (12.0f / 180.0f * float (M_PI), 0.05f));
    hashmap_search->setInputFeatureCloud (cloud_model_ppf);
    hashmap_search_vector.push_back (hashmap_search);
  }

  visualization::PCLVisualizer viewer ("PPF Object Recognition - Results");
  viewer.setBackgroundColor (0, 0, 0);
  viewer.addPointCloud (cloud_scene);
  viewer.spinOnce (10);
  PCL_INFO ("Registering models to scene ...\n");
  for (size_t model_i = 0; model_i < cloud_models.size (); ++model_i)
  {
    PPFRegistration<PointNormal, PointNormal> ppf_registration;
    // set parameters for the PPF registration procedure
    ppf_registration.setSceneReferencePointSamplingRate (10);
    ppf_registration.setPositionClusteringThreshold (0.2f);
    ppf_registration.setRotationClusteringThreshold (30.0f / 180.0f * float (M_PI));
    ppf_registration.setSearchMethod (hashmap_search_vector[model_i]);
    ppf_registration.setInputCloud (cloud_models_with_normals[model_i]);
    ppf_registration.setInputTarget (cloud_scene_input);

    PointCloud<PointNormal> cloud_output_subsampled;
    ppf_registration.align (cloud_output_subsampled);

    PointCloud<PointXYZ>::Ptr cloud_output_subsampled_xyz (new PointCloud<PointXYZ> ());
    for (size_t i = 0; i < cloud_output_subsampled.points.size (); ++i)
      cloud_output_subsampled_xyz->points.push_back (PointXYZ (cloud_output_subsampled.points[i].x,
                                                               cloud_output_subsampled.points[i].y,
                                                               cloud_output_subsampled.points[i].z));

    Eigen::Matrix4f mat = ppf_registration.getFinalTransformation ();
    Eigen::Affine3f final_transformation (mat);

    // io::savePCDFileASCII ("output_subsampled_registered.pcd", cloud_output_subsampled);

    PointCloud<PointXYZ>::Ptr cloud_output (new PointCloud<PointXYZ> ());
    pcl::transformPointCloud (*cloud_models[model_i], *cloud_output, final_transformation);

    stringstream ss;
    ss << "model_" << model_i;
    visualization::PointCloudColorHandlerRandom<PointXYZ> random_color (cloud_output->makeShared ());
    viewer.addPointCloud (cloud_output, random_color, ss.str ());
    // io::savePCDFileASCII (ss.str ().c_str (), *cloud_output);
    PCL_INFO ("Showing model %s\n", ss.str ().c_str ());
  }

  PCL_INFO ("All models have been registered!\n");

  while (!viewer.wasStopped ())
  {
    viewer.spinOnce (100);
    boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  }

  return 0;
}
int main(int argc, char** argv)
{
  readOptions(argc, argv);
  boost::timer t;

  // 3D point clouds
  PointCloud<PointXYZRGB>::Ptr p(new PointCloud<PointXYZRGB>);
  PointCloud<Normal>::Ptr n(new PointCloud<Normal>);
  PointCloud<PrincipalCurvatures>::Ptr pc(new PointCloud<PrincipalCurvatures>);
  PointCloud<PointLabel>::Ptr l(new PointCloud<PointLabel>);

  PCDReader r;
  if (r.read(file_in_, *p) == -1) return(0);

  vector<PointIndices> indices;
  ifstream fs;
  string line;
  fs.open(file_cluster_.c_str());
  if (fs.is_open())
  {
    while(fs.good())
    {
      getline (fs,line);
      istringstream iss(line);
      PointIndices pi;
      do
      {
        int temp;
        iss >> temp;
        pi.indices.push_back(temp);
      } while(iss);
      if(pi.indices.size()>0) indices.push_back(pi);
    }
  }
  std::cout << "indices file read successfully" << std::endl;

  // --- Normal Estimation ---
  if (en_one_)
  {
    t.restart();
    cob_3d_features::OrganizedNormalEstimation<PointXYZRGB, Normal, PointLabel> one;
    one.setInputCloud(p);
    one.setOutputLabels(l);
    //one.setPixelSearchRadius(pns_n_,points_,circle_); //radius,pixel,circle
    one.setPixelSearchRadius(8,2,2);
    one.setSkipDistantPointThreshold(12);
    one.compute(*n);
    cout << t.elapsed() << "s\t for Organized Normal Estimation" << endl;
  }
  else
  {
    t.restart();
#ifdef PCL_VERSION_COMPARE //fuerte
    pcl::search::KdTree<PointXYZRGB>::Ptr tree (new pcl::search::KdTree<PointXYZRGB>());
#else //electric
    pcl::KdTreeFLANN<PointXYZRGB>::Ptr tree (new pcl::KdTreeFLANN<PointXYZRGB> ());
#endif
    NormalEstimation<PointXYZRGB, Normal> ne;
    ne.setRadiusSearch(rn_);
    ne.setSearchMethod(tree);
    ne.setInputCloud(p);
    ne.compute(*n);
    cout << t.elapsed() << "s\t for normal estimation" << endl;
  }

  cob_3d_features::OrganizedCurvatureEstimationOMP<PointXYZRGB, Normal, PointLabel, PrincipalCurvatures> oce;
  oce.setInputCloud(p);
  oce.setInputNormals(n);
  oce.setPixelSearchRadius(8,2,2);
  oce.setSkipDistantPointThreshold(12);
  //KdTreeFLANN<PointXYZRGB>::Ptr tree(new KdTreeFLANN<PointXYZRGB>);
  //ne.setRadiusSearch(rn_);
  //ne.setSearchMethod(tree);

  if (indices.size() == 0)
  {
    oce.setOutputLabels(l);
    oce.compute(*pc);

    cob_3d_features::CurvatureClassifier<PrincipalCurvatures, PointLabel> cc;
    cc.setInputCloud(pc);
    cc.classify(*l);
  }
  else
  {
    for(unsigned int i=0; i<indices.size(); i++)
    {
      std::cout << "cluster " << i << " has " << indices[i].indices.size() << " points" << std::endl;
      if(i==3)
      {
        /*for(unsigned int j=0; j<indices[i].indices.size(); j++)
          std::cout << indices[i].indices[j] << ",";
        std::cout << std::endl;*/
        //cout << i << ": " << indices[i].indices.front() << "!" << endl;
        t.restart();
        boost::shared_ptr<PointIndices> ind_ptr = boost::make_shared<PointIndices>(indices[i]);
        std::cout << ind_ptr->indices.size() << std::endl;
        oce.setIndices(ind_ptr);
        oce.setOutputLabels(l);
        oce.compute(*pc);
        cout << t.elapsed() << "s\t for principal curvature estimation" << endl;

        cob_3d_features::CurvatureClassifier<PrincipalCurvatures, PointLabel> cc;
        cc.setInputCloud(pc);
        cc.setIndices(ind_ptr);
        cc.classify(*l);
      }
    }
  }

  // colorize edges of 3d point cloud
  for (size_t i = 0; i < l->points.size(); i++)
  {
    //std::cout << l->points[i].label << std::endl;
    if (l->points[i].label == I_UNDEF)
    { p->points[i].r = 0; p->points[i].g = 0; p->points[i].b = 0; }
    else if (l->points[i].label == I_NAN)
    { p->points[i].r = 0; p->points[i].g = 255; p->points[i].b = 0; }
    else if (l->points[i].label == I_EDGE)
    { p->points[i].r = 0; p->points[i].g = 0; p->points[i].b = 255; }
    else if (l->points[i].label == I_BORDER)
    { p->points[i].r = 255; p->points[i].g = 0; p->points[i].b = 0; }
    else if (l->points[i].label == I_PLANE)
    { p->points[i].r = 255; p->points[i].g = 255; p->points[i].b = 0; }
    else if (l->points[i].label == I_CYL)
    { p->points[i].r = 255; p->points[i].g = 0; p->points[i].b = 255; }
    else
    { p->points[i].r = 255; p->points[i].g = 255; p->points[i].b = 255; }
  }

  visualization::PCLVisualizer v;
  v.setBackgroundColor(0,127,127);
  ColorHdlRGB col_hdl(p);
  v.addPointCloud<PointXYZRGB>(p,col_hdl, "segmented");

  while(!v.wasStopped())
  {
    v.spinOnce(100);
    usleep(100000);
  }
  return(0);
}
int main (int argc, char **argv)
{
  if (argc != 3)
  {
    PCL_ERROR ("Syntax: ./pyramid_surface_matching model_1 model_2\n");
    return -1;
  }

  /// read point clouds from HDD
  PCL_INFO ("Reading scene ...\n");
  PointCloud<PointXYZ>::Ptr cloud_a (new PointCloud<PointXYZ> ()),
      cloud_b (new PointCloud<PointXYZ> ());
  PCDReader reader;
  reader.read (argv[1], *cloud_a);
  reader.read (argv[2], *cloud_b);

  PointCloud<PointXYZ>::Ptr cloud_a_subsampled, cloud_b_subsampled;
  PointCloud<Normal>::Ptr cloud_a_subsampled_normals, cloud_b_subsampled_normals;
  subsampleAndCalculateNormals (cloud_a, cloud_a_subsampled, cloud_a_subsampled_normals);
  subsampleAndCalculateNormals (cloud_b, cloud_b_subsampled, cloud_b_subsampled_normals);
  PCL_INFO ("Finished subsampling the clouds ...\n");

  PointCloud<PPFSignature>::Ptr ppf_signature_a (new PointCloud<PPFSignature> ()),
      ppf_signature_b (new PointCloud<PPFSignature> ());
  PointCloud<PointNormal>::Ptr cloud_subsampled_with_normals_a (new PointCloud<PointNormal> ()),
      cloud_subsampled_with_normals_b (new PointCloud<PointNormal> ());
  concatenateFields (*cloud_a_subsampled, *cloud_a_subsampled_normals, *cloud_subsampled_with_normals_a);
  concatenateFields (*cloud_b_subsampled, *cloud_b_subsampled_normals, *cloud_subsampled_with_normals_b);

  PPFEstimation<PointNormal, PointNormal, PPFSignature> ppf_estimator;
  ppf_estimator.setInputCloud (cloud_subsampled_with_normals_a);
  ppf_estimator.setInputNormals (cloud_subsampled_with_normals_a);
  ppf_estimator.compute (*ppf_signature_a);
  ppf_estimator.setInputCloud (cloud_subsampled_with_normals_b);
  ppf_estimator.setInputNormals (cloud_subsampled_with_normals_b);
  ppf_estimator.compute (*ppf_signature_b);
  PCL_INFO ("Feature cloud sizes: %u , %u\n", ppf_signature_a->points.size (), ppf_signature_b->points.size ());
  PCL_INFO ("Finished calculating the features ...\n");

  vector<pair<float, float> > dim_range_input, dim_range_target;
  for (size_t i = 0; i < 3; ++i)
    dim_range_input.push_back (pair<float, float> ((float) -M_PI, (float) M_PI));
  dim_range_input.push_back (pair<float, float> (0.0f, 1.0f));
  for (size_t i = 0; i < 3; ++i)
    dim_range_target.push_back (pair<float, float> ((float) -M_PI * 10.0f, (float) M_PI * 10.0f));
  dim_range_target.push_back (pair<float, float> (0.0f, 50.0f));

  PyramidFeatureHistogram<PPFSignature>::Ptr pyramid_a (new PyramidFeatureHistogram<PPFSignature> ());
  pyramid_a->setInputCloud (ppf_signature_a);
  pyramid_a->setInputDimensionRange (dim_range_input);
  pyramid_a->setTargetDimensionRange (dim_range_target);
  pyramid_a->compute ();
  PCL_INFO ("Done with the first pyramid\n");

  PyramidFeatureHistogram<PPFSignature>::Ptr pyramid_b (new PyramidFeatureHistogram<PPFSignature> ());
  pyramid_b->setInputCloud (ppf_signature_b);
  pyramid_b->setInputDimensionRange (dim_range_input);
  pyramid_b->setTargetDimensionRange (dim_range_target);
  pyramid_b->compute ();
  PCL_INFO ("Done with the second pyramid\n");

  float value = PyramidFeatureHistogram<PPFSignature>::comparePyramidFeatureHistograms (pyramid_a, pyramid_b);
  PCL_INFO ("Surface comparison value between %s and %s is: %f\n", argv[1], argv[2], value);

  return 0;
}
int main(int argc, char** argv)
{
  readOptions(argc, argv);

  PointCloud<PointXYZRGB>::Ptr p_raw(new PointCloud<PointXYZRGB>());
  PointCloud<PointXYZRGB>::Ptr p_rsd_out(new PointCloud<PointXYZRGB>());
  PointCloud<PointXYZRGB>::Ptr p_rsd_ref(new PointCloud<PointXYZRGB>());
  PointCloud<PointXYZRGB>::Ptr p_pc_out(new PointCloud<PointXYZRGB>());
  PointCloud<PointXYZRGB>::Ptr p_pc_ref(new PointCloud<PointXYZRGB>());
  PointCloud<PointXYZRGB>::Ptr p_fpfh_out(new PointCloud<PointXYZRGB>());
  PointCloud<PointXYZRGB>::Ptr p_fpfh_ref(new PointCloud<PointXYZRGB>());

  PCDReader r;
  if(r.read(file_in_, *p_raw) == -1)
    cout << "Could not read file " << file_in_ << endl;
  cout << "Read pcd file \"" << file_in_ << "\" (Points: " << p_raw->points.size()
       << ", width: " << p_raw->width << ", height: " << p_raw->height << ")" << endl;

  cout << "Headline: \n\n" << cob_3d_mapping_common::writeHeader() << endl << endl;

  if (rsd_enable_)
    processRSD(p_raw, p_rsd_ref, p_rsd_out);
  if (pc_enable_)
    processPC(p_raw, p_pc_ref, p_pc_out);
  if (fpfh_enable_)
    processFPFH(p_raw, p_fpfh_ref, p_fpfh_out);

  if (quiet_) return (0);

  boost::shared_ptr<visualization::PCLVisualizer> v;
  if (camera_pos_ != "")
  {
    int argc_dummy = 3;
    char *argv_dummy[] = {(char*)argv[0], "-cam", (char*)camera_pos_.c_str(), NULL};
    v.reset(new visualization::PCLVisualizer(argc_dummy, argv_dummy, file_in_));
  }
  else
  {
    v.reset(new visualization::PCLVisualizer(file_in_));
  }

  /* --- Viewports: ---
   * 1y
   *    | 1 | 3 |
   * .5 ----+----
   *    | 2 | 4 |
   * 0       .5     1x
   * 1:
   */
  int v1(0);
  ColorHdlRGB col_hdl1(p_rsd_ref);
  v->createViewPort(0.0, 0.5, 0.5, 1.0, v1);
  v->setBackgroundColor(255,255,255, v1);
  v->addPointCloud<PointXYZRGB>(p_rsd_ref, col_hdl1, "raw", v1);

  // 2:
  int v2(0);
  ColorHdlRGB col_hdl2(p_fpfh_out);
  v->createViewPort(0.0, 0.0, 0.5, 0.5, v2);
  v->setBackgroundColor(255,255,255, v2);
  v->addPointCloud<PointXYZRGB>(p_fpfh_out, col_hdl2, "fpfh", v2);

  // 3:
  int v3(0);
  ColorHdlRGB col_hdl3(p_rsd_out);
  v->createViewPort(0.5, 0.5, 1.0, 1.0, v3);
  v->setBackgroundColor(255,255,255, v3);
  v->addPointCloud<PointXYZRGB>(p_rsd_out, col_hdl3, "rsd", v3);

  // 4:
  int v4(0);
  ColorHdlRGB col_hdl4(p_pc_out);
  v->createViewPort(0.5, 0.0, 1.0, 0.5, v4);
  v->setBackgroundColor(255,255,255, v4);
  v->addPointCloud<PointXYZRGB>(p_pc_out, col_hdl4, "pc", v4);

  while(!v->wasStopped())
  {
    v->spinOnce(100);
    usleep(100000);
  }
  return(0);
}
int main (int argc, char** argv)
{
  if (argc < 2)
  {
    std::cerr << "Error: Please specify a PCD file (rosrun cob_3d_features profile_ne test.pcd)." << std::endl;
    return -1;
  }

  PointCloud<PointXYZ>::Ptr p (new PointCloud<PointXYZ>);
  PointCloud<Normal>::Ptr n (new PointCloud<Normal>);
  PointCloud<InterestPoint>::Ptr ip (new PointCloud<InterestPoint>);
  pcl::PointCloud<pcl::PointNormal> p_n;
  PrecisionStopWatch t;
  std::string file_ = argv[1];

  PCDReader r;
  if (r.read (file_, *p) == -1) return -1;

  Eigen::Vector3f normal;
  determinePlaneNormal (p, normal);
  //std::cout << normal << std::endl;

  cob_3d_features::OrganizedNormalEstimation<PointXYZ, Normal, PointLabel> ne;
  ne.setPixelSearchRadius (8, 2, 2);
  //ne.setSkipDistantPointThreshold(8);
  ne.setInputCloud (p);
  PointCloud<PointLabel>::Ptr labels (new PointCloud<PointLabel>);
  ne.setOutputLabels (labels);
  t.precisionStart ();
  ne.compute (*n);
  std::cout << t.precisionStop () << "s\t for organized normal estimation" << std::endl;

  cob_3d_features::OrganizedNormalEstimationOMP<PointXYZ, Normal, PointLabel> ne_omp;
  ne_omp.setPixelSearchRadius (8, 2, 2);
  //ne.setSkipDistantPointThreshold(8);
  ne_omp.setInputCloud (p);
  //PointCloud<PointLabel>::Ptr labels(new PointCloud<PointLabel>);
  ne_omp.setOutputLabels (labels);
  t.precisionStart ();
  ne_omp.compute (*n);
  std::cout << t.precisionStop () << "s\t for organized normal estimation (OMP)" << std::endl;

  concatenateFields (*p, *n, p_n);
  io::savePCDFileASCII ("normals_organized.pcd", p_n);

  double good_thr = 0.97;
  unsigned int ctr = 0, nan_ctr = 0;
  double d_sum = 0;
  for (unsigned int i = 0; i < p->size (); i++)
  {
    if (pcl_isnan(n->points[i].normal[0]))
    {
      nan_ctr++;
      continue;
    }
    double d = normal.dot (n->points[i].getNormalVector3fMap ());
    d_sum += fabs (1 - fabs (d));
    if (fabs (d) > good_thr) ctr++;
  }
  std::cout << "Average error: " << d_sum / p->size () << std::endl;
  std::cout << "Ratio of good normals: " << (double)ctr / p->size () << std::endl;
  std::cout << "Invalid normals: " << nan_ctr << std::endl;

  IntegralImageNormalEstimation<PointXYZ, Normal> ne2;
  ne2.setNormalEstimationMethod (ne2.COVARIANCE_MATRIX);
  ne2.setMaxDepthChangeFactor (0.02f);
  ne2.setNormalSmoothingSize (10.0f);
  ne2.setDepthDependentSmoothing (true);
  ne2.setInputCloud (p);
  t.precisionStart ();
  ne2.compute (*n);
  std::cout << t.precisionStop () << "s\t for integral image normal estimation" << std::endl;
  concatenateFields (*p, *n, p_n);
  io::savePCDFileASCII ("normals_integral.pcd", p_n);

  ctr = 0;
  nan_ctr = 0;
  d_sum = 0;
  for (unsigned int i = 0; i < p->size (); i++)
  {
    if (pcl_isnan(n->points[i].normal[0]))
    {
      nan_ctr++;
      continue;
    }
    double d = normal.dot (n->points[i].getNormalVector3fMap ());
    d_sum += fabs (1 - fabs (d));
    if (fabs (d) > good_thr) ctr++;
  }
  std::cout << "Average error: " << d_sum / p->size () << std::endl;
  std::cout << "Ratio of good normals: " << (double)ctr / p->size () << std::endl;
  std::cout << "Invalid normals: " << nan_ctr << std::endl;

  NormalEstimationOMP<PointXYZ, Normal> ne3;
  ne3.setInputCloud (p);
  ne3.setNumberOfThreads (4);
  ne3.setKSearch (256);
  //ne3.setRadiusSearch(0.01);
  t.precisionStart ();
  ne3.compute (*n);
  std::cout << t.precisionStop () << "s\t for vanilla normal estimation" << std::endl;
  concatenateFields (*p, *n, p_n);
  io::savePCDFileASCII ("normals_vanilla.pcd", p_n);

  ctr = 0;
  nan_ctr = 0;
  d_sum = 0;
  for (unsigned int i = 0; i < p->size (); i++)
  {
    if (pcl_isnan(n->points[i].normal[0]))
    {
      nan_ctr++;
      continue;
    }
    double d = normal.dot (n->points[i].getNormalVector3fMap ());
    d_sum += fabs (1 - fabs (d));
    if (fabs (d) > good_thr) ctr++;
  }
  std::cout << "Average error: " << d_sum / p->size () << std::endl;
  std::cout << "Ratio of good normals: " << (double)ctr / p->size () << std::endl;
  std::cout << "Invalid normals: " << nan_ctr << std::endl;

  return 0;
}
int main(int argc, char** argv)
{
  if (argc < 2)
  {
    cout << "Input a PCD file name...\n";
    return 0;
  }

  PointCloud<PointXYZ>::Ptr cloud(new PointCloud<PointXYZ>), cloud_f(new PointCloud<PointXYZ>);
  PCDReader reader;
  reader.read(argv[1], *cloud);
  cout << "PointCloud before filtering has: " << cloud->points.size() << " data points.\n";

  // Downsample the input cloud with a voxel grid
  PointCloud<PointXYZ>::Ptr cloud_filtered(new PointCloud<PointXYZ>);
  VoxelGrid<PointXYZ> vg;
  vg.setInputCloud(cloud);
  vg.setLeafSize(0.01f, 0.01f, 0.01f);
  vg.filter(*cloud_filtered);
  cout << "PointCloud after filtering has: " << cloud_filtered->points.size() << " data points.\n";

  // Remove dominant planes until at most 30% of the downsampled points remain
  SACSegmentation<PointXYZ> seg;
  PointIndices::Ptr inliers(new PointIndices);
  PointCloud<PointXYZ>::Ptr cloud_plane(new PointCloud<PointXYZ>);
  ModelCoefficients::Ptr coefficients(new ModelCoefficients);
  seg.setOptimizeCoefficients(true);
  seg.setModelType(SACMODEL_PLANE);
  seg.setMethodType(SAC_RANSAC);
  seg.setMaxIterations(100);
  seg.setDistanceThreshold(0.02);

  int i=0, nr_points = (int)cloud_filtered->points.size();
  while (cloud_filtered->points.size() > 0.3 * nr_points)
  {
    seg.setInputCloud(cloud_filtered);
    seg.segment(*inliers, *coefficients);
    if (inliers->indices.size() == 0)
    {
      cout << "Could not estimate a planar model for the given dataset.\n";
      break;
    }

    ExtractIndices<PointXYZ> extract;
    extract.setInputCloud(cloud_filtered);
    extract.setIndices(inliers);
    extract.setNegative(false);
    extract.filter(*cloud_plane);
    cout << "PointCloud representing the planar component has: " << cloud_plane->points.size() << " data points.\n";

    extract.setNegative(true);
    extract.filter(*cloud_f);
    cloud_filtered->swap(*cloud_f);
  }

  // Cluster the remaining points
  search::KdTree<PointXYZ>::Ptr kdtree(new search::KdTree<PointXYZ>);
  kdtree->setInputCloud(cloud_filtered);

  vector<PointIndices> cluster_indices;
  EuclideanClusterExtraction<PointXYZ> ece;
  ece.setClusterTolerance(0.02);
  ece.setMinClusterSize(100);
  ece.setMaxClusterSize(25000);
  ece.setSearchMethod(kdtree);
  ece.setInputCloud(cloud_filtered);
  ece.extract(cluster_indices);

  // Write each cluster to its own PCD file
  PCDWriter writer;
  int j = 0;
  for (vector<PointIndices>::const_iterator it=cluster_indices.begin(); it != cluster_indices.end(); ++it)
  {
    PointCloud<PointXYZ>::Ptr cluster_cloud(new PointCloud<PointXYZ>);
    for (vector<int>::const_iterator pit=it->indices.begin(); pit != it->indices.end(); ++pit)
      cluster_cloud->points.push_back(cloud_filtered->points[*pit]);
    cluster_cloud->width = cluster_cloud->points.size();
    cluster_cloud->height = 1;
    cluster_cloud->is_dense = true;
    cout << "PointCloud representing a cluster has: " << cluster_cloud->points.size() << " data points.\n";

    stringstream ss;
    ss << "cloud_cluster_" << j << ".pcd";
    writer.write<PointXYZ>(ss.str(), *cluster_cloud, false);
    j++;
  }

  return 0;
}
template<typename PointT> int
pcl::PCDWriter::appendBinary(const std::string &file_name, const pcl::PointCloud<PointT> &cloud)
{
  if(cloud.empty())
  {
    throw pcl::IOException("[pcl::PCDWriter::appendBinary] Input point cloud has no data!");
    return -1;
  }

  if(!boost::filesystem::exists(file_name))
    return writeBinary(file_name, cloud);

  std::ifstream file_istream;
  file_istream.open(file_name.c_str(), std::ifstream::binary);
  if(!file_istream.good())
  {
    throw pcl::IOException("[pcl::PCDWriter::appendBinary] Error opening file for reading");
    return -1;
  }
  file_istream.seekg(0, std::ios_base::end);
  size_t file_size = file_istream.tellg();
  file_istream.close();

  pcl::PCLPointCloud2 tmp_cloud;
  PCDReader reader;
  if(reader.readHeader(file_name, tmp_cloud) != 0)
  {
    throw pcl::IOException("[pcl::PCDWriter::appendBinary] Failed reading header");
    return -1;
  }

  if(tmp_cloud.height != 1 || cloud.height != 1)
  {
    throw pcl::IOException("[pcl::PCDWriter::appendBinary] can't use appendBinary with a point cloud that "
                           "has height different than 1!");
    return -1;
  }

  tmp_cloud.width += cloud.width;
  std::ostringstream oss;
  pcl::PointCloud<PointT> tmp_cloud2;
  // copy the header values:
  tmp_cloud2.header = tmp_cloud.header;
  tmp_cloud2.width = tmp_cloud.width;
  tmp_cloud2.height = tmp_cloud.height;
  tmp_cloud2.is_dense = tmp_cloud.is_dense;

  oss << PCDWriter::generateHeader(tmp_cloud2, tmp_cloud2.width) << "DATA binary\n";
  size_t data_idx = oss.tellp();

#if _WIN32
  HANDLE h_native_file = CreateFileA (file_name.c_str (), GENERIC_READ | GENERIC_WRITE, 0, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
  if (h_native_file == INVALID_HANDLE_VALUE)
  {
    throw pcl::IOException ("[pcl::PCDWriter::appendBinary] Error during CreateFile!");
    return (-1);
  }
#else
  int fd = pcl_open (file_name.c_str (), O_RDWR | O_CREAT | O_APPEND, static_cast<mode_t> (0600));
  if (fd < 0)
  {
    throw pcl::IOException ("[pcl::PCDWriter::appendBinary] Error during open!");
    return (-1);
  }
#endif
  // Mandatory lock file
  boost::interprocess::file_lock file_lock;
  setLockingPermissions (file_name, file_lock);

  std::vector<pcl::PCLPointField> fields;
  std::vector<int> fields_sizes;
  size_t fsize = 0;
  size_t data_size = 0;
  size_t nri = 0;
  pcl::getFields (cloud, fields);
  // Compute the total size of the fields
  for (size_t i = 0; i < fields.size (); ++i)
  {
    if (fields[i].name == "_")
      continue;

    int fs = fields[i].count * getFieldSize (fields[i].datatype);
    fsize += fs;
    fields_sizes.push_back (fs);
    fields[nri++] = fields[i];
  }
  fields.resize (nri);

  data_size = cloud.points.size () * fsize;
  data_idx += (tmp_cloud.width - cloud.width) * fsize;
  if (data_idx != file_size)
  {
    const char *msg = "[pcl::PCDWriter::appendBinary] The expected data size and the current data size are different!";
    PCL_WARN(msg);
    throw pcl::IOException (msg);
    return -1;
  }

  // Prepare the map
#if _WIN32
  HANDLE fm = CreateFileMappingA (h_native_file, NULL, PAGE_READWRITE, 0, (DWORD) (data_idx + data_size), NULL);
  char *map = static_cast<char*>(MapViewOfFile (fm, FILE_MAP_READ | FILE_MAP_WRITE, 0, 0, data_idx + data_size));
  CloseHandle (fm);
#else
  // Stretch the file size to the size of the data
  off_t result = pcl_lseek (fd, getpagesize () + data_size - 1, SEEK_SET);
  if (result < 0)
  {
    pcl_close (fd);
    resetLockingPermissions (file_name, file_lock);
    PCL_ERROR ("[pcl::PCDWriter::appendBinary] lseek errno: %d strerror: %s\n", errno, strerror (errno));
    throw pcl::IOException ("[pcl::PCDWriter::appendBinary] Error during lseek ()!");
    return (-1);
  }
  // Write a bogus entry so that the new file size comes in effect
  result = static_cast<int> (::write (fd, "", 1));
  if (result != 1)
  {
    pcl_close (fd);
    resetLockingPermissions (file_name, file_lock);
    throw pcl::IOException ("[pcl::PCDWriter::appendBinary] Error during write ()!");
    return (-1);
  }

  char *map = static_cast<char*> (mmap (0, data_idx + data_size, PROT_WRITE, MAP_SHARED, fd, 0));
  if (map == reinterpret_cast<char*> (-1)) // MAP_FAILED
  {
    pcl_close (fd);
    resetLockingPermissions (file_name, file_lock);
    throw pcl::IOException ("[pcl::PCDWriter::appendBinary] Error during mmap ()!");
    return (-1);
  }
#endif

  char* out = &map[0] + data_idx;
  // Copy the data
  for (size_t i = 0; i < cloud.points.size (); ++i)
  {
    int nrj = 0;
    for (size_t j = 0; j < fields.size (); ++j)
    {
      memcpy (out, reinterpret_cast<const char*> (&cloud.points[i]) + fields[j].offset, fields_sizes[nrj]);
      out += fields_sizes[nrj++];
    }
  }

  // write the new header:
  std::string header(oss.str());
  memcpy(map, header.c_str(), header.size());

  // If the user set the synchronization flag on, call msync
#if !_WIN32
  if (map_synchronization_)
    msync (map, data_idx + data_size, MS_SYNC);
#endif

  // Unmap the pages of memory
#if _WIN32
  UnmapViewOfFile (map);
#else
  if (munmap (map, (data_idx + data_size)) == -1)
  {
    pcl_close (fd);
    resetLockingPermissions (file_name, file_lock);
    throw pcl::IOException ("[pcl::PCDWriter::writeBinary] Error during munmap ()!");
    return (-1);
  }
#endif
  // Close file
#if _WIN32
  CloseHandle (h_native_file);
#else
  pcl_close (fd);
#endif

  resetLockingPermissions (file_name, file_lock);
  return 0;
}