int main (int argc, char** argv)
{
  // Load input file into a PointCloud<T> with an appropriate type
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::PCLPointCloud2 cloud_blob;
  pcl::io::loadPCDFile ("12_inch_block_downsampled.pcd", cloud_blob);
  pcl::fromPCLPointCloud2 (cloud_blob, *cloud);
  //* the data should be available in cloud

  // Normal estimation*
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud (cloud);
  n.setInputCloud (cloud);
  n.setSearchMethod (tree);
  n.setKSearch (20);
  n.compute (*normals);
  //* normals should now contain the point normals + surface curvatures

  // Concatenate the XYZ and normal fields*
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::PointNormal>);
  pcl::concatenateFields (*cloud, *normals, *cloud_with_normals);
  //* cloud_with_normals = cloud + normals

  // Create search tree*
  pcl::search::KdTree<pcl::PointNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointNormal>);
  tree2->setInputCloud (cloud_with_normals);

  // Initialize objects
  pcl::GreedyProjectionTriangulation<pcl::PointNormal> gp3;
  pcl::PolygonMesh triangles;

  // Set the maximum distance between connected points (maximum edge length)
  gp3.setSearchRadius (0.025);

  // Set typical values for the parameters
  gp3.setMu (2.5);
  gp3.setMaximumNearestNeighbors (100);
  gp3.setMaximumSurfaceAngle(M_PI/4); // 45 degrees
  gp3.setMinimumAngle(M_PI/18);       // 10 degrees
  gp3.setMaximumAngle(2*M_PI/3);      // 120 degrees
  gp3.setNormalConsistency(false);

  // Get result
  gp3.setInputCloud (cloud_with_normals);
  gp3.setSearchMethod (tree2);
  gp3.reconstruct (triangles);

  // Additional vertex information
  std::vector<int> parts = gp3.getPartIDs();
  std::vector<int> states = gp3.getPointStates();
  for (std::size_t i = 0; i < parts.size(); i++)
  {
    std::cout << parts[i] << ", " << states[i] << std::endl;
  }

  // Finish
  return (0);
}
pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr computeNormalsSmoothed(
    const pcl::PointCloud<pcl::PointXYZRGB>::Ptr & cloud,
    float smoothingSearchRadius,
    bool smoothingPolynomialFit,
    float voxelSize)
{
  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointXYZRGBNormal>);
  pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
  tree->setInputCloud (cloud);

  // Init object (second point type is for the normals, even if unused)
  pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::PointXYZRGBNormal> mls;
  mls.setComputeNormals (true);

  // Set parameters
  mls.setInputCloud (cloud);
  mls.setPolynomialFit (smoothingPolynomialFit);
  mls.setSearchMethod (tree);
  mls.setSearchRadius (smoothingSearchRadius);
  if (voxelSize > 0.0f)
  {
    mls.setUpsamplingMethod(pcl::MovingLeastSquares<pcl::PointXYZRGB, pcl::PointXYZRGBNormal>::VOXEL_GRID_DILATION);
    mls.setDilationVoxelSize(voxelSize);
  }

  // Reconstruct (smooth the cloud and compute the normals)
  mls.process (*cloud_with_normals);

  return cloud_with_normals;
}
pcl::PointCloud<pcl::PointNormal>::Ptr computeNormals(
    const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
    int normalKSearch)
{
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud (cloud);

  // Normal estimation*
  pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
  n.setInputCloud(cloud);
  n.setSearchMethod(tree);
  n.setKSearch(normalKSearch);
  n.compute(*normals);
  //* normals should now contain the point normals + surface curvatures

  // Concatenate the XYZ and normal fields first (cloud_with_normals = cloud + normals)
  pcl::concatenateFields(*cloud, *normals, *cloud_with_normals);

  // Then filter out points whose normal is NaN (NaN compares unequal to itself)
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals_filtered(new pcl::PointCloud<pcl::PointNormal>);
  cloud_with_normals_filtered->reserve(cloud_with_normals->size());
  for (std::size_t i = 0; i < cloud_with_normals->size(); ++i)
  {
    const pcl::PointNormal& p = cloud_with_normals->at(i);
    if (p.normal_x != p.normal_x || p.normal_y != p.normal_y || p.normal_z != p.normal_z)
      continue;
    cloud_with_normals_filtered->push_back(p);
  }

  return cloud_with_normals_filtered;
}
int main (int, char** argv)
{
  std::string filename = argv[1];
  std::cout << "Reading " << filename << std::endl;

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile<pcl::PointXYZ> (filename, *cloud) == -1) // load the file
  {
    PCL_ERROR ("Couldn't read file");
    return (-1);
  }
  std::cout << "Loaded " << cloud->points.size () << " points." << std::endl;

  // Compute the normals
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normal_estimation;
  normal_estimation.setInputCloud (cloud);

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  normal_estimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::Normal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::Normal>);
  normal_estimation.setRadiusSearch (0.03);
  normal_estimation.compute (*cloud_with_normals);

  // Setup the feature computation
  pcl::PFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::PFHSignature125> pfh_estimation;
  // Provide the original point cloud (without normals)
  pfh_estimation.setInputCloud (cloud);
  // Provide the point cloud with normals
  pfh_estimation.setInputNormals (cloud_with_normals);
  // pfh_estimation.setInputWithNormals (cloud, cloud_with_normals); PFHEstimation does not have this function
  // Use the same KdTree from the normal estimation
  pfh_estimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::PFHSignature125>::Ptr pfh_features (new pcl::PointCloud<pcl::PFHSignature125>);
  pfh_estimation.setRadiusSearch (0.2);

  // Actually compute the PFH features
  pfh_estimation.compute (*pfh_features);
  std::cout << "output points.size (): " << pfh_features->points.size () << std::endl;

  // Display and retrieve the PFH descriptor for the 0th point.
  pcl::PFHSignature125 descriptor = pfh_features->points[0];
  std::cout << descriptor << std::endl;

  return 0;
}
int main (int argc, char** argv)
{
  std::string filename = argv[1];
  std::cout << "Reading " << filename << std::endl;

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile<pcl::PointXYZ> (filename, *cloud) == -1) // load the file
  {
    PCL_ERROR ("Couldn't read file");
    return (-1);
  }
  std::cout << "Loaded " << cloud->points.size () << " points." << std::endl;

  // Compute the normals
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normal_estimation;
  normal_estimation.setInputCloud (cloud);

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  normal_estimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::Normal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::Normal>);
  normal_estimation.setRadiusSearch (0.03);
  normal_estimation.compute (*cloud_with_normals);

  // Setup the principal curvatures computation
  pcl::PrincipalCurvaturesEstimation<pcl::PointXYZ, pcl::Normal, pcl::PrincipalCurvatures> principal_curvatures_estimation;
  // Provide the original point cloud (without normals)
  principal_curvatures_estimation.setInputCloud (cloud);
  // Provide the point cloud with normals
  principal_curvatures_estimation.setInputNormals (cloud_with_normals);
  // Use the same KdTree from the normal estimation
  principal_curvatures_estimation.setSearchMethod (tree);
  principal_curvatures_estimation.setRadiusSearch (1.0);

  // Actually compute the principal curvatures
  pcl::PointCloud<pcl::PrincipalCurvatures>::Ptr principal_curvatures (new pcl::PointCloud<pcl::PrincipalCurvatures> ());
  principal_curvatures_estimation.compute (*principal_curvatures);
  std::cout << "output points.size (): " << principal_curvatures->points.size () << std::endl;

  // Display and retrieve the principal curvatures descriptor for the 0th point.
  pcl::PrincipalCurvatures descriptor = principal_curvatures->points[0];
  std::cout << descriptor << std::endl;

  return 0;
}
void greedy_proj ()
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::PCLPointCloud2::Ptr cloud_blob (new pcl::PCLPointCloud2 ());
  pcl::io::loadPCDFile ("input.pcd", *cloud_blob);
  pcl::fromPCLPointCloud2 (*cloud_blob, *cloud);

  // Normal estimation
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud (cloud);
  n.setInputCloud (cloud);
  n.setSearchMethod (tree);
  n.setKSearch (20);
  n.compute (*normals);

  // Concatenate the XYZ and normal fields
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::PointNormal>);
  pcl::concatenateFields (*cloud, *normals, *cloud_with_normals);

  // Create search tree
  pcl::search::KdTree<pcl::PointNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointNormal>);
  tree2->setInputCloud (cloud_with_normals);

  // Initialize objects
  pcl::GreedyProjectionTriangulation<pcl::PointNormal> gp3;
  pcl::PolygonMesh triangles;

  // Set the maximum distance between connected points (maximum edge length)
  gp3.setSearchRadius (1);

  // Set typical values for the parameters
  gp3.setMu (2.5);
  gp3.setMaximumNearestNeighbors (10);
  gp3.setMaximumSurfaceAngle(M_PI/4); // 45 degrees
  gp3.setMinimumAngle(M_PI/18);       // 10 degrees
  gp3.setMaximumAngle(2*M_PI/3);      // 120 degrees
  gp3.setNormalConsistency(false);

  // Get result
  gp3.setInputCloud (cloud_with_normals);
  gp3.setSearchMethod (tree2);
  gp3.reconstruct (triangles);

  // Additional vertex information
  std::vector<int> parts = gp3.getPartIDs();
  std::vector<int> states = gp3.getPointStates();

  pcl::io::saveVTKFile("output.vtk", triangles);
  pcl::io::savePolygonFileSTL("output.stl", triangles);
}
void Get_ToTal_FPFH(const char *path_pcd, float normal_search_radius, float fpfh_search_radius, float a[33])
{
  std::string fileName = path_pcd;
  // std::cout << "Reading " << fileName << std::endl;

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile<pcl::PointXYZ> (fileName, *cloud) == -1) // load the file
  {
    PCL_ERROR ("Couldn't read file");
    exit(1);
  }
  // std::cout << "Loaded " << cloud->points.size () << " points." << std::endl;

  // Compute the normals
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> normal_estimation;
  normal_estimation.setInputCloud (cloud);

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  normal_estimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::Normal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::Normal>);
  normal_estimation.setRadiusSearch (normal_search_radius);
  normal_estimation.compute (*cloud_with_normals);

  // Setup the feature computation
  pcl::FPFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::FPFHSignature33> fpfh_estimation;
  // Provide the original point cloud (without normals)
  fpfh_estimation.setInputCloud (cloud);
  // Provide the point cloud with normals
  fpfh_estimation.setInputNormals (cloud_with_normals);
  // fpfh_estimation.setInputWithNormals(cloud, cloud_with_normals); FPFHEstimation does not have this function
  // Use the same KdTree from the normal estimation
  fpfh_estimation.setSearchMethod (tree);

  pcl::PointCloud<pcl::FPFHSignature33>::Ptr fpfh_features (new pcl::PointCloud<pcl::FPFHSignature33>);
  fpfh_estimation.setRadiusSearch (fpfh_search_radius);

  // Actually compute the FPFH features
  fpfh_estimation.compute (*fpfh_features);
  // std::cout << "output points.size (): " << fpfh_features->points.size () << std::endl;

  // Accumulate the per-point histograms into the 33-bin total descriptor a[]
  for (int j = 0; j < 33; j++)
    a[j] = 0.0f;
  for (std::size_t i = 0; i < fpfh_features->points.size(); i++)
  {
    const pcl::FPFHSignature33& descriptor = fpfh_features->points[i];
    for (int j = 0; j < 33; j++)
      a[j] += descriptor.histogram[j];
  }
}
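/*
 * For reference, a minimal sketch of how Get_ToTal_FPFH() might be driven.
 * The file name "object.pcd" and the two radii are placeholders, not values
 * taken from the original code; the function above is assumed to be visible
 * from this translation unit.
 */
#include <cstdio>

int main ()
{
  float histogram[33] = {0.0f};
  Get_ToTal_FPFH ("object.pcd", 0.05f, 0.25f, histogram);

  // Print the accumulated 33-bin FPFH descriptor
  for (int j = 0; j < 33; j++)
    printf ("%f ", histogram[j]);
  printf ("\n");
  return 0;
}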
pcl::PolygonMesh RScloud::triangulate_cloud()
{
  pcl::PointCloud<pointT>::Ptr cloudfiltered(new pcl::PointCloud<pointT>);
  pcl::StatisticalOutlierRemoval<pointT> sor;
  sor.setInputCloud (pointcloud);
  sor.setMeanK (50);
  sor.setStddevMulThresh (2.5);
  sor.filter (*cloudfiltered);

  // Normal estimation*
  pcl::NormalEstimation<pointT, pointTnormal> n;
  pcl::PointCloud<pointTnormal>::Ptr normals (new pcl::PointCloud<pointTnormal>);
  pcl::search::KdTree<pointT>::Ptr tree (new pcl::search::KdTree<pointT>);
  tree->setInputCloud (cloudfiltered);
  n.setInputCloud (cloudfiltered);
  n.setSearchMethod (tree);
  n.setKSearch (50);
  n.compute (*normals);

  // Concatenate the XYZ and normal fields*
  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::PointXYZRGBNormal>);
  pcl::concatenateFields (*cloudfiltered, *normals, *cloud_with_normals);

  pcl::search::KdTree<pcl::PointXYZRGBNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointXYZRGBNormal>);
  tree2->setInputCloud (cloud_with_normals);

  // Initialize objects
  pcl::GreedyProjectionTriangulation<pcl::PointXYZRGBNormal> gp3;
  pcl::PolygonMesh triangles;

  // Set the maximum distance between connected points (maximum edge length)
  gp3.setSearchRadius (0.025);

  // Set typical values for the parameters
  gp3.setMu (2.5);
  gp3.setMaximumNearestNeighbors (100);
  gp3.setMaximumSurfaceAngle(M_PI/4); // 45 degrees
  gp3.setMinimumAngle(M_PI/18);       // 10 degrees
  gp3.setMaximumAngle(2*M_PI/3);      // 120 degrees
  gp3.setNormalConsistency(false);
  gp3.setConsistentVertexOrdering(true);

  // Get result
  gp3.setInputCloud (cloud_with_normals);
  gp3.setSearchMethod (tree2);
  gp3.reconstruct (triangles);

  // Additional vertex information
  std::vector<int> parts = gp3.getPartIDs();
  std::vector<int> states = gp3.getPointStates();

  return triangles;
}
void FSModel::convertPointCloudToSurfaceMesh2()
{
  // Copy the model's point cloud into a PointCloud<T> with an appropriate type
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  cloud->points.resize(pointCloud->size());
  for (size_t i = 0; i < pointCloud->points.size(); i++)
  {
    cloud->points[i].x = pointCloud->points[i].x;
    cloud->points[i].y = pointCloud->points[i].y;
    cloud->points[i].z = pointCloud->points[i].z;
  }

  // Normal estimation*
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud (cloud);
  n.setInputCloud (cloud);
  n.setSearchMethod (tree);
  //n.setRadiusSearch(15.0);
  n.setKSearch (20);
  n.compute (*normals);
  //* normals should now contain the point normals + surface curvatures

  // Concatenate the XYZ and normal fields*
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::PointNormal>);
  pcl::concatenateFields (*cloud, *normals, *cloud_with_normals);

  // Poisson surface reconstruction
  pcl::Poisson<pcl::PointNormal> poisson;
  poisson.setDepth(9);
  poisson.setDegree(2);
  poisson.setSamplesPerNode(1);
  poisson.setScale(1.25);
  poisson.setIsoDivide(8);
  poisson.setConfidence(0);
  poisson.setManifold(0);
  poisson.setOutputPolygons(0);
  poisson.setSolverDivide(8);
  poisson.setInputCloud(cloud_with_normals);
  poisson.reconstruct (triangles);
}
pcl::PolygonMesh createMesh( pcl::PointCloud<PointType>::Ptr cloud )
{
  pcl::console::print_info( "createMesh" );

  // Obtain the normals (MLS smoothing with normal computation)
  pcl::PointCloud<NormalType>::Ptr cloud_with_normals( new pcl::PointCloud<NormalType> );
  pcl::MovingLeastSquares<PointType, NormalType> mls;
  // The KdTree must be templated on the input point type
  pcl::search::KdTree<PointType>::Ptr tree( new pcl::search::KdTree<PointType> );
  mls.setComputeNormals( true );
  mls.setInputCloud( cloud );
  mls.setPolynomialFit( true );
  mls.setSearchMethod( tree );
  mls.setSearchRadius( 0.03 );
  mls.process( *cloud_with_normals );

  pcl::search::KdTree<NormalType>::Ptr tree2( new pcl::search::KdTree<NormalType> );

  // Triangulate the cloud
  pcl::GreedyProjectionTriangulation<NormalType> gp3;
  gp3.setSearchRadius( 0.025 );
  gp3.setMu( 2.5 );
  gp3.setMaximumNearestNeighbors( 100 );
  gp3.setMaximumSurfaceAngle( M_PI / 4 );
  gp3.setMinimumAngle( M_PI / 18 );
  gp3.setMaximumAngle( 2 * M_PI / 3 );
  gp3.setNormalConsistency( false );

  // Get the result
  pcl::PolygonMesh triangles;
  gp3.setInputCloud( cloud_with_normals );
  gp3.setSearchMethod( tree2 );
  gp3.reconstruct( triangles );

  return triangles;
}
pcl::PointCloud<pcl::PointNormal>::Ptr computeNormals(
    const pcl::PointCloud<pcl::PointXYZ>::Ptr & cloud,
    int normalKSearch)
{
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud (cloud);

  // Normal estimation*
  pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  n.setInputCloud (cloud);
  n.setSearchMethod (tree);
  n.setKSearch (normalKSearch);
  n.compute (*normals);
  //* normals should now contain the point normals + surface curvatures

  // Concatenate the XYZ and normal fields*
  pcl::concatenateFields (*cloud, *normals, *cloud_with_normals);
  //* cloud_with_normals = cloud + normals

  return cloud_with_normals;
}
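/*
 * A note on headers: the normal-estimation helpers above typically rely on the
 * PCL includes below. This is a sketch for orientation only; exact header
 * paths can vary slightly between PCL versions.
 */
#include <pcl/point_types.h>
#include <pcl/features/normal_3d_omp.h>   // pcl::NormalEstimationOMP
#include <pcl/search/kdtree.h>            // pcl::search::KdTree
#include <pcl/common/io.h>                // pcl::concatenateFields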
//--------------------------------------------------------------
void ofApp::setup(){
  ofSetLogLevel(OF_LOG_VERBOSE);
  glEnable(GL_DEPTH_TEST);

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
  vector<pcl::PointCloud<pcl::PointXYZ>::Ptr> clouds;

  mesh.load(ofToDataPath("out.ply"));
  cloud = ofxPCL::toPCL<ofxPCL::PointXYZCloud>(mesh);

  textures.resize(2);
  textures.at(0).loadImage(ofToDataPath("tex0.jpg"));
  textures.at(1).loadImage(ofToDataPath("tex1.jpg"));

  // Plane segmentation parameters
  pcl::SacModel model_type = pcl::SACMODEL_PLANE;
  float distance_threshold = 30;
  int min_points_limit = 10;
  int max_segment_count = 30;

  pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients());
  pcl::PointIndices::Ptr inliers(new pcl::PointIndices());

  pcl::SACSegmentation<pcl::PointXYZ> seg;
  seg.setOptimizeCoefficients(false);
  seg.setModelType(model_type);
  seg.setMethodType(pcl::SAC_RANSAC);
  seg.setDistanceThreshold(distance_threshold);
  seg.setMaxIterations(500);

  pcl::PointCloud<pcl::PointXYZ>::Ptr temp(new pcl::PointCloud<pcl::PointXYZ>(*cloud));
  const size_t original_size = temp->points.size();

  pcl::ExtractIndices<pcl::PointXYZ> extract;
  int segment_count = 0;

  // Repeatedly extract planes until only 30% of the original points remain
  while (temp->size() > original_size * 0.3)
  {
    if (segment_count > max_segment_count) break;
    segment_count++;

    seg.setInputCloud(temp);
    seg.segment(*inliers, *coefficients);

    if (inliers->indices.size() < min_points_limit) break;

    pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_point_cloud(new pcl::PointCloud<pcl::PointXYZ>);
    extract.setInputCloud(temp);
    extract.setIndices(inliers);
    extract.setNegative(false);
    extract.filter(*filtered_point_cloud);
    if (filtered_point_cloud->points.size() > 0)
    {
      clouds.push_back(filtered_point_cloud);
    }

    extract.setNegative(true);
    extract.filter(*temp);

    ofMesh m;
    pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);
    ofxPCL::normalEstimation(filtered_point_cloud, cloud_with_normals);
    m = ofxPCL::triangulate(cloud_with_normals, 100);
    m.clearColors();

    jointMesh.addVertices(m.getVertices());
    jointMesh.addColors(m.getColors());

    ofVec3f center = m.getCentroid();
    ofVec3f planeNormal(coefficients->values[0], coefficients->values[1], coefficients->values[2]);
    ofVec3f tangent, bitangent;
    ofVec3f arb(0, 1, 0);
    tangent = arb.cross(planeNormal).normalize();
    bitangent = planeNormal.cross(tangent).normalize();

    // Planar texture coordinates from the plane's tangent frame
    for (int j = 0; j < m.getNumVertices(); j++)
    {
      float x = m.getVertex(j).dot(tangent) * 0.001;
      x = x - (long)x;
      if (x < 0) x += 1;
      float y = m.getVertex(j).dot(bitangent) * 0.001;
      y = y - (long)y;
      if (y < 0) y += 1;
      m.addTexCoord(ofVec2f(x, y));
    }

    meshes.push_back(m);
  }

  ofxPCL::convert(temp, meshResidual);

  ofLogVerbose() << clouds.size() << " meshes extracted";

  center = jointMesh.getCentroid();
}
int main (int argc, char** argv)
{
  if (argc < 2)
  {
    std::cout << "need ply/pcd file. " << std::endl;
    return -1;
  }

  // Determine the file format from the extension
  char tmpStr[100];
  strcpy(tmpStr, argv[1]);
  char* pext = strrchr(tmpStr, '.');
  std::string extply("ply");
  std::string extpcd("pcd");
  if (pext)
  {
    *pext = '\0';
    pext++;
  }
  std::string ext(pext ? pext : "");   // guard against a file name without an extension

  // Exit if the file format is not supported
  if (!((ext == extply) || (ext == extpcd)))
  {
    std::cout << "Unsupported file format!" << std::endl;
    std::cout << "Supported formats: *.pcd and *.ply!" << std::endl;
    return (-1);
  }

  // Choose the loader according to the file format
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>); // point cloud object that stores the input
  if (ext == extply)
  {
    if (pcl::io::loadPLYFile(argv[1], *cloud) == -1)
    {
      PCL_ERROR("Could not read ply file!\n");
      return -1;
    }
  }
  else
  {
    if (pcl::io::loadPCDFile(argv[1], *cloud) == -1)
    {
      PCL_ERROR("Could not read pcd file!\n");
      return -1;
    }
  }

  // Estimate normals: x,y,z + normal + curvature
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud(cloud);
  n.setInputCloud(cloud);
  n.setSearchMethod(tree);
  n.setKSearch(20);      // 20 neighbors
  n.compute (*normals);  // compute the normals; the result is stored in normals
  //* normals now contains the point normals + surface curvatures

  // Put the point cloud and the normals together
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::PointNormal>);
  pcl::concatenateFields (*cloud, *normals, *cloud_with_normals);
  //* cloud_with_normals = cloud + normals

  // Create a search tree
  pcl::search::KdTree<pcl::PointNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointNormal>);
  tree2->setInputCloud (cloud_with_normals);

  // Initialize the MarchingCubes object and set its parameters
  pcl::MarchingCubes<pcl::PointNormal> *mc;
  mc = new pcl::MarchingCubesHoppe<pcl::PointNormal> ();
  /*
  if (hoppe_or_rbf == 0)
    mc = new pcl::MarchingCubesHoppe<pcl::PointNormal> ();
  else
  {
    mc = new pcl::MarchingCubesRBF<pcl::PointNormal> ();
    (reinterpret_cast<pcl::MarchingCubesRBF<pcl::PointNormal>*> (mc))->setOffSurfaceDisplacement (off_surface_displacement);
  }
  */

  // Create the polygon mesh that stores the result
  pcl::PolygonMesh mesh;

  // Set the MarchingCubes parameters
  mc->setIsoLevel (0.0f);
  mc->setGridResolution (50, 50, 50);
  mc->setPercentageExtendGrid (0.0f);

  // Set the input cloud
  mc->setInputCloud (cloud_with_normals);

  // Run the reconstruction; the result is stored in mesh
  mc->reconstruct (mesh);

  // Save the mesh
  pcl::io::savePLYFile("result2.ply", mesh);

  // Display the result
  boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer ("3D Viewer"));
  viewer->setBackgroundColor (0, 0, 0);   // set the background
  viewer->addPolygonMesh(mesh, "my");     // add the mesh to display
  //viewer->addCoordinateSystem (1.0);    // add a coordinate system
  viewer->initCameraParameters ();
  while (!viewer->wasStopped ())
  {
    viewer->spinOnce (100);
    boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  }

  return (0);
}
PointViewSet PoissonFilter::run(PointViewPtr input)
{
  PointViewPtr output = input->makeNew();
  PointViewSet viewSet;
  viewSet.insert(output);

  bool logOutput = log()->getLevel() > LogLevel::Debug1;
  if (logOutput)
    log()->floatPrecision(8);
  log()->get(LogLevel::Debug2) << "Process PoissonFilter..." << std::endl;

  BOX3D buffer_bounds;
  input->calculateBounds(buffer_bounds);

  // convert PointView to PointNormal
  typedef pcl::PointCloud<pcl::PointXYZ> Cloud;
  Cloud::Ptr cloud(new Cloud);
  pclsupport::PDALtoPCD(input, *cloud, buffer_bounds);

  pclsupport::setLogLevel(log()->getLevel());

  // pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree;
  pcl::search::KdTree<pcl::PointNormal>::Ptr tree2;

  // Create search tree
  tree.reset(new pcl::search::KdTree<pcl::PointXYZ> (false));
  tree->setInputCloud(cloud);

  // Normal estimation
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal> ());
  n.setInputCloud(cloud);
  n.setSearchMethod(tree);
  n.setKSearch(20);
  n.compute(*normals);

  // Concatenate XYZ and normal information
  pcl::concatenateFields(*cloud, *normals, *cloud_with_normals);

  // Create search tree
  tree2.reset(new pcl::search::KdTree<pcl::PointNormal>);
  tree2->setInputCloud(cloud_with_normals);

  // initial setup
  pcl::Poisson<pcl::PointNormal> p;
  p.setInputCloud(cloud_with_normals);
  p.setSearchMethod(tree2);
  p.setDepth(m_depth);
  p.setPointWeight(m_point_weight);

  // create PointCloud for results
  pcl::PolygonMesh grid;
  p.reconstruct(grid);

  Cloud::Ptr cloud_f(new Cloud);
  pcl::fromPCLPointCloud2(grid.cloud, *cloud_f);

  if (cloud_f->points.empty())
  {
    log()->get(LogLevel::Debug2) << "Filtered cloud has no points!" << std::endl;
    return viewSet;
  }

  pclsupport::PCDtoPDAL(*cloud_f, output, buffer_bounds);

  log()->get(LogLevel::Debug2) << cloud->points.size() << " before, "
                               << cloud_f->points.size() << " after" << std::endl;
  log()->get(LogLevel::Debug2) << output->size() << std::endl;

  return viewSet;
}
int main(int argc, char** argv)
{
  if (argc <= 2)
  {
    std::cerr << "You must supply a pcd filename and an output filename" << std::endl;
    return 1;
  }

  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);
  pcl::io::loadPCDFile<pcl::PointNormal>(std::string(argv[1]), *cloud_with_normals);
  std::cout << cloud_with_normals->points.size() << " points in " << std::string(argv[1]) << std::endl;

  pcl::search::KdTree<pcl::PointNormal>::Ptr tree (new pcl::search::KdTree<pcl::PointNormal>);
  tree->setInputCloud(cloud_with_normals);

  pcl::Poisson<pcl::PointNormal> poisson;
  pcl::PolygonMesh triangles;
  poisson.setDepth(10);
  poisson.setInputCloud(cloud_with_normals);
  poisson.setSearchMethod(tree);
  poisson.reconstruct(triangles);
  std::cout << triangles.polygons.size() << " polygons in reconstructed mesh" << std::endl;

  pcl::PointCloud<pcl::PointXYZ>::Ptr mesh_cloud(new pcl::PointCloud<pcl::PointXYZ>);
  pcl::fromROSMsg(triangles.cloud, *mesh_cloud);

  FILE* mesh_file = fopen(argv[2], "w");
  if (!mesh_file)
  {
    std::cerr << "ERROR: Could not open file " << std::string(argv[2]) << std::endl;
    return 2;
  }

  // Write the mesh as a Wavefront OBJ file: vertices first, then faces (1-based indices)
  for (pcl::PointCloud<pcl::PointXYZ>::const_iterator point = mesh_cloud->points.begin(); point != mesh_cloud->points.end(); point++)
  {
    fprintf(mesh_file, "v %f %f %f\n", point->x, point->y, point->z);
  }
  fprintf(mesh_file, "\n");
  for (std::vector<pcl::Vertices>::const_iterator polygon = triangles.polygons.begin(); polygon != triangles.polygons.end(); polygon++)
  {
    fprintf(mesh_file, "f %d %d %d\n", polygon->vertices[0]+1, polygon->vertices[1]+1, polygon->vertices[2]+1);
  }
  fclose(mesh_file);

  return 0;
}
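/*
 * As an alternative to writing OBJ lines by hand as above, PCL ships its own
 * OBJ writer. A minimal sketch, assuming the installed PCL provides
 * pcl/io/obj_io.h (available in recent releases); the helper name below is
 * illustrative only.
 */
#include <string>
#include <pcl/PolygonMesh.h>
#include <pcl/io/obj_io.h>

// Save a reconstructed PolygonMesh to a Wavefront OBJ file using PCL's writer.
void saveMeshAsObj (const pcl::PolygonMesh & mesh, const std::string & path)
{
  pcl::io::saveOBJFile (path, mesh);
}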
int main (int argc, char** argv)
{
  // Data containers used
  pcl::PointCloud<PointTypeIO>::Ptr cloud_in (new pcl::PointCloud<PointTypeIO>), cloud_out (new pcl::PointCloud<PointTypeIO>);
  pcl::PointCloud<PointTypeFull>::Ptr cloud_with_normals (new pcl::PointCloud<PointTypeFull>);
  pcl::IndicesClustersPtr clusters (new pcl::IndicesClusters), small_clusters (new pcl::IndicesClusters), large_clusters (new pcl::IndicesClusters);
  pcl::search::KdTree<PointTypeIO>::Ptr search_tree (new pcl::search::KdTree<PointTypeIO>);
  pcl::console::TicToc tt;

  // Load the input point cloud
  std::cerr << "Loading...\n", tt.tic ();
  pcl::io::loadPCDFile (argv[1], *cloud_in);
  std::cerr << ">> Done: " << tt.toc () << " ms, " << cloud_in->points.size () << " points\n";

  // Downsample the cloud using a Voxel Grid class
  std::cerr << "Downsampling...\n", tt.tic ();
  pcl::VoxelGrid<PointTypeIO> vg;
  vg.setInputCloud (cloud_in);
  vg.setLeafSize (80.0, 80.0, 80.0);
  vg.setDownsampleAllData (true);
  vg.filter (*cloud_out);
  std::cerr << ">> Done: " << tt.toc () << " ms, " << cloud_out->points.size () << " points\n";

  // Set up a Normal Estimation class and merge data in cloud_with_normals
  std::cerr << "Computing normals...\n", tt.tic ();
  pcl::copyPointCloud (*cloud_out, *cloud_with_normals);
  pcl::NormalEstimation<PointTypeIO, PointTypeFull> ne;
  ne.setInputCloud (cloud_out);
  ne.setSearchMethod (search_tree);
  ne.setRadiusSearch (300.0);
  ne.compute (*cloud_with_normals);
  std::cerr << ">> Done: " << tt.toc () << " ms\n";

  // Set up a Conditional Euclidean Clustering class
  std::cerr << "Segmenting to clusters...\n", tt.tic ();
  pcl::ConditionalEuclideanClustering<PointTypeFull> cec (true);
  cec.setInputCloud (cloud_with_normals);
  cec.setConditionFunction (&customRegionGrowing);
  cec.setClusterTolerance (500.0);
  cec.setMinClusterSize (cloud_with_normals->points.size () / 1000);
  cec.setMaxClusterSize (cloud_with_normals->points.size () / 5);
  cec.segment (*clusters);
  cec.getRemovedClusters (small_clusters, large_clusters);
  std::cerr << ">> Done: " << tt.toc () << " ms\n";

  // Using the intensity channel for lazy visualization of the output
  for (int i = 0; i < small_clusters->size (); ++i)
    for (int j = 0; j < (*small_clusters)[i].indices.size (); ++j)
      cloud_out->points[(*small_clusters)[i].indices[j]].intensity = -2.0;
  for (int i = 0; i < large_clusters->size (); ++i)
    for (int j = 0; j < (*large_clusters)[i].indices.size (); ++j)
      cloud_out->points[(*large_clusters)[i].indices[j]].intensity = +10.0;
  for (int i = 0; i < clusters->size (); ++i)
  {
    int label = rand () % 8;
    for (int j = 0; j < (*clusters)[i].indices.size (); ++j)
      cloud_out->points[(*clusters)[i].indices[j]].intensity = label;
  }

  // Save the output point cloud
  std::cerr << "Saving...\n", tt.tic ();
  pcl::io::savePCDFile ("output.pcd", *cloud_out);
  std::cerr << ">> Done: " << tt.toc () << " ms\n";

  return (0);
}
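/*
 * The clustering snippet above references customRegionGrowing() without
 * defining it. A hypothetical condition function is sketched below: PCL calls
 * it with two candidate points and their squared distance and expects `true`
 * when the points may share a cluster. The 30-degree threshold is purely
 * illustrative, not taken from the original code.
 */
#include <cmath>

bool customRegionGrowing (const PointTypeFull& point_a, const PointTypeFull& point_b, float /*squared_distance*/)
{
  // Group two points when their estimated normals are roughly parallel.
  float cos_angle = point_a.getNormalVector3fMap ().dot (point_b.getNormalVector3fMap ());
  return std::fabs (cos_angle) > std::cos (30.0f / 180.0f * static_cast<float> (M_PI));
}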
/**
 * Finalizing the calculation while feeding the cloud with the
 * normals information.
 */
PointCloud<PointNormal>::Ptr NormalCalculator::mergeCloudWithNormals()
{
  PointCloud<PointNormal>::Ptr cloud_with_normals (new PointCloud<PointNormal>);
  concatenateFields(*_cloud, *_normals, *cloud_with_normals);
  return cloud_with_normals;
}
void FSModel::convertPointCloudToSurfaceMesh()
{
  // Load input file into a PointCloud<T> with an appropriate type
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  //sensor_msgs::PointCloud2 cloud_blob;
  //pcl::io::loadPCDFile ("bearHigh.pcd", cloud_blob);
  //pcl::fromROSMsg (cloud_blob, *cloud);
  //* the data should be available in cloud

  cloud->points.resize(pointCloud->size());
  for (size_t i = 0; i < pointCloud->points.size(); i++)
  {
    cloud->points[i].x = pointCloud->points[i].x;
    cloud->points[i].y = pointCloud->points[i].y;
    cloud->points[i].z = pointCloud->points[i].z;
  }

  // Normal estimation*
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud (cloud);
  n.setInputCloud (cloud);
  n.setSearchMethod (tree);
  //n.setRadiusSearch(15.0);
  n.setKSearch (20);
  n.compute (*normals);
  //* normals should now contain the point normals + surface curvatures

  // Concatenate the XYZ and normal fields*
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::PointNormal>);
  pcl::concatenateFields (*cloud, *normals, *cloud_with_normals);
  //* cloud_with_normals = cloud + normals

  // Create search tree*
  pcl::search::KdTree<pcl::PointNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointNormal>);
  tree2->setInputCloud (cloud_with_normals);

  // Initialize objects
  pcl::GreedyProjectionTriangulation<pcl::PointNormal> gp3;

  // Set the maximum distance between connected points (maximum edge length)
  gp3.setSearchRadius (15.00);

  // Set typical values for the parameters
  gp3.setMu (2.5);
  gp3.setMaximumNearestNeighbors (100);
  gp3.setMaximumSurfaceAngle(M_PI/4); // 45 degrees
  gp3.setMinimumAngle(M_PI/18);       // 10 degrees
  gp3.setMaximumAngle(2*M_PI/3);      // 120 degrees
  gp3.setNormalConsistency(false);

  // Get result
  gp3.setInputCloud (cloud_with_normals);
  gp3.setSearchMethod (tree2);
  gp3.reconstruct (triangles);

  // Additional vertex information
  std::vector<int> parts = gp3.getPartIDs();
  std::vector<int> states = gp3.getPointStates();

  pcl::io::savePLYFile("mesh.ply", triangles);
}
int main(int argc, char** argv)
{
  if (argc <= 2)
  {
    std::cerr << "You must supply a pcd filename and output filename" << std::endl;
    return 1;
  }

  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);
  pcl::io::loadPCDFile<pcl::PointNormal>(std::string(argv[1]), *cloud_with_normals);
  std::cout << cloud_with_normals->points.size() << " points in " << std::string(argv[1]) << std::endl;

  pcl::search::KdTree<pcl::PointNormal>::Ptr tree (new pcl::search::KdTree<pcl::PointNormal>);
  tree->setInputCloud(cloud_with_normals);

  pcl::GreedyProjectionTriangulation<pcl::PointNormal> gp3;
  pcl::PolygonMesh triangles;
  gp3.setSearchRadius (0.025);
  gp3.setMu(2.5);
  gp3.setMaximumNearestNeighbors(100);
  gp3.setMaximumSurfaceAngle(M_PI/4);
  gp3.setMinimumAngle(M_PI/18);
  gp3.setMaximumAngle(2*M_PI/3);
  gp3.setNormalConsistency(false);
  gp3.setInputCloud(cloud_with_normals);
  gp3.setSearchMethod(tree);
  gp3.reconstruct(triangles);
  std::cout << triangles.polygons.size() << " polygons in reconstructed mesh" << std::endl;

  pcl::PointCloud<pcl::PointXYZ>::Ptr mesh_cloud(new pcl::PointCloud<pcl::PointXYZ>);
  pcl::fromROSMsg(triangles.cloud, *mesh_cloud);

  FILE* mesh_file = fopen(argv[2], "w");
  if (!mesh_file)
  {
    std::cerr << "ERROR: Could not open file " << std::string(argv[2]) << std::endl;
    return 2;
  }

  // Write the mesh as a Wavefront OBJ file: vertices first, then faces (1-based indices)
  for (pcl::PointCloud<pcl::PointXYZ>::const_iterator point = mesh_cloud->points.begin(); point != mesh_cloud->points.end(); point++)
  {
    fprintf(mesh_file, "v %f %f %f\n", point->x, point->y, point->z);
  }
  fprintf(mesh_file, "\n");
  for (std::vector<pcl::Vertices>::const_iterator polygon = triangles.polygons.begin(); polygon != triangles.polygons.end(); polygon++)
  {
    fprintf(mesh_file, "f %d %d %d\n", polygon->vertices[0]+1, polygon->vertices[1]+1, polygon->vertices[2]+1);
  }
  fclose(mesh_file);

  return 0;
}
int main (int argc, char** argv)
{
  /* DOWNSAMPLING *****************************************************************************/
  std::ofstream output_file("properties.txt");
  std::ofstream curvature("curvature.txt");
  std::ofstream normals_text("normals.txt");
  /*
  std::cerr << "PointCloud before filtering: " << cloud->width * cloud->height
            << " data points (" << pcl::getFieldsList (*cloud) << ")." << std::endl;

  // Create the filtering object
  pcl::VoxelGrid<pcl::PCLPointCloud2> sor;
  sor.setInputCloud (cloud);
  sor.setLeafSize (0.01f, 0.01f, 0.01f);
  sor.filter (*cloud_filtered);
  output_file << "Number of filtered points" << std::endl;
  output_file << cloud_filtered->width * cloud_filtered->height << std::endl;
  std::cerr << "PointCloud after filtering: " << cloud_filtered->width * cloud_filtered->height
            << " data points (" << pcl::getFieldsList (*cloud_filtered) << ")." << std::endl;

  pcl::PointCloud<pcl::PointXYZ>::Ptr descriptor (new pcl::PointCloud<pcl::PointXYZ>);
  descriptor->width = 5000;
  descriptor->height = 1;
  descriptor->points.resize (descriptor->width * descriptor->height);
  std::cerr << descriptor->points.size() << std::endl;

  pcl::PCDWriter writer;
  writer.write ("out.pcd", *cloud_filtered, Eigen::Vector4f::Zero (), Eigen::Quaternionf::Identity (), false);
  */

  /* CALCULATING VOLUME AND SURFACE AREA ******************************************************/
  /*
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_hull (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::ConcaveHull<pcl::PointXYZ> chull;
  chull.setInputCloud(test);
  chull.reconstruct(*cloud_hull);
  */
  /*
  for (size_t i = 0; i < test->points.size (); i++)
  {
    std::cout << test->points[i].x << std::endl;
    std::cout << test->points[i].y << std::endl;
    std::cout << test->points[i].z << std::endl;
  }
  */
  //std::cout<<data.size()<<std::endl;

  // Load input file into a PointCloud<T> with an appropriate type
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::PCLPointCloud2 cloud_blob;
  pcl::io::loadPCDFile ("mini_soccer_ball_downsampled.pcd", cloud_blob);
  pcl::fromPCLPointCloud2 (cloud_blob, *cloud);
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud1 (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::fromPCLPointCloud2 (cloud_blob, *cloud1);
  //* the data should be available in cloud

  // Normal estimation*
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud (cloud);
  n.setInputCloud (cloud);
  n.setSearchMethod (tree);
  n.setKSearch (20);
  n.compute (*normals);
  //* normals should now contain the point normals + surface curvatures

  // Concatenate the XYZ and normal fields*
  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals (new pcl::PointCloud<pcl::PointNormal>);
  pcl::concatenateFields (*cloud, *normals, *cloud_with_normals);
  //* cloud_with_normals = cloud + normals

  // Create search tree*
  pcl::search::KdTree<pcl::PointNormal>::Ptr tree2 (new pcl::search::KdTree<pcl::PointNormal>);
  tree2->setInputCloud (cloud_with_normals);

  // Initialize objects
  pcl::GreedyProjectionTriangulation<pcl::PointNormal> gp3;
  pcl::PolygonMesh triangles;

  // Set the maximum distance between connected points (maximum edge length)
  gp3.setSearchRadius (0.025);

  // Set typical values for the parameters
  gp3.setMu (2.5);
  gp3.setMaximumNearestNeighbors (100);
  gp3.setMaximumSurfaceAngle(M_PI/4); // 45 degrees
  gp3.setMinimumAngle(M_PI/18);       // 10 degrees
  gp3.setMaximumAngle(2*M_PI/3);      // 120 degrees
  gp3.setNormalConsistency(false);

  // Get result
  gp3.setInputCloud (cloud_with_normals);
  gp3.setSearchMethod (tree2);
  gp3.reconstruct (triangles);

  // Additional vertex information
  std::vector<int> parts = gp3.getPartIDs();
  std::vector<int> states = gp3.getPointStates();

  // Extract the mesh vertices directly from triangles.cloud
  // (wrapping the stack object in a shared_ptr would lead to a double free)
  pcl::PointCloud<pcl::PointXYZ>::Ptr triangle_cloud(new pcl::PointCloud<pcl::PointXYZ>);
  pcl::fromPCLPointCloud2(triangles.cloud, *triangle_cloud);
  /*
  for(int i = 0; i < 2; i++){ std::cout << triangles.polygons[i] << std::endl; }
  */
  std::cout << "first vertex " << triangles.polygons[0].vertices[0] << std::endl;
  //std::cout<<"Prolly not gonna work "<<triangle_cloud->points[triangles.polygons[0].vertices[0]] << std::endl;
  std::cout << "size of points " << triangle_cloud->points.size() << std::endl;
  std::cout << triangle_cloud->points[0] << std::endl;
  for (unsigned i = 0; i < triangle_cloud->points.size(); i++)
  {
    std::cout << triangle_cloud->points[i].getVector3fMap() << "test" << std::endl;
  }
  //std::cout<<"surface: "<<calculateAreaPolygon(triangles, triangle_cloud)<<std::endl;

  /* CALCULATING CURVATURE AND NORMALS ********************************************************/
  // Create the normal estimation class, and pass the input dataset to it
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
  ne.setInputCloud (cloud);
  // Create an empty kdtree representation, and pass it to the normal estimation object.
  // Its content will be filled inside the object, based on the given input dataset (as no other search surface is given).
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree3 (new pcl::search::KdTree<pcl::PointXYZ> ());
  ne.setSearchMethod (tree3);

  // Output datasets
  pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);

  // Use all neighbors in a sphere of radius 3cm
  ne.setRadiusSearch (0.03);

  // Compute the features
  ne.compute (*cloud_normals);

  output_file << "size of points " << cloud->points.size() << std::endl;
  output_file << "size of the normals " << cloud_normals->points.size() << std::endl;
  //pcl::PointCloud<pcl::PointXYZRGB>::Ptr point_cloud_ptr (new pcl::PointCloud<pcl::PointXYZRGB>);

  /* PARSING DATA *****************************************************************************/
  int k = 0;
  float dist;
  float square_of_dist;
  float x1, y1, z1, x2, y2, z2;
  float nu[3], nv[3], pv_pu[3], pu_pv[3];
  float highest = triangle_cloud->points[0].z;
  float lowest = triangle_cloud->points[0].z;
  for (std::size_t i = 0; i < cloud_normals->points.size(); i++)
  {
    output_file << i << ": triangulated " << triangle_cloud->points[i].x << ", " << triangle_cloud->points[i].y << ", " << triangle_cloud->points[i].z << std::endl;
    output_file << i << ": normal " << cloud1->points[i].x << ", " << cloud1->points[i].y << ", " << cloud1->points[i].z << std::endl;
    if (triangle_cloud->points[i].z > highest)
    {
      highest = triangle_cloud->points[i].z;
    }
    if (triangle_cloud->points[i].z < lowest)
    {
      lowest = triangle_cloud->points[i].z;
    }
    normals_text << i+1 << ": " << " x-normal-> " << cloud_normals->points[i].normal_x << " y-normal-> " << cloud_normals->points[i].normal_y << " z-normal-> " << cloud_normals->points[i].normal_z << std::endl;
    curvature << i+1 << ": curvature: " << cloud_normals->points[i].curvature << std::endl;

    float x, y, z, dist, nx, ny, nz, ndist;
    /*
    if (i != cloud_normals->points.size()-1)
    {
      x = cloud->points[i+1].x - cloud->points[i].x;
      y = cloud->points[i+1].y - cloud->points[i].y;
      z = cloud->points[i+1].z - cloud->points[i].z;
      dist = sqrt(pow(x, 2) + pow(y, 2) + pow(z, 2));
      output_file << i+1 << " -> " << i+2 << " distance normal: " << dist << std::endl;
      nx = triangle_cloud->points[i+1].x - triangle_cloud->points[i].x;
      ny = triangle_cloud->points[i+1].y - triangle_cloud->points[i].y;
      nz = triangle_cloud->points[i+1].z - triangle_cloud->points[i].z;
      ndist = sqrt(pow(nx, 2) + pow(ny, 2) + pow(nz, 2));
      output_file << i+1 << " -> " << i+2 << " distance triangulated: " << ndist << std::endl;
    }
    */
    /*
    pcl::PointXYZRGB point;
    point.x = cloud_filtered_converted->points[i].x;
    point.y = cloud_filtered_converted->points[i].y;
    point.z = cloud_filtered_converted->points[i].z;
    point.r = 0;
    point.g = 100;
    point.b = 200;
    point_cloud_ptr->points.push_back (point);
    */
  }
  output_file << "highest point: " << highest << std::endl;
  output_file << "lowest point: " << lowest << std::endl;

  //pcl::PointCloud<pcl::PointXYZ>::Ptr test (new pcl::PointCloud<pcl::PointXYZ>);
  //float surface_area = calculateAreaPolygon(test);
  //std::cout<< surface_area<<std::endl;

  /*
  descriptor->width = k;
  descriptor->height = 1;
  descriptor->points.resize (descriptor->width * descriptor->height);
  std::cerr << descriptor->points.size() << std::endl;

  float voxelSize = 0.01f; // how to find appropriate voxel resolution
  pcl::octree::OctreePointCloud<pcl::PointXYZ> octree (voxelSize);
  octree.setInputCloud(descriptor);
  octree.defineBoundingBox(0.0, 0.0, 0.0, 3.14, 3.14, 3.14);
  //octree.defineBoundingBox (minX, minY, minZ, maxX, maxY, maxZ)
  octree.addPointsFromInputCloud ();
  // octree created for block

  int k_ball = 0;
  float dist_ball;
  float square_of_dist_ball;
  double X, Y, Z;
  bool occupied;
  highest = cloud_ball->points[0].z;
  for (int i = 0; i < cloud_normals_ball->points.size(); i++)
  {
    if (cloud->points[i].z > highest)
    {
      highest = cloud_ball->points[i].z;
    }
    for (int j = i+1; j < cloud_normals_ball->points.size(); j++)
    {
      x1 = cloud_ball->points[i].x;
      y1 = cloud_ball->points[i].y;
      z1 = cloud_ball->points[i].z;
      nu[0] = cloud_normals_ball->points[i].normal_x;
      nu[1] = cloud_normals_ball->points[i].normal_y;
      nu[2] = cloud_normals_ball->points[i].normal_z;
      x2 = cloud_ball->points[j].x;
      y2 = cloud_ball->points[j].y;
      z2 = cloud_ball->points[j].z;
      nv[0] = cloud_normals_ball->points[j].normal_x;
      nv[1] = cloud_normals_ball->points[j].normal_y;
      nv[2] = cloud_normals_ball->points[j].normal_z;
      square_of_dist = ((x2-x1)*(x2-x1)) + ((y2-y1)*(y2-y1)) + ((z2-z1)*(z2-z1));
      dist = sqrt(square_of_dist);
      //std::cerr << dist;
      pv_pu[0] = x2-x1; pv_pu[1] = y2-y1; pv_pu[2] = z2-z1;
      pu_pv[0] = x1-x2; pu_pv[1] = y1-y2; pu_pv[2] = z1-z2;
      if ((dist > 0.0099) && (dist < 0.0101))
      {
        X = angle_between_vectors (nu, nv);
        Y = angle_between_vectors (nu, pv_pu);
        Z = angle_between_vectors (nv, pu_pv);
        // output_file << descriptor->points[k].x << "\t" << descriptor->points[k].y << "\t" << descriptor->points[k].z;
        // output_file << "\n";
        //k_ball = k_ball + 1;
        occupied = octree.isVoxelOccupiedAtPoint (X, Y, Z);
        if (occupied == 1)
        {
          //k_ball = k_ball + 1;
          std::cerr << "Objects Matched" << "\t" << k_ball << std::endl;
          return (0);
        }
      }
    }
  }
  */

  /*
  points.open("secondItemPoints.txt");
  myfile << "Second point \n";
  myfile << "second highest " << highest;
  for (int i = 0; i < cloud_normals->points.size(); i++)
  {
    points << cloud->points[i].x << ", " << cloud->points[i].y << ", " << cloud->points[i].z << "\n";
    if (cloud->points[i].z >= highest - (highest/100))
    {
      myfile << cloud->points[i].x << ", " << cloud->points[i].y << ", " << cloud->points[i].z << "\n";
    }
  }
  points.close();
  myfile.close();
  std::cerr << "Objects Not Matched" << "\t" << k_ball << std::endl;
  */

  //output_file <<"Volume: "<<volume <<std::endl;
  //output_file <<"Surface Area: "<<surface_area <<std::endl;

  return (0);
}