// Prune keypoint indices whose local support region is (near-)planar.
template <typename PointT>
void
UniformSamplingExtractor<PointT>::filterPlanar (const PointInTPtr &input, std::vector<int> &kp_idx)
{
  // create a search object
  typename pcl::search::Search<PointT>::Ptr tree;
  if (input->isOrganized () && !force_unorganized_)
    tree.reset (new pcl::search::OrganizedNeighbor<PointT> ());
  else
    tree.reset (new pcl::search::KdTree<PointT> (false));
  tree->setInputCloud (input);

  size_t kept = 0;
  for (size_t i = 0; i < kp_idx.size (); i++)
  {
    std::vector<int> neighborhood_indices;
    std::vector<float> neighborhood_dist;

    if (tree->radiusSearch (kp_idx[i], radius_, neighborhood_indices, neighborhood_dist))
    {
      EIGEN_ALIGN16 Eigen::Matrix3f covariance_matrix;
      Eigen::Vector4f xyz_centroid;
      EIGEN_ALIGN16 Eigen::Vector3f eigenValues;
      EIGEN_ALIGN16 Eigen::Matrix3f eigenVectors;

      // compute planarity of the region
      computeMeanAndCovarianceMatrix (*input, neighborhood_indices, covariance_matrix, xyz_centroid);
      pcl::eigen33 (covariance_matrix, eigenVectors, eigenValues);

      float eigsum = eigenValues.sum ();
      if (!pcl_isfinite (eigsum))
        PCL_ERROR ("Eigen sum is not finite\n");

      // scale the planarity threshold with depth when z-adaptive filtering is enabled
      float t_planar = threshold_planar_;
      if (z_adaptative_)
        t_planar *= (1 + (std::max (input->points[kp_idx[i]].z, 1.f) - 1.f));

      if ((fabs (eigenValues[0] - eigenValues[1]) < 1.5e-4) ||
          (eigsum != 0 && fabs (eigenValues[0] / eigsum) > t_planar))
      {
        // region is not planar, keep the keypoint
        kp_idx[kept] = kp_idx[i];
        kept++;
      }
    }
  }
  kp_idx.resize (kept);
}
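// A minimal standalone restatement of the planarity test applied to each neighbourhood above:
// the smallest eigenvalue of the local covariance matrix, relative to the eigenvalue sum,
// measures how planar the support region is. Uses pcl::computeMeanAndCovarianceMatrix and
// pcl::eigen33; the function name and the default threshold are illustrative and not taken
// from the class above.
#include <pcl/point_types.h>
#include <pcl/common/centroid.h>
#include <pcl/common/eigen.h>
#include <cmath>
#include <vector>

template <typename PointT> bool
isNeighborhoodPlanar (const pcl::PointCloud<PointT> &cloud,
                      const std::vector<int> &neighborhood_indices,
                      float threshold_planar = 1.e-2f)   // illustrative default
{
  EIGEN_ALIGN16 Eigen::Matrix3f covariance_matrix;
  Eigen::Vector4f xyz_centroid;
  EIGEN_ALIGN16 Eigen::Vector3f eigen_values;
  EIGEN_ALIGN16 Eigen::Matrix3f eigen_vectors;

  pcl::computeMeanAndCovarianceMatrix (cloud, neighborhood_indices, covariance_matrix, xyz_centroid);
  // eigen33 returns the eigenvalues in increasing order, so eigen_values[0] is the smallest
  pcl::eigen33 (covariance_matrix, eigen_vectors, eigen_values);

  const float eigsum = eigen_values.sum ();
  // same criterion as filterPlanar(): a large smallest-eigenvalue ratio means the
  // neighbourhood has significant extent out of the best-fit plane
  const bool non_planar = (std::fabs (eigen_values[0] - eigen_values[1]) < 1.5e-4f) ||
                          (eigsum != 0.f && std::fabs (eigen_values[0] / eigsum) > threshold_planar);
  return !non_planar;
}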
// Difference of normals (DoN): for every point, average the normals over a small and a
// large support radius and return half their difference.
template <typename PointT, typename NormalT>
pcl::PointCloud<pcl::Normal>::Ptr
don (pcl::PointCloud<PointT> &cloud, pcl::PointCloud<NormalT> &normals,
     float radius1 = 0.2f, float radius2 = 0.5f)
{
  pcl::PointCloud<pcl::Normal>::Ptr donormals (new pcl::PointCloud<pcl::Normal> ());
  donormals->resize (cloud.size ());

  typename pcl::search::Search<PointT>::Ptr tree (new pcl::search::KdTree<PointT> (false));
  // wrap the reference in a non-owning shared pointer so it can be handed to the search tree
  typename pcl::PointCloud<PointT>::ConstPtr cptr (&cloud, [] (const pcl::PointCloud<PointT>*) {});
  tree->setInputCloud (cptr);

  std::vector<int> idxs;
  std::vector<float> sq_dists;

  // average the normals over a set of neighbour indices
  auto avg_normal = [] (const std::vector<int> &idxs, const pcl::PointCloud<NormalT> &normals)
  {
    pcl::Normal sum;
    for (int idx : idxs)
      sum.getNormalVector3fMap () += normals[idx].getNormalVector3fMap ();
    sum.getNormalVector3fMap () /= static_cast<float> (idxs.size ());
    return sum;
  };

  const std::vector<float> rads = { radius1, radius2 };
  for (std::size_t idx = 0; idx < cloud.size (); idx++)
  {
    std::vector<pcl::Normal> avgs;
    for (float rad : rads)
    {
      idxs.clear ();
      sq_dists.clear ();
      tree->radiusSearch (static_cast<int> (idx), rad, idxs, sq_dists);
      idxs.push_back (static_cast<int> (idx));  // make sure the query point itself contributes
      avgs.push_back (avg_normal (idxs, normals));
    }
    (*donormals)[idx].getNormalVector3fMap () =
        (avgs[0].getNormalVector3fMap () - avgs[1].getNormalVector3fMap ()) / 2.0f;
  }
  return donormals;
}
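// Hedged usage sketch for don(): estimate per-point normals with PCL's NormalEstimation,
// then evaluate the difference of normals at the function's two default support radii.
// The file name, the normal-estimation radius and the name exampleDonUsage are
// illustrative assumptions, not part of the code above.
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_estimation.h>

void
exampleDonUsage ()
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::io::loadPCDFile ("scene.pcd", *cloud);       // placeholder input file

  pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
  ne.setInputCloud (cloud);
  ne.setRadiusSearch (0.05);                        // illustrative normal support radius
  ne.compute (*normals);

  // difference of normals between the 0.2 and 0.5 supports (the defaults of don() above)
  pcl::PointCloud<pcl::Normal>::Ptr don_field = don (*cloud, *normals);
}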
// Copy into `output` every finite point of `src` that has no neighbor in the target cloud
// (the cloud the search tree was built on) within the given threshold.
template <typename PointT> void
pcl::getPointCloudDifference (
    const pcl::PointCloud<PointT> &src,
    double threshold,
    const typename pcl::search::Search<PointT>::Ptr &tree,
    pcl::PointCloud<PointT> &output)
{
  // We're interested in a single nearest neighbor only
  std::vector<int> nn_indices (1);
  std::vector<float> nn_distances (1);

  // The input cloud indices that do not have a neighbor in the target cloud
  std::vector<int> src_indices;

  // Iterate through the source data set
  for (int i = 0; i < static_cast<int> (src.points.size ()); ++i)
  {
    // Ignore invalid points in the input cloud
    if (!isFinite (src.points[i]))
      continue;

    // Search for the closest point in the target data set (number of neighbors to find = 1)
    if (!tree->nearestKSearch (src.points[i], 1, nn_indices, nn_distances))
    {
      PCL_WARN ("No neighbor found for point %d (%f %f %f)!\n", i, src.points[i].x, src.points[i].y, src.points[i].z);
      continue;
    }

    // Add points without a corresponding point in the target cloud to the output cloud
    if (nn_distances[0] > threshold)
      src_indices.push_back (i);
  }

  // Copy all the data fields from the input cloud to the output one
  copyPointCloud (src, src_indices, output);

  // Output is always dense, as invalid points in the input cloud are ignored
  output.is_dense = true;
}
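// Hedged usage sketch for getPointCloudDifference(): the search tree is built on the
// *target* cloud, so the output receives every finite point of src whose squared distance
// to its nearest target point exceeds the threshold (nearestKSearch returns squared
// distances). File names, the threshold value and the name exampleCloudDifference are
// illustrative; the definition above (or pcl/segmentation/impl/segment_differences.hpp)
// must be visible.
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/search/kdtree.h>

void
exampleCloudDifference ()
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr src (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::PointCloud<pcl::PointXYZ>::Ptr tgt (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::io::loadPCDFile ("source.pcd", *src);        // placeholder inputs
  pcl::io::loadPCDFile ("target.pcd", *tgt);

  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud (tgt);

  pcl::PointCloud<pcl::PointXYZ> diff;
  pcl::getPointCloudDifference<pcl::PointXYZ> (*src, 0.01, tree, diff);   // 0.01 is an illustrative threshold
}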
template <typename PointT, typename PointNT> void
pcl::SmoothedSurfacesKeypoint<PointT, PointNT>::detectKeypoints (PointCloudT &output)
{
  // Calculate differences for each cloud
  std::vector<std::vector<float> > diffs (scales_.size ());

  // The cloud with the smallest scale has no differences
  std::vector<float> aux_diffs (input_->points.size (), 0.0f);
  diffs[scales_[0].second] = aux_diffs;

  cloud_trees_[scales_[0].second]->setInputCloud (clouds_[scales_[0].second]);
  for (size_t scale_i = 1; scale_i < clouds_.size (); ++scale_i)
  {
    size_t cloud_i = scales_[scale_i].second,
           cloud_i_minus_one = scales_[scale_i - 1].second;
    diffs[cloud_i].resize (input_->points.size ());
    PCL_INFO ("cloud_i %u cloud_i_minus_one %u\n", cloud_i, cloud_i_minus_one);

    for (size_t point_i = 0; point_i < input_->points.size (); ++point_i)
      diffs[cloud_i][point_i] = cloud_normals_[cloud_i]->points[point_i].getNormalVector3fMap ().dot (
          clouds_[cloud_i]->points[point_i].getVector3fMap () - clouds_[cloud_i_minus_one]->points[point_i].getVector3fMap ());

    // Setup kdtree for this cloud
    cloud_trees_[cloud_i]->setInputCloud (clouds_[cloud_i]);
  }

  // Find minima and maxima in differences inside the input cloud
  typename pcl::search::Search<PointT>::Ptr input_tree = cloud_trees_.back ();
  for (int point_i = 0; point_i < static_cast<int> (input_->points.size ()); ++point_i)
  {
    std::vector<int> nn_indices;
    std::vector<float> nn_distances;
    input_tree->radiusSearch (point_i, input_scale_ * neighborhood_constant_, nn_indices, nn_distances);

    bool is_min = true, is_max = true;
    for (std::vector<int>::iterator nn_it = nn_indices.begin (); nn_it != nn_indices.end (); ++nn_it)
      if (*nn_it != point_i)
      {
        if (diffs[input_index_][point_i] < diffs[input_index_][*nn_it])
          is_max = false;
        else if (diffs[input_index_][point_i] > diffs[input_index_][*nn_it])
          is_min = false;
      }

    // If the point is a local minimum/maximum, check if it is the same over all the scales
    if (is_min || is_max)
    {
      bool passed_min = true, passed_max = true;
      for (size_t scale_i = 0; scale_i < scales_.size (); ++scale_i)
      {
        size_t cloud_i = scales_[scale_i].second;
        // skip input cloud
        if (cloud_i == clouds_.size () - 1)
          continue;

        nn_indices.clear ();
        nn_distances.clear ();
        cloud_trees_[cloud_i]->radiusSearch (point_i, scales_[scale_i].first * neighborhood_constant_, nn_indices, nn_distances);

        bool is_min_other_scale = true, is_max_other_scale = true;
        for (std::vector<int>::iterator nn_it = nn_indices.begin (); nn_it != nn_indices.end (); ++nn_it)
          if (*nn_it != point_i)
          {
            if (diffs[input_index_][point_i] < diffs[cloud_i][*nn_it])
              is_max_other_scale = false;
            else if (diffs[input_index_][point_i] > diffs[cloud_i][*nn_it])
              is_min_other_scale = false;
          }

        if (is_min == true && is_min_other_scale == false)
          passed_min = false;
        if (is_max == true && is_max_other_scale == false)
          passed_max = false;

        if (!passed_min && !passed_max)
          break;
      }

      // check if point was minimum/maximum over all the scales
      if (passed_min || passed_max)
        output.points.push_back (input_->points[point_i]);
    }
  }

  output.header = input_->header;
  output.width = static_cast<uint32_t> (output.points.size ());
  output.height = 1;

  // debug stuff
//  for (size_t scale_i = 0; scale_i < scales_.size (); ++scale_i)
//  {
//    PointCloud<PointXYZI>::Ptr debug_cloud (new PointCloud<PointXYZI> ());
//    debug_cloud->points.resize (input_->points.size ());
//    debug_cloud->width = input_->width;
//    debug_cloud->height = input_->height;
//    for (size_t point_i = 0; point_i < input_->points.size (); ++point_i)
//    {
//      debug_cloud->points[point_i].intensity = diffs[scales_[scale_i].second][point_i];
//      debug_cloud->points[point_i].x = input_->points[point_i].x;
//      debug_cloud->points[point_i].y = input_->points[point_i].y;
//      debug_cloud->points[point_i].z = input_->points[point_i].z;
//    }
//    char str[512];
//    sprintf (str, "diffs_%2d.pcd", scale_i);
//    io::savePCDFile (str, *debug_cloud);
//  }
}
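// A minimal standalone sketch of the per-point scale-space difference used above: the
// displacement of each point between two successively smoothed clouds is projected onto
// the point's normal in the current cloud. The clouds must be in one-to-one point
// correspondence, as detectKeypoints() assumes; the function name is hypothetical.
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <vector>

std::vector<float>
scaleSpaceDifferences (const pcl::PointCloud<pcl::PointXYZ> &cloud_current,
                       const pcl::PointCloud<pcl::PointXYZ> &cloud_previous,
                       const pcl::PointCloud<pcl::Normal> &normals_current)
{
  std::vector<float> diffs (cloud_current.size (), 0.0f);
  for (std::size_t i = 0; i < cloud_current.size (); ++i)
    diffs[i] = normals_current[i].getNormalVector3fMap ().dot (
        cloud_current[i].getVector3fMap () - cloud_previous[i].getVector3fMap ());
  return diffs;
}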