//////////////////////////////////////////////////////////////////////////////////////////////
/** \brief Accumulate the four SPFH sub-histograms (f1..f3 angular features, f4 distance)
  * between a cluster centroid and every point of the cluster.
  * \param centroid_p the centroid position (restored: was garbled as '¢roid_p')
  * \param centroid_n the centroid normal (restored: was garbled as '¢roid_n')
  * \param cloud the input point cloud
  * \param normals the input normals
  * \param indices the cluster point indices
  *
  * Fixes vs. previous revision: guards the two divisions that previously produced
  * inf/NaN weights — the per-pair weight for neighborhoods of fewer than two points,
  * and the f4 normalization for a degenerate (zero-extent) cluster.
  */
template <typename PointInT, typename PointNT, typename PointOutT> void
pcl::VFHEstimation<PointInT, PointNT, PointOutT>::computePointSPFHSignature (
    const Eigen::Vector4f &centroid_p, const Eigen::Vector4f &centroid_n,
    const pcl::PointCloud<PointInT> &cloud, const pcl::PointCloud<PointNT> &normals,
    const std::vector<int> &indices)
{
  Eigen::Vector4f pfh_tuple;
  // Reset the whole thing
  hist_f1_.setZero (nr_bins_f1_);
  hist_f2_.setZero (nr_bins_f2_);
  hist_f3_.setZero (nr_bins_f3_);
  hist_f4_.setZero (nr_bins_f4_);

  // Fewer than two points form no usable pairs and would make hist_incr divide by zero
  if (indices.size () < 2)
    return;

  // Get the bounding box of the current cluster
  Eigen::Vector4f min_pt, max_pt;
  pcl::getMinMax3D (cloud, indices, min_pt, max_pt);

  // Estimate the largest distance from the centroid, used to normalize f4 into [0, 1]
  double distance_normalization_factor =
      (std::max) ((centroid_p - min_pt).norm (), (centroid_p - max_pt).norm ());
  // Degenerate cluster (all points coincide with the centroid): avoid division by zero
  if (distance_normalization_factor == 0.0)
    distance_normalization_factor = 1.0;

  // Factorization constant: each pair contributes equally so every histogram sums to ~100
  float hist_incr = 100.0f / static_cast<float> (indices.size () - 1);

  // Iterate over all the points in the neighborhood
  for (size_t idx = 0; idx < indices.size (); ++idx)
  {
    // Compute the pair P to NNi
    if (!computePairFeatures (centroid_p, centroid_n,
                              cloud.points[indices[idx]].getVector4fMap (),
                              normals.points[indices[idx]].getNormalVector4fMap (),
                              pfh_tuple[0], pfh_tuple[1], pfh_tuple[2], pfh_tuple[3]))
      continue;

    // Normalize the f1, f2, f3, f4 features and push them in the histogram.
    // f1 lies in [-pi, pi]; d_pi_ presumably equals 1 / (2*pi) — TODO confirm in header.
    int h_index = static_cast<int> (floor (nr_bins_f1_ * ((pfh_tuple[0] + M_PI) * d_pi_)));
    if (h_index < 0)            h_index = 0;
    if (h_index >= nr_bins_f1_) h_index = nr_bins_f1_ - 1;
    hist_f1_ (h_index) += hist_incr;

    // f2, f3 are cosines in [-1, 1]
    h_index = static_cast<int> (floor (nr_bins_f2_ * ((pfh_tuple[1] + 1.0) * 0.5)));
    if (h_index < 0)            h_index = 0;
    if (h_index >= nr_bins_f2_) h_index = nr_bins_f2_ - 1;
    hist_f2_ (h_index) += hist_incr;

    h_index = static_cast<int> (floor (nr_bins_f3_ * ((pfh_tuple[2] + 1.0) * 0.5)));
    if (h_index < 0)            h_index = 0;
    if (h_index >= nr_bins_f3_) h_index = nr_bins_f3_ - 1;
    hist_f3_ (h_index) += hist_incr;

    // f4 is the centroid-to-point distance, normalized by the largest cluster distance
    h_index = static_cast<int> (floor (nr_bins_f4_ * (pfh_tuple[3] / distance_normalization_factor)));
    if (h_index < 0)            h_index = 0;
    if (h_index >= nr_bins_f4_) h_index = nr_bins_f4_ - 1;
    hist_f4_ (h_index) += hist_incr;
  }
}
//////////////////////////////////////////////////////////////////////////////////////////////
/** \brief Compute the SPFH signature of point \a p_idx against its neighborhood and
  * accumulate it into row \a row of the three feature histograms.
  * \param cloud the input point cloud
  * \param normals the input normals
  * \param p_idx the index of the query point
  * \param row the destination row in each histogram matrix
  * \param indices the neighborhood indices of p_idx
  * \param hist_f1 / hist_f2 / hist_f3 output histogram matrices (bins = columns)
  *
  * Fix vs. previous revision: a neighborhood of fewer than two points made
  * hist_incr = 100 / 0 = inf (and an inf weight could reach the histogram when the
  * single neighbor was not p_idx itself); such neighborhoods now contribute nothing.
  */
template <typename PointInT, typename PointNT, typename PointOutT> void
pcl::FPFHEstimation<PointInT, PointNT, PointOutT>::computePointSPFHSignature (
    const pcl::PointCloud<PointInT> &cloud, const pcl::PointCloud<PointNT> &normals,
    int p_idx, int row, const std::vector<int> &indices,
    Eigen::MatrixXf &hist_f1, Eigen::MatrixXf &hist_f2, Eigen::MatrixXf &hist_f3)
{
  Eigen::Vector4f pfh_tuple;

  // Fewer than two neighbors (the neighborhood usually includes p_idx itself) means no
  // pairs to accumulate — and avoids a division by zero below
  if (indices.size () < 2)
    return;

  // Get the number of bins from the histograms size
  int nr_bins_f1 = static_cast<int> (hist_f1.cols ());
  int nr_bins_f2 = static_cast<int> (hist_f2.cols ());
  int nr_bins_f3 = static_cast<int> (hist_f3.cols ());

  // Factorization constant: each of the (size - 1) pairs contributes equally
  float hist_incr = 100.0f / static_cast<float> (indices.size () - 1);

  // Iterate over all the points in the neighborhood
  for (size_t idx = 0; idx < indices.size (); ++idx)
  {
    // Avoid pairing the query point with itself
    if (p_idx == indices[idx])
      continue;

    // Compute the pair P to NNi
    if (!computePairFeatures (cloud, normals, p_idx, indices[idx],
                              pfh_tuple[0], pfh_tuple[1], pfh_tuple[2], pfh_tuple[3]))
      continue;

    // Normalize the f1, f2, f3 features and push them in the histogram.
    // f1 lies in [-pi, pi]; f2 and f3 are cosines in [-1, 1].
    int h_index = static_cast<int> (floor (nr_bins_f1 * ((pfh_tuple[0] + M_PI) * d_pi_)));
    if (h_index < 0)           h_index = 0;
    if (h_index >= nr_bins_f1) h_index = nr_bins_f1 - 1;
    hist_f1 (row, h_index) += hist_incr;

    h_index = static_cast<int> (floor (nr_bins_f2 * ((pfh_tuple[1] + 1.0) * 0.5)));
    if (h_index < 0)           h_index = 0;
    if (h_index >= nr_bins_f2) h_index = nr_bins_f2 - 1;
    hist_f2 (row, h_index) += hist_incr;

    h_index = static_cast<int> (floor (nr_bins_f3 * ((pfh_tuple[2] + 1.0) * 0.5)));
    if (h_index < 0)           h_index = 0;
    if (h_index >= nr_bins_f3) h_index = nr_bins_f3 - 1;
    hist_f3 (row, h_index) += hist_incr;
  }
}
//////////////////////////////////////////////////////////////////////////////////////////////
/** \brief Accumulate the four VFH sub-histograms between a cluster centroid and every point
  * of the cluster, honoring the normalize_distances_ / normalize_bins_ / size_component_
  * configuration flags.
  * \param centroid_p the centroid position (restored: was garbled as '¢roid_p')
  * \param centroid_n the centroid normal (restored: was garbled as '¢roid_n')
  * \param cloud the input point cloud
  * \param normals the input normals
  * \param indices the cluster point indices
  *
  * Instead of using the bounding box to normalize the VFH distance component, the maximum
  * distance from any point to the centroid is used: VFH is invariant to rotation about the
  * roll axis, but the bounding box is not, which produced different normalization factors
  * for clouds that were merely rotated about that axis.
  *
  * Fix vs. previous revision: guards the hist_incr division for neighborhoods of fewer
  * than two points (previously produced an inf weight when normalize_bins_ was set).
  */
template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::VFHEstimation<PointInT, PointNT, PointOutT>::computePointSPFHSignature (
    const Eigen::Vector4f &centroid_p, const Eigen::Vector4f &centroid_n,
    const pcl::PointCloud<PointInT> &cloud, const pcl::PointCloud<PointNT> &normals,
    const std::vector<int> &indices)
{
  Eigen::Vector4f pfh_tuple;
  // Reset the whole thing
  hist_f1_.setZero (nr_bins_f1_);
  hist_f2_.setZero (nr_bins_f2_);
  hist_f3_.setZero (nr_bins_f3_);
  hist_f4_.setZero (nr_bins_f4_);

  // Nothing to accumulate for empty / single-point input (also avoids division by zero)
  if (indices.size () < 2)
    return;

  // Normalization factor for the f4 distance component (see note in the header comment)
  double distance_normalization_factor = 1.0;
  if (normalize_distances_)
  {
    Eigen::Vector4f max_pt;
    pcl::getMaxDistance (cloud, indices, centroid_p, max_pt);
    distance_normalization_factor = (centroid_p - max_pt).norm ();
  }

  // Factorization constant: with normalize_bins_ every histogram sums to ~100, otherwise
  // each pair contributes a raw count of 1
  float hist_incr;
  if (normalize_bins_)
    hist_incr = 100.0f / static_cast<float> (indices.size () - 1);
  else
    hist_incr = 1.0f;

  // The f4 (size) component is only accumulated when size_component_ is enabled
  float hist_incr_size_component;
  if (size_component_)
    hist_incr_size_component = hist_incr;
  else
    hist_incr_size_component = 0.0f;

  // Iterate over all the points in the neighborhood
  for (size_t idx = 0; idx < indices.size (); ++idx)
  {
    // Compute the pair P to NNi
    if (!computePairFeatures (centroid_p, centroid_n,
                              cloud.points[indices[idx]].getVector4fMap (),
                              normals.points[indices[idx]].getNormalVector4fMap (),
                              pfh_tuple[0], pfh_tuple[1], pfh_tuple[2], pfh_tuple[3]))
      continue;

    // Normalize the f1, f2, f3, f4 features and push them in the histogram.
    // f1 lies in [-pi, pi]; f2 and f3 are cosines in [-1, 1].
    int h_index = static_cast<int> (floor (nr_bins_f1_ * ((pfh_tuple[0] + M_PI) * d_pi_)));
    if (h_index < 0)            h_index = 0;
    if (h_index >= nr_bins_f1_) h_index = nr_bins_f1_ - 1;
    hist_f1_ (h_index) += hist_incr;

    h_index = static_cast<int> (floor (nr_bins_f2_ * ((pfh_tuple[1] + 1.0) * 0.5)));
    if (h_index < 0)            h_index = 0;
    if (h_index >= nr_bins_f2_) h_index = nr_bins_f2_ - 1;
    hist_f2_ (h_index) += hist_incr;

    h_index = static_cast<int> (floor (nr_bins_f3_ * ((pfh_tuple[2] + 1.0) * 0.5)));
    if (h_index < 0)            h_index = 0;
    if (h_index >= nr_bins_f3_) h_index = nr_bins_f3_ - 1;
    hist_f3_ (h_index) += hist_incr;

    // f4: either normalized by the max centroid distance, or binned in centimeter-like
    // steps (distance * 100) when normalization is disabled
    if (normalize_distances_)
      h_index = static_cast<int> (floor (nr_bins_f4_ * (pfh_tuple[3] / distance_normalization_factor)));
    else
      h_index = pcl_round (pfh_tuple[3] * 100);
    if (h_index < 0)            h_index = 0;
    if (h_index >= nr_bins_f4_) h_index = nr_bins_f4_ - 1;
    hist_f4_ (h_index) += hist_incr_size_component;
  }
}
//////////////////////////////////////////////////////////////////////////////////////////////
/** \brief Compute the full PFH signature of one neighborhood: bin the (f1, f2, f3) pair
  * features of every unordered point pair into a nr_split^3 joint histogram.
  * \param cloud the input point cloud
  * \param normals the input normals
  * \param indices the neighborhood indices
  * \param nr_split number of bins per feature dimension
  * \param pfh_histogram output histogram of size nr_split^3 (cleared on entry)
  *
  * Fixes vs. previous revision:
  *  - FIFO cache eviction was broken: feature_map_.erase (key_list_.back ()) removed the
  *    entry that had JUST been inserted while key_list_.pop () dropped the oldest key,
  *    so fresh features were discarded and stale map entries were never evicted. The
  *    oldest entry (key_list_.front ()) is now evicted instead.
  *  - Guards the hist_incr division for neighborhoods of fewer than two points.
  */
template <typename PointInT, typename PointNT, typename PointOutT> void
pcl::PFHEstimation<PointInT, PointNT, PointOutT>::computePointPFHSignature (
    const pcl::PointCloud<PointInT> &cloud, const pcl::PointCloud<PointNT> &normals,
    const std::vector<int> &indices, int nr_split, Eigen::VectorXf &pfh_histogram)
{
  int h_index, h_p;

  // Clear the resultant point histogram
  pfh_histogram.setZero ();

  // A single point (or an empty neighborhood) forms no pairs; also avoids dividing by 0
  if (indices.size () < 2)
    return;

  // Factorization constant: one equal share per unordered pair, so the histogram sums to ~100
  float hist_incr = 100.0f / static_cast<float> (indices.size () * (indices.size () - 1) / 2);

  std::pair<int, int> key;
  // Iterate over all the points in the neighborhood
  for (size_t i_idx = 0; i_idx < indices.size (); ++i_idx)
  {
    for (size_t j_idx = 0; j_idx < i_idx; ++j_idx)
    {
      // If the 3D points are invalid, don't bother estimating, just continue
      if (!isFinite (cloud.points[indices[i_idx]]) || !isFinite (cloud.points[indices[j_idx]]))
        continue;

      if (use_cache_)
      {
        // NOTE(review): keys are NOT canonicalized (smaller index first), so a pair seen
        // in the opposite order from another query point misses the cache. That only
        // costs a recomputation, never a wrong value, so it is left as-is.
        key = std::pair<int, int> (indices[i_idx], indices[j_idx]);

        // Check to see if we already estimated this pair in the global hashmap
        std::map<std::pair<int, int>, Eigen::Vector4f, std::less<std::pair<int, int> >,
                 Eigen::aligned_allocator<Eigen::Vector4f> >::iterator fm_it = feature_map_.find (key);
        if (fm_it != feature_map_.end ())
          pfh_tuple_ = fm_it->second;
        else
        {
          // Compute the pair NNi to NNj
          if (!computePairFeatures (cloud, normals, indices[i_idx], indices[j_idx],
                                    pfh_tuple_[0], pfh_tuple_[1], pfh_tuple_[2], pfh_tuple_[3]))
            continue;
        }
      }
      else if (!computePairFeatures (cloud, normals, indices[i_idx], indices[j_idx],
                                     pfh_tuple_[0], pfh_tuple_[1], pfh_tuple_[2], pfh_tuple_[3]))
        continue;

      // Normalize the f1, f2, f3 features and push them in the histogram.
      // f1 lies in [-pi, pi]; f2 and f3 are cosines in [-1, 1].
      f_index_[0] = static_cast<int> (floor (nr_split * ((pfh_tuple_[0] + M_PI) * d_pi_)));
      if (f_index_[0] < 0)         f_index_[0] = 0;
      if (f_index_[0] >= nr_split) f_index_[0] = nr_split - 1;

      f_index_[1] = static_cast<int> (floor (nr_split * ((pfh_tuple_[1] + 1.0) * 0.5)));
      if (f_index_[1] < 0)         f_index_[1] = 0;
      if (f_index_[1] >= nr_split) f_index_[1] = nr_split - 1;

      f_index_[2] = static_cast<int> (floor (nr_split * ((pfh_tuple_[2] + 1.0) * 0.5)));
      if (f_index_[2] < 0)         f_index_[2] = 0;
      if (f_index_[2] >= nr_split) f_index_[2] = nr_split - 1;

      // Flatten the 3-D bin coordinate into the nr_split^3 joint histogram index
      h_index = 0;
      h_p     = 1;
      for (int d = 0; d < 3; ++d)
      {
        h_index += h_p * f_index_[d];
        h_p     *= nr_split;
      }
      pfh_histogram[h_index] += hist_incr;

      if (use_cache_)
      {
        // Save the value in the hashmap
        feature_map_[key] = pfh_tuple_;

        // Use a maximum cache so that we don't go overboard on RAM usage
        key_list_.push (key);
        // FIFO eviction: drop the OLDEST cached pair once the cache exceeds max_size
        // (the previous code erased key_list_.back () — the entry just inserted)
        if (key_list_.size () > max_cache_size_)
        {
          feature_map_.erase (key_list_.front ());
          key_list_.pop ();
        }
      }
    }
  }
}