template <typename PointT, typename NormalT>
void ppfmap::CudaPPFMatch<PointT, NormalT>::detect(
        const PointCloudPtr cloud,
        const NormalsPtr normals,
        Eigen::Affine3f& trans,
        pcl::Correspondences& correspondences,
        int& votes) {
    // Run the PPF voting stage to collect candidate poses, then cluster them
    // and keep the best-supported pose together with its correspondences and
    // vote count.
    std::vector<Pose> poses;
    detect(cloud, normals, poses);
    clusterPoses(poses,
                 translation_threshold,
                 rotation_threshold,
                 trans,
                 correspondences,
                 votes);
}
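A minimal call-site sketch for the overload above, under stated assumptions: the matcher is presumed to be constructed and loaded with a model cloud elsewhere (the PPFMap setup API and header path are not shown here), PointT/NormalT are assumed to be instantiated as pcl::PointXYZ/pcl::Normal, and only the detect() signature is taken from the code above.

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/correspondence.h>
#include <Eigen/Geometry>
// The PPFMap header providing ppfmap::CudaPPFMatch is assumed to be included
// via the project's own include path.

// Assumed instantiation: PointT = pcl::PointXYZ, NormalT = pcl::Normal.
// The matcher is assumed to be fully configured (model cloud, discretization
// steps, clustering thresholds) before this call.
void detectBestPose(ppfmap::CudaPPFMatch<pcl::PointXYZ, pcl::Normal>& matcher,
                    const pcl::PointCloud<pcl::PointXYZ>::Ptr& scene,
                    const pcl::PointCloud<pcl::Normal>::Ptr& scene_normals)
{
    Eigen::Affine3f pose;
    pcl::Correspondences correspondences;
    int votes = 0;

    // Runs PPF voting over the scene, clusters the candidate poses and
    // returns the best pose, its supporting correspondences and vote count.
    matcher.detect(scene, scene_normals, pose, correspondences, votes);
}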
template <typename PointSource, typename PointTarget>
void
pcl::PPFRegistration<PointSource, PointTarget>::computeTransformation (PointCloudSource &output, const Eigen::Matrix4f& guess)
{
  if (!search_method_)
  {
    PCL_ERROR("[pcl::PPFRegistration::computeTransformation] Search method not set - skipping computeTransformation!\n");
    return;
  }

  if (guess != Eigen::Matrix4f::Identity ())
  {
    PCL_ERROR("[pcl::PPFRegistration::computeTransformation] setting initial transform (guess) not implemented!\n");
  }

  PoseWithVotesList voted_poses;
  std::vector <std::vector <unsigned int> > accumulator_array;
  accumulator_array.resize (input_->points.size ());

  size_t aux_size = static_cast<size_t> (floor (2 * M_PI / search_method_->getAngleDiscretizationStep ()));
  for (size_t i = 0; i < input_->points.size (); ++i)
  {
    std::vector<unsigned int> aux (aux_size);
    accumulator_array[i] = aux;
  }
  PCL_INFO ("Accumulator array size: %u x %u.\n", accumulator_array.size (), accumulator_array.back ().size ());

  // Consider every <scene_reference_point_sampling_rate>-th point as the reference point => fix s_r
  float f1, f2, f3, f4;
  for (size_t scene_reference_index = 0; scene_reference_index < target_->points.size (); scene_reference_index += scene_reference_point_sampling_rate_)
  {
    Eigen::Vector3f scene_reference_point = target_->points[scene_reference_index].getVector3fMap (),
        scene_reference_normal = target_->points[scene_reference_index].getNormalVector3fMap ();

    Eigen::AngleAxisf rotation_sg (acosf (scene_reference_normal.dot (Eigen::Vector3f::UnitX ())),
                                   scene_reference_normal.cross (Eigen::Vector3f::UnitX ()).normalized ());
    Eigen::Affine3f transform_sg (Eigen::Translation3f (rotation_sg * ((-1) * scene_reference_point)) * rotation_sg);

    // For every other point in the scene => now have pair (s_r, s_i) fixed
    std::vector<int> indices;
    std::vector<float> distances;
    scene_search_tree_->radiusSearch (target_->points[scene_reference_index],
                                      search_method_->getModelDiameter () / 2,
                                      indices, distances);
    for (size_t i = 0; i < indices.size (); ++i)
//    for (size_t i = 0; i < target_->points.size (); ++i)
    {
      //size_t scene_point_index = i;
      size_t scene_point_index = indices[i];
      if (scene_reference_index != scene_point_index)
      {
        if (/*pcl::computePPFPairFeature*/pcl::computePairFeatures (target_->points[scene_reference_index].getVector4fMap (),
                                                                    target_->points[scene_reference_index].getNormalVector4fMap (),
                                                                    target_->points[scene_point_index].getVector4fMap (),
                                                                    target_->points[scene_point_index].getNormalVector4fMap (),
                                                                    f1, f2, f3, f4))
        {
          std::vector<std::pair<size_t, size_t> > nearest_indices;
          search_method_->nearestNeighborSearch (f1, f2, f3, f4, nearest_indices);

          // Compute alpha_s angle
          Eigen::Vector3f scene_point = target_->points[scene_point_index].getVector3fMap ();
          Eigen::AngleAxisf rotation_sg (acosf (scene_reference_normal.dot (Eigen::Vector3f::UnitX ())),
                                         scene_reference_normal.cross (Eigen::Vector3f::UnitX ()).normalized ());
          Eigen::Affine3f transform_sg = Eigen::Translation3f (rotation_sg * ((-1) * scene_reference_point)) * rotation_sg;
//          float alpha_s = acos (Eigen::Vector3f::UnitY ().dot ((transform_sg * scene_point).normalized ()));

          Eigen::Vector3f scene_point_transformed = transform_sg * scene_point;
          float alpha_s = atan2f (-scene_point_transformed(2), scene_point_transformed(1));
          if (alpha_s != alpha_s)
          {
            PCL_ERROR ("alpha_s is nan\n");
            continue;
          }
          if (sin (alpha_s) * scene_point_transformed(2) < 0.0f)
            alpha_s *= (-1);
          alpha_s *= (-1);

          // Go through point pairs in the model with the same discretized feature
          for (std::vector<std::pair<size_t, size_t> >::iterator v_it = nearest_indices.begin (); v_it != nearest_indices.end (); ++v_it)
          {
            size_t model_reference_index = v_it->first,
                model_point_index = v_it->second;
            // Calculate angle alpha = alpha_m - alpha_s
            float alpha = search_method_->alpha_m_[model_reference_index][model_point_index] - alpha_s;
            unsigned int alpha_discretized = static_cast<unsigned int> (floor (alpha) + floor (M_PI / search_method_->getAngleDiscretizationStep ()));
            accumulator_array[model_reference_index][alpha_discretized] ++;
          }
        }
        else
          PCL_ERROR ("[pcl::PPFRegistration::computeTransformation] Computing pair feature vector between points %zu and %zu went wrong.\n", scene_reference_index, scene_point_index);
      }
    }

    size_t max_votes_i = 0, max_votes_j = 0;
    unsigned int max_votes = 0;

    for (size_t i = 0; i < accumulator_array.size (); ++i)
      for (size_t j = 0; j < accumulator_array.back ().size (); ++j)
      {
        if (accumulator_array[i][j] > max_votes)
        {
          max_votes = accumulator_array[i][j];
          max_votes_i = i;
          max_votes_j = j;
        }
        // Reset accumulator_array for the next set of iterations with a new scene reference point
        accumulator_array[i][j] = 0;
      }

    Eigen::Vector3f model_reference_point = input_->points[max_votes_i].getVector3fMap (),
        model_reference_normal = input_->points[max_votes_i].getNormalVector3fMap ();
    Eigen::AngleAxisf rotation_mg (acosf (model_reference_normal.dot (Eigen::Vector3f::UnitX ())),
                                   model_reference_normal.cross (Eigen::Vector3f::UnitX ()).normalized ());
    Eigen::Affine3f transform_mg = Eigen::Translation3f (rotation_mg * ((-1) * model_reference_point)) * rotation_mg;
    Eigen::Affine3f max_transform = transform_sg.inverse () *
        Eigen::AngleAxisf ((static_cast<float> (max_votes_j) - floorf (static_cast<float> (M_PI) / search_method_->getAngleDiscretizationStep ())) * search_method_->getAngleDiscretizationStep (), Eigen::Vector3f::UnitX ()) *
        transform_mg;

    voted_poses.push_back (PoseWithVotes (max_transform, max_votes));
  }
  PCL_DEBUG ("Done with the Hough Transform ...\n");

  // Cluster poses for filtering out outliers and obtaining more precise results
  PoseWithVotesList results;
  clusterPoses (voted_poses, results);

  pcl::transformPointCloud (*input_, output, results.front ().pose);

  transformation_ = final_transformation_ = results.front ().pose.matrix ();
  converged_ = true;
}
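A usage sketch, not part of the PCL source above: a minimal pipeline that drives computeTransformation () through PCL's public API. The model and scene clouds with normals are assumed to be filled elsewhere, and the discretization steps, sampling rate, and clustering thresholds are example values, not prescribed ones.

#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/features/ppf.h>
#include <pcl/registration/ppf_registration.h>

// Assumed inputs: clouds already contain both coordinates and normals.
Eigen::Matrix4f
registerModelToScene (const pcl::PointCloud<pcl::PointNormal>::Ptr& model_with_normals,
                      const pcl::PointCloud<pcl::PointNormal>::Ptr& scene_with_normals)
{
  // 1. Compute the point pair features (PPF) of the model.
  pcl::PointCloud<pcl::PPFSignature>::Ptr model_ppf (new pcl::PointCloud<pcl::PPFSignature> ());
  pcl::PPFEstimation<pcl::PointNormal, pcl::PointNormal, pcl::PPFSignature> ppf_estimator;
  ppf_estimator.setInputCloud (model_with_normals);
  ppf_estimator.setInputNormals (model_with_normals);
  ppf_estimator.compute (*model_ppf);

  // 2. Build the hash map queried by nearestNeighborSearch () in the voting loop.
  //    Example discretization: 12 degree angle step, 0.05 distance step.
  pcl::PPFHashMapSearch::Ptr hashmap_search (
      new pcl::PPFHashMapSearch (12.0f / 180.0f * static_cast<float> (M_PI), 0.05f));
  hashmap_search->setInputFeatureCloud (model_ppf);

  // 3. Register the model (source) against the scene (target).
  pcl::PPFRegistration<pcl::PointNormal, pcl::PointNormal> ppf_registration;
  ppf_registration.setSceneReferencePointSamplingRate (10);
  ppf_registration.setPositionClusteringThreshold (0.2f);
  ppf_registration.setRotationClusteringThreshold (30.0f / 180.0f * static_cast<float> (M_PI));
  ppf_registration.setSearchMethod (hashmap_search);
  ppf_registration.setInputSource (model_with_normals);
  ppf_registration.setInputTarget (scene_with_normals);

  pcl::PointCloud<pcl::PointNormal> aligned_model;
  ppf_registration.align (aligned_model);   // invokes computeTransformation () shown above

  return ppf_registration.getFinalTransformation ();
}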