void
pcl::rec_3d_framework::LocalRecognitionPipeline<Distance, PointInT, FeatureT>::recognize ()
{

    models_.reset (new std::vector<ModelT>);
    transforms_.reset (new std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> >);

    PointInTPtr processed;
    typename pcl::PointCloud<FeatureT>::Ptr signatures (new pcl::PointCloud<FeatureT> ());
    //pcl::PointCloud<int> keypoints_input;
    PointInTPtr keypoints_pointcloud;

    if (signatures_ != 0 && processed_ != 0 && (signatures_->size () == keypoints_input_->points.size ()))
    {
        keypoints_pointcloud = keypoints_input_;
        signatures = signatures_;
        processed = processed_;
        std::cout << "Using the ISPK ..." << std::endl;
    }
    else
    {
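        //no precomputed keypoints/signatures were provided, so compute them on the
        //input scene (restricted to indices_ if given) with the feature estimator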
        processed.reset (new pcl::PointCloud<PointInT>);
        if (indices_.size () > 0)
        {
            PointInTPtr sub_input (new pcl::PointCloud<PointInT>);
            pcl::copyPointCloud (*input_, indices_, *sub_input);
            estimator_->estimate (sub_input, processed, keypoints_pointcloud, signatures);
        }
        else
        {
            estimator_->estimate (input_, processed, keypoints_pointcloud, signatures);
        }

        processed_ = processed;

    }

    std::cout << "Number of keypoints:" << keypoints_pointcloud->points.size () << std::endl;

    int size_feat = sizeof(signatures->points[0].histogram) / sizeof(float);

    //feature matching and object hypotheses
    std::map<std::string, ObjectHypothesis> object_hypotheses;

    {
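        //for every scene keypoint, find the nearest training descriptor in the FLANN index,
        //bring the matched view keypoint into its model's coordinate frame and store the
        //correspondence in that model's object hypothesis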
        for (size_t idx = 0; idx < signatures->points.size (); idx++)
        {
            float* hist = signatures->points[idx].histogram;
            std::vector<float> std_hist (hist, hist + size_feat);
            flann_model histogram;
            histogram.descr = std_hist;
            flann::Matrix<int> indices;
            flann::Matrix<float> distances;
            nearestKSearch (flann_index_, histogram, 1, indices, distances);

            //read view pose and keypoint coordinates, transform keypoint coordinates to model coordinates
            Eigen::Matrix4f homMatrixPose;
            getPose (flann_models_.at (indices[0][0]).model, flann_models_.at (indices[0][0]).view_id, homMatrixPose);

            typename pcl::PointCloud<PointInT>::Ptr keypoints (new pcl::PointCloud<PointInT> ());
            getKeypoints (flann_models_.at (indices[0][0]).model, flann_models_.at (indices[0][0]).view_id, keypoints);

            PointInT view_keypoint = keypoints->points[flann_models_.at (indices[0][0]).keypoint_id];
            PointInT model_keypoint;
            model_keypoint.getVector4fMap () = homMatrixPose.inverse () * view_keypoint.getVector4fMap ();

            typename std::map<std::string, ObjectHypothesis>::iterator it_map;
            if ((it_map = object_hypotheses.find (flann_models_.at (indices[0][0]).model.id_)) != object_hypotheses.end ())
            {
                //if the object hypothesis already exists, then add information
                ObjectHypothesis oh = (*it_map).second;
                oh.correspondences_pointcloud->points.push_back (model_keypoint);
                oh.correspondences_to_inputcloud->push_back (
                            pcl::Correspondence (static_cast<int> (oh.correspondences_pointcloud->points.size () - 1),
                                                 static_cast<int> (idx), distances[0][0]));
                oh.feature_distances_->push_back (distances[0][0]);

            }
            else
            {
                //create object hypothesis
                ObjectHypothesis oh;

                typename pcl::PointCloud<PointInT>::Ptr correspondences_pointcloud (new pcl::PointCloud<PointInT> ());
                correspondences_pointcloud->points.push_back (model_keypoint);

                oh.model_ = flann_models_.at (indices[0][0]).model;
                oh.correspondences_pointcloud = correspondences_pointcloud;
                //the last keypoint pushed for this model corresponds to the current scene keypoint

                pcl::CorrespondencesPtr corr (new pcl::Correspondences ());
                oh.correspondences_to_inputcloud = corr;
                oh.correspondences_to_inputcloud->push_back (pcl::Correspondence (0, static_cast<int> (idx), distances[0][0]));

                boost::shared_ptr < std::vector<float> > feat_dist (new std::vector<float>);
                feat_dist->push_back (distances[0][0]);

                oh.feature_distances_ = feat_dist;
                object_hypotheses[oh.model_.id_] = oh;
            }
        }
    }

    typename std::map<std::string, ObjectHypothesis>::iterator it_map;

    std::vector<float> feature_distance_avg;

    {
        //pcl::ScopeTime t("Geometric verification, RANSAC and transform estimation");
        for (it_map = object_hypotheses.begin (); it_map != object_hypotheses.end (); it_map++)
        {
            std::vector < pcl::Correspondences > corresp_clusters;
            cg_algorithm_->setSceneCloud (keypoints_pointcloud);
            cg_algorithm_->setInputCloud ((*it_map).second.correspondences_pointcloud);
            cg_algorithm_->setModelSceneCorrespondences ((*it_map).second.correspondences_to_inputcloud);
            cg_algorithm_->cluster (corresp_clusters);

            std::cout << "Instances:" << corresp_clusters.size () << " Total correspondences:" << (*it_map).second.correspondences_to_inputcloud->size () << " " << it_map->first << std::endl;
            std::vector<bool> good_indices_for_hypothesis (corresp_clusters.size (), true);

            if (threshold_accept_model_hypothesis_ < 1.f)
            {
                //keep, for each model, only those clusters whose correspondence count is at least
                //threshold_accept_model_hypothesis_ times the cardinality of the largest cluster
                int max_cardinality = -1;
                for (size_t i = 0; i < corresp_clusters.size (); i++)
                {
                    //std::cout <<  (corresp_clusters[i]).size() << " -- " << (*(*it_map).second.correspondences_to_inputcloud).size() << std::endl;
                    if (max_cardinality < static_cast<int> (corresp_clusters[i].size ()))
                    {
                        max_cardinality = static_cast<int> (corresp_clusters[i].size ());
                    }
                }

                for (size_t i = 0; i < corresp_clusters.size (); i++)
                {
                    if (static_cast<float> ((corresp_clusters[i]).size ()) < (threshold_accept_model_hypothesis_ * static_cast<float> (max_cardinality)))
                    {
                        good_indices_for_hypothesis[i] = false;
                    }
                }
            }

            for (size_t i = 0; i < corresp_clusters.size (); i++)
            {

                if (!good_indices_for_hypothesis[i])
                    continue;

                //drawCorrespondences (processed, it_map->second, keypoints_pointcloud, corresp_clusters[i]);

                Eigen::Matrix4f best_trans;
                typename pcl::registration::TransformationEstimationSVD < PointInT, PointInT > t_est;
                t_est.estimateRigidTransformation (*(*it_map).second.correspondences_pointcloud, *keypoints_pointcloud, corresp_clusters[i], best_trans);

                models_->push_back ((*it_map).second.model_);
                transforms_->push_back (best_trans);

            }
        }
    }

    std::cout << "Number of hypotheses:" << models_->size() << std::endl;

    /**
     * POSE REFINEMENT
     **/
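
    //each accepted hypothesis is refined with ICP between the assembled, transformed model
    //and a voxelized copy of the scene; outlier correspondences are rejected with RANSAC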

    if (ICP_iterations_ > 0)
    {
        pcl::ScopeTime ticp ("ICP ");

        //Prepare scene and model clouds for the pose refinement step
        PointInTPtr cloud_voxelized_icp (new pcl::PointCloud<PointInT> ());
        pcl::VoxelGrid<PointInT> voxel_grid_icp;
        voxel_grid_icp.setInputCloud (processed);
        voxel_grid_icp.setLeafSize (VOXEL_SIZE_ICP_, VOXEL_SIZE_ICP_, VOXEL_SIZE_ICP_);
        voxel_grid_icp.filter (*cloud_voxelized_icp);
        source_->voxelizeAllModels (VOXEL_SIZE_ICP_);

#pragma omp parallel for schedule(dynamic,1) num_threads(omp_get_num_procs())
        for (int i = 0; i < static_cast<int>(models_->size ()); i++)
        {

            ConstPointInTPtr model_cloud;
            PointInTPtr model_aligned (new pcl::PointCloud<PointInT>);
            model_cloud = models_->at (i).getAssembled (VOXEL_SIZE_ICP_);
            pcl::transformPointCloud (*model_cloud, *model_aligned, transforms_->at (i));

            typename pcl::registration::CorrespondenceRejectorSampleConsensus<PointInT>::Ptr rej (
                        new pcl::registration::CorrespondenceRejectorSampleConsensus<PointInT> ());

            rej->setInputTarget (cloud_voxelized_icp);
            rej->setMaximumIterations (1000);
            rej->setInlierThreshold (0.005f);
            rej->setInputSource (model_aligned);

            pcl::IterativeClosestPoint<PointInT, PointInT> reg;
            reg.addCorrespondenceRejector (rej);
            reg.setInputTarget (cloud_voxelized_icp); //scene
            reg.setInputSource (model_aligned); //model
            reg.setMaximumIterations (ICP_iterations_);
            reg.setMaxCorrespondenceDistance (VOXEL_SIZE_ICP_ * 4.f);

            typename pcl::PointCloud<PointInT>::Ptr output_ (new pcl::PointCloud<PointInT> ());
            reg.align (*output_);

            Eigen::Matrix4f icp_trans = reg.getFinalTransformation ();
            transforms_->at (i) = icp_trans * transforms_->at (i);
        }
    }

    /**
     * HYPOTHESES VERIFICATION
     **/
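
    //the refined hypotheses are finally checked by the hypotheses-verification algorithm,
    //which returns a boolean mask; only models/transforms that pass the check are kept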

    if (hv_algorithm_)
    {

        pcl::ScopeTime thv ("HV verification");

        std::vector<typename pcl::PointCloud<PointInT>::ConstPtr> aligned_models;
        aligned_models.resize (models_->size ());
        for (size_t i = 0; i < models_->size (); i++)
        {
            ConstPointInTPtr model_cloud = models_->at (i).getAssembled (0.0025f);
            //ConstPointInTPtr model_cloud = models_->at (i).getAssembled (VOXEL_SIZE_ICP_);
            PointInTPtr model_aligned (new pcl::PointCloud<PointInT>);
            pcl::transformPointCloud (*model_cloud, *model_aligned, transforms_->at (i));
            aligned_models[i] = model_aligned;
        }

        std::vector<bool> mask_hv;
        hv_algorithm_->setSceneCloud (input_);
        hv_algorithm_->addModels (aligned_models, true);
        hv_algorithm_->verify ();
        hv_algorithm_->getMask (mask_hv);

        boost::shared_ptr < std::vector<ModelT> > models_temp;
        boost::shared_ptr < std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > > transforms_temp;

        models_temp.reset (new std::vector<ModelT>);
        transforms_temp.reset (new std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> >);

        for (size_t i = 0; i < models_->size (); i++)
        {
            if (!mask_hv[i])
                continue;

            models_temp->push_back (models_->at (i));
            transforms_temp->push_back (transforms_->at (i));
        }

        models_ = models_temp;
        transforms_ = transforms_temp;

    }
}
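
The matching stage above produces per-model correspondences that are then clustered and turned into poses. Below is a hedged, standalone sketch of that grouping-plus-SVD step; the function name clusterAndEstimate and all parameter values are placeholders, and pcl::GeometricConsistencyGrouping stands in for whatever concrete cg_algorithm_ the pipeline was configured with.

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/correspondence.h>
#include <pcl/recognition/cg/geometric_consistency.h>
#include <pcl/registration/transformation_estimation_svd.h>

//cluster model-to-scene correspondences into geometrically consistent groups and
//estimate one rigid transform per surviving cluster, mirroring the loop above
void
clusterAndEstimate (const pcl::PointCloud<pcl::PointXYZ>::Ptr &model_keypoints,
                    const pcl::PointCloud<pcl::PointXYZ>::Ptr &scene_keypoints,
                    const pcl::CorrespondencesPtr &model_scene_corrs,
                    std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > &poses)
{
    pcl::GeometricConsistencyGrouping<pcl::PointXYZ, pcl::PointXYZ> gc;
    gc.setGCSize (0.01);   //consensus bin size (placeholder value)
    gc.setGCThreshold (5); //minimum cluster cardinality (placeholder value)
    gc.setInputCloud (model_keypoints);
    gc.setSceneCloud (scene_keypoints);
    gc.setModelSceneCorrespondences (model_scene_corrs);

    std::vector<pcl::Correspondences> clusters;
    gc.cluster (clusters);

    pcl::registration::TransformationEstimationSVD<pcl::PointXYZ, pcl::PointXYZ> t_est;
    for (size_t i = 0; i < clusters.size (); i++)
    {
        Eigen::Matrix4f pose;
        t_est.estimateRigidTransformation (*model_keypoints, *scene_keypoints, clusters[i], pose);
        poses.push_back (pose);
    }
}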
Example #2
void
Recognizer<PointT>::poseRefinement()
{
    PointTPtr scene_voxelized (new pcl::PointCloud<PointT> ());
    pcl::VoxelGrid<PointT> voxel_grid_icp;
    voxel_grid_icp.setInputCloud (scene_);
    voxel_grid_icp.setLeafSize (param_.voxel_size_icp_, param_.voxel_size_icp_, param_.voxel_size_icp_);
    voxel_grid_icp.filter (*scene_voxelized);

    switch (param_.icp_type_)
    {
    case 0:
    {
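        //ICP type 0: standard point-to-point ICP of the transformed model against the
        //voxelized scene, with a sample-consensus correspondence rejector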
#pragma omp parallel for schedule(dynamic,1) num_threads(omp_get_num_procs())
        for (size_t i = 0; i < models_.size (); i++)
        {
//            std::cout << "Doing ICP (type 0) for model " << models_[i]->id_ << " (" << i << " / " << models_.size() << ")" << std::endl;
            ConstPointTPtr model_cloud = models_[i]->getAssembled ( param_.resolution_mm_model_assembly_ );
            PointTPtr model_aligned (new pcl::PointCloud<PointT>);
            pcl::transformPointCloud (*model_cloud, *model_aligned, transforms_[i]);

            typename pcl::registration::CorrespondenceRejectorSampleConsensus<PointT>::Ptr
                    rej (new pcl::registration::CorrespondenceRejectorSampleConsensus<PointT> ());

            rej->setInputTarget (scene_voxelized);
            rej->setMaximumIterations (1000);
            rej->setInlierThreshold (0.005f);
            rej->setInputSource (model_aligned);

            pcl::IterativeClosestPoint<PointT, PointT> reg;
            reg.addCorrespondenceRejector (rej);
            reg.setInputTarget (scene_voxelized);
            reg.setInputSource (model_aligned);
            reg.setMaximumIterations (param_.icp_iterations_);
            reg.setMaxCorrespondenceDistance (param_.max_corr_distance_);

            typename pcl::PointCloud<PointT>::Ptr output_ (new pcl::PointCloud<PointT> ());
            reg.align (*output_);

            Eigen::Matrix4f icp_trans = reg.getFinalTransformation ();
            transforms_[i] = icp_trans * transforms_[i];
        }
    }
        break;
    default:
    {
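        //default ICP type: register in the model coordinate frame so that the model's
        //precomputed voxelized distance transform can drive correspondence estimation;
        //the scene is transformed into the model frame and cropped to the model bounding
        //box, expanded by the maximum correspondence distance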
#pragma omp parallel for schedule(dynamic,1) num_threads(omp_get_num_procs())
        for (size_t i = 0; i < models_.size(); i++)
        {
//            std::cout << "Doing ICP for model " << models_[i]->id_ << " (" << i << " / " << models_.size() << ")" << std::endl;
            typename VoxelBasedCorrespondenceEstimation<PointT, PointT>::Ptr
                    est (new VoxelBasedCorrespondenceEstimation<PointT, PointT> ());

            typename pcl::registration::CorrespondenceRejectorSampleConsensus<PointT>::Ptr
                    rej (new pcl::registration::CorrespondenceRejectorSampleConsensus<PointT> ());

            Eigen::Matrix4f scene_to_model_trans = transforms_[i].inverse ();
            boost::shared_ptr<distance_field::PropagationDistanceField<PointT> > dt;
            models_[i]->getVGDT (dt);

            PointTPtr model_aligned (new pcl::PointCloud<PointT>);
            PointTPtr scene_voxelized_icp_cropped (new pcl::PointCloud<PointT>);
            typename pcl::PointCloud<PointT>::ConstPtr cloud;
            dt->getInputCloud(cloud);
            model_aligned.reset(new pcl::PointCloud<PointT>(*cloud));

            pcl::transformPointCloud (*scene_voxelized, *scene_voxelized_icp_cropped, scene_to_model_trans);

            PointT minPoint, maxPoint;
            pcl::getMinMax3D(*cloud, minPoint, maxPoint);
            minPoint.x -= param_.max_corr_distance_;
            minPoint.y -= param_.max_corr_distance_;
            minPoint.z -= param_.max_corr_distance_;

            maxPoint.x += param_.max_corr_distance_;
            maxPoint.y += param_.max_corr_distance_;
            maxPoint.z += param_.max_corr_distance_;

            pcl::CropBox<PointT> cropFilter;
            cropFilter.setInputCloud (scene_voxelized_icp_cropped);
            cropFilter.setMin(minPoint.getVector4fMap());
            cropFilter.setMax(maxPoint.getVector4fMap());
            cropFilter.filter (*scene_voxelized_icp_cropped);

            est->setVoxelRepresentationTarget (dt);
            est->setInputSource (scene_voxelized_icp_cropped);
            est->setInputTarget (model_aligned);
            est->setMaxCorrespondenceDistance (param_.max_corr_distance_);

            rej->setInputSource (scene_voxelized_icp_cropped);
            rej->setInputTarget (model_aligned);
            rej->setMaximumIterations (1000);
            rej->setInlierThreshold (0.005f);

            pcl::IterativeClosestPoint<PointT, PointT, float> reg;
            reg.setCorrespondenceEstimation (est);
            reg.addCorrespondenceRejector (rej);
            reg.setInputTarget (model_aligned);
            reg.setInputSource (scene_voxelized_icp_cropped);
            reg.setMaximumIterations (param_.icp_iterations_);
            reg.setEuclideanFitnessEpsilon(1e-5);
            reg.setTransformationEpsilon(0.001f * 0.001f);

            pcl::registration::DefaultConvergenceCriteria<float>::Ptr convergence_criteria;
            convergence_criteria = reg.getConvergeCriteria();
            convergence_criteria->setAbsoluteMSE(1e-12);
            convergence_criteria->setMaximumIterationsSimilarTransforms(15);
            convergence_criteria->setFailureAfterMaximumIterations(false);

            typename pcl::PointCloud<PointT>::Ptr output_ (new pcl::PointCloud<PointT> ());
            reg.align (*output_);

            std::cout << "ICP: iterations: " << param_.icp_iterations_ << ", fitness_score: " << reg.getFitnessScore() << std::endl;

            //the registration ran in the model frame (scene as source), so compose the ICP
            //result with scene_to_model_trans and invert to get the refined model-to-scene pose
            Eigen::Matrix4f icp_trans;
            icp_trans = reg.getFinalTransformation () * scene_to_model_trans;
            transforms_[i] = icp_trans.inverse ();
            //        std::cout << "Done ICP for model  " << models_[i]->id_ << " (" << i << " / " << models_.size() << ")" << std::endl;

        }
    }
    }
}
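
Both ICP variants above share the same core recipe: voxel-grid the scene, reject outlier correspondences with RANSAC, run ICP, and compose the resulting correction with the initial pose. The sketch below condenses that recipe for generic clouds; the refinePose helper is hypothetical, and the leaf size, thresholds and iteration counts are placeholder values rather than settings taken from either example.

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/correspondence_rejection_sample_consensus.h>

//refine an initial model-to-scene alignment: downsample the scene, reject outlier
//correspondences with RANSAC, run point-to-point ICP and compose the correction
Eigen::Matrix4f
refinePose (const pcl::PointCloud<pcl::PointXYZ>::Ptr &model_aligned,
            const pcl::PointCloud<pcl::PointXYZ>::Ptr &scene,
            const Eigen::Matrix4f &initial_pose)
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr scene_ds (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::VoxelGrid<pcl::PointXYZ> vg;
    vg.setInputCloud (scene);
    vg.setLeafSize (0.005f, 0.005f, 0.005f); //placeholder leaf size
    vg.filter (*scene_ds);

    pcl::registration::CorrespondenceRejectorSampleConsensus<pcl::PointXYZ>::Ptr rej (
                new pcl::registration::CorrespondenceRejectorSampleConsensus<pcl::PointXYZ> ());
    rej->setInputSource (model_aligned);
    rej->setInputTarget (scene_ds);
    rej->setInlierThreshold (0.005f); //placeholder inlier threshold
    rej->setMaximumIterations (1000);

    pcl::IterativeClosestPoint<pcl::PointXYZ, pcl::PointXYZ> reg;
    reg.addCorrespondenceRejector (rej);
    reg.setInputSource (model_aligned); //model already placed with initial_pose
    reg.setInputTarget (scene_ds);      //scene
    reg.setMaximumIterations (30);           //placeholder iteration count
    reg.setMaxCorrespondenceDistance (0.02); //placeholder distance

    pcl::PointCloud<pcl::PointXYZ> unused;
    reg.align (unused);

    //compose the ICP correction with the initial pose, as both examples do
    return reg.getFinalTransformation () * initial_pose;
}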