Example No. 1
int main(int argc, char **argv) {
    PLYReader ply_reader;
    PCDWriter pcd_writer;

    PointCloud<PointXYZRGB>::Ptr source_pointer (new PointCloud<PointXYZRGB>);
    PointCloud<PointXYZRGB>::Ptr target_pointer (new PointCloud<PointXYZRGB>);

    PointCloud<PointXYZRGB> output_cloud;

    std::string source_filename = argv[1];
    std::string target_filename = argv[2];

    ply_reader.read(source_filename, *source_pointer);
    ply_reader.read(target_filename, *target_pointer);

    IterativeClosestPoint<PointXYZRGB, PointXYZRGB> icp;


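    // PCL 1.7 renamed Registration::setInputCloud() to setInputSource(), so pick the call that matches the installed version.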
#if PCL_MINOR_VERSION > 6
    icp.setInputSource(source_pointer);
#else
    icp.setInputCloud(source_pointer);
#endif
    icp.setInputTarget(target_pointer);

    icp.align(output_cloud);

    pcd_writer.write("icp_test_face.pcd", output_cloud);

    return 0;
}
Example No. 2
TEST (PCL, IterativeClosestPoint)
{
    IterativeClosestPoint<PointXYZ, PointXYZ> reg;
    reg.setInputCloud (cloud_source.makeShared ());
    reg.setInputTarget (cloud_target.makeShared ());
    reg.setMaximumIterations (50);
    reg.setTransformationEpsilon (1e-8);
    reg.setMaxCorrespondenceDistance (0.05);

    // Register
    reg.align (cloud_reg);
    EXPECT_EQ (int (cloud_reg.points.size ()), int (cloud_source.points.size ()));

    Eigen::Matrix4f transformation = reg.getFinalTransformation ();

    EXPECT_NEAR (transformation (0, 0), 0.8806,  1e-3);
    EXPECT_NEAR (transformation (0, 1), 0.036481287330389023, 1e-2);
    EXPECT_NEAR (transformation (0, 2), -0.4724, 1e-3);
    EXPECT_NEAR (transformation (0, 3), 0.03453, 1e-3);

    EXPECT_NEAR (transformation (1, 0), -0.02354,  1e-3);
    EXPECT_NEAR (transformation (1, 1),  0.9992,   1e-3);
    EXPECT_NEAR (transformation (1, 2),  0.03326,  1e-3);
    EXPECT_NEAR (transformation (1, 3), -0.001519, 1e-3);

    EXPECT_NEAR (transformation (2, 0),  0.4732,  1e-3);
    EXPECT_NEAR (transformation (2, 1), -0.01817, 1e-3);
    EXPECT_NEAR (transformation (2, 2),  0.8808,  1e-3);
    EXPECT_NEAR (transformation (2, 3),  0.04116, 1e-3);

    EXPECT_EQ (transformation (3, 0), 0);
    EXPECT_EQ (transformation (3, 1), 0);
    EXPECT_EQ (transformation (3, 2), 0);
    EXPECT_EQ (transformation (3, 3), 1);
}
Example No. 3
TEST (PCL, IterativeClosestPoint_PointToPlane)
{
    typedef PointNormal PointT;
    PointCloud<PointT>::Ptr src (new PointCloud<PointT>);
    copyPointCloud (cloud_source, *src);
    PointCloud<PointT>::Ptr tgt (new PointCloud<PointT>);
    copyPointCloud (cloud_target, *tgt);
    PointCloud<PointT> output;

    NormalEstimation<PointNormal, PointNormal> norm_est;
    norm_est.setSearchMethod (search::KdTree<PointNormal>::Ptr (new search::KdTree<PointNormal>));
    norm_est.setKSearch (10);
    norm_est.setInputCloud (tgt);
    norm_est.compute (*tgt);

    IterativeClosestPoint<PointT, PointT> reg;
    typedef registration::TransformationEstimationPointToPlane<PointT, PointT> PointToPlane;
    boost::shared_ptr<PointToPlane> point_to_plane (new PointToPlane);
    reg.setTransformationEstimation (point_to_plane);
    reg.setInputCloud (src);
    reg.setInputTarget (tgt);
    reg.setMaximumIterations (50);
    reg.setTransformationEpsilon (1e-8);

    // Register
    reg.align (output);
    EXPECT_EQ (int (output.points.size ()), int (cloud_source.points.size ()));

    // We get different results on 32 vs 64-bit systems.  To address this, we've removed the explicit output test
    // on the transformation matrix.  Instead, we're testing to make sure the algorithm converges to a sufficiently
    // low error by checking the fitness score.
    /*
    Eigen::Matrix4f transformation = reg.getFinalTransformation ();

    EXPECT_NEAR (transformation (0, 0),  0.9046, 1e-2);
    EXPECT_NEAR (transformation (0, 1),  0.0609, 1e-2);
    EXPECT_NEAR (transformation (0, 2), -0.4219, 1e-2);
    EXPECT_NEAR (transformation (0, 3),  0.0327, 1e-2);

    EXPECT_NEAR (transformation (1, 0), -0.0328, 1e-2);
    EXPECT_NEAR (transformation (1, 1),  0.9968, 1e-2);
    EXPECT_NEAR (transformation (1, 2),  0.0851, 1e-2);
    EXPECT_NEAR (transformation (1, 3), -0.0003, 1e-2);

    EXPECT_NEAR (transformation (2, 0),  0.4250, 1e-2);
    EXPECT_NEAR (transformation (2, 1), -0.0641, 1e-2);
    EXPECT_NEAR (transformation (2, 2),  0.9037, 1e-2);
    EXPECT_NEAR (transformation (2, 3),  0.0413, 1e-2);

    EXPECT_EQ (transformation (3, 0), 0);
    EXPECT_EQ (transformation (3, 1), 0);
    EXPECT_EQ (transformation (3, 2), 0);
    EXPECT_EQ (transformation (3, 3), 1);
    */
    EXPECT_LT (reg.getFitnessScore (), 0.001);
}
Example No. 4
float
Eye3D::dynamic_EMD(const PointCloudPtr target, const vector<float> &wInput,
        PointCloudPtr source, const vector<float> &wOutput)
{
    _viz->clear();
    float radiusArg = 0.5;
    IterativeClosestPoint<PointT, PointT> icp;
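    // Note: setInputWeight()/setOutputWeight() and the best_EMD_ member used below are
    // extensions of a project-specific ICP variant, not part of the stock PCL API.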
    icp.setInputCloud(source);
    icp.setInputWeight(wInput);
    icp.setInputTarget(target);
    icp.setOutputWeight(wOutput);
    pcl::PointCloud<PointT> result;
    // Set the transformation epsilon (criterion 2)
    icp.setMaximumIterations (150);
    icp.setMaxCorrespondenceDistance (100);
    icp.setTransformationEpsilon (1e-11);
    // Set the euclidean distance difference epsilon (criterion 3)
    icp.setEuclideanFitnessEpsilon (1e-11);
    icp.setRANSACIterations(0); 
    icp.align(result);
    if (!icp.hasConverged()) {
        std::cout << "ICP failed to converged!"<<std::endl;
        return -1;
    }
    if (visualize_EMD) {
        /// visualize the align result
        int size = result.points.size();
        // rank the weights
        std::map<float, int> w2rank;
        std::vector<float> wInputRank;
        m_util::rank(wInput, &w2rank);
        for(float w: wInput){
            wInputRank.push_back(w2rank[w] + 1);
        }
        w2rank.clear();
        std::vector<float> wOutputRank;
        m_util::rank(wOutput, &w2rank);
        for(float w: wOutput){
            wOutputRank.push_back(w2rank[w] + 1);
        }
        int f = 50;
        for (int i = 0; i < size; i++) {
            _viz->add_sphere(target->points[i].x/f,target->points[i].y/f,
                    target->points[i].z/f, wInputRank[i]* radiusArg, 0, 255, 0);

            //        _viz->add_sphere(source->points[i].x/f, source->points[i].y/f,
            //                source->points[i].z/f, wOutputRank[i]* radiusArg, 0, 0, 255);

            _viz->add_sphere(result.points[i].x/f, result.points[i].y/f,
                    result.points[i].z/f, wOutputRank[i] * radiusArg, 255, 0, 0);
        }
        //    icp.visualize_correspondence(_viz,0);
        _viz->reset_camera();
    }
    return icp.best_EMD_;
}
Example No. 5
TEST (PCL, IterativeClosestPointWithRejectors)
{
  IterativeClosestPoint<PointXYZ, PointXYZ> reg;
  reg.setMaximumIterations (50);
  reg.setTransformationEpsilon (1e-8);
  reg.setMaxCorrespondenceDistance (0.15);
  // Add a median distance rejector
  pcl::registration::CorrespondenceRejectorMedianDistance::Ptr rej_med (new pcl::registration::CorrespondenceRejectorMedianDistance);
  rej_med->setMedianFactor (4.0);
  reg.addCorrespondenceRejector (rej_med);
  // Also add a SaC rejector
  pcl::registration::CorrespondenceRejectorSampleConsensus<PointXYZ>::Ptr rej_samp (new pcl::registration::CorrespondenceRejectorSampleConsensus<PointXYZ>);
  reg.addCorrespondenceRejector (rej_samp);

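  // Run the registration on several randomly transformed copies of the source cloud.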
  size_t ntransforms = 10;
  for (size_t t = 0; t < ntransforms; t++)
  {
    // Sample a fixed offset between cloud pairs
    Eigen::Affine3f delta_transform;
    sampleRandomTransform (delta_transform, 0., 0.05);
    // Sample random global transform for each pair, to make sure we aren't biased around the origin
    Eigen::Affine3f net_transform;
    sampleRandomTransform (net_transform, 2*M_PI, 10.);

    PointCloud<PointXYZ>::ConstPtr source (cloud_source.makeShared ());
    PointCloud<PointXYZ>::Ptr source_trans (new PointCloud<PointXYZ>);
    PointCloud<PointXYZ>::Ptr target_trans (new PointCloud<PointXYZ>);

    pcl::transformPointCloud (*source, *source_trans, delta_transform.inverse () * net_transform);
    pcl::transformPointCloud (*source, *target_trans, net_transform);

    reg.setInputSource (source_trans);
    reg.setInputTarget (target_trans);

    // Register
    reg.align (cloud_reg);
    Eigen::Matrix4f trans_final = reg.getFinalTransformation ();
    // Translation should be within 1cm
    for (int y = 0; y < 4; y++)
      EXPECT_NEAR (trans_final (y, 3), delta_transform (y, 3), 1E-2);
    // Rotation within .1
    for (int y = 0; y < 4; y++)
      for (int x = 0; x < 3; x++)
        EXPECT_NEAR (trans_final (y, x), delta_transform (y, x), 1E-1);

  }
}
Example No. 6
bool
IncrementalPoseEstimatorFromRgbFeatures::optimizeWithICP(
    pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud_source,
    Pose3D& depth_pose,
    int closest_view_index)
{
    pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud_target = m_image_data[closest_view_index].sampled_cloud;
    pcl::PointCloud<pcl::PointXYZ> cloud_reg;

    Pose3D new_depth_pose = depth_pose;

    ntk_dbg_print(cloud_source->points.size(), 1);
    ntk_dbg_print(cloud_target->points.size(), 1);

    IterativeClosestPoint<PointXYZ, PointXYZ> reg;

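    // Note: cloud_target (the stored view) is used as the moving cloud and cloud_source
    // (the new frame) as the fixed one, so ICP aligns the stored view onto the new data.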
    reg.setInputCloud (cloud_target);
    reg.setInputTarget (cloud_source);

    reg.setMaximumIterations (50);
    reg.setTransformationEpsilon (1e-5);
    reg.setMaxCorrespondenceDistance (0.5);
    reg.align (cloud_reg);

    if (!reg.hasConverged())
    {
        ntk_dbg(1) << "ICP did not converge, ignoring.";
        return false;
    }

    ntk_dbg_print(reg.getFitnessScore(), 1);

    Eigen::Matrix4f t = reg.getFinalTransformation ();
    cv::Mat1f T(4,4);
    //toOpencv(t,T);
    for (int r = 0; r < 4; ++r)
        for (int c = 0; c < 4; ++c)
            T(r,c) = t(r,c);

    Pose3D icp_pose;
    icp_pose.setCameraTransform(T);

    new_depth_pose.applyTransformAfter(icp_pose);
    depth_pose = new_depth_pose;
    return true;
}
Example No. 7
void
compute (const pcl::PCLPointCloud2::ConstPtr &source,
         const pcl::PCLPointCloud2::ConstPtr &target,
         pcl::PCLPointCloud2 &transformed_source)
{
    // Convert data to PointCloud<T>
    PointCloud<PointNormal>::Ptr src (new PointCloud<PointNormal>);
    PointCloud<PointNormal>::Ptr tgt (new PointCloud<PointNormal>);
    fromPCLPointCloud2 (*source, *src);
    fromPCLPointCloud2 (*target, *tgt);

    // Estimate
    TicToc tt;
    tt.tic ();

    print_highlight (stderr, "Computing ");

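// Scalar selects the floating-point precision used by the estimation and ICP classes below.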
#define Scalar double
//#define Scalar float

    TransformationEstimationLM<PointNormal, PointNormal, Scalar>::Ptr te (new TransformationEstimationLM<PointNormal, PointNormal, Scalar>);
    //TransformationEstimationSVD<PointNormal, PointNormal, Scalar>::Ptr te (new TransformationEstimationSVD<PointNormal, PointNormal, Scalar>);
    CorrespondenceEstimation<PointNormal, PointNormal, double>::Ptr cens (new CorrespondenceEstimation<PointNormal, PointNormal, double>);
    //CorrespondenceEstimationNormalShooting<PointNormal, PointNormal, PointNormal>::Ptr cens (new CorrespondenceEstimationNormalShooting<PointNormal, PointNormal, PointNormal>);
    //CorrespondenceEstimationNormalShooting<PointNormal, PointNormal, PointNormal, double>::Ptr cens (new CorrespondenceEstimationNormalShooting<PointNormal, PointNormal, PointNormal, double>);
    cens->setInputSource (src);
    cens->setInputTarget (tgt);
    //cens->setSourceNormals (src);

    CorrespondenceRejectorOneToOne::Ptr cor_rej_o2o (new CorrespondenceRejectorOneToOne);

    CorrespondenceRejectorMedianDistance::Ptr cor_rej_med (new CorrespondenceRejectorMedianDistance);
    cor_rej_med->setInputSource<PointNormal> (src);
    cor_rej_med->setInputTarget<PointNormal> (tgt);

    CorrespondenceRejectorSampleConsensus<PointNormal>::Ptr cor_rej_sac (new CorrespondenceRejectorSampleConsensus<PointNormal>);
    cor_rej_sac->setInputSource (src);
    cor_rej_sac->setInputTarget (tgt);
    cor_rej_sac->setInlierThreshold (0.005);
    cor_rej_sac->setMaximumIterations (10000);

    CorrespondenceRejectorVarTrimmed::Ptr cor_rej_var (new CorrespondenceRejectorVarTrimmed);
    cor_rej_var->setInputSource<PointNormal> (src);
    cor_rej_var->setInputTarget<PointNormal> (tgt);

    CorrespondenceRejectorTrimmed::Ptr cor_rej_tri (new CorrespondenceRejectorTrimmed);

    IterativeClosestPoint<PointNormal, PointNormal, Scalar> icp;
    icp.setCorrespondenceEstimation (cens);
    icp.setTransformationEstimation (te);
    icp.addCorrespondenceRejector (cor_rej_o2o);
    //icp.addCorrespondenceRejector (cor_rej_var);
    //icp.addCorrespondenceRejector (cor_rej_med);
    //icp.addCorrespondenceRejector (cor_rej_tri);
    //icp.addCorrespondenceRejector (cor_rej_sac);
    icp.setInputSource (src);
    icp.setInputTarget (tgt);
    icp.setMaximumIterations (1000);
    icp.setTransformationEpsilon (1e-10);
    PointCloud<PointNormal> output;
    icp.align (output);

    print_info ("[done, ");
    print_value ("%g", tt.toc ());
    print_info (" ms : ");
    print_value ("%d", output.width * output.height);
    print_info (" points], has converged: ");
    print_value ("%d", icp.hasConverged ());
    print_info (" with score: %f\n",  icp.getFitnessScore ());
    Eigen::Matrix4d transformation = icp.getFinalTransformation ();
    //Eigen::Matrix4f transformation = icp.getFinalTransformation ();
    PCL_DEBUG ("Transformation is:\n\t%5f\t%5f\t%5f\t%5f\n\t%5f\t%5f\t%5f\t%5f\n\t%5f\t%5f\t%5f\t%5f\n\t%5f\t%5f\t%5f\t%5f\n",
               transformation (0, 0), transformation (0, 1), transformation (0, 2), transformation (0, 3),
               transformation (1, 0), transformation (1, 1), transformation (1, 2), transformation (1, 3),
               transformation (2, 0), transformation (2, 1), transformation (2, 2), transformation (2, 3),
               transformation (3, 0), transformation (3, 1), transformation (3, 2), transformation (3, 3));

    // Convert data back
    pcl::PCLPointCloud2 output_source;
    toPCLPointCloud2 (output, output_source);
    concatenateFields (*source, output_source, transformed_source);
}
Example No. 8
bool RelativePoseEstimatorICP::estimateNewPose(const RGBDImage& image)
{
    if (m_ref_cloud.points.size() < 1)
    {
        ntk_dbg(1) << "Reference cloud was empty";
        return false;
    }

    PointCloud<PointXYZ>::Ptr target = m_ref_cloud.makeShared();
    PointCloud<PointXYZ>::Ptr source (new PointCloud<PointXYZ>());
    rgbdImageToPointCloud(*source, image);

    PointCloud<PointXYZ>::Ptr filtered_source (new PointCloud<PointXYZ>());
    PointCloud<PointXYZ>::Ptr filtered_target (new PointCloud<PointXYZ>());

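    // Downsample both clouds with a voxel grid so ICP works on fewer points.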
    pcl::VoxelGrid<pcl::PointXYZ> grid;
    grid.setLeafSize (m_voxel_leaf_size, m_voxel_leaf_size, m_voxel_leaf_size);

    grid.setInputCloud(source);
    grid.filter(*filtered_source);

    grid.setInputCloud(target);
    grid.filter(*filtered_target);

    PointCloud<PointXYZ> cloud_reg;
    IterativeClosestPoint<PointXYZ, PointXYZ> reg;
    reg.setMaximumIterations (m_max_iterations);
    reg.setTransformationEpsilon (1e-5);
    reg.setMaxCorrespondenceDistance (m_distance_threshold);
    reg.setInputCloud (filtered_source);
    reg.setInputTarget (filtered_target);
    reg.align (cloud_reg);

    if (!reg.hasConverged())
    {
      ntk_dbg(1) << "ICP did not converge, ignoring.";
      return false;
    }

    Eigen::Matrix4f t = reg.getFinalTransformation ();
    cv::Mat1f T(4,4);
    //toOpencv(t,T);
    for (int r = 0; r < 4; ++r)
        for (int c = 0; c < 4; ++c)
            T(r,c) = t(r,c);

    Pose3D icp_pose;
    icp_pose.setCameraTransform(T);
    ntk_dbg_print(icp_pose.cvTranslation(), 1);

    // Pose3D stores the transformation from 3D space to image.
    // So we have to invert everything so that unprojecting from
    // image to 3D gives the right transformation.
    // H2' = ICP * H1'
    m_current_pose.resetCameraTransform();
    m_current_pose = *image.calibration()->depth_pose;
    m_current_pose.invert();
    m_current_pose.applyTransformAfter(icp_pose);
    m_current_pose.invert();
    return true;
}
Example No. 9
TEST (PCL, IterativeClosestPoint_PointToPlane)
{
  typedef PointNormal PointT;
  PointCloud<PointT>::Ptr src (new PointCloud<PointT>);
  copyPointCloud (cloud_source, *src);
  PointCloud<PointT>::Ptr tgt (new PointCloud<PointT>);
  copyPointCloud (cloud_target, *tgt);
  PointCloud<PointT> output;

  NormalEstimation<PointNormal, PointNormal> norm_est;
  norm_est.setSearchMethod (search::KdTree<PointNormal>::Ptr (new search::KdTree<PointNormal>));
  norm_est.setKSearch (10);
  norm_est.setInputCloud (tgt);
  norm_est.compute (*tgt);

  IterativeClosestPoint<PointT, PointT> reg;
  typedef registration::TransformationEstimationPointToPlane<PointT, PointT> PointToPlane;
  boost::shared_ptr<PointToPlane> point_to_plane (new PointToPlane);
  reg.setTransformationEstimation (point_to_plane);
  reg.setInputSource (src);
  reg.setInputTarget (tgt);
  reg.setMaximumIterations (50);
  reg.setTransformationEpsilon (1e-8);
  // Use a correspondence estimator which needs normals
  registration::CorrespondenceEstimationNormalShooting<PointT, PointT, PointT>::Ptr ce (new registration::CorrespondenceEstimationNormalShooting<PointT, PointT, PointT>);
  reg.setCorrespondenceEstimation (ce);
  // Add rejector
  registration::CorrespondenceRejectorSurfaceNormal::Ptr rej (new registration::CorrespondenceRejectorSurfaceNormal);
  rej->setThreshold (0); // Could be a lot of rotation -- just make sure the normals agree to within 90 degrees (the threshold is the cosine of the angle)
  reg.addCorrespondenceRejector (rej);

  // Register
  reg.align (output);
  EXPECT_EQ (int (output.points.size ()), int (cloud_source.points.size ()));
  EXPECT_LT (reg.getFitnessScore (), 0.005);

  // Check again, for all possible caching schemes
  for (int iter = 0; iter < 4; iter++)
  {
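    // iter = 0..3 enumerates the four combinations of (cached target tree, cached source tree).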
    bool force_cache = static_cast<bool> (iter / 2);
    bool force_cache_reciprocal = static_cast<bool> (iter % 2);
    pcl::search::KdTree<PointT>::Ptr tree(new pcl::search::KdTree<PointT>);
    // Ensure that, when force_cache is not set, we are robust to the wrong input
    if (force_cache)
      tree->setInputCloud (tgt);
    reg.setSearchMethodTarget (tree, force_cache);

    pcl::search::KdTree<PointT>::Ptr tree_recip (new pcl::search::KdTree<PointT>);
    if (force_cache_reciprocal)
      tree_recip->setInputCloud (src);
    reg.setSearchMethodSource (tree_recip, force_cache_reciprocal);

    // Register
    reg.align (output);
    EXPECT_EQ (int (output.points.size ()), int (cloud_source.points.size ()));
    EXPECT_LT (reg.getFitnessScore (), 0.005);
  }

  // We get different results on 32 vs 64-bit systems.  To address this, we've removed the explicit output test
  // on the transformation matrix.  Instead, we're testing to make sure the algorithm converges to a sufficiently
  // low error by checking the fitness score.
  /*
  Eigen::Matrix4f transformation = reg.getFinalTransformation ();

  EXPECT_NEAR (transformation (0, 0),  0.9046, 1e-2);
  EXPECT_NEAR (transformation (0, 1),  0.0609, 1e-2);
  EXPECT_NEAR (transformation (0, 2), -0.4219, 1e-2);
  EXPECT_NEAR (transformation (0, 3),  0.0327, 1e-2);

  EXPECT_NEAR (transformation (1, 0), -0.0328, 1e-2);
  EXPECT_NEAR (transformation (1, 1),  0.9968, 1e-2);
  EXPECT_NEAR (transformation (1, 2),  0.0851, 1e-2);
  EXPECT_NEAR (transformation (1, 3), -0.0003, 1e-2);

  EXPECT_NEAR (transformation (2, 0),  0.4250, 1e-2);
  EXPECT_NEAR (transformation (2, 1), -0.0641, 1e-2);
  EXPECT_NEAR (transformation (2, 2),  0.9037, 1e-2);
  EXPECT_NEAR (transformation (2, 3),  0.0413, 1e-2);

  EXPECT_EQ (transformation (3, 0), 0);
  EXPECT_EQ (transformation (3, 1), 0);
  EXPECT_EQ (transformation (3, 2), 0);
  EXPECT_EQ (transformation (3, 3), 1);
  */
}