Example #1
TEST (PCL, IterativeClosestPoint)
{
    IterativeClosestPoint<PointXYZ, PointXYZ> reg;
    reg.setInputSource (cloud_source.makeShared ());
    reg.setInputTarget (cloud_target.makeShared ());
    reg.setMaximumIterations (50);
    reg.setTransformationEpsilon (1e-8);
    reg.setMaxCorrespondenceDistance (0.05);

    // Register
    reg.align (cloud_reg);
    EXPECT_EQ (int (cloud_reg.points.size ()), int (cloud_source.points.size ()));

    Eigen::Matrix4f transformation = reg.getFinalTransformation ();

    EXPECT_NEAR (transformation (0, 0), 0.8806,  1e-3);
    EXPECT_NEAR (transformation (0, 1), 0.036481287330389023, 1e-2);
    EXPECT_NEAR (transformation (0, 2), -0.4724, 1e-3);
    EXPECT_NEAR (transformation (0, 3), 0.03453, 1e-3);

    EXPECT_NEAR (transformation (1, 0), -0.02354,  1e-3);
    EXPECT_NEAR (transformation (1, 1),  0.9992,   1e-3);
    EXPECT_NEAR (transformation (1, 2),  0.03326,  1e-3);
    EXPECT_NEAR (transformation (1, 3), -0.001519, 1e-3);

    EXPECT_NEAR (transformation (2, 0),  0.4732,  1e-3);
    EXPECT_NEAR (transformation (2, 1), -0.01817, 1e-3);
    EXPECT_NEAR (transformation (2, 2),  0.8808,  1e-3);
    EXPECT_NEAR (transformation (2, 3),  0.04116, 1e-3);

    EXPECT_EQ (transformation (3, 0), 0);
    EXPECT_EQ (transformation (3, 1), 0);
    EXPECT_EQ (transformation (3, 2), 0);
    EXPECT_EQ (transformation (3, 3), 1);
}
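The fixture clouds cloud_source, cloud_target, and cloud_reg are defined outside the test. A minimal sketch of such a fixture, assuming the clouds are loaded from two PCD scans passed on the command line (as PCL's own registration tests do), could look like this:

#include <gtest/gtest.h>
#include <pcl/io/pcd_io.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>

using namespace pcl;

PointCloud<PointXYZ> cloud_source, cloud_target, cloud_reg;

int
main (int argc, char** argv)
{
  // Hypothetical invocation: test_icp source.pcd target.pcd
  if (argc < 3)
    return -1;
  if (io::loadPCDFile (argv[1], cloud_source) < 0 ||
      io::loadPCDFile (argv[2], cloud_target) < 0)
    return -1;
  testing::InitGoogleTest (&argc, argv);
  return RUN_ALL_TESTS ();
}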
Example #2
float
Eye3D::dynamic_EMD(const PointCloudPtr target, const vector<float> &wInput,
        PointCloudPtr source, const vector<float> &wOutput)
{
    _viz->clear();
    float radiusArg = 0.5;
    IterativeClosestPoint<PointT, PointT> icp;
    icp.setInputCloud(source);
    icp.setInputWeight(wInput);
    icp.setInputTarget(target);
    icp.setOutputWeight(wOutput);
    pcl::PointCloud<PointT> result;
    // Set the maximum number of iterations (criterion 1)
    icp.setMaximumIterations (150);
    icp.setMaxCorrespondenceDistance (100);
    // Set the transformation epsilon (criterion 2)
    icp.setTransformationEpsilon (1e-11);
    // Set the Euclidean distance difference epsilon (criterion 3)
    icp.setEuclideanFitnessEpsilon (1e-11);
    icp.setRANSACIterations(0); 
    icp.align(result);
    if (!icp.hasConverged()) {
        std::cout << "ICP failed to converged!"<<std::endl;
        return -1;
    }
    if (visualize_EMD) {
        /// visualize the alignment result
        int size = result.points.size();
        // rank the weights
        std::map<float, int> w2rank;
        std::vector<float> wInputRank;
        m_util::rank(wInput, &w2rank);
        for(float w: wInput){
            wInputRank.push_back(w2rank[w] + 1);
        }
        w2rank.clear();
        std::vector<float> wOutputRank;
        m_util::rank(wOutput, &w2rank);
        for(float w: wOutput){
            wOutputRank.push_back(w2rank[w] + 1);
        }
        int f = 50;
        for (int i = 0; i < size; i++) {
            _viz->add_sphere(target->points[i].x/f,target->points[i].y/f,
                    target->points[i].z/f, wInputRank[i]* radiusArg, 0, 255, 0);

            //        _viz->add_sphere(source->points[i].x/f, source->points[i].y/f,
            //                source->points[i].z/f, wOutputRank[i]* radiusArg, 0, 0, 255);

            _viz->add_sphere(result.points[i].x/f, result.points[i].y/f,
                    result.points[i].z/f, wOutputRank[i] * radiusArg, 255, 0, 0);
        }
        //    icp.visualize_correspondence(_viz,0);
        _viz->reset_camera();
    }
    return icp.best_EMD_;
}
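m_util::rank is project-specific and not shown here. Judging from its use above (w2rank[w] + 1 produces a 1-based rank for each weight), it plausibly maps each distinct weight to its 0-based position in ascending order. A minimal stand-in under that assumption:

#include <map>
#include <vector>

namespace m_util {
// Hypothetical reimplementation: after the call, (*out)[v] is the 0-based
// rank of value v in ascending order (duplicate values collapse to one rank).
void rank(const std::vector<float> &values, std::map<float, int> *out)
{
    out->clear();
    for (float v : values)
        (*out)[v] = 0;        // std::map keeps its keys sorted
    int r = 0;
    for (auto &kv : *out)
        kv.second = r++;      // assign ranks in key (ascending) order
}
} // namespace m_util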
Example #3
TEST (PCL, IterativeClosestPointWithRejectors)
{
  IterativeClosestPoint<PointXYZ, PointXYZ> reg;
  reg.setMaximumIterations (50);
  reg.setTransformationEpsilon (1e-8);
  reg.setMaxCorrespondenceDistance (0.15);
  // Add a median distance rejector
  pcl::registration::CorrespondenceRejectorMedianDistance::Ptr rej_med (new pcl::registration::CorrespondenceRejectorMedianDistance);
  rej_med->setMedianFactor (4.0);
  reg.addCorrespondenceRejector (rej_med);
  // Also add a SaC rejector
  pcl::registration::CorrespondenceRejectorSampleConsensus<PointXYZ>::Ptr rej_samp (new pcl::registration::CorrespondenceRejectorSampleConsensus<PointXYZ>);
  reg.addCorrespondenceRejector (rej_samp);

  size_t ntransforms = 10;
  for (size_t t = 0; t < ntransforms; t++)
  {
    // Sample a fixed offset between cloud pairs
    Eigen::Affine3f delta_transform;
    sampleRandomTransform (delta_transform, 0., 0.05);
    // Sample random global transform for each pair, to make sure we aren't biased around the origin
    Eigen::Affine3f net_transform;
    sampleRandomTransform (net_transform, 2*M_PI, 10.);

    PointCloud<PointXYZ>::ConstPtr source (cloud_source.makeShared ());
    PointCloud<PointXYZ>::Ptr source_trans (new PointCloud<PointXYZ>);
    PointCloud<PointXYZ>::Ptr target_trans (new PointCloud<PointXYZ>);

    pcl::transformPointCloud (*source, *source_trans, delta_transform.inverse () * net_transform);
    pcl::transformPointCloud (*source, *target_trans, net_transform);

    reg.setInputSource (source_trans);
    reg.setInputTarget (target_trans);

    // Register
    reg.align (cloud_reg);
    Eigen::Matrix4f trans_final = reg.getFinalTransformation ();
    // Translation should be within 1cm
    for (int y = 0; y < 4; y++)
      EXPECT_NEAR (trans_final (y, 3), delta_transform (y, 3), 1E-2);
    // Rotation elements should be within 0.1
    for (int y = 0; y < 4; y++)
      for (int x = 0; x < 3; x++)
        EXPECT_NEAR (trans_final (y, x), delta_transform (y, x), 1E-1);

  }
}
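sampleRandomTransform comes from the test's helper code and is not shown. A sketch consistent with its call sites (a maximum rotation angle and a maximum translation magnitude), assuming a uniformly sampled axis-angle rotation:

#include <cstdlib>
#include <Eigen/Geometry>

// Hypothetical helper: random rigid transform with a rotation angle in
// [0, max_angle] about a random axis and a translation within ±max_trans
// per component.
void
sampleRandomTransform (Eigen::Affine3f &trans, float max_angle, float max_trans)
{
  Eigen::Vector3f axis = Eigen::Vector3f::Random ().normalized ();
  float angle = max_angle * static_cast<float> (rand ()) / RAND_MAX;
  Eigen::Vector3f t = max_trans * Eigen::Vector3f::Random ();
  trans = Eigen::Translation3f (t) * Eigen::AngleAxisf (angle, axis);
}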
Example #4
bool
IncrementalPoseEstimatorFromRgbFeatures::optimizeWithICP(
    pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud_source,
    Pose3D& depth_pose,
    int closest_view_index)
{
    pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud_target = m_image_data[closest_view_index].sampled_cloud;
    pcl::PointCloud<pcl::PointXYZ> cloud_reg;

    Pose3D new_depth_pose = depth_pose;

    ntk_dbg_print(cloud_source->points.size(), 1);
    ntk_dbg_print(cloud_target->points.size(), 1);

    IterativeClosestPoint<PointXYZ, PointXYZ> reg;

    reg.setInputSource (cloud_target);
    reg.setInputTarget (cloud_source);

    reg.setMaximumIterations (50);
    reg.setTransformationEpsilon (1e-5);
    reg.setMaxCorrespondenceDistance (0.5);
    reg.align (cloud_reg);

    if (!reg.hasConverged())
    {
        ntk_dbg(1) << "ICP did not converge, ignoring.";
        return false;
    }

    ntk_dbg_print(reg.getFitnessScore(), 1);

    Eigen::Matrix4f t = reg.getFinalTransformation ();
    cv::Mat1f T(4,4);
    //toOpencv(t,T);
    for (int r = 0; r < 4; ++r)
        for (int c = 0; c < 4; ++c)
            T(r,c) = t(r,c);

    Pose3D icp_pose;
    icp_pose.setCameraTransform(T);

    new_depth_pose.applyTransformAfter(icp_pose);
    depth_pose = new_depth_pose;
    return true;
}
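The element-wise copy above stands in for the commented-out toOpencv helper. If the build can include OpenCV's Eigen bridge, cv::eigen2cv performs the same conversion in one call (a sketch, assuming opencv2/core/eigen.hpp is available):

#include <opencv2/core/eigen.hpp>

Eigen::Matrix4f t = reg.getFinalTransformation ();
cv::Mat1f T (4, 4);
cv::eigen2cv (t, T);   // copies the 4x4 Eigen matrix into the cv::Mat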
Example #5
bool RelativePoseEstimatorICP :: estimateNewPose(const RGBDImage& image)
{
    if (m_ref_cloud.points.size() < 1)
    {
        ntk_dbg(1) << "Reference cloud was empty";
        return false;
    }

    PointCloud<PointXYZ>::Ptr target = m_ref_cloud.makeShared();
    PointCloud<PointXYZ>::Ptr source (new PointCloud<PointXYZ>());
    rgbdImageToPointCloud(*source, image);

    PointCloud<PointXYZ>::Ptr filtered_source (new PointCloud<PointXYZ>());
    PointCloud<PointXYZ>::Ptr filtered_target (new PointCloud<PointXYZ>());

    pcl::VoxelGrid<pcl::PointXYZ> grid;
    grid.setLeafSize (m_voxel_leaf_size, m_voxel_leaf_size, m_voxel_leaf_size);

    grid.setInputCloud(source);
    grid.filter(*filtered_source);

    grid.setInputCloud(target);
    grid.filter(*filtered_target);

    PointCloud<PointXYZ> cloud_reg;
    IterativeClosestPoint<PointXYZ, PointXYZ> reg;
    reg.setMaximumIterations (m_max_iterations);
    reg.setTransformationEpsilon (1e-5);
    reg.setMaxCorrespondenceDistance (m_distance_threshold);
    reg.setInputSource (filtered_source);
    reg.setInputTarget (filtered_target);
    reg.align (cloud_reg);

    if (!reg.hasConverged())
    {
      ntk_dbg(1) << "ICP did not converge, ignoring.";
      return false;
    }

    Eigen::Matrix4f t = reg.getFinalTransformation ();
    cv::Mat1f T(4,4);
    //toOpencv(t,T);
    for (int r = 0; r < 4; ++r)
        for (int c = 0; c < 4; ++c)
            T(r,c) = t(r,c);

    Pose3D icp_pose;
    icp_pose.setCameraTransform(T);
    ntk_dbg_print(icp_pose.cvTranslation(), 1);

    // Pose3D stores the transformation from 3D space to image.
    // So we have to invert everything so that unprojecting from
    // image to 3D gives the right transformation.
    // H2' = ICP * H1'
    m_current_pose.resetCameraTransform();
    m_current_pose = *image.calibration()->depth_pose;
    m_current_pose.invert();
    m_current_pose.applyTransformAfter(icp_pose);
    m_current_pose.invert();
    return true;
}
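Both pose estimators accept any converged alignment. A stricter variant (hypothetical, not part of the original code) could additionally gate on pcl::Registration's fitness score, which reports the mean squared distance between corresponding points:

// Hypothetical stricter acceptance check; the threshold is an assumption
// and would need tuning for the sensor and scene.
const double max_fitness = 1e-3;
if (!reg.hasConverged() || reg.getFitnessScore() > max_fitness)
{
    ntk_dbg(1) << "ICP alignment rejected.";
    return false;
}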