Example #1
TEST (PCL, IterativeClosestPoint)
{
    IterativeClosestPoint<PointXYZ, PointXYZ> reg;
    reg.setInputCloud (cloud_source.makeShared ());
    reg.setInputTarget (cloud_target.makeShared ());
    reg.setMaximumIterations (50);
    reg.setTransformationEpsilon (1e-8);
    reg.setMaxCorrespondenceDistance (0.05);

    // Register
    reg.align (cloud_reg);
    EXPECT_EQ (int (cloud_reg.points.size ()), int (cloud_source.points.size ()));

    Eigen::Matrix4f transformation = reg.getFinalTransformation ();

    EXPECT_NEAR (transformation (0, 0), 0.8806,  1e-3);
    EXPECT_NEAR (transformation (0, 1), 0.036481287330389023, 1e-2);
    EXPECT_NEAR (transformation (0, 2), -0.4724, 1e-3);
    EXPECT_NEAR (transformation (0, 3), 0.03453, 1e-3);

    EXPECT_NEAR (transformation (1, 0), -0.02354,  1e-3);
    EXPECT_NEAR (transformation (1, 1),  0.9992,   1e-3);
    EXPECT_NEAR (transformation (1, 2),  0.03326,  1e-3);
    EXPECT_NEAR (transformation (1, 3), -0.001519, 1e-3);

    EXPECT_NEAR (transformation (2, 0),  0.4732,  1e-3);
    EXPECT_NEAR (transformation (2, 1), -0.01817, 1e-3);
    EXPECT_NEAR (transformation (2, 2),  0.8808,  1e-3);
    EXPECT_NEAR (transformation (2, 3),  0.04116, 1e-3);

    EXPECT_EQ (transformation (3, 0), 0);
    EXPECT_EQ (transformation (3, 1), 0);
    EXPECT_EQ (transformation (3, 2), 0);
    EXPECT_EQ (transformation (3, 3), 1);
}
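The test above refers to cloud_source, cloud_target and cloud_reg, which are global fixtures defined elsewhere in the test file. A minimal sketch of such a setup is shown below; the loading code, file arguments and main() body are assumptions for illustration, not part of the original test.

#include <gtest/gtest.h>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/io/pcd_io.h>
#include <pcl/registration/icp.h>

using namespace pcl;

// Global fixtures referenced by the test body above (hypothetical setup).
PointCloud<PointXYZ> cloud_source, cloud_target, cloud_reg;

int
main (int argc, char **argv)
{
    // Load two overlapping scans given on the command line into the fixtures,
    // then hand control to GTest so the TEST () bodies run against them.
    if (argc < 3 ||
        io::loadPCDFile (argv[1], cloud_source) < 0 ||
        io::loadPCDFile (argv[2], cloud_target) < 0)
        return -1;
    testing::InitGoogleTest (&argc, argv);
    return RUN_ALL_TESTS ();
}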
Example #2
int main(int argc, char **argv) {
    PLYReader ply_reader;
    PCDWriter pcd_writer;

    PointCloud<PointXYZRGB>::Ptr source_pointer (new PointCloud<PointXYZRGB>);
    PointCloud<PointXYZRGB>::Ptr target_pointer (new PointCloud<PointXYZRGB>);

    PointCloud<PointXYZRGB> output_cloud;

    if (argc < 3) {
        std::cerr << "Usage: " << argv[0] << " <source.ply> <target.ply>" << std::endl;
        return -1;
    }

    std::string source_filename = argv[1];
    std::string target_filename = argv[2];

    ply_reader.read(source_filename, *source_pointer);
    ply_reader.read(target_filename, *target_pointer);

    IterativeClosestPoint<PointXYZRGB, PointXYZRGB> icp;


#if PCL_MINOR_VERSION > 6
    icp.setInputSource(source_pointer);
#else
    icp.setInputCloud(source_pointer);
#endif
    icp.setInputTarget(target_pointer);

    icp.align(output_cloud);

    pcd_writer.write("icp_test_face.pcd", output_cloud);

    return 0;
}
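Like the other snippets on this page, the program above is an excerpt; the preamble it relies on would look roughly like the following (the exact header list is an assumption):

#include <iostream>
#include <string>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/io/ply_io.h>        // PLYReader
#include <pcl/io/pcd_io.h>        // PCDWriter
#include <pcl/registration/icp.h>
#include <pcl/pcl_config.h>       // PCL_MINOR_VERSION

using namespace pcl;

Invoked as, for example, ./icp_align source.ply target.ply (the binary name is arbitrary), it aligns the first PLY cloud to the second and writes the aligned result to icp_test_face.pcd.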
Example #3
TEST (PCL, IterativeClosestPoint_PointToPlane)
{
    typedef PointNormal PointT;
    PointCloud<PointT>::Ptr src (new PointCloud<PointT>);
    copyPointCloud (cloud_source, *src);
    PointCloud<PointT>::Ptr tgt (new PointCloud<PointT>);
    copyPointCloud (cloud_target, *tgt);
    PointCloud<PointT> output;

    NormalEstimation<PointNormal, PointNormal> norm_est;
    norm_est.setSearchMethod (search::KdTree<PointNormal>::Ptr (new search::KdTree<PointNormal>));
    norm_est.setKSearch (10);
    norm_est.setInputCloud (tgt);
    norm_est.compute (*tgt);

    IterativeClosestPoint<PointT, PointT> reg;
    typedef registration::TransformationEstimationPointToPlane<PointT, PointT> PointToPlane;
    boost::shared_ptr<PointToPlane> point_to_plane (new PointToPlane);
    reg.setTransformationEstimation (point_to_plane);
    reg.setInputCloud (src);
    reg.setInputTarget (tgt);
    reg.setMaximumIterations (50);
    reg.setTransformationEpsilon (1e-8);

    // Register
    reg.align (output);
    EXPECT_EQ (int (output.points.size ()), int (cloud_source.points.size ()));

    // We get different results on 32 vs 64-bit systems.  To address this, we've removed the explicit output test
    // on the transformation matrix.  Instead, we're testing to make sure the algorithm converges to a sufficiently
    // low error by checking the fitness score.
    /*
    Eigen::Matrix4f transformation = reg.getFinalTransformation ();

    EXPECT_NEAR (transformation (0, 0),  0.9046, 1e-2);
    EXPECT_NEAR (transformation (0, 1),  0.0609, 1e-2);
    EXPECT_NEAR (transformation (0, 2), -0.4219, 1e-2);
    EXPECT_NEAR (transformation (0, 3),  0.0327, 1e-2);

    EXPECT_NEAR (transformation (1, 0), -0.0328, 1e-2);
    EXPECT_NEAR (transformation (1, 1),  0.9968, 1e-2);
    EXPECT_NEAR (transformation (1, 2),  0.0851, 1e-2);
    EXPECT_NEAR (transformation (1, 3), -0.0003, 1e-2);

    EXPECT_NEAR (transformation (2, 0),  0.4250, 1e-2);
    EXPECT_NEAR (transformation (2, 1), -0.0641, 1e-2);
    EXPECT_NEAR (transformation (2, 2),  0.9037, 1e-2);
    EXPECT_NEAR (transformation (2, 3),  0.0413, 1e-2);

    EXPECT_EQ (transformation (3, 0), 0);
    EXPECT_EQ (transformation (3, 1), 0);
    EXPECT_EQ (transformation (3, 2), 0);
    EXPECT_EQ (transformation (3, 3), 1);
    */
    EXPECT_LT (reg.getFitnessScore (), 0.001);
}
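For context, getFitnessScore () returns the mean of squared distances from the aligned source points to their nearest neighbours in the target, so the final check asserts a very small average residual. A couple of lines one might append for debugging (a sketch, not part of the original test):

    std::cout << "converged: " << reg.hasConverged ()
              << ", fitness score: " << reg.getFitnessScore () << std::endl;
    std::cout << "final transformation:\n" << reg.getFinalTransformation () << std::endl;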
Example #4
float
Eye3D::dynamic_EMD(const PointCloudPtr target, const vector<float> &wInput,
        PointCloudPtr source, const vector<float> &wOutput)
{
    _viz->clear();
    float radiusArg = 0.5;
    IterativeClosestPoint<PointT, PointT> icp;
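    // Note: setInputWeight (), setOutputWeight () and best_EMD_ used below are not part of
    // the stock pcl::IterativeClosestPoint interface; this example assumes a customized
    // ICP class extended for weighted / EMD-based registration.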
    icp.setInputCloud(source);
    icp.setInputWeight(wInput);
    icp.setInputTarget(target);
    icp.setOutputWeight(wOutput);
    pcl::PointCloud<PointT> result;
    // Set the transformation epsilon (criterion 2)
    icp.setMaximumIterations (150);
    icp.setMaxCorrespondenceDistance (100);
    icp.setTransformationEpsilon (1e-11);
    // Set the euclidean distance difference epsilon (criterion 3)
    icp.setEuclideanFitnessEpsilon (1e-11);
    icp.setRANSACIterations(0); 
    icp.align(result);
    if (!icp.hasConverged()) {
        std::cout << "ICP failed to converged!"<<std::endl;
        return -1;
    }
    if (visualize_EMD) {
        /// visualize the alignment result
        int size = result.points.size();
        // rank the weights
        std::map<float, int> w2rank;
        std::vector<float> wInputRank;
        m_util::rank(wInput, &w2rank);
        for(float w: wInput){
            wInputRank.push_back(w2rank[w] + 1);
        }
        w2rank.clear();
        std::vector<float> wOutputRank;
        m_util::rank(wOutput, &w2rank);
        for(float w: wOutput){
            wOutputRank.push_back(w2rank[w] + 1);
        }
        int f = 50;
        for (int i = 0; i < size; i++) {
            _viz->add_sphere(target->points[i].x/f,target->points[i].y/f,
                    target->points[i].z/f, wInputRank[i]* radiusArg, 0, 255, 0);

            //        _viz->add_sphere(source->points[i].x/f, source->points[i].y/f,
            //                source->points[i].z/f, wOutputRank[i]* radiusArg, 0, 0, 255);

            _viz->add_sphere(result.points[i].x/f, result.points[i].y/f,
                    result.points[i].z/f, wOutputRank[i] * radiusArg, 255, 0, 0);
        }
        //    icp.visualize_correspondence(_viz,0);
        _viz->reset_camera();
    }
    return icp.best_EMD_;
}
Example #5
bool
IncrementalPoseEstimatorFromRgbFeatures::optimizeWithICP(
    pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud_source,
    Pose3D& depth_pose,
    int closest_view_index)
{
    pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud_target = m_image_data[closest_view_index].sampled_cloud;
    pcl::PointCloud<pcl::PointXYZ> cloud_reg;

    Pose3D new_depth_pose = depth_pose;

    ntk_dbg_print(cloud_source->points.size(), 1);
    ntk_dbg_print(cloud_target->points.size(), 1);

    IterativeClosestPoint<PointXYZ, PointXYZ> reg;

    reg.setInputCloud (cloud_target);
    reg.setInputTarget (cloud_source);

    reg.setMaximumIterations (50);
    reg.setTransformationEpsilon (1e-5);
    reg.setMaxCorrespondenceDistance (0.5);
    reg.align (cloud_reg);

    if (!reg.hasConverged())
    {
        ntk_dbg(1) << "ICP did not converge, ignoring.";
        return false;
    }

    ntk_dbg_print(reg.getFitnessScore(), 1);

    Eigen::Matrix4f t = reg.getFinalTransformation ();
    cv::Mat1f T(4,4);
    //toOpencv(t,T);
    for (int r = 0; r < 4; ++r)
        for (int c = 0; c < 4; ++c)
            T(r,c) = t(r,c);

    Pose3D icp_pose;
    icp_pose.setCameraTransform(T);

    new_depth_pose.applyTransformAfter(icp_pose);
    depth_pose = new_depth_pose;
    return true;
}
Example #6
bool RelativePoseEstimatorICP::estimateNewPose(const RGBDImage& image)
{
    if (m_ref_cloud.points.size() < 1)
    {
        ntk_dbg(1) << "Reference cloud was empty";
        return false;
    }

    PointCloud<PointXYZ>::Ptr target = m_ref_cloud.makeShared();
    PointCloud<PointXYZ>::Ptr source (new PointCloud<PointXYZ>());
    rgbdImageToPointCloud(*source, image);

    PointCloud<PointXYZ>::Ptr filtered_source (new PointCloud<PointXYZ>());
    PointCloud<PointXYZ>::Ptr filtered_target (new PointCloud<PointXYZ>());

    pcl::VoxelGrid<pcl::PointXYZ> grid;
    grid.setLeafSize (m_voxel_leaf_size, m_voxel_leaf_size, m_voxel_leaf_size);

    grid.setInputCloud(source);
    grid.filter(*filtered_source);

    grid.setInputCloud(target);
    grid.filter(*filtered_target);

    PointCloud<PointXYZ> cloud_reg;
    IterativeClosestPoint<PointXYZ, PointXYZ> reg;
    reg.setMaximumIterations (m_max_iterations);
    reg.setTransformationEpsilon (1e-5);
    reg.setMaxCorrespondenceDistance (m_distance_threshold);
    reg.setInputCloud (filtered_source);
    reg.setInputTarget (filtered_target);
    reg.align (cloud_reg);

    if (!reg.hasConverged())
    {
      ntk_dbg(1) << "ICP did not converge, ignoring.";
      return false;
    }

    Eigen::Matrix4f t = reg.getFinalTransformation ();
    cv::Mat1f T(4,4);
    //toOpencv(t,T);
    for (int r = 0; r < 4; ++r)
        for (int c = 0; c < 4; ++c)
            T(r,c) = t(r,c);

    Pose3D icp_pose;
    icp_pose.setCameraTransform(T);
    ntk_dbg_print(icp_pose.cvTranslation(), 1);

    // Pose3D stores the transformation from 3D space to image.
    // So we have to invert everything so that unprojecting from
    // image to 3D gives the right transformation.
    // H2' = ICP * H1'
    m_current_pose.resetCameraTransform();
    m_current_pose = *image.calibration()->depth_pose;
    m_current_pose.invert();
    m_current_pose.applyTransformAfter(icp_pose);
    m_current_pose.invert();
    return true;
}