// Re-organize an unordered LiDAR scan into an 8-row organized cloud
// (row = scanner layer, column = azimuth bin) and republish it.
void callback(const sensor_msgs::PointCloud2::ConstPtr& pc)
{
  PointCloudT::Ptr cloud = boost::make_shared<PointCloudT>();
  pcl::fromROSMsg(*pc, *cloud);

  size_t height = 8;   // 8 layers
  size_t width = round((start_angle_ - end_angle_) / angular_resolution_) + 1;

  PointT invalid;
  invalid.x = invalid.y = invalid.z = std::numeric_limits<float>::quiet_NaN();
  PointCloudT::Ptr cloud_out = boost::make_shared<PointCloudT>(width, height, invalid);
  cloud_out->is_dense = false;

  for (size_t i = 0; i < cloud->size(); i++)
  {
    const PointT &p = cloud->points[i];
    int col = round((atan2(p.y, p.x) - end_angle_) / angular_resolution_);
    int row = p.layer;

    if (col < 0 || col >= static_cast<int>(width) || row < 0 || row >= static_cast<int>(height))
    {
      ROS_ERROR("Invalid coordinates: (%d, %d) is outside (0, 0)..(%zu, %zu)!", col, row, width, height);
      continue;
    }
    (*cloud_out)[row * width + col] = p;
  }

  sensor_msgs::PointCloud2::Ptr msg = boost::make_shared<sensor_msgs::PointCloud2>();
  pcl::toROSMsg(*cloud_out, *msg);
  msg->header = pc->header;
  pub.publish(msg);
}
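The callback above relies on state that is not shown in this snippet: the publisher pub and the angular parameters. A minimal, hypothetical node wiring might look like the following sketch; the topic names and default angles are assumptions for illustration, not values from the original source.
// Hypothetical globals and entry point for the callback above.
// Topic names and default angles are assumptions, not from the original code.
ros::Publisher pub;
double start_angle_ = M_PI / 4.0;                   // assumed scan start azimuth (rad)
double end_angle_ = -M_PI / 4.0;                    // assumed scan end azimuth (rad)
double angular_resolution_ = 0.25 * M_PI / 180.0;   // assumed angular step (rad)

int main(int argc, char **argv)
{
  ros::init(argc, argv, "organize_cloud");
  ros::NodeHandle nh;
  pub = nh.advertise<sensor_msgs::PointCloud2>("points_organized", 1);
  ros::Subscriber sub = nh.subscribe("points_raw", 1, callback);
  ros::spin();
  return 0;
}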
void move_to_frame(const PointCloudT::Ptr &input, const string &target_frame, PointCloudT::Ptr &output) {
    ROS_INFO("Transforming Input Point Cloud to %s frame...", target_frame.c_str());
    ROS_INFO("    Input Cloud Size: %zu", input->size());
    if (input->header.frame_id == target_frame) {
        output = input;
        return;
    }
    while (ros::ok()) {
        tf::StampedTransform stamped_transform;
        try {
            // Look up transform
            tf_listener->lookupTransform(target_frame, input->header.frame_id, ros::Time(0), stamped_transform);

            // Apply transform
            pcl_ros::transformPointCloud(*input, *output, stamped_transform);

            // Store Header Details
            output->header.frame_id = target_frame;
            pcl_conversions::toPCL(ros::Time::now(), output->header.stamp);

            break;
        }
        // Keep trying until the transform becomes available
        catch (tf::TransformException &ex) {
            ROS_ERROR_THROTTLE(1, "%s", ex.what());
            ROS_WARN_THROTTLE(1, "    Waiting for transform from cloud frame (%s) to %s frame. Trying again",
                              input->header.frame_id.c_str(), target_frame.c_str());
            ros::Duration(0.1).sleep();  // don't spin at full speed while waiting
            continue;
        }
    }
}
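A hypothetical call site for move_to_frame; it assumes the tf_listener used above was created elsewhere (e.g. tf_listener = new tf::TransformListener(); after ros::init), and the frame and function names are illustrative only.
// Hypothetical usage sketch; "base_link" and transform_example are illustrative names.
void transform_example(const PointCloudT::Ptr &raw_cloud)
{
    PointCloudT::Ptr cloud_in_base = boost::make_shared<PointCloudT>();
    move_to_frame(raw_cloud, "base_link", cloud_in_base);
    ROS_INFO("    Output Cloud Size: %zu (frame: %s)", cloud_in_base->size(),
             cloud_in_base->header.frame_id.c_str());
}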
double calculate_density(const PointCloudT::Ptr &cloud, const bwi_perception::BoundingBox &box) {
    // TODO: Calculate true volume
    // If the cloud is one point thick in some dimension, we'll assign that dimension a magnitude of 1cm
    double x_dim = std::max<double>(std::fabs(box.max[0] - box.min[0]), 0.01);
    double y_dim = std::max<double>(std::fabs(box.max[1] - box.min[1]), 0.01);
    double z_dim = std::max<double>(std::fabs(box.max[2] - box.min[2]), 0.01);
    double volume = x_dim * y_dim * z_dim;
    return (double) cloud->size() / volume;
}
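A hypothetical call site for calculate_density; the bwi_perception::BoundingBox construction is an assumption (the indexing above only implies Eigen-style min/max members).
// Hypothetical usage sketch; assumes box.min / box.max are publicly writable
// Eigen vectors, as the indexing in calculate_density suggests.
bwi_perception::BoundingBox box;
box.min = Eigen::Vector4f(0.0f, 0.0f, 0.0f, 1.0f);
box.max = Eigen::Vector4f(0.1f, 0.1f, 0.1f, 1.0f);  // a 10 cm cube
// A 1000-point cloud inside this box gives 1000 / (0.1 * 0.1 * 0.1) = 1e6 points/m^3.
double density = calculate_density(segmented_cloud, box);  // segmented_cloud: some PointCloudT::Ptr obtained elsewhere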
void PlayWindow::ShowPC(PointCloudT::Ptr PC)
{
    updateModelMutex.lock();

    if (ui->checkBox_ShowPC->isChecked())
    {
        if (!viewer->updatePointCloud(PC))
            viewer->addPointCloud(PC);

        // Move the rendered cloud to the sensor pose stored in the cloud
        Eigen::Matrix4f currentPose = Eigen::Matrix4f::Identity();
        currentPose.block(0, 0, 3, 3) = PC->sensor_orientation_.matrix();
        currentPose.block(0, 3, 3, 1) = PC->sensor_origin_.head(3);
        viewer->updatePointCloudPose("cloud", Eigen::Affine3f(currentPose));
    }

    ui->qvtkWidget->update();

    updateModelMutex.unlock();
    // Timing: rolling average of display latency over the last 60 frames
    times.push_back(QDateTime::currentDateTime().toMSecsSinceEpoch() - PC->header.stamp);
    if (times.size() > 60)
        times.erase(times.begin());
    double mean = std::accumulate(times.begin(), times.end(), 0.0) / times.size();

    if (ui->checkBox_SavePC->isChecked())
        pcl::io::savePCDFileASCII(PC->header.frame_id, *PC);

    std::vector<int> ind;
    pcl::removeNaNFromPointCloud(*PC, *PC, ind);
    ui->label_nPoint->setText(QString("Nb Point : %1").arg(PC->size()));
    ui->label_time->setText(QString("Time (ms) : %1").arg(mean));
    ui->label_fps->setText(QString("FPS (Hz) : %1").arg(1000.0 / mean));
}
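The alignment examples that follow use type aliases that are not included in this snippet. A plausible set, matching the PCL "Robust pose estimation of rigid objects" tutorial they are based on (FPFH features on pcl::PointNormal points), would be:
// Assumed type aliases (not part of the original snippet); they follow the
// PCL alignment tutorial that the examples below appear to be based on.
typedef pcl::PointNormal PointNT;
typedef pcl::PointCloud<PointNT> PointCloudT;
typedef pcl::FPFHSignature33 FeatureT;
typedef pcl::FPFHEstimationOMP<PointNT,PointNT,FeatureT> FeatureEstimationT;
typedef pcl::PointCloud<FeatureT> FeatureCloudT;
typedef pcl::visualization::PointCloudColorHandlerCustom<PointNT> ColorHandlerT;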
// Align a rigid object to a scene with clutter and occlusions
int
main (int argc, char **argv)
{
  // Point clouds
  PointCloudT::Ptr object (new PointCloudT);
  PointCloudT::Ptr object_aligned (new PointCloudT);
  PointCloudT::Ptr scene (new PointCloudT);
  FeatureCloudT::Ptr object_features (new FeatureCloudT);
  FeatureCloudT::Ptr scene_features (new FeatureCloudT);
  
  // Get input object and scene
  if (argc != 3)
  {
    pcl::console::print_error ("Syntax is: %s object.pcd scene.pcd\n", argv[0]);
    return (1);
  }
  
  // Load object and scene
  pcl::console::print_highlight ("Loading point clouds...\n");
  if (pcl::io::loadPCDFile<PointNT> (argv[1], *object) < 0 ||
      pcl::io::loadPCDFile<PointNT> (argv[2], *scene) < 0)
  {
    pcl::console::print_error ("Error loading object/scene file!\n");
    return (1);
  }
  
  // Downsample
  pcl::console::print_highlight ("Downsampling...\n");
  pcl::VoxelGrid<PointNT> grid;
  const float leaf = 0.005f;
  grid.setLeafSize (leaf, leaf, leaf);
  grid.setInputCloud (object);
  grid.filter (*object);
  grid.setInputCloud (scene);
  grid.filter (*scene);
  
  // Estimate normals for scene
  pcl::console::print_highlight ("Estimating scene normals...\n");
  pcl::NormalEstimationOMP<PointNT,PointNT> nest;
  nest.setRadiusSearch (0.01);
  nest.setInputCloud (scene);
  nest.compute (*scene);
  
  // Estimate features
  pcl::console::print_highlight ("Estimating features...\n");
  FeatureEstimationT fest;
  fest.setRadiusSearch (0.025);
  fest.setInputCloud (object);
  fest.setInputNormals (object);
  fest.compute (*object_features);
  fest.setInputCloud (scene);
  fest.setInputNormals (scene);
  fest.compute (*scene_features);
  
  // Perform alignment
  pcl::console::print_highlight ("Starting alignment...\n");
  pcl::SampleConsensusPrerejective<PointNT,PointNT,FeatureT> align;
  align.setInputSource (object);
  align.setSourceFeatures (object_features);
  align.setInputTarget (scene);
  align.setTargetFeatures (scene_features);
  align.setMaximumIterations (100000); // Number of RANSAC iterations
  align.setNumberOfSamples (3); // Number of points to sample for generating/prerejecting a pose
  align.setCorrespondenceRandomness (5); // Number of nearest features to use
  align.setSimilarityThreshold (0.9f); // Polygonal edge length similarity threshold
  align.setMaxCorrespondenceDistance (2.5f * leaf); // Inlier threshold
  align.setInlierFraction (0.25f); // Required inlier fraction for accepting a pose hypothesis
  {
    pcl::ScopeTime t("Alignment");
    align.align (*object_aligned);
  }
  
  if (align.hasConverged ())
  {
    // Print results
    printf ("\n");
    Eigen::Matrix4f transformation = align.getFinalTransformation ();
    pcl::console::print_info ("    | %6.3f %6.3f %6.3f | \n", transformation (0,0), transformation (0,1), transformation (0,2));
    pcl::console::print_info ("R = | %6.3f %6.3f %6.3f | \n", transformation (1,0), transformation (1,1), transformation (1,2));
    pcl::console::print_info ("    | %6.3f %6.3f %6.3f | \n", transformation (2,0), transformation (2,1), transformation (2,2));
    pcl::console::print_info ("\n");
    pcl::console::print_info ("t = < %0.3f, %0.3f, %0.3f >\n", transformation (0,3), transformation (1,3), transformation (2,3));
    pcl::console::print_info ("\n");
    pcl::console::print_info ("Inliers: %i/%i\n", align.getInliers ().size (), object->size ());
    
    // Show alignment
    pcl::visualization::PCLVisualizer visu("Alignment");
    visu.addPointCloud (scene, ColorHandlerT (scene, 0.0, 255.0, 0.0), "scene");
    visu.addPointCloud (object_aligned, ColorHandlerT (object_aligned, 0.0, 0.0, 255.0), "object_aligned");
    visu.spin ();
  }
  else
  {
    pcl::console::print_error ("Alignment failed!\n");
    return (1);
  }
  
  return (0);
}
Example #6
File: alp_2.cpp  Project: ehuang3/apc_ros
// Align a rigid object to a scene with clutter and occlusions
int main (int argc, char **argv)
{

  // Point clouds
  PointCloudT::Ptr object (new PointCloudT);
  PointCloudT::Ptr scene (new PointCloudT);
  PointCloudT::Ptr object_aligned (new PointCloudT);

  // Get input object and scene
  if (argc < 3)
  {
    pcl::console::print_error ("Syntax is: %s object.pcd scene.pcd\n", argv[0]);
    return (1);
  }
  pcl::console::parse_argument (argc, argv, "--max_iterations", in_max_iterations);
  pcl::console::parse_argument (argc, argv, "--num_samples", in_num_samples);
  pcl::console::parse_argument (argc, argv, "--s_thresh", in_similarity_thresh);
  pcl::console::parse_argument (argc, argv, "--max_cdist", in_max_corresp_dist);
  pcl::console::parse_argument (argc, argv, "--inlier_frac", in_inlier_frac);
  pcl::console::parse_argument (argc, argv, "--leaf", in_leaf);
  pcl::console::parse_argument (argc, argv, "--normal_radius", normal_radius);
  pcl::console::parse_argument (argc, argv, "--feature_radius", feature_radius);

  pcl::console::parse_argument (argc, argv, "--icp", in_icp);
  pcl::console::parse_argument (argc, argv, "--max_corr_icp", max_corr_icp);
  pcl::console::parse_argument (argc, argv, "--icp_eps", max_eps_icp);

  // Load object and scene
  pcl::console::print_highlight ("Loading point clouds...\n");

  pcl_tools::cloud_from_stl(argv[2], *object);

  if (pcl::io::loadPCDFile<PointNT> (argv[1], *scene) < 0)
  {
    pcl::console::print_error ("Error loading object/scene file!\n");
    return (1);
  }

  std::vector<int> indices;
  pcl::removeNaNFromPointCloud(*scene, *scene, indices);
  pcl::removeNaNFromPointCloud(*object, *object, indices);

  // /*pcl_tools::icp_result align = */alp_align(object, scene, object_aligned, 50000, 3, 0.9f, 5.5f * leaf, 0.7f);
  // /*pcl_tools::icp_result align = */alp_align(object_aligned, scene, object_aligned, 50000, 3, 0.9f, 7.5f * leaf, 0.4f);

  std::cout << "Inlier frac " << in_inlier_frac << std::endl;
  pcl_tools::icp_result align = alp_align(object, scene, object_aligned, in_max_iterations, in_num_samples, in_similarity_thresh, in_max_corresp_dist, in_inlier_frac, in_leaf);

  pcl::visualization::PCLVisualizer visu("Alignment");
  if (align.converged)
  {
    pcl::console::print_info ("Inliers: %i/%i, scene: %i\n", align.inliers, object->size (), scene->size ());
    
    // Show alignment
    visu.addPointCloud (object, ColorHandlerT (object, 255.0, 0.0, 0.0), "object");
    visu.addPointCloud (scene, ColorHandlerT (scene, 0.0, 255.0, 0.0), "scene");
    visu.addPointCloud (object_aligned, ColorHandlerT (object_aligned, 0.0, 0.0, 255.0), "object_aligned");
    // visu.addPointCloudNormals<PointNT>(object);

    visu.spin ();
  }
  else
  {
    pcl::console::print_error ("Alignment failed!\n");
    return (1);
  }

  if (in_icp) {
    pcl::console::print_highlight ("Applying ICP now\n");
    pcl::IterativeClosestPointNonLinear<PointNT, PointNT> icp;
    // pcl::IterativeClosestPoint<PointNT, PointNT> icp;
    pcl_tools::affine_cloud(Eigen::Vector3f::UnitZ(), 0.0, Eigen::Vector3f(0.0, 0.0, 0.02), *object_aligned, *object_aligned);

    icp.setMaximumIterations (100);
    icp.setMaxCorrespondenceDistance(max_corr_icp);
    icp.setTransformationEpsilon (max_eps_icp);
    icp.setInputSource (object_aligned);
    icp.setInputTarget (scene);
    icp.align (*object_aligned);

    if (icp.hasConverged()) {
      pcl::console::print_highlight ("ICP: Converged with fitness %f\n", icp.getFitnessScore());
    }
    // pcl::visualization::PCLVisualizer visu("Alignment");
    // visu.addPointCloud (object, ColorHandlerT (object, 255.0, 0.0, 0.0), "object");
    // visu.addPointCloud (scene, ColorHandlerT (scene, 0.0, 255.0, 0.0), "scene");
    visu.updatePointCloud (object_aligned, ColorHandlerT (object_aligned, 100.0, 50.0, 200.0), "object_aligned");

    // visu.addPointCloudNormals<PointNT>(object);

    visu.spin ();

  }
  return (0);
}
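The alp_align() helper and the option globals used above are not included in this snippet. The following is a hypothetical reconstruction based on the first example's SampleConsensusPrerejective pipeline and the parameter names passed in; it is not the project's actual implementation, and all default values are assumptions.
// Hypothetical option globals; defaults are assumptions for illustration only.
int in_max_iterations = 50000, in_num_samples = 3, in_icp = 0;
float in_similarity_thresh = 0.9f, in_max_corresp_dist = 0.015f, in_inlier_frac = 0.25f;
float in_leaf = 0.005f, normal_radius = 0.01f, feature_radius = 0.025f;
float max_corr_icp = 0.01f, max_eps_icp = 1e-8f;

// Hypothetical sketch of alp_align(): downsample, estimate normals and FPFH features,
// then run pcl::SampleConsensusPrerejective, mirroring the first example above.
// pcl_tools::icp_result is assumed to expose at least `converged` and `inliers`.
pcl_tools::icp_result alp_align (PointCloudT::Ptr object, PointCloudT::Ptr scene,
                                 PointCloudT::Ptr object_aligned,
                                 int max_iterations, int num_samples,
                                 float similarity_thresh, float max_corresp_dist,
                                 float inlier_frac, float leaf)
{
  // Downsample both clouds to a common resolution
  pcl::VoxelGrid<PointNT> grid;
  grid.setLeafSize (leaf, leaf, leaf);
  grid.setInputCloud (object);
  grid.filter (*object);
  grid.setInputCloud (scene);
  grid.filter (*scene);

  // Surface normals (stored in-place in the PointNormal clouds)
  pcl::NormalEstimationOMP<PointNT,PointNT> nest;
  nest.setRadiusSearch (normal_radius);
  nest.setInputCloud (object);
  nest.compute (*object);
  nest.setInputCloud (scene);
  nest.compute (*scene);

  // FPFH features for source and target
  FeatureCloudT::Ptr object_features (new FeatureCloudT);
  FeatureCloudT::Ptr scene_features (new FeatureCloudT);
  FeatureEstimationT fest;
  fest.setRadiusSearch (feature_radius);
  fest.setInputCloud (object);
  fest.setInputNormals (object);
  fest.compute (*object_features);
  fest.setInputCloud (scene);
  fest.setInputNormals (scene);
  fest.compute (*scene_features);

  // RANSAC-based prerejective alignment
  pcl::SampleConsensusPrerejective<PointNT,PointNT,FeatureT> align;
  align.setInputSource (object);
  align.setSourceFeatures (object_features);
  align.setInputTarget (scene);
  align.setTargetFeatures (scene_features);
  align.setMaximumIterations (max_iterations);
  align.setNumberOfSamples (num_samples);
  align.setCorrespondenceRandomness (5);
  align.setSimilarityThreshold (similarity_thresh);
  align.setMaxCorrespondenceDistance (max_corresp_dist);
  align.setInlierFraction (inlier_frac);
  align.align (*object_aligned);

  pcl_tools::icp_result result;
  result.converged = align.hasConverged ();
  result.inliers = static_cast<int> (align.getInliers ().size ());
  return result;
}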