void LineSegmentCollector::collect(
     const sensor_msgs::PointCloud2::ConstPtr& cloud_msg,
     const jsk_recognition_msgs::ClusterPointIndices::ConstPtr& indices_msg,
     const jsk_recognition_msgs::ModelCoefficientsArray::ConstPtr& coefficients_msg)
 {
   boost::mutex::scoped_lock lock(mutex_);
   //JSK_NODELET_INFO("buffer length: %lu", pointclouds_buffer_.size());
   pointclouds_buffer_.push_back(cloud_msg);
   indices_buffer_.push_back(indices_msg);
   coefficients_buffer_.push_back(coefficients_msg);
   pcl::PointCloud<pcl::PointXYZ>::Ptr
     input_cloud(new pcl::PointCloud<pcl::PointXYZ>);
   pcl::fromROSMsg(*cloud_msg, *input_cloud);
   // buildup segments
   std::vector<pcl::PointIndices::Ptr> input_indices
     = pcl_conversions::convertToPCLPointIndices(indices_msg->cluster_indices);
   std::vector<pcl::ModelCoefficients::Ptr> input_coefficients
     = pcl_conversions::convertToPCLModelCoefficients(
       coefficients_msg->coefficients);
   std::vector<LineSegment::Ptr> new_segments;
   for (size_t i = 0; i < indices_msg->cluster_indices.size(); i++) {
     LineSegment::Ptr segment (new LineSegment(cloud_msg->header,
                                               input_indices[i],
                                               input_coefficients[i],
                                               input_cloud));
     segments_buffer_.push_back(segment);
     new_segments.push_back(segment);
   }
   collectFromBuffers(cloud_msg->header, new_segments);
 }
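The three-message callback above is typically registered through message_filters so that the cloud, the cluster indices and the model coefficients arrive in lock-step. A minimal sketch, assuming subscriber members named sub_cloud_, sub_indices_ and sub_coefficients_ (these names are not in the original):

typedef message_filters::sync_policies::ExactTime<
  sensor_msgs::PointCloud2,
  jsk_recognition_msgs::ClusterPointIndices,
  jsk_recognition_msgs::ModelCoefficientsArray> SyncPolicy;
boost::shared_ptr<message_filters::Synchronizer<SyncPolicy> > sync_;
// wire the three subscribers into one synchronized callback
sync_.reset(new message_filters::Synchronizer<SyncPolicy>(SyncPolicy(100)));
sync_->connectInput(sub_cloud_, sub_indices_, sub_coefficients_);
sync_->registerCallback(
  boost::bind(&LineSegmentCollector::collect, this, _1, _2, _3));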
int main(int argc, char** argv){
  std::vector<int> pcd_filenames;
  std::vector<int> ply_filenames;
  pcd_filenames = pcl::console::parse_file_extension_argument (argc, argv, ".pcd");
  ply_filenames = pcl::console::parse_file_extension_argument (argc, argv, ".ply");
  if (pcd_filenames.size () != 1 || ply_filenames.size () != 1)
  {
    std::cout << "Usage: input_file_path.ply output_file_path.pcd\n";
    exit (-1);
  }
  std::string input_filename = argv[ply_filenames.at(0)];
  std::string output_filename = argv[pcd_filenames.at(0)];

  pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud (new pcl::PointCloud<pcl::PointXYZRGB>());
  
  pcl::PLYReader reader;
  if (reader.read(input_filename, *input_cloud) < 0) //* load the file (read() returns a negative value on failure)
  // if (pcl::io::loadPLYFile<pcl::PointXYZRGB> (input_filename, *input_cloud) == -1) //* load the file
  {
    PCL_ERROR ("Couldn't read input PLY file\n");
    exit (-1);
  }
  std::cout << "Loaded "
    << input_cloud->width * input_cloud->height
    << " data points from input pcd" << std::endl;

  pcl::PCDWriter writer;
  writer.write(output_filename, *input_cloud);
  std::cerr << "Saved " << input_cloud->points.size () << " data points" << std::endl;
}
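An example invocation of the converter above (the binary name ply2pcd is an assumption; argument order does not matter, since the files are matched by extension):

  ./ply2pcd cloud.ply cloud.pcd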
void ObjectDetector::cloud_cb(const sensor_msgs::PointCloud2ConstPtr& input)
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr input_cloud (new pcl::PointCloud<pcl::PointXYZ>);

    // Convert the sensor_msgs/PointCloud2 data to pcl/PointCloud
    pcl::fromROSMsg (*input, *input_cloud);

    // if theta and y_translation are not yet set, then run segmentation
    if(theta == 0.0 && y_translation == 0.0){
        performSegmentation(input_cloud);
    }
    else{
        // downsample the point cloud
        pcl::PointCloud<pcl::PointXYZ>::Ptr downsampled_cloud = downsampleCloud(input_cloud);

        // perform transformation
        pcl::PointCloud<pcl::PointXYZ>::Ptr transformed_cloud = transformCloud(downsampled_cloud);

        // filter floor, walls and noise so only objects remain
        pcl::PointCloud<pcl::PointXYZ>::Ptr object_cloud = filterCloud(transformed_cloud);

        // cluster objects & publish cluster
        clusterCloud(object_cloud);
    }
}
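downsampleCloud, transformCloud and filterCloud are not shown in this excerpt; as an illustration, a minimal VoxelGrid-based sketch of what downsampleCloud might look like (the 1 cm leaf size is an assumption):

pcl::PointCloud<pcl::PointXYZ>::Ptr ObjectDetector::downsampleCloud(
    const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud)
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr downsampled (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::VoxelGrid<pcl::PointXYZ> voxel_grid;     // requires <pcl/filters/voxel_grid.h>
    voxel_grid.setInputCloud(cloud);
    voxel_grid.setLeafSize(0.01f, 0.01f, 0.01f);  // leaf size is an assumption
    voxel_grid.filter(*downsampled);
    return downsampled;
}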
void FruitDetector::cloud_cb_(const PointCloudTConstPtr& cloud_msg) {
  PointCloudTPtr input_cloud (new PointCloudT);
  // vector containing different clusters extracted from the input_cloud
  std::vector<PointCloudTPtr> clusters;
  pcl::copyPointCloud(*cloud_msg, *input_cloud);
  cluster_extractor_->setInputCloud(input_cloud);
  cluster_extractor_->extract(clusters);

  // clustering: early rejection is already handled by the clusterizer
  // parameters (setMinClusterSize)

  // alignment (for each cluster)
  size_t sz = clusters.size();
  std::cout << "found #" << sz << " clusters" << std::endl;

  for(size_t i=0; i<sz; ++i) {
    if( aligner_->align(clusters[i]) ) {
      Eigen::Matrix3f rot = aligner_->getFinalRotation();
      if(earlyRejectAlignment(rot))
        continue;
      // The new[] that used to sit here leaked immediately, since the
      // pointer was overwritten by getFruitCenter()'s return value.
      float *center = aligner_->getFruitCenter();
      fruit_id foundId = searchFruit(center);
      std::cout << "foundId: " << foundId << std::endl;
      if(foundId == -1) {
        // This is a unique new fruit. Add it to the vector
        fruits_.push_back(boost::shared_ptr<Fruit>(new Fruit(center, rot)));
        PointCloudNT::Ptr aligned_model (new PointCloudNT);
        aligner_->getAlignedModel(aligned_model);
        // create messages to pass the result
        geometry_msgs::Vector3 msg_fruit_center;
        msg_fruit_center.x = center[0];
        msg_fruit_center.y = center[1];
        msg_fruit_center.z = center[2];
        // publish the fruit center point data
        fruit_center_pub_.publish(msg_fruit_center);
        // and publish the aligned pointcloud
        aligned_model_pub_.publish(*aligned_model);
        // TODO remove the sleep time
        ros::Duration(2.0).sleep();
      }
      else {
        // the approximation is close to one of the previous fruits;
        // index by the matched fruit id rather than the cluster index
        if(!enhanceAlignment(rot, fruits_[foundId]))
          continue;
      }

    }
  }
}
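earlyRejectAlignment is called above but not defined in this excerpt; a plausible sketch that rejects alignments whose rotation strays too far from identity (the 45-degree threshold is an assumption):

bool FruitDetector::earlyRejectAlignment(const Eigen::Matrix3f& rot)
{
  // magnitude of the rotation, via its axis-angle representation
  Eigen::AngleAxisf axis_angle(rot);
  const float kMaxAngle = M_PI / 4.0f;  // threshold is an assumption
  return std::fabs(axis_angle.angle()) > kMaxAngle;
}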
Example #5
TEST(suturo_perception_mbpe, empty_pointcloud)
{
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);

  std::string package_path = ros::package::getPath("suturo_perception_mbpe");
  std::string object_table_normal_path = "test_files/correctly_segmented_box.pcd_table_normal";
  Eigen::Vector4f table_normal = getTableNormalFromStringLine(object_table_normal_path,package_path);
  std::cout << "Using table normal: " << table_normal << std::endl;

  // Prepare the model that should be matched against the input cloud
  boost::shared_ptr<std::vector<suturo_msgs::Object> > objects(new std::vector<suturo_msgs::Object>);
  suturo_msgs::Object obj;
  obj.name="red_cube";
  obj.color="ff0000";
  obj.description="a red cube";
  obj.surface_material = suturo_msgs::Object::ALUMINIUM;

  shape_msgs::SolidPrimitive shape1;
  geometry_msgs::Pose pose1;
  shape1.type = shape1.BOX;
  // 0.05 x 0.05 x 0.05
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  pose1.position.x = 0;
  pose1.position.y = 0;
  pose1.position.z = 0;
  pose1.orientation.x = 0;
  pose1.orientation.y = 0;
  pose1.orientation.z = 0;
  pose1.orientation.w = 1;
  obj.primitives.push_back(shape1);
  obj.primitive_poses.push_back(pose1);
  objects->push_back(obj);

  suturo_perception::PipelineData::Ptr data_;
  suturo_perception::PipelineObject::Ptr object_;
  ModelPoseEstimation mpe(objects,data_,object_);
  mpe.setInputCloud(input_cloud);
  mpe.setSurfaceNormal(table_normal); // Test this
  // mpe.setRemoveNaNs(true);
  mpe.execute();

  // std::cout << "Fitness for nan matching: " << mpe.getFitnessScore() << std::endl;
  // The estimation should fail on an empty input cloud
  ASSERT_FALSE( mpe.poseEstimationSuccessful() );
  SUCCEED();
}
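getTableNormalFromStringLine is shared by the tests in this file but not defined here; a minimal sketch, assuming the *_table_normal file stores a single whitespace-separated "a b c d" plane equation:

Eigen::Vector4f getTableNormalFromStringLine(std::string file_path,
                                             std::string package_path)
{
  Eigen::Vector4f normal(0, 0, 0, 0);
  std::ifstream file((package_path + "/" + file_path).c_str());
  if (file)
    file >> normal[0] >> normal[1] >> normal[2] >> normal[3];
  return normal;
}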
 void SelectedClusterPublisher::extract(const sensor_msgs::PointCloud2::ConstPtr& input,
                                        const jsk_pcl_ros::ClusterPointIndices::ConstPtr& indices,
                                        const jsk_pcl_ros::Int32Stamped::ConstPtr& index)
 {
   if (indices->cluster_indices.size() <= (size_t)index->data) {
     NODELET_ERROR("the selected index %d is out of range for the cluster array of size %lu",
                   index->data,
                   indices->cluster_indices.size());
     return;
   }
   pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud (new pcl::PointCloud<pcl::PointXYZRGB>());
   pcl::fromROSMsg(*input, *input_cloud);
   pcl::ExtractIndices<pcl::PointXYZRGB> extract;
   pcl::PointIndices::Ptr pcl_indices (new pcl::PointIndices);
   pcl_indices->indices = indices->cluster_indices[index->data].indices;
   extract.setInputCloud(input_cloud);
   extract.setIndices(pcl_indices);
   pcl::PointCloud<pcl::PointXYZRGB>::Ptr extracted_cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
   extract.filter(*extracted_cloud);
   sensor_msgs::PointCloud2 ros_msg;
   pcl::toROSMsg(*extracted_cloud, ros_msg);
   ros_msg.header = input->header;
   pub_.publish(ros_msg);
 }
Example #7
int
main (int argc, char** argv)
{
  bool second_rotation_performed = false;
  bool visualize_results = true;

  boost::posix_time::ptime t_s = boost::posix_time::microsec_clock::local_time();

  std::vector<int> filenames;
  filenames = pcl::console::parse_file_extension_argument (argc, argv, ".pcd");
  if (filenames.size () != 1)
  {
    std::cout << "Usage: input_file_path.pcd\n";
    exit (-1);
  }
  std::string input_filename = argv[filenames.at(0)];

  pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr original_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);

  // pcl::PointCloud<pcl::PointXYZRGB>::Ptr final (new pcl::PointCloud<pcl::PointXYZRGB>);

  if (pcl::io::loadPCDFile<pcl::PointXYZRGB> (input_filename, *original_cloud) == -1) //* load the file
  {
    PCL_ERROR ("Couldn't read input file\n");
    exit (-1);
  }
  std::cout << "Loaded "
            << original_cloud->width * original_cloud->height
            << " data points from input pcd" << std::endl;

  boost::posix_time::ptime t_file_loaded = boost::posix_time::microsec_clock::local_time();
  // Copy the original cloud to input cloud, which can be modified later during plane extraction
  pcl::copyPointCloud<pcl::PointXYZRGB, pcl::PointXYZRGB>(*original_cloud, *input_cloud);

  CuboidMatcher cm;
  cm.setInputCloud(input_cloud);
  cm.setDebug(true);
  cm.setSaveIntermediateResults(true);
  Cuboid cuboid;
  cm.execute(cuboid);
  boost::posix_time::ptime t_algorithm_done = boost::posix_time::microsec_clock::local_time();
  printDuration(t_file_loaded, t_algorithm_done, "Algorithm Runtime");

  std::cout << "Algorithm done. Visualize results" << std::endl;

  
  std::vector<pcl::PointCloud<pcl::PointXYZRGB>::Ptr> intermediate_clouds;
  intermediate_clouds = cm.getIntermediateClouds();

  if(!visualize_results)
    exit(0);
  // At least one intermediate cloud means the user wants to display them here.
  
  std::cout << "The algorithm did " << intermediate_clouds.size() << " transformation(s)" << std::endl;

  // Create the viewports for the rotated object
  pcl::visualization::PCLVisualizer viewer;
  // Visualize original pointcloud
  int v1(0);
  viewer.createViewPort(0.0, 0.0, 0.3, 1.0, v1);
  viewer.addCoordinateSystem(1.0,v1);

  // Visualize the found inliers
  int v2(1);
  viewer.createViewPort(0.3, 0.0, 0.6, 1.0, v2);
  viewer.addCoordinateSystem(1.0,v2);

  int v3(2);
  viewer.createViewPort(0.6, 0.0, 1.0, 1.0, v3);
  viewer.addCoordinateSystem(0.5,v3);

  // Fill the viewpoints with the desired clouds
  // pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(original_cloud);
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> red_pts (original_cloud, 255,0,0);
  // viewer.addPointCloud<pcl::PointXYZRGB> (original_cloud, rgb, "sample cloud1", v1);
  viewer.addPointCloud<pcl::PointXYZRGB> (original_cloud, red_pts, "sample cloud1", v1);

  if( intermediate_clouds.size() >= 1) 
  {
    // pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb_v2(intermediate_clouds.at(0));
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> red_v2 (intermediate_clouds.at(0), 255,0,0);
    // viewer.addPointCloud<pcl::PointXYZRGB> (intermediate_clouds.at(0), rgb_v2, "first rotated cloud", v2);
    viewer.addPointCloud<pcl::PointXYZRGB> (intermediate_clouds.at(0), red_v2, "first rotated cloud", v2);
  }


  if( intermediate_clouds.size() >= 2) 
  {
    // pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb_v3(intermediate_clouds.at(1));
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> red_v3 (intermediate_clouds.at(1), 255,0,0);
    viewer.addPointCloud<pcl::PointXYZRGB> (intermediate_clouds.at(1), red_v3, "second rotated cloud", v3);
    // viewer.addPointCloud<pcl::PointXYZRGB> (intermediate_clouds.at(1), rgb_v3, "second rotated cloud", v3);
  }

  // Draw the bounding box from the given corner points
  drawBoundingBoxLines(viewer, cuboid.corner_points, v1);

  viewer.addCube(Eigen::Vector3f(0,0,0),
      Eigen::Quaternion<float>::Identity(),
      cuboid.length1,
      cuboid.length2,
      cuboid.length3,
      "matched_cuboid",
      v2
    );

 
  
  viewer.addCube(cuboid.center,
      cuboid.orientation,
      cuboid.length1,
      cuboid.length2,
      cuboid.length3,
      "matched_cuboid_centered",
      v1
    );
  

  viewer.spin();




  /*
  boost::posix_time::ptime t_extraction_done = boost::posix_time::microsec_clock::local_time();
  // --------------------------------------------
  // -----Open 3D viewer and add point cloud-----
  // --------------------------------------------
  //
  // Store the possible different colors for the segmented planes
  std::vector<std::vector<int> > color_sequence;
  std::vector<int> green;
  green.push_back(0);
  green.push_back(255);
  green.push_back(0);
  std::vector<int> red;
  red.push_back(255);
  red.push_back(0);
  red.push_back(0);
  std::vector<int> blue;
  blue.push_back(0);
  blue.push_back(0);
  blue.push_back(255);
  color_sequence.push_back(green);
  color_sequence.push_back(red);
  color_sequence.push_back(blue);
  
  pcl::visualization::PCLVisualizer viewer;
  // Visualize original pointcloud
  int v1(0);
  viewer.createViewPort(0.0, 0.0, 0.2, 1.0, v1);
  viewer.addCoordinateSystem(1.0,v1);
  pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(original_cloud);
  viewer.addPointCloud<pcl::PointXYZRGB> (original_cloud, rgb, "sample cloud1", v1);

  // Visualize the found inliers
  int v2(1);
  viewer.createViewPort(0.2, 0.0, 0.6, 1.0, v2);
  viewer.addCoordinateSystem(1.0,v2);

  int v3(2);
  viewer.createViewPort(0.6, 0.0, 1.0, 1.0, v3);
  viewer.addCoordinateSystem(0.5,v3);

  // For each segmented plane
  for (int i = 0; i < detected_planes.size(); i++) 
  {
    // Select a color from the color_sequence vector for the discovered plane
    int color_idx = i % 3;
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> single_color (detected_planes.at(i).getPoints(), color_sequence.at(color_idx).at(0), color_sequence.at(color_idx).at(1), color_sequence.at(color_idx).at(2));

    std::stringstream cloud_name("plane_cloud_");
    cloud_name << i;
    std::stringstream plane_name("model_plane_");
    plane_name << i;
    viewer.addPointCloud<pcl::PointXYZRGB> (detected_planes.at(i).getPoints(), single_color, cloud_name.str(), v2);
    viewer.addPlane (*detected_planes.at(i).getCoefficients(), plane_name.str(), v2);

    // Display the centroid of the planes
    pcl::PointXYZRGB c;
    c.x = detected_planes.at(i).getCentroid()(0);
    c.y = detected_planes.at(i).getCentroid()(1);
    c.z = detected_planes.at(i).getCentroid()(2);

    pcl::PointCloud<pcl::PointXYZRGB>::Ptr centroid_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
    centroid_cloud->push_back(c);
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> centroid_yellow (centroid_cloud, 
        255,255,0);
    std::stringstream centroid_name("centroid_");
    centroid_name << i;
    viewer.addPointCloud<pcl::PointXYZRGB> (centroid_cloud, centroid_yellow, centroid_name.str(), v2);
    viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE,
        5, centroid_name.str());

    // Display the plane normal with the camera origin as the origin
    {
      pcl::PointXYZRGB origin;
      origin.x=0;
      origin.y=0;
      origin.z=0;

      // Get the normal from the plane
      pcl::PointXYZRGB dest;
      dest.x = detected_planes.at(i).getCoefficients()->values.at(0);
      dest.y = detected_planes.at(i).getCoefficients()->values.at(1);
      dest.z = detected_planes.at(i).getCoefficients()->values.at(2);

      pcl::PointCloud<pcl::PointXYZRGB>::Ptr origin_cloud (new pcl::PointCloud<pcl::PointXYZRGB>);
      origin_cloud->push_back(origin);
      pcl::PointCloud<pcl::PointXYZRGB>::Ptr origin_cloud_projected (new pcl::PointCloud<pcl::PointXYZRGB>);
      // Project the origin point to the plane
      pcl::ProjectInliers<pcl::PointXYZRGB> proj;
      proj.setModelType (pcl::SACMODEL_PLANE);
      proj.setInputCloud (origin_cloud);
      proj.setModelCoefficients (detected_planes.at(i).getCoefficients());
      proj.filter (*origin_cloud_projected);
      if(origin_cloud_projected->points.size()!=1)
      {
        std::cout << "Projection fail" << std::endl;
        return 0;
      }

      std::stringstream plane_name("norm_plane_");
      plane_name << i;
      viewer.addLine(origin_cloud_projected->points.at(0),dest, color_sequence.at(color_idx).at(0), color_sequence.at(color_idx).at(1), color_sequence.at(color_idx).at(2), plane_name.str(),v2);
      Eigen::Vector3f norm_origin(
origin_cloud_projected->points.at(0).x,
origin_cloud_projected->points.at(0).y,
origin_cloud_projected->points.at(0).z
          );
      detected_planes.at(i).setNormOrigin(norm_origin);
    }

  }
  // Build a coordinate system for biggest plane
  // Step 1) Visualize the coordinate system
  //
  // Transform the destination of the norm vector, to let it point in a 90° Angle from the
  // centroid to its destination
  Eigen::Vector3f newDest = moveVectorBySubtraction( 
       detected_planes.at(0).getCoefficientsAsVector3f(),
       // The Centroid of the plane cloud
       detected_planes.at(0).getCentroidAsVector3f(),
       // And the center of the norm origin
       detected_planes.at(0).getNormOrigin()
      );
  // Translate the norm vector of the second plane to the centroid of the first plane
  viewer.addLine(
      getPointXYZFromVector4f(detected_planes.at(0).getCentroid()),
      getPointXYZFromVector3f(newDest),
        0, 255, 0, "f1",v2);

  Eigen::Vector3f secondDest = moveVectorBySubtraction( 
       detected_planes.at(1).getCoefficientsAsVector3f(),
       // The Centroid of the plane cloud
       detected_planes.at(0).getCentroidAsVector3f(),
       // And the center of the norm origin
       detected_planes.at(1).getNormOrigin()
      );
  // Translate the norm vector of the second plane to the centroid of the first plane
  viewer.addLine(
      getPointXYZFromVector4f(detected_planes.at(0).getCentroid()),
      getPointXYZFromVector3f(secondDest),
        255, 0, 0, "f2",v2);

  // Create the third axis by using the cross product
  Eigen::Vector3f thirdDest = newDest.cross(secondDest);
  viewer.addLine(
      getPointXYZFromVector4f(detected_planes.at(0).getCentroid()),
      getPointXYZFromVector3f(thirdDest),
        0, 0, 255, "f3",v2);



  //
  // ROTATE THE OBJECT to align it with the x-y and the x-z plane
  // TODO: dynamic
  //
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr rotated_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
  // Align the front face with the cameras front plane
  {
    for (int i = 0; i < detected_planes.size(); i++) 
    {
      std::cout << "Angle between Normal " << i << " and x-y Normal ";

      Eigen::Vector3f n2( 0,0,1);
      float angle = detected_planes.at(i).angleBetween(n2);
      std::cout << ": " << angle << " RAD, " << ((angle * 180) / M_PI) << " DEG";
      std::cout << std::endl;
    }
    std::cout << "Angle between Normal 0 and x-y Normal ";

    Eigen::Vector3f n2( 0,0,1);
    float angle = detected_planes.at(0).angleBetween(n2);

    pcl::PointXYZRGB dest;
    dest.x = detected_planes.at(0).getCoefficients()->values.at(0);
    dest.y = detected_planes.at(0).getCoefficients()->values.at(1);
    dest.z = detected_planes.at(0).getCoefficients()->values.at(2);

    // Translate the first plane's origin to the camera origin
    translatePointCloud(original_cloud,
        -detected_planes.at(0).getCentroid()[0],
        -detected_planes.at(0).getCentroid()[1],
        -detected_planes.at(0).getCentroid()[2],
        rotated_cloud);


    // M
    Eigen::Vector3f plane_normal(dest.x, dest.y, dest.z);

    // Compute the necessary rotation to align a face of the object with the camera's
    // imaginary image plane
    // N
    Eigen::Vector3f camera_normal;
    camera_normal(0)=0;
    camera_normal(1)=0;
    camera_normal(2)=1;

    camera_normal = CuboidMatcher::reduceNormAngle(plane_normal, camera_normal);

    // float dotproduct3 = camera_normal.dot(plane_normal);
    // if(acos(dotproduct3)> M_PI/2)
    // {
    //   std::cout << "NORM IS ABOVE 90 DEG! TURN IN THE OTHER DIRECTION" << std::endl;
    //   camera_normal = -camera_normal;
    //   // rotated_normal_of_second_plane = - rotated_normal_of_second_plane;
    // }

    
    Eigen::Matrix< float, 4, 4 > rotationBox = 
      rotateAroundCrossProductOfNormals(camera_normal, plane_normal);
    
    pcl::transformPointCloud (*rotated_cloud, *rotated_cloud, rotationBox);   

    // Draw the rotated object in the first window
    pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb_r(rotated_cloud);
    viewer.addPointCloud<pcl::PointXYZRGB> (rotated_cloud, rgb_r, "rotated_cloud", v2);

    // Rotate the normal of the second plane with the first rotation matrix
    Eigen::Vector3f normal_of_second_plane =
      detected_planes.at(1).getCoefficientsAsVector3f();

    Eigen::Vector3f rotated_normal_of_second_plane;
    rotated_normal_of_second_plane = 
      removeTranslationVectorFromMatrix(rotationBox) * normal_of_second_plane;

    // Now align the second normal (which has been rotated) with the x-z plane
    Eigen::Vector3f xz_plane;
    xz_plane(0)=0;
    xz_plane(1)=1;
    xz_plane(2)=0;

    std::cout << "Angle between rotated normal and xz-plane ";
    float dotproduct2 = xz_plane.dot(rotated_normal_of_second_plane);
    std::cout << ": " << acos(dotproduct2) << " RAD, " << ((acos(dotproduct2) * 180) / M_PI) << " DEG";
    std::cout << std::endl;

    if(acos(dotproduct2)> M_PI/2)
    {
      std::cout << "NORM IS ABOVE 90 DEG! TURN IN THE OTHER DIRECTION" << std::endl;
      rotated_normal_of_second_plane = - rotated_normal_of_second_plane;
    }

    
    float angle_between_xz_and_second_normal = ((acos(dotproduct2) * 180) / M_PI);
    Eigen::Matrix< float, 4, 4 > secondRotation;
    if(angle_between_xz_and_second_normal > MIN_ANGLE &&
        angle_between_xz_and_second_normal < 180-MIN_ANGLE)
    {
    
      // Rotate the original centroid
      Eigen::Vector4f offset_between_centroids =
          detected_planes.at(1).getCentroid() - detected_planes.at(0).getCentroid();
      Eigen::Vector3f rotated_offset =
        removeTranslationVectorFromMatrix(rotationBox) * getVector3fFromVector4f(offset_between_centroids);
   
      
      // translatePointCloud(rotated_cloud, 
      //     - rotated_offset[0],
      //     - rotated_offset[1],
      //     - rotated_offset[2],
      //     rotated_cloud);
          

      secondRotation = 
        rotateAroundCrossProductOfNormals(xz_plane, rotated_normal_of_second_plane);

      pcl::transformPointCloud (*rotated_cloud, *rotated_cloud, secondRotation);   
      second_rotation_performed = true;
    }
    else
    {
      secondRotation = Eigen::Matrix< float, 4, 4 >::Identity();
      second_rotation_performed = false;
      std::cout << "No second rotation, since the angle is too small: "<< angle_between_xz_and_second_normal << "DEG" << std::endl;
    }




    pcl::PointXYZ origin(0,0,0);
    viewer.addLine(origin, getPointXYZFromVector3f(rotated_normal_of_second_plane) , 255, 0, 0, "normal1",v2);

    // Draw the rotated object
    pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb_r2(rotated_cloud);
    viewer.addPointCloud<pcl::PointXYZRGB> (rotated_cloud, rgb_r2, "rotated_cloud_second", v3);

    // Compute the bounding box for the rotated object
    pcl::PointXYZRGB min_pt, max_pt;
    pcl::getMinMax3D(*rotated_cloud, min_pt, max_pt);
    
    // Compute the bounding box edge points
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr manual_bounding_box(new pcl::PointCloud<pcl::PointXYZRGB>);
    computeCuboidCornersWithMinMax3D(rotated_cloud,manual_bounding_box);

    // Draw the bounding box edge points
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> red_pts (manual_bounding_box, 255,0,0);
    viewer.addPointCloud<pcl::PointXYZRGB> (manual_bounding_box, red_pts, "bb", v3);
    viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 10, "bb");

    // Now reverse all the transformations to get the bounding box around the actual object
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr bounding_box_on_object(new pcl::PointCloud<pcl::PointXYZRGB>);
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGB> white_pts (bounding_box_on_object, 255,255,255);

    // Inverse the last rotation
    pcl::transformPointCloud (*manual_bounding_box, *bounding_box_on_object, secondRotation.transpose());   

    // Translate back to the first centroid, if this was done
    if(second_rotation_performed)
    {
      Eigen::Vector4f offset_between_centroids =
        detected_planes.at(1).getCentroid() - detected_planes.at(0).getCentroid();
      Eigen::Vector3f rotated_offset =
        removeTranslationVectorFromMatrix(rotationBox) * getVector3fFromVector4f(offset_between_centroids);
      // translatePointCloud(bounding_box_on_object, 
      //     rotated_offset[0], // Translation is now NOT negative
      //     rotated_offset[1],
      //     rotated_offset[2],
      //     bounding_box_on_object);
    }
    // Translated to first centroid
    
    
    // Inverse the second rotation
    pcl::transformPointCloud (*bounding_box_on_object, *bounding_box_on_object, rotationBox.transpose());   

    // And translate back to the original position
    translatePointCloud(bounding_box_on_object, 
        detected_planes.at(0).getCentroid()[0],  
        detected_planes.at(0).getCentroid()[1], 
        detected_planes.at(0).getCentroid()[2], 
        bounding_box_on_object);

    viewer.addPointCloud<pcl::PointXYZRGB> (bounding_box_on_object, white_pts, "bb_real", v3);
    viewer.setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 10, "bb_real");
    drawBoundingBoxLines(viewer, bounding_box_on_object, v3);
    // And finally, render the original cloud
    pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb_v3(original_cloud);
    viewer.addPointCloud<pcl::PointXYZRGB> (original_cloud, rgb_v3, "original_cloud", v3);

    std::cout << "Cuboid statistics" << std::endl;
    Cuboid c = computeCuboidFromBorderPoints(bounding_box_on_object);
    std::cout << "Width: " << c.length1 << " Height: " << c.length2 << " Depth: " << c.length3 << " Volume: " << c.volume << " m^3" << std::endl;
  }

  boost::posix_time::ptime t_done = boost::posix_time::microsec_clock::local_time();

  printDuration(t_s, t_file_loaded, "Loaded file");
  printDuration(t_file_loaded, t_extraction_done, "Plane Extraction, Normal Estimation");
  printDuration(t_extraction_done, t_done, "Rotation and Visualization");
  printDuration(t_s, t_done, "Overall");

  // 
  // Create a plane x-y plane that originates in the kinect camera frame
  // 
  pcl::ModelCoefficients::Ptr kinect_plane_coefficients (new pcl::ModelCoefficients);
  // Let the Normal of the plane point along the z-Axis
  kinect_plane_coefficients->values.push_back(0); // x
  kinect_plane_coefficients->values.push_back(0); // y
  kinect_plane_coefficients->values.push_back(1); // z 
  kinect_plane_coefficients->values.push_back(0); // d 

  viewer.addPlane (*kinect_plane_coefficients, "kinect_plane", v2);
  viewer.spin();
*/
  return (0);
}
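printDuration is used above but not shown; a minimal sketch using boost::posix_time:

void printDuration(boost::posix_time::ptime start,
                   boost::posix_time::ptime end,
                   std::string label)
{
  boost::posix_time::time_duration d = end - start;
  std::cout << label << ": " << d.total_milliseconds() << " ms" << std::endl;
}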
  void MultiPlaneExtraction::extract(const sensor_msgs::PointCloud2::ConstPtr& input,
                                     const jsk_pcl_ros::ClusterPointIndices::ConstPtr& indices,
                                     const jsk_pcl_ros::ModelCoefficientsArray::ConstPtr& coefficients,
                                     const jsk_pcl_ros::PolygonArray::ConstPtr& polygons)
  {
    boost::mutex::scoped_lock lock(mutex_); // the lock must be named, or the temporary unlocks immediately
    // convert all to the pcl types
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
    pcl::fromROSMsg(*input, *input_cloud);
    
    // concat indices into one PointIndices
    pcl::PointIndices::Ptr all_indices (new pcl::PointIndices);
    for (size_t i = 0; i < indices->cluster_indices.size(); i++) {
      std::vector<int> one_indices = indices->cluster_indices[i].indices;
      for (size_t j = 0; j < one_indices.size(); j++) {
        all_indices->indices.push_back(one_indices[j]);
      }
    }

    pcl::PointCloud<pcl::PointXYZRGB>::Ptr nonplane_cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
    pcl::ExtractIndices<pcl::PointXYZRGB> extract_nonplane;
    extract_nonplane.setNegative(true);
    extract_nonplane.setInputCloud(input_cloud);
    extract_nonplane.setIndices(all_indices);
    extract_nonplane.filter(*nonplane_cloud);
    nonplane_pub_.publish(nonplane_cloud);
    // for each plane, project nonplane_cloud to the plane and find the points
    // inside of the polygon
    
    std::set<int> result_set;
    for (size_t plane_i = 0; plane_i < coefficients->coefficients.size(); plane_i++) {

      pcl::ExtractPolygonalPrismData<pcl::PointXYZRGB> prism_extract;
      pcl::PointCloud<pcl::PointXYZRGB>::Ptr hull_cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
      geometry_msgs::Polygon the_polygon = polygons->polygons[plane_i].polygon;
      for (size_t i = 0; i < the_polygon.points.size(); i++) {
        pcl::PointXYZRGB p;
        p.x = the_polygon.points[i].x;
        p.y = the_polygon.points[i].y;
        p.z = the_polygon.points[i].z;
        hull_cloud->points.push_back(p);
      }
      
      prism_extract.setInputCloud(nonplane_cloud);
      prism_extract.setHeightLimits(min_height_, max_height_);
      prism_extract.setInputPlanarHull(hull_cloud);
      //pcl::PointCloud<pcl::PointXYZRGB> output;
      pcl::PointIndices output_indices;
      prism_extract.segment(output_indices);
      // append output to result_cloud
      for (size_t i = 0; i < output_indices.indices.size(); i++) {
        result_set.insert(output_indices.indices[i]);
        //result_cloud.points.push_back(output.points[i]);
      }
    }

    // convert std::set to PCLIndicesMsg
    //PCLIndicesMsg output_indices;
    pcl::PointCloud<pcl::PointXYZRGB> result_cloud;
    pcl::PointIndices::Ptr all_result_indices (new pcl::PointIndices());
    for (std::set<int>::iterator it = result_set.begin();
         it != result_set.end();
         it++) {
      all_result_indices->indices.push_back(*it);
    }

    pcl::ExtractIndices<pcl::PointXYZRGB> extract_all_indices;
    extract_all_indices.setInputCloud(nonplane_cloud);
    extract_all_indices.setIndices(all_result_indices);
    extract_all_indices.filter(result_cloud);
    
    sensor_msgs::PointCloud2 ros_result;
    pcl::toROSMsg(result_cloud, ros_result);
    ros_result.header = input->header;
    pub_.publish(ros_result);
  }
Example #9
// Use the fallback feature of MPE to detect the handlebar in the second iteration
TEST(suturo_perception_mbpe, pose_estimation_handlebar_task2_with_fallback)
{
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
  std::string package_path = ros::package::getPath("suturo_perception_mbpe");
  // std::string modelpath  = "test_files/005box_4000pts.pcd";
  std::string objectpath = "test_files/handlebar_task2.pcd";
  std::string object_table_normal_path = "test_files/handlebar_task2.pcd_table_normal";

  if (pcl::io::loadPCDFile<pcl::PointXYZRGB> (package_path + "/" + objectpath, *input_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read input file\n");
    exit (-1);
  }

  Eigen::Vector4f table_normal = getTableNormalFromStringLine(object_table_normal_path,package_path);
  std::cout << "Using table normal: " << table_normal << std::endl;

  // Prepare the model that should be matched against the input cloud
  boost::shared_ptr<std::vector<suturo_msgs::Object> > objects(new std::vector<suturo_msgs::Object>);
  suturo_msgs::Object obj;
  obj.name="blue_handle";
  obj.color="0000ff";
  obj.description="a blue compound of a cylinder with two cubes";
  obj.surface_material = suturo_msgs::Object::ALUMINIUM;

  shape_msgs::SolidPrimitive shape1, shape2, shape3;
  geometry_msgs::Pose pose1, pose2, pose3;
  shape1.type = shape1.BOX;
  // 0.05 x 0.05 x 0.05
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  pose1.position.x = 0;
  pose1.position.y = 0;
  pose1.position.z = 0;
  pose1.orientation.x = 0;
  pose1.orientation.y = 0;
  pose1.orientation.z = 0;
  pose1.orientation.w = 1;

  // Define the extra parts on the handlebar
  shape2.type = shape2.BOX;
  // 0.05 x 0.05 x 0.05
  shape2.dimensions.push_back(0.05f);
  shape2.dimensions.push_back(0.05f);
  shape2.dimensions.push_back(0.05f);
  pose2.position.x = 0;
  pose2.position.y = 0;
  pose2.position.z = 0.35f;
  pose2.orientation.x = 0;
  pose2.orientation.y = 0;
  pose2.orientation.z = 0;
  pose2.orientation.w = 1;

  shape3.type = shape3.CYLINDER;
  shape3.dimensions.push_back(0.3f);
  shape3.dimensions.push_back(0.01f);
  pose3.position.x = 0;
  pose3.position.y = 0;
  pose3.position.z = 0.175f;
  pose3.orientation.x = 0;
  pose3.orientation.y = 0;
  pose3.orientation.z = 0;
  pose3.orientation.w = 1;


  obj.primitives.push_back(shape1);
  obj.primitives.push_back(shape2);
  obj.primitives.push_back(shape3);
  obj.primitive_poses.push_back(pose1);
  obj.primitive_poses.push_back(pose2);
  obj.primitive_poses.push_back(pose3);
  objects->push_back(obj);

  suturo_perception::PipelineData::Ptr data_;
  suturo_perception::PipelineObject::Ptr object_;
  ModelPoseEstimation mpe(objects,data_,object_);
  mpe.setInputCloud(input_cloud);
  mpe.setSurfaceNormal(table_normal);
  // mpe.setInitialAlignmentMethod(ICPFitter::IA_MINMAX);
  std::vector<int> modelsOfInterest;
  modelsOfInterest.push_back(0); // match only the first model; its initial alignment should fail and trigger the fallback
  mpe.setModelsOfInterest(modelsOfInterest);
  mpe.setVoxelSize(0.003f);
  mpe.setFallbackInitialAlignmentEnabled(true);
  mpe.setDumpICPFitterPointclouds(true); // Enable debugging. This will save pointclouds
  mpe.execute();

  std::cout << "Fitness for handlebar matching: " << mpe.getFitnessScore() << std::endl;
  // The estimation should be successful
  ASSERT_TRUE( mpe.poseEstimationSuccessful() );
  // Check the pose (origin.x, origin.y, origin.z)
  // ASSERT_NEAR( mpe.getEstimatedPose()[0],  -0.320974, CAD_RECOGNITION_TEST_ACCEPTABLE_POINT_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[1],  -0.100329, CAD_RECOGNITION_TEST_ACCEPTABLE_POINT_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[2],    1.02429, CAD_RECOGNITION_TEST_ACCEPTABLE_POINT_ERROR );

  // // Check the orientation (x,y,z,w from a Quaternionf)
  // ASSERT_NEAR( mpe.getEstimatedPose()[3],   0.876425, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[4], -0.0972819, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[5],    0.17143, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[6],  -0.439349, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );

  SUCCEED();
}
Example #10
TEST(suturo_perception_mbpe, pose_estimation_cylinder_minmax)
{
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
  std::string package_path = ros::package::getPath("suturo_perception_mbpe");
  // std::string modelpath  = "test_files/005box_4000pts.pcd";
  std::string objectpath = "test_files/correctly_segmented_cylinder.pcd";
  std::string object_table_normal_path = "test_files/correctly_segmented_cylinder.pcd_table_normal";

  if (pcl::io::loadPCDFile<pcl::PointXYZRGB> (package_path + "/" + objectpath, *input_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read input file\n");
    exit (-1);
  }

  Eigen::Vector4f table_normal = getTableNormalFromStringLine(object_table_normal_path,package_path);
  std::cout << "Using table normal: " << table_normal << std::endl;

  // Prepare the model that should be matched against the input cloud
  boost::shared_ptr<std::vector<suturo_msgs::Object> > objects(new std::vector<suturo_msgs::Object>);
  suturo_msgs::Object obj;
  obj.name="green_cylinder";
  obj.color="00ff00";
  obj.description="a green cylinder";
  obj.surface_material = suturo_msgs::Object::ALUMINIUM;

  shape_msgs::SolidPrimitive shape1;
  geometry_msgs::Pose pose1;
  shape1.type = shape1.CYLINDER;
  // 0.1 x 0.02
  shape1.dimensions.push_back(0.1f);
  shape1.dimensions.push_back(0.02f);
  pose1.position.x = 0;
  pose1.position.y = 0;
  pose1.position.z = 0;
  pose1.orientation.x = 0;
  pose1.orientation.y = 0;
  pose1.orientation.z = 0;
  pose1.orientation.w = 1;
  obj.primitives.push_back(shape1);
  obj.primitive_poses.push_back(pose1);
  objects->push_back(obj);

  suturo_perception::PipelineData::Ptr data_;
  suturo_perception::PipelineObject::Ptr object_;
  ModelPoseEstimation mpe(objects,data_,object_);
  mpe.setInputCloud(input_cloud);
  mpe.setSurfaceNormal(table_normal);
  // mpe.setDumpICPFitterPointclouds(true);
  mpe.setInitialAlignmentMethod(ICPFitter::IA_MINMAX);
  mpe.setVoxelSize(0.003f);
  mpe.execute();

  std::cout << "Fitness for cylinder matching: " << mpe.getFitnessScore() << std::endl;
  // The estimation should be successful
  ASSERT_TRUE( mpe.poseEstimationSuccessful() );
  // Check the pose (origin.x, origin.y, origin.z)
  // ASSERT_NEAR( mpe.getEstimatedPose()[0],  -0.110493, CAD_RECOGNITION_TEST_ACCEPTABLE_POINT_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[1],   0.163387, CAD_RECOGNITION_TEST_ACCEPTABLE_POINT_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[2],   0.566805, CAD_RECOGNITION_TEST_ACCEPTABLE_POINT_ERROR );

  // // Check the orientation (x,y,z,w from a Quaternionf)
  // ASSERT_NEAR( mpe.getEstimatedPose()[3],  -0.270409, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[4], 0.00236567, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[5],  -0.227, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );
  // ASSERT_NEAR( mpe.getEstimatedPose()[6],   0.910853, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );

  SUCCEED();
}
Example #11
TEST(suturo_perception_mbpe, pose_estimation_cube)
{
  std::cout << "PE CUBE TEST" << std::endl;
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
  std::string package_path = ros::package::getPath("suturo_perception_mbpe");
  // std::string modelpath  = "test_files/005box_4000pts.pcd";
  std::string objectpath = "test_files/correctly_segmented_box.pcd";
  std::string object_table_normal_path = "test_files/correctly_segmented_box.pcd_table_normal";

  if (pcl::io::loadPCDFile<pcl::PointXYZRGB> (package_path + "/" + objectpath, *input_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read input file\n");
    exit (-1);
  }

  Eigen::Vector4f table_normal = getTableNormalFromStringLine(object_table_normal_path,package_path);
  std::cout << "Using table normal: " << table_normal << std::endl;

  // Prepare the model that should be matched against the input cloud
  boost::shared_ptr<std::vector<suturo_msgs::Object> > objects(new std::vector<suturo_msgs::Object>);
  suturo_msgs::Object obj;
  obj.name="red_cube";
  obj.color="ff0000";
  obj.description="a red cube";
  obj.surface_material = suturo_msgs::Object::ALUMINIUM;

  shape_msgs::SolidPrimitive shape1;
  geometry_msgs::Pose pose1;
  shape1.type = shape1.BOX;
  // 0.05 x 0.05 x 0.05
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  pose1.position.x = 0;
  pose1.position.y = 0;
  pose1.position.z = 0;
  pose1.orientation.x = 0;
  pose1.orientation.y = 0;
  pose1.orientation.z = 0;
  pose1.orientation.w = 1;
  obj.primitives.push_back(shape1);
  obj.primitive_poses.push_back(pose1);
  objects->push_back(obj);

  suturo_perception::PipelineData::Ptr data_;
  suturo_perception::PipelineObject::Ptr object_;
  ModelPoseEstimation mpe(objects,data_,object_);
  mpe.setInputCloud(input_cloud);
  mpe.setSurfaceNormal(table_normal);
  mpe.setVoxelSize(0.003f);
  // mpe.setDumpICPFitterPointclouds(true);
  std::cout << "TEST: BEFORE MPE EXECUTE" << std::endl;
  mpe.execute();
  std::cout << "TEST: AFTER MPE EXECUTE" << std::endl;

  std::cout << "Fitness for cube matching: " << mpe.getFitnessScore() << std::endl;
  // The estimation should be successful
  ASSERT_TRUE( mpe.poseEstimationSuccessful() );
  // Check the pose (origin.x, origin.y, origin.z)
  ASSERT_NEAR( mpe.getEstimatedPose()[0], -0.0634356, CAD_RECOGNITION_TEST_ACCEPTABLE_POINT_ERROR );
  ASSERT_NEAR( mpe.getEstimatedPose()[1], -0.0439471, CAD_RECOGNITION_TEST_ACCEPTABLE_POINT_ERROR );
  ASSERT_NEAR( mpe.getEstimatedPose()[2],    0.73877, CAD_RECOGNITION_TEST_ACCEPTABLE_POINT_ERROR );

  // Check the orientation (x,y,z,w from a Quaternionf)
  ASSERT_NEAR( mpe.getEstimatedPose()[3],  0.813566, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );
  ASSERT_NEAR( mpe.getEstimatedPose()[4], -0.164979, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );
  ASSERT_NEAR( mpe.getEstimatedPose()[5],  0.309649, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );
  ASSERT_NEAR( mpe.getEstimatedPose()[6], -0.463691, CAD_RECOGNITION_TEST_ACCEPTABLE_ORIENTATION_ERROR );

  SUCCEED();
}
Example #12
TEST(suturo_perception_mbpe, pose_estimation_box_against_multiple_models_but_ignore_the_best)
{
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
  std::string package_path = ros::package::getPath("suturo_perception_mbpe");
  // std::string modelpath  = "test_files/005box_4000pts.pcd";
  std::string objectpath = "test_files/correctly_segmented_box.pcd";
  std::string object_table_normal_path = "test_files/correctly_segmented_box.pcd_table_normal";

  if (pcl::io::loadPCDFile<pcl::PointXYZRGB> (package_path + "/" + objectpath, *input_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read input file\n");
    exit (-1);
  }

  Eigen::Vector4f table_normal = getTableNormalFromStringLine(object_table_normal_path,package_path);
  std::cout << "Using table normal: " << table_normal << std::endl;

  // Prepare the model that should be matched against the input cloud
  boost::shared_ptr<std::vector<suturo_msgs::Object> > objects(new std::vector<suturo_msgs::Object>);
  suturo_msgs::Object obj;
  obj.name="red_cube";
  obj.color="ff0000";
  obj.description="a red cube";
  obj.surface_material = suturo_msgs::Object::ALUMINIUM;

  shape_msgs::SolidPrimitive shape1;
  geometry_msgs::Pose pose1;
  shape1.type = shape1.BOX;
  // 0.05 x 0.05 x 0.05
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  pose1.position.x = 0;
  pose1.position.y = 0;
  pose1.position.z = 0;
  pose1.orientation.x = 0;
  pose1.orientation.y = 0;
  pose1.orientation.z = 0;
  pose1.orientation.w = 1;
  obj.primitives.push_back(shape1);
  obj.primitive_poses.push_back(pose1);


  suturo_msgs::Object cobj;
  cobj.name="green_cylinder";
  cobj.color="00ff00";
  cobj.description="a green cylinder";
  cobj.surface_material = suturo_msgs::Object::ALUMINIUM;

  shape_msgs::SolidPrimitive shape2;
  geometry_msgs::Pose pose2;
  shape2.type = shape2.CYLINDER;
  // 0.1 x 0.02
  shape2.dimensions.push_back(0.1f);
  shape2.dimensions.push_back(0.02f);
  pose2.position.x = 0;
  pose2.position.y = 0;
  pose2.position.z = 0;
  pose2.orientation.x = 0;
  pose2.orientation.y = 0;
  pose2.orientation.z = 0;
  pose2.orientation.w = 1;
  cobj.primitives.push_back(shape2);
  cobj.primitive_poses.push_back(pose2);

  objects->push_back(obj);
  objects->push_back(cobj);

  suturo_perception::PipelineData::Ptr data_;
  suturo_perception::PipelineObject::Ptr object_;
  ModelPoseEstimation mpe(objects,data_,object_);
  mpe.setInputCloud(input_cloud);
  mpe.setSurfaceNormal(table_normal);
  mpe.setVoxelSize(0.003f);
  std::vector<int> modelsOfInterest;
  modelsOfInterest.push_back(1); // only match against the cylinder (and fail)
  mpe.setModelsOfInterest(modelsOfInterest);
  mpe.execute();

  // The estimation should fail, since we only match against the cylinder
  ASSERT_FALSE( mpe.poseEstimationSuccessful() );

  SUCCEED();
}
Example #13
TEST(suturo_perception_mbpe, nan_handling)
{
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr input_cloud(new pcl::PointCloud<pcl::PointXYZRGB>);

  // Fill in the cloud data
  input_cloud->width    = 5;
  input_cloud->height   = 1;
  input_cloud->is_dense = false;
  input_cloud->points.resize (input_cloud->width * input_cloud->height);

  for (size_t i = 0; i < input_cloud->points.size ()-1; ++i)
  {
    input_cloud->points[i].x = 1024 * rand () / (RAND_MAX + 1.0f);
    input_cloud->points[i].y = 1024 * rand () / (RAND_MAX + 1.0f);
    input_cloud->points[i].z = 1024 * rand () / (RAND_MAX + 1.0f);
  }
  // Produce NaN coordinates for the last point
  input_cloud->points[4].x = std::numeric_limits<float>::quiet_NaN();
  input_cloud->points[4].y = std::numeric_limits<float>::quiet_NaN();
  input_cloud->points[4].z = std::numeric_limits<float>::quiet_NaN();

  // std::string package_path = ros::package::getPath("suturo_perception_mbpe");
  // // std::string modelpath  = "test_files/005box_4000pts.pcd";
  // std::string objectpath = "test_files/correctly_segmented_box.pcd";
  // std::string object_table_normal_path = "test_files/correctly_segmented_box.pcd_table_normal";

  // if (pcl::io::loadPCDFile<pcl::PointXYZRGB> (package_path + "/" + objectpath, *input_cloud) == -1)
  // {
  //   PCL_ERROR ("Couldn't read input file\n");
  //   exit (-1);
  // }

  std::string package_path = ros::package::getPath("suturo_perception_mbpe");
  std::string object_table_normal_path = "test_files/correctly_segmented_box.pcd_table_normal";
  Eigen::Vector4f table_normal = getTableNormalFromStringLine(object_table_normal_path,package_path);
  std::cout << "Using table normal: " << table_normal << std::endl;

  // Prepare the model that should be matched against the input cloud
  boost::shared_ptr<std::vector<suturo_msgs::Object> > objects(new std::vector<suturo_msgs::Object>);
  suturo_msgs::Object obj;
  obj.name="red_cube";
  obj.color="ff0000";
  obj.description="a red cube";
  obj.surface_material = suturo_msgs::Object::ALUMINIUM;

  shape_msgs::SolidPrimitive shape1;
  geometry_msgs::Pose pose1;
  shape1.type = shape1.BOX;
  // 0.05 x 0.05 x 0.05
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  shape1.dimensions.push_back(0.05f);
  pose1.position.x = 0;
  pose1.position.y = 0;
  pose1.position.z = 0;
  pose1.orientation.x = 0;
  pose1.orientation.y = 0;
  pose1.orientation.z = 0;
  pose1.orientation.w = 1;
  obj.primitives.push_back(shape1);
  obj.primitive_poses.push_back(pose1);
  objects->push_back(obj);

  suturo_perception::PipelineData::Ptr data_;
  suturo_perception::PipelineObject::Ptr object_;
  ModelPoseEstimation mpe(objects,data_,object_);
  mpe.setInputCloud(input_cloud);
  mpe.setSurfaceNormal(table_normal);
  mpe.setRemoveNaNs(true);
  mpe.execute();

  std::cout << "Fitness for nan matching: " << mpe.getFitnessScore() << std::endl;
  // Even with NaN removal enabled, estimation on this random cloud should fail
  ASSERT_FALSE( mpe.poseEstimationSuccessful() );
  SUCCEED();
}
  void PointCloudLocalization::cloudCallback(
    const sensor_msgs::PointCloud2::ConstPtr& cloud_msg)
  {
    vital_checker_->poke();
    boost::mutex::scoped_lock lock(mutex_);
    //JSK_NODELET_INFO("cloudCallback");
    latest_cloud_ = cloud_msg;
    if (localize_requested_){
      JSK_NODELET_INFO("localization is requested");
      try {
        pcl::PointCloud<pcl::PointNormal>::Ptr
          local_cloud (new pcl::PointCloud<pcl::PointNormal>);
        pcl::fromROSMsg(*latest_cloud_, *local_cloud);
        JSK_NODELET_INFO("waiting for tf transformation from %s tp %s",
                     latest_cloud_->header.frame_id.c_str(),
                     global_frame_.c_str());
        if (tf_listener_->waitForTransform(
              latest_cloud_->header.frame_id,
              global_frame_,
              latest_cloud_->header.stamp,
              ros::Duration(1.0))) {
          pcl::PointCloud<pcl::PointNormal>::Ptr
            input_cloud (new pcl::PointCloud<pcl::PointNormal>);
          if (use_normal_) {
            pcl_ros::transformPointCloudWithNormals(global_frame_,
                                                    *local_cloud,
                                                    *input_cloud,
                                                    *tf_listener_);
          }
          else {
            pcl_ros::transformPointCloud(global_frame_,
                                         *local_cloud,
                                         *input_cloud,
                                         *tf_listener_);
          }
          pcl::PointCloud<pcl::PointNormal>::Ptr
            input_downsampled_cloud (new pcl::PointCloud<pcl::PointNormal>);
          applyDownsampling(input_cloud, *input_downsampled_cloud);
          if (isFirstTime()) {
            all_cloud_ = input_downsampled_cloud;
            first_time_ = false;
          }
          else {
            // run ICP
            ros::ServiceClient client
              = pnh_->serviceClient<jsk_pcl_ros::ICPAlign>("icp_align");
            jsk_pcl_ros::ICPAlign icp_srv;

            if (clip_unseen_pointcloud_) {
              // Before running ICP, remove pointcloud where we cannot see
              // First, transform reference pointcloud, that is all_cloud_, into
              // sensor frame.
              // And after that, remove points which are x < 0.
              tf::StampedTransform global_sensor_tf_transform
                = lookupTransformWithDuration(
                  tf_listener_,
                  global_frame_,
                  sensor_frame_,
                  cloud_msg->header.stamp,
                  ros::Duration(1.0));
              Eigen::Affine3f global_sensor_transform;
              tf::transformTFToEigen(global_sensor_tf_transform,
                                     global_sensor_transform);
              pcl::PointCloud<pcl::PointNormal>::Ptr sensor_cloud
                (new pcl::PointCloud<pcl::PointNormal>);
              pcl::transformPointCloudWithNormals(
                *all_cloud_,
                *sensor_cloud,
                global_sensor_transform.inverse());
              // Remove negative-x points
              pcl::PassThrough<pcl::PointNormal> pass;
              pass.setInputCloud(sensor_cloud);
              pass.setFilterFieldName("x");
              pass.setFilterLimits(0.0, 100.0);
              pcl::PointCloud<pcl::PointNormal>::Ptr filtered_cloud
                (new pcl::PointCloud<pcl::PointNormal>);
              pass.filter(*filtered_cloud);
              JSK_NODELET_INFO("clipping: %lu -> %lu", sensor_cloud->points.size(), filtered_cloud->points.size());
              // Convert the pointcloud to global frame again
              pcl::PointCloud<pcl::PointNormal>::Ptr global_filtered_cloud
                (new pcl::PointCloud<pcl::PointNormal>);
              pcl::transformPointCloudWithNormals(
                *filtered_cloud,
                *global_filtered_cloud,
                global_sensor_transform);
              pcl::toROSMsg(*global_filtered_cloud,
                            icp_srv.request.target_cloud);
            }
            else {
              pcl::toROSMsg(*all_cloud_,
                            icp_srv.request.target_cloud);
            }
            pcl::toROSMsg(*input_downsampled_cloud,
                          icp_srv.request.reference_cloud);
            
            if (client.call(icp_srv)) {
              Eigen::Affine3f transform;
              tf::poseMsgToEigen(icp_srv.response.result.pose, transform);
              Eigen::Vector3f transform_pos(transform.translation());
              float roll, pitch, yaw;
              pcl::getEulerAngles(transform, roll, pitch, yaw);
              JSK_NODELET_INFO("aligned parameter --");
              JSK_NODELET_INFO("  - pos: [%f, %f, %f]",
                           transform_pos[0],
                           transform_pos[1],
                           transform_pos[2]);
              JSK_NODELET_INFO("  - rot: [%f, %f, %f]", roll, pitch, yaw);
              pcl::PointCloud<pcl::PointNormal>::Ptr
                transformed_input_cloud (new pcl::PointCloud<pcl::PointNormal>);
              if (use_normal_) {
                pcl::transformPointCloudWithNormals(*input_cloud,
                                                    *transformed_input_cloud,
                                                    transform);
              }
              else {
                pcl::transformPointCloud(*input_cloud,
                                         *transformed_input_cloud,
                                         transform);
              }
              pcl::PointCloud<pcl::PointNormal>::Ptr
                concatenated_cloud (new pcl::PointCloud<pcl::PointNormal>);
              *concatenated_cloud = *all_cloud_ + *transformed_input_cloud;
              // update *all_cloud
              applyDownsampling(concatenated_cloud, *all_cloud_);
              // update localize_transform_
              tf::Transform icp_transform;
              tf::transformEigenToTF(transform, icp_transform);
              {
                boost::mutex::scoped_lock tf_lock(tf_mutex_);
                localize_transform_ = localize_transform_ * icp_transform;
              }
            }
            else {
              JSK_NODELET_ERROR("Failed to call ~icp_align");
              return;
            }
          }
          localize_requested_ = false;
        }
        else {
          JSK_NODELET_WARN("No tf transformation is available");
        }
      }
      catch (tf2::ConnectivityException &e)
      {
        JSK_NODELET_ERROR("[%s] Transform error: %s", __PRETTY_FUNCTION__, e.what());
        return;
      }
      catch (tf2::InvalidArgumentException &e)
      {
        JSK_NODELET_ERROR("[%s] Transform error: %s", __PRETTY_FUNCTION__, e.what());
        return;
      }
    }
  }
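applyDownsampling is called several times above but not shown; a plausible VoxelGrid-based sketch (the leaf_size_ member is an assumption):

void PointCloudLocalization::applyDownsampling(
  pcl::PointCloud<pcl::PointNormal>::Ptr in_cloud,
  pcl::PointCloud<pcl::PointNormal>& out_cloud)
{
  pcl::VoxelGrid<pcl::PointNormal> voxel_grid;
  voxel_grid.setInputCloud(in_cloud);
  voxel_grid.setLeafSize(leaf_size_, leaf_size_, leaf_size_);  // leaf_size_ is assumed
  voxel_grid.filter(out_cloud);
}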
int
main (int argc, char** argv)
{
  // Loading first scan of room.
  pcl::PointCloud<pcl::PointXYZ>::Ptr target_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile<pcl::PointXYZ> ("room_scan1.pcd", *target_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read file room_scan1.pcd \n");
    return (-1);
  }
  std::cout << "Loaded " << target_cloud->size () << " data points from room_scan1.pcd" << std::endl;

  // Loading second scan of room from new perspective.
  pcl::PointCloud<pcl::PointXYZ>::Ptr input_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile<pcl::PointXYZ> ("room_scan2.pcd", *input_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read file room_scan2.pcd \n");
    return (-1);
  }
  std::cout << "Loaded " << input_cloud->size () << " data points from room_scan2.pcd" << std::endl;

  // Filtering input scan to roughly 10% of original size to increase speed of registration.
  pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::ApproximateVoxelGrid<pcl::PointXYZ> approximate_voxel_filter;
  approximate_voxel_filter.setLeafSize (0.2, 0.2, 0.2);
  approximate_voxel_filter.setInputCloud (input_cloud);
  approximate_voxel_filter.filter (*filtered_cloud);
  std::cout << "Filtered cloud contains " << filtered_cloud->size ()
            << " data points from room_scan2.pcd" << std::endl;

  // Initializing Normal Distributions Transform (NDT).
  pcl::NormalDistributionsTransform<pcl::PointXYZ, pcl::PointXYZ> ndt;

  // Setting scale dependent NDT parameters
  // Setting minimum transformation difference for termination condition.
  ndt.setTransformationEpsilon (0.01);
  // Setting maximum step size for Moré-Thuente line search.
  ndt.setStepSize (0.1);
  //Setting Resolution of NDT grid structure (VoxelGridCovariance).
  ndt.setResolution (1.0);

  // Setting max number of registration iterations.
  ndt.setMaximumIterations (35);

  // Setting point cloud to be aligned.
  ndt.setInputCloud (filtered_cloud);
  // Setting point cloud to be aligned to.
  ndt.setInputTarget (target_cloud);

  // Set initial alignment estimate found using robot odometry.
  Eigen::AngleAxisf init_rotation (0.6931, Eigen::Vector3f::UnitZ ());
  Eigen::Translation3f init_translation (1.79387, 0.720047, 0);
  Eigen::Matrix4f init_guess = (init_translation * init_rotation).matrix ();

  // Calculating required rigid transform to align the input cloud to the target cloud.
  pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  ndt.align (*output_cloud, init_guess);

  std::cout << "Normal Distributions Transform has converged:" << ndt.hasConverged ()
            << " score: " << ndt.getFitnessScore () << std::endl;

  // Transforming unfiltered, input cloud using found transform.
  pcl::transformPointCloud (*input_cloud, *output_cloud, ndt.getFinalTransformation ());

  // Saving transformed input cloud.
  pcl::io::savePCDFileASCII ("room_scan2_transformed.pcd", *output_cloud);

  // Initializing point cloud visualizer
  boost::shared_ptr<pcl::visualization::PCLVisualizer>
  viewer_final (new pcl::visualization::PCLVisualizer ("3D Viewer"));
  viewer_final->setBackgroundColor (0, 0, 0);

  // Coloring and visualizing target cloud (red).
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ>
  target_color (target_cloud, 255, 0, 0);
  viewer_final->addPointCloud<pcl::PointXYZ> (target_cloud, target_color, "target cloud");
  viewer_final->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE,
                                                  1, "target cloud");

  // Coloring and visualizing transformed input cloud (green).
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ>
  output_color (output_cloud, 0, 255, 0);
  viewer_final->addPointCloud<pcl::PointXYZ> (output_cloud, output_color, "output cloud");
  viewer_final->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE,
                                                  1, "output cloud");

  // Starting visualizer
  viewer_final->addCoordinateSystem (1.0);
  viewer_final->initCameraParameters ();

  // Wait until visualizer window is closed.
  while (!viewer_final->wasStopped ())
  {
    viewer_final->spinOnce (100);
    boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  }

  return (0);
}
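Note: on PCL 1.7 and later, Registration::setInputCloud (used as ndt.setInputCloud above) is deprecated in favor of setInputSource; setInputTarget is unchanged.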