void ndtTransformAndAdd(pcl::PointCloud<pcl::PointXYZRGB>::Ptr& A, pcl::PointCloud<pcl::PointXYZRGB>::Ptr& B)
{
	pcl::NormalDistributionsTransform<pcl::PointXYZRGB, pcl::PointXYZRGB> ndt;
	// Setting scale dependent NDT parameters
	// Setting minimum transformation difference for termination condition.
	ndt.setTransformationEpsilon (0.01);
	// Setting maximum step size for More-Thuente line search.
	ndt.setStepSize (0.1);
	// Setting Resolution of NDT grid structure (VoxelGridCovariance).
	ndt.setResolution (1.0);

	// Setting max number of registration iterations.
	ndt.setMaximumIterations (35);

	// Setting point cloud to be aligned.
	ndt.setInputSource (B);
	// Setting point cloud to be aligned to.
	ndt.setInputTarget (A);

	pcl::PointCloud<pcl::PointXYZRGB>::Ptr output_cloud (new pcl::PointCloud<pcl::PointXYZRGB>);
	Eigen::Affine3f estimate;
	estimate.setIdentity();
	ndt.align (*output_cloud, estimate.matrix());

	std::cout << "Normal Distributions Transform has converged:" << ndt.hasConverged ()
	          << " score: " << ndt.getFitnessScore () << std::endl;

	// Transforming unfiltered, input cloud using found transform.
	pcl::transformPointCloud (*B, *output_cloud, ndt.getFinalTransformation ());

	*A = *A + *output_cloud;
}
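A minimal driver for the function above — a hedged sketch, not part of the original example; the main() and the file names are assumptions for illustration:

#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>

int main ()
{
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr map (new pcl::PointCloud<pcl::PointXYZRGB>);
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr scan (new pcl::PointCloud<pcl::PointXYZRGB>);

  // Placeholder file names; any two overlapping RGB clouds would do.
  if (pcl::io::loadPCDFile ("map.pcd", *map) < 0 ||
      pcl::io::loadPCDFile ("scan.pcd", *scan) < 0)
    return -1;

  // Aligns *scan to *map with NDT and appends the transformed scan to *map.
  ndtTransformAndAdd (map, scan);

  pcl::io::savePCDFileBinary ("merged.pcd", *map);
  return 0;
}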
int
main (int, char**)
{
  typedef pcl::PointCloud<pcl::PointXYZ> CloudType;
  CloudType::Ptr cloud (new CloudType);
  cloud->is_dense = false;
  CloudType::Ptr output_cloud (new CloudType);

  CloudType::PointType p_nan;
  p_nan.x = std::numeric_limits<float>::quiet_NaN();
  p_nan.y = std::numeric_limits<float>::quiet_NaN();
  p_nan.z = std::numeric_limits<float>::quiet_NaN();
  cloud->push_back(p_nan);

  CloudType::PointType p_valid;
  p_valid.x = 1.0f;
  cloud->push_back(p_valid);

  std::cout << "size: " << cloud->points.size () << std::endl;

  std::vector<int> indices;
  pcl::removeNaNFromPointCloud(*cloud, *output_cloud, indices);
  std::cout << "size: " << output_cloud->points.size () << std::endl;

  return 0;
}
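A note on the expected behaviour: because is_dense is set to false, removeNaNFromPointCloud actually filters the cloud, so this example prints size: 2 followed by size: 1, and indices maps each surviving output point back to its original index in the input cloud.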
// CALLBACKS
void pointcloudCallback(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr& cloud)
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_filtered (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud (new pcl::PointCloud<pcl::PointXYZ>);
   // Create the filtering objects
  pcl::PassThrough<pcl::PointXYZ> pass;
  pass.setInputCloud (cloud);
  pass.setFilterFieldName (fieldName_);
  if(numBands_ <= 1){ 
    // Just center it to be nice
    pass.setFilterLimits ((startPoint_+endPoint_)/2.0 - bandWidth_/2.0, (startPoint_+endPoint_)/2.0 + bandWidth_/2.0);
    pass.filter(*cloud_filtered);
    *output_cloud = *cloud_filtered;
  } else {
    // Do the first one manually so that certain parameters like frame_id always match
    pass.setFilterLimits (startPoint_, startPoint_+bandWidth_);
    pass.filter(*cloud_filtered);
    *output_cloud = *cloud_filtered;
    float dL = endPoint_-startPoint_;
    
    for(int i = 1; i < numBands_; i++){
      float sLine = startPoint_ + i*dL/(float)(numBands_-1);
      float eLine = sLine + bandWidth_;
      pass.setFilterLimits (sLine, eLine);
      pass.filter(*cloud_filtered);
      *output_cloud += *cloud_filtered;
    }
  }
  cloudout_pub_.publish(*output_cloud);
}
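To make the band arithmetic concrete, here is a small standalone sketch (the parameter values are made up, not taken from the node) that prints the pass-through limits the loop above would use:

#include <cstdio>

int main ()
{
  // Assumed example values for the node parameters.
  const float startPoint = 0.0f, endPoint = 4.0f, bandWidth = 0.5f;
  const int numBands = 5;
  const float dL = endPoint - startPoint;

  // Band starts are spaced evenly over [startPoint, endPoint]; each band is bandWidth wide.
  for (int i = 0; i < numBands; i++) {
    float sLine = startPoint + i * dL / (float)(numBands - 1);
    float eLine = sLine + bandWidth;
    std::printf ("band %d: [%.2f, %.2f]\n", i, sLine, eLine);
  }
  return 0;
}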
template <typename PointT> void
pcl::SACSoftSegmentation<PointT>::segment(pcl::PointIndices &inliers, pcl::ModelCoefficients &model_coefficients)
{
	// Requires <unordered_set>; the original used the non-standard std::hash_set.
	typedef std::unordered_set<int> index_hash;
	typedef std::unordered_set<int>::iterator index_hash_itr;


	pcl::SACSegmentation<PointT>::segment (inliers, model_coefficients);

	pcl::PointIndices outliers;
	index_hash inliers_hash;
	for (size_t i = 0; i < inliers.indices.size(); i++)
	{
		inliers_hash.insert(inliers.indices.at(i));
	}

	// iterate over all points in the point cloud
	for (int i = 0; i < this->input_->size(); i++)
	{
		if (inliers_hash.find(i) == inliers_hash.end())
		{
			outliers.indices.push_back(i);
		}
	}

	index_hash outliers_hash;
	for (int i = 0; i < outliers.indices.size(); i++)
	{
		outliers_hash.insert(outliers.indices.at(i));
	}

	index_hash current_segment;
	index_hash_itr itr;

	this->findPointNeighbours();

	while (!outliers_hash.empty())
	{
		// Take an arbitrary remaining outlier as the seed of a new segment
		// (std::unordered_set has no pop_back; erase the first element instead).
		int seed = *outliers_hash.begin();
		outliers_hash.erase(outliers_hash.begin());
		current_segment.insert(seed);

		// Region growing over the seed's neighbours is left unimplemented here.
		std::vector<int> seed_neighbours = point_neighbours_[seed];
	}

	typename pcl::PointCloud<PointT>::Ptr output_cloud(new pcl::PointCloud<PointT>());
	pcl::copyPointCloud(*this->input_,outliers,*output_cloud);
	pcl::PCDWriter writer;
	writer.write("output.pcd", *output_cloud);
}
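A hedged usage sketch: the call into pcl::SACSegmentation<PointT>::segment above suggests SACSoftSegmentation inherits the usual SACSegmentation interface, so it would be configured the same way; the helper function and its cloud argument are assumptions for illustration.

#include <pcl/point_types.h>
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>

void segmentPlaneSoft (const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud)
{
  // Assumes SACSoftSegmentation exposes the standard SACSegmentation setters.
  pcl::SACSoftSegmentation<pcl::PointXYZ> seg;
  seg.setModelType (pcl::SACMODEL_PLANE);
  seg.setMethodType (pcl::SAC_RANSAC);
  seg.setDistanceThreshold (0.01);
  seg.setInputCloud (cloud);

  pcl::PointIndices inliers;
  pcl::ModelCoefficients coefficients;
  // Also writes the outlier cloud to output.pcd, as in the implementation above.
  seg.segment (inliers, coefficients);
}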
  void ColorizeDistanceFromPlane::colorize(
    const sensor_msgs::PointCloud2::ConstPtr& cloud_msg,
    const jsk_recognition_msgs::ModelCoefficientsArray::ConstPtr& coefficients_msg,
    const jsk_recognition_msgs::PolygonArray::ConstPtr& polygons)
  {
    boost::mutex::scoped_lock lock(mutex_);
    if (coefficients_msg->coefficients.size() == 0) {
      return;
    }
    // convert all the data into pcl format
    pcl::PointCloud<PointT>::Ptr cloud
      (new pcl::PointCloud<PointT>);
    pcl::fromROSMsg(*cloud_msg, *cloud);
    std::vector<pcl::ModelCoefficients::Ptr> coefficients
      = pcl_conversions::convertToPCLModelCoefficients(
        coefficients_msg->coefficients);
    
    // first, build ConvexPolygon::Ptr
    std::vector<ConvexPolygon::Ptr> convexes;
    for (size_t i = 0; i < polygons->polygons.size(); i++) {
      if (polygons->polygons[i].polygon.points.size() > 0) {
        ConvexPolygon convex =
          ConvexPolygon::fromROSMsg(polygons->polygons[i].polygon);
        ConvexPolygon::Ptr convex_ptr
          = boost::make_shared<ConvexPolygon>(convex);
        convexes.push_back(convex_ptr);
      }
      else {
        JSK_NODELET_ERROR_STREAM(__PRETTY_FUNCTION__ << ":: there are no points in the polygon");
      }
    }

    pcl::PointCloud<pcl::PointXYZRGB>::Ptr output_cloud
      (new pcl::PointCloud<pcl::PointXYZRGB>);
    
    for (size_t i = 0; i < cloud->points.size(); i++) {
      PointT p = cloud->points[i];
      pcl::PointXYZRGB p_output;
      pointFromXYZToXYZ<PointT, pcl::PointXYZRGB>(p, p_output);
      double d = distanceToConvexes(p, convexes);
      if (d != DBL_MAX) {
        uint32_t color = colorForDistance(d);
        p_output.rgb = *reinterpret_cast<float*>(&color);
        output_cloud->points.push_back(p_output);
      }
    }
    
    sensor_msgs::PointCloud2 ros_output;
    pcl::toROSMsg(*output_cloud, ros_output);
    ros_output.header = cloud_msg->header;
    pub_.publish(ros_output);
  }
template <typename PointT> void
pcl::people::GroundBasedPeopleDetectionApp<PointT>::swapDimensions (pcl::PointCloud<pcl::RGB>::Ptr& cloud)
{
  pcl::PointCloud<pcl::RGB>::Ptr output_cloud(new pcl::PointCloud<pcl::RGB>);
  output_cloud->points.resize(cloud->height*cloud->width);
  output_cloud->width = cloud->height;
  output_cloud->height = cloud->width;
  for (int i = 0; i < cloud->width; i++)
  {
    for (int j = 0; j < cloud->height; j++)
    {
      (*output_cloud)(j,i) = (*cloud)(cloud->width - i - 1, j);
    }
  }
  cloud = output_cloud;
}
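For reference, the index mapping above amounts to a 90-degree rotation of the organized image; here is the same mapping as a standalone function (independent of the class, a sketch rather than part of the original):

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>

pcl::PointCloud<pcl::RGB>::Ptr rotate90 (const pcl::PointCloud<pcl::RGB>::ConstPtr& cloud)
{
  pcl::PointCloud<pcl::RGB>::Ptr out (new pcl::PointCloud<pcl::RGB>);
  out->points.resize (cloud->width * cloud->height);
  out->width = cloud->height;
  out->height = cloud->width;
  // Column j of the output is row j of the input, read from the last column backwards.
  for (uint32_t i = 0; i < cloud->width; i++)
    for (uint32_t j = 0; j < cloud->height; j++)
      (*out)(j, i) = (*cloud)(cloud->width - i - 1, j);
  return out;
}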
  void VoxelGridDownsampleManager::pointCB(const sensor_msgs::PointCloud2ConstPtr &input)
  {
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud (new pcl::PointCloud<pcl::PointXYZ>);
    if (grid_.size() == 0) {
      ROS_INFO("the number of registered grids is 0, skipping");
      return;
    }
    fromROSMsg(*input, *cloud);
    for (size_t i = 0; i < grid_.size(); i++)
    {
      visualization_msgs::Marker::ConstPtr target_grid = grid_[i];
      // tf is not yet supported
      int id = target_grid->id;
      // solve tf with ros::Time 0.0

      // transform pointcloud to the frame_id of target_grid
      pcl::PointCloud<pcl::PointXYZ>::Ptr transformed_cloud (new pcl::PointCloud<pcl::PointXYZ>);
      pcl_ros::transformPointCloud(target_grid->header.frame_id,
                                   *cloud,
                                   *transformed_cloud,
                                   tf_listener);
      double center_x = target_grid->pose.position.x;
      double center_y = target_grid->pose.position.y;
      double center_z = target_grid->pose.position.z;
      double range_x = target_grid->scale.x * 1.0; // for debug
      double range_y = target_grid->scale.y * 1.0;
      double range_z = target_grid->scale.z * 1.0;
      double min_x = center_x - range_x / 2.0;
      double max_x = center_x + range_x / 2.0;
      double min_y = center_y - range_y / 2.0;
      double max_y = center_y + range_y / 2.0;
      double min_z = center_z - range_z / 2.0;
      double max_z = center_z + range_z / 2.0;
      double resolution = target_grid->color.r;
      // filter order: x -> y -> z -> downsample
      pcl::PassThrough<pcl::PointXYZ> pass_x;
      pass_x.setFilterFieldName("x");
      pass_x.setFilterLimits(min_x, max_x);
      
      pcl::PassThrough<pcl::PointXYZ> pass_y;
      pass_y.setFilterFieldName("y");
      pass_y.setFilterLimits(min_y, max_y);
      pcl::PassThrough<pcl::PointXYZ> pass_z;
      pass_z.setFilterFieldName("z");
      pass_z.setFilterLimits(min_z, max_z);

      ROS_INFO_STREAM(id << " filter x: " << min_x << " - " << max_x);
      ROS_INFO_STREAM(id << " filter y: " << min_y << " - " << max_y);
      ROS_INFO_STREAM(id << " filter z: " << min_z << " - " << max_z);
      
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_after_x (new pcl::PointCloud<pcl::PointXYZ>);
      pass_x.setInputCloud (transformed_cloud);
      pass_x.filter(*cloud_after_x);

      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_after_y (new pcl::PointCloud<pcl::PointXYZ>);
      pass_y.setInputCloud (cloud_after_x);
      pass_y.filter(*cloud_after_y);

      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_after_z (new pcl::PointCloud<pcl::PointXYZ>);
      pass_z.setInputCloud (cloud_after_y);
      pass_z.filter(*cloud_after_z);

      // downsample
      pcl::VoxelGrid<pcl::PointXYZ> sor;
      sor.setInputCloud (cloud_after_z);
      sor.setLeafSize (resolution, resolution, resolution);
      pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_filtered (new pcl::PointCloud<pcl::PointXYZ>);
      sor.filter (*cloud_filtered);

      // reverse transform
      pcl::PointCloud<pcl::PointXYZ>::Ptr reverse_transformed_cloud (new pcl::PointCloud<pcl::PointXYZ>);
      pcl_ros::transformPointCloud(input->header.frame_id,
                                   *cloud_filtered,
                                   *reverse_transformed_cloud,
                                   tf_listener);
      
      // adding the output into *output_cloud
      // tmp <- cloud_filtered + output_cloud
      // output_cloud <- tmp
      //pcl::PointCloud<pcl::PointXYZ>::Ptr tmp (new pcl::PointCloud<pcl::PointXYZ>);
      //pcl::concatenatePointCloud (*cloud_filtered, *output_cloud, tmp);
      //output_cloud = tmp;
      ROS_INFO_STREAM(id << " includes " << reverse_transformed_cloud->points.size() << " points");
      for (size_t j = 0; j < reverse_transformed_cloud->points.size(); j++) {
        output_cloud->points.push_back(reverse_transformed_cloud->points[j]);
      }
    }
    sensor_msgs::PointCloud2 out;
    toROSMsg(*output_cloud, out);
    out.header = input->header;
    pub_.publish(out);          // for debugging

    // for concatenater
    size_t cluster_num = output_cloud->points.size() / max_points_ + 1;
    ROS_INFO_STREAM("encoding into " << cluster_num << " clusters");
    for (size_t i = 0; i < cluster_num; i++) {
      size_t start_index = max_points_ * i;
      size_t end_index = max_points_ * (i + 1) > output_cloud->points.size() ?
        output_cloud->points.size(): max_points_ * (i + 1);
      sensor_msgs::PointCloud2 cluster_out_ros;
      pcl::PointCloud<pcl::PointXYZ>::Ptr
        cluster_out_pcl(new pcl::PointCloud<pcl::PointXYZ>);
      cluster_out_pcl->points.resize(end_index - start_index);
      // build cluster_out_pcl
      ROS_INFO_STREAM("make cluster from " << start_index << " to " << end_index);
      for (size_t j = start_index; j < end_index; j++) {
        cluster_out_pcl->points[j - start_index] = output_cloud->points[j];
      }
      // convert cluster_out_pcl into a ROS message
      toROSMsg(*cluster_out_pcl, cluster_out_ros);
      cluster_out_ros.header = input->header;
      cluster_out_ros.header.frame_id = (boost::format("%s %d %d") % (cluster_out_ros.header.frame_id) % (i) % (sequence_id_)).str();
      pub_encoded_.publish(cluster_out_ros);
      ros::Duration(1.0 / rate_).sleep();
    }
  }
Example 8
void walkl3 (FILE *fp, struct node *n)
{
    struct node *l2 ;
    struct node *m ;
    char cloudname [MAXCLOUDNAME] ;
    int l1count, l3count ;
    struct node *directl1 [2], *directl3 [2] ;
    int selected ;

    /*
     * Reset all non-L3 nodes
     */

    for (m = mobj_head (nodemobj) ; m != NULL ; m = m->next)
    {
	if (m->nodetype == NT_L3)
	    MK_CLEAR (m, MK_L2TRANSPORT) ;
	else
	{
	    MK_CLEAR (m, MK_L2TRANSPORT) ;
	    MK_CLEAR (m, MK_IPMATCH) ;
	    MK_CLEAR (m, MK_ISROUTER) ;
	    MK_CLEAR (m, MK_VLANTRAVERSAL) ;
	    MK_CLEAR (m, MK_PROCESSED) ;
	}
	vlan_zero (m->vlanset) ;
    }

    l2 = get_neighbour (n, NT_L2) ;
    if (l2 != NULL)
	transport_vlan_on_L2 (l2, l2->u.l2.vlan) ;

    /*
     * All reachable L2 nodes are marked.
     *
     * Count physical interfaces (L1 nodes) to see if this equipment
     * is connected via a direct link to another node, or if this
     * equipment is connected to a broadcast domain.
     *
     * Count marked L1 nodes. If there are only 2 of them, check if
     * linkname matches (if not "X", or EXTLINK).
     */

    l1count = 0 ;
    l3count = 0 ;

    selected = 0 ;
    for (m = mobj_head (nodemobj) ; m != NULL ; m = m->next)
    {
	if (m->nodetype == NT_L1 && MK_ISSET (m, MK_L2TRANSPORT))
	{
	    if (MK_ISSELECTED (m) || MK_ISSELECTED (m->eq))
		selected = 1 ;

	    if (l1count < 2)
		directl1 [l1count] = m ;
	    l1count++ ;
	}

	if (m->nodetype == NT_L3 && MK_ISSET (m, MK_L2TRANSPORT)
					&& MK_ISSET (m, MK_IPMATCH))
	{
	    if (MK_ISSELECTED (m) || MK_ISSELECTED (m->eq))
		selected = 1 ;

	    if (l3count < 2)
		directl3 [l3count] = m ;
	    l3count++ ;
	}
    }

    if (! selected)
	return ;

    if (l1count == 2 && l3count == 2 &&
		directl1 [0]->u.l1.link == directl1 [1]->u.l1.link)
    {
	/*
	 * This is a direct link between two L3 nodes
	 */

	output_direct (fp, directl3) ;

	MK_SET (directl3 [0], MK_PROCESSED) ;
	MK_SET (directl3 [1], MK_PROCESSED) ;
    }
    else
    {
	/*
	 * This is a cloud
	 */

	for (m = mobj_head (nodemobj) ; m != NULL ; m = m->next)
	{
	    if (m->nodetype == NT_L2 && MK_ISSET (m, MK_L2TRANSPORT))
	    {
		struct linklist *ll ;
		struct node *r ;

		for (ll = m->linklist ; ll != NULL ; ll = ll->next)
		{
		    r = getlinkpeer (ll->link, m) ;
		    if (r->nodetype == NT_L3 && MK_ISSET (r, MK_IPMATCH))
			MK_SET (r, MK_VLANTRAVERSAL) ;
		}
	    }
	}

	/*
	 * Output cloud
	 */

	output_cloud (fp, n, cloudname, sizeof cloudname) ;

	/*
	 * Output links to this cloud
	 */

	for (m = mobj_head (nodemobj) ; m != NULL ; m = m->next)
	{
	    if (m->nodetype == NT_L3 && MK_ISSET (m, MK_VLANTRAVERSAL))
	    {
		output_link (fp, m, cloudname) ;
		MK_CLEAR (m, MK_VLANTRAVERSAL) ;
		MK_SET (m, MK_PROCESSED) ;
	    }
	}
    }
}
Example 9
//static void velodyne_callback(const pcl::PointCloud<velodyne_pointcloud::PointXYZIR>::ConstPtr& input)
static void map_callback(const sensor_msgs::PointCloud2::ConstPtr& input)
{

  if (map_loaded == 0) {
    std::cout << "Loading map data... ";
    map.header.frame_id = "/pointcloud_map_frame";
    
    // Convert the data type(from sensor_msgs to pcl).
    pcl::fromROSMsg(*input, map);
    
    pcl::PointCloud<pcl::PointXYZI>::Ptr map_ptr(new pcl::PointCloud<pcl::PointXYZI>(map));
    // Setting point cloud to be aligned to.
    ndt.setInputTarget(map_ptr);
	
    // Setting NDT parameters to default values
    ndt.setMaximumIterations(iter);
    ndt.setResolution(ndt_res);
    ndt.setStepSize(step_size);
    ndt.setTransformationEpsilon(trans_eps);
    
    map_loaded = 1;
    std::cout << "Map Loaded." << std::endl;
  }



    if (map_loaded == 1 && init_pos_set == 1) {
        callback_start = ros::Time::now();

        static tf::TransformBroadcaster br;
        tf::Transform transform;
        tf::Quaternion q;

        tf::Quaternion q_control;

        // 1 scan
	/*
        pcl::PointCloud<pcl::PointXYZI> scan;
        pcl::PointXYZI p;
        scan.header = input->header;
        scan.header.frame_id = "velodyne_scan_frame";
	*/

        ros::Time scan_time;
        scan_time.sec = additional_map.header.stamp / 1000000.0;
        scan_time.nsec = (additional_map.header.stamp - scan_time.sec * 1000000.0) * 1000.0;

        /*
         std::cout << "scan.header.stamp: " << scan.header.stamp << std::endl;
         std::cout << "scan_time: " << scan_time << std::endl;
         std::cout << "scan_time.sec: " << scan_time.sec << std::endl;
         std::cout << "scan_time.nsec: " << scan_time.nsec << std::endl;
         */

        t1_start = ros::Time::now();
	/*
        for (pcl::PointCloud<velodyne_pointcloud::PointXYZIR>::const_iterator item = input->begin(); item != input->end(); item++) {
            p.x = (double) item->x;
            p.y = (double) item->y;
            p.z = (double) item->z;

            scan.points.push_back(p);
        }
	*/
	//	pcl::fromROSMsg(*input, scan);
        t1_end = ros::Time::now();
        d1 = t1_end - t1_start;

        Eigen::Matrix4f t(Eigen::Matrix4f::Identity());

	//        pcl::PointCloud<pcl::PointXYZI>::Ptr scan_ptr(new pcl::PointCloud<pcl::PointXYZI>(scan));
        pcl::PointCloud<pcl::PointXYZI>::Ptr filtered_additional_map_ptr(new pcl::PointCloud<pcl::PointXYZI>);

        // Downsampling the additional map using a VoxelGrid filter
        t2_start = ros::Time::now();
        pcl::VoxelGrid<pcl::PointXYZI> voxel_grid_filter;
        voxel_grid_filter.setLeafSize(voxel_leaf_size, voxel_leaf_size, voxel_leaf_size);
        voxel_grid_filter.setInputCloud(additional_map_ptr);
        voxel_grid_filter.filter(*filtered_additional_map_ptr);
        t2_end = ros::Time::now();
        d2 = t2_end - t2_start;

        // Setting point cloud to be aligned.
        ndt.setInputSource(filtered_additional_map_ptr);

        // Guess the initial gross estimation of the transformation
        t3_start = ros::Time::now();
        tf::Matrix3x3 init_rotation;

        guess_pos.x = previous_pos.x + offset_x;
        guess_pos.y = previous_pos.y + offset_y;
        guess_pos.z = previous_pos.z + offset_z;
        guess_pos.roll = previous_pos.roll;
        guess_pos.pitch = previous_pos.pitch;
        guess_pos.yaw = previous_pos.yaw + offset_yaw;

        Eigen::AngleAxisf init_rotation_x(guess_pos.roll, Eigen::Vector3f::UnitX());
        Eigen::AngleAxisf init_rotation_y(guess_pos.pitch, Eigen::Vector3f::UnitY());
        Eigen::AngleAxisf init_rotation_z(guess_pos.yaw, Eigen::Vector3f::UnitZ());

        Eigen::Translation3f init_translation(guess_pos.x, guess_pos.y, guess_pos.z);

        Eigen::Matrix4f init_guess = (init_translation * init_rotation_z * init_rotation_y * init_rotation_x).matrix();

        t3_end = ros::Time::now();
        d3 = t3_end - t3_start;

        t4_start = ros::Time::now();
        pcl::PointCloud<pcl::PointXYZI>::Ptr output_cloud(new pcl::PointCloud<pcl::PointXYZI>);
        ndt.align(*output_cloud, init_guess);

        t = ndt.getFinalTransformation();
	pcl::PointCloud<pcl::PointXYZI>::Ptr transformed_additional_map_ptr (new pcl::PointCloud<pcl::PointXYZI>());
	transformed_additional_map_ptr->header.frame_id = "/map";
	pcl::transformPointCloud(*additional_map_ptr, *transformed_additional_map_ptr, t);
	sensor_msgs::PointCloud2::Ptr msg_ptr(new sensor_msgs::PointCloud2);

	pcl::toROSMsg(*transformed_additional_map_ptr, *msg_ptr);
	msg_ptr->header.frame_id = "/map";
	ndt_map_pub.publish(*msg_ptr);

	// Writing Point Cloud data to PCD file
	pcl::io::savePCDFileASCII("global_map.pcd", *transformed_additional_map_ptr);
	std::cout << "Saved " << transformed_additional_map_ptr->points.size() << " data points to global_map.pcd." << std::endl;

	pcl::PointCloud<pcl::PointXYZRGB> output;
	output.width = transformed_additional_map_ptr->width;
	output.height = transformed_additional_map_ptr->height;
	output.points.resize(output.width * output.height);

	for(size_t i = 0; i < output.points.size(); i++){
	  output.points[i].x = transformed_additional_map_ptr->points[i].x;
	  output.points[i].y = transformed_additional_map_ptr->points[i].y;
	  output.points[i].z = transformed_additional_map_ptr->points[i].z;
	  // Pack pure green into the float rgb field; the raw integer 255 << 8 of the
	  // original is not the packed-colour encoding PCL expects.
	  uint32_t rgb = ((uint32_t)255 << 8);
	  output.points[i].rgb = *reinterpret_cast<float*>(&rgb);
	}

	pcl::io::savePCDFileASCII("global_map_rgb.pcd", output);
	std::cout << "Saved " << output.points.size() << " data points to global_map_rgb.pcd." << std::endl;

        t4_end = ros::Time::now();
        d4 = t4_end - t4_start;

        t5_start = ros::Time::now();
        /*
         tf::Vector3 origin;
         origin.setValue(static_cast<double>(t(0,3)), static_cast<double>(t(1,3)), static_cast<double>(t(2,3)));
         */

        tf::Matrix3x3 tf3d;

        tf3d.setValue(static_cast<double>(t(0, 0)), static_cast<double>(t(0, 1)), static_cast<double>(t(0, 2)), static_cast<double>(t(1, 0)), static_cast<double>(t(1, 1)), static_cast<double>(t(1, 2)), static_cast<double>(t(2, 0)), static_cast<double>(t(2, 1)), static_cast<double>(t(2, 2)));

        // Update current_pos.
        current_pos.x = t(0, 3);
        current_pos.y = t(1, 3);
        current_pos.z = t(2, 3);
        tf3d.getRPY(current_pos.roll, current_pos.pitch, current_pos.yaw, 1);

	// control_pose
	current_pos_control.roll = current_pos.roll;
	current_pos_control.pitch = current_pos.pitch;
	current_pos_control.yaw = current_pos.yaw - angle / 180.0 * M_PI;
	double theta = current_pos_control.yaw;
	current_pos_control.x = cos(theta) * (-control_shift_x) + sin(theta) * (-control_shift_y) + current_pos.x;
	current_pos_control.y = -sin(theta) * (-control_shift_x) + cos(theta) * (-control_shift_y) + current_pos.y;
	current_pos_control.z = current_pos.z - control_shift_z;

        // transform "/velodyne" to "/map"
#if 0
        transform.setOrigin(tf::Vector3(current_pos.x, current_pos.y, current_pos.z));
        q.setRPY(current_pos.roll, current_pos.pitch, current_pos.yaw);
        transform.setRotation(q);
#else
	//
	// FIXME:
	// We corrected the angle of "/velodyne" so that pure_pursuit
	// can read this frame for the control.
	// However, this is not what we want because the scan of Velodyne
	// looks unmatched for the 3-D map on Rviz.
	// What we really want is to make another TF transforming "/velodyne"
	// to a new "/ndt_points" frame and modify pure_pursuit to
	// read this frame instead of "/velodyne".
	// Otherwise, can pure_pursuit just use "/ndt_frame"?
	//
        transform.setOrigin(tf::Vector3(current_pos_control.x, current_pos_control.y, current_pos_control.z));
        q.setRPY(current_pos_control.roll, current_pos_control.pitch, current_pos_control.yaw);
        transform.setRotation(q);
#endif

	q_control.setRPY(current_pos_control.roll, current_pos_control.pitch, current_pos_control.yaw);

        /*
         std::cout << "ros::Time::now(): " << ros::Time::now() << std::endl;
         std::cout << "ros::Time::now().sec: " << ros::Time::now().sec << std::endl;
         std::cout << "ros::Time::now().nsec: " << ros::Time::now().nsec << std::endl;
         */

	//        br.sendTransform(tf::StampedTransform(transform, scan_time, "map", "velodyne"));

        static tf::TransformBroadcaster pose_broadcaster;
        tf::Transform pose_transform;
        tf::Quaternion pose_q;

/*        pose_transform.setOrigin(tf::Vector3(0, 0, 0));
        pose_q.setRPY(0, 0, 0);
        pose_transform.setRotation(pose_q);
        pose_broadcaster.sendTransform(tf::StampedTransform(pose_transform, scan_time, "map", "ndt_frame"));
*/
        // publish the position
       // ndt_pose_msg.header.frame_id = "/ndt_frame";
        ndt_pose_msg.header.frame_id = "/map";
        ndt_pose_msg.header.stamp = scan_time;
        ndt_pose_msg.pose.position.x = current_pos.x;
        ndt_pose_msg.pose.position.y = current_pos.y;
        ndt_pose_msg.pose.position.z = current_pos.z;
        ndt_pose_msg.pose.orientation.x = q.x();
        ndt_pose_msg.pose.orientation.y = q.y();
        ndt_pose_msg.pose.orientation.z = q.z();
        ndt_pose_msg.pose.orientation.w = q.w();

        static tf::TransformBroadcaster pose_broadcaster_control;
        tf::Transform pose_transform_control;
        tf::Quaternion pose_q_control;

     /*   pose_transform_control.setOrigin(tf::Vector3(0, 0, 0));
        pose_q_control.setRPY(0, 0, 0);
        pose_transform_control.setRotation(pose_q_control);
        pose_broadcaster_control.sendTransform(tf::StampedTransform(pose_transform_control, scan_time, "map", "ndt_frame"));
*/
        // publish the position
     //   control_pose_msg.header.frame_id = "/ndt_frame";
        control_pose_msg.header.frame_id = "/map";
        control_pose_msg.header.stamp = scan_time;
        control_pose_msg.pose.position.x = current_pos_control.x;
        control_pose_msg.pose.position.y = current_pos_control.y;
        control_pose_msg.pose.position.z = current_pos_control.z;
        control_pose_msg.pose.orientation.x = q_control.x();
        control_pose_msg.pose.orientation.y = q_control.y();
        control_pose_msg.pose.orientation.z = q_control.z();
        control_pose_msg.pose.orientation.w = q_control.w();

        /*
         std::cout << "ros::Time::now(): " << ros::Time::now() << std::endl;
         std::cout << "ros::Time::now().sec: " << ros::Time::now().sec << std::endl;
         std::cout << "ros::Time::now().nsec: " << ros::Time::now().nsec << std::endl;
         */

        ndt_pose_pub.publish(ndt_pose_msg);
        control_pose_pub.publish(control_pose_msg);

        t5_end = ros::Time::now();
        d5 = t5_end - t5_start;

#ifdef OUTPUT
        // Writing position to position_log.txt
        std::ofstream ofs("position_log.txt", std::ios::app);
        if (!ofs) {
            std::cerr << "Could not open 'position_log.txt'." << std::endl;
            exit(1);
        }
        ofs << current_pos.x << " " << current_pos.y << " " << current_pos.z << " " << current_pos.roll << " " << current_pos.pitch << " " << current_pos.yaw << std::endl;
#endif

        // Calculate the offset (current_pos - previous_pos)
        offset_x = current_pos.x - previous_pos.x;
        offset_y = current_pos.y - previous_pos.y;
        offset_z = current_pos.z - previous_pos.z;
        offset_yaw = current_pos.yaw - previous_pos.yaw;

        // Update position and posture. current_pos -> previous_pos
        previous_pos.x = current_pos.x;
        previous_pos.y = current_pos.y;
        previous_pos.z = current_pos.z;
        previous_pos.roll = current_pos.roll;
        previous_pos.pitch = current_pos.pitch;
        previous_pos.yaw = current_pos.yaw;

        callback_end = ros::Time::now();
        d_callback = callback_end - callback_start;

        std::cout << "-----------------------------------------------------------------" << std::endl;
        std::cout << "Sequence number: " << input->header.seq << std::endl;
        std::cout << "Number of scan points: " << additional_map_ptr->size() << " points." << std::endl;
        std::cout << "Number of filtered scan points: " << filtered_additional_map_ptr->size() << " points." << std::endl;
        std::cout << "NDT has converged: " << ndt.hasConverged() << std::endl;
        std::cout << "Fitness score: " << ndt.getFitnessScore() << std::endl;
        std::cout << "Number of iteration: " << ndt.getFinalNumIteration() << std::endl;
        std::cout << "(x,y,z,roll,pitch,yaw):" << std::endl;
        std::cout << "(" << current_pos.x << ", " << current_pos.y << ", " << current_pos.z << ", " << current_pos.roll << ", " << current_pos.pitch << ", " << current_pos.yaw << ")" << std::endl;
        std::cout << "Transformation Matrix:" << std::endl;
        std::cout << t << std::endl;
#ifdef VIEW_TIME
        std::cout << "Duration of velodyne_callback: " << d_callback.toSec() << " secs." << std::endl;
        std::cout << "Adding scan points: " << d1.toSec() << " secs." << std::endl;
        std::cout << "VoxelGrid Filter: " << d2.toSec() << " secs." << std::endl;
        std::cout << "Guessing the initial gross estimation: " << d3.toSec() << " secs." << std::endl;
        std::cout << "NDT: " << d4.toSec() << " secs." << std::endl;
        std::cout << "tf: " << d5.toSec() << " secs." << std::endl;
#endif
        std::cout << "-----------------------------------------------------------------" << std::endl;
    }
}
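Related to the colour packing above, a small helper (an assumption for illustration, not part of the original) showing the usual way to pack an 8-bit RGB triple into the float rgb field of pcl::PointXYZRGB:

#include <cstdint>
#include <cstring>

// Pack r, g, b into the bit layout PCL expects in the float `rgb` field
// (bits 16..23 = r, 8..15 = g, 0..7 = b).
inline float packRGB (uint8_t r, uint8_t g, uint8_t b)
{
  uint32_t rgb = ((uint32_t)r << 16) | ((uint32_t)g << 8) | (uint32_t)b;
  float out;
  std::memcpy (&out, &rgb, sizeof (float));
  return out;
}

// Example: output.points[i].rgb = packRGB (0, 255, 0);  // pure green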
  void HintedHandleEstimator::estimate(
    const sensor_msgs::PointCloud2::ConstPtr& cloud_msg,
    const geometry_msgs::PointStampedConstPtr &point_msg)
  {
    boost::mutex::scoped_lock lock(mutex_);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::Normal>::Ptr cloud_normals(new pcl::PointCloud<pcl::Normal>);
    pcl::PassThrough<pcl::PointXYZ> pass;
    int K = 1;
    std::vector<int> pointIdxNKNSearch(K);
    std::vector<float> pointNKNSquaredDistance(K);
    pcl::search::KdTree<pcl::PointXYZ>::Ptr kd_tree(new pcl::search::KdTree<pcl::PointXYZ>);

    pcl::fromROSMsg(*cloud_msg, *cloud);
    geometry_msgs::PointStamped transed_point;
    ros::Time now = ros::Time::now();
    try
      {
      listener_.waitForTransform(cloud->header.frame_id, point_msg->header.frame_id, now, ros::Duration(1.0));
      listener_.transformPoint(cloud->header.frame_id, now, *point_msg, point_msg->header.frame_id, transed_point);
    }
    catch(tf::TransformException ex)
      {
      JSK_ROS_ERROR("%s", ex.what());
      return;
    }
    pcl::PointXYZ searchPoint;
    searchPoint.x = transed_point.point.x;
    searchPoint.y = transed_point.point.y;
    searchPoint.z = transed_point.point.z;

    //remove too far cloud
    pass.setInputCloud(cloud);
    pass.setFilterFieldName("x");
    pass.setFilterLimits(searchPoint.x - 3*handle.arm_w, searchPoint.x + 3*handle.arm_w);
    pass.filter(*cloud);
    pass.setInputCloud(cloud);
    pass.setFilterFieldName("y");
    pass.setFilterLimits(searchPoint.y - 3*handle.arm_w, searchPoint.y + 3*handle.arm_w);
    pass.filter(*cloud);
    pass.setInputCloud(cloud);
    pass.setFilterFieldName("z");
    pass.setFilterLimits(searchPoint.z - 3*handle.arm_w, searchPoint.z + 3*handle.arm_w);
    pass.filter(*cloud);

    if(cloud->points.size() < 10){
      JSK_ROS_INFO("points are too small");
      return;
    }
    if(1){ //estimate_normal
      pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
      ne.setInputCloud(cloud);
      ne.setSearchMethod(kd_tree);
      ne.setRadiusSearch(0.02);
      ne.setViewPoint(0, 0, 0);
      ne.compute(*cloud_normals);
    }
    else{ //use normal of msg
      
    }
    if(! (kd_tree->nearestKSearch (searchPoint, K, pointIdxNKNSearch, pointNKNSquaredDistance) > 0)){
      JSK_ROS_INFO("kdtree failed");
      return;
    }
    float x = cloud->points[pointIdxNKNSearch[0]].x;
    float y = cloud->points[pointIdxNKNSearch[0]].y;
    float z = cloud->points[pointIdxNKNSearch[0]].z;
    float v_x = cloud_normals->points[pointIdxNKNSearch[0]].normal_x;
    float v_y = cloud_normals->points[pointIdxNKNSearch[0]].normal_y;
    float v_z = cloud_normals->points[pointIdxNKNSearch[0]].normal_z;
    double theta = acos(v_x);
    // use normal for estimating handle direction
    tf::Quaternion normal(0, v_z/NORM(0, v_y, v_z) * cos(theta/2), -v_y/NORM(0, v_y, v_z) * cos(theta/2), sin(theta/2));
    tf::Quaternion final_quaternion = normal;
    double min_theta_index = 0;
    double min_width = 100;
    tf::Quaternion min_qua(0, 0, 0, 1);
    visualization_msgs::Marker debug_hand_marker;
    debug_hand_marker.header = cloud_msg->header;
    debug_hand_marker.ns = string("debug_grasp");
    debug_hand_marker.id = 0;
    debug_hand_marker.type = visualization_msgs::Marker::LINE_LIST;
    debug_hand_marker.pose.orientation.w = 1;
    debug_hand_marker.scale.x=0.003;
    tf::Matrix3x3 best_mat;
    // search over 180 degrees and find the direction with the smallest width
    for(double theta_ = 0; theta_ < 3.14/2; theta_ += 3.14/2/30){
      tf::Quaternion rotate_(sin(theta_), 0, 0, cos(theta_));
      tf::Quaternion temp_qua = normal * rotate_;
      tf::Matrix3x3 temp_mat(temp_qua);
      geometry_msgs::Pose pose_respected_to_tf;
      pose_respected_to_tf.position.x = x;
      pose_respected_to_tf.position.y = y;
      pose_respected_to_tf.position.z = z;
      pose_respected_to_tf.orientation.x = temp_qua.getX();
      pose_respected_to_tf.orientation.y = temp_qua.getY();
      pose_respected_to_tf.orientation.z = temp_qua.getZ();
      pose_respected_to_tf.orientation.w = temp_qua.getW();
      Eigen::Affine3d box_pose_respected_to_cloud_eigend;
      tf::poseMsgToEigen(pose_respected_to_tf, box_pose_respected_to_cloud_eigend);
      Eigen::Affine3d box_pose_respected_to_cloud_eigend_inversed
        = box_pose_respected_to_cloud_eigend.inverse();
      Eigen::Matrix4f box_pose_respected_to_cloud_eigen_inversed_matrixf;
      Eigen::Matrix4d box_pose_respected_to_cloud_eigen_inversed_matrixd
        = box_pose_respected_to_cloud_eigend_inversed.matrix();
      jsk_pcl_ros::convertMatrix4<Eigen::Matrix4d, Eigen::Matrix4f>(
                                                                    box_pose_respected_to_cloud_eigen_inversed_matrixd,
                                                                    box_pose_respected_to_cloud_eigen_inversed_matrixf);
      Eigen::Affine3f offset = Eigen::Affine3f(box_pose_respected_to_cloud_eigen_inversed_matrixf);
      pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud(new pcl::PointCloud<pcl::PointXYZ>);
      pcl::transformPointCloud(*cloud, *output_cloud, offset);

      pcl::PassThrough<pcl::PointXYZ> pass;
      pcl::PointCloud<pcl::PointXYZ>::Ptr points_z(new pcl::PointCloud<pcl::PointXYZ>), points_yz(new pcl::PointCloud<pcl::PointXYZ>), points_xyz(new pcl::PointCloud<pcl::PointXYZ>);
      pass.setInputCloud(output_cloud);
      pass.setFilterFieldName("y");
      pass.setFilterLimits(-handle.arm_w*2, handle.arm_w*2);
      pass.filter(*points_z);
      pass.setInputCloud(points_z);
      pass.setFilterFieldName("z");
      pass.setFilterLimits(-handle.finger_d, handle.finger_d);
      pass.filter(*points_yz);
      pass.setInputCloud(points_yz);
      pass.setFilterFieldName("x");
      pass.setFilterLimits(-(handle.arm_l-handle.finger_l), handle.finger_l);
      pass.filter(*points_xyz);
      pcl::KdTreeFLANN<pcl::PointXYZ> kdtree;
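      // Zero out x and z so the radius search below measures only the extent along the local y axis (the width across the handle).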
      for(size_t index=0; index<points_xyz->size(); index++){
        points_xyz->points[index].x = points_xyz->points[index].z = 0;
      }
      if(points_xyz->points.size() == 0){
        JSK_ROS_INFO("points are empty");
        return;
      }
      kdtree.setInputCloud(points_xyz);
      std::vector<int> pointIdxRadiusSearch;
      std::vector<float> pointRadiusSquaredDistance;
      pcl::PointXYZ search_point_tree;
      search_point_tree.x=search_point_tree.y=search_point_tree.z=0;
      if( kdtree.radiusSearch(search_point_tree, 10, pointIdxRadiusSearch, pointRadiusSquaredDistance) > 0 ){
        double before_w=10, temp_w;
        for(size_t index = 0; index < pointIdxRadiusSearch.size(); ++index){
          temp_w =sqrt(pointRadiusSquaredDistance[index]);
          if(temp_w - before_w > handle.finger_w*2){
            break; // there are small space for finger
          }
          before_w=temp_w;
        }
        if(before_w < min_width){
          min_theta_index = theta_;
          min_width = before_w;
          min_qua = temp_qua;
          best_mat = temp_mat;
        }
        //for debug view
        geometry_msgs::Point temp_point;
        std_msgs::ColorRGBA temp_color;
        temp_color.r=0; temp_color.g=0; temp_color.b=1; temp_color.a=1;
        temp_point.x=x-temp_mat.getColumn(1)[0] * before_w;
        temp_point.y=y-temp_mat.getColumn(1)[1] * before_w;
        temp_point.z=z-temp_mat.getColumn(1)[2] * before_w;
        debug_hand_marker.points.push_back(temp_point);
        debug_hand_marker.colors.push_back(temp_color);
        temp_point.x+=2*temp_mat.getColumn(1)[0] * before_w;
        temp_point.y+=2*temp_mat.getColumn(1)[1] * before_w;
        temp_point.z+=2*temp_mat.getColumn(1)[2] * before_w;
        debug_hand_marker.points.push_back(temp_point);
        debug_hand_marker.colors.push_back(temp_color);
      }
    }
    geometry_msgs::PoseStamped handle_pose_stamped;
    handle_pose_stamped.header = cloud_msg->header;
    handle_pose_stamped.pose.position.x = x;
    handle_pose_stamped.pose.position.y = y;
    handle_pose_stamped.pose.position.z = z;
    handle_pose_stamped.pose.orientation.x = min_qua.getX();
    handle_pose_stamped.pose.orientation.y = min_qua.getY();
    handle_pose_stamped.pose.orientation.z = min_qua.getZ();
    handle_pose_stamped.pose.orientation.w = min_qua.getW();
    std_msgs::Float64 min_width_msg;
    min_width_msg.data = min_width;
    pub_pose_.publish(handle_pose_stamped);
    pub_debug_marker_.publish(debug_hand_marker);
    pub_debug_marker_array_.publish(make_handle_array(handle_pose_stamped, handle));
    jsk_recognition_msgs::SimpleHandle simple_handle;
    simple_handle.header = handle_pose_stamped.header;
    simple_handle.pose = handle_pose_stamped.pose;
    simple_handle.handle_width = min_width;
    pub_handle_.publish(simple_handle);
  }
Example 11
static void scan_callback(const sensor_msgs::PointCloud2::ConstPtr& input)
{
	if (map_loaded == 1 && init_pos_set == 1) {
		matching_start = std::chrono::system_clock::now();

		static tf::TransformBroadcaster br;
		tf::Transform transform;
		tf::Quaternion predict_q, ndt_q, current_q, control_q, localizer_q;

		pcl::PointXYZ p;
		pcl::PointCloud<pcl::PointXYZ> scan;

		current_scan_time = input->header.stamp;

		pcl::fromROSMsg(*input, scan);

		if(_localizer == "velodyne"){
			pcl::PointCloud<velodyne_pointcloud::PointXYZIR> tmp;
			pcl::fromROSMsg(*input, tmp);
			scan.points.clear();

			for (pcl::PointCloud<velodyne_pointcloud::PointXYZIR>::const_iterator item = tmp.begin(); item != tmp.end(); item++) {
				p.x = (double) item->x;
				p.y = (double) item->y;
				p.z = (double) item->z;
				if(item->ring >= min && item->ring <= max && item->ring % layer == 0 ){
					scan.points.push_back(p);
				}
			}
		}

		pcl::PointCloud<pcl::PointXYZ>::Ptr scan_ptr(new pcl::PointCloud<pcl::PointXYZ>(scan));
		pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_scan_ptr(new pcl::PointCloud<pcl::PointXYZ>());

		Eigen::Matrix4f t(Eigen::Matrix4f::Identity()); // base_link
		Eigen::Matrix4f t2(Eigen::Matrix4f::Identity()); // localizer

		// Downsampling the velodyne scan using VoxelGrid filter
		pcl::VoxelGrid<pcl::PointXYZ> voxel_grid_filter;
		voxel_grid_filter.setLeafSize(voxel_leaf_size, voxel_leaf_size, voxel_leaf_size);
		voxel_grid_filter.setInputCloud(scan_ptr);
		voxel_grid_filter.filter(*filtered_scan_ptr);

		// Setting point cloud to be aligned.
		ndt.setInputSource(filtered_scan_ptr);

		// Guess the initial gross estimation of the transformation
		predict_pose.x = previous_pose.x + offset_x;
		predict_pose.y = previous_pose.y + offset_y;
		predict_pose.z = previous_pose.z + offset_z;
		predict_pose.roll = previous_pose.roll;
		predict_pose.pitch = previous_pose.pitch;
		predict_pose.yaw = previous_pose.yaw + offset_yaw;

		Eigen::Translation3f init_translation(predict_pose.x, predict_pose.y, predict_pose.z);
		Eigen::AngleAxisf init_rotation_x(predict_pose.roll, Eigen::Vector3f::UnitX());
		Eigen::AngleAxisf init_rotation_y(predict_pose.pitch, Eigen::Vector3f::UnitY());
		Eigen::AngleAxisf init_rotation_z(predict_pose.yaw, Eigen::Vector3f::UnitZ());
		Eigen::Matrix4f init_guess = (init_translation * init_rotation_z * init_rotation_y * init_rotation_x) * tf_btol;

		pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud(new pcl::PointCloud<pcl::PointXYZ>);
		ndt.align(*output_cloud, init_guess);

		t = ndt.getFinalTransformation(); // localizer
		t2 = t * tf_ltob; // base_link

		iteration = ndt.getFinalNumIteration();
		score = ndt.getFitnessScore();
		trans_probability = ndt.getTransformationProbability();

		tf::Matrix3x3 mat_l; // localizer
		mat_l.setValue(static_cast<double>(t(0, 0)), static_cast<double>(t(0, 1)), static_cast<double>(t(0, 2)),
				static_cast<double>(t(1, 0)), static_cast<double>(t(1, 1)), static_cast<double>(t(1, 2)),
				static_cast<double>(t(2, 0)), static_cast<double>(t(2, 1)), static_cast<double>(t(2, 2)));

		// Update localizer_pose
		localizer_pose.x = t(0, 3);
		localizer_pose.y = t(1, 3);
		localizer_pose.z = t(2, 3);
		mat_l.getRPY(localizer_pose.roll, localizer_pose.pitch, localizer_pose.yaw, 1);

		tf::Matrix3x3 mat_b; // base_link
		mat_b.setValue(static_cast<double>(t2(0, 0)), static_cast<double>(t2(0, 1)), static_cast<double>(t2(0, 2)),
				static_cast<double>(t2(1, 0)), static_cast<double>(t2(1, 1)), static_cast<double>(t2(1, 2)),
				static_cast<double>(t2(2, 0)), static_cast<double>(t2(2, 1)), static_cast<double>(t2(2, 2)));

		// Update ndt_pose
		ndt_pose.x = t2(0, 3);
		ndt_pose.y = t2(1, 3);
		ndt_pose.z = t2(2, 3);
		mat_b.getRPY(ndt_pose.roll, ndt_pose.pitch, ndt_pose.yaw, 1);

		// Compute the velocity
		scan_duration = current_scan_time - previous_scan_time;
		double secs = scan_duration.toSec();
		double distance = sqrt((ndt_pose.x - previous_pose.x) * (ndt_pose.x - previous_pose.x) +
				(ndt_pose.y - previous_pose.y) * (ndt_pose.y - previous_pose.y) +
				(ndt_pose.z - previous_pose.z) * (ndt_pose.z - previous_pose.z));

		predict_pose_error = sqrt((ndt_pose.x - predict_pose.x) * (ndt_pose.x - predict_pose.x) +
				(ndt_pose.y - predict_pose.y) * (ndt_pose.y - predict_pose.y) +
				(ndt_pose.z - predict_pose.z) * (ndt_pose.z - predict_pose.z));

		current_velocity = distance / secs;
		current_velocity_smooth = (current_velocity + previous_velocity + second_previous_velocity) / 3.0;
		if(current_velocity_smooth < 0.2){
			current_velocity_smooth = 0.0;
		}
		current_acceleration = (current_velocity - previous_velocity) / secs;

		estimated_vel_mps.data = current_velocity;
		estimated_vel_kmph.data = current_velocity * 3.6;

		estimated_vel_mps_pub.publish(estimated_vel_mps);
		estimated_vel_kmph_pub.publish(estimated_vel_kmph);

		if(predict_pose_error <= PREDICT_POSE_THRESHOLD){
			use_predict_pose = 0;
		}else{
			use_predict_pose = 1;
		}
		use_predict_pose = 0;

		if(use_predict_pose == 0){
			current_pose.x = ndt_pose.x;
			current_pose.y = ndt_pose.y;
			current_pose.z = ndt_pose.z;
			current_pose.roll = ndt_pose.roll;
			current_pose.pitch = ndt_pose.pitch;
			current_pose.yaw = ndt_pose.yaw;
		}else{
			current_pose.x = predict_pose.x;
			current_pose.y = predict_pose.y;
			current_pose.z = predict_pose.z;
			current_pose.roll = predict_pose.roll;
			current_pose.pitch = predict_pose.pitch;
			current_pose.yaw = predict_pose.yaw;
		}

		control_pose.roll = current_pose.roll;
		control_pose.pitch = current_pose.pitch;
		control_pose.yaw = current_pose.yaw - angle / 180.0 * M_PI;
		double theta = control_pose.yaw;
		control_pose.x = cos(theta) * (-control_shift_x) + sin(theta) * (-control_shift_y) + current_pose.x;
		control_pose.y = -sin(theta) * (-control_shift_x) + cos(theta) * (-control_shift_y) + current_pose.y;
		control_pose.z = current_pose.z - control_shift_z;

		// Set values for publishing pose
		predict_q.setRPY(predict_pose.roll, predict_pose.pitch, predict_pose.yaw);
		predict_pose_msg.header.frame_id = "/map";
		predict_pose_msg.header.stamp = current_scan_time;
		predict_pose_msg.pose.position.x = predict_pose.x;
		predict_pose_msg.pose.position.y = predict_pose.y;
		predict_pose_msg.pose.position.z = predict_pose.z;
		predict_pose_msg.pose.orientation.x = predict_q.x();
		predict_pose_msg.pose.orientation.y = predict_q.y();
		predict_pose_msg.pose.orientation.z = predict_q.z();
		predict_pose_msg.pose.orientation.w = predict_q.w();

		ndt_q.setRPY(ndt_pose.roll, ndt_pose.pitch, ndt_pose.yaw);
		ndt_pose_msg.header.frame_id = "/map";
		ndt_pose_msg.header.stamp = current_scan_time;
		ndt_pose_msg.pose.position.x = ndt_pose.x;
		ndt_pose_msg.pose.position.y = ndt_pose.y;
		ndt_pose_msg.pose.position.z = ndt_pose.z;
		ndt_pose_msg.pose.orientation.x = ndt_q.x();
		ndt_pose_msg.pose.orientation.y = ndt_q.y();
		ndt_pose_msg.pose.orientation.z = ndt_q.z();
		ndt_pose_msg.pose.orientation.w = ndt_q.w();

		current_q.setRPY(current_pose.roll, current_pose.pitch, current_pose.yaw);
		current_pose_msg.header.frame_id = "/map";
		current_pose_msg.header.stamp = current_scan_time;
		current_pose_msg.pose.position.x = current_pose.x;
		current_pose_msg.pose.position.y = current_pose.y;
		current_pose_msg.pose.position.z = current_pose.z;
		current_pose_msg.pose.orientation.x = current_q.x();
		current_pose_msg.pose.orientation.y = current_q.y();
		current_pose_msg.pose.orientation.z = current_q.z();
		current_pose_msg.pose.orientation.w = current_q.w();

		control_q.setRPY(control_pose.roll, control_pose.pitch, control_pose.yaw);
		control_pose_msg.header.frame_id = "/map";
		control_pose_msg.header.stamp = current_scan_time;
		control_pose_msg.pose.position.x = control_pose.x;
		control_pose_msg.pose.position.y = control_pose.y;
		control_pose_msg.pose.position.z = control_pose.z;
		control_pose_msg.pose.orientation.x = control_q.x();
		control_pose_msg.pose.orientation.y = control_q.y();
		control_pose_msg.pose.orientation.z = control_q.z();
		control_pose_msg.pose.orientation.w = control_q.w();

		localizer_q.setRPY(localizer_pose.roll, localizer_pose.pitch, localizer_pose.yaw);
		localizer_pose_msg.header.frame_id = "/map";
		localizer_pose_msg.header.stamp = current_scan_time;
		localizer_pose_msg.pose.position.x = localizer_pose.x;
		localizer_pose_msg.pose.position.y = localizer_pose.y;
		localizer_pose_msg.pose.position.z = localizer_pose.z;
		localizer_pose_msg.pose.orientation.x = localizer_q.x();
		localizer_pose_msg.pose.orientation.y = localizer_q.y();
		localizer_pose_msg.pose.orientation.z = localizer_q.z();
		localizer_pose_msg.pose.orientation.w = localizer_q.w();

		predict_pose_pub.publish(predict_pose_msg);
		ndt_pose_pub.publish(ndt_pose_msg);
		current_pose_pub.publish(current_pose_msg);
		control_pose_pub.publish(control_pose_msg);
		localizer_pose_pub.publish(localizer_pose_msg);

		// Send TF "/base_link" to "/map"
		transform.setOrigin(tf::Vector3(current_pose.x, current_pose.y, current_pose.z));
		transform.setRotation(current_q);
		br.sendTransform(tf::StampedTransform(transform, current_scan_time, "/map", "/base_link"));

		matching_end = std::chrono::system_clock::now();
		exe_time = std::chrono::duration_cast<std::chrono::microseconds>(matching_end-matching_start).count()/1000.0;
		time_ndt_matching.data = exe_time;
		time_ndt_matching_pub.publish(time_ndt_matching);

		// Set values for /estimate_twist
		angular_velocity = (current_pose.yaw - previous_pose.yaw) / secs;

		estimate_twist_msg.header.stamp = current_scan_time;
		estimate_twist_msg.twist.linear.x = current_velocity;
		estimate_twist_msg.twist.linear.y = 0.0;
		estimate_twist_msg.twist.linear.z = 0.0;
		estimate_twist_msg.twist.angular.x = 0.0;
		estimate_twist_msg.twist.angular.y = 0.0;
		estimate_twist_msg.twist.angular.z = angular_velocity;

		estimate_twist_pub.publish(estimate_twist_msg);

		geometry_msgs::Vector3Stamped estimate_vel_msg;
		estimate_vel_msg.header.stamp = current_scan_time;
		estimate_vel_msg.vector.x = current_velocity;
		estimated_vel_pub.publish(estimate_vel_msg);

		// Set values for /ndt_stat
		ndt_stat_msg.header.stamp = current_scan_time;
		ndt_stat_msg.exe_time = time_ndt_matching.data;
		ndt_stat_msg.iteration = iteration;
		ndt_stat_msg.score = score;
		ndt_stat_msg.velocity = current_velocity;
		ndt_stat_msg.acceleration = current_acceleration;
		ndt_stat_msg.use_predict_pose = 0;

		ndt_stat_pub.publish(ndt_stat_msg);

		/* Compute NDT_Reliability */
		ndt_reliability.data = Wa * (exe_time/100.0) * 100.0 + Wb * (iteration/10.0) * 100.0 + Wc * ((2.0-trans_probability)/2.0) * 100.0;
		ndt_reliability_pub.publish(ndt_reliability);

#ifdef OUTPUT
		// Output log.csv
		std::ofstream ofs_log("log.csv", std::ios::app);
		if (!ofs_log) {
			std::cerr << "Could not open 'log.csv'." << std::endl;
			exit(1);
		}
		ofs_log << input->header.seq << ","
				<< step_size << ","
				<< trans_eps << ","
				<< voxel_leaf_size << ","
				<< current_pose.x << ","
				<< current_pose.y << ","
				<< current_pose.z << ","
				<< current_pose.roll << ","
				<< current_pose.pitch << ","
				<< current_pose.yaw << ","
				<< predict_pose.x << ","
				<< predict_pose.y << ","
				<< predict_pose.z << ","
				<< predict_pose.roll << ","
				<< predict_pose.pitch << ","
				<< predict_pose.yaw << ","
				<< current_pose.x - predict_pose.x << ","
				<< current_pose.y - predict_pose.y << ","
				<< current_pose.z - predict_pose.z << ","
				<< current_pose.roll - predict_pose.roll << ","
				<< current_pose.pitch - predict_pose.pitch << ","
				<< current_pose.yaw - predict_pose.yaw << ","
				<< predict_pose_error << ","
				<< iteration << ","
				<< score << ","
				<< trans_probability << ","
				<< ndt_reliability.data << ","
				<< current_velocity << ","
				<< current_velocity_smooth << ","
				<< current_acceleration << ","
				<< angular_velocity << ","
				<< time_ndt_matching.data << ","
				<< std::endl;
#endif

		std::cout << "-----------------------------------------------------------------" << std::endl;
		std::cout << "Sequence: " << input->header.seq << std::endl;
		std::cout << "Timestamp: " << input->header.stamp << std::endl;
		std::cout << "Frame ID: " << input->header.frame_id << std::endl;
		std::cout << "Number of Scan Points: " << scan_ptr->size() << " points." << std::endl;
		std::cout << "Number of Filtered Scan Points: " << filtered_scan_ptr->size() << " points." << std::endl;
		std::cout << "Leaf Size: " << voxel_leaf_size << std::endl;
		std::cout << "NDT has converged: " << ndt.hasConverged() << std::endl;
		std::cout << "Fitness Score: " << ndt.getFitnessScore() << std::endl;
		std::cout << "Transformation Probability: " << ndt.getTransformationProbability() << std::endl;
		std::cout << "Execution Time: " << exe_time << " ms." << std::endl;
		std::cout << "Number of Iterations: " << ndt.getFinalNumIteration() << std::endl;
		std::cout << "NDT Reliability: " << ndt_reliability.data << std::endl;
		std::cout << "(x,y,z,roll,pitch,yaw): " << std::endl;
		std::cout << "(" << current_pose.x << ", " << current_pose.y << ", " << current_pose.z
				<< ", " << current_pose.roll << ", " << current_pose.pitch << ", " << current_pose.yaw << ")" << std::endl;
		std::cout << "Transformation Matrix: " << std::endl;
		std::cout << t << std::endl;
		std::cout << "-----------------------------------------------------------------" << std::endl;

		// Update previous_***
		offset_x = current_pose.x - previous_pose.x;
		offset_y = current_pose.y - previous_pose.y;
		offset_z = current_pose.z - previous_pose.z;
		offset_yaw = current_pose.yaw - previous_pose.yaw;

		previous_pose.x = current_pose.x;
		previous_pose.y = current_pose.y;
		previous_pose.z = current_pose.z;
		previous_pose.roll = current_pose.roll;
		previous_pose.pitch = current_pose.pitch;
		previous_pose.yaw = current_pose.yaw;

		previous_scan_time.sec = current_scan_time.sec;
		previous_scan_time.nsec = current_scan_time.nsec;

		second_previous_velocity = previous_velocity;
		previous_velocity = current_velocity;
		previous_acceleration = current_acceleration;
		previous_estimated_vel_kmph.data = estimated_vel_kmph.data;
	}
}
Example 12
static void points_callback(const sensor_msgs::PointCloud2::ConstPtr& input)
{
  if (map_loaded == 1 && init_pos_set == 1)
  {
    matching_start = std::chrono::system_clock::now();

    static tf::TransformBroadcaster br;
    tf::Transform transform;
    tf::Quaternion predict_q, icp_q, current_q, localizer_q;

    pcl::PointXYZ p;
    pcl::PointCloud<pcl::PointXYZ> filtered_scan;

    current_scan_time = input->header.stamp;

    pcl::fromROSMsg(*input, filtered_scan);
    pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_scan_ptr(new pcl::PointCloud<pcl::PointXYZ>(filtered_scan));
    int scan_points_num = filtered_scan_ptr->size();

    Eigen::Matrix4f t(Eigen::Matrix4f::Identity());   // base_link
    Eigen::Matrix4f t2(Eigen::Matrix4f::Identity());  // localizer

    std::chrono::time_point<std::chrono::system_clock> align_start, align_end, getFitnessScore_start, getFitnessScore_end;
    static double align_time, getFitnessScore_time = 0.0;

    // Setting point cloud to be aligned.
//    ndt.setInputSource(filtered_scan_ptr);
    icp.setInputSource(filtered_scan_ptr);

    // Guess the initial gross estimation of the transformation
    predict_pose.x = previous_pose.x + offset_x;
    predict_pose.y = previous_pose.y + offset_y;
    predict_pose.z = previous_pose.z + offset_z;
    predict_pose.roll = previous_pose.roll;
    predict_pose.pitch = previous_pose.pitch;
    predict_pose.yaw = previous_pose.yaw + offset_yaw;

    Eigen::Translation3f init_translation(predict_pose.x, predict_pose.y, predict_pose.z);
    Eigen::AngleAxisf init_rotation_x(predict_pose.roll, Eigen::Vector3f::UnitX());
    Eigen::AngleAxisf init_rotation_y(predict_pose.pitch, Eigen::Vector3f::UnitY());
    Eigen::AngleAxisf init_rotation_z(predict_pose.yaw, Eigen::Vector3f::UnitZ());
    Eigen::Matrix4f init_guess = (init_translation * init_rotation_z * init_rotation_y * init_rotation_x) * tf_btol;

    pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud(new pcl::PointCloud<pcl::PointXYZ>);
//    ndt.align(*output_cloud, init_guess);

    icp.setMaximumIterations(maximum_iterations);
    icp.setTransformationEpsilon(transformation_epsilon);
    icp.setMaxCorrespondenceDistance(max_correspondence_distance);
    icp.setEuclideanFitnessEpsilon(euclidean_fitness_epsilon);
    icp.setRANSACOutlierRejectionThreshold(ransac_outlier_rejection_threshold);

    align_start = std::chrono::system_clock::now();
    icp.align(*output_cloud, init_guess);
    align_end = std::chrono::system_clock::now();
    align_time = std::chrono::duration_cast<std::chrono::microseconds>(align_end - align_start).count() / 1000.0;

//    t = ndt.getFinalTransformation();  // localizer
    t = icp.getFinalTransformation();  // localizer
    t2 = t * tf_ltob;                  // base_link

//    iteration = ndt.getFinalNumIteration();
//    score = ndt.getFitnessScore();

    getFitnessScore_start = std::chrono::system_clock::now();
    fitness_score = icp.getFitnessScore();
    getFitnessScore_end = std::chrono::system_clock::now();
    getFitnessScore_time = std::chrono::duration_cast<std::chrono::microseconds>(getFitnessScore_end - getFitnessScore_start).count() / 1000.0;

//    trans_probability = ndt.getTransformationProbability();

    tf::Matrix3x3 mat_l;  // localizer
    mat_l.setValue(static_cast<double>(t(0, 0)), static_cast<double>(t(0, 1)), static_cast<double>(t(0, 2)),
                   static_cast<double>(t(1, 0)), static_cast<double>(t(1, 1)), static_cast<double>(t(1, 2)),
                   static_cast<double>(t(2, 0)), static_cast<double>(t(2, 1)), static_cast<double>(t(2, 2)));

    // Update localizer_pose
    localizer_pose.x = t(0, 3);
    localizer_pose.y = t(1, 3);
    localizer_pose.z = t(2, 3);
    mat_l.getRPY(localizer_pose.roll, localizer_pose.pitch, localizer_pose.yaw, 1);

    tf::Matrix3x3 mat_b;  // base_link
    mat_b.setValue(static_cast<double>(t2(0, 0)), static_cast<double>(t2(0, 1)), static_cast<double>(t2(0, 2)),
                   static_cast<double>(t2(1, 0)), static_cast<double>(t2(1, 1)), static_cast<double>(t2(1, 2)),
                   static_cast<double>(t2(2, 0)), static_cast<double>(t2(2, 1)), static_cast<double>(t2(2, 2)));

    // Update icp_pose
    icp_pose.x = t2(0, 3);
    icp_pose.y = t2(1, 3);
    icp_pose.z = t2(2, 3);
    mat_b.getRPY(icp_pose.roll, icp_pose.pitch, icp_pose.yaw, 1);

    // Calculate the difference between icp_pose and predict_pose
    predict_pose_error = sqrt((icp_pose.x - predict_pose.x) * (icp_pose.x - predict_pose.x) +
                              (icp_pose.y - predict_pose.y) * (icp_pose.y - predict_pose.y) +
                              (icp_pose.z - predict_pose.z) * (icp_pose.z - predict_pose.z));

    if (predict_pose_error <= PREDICT_POSE_THRESHOLD)
    {
      use_predict_pose = 0;
    }
    else
    {
      use_predict_pose = 1;
    }
    use_predict_pose = 0;

    if (use_predict_pose == 0)
    {
      current_pose.x = icp_pose.x;
      current_pose.y = icp_pose.y;
      current_pose.z = icp_pose.z;
      current_pose.roll = icp_pose.roll;
      current_pose.pitch = icp_pose.pitch;
      current_pose.yaw = icp_pose.yaw;
    }
    else
    {
      current_pose.x = predict_pose.x;
      current_pose.y = predict_pose.y;
      current_pose.z = predict_pose.z;
      current_pose.roll = predict_pose.roll;
      current_pose.pitch = predict_pose.pitch;
      current_pose.yaw = predict_pose.yaw;
    }

    // Compute the velocity and acceleration
    scan_duration = current_scan_time - previous_scan_time;
    double secs = scan_duration.toSec();
    diff_x = current_pose.x - previous_pose.x;
    diff_y = current_pose.y - previous_pose.y;
    diff_z = current_pose.z - previous_pose.z;
    diff_yaw = current_pose.yaw - previous_pose.yaw;
    diff = sqrt(diff_x * diff_x + diff_y * diff_y + diff_z * diff_z);

    current_velocity = diff / secs;
    current_velocity_x = diff_x / secs;
    current_velocity_y = diff_y / secs;
    current_velocity_z = diff_z / secs;
    angular_velocity = diff_yaw / secs;

    current_velocity_smooth = (current_velocity + previous_velocity + previous_previous_velocity) / 3.0;
    if (current_velocity_smooth < 0.2)
    {
      current_velocity_smooth = 0.0;
    }
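    // current_velocity_smooth is a three-sample moving average of the scan-to-scan speed; values below
    // 0.2 m/s are clamped to zero, presumably to suppress jitter while the vehicle is stationary.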

    current_accel = (current_velocity - previous_velocity) / secs;
    current_accel_x = (current_velocity_x - previous_velocity_x) / secs;
    current_accel_y = (current_velocity_y - previous_velocity_y) / secs;
    current_accel_z = (current_velocity_z - previous_velocity_z) / secs;

    estimated_vel_mps.data = current_velocity;
    estimated_vel_kmph.data = current_velocity * 3.6;

    estimated_vel_mps_pub.publish(estimated_vel_mps);
    estimated_vel_kmph_pub.publish(estimated_vel_kmph);

    // Set values for publishing pose
    predict_q.setRPY(predict_pose.roll, predict_pose.pitch, predict_pose.yaw);
    predict_pose_msg.header.frame_id = "/map";
    predict_pose_msg.header.stamp = current_scan_time;
    predict_pose_msg.pose.position.x = predict_pose.x;
    predict_pose_msg.pose.position.y = predict_pose.y;
    predict_pose_msg.pose.position.z = predict_pose.z;
    predict_pose_msg.pose.orientation.x = predict_q.x();
    predict_pose_msg.pose.orientation.y = predict_q.y();
    predict_pose_msg.pose.orientation.z = predict_q.z();
    predict_pose_msg.pose.orientation.w = predict_q.w();

    icp_q.setRPY(icp_pose.roll, icp_pose.pitch, icp_pose.yaw);
    icp_pose_msg.header.frame_id = "/map";
    icp_pose_msg.header.stamp = current_scan_time;
    icp_pose_msg.pose.position.x = icp_pose.x;
    icp_pose_msg.pose.position.y = icp_pose.y;
    icp_pose_msg.pose.position.z = icp_pose.z;
    icp_pose_msg.pose.orientation.x = icp_q.x();
    icp_pose_msg.pose.orientation.y = icp_q.y();
    icp_pose_msg.pose.orientation.z = icp_q.z();
    icp_pose_msg.pose.orientation.w = icp_q.w();

    current_q.setRPY(current_pose.roll, current_pose.pitch, current_pose.yaw);
    current_pose_msg.header.frame_id = "/map";
    current_pose_msg.header.stamp = current_scan_time;
    current_pose_msg.pose.position.x = current_pose.x;
    current_pose_msg.pose.position.y = current_pose.y;
    current_pose_msg.pose.position.z = current_pose.z;
    current_pose_msg.pose.orientation.x = current_q.x();
    current_pose_msg.pose.orientation.y = current_q.y();
    current_pose_msg.pose.orientation.z = current_q.z();
    current_pose_msg.pose.orientation.w = current_q.w();

    localizer_q.setRPY(localizer_pose.roll, localizer_pose.pitch, localizer_pose.yaw);
    localizer_pose_msg.header.frame_id = "/map";
    localizer_pose_msg.header.stamp = current_scan_time;
    localizer_pose_msg.pose.position.x = localizer_pose.x;
    localizer_pose_msg.pose.position.y = localizer_pose.y;
    localizer_pose_msg.pose.position.z = localizer_pose.z;
    localizer_pose_msg.pose.orientation.x = localizer_q.x();
    localizer_pose_msg.pose.orientation.y = localizer_q.y();
    localizer_pose_msg.pose.orientation.z = localizer_q.z();
    localizer_pose_msg.pose.orientation.w = localizer_q.w();

    predict_pose_pub.publish(predict_pose_msg);
    icp_pose_pub.publish(icp_pose_msg);
    current_pose_pub.publish(current_pose_msg);
    localizer_pose_pub.publish(localizer_pose_msg);

    // Send TF "/base_link" to "/map"
    transform.setOrigin(tf::Vector3(current_pose.x, current_pose.y, current_pose.z));
    transform.setRotation(current_q);
    br.sendTransform(tf::StampedTransform(transform, current_scan_time, "/map", "/base_link"));

    matching_end = std::chrono::system_clock::now();
    exe_time = std::chrono::duration_cast<std::chrono::microseconds>(matching_end - matching_start).count() / 1000.0;
    time_icp_matching.data = exe_time;
    time_icp_matching_pub.publish(time_icp_matching);

    // Set values for /estimate_twist
    estimate_twist_msg.header.stamp = current_scan_time;
    estimate_twist_msg.twist.linear.x = current_velocity;
    estimate_twist_msg.twist.linear.y = 0.0;
    estimate_twist_msg.twist.linear.z = 0.0;
    estimate_twist_msg.twist.angular.x = 0.0;
    estimate_twist_msg.twist.angular.y = 0.0;
    estimate_twist_msg.twist.angular.z = angular_velocity;

    estimate_twist_pub.publish(estimate_twist_msg);

    geometry_msgs::Vector3Stamped estimate_vel_msg;
    estimate_vel_msg.header.stamp = current_scan_time;
    estimate_vel_msg.vector.x = current_velocity;
    estimated_vel_pub.publish(estimate_vel_msg);

    // Set values for /icp_stat
    icp_stat_msg.header.stamp = current_scan_time;
    icp_stat_msg.exe_time = time_icp_matching.data;
//    icp_stat_msg.iteration = iteration;
    icp_stat_msg.score = fitness_score;
    icp_stat_msg.velocity = current_velocity;
    icp_stat_msg.acceleration = current_accel;
    icp_stat_msg.use_predict_pose = 0;

    icp_stat_pub.publish(icp_stat_msg);

    // Write log
    if (!ofs)
    {
      std::cerr << "Could not open " << filename << "." << std::endl;
      exit(1);
    }
    ofs << input->header.seq << "," << scan_points_num << ","
            << current_pose.x << "," << current_pose.y << "," << current_pose.z << "," << current_pose.roll << ","
            << current_pose.pitch << "," << current_pose.yaw << "," << predict_pose.x << "," << predict_pose.y << ","
            << predict_pose.z << "," << predict_pose.roll << "," << predict_pose.pitch << "," << predict_pose.yaw << ","
            << current_pose.x - predict_pose.x << "," << current_pose.y - predict_pose.y << ","
            << current_pose.z - predict_pose.z << "," << current_pose.roll - predict_pose.roll << ","
            << current_pose.pitch - predict_pose.pitch << "," << current_pose.yaw - predict_pose.yaw << ","
            << predict_pose_error << "," <<  "," << fitness_score << ","
            << "," << current_velocity << "," << current_velocity_smooth << "," << current_accel
            << "," << angular_velocity << "," << exe_time << "," << align_time << "," << getFitnessScore_time << std::endl;

    std::cout << "-----------------------------------------------------------------" << std::endl;
    std::cout << "Sequence: " << input->header.seq << std::endl;
    std::cout << "Timestamp: " << input->header.stamp << std::endl;
    std::cout << "Frame ID: " << input->header.frame_id << std::endl;
    //		std::cout << "Number of Scan Points: " << scan_ptr->size() << " points." << std::endl;
    std::cout << "Number of Filtered Scan Points: " << scan_points_num << " points." << std::endl;
    std::cout << "ICP has converged: " << icp.hasConverged() << std::endl;
    std::cout << "Fitness Score: " << fitness_score << std::endl;
//    std::cout << "Transformation Probability: " << ndt.getTransformationProbability() << std::endl;
    std::cout << "Execution Time: " << exe_time << " ms." << std::endl;
//    std::cout << "Number of Iterations: " << ndt.getFinalNumIteration() << std::endl;
//    std::cout << "NDT Reliability: " << ndt_reliability.data << std::endl;
    std::cout << "(x,y,z,roll,pitch,yaw): " << std::endl;
    std::cout << "(" << current_pose.x << ", " << current_pose.y << ", " << current_pose.z << ", " << current_pose.roll
              << ", " << current_pose.pitch << ", " << current_pose.yaw << ")" << std::endl;
    std::cout << "Transformation Matrix: " << std::endl;
    std::cout << t << std::endl;
    std::cout << "-----------------------------------------------------------------" << std::endl;

    // Update offset
    if (_offset == "linear")
    {
      offset_x = diff_x;
      offset_y = diff_y;
      offset_z = diff_z;
      offset_yaw = diff_yaw;
    }
    else if (_offset == "quadratic")
    {
      offset_x = (current_velocity_x + current_accel_x * secs) * secs;
      offset_y = (current_velocity_y + current_accel_y * secs) * secs;
      offset_z = diff_z;
      offset_yaw = diff_yaw;
    }
    else if (_offset == "zero")
    {
      offset_x = 0.0;
      offset_y = 0.0;
      offset_z = 0.0;
      offset_yaw = 0.0;
    }
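    // The offset is the pose increment added to previous_pose to form the next predict_pose:
    // "linear" reuses the last scan-to-scan displacement (constant velocity), "quadratic" extrapolates
    // x/y with a constant-acceleration term (v + a*dt)*dt, and "zero" disables prediction.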

    // Update previous_***
    previous_pose.x = current_pose.x;
    previous_pose.y = current_pose.y;
    previous_pose.z = current_pose.z;
    previous_pose.roll = current_pose.roll;
    previous_pose.pitch = current_pose.pitch;
    previous_pose.yaw = current_pose.yaw;

    previous_scan_time.sec = current_scan_time.sec;
    previous_scan_time.nsec = current_scan_time.nsec;

    previous_previous_velocity = previous_velocity;
    previous_velocity = current_velocity;
    previous_velocity_x = current_velocity_x;
    previous_velocity_y = current_velocity_y;
    previous_velocity_z = current_velocity_z;
    previous_accel = current_accel;

    previous_estimated_vel_kmph.data = estimated_vel_kmph.data;
  }
}
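// Hedged sketch (not part of the original node): one plausible way the static transforms used above
// could be constructed. tf_btol would be the base_link -> localizer matrix built from an assumed
// sensor mounting offset (the tf_* arguments are hypothetical parameters), using the same
// Translation * Rz * Ry * Rx composition as init_guess; tf_ltob would then be its inverse.
// Assumes <Eigen/Geometry> is available, as it already is for the code above.
static Eigen::Matrix4f makeBaseToLocalizer(float tf_x, float tf_y, float tf_z,
                                           float tf_roll, float tf_pitch, float tf_yaw)
{
  Eigen::Translation3f tl_btol(tf_x, tf_y, tf_z);
  Eigen::AngleAxisf rot_x_btol(tf_roll, Eigen::Vector3f::UnitX());
  Eigen::AngleAxisf rot_y_btol(tf_pitch, Eigen::Vector3f::UnitY());
  Eigen::AngleAxisf rot_z_btol(tf_yaw, Eigen::Vector3f::UnitZ());
  return (tl_btol * rot_z_btol * rot_y_btol * rot_x_btol).matrix();
}
// Usage sketch: tf_btol = makeBaseToLocalizer(...); tf_ltob = tf_btol.inverse();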
int
main (int argc, char** argv)
{
  // Loading first scan of room.
  pcl::PointCloud<pcl::PointXYZ>::Ptr target_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile<pcl::PointXYZ> ("room_scan1.pcd", *target_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read file room_scan1.pcd \n");
    return (-1);
  }
  std::cout << "Loaded " << target_cloud->size () << " data points from room_scan1.pcd" << std::endl;

  // Loading second scan of room from new perspective.
  pcl::PointCloud<pcl::PointXYZ>::Ptr input_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile<pcl::PointXYZ> ("room_scan2.pcd", *input_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read file room_scan2.pcd \n");
    return (-1);
  }
  std::cout << "Loaded " << input_cloud->size () << " data points from room_scan2.pcd" << std::endl;

  // Filtering input scan to roughly 10% of original size to increase speed of registration.
  pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::ApproximateVoxelGrid<pcl::PointXYZ> approximate_voxel_filter;
  approximate_voxel_filter.setLeafSize (0.2, 0.2, 0.2);
  approximate_voxel_filter.setInputCloud (input_cloud);
  approximate_voxel_filter.filter (*filtered_cloud);
  std::cout << "Filtered cloud contains " << filtered_cloud->size ()
            << " data points from room_scan2.pcd" << std::endl;

  // Initializing Normal Distributions Transform (NDT).
  pcl::NormalDistributionsTransform<pcl::PointXYZ, pcl::PointXYZ> ndt;

  // Setting scale dependent NDT parameters
  // Setting minimum transformation difference for termination condition.
  ndt.setTransformationEpsilon (0.01);
  // Setting maximum step size for More-Thuente line search.
  ndt.setStepSize (0.1);
  //Setting Resolution of NDT grid structure (VoxelGridCovariance).
  ndt.setResolution (1.0);

  // Setting max number of registration iterations.
  ndt.setMaximumIterations (35);

  // Setting point cloud to be aligned.
  ndt.setInputSource (filtered_cloud);
  // Setting point cloud to be aligned to.
  ndt.setInputTarget (target_cloud);

  // Set initial alignment estimate found using robot odometry.
  Eigen::AngleAxisf init_rotation (0.6931, Eigen::Vector3f::UnitZ ());
  Eigen::Translation3f init_translation (1.79387, 0.720047, 0);
  Eigen::Matrix4f init_guess = (init_translation * init_rotation).matrix ();

  // Calculating required rigid transform to align the input cloud to the target cloud.
  pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  ndt.align (*output_cloud, init_guess);

  std::cout << "Normal Distributions Transform has converged:" << ndt.hasConverged ()
            << " score: " << ndt.getFitnessScore () << std::endl;

  // Transforming unfiltered, input cloud using found transform.
  pcl::transformPointCloud (*input_cloud, *output_cloud, ndt.getFinalTransformation ());

  // Saving transformed input cloud.
  pcl::io::savePCDFileASCII ("room_scan2_transformed.pcd", *output_cloud);

  // Initializing point cloud visualizer
  boost::shared_ptr<pcl::visualization::PCLVisualizer>
  viewer_final (new pcl::visualization::PCLVisualizer ("3D Viewer"));
  viewer_final->setBackgroundColor (0, 0, 0);

  // Coloring and visualizing target cloud (red).
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ>
  target_color (target_cloud, 255, 0, 0);
  viewer_final->addPointCloud<pcl::PointXYZ> (target_cloud, target_color, "target cloud");
  viewer_final->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE,
                                                  1, "target cloud");

  // Coloring and visualizing transformed input cloud (green).
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ>
  output_color (output_cloud, 0, 255, 0);
  viewer_final->addPointCloud<pcl::PointXYZ> (output_cloud, output_color, "output cloud");
  viewer_final->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE,
                                                  1, "output cloud");

  // Starting visualizer
  viewer_final->addCoordinateSystem (1.0);
  viewer_final->initCameraParameters ();

  // Wait until visualizer window is closed.
  while (!viewer_final->wasStopped ())
  {
    viewer_final->spinOnce (100);
    boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  }

  return (0);
}