int main ( int argc, char** argv )
{

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_source ( new pcl::PointCloud<pcl::PointXYZ> () );
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_target ( new pcl::PointCloud<pcl::PointXYZ> () );

    loadFile ( argv[1], *cloud_source ); // load the source cloud from the first argument


    // Read a 3x3 rotation (row-major) and a translation from the third argument,
    // then assemble them into a 4x4 homogeneous transform.
    float *R, *t;
    R = new float [9];
    t = new float [3];
    loadRTfromFile(R, t, argv[3]);
    Eigen::Affine3f RT;
    RT.matrix() <<
                R[0], R[1], R[2], t[0],
                R[3], R[4], R[5], t[1],
                R[6], R[7], R[8], t[2],
                0,0,0,1;
    delete [] R;
    delete [] t;


    pcl::transformPointCloud ( *cloud_source, *cloud_target, RT );

    pcl::io::savePCDFileASCII ( argv[2], *cloud_target );

    return 0;
}
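This example calls two helpers, loadFile and loadRTfromFile, that are not shown here. A minimal sketch of what they might look like, assuming the transform file holds nine row-major rotation entries followed by three translation entries (the real project may differ):

#include <fstream>
#include <iostream>
#include <pcl/io/pcd_io.h>

// Hypothetical helper: load a PCD file into the given cloud.
void loadFile ( const char* fileName, pcl::PointCloud<pcl::PointXYZ>& cloud )
{
    if ( pcl::io::loadPCDFile ( fileName, cloud ) == -1 )
        std::cerr << "Couldn't read file " << fileName << std::endl;
}

// Hypothetical helper: read R (nine floats, row-major) and t (three floats) from a text file.
void loadRTfromFile ( float* R, float* t, const char* fileName )
{
    std::ifstream in ( fileName );
    for ( int i = 0; i < 9; ++i ) in >> R[i];
    for ( int i = 0; i < 3; ++i ) in >> t[i];
}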
Example #2
int
 main (int argc, char** argv)
{
	pcl::PCDReader reader;
	pcl::PCDWriter writer;
	// source and target point clouds to be registered
	pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud_source (new pcl::PointCloud<pcl::PointXYZRGBA>);
	pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud_target (new pcl::PointCloud<pcl::PointXYZRGBA>);
	// load the source and target clouds
	reader.read (argv[1], *cloud_source);
	std::cout << "Source PointCloud " << argv[1] << " has: " << cloud_source->points.size () << " data points." << std::endl;
	reader.read (argv[2], *cloud_target);
	std::cout << "Target PointCloud " << argv[2] << " has: " << cloud_target->points.size () << " data points." << std::endl;

	// create an IterativeClosestPoint<SourcePointT, TargetPointT> object to perform ICP
	pcl::IterativeClosestPoint<pcl::PointXYZRGBA, pcl::PointXYZRGBA> icp;
	icp.setInputCloud(cloud_source); // set the source point cloud
	icp.setInputTarget(cloud_target); // set the target point cloud

	// Note: no parameters are set here, but ICP exposes several tunable parameters (see the sketch after this example).

	// source point cloud after alignment with the ICP result
	pcl::PointCloud<pcl::PointXYZRGBA> cloud_source_transformed;

	// run the ICP algorithm
	icp.align(cloud_source_transformed); 
	std::cout << "has converged:" << icp.hasConverged() << " score: " <<
		icp.getFitnessScore() << std::endl;
	std::cout << icp.getFinalTransformation() << std::endl;

	// concatenate the aligned source with the target
	pcl::PointCloud<pcl::PointXYZRGBA>::ConstPtr cloud_final = (cloud_source_transformed + *cloud_target).makeShared();
	// save the registration result to cloud_final.pcd
	writer.write("cloud_final.pcd",*cloud_final);

	// Set up a PCLVisualizer and display the transformed source and the target in color.
	// (If the initial camera position leaves the point clouds out of view, adjust the viewpoint with the mouse.)
	boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer ("Registered Result"));
	viewer->setBackgroundColor (1.0, 1.0, 1.0); // white background (values are in the 0-1 range)

	pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGBA> source_color(cloud_source_transformed.makeShared(), 0, 0, 255);
	pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZRGBA> target_color(cloud_target, 255, 0, 0);

	viewer->addPointCloud<pcl::PointXYZRGBA> (cloud_source_transformed.makeShared(), source_color, "source cloud transformed");
	viewer->addPointCloud<pcl::PointXYZRGBA> (cloud_target, target_color, "target cloud");
	viewer->addCoordinateSystem (1.0);
	viewer->initCameraParameters ();

	while (!viewer->wasStopped ())
	{
		viewer->spinOnce (100);
		boost::this_thread::sleep (boost::posix_time::microseconds (100000));
	}
	return (0);
}
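The comment in the example notes that no ICP parameters are set. A minimal sketch of the commonly tuned setters on pcl::IterativeClosestPoint (inherited from pcl::Registration), which would go before icp.align(...); the values below are placeholders, not recommendations:

	// Ignore correspondences farther apart than 5 cm.
	icp.setMaxCorrespondenceDistance (0.05);
	// Stop after at most 50 iterations (convergence criterion 1).
	icp.setMaximumIterations (50);
	// Stop when the transform changes less than this between iterations (criterion 2).
	icp.setTransformationEpsilon (1e-8);
	// Stop when the mean squared correspondence error improves less than this (criterion 3).
	icp.setEuclideanFitnessEpsilon (1e-6);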
Example #3
int main ( int argc, char** argv )
{
  
  
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_source ( new pcl::PointCloud<pcl::PointXYZ> () );
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_target ( new pcl::PointCloud<pcl::PointXYZ> () );
  
  {
    // load source
    loadFile ( argv[1], *cloud_source );
    // load target
    loadFile ( argv[2], *cloud_target );
  }


  // transformed source ---> target
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_source_trans ( new pcl::PointCloud<pcl::PointXYZ> () );
  cloud_source_trans->resize ( cloud_source->size() );

  
  { // ICP registration
    std::cout << "registration" << std::endl;
    
    Eigen::Matrix3Xd X ( 3, cloud_source->size() ); // source, transformed
    Eigen::Matrix3Xd Y ( 3, cloud_target->size() ); // target
    
    for(int i = 0; i < cloud_source->size(); i++)
    {
      X(0,i) = cloud_source->points[i].x;
      X(1,i) = cloud_source->points[i].y;
      X(2,i) = cloud_source->points[i].z;
    }
    for(int i = 0; i < cloud_target->size(); i++)
    {
      Y(0,i) = cloud_target->points[i].x;
      Y(1,i) = cloud_target->points[i].y;
      Y(2,i) = cloud_target->points[i].z;
    }
    
    // ICP::point_to_point ( X, Y ); // standard ICP
    SICP::point_to_point ( X, Y ); // sparse ICP

    
    for(int i = 0; i < cloud_source_trans->size(); i++)
    {
      cloud_source_trans->points[i].x = X(0,i);
      cloud_source_trans->points[i].y = X(1,i);
      cloud_source_trans->points[i].z = X(2,i);
    }
  }
  
  { // visualization
    boost::shared_ptr< pcl::visualization::PCLVisualizer > viewer ( new pcl::visualization::PCLVisualizer ("3D Viewer") );
    viewer->setBackgroundColor (0, 0, 0);
    
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> source_color ( cloud_source, 0, 255, 0 );
    viewer->addPointCloud<pcl::PointXYZ> (cloud_source, source_color, "source");
    viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "source");
    
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> target_color ( cloud_target, 255, 255, 255 );
    viewer->addPointCloud<pcl::PointXYZ> ( cloud_target, target_color, "target");
    viewer->setPointCloudRenderingProperties ( pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "target" );
    
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> source_trans_color ( cloud_source_trans, 255, 0, 255 );
    viewer->addPointCloud<pcl::PointXYZ> ( cloud_source_trans, source_trans_color, "source trans" );
    viewer->setPointCloudRenderingProperties ( pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "source trans" );
    
    
    // orthographic (parallel) projection; same with pressing key 'o'
    viewer->getRenderWindow()->GetRenderers()->GetFirstRenderer()->GetActiveCamera()->SetParallelProjection( 1 );

    viewer->resetCamera();
    
    viewer->spin ();

    
  }
  
  return(0);
}
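Sparse ICP, as used here, writes the aligned coordinates back into X but does not return the 4x4 transform itself. If the rigid transform is needed, one way (a sketch, not part of the original example; requires the Eigen Geometry module and belongs inside the registration block) is to recover it from the original and transformed source points with Eigen::umeyama:

    // X_orig: original source points (3xN); X: the same points after SICP::point_to_point.
    // Eigen::umeyama returns the 4x4 homogeneous transform mapping X_orig onto X (scaling disabled).
    Eigen::Matrix3Xd X_orig ( 3, cloud_source->size() );
    for ( int i = 0; i < cloud_source->size(); i++ )
    {
      X_orig(0,i) = cloud_source->points[i].x;
      X_orig(1,i) = cloud_source->points[i].y;
      X_orig(2,i) = cloud_source->points[i].z;
    }
    Eigen::Matrix4d T = Eigen::umeyama ( X_orig, X, false );
    std::cout << T << std::endl;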
Example #4
int main (int argc, char** argv)
{
  PointCloudPtr cloud_source (new PointCloud);
  PointCloudPtr cloud_target (new PointCloud);
  PointCloudPtr featureCloudSourceTemp (new PointCloud);
  PointCloudPtr featureCloudTargetTemp (new PointCloud);
  PointCloudPtr cloud_converg_sparse_all (new PointCloud);
  PointCloudPtr cloud_converg_sparse_correspond (new PointCloud);
  PointCloudPtr cloud_target_sparse_correspond (new PointCloud);
  PointCloudPtr cloud_converg_dense (new PointCloud);
  PointCloudPtr cloud_ransac_estimation (new PointCloud);
  PointCloudNormalPtr cloud_source_normals (new PointCloudNormal);
  PointCloudNormalPtr cloud_target_normals (new PointCloudNormal);
  PointCloudNormalPtr featureCloudSource (new PointCloudNormal);
  PointCloudNormalPtr featureCloudTarget (new PointCloudNormal);
  Eigen::Matrix4f initialTransform;
  std::vector<int> indicesSource;
  std::vector<int> indicesTarget;

  //Fill in the cloud data
  //pcl::PCDReader reader;
  //ROS_INFO("Reading saved clouds with normals from file (faster)");
  //reader.read ("normals-source.pcd", *cloud_source_normals);
  //reader.read ("normals-target.pcd", *cloud_target_normals);
  //std::cout << "PointCloud source has: " << cloud_source->points.size () << " data points." << std::endl;
  //std::cout << "PointCloud target has: " << cloud_target->points.size () << " data points." << std::endl;


  ROS_INFO("Getting test data from a bag file");
  getTestDataFromBag(cloud_source, cloud_target, featureCloudSourceTemp, indicesSource, featureCloudTargetTemp, indicesTarget, initialTransform, 0);
  Eigen::Matrix4f ransacInverse = initialTransform.inverse();
/*
  // remove correspondences with large z values (susceptible to error)
  ROS_INFO("Removing feature correspondences with a large depth measurement");
  //removeCorrespondancesZThreshold (featureCloudSourceTemp, indicesSource, featureCloudTargetTemp, indicesTarget, 1.3);
  ROS_INFO_STREAM("indices source size: " << indicesSource.size() << "   indices target size: " << indicesTarget.size());

  //calculate normals
  ROS_INFO("Calculating normals");
 normalEstimation(cloud_source, cloud_source_normals);
 normalEstimation(cloud_target, cloud_target_normals);

  ROS_INFO("Converting feature point clouds");
  pcl::copyPointCloud (*featureCloudSourceTemp, *featureCloudSource);
  pcl::copyPointCloud (*featureCloudTargetTemp, *featureCloudTarget);

  // here is a guess transform that was set manually to align the point clouds; pure ICP performs well with it
  PointCloud Final;
  Eigen::Matrix4f guess;
  guess <<   1, 0, 0, 0.07,
		     0, 1, 0, 0,
		     0, 0, 1, 0.015,
		     0, 0, 0, 1;

  ROS_INFO("Setting up icp with features");
  */
  /* custom icp
  pcl::IterativeClosestPointFeatures<pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal> icp_features;
  icp_features.setMaximumIterations (40);
  icp_features.setTransformationEpsilon (1e-9);
  icp_features.setMaxCorrespondenceDistance(0.1);
  icp_features.setRANSACOutlierRejectionThreshold(0.03);

  icp_features.setInputCloud(cloud_source_normals);
  icp_features.setInputTarget(cloud_target_normals);
  icp_features.setSourceFeatures (featureCloudSource, indicesSource);
  icp_features.setTargetFeatures (featureCloudTarget, indicesTarget);
  icp_features.setFeatureErrorWeight(0.3);  // 1 = feature, 0 = icp

  ROS_INFO("Performing rgbd icp.....");
  icp_features.align(Final, ransacInverse);
  std::cout << "ICP features has finished with converge flag of:" << icp_features.hasConverged() << " score: " <<
		  icp_features.getFitnessScore() << std::endl;
  std::cout << icp_features.getFinalTransformation() << std::endl;
*/
/*  std::vector<int> indicesSourceSmall = indicesSource;
  std::vector<int> indicesTargetSmall = indicesTarget;

  for(std::vector<int>::iterator iterator_ = indicesSource.begin(); iterator_ != indicesSource.end(); ++iterator_) {
  	*iterator_ = *iterator_ + cloud_source->size();
  }
  for(std::vector<int>::iterator iterator_ = indicesTarget.begin(); iterator_ != indicesTarget.end(); ++iterator_) {
      *iterator_ = *iterator_ + cloud_target->size();
  }

  PointCloudNormalPtr concatinatedSourceCloud (new PointCloudNormal);
  PointCloudNormalPtr concatinatedTargetCloud (new PointCloudNormal);

  *concatinatedSourceCloud = *cloud_source_normals;
  *concatinatedTargetCloud = *cloud_target_normals;

  (*concatinatedSourceCloud) += *featureCloudSource;
  (*concatinatedTargetCloud) += *featureCloudTarget;

  boost::shared_ptr< TransformationEstimationWDF<pcl::PointXYZRGBNormal,pcl::PointXYZRGBNormal> >
  		initialTransformWDF(new TransformationEstimationWDF<pcl::PointXYZRGBNormal,pcl::PointXYZRGBNormal>());

  float alpha = 1.0;
  initialTransformWDF->setAlpha(alpha);
  initialTransformWDF->setCorrespondecesDFP(indicesSource, indicesTarget);

  // Instantiate ICP
  pcl::IterativeClosestPoint<pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal> icp_wdf;

  // Set the max correspondence distance to 5cm (e.g., correspondences with higher distances will be ignored)
  icp_wdf.setMaxCorrespondenceDistance (0.05);
  // Set the maximum number of iterations (criterion 1)
  icp_wdf.setMaximumIterations (75);
  // Set the transformation epsilon (criterion 2)
  icp_wdf.setTransformationEpsilon (1e-8);
  // Set the euclidean distance difference epsilon (criterion 3)
  icp_wdf.setEuclideanFitnessEpsilon (0); //1

  // Set TransformationEstimationWDF as ICP transform estimator
  icp_wdf.setTransformationEstimation (initialTransformWDF);

  icp_wdf.setInputCloud( concatinatedSourceCloud);
  icp_wdf.setInputTarget( concatinatedTargetCloud);

  pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr cloud_transformed( new pcl::PointCloud<pcl::PointXYZRGBNormal>);
  // As before, due to my initial bad naming, it is the "target" that is being transformed
  //									set initial transform
  icp_wdf.align ( *cloud_transformed, ransacInverse); //init_tr );
  std::cout << "[SIIMCloudMatch::runICPMatch] Has converged? = " << icp_wdf.hasConverged() << std::endl <<
				"	fitness score (SSD): " << icp_wdf.getFitnessScore (1000) << std::endl;
  icp_wdf.getFinalTransformation ();

  /// The final ICP transformation is obtained by composing the initial transform with the ICP refinement
*/
/*------BEST-------------
icp.getMaximumIterations 50
icp.getRANSACOutlierRejectionThreshold() 0.02
icp.getMaxCorrespondenceDistance() 0.03
icp.getTransformationEpsilon () 1e-09
icp.getEuclideanFitnessEpsilon () -1.79769e+308
score: 0.000164332
  ---------------------
  //Normal (non modified) icp for reference
  pcl::IterativeClosestPoint<pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal> icp;
    icp.setMaximumIterations (20);
    std::cerr << "icp.getMaximumIterations " << icp.getMaximumIterations() << std::endl;

    icp.setRANSACOutlierRejectionThreshold(0.05);
    std::cerr << "icp.getRANSACOutlierRejectionThreshold() " << icp.getRANSACOutlierRejectionThreshold() << std::endl;

    icp.setMaxCorrespondenceDistance(0.05);
    std::cerr << "icp.getMaxCorrespondenceDistance() " << icp.getMaxCorrespondenceDistance() << std::endl;

    //only used for convergence test
    icp.setTransformationEpsilon (0);
    std::cerr << "icp.getTransformationEpsilon () " << icp.getTransformationEpsilon () << std::endl;

    //only used for convergence test
    std::cerr << "icp.getEuclideanFitnessEpsilon () " << icp.getEuclideanFitnessEpsilon () << std::endl;

    icp.setInputCloud(featureCloudSource);
    icp.setInputTarget(featureCloudTarget);
    pcl::PointCloud<pcl::PointXYZRGBNormal> Final_reference;

    std::cout << "ICP starts with a score of " << icp.getFitnessScore() << std::endl;

    ROS_INFO("Performing standard icp.....");
    icp.align(Final_reference);//, guess);
    std::cout << "ICP has finished with converge flag of:" << icp.hasConverged() << " score: " << icp.getFitnessScore() << std::endl;
      std::cout << icp.getFinalTransformation() << std::endl;
*/
/*  ROS_INFO("Writing output clouds...");
  transformPointCloud (*featureCloudSourceTemp, *cloud_converg_sparse_all, icp_wdf.getFinalTransformation());
  transformPointCloud (*featureCloudSourceTemp, *cloud_converg_sparse_correspond, icp_wdf.getFinalTransformation());
  transformPointCloud (*cloud_source, *cloud_converg_dense, icp_wdf.getFinalTransformation());
  transformPointCloud (*cloud_source, *cloud_ransac_estimation, ransacInverse);

  // remove non correspondences from feature clouds as an addition output
  uint8_t col=3;
  // *cloud_converg_sparse_correspond = *cloud_converg_sparse_all;
  cloud_converg_sparse_correspond->points.resize(indicesSourceSmall.size());
  cloud_converg_sparse_correspond->height = 1;
  cloud_converg_sparse_correspond->width = (int)indicesSourceSmall.size();

  cloud_target_sparse_correspond->points.resize(indicesTargetSmall.size());
  cloud_target_sparse_correspond->height = 1;
  cloud_target_sparse_correspond->width = (int)indicesTargetSmall.size();
  for (size_t cloudId = 0; cloudId < indicesSourceSmall.size(); ++cloudId)
  {
	  cloud_converg_sparse_correspond->points[cloudId].x = cloud_converg_sparse_all->points[indicesSourceSmall[cloudId]].x;
	  cloud_converg_sparse_correspond->points[cloudId].y = cloud_converg_sparse_all->points[indicesSourceSmall[cloudId]].y;
	  cloud_converg_sparse_correspond->points[cloudId].z = cloud_converg_sparse_all->points[indicesSourceSmall[cloudId]].z;
	  cloud_converg_sparse_correspond->points[cloudId].r = col;
	  cloud_converg_sparse_correspond->points[cloudId].g = col;
	  cloud_converg_sparse_correspond->points[cloudId].b = col;
	  cloud_target_sparse_correspond->points[cloudId].x = featureCloudTarget->points[indicesTargetSmall[cloudId]].x;
	  cloud_target_sparse_correspond->points[cloudId].y = featureCloudTarget->points[indicesTargetSmall[cloudId]].y;
	  cloud_target_sparse_correspond->points[cloudId].z = featureCloudTarget->points[indicesTargetSmall[cloudId]].z;
	  cloud_target_sparse_correspond->points[cloudId].r = col;
	  cloud_target_sparse_correspond->points[cloudId].g = col;
	  cloud_target_sparse_correspond->points[cloudId].b = col;
	  //std::cerr << "point " << cloudId << "\n ";
  }

  pcl::PCDWriter writer;
  writer.write ("cloud1-out.pcd", *cloud_source, false);
  writer.write ("cloud2-out.pcd", *cloud_target, false);
  writer.write ("normals-source.pcd", *cloud_source_normals, false);
  writer.write ("normals-target.pcd", *cloud_target_normals, false);
  writer.write ("feature-source.pcd", *featureCloudSource, false);
  writer.write ("feature-target.pcd", *featureCloudTarget, false);
  writer.write ("converged-cloud.pcd", *cloud_converg_dense, false);
  writer.write ("converged-feature-all.pcd", *cloud_converg_sparse_all, false);
  writer.write ("converged-feature-correspond.pcd", *cloud_converg_sparse_correspond, false);
  writer.write ("target-feature-correspond.pcd", *cloud_target_sparse_correspond, false);
  writer.write ("ransac_estimation.pcd", *cloud_ransac_estimation, false);
 // writer.write ("converged-reference.pcd", Final_reference, false);
*/
 return (0);
}
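The (largely commented-out) pipeline above relies on project-specific helpers such as getTestDataFromBag and normalEstimation that are not shown. A minimal sketch of what a normalEstimation helper could look like, assuming PointCloudPtr and PointCloudNormalPtr are pcl::PointCloud<pcl::PointXYZRGB>::Ptr and pcl::PointCloud<pcl::PointXYZRGBNormal>::Ptr (the actual helper may differ):

#include <pcl/features/normal_3d.h>
#include <pcl/search/kdtree.h>

// Hypothetical helper: estimate normals for `cloud` and merge them with its xyz/rgb data.
void normalEstimation (PointCloudPtr cloud, PointCloudNormalPtr cloud_normals)
{
  pcl::NormalEstimation<pcl::PointXYZRGB, pcl::Normal> ne;
  ne.setInputCloud (cloud);
  ne.setSearchMethod (pcl::search::KdTree<pcl::PointXYZRGB>::Ptr (new pcl::search::KdTree<pcl::PointXYZRGB>));
  ne.setKSearch (20);

  pcl::PointCloud<pcl::Normal> normals;
  ne.compute (normals);

  // Copy xyz/rgb first, then fill in the normal fields point by point.
  pcl::copyPointCloud (*cloud, *cloud_normals);
  for (size_t i = 0; i < cloud_normals->points.size (); ++i)
  {
    cloud_normals->points[i].normal_x  = normals.points[i].normal_x;
    cloud_normals->points[i].normal_y  = normals.points[i].normal_y;
    cloud_normals->points[i].normal_z  = normals.points[i].normal_z;
    cloud_normals->points[i].curvature = normals.points[i].curvature;
  }
}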
int
main (int argc, char** argv)
{
  if (argc < 2)
  {
    std::cerr << "Input argument 1: a .pcd file to compare against benchmark-target.pcd" << std::endl;
    std::cerr << "Input argument 2 (optional): the octree resolution to use (default = 1.0)" << std::endl;
    return 0;
  }

  float resolution;
  if (argc > 2)
    resolution = atof (argv[2]);
  else
    resolution = 1.0;
  std::cerr << "Using octree resolution: " << resolution << std::endl;

  std::cerr << std::endl << "Computing.";

  CloudPtr cloud_target (new Cloud);
  pcl::io::loadPCDFile ("theatre_benchmark_target.pcd", *cloud_target);

  std::cerr << ".";

  CloudPtr cloud_in (new Cloud);
  pcl::io::loadPCDFile (argv[1], *cloud_in);

  std::cerr << ".";

  // Instantiate octree-based point cloud change detection classes
  pcl::octree::OctreePointCloudChangeDetector<PointType> octree_forward (resolution);
  octree_forward.setInputCloud (cloud_target);
  octree_forward.addPointsFromInputCloud ();
  octree_forward.switchBuffers ();
  octree_forward.setInputCloud (cloud_in);
  octree_forward.addPointsFromInputCloud ();

  std::cerr << ".";

  pcl::octree::OctreePointCloudChangeDetector<PointType> octree_backward (resolution);
  octree_backward.setInputCloud (cloud_in);
  octree_backward.addPointsFromInputCloud ();
  octree_backward.switchBuffers ();
  octree_backward.setInputCloud (cloud_target);
  octree_backward.addPointsFromInputCloud ();

  std::cerr << ".";

  // Get the indices of points that fall into octree voxels present in the current buffer but not in the previous one
  std::vector<int> differences_forward;
  octree_forward.getPointIndicesFromNewVoxels (differences_forward);

  std::vector<int> differences_backward;
  octree_backward.getPointIndicesFromNewVoxels (differences_backward);

  std::cerr << ".";

  int noise_present = 0, noise_added = 0;
  for (size_t i = 0; i < differences_forward.size (); ++i)
  {
    if (checkPointInRange (cloud_in->points[differences_forward[i]], 500, 2000, 5000, 10000, -1825, 500))
      noise_present++;
    else if (checkPointInRange (cloud_in->points[differences_forward[i]], 0, 3000, 0, 1200, -1500, 2500))
      noise_present++;
    else if (checkPointInRange (cloud_in->points[differences_forward[i]], -3500, 3000, 8300, 10000, -1800, 6000))
      noise_present++;
    else if (checkPointInRange (cloud_in->points[differences_forward[i]], -3500, 3000, 0, 8300, 300, 6000))
      noise_present++;
    else
      noise_added++;
  }
  if (noise_present > 424122)
  {
    noise_added += noise_present - 424122;
    noise_present = 424122;
  }

  // Results
  std::cerr << std::endl << std::endl << "Target cloud: " << cloud_target->width << " x " << cloud_target->height << std::endl;
  std::cerr << "Input cloud: " << cloud_in->width << " x " << cloud_in->height << std::endl;
  std::cerr << "Difference: " << std::abs (static_cast<long> (cloud_target->points.size ()) - static_cast<long> (cloud_in->points.size ())) << " points" << std::endl;
  if (resolution <= 1.0)
  {
    std::cerr << std::endl << "Noise that was removed: " << 424122 - noise_present << " points" << std::endl;
    std::cerr << "Noise that was not removed: " << noise_present << " points" << std::endl;
    std::cerr << "Noise that was added: " << noise_added << " points" << std::endl;
    std::cerr << "Non-noise that was removed: " << differences_backward.size () << " points" << std::endl;
    std::cerr << std::endl << "Benchmark error result: " << static_cast<double> (noise_present) / 424122.0 + 10.0 * noise_added / 4002050.0 + 10.0 * differences_backward.size () / 4002050.0;
    std::cerr << "   (0 is perfect, 1 is useless (can be higher than 1))" << std::endl;
  }
  return 0;
}
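The benchmark above depends on a checkPointInRange helper that is not shown. Judging from the call sites, it appears to test whether a point lies inside an axis-aligned box; a minimal sketch under that assumption (the min/max-per-axis argument order is a guess):

// Hypothetical helper: true if the point lies inside the axis-aligned box
// [x_min, x_max] x [y_min, y_max] x [z_min, z_max].
bool checkPointInRange (const PointType& p,
                        float x_min, float x_max,
                        float y_min, float y_max,
                        float z_min, float z_max)
{
  return (p.x >= x_min && p.x <= x_max &&
          p.y >= y_min && p.y <= y_max &&
          p.z >= z_min && p.z <= z_max);
}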
Example #6
int main( int argc, char** argv )
{
    ros::init(argc, argv, "KITTI");
    int offset = 0;
    int maxId = 4000;
    KITTI kitti(0,maxId,offset);
    loam_wrapper loam;


    std::vector<Eigen::Matrix4d> T_result;
    std::vector<Eigen::Matrix4d> T_result_delta;
    T_result.push_back(Eigen::Matrix4d::Identity());
    Eigen::Matrix4d T_offset = kitti.getGroundTruthByID(offset);
    Eigen::Matrix4d T_all_od = kitti.getGroundTruthByID(offset);
    Eigen::Matrix4d T_all_map = kitti.getGroundTruthByID(offset);
    Eigen::Matrix4d T_diff_od_map = Eigen::Matrix4d::Identity(); // initialized; never updated in the loop below


    Eigen::Matrix4d T_Velo_to_Cam = kitti.getVelo_to_cam_T();
    for (int i=offset; i<maxId-1;i++)
    {

        pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_source_filtered (new pcl::PointCloud<pcl::PointXYZ>);
        pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_target_filtered (new pcl::PointCloud<pcl::PointXYZ>);
        // Read the raw input scans (note: the *_filtered clouds hold the raw scans here; the voxel-grid output goes into cloud_source / cloud_target)
        pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_source(new pcl::PointCloud<pcl::PointXYZ>());
        kitti.getOneVel(cloud_source_filtered,i);
        pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_target(new pcl::PointCloud<pcl::PointXYZ>());
        kitti.getOneVel(cloud_target_filtered,i+1);


        pcl::VoxelGrid<pcl::PointXYZ> downSizeFilter;
        downSizeFilter.setInputCloud(cloud_source_filtered);
        downSizeFilter.setLeafSize(0.1f, 0.1f, 0.1f);
        downSizeFilter.filter(*cloud_source);

        pcl::VoxelGrid<pcl::PointXYZ> downSizeFilter2;
        downSizeFilter2.setInputCloud(cloud_target_filtered);
        downSizeFilter2.setLeafSize(0.1f, 0.1f, 0.1f);
        downSizeFilter2.filter(*cloud_target);

        // Transform into Cam coordinate system
        pcl::transformPointCloud(*cloud_source, *cloud_source, T_Velo_to_Cam);
        pcl::transformPointCloud(*cloud_target, *cloud_target, T_Velo_to_Cam);

        // transform according to ground truth
        Eigen::Matrix4d T_1 = kitti.getGroundTruthByID(i);
        Eigen::Matrix4d T_2 = kitti.getGroundTruthByID(i+1);
        Eigen::Matrix4d T_diff = T_1.inverse() * T_2;
        pcl::transformPointCloud (*cloud_target, *cloud_target, T_diff);

        loam.publishFirst(cloud_source);
        loam.publishSecond(cloud_target);

        // Copy both clouds into PointNormal clouds and estimate normals on the target
        // (point-to-plane ICP only needs the target normals).
        pcl::PointCloud<pcl::PointNormal>::Ptr src(new pcl::PointCloud<pcl::PointNormal>);
        pcl::copyPointCloud(*cloud_source, *src);
        pcl::PointCloud<pcl::PointNormal>::Ptr tgt(new pcl::PointCloud<pcl::PointNormal>);
        pcl::copyPointCloud(*cloud_target, *tgt);

        pcl::NormalEstimation<pcl::PointNormal, pcl::PointNormal> norm_est;
        norm_est.setSearchMethod (pcl::search::KdTree<pcl::PointNormal>::Ptr (new pcl::search::KdTree<pcl::PointNormal>));
        norm_est.setKSearch (10);
        norm_est.setInputCloud (tgt);
        norm_est.compute (*tgt);

        pcl::IterativeClosestPoint<pcl::PointNormal, pcl::PointNormal> icp;
        typedef pcl::registration::TransformationEstimationPointToPlane<pcl::PointNormal, pcl::PointNormal> PointToPlane;
        boost::shared_ptr<PointToPlane> point_to_plane(new PointToPlane);
        icp.setTransformationEstimation(point_to_plane);
        icp.setInputCloud(src);
        icp.setInputTarget(tgt);
        icp.setRANSACOutlierRejectionThreshold(0.05);
        icp.setRANSACIterations(100);
        icp.setMaximumIterations(1000);
        icp.setTransformationEpsilon(1e-3);
        pcl::PointCloud<pcl::PointNormal> output;
        icp.align(output);

        // Obtain the transformation that aligns the source cloud to the target
        Eigen::Matrix4d T_back = icp.getFinalTransformation().cast<double>();

        T_all_map = T_back * T_all_map * T_diff;

        Eigen::Matrix4d T_gt = kitti.getGroundTruthDeltaByID(i);


        T_result_delta.push_back(T_back);

        T_all_od = T_offset*loam.T_total_od;

        std::cout << "[INFO]: i=" << i << std::endl;
        std::cout << "T_back:" << T_back << std::endl;
        std::cout << "T_all_od:" << T_all_od << std::endl;
        std::cout << "T_all_map:" << T_all_map << std::endl;
        std::cout << "T_gt:" << kitti.getGroundTruthByID(i) << std::endl;
        std::cout << "T_diff_od_map:" << T_diff_od_map << std::endl;

        T_result.push_back(T_all_map);

        loam.publishFirst(cloud_source);
        pcl::transformPointCloud (*cloud_target, *cloud_target, T_back);
        loam.publishSecond(cloud_target);
    }

    kitti.plotDeltaPoses(T_result,0);
    kitti.eval(T_result);

    return 0;
}
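Since the loop compares the estimated pose chain against kitti.getGroundTruthByID(i), one common way to quantify per-frame drift is the relative pose error. A sketch using plain Eigen (a hypothetical helper, not part of the KITTI/loam_wrapper classes used above):

#include <iostream>
#include <Eigen/Geometry>

// Print the translation and rotation error between an estimated pose and the
// corresponding ground-truth pose; both are 4x4 homogeneous transforms in the same frame.
void printPoseError (const Eigen::Matrix4d& T_est, const Eigen::Matrix4d& T_gt)
{
    Eigen::Matrix4d T_err = T_gt.inverse() * T_est;
    double trans_err = T_err.block<3,1>(0,3).norm(); // metres
    double rot_err   = Eigen::AngleAxisd(Eigen::Matrix3d(T_err.block<3,3>(0,0))).angle(); // radians
    std::cout << "translation error: " << trans_err
              << "  rotation error: " << rot_err << std::endl;
}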