Example #1
0
void MonsterGroupManager::LoadJSONGroups() throw (std::string)
{
    //open the file
    std::ifstream file;
    file.open(monGroupFilePath);
    if(!file.good())
    {
        throw (std::string)"Unable to load file " + monGroupFilePath;
    }

    //load the data
    picojson::value groupsRaw;
    file >> groupsRaw;

    /*std::string error = picojson::get_last_error();
    if(! error.empty())
    {
        printf("'%s' : %s", monGroupFilePath, error.c_str());
        return;
    }*/

    //check the data
    if (! groupsRaw.is<picojson::array>()) {
        throw (std::string)"The monster group file " + monGroupFilePath +
              (std::string)" does not contain the expected JSON data";
    }

    init_translation();
    picojson::object jsonobj;
    MonsterGroup g;

    const picojson::array& groups = groupsRaw.get<picojson::array>();
    for (picojson::array::const_iterator it_groups = groups.begin(); it_groups != groups.end(); ++it_groups)
    {
        jsonobj = it_groups->get<picojson::object>();
        g = GetMGroupFromJSON(&jsonobj);
        monsterGroupMap[g.name] = g;
    }
    if(!json_good())
    {
        throw (std::string)"There was an error reading " + monGroupFilePath;
    }
}
static MATRIX *
initialize_transform(MRI *mri_in, MRI *mri_ref, MP *parms) {
  MATRIX *m_L ;

  fprintf(stderr, "initializing alignment using PCA...\n") ;
  if (Gdiag & DIAG_WRITE && parms->write_iterations > 0) {
    MRIwriteImageViews(parms->mri_ref, "ref", IMAGE_SIZE) ;
    MRIwriteImageViews(parms->mri_in, "before_pca", IMAGE_SIZE) ;
  }

  if (nopca)
    m_L = MatrixIdentity(3, NULL) ;
  else
    m_L = compute_pca(mri_in, mri_ref) ;

  init_scaling(mri_in, mri_ref, m_L) ;
#if 0
  init_translation(mri_in, mri_ref, m_L) ; /* in case PCA failed */
#endif

  /* convert it to RAS mm coordinates */
  MRIvoxelXformToRasXform(mri_in, mri_ref, m_L, m_L) ;

  if (Gdiag & DIAG_SHOW) {
    printf("initial transform:\n") ;
    MatrixPrint(stdout, m_L) ;
  }
  if (Gdiag & DIAG_WRITE && parms->write_iterations > 0) {
    MRI *mri_aligned ;

    mri_aligned = MRIapplyRASlinearTransform(parms->mri_in, NULL, m_L) ;
    MRIwriteImageViews(mri_aligned, "after_pca", IMAGE_SIZE) ;
    MRIfree(&mri_aligned) ;
  }
  return(m_L) ;
}
Example #3
0
template<typename PointSource, typename PointTarget> void
pcl::NormalDistributionsTransform<PointSource, PointTarget>::computeTransformation (PointCloudSource &output, const Eigen::Matrix4f &guess)
{
  nr_iterations_ = 0;
  converged_ = false;

  double gauss_c1, gauss_c2, gauss_d3;

  // Initializes the Gaussian fitting parameters (eq. 6.8) [Magnusson 2009]
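  // gauss_d1_ / gauss_d2_ fit a Gaussian approximation to the negative log-likelihood of a
  // normal-plus-uniform (outlier) mixture, so the per-point score and its derivatives stay bounded.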
  gauss_c1 = 10 * (1 - outlier_ratio_);
  gauss_c2 = outlier_ratio_ / pow (resolution_, 3);
  gauss_d3 = -log (gauss_c2);
  gauss_d1_ = -log ( gauss_c1 + gauss_c2 ) - gauss_d3;
  gauss_d2_ = -2 * log ((-log ( gauss_c1 * exp ( -0.5 ) + gauss_c2 ) - gauss_d3) / gauss_d1_);

  if (guess != Eigen::Matrix4f::Identity ())
  {
    // Initialise final transformation to the guessed one
    final_transformation_ = guess;
    // Apply guessed transformation prior to search for neighbours
    transformPointCloud (output, output, guess);
  }

  // Initialize Point Gradient and Hessian
  point_gradient_.setZero ();
  point_gradient_.block<3, 3>(0, 0).setIdentity ();
  point_hessian_.setZero ();

  Eigen::Transform<float, 3, Eigen::Affine, Eigen::ColMajor> eig_transformation;
  eig_transformation.matrix () = final_transformation_;

  // Convert initial guess matrix to 6 element transformation vector
  Eigen::Matrix<double, 6, 1> p, delta_p, score_gradient;
  Eigen::Vector3f init_translation = eig_transformation.translation ();
  Eigen::Vector3f init_rotation = eig_transformation.rotation ().eulerAngles (0, 1, 2);
  p << init_translation (0), init_translation (1), init_translation (2),
  init_rotation (0), init_rotation (1), init_rotation (2);

  Eigen::Matrix<double, 6, 6> hessian;

  double score = 0;
  double delta_p_norm;

  // Calculate derivatives of the initial transform vector; subsequent derivative calculations are done during step length determination.
  score = computeDerivatives (score_gradient, hessian, output, p);

  while (!converged_)
  {
    // Store previous transformation
    previous_transformation_ = transformation_;

    // Solve for descent direction using Newton's method, line 23 in Algorithm 2 [Magnusson 2009]
    Eigen::JacobiSVD<Eigen::Matrix<double, 6, 6> > sv (hessian, Eigen::ComputeFullU | Eigen::ComputeFullV);
    // Negative for maximization as opposed to minimization
    delta_p = sv.solve (-score_gradient);

    // Calculate step length with guaranteed sufficient decrease [More, Thuente 1994]
    delta_p_norm = delta_p.norm ();

    if (delta_p_norm == 0 || delta_p_norm != delta_p_norm)
    {
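      // delta_p_norm != delta_p_norm is a NaN test: a zero Newton step counts as convergence,
      // a NaN step as failure.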
      trans_probability_ = score / static_cast<double> (input_->points.size ());
      converged_ = delta_p_norm == delta_p_norm;
      return;
    }

    delta_p.normalize ();
    delta_p_norm = computeStepLengthMT (p, delta_p, delta_p_norm, step_size_, transformation_epsilon_ / 2, score, score_gradient, hessian, output);
    delta_p *= delta_p_norm;
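    // Rebuild the incremental transform from the scaled step: translation followed by rotations about X, Y and Z.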


    transformation_ = (Eigen::Translation<float, 3> (static_cast<float> (delta_p (0)), static_cast<float> (delta_p (1)), static_cast<float> (delta_p (2))) *
                       Eigen::AngleAxis<float> (static_cast<float> (delta_p (3)), Eigen::Vector3f::UnitX ()) *
                       Eigen::AngleAxis<float> (static_cast<float> (delta_p (4)), Eigen::Vector3f::UnitY ()) *
                       Eigen::AngleAxis<float> (static_cast<float> (delta_p (5)), Eigen::Vector3f::UnitZ ())).matrix ();


    p = p + delta_p;

    // Update Visualizer (untested)
    if (update_visualizer_ != 0)
      update_visualizer_ (output, std::vector<int>(), *target_, std::vector<int>() );

    if (nr_iterations_ > max_iterations_ ||
        (nr_iterations_ && (std::fabs (delta_p_norm) < transformation_epsilon_)))
    {
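      // Step length fell below transformation_epsilon_, or the iteration budget was exhausted.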
      converged_ = true;
    }

    nr_iterations_++;

  }

  // Store transformation probability.  The relative differences within each scan registration are accurate
  // but the normalization constants need to be modified for it to be globally accurate
  trans_probability_ = score / static_cast<double> (input_->points.size ());
}
void NDTScanMatching::scan_matching_callback(const sensor_msgs::PointCloud2::ConstPtr& points,
        const geometry_msgs::PoseStamped::ConstPtr& pose)
{
    ros::Time scan_time = ros::Time::now();

    pcl::PointXYZI p;
    pcl::PointCloud<pcl::PointXYZI> scan;

    pcl::PointCloud<pcl::PointXYZI>::Ptr transformed_scan_ptr (new pcl::PointCloud<pcl::PointXYZI>());
    tf::Quaternion q;
    Eigen::Matrix4f t(Eigen::Matrix4f::Identity());

    tf::Transform transform;

    pcl::fromROSMsg(*points, scan);

    pcl::PointCloud<pcl::PointXYZI>::Ptr scan_ptr(new pcl::PointCloud<pcl::PointXYZI>(scan));

    if(initial_scan_loaded_ == 0)
    {
        last_scan_ = *scan_ptr;
        last_pose_ = *pose;
        initial_scan_loaded_ = 1;
        return;
    }
    // Downsampling the input scan with a 2.0 m voxel leaf to speed up registration.
    pcl::PointCloud<pcl::PointXYZI>::Ptr filtered_cloud_ptr (new pcl::PointCloud<pcl::PointXYZI>);
    pcl::ApproximateVoxelGrid<pcl::PointXYZI> approximate_voxel_filter;
    approximate_voxel_filter.setLeafSize (2.0, 2.0, 2.0);
    approximate_voxel_filter.setInputCloud (scan_ptr);
    approximate_voxel_filter.filter (*filtered_cloud_ptr);

    // Setting scale dependent NDT parameters
    // Setting minimum transformation difference for termination condition.
    ndt_.setTransformationEpsilon (0.01);
    // Setting maximum step size for More-Thuente line search.
    ndt_.setStepSize (0.1);
    // Setting resolution of NDT grid structure (VoxelGridCovariance).
    ndt_.setResolution (1.0);

    // Setting max number of registration iterations.
    ndt_.setMaximumIterations (30);

    // Setting point cloud to be aligned.
    ndt_.setInputSource (filtered_cloud_ptr);

    pcl::PointCloud<pcl::PointXYZI>::Ptr last_scan_ptr(new pcl::PointCloud<pcl::PointXYZI>(last_scan_));

    // Setting point cloud to be aligned to.
    ndt_.setInputTarget (last_scan_ptr);

    tf::Matrix3x3 init_rotation;

    // Subtract the previous pose to get the x, y, z offsets
    offset_x_ = pose->pose.position.x - last_pose_.pose.position.x;
    offset_y_ = pose->pose.position.y - last_pose_.pose.position.y;
    double roll, pitch, yaw = 0;
    getRPY(pose->pose.orientation, roll, pitch, yaw);
    offset_yaw_ = yaw - last_yaw_;

    guess_pos_.x = previous_pos_.x + offset_x_;
    guess_pos_.y = previous_pos_.y + offset_y_;
    guess_pos_.z = previous_pos_.z;

    guess_pos_.roll = previous_pos_.roll;
    guess_pos_.pitch = previous_pos_.pitch;
    guess_pos_.yaw = previous_pos_.yaw + offset_yaw_;

    Eigen::AngleAxisf init_rotation_x(guess_pos_.roll, Eigen::Vector3f::UnitX());
    Eigen::AngleAxisf init_rotation_y(guess_pos_.pitch, Eigen::Vector3f::UnitY());
    Eigen::AngleAxisf init_rotation_z(guess_pos_.yaw, Eigen::Vector3f::UnitZ());

    Eigen::Translation3f init_translation(guess_pos_.x, guess_pos_.y, guess_pos_.z);
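    // The guess composes translation with Rz * Ry * Rx (yaw-pitch-roll).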

    Eigen::Matrix4f init_guess = (init_translation * init_rotation_z * init_rotation_y * init_rotation_x).matrix();

    pcl::PointCloud<pcl::PointXYZI>::Ptr output_cloud_ptr(new pcl::PointCloud<pcl::PointXYZI>);

    ros::Time ndt_start = ros::Time::now();
    ndt_.align (*output_cloud_ptr, init_guess);
    ros::Duration ndt_delta_t = ros::Time::now() - ndt_start;

    std::cout << "Normal Distributions Transform has converged:" << ndt_.hasConverged ()
              << " score: " << ndt_.getFitnessScore () << std::endl;

    t = ndt_.getFinalTransformation();

    // Transforming unfiltered, input cloud using found transform.
    pcl::transformPointCloud (*scan_ptr, *output_cloud_ptr, t);

    tf::Matrix3x3 tf3d;
    tf3d.setValue(
        static_cast<double>(t(0, 0)), static_cast<double>(t(0, 1)), static_cast<double>(t(0, 2)),
        static_cast<double>(t(1, 0)), static_cast<double>(t(1, 1)), static_cast<double>(t(1, 2)),
        static_cast<double>(t(2, 0)), static_cast<double>(t(2, 1)), static_cast<double>(t(2, 2)));

    current_pos_.x = t(0, 3);
    current_pos_.y = t(1, 3);
    current_pos_.z = t(2, 3);

    std::cout << "/////////////////////////////////////////////" << std::endl;
    std::cout << "count : " << count_ << std::endl;
    std::cout << "Process time : " << ndt_delta_t.toSec() << std::endl;
    std::cout << "x : " << current_pos_.x << std::endl;
    std::cout << "y : " << current_pos_.y << std::endl;
    std::cout << "z : " << current_pos_.z << std::endl;
    std::cout << "/////////////////////////////////////////////" << std::endl;

    tf3d.getRPY(current_pos_.roll, current_pos_.pitch, current_pos_.yaw, 1);

    transform.setOrigin(tf::Vector3(current_pos_.x, current_pos_.y, current_pos_.z));
    q.setRPY(current_pos_.roll, current_pos_.pitch, current_pos_.yaw);
    transform.setRotation(q);


    // "map"に対する"base_link"の位置を発行する
    br_.sendTransform(tf::StampedTransform(transform, scan_time, "map", "ndt_base_link"));

    sensor_msgs::PointCloud2 scan_matched;
    pcl::toROSMsg(*output_cloud_ptr, scan_matched);

    scan_matched.header.stamp = scan_time;
    scan_matched.header.frame_id = "matched_point_cloud";

    point_cloud_pub_.publish(scan_matched);

    // Update position and posture. current_pos -> previous_pos
    previous_pos_ = current_pos_;

    // save current scan
    last_scan_ += *output_cloud_ptr;
    last_pose_ = *pose;
    last_yaw_ = yaw;
    // geometry_msgs::TransformStamped ndt_trans;
    // ndt_trans.header.stamp = scan_time;
    // ndt_trans.header.frame_id = "map";
    // ndt_trans.child_frame_id = "base_link";

    // ndt_trans.transform.translation.x = curren_pos.x;
    // ndt_trans.transform.translation.y = curren_pos.y;
    // ndt_trans.transform.translation.z = curren_pos.z;
    // ndt_trans.transform.rotation = q;

    // ndt_broadcaster_.sendTransform(ndt_trans);
    count_++;
}
Example #5
0
void init_data_structures()
{
    // all of the applicable types that can be loaded, along with their loading functions
    // Add to this as needed with new StaticFunctionAccessors or new ClassFunctionAccessors for new applicable types
    // Static Function Access
    type_function_map["material"] = new StaticFunctionAccessor(&material_type::load_material);
    type_function_map["bionic"] = new StaticFunctionAccessor(&load_bionic);
    type_function_map["profession"] = new StaticFunctionAccessor(&profession::load_profession);
    type_function_map["skill"] = new StaticFunctionAccessor(&Skill::load_skill);
    type_function_map["dream"] = new StaticFunctionAccessor(&load_dream);
    type_function_map["mutation"] = new StaticFunctionAccessor(&load_mutation);
    type_function_map["lab_note"] = new StaticFunctionAccessor(&computer::load_lab_note);
    type_function_map["hint"] = new StaticFunctionAccessor(&load_hint);
    type_function_map["furniture"] = new StaticFunctionAccessor(&load_furniture);
    type_function_map["terrain"] = new StaticFunctionAccessor(&load_terrain);
    type_function_map["monstergroup"] = new StaticFunctionAccessor(&MonsterGroupManager::LoadMonsterGroup);
    type_function_map["speech"] = new StaticFunctionAccessor(&load_speech);

    //data/json/colors.json would be listed here, but it's loaded before the others (see curses_start_color())

    // Non Static Function Access
    type_function_map["snippet"] = new ClassFunctionAccessor<snippet_library>(&SNIPPET, &snippet_library::load_snippet);
    type_function_map["item_group"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_item_group);

    type_function_map["vehicle_part"] = new ClassFunctionAccessor<game>(g, &game::load_vehiclepart);
    type_function_map["vehicle"] = new ClassFunctionAccessor<game>(g, &game::load_vehicle);
    type_function_map["trap"] = new ClassFunctionAccessor<game>(g, &game::load_trap);
    type_function_map["AMMO"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_ammo);
    type_function_map["GUN"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_gun);
    type_function_map["ARMOR"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_armor);
    type_function_map["TOOL"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_tool);
    type_function_map["BOOK"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_book);
    type_function_map["COMESTIBLE"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_comestible);
    type_function_map["CONTAINER"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_container);
    type_function_map["GUNMOD"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_gunmod);
    type_function_map["GENERIC"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_generic);
    type_function_map["ITEM_CATEGORY"] = new ClassFunctionAccessor<Item_factory>(item_controller, &Item_factory::load_item_category);

    type_function_map["MONSTER"] = new ClassFunctionAccessor<MonsterGenerator>(&MonsterGenerator::generator(), &MonsterGenerator::load_monster);
    type_function_map["SPECIES"] = new ClassFunctionAccessor<MonsterGenerator>(&MonsterGenerator::generator(), &MonsterGenerator::load_species);

    type_function_map["recipe_category"] = new StaticFunctionAccessor(&load_recipe_category);
    type_function_map["recipe_subcategory"] = new StaticFunctionAccessor(&load_recipe_subcategory);
    type_function_map["recipe"] = new StaticFunctionAccessor(&load_recipe);
    type_function_map["tool_quality"] = new StaticFunctionAccessor(&load_quality);
    type_function_map["technique"] = new StaticFunctionAccessor(&load_technique);
    type_function_map["martial_art"] = new StaticFunctionAccessor(&load_martial_art);
    type_function_map["effect_type"] = new StaticFunctionAccessor(&load_effect_type);
    type_function_map["tutorial_messages"] =
        new StaticFunctionAccessor(&load_tutorial_messages);
    type_function_map["overmap_terrain"] =
        new StaticFunctionAccessor(&load_overmap_terrain);
    type_function_map["construction"] =
        new StaticFunctionAccessor(&load_construction);
    type_function_map["mapgen"] =
        new StaticFunctionAccessor(&load_mapgen);

    type_function_map["monitems"] = new ClassFunctionAccessor<game>(g, &game::load_monitem);

    mutations_category[""].clear();
    init_body_parts();
    init_ter_bitflags_map();
    init_vpart_bitflag_map();
    init_translation();
    init_martial_arts();
    init_colormap();
    init_artifacts();
    init_mapgen_builtin_functions();
}
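// The StaticFunctionAccessor / ClassFunctionAccessor types used above are not shown in this
// example. Below is a minimal illustrative sketch of the interface they imply -- assumed
// names, include and signatures, not the project's actual declarations. Both accessors
// expose a uniform load() so the JSON loader can dispatch on a type string via type_function_map.
#include "picojson.h"  // assumed include; the loaders registered above consume picojson values

class TypeLoadAccessor  // hypothetical base class
{
public:
    virtual ~TypeLoadAccessor() {}
    virtual void load(picojson::value &obj) = 0;
};

class StaticFunctionAccessor : public TypeLoadAccessor
{
public:
    typedef void (*load_fn)(picojson::value &);
    explicit StaticFunctionAccessor(load_fn fn) : fn_(fn) {}
    void load(picojson::value &obj) { fn_(obj); }
private:
    load_fn fn_;
};

template <class T>
class ClassFunctionAccessor : public TypeLoadAccessor
{
public:
    typedef void (T::*load_fn)(picojson::value &);
    ClassFunctionAccessor(T *instance, load_fn fn) : instance_(instance), fn_(fn) {}
    void load(picojson::value &obj) { (instance_->*fn_)(obj); }
private:
    T *instance_;
    load_fn fn_;
};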
Example #6
0
//static void velodyne_callback(const pcl::PointCloud<velodyne_pointcloud::PointXYZIR>::ConstPtr& input)
static void map_callback(const sensor_msgs::PointCloud2::ConstPtr& input)
{

  if (map_loaded == 0) {
    std::cout << "Loading map data... ";
    map.header.frame_id = "/pointcloud_map_frame";
    
    // Convert the data type (from sensor_msgs to pcl).
    pcl::fromROSMsg(*input, map);
    
    pcl::PointCloud<pcl::PointXYZI>::Ptr map_ptr(new pcl::PointCloud<pcl::PointXYZI>(map));
    // Setting point cloud to be aligned to.
    ndt.setInputTarget(map_ptr);
	
    // Setting NDT parameters to default values
    ndt.setMaximumIterations(iter);
    ndt.setResolution(ndt_res);
    ndt.setStepSize(step_size);
    ndt.setTransformationEpsilon(trans_eps);
    
    map_loaded = 1;
    std::cout << "Map Loaded." << std::endl;
  }



    if (map_loaded == 1 && init_pos_set == 1) {
        callback_start = ros::Time::now();

        static tf::TransformBroadcaster br;
        tf::Transform transform;
        tf::Quaternion q;

        tf::Quaternion q_control;

        // 1 scan
	/*
        pcl::PointCloud<pcl::PointXYZI> scan;
        pcl::PointXYZI p;
        scan.header = input->header;
        scan.header.frame_id = "velodyne_scan_frame";
	*/

        ros::Time scan_time;
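        // additional_map.header.stamp is in microseconds (PCL convention); split it into sec and nsec.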
        scan_time.sec = additional_map.header.stamp / 1000000.0;
        scan_time.nsec = (additional_map.header.stamp - scan_time.sec * 1000000.0) * 1000.0;

        /*
         std::cout << "scan.header.stamp: " << scan.header.stamp << std::endl;
         std::cout << "scan_time: " << scan_time << std::endl;
         std::cout << "scan_time.sec: " << scan_time.sec << std::endl;
         std::cout << "scan_time.nsec: " << scan_time.nsec << std::endl;
         */

        t1_start = ros::Time::now();
	/*
        for (pcl::PointCloud<velodyne_pointcloud::PointXYZIR>::const_iterator item = input->begin(); item != input->end(); item++) {
            p.x = (double) item->x;
            p.y = (double) item->y;
            p.z = (double) item->z;

            scan.points.push_back(p);
        }
	*/
	//	pcl::fromROSMsg(*input, scan);
        t1_end = ros::Time::now();
        d1 = t1_end - t1_start;

        Eigen::Matrix4f t(Eigen::Matrix4f::Identity());

	//        pcl::PointCloud<pcl::PointXYZI>::Ptr scan_ptr(new pcl::PointCloud<pcl::PointXYZI>(scan));
        pcl::PointCloud<pcl::PointXYZI>::Ptr filtered_additional_map_ptr(new pcl::PointCloud<pcl::PointXYZI>);

        // Downsampling the additional map using a VoxelGrid filter
        t2_start = ros::Time::now();
        pcl::VoxelGrid<pcl::PointXYZI> voxel_grid_filter;
        voxel_grid_filter.setLeafSize(voxel_leaf_size, voxel_leaf_size, voxel_leaf_size);
        voxel_grid_filter.setInputCloud(additional_map_ptr);
        voxel_grid_filter.filter(*filtered_additional_map_ptr);
        t2_end = ros::Time::now();
        d2 = t2_end - t2_start;

        // Setting point cloud to be aligned.
        ndt.setInputSource(filtered_additional_map_ptr);

        // Guess the initial gross estimation of the transformation
        t3_start = ros::Time::now();
        tf::Matrix3x3 init_rotation;

        guess_pos.x = previous_pos.x + offset_x;
        guess_pos.y = previous_pos.y + offset_y;
        guess_pos.z = previous_pos.z + offset_z;
        guess_pos.roll = previous_pos.roll;
        guess_pos.pitch = previous_pos.pitch;
        guess_pos.yaw = previous_pos.yaw + offset_yaw;

        Eigen::AngleAxisf init_rotation_x(guess_pos.roll, Eigen::Vector3f::UnitX());
        Eigen::AngleAxisf init_rotation_y(guess_pos.pitch, Eigen::Vector3f::UnitY());
        Eigen::AngleAxisf init_rotation_z(guess_pos.yaw, Eigen::Vector3f::UnitZ());

        Eigen::Translation3f init_translation(guess_pos.x, guess_pos.y, guess_pos.z);

        Eigen::Matrix4f init_guess = (init_translation * init_rotation_z * init_rotation_y * init_rotation_x).matrix();

        t3_end = ros::Time::now();
        d3 = t3_end - t3_start;

        t4_start = ros::Time::now();
        pcl::PointCloud<pcl::PointXYZI>::Ptr output_cloud(new pcl::PointCloud<pcl::PointXYZI>);
        ndt.align(*output_cloud, init_guess);

        t = ndt.getFinalTransformation();
	pcl::PointCloud<pcl::PointXYZI>::Ptr transformed_additional_map_ptr (new pcl::PointCloud<pcl::PointXYZI>());
	transformed_additional_map_ptr->header.frame_id = "/map";
	pcl::transformPointCloud(*additional_map_ptr, *transformed_additional_map_ptr, t);
	sensor_msgs::PointCloud2::Ptr msg_ptr(new sensor_msgs::PointCloud2);

	pcl::toROSMsg(*transformed_additional_map_ptr, *msg_ptr);
	msg_ptr->header.frame_id = "/map";
	ndt_map_pub.publish(*msg_ptr);

	// Writing Point Cloud data to PCD file
	pcl::io::savePCDFileASCII("global_map.pcd", *transformed_additional_map_ptr);
	std::cout << "Saved " << transformed_additional_map_ptr->points.size() << " data points to global_map.pcd." << std::endl;

	pcl::PointCloud<pcl::PointXYZRGB> output;
	output.width = transformed_additional_map_ptr->width;
	output.height = transformed_additional_map_ptr->height;
	output.points.resize(output.width * output.height);

	for(size_t i = 0; i < output.points.size(); i++){
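	  // Copy XYZ and color every point green (255 << 8 packs the G channel of the packed RGB field).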
	  output.points[i].x = transformed_additional_map_ptr->points[i].x;
	  output.points[i].y = transformed_additional_map_ptr->points[i].y;
	  output.points[i].z = transformed_additional_map_ptr->points[i].z;
	  output.points[i].rgb = 255 << 8;
	}

	pcl::io::savePCDFileASCII("global_map_rgb.pcd", output);
	std::cout << "Saved " << output.points.size() << " data points to global_map_rgb.pcd." << std::endl;

        t4_end = ros::Time::now();
        d4 = t4_end - t4_start;

        t5_start = ros::Time::now();
        /*
         tf::Vector3 origin;
         origin.setValue(static_cast<double>(t(0,3)), static_cast<double>(t(1,3)), static_cast<double>(t(2,3)));
         */

        tf::Matrix3x3 tf3d;

        tf3d.setValue(static_cast<double>(t(0, 0)), static_cast<double>(t(0, 1)), static_cast<double>(t(0, 2)), static_cast<double>(t(1, 0)), static_cast<double>(t(1, 1)), static_cast<double>(t(1, 2)), static_cast<double>(t(2, 0)), static_cast<double>(t(2, 1)), static_cast<double>(t(2, 2)));

        // Update current_pos.
        current_pos.x = t(0, 3);
        current_pos.y = t(1, 3);
        current_pos.z = t(2, 3);
        tf3d.getRPY(current_pos.roll, current_pos.pitch, current_pos.yaw, 1);

	// control_pose
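	// Offset current_pos by the control shift rotated by the (angle-adjusted) yaw, and lower it by control_shift_z.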
	current_pos_control.roll = current_pos.roll;
	current_pos_control.pitch = current_pos.pitch;
	current_pos_control.yaw = current_pos.yaw - angle / 180.0 * M_PI;
	double theta = current_pos_control.yaw;
	current_pos_control.x = cos(theta) * (-control_shift_x) + sin(theta) * (-control_shift_y) + current_pos.x;
	current_pos_control.y = -sin(theta) * (-control_shift_x) + cos(theta) * (-control_shift_y) + current_pos.y;
	current_pos_control.z = current_pos.z - control_shift_z;

        // transform "/velodyne" to "/map"
#if 0
        transform.setOrigin(tf::Vector3(current_pos.x, current_pos.y, current_pos.z));
        q.setRPY(current_pos.roll, current_pos.pitch, current_pos.yaw);
        transform.setRotation(q);
#else
	//
	// FIXME:
	// We corrected the angle of "/velodyne" so that pure_pursuit
	// can read this frame for the control.
	// However, this is not what we want because the scan of Velodyne
	// looks unmatched for the 3-D map on Rviz.
	// What we really want is to make another TF transforming "/velodyne"
	// to a new "/ndt_points" frame and modify pure_pursuit to
	// read this frame instead of "/velodyne".
	// Otherwise, can pure_pursuit just use "/ndt_frame"?
	//
        transform.setOrigin(tf::Vector3(current_pos_control.x, current_pos_control.y, current_pos_control.z));
        q.setRPY(current_pos_control.roll, current_pos_control.pitch, current_pos_control.yaw);
        transform.setRotation(q);
#endif

	q_control.setRPY(current_pos_control.roll, current_pos_control.pitch, current_pos_control.yaw);

        /*
         std::cout << "ros::Time::now(): " << ros::Time::now() << std::endl;
         std::cout << "ros::Time::now().sec: " << ros::Time::now().sec << std::endl;
         std::cout << "ros::Time::now().nsec: " << ros::Time::now().nsec << std::endl;
         */

	//        br.sendTransform(tf::StampedTransform(transform, scan_time, "map", "velodyne"));

        static tf::TransformBroadcaster pose_broadcaster;
        tf::Transform pose_transform;
        tf::Quaternion pose_q;

/*        pose_transform.setOrigin(tf::Vector3(0, 0, 0));
        pose_q.setRPY(0, 0, 0);
        pose_transform.setRotation(pose_q);
        pose_broadcaster.sendTransform(tf::StampedTransform(pose_transform, scan_time, "map", "ndt_frame"));
*/
        // publish the position
       // ndt_pose_msg.header.frame_id = "/ndt_frame";
        ndt_pose_msg.header.frame_id = "/map";
        ndt_pose_msg.header.stamp = scan_time;
        ndt_pose_msg.pose.position.x = current_pos.x;
        ndt_pose_msg.pose.position.y = current_pos.y;
        ndt_pose_msg.pose.position.z = current_pos.z;
        ndt_pose_msg.pose.orientation.x = q.x();
        ndt_pose_msg.pose.orientation.y = q.y();
        ndt_pose_msg.pose.orientation.z = q.z();
        ndt_pose_msg.pose.orientation.w = q.w();

        static tf::TransformBroadcaster pose_broadcaster_control;
        tf::Transform pose_transform_control;
        tf::Quaternion pose_q_control;

     /*   pose_transform_control.setOrigin(tf::Vector3(0, 0, 0));
        pose_q_control.setRPY(0, 0, 0);
        pose_transform_control.setRotation(pose_q_control);
        pose_broadcaster_control.sendTransform(tf::StampedTransform(pose_transform_control, scan_time, "map", "ndt_frame"));
*/
        // publish the position
     //   control_pose_msg.header.frame_id = "/ndt_frame";
        control_pose_msg.header.frame_id = "/map";
        control_pose_msg.header.stamp = scan_time;
        control_pose_msg.pose.position.x = current_pos_control.x;
        control_pose_msg.pose.position.y = current_pos_control.y;
        control_pose_msg.pose.position.z = current_pos_control.z;
        control_pose_msg.pose.orientation.x = q_control.x();
        control_pose_msg.pose.orientation.y = q_control.y();
        control_pose_msg.pose.orientation.z = q_control.z();
        control_pose_msg.pose.orientation.w = q_control.w();

        /*
         std::cout << "ros::Time::now(): " << ros::Time::now() << std::endl;
         std::cout << "ros::Time::now().sec: " << ros::Time::now().sec << std::endl;
         std::cout << "ros::Time::now().nsec: " << ros::Time::now().nsec << std::endl;
         */

        ndt_pose_pub.publish(ndt_pose_msg);
        control_pose_pub.publish(control_pose_msg);

        t5_end = ros::Time::now();
        d5 = t5_end - t5_start;

#ifdef OUTPUT
        // Writing position to position_log.txt
        std::ofstream ofs("position_log.txt", std::ios::app);
        if (!ofs) {
            std::cerr << "Could not open 'position_log.txt'." << std::endl;
            exit(1);
        }
        ofs << current_pos.x << " " << current_pos.y << " " << current_pos.z << " " << current_pos.roll << " " << current_pos.pitch << " " << current_pos.yaw << std::endl;
#endif

        // Calculate the offset (current_pos - previous_pos)
        offset_x = current_pos.x - previous_pos.x;
        offset_y = current_pos.y - previous_pos.y;
        offset_z = current_pos.z - previous_pos.z;
        offset_yaw = current_pos.yaw - previous_pos.yaw;

        // Update position and posture. current_pos -> previous_pos
        previous_pos.x = current_pos.x;
        previous_pos.y = current_pos.y;
        previous_pos.z = current_pos.z;
        previous_pos.roll = current_pos.roll;
        previous_pos.pitch = current_pos.pitch;
        previous_pos.yaw = current_pos.yaw;

        callback_end = ros::Time::now();
        d_callback = callback_end - callback_start;

        std::cout << "-----------------------------------------------------------------" << std::endl;
        std::cout << "Sequence number: " << input->header.seq << std::endl;
        std::cout << "Number of scan points: " << additional_map_ptr->size() << " points." << std::endl;
        std::cout << "Number of filtered scan points: " << filtered_additional_map_ptr->size() << " points." << std::endl;
        std::cout << "NDT has converged: " << ndt.hasConverged() << std::endl;
        std::cout << "Fitness score: " << ndt.getFitnessScore() << std::endl;
        std::cout << "Number of iteration: " << ndt.getFinalNumIteration() << std::endl;
        std::cout << "(x,y,z,roll,pitch,yaw):" << std::endl;
        std::cout << "(" << current_pos.x << ", " << current_pos.y << ", " << current_pos.z << ", " << current_pos.roll << ", " << current_pos.pitch << ", " << current_pos.yaw << ")" << std::endl;
        std::cout << "Transformation Matrix:" << std::endl;
        std::cout << t << std::endl;
#ifdef VIEW_TIME
        std::cout << "Duration of velodyne_callback: " << d_callback.toSec() << " secs." << std::endl;
        std::cout << "Adding scan points: " << d1.toSec() << " secs." << std::endl;
        std::cout << "VoxelGrid Filter: " << d2.toSec() << " secs." << std::endl;
        std::cout << "Guessing the initial gross estimation: " << d3.toSec() << " secs." << std::endl;
        std::cout << "NDT: " << d4.toSec() << " secs." << std::endl;
        std::cout << "tf: " << d5.toSec() << " secs." << std::endl;
#endif
        std::cout << "-----------------------------------------------------------------" << std::endl;
    }
}
static void scan_callback(const sensor_msgs::PointCloud2::ConstPtr& input)
{
	if (map_loaded == 1 && init_pos_set == 1) {
		matching_start = std::chrono::system_clock::now();

		static tf::TransformBroadcaster br;
		tf::Transform transform;
		tf::Quaternion predict_q, ndt_q, current_q, control_q, localizer_q;

		pcl::PointXYZ p;
		pcl::PointCloud<pcl::PointXYZ> scan;

		current_scan_time = input->header.stamp;

		pcl::fromROSMsg(*input, scan);

		if(_localizer == "velodyne"){
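			// Rebuild the scan from the raw Velodyne rings, keeping only every 'layer'-th ring within [min, max].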
			pcl::PointCloud<velodyne_pointcloud::PointXYZIR> tmp;
			pcl::fromROSMsg(*input, tmp);
			scan.points.clear();

			for (pcl::PointCloud<velodyne_pointcloud::PointXYZIR>::const_iterator item = tmp.begin(); item != tmp.end(); item++) {
				p.x = (double) item->x;
				p.y = (double) item->y;
				p.z = (double) item->z;
				if(item->ring >= min && item->ring <= max && item->ring % layer == 0 ){
					scan.points.push_back(p);
				}
			}
		}

		pcl::PointCloud<pcl::PointXYZ>::Ptr scan_ptr(new pcl::PointCloud<pcl::PointXYZ>(scan));
		pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_scan_ptr(new pcl::PointCloud<pcl::PointXYZ>());

		Eigen::Matrix4f t(Eigen::Matrix4f::Identity()); // base_link
		Eigen::Matrix4f t2(Eigen::Matrix4f::Identity()); // localizer

		// Downsampling the velodyne scan using VoxelGrid filter
		pcl::VoxelGrid<pcl::PointXYZ> voxel_grid_filter;
		voxel_grid_filter.setLeafSize(voxel_leaf_size, voxel_leaf_size, voxel_leaf_size);
		voxel_grid_filter.setInputCloud(scan_ptr);
		voxel_grid_filter.filter(*filtered_scan_ptr);

		// Setting point cloud to be aligned.
		ndt.setInputSource(filtered_scan_ptr);

		// Guess the initial gross estimation of the transformation
		predict_pose.x = previous_pose.x + offset_x;
		predict_pose.y = previous_pose.y + offset_y;
		predict_pose.z = previous_pose.z + offset_z;
		predict_pose.roll = previous_pose.roll;
		predict_pose.pitch = previous_pose.pitch;
		predict_pose.yaw = previous_pose.yaw + offset_yaw;

		Eigen::Translation3f init_translation(predict_pose.x, predict_pose.y, predict_pose.z);
		Eigen::AngleAxisf init_rotation_x(predict_pose.roll, Eigen::Vector3f::UnitX());
		Eigen::AngleAxisf init_rotation_y(predict_pose.pitch, Eigen::Vector3f::UnitY());
		Eigen::AngleAxisf init_rotation_z(predict_pose.yaw, Eigen::Vector3f::UnitZ());
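		// tf_btol presumably maps base_link to the localizer (sensor) frame, so the guess is expressed in localizer coordinates.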
		Eigen::Matrix4f init_guess = (init_translation * init_rotation_z * init_rotation_y * init_rotation_x) * tf_btol;

		pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud(new pcl::PointCloud<pcl::PointXYZ>);
		ndt.align(*output_cloud, init_guess);

		t = ndt.getFinalTransformation(); // localizer
		t2 = t * tf_ltob; // base_link

		iteration = ndt.getFinalNumIteration();
		score = ndt.getFitnessScore();
		trans_probability = ndt.getTransformationProbability();

		tf::Matrix3x3 mat_l; // localizer
		mat_l.setValue(static_cast<double>(t(0, 0)), static_cast<double>(t(0, 1)), static_cast<double>(t(0, 2)),
				static_cast<double>(t(1, 0)), static_cast<double>(t(1, 1)), static_cast<double>(t(1, 2)),
				static_cast<double>(t(2, 0)), static_cast<double>(t(2, 1)), static_cast<double>(t(2, 2)));

		// Update localizer_pose
		localizer_pose.x = t(0, 3);
		localizer_pose.y = t(1, 3);
		localizer_pose.z = t(2, 3);
		mat_l.getRPY(localizer_pose.roll, localizer_pose.pitch, localizer_pose.yaw, 1);

		tf::Matrix3x3 mat_b; // base_link
		mat_b.setValue(static_cast<double>(t2(0, 0)), static_cast<double>(t2(0, 1)), static_cast<double>(t2(0, 2)),
				static_cast<double>(t2(1, 0)), static_cast<double>(t2(1, 1)), static_cast<double>(t2(1, 2)),
				static_cast<double>(t2(2, 0)), static_cast<double>(t2(2, 1)), static_cast<double>(t2(2, 2)));

		// Update ndt_pose
		ndt_pose.x = t2(0, 3);
		ndt_pose.y = t2(1, 3);
		ndt_pose.z = t2(2, 3);
		mat_b.getRPY(ndt_pose.roll, ndt_pose.pitch, ndt_pose.yaw, 1);

		// Compute the velocity
		scan_duration = current_scan_time - previous_scan_time;
		double secs = scan_duration.toSec();
		double distance = sqrt((ndt_pose.x - previous_pose.x) * (ndt_pose.x - previous_pose.x) +
				(ndt_pose.y - previous_pose.y) * (ndt_pose.y - previous_pose.y) +
				(ndt_pose.z - previous_pose.z) * (ndt_pose.z - previous_pose.z));

		predict_pose_error = sqrt((ndt_pose.x - predict_pose.x) * (ndt_pose.x - predict_pose.x) +
				(ndt_pose.y - predict_pose.y) * (ndt_pose.y - predict_pose.y) +
				(ndt_pose.z - predict_pose.z) * (ndt_pose.z - predict_pose.z));

		current_velocity = distance / secs;
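		// Smooth the velocity with a 3-sample moving average and clamp to zero below 0.2 m/s.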
		current_velocity_smooth = (current_velocity + previous_velocity + second_previous_velocity) / 3.0;
		if(current_velocity_smooth < 0.2){
			current_velocity_smooth = 0.0;
		}
		current_acceleration = (current_velocity - previous_velocity) / secs;

		estimated_vel_mps.data = current_velocity;
		estimated_vel_kmph.data = current_velocity * 3.6;

		estimated_vel_mps_pub.publish(estimated_vel_mps);
		estimated_vel_kmph_pub.publish(estimated_vel_kmph);

		if(predict_pose_error <= PREDICT_POSE_THRESHOLD){
			use_predict_pose = 0;
		}else{
			use_predict_pose = 1;
		}
		use_predict_pose = 0;
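		// NOTE: use_predict_pose is forced back to 0, so the NDT pose is always adopted below.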

		if(use_predict_pose == 0){
			current_pose.x = ndt_pose.x;
			current_pose.y = ndt_pose.y;
			current_pose.z = ndt_pose.z;
			current_pose.roll = ndt_pose.roll;
			current_pose.pitch = ndt_pose.pitch;
			current_pose.yaw = ndt_pose.yaw;
		}else{
			current_pose.x = predict_pose.x;
			current_pose.y = predict_pose.y;
			current_pose.z = predict_pose.z;
			current_pose.roll = predict_pose.roll;
			current_pose.pitch = predict_pose.pitch;
			current_pose.yaw = predict_pose.yaw;
		}

		control_pose.roll = current_pose.roll;
		control_pose.pitch = current_pose.pitch;
		control_pose.yaw = current_pose.yaw - angle / 180.0 * M_PI;
		double theta = control_pose.yaw;
		control_pose.x = cos(theta) * (-control_shift_x) + sin(theta) * (-control_shift_y) + current_pose.x;
		control_pose.y = -sin(theta) * (-control_shift_x) + cos(theta) * (-control_shift_y) + current_pose.y;
		control_pose.z = current_pose.z - control_shift_z;

		// Set values for publishing pose
		predict_q.setRPY(predict_pose.roll, predict_pose.pitch, predict_pose.yaw);
		predict_pose_msg.header.frame_id = "/map";
		predict_pose_msg.header.stamp = current_scan_time;
		predict_pose_msg.pose.position.x = predict_pose.x;
		predict_pose_msg.pose.position.y = predict_pose.y;
		predict_pose_msg.pose.position.z = predict_pose.z;
		predict_pose_msg.pose.orientation.x = predict_q.x();
		predict_pose_msg.pose.orientation.y = predict_q.y();
		predict_pose_msg.pose.orientation.z = predict_q.z();
		predict_pose_msg.pose.orientation.w = predict_q.w();

		ndt_q.setRPY(ndt_pose.roll, ndt_pose.pitch, ndt_pose.yaw);
		ndt_pose_msg.header.frame_id = "/map";
		ndt_pose_msg.header.stamp = current_scan_time;
		ndt_pose_msg.pose.position.x = ndt_pose.x;
		ndt_pose_msg.pose.position.y = ndt_pose.y;
		ndt_pose_msg.pose.position.z = ndt_pose.z;
		ndt_pose_msg.pose.orientation.x = ndt_q.x();
		ndt_pose_msg.pose.orientation.y = ndt_q.y();
		ndt_pose_msg.pose.orientation.z = ndt_q.z();
		ndt_pose_msg.pose.orientation.w = ndt_q.w();

		current_q.setRPY(current_pose.roll, current_pose.pitch, current_pose.yaw);
		current_pose_msg.header.frame_id = "/map";
		current_pose_msg.header.stamp = current_scan_time;
		current_pose_msg.pose.position.x = current_pose.x;
		current_pose_msg.pose.position.y = current_pose.y;
		current_pose_msg.pose.position.z = current_pose.z;
		current_pose_msg.pose.orientation.x = current_q.x();
		current_pose_msg.pose.orientation.y = current_q.y();
		current_pose_msg.pose.orientation.z = current_q.z();
		current_pose_msg.pose.orientation.w = current_q.w();

		control_q.setRPY(control_pose.roll, control_pose.pitch, control_pose.yaw);
		control_pose_msg.header.frame_id = "/map";
		control_pose_msg.header.stamp = current_scan_time;
		control_pose_msg.pose.position.x = control_pose.x;
		control_pose_msg.pose.position.y = control_pose.y;
		control_pose_msg.pose.position.z = control_pose.z;
		control_pose_msg.pose.orientation.x = control_q.x();
		control_pose_msg.pose.orientation.y = control_q.y();
		control_pose_msg.pose.orientation.z = control_q.z();
		control_pose_msg.pose.orientation.w = control_q.w();

		localizer_q.setRPY(localizer_pose.roll, localizer_pose.pitch, localizer_pose.yaw);
		localizer_pose_msg.header.frame_id = "/map";
		localizer_pose_msg.header.stamp = current_scan_time;
		localizer_pose_msg.pose.position.x = localizer_pose.x;
		localizer_pose_msg.pose.position.y = localizer_pose.y;
		localizer_pose_msg.pose.position.z = localizer_pose.z;
		localizer_pose_msg.pose.orientation.x = localizer_q.x();
		localizer_pose_msg.pose.orientation.y = localizer_q.y();
		localizer_pose_msg.pose.orientation.z = localizer_q.z();
		localizer_pose_msg.pose.orientation.w = localizer_q.w();

		predict_pose_pub.publish(predict_pose_msg);
		ndt_pose_pub.publish(ndt_pose_msg);
		current_pose_pub.publish(current_pose_msg);
		control_pose_pub.publish(control_pose_msg);
		localizer_pose_pub.publish(localizer_pose_msg);

		// Send TF "/base_link" to "/map"
		transform.setOrigin(tf::Vector3(current_pose.x, current_pose.y, current_pose.z));
		transform.setRotation(current_q);
		br.sendTransform(tf::StampedTransform(transform, current_scan_time, "/map", "/base_link"));

		matching_end = std::chrono::system_clock::now();
		exe_time = std::chrono::duration_cast<std::chrono::microseconds>(matching_end-matching_start).count()/1000.0;
		time_ndt_matching.data = exe_time;
		time_ndt_matching_pub.publish(time_ndt_matching);

		// Set values for /estimate_twist
		angular_velocity = (current_pose.yaw - previous_pose.yaw) / secs;

		estimate_twist_msg.header.stamp = current_scan_time;
		estimate_twist_msg.twist.linear.x = current_velocity;
		estimate_twist_msg.twist.linear.y = 0.0;
		estimate_twist_msg.twist.linear.z = 0.0;
		estimate_twist_msg.twist.angular.x = 0.0;
		estimate_twist_msg.twist.angular.y = 0.0;
		estimate_twist_msg.twist.angular.z = angular_velocity;

		estimate_twist_pub.publish(estimate_twist_msg);

		geometry_msgs::Vector3Stamped estimate_vel_msg;
		estimate_vel_msg.header.stamp = current_scan_time;
		estimate_vel_msg.vector.x = current_velocity;
		estimated_vel_pub.publish(estimate_vel_msg);

		// Set values for /ndt_stat
		ndt_stat_msg.header.stamp = current_scan_time;
		ndt_stat_msg.exe_time = time_ndt_matching.data;
		ndt_stat_msg.iteration = iteration;
		ndt_stat_msg.score = score;
		ndt_stat_msg.velocity = current_velocity;
		ndt_stat_msg.acceleration = current_acceleration;
		ndt_stat_msg.use_predict_pose = 0;

		ndt_stat_pub.publish(ndt_stat_msg);

		/* Compute NDT_Reliability */
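		// Heuristic: weighted (Wa, Wb, Wc) blend of execution time, iteration count and (2 - transformation probability), each scaled to a 0-100 range.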
		ndt_reliability.data = Wa * (exe_time/100.0) * 100.0 + Wb * (iteration/10.0) * 100.0 + Wc * ((2.0-trans_probability)/2.0) * 100.0;
		ndt_reliability_pub.publish(ndt_reliability);

#ifdef OUTPUT
		// Output log.csv
		std::ofstream ofs_log("log.csv", std::ios::app);
		if (!ofs_log) {
			std::cerr << "Could not open 'log.csv'." << std::endl;
			exit(1);
		}
		ofs_log << input->header.seq << ","
				<< step_size << ","
				<< trans_eps << ","
				<< voxel_leaf_size << ","
				<< current_pose.x << ","
				<< current_pose.y << ","
				<< current_pose.z << ","
				<< current_pose.roll << ","
				<< current_pose.pitch << ","
				<< current_pose.yaw << ","
				<< predict_pose.x << ","
				<< predict_pose.y << ","
				<< predict_pose.z << ","
				<< predict_pose.roll << ","
				<< predict_pose.pitch << ","
				<< predict_pose.yaw << ","
				<< current_pose.x - predict_pose.x << ","
				<< current_pose.y - predict_pose.y << ","
				<< current_pose.z - predict_pose.z << ","
				<< current_pose.roll - predict_pose.roll << ","
				<< current_pose.pitch - predict_pose.pitch << ","
				<< current_pose.yaw - predict_pose.yaw << ","
				<< predict_pose_error << ","
				<< iteration << ","
				<< score << ","
				<< trans_probability << ","
				<< ndt_reliability.data << ","
				<< current_velocity << ","
				<< current_velocity_smooth << ","
				<< current_acceleration << ","
				<< angular_velocity << ","
				<< time_ndt_matching.data << ","
				<< std::endl;
#endif

		std::cout << "-----------------------------------------------------------------" << std::endl;
		std::cout << "Sequence: " << input->header.seq << std::endl;
		std::cout << "Timestamp: " << input->header.stamp << std::endl;
		std::cout << "Frame ID: " << input->header.frame_id << std::endl;
		std::cout << "Number of Scan Points: " << scan_ptr->size() << " points." << std::endl;
		std::cout << "Number of Filtered Scan Points: " << filtered_scan_ptr->size() << " points." << std::endl;
		std::cout << "Leaf Size: " << voxel_leaf_size << std::endl;
		std::cout << "NDT has converged: " << ndt.hasConverged() << std::endl;
		std::cout << "Fitness Score: " << ndt.getFitnessScore() << std::endl;
		std::cout << "Transformation Probability: " << ndt.getTransformationProbability() << std::endl;
		std::cout << "Execution Time: " << exe_time << " ms." << std::endl;
		std::cout << "Number of Iterations: " << ndt.getFinalNumIteration() << std::endl;
		std::cout << "NDT Reliability: " << ndt_reliability.data << std::endl;
		std::cout << "(x,y,z,roll,pitch,yaw): " << std::endl;
		std::cout << "(" << current_pose.x << ", " << current_pose.y << ", " << current_pose.z
				<< ", " << current_pose.roll << ", " << current_pose.pitch << ", " << current_pose.yaw << ")" << std::endl;
		std::cout << "Transformation Matrix: " << std::endl;
		std::cout << t << std::endl;
		std::cout << "-----------------------------------------------------------------" << std::endl;

		// Update previous_***
		offset_x = current_pose.x - previous_pose.x;
		offset_y = current_pose.y - previous_pose.y;
		offset_z = current_pose.z - previous_pose.z;
		offset_yaw = current_pose.yaw - previous_pose.yaw;

		previous_pose.x = current_pose.x;
		previous_pose.y = current_pose.y;
		previous_pose.z = current_pose.z;
		previous_pose.roll = current_pose.roll;
		previous_pose.pitch = current_pose.pitch;
		previous_pose.yaw = current_pose.yaw;

		previous_scan_time.sec = current_scan_time.sec;
		previous_scan_time.nsec = current_scan_time.nsec;

		second_previous_velocity = previous_velocity;
		previous_velocity = current_velocity;
		previous_acceleration = current_acceleration;
		previous_estimated_vel_kmph.data = estimated_vel_kmph.data;
	}
}
Example #8
0
static void points_callback(const sensor_msgs::PointCloud2::ConstPtr& input)
{
  if (map_loaded == 1 && init_pos_set == 1)
  {
    matching_start = std::chrono::system_clock::now();

    static tf::TransformBroadcaster br;
    tf::Transform transform;
    tf::Quaternion predict_q, icp_q, current_q, localizer_q;

    pcl::PointXYZ p;
    pcl::PointCloud<pcl::PointXYZ> filtered_scan;

    current_scan_time = input->header.stamp;

    pcl::fromROSMsg(*input, filtered_scan);
    pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_scan_ptr(new pcl::PointCloud<pcl::PointXYZ>(filtered_scan));
    int scan_points_num = filtered_scan_ptr->size();

    Eigen::Matrix4f t(Eigen::Matrix4f::Identity());   // base_link
    Eigen::Matrix4f t2(Eigen::Matrix4f::Identity());  // localizer

    std::chrono::time_point<std::chrono::system_clock> align_start, align_end, getFitnessScore_start, getFitnessScore_end;
    static double align_time, getFitnessScore_time = 0.0;

    // Setting point cloud to be aligned.
//    ndt.setInputSource(filtered_scan_ptr);
    icp.setInputSource(filtered_scan_ptr);

    // Guess the initial gross estimation of the transformation
    predict_pose.x = previous_pose.x + offset_x;
    predict_pose.y = previous_pose.y + offset_y;
    predict_pose.z = previous_pose.z + offset_z;
    predict_pose.roll = previous_pose.roll;
    predict_pose.pitch = previous_pose.pitch;
    predict_pose.yaw = previous_pose.yaw + offset_yaw;

    Eigen::Translation3f init_translation(predict_pose.x, predict_pose.y, predict_pose.z);
    Eigen::AngleAxisf init_rotation_x(predict_pose.roll, Eigen::Vector3f::UnitX());
    Eigen::AngleAxisf init_rotation_y(predict_pose.pitch, Eigen::Vector3f::UnitY());
    Eigen::AngleAxisf init_rotation_z(predict_pose.yaw, Eigen::Vector3f::UnitZ());
    Eigen::Matrix4f init_guess = (init_translation * init_rotation_z * init_rotation_y * init_rotation_x) * tf_btol;

    pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud(new pcl::PointCloud<pcl::PointXYZ>);
//    ndt.align(*output_cloud, init_guess);

    icp.setMaximumIterations(maximum_iterations);
    icp.setTransformationEpsilon(transformation_epsilon);
    icp.setMaxCorrespondenceDistance(max_correspondence_distance);
    icp.setEuclideanFitnessEpsilon(euclidean_fitness_epsilon);
    icp.setRANSACOutlierRejectionThreshold(ransac_outlier_rejection_threshold);

    align_start = std::chrono::system_clock::now();
    icp.align(*output_cloud, init_guess);
    align_end = std::chrono::system_clock::now();
    align_time = std::chrono::duration_cast<std::chrono::microseconds>(align_end - align_start).count() / 1000.0;

//    t = ndt.getFinalTransformation();  // localizer
    t = icp.getFinalTransformation();  // localizer
    t2 = t * tf_ltob;                  // base_link

//    iteration = ndt.getFinalNumIteration();
//    score = ndt.getFitnessScore();

    getFitnessScore_start = std::chrono::system_clock::now();
    fitness_score = icp.getFitnessScore();
    getFitnessScore_end = std::chrono::system_clock::now();
    getFitnessScore_time = std::chrono::duration_cast<std::chrono::microseconds>(getFitnessScore_end - getFitnessScore_start).count() / 1000.0;

//    trans_probability = ndt.getTransformationProbability();

    tf::Matrix3x3 mat_l;  // localizer
    mat_l.setValue(static_cast<double>(t(0, 0)), static_cast<double>(t(0, 1)), static_cast<double>(t(0, 2)),
                   static_cast<double>(t(1, 0)), static_cast<double>(t(1, 1)), static_cast<double>(t(1, 2)),
                   static_cast<double>(t(2, 0)), static_cast<double>(t(2, 1)), static_cast<double>(t(2, 2)));

    // Update localizer_pose
    localizer_pose.x = t(0, 3);
    localizer_pose.y = t(1, 3);
    localizer_pose.z = t(2, 3);
    mat_l.getRPY(localizer_pose.roll, localizer_pose.pitch, localizer_pose.yaw, 1);

    tf::Matrix3x3 mat_b;  // base_link
    mat_b.setValue(static_cast<double>(t2(0, 0)), static_cast<double>(t2(0, 1)), static_cast<double>(t2(0, 2)),
                   static_cast<double>(t2(1, 0)), static_cast<double>(t2(1, 1)), static_cast<double>(t2(1, 2)),
                   static_cast<double>(t2(2, 0)), static_cast<double>(t2(2, 1)), static_cast<double>(t2(2, 2)));

    // Update icp_pose
    icp_pose.x = t2(0, 3);
    icp_pose.y = t2(1, 3);
    icp_pose.z = t2(2, 3);
    mat_b.getRPY(icp_pose.roll, icp_pose.pitch, icp_pose.yaw, 1);

    // Calculate the difference between icp_pose and predict_pose
    predict_pose_error = sqrt((icp_pose.x - predict_pose.x) * (icp_pose.x - predict_pose.x) +
                              (icp_pose.y - predict_pose.y) * (icp_pose.y - predict_pose.y) +
                              (icp_pose.z - predict_pose.z) * (icp_pose.z - predict_pose.z));

    if (predict_pose_error <= PREDICT_POSE_THRESHOLD)
    {
      use_predict_pose = 0;
    }
    else
    {
      use_predict_pose = 1;
    }
    use_predict_pose = 0;
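    // NOTE: use_predict_pose is forced back to 0, so the ICP pose is always adopted below.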

    if (use_predict_pose == 0)
    {
      current_pose.x = icp_pose.x;
      current_pose.y = icp_pose.y;
      current_pose.z = icp_pose.z;
      current_pose.roll = icp_pose.roll;
      current_pose.pitch = icp_pose.pitch;
      current_pose.yaw = icp_pose.yaw;
    }
    else
    {
      current_pose.x = predict_pose.x;
      current_pose.y = predict_pose.y;
      current_pose.z = predict_pose.z;
      current_pose.roll = predict_pose.roll;
      current_pose.pitch = predict_pose.pitch;
      current_pose.yaw = predict_pose.yaw;
    }

    // Compute the velocity and acceleration
    scan_duration = current_scan_time - previous_scan_time;
    double secs = scan_duration.toSec();
    diff_x = current_pose.x - previous_pose.x;
    diff_y = current_pose.y - previous_pose.y;
    diff_z = current_pose.z - previous_pose.z;
    diff_yaw = current_pose.yaw - previous_pose.yaw;
    diff = sqrt(diff_x * diff_x + diff_y * diff_y + diff_z * diff_z);

    current_velocity = diff / secs;
    current_velocity_x = diff_x / secs;
    current_velocity_y = diff_y / secs;
    current_velocity_z = diff_z / secs;
    angular_velocity = diff_yaw / secs;

    current_velocity_smooth = (current_velocity + previous_velocity + previous_previous_velocity) / 3.0;
    if (current_velocity_smooth < 0.2)
    {
      current_velocity_smooth = 0.0;
    }

    current_accel = (current_velocity - previous_velocity) / secs;
    current_accel_x = (current_velocity_x - previous_velocity_x) / secs;
    current_accel_y = (current_velocity_y - previous_velocity_y) / secs;
    current_accel_z = (current_velocity_z - previous_velocity_z) / secs;

    estimated_vel_mps.data = current_velocity;
    estimated_vel_kmph.data = current_velocity * 3.6;

    estimated_vel_mps_pub.publish(estimated_vel_mps);
    estimated_vel_kmph_pub.publish(estimated_vel_kmph);

    // Set values for publishing pose
    predict_q.setRPY(predict_pose.roll, predict_pose.pitch, predict_pose.yaw);
    predict_pose_msg.header.frame_id = "/map";
    predict_pose_msg.header.stamp = current_scan_time;
    predict_pose_msg.pose.position.x = predict_pose.x;
    predict_pose_msg.pose.position.y = predict_pose.y;
    predict_pose_msg.pose.position.z = predict_pose.z;
    predict_pose_msg.pose.orientation.x = predict_q.x();
    predict_pose_msg.pose.orientation.y = predict_q.y();
    predict_pose_msg.pose.orientation.z = predict_q.z();
    predict_pose_msg.pose.orientation.w = predict_q.w();

    icp_q.setRPY(icp_pose.roll, icp_pose.pitch, icp_pose.yaw);
    icp_pose_msg.header.frame_id = "/map";
    icp_pose_msg.header.stamp = current_scan_time;
    icp_pose_msg.pose.position.x = icp_pose.x;
    icp_pose_msg.pose.position.y = icp_pose.y;
    icp_pose_msg.pose.position.z = icp_pose.z;
    icp_pose_msg.pose.orientation.x = icp_q.x();
    icp_pose_msg.pose.orientation.y = icp_q.y();
    icp_pose_msg.pose.orientation.z = icp_q.z();
    icp_pose_msg.pose.orientation.w = icp_q.w();

    current_q.setRPY(current_pose.roll, current_pose.pitch, current_pose.yaw);
    current_pose_msg.header.frame_id = "/map";
    current_pose_msg.header.stamp = current_scan_time;
    current_pose_msg.pose.position.x = current_pose.x;
    current_pose_msg.pose.position.y = current_pose.y;
    current_pose_msg.pose.position.z = current_pose.z;
    current_pose_msg.pose.orientation.x = current_q.x();
    current_pose_msg.pose.orientation.y = current_q.y();
    current_pose_msg.pose.orientation.z = current_q.z();
    current_pose_msg.pose.orientation.w = current_q.w();

    localizer_q.setRPY(localizer_pose.roll, localizer_pose.pitch, localizer_pose.yaw);
    localizer_pose_msg.header.frame_id = "/map";
    localizer_pose_msg.header.stamp = current_scan_time;
    localizer_pose_msg.pose.position.x = localizer_pose.x;
    localizer_pose_msg.pose.position.y = localizer_pose.y;
    localizer_pose_msg.pose.position.z = localizer_pose.z;
    localizer_pose_msg.pose.orientation.x = localizer_q.x();
    localizer_pose_msg.pose.orientation.y = localizer_q.y();
    localizer_pose_msg.pose.orientation.z = localizer_q.z();
    localizer_pose_msg.pose.orientation.w = localizer_q.w();

    predict_pose_pub.publish(predict_pose_msg);
    icp_pose_pub.publish(icp_pose_msg);
    current_pose_pub.publish(current_pose_msg);
    localizer_pose_pub.publish(localizer_pose_msg);

    // Send TF "/base_link" to "/map"
    transform.setOrigin(tf::Vector3(current_pose.x, current_pose.y, current_pose.z));
    transform.setRotation(current_q);
    br.sendTransform(tf::StampedTransform(transform, current_scan_time, "/map", "/base_link"));

    matching_end = std::chrono::system_clock::now();
    exe_time = std::chrono::duration_cast<std::chrono::microseconds>(matching_end - matching_start).count() / 1000.0;
    time_icp_matching.data = exe_time;
    time_icp_matching_pub.publish(time_icp_matching);

    // Set values for /estimate_twist
    estimate_twist_msg.header.stamp = current_scan_time;
    estimate_twist_msg.twist.linear.x = current_velocity;
    estimate_twist_msg.twist.linear.y = 0.0;
    estimate_twist_msg.twist.linear.z = 0.0;
    estimate_twist_msg.twist.angular.x = 0.0;
    estimate_twist_msg.twist.angular.y = 0.0;
    estimate_twist_msg.twist.angular.z = angular_velocity;

    estimate_twist_pub.publish(estimate_twist_msg);

    geometry_msgs::Vector3Stamped estimate_vel_msg;
    estimate_vel_msg.header.stamp = current_scan_time;
    estimate_vel_msg.vector.x = current_velocity;
    estimated_vel_pub.publish(estimate_vel_msg);

    // Set values for /icp_stat
    icp_stat_msg.header.stamp = current_scan_time;
    icp_stat_msg.exe_time = time_icp_matching.data;
//    icp_stat_msg.iteration = iteration;
    icp_stat_msg.score = fitness_score;
    icp_stat_msg.velocity = current_velocity;
    icp_stat_msg.acceleration = current_accel;
    icp_stat_msg.use_predict_pose = 0;

    icp_stat_pub.publish(icp_stat_msg);

    // Write log
    if (!ofs)
    {
      std::cerr << "Could not open " << filename << "." << std::endl;
      exit(1);
    }
    ofs << input->header.seq << "," << scan_points_num << ","
            << current_pose.x << "," << current_pose.y << "," << current_pose.z << "," << current_pose.roll << ","
            << current_pose.pitch << "," << current_pose.yaw << "," << predict_pose.x << "," << predict_pose.y << ","
            << predict_pose.z << "," << predict_pose.roll << "," << predict_pose.pitch << "," << predict_pose.yaw << ","
            << current_pose.x - predict_pose.x << "," << current_pose.y - predict_pose.y << ","
            << current_pose.z - predict_pose.z << "," << current_pose.roll - predict_pose.roll << ","
            << current_pose.pitch - predict_pose.pitch << "," << current_pose.yaw - predict_pose.yaw << ","
            << predict_pose_error << "," <<  "," << fitness_score << ","
            << "," << current_velocity << "," << current_velocity_smooth << "," << current_accel
            << "," << angular_velocity << "," << exe_time << "," << align_time << "," << getFitnessScore_time << std::endl;

    std::cout << "-----------------------------------------------------------------" << std::endl;
    std::cout << "Sequence: " << input->header.seq << std::endl;
    std::cout << "Timestamp: " << input->header.stamp << std::endl;
    std::cout << "Frame ID: " << input->header.frame_id << std::endl;
    //		std::cout << "Number of Scan Points: " << scan_ptr->size() << " points." << std::endl;
    std::cout << "Number of Filtered Scan Points: " << scan_points_num << " points." << std::endl;
    std::cout << "ICP has converged: " << icp.hasConverged() << std::endl;
    std::cout << "Fitness Score: " << fitness_score << std::endl;
//    std::cout << "Transformation Probability: " << ndt.getTransformationProbability() << std::endl;
    std::cout << "Execution Time: " << exe_time << " ms." << std::endl;
//    std::cout << "Number of Iterations: " << ndt.getFinalNumIteration() << std::endl;
//    std::cout << "NDT Reliability: " << ndt_reliability.data << std::endl;
    std::cout << "(x,y,z,roll,pitch,yaw): " << std::endl;
    std::cout << "(" << current_pose.x << ", " << current_pose.y << ", " << current_pose.z << ", " << current_pose.roll
              << ", " << current_pose.pitch << ", " << current_pose.yaw << ")" << std::endl;
    std::cout << "Transformation Matrix: " << std::endl;
    std::cout << t << std::endl;
    std::cout << "-----------------------------------------------------------------" << std::endl;

    // Update offset
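    // linear: reuse the last displacement; quadratic: constant-acceleration extrapolation (v*dt + a*dt^2) in x/y; zero: no motion prior.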
    if (_offset == "linear")
    {
      offset_x = diff_x;
      offset_y = diff_y;
      offset_z = diff_z;
      offset_yaw = diff_yaw;
    }
    else if (_offset == "quadratic")
    {
      offset_x = (current_velocity_x + current_accel_x * secs) * secs;
      offset_y = (current_velocity_y + current_accel_y * secs) * secs;
      offset_z = diff_z;
      offset_yaw = diff_yaw;
    }
    else if (_offset == "zero")
    {
      offset_x = 0.0;
      offset_y = 0.0;
      offset_z = 0.0;
      offset_yaw = 0.0;
    }

    // Update previous_***
    previous_pose.x = current_pose.x;
    previous_pose.y = current_pose.y;
    previous_pose.z = current_pose.z;
    previous_pose.roll = current_pose.roll;
    previous_pose.pitch = current_pose.pitch;
    previous_pose.yaw = current_pose.yaw;

    previous_scan_time.sec = current_scan_time.sec;
    previous_scan_time.nsec = current_scan_time.nsec;

    previous_previous_velocity = previous_velocity;
    previous_velocity = current_velocity;
    previous_velocity_x = current_velocity_x;
    previous_velocity_y = current_velocity_y;
    previous_velocity_z = current_velocity_z;
    previous_accel = current_accel;

    previous_estimated_vel_kmph.data = estimated_vel_kmph.data;
  }
}
int
main (int argc, char** argv)
{
  // Loading first scan of room.
  pcl::PointCloud<pcl::PointXYZ>::Ptr target_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile<pcl::PointXYZ> ("room_scan1.pcd", *target_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read file room_scan1.pcd \n");
    return (-1);
  }
  std::cout << "Loaded " << target_cloud->size () << " data points from room_scan1.pcd" << std::endl;

  // Loading second scan of room from new perspective.
  pcl::PointCloud<pcl::PointXYZ>::Ptr input_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  if (pcl::io::loadPCDFile<pcl::PointXYZ> ("room_scan2.pcd", *input_cloud) == -1)
  {
    PCL_ERROR ("Couldn't read file room_scan2.pcd \n");
    return (-1);
  }
  std::cout << "Loaded " << input_cloud->size () << " data points from room_scan2.pcd" << std::endl;

  // Filtering input scan to roughly 10% of original size to increase speed of registration.
  pcl::PointCloud<pcl::PointXYZ>::Ptr filtered_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  pcl::ApproximateVoxelGrid<pcl::PointXYZ> approximate_voxel_filter;
  approximate_voxel_filter.setLeafSize (0.2, 0.2, 0.2);
  approximate_voxel_filter.setInputCloud (input_cloud);
  approximate_voxel_filter.filter (*filtered_cloud);
  std::cout << "Filtered cloud contains " << filtered_cloud->size ()
            << " data points from room_scan2.pcd" << std::endl;

  // Initializing Normal Distributions Transform (NDT).
  pcl::NormalDistributionsTransform<pcl::PointXYZ, pcl::PointXYZ> ndt;

  // Setting scale dependent NDT parameters
  // Setting minimum transformation difference for termination condition.
  ndt.setTransformationEpsilon (0.01);
  // Setting maximum step size for More-Thuente line search.
  ndt.setStepSize (0.1);
  // Setting resolution of NDT grid structure (VoxelGridCovariance).
  ndt.setResolution (1.0);

  // Setting max number of registration iterations.
  ndt.setMaximumIterations (35);

  // Setting point cloud to be aligned.
  ndt.setInputCloud (filtered_cloud);
  // Setting point cloud to be aligned to.
  ndt.setInputTarget (target_cloud);

  // Set initial alignment estimate found using robot odometry.
  Eigen::AngleAxisf init_rotation (0.6931, Eigen::Vector3f::UnitZ ());
  Eigen::Translation3f init_translation (1.79387, 0.720047, 0);
  Eigen::Matrix4f init_guess = (init_translation * init_rotation).matrix ();

  // Calculating required rigid transform to align the input cloud to the target cloud.
  pcl::PointCloud<pcl::PointXYZ>::Ptr output_cloud (new pcl::PointCloud<pcl::PointXYZ>);
  ndt.align (*output_cloud, init_guess);

  std::cout << "Normal Distributions Transform has converged:" << ndt.hasConverged ()
            << " score: " << ndt.getFitnessScore () << std::endl;

  // Transforming unfiltered, input cloud using found transform.
  pcl::transformPointCloud (*input_cloud, *output_cloud, ndt.getFinalTransformation ());

  // Saving transformed input cloud.
  pcl::io::savePCDFileASCII ("room_scan2_transformed.pcd", *output_cloud);

  // Initializing point cloud visualizer
  boost::shared_ptr<pcl::visualization::PCLVisualizer>
  viewer_final (new pcl::visualization::PCLVisualizer ("3D Viewer"));
  viewer_final->setBackgroundColor (0, 0, 0);

  // Coloring and visualizing target cloud (red).
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ>
  target_color (target_cloud, 255, 0, 0);
  viewer_final->addPointCloud<pcl::PointXYZ> (target_cloud, target_color, "target cloud");
  viewer_final->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE,
                                                  1, "target cloud");

  // Coloring and visualizing transformed input cloud (green).
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ>
  output_color (output_cloud, 0, 255, 0);
  viewer_final->addPointCloud<pcl::PointXYZ> (output_cloud, output_color, "output cloud");
  viewer_final->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE,
                                                  1, "output cloud");

  // Starting visualizer
  viewer_final->addCoordinateSystem (1.0);
  viewer_final->initCameraParameters ();

  // Wait until visualizer window is closed.
  while (!viewer_final->wasStopped ())
  {
    viewer_final->spinOnce (100);
    boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  }

  return (0);
}