Example #1
//Specifies the information matrix for the odometry edge.
//The tf::Transform odomTf is given in the camera coordinate system.
void createOdometryEdge(int id1, int id2, tf::Transform& odomTf, LoadedEdge3D& edge)
{
  double infoCoeff = ParameterServer::instance()->get<double>("odometry_information_factor");
  edge.id1 = id1;
  edge.id2 = id2;

  tf::Vector3 origin = odomTf.getOrigin();
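  // Snap negligible translation and yaw to exact zero/identity so numerical
  // noise in near-stationary odometry does not accumulate in the graph.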
  if(std::fabs(origin[0]) < 1e-5) origin[0] = 0;
  if(std::fabs(origin[1]) < 1e-5) origin[1] = 0;
  if(std::fabs(origin[2]) < 1e-5) origin[2] = 0;
  odomTf.setOrigin(origin);
  if(std::fabs(tf::getYaw(odomTf.getRotation())) < 1e-7) odomTf.setRotation(tf::createIdentityQuaternion());

//Comment this block in once the odometry parameters are calibrated correctly.
//Make sure the final information matrix is given in the camera coordinate system,
//i.e. the information matrix needs to be transformed into the camera coordinate system:
//1. Calibrate the odometry -> extract the noise parameters
//2. Construct the information matrix from the traveled distance and the noise parameters
//3. Transform the information matrix into the camera coordinate frame
/* 
  //parameters of the Gaussian probabilistic motion model
  //http://www.mrpt.org/tutorials/programming/odometry-and-motion-models/probabilistic_motion_models/
  double alpha1 = 0.59;
  double alpha2 = 0.0014;
  double alpha3 = 0.02;
  double alpha4 = 0.00001;

  //hack, as we know the transformation
  double var_dist_min = 0.001;
  double var_theta_min = 0.01;
  double var_dist = alpha3 * std::fabs(odomTf.getOrigin()[2]); //z is the forward translation in the camera frame
  var_dist = std::max(var_dist, var_dist_min);
  var_dist = var_dist_min + var_dist*var_dist;
  double var_theta = alpha1 * std::fabs(tf::getYaw(odomTf.getRotation())); //this is the rotation, not the translation
  var_theta = std::max(var_theta, var_theta_min);
  var_theta = var_theta_min + var_theta*var_theta;
*/  
 
  edge.transform = tf2G2O(odomTf);
  edge.informationMatrix = Eigen::Matrix<double, 6, 6>::Ones();
  edge.informationMatrix = edge.informationMatrix*0.001*infoCoeff; //uniform, tiny weight so the edge barely influences the optimization

//  edge.informationMatrix = Eigen::Matrix<double, 6, 6>::Zero();
//  edge.informationMatrix(0,0) = infoCoeff*(1./var_dist); //translation in the floor plane (camera x)
//  edge.informationMatrix(1,1) = 10000000; //height above the floor is assumed fixed
//  edge.informationMatrix(2,2) = infoCoeff*(1./var_dist); //translation in the floor plane (camera z)
//  edge.informationMatrix(3,3) = 10000000; //pitch is assumed fixed
//  edge.informationMatrix(4,4) = infoCoeff*(1./var_theta); //rotation about the vertical (yaw)
//  edge.informationMatrix(5,5) = 10000000; //roll is assumed fixed
}
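Step 3 of the procedure sketched in the comments (transforming the information matrix into the camera coordinate frame) is not implemented anywhere in this example. A minimal sketch of how it could look, assuming a diagonal information matrix built from var_dist and var_theta and a known base-to-camera rotation R (R, T and the block layout here are assumptions, not part of the original code):

// Hypothetical sketch, not from the original source. For a pure rotation T,
// the information matrix transforms like the covariance: info' = T * info * T^T.
Eigen::Matrix<double, 6, 6> info = Eigen::Matrix<double, 6, 6>::Zero();
info(0,0) = info(1,1) = info(2,2) = 1.0 / var_dist;   // translation precision
info(3,3) = info(4,4) = info(5,5) = 1.0 / var_theta;  // rotation precision

Eigen::Matrix3d R;  // base-to-camera rotation, assumed known from the extrinsic calibration
Eigen::Matrix<double, 6, 6> T = Eigen::Matrix<double, 6, 6>::Zero();
T.block<3,3>(0,0) = R;  // rotates the translation block
T.block<3,3>(3,3) = R;  // rotates the rotation block
edge.informationMatrix = T * info * T.transpose();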
Example #2
//Retrieve the transform between the fixed frame and the cloud's frame at capture time,
//then emit the cloud together with that transform
void BagLoader::sendWithTransformation(pointcloud_type* cloud)
{
  ScopedTimer s(__FUNCTION__);
  ParameterServer* ps = ParameterServer::instance();
  std::string fixed_frame  = ps->get<std::string>("fixed_frame_name");
  std::string depth_frame_id = cloud->header.frame_id;
  ros::Time depth_time = cloud->header.stamp;
  tf::StampedTransform map2points;

  try{
    tflistener_->waitForTransform(fixed_frame, depth_frame_id, depth_time, ros::Duration(0.005));
    tflistener_->lookupTransform(fixed_frame, depth_frame_id, depth_time, map2points);
    //printTransform("Current Transform", map2points);

    QMatrix4x4 transform_to_map = g2o2QMatrix(tf2G2O(map2points));
    Q_EMIT setPointCloud(cloud, transform_to_map);
  }
  catch (const tf::TransformException& ex){
    ROS_ERROR("%s - No transformation available", ex.what());
  }
}
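As a usage sketch (hypothetical; the callback name and the message plumbing are assumptions, not part of the original source), this method would typically be fed from a point-cloud subscriber, with tflistener_ owned by the class:

// Hypothetical caller: convert an incoming ROS message into the PCL cloud
// type and hand it to sendWithTransformation, which looks up tf and emits
// the setPointCloud signal.
void BagLoader::cloudCallback(const sensor_msgs::PointCloud2ConstPtr& msg)
{
  pointcloud_type* cloud = new pointcloud_type();
  pcl::fromROSMsg(*msg, *cloud);  // fills the points as well as header.frame_id and header.stamp
  sendWithTransformation(cloud);
}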