Example #1
  void laserCb(const sensor_msgs::LaserScan &scan) {
    // No odom estimate
    bool map_change = matcher_.addScan(Pose2d(0.0, 0.0, 0.0), scan);
    if (debug_) {
      if (map_change) {
        pmap_.publish(matcher_.map().occGrid());
      }
    }

    odom_.header.stamp = scan.header.stamp;
    tf::poseTFToMsg(matcher_.pose().tf(), odom_.pose.pose);
    // Get velocity estimate
    if (!have_pose_) {
      last_pose_ = matcher_.pose();
      last_pose_time_ = scan.header.stamp;
      have_pose_ = true;
    } else {
      double dt = (scan.header.stamp - last_pose_time_).toSec();
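      // Only refresh the velocity estimate once at least 0.2 s have elapsed
      // since the last stored pose.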
      if (dt > 0.2) {
        Pose2d change = matcher_.pose().ominus(last_pose_);      
        odom_.twist.twist.linear.x = change.x() / dt;
        odom_.twist.twist.linear.y = change.y() / dt;
        odom_.twist.twist.angular.z = change.t() / dt;
        last_pose_time_ = scan.header.stamp;
        last_pose_ = matcher_.pose();
      }
    }

    podom_.publish(odom_);
    tf_.sendTransform(
      tf::StampedTransform(matcher_.pose().tf(), scan.header.stamp,
                           odom_frame_, base_frame_));
  }
Example #2
void Trajectory2d::shortcut()
{
  if (m_trajectory.empty())
  {
    return;
  }

  /* Transform each 'following' pose into the cs
   * of the 'current' pose. A violation to the desired
   * driving direction is found if
   *    x-value <= 0 and driving forward
   *    x-value >= 0 and driving backward
   */
  Pose2d temp;
  for (ContainerType::iterator current = m_trajectory.begin();
       current != m_trajectory.end() && (current + 1) != m_trajectory.end();)
  {
    ContainerType::iterator next = current + 1;
    temp = current->pose().inverse() * next->pose();
    if ((m_is_forward_trajectory && (temp.translation().x() <= 0.))
        || (!m_is_forward_trajectory && (temp.translation().x() >= 0.)))
    {
      // Erase the pose that violates the desired driving direction and
      // re-check 'current' against its new successor.
      erase(next);
    }
    else
    {
      ++current;
    }
  }

  // recalculate curvature.
  if (curvatureAvailable())
  {
    calculateCurvature();
  }
}
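Example #3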
Position2d PositionController::getStopLineReachedPose(const Pose2d& junction_start_pose) const
{
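  // Fixed offset of the stop pose relative to the junction start pose
  // (x: -0.6 m, y: -0.23 m, no rotation).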
  Pose2d diff;
  PoseTraits<Pose2d>::fromPositionAndOrientationRPY(diff, -0.6, -0.23, 0.0);

  const Pose2d stop_pose = junction_start_pose * diff;

  ExtendedPose2d projection;
  double distance;
  std::size_t index;

  m_lateral_controller.calculateProjection(m_lateral_controller.getTrajectory(),
                                           stop_pose.translation(), projection,
                                           distance, index);

  return projection.getPosition();
}
Example #4
void OdometryMotionModel2d::step(Pose2d &s_k,
                                 Pose2d &s_km,
                                 Odometry2d &input_k,
                                 const double dT){

  Pose2d::Vec x_k_i_;       /* \f[ \begin{bmatrix} x \\ y \\ \theta \end{bmatrix}_{k} \f]*/
  Pose2d::Vec x_km_i_;      /* \f[ \begin{bmatrix} x \\ y \\ \theta \end{bmatrix}_{k-1} \f]*/
  Eigen::Vector2d p_k_i_;   /* \f[ \begin{bmatrix} x \\ y \end{bmatrix}_{k} \f]*/
  Eigen::Vector2d p_km_i_;  /* \f[ \begin{bmatrix} x \\ y \end{bmatrix}_{k-1} \f]*/
  double theta_k_;          /* \f[ \theta_k \f] */
  double theta_km_;         /* \f[ \theta_{k-1} \f] */
  Eigen::Matrix2d C_k_i_;   /* rotation matrix k */
  Eigen::Matrix2d C_km_i_;  /* rotation matrix k-1 */
  
  Eigen::Vector3d u_k_km_;  /* odometry input */
  Eigen::Matrix3d Sd_k_km_; /* uncertainty of translation input */
  Eigen::Vector2d dp_k_km_; /* translation input */
  double dtheta_k_km_;      /* rotation input */
  Eigen::Matrix2d C_k_km_;  /* rotation matrix from odometry input */
 
  /* State at k-1 */
  s_km.get(x_km_i_);
  p_km_i_ = x_km_i_.head(2);
  theta_km_ = x_km_i_(2);
  double ct = cos(theta_km_);
  double st = sin(theta_km_);
  C_km_i_ << ct, st, -st, ct;

  /* Odometry */
  double t;
  input_k.get(u_k_km_, t);
  dp_k_km_ = u_k_km_.head(2);
  dtheta_k_km_ = u_k_km_(2);
  ct = cos(dtheta_k_km_);
  st = sin(dtheta_k_km_);
  C_k_km_ << ct, st, -st, ct;

  /* Step forward */
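  /* dp_k_km_ is expressed in the body frame at k-1, so rotate it into the
     inertial frame with C_km_i_.transpose() before adding it to the position. */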
  p_k_i_ = p_km_i_ + C_km_i_.transpose() * dp_k_km_;
  C_k_i_ = C_k_km_ * C_km_i_;

  /* Write state at k */
  theta_k_ = atan2( C_k_i_(0, 1), C_k_i_(0, 0) );
  x_k_i_.head(2) = p_k_i_;
  x_k_i_(2) = theta_k_;
  s_k.set(x_k_i_);
}
Example #5
double MeasurementModel_RngBrg::probabilityOfDetection( const Pose2d &pose,
                                                        const Landmark2d &landmark,
                                                        bool &isCloseToSensingLimit ){

  Pose2d::Vec robotPose;
  Landmark2d::Vec landmarkState;
  double range, Pd;
  
  isCloseToSensingLimit = false;

  pose.get(robotPose);
  landmark.get(landmarkState);

  range = sqrt( pow(landmarkState(0) - robotPose(0), 2)
              + pow(landmarkState(1) - robotPose(1), 2) );

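  // Inside the sensing range the nominal detection probability applies;
  // the near-limit flag is raised whenever the range lies within
  // rangeLimBuffer_ of either limit, on the inside or the outside.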
  if( range <= config.rangeLimMax_ && range >= config.rangeLimMin_ ){
    Pd = config.probabilityOfDetection_;
    if( range >= (config.rangeLimMax_ - config.rangeLimBuffer_) ||
        range <= (config.rangeLimMin_ + config.rangeLimBuffer_) )
      isCloseToSensingLimit = true;
  } else {
    Pd = 0;
    if( range <= (config.rangeLimMax_ + config.rangeLimBuffer_) &&
        range >= (config.rangeLimMin_ - config.rangeLimBuffer_) )
      isCloseToSensingLimit = true;
  }

  return Pd;
}
Example #6
bool MeasurementModel_XY::measure(const Pose2d &pose,
                                  const Landmark2d &landmark,
                                  Measurement2d &measurement,
                                  Eigen::Matrix2d *jacobian_wrt_lmk,
                                  Eigen::Matrix<double, 2, 3> *jacobian_wrt_pose){
  
  Eigen::Vector3d robotPose;
  Eigen::Vector2d mean, landmarkState;
  Eigen::Matrix2d H_lmk, landmarkUncertainty, cov;
  Eigen::Matrix<double, 2, 3> H_robot;
  Eigen::Matrix3d robotUncertainty;
  double dx_I, dy_I, dx_R, dy_R, c_RI, s_RI, range, range2;

  pose.get(robotPose, robotUncertainty);
  landmark.get(landmarkState,landmarkUncertainty);

  dx_I = landmarkState(0) - robotPose(0);
  dy_I = landmarkState(1) - robotPose(1);
  c_RI = cos(robotPose(2));
  s_RI = sin(robotPose(2));

  range2 = pow(dx_I, 2) + pow(dy_I, 2) ;
  range = sqrt(range2);

  dx_R =  c_RI * dx_I + s_RI * dy_I;
  dy_R = -s_RI * dx_I + c_RI * dy_I;
  mean << dx_R, dy_R ;

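  // Jacobian of the body-frame measurement with respect to the landmark
  // position: simply the world-to-body rotation.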
  H_lmk <<  c_RI, s_RI,
           -s_RI, c_RI;

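  // Jacobian of the body-frame measurement with respect to the robot pose
  // (x, y, theta).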
  H_robot << -c_RI, -s_RI, -dx_I*s_RI + dy_I*c_RI,
              s_RI, -c_RI, -dx_I*c_RI - dy_I*s_RI;

  cov = H_lmk * landmarkUncertainty * H_lmk.transpose() + H_robot * robotUncertainty * H_robot.transpose() + R_;
  measurement.set(mean, cov);

  if(jacobian_wrt_lmk != NULL)
    *jacobian_wrt_lmk = H_lmk;

  if(jacobian_wrt_pose != NULL)
    *jacobian_wrt_pose = H_robot;

  if(range > config.rangeLimMax_ || range < config.rangeLimMin_)
    return false;
  else
    return true;
}
Example #7
void MeasurementModel_RngBrg::inverseMeasure(const Pose2d &pose,
                                             const Measurement2d &measurement,
                                             Landmark2d &landmark){
  Eigen::Vector3d poseState;
  Eigen::Vector2d measurementState, mean;
  Eigen::Matrix2d measurementUncertainty, covariance, Hinv;
 
  pose.get(poseState);
  measurement.get(measurementState);
  this->getNoise(measurementUncertainty); 
  mean << poseState(0) + measurementState(0) * cos( poseState(2) + measurementState(1) ),
          poseState(1) + measurementState(0) * sin( poseState(2) + measurementState(1) );

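  // Jacobian of the inverse measurement (landmark position in the world frame)
  // with respect to [range, bearing]; used to map the measurement noise into
  // the landmark covariance.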
  Hinv << cos(poseState(2)+measurementState(1)) , -measurementState(0)*sin(poseState(2)+measurementState(1)) ,
          sin(poseState(2)+measurementState(1)) , measurementState(0)*cos(poseState(2)+measurementState(1));

  covariance = Hinv * measurementUncertainty * Hinv.transpose();
  landmark.set( mean, covariance );

}
Example #8
bool MeasurementModel_RngBrg::measure(const Pose2d &pose,
                                      const Landmark2d &landmark,
                                      Measurement2d &measurement,
                                      Eigen::Matrix2d *jacobian){

  Eigen::Vector3d robotPose;
  Eigen::Vector2d mean, landmarkState;
  Eigen::Matrix2d H, landmarkUncertainty, cov;
  double range, bearing;

  pose.get(robotPose);
  landmark.get(landmarkState,landmarkUncertainty);

  range = sqrt( pow(landmarkState(0) - robotPose(0), 2)
              + pow(landmarkState(1) - robotPose(1), 2) );

  bearing = atan2( landmarkState(1) - robotPose(1) , landmarkState(0) - robotPose(0) ) - robotPose(2);

  while(bearing>PI) bearing-=2*PI;
  while(bearing<-PI) bearing+=2*PI;

  mean << range, bearing ;

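  // Jacobian of [range, bearing] with respect to the landmark position:
  // the first row is d(range)/d(landmark), i.e. the unit vector toward the
  // landmark; the second row is d(bearing)/d(landmark).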
  H <<  (landmarkState(0)-robotPose(0))/mean(0)          , (landmarkState(1)-robotPose(1))/mean(0) ,
        -(landmarkState(1)-robotPose(1))/(pow(mean(0),2)), (landmarkState(0)-robotPose(0))/pow(mean(0),2) ;
  
  cov = H * landmarkUncertainty * H.transpose() + R_;
  measurement.set(mean, cov);

  
  if(jacobian != NULL)
    *jacobian = H;

  if(range > config.rangeLimMax_ || range < config.rangeLimMin_)
    return false;
  else
    return true;
}
Example #9
void MeasurementModel_XY::inverseMeasure(const Pose2d &pose,
                                         const Measurement2d &measurement,
                                         Landmark2d &landmark){
  Eigen::Vector3d poseState;
  Eigen::Vector2d measurementState, mean;
  Eigen::Matrix2d measurementUncertainty, covariance, Hinv;
 
  pose.get(poseState);
  measurement.get(measurementState);
  this->getNoise(measurementUncertainty); 

  double c_RI = cos(poseState(2));
  double s_RI = sin(poseState(2));

  mean << poseState(0) + c_RI * measurementState(0) - s_RI * measurementState(1),
          poseState(1) + s_RI * measurementState(0) + c_RI * measurementState(1);

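  // The inverse measurement is linear in the body-frame measurement, so its
  // Jacobian is just the body-to-world rotation.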
  Hinv << c_RI, -s_RI,
          s_RI, c_RI;
  
  covariance = Hinv * measurementUncertainty * Hinv.transpose();
  landmark.set( mean, covariance );

}
Example #10
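// Returns a copy of 'in' with independent zero-mean Gaussian noise of standard
// deviation 'sigma' added to x, y, and heading (snrv() presumably draws a
// standard-normal sample).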
Pose2d add_noise(Pose2d in, double sigma) {
  Pose2d out (in.x()+sigma*snrv(),
              in.y()+sigma*snrv(),
              in.t()+sigma*snrv());
  return out;
}