Example #1
void MarkerVis::setMarker()
  {
    tf::Transform T_cB = T_bC.inverse();
    tf::Transform T_gB = T_cB * T_gC;
    tf::Quaternion q = T_gB.getRotation();
    marker.header.frame_id = "/world";
    marker.header.stamp = ros::Time();
    marker.type = visualization_msgs::Marker::CUBE;
    marker.action = visualization_msgs::Marker::ADD;
    marker.pose.position.x = T_gB.getOrigin()[0];
    marker.pose.position.y = T_gB.getOrigin()[1];
    marker.pose.position.z = T_gB.getOrigin()[2];
    //std::cout<<T_gB.getOrigin()[2]<<std::endl;
    //printf("%f, %f, %f\n", T_gB.getOrigin()[0]*1000, T_gB.getOrigin()[1]*1000, T_gB.getOrigin()[2]*1000);
    marker.pose.orientation.x = q[0];
    marker.pose.orientation.y = q[1];
    marker.pose.orientation.z = q[2];
    marker.pose.orientation.w = q[3];
    printf("%f, %f, %f, %f\n", q[0], q[1], q[2], q[3]);
    marker.scale.x = 0.2;
    marker.scale.y = 0.4;
    marker.scale.z = 0.01;
    marker.color.a = 1.0;
    marker.color.r = 1.0;
    marker.color.g = 0.0;
    marker.color.b = 0.0;
    marker_vis_pub.publish( marker );
  }
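Note: the MarkerVis class definition is not part of the example, so the members setMarker() touches are shown below only as an assumption; the member names come from the code above, while their types and the publisher setup are inferred.

// Assumed member sketch for the setMarker() examples (not the original class definition).
class MarkerVis {
public:
  void setMarker();
private:
  tf::Transform T_bC, T_gC;              // transforms composed above (T_gB = T_bC.inverse() * T_gC)
  visualization_msgs::Marker marker;
  ros::Publisher marker_vis_pub;         // e.g. nh.advertise<visualization_msgs::Marker>("marker_vis", 1)
};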
Example #2
void MarkerVis::setMarker()
  {
    tf::Transform T_cB = T_bC.inverse();
    tf::Transform T_gB = T_cB * T_gC;
    //printTransform(T_gB);
    tf::Quaternion q = T_gB.getRotation();

    marker.header.frame_id = "/world";
    marker.header.stamp = ros::Time();
    //marker.ns = "my_namespace";
    //marker.id = 0;
    marker.type = visualization_msgs::Marker::CUBE;
    marker.action = visualization_msgs::Marker::ADD;
    marker.pose.position.x = T_gB.getOrigin()[0];
    marker.pose.position.y = T_gB.getOrigin()[1];
    marker.pose.position.z = T_gB.getOrigin()[2];
    marker.pose.orientation.x = q[0];
    marker.pose.orientation.y = q[1];
    marker.pose.orientation.z = q[2];
    marker.pose.orientation.w = q[3];
    marker.scale.x = 0.2;
    marker.scale.y = 0.4;
    marker.scale.z = 0.01;
    marker.color.a = 1.0;
    marker.color.r = 1.0;
    marker.color.g = 0.0;
    marker.color.b = 0.0;
    marker_vis_pub.publish( marker );
  }
Example #3
void OriginSlaveWSCallback(const geometry_msgs::PoseStamped& msg){
  
  Tso.setOrigin(tf::Vector3(msg.pose.position.x, msg.pose.position.y, msg.pose.position.z));
  Tso.setRotation(tf::Quaternion(msg.pose.orientation.x, msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.orientation.w));
  
  // Absolute increments
  Toffset = Tmo.inverse() * Tso;
  Toffset.setOrigin(tf::Vector3(0., 0., 0.));
}
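OriginSlaveWSCallback stores the slave workspace origin and keeps only the rotational part of the master-to-slave offset (the translation is zeroed). A minimal wiring sketch, assuming Tso, Tmo and Toffset are global tf::Transform objects and that the topic name below is a placeholder:

// Hypothetical subscription for the callback above; topic name and queue size are assumptions.
ros::NodeHandle n;
ros::Subscriber origin_sub =
    n.subscribe("slave_workspace_origin", 1, OriginSlaveWSCallback);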
Example #4
void CheckerboardMeasurementEdge::setError(tf::Transform cameraToMarker) {
	double x, y;
	this->frameImageConverter->project(cameraToMarker.inverse(), x, y);

	double dist = cameraToMarker.getOrigin().length();

	// set error
	this->_error[0] = measurement.marker_data[0] - x;
	this->_error[1] = measurement.marker_data[1] - y;
}
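frameImageConverter->project() is a helper of this calibration edge and is not shown; conceptually it projects the (inverted) camera-to-marker transform into pixel coordinates, which are then compared against the measured corner. For orientation only, a plain pinhole projection sketch is given below; fx, fy, cx and cy are assumed intrinsics, and this is not the actual project() implementation:

// Pinhole projection sketch (assumed model): camera-frame point to pixel coordinates.
void projectPinhole(const tf::Vector3& p_cam, double fx, double fy,
                    double cx, double cy, double& u, double& v) {
  u = fx * p_cam.x() / p_cam.z() + cx;
  v = fy * p_cam.y() / p_cam.z() + cy;
}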
Example #5
void AmclNode::savePoseToServer()
{
  // We need to apply the last transform to the latest odom pose to get
  // the latest map pose to store.  We'll take the covariance from
  // last_published_pose.
  tf::Pose map_pose = latest_tf_.inverse() * latest_odom_pose_;
  double yaw,pitch,roll;
  map_pose.getBasis().getEulerYPR(yaw, pitch, roll);

  ROS_DEBUG("Saving pose to server. x: %.3f, y: %.3f", map_pose.getOrigin().x(), map_pose.getOrigin().y() );

  private_nh_.setParam("initial_pose_x", map_pose.getOrigin().x());
  private_nh_.setParam("initial_pose_y", map_pose.getOrigin().y());
  private_nh_.setParam("initial_pose_a", yaw);
  private_nh_.setParam("initial_cov_xx",
                                  last_published_pose.pose.covariance[6*0+0]);
  private_nh_.setParam("initial_cov_yy",
                                  last_published_pose.pose.covariance[6*1+1]);
  private_nh_.setParam("initial_cov_aa",
                                  last_published_pose.pose.covariance[6*5+5]);
}
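The parameters written here are typically read back on the next startup to re-seed the filter. A minimal sketch of that read path, with the default values as assumptions:

// Reading the saved pose back at startup (defaults are assumptions).
double init_x, init_y, init_a;
private_nh_.param("initial_pose_x", init_x, 0.0);
private_nh_.param("initial_pose_y", init_y, 0.0);
private_nh_.param("initial_pose_a", init_a, 0.0);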
Example #6
void transform( const tf2_msgs::TFMessage tfm )
{
//  ROS_INFO("Pa!");
  ROS_INFO( "Size: %s", tfm.transforms[0].child_frame_id.c_str() );
  static tf::TransformBroadcaster br;

  static tf::Transform TEC( tf::Quaternion(-0.5, 0.5, -0.5, 0.5), tf::Vector3(-10e-2, 0, 15e-2) );
  static tf::Transform TEM( tf::Quaternion(0, 0, 0, 1), tf::Vector3(15e-2, 0, 0) );
  static tf::Transform TTF( tf::Quaternion(-0.5, 0.5, 0.5, 0.5), tf::Vector3(20e-2, 5e-2, 0) );
  static tf::StampedTransform TCT, TWE;
  tf::Transform TWEpp, TWF;

  if( strcmp( "EndEffector", tfm.transforms[0].child_frame_id.c_str() ) == 0 )
  {

    tf::transformStampedMsgToTF( tfm.transforms[0], TWE );
//    ROS_INFO("Inside!");

  }
  if( strcmp( "ar_marker_6", tfm.transforms[0].child_frame_id.c_str() ) == 0 )
  {
    
    tf::transformStampedMsgToTF( tfm.transforms[0], TCT );
    TWF = TWE * TEC * TCT * TTF;
    TWEpp = TWF * TEM.inverse();

    br.sendTransform( tf::StampedTransform(TWEpp, ros::Time::now(), "World", "EndEffectorPP") );
    br.sendTransform( tf::StampedTransform(TEC, ros::Time::now(), "EndEffector", "Camera") );
    br.sendTransform( tf::StampedTransform(TEM, ros::Time::now(), "EndEffector", "MaleConn") );
    br.sendTransform( tf::StampedTransform(TCT, ros::Time::now(), "Camera", "Tag") );
    br.sendTransform( tf::StampedTransform(TTF, ros::Time::now(), "Tag", "FemaleConn") );
    br.sendTransform( tf::StampedTransform(TWF, ros::Time::now(), "World", "FemaleConn2") );

    geometry_msgs::Transform msg;
    tf::transformTFToMsg( TWEpp, msg );
    pubT.publish( msg );
  }
}
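The callback expects raw tf messages on its input and a global publisher pubT for the corrected end-effector pose, neither of which is shown. A minimal node sketch that wires both up; the node name, topic names and queue sizes are assumptions:

// Hypothetical wiring for the transform() callback above.
ros::Publisher pubT;

int main(int argc, char** argv)
{
  ros::init(argc, argv, "assembly_tf_node");            // node name is an assumption
  ros::NodeHandle n;
  pubT = n.advertise<geometry_msgs::Transform>("end_effector_pp", 10);
  ros::Subscriber tf_sub = n.subscribe("/tf", 100, transform);
  ros::spin();
  return 0;
}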
Example #7
void getTfDifference(const tf::Transform& a, const tf::Transform b, double& dist, double& angle)
{
  tf::Transform motion = a.inverse() * b;
  getTfDifference(motion, dist, angle);
}
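The two-argument overload this wrapper delegates to is not shown. A plausible sketch of it, taking the translation norm of the relative motion as the distance and the quaternion rotation angle as the angular difference (an assumption about the delegated overload, not its actual source):

// Sketch of the delegated overload (assumed implementation).
void getTfDifference(const tf::Transform& motion, double& dist, double& angle)
{
  dist  = motion.getOrigin().length();
  angle = motion.getRotation().getAngle();   // rotation angle in radians
}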
Example #8
int __attribute__((optimize("0"))) inv_kin(tf::Transform in_T06, l_r in_arm, ik_solution iksol[8]) {
  dh_theta = robot_thetas[in_arm];
  dh_d = ds[in_arm];
  dh_alpha = alphas[in_arm];
  dh_a = aas[in_arm];
  for (int i = 0; i < 8; i++) iksol[i] = ik_zerosol;

  if (in_arm >= dh_l_r_last) {
    ROS_ERROR("BAD ARM IN IK!!!");
    return -1;
  }

  for (int i = 0; i < 8; i++) iksol[i].arm = in_arm;

  //  Step 1, Compute P5
  tf::Transform T60 = in_T06.inverse();
  tf::Vector3 p6rcm = T60.getOrigin();
  tf::Vector3 p05[8];

  p6rcm[2] = 0;  // take projection onto x-y plane
  for (int i = 0; i < 2; i++) {
    tf::Vector3 p65 = (-1 + 2 * i) * Lw * p6rcm.normalize();
    p05[4 * i] = p05[4 * i + 1] = p05[4 * i + 2] = p05[4 * i + 3] = in_T06 * p65;
  }

  //  Step 2, compute displacement of prismatic joint d3
  for (int i = 0; i < 2; i++) {
    double insertion = 0;
    insertion += p05[4 * i].length();  // Two step process avoids compiler
                                       // optimization problem. (Yeah, right. It
                                       // was the compiler's problem...)

    if (insertion <= Lw) {
      cerr << "WARNING: mechanism at RCM singularity(Lw:" << Lw << "ins:" << insertion
           << ").  IK failing.\n";
      iksol[4 * i + 0].invalid = iksol[4 * i + 1].invalid = ik_invalid;
      iksol[4 * i + 2].invalid = iksol[4 * i + 3].invalid = ik_invalid;
      return -2;
    }
    iksol[4 * i + 0].d3 = iksol[4 * i + 1].d3 = -d4 - insertion;
    iksol[4 * i + 2].d3 = iksol[4 * i + 3].d3 = -d4 + insertion;
  }

  //  Step 3, calculate theta 2
  for (int i = 0; i < 8; i += 2)  // p05 solutions
  {
    double z0p5 = p05[i][2];

    double d = iksol[i].d3 + d4;
    double cth2 = 0;

    if (in_arm == dh_left)
      cth2 = 1 / (GM1 * GM3) * ((-z0p5 / d) - GM2 * GM4);
    else
      cth2 = 1 / (GM1 * GM3) * ((z0p5 / d) + GM2 * GM4);

    // Smooth roundoff errors at +/- 1.
    if (cth2 > 1 && cth2 < 1 + eps)
      cth2 = 1;
    else if (cth2 < -1 && cth2 > -1 - eps)
      cth2 = -1;

    if (cth2 > 1 || cth2 < -1) {
      iksol[i].invalid = iksol[i + 1].invalid = ik_invalid;
    } else {
      iksol[i].th2 = acos(cth2);
      iksol[i + 1].th2 = -acos(cth2);
    }
  }

  //  Step 4: Compute theta 1
  for (int i = 0; i < 8; i++) {
    if (iksol[i].invalid == ik_invalid) continue;

    double cth2 = cos(iksol[i].th2);
    double sth2 = sin(iksol[i].th2);
    double d = iksol[i].d3 + d4;
    double BB1 = sth2 * GM3;
    double BB2 = 0;
    tf::Matrix3x3 Bmx;  // using 3 vector and matrix bullet types for convenience.
    tf::Vector3 xyp05(p05[i]);
    xyp05[2] = 0;

    if (in_arm == dh_left) {
      BB2 = cth2 * GM2 * GM3 - GM1 * GM4;
      Bmx.setValue(BB1, BB2, 0, -BB2, BB1, 0, 0, 0, 1);
    } else {
      BB2 = cth2 * GM2 * GM3 + GM1 * GM4;
      Bmx.setValue(BB1, BB2, 0, BB2, -BB1, 0, 0, 0, 1);
    }

    tf::Vector3 scth1 = Bmx.inverse() * xyp05 * (1 / d);
    iksol[i].th1 = atan2(scth1[1], scth1[0]);
  }

  //  Step 5: get theta 4, 5, 6
  for (int i = 0; i < 8; i++) {
    if (iksol[i].invalid == ik_invalid) continue;

    // compute T03:
    dh_theta[0] = iksol[i].th1;
    dh_theta[1] = iksol[i].th2;
    dh_d[2] = iksol[i].d3;
    tf::Transform T03 = getFKTransform(0, 3);
    tf::Transform T36 = T03.inverse() * in_T06;

    double c5 = -T36.getBasis()[2][2];
    double s5 = (T36.getOrigin()[2] - d4) / Lw;

    // Compute theta 4:
    double c4, s4;
    if (fabs(c5) > eps) {
      c4 = T36.getOrigin()[0] / (Lw * c5);
      s4 = T36.getOrigin()[1] / (Lw * c5);
    } else {
      c4 = T36.getBasis()[0][2] / s5;
      s4 = T36.getBasis()[1][2] / s5;
    }
    iksol[i].th4 = atan2(s4, c4);

    // Compute theta 5:
    iksol[i].th5 = atan2(s5, c5);

    // Compute theta 6:
    double s6, c6;
    if (fabs(s5) > eps) {
      c6 = T36.getBasis()[2][0] / s5;
      s6 = -T36.getBasis()[2][1] / s5;
    } else {
      dh_theta[3] = iksol[i].th4;
      dh_theta[4] = iksol[i].th5;
      tf::Transform T05 = T03 * getFKTransform(3, 5);
      tf::Transform T56 = T05.inverse() * in_T06;
      c6 = T56.getBasis()[0][0];
      s6 = T56.getBasis()[2][0];
    }
    iksol[i].th6 = atan2(s6, c6);

    //		if (gTime%1000 == 0 && in_arm == dh_left )
    //		{
    //			log_msg("dh_iksols: [%d]\t( %3f,\t %3f,\t %3f,\t %3f,\t %3f,\t
    //%3f)",0,
    //					iksol[i].th1 * r2d,
    //					iksol[i].th2 * r2d,
    //					iksol[i].d3,
    //					iksol[i].th4 * r2d,
    //					iksol[i].th5 * r2d,
    //					iksol[i].th6 * r2d
    //					);
    //		}
  }
  return 0;
}
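A caller usually filters the eight returned branches by the invalid flag before picking one. A minimal usage sketch that uses only names appearing in the example; the selection criterion is left as a comment:

// Sketch: iterate the IK branches returned by inv_kin().
tf::Transform T06;          // desired wrist/tool pose, filled in elsewhere
ik_solution sols[8];
if (inv_kin(T06, dh_left, sols) == 0) {
  for (int i = 0; i < 8; i++) {
    if (sols[i].invalid == ik_invalid) continue;
    // sols[i].th1, .th2, .d3, .th4, .th5, .th6 hold one candidate branch;
    // apply joint limits or pick the branch closest to the current pose here.
  }
}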
Example #9
void
AmclNode::laserReceived(const sensor_msgs::LaserScanConstPtr& laser_scan)
{
    last_laser_received_ts_ = ros::Time::now();
    if( map_ == NULL ) {
        return;
    }
    boost::recursive_mutex::scoped_lock lr(configuration_mutex_);
    int laser_index = -1;

    // Do we have the base->base_laser Tx yet?
    if(frame_to_laser_.find(laser_scan->header.frame_id) == frame_to_laser_.end())
    {
        ROS_DEBUG("Setting up laser %d (frame_id=%s)\n", (int)frame_to_laser_.size(), laser_scan->header.frame_id.c_str());
        lasers_.push_back(new AMCLLaser(*laser_));
        lasers_update_.push_back(true);
        laser_index = frame_to_laser_.size();

        tf::Stamped<tf::Pose> ident (tf::Transform(tf::createIdentityQuaternion(),
                                     tf::Vector3(0,0,0)),
                                     ros::Time(), laser_scan->header.frame_id);
        tf::Stamped<tf::Pose> laser_pose;
        try
        {
            this->tf_->transformPose(base_frame_id_, ident, laser_pose);
        }
        catch(tf::TransformException& e)
        {
            ROS_ERROR("Couldn't transform from %s to %s, "
                      "even though the message notifier is in use",
                      laser_scan->header.frame_id.c_str(),
                      base_frame_id_.c_str());
            return;
        }

        pf_vector_t laser_pose_v;
        laser_pose_v.v[0] = laser_pose.getOrigin().x();
        laser_pose_v.v[1] = laser_pose.getOrigin().y();
        // laser mounting angle gets computed later -> set to 0 here!
        laser_pose_v.v[2] = 0;
        lasers_[laser_index]->SetLaserPose(laser_pose_v);
        ROS_DEBUG("Received laser's pose wrt robot: %.3f %.3f %.3f",
                  laser_pose_v.v[0],
                  laser_pose_v.v[1],
                  laser_pose_v.v[2]);

        frame_to_laser_[laser_scan->header.frame_id] = laser_index;
    } else {
        // we have the laser pose, retrieve laser index
        laser_index = frame_to_laser_[laser_scan->header.frame_id];
    }

    // Where was the robot when this scan was taken?
    tf::Stamped<tf::Pose> odom_pose;
    pf_vector_t pose;
    if(!getOdomPose(odom_pose, pose.v[0], pose.v[1], pose.v[2],
                    laser_scan->header.stamp, base_frame_id_))
    {
        ROS_ERROR("Couldn't determine robot's pose associated with laser scan");
        return;
    }


    pf_vector_t delta = pf_vector_zero();

    if(pf_init_)
    {
        // Compute change in pose
        //delta = pf_vector_coord_sub(pose, pf_odom_pose_);
        delta.v[0] = pose.v[0] - pf_odom_pose_.v[0];
        delta.v[1] = pose.v[1] - pf_odom_pose_.v[1];
        delta.v[2] = angle_diff(pose.v[2], pf_odom_pose_.v[2]);

        // See if we should update the filter
        bool update = fabs(delta.v[0]) > d_thresh_ ||
                      fabs(delta.v[1]) > d_thresh_ ||
                      fabs(delta.v[2]) > a_thresh_;
        update = update || m_force_update;
        m_force_update=false;

        // Set the laser update flags
        if(update)
            for(unsigned int i=0; i < lasers_update_.size(); i++)
                lasers_update_[i] = true;
    }

    bool force_publication = false;
    if(!pf_init_)
    {
        // Pose at last filter update
        pf_odom_pose_ = pose;

        // Filter is now initialized
        pf_init_ = true;

        // Should update sensor data
        for(unsigned int i=0; i < lasers_update_.size(); i++)
            lasers_update_[i] = true;

        force_publication = true;

        resample_count_ = 0;
    }
    // If the robot has moved, update the filter
    else if(pf_init_ && lasers_update_[laser_index])
    {
        //printf("pose\n");
        //pf_vector_fprintf(pose, stdout, "%.3f");

        AMCLOdomData odata;
        odata.pose = pose;
        // HACK
        // Modify the delta in the action data so the filter gets
        // updated correctly
        odata.delta = delta;

        // Use the action data to update the filter
        odom_->UpdateAction(pf_, (AMCLSensorData*)&odata);

        // Pose at last filter update
        //this->pf_odom_pose = pose;
    }

    bool resampled = false;
    // If the robot has moved, update the filter
    if(lasers_update_[laser_index])
    {
        AMCLLaserData ldata;
        ldata.sensor = lasers_[laser_index];
        ldata.range_count = laser_scan->ranges.size();

        // To account for lasers that are mounted upside-down, we determine the
        // min, max, and increment angles of the laser in the base frame.
        //
        // Construct min and max angles of laser, in the base_link frame.
        tf::Quaternion q;
        q.setRPY(0.0, 0.0, laser_scan->angle_min);
        tf::Stamped<tf::Quaternion> min_q(q, laser_scan->header.stamp,
                                          laser_scan->header.frame_id);
        q.setRPY(0.0, 0.0, laser_scan->angle_min + laser_scan->angle_increment);
        tf::Stamped<tf::Quaternion> inc_q(q, laser_scan->header.stamp,
                                          laser_scan->header.frame_id);
        try
        {
            tf_->transformQuaternion(base_frame_id_, min_q, min_q);
            tf_->transformQuaternion(base_frame_id_, inc_q, inc_q);
        }
        catch(tf::TransformException& e)
        {
            ROS_WARN("Unable to transform min/max laser angles into base frame: %s",
                     e.what());
            return;
        }

        double angle_min = tf::getYaw(min_q);
        double angle_increment = tf::getYaw(inc_q) - angle_min;

        // wrapping angle to [-pi .. pi]
        angle_increment = fmod(angle_increment + 5*M_PI, 2*M_PI) - M_PI;

        ROS_DEBUG("Laser %d angles in base frame: min: %.3f inc: %.3f", laser_index, angle_min, angle_increment);

        // Apply range min/max thresholds, if the user supplied them
        if(laser_max_range_ > 0.0)
            ldata.range_max = std::min(laser_scan->range_max, (float)laser_max_range_);
        else
            ldata.range_max = laser_scan->range_max;
        double range_min;
        if(laser_min_range_ > 0.0)
            range_min = std::max(laser_scan->range_min, (float)laser_min_range_);
        else
            range_min = laser_scan->range_min;
        // The AMCLLaserData destructor will free this memory
        ldata.ranges = new double[ldata.range_count][2];
        ROS_ASSERT(ldata.ranges);
        for(int i=0; i<ldata.range_count; i++)
        {
            // amcl doesn't (yet) have a concept of min range.  So we'll map short
            // readings to max range.
            if(laser_scan->ranges[i] <= range_min)
                ldata.ranges[i][0] = ldata.range_max;
            else
                ldata.ranges[i][0] = laser_scan->ranges[i];
            // Compute bearing
            ldata.ranges[i][1] = angle_min +
                                 (i * angle_increment);
        }

        lasers_[laser_index]->UpdateSensor(pf_, (AMCLSensorData*)&ldata);

        lasers_update_[laser_index] = false;

        pf_odom_pose_ = pose;

        // Resample the particles
        if(!(++resample_count_ % resample_interval_))
        {
            pf_update_resample(pf_);
            resampled = true;
        }

        pf_sample_set_t* set = pf_->sets + pf_->current_set;
        ROS_DEBUG("Num samples: %d\n", set->sample_count);

        // Publish the resulting cloud
        // TODO: set maximum rate for publishing
        if (!m_force_update) {
            geometry_msgs::PoseArray cloud_msg;
            cloud_msg.header.stamp = ros::Time::now();
            cloud_msg.header.frame_id = global_frame_id_;
            cloud_msg.poses.resize(set->sample_count);
            for(int i=0; i<set->sample_count; i++)
            {
                tf::poseTFToMsg(tf::Pose(tf::createQuaternionFromYaw(set->samples[i].pose.v[2]),
                                         tf::Vector3(set->samples[i].pose.v[0],
                                                     set->samples[i].pose.v[1], 0)),
                                cloud_msg.poses[i]);
            }
            particlecloud_pub_.publish(cloud_msg);
        }
    }

    if(resampled || force_publication)
    {
        // Read out the current hypotheses
        double max_weight = 0.0;
        int max_weight_hyp = -1;
        std::vector<amcl_hyp_t> hyps;
        hyps.resize(pf_->sets[pf_->current_set].cluster_count);
        for(int hyp_count = 0;
                hyp_count < pf_->sets[pf_->current_set].cluster_count; hyp_count++)
        {
            double weight;
            pf_vector_t pose_mean;
            pf_matrix_t pose_cov;
            if (!pf_get_cluster_stats(pf_, hyp_count, &weight, &pose_mean, &pose_cov))
            {
                ROS_ERROR("Couldn't get stats on cluster %d", hyp_count);
                break;
            }

            hyps[hyp_count].weight = weight;
            hyps[hyp_count].pf_pose_mean = pose_mean;
            hyps[hyp_count].pf_pose_cov = pose_cov;

            if(hyps[hyp_count].weight > max_weight)
            {
                max_weight = hyps[hyp_count].weight;
                max_weight_hyp = hyp_count;
            }
        }

        if(max_weight > 0.0)
        {
            ROS_DEBUG("Max weight pose: %.3f %.3f %.3f",
                      hyps[max_weight_hyp].pf_pose_mean.v[0],
                      hyps[max_weight_hyp].pf_pose_mean.v[1],
                      hyps[max_weight_hyp].pf_pose_mean.v[2]);

            /*
               puts("");
               pf_matrix_fprintf(hyps[max_weight_hyp].pf_pose_cov, stdout, "%6.3f");
               puts("");
             */

            geometry_msgs::PoseWithCovarianceStamped p;
            // Fill in the header
            p.header.frame_id = global_frame_id_;
            p.header.stamp = laser_scan->header.stamp;
            // Copy in the pose
            p.pose.pose.position.x = hyps[max_weight_hyp].pf_pose_mean.v[0];
            p.pose.pose.position.y = hyps[max_weight_hyp].pf_pose_mean.v[1];
            tf::quaternionTFToMsg(tf::createQuaternionFromYaw(hyps[max_weight_hyp].pf_pose_mean.v[2]),
                                  p.pose.pose.orientation);
            // Copy in the covariance, converting from 3-D to 6-D
            pf_sample_set_t* set = pf_->sets + pf_->current_set;
            for(int i=0; i<2; i++)
            {
                for(int j=0; j<2; j++)
                {
                    // Report the overall filter covariance, rather than the
                    // covariance for the highest-weight cluster
                    //p.covariance[6*i+j] = hyps[max_weight_hyp].pf_pose_cov.m[i][j];
                    p.pose.covariance[6*i+j] = set->cov.m[i][j];
                }
            }
            // Report the overall filter covariance, rather than the
            // covariance for the highest-weight cluster
            //p.covariance[6*5+5] = hyps[max_weight_hyp].pf_pose_cov.m[2][2];
            p.pose.covariance[6*5+5] = set->cov.m[2][2];

            /*
               printf("cov:\n");
               for(int i=0; i<6; i++)
               {
               for(int j=0; j<6; j++)
               printf("%6.3f ", p.covariance[6*i+j]);
               puts("");
               }
             */

            pose_pub_.publish(p);
            last_published_pose = p;

            ROS_DEBUG("New pose: %6.3f %6.3f %6.3f",
                      hyps[max_weight_hyp].pf_pose_mean.v[0],
                      hyps[max_weight_hyp].pf_pose_mean.v[1],
                      hyps[max_weight_hyp].pf_pose_mean.v[2]);

            // subtracting base to odom from map to base and send map to odom instead
            tf::Stamped<tf::Pose> odom_to_map;
            try
            {
                tf::Transform tmp_tf(tf::createQuaternionFromYaw(hyps[max_weight_hyp].pf_pose_mean.v[2]),
                                     tf::Vector3(hyps[max_weight_hyp].pf_pose_mean.v[0],
                                                 hyps[max_weight_hyp].pf_pose_mean.v[1],
                                                 0.0));
                tf::Stamped<tf::Pose> tmp_tf_stamped (tmp_tf.inverse(),
                                                      laser_scan->header.stamp,
                                                      base_frame_id_);
                this->tf_->transformPose(odom_frame_id_,
                                         tmp_tf_stamped,
                                         odom_to_map);
            }
            catch(tf::TransformException)
            {
                ROS_DEBUG("Failed to subtract base to odom transform");
                return;
            }

            latest_tf_ = tf::Transform(tf::Quaternion(odom_to_map.getRotation()),
                                       tf::Point(odom_to_map.getOrigin()));
            latest_tf_valid_ = true;

            if (tf_broadcast_ == true)
            {
                // We want to send a transform that is good up until a
                // tolerance time so that odom can be used
                ros::Time transform_expiration = (laser_scan->header.stamp +
                                                  transform_tolerance_);
                tf::StampedTransform tmp_tf_stamped(latest_tf_.inverse(),
                                                    transform_expiration,
                                                    global_frame_id_, odom_frame_id_);
                this->tfb_->sendTransform(tmp_tf_stamped);
                sent_first_transform_ = true;
            }
        }
        else
        {
            ROS_ERROR("No pose!");
        }
    }
    else if(latest_tf_valid_)
    {
        if (tf_broadcast_ == true)
        {
            // Nothing changed, so we'll just republish the last transform, to keep
            // everybody happy.
            ros::Time transform_expiration = (laser_scan->header.stamp +
                                              transform_tolerance_);
            tf::StampedTransform tmp_tf_stamped(latest_tf_.inverse(),
                                                transform_expiration,
                                                global_frame_id_, odom_frame_id_);
            this->tfb_->sendTransform(tmp_tf_stamped);
        }

        // Is it time to save our last pose to the param server
        ros::Time now = ros::Time::now();
        if((save_pose_period.toSec() > 0.0) &&
                (now - save_pose_last_time) >= save_pose_period)
        {
            // We need to apply the last transform to the latest odom pose to get
            // the latest map pose to store.  We'll take the covariance from
            // last_published_pose.
            tf::Pose map_pose = latest_tf_.inverse() * odom_pose;
            double yaw,pitch,roll;
            map_pose.getBasis().getEulerYPR(yaw, pitch, roll);

            private_nh_.setParam("initial_pose_x", map_pose.getOrigin().x());
            private_nh_.setParam("initial_pose_y", map_pose.getOrigin().y());
            private_nh_.setParam("initial_pose_a", yaw);
            private_nh_.setParam("initial_cov_xx",
                                 last_published_pose.pose.covariance[6*0+0]);
            private_nh_.setParam("initial_cov_yy",
                                 last_published_pose.pose.covariance[6*1+1]);
            private_nh_.setParam("initial_cov_aa",
                                 last_published_pose.pose.covariance[6*5+5]);
            save_pose_last_time = now;
        }
    }

}
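The "subtracting base to odom" step near the end is the usual AMCL pattern for broadcasting map->odom: the estimated map->base pose is inverted, expressed in the odom frame via tf, and inverted again before being sent. Algebraically it reduces to the sketch below (frame-naming convention T_parent_child; this restates what the code above does rather than adding new API):

// map->odom from the filter's map->base estimate and odometry's odom->base:
tf::Transform T_map_base;    // highest-weight hypothesis from the particle filter
tf::Transform T_odom_base;   // current odometry pose from tf
tf::Transform T_map_odom = T_map_base * T_odom_base.inverse();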
Example #10
void CheckerboardPoseMeasurementEdge::setError(tf::Transform cameraToMarker) {
	// left top point of the marker denotes the origin of the marker's coordinate system
	double xOrigin, yOrigin;
	this->frameImageConverter->project(cameraToMarker.inverse(), xOrigin,
			yOrigin);

	double rows = this->measurement.marker_data[CheckerboardDetection::idx_rows];
	double cols = this->measurement.marker_data[CheckerboardDetection::idx_cols];
	double squareLength =
			this->measurement.marker_data[measurement.marker_data.size() - 1]; // always the last element
	squareLength = squareLength / 1000; // conversion from [mm] to [m]

	// point on the x axis of the marker's coordinate system
	double xAxisX, yAxisX;
	tf::Transform alongXAxis(tf::Quaternion(0, 0, 0, 1),
			tf::Vector3(squareLength, 0, 0));
	this->frameImageConverter->project((alongXAxis * cameraToMarker).inverse(),
			xAxisX, yAxisX);

	// point on the y axis of the marker's coordinate system
	double xAxisY, yAxisY;
	tf::Transform alongYAxis(tf::Quaternion(0, 0, 0, 1),
			tf::Vector3(0, squareLength, 0));
	this->frameImageConverter->project((alongYAxis * cameraToMarker).inverse(),
			xAxisY, yAxisY);

	// set error
	double offset = 4;

	// top left corner
	this->_error[0] = measurement.marker_data[offset + 0 + 0] - xOrigin;
	this->_error[1] = measurement.marker_data[offset + 0 + 1] - yOrigin;

	// one right from the top left corner
	this->_error[2] = measurement.marker_data[offset + (2 * 1) + 0] - xAxisX;
	this->_error[3] = measurement.marker_data[offset + (2 * 1) + 1] - yAxisX;

	// one down from the top left corner
	this->_error[4] = measurement.marker_data[offset + (2 * cols) + 0] - xAxisY;
	this->_error[5] = measurement.marker_data[offset + (2 * cols) + 1] - yAxisY;

	if (false) {
		cv::Mat outImg;
		cv_bridge::CvImagePtr input_bridge;
		input_bridge = cv_bridge::toCvCopy(measurement.image,
				sensor_msgs::image_encodings::BGR8);
		outImg = input_bridge->image;

		cv::line(outImg, cv::Point(xOrigin - 3, yOrigin - 3),
				cv::Point(xOrigin + 3, yOrigin + 3), CV_RGB(255, 0, 0));
		cv::line(outImg, cv::Point(xOrigin - 3, yOrigin + 3),
				cv::Point(xOrigin + 3, yOrigin - 3), CV_RGB(255, 0, 0));
		cv::circle(outImg,
				cv::Point(measurement.marker_data[offset],
						measurement.marker_data[offset + 1]), 3,
				CV_RGB(255, 0, 0));
		cv::line(outImg, cv::Point(xAxisX - 3, yAxisX - 3),
				cv::Point(xAxisX + 3, yAxisX + 3), CV_RGB(0, 255, 0));
		cv::line(outImg, cv::Point(xAxisX - 3, yAxisX + 3),
				cv::Point(xAxisX + 3, yAxisX - 3), CV_RGB(0, 255, 0));
		cv::circle(outImg,
				cv::Point(measurement.marker_data[offset + (2 * 1)],
						measurement.marker_data[offset + (2 * 1) + 1]), 3,
				CV_RGB(0, 255, 0));
		cv::line(outImg, cv::Point(xAxisY - 3, yAxisY - 3),
				cv::Point(xAxisY + 3, yAxisY + 3), CV_RGB(0, 0, 255));
		cv::line(outImg, cv::Point(xAxisY - 3, yAxisY + 3),
				cv::Point(xAxisY + 3, yAxisY - 3), CV_RGB(0, 0, 255));
		cv::circle(outImg,
				cv::Point(measurement.marker_data[offset + (2 * cols)],
						measurement.marker_data[offset + (2 * cols) + 1]), 3,
				CV_RGB(0, 0, 255));
		string filename = "/tmp/" + measurement.id + "-pose.jpg";
		cv::imwrite(filename, outImg);
	}
}
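The corner indexing above assumes marker_data stores, after a 4-element header (offset = 4), the detected checkerboard corners as flattened (x, y) pairs in row-major order: the corner one square right of the origin sits at offset + 2*1 and the corner one square below at offset + 2*cols. A tiny helper makes that layout explicit (it restates the indexing inferred from the code above):

// Index of corner (row, col) in the flattened marker_data layout used above.
inline size_t cornerIndex(size_t offset, size_t cols, size_t row, size_t col) {
  return offset + 2 * (row * cols + col);   // x lives at this index, y at +1
}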
Example #11
void TabletopSegmentor::straightenPoints(PointCloudType &points, const tf::Transform& table_plane_trans, 
		      const tf::Transform& table_plane_trans_flat)
{
  tf::Transform trans = table_plane_trans_flat * table_plane_trans.inverse();
  pcl_ros::transformPointCloud(points, points, trans);
}
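pcl_ros::transformPointCloud applies trans = table_plane_trans_flat * table_plane_trans.inverse() to every point: assuming table_plane_trans is the (tilted) table pose in the cloud's frame, each point is taken into table coordinates and re-inserted through the flattened pose. The per-point equivalent, as a sketch:

// Per-point equivalent of straightenPoints() (sketch): p' = T_flat * T_tilted^-1 * p
tf::Vector3 straightenOne(const tf::Vector3& p,
                          const tf::Transform& table_plane_trans,
                          const tf::Transform& table_plane_trans_flat) {
  return table_plane_trans_flat * (table_plane_trans.inverse() * p);
}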
Example #12
int main(int argc, char** argv)
{
	ros::init(argc, argv, "squirtle_localization_node");

	ROS_INFO("Squirtle Localization for ROS v0.1");

    ros::NodeHandle n;
	ros::NodeHandle pn("~");

	double rate;
	pn.param("rate", rate, 2.0);

    pn.param("true_north_compensation", true_north_compensation, 0.0499164166);
    
	pn.param<std::string>("global_frame_id", global_frame_id, "/world");
    pn.param<std::string>("odom_frame_id", odom_frame_id, "odom");
	
	tf_listener = new tf::TransformListener();

	message_filters::Subscriber<sensor_msgs::Imu> * imu_sub = new message_filters::Subscriber<sensor_msgs::Imu>();
	imu_sub->subscribe(n, "imu/data", 20);
    tf::MessageFilter<sensor_msgs::Imu> * tf_filter_imu = new tf::MessageFilter<sensor_msgs::Imu>(*imu_sub, *tf_listener, odom_frame_id, 20);
    tf_filter_imu->registerCallback( boost::bind(imuCallback, _1) );

	message_filters::Subscriber<sensor_msgs::NavSatFix> * gps_sub = new message_filters::Subscriber<sensor_msgs::NavSatFix>();
	gps_sub->subscribe(n, "gps/fix", 20);
    tf::MessageFilter<sensor_msgs::NavSatFix> * tf_filter_gps = new tf::MessageFilter<sensor_msgs::NavSatFix>(*gps_sub, *tf_listener, odom_frame_id, 20);
    tf_filter_gps->registerCallback( boost::bind(gpsCallback, _1) );

	/*message_filters::Subscriber<nav_msgs::Odometry> * odom_sub = new message_filters::Subscriber<nav_msgs::Odometry>();
	odom_sub->subscribe(n, "odom", 20);
    tf::MessageFilter<nav_msgs::Odometry> * tf_filter = new tf::MessageFilter<nav_msgs::Odometry>(*odom_sub, *tf_listener, base_footprint_frame_id, 20);
    tf_filter->registerCallback( boost::bind(odomCallback, _1) );*/

    ros::Publisher odom_pub = n.advertise<nav_msgs::Odometry>("global_odom", 10);

	static tf::TransformBroadcaster tf_broadcaster;

	ros::Rate r(rate);
	while(n.ok())
	{
        tf::Transform t_world_odom_rotation = t_world_imu * t_odom_imu.inverse();
        tf::Transform t_world_odom_translation = t_world_gps * t_odom_gps.inverse();

        tf::Transform t_world_odom;
        t_world_odom.setOrigin(t_world_odom_translation.getOrigin());
        t_world_odom.setRotation((t_world_odom_rotation.getRotation()).normalized());

        tf_broadcaster.sendTransform(tf::StampedTransform(t_world_odom, ros::Time::now(), global_frame_id, odom_frame_id));

        // Odometry header
        nav_msgs::Odometry odom_msg;
        odom_msg.header.frame_id = global_frame_id;
        odom_msg.header.stamp = ros::Time::now();

        // Odometry position
        tf::Vector3 position = t_world_odom_translation.getOrigin();
        odom_msg.pose.pose.position.x = position.getX();
        odom_msg.pose.pose.position.y = position.getY();
        odom_msg.pose.pose.position.z = position.getZ();

        // Odometry orientation
        tf::quaternionTFToMsg((t_world_odom_rotation.getRotation()).normalized(), odom_msg.pose.pose.orientation);

        // TODO: Finish this, add covariance...
        //odom_msg.pose.covariance...

        ros::Duration delta_t = odom_msg.header.stamp - last_odom.header.stamp;

        if(delta_t.toSec() < 1.0)
        {
            // Odometry linear velocity
            odom_msg.twist.twist.linear.x = (odom_msg.pose.pose.position.x - last_odom.pose.pose.position.x)/delta_t.toSec();
            odom_msg.twist.twist.linear.y = (odom_msg.pose.pose.position.y - last_odom.pose.pose.position.y)/delta_t.toSec();
            odom_msg.twist.twist.linear.z = (odom_msg.pose.pose.position.z - last_odom.pose.pose.position.z)/delta_t.toSec();

            // TODO: Fix this... We are assuming that the robot cannot rotate fast enough, so shortest_angular_distance might work.
            // Odometry angular velocity
            odom_msg.twist.twist.angular.x = 0.0;
            odom_msg.twist.twist.angular.y = 0.0;
            odom_msg.twist.twist.angular.z = (angles::shortest_angular_distance(tf::getYaw(odom_msg.pose.pose.orientation), tf::getYaw(last_odom.pose.pose.orientation)))/delta_t.toSec();

            // TODO: Finish this, add covariance...
            //odom_msg.twist.covariance...

            odom_pub.publish(odom_msg);
        }

        last_odom = odom_msg;

		ros::spinOnce();
		r.sleep();
	}
	
	return 0;
}
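The main loop reads t_world_imu, t_odom_imu, t_world_gps and t_odom_gps, which the imuCallback and gpsCallback registered above are expected to fill in; those callbacks are not part of the example. A skeletal sketch of the IMU side, with the frame handling and the use of true_north_compensation marked as assumptions:

// Assumed globals written by the callbacks and read by the main loop above.
tf::Transform t_world_imu, t_odom_imu, t_world_gps, t_odom_gps;

void imuCallback(const sensor_msgs::ImuConstPtr& msg)
{
  // Orientation of the IMU w.r.t. the world, corrected for true north (assumption).
  tf::Quaternion q;
  tf::quaternionMsgToTF(msg->orientation, q);
  t_world_imu = tf::Transform(tf::createQuaternionFromYaw(true_north_compensation) * q,
                              tf::Vector3(0, 0, 0));

  // Pose of the IMU frame in the odom frame, obtained through tf.
  tf::StampedTransform odom_to_imu;
  try {
    tf_listener->lookupTransform(odom_frame_id, msg->header.frame_id,
                                 msg->header.stamp, odom_to_imu);
  } catch (tf::TransformException& ex) {
    ROS_WARN("%s", ex.what());
    return;
  }
  t_odom_imu = odom_to_imu;
  // gpsCallback would fill t_world_gps / t_odom_gps analogously from the GPS fix.
}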
Example #13
  void imageCb(const sensor_msgs::ImageConstPtr& l_image,
               const sensor_msgs::CameraInfoConstPtr& l_cam_info,
               const sensor_msgs::ImageConstPtr& r_image,
               const sensor_msgs::CameraInfoConstPtr& r_cam_info)
  {
    ROS_INFO("In callback, seq = %u", l_cam_info->header.seq);
    
    // Convert ROS messages for use with OpenCV
    cv::Mat left, right;
    try {
      left  = l_bridge_.imgMsgToCv(l_image, "mono8");
      right = r_bridge_.imgMsgToCv(r_image, "mono8");
    }
    catch (sensor_msgs::CvBridgeException& e) {
      ROS_ERROR("Conversion error: %s", e.what());
      return;
    }
    cam_model_.fromCameraInfo(l_cam_info, r_cam_info);

    frame_common::CamParams cam_params;
    cam_params.fx = cam_model_.left().fx();
    cam_params.fy = cam_model_.left().fy();
    cam_params.cx = cam_model_.left().cx();
    cam_params.cy = cam_model_.left().cy();
    cam_params.tx = cam_model_.baseline();

    if (vslam_system_.addFrame(cam_params, left, right)) {
      /// @todo Not rely on broken encapsulation of VslamSystem here
      int size = vslam_system_.sba_.nodes.size();
      if (size % 2 == 0) {
        // publish markers
        sba::drawGraph(vslam_system_.sba_, cam_marker_pub_, point_marker_pub_);
      }

      // Publish VO tracks
      if (vo_tracks_pub_.getNumSubscribers() > 0) {
        frame_common::drawVOtracks(left, vslam_system_.vo_.frames, vo_display_);
        IplImage ipl = vo_display_;
        sensor_msgs::ImagePtr msg = sensor_msgs::CvBridge::cvToImgMsg(&ipl);
        msg->header = l_cam_info->header;
        vo_tracks_pub_.publish(msg, l_cam_info);
      }
      
      // Refine large-scale SBA.
      const int LARGE_SBA_INTERVAL = 10;
      if (size > 4 && size % LARGE_SBA_INTERVAL == 0) {
        ROS_INFO("Running large SBA on %d nodes", size);
        vslam_system_.refine();
      }
      
      if (pointcloud_pub_.getNumSubscribers() > 0 && size % 2 == 0)
        publishRegisteredPointclouds(vslam_system_.sba_, vslam_system_.frames_, pointcloud_pub_);
      
      // Publish odometry data to tf.
      if (0) // TODO: Change this to parameter.
      {
        ros::Time stamp = l_cam_info->header.stamp;
        std::string image_frame = l_cam_info->header.frame_id;
        Eigen::Vector4d trans = -vslam_system_.sba_.nodes.back().trans;
        Eigen::Quaterniond rot = vslam_system_.sba_.nodes.back().qrot.conjugate();
        
        trans.head<3>() = rot.toRotationMatrix()*trans.head<3>(); 
        
        tf_transform_.setOrigin(tf::Vector3(trans(0), trans(1), trans(2)));
        tf_transform_.setRotation(tf::Quaternion(rot.x(), rot.y(), rot.z(), rot.w()) );
        
        tf::Transform simple_transform;
        simple_transform.setOrigin(tf::Vector3(0, 0, 0));
        simple_transform.setRotation(tf::Quaternion(.5, -.5, .5, .5));
        
        tf_broadcast_.sendTransform(tf::StampedTransform(tf_transform_, stamp, image_frame, "visual_odom"));
        tf_broadcast_.sendTransform(tf::StampedTransform(simple_transform, stamp, "visual_odom", "pgraph"));
      
      
        // Publish odometry data on topic.
        if (odom_pub_.getNumSubscribers() > 0)
        {
          tf::StampedTransform base_to_image;
          tf::Transform base_to_visodom;
         
          try
          {
            tf_listener_.lookupTransform(image_frame, "/base_footprint",
                                 stamp, base_to_image);
          }
          catch (tf::TransformException ex)
          {
              ROS_WARN("%s",ex.what());
              return;
          }
                                 
          base_to_visodom = tf_transform_.inverse() * base_to_image;
          
          geometry_msgs::PoseStamped pose;
          nav_msgs::Odometry odom;
          pose.header.frame_id = "/visual_odom";
          pose.pose.position.x = base_to_visodom.getOrigin().x();
          pose.pose.position.y = base_to_visodom.getOrigin().y();
          pose.pose.position.z = base_to_visodom.getOrigin().z();
          pose.pose.orientation.x = base_to_visodom.getRotation().x();
          pose.pose.orientation.y = base_to_visodom.getRotation().y();
          pose.pose.orientation.z = base_to_visodom.getRotation().z();
          pose.pose.orientation.w = base_to_visodom.getRotation().w();
          
          odom.header.stamp = stamp;
          odom.header.frame_id = "/visual_odom";
          odom.child_frame_id = "/base_footprint";
          odom.pose.pose = pose.pose;
          /* odom.pose.covariance[0] = 1;
          odom.pose.covariance[7] = 1;
          odom.pose.covariance[14] = 1;
          odom.pose.covariance[21] = 1;
          odom.pose.covariance[28] = 1;
          odom.pose.covariance[35] = 1; */
          odom_pub_.publish(odom);
        }
      }
    }
  }
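The image conversion at the top of the callback uses the long-deprecated sensor_msgs::CvBridge. A sketch of the same conversion with the current cv_bridge API; this swaps in a different library and is not part of the original example:

// cv_bridge equivalent of the imgMsgToCv() calls above (sketch).
// Requires: #include <cv_bridge/cv_bridge.h>
cv::Mat left, right;
try {
  left  = cv_bridge::toCvCopy(l_image, sensor_msgs::image_encodings::MONO8)->image;
  right = cv_bridge::toCvCopy(r_image, sensor_msgs::image_encodings::MONO8)->image;
} catch (cv_bridge::Exception& e) {
  ROS_ERROR("Conversion error: %s", e.what());
  return;
}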