bool planning_environment::getLatestIdentityTransform(const std::string& to_frame,
        const std::string& from_frame,
        tf::TransformListener& tf,
        tf::Transform& pose)
{
    geometry_msgs::PoseStamped temp_pose;
    temp_pose.pose.orientation.w = 1.0;
    temp_pose.header.frame_id = from_frame;
    geometry_msgs::PoseStamped trans_pose;
    ros::Time tm;
    std::string err_string;
    if (tf.getLatestCommonTime(to_frame, temp_pose.header.frame_id, tm, &err_string) == tf::NO_ERROR) {
        temp_pose.header.stamp = tm;
        try {
            tf.transformPose(to_frame, temp_pose, trans_pose);
        } catch(tf::TransformException& ex) {
            ROS_ERROR_STREAM("Unable to transform object from frame " << temp_pose.header.frame_id << " to " << to_frame << " error is " << ex.what());
            return false;
        }
    } else {
        ROS_ERROR_STREAM("No latest time for transforming " << from_frame << " to " << to_frame);
        return false;
    }
    tf::poseMsgToTF(trans_pose.pose, pose);
    return true;
}
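A minimal usage sketch for the helper above (the frame names and the listener here are hypothetical, not from the original source):

// hedged usage sketch -- "base_link" and "gripper_link" are example frames
tf::TransformListener listener;
ros::Duration(1.0).sleep(); // give the listener time to fill its buffer
tf::Transform gripper_in_base;
if (planning_environment::getLatestIdentityTransform("base_link", "gripper_link",
                                                     listener, gripper_in_base)) {
    ROS_INFO("gripper origin in base_link: x=%.3f", gripper_in_base.getOrigin().x());
}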
  void commandForce(const geometry_msgs::WrenchStamped::ConstPtr &msg)
  {
    F_des_ << msg->wrench.force.x, msg->wrench.force.y, msg->wrench.force.z, 
              msg->wrench.torque.x, msg->wrench.torque.y, msg->wrench.torque.z;
    if(msg->header.frame_id == root_name_)
      return;

    geometry_msgs::PoseStamped in_root;
    in_root.pose.orientation.w = 1.0;
    in_root.header.frame_id = msg->header.frame_id;

    try {
      tf_.waitForTransform(root_name_, msg->header.frame_id, msg->header.stamp, ros::Duration(0.1));
      tf_.transformPose(root_name_, in_root, in_root);
    }
    catch (const tf::TransformException &ex)
    {
      ROS_ERROR("Failed to transform: %s", ex.what());
      return;
    }
    
    Eigen::Affine3d t;
    
    tf::poseMsgToEigen(in_root.pose, t);

    F_des_.head<3>() = t.linear() * F_des_.head<3>();
    F_des_.tail<3>() = t.linear() * F_des_.tail<3>();
  }
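A wrench is a pair of free vectors, so only the rotation t.linear() applies when re-expressing it; the translation is correctly ignored above. A hedged publisher-side sketch of a command this callback would consume (topic and frame names are assumptions):

// hypothetical publisher; "command_force" and "ft_sensor_link" are assumed names
ros::NodeHandle nh;
ros::Publisher force_pub = nh.advertise<geometry_msgs::WrenchStamped>("command_force", 1);
geometry_msgs::WrenchStamped cmd;
cmd.header.stamp = ros::Time::now();
cmd.header.frame_id = "ft_sensor_link"; // any frame tf can relate to root_name_
cmd.wrench.force.z = -5.0;              // 5 N along the sensor's -z axis
force_pub.publish(cmd);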
Example #3
void PathTracer::pose_cb(const geometry_msgs::PoseStampedConstPtr& msg)
{
	std::string current_frame = msg->header.frame_id;
	geometry_msgs::PoseStamped p;

	if(current_frame.compare(path_frame) == 0)
	{		
		p.header.frame_id = msg->header.frame_id;
		p.header.stamp = msg->header.stamp;
		p.pose.position.x = msg->pose.position.x;
		p.pose.position.y = msg->pose.position.y;
		p.pose.position.z = msg->pose.position.z;
		p.pose.orientation.x = msg->pose.orientation.x;
		p.pose.orientation.y = msg->pose.orientation.y;
		p.pose.orientation.z = msg->pose.orientation.z;
		p.pose.orientation.w = msg->pose.orientation.w;
		path.poses.push_back(p);
	}
	else
	{
		try
		{
			// transform the pose into the path frame before appending it
			tf_listener.transformPose(path_frame, *msg, p);
			path.poses.push_back(p);
		}
		catch (const tf::TransformException& ex)
		{
			ROS_ERROR("transform exception: %s", ex.what());
		}

	}

	path_pub.publish(path);
}
void planning_environment::updateAttachedObjectBodyPoses(planning_environment::CollisionModels* cm,
        planning_models::KinematicState& state,
        tf::TransformListener& tf)
{
    cm->bodiesLock();

    const std::map<std::string, std::map<std::string, bodies::BodyVector*> >& link_att_objects = cm->getLinkAttachedObjects();

    if(link_att_objects.empty()) {
        cm->bodiesUnlock();
        return;
    }

    //this gets all the attached bodies in their correct current positions according to tf
    geometry_msgs::PoseStamped ps;
    ps.pose.orientation.w = 1.0;
    for(std::map<std::string, std::map<std::string, bodies::BodyVector*> >::const_iterator it = link_att_objects.begin();
            it != link_att_objects.end();
            it++) {
        ps.header.frame_id = it->first;
        std::string es;
        if (tf.getLatestCommonTime(cm->getWorldFrameId(), it->first, ps.header.stamp, &es) != tf::NO_ERROR) {
            ROS_INFO_STREAM("Problem transforming into fixed frame from " << it->first << ".  Error string " << es);
            continue;
        }
        geometry_msgs::PoseStamped psout;
        try {
            tf.transformPose(cm->getWorldFrameId(), ps, psout);
        } catch(tf::TransformException& ex) {
            ROS_WARN_STREAM("Unable to transform " << it->first << " to " << cm->getWorldFrameId() << ": " << ex.what());
            continue;
        }
        tf::Transform link_trans;
        tf::poseMsgToTF(psout.pose, link_trans);
        state.updateKinematicStateWithLinkAt(it->first, link_trans);
        cm->updateAttachedBodyPosesForLink(state, it->first);
    }
    cm->bodiesUnlock();
}
//Baxter-robot specific:
//IK is written for tool flange--called right_hand--w/rt torso frame
//need to convert gripper request, generic name "generic_gripper_frame", expressed w/rt some frame (e.g. system_ref_frame)
// into pose of flange w/rt torso frame;  return this result as an Eigen::Affine3d (though frame id's are lost)
Eigen::Affine3d ArmMotionInterface::xform_gripper_pose_to_affine_flange_wrt_torso(geometry_msgs::PoseStamped des_pose_gripper) {
    Eigen::Affine3d affine_flange_wrt_torso;
    tf::StampedTransform flange_stf, flange_wrt_torso_stf;
    geometry_msgs::PoseStamped flange_gmps, flange_wrt_torso_gmps;
    //convert des gripper pose to a stamped transform, so we can do more transforms
    ROS_WARN("xform_gripper_pose_to_affine_flange_wrt_torso: input pose-stamped: ");
    xformUtils.printStampedPose(des_pose_gripper);
    tf::StampedTransform gripper_stf = xformUtils.convert_poseStamped_to_stampedTransform(des_pose_gripper, "generic_gripper_frame"); 
    //convert to transform of corresponding tool flange w/rt whatever reference frame_id
    ROS_INFO("gripper_stf: ");
    xformUtils.printStampedTf(gripper_stf);
    ROS_INFO("flange_stf");
    xformUtils.printStampedTf(flange_stf);    
    bool mult_ok = xformUtils.multiply_stamped_tfs(gripper_stf,generic_toolflange_frame_wrt_gripper_frame_stf_,flange_stf);
    if (!mult_ok) { ROS_WARN("stf multiply not legal! "); } //should not happen
    //ROS_INFO("corresponding flange frame: ");
    //xformUtils.printStampedTf(flange_stf);
    //convert stamped ‘tf::StampedTransform to geometry_msgs::PoseStamped
    flange_gmps = xformUtils.get_pose_from_stamped_tf(flange_stf);
    //change the reference frame from whatever to "torso":
    tfListener_->transformPose("torso", flange_gmps, flange_wrt_torso_gmps);    
    ROS_INFO("corresponding flange frame w/rt torso frame: ");
    xformUtils.printStampedPose(flange_wrt_torso_gmps);  
    //convert this to an affine.  parent and child frame id's are lost, so we'll have to remember what this means
    affine_flange_wrt_torso = xformUtils.transformPoseToEigenAffine3d(flange_wrt_torso_gmps);
    return affine_flange_wrt_torso;
}
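A hedged caller sketch (the input pose variable is hypothetical); note that the returned affine no longer carries frame ids:

//hypothetical usage: des_pose_gripper is a PoseStamped in any tf-known frame
Eigen::Affine3d A_flange = xform_gripper_pose_to_affine_flange_wrt_torso(des_pose_gripper);
//A_flange is the tool-flange pose w/rt torso, ready for the flange-frame IK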
void TransformPose::publish_all(tf::TransformListener& listener)
{

        geometry_msgs::PoseStamped odom_pose;
        odom_pose.header.frame_id = "/base_link";

        //we'll just use the most recent transform available for our simple example
        odom_pose.header.stamp = ros::Time();

        //just an arbitrary point in space
        odom_pose.pose.position.x = 0.0;
        odom_pose.pose.position.y = 0.0;
        odom_pose.pose.position.z = 0.0;

        odom_pose.pose.orientation.x = 0.0;
        odom_pose.pose.orientation.y = 0.0;
        odom_pose.pose.orientation.z = 0.0;
        odom_pose.pose.orientation.w = 1.0;


        try{
                ros::Time now = ros::Time::now();
                listener.waitForTransform("/odom", "/base_link", now, ros::Duration(5.0));
                geometry_msgs::PoseStamped base_pose;
                listener.transformPose("/odom", odom_pose, base_pose);

		my_odom.header.stamp = base_pose.header.stamp;
		my_odom.pose.pose.position.x = base_pose.pose.position.x;
		my_odom.pose.pose.position.y = base_pose.pose.position.y;
		my_odom.pose.pose.position.z = base_pose.pose.position.z;
		my_odom.pose.pose.orientation = base_pose.pose.orientation;

		//my_maximus_odom.twist.twist.linear.x = sqrt(pow(base_pose.pose.position.x - old_x, 2) + pow(base_pose.pose.position.y - old_y, 2)) / (my_maximus_odom.header.stamp.toSec() - old_time.toSec());
		my_odom.twist.twist.linear.x = xspeed.data;
		my_odom.twist.twist.linear.y = 0;
		my_odom.twist.twist.linear.z = 0;
		my_odom.twist.twist.angular.x = 0;
		my_odom.twist.twist.angular.y = 0;
		my_odom.twist.twist.angular.z = tspeed.data;
		//my_maximus_odom.twist.twist.angular.z = 1.1 * (tf::getYaw (my_maximus_odom.pose.pose.orientation) - old_theta) / (my_maximus_odom.header.stamp.toSec() - old_time.toSec());

		odom_pub.publish(my_odom);

		//old_x = base_pose.pose.position.x;
		//old_y = base_pose.pose.position.y;
		//old_theta = tf::getYaw (my_odom.pose.pose.orientation);
		//old_time = my_odom.header.stamp;
/*
                ROS_INFO("base_laser: (%.2f, %.2f. %.2f) -----> base_link: (%.2f, %.2f, %.2f) at time %.2f",
                        my_maximus_odom.pose.pose.position.x, my_maximus_odom.pose.pose.position.y, my_maximus_odom.pose.pose.position.z,
                        my_maximus_odom.twist.twist.linear.x, my_maximus_odom.twist.twist.linear.y, my_maximus_odom.twist.twist.angular.z, base_pose.header.stamp.toSec());
*/
        }
        catch(tf::TransformException& ex){
                ROS_ERROR("Received an exception trying to transform a point from \"odom\" to \"base_link\": %s", ex.what());
        }


}
void findPeople(const tf::TransformListener& tf_listener) {
	// initialize message
	hide_n_seek::PeopleFinder people_msg;
	people_msg.header.stamp = ros::Time();
	people_msg.header.frame_id = "base_link";
	people_msg.sensor_range = sensor_range;
	people_msg.sensor_fov = sensor_fov;
	bool foundPerson = false;

	try {
		fakePersonPoseMap.header.stamp = ros::Time(); // trust me, it is still there
		fakePersonPoseMap.header.frame_id = "/map";

		// transform
		geometry_msgs::PoseStamped fakePersonPoseBase;
		tf_listener.transformPose ("base_link", fakePersonPoseMap, fakePersonPoseBase);
		//ROS_INFO_STREAM( " fake person = "
		//  << fakePersonPoseBase.pose.position.x
		//  << ", " << fakePersonPoseBase.pose.position.y
		//  << ", " << fakePersonPoseBase.pose.position.z);

		// search for people
		float range = sqrt(pow(fakePersonPoseBase.pose.position.x,2) + pow(fakePersonPoseBase.pose.position.y,2));
		if(fakePersonPoseBase.pose.position.x > 0 && range <= sensor_range) {
			if(fakePersonPoseBase.pose.position.y == 0) foundPerson = true;
			else {
				float bearing = PI/2 - atan(fakePersonPoseBase.pose.position.x/fabs(fakePersonPoseBase.pose.position.y));
				if(bearing < sensor_fov/2) foundPerson = true;
				//ROS_INFO_STREAM(bearing);
			}
		}

		// make sensor reading noisy
		int randNum = rand() % 100;
		if(foundPerson && randNum > percent_true_detected) foundPerson = false;
		else if(!foundPerson && randNum <= percent_true_undetected) {
			foundPerson = true;
			fakePersonPoseBase.pose.position.x = sensor_range / 2 * cos(tan(sensor_fov / 4));
			fakePersonPoseBase.pose.position.y = sensor_range / 2 * sin(tan(sensor_fov / 4));
		}
		if(foundPerson) people_msg.people.push_back(fakePersonPoseBase.pose); // this is more realistic (Kinect node would give relative position)
	}
	catch (const tf::TransformException& ex) {
		ROS_ERROR("%s",ex.what());
	}

	// publish current sensor shadow
	pub_sensor_shadow.publish(sensor_shadow);

	// publish any people we found
	pub_people_finder.publish(people_msg);
	ROS_INFO_STREAM("found " << people_msg.people.size() << " people");
}
bool ExcavaROBArmKinematics::getPositionFK(excavaROB_arm_kinematics_msgs::GetPositionFK::Request &request,
                                           excavaROB_arm_kinematics_msgs::GetPositionFK::Response &response) {
    KDL::Frame p_out;
    KDL::JntArray jnt_pos_in;
    geometry_msgs::PoseStamped pose;
    tf::Stamped<tf::Pose> tf_pose;

    if (num_joints_arm_ != request.fk_link_names.size()) {
        ROS_ERROR("The request does not have the same dimension as the arm_chain joints.");
        return false;
    }

    jnt_pos_in.resize(num_joints_arm_);
    for (unsigned int i = 0; i < num_joints_arm_; i++) {
        int jnt_index = getJointIndex(request.joint_state.name[i]);
        if (jnt_index >= 0)
            jnt_pos_in(jnt_index) = request.joint_state.position[i];
    }

    response.pose_stamped.resize(num_joints_arm_);
    response.fk_link_names.resize(num_joints_arm_);

    bool valid = true;
    for (unsigned int i = 0; i < num_joints_arm_; i++) {
        int segmentIndex = getKDLSegmentIndex(request.fk_link_names[i]);
        ROS_DEBUG("End effector index: %d", segmentIndex);
        ROS_DEBUG("Chain indices: %d", arm_chain_.getNrOfSegments());

        if (fk_solver_->JntToCart(jnt_pos_in, p_out, segmentIndex) >= 0) {
            tf_pose.frame_id_ = root_name_;
            tf_pose.stamp_ = ros::Time();
            tf::PoseKDLToTF(p_out, tf_pose);
            try {
                tf_listener_.transformPose(request.header.frame_id, tf_pose, tf_pose);
            } catch (...) {
                ROS_ERROR("ExcavaROB Kinematics: Could not transform FK pose to frame: %s", request.header.frame_id.c_str());
                response.error_code.val = response.error_code.FRAME_TRANSFORM_FAILURE;
                return false;
            }
            tf::poseStampedTFToMsg(tf_pose, pose);
            response.pose_stamped[i] = pose;
            response.fk_link_names[i] = request.fk_link_names[i];
            response.error_code.val = response.error_code.SUCCESS;
        } else {
            ROS_ERROR("ExcavaROB Kinematics: Could not compute FK for %s", request.fk_link_names[i].c_str());
            response.error_code.val = response.error_code.NO_FK_SOLUTION;
            valid = false;
        }
    }
    return true;
}
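A hedged client-side sketch for the FK service above ("get_fk" and the input vectors are assumptions; the request/response fields come from the handler's signature):

// hypothetical client; "get_fk", fk_links, joint_names, joint_positions are assumed
excavaROB_arm_kinematics_msgs::GetPositionFK fk_srv;
fk_srv.request.header.frame_id = "base_link";     // frame to express the results in
fk_srv.request.fk_link_names = fk_links;          // one entry per arm joint
fk_srv.request.joint_state.name = joint_names;
fk_srv.request.joint_state.position = joint_positions;
if (ros::service::call("get_fk", fk_srv) &&
    fk_srv.response.error_code.val == fk_srv.response.error_code.SUCCESS) {
    ROS_INFO_STREAM("FK pose for " << fk_srv.response.fk_link_names[0] << ": "
                    << fk_srv.response.pose_stamped[0]);
}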
Example #9
bool Kinematics::getPositionIK(moveit_msgs::GetPositionIK::Request &request,
                               moveit_msgs::GetPositionIK::Response &response) {

    geometry_msgs::PoseStamped pose_msg_in = request.ik_request.pose_stamped;
    tf::Stamped<tf::Pose> transform;
    tf::Stamped<tf::Pose> transform_root;
    tf::poseStampedMsgToTF( pose_msg_in, transform );

    //Do the IK
    KDL::JntArray jnt_pos_in;
    KDL::JntArray jnt_pos_out;
    jnt_pos_in.resize(num_joints);
    for (unsigned int i=0; i < num_joints; i++) {
        // nalt: mod for moveit msgs
        int tmp_index = getJointIndex(request.ik_request.robot_state.joint_state.name[i]);
        if (tmp_index >=0) {
            jnt_pos_in(tmp_index) = request.ik_request.robot_state.joint_state.position[i];
        } else {
            ROS_ERROR("i: %d, No joint index for %s",i,request.ik_request.robot_state.joint_state.name[i].c_str());
        }
    }

    //Convert F to our root_frame
    try {
        tf_listener.transformPose(root_name, transform, transform_root);
    } catch (...) {
        ROS_ERROR("Could not transform IK pose to frame: %s", root_name.c_str());
        response.error_code.val = response.error_code.FRAME_TRANSFORM_FAILURE;
        return false;
    }

    KDL::Frame F_dest;
    tf::TransformTFToKDL(transform_root, F_dest);

    int ik_valid = ik_solver_pos->CartToJnt(jnt_pos_in, F_dest, jnt_pos_out);

    if (ik_valid >= 0) {
        response.solution.joint_state.name = info.joint_names;
        response.solution.joint_state.position.resize(num_joints);
        for (unsigned int i=0; i < num_joints; i++) {
            response.solution.joint_state.position[i] = jnt_pos_out(i);
            ROS_DEBUG("IK Solution: %s %d: %f",response.solution.joint_state.name[i].c_str(),i,jnt_pos_out(i));
        }
        response.error_code.val = response.error_code.SUCCESS;
        return true;
    } else {
        ROS_DEBUG("An IK solution could not be found");
        response.error_code.val = response.error_code.NO_IK_SOLUTION;
        return true;
    }
}
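A hedged client sketch for this IK handler ("get_ik", target_pose, and seed_state are assumptions; the fields match the handler above):

// hypothetical client for the IK service above
moveit_msgs::GetPositionIK ik_srv;
ik_srv.request.ik_request.pose_stamped = target_pose;           // desired pose, any tf-known frame
ik_srv.request.ik_request.robot_state.joint_state = seed_state; // seed, as read in the loop above
if (ros::service::call("get_ik", ik_srv) &&
    ik_srv.response.error_code.val == ik_srv.response.error_code.SUCCESS) {
    // ik_srv.response.solution.joint_state now holds the joint solution
}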
Example #10
    void obj_real(const object_recognition_msgs::RecognizedObjectArray::ConstPtr& msg)
    {
      try
      {
        int obj_id = 0;
        object_recognition_msgs::RecognizedObjectArray::_objects_type::const_iterator it;
        for (it = msg->objects.begin(); it != msg->objects.end(); ++it)
        {
          msg_obj_cam_.header = msg->header;
          msg_obj_cam_.header.stamp = ros::Time(0);
          msg_obj_cam_.pose = it->pose.pose.pose;

          listener_.transformPose(target_frame_, msg_obj_cam_, msg_obj_pose);

          moveit_msgs::CollisionObject msg_obj_collision;
          msg_obj_collision.header = msg->header;
          msg_obj_collision.header.frame_id = target_frame_;

          object_recognition_msgs::GetObjectInformation obj_info;
          obj_info.request.type = it->type;

          if (getMeshFromDB(obj_info))
          {
            msg_obj_collision.meshes.push_back(obj_info.response.information.ground_truth_mesh);
            msg_obj_collision.mesh_poses.push_back(msg_obj_pose.pose);
          }
          else
          {
            msg_obj_collision.primitive_poses.push_back(msg_obj_pose.pose);
            msg_obj_collision.primitives.push_back(msg_cylinder_);
          }

          msg_obj_collision.operation = moveit_msgs::CollisionObject::ADD;

          msg_obj_collision.type = it->type;
          std::stringstream ss;
          ss << obj_id << "_" << it->type.key;
          msg_obj_collision.id = ss.str();
          ++obj_id;

          object_moveit_pub_.publish(msg_obj_collision);
        }
      }
      catch (const tf::TransformException& ex)
      {
        ROS_ERROR("%s",ex.what());
        ros::Duration(1.0).sleep();
      }
    }
Example #11
 geometry_msgs::Pose2D computeError(const ros::Time & now, const occgrid_planner::TrajectoryElement & te) {
     tf::StampedTransform transform;
     listener_.waitForTransform(base_frame_,frame_id_,now,ros::Duration(1.0));
     geometry_msgs::PoseStamped pose,error;
     pose.header.stamp = now;
     pose.header.frame_id = frame_id_;
     pose.pose  = te.pose;
     listener_.transformPose(base_frame_,pose,error);
     geometry_msgs::Pose2D result;
     result.x = error.pose.position.x;
     result.y = error.pose.position.y;
     result.theta = tf::getYaw(error.pose.orientation);
     // printf("Current error: %+6.2f %+6.2f %+6.2f\n",result.x,result.y,result.theta*180./M_PI);
     return result;
 }
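A hedged sketch of how such a body-frame error is typically consumed (the gains and cmd_vel_pub are assumptions, not from the original):

// hypothetical proportional tracking step; the gains 1.0 and 2.0 are made up
geometry_msgs::Pose2D err = computeError(ros::Time::now(), te); // te: current trajectory element
geometry_msgs::Twist cmd;
cmd.linear.x  = 1.0 * err.x;      // drive toward the reference point
cmd.angular.z = 2.0 * err.theta;  // turn toward the reference heading
cmd_vel_pub.publish(cmd);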
  void commandPose(const geometry_msgs::PoseStamped::ConstPtr &command)
  {
    geometry_msgs::PoseStamped in_root;
    try {
      tf_.waitForTransform(root_name_, command->header.frame_id, command->header.stamp, ros::Duration(0.1));
      tf_.transformPose(root_name_, *command, in_root);
    }
    catch (const tf::TransformException &ex)
    {
      ROS_ERROR("Failed to transform: %s", ex.what());
      return;
    }

    tf::poseMsgToEigen(in_root.pose, x_desi_);
  }
Example #13
  //  Callback to register with tf::MessageFilter to be called when transforms are available
  void msgCallback(const boost::shared_ptr<const geometry_msgs::PoseStamped>& point_ptr)
  {
    geometry_msgs::PoseStamped point_out;
    try
    {
      std::string str="odom";
      tf_.transformPose(str, *point_ptr, point_out);

      ROS_INFO("(x:%f y:%f z:%f)\n",
             point_out.pose.position.x,
             point_out.pose.position.y,
             point_out.pose.position.z);
    }
    catch (tf::TransformException &ex)
    {
      ROS_ERROR("Failure %s\n", ex.what()); //Print exception which was caught
    }
  }
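The comment above mentions tf::MessageFilter; a hedged wiring sketch following the standard tf tutorial pattern (topic, handle, and class names are assumptions), so the callback only fires once the transform to "odom" is available:

// hypothetical setup; "pose_in", nh_, and PoseDrawer are assumed names
message_filters::Subscriber<geometry_msgs::PoseStamped> pose_sub_;
tf::MessageFilter<geometry_msgs::PoseStamped>* tf_filter_;
pose_sub_.subscribe(nh_, "pose_in", 10);
tf_filter_ = new tf::MessageFilter<geometry_msgs::PoseStamped>(pose_sub_, tf_, "odom", 10);
tf_filter_->registerCallback(boost::bind(&PoseDrawer::msgCallback, this, _1));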
  void poseCallback(const geometry_msgs::PoseStamped::ConstPtr& worldPose) {
    geometry_msgs::PoseStamped worldGoal;
    worldGoal.header = worldPose->header;
    worldGoal.pose = worldPose->pose;
    geometry_msgs::PoseStamped baseGoal;
    static tf::TransformListener ls;

    try{
      ls.transformPose("base_link", worldGoal, baseGoal);
      pubpose.publish(baseGoal);
      ROS_INFO("Pose Data Received: x=%f, y=%f, Transformed: x=%f, y=%f", 
        worldPose->pose.position.x, worldPose->pose.position.y,
        baseGoal.pose.position.x, baseGoal.pose.position.y);
    }
    catch (const tf::TransformException& ex){
      ROS_ERROR("%s",ex.what());
      // ros::Duration(1.0).sleep();
    }
  }
  void getPoseInRobotFrame(std::vector<SM_COND> nextRobotCond_w)
  { 
    //transforming position into robot frame
    geometry_msgs::PoseStamped goal;
    geometry_msgs::PoseStamped yawMapRobot;
    geometry_msgs::PoseStamped goalRobotFrame;

    // position
    goal.header.stamp= ros::Time(0);
    goal.header.frame_id= "map";
    goal.pose.position.x= nextRobotCond_w[0].x;
    goal.pose.position.y= nextRobotCond_w[1].x;
    goal.pose.position.z= 0.0;
    tf::Quaternion q = tf::createQuaternionFromRPY(0.0,0.0,nextRobotCond_w[5].x);
    tf::quaternionTFToMsg(q,goal.pose.orientation);
    
    _listener.waitForTransform("base_footprint", "map",
                               ros::Time(0), ros::Duration(1.0));
    _listener.transformPose("base_footprint", goal, goalRobotFrame); 
  
    _nextRobotCond_r[0].x = goalRobotFrame.pose.position.x;
    _nextRobotCond_r[1].x = goalRobotFrame.pose.position.y; 
    _nextRobotCond_r[5].x = tf::getYaw(goalRobotFrame.pose.orientation);

    // velocity and acceleration
    //record the starting transform from the odometry to the base frame
    _listener.lookupTransform("map", "base_footprint",
                              ros::Time(0), world_transform);
    
    double roll, pitch, yaw;
    q = world_transform.getRotation();
    tf::Matrix3x3(q).getRPY(roll, pitch, yaw);

    _nextRobotCond_r[0].v = cos(yaw)*nextRobotCond_w[0].v + sin(yaw)*nextRobotCond_w[1].v;
    _nextRobotCond_r[1].v = -sin(yaw)*nextRobotCond_w[0].v + cos(yaw)*nextRobotCond_w[1].v; 
    _nextRobotCond_r[5].v = nextRobotCond_w[5].v;

    _nextRobotCond_r[0].a = cos(yaw)*nextRobotCond_w[0].a + sin(yaw)*nextRobotCond_w[1].a;
    _nextRobotCond_r[1].a = -sin(yaw)*nextRobotCond_w[0].a + cos(yaw)*nextRobotCond_w[1].a;
    _nextRobotCond_r[5].a = nextRobotCond_w[5].a;
    
  }
geometry_msgs::PoseStamped getPose(const std::string ref_frame_id, const std::string frame_id,
                                   const tf::TransformListener & tf)
{
  geometry_msgs::PoseStamped pose;
  pose.header.stamp = ros::Time::now() - ros::Duration(0.1); // add a little tolerance (because of the "extrapolation into the future" issue)
  pose.header.frame_id = frame_id;
  pose.pose.orientation.w = 1;
  tf::StampedTransform tr;
  try
  {
    if (!tf.waitForTransform(ref_frame_id, pose.header.frame_id, pose.header.stamp, ros::Duration(0.1)))
      ROS_WARN("Could not transform in time");
    tf.transformPose(ref_frame_id, pose, pose);
  }
  catch (tf::TransformException & e)
  {
    ROS_WARN_STREAM(e.what());
  }
  return pose;
}
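For example (the frame names here are assumptions):

// hypothetical call: current pose of the tool frame expressed in the map frame
geometry_msgs::PoseStamped tool_in_map = getPose("map", "tool0", tf_listener);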
Example #17
void GridConstructionNode::buildGrid (const ros::WallTimerEvent& scan)
{
    if (last_cloud_) {
        {
            Lock lock(mutex_);
            clouds_.push_back(last_cloud_);
            last_cloud_.reset();
        }

        ROS_DEBUG_NAMED ("build_grid", "Building grid with %zu scans", clouds_.size());

        // Figure out current position
        gm::PoseStamped identity, odom_pose;
        identity.pose.orientation = tf::createQuaternionMsgFromYaw(0);
        identity.header.frame_id = sensor_frame_;
        identity.header.stamp = ros::Time();
        tf_.transformPose(fixed_frame_, identity, odom_pose);

        // Set up map dimensions
        nm::MapMetaData info;
        info.origin.position.x = odom_pose.pose.position.x-local_grid_size_/2;
        info.origin.position.y = odom_pose.pose.position.y-local_grid_size_/2;
        info.origin.orientation = tf::createQuaternionMsgFromYaw(0);
        info.resolution = resolution_;
        info.width = local_grid_size_/resolution_;
        info.height = local_grid_size_/resolution_;

        nm::OccupancyGrid fake_grid;
        fake_grid.info = info;
        gu::OverlayClouds overlay = gu::createCloudOverlay(fake_grid, fixed_frame_, 0.1, 10, 2);
        vector<CloudConstPtr> clouds(clouds_.begin(), clouds_.end());
        BOOST_FOREACH  (CloudConstPtr cloud, clouds_)
        gu::addCloud(&overlay, cloud);
        nm::OccupancyGrid::ConstPtr grid = gu::getGrid(overlay);

        ROS_DEBUG_NAMED ("build_grid", "Done building grid");

        grid_pub_.publish(grid);
    }
}
bool convertPoseToRootFrame(const geometry_msgs::PoseStamped &pose_msg, 
                            geometry_msgs::PoseStamped &pose_msg_out, 
                            const std::string &root_frame,
                            tf::TransformListener& tf)
{
  geometry_msgs::PoseStamped pose_msg_in = pose_msg;
  ROS_DEBUG("Request:\nframe_id: %s\nPosition: %f %f %f\n:Orientation: %f %f %f %f\n",
            pose_msg_in.header.frame_id.c_str(),
            pose_msg_in.pose.position.x,
            pose_msg_in.pose.position.y,
            pose_msg_in.pose.position.z,
            pose_msg_in.pose.orientation.x,
            pose_msg_in.pose.orientation.y,
            pose_msg_in.pose.orientation.z,
            pose_msg_in.pose.orientation.w);
  pose_msg_out = pose_msg;
  tf::Stamped<tf::Pose> pose_stamped;
  poseStampedMsgToTF(pose_msg_in, pose_stamped);
  
  if (!tf.canTransform(root_frame, pose_stamped.frame_id_, pose_stamped.stamp_))
  {
    std::string err;    
    if (tf.getLatestCommonTime(pose_stamped.frame_id_, root_frame, pose_stamped.stamp_, &err) != tf::NO_ERROR)
    {
      ROS_ERROR("pr2_arm_ik:: Cannot transform from '%s' to '%s'. TF said: %s",pose_stamped.frame_id_.c_str(),root_frame.c_str(), err.c_str());
      return false;
    }
  }    
  try
  {
    tf.transformPose(root_frame, pose_stamped, pose_stamped);
  }
  catch(...)
  {
    ROS_ERROR("pr2_arm_ik:: Cannot transform from '%s' to '%s'",pose_stamped.frame_id_.c_str(),root_frame.c_str());
    return false;
  } 
  tf::poseStampedTFToMsg(pose_stamped,pose_msg_out);   
  return true;
}
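A hedged caller sketch (the goal pose and frame name are assumptions):

// hypothetical usage: express a goal pose in the arm's root frame before running IK
geometry_msgs::PoseStamped goal_in_root;
if (convertPoseToRootFrame(goal_pose, goal_in_root, "torso_lift_link", tf_listener)) {
  // goal_in_root is now stamped in torso_lift_link and safe to hand to the solver
}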
Example #19
bool
SlamKarto::getOdomPose(karto::Pose2& karto_pose, const ros::Time& t)
{
    // Get the robot's pose
    tf::Stamped<tf::Pose> ident (tf::Transform(tf::createQuaternionFromRPY(0,0,0),
                                               tf::Vector3(0,0,0)), t, base_frame_);
    tf::Stamped<tf::Transform> odom_pose;
    try
    {
        tf_.transformPose(odom_frame_, ident, odom_pose);
    }
    catch(const tf::TransformException& e)
    {
        ROS_WARN("Failed to compute odom pose, skipping scan (%s)", e.what());
        return false;
    }
    double yaw = tf::getYaw(odom_pose.getRotation());

    karto_pose = karto::Pose2(odom_pose.getOrigin().x(),
                         odom_pose.getOrigin().y(),
                         yaw);
    return true;
}
Example #20
	void executeCB(const segbot_arm_manipulation::TabletopGraspGoalConstPtr  &goal)
	{
		if (goal->action_name == segbot_arm_manipulation::TabletopGraspGoal::GRASP){
		
			if (goal->cloud_clusters.size() == 0){
				ROS_INFO("[segbot_tabletop_grap_as.cpp] No object point clouds received...aborting");
				as_.setAborted(result_);
				return;
			}
		
			
			ROS_INFO("[segbot_tabletop_grap_as.cpp] Received action request...proceeding.");
			listenForArmData(40.0);
			
			//the result
			segbot_arm_manipulation::TabletopGraspResult result;
		
			//get the data out of the goal
			Eigen::Vector4f plane_coef_vector;
			for (int i = 0; i < 4; i ++)
				plane_coef_vector(i)=goal->cloud_plane_coef[i];
			int selected_object = goal->target_object_cluster_index;
			
			PointCloudT::Ptr target_object (new PointCloudT);
			pcl::PCLPointCloud2 target_object_pc2;
			pcl_conversions::toPCL(goal->cloud_clusters.at(goal->target_object_cluster_index),target_object_pc2);
			pcl::fromPCLPointCloud2(target_object_pc2,*target_object);
			
			ROS_INFO("[segbot_tabletop_grasp_as.cpp] Publishing point cloud...");
			cloud_grasp_pub.publish(goal->cloud_clusters.at(goal->target_object_cluster_index));
			
			//wait for response at 5 Hz
			listenForGrasps(40.0);
			
			ROS_INFO("[segbot_tabletop_grasp_as.cpp] Heard %i grasps",(int)current_grasps.grasps.size());
		
			//next, compute approach and grasp poses for each detected grasp
			
			//wait for transform from visual space to arm space
			
			std::string sensor_frame_id = goal->cloud_clusters.at(goal->target_object_cluster_index).header.frame_id;
			
			listener.waitForTransform(sensor_frame_id, "mico_link_base", ros::Time(0), ros::Duration(3.0));

			//here, we'll store all grasp options that pass the filters
			std::vector<GraspCartesianCommand> grasp_commands;

			for (unsigned int i = 0; i < current_grasps.grasps.size(); i++){

				GraspCartesianCommand gc_i = segbot_arm_manipulation::grasp_utils::constructGraspCommand(current_grasps.grasps.at(i), HAND_OFFSET_APPROACH, HAND_OFFSET_GRASP, sensor_frame_id);

				//filter 1: if the grasp is too close to plane, reject it
				bool ok_with_plane = segbot_arm_manipulation::grasp_utils::checkPlaneConflict(gc_i,plane_coef_vector,MIN_DISTANCE_TO_PLANE);
				
				//for filter 2, the grasps need to be in the arm's frame of reference
				listener.transformPose("mico_link_base", gc_i.approach_pose, gc_i.approach_pose);
				listener.transformPose("mico_link_base", gc_i.grasp_pose, gc_i.grasp_pose);

				
				//filter 2: apply grasp filter method in request
				bool passed_filter = passesFilter(goal->grasp_filter_method,gc_i);
				
				if (passed_filter && ok_with_plane){
					ROS_INFO("Found grasp fine with filter and plane");
					
					
					
					//filter two -- if IK fails
					moveit_msgs::GetPositionIK::Response  ik_response_approach = segbot_arm_manipulation::computeIK(nh_,gc_i.approach_pose);
					
					if (ik_response_approach.error_code.val == 1){
						moveit_msgs::GetPositionIK::Response  ik_response_grasp = segbot_arm_manipulation::computeIK(nh_,gc_i.grasp_pose);
				
						if (ik_response_grasp.error_code.val == 1){
							
							ROS_INFO("...grasp fine with IK");
							
							
							//now check to see how close the two sets of joint angles are -- if the joint configurations for the approach and grasp poses differ by too much, the grasp will not be accepted
							std::vector<double> D = segbot_arm_manipulation::getJointAngleDifferences(ik_response_approach.solution.joint_state, ik_response_grasp.solution.joint_state);
							
							double sum_d = 0;
							for (int p = 0; p < D.size(); p++){
								sum_d += D[p];
							}
						
							
							if (sum_d < ANGULAR_DIFF_THRESHOLD){
								//ROS_INFO("Angle diffs for grasp %i: %f, %f, %f, %f, %f, %f",(int)grasp_commands.size(),D[0],D[1],D[2],D[3],D[4],D[5]);
								
								//ROS_INFO("Sum diff: %f",sum_d);
							
								//store the IK results
								gc_i.approach_q = ik_response_approach.solution.joint_state;
								gc_i.grasp_q = ik_response_grasp.solution.joint_state;
								
								grasp_commands.push_back(gc_i);
								
								ROS_INFO("...fine with continuity");
							}
						}
					}
				}
			}
			
			//check to see if all potential grasps have been filtered out
			if (grasp_commands.size() == 0){
				ROS_WARN("[segbot_tabletop_grasp_as.cpp] No feasible grasps found. Aborting.");
				as_.setAborted(result_);
				return;
			}
			
			//make sure we're working with the correct tool pose
			listenForArmData(30.0);
			
			int selected_grasp_index = -1;

			if (goal->grasp_selection_method == segbot_arm_manipulation::TabletopGraspGoal::CLOSEST_ORIENTATION_SELECTION){
				//find the grasp with the closest orientation to the current pose
				double min_diff = 1000000.0;
				for (unsigned int i = 0; i < grasp_commands.size(); i++){
					double d_i = segbot_arm_manipulation::grasp_utils::quat_angular_difference(grasp_commands.at(i).approach_pose.pose.orientation, current_pose.pose.orientation);
					
					ROS_INFO("Distance for pose %i:\t%f",(int)i,d_i);
					if (d_i < min_diff){
						selected_grasp_index = (int)i;
						min_diff = d_i;
					}
				}
			}
			else if (goal->grasp_selection_method == segbot_arm_manipulation::TabletopGraspGoal::RANDOM_SELECTION){

				srand (time(NULL));
				selected_grasp_index = rand() % grasp_commands.size(); 
				ROS_INFO("Randomly selected grasp = %i",selected_grasp_index);     
			}
			else if (goal->grasp_selection_method == segbot_arm_manipulation::TabletopGraspGoal::CLOSEST_JOINTSPACE_SELECTION){
				
				double min_diff = 1000000.0;
				for (unsigned int i = 0; i < grasp_commands.size(); i++){
					std::vector<double> D_i = segbot_arm_manipulation::getJointAngleDifferences(grasp_commands.at(i).approach_q, current_state);
					
					double sum_d = 0;
					for (int p = 0; p < D_i.size(); p++)
						sum_d += D_i[p];
					
					if (sum_d < min_diff){
						selected_grasp_index = (int)i;
						min_diff = sum_d;
					}
				}
			}
			
			if (selected_grasp_index == -1){
				ROS_WARN("[segbot_tabletop_grasp_as.cpp] Grasp selection failed. Aborting.");
				as_.setAborted(result_);
				return;
			}
			
			//compute RPY for target pose
			ROS_INFO("Selected approach pose:");
			ROS_INFO_STREAM(grasp_commands.at(selected_grasp_index).approach_pose);

			//publish individual pose for visualization purposes
			pose_pub.publish(grasp_commands.at(selected_grasp_index).approach_pose);
			
			//close fingers while moving
			segbot_arm_manipulation::closeHand();
			
			//move to approach pose -- do it twice to correct 
			segbot_arm_manipulation::moveToPoseMoveIt(nh_,grasp_commands.at(selected_grasp_index).approach_pose);
			segbot_arm_manipulation::moveToPoseMoveIt(nh_,grasp_commands.at(selected_grasp_index).approach_pose);
			
			//open fingers
			segbot_arm_manipulation::openHand();
		
			//move to grasp pose
			segbot_arm_manipulation::moveToPoseMoveIt(nh_,grasp_commands.at(selected_grasp_index).grasp_pose);
		
			//close hand
			segbot_arm_manipulation::closeHand();
			
			result_.success = true;
			as_.setSucceeded(result_);
			return;
		}
		else if (goal->action_name == segbot_arm_manipulation::TabletopGraspGoal::HANDOVER){
			//TO DO: move to handover position
			
			ROS_INFO("Starting handover action...");
			
			//listen for haptic feedback
			
			double rate = 40;
			
			ros::Rate r(rate);

			double total_grav_free_effort = 0;
			double total_delta;
			double delta_effort[6];

			listenForArmData(40.0);
			sensor_msgs::JointState prev_effort_state = current_effort;


			double elapsed_time = 0;

			while (ros::ok()){
		
				ros::spinOnce();
				
				total_delta=0.0;
				for (int i = 0; i < 6; i ++){
					delta_effort[i] = fabs(current_effort.effort[i]-prev_effort_state.effort[i]);
					total_delta+=delta_effort[i];
					ROS_INFO("Total delta=%f",total_delta);
				}
				
				if (total_delta > fabs(FORCE_HANDOVER_THRESHOLD)){
					ROS_INFO("[segbot_tabletop_grasp_as.cpp] Force detected");
					
					//now open the hand
					segbot_arm_manipulation::openHand();
					
					result_.success = true;
					as_.setSucceeded(result_);
					return;
					
				}
				
				r.sleep();
				elapsed_time+=(1.0)/rate;
				
				if (goal->timeout_seconds > 0 && elapsed_time > goal->timeout_seconds){
					
					ROS_WARN("Handover action timed out...");
					
					result_.success = false;
					as_.setAborted(result_);
					
					return;
				}
			}

			result_.success = true;
			as_.setSucceeded(result_);
		}
		else if (goal->action_name == segbot_arm_manipulation::TabletopGraspGoal::HANDOVER_FROM_HUMAN){
			//open fingers
			segbot_arm_manipulation::openHand();
			
			//update readings
			listenForArmData(40.0);
			
			bool result = waitForForce(FORCE_HANDOVER_THRESHOLD,goal->timeout_seconds);
		
			if (result){
				segbot_arm_manipulation::closeHand();
				result_.success = true;
				as_.setSucceeded(result_);
			}
			else {
				result_.success = false;
				as_.setAborted(result_);
			}
		
		}
	
	
	}
Example #21
void MapMaker::image_callback(const sensor_msgs::ImageConstPtr& msg) {
//  printf("callback called\n");
  try
	{
	
	// if you want to work with color images, change from mono8 to bgr8
	  if(input_image==NULL){
		  input_image = cvCloneImage(bridge.imgMsgToCv(msg, "mono8"));
		  rotationImage=cvCloneImage(input_image);
		 // printf("cloned image\n");
		}
		else{
		  cvCopy(bridge.imgMsgToCv(msg, "mono8"),input_image);
		 // printf("copied image\n");
		}
	}
	catch (sensor_msgs::CvBridgeException& e)
	{
		ROS_ERROR("Could not convert from '%s' to 'mono8'.", msg->encoding.c_str());
		return;
	}
	
	if(input_image!=NULL) {
    //get tf transform here and put in map
    ros::Time acquisition_time = msg->header.stamp;
    geometry_msgs::PoseStamped basePose;
    geometry_msgs::PoseStamped mapPose;
    basePose.pose.orientation.w=1.0;
    ros::Duration timeout(3);
    basePose.header.frame_id="/base_link";
    mapPose.header.frame_id="/map";
    try {
      tf_listener_.waitForTransform("/base_link", "/map", acquisition_time, timeout);
       
      tf_listener_.transformPose("/map", acquisition_time,basePose,"/base_link",mapPose);
	    
	    printf("pose #%d %f %f %f\n",pic_number,mapPose.pose.position.x, mapPose.pose.position.y, tf::getYaw(mapPose.pose.orientation));
	    
	    
	    /*
	    char buffer [50];
	    sprintf (buffer, "/tmp/test%02d.jpg", pic_number);
			if(!cvSaveImage(buffer,input_image,0)) printf("Could not save: %s\n",buffer);
			else printf("picture taken!!!\n");
	    pic_number++;
	    */
	    
	    cv::Point_<double> center;
      center.x=input_image->width/2;
      center.y=input_image->height/2;
      double translation_arr[2][3];
      CvMat translation;
      cvInitMatHeader(&translation,2,3,CV_64F,translation_arr);
      
      cvSetZero(&translation);
      cv2DRotationMatrix(center, (tf::getYaw(mapPose.pose.orientation)*180/3.14159) -90,1.0,&translation);
      cvSetZero(rotationImage);
      cvWarpAffine(input_image,rotationImage,&translation,CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS,cvScalarAll(0));
      
      
      CvRect roi;
      roi.width=rotationImage->width;
      roi.height=rotationImage->height;
      
      if(init_zero_x==0){
        init_zero_x=(int)(mapPose.pose.position.x*(1.0/map_meters_per_pixel));
        init_zero_y=(int)(mapPose.pose.position.y*(-1.0/map_meters_per_pixel));
      }
      
      roi.x=(int)(mapPose.pose.position.x*(1.0/map_meters_per_pixel))-init_zero_x+map_zero_x-roi.width/2;
      roi.y=(int)(mapPose.pose.position.y*(-1.0/map_meters_per_pixel))-init_zero_y+map_zero_y-roi.height/2;
      
      printf("x %d, y %d, rot %f\n",roi.x,roi.y, (tf::getYaw(mapPose.pose.orientation)*180/3.14159) -90);
      
      cvSetImageROI(map,roi);
      
      cvMax(map,rotationImage,map);
      
      cvResetImageROI(map);
	    cvShowImage("map image",map);	    
    }
    catch (tf::TransformException& ex) {
      ROS_WARN("[map_maker] TF exception:\n%s", ex.what());
      printf("[map_maker] TF exception:\n%s", ex.what());
      return;
    }
    catch(...){
      printf("opencv shit itself cause our roi is bad\n");
    }
  }
}
Example #22
    void executeCb(const frontier_exploration::ExploreTaskGoalConstPtr &goal)
    {

        success_ = false;
        moving_ = false;

        explore_costmap_ros_->resetLayers();

        //create costmap services
        ros::ServiceClient updateBoundaryPolygon = private_nh_.serviceClient<frontier_exploration::UpdateBoundaryPolygon>("explore_costmap/explore_boundary/update_boundary_polygon");
        ros::ServiceClient getNextFrontier = private_nh_.serviceClient<frontier_exploration::GetNextFrontier>("explore_costmap/explore_boundary/get_next_frontier");

        //wait for move_base and costmap services
        if(!move_client_.waitForServer() || !updateBoundaryPolygon.waitForExistence() || !getNextFrontier.waitForExistence()){
            as_.setAborted();
            return;
        }

        //set region boundary on costmap
        if(ros::ok() && as_.isActive()){
            frontier_exploration::UpdateBoundaryPolygon srv;
            srv.request.explore_boundary = goal->explore_boundary;
            if(updateBoundaryPolygon.call(srv)){
                ROS_INFO("Region boundary set");
            }else{
                ROS_ERROR("Failed to set region boundary");
                as_.setAborted();
                return;
            }
        }

        //loop until all frontiers are explored
        ros::Rate rate(frequency_);
        while(ros::ok() && as_.isActive()){

            frontier_exploration::GetNextFrontier srv;

            //placeholder for next goal to be sent to move base
            geometry_msgs::PoseStamped goal_pose;

            //get current robot pose in frame of exploration boundary
            tf::Stamped<tf::Pose> robot_pose;
            explore_costmap_ros_->getRobotPose(robot_pose);

            //provide current robot pose to the frontier search service request
            tf::poseStampedTFToMsg(robot_pose,srv.request.start_pose);

            //evaluate if robot is within exploration boundary using robot_pose in boundary frame
            geometry_msgs::PoseStamped eval_pose = srv.request.start_pose;
            if(eval_pose.header.frame_id != goal->explore_boundary.header.frame_id){
                tf_listener_.transformPose(goal->explore_boundary.header.frame_id, srv.request.start_pose, eval_pose);
            }

            //check if robot is not within exploration boundary and needs to return to center of search area
            if(goal->explore_boundary.polygon.points.size() > 0 && !pointInPolygon(eval_pose.pose.position,goal->explore_boundary.polygon)){
                
                //check if robot has explored at least one frontier, and promote debug message to warning
                if(success_){
                    ROS_WARN("Robot left exploration boundary, returning to center");
                }else{
                    ROS_DEBUG("Robot not initially in exploration boundary, traveling to center");
                }
                //get current robot position in frame of exploration center
                geometry_msgs::PointStamped eval_point;
                eval_point.header = eval_pose.header;
                eval_point.point = eval_pose.pose.position;
                if(eval_point.header.frame_id != goal->explore_center.header.frame_id){
                    geometry_msgs::PointStamped temp = eval_point;
                    tf_listener_.transformPoint(goal->explore_center.header.frame_id, temp, eval_point);
                }

                //set goal pose to exploration center
                goal_pose.header = goal->explore_center.header;
                goal_pose.pose.position = goal->explore_center.point;
                goal_pose.pose.orientation = tf::createQuaternionMsgFromYaw( yawOfVector(eval_point.point, goal->explore_center.point) );

            }else if(getNextFrontier.call(srv)){ //if in boundary, try to find next frontier to search

                ROS_DEBUG("Found frontier to explore");
                success_ = true;
                goal_pose = feedback_.next_frontier = srv.response.next_frontier;
                retry_ = 5;

            }else{ //if no frontier found, check if search is successful
                ROS_DEBUG("Couldn't find a frontier");

                //search is successful
                if(retry_ == 0 && success_){
                    ROS_WARN("Finished exploring room");
                    as_.setSucceeded();
                    boost::unique_lock<boost::mutex> lock(move_client_lock_);
                    move_client_.cancelGoalsAtAndBeforeTime(ros::Time::now());
                    return;

                }else if(retry_ == 0 || !ros::ok()){ //search is not successful

                    ROS_ERROR("Failed exploration");
                    as_.setAborted();
                    return;
                }

                ROS_DEBUG("Retrying...");
                retry_--;
                //try to find frontier again, without moving robot
                continue;
            }
            //if above conditional does not escape this loop step, search has a valid goal_pose

            //check if new goal is close to old goal, hence no need to resend
            if(!moving_ || !pointsNearby(move_client_goal_.target_pose.pose.position,goal_pose.pose.position,goal_aliasing_*0.5)){
                ROS_DEBUG("New exploration goal");
                move_client_goal_.target_pose = goal_pose;
                boost::unique_lock<boost::mutex> lock(move_client_lock_);
                if(as_.isActive()){
                    move_client_.sendGoal(move_client_goal_, boost::bind(&FrontierExplorationServer::doneMovingCb, this, _1, _2),0,boost::bind(&FrontierExplorationServer::feedbackMovingCb, this, _1));
                    moving_ = true;
                }
                lock.unlock();
            }

            //check if continuous goal updating is enabled
            if(frequency_ > 0){
                //sleep for specified frequency and then continue searching
                rate.sleep();
            }else{
                //wait for movement to finish before continuing
                while(ros::ok() && as_.isActive() && moving_){
                    ros::WallDuration(0.1).sleep();
                }
            }
        }

        //goal should never be active at this point
        ROS_ASSERT(!as_.isActive());

    }
bool Kinematics::searchPositionIK(kinematics_msgs::GetPositionIK::Request &request,
                               kinematics_msgs::GetPositionIK::Response &response) {

    geometry_msgs::PoseStamped pose_msg_in = request.ik_request.pose_stamped;
    tf::Stamped<tf::Pose> transform;
    tf::Stamped<tf::Pose> transform_root;
    tf::poseStampedMsgToTF( pose_msg_in, transform );

    ros::WallTime n1 = ros::WallTime::now();

    //Do the IK
    KDL::JntArray jnt_pos_in;
    KDL::JntArray jnt_pos_out;
    jnt_pos_in.resize(num_joints);

    for (unsigned int i=0; i < num_joints; i++) {
        int tmp_index = getJointIndex(request.ik_request.ik_seed_state.joint_state.name[i]);
        if (tmp_index >=0) {
            jnt_pos_in(tmp_index) = request.ik_request.ik_seed_state.joint_state.position[i];
        } else {
            ROS_ERROR("i: %d, No joint index for %s",i,request.ik_request.ik_seed_state.joint_state.name[i].c_str());
        }
    }

    //Convert F to our root_frame
    try {
        tf_listener.transformPose(root_name, transform, transform_root);
    } catch (...) {
        ROS_ERROR("Could not transform IK pose to frame: %s", root_name.c_str());
        response.error_code.val = response.error_code.FRAME_TRANSFORM_FAILURE;
        return false;
    }

    KDL::Frame F_dest;
    tf::TransformTFToKDL(transform_root, F_dest);

    for(unsigned int i=0; i < 100; i++) // max_search_iterations_
    {
        for(unsigned int j=0; j < num_joints; j++) // num_joints_
        {
            ROS_INFO_STREAM("seed state " << j << " " << jnt_pos_in(j) );
        }
        int ik_valid = ik_solver_pos->CartToJnt(jnt_pos_in,F_dest,jnt_pos_out); // F_dest is pose_desired
        ROS_INFO_STREAM("IK returned code " << ik_valid );

        if (ik_valid >= 0) {
            response.solution.joint_state.name = info.joint_names;
            response.solution.joint_state.position.resize(num_joints);
            for (unsigned int k=0; k < num_joints; k++) {
                response.solution.joint_state.position[k] = jnt_pos_out(k);
                ROS_DEBUG("IK Solution: %s %d: %f",response.solution.joint_state.name[k].c_str(),k,jnt_pos_out(k) );
            }
            response.error_code.val = response.error_code.SUCCESS;
            ROS_INFO_STREAM("Solved after " << i+1 << " iterations");
            ROS_INFO_STREAM("time: " << (ros::WallTime::now()-n1).toSec() << " sec" );

            return true;
        }
        // no sol, try another seed
        jnt_pos_in = getRandomConfiguration();
    } 
    ROS_ERROR("solver: An IK solution could not be found");
    response.error_code.val = response.error_code.NO_IK_SOLUTION;
    return false;
}
bool transformGlobalPlan(const tf::TransformListener& tf, const std::vector<geometry_msgs::PoseStamped>& global_plan, 
						const costmap_2d::Costmap2DROS& costmap, const std::string& global_frame, 
						std::vector<geometry_msgs::PoseStamped>& transformed_plan, std::vector<int>& start_end_counts)
{
	const geometry_msgs::PoseStamped& plan_pose = global_plan[0];

	// initialize reference variables
	transformed_plan.clear();

	try
	{
		if (!global_plan.size() > 0)
		{
			ROS_ERROR("Recieved plan with zero length");
			return false;
		}

		tf::StampedTransform transform;
		tf.lookupTransform(global_frame, ros::Time(), plan_pose.header.frame_id, plan_pose.header.stamp, 
							plan_pose.header.frame_id, transform);

		//let's get the pose of the robot in the frame of the plan
		tf::Stamped<tf::Pose> robot_pose;
		robot_pose.setIdentity();
		robot_pose.frame_id_ = costmap.getBaseFrameID();
		robot_pose.stamp_ = ros::Time();
		tf.transformPose(plan_pose.header.frame_id, robot_pose, robot_pose);

		//we'll keep points on the plan that are within the window that we're looking at
		double dist_threshold = std::max(costmap.getSizeInCellsX() * costmap.getResolution() / 2.0, costmap.getSizeInCellsY() * costmap.getResolution() / 2.0);

		unsigned int i = 0;
		double sq_dist_threshold = dist_threshold * dist_threshold;
		double sq_dist = DBL_MAX;

		// --- start - modification w.r.t. base_local_planner
		// initiate start_end_count
		std::vector<int> start_end_count;
		start_end_count.assign(2, 0);

		// we know only one direction and that is forward! - initiate search with previous start_end_counts
		// this is necessary to work with the sampling based planners - the path may enter and leave the moving window several times
		ROS_ASSERT( (start_end_counts.at(0) > 0) && (start_end_counts.at(0) <= (int) global_plan.size()) );
		i = (unsigned int) global_plan.size() - (unsigned int) start_end_counts.at(0);
		// --- end - modification w.r.t. base_local_planner

		//we need to loop to a point on the plan that is within a certain distance of the robot
		while(i < (unsigned int)global_plan.size() && sq_dist > sq_dist_threshold)
		{
			double x_diff = robot_pose.getOrigin().x() - global_plan[i].pose.position.x;
			double y_diff = robot_pose.getOrigin().y() - global_plan[i].pose.position.y;
			sq_dist = x_diff * x_diff + y_diff * y_diff;

			// --- start - modification w.r.t. base_local_planner
			// not yet in reach - get next frame
			if( sq_dist > sq_dist_threshold )
				++i;
			else
				// set counter for start of transformed interval - from back, as the beginning of the plan might be pruned
				start_end_count.at(0) = (int) (((unsigned int) global_plan.size()) - i);
			// --- end - modification w.r.t. base_local_planner
		}


		tf::Stamped<tf::Pose> tf_pose;
		geometry_msgs::PoseStamped newer_pose;

		//now we'll transform until points are outside of our distance threshold
		while(i < (unsigned int)global_plan.size() && sq_dist < sq_dist_threshold)
		{
			double x_diff = robot_pose.getOrigin().x() - global_plan[i].pose.position.x;
			double y_diff = robot_pose.getOrigin().y() - global_plan[i].pose.position.y;
			sq_dist = x_diff * x_diff + y_diff * y_diff;

			const geometry_msgs::PoseStamped& pose = global_plan[i];
			poseStampedMsgToTF(pose, tf_pose);
			tf_pose.setData(transform * tf_pose);
			tf_pose.stamp_ = transform.stamp_;
			tf_pose.frame_id_ = global_frame;
			poseStampedTFToMsg(tf_pose, newer_pose);

			transformed_plan.push_back(newer_pose);

			// --- start - modification w.r.t. base_local_planner
			// set counter for end of transformed interval - from back, as the beginning of the plan might be pruned
			start_end_count.at(1) = int (((unsigned int) global_plan.size()) - i);
			// --- end - modification w.r.t. base_local_planner

			++i;
		}

		// --- start - modification w.r.t. base_local_planner
		// write to reference variable
		start_end_counts = start_end_count;
		// --- end - modification w.r.t. base_local_planner
    }
	catch(tf::LookupException& ex)
	{
		ROS_ERROR("No Transform available Error: %s\n", ex.what());
		return false;
	}
	catch(tf::ConnectivityException& ex)
	{
		ROS_ERROR("Connectivity Error: %s\n", ex.what());
		return false;
	}
	catch(tf::ExtrapolationException& ex)
	{
		ROS_ERROR("Extrapolation Error: %s\n", ex.what());
		if (global_plan.size() > 0)
			ROS_ERROR("Global Frame: %s Plan Frame size %d: %s\n", global_frame.c_str(), (unsigned int)global_plan.size(), global_plan[0].header.frame_id.c_str());

		return false;
	}

	return true;
}
Example #25
bool transformGlobalPlan(const tf::TransformListener& tf, const std::vector<geometry_msgs::PoseStamped>& global_plan,
                         const costmap_2d::Costmap2DROS& costmap, const std::string& global_frame,
                         std::vector<geometry_msgs::PoseStamped>& transformed_plan) {
    const geometry_msgs::PoseStamped& plan_pose = global_plan[0];

    transformed_plan.clear();

    try {
        if (!global_plan.size() > 0)
        {
            ROS_ERROR("Recieved plan with zero length");
            return false;
        }

        tf::StampedTransform transform;
        tf.lookupTransform(global_frame, ros::Time(),
                           plan_pose.header.frame_id, plan_pose.header.stamp,
                           plan_pose.header.frame_id, transform);

        //let's get the pose of the robot in the frame of the plan
        tf::Stamped<tf::Pose> robot_pose;
        robot_pose.setIdentity();
        robot_pose.frame_id_ = costmap.getBaseFrameID();
        robot_pose.stamp_ = ros::Time();
        tf.transformPose(plan_pose.header.frame_id, robot_pose, robot_pose);

        //we'll keep points on the plan that are within the window that we're looking at
        double dist_threshold = std::max(costmap.getSizeInCellsX() * costmap.getResolution() / 2.0, costmap.getSizeInCellsY() * costmap.getResolution() / 2.0);

        unsigned int i = 0;
        double sq_dist_threshold = dist_threshold * dist_threshold;
        double sq_dist = DBL_MAX;

        //we need to loop to a point on the plan that is within a certain distance of the robot
        while(i < (unsigned int)global_plan.size() && sq_dist > sq_dist_threshold) {
            double x_diff = robot_pose.getOrigin().x() - global_plan[i].pose.position.x;
            double y_diff = robot_pose.getOrigin().y() - global_plan[i].pose.position.y;
            sq_dist = x_diff * x_diff + y_diff * y_diff;
            ++i;
        }

        //make sure not to count the first point that is too far away
        if(i > 0)
            --i;

        tf::Stamped<tf::Pose> tf_pose;
        geometry_msgs::PoseStamped newer_pose;

        //now we'll transform until points are outside of our distance threshold
        while(i < (unsigned int)global_plan.size() && sq_dist < sq_dist_threshold) {
            double x_diff = robot_pose.getOrigin().x() - global_plan[i].pose.position.x;
            double y_diff = robot_pose.getOrigin().y() - global_plan[i].pose.position.y;
            sq_dist = x_diff * x_diff + y_diff * y_diff;

            const geometry_msgs::PoseStamped& pose = global_plan[i];
            poseStampedMsgToTF(pose, tf_pose);
            tf_pose.setData(transform * tf_pose);
            tf_pose.stamp_ = transform.stamp_;
            tf_pose.frame_id_ = global_frame;
            poseStampedTFToMsg(tf_pose, newer_pose);

            transformed_plan.push_back(newer_pose);

            ++i;
        }
    }
    catch(tf::LookupException& ex) {
        ROS_ERROR("No Transform available Error: %s\n", ex.what());
        return false;
    }
    catch(tf::ConnectivityException& ex) {
        ROS_ERROR("Connectivity Error: %s\n", ex.what());
        return false;
    }
    catch(tf::ExtrapolationException& ex) {
        ROS_ERROR("Extrapolation Error: %s\n", ex.what());
        if (global_plan.size() > 0)
            ROS_ERROR("Global Frame: %s Plan Frame size %d: %s\n", global_frame.c_str(), (unsigned int)global_plan.size(), global_plan[0].header.frame_id.c_str());

        return false;
    }

    return true;
}
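A hedged sketch of a typical call site in a local planner (the member names tf_, global_plan_, and costmap_ros_ are assumptions):

// hypothetical call from a local planner's update step
std::vector<geometry_msgs::PoseStamped> local_plan;
if (!transformGlobalPlan(*tf_, global_plan_, *costmap_ros_,
                         costmap_ros_->getGlobalFrameID(), local_plan)) {
    ROS_WARN("Could not transform the global plan into the local frame");
    return false;
}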
void TransformPose::publish_all(tf::TransformListener& listener)
{

        geometry_msgs::PoseStamped odom_pose;
        odom_pose.header.frame_id = "/celoxia_base_link";

        //we'll just use the most recent transform available for our simple example
        odom_pose.header.stamp = ros::Time();

        //just an arbitrary point in space
        odom_pose.pose.position.x = 0.0;
        odom_pose.pose.position.y = 0.0;
        odom_pose.pose.position.z = 0.0;

        odom_pose.pose.orientation.x = 0.0;
        odom_pose.pose.orientation.y = 0.0;
        odom_pose.pose.orientation.z = 0.0;
        odom_pose.pose.orientation.w = 1.0;


        try{
                ros::Time now = ros::Time::now();
                listener.waitForTransform("/map", "/celoxia_base_link", now, ros::Duration(0.5));
                geometry_msgs::PoseStamped base_pose;
                listener.transformPose("/map", odom_pose, base_pose);
		
                robot_pose = base_pose;

                my_odom.header.stamp = base_pose.header.stamp;
                my_odom.pose.pose.position.x = base_pose.pose.position.x;
                my_odom.pose.pose.position.y = base_pose.pose.position.y;
                my_odom.pose.pose.position.z = base_pose.pose.position.z;
                my_odom.pose.pose.orientation = base_pose.pose.orientation;

                // finite-difference velocity estimates; these assume a nonzero
                // time step since the previous update
                my_odom.twist.twist.linear.x = sqrt(pow(base_pose.pose.position.x - old_x, 2) + pow(base_pose.pose.position.y - old_y, 2)) / (my_odom.header.stamp.toSec() - old_time.toSec());
                //my_odom.twist.twist.linear.x = xspeed.data;
                my_odom.twist.twist.linear.y = 0;
                my_odom.twist.twist.linear.z = 0;
                my_odom.twist.twist.angular.x = 0;
                my_odom.twist.twist.angular.y = 0;
                //my_odom.twist.twist.angular.z = tspeed.data;
                // note: the yaw difference below is not normalized to [-pi, pi],
                // so a heading that crosses the boundary yields a spurious spike
                my_odom.twist.twist.angular.z = (tf::getYaw(my_odom.pose.pose.orientation) - old_theta) / (my_odom.header.stamp.toSec() - old_time.toSec());

                odom_pub.publish(my_odom);

                sensor_msgs::PointCloud cloud;
                cloud.header.stamp = ros::Time::now();
                cloud.header.frame_id = "/map";

                // old rosbuild-era accessors; equivalent to resizing
                // cloud.points and cloud.channels[0].values directly
                cloud.set_points_size(4);

                //we'll also add an intensity channel to the cloud
                cloud.set_channels_size(1);
                cloud.channels[0].name = "intensities";
                cloud.channels[0].set_values_size(4);

                //generate some fake data for our point cloud: four points
                //forming a 0.1 m cross around the robot's current position
                cloud.points[0].x = base_pose.pose.position.x + 0.1;
                cloud.points[0].y = base_pose.pose.position.y;
                cloud.points[0].z = 0;
                cloud.channels[0].values[0] = 199;

                cloud.points[1].x = base_pose.pose.position.x - 0.1;
                cloud.points[1].y = base_pose.pose.position.y;
                cloud.points[1].z = 0;
                cloud.channels[0].values[1] = 199;

                cloud.points[2].x = base_pose.pose.position.x;
                cloud.points[2].y = base_pose.pose.position.y + 0.1;
                cloud.points[2].z = 0;
                cloud.channels[0].values[2] = 199;

                cloud.points[3].x = base_pose.pose.position.x;
                cloud.points[3].y = base_pose.pose.position.y - 0.1;
                cloud.points[3].z = 0;
                cloud.channels[0].values[3] = 199;

                cloud_pub.publish(cloud);

                old_x = base_pose.pose.position.x;
                old_y = base_pose.pose.position.y;
                old_theta = tf::getYaw(my_odom.pose.pose.orientation);
                old_time = my_odom.header.stamp;
        }
        catch(tf::TransformException& ex){
                ROS_ERROR("Received an exception trying to transform a point from \"odom\" to \"celoxia_base_link\": %s", ex.what());
        }


}
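
A minimal driver for publish_all might look like the following sketch (the node name, the publisher setup inside TransformPose, and the 10 Hz rate are assumptions, not part of the original source):

int main(int argc, char** argv)
{
    ros::init(argc, argv, "transform_pose_node");
    ros::NodeHandle nh;
    tf::TransformListener listener(ros::Duration(10.0));
    TransformPose transform_pose;  // assumed to advertise odom_pub and cloud_pub in its constructor
    ros::Rate rate(10.0);          // assumed publish rate
    while (nh.ok()) {
        transform_pose.publish_all(listener);
        ros::spinOnce();
        rate.sleep();
    }
    return 0;
}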
Example #27
void FeatureTracker::image_callback(const sensor_msgs::ImageConstPtr& msg, const sensor_msgs::CameraInfoConstPtr& info_msg) {
  //need pose data for each picture, need to publish a camera pose
  ros::Time acquisition_time = msg->header.stamp;
  geometry_msgs::PoseStamped basePose;
  geometry_msgs::PoseStamped mapPose;
  basePose.pose.orientation.w = 1.0;
  basePose.header.frame_id = "/base_link";
  basePose.header.stamp = acquisition_time;  // stamp the source pose with the image time so the transform below is consistent
  mapPose.header.frame_id = "/map";
  ros::Duration timeout(3);

  try {
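    // advanced transformPose overload: re-expresses basePose (from its own frame at
    // its stamp) in "/map" at acquisition_time, with "/camera_1_link" as the fixed frame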
    tf_listener_.waitForTransform("/camera_1_link", "/map", acquisition_time, timeout);
    tf_listener_.transformPose("/map", acquisition_time,basePose,"/camera_1_link",mapPose);
    printf("pose #%d %f %f %f\n",pic_number++,mapPose.pose.position.x, mapPose.pose.position.y, tf::getYaw(mapPose.pose.orientation));
  }
  catch (tf::TransformException& ex) {
    ROS_WARN("[map_maker] TF exception:\n%s", ex.what());
    printf("[map_maker] TF exception:\n%s", ex.what());
    return;
  }
  cam_model.fromCameraInfo(info_msg);

  // printf("callback called\n");
  try
  {
    // if you want to work with color images, change from mono8 to bgr8
    if(image_rect==NULL){
      image_rect = cvCloneImage(bridge.imgMsgToCv(msg, "mono8"));
      last_image= cvCloneImage(bridge.imgMsgToCv(msg, "mono8"));
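      // scratch pyramids sized (width+8) x height/3 -- the workspace that
      // cvCalcOpticalFlowPyrLK expects (the tracking itself presumably lives in track_features)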
      pyrA=cvCreateImage(cvSize(last_image->width+8,last_image->height/3.0), IPL_DEPTH_32F, 1);
      pyrB=cvCloneImage(pyrA);
      //  printf("cloned image\n");
    }
    else{
      //save the last image
      cvCopy(image_rect,last_image);
      cvCopy(bridge.imgMsgToCv(msg, "mono8"),image_rect);
      // printf("copied image\n");
    }
    if(output_image==NULL){
      output_image =cvCloneImage(image_rect);
    }
    if(eigImage==NULL){
      eigImage =cvCloneImage(image_rect);
    }
    if(tempImage==NULL){
      tempImage =cvCloneImage(image_rect);
    }
  }
  catch (sensor_msgs::CvBridgeException& e)
  {
    ROS_ERROR("Could not convert from '%s' to 'mono8'.", msg->encoding.c_str());
    return;
  }
  
  if(image_rect!=NULL) {
    cvCopy(image_rect,output_image);
    
    printf("got image\n");
    
    track_features(mapPose);
    
    //draw features on the image
    for(int i=0;i<last_feature_count;i++){
      CvPoint center=cvPoint((int)features[i].x,(int)features[i].y);
      cvCircle(output_image,center,10,cvScalar(150),2);
      
      char strbuf[16];  // large enough for any 32-bit id
      int n = sprintf(strbuf, "%d", current_feature_id[i]);
      std::string text = std::string(strbuf, n);
      
      CvFont font;
      
      cvInitFont(&font,CV_FONT_HERSHEY_SIMPLEX,1,1);
      
      cvPutText(output_image,text.c_str(),cvPoint(center.x,center.y+20),&font,cvScalar(255));
      
      
      cv::Point3d tempRay;
      cv::Point2d tempPoint=cv::Point2d(features[i]);
      cam_model.projectPixelTo3dRay(tempPoint,tempRay);
  //    printf("%f x  %f y  %f z\n",tempRay.x,tempRay.y,tempRay.z);
    }
    
  //  featureList[0].print();
    
    //determine error gradient
    
    int min_features=10;
    
  //  printf("ypr %f %f %f\n",yaw,pitch,roll);
    
    cv::Point3d error_sum=calc_error(min_features,0, 0, 0);
    printf("total error is : %f\n",error_sum.x);
    
    for(size_t i = 0; i < featureList.size(); i++){
      if(min_features < featureList[i].numFeatures()){
        printf("\n\n\nfeature %zu\n", i);
        printf("mean: %f %f %f\n", featureList[i].currentMean.x, featureList[i].currentMean.y, featureList[i].currentMean.z);
	
      }
    }
    
    
//    double error_up= calc_error(min_features,yalpha, 0, 0);
  //  printf("total up yaw error is : %f\n",error_up);
//    double error_down= calc_error(min_features,-yalpha, 0, 0);
  //  printf("total down yaw error is : %f\n",error_down);
  /*  
     
    double yaw_change=0;
    if(error_up<error_sum && error_up<error_down){
      yaw_change=yalpha;
    }else if(error_down<error_sum && error_down<error_up){
      yaw_change=-yalpha;
    }else if(error_down!=error_sum&&error_sum!=error_up){
      yalpha/=2;
    }
    
    error_up=   calc_error(min_features,0,palpha, 0);
   // printf("total up pitch error is : %f\n",error_up);
    error_down=   calc_error(min_features,0,-palpha, 0);
   // printf("total down pitch error is : %f\n",error_down);
    
    double pitch_change=0;
    if(error_up<error_sum && error_up<error_down){
      pitch_change=palpha;
    }else if(error_down<error_sum && error_down<error_up){
      pitch_change=-palpha;
    }else if(error_down!=error_sum&&error_sum!=error_up){
      //palpha/=2;
    }
    
    error_up=  calc_error(min_features,0,0,ralpha);
   // printf("total up roll error is : %f\n",error_up);
    
    error_down=   calc_error(min_features,0,0,-ralpha);
   // printf("total down roll error is : %f\n",error_down);
    
    double roll_change=0;
    if(error_up<error_sum && error_up<error_down){
      roll_change=ralpha;
    }else if(error_down<error_sum && error_down<error_up){
      roll_change=-ralpha;
    }else if(error_down!=error_sum&&error_sum!=error_up){
      ralpha/=2;
    }
    
  //  yaw+=yaw_change;
  
  //  pitch+=pitch_change;
 
  
  //   roll+=roll_change;
    */
    
    try{
      sensor_msgs::Image output_image_cvim = *bridge.cvToImgMsg(output_image, "mono8");
      output_image_cvim.header.stamp=msg->header.stamp;
      analyzed_pub_.publish(output_image_cvim);
    }
    catch (sensor_msgs::CvBridgeException& e){
      ROS_ERROR("Could not convert from '%s' to 'mono8'.", msg->encoding.c_str());
      return;
    }
    // printf("displaying image\n");
  }else{
    // printf("null image_rect\n");
  }
}
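
The two-argument callback signature matches image_transport's synchronized camera subscription; a hedged sketch of the wiring (the topic name and member names are assumptions):

// e.g. in FeatureTracker's constructor; nh_, it_ (an image_transport::ImageTransport)
// and cam_sub_ (an image_transport::CameraSubscriber) are assumed members
cam_sub_ = it_.subscribeCamera("camera/image_rect", 1,
                               &FeatureTracker::image_callback, this);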
Example #28
  bool transformGlobalPlan(
      const tf::TransformListener& tf,
      const std::vector<geometry_msgs::PoseStamped>& global_plan,
      const tf::Stamped<tf::Pose>& global_pose,
      const costmap_2d::Costmap2D& costmap,
      const std::string& global_frame,
      std::vector<geometry_msgs::PoseStamped>& transformed_plan){
    transformed_plan.clear();

    try {
      if (global_plan.empty()) {
        ROS_ERROR("Received plan with zero length");
        return false;
      }

      // safe to bind this reference only after the empty check above
      const geometry_msgs::PoseStamped& plan_pose = global_plan[0];

      // get plan_to_global_transform from plan frame to global_frame
      tf::StampedTransform plan_to_global_transform;
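      // "time-traveling" overloads: resolve the transform from the plan's frame at
      // the plan's timestamp into global_frame at the current/latest time, using the
      // plan frame itself as the fixed frame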
      tf.waitForTransform(global_frame, ros::Time::now(),
                          plan_pose.header.frame_id, plan_pose.header.stamp,
                          plan_pose.header.frame_id, ros::Duration(0.5));
      tf.lookupTransform(global_frame, ros::Time(),
                         plan_pose.header.frame_id, plan_pose.header.stamp, 
                         plan_pose.header.frame_id, plan_to_global_transform);

      //let's get the pose of the robot in the frame of the plan
      tf::Stamped<tf::Pose> robot_pose;
      tf.transformPose(plan_pose.header.frame_id, global_pose, robot_pose);

      //we'll discard points on the plan that are outside the local costmap
      double dist_threshold = std::max(costmap.getSizeInCellsX() * costmap.getResolution() / 2.0,
                                       costmap.getSizeInCellsY() * costmap.getResolution() / 2.0);

      unsigned int i = 0;
      double sq_dist_threshold = dist_threshold * dist_threshold;
      double sq_dist = 0;

      //we need to loop to a point on the plan that is within a certain distance of the robot
      while(i < (unsigned int)global_plan.size()) {
        double x_diff = robot_pose.getOrigin().x() - global_plan[i].pose.position.x;
        double y_diff = robot_pose.getOrigin().y() - global_plan[i].pose.position.y;
        sq_dist = x_diff * x_diff + y_diff * y_diff;
        if (sq_dist <= sq_dist_threshold) {
          break;
        }
        ++i;
      }

      tf::Stamped<tf::Pose> tf_pose;
      geometry_msgs::PoseStamped newer_pose;

      //now we'll transform until points are outside of our distance threshold
      while(i < (unsigned int)global_plan.size() && sq_dist <= sq_dist_threshold) {
        const geometry_msgs::PoseStamped& pose = global_plan[i];
        poseStampedMsgToTF(pose, tf_pose);
        tf_pose.setData(plan_to_global_transform * tf_pose);
        tf_pose.stamp_ = plan_to_global_transform.stamp_;
        tf_pose.frame_id_ = global_frame;
        poseStampedTFToMsg(tf_pose, newer_pose);

        transformed_plan.push_back(newer_pose);

        double x_diff = robot_pose.getOrigin().x() - global_plan[i].pose.position.x;
        double y_diff = robot_pose.getOrigin().y() - global_plan[i].pose.position.y;
        sq_dist = x_diff * x_diff + y_diff * y_diff;

        ++i;
      }
    }
    catch(tf::LookupException& ex) {
      ROS_ERROR("No Transform available Error: %s\n", ex.what());
      return false;
    }
    catch(tf::ConnectivityException& ex) {
      ROS_ERROR("Connectivity Error: %s\n", ex.what());
      return false;
    }
    catch(tf::ExtrapolationException& ex) {
      ROS_ERROR("Extrapolation Error: %s\n", ex.what());
      if (global_plan.size() > 0)
        ROS_ERROR("Global Frame: %s Plan Frame size %d: %s\n", global_frame.c_str(), (unsigned int)global_plan.size(), global_plan[0].header.frame_id.c_str());

      return false;
    }

    return true;
  }
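
A hedged usage sketch: a local planner would typically call transformGlobalPlan right after fetching the robot pose from its costmap (the member names costmap_ros_, tf_, global_plan_, costmap_ and global_frame_ are assumptions):

// e.g. inside a local planner's computeVelocityCommands()
tf::Stamped<tf::Pose> global_pose;
if (!costmap_ros_->getRobotPose(global_pose))  // costmap_ros_: a costmap_2d::Costmap2DROS*
    return false;
std::vector<geometry_msgs::PoseStamped> transformed_plan;
if (!transformGlobalPlan(*tf_, global_plan_, global_pose, *costmap_, global_frame_, transformed_plan))
    return false;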
Example #29
void Pathwrapper::celoxia_compute_next_pathpoint(tf::TransformListener& listener) {


    if(!pause) {

        geometry_msgs::PoseStamped odom_pose;
        odom_pose.header.frame_id = "/celoxia_base_link";

        //we'll just use the most recent transform available for our simple example
        odom_pose.header.stamp = ros::Time();

        //just an arbitrary point in space
        odom_pose.pose.position.x = 0.0;
        odom_pose.pose.position.y = 0.0;
        odom_pose.pose.position.z = 0.0;

        odom_pose.pose.orientation.x = 0.0;
        odom_pose.pose.orientation.y = 0.0;
        odom_pose.pose.orientation.z = 0.0;
        odom_pose.pose.orientation.w = 1.0;


        try{
            ros::Time now = ros::Time::now();
            listener.waitForTransform("/odom", "/celoxia_base_link", now, ros::Duration(5.0));
            geometry_msgs::PoseStamped base_pose;
            listener.transformPose("/odom", odom_pose, base_pose);

            //ROS_INFO("%f %f %f %f", final_pose.x, final_pose.y, base_pose.pose.position.x, base_pose.pose.position.y);
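            // only advance along the stored path once the robot has come within
            // MAX_DIST_SKIP of the previously published goal point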

            if( sqrt( pow(celoxia_final_pose.x - base_pose.pose.position.x, 2) + pow(celoxia_final_pose.y - base_pose.pose.position.y, 2) ) < MAX_DIST_SKIP ) {

                if( !celoxia_my_path.poses.empty() ){
                    if(celoxia_my_path.poses.size() > NB_STEP_SKIP) {

                        for(int loop=0; loop < (NB_STEP_SKIP-2); loop++)
                            celoxia_my_path.poses.erase(celoxia_my_path.poses.begin());

                        while( (celoxia_my_path.poses.size() > 1) && (sqrt( pow(celoxia_my_path.poses.front().pose.position.x - base_pose.pose.position.x, 2) + pow(celoxia_my_path.poses.front().pose.position.y - base_pose.pose.position.y, 2) ) < (MAX_DIST_SKIP-0.01)) ) {
                            celoxia_my_path.poses.erase(celoxia_my_path.poses.begin());
                        }

                        celoxia_final_pose.x = celoxia_my_path.poses.front().pose.position.x;
                        celoxia_final_pose.y = celoxia_my_path.poses.front().pose.position.y;
                        ROS_INFO("%f", sqrt( pow(celoxia_final_pose.x - base_pose.pose.position.x, 2) + pow(celoxia_final_pose.y - base_pose.pose.position.y, 2) ));
                        Pathwrapper::celoxia_pose2D_pub.publish(celoxia_final_pose);

                        celoxia_final_ardugoal.x = celoxia_my_path.poses.front().pose.position.x;
                        celoxia_final_ardugoal.y = celoxia_my_path.poses.front().pose.position.y;
                        celoxia_final_ardugoal.theta = getHeadingFromQuat(celoxia_my_path.poses.front().pose.orientation);
                        celoxia_final_ardugoal.last = 0;

                        celoxia_arduGoal_pub.publish(celoxia_final_ardugoal);

                        celoxia_my_path.poses.erase(celoxia_my_path.poses.begin());
                    }
                    else {
                        while(celoxia_my_path.poses.size() > 1) {
                            celoxia_my_path.poses.erase(celoxia_my_path.poses.begin());
                        }
                        //ROS_INFO("%f %f", celoxia_my_path.poses.front().pose.position.x,
                        //        celoxia_my_path.poses.front().pose.position.y);

                        celoxia_final_pose.x = celoxia_my_path.poses.front().pose.position.x;
                        celoxia_final_pose.y = celoxia_my_path.poses.front().pose.position.y;
                        Pathwrapper::celoxia_pose2D_pub.publish(celoxia_final_pose);

                        celoxia_final_ardugoal.x = celoxia_my_path.poses.front().pose.position.x;
                        celoxia_final_ardugoal.y = celoxia_my_path.poses.front().pose.position.y;
                        celoxia_final_ardugoal.theta = getHeadingFromQuat(celoxia_my_path.poses.front().pose.orientation);
                        celoxia_final_ardugoal.last = 1;

                        celoxia_arduGoal_pub.publish(celoxia_final_ardugoal);

                        celoxia_my_path.poses.erase(celoxia_my_path.poses.begin());
                    }

                    if( celoxia_my_path.poses.empty() ){
                        usleep(100000);
                        int i = 0;
                        double test = sqrt( pow(celoxia_final_pose.x - base_pose.pose.position.x, 2) + pow(celoxia_final_pose.y - base_pose.pose.position.y, 2));
                        while( ( test > 0.02) && ( i < 35 ) ) {
                            usleep(100000);
                            ros::Time now = ros::Time::now();
                            listener.waitForTransform("/odom", "/celoxia_base_link", now, ros::Duration(5.0));
                            geometry_msgs::PoseStamped base_pose;
                            listener.transformPose("/odom", odom_pose, base_pose);
                            i++;
                            test = sqrt( pow(celoxia_final_pose.x - base_pose.pose.position.x, 2) + pow(celoxia_final_pose.y - base_pose.pose.position.y, 2));
                            ROS_ERROR("i : %d / dist : %f", i, test);
                        }
                        usleep(300000);
                        std_msgs::Empty empty;
                        Pathwrapper::celoxia_pathdone_pub.publish(empty);
                    }
                    //ros::Duration(0.02).sleep();
                    //ROS_INFO("Path sent.");
                }

            }
        }
        catch(tf::TransformException& ex){
            ROS_ERROR("Received an exception trying to transform a point from \"odom\" to \"celoxia_base_link\": %s", ex.what());
        }

    }

}
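
getHeadingFromQuat is not shown in this example; assuming it simply extracts the planar heading, a minimal sketch would be:

double getHeadingFromQuat(const geometry_msgs::Quaternion& q)
{
    return tf::getYaw(q);  // yaw about Z, matching the Pose2D goals published above
}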
Example #30
void GridConstructionNode::scanCallback (const sm::LaserScan& scan)
{
    try {

        // We'll need the transform between sensor and fixed frames at the time when the scan was taken
        if (!tf_.waitForTransform(fixed_frame_, sensor_frame_, scan.header.stamp, ros::Duration(1.0)))
        {
            ROS_WARN_STREAM ("Timed out waiting for transform from " << sensor_frame_ << " to "
                             << fixed_frame_ << " at " << scan.header.stamp.toSec());
            return;
        }

        // Figure out current sensor position
        tf::Pose identity(tf::createIdentityQuaternion(), tf::Vector3(0, 0, 0));
        tf::Stamped<tf::Pose> odom_pose;
        tf_.transformPose(fixed_frame_, tf::Stamped<tf::Pose> (identity, ros::Time(), sensor_frame_), odom_pose);

        // Project scan from sensor frame (which varies over time) to odom (which doesn't)
        sm::PointCloud fixed_frame_cloud;
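        // note: constructing the projector anew on every scan works but discards its
        // cached per-scan-geometry unit vectors; it is usually kept as a class member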
        laser_geometry::LaserProjection projector_;
        projector_.transformLaserScanToPointCloud (fixed_frame_, scan, fixed_frame_cloud, tf_);

        // Now transform back into sensor frame at a single time point
        sm::PointCloud sensor_frame_cloud;
        tf_.transformPointCloud (sensor_frame_, scan.header.stamp, fixed_frame_cloud, fixed_frame_,
                                 sensor_frame_cloud);

        // Construct and save LocalizedCloud
        CloudPtr loc_cloud(new gu::LocalizedCloud());
        loc_cloud->cloud.points = sensor_frame_cloud.points;
        tf::poseTFToMsg(odom_pose, loc_cloud->sensor_pose);
        loc_cloud->header.frame_id = fixed_frame_;
        Lock lock(mutex_);
        last_cloud_=loc_cloud;
    }
    catch (tf::TransformException& e) {
        ROS_INFO ("Not saving scan due to tf lookup exception: %s",
                  e.what());
    }
}
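
The callback relies on several declarations outside the excerpt; a hedged reconstruction of the supporting skeleton (aliases and member names are inferred from usage, not taken from the original source):

// assumed supporting declarations for GridConstructionNode
namespace sm = sensor_msgs;
namespace gu = occupancy_grid_utils;              // assumption: gu aliases occupancy_grid_utils
typedef boost::shared_ptr<gu::LocalizedCloud> CloudPtr;
typedef boost::mutex::scoped_lock Lock;

class GridConstructionNode
{
    tf::TransformListener tf_;
    std::string fixed_frame_, sensor_frame_;
    boost::mutex mutex_;
    CloudPtr last_cloud_;                         // latest localized scan, guarded by mutex_
    void scanCallback(const sm::LaserScan& scan);
};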