std::string lookForSpecificBodyTransform(tf::TransformListener& listener, std::string frame_id, std::string body_to_track_frame, tf::StampedTransform& transform)
{
	//Look for the user's torso, or the torso estimated by the Kalman filter

	try
	{
		if(skeleton_to_track == -1)			//Kalman
		{
			listener.lookupTransform(frame_id, "torso_k", ros::Time(0), transform);
		}
		else								//User
		{
			listener.lookupTransform(frame_id, body_to_track_frame, ros::Time(0), transform);
		}

		if(transform.stamp_ != last_stamp[body_to_track_frame])			//Check whether this is a new detection or the same one as last time
		{
			last_stamp[body_to_track_frame] = transform.stamp_;

			return "found";
		}
		else															//Same detection as the last one: we are running faster than the skeleton tracker, so skip a frame
		{
			return "skip";
		}
	}
	catch(tf::TransformException &ex)									//Torso not found
	{
		return "not found";
	}
}
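// --- Usage sketch (not from the source): one way this helper might be
// polled. The frame names and the loop rate are assumptions; skeleton_to_track
// and last_stamp are the globals the function above already relies on.
#include <map>
#include <string>
#include <ros/ros.h>
#include <tf/transform_listener.h>

extern int skeleton_to_track;                    // -1 selects the Kalman estimate
extern std::map<std::string, ros::Time> last_stamp;

void trackTorsoLoop(tf::TransformListener& listener)
{
	tf::StampedTransform transform;
	ros::Rate rate(30);
	while (ros::ok())
	{
		std::string result =
			lookForSpecificBodyTransform(listener, "openni_depth_frame", "torso_1", transform);
		if (result == "found")
			ROS_INFO("torso at (%.2f, %.2f, %.2f)",
			         transform.getOrigin().x(), transform.getOrigin().y(), transform.getOrigin().z());
		// "skip": same stamp as last time; "not found": no transform available yet
		rate.sleep();
	}
}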
    bool checkObjectInMap(robot_msgs::checkObjectInMap::Request &req, robot_msgs::checkObjectInMap::Response &res) {
        int width = 16;

        geometry_msgs::PointStamped tf_object, obj;
        obj.point.x = req.point.x;
        obj.point.y = req.point.y;
        obj.header.frame_id = "camera";
        listener.waitForTransform("/map", "/camera", ros::Time(0), ros::Duration(1));
        listener.transformPoint("map", obj, tf_object);

        int x = floor(tf_object.point.x/resolution);
        int y = floor(tf_object.point.y/resolution);

        //ROS_INFO("COORDINATE OBJECT x: %d y: %d", x, y);
        for(int i = x-width/2; i <= x+width/2; i++)
        {
            //ROS_INFO("CHECK COORDINATE OBJECT x: %d", i);

            for(int j = y-width/2; j <= y+width/2; j++)
            {
                //ROS_INFO("Value %d", final_map[i + width_map*j]);
                if(final_map[i + width_map*j] == -106)
                {
                    res.inMap = true;
                    return true;
                }
            }
        }

        res.inMap = false;
        return true;
    }
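// --- Hedged variant (not from the source): the window test above can index
// outside the grid when the object lies near the map border. A clamped version
// of the same check, assuming a height_map counterpart to width_map and an
// int8 occupancy buffer; names here are illustrative.
#include <algorithm>
#include <vector>

static bool objectCellInMap(int x, int y, int width,
                            const std::vector<signed char>& final_map,
                            int width_map, int height_map)
{
    for (int i = std::max(0, x - width / 2); i <= std::min(width_map - 1, x + width / 2); i++)
        for (int j = std::max(0, y - width / 2); j <= std::min(height_map - 1, y + width / 2); j++)
            if (final_map[i + width_map * j] == -106)   // same marker value as above
                return true;
    return false;
}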
bool planning_environment::getLatestIdentityTransform(const std::string& to_frame,
        const std::string& from_frame,
        tf::TransformListener& tf,
        tf::Transform& pose)
{
    geometry_msgs::PoseStamped temp_pose;
    temp_pose.pose.orientation.w = 1.0;
    temp_pose.header.frame_id = from_frame;
    geometry_msgs::PoseStamped trans_pose;
    ros::Time tm;
    std::string err_string;
    if (tf.getLatestCommonTime(to_frame, temp_pose.header.frame_id, tm, &err_string) == tf::NO_ERROR) {
        temp_pose.header.stamp = tm;
        try {
            tf.transformPose(to_frame, temp_pose, trans_pose);
        } catch(tf::TransformException& ex) {
            ROS_ERROR_STREAM("Unable to transform object from frame " << temp_pose.header.frame_id << " to " << to_frame << " error is " << ex.what());
            return false;
        }
    } else {
        ROS_ERROR_STREAM("No latest time for transforming " << from_frame << " to " << to_frame);
        return false;
    }
    tf::poseMsgToTF(trans_pose.pose, pose);
    return true;
}
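// --- Usage sketch (assumption, not from the source): fetch the latest
// base_link -> map transform and read off the planar pose; the frame names
// are placeholders.
void printRobotPose(tf::TransformListener& tf_listener)
{
    tf::Transform base_in_map;
    if (planning_environment::getLatestIdentityTransform("map", "base_link", tf_listener, base_in_map))
    {
        ROS_INFO("robot at (%.2f, %.2f), yaw %.2f",
                 base_in_map.getOrigin().x(),
                 base_in_map.getOrigin().y(),
                 tf::getYaw(base_in_map.getRotation()));
    }
}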
void planning_environment::updateAttachedObjectBodyPoses(planning_environment::CollisionModels* cm,
        planning_models::KinematicState& state,
        tf::TransformListener& tf)
{
    cm->bodiesLock();

    const std::map<std::string, std::map<std::string, bodies::BodyVector*> >& link_att_objects = cm->getLinkAttachedObjects();

    if(link_att_objects.empty()) {
        cm->bodiesUnlock();
        return;
    }

    //this gets all the attached bodies in their correct current positions according to tf
    geometry_msgs::PoseStamped ps;
    ps.pose.orientation.w = 1.0;
    for(std::map<std::string, std::map<std::string, bodies::BodyVector*> >::const_iterator it = link_att_objects.begin();
            it != link_att_objects.end();
            it++) {
        ps.header.frame_id = it->first;
        std::string es;
        if (tf.getLatestCommonTime(cm->getWorldFrameId(), it->first, ps.header.stamp, &es) != tf::NO_ERROR) {
            ROS_INFO_STREAM("Problem transforming into fixed frame from " << it->first << ".  Error string " << es);
            continue;
        }
        geometry_msgs::PoseStamped psout;
        tf.transformPose(cm->getWorldFrameId(), ps, psout);
        tf::Transform link_trans;
        tf::poseMsgToTF(psout.pose, link_trans);
        state.updateKinematicStateWithLinkAt(it->first, link_trans);
        cm->updateAttachedBodyPosesForLink(state, it->first);
    }
    cm->bodiesUnlock();
}
  void commandForce(const geometry_msgs::WrenchStamped::ConstPtr &msg)
  {
    F_des_ << msg->wrench.force.x, msg->wrench.force.y, msg->wrench.force.z, 
              msg->wrench.torque.x, msg->wrench.torque.y, msg->wrench.torque.z;
    if(msg->header.frame_id == root_name_)
      return;

    geometry_msgs::PoseStamped in_root;
    in_root.pose.orientation.w = 1.0;
    in_root.header.frame_id = msg->header.frame_id;

    try {
      tf_.waitForTransform(root_name_, msg->header.frame_id, msg->header.stamp, ros::Duration(0.1));
      tf_.transformPose(root_name_, in_root, in_root);
    }
    catch (const tf::TransformException &ex)
    {
      ROS_ERROR("Failed to transform: %s", ex.what());
      return;
    }
    
    Eigen::Affine3d t;
    
    tf::poseMsgToEigen(in_root.pose, t);

    F_des_.head<3>() = t.linear() * F_des_.head<3>();
    F_des_.tail<3>() = t.linear() * F_des_.tail<3>();
  }
	void DepthSensorCallback(const sensor_msgs::PointCloud2ConstPtr msg)
	{
		// Get the current world space tool position
		m_MsgHeaderStamp = msg->header.stamp;
		m_Listener.waitForTransform("/world", "VSV/Tool",
			m_MsgHeaderStamp, ros::Duration(1.0));
		tf::StampedTransform transform;
		m_Listener.lookupTransform("/world", "VSV/Tool",
			m_MsgHeaderStamp, transform);
		m_WorldSpaceToolPosition = transform * tf::Vector3(0.0, 0.0, 0.0);

		// Get the current world space robot position
		m_Listener.waitForTransform("/world", "VSV/base",
			m_MsgHeaderStamp, ros::Duration(1.0));
		m_Listener.lookupTransform("/world", "VSV/base",
			m_MsgHeaderStamp, transform);
		m_WorldSpaceRobotPosition = transform * tf::Vector3(0.0, 0.0, 0.0);
		
		//ROS_INFO("Base position %f %f %f",
			//	m_WorldSpaceRobotPosition.x(), m_WorldSpaceRobotPosition.y(), m_WorldSpaceRobotPosition.z());

		// Get the orientation of the robot
		auto Q = transform.getRotation();
		tf::Matrix3x3 M;
		M.setRotation(Q);
		double roll, pitch;
		M.getRPY(roll, pitch, m_Orientation);
	}
Eigen::Matrix4f TrajectoryPublisher::getMatrixForTf(const tf::TransformListener& tfListener,
                                                    const std::string& target_frame, const std::string& source_frame,
                                                    const ros::Time& time)
{
  Eigen::Matrix4f eigenTransform(Eigen::Matrix4f::Identity());   // identity if the lookup fails
  tf::StampedTransform tfTransform;

  try
  {
    if (tfListener.waitForTransform(target_frame, source_frame, time, ros::Duration(0.2f)))
    {
      tfListener.lookupTransform(target_frame, source_frame, time, tfTransform);
      pcl_ros::transformAsMatrix(tfTransform, eigenTransform);
    }
  }
  catch (const tf::TransformException& ex)
  {
    ROS_WARN("transform error: %s", ex.what());
  }
  return eigenTransform;
}
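// --- Usage sketch (assumption): the returned Eigen::Matrix4f plugs straight
// into pcl::transformPointCloud; the frames, time, and publisher instance are
// placeholders.
#include <pcl/common/transforms.h>
#include <pcl/point_types.h>

void transformCloudExample(TrajectoryPublisher& pub, const tf::TransformListener& listener,
                           const pcl::PointCloud<pcl::PointXYZ>& cloud_in,
                           pcl::PointCloud<pcl::PointXYZ>& cloud_out)
{
  // Transform the cloud from base_link into map using the looked-up matrix.
  Eigen::Matrix4f T = pub.getMatrixForTf(listener, "map", "base_link", ros::Time(0));
  pcl::transformPointCloud(cloud_in, cloud_out, T);
}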
void Explore::transFromOdomToMapPosition(float x_odom_pose, float y_odom_pose, float theta,
		float &x_map_pose, float &y_map_pose, tf::Quaternion& q){

	ros::Time now = ros::Time(0);
	tf::StampedTransform tfMO;								//	odom in map
	tf_listener_.waitForTransform("/map", "/odom", now, ros::Duration(1.0));
	tf_listener_.lookupTransform ("/map", "/odom", now,  tfMO);


	tf::Transform tfOD;

	tf::Quaternion quat;
	quat.setRPY(0, 0, theta);

	tfOD.setOrigin(tf::Vector3(x_odom_pose, y_odom_pose, 0.0));			//	target in odom
	tfOD.setRotation( quat );

	tf::Transform tfMD = tfMO * tfOD;							//	target in map


	x_map_pose = tfMD.getOrigin ()[0];				//	target coordinates in the map frame
	y_map_pose = tfMD.getOrigin ()[1];				//	target coordinates in the map frame
	q = tfMD.getRotation();
	
	//ROS_INFO("ODOM TO MAP x_odom = %f,  y_odom = %f, x_map = %f,  y_map = %f", x_odom_pose, y_odom_pose, x_map_pose, y_map_pose);
}
    void simulateOneCycle()
    {
        tf::StampedTransform transform;
        try{
            listener.waitForTransform("map", "EEframe", ros::Time(0), ros::Duration(1.0));
            listener.lookupTransform("map", "EEframe", ros::Time(0), transform);
        }
        catch (const tf::TransformException& ex){
            ROS_ERROR("%s", ex.what());
        }

        // This is correct, but does not work.
        // state_ = transform;

        KDL::Frame state_copy;
        tf::transformTFToKDL(state_, state_copy);

        nh_.getParamCached("/cds_controller_pr2/run_controller", run_controller_flag);
        if (run_controller_flag == 1.0){  // Controller running -> update next desired position
            tf::transformKDLToTF(cds_.update(state_copy), state_);
        } else {
            state_ = transform;
        }

        broadcastTF();
    }
    bool getTransform(const std::string& refFrame,
                      const std::string& childFrame,
                      tf::StampedTransform& transform)
    {
        std::string errMsg;

        if ( !_tfListener.waitForTransform(refFrame,
                                           childFrame,
                                           ros::Time(0),
                                           ros::Duration(0.5),
                                           ros::Duration(0.01),
                                           &errMsg)
           )
        {
            ROS_ERROR_STREAM("Unable to get pose from TF: " << errMsg);
            return false;
        }
        else
        {
            try
            {
                _tfListener.lookupTransform( refFrame, childFrame,
                                             ros::Time(0),                  //get latest available
                                             transform);
            }
            catch ( const tf::TransformException& e)
            {
                ROS_ERROR_STREAM("Error in lookupTransform of " << childFrame << " in " << refFrame);
                return false;
            }

        }
        return true;
    }
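    // --- Usage sketch (assumption): invoked from another method of the same
    // class; the frame names are placeholders.
    void lookupMarkerExample()
    {
        tf::StampedTransform marker_tf;
        if (getTransform("base_link", "ar_marker", marker_tf))
        {
            tf::Vector3 p = marker_tf.getOrigin();
            ROS_INFO("marker at (%.3f, %.3f, %.3f)", p.x(), p.y(), p.z());
        }
    }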
void Explore::transfromRobotToMapPosition(float x_robot, float y_robot, float &x_map, float &y_map){

	ros::Time now = ros::Time::now();

	tf::StampedTransform tfRO;
	tf_listener_.waitForTransform("/base_link", "/odom", now, ros::Duration(1.0));
	tf_listener_.lookupTransform ("/base_link", "/odom", now,  tfRO);
	tf::Transform tfOR = tfRO.inverse();


	tf::StampedTransform tfOM;
	tf_listener_.waitForTransform("/odom", "/map", now, ros::Duration(1.0));
	tf_listener_.lookupTransform ("/odom", "/map", now,  tfOM);
	tf::Transform tfMO = tfOM.inverse();


	tf::Transform tfRM = tfMO*tfOR;						//	robot in map (computed but not used below)

	float noused = 0.0;

	tf::Transform tfRD;
	tfRD.setOrigin(tf::Vector3(x_robot, y_robot, noused));

	tf::Transform tfOD = tfOR * tfRD;


	tf::Transform tfMD = tfMO * tfOD;


	x_map = tfMD.getOrigin ()[0];				//	target coordinates in the map frame
	y_map = tfMD.getOrigin ()[1];				//	target coordinates in the map frame


}
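// --- Equivalent sketch (not from the source): tf can collapse the manual
// chain above into a single transformPoint call, assuming the same /base_link
// and /map frames. The method name is hypothetical.
void Explore::transfromRobotToMapPositionDirect(float x_robot, float y_robot, float &x_map, float &y_map){

	geometry_msgs::PointStamped in, out;
	in.header.frame_id = "/base_link";
	in.header.stamp = ros::Time(0);			//	latest available
	in.point.x = x_robot;
	in.point.y = y_robot;
	in.point.z = 0.0;

	tf_listener_.transformPoint("/map", in, out);

	x_map = out.point.x;
	y_map = out.point.y;
}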
void SpencerRobotReader::setRobotJointLocation(tf::TransformListener &listener, Joint* joint) {
    tf::StampedTransform transform;
    std::string jointId = "/";
    std::vector<double> jointOrientation;
    bg::model::point<double, 3, bg::cs::cartesian> jointPosition;
    jointId.append(joint->getName());
    try {
        ros::Time now = ros::Time::now();
        listener.waitForTransform("/map", jointId,
                now, ros::Duration(3.0));
        listener.lookupTransform("/map", jointId,
                now, transform);

        //Joint position
        jointPosition.set<0>(transform.getOrigin().x());
        jointPosition.set<1>(transform.getOrigin().y());
        jointPosition.set<2>(transform.getOrigin().z());

        //Joint orientation
        //curRobot->orientation.push_back(tf::getRoll(transform.getRotation()));
        //curRobot->orientation.push_back(tf::getPitch(transform.getRotation()));
        jointOrientation.push_back(0.0);
        jointOrientation.push_back(0.0);
        jointOrientation.push_back(tf::getYaw(transform.getRotation()));

        joint->setTime(now.toNSec());
        joint->setPosition(jointPosition);
        joint->setOrientation(jointOrientation);

    } catch (const tf::TransformException& ex) {
        ROS_ERROR("%s", ex.what());
    }
}
void TFListener::transformPoint(const tf::TransformListener& listener)
{
	_laser_point.header.frame_id    = "laser";
	_camera_point.header.frame_id   = "base_camera";
	_base_point.header.frame_id     = "base_footprint";
	_odomPoint.header.frame_id      = "odom";
	
	_laser_point.header.stamp       = ros::Time();
	_camera_point.header.stamp      = ros::Time();
	_base_point.header.stamp        = ros::Time();
	_odomPoint.header.stamp         = ros::Time();

	try	//Try transforming the LRF
	{
		geometry_msgs::PointStamped baseLinkToBaseLaser;
		listener.transformPoint("base_link", _laser_point, baseLinkToBaseLaser);
	}
	catch(tf::TransformException& ex)
	{
		ROS_ERROR("Received an exception trying to transform a point from \"laser\" to \"base_link\": %s", ex.what());
	}

	try	//Try transforming the camera
	{
		geometry_msgs::PointStamped baseLinkToBaseCamera;
		listener.transformPoint("base_link", _camera_point, baseLinkToBaseCamera);
			//1. (Frame being applied offset)
			//2. (Point from sensor)
			//3. ("returned" Point with applied offset)
	}
	catch(tf::TransformException& ex)
	{
		ROS_ERROR("Received an exception trying to transform a point from \"base_camera\" to \"base_link\": %s", ex.what());
	}
}
void BaseFootprint::jsCallback(const sensor_msgs::JointState::ConstPtr & ptr)
{
  ros::Time time = ptr->header.stamp;
  tf::StampedTransform tf_odom_to_base, tf_odom_to_left_foot, tf_odom_to_right_foot;
  
  ROS_DEBUG("JointState callback function, computing frame %s", m_baseFootPrintID.c_str());
  try {
    m_listener.lookupTransform(m_odomFrameId, m_lfootFrameId, time, tf_odom_to_left_foot);
    m_listener.lookupTransform(m_odomFrameId, m_rfootFrameId, time, tf_odom_to_right_foot);
    m_listener.lookupTransform(m_odomFrameId, m_baseFrameId,  time, tf_odom_to_base);
  } catch (const tf::TransformException& ex){
    ROS_ERROR("%s",ex.what());
    return ;
  }
  
  tf::Vector3 new_origin = (tf_odom_to_right_foot.getOrigin() + tf_odom_to_left_foot.getOrigin())/2.0; // middle of both feet
  double height = std::min(tf_odom_to_left_foot.getOrigin().getZ(), tf_odom_to_right_foot.getOrigin().getZ()); // fix to lowest foot
  new_origin.setZ(height);
  
  // adjust yaw according to torso orientation, all other angles 0 (= in z-plane)
  double roll, pitch, yaw;
  tf_odom_to_base.getBasis().getRPY(roll, pitch, yaw);
  
  tf::Transform tf_odom_to_footprint(tf::createQuaternionFromYaw(yaw), new_origin);
  tf::Transform tf_base_to_footprint = tf_odom_to_base.inverse() * tf_odom_to_footprint;
  
  // publish transform with parent m_baseFrameId and new child m_baseFootPrintID
  // i.e. transform from m_baseFrameId to m_baseFootPrintID
  m_brBaseFootPrint.sendTransform(tf::StampedTransform(tf_base_to_footprint, time, m_baseFrameId, m_baseFootPrintID));
  ROS_DEBUG("Published Transform %s --> %s", m_baseFrameId.c_str(), m_baseFootPrintID.c_str());
  
  return;

}
    void receiveInformation(const geometry_msgs::PointStampedConstPtr &person)
    {

        notReceivedNumber = 0;
        hold = false;

        try
        {

            listener.waitForTransform(robot_frame, person->header.stamp, person->header.frame_id, person->header.stamp, fixed_frame_id, ros::Duration(10.0) );
            listener.transformPoint(robot_frame, person->header.stamp, *person, fixed_frame_id, personInRobotBaseFrame);

            cv::Point3d personPoint;
            personPoint.x = personInRobotBaseFrame.point.x;
            personPoint.y = personInRobotBaseFrame.point.y;
            personPoint.z = personInRobotBaseFrame.point.z;

            distanceToPerson = cv::norm(personPoint);
            ROS_ERROR("Person distance: %f", distanceToPerson);
        }
        catch(const tf::TransformException& ex)
        {
            ROS_WARN("%s",ex.what());
            //ros::Duration(1.0).sleep();
            return;
        }

        personInRobotBaseFrame.header.frame_id = robot_frame;
    }
static int getHumanLocation(tf::TransformListener& listener){
    tf::StampedTransform transform;
    try{
        ros::Time now = ros::Time::now();
        listener.waitForTransform("/map", "/human_base",
                              now, ros::Duration(3.0));
        listener.lookupTransform("/map", "/human_base",
                               now, transform);
        
        humanConf.dof[0] = transform.getOrigin().x();
        humanConf.dof[1] = transform.getOrigin().y();
        humanConf.dof[2] = transform.getOrigin().z()+1.0;
        humanConf.dof[3] = 0.0;
        humanConf.dof[4] = 0.0;
        humanConf.dof[5] = tf::getYaw(transform.getRotation());
        

        if(humanPose_DEBUG){
          printf("%f %f %f %f %f %f\n", transform.getOrigin().x(), transform.getOrigin().y(), 
                transform.getOrigin().z(), humanConf.dof[3], humanConf.dof[4], humanConf.dof[5]);
        }
    }
    catch (const tf::TransformException& ex){
        ROS_ERROR("%s",ex.what());
    }
  return TRUE;
}
void Explore::transfromRobotToOdomPosition(float x_robot, float y_robot, float &x_odom, float &y_odom){

// 	ROS_INFO(" enter transfromRobotToOdomPosition  robot pose (%f, %f)", x_robot, y_robot);

	ros::Time now = ros::Time::now();

	tf::StampedTransform tfRO;
	tf_listener_.waitForTransform("/base_link", "/odom", now, ros::Duration(1.0));
	tf_listener_.lookupTransform ("/base_link", "/odom",  now,  tfRO);
	tf::Transform tfOR = tfRO.inverse();

	float noused = 0.0;

	tf::Transform tfRD;
	tfRD.setOrigin(tf::Vector3(x_robot, y_robot, noused));

	tf::Transform tfOD = tfOR * tfRD;

	 x_odom = tfOD.getOrigin ()[0];				//	x_robot coordinate in the odom frame
	 y_odom = tfOD.getOrigin ()[1];				//	y_robot coordinate in the odom frame

//	 ROS_INFO("x_odom = %f,  y_odom = %f", x_odom, y_odom);
}
 void wallCB(const geometry_msgs::PoseStamped& poseMsg)
 {
     if (!mocapOn)
     {
         std::cout << "Bebop pose is publishing. THE WALL IS UP!" << std::endl;
         mocapOn = true;
     }
     geometry_msgs::Pose pose = poseMsg.pose;
     
     // Center
     tf::Vector3 center(0,0,0);
     try
     {
         tf::StampedTransform tf1;
         tf::StampedTransform tf2;
         tf::StampedTransform tf3;
         tf::StampedTransform tf4;
         tfl.lookupTransform("world","ugv1",ros::Time(0),tf1);
         tfl.lookupTransform("world","ugv2",ros::Time(0),tf2);
         tfl.lookupTransform("world","ugv3",ros::Time(0),tf3);
         tfl.lookupTransform("world","ugv4",ros::Time(0),tf4);
         center = (tf1.getOrigin() + tf2.getOrigin() + tf3.getOrigin() + tf4.getOrigin())/4;
     }
     catch(const tf::TransformException&)
     {
         // leave center at the origin if any lookup fails
     }
     
     // Boundary
     tf::Vector3 boundaryBottomLeft = center + boundaryOffsetBottomLeft;
     tf::Vector3 boundaryTopRight = center + boundaryOffsetTopRight;
     
     // Exceeding wall
     bool leftWall = pose.position.x <= boundaryBottomLeft.getX();
     bool rightWall = pose.position.x >= boundaryTopRight.getX();
     bool frontWall = pose.position.y <= boundaryBottomLeft.getY();
     bool backWall = pose.position.y >= boundaryTopRight.getY();
     bool bottomWall = pose.position.z <= boundaryBottomLeft.getZ();
     bool topWall = pose.position.z >= boundaryTopRight.getZ();
     
     if (leftWall or rightWall or frontWall or backWall or bottomWall or topWall)
     {
         wallOverride = true;
         
         // velocity command
         tf::Vector3 velCmd(leftWall - rightWall, frontWall - backWall, bottomWall - topWall);
         tf::Vector3 velCmdBody = tf::quatRotate(tf::Quaternion(pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w).inverse(),velCmd);
         
         // construct message and publish
         geometry_msgs::Twist twistMsg;
         twistMsg.linear.x = velCmdBody.getX();
         twistMsg.linear.y = velCmdBody.getY();
         twistMsg.linear.z = velCmdBody.getZ();
         velCmdPub.publish(twistMsg);
     }
     else
     {
         wallOverride = false;
     }
 }
void TransformPose::publish_all(tf::TransformListener& listener)
{

        geometry_msgs::PoseStamped odom_pose;
        odom_pose.header.frame_id = "/base_link";

        //we'll just use the most recent transform available for our simple example
        odom_pose.header.stamp = ros::Time();

        //just an arbitrary point in space
        odom_pose.pose.position.x = 0.0;
        odom_pose.pose.position.y = 0.0;
        odom_pose.pose.position.z = 0.0;

        odom_pose.pose.orientation.x = 0.0;
        odom_pose.pose.orientation.y = 0.0;
        odom_pose.pose.orientation.z = 0.0;
        odom_pose.pose.orientation.w = 1.0;


        try{
                ros::Time now = ros::Time::now();
                listener.waitForTransform("/odom", "/base_link", now, ros::Duration(5.0));
                geometry_msgs::PoseStamped base_pose;
                listener.transformPose("/odom", odom_pose, base_pose);

                my_odom.header.stamp = base_pose.header.stamp;
                my_odom.pose.pose.position.x = base_pose.pose.position.x;
                my_odom.pose.pose.position.y = base_pose.pose.position.y;
                my_odom.pose.pose.position.z = base_pose.pose.position.z;
                my_odom.pose.pose.orientation = base_pose.pose.orientation;

                //my_maximus_odom.twist.twist.linear.x = sqrt(pow(base_pose.pose.position.x - old_x, 2) + pow(base_pose.pose.position.y - old_y, 2)) / (my_maximus_odom.header.stamp.toSec() - old_time.toSec());
                my_odom.twist.twist.linear.x = xspeed.data;
                my_odom.twist.twist.linear.y = 0;
                my_odom.twist.twist.linear.z = 0;
                my_odom.twist.twist.angular.x = 0;
                my_odom.twist.twist.angular.y = 0;
                my_odom.twist.twist.angular.z = tspeed.data;
                //my_maximus_odom.twist.twist.angular.z = 1.1 * (tf::getYaw (my_maximus_odom.pose.pose.orientation) - old_theta) / (my_maximus_odom.header.stamp.toSec() - old_time.toSec());

                odom_pub.publish(my_odom);

                //old_x = base_pose.pose.position.x;
                //old_y = base_pose.pose.position.y;
                //old_theta = tf::getYaw (my_odom.pose.pose.orientation);
                //old_time = my_odom.header.stamp;
/*
                ROS_INFO("base_laser: (%.2f, %.2f. %.2f) -----> base_link: (%.2f, %.2f, %.2f) at time %.2f",
                        my_maximus_odom.pose.pose.position.x, my_maximus_odom.pose.pose.position.y, my_maximus_odom.pose.pose.position.z,
                        my_maximus_odom.twist.twist.linear.x, my_maximus_odom.twist.twist.linear.y, my_maximus_odom.twist.twist.angular.z, base_pose.header.stamp.toSec());
*/
        }
        catch(tf::TransformException& ex){
                ROS_ERROR("Received an exception trying to transform a point from \"odom\" to \"base_link\": %s", ex.what());
        }


}
	/** Callback: On new sensor data
	  */
	void onNewSensor_Laser2D(const sensor_msgs::LaserScanConstPtr & scan)
	{
		mrpt::utils::CTimeLoggerEntry tle(m_profiler,"onNewSensor_Laser2D");

		// Get the relative position of the sensor wrt the robot:
		tf::StampedTransform sensorOnRobot;
		try {
			mrpt::utils::CTimeLoggerEntry tle2(m_profiler,"onNewSensor_Laser2D.lookupTransform_sensor");
			m_tf_listener.lookupTransform(m_frameid_robot,scan->header.frame_id, ros::Time(0), sensorOnRobot);
		}
		catch (tf::TransformException &ex) {
			ROS_ERROR("%s",ex.what());
			return;
		}

		// Convert data to MRPT format:
		mrpt::poses::CPose3D sensorOnRobot_mrpt;
		mrpt_bridge::convert(sensorOnRobot,sensorOnRobot_mrpt);
		// In MRPT, CObservation2DRangeScan holds both: sensor data + relative pose:
		CObservation2DRangeScanPtr obsScan = CObservation2DRangeScan::Create();
		mrpt_bridge::convert(*scan,sensorOnRobot_mrpt, *obsScan);

		ROS_DEBUG("[onNewSensor_Laser2D] %u rays, sensor pose on robot %s", static_cast<unsigned int>(obsScan->scan.size()), sensorOnRobot_mrpt.asString().c_str() );

		// Get sensor timestamp:
		const double timestamp = scan->header.stamp.toSec();

		// Get robot pose at that time in the reference frame, typ: /odom -> /base_link
		mrpt::poses::CPose3D robotPose;
		try {
			mrpt::utils::CTimeLoggerEntry tle3(m_profiler,"onNewSensor_Laser2D.lookupTransform_robot");
			tf::StampedTransform tx;

			try {
				m_tf_listener.lookupTransform(m_frameid_reference,m_frameid_robot, scan->header.stamp, tx);
			}
			catch (tf::ExtrapolationException &) {
				// if we ask for a robot pose that is too recent, be happy with the latest available one:
				m_tf_listener.lookupTransform(m_frameid_reference,m_frameid_robot, ros::Time(0), tx);
			}
			mrpt_bridge::convert(tx,robotPose);
			ROS_DEBUG("[onNewSensor_Laser2D] robot pose %s", robotPose.asString().c_str() );
		}
		catch (tf::TransformException &ex) {
			ROS_ERROR("%s",ex.what());
			return;
		}

		// Insert into the observation history:
		TInfoPerTimeStep ipt;
		ipt.observation = obsScan;
		ipt.robot_pose  = robotPose;

		m_hist_obs_mtx.lock();
		m_hist_obs.insert( m_hist_obs.end(), TListObservations::value_type(timestamp,ipt) );
		m_hist_obs_mtx.unlock();

	} // end onNewSensor_Laser2D
float GoToSelectedBall::getRobotAngleInOdom(){

	ros::Time now = ros::Time(0);
	tf::StampedTransform tfOR;								//	robot in odom
	tf_listener_.waitForTransform("/odom", "/base_link", now, ros::Duration(1.0));
	tf_listener_.lookupTransform ("/odom", "/base_link", now,  tfOR);
	return tf::getYaw(tfOR.getRotation());

}
    void wrenchCallback(geometry_msgs::WrenchStamped::ConstPtr wrench_stamped) 
    {
        double cur_time = wrench_stamped->header.stamp.toSec();
        if(start_time == -1)
            start_time = cur_time;
        CartVec w;
        wrenchMsgToEigen(wrench_stamped->wrench, w);
        double force_mag = NORM(w(0, 0), w(1, 0), w(2, 0));
        tf::StampedTransform tool_loc_tf;
        try {
            tf_list.waitForTransform("/head_center", "/wipe_finger", wrench_stamped->header.stamp, ros::Duration(0.1));
            tf_list.lookupTransform("/head_center", "/wipe_finger", wrench_stamped->header.stamp, tool_loc_tf);
        }
        catch (const tf::TransformException& ex) {
            ROS_ERROR("%s", ex.what());
            return;
        }
        tool_loc_tf.mult(registration_tf, tool_loc_tf);
        btVector3 tool_loc = tool_loc_tf.getOrigin();
        PRGB query_pt;
        query_pt.x = tool_loc.x(); query_pt.y = tool_loc.y(); query_pt.z = tool_loc.z(); 
        vector<int> nk_inds(1);
        vector<float> nk_dists(1);
        kd_tree->nearestKSearch(query_pt, 1, nk_inds, nk_dists);
        int closest_ind = nk_inds[0];
        float closest_dist = nk_dists[0];

        hrl_phri_2011::ForceProcessed fp;
        fp.time_offset = cur_time - start_time;
        tf::transformStampedTFToMsg(tool_loc_tf, fp.tool_frame);
        fp.header = wrench_stamped->header;
        fp.header.frame_id = "/head_center";
        fp.wrench = wrench_stamped->wrench;
        fp.pc_ind = closest_ind;
        fp.pc_dist = closest_dist;
        fp.pc_pt.x = pc_head->points[closest_ind].x;
        fp.pc_pt.y = pc_head->points[closest_ind].y;
        fp.pc_pt.z = pc_head->points[closest_ind].z;
        fp.force_magnitude = force_mag;
        if(compute_norms) {
            fp.pc_normal.x = normals->points[closest_ind].normal[0];
            fp.pc_normal.y = normals->points[closest_ind].normal[1];
            fp.pc_normal.z = normals->points[closest_ind].normal[2];
        }

        // do ellipsoidal processing
        tf::Transform tool_loc_ell = ell_reg_tf * tool_loc_tf;
        tf::transformTFToMsg(tool_loc_ell, fp.tool_ell_frame);
        btVector3 tloce_pos = tool_loc_ell.getOrigin();
        ell.cartToEllipsoidal(tloce_pos.x(), tloce_pos.y(), tloce_pos.z(), 
                              fp.ell_coords.x, fp.ell_coords.y, fp.ell_coords.z);

        fp_list.push_back(fp);
        pub_ind++;
        if(pub_ind % 100 == 0)
            ROS_INFO("Recorded %d samples", pub_ind);
    }
//Broadcasts the transform from the kinect_rgb_optical frame to the ground frame
//If kinect_rgb_optical has a grandparent (i.e. kinect_link), then a transform from the kinect_link frame to the ground frame is broadcast such that the given transform still holds as specified above
void broadcastKinectTransform(const btTransform& transform, const std::string& kinect_rgb_optical, const std::string& ground, tf::TransformBroadcaster& broadcaster, const tf::TransformListener& listener) {
	std::string link;
	if (listener.getParent(kinect_rgb_optical, ros::Time(0), link) && listener.getParent(link, ros::Time(0), link)) {
			tf::StampedTransform tfTransform;
			listener.lookupTransform (link, kinect_rgb_optical, ros::Time(0), tfTransform);
			broadcaster.sendTransform(tf::StampedTransform(transform * tfTransform.asBt().inverse(), ros::Time::now(), ground, link));
	} else {
			broadcaster.sendTransform(tf::StampedTransform(transform, ros::Time::now(), ground, kinect_rgb_optical));
	}
}
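// --- Usage sketch (assumption): broadcasting a calibrated camera-to-ground
// transform once per cycle; the identity transform and frame names below are
// placeholders for a real calibration result.
void broadcastLoop(tf::TransformBroadcaster& broadcaster, const tf::TransformListener& listener)
{
	btTransform camera_in_ground;
	camera_in_ground.setIdentity();
	ros::Rate rate(30);
	while (ros::ok())
	{
		broadcastKinectTransform(camera_in_ground, "kinect_rgb_optical", "ground",
		                         broadcaster, listener);
		rate.sleep();
	}
}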
  //Close the gripper
  void close(){
    
    ROS_INFO("Closing Gripper");
    ros::Duration(0.5).sleep();

    //wait for the listener to get the first message
    listener_.waitForTransform("base_footprint", "r_gripper_l_finger_tip_frame", 
                               ros::Time(0), ros::Duration(1.0));
    
    //we will record transforms here
    tf::StampedTransform start_transform;
    tf::StampedTransform current_transform;

    //record the starting transform from the gripper to the base frame
    listener_.lookupTransform("base_footprint", "r_gripper_l_finger_tip_frame", 
                              ros::Time(0), start_transform);

    bool done = false;
    pr2_controllers_msgs::Pr2GripperCommand gripper_cmd;
    gripper_cmd.position = 0.0;
    gripper_cmd.max_effort = 50.0;

    ros::Rate rate(10.0);

    double dist_moved_before = 0.0;
    double dist_moved = 0.0;
    while (!done && nh_.ok())
    {
      r_gripper_.publish(gripper_cmd);

      rate.sleep();
      //get the current transform
      try
      {
        listener_.lookupTransform("base_footprint", "r_gripper_l_finger_tip_frame", 
                                  ros::Time(0), current_transform);
      }
      catch (const tf::TransformException& ex)
      {
        ROS_ERROR("%s",ex.what());
        break;
      }
  
      //check whether the gripper has finished closing or has hit some object
      tf::Transform relative_transform = 
        start_transform.inverse() * current_transform;

      dist_moved_before = dist_moved;
      dist_moved = relative_transform.getOrigin().length();

      //ROS_INFO("%f",dist_moved);
      if(dist_moved > 0.03 || dist_moved < dist_moved_before) done = true;
    
    }
  }
tf::StampedTransform TargetPointSender::getRobotPosGL()
{
	tf::StampedTransform robot_gl;
	try{
		tf_listener_.waitForTransform(world_frame_, robot_frame_, ros::Time(0.0), ros::Duration(4.0));
		tf_listener_.lookupTransform(world_frame_, robot_frame_, ros::Time(0.0), robot_gl);
	}catch(tf::TransformException &e){
		ROS_WARN_STREAM("tf::TransformException: " << e.what());
	}
	return robot_gl;
}
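// --- Usage sketch (assumption): on failure the function logs a warning and
// returns a default-constructed transform, so the empty frame id can serve as
// a validity check.
void printRobotPosExample(TargetPointSender& sender)
{
	tf::StampedTransform robot_gl = sender.getRobotPosGL();
	if (robot_gl.frame_id_.empty())
		ROS_WARN("robot pose not available yet");
	else
		ROS_INFO("robot at (%.2f, %.2f)", robot_gl.getOrigin().x(), robot_gl.getOrigin().y());
}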
        void callback(const sensor_msgs::ImageConstPtr& msg) {
            // First extract the image to a cv::Mat structure, from opencv2
            cv::Mat img(cv_bridge::toCvShare(msg,"mono8")->image);
            try{
                // Then receive the transformation between the robot body and
                // the world
                tf::StampedTransform transform;
                // Use the listener to find where we are. Check the
                // tutorials... (note: the arguments of the 2 functions below
                // need to be completed
                listener_.waitForTransform("","",ros::Time::now(),ros::Duration(1.0));
                listener_.lookupTransform("","", ros::Time::now(), transform);
                

                double proj_x = transform.getOrigin().x();
                double proj_y = transform.getOrigin().y();
                double proj_theta = -tf::getYaw(transform.getRotation());
                printf("We were at %.2f %.2f theta %.2f\n",proj_x,proj_y,proj_theta*180./M_PI);

                // Once the transformation is known, you can use it to find the
                // affine transform mapping the local floor to the global floor
                // and use cv::warpAffine to fill p_floor
                cv::Mat_<uint8_t> p_floor(floor_size_pix,floor_size_pix,0xFF);
                cv::Mat affine = (cv::Mat_<float>(2,3) 
                        << 1, 0, 0,   
                           0, 1, 0);
                cv::warpAffine(img,p_floor,affine, p_floor.size(), 
                        cv::INTER_NEAREST,cv::BORDER_CONSTANT,0xFF);

                // This publishes the projected floor on the p_floor topic
                cv_bridge::CvImage pbr(msg->header,"mono8",p_floor);
                ptpub_.publish(pbr.toImageMsg());

                // Now that p_floor and floor have the same size, you can use
                // the following for loop to go through all the pixels and fuse
                // the current observation with the previous one.
                for (unsigned int j=0;j<(unsigned)floor_.rows;j++) {
                    for (unsigned int i=0;i<(unsigned)floor_.cols;i++) {
                        uint8_t p = p_floor.at<uint8_t>(i,j);
                        uint8_t f = floor_.at<uint8_t>(i,j);
                        
                        // Compute the new value of pixel i,j here

                        floor_.at<uint8_t>(i,j) = f;
                    }
                }
                // Finally publish the floor estimation.
                cv_bridge::CvImage br(msg->header,"mono8",floor_);
                tpub_.publish(br.toImageMsg());
            }
            catch (const tf::TransformException& ex){
                ROS_ERROR("%s",ex.what());
            }
        }
void Explore::getRobotPositionInOdom(float &x_odom_pose, float &y_odom_pose){

	ros::Time now = ros::Time(0);
	tf::StampedTransform tfOR;								//	robot in odom
	tf_listener_.waitForTransform("/odom", "/base_link", now, ros::Duration(1.0));
	tf_listener_.lookupTransform ("/odom", "/base_link", now,  tfOR);

	x_odom_pose = tfOR.getOrigin ()[0];				//	robot coordinates in the odom frame
	y_odom_pose = tfOR.getOrigin ()[1];				//	robot coordinates in the odom frame

}
float Explore::getRobotAngleInOdom(){

	ros::Time now = ros::Time(0);
	tf::StampedTransform tfOR;								//	robot in odom
	tf_listener_.waitForTransform("/odom", "/base_link", now, ros::Duration(1.0));
	tf_listener_.lookupTransform ("/odom", "/base_link", now,  tfOR);

//	x_odom_pose = tfOR.getOrigin ()[0];				//	robot coordinates in the odom frame
//	y_odom_pose = tfOR.getOrigin ()[1];				//	robot coordinates in the odom frame

	return tf::getYaw(tfOR.getRotation());

}
//  void cloud_cb (const sensor_msgs::PointCloud2ConstPtr& pc)
  void callback (const sensor_msgs::PointCloud2ConstPtr& pc, const sensor_msgs::ImageConstPtr& im)
  {
    if (!cloud_and_image_received_)
    {
      //Write cloud
      //transform to target frame
      bool found_transform = tf_.waitForTransform(pc->header.frame_id, to_frame_,
                                                  pc->header.stamp, ros::Duration(10.0));
      tf::StampedTransform transform;
      if (found_transform)
      {
        //ROS_ASSERT_MSG(found_transform, "Could not transform to camera frame");
        tf_.lookupTransform(to_frame_,pc->header.frame_id, pc->header.stamp, transform);
        ROS_DEBUG("[TransformPointcloudNode:] Point cloud published in frame %s", pc->header.frame_id.c_str());
      }
      else
      {
        ROS_ERROR("No transform for pointcloud found!!!");
        return;
      }
      bag_.write(input_cloud_topic_, pc->header.stamp, *pc);
      geometry_msgs::TransformStamped transform_msg;
      tf::transformStampedTFToMsg(transform, transform_msg);
      bag_.write(input_cloud_topic_ + "/transform", transform_msg.header.stamp, transform_msg);
      ROS_INFO("Wrote cloud to %s", bag_name_.c_str());

      //Write image
      found_transform = tf_.waitForTransform(im->header.frame_id, to_frame_,
                                                  im->header.stamp, ros::Duration(10.0));
      if (found_transform)
      {
        //ROS_ASSERT_MSG(found_transform, "Could not transform to camera frame");
        tf_.lookupTransform(to_frame_,im->header.frame_id, im->header.stamp, transform);
        ROS_DEBUG("[TransformPointcloudNode:] Point cloud published in frame %s", im->header.frame_id.c_str());
      }
      else
      {
        ROS_ERROR("No transform for image found!!!");
        return;
      }
      bag_.write(input_image_topic_, im->header.stamp, im);
      tf::transformStampedTFToMsg(transform, transform_msg);
      bag_.write(input_image_topic_ + "/transform", transform_msg.header.stamp, transform_msg);
      ROS_INFO("Wrote image to %s", bag_name_.c_str());

      cam_info_ = ros::topic::waitForMessage<sensor_msgs::CameraInfo>(input_camera_info_topic_);
      bag_.write(input_camera_info_topic_, cam_info_->header.stamp, cam_info_);
      ROS_INFO("Wrote Camera Info to %s", bag_name_.c_str());
      cloud_and_image_received_ = true;
    }
  }
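// --- Wiring sketch (assumption, not from the source): a paired cloud+image
// callback like the one above is typically registered through message_filters;
// the topic names and the owning class (BagRecorder) are placeholders.
#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>

void setupSync(ros::NodeHandle& nh, BagRecorder& recorder)
{
  // statics keep the subscribers and synchronizer alive after setup returns
  static message_filters::Subscriber<sensor_msgs::PointCloud2> cloud_sub(nh, "points", 1);
  static message_filters::Subscriber<sensor_msgs::Image> image_sub(nh, "image", 1);
  static message_filters::TimeSynchronizer<sensor_msgs::PointCloud2, sensor_msgs::Image>
      sync(cloud_sub, image_sub, 10);
  sync.registerCallback(boost::bind(&BagRecorder::callback, &recorder, _1, _2));
}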