Example No. 1
	bool poseTracker::newPose(tf::StampedTransform transform){

		Eigen::Vector3f curr(transform.getOrigin().x(), transform.getOrigin().y(), transform.getOrigin().z());
		pose.push(curr);
		// keep a sliding window of the last 50 poses; report "new" until the window is full
		if(pose.size() < 50){
			return true;
		}

		// squared displacement between the oldest pose in the window and the current one
		float squaredNorm = (pose.front() - curr).squaredNorm();
		pose.pop();
		bool moved = squaredNorm > 0.2f;	// 0.2 m^2, i.e. roughly 0.45 m of travel
		std::cout << moved << "  squared displacement: " << squaredNorm << std::endl;
		return moved;

	}
Example No. 2
// compute nav fn (using wavefront assumption)
void getNavFn(double* cost_map, tf::StampedTransform &robot_to_map_transform, double* nav_fn) {
  Point robot_pt(robot_to_map_transform.getOrigin().getX() / MAP_RESOLUTION,robot_to_map_transform.getOrigin().getY() / MAP_RESOLUTION);
  Point goal_pt(goal_pose.getOrigin().getX() / MAP_RESOLUTION, goal_pose.getOrigin().getY() / MAP_RESOLUTION);
  ROS_INFO("[getNavFn]\trobot_pt: (%d,%d), goal_pt(%d,%d)",robot_pt.x,robot_pt.y,goal_pt.x,goal_pt.y);

  // setup init map
  bool init_set_map[MAP_CELLS];
  for (int i=0; i<MAP_CELLS;i++) {
    init_set_map[i] = false;
  }
  setVal(init_set_map,goal_pt.x,goal_pt.y,true);

  //updateMapDijkstra(nav_fn, init_set_map, goal_pt, cost_map, INT_MAX);
  LPN2(nav_fn, cost_map, goal_pt, robot_pt);
}
Example No. 3
bool HectorMappingRos::rosPointCloudToDataContainer(const sensor_msgs::PointCloud& pointCloud, const tf::StampedTransform& laserTransform, hectorslam::DataContainer& dataContainer, float scaleToMap)
{
  size_t size = pointCloud.points.size();
  
  dataContainer.clear();
  
  tf::Vector3 laserPos (laserTransform.getOrigin());
  dataContainer.setOrigo(Eigen::Vector2f(laserPos.x(), laserPos.y())*scaleToMap);
  
  for (size_t i = 0; i < size; ++i)
  {
    const geometry_msgs::Point32& currPoint(pointCloud.points[i]);

    float dist_sqr = currPoint.x*currPoint.x + currPoint.y*currPoint.y;

    if ( (dist_sqr > p_sqr_laser_min_dist_) && (dist_sqr < p_sqr_laser_max_dist_) ){

      // skip very close points behind the scanner (typically returns from the robot itself)
      if ( (currPoint.x < 0.0f) && (dist_sqr < 0.50f) ){
        continue;
      }

      tf::Vector3 pointPosBaseFrame(laserTransform * tf::Vector3(currPoint.x, currPoint.y, currPoint.z));

      float pointPosLaserFrameZ = pointPosBaseFrame.z() - laserPos.z();

      // keep only points within the configured height band around the laser plane
      if (pointPosLaserFrameZ > p_laser_z_min_value_ && pointPosLaserFrameZ < p_laser_z_max_value_)
      {
        dataContainer.add(Eigen::Vector2f(pointPosBaseFrame.x(), pointPosBaseFrame.y())*scaleToMap);
      }
    }
  }
  
  return true;
}
Example No. 4
carmen_6d_point 
get_carmen_pose6D_from_matrix(Matrix visual_odometry_transformation_matrix)
{
	// See: http://www.ros.org/wiki/tf
	tf::Transform visual_odometry_pose;
	tf::Transform carmen_pose;
	carmen_6d_point carmen_pose_6d;

	// convert the visual odometry output matrix to the tf::Transform type.
	visual_odometry_pose = convert_matrix_to_tf_transform(visual_odometry_transformation_matrix);

	// compute the current visual odometry pose with respect to the carmen coordinate system
	// (a change of basis: the visual odometry motion is conjugated by the fixed car-to-visual-odometry transform)
	carmen_pose = g_car_to_visual_odometry_transform * visual_odometry_pose * g_car_to_visual_odometry_transform.inverse();

	double yaw, pitch, roll;
	tf::Matrix3x3(carmen_pose.getRotation()).getRPY(roll, pitch, yaw);

	carmen_pose_6d.x = carmen_pose.getOrigin().x();
	carmen_pose_6d.y = carmen_pose.getOrigin().y();
	carmen_pose_6d.z = carmen_pose.getOrigin().z();

	carmen_pose_6d.yaw = yaw;
	carmen_pose_6d.pitch = pitch;
	carmen_pose_6d.roll = roll;

	 return carmen_pose_6d;
}
Example No. 5
    // wait for one camerainfo, then shut down that subscriber
    void cam_info_callback(const sensor_msgs::CameraInfo &msg)
    {
        camParam = aruco_ros::rosCameraInfo2ArucoCamParams(msg, useRectifiedImages);

        // handle cartesian offset between stereo pairs
        // (for the second camera of a stereo pair, P[3] = -fx*Tx and P[7] = -fy*Ty,
        // so the expressions below recover the (Tx, Ty) baseline offset);
        // see the sensor_msgs/CameraInfo documentation for details
        rightToLeft.setIdentity();
        rightToLeft.setOrigin(
            tf::Vector3(
                -msg.P[3]/msg.P[0],
                -msg.P[7]/msg.P[5],
                0.0));

        cam_info_received = true;
        cam_info_sub.shutdown();
    }
Example No. 6
    bool spawnObject(){
        try{
            ros::Time time = ros::Time::now();
            tf_listener.waitForTransform("table_top", "object", time, ros::Duration(2.0));
            tf_listener.lookupTransform("table_top", "object", time, transform);
        }catch(const tf::TransformException &ex){
            return false;
        }

        objectHeight = transform.getOrigin().getZ();
        moveit_msgs::ApplyPlanningScene srv;
        moveit_msgs::PlanningScene planning_scene;
        planning_scene.is_diff = true;

        moveit_msgs::CollisionObject object;
        object.header.frame_id = "object";
        object.id = "can";

        shape_msgs::SolidPrimitive primitive;
        primitive.type = primitive.CYLINDER;
        primitive.dimensions.push_back(objectHeight); //height
        primitive.dimensions.push_back(0.04); //radius
        object.primitives.push_back(primitive);

        geometry_msgs::Pose pose;
        pose.orientation.w = 1.0;
        pose.position.z = -objectHeight/2;
        object.primitive_poses.push_back(pose);

        place_pose.header.frame_id = "table_top";
        place_pose.pose.orientation.x = 0.5;
        place_pose.pose.orientation.y = 0.5;
        place_pose.pose.orientation.z = -0.5;
        place_pose.pose.orientation.w = 0.5;
        place_pose.pose.position.x = transform.getOrigin().getX();
        place_pose.pose.position.y = transform.getOrigin().getY();
        place_pose.pose.position.z = transform.getOrigin().getZ() + 0.005;

        object.operation = object.ADD;
        planning_scene.world.collision_objects.push_back(object);

        srv.request.scene = planning_scene;
        planning_scene_diff_client.call(srv);

        return true;
    }
Example No. 7
/**
 * Convert tf::StampedTransform to string
 */
template<> std::string toString<tf::StampedTransform>(const tf::StampedTransform& p_transform)
{
  std::stringstream ss;

  ss << "StampedTransform(Quaternion=" << toString(p_transform.getRotation()) << "; Vector3=" << toString(p_transform.getOrigin()) << ")";

  return ss.str();
}
Example No. 8
 void poseStampedToStampedTF(geometry_msgs::PoseStamped &msg, tf::StampedTransform &stampedTF, std::string child)
 {
   tf::Transform btTrans;
   stampedTF.stamp_ = msg.header.stamp;
   stampedTF.frame_id_ = msg.header.frame_id;
   stampedTF.child_frame_id_ = child;
   
   tf::poseMsgToTF(msg.pose, btTrans);
   stampedTF.setData(btTrans);
 }
Example No. 9
void StigmergyPlanner::makeViewFacingMarker(tf::StampedTransform robot_pose)
{
  int_marker.header.frame_id = robot_pose.frame_id_;
  int_marker.header.stamp = robot_pose.stamp_;

  int_marker.pose.position.x = robot_pose.getOrigin().getX();
  int_marker.pose.position.y = robot_pose.getOrigin().getY();
  int_marker.pose.position.z = robot_pose.getOrigin().getZ();
  int_marker.scale = 1;

  int_marker.name = "view_facing";
  int_marker.description = "3D Planning pencil";

  InteractiveMarkerControl control;

  // make a control that rotates around the view axis
  //control.orientation_mode = InteractiveMarkerControl::VIEW_FACING;
  //control.interaction_mode = InteractiveMarkerControl::ROTATE_AXIS;
  //control.orientation.w = 1;
  //control.name = "rotate";

  //int_marker.controls.push_back(control);

  // create a box in the center which should not be view facing,
  // but move in the camera plane.
  control.orientation_mode = InteractiveMarkerControl::VIEW_FACING;
  control.interaction_mode = InteractiveMarkerControl::MOVE_PLANE;
  control.independent_marker_orientation = true;
  control.name = "move";

  control.markers.push_back( makeBox(int_marker) );
  control.always_visible = true;

  int_marker.controls.push_back(control);

  control.interaction_mode = InteractiveMarkerControl::MENU;
  control.name = "menu";
  int_marker.controls.push_back(control);

  server->insert(int_marker);
  server->setCallback(int_marker.name, boost::bind(&StigmergyPlanner::processFeedback,this,_1));
  menu_handler.apply(*server, int_marker.name);
}
Example No. 10
inline bool checkTF(tf::StampedTransform& transform, geometry_msgs::PoseStamped& robo,
					tf::TransformListener& listener)
					tf::TransformListener& listener)
{
	try{
		//listener.waitForTransform(header_frame, robot_frame, ros::Time(0), ros::Duration(1.0));
	    listener.lookupTransform(header_frame, robot_frame, ros::Time(0), transform);
        //copy robot position
        robo.pose.position.x=transform.getOrigin().x();
        robo.pose.position.y=transform.getOrigin().y();
        robo.pose.position.z=0;
        float angle = tf::getYaw(transform.getRotation());
        robo.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(0,0,angle);
		return true;
	}
	catch (const tf::TransformException &ex){
		// cout<<"waiting for global and robot frame relation"<<endl;
        ROS_ERROR("%s",ex.what());
		return false;
	}
}
Example No. 11
void sptam::sptam_node::fixOdomFrame(const CameraPose& cameraPose, const tf::StampedTransform& camera_to_odom, const ros::Time& t)
{
  tf::Pose camera_to_map;
  CameraPose2TFPose( cameraPose, camera_to_map );

  // compute the new difference between map and odom:
  // (camera -> map) composed with (camera -> odom)^-1 gives the odom -> map transform
  tf::Transform odom_to_map = camera_to_map * camera_to_odom.inverse();

  std::lock_guard<std::mutex> lock( odom_to_map_mutex_ );
  odom_to_map_ = odom_to_map;
}
Example No. 12
bool
WmObjectCapture::updateObjectFrame(const ros::Time& stamp, tf::StampedTransform& m2c)
{
	std::vector<tf::StampedTransform> valids;

	getValidMarks(valids, stamp);
	if(valids.size()<4)
	{
		//ROS_INFO("No enough marks detected [%zu]", valids.size());
		return false;
	}

	tf::Transform media;
	double stdev;
	getBestTransform(valids, media, stdev);

	double m_tolerance2 = 0.006*0.006;
	if(stdev<m_tolerance2)
	{
		//ROS_INFO("TRANSFORM ACCEPTED ");
		m2c.child_frame_id_ = objectFrameId_;
		m2c.frame_id_ = cameraFrameId_;
		m2c.stamp_ = ros::Time::now();

		m2c.setOrigin(media.getOrigin());
		m2c.setRotation(media.getRotation());

		ROS_DEBUG("NEW FINAL (%lf, %lf, %lf) (%lf, %lf, %lf, %lf)",
				m2c.getOrigin().x(), m2c.getOrigin().y(), m2c.getOrigin().z(),
				m2c.getRotation().x(), m2c.getRotation().y(), m2c.getRotation().z(), m2c.getRotation().w());

		try
		{
			tfBroadcaster_.sendTransform(m2c);

		}catch(tf::TransformException & ex)
		{
			ROS_WARN("WmObjectCapture::updateObjectFrame %s",ex.what());
		}
		return true;
	}

	//ROS_INFO("TRANSFORM REJECTED ");
	return false;

}
Example No. 13
void InputOutputLinControl::odomMsgToStampedTransformB(double displacement, double yaw, nav_msgs::Odometry pose_odometry, tf::StampedTransform& pose_stamped)
{
	pose_stamped.stamp_ = pose_odometry.header.stamp;
	pose_stamped.frame_id_ = pose_odometry.header.frame_id;
	pose_stamped.child_frame_id_ = pose_odometry.child_frame_id;

	tf::Vector3 v;
	v.setX(pose_odometry.pose.pose.position.x + displacement*cos(yaw));
	v.setY(pose_odometry.pose.pose.position.y + displacement*sin(yaw));
	v.setZ(pose_odometry.pose.pose.position.z);

	pose_stamped.setOrigin(v);

	tf::Quaternion q;
	q.setX(pose_odometry.pose.pose.orientation.x);
	q.setY(pose_odometry.pose.pose.orientation.y);
	q.setZ(pose_odometry.pose.pose.orientation.z);
	q.setW(pose_odometry.pose.pose.orientation.w);

	pose_stamped.setRotation(q);
}
Example No. 14
  void foutAppendTF(tf::StampedTransform &transform, std::ofstream &fout)
  {
    tf::Matrix3x3 rotMatrix = transform.getBasis();
    tf::Vector3 originVector = transform.getOrigin();

    tfScalar roll, pitch, yaw;
    rotMatrix.getRPY(roll, pitch, yaw);

    // write x y z (meters) and roll pitch yaw (degrees) as one line
    fout<< originVector.x() << " "
        << originVector.y() << " "
        << originVector.z() << " "
        << pcl::rad2deg(roll) << " "
        << pcl::rad2deg(pitch) << " "
        << pcl::rad2deg(yaw) << std::endl;
  }
Example No. 15
bool InputOutputLinControl::updateRobotPoseByModel(double timestep, double linear_vel, double angular_vel, tf::StampedTransform& pose)
{

	pose.stamp_ = ros::Time::now();
	double roll, pitch, yaw;
	pose.getBasis().getRPY(roll,pitch,yaw);

	tf::Vector3 v;

	double theta = yaw + angular_vel * timestep;

	//Eulerian integration
	v.setX(pose.getOrigin().getX() + linear_vel * timestep * cos(theta));
	v.setY(pose.getOrigin().getY() + linear_vel * timestep * sin(theta));
	v.setZ(pose.getOrigin().getZ());

	pose.setOrigin(v);
	pose.setRotation(tf::createQuaternionFromYaw(theta));
	ROS_INFO("New orientation [%f]",theta);

	return true;
}
Example No. 16
    void cloudCallback(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr & cloud)
    {
        if( ( !cloud_updated ) && ( goal_completion_time + ros::Duration(2.0) < cloud->header.stamp ) )
        {
            try
            {
                // update the pose
                listener.waitForTransform( "/arm_kinect_frame", "/map", cloud->header.stamp, ros::Duration(5.0));
                listener.lookupTransform( "/map", "/arm_kinect_frame", cloud->header.stamp, kinect2map);

                listener.waitForTransform( "/arm_kinect_optical_frame", "/map", cloud->header.stamp, ros::Duration(5.0));
                listener.lookupTransform( "/map", "/arm_kinect_optical_frame", cloud->header.stamp, optical2map);

                tf::Vector3 position_( kinect2map.getOrigin() );
                position.x() = position_.x();
                position.y() = position_.y();
                position.z() = position_.z();

                tf::Quaternion orientation_( kinect2map.getRotation() );
                orientation.x() = orientation_.x();
                orientation.y() = orientation_.y();
                orientation.z() = orientation_.z();
                orientation.w() = orientation_.w();

                ROS_INFO_STREAM("position = " << position.transpose() );
                ROS_INFO_STREAM("orientation = " << orientation.transpose() );

                // update the cloud
                pcl::copyPointCloud(*cloud, *xyz_cld_ptr);	// Do I need to copy it?
                //xyz_cld_ptr = cloud;
                cloud_updated = true;
            }
            catch (const tf::TransformException &ex) {
                ROS_ERROR("%s", ex.what());
            }

        }
    }
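The field-by-field copy of position and orientation above can also be written with the tf_conversions helpers. A minimal sketch, assuming tf_conversions/tf_eigen.h is included and that position and orientation are Eigen::Vector3d and Eigen::Quaterniond (the calls below do not apply to single-precision Eigen types):

    #include <tf_conversions/tf_eigen.h>

    // inside the try block, after the lookupTransform calls:
    tf::vectorTFToEigen(kinect2map.getOrigin(), position);          // copies x, y, z
    tf::quaternionTFToEigen(kinect2map.getRotation(), orientation); // copies x, y, z, w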
Example No. 17
  void printTF(tf::StampedTransform &transform, std::string str)
  {
    tf::Matrix3x3 rotMatrix = transform.getBasis();
    tf::Vector3 originVector = transform.getOrigin();

    tfScalar roll, pitch, yaw;
    rotMatrix.getRPY(roll, pitch, yaw);

    ROS_INFO("TF %s (x y z r p y): %f %f %f %f %f %f", 
             str.c_str(),
             originVector.x(),
             originVector.y(), 
             originVector.z(),
             pcl::rad2deg(roll),
             pcl::rad2deg(pitch),
             pcl::rad2deg(yaw));
  }
Example No. 18
	inline Localization(vector<ObstacleC> *obstacles) :
			location(-1.42, 0), locationKalman(location), kalmanI(locationKalman),
			_cameraProjections(NULL), lastOdomX(0), lastOdomY(0),
			globalPos(0, 0, 0), lastOdom(0, 0, 0), lastOdomID(0),
			ballPos(0, 0), m_field(field_model::FieldModel::getInstance()),
			obstacles(obstacles), CurrentVertexId(0), PreviousVertexId(0),
			LandmarkCount(0), odomLastGet(0, 0), atLeastOneObservation(false),
			nodeCounter(0)
	{

		odom_sub = nodeHandle.subscribe("/gait/odometry", 10,
				&Localization::dead_reckoning_callback, this);
		setLoc_sub = nodeHandle.subscribe<geometry_msgs::Point>(
				"/vision/setLocation", 1, &Localization::setloc_callback, this);

		A = m_field->length();
		B = m_field->width();
		E = m_field->goalAreaDepth();
		F = m_field->goalAreaWidth();
		G = m_field->penaltyMarkerDist();
		H = m_field->centerCircleDiameter();
		D = m_field->goalWidth();
		I = m_field->boundary();
		params.ball->radius.set(m_field->ballDiameter() / 2.);
		A2 = A / 2.;
		B2 = B / 2.;
		E2 = E / 2.;
		F2 = F / 2.;
		G2 = G / 2.;
		H2 = H / 2.;
		D2 = D / 2.;
		I2 = I / 2.;

		// TF transforms
		tfLocField.frame_id_ = "/ego_floor";
		tfLocField.child_frame_id_ = "/loc_field";
		tfLocField.setIdentity();

		lastSavedNodeTime = ros::Time::now();
		// create the linear solver
		linearSolver = new LinearSolverCSparse<BlockSolverX::PoseMatrixType>();
		// create the block solver on the top of the linear solver
		blockSolver = new BlockSolverX(linearSolver);
		//create the algorithm to carry out the optimization
		optimizationAlgorithm = new OptimizationAlgorithmLevenberg(blockSolver);
	}
Example No. 19
void addRead(string hexID, int rssi, tf::StampedTransform trans) {
    if (rssi == -1)  //The tags sometimes see each other; don't record this
        return;
    //Check to see if this tag has been seen yet
    if (heatmaps.find(hexID) == heatmaps.end()) {
        heatmaps[hexID] = PointCloud();
        heatmapPoses[hexID] = PoseArray();
        ChannelFloat32 rgbChannel;
        rgbChannel.name = "rgb";
        heatmaps[hexID].channels.push_back(rgbChannel);
        heatmaps[hexID].header.frame_id = "/map";
        heatmapSelected[hexID] = true;
    }
    double x = trans.getOrigin().x();
    double y = trans.getOrigin().y();
    double z = trans.getOrigin().z();
    Point32 P32;
    P32.x = x;P32.y = y;P32.z = z;
    Point P;
    P.x = x;P.y = y;P.z = z;
    tf::Quaternion rotation = trans.getRotation();
    Quaternion Q;
    Q.x = rotation.x();Q.y = rotation.y();Q.z = rotation.z();Q.w = rotation.w();
    Pose pose;
    pose.position = P;
    pose.orientation = Q;
    float float_rgb = 0.0f;  // initialize so out-of-range RSSI values still get a defined color
    if (rssi > 10) {
        double l = ((double)rssi + 50.0) / 200.0;
        int rgb = getColorHSL(20.0, 240.0, l, rssi);
        float_rgb = *reinterpret_cast<float*>(&rgb);
    }
    heatmaps[hexID].points.push_back(P32);
    //Color of point in RGB channel
    heatmaps[hexID].channels[0].values.push_back(float_rgb);
    heatmapPoses[hexID].poses.push_back(pose);
}
Example No. 20
void laserOdometryHandler(const nav_msgs::Odometry::ConstPtr& laserOdometry)
{
  if (fabs(timeOdomBefMapped - timeOdomAftMapped) < 0.005) {

    double roll, pitch, yaw;
    geometry_msgs::Quaternion geoQuat = laserOdometry->pose.pose.orientation;
    // the odometry is kept in LOAM's rotated (camera-style) frame, so the quaternion
    // axes are remapped before extracting roll/pitch/yaw
    tf::Matrix3x3(tf::Quaternion(geoQuat.z, -geoQuat.x, -geoQuat.y, geoQuat.w)).getRPY(roll, pitch, yaw);

    transformSum[0] = -pitch;
    transformSum[1] = -yaw;
    transformSum[2] = roll;

    transformSum[3] = laserOdometry->pose.pose.position.x;
    transformSum[4] = laserOdometry->pose.pose.position.y;
    transformSum[5] = laserOdometry->pose.pose.position.z;

    transformAssociateToMap();

    geoQuat = tf::createQuaternionMsgFromRollPitchYaw
              (transformMapped[2], -transformMapped[0], -transformMapped[1]);

    laserOdometry2.header.stamp = laserOdometry->header.stamp;
    laserOdometry2.pose.pose.orientation.x = -geoQuat.y;
    laserOdometry2.pose.pose.orientation.y = -geoQuat.z;
    laserOdometry2.pose.pose.orientation.z = geoQuat.x;
    laserOdometry2.pose.pose.orientation.w = geoQuat.w;
    laserOdometry2.pose.pose.position.x = transformMapped[3];
    laserOdometry2.pose.pose.position.y = transformMapped[4];
    laserOdometry2.pose.pose.position.z = transformMapped[5];
    pubLaserOdometry2Pointer->publish(laserOdometry2);

    laserOdometryTrans2.stamp_ = laserOdometry->header.stamp;
    laserOdometryTrans2.setRotation(tf::Quaternion(-geoQuat.y, -geoQuat.z, geoQuat.x, geoQuat.w));
    laserOdometryTrans2.setOrigin(tf::Vector3(transformMapped[3], transformMapped[4], transformMapped[5]));
    tfBroadcaster2Pointer->sendTransform(laserOdometryTrans2);
  }
}
Example No. 21
geometry_msgs::PoseStamped OdomTf::get_pose_from_transform(tf::StampedTransform tf) {
    //clumsy conversions--points, vectors and quaternions are different data types in tf vs geometry_msgs
    geometry_msgs::PoseStamped stPose;
    geometry_msgs::Quaternion quat; //geometry_msgs object for quaternion
    tf::Quaternion tfQuat; // tf library object for quaternion
    tfQuat = tf.getRotation(); // member fnc to extract the quaternion from a transform
    quat.x = tfQuat.x(); // copy the data from tf-style quaternion to geometry_msgs-style quaternion
    quat.y = tfQuat.y();
    quat.z = tfQuat.z();
    quat.w = tfQuat.w();
    stPose.pose.orientation = quat; //set the orientation of our PoseStamped object from result

    // now do the same for the origin--equivalently, vector from parent to child frame 
    tf::Vector3 tfVec; //tf-library type
    geometry_msgs::Point pt; //equivalent geometry_msgs type
    tfVec = tf.getOrigin(); // extract the vector from parent to child from transform
    pt.x = tfVec.getX(); //copy the components into geometry_msgs type
    pt.y = tfVec.getY();
    pt.z = tfVec.getZ();
    stPose.pose.position = pt; //and use this compatible type to set the position of the PoseStamped
    stPose.header.frame_id = tf.frame_id_; //the pose is expressed w/rt this reference frame
    stPose.header.stamp = tf.stamp_; // preserve the time stamp of the original transform
    return stPose;
}
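Example No. 21 copies every quaternion and vector component by hand; tf also provides a one-call helper for the same conversion. A minimal sketch of the shorter variant, assuming tf/transform_datatypes.h and geometry_msgs/PoseStamped.h are included (the free-function name is illustrative, not part of the original class):

    geometry_msgs::PoseStamped pose_from_transform_short(const tf::StampedTransform &tf_in) {
        geometry_msgs::PoseStamped stPose;
        tf::poseTFToMsg(tf_in, stPose.pose);   // fills position and orientation in one call
        stPose.header.frame_id = tf_in.frame_id_;
        stPose.header.stamp = tf_in.stamp_;
        return stPose;
    }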
Example No. 22
int main(int argc, char **argv) {
  ros::init(argc, argv, ROS_PACKAGE_NAME);

  ros::NodeHandle n;
  ros::NodeHandle pn("~");

  pn.param("base_stabilized_frame", p_base_stabilized_frame_, std::string("base_stabilized"));
  pn.param("base_frame", p_base_frame_, std::string("base_link"));
  
  tfB_ = new tf::TransformBroadcaster();
  transform_.getOrigin().setX(0.0);
  transform_.getOrigin().setY(0.0);
  transform_.getOrigin().setZ(0.0);
  transform_.frame_id_ = p_base_stabilized_frame_;
  transform_.child_frame_id_ = p_base_frame_;

  ros::Subscriber imu_subscriber = n.subscribe("imu_topic", 10, imuMsgCallback);

  ros::spin();

  delete tfB_;

  return 0;
}
Example No. 23
void imuMsgCallback(const sensor_msgs::Imu& imu_msg)
{
  tf::quaternionMsgToTF(imu_msg.orientation, tmp_);

  tfScalar yaw, pitch, roll;
  tf::Matrix3x3(tmp_).getRPY(roll, pitch, yaw);

  // keep roll and pitch but drop yaw, so the published transform only stabilizes the base
  tmp_.setRPY(roll, pitch, 0.0);

  transform_.setRotation(tmp_);

  transform_.stamp_ = imu_msg.header.stamp;

  tfB_->sendTransform(transform_);
}
Example No. 24
tf::StampedTransform OdomTf::stamped_transform_inverse(tf::StampedTransform stf) {
    // instantiate stamped transform with constructor args
    //note: child_frame and frame_id are reversed, to correspond to inverted transform
    tf::StampedTransform stf_inv(stf.inverse(), stf.stamp_, stf.child_frame_id_, stf.frame_id_);
    /* long-winded equivalent:
    tf::StampedTransform stf_inv;    
    tf::Transform tf = get_tf_from_stamped_tf(stf);
    tf::Transform tf_inv = tf.inverse();
    
    stf_inv.stamp_ = stf.stamp_;
    stf_inv.frame_id_ = stf.child_frame_id_;
    stf_inv.child_frame_id_ = stf.frame_id_;
    stf_inv.setOrigin(tf_inv.getOrigin());
    stf_inv.setBasis(tf_inv.getBasis());
     * */
    return stf_inv;
}
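A quick sanity check for the helper above, sketched with illustrative names (listener is a tf::TransformListener and odom_tf an OdomTf instance assumed to exist in the calling scope): composing a transform with its inverse should yield the identity.

    tf::StampedTransform base_to_odom;
    listener.lookupTransform("odom", "base_link", ros::Time(0), base_to_odom);
    tf::StampedTransform odom_to_base = odom_tf.stamped_transform_inverse(base_to_odom);
    tf::Transform should_be_identity = base_to_odom * odom_to_base;  // origin ~ (0,0,0), rotation ~ identity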
Example No. 25
void printTF(tf::StampedTransform t, WhichArm a)
{
	std::string p;
	if(a==RIGHT)
	{
		std::cout<< "#Right gripper:\n";
		p="r";
	}
	else
	{
		std::cout<< "#Left gripper:\n";
		p="l";
	}

	std::cout<< "p.position.x     = " << t.getOrigin().getX()    << ";\n"
			 << "p.position.y     = " << t.getOrigin().getY()    << ";\n"
			 << "p.position.z     = " << t.getOrigin().getZ()    << ";\n"
			 << "p.orientation.x  = " << t.getRotation().getX()  << ";\n"
			 << "p.orientation.y  = " << t.getRotation().getY()  << ";\n"
			 << "p.orientation.z  = " << t.getRotation().getZ()  << ";\n"
			 << "p.orientation.w  = " << t.getRotation().getW()  << ";\n";

	geometry_msgs::PoseStamped tmp;
	tmp.pose.position.x 	= t.getOrigin().getX()  ;
	tmp.pose.position.y     = t.getOrigin().getY()  ;
	tmp.pose.position.z     = t.getOrigin().getZ()  ;
	tmp.pose.orientation.x 	= t.getRotation().getX();
	tmp.pose.orientation.y  = t.getRotation().getY();
	tmp.pose.orientation.z  = t.getRotation().getZ();
	tmp.pose.orientation.w  = t.getRotation().getW();
	std::cout<< "    pose:\n"
			 << "      position:\n"
			 << "        x: "			<< tmp.pose.position.x << "\n"
			 << "        y: "			<< tmp.pose.position.y << "\n"
			 << "        z: "			<< tmp.pose.position.z << "\n"
			 << "      orientation:\n"
			 << "        x: "			<< tmp.pose.orientation.x << "\n"
			 << "        y: "			<< tmp.pose.orientation.y << "\n"
			 << "        z: "			<< tmp.pose.orientation.z << "\n"
			 << "        w: "			<< tmp.pose.orientation.w << "\n\n";
//	std::cout<<tmp<<"\n";
}
Example No. 26
  void getPoseInRobotFrame(std::vector<SM_COND> nextRobotCond_w)
  { 
    //transforming position into robot frame
    geometry_msgs::PoseStamped goal;
    geometry_msgs::PoseStamped yawMapRobot;
    geometry_msgs::PoseStamped goalRobotFrame;

    // position
    goal.header.stamp= ros::Time(0);
    goal.header.frame_id= "map";
    goal.pose.position.x= nextRobotCond_w[0].x;
    goal.pose.position.y= nextRobotCond_w[1].x;
    goal.pose.position.z= 0.0;
    tf::Quaternion q = tf::createQuaternionFromRPY(0.0,0.0,nextRobotCond_w[5].x);
    tf::quaternionTFToMsg(q,goal.pose.orientation);
    
    _listener.waitForTransform("base_footprint", "map",
                               ros::Time(0), ros::Duration(1.0));
    _listener.transformPose("base_footprint", goal, goalRobotFrame); 
  
    _nextRobotCond_r[0].x = goalRobotFrame.pose.position.x;
    _nextRobotCond_r[1].x = goalRobotFrame.pose.position.y; 
    _nextRobotCond_r[5].x = tf::getYaw(goalRobotFrame.pose.orientation);

    // velocity and acceleration
    //record the starting transform from the odometry to the base frame
    _listener.lookupTransform("map", "base_footprint",
                              ros::Time(0), world_transform);
    
    double roll, pitch, yaw;
    q = world_transform.getRotation();
    tf::Matrix3x3(q).getRPY(roll, pitch, yaw);

    _nextRobotCond_r[0].v = cos(yaw)*nextRobotCond_w[0].v + sin(yaw)*nextRobotCond_w[1].v;
    _nextRobotCond_r[1].v = -sin(yaw)*nextRobotCond_w[0].v + cos(yaw)*nextRobotCond_w[1].v; 
    _nextRobotCond_r[5].v = nextRobotCond_w[5].v;

    _nextRobotCond_r[0].a = cos(yaw)*nextRobotCond_w[0].a + sin(yaw)*nextRobotCond_w[1].a;
    _nextRobotCond_r[1].a = -sin(yaw)*nextRobotCond_w[0].a + cos(yaw)*nextRobotCond_w[1].a;
    _nextRobotCond_r[5].a = nextRobotCond_w[5].a;
    
  }
Example No. 27
	void MocapAlign::spinOnce( )
	{
		static tf::StampedTransform tf_a;
		static tf::StampedTransform tf_b;

		try
		{
			li.lookupTransform( frame_base, frame_a, ros::Time(0), tf_a );
			li.lookupTransform( frame_base, frame_b, ros::Time(0), tf_b );
		}
		catch( tf::TransformException ex )
		{
			ROS_INFO( "Missed a transform...chances are that we are still OK" );
			return;
		}
		// a floating-point value differs from itself only if it is NaN
		if( tf_a.getOrigin( ).x( ) != tf_a.getOrigin( ).x( ) ||
			tf_b.getOrigin( ).x( ) != tf_b.getOrigin( ).x( ) )
		{
			ROS_WARN( "NaN DETECTED" );
			return;
		}

		// Rotate us to be aligned with the uav
		const tf::Quaternion delta_quat = tf::createQuaternionFromRPY( 0, 0, tf::getYaw( tf_a.getRotation( ) ) - tf::getYaw( tf_b.getRotation( ) ) );
		const tf::Quaternion quat_b_aligned = tf_b.getRotation( ) * delta_quat;

		// Broadcast the aligned tf
		tf::StampedTransform tf_b_aligned( tf_b );
		if( tf_b_aligned.getOrigin( ).x( ) != tf_b_aligned.getOrigin( ).x( ) )
		{
			ROS_WARN( "NaN PRODUCED" );
			return;
		}
		tf_b_aligned.setRotation( quat_b_aligned );
		tf_b_aligned.child_frame_id_ = tf_b_aligned.child_frame_id_ + "_aligned";
		br.sendTransform( tf_b_aligned );
	}
Example No. 28
void ObservationModel::integratePoseMeasurement(Particles& particles, double poseRoll, double posePitch, const tf::StampedTransform& footprintToTorso){
  double poseHeight = footprintToTorso.getOrigin().getZ();
  ROS_DEBUG("Pose measurement z=%f R=%f P=%f", poseHeight, poseRoll, posePitch);
  // TODO cluster xy of particles => speedup
#pragma omp parallel for
  for (unsigned i=0; i < particles.size(); ++i){
    // integrate IMU meas.:
    double roll, pitch, yaw;
    particles[i].pose.getBasis().getRPY(roll, pitch, yaw);
    particles[i].weight += m_weightRoll * logLikelihood(poseRoll - roll, m_sigmaRoll);
    particles[i].weight += m_weightPitch * logLikelihood(posePitch - pitch, m_sigmaPitch);

    // integrate height measurement (z)
    double heightError = 0;
//    if (getHeightError(particles[i],footprintToTorso, heightError))
//      particles[i].weight += m_weightZ * logLikelihood(heightError, m_sigmaZ);
    particles[i].weight += m_weightZ * logLikelihood(heightError, m_sigmaZ);


  }

}
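For reference, the logLikelihood helper used above is typically a plain Gaussian log-likelihood of the error term; a minimal sketch under that assumption (this is not necessarily the project's actual implementation):

    #include <cmath>

    static inline double logLikelihood(double error, double sigma) {
      // log of a univariate normal density evaluated at 'error'
      return -0.5 * std::log(2.0 * M_PI * sigma * sigma)
             - (error * error) / (2.0 * sigma * sigma);
    }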
Example No. 29
void InputOutputLinControl::getTrueRobotPose(double displacement, tf::StampedTransform robot_poseB, tf::StampedTransform& true_robot_pose)
{
	true_robot_pose.frame_id_ = robot_poseB.frame_id_;
	true_robot_pose.stamp_ = robot_poseB.stamp_;

	double roll, pitch, yaw;
	robot_poseB.getBasis().getRPY(roll,pitch,yaw);

	tf::Vector3 v;
	v.setX(robot_poseB.getOrigin().getX() - displacement*cos(yaw));
	v.setY(robot_poseB.getOrigin().getY() - displacement*sin(yaw));
	v.setZ(robot_poseB.getOrigin().getZ());

	true_robot_pose.setOrigin(v);

	true_robot_pose.setRotation(robot_poseB.getRotation());
}
Example No. 30
void InputOutputLinControl::realRobotPoseB(double displacement, tf::StampedTransform real_robot_pose, tf::StampedTransform& real_robot_poseB)
{
	real_robot_poseB.frame_id_ = real_robot_pose.frame_id_;
	real_robot_poseB.stamp_ = real_robot_pose.stamp_;
	real_robot_poseB.child_frame_id_ = real_robot_pose.child_frame_id_;

	tf::Vector3 v;
	double roll, pitch, yaw;
	real_robot_pose.getBasis().getRPY(roll,pitch,yaw);

	v.setX(real_robot_pose.getOrigin().getX() + displacement*cos(yaw));
	v.setY(real_robot_pose.getOrigin().getY() + displacement*sin(yaw));
	v.setZ(real_robot_pose.getOrigin().getZ());
	real_robot_poseB.setOrigin(v);

	real_robot_poseB.setRotation(real_robot_pose.getRotation());
}