Example No. 1
bool OdometryRemap::setOdomPoseCallback(nao_remote::SetTransform::Request& req, nao_remote::SetTransform::Response& res){
	ROS_INFO("New target for current odometry pose received");
	tf::Transform targetPose;
	tf::transformMsgToTF(req.offset, targetPose);

	m_odomOffset = targetPose * m_odomPose.inverse();

	return true;
}
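The key line above is the offset algebra: composing the requested target with the inverse of the raw odometry pose yields an offset such that offset * m_odomPose == targetPose. A minimal standalone sketch (assuming only the tf headers; the pose values are made up) verifying that identity:

#include <tf/transform_datatypes.h>
#include <cassert>

int main()
{
  tf::Pose odomPose(tf::createQuaternionFromYaw(0.3), tf::Vector3(1.0, 2.0, 0.0));
  tf::Pose targetPose(tf::createQuaternionFromYaw(1.2), tf::Vector3(5.0, -1.0, 0.0));

  // offset maps the raw odometry pose onto the requested target pose
  tf::Transform offset = targetPose * odomPose.inverse();

  // applying the offset to the raw pose recovers the target (up to rounding)
  tf::Pose corrected = offset * odomPose;
  assert(corrected.getOrigin().distance(targetPose.getOrigin()) < 1e-6);
  return 0;
}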
  geometry_msgs::Twist PoseFollower::diff2D(const tf::Pose& pose1, const tf::Pose& pose2)
  {
    geometry_msgs::Twist res;
    tf::Pose diff = pose2.inverse() * pose1;
    res.linear.x = diff.getOrigin().x();
    res.linear.y = diff.getOrigin().y();
    res.angular.z = tf::getYaw(diff.getRotation());

    if(holonomic_ || (fabs(res.linear.x) <= tolerance_trans_ && fabs(res.linear.y) <= tolerance_trans_))
      return res;

    //in the case that we're not rotating to our goal position and we have a non-holonomic robot
    //we'll need to command a rotational velocity that will help us reach our desired heading
    
    //we want to compute a goal based on the heading difference between our pose and the target
    double yaw_diff = headingDiff(pose1.getOrigin().x(), pose1.getOrigin().y(), 
        pose2.getOrigin().x(), pose2.getOrigin().y(), tf::getYaw(pose2.getRotation()));

    //we'll also check if we can move more effectively backwards
    if (allow_backwards_) 
    {
      double neg_yaw_diff = headingDiff(pose1.getOrigin().x(), pose1.getOrigin().y(), 
					pose2.getOrigin().x(), pose2.getOrigin().y(), M_PI + tf::getYaw(pose2.getRotation()));

      //check if it's faster to just back up
      if(fabs(neg_yaw_diff) < fabs(yaw_diff)){
        ROS_DEBUG("Negative is better: %.2f", neg_yaw_diff);
        yaw_diff = neg_yaw_diff;
      }
    }

    //compute the desired quaternion
    tf::Quaternion rot_diff = tf::createQuaternionFromYaw(yaw_diff);
    tf::Quaternion rot = pose2.getRotation() * rot_diff;
    tf::Pose new_pose = pose1;
    new_pose.setRotation(rot);

    diff = pose2.inverse() * new_pose;
    res.linear.x = diff.getOrigin().x();
    res.linear.y = diff.getOrigin().y();
    res.angular.z = tf::getYaw(diff.getRotation());
    return res;
  }
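diff2D's core idiom is pose2.inverse() * pose1: the relative transform that expresses pose1 in pose2's frame, whose translation and yaw become the commanded twist. A small illustrative sketch of that convention (hypothetical poses, tf only):

#include <tf/transform_datatypes.h>
#include <cmath>
#include <cstdio>

int main()
{
  // robot at the origin facing +x; goal 1 m ahead, rotated 90 degrees left
  tf::Pose robot(tf::createQuaternionFromYaw(0.0), tf::Vector3(0.0, 0.0, 0.0));
  tf::Pose goal(tf::createQuaternionFromYaw(M_PI / 2.0), tf::Vector3(1.0, 0.0, 0.0));

  // goal expressed in the robot's frame, as in diff2D above
  tf::Pose diff = robot.inverse() * goal;
  printf("dx=%.2f dy=%.2f dyaw=%.2f\n",
         diff.getOrigin().x(), diff.getOrigin().y(),
         tf::getYaw(diff.getRotation()));  // dx=1.00 dy=0.00 dyaw=1.57
  return 0;
}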
Example No. 3
void MotionModel::setMotion(const tf::Pose& pnew, const tf::Pose& pold)
{
  tf::Pose delta_pose;
  tf::Transform odom_to_base(pold.inverse().getRotation());
  delta_pose.setOrigin(odom_to_base * (pnew.getOrigin() - pold.getOrigin()));
  delta_pose.setRotation(pnew.getRotation() * pold.getRotation().inverse());

  delta_x = delta_pose.getOrigin().x();
  delta_y = delta_pose.getOrigin().y();
  delta_yaw = atan2(sin(tf::getYaw(delta_pose.getRotation())), cos(tf::getYaw(delta_pose.getRotation())));

  dxy = sqrt(delta_x * delta_x + delta_y * delta_y);
}
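Two details in setMotion are worth noting: the world-frame displacement is rotated into the old pose's frame (via the rotation-only odom_to_base transform) before being stored, and the yaw difference is wrapped into (-pi, pi] with the atan2(sin, cos) trick. A minimal sketch of that normalization (plain C++):

#include <cmath>
#include <cstdio>

// Wrap an arbitrary angle into (-pi, pi], the same idiom used for delta_yaw.
double normalizeAngle(double a)
{
  return std::atan2(std::sin(a), std::cos(a));
}

int main()
{
  printf("%.3f\n", normalizeAngle(2.5 * M_PI));  // prints 1.571 (i.e. pi/2)
  return 0;
}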
pair<Nodes, PointVec>
nodesOnGrid (const unsigned g, const Roadmap& r,
             const tmap::TopologicalMap& tmap)
{
  pair<Nodes, PointVec> res;

  // Add all nodes defined wrt this grid
  BOOST_FOREACH (const unsigned n, r.gridNodes(g))
  {
    res.first.push_back(n);
    res.second.push_back(r.nodeInfo(n).position);
  }

  const double x_size = tmap.nodeInfo(g).x_size;
  const double y_size = tmap.nodeInfo(g).y_size;

  // Also look at neighboring grids in topological graph
  BOOST_FOREACH (const tmap::GraphEdge e,
                 out_edges(tmap.node(g), tmap)) 
  {
    const bool flip = (tmap[e].dest == g);
    const tf::Pose offset = gmu::toPose(tmap[e].offset);
    const tf::Transform tr = flip ? offset.inverse() : offset;
    const unsigned g2 = flip ? tmap[e].src : tmap[e].dest;
    // Now tr maps from the neighbor grid g2 to g

    BOOST_FOREACH (const unsigned n, r.gridNodes(g2))
    {
      ROS_ASSERT(g2==r.nodeInfo(n).grid);
      const gm::Point& pos = r.nodeInfo(n).position;
      const gm::Point local_pos = gmu::transformPoint(tr, pos);
      // Now local_pos is the position of the waypoint in frame g

      ROS_DEBUG_STREAM_NAMED ("nodes_on_grid", "Node " << n << " position on " << g2 << " is "
                              << gmu::toString(pos) << " and on " << g <<
                              " is " << gmu::toString(local_pos));

      if (fabs(local_pos.x) < x_size/2 && fabs(local_pos.y) < y_size/2)
      {
        ROS_DEBUG_NAMED ("nodes_on_grid", "On grid");
        res.first.push_back(n);
        res.second.push_back(local_pos);
      }
    }
  }

  return res;
}
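The flip logic handles the fact that a topological edge stores its offset in one direction only: the offset is the pose of the edge's dest grid expressed in its src grid's frame, so it must be inverted whenever the queried grid g is the dest rather than the src. An illustrative helper capturing the pattern (the helper name is mine, not from the source):

#include <tf/transform_datatypes.h>

// Return the transform mapping the neighbor grid's coordinates into grid g,
// given an edge whose offset takes dest coordinates into src coordinates.
tf::Transform neighborToGrid(unsigned g, unsigned dest, const tf::Pose& offset)
{
  const bool flip = (dest == g);  // does the stored edge point toward g?
  return flip ? offset.inverse() : offset;
}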
gm::Point positionOnGrid (const unsigned n, const unsigned g, const Roadmap& r,
                          const tmap::TopologicalMap& tmap) 
{
  const msg::RoadmapNode& info = r.nodeInfo(n);
  const unsigned g2 = info.grid;
  if (g2==g)
  {
    return info.position;
  }
  else
  {
    const tmap::GraphVertex v = tmap.node(g);
    const tmap::GraphVertex v2 = tmap.node(g2);
    pair<tmap::GraphEdge, bool> res = edge(v, v2, tmap);
    ROS_ASSERT_MSG (res.second, "No edge between grid %u and grid %u "
                    "containing %u", g, g2, n);
    const bool flip = (tmap[res.first].dest == g);
    const tf::Pose offset = gmu::toPose(tmap[res.first].offset);
    const tf::Transform tr = flip ? offset.inverse() : offset;
    return gmu::transformPoint(tr, info.position);
  }
}
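positionOnGrid applies the same flip convention for a single node, short-circuiting when the node already lives on the queried grid; the ROS_ASSERT_MSG makes explicit that the two grids must be adjacent in the topological graph. A hypothetical call, assuming a roadmap r and topological map tmap are in scope:

  // position of roadmap node 42 expressed in the frame of grid 7
  const gm::Point p = positionOnGrid(42, 7, r, tmap);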
bool poseEstimationFunction(kuri_mbzirc_challenge_1_msgs::PES::Request  &req,
                            kuri_mbzirc_challenge_1_msgs::PES::Response &res)
{
    tf::Transform BaseToCamera;
    double imageWidth = 1228;
    double imageHeight = 1027;
    BaseToCamera.setOrigin(tf::Vector3(0.0, 0.0, -0.045));
    //BaseToCamera.setRotation(tf::Quaternion(0.707, -0.707, 0.000, -0.000));
    BaseToCamera.setRotation(tf::Quaternion(0.9999060498015511, 0, 0, -0.013707354604664749));

    tf::Transform extrinsic;
    cv::Mat P_Mat_G(3, 4, CV_64FC1);
    cv::Mat P(3, 4, CV_64FC1);
    tfScalar extrinsic_data[4 * 4];
    pcl::PointCloud<pcl::PointXYZRGB> Pointcloud;
    Pointcloud.header.frame_id = "/world";
    Pointcloud.height = imageHeight;
    Pointcloud.width = imageWidth;
    Pointcloud.resize(imageHeight * imageWidth);
    Pointcloud.is_dense = true;
    // cv::Mat cvimg = cv_bridge::toCvShare(img, "bgr8")->image.clone();
    //tf::poseMsgToTF(odom->pose.pose, tfpose);
    extrinsic = BaseToCamera * tfpose.inverse();
    // to test if the tf is correct, create testframe_to_camera:
    //br.sendTransform(tf::StampedTransform(extrinsic, ros::Time::now(), "/testframe_to_camera", "/world"));
    // fill the 3x4 camera projection matrix from the camera info
    for (int i = 0; i < 3; i++)
        for (int j = 0; j < 4; j++)
        {
            P.at<double>(i, j) = cam_info.P.at(i * 4 + j);
        }
    // this P is in camera coordinates; compose it with the extrinsic so it
    // projects from the ground (world) coordinate frame
    extrinsic.getOpenGLMatrix(extrinsic_data);
    cv::Mat E_MAT(4, 4, CV_64FC1, extrinsic_data);
    P_Mat_G = P * (E_MAT.t());
    double a[4], b[4], c[4];
    a[0] = P_Mat_G.at<double>(0, 0);
    a[1] = P_Mat_G.at<double>(0, 1);
    a[2] = P_Mat_G.at<double>(0, 2);
    a[3] = P_Mat_G.at<double>(0, 3);
    b[0] = P_Mat_G.at<double>(1, 0);
    b[1] = P_Mat_G.at<double>(1, 1);
    b[2] = P_Mat_G.at<double>(1, 2);
    b[3] = P_Mat_G.at<double>(1, 3);
    c[0] = P_Mat_G.at<double>(2, 0);
    c[1] = P_Mat_G.at<double>(2, 1);
    c[2] = P_Mat_G.at<double>(2, 2);
    c[3] = P_Mat_G.at<double>(2, 3);
    std::clock_t start;
    //double duration;
    start = std::clock();

    // ************************** find 3D point ******************** //

    // just for the detected point
    float B[2][2], bvu[2];
    B[0][0] = req.A * c[0] - a[0];
    B[0][1] = req.A * c[1] - a[1];
    B[1][0] = req.B * c[0] - b[0];
    B[1][1] = req.B * c[1] - b[1];
    bvu[0] = a[2] * Ground_Z + a[3] - req.A * c[2] * Ground_Z - req.A * c[3];
    bvu[1] = b[2] * Ground_Z + b[3] - req.B * c[2] * Ground_Z - req.B * c[3];
    float DomB = B[1][1] * B[0][0] - B[0][1] * B[1][0];
    //res.obj.pose.pose.position.x  = (B[1][1] * bvu[0] - B[0][1] * bvu[1]) / DomB ;
    //res.obj.pose.pose.position.y  = (B[0][0] * bvu[1] - B[1][0] * bvu[0]) / DomB ;
    res.X = ((B[1][1] * bvu[0] - B[0][1] * bvu[1]) / DomB) / 10000000;
    res.Y = ((B[0][0] * bvu[1] - B[1][0] * bvu[0]) / DomB) / 10000000;
    res.Z = 2;
    //res.obj.pose.pose.position.z = 0 ;
    //ROS_INFO("request: x=%ld, y=%ld", (long int)req.A, (long int)req.B);
    //ROS_INFO("sending back response: [%ld]", (long int)res.obj.pose.pose.position.x);
    //std::cout << "request " << req.A << "    " << req.B << std::endl ; 
    //std::cout << "response" << res.X << "    " << res.Y << std::endl ; 
    
    ROS_INFO("request: x=%f, y=%f", (float)req.A, (float)req.B);
    ROS_INFO("sending back response: x=%f, y=%f", (float)res.X , (float)res.Y);
    return true;
}
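The block above intersects the camera ray through pixel (req.A, req.B) with the horizontal plane z = Ground_Z: writing the rows of the world-frame projection matrix as a, b, c, the projection equations u = (a·P)/(c·P) and v = (b·P)/(c·P) reduce, at known Z, to the 2x2 linear system B [X Y]^T = bvu, which the code solves by Cramer's rule. A self-contained sketch of that solve (note the original does not guard against a singular DomB, which occurs when the viewing ray is parallel to the ground plane):

#include <cstdio>

// Solve B * [x y]^T = rhs by Cramer's rule, mirroring the res.X / res.Y
// computation above; returns false when the system is singular.
bool solve2x2(const float B[2][2], const float rhs[2], float& x, float& y)
{
  const float det = B[1][1] * B[0][0] - B[0][1] * B[1][0];
  if (det == 0.0f) return false;
  x = (B[1][1] * rhs[0] - B[0][1] * rhs[1]) / det;
  y = (B[0][0] * rhs[1] - B[1][0] * rhs[0]) / det;
  return true;
}

int main()
{
  const float B[2][2] = {{2.0f, 1.0f}, {1.0f, 3.0f}};
  const float rhs[2] = {5.0f, 10.0f};
  float x, y;
  if (solve2x2(B, rhs, x, y))
    printf("x=%.2f y=%.2f\n", x, y);  // x=1.00 y=3.00
  return 0;
}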
Example No. 7
void NaoqiJointStates::run()
{
   ros::Rate r(m_rate);
   ros::Time stamp1;
   ros::Time stamp2;
   ros::Time stamp;

   std::vector<float> odomData;
   float odomX, odomY, odomZ, odomWX, odomWY, odomWZ;

   std::vector<float> memData;

   std::vector<float> positionData;

   ROS_INFO("Staring main loop. ros::ok() is %d nh.ok() is %d",ros::ok(),m_nh.ok());
   while(ros::ok() )
   {
      r.sleep();
      ros::spinOnce();
      stamp1 = ros::Time::now();
      try
      {
         memData = m_memoryProxy->getListData(m_dataNamesList);
         // {SPACE_TORSO = 0, SPACE_WORLD = 1, SPACE_NAO = 2}. (second argument)
         odomData = m_motionProxy->getPosition("Torso", 1, true);
         positionData = m_motionProxy->getAngles("Body", true);
      }
      catch (const AL::ALError & e)
      {
         ROS_ERROR( "Error accessing ALMemory, exiting...");
         ROS_ERROR( "%s", e.what() );
         //ros.signal_shutdown("No NaoQI available anymore");
      }
      stamp2 = ros::Time::now();
      //ROS_DEBUG("dt is %f",(stamp2-stamp1).toSec()); % dt is typically around 1/1000 sec
      // TODO: Something smarter than this..
      stamp = stamp1 + ros::Duration((stamp2-stamp1).toSec()/2.0);

      /******************************************************************
       *                              IMU
       *****************************************************************/
      if (memData.size() != m_dataNamesList.getSize())
      {
         ROS_ERROR("memData length %zu does not match expected length %u",memData.size(),m_dataNamesList.getSize() );
         continue;
      }
      // IMU data:
      m_torsoIMU.header.stamp = stamp;

      float angleX = memData[1];
      float angleY = memData[2];
      float angleZ = memData[3];
      float gyroX = memData[4];
      float gyroY = memData[5];
      float gyroZ = memData[6];
      float accX = memData[7];
      float accY = memData[8];
      float accZ = memData[9];

      m_torsoIMU.orientation = tf::createQuaternionMsgFromRollPitchYaw(
                                angleX,
                                angleY,
                                angleZ); // yaw currently always 0

      m_torsoIMU.angular_velocity.x = gyroX;
      m_torsoIMU.angular_velocity.y = gyroY;
      m_torsoIMU.angular_velocity.z = gyroZ; // currently always 0

      m_torsoIMU.linear_acceleration.x = accX;
      m_torsoIMU.linear_acceleration.y = accY;
      m_torsoIMU.linear_acceleration.z = accZ;

      // covariances unknown
      // cf http://www.ros.org/doc/api/sensor_msgs/html/msg/Imu.html
      m_torsoIMU.orientation_covariance[0] = -1;
      m_torsoIMU.angular_velocity_covariance[0] = -1;
      m_torsoIMU.linear_acceleration_covariance[0] = -1;

      m_torsoIMUPub.publish(m_torsoIMU);


      /******************************************************************
       *                            Joint state
       *****************************************************************/
      m_jointState.header.stamp = stamp;
      m_jointState.header.frame_id = m_baseFrameId;
      m_jointState.position.resize(positionData.size());
      for(unsigned i = 0; i<positionData.size(); ++i)
      {
         m_jointState.position[i] = positionData[i];
      }

      // simulated model misses some joints, we need to fill:
      if (m_jointState.position.size() == 22)
      {
         m_jointState.position.insert(m_jointState.position.begin()+6,0.0);
         m_jointState.position.insert(m_jointState.position.begin()+7,0.0);
         m_jointState.position.push_back(0.0);
         m_jointState.position.push_back(0.0);
      }

      m_jointStatePub.publish(m_jointState);


      /******************************************************************
       *                            Odometry
       *****************************************************************/
      if (!m_paused) {
         // apply offset transformation:
         tf::Pose transformedPose;

         if (odomData.size() != 6)
         {
            ROS_ERROR("Error getting odom data. length is %zu", odomData.size());
            continue;
         }

         double dt = (stamp.toSec() - m_lastOdomTime);

         odomX = odomData[0];
         odomY = odomData[1];
         odomZ = odomData[2];
         odomWX = odomData[3];
         odomWY = odomData[4];
         odomWZ = odomData[5];

         tf::Quaternion q;
         // roll and pitch from IMU, yaw from odometry:
         if (m_useIMUAngles)
            q.setRPY(angleX, angleY, odomWZ);
         else
            q.setRPY(odomWX, odomWY, odomWZ);

         m_odomPose.setOrigin(tf::Vector3(odomX, odomY, odomZ));
         m_odomPose.setRotation(q);

         if (m_mustUpdateOffset) {
            if (!m_isInitialized) {
               if (m_initializeFromIMU) {
                  // Initialization from IMU: take x, y, z, yaw from odometry, roll and pitch from IMU
                  m_targetPose.setOrigin(m_odomPose.getOrigin());
                  m_targetPose.setRotation(tf::createQuaternionFromRPY(angleX, angleY, odomWZ));
               } else if (m_initializeFromOdometry) {
                  m_targetPose.setOrigin(m_odomPose.getOrigin());
                  m_targetPose.setRotation(tf::createQuaternionFromRPY(odomWX, odomWY, odomWZ));
               }
               m_isInitialized = true;
            } else {
               // Overwrite target pitch and roll angles with IMU data
               const double target_yaw = tf::getYaw(m_targetPose.getRotation());
               if (m_initializeFromIMU) {
                  m_targetPose.setRotation(tf::createQuaternionFromRPY(angleX, angleY, target_yaw));
               } else if (m_initializeFromOdometry) {
                  m_targetPose.setRotation(tf::createQuaternionFromRPY(odomWX, odomWY, target_yaw));
               }
            }
            m_odomOffset = m_targetPose * m_odomPose.inverse();
            transformedPose = m_targetPose;
            m_mustUpdateOffset = false;
         } else {
            transformedPose = m_odomOffset * m_odomPose;
         }

         //
         // publish the transform over tf first
         //
         m_odomTransformMsg.header.stamp = stamp;
         tf::transformTFToMsg(transformedPose, m_odomTransformMsg.transform);
         m_transformBroadcaster.sendTransform(m_odomTransformMsg);

         //
         // Fill the Odometry msg
         //
         m_odom.header.stamp = stamp;
         //set the velocity first (old values still valid)
         m_odom.twist.twist.linear.x = (odomX - m_odom.pose.pose.position.x) / dt;
         m_odom.twist.twist.linear.y = (odomY - m_odom.pose.pose.position.y) / dt;
         m_odom.twist.twist.linear.z = (odomZ - m_odom.pose.pose.position.z) / dt;

         // TODO: calc angular velocity!
         //   m_odom.twist.twist.angular.z = vth; ??

         //set the position from the above calculated transform
         m_odom.pose.pose.orientation = m_odomTransformMsg.transform.rotation;
         m_odom.pose.pose.position.x = m_odomTransformMsg.transform.translation.x;
         m_odom.pose.pose.position.y = m_odomTransformMsg.transform.translation.y;
         m_odom.pose.pose.position.z = m_odomTransformMsg.transform.translation.z;

         m_odomPub.publish(m_odom);

         m_lastOdomTime = stamp.toSec();
      }
   }
    ROS_INFO("naoqi_sensors stopped.");

}
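One small idiom in run() deserves a note: the sensor timestamp is taken as the midpoint of the two ros::Time::now() calls that bracket the blocking NaoQI reads, halving the worst-case stamping error (the TODO admits this is a compromise). A minimal sketch of the pattern, assuming a roscpp context:

#include <ros/ros.h>

// Bracket a blocking read and stamp the result at the midpoint of the
// bracketing times -- the same compromise used in the loop above.
template <typename ReadFn>
ros::Time stampedRead(ReadFn read)
{
  const ros::Time before = ros::Time::now();
  read();  // blocking device access
  const ros::Time after = ros::Time::now();
  return before + ros::Duration((after - before).toSec() / 2.0);
}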
Example No. 8
void OdometryRemap::torsoOdomCallback(const nao_msgs::TorsoOdometryConstPtr& msgOdom, const nao_msgs::TorsoIMUConstPtr& msgIMU){
	if (m_paused){
		ROS_DEBUG("Skipping odometry callback, paused");
		return;
	}

	double odomTime = (msgOdom->header.stamp).toSec();
	ROS_DEBUG("Received [%f %f %f %f (%f/%f) (%f/%f) %f]", odomTime, msgOdom->x, msgOdom->y, msgOdom->z, msgOdom->wx, msgIMU->angleX, msgOdom->wy, msgIMU->angleY, msgOdom->wz);

	double dt = (odomTime - m_lastOdomTime);


	tf::Quaternion q;
	// roll and pitch from IMU, yaw from odometry:
	if (m_useIMUAngles)
		q.setRPY(msgIMU->angleX, msgIMU->angleY, msgOdom->wz);
	else
		q.setRPY(msgOdom->wx, msgOdom->wy, msgOdom->wz);


	m_odomPose.setOrigin(tf::Vector3(msgOdom->x, msgOdom->y, msgOdom->z));
	m_odomPose.setRotation(q);

	// apply offset transformation:
	tf::Pose transformedPose;

	if (m_mustUpdateOffset) {
		if (!m_isInitialized) {
			if (m_initializeFromIMU) {
				// Initialization from IMU: take x, y, z, yaw from odometry, roll and pitch from IMU
				m_targetPose.setOrigin(m_odomPose.getOrigin());
				m_targetPose.setRotation(tf::createQuaternionFromRPY(msgIMU->angleX, msgIMU->angleY, msgOdom->wz));
			} else if (m_initializeFromOdometry) {
				m_targetPose.setOrigin(m_odomPose.getOrigin());
				m_targetPose.setRotation(tf::createQuaternionFromRPY(msgOdom->wx, msgOdom->wy, msgOdom->wz));
			}
			m_isInitialized = true;
		} else {
			// Overwrite target pitch and roll angles with IMU data
			const double target_yaw = tf::getYaw(m_targetPose.getRotation());
			if (m_initializeFromIMU) {
				m_targetPose.setRotation(tf::createQuaternionFromRPY(msgIMU->angleX, msgIMU->angleY, target_yaw));
			} else if (m_initializeFromOdometry) {
				m_targetPose.setRotation(tf::createQuaternionFromRPY(msgOdom->wx, msgOdom->wy, target_yaw));
			}
		}
		m_odomOffset = m_targetPose * m_odomPose.inverse();
		transformedPose = m_targetPose;
		m_mustUpdateOffset = false;
	} else {
		transformedPose = m_odomOffset * m_odomPose;
	}

	// publish the transform over tf first:
	m_odomTransformMsg.header.stamp = msgOdom->header.stamp;
	tf::transformTFToMsg(transformedPose, m_odomTransformMsg.transform);
	m_transformBroadcaster.sendTransform(m_odomTransformMsg);

	//next, publish the actual odometry message:
	m_odom.header.stamp = msgOdom->header.stamp;
	m_odom.header.frame_id = m_odomFrameId;


	//set the velocity first (old values still valid)
	m_odom.twist.twist.linear.x = (msgOdom->x - m_odom.pose.pose.position.x) / dt;
	m_odom.twist.twist.linear.y = (msgOdom->y - m_odom.pose.pose.position.y) / dt;
	m_odom.twist.twist.linear.z = (msgOdom->z - m_odom.pose.pose.position.z) / dt;
	// TODO: calc angular velocity!
//	m_odom.twist.twist.angular.z = vth; ??

	//set the position from the above calculated transform
	m_odom.pose.pose.orientation = m_odomTransformMsg.transform.rotation;
	m_odom.pose.pose.position.x = m_odomTransformMsg.transform.translation.x;
	m_odom.pose.pose.position.y = m_odomTransformMsg.transform.translation.y;
	m_odom.pose.pose.position.z = m_odomTransformMsg.transform.translation.z;

	//publish the message
	m_odomPub.publish(m_odom);

	m_lastOdomTime = odomTime;

	// TODO: not used
//	m_lastWx = msgOdom->wx;
//	m_lastWy = msgOdom->wy;
//	m_lastWz = msgOdom->wz;
}
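A caveat on the twist computation above: it differences the raw odometry position (msgOdom->x) against the previously published pose, but the published pose is offset-corrected, so once a non-identity offset is active the two values live in different frames. A hedged sketch of a frame-consistent alternative, differencing two poses expressed in the same frame:

#include <tf/transform_datatypes.h>

// Approximate body-frame linear velocity from two consecutive poses that are
// expressed in the same frame (sketch; assumes dt > 0).
tf::Vector3 linearVelocity(const tf::Pose& prev, const tf::Pose& curr, double dt)
{
  // displacement of the new origin, expressed in the previous body frame
  const tf::Vector3 d = prev.inverse() * curr.getOrigin();
  return d / dt;
}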
bool
FootstepNavigation::getFootstep(const tf::Pose& from,
                                const State& from_planned,
                                const State& to,
                                humanoid_nav_msgs::StepTarget* footstep)
{
  // get footstep to reach 'to' from 'from'
  tf::Transform step = from.inverse() *
                       tf::Pose(tf::createQuaternionFromYaw(to.getTheta()),
                                tf::Point(to.getX(), to.getY(), 0.0));

  // set the footstep
  footstep->pose.x = step.getOrigin().x();
  footstep->pose.y = step.getOrigin().y();
  footstep->pose.theta = tf::getYaw(step.getRotation());
  if (to.getLeg() == LEFT)
    footstep->leg = humanoid_nav_msgs::StepTarget::left;
  else // to.leg == RIGHT
    footstep->leg = humanoid_nav_msgs::StepTarget::right;


  /* check if the footstep can be performed by the NAO robot ******************/

  // check if the step lies within the executable range
  if (performable(*footstep))
  {
    return true;
  }
  else
  {
    // check if there is only a minor divergence between the current support
    // foot and the foot placement used during the planning task: in such a case
    // perform the step that has been used during the planning
    float step_diff_x = fabs(from.getOrigin().x() - from_planned.getX());
    float step_diff_y = fabs(from.getOrigin().y() - from_planned.getY());
    float step_diff_theta = fabs(
        angles::shortest_angular_distance(
            tf::getYaw(from.getRotation()), from_planned.getTheta()));
    if (step_diff_x < ivAccuracyX && step_diff_y < ivAccuracyY &&
        step_diff_theta < ivAccuracyTheta)
    {
      step = tf::Pose(tf::createQuaternionFromYaw(from_planned.getTheta()),
                      tf::Point(from_planned.getX(), from_planned.getY(), 0.0)
                      ).inverse() *
             tf::Pose(tf::createQuaternionFromYaw(to.getTheta()),
                      tf::Point(to.getX(), to.getY(), 0.0));

      footstep->pose.x = step.getOrigin().x();
      footstep->pose.y = step.getOrigin().y();
      footstep->pose.theta = tf::getYaw(step.getRotation());

      return true;
    }

    return false;
  }

//  // ALTERNATIVE: clip the footstep into the executable range; if nothing was
//  // clipped: perform; if too much was clipped: do not perform
//  humanoid_nav_msgs::ClipFootstep footstep_clip;
//  footstep_clip.request.step = footstep;
//  ivClipFootstepSrv.call(footstep_clip);
//
//  if (performanceValid(footstep_clip))
//  {
//  	footstep.pose.x = footstep_clip.response.step.pose.x;
//  	footstep.pose.y = footstep_clip.response.step.pose.y;
//  	footstep.pose.theta = footstep_clip.response.step.pose.theta;
//  	return true;
//  }
//  else
//  {
//    return false;
//  }
}
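The footstep itself is once again the from.inverse() * to idiom: the target foot pose expressed in the current support foot's frame, which is what the StepTarget message encodes. A tiny worked example with made-up poses:

#include <tf/transform_datatypes.h>
#include <cstdio>

int main()
{
  // support foot at (1, 0) facing +x; target foot 0.1 m ahead and 0.1 m left
  tf::Pose from(tf::createQuaternionFromYaw(0.0), tf::Vector3(1.0, 0.0, 0.0));
  tf::Pose to(tf::createQuaternionFromYaw(0.1), tf::Vector3(1.1, 0.1, 0.0));

  tf::Transform step = from.inverse() * to;
  printf("x=%.2f y=%.2f theta=%.2f\n",
         step.getOrigin().x(), step.getOrigin().y(),
         tf::getYaw(step.getRotation()));  // x=0.10 y=0.10 theta=0.10
  return 0;
}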