void DiffDrivePoseControllerROS::spinOnce()
{
  if (this->getState())
  {
    ROS_DEBUG_STREAM_THROTTLE(1.0, "Controller spinning. [" << name_ <<"]");
    // determine pose difference in polar coordinates
    if (!getPoseDiff())
    {
      ROS_WARN_STREAM_THROTTLE(1.0, "Getting pose difference failed. Skipping control loop. [" << name_ <<"]");
      return;
    }
    // determine controller output (v, w) and check if goal is reached
    step();

    // set control output (v, w)
    setControlOutput();
    // Logging
    ROS_DEBUG_STREAM_THROTTLE(1.0, "Current state: [" << name_ <<"]");
    ROS_DEBUG_STREAM_THROTTLE(1.0,
                              "r = " << r_ << ", theta = " << theta_ << ", delta = " << delta_ << " [" << name_ <<"]");
    ROS_DEBUG_STREAM_THROTTLE(1.0, "cur = " << cur_ << ", v = " << v_ << ", w = " << w_ << " [" << name_ <<"]");
  }
  else
  {
    ROS_DEBUG_STREAM_THROTTLE(3.0, "Controller is disabled. Idling ... [" << name_ <<"]");
  }
}
  void
  TrackerViewer::spin()
  {
    boost::format fmtWindowTitle ("ViSP MBT tracker viewer - [ns: %s]");
    fmtWindowTitle % ros::this_node::getNamespace ();

    // Open an X11 window sized to the incoming image.
    vpDisplayX d(image_,
                 image_.getWidth(), image_.getHeight(),
                 fmtWindowTitle.str().c_str());

    // Fixed overlay positions (row, column) in pixels.
    vpImagePoint point (10, 10);
    vpImagePoint pointTime (22, 10);
    vpImagePoint pointCameraTopic (34, 10);
    ros::Rate loop_rate(80);

    boost::format fmt("tracking (x=%f y=%f z=%f)");
    boost::format fmtTime("time = %f");

    boost::format fmtCameraTopic("camera topic = %s");
    fmtCameraTopic % rectifiedImageTopic_;

    while (!exiting())
      {
        vpDisplay::display(image_);
        displayMovingEdgeSites();
        displayKltPoints();
        if (cMo_)
          {
            try
              {
                // Project the model at the current estimated pose and
                // draw it on top of the image.
                tracker_.initFromPose(image_, *cMo_);
                tracker_.display(image_, *cMo_,
                                 cameraParameters_, vpColor::red);
              }
            catch(...)
              {
                ROS_DEBUG_STREAM_THROTTLE(10, "failed to display cMo");
              }

            ROS_DEBUG_STREAM_THROTTLE(10, "cMo viewer:\n" << *cMo_);

            // Overlay the translation part of cMo, the image timestamp
            // and the camera topic name.
            fmt % (*cMo_)[0][3] % (*cMo_)[1][3] % (*cMo_)[2][3];
            vpDisplay::displayCharString
              (image_, point, fmt.str().c_str(), vpColor::red);
            fmtTime % info_->header.stamp.toSec();
            vpDisplay::displayCharString
              (image_, pointTime, fmtTime.str().c_str(), vpColor::red);
            vpDisplay::displayCharString
              (image_, pointCameraTopic, fmtCameraTopic.str().c_str(),
               vpColor::red);
          }
        else
          vpDisplay::displayCharString
            (image_, point, "tracking failed", vpColor::red);
        vpDisplay::flush(image_);
        ros::spinOnce();
        loop_rate.sleep();
      }
  }
bool DiffDrivePoseControllerROS::getPoseDiff()
{
  // use tf to get information about the goal pose relative to the base
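  // (ros::Time(0) requests the latest transform available)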
  try
  {
    tf_listener_.lookupTransform(base_frame_name_, goal_frame_name_, ros::Time(0), tf_goal_pose_rel_);
  }
  catch (tf::TransformException const &ex)
  {
    ROS_WARN_STREAM_THROTTLE(1.0, "Couldn't get transform from base to goal pose! [" << name_ <<"]");
    ROS_DEBUG_STREAM_THROTTLE(1.0, "tf error: " << ex.what());
    return false;
  }

  // determine distance to goal
  double r = std::sqrt(
      std::pow(tf_goal_pose_rel_.getOrigin().getX(), 2) + std::pow(tf_goal_pose_rel_.getOrigin().getY(), 2));
  // determine orientation of r relative to the base frame
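  // note: the y component is negated, i.e. delta is the negated bearing of the
  // goal position as seen from the base frame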
  double delta = std::atan2(-tf_goal_pose_rel_.getOrigin().getY(), tf_goal_pose_rel_.getOrigin().getX());

  // determine orientation of r relative to the goal frame:
  // theta is the goal frame's yaw relative to the base (wrapped) plus delta
  double heading = mtk::wrapAngle(tf::getYaw(tf_goal_pose_rel_.getRotation()));
  double theta = heading + delta;

  setInput(r, delta, theta);

  return true;
}
  void
  CovarianceVisual::setMessage
  (const geometry_msgs::PoseWithCovariance& msg)
  {
    // Construct pose position and orientation.
    const geometry_msgs::Point& p = msg.pose.position;
    Ogre::Vector3 position (p.x, p.y, p.z);
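    // Note: Ogre::Quaternion takes (w, x, y, z) while geometry_msgs
    // stores (x, y, z, w), hence the reordering below.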
    Ogre::Quaternion orientation
      (msg.pose.orientation.w,
       msg.pose.orientation.x,
       msg.pose.orientation.y,
       msg.pose.orientation.z);

    // Set position and orientation for axes scene node.
    if(!position.isNaN())
      axes_->setPosition (position);
    else
      ROS_WARN_STREAM_THROTTLE(1, "position contains NaN: " << position);
    if(!orientation.isNaN())
      axes_->setOrientation (orientation);
    else
      ROS_WARN_STREAM_THROTTLE(1, "orientation contains NaN: " << orientation);

    // Check the covariance for NaN before running the eigen decomposition
    // (only the leading entries are inspected here).
    for (unsigned i = 0; i < 3; ++i) {
      if (std::isnan(msg.covariance[i])) {
        ROS_WARN_THROTTLE(1, "covariance contains NaN");
        return;
      }
    }

    // Compute eigen values and vectors for both shapes.
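    // The second argument selects the 3x3 block of the 6x6 covariance:
    // offset 0 is the positional block, offset 3 the rotational block.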
    std::pair<Eigen::Matrix3d, Eigen::Vector3d>
      positionEigenVectorsAndValues
      (computeEigenValuesAndVectors (msg, 0));
    std::pair<Eigen::Matrix3d, Eigen::Vector3d>
      orientationEigenVectorsAndValues
      (computeEigenValuesAndVectors (msg, 3));

    Ogre::Quaternion positionQuaternion
      (computeRotation (msg, positionEigenVectorsAndValues));
    Ogre::Quaternion orientationQuaternion
      (computeRotation (msg, orientationEigenVectorsAndValues));

    positionNode_->setOrientation (positionQuaternion);
    orientationNode_->setOrientation (orientationQuaternion);

    // Compute scaling.
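    // Each axis is scaled by the standard deviation along the corresponding
    // eigenvector (square root of the eigenvalue), times scaleFactor_.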
    Ogre::Vector3 positionScaling
      (std::sqrt (positionEigenVectorsAndValues.second[0]),
       std::sqrt (positionEigenVectorsAndValues.second[1]),
       std::sqrt (positionEigenVectorsAndValues.second[2]));
    positionScaling *= scaleFactor_;

    Ogre::Vector3 orientationScaling
      (std::sqrt (orientationEigenVectorsAndValues.second[0]),
       std::sqrt (orientationEigenVectorsAndValues.second[1]),
       std::sqrt (orientationEigenVectorsAndValues.second[2]));
    orientationScaling *= scaleFactor_;

    // Set the scaling.
    if(!positionScaling.isNaN())
      positionNode_->setScale (positionScaling);
    else
      ROS_WARN_STREAM("positionScaling contains NaN: " << positionScaling);

    if(!orientationScaling.isNaN())
      orientationNode_->setScale (orientationScaling);
    else
      ROS_WARN_STREAM("orientationScaling contains NaN: " << orientationScaling);

    // Debugging.
    ROS_DEBUG_STREAM_THROTTLE
      (1.,
       "Position:\n"
       << position << "\n"
       << "Positional part 3x3 eigen values:\n"
       << positionEigenVectorsAndValues.second << "\n"
       << "Positional part 3x3 eigen vectors:\n"
       << positionEigenVectorsAndValues.first << "\n"
       << "Sphere orientation:\n"
       << positionQuaternion << "\n"
       << positionQuaternion.getRoll () << " "
       << positionQuaternion.getPitch () << " "
       << positionQuaternion.getYaw () << "\n"
       << "Sphere scaling:\n"
       << positionScaling << "\n"
       << "Rotational part 3x3 eigen values:\n"
       << orientationEigenVectorsAndValues.second << "\n"
       << "Rotational part 3x3 eigen vectors:\n"
       << orientationEigenVectorsAndValues.first << "\n"
       << "Cone orientation:\n"
       << orientationQuaternion << "\n"
       << orientationQuaternion.getRoll () << " "
       << orientationQuaternion.getPitch () << " "
       << orientationQuaternion.getYaw () << "\n"
       << "Cone scaling:\n"
       << orientationScaling
       );
  }