/***********************************************************************
 *  Method: GlobalPlanner::PlanNaive
 *  Params:
 * Returns:
 * Effects: Naively iterates over waypoints in order and assigns the first available robot to each waypoint
 ***********************************************************************/
void GlobalPlanner::PlanNaive()
{
    std::vector<Waypoint_Ptr> availableWaypoints = m_tm.GetAvailableWaypoints();

    for (std::vector<Waypoint_Ptr>::iterator it = availableWaypoints.begin(); it != availableWaypoints.end(); ++it)
    {
        Waypoint_Ptr wp = *it;
        // int bestRobot = GetBestSearchBot(wp->GetID()); // Nearest Robot to waypoint SOLUTION
        int bestRobot = GetFirstAvailableBot(); // NAIVE SOLUTION
        if (bestRobot != NO_ROBOT_FOUND)
        {
            // ROS_INFO_STREAM("Found a robot to explore waypoint ("<<wp->GetID()<<") : "<<bestRobot);
            // Assign Robot to Waypoint
            if ( !AssignRobotWaypoint(bestRobot , wp->GetID()) )
            {
                ROS_ERROR_STREAM_THROTTLE(5, "Error Assigning Robot " << m_robots[bestRobot]->GetName() << "(" << m_robots[bestRobot]->GetID() << ")"
                                             << " - Waypoint(" << wp->GetID() << ")");
            }
        }
        else
        {
            // ROS_INFO_THROTTLE(5, "FAILED TO FIND A ROBOT");
        }
    }
}
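For reference, a first-available lookup can be as simple as the sketch below. The real GetFirstAvailableBot body is not shown in this example, so the robot container (a map of id to Robot_Ptr) and the WAITING-state availability test are assumptions.
// Minimal sketch, assuming m_robots maps id -> Robot_Ptr and that a robot
// counts as "available" when it is in the WAITING state (both assumptions).
int GlobalPlanner::GetFirstAvailableBot()
{
    for (std::map<int, Robot_Ptr>::iterator it = m_robots.begin(); it != m_robots.end(); ++it)
    {
        if (it->second->GetState() == RobotState::WAITING)
        {
            return it->first;
        }
    }
    return NO_ROBOT_FOUND;
}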
Example #2
void MotorController::publishResponse(const ResponseMessage& response)
{
  /* update the exponentially weighted moving voltage average and publish */
  std_msgs::Float64 battery_msg;
  battery_avg_ = (battery_alpha_ * battery_avg_) + ((1 - battery_alpha_) * response.voltage);
  battery_msg.data = battery_avg_;
  battery_pub_.publish(battery_msg);

  if (battery_avg_ < min_battery_voltage_)
  {
    ROS_ERROR_STREAM_THROTTLE(log_period_, "Battery voltage dangerously low:"
                                               << "\n\tCurr. Voltage: " << battery_avg_
                                               << "\n\tMin. Voltage: " << min_battery_voltage_);
  }

  std_msgs::Bool enabled_msg;
  enabled_msg.data = response.estop;
  enabled_pub_.publish(enabled_msg);

  /* publish encoder feedback */
  igvc_msgs::velocity_pair enc_msg;
  enc_msg.left_velocity = response.speed_l;
  enc_msg.right_velocity = response.speed_r;
  enc_msg.duration = response.dt_sec;
  enc_msg.header.stamp = ros::Time::now() - ros::Duration(response.dt_sec);
  enc_pub_.publish(enc_msg);
}
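The battery average above is an exponentially weighted moving average: each sample pulls the average by a fraction (1 - alpha) of the difference, so alpha close to 1.0 smooths aggressively and alpha close to 0.0 tracks the raw readings. The same recurrence as a standalone sketch (the function name is illustrative):
// EWMA step matching the battery_avg_ update above.
// avg' = alpha*avg + (1 - alpha)*sample
double ewmaUpdate(double avg, double sample, double alpha)
{
  return (alpha * avg) + ((1.0 - alpha) * sample);
}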
void QualisysDriver::run() {

  prt_packet = port_protocol.GetRTPacket();
  CRTPacket::EPacketType e_type;
  port_protocol.GetCurrentFrame(CRTProtocol::Component6dEuler);

  if(port_protocol.ReceiveRTPacket(e_type, true)) {

    switch(e_type) {
      // Case 1 - sHeader.nType 0 indicates an error
      case CRTPacket::PacketError:
        ROS_ERROR_STREAM_THROTTLE(
            1, "Error when streaming frames: "
            << port_protocol.GetRTPacket()->GetErrorString());
        break;

      // Case 2 - No more data
      case CRTPacket::PacketNoMoreData:
        ROS_WARN_STREAM_THROTTLE(1, "No more data");
        break;

      // Case 3 - Data received
      case CRTPacket::PacketData:
        handleFrame();
        break;

      default:
        ROS_ERROR_THROTTLE(1, "Unknown CRTPacket case");
        break;
    }
  }

  return;
}
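run() consumes at most one RT packet per call, so it is typically driven from an outer loop; a minimal sketch, assuming QualisysDriver is default-constructible and already connected (the setup code is not part of this example):
int main(int argc, char** argv)
{
  ros::init(argc, argv, "qualisys_driver");
  QualisysDriver driver;  // assumed constructor; connection setup not shown
  while (ros::ok())
  {
    driver.run();         // one packet per call: error, no-more-data, or data
    ros::spinOnce();
  }
  return 0;
}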
void GlobalPlanner::QueryRobot(int id)
{
    global_planner::RobotStatusSrv s;
    s.request.id = id;

    if (m_statusServices[id])
    {
        if (m_statusServices[id].call(s))
        {
            m_robots[s.request.id]->SetData(s.response.status);
            ROS_INFO_STREAM_THROTTLE(10.0, "Received response from robot: "<<s.response.status.id<<" : "<<m_robots[s.request.id]->ToString());
        }
        else
        {
            ROS_ERROR_STREAM("Did not receive good response from robot: "<<id);
        }
    }
    else
    {
        ROS_ERROR_STREAM_THROTTLE(3.0, "Not connected to robot: "<<id<<"... retrying");
        std::string serviceTopic = Conversion::RobotIDToServiceName(id);
        m_statusServices[id] = m_nh->serviceClient<global_planner::RobotStatusSrv>(serviceTopic, false);
    }
}
void RobotTransforms::updateState(const std::string joint_name, double state) {
    if (!initialized_) return;
    try {
        robot_state_ptr_->setVariablePosition(joint_name, state);
    } catch (const moveit::Exception& e) {
        ROS_ERROR_STREAM_THROTTLE(1, "Couldn't update state for joint '" << joint_name << "'. Exception: " << e.what());
    }
}
Eigen::Affine3d RobotTransforms::getTransform(std::string link_name) {
    if (!initialized_) return Eigen::Affine3d::Identity();
    try {
        const Eigen::Affine3d& transform = robot_state_ptr_->getGlobalLinkTransform(link_name);
        return transform;
    } catch (const moveit::Exception& e) {
        ROS_ERROR_STREAM_THROTTLE(1, "Couldn't get transform for link '" << link_name << "'. Exception: " << e.what());
        return Eigen::Affine3d::Identity();
    }
}
double RobotTransforms::getJointPosition(const std::string joint_name) {
    if (!initialized_) return 0.0;
    double position = 0;
    try {
        position = robot_state_ptr_->getVariablePosition(joint_name);
    } catch (const moveit::Exception& e) {
        ROS_ERROR_STREAM_THROTTLE(1, "Couldn't get state for joint '" << joint_name << "'. Exception: " << e.what());
    }
    return position;
}
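All three methods guard a shared MoveIt RobotState behind initialized_ and a throttled error, so callers get a safe default instead of an exception. A hedged usage sketch (joint and link names are placeholders):
RobotTransforms robot_transforms;                    // assumed initialized elsewhere
robot_transforms.updateState("arm_joint_1", 0.5);    // push one joint position
Eigen::Affine3d tf = robot_transforms.getTransform("gripper_link");
ROS_INFO_STREAM("gripper translation: " << tf.translation().transpose());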
Example #8
  bool CloudAssimilator::timeSyncCloud(std_msgs::Header header, pcl::PointCloud<pcl::PointXYZ>& cloud, pcl::PointCloud<pcl::PointXYZ>& synced_cloud)
  {
    if(!m_tf_listener.waitForTransform(header.frame_id, cloud.header.frame_id, header.stamp, ros::Duration(0.05), ros::Duration(0.001)))
    {
      ROS_ERROR_STREAM_THROTTLE(1.0, "Transform between " << header.frame_id << " and " << cloud.header.frame_id << " at time " << header.stamp << " failed!");
      return false;
    }
    pcl_ros::transformPointCloud(header.frame_id, header.stamp, cloud, cloud.header.frame_id, synced_cloud, m_tf_listener);

    return true;
  }
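A hedged usage sketch: re-express an older cloud at the frame and stamp of a newer header before fusing (variable names are placeholders):
    pcl::PointCloud<pcl::PointXYZ> synced;
    if (timeSyncCloud(latest_header, old_cloud, synced))
    {
      // synced now holds old_cloud expressed in latest_header.frame_id at
      // latest_header.stamp; on failure the throttled error above has fired.
    }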
/***********************************************************************
 *  Method: GlobalPlanner::PlanNNWaypoint
 *  Params:
 * Returns:
 * Effects: Iterates over available robots and assigns each to its nearest available waypoint
 ***********************************************************************/
void GlobalPlanner::PlanNNWaypoint()
{
    ROS_INFO_STREAM_THROTTLE(10,"Planning NN Waypoints...");
    std::vector<Robot_Ptr> availableRobots = GetAvailableRobots(1); // Find those robots with at least 1 storage space
    std::map<int, Waypoint_Ptr> allWaypoints = m_tm.GetWaypoints();
    std::vector<Waypoint_Ptr> availableWaypoints = m_tm.GetAvailableWaypoints();
    // Break if all waypoints reached.
    if (availableWaypoints.size() == 0) { return; }

    for (std::vector<Robot_Ptr>::iterator i = availableRobots.begin(); i != availableRobots.end(); ++i)
    {
        Robot_Ptr robot = *i;
        if (robot->GetType() == RobotState::BIN_BOT) {
            // TEMP: bin bots don't search waypoints..
            continue;
        }

        // Get updated set of available waypoints each time
        availableWaypoints = m_tm.GetAvailableWaypoints();
        ROS_INFO_STREAM("Waypoints to go: (" << availableWaypoints.size() << ")");

        // Break if all waypoints reached.
        if (availableWaypoints.size() == 0) {
            break;
        }

        int waypoint_id = GetWaypointClosestToRobot(robot->GetID());
        if (waypoint_id == NO_WAYPOINT_FOUND) {
            ROS_WARN("No Waypoint Found!");
            break;
        }

        // Assign Robot to Waypoint
        if (!AssignRobotWaypoint(robot->GetID() , waypoint_id))
        {
            ROS_ERROR_STREAM_THROTTLE(1, "Error Assigning Robot " << robot->GetName() << "(" << robot->GetID() << ")"
                                         << " - Waypoint(" << waypoint_id << ")");
        }

        // Print out waypoints and their statuses
        for (std::map<int, Waypoint_Ptr>::iterator it = allWaypoints.begin(); it != allWaypoints.end(); ++it)
        {
            ROS_INFO_STREAM(it->second->ToShortString());
        }
    }
}
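A nearest-waypoint query might look like the sketch below; the real GetWaypointClosestToRobot is not shown in this example, so the pose accessors and the Distance() helper are hypothetical.
// Hypothetical sketch (requires <limits>); Distance() and GetPose() are assumed.
int GlobalPlanner::GetWaypointClosestToRobot(int robotID)
{
    double bestDist = std::numeric_limits<double>::max();
    int bestID = NO_WAYPOINT_FOUND;
    std::vector<Waypoint_Ptr> wps = m_tm.GetAvailableWaypoints();
    for (size_t i = 0; i < wps.size(); ++i)
    {
        double d = Distance(m_robots[robotID]->GetPose(), wps[i]->GetPose());
        if (d < bestDist)
        {
            bestDist = d;
            bestID = wps[i]->GetID();
        }
    }
    return bestID;
}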
Example #10
bool TfLookup::_lookupTransform(const std::string& target,
                                const std::string& source, const ros::Time& time,
                                tf::StampedTransform& trans)
{
    std::string err;

    if (!_tfListener->canTransform(target, source, time, &err))
    {
        ROS_ERROR_STREAM_THROTTLE(1,
                                  "Error getting transform from " << source
                                  << " to " << target
                                  << " at time: " << time
                                  << " : " << err);
        return false;
    }

    _tfListener->lookupTransform(target, source, time, trans);
    return true;
}
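Because canTransform is checked first, the lookupTransform call should not throw and callers only need to test the boolean; a minimal usage sketch (frame names are placeholders):
    tf::StampedTransform trans;
    if (_lookupTransform("base_link", "odom", ros::Time(0), trans))  // Time(0): latest available
    {
      ROS_INFO_STREAM("x offset: " << trans.getOrigin().x());
    }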
Example #11
/// PINHOLE
void VoNode::incomingImage(const sensor_msgs::ImageConstPtr& msg){

    /// Measure HZ, Processing time, Image acquisition time
    ros::WallTime t0 = ros::WallTime::now();

    /// Check TIME
    if (lastTime>msg->header.stamp){
        ROS_WARN("Detected negative time jump, resetting TF buffer");
        subTF.clear();
    }
    lastTime = msg->header.stamp;


    /// Get Image
    cv_bridge::CvImageConstPtr cvPtr;
    try {
        cvPtr = cv_bridge::toCvShare(msg, OVO::COLORS[colorId]);
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR_STREAM_THROTTLE(1,"Failed to understand incoming image:" << e.what());
        return;
    }

    /// GET IMU
    tf::StampedTransform imuStamped;

    if (USE_IMU){
        try{
            // sent out by the crazyflie driver (driver.py)
            subTF.waitForTransform(IMU_FRAME, WORLD_FRAME, msg->header.stamp-imgDelay, ros::Duration(0.1) );
            subTF.lookupTransform(IMU_FRAME, WORLD_FRAME, msg->header.stamp-imgDelay, imuStamped);
        } catch(tf::TransformException& ex){
            ROS_ERROR_THROTTLE(1,"TF exception. Could not get flie IMU transform: %s", ex.what());            
            return;
        }
    } else {
        imuStamped.setRotation(tf::Quaternion(0.0, 0.0, 0.0, 1.0)); // identity (x,y,z,w), as in incomingSynthetic below
    }

    ROS_INFO(OVO::colorise("\n==============================================================================",OVO::FG_WHITE, OVO::BG_BLUE).c_str());

    /// Make Frame
    ROS_ERROR("MIGHT NEED TO INVERSE IMU");
    Frame::Ptr frame(new Frame(cvPtr->image, imuStamped));

    const bool qualityOK = frame->getQuality()>0.9 || frame->getQuality()<0;
    if (qualityOK){
        /// Process Frame
        ROS_INFO("NOD > PROCESSING FRAME [%d]", frame->getId());
        odometry.update(frame);
    } else {
        ROS_WARN("NOD = SKIPPING FRAME, BAD QUALITY");
    }

    /// Compute running average of processing time
    double time = (ros::WallTime::now()-t0).toSec();
    timeAvg = (timeAvg*timeAlpha) + (1.0 - timeAlpha)*time;

    /// Clean up and output
    ROS_INFO("NOD < FRAME [%d|%d] PROCESSED [%.1fms, Avg: %.1fms]", frame->getId(), frame->getKfId(), time*1000., timeAvg*1000.);


    if (qualityOK){
        publishStuff();
    }

    /// publish debug image
    if (pubCamera.getNumSubscribers()>0 || pubImage.getNumSubscribers()>0){
        sensor_msgs::CameraInfoPtr camInfoPtr = frame->getCamInfo();
        cv::Mat drawImg;
        if (qualityOK){
            drawImg = odometry.getVisualImage();
        } else {
            drawImg = odometry.getVisualImage(frame);
        }

        OVO::putInt(drawImg, time*1000., cv::Point(10,drawImg.rows-1*25), CV_RGB(200,0,200), false, "S:");

//        if (imageRect.channels() != image.channels()){
//            cv::cvtColor(imageRect, imageRect,CV_BGR2GRAY);
//        }

        /// Send out
        cv_bridge::CvImage cvi;
        cvi.header.stamp = msg->header.stamp;
        cvi.header.seq = msg->header.seq;
        cvi.header.frame_id = "cam_gt";//CAM_FRAME;
        cvi.encoding = sensor_msgs::image_encodings::BGR8;// OVO::COLORS[colorId];
        //cvi.image = imageRect;
        cvi.image = drawImg;
        camInfoPtr->header = cvi.header;
        pubImage.publish(cvi.toImageMsg());

        cvi.image = cvi.image.colRange(0, cvi.image.cols/2 - 1);
        pubCamera.publish(cvi.toImageMsg(), camInfoPtr);

    } else {
        ROS_WARN_THROTTLE(1, "NOT DRAWING OUTPUT");
    }
}
Example #12
void VoNode::incomingSynthetic(const ollieRosTools::synthFrameConstPtr& msg){
    ROS_INFO((std::string("\n")+OVO::colorise("============================================================================== FRAME %d ",OVO::FG_WHITE, OVO::BG_BLUE)).c_str(),msg->frameId);

    /// Measure HZ, Processing time, Image acquisition time
    ros::WallTime t0 = ros::WallTime::now();

    /// Check TIME
    if (lastTime>msg->header.stamp){
        ROS_WARN("Detected negative time jump, resetting TF buffer");
        subTF.clear();
    }
    lastTime = msg->header.stamp;
    /// Get Image
    cv_bridge::CvImageConstPtr cvPtr;
    try {
        cvPtr = cv_bridge::toCvShare(msg->img, msg, OVO::COLORS[colorId]);
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR_STREAM_THROTTLE(1,"Failed to understand incoming image:" << e.what());
        return;
    }

    /// GET IMU
    tf::StampedTransform imuStamped;

    if (USE_IMU){
        try{
            // sent out by the crazyflie driver (driver.py)
            subTF.waitForTransform(IMU_FRAME, WORLD_FRAME, msg->header.stamp-imgDelay, ros::Duration(0.1) );
            subTF.lookupTransform(IMU_FRAME, WORLD_FRAME, msg->header.stamp-imgDelay, imuStamped);
        } catch(tf::TransformException& ex){
            ROS_ERROR_THROTTLE(1,"TF exception. Could not get flie IMU transform: %s", ex.what());
            return;
        }
    } else {
        imuStamped.setRotation(tf::Quaternion(0.0, 0.0, 0.0, 1.0));
    }

    /// NEED TO SEE ABOUT THIS
    tf::StampedTransform imuRevStamped(imuStamped.inverse(), imuStamped.stamp_, imuStamped.frame_id_, imuStamped.child_frame_id_);

    /// Make Frame
    tf::Transform cam, imu;
    tf::transformMsgToTF(msg->camPose, cam);
    tf::transformMsgToTF(msg->imuPose, imu);    
    //cam = cam.inverse();
    //imu = imu.inverse();
    Frame::Ptr frame(new FrameSynthetic(cvPtr->image, imuRevStamped, msg->pts2d_noise, cam, imu, msg->camInfo));


    /// Process Frame
    ROS_INFO("NOD > PROCESSING FRAME [%d]", frame->getId());
    odometry.update(frame);

    /// Compute running average of processing time
    double time = (ros::WallTime::now()-t0).toSec();
    timeAvg = (timeAvg*timeAlpha) + (1.0 - timeAlpha)*time;

    /// Clean up and output
    ROS_INFO("NOD < FRAME [%d|%d] PROCESSED [%.1fms, Avg: %.1fms]", frame->getId(), frame->getKfId(), time*1000., timeAvg*1000.);
    publishStuff();

    /// publish debug image
    if (pubCamera.getNumSubscribers()>0 || pubImage.getNumSubscribers()>0){
        sensor_msgs::CameraInfoPtr camInfoPtr = frame->getCamInfo();
        cv::Mat drawImg = odometry.getVisualImage();
        OVO::putInt(drawImg, time*1000., cv::Point(10,drawImg.rows-1*25), CV_RGB(200,0,200), false, "s:");

//        if (imageRect.channels() != image.channels()){
//            cv::cvtColor(imageRect, imageRect,CV_BGR2GRAY);
//        }

        /// Send out
        cv_bridge::CvImage cvi;
        cvi.header.stamp = msg->header.stamp;
        cvi.header.seq = msg->header.seq;
        cvi.header.frame_id = CAM_FRAME;
        cvi.encoding = sensor_msgs::image_encodings::BGR8;// OVO::COLORS[colorId];
        //cvi.image = imageRect;
        cvi.image = drawImg;
        camInfoPtr->header = cvi.header;
        //pubCamera.publish(cvi.toImageMsg(), camInfoPtr);
        pubImage.publish(cvi.toImageMsg());
    }

}
Example #13
/// PINHOLE
void PreProcNode::incomingImage(const sensor_msgs::ImageConstPtr& msg){

    if (pubCamera.getNumSubscribers()>0){

        /// Measure HZ, Processing time, Image acquisition time
        ros::WallTime time_s0 = ros::WallTime::now();

        /// Get CV Image
        cv_bridge::CvImageConstPtr cvPtr;
        //cv_bridge::CvImagePtr cvPtr;
        try {
            //cvPtr = cv_bridge::toCvShare(msg, enc::MONO8);
            cvPtr = cv_bridge::toCvShare(msg, colors[colorId]);
            //cvPtr = cv_bridge::toCvCopy(msg, enc::MONO8);
            //cvPtr = cv_bridge::toCvCopy(msg, enc::BGR8);
        } catch (cv_bridge::Exception& e) {
            ROS_ERROR_STREAM_THROTTLE(1,"Failed to understand incoming image:" << e.what());
            return;
        }

        /// PreProcess Frame
        cv::Mat image = preproc.process(cvPtr->image);

    
        /// PTAM Rectification
        cv::Mat imageRect;
        imageRect = camModel.rectify(image);

/*


        /// ////// Test Bearing Vectors
        std::vector<cv::Point2f> kps;
        bool found = cv::findChessboardCorners(imageRect, cv::Size(8,6), kps);
        //cv::drawChessboardCorners(imageRect, cv::Size(8,6), kps, found);



        //std::vector<cv::KeyPoint> kps;
        //cv::FAST(imageRect, kps, 100.5, true);
        //cv::drawKeypoints(imageRect, kps, imageRect);
        //cv::drawChessboardCorners(imageRect, cv::Size(8,6), kps, found);


        Eigen::MatrixXd bv;
        camModel.bearingVectors(kps, bv);
        const tf::Quaternion q(1,0,0,0);
        for (uint i=0; i<kps.size(); ++i){
            pubTF.sendTransform(tf::StampedTransform(tf::Transform(q,tf::Vector3(bv(i,0),bv(i,1),bv(i,2))), ros::Time::now(), "/cam", "/F"+boost::lexical_cast<std::string>(i)));
        }
        if (imageRect.channels() != image.channels()){
            cv::cvtColor(imageRect, imageRect,CV_BGR2GRAY);
        }


        /// //////

        try{
            // sent out by the crazyflie driver driver.py
            tf::StampedTransform imu;
            subTF.waitForTransform("/world", "/cf", msg->header.stamp-ros::Duration(0), ros::Duration(0.05) );
            subTF.lookupTransform("/world", "/cf", msg->header.stamp-ros::Duration(0), imu);

            double r,p,y;
            OVO::tf2RPY(imu, r,p,y);
            camModel.rotatePoints(kps, r);



        } catch(tf::TransformException& ex){
            ROS_ERROR_THROTTLE(1,"TF exception. Could not get flie IMU transform: %s", ex.what());
        }

*/

        sensor_msgs::CameraInfoPtr infoMsgPtr = camModel.getCamInfo();

        /// Send out
        cv_bridge::CvImage cvi;
        cvi.header.stamp = msg->header.stamp;
        cvi.header.seq = msg->header.seq;
        cvi.header.frame_id = msg->header.frame_id;
        //cvi.encoding = enc::MONO8;
        cvi.encoding = colors[colorId];
        cvi.image = imageRect;
        infoMsgPtr->header = cvi.header;
        pubCamera.publish(cvi.toImageMsg(), infoMsgPtr);

        // Compute running average of processing time
        timeAvg = timeAvg*timeAlpha + (1.0 - timeAlpha)*(ros::WallTime::now()-time_s0).toSec();
        ROS_INFO_THROTTLE(1, "Processing Time: %.1fms", timeAvg*1000.);
    } else {
        //ROS_INFO_THROTTLE(5, "Not processing images as not being listened to");
    }

}
int main(int argc, char** argv)
{

  ros::init(argc, argv, "optitrack_tf_broadcaster");
  ros::NodeHandle nh;
  ros::NodeHandle nh_private("~");

  tf::Vector3 tmpV3;
  tf::Quaternion tmpQuat;

  tf::Matrix3x3 btm;
  OptiTrack *tracker;

  std::vector<tf::StampedTransform> robotTf;
  tf::StampedTransform visionTf;


  visionTf.setIdentity();
  visionTf.frame_id_ = "world";
  visionTf.child_frame_id_ = "vision";

  double pos[3];
  double orient[3][3];

  static tf::TransformBroadcaster br;

  std::string localip, objlist;
  bool exists = nh_private.getParam("local_ip", localip);

  if (!exists) {
    ROS_FATAL("You have to define local_ip!");
    return -1;
  }

  int publish_frequency = 250;
  nh_private.getParam("publish_frequency", publish_frequency);

  bool bUseThread = false;
  nh_private.getParam("use_thread", bUseThread);

  tracker = new OptiTrack();
  int ret;
  if((ret = tracker->Init(localip.c_str(), bUseThread)) <= 0) {
    if(ret == -1)
      ROS_FATAL("Cannot open socket. Check local ip!");
    else if(ret == -2)
      ROS_FATAL("Cannot receive data. Check windows server!");
    return -1;
  }
  tracker->enableWarnings(false);

  std::string calibfile;
  bool bUseCalibration = false;


  if(nh_private.getParam("calib_file", calibfile)) {

    std::vector<std::string> all_calib_files;
    explode(calibfile, all_calib_files);

    for(unsigned int f=0; f<all_calib_files.size(); ++f) {
      FILE* file = NULL;
      file = fopen(all_calib_files[f].c_str(), "r");

      robotTf.push_back(newInitialRobotFrame(f));
      if(file == NULL) {
        ROS_WARN_STREAM("Calibration file \""<<calibfile<<"\" not found for robot number "<<f<<". Robot frame will be identity.");
      } else {
        int dum;
        for(int i=0; i<3; i++)
          for(int j=0; j<3; j++)
            dum = fscanf(file, "%lf", &(orient[i][j]));

        btm.setValue(orient[0][0], orient[0][1], orient[0][2],
                     orient[1][0], orient[1][1], orient[1][2],
                     orient[2][0], orient[2][1], orient[2][2]);



        for(int j=0; j<3; j++)
          dum = fscanf(file, "%lf", &(pos[j]));

        tmpV3.setValue(pos[0], pos[1], pos[2]);

        btm = btm.transpose();
        tmpV3 = btm*tmpV3;
        tmpV3 *= -1.0;

        btm.getRotation(tmpQuat);
        robotTf[f].setOrigin(tmpV3);
        robotTf[f].setRotation(tmpQuat);

        bUseCalibration = true;
        fclose(file);
      }
    }
  }

  if(robotTf.size() == 0) {
    ROS_WARN("No calibration file provided! Assuming single robot with identity calibration.");
    robotTf.push_back(newInitialRobotFrame(0));
  }

  std::vector<std::string> obj_names;
  exists &= nh_private.getParam("obj_list", objlist);


  if(!exists) {			// If parameter obj_list does not exist, track all objects available
    obj_names = tracker->GetRBodyNameList();
  } else {				// If parameter obj_list exists, retrieve object names from the delimited list string

    explode(objlist, obj_names);
  }

  std::string tmp = "";
  unsigned int i;
  for(i=0; i<obj_names.size()-1; i++)
    tmp = tmp + obj_names[i] + " ; ";

  tmp = tmp + obj_names[i];
  unsigned int num_obj = obj_names.size();

  ROS_INFO("The following parameters will be used");
  ROS_INFO_STREAM("publish_frequency = " << publish_frequency << " Hz");
  ROS_INFO_STREAM("local_ip = " << localip);
  ROS_INFO_STREAM("Tracking " << num_obj << " objects ==> " <<tmp);
  ROS_INFO_STREAM("use_thread = " << bUseThread);
  ROS_INFO_STREAM("use_calibration = " << bUseCalibration);

  for(i=0; i<num_obj; i++)
    tracker->enableRBody(obj_names[i].c_str(), true);

  ros::Rate r(publish_frequency);

  std::vector<tf::StampedTransform> transforms;
  if(num_obj)
    transforms.resize(num_obj);

  for(i=0; i<num_obj; i++) {
    transforms[i].child_frame_id_ = obj_names[i];
    transforms[i].frame_id_ = "vision";
    transforms[i].setIdentity();
  }


  for(unsigned int i=0; i<robotTf.size(); ++i ) {
    transforms.push_back(robotTf[i]);
  }
  // Only for backward compatibility.
  // The robot frame used to be called /robot
  // In the multi-robot setting the frames are called /robot<i>
  // Here we make a new frame called /robot as a copy of the frame /robot<0>
  tf::StampedTransform rbtframe = robotTf[0];
  rbtframe.child_frame_id_ = "robot";
  transforms.push_back(rbtframe);

  transforms.push_back(visionTf);

  ROS_INFO("TF Broadcaster started");
  while(ros::ok()) {
    ros::spinOnce();

    if(tracker->Update() < 0) {
      ROS_ERROR_STREAM_THROTTLE(1, "Tracker update failed");
      break;
    }

    for(i=0; i<num_obj; i++) {
      if(!tracker->getRBodyPosition(pos, obj_names[i].c_str())
         || !tracker->getRBodyOrientation(orient, obj_names[i].c_str()))
        ROS_WARN_STREAM_THROTTLE(1, "Object " << obj_names[i] << " not detected");
      else {
        btm.setValue(orient[0][0], orient[0][1], orient[0][2],
                     orient[1][0], orient[1][1], orient[1][2],
                     orient[2][0], orient[2][1], orient[2][2]);

        tmpV3[0] = pos[0];
        tmpV3[1] = pos[1];
        tmpV3[2] = pos[2];

        btm.getRotation(tmpQuat);
        transforms[i].setOrigin(tmpV3);
        transforms[i].setRotation(tmpQuat);

      }
      transforms[i].stamp_ = ros::Time::now();
    }
    transforms[num_obj].stamp_ = ros::Time::now();
    transforms[num_obj+1].stamp_ = ros::Time::now();

    br.sendTransform(transforms);
    r.sleep();
  }

  ROS_INFO_STREAM("Stopping node");
  delete tracker;
  nh.shutdown();
  nh_private.shutdown();
  return 0;
}
/***********************************************************************
 *  Method: RobotController::StateExecute
 *  Params: void *args
 * Returns: void
 * Effects: specifies what to run while in a state, and transitions if
 *  appropriate
 ***********************************************************************/
void RobotController::StateExecute()
{
    // WHILE in WAITING:
    //      IF received a waypoint: transition(NAVIGATING)
    //      IF received a dump message: transition(DUMPING)
    // WHILE in NAVIGATING (waypoint navigation):
    //      IF received a (different) waypoint: change the pose to the new one
    //      IF received a dump message: transition(DUMPING)
    //      IF received a stop/cancel/estop: send waypoint result message (forced_stop) -> transition(WAITING)
    //      IF reached final pose of the waypoint, send waypoint result message (succeed) -> transition(WAITING)
    // WHILE in DUMPING (going to dump):
    //      IF received a (different) waypoint: change the pose to the new one
    //      IF received a dump message: transition(DUMPING)
    //      IF received a stop/cancel/estop: send waypoint result message (forced_stop) -> transition(WAITING)
    //      IF reached final pose of the waypoint, send waypoint result message (succeed) -> transition(WAITING)
    // ...
    bool execResult = false;  // assigned by the tag-processor Execute() calls below, but not otherwise read
    // ROS_INFO_STREAM_THROTTLE(1.0, "STATE: "<<RobotState::ToString(m_status.GetState()));
    switch(m_status.GetState())
    {
        case RobotState::WAITING:
            if (m_tagProcessor->ShouldPause())
            {
                SendText("should pause - Waiting");
                // SendSound("mario_pause.wav");
                Transition(RobotState::WAITING_TAG_SPOTTED);
            }
        break;

        //In this state, the robot should now be stopping and getting a more accurate view of the tag
        case RobotState::WAITING_TAG_SPOTTED:
            ros::spinOnce();
            execResult = m_tagProcessor->Execute();
            // ROS_DEBUG_STREAM_THROTTLE(0.5, "Result from execute: "<<execResult);
            if (m_tagProcessor->ShouldResume())
            {
                //Transition back to the navigating state, using the same goal as before
                SendText("should resume - WAITING_TAG_SPOTTED");
                ROS_INFO("Resuming robot");
                Transition(RobotState::WAITING_TAG_FINISHED);
                break;
            }
            else
            {
                ROS_INFO_STREAM_THROTTLE(1, "Waiting on the OK to resume from the tag processor");
            }

            if (ros::Time::now() - m_timeEnteringState > ros::Duration(5))
            {
                ROS_ERROR_STREAM("ERROR: could not find any tags while stopped.  we are going to just resume WAITING");
                Transition(RobotState::NAVIGATING);
            }
        break;

        case RobotState::WAITING_TAG_FINISHED:
            if (ros::Time::now() - m_timeEnteringState > ros::Duration(1.0))
            {
                SendSound("mario_pause.wav");
                SendText("resuming - WAITING_TAG_FINISHED");
                Transition(RobotState::WAITING);
                m_tagProcessor->SetShouldPause(false);
            }
            break;

        case RobotState::NAVIGATING:
            if (m_tagProcessor->ShouldPause())
            {
                Transition(RobotState::NAVIGATING_TAG_SPOTTED);
                // SendSound("mario_pause.wav");
            }
            else
            {
                actionlib::SimpleClientGoalState::StateEnum result = action_client_ptr->getState().state_;
                switch (result)
                {
                    case actionlib::SimpleClientGoalState::SUCCEEDED:
                        ROS_INFO_STREAM("Movebase reached target (task id = "<<m_status.GetTaskID()<<")");
                        SendWaypointFinished(TaskResult::SUCCESS);
                        if (m_status.GetTaskID() < WAYPOINT_START_ID)
                        {
                            m_status.IncrementStorageUsed();
                        }
                        Transition(RobotState::WAITING);
                        break;
                    case actionlib::SimpleClientGoalState::ABORTED:
                    case actionlib::SimpleClientGoalState::REJECTED:
                    case actionlib::SimpleClientGoalState::LOST:
                    case actionlib::SimpleClientGoalState::RECALLED:
                    case actionlib::SimpleClientGoalState::PREEMPTED:
                        ROS_ERROR_STREAM("Navigation Failed: " << action_client_ptr->getState().toString() );
                        SendWaypointFinished(TaskResult::FAILURE);
                        Transition(RobotState::WAITING);
                        break;

                    case actionlib::SimpleClientGoalState::ACTIVE:
                    case actionlib::SimpleClientGoalState::PENDING:
                    default:
                        ROS_INFO_STREAM_THROTTLE(10, "Navigation state = " << action_client_ptr->getState().toString() );
                        break;
                }
            }
        break;

        //In this state, the robot should now be stopping and getting a more accurate view of the tag
        case RobotState::NAVIGATING_TAG_SPOTTED:
            ros::spinOnce();
            execResult = m_tagProcessor->Execute();
            // ROS_DEBUG_STREAM_THROTTLE(0.5, "Result from execute: "<<execResult);
            if (m_tagProcessor->ShouldResume())
            {
                //Transition back to the navigating state, using the same goal as before
                SendText("Resuming robot");
                ROS_INFO("Resuming robot");
                SendText("should resume- NAVIGATING_TAG_SPOTTED");
                Transition(RobotState::NAVIGATING_TAG_FINISHED);
            }
            else
            {
                ROS_INFO_STREAM_THROTTLE(1, "Waiting on the OK to resume from the tag processor");
            }

            if (ros::Time::now() - m_timeEnteringState > ros::Duration(5))
            {
                ROS_ERROR_STREAM("Could not find any tags while stopped.  we are going to just resume NAVIGATING");
                Transition(RobotState::NAVIGATING);
                m_tagProcessor->SetShouldPause(false);
            }
        break;

        case RobotState::NAVIGATING_TAG_FINISHED:
            if (ros::Time::now() - m_timeEnteringState > ros::Duration(2))
            {
                SendSound("mario_pause.wav");
                SendText("resuming - NAVIGATING_TAG_FINISHED");
                ROS_INFO_STREAM("Transitioning back to navigating");
                Transition(RobotState::NAVIGATING);
            }
        break;

        case RobotState::DUMPING:
        {
            // Check if reached dump site yet
            actionlib::SimpleClientGoalState::StateEnum result = action_client_ptr->getState().state_;
            switch (result)
            {
                case actionlib::SimpleClientGoalState::SUCCEEDED:
                    ROS_INFO_STREAM("Movebase reached target.");
                    SendDumpFinished(TaskResult::SUCCESS);
                    Transition(RobotState::DUMPING_FINISHED);
                    break;
                case actionlib::SimpleClientGoalState::ABORTED:
                case actionlib::SimpleClientGoalState::REJECTED:
                case actionlib::SimpleClientGoalState::LOST:
                case actionlib::SimpleClientGoalState::RECALLED:
                case actionlib::SimpleClientGoalState::PREEMPTED:
                    ROS_ERROR_STREAM("Navigation Failed: " << action_client_ptr->getState().toString() );
                    SendDumpFinished(TaskResult::FAILURE);
                    Transition(RobotState::WAITING);
                    break;

                case actionlib::SimpleClientGoalState::ACTIVE:
                case actionlib::SimpleClientGoalState::PENDING:
                default:
                    ROS_INFO_STREAM_THROTTLE(10, "Navigation state = " << action_client_ptr->getState().toString() );
                    break;
            }
        break;
        }

        case RobotState::DUMPING_FINISHED:
        break;

        default:
            ROS_ERROR_STREAM_THROTTLE(5, "Unexpected State:" << RobotState::ToString(m_status.GetState()) );
            break;
    }
}
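Transition() itself is not shown, but the 1s/2s/5s timeouts above imply it records the entry time along with the state change; a minimal sketch consistent with those calls (the setter name and parameter type are assumptions):
// Hypothetical body: every state change also resets the timer that the
// timeout checks (ros::Time::now() - m_timeEnteringState) compare against.
void RobotController::Transition(RobotState::State newState)
{
    m_status.SetState(newState);          // assumed setter paired with GetState()
    m_timeEnteringState = ros::Time::now();
}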