Example #1
void
PersonTracker::skeletonCB(const body_msgs::Skeletons& skel_msg)
{
	//ROS_INFO_THROTTLE(5,"[person tracker] Got data, %lu skeletons.", skel_msg.skeletons.size());

	body_msgs::SkeletonJoint body_part;
	string                   body_part_name;
	
	std::vector<double> thresholds;
	thresholds.push_back(0.7);
	//thresholds.push_back(0.5);
	//thresholds.push_back(0.3);
	//thresholds.push_back(-0.1);
	
	for( unsigned int ithresh = 0; ithresh<thresholds.size(); ithresh++ )
	{
		// Find the first trackable person
		for( unsigned int i=0; i<skel_msg.skeletons.size(); i++ )
		{
			// If the person has a trackable joint, save the location
			if( getFirstGoodJoint(skel_msg.skeletons.at(i), thresholds.at(ithresh), body_part, body_part_name) )
			{
				ROS_INFO_THROTTLE(2,"[person tracker] Player %d's %s has confidence %f", skel_msg.skeletons.at(i).playerid, body_part_name.c_str(), body_part.confidence);
			
				// Create a new pose
				PoseWithCovarianceStamped temp_pose;
				temp_pose.pose.pose.position    = body_part.position;
				temp_pose.pose.pose.orientation = tf::createQuaternionMsgFromYaw(0.0);
				temp_pose.header.frame_id       = skel_msg.header.frame_id;
				temp_pose.header.stamp          = skel_msg.header.stamp;
				
				setVariance(temp_pose, 0.0);
				
				// Transform the goal to a global, fixed frame.
				PoseWithCovarianceStamped transformed_pose;
				if( poseToGlobalFrame(temp_pose, transformed_pose) )
				{
					person_pos_ = transformed_pose;
					last_detect_ = ros::Time::now();
					break;
				}
			}
		}
	}
}
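setVariance() and poseToGlobalFrame() are PersonTracker members that are not part of this listing. As a rough sketch only, a poseToGlobalFrame-style helper could be written with tf as below; the listener, the global frame name and the function name are assumptions, not the original implementation.

#include <ros/ros.h>
#include <tf/transform_listener.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h>

// Sketch of a poseToGlobalFrame-style helper (assumption: a tf::TransformListener and a
// fixed global frame name are available; neither appears in the snippet above).
bool poseToGlobalFrameSketch(tf::TransformListener& tf_listener,
                             const std::string& global_frame,
                             const geometry_msgs::PoseWithCovarianceStamped& in,
                             geometry_msgs::PoseWithCovarianceStamped& out)
{
    // tf transforms PoseStamped, so transform the pose part and copy it back.
    geometry_msgs::PoseStamped p_in, p_out;
    p_in.header = in.header;
    p_in.pose   = in.pose.pose;
    try
    {
        tf_listener.waitForTransform(global_frame, in.header.frame_id,
                                     in.header.stamp, ros::Duration(0.5));
        tf_listener.transformPose(global_frame, p_in, p_out);
    }
    catch (const tf::TransformException& ex)
    {
        ROS_WARN_THROTTLE(1.0, "Could not transform pose to %s: %s",
                          global_frame.c_str(), ex.what());
        return false;
    }
    out = in;                  // covariance is copied as-is (not rotated) in this sketch
    out.header    = p_out.header;
    out.pose.pose = p_out.pose;
    return true;
}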
Example #2
void
Tracker::spinOnce()
{
    if (started){
        auto begin_time=std::chrono::high_resolution_clock::now();
        track();
        update_markers();
        publish_markers();
        broadcast_tf();
        auto end_time=std::chrono::high_resolution_clock::now();
        auto elapsed_time=std::chrono::duration_cast<std::chrono::milliseconds>(end_time - begin_time).count();
        ROS_INFO_THROTTLE(10,"[Tracker::%s]\tStep time: %ld ms.",__func__,elapsed_time);
    }
    else if (lost_it){
        find_object_in_scene();
    }
    else{
        if(!storage->readObjNames(est_names))
            ROS_WARN_THROTTLE(30,"[Tracker::%s]\tLooks like no Pose Estimation has been performed, perform one in order to start using the object tracker.",__func__);
    }
}
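The timing in spinOnce() boils down to wall-clocking one step with std::chrono and reporting it through a throttled log. A minimal standalone version of that pattern, with doSomeWork() standing in for track(), update_markers(), etc.:

#include <chrono>
#include <functional>
#include <ros/ros.h>

// Time one processing step and log the result at most once every 10 seconds.
void timedStep(const std::function<void()>& doSomeWork)
{
    auto begin_time = std::chrono::high_resolution_clock::now();
    doSomeWork();
    auto end_time = std::chrono::high_resolution_clock::now();
    auto elapsed_ms = std::chrono::duration_cast<std::chrono::milliseconds>(end_time - begin_time).count();
    ROS_INFO_THROTTLE(10, "[%s]\tStep time: %lld ms.", __func__, static_cast<long long>(elapsed_ms));
}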
Example #3
gm::Pose::ConstPtr Node::getBasePose (const ros::Time& t)
{
  bool found = tf_.waitForTransform(fixed_frame_, base_frame_, t,
                                    processing_interval_);
  if (found)
  {
    try
    {
      tf::StampedTransform trans;
      tf_.lookupTransform(fixed_frame_, base_frame_, t, trans);
      gm::Pose::Ptr p(new gm::Pose());
      poseTFToMsg(trans, *p);
      return p;
    }
    catch (tf::TransformException& e)
    {
      ROS_INFO_THROTTLE(1.0, "Skipping due to lack of transform from %s to %s",
                        base_frame_.c_str(), fixed_frame_.c_str());
    }
  }
  return gm::Pose::ConstPtr();
}
Example #4
int main(int argc,char** argv)
{

	ros::init(argc,argv,"roboteq_controller");

	ros::NodeHandle n;
	ros::NodeHandle nh("~");

	ros::Subscriber s1,s2,s3;

	std::string joy_topic,cmd_vel_topic,serial_tx_topic,serial_rx_topic,encoder_topic,status_topic;
	double max_time_diff;
	int index;
	int max_missing;

	nh.param<std::string>("cmd_vel_topic",cmd_vel_topic,"/fmActuators/cmd_vel");
	nh.param<std::string>("serial_rx_topic",serial_rx_topic,"/fmCSP/S0_rx");
	nh.param<std::string>("serial_tx_topic",serial_tx_topic,"/fmCSP/S0_tx");
	nh.param<std::string>("deadman_joy_topic",joy_topic,"/fmHMI/joy");
	nh.param<std::string>("encoder_topic",encoder_topic,"/fmSensors/encoder");
	nh.param<std::string>("status_topic",status_topic,"/fmActuators/status");


	nh.param<double>("max_time_diff",max_time_diff,0.5);
	nh.param<int>("deadman_joy_button_index",index,3);
	nh.param<int>("deadman_joy_max_missing",max_missing,30);


	RoboTeq controller;

	controller.deadman_button_id = index;
	controller.max_deadman = max_missing;
	controller.max_time_diff = ros::Duration(max_time_diff);

	controller.serial_publisher = nh.advertise<msgs::serial>(serial_tx_topic,10);
	controller.encoder_publisher = nh.advertise<msgs::encoder>(encoder_topic,10);
	controller.status_publisher = nh.advertise<msgs::motor_status>(status_topic,10);

	s1 = nh.subscribe<msgs::serial>(serial_rx_topic,10,&RoboTeq::onSerialMsgReceived,&controller);
	s2 = nh.subscribe<geometry_msgs::TwistStamped>(cmd_vel_topic,10,&RoboTeq::onCmdVelReceived,&controller);
	s3 = nh.subscribe<sensor_msgs::Joy>(joy_topic,10,&RoboTeq::onJoy,&controller);

	ros::Rate r(5);
	while(controller.serial_publisher.getNumSubscribers() == 0)
	{
		ROS_INFO_THROTTLE(1,"Waiting for serial node to subscribe");
		r.sleep();
	}

	controller.transmitSelfInit();

	r.sleep();

	ros::Timer t = nh.createTimer(ros::Duration(0.05),&RoboTeq::spin,&controller);



	ros::spin();

	return 0;
}
Example #5
void ISBTS::tsUpdate() {
	ts_count_.uLong++;
	ROS_INFO_THROTTLE(1,"count %d AND as on canbus %d %d %d %d bootcount %d CB %d %d", ts_count_.uLong , ts_count_.byte[0], ts_count_.byte[1], ts_count_.byte[2], ts_count_.byte[3],ts_bootcount_.uShort,ts_bootcount_.byte[0],ts_bootcount_.byte[1]);
	processCanTxEvent();
}
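ts_count_ and ts_bootcount_ are not declared in this snippet; from the way they are printed they look like small unions that expose a counter both as a whole value and byte by byte (e.g. for packing onto a CAN frame). A hypothetical declaration consistent with that usage (the exact widths are guesses, not the original header):

#include <stdint.h>

// Hypothetical counter layouts; the real types in ISBTS are not shown above.
union TsCount
{
    uint32_t uLong;    // counter as one value
    uint8_t  byte[4];  // the same bytes as they would go out on the bus
};

union TsBootCount
{
    uint16_t uShort;
    uint8_t  byte[2];
};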
Example #6
/// PINHOLE
void PreProcNode::incomingImage(const sensor_msgs::ImageConstPtr& msg){

    if (pubCamera.getNumSubscribers()>0){

        /// Measure HZ, processing time, image acquisition time
        ros::WallTime time_s0 = ros::WallTime::now();

        /// Get CV Image
        cv_bridge::CvImageConstPtr cvPtr;
        //cv_bridge::CvImagePtr cvPtr;
        try {
            //cvPtr = cv_bridge::toCvShare(msg, enc::MONO8);
            cvPtr = cv_bridge::toCvShare(msg, colors[colorId]);
            //cvPtr = cv_bridge::toCvCopy(msg, enc::MONO8);
            //cvPtr = cv_bridge::toCvCopy(msg, enc::BGR8);
        } catch (cv_bridge::Exception& e) {
            ROS_ERROR_STREAM_THROTTLE(1,"Failed to understand incoming image:" << e.what());
            return;
        }

        /// PreProcess Frame
        cv::Mat image = preproc.process(cvPtr->image);

    
        /// PTAM Rectification
        cv::Mat imageRect;
        imageRect = camModel.rectify(image);

/*


        /// ////// Test Bearing Vectors
        std::vector<cv::Point2f> kps;
        bool found = cv::findChessboardCorners(imageRect, cv::Size(8,6), kps);
        //cv::drawChessboardCorners(imageRect, cv::Size(8,6), kps, found);



        //std::vector<cv::KeyPoint> kps;
        //cv::FAST(imageRect, kps, 100.5, true);
        //cv::drawKeypoints(imageRect, kps, imageRect);
        //cv::drawChessboardCorners(imageRect, cv::Size(8,6), kps, found);


        Eigen::MatrixXd bv;
        camModel.bearingVectors(kps, bv);
        const tf::Quaternion q(1,0,0,0);
        for (uint i=0; i<kps.size(); ++i){
            pubTF.sendTransform(tf::StampedTransform(tf::Transform(q,tf::Vector3(bv(i,0),bv(i,1),bv(i,2))), ros::Time::now(), "/cam", "/F"+boost::lexical_cast<std::string>(i)));
        }
        if (imageRect.channels() != image.channels()){
            cv::cvtColor(imageRect, imageRect,CV_BGR2GRAY);
        }


        /// //////

        try{
            // sent out by the crazyflie driver driver.py
            tf::StampedTransform imu;
            subTF.waitForTransform("/world", "/cf", msg->header.stamp-ros::Duration(0), ros::Duration(0.05) );
            subTF.lookupTransform("/world", "/cf", msg->header.stamp-ros::Duration(0), imu);

            double r,p,y;
            OVO::tf2RPY(imu, r,p,y);
            camModel.rotatePoints(kps, r);



        } catch(tf::TransformException& ex){
            ROS_ERROR_THROTTLE(1,"TF exception. Could not get flie IMU transform: %s", ex.what());
        }

*/

        sensor_msgs::CameraInfoPtr infoMsgPtr = camModel.getCamInfo();

        /// Send out
        cv_bridge::CvImage cvi;
        cvi.header.stamp = msg->header.stamp;
        cvi.header.seq = msg->header.seq;
        cvi.header.frame_id = msg->header.frame_id;
        //cvi.encoding = enc::MONO8;
        cvi.encoding = colors[colorId];
        cvi.image = imageRect;
        infoMsgPtr->header = cvi.header;
        pubCamera.publish(cvi.toImageMsg(), infoMsgPtr);

        // Compute running average of processing time
        timeAvg = timeAvg*timeAlpha + (1.0 - timeAlpha)*(ros::WallTime::now()-time_s0).toSec();
        ROS_INFO_THROTTLE(1, "Processing Time: %.1fms", timeAvg*1000.);
    } else {
        //ROS_INFO_THROTTLE(5, "Not processing images as not being listened to");
    }

}
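The statistic kept at the end of incomingImage() is an exponential moving average of the processing time: timeAvg_k = timeAlpha * timeAvg_{k-1} + (1 - timeAlpha) * dt_k, where a timeAlpha close to 1 gives a long memory. Isolated as a small helper for illustration:

// Exponential moving average of a processing time, as used for timeAvg above.
// alpha in [0, 1): larger alpha -> smoother, slower-moving average.
double updateTimeAverage(double previous_avg, double alpha, double new_sample_sec)
{
    return previous_avg * alpha + (1.0 - alpha) * new_sample_sec;
}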
Example #7
/**This function reads the sensor input from a bagfile specified in the parameter bagfile_name.
 * It is meant for offline processing of each frame */
void BagLoader::loadBag(std::string filename)
{
  ScopedTimer s(__FUNCTION__);
  ros::NodeHandle nh;
  ParameterServer* ps = ParameterServer::instance();
  std::string points_tpc = ps->get<std::string>("individual_cloud_out_topic");//ps->get<std::string>("topic_points");
  ROS_INFO_STREAM("Listening to " << points_tpc);
  std::string tf_tpc = std::string("/tf");
  tf_pub_ = nh.advertise<tf::tfMessage>(tf_tpc, 10);

  ROS_INFO("Loading Bagfile %s", filename.c_str());
  Q_EMIT setGUIStatus(QString("Loading ")+filename.c_str());
  { //bag will be destructed after this block (hopefully frees memory for the optimizer)
    rosbag::Bag bag;
    try{
      bag.open(filename, rosbag::bagmode::Read);
    } catch(const rosbag::BagIOException& ex) {
      ROS_FATAL("Opening Bagfile %s failed: %s Quitting!", filename.c_str(), ex.what());
      ros::shutdown();
      return;
    }
    ROS_INFO("Opened Bagfile %s", filename.c_str());
    Q_EMIT setGUIStatus(QString("Opened ")+filename.c_str());

    // Image topics to load for bagfiles
    std::vector<std::string> topics;
    topics.push_back(points_tpc);
    topics.push_back(tf_tpc);

    rosbag::View view(bag, rosbag::TopicQuery(topics));
    // Simulate sending of the messages in the bagfile
    std::deque<sensor_msgs::PointCloud2::ConstPtr> pointclouds;
    ros::Time last_tf(0);
    BOOST_FOREACH(rosbag::MessageInstance const m, view)
    {
      if(!ros::ok()) return;
      while(pause_) { 
        usleep(100);
        ROS_INFO_THROTTLE(5.0,"Paused - press Space to unpause");
        if(!ros::ok()) return;
      } 

      ROS_INFO_STREAM("Processing message on topic " << m.getTopic());
      if (m.getTopic() == points_tpc || ("/" + m.getTopic() == points_tpc))
      {
        sensor_msgs::PointCloud2::ConstPtr pointcloud = m.instantiate<sensor_msgs::PointCloud2>();
        //if (cam_info) cam_info_sub_->newMessage(cam_info);
        if (pointcloud) pointclouds.push_back(pointcloud);
        ROS_INFO("Found Message of %s", points_tpc.c_str());
      }
      if (m.getTopic() == tf_tpc|| ("/" + m.getTopic() == tf_tpc)){
        tf::tfMessage::ConstPtr tf_msg = m.instantiate<tf::tfMessage>();
        if (tf_msg) {
          //if(tf_msg->transforms[0].header.frame_id == "/kinect") continue;//avoid destroying tf tree if odom is used
          //prevents missing callerid warning
          boost::shared_ptr<std::map<std::string, std::string> > msg_header_map = tf_msg->__connection_header;
          (*msg_header_map)["callerid"] = "rgbdslam";
          tf_pub_.publish(tf_msg);
          ROS_INFO("Found Message of %s", tf_tpc.c_str());
          last_tf = tf_msg->transforms[0].header.stamp;
          last_tf -= ros::Duration(0.3);
        }
      }
      while(!pointclouds.empty() && pointclouds.front()->header.stamp < last_tf){
          Q_EMIT setGUIInfo(QString("Processing frame ") + QString::number(data_id_));
          callback(pointclouds.front());
          pointclouds.pop_front();
      }

    }
    bag.close();
  }
  ROS_INFO("Bagfile fully processed");
  Q_EMIT setGUIStatus(QString("Done with ")+filename.c_str());
  Q_EMIT bagFinished();
}
Example #8
FootstepMarker::FootstepMarker():
ac_("footstep_planner", true), ac_exec_("footstep_controller", true),
plan_run_(false), lleg_first_(true) {
  // read parameters
  tf_listener_.reset(new tf::TransformListener);
  ros::NodeHandle pnh("~");
  ros::NodeHandle nh;
  srv_ = boost::make_shared <dynamic_reconfigure::Server<Config> > (pnh);
  typename dynamic_reconfigure::Server<Config>::CallbackType f =
    boost::bind (&FootstepMarker::configCallback, this, _1, _2);
  srv_->setCallback (f);
  pnh.param("foot_size_x", foot_size_x_, 0.247);
  pnh.param("foot_size_y", foot_size_y_, 0.135);
  pnh.param("foot_size_z", foot_size_z_, 0.01);
  pnh.param("lfoot_frame_id", lfoot_frame_id_, std::string("lfsensor"));
  pnh.param("rfoot_frame_id", rfoot_frame_id_, std::string("rfsensor"));
  pnh.param("show_6dof_control", show_6dof_control_, true);
  // pnh.param("use_projection_service", use_projection_service_, false);
  // pnh.param("use_projection_topic", use_projection_topic_, false);
  pnh.param("always_planning", always_planning_, true);
  // if (use_projection_topic_) {
    project_footprint_pub_ = pnh.advertise<jsk_interactive_marker::SnapFootPrintInput>("project_footprint", 1);
  // }
  // read lfoot_offset
  readPoseParam(pnh, "lfoot_offset", lleg_offset_);
  readPoseParam(pnh, "rfoot_offset", rleg_offset_);
  
  pnh.param("footstep_margin", footstep_margin_, 0.2);
  pnh.param("use_footstep_planner", use_footstep_planner_, true);

  pnh.param("use_footstep_controller", use_footstep_controller_, true);
  pnh.param("use_initial_footstep_tf", use_initial_footstep_tf_, true);
  pnh.param("wait_snapit_server", wait_snapit_server_, false);
  bool nowait = true;
  pnh.param("no_wait", nowait, true);
  pnh.param("frame_id", marker_frame_id_, std::string("/map"));
  footstep_pub_ = nh.advertise<jsk_footstep_msgs::FootstepArray>("footstep_from_marker", 1);
  snapit_client_ = nh.serviceClient<jsk_pcl_ros::CallSnapIt>("snapit");
  snapped_pose_pub_ = pnh.advertise<geometry_msgs::PoseStamped>("snapped_pose", 1);
  current_pose_pub_ = pnh.advertise<geometry_msgs::PoseStamped>("current_pose", 1);
  estimate_occlusion_client_ = nh.serviceClient<std_srvs::Empty>("require_estimation");
  if (!nowait && wait_snapit_server_) {
    snapit_client_.waitForExistence();
  }
  
  if (pnh.getParam("initial_reference_frame", initial_reference_frame_)) {
    use_initial_reference_ = true;
    JSK_ROS_INFO_STREAM("initial_reference_frame: " << initial_reference_frame_);
  }
  else {
    use_initial_reference_ = false;
    JSK_ROS_INFO("initial_reference_frame is not specified ");
  }

  server_.reset( new interactive_markers::InteractiveMarkerServer(ros::this_node::getName()));
  // menu_handler_.insert( "Snap Legs",
  //                       boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  // menu_handler_.insert( "Reset Legs",
  //                       boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  menu_handler_.insert( "Look Ground",
                        boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  menu_handler_.insert( "Execute the Plan",
                        boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  menu_handler_.insert( "Force to replan",
                        boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  // menu_handler_.insert( "Estimate occlusion",
  //                       boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  menu_handler_.insert( "Cancel Walk",
                        boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  menu_handler_.insert( "Toggle 6dof marker",
                        boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  // menu_handler_.insert( "Resume Footstep",
  //                     boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  menu_handler_.insert("Straight Heuristic",
                       boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  menu_handler_.insert("Stepcost Heuristic**",
                       boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  menu_handler_.insert("LLeg First",
                       boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  menu_handler_.insert("RLeg First",
                       boost::bind(&FootstepMarker::menuFeedbackCB, this, _1));
  marker_pose_.header.frame_id = marker_frame_id_;
  marker_pose_.header.stamp = ros::Time::now();
  marker_pose_.pose.orientation.w = 1.0;

  resetLegPoses();

  // initialize lleg_initial_pose, rleg_initial_pose
  lleg_initial_pose_.position.y = footstep_margin_ / 2.0;
  lleg_initial_pose_.orientation.w = 1.0;
  rleg_initial_pose_.position.y = - footstep_margin_ / 2.0;
  rleg_initial_pose_.orientation.w = 1.0;
  
  if (use_initial_reference_) {
    while (ros::ok()) {
      try {
        if (!tf_listener_->waitForTransform(marker_frame_id_, initial_reference_frame_,
                                            ros::Time(0.0), ros::Duration(10.0))) {
          ROS_INFO_THROTTLE(1.0,
                            "waiting for transform %s => %s", marker_frame_id_.c_str(),
                            initial_reference_frame_.c_str());
          continue;
        }
        ROS_INFO("resolved transform %s => %s", marker_frame_id_.c_str(),
                 initial_reference_frame_.c_str());
        tf::StampedTransform transform;
        tf_listener_->lookupTransform(marker_frame_id_, initial_reference_frame_,
                                      ros::Time(0), transform);
        marker_pose_.pose.position.x = transform.getOrigin().x();
        marker_pose_.pose.position.y = transform.getOrigin().y();
        marker_pose_.pose.position.z = transform.getOrigin().z();
        marker_pose_.pose.orientation.x = transform.getRotation().x();
        marker_pose_.pose.orientation.y = transform.getRotation().y();
        marker_pose_.pose.orientation.z = transform.getRotation().z();
        marker_pose_.pose.orientation.w = transform.getRotation().w();
        break;
      }
      catch (tf2::TransformException& e) {
        ROS_ERROR("Failed to lookup transformation: %s", e.what());
      }
    }
  }

  initializeInteractiveMarker();

  if (use_footstep_planner_) {
    ROS_INFO("waiting planner server...");
    ac_.waitForServer();
    ROS_INFO("found planner server...");
  }
  if (use_footstep_controller_) {
    ROS_INFO("waiting controller server...");
    ac_exec_.waitForServer();
    ROS_INFO("found controller server...");
  }
  
  move_marker_sub_ = nh.subscribe("move_marker", 1, &FootstepMarker::moveMarkerCB, this);
  menu_command_sub_ = nh.subscribe("menu_command", 1, &FootstepMarker::menuCommandCB, this);
  exec_sub_ = pnh.subscribe("execute", 1, &FootstepMarker::executeCB, this);
  resume_sub_ = pnh.subscribe("resume", 1, &FootstepMarker::resumeCB, this);
  plan_if_possible_srv_ = pnh.advertiseService("force_to_replan", &FootstepMarker::forceToReplan, this);
  if (use_initial_footstep_tf_) {
    // waiting TF
    while (ros::ok()) {
      try {
      if (tf_listener_->waitForTransform(lfoot_frame_id_, marker_frame_id_,
                                         ros::Time(0.0), ros::Duration(10.0))
          && tf_listener_->waitForTransform(rfoot_frame_id_, marker_frame_id_,
                                            ros::Time(0.0), ros::Duration(10.0))) {
        break;
      }
      ROS_INFO("waiting for transform {%s, %s} => %s", lfoot_frame_id_.c_str(),
               rfoot_frame_id_.c_str(), marker_frame_id_.c_str());
      }
      catch (tf2::TransformException& e) {
        ROS_ERROR("Failed to lookup transformation: %s", e.what());
      }
    }
    ROS_INFO("resolved transform {%s, %s} => %s", lfoot_frame_id_.c_str(),
             rfoot_frame_id_.c_str(), marker_frame_id_.c_str());
  }
  // if (use_projection_topic_) {
    projection_sub_ = pnh.subscribe("projected_pose", 1,
                                    &FootstepMarker::projectionCallback, this);
  // }
}
Example #9
bool CameraProjections::Update()
{
	try
	{
		diagnalAngleView = params.camera.diagonalAngleView->get();
		if (dummy && (ros::Time::now() < lastTime))
		{
			ROS_WARN(
					"Vision Module detected a backward time. Cleared all cached tf data.");
			m_tf->clear();
			delete m_tf;
			m_tf = new tf::TransformListener(ros::Duration(10));
			m_tf->waitForTransform("/ego_rot", "/camera_optical",
					ros::Time::now()
							- ros::Duration(params.debug.timeToShift->get()),
					ros::Duration(1.0));

		}
		lastTime = ros::Time::now();
		topViewMarker.clear();
		ros::Time past = ros::Time::now() - ros::Duration(0.060); //To get the 40ms before
		if (dummy)
		{
			past = ros::Time(0); //To get latest
		}
		m_tf->lookupTransform("/ego_rot", "/camera_optical",
				ros::Time::now()
						- ros::Duration(params.debug.timeToShift->get()),
				tfOptical);
		tfScalar roll, pitch, yaw;
		tfOptical.getBasis().getEulerYPR(yaw, pitch, roll);
		//	cout<< " X = "<<tfOptical.getOrigin().getX() << " Y= "<<	tfOptical.getOrigin().getY()<< " Z="<<	tfOptical.getOrigin().getZ()<<endl;
		OpticalAngle.x = pitch;
		OpticalAngle.y = -(roll + M_PI);
		OpticalAngle.z = yaw + M_PI / 2.;

		if (VERBOSE)
		{
			ROS_INFO_THROTTLE(0.33, "x= %1.2f y= %1.2f z=%1.2f",
					Radian2Degree(OpticalAngle.x),
					Radian2Degree(OpticalAngle.y),
					Radian2Degree(OpticalAngle.z));
		}

		cameraLocation.x = params.location.x->get(); //todo: it seems that the actual height published in odometry
		cameraLocation.y = params.location.y->get();
		cameraLocation.z = params.location.z->get();

		cameraOrintation.x = OpticalAngle.x + params.orientation.x->get();
		cameraOrintation.y = OpticalAngle.y + params.orientation.y->get();
		cameraOrintation.z = CorrectAngleRadian360(
				OpticalAngle.z + params.orientation.z->get());

	} catch (tf::TransformException &ex)
	{
		std::string errorMsg(ex.what());
//		if(errorMsg.find("Lookup would require extrapolation")!=std::string::npos)
//		{
//			reset_time_pub.publish(std_msgs::Empty());
//			reset_clock_pub.publish(std_msgs::Empty());
//		}
		ROS_ERROR("%s", errorMsg.c_str());
		return false;
	}
	return true;
}
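CorrectAngleRadian360() is not included in this listing; from its use on the yaw component it apparently wraps an angle onto a full circle. A plausible stand-in, offered only as an assumption rather than the project's implementation:

#include <cmath>

// Hypothetical angle wrap into [0, 2*pi); not taken from the original source.
double CorrectAngleRadian360(double angle)
{
    angle = std::fmod(angle, 2.0 * M_PI);
    if (angle < 0.0)
        angle += 2.0 * M_PI;
    return angle;
}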
Example #10
/***********************************************************************
 *  Method: RobotController::Init
 *  Params: ros::NodeHandle *nh, int robotID, std::string robotName, int storage_cap, int storage_used, bool type
 * Returns: void
 * Effects: Initialize the controller with parameters
 ***********************************************************************/
void RobotController::Init(ros::NodeHandle *nh, int robotID, std::string robotName, int storage_cap, int storage_used, RobotState::Type type)
{
    m_nh = nh;
    m_listener.reset( new tf::TransformListener(*nh) );

    if (m_nh->getParam("controller/robot_id", robotID))
        ROS_INFO_STREAM("Set robot ID: " << robotID);
    else
        ROS_ERROR_STREAM("Did not read robot ID: default = " << robotID);
    m_status.SetID(robotID);

    if (m_nh->getParam("controller/robot_name", robotName))
        ROS_INFO_STREAM("Read robot name: " << robotName);
    else
        ROS_ERROR_STREAM("Did not read robot name: default = " << robotName);
    m_status.SetName(robotName);

    if(m_nh->getParam("controller/storage_capacity", storage_cap))
        ROS_INFO_STREAM("Read storage capacity: " << storage_cap);
    else
        ROS_ERROR_STREAM("Did not read storage capacity: default = " << storage_cap);
    m_status.SetStorageCapacity(storage_cap);

    if(m_nh->getParam("controller/storage_used", storage_used))
        ROS_INFO_STREAM("Read storage used: " << storage_used);
    else
        ROS_ERROR_STREAM("Did not read storage used: default = " << storage_used);
    m_status.SetStorageUsed(storage_used);

    std::string robotType;
    if (m_nh->getParam("controller/type", robotType))
        ROS_INFO_STREAM("Read robot type: " << robotType);
    else
        ROS_ERROR_STREAM("Did not read type: default = " << robotType);

    if (robotType.compare("collector") == 0)
    {
        ROS_INFO_STREAM("Type: Collector");
        type = RobotState::COLLECTOR_BOT;
    }
    else
    {
        ROS_INFO_STREAM("Type: Binbot");
        type = RobotState::BIN_BOT;
    }

    m_status.SetType(type);

    std::string tf_prefix;
    if (m_nh->getParam("controller/tf_prefix", tf_prefix))
        ROS_INFO_STREAM("Read tf prefix: " << tf_prefix);
    else
        ROS_ERROR_STREAM("Did not read tf_prefix: default = " << tf_prefix);

    base_frame = std::string("base_link");
    if (m_nh->getParam("controller/base_frame", base_frame))
        ROS_INFO_STREAM("Read base frame: " << base_frame);
    else
        ROS_ERROR_STREAM("Did not read base_frame: default = " << base_frame);

    base_frame = tf_prefix + "/"+base_frame;

    if (robotID < 0)
    {
        ROS_ERROR_STREAM("ERROR: Received invalid robot id: " << robotID);
        return;
    }
    if (storage_cap < 0)
    {
        ROS_ERROR_STREAM("ERROR: invalid storage capacity: " << storage_cap);
        return;
    }
    if (storage_used > storage_cap)
    {
        ROS_ERROR_STREAM("ERROR: storage used > storage capacity: used=" << storage_used <<  ", cap=" << storage_cap);
        return;
    }
    // Todo: Only start if fake trash is not used
    m_tagProcessor.reset( new AprilTagProcessor() );
    m_tagProcessor->Init(nh, robotID);

    action_client_ptr.reset( new MoveBaseClient("move_base", true) );
    // Wait for the action server to come up
    while(ros::ok() && !action_client_ptr->waitForServer(ros::Duration(2.0))){
        ROS_INFO_THROTTLE(3.0,"Waiting for the move_base action server to come up");
    }


    SetupCallbacks();

    ROS_DEBUG_STREAM("Robot has setup the movebase client");

    Transition(RobotState::WAITING);
    m_timeEnteringState = ros::Time::now();

    ROS_INFO_STREAM("Finished initializing");
}
Example #11
int main(int argc, char** argv)
{
  ros::init(argc, argv, "exploration");
  ros::NodeHandle nh;
  ros::Publisher trajectory_pub =
      nh.advertise<trajectory_msgs::MultiDOFJointTrajectory>(mav_msgs::default_topics::COMMAND_TRAJECTORY, 5);
  ROS_INFO("Started exploration");

  std_srvs::Empty srv;
  bool unpaused = ros::service::call("/gazebo/unpause_physics", srv);
  unsigned int i = 0;

  // Trying to unpause Gazebo for 10 seconds.
  while (i <= 10 && !unpaused) {
    ROS_INFO("Wait for 1 second before trying to unpause Gazebo again.");
    std::this_thread::sleep_for(std::chrono::seconds(1));
    unpaused = ros::service::call("/gazebo/unpause_physics", srv);
    ++i;
  }

  if (!unpaused) {
    ROS_FATAL("Could not wake up Gazebo.");
    return -1;
  } else {
    ROS_INFO("Unpaused the Gazebo simulation.");
  }

  double dt = 1.0;
  std::string ns = ros::this_node::getName();
  if (!ros::param::get(ns + "/nbvp/dt", dt)) {
    ROS_FATAL("Could not start exploration. Parameter missing! Looking for %s",
              (ns + "/nbvp/dt").c_str());
    return -1;
  }

  static int n_seq = 0;

  trajectory_msgs::MultiDOFJointTrajectory samples_array;
  mav_msgs::EigenTrajectoryPoint trajectory_point;
  trajectory_msgs::MultiDOFJointTrajectoryPoint trajectory_point_msg;

  // Wait for 5 seconds to let the Gazebo GUI show up.
  ros::Duration(5.0).sleep();

  // This is the initialization motion, necessary that the known free space allows the planning
  // of initial paths.
  ROS_INFO("Starting the planner: Performing initialization motion");
  for (double i = 0; i <= 1.0; i = i + 0.1) {
    nh.param<double>("wp_x", trajectory_point.position_W.x(), 0.0);
    nh.param<double>("wp_y", trajectory_point.position_W.y(), 0.0);
    nh.param<double>("wp_z", trajectory_point.position_W.z(), 1.0);
    samples_array.header.seq = n_seq;
    samples_array.header.stamp = ros::Time::now();
    samples_array.points.clear();
    n_seq++;
    tf::Quaternion quat = tf::Quaternion(tf::Vector3(0.0, 0.0, 1.0), -M_PI * i);
    trajectory_point.setFromYaw(tf::getYaw(quat));
    mav_msgs::msgMultiDofJointTrajectoryPointFromEigen(trajectory_point, &trajectory_point_msg);
    samples_array.points.push_back(trajectory_point_msg);
    trajectory_pub.publish(samples_array);
    ros::Duration(1.0).sleep();
  }
  trajectory_point.position_W.x() -= 0.5;
  trajectory_point.position_W.y() -= 0.5;
  samples_array.header.seq = n_seq;
  samples_array.header.stamp = ros::Time::now();
  samples_array.points.clear();
  n_seq++;
  mav_msgs::msgMultiDofJointTrajectoryPointFromEigen(trajectory_point, &trajectory_point_msg);
  samples_array.points.push_back(trajectory_point_msg);
  trajectory_pub.publish(samples_array);
  ros::Duration(1.0).sleep();

  // Start planning: The planner is called and the computed path sent to the controller.
  int iteration = 0;
  while (ros::ok()) {
    ROS_INFO_THROTTLE(0.5, "Planning iteration %i", iteration);
    nbvplanner::nbvp_srv planSrv;
    planSrv.request.header.stamp = ros::Time::now();
    planSrv.request.header.seq = iteration;
    planSrv.request.header.frame_id = "world";
    if (ros::service::call("nbvplanner", planSrv)) {
      n_seq++;
      if (planSrv.response.path.size() == 0) {
        ros::Duration(1.0).sleep();
      }
      for (int i = 0; i < planSrv.response.path.size(); i++) {
        samples_array.header.seq = n_seq;
        samples_array.header.stamp = ros::Time::now();
        samples_array.header.frame_id = "world";
        samples_array.points.clear();
        tf::Pose pose;
        tf::poseMsgToTF(planSrv.response.path[i], pose);
        double yaw = tf::getYaw(pose.getRotation());
        trajectory_point.position_W.x() = planSrv.response.path[i].position.x;
        trajectory_point.position_W.y() = planSrv.response.path[i].position.y;
        // Add offset to account for constant tracking error of controller
        trajectory_point.position_W.z() = planSrv.response.path[i].position.z + 0.25;
        tf::Quaternion quat = tf::Quaternion(tf::Vector3(0.0, 0.0, 1.0), yaw);
        trajectory_point.setFromYaw(tf::getYaw(quat));
        mav_msgs::msgMultiDofJointTrajectoryPointFromEigen(trajectory_point, &trajectory_point_msg);
        samples_array.points.push_back(trajectory_point_msg);
        trajectory_pub.publish(samples_array);
        ros::Duration(dt).sleep();
      }
    } else {
      ROS_WARN_THROTTLE(1, "Planner not reachable");
      ros::Duration(1.0).sleep();
    }
    iteration++;
  }
}
Example #12
int main(int argc, char **argv)
{
  ros::init(argc, argv, "updating_particles");
  ros::NodeHandle n;
  PlotRayUtils plt;
  RayTracer rayt;

  std::random_device rd;
  std::normal_distribution<double> randn(0.0,0.000001);

  ROS_INFO("Running...");

  ros::Publisher pub_init = 
    n.advertise<particle_filter::PFilterInit>("/particle_filter_init", 5);
  ros::ServiceClient srv_add = 
    n.serviceClient<particle_filter::AddObservation>("/particle_filter_add");


 
  ros::Duration(1).sleep();
  // pub_init.publish(getInitialPoints(plt));
 
  geometry_msgs::Point obs;
  geometry_msgs::Point dir;
  double radius = 0.00;

  int i = 0;
  //for(int i=0; i<20; i++){
  while (i < NUM_TOUCHES) {
    // ros::Duration(1).sleep();
    //tf::Point start(0.95,0,-0.15);
    //tf::Point end(0.95,2,-0.15);
    tf::Point start, end;
    // randomSelection(plt, rayt, start, end);
    fixedSelection(plt, rayt, start, end);

    Ray measurement(start, end);
    
    double distToPart;
    if(!rayt.traceRay(measurement, distToPart)){
      ROS_INFO("NO INTERSECTION, Skipping");
      continue;
    }
    tf::Point intersection(start.getX(), start.getY(), start.getZ());
    intersection = intersection + (end-start).normalize() * (distToPart - radius);
	std::cout << "Intersection at: " << intersection.getX() << "  " << intersection.getY() << "   " << intersection.getZ() << std::endl;
    tf::Point ray_dir(end.x()-start.x(),end.y()-start.y(),end.z()-start.z());
    ray_dir = ray_dir.normalize();
    obs.x=intersection.getX() + randn(rd); 
    obs.y=intersection.getY() + randn(rd); 
    obs.z=intersection.getZ() + randn(rd);
    dir.x=ray_dir.x();
    dir.y=ray_dir.y();
    dir.z=ray_dir.z();
    // obs.x=intersection.getX(); 
    // obs.y=intersection.getY(); 
    // obs.z=intersection.getZ();

    // pub_add.publish(obs);
    
    // plt.plotCylinder(start, end, 0.01, 0.002, true);
    plt.plotRay(Ray(start, end));
    // ros::Duration(1).sleep();

    particle_filter::AddObservation pfilter_obs;
    pfilter_obs.request.p = obs;
    pfilter_obs.request.dir = dir;
    if(!srv_add.call(pfilter_obs)){
      ROS_INFO("Failed to call add observation");
    }

    ros::spinOnce();
    while(!rayt.particleHandler.newParticles){
      ROS_INFO_THROTTLE(10, "Waiting for new particles...");
      ros::spinOnce();
      ros::Duration(.1).sleep();
    }
    i ++;
  }
  // std::ofstream myfile;
  // myfile.open("/home/shiyuan/Documents/ros_marsarm/diff.csv", std::ios::out|std::ios::app);
  // myfile << "\n";
  // myfile.close();
  // myfile.open("/home/shiyuan/Documents/ros_marsarm/time.csv", std::ios::out|std::ios::app);
  // myfile << "\n";
  // myfile.close();
  // myfile.open("/home/shiyuan/Documents/ros_marsarm/diff_trans.csv", std::ios::out|std::ios::app);
  // myfile << "\n";
  // myfile.close();
  // myfile.open("/home/shiyuan/Documents/ros_marsarm/diff_rot.csv", std::ios::out|std::ios::app);
  // myfile << "\n";
  // myfile.close();
  // myfile.open("/home/shiyuan/Documents/ros_marsarm/workspace_max.csv", std::ios::out|std::ios::app);
  // myfile << "\n";
  // myfile.close();
  // myfile.open("/home/shiyuan/Documents/ros_marsarm/workspace_min.csv", std::ios::out|std::ios::app);
  // myfile << "\n";
  // myfile.close();
  ROS_INFO("Finished all action");

}
Example #13
bool TreeKinematics::getPositionIk(tree_kinematics::get_tree_position_ik::Request &request,
                                   tree_kinematics::get_tree_position_ik::Response &response)
{
  ik_srv_duration_ = ros::Time::now().toSec();
  ROS_DEBUG("getPositionIK method invoked.");

  // extract current joint positions from the request
  KDL::JntArray q, q_desi;
  double nr_of_jnts = request.pos_ik_request[0].ik_seed_state.joint_state.name.size();
  q.resize(nr_of_jnts);
  q_desi.resize(nr_of_jnts);
  for (unsigned int i=0; i < nr_of_jnts; ++i)
  {
    int tmp_index = getJointIndex(request.pos_ik_request[0].ik_seed_state.joint_state.name[i]);
    if (tmp_index >=0)
    {
      q(tmp_index) = request.pos_ik_request[0].ik_seed_state.joint_state.position[i];
      ROS_DEBUG("joint '%s' is now number '%d'", request.pos_ik_request[0].ik_seed_state.joint_state.name[i].c_str(),
                                                tmp_index);
    }
    else
    {
      ROS_FATAL("i: %d, No joint index for %s!",i,request.pos_ik_request[0].ik_seed_state.joint_state.name[i].c_str());
      ROS_FATAL("Service call aborted.");
      return false;
    }
  }

  // convert pose messages into transforms from the root frame to the given poses and further into KDL::Frames
  geometry_msgs::PoseStamped pose_msg_in;
  geometry_msgs::PoseStamped pose_msg_transformed;
  tf::Stamped<tf::Pose> transform;
  tf::Stamped<tf::Pose> transform_root;
  KDL::Frames desired_poses;
  KDL::Frame desired_pose;
  for(unsigned int i = 0; i < request.pos_ik_request.size(); ++i)
  {
    pose_msg_in = request.pos_ik_request[i].pose_stamped;
    try
    {
      tf_listener_.waitForTransform(tree_root_name_, pose_msg_in.header.frame_id, pose_msg_in.header.stamp,
      ros::Duration(0.1));
      tf_listener_.transformPose(tree_root_name_, pose_msg_in, pose_msg_transformed);
    }
    catch (tf::TransformException const &ex)
    {
      ROS_FATAL("%s",ex.what());
      ROS_FATAL("Could not transform IK pose from '%s' to frame '%s'", tree_root_name_.c_str(),
      pose_msg_in.header.frame_id.c_str());
      response.error_code.val = response.error_code.FRAME_TRANSFORM_FAILURE;
      return false;
    }
    tf::PoseMsgToKDL(pose_msg_transformed.pose, desired_pose);
    desired_poses[request.pos_ik_request[i].ik_link_name] = desired_pose;
  }

  // use the solver to compute desired joint positions
  ik_duration_ = ros::Time::now().toSec();
  int ik_ret = ik_pos_solver_->CartToJnt_it(q, desired_poses, q_desi); // NOTE: Before it was CartToJnt (without the _it). What's the difference?
  ik_duration_ = ros::Time::now().toSec() - ik_duration_;
  ik_duration_median_ = ((ik_duration_median_ * (loop_count_ - 1)) + ik_duration_) / loop_count_;

  // insert the solver's result into the service response
  if (ik_ret >= 0 || ik_ret == -3)
  {
    response.solution.joint_state.name = info_.joint_names;
    response.solution.joint_state.position.resize(nr_of_jnts_);
    response.solution.joint_state.velocity.resize(nr_of_jnts_);
    for (unsigned int i=0; i < nr_of_jnts_; ++i)
    {
      response.solution.joint_state.position[i] = q_desi(i);
      response.solution.joint_state.velocity[i] = (q_desi(i) - q(i)) * srv_call_frequency_;
      ROS_DEBUG("IK Solution: %s %d: pos = %f, vel = %f",response.solution.joint_state.name[i].c_str(), i,
      response.solution.joint_state.position[i], response.solution.joint_state.velocity[i]);
    }
    response.error_code.val = response.error_code.SUCCESS;

    ik_srv_duration_ = ros::Time::now().toSec() - ik_srv_duration_;
    ik_srv_duration_median_ = ((ik_srv_duration_median_ * (loop_count_ - 1)) + ik_srv_duration_) / loop_count_;
    loop_count_++;

    ROS_INFO_THROTTLE(1.0, "tree_kinematics: ik_srv_duration %f and median %f", ik_srv_duration_,
    ik_srv_duration_median_);
    ROS_INFO_THROTTLE(1.0, "tree_kinematics: ik_duration %f and median %f", ik_duration_, ik_duration_median_);

    if (ik_ret == -3)
    {
      ROS_WARN_THROTTLE(1.0, "Maximum iterations reached! (error code = %d)", ik_ret);
    }
  }
  else
  {
    ROS_WARN("An IK solution could not be found (error code = %d)", ik_ret);
    response.error_code.val = response.error_code.NO_IK_SOLUTION;

    ik_srv_duration_ = ros::Time::now().toSec() - ik_srv_duration_;
    ik_srv_duration_median_ = ((ik_srv_duration_median_ * (loop_count_ - 1)) + ik_srv_duration_) / loop_count_;
    loop_count_++;

    ROS_INFO_THROTTLE(1.0, "tree_kinematics: ik_srv_duration %f and median %f", ik_srv_duration_,
    ik_srv_duration_median_);
    ROS_INFO_THROTTLE(1.0, "tree_kinematics: ik_duration %f and median %f", ik_duration_, ik_duration_median_);
  }
  return true;
}
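Despite the *_median_ names, the duration statistics in getPositionIk() are updated as a cumulative (running) mean, m_k = ((k - 1) * m_{k-1} + x_k) / k. The update step, isolated for clarity:

// Cumulative mean as used for ik_duration_median_ and ik_srv_duration_median_ above.
// sample_count is the 1-based index of the newest sample.
double cumulativeMean(double previous_mean, double new_sample, unsigned int sample_count)
{
    return (previous_mean * (sample_count - 1) + new_sample) / sample_count;
}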
Example #14
/**
 * \brief Fills data using XsDataPacket object 
 * @param _packet Incoming packet from Xsens device.
 */
void SensorData::fillData(XsDataPacket * _packet){
		XsDataPacket packet = *_packet;
		XsMessage msg = packet.toMessage();
		XsSize msg_size = msg.getDataSize();

		ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "Data Size: " << msg_size);

		
	// Get the orientation data
		if (packet.containsOrientation()) {
			XsQuaternion quaternion = packet.orientationQuaternion();

 			q1 = quaternion.m_x;
			q2 = quaternion.m_y;
			q3 = quaternion.m_z;
			q0 = quaternion.m_w;
						
			XsEuler euler = packet.orientationEuler();
			eroll = euler.m_roll;
			epitch = euler.m_pitch;
			eyaw = euler.m_yaw;
 			
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE,"Orientation: Roll:" << std::setw(7) << std::fixed << std::setprecision(2) << euler.m_roll
							  << ", Pitch:" << std::setw(7) << std::fixed << std::setprecision(2) << euler.m_pitch
							  << ", Yaw:" << std::setw(7) << std::fixed << std::setprecision(2) << euler.m_yaw);
						
		}

		// Get the gyroscope data
		if (packet.containsCalibratedGyroscopeData()) {
			XsVector gyroscope = packet.calibratedGyroscopeData();
		
			gyrX = gyroscope.at(0);
			gyrY = gyroscope.at(1);
			gyrZ = gyroscope.at(2);

			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "Angular Velocity: ( "
							 << gyrX << ", " << gyrY << ", " << gyrZ << ")" ); 
		}

		// Get the acceleration data
		if (packet.containsCalibratedAcceleration()) {
			XsVector acceleration = packet.calibratedAcceleration();

			accX = acceleration.at(0);
			accY = acceleration.at(1);
			accZ = acceleration.at(2);
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "Linear Acceleration: (" 
							<< accX << "," << accY << "," << accZ << ")" );
		}

		//Get the altitude data
		if(packet.containsAltitude()){
			mAltitude = packet.altitude();
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "containsAltitude: ");
		}

			
		//Get the Latitude and Longitude Data
		if(packet.containsLatitudeLongitude()){
			XsVector latlon = packet.latitudeLongitude();
			mLatitude = latlon[0];
			mLongitude = latlon[1];
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "containsLatitudeLongitude");
		}

		// Get the magnetic field data
		if (packet.containsCalibratedMagneticField()) {
			XsVector magneticField = packet.calibratedMagneticField();

			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "Magnetic Field: ("<< magneticField[0] <<" , " 
							<< magneticField[1] << " , "<< magneticField[2]   << ")");	

			magX = magneticField.at(0);
			magY = magneticField.at(1);
			magZ = magneticField.at(2);
		}
					
		// Get Temperature data
		if (packet.containsTemperature()){
			double temperature = packet.temperature();
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "Temperature : " << temperature);
			mTemperature = temperature;
		}

		// Get Pressure Data 
		if(packet.containsPressure() ){
			XsPressure pressure = packet.pressure();
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "Pressure : " << pressure.m_pressure);
			mPressure= pressure.m_pressure;
		}	
	
		// Get Velocity Data
		if( packet.containsVelocity() ){
			XsVector velocity = packet.velocity();
			mVelocityX = velocity.at(0);
			mVelocityY = velocity.at(1);
			mVelocityZ = velocity.at(2);

			ROS_INFO_STREAM(" Velocity [ x (east) : " << mVelocityX << ", y (north) : "
					<< mVelocityY << ", z (up) " << mVelocityZ << " ]");
		}


		// Get GPS PVT Data
		if( packet.containsGpsPvtData() ){
			XsGpsPvtData gpsPvtData = packet.gpsPvtData();
			ROS_INFO_STREAM(" Horizontal accuracy: " << gpsPvtData.m_hacc ); 
		}

		if(packet.containsRawAcceleration() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "Raw Acceleration") ;}
		if(packet.containsRawGyroscopeData() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "Raw Gyroscope") ;}
		if(packet.containsRawGyroscopeTemperatureData() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "RawGyrTemp");}
		if(packet.containsRawMagneticField() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "RawMag");}
		if(packet.containsRawTemperature() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "RawTemp");}
		if(packet.containsRawData() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "RawData");}
		if(packet.containsCalibratedData() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "Calib Data");}
		if(packet.containsSdiData() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "SDI");}
		//if(packet.containsStatusByte() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "StatusByte");}
		if(packet.containsDetailedStatus() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "DetailedStatus");}
		if(packet.containsPacketCounter8() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "PacketCounter8");}
		if(packet.containsPacketCounter() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "PacketCounter");}
		if(packet.containsSampleTimeFine() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "SampleTimeFine");}
		if(packet.containsSampleTimeCoarse() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "SampleTimeCoarse");}
		if(packet.containsSampleTime64() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "SampleTime64");}
		if(packet.containsFreeAcceleration() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "FreeAcceleration");}
		if(packet.containsPressureAge() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "PressureAge");}
		if(packet.containsAnalogIn1Data() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "AN1IN");}
		if(packet.containsAnalogIn2Data() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "AN2IN");}
		if(packet.containsPositionLLA() ){ 
			
			XsVector posLLA = packet.positionLLA();
			mLatitude = posLLA[0];
			mLongitude = posLLA[1];
			mAltitude = posLLA[2];
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "mLatitude = " << mLatitude);
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "mLongitude = " << mLongitude);
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, "mAltitude = " << mAltitude);

			//for(int i=0; i < posLLA.size(); i++){
			//ROS_INFO_STREAM("posLLA[" << i << "] : " << posLLA[i] ); 
			//}
		}
		if(packet.containsUtcTime() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "UTC-Time");}
		if(packet.containsFrameRange() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "Frame Range");}
		if(packet.containsRssi() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "RSSI");}
		if(packet.containsRawGpsDop() ){ 
			//ROS_INFO_THROTTLE(THROTTLE_VALUE, "DOP");}
			XsRawGpsDop dop = packet.rawGpsDop();
			m_gdop = ((float) dop.m_gdop)/100;
			m_pdop = ((float) dop.m_pdop)/100;
			m_tdop = ((float) dop.m_tdop)/100;
			m_vdop = ((float) dop.m_vdop)/100;
			m_hdop = ((float) dop.m_hdop)/100;
			m_edop = ((float) dop.m_edop)/100;
			m_ndop = ((float) dop.m_ndop)/100;
			m_itow = ((float) dop.m_itow)/1000;

			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE, 
						"GDOP = " << m_gdop <<
						",PDOP = " << m_pdop <<						
						",TDOP = " << m_tdop <<						
						",VDOP = " << m_vdop <<						
						",HDOP = " << m_hdop <<						
						",EDOP = " << m_edop <<						
						",NDOP = " << m_ndop <<						
						",ITOW = " << m_itow);			
		}
		if(packet.containsRawGpsSol() ){
			// ROS_INFO_THROTTLE(THROTTLE_VALUE, "SOL");}
			XsRawGpsSol sol = packet.rawGpsSol();
			mPositionAccuracy = sol.m_pacc;
			mSpeedAccuracy = sol.m_sacc;
			mSatelliteNumber = sol.m_numsv;
			mGpsFixStatus = sol.m_gpsfix;
			
			gpsVelocityX=sol.m_ecef_vx/100;
			gpsVelocityY=sol.m_ecef_vy/100;
			gpsVelocityZ=sol.m_ecef_vz/100;
			ROS_INFO_STREAM_THROTTLE(THROTTLE_VALUE,
						"Pos Acc = " << mPositionAccuracy <<
						",Speed Acc = " << mSpeedAccuracy <<
						",Sat Number = " << mSatelliteNumber <<
						",GPS FIX = " << std::hex << mGpsFixStatus);
		}
		if(packet.containsRawGpsTimeUtc() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "GPS-UTC");}
		if(packet.containsRawGpsSvInfo() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "SV INFO");}
		//	if(packet.containsTriggerIndication() ){ ROS_INFO_THROTTLE(THROTTLE_VALUE, "TRIGGER");}




		//Get Status byte		
		if( packet.containsStatus() ){
			mStatus = packet.status();
			ROS_INFO_THROTTLE(10, "Status: %.8X", mStatus);
		}
}
Example #15
void mavrosPoseController::control_loop(){
  ros::Rate r( rate_ );
  while( alive_.try_lock() && ros::ok() )
  {
    feedback_data_lock_.lock();
    std::shared_ptr<geometry_msgs::PoseStamped> tmp_pose( cur_pose_ );
    std::shared_ptr<geometry_msgs::TwistStamped> tmp_twist( cur_twist_ );
    feedback_data_lock_.unlock();
    setpoint_data_lock_.lock();
    std::shared_ptr<geometry_msgs::Pose> tmp_setpoint( desired_set_point_ );
    setpoint_data_lock_.unlock();

    // current time
    double ct = ros::Time::now().toSec();

    // process the cascaded z pid loops
    ROS_INFO_THROTTLE(1,"::::::::::::::::::::::::::::::::::::::::::::");
    ROS_INFO_THROTTLE(1,"::::::::::::::::::::[pos]:::::::::::::::::::");

    z_pos_->setSetPoint( tmp_setpoint->position.z );
    double desired_z_vel = z_pos_->process( ct, tmp_pose->pose.position.z );
    ROS_INFO_THROTTLE(1, "[CE]::%f  [DE]::%f  [SE]::%f", z_pos_->getCE(), z_pos_->getDE(), z_pos_->getSE() );

    y_pos_->setSetPoint( tmp_setpoint->position.y );
    double desired_y_vel = y_pos_->process( ct, tmp_pose->pose.position.y );

    x_pos_->setSetPoint( tmp_setpoint->position.x );
    double desired_x_vel = x_pos_->process( ct, tmp_pose->pose.position.x );
    ROS_INFO_THROTTLE(1, "[x]::%f  [y]::%f  [z]::%f", desired_x_vel, desired_y_vel, desired_z_vel );
    ROS_INFO_THROTTLE(1, "::::::::::::::::::::[vel]:::::::::::::::::::");
    z_vel_->setSetPoint( desired_z_vel );
    y_vel_->setSetPoint( desired_y_vel );
    x_vel_->setSetPoint( desired_x_vel );
    double z_cmd = z_vel_->process( ct, tmp_twist->twist.linear.z );
    ROS_INFO_THROTTLE(1, "[desired_z_cmd]::%f", (z_cmd)/z_throttle_divider_ + z_throttle_zero_ );
    std_msgs::Float64 throttle;
    throttle.data = ((z_cmd)/z_throttle_divider_) + z_throttle_zero_;
    attThrottlePub.publish(throttle);
    


    double y_world_cmd = y_vel_->process( ct, tmp_twist->twist.linear.y );
    double x_world_cmd = x_vel_->process( ct, tmp_twist->twist.linear.x );

    double mag = sqrt(x_world_cmd*x_world_cmd + y_world_cmd*y_world_cmd);
    double theta = atan2(y_world_cmd, x_world_cmd);
    ROS_INFO_THROTTLE(1, "[x_world_cmd]::%f  [y_world_cmd]::%f  [mag]::%f  [theta]::%f", x_world_cmd, y_world_cmd, mag, theta);

    double cr, cp, yaw;
    tf::Quaternion q = tf::Quaternion( tmp_pose->pose.orientation.x, tmp_pose->pose.orientation.y, tmp_pose->pose.orientation.z, tmp_pose->pose.orientation.w );
    tf::Matrix3x3  m = tf::Matrix3x3(q);
    m.getRPY(cr, cp, yaw);

    ROS_INFO_THROTTLE(1, "[yaw]::%f", yaw);

    double pitch_cmd = (sin(yaw)*x_world_cmd + cos(yaw)*y_world_cmd);
    double roll_cmd  = cos(yaw)*x_world_cmd - sin(yaw)*y_world_cmd;
    ROS_INFO_THROTTLE(1, "[pitch_cmd]::%f  [roll_cmd]::%f", (pitch_cmd), roll_cmd );

    geometry_msgs::PoseStamped att_twist;
    att_twist.header.stamp = ros::Time::now();
    att_twist.pose.orientation.x = roll_cmd;
    att_twist.pose.orientation.y = pitch_cmd;
    att_twist.pose.orientation.w = 1.0;

    attitudePub.publish(att_twist);

    // some debugging publishers
    std_msgs::Float64 roll;
    roll.data = roll_cmd;
    attRollPub.publish(roll);
    std_msgs::Float64 pitch;
    pitch.data = pitch_cmd;
    attPitchPub.publish(pitch);

    alive_.unlock();
    r.sleep();
  }
}
Example #16
void Channel::onTimer(const ros::TimerEvent& e, RoboTeQ::status_t& status)
{
	/* Register time */
	ros::Time now = ros::Time::now();
	std::stringstream ss, out; /* streams for holding status message and command output */

	double period = 0, feedback = 0, feedback_filtered = 0, current_velocity = feedback_filter.get()*ticks_to_meter;

	if(status.online) /* is set when controller answers to FID request */
	{
		ss << "controller_online ";
		if(status.initialised) /* is set when init function completes */
		{
			ss << "controller_initialised ";
			if(status.responding) /* is set if the controller publishes serial messages */
			{
				ss << "controller_responding ";
				if(status.cmd_vel_publishing) /* is set if someone publishes twist messages */
				{
					ss << "cmd_vel_publishing ";
					if(status.deadman_pressed) /* is set if someone publishes true on deadman topic */
					{
						ss << "deadman_pressed ";

						if(status.emergency_stop)
						{
							/* release emergency stop */
							transmit("!MG\r");
							status.emergency_stop = false;
							current_setpoint = 0;
						}

						/* Calculate period */
						period = (now - time_stamp.last_regulation).toSec();

						/* Get latest feedback and reset */
						feedback = (( (double)hall_value)*ticks_to_meter)/period;
						hall_value = 0;

						/* Filter feedback */
						feedback_filtered = velocity_filter.update(feedback);


						/* Get new setpoint from regulator */
						//current_setpoint += regulator.output_from_input(velocity, feedback_filtered , period);
						current_setpoint = velocity + regulator.output_from_input(velocity, current_velocity , period);
						current_thrust =  (int)(current_setpoint * mps_to_thrust);

						/* Implement maximum output*/
						if(current_thrust >  max_output) current_thrust = max_output;
						if(current_thrust < -max_output) current_thrust = -max_output;

						/* Send motor output command  */
						out << "!G " << ch << " " << current_thrust << "\r";
						transmit(out.str());

						// Upkeep
						time_stamp.last_regulation = now;
					}
					else /* deadman not pressed */
					{
						/* Set speeds to 0 and activate emergency stop */
						transmit("!EX\r");
						status.emergency_stop = true;
						transmit("!G 1 0\r");
						transmit("!G 2 0\r");
						current_setpoint = current_thrust = 0;
						velocity = 0;
						regulator.reset_integrator();
					}
				}
				else /* Cmd_vel is not publishing */
				{
					/* Set speeds to 0 and activate emergency stop */
					transmit("!EX\r");
					status.emergency_stop = true;
					transmit("!G 1 0\r");
					transmit("!G 2 0\r");
					current_setpoint = current_thrust = 0;
					velocity = 0;
					regulator.reset_integrator();

				}
			}
			else /* controller is not responding */
			{
				ROS_INFO_THROTTLE(5,"%s: Controller is not responding",ros::this_node::getName().c_str());
				down_time++;
				if(down_time > 10)
				{
					/* Try to re-connect and re-initialise */
					transmit("?FID\r");
					down_time = 0;
				}
			}
		}
		else /* Controller is not initialised */
		{
			ROS_INFO("%s: Controller is not initialised",ros::this_node::getName().c_str());
			//initController("standard");
			//status.initialised = true;'
			transmit("?FID\r");
		}
	}
	else /* controller is not online */
	{
		ROS_INFO_THROTTLE(5,"%s: Controller is not yet online",ros::this_node::getName().c_str());
		/* Try to re-connect and re-initialise */
		transmit("?FID\r");
	}
	/* Publish feedback */
	message.feedback.header.stamp = now;
	message.feedback.velocity = current_velocity; /* Velocity in m/s */
	message.feedback.velocity_setpoint = velocity;
	message.feedback.thrust = (current_thrust*100)/roboteq_max; /* Thrust in % */
	publisher.feedback.publish(message.feedback);

	/* Publish the status message */
	message.status.header.stamp = ros::Time::now();
	message.status.data = ss.str();
	publisher.status.publish(message.status);
}
Example #17
void OrchardDetector::processLaserScan(const sensor_msgs::LaserScan::ConstPtr& laser_scan) {
	float rthetamean = 0, rrhomean = 0, lthetamean = 0, lrhomean = 0,
			theta = 0, rho = 0;
	double x0 = 0, y0 = 0, a, b;
	int lmc = 0, rmc = 0;

	static int count = 0;

	clearRawImage();

	sensor_msgs::PointCloud cloud;
	projector_.projectLaser(*laser_scan, cloud);

	int size = cloud.points.size();

	for (int i = 0; i < size; i++) {
		if (abs(cloud.points[i].y) < 1.5 && abs(cloud.points[i].y) > 0.5 &&cloud.points[i].y < 0 && cloud.points[i].x > -1 && cloud.points[i].x < 3) {
			point1.x = ((int)(cloud.points[i].x * 50) + 300);
			point1.y = ((int)(cloud.points[i].y * 50) + 300);
			point2.x = point1.x - 4;
			point2.y = point1.y;
			cvLine(rawData_, point1, point2, CV_RGB(255,255,255), 2, CV_AA, 0);
		}
	}

	cvCvtColor(rawData_, workingImg_, CV_BGR2GRAY);
	//cvThreshold(workingImg_,workingImg_,)

	cvThreshold(workingImg_,workingImg_, 100, 255, CV_THRESH_BINARY);

	CvMemStorage* stor = cvCreateMemStorage(0);
	CvSeq* cont = cvCreateSeq(CV_SEQ_ELTYPE_POINT, sizeof(CvSeq),
			sizeof(CvPoint), stor);

	// find external contours
	cvFindContours(workingImg_, stor, &cont, sizeof(CvContour),
			CV_RETR_EXTERNAL, 2, cvPoint(0, 0));

	for (; cont; cont = cont->h_next) {
		// size of pointArray and polygon
		int point_cnt = cont->total;

		// no small contours
		if (point_cnt > 20) {

			CvPoint* PointArray = (CvPoint*) malloc(point_cnt * sizeof(CvPoint));

			// Get contour point set.
			cvCvtSeqToArray(cont, PointArray, CV_WHOLE_SEQ);

			for (int i = 0; i <= point_cnt; i++) {
				// Show the Pixelcoordinates
				// have some fun with the color
				cvLine(rawData_, PointArray[i % point_cnt], PointArray[(i + 1) % point_cnt], cvScalar(0, 0, 0), 10);
				cvLine(workingImg_, PointArray[i % point_cnt], PointArray[(i + 1) % point_cnt], cvScalar(0, 0, 0), 50);
			}

			free(PointArray);
			continue;
		}

		// Allocate memory for contour point set.
		CvPoint* PointArray = (CvPoint*) malloc(point_cnt * sizeof(CvPoint));

		// Get contour point set.
		cvCvtSeqToArray(cont, PointArray, CV_WHOLE_SEQ);

		for (int i = 0; i <= point_cnt; i++) {
			// Show the Pixelcoordinates
			//cout << PointArray[i].x << " " << PointArray[i].y << endl;
			// have some fun with the color
			int h_value = int(i * 3.0 / point_cnt * i) % 100;
			cvLine(rawData_, PointArray[i % point_cnt], PointArray[(i + 1) % point_cnt], cvScalar(0, 255, 0), 4);
		}

		free(PointArray);

	}

	//cvDilate(workingImg_,workingImg_, 0, 3);
	//cvErode(workingImg_,workingImg_, 0, 3);

	//cvShowImage("Wporking", workingImg_);
	cvWaitKey(10);

	lines_ = cvHoughLines2(workingImg_, storage_, CV_HOUGH_STANDARD, 1, CV_PI / 180, 15, 0, 0);

	//cvHoughLines2(edgesImage, storage, CV_HOUGH_PROBABILISTIC, 1, CV_PI/360, 30, 10, MAXIMUM_GAP);


	for (int i = 0; i < MIN(lines_->total,15); i++) {


		float* line = (float*) cvGetSeqElem(lines_, i);
		rho = line[0];
		theta = line[1];

		a = cos(theta);
		b = sin(theta);
		x0 = a * rho;
		y0 = b * rho;
		point1.x = cvRound(x0 + 600 * (-b));
		point1.y = cvRound(y0 + 600 * (a));
		point2.x = cvRound(x0 - 600 * (-b));
		point2.y = cvRound(y0 - 600 * (a));
		point3.x = 300, point3.y = 300;
		point4.x = 300, point4.y = 600;
		point5.x = 300, point5.y = 0;

		cvLine(rawData_, point3, point4, CV_RGB(0,0,255), 1, CV_AA, 0);
		cvLine(rawData_, point3, point5, CV_RGB(0,0,255), 1, CV_AA, 0);

		if (intersect(point1, point2, point3, point4)) {
			{
				if(abs(left_angle_ -( (theta) - CV_PI / 2)) < 0.2){
					rrhomean += rho;
					rthetamean += theta;
					rmc++;
					cvLine(workingImg_, point1, point2, CV_RGB(0,0,255), 1, CV_AA, 0);
				}
			}
		} else if (intersect(point1, point2, point3, point5)) {
			{
				if(abs(right_angle_ -( (theta) - CV_PI / 2)) < 0.5){
					lrhomean += rho;
					lthetamean += theta;
					lmc++;
					cvLine(workingImg_, point1, point2, CV_RGB(255,255,255), 1,CV_AA, 0);
				}
			}
		}
	}
	if(lmc > 5){

		theta = lthetamean / lmc;
		rho = lrhomean / lmc;

		a = cos(theta);
		b = sin(theta);
		x0 = a * rho;
		y0 = b * rho;
		point1.x = cvRound(x0 + 600 * (-b)), point1.y = cvRound(y0 + 600 * (a));
		point2.x = cvRound(x0 - 600 * (-b)), point2.y = cvRound(y0 - 600 * (a));

		cvLine(rawData_, point1, point2, CV_RGB(255,0,0), 3, CV_AA, 0);

		point4.x = 300;
		point4.y = 300;
		point5.x = point4.x + 200 * sin(CV_PI - (theta - CV_PI / 2));
		point5.y = point4.y + 200 * cos(CV_PI - (theta - CV_PI / 2));

		cvLine(rawData_, point5, point4, CV_RGB(255,255,255), 1, CV_AA, 0);

		rows_.header.stamp = ros::Time::now();
		rows_.leftvalid = false;
		rows_.rightvalid = false;

		if (intersect(point1, point2, point4, point5)) {
			right_distance_ = sqrt(((intersection_.y - 300) * (intersection_.y
					- 300)) + ((intersection_.x - 300) * (intersection_.x - 300)))
					* 2;

			right_angle_ = (theta) - CV_PI / 2;
			count++;

			rolling_mean_[count % 100][0] = right_angle_;
			right_angle_ = 0;
			for (int i = 0; i < 100; i++) {
				right_angle_ += rolling_mean_[i][0];
			}
			right_angle_ = right_angle_ / 100;

			rolling_mean_[count % 50][1] = right_distance_;
			right_distance_ = 0;
			for (int i = 0; i < 50; i++) {
				right_distance_ += rolling_mean_[i][1];
			}
			right_distance_ = right_distance_ / 50;


			inrow_cnt++;
			if(inrow_cnt > 10)
				inrow_cnt = 10;

			/*

			ROS_INFO("angle: %f",right_angle_);
			//cvLine(rawData_, point1, point2, CV_RGB(0,255,0), 1, CV_AA, 0);
	 */
			geometry_msgs::Quaternion pose_quat = tf::createQuaternionMsgFromYaw(
					right_angle_);

			marker_r.header.frame_id = "/laser_link";
			marker_r.header.stamp = ros::Time::now();

			marker_r.ns = "basic_shapes";
			marker_r.id = 0;

			// Set the marker type.  Initially this is CUBE, and cycles between that and SPHERE, ARROW, and CYLINDER
			marker_r.type = visualization_msgs::Marker::CUBE;

			// Set the marker action.  Options are ADD and DELETE
			marker_r.action = visualization_msgs::Marker::ADD;

			// Set the pose of the marker.  This is a full 6DOF pose relative to the frame/time specified in the header
			marker_r.pose.position.x = 0;
			marker_r.pose.position.y = -((float) right_distance_) / 100;
			marker_r.pose.position.z = 0;
			marker_r.pose.orientation = pose_quat;

			// Set the scale of the marker -- 1x1x1 here means 1m on a side
			marker_r.scale.x = 10.0;
			marker_r.scale.y = 0.1;
			marker_r.scale.z = 0.5;

			// Set the color -- be sure to set alpha to something non-zero!
			marker_r.color.r = 0.0f;
			marker_r.color.g = 1.0f;
			marker_r.color.b = 0.0f;
			marker_r.color.a = 0.5;

			marker_r.lifetime = ros::Duration(.5);

			// Publish the marker
			marker_r_pub.publish(marker_r);

			rows_.rightvalid = true;
			rows_.rightdistance = ((float) right_distance_) / 100;
			rows_.rightangle = right_angle_;

		} else {

			inrow_cnt--;
			inrow_cnt--;
			if(inrow_cnt < -5){
				inrow_cnt = -5;
			}
					/*
			left_distance_ = -1;
			left_angle_ = 6000;

			rows_.rightdistance = ((float) left_distance_) / 100;
			rows_.rightangle = theta - CV_PI / 2;
			*/
			//	printf("\nDistance from right:%dmm",hough_lines.right_distance);
			//sprintf(textright, "Distance from right: %dmm, angle: %d",hough_lines.right_distance, hough_lines.right_angle);
		}

	}else{

		ROS_INFO_THROTTLE(1,"lmc = %d", lmc);

		inrow_cnt--;
		inrow_cnt--;
		if(inrow_cnt < -5){
			inrow_cnt = -5;
		}
	}
	CvFont font;
	cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 0.5, 0.5, 0, 1, CV_AA);


	if(inrow_cnt > 0){
		twist_msg_.twist.angular.z = -((right_angle_*1.5) - (((float)right_distance_/100) - 1.2) );
		cvPutText(rawData_, "INROW-NAVIGATION", cvPoint(10, 130), &font, cvScalar(255, 255, 255, 0));
		cvLine(rawData_, cvPoint(10, 140), cvPoint(10 + abs(inrow_cnt * 20),140),CV_RGB(0,255,0), 3, CV_AA, 0);
		rows_.headland = false;
	}else{
		twist_msg_.twist.angular.z = M_PI/4;
		cvPutText(rawData_, "HEADLAND", cvPoint(10, 130), &font, cvScalar(255, 255, 255, 0));
		cvLine(rawData_, cvPoint(10, 140), cvPoint(10 + abs(inrow_cnt * 20), 140 ),CV_RGB(255,0,0), 3, CV_AA, 0);
		rows_.headland = true;
	}
	twist_pub.publish(twist_msg_);

	cvLine(rawData_, cvPoint(0, 300 + 150), cvPoint(600, 300 + 150),CV_RGB(255,255,255), 1, CV_AA, 0);
	cvLine(rawData_, cvPoint(0, 300 - 150), cvPoint(600, 300 - 150),CV_RGB(255,255,255), 1, CV_AA, 0);
	rows_.header.stamp = ros::Time::now();

	row_pub.publish(rows_);

	cvShowImage("TEST", rawData_);
	cvWaitKey(10);
	//pc_pub.publish(cloud);

}
Example #18
void handleMessage(const nav_msgs::OdometryConstPtr &msg)
{
  ROS_INFO_THROTTLE(1.0, "Receiving messages.");
}
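This last callback is not wired to anything in the snippet. A minimal node that would exercise it could look like the following; the node name and the "odom" topic are assumptions:

#include <ros/ros.h>
#include <nav_msgs/Odometry.h>

// Prints at most one line per second, no matter how fast odometry arrives.
void handleMessage(const nav_msgs::OdometryConstPtr& msg)
{
  ROS_INFO_THROTTLE(1.0, "Receiving messages.");
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "odom_listener");                         // node name is an assumption
  ros::NodeHandle nh;
  ros::Subscriber sub = nh.subscribe("odom", 10, handleMessage);  // topic name is an assumption
  ros::spin();
  return 0;
}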