Example #1
bool checkCopter(double copter_x, double copter_y, double copter_z, 
					double x, double y, double z, char &colour){
	// Initially red. For when the QC is far away.
	colour = 'r';

	// Region where the marker should turn yellow, i.e. when the QC is approaching the tapping vicinity.
	double markerZone_height = z + (Radius/2) + 0.0625;
	double markerZone_bottom_x = x - Radius*2;
	double markerZone_top_x = x + Radius*2;
	double markerZone_bottom_y = y - Radius*2;
	double markerZone_top_y = y + Radius*2;

	//This uses half of the radius plus a small margin. If it seems to be too much, increase the divisor below.
	//The radius does need to be present, otherwise it may not detect the copter every time.
	double roomba_height = z + (Radius/2) + 0.0625;

	//The x coordinates that the copter has to be in
	double bottom_x = x - Radius*2;
	double top_x = x + Radius*2;

	//The y coordinates that the copter has to be in
	double bottom_y = y - Radius*2;
	double top_y = y + Radius*2;

	//If the copter coordinates are (0,0,0), something is probably wrong
	if (copter_x == 0 && copter_y == 0 && copter_z == 0) return false;

	// If the copter is close to the marker.
	if(markerZone_bottom_x <= copter_x && copter_x <= markerZone_top_x){
		if(markerZone_bottom_y <= copter_y && copter_y <= markerZone_top_y){
			if(copter_z <= markerZone_height){
				colour = 'y';
			}
		}
	}
	
	//If the copter is within the radius, return true
	if (bottom_x <= copter_x && copter_x <= top_x){
		if (bottom_y <= copter_y && copter_y <= top_y){
			if((copter_z - 0.182) <= roomba_height){ // 0.182 m is the distance between the copter's centre of mass and the tips of its legs (base)
				timer_5.stop();
				timer_20.stop();
				timer_5.start();
				timer_20.start();
				count_5 = looprate*3; // 3 so it never goes lower than limit
				count_20 = looprate*3;
				colour = 'g';
				return true;
			}
		}
	}
	return false;
}
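A short usage sketch of checkCopter(): the copter and Roomba positions would typically come from pose callbacks, and the returned colour can drive a visualization marker. Everything outside the call (copter_pose, roomba_pose, the marker handling) is illustrative and not taken from the original code.

// Usage sketch only; copter_pose and roomba_pose are assumed to be filled in by pose callbacks.
char colour;
bool inRange = checkCopter(copter_pose.x, copter_pose.y, copter_pose.z,
                           roomba_pose.x, roomba_pose.y, roomba_pose.z, colour);
if (inRange)
{
	ROS_INFO("Copter is within tapping range of the Roomba");
}
// colour now holds 'r', 'y' or 'g' and can be used to recolour the RViz marker.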
Example #2
    void handle_heartbeat(const mavlink_message_t *msg, uint8_t sysid, uint8_t compid) {
        if (!uas->is_my_target(sysid)) {
            ROS_DEBUG_NAMED("sys", "HEARTBEAT from [%d, %d] dropped.", sysid, compid);
            return;
        }

        mavlink_heartbeat_t hb;
        mavlink_msg_heartbeat_decode(msg, &hb);

        // update context && setup connection timeout
        uas->update_heartbeat(hb.type, hb.autopilot);
        uas->update_connection_status(true);
        timeout_timer.stop();
        timeout_timer.start();

        // build state message after updating uas
        auto state_msg = boost::make_shared<mavros::State>();
        state_msg->header.stamp = ros::Time::now();
        state_msg->armed = hb.base_mode & MAV_MODE_FLAG_SAFETY_ARMED;
        state_msg->guided = hb.base_mode & MAV_MODE_FLAG_GUIDED_ENABLED;
        state_msg->mode = uas->str_mode_v10(hb.base_mode, hb.custom_mode);

        state_pub.publish(state_msg);
        hb_diag.tick(hb.type, hb.autopilot, state_msg->mode, hb.system_status);
    }
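The stop()/start() pair above re-arms a connection-timeout watchdog: if no further HEARTBEAT arrives before the timer fires, the connection is marked lost. A minimal sketch of how such a timer could be wired up follows; the callback name, the placeholder class name Plugin and the 10-second period are assumptions for illustration, not the plugin's actual code.

    // Sketch: one possible wiring of the connection-timeout watchdog re-armed above.
    void timeout_cb(const ros::TimerEvent &event) {
        // No HEARTBEAT received within the timeout window.
        uas->update_connection_status(false);
    }

    // In the plugin's initialization (one-shot timer, re-armed on every HEARTBEAT):
    //   timeout_timer = nh.createTimer(ros::Duration(10.0), &Plugin::timeout_cb, this, true);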
Example #3
    void autopilot_version_cb(const ros::TimerEvent &event) {
        bool ret = false;

        try {
            auto client = nh.serviceClient<mavros::CommandLong>("cmd/command");

            mavros::CommandLong cmd{};
            cmd.request.command = MAV_CMD_REQUEST_AUTOPILOT_CAPABILITIES;
            cmd.request.confirmation = false;
            cmd.request.param1 = 1.0;

            ROS_DEBUG_NAMED("sys", "VER: Sending request.");
            ret = client.call(cmd);
        }
        catch (ros::InvalidNameException &ex) {
            ROS_ERROR_NAMED("sys", "VER: %s", ex.what());
        }

        ROS_ERROR_COND_NAMED(!ret, "sys", "VER: command plugin service call failed!");

        if (version_retries > 0) {
            version_retries--;
            ROS_WARN_COND_NAMED(version_retries != RETRIES_COUNT - 1, "sys",
                                "VER: request timeout, retries left %d", version_retries);
        }
        else {
            uas->update_capabilities(false);
            autopilot_version_timer.stop();
            ROS_WARN_NAMED("sys", "VER: your FCU don't support AUTOPILOT_VERSION, "
                           "switched to default capabilities");
        }
    }
Example #4
    void connection_cb(bool connected) {
        // if connection changes, start delayed version request
        version_retries = RETRIES_COUNT;
        if (connected)
            autopilot_version_timer.start();
        else
            autopilot_version_timer.stop();

        // add/remove APM diag tasks
        if (connected && disable_diag && uas->is_ardupilotmega()) {
#ifdef MAVLINK_MSG_ID_MEMINFO
            UAS_DIAG(uas).add(mem_diag);
#endif
#ifdef MAVLINK_MSG_ID_HWSTATUS
            UAS_DIAG(uas).add(hwst_diag);
#endif
#if !defined(MAVLINK_MSG_ID_MEMINFO) || !defined(MAVLINK_MSG_ID_HWSTATUS)
            ROS_INFO_NAMED("sys", "SYS: APM detected, but mavros uses different dialect. "
                           "Extra diagnostic disabled.");
#else
            ROS_DEBUG_NAMED("sys", "SYS: APM extra diagnostics enabled.");
#endif
        }
        else {
            UAS_DIAG(uas).removeByName(mem_diag.getName());
            UAS_DIAG(uas).removeByName(hwst_diag.getName());
            ROS_DEBUG_NAMED("sys", "SYS: APM extra diagnostics disabled.");
        }
    }
Example #5
/**
 * Callback:
 * initialises the list of publishers that were inactive during a period T
 */
void init_caplist(const ros::TimerEvent& ){
	ROS_INFO("Initialising the publishers.......!");
	timer2.stop();
	for(int i=0; i<(int)CAP_LIST.size()+1; i++){
		chatter_pub[i].shutdown();
	}
	//ros::spinOnce();
	timer2.start();

	ROS_INFO("Initialisation finished *");
}
Example #6
void setVelocity(double linearVel, double angularVel) 
{
  // Stopping and starting the timer causes it to start counting from 0 again.
  // As long as this is called before the kill switch timer reaches killSwitchTimeout seconds,
  // the rover's kill switch won't be triggered.
  killSwitchTimer.stop();
  killSwitchTimer.start();
  
  velocity.linear.x = linearVel * 1.5;
  velocity.angular.z = angularVel * 8; //scaling factor for sim; removed by aBridge node
  velocityPublish.publish(velocity);
}
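For context, a minimal sketch of how the kill-switch watchdog used above might be wired up. The handler name, its body and the topic setup are assumptions for illustration; only killSwitchTimer, killSwitchTimeout, velocity and velocityPublish appear in the original snippet.

// Sketch only: a plausible kill-switch handler for the watchdog reset in setVelocity().
void killSwitchTimerEventHandler(const ros::TimerEvent&)
{
  // No setVelocity() call arrived within killSwitchTimeout seconds: command zero velocity.
  velocity.linear.x = 0.0;
  velocity.angular.z = 0.0;
  velocityPublish.publish(velocity);
}

// In node setup (killSwitchTimeout is the constant referenced in the comment above):
//   killSwitchTimer = nh.createTimer(ros::Duration(killSwitchTimeout),
//                                    killSwitchTimerEventHandler);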
Example #7
void RosAriaNode::sonarDisconnectCb()
{
  if (!robot->tryLock()) {
    ROS_ERROR("Skipping sonarConnectCb because could not lock");
    return;
  }
  if (robot->areSonarsEnabled())
  {
    robot->disableSonar();
    sonar_tf_timer.stop();
  }
  robot->unlock();
}
Example #8
void TimerCallback(const ros::TimerEvent&) {
	aero_srr_msgs::ObjectLocationMsg test_msg;

	test_msg.pose.pose.position.x = x_pos;
	test_msg.pose.pose.position.y = y_pos;
	test_msg.pose.pose.position.z = z_pos;

	tf::Quaternion q;

	q.setRPY(rx_pos, ry_pos, rz_pos);

	tf::quaternionTFToMsg(q, test_msg.pose.pose.orientation);
	test_msg.header.frame_id = "/arm_mount";
	test_msg.pose.header.frame_id = test_msg.header.frame_id;
	test_msg.header.stamp = ros::Time::now();
	test_msg.pose.header.stamp = ros::Time::now();
	pub.publish(test_msg);
	timer.stop();

}
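The callback above publishes a single test pose and then stops its own timer, so it behaves like a one-shot. A sketch of how the timer might be created; the node handle, topic name and 1-second period are illustrative assumptions.

	// Sketch: creating the timer whose callback stops itself after the first firing.
	ros::NodeHandle nh;
	pub = nh.advertise<aero_srr_msgs::ObjectLocationMsg>("object_location", 1);
	timer = nh.createTimer(ros::Duration(1.0), TimerCallback);
	// Passing oneshot = true to createTimer would achieve the same single firing
	// without the explicit timer.stop() inside the callback.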
Example #9
  void openCamera(ros::Timer &timer)
  {
      timer.stop();
      while (!camera_)
      {
        try
        {
          boost::lock_guard<boost::recursive_mutex> lock(config_mutex_);
          camera_ = boost::make_shared<PMDCamboardNano>(device_serial, plugin_dir, source_plugin, process_plugin);
          device_serial = camera_->getSerialNumber().c_str();
          updater.setHardwareIDf("%s", device_serial.c_str());
          NODELET_INFO("Opened PMD camera with serial number \"%s\"", camera_->getSerialNumber().c_str());
          loadCalibrationData();
          NODELET_INFO("Loaded calibration data");
          camera_->update();

          camera_info_ = camera_->getCameraInfo();
        }
        catch (PMDCameraNotOpenedException& e)
        {
          camera_state_ = CAMERA_NOT_FOUND;
          if (device_serial != "")
          {
            std::stringstream err;
            err << "Unable to open PMD camera with serial number " << device_serial;
            state_info_ = err.str();
            NODELET_INFO("%s",state_info_.c_str());
          }
          else
          {
              state_info_ = "Unable to open PMD camera..";
              NODELET_INFO("%s",state_info_.c_str());
          }
          boost::lock_guard<boost::recursive_mutex> lock(config_mutex_);
          camera_.reset();
        }
        updater.update();
        boost::this_thread::sleep(boost::posix_time::seconds(open_camera_retry_period));
      }
      timer.start();
  }
Example #10
		/********** callback for the cmd velocity from the autonomy **********/
		void cmd_vel_callback(const geometry_msgs::Twist& msg)
		{
			watchdogTimer.stop();
			
			error.setValue(msg.linear.x - body_vel.linear.x, msg.linear.y - body_vel.linear.y, msg.linear.z - body_vel.linear.z);
			//std::cout << "error x: " << error.getX() << " y: " << error.getY() << " z: " << error.getZ() << std::endl;
			//std::cout << std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) << std::endl;
			error_yaw = msg.angular.z - body_vel.angular.z;
			//std::cout << "error yaw: " << error_yaw << std::endl;
			
			// If some time has passed between the last and the current body velocity samples, compute the feed-forward PD terms
			if (std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) > 0.00001)
			{	
				errorDot = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error - last_error);
				//std::cout << "errordot x: " << errorDot.getX() << " y: " << errorDot.getY() << " z: " << errorDot.getZ() << std::endl;
				
				errorDot_yaw = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error_yaw - last_error_yaw);
				//std::cout << "error dot yaw " << errorDot_yaw << std::endl;
				velocity_command.linear.x = cap_vel_auton(kx*msg.linear.x + (kp*error).getX() + (kd*errorDot).getX());
				velocity_command.linear.y = cap_vel_auton(ky*msg.linear.y + (kp*error).getY() + (kd*errorDot).getY());
				velocity_command.linear.z = cap_vel_auton(kz*msg.linear.z + (kp*error).getZ() + (kd*errorDot).getZ());
				velocity_command.angular.z = -1*cap_vel_auton(kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw); // z must be switched because bebop driver http://bebop-autonomy.readthedocs.org/en/latest/piloting.html
			}
			
			last_body_vel_time = curr_body_vel_time;// update last time body velocity was received
			last_error = error;
			last_error_yaw = error_yaw;
			
			error_gm.linear.x = error.getX(); error_gm.linear.y = error.getY(); error_gm.linear.z = error.getZ(); error_gm.angular.z = error_yaw;
			errorDot_gm.linear.x = errorDot.getX(); errorDot_gm.linear.y = errorDot.getY(); errorDot_gm.linear.z = errorDot.getZ(); errorDot_gm.angular.z = kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw;
			error_pub.publish(error_gm);
			errorDot_pub.publish(errorDot_gm);
			
			if (start_autonomous)
			{
				recieved_command_from_tracking = true;
			}
			
			watchdogTimer.start();
		}
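The helper cap_vel_auton() is referenced above but not shown; a plausible implementation simply saturates the commanded velocity to a configured bound. The bound max_auton_vel and the exact behaviour are assumptions, not taken from the original node.

		// Sketch: possible saturation helper matching the calls above (illustrative; needs <algorithm>).
		double cap_vel_auton(double value)
		{
			double max_auton_vel = 0.5;  // assumed symmetric limit on autonomous velocity commands
			return std::max(-max_auton_vel, std::min(max_auton_vel, value));
		}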
Example #11
    void handle_autopilot_version(const mavlink_message_t *msg, uint8_t sysid, uint8_t compid) {
        mavlink_autopilot_version_t apv;
        mavlink_msg_autopilot_version_decode(msg, &apv);

        autopilot_version_timer.stop();
        uas->update_capabilities(true, apv.capabilities);

        // Note based on current APM's impl.
        // APM uses custom version array[8] as a string
        ROS_INFO_NAMED("sys", "VER: Capabilities 0x%016llx", (long long int)apv.capabilities);
        ROS_INFO_NAMED("sys", "VER: Flight software:     %08x (%*s)",
                       apv.flight_sw_version,
                       8, apv.flight_custom_version);
        ROS_INFO_NAMED("sys", "VER: Middleware software: %08x (%*s)",
                       apv.middleware_sw_version,
                       8, apv.middleware_custom_version);
        ROS_INFO_NAMED("sys", "VER: OS software:         %08x (%*s)",
                       apv.os_sw_version,
                       8, apv.os_custom_version);
        ROS_INFO_NAMED("sys", "VER: Board hardware:      %08x", apv.board_version);
        ROS_INFO_NAMED("sys", "VER: VID/PID: %04x:%04x", apv.vendor_id, apv.product_id);
        ROS_INFO_NAMED("sys", "VER: UID: %016llx", (long long int)apv.uid);
    }
Example #12
    /*!
     * @param cmd_vel_msg Received message from topic
     */
    void cmdVelCallback(const geometry_msgs::Twist::ConstPtr& cmd_vel_msg)
    {
        if(autonomus_mode_) // Only if we are in autonomous mode
        {
            // Lock proxy to avoid interfering with laser scan reading
            boost::mutex::scoped_lock lock(proxy_lock_);
            ROS_DEBUG("Sent (vx,vtheta) [%f %f]", cmd_vel_msg->linear.x, cmd_vel_msg->angular.z);
            if (cmd_vel_msg->linear.x == 0.0) // If stopping
            {
                soft_stop(cmd_vel_msg->angular.z);
            }

            //ROS_DEBUG("Received cmd_vel msg [linear.x linear.y angular.z] [%f %f %f]", cmd_vel_msg->linear.x , cmd_vel_msg->linear.y,cmd_vel_msg->angular.z);
            //       float vel_linear = cmd_vel_msg->linear.x;
            // Send command to wheelchair
            last_linear_vel_ = cmd_vel_msg->linear.x;
            proxy_.motionSetSpeed(cmd_vel_msg->linear.x, cmd_vel_msg->angular.z);
            watchdog_timer.stop();
            if(watchdog_duration > 0)
                watchdog_timer = node_handle_.createTimer(ros::Duration(watchdog_duration), &BBRobotNode::watchdog_callback, this, true);
        }
    }
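The one-shot watchdog re-created above presumably stops the wheelchair if no new cmd_vel arrives within watchdog_duration seconds. A minimal sketch of such a callback; the body is an assumption, only the name watchdog_callback is taken from the snippet.

    // Sketch: possible body for the one-shot watchdog bound above (illustrative only).
    void watchdog_callback(const ros::TimerEvent& event)
    {
        boost::mutex::scoped_lock lock(proxy_lock_);
        ROS_WARN("cmd_vel watchdog expired, stopping the wheelchair");
        proxy_.motionSetSpeed(0.0, 0.0);
        last_linear_vel_ = 0.0;
    }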
Example #13
 EKF()
 {
     // Get Parameters
     ros::NodeHandle nhp("~");
     nhp.param<bool>("deadReckoning", deadReckoning, false);
     nhp.param<bool>("artificialSwitching", artificialSwitching, false);
     nhp.param<double>("visibilityTimeout", visibilityTimeout, 0.2);
     nhp.param<string>("cameraName",cameraName,"camera");
     nhp.param<string>("markerID",markerID,"100");
     nhp.param<double>("q",q,4.0); // process noise
     nhp.param<double>("r",r,4.0); // measurement noise
     nhp.param<double>("delTon",delTon,4.0);
     nhp.param<double>("delToff",delToff,1.0);
     
     // Initialize states
     xhat << 0,0,0.1;
     xlast << 0,0,0.1;
     lastImageTime = ros::Time::now().toSec();
     lastVelTime = lastImageTime;
     estimatorOn = true;
     gotCamParam = false;
     
     // Initialize EKF matrices
     Q = q*Matrix3d::Identity();
     R = r*Matrix2d::Identity();
     H << 1,0,0,
          0,1,0;
     
     // Get camera parameters
     cout << cameraName+"/camera_info" << endl;
     camInfoSub = nh.subscribe(cameraName+"/camera_info",1,&EKF::camInfoCB,this);
     ROS_DEBUG("Waiting for camera parameters on topic ...");
     do {
         ros::spinOnce();
         ros::Duration(0.1).sleep();
     } while (!(ros::isShuttingDown()) and !gotCamParam);
     ROS_DEBUG("Got camera parameters");
     
     // Output publishers
     outputPub = nh.advertise<switch_vis_exp::Output>("output",10);
     pointPub = nh.advertise<geometry_msgs::PointStamped>("output_point",10);
     
     // Subscribers for feature and velocity data
     if (deadReckoning)
     {
         targetVelSub = nh.subscribe("ugv0/odom",1,&EKF::targetVelCBdeadReckoning,this);
     }
     else
     {
         targetVelSub = nh.subscribe("ugv0/body_vel",1,&EKF::targetVelCBmocap,this);
     }
     camVelSub = nh.subscribe("image/body_vel",1,&EKF::camVelCB,this);
     featureSub = nh.subscribe("markerCenters",1,&EKF::featureCB,this);
     
     // Switching
     if (artificialSwitching)
     {
         switchingTimer = nh.createTimer(ros::Duration(delTon),&EKF::switchingTimerCB,this,true);
     }
     else
     {
         // Initialize watchdog timer for feature visibility check
         watchdogTimer = nh.createTimer(ros::Duration(visibilityTimeout),&EKF::timeout,this,true);
         watchdogTimer.stop(); // Don't start the watchdog until the feature is first visible
     }
 }
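The watchdog's timeout callback (&EKF::timeout) is referenced but not shown in this example. A minimal sketch of what it might do, consistent with how featureCB sets estimatorOn back to true when the feature re-enters the field of view; the body is an assumption.

 // Sketch: possible watchdog callback, assuming it only flags the feature as lost.
 void timeout(const ros::TimerEvent& event)
 {
     // Feature left the field of view: switch the estimator into prediction mode.
     estimatorOn = false;
 }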
Example #14
 // Callback for estimator
 void featureCB(const aruco_ros::CenterConstPtr& center)
 {
     // Disregard erroneous tag tracks
     if (markerID.compare(center->header.frame_id) != 0)
     {
         return;
     }
     
     // Switching
     if (artificialSwitching)
     {
         if (!estimatorOn)
         {
             return;
         }
     }
     else
     {
         // Feature in FOV
         watchdogTimer.stop();
         estimatorOn = true;
     }
     
     // Time
     ros::Time timeStamp = center->header.stamp;
     double timeNow = timeStamp.toSec();
     double delT = timeNow - lastImageTime;
     lastImageTime = timeNow;
     
     // Object trans w.r.t. image frame, for ground truth
     Vector3d trans;
     tf::StampedTransform transform;
     tfl.waitForTransform("image","ugv0",timeStamp,ros::Duration(0.1));
     tfl.lookupTransform("image","ugv0",timeStamp,transform);
     tf::Vector3 temp_trans = transform.getOrigin();
     trans << temp_trans.getX(),temp_trans.getY(),temp_trans.getZ();
     
     // Object pose w.r.t. image frame
     if (deadReckoning)
     {
         tfl.waitForTransform("image",string("marker")+markerID,timeStamp,ros::Duration(0.1));
         tfl.lookupTransform("image",string("marker")+markerID,timeStamp,transform);
         
         try
         {
             // Additional transforms for predictor
             tf::StampedTransform tfWorld2Marker;
             tf::StampedTransform tfMarker2Odom;
             tfl.waitForTransform("world",string("marker")+markerID,timeStamp,ros::Duration(0.1));
             tfl.lookupTransform("world",string("marker")+markerID,timeStamp,tfWorld2Marker);
             tfl.waitForTransform("ugv0/base_footprint","ugv0/odom",timeStamp,ros::Duration(0.1));
             tfl.lookupTransform("ugv0/base_footprint","ugv0/odom",timeStamp,tfMarker2Odom);
             
             // Save transform
             tf::Quaternion temp_quat = tfWorld2Marker.getRotation();
             Quaterniond qW2M = Quaterniond(temp_quat.getW(),temp_quat.getX(),temp_quat.getY(),temp_quat.getZ());
             temp_quat = tfMarker2Odom.getRotation();
             Quaterniond qM2O = Quaterniond(temp_quat.getW(),temp_quat.getX(),temp_quat.getY(),temp_quat.getZ());
             qWorld2Odom = qW2M*qM2O;
         }
         catch (tf::TransformException e)
         {
         }
     }
     // else, use quaternion from image to ugv0 transform
     tf::Quaternion temp_quat = transform.getRotation();
     Quaterniond quat = Quaterniond(temp_quat.getW(),temp_quat.getX(),temp_quat.getY(),temp_quat.getZ());
     
     // Undistort image coordinates. Returns normalized Euclidean coordinates
     double ptsArray[2] = {center->x,center->y};
     cv::Mat pts(1,1,CV_64FC2,ptsArray);
     cv::Mat undistPts;
     cv::undistortPoints(pts,undistPts,camMat,distCoeffs);
     Vector3d x;
     x << undistPts.at<double>(0,0),undistPts.at<double>(0,1),1/trans(2);
     
     // Target velocities expressed in camera coordinates
     Vector3d vTc = quat*vTt;
     
     // Observer velocities
     Vector3d b = vTc - vCc;
     Vector3d w = -wGCc; //Vector3d::Zero();
     
     // EKF update
     Matrix<double,3,2> K = P*H.transpose()*(H*P*H.transpose()+R).inverse();
     xhat += K*(x.head<2>()-xhat.head<2>());
     P = (Matrix3d::Identity()-K*H)*P;
     
     // Publish output
     publishOutput(x,xhat,trans,timeStamp);
     
     if (!artificialSwitching)
     {
         // Restart watchdog timer for feature visibility check
         watchdogTimer.start();
     }
 }
Example #15
int RosAriaNode::Setup()
{
  // Note, various objects are allocated here which are never deleted (freed), since Setup() is only supposed to be
  // called once per instance, and these objects need to persist until the process terminates.

  robot = new ArRobot();

  ArArgumentBuilder *args = new ArArgumentBuilder(); //  never freed
  ArArgumentParser *argparser = new ArArgumentParser(args); // Warning never freed
  argparser->loadDefaultArguments(); // adds any arguments given in /etc/Aria.args.  Useful on robots with unusual serial port or baud rate (e.g. pioneer lx)

  // Now add any parameters given via ros params (see RosAriaNode constructor):

  // if serial port parameter contains a ':' character, then interpret it as hostname:tcpport
  // for wireless serial connection. Otherwise, interpret it as a serial port name.
  size_t colon_pos = serial_port.find(":");
  if (colon_pos != std::string::npos)
  {
    args->add("-remoteHost"); // pass robot's hostname/IP address to Aria
    args->add(serial_port.substr(0, colon_pos).c_str());
    args->add("-remoteRobotTcpPort"); // pass robot's TCP port to Aria
    args->add(serial_port.substr(colon_pos+1).c_str());
  }
  else
  {
    args->add("-robotPort"); // pass robot's serial port to Aria
    args->add(serial_port.c_str());
  }

  // if a baud rate was specified in baud parameter
  if(serial_baud != 0)
  {
    args->add("-robotBaud");
    char tmp[100];
    snprintf(tmp, 100, "%d", serial_baud);
    args->add(tmp);
  }
  
  if( debug_aria )
  {
    // turn on all ARIA debugging
    args->add("-robotLogPacketsReceived"); // log received packets
    args->add("-robotLogPacketsSent"); // log sent packets
    args->add("-robotLogVelocitiesReceived"); // log received velocities
    args->add("-robotLogMovementSent");
    args->add("-robotLogMovementReceived");
    ArLog::init(ArLog::File, ArLog::Verbose, aria_log_filename.c_str(), true);
  }

  // Connect to the robot
  conn = new ArRobotConnector(argparser, robot); // warning never freed
  if (!conn->connectRobot()) {
    ROS_ERROR("RosAria: ARIA could not connect to robot! (Check ~port parameter is correct, and permissions on port device.)");
    return 1;
  }

  // causes ARIA to load various robot-specific hardware parameters from the robot parameter file in /usr/local/Aria/params
  if(!Aria::parseArgs())
  {
    ROS_ERROR("RosAria: ARIA error parsing ARIA startup parameters!");
    return 1;
  }

  // Start dynamic_reconfigure server
  dynamic_reconfigure_server = new dynamic_reconfigure::Server<rosaria::RosAriaConfig>;

  robot->lock();

  // Setup Parameter Minimums
  rosaria::RosAriaConfig dynConf_min;

  //arbitrary non-zero values so dynamic reconfigure isn't STUPID
  dynConf_min.trans_vel_max = 0.1; 
  dynConf_min.rot_vel_max = 0.1; 
  dynConf_min.trans_accel = 0.1;
  dynConf_min.trans_decel = 0.1;
  dynConf_min.rot_accel = 0.1;
  dynConf_min.rot_decel = 0.1; 
  
  // I'm setting these lower bounds relatively arbitrarily, feel free to lower them.
  dynConf_min.TicksMM     = 10;
  dynConf_min.DriftFactor = -200;
  dynConf_min.RevCount    = -32760;
  
  dynamic_reconfigure_server->setConfigMin(dynConf_min);
  
  rosaria::RosAriaConfig dynConf_max;
  dynConf_max.trans_vel_max = robot->getAbsoluteMaxTransVel() / 1000.0; 
  dynConf_max.rot_vel_max = robot->getAbsoluteMaxRotVel() *M_PI/180.0; 
  dynConf_max.trans_accel = robot->getAbsoluteMaxTransAccel() / 1000.0;
  dynConf_max.trans_decel = robot->getAbsoluteMaxTransDecel() / 1000.0;
  dynConf_max.rot_accel = robot->getAbsoluteMaxRotAccel() * M_PI/180.0;
  dynConf_max.rot_decel = robot->getAbsoluteMaxRotDecel() * M_PI/180.0;
  
  // I'm setting these upper bounds relatively arbitrarily, feel free to increase them.
  dynConf_max.TicksMM     = 200;
  dynConf_max.DriftFactor = 200;
  dynConf_max.RevCount    = 32760;
  
  dynamic_reconfigure_server->setConfigMax(dynConf_max);


  dynConf_default.trans_vel_max = robot->getTransVelMax() / 1000.0; 
  dynConf_default.rot_vel_max = robot->getRotVelMax() *M_PI/180.0; 
  dynConf_default.trans_accel = robot->getTransAccel() / 1000.0;
  dynConf_default.trans_decel = robot->getTransDecel() / 1000.0;
  dynConf_default.rot_accel   = robot->getRotAccel() * M_PI/180.0;
  dynConf_default.rot_decel   = robot->getRotDecel() * M_PI/180.0;

/*  ROS_ERROR("ON ROBOT NOW\n\
Trans vel max: %f\n\
Rot vel max: %f\n\
\n\
trans accel: %f\n\
trans decel: %f\n\
rot accel: %f\n\
rot decel: %f", robot->getTransVelMax(), robot->getRotVelMax(), robot->getTransAccel(), robot->getTransDecel(), robot->getRotAccel(), robot->getRotDecel());

  ROS_ERROR("IN DEFAULT CONFIG\n\
Trans vel max: %f\n\
Rot vel max: %f\n\
\n\
trans accel: %f\n\
trans decel: %f\n\
rot accel: %f\n\
rot decel: %f\n", dynConf_default.trans_vel_max,  dynConf_default.rot_vel_max, dynConf_default.trans_accel, dynConf_default.trans_decel, dynConf_default.rot_accel, dynConf_default.rot_decel);*/

  TicksMM = robot->getOrigRobotConfig()->getTicksMM();
  DriftFactor = robot->getOrigRobotConfig()->getDriftFactor();
  RevCount = robot->getOrigRobotConfig()->getRevCount();

  dynConf_default.TicksMM     = TicksMM;
  dynConf_default.DriftFactor = DriftFactor;
  dynConf_default.RevCount    = RevCount;
  
  dynamic_reconfigure_server->setConfigDefault(dynConf_default);

  for(int i = 0; i < 16; i++)
  {
    sonar_tf_array[i].header.frame_id = frame_id_base_link;
    std::stringstream _frame_id;
    _frame_id << "sonar" << i;
    sonar_tf_array[i].child_frame_id = _frame_id.str();
    ArSensorReading* _reading = NULL;
    _reading = robot->getSonarReading(i);
    sonar_tf_array[i].transform.translation.x = _reading->getSensorX() / 1000.0;
    sonar_tf_array[i].transform.translation.y = _reading->getSensorY() / 1000.0;
    sonar_tf_array[i].transform.translation.z = 0.19;
    sonar_tf_array[i].transform.rotation = tf::createQuaternionMsgFromYaw(_reading->getSensorTh() * M_PI / 180.0);
  }

  for (int i=0;i<16;i++) {
      sensor_msgs::Range r;
      ranges.data.push_back(r);
  }

  int i=0,j=0;
  if (sonars__crossed_the_streams) {
    i=8;
    j=8;
  }
  for(; i<16; i++) {
    //populate the RangeArray msg
    std::stringstream _frame_id;
    _frame_id << "sonar" << i;
    ranges.data[i].header.frame_id = _frame_id.str();
    ranges.data[i].radiation_type = 0;
    ranges.data[i].field_of_view = 0.2618f; 
    ranges.data[i].min_range = 0.03f;
    ranges.data[i].max_range = 5.0f;
  }

  // Enable the motors
  robot->enableMotors();

  robot->disableSonar();

  // Initialize bumpers with robot number of bumpers
  bumpers.front_bumpers.resize(robot->getNumFrontBumpers());
  bumpers.rear_bumpers.resize(robot->getNumRearBumpers());

  robot->unlock();

  pose_pub = n.advertise<nav_msgs::Odometry>("pose",1000);
  bumpers_pub = n.advertise<rosaria::BumperState>("bumper_state",1000);

  voltage_pub = n.advertise<std_msgs::Float64>("battery_voltage", 1000);
  
  combined_range_pub = n.advertise<rosaria::RangeArray>("ranges", 1000,
    boost::bind(&RosAriaNode::sonarConnectCb,this),
    boost::bind(&RosAriaNode::sonarDisconnectCb, this));

  for(int i =0; i < 16; i++) {
    std::stringstream topic_name;
    topic_name << "range" << i;
    range_pub[i] = n.advertise<sensor_msgs::Range>(topic_name.str().c_str(), 1000,
      boost::bind(&RosAriaNode::sonarConnectCb,this),
      boost::bind(&RosAriaNode::sonarDisconnectCb, this));
  }
  recharge_state_pub = n.advertise<std_msgs::Int8>("battery_recharge_state", 5, true /*latch*/ );
  recharge_state.data = -2;
  state_of_charge_pub = n.advertise<std_msgs::Float32>("battery_state_of_charge", 100);

  motors_state_pub = n.advertise<std_msgs::Bool>("motors_state", 5, true /*latch*/ );
  motors_state.data = false;
  published_motors_state = false;
 
  // subscribe to services
  cmdvel_sub = n.subscribe( "cmd_vel", 1, (boost::function <void(const geometry_msgs::TwistConstPtr&)>)
    boost::bind(&RosAriaNode::cmdvel_cb, this, _1 ));

  // advertise enable/disable services
  enable_srv = n.advertiseService("enable_motors", &RosAriaNode::enable_motors_cb, this);
  disable_srv = n.advertiseService("disable_motors", &RosAriaNode::disable_motors_cb, this);
 
  veltime = ros::Time::now();
  sonar_tf_timer = n.createTimer(ros::Duration(0.033), &RosAriaNode::sonarCallback, this);
  sonar_tf_timer.stop();

  dynamic_reconfigure_server->setCallback(boost::bind(&RosAriaNode::dynamic_reconfigureCB, this, _1, _2));

  // callback will  be called by ArRobot background processing thread for every SIP data packet received from robot
  robot->addSensorInterpTask("ROSPublishingTask", 100, &myPublishCB);

  // Run ArRobot background processing thread
  robot->runAsync(true);

  return 0;
}
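The sonar timer created and immediately stopped at the end of Setup() is started again once a subscriber connects to one of the range topics; the disconnect side is shown in Example #7. A sketch of the matching connect callback, assuming it simply mirrors the disconnect logic.

// Sketch: plausible counterpart to sonarDisconnectCb() from Example #7 (illustrative).
void RosAriaNode::sonarConnectCb()
{
  if (!robot->tryLock()) {
    ROS_ERROR("Skipping sonarConnectCb because could not lock");
    return;
  }
  if (!robot->areSonarsEnabled())
  {
    robot->enableSonar();
    sonar_tf_timer.start();
  }
  robot->unlock();
}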
Example #16
void PlaceDetector::processImage()
{
    if(!currentImage.empty())
    {
        /*  int satLower =  30;

        int satUpper =  230;

        int valLower =  30;

        int valUpper = 230;*/


        timer.stop();

        //  Mat img;

        //  cv::cvtColor(currentImage,img,CV_BGR2HSV);

        Mat hueChannel= ImageProcess::generateChannelImage(currentImage,0,satLower,satUpper,valLower,valUpper);

        Mat hueChannelFiltered;

        cv::medianBlur(hueChannel, hueChannelFiltered,3);

        vector<bubblePoint> hueBubble = bubbleProcess::convertGrayImage2Bub(hueChannelFiltered,focalLengthPixels,180);
        vector<bubblePoint> reducedHueBubble = bubbleProcess::reduceBubble(hueBubble);



        /************************** Perform filtering and obtain resulting mat images ***********************/
        //  Mat satChannel= ImageProcess::generateChannelImage(img,1,satLower,satUpper,valLower,valUpper);
        Mat valChannel= ImageProcess::generateChannelImage(currentImage,2,satLower,satUpper,valLower,valUpper);
        /*****************************************************************************************************/

        /*************************** Convert images to bubbles ***********************************************/

        //  vector<bubblePoint> satBubble = bubbleProcess::convertGrayImage2Bub(satChannel,focalLengthPixels,255);
        vector<bubblePoint> valBubble = bubbleProcess::convertGrayImage2Bub(valChannel,focalLengthPixels,255);

        /*****************************************************************************************************/


        /***************** Reduce the bubbles ********************************************************/
        //   vector<bubblePoint> reducedSatBubble = bubbleProcess::reduceBubble(satBubble);
        vector<bubblePoint> reducedValBubble = bubbleProcess::reduceBubble(valBubble);


        // Calculate statistics
        //  bubbleStatistics statsHue =  bubbleProcess::calculateBubbleStatistics(reducedHueBubble,180);
        // bubbleStatistics statsSat =  bubbleProcess::calculateBubbleStatistics(reducedSatBubble,255);
        bubbleStatistics statsVal =  bubbleProcess::calculateBubbleStatistics(reducedValBubble,255);

        qDebug()<<"Bubble statistics: "<<statsVal.mean<<statsVal.variance;

        currentBasePoint.avgVal = statsVal.mean;
        currentBasePoint.varVal = statsVal.variance;
        currentBasePoint.id = image_counter;
        QString imagefilePath = imagesPath;
        imagefilePath.append("/rgb_");
        imagefilePath.append(QString::number(image_counter)).append(".jpg");
        imwrite(imagefilePath.toStdString().data(),currentImage);


        //imwrite()
        currentBasePoint.status = 0;

        /*********************** WE CHECK FOR THE UNINFORMATIVENESS OF THE FRAME   *************************/
        if(statsVal.mean <= this->tau_val_mean || statsVal.variance <= this->tau_val_var)
        {

            //   qDebug()<<"This image is uninformative"<<image_counter;

            currentBasePoint.status = 1;

            //  this->shouldProcess = true;


            // If we don't have an initialized window then initialize
            if(!this->tempwin)
            {
                this->tempwin = new TemporalWindow();
                this->tempwin->tau_n = this->tau_n;
                this->tempwin->tau_w = this->tau_w;
                this->tempwin->startPoint = image_counter;
                this->tempwin->endPoint = image_counter;
                this->tempwin->id = twindow_counter;

                this->tempwin->members.push_back(currentBasePoint);

            }
            else
            {

                this->tempwin->endPoint = image_counter;

                this->tempwin->members.push_back(currentBasePoint);

            }


            ///  dbmanager.insertBasePoint(currentBasePoint);
            wholebasepoints.push_back(currentBasePoint);

            //  previousBasePoint = currentBasePoint;

            image_counter++;

            detector.currentImage.release();

            //  timer.start();

            detector.shouldProcess = true;

            return;


        }
        /***********************************  IF THE FRAME IS INFORMATIVE *************************************************/
        else
        {
            Mat totalInvariants;

            qint64 start =  QDateTime::currentMSecsSinceEpoch();

            DFCoefficients dfcoeff = bubbleProcess::calculateDFCoefficients(reducedHueBubble,noHarmonics,noHarmonics);
            Mat hueInvariants = bubbleProcess::calculateInvariantsMat(dfcoeff,noHarmonics, noHarmonics);

            //  totalInvariants = hueInvariants.clone();


            cv::Mat logTotal;

            // qDebug()<<hueInvariants.rows<<hueInvariants.cols<<hueInvariants.at<float>(0,10);

            Mat grayImage;

            cv::cvtColor(currentImage,grayImage,CV_BGR2GRAY);

            std::vector<Mat> sonuc = ImageProcess::applyFilters(grayImage);

            for(uint j = 0; j < sonuc.size(); j++)
            {
                vector<bubblePoint> imgBubble = bubbleProcess::convertGrayImage2Bub(sonuc[j],focalLengthPixels,255);

                vector<bubblePoint> resred = bubbleProcess::reduceBubble(imgBubble);

                DFCoefficients dfcoeff =  bubbleProcess::calculateDFCoefficients(resred,noHarmonics,noHarmonics);

                Mat invariants=  bubbleProcess::calculateInvariantsMat(dfcoeff,noHarmonics,noHarmonics);

                if(j==0)
                    totalInvariants = invariants.clone();
                else
                    cv::hconcat(totalInvariants, invariants, totalInvariants);


            }



            //  qDebug()<<totalInvariants.at<float>(0,64);

            cv::log(totalInvariants,logTotal);
            logTotal = logTotal/25;
            cv::transpose(logTotal,logTotal);

            qint64 stop = QDateTime::currentMSecsSinceEpoch();

            qDebug()<<"Bubble time"<<(stop-start);
            // TOTAL INVARIANTS N X 1 vector
            for(int kk = 0; kk < logTotal.rows; kk++)
            {
                if(logTotal.at<float>(kk,0) < 0)
                    logTotal.at<float>(kk,0) = 0.5;
            }

            //   qDebug()<<logTotal.rows<<logTotal.cols<<logTotal.at<float>(10,0);

            // We don't have a previous base point
            if(previousBasePoint.id == 0)
            {
                currentBasePoint.id = image_counter;
                currentBasePoint.invariants = logTotal;
                previousBasePoint = currentBasePoint;


                currentPlace->members.push_back(currentBasePoint);


                /// dbmanager.insertBasePoint(currentBasePoint);
                wholebasepoints.push_back(currentBasePoint);

            }
            else
            {

                currentBasePoint.id = image_counter;
                currentBasePoint.invariants = logTotal;

                //   double result = compareHistHK(currentBasePoint.invariants,previousBasePoint.invariants, CV_COMP_CHISQR);

                // MY VERSION FOR CHISQR, SIMPLER!!
                double result= compareHKCHISQR(currentBasePoint.invariants,previousBasePoint.invariants);

                qDebug()<<"Invariant diff between "<<currentBasePoint.id<<previousBasePoint.id<<"is"<<result;

                //  qDebug()<<"Invariant diff between "<<currentBasePoint.id<<previousBasePoint.id<<"is"<<result<<result2;//previousBasePoint.invariants.rows<<currentBasePoint.invariants.rows;

                //   qDebug()<<currentBasePoint.invariants.at<float>(1,0)<<currentBasePoint.invariants.at<float>(2,0)<<previousBasePoint.invariants.at<float>(1,0)<<previousBasePoint.invariants.at<float>(2,0);

                // JUST FOR DEBUGGING-> WRITES INVARIANT TO THE HOME FOLDER
                //   writeInvariant(previousBasePoint.invariants,previousBasePoint.id);

                ///////////////////////////// IF THE FRAMES ARE COHERENT ///////////////////////////////////////////////////////////////////////////////////////////////////////
                if(result <= tau_inv && result > 0)
                {

                    ///  dbmanager.insertBasePoint(currentBasePoint);
                    wholebasepoints.push_back(currentBasePoint);

                    /// If we have a temporal window
                    if(tempwin)
                    {
                        // Temporal window will extend, we are still looking for the next incoming frames
                        if(tempwin->checkExtensionStatus(currentBasePoint.id))
                        {

                            tempwin->cohMembers.push_back(currentBasePoint);


                            basepointReservoir.push_back(currentBasePoint);


                        }
                        // Temporal window will not extend anymore, we should check whether it is really a temporal window or not
                        else
                        {

                            float area = this->tempwin->totalDiff/(tempwin->endPoint - tempwin->startPoint+1);

                            qDebug()<<"Temporal Window Area"<<area;

                            // This is a valid temporal window
                            if(tempwin->endPoint - tempwin->startPoint >= tau_w && area>= tau_avgdiff)
                            {
                                qDebug()<<"New Place";
                                currentPlace->calculateMeanInvariant();

                                qDebug()<<"Current place mean invariant: "<<currentPlace->meanInvariant.rows<<currentPlace->meanInvariant.cols<<currentPlace->meanInvariant.at<float>(50,0);

                                if(currentPlace->memberIds.rows >= tau_p){

                                    dbmanager.insertPlace(*currentPlace);

                                    std_msgs::Int16 plID;
                                    plID.data = this->placeID;

                                    placedetectionPublisher.publish(plID);

                                    this->detectedPlaces.push_back(*currentPlace);

                                    this->placeID++;
                                }


                                delete currentPlace;
                                currentPlace = 0;
                                // this->placeID++;

                                /*    cv::Mat result = DatabaseManager::getPlaceMeanInvariant(this->placeID-1);

                                qDebug()<<"Previous place mean invariant: "<<result.rows<<result.cols<<result.at<float>(50,0);

                                result = DatabaseManager::getPlaceMemberIds(this->placeID-1);

                                for(int k = 0; k< result.rows; k++){

                                    qDebug()<<"Previous place members: "<<result.rows<<result.cols<<result.at<unsigned short>(k,0);
                                }*/

                                currentPlace = new Place(this->placeID);

                                //   currentPlace->


                                basepointReservoir.push_back(currentBasePoint);

                                currentPlace->members = basepointReservoir;
                                basepointReservoir.clear();

                                dbmanager.insertTemporalWindow(*tempwin);

                                delete tempwin;
                                tempwin = 0;
                                this->twindow_counter++;
                                // A new place will be created. Current place will be published




                            }
                            // This is just a noisy temporal window. We should add the coherent basepoints to the current place
                            else
                            {
                                basepointReservoir.push_back(currentBasePoint);

                                delete tempwin;
                                tempwin = 0;

                                std::vector<BasePoint> AB;
                                AB.reserve( currentPlace->members.size() + basepointReservoir.size() ); // preallocate memory
                                AB.insert( AB.end(), currentPlace->members.begin(), currentPlace->members.end() );
                                AB.insert( AB.end(), basepointReservoir.begin(), basepointReservoir.end() );
                                currentPlace->members.clear();
                                currentPlace->members = AB;

                                basepointReservoir.clear();



                            }



                        }



                    }
                    else
                    {
                        currentPlace->members.push_back(currentBasePoint);

                    }



                }
                ///////////////////////// IF THE FRAMES ARE INCOHERENT /////////////////////////////////////
                else
                {
                    currentBasePoint.status = 2;

                    ///    dbmanager.insertBasePoint(currentBasePoint);
                    wholebasepoints.push_back(currentBasePoint);



                    // If we don't have a temporal window create one
                    if(!tempwin)
                    {
                        tempwin = new TemporalWindow();
                        this->tempwin->tau_n = this->tau_n;
                        this->tempwin->tau_w = this->tau_w;
                        this->tempwin->startPoint = image_counter;
                        this->tempwin->endPoint = image_counter;
                        this->tempwin->id = twindow_counter;
                        this->tempwin->totalDiff +=result;
                        this->tempwin->members.push_back(currentBasePoint);

                    }
                    // add the basepoint to the temporal window
                    else
                    {
                        // Temporal window will extend, we are still looking for the next incoming frames
                        if(tempwin->checkExtensionStatus(currentBasePoint.id))
                        {

                            this->tempwin->endPoint = image_counter;

                            this->tempwin->members.push_back(currentBasePoint);

                            this->tempwin->totalDiff +=result;

                            basepointReservoir.clear();


                        }
                        else
                        {
                            float avgdiff;

                            avgdiff = this->tempwin->totalDiff/(tempwin->endPoint - tempwin->startPoint+1);


                            qDebug()<<"Temporal Window Average Diff"<<avgdiff;

                            // This is a valid temporal window
                            if(tempwin->endPoint - tempwin->startPoint >= tau_w && avgdiff >= tau_avgdiff)
                            {
                                //  float summ = 0;
                                //Modifikasyon
                                /* for(int kl = 0; kl < tempwin->members.size(); kl++)
                                {
                                    BasePoint abasepoint = tempwin->members.at(kl);
                                    abasepoint.

                                }*/


                                qDebug()<<"New Place";
                                currentPlace->calculateMeanInvariant();

                                qDebug()<<"Current place mean invariant: "<<currentPlace->meanInvariant.rows<<currentPlace->meanInvariant.cols<<currentPlace->meanInvariant.at<float>(50,0);

                                if(currentPlace->memberIds.rows >= tau_p)
                                {

                                    dbmanager.insertPlace(*currentPlace);

                                    std_msgs::Int16 plID ;
                                    plID.data = this->placeID;

                                    placedetectionPublisher.publish(plID);


                                    this->detectedPlaces.push_back(*currentPlace);

                                    this->placeID++;
                                }


                                delete currentPlace;
                                currentPlace = 0;
                                // this->placeID++;

                                //  cv::Mat result = DatabaseManager::getPlaceMeanInvariant(this->placeID-1);

                                //  qDebug()<<"Previous place mean invariant: "<<result.rows<<result.cols<<result.at<float>(50,0);

                                //  result = DatabaseManager::getPlaceMemberIds(this->placeID-1);

                                /*    for(int k = 0; k< result.rows; k++){

                                    qDebug()<<"Previous place members: "<<result.rows<<result.cols<<result.at<unsigned short>(k,0);
                                }*/

                                currentPlace = new Place(this->placeID);

                                //   currentPlace->


                                //basepointReservoir.push_back(currentBasePoint);

                                currentPlace->members = basepointReservoir;
                                basepointReservoir.clear();

                                dbmanager.insertTemporalWindow(*tempwin);

                                delete tempwin;
                                tempwin = 0;
                                this->twindow_counter++;
                                // A new place will be created. Current place will be published




                            }
                            // This is just a noisy temporal window. We should add the coherent basepoints to the current place
                            else
                            {
                                //  basepointReservoir.push_back(currentBasePoint);

                                delete tempwin;
                                tempwin = 0;

                                std::vector<BasePoint> AB;
                                AB.reserve( currentPlace->members.size() + basepointReservoir.size() ); // preallocate memory
                                AB.insert( AB.end(), currentPlace->members.begin(), currentPlace->members.end() );
                                AB.insert( AB.end(), basepointReservoir.begin(), basepointReservoir.end() );
                                currentPlace->members.clear();
                                currentPlace->members = AB;

                                basepointReservoir.clear();



                            }


                            tempwin = new TemporalWindow();
                            this->tempwin->tau_n = this->tau_n;
                            this->tempwin->tau_w = this->tau_w;
                            this->tempwin->startPoint = image_counter;
                            this->tempwin->endPoint = image_counter;
                            this->tempwin->id = twindow_counter;

                            this->tempwin->members.push_back(currentBasePoint);



                        }


                        /*     this->tempwin->endPoint = image_counter;

                          this->tempwin->members.push_back(currentBasePoint);

                          basepointReservoir.clear();*/

                    }



                }


                previousBasePoint = currentBasePoint;

                //////////////////////////////////////////////////////////////////////////////////////////////////
            }

            // DatabaseManager::insertInvariants(HUE_TYPE,frameNumber,invariants);
            //   qDebug()<<"Image Counter: "<<image_counter;
            image_counter++;

            //this->shouldProcess = true;

        }



    }

    this->currentImage.release();

    this->shouldProcess = true;

    //  timer.start();



}
Example #17
int main(int argc, char **argv) {
	int mobot_count = 1; //counter for number of mobots working
	if (argv[1] != NULL) {
		if (atoi(argv[1]) == 2)
			mobot_count = 2;
		if (atoi(argv[1]) >= 3)
			mobot_count = 3;
	}
	//initialising ROS
	ros::init(argc, argv, "path_planner");
	ros::NodeHandle nh;
	nh_ = &nh;

	//initialising Mobot 1
	ros::Subscriber infraredScan_1 = nh.subscribe("/mobot0/infrared", 1, irCallback1);
	ros::Subscriber userInput_1 = nh.subscribe("/mobot0/waypoint_user", 20, userCallback);
	ros::Subscriber userInput = nh.subscribe("/path_planner/waypoint_user", 20, userInputCallback);
	nextPoseRel_1 = nh.advertise<mobots_msgs::Pose2DPrio> ("/mobot0/waypoint_rel", 20);
	mobots[0].id = 1;
	mobots[0].theta = 0.0;
	mobots[0].x = 0.0;
	mobots[0].y = 0.0;
	mobots[0].obstacle = false;
	mobots[0].userControlled = 0;
	mobots[0].timer = 0;
	timerMobot_1 = nh.createTimer(ros::Duration(1), timerCallback1);
#if 0
	if(mobot_count > 1){ //initialising Mobot 2
		ros::Subscriber infraredScan_2 = nh.subscribe("/mobot2/infrared", 1, irCallback2);
		ros::Subscriber userInput_2 = nh.subscribe("/mobot2/waypoint_user", 20, userCallback);
		nextPoseRel_2 = nh.advertise<mobots_msgs::Pose2DPrio> ("/mobot2/waypoint_rel", 20);
		mobots[1].id = 2;
		mobots[1].theta = 0.0;
		mobots[1].x = 0.0;
		mobots[1].y = 0.0;
		mobots[1].obstacle = false;
		mobots[1].userControlled = 0;
		mobots[1].timer = 0;
		timerMobot_2 = nh.createTimer(ros::Duration(1), timerCallback2);
	}
	if(mobot_count > 2){ //initialising Mobot 3
		ros::Subscriber infraredScan_3 = nh.subscribe("/mobot3/infrared", 1, irCallback3);
		ros::Subscriber userInput_3 = nh.subscribe("/mobot3/waypoint_user", 20, userCallback);
		nextPoseRel_3 = nh.advertise<mobots_msgs::Pose2DPrio> ("/mobot3/waypoint_rel", 20);
		mobots[2].id = 3;
		mobots[2].theta = 0.0;
		mobots[2].x = 0.0;
		mobots[2].y = 0.0;
		mobots[2].obstacle = false;
		mobots[2].userControlled = 0;
		mobots[2].timer = 0;
		timerMobot_3 = nh.createTimer(ros::Duration(1), timerCallback3);
	}
	keyReqServer = nh.advertiseService("/path_planner/keyboard_request", keyReqCallback);

	if(argv[2] != NULL){
		std::string str = std::string(argv[2]);
		if(str == "square"){
			exploreSquare(1, 1000);
		}
	}

	/* Entering the event loop. If there is no obstacle and the user is not controlling the
	 * Mobot, it drives straight until something occurs. */
	while (ros::ok()) {
		for (int i = 0; i <= mobot_count-1; i++) {
			if (mobots[i].userControlled == 0 && !mobots[i].obstacle) {
				moveMobot(mobots[i].id, 1);
			}
		}
		wait(5000, true);
	}
	//stopping the timers
	timerMobot_1.stop();
	if(mobot_count > 1)	timerMobot_2.stop();
	if(mobot_count > 2) timerMobot_3.stop();
	return 0;
#endif
	ros::spin();
}
Example #18
  bool grab_vicon_pose_callback(vicon_mocap::viconGrabPose::Request& req, vicon_mocap::viconGrabPose::Response& resp)
  {
    ROS_INFO("Got request for a VICON pose");
    updateTimer.stop();
    tf::StampedTransform transform;
    //tf::Quaternion quat;

    if (not segment_data_enabled)
    {
      MyClient.EnableSegmentData();
      ROS_ASSERT(MyClient.IsSegmentDataEnabled().Enabled);
      segment_data_enabled = true;
    }

    // Gather data:
    int N = req.n_measurements;
    double accum_trans[3] = {0, 0, 0};
    double accum_quat[4] = {0, 0, 0, 0};
    Result::Enum the_result;
    int n_success = 0;
    for (int k = 0; k < N; k++)
    {
      ros::Duration(1 / vicon_capture_rate).sleep(); // Wait at least as long as vicon needs to capture the next frame
      if ((the_result = MyClient.GetFrame().Result) == Result::Success)
      {
        try
        {
          Output_GetSegmentGlobalTranslation trans = MyClient.GetSegmentGlobalTranslation(req.subject_name,
                                                                                          req.segment_name);
          Output_GetSegmentGlobalRotationQuaternion quat =
              MyClient.GetSegmentGlobalRotationQuaternion(req.subject_name, req.segment_name);
          if ((!trans.Occluded) && (!quat.Occluded))
          {
            accum_trans[0] += trans.Translation[0] / 1000;
            accum_trans[1] += trans.Translation[1] / 1000;
            accum_trans[2] += trans.Translation[2] / 1000;
            accum_quat[0] += quat.Rotation[3];
            accum_quat[1] += quat.Rotation[0];
            accum_quat[2] += quat.Rotation[1];
            accum_quat[3] += quat.Rotation[2];
            n_success++;
          }
        }
        catch (tf::TransformException ex)
        {
          ROS_ERROR("%s", ex.what());
          resp.success = false;
          return false; // TODO: should we really bail here, or just try again?
        }
      }
      else
      {
        if (the_result != Result::NoFrame)
        {
          ROS_ERROR_STREAM("GetFrame() returned " << (int)the_result);
        }
      }
    }

    ROS_INFO("Averaging %d measurements", n_success);
    // Average the data:
    double normalized_quat[4];
    double quat_norm = sqrt(
                            accum_quat[0] * accum_quat[0] + accum_quat[1] * accum_quat[1] + accum_quat[2]
                                * accum_quat[2] + accum_quat[3] * accum_quat[3]);
    for (int i = 0; i < 4; i++)
    {
      normalized_quat[i] = accum_quat[i] / quat_norm;
    }
    double normalized_vector[3];
    // Copy to inputs:
    for (int i = 0; i < 3; i++)
    {
      normalized_vector[i] = accum_trans[i] / n_success;
    }

    // copy what we used to service call response:
    resp.success = true;
    resp.pose.header.stamp = ros::Time::now();
    resp.pose.header.frame_id = tf_ref_frame_id;
    resp.pose.pose.position.x = normalized_vector[0];
    resp.pose.pose.position.y = normalized_vector[1];
    resp.pose.pose.position.z = normalized_vector[2];
    resp.pose.pose.orientation.w = normalized_quat[0];
    resp.pose.pose.orientation.x = normalized_quat[1];
    resp.pose.pose.orientation.y = normalized_quat[2];
    resp.pose.pose.orientation.z = normalized_quat[3];

    updateTimer.start();
    return true;
  }
Example #19
		void shutdown(){
		    timer_dsa.stop();
		    timer_publish.stop();
		    timer_diag.stop();
		    nh_.shutdown();
		}
Example #20
		/********** callback for the controller **********/
		void joy_callback(const sensor_msgs::Joy& msg)
		{
			watchdogTimer.stop();
			
			command_from_xbox = geometry_msgs::Twist();// cleaning the xbox twist message
			a_button = msg.buttons[0];// a button lands
			if (a_button > 0)
			{
				land_pub.publish(std_msgs::Empty());
			}
			x_button = msg.buttons[2];// x button sends constant
			if (x_button > 0)
			{
				send_0 = true;
			}
			
			b_button = msg.buttons[1];// b button kills motors
			if (b_button > 0)
			{
				reset_pub.publish(std_msgs::Empty());
			}
			
			y_button = msg.buttons[3];// y button takes off
			if (y_button > 0)
			{
				takeoff_pub.publish(std_msgs::Empty());
			}
				
			dpad_l = msg.buttons[11];//dpad left value, fine adjustment of tilt angle negatively
			dpad_r = msg.buttons[12];//dpad right value, fine adjustment of tilt angle positively
			dpad_u = msg.buttons[13];//dpad up value, coarse adjustment of tilt angle positively
			dpad_d = msg.buttons[14];//dpad down value, coarse adjustment of tilt angle negatively
			
			int tilt_neg = -1*dpad_l - 10*dpad_d;// tilt negatively
			int tilt_pos = 1*dpad_r + 10*dpad_u;// tilt positively
			gimbal_state_desired.angular.y = gimbal_state_desired.angular.y + tilt_neg + tilt_pos;// adjust the gimbal
			camera_state_pub.publish(gimbal_state_desired);// update the angle
			
			left_bumper = msg.buttons[4];// left bumper for using the tracking as the controller output
			right_bumper = msg.buttons[5];// right bumper for using the mocap as the controller output
			
			start_autonomous = (left_bumper > 0) || (right_bumper > 0);// start the autonomous if either are greater than 0
			
			right_stick_vert = joy_deadband(msg.axes[4]);// right thumbstick vert controls linear x
			command_from_xbox.linear.x = joy_gain*right_stick_vert;
			
			right_stick_horz = joy_deadband(msg.axes[3]);// right thumbstick horz controls linear y
			command_from_xbox.linear.y = joy_gain*right_stick_horz;
			
			left_stick_vert = joy_deadband(msg.axes[1]);// left thumbstick vert controls linear z
			command_from_xbox.linear.z = joy_gain*left_stick_vert;
			
			left_stick_horz = joy_deadband(msg.axes[0]);// left thumbstick horz controls angular z
			command_from_xbox.angular.z = -1*joy_gain*left_stick_horz;
			
			if (!start_autonomous)// if not using the autonomous velocities as output, indicate that a new control input was received
			{
				if (send_0)
				{
					//command_from_xbox.linear.x = 0;
					//command_from_xbox.linear.y = 0;
					//command_from_xbox.linear.z = 0;
					//command_from_xbox.angular.z = 0;
					send_0 = false;
				}
				recieved_command_from_xbox = true;
			}
			
			watchdogTimer.start();
		}
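The joy_deadband() helper used above is not part of this snippet; a plausible implementation zeroes out small stick deflections so noisy axes do not make the bebop drift. The threshold value is an assumption.

		// Sketch: possible deadband helper matching the calls above (illustrative; needs <cmath>).
		double joy_deadband(double value)
		{
			const double deadband = 0.1;  // assumed minimum stick deflection to register
			return (std::abs(value) < deadband) ? 0.0 : value;
		}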