// Move the real block!
  void feedbackCb( const InteractiveMarkerFeedbackConstPtr &feedback )
  {
    if (!action_server_.isActive())
    {
      ROS_INFO("[block logic] Got feedback but not active!");
      return;
    }
    switch ( feedback->event_type )
    {
    case visualization_msgs::InteractiveMarkerFeedback::MOUSE_DOWN:
      ROS_INFO_STREAM("[block logic] Staging " << feedback->marker_name);
      old_pose_ = feedback->pose;
      break;

    case visualization_msgs::InteractiveMarkerFeedback::MOUSE_UP:
      ROS_INFO_STREAM("[block logic] Now moving " << feedback->marker_name);
      moveBlock(old_pose_, feedback->pose);
      break;
    }

    interactive_m_server_.applyChanges();
  }
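For context, here is a minimal sketch of how a feedback callback like feedbackCb above is typically attached to the interactive marker server so that MOUSE_DOWN / MOUSE_UP events reach it. The class name BlockLogic, the helper name, the marker fields, and the frame are assumptions, not taken from the example:

#include <interactive_markers/interactive_marker_server.h>
#include <visualization_msgs/InteractiveMarker.h>

void BlockLogic::addMarkerWithCallback(const geometry_msgs::Pose& pose)  // hypothetical helper
{
  visualization_msgs::InteractiveMarker int_marker;
  int_marker.header.frame_id = "base_link";   // assumed fixed frame
  int_marker.name = "block_0";                // assumed marker name
  int_marker.pose = pose;
  int_marker.scale = 0.03;

  // One planar-move control is enough for RViz to generate mouse feedback events.
  visualization_msgs::InteractiveMarkerControl control;
  control.orientation.w = 1.0;
  control.interaction_mode = visualization_msgs::InteractiveMarkerControl::MOVE_PLANE;
  control.always_visible = true;
  int_marker.controls.push_back(control);

  // Inserting the marker together with the callback is what routes feedback into feedbackCb.
  interactive_m_server_.insert(int_marker,
                               boost::bind(&BlockLogic::feedbackCb, this, _1));
  interactive_m_server_.applyChanges();
}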
  void processGoal(const geometry_msgs::Pose& start_block_pose, const geometry_msgs::Pose& end_block_pose )
  {
    // Change the goal constraints on the servos to be less strict, so that the controllers don't die. This is a hack.
    nh_.setParam("/clam_trajectory_controller/joint_trajectory_action_node/constraints/elbow_pitch_joint/goal", 2); // originally it was 0.45
    nh_.setParam("/clam_trajectory_controller/joint_trajectory_action_node/constraints/shoulder_pan_joint/goal", 2); // originally it was 0.45
    nh_.setParam("/clam_trajectory_controller/joint_trajectory_action_node/constraints/wrist_pitch_joint/goal", 2); // originally it was 0.45
    nh_.setParam("/clam_trajectory_controller/joint_trajectory_action_node/constraints/gripper_roll_joint/goal", 2); // originally it was 0.45
    nh_.setParam("/clam_trajectory_controller/joint_trajectory_action_node/constraints/wrist_pitch_joint/goal", 2); // originally it was 0.45 (note: duplicates the wrist_pitch_joint line above)

    if( !pickAndPlace(start_block_pose, end_block_pose) )
    {
      ROS_ERROR_STREAM_NAMED("pick_place_moveit","Pick and place failed");

      if(action_server_.isActive()) // Make sure we haven't sent a fake goal
      {
        // Report failure via the result message (note: the goal is still
        // completed with setSucceeded rather than aborted)
        result_.success = false;
        action_server_.setSucceeded(result_);
      }
    }
    else
    {
      if(action_server_.isActive()) // Make sure we haven't sent a fake goal
      {
        // Report success
        result_.success = true;
        action_server_.setSucceeded(result_);
      }
    }

    // TODO: remove
    ros::shutdown();
  }
  void trackBucketPointCB(const geometry_msgs::Point::ConstPtr& msg)
  {
	  // make sure that the action hasn't been canceled
	  if (!as_.isActive())
		  return;
	  bucket_detected_ = true;
  }
 void ultraSonicsCB(const std_msgs::Int32::ConstPtr& dist)
 {
     // make sure that the action hasn't been canceled
     if (!as_.isActive())
         return;
     obstacle_detected_ = false;
     if ( dist->data > 0 ){
         obstacle_detected_ = true;
     }
 }
 void trackAreaCB(const std_msgs::Int32::ConstPtr& area)
 {
     // make sure that the action hasn't been canceled
     if (!as_.isActive())
         return;
     drop_flag = false;
     if ( area->data > 38000 ){
         drop_flag = true;
     }
 }
Example #6
    /**
     * @brief Preempt callback for the server, cancels the current running goal and all associated movement actions.
     */
    void preemptCb(){

        boost::unique_lock<boost::mutex> lock(move_client_lock_);
        move_client_.cancelGoalsAtAndBeforeTime(ros::Time::now());
        ROS_WARN("Current exploration task cancelled");

        if(as_.isActive()){
            as_.setPreempted();
        }

    }
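A minimal sketch of how a preempt callback like the one above is normally registered on the SimpleActionServer, usually in the owning class's constructor. The class name, node-handle argument, and action name are assumptions; the action type matches the exploration server shown later in this listing:

#include <actionlib/server/simple_action_server.h>
#include <frontier_exploration/ExploreTaskAction.h>

class ExplorationServer {                              // class name assumed
public:
  explicit ExplorationServer(ros::NodeHandle nh)
    : as_(nh, "explore_action", false)                 // action name assumed
  {
    // Route client cancel requests into the preempt callback shown above.
    as_.registerPreemptCallback(boost::bind(&ExplorationServer::preemptCb, this));
    as_.start();
  }

  void preemptCb();                                    // body as in the example above

private:
  actionlib::SimpleActionServer<frontier_exploration::ExploreTaskAction> as_;
};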
Example #7
File: node.cpp  Project: ErolB/Sub8
  void timer_callback(const ros::TimerEvent &) {
    if (!c3trajectory) return;

    ros::Time now = ros::Time::now();

    if (actionserver.isPreemptRequested()) {
      current_waypoint = c3trajectory->getCurrentPoint();
      current_waypoint.r.qdot = subjugator::Vector6d::Zero();  // zero velocities
      current_waypoint_t = now;

      // don't try to make output c3 continuous when cancelled - instead stop as quickly as possible
      c3trajectory.reset(new subjugator::C3Trajectory(current_waypoint.r, limits));
      c3trajectory_t = now;
    }

    if (actionserver.isNewGoalAvailable()) {
      boost::shared_ptr<const uf_common::MoveToGoal> goal = actionserver.acceptNewGoal();
      current_waypoint = subjugator::C3Trajectory::Waypoint(
          Point_from_PoseTwist(goal->posetwist.pose, goal->posetwist.twist), goal->speed,
          !goal->uncoordinated);
      current_waypoint_t = now;  // goal->header.stamp;
      this->linear_tolerance = goal->linear_tolerance;
      this->angular_tolerance = goal->angular_tolerance;
      c3trajectory_t = now;
    }

    while ((c3trajectory_t + traj_dt < now) and ros::ok()) {
      // ROS_INFO("Acting");

      c3trajectory->update(traj_dt.toSec(), current_waypoint,
                           (c3trajectory_t - current_waypoint_t).toSec());
      c3trajectory_t += traj_dt;
    }

    PoseTwistStamped msg;
    msg.header.stamp = c3trajectory_t;
    msg.header.frame_id = fixed_frame;
    msg.posetwist = PoseTwist_from_PointWithAcceleration(c3trajectory->getCurrentPoint());
    trajectory_pub.publish(msg);

    PoseStamped posemsg;
    posemsg.header.stamp = c3trajectory_t;
    posemsg.header.frame_id = fixed_frame;
    posemsg.pose = Pose_from_Waypoint(current_waypoint);
    waypoint_pose_pub.publish(posemsg);

    if (actionserver.isActive() &&
        c3trajectory->getCurrentPoint().is_approximately(
            current_waypoint.r, max(1e-3, linear_tolerance), max(1e-3, angular_tolerance)) &&
        current_waypoint.r.qdot == subjugator::Vector6d::Zero()) {
      actionserver.setSucceeded();
    }
  }
  void trackIRCB(const std_msgs::Int32::ConstPtr& msg)
  {
	  // make sure that the action hasn't been canceled
	  if (!as_.isActive())
		  return;
      if ( msg->data == 1 )
      {
          entered_ir = true;
      }
      if (entered_ir && (msg->data == 0))
      {
          ir_detected_ = true;
          entered_ir = false;
      }
  }
  void addBlocks(const geometry_msgs::PoseArrayConstPtr& msg)
  {
    interactive_m_server_.clear();
    interactive_m_server_.applyChanges();

    ROS_INFO("[block logic] Got block detection callback. Adding blocks.");
    geometry_msgs::Pose block;
    bool active = action_server_.isActive();

    for (unsigned int i=0; i < msg->poses.size(); i++)
    {
      block = msg->poses[i];
      addBlock(block, i, active, msg->header.frame_id);
    }
    ROS_INFO("[block logic] Added %d blocks to Rviz", int(msg->poses.size()));

    interactive_m_server_.applyChanges();

    msg_ = msg;
    initialized_ = true;
  }
  void trackPointCB(const geometry_msgs::Point::ConstPtr& msg)
  {
	  // make sure that the action hasn't been canceled
	  if (!as_.isActive())
	  {
		  twist_.angular.z = 0;
		  twist_.linear.x = 0;
		  twist_pub_.publish(twist_);
		  return;
	  }

	  if( !has_goal_)
	  {
		  twist_.angular.z = 0;
		  twist_.linear.x = 0;
		  twist_pub_.publish(twist_);
		  return;
	  }

	  if( obstacle_detected_ && msg->y < SVTIUS)
	  {
		  result_.success = false;
		  ROS_INFO("%s: Obstacle Detected", action_name_.c_str());
		  // finish the goal (reporting failure via the result) and stop processing this message
		  as_.setSucceeded(result_);
		  return;
	  }

	  feedback_.success = false;
	  twist_.angular.z = 0;
	  twist_.linear.x = 0;

	  if ( msg->y >= TOP_T
		&& msg->x <= RIGHT_T
		&& msg->x >= LEFT_T )
	  {
		  twist_pub_.publish(twist_);
		  ROS_INFO("%s: Succeeded", action_name_.c_str());
		  // set the action state to succeeded and stop processing this message
		  result_.success = true;
		  as_.setSucceeded(result_);
		  return;
	  }

	  if (drop_flag)
	  {
		  twist_pub_.publish(twist_);
		  ROS_INFO("%s: Succeeded", action_name_.c_str());
		  // set the action state to succeeded and stop processing this message
		  result_.success = true;
		  drop_flag = false;
		  as_.setSucceeded(result_);
		  return;
	  }

	  if (msg->y < TOP_T)
	  {
		  ROS_INFO("MOVE FORWARD");
		  twist_.linear.x = getLinVel(msg->y, TOP_T);
	  }

	  if (msg->x < LEFT_T)
	  {
		  ROS_INFO("TURN RIGHT");
		  twist_.angular.z = getAngVel(msg->x, LEFT_T);
	  }
	  else if (msg->x  > RIGHT_T)
	  {
		  ROS_INFO("TURN LEFT");
		  twist_.angular.z = -1.0 * getAngVel(msg->x, RIGHT_T);
	  }

	  twist_pub_.publish(twist_);
	  as_.publishFeedback(feedback_);
  }
  void Update(const ros::TimerEvent& e) {
    if(!server_.isActive() ||  is_running)
      return;
    
    if(got_goals)
      {
	is_running = true;
	ROS_INFO("Update--> Got goals of size %d", num_feature_goals_);
	feedback_.features.resize(num_feature_goals_);
	feedback_.signal.resize(num_feature_goals_);
	//	feedback_.signal.resize(num_feature_goals_);

	for(int i=0; i<num_feature_goals_; i++)
	  {
	    feedback_.signal[i].signal_type = "hue";
	    feedback_.signal[i].mean.resize(1);
	    feedback_.signal[i].variance.resize(1);
	    feedback_.signal[i].mean[0] = feature_mean_goals_[i];
	    feedback_.signal[i].variance[0] = feature_var_goals_[i];
	    feedback_.signal[i].size = 1;

	    mean_color = feature_mean_goals_[i];
	    window_size = feature_var_goals_[i];
	    int rangeMin = ((mean_color - window_size + 255)%255);
	    int rangeMax = ((mean_color + window_size + 255)%255);
	    ROS_INFO("Range [%d] : %d,%d",i,rangeMin,rangeMax);
	    ROS_INFO("Threshold : %d,%d",minCCThreshold,maxCCThreshold);
	    
	    if(rangeMin > rangeMax){
	      int temp = rangeMax;
	      rangeMax = rangeMin;
	      rangeMin = temp;
	    }
	    
	    if(minCCThreshold >= maxCCThreshold){
	      //ROS_INFO("Min radius must be smaller than Max radius");
	      is_running = false;
	      return;
	    }
	    
	    if(fabs(rangeMin - rangeMax) <= 2*window_size){
	      //ROS_INFO("REG rangeMin %d rangeMax %d", rangeMin, rangeMax);
	      inRange(*hue_image, Scalar((double)((uchar)rangeMin)),Scalar((double)((uchar)rangeMax)), *back_img);
	    }
	    else if ((mean_color + window_size) > 255){
	      //ROS_INFO("BIG rangeMin %d rangeMax %d", rangeMin, rangeMax);
	      inRange(*hue_image, Scalar((double)((uchar)rangeMax)),Scalar((double)((uchar)255)), *back_img);
	    }
	    else {
	      //ROS_INFO("SML rangeMin %d rangeMax %d", rangeMin, rangeMax);
	      inRange(*hue_image, Scalar((double)((uchar)0)),Scalar((double)((uchar)rangeMin)), *back_img);
	    }  
	    
	    Size ksize = Size(2 * blurKernel + 1,2 * blurKernel + 1);
	    ROS_INFO("Adding Gaussian Blur");
	    GaussianBlur(*back_img, *blurred_image, ksize, -1, -1);
	    
	    ROS_INFO("Thresholding --->");
	    threshold(*blurred_image, *temp_blurred_image, 110, 255, THRESH_BINARY); 
	    convertScaleAbs(*temp_blurred_image, *back_img, 1, 0);
	    
	    //Find Connected Components
	    ROS_INFO("Finding Connected Comps for feature %d",i);
	    getConnectedComponents(i);
	    ROS_INFO("After finding conn comps, num[%d] : %d",i,numCC[i]);
	    feedback_.features[i].num = numCC[i];
	    if(numCC[i] > 0)
	      feedback_.features[i].moments.resize(numCC[i]);
	  
	    ROS_INFO("Getting Moments");
	    if(numCC[i] > 0)
	      getMoments(i);

	    ROS_INFO("Updating KF ");
	    blobTracker[i].updateKalmanFiltersConnectedComponents();
	    if (numCC[i] > 0)
	      blobTracker[i].getFilteredBlobs(true);
	    else
	      blobTracker[i].getFilteredBlobs(false);
	    
	    RotatedRect box;
	    Point pt;
	    	    
	    for(int j=0; j<maxNumComponents; j++)
	      {
		FeatureBlob ftemp;
		blobTracker[i].filters_[j].getFiltered(ftemp);
		if(ftemp.getValid() && display_image)
		  {
		    Mat firstTemp, secondTemp;
		    ftemp.getValues(firstTemp, secondTemp);
		    pt.x = firstTemp.at<float>(0,0); pt.y = firstTemp.at<float>(1,0);
		    blobTracker[i].getBoxFromCov(pt, secondTemp, box);
		    
		    if (box.size.width > 0 && box.size.height > 0 && box.size.width < width && box.size.height < height)
		      {
			ellipse(*rgb_image, box, CV_RGB(255,255,255), 3, 8);
			circle(*rgb_image, pt, 3, CV_RGB(255, 255, 255), -1, 8);
		      }	    
		    
		  }
	      }
	    
	  }
	if (display_image) {
	  imshow(name, *rgb_image);
	}
	
	server_.publishFeedback(feedback_);
	is_running = false;
	waitKey(100);
      }
  }
  bool turnOdom(double radians)
  {
    // If the distance to travel is negligible, don't even try.
    if (fabs(radians) < 0.01)
      return true;
  
    while(radians < -M_PI) radians += 2*M_PI;
    while(radians > M_PI) radians -= 2*M_PI;

    //we will record transforms here
    tf::StampedTransform start_transform;
    tf::StampedTransform current_transform;

    try
    {
      //wait for the listener to get the first message
      listener_.waitForTransform(base_frame, odom_frame, 
                                 ros::Time::now(), ros::Duration(1.0));

      //record the starting transform from the odometry to the base frame
      listener_.lookupTransform(base_frame, odom_frame, 
                                ros::Time(0), start_transform);
    }
    catch (tf::TransformException ex)
    {
      ROS_ERROR("%s",ex.what());
      return false;
    }
    
    //we will be sending commands of type "twist"
    geometry_msgs::Twist base_cmd;
    //the command will be to turn at turn_rate rad/s
    base_cmd.linear.x = base_cmd.linear.y = 0.0;
    base_cmd.angular.z = turn_rate;
    if (radians < 0)
      base_cmd.angular.z = -turn_rate;
    
    //the axis we want to be rotating by
    tf::Vector3 desired_turn_axis(0,0,1);
    
    ros::Rate rate(25.0);
    bool done = false;
    while (!done && nh_.ok() && as_.isActive())
    {
      //send the drive command
      cmd_vel_pub_.publish(base_cmd);
      rate.sleep();
      //get the current transform
      try
      {
        listener_.lookupTransform(base_frame, odom_frame, 
                                  ros::Time(0), current_transform);
      }
      catch (tf::TransformException ex)
      {
        ROS_ERROR("%s",ex.what());
        break;
      }
      tf::Transform relative_transform = 
        start_transform.inverse() * current_transform;
      tf::Vector3 actual_turn_axis = 
        relative_transform.getRotation().getAxis();
      double angle_turned = relative_transform.getRotation().getAngle();
      
      // Update feedback and result.
      feedback_.turn_distance = angle_turned;
      result_.turn_distance = angle_turned;
      as_.publishFeedback(feedback_);
      
      if ( fabs(angle_turned) < 1.0e-2) continue;

      //if ( actual_turn_axis.dot( desired_turn_axis ) < 0 ) 
      //  angle_turned = 2 * M_PI - angle_turned;

      if (fabs(angle_turned) > fabs(radians)) done = true;
    }
    if (done) return true;
    return false;
  }
/*
void TrajActionServer::cmd_pose_right(Vectorq7x1 qvec) {
    //member var right_cmd_ already has joint names populated
    for (int i = 0; i < 7; i++) {
        right_cmd.command[i] = qvec[i];
    }
    joint_cmd_pub_right.publish(right_cmd);
}
*/
void TrajActionServer::executeCB(const actionlib::SimpleActionServer<davinci_traj_streamer::trajAction>::GoalConstPtr& goal) {
    double traj_clock, dt_segment, dq_segment, delta_q_segment, traj_final_time;
    int isegment;
    trajectory_msgs::JointTrajectoryPoint trajectory_point0;

    //Vectorq7x1 qvec, qvec0, qvec_prev, qvec_new;
    Eigen::VectorXd qvec, qvec0, qvec_prev, qvec_new;
    // TEST TEST TEST
    //Eigen::VectorXd q_vec;
    //q_vec<<0.1,0.2,0.15,0.4,0.5,0.6,0.7;    

    ROS_INFO("in executeCB");
    //ROS_INFO("goal input is: %d", goal->input);
    //do work here: this is where your interesting code goes

    //....

    // for illustration, populate the "result" message with two numbers:
    // the "input" is the message count, copied from goal->input (as sent by the client)
    // the "goal_stamp" is the server's count of how many goals it has serviced so far
    // if there is only one client, and if it is never restarted, then these two numbers SHOULD be identical...
    // unless some communication got dropped, indicating an error
    // send the result message back with the status of "success"

    g_count++; // keep track of total number of goals serviced since this server was started
    result_.return_val = g_count; // we'll use the member variable result_, defined in our class
    result_.traj_id = goal->traj_id;
    cout<<"received trajectory w/ "<<goal->trajectory.points.size()<<" points"<<endl;
    // copy trajectory to global var:
    new_trajectory = goal->trajectory; // 
    // insist that a traj have at least 2 pts
    int npts = new_trajectory.points.size();
    if (npts  < 2) {
        ROS_WARN("too few points; aborting goal");
        as_.setAborted(result_);
        return; // don't fall through to the execution loop and setSucceeded below
    } else { //OK...have a valid trajectory goal; execute it
        //got_new_goal = true;
        //got_new_trajectory = true;
        ROS_INFO("Cb received traj w/ npts = %d",npts);
        //cout << "Cb received traj w/ npts = " << new_trajectory.points.size() << endl;
        //trajectory_msgs::JointTrajectoryPoint trajectory_point0;
        //trajectory_point0 = new_trajectory.points[0];  
        //trajectory_point0 =  tj_msg.points[0];   
        //cout<<new_trajectory.points[0].positions.size()<<" =  new_trajectory.points[0].positions.size()"<<endl;
        //cout<<"size of positions[]: "<<trajectory_point0.positions.size()<<endl;
        cout << "subgoals: " << endl;
        int njnts; 
        for (int i = 0; i < npts; i++) {
            njnts = new_trajectory.points[i].positions.size();
            cout<<"njnts: "<<njnts<<endl;
            for (int j = 0; j < njnts; j++) { //copy from traj point to 7x1 vector
                cout << new_trajectory.points[i].positions[j] << ", ";
            }
            cout<<endl;
            cout<<"time from start: "<<new_trajectory.points[i].time_from_start.toSec()<<endl;
            cout << endl;
        }

        as_.isActive();

        working_on_trajectory = true;
        //got_new_trajectory=false;
        traj_clock = 0.0; // initialize clock for trajectory;
        isegment = 0;
        trajectory_point0 = new_trajectory.points[0];
        njnts = new_trajectory.points[0].positions.size();
        int njnts_new;
        qvec_prev.resize(njnts);
        qvec_new.resize(njnts);
        ROS_INFO("populating qvec_prev: ");
        for (int i = 0; i < njnts; i++) { //copy from traj point to Eigen type vector
            qvec_prev[i] = trajectory_point0.positions[i];
        }
        //cmd_pose_right(qvec0); //populate and send out first command  
        //qvec_prev = qvec0;
        cout << "start pt: " << qvec_prev.transpose() << endl;
    }
    while (working_on_trajectory) {
        traj_clock += dt_traj;
        // update isegment and qvec according to traj_clock; 
        //if traj_clock>= final_time, use exact end coords and set "working_on_trajectory" to false 
        ROS_INFO("traj_clock = %f; updating qvec_new",traj_clock);
        working_on_trajectory = update_trajectory(traj_clock, new_trajectory, qvec_prev, isegment, qvec_new);
        //cmd_pose_right(qvec_new); // use qvec to populate object and send it to robot
        ROS_INFO("publishing qvec_new as command");
        davinciJointPublisher.pubJointStatesAll(qvec_new);
        qvec_prev = qvec_new;

        //use the publisher object to map these correctly and send them to rviz
        //THE FOLLOWING LINE FAILS
        //  davinciJointPublisherPtr->pubJointStates(qvec_new); //

        cout << "traj_clock: " << traj_clock << "; vec:" << qvec_new.transpose() << endl;
        ros::spinOnce();
        ros::Duration(dt_traj).sleep();
    }
    ROS_INFO("completed execution of a trajectory" );
    as_.setSucceeded(result_); // tell the client that we were successful acting on the request, and return the "result" message 
}
	bool isActionServerActive() { return as_.isActive(); }
//this is where the bulk of the work is done, interpolating between potentially coarse joint-space poses
// using the specified arrival times
void trajActionServer::executeCB(const actionlib::SimpleActionServer<baxter_trajectory_streamer::trajAction>::GoalConstPtr& goal) {
    double traj_clock, dt_segment, dq_segment, delta_q_segment, traj_final_time;
    int isegment;
    trajectory_msgs::JointTrajectoryPoint trajectory_point0;

    Vectorq7x1 qvec, qvec0, qvec_prev, qvec_new;

    //ROS_INFO("in executeCB");
    //ROS_INFO("goal input is: %d", goal->input);

    g_count++; // keep track of total number of goals serviced since this server was started
    result_.return_val = g_count; // we'll use the member variable result_, defined in our class
    //result_.traj_id = goal->traj_id;
    //cout<<"received trajectory w/ "<<goal->trajectory.points.size()<<" points"<<endl;
    // copy trajectory to global var:
    new_trajectory = goal->trajectory; // 
    // insist that a traj have at least 2 pts
    int npts = new_trajectory.points.size();
    if (npts  < 2) {
        ROS_WARN("too few points; aborting goal");
        as_.setAborted(result_);
        return; // don't fall through to the execution loop and setSucceeded below
    } else { //OK...have a valid trajectory goal; execute it
        //got_new_goal = true;
        //got_new_trajectory = true;
        ROS_INFO("Cb received traj w/ npts = %d",npts);
        //cout << "Cb received traj w/ npts = " << new_trajectory.points.size() << endl;
        //debug output...
        /*
        cout << "subgoals: " << endl;
        for (int i = 0; i < npts; i++) {
            for (int j = 0; j < 7; j++) { //copy from traj point to 7x1 vector
                cout << new_trajectory.points[i].positions[j] << ", ";
            }
            cout << endl;
        }
        */
        as_.isActive();

        working_on_trajectory = true;

        traj_clock = 0.0; // initialize clock for trajectory;
        isegment = 0; //initialize the segment count
        trajectory_point0 = new_trajectory.points[0]; //start trajectory from first point...should be at least close to
          //current state of system; SHOULD CHECK THIS
        for (int i = 0; i < 7; i++) { //copy from traj point to 7x1 Eigen-type vector
            qvec0[i] = trajectory_point0.positions[i];
        }
        cmd_pose_left(qvec0); //populate and send out first command  
        qvec_prev = qvec0;
        cout << "start pt: " << qvec0.transpose() << endl;
    }
    while (working_on_trajectory) {
        traj_clock += dt_traj;
        // update isegment and qvec according to traj_clock; 
        //if traj_clock>= final_time, use exact end coords and set "working_on_trajectory" to false          
        working_on_trajectory = update_trajectory(traj_clock, new_trajectory, qvec_prev, isegment, qvec_new);
        cmd_pose_left(qvec_new); // use qvec to populate object and send it to robot
        qvec_prev = qvec_new;
        //cout << "traj_clock: " << traj_clock << "; vec:" << qvec_new.transpose() << endl;
        ros::spinOnce();
        ros::Duration(dt_traj).sleep(); //update the outputs at time-step resolution specified by dt_traj
    }
    ROS_INFO("completed execution of a trajectory" );
    as_.setSucceeded(result_); // tell the client that we were successful acting on the request, and return the "result" message 
}
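Both executeCB examples above delegate the per-time-step interpolation to update_trajectory(), whose body is not included in this listing. Purely as an illustration of the interpolation the surrounding comments describe, here is a hedged linear-interpolation sketch; the real function's signature, its use of qvec_prev, and its edge-case handling may differ:

#include <trajectory_msgs/JointTrajectory.h>

// Returns true while the trajectory is still being executed, false once
// traj_clock has passed the final point (in which case qvec_new is clamped
// to the last point, matching the comments in the callers above).
bool update_trajectory(double traj_clock,
                       const trajectory_msgs::JointTrajectory& traj,
                       const Vectorq7x1& qvec_prev,   // unused in this simplified sketch
                       int& isegment,
                       Vectorq7x1& qvec_new)
{
    (void)qvec_prev;
    int npts = traj.points.size();
    double t_final = traj.points[npts - 1].time_from_start.toSec();

    if (traj_clock >= t_final) {
        for (int i = 0; i < 7; i++)
            qvec_new[i] = traj.points[npts - 1].positions[i];
        return false;   // done with this trajectory
    }

    // Advance the segment index until traj_clock falls inside [t0, t1).
    while (isegment < npts - 2 &&
           traj.points[isegment + 1].time_from_start.toSec() < traj_clock) {
        isegment++;
    }

    double t0 = traj.points[isegment].time_from_start.toSec();
    double t1 = traj.points[isegment + 1].time_from_start.toSec();
    double frac = (traj_clock - t0) / (t1 - t0);   // assumes strictly increasing arrival times

    for (int i = 0; i < 7; i++) {
        double q0 = traj.points[isegment].positions[i];
        double q1 = traj.points[isegment + 1].positions[i];
        qvec_new[i] = q0 + frac * (q1 - q0);       // straight-line interpolation in joint space
    }
    return true;    // still working on the trajectory
}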
Example #16
    void executeCb(const frontier_exploration::ExploreTaskGoalConstPtr &goal)
    {

        success_ = false;
        moving_ = false;

        explore_costmap_ros_->resetLayers();

        //create costmap services
        ros::ServiceClient updateBoundaryPolygon = private_nh_.serviceClient<frontier_exploration::UpdateBoundaryPolygon>("explore_costmap/explore_boundary/update_boundary_polygon");
        ros::ServiceClient getNextFrontier = private_nh_.serviceClient<frontier_exploration::GetNextFrontier>("explore_costmap/explore_boundary/get_next_frontier");

        //wait for move_base and costmap services
        if(!move_client_.waitForServer() || !updateBoundaryPolygon.waitForExistence() || !getNextFrontier.waitForExistence()){
            as_.setAborted();
            return;
        }

        //set region boundary on costmap
        if(ros::ok() && as_.isActive()){
            frontier_exploration::UpdateBoundaryPolygon srv;
            srv.request.explore_boundary = goal->explore_boundary;
            if(updateBoundaryPolygon.call(srv)){
                ROS_INFO("Region boundary set");
            }else{
                ROS_ERROR("Failed to set region boundary");
                as_.setAborted();
                return;
            }
        }

        //loop until all frontiers are explored
        ros::Rate rate(frequency_);
        while(ros::ok() && as_.isActive()){

            frontier_exploration::GetNextFrontier srv;

            //placeholder for next goal to be sent to move base
            geometry_msgs::PoseStamped goal_pose;

            //get current robot pose in frame of exploration boundary
            tf::Stamped<tf::Pose> robot_pose;
            explore_costmap_ros_->getRobotPose(robot_pose);

            //provide current robot pose to the frontier search service request
            tf::poseStampedTFToMsg(robot_pose,srv.request.start_pose);

            //evaluate if robot is within exploration boundary using robot_pose in boundary frame
            geometry_msgs::PoseStamped eval_pose = srv.request.start_pose;
            if(eval_pose.header.frame_id != goal->explore_boundary.header.frame_id){
                tf_listener_.transformPose(goal->explore_boundary.header.frame_id, srv.request.start_pose, eval_pose);
            }

            //check if robot is not within exploration boundary and needs to return to center of search area
            if(goal->explore_boundary.polygon.points.size() > 0 && !pointInPolygon(eval_pose.pose.position,goal->explore_boundary.polygon)){
                
                //check if robot has explored at least one frontier, and promote debug message to warning
                if(success_){
                    ROS_WARN("Robot left exploration boundary, returning to center");
                }else{
                    ROS_DEBUG("Robot not initially in exploration boundary, traveling to center");
                }
                //get current robot position in frame of exploration center
                geometry_msgs::PointStamped eval_point;
                eval_point.header = eval_pose.header;
                eval_point.point = eval_pose.pose.position;
                if(eval_point.header.frame_id != goal->explore_center.header.frame_id){
                    geometry_msgs::PointStamped temp = eval_point;
                    tf_listener_.transformPoint(goal->explore_center.header.frame_id, temp, eval_point);
                }

                //set goal pose to exploration center
                goal_pose.header = goal->explore_center.header;
                goal_pose.pose.position = goal->explore_center.point;
                goal_pose.pose.orientation = tf::createQuaternionMsgFromYaw( yawOfVector(eval_point.point, goal->explore_center.point) );

            }else if(getNextFrontier.call(srv)){ //if in boundary, try to find next frontier to search

                ROS_DEBUG("Found frontier to explore");
                success_ = true;
                goal_pose = feedback_.next_frontier = srv.response.next_frontier;
                retry_ = 5;

            }else{ //if no frontier found, check if search is successful
                ROS_DEBUG("Couldn't find a frontier");

                //search is successful
                if(retry_ == 0 && success_){
                    ROS_WARN("Finished exploring room");
                    as_.setSucceeded();
                    boost::unique_lock<boost::mutex> lock(move_client_lock_);
                    move_client_.cancelGoalsAtAndBeforeTime(ros::Time::now());
                    return;

                }else if(retry_ == 0 || !ros::ok()){ //search is not successful

                    ROS_ERROR("Failed exploration");
                    as_.setAborted();
                    return;
                }

                ROS_DEBUG("Retrying...");
                retry_--;
                //try to find frontier again, without moving robot
                continue;
            }
            //if above conditional does not escape this loop step, search has a valid goal_pose

            //check if new goal is close to old goal, hence no need to resend
            if(!moving_ || !pointsNearby(move_client_goal_.target_pose.pose.position,goal_pose.pose.position,goal_aliasing_*0.5)){
                ROS_DEBUG("New exploration goal");
                move_client_goal_.target_pose = goal_pose;
                boost::unique_lock<boost::mutex> lock(move_client_lock_);
                if(as_.isActive()){
                    move_client_.sendGoal(move_client_goal_, boost::bind(&FrontierExplorationServer::doneMovingCb, this, _1, _2),0,boost::bind(&FrontierExplorationServer::feedbackMovingCb, this, _1));
                    moving_ = true;
                }
                lock.unlock();
            }

            //check if continuous goal updating is enabled
            if(frequency_ > 0){
                //sleep for specified frequency and then continue searching
                rate.sleep();
            }else{
                //wait for movement to finish before continuing
                while(ros::ok() && as_.isActive() && moving_){
                    ros::WallDuration(0.1).sleep();
                }
            }
        }

        //goal should never be active at this point
        ROS_ASSERT(!as_.isActive());

    }
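A hedged client-side sketch for the exploration server above. The action type follows from the ExploreTaskGoalConstPtr used by executeCb; the server name, frame, and boundary coordinates are assumptions:

#include <ros/ros.h>
#include <actionlib/client/simple_action_client.h>
#include <frontier_exploration/ExploreTaskAction.h>
#include <geometry_msgs/Point32.h>

int main(int argc, char** argv)
{
  ros::init(argc, argv, "explore_client");

  actionlib::SimpleActionClient<frontier_exploration::ExploreTaskAction>
      ac("explore_server", true);                       // server name assumed
  ac.waitForServer();

  frontier_exploration::ExploreTaskGoal goal;
  goal.explore_boundary.header.frame_id = "map";        // assumed frame
  geometry_msgs::Point32 p;
  // a 10 m x 10 m square around the origin (illustrative values only)
  p.x = -5; p.y = -5; goal.explore_boundary.polygon.points.push_back(p);
  p.x =  5; p.y = -5; goal.explore_boundary.polygon.points.push_back(p);
  p.x =  5; p.y =  5; goal.explore_boundary.polygon.points.push_back(p);
  p.x = -5; p.y =  5; goal.explore_boundary.polygon.points.push_back(p);
  goal.explore_center.header.frame_id = "map";
  goal.explore_center.point.x = 0.0;
  goal.explore_center.point.y = 0.0;

  ac.sendGoal(goal);
  ac.waitForResult();
  return 0;
}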
  // run face recognition on the received image
  void recognizeCb(const sensor_msgs::ImageConstPtr& msg) {
    // cout << "entering.. recognizeCb" << endl;

    _ph.getParam("algorithm", _recognitionAlgo);

    if (_as.isPreemptRequested() || !ros::ok()) {
      // std::cout << "preempt req or not ok" << std::endl;
      ROS_INFO("%s: Preempted", _action_name.c_str());
      // set the action state to preempted
      _as.setPreempted();
      // success = false;
      // break;
      // cout << "shutting down _image_sub" << endl;
      _image_sub.shutdown();
      Mat inactive(_window_rows, _window_cols, CV_8UC3, CV_RGB(20,20,20));
      appendStatusBar(inactive, "INACTIVE", "");
      cv::imshow(_windowName, inactive);
      cv::waitKey(3);
      return;
    }

    if (!_as.isActive()) {
      // std::cout << "not active" << std::endl;
      // cout << "shutting down _image_sub" << endl;
      _image_sub.shutdown();
      Mat inactive(_window_rows, _window_cols, CV_8UC3, CV_RGB(20,20,20));
      appendStatusBar(inactive, "INACTIVE", "");
      cv::imshow(_windowName, inactive);
      cv::waitKey(3);
      return;
    }

    cv_bridge::CvImagePtr cv_ptr;
    try
    {
      cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
    }
    catch (cv_bridge::Exception& e)
    {
      ROS_ERROR("%s: cv_bridge exception: %s", _action_name.c_str(), e.what());
      return;
    }

    if (_window_rows == 0) {
      _window_rows = cv_ptr->image.rows;
      _window_cols = cv_ptr->image.cols;
    }

    // clear previous feedback
    _feedback.names.clear();
    _feedback.confidence.clear();
    // _result.names.clear();
    // _result.confidence.clear();

    std::vector<cv::Rect> faces;
    std::vector<cv::Mat> faceImgs = _fd.getFaceImgs(cv_ptr->image, faces, true);
    std::map<string, std::pair<string, double> > results;
    
    for( size_t i = 0; i < faceImgs.size(); i++ ) {
      cv::resize(faceImgs[i], faceImgs[i], cv::Size(100.0, 100.0));
      cv::cvtColor( faceImgs[i], faceImgs[i], CV_BGR2GRAY );
      cv::equalizeHist( faceImgs[i], faceImgs[i] );

      // perform recognition
      results = _fr->recognize(faceImgs[i],
                                ("eigenfaces" == _recognitionAlgo),
                                ("fisherfaces" == _recognitionAlgo),
                                ("lbph" == _recognitionAlgo)
                              );

      ROS_INFO("Face %lu:", i);
      if ("eigenfaces" == _recognitionAlgo)
        ROS_INFO("\tEigenfaces: %s %f", results["eigenfaces"].first.c_str(), results["eigenfaces"].second);
      if ("fisherfaces" == _recognitionAlgo)
        ROS_INFO("\tFisherfaces: %s %f", results["fisherfaces"].first.c_str(), results["fisherfaces"].second);
      if ("lbph" == _recognitionAlgo)
        ROS_INFO("\tLBPH: %s %f", results["lbph"].first.c_str(), results["lbph"].second);
    }


    // update GUI window
    // TODO check gui parameter
    appendStatusBar(cv_ptr->image, "RECOGNITION", "");
    cv::imshow(_windowName, cv_ptr->image);
    cv::waitKey(3);

    // if faces were detected
    if (faceImgs.size() > 0) {
      // recognize only once
      if (_goal_id == 0) {
        // std::cout << "goal_id 0. setting succeeded." << std::endl;
        // cout << _recognitionAlgo << endl;
        _result.names.push_back(results[_recognitionAlgo].first);
        _result.confidence.push_back(results[_recognitionAlgo].second);
        _as.setSucceeded(_result);
      }
      // recognize continuously
      else {
        _feedback.names.push_back(results[_recognitionAlgo].first);
        _feedback.confidence.push_back(results[_recognitionAlgo].second);
        _as.publishFeedback(_feedback);
      }
    }
  } 
void CartMoveActionServer::executeCB(const actionlib::SimpleActionServer<cwru_action::cart_moveAction>::GoalConstPtr& goal) {

    ROS_INFO("in executeCB of CartMoveActionServer");
   cart_result_.err_code=0;
   cart_move_as_.isActive();
   //unpack the necessary info:
   gripper_ang1_ = goal->gripper_jaw_angle1;
   gripper_ang2_ = goal->gripper_jaw_angle2;
   arrival_time_ = goal->move_time;
   // interpret the desired gripper poses:
   geometry_msgs::PoseStamped des_pose_gripper1 = goal->des_pose_gripper1;
   geometry_msgs::PoseStamped des_pose_gripper2 = goal->des_pose_gripper2;
   // convert the above to affine objects:
   des_gripper1_affine_wrt_lcamera_ = transformPoseToEigenAffine3d(des_pose_gripper1.pose);
   cout<<"gripper1 desired pose;  "<<endl;
   cout<<des_gripper1_affine_wrt_lcamera_.linear()<<endl;
   cout<<"origin: "<<des_gripper1_affine_wrt_lcamera_.translation().transpose()<<endl;

   des_gripper2_affine_wrt_lcamera_ = transformPoseToEigenAffine3d(des_pose_gripper2.pose);
   cout<<"gripper2 desired pose;  "<<endl;
   cout<<des_gripper2_affine_wrt_lcamera_.linear()<<endl;
   cout<<"origin: "<<des_gripper2_affine_wrt_lcamera_.translation().transpose()<<endl;

   //do IK to convert these to joint angles:
    //Eigen::VectorXd q_vec1,q_vec2;
    Vectorq7x1 q_vec1,q_vec2;
    q_vec1.resize(7);
    q_vec2.resize(7);

    
    trajectory_msgs::JointTrajectory des_trajectory; // an empty trajectory 
    des_trajectory.points.clear(); // can clear components, but not entire trajectory_msgs
    des_trajectory.joint_names.clear(); //could put joint names in...but I assume a fixed order and fixed size, so this is unnecessary
    // if using wsn's trajectory streamer action server
    des_trajectory.header.stamp = ros::Time::now();

    trajectory_msgs::JointTrajectoryPoint trajectory_point; //,trajectory_point2; 
    trajectory_point.positions.resize(14);
    
    ROS_INFO("\n");
    ROS_INFO("stored previous command to gripper one: ");
    cout<<gripper1_affine_last_commanded_pose_.linear()<<endl;
    cout<<"origin: "<<gripper1_affine_last_commanded_pose_.translation().transpose()<<endl;
   

    // first, reiterate previous command:
    // this could be easier, if saved previous joint-space trajectory point...
    des_gripper_affine1_ = affine_lcamera_to_psm_one_.inverse()*gripper1_affine_last_commanded_pose_; //previous pose
    ik_solver_.ik_solve(des_gripper_affine1_); //convert desired pose into equiv joint displacements
    q_vec1 = ik_solver_.get_soln(); 
    q_vec1(6) = last_gripper_ang1_; // include desired gripper opening angle
    
    
    des_gripper_affine2_ = affine_lcamera_to_psm_two_.inverse()*gripper2_affine_last_commanded_pose_; //previous pose
    ik_solver_.ik_solve(des_gripper_affine2_); //convert desired pose into equiv joint displacements
    q_vec2 = ik_solver_.get_soln(); 
    cout<<"q_vec1 of stored pose: "<<endl;
    for (int i=0;i<6;i++) {
        cout<<q_vec1[i]<<", ";
    }
    cout<<endl;
    q_vec2(6) = last_gripper_ang2_; // include desired gripper opening angle
    
       for (int i=0;i<7;i++) {
            trajectory_point.positions[i] = q_vec1(i);
            trajectory_point.positions[i+7] = q_vec2(i);  
        }
    cout<<"start traj pt: "<<endl;
    for (int i=0;i<14;i++) {
        cout<<trajectory_point.positions[i]<<", ";
    }
    cout<<endl;
      trajectory_point.time_from_start = ros::Duration(0.0); // start time set to 0
    // PUSH IN THE START POINT:
      des_trajectory.points.push_back(trajectory_point);            

    // compute and append the goal point, in joint space trajectory:
    des_gripper_affine1_ = affine_lcamera_to_psm_one_.inverse()*des_gripper1_affine_wrt_lcamera_;
    ROS_INFO("desired gripper one location in base frame: ");
    cout<<"gripper1 desired pose;  "<<endl;
    cout<<des_gripper_affine1_.linear()<<endl;
    cout<<"origin: "<<des_gripper_affine1_.translation().transpose()<<endl;

    ik_solver_.ik_solve(des_gripper_affine1_); //convert desired pose into equiv joint displacements
    q_vec1 = ik_solver_.get_soln(); 
    q_vec1(6) = gripper_ang1_; // include desired gripper opening angle

    des_gripper_affine2_ = affine_lcamera_to_psm_two_.inverse()*des_gripper2_affine_wrt_lcamera_;
    ik_solver_.ik_solve(des_gripper_affine2_); //convert desired pose into equiv joint displacements
    q_vec2 = ik_solver_.get_soln();  
    cout<<"q_vec1 of goal pose: "<<endl;
    for (int i=0;i<6;i++) {
        cout<<q_vec1[i]<<", ";
    }
    cout<<endl;
    q_vec2(6) = gripper_ang2_;
        for (int i=0;i<7;i++) {
            trajectory_point.positions[i] = q_vec1(i);
            trajectory_point.positions[i+7] = q_vec2(i);  
        }
      trajectory_point.time_from_start = ros::Duration(arrival_time_);
   cout<<"goal traj pt: "<<endl;
    for (int i=0;i<14;i++) {
        cout<<trajectory_point.positions[i]<<", ";
    }
    cout<<endl;
      des_trajectory.points.push_back(trajectory_point);

    js_goal_.trajectory = des_trajectory;

    // Need boost::bind to pass in the 'this' pointer
  // see example: http://library.isr.ist.utl.pt/docs/roswiki/actionlib_tutorials%282f%29Tutorials%282f%29Writing%2820%29a%2820%29Callback%2820%29Based%2820%29Simple%2820%29Action%2820%29Client.html
  //  ac.sendGoal(goal,
  //              boost::bind(&MyNode::doneCb, this, _1, _2),
  //              Client::SimpleActiveCallback(),
  //              Client::SimpleFeedbackCallback());

    js_action_client_.sendGoal(js_goal_, boost::bind(&CartMoveActionServer::js_doneCb_,this,_1,_2)); // we could also name additional callback functions here, if desired
    //    action_client.sendGoal(goal, &doneCb, &activeCb, &feedbackCb); //alt--more callback funcs possible
    
    double t_timeout=arrival_time_+2.0; //wait 2 sec longer than expected duration of move
    
    bool finished_before_timeout = js_action_client_.waitForResult(ros::Duration(t_timeout));
    //bool finished_before_timeout = action_client.waitForResult(); // wait forever...
    if (!finished_before_timeout) {
        ROS_WARN("joint-space interpolation result is overdue ");
    } else {
        ROS_INFO("finished before timeout");
    }

    ROS_INFO("completed callback" );
    cart_move_as_.setSucceeded(cart_result_); // tell the client that we were successful acting on the request, and return the "result" message 
    
    //let's remember the last pose commanded, so we can use it as start pose for next move
    gripper1_affine_last_commanded_pose_ = des_gripper1_affine_wrt_lcamera_;
    gripper2_affine_last_commanded_pose_ = des_gripper2_affine_wrt_lcamera_;    
    //and the jaw opening angles:
    last_gripper_ang1_=gripper_ang1_;
    last_gripper_ang2_=gripper_ang2_;
}
  void executeCB(const robot_actionlib::TestGoalConstPtr &goal)
  {
    // helper variables
    ros::Rate loop_rate(25);
    bool success = true;

    // push_back the seeds for the fibonacci sequence
    feedback_.sequence.clear();
    feedback_.sequence.push_back(0);
    feedback_.sequence.push_back(1);

    // publish info to the console for the user
    ROS_INFO("%s: Executing, creating fibonacci sequence of order %i with seeds %i, %i", action_name_.c_str(), goal->order, feedback_.sequence[0], feedback_.sequence[1]);


    bool front = 0;
    bool right = 0;
    bool left = 0;
    double right_average = 0;
    double left_average = 0;
    
    
    while(ros::ok())
    {
      ROS_INFO("Preempted ?: %d ", as_.isPreemptRequested());
      ROS_INFO("Active ?: %d ", as_.isActive());
      ROS_INFO("GOOALLLLLL: %d ", goal->order);
      // check that preempt has not been requested by the client
      if (as_.isPreemptRequested() || !ros::ok())
      {
        ROS_INFO("%s: Preempted", action_name_.c_str());
        // set the action state to preempted
        as_.setPreempted();
        success = false;
        break;
      }

      
	front = (distance_front_left < front_limit) || (distance_front_right < front_limit);
        right = (distance_rights_front < side_limit) && (distance_rights_back < side_limit);
        left = (distance_lefts_front < side_limit) && (distance_lefts_back < side_limit);
        right_average = (distance_rights_front + distance_rights_back)/2;
        left_average = (distance_lefts_front + distance_lefts_back)/2;

	flagReady = true;
	if (!TestAction::flagReady) {
	  ros::spinOnce();
	  loop_rate.sleep();
	  continue;
        }

	// Update of the state
        if(front){ // The front is the most prior
	  if(right_average > left_average){
	    side = 0;
	  }else{
	    side = 1;
	  }
	  state = FRONT;
	  std::cerr << "FRONT" << std::endl;
        } else if (right) { // The right is prefered
	  /*if(state != RIGHT){
	    direction = rand()%2;
	    std::cerr << direction << std::endl;
            }*/
	  state = RIGHT;
	  std::cerr << "RIGHT" << std::endl;
        } else if (left) {
	  /*if(state != LEFT){
	    direction = rand()%2;
	    std::cerr << direction << std::endl;
            }*/
	  //direction = rand()%1;

	  state = LEFT;
	  std::cerr << "LEFT" << std::endl;
        } else {
	  state = EMPTY;
	  std::cerr << "EMPTY" << std::endl;
        }

        // Action depending on the state
        switch(state) {
        case(EMPTY): // Just go straight
	  msg.linear.x = smoothUpdateVelocity(msg.linear.x, l_speed, 0.05);
	  msg.angular.z = smoothUpdateVelocity(msg.angular.z, 0, 0.1);
	  break;
        case(FRONT):
	  msg.linear.x = 0;
	  if(side == 1){
	    msg.angular.z = smoothUpdateVelocity(msg.angular.z, a_speed, 0.1);
	  }else{
	    msg.angular.z = smoothUpdateVelocity(msg.angular.z, -a_speed, 0.1);
	  }

	  break;
        case(RIGHT):
	  msg.linear.x = smoothUpdateVelocity(msg.linear.x, l_speed, 0.05);
	  msg.angular.z = - alpha * ( (double)distance_rights_front - (double)distance_rights_back);
	  break;
        case(LEFT):
	  msg.linear.x = smoothUpdateVelocity(msg.linear.x, l_speed, 0.05);
	  msg.angular.z = alpha * ( (double)distance_lefts_front - (double)distance_lefts_back);
	  break;
        default:
	  break;
        }

        // P-controller to generate the angular velocity of the robot, to follow the wall
        //msg.angular.z = alpha * ( (double)distance_lefts_front- (double)distance_lefts_back);
        //ROS_INFO("Linear velocity :%f", msg.linear.x);
        //ROS_INFO("Angular velocity :%f", msg.angular.z);
        if(counter > 10){
	  twist_pub.publish(msg);
        }else {
	  counter += 1;
        }


	feedback_.sequence.push_back(feedback_.sequence[1] + feedback_.sequence[0]);
	// publish the feedback
	as_.publishFeedback(feedback_);
	
	
	

        ros::spinOnce();
        loop_rate.sleep();
	
    }
    
    if(success)
    {
      result_.sequence = feedback_.sequence;
      ROS_INFO("%s: Succeeded", action_name_.c_str());
      // set the action state to succeeded
      as_.setSucceeded(result_);
    }
  }
  bool driveForwardOdom(double distance)
  {
    // If the distance to travel is negligible, don't even try.
    if (fabs(distance) < 0.01)
      return true;
    
    //we will record transforms here
    tf::StampedTransform start_transform;
    tf::StampedTransform current_transform;
  
    try
    {
      //wait for the listener to get the first message
      listener_.waitForTransform(base_frame, odom_frame, 
                                 ros::Time::now(), ros::Duration(1.0));
      
      //record the starting transform from the odometry to the base frame
      listener_.lookupTransform(base_frame, odom_frame, 
                                ros::Time(0), start_transform);
    }
    catch (tf::TransformException ex)
    {
      ROS_ERROR("%s",ex.what());
      return false;
    }
    
    //we will be sending commands of type "twist"
    geometry_msgs::Twist base_cmd;
    //the command will be to go forward at forward_rate m/s
    base_cmd.linear.y = base_cmd.angular.z = 0;
    base_cmd.linear.x = forward_rate;
    
    if (distance < 0)
      base_cmd.linear.x = -base_cmd.linear.x;
    
    ros::Rate rate(25.0);
    bool done = false;
    while (!done && nh_.ok() && as_.isActive())
    {
      //send the drive command
      cmd_vel_pub_.publish(base_cmd);
      rate.sleep(); 
      //get the current transform
      try
      {
        listener_.lookupTransform(base_frame, odom_frame, 
                                  ros::Time(0), current_transform);
      }
      catch (tf::TransformException ex)
      {
        ROS_ERROR("%s",ex.what());
        break;
      }
      //see how far we've traveled
      tf::Transform relative_transform = 
        start_transform.inverse() * current_transform;
      double dist_moved = relative_transform.getOrigin().length();
      
      // Update feedback and result.
      feedback_.forward_distance = dist_moved;
      result_.forward_distance = dist_moved;
      as_.publishFeedback(feedback_);

      if(fabs(dist_moved) > fabs(distance))
      {
        done = true;
      }
    }
    base_cmd.linear.x = 0.0;
    base_cmd.angular.z = 0.0;
    cmd_vel_pub_.publish(base_cmd);

    if (done) return true;
    return false;
  }
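The two odometry helpers above (turnOdom and driveForwardOdom) report progress through feedback_/result_ and stop early if the action goal goes inactive; a typical execute callback would drive them roughly like the hedged sketch below. The action type and its goal fields are purely hypothetical placeholders:

  // Hypothetical goal message with forward_distance / turn_distance fields.
  void executeCB(const odom_nav::OdomNavGoalConstPtr& goal)
  {
    bool ok = true;
    if (fabs(goal->forward_distance) > 0.0)     // assumed goal field
      ok = driveForwardOdom(goal->forward_distance);
    if (ok && fabs(goal->turn_distance) > 0.0)  // assumed goal field
      ok = turnOdom(goal->turn_distance);

    if (ok)
      as_.setSucceeded(result_);
    else if (as_.isActive())
      as_.setAborted(result_);
  }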
Example #21
  void timer_callback(const ros::TimerEvent &)
  {
    mil_msgs::MoveToResult actionresult;

    // Handle disabled, killed, or no odom before attempting to produce trajectory
    std::string err = "";
    if (disabled)
      err = "c3 disabled";
    else if (kill_listener.isRaised())
      err = "killed";
    else if (!c3trajectory)
      err = "no odom";

    if (!err.empty())
    {
      if (c3trajectory)
        c3trajectory.reset();  // On revive/enable, wait for odom before station keeping

      // Cancel all goals while killed/disabled/no odom
      if (actionserver.isNewGoalAvailable())
        actionserver.acceptNewGoal();
      if (actionserver.isActive())
      {
        actionresult.error = err;
        actionresult.success = false;
        actionserver.setAborted(actionresult);
      }
      return;
    }

    ros::Time now = ros::Time::now();

    auto old_waypoint = current_waypoint;

    if (actionserver.isNewGoalAvailable())
    {
      boost::shared_ptr<const mil_msgs::MoveToGoal> goal = actionserver.acceptNewGoal();
      current_waypoint =
          subjugator::C3Trajectory::Waypoint(Point_from_PoseTwist(goal->posetwist.pose, goal->posetwist.twist),
                                             goal->speed, !goal->uncoordinated, !goal->blind);
      current_waypoint_t = now;
      this->linear_tolerance = goal->linear_tolerance;
      this->angular_tolerance = goal->angular_tolerance;

      waypoint_validity_.pub_size_ogrid(Pose_from_Waypoint(current_waypoint), (int)OGRID_COLOR::GREEN);

      // Check if waypoint is valid
      std::pair<bool, WAYPOINT_ERROR_TYPE> checkWPResult = waypoint_validity_.is_waypoint_valid(
          Pose_from_Waypoint(current_waypoint), current_waypoint.do_waypoint_validation);
      actionresult.error = WAYPOINT_ERROR_TO_STRING.at(checkWPResult.second);
      actionresult.success = checkWPResult.first;
      if (checkWPResult.first == false && waypoint_check_)  // got a point that we should not move to
      {
        waypoint_validity_.pub_size_ogrid(Pose_from_Waypoint(current_waypoint), (int)OGRID_COLOR::RED);
        if (checkWPResult.second ==
            WAYPOINT_ERROR_TYPE::UNKNOWN)  // if unknown, check if there's a huge displacement with the new waypoint
        {
          auto a_point = Pose_from_Waypoint(current_waypoint);
          auto b_point = Pose_from_Waypoint(old_waypoint);
          // If moved more than .5m, then don't allow
          if (abs(a_point.position.x - b_point.position.x) > .5 || abs(a_point.position.y - b_point.position.y) > .5)
          {
            ROS_ERROR("can't move there! - need to rotate");
            current_waypoint = old_waypoint;
          }
        }
        // if point is occupied, reject move
        if (checkWPResult.second == WAYPOINT_ERROR_TYPE::OCCUPIED)
        {
          ROS_ERROR("can't move there! - waypoint is occupied");
          current_waypoint = old_waypoint;
        }
        // if point is above water, reject move
        if (checkWPResult.second == WAYPOINT_ERROR_TYPE::ABOVE_WATER)
        {
          ROS_ERROR("can't move there! - waypoint is above water");
          current_waypoint = old_waypoint;
        }
        if (checkWPResult.second == WAYPOINT_ERROR_TYPE::NO_OGRID)
        {
          ROS_ERROR("WaypointValidity - Did not receive any ogrid");
        }
      }
    }
    if (actionserver.isPreemptRequested())
    {
      current_waypoint = c3trajectory->getCurrentPoint();
      current_waypoint.do_waypoint_validation = false;
      current_waypoint.r.qdot = subjugator::Vector6d::Zero();  // zero velocities
      current_waypoint_t = now;

      // don't try to make output c3 continuous when cancelled - instead stop as quickly as possible
      c3trajectory.reset(new subjugator::C3Trajectory(current_waypoint.r, limits));
      c3trajectory_t = now;
    }

    // Remember the previous trajectory
    auto old_trajectory = c3trajectory->getCurrentPoint();

    while (c3trajectory_t + traj_dt < now)
    {
      c3trajectory->update(traj_dt.toSec(), current_waypoint, (c3trajectory_t - current_waypoint_t).toSec());
      c3trajectory_t += traj_dt;
    }

    // Check if we will hit something along the new trajectory
    geometry_msgs::Pose traj_point;  // Convert messages to correct type
    auto p = c3trajectory->getCurrentPoint();
    traj_point.position = vec2xyz<Point>(p.q.head(3));
    quaternionTFToMsg(tf::createQuaternionFromRPY(p.q[3], p.q[4], p.q[5]), traj_point.orientation);

    std::pair<bool, WAYPOINT_ERROR_TYPE> checkWPResult =
        waypoint_validity_.is_waypoint_valid(Pose_from_Waypoint(p), c3trajectory->do_waypoint_validation);

    if (checkWPResult.first == false && checkWPResult.second == WAYPOINT_ERROR_TYPE::OCCUPIED && waypoint_check_)
    {  // New trajectory will hit an occupied goal, so reject
      ROS_ERROR("can't move there! - bad trajectory");
      current_waypoint = old_trajectory;
      current_waypoint.do_waypoint_validation = false;
      current_waypoint.r.qdot = subjugator::Vector6d::Zero();  // zero velocities
      current_waypoint_t = now;

      c3trajectory.reset(new subjugator::C3Trajectory(current_waypoint.r, limits));
      c3trajectory_t = now;
      actionresult.success = false;
      actionresult.error = WAYPOINT_ERROR_TO_STRING.at(WAYPOINT_ERROR_TYPE::OCCUPIED_TRAJECTORY);
    }

    PoseTwistStamped msg;
    msg.header.stamp = c3trajectory_t;
    msg.header.frame_id = fixed_frame;
    msg.posetwist = PoseTwist_from_PointWithAcceleration(c3trajectory->getCurrentPoint());
    trajectory_pub.publish(msg);

    waypoint_validity_.pub_size_ogrid(Pose_from_Waypoint(c3trajectory->getCurrentPoint()), 200);

    PoseStamped msgVis;
    msgVis.header = msg.header;
    msgVis.pose = msg.posetwist.pose;
    trajectory_vis_pub.publish(msgVis);

    PoseStamped posemsg;
    posemsg.header.stamp = c3trajectory_t;
    posemsg.header.frame_id = fixed_frame;
    posemsg.pose = Pose_from_Waypoint(current_waypoint);
    waypoint_pose_pub.publish(posemsg);

    if (actionserver.isActive() &&
        c3trajectory->getCurrentPoint().is_approximately(current_waypoint.r, max(1e-3, linear_tolerance),
                                                         max(1e-3, angular_tolerance)) &&
        current_waypoint.r.qdot == subjugator::Vector6d::Zero())
    {
      actionresult.error = "";
      actionresult.success = true;
      actionserver.setSucceeded(actionresult);
    }
  }
  void executeCB(const corobot_face_recognition::FaceRecognitionGoalConstPtr &goal)
  {
    if (_as.isPreemptRequested() || !ros::ok()) {
      ROS_INFO("%s: Preempted", _action_name.c_str());
      // set the action state to preempted
      _as.setPreempted();
      // success = false;
      // break;
    }
    
    // helper variables
    ros::Rate r(60);
    bool success = true;

    _goal_id = goal->order_id;
    _goal_argument = goal->order_argument;

    _feedback.order_id = _goal_id;
    _feedback.names.clear();
    _feedback.confidence.clear();
    
    _result.order_id = _goal_id;
    _result.names.clear();
    _result.confidence.clear();

    // publish info to the console for the user
    ROS_INFO("%s: Executing. order_id: %i, order_argument: %s ", _action_name.c_str(), _goal_id, _goal_argument.c_str());

    switch (_goal_id) {
      // RECOGNIZE ONCE
      case 0:
        // cout << "case 0" << endl;
        // _fr = new FaceRec(true);
        _image_sub = _it.subscribe("/usb_cam/image_raw", 1, &FaceRecognition::recognizeCb, this);
        
        while( _as.isActive() && !_as.isPreemptRequested() && !ros::isShuttingDown() )
          r.sleep();

        break;
      
      // RECOGNIZE CONTINUOUSLY
      case 1:
        //cout << "case 1" << endl;
        // _fr = new FaceRec(true);
        _image_sub = _it.subscribe("/usb_cam/image_raw", 1, &FaceRecognition::recognizeCb, this);
        
        while( _as.isActive() && !_as.isPreemptRequested() && !ros::isShuttingDown() )
          r.sleep();
        
        break;
      
      // ADD TRAINING IMAGES
      case 2:
        // cout << "case 2" << endl;
        _fc = new FaceImgCapturer(_goal_argument);
        _image_sub = _it.subscribe("/usb_cam/image_raw", 1, &FaceRecognition::addTrainingImagesCb, this);
        
        while( _as.isActive() && !_as.isPreemptRequested() && !ros::isShuttingDown() )
          r.sleep();

        break;
      
      // TRAIN DATABASE
      case 3:
        // call training function and check if successful
        // bool trainingSuccessful = true;
        // cout << "case 3" << endl;
        _fr->train(_preprocessing);
        if (true)  // placeholder: success/failure of train() is not actually checked
          _as.setSucceeded(_result);
        else 
          _as.setAborted(_result);
        break;
      
      // STOP
      case 4:
        if (_as.isActive()) {
          _as.setPreempted();
          // _image_sub.shutdown();
        }
        break;

      // EXIT
      case 5:
        // cout << "case 4" << endl;
        ROS_INFO("%s: Exit request.", _action_name.c_str());
        _as.setSucceeded(_result);
        r.sleep();
        ros::shutdown();
        break;
    }

  }
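A hedged client-side sketch for the face-recognition server above. The action type follows from the FaceRecognitionGoalConstPtr in executeCB, and order_id 0 corresponds to the "recognize once" case of the switch; the server name is an assumption:

#include <ros/ros.h>
#include <actionlib/client/simple_action_client.h>
#include <corobot_face_recognition/FaceRecognitionAction.h>

int main(int argc, char** argv)
{
  ros::init(argc, argv, "face_recognition_client");

  actionlib::SimpleActionClient<corobot_face_recognition::FaceRecognitionAction>
      ac("face_recognition", true);     // server name assumed
  ac.waitForServer();

  corobot_face_recognition::FaceRecognitionGoal goal;
  goal.order_id = 0;                    // 0 = recognize once (see the switch above)
  goal.order_argument = "";             // only used when adding training images
  ac.sendGoal(goal);

  if (ac.waitForResult(ros::Duration(30.0)))
    ROS_INFO("recognition finished: %s", ac.getState().toString().c_str());
  return 0;
}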
  // add training images for a person
  void addTrainingImagesCb(const sensor_msgs::ImageConstPtr& msg) {
    // cout << "addTrainingImagesCb" << endl;

    if (_as.isPreemptRequested() || !ros::ok()) {
      // std::cout << "preempt req or not ok" << std::endl;
      ROS_INFO("%s: Preempted", _action_name.c_str());
      // set the action state to preempted
      _as.setPreempted();
      // success = false;
      // break;
      // cout << "shutting down _image_sub" << endl;
      _image_sub.shutdown();
      return;
    }

    if (!_as.isActive()) {
      // std::cout << "not active" << std::endl;
      // cout << "shutting down _image_sub" << endl;
      _image_sub.shutdown();
      return;
    }

    cv_bridge::CvImagePtr cv_ptr;
    try
    {
      cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
    }
    catch (cv_bridge::Exception& e)
    {
      ROS_ERROR("%s: cv_bridge exception: %s", _action_name.c_str(), e.what());
      return;
    }

    if (_window_rows == 0) {
      _window_rows = cv_ptr->image.rows;
      _window_cols = cv_ptr->image.cols;
    }

    std::vector<cv::Rect> faces;
    // _fd.detectFaces(cv_ptr->image, faces, true);

    std::vector<cv::Mat> faceImgs = _fd.getFaceImgs(cv_ptr->image, faces, true);
    
    if (faceImgs.size() > 0)
      _fc->capture(faceImgs[0]);

    // call images capturing function
    _result.names.push_back(_goal_argument);
    // _result.confidence.push_back(2.0);
    
    int no_images_to_click;
    _ph.getParam("no_training_images", no_images_to_click);
    
    if (_fc->getNoImgsClicked() >= no_images_to_click) {
      // Mat inactive = cv::Mat::zeros(cv_ptr->image.rows, cv_ptr->image.cols, CV_32F);
      Mat inactive(_window_rows, _window_cols, CV_8UC3, CV_RGB(20,20,20));
      appendStatusBar(inactive, "INACTIVE", "Images added. Please train.");
      cv::imshow(_windowName, inactive);  
      // cv::displayOverlay(_windowName, "Images added", 3000); 
      _as.setSucceeded(_result);
    } else {
      // update GUI window
      // check GUI parameter
      appendStatusBar(cv_ptr->image, "ADDING IMAGES.", "Images added");
      cv::imshow(_windowName, cv_ptr->image);  
    }

    cv::waitKey(3);
  }
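For reference, here is a minimal sketch of a client that could drive an action server like the one above. The action type face_recognition::FaceRecognitionAction, its order_id/order_argument goal fields and the "face_recognition" action name are assumptions inferred from the server code, not confirmed by this snippet.

#include <ros/ros.h>
#include <actionlib/client/simple_action_client.h>
#include <face_recognition/FaceRecognitionAction.h> // assumed action definition

int main(int argc, char** argv)
{
  ros::init(argc, argv, "face_recognition_client_sketch");

  // Action name is an assumption; use whatever the server actually advertises.
  actionlib::SimpleActionClient<face_recognition::FaceRecognitionAction> ac("face_recognition", true);
  ac.waitForServer();

  face_recognition::FaceRecognitionGoal goal;
  goal.order_id = 2;             // ADD TRAINING IMAGES (case 2 in the server switch)
  goal.order_argument = "alice"; // person name handed to FaceImgCapturer

  ac.sendGoal(goal);
  ac.waitForResult(ros::Duration(60.0));

  if (ac.getState() == actionlib::SimpleClientGoalState::SUCCEEDED)
    ROS_INFO("Training images captured");
  else
    ROS_WARN("Goal finished in state %s", ac.getState().toString().c_str());

  return 0;
}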
void TrajActionServer::executeCB(const actionlib::SimpleActionServer<davinci_traj_streamer::trajAction>::GoalConstPtr& goal) {
    double traj_clock;
    int isegment;
    trajectory_msgs::JointTrajectoryPoint trajectory_point0;

    Eigen::VectorXd qvec_prev, qvec_new;

    ROS_INFO("in executeCB");

    g_count++; // keep track of total number of goals serviced since this server was started
    result_.return_val = g_count; // we'll use the member variable result_, defined in our class
    result_.traj_id = goal->traj_id;
    cout<<"received trajectory w/ "<<goal->trajectory.points.size()<<" points"<<endl;
    // copy trajectory to global var:
    new_trajectory = goal->trajectory; // 
    // insist that a traj have at least 2 pts
    int npts = new_trajectory.points.size();
    if (npts < 2) {
        ROS_WARN("too few points; aborting goal");
        as_.setAborted(result_);
        return; // don't fall through to the execution loop and later call setSucceeded on an aborted goal
    } else { //OK...have a valid trajectory goal; execute it
        ROS_INFO("Cb received traj w/ npts = %d", npts);
        cout << "subgoals: " << endl;
        int njnts; 
        for (int i = 0; i < npts; i++) {
            njnts = new_trajectory.points[i].positions.size();
            cout<<"njnts: "<<njnts<<endl;
            for (int j = 0; j < njnts; j++) { //copy from traj point to 7x1 vector
                cout << new_trajectory.points[i].positions[j] << ", ";
            }
            cout<<endl;
            cout<<"time from start: "<<new_trajectory.points[i].time_from_start.toSec()<<endl;
            cout << endl;
        }


        working_on_trajectory = true;
        //got_new_trajectory=false;
        traj_clock = 0.0; // initialize clock for trajectory;
        isegment = 0;
        trajectory_point0 = new_trajectory.points[0];
        njnts = new_trajectory.points[0].positions.size();
        qvec_prev.resize(njnts);
        qvec_new.resize(njnts);
        ROS_INFO("populating qvec_prev: ");
        for (int i = 0; i < njnts; i++) { //copy from traj point to Eigen type vector
            qvec_prev[i] = trajectory_point0.positions[i];
        }
        cout << "start pt: " << qvec_prev.transpose() << endl;
    }
    while (working_on_trajectory) {
        traj_clock += dt_traj;
        // update isegment and qvec according to traj_clock; 
        //if traj_clock>= final_time, use exact end coords and set "working_on_trajectory" to false 
        //ROS_INFO("traj_clock = %f; updating qvec_new",traj_clock);
        working_on_trajectory = update_trajectory(traj_clock, new_trajectory, qvec_prev, isegment, qvec_new);
        //cmd_pose_right(qvec_new); // use qvec to populate object and send it to robot
        //ROS_INFO("publishing qvec_new as command");
        //davinciJointPublisher.pubJointStatesAll(qvec_new);
        command_joints(qvec_new);  //map these to all gazebo joints and publish as commands      
        qvec_prev = qvec_new;

        //cout << "traj_clock: " << traj_clock << "; vec:" << qvec_new.transpose() << endl;
        ros::spinOnce();
        ros::Duration(dt_traj).sleep();
    }
    ROS_INFO("completed execution of a trajectory" );
    as_.setSucceeded(result_); // tell the client that we were successful acting on the request, and return the "result" message 
}
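The execution loop above delegates all interpolation to update_trajectory(), whose body is not shown. Below is a minimal sketch of what such a helper might look like, assuming plain linear interpolation between consecutive trajectory points keyed on time_from_start; it is an illustration of the technique, not the project's actual implementation (qvec_prev is kept in the signature only to match the call site).

#include <trajectory_msgs/JointTrajectory.h>
#include <Eigen/Dense>

// Sketch: interpolate the commanded joint vector at time traj_clock.
// Returns false once the final trajectory point has been reached, which is how
// the caller clears working_on_trajectory.
bool update_trajectory(double traj_clock,
                       const trajectory_msgs::JointTrajectory& traj,
                       const Eigen::VectorXd& /*qvec_prev*/,
                       int& isegment,
                       Eigen::VectorXd& qvec_new)
{
    int npts = traj.points.size();
    double t_final = traj.points[npts - 1].time_from_start.toSec();

    if (traj_clock >= t_final) {
        // Past the end: command the exact final point and signal completion.
        int njnts = static_cast<int>(traj.points[npts - 1].positions.size());
        qvec_new.resize(njnts);
        for (int j = 0; j < njnts; ++j)
            qvec_new[j] = traj.points[npts - 1].positions[j];
        return false;
    }

    // Advance isegment until traj_clock falls inside [t(isegment), t(isegment+1)).
    while (isegment + 1 < npts - 1 &&
           traj.points[isegment + 1].time_from_start.toSec() <= traj_clock)
        ++isegment;

    const trajectory_msgs::JointTrajectoryPoint& p0 = traj.points[isegment];
    const trajectory_msgs::JointTrajectoryPoint& p1 = traj.points[isegment + 1];
    double t0 = p0.time_from_start.toSec();
    double t1 = p1.time_from_start.toSec();
    double frac = (t1 > t0) ? (traj_clock - t0) / (t1 - t0) : 1.0;

    // Linear blend between the two bounding trajectory points.
    int njnts = static_cast<int>(p0.positions.size());
    qvec_new.resize(njnts);
    for (int j = 0; j < njnts; ++j)
        qvec_new[j] = p0.positions[j] + frac * (p1.positions[j] - p0.positions[j]);

    return true;
}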
Example #25
    void executeCB(const race_move_arms::CarryarmGoalConstPtr &goal)
    {
      // make sure that the action hasn't been canceled
      if (!as_.isActive())
        return;

      bool success = true;
      bool rightArm = true; // right arm

      ROS_INFO("Executing, creating carryarm action");

      // start executing the action
      if (goal->carrypose == 1) // both arms are not in carrypose!
      {
        // check that preempt has not been requested by the client
        if (as_.isPreemptRequested() || !ros::ok())
        {
          ROS_INFO("%s: Preempted", action_name_.c_str());
          // set the action state to preempted and return; continuing would later
          // call setAborted on an already-preempted goal
          as_.setPreempted();
          return;
        }
        else
        {
          ROS_INFO("Executing arm trajectory..");

          switch (goal->carryarm)
          {
            case 0:
              ROS_INFO("Using right arm");
              success = moveRightArm();
              break;
            case 1:
              ROS_INFO("Using left arm");
              success = moveLeftArm();
              break;
            case 2:
              ROS_INFO("Using both arms");
              rightArm = true;
              success = moveArmToAbove(rightArm);
              success &= moveBothArms();
              break;
            default:
              ROS_INFO("Unknown carryarm command, ignoring");
              break;
          }
        }
      }
      else if (goal->carrypose == 2) // the other arm is already in carrypose!
      {
        // check that preempt has not been requested by the client
        if (as_.isPreemptRequested() || !ros::ok())
        {
          ROS_INFO("%s: Preempted", action_name_.c_str());
          // set the action state to preempted and return; continuing would later
          // call setAborted on an already-preempted goal
          as_.setPreempted();
          return;
        }
        else
        {
          ROS_INFO("Executing arm trajectory..");

          switch (goal->carryarm)
          {
            case 0:
              ROS_INFO("Using right arm");
              success = moveBothArms(); // trivial case
              break;
            case 1:
              ROS_INFO("Using left arm");
              //move right arm to side first
              rightArm = true;
              success = moveArmToAbove(rightArm);
              success &= moveBothArms(); // trivial case
              break;
            default:
              ROS_INFO("Unknown carryarm command, ignoring");
              break;
          }
        }
      }
      else if (goal->carrypose == 3) // move arm to above
      {
        // check that preempt has not been requested by the client
        if (as_.isPreemptRequested() || !ros::ok())
        {
          ROS_INFO("%s: Preempted", action_name_.c_str());
          // set the action state to preempted and return; continuing would later
          // call setAborted on an already-preempted goal
          as_.setPreempted();
          return;
        }
        else
        {
          ROS_INFO("Executing arm trajectory..");

          switch (goal->carryarm)
          {
            case 0:
              ROS_INFO("Using right arm");
              rightArm = true;
              success = moveArmToAbove(rightArm);
              break;
            case 1:
              ROS_INFO("Using left arm");
              rightArm = false;
              success = moveArmToAbove(rightArm);
              break;
            case 2:
              ROS_INFO("Using both arms");
              rightArm = true;
              success = moveArmToAbove(rightArm);
              rightArm = false;
              success &= moveArmToAbove(rightArm);
              break;
            default:
              ROS_INFO("Unknown carryarm command, ignoring");
              break;
          }
        }
      }
      else if (goal->carrypose == 4) // move arm to side
      {
        // check that preempt has not been requested by the client
        if (as_.isPreemptRequested() || !ros::ok())
        {
          ROS_INFO("%s: Preempted", action_name_.c_str());
          // set the action state to preempted and return; continuing would later
          // call setAborted on an already-preempted goal
          as_.setPreempted();
          return;
        }
        else
        {
          ROS_INFO("Executing arm trajectory..");

          switch (goal->carryarm)
          {
            case 0:
              ROS_INFO("Using right arm");
              rightArm = true;
              //success = moveArmToAbove(rightArm);
              success = moveArmToSide(rightArm);
              break;
            case 1:
              ROS_INFO("Using left arm");
              rightArm = true;
              success = moveArmToAbove(rightArm);
              rightArm = false;
              //success = moveArmToAbove(rightArm);
              success &= moveArmToSide(rightArm);
              success &= moveRightArm();
              break;
            case 2:
              ROS_INFO("Using both arms");
              rightArm = true;
              //success = moveArmToAbove(rightArm);
              success = moveArmToSide(rightArm);
              rightArm = false;
              //success &= moveArmToAbove(rightArm);
              success &= moveArmToSide(rightArm);
              break;
            default:
              ROS_INFO("Unknown carryarm command, ignoring");
              break;
          }
        }
      }
      else
      {
        ROS_WARN("Unknown carrypose command, nothing executed");
        success = false;
      }

      if(success)
      {
        result_.result = 1;
        ROS_INFO("%s: Succeeded", action_name_.c_str());
        // set the action state to succeeded
        as_.setSucceeded(result_);
      }
      else
      {
        result_.result = 0;
        ROS_INFO("%s: Failed", action_name_.c_str());
        // set the action state to failed
        as_.setAborted(result_);
      }
    }
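The four carrypose branches above repeat the same preempt check and then still fall through toward the final succeeded/aborted block. One way to tighten this, sketched below with illustrative names that are not part of the original class, is to factor the check into a helper that terminates the goal and lets executeCB return immediately:

// Illustrative helper: perform the standard preempt check once.
// Returns true if the goal was preempted (and has already been terminated),
// in which case executeCB should simply return.
bool checkPreempt()
{
  if (as_.isPreemptRequested() || !ros::ok())
  {
    ROS_INFO("%s: Preempted", action_name_.c_str());
    as_.setPreempted();
    return true;
  }
  return false;
}

// Usage inside each carrypose branch, replacing the duplicated block:
//   if (checkPreempt())
//     return;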