Example #1: pcl::tracking::PyramidalKLTTracker::track
template <typename PointInT, typename IntensityT> void
pcl::tracking::PyramidalKLTTracker<PointInT, IntensityT>::track (const PointCloudInConstPtr& prev_input,
                                                                 const PointCloudInConstPtr& input,
                                                                 const std::vector<FloatImageConstPtr>& prev_pyramid,
                                                                 const std::vector<FloatImageConstPtr>& pyramid,
                                                                 const pcl::PointCloud<pcl::PointUV>::ConstPtr& prev_keypoints,
                                                                 pcl::PointCloud<pcl::PointUV>::Ptr& keypoints,
                                                                 std::vector<int>& status,
                                                                 Eigen::Affine3f& motion) const
{
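  // Track every keypoint from the previous cloud into the current one with
  // pyramidal Lucas-Kanade: start at the coarsest pyramid level and refine
  // the displacement estimate at each finer level.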
  std::vector<Eigen::Array2f, Eigen::aligned_allocator<Eigen::Array2f> > next_pts (prev_keypoints->size ());
  Eigen::Array2f half_win ((track_width_-1)*0.5f, (track_height_-1)*0.5f);
  pcl::TransformationFromCorrespondences transformation_computer;
  const int nb_points = prev_keypoints->size ();
  for (int level = nb_levels_ - 1; level >= 0; --level)
  {
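    // Previous and current intensity images plus the precomputed x/y gradients
    // of the previous image for this level (the pyramids store three images per level).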
    const FloatImage& prev = *(prev_pyramid[level*3]);
    const FloatImage& next = *(pyramid[level*3]);
    const FloatImage& grad_x = *(prev_pyramid[level*3+1]);
    const FloatImage& grad_y = *(prev_pyramid[level*3+2]);

    Eigen::ArrayXXf prev_win (track_height_, track_width_);
    Eigen::ArrayXXf grad_x_win (track_height_, track_width_);
    Eigen::ArrayXXf grad_y_win (track_height_, track_width_);
    float ratio (1.0f / static_cast<float> (1 << level));
    for (int ptidx = 0; ptidx < nb_points; ptidx++)
    {
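      // Scale the keypoint to this pyramid level and seed the estimate with the
      // result propagated from the coarser level (the point itself at the top level).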
      Eigen::Array2f prev_pt (prev_keypoints->points[ptidx].u * ratio,
                              prev_keypoints->points[ptidx].v * ratio);
      Eigen::Array2f next_pt;
      if (level == nb_levels_ -1)
        next_pt = prev_pt;
      else
        next_pt = next_pts[ptidx]*2.f;

      next_pts[ptidx] = next_pt;

      Eigen::Array2i iprev_point;
      prev_pt -= half_win;
      iprev_point[0] = std::floor (prev_pt[0]);
      iprev_point[1] = std::floor (prev_pt[1]);

      if (iprev_point[0] < -track_width_ || (uint32_t) iprev_point[0] >= grad_x.width ||
          iprev_point[1] < -track_height_ || (uint32_t) iprev_point[1] >= grad_y.height)
      {
        if (level == 0)
          status[ptidx] = -1;
        continue;
      }

      float a = prev_pt[0] - iprev_point[0];
      float b = prev_pt[1] - iprev_point[1];
      Eigen::Array4f weight;
      weight[0] = (1.f - a)*(1.f - b);
      weight[1] = a*(1.f - b);
      weight[2] = (1.f - a)*b;
      weight[3] = 1 - weight[0] - weight[1] - weight[2];

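      // Bilinearly interpolate the tracking window in the previous image and
      // accumulate its 2x2 gradient matrix, stored as [Gxx, Gxy, Gyy].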
      Eigen::Array3f covar = Eigen::Array3f::Zero ();
      spatialGradient (prev, grad_x, grad_y, iprev_point, weight, prev_win, grad_x_win, grad_y_win, covar);

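      // Reject windows whose gradient matrix is nearly singular or whose smallest
      // eigenvalue is below the threshold: there is not enough texture to track.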
      float det = covar[0]*covar[2] - covar[1]*covar[1];
      float min_eigenvalue = (covar[2] + covar[0] - std::sqrt ((covar[0]-covar[2])*(covar[0]-covar[2]) + 4.f*covar[1]*covar[1]))/2.f;

      if (min_eigenvalue < min_eigenvalue_threshold_ || det < std::numeric_limits<float>::epsilon ())
      {
        status[ptidx] = -2;
        continue;
      }

      det = 1.f/det;
      next_pt -= half_win;

      Eigen::Array2f prev_delta (0, 0);
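      // Iteratively refine the displacement: compute the mismatch against the
      // current image and solve the 2x2 Lucas-Kanade system until convergence
      // or max_iterations_ is reached.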
      for (unsigned int j = 0; j < max_iterations_; j++)
      {
        Eigen::Array2i inext_pt = next_pt.floor ().cast<int> ();

        if (inext_pt[0] < -track_width_ || (uint32_t) inext_pt[0] >= next.width ||
            inext_pt[1] < -track_height_ || (uint32_t) inext_pt[1] >= next.height)
        {
          if (level == 0)
            status[ptidx] = -1;
          break;
        }

        a = next_pt[0] - inext_pt[0];
        b = next_pt[1] - inext_pt[1];
        weight[0] = (1.f - a)*(1.f - b);
        weight[1] = a*(1.f - b);
        weight[2] = (1.f - a)*b;
        weight[3] = 1 - weight[0] - weight[1] - weight[2];
        // compute mismatch vector
        Eigen::Array2f beta = Eigen::Array2f::Zero ();
        mismatchVector (prev_win, grad_x_win, grad_y_win, next, inext_pt, weight, beta);
        // optical flow resolution
        Eigen::Vector2f delta ((covar[1]*beta[1] - covar[2]*beta[0])*det, (covar[1]*beta[0] - covar[0]*beta[1])*det);
        // update position
        next_pt[0] += delta[0]; next_pt[1] += delta[1];
        next_pts[ptidx] = next_pt + half_win;

        if (delta.squaredNorm () <= epsilon_)
          break;

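        // If the update oscillates (the new delta roughly cancels the previous
        // one), settle on the midpoint and stop iterating.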
        if (j > 0 && std::abs (delta[0] + prev_delta[0]) < 0.01 &&
            std::abs (delta[1] + prev_delta[1]) < 0.01 )
        {
          next_pts[ptidx][0] -= delta[0]*0.5f;
          next_pts[ptidx][1] -= delta[1]*0.5f;
          break;
        }
        // update delta
        prev_delta = delta;
      }

      // update tracked points
      if (level == 0 && !status[ptidx])
      {
        Eigen::Array2f next_point = next_pts[ptidx] - half_win;
        Eigen::Array2i inext_point;

        inext_point[0] = std::floor (next_point[0]);
        inext_point[1] = std::floor (next_point[1]);

        if (inext_point[0] < -track_width_ || (uint32_t) inext_point[0] >= next.width ||
            inext_point[1] < -track_height_ || (uint32_t) inext_point[1] >= next.height)
        {
          status[ptidx] = -1;
          continue;
        }
        // insert valid keypoint
        pcl::PointUV n;
        n.u = next_pts[ptidx][0];
        n.v = next_pts[ptidx][1];
        keypoints->push_back (n);
        // add points pair to compute transformation
        inext_point[0] = std::floor (next_pts[ptidx][0]);
        inext_point[1] = std::floor (next_pts[ptidx][1]);
        iprev_point[0] = std::floor (prev_keypoints->points[ptidx].u);
        iprev_point[1] = std::floor (prev_keypoints->points[ptidx].v);
        const PointInT& prev_pt = prev_input->points[iprev_point[1]*prev_input->width + iprev_point[0]];
        const PointInT& next_pt = input->points[inext_point[1]*input->width + inext_point[0]];
        transformation_computer.add (prev_pt.getVector3fMap (), next_pt.getVector3fMap (), 1.0);
      }
    }
  }
  motion = transformation_computer.getTransformation ();
}
Example #2: QNode::clingoInterfaceHandler
  void QNode::clingoInterfaceHandler(
      const segbot_logical_translator_gui::ClingoInterfaceGoalConstPtr &req) {

    segbot_logical_translator_gui::ClingoInterfaceResult resp;

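    // Dispatch on the requested operator and fill the action result with the
    // outcome and any observable fluents it produces.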
    if (req->command.op == "approach" || req->command.op == "gothrough") {
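      // Navigation: compute a goal pose next to the door (approach) or on its far
      // side (gothrough) and drive the robot there.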
      std::string door_name = req->command.args[0];
      size_t door_idx = handler_->getDoorIdx(door_name);
      if (door_idx == (size_t)-1) {
        // Interface failure
        resp.success = false;
        resp.status = "Could not resolve argument: " + door_name;
      } else {
        bwi_mapper::Point2f approach_pt;
        float approach_yaw = 0;
        bool door_approachable = false;
        if (req->command.op == "approach") {
          door_approachable = handler_->getApproachPoint(door_idx, 
              bwi_mapper::Point2f(robot_x_, robot_y_), approach_pt,
              approach_yaw);
        } else {
          door_approachable = handler_->getThroughDoorPoint(door_idx, 
              bwi_mapper::Point2f(robot_x_, robot_y_), approach_pt,
              approach_yaw);
        }
        if (door_approachable) {

          /* Update the display */
          button1_enabled_ = false;
          button2_enabled_ = false;
          location_box_enabled_ = false;
          display_text_ = req->command.op + " " + door_name;
          Q_EMIT updateFrameInfo();

          geometry_msgs::PoseStamped pose;
          pose.header.stamp = ros::Time::now();
          pose.header.frame_id = "/map"; //TODO
          pose.pose.position.x = approach_pt.x;
          pose.pose.position.y = approach_pt.y;
          tf::quaternionTFToMsg(tf::createQuaternionFromYaw(approach_yaw), 
              pose.pose.orientation); 
          double start_time = ros::Time::now().toSec();
          bool success = executeRobotGoal(pose);
          float cost = ros::Time::now().toSec() - start_time;
          resp.success = success;

          // Publish the observable fluents
          senseDoorProximity(resp.observable_fluents, door_idx);
          if (success) {
            if (prev_door_idx_ != -1 &&
                prev_door_idx_ != door_idx) {
              size_t location_idx = 
                handler_->getLocationIdx(bwi_mapper::Point2f(robot_x_, robot_y_));
              std::string at_str = handler_->getLocationString(location_idx);
              ce_->addSample(at_str, prev_door_idx_, door_idx, cost); 
            }
            prev_door_idx_ = door_idx;
          } else {
            prev_door_idx_ = -1;
          }

          // Close the door if one was opened previously
          if (success) {
            if (sim_auto_door_ && close_door_idx_ != -1) {
              gh_->closeDoor(close_door_idx_);
              close_door_idx_ = -1;
            }
          }

          // We can't sense the door once we go through as we won't be facing
          // it. We should only sense a door when we approach it.
          if (req->command.op == "approach") {
            segbot_logical_translator_gui::ClingoFluent door_open;
            door_open.args.push_back(door_name);
            if (gh_->isDoorOpen(door_idx)) {
              door_open.op = "open";
              resp.observable_fluents.push_back(door_open);
            } else {
              door_open.op = "-open";
              resp.observable_fluents.push_back(door_open);
            }
          }

        } else {
          // Planning failure
          resp.success = false;
          resp.status = "Cannot approach " + door_name + " from here.";
        }

      }
    } else if (req->command.op == "opendoor") {
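      // Ask for the door to be opened (the simulator opens it automatically when
      // sim_auto_door_ is set) and poll its state for up to 30 seconds.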
      std::string door_name = req->command.args[0];
      size_t door_idx = handler_->getDoorIdx(door_name);
      if (door_idx == (size_t)-1) {
        resp.success = false;
        resp.status = "Could not resolve argument: " + door_name;
      } else {

        /* Update the display */
        button1_enabled_ = false;
        button2_enabled_ = false;
        location_box_enabled_ = false;
        display_text_ = "Can you please open " + door_name;
        Q_EMIT updateFrameInfo();

        /* Wait for the door to be opened */
        ros::Rate r(1);
        int count = 0;
        bool door_open = false; 
        while (!door_open && count < 30) {
          if (sim_auto_door_ && count == 0) {
            gh_->openDoor(door_idx);
            close_door_idx_ = door_idx;
          }
          if (as_->isPreemptRequested() || !ros::ok()) { // TODO What about goal not being active or new goal?
            ROS_INFO("Preempting action");
            as_->setPreempted();
            return;
          }
          door_open = gh_->isDoorOpen(door_idx);
          count++;
          r.sleep();
        }

        /* Update the display and send back appropriate fluents */
        segbot_logical_translator_gui::ClingoFluent door_open_fluent;
        door_open_fluent.args.push_back(door_name);
        if (!door_open) {
          display_text_ = "Oh no! The request timed out. Let me think...";
          Q_EMIT updateFrameInfo();
          door_open_fluent.op = "-open";
          resp.success = false;
          resp.status = "User unable to open door";
        } else {
          display_text_ = "Thank you for opening the door!";
          Q_EMIT updateFrameInfo();
          door_open_fluent.op = "open";
          resp.success = true;
        }
        resp.observable_fluents.push_back(door_open_fluent);
      }
    } else if (req->command.op == "sense") {
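      // Answer a sensing request for a single fluent: whether a door is open,
      // whether the robot is beside/facing a door, or a person's location (ploc)
      // provided through the GUI.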
      std::string sense_fluent_op = req->sense_fluent.op;
      if (sense_fluent_op == "open") {
        std::string door_name = req->sense_fluent.args[0];
        size_t door_idx = handler_->getDoorIdx(door_name);
        segbot_logical_translator_gui::ClingoFluent door_open;
        door_open.args.push_back(door_name);
        if (gh_->isDoorOpen(door_idx)) {
          door_open.op = "open";
          resp.observable_fluents.push_back(door_open);
        } else {
          door_open.op = "-open";
          resp.observable_fluents.push_back(door_open);
        }
        resp.success = true;
      } else if (sense_fluent_op == "beside" || sense_fluent_op == "facing") {
        std::string door_name = req->sense_fluent.args[0];
        size_t door_idx = handler_->getDoorIdx(door_name);
        bwi_mapper::Point2f robot_loc(robot_x_, robot_y_);
        bool op_door = (sense_fluent_op == "beside") ? 
          handler_->isRobotBesideDoor(robot_loc, robot_yaw_, 1.5, door_idx) :
          handler_->isRobotFacingDoor(robot_loc, robot_yaw_, 1.5, door_idx);
        if (!op_door) {
          segbot_logical_translator_gui::ClingoFluent n_op;
          n_op.op = "-" + sense_fluent_op;
          n_op.args.push_back(door_name);
          resp.observable_fluents.push_back(n_op);
        } else {
          segbot_logical_translator_gui::ClingoFluent op;
          op.op = sense_fluent_op;
          op.args.push_back(door_name);
          resp.observable_fluents.push_back(op);
        }
        resp.success = true;
      } else if (sense_fluent_op == "ploc") {
        person_name_ = req->sense_fluent.args[0];
        /* Update the display */
        button1_enabled_ = false;
        button2_enabled_ = false;
        location_box_enabled_ = true;
        display_text_ = "Can you tell me where " + person_name_ + " is?";
        location_received_ = false;
        Q_EMIT updateFrameInfo();

        /* Wait for the location to be received */
        ros::Rate r(10);
        int count = 0;
        while (!location_received_ && count < 300) {
          if (as_->isPreemptRequested() || !ros::ok()) { // TODO What about goal not being active or new goal?
            ROS_INFO("Preempting action");
            as_->setPreempted();
            return;
          }
          count++;
          r.sleep();
        }

        if (location_received_) {
          display_text_ = "Thank you! I will try and find " + person_name_ + " in " + person_location_;
          segbot_logical_translator_gui::ClingoFluent person_loc_fluent;
          person_loc_fluent.op = "inside";
          person_loc_fluent.args.push_back(person_name_);
          person_loc_fluent.args.push_back(person_location_);
          resp.observable_fluents.push_back(person_loc_fluent);
        } else {
          display_text_ = "Oh no! The request timed out. Let me think...";
        }
        location_box_enabled_ = false; 
        Q_EMIT updateFrameInfo();

      }
    } else if (req->command.op == "evaluate") {
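      // Estimate the cost of a candidate plan by chaining its approach/gothrough
      // legs starting from the robot's current pose.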
      resp.plan_cost = 0;
      resp.success = true;
      bwi_mapper::Point2f prev_pt(robot_x_, robot_y_), approach_pt;
      float prev_yaw = robot_yaw_, approach_yaw = 0;
      for (size_t i = 0; i < req->evaluate_fluents.size(); ++i) {
        if (req->evaluate_fluents[i].op == "approach") {
          std::string door_name = req->evaluate_fluents[i].args[0];
          std::cout << "door_name: " << door_name << std::endl;
          size_t door_idx = handler_->getDoorIdx(door_name);
          bool door_approachable = handler_->getApproachPoint(door_idx, 
              prev_pt, approach_pt, approach_yaw);
          if (!door_approachable) {
            resp.plan_cost = std::numeric_limits<float>::max();
            resp.success = false;
            resp.status = "unable to evaluate one approach leg of plan.";
            break;
          }
          resp.plan_cost += handler_->getPathCost(prev_pt, prev_yaw, approach_pt, approach_yaw);
        } else if (req->evaluate_fluents[i].op == "gothrough") {
          std::string door_name = req->evaluate_fluents[i].args[0];
          size_t door_idx = handler_->getDoorIdx(door_name);
          bool door_approachable = handler_->getThroughDoorPoint(door_idx, 
              prev_pt, approach_pt, approach_yaw);
          if (!door_approachable) {
            resp.plan_cost = std::numeric_limits<float>::max();
            resp.success = false;
            resp.status = "unable to evaluate one gothrough leg of plan.";
            std::cout << "from " << prev_pt << " to " << approach_pt << std::endl;
            break;
          }
          resp.plan_cost += 
            bwi_mapper::getMagnitude(approach_pt - prev_pt); // don't worry about closed doors for now
        } else {
          continue;
        }

        prev_pt = approach_pt;
        prev_yaw = approach_yaw;
      }
    } else if (req->command.op == "greet") {
      /* Update the display */
      button1_enabled_ = false;
      button2_enabled_ = false;
      location_box_enabled_ = false;
      display_text_ = "Hello " + req->command.args[0] + "!!";
      Q_EMIT updateFrameInfo();
      segbot_logical_translator_gui::ClingoFluent visiting;
      visiting.op = "visiting";
      visiting.args.push_back(req->command.args[0]);
      resp.observable_fluents.push_back(visiting);
      resp.success = true;
    } else if (req->command.op == "noop") {
      resp.success = true;
      senseDoorProximity(resp.observable_fluents);
    } else if (req->command.op == "finish") {
      ROS_INFO("RECEIVED FINISH");
      resp.success = true;
      prev_door_idx_ = -1; // Robot may be teleported
      ce_->finalizeEpisode();
    }

    // Get location
    size_t location_idx = 
      handler_->getLocationIdx(bwi_mapper::Point2f(robot_x_, robot_y_));
    if (location_idx == (size_t) -1) {
      ROS_ERROR("Unable to compute position");
    } else {
      std::string at_str = handler_->getLocationString(location_idx);
      segbot_logical_translator_gui::ClingoFluent at;
      at.op = "at";
      at.args.push_back(at_str);
      resp.observable_fluents.push_back(at);
    }

    /* boost::this_thread::sleep(boost::posix_time::seconds(1)); */

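    // Report the result through the actionlib server.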
    if (resp.success) {
      as_->setSucceeded(resp, resp.status);
    } else {
      as_->setAborted(resp, resp.status);
    }
  }