/***********************************************************************
 *  Method: RobotController::UpdatePose
 *  Params:
 * Returns: bool - true if the pose lookup succeeded
 * Effects: Gathers the robot's current pose and updates the underlying
 * 	status message
 ***********************************************************************/
bool RobotController::UpdatePose()
{
    //get current pose of the robot
    tf::StampedTransform transform;
    try{
        m_listener->lookupTransform("/map", base_frame.c_str(),
                      ros::Time(0), transform);

        tf::Transform trans = transform;

        geometry_msgs::Transform msg;
        tf::transformTFToMsg(trans, msg);

        geometry_msgs::Pose pose;
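        // Only the planar components are copied; orientation x and y are left
        // at zero, which assumes the robot moves in the 2D map plane.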
        pose.position.x = msg.translation.x;
        pose.position.y = msg.translation.y;
        pose.orientation.z = msg.rotation.z;
        pose.orientation.w = msg.rotation.w;
        m_status.SetPose(pose);
        return true;
    }
    catch (tf::TransformException& ex){
        ROS_ERROR_THROTTLE(5, "%s",ex.what());
        return false;
    }
}
void TerrainClassifierNode::setPointCloud(const sensor_msgs::PointCloud2& point_cloud_msg)
{
  std::string world_frame_id = vigir_footstep_planning::strip_const(terrain_classifier.getFrameId(), '/');
  std::string cloud_frame_id = vigir_footstep_planning::strip_const(point_cloud_msg.header.frame_id, '/');
  if (world_frame_id != cloud_frame_id)
  {
    ROS_ERROR_THROTTLE(5.0, "[TerrainClassifierNode] setPointCloud: Frame of input point cloud ('%s') mismatch! Should be '%s'.", cloud_frame_id.c_str(), world_frame_id.c_str());
    return;
  }

  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(new pcl::PointCloud<pcl::PointXYZ>());
  pcl::fromROSMsg(point_cloud_msg, *point_cloud);

  // downsample the input with a voxel grid filter
  // (the parameters are assumed to be the x/y leaf size, 2 cm, and the z leaf size, 2 m)
  filterVoxelGrid<pcl::PointXYZ>(point_cloud, 0.02, 0.02, 2.0);

  // apply the terrain classifier's own input filtering
  terrain_classifier.filterPointCloudData(point_cloud);

  terrain_classifier.setPointCloud(point_cloud);
  generateTerrainModel();

  publishDebugData();
  publishTerrainModel();

  //ROS_INFO("Saved");
  //pcl::io::savePCDFile("/home/alex/vigir/catkin_ws/vigir_footstep_planning/vigir_terrain_classifier/pointclouds/new.pcd", cloud_input);
}
  void imageCallback(const sensor_msgs::ImageConstPtr& msg)
  {
    cv_bridge::CvImagePtr cv_img_ptr;

    try
    {
      cv_img_ptr = cv_bridge::toCvCopy(msg, "mono8");
    }
    catch (cv_bridge::Exception& e)
    {
      ROS_ERROR_STREAM("Could not convert ROS image to CV: "<< e.what());
      return;
    }

    static int id = 0;
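    // id is a rolling visualization marker id; "id++ % 1000" below keeps it bounded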
    int n = reader_.parse(cv_img_ptr);
    ROS_DEBUG_STREAM("Got image, has "<<n<<" barcodes");
    std::vector<barcode::Barcode> barcodes = reader_.getBarcodes();
    for (uint i = 0; i < barcodes.size(); i++)
    {
      ROS_DEBUG_STREAM("Barcode: " << barcodes[i].data //
          << " x:"<<barcodes[i].x//
          << " y:"<<barcodes[i].y);
      vis_.publish(barcodes[i].x, barcodes[i].y, barcodes[i].z, msg->header.frame_id, id++ % 1000);
    }
    if (msg->header.frame_id == "")
    {
      ROS_ERROR_THROTTLE(1, "Received image with empty frame_id, would cause tf connectivity issues.");
    }
    object_pub_.publish(barcodes, msg);
  }
void QualisysDriver::run() {

  prt_packet = port_protocol.GetRTPacket();
  CRTPacket::EPacketType e_type;
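  // Request the current frame with 6-DOF Euler components from the QTM server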
  port_protocol.GetCurrentFrame(CRTProtocol::Component6dEuler);

  if(port_protocol.ReceiveRTPacket(e_type, true)) {

    switch(e_type) {
      // Case 1 - sHeader.nType 0 indicates an error
      case CRTPacket::PacketError:
        ROS_ERROR_STREAM_THROTTLE(
            1, "Error when streaming frames: "
            << port_protocol.GetRTPacket()->GetErrorString());
        break;

      // Case 2 - No more data
      case CRTPacket::PacketNoMoreData:
        ROS_WARN_STREAM_THROTTLE(1, "No more data");
        break;

      // Case 3 - Data received
      case CRTPacket::PacketData:
        handleFrame();
        break;

      default:
        ROS_ERROR_THROTTLE(1, "Unknown CRTPacket case");
        break;
    }
  }

  return;
}
Example #5
bool OccupancyMapMonitor::getShapeTransformCache(std::size_t index, const std::string& target_frame,
                                                 const ros::Time& target_time, ShapeTransformCache& cache) const
{
  if (transform_cache_callback_)
  {
    ShapeTransformCache temp_cache;
    if (transform_cache_callback_(target_frame, target_time, temp_cache))
    {
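      // Remap each shape handle returned by the callback through this sensor's
      // mesh-handle table, so the cache is keyed by the monitor's own handles.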
      for (std::pair<const ShapeHandle, Eigen::Isometry3d>& it : temp_cache)
      {
        std::map<ShapeHandle, ShapeHandle>::const_iterator jt = mesh_handles_[index].find(it.first);
        if (jt == mesh_handles_[index].end())
        {
          ROS_ERROR_THROTTLE(1, "Incorrect mapping of mesh handles");
          return false;
        }
        else
          cache[jt->second] = it.second;
      }
      return true;
    }
    else
      return false;
  }
  else
    return false;
}
Example #6
File: driver.hpp  Project: DSsoto/Sub8
  void send_heartbeat()
  {
    double maxdepth = 15;

    std::stringstream buf;
    buf << "CR0\r";              // load factory settings (won't change baud rate)
    buf << "#BJ 100 110 000\r";  // enable only bottom track high res velocity and bottom track
                                 // range
    // buf << "#BK2\r"; // send water mass pings when bottom track pings fail
    // buf << "#BL7,36,46\r"; // configure near layer and far layer to 12 and 15 feet
    buf << "ES0\r";         // 0 salinity
    buf << "EX00000\r";     // no transformation
    buf << "EZ10000010\r";  // configure sensor sources. Provide manual data for everything except
                            // speed of sound and temperature
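    // BX takes the maximum depth in decimeters, hence maxdepth * 10, rounded to the nearest integer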
    buf << "BX" << std::setw(5) << std::setfill('0') << (int)(maxdepth * 10 + 0.5) << '\r';  // configure max depth
    buf << "TT2012/03/04, 05:06:07\r";                                                       // set RTC
    buf << "CS\r";                                                                           // start pinging

    std::string str = buf.str();

    try  // Write heartbeat to serial port
    {
      size_t written = 0;
      while (written < str.size())
      {
        written += p.write_some(boost::asio::buffer(str.data() + written, str.size() - written));
      }
    }
    catch (const std::exception &exc)
    {
      ROS_ERROR_THROTTLE(0.5, "DVL: error on write: %s; dropping heartbeat", exc.what());
    }
  }
Example #7
  void
  TrackerViewer::displayMovingEdgeSites()
  {
    if (!sites_)
      return;
    for (unsigned i = 0; i < sites_->moving_edge_sites.size(); ++i)
      {
	double x = sites_->moving_edge_sites[i].x;
	double y = sites_->moving_edge_sites[i].y;
	int suppress = sites_->moving_edge_sites[i].suppress;
	vpColor color = vpColor::black;

	switch(suppress)
	  {
	  case 0:
	    color = vpColor::green;
	    break;
	  case 1:
	    color = vpColor::blue;
	    break;
	  case 2:
	    color = vpColor::purple;
	    break;
	  case 4:
	    color = vpColor::red;
	    break;
	  default:
	    ROS_ERROR_THROTTLE(10, "bad suppress value");
	  }

	vpDisplay::displayCross(image_, vpImagePoint(x, y), 3, color, 1);
      }
  }
  void cloudCallback (const sensor_msgs::PointCloud2::ConstPtr& cloud_in)
  {

    if (tfl_->waitForTransform(p_target_frame_, cloud_in->header.frame_id, cloud_in->header.stamp, wait_duration_)){
      tf::StampedTransform transform;
      tfl_->lookupTransform(p_target_frame_, cloud_in->header.frame_id, cloud_in->header.stamp, transform);

      //ROS_INFO("Lookup %s %s", p_target_frame_.c_str(), cloud_in->header.frame_id.c_str());

      double roll, pitch, yaw;
      tf::Matrix3x3(transform.getRotation()).getRPY(roll, pitch, yaw);
      //ROS_INFO("roll: %f pitch: %f yaw: %f", roll, pitch, yaw);

      if (((prior_roll_angle_ < (-M_PI*0.5)) && (roll > (-M_PI*0.5))) ||
          ((prior_roll_angle_ < ( M_PI*0.5)) && (roll > ( M_PI*0.5)))){
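        // The roll angle just crossed -pi/2 or +pi/2 from below: publish the
        // cloud aggregated since the previous crossing.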

        pcl::PointCloud<pcl::PointXYZ> tmp_agg_cloud;

        for (size_t i=0; i < cloud_agg_.size(); ++i){
          if (tmp_agg_cloud.empty()){
            tmp_agg_cloud = *cloud_agg_[i];
          }else{
            tmp_agg_cloud += *cloud_agg_[i];
          }
        }

        pcl::toROSMsg(tmp_agg_cloud, cloud2_);

        cloud2_.header.frame_id = p_target_frame_;
        cloud2_.header.stamp = ros::Time::now();
        point_cloud2_pub_.publish(cloud2_);
        cloud_agg_.clear();

      }else{

        pcl::PointCloud<pcl::PointXYZ> pc_tmp;

        pcl::fromROSMsg(*cloud_in, pc_tmp);

        Eigen::Matrix4f sensorToWorld;
        pcl_ros::transformAsMatrix(transform, sensorToWorld);

        boost::shared_ptr<pcl::PointCloud<pcl::PointXYZ> > pc;
        pc.reset(new pcl::PointCloud<pcl::PointXYZ>());

        pcl::transformPointCloud(pc_tmp, *pc, sensorToWorld);

        cloud_agg_.push_back(pc);
      }

      prior_roll_angle_ = roll;
    }else{
      ROS_ERROR_THROTTLE(5.0, "Cannot transform from sensor %s to target %s. This message is throttled.",
                         cloud_in->header.frame_id.c_str(),
                         p_target_frame_.c_str());
    }
  }
static void publishOctomap(ros::Publisher *octree_binary_pub, occupancy_map_monitor::OccupancyMapMonitor *server)
{
  octomap_msgs::Octomap map;
  
  map.header.frame_id = server->getMapFrame();
  map.header.stamp = ros::Time::now();
  
  server->getOcTreePtr()->lockRead();
  try
  {
    if (!octomap_msgs::binaryMapToMsgData(*server->getOcTreePtr(), map.data))
      ROS_ERROR_THROTTLE(1, "Could not generate OctoMap message");
  }
  catch(...)
  {
    ROS_ERROR_THROTTLE(1, "Exception thrown while generating OctoMap message");
  }
  server->getOcTreePtr()->unlockRead();
  
  octree_binary_pub->publish(map);
}
  bool HectorPathFollower::getRobotPose(tf::Stamped<tf::Pose>& global_pose) const {

    global_pose.setIdentity();
    tf::Stamped<tf::Pose> robot_pose;
    robot_pose.setIdentity();
    robot_pose.frame_id_ = p_robot_base_frame_;
    robot_pose.stamp_ = ros::Time(0);
    ros::Time current_time = ros::Time::now(); // save time for checking tf delay later

    //get the global pose of the robot
    try{
      tf_->transformPose(p_global_frame_, robot_pose, global_pose);
    }
    catch(tf::LookupException& ex) {
      ROS_ERROR_THROTTLE(1.0, "No Transform available Error looking up robot pose: %s\n", ex.what());
      return false;
    }
    catch(tf::ConnectivityException& ex) {
      ROS_ERROR_THROTTLE(1.0, "Connectivity Error looking up robot pose: %s\n", ex.what());
      return false;
    }
    catch(tf::ExtrapolationException& ex) {
      ROS_ERROR_THROTTLE(1.0, "Extrapolation Error looking up robot pose: %s\n", ex.what());
      return false;
    }
    // check global_pose timeout

    /*
    if (current_time.toSec() - global_pose.stamp_.toSec() > transform_tolerance_) {
      ROS_WARN_THROTTLE(1.0, "Costmap2DROS transform timeout. Current time: %.4f, global_pose stamp: %.4f, tolerance: %.4f",
          current_time.toSec() ,global_pose.stamp_.toSec() ,transform_tolerance_);
      return false;
    }
    */


    return true;
  }
void TerrainClassifierNode::insertPointCloud(const sensor_msgs::PointCloud2& point_cloud_msg)
{
  std::string world_frame_id = vigir_footstep_planning::strip_const(terrain_classifier.getFrameId(), '/');
  std::string cloud_frame_id = vigir_footstep_planning::strip_const(point_cloud_msg.header.frame_id, '/');
  if (world_frame_id != cloud_frame_id)
  {
    ROS_ERROR_THROTTLE(5.0, "[TerrainClassifierNode] insertPointCloud: Frame of input point cloud ('%s') mismatch! Should be '%s'.", cloud_frame_id.c_str(), world_frame_id.c_str());
    return;
  }

  if (point_cloud_msg.data.empty())
    return;

  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud(new pcl::PointCloud<pcl::PointXYZ>());
  pcl::fromROSMsg(point_cloud_msg, *point_cloud);

  // filtering of input point cloud
  terrain_classifier.filterPointCloudData(point_cloud);

  terrain_classifier.insertPointCloud(point_cloud);

  if (terrain_classifier.getInputCloud()->size() <= min_aggregation_size)
  {
    if (cloud_input_pub.getNumSubscribers() > 0)
    {
      sensor_msgs::PointCloud2 cloud_point_msg;
      pcl::toROSMsg(*(terrain_classifier.getInputCloud()), cloud_point_msg);
      cloud_point_msg.header.stamp = ros::Time::now();
      cloud_point_msg.header.frame_id = terrain_classifier.getFrameId();
      cloud_input_pub.publish(cloud_point_msg);
    }

    return;
  }

  if (compute_update_skips_counter++ >= compute_update_skips)
  {
    terrain_classifier.computeNormals(point_cloud);
    compute_update_skips_counter = 0;
  }

  terrain_classifier.generateHeightGridmap(point_cloud);

  publishDebugData();

  //ROS_INFO("Saved");
  //pcl::io::savePCDFile("/home/alex/vigir/catkin_ws/vigir_footstep_planning/vigir_terrain_classifier/pointclouds/new.pcd", cloud_input);
}
Example #12
    void XYOriginCallback(const topic_tools::ShapeShifter::ConstPtr msg)
    {
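      // Try each supported origin message type in turn; instantiate() throws on
      // a type mismatch, so a failed cast falls through to the next block.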
      try
      {
        const gps_common::GPSFixConstPtr origin = msg->instantiate<gps_common::GPSFix>();
        xy_wgs84_util_.reset(
            new swri_transform_util::LocalXyWgs84Util(
                origin->latitude,
                origin->longitude,
                origin->track,
                origin->altitude));
        origin_sub_.shutdown();
        return;
      }
      catch (...) {}

      try
      {
        const geometry_msgs::PoseStampedConstPtr origin = msg->instantiate<geometry_msgs::PoseStamped>();
        xy_wgs84_util_.reset(
            new swri_transform_util::LocalXyWgs84Util(
                origin->pose.position.y,    // Latitude
                origin->pose.position.x,    // Longitude
                0.0,                        // Heading
                origin->pose.position.z));  // Altitude
        origin_sub_.shutdown();
        return;
      }
      catch(...) {}

      try
      {
        const geographic_msgs::GeoPoseConstPtr origin = msg->instantiate<geographic_msgs::GeoPose>();
        xy_wgs84_util_.reset(
            new swri_transform_util::LocalXyWgs84Util(
                origin->position.latitude,
                origin->position.longitude,
                tf::getYaw(origin->orientation),
                origin->position.altitude));
        origin_sub_.shutdown();
        return;
      }
      catch(...) {}

      ROS_ERROR_THROTTLE(1.0, "Unsupported message type received for local_xy_origin.");
    }
Example #13
File: driver.hpp  Project: DSsoto/Sub8
 /*
   Reads a byte into a uint8_t.
   res - uint8_t reference to write the byte into
 */
 bool read_byte(uint8_t &res)
 {
   while (true)
   {
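     // Both branches return, so this loop body runs at most once per call; on a
     // read error the port is reopened and false is returned.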
     try
     {
       p.read_some(boost::asio::buffer(&res, sizeof(res)));
       return true;
     }
     catch (const std::exception &exc)
     {
       ROS_ERROR_THROTTLE(0.5, "DVL: error on read: %s; reopening serial port", exc.what());
       open();
       return false;
     }
   }
 }
bool CollisionCheckGridMapPlugin::isAccessible(const State& s) const
{
  boost::shared_lock<boost::shared_mutex> lock(grid_map_shared_mutex);

  if (!occupancy_grid_map)
  {
    ROS_ERROR_THROTTLE(10, "[CollisionCheckGridMapPlugin] No grid map available yet.");
    return true;
  }

  double x = s.getX();
  double y = s.getY();
  int idx = 0;

  if (getGridMapIndex(*occupancy_grid_map, x, y, idx))
    return occupancy_grid_map->data.at(idx) <= occ_thresh;

  return false;
}
 void TransformableMarkerOperatorAction::updateFocusMarkerDimensions() {
   std::string server_name = server_name_editor_->text().toStdString();
   ros::ServiceClient client_focus = nh_.serviceClient<jsk_interactive_marker::GetTransformableMarkerFocus>(
     server_name + "/get_focus", true);
   jsk_interactive_marker::GetTransformableMarkerFocus srv_focus;
   ros::ServiceClient client_dim = nh_.serviceClient<jsk_interactive_marker::GetMarkerDimensions>(
     server_name + "/get_dimensions", true);
   jsk_interactive_marker::GetMarkerDimensions srv_dim;
   if (client_focus.call(srv_focus) && client_dim.call(srv_dim)) {
     transform_name_editor_->setPlaceholderText(QString::fromStdString(srv_focus.response.target_name));
     dimension_x_editor_->setPlaceholderText(QString::number(srv_dim.response.dimensions.x, 'f', 4));
     dimension_y_editor_->setPlaceholderText(QString::number(srv_dim.response.dimensions.y, 'f', 4));
     dimension_z_editor_->setPlaceholderText(QString::number(srv_dim.response.dimensions.z, 'f', 4));
     dimension_radius_editor_->setPlaceholderText(QString::number(srv_dim.response.dimensions.radius, 'f', 4));
     dimension_sm_radius_editor_->setPlaceholderText(
       QString::number(srv_dim.response.dimensions.small_radius, 'f', 4));
   } else {
     ROS_ERROR_THROTTLE(10, "Service call failed: %s", server_name.c_str());
   }
 }
Example #16
void imageCb(const sensor_msgs::ImageConstPtr& msg)
{
  // We want to scale floating point images so that they display nicely
  bool do_dynamic_scaling = (msg->encoding.find("F") != std::string::npos);

  boost::mutex::scoped_lock lock(g_image_mutex);

  // Convert to OpenCV native BGR color
  try {
    g_last_image = cv_bridge::cvtColorForDisplay(cv_bridge::toCvShare(msg), "",
                                                 do_dynamic_scaling)->image;
  } catch (cv_bridge::Exception& e) {
    ROS_ERROR_THROTTLE(30, "Unable to convert '%s' image for display: '%s'",
                       msg->encoding.c_str(), e.what());
  }
  if (!g_last_image.empty()) {
    const cv::Mat &image = g_last_image;
    cv::imshow(g_window_name, image);
  }
}
Example #17
void DetectorNode::CameraCb(const sensor_msgs::ImageConstPtr &image_msg,
                            const sensor_msgs::CameraInfoConstPtr &cinfo_msg) {
  // Only show detection if camera is not calibrated
  if (cinfo_msg->K[0] == 0.0 || cinfo_msg->height == 0) {
    ROS_ERROR_THROTTLE(1, "%s: %s", nh_.getNamespace().c_str(),
                       "camera not calibrate");
    cam_calibrated_ = false;
  }

  // Retrieve camera info and image
  model_.fromCameraInfo(cinfo_msg);
  cv::Mat image = cv_bridge::toCvCopy(
                      image_msg, sensor_msgs::image_encodings::MONO8)->image;

  // Disable drawing later
  cv::Mat color;
  cv::cvtColor(image, color, CV_GRAY2BGR);

  // Detect tags
  std::vector<AprilTags::TagDetection> detections =
      tag_detector_.extractTags(image);

  // Process detection
  if (!detections.empty()) {
    // Maybe use Ptr?
    Apriltags tags_c_msg;
    tags_c_msg.header = image_msg->header;
    // Actual processing
    std::for_each(begin(detections), end(detections),
                  [&](const AprilTags::TagDetection &detection) {
      tags_c_msg.apriltags.push_back(DetectionToApriltagMsg(detection));
      detection.draw(color);  // Disable drawing later
    });
    tag_viz_.PublishApriltagsMarker(tags_c_msg);
    pub_tags_.publish(tags_c_msg);
  }

  // Display
  cv::imshow("image", color);
  cv::waitKey(1);
}
void multiCallback(topic_tools::ShapeShifter const &input) {
  if (input.getDataType() == "nav_msgs/Odometry") {
    nav_msgs::Odometry::ConstPtr odom = input.instantiate<nav_msgs::Odometry>();
    odomCallback(*odom);
    return;
  }

  if (input.getDataType() == "geometry_msgs/PoseStamped") {
    geometry_msgs::PoseStamped::ConstPtr pose = input.instantiate<geometry_msgs::PoseStamped>();
    poseCallback(*pose);
    return;
  }

  if (input.getDataType() == "sensor_msgs/Imu") {
    sensor_msgs::Imu::ConstPtr imu = input.instantiate<sensor_msgs::Imu>();
    imuCallback(*imu);
    return;
  }

  ROS_ERROR_THROTTLE(1.0, "message_to_tf received a %s message. Supported message types: nav_msgs/Odometry geometry_msgs/PoseStamped sensor_msgs/Imu", input.getDataType().c_str());
}
Example #19
  bool BallPickerLayer::getRobotPose(tf::Stamped<tf::Pose>& global_pose) const
  {
    global_pose.setIdentity();

    tf::Stamped <tf::Pose> robot_pose;
    robot_pose.setIdentity();
    robot_pose.frame_id_ = robot_base_frame_;
    robot_pose.stamp_ = ros::Time();

    try
    {
      tfl.transformPose(global_frame_, robot_pose, global_pose);
    }
    catch (tf::TransformException& ex)
    {
      ROS_ERROR_THROTTLE(1.0, "TF exception looking up robot pose: %s\n", ex.what());
      return false; 
    }

    return true;

  }
Example #20
/// Looks up the IMU -> CAM transformation
void VoNode::initImu2Cam(){
    if (USE_IMU){
        tf::StampedTransform imu2cam;
        ROS_INFO("Lookingup  for [%s] -> [%s] transform...", IMU_FRAME.c_str(), CAM_FRAME.c_str());
        while(1){
            try{
                // sent out by the crazyflie driver driver.py
                subTF.lookupTransform(CAM_FRAME, IMU_FRAME, ros::Time(0), imu2cam); // get the latest
                tf::transformTFToEigen(imu2cam.inverse(), IMU2CAM);
                ROS_INFO("...IMU transform [%s] -> [%s] INVERSED and set:", IMU_FRAME.c_str(), CAM_FRAME.c_str());
                ROS_INFO_STREAM("IMU2CAM\n"<<IMU2CAM.matrix());
                break;
            } catch(tf::TransformException& ex){
                ROS_ERROR_THROTTLE(3,"TF exception. Could not get IMU2CAM transform: %s", ex.what());
                ros::Rate(10).sleep(); // retry at 10 Hz
            }

        }
    } else {
        ROS_INFO("Not using IMU, so no [%s] -> [%s] transform needed", IMU_FRAME.c_str(), CAM_FRAME.c_str());
        IMU2CAM.setIdentity();
    }
}
Example #21
//tracker step
void Tracker::track()
{
    storage->readSceneProcessed(scene);
    if (error_count >= 30){
        //failed 30 times in a row
        ROS_ERROR("[Tracker::%s] Object is lost ... stopping tracker...",__func__);
        started = false;
        lost_it = true;
        return;
    }
    PXC::Ptr target (new PXC);
    crop_a_box(scene, target, (*bounding_box)*factor, false, *transform, false);
    if (target->points.size() <= 15){
        ROS_ERROR_THROTTLE(10,"[Tracker::%s] Not enough points in bounding box, retrying with larger bounding box", __func__);
        factor += 0.2;
        rej_distance +=0.005;
        ++error_count;
        return;
    }
    /*
     *  Alignment
     */
    //check if user changed leaf size
    double val;
    PXC::Ptr aligned = boost::make_shared<PXC>();
    basic_config->get("downsampling_leaf_size", val);
    if (val != leaf){
      leaf = val;
      computeModel();
      icp.setInputSource(model);
      pcl::CentroidPoint<PX> mc;
      for (size_t i=0; i<model->points.size(); ++i)
          mc.add(model->points[i]);
      mc.get(model_centroid);
    }
    // bool feat_align(true);
    // if (feat_align){
    //     NTC::Ptr target_n = boost::make_shared<NTC>();
    //     pcl::PointCloud<pcl::FPFHSignature33>::Ptr target_f = boost::make_shared<pcl::PointCloud<pcl::FPFHSignature33>>();
    //     ne.setRadiusSearch(2.0f*leaf);
    //     ne.useSensorOriginAsViewPoint();
    //     ne.setInputCloud(target);
    //     ne.compute(*target_n);
    //     fpfh.setInputNormals(target_n);
    //     fpfh.setInputCloud(target);
    //     fpfh.setRadiusSearch(3.5f*leaf);
    //     fpfh.compute(*target_f);
    //     //Assemble correspondences based on model-target features
    //     SearchT tree (true, CreatorT(new IndexT(4)));
    //     tree.setPointRepresentation (RepT(new pcl::DefaultFeatureRepresentation<pcl::FPFHSignature33>));
    //     tree.setChecks(256);
    //     tree.setInputCloud(target_f);
    //     //Search model features over target features
    //     //If model features are n, these will be n*k_nn matrices
    //     std::vector<std::vector<int>> k_idx;
    //     std::vector<std::vector<float>> k_dist;
    //     int k_nn(1);
    //     tree.nearestKSearch (*model_feat, std::vector<int> (), k_nn, k_idx, k_dist);
    //     //define a distance threshold
    //     float dist_thresh_m = 125.0f;
    //     //fill in model-target correpsondences
    //     pcl::Correspondences corr_m_over_t;
    //     for(size_t i=0; i < k_idx.size(); ++i)
    //     {
    //         if (k_dist[i][0] > dist_thresh_m){
    //             //we have a correspondence
    //             PX p1 (model->points[i]);
    //             PX p2 (target->points[k_idx[i][0]]);
    //             Eigen::Vector3f diff (p1.x - p2.x, p1.y - p2.y, p1.z - p2.z);
    //             float eu_dist = diff.squaredNorm();
    //             //Add a correspondence only if distance is below threshold
    //             pcl::Correspondence cor(i, k_idx[i][0], eu_dist);
    //             corr_m_over_t.push_back(cor);
    //         }
    //     }
    //     //Estimate the rigid transformation of model -> target
    //     teDQ->estimateRigidTransformation(*model, *target, corr_m_over_t, *transform);
    // }
    crd->setMaximumDistance(rej_distance);
    icp.setInputTarget(target);
    if (centroid_counter >=10){
        pcl::CentroidPoint<PX> tc;
        for (size_t i=0; i<target->points.size(); ++i)
            tc.add(target->points[i]);
        PX target_centroid, mc_transformed;
        mc_transformed = pcl::transformPoint(model_centroid, Eigen::Affine3f(*transform));
        tc.get(target_centroid);
        Eigen::Matrix4f Tcen, guess;
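        // Tcen is a pure translation moving the transformed model centroid onto
        // the target centroid; it refines the initial guess handed to ICP.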
        Tcen << 1, 0, 0,  (target_centroid.x - mc_transformed.x),
                0, 1, 0,  (target_centroid.y - mc_transformed.y),
                0, 0, 1,  (target_centroid.z - mc_transformed.z),
                0, 0, 0,  1;
        guess = Tcen*(*transform);
        icp.align(*aligned, guess);
        ROS_WARN("[Tracker::%s] Centroid Translation Performed!",__func__);
        centroid_counter = 0;
    }
    else if (disturbance_counter >= 20)
    {
        float angx = D2R*UniformRealIn(30.0,90.0,true);
        float angy = D2R*UniformRealIn(30.0,90.0,true);
        float angz = D2R*UniformRealIn(30.0,90.0,true);
        Eigen::Matrix4f T_rotx, T_roty, T_rotz;
        if (UniformIntIn(0,1))
            angx *= -1;
        if (UniformIntIn(0,1))
            angy *= -1;
        if (UniformIntIn(0,1))
            angz *= -1;
        Eigen::AngleAxisf rotx (angx, Eigen::Vector3f::UnitX());
        T_rotx<< rotx.matrix()(0,0), rotx.matrix()(0,1), rotx.matrix()(0,2), 0,
                rotx.matrix()(1,0), rotx.matrix()(1,1), rotx.matrix()(1,2), 0,
                rotx.matrix()(2,0), rotx.matrix()(2,1), rotx.matrix()(2,2), 0,
                0,                0,                  0,                 1;
        Eigen::AngleAxisf roty (angy, Eigen::Vector3f::UnitY());
        T_roty<< roty.matrix()(0,0), roty.matrix()(0,1), roty.matrix()(0,2), 0,
                roty.matrix()(1,0), roty.matrix()(1,1), roty.matrix()(1,2), 0,
                roty.matrix()(2,0), roty.matrix()(2,1), roty.matrix()(2,2), 0,
                0,                0,                  0,                 1;
        Eigen::AngleAxisf rotz (angz, Eigen::Vector3f::UnitZ());
        T_rotz<< rotz.matrix()(0,0), rotz.matrix()(0,1), rotz.matrix()(0,2), 0,
                rotz.matrix()(1,0), rotz.matrix()(1,1), rotz.matrix()(1,2), 0,
                rotz.matrix()(2,0), rotz.matrix()(2,1), rotz.matrix()(2,2), 0,
                0,                0,                  0,                 1;
        Eigen::Matrix4f disturbed, inverse;
        inverse = transform->inverse();
        disturbed = (T_rotz*T_roty*T_rotx*inverse).inverse();
        ROS_WARN("[Tracker::%s] Triggered Disturbance! With angles %g, %g, %g",__func__,  angx*R2D, angy*R2D, angz*R2D);
        icp.align(*aligned, disturbed);
        disturbance_counter = 0;
    }
    else
        icp.align(*aligned, *transform);
    fitness = icp.getFitnessScore();
    // ROS_WARN("Fitness %g", fitness);
    *(transform) = icp.getFinalTransformation();
    //adjust distance and factor according to fitness
    if (fitness > 0.001 ){
        //fitness is high, something is probably wrong
        rej_distance +=0.001;
        factor += 0.05;
        if (rej_distance > 2.0)
            rej_distance = 2.0;
        if (factor > 5.0)
            factor = 5.0;
        ++disturbance_counter;
        ++centroid_counter;
    }
    else if (fitness < 0.0006){
        //all looks good
        rej_distance -=0.005;
        if(rej_distance < 0.015)
            rej_distance = 0.015; //we don't want to go lower than this
        factor -=0.05;
        if(factor < 1.1)
            factor = 1.1;
        error_count = 0;
        disturbance_counter = 0;
        centroid_counter = 0;
    }
}
Example #22
void planning_scene_monitor::CurrentStateMonitor::jointStateCallback(const sensor_msgs::JointStateConstPtr &joint_state)
{
  if (joint_state->name.size() != joint_state->position.size())
  {
    ROS_ERROR_THROTTLE(1, "State monitor received invalid joint state (number of joint names does not match number of positions)");
    return;
  }
  bool update = false;
  
  {    
    boost::mutex::scoped_lock _(state_update_lock_);
    // read the received values, and update their time stamps
    std::size_t n = joint_state->name.size();
    current_state_time_ = joint_state->header.stamp;
    for (std::size_t i = 0 ; i < n ; ++i)
    {
      const robot_model::JointModel* jm = robot_model_->getJointModel(joint_state->name[i]);
      if (!jm)
        continue;
      // ignore fixed joints, multi-dof joints (they should not even be in the message)
      if (jm->getVariableCount() != 1)
        continue;

      joint_time_[joint_state->name[i]] = joint_state->header.stamp;

      if (robot_state_.getJointPositions(jm)[0] != joint_state->position[i])
      {
        update = true;
        robot_state_.setJointPositions(jm, &(joint_state->position[i]));
        
        // continuous joints wrap, so we don't modify them (even if they are outside bounds!)
        if (jm->getType() == robot_model::JointModel::REVOLUTE)
          if (static_cast<const robot_model::RevoluteJointModel*>(jm)->isContinuous())
            continue;
        
        const robot_model::VariableBounds &b = jm->getVariableBounds()[0]; // only one variable in the joint, so we get its bounds
        
        // if the read variable is 'almost' within bounds (up to error_ difference), then consider it to be within bounds
        if (joint_state->position[i] < b.min_position_ && joint_state->position[i] >= b.min_position_ - error_)
          robot_state_.setJointPositions(jm, &b.min_position_);
        else
          if (joint_state->position[i] > b.max_position_ && joint_state->position[i] <= b.max_position_ + error_)
            robot_state_.setJointPositions(jm, &b.max_position_);
      }
    }
    
    // read root transform, if needed
    if (tf_ && (robot_model_->getRootJoint()->getType() == robot_model::JointModel::PLANAR ||
                robot_model_->getRootJoint()->getType() == robot_model::JointModel::FLOATING))
    {
      const std::string &child_frame = robot_model_->getRootLink()->getName();
      const std::string &parent_frame = robot_model_->getModelFrame();
      
      std::string err;
      ros::Time tm;
      tf::StampedTransform transf;
      bool ok = false;
      if (tf_->getLatestCommonTime(parent_frame, child_frame, tm, &err) == tf::NO_ERROR)
      {
        try
        {
          tf_->lookupTransform(parent_frame, child_frame, tm, transf);
          ok = true;
        }
        catch(tf::TransformException& ex)
        {
          ROS_ERROR_THROTTLE(1, "Unable to lookup transform from %s to %s.  Exception: %s", parent_frame.c_str(), child_frame.c_str(), ex.what());
        }
      }
      else
        ROS_DEBUG_THROTTLE(1, "Unable to lookup transform from %s to %s: no common time.", parent_frame.c_str(), child_frame.c_str());
      if (ok && last_tf_update_ != tm)
      {
        update = true;
        last_tf_update_ = tm;
        const std::vector<std::string> &vars = robot_model_->getRootJoint()->getVariableNames();
        for (std::size_t j = 0; j < vars.size() ; ++j)
          joint_time_[vars[j]] = tm;
        Eigen::Affine3d eigen_transf;
        tf::transformTFToEigen(transf, eigen_transf);
        robot_state_.setJointPositions(robot_model_->getRootJoint(), eigen_transf);        
      }
    }
  }
  
  // callbacks, if needed
  if (update)
    for (std::size_t i = 0 ; i < update_callbacks_.size() ; ++i)
      update_callbacks_[i](joint_state);
}
Example #23
int main(int argc, char **argv)
{
	// Start the IK node
	ros::init (argc, argv, "kuka_ik");
	ros::NodeHandle n;
	ros::NodeHandle nh("~");

	// Rate parameter
	int rate;
	nh.param("rate", rate, 5);
	ros::Rate loop_rate(rate);
	ROS_INFO("IK Solve: %d Hz", rate);

	// Origin parameter
	std::string origin;
	nh.param<std::string>("origin", origin, "world");

	// Load the KUKA robot
	robot_model_loader::RobotModelLoader robot_model_loader("robot_description");
	robot_model::RobotModelPtr kinematic_model = robot_model_loader.getModel();
	// Default frame: base
	ROS_INFO("Model frame: %s", kinematic_model->getModelFrame().c_str());

	robot_state::RobotStatePtr kinematic_state(new robot_state::RobotState(kinematic_model));
	kinematic_state->setToDefaultValues();
	// Robot motion group
	const robot_state::JointModelGroup* joint_model_group = kinematic_model->getJointModelGroup("arm");
	// Get joint names
	const std::vector<std::string> &joint_names = joint_model_group->getJointModelNames();
	// Current joint positions
	std::vector<double> joint_values;
	kinematic_state->copyJointGroupPositions(joint_model_group, joint_values);

	// Publisher for KUKA model joint position from IK solver
	ros::Publisher kukaJointPub = n.advertise<sensor_msgs::JointState>("joint_states", 1000);

	// Subscriber for goal position (InteractiveMarker)
	ros::Subscriber kukaIkGoalSub = n.subscribe("goal_pose", 50, kukaIkCallback);


	// JointState Msg for KUKA model from IK solver
	sensor_msgs::JointState jointMsg;
	jointMsg.name.resize(6);
	jointMsg.position.resize(6);
	jointMsg.velocity.resize(6);
	for(std::size_t i=0; i < joint_names.size(); ++i){
		jointMsg.name[i] = joint_names[i].c_str();
		jointMsg.position[i] = joint_values[i];
	}
	// Publish init joint state
	ros::Duration(1).sleep();
	jointMsg.header.stamp = ros::Time::now();
	kukaJointPub.publish(jointMsg);

	// Message loop
	while (ros::ok()) {

		if (updatePose){
			// IK
			kinematic_state->setJointGroupPositions(joint_model_group, joint_values);

			bool found_ik = kinematic_state->setFromIK(joint_model_group, goalPose, 5, 0.01);

			// IK solution (if found):
			if (found_ik) {
				// Update joint_values
				kinematic_state->copyJointGroupPositions(joint_model_group, joint_values);

				// for(std::size_t i=0; i < joint_names.size(); ++i){
				// 	jointMsg.position[i] = joint_values[i];
				// 	//ROS_INFO("Joint %s: %f", joint_names[i].c_str(), joint_values[i]);
				// }
				jointMsg.position[0] = joint_values[0];
				jointMsg.position[1] = joint_values[1];
				jointMsg.position[2] = joint_values[2];
				//jointMsg.position[3] = 0.0; //KINECT ROTATION
				jointMsg.position[3] = joint_values[3]; //KINECT ROTATION
				jointMsg.position[4] = joint_values[4];
				jointMsg.position[5] = joint_values[5];
			}
			else {
				ROS_ERROR_THROTTLE(2,"INVERSE KINEMATICS NOT FEASIBLE!");
			}
			updatePose=false;
		}
		// Publish message for the KUKA model
		jointMsg.header.stamp = ros::Time::now();

		kukaJointPub.publish(jointMsg);

		ros::spinOnce();
		loop_rate.sleep();
	}

	ros::shutdown();
	return 0;
}
void PointCloudOctomapUpdater::cloudMsgCallback(const sensor_msgs::PointCloud2::ConstPtr &cloud_msg)
{
  ROS_DEBUG("Received a new point cloud message");
  ros::WallTime start = ros::WallTime::now();
  
  if (monitor_->getMapFrame().empty())
    monitor_->setMapFrame(cloud_msg->header.frame_id);
  
  /* get transform for cloud into map frame */
  tf::StampedTransform map_H_sensor;
  if (monitor_->getMapFrame() == cloud_msg->header.frame_id)
    map_H_sensor.setIdentity();
  else
  {
    if (tf_)
    {
      try
      {
        tf_->lookupTransform(monitor_->getMapFrame(), cloud_msg->header.frame_id, cloud_msg->header.stamp, map_H_sensor);
      }
      catch (tf::TransformException& ex)
      {
        ROS_ERROR_STREAM("Transform error of sensor data: " << ex.what() << "; quitting callback");
        return;
      }
    }
    else
      return;
  } 
  
  /* convert cloud message to pcl cloud object */
  pcl::PointCloud<pcl::PointXYZ> cloud;
  pcl::fromROSMsg(*cloud_msg, cloud);
  
  /* compute sensor origin in map frame */
  const tf::Vector3 &sensor_origin_tf = map_H_sensor.getOrigin();
  octomap::point3d sensor_origin(sensor_origin_tf.getX(), sensor_origin_tf.getY(), sensor_origin_tf.getZ());
  Eigen::Vector3d sensor_origin_eigen(sensor_origin_tf.getX(), sensor_origin_tf.getY(), sensor_origin_tf.getZ());
  
  if (!updateTransformCache(cloud_msg->header.frame_id, cloud_msg->header.stamp))
    ROS_ERROR_THROTTLE(1, "Transform cache was not updated. Self-filtering may fail.");
  
  /* mask out points on the robot */
  shape_mask_->maskContainment(cloud, sensor_origin_eigen, 0.0, max_range_, mask_);
  
  octomap::KeySet free_cells, occupied_cells, model_cells;

  tree_->lockRead();
  
  try
  {
    /* do ray tracing to find which cells this point cloud indicates should be free, and which it indicates
     * should be occupied */
    for (unsigned int row = 0; row < cloud.height; row += point_subsample_)
    {
      unsigned int row_c = row * cloud.width;
      for (unsigned int col = 0; col < cloud.width; col += point_subsample_)
      {
        if (mask_[row_c + col] == point_containment_filter::ShapeMask::CLIP)
          continue;
        const pcl::PointXYZ &p = cloud(col, row);
        
        /* skip NaNs (a NaN never compares equal to itself) */
        if ((p.x == p.x) && (p.y == p.y) && (p.z == p.z))
        {
          /* transform to map frame */
          tf::Vector3 point_tf = map_H_sensor * tf::Vector3(p.x, p.y, p.z);

          /* occupied cell at ray endpoint if ray is shorter than max range and this point
             isn't on a part of the robot */
          if (mask_[row_c + col] == point_containment_filter::ShapeMask::INSIDE)
            model_cells.insert(tree_->coordToKey(point_tf.getX(), point_tf.getY(), point_tf.getZ()));
          else
            occupied_cells.insert(tree_->coordToKey(point_tf.getX(), point_tf.getY(), point_tf.getZ()));
        }
      }
    }

    /* compute the free cells along each ray that ends at an occupied cell */
    for (octomap::KeySet::iterator it = occupied_cells.begin(), end = occupied_cells.end(); it != end; ++it)
      if (tree_->computeRayKeys(sensor_origin, tree_->keyToCoord(*it), key_ray_))
        free_cells.insert(key_ray_.begin(), key_ray_.end());

    /* compute the free cells along each ray that ends at a model cell */
    for (octomap::KeySet::iterator it = model_cells.begin(), end = model_cells.end(); it != end; ++it)
      if (tree_->computeRayKeys(sensor_origin, tree_->keyToCoord(*it), key_ray_))
        free_cells.insert(key_ray_.begin(), key_ray_.end());
  }
  catch (...)
  { 
    tree_->unlockRead();
    return;
  }
  
  tree_->unlockRead(); 
  
  /* cells that overlap with the model are not occupied */
  for (octomap::KeySet::iterator it = model_cells.begin(), end = model_cells.end(); it != end; ++it)
    occupied_cells.erase(*it);

  /* occupied cells are not free */
  for (octomap::KeySet::iterator it = occupied_cells.begin(), end = occupied_cells.end(); it != end; ++it)
    free_cells.erase(*it);
  
  tree_->lockWrite();
  
  try
  {    
    /* mark free cells only if not seen occupied in this cloud */
    for (octomap::KeySet::iterator it = free_cells.begin(), end = free_cells.end(); it != end; ++it)
      tree_->updateNode(*it, false);
    
    /* now mark all occupied cells */
    for (octomap::KeySet::iterator it = occupied_cells.begin(), end = occupied_cells.end(); it != end; ++it)
      tree_->updateNode(*it, true);

    // set the logodds to the minimum for the cells that are part of the model
    const float lg = tree_->getClampingThresMinLog() - tree_->getClampingThresMaxLog();
    for (octomap::KeySet::iterator it = model_cells.begin(), end = model_cells.end(); it != end; ++it)
      tree_->updateNode(*it, lg);
  }
  catch (...)
  {
    ROS_ERROR("Internal error while updating octree");
  }
  tree_->unlockWrite();
  ROS_DEBUG("Processed point cloud in %lf ms", (ros::WallTime::now() - start).toSec() * 1000.0);
  tree_->triggerUpdateCallback();
}
Example #25
void Robot::update(const LinkUpdater& updater)
{
  M_NameToLink::iterator link_it = links_.begin();
  M_NameToLink::iterator link_end = links_.end();
  for ( ; link_it != link_end; ++link_it )
  {
    RobotLink* link = link_it->second;

    link->setToNormalMaterial();

    Ogre::Vector3 visual_position, collision_position;
    Ogre::Quaternion visual_orientation, collision_orientation;
    if( updater.getLinkTransforms( link->getName(),
                                   visual_position, visual_orientation,
                                   collision_position, collision_orientation
                                   ))
    {
      // Check if visual_orientation, visual_position, collision_orientation, and collision_position are NaN.
      if (visual_orientation.isNaN())
      {
        ROS_ERROR_THROTTLE(
          1.0,
          "visual orientation of %s contains NaNs. Skipping render as long as the orientation is invalid.",
          link->getName().c_str()
        );
        continue;
      }
      if (visual_position.isNaN())
      {
        ROS_ERROR_THROTTLE(
          1.0,
          "visual position of %s contains NaNs. Skipping render as long as the position is invalid.",
          link->getName().c_str()
        );
        continue;
      }
      if (collision_orientation.isNaN())
      {
        ROS_ERROR_THROTTLE(
          1.0,
          "collision orientation of %s contains NaNs. Skipping render as long as the orientation is invalid.",
          link->getName().c_str()
        );
        continue;
      }
      if (collision_position.isNaN())
      {
        ROS_ERROR_THROTTLE(
          1.0,
          "collision position of %s contains NaNs. Skipping render as long as the position is invalid.",
          link->getName().c_str()
        );
        continue;
      }
      link->setTransforms( visual_position, visual_orientation, collision_position, collision_orientation );

      std::vector<std::string>::const_iterator joint_it = link->getChildJointNames().begin();
      std::vector<std::string>::const_iterator joint_end = link->getChildJointNames().end();
      for ( ; joint_it != joint_end ; ++joint_it )
      {
        RobotJoint *joint = getJoint(*joint_it);
        if (joint)
        {
          joint->setTransforms(visual_position, visual_orientation);
        }
      }
    }
    else
    {
      link->setToErrorMaterial();
    }
  }
}
void planning_scene_monitor::CurrentStateMonitor::jointStateCallback(const sensor_msgs::JointStateConstPtr &joint_state)
{
  if (joint_state->name.size() != joint_state->position.size())
  {
    ROS_ERROR_THROTTLE(1, "State monitor received invalid joint state");
    return;
  }
  
  // read the received values, and update their time stamps
  std::size_t n = joint_state->name.size();
  std::map<std::string, double> joint_state_map;
  const std::map<std::string, std::pair<double, double> > &bounds = kmodel_->getAllVariableBounds();
  for (std::size_t i = 0 ; i < n ; ++i)
  {    
    joint_state_map[joint_state->name[i]] = joint_state->position[i];
    joint_time_[joint_state->name[i]] = joint_state->header.stamp;
    
    // continuous joints wrap, so we don't modify them (even if they are outside bounds!)
    const robot_model::JointModel* jm = kmodel_->getJointModel(joint_state->name[i]);
    if (jm && jm->getType() == robot_model::JointModel::REVOLUTE)
      if (static_cast<const robot_model::RevoluteJointModel*>(jm)->isContinuous())
        continue;
    
    std::map<std::string, std::pair<double, double> >::const_iterator bi = bounds.find(joint_state->name[i]);
    // if the read variable is 'almost' within bounds (up to error_ difference), then consider it to be within bounds
    if (bi != bounds.end())
    {
      if (joint_state->position[i] < bi->second.first && joint_state->position[i] >= bi->second.first - error_)
        joint_state_map[joint_state->name[i]] = bi->second.first;
      else
        if (joint_state->position[i] > bi->second.second && joint_state->position[i] <= bi->second.second + error_)
          joint_state_map[joint_state->name[i]] = bi->second.second;
    }
  }
  bool set_map_values = true;
  
  // read root transform, if needed
  if (tf_ && (root_->getType() == robot_model::JointModel::PLANAR ||
              root_->getType() == robot_model::JointModel::FLOATING))
  {
    const std::string &child_frame = root_->getJointModel()->getChildLinkModel()->getName();
    const std::string &parent_frame = kmodel_->getModelFrame();
    
    std::string err;
    ros::Time tm;
    tf::StampedTransform transf;
    bool ok = false;
    if (tf_->getLatestCommonTime(parent_frame, child_frame, tm, &err) == tf::NO_ERROR)
    {
      try
      {
        tf_->lookupTransform(parent_frame, child_frame, tm, transf);
        ok = true;
      }
      catch(tf::TransformException& ex)
      {
        ROS_ERROR_THROTTLE(1, "Unable to lookup transform from %s to %s.  Exception: %s", parent_frame.c_str(), child_frame.c_str(), ex.what());
      }
    }
    else
      ROS_DEBUG_THROTTLE(1, "Unable to lookup transform from %s to %s: no common time.", parent_frame.c_str(), child_frame.c_str());
    if (ok)
    {
      const std::vector<std::string> &vars = root_->getJointModel()->getVariableNames();
      for (std::size_t j = 0; j < vars.size() ; ++j)
        joint_time_[vars[j]] = tm;
      set_map_values = false;
      Eigen::Affine3d eigen_transf;
      tf::transformTFToEigen(transf, eigen_transf);
      boost::mutex::scoped_lock slock(state_update_lock_);
      root_->setVariableValues(eigen_transf);
      kstate_.setStateValues(joint_state_map); 
      current_state_time_ = joint_state->header.stamp;
    }
  }
  
  if (set_map_values)
  {
    boost::mutex::scoped_lock slock(state_update_lock_);
    kstate_.setStateValues(joint_state_map);
    current_state_time_ = joint_state->header.stamp;
  }
  
  // callbacks, if needed
  for (std::size_t i = 0 ; i < update_callbacks_.size() ; ++i)
    update_callbacks_[i](joint_state);
}
Example #27
/// PINHOLE
void VoNode::incomingImage(const sensor_msgs::ImageConstPtr& msg){

    /// Measure Hz, processing time, image acquisition time
    ros::WallTime t0 = ros::WallTime::now();

    /// Check TIME
    if (lastTime>msg->header.stamp){
        ROS_WARN("Detected negative time jump, resetting TF buffer");
        subTF.clear();
    }
    lastTime = msg->header.stamp;


    /// Get Image
    cv_bridge::CvImageConstPtr cvPtr;
    try {
        cvPtr = cv_bridge::toCvShare(msg, OVO::COLORS[colorId]);
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR_STREAM_THROTTLE(1,"Failed to understand incoming image:" << e.what());
        return;
    }

    /// GET IMU
    tf::StampedTransform imuStamped;

    if (USE_IMU){
        try{
            // sent out by the crazyflie driver driver.py
            subTF.waitForTransform(IMU_FRAME, WORLD_FRAME, msg->header.stamp-imgDelay, ros::Duration(0.1) );
            subTF.lookupTransform(IMU_FRAME, WORLD_FRAME, msg->header.stamp-imgDelay, imuStamped);
        } catch(tf::TransformException& ex){
            ROS_ERROR_THROTTLE(1,"TF exception. Could not get flie IMU transform: %s", ex.what());            
            return;
        }
    } else {
        imuStamped.setRotation(tf::Quaternion(1.0, 0.0, 0.0, 0.0));
    }

    ROS_INFO(OVO::colorise("\n==============================================================================",OVO::FG_WHITE, OVO::BG_BLUE).c_str());

    /// Make Frame
    ROS_ERROR("MIGHT NEED TO INVERSE IMU");
    Frame::Ptr frame(new Frame(cvPtr->image, imuStamped));

    if (frame->getQuality()>0.9 || frame->getQuality()<0){
        /// Process Frame
        ROS_INFO("NOD > PROCESSING FRAME [%d]", frame->getId());
        odometry.update(frame);
    } else {
        ROS_WARN("NOD = SKIPPING FRAME, BAD QUALITY");
    }

    /// Compute running average of processing time
    double time = (ros::WallTime::now()-t0).toSec();
    timeAvg = (timeAvg*timeAlpha) + (1.0 - timeAlpha)*time;

    /// Clean up and output
    ROS_INFO("NOD < FRAME [%d|%d] PROCESSED [%.1fms, Avg: %.1fms]", frame->getId(), frame->getKfId(), time*1000., timeAvg*1000.);


    if (frame->getQuality()>0.9 || frame->getQuality()<0){
        publishStuff();
    }

    /// publish debug image
    if (pubCamera.getNumSubscribers()>0 || pubImage.getNumSubscribers()>0){
        sensor_msgs::CameraInfoPtr camInfoPtr = frame->getCamInfo();
        cv::Mat drawImg;
        if (frame->getQuality()>0.9 || frame->getQuality()<0){
            drawImg = odometry.getVisualImage();
        } else {
            drawImg = odometry.getVisualImage(frame);
        }

        OVO::putInt(drawImg, time*1000., cv::Point(10,drawImg.rows-1*25), CV_RGB(200,0,200), false, "S:");

//        if (imageRect.channels() != image.channels()){
//            cv::cvtColor(imageRect, imageRect,CV_BGR2GRAY);
//        }

        /// Send out
        cv_bridge::CvImage cvi;
        cvi.header.stamp = msg->header.stamp;
        cvi.header.seq = msg->header.seq;
        cvi.header.frame_id = "cam_gt";//CAM_FRAME;
        cvi.encoding = sensor_msgs::image_encodings::BGR8;// OVO::COLORS[colorId];
        //cvi.image = imageRect;
        cvi.image = drawImg;
        camInfoPtr->header = cvi.header;
        pubImage.publish(cvi.toImageMsg());

        cvi.image=cvi.image.colRange(0, cvi.image.cols/2 -1);
        pubCamera.publish(cvi.toImageMsg(), camInfoPtr);

    } else {
        ROS_WARN_THROTTLE(1, "NOT DRAWING OUTPUT");
    }
}
Example #28
void VoNode::incomingSynthetic(const ollieRosTools::synthFrameConstPtr& msg){
    ROS_INFO((std::string("\n")+OVO::colorise("============================================================================== FRAME %d ",OVO::FG_WHITE, OVO::BG_BLUE)).c_str(),msg->frameId);

    /// Measure Hz, processing time, image acquisition time
    ros::WallTime t0 = ros::WallTime::now();

    /// Check TIME
    if (lastTime>msg->header.stamp){
        ROS_WARN("Detected negative time jump, resetting TF buffer");
        subTF.clear();
    }
    lastTime = msg->header.stamp;
    /// Get Image
    cv_bridge::CvImageConstPtr cvPtr;
    try {
        cvPtr = cv_bridge::toCvShare(msg->img, msg, OVO::COLORS[colorId]);
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR_STREAM_THROTTLE(1,"Failed to understand incoming image:" << e.what());
        return;
    }

    /// GET IMU
    tf::StampedTransform imuStamped;

    if (USE_IMU){
        try{
            // sent out by the crazyflie driver driver.py
            subTF.waitForTransform(IMU_FRAME, WORLD_FRAME, msg->header.stamp-imgDelay, ros::Duration(0.1) );
            subTF.lookupTransform(IMU_FRAME, WORLD_FRAME, msg->header.stamp-imgDelay, imuStamped);
        } catch(tf::TransformException& ex){
            ROS_ERROR_THROTTLE(1,"TF exception. Could not get flie IMU transform: %s", ex.what());
            return;
        }
    } else {
        imuStamped.setRotation(tf::Quaternion(0.0, 0.0, 0.0, 1.0));
    }

    /// NEED TO SEE ABOUT THIS
    tf::StampedTransform imuRevStamped(imuStamped.inverse(), imuStamped.stamp_, imuStamped.frame_id_, imuStamped.child_frame_id_);

    /// Make Frame
    tf::Transform cam, imu;
    tf::transformMsgToTF(msg->camPose, cam);
    tf::transformMsgToTF(msg->imuPose, imu);    
    //cam = cam.inverse();
    //imu = imu.inverse();
    Frame::Ptr frame(new FrameSynthetic(cvPtr->image, imuRevStamped, msg->pts2d_noise, cam, imu, msg->camInfo));


    /// Process Frame
    ROS_INFO("NOD > PROCESSING FRAME [%d]", frame->getId());
    odometry.update(frame);

    /// Compute running average of processing time
    double time = (ros::WallTime::now()-t0).toSec();
    timeAvg = (timeAvg*timeAlpha) + (1.0 - timeAlpha)*time;

    /// Clean up and output
    ROS_INFO("NOD < FRAME [%d|%d] PROCESSED [%.1fms, Avg: %.1fms]", frame->getId(), frame->getKfId(), time*1000., timeAvg*1000.);
    publishStuff();

    /// publish debug image
    if (pubCamera.getNumSubscribers()>0 || pubImage.getNumSubscribers()>0){
        sensor_msgs::CameraInfoPtr camInfoPtr = frame->getCamInfo();
        cv::Mat drawImg = odometry.getVisualImage();
        OVO::putInt(drawImg, time*1000., cv::Point(10,drawImg.rows-1*25), CV_RGB(200,0,200), false, "s:");

//        if (imageRect.channels() != image.channels()){
//            cv::cvtColor(imageRect, imageRect,CV_BGR2GRAY);
//        }

        /// Send out
        cv_bridge::CvImage cvi;
        cvi.header.stamp = msg->header.stamp;
        cvi.header.seq = msg->header.seq;
        cvi.header.frame_id = CAM_FRAME;
        cvi.encoding = sensor_msgs::image_encodings::BGR8;// OVO::COLORS[colorId];
        //cvi.image = imageRect;
        cvi.image = drawImg;
        camInfoPtr->header = cvi.header;
        //pubCamera.publish(cvi.toImageMsg(), camInfoPtr);
        pubImage.publish(cvi.toImageMsg());
    }

}
Example #29
/*
 *	\brief Method called when a message is received on the topic /joy
 *
 */
void GuardianPad::padCallback(const sensor_msgs::Joy::ConstPtr& joy)
{
	std::vector<double> joint_positions; // positions ordered as joint_names_ vector
	
	// Check availability of the joint_states msg
	//ROS_INFO("now: %d, joint_states_: %d", ros::Time::now().sec, joint_states_msg_.header.stamp.sec);
	if( (ros::Time::now() - joint_states_msg_.header.stamp) > ros::Duration(1) )
	{
		ROS_ERROR_THROTTLE(1, "GBallPad::padCallback: joint_states msg is too old");
		return;
	}
	
	// Check if we have a JointState msg with all the defined joints
	for(int i=0; i<joint_names_.size(); i++)
	{
		bool joint_found = false;
		for(int j=0; j<joint_states_msg_.name.size(); j++)
		{
			if(joint_states_msg_.name[j] == joint_names_[i])
			{
				joint_found = true;
				joint_positions.push_back(joint_states_msg_.position[j]);
				break;
			}
		}
		if(!joint_found)
		{
			ROS_ERROR_THROTTLE(1, "GBallPad::padCallback: one or more joints can't be found in joint_states topic");
			return;
		}
	}
	//ROS_INFO("joint_positions size %d", (int)joint_positions.size());

	
	if(joy->buttons[dead_man_button_lwa4p_] == 1 && joy->buttons[dead_man_button_] != 1)
	{
		//ROS_INFO("Dead man button LWA4P pressed");
		
		// Next joint button
		if(joy->buttons[next_joint_button_] == 1 && !bRegisteredButtonEvent[next_joint_button_])
		{
			bRegisteredButtonEvent[next_joint_button_] = true;
			selected_joint_++;
			if(selected_joint_ == joint_names_.size())
				selected_joint_ = 0;
			ROS_INFO("GBallPad::padCallback: selected joint %s", joint_names_[selected_joint_].c_str());
		}
		else if( joy->buttons[next_joint_button_] != 1)
			bRegisteredButtonEvent[next_joint_button_] = false;
		
		// Previous joint button
		if(joy->buttons[previous_joint_button_] == 1 && !bRegisteredButtonEvent[previous_joint_button_])
		{
			bRegisteredButtonEvent[previous_joint_button_] = true;
			selected_joint_--;
			if(selected_joint_ < 0)
				selected_joint_ = joint_names_.size()-1;
			ROS_INFO("GBallPad::padCallback: selected joint %s", joint_names_[selected_joint_].c_str());
		}
		else if(joy->buttons[previous_joint_button_] != 1)
			bRegisteredButtonEvent[previous_joint_button_] = false;
			
			
		std::vector<double> trajectory;
		float desired_speed = l_scale_*joy->axes[linear_];
		if (desired_speed == 0.0)
		{
			// Stop movement
			publishJointTrajectory(joint_positions, 0.5); // Send current position
			return;	
		}
			
		
		double distance;
		double desired_position;
		if(desired_speed > 0)
			desired_position = joint_limits_[selected_joint_].max;
		else if(desired_speed < 0)
			desired_position = joint_limits_[selected_joint_].min;
		distance = desired_position - joint_positions[selected_joint_];
		
		double time_from_start;
		if(desired_speed != 0.0)
			time_from_start = distance / desired_speed;
		else
			time_from_start = 0.0;
		
		
		//ROS_INFO("Desired position: %f, current position: %f, distance: %f", desired_position, joint_positions[selected_joint_], distance);
		//ROS_INFO("Desired_speed: %f, time_from_start: %f", desired_speed, time_from_start);
		
		// Publish trajectory
		trajectory = joint_positions;
		trajectory[selected_joint_] = desired_position;
		publishJointTrajectory(trajectory, time_from_start);
	}
	else
	{
		// Stop movement
		publishJointTrajectory(joint_positions, 0.5); // Send current position
	}

}
Example #30
/* Method to calculate the torques required at each joint for gravity compensation
 *  @returns true if successful
 */
bool arm_kinematics::Kinematics::getGravityTorques(
    const sensor_msgs::JointState joint_configuration, baxter_core_msgs::SEAJointState &left_gravity, baxter_core_msgs::SEAJointState &right_gravity, bool isEnabled) {

  bool res;
  KDL::JntArray torques_l, torques_r;
  KDL::JntArray jntPosIn_l, jntPosIn_r;
  left_gravity.name = left_joint;
  right_gravity.name = right_joint;
  left_gravity.gravity_model_effort.resize(num_joints);
  right_gravity.gravity_model_effort.resize(num_joints);
  if (isEnabled) {
    torques_l.resize(num_joints);
    torques_r.resize(num_joints);
    jntPosIn_l.resize(num_joints);
    jntPosIn_r.resize(num_joints);

    // Copying the positions of the joints relative to its index in the KDL chain
    for (unsigned int j = 0; j < joint_configuration.name.size(); j++) {
      for (unsigned int i = 0; i < num_joints; i++) {
        if (joint_configuration.name[j] == left_joint.at(i)) {
          jntPosIn_l(i) = joint_configuration.position[j];
          break;
        } else if (joint_configuration.name[j] == right_joint.at(i)) {
          jntPosIn_r(i) = joint_configuration.position[j];
          break;
        }
      }
    }
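    // With zero joint velocities/accelerations and zero external wrenches, the
    // inverse-dynamics solver returns the gravity-compensation torques alone.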
    KDL::JntArray jntArrayNull(num_joints);
    KDL::Wrenches wrenchNull_l(grav_chain_l.getNrOfSegments(),
                               KDL::Wrench::Zero());
    int code_l = gravity_solver_l->CartToJnt(jntPosIn_l, jntArrayNull,
                                             jntArrayNull, wrenchNull_l,
                                             torques_l);
    KDL::Wrenches wrenchNull_r(grav_chain_r.getNrOfSegments(),
                               KDL::Wrench::Zero());
    int code_r = gravity_solver_r->CartToJnt(jntPosIn_r, jntArrayNull,
                                             jntArrayNull, wrenchNull_r,
                                             torques_r);

    //Check if the gravity torques were successfully calculated by both solvers
    if (code_l >= 0 && code_r >= 0) {

      for (unsigned int i = 0; i < num_joints; i++) {
        left_gravity.gravity_model_effort[i] = torques_l(i);
        right_gravity.gravity_model_effort[i] = torques_r(i);
      }
      return true;
    } else {
      ROS_ERROR_THROTTLE(
          1.0,
          "KT: Failed to compute gravity torques from KDL return code for left and right arms %d %d",
          code_l, code_r);
      return false;
    }
  } else {
    for (unsigned int i = 0; i < num_joints; i++) {
      left_gravity.gravity_model_effort[i] = 0;
      right_gravity.gravity_model_effort[i] = 0;
    }
  }
  return true;
}