Example #1
 void loadCalibrationData()
 {
   ros::NodeHandle& pn = getPrivateNodeHandle();
   std::string calibration_file;
   // Try to load a specific calibration file (if requested)
   if (pn.getParam("calibration_file", calibration_file))
   {
     if (camera_->loadCalibrationData(calibration_file))
     {
       NODELET_INFO("Loaded calibration data from \"%s\"", calibration_file.c_str());
       return;
     }
     else
     {
       NODELET_WARN("Failed to load calibration data from \"%s\"", calibration_file.c_str());
     }
   }
   // Check whether the calibration data was loaded from the default location
   if (camera_->isCalibrationDataLoaded())
   {
     NODELET_INFO("Loaded calibration data from default location (\"%s.dat\" in the working directory)", camera_->getSerialNumber().c_str());
   }
   else
   {
     NODELET_WARN("Will use default calibration data");
   }
 }
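A note on context: every snippet in this collection runs inside a class derived from nodelet::Nodelet, which is what provides getPrivateNodeHandle(), getNodeHandle(), and the NODELET_* logging macros. A minimal sketch of the surrounding boilerplate (the package and class names are placeholders, not taken from any example here):

#include <ros/ros.h>
#include <nodelet/nodelet.h>
#include <pluginlib/class_list_macros.h>

namespace example_pkg
{
class MinimalNodelet : public nodelet::Nodelet
{
  virtual void onInit()
  {
    ros::NodeHandle& pn = getPrivateNodeHandle();
    std::string calibration_file;
    // NODELET_* macros only work once the nodelet context exists, i.e. in or after onInit().
    if (!pn.getParam("calibration_file", calibration_file))
    {
      NODELET_WARN("~calibration_file is not set");
    }
  }
};
}  // namespace example_pkg

// Register the class so the nodelet loader can instantiate it by name.
PLUGINLIB_EXPORT_CLASS(example_pkg::MinimalNodelet, nodelet::Nodelet)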
Example #2
Transform OdometryROS::getTransform(const std::string & fromFrameId, const std::string & toFrameId, const ros::Time & stamp) const
{
	// TF ready?
	Transform transform;
	try
	{
		if(waitForTransform_ && !stamp.isZero() && waitForTransformDuration_ > 0.0)
		{
			//if(!tfBuffer_.canTransform(fromFrameId, toFrameId, stamp, ros::Duration(1)))
			if(!tfListener_.waitForTransform(fromFrameId, toFrameId, stamp, ros::Duration(waitForTransformDuration_)))
			{
				NODELET_WARN( "odometry: Could not get transform from %s to %s (stamp=%f) after %f seconds (\"wait_for_transform_duration\"=%f)!",
						fromFrameId.c_str(), toFrameId.c_str(), stamp.toSec(), waitForTransformDuration_, waitForTransformDuration_);
				return transform;
			}
		}

		tf::StampedTransform tmp;
		tfListener_.lookupTransform(fromFrameId, toFrameId, stamp, tmp);
		transform = rtabmap_ros::transformFromTF(tmp);
	}
	catch(tf::TransformException & ex)
	{
		NODELET_WARN( "%s",ex.what());
	}
	return transform;
}
Example #3
 void loadCalibrationData()
 {
   ros::NodeHandle& pn = getPrivateNodeHandle();
   std::string calibration_file;
   // Try to load a specific calibration file (if requested)
   if (pn.getParam("calibration_file", calibration_file) && !calibration_file.empty())
   {
     NODELET_INFO("Trying to load calibration from \"%s\"", calibration_file.c_str());
     try
     {
         boost::lock_guard<boost::recursive_mutex> lock(config_mutex_);
         camera_->loadCalibrationData(calibration_file);
         NODELET_INFO("Loaded calibration data from \"%s\"", calibration_file.c_str());
         return;
     }
     catch(PMDCameraNotOpenedException& e)
     {
         NODELET_WARN("Failed to load calibration data from \"%s\"", calibration_file.c_str());
     }
   }
   // Check whether the calibration data was loaded from the default location
   if (camera_->isCalibrationDataLoaded())
   {
     NODELET_INFO("Loaded calibration data from default location (\"%s.dat\" in the working directory)", camera_->getSerialNumber().c_str());
   }
   else
   {
     NODELET_WARN("Will use default calibration data");
   }
 }
Example #4
 void ProjectImagePoint::project(
   const geometry_msgs::PointStamped::ConstPtr& msg)
 {
   vital_checker_->poke();
   boost::mutex::scoped_lock lock(mutex_);
   if (!camera_info_) {
     NODELET_WARN(
       "[ProjectImagePoint::project] camera info is not yet available");
     return;
   }
   image_geometry::PinholeCameraModel model;
   model.fromCameraInfo(camera_info_);
   cv::Point3d ray = model.projectPixelTo3dRay(
     cv::Point2d(msg->point.x, msg->point.y));
   geometry_msgs::Vector3Stamped vector;
   vector.header = msg->header;
   vector.header.frame_id = camera_info_->header.frame_id;
   vector.vector.x = ray.x;
   vector.vector.y = ray.y;
   vector.vector.z = ray.z;
   pub_vector_.publish(vector);
   if (ray.z == 0.0) {
     NODELET_ERROR("Z value of projected ray is 0");
     return;
   }
   double alpha = z_ / ray.z;
   geometry_msgs::PointStamped point;
   point.header = msg->header;
   point.header.frame_id = camera_info_->header.frame_id;
   point.point.x = ray.x * alpha;
   point.point.y = ray.y * alpha;
   point.point.z = ray.z * alpha;
   pub_.publish(point);
   
 }
Example #5
 void PolygonToMaskImage::convert(
   const geometry_msgs::PolygonStamped::ConstPtr& polygon_msg)
 {
   boost::mutex::scoped_lock lock(mutex_);
   vital_checker_->poke();
   if (camera_info_) {
     image_geometry::PinholeCameraModel model;
     model.fromCameraInfo(camera_info_);
     cv::Mat mask_image = cv::Mat::zeros(camera_info_->height,
                                         camera_info_->width,
                                         CV_8UC1);
     std::vector<cv::Point> points;
     // we expect same tf frame
     if (polygon_msg->polygon.points.size() >= 3) {
       for (size_t i = 0; i < polygon_msg->polygon.points.size(); i++) {
         geometry_msgs::Point32 p = polygon_msg->polygon.points[i];
         cv::Point uv = model.project3dToPixel(cv::Point3d(p.x, p.y, p.z));
         points.push_back(uv);
       }
       cv::fillConvexPoly(mask_image, &(points[0]), points.size(), cv::Scalar(255));
     }
     pub_.publish(cv_bridge::CvImage(polygon_msg->header,
                                     sensor_msgs::image_encodings::MONO8,
                                     mask_image).toImageMsg());
   }
   else {
     NODELET_WARN("no camera info is available");
   }
 }
Example #6
    ~TrackerNodelet ()
    {
      exiting_ = true;
      if (thread_)
	if (!thread_->timed_join (boost::posix_time::seconds (2)))
	  NODELET_WARN ("failed to join thread but continuing anyway");
      thread_.reset ();
      tracker_.reset ();
    }
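The destructor above is only half of the pattern: somewhere in onInit() the worker thread has to be started with exiting_ cleared. A minimal sketch of the matching startup, assuming a spin() worker method (the method name is an assumption; only thread_, tracker_, and exiting_ appear in the original):

    void onInit ()
    {
      exiting_ = false;
      // Start the worker; the destructor above joins it with a 2 s timeout.
      thread_ = boost::shared_ptr<boost::thread> (
        new boost::thread (boost::bind (&TrackerNodelet::spin, this)));
    }

    void spin ()
    {
      while (!exiting_ && ros::ok ())
      {
        // ... do tracking work, checking exiting_ often enough
        // that timed_join () in the destructor can succeed ...
      }
    }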
Example #7
 bool MUX::deleteTopicCallback(topic_tools::MuxDelete::Request& req,
                               topic_tools::MuxDelete::Response& res)
 {
   // cannot delete the topic now selected
   for (size_t i = 0; i < topics_.size(); i++) {
     if (pnh_.resolveName(topics_[i]) == pnh_.resolveName(req.topic)) {
       if (pnh_.resolveName(req.topic) == pnh_.resolveName(selected_topic_)) {
         NODELET_WARN("tried to delete currently selected topic %s from mux",
                      req.topic.c_str());
         return false;
       }
       topics_.erase(topics_.begin() + i);
       return true;
     }
   }
   NODELET_WARN("cannot find the topics %s in the list of mux",
                req.topic.c_str());
   return false;
 }
Example #8
 void GridSampler::configCallback(Config &config, uint32_t level)
 {
   boost::mutex::scoped_lock lock(mutex_);
   if (config.grid_size == 0.0) {
     NODELET_WARN("grid_size == 0.0 is prohibited");
     return;
   }
   else {
     grid_size_ = config.grid_size;
     min_indices_ = config.min_indices;
   }
 }
Example #9
bool OdometryROS::resume(std_srvs::Empty::Request&, std_srvs::Empty::Response&)
{
	if(!paused_)
	{
		NODELET_WARN( "visual_odometry: Already running!");
	}
	else
	{
		paused_ = false;
		NODELET_INFO( "visual_odometry: resumed!");
	}
	return true;
}
Example #10
bool OdometryROS::pause(std_srvs::Empty::Request&, std_srvs::Empty::Response&)
{
	if(paused_)
	{
		NODELET_WARN( "visual_odometry: Already paused!");
	}
	else
	{
		paused_ = true;
		NODELET_INFO( "visual_odometry: paused!");
	}
	return true;
}
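Both callbacks are plain std_srvs/Empty services; Example #24 below advertises them as pause_odom and resume_odom. Another node can therefore toggle odometry with a one-line service call, as in this small client-side sketch:

#include <ros/ros.h>
#include <std_srvs/Empty.h>

void pauseOdometry(ros::NodeHandle& nh)
{
  // Service name matches the advertiseService() call in Example #24.
  ros::ServiceClient client = nh.serviceClient<std_srvs::Empty>("pause_odom");
  std_srvs::Empty srv;
  if (!client.call(srv))
  {
    ROS_ERROR("Failed to call pause_odom");
  }
}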
Example #11
  // What we want to do here is to look at all the points that are *not* included in the indices list,
  // (i.e. these are the outliers, the points not on the floor) and determine which are the closest to the camera
  void cloudIndicesModelCallback(const sensor_msgs::PointCloud2ConstPtr& cloud_msg,
                                 const PointIndicesConstPtr& indices, const pcl::ModelCoefficientsConstPtr& model)
  {
    boost::mutex::scoped_lock lock (callback_mutex);
    NODELET_DEBUG_STREAM("Got cloud with timestamp " << cloud_msg->header.stamp << " + indices with timestamp "
        << indices->header.stamp << " + model with timestamp " << model->header.stamp);

    double dt;
    if (first)
    {
      first = false;
      dt = 0;
      // Initialize the previous stamp so the next dt is meaningful.
      prev_cloud_time = cloud_msg->header.stamp;
    }
    else
    {
      dt = (cloud_msg->header.stamp - prev_cloud_time).toSec();
      prev_cloud_time = cloud_msg->header.stamp;
    }
    if (model->values.size() > 0)
    {
      // Determine altitude:
      kinect_z = reject_outliers(-fabs(model->values[3])) - imu_to_kinect_offset;
      calcVelocity(kinect_z, dt, kinect_vz);
      // Detect obstacles:
      pcl::PointXYZ obstacle_location;
      bool obstacle_found = detectObstacle(cloud_msg, indices, model, obstacle_location);
      if (obstacle_found)
      {
        publishObstacleMarker(obstacle_location);
        NODELET_DEBUG("Detected obstacle at: [%f, %f, %f]", obstacle_location.x, obstacle_location.y,
                      obstacle_location.z);
      }
      publishObstacle(obstacle_found, obstacle_location);
    }
    else
    {
      NODELET_WARN("No planar model found -- cannot determine altitude or obstacles");
    }

    updateMask();

    if (not use_backup_estimator_alt)
    {
      publishOdom();
    }

    if (debug_throttle_rate > 0)
    {
      ros::Duration(1 / debug_throttle_rate).sleep();
    }
  }
Example #12
 void ColorHistogramMatcher::feature(
     const sensor_msgs::PointCloud2::ConstPtr& input_cloud,
     const jsk_pcl_ros::ClusterPointIndices::ConstPtr& input_indices)
 {
   boost::mutex::scoped_lock lock(mutex_);
   if (!reference_set_) {
     NODELET_WARN("reference histogram is not available yet");
     return;
   }
   pcl::PointCloud<pcl::PointXYZRGB>::Ptr pcl_cloud (new pcl::PointCloud<pcl::PointXYZRGB>);
   pcl::fromROSMsg(*input_cloud, *pcl_cloud);
   pcl::PointCloud<pcl::PointXYZHSV>::Ptr hsv_cloud (new pcl::PointCloud<pcl::PointXYZHSV>);
   pcl::PointCloudXYZRGBtoXYZHSV(*pcl_cloud, *hsv_cloud);
   // compute histograms first
   std::vector<std::vector<float> > histograms;
   histograms.resize(input_indices->cluster_indices.size());
   
   pcl::ExtractIndices<pcl::PointXYZHSV> extract;
   extract.setInputCloud(hsv_cloud);
   // for debug
   jsk_pcl_ros::ColorHistogramArray histogram_array;
   histogram_array.header = input_cloud->header;
   for (size_t i = 0; i < input_indices->cluster_indices.size(); i++) {
     pcl::IndicesPtr indices (new std::vector<int>(input_indices->cluster_indices[i].indices));
     extract.setIndices(indices);
     pcl::PointCloud<pcl::PointXYZHSV> segmented_cloud;
     extract.filter(segmented_cloud);
     std::vector<float> histogram;
     computeHistogram(segmented_cloud, histogram, policy_);
     histograms[i] = histogram;
     ColorHistogram ros_histogram;
     ros_histogram.header = input_cloud->header;
     ros_histogram.histogram = histogram;
     histogram_array.histograms.push_back(ros_histogram);
   }
   all_histogram_pub_.publish(histogram_array);
   
   // compare histograms
   jsk_pcl_ros::ClusterPointIndices result;
   result.header = input_indices->header;
   for (size_t i = 0; i < input_indices->cluster_indices.size(); i++) {
     const double coefficient = bhattacharyyaCoefficient(histograms[i], reference_histogram_);
     NODELET_DEBUG_STREAM("coefficient: " << i << "::" << coefficient);
     if (coefficient > coefficient_thr_) {
       result.cluster_indices.push_back(input_indices->cluster_indices[i]);
     }
   }
   result_pub_.publish(result);
 }
Example #13
  bool MUX::addTopicCallback(topic_tools::MuxAdd::Request& req,
                        topic_tools::MuxAdd::Response& res)
  {
    NODELET_INFO("trying to add %s to mux", req.topic.c_str());
    if (req.topic == g_none_topic) {
      NODELET_WARN("failed to add topic %s to mux, because it's reserved for special use",
                   req.topic.c_str());
      return false;
    }
    
    for (size_t i = 0; i < topics_.size(); i++) {
      if (pnh_.resolveName(topics_[i]) == pnh_.resolveName(req.topic)) {
        NODELET_WARN("tried to add a topic that mux was already listening to: [%s]", 
                     topics_[i].c_str());
        return false;
      }
    }

    // In the original mux, the topic is subscribed immediately after it is added.
    // In this version, we postpone the subscription until the topic is selected.
    
    topics_.push_back(ros::names::resolve(req.topic));
    return true;
  }
Example #14
	virtual void onInit()
	{
		ros::NodeHandle& nh = getNodeHandle();
		ros::NodeHandle& private_nh = getPrivateNodeHandle();

		ros::NodeHandle rgb_nh(nh, "rgb");
		ros::NodeHandle depth_nh(nh, "depth");
		ros::NodeHandle rgb_pnh(private_nh, "rgb");
		ros::NodeHandle depth_pnh(private_nh, "depth");
		image_transport::ImageTransport rgb_it(rgb_nh);
		image_transport::ImageTransport depth_it(depth_nh);
		image_transport::TransportHints hintsRgb("raw", ros::TransportHints(), rgb_pnh);
		image_transport::TransportHints hintsDepth("raw", ros::TransportHints(), depth_pnh);

		int queueSize = 10;
		bool approxSync = true;
		if(private_nh.getParam("max_rate", rate_))
		{
			NODELET_WARN("\"max_rate\" is now known as \"rate\".");
		}
		private_nh.param("rate", rate_, rate_);
		private_nh.param("queue_size", queueSize, queueSize);
		private_nh.param("approx_sync", approxSync, approxSync);
		private_nh.param("decimation", decimation_, decimation_);
		ROS_ASSERT(decimation_ >= 1);
		NODELET_INFO("Rate=%f Hz", rate_);
		NODELET_INFO("Decimation=%d", decimation_);
		NODELET_INFO("Approximate time sync = %s", approxSync?"true":"false");

		if(approxSync)
		{
			approxSync_ = new message_filters::Synchronizer<MyApproxSyncPolicy>(MyApproxSyncPolicy(queueSize), image_sub_, image_depth_sub_, info_sub_);
			approxSync_->registerCallback(boost::bind(&DataThrottleNodelet::callback, this, _1, _2, _3));
		}
		else
		{
			exactSync_ = new message_filters::Synchronizer<MyExactSyncPolicy>(MyExactSyncPolicy(queueSize), image_sub_, image_depth_sub_, info_sub_);
			exactSync_->registerCallback(boost::bind(&DataThrottleNodelet::callback, this, _1, _2, _3));
		}

		image_sub_.subscribe(rgb_it, rgb_nh.resolveName("image_in"), 1, hintsRgb);
		image_depth_sub_.subscribe(depth_it, depth_nh.resolveName("image_in"), 1, hintsDepth);
		info_sub_.subscribe(rgb_nh, "camera_info_in", 1);

		imagePub_ = rgb_it.advertise("image_out", 1);
		imageDepthPub_ = depth_it.advertise("image_out", 1);
		infoPub_ = rgb_nh.advertise<sensor_msgs::CameraInfo>("camera_info_out", 1);
	};
Example #15
  void MUX::subscribeSelectedTopic()
  {
    advertised_ = false;
    // assume that selected_topic_ is already set correctly
    if (selected_topic_ == g_none_topic) {
      NODELET_WARN("none topic is selected");
      return;
    }
    sub_.reset(new ros::Subscriber(
                 pnh_.subscribe<topic_tools::ShapeShifter>(selected_topic_, 10,
                                                           &MUX::inputCallback, this, th_)));
    std_msgs::String msg;
    msg.data = selected_topic_;
    pub_selected_.publish(msg);

  }
Example #16
/** Open the camera device.
 *
 * @param new_config configuration parameters
 * @return true, if successful
 *
 * if successful:
 *   state_ is Driver::OPENED
 *   camera_name_ set to camera_name string
 */
bool HiwrCameraControllerNodelet::openCamera(Config &new_config){

    new_config.format_mode = uvc_cam::Cam::MODE_YUYV;
    uvc_cam::Cam::mode_t mode = uvc_cam::Cam::MODE_YUYV;
    bool success = true;
    final_=NULL;
    try
    {
        ROS_INFO("opening uvc_cam at %dx%d, %f fps - %s : %s", new_config.width, new_config.height, new_config.frame_rate , new_config.device.c_str() , findVideoDevice(new_config.device.c_str()));
        printf("before cam creating, cam is at %p\n", cam_);
        ROS_INFO("[Uvc Cam Nodelet] video device is {%s}", new_config.device.c_str() );
        char * video_device = findVideoDevice(new_config.device.c_str());
        NODELET_INFO("Video DEVICE IS %s",video_device);
        cam_ = new uvc_cam::Cam(video_device, mode, new_config.width, new_config.height, new_config.frame_rate);
        printf("after cam creating, cam is at %p\n", cam_);

        camera_name_ = config_.camera_name;
        NODELET_INFO("[%s] camera_name_=%s", config_.camera_name.c_str(), camera_name_.c_str());
        state_ = Driver::OPENED;
        calibration_matches_ = true;

    }
    catch (uvc_cam::Exception& e)
    {
        ROS_FATAL_STREAM("[" << camera_name_
                         << "] exception opening device: " << e.what());
        success = false;
    }
    catch(std::runtime_error& ex){
        success = false;
        NODELET_WARN("[UVC Cam Node] runtime_error ex : %s",ex.what());
    }

    return success;
}
Example #17
void ScanToPointCloud::scanCallback(const sensor_msgs::LaserScan::ConstPtr &scanPtr) {
  ros::Time t1 = ros::Time::now();
  // NODELET_DEBUG("ScanToPointCloud::scanCallback: Point cloud received (%lu points).", scanPtr->ranges.size());
  // Wait for the transform if the target frame differs from that of the scan.
  std::string frame = targetFrame.size() ? targetFrame : scanPtr->header.frame_id;
  if (frame != scanPtr->header.frame_id
      && !transformListener.waitForTransform(frame, scanPtr->header.frame_id,
                                             scanPtr->header.stamp + ros::Duration(scanPtr->scan_time),
                                             ros::Duration(waitForTransform))) {
    NODELET_WARN("ScanToPointCloud::scanCallback: Cannot transform from %s to %s at %.2f s.",
                 scanPtr->header.frame_id.c_str(), frame.c_str(), scanPtr->header.stamp.toSec());
    return;
  }
  sensor_msgs::PointCloud2::Ptr cloud2(new sensor_msgs::PointCloud2);
  try {
    projector.transformLaserScanToPointCloud(frame, *scanPtr, *cloud2, transformListener, -1.0, channelOptions);
    pointCloudPublisher.publish(cloud2);
    // NODELET_DEBUG("ScanToPointCloud::scanCallback: Converting scan to point cloud: %.3f s.",
    // (ros::Time::now() - t1).toSec());
  } catch(tf::TransformException& ex) {
    ROS_ERROR("ScanToPointCloud::scanCallback: Transform exception: %s.", ex.what());
    return;
  }
}
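For completeness, this callback relies on a laser_geometry::LaserProjection, a tf::TransformListener, and publisher/subscriber members set up elsewhere. A minimal sketch of that setup, reusing the member names from the callback (the parameter names target_frame and wait_for_transform and the topic names are assumptions; member declarations, including projector and channelOptions, are omitted):

#include <laser_geometry/laser_geometry.h>
#include <tf/transform_listener.h>
#include <sensor_msgs/PointCloud2.h>

void ScanToPointCloud::onInit() {
  ros::NodeHandle& nh = getNodeHandle();
  ros::NodeHandle& pnh = getPrivateNodeHandle();
  // An empty target frame means "keep the scan's own frame".
  pnh.param("target_frame", targetFrame, std::string(""));
  pnh.param("wait_for_transform", waitForTransform, 1.0);
  pointCloudPublisher = nh.advertise<sensor_msgs::PointCloud2>("cloud", 10);
  scanSubscriber = nh.subscribe("scan", 10, &ScanToPointCloud::scanCallback, this);
}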
Example #18
void ImageNodelet::mouseCb(int event, int x, int y, int flags, void* param)
{
  ImageNodelet *this_ = reinterpret_cast<ImageNodelet*>(param);
  // Trick to use NODELET_* logging macros in static function
  boost::function<const std::string&()> getName =
    boost::bind(&ImageNodelet::getName, this_);

  if (event == cv::EVENT_LBUTTONDOWN)
  {
    NODELET_WARN_ONCE("Left-clicking no longer saves images. Right-click instead.");
    return;
  }
  if (event != cv::EVENT_RBUTTONDOWN)
    return;
  
  boost::lock_guard<boost::mutex> guard(this_->image_mutex_);

  const cv::Mat &image = this_->last_image_;
  if (image.empty())
  {
    NODELET_WARN("Couldn't save image, no data!");
    return;
  }

  std::string filename = (this_->filename_format_ % this_->count_).str();
  if (cv::imwrite(filename, image))
  {
    NODELET_INFO("Saved image %s", filename.c_str());
    this_->count_++;
  }
  else
  {
    /// @todo Show full path, ask if user has permission to write there
    NODELET_ERROR("Failed to save image.");
  }
}
Example #19
  bool MUX::selectTopicCallback(topic_tools::MuxSelect::Request  &req,
                                topic_tools::MuxSelect::Response &res)
  {
    res.prev_topic = selected_topic_;
    if (selected_topic_ != g_none_topic) {
      sub_->shutdown();            // unsubscribe first
    }

    if (req.topic == g_none_topic) {
      selected_topic_ = g_none_topic;
      return true;
    }
    for (size_t i = 0; i < topics_.size(); i++) {
      if (pnh_.resolveName(topics_[i]) == pnh_.resolveName(req.topic)) {
        // subscribe the topic
        selected_topic_ = topics_[i];
        subscribeSelectedTopic();
        return true;
      }
    }

    NODELET_WARN("%s is not provided in topic list", req.topic.c_str());
    return false;
  }
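The request/response types here come from topic_tools/MuxSelect, whose response carries the previously selected topic. A hedged client-side sketch (the service name "select" and its namespace are assumptions; adjust to wherever this MUX advertises the service):

#include <ros/ros.h>
#include <topic_tools/MuxSelect.h>

bool selectMuxInput(ros::NodeHandle& nh, const std::string& topic)
{
  // "select" is an assumed service name for this sketch.
  ros::ServiceClient client = nh.serviceClient<topic_tools::MuxSelect>("select");
  topic_tools::MuxSelect srv;
  srv.request.topic = topic;
  if (!client.call(srv))
  {
    return false;
  }
  ROS_INFO("Previous mux input was %s", srv.response.prev_topic.c_str());
  return true;
}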
Example #20
  /*!
  * \brief Function for the boost::thread to grab images and publish them.
  *
  * This function runs until the thread is interrupted. It is responsible for getting sensor_msgs::Image messages from the camera and publishing them.
  */
  void devicePoll()
  {
    while(!boost::this_thread::interruption_requested())   // Block until we need to stop this thread.
    {
      try
      {
        wfov_camera_msgs::WFOVImagePtr wfov_image(new wfov_camera_msgs::WFOVImage);
        // Get the image from the camera library
        NODELET_DEBUG("Starting a new grab from camera.");
        pg_.grabImage(wfov_image->image, frame_id_);

        // Set other values
        wfov_image->header.frame_id = frame_id_;

        wfov_image->gain = gain_;
        wfov_image->white_balance_blue = wb_blue_;
        wfov_image->white_balance_red = wb_red_;

        wfov_image->temperature = pg_.getCameraTemperature();

        ros::Time time = ros::Time::now();
        wfov_image->header.stamp = time;
        wfov_image->image.header.stamp = time;

        // Set the CameraInfo message
        ci_.reset(new sensor_msgs::CameraInfo(cinfo_->getCameraInfo()));
        ci_->header.stamp = wfov_image->image.header.stamp;
        ci_->header.frame_id = wfov_image->header.frame_id;
        // The height, width, distortion model, and parameters are all filled in by camera info manager.
        ci_->binning_x = binning_x_;
        ci_->binning_y = binning_y_;
        ci_->roi.x_offset = roi_x_offset_;
        ci_->roi.y_offset = roi_y_offset_;
        ci_->roi.height = roi_height_;
        ci_->roi.width = roi_width_;
        ci_->roi.do_rectify = do_rectify_;

        wfov_image->info = *ci_;

        // Publish the full message
        if(pub_->getPublisher().getNumSubscribers() > 0)
        {
          pub_->publish(wfov_image);
        }

        // Publish the message using standard image transport
        if(it_pub_.getNumSubscribers() > 0)
        {
          sensor_msgs::ImagePtr image(new sensor_msgs::Image(wfov_image->image));
          it_pub_.publish(image, ci_);
        }


      }
      catch(CameraTimeoutException& e)
      {
        NODELET_WARN("%s", e.what());
      }
      catch(std::runtime_error& e)
      {
        NODELET_ERROR("%s", e.what());
        ///< @todo Look into re-adding this
        /*try{
          // Something terrible has happened, so let's just disconnect and reconnect to see if we can recover.
          pg_.disconnect();
          ros::Duration(1.0).sleep(); // sleep for one second each time
          pg_.connect();
          pg_.start();
        } catch(std::runtime_error& e2){
          NODELET_ERROR("%s", e2.what());
        }*/
      }
      // Update diagnostics
      updater_.update();
    }
    NODELET_DEBUG("Leaving thread.");
  }
Example #21
 void ConnectionBasedNodelet::warnNeverSubscribedCallback(const ros::WallTimerEvent& event)
 {
   if (!ever_subscribed_) {
     NODELET_WARN("'%s' subscribes topics only with child subscribers.", nodelet::Nodelet::getName().c_str());
   }
 }
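This warning only fires if a timer arms it during initialization. A plausible sketch of that wiring (the one-shot five-second period, the ever_subscribed_ bookkeeping, and the timer member name are assumptions consistent with the warning text):

 void ConnectionBasedNodelet::onInit()
 {
   ever_subscribed_ = false;
   ros::NodeHandle& nh = getNodeHandle();
   // Warn once, shortly after startup, if nothing ever subscribed.
   timer_warn_never_subscribed_ = nh.createWallTimer(
     ros::WallDuration(5),
     &ConnectionBasedNodelet::warnNeverSubscribedCallback,
     this,
     /*oneshot=*/true);
 }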
Example #22
  void GeometricConsistencyGrouping::recognize(
    const sensor_msgs::PointCloud2::ConstPtr& scene_cloud_msg,
    const sensor_msgs::PointCloud2::ConstPtr& scene_feature_msg)
  {
    boost::mutex::scoped_lock lock(mutex_);
    if (!reference_cloud_ || !reference_feature_) {
      NODELET_ERROR_THROTTLE(1.0, "Not yet reference is available");
      return;
    }

    pcl::PointCloud<pcl::SHOT352>::Ptr
      scene_feature (new pcl::PointCloud<pcl::SHOT352>);
    pcl::PointCloud<pcl::PointNormal>::Ptr
      scene_cloud (new pcl::PointCloud<pcl::PointNormal>);
    pcl::fromROSMsg(*scene_cloud_msg, *scene_cloud);
    pcl::fromROSMsg(*scene_feature_msg, *scene_feature);

    pcl::CorrespondencesPtr model_scene_corrs (new pcl::Correspondences ());
    pcl::KdTreeFLANN<pcl::SHOT352> match_search;
    match_search.setInputCloud (reference_feature_);

    for (size_t i = 0; i < scene_feature->size(); ++i)
    {
      std::vector<int> neigh_indices(1);
      std::vector<float> neigh_sqr_dists(1);
      if (!pcl_isfinite (scene_feature->at(i).descriptor[0])) { //skipping NaNs
        continue;
      }
      int found_neighs
        = match_search.nearestKSearch(scene_feature->at(i), 1,
                                      neigh_indices, neigh_sqr_dists);
      //  add match only if the squared descriptor distance is less than 0.25
      // (SHOT descriptor distances are between 0 and 1 by design)
      if (found_neighs == 1 && neigh_sqr_dists[0] < 0.25f) {
        pcl::Correspondence corr (neigh_indices[0], static_cast<int> (i), neigh_sqr_dists[0]);
        model_scene_corrs->push_back (corr);
      }
    }

    pcl::GeometricConsistencyGrouping<pcl::PointNormal, pcl::PointNormal> gc_clusterer;
    gc_clusterer.setGCSize(gc_size_);
    gc_clusterer.setGCThreshold(gc_thresh_);

    gc_clusterer.setInputCloud(reference_cloud_);
    gc_clusterer.setSceneCloud(scene_cloud);
    gc_clusterer.setModelSceneCorrespondences(model_scene_corrs);

    //gc_clusterer.cluster (clustered_corrs);
    std::vector<pcl::Correspondences> clustered_corrs;
    std::vector<Eigen::Matrix4f,
                Eigen::aligned_allocator<Eigen::Matrix4f> > rototranslations;
    gc_clusterer.recognize(rototranslations, clustered_corrs);
    if (rototranslations.size() > 0) {
      NODELET_INFO("detected %lu objects", rototranslations.size());
      Eigen::Matrix4f result_mat = rototranslations[0];
      Eigen::Affine3f affine(result_mat);
      geometry_msgs::PoseStamped ros_pose;
      tf::poseEigenToMsg(affine, ros_pose.pose);
      ros_pose.header = scene_cloud_msg->header;
      pub_output_.publish(ros_pose);
    }
    else {
      NODELET_WARN("Failed to find object");
    }
    
  }
Example #23
void OdometryROS::processData(const SensorData & data, const ros::Time & stamp)
{
	if(odometry_->getPose().isIdentity() &&
	   !groundTruthFrameId_.empty())
	{
		// sync with the first value of the ground truth
		Transform initialPose = getTransform(groundTruthFrameId_, frameId_, stamp);
		if(initialPose.isNull())
		{
			return;
		}

		NODELET_INFO( "Initializing odometry pose to %s (from \"%s\" -> \"%s\")",
				initialPose.prettyPrint().c_str(),
				groundTruthFrameId_.c_str(),
				frameId_.c_str());
		odometry_->reset(initialPose);
	}

	Transform guess;
	if(guessFromTf_)
	{
		Transform previousPose = this->getTransform(odomFrameId_, frameId_, ros::Time(odometry_->previousStamp()));
		Transform pose = this->getTransform(odomFrameId_, frameId_, stamp);
		if(!previousPose.isNull() && !pose.isNull())
		{
			guess = previousPose.inverse() * pose;

			/*if(!odometry_->previousVelocityTransform().isNull())
			{
				float dt = rtabmap_ros::timestampFromROS(stamp) - odometry_->previousStamp();
				float vx,vy,vz, vroll,vpitch,vyaw;
				odometry_->previousVelocityTransform().getTranslationAndEulerAngles(vx,vy,vz, vroll,vpitch,vyaw);
				Transform motionGuess(vx*dt, vy*dt, vz*dt, vroll*dt, vpitch*dt, vyaw*dt);
				NODELET_WARN( "P  Guess %s", motionGuess.prettyPrint().c_str());
			}
			NODELET_WARN( "TF Guess %s", guess.prettyPrint().c_str());*/
		}
	}

	// process data
	ros::WallTime time = ros::WallTime::now();
	rtabmap::OdometryInfo info;
	SensorData dataCpy = data;
	rtabmap::Transform pose = odometry_->process(dataCpy, guess, &info);
	if(!pose.isNull())
	{
		resetCurrentCount_ = resetCountdown_;

		//*********************
		// Update odometry
		//*********************
		geometry_msgs::TransformStamped poseMsg;
		poseMsg.child_frame_id = frameId_;
		poseMsg.header.frame_id = odomFrameId_;
		poseMsg.header.stamp = stamp;
		rtabmap_ros::transformToGeometryMsg(pose, poseMsg.transform);

		if(publishTf_)
		{
			tfBroadcaster_.sendTransform(poseMsg);
		}

		if(odomPub_.getNumSubscribers())
		{
			//next, we'll publish the odometry message over ROS
			nav_msgs::Odometry odom;
			odom.header.stamp = stamp; // use corresponding time stamp to image
			odom.header.frame_id = odomFrameId_;
			odom.child_frame_id = frameId_;

			//set the position
			odom.pose.pose.position.x = poseMsg.transform.translation.x;
			odom.pose.pose.position.y = poseMsg.transform.translation.y;
			odom.pose.pose.position.z = poseMsg.transform.translation.z;
			odom.pose.pose.orientation = poseMsg.transform.rotation;

			//set covariance
			// libviso2 uses approximately vel variance * 2
			odom.pose.covariance.at(0) = info.variance*2;  // xx
			odom.pose.covariance.at(7) = info.variance*2;  // yy
			odom.pose.covariance.at(14) = info.variance*2; // zz
			odom.pose.covariance.at(21) = info.variance*2; // rr
			odom.pose.covariance.at(28) = info.variance*2; // pp
			odom.pose.covariance.at(35) = info.variance*2; // yawyaw

			//set velocity
			bool setTwist = !odometry_->previousVelocityTransform().isNull();
			if(setTwist)
			{
				float x,y,z,roll,pitch,yaw;
				odometry_->previousVelocityTransform().getTranslationAndEulerAngles(x,y,z,roll,pitch,yaw);
				odom.twist.twist.linear.x = x;
				odom.twist.twist.linear.y = y;
				odom.twist.twist.linear.z = z;
				odom.twist.twist.angular.x = roll;
				odom.twist.twist.angular.y = pitch;
				odom.twist.twist.angular.z = yaw;
			}

			odom.twist.covariance.at(0) = setTwist?info.variance:BAD_COVARIANCE;  // xx
			odom.twist.covariance.at(7) = setTwist?info.variance:BAD_COVARIANCE;  // yy
			odom.twist.covariance.at(14) = setTwist?info.variance:BAD_COVARIANCE; // zz
			odom.twist.covariance.at(21) = setTwist?info.variance:BAD_COVARIANCE; // rr
			odom.twist.covariance.at(28) = setTwist?info.variance:BAD_COVARIANCE; // pp
			odom.twist.covariance.at(35) = setTwist?info.variance:BAD_COVARIANCE; // yawyaw

			//publish the message
			odomPub_.publish(odom);
		}

		// local map / reference frame
		if(odomLocalMap_.getNumSubscribers() && dynamic_cast<OdometryF2M*>(odometry_))
		{
			pcl::PointCloud<pcl::PointXYZ> cloud;
			const std::multimap<int, cv::Point3f> & map = ((OdometryF2M*)odometry_)->getMap().getWords3();
			for(std::multimap<int, cv::Point3f>::const_iterator iter=map.begin(); iter!=map.end(); ++iter)
			{
				cloud.push_back(pcl::PointXYZ(iter->second.x, iter->second.y, iter->second.z));
			}
			sensor_msgs::PointCloud2 cloudMsg;
			pcl::toROSMsg(cloud, cloudMsg);
			cloudMsg.header.stamp = stamp; // use corresponding time stamp to image
			cloudMsg.header.frame_id = odomFrameId_;
			odomLocalMap_.publish(cloudMsg);
		}

		if(odomLastFrame_.getNumSubscribers())
		{
			if(dynamic_cast<OdometryF2M*>(odometry_))
			{
				const std::multimap<int, cv::Point3f> & words3  = ((OdometryF2M*)odometry_)->getLastFrame().getWords3();
				if(words3.size())
				{
					pcl::PointCloud<pcl::PointXYZ> cloud;
					for(std::multimap<int, cv::Point3f>::const_iterator iter=words3.begin(); iter!=words3.end(); ++iter)
					{
						// transform to odom frame
						cv::Point3f pt = util3d::transformPoint(iter->second, pose);
						cloud.push_back(pcl::PointXYZ(pt.x, pt.y, pt.z));
					}

					sensor_msgs::PointCloud2 cloudMsg;
					pcl::toROSMsg(cloud, cloudMsg);
					cloudMsg.header.stamp = stamp; // use corresponding time stamp to image
					cloudMsg.header.frame_id = odomFrameId_;
					odomLastFrame_.publish(cloudMsg);
				}
			}
			else
			{
				//Frame to Frame
				const Signature & refFrame = ((OdometryF2F*)odometry_)->getRefFrame();
				if(refFrame.getWords3().size())
				{
					pcl::PointCloud<pcl::PointXYZ> cloud;
					for(std::multimap<int, cv::Point3f>::const_iterator iter=refFrame.getWords3().begin(); iter!=refFrame.getWords3().end(); ++iter)
					{
						// transform to odom frame
						cv::Point3f pt = util3d::transformPoint(iter->second, pose);
						cloud.push_back(pcl::PointXYZ(pt.x, pt.y, pt.z));
					}
					sensor_msgs::PointCloud2 cloudMsg;
					pcl::toROSMsg(cloud, cloudMsg);
					cloudMsg.header.stamp = stamp; // use corresponding time stamp to image
					cloudMsg.header.frame_id = odomFrameId_;
					odomLastFrame_.publish(cloudMsg);
				}
			}
		}
	}
	else if(publishNullWhenLost_)
	{
		//NODELET_WARN( "Odometry lost!");

		//send null pose to notify that odometry is lost
		nav_msgs::Odometry odom;
		odom.header.stamp = stamp; // use corresponding time stamp to image
		odom.header.frame_id = odomFrameId_;
		odom.child_frame_id = frameId_;
		odom.pose.covariance.at(0) = BAD_COVARIANCE;  // xx
		odom.pose.covariance.at(7) = BAD_COVARIANCE;  // yy
		odom.pose.covariance.at(14) = BAD_COVARIANCE; // zz
		odom.pose.covariance.at(21) = BAD_COVARIANCE; // rr
		odom.pose.covariance.at(28) = BAD_COVARIANCE; // pp
		odom.pose.covariance.at(35) = BAD_COVARIANCE; // yawyaw
		odom.twist.covariance.at(0) = BAD_COVARIANCE;  // xx
		odom.twist.covariance.at(7) = BAD_COVARIANCE;  // yy
		odom.twist.covariance.at(14) = BAD_COVARIANCE; // zz
		odom.twist.covariance.at(21) = BAD_COVARIANCE; // rr
		odom.twist.covariance.at(28) = BAD_COVARIANCE; // pp
		odom.twist.covariance.at(35) = BAD_COVARIANCE; // yawyaw

		//publish the message
		odomPub_.publish(odom);
	}

	if(pose.isNull() && resetCurrentCount_ > 0)
	{
		NODELET_WARN( "Odometry lost! Odometry will be reset after next %d consecutive unsuccessful odometry updates...", resetCurrentCount_);

		--resetCurrentCount_;
		if(resetCurrentCount_ == 0)
		{
			// Check TF to see if sensor fusion is used (e.g., the output of robot_localization)
			Transform tfPose = this->getTransform(odomFrameId_, frameId_, stamp);
			if(tfPose.isNull())
			{
				NODELET_WARN( "Odometry automatically reset to latest computed pose!");
				odometry_->reset(odometry_->getPose());
			}
			else
			{
				NODELET_WARN( "Odometry automatically reset to latest odometry pose available from TF (%s->%s)!",
						odomFrameId_.c_str(), frameId_.c_str());
				odometry_->reset(tfPose);
			}

		}
	}

	if(odomInfoPub_.getNumSubscribers())
	{
		rtabmap_ros::OdomInfo infoMsg;
		odomInfoToROS(info, infoMsg);
		infoMsg.header.stamp = stamp; // use corresponding time stamp to image
		infoMsg.header.frame_id = odomFrameId_;
		odomInfoPub_.publish(infoMsg);
	}

	NODELET_INFO( "Odom: quality=%d, std dev=%fm, update time=%fs", info.inliers, pose.isNull()?0.0f:std::sqrt(info.variance), (ros::WallTime::now()-time).toSec());
}
Example #24
void OdometryROS::onInit()
{
	ros::NodeHandle & nh = getNodeHandle();
	ros::NodeHandle & pnh = getPrivateNodeHandle();

	odomPub_ = nh.advertise<nav_msgs::Odometry>("odom", 1);
	odomInfoPub_ = nh.advertise<rtabmap_ros::OdomInfo>("odom_info", 1);
	odomLocalMap_ = nh.advertise<sensor_msgs::PointCloud2>("odom_local_map", 1);
	odomLastFrame_ = nh.advertise<sensor_msgs::PointCloud2>("odom_last_frame", 1);

	Transform initialPose = Transform::getIdentity();
	std::string initialPoseStr;
	std::string tfPrefix;
	std::string configPath;
	pnh.param("frame_id", frameId_, frameId_);
	pnh.param("odom_frame_id", odomFrameId_, odomFrameId_);
	pnh.param("publish_tf", publishTf_, publishTf_);
	pnh.param("tf_prefix", tfPrefix, tfPrefix);
	pnh.param("wait_for_transform", waitForTransform_, waitForTransform_);
	pnh.param("wait_for_transform_duration",  waitForTransformDuration_, waitForTransformDuration_);
	pnh.param("initial_pose", initialPoseStr, initialPoseStr); // "x y z roll pitch yaw"
	pnh.param("ground_truth_frame_id", groundTruthFrameId_, groundTruthFrameId_);
	pnh.param("config_path", configPath, configPath);
	pnh.param("publish_null_when_lost", publishNullWhenLost_, publishNullWhenLost_);
	pnh.param("guess_from_tf", guessFromTf_, guessFromTf_);

	if(publishTf_ && guessFromTf_)
	{
		NODELET_WARN( "\"publish_tf\" and \"guess_from_tf\" cannot be used at the same time. \"guess_from_tf\" is disabled.");
		guessFromTf_ = false;
	}

	configPath = uReplaceChar(configPath, '~', UDirectory::homeDir());
	if(configPath.size() && configPath.at(0) != '/')
	{
		configPath = UDirectory::currentDir(true) + configPath;
	}

	if(!tfPrefix.empty())
	{
		if(!frameId_.empty())
		{
			frameId_ = tfPrefix + "/" + frameId_;
		}
		if(!odomFrameId_.empty())
		{
			odomFrameId_ = tfPrefix + "/" + odomFrameId_;
		}
		if(!groundTruthFrameId_.empty())
		{
			groundTruthFrameId_ = tfPrefix + "/" + groundTruthFrameId_;
		}
	}

	if(initialPoseStr.size())
	{
		std::vector<std::string> values = uListToVector(uSplit(initialPoseStr, ' '));
		if(values.size() == 6)
		{
			initialPose = Transform(
					uStr2Float(values[0]), uStr2Float(values[1]), uStr2Float(values[2]),
					uStr2Float(values[3]), uStr2Float(values[4]), uStr2Float(values[5]));
		}
		else
		{
			NODELET_ERROR( "Wrong initial_pose format: %s (should be \"x y z roll pitch yaw\" with angle in radians). "
					  "Identity will be used...", initialPoseStr.c_str());
		}
	}


	//parameters
	parameters_ = Parameters::getDefaultOdometryParameters(stereo_);
	if(!configPath.empty())
	{
		if(UFile::exists(configPath.c_str()))
		{
			NODELET_INFO( "Odometry: Loading parameters from %s", configPath.c_str());
			rtabmap::ParametersMap allParameters;
			Parameters::readINI(configPath.c_str(), allParameters);
			// only update odometry parameters
			for(ParametersMap::iterator iter=parameters_.begin(); iter!=parameters_.end(); ++iter)
			{
				ParametersMap::iterator jter = allParameters.find(iter->first);
				if(jter!=allParameters.end())
				{
					iter->second = jter->second;
				}
			}
		}
		else
		{
			NODELET_ERROR( "Config file \"%s\" not found!", configPath.c_str());
		}
	}
	for(rtabmap::ParametersMap::iterator iter=parameters_.begin(); iter!=parameters_.end(); ++iter)
	{
		std::string vStr;
		bool vBool;
		int vInt;
		double vDouble;
		if(pnh.getParam(iter->first, vStr))
		{
			NODELET_INFO( "Setting odometry parameter \"%s\"=\"%s\"", iter->first.c_str(), vStr.c_str());
			iter->second = vStr;
		}
		else if(pnh.getParam(iter->first, vBool))
		{
			NODELET_INFO( "Setting odometry parameter \"%s\"=\"%s\"", iter->first.c_str(), uBool2Str(vBool).c_str());
			iter->second = uBool2Str(vBool);
		}
		else if(pnh.getParam(iter->first, vDouble))
		{
			NODELET_INFO( "Setting odometry parameter \"%s\"=\"%s\"", iter->first.c_str(), uNumber2Str(vDouble).c_str());
			iter->second = uNumber2Str(vDouble);
		}
		else if(pnh.getParam(iter->first, vInt))
		{
			NODELET_INFO( "Setting odometry parameter \"%s\"=\"%s\"", iter->first.c_str(), uNumber2Str(vInt).c_str());
			iter->second = uNumber2Str(vInt);
		}

		if(iter->first.compare(Parameters::kVisMinInliers()) == 0 && atoi(iter->second.c_str()) < 8)
		{
			NODELET_WARN( "Parameter min_inliers must be >= 8, setting to 8...");
			iter->second = uNumber2Str(8);
		}
	}

	std::vector<std::string> argList = getMyArgv();
	// Use a std::vector instead of a variable-length array (a non-standard extension)
	std::vector<char*> argv(argList.size());
	for(unsigned int i=0; i<argList.size(); ++i)
	{
		argv[i] = &argList[i].at(0);
	}

	rtabmap::ParametersMap parameters = rtabmap::Parameters::parseArguments(argList.size(), argv.data());
	for(rtabmap::ParametersMap::iterator iter=parameters.begin(); iter!=parameters.end(); ++iter)
	{
		rtabmap::ParametersMap::iterator jter = parameters_.find(iter->first);
		if(jter!=parameters_.end())
		{
			NODELET_INFO( "Update odometry parameter \"%s\"=\"%s\" from arguments", iter->first.c_str(), iter->second.c_str());
			jter->second = iter->second;
		}
	}

	// Backward compatibility
	for(std::map<std::string, std::pair<bool, std::string> >::const_iterator iter=Parameters::getRemovedParameters().begin();
		iter!=Parameters::getRemovedParameters().end();
		++iter)
	{
		std::string vStr;
		if(pnh.getParam(iter->first, vStr))
		{
			if(iter->second.first)
			{
				// can be migrated
				parameters_.at(iter->second.second)= vStr;
				NODELET_WARN( "Odometry: Parameter name changed: \"%s\" -> \"%s\". Please update your launch file accordingly. Value \"%s\" is still set to the new parameter name.",
						iter->first.c_str(), iter->second.second.c_str(), vStr.c_str());
			}
			else
			{
				if(iter->second.second.empty())
				{
					NODELET_ERROR( "Odometry: Parameter \"%s\" doesn't exist anymore!",
							iter->first.c_str());
				}
				else
				{
					NODELET_ERROR( "Odometry: Parameter \"%s\" doesn't exist anymore! You may look at this similar parameter: \"%s\"",
							iter->first.c_str(), iter->second.second.c_str());
				}
			}
		}
	}

	Parameters::parse(parameters_, Parameters::kOdomResetCountdown(), resetCountdown_);
	parameters_.at(Parameters::kOdomResetCountdown()) = "0"; // use modified reset countdown here
	odometry_ = Odometry::create(parameters_);
	if(!initialPose.isIdentity())
	{
		odometry_->reset(initialPose);
	}

	resetSrv_ = nh.advertiseService("reset_odom", &OdometryROS::reset, this);
	resetToPoseSrv_ = nh.advertiseService("reset_odom_to_pose", &OdometryROS::resetToPose, this);
	pauseSrv_ = nh.advertiseService("pause_odom", &OdometryROS::pause, this);
	resumeSrv_ = nh.advertiseService("resume_odom", &OdometryROS::resume, this);

	setLogDebugSrv_ = pnh.advertiseService("log_debug", &OdometryROS::setLogDebug, this);
	setLogInfoSrv_ = pnh.advertiseService("log_info", &OdometryROS::setLogInfo, this);
	setLogWarnSrv_ = pnh.advertiseService("log_warning", &OdometryROS::setLogWarn, this);
	setLogErrorSrv_ = pnh.advertiseService("log_error", &OdometryROS::setLogError, this);

	onOdomInit();
}
Example #25
void DriverNodelet::setupDevice ()
{
  // Initialize the openni device
  FreenectDriver& driver = FreenectDriver::getInstance();

  // Enable debugging in libfreenect if requested
  if (libfreenect_debug_)
    driver.enableDebug();

  do {
    driver.updateDeviceList ();

    if (driver.getNumberDevices () == 0)
    {
      NODELET_INFO ("No devices connected.... waiting for devices to be connected");
      boost::this_thread::sleep(boost::posix_time::seconds(3));
      continue;
    }

    NODELET_INFO ("Number devices connected: %d", driver.getNumberDevices ());
    for (unsigned deviceIdx = 0; deviceIdx < driver.getNumberDevices (); ++deviceIdx)
    {
      NODELET_INFO("%u. device on bus %03u:%02u is a %s (%03x) from %s (%03x) with serial id \'%s\'",
                   deviceIdx + 1, driver.getBus(deviceIdx), driver.getAddress(deviceIdx),
                   driver.getProductName(deviceIdx), driver.getProductID(deviceIdx),
                   driver.getVendorName(deviceIdx), driver.getVendorID(deviceIdx),
                   driver.getSerialNumber(deviceIdx));
    }

    try {
      string device_id;
      if (!getPrivateNodeHandle().getParam("device_id", device_id))
      {
        NODELET_WARN ("~device_id is not set! Using first device.");
        device_ = driver.getDeviceByIndex(0);
      }
      else if (device_id.find ('@') != string::npos)
      {
        size_t pos = device_id.find ('@');
        unsigned bus = atoi (device_id.substr (0, pos).c_str ());
        unsigned address = atoi (device_id.substr (pos + 1, device_id.length () - pos - 1).c_str ());
        NODELET_INFO ("Searching for device with bus@address = %d@%d", bus, address);
        device_ = driver.getDeviceByAddress (bus, address);
      }
      else if (device_id[0] == '#')
      {
        unsigned index = atoi (device_id.c_str() + 1);
        NODELET_INFO ("Searching for device with index = %d", index);
        device_ = driver.getDeviceByIndex (index - 1);
      }
      else
      {
        NODELET_INFO ("Searching for device with serial number = '%s'", device_id.c_str ());
        device_ = driver.getDeviceBySerialNumber (device_id);
      }
    }
    catch (exception& e)
    {
      if (!device_)
      {
        NODELET_INFO ("No matching device found.... waiting for devices. Reason: %s", e.what ());
        boost::this_thread::sleep(boost::posix_time::seconds(3));
        continue;
      }
      else
      {
        NODELET_FATAL ("Could not retrieve device. Reason: %s", e.what ());
        exit (-1);
      }
    }
  } while (!device_);

  NODELET_INFO ("Opened '%s' on bus %d:%d with serial number '%s'", device_->getProductName (),
                device_->getBus (), device_->getAddress (), device_->getSerialNumber ());

  device_->registerImageCallback(&DriverNodelet::rgbCb,   *this);
  device_->registerDepthCallback(&DriverNodelet::depthCb, *this);
  device_->registerIRCallback   (&DriverNodelet::irCb,    *this);
}
Example #26
void DriverNodelet::configCb(Config &config, uint32_t level)
{
  depth_ir_offset_x_ = config.depth_ir_offset_x;
  depth_ir_offset_y_ = config.depth_ir_offset_y;
  z_offset_mm_ = config.z_offset_mm;

  // We need this for the ASUS Xtion Pro
  OutputMode old_image_mode, image_mode, compatible_image_mode;
  if (device_->hasImageStream ())
  {
    old_image_mode = device_->getImageOutputMode ();
     
    // does the device support the new image mode?
    image_mode = mapConfigMode2OutputMode (config.image_mode);

    if (!device_->findCompatibleImageMode (image_mode, compatible_image_mode))
    {
      OutputMode default_mode = device_->getDefaultImageMode();
      NODELET_WARN("Could not find any compatible image output mode for %d. "
                   "Falling back to default image output mode %d.",
                    image_mode,
                    default_mode);

      config.image_mode = mapMode2ConfigMode(default_mode);
      image_mode = compatible_image_mode = default_mode;
    }
  }
  
  OutputMode old_depth_mode, depth_mode, compatible_depth_mode;
  old_depth_mode = device_->getDepthOutputMode();
  depth_mode = mapConfigMode2OutputMode (config.depth_mode);
  if (!device_->findCompatibleDepthMode (depth_mode, compatible_depth_mode))
  {
    OutputMode default_mode = device_->getDefaultDepthMode();
    NODELET_WARN("Could not find any compatible depth output mode for %d. "
                 "Falling back to default depth output mode %d.",
                  depth_mode,
                  default_mode);
    
    config.depth_mode = mapMode2ConfigMode(default_mode);
    depth_mode = compatible_depth_mode = default_mode;
  }

  // here everything is fine. Now make the changes
  if ( (device_->hasImageStream () && compatible_image_mode != old_image_mode) ||
       compatible_depth_mode != old_depth_mode)
  {
    // streams need to be reset!
    stopSynchronization();

    if (device_->hasImageStream () && compatible_image_mode != old_image_mode)
      device_->setImageOutputMode (compatible_image_mode);

    if (compatible_depth_mode != old_depth_mode)
      device_->setDepthOutputMode (compatible_depth_mode);

    startSynchronization ();
  }

  // @todo Run connectCb if registration setting changes
  if (device_->isDepthRegistered () && !config.depth_registration)
  {
    device_->setDepthRegistration (false);
  }
  else if (!device_->isDepthRegistered () && config.depth_registration)
  {
    device_->setDepthRegistration (true);
  }

  // now we can copy
  config_ = config;
}
Example #27
  void do_work(const sensor_msgs::ImageConstPtr& msg, const std::string input_frame_from_msg)
  {
    // Work on the image.
    try
    {
      // Convert the image into something opencv can handle.
      cv::Mat frame_src = cv_bridge::toCvShare(msg, msg->encoding)->image;

      /// Ensure the image is 3-channel BGR (convert from grayscale if necessary)
      cv::Mat frame;
      if ( frame_src.channels() > 1 ) {
        frame = frame_src;
      } else {
        cv::cvtColor( frame_src, frame, cv::COLOR_GRAY2BGR );
      }

      cv::resize(frame, gray, cv::Size(frame.cols/(double)MAX(1,scale_), frame.rows/(double)MAX(1,scale_)), 0, 0, CV_INTER_AREA);
      if(prevGray.empty())
        gray.copyTo(prevGray);

      if (gray.rows != prevGray.rows || gray.cols != prevGray.cols) {
        NODELET_WARN("Images should be of equal sizes");
        gray.copyTo(prevGray);
      }

      if (frame.type() != CV_8UC3) {
        NODELET_ERROR("Image should be of type CV_8UC3");
      }
      // Messages
      opencv_apps::FlowArrayStamped flows_msg;
      flows_msg.header = msg->header;

      // Do the work
      cv::Mat flow;

      if( debug_view_) {
        cv::namedWindow( window_name_, cv::WINDOW_AUTOSIZE );
        cv::createTrackbar( "Scale", window_name_, &scale_, 24, trackbarCallback);
        if (need_config_update_) {
          config_.scale = scale_;
          srv.updateConfig(config_);
          need_config_update_ = false;
        }
      }

      float start = (float)cv::getTickCount();
#if CV_MAJOR_VERSION == 3
      cv::optflow::calcOpticalFlowSF(gray, prevGray,
#else
      cv::calcOpticalFlowSF(gray, prevGray,
#endif
                            flow,
                            3, 2, 4, 4.1, 25.5, 18, 55.0, 25.5, 0.35, 18, 55.0, 25.5, 10);
      NODELET_INFO("calcOpticalFlowSF : %lf sec", (cv::getTickCount() - start) / cv::getTickFrequency());

      //writeOpticalFlowToFile(flow, file);
      int cols = flow.cols;
      int rows = flow.rows;
      double scale_col = frame.cols/(double)flow.cols;
      double scale_row = frame.rows/(double)flow.rows;


      for (int i= 0; i < rows; ++i) {
        for (int j = 0; j < cols; ++j) {
          cv::Vec2f flow_at_point = flow.at<cv::Vec2f>(i, j);
          cv::line(frame, cv::Point(scale_col*j, scale_row*i), cv::Point(scale_col*(j+flow_at_point[0]), scale_row*(i+flow_at_point[1])), cv::Scalar(0,255,0), 1, 8, 0 );

          opencv_apps::Flow flow_msg;
          opencv_apps::Point2D point_msg;
          opencv_apps::Point2D velocity_msg;
          point_msg.x = scale_col*j;
          point_msg.y = scale_row*i;
          velocity_msg.x = scale_col*flow_at_point[0];
          velocity_msg.y = scale_row*flow_at_point[1];
          flow_msg.point = point_msg;
          flow_msg.velocity = velocity_msg;
          flows_msg.flow.push_back(flow_msg);
        }
      }

      //cv::cvtColor( frame, src_gray, cv::COLOR_BGR2GRAY );
      /// Apply Canny edge detector
      //Canny( src_gray, edges, 50, 200, 3 );

      //-- Show what you got
      if ( debug_view_) {
        cv::imshow( window_name_, frame );
        int c = cv::waitKey(1);
      }

      cv::swap(prevGray, gray);
      // Publish the image.
      sensor_msgs::Image::Ptr out_img = cv_bridge::CvImage(msg->header, "bgr8", frame).toImageMsg();
      img_pub_.publish(out_img);
      msg_pub_.publish(flows_msg);
    }
    catch (cv::Exception &e)
    {
      NODELET_ERROR("Image processing error: %s %s %s %i", e.err.c_str(), e.func.c_str(), e.file.c_str(), e.line);
    }

    prev_stamp_ = msg->header.stamp;
  }
Example #28
void DriverNodelet::onInitImpl ()
{
  ros::NodeHandle& nh       = getNodeHandle();        // topics
  ros::NodeHandle& param_nh = getPrivateNodeHandle(); // parameters

  // Allow remapping namespaces rgb, ir, depth, depth_registered
  image_transport::ImageTransport it(nh);
  ros::NodeHandle rgb_nh(nh, "rgb");
  image_transport::ImageTransport rgb_it(rgb_nh);
  ros::NodeHandle ir_nh(nh, "ir");
  image_transport::ImageTransport ir_it(ir_nh);
  ros::NodeHandle depth_nh(nh, "depth");
  image_transport::ImageTransport depth_it(depth_nh);
  ros::NodeHandle depth_registered_nh(nh, "depth_registered");
  image_transport::ImageTransport depth_registered_it(depth_registered_nh);
  ros::NodeHandle projector_nh(nh, "projector");

  rgb_frame_counter_ = depth_frame_counter_ = ir_frame_counter_ = 0;
  publish_rgb_ = publish_ir_ = publish_depth_ = true;

  // Check to see if we should enable debugging messages in libfreenect
  // libfreenect_debug_ should be set before calling setupDevice
  param_nh.param("debug" , libfreenect_debug_, false);

  // Initialize the sensor, but don't start any streams yet. That happens in the connection callbacks.
  updateModeMaps();
  setupDevice();

  // Initialize dynamic reconfigure
  reconfigure_server_.reset( new ReconfigureServer(param_nh) );
  reconfigure_server_->setCallback(boost::bind(&DriverNodelet::configCb, this, _1, _2));

  // Camera TF frames
  param_nh.param("rgb_frame_id",   rgb_frame_id_,   string("/openni_rgb_optical_frame"));
  param_nh.param("depth_frame_id", depth_frame_id_, string("/openni_depth_optical_frame"));
  NODELET_INFO("rgb_frame_id = '%s' ",   rgb_frame_id_.c_str());
  NODELET_INFO("depth_frame_id = '%s' ", depth_frame_id_.c_str());

  // Pixel offset between depth and IR images.
  // By default assume offset of (5,4) from 9x7 correlation window.
  // NOTE: These are now (temporarily?) dynamically reconfigurable, to allow tweaking.
  //param_nh.param("depth_ir_offset_x", depth_ir_offset_x_, 5.0);
  //param_nh.param("depth_ir_offset_y", depth_ir_offset_y_, 4.0);

  // The camera names are set to [rgb|depth]_[serial#], e.g. depth_B00367707227042B.
  // camera_info_manager substitutes this for ${NAME} in the URL.
  std::string serial_number = device_->getSerialNumber();
  std::string rgb_name, ir_name;
  if (serial_number.empty())
  {
    rgb_name = "rgb";
    ir_name  = "depth"; /// @todo Make it ir instead?
  }
  else
  {
    rgb_name = "rgb_"   + serial_number;
    ir_name  = "depth_" + serial_number;
  }

  std::string rgb_info_url, ir_info_url;
  param_nh.param("rgb_camera_info_url", rgb_info_url, std::string());
  param_nh.param("depth_camera_info_url", ir_info_url, std::string());

  double diagnostics_max_frequency, diagnostics_min_frequency;
  double diagnostics_tolerance, diagnostics_window_time;
  param_nh.param("enable_rgb_diagnostics", enable_rgb_diagnostics_, false);
  param_nh.param("enable_ir_diagnostics", enable_ir_diagnostics_, false);
  param_nh.param("enable_depth_diagnostics", enable_depth_diagnostics_, false);
  param_nh.param("diagnostics_max_frequency", diagnostics_max_frequency, 30.0);
  param_nh.param("diagnostics_min_frequency", diagnostics_min_frequency, 30.0);
  param_nh.param("diagnostics_tolerance", diagnostics_tolerance, 0.05);
  param_nh.param("diagnostics_window_time", diagnostics_window_time, 5.0);

  // Suppress some of the output from loading camera calibrations (kinda hacky)
  log4cxx::LoggerPtr logger_ccp = log4cxx::Logger::getLogger("ros.camera_calibration_parsers");
  log4cxx::LoggerPtr logger_cim = log4cxx::Logger::getLogger("ros.camera_info_manager");
  logger_ccp->setLevel(log4cxx::Level::getFatal());
  logger_cim->setLevel(log4cxx::Level::getWarn());
  // Also suppress sync warnings from image_transport::CameraSubscriber. When subscribing to
  // depth_registered/foo with Freenect registration disabled, the rectify nodelet for depth_registered/
  // will complain because it receives depth_registered/camera_info (from the register nodelet), but
  // the driver is not publishing depth_registered/image_raw.
  log4cxx::LoggerPtr logger_its = log4cxx::Logger::getLogger("ros.image_transport.sync");
  logger_its->setLevel(log4cxx::Level::getError());
  ros::console::notifyLoggerLevelsChanged();
  
  // Load the saved calibrations, if they exist
  rgb_info_manager_ = boost::make_shared<camera_info_manager::CameraInfoManager>(rgb_nh, rgb_name, rgb_info_url);
  ir_info_manager_  = boost::make_shared<camera_info_manager::CameraInfoManager>(ir_nh,  ir_name,  ir_info_url);

  if (!rgb_info_manager_->isCalibrated())
    NODELET_WARN("Using default parameters for RGB camera calibration.");
  if (!ir_info_manager_->isCalibrated())
    NODELET_WARN("Using default parameters for IR camera calibration.");

  // Advertise all published topics
  {
    // Prevent connection callbacks from executing until we've set all the publishers. Otherwise
    // connectCb() can fire while we're advertising (say) "depth/image_raw", but before we actually
    // assign to pub_depth_. Then pub_depth_.getNumSubscribers() returns 0, and we fail to start
    // the depth generator.
    boost::lock_guard<boost::mutex> lock(connect_mutex_);

    // Instantiate the diagnostic updater
    pub_freq_max_ = diagnostics_max_frequency;
    pub_freq_min_ = diagnostics_min_frequency;
    diagnostic_updater_.reset(new diagnostic_updater::Updater);

    // Set hardware id
    std::string hardware_id = std::string(device_->getProductName()) + "-" +
        std::string(device_->getSerialNumber());
    diagnostic_updater_->setHardwareID(hardware_id);
    
    // Asus Xtion PRO does not have an RGB camera
    if (device_->hasImageStream())
    {
      image_transport::SubscriberStatusCallback itssc = boost::bind(&DriverNodelet::rgbConnectCb, this);
      ros::SubscriberStatusCallback rssc = boost::bind(&DriverNodelet::rgbConnectCb, this);
      pub_rgb_ = rgb_it.advertiseCamera("image_raw", 1, itssc, itssc, rssc, rssc);
      if (enable_rgb_diagnostics_) {
        pub_rgb_freq_.reset(new TopicDiagnostic("RGB Image", *diagnostic_updater_,
            FrequencyStatusParam(&pub_freq_min_, &pub_freq_max_, 
                diagnostics_tolerance, diagnostics_window_time)));
      }
    }

    if (device_->hasIRStream())
    {
      image_transport::SubscriberStatusCallback itssc = boost::bind(&DriverNodelet::irConnectCb, this);
      ros::SubscriberStatusCallback rssc = boost::bind(&DriverNodelet::irConnectCb, this);
      pub_ir_ = ir_it.advertiseCamera("image_raw", 1, itssc, itssc, rssc, rssc);
      if (enable_ir_diagnostics_) {
        pub_ir_freq_.reset(new TopicDiagnostic("IR Image", *diagnostic_updater_,
            FrequencyStatusParam(&pub_freq_min_, &pub_freq_max_, 
                diagnostics_tolerance, diagnostics_window_time)));
      }
    }

    if (device_->hasDepthStream())
    {
      image_transport::SubscriberStatusCallback itssc = boost::bind(&DriverNodelet::depthConnectCb, this);
      ros::SubscriberStatusCallback rssc = boost::bind(&DriverNodelet::depthConnectCb, this);
      pub_depth_ = depth_it.advertiseCamera("image_raw", 1, itssc, itssc, rssc, rssc);
      if (enable_depth_diagnostics_) {
        pub_depth_freq_.reset(new TopicDiagnostic("Depth Image", *diagnostic_updater_,
            FrequencyStatusParam(&pub_freq_min_, &pub_freq_max_, 
                diagnostics_tolerance, diagnostics_window_time)));
      }

      pub_projector_info_ = projector_nh.advertise<sensor_msgs::CameraInfo>("camera_info", 1, rssc, rssc);
      
      if (device_->isDepthRegistrationSupported()) {
        pub_depth_registered_ = depth_registered_it.advertiseCamera("image_raw", 1, itssc, itssc, rssc, rssc);
      }
    }
  }

  // Create separate diagnostics thread
  close_diagnostics_ = false;
  diagnostics_thread_ = boost::thread(boost::bind(&DriverNodelet::updateDiagnostics, this));

  // Create watch dog timer callback
  if (param_nh.getParam("time_out", time_out_) && time_out_ > 0.0)
  {
    time_stamp_ = ros::Time(0,0);
    watch_dog_timer_ = nh.createTimer(ros::Duration(time_out_), &DriverNodelet::watchDog, this);
  }
}