Example #1
void TopicGUI::initPlugin(qt_gui_cpp::PluginContext& ctx)
{
    m_w = new DotWidget;
    ctx.addWidget(m_w);

    qRegisterMetaType<nimbro_topic_transport::SenderStatsConstPtr>();
    qRegisterMetaType<nimbro_topic_transport::ReceiverStatsConstPtr>();

    connect(
        this, SIGNAL(senderStatsReceived(nimbro_topic_transport::SenderStatsConstPtr)),
        this, SLOT(handleSenderStats(nimbro_topic_transport::SenderStatsConstPtr)),
        Qt::QueuedConnection
    );

    m_sub_senderStats = getPrivateNodeHandle().subscribe(
                            "/network/sender_stats", 1, &TopicGUI::senderStatsReceived, this
                        );

    connect(
        this, SIGNAL(receiverStatsReceived(nimbro_topic_transport::ReceiverStatsConstPtr)),
        this, SLOT(handleReceiverStats(nimbro_topic_transport::ReceiverStatsConstPtr)),
        Qt::QueuedConnection
    );

    m_sub_receiverStats = getPrivateNodeHandle().subscribe(
                              "/network/receiver_stats", 1, &TopicGUI::receiverStatsReceived, this
                          );

    QTimer* updateTimer = new QTimer(this);
    connect(updateTimer, SIGNAL(timeout()), SLOT(update()));
    updateTimer->start(2000);
}
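This pattern only compiles if the statistics handlers are declared as Qt signals and slots. Below is a minimal sketch of the class declaration it assumes; the rqt_gui_cpp::Plugin base class, header paths, and exact signatures are inferred, not taken from the original source. The ROS subscribers invoke the Qt signals directly from the ROS spinner thread, and Qt::QueuedConnection delivers the messages to the slots on the GUI thread.

// Sketch (assumption): rqt_gui_cpp::Plugin also derives from nodelet::Nodelet,
// which is where getPrivateNodeHandle() comes from.
#include <rqt_gui_cpp/plugin.h>
#include <ros/ros.h>
#include <nimbro_topic_transport/SenderStats.h>
#include <nimbro_topic_transport/ReceiverStats.h>

class DotWidget;  // project-specific widget, declaration not shown

class TopicGUI : public rqt_gui_cpp::Plugin
{
Q_OBJECT
public:
    void initPlugin(qt_gui_cpp::PluginContext& ctx);

Q_SIGNALS:
    // Emitted directly by the ROS subscribers (from any thread).
    void senderStatsReceived(nimbro_topic_transport::SenderStatsConstPtr msg);
    void receiverStatsReceived(nimbro_topic_transport::ReceiverStatsConstPtr msg);

private Q_SLOTS:
    // Run on the GUI thread via the queued connections above.
    void handleSenderStats(nimbro_topic_transport::SenderStatsConstPtr msg);
    void handleReceiverStats(nimbro_topic_transport::ReceiverStatsConstPtr msg);

private:
    DotWidget* m_w;
    ros::Subscriber m_sub_senderStats;
    ros::Subscriber m_sub_receiverStats;
};

// Needed so the message pointers can travel through Qt's queued connections,
// together with the qRegisterMetaType() calls in initPlugin().
Q_DECLARE_METATYPE(nimbro_topic_transport::SenderStatsConstPtr)
Q_DECLARE_METATYPE(nimbro_topic_transport::ReceiverStatsConstPtr)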
Example #2
  void onInit()
  {
    ros::NodeHandle& pn = this->getPrivateNodeHandle();
    
    //int img_width = 640;
    //int img_height = 480;

    pn.param("camera_name", p_camera_name_, std::string("ps_eye"));
//     pn.param("camera_topic", p_camera_topic_, p_camera_name_ + "/image_raw");
    pn.param("frame_name", p_frame_name_, std::string("ps_eye_frame"));
    pn.param("dev", p_device_name_, std::string("/dev/video0"));
    pn.param("left_camera_info_url", p_left_camera_info_url_, std::string(""));
    pn.param("right_camera_info_url", p_right_camera_info_url_, std::string(""));
    pn.param("use_every_n_th_image", p_use_every_n_th_image_, 1);
    pn.param("fps", p_fps_, 30);
    //pn.param("width", img_width, 640);
    //pn.param("height", img_height, 480);

    left_camera_info_manager_.reset(new camera_info_manager::CameraInfoManager(ros::NodeHandle("~/left"), p_camera_name_+"/left", p_left_camera_info_url_));
    right_camera_info_manager_.reset(new camera_info_manager::CameraInfoManager(ros::NodeHandle("~/right"), p_camera_name_+"/right", p_right_camera_info_url_));

    image_transport_ = new image_transport::ImageTransport(getPrivateNodeHandle());

    raw_debug_publisher_ = image_transport_->advertise("image_raw_debug", 5);

    left_mono_publisher_ = image_transport_->advertise("left/image_mono", 5);
    right_mono_publisher_ = image_transport_->advertise("right/image_mono", 5);
    left_color_publisher_ = image_transport_->advertise("left/image_raw", 5);
    right_color_publisher_ = image_transport_->advertise("right/image_raw", 5);

    left_camera_info_publisher_ = getPrivateNodeHandle().advertise<sensor_msgs::CameraInfo>("left/camera_info",5, false);
    right_camera_info_publisher_ = getPrivateNodeHandle().advertise<sensor_msgs::CameraInfo>("right/camera_info",5, false);

    this->setupModes();

    current_mode_ = 1;

    this->setupResolution(camera_modes_[current_mode_].img_width,
                          camera_modes_[current_mode_].img_height,
                          camera_modes_[current_mode_].raw_width,
                          camera_modes_[current_mode_].raw_height
                          );

    camera_.reset(new Camera(p_device_name_.c_str(),
                             camera_modes_[current_mode_].raw_width,
                             camera_modes_[current_mode_].raw_height,
                             p_fps_));

    stream_thread_.reset(new boost::thread(boost::bind(&StereoCameraNodelet::run, this)));

    //update_timer_ = pn.createTimer(ros::Duration(1.0/(static_cast<double>(p_fps_))), &StereoCameraNodelet::timerPublishImageCallback, this, false );

    //cv::Mat* img = &cvImg.image;
  }
Example #3
void ImageNodelet::onInit()
{
  ros::NodeHandle nh = getNodeHandle();
  ros::NodeHandle local_nh = getPrivateNodeHandle();

  // Command line argument parsing
  const std::vector<std::string>& argv = getMyArgv();
  // First positional argument is the transport type
  std::string transport;
  local_nh.param("image_transport", transport, std::string("raw"));
  for (int i = 0; i < (int)argv.size(); ++i)
  {
    if (argv[i][0] != '-')
    {
      transport = argv[i];
      break;
    }
  }
  NODELET_INFO_STREAM("Using transport \"" << transport << "\"");
  // Internal option, should be used only by the image_view node
  bool shutdown_on_close = std::find(argv.begin(), argv.end(),
                                     "--shutdown-on-close") != argv.end();

  // Default window name is the resolved topic name
  std::string topic = nh.resolveName("image");
  local_nh.param("window_name", window_name_, topic);

  bool autosize;
  local_nh.param("autosize", autosize, false);
  
  std::string format_string;
  local_nh.param("filename_format", format_string, std::string("frame%04i.jpg"));
  filename_format_.parse(format_string);

  cv::namedWindow(window_name_, autosize ? cv::WND_PROP_AUTOSIZE : 0);
  cv::setMouseCallback(window_name_, &ImageNodelet::mouseCb, this);
  
#ifdef HAVE_GTK
  // Register appropriate handler for when user closes the display window
  GtkWidget *widget = GTK_WIDGET( cvGetWindowHandle(window_name_.c_str()) );
  if (shutdown_on_close)
    g_signal_connect(widget, "destroy", G_CALLBACK(destroyNode), NULL);
  else
    g_signal_connect(widget, "destroy", G_CALLBACK(destroyNodelet), &sub_);
#endif

  // Start the OpenCV window thread so we don't have to waitKey() somewhere
  startWindowThread();

  image_transport::ImageTransport it(nh);
  image_transport::TransportHints hints(transport, ros::TransportHints(), getPrivateNodeHandle());
  sub_ = it.subscribe(topic, 1, &ImageNodelet::imageCb, this, hints);
}
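For context, here is a minimal image callback consistent with the window set up above. This is only a sketch using cv_bridge with a fixed bgr8 encoding, not the actual image_view implementation.

// Sketch: convert the incoming message and draw it in the window created in onInit().
void ImageNodelet::imageCb(const sensor_msgs::ImageConstPtr& msg)
{
  try
  {
    cv_bridge::CvImageConstPtr cv_ptr = cv_bridge::toCvShare(msg, "bgr8");
    cv::imshow(window_name_, cv_ptr->image);
  }
  catch (const cv_bridge::Exception& e)
  {
    NODELET_ERROR("Could not convert from '%s' to 'bgr8': %s",
                  msg->encoding.c_str(), e.what());
  }
}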
Example #4
 /**
  * @brief Start capture/publish thread.
  */
 virtual void onInit()
 {
   driver_.reset(new Driver(getPrivateNodeHandle(),
                            getPrivateNodeHandle()));
   try {
     driver_->setup();
     is_running_ = true;
     thread_ = boost::shared_ptr<boost::thread>
         (new boost::thread(boost::bind(&CvCameraNodelet::main, this)));
   } catch(cv_camera::DeviceError& e) {
     NODELET_ERROR_STREAM("failed to open device... do nothing: " << e.what());
   }
 }
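Every one of these nodelets also needs a pluginlib export so the nodelet manager can load it by name; a typical registration for the class above would look like the following sketch (the cv_camera namespace is an assumption based on Example #4).

#include <pluginlib/class_list_macros.h>

// Register the nodelet under the name declared in the package's nodelet plugin XML.
PLUGINLIB_EXPORT_CLASS(cv_camera::CvCameraNodelet, nodelet::Nodelet)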
Example #5
 virtual void onInit() {
     std::string port;
     ROS_ASSERT_MSG(getPrivateNodeHandle().getParam("port", port),
         "\"port\" param missing");
     int baudrate = 115200;
     getPrivateNodeHandle().getParam("baudrate", baudrate);
     ROS_ASSERT_MSG(getPrivateNodeHandle().getParam("frame_id", frame_id),
         "\"frame_id\" param missing");
     
     pub = getNodeHandle().advertise<uf_common::Float64Stamped>("depth", 10);
     
     device = boost::make_shared<Device>(port, baudrate);
     heartbeat_timer = getNodeHandle().createTimer(ros::Duration(0.5), boost::bind(&Nodelet::heartbeat_callback, this, _1));
     running = true;
     polling_thread_inst = boost::thread(boost::bind(&Nodelet::polling_thread, this));
 }
Example #6
File: nodelet.cpp  Project: ErolB/Sub8
  virtual void onInit() {
    std::string port = uf_common::getParam<std::string>(getPrivateNodeHandle(), "port");
    frame_id = uf_common::getParam<std::string>(getPrivateNodeHandle(), "frame_id");
    drop_every_ = uf_common::getParam<unsigned int>(getPrivateNodeHandle(), "divide", 1);

    ros::NodeHandle& nh = getNodeHandle();
    pub = nh.advertise<sensor_msgs::Imu>("imu/data_raw", 10);
    mag_pub = nh.advertise<sensor_msgs::MagneticField>("imu/mag_raw", 10);

    count = 0;
    running = true;
    device = boost::make_shared<Device>(port);
    polling_thread_inst = boost::thread(boost::bind(&Nodelet::polling_thread, this));
  }
Example #7
 virtual void onInit() {
     std::string port = uf_common::getParam<std::string>(
       getPrivateNodeHandle(), "port");
     int baudrate = uf_common::getParam<int>(
       getPrivateNodeHandle(), "baudrate", 115200);
     std::string frame_id = uf_common::getParam<std::string>(
       getPrivateNodeHandle(), "frame_id");
     
     pub = getNodeHandle().advertise<uf_common::Float64Stamped>("depth", 10);
     
     device = boost::make_shared<Device>(port, baudrate);
     heartbeat_timer = getNodeHandle().createTimer(ros::Duration(0.5), boost::bind(&Nodelet::heartbeat_callback, this, _1));
     running = true;
     polling_thread_inst = boost::thread(boost::bind(&Nodelet::polling_thread, this));
 }
Example #8
void ArduinoOnboard::onInit()
{
  ros::NodeHandle nh = getNodeHandle();
  m_nhPvt = getPrivateNodeHandle();

  m_lastTime = ros::Time::now();
  std::string port;

  if(!m_nhPvt.getParam("port", port) ||
     !m_nhPvt.getParam("numMovingAverageValues", m_numMovingAverageValues) ||
     !m_nhPvt.getParam("wheelDiameter", m_wheelDiameter) ||
     !m_nhPvt.getParam("triggerFPS", m_triggerFPS) ||
     !m_nhPvt.getParam("srvBatteryCrit", srvBatteryCrit) ||
     !m_nhPvt.getParam("srvBatteryLow", srvBatteryLow) ||
     !m_nhPvt.getParam("camBatteryCrit", camBatteryCrit) ||
     !m_nhPvt.getParam("camBatteryLow", camBatteryLow) ||
     !m_nhPvt.getParam("lfRotationEnabled", m_lfEnabled) ||
     !m_nhPvt.getParam("rfRotationEnabled", m_rfEnabled) ||
     !m_nhPvt.getParam("lbRotationEnabled", m_lbEnabled) ||
     !m_nhPvt.getParam("rbRotationEnabled", m_rbEnabled) )
  {
      ROS_ERROR("ArduinoOnboard: Could not get all arduinoOnboard parameters");
  }

  m_wheelSpeedPub = nh.advertise<autorally_msgs::wheelSpeeds>
                                    ("wheelSpeeds", 1);

  m_servoPub = nh.advertise<autorally_msgs::servoMSG> ("RC", 1);

  m_port.init(m_nhPvt, getName(), "", "ArduinoOnboard", port, true);
  m_port.registerDataCallback(
      boost::bind(&ArduinoOnboard::arduinoDataCallback, this));

  loadServoParams();
}
Example #9
 void loadCalibrationData()
 {
   ros::NodeHandle& pn = getPrivateNodeHandle();
   std::string calibration_file;
   // Try to load a specific calibration file (if requested)
   if (pn.getParam("calibration_file", calibration_file))
   {
     if (camera_->loadCalibrationData(calibration_file))
     {
       NODELET_INFO("Loaded calibration data from \"%s\"", calibration_file.c_str());
       return;
     }
     else
     {
       NODELET_WARN("Failed to load calibration data from \"%s\"", calibration_file.c_str());
     }
   }
   // Check whether the calibration data was loaded from the default location
   if (camera_->isCalibrationDataLoaded())
   {
     NODELET_INFO("Loaded calibration data from default location (\"%s.dat\" in the working directory)", camera_->getSerialNumber().c_str());
   }
   else
   {
     NODELET_WARN("Will use default calibration data");
   }
 }
Example #10
	virtual void onInit()
	{
		NODELET_WARN_STREAM("Initializing nodelet..." << getName());
		
		lp_.reset(new bta_tof_driver::BtaRos(getNodeHandle(), getPrivateNodeHandle(), getName()));
		stream_thread_.reset(new boost::thread(boost::bind(&BtaRos::initialize, lp_.get())));
	};
Example #11
	virtual void onInit() {
		ros::NodeHandle& nh = getNodeHandle();
		ros::NodeHandle& private_nh = getPrivateNodeHandle();

		dynamic_set = false;

		private_nh.getParam("min_height", min_height_);
		private_nh.getParam("max_height", max_height_);

		private_nh.getParam("row_min", u_min_);
		private_nh.getParam("row_max", u_max_);

		private_nh.getParam("output_frame_id", output_frame_id_);
		pub_ = nh.advertise<sensor_msgs::LaserScan> ("scan", 10);
		sub_ = nh.subscribe<PointCloud> ("cloud", 10, &CloudToScan::callback,
				this);

		marker_pub = nh.advertise<visualization_msgs::Marker> (
				"visualization_marker", 10);

		pcl_to_scan::cloud_to_scan_paramsConfig config;
		config.max_height = max_height_;
		config.min_height = min_height_;

		// Set up a dynamic reconfigure server.
		dr_srv = new dynamic_reconfigure::Server<pcl_to_scan::cloud_to_scan_paramsConfig>();

		cb = boost::bind(&CloudToScan::configCallback, this, _1, _2);
		dr_srv->updateConfig(config);
		dr_srv->setCallback(cb);
	}
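The dynamic_reconfigure server set up above expects a matching configCallback; here is a minimal sketch consistent with the members used in this example (the real callback may update more fields).

	// Sketch: copy the reconfigured height limits back into the members used for conversion.
	void configCallback(pcl_to_scan::cloud_to_scan_paramsConfig& config, uint32_t level)
	{
		min_height_ = config.min_height;
		max_height_ = config.max_height;
	}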
Example #12
  /*!
   * @brief onInit method. Sets up the parameters and topics.
   */
  virtual void onInit()
  {
    ros::NodeHandle& nh = getNodeHandle();
    ros::NodeHandle& private_nh = getPrivateNodeHandle();

    private_nh.getParam("min_y", min_y_);
    private_nh.getParam("max_y", max_y_);
    private_nh.getParam("min_x", min_x_);
    private_nh.getParam("max_x", max_x_);
    private_nh.getParam("max_z", max_z_);
    private_nh.getParam("goal_z", goal_z_);
    private_nh.getParam("z_scale", z_scale_);
    private_nh.getParam("x_scale", x_scale_);
    private_nh.getParam("enabled", enabled_);

    cmdpub_ = private_nh.advertise<geometry_msgs::Twist> ("cmd_vel", 1);
    markerpub_ = private_nh.advertise<visualization_msgs::Marker>("marker",1);
    bboxpub_ = private_nh.advertise<visualization_msgs::Marker>("bbox",1);
    sub_= nh.subscribe<PointCloud>("depth/points", 1, &TurtlebotFollower::cloudcb, this);

    switch_srv_ = private_nh.advertiseService("change_state", &TurtlebotFollower::changeModeSrvCb, this);

    config_srv_ = new dynamic_reconfigure::Server<turtlebot_follower::FollowerConfig>(private_nh);
    dynamic_reconfigure::Server<turtlebot_follower::FollowerConfig>::CallbackType f =
        boost::bind(&TurtlebotFollower::reconfigure, this, _1, _2);
    config_srv_->setCallback(f);
  }
Example #13
 void loadCalibrationData()
 {
   ros::NodeHandle& pn = getPrivateNodeHandle();
   std::string calibration_file;
   // Try to load a specific calibration file (if requested)
   if (pn.getParam("calibration_file", calibration_file) && !calibration_file.empty())
   {
     NODELET_INFO("Trying to load calibration from \"%s\"", calibration_file.c_str());
     try
     {
         boost::lock_guard<boost::recursive_mutex> lock(config_mutex_);
         camera_->loadCalibrationData(calibration_file);
         NODELET_INFO("Loaded calibration data from \"%s\"", calibration_file.c_str());
         return;
     }
     catch(PMDCameraNotOpenedException& e)
     {
         NODELET_WARN("Failed to load calibration data from \"%s\"", calibration_file.c_str());
     }
   }
   // Check whether the calibration data was loaded from the default location
   if (camera_->isCalibrationDataLoaded())
   {
     NODELET_INFO("Loaded calibration data from default location (\"%s.dat\" in the working directory)", camera_->getSerialNumber().c_str());
   }
   else
   {
     NODELET_WARN("Will use default calibration data");
   }
 }
Example #14
void base_controller_nodelet::onInit( )
{
	controller = new base_controller( getNodeHandle( ), getPrivateNodeHandle( ) );

	if( autostart && !start( ) )
		ROS_ERROR( "Failed to start controller" );
}
Example #15
void Dm32ToDmNodelet::onInit()
{
    nh_ = getNodeHandle();
    private_nh_ = getPrivateNodeHandle();

    nh_.param("sensor/name", sensor_name_, std::string("realsenseR200"));

    image_transport::ImageTransport it(nh_);
    image_pub_ = it.advertise("dm32_to_dm/output", 1);
    sub_ = it.subscribe("dm32_to_dm/input", 1, &Dm32ToDmNodelet::cb, this);

    scale_ = 0.001;

    if(sensor_name_ == "realsenseF200")
    {
        sensor_depth_max_ = 1200;
    }
    else if(sensor_name_ == "realsenseR200")
    {
        private_nh_.param("sensor_depth_max", sensor_depth_max_, int(4000));
    }
    else if(sensor_name_ == "picoflexx")
    {
        nh_.param("sensor/depth/max", sensor_depth_max_, int(5000));
    }
    else {
        NODELET_ERROR("Wrong sensor name!");
        return;
    }

    NODELET_INFO_STREAM("Initialized dm32 to dm nodelet for sensor " << sensor_name_
                        << ", with sensor_depth_max=" << sensor_depth_max_);
}
Example #16
  void SLICSuperPixels::onInit()
  {
    ROS_WARN("Maybe this node does not work for large size images with segfault.");
    nh_ = ros::NodeHandle(getNodeHandle(), "image");
    pnh_ = getPrivateNodeHandle();
    srv_ = boost::make_shared <dynamic_reconfigure::Server<Config> > (pnh_);
    dynamic_reconfigure::Server<Config>::CallbackType f =
      boost::bind (
        &SLICSuperPixels::configCallback, this, _1, _2);
    srv_->setCallback (f);

    pnh_.param("publish_debug_images", debug_image_, false);

    it_.reset(new image_transport::ImageTransport(nh_));
    pub_ = pnh_.advertise<sensor_msgs::Image>("output", 1);
    if (debug_image_) {
      pub_debug_ = pnh_.advertise<sensor_msgs::Image>("debug", 1);
      pub_debug_mean_color_ = pnh_.advertise<sensor_msgs::Image>("debug/mean_color", 1);
      pub_debug_center_grid_ = pnh_.advertise<sensor_msgs::Image>("debug/center_grid", 1);
    }
    image_sub_ = it_->subscribe("", 1, &SLICSuperPixels::imageCallback, this);

    ros::V_string names = boost::assign::list_of("image");
    jsk_topic_tools::warnNoRemap(names);
  }
Example #17
void ScanToPointCloud::onInit() {
  NODELET_INFO("ScanToPointCloud::onInit: Initializing...");

#ifdef USE_MT_NODE_HANDLE
  ros::NodeHandle &nh = getMTNodeHandle();
  ros::NodeHandle &pnh = getMTPrivateNodeHandle();
#else
  ros::NodeHandle &nh = getNodeHandle();
  ros::NodeHandle &pnh = getPrivateNodeHandle();
#endif

  // Process parameters.
  pnh.param("target_frame", targetFrame, targetFrame);
  NODELET_INFO("ScanToPointCloud::onInit: Using target frame: %s.", targetFrame.data());
  pnh.param("wait_for_transform", waitForTransform, waitForTransform);
  NODELET_INFO("ScanToPointCloud::onInit: Maximum time to wait for transform: %.2f s.", waitForTransform);
  pnh.param("channel_options", channelOptions, channelOptions);
  NODELET_INFO("ScanToPointCloud::onInit: Channel options: %#x.", channelOptions);
  pnh.param("scan_queue", scanQueue, scanQueue);
  NODELET_INFO("ScanToPointCloud::onInit: Scan queue size: %i.", scanQueue);
  pnh.param("point_cloud_queue", pointCloudQueue, pointCloudQueue);
  NODELET_INFO("ScanToPointCloud::onInit: Point cloud queue size: %i.", pointCloudQueue);

  // Subscribe scan topic.
  std::string scanTopic = nh.resolveName("scan", true);
  NODELET_INFO("ScanToPointCloud::onInit: Subscribing scan %s.", scanTopic.data());
  scanSubscriber = nh.subscribe<sensor_msgs::LaserScan>(scanTopic, scanQueue, &ScanToPointCloud::scanCallback, this);

  // Advertise scan point cloud.
  std::string cloudTopic = nh.resolveName("cloud", true);
  NODELET_INFO("ScanToPointCloud::onInit: Advertising point cloud %s.", cloudTopic.data());
  pointCloudPublisher = nh.advertise<sensor_msgs::PointCloud2>(cloudTopic, pointCloudQueue, false);
}
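The scan callback itself is not shown; below is a sketch of what it typically looks like with laser_geometry. projector_ and tfListener are assumed members, and the exact transformLaserScanToPointCloud overload used here is an assumption.

// Sketch: transform each scan into targetFrame and publish it as a PointCloud2.
void ScanToPointCloud::scanCallback(const sensor_msgs::LaserScan::ConstPtr& scan)
{
  if (waitForTransform > 0.0 &&
      !tfListener.waitForTransform(targetFrame, scan->header.frame_id,
                                   scan->header.stamp, ros::Duration(waitForTransform)))
  {
    NODELET_WARN("ScanToPointCloud::scanCallback: Transform not available, dropping scan.");
    return;
  }
  sensor_msgs::PointCloud2 cloud;
  projector_.transformLaserScanToPointCloud(targetFrame, *scan, cloud, tfListener,
                                            -1.0, channelOptions);
  pointCloudPublisher.publish(cloud);
}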
Example #18
// Handles (un)subscribing when clients (un)subscribe
void Stereoproc::connectCb()
{
    boost::lock_guard<boost::mutex> connect_lock(connect_mutex_);
    connected_.DebayerMonoLeft = (pub_mono_left_.getNumSubscribers() > 0)?1:0;
    connected_.DebayerMonoRight = (pub_mono_right_.getNumSubscribers() > 0)?1:0;
    connected_.DebayerColorLeft = (pub_color_left_.getNumSubscribers() > 0)?1:0;
    connected_.DebayerColorRight = (pub_color_right_.getNumSubscribers() > 0)?1:0;
    connected_.RectifyMonoLeft = (pub_mono_rect_left_.getNumSubscribers() > 0)?1:0;
    connected_.RectifyMonoRight = (pub_mono_rect_right_.getNumSubscribers() > 0)?1:0;
    connected_.RectifyColorLeft = (pub_color_rect_left_.getNumSubscribers() > 0)?1:0;
    connected_.RectifyColorRight = (pub_color_rect_right_.getNumSubscribers() > 0)?1:0;
    connected_.Disparity = (pub_disparity_.getNumSubscribers() > 0)?1:0;
    connected_.DisparityVis = (pub_disparity_vis_.getNumSubscribers() > 0)?1:0;
    connected_.Pointcloud = (pub_pointcloud_.getNumSubscribers() > 0)?1:0;
    int level = connected_.level();
    if (level == 0)
    {
        sub_l_raw_image_.unsubscribe();
        sub_l_info_.unsubscribe();
        sub_r_raw_image_.unsubscribe();
        sub_r_info_.unsubscribe();
    }
    else if (!sub_l_raw_image_.getSubscriber())
    {
        ros::NodeHandle &nh = getNodeHandle();
        // Queue size 1 should be OK; the one that matters is the synchronizer queue size.
        /// @todo Allow remapping left, right?
        image_transport::TransportHints hints("raw", ros::TransportHints(), getPrivateNodeHandle());
        sub_l_raw_image_.subscribe(*it_, "left/image_raw", 1, hints);
        sub_l_info_.subscribe(nh, "left/camera_info", 1);
        sub_r_raw_image_.subscribe(*it_, "right/image_raw", 1, hints);
        sub_r_info_.subscribe(nh, "right/camera_info", 1);
    }
}
Example #19
void ParserNodelet::onInit() {
  ros::NodeHandle& nh = getPrivateNodeHandle();
  std::string gnss_conf;
  std::string raw_data_topic;
  std::string gpgga_topic;
  std::string corr_imu_topic;
  std::string odometry_topic;
  std::string gnss_status_topic;
  std::string ins_status_topic;

  nh.param("gnss_conf", gnss_conf, std::string("./conf/gnss_conf.txt"));
  nh.param("raw_data_topic", raw_data_topic,
           std::string("/apollo/sensor/gnss/raw_data"));
  nh.param("gpgga_topic", gpgga_topic,
           std::string("/apollo/sensor/gnss/gpgga"));
  nh.param("corr_imu_topic", corr_imu_topic,
           std::string("/apollo/sensor/gnss/corrected_imu"));
  nh.param("odometry_topic", odometry_topic,
           std::string("/apollo/sensor/gnss/odometry"));
  nh.param("gnss_status_topic", gnss_status_topic,
           std::string("/apollo/sensor/gnss/gnss_status"));
  nh.param("ins_status_topic", ins_status_topic,
           std::string("/apollo/sensor/gnss/ins_status"));

  _data_parser.reset(new DataParser(nh, raw_data_topic, gpgga_topic,
                                    corr_imu_topic, odometry_topic,
                                    gnss_status_topic, ins_status_topic));
  if (!_data_parser->init(gnss_conf)) {
    ROS_ERROR("Init parser nodelet failed.");
    ROS_ERROR_STREAM("Init parser nodelet failed.");
    return;
  }
  ROS_INFO("Init parser nodelet success.");
}
Example #20
	virtual void onInit()
	{
		ros::NodeHandle & nh = getNodeHandle();
		ros::NodeHandle & pnh = getPrivateNodeHandle();

		std::string modelPath;
		pnh.param("model", modelPath, modelPath);

		if(modelPath.empty())
		{
			NODELET_ERROR("undistort_depth: \"model\" parameter should be set!");
		}

		model_.load(modelPath);
		if(!model_.isValid())
		{
			NODELET_ERROR("Loaded distortion model from \"%s\" is not valid!", modelPath.c_str());
		}
		else
		{
			image_transport::ImageTransport it(nh);
			sub_ = it.subscribe("depth", 1, &UndistortDepth::callback, this);
			pub_ = it.advertise(uFormat("%s_undistorted", nh.resolveName("depth").c_str()), 1);
		}
	}
Example #21
    virtual void onInit() {
        image_publisher_.reset(new ImagePublisher(getNodeHandle(), getPrivateNodeHandle()));
        image_publisher_->load();

        thread_available_ = true;
        thread_ = boost::shared_ptr<boost::thread>(
                new boost::thread(boost::bind(&ImagePublisherNodelet::spin, this)));
    }
Example #22
void
Image_nodelet::onInit(){
	inst_.reset( new image_publisher(getNodeHandle(), getPrivateNodeHandle()));
	//topic_listener_thread_ = boost::shared_ptr<boost::thread>(new boost::thread( boost::bind( &Image_nodelet::topic_listener, this )));

//    ros::MultiThreadedSpinner spinner(0);
//    spinner.spin();
}
Example #23
 void ImageConverter::onInit()
 {
  ros::NodeHandle& private_nh = getPrivateNodeHandle();
  private_nh.getParam("color",color_);
  image_sub_ = private_nh.subscribe("/camera/rgb/image_raw", 1, &ImageConverter::imageCb, this);
  image_pub_ = private_nh.advertise<sensor_msgs::Image>("/camera/red/image_raw", 1);
 
 }
Example #24
 virtual void onInit()
 {
   ros::NodeHandle& private_nh = getPrivateNodeHandle();
   private_nh.getParam("value", value_);
   pub = private_nh.advertise<std_msgs::Float64>("out", 10);
   sub = private_nh.subscribe("in", 10, &Plus::callback, this);
   //sub1 = private_nh.subscribe("in", 10, &Plus::callback, this);
 }
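For reference, a callback consistent with this setup, in the spirit of the standard nodelet math tutorial (a sketch, not necessarily this project's code):

 // Sketch: add the configured offset to each input and republish it on "out".
 void callback(const std_msgs::Float64::ConstPtr& input)
 {
   std_msgs::Float64Ptr output(new std_msgs::Float64());
   output->data = input->data + value_;
   pub.publish(output);
 }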
Example #25
void UsbCamNodelet::onInit()
{
  ROS_INFO("Usb cam nodelet init");
  usb_cam_.reset(new UsbCamWrapper(getNodeHandle(), getPrivateNodeHandle()));
  // spawn device poll thread
  device_thread_ = boost::shared_ptr<boost::thread>
        (new boost::thread(boost::bind(&UsbCamWrapper::spin, usb_cam_)));
}
Example #26
    void
    onInit()
    {
        n_ = getNodeHandle();

        ros::NodeHandle pn = getPrivateNodeHandle();
        pn.getParam("target_frame_id", target_frame_id_);
        pn.getParam("table_frame_id", table_frame_id_);
        pn.getParam("height_min", height_min_);
        pn.getParam("height_max", height_max_);
        XmlRpc::XmlRpcValue v;
        pn.getParam("table_dimensions", v);
        if( v.size() < 3)
        {
            ROS_ERROR("Hull points not set correctly, nodelet will not work");
            return;
        }
        double x, y, z;
        x = (double)v[0];
        y = (double)v[1];
        z = (double)v[2];
        Eigen::Vector3d p;
        p << -x/2, -y/2, z;
        hull_points_[0] = p;
        p << -x/2, y/2, z;
        hull_points_[1] = p;
        p << x/2, y/2, z;
        hull_points_[2] = p;
        p << x/2, -y/2, z;
        hull_points_[3] = p;

        tf::StampedTransform trf_table;
        try
        {
            tf_listener_.waitForTransform(target_frame_id_, table_frame_id_, ros::Time(), ros::Duration(2));
            tf_listener_.lookupTransform(target_frame_id_, table_frame_id_, ros::Time(), trf_table);
        }
        catch (tf::TransformException& ex) {
            ROS_ERROR("[transform region crop] : %s",ex.what());
            return;
        }
        Eigen::Affine3d ad;
        tf::transformTFToEigen(trf_table, ad);

        for(int i = 0; i < hull_points_.size(); i++)
        {
            Point p;
            p.getVector3fMap() = (ad*hull_points_[i]).cast<float>();
            hull_->points[i] = p;
        }

        pc_sub_ = n_.subscribe<PointCloud>("point_cloud_in", 1, boost::bind(&TableRegionCropNodelet::topicCallback, this, _1));
        pc_pub_ = n_.advertise<PointCloud>("point_cloud_out", 1);

        eppd_.setInputPlanarHull(hull_);
        eppd_.setHeightLimits(height_min_, height_max_);
        eppd_.setViewPoint(0,0,5);
    }
Example #27
void Stereoproc::onInit()
{
    ros::NodeHandle &nh = getNodeHandle();
    ros::NodeHandle &private_nh = getPrivateNodeHandle();

    it_.reset(new image_transport::ImageTransport(nh));

    // Synchronize inputs. Topic subscriptions happen on demand in the connection
    // callback. Optionally do approximate synchronization.
    int queue_size;
    private_nh.param("queue_size", queue_size, 5);
    bool approx;
    private_nh.param("approximate_sync", approx, false);
    if (approx)
    {
        approximate_sync_.reset( new ApproximateSync(ApproximatePolicy(queue_size),
                    sub_l_raw_image_, sub_l_info_,
                    sub_r_raw_image_, sub_r_info_) );
        approximate_sync_->registerCallback(boost::bind(&Stereoproc::imageCb,
                    this, _1, _2, _3, _4));
    }
    else
    {
        exact_sync_.reset( new ExactSync(ExactPolicy(queue_size),
                    sub_l_raw_image_, sub_l_info_,
                    sub_r_raw_image_, sub_r_info_) );
        exact_sync_->registerCallback(boost::bind(&Stereoproc::imageCb,
                    this, _1, _2, _3, _4));
    }

    // Set up dynamic reconfiguration
    ReconfigureServer::CallbackType f = boost::bind(&Stereoproc::configCb,
            this, _1, _2);
    reconfigure_server_.reset(new ReconfigureServer(config_mutex_, private_nh));
    reconfigure_server_->setCallback(f);

    // Monitor whether anyone is subscribed to the output
    ros::SubscriberStatusCallback connect_cb = boost::bind(&Stereoproc::connectCb, this);
    // Make sure we don't enter connectCb() between advertising and assigning to pub_disparity_
    boost::lock_guard<boost::mutex> lock(connect_mutex_);
    int publisher_queue_size;
    private_nh.param("publisher_queue_size", publisher_queue_size, 1);
    pub_mono_left_ = nh.advertise<sensor_msgs::Image>("left/image_mono", publisher_queue_size, connect_cb, connect_cb);
    pub_mono_right_ = nh.advertise<sensor_msgs::Image>("right/image_mono", publisher_queue_size, connect_cb, connect_cb);
    pub_color_left_ = nh.advertise<sensor_msgs::Image>("left/image_color", publisher_queue_size, connect_cb, connect_cb);
    pub_color_right_ = nh.advertise<sensor_msgs::Image>("right/image_color", publisher_queue_size, connect_cb, connect_cb);
    pub_mono_rect_left_ = nh.advertise<sensor_msgs::Image>("left/rect_mono", publisher_queue_size, connect_cb, connect_cb);
    pub_color_rect_left_ = nh.advertise<sensor_msgs::Image>("left/rect_color", publisher_queue_size, connect_cb, connect_cb);
    pub_mono_rect_right_ = nh.advertise<sensor_msgs::Image>("right/rect_mono", publisher_queue_size, connect_cb, connect_cb);
    pub_color_rect_right_ = nh.advertise<sensor_msgs::Image>("right/rect_color", publisher_queue_size, connect_cb, connect_cb);
    pub_disparity_ = nh.advertise<stereo_msgs::DisparityImage>("disparity", publisher_queue_size, connect_cb, connect_cb);
    pub_disparity_vis_ = nh.advertise<sensor_msgs::Image>("disparity_vis", publisher_queue_size, connect_cb, connect_cb);
    pub_pointcloud_ = nh.advertise<sensor_msgs::PointCloud2>("pointcloud", publisher_queue_size, connect_cb, connect_cb);

    cv::cuda::printShortCudaDeviceInfo(cv::cuda::getDevice());
}
Example #28
void GpsTransformPublisher::onInit()
{
  ros::NodeHandle priv = getPrivateNodeHandle();
  swri::param(priv,"child_frame_id", veh_frame_id_, std::string("base_link"));
  swri::param(priv,"parent_frame_id", global_frame_id_, std::string("map"));

  gps_sub_ = getNodeHandle().subscribe("gps", 100, &GpsTransformPublisher::HandleGps, this);

  tf_manager_.Initialize();
}
Example #29
void VisualOdometryNodelet::onInit()
{
  NODELET_INFO("Initializing Visual Odometry Nodelet");
  
  // TODO: Do we want the single threaded or multithreaded NH?
  ros::NodeHandle nh         = getNodeHandle();
  ros::NodeHandle nh_private = getPrivateNodeHandle();

  visual_odometry_.reset(new VisualOdometry(nh, nh_private));
}
Example #30
  virtual void onInit()
  {
    ros::NodeHandle& nh = getNodeHandle();
    ros::NodeHandle& private_nh = getPrivateNodeHandle();
    
    private_nh.getParam("max_rate", max_update_rate_);

    pub_ = nh.advertise<PointCloud>("cloud_out", 10);
    sub_ = nh.subscribe<PointCloud>("cloud_in", 10, &CloudThrottle::callback, this);
  };
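A throttling callback consistent with the members above might look like the following sketch; last_update_ is an assumed ros::Time member, and the actual pcl_ros implementation may differ.

  // Sketch: republish at most max_update_rate_ clouds per second.
  void callback(const PointCloud::ConstPtr& cloud)
  {
    if (max_update_rate_ > 0.0)
    {
      const ros::Time now = ros::Time::now();
      if ((now - last_update_).toSec() < 1.0 / max_update_rate_)
        return;  // too soon after the last published cloud
      last_update_ = now;
    }
    pub_.publish(cloud);
  }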