void checkInputsSynchronized()
{
  int threshold = 3 * all_received_;
  if (left_received_ >= threshold || right_received_ >= threshold ||
      left_info_received_ >= threshold || right_info_received_ >= threshold)
  {
    ROS_WARN("[stereo_processor] Low number of synchronized left/right/left_info/right_info tuples received.\n"
             "Left images received:       %d (topic '%s')\n"
             "Right images received:      %d (topic '%s')\n"
             "Left camera info received:  %d (topic '%s')\n"
             "Right camera info received: %d (topic '%s')\n"
             "Synchronized tuples: %d\n"
             "Possible issues:\n"
             "\t* stereo_image_proc is not running.\n"
             "\t  Does `rosnode info %s` show any connections?\n"
             "\t* The cameras are not synchronized.\n"
             "\t  Try restarting the node with parameter _approximate_sync:=True\n"
             "\t* The network is too slow. One or more images are dropped from each tuple.\n"
             "\t  Try restarting the node, increasing parameter 'queue_size' (currently %d)",
             left_received_, left_sub_.getTopic().c_str(),
             right_received_, right_sub_.getTopic().c_str(),
             left_info_received_, left_info_sub_.getTopic().c_str(),
             right_info_received_, right_info_sub_.getTopic().c_str(),
             all_received_, ros::this_node::getName().c_str(), queue_size_);
  }
}
CaptureServer() :
    nh_private("~")
{
  ROS_INFO("Creating localization");

  queue_size_ = 5;

  odom_pub = nh_.advertise<nav_msgs::Odometry>("vo", queue_size_);
  keyframe_pub = nh_.advertise<rm_localization::Keyframe>("keyframe", queue_size_);

  update_map_service = nh_.advertiseService("update_map", &CaptureServer::update_map, this);
  send_all_keyframes_service = nh_.advertiseService("send_all_keyframes", &CaptureServer::send_all_keyframes, this);
  clear_keyframes_service = nh_.advertiseService("clear_keyframes", &CaptureServer::clear_keyframes, this);

  rgb_sub.subscribe(nh_, "rgb/image_raw", queue_size_);
  depth_sub.subscribe(nh_, "depth/image_raw", queue_size_);
  info_sub.subscribe(nh_, "rgb/camera_info", queue_size_);

  // Synchronize inputs.
  sync.reset(new Synchronizer(SyncPolicy(queue_size_), rgb_sub, depth_sub, info_sub));
  sync->registerCallback(boost::bind(&CaptureServer::RGBDCallback, this, _1, _2, _3));
}
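// The CaptureServer constructor above relies on Synchronizer/SyncPolicy
// aliases and subscriber members that the snippet does not show. A minimal
// sketch of plausible declarations follows; the exact policy (ApproximateTime
// here) and the message types are assumptions, not the original header.
#include <boost/shared_ptr.hpp>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/CameraInfo.h>

typedef message_filters::sync_policies::ApproximateTime<
    sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo> SyncPolicy;
typedef message_filters::Synchronizer<SyncPolicy> Synchronizer;

// Class members assumed by the constructor (sketch):
message_filters::Subscriber<sensor_msgs::Image> rgb_sub;
message_filters::Subscriber<sensor_msgs::Image> depth_sub;
message_filters::Subscriber<sensor_msgs::CameraInfo> info_sub;
boost::shared_ptr<Synchronizer> sync;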
DogPositionMeasurer() :
    pnh("~"),
    meanPositionDeviation(0),
    m2PositionDeviation(0),
    meanTimeDuration(0),
    m2TimeDuration(0),
    meanUnknownTimeDuration(0),
    m2UnknownTimeDuration(0),
    n(0),
    unknownN(0),
    startMeasuringSub(nh, "start_measuring", 1),
    stopMeasuringSub(nh, "stop_measuring", 1)
{
  pnh.param<string>("model_name", modelName, "dog");

  // Setup the subscriber
  dogSub.reset(new message_filters::Subscriber<dogsim::DogPosition>(nh, "/dog_position_detector/dog_position", 1));
  dogSub->unsubscribe();
  dogSub->registerCallback(boost::bind(&DogPositionMeasurer::callback, this, _1));

  startMeasuringSub.registerCallback(boost::bind(&DogPositionMeasurer::startMeasuring, this, _1));
  stopMeasuringSub.registerCallback(boost::bind(&DogPositionMeasurer::stopMeasuring, this, _1));
}
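// DogPositionMeasurer above creates dogSub and immediately calls
// unsubscribe(), so the dog-position stream only flows between the start and
// stop triggers. A minimal sketch of the two gating callbacks; the trigger
// topics' message type is not shown in the snippet, so std_msgs::Header is
// purely a placeholder here:
void startMeasuring(const std_msgs::Header::ConstPtr& /*msg*/)
{
  // Re-subscribe to the topic given at construction time.
  dogSub->subscribe();
}

void stopMeasuring(const std_msgs::Header::ConstPtr& /*msg*/)
{
  dogSub->unsubscribe();
}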
CaptureServer() :
    nh_private("~")
{
  ROS_INFO("Creating localization");

  tf_prefix_ = tf::getPrefixParam(nh_private);
  odom_frame = tf::resolve(tf_prefix_, "odom_combined");
  map_frame = tf::resolve(tf_prefix_, "map");
  map_to_odom.setIdentity();

  queue_size_ = 1;

  rgb_sub.subscribe(nh_, "rgb/image_raw", queue_size_);
  depth_sub.subscribe(nh_, "depth/image_raw", queue_size_);
  info_sub.subscribe(nh_, "rgb/camera_info", queue_size_);

  // Synchronize inputs.
  sync.reset(new Synchronizer(SyncPolicy(queue_size_), rgb_sub, depth_sub, info_sub));
  sync->registerCallback(boost::bind(&CaptureServer::RGBDCallback, this, _1, _2, _3));

  tracked_points.reset(new std::vector<cv::Vec2f>);
}
void subscribe()
{
  if (use_indices_) {
    sub_input_.subscribe(*pnh_, "input", 1);
    sub_indices_.subscribe(*pnh_, "indices", 1);
    sync_ = boost::make_shared<message_filters::Synchronizer<SyncPolicy> >(10);
    sync_->connectInput(sub_input_, sub_indices_);
    if (!not_use_rgb_) {
      sync_->registerCallback(
        boost::bind(&ResizePointsPublisher::filter<pcl::PointXYZRGB>, this, _1, _2));
    }
    else {
      sync_->registerCallback(
        boost::bind(&ResizePointsPublisher::filter<pcl::PointXYZ>, this, _1, _2));
    }
  }
  else {
    if (!not_use_rgb_) {
      sub_ = pnh_->subscribe("input", 1, &ResizePointsPublisher::filter<pcl::PointXYZRGB>, this);
    }
    else {
      sub_ = pnh_->subscribe("input", 1, &ResizePointsPublisher::filter<pcl::PointXYZ>, this);
    }
  }
}
explicit NegExampleNode(const ros::NodeHandle& nh) :
    node_(nh)
{
  disparity_scale_factor_ = 2.0; // hard coded to match roiPlayer

  string nn = ros::this_node::getName();
  node_.param(nn + "/imageFolderPath", imgFolderPath, std::string("."));

  int qs;
  if (!node_.getParam(nn + "/Q_Size", qs)) {
    qs = 3;
  }

  // Subscribe to Messages
  sub_image_.subscribe(node_, "Color_Image", qs);
  sub_disparity_.subscribe(node_, "Disparity_Image", qs);
  sub_rois_.subscribe(node_, "input_rois", qs);

  // Sync the Synchronizer
  approximate_sync_.reset(new ApproximateSync(ApproximatePolicy(qs), sub_image_, sub_disparity_, sub_rois_));
  approximate_sync_->registerCallback(boost::bind(&NegExampleNode::imageCb, this, _1, _2, _3));
}
void unsubscribe()
{
  if (use_indices_) {
    sub_input_.unsubscribe();
    sub_indices_.unsubscribe();
  }
  else {
    sub_.shutdown();
  }
}
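// The subscribe()/unsubscribe() pair above toggles between a synchronized
// (cloud + indices) mode and a plain single-topic subscriber. A sketch of the
// members this implies; the message types and the ExactTime policy are
// assumptions inferred from the calls, not the original class definition:
message_filters::Subscriber<sensor_msgs::PointCloud2> sub_input_;
message_filters::Subscriber<pcl_msgs::PointIndices> sub_indices_;
typedef message_filters::sync_policies::ExactTime<
    sensor_msgs::PointCloud2, pcl_msgs::PointIndices> SyncPolicy;
boost::shared_ptr<message_filters::Synchronizer<SyncPolicy> > sync_;
ros::Subscriber sub_;               // used when use_indices_ is false
boost::shared_ptr<ros::NodeHandle> pnh_;
bool use_indices_;
bool not_use_rgb_;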
ObjectTracker(ros::NodeHandle& nh)
{
  camera_sub.subscribe(nh, "image", 1);
  saliency_sub.subscribe(nh, "saliency_img", 1);
  fg_objects_sub.subscribe(nh, "probability_image", 1);

  // ApproximateTime takes a queue size as its constructor argument, hence SyncPolicy(10)
  sync = new message_filters::Synchronizer<SyncPolicy>(SyncPolicy(10), camera_sub, fg_objects_sub, saliency_sub);
  sync->registerCallback(boost::bind(&ObjectTracker::process_images, this, _1, _2, _3));
}
/** @brief Constructor
    @details Subscribes to the camera topics, creates a sync object that
    synchronizes the incoming video streams, and registers a callback that
    is invoked whenever new data arrives on the topics. */
ImageConverter() :
    it_(nh_)
{
  ROS_INFO("ImageConverter constructor");

  image_sub_left.subscribe(nh_, "/wide_stereo/left/image_rect_color", 1);
  image_sub_right.subscribe(nh_, "/wide_stereo/right/image_rect_color", 1);

  // Note: sync is a local variable, but it survives for the life of the node
  // because ros::spin() blocks below until shutdown.
  message_filters::TimeSynchronizer<sensor_msgs::Image, sensor_msgs::Image>
      sync(image_sub_left, image_sub_right, 1);
  sync.registerCallback(boost::bind(ImageConverter::imageCb, _1, _2)); // imageCb must be static

  cv::namedWindow(OPENCV_WINDOW);
  ros::spin();
}
/**
 * Constructor, subscribes to input topics using image transport and registers
 * callbacks.
 * \param transport The image transport to use
 */
MonoDepthProcessor(const std::string& transport) :
    image_received_(0), depth_received_(0),
    image_info_received_(0), depth_info_received_(0),
    all_received_(0)
{
  // Read local parameters
  ros::NodeHandle local_nh("~");

  // Resolve topic names
  ros::NodeHandle nh;
  std::string camera_ns = nh.resolveName("camera");
  std::string image_topic = ros::names::clean(camera_ns + "/rgb/image_rect");
  std::string depth_topic = ros::names::clean(camera_ns + "/depth_registered/image_rect");
  std::string image_info_topic = camera_ns + "/rgb/camera_info";
  std::string depth_info_topic = camera_ns + "/depth_registered/camera_info";

  // Subscribe to four input topics.
  ROS_INFO("Subscribing to:\n\t* %s\n\t* %s\n\t* %s\n\t* %s",
           image_topic.c_str(), depth_topic.c_str(),
           image_info_topic.c_str(), depth_info_topic.c_str());

  image_transport::ImageTransport it(nh);
  image_sub_.subscribe(it, image_topic, 1, transport);
  depth_sub_.subscribe(it, depth_topic, 1, transport);
  image_info_sub_.subscribe(nh, image_info_topic, 1);
  depth_info_sub_.subscribe(nh, depth_info_topic, 1);

  // Complain every 15s if the topics appear unsynchronized
  image_sub_.registerCallback(boost::bind(MonoDepthProcessor::increment, &image_received_));
  depth_sub_.registerCallback(boost::bind(MonoDepthProcessor::increment, &depth_received_));
  image_info_sub_.registerCallback(boost::bind(MonoDepthProcessor::increment, &image_info_received_));
  depth_info_sub_.registerCallback(boost::bind(MonoDepthProcessor::increment, &depth_info_received_));
  check_synced_timer_ = nh.createWallTimer(ros::WallDuration(15.0),
                                           boost::bind(&MonoDepthProcessor::checkInputsSynchronized, this));

  // Synchronize input topics. Optionally do approximate synchronization.
  local_nh.param("queue_size", queue_size_, 5);
  bool approx;
  local_nh.param("approximate_sync", approx, true);
  if (approx)
  {
    approximate_sync_.reset(new ApproximateSync(ApproximatePolicy(queue_size_),
                                                image_sub_, depth_sub_,
                                                image_info_sub_, depth_info_sub_));
    approximate_sync_->registerCallback(boost::bind(&MonoDepthProcessor::dataCb, this, _1, _2, _3, _4));
  }
  else
  {
    exact_sync_.reset(new ExactSync(ExactPolicy(queue_size_),
                                    image_sub_, depth_sub_,
                                    image_info_sub_, depth_info_sub_));
    exact_sync_->registerCallback(boost::bind(&MonoDepthProcessor::dataCb, this, _1, _2, _3, _4));
  }
}
/**
 * Constructor, subscribes to input topics using image transport and registers
 * callbacks.
 * \param transport The image transport to use
 */
StereoProcessor(const std::string& transport) :
    left_received_(0), right_received_(0),
    left_info_received_(0), right_info_received_(0),
    all_received_(0)
{
  // Read local parameters
  ros::NodeHandle local_nh("~");

  // Resolve topic names
  ros::NodeHandle nh;
  std::string stereo_ns = nh.resolveName("stereo");
  std::string left_topic = ros::names::clean(stereo_ns + "/left/" + nh.resolveName("image"));
  std::string right_topic = ros::names::clean(stereo_ns + "/right/" + nh.resolveName("image"));
  std::string left_info_topic = stereo_ns + "/left/camera_info";
  std::string right_info_topic = stereo_ns + "/right/camera_info";

  // Subscribe to four input topics.
  ROS_INFO("Subscribing to:\n\t* %s\n\t* %s\n\t* %s\n\t* %s",
           left_topic.c_str(), right_topic.c_str(),
           left_info_topic.c_str(), right_info_topic.c_str());

  image_transport::ImageTransport it(nh);
  left_sub_.subscribe(it, left_topic, 1, transport);
  right_sub_.subscribe(it, right_topic, 1, transport);
  left_info_sub_.subscribe(nh, left_info_topic, 1);
  right_info_sub_.subscribe(nh, right_info_topic, 1);

  // Complain every 15s if the topics appear unsynchronized
  left_sub_.registerCallback(boost::bind(StereoProcessor::increment, &left_received_));
  right_sub_.registerCallback(boost::bind(StereoProcessor::increment, &right_received_));
  left_info_sub_.registerCallback(boost::bind(StereoProcessor::increment, &left_info_received_));
  right_info_sub_.registerCallback(boost::bind(StereoProcessor::increment, &right_info_received_));
  check_synced_timer_ = nh.createWallTimer(ros::WallDuration(15.0),
                                           boost::bind(&StereoProcessor::checkInputsSynchronized, this));

  // Synchronize input topics. Optionally do approximate synchronization.
  local_nh.param("queue_size", queue_size_, 5);
  bool approx;
  local_nh.param("approximate_sync", approx, false);
  if (approx)
  {
    approximate_sync_.reset(new ApproximateSync(ApproximatePolicy(queue_size_),
                                                left_sub_, right_sub_,
                                                left_info_sub_, right_info_sub_));
    approximate_sync_->registerCallback(boost::bind(&StereoProcessor::dataCb, this, _1, _2, _3, _4));
  }
  else
  {
    exact_sync_.reset(new ExactSync(ExactPolicy(queue_size_),
                                    left_sub_, right_sub_,
                                    left_info_sub_, right_info_sub_));
    exact_sync_->registerCallback(boost::bind(&StereoProcessor::dataCb, this, _1, _2, _3, _4));
  }
}
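// The two processors above and their checkInputsSynchronized() diagnostics
// share one pattern: a static helper bumps a per-topic counter on every
// message, dataCb() bumps all_received_ for every synchronized tuple, and a
// wall timer compares the two. A sketch of the assumed helper and counter
// wiring (the dataCb body here is hypothetical):
static void increment(int* value)
{
  ++(*value);
}

void dataCb(const sensor_msgs::ImageConstPtr& l_image,
            const sensor_msgs::ImageConstPtr& r_image,
            const sensor_msgs::CameraInfoConstPtr& l_info,
            const sensor_msgs::CameraInfoConstPtr& r_info)
{
  all_received_++; // compared against the per-topic totals in checkInputsSynchronized()
  // ... actual processing would go here ...
}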
explicit HaarAdaNode(const ros::NodeHandle& nh) :
    node_(nh)
{
  num_class1 = 0;
  num_class0 = 0;
  num_TP_class1 = 0;
  num_FP_class1 = 0;
  num_TP_class0 = 0;
  num_FP_class0 = 0;

  string nn = ros::this_node::getName();

  int qs;
  if (!node_.getParam(nn + "/Q_Size", qs)) {
    qs = 3;
  }

  int NS;
  if (!node_.getParam(nn + "/num_Training_Samples", NS)) {
    NS = 350; // the default sets aside very little data for training
    node_.setParam(nn + "/num_Training_Samples", NS);
  }
  HAC_.setMaxSamples(NS);

  if (!node_.getParam(nn + "/RemoveOverlappingRois", remove_overlapping_rois_)) {
    remove_overlapping_rois_ = false;
  }

  // Published Messages
  pub_rois_ = node_.advertise<Rois>("HaarAdaOutputRois", qs);
  pub_Color_Image_ = node_.advertise<Image>("HaarAdaColorImage", qs);
  pub_Disparity_Image_ = node_.advertise<DisparityImage>("HaarAdaDisparityImage", qs);

  // Subscribe to Messages
  sub_image_.subscribe(node_, "Color_Image", qs);
  sub_disparity_.subscribe(node_, "Disparity_Image", qs);
  sub_rois_.subscribe(node_, "input_rois", qs);

  // Sync the Synchronizer
  approximate_sync_.reset(new ApproximateSync(ApproximatePolicy(qs), sub_image_, sub_disparity_, sub_rois_));
  approximate_sync_->registerCallback(boost::bind(&HaarAdaNode::imageCb, this, _1, _2, _3));
}
explicit roiViewerNode(const ros::NodeHandle& nh) :
    node_(nh)
{
  label = 0;
  show_confidence = false;

  // Read mode from launch file
  std::string mode = "";
  node_.param(ros::this_node::getName() + "/mode", mode, std::string("none"));
  ROS_INFO("Selected mode: %s", mode.c_str());

  if (mode.compare("roi_display") == 0) {
    ROS_INFO("MODE: %s", mode.c_str());

    node_.param(ros::this_node::getName() + "/vertical", vertical, false);
    // Read the label to display
    node_.param(ros::this_node::getName() + "/label", label, 0);
    // Read parameter for showing roi confidence
    node_.param(ros::this_node::getName() + "/show_confidence", show_confidence, false);
    // Read parameter stating if the image is grayscale or with colors
    node_.param(ros::this_node::getName() + "/color_image", color_image, true);

    // Subscribe to Messages
    sub_image_.subscribe(node_, "input_image", 20);
    sub_detections_.subscribe(node_, "input_detections", 20);

    // Sync the Synchronizer
    approximate_sync_.reset(new ApproximateSync(ApproximatePolicy(20), sub_image_, sub_detections_));
    approximate_sync_->registerCallback(boost::bind(&roiViewerNode::imageCb, this, _1, _2));
  }
  else {
    ROS_INFO("Unknown mode: %s. Please set mode to 'roi_display' in roiViewer.launch", mode.c_str());
  }

  // Visualization
  cv::namedWindow("Detections", WINDOW_AUTOSIZE);
  cv::startWindowThread();
}
void onInit()
{
  nh = getMTNodeHandle();
  nh_priv = getMTPrivateNodeHandle();

  // Parameters
  nh_priv.param("use_backup_estimator_alt", use_backup_estimator_alt, use_backup_estimator_alt);
  nh_priv.param("imu_to_kinect_offset", imu_to_kinect_offset, imu_to_kinect_offset);
  nh_priv.param("max_est_kinect_delta_alt", max_est_kinect_delta_alt, max_est_kinect_delta_alt);
  nh_priv.param("obstacle_height_threshold", obstacle_height_threshold, obstacle_height_threshold);
  nh_priv.param("debug_throttle_rate", debug_throttle_rate, debug_throttle_rate);
  nh_priv.param("z_vel_filt_a", z_vel_filt_a, z_vel_filt_a);
  nh_priv.param("z_vel_filt_b", z_vel_filt_b, z_vel_filt_b);

  // Publishers
  odom_pub = nh_priv.advertise<nav_msgs::Odometry>("output", 10);
  obstacle_pub = nh_priv.advertise<starmac_kinect::Obstacle>("obstacle", 10);
  debug_pub = nh_priv.advertise<starmac_kinect::Debug>("debug", 10);
  marker_pub = nh_priv.advertise<visualization_msgs::Marker>("marker", 10);
  mask_indices_pub = nh.advertise<pcl::PointIndices>("mask_indices", 10);
  findplane_indices_pub = nh.advertise<pcl::PointIndices>("findplane_indices", 10, true);

  // Subscribers
  sub_input_filter.subscribe(nh_priv, "input", max_queue_size);
  sub_indices_filter.subscribe(nh_priv, "indices", max_queue_size);
  sub_model_filter.subscribe(nh_priv, "model", max_queue_size);

  sync_input_indices_e =
      make_shared<Synchronizer<ExactTime<PointCloud2, PointIndices, ModelCoefficients> > >(max_queue_size);
  sync_input_indices_e->connectInput(sub_input_filter, sub_indices_filter, sub_model_filter);
  sync_input_indices_e->registerCallback(bind(&KinectEstimator::cloudIndicesModelCallback, this, _1, _2, _3));

  // Fill out most of the output Odometry message (we'll only be filling in the z pose value)
  odom_msg_output.child_frame_id = "imu";
  odom_msg_output.header.frame_id = "ned";
  odom_msg_output.pose.pose.orientation.x = 0;
  odom_msg_output.pose.pose.orientation.y = 0;
  odom_msg_output.pose.pose.orientation.z = 0;
  odom_msg_output.pose.pose.orientation.w = 1;
  odom_msg_output.pose.pose.position.x = 0;
  odom_msg_output.pose.pose.position.y = 0;

  if (use_backup_estimator_alt)
  {
    est_odom_sub = nh.subscribe("estimator/output", 1, &KinectEstimator::estOdomCallback, this,
                                ros::TransportHints().tcpNoDelay());
  }

  updateMask(); // force once to start things
}
laserScanToSamgar(ros::NodeHandle n) :
    n_(n),
    laser_sub_(n_, "base_scan", 10)
{
  laser_sub_.registerCallback(boost::bind(&laserScanToSamgar::scanCallback, this, _1));
  //scan_pub_ = n_.advertise<sensor_msgs::PointCloud>("/my_cloud",1);
}
/////////////////////////////////////////////////////////////////
// Constructor
PedestrianDetectorHOG(ros::NodeHandle nh) :
    nh_(nh),
    local_nh_("~"),
    it_(nh_),
    stereo_sync_(4),
    // cam_model_(NULL),
    counter(0)
{
  hog_.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());

  // Get parameters from the server
  local_nh_.param("hit_threshold", hit_threshold_, 0.0);
  local_nh_.param("group_threshold", group_threshold_, 2);
  local_nh_.param("use_depth", use_depth_, true);
  local_nh_.param("use_height", use_height_, true);
  local_nh_.param("max_height_m", max_height_m_, 2.2);
  local_nh_.param("ground_frame", ground_frame_, std::string("base_link"));
  local_nh_.param("do_display", do_display_, true);

  // Advertise a 3d position measurement for each head.
  people_pub_ = nh_.advertise<people_msgs::PositionMeasurement>("people_tracker_measurements", 1);

  // Subscribe to tf & the images
  if (use_depth_)
  {
    tf_.setExtrapolationLimit(ros::Duration().fromSec(0.01));

    std::string left_topic = nh_.resolveName("stereo") + "/left/image";
    std::string disp_topic = nh_.resolveName("stereo") + "/disparity";
    std::string left_info_topic = nh_.resolveName("stereo") + "/left/camera_info";
    std::string right_info_topic = nh_.resolveName("stereo") + "/right/camera_info";

    left_sub_.subscribe(it_, left_topic, 10);
    disp_sub_.subscribe(it_, disp_topic, 10);
    left_info_sub_.subscribe(nh_, left_info_topic, 10);
    right_info_sub_.subscribe(nh_, right_info_topic, 10);

    stereo_sync_.connectInput(left_sub_, left_info_sub_, disp_sub_, right_info_sub_);
    stereo_sync_.registerCallback(boost::bind(&PedestrianDetectorHOG::imageCBAll, this, _1, _2, _3, _4));
  }
  else
  {
    std::string topic = nh_.resolveName("image");
    image_sub_ = it_.subscribe(topic, 10, &PedestrianDetectorHOG::imageCB, this);
  }
}
void checkInputsSynchronized()
{
  int threshold = 3 * all_received_;
  if (image_received_ >= threshold || depth_received_ >= threshold ||
      image_info_received_ >= threshold || depth_info_received_ >= threshold)
  {
    ROS_WARN("[mono_depth_processor] Low number of synchronized image/depth/image_info/depth_info tuples received.\n"
             "Images received:            %d (topic '%s')\n"
             "Depth images received:      %d (topic '%s')\n"
             "Image camera info received: %d (topic '%s')\n"
             "Depth camera info received: %d (topic '%s')\n"
             "Synchronized tuples: %d\n",
             image_received_, image_sub_.getTopic().c_str(),
             depth_received_, depth_sub_.getTopic().c_str(),
             image_info_received_, image_info_sub_.getTopic().c_str(),
             depth_info_received_, depth_info_sub_.getTopic().c_str(),
             all_received_);
  }
}
void connectCb()
{
  num_subs++;
  if (num_subs > 0) {
    color_image_sub_.subscribe(image_transport_, "image_color", 1);
    pc_sub_.subscribe(n_, "point_cloud2", 1);
  }
}
bool PointCloudProjector::enable(platform_motion_msgs::Enable::Request& req,
                                 platform_motion_msgs::Enable::Response& resp)
{
  ros::NodeHandle nh;
  if (req.state && !enabled_) {
    pointcloud_sub.subscribe(nh, "pointcloud", 1);
    patch_sub.subscribe(nh, "patches", 1);
  }
  else if (enabled_) {
    pointcloud_sub.unsubscribe();
    patch_sub.unsubscribe();
  }
  enabled_ = req.state;
  resp.state = enabled_;
  return true;
}
void connectCb()
{
  num_subs++;
  if (num_subs > 0) {
    color_image_sub_.subscribe(image_transport_, "/camera/rgb/image_raw", 1);
    pc_sub_.subscribe(n_, "/camera/depth/points", 1);
  }
}
void disconnectCb()
{
  num_subs--;
  if (num_subs <= 0) {
    color_image_sub_.unsubscribe();
    pc_sub_.unsubscribe();
  }
}
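// The connectCb()/disconnectCb() pairs above only fire if the node's output
// publisher was advertised with subscriber-status callbacks. A self-contained
// sketch of that lazy-subscription wiring; the class and topic names are
// illustrative, not taken from the snippets:
#include <ros/ros.h>
#include <boost/bind.hpp>
#include <sensor_msgs/PointCloud2.h>

class LazySubscriberNode
{
public:
  LazySubscriberNode() : num_subs(0)
  {
    ros::SubscriberStatusCallback connect_cb =
        boost::bind(&LazySubscriberNode::connectCb, this);
    ros::SubscriberStatusCallback disconnect_cb =
        boost::bind(&LazySubscriberNode::disconnectCb, this);
    // connectCb/disconnectCb fire whenever a client (un)subscribes to "output".
    pub_ = n_.advertise<sensor_msgs::PointCloud2>("output", 1,
                                                  connect_cb, disconnect_cb);
  }

private:
  void connectCb()    { num_subs++; /* subscribe to the inputs, as above */ }
  void disconnectCb() { num_subs--; /* unsubscribe when num_subs <= 0 */ }

  ros::NodeHandle n_;
  ros::Publisher pub_;
  int num_subs;
};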
unsigned long init()
{
  color_camera_image_sub_.subscribe(*it_, "colorimage", 1);
  point_cloud_sub_.subscribe(node_handle_, "pointcloud", 1);

  sync_pointcloud_->connectInput(point_cloud_sub_, color_camera_image_sub_);
  sync_pointcloud_callback_connection_ =
      sync_pointcloud_->registerCallback(boost::bind(&CobKinectImageFlipNodelet::inputCallback, this, _1, _2));

  return ipa_Utils::RET_OK;
}
void initNode()
{
  pc_sync_ = boost::shared_ptr<message_filters::Synchronizer<SyncPolicy> >(
      new message_filters::Synchronizer<SyncPolicy>(SyncPolicy(3)));

  pc_pub_ = n_.advertise<sensor_msgs::PointCloud2>("colored_point_cloud2", 1);

  color_image_sub_.subscribe(image_transport_, "image_color", 1);
  pc_sub_.subscribe(n_, "point_cloud2", 1);

  pc_sync_->connectInput(color_image_sub_, pc_sub_);
  pc_sync_->registerCallback(boost::bind(&CreateColoredPointCloud::syncCallback, this, _1, _2));
}
// Connection callback that unsubscribes from the tracker if no one is subscribed.
void connectCallback(message_filters::Subscriber<CameraInfo> &sub_cam,
                     message_filters::Subscriber<GroundPlane> &sub_gp,
                     image_transport::SubscriberFilter &sub_col,
                     image_transport::SubscriberFilter &sub_dep,
                     image_transport::ImageTransport &it)
{
  if (!pub_message.getNumSubscribers()
      && !pub_result_image.getNumSubscribers()
      && !pub_centres.getNumSubscribers()
      && !pub_detected_persons.getNumSubscribers())
  {
    ROS_DEBUG("Upper Body Detector: No subscribers. Unsubscribing.");
    sub_cam.unsubscribe();
    sub_gp.unsubscribe();
    sub_col.unsubscribe();
    sub_dep.unsubscribe();
  }
  else
  {
    ROS_DEBUG("Upper Body Detector: New subscribers. Subscribing.");
    sub_cam.subscribe();
    sub_gp.subscribe();
    sub_col.subscribe(it, sub_col.getTopic().c_str(), 1);
    sub_dep.subscribe(it, sub_dep.getTopic().c_str(), 1);
  }
}
// Handles (un)subscribing when clients (un)subscribe
void PointCloud2Nodelet::connectCb()
{
  boost::lock_guard<boost::mutex> lock(connect_mutex_);
  if (pub_points2_.getNumSubscribers() == 0)
  {
    sub_l_image_  .unsubscribe();
    sub_l_info_   .unsubscribe();
    sub_r_info_   .unsubscribe();
    sub_disparity_.unsubscribe();
  }
  else if (!sub_l_image_.getSubscriber())
  {
    ros::NodeHandle &nh = getNodeHandle();
    // Queue size 1 should be OK; the one that matters is the synchronizer queue size.
    sub_l_image_  .subscribe(*it_, "left/image_rect_color", 1);
    sub_l_info_   .subscribe(nh, "left/camera_info", 1);
    sub_r_info_   .subscribe(nh, "right/camera_info", 1);
    sub_disparity_.subscribe(nh, "disparity", 1);
  }
}
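// connectCb() above is typically registered as both the connect and
// disconnect hook of pub_points2_ in the nodelet's onInit(). Roughly (a
// sketch; note the lock is taken before advertise() because connectCb() can
// fire as soon as the publisher exists):
ros::SubscriberStatusCallback connect_cb =
    boost::bind(&PointCloud2Nodelet::connectCb, this);
boost::lock_guard<boost::mutex> lock(connect_mutex_);
pub_points2_ = nh.advertise<sensor_msgs::PointCloud2>("points2", 1,
                                                      connect_cb, connect_cb);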
PointCloudCapturer(ros::NodeHandle &n) :
    nh_(n),
    synchronizer_(MySyncPolicy(1), cloud_sub_, camera_sub_)
{
  nh_.param("input_cloud_topic", input_cloud_topic_, std::string("/kinect_head/camera/rgb/points"));
  nh_.param("input_image_topic", input_image_topic_, std::string("/kinect_head/camera/rgb/image_color"));
  nh_.param("input_camera_info_topic", input_camera_info_topic_, std::string("/kinect_head/camera/rgb/camera_info"));

  // cloud_sub_ = nh_.subscribe(input_cloud_topic_, 10, &PointCloudCapturer::cloud_cb, this);
  camera_sub_.subscribe(nh_, input_image_topic_, 1000);
  cloud_sub_.subscribe(nh_, input_cloud_topic_, 1000);
  sync_connection_ = synchronizer_.registerCallback(&PointCloudCapturer::callback, this);
  ROS_INFO("[PointCloudCapturer:] Subscribing to cloud topic %s", input_cloud_topic_.c_str());

  point_head_client_ = new PointHeadClient("/head_traj_controller/point_head_action", true);
  // wait for head controller action server to come up
  while (!point_head_client_->waitForServer(ros::Duration(5.0)) && ros::ok())
  {
    ROS_INFO("Waiting for the point_head_action server to come up");
  }

  cloud_and_image_received_ = false;

  nh_.param("move_offset_y_min", move_offset_y_min_, -1.5);
  nh_.param("move_offset_y_max", move_offset_y_max_, 1.5);
  nh_.param("step_y", step_y_, 0.3);
  nh_.param("move_offset_z_min", move_offset_z_min_, 0.3);
  nh_.param("move_offset_z_max", move_offset_z_max_, 1.5);
  nh_.param("step_z", step_z_, 0.3);
  nh_.param("move_offset_x", move_offset_x_, 1.0);
  nh_.param("bag_name", bag_name_, std::string("bosch_kitchen_tr.bag"));
  nh_.param("to_frame", to_frame_, std::string("base_link"));
  nh_.param("rate", rate_, 1.0);

  current_position_x_ = move_offset_x_;
  current_position_y_ = move_offset_y_min_;
  current_position_z_ = move_offset_z_min_;
  move_head("base_link", current_position_x_, current_position_y_, current_position_z_);

  EPS = 1e-5;

  // thread ROS spinner
  spin_thread_ = boost::thread(boost::bind(&ros::spin));

  bag_.open(bag_name_, rosbag::bagmode::Write);
}
PlanarCoordinator()
{
  ROS_DEBUG("Instantiating a PlanarCoordinator Class");

  // define subscribers, synchronizer, and the corresponding callback:
  robot_kinect_sub.subscribe(node_, "robot_kinect_position", 10);
  mass_kinect_sub.subscribe(node_, "object1_position", 10);
  sync = new message_filters::TimeSynchronizer<PointPlus, PointPlus>(robot_kinect_sub, mass_kinect_sub, 10);
  sync->registerCallback(boost::bind(&PlanarCoordinator::callback, this, _1, _2));

  // define publisher
  config_pub = node_.advertise<PlanarSystemConfig>("meas_config", 100);

  // wait for service, and get the starting config:
  ROS_INFO("Waiting for get_ref_config service:");
  ROS_INFO("Will not continue until available...");
  if (ros::service::waitForService("get_ref_config"))
  {
    // service available:
    PlanarSystemService::Request req;
    PlanarSystemService::Response resp;
    req.index = 0; // initial config:
    ros::service::call("get_ref_config", req, resp);
    q0 = resp.config;
    ROS_DEBUG("Initial config:");
    ROS_DEBUG("xm=%f, ym=%f, xr=%f, r=%f\r\n", q0.xm, q0.ym, q0.xr, q0.r);
  }
  else
  {
    ROS_ERROR("Could not get service, shutting down");
    ros::shutdown();
  }

  // set all default variables:
  calibrated_flag = false;
  calibrate_count = 0;
}
/// Unsubscribe from camera topics if possible.
void disconnectCallback()
{
  if (sub_counter_ > 0)
  {
    ROS_INFO("[fiducials] Unsubscribing from camera topics");
    color_camera_image_sub_.unsubscribe();
    color_camera_info_sub_.unsubscribe();
    sub_counter_--;
    ROS_INFO("[fiducials] %i subscribers on camera topics [OK]", sub_counter_);
  }
}
/// Subscribe to camera topics if not already done.
void connectCallback()
{
  if (sub_counter_ == 0)
  {
    sub_counter_++;
    ROS_DEBUG("[all_camera_viewer] Subscribing to camera topics");

    if (use_right_color_camera_)
    {
      right_color_camera_image_sub_.subscribe(image_transport_, "right/image_color", 1);
      right_camera_info_sub_.subscribe(node_handle_, "right/camera_info", 1);
    }
    if (use_left_color_camera_)
    {
      left_color_camera_image_sub_.subscribe(image_transport_, "left/image_color", 1);
      left_camera_info_sub_.subscribe(node_handle_, "left/camera_info", 1);
    }
    if (use_tof_camera_)
    {
      tof_camera_grey_image_sub_.subscribe(image_transport_, "image_grey", 1);
    }
  }
}
/// Unsubscribe from camera topics if possible.
void disconnectCallback()
{
  sub_counter_--;
  if (sub_counter_ == 0)
  {
    ROS_DEBUG("[all_camera_viewer] Unsubscribing from camera topics");

    if (use_right_color_camera_)
    {
      right_color_camera_image_sub_.unsubscribe();
      right_camera_info_sub_.unsubscribe();
    }
    if (use_left_color_camera_)
    {
      left_color_camera_image_sub_.unsubscribe();
      left_camera_info_sub_.unsubscribe();
    }
    if (use_tof_camera_)
    {
      tof_camera_grey_image_sub_.unsubscribe();
    }
  }
}