Code Example #1
 unsigned long init()
 {
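   // Subscribe to the color image (via image_transport) and the point cloud, then synchronize both streams into inputCallback.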
   color_camera_image_sub_.subscribe(*it_, "colorimage", 1);
   point_cloud_sub_.subscribe(node_handle_, "pointcloud", 1);
   
   sync_pointcloud_->connectInput(point_cloud_sub_, color_camera_image_sub_);
   sync_pointcloud_callback_connection_ = sync_pointcloud_->registerCallback(boost::bind(&CobKinectImageFlipNodelet::inputCallback, this, _1, _2));
   
   return ipa_Utils::RET_OK;
 }
Code Example #2
File: kinect_pub.cpp  Project: tal42levy/beohawk-ros
		virtual void onInit()
		{
			/// Configure publisher and synchronizer.
			nh = getMTNodeHandle();
			pub = nh.advertise<art_common::KinectMsg>("/kinect/compressed", 10);
			synchronizer.registerCallback(&KinectPub::callback_synchronizer, this);
			
			/// Load and configure the Kinect device.
			OpenNIDriver& driver = OpenNIDriver::getInstance();
			driver.updateDeviceList();
			device = driver.getDeviceByIndex(0);
			XnMapOutputMode output_mode;
			output_mode.nXRes = XN_VGA_X_RES;
			output_mode.nYRes = XN_VGA_Y_RES;
			output_mode.nFPS = 30;
			device->setDepthOutputMode(output_mode);
			device->setImageOutputMode(output_mode);
			device->setDepthRegistration(true);
			dynamic_cast<DeviceKinect*>(device.get())->setDebayeringMethod(ImageBayerGRBG::EdgeAwareWeighted);

			/// Register callbacks and start the streams.
			device->registerImageCallback(&KinectPub::cb_image, *this);
			device->registerDepthCallback(&KinectPub::cb_depth, *this);
			device->startImageStream();
			device->startDepthStream();
		}
Code Example #3
  RecogInfo() :
    okao_sub( nh, "/humans/okao_server", 100 ),
    okaoNot_sub( nh, "/humans/okao_server_not", 100 ), 
    sync( MySyncPolicy( 100 ), okao_sub, okaoNot_sub )
  {
    sync.registerCallback( boost::bind( &RecogInfo::callback, this, _1, _2 ) );
    
    recog_pub_ = 
      nh.advertise<humans_msgs::Humans>("/humans/recog_info", 1);

    // Case: person is unknown
    humans_msgs::Person unknown;
    unknown.okao_id = 0;
    unknown.name = "Unknown";
    unknown.laboratory = "Unknown";
    unknown.grade = "Unknown";
    prop_buf[ 0 ] = unknown;

    // Case: identity cannot be determined
    humans_msgs::Person undetermined;
    undetermined.okao_id = -1;
    undetermined.name = "Undetermined";
    undetermined.laboratory = "Undetermined";
    undetermined.grade = "Undetermined";
    prop_buf[ -1 ] = undetermined;

    // Build the output file name from the current date and time
    // (tm_mon is zero-based, so add 1 to get the calendar month).
    time_t now = time(NULL);
    struct tm *pnow = localtime(&now);
    file_name << "/home/uema/histdata/"
              << pnow->tm_year + 1900 << "-" << pnow->tm_mon + 1 << "-"
              << pnow->tm_mday << "_" << pnow->tm_hour << "-"
              << pnow->tm_min << "-" << pnow->tm_sec << ".txt";

  }
Code Example #4
        // Constructor
        PcPublisher()
            : image_transport_(n_),
              tof_sync_(SyncPolicy(3)),
              c_xyz_image_32F3_(0),
              c_grey_image_32F1_(0)
        {
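            // Advertise the assembled point cloud and synchronize the xyz and grey image streams into syncCallback.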
            topicPub_pointCloud_ = n_.advertise<sensor_msgs::PointCloud>("point_cloud", 1);
            xyz_image_subscriber_.subscribe(image_transport_, "image_xyz", 1);
            grey_image_subscriber_.subscribe(image_transport_, "image_grey", 1);

            tof_sync_.connectInput(xyz_image_subscriber_, grey_image_subscriber_);
            tof_sync_.registerCallback(boost::bind(&PcPublisher::syncCallback, this, _1, _2));

            // topicSub_xyzImage_ = n_.subscribe("xyz_image", 1, &PcPublisher::topicCallback_xyzImage, this);
        }
Code Example #5
 RecogInfo() :
   okao_sub( nh, "/humans/OkaoServer", 100 ),
   okaoNot_sub( nh, "/humans/OkaoServerNot", 100 ), 
   sync( MySyncPolicy( 100 ), okao_sub, okaoNot_sub )
 {
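   // Register the synchronized callback for the two OKAO topics and advertise the recognition result.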
   sync.registerCallback( boost::bind( &RecogInfo::callback, this, _1, _2 ) );
   
   recog_pub_ = 
     nh.advertise<humans_msgs::Humans>("/humans/RecogInfo", 1);
 }
Code Example #6
File: object_tracker.cpp  Project: Kenkoko/ua-ros-pkg
    ObjectTracker(ros::NodeHandle& nh)
    {
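        // Subscribe to the camera image, the saliency map, and the foreground-probability image.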
        camera_sub.subscribe(nh, "image", 1);
        saliency_sub.subscribe(nh, "saliency_img", 1);
        fg_objects_sub.subscribe(nh, "probability_image", 1);

        // ApproximateTime takes a queue size as its constructor argument, hence SyncPolicy(10)
        sync = new message_filters::Synchronizer<SyncPolicy>(SyncPolicy(10), camera_sub, fg_objects_sub, saliency_sub);
        sync->registerCallback(boost::bind(&ObjectTracker::process_images, this, _1, _2, _3));
    }
Code Example #7
  StereoSynchronizer () : nh_(), it_(nh_), sync_(15) {

    std::string left_raw = nh_.resolveName("left_raw");
    std::string right_raw = nh_.resolveName("right_raw");
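    // Subscribe to the raw image and camera_info topics of both cameras.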
    image_sub_l_.subscribe(it_, left_raw  + "/image_raw", 4);
    info_sub_l_ .subscribe(nh_, left_raw  + "/camera_info", 4);
    image_sub_r_.subscribe(it_, right_raw + "/image_raw", 4);
    info_sub_r_ .subscribe(nh_, right_raw + "/camera_info", 4);

    left_ns_ = nh_.resolveName("left");
    right_ns_ = nh_.resolveName("right");
    ros::NodeHandle cam_l_nh(left_ns_);
    ros::NodeHandle cam_r_nh(right_ns_);
    it_l_ = new image_transport::ImageTransport(cam_l_nh);
    it_r_ = new image_transport::ImageTransport(cam_r_nh);
    pub_left_ = it_l_->advertiseCamera("image_raw", 1);
    pub_right_ = it_r_->advertiseCamera("image_raw", 1);

    sync_.connectInput(image_sub_l_, info_sub_l_, image_sub_r_, info_sub_r_);
    sync_.registerCallback(boost::bind(&StereoSynchronizer::imageCB, this, _1, _2, _3, _4));
  }
Code Example #8
File: testbed.cpp  Project: Lolu28/visual_odometry
  MatcherTestbed(ros::NodeHandle nh):
    matcher_(), image_transport_(nh),
    subscriber_left_( image_transport_, left_image_topic, 1 ),
    subscriber_right_( image_transport_, right_image_topic, 1 ),
    lcinfo_sub_( nh, left_camera_info_topic, 1 ),
    rcinfo_sub_( nh, right_camera_info_topic, 1 ),
    sync_( MySyncPolicy(10), subscriber_left_, subscriber_right_ ),
    first_callback_(false)
  {
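    // Load the template image, subscribe to the input image topic, and register the synchronized stereo callback.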
    template_image_ = cv::Mat (cvLoadImage (template_filename.c_str (), CV_LOAD_IMAGE_COLOR));
    subscriber_ = image_transport_.subscribe(subscribe_topic, 1, &MatcherTestbed::imageCallback, this);
    sync_.registerCallback( boost::bind( &MatcherTestbed::stereoImagesCallback, this, _1, _2) );

    publisher_ = image_transport_.advertise (image_matches_topic, 1);
  }
Code Example #9
File: pc_publisher.cpp  Project: OMARI1988/kinect2
  void start(const Mode mode)
  {
    this->mode = mode;
    running = true;

    std::string topicCameraInfoColor = topicColor.substr(0, topicColor.rfind('/')) + "/camera_info";
    std::string topicCameraInfoDepth = topicDepth.substr(0, topicDepth.rfind('/')) + "/camera_info";
    // std::cout << "test1..." << std::endl;

    image_transport::TransportHints hints("raw");
    subImageColor = new image_transport::SubscriberFilter(it, topicColor, queueSize, hints);
    subImageDepth = new image_transport::SubscriberFilter(it, topicDepth, queueSize, hints);
    subCameraInfoColor = new message_filters::Subscriber<sensor_msgs::CameraInfo>(nh, topicCameraInfoColor, queueSize);
    subCameraInfoDepth = new message_filters::Subscriber<sensor_msgs::CameraInfo>(nh, topicCameraInfoDepth, queueSize);
    // std::cout << "test2..." << std::endl;

    syncExact = new message_filters::Synchronizer<ExactSyncPolicy>(ExactSyncPolicy(queueSize), *subImageColor, *subImageDepth, *subCameraInfoColor, *subCameraInfoDepth);
    syncExact->registerCallback(boost::bind(&Receiver::callback, this, _1, _2, _3, _4));

    spinner.start();
    std::cout << "waiting for kinect2_bridge..." << std::endl;

    std::chrono::milliseconds duration(1);
    while(!updateImage || !updateCloud)
    {
      if(!ros::ok())
      {
        return;
      }
      std::this_thread::sleep_for(duration);
    }
    std::cout << "kinect2_bridge is running.." << std::endl;
    cloud = pcl::PointCloud<pcl::PointXYZRGBA>::Ptr(new pcl::PointCloud<pcl::PointXYZRGBA>());

    cloud->height = color.rows;
    cloud->width = color.cols;
    cloud->is_dense = false;
    cloud->points.resize(cloud->height * cloud->width);
    createLookup(this->color.cols, this->color.rows);
//
//     // Create the filtering object
//     // pcl::PassThrough<pcl::PointXYZRGBA> pass;
//     // pass.setInputCloud (cloud);
//     // pass.setFilterFieldName ("z");
//     // pass.setFilterLimits (0.0, 1.0);
//     // //pass.setFilterLimitsNegative (true);
//     // pass.filter (*cloud);

    cloudViewer();
  }
Code Example #10
	KinectDataDumper(ros::NodeHandle nh) :
			visual_sub_(nh, "/camera/rgb/image_raw", 10000), depth_sub_(nh,
					"/camera/depth/image_raw", 10000), sync_(MySyncPolicy(10000),
					visual_sub_, depth_sub_) {
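		// Synchronize the RGB and depth image streams and open the timestamp log for dumping.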
		std::cout << "KinectDataDumper() - start!" << std::endl;
		nh_ = nh;
		sync_.registerCallback(
				boost::bind(&KinectDataDumper::callback, this, _1, _2));

		std::string fileName = path + "timestamps.txt";
		timestampStream.open(fileName.c_str());

		std::cout << "KinectDataDumper() - end!" << std::endl;

	}
Code Example #11
  MatcherNode() : _it(_node)
		  , _subImage( _it, "image", 1)
		  , _subInfo(_node, "camera_info", 1)
		  , sync( SyncPolicy(10), _subImage, _subInfo)
  {
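    // Advertise the matching results (best rectangle, polygon, rough point, bounding boxes) and a debug image.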
    _pubBestRect = _node.advertise< jsk_recognition_msgs::Rect >("best_rect", 1);
    _pubBestPolygon = _node.advertise< geometry_msgs::PolygonStamped >("best_polygon", 1);
    _pubBestPoint = _node.advertise< geometry_msgs::PointStamped >("rough_point", 1);
    _pubBestBoundingBox = _node.advertise< jsk_recognition_msgs::BoundingBoxArray >("best_box", 1);
    _debug_pub = _it.advertise("debug_image", 1);
    // _subImage = _it.subscribe("image", 1, &MatcherNode::image_cb, this);
    
    sync.registerCallback( boost::bind (&MatcherNode::image_cb , this, _1, _2) );

    
    ros::NodeHandle local_nh("~");
    std::string template_filename;
    std::string default_template_file_name;
    try {
#ifdef ROSPACK_EXPORT
      rospack::ROSPack rp;
      rospack::Package *p = rp.get_pkg("jsk_perception");
      if (p!=NULL) default_template_file_name = p->path + std::string("/sample/opencv-logo2.png");
#else
      rospack::Rospack rp;
      std::vector<std::string> search_path;
      rp.getSearchPathFromEnv(search_path);
      rp.crawl(search_path, 1);
      std::string path;
      if (rp.find("jsk_perception",path)==true) default_template_file_name = path + std::string("/sample/opencv-logo2.png");
#endif
    } catch (std::runtime_error &e) {}
    local_nh.param("template_filename", template_filename, default_template_file_name);
    local_nh.param("standard_width", standard_width, 12);
    local_nh.param("standard_height", standard_height, 12);
    local_nh.param("best_window_estimation_method", best_window_estimation_method, 1);
    local_nh.param("coefficient_threshold", coefficient_thre, 0.5);
    local_nh.param("coefficient_threshold_for_best_window", coefficient_thre_for_best_window, 0.67);
    local_nh.param("sliding_factor", sliding_factor, 1.0);
    local_nh.param("show_result", show_result_, true);
    local_nh.param("pub_debug_image", pub_debug_image_, true);
    local_nh.param("object_width", template_width, 0.06);
    local_nh.param("object_height", template_height, 0.0739);
    
    cv::Mat template_image = cv::imread(template_filename);
    if (template_add(template_image)) template_images.push_back(template_image);
  }
Code Example #12
  PointCloudCapturer(ros::NodeHandle &n)
    :  nh_(n), synchronizer_( MySyncPolicy(1), cloud_sub_, camera_sub_)
  {
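    // Read the input topic names from the parameter server, then subscribe to the image and cloud topics.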
    nh_.param("input_cloud_topic", input_cloud_topic_, std::string("/kinect_head/camera/rgb/points"));
    nh_.param("input_image_topic", input_image_topic_, std::string("/kinect_head/camera/rgb/image_color"));
    nh_.param("input_camera_info_topic", input_camera_info_topic_, std::string("/kinect_head/camera/rgb/camera_info"));
//    cloud_sub_= nh_.subscribe (input_cloud_topic_, 10, &PointCloudCapturer::cloud_cb, this);
  
    camera_sub_.subscribe( nh_, input_image_topic_, 1000 );
    cloud_sub_.subscribe( nh_, input_cloud_topic_, 1000 );

    sync_connection_ = synchronizer_.registerCallback( &PointCloudCapturer::callback, this );

    ROS_INFO("[PointCloudColorizer:] Subscribing to cloud topic %s", input_cloud_topic_.c_str());

    point_head_client_ = new PointHeadClient("/head_traj_controller/point_head_action", true);
    //wait for head controller action server to come up 
    while(!point_head_client_->waitForServer(ros::Duration(5.0)) && ros::ok())
    {
      ROS_INFO("Waiting for the point_head_action server to come up");
    }
    cloud_and_image_received_ = false;
    nh_.param("move_offset_y_min", move_offset_y_min_, -1.5);
    nh_.param("move_offset_y_max", move_offset_y_max_, 1.5);
    nh_.param("step_y", step_y_, 0.3);
    nh_.param("move_offset_z_min", move_offset_z_min_, 0.3);
    nh_.param("move_offset_z_max", move_offset_z_max_, 1.5);
    nh_.param("step_z", step_z_, 0.3);
    nh_.param("move_offset_x", move_offset_x_, 1.0);
    nh_.param("bag_name", bag_name_, std::string("bosch_kitchen_tr.bag"));
    nh_.param("to_frame", to_frame_, std::string("base_link"));
    nh_.param("rate", rate_, 1.0);
    current_position_x_ = move_offset_x_;
    current_position_y_ = move_offset_y_min_;
    current_position_z_ = move_offset_z_min_;
    move_head("base_link", current_position_x_, current_position_y_, current_position_z_);
    EPS = 1e-5;
    //thread ROS spinner
    spin_thread_ = boost::thread (boost::bind (&ros::spin));
    bag_.open(bag_name_, rosbag::bagmode::Write);
  }
Code Example #13
	/// Initialize sensor fusion node.
	/// Setup publisher of point cloud and corresponding color data,
	/// setup camera toolboxes and colored point cloud toolbox
	/// @return <code>true</code> on success, <code>false</code> otherwise
	bool init()
	{
		if (loadParameters() == false) return false;

		image_transport::SubscriberStatusCallback imgConnect    = boost::bind(&AllCameraViewer::connectCallback, this);
		image_transport::SubscriberStatusCallback imgDisconnect = boost::bind(&AllCameraViewer::disconnectCallback, this);

		save_camera_images_service_ = node_handle_.advertiseService("save_camera_images", &AllCameraViewer::saveCameraImagesServiceCallback, this);

		// Synchronize inputs of incoming image data.
		// Topic subscriptions happen on demand in the connection callback.

		// TODO: Dynamically determine which cameras are available

		if(use_right_color_camera_ && use_tof_camera_ && !use_left_color_camera_)
		{
			ROS_INFO("[all_camera_viewer] Setting up subscribers for right color and tof camera");
			shared_sub_sync_.connectInput(right_color_camera_image_sub_, tof_camera_grey_image_sub_);
			shared_sub_sync_.registerCallback(boost::bind(&AllCameraViewer::sharedModeSrvCallback, this, _1, _2));
		}
		else if(use_right_color_camera_ && !use_tof_camera_ && use_left_color_camera_)
		{
			ROS_INFO("[all_camera_viewer] Setting up subscribers left and right color camera");
			stereo_sub_sync_.connectInput(left_color_camera_image_sub_, right_color_camera_image_sub_);
			stereo_sub_sync_.registerCallback(boost::bind(&AllCameraViewer::stereoModeSrvCallback, this, _1, _2));
		}
		else if(use_right_color_camera_ && use_tof_camera_ && use_left_color_camera_)
		{
			ROS_INFO("[all_camera_viewer] Setting up subscribers for left color, right color and tof camera");
			all_sub_sync_.connectInput(left_color_camera_image_sub_, right_color_camera_image_sub_, tof_camera_grey_image_sub_);
			all_sub_sync_.registerCallback(boost::bind(&AllCameraViewer::allModeSrvCallback, this, _1, _2, _3));
		}
		else
		{
			ROS_ERROR("[all_camera_viewer] Specified camera configuration not available");
			return false;
		}


		connectCallback();

  		ROS_INFO("[all_camera_viewer] Initializing [OK]");
		return true;
	}
Code Example #14
  void startRecord()
  {
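    // Print the interactive key bindings, then subscribe to the compressed color, IR, and depth streams and synchronize them.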
    std::cout << "Controls:" << std::endl
              << "   [ESC, q] - Exit" << std::endl
              << " [SPACE, s] - Save current frame" << std::endl
              << "        [l] - decreas min and max value for IR value rage" << std::endl
              << "        [h] - increas min and max value for IR value rage" << std::endl
              << "        [1] - decreas min value for IR value rage" << std::endl
              << "        [2] - increas min value for IR value rage" << std::endl
              << "        [3] - decreas max value for IR value rage" << std::endl
              << "        [4] - increas max value for IR value rage" << std::endl;

    image_transport::TransportHints hints("compressed");
    image_transport::TransportHints hintsIr("compressed");
    image_transport::TransportHints hintsDepth("compressedDepth");
    subImageColor = new image_transport::SubscriberFilter(it, topicColor, 4, hints);
    subImageIr = new image_transport::SubscriberFilter(it, topicIr, 4, hintsIr);
    subImageDepth = new image_transport::SubscriberFilter(it, topicDepth, 4, hintsDepth);

    sync = new message_filters::Synchronizer<ColorIrDepthSyncPolicy>(ColorIrDepthSyncPolicy(4), *subImageColor, *subImageIr, *subImageDepth);
    sync->registerCallback(boost::bind(&Recorder::callback, this, _1, _2, _3));

    spinner.start();
  }
Code Example #15
  dataPublisher(ros::NodeHandle nh) :
    nh_(nh), priv_nh_("~"),
    sub_markers_readings(nh, nh.resolveName("/all_markers"), 1),
    sub_camera_readings(nh, "/camera/depth_registered/points", 1),
    sub_starlink_readings(nh, "/world_star", 1),
    sub_wristlink_readings(nh, "/world_wrist", 1),
    sync(MySyncPolicy(10), sub_markers_readings, sub_camera_readings, sub_starlink_readings, sub_wristlink_readings)
 { 
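   // Register the four-way synchronized callback and advertise the aggregated data topic.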
   sync.registerCallback( boost::bind( &dataPublisher::callback, this, _1, _2 ,_3,_4) );
   data_info = nh_.advertise<AllData>(nh_.resolveName("/data_info"), 10);
 }
Code Example #16
    IMU_odom(ros::NodeHandle& _nh): nh(_nh), sub_scan(nh, "/scan", 10), sub_arduino(nh, "/arduino/yaw", 10), 
				       sync_subs(Policy_sync_subs(10), sub_scan, sub_arduino) 
	{
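	    // Synchronize the laser scan and the Arduino yaw readings into a single callback.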
	    sync_subs.registerCallback(boost::bind(&IMU_odom::sync_subs_callback, this, _1, _2));
	    first_execution = true;
	}
Code Example #17
File: EKF.cpp  Project: michaelhappycheng/software
	EKF(ros::NodeHandle& _nh): nh(_nh), sub_xyheading(nh, "/odometry", 10), sub_z(nh, "/altitude", 10), sub_arduino(nh, "/arduino/IMU", 10),
										sync_subs(Policy_sync_subs(10), sub_xyheading, sub_z, sub_arduino) 
		{
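			// Synchronize the odometry, altitude, and IMU readings and register the combined EKF update callback.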
			//pub_ekf = nh.advertise<sensor_msgs::PointCloud>("/world", 10);
			sync_subs.registerCallback(boost::bind(&EKF::sync_subs_callback, this, _1, _2, _3));
		}