virtual void onInit()
  {
    nh_ = getNodeHandle();
    it_ = boost::shared_ptr<image_transport::ImageTransport>(new image_transport::ImageTransport(nh_));
    local_nh_ = ros::NodeHandle("~");

    local_nh_.param("debug_view", debug_view_, false);
    subscriber_count_ = 0;
    prev_stamp_ = ros::Time(0, 0);

    window_name_ = "Hough Circle Detection Demo";
    canny_threshold_initial_value_ = 200;
    accumulator_threshold_initial_value_ = 50;
    max_accumulator_threshold_ = 200;
    max_canny_threshold_ = 255;

    // declare and initialize both parameters that are subject to change
    canny_threshold_ = canny_threshold_initial_value_;
    accumulator_threshold_ = accumulator_threshold_initial_value_;

    image_transport::SubscriberStatusCallback img_connect_cb    = boost::bind(&HoughCirclesNodelet::img_connectCb, this, _1);
    image_transport::SubscriberStatusCallback img_disconnect_cb = boost::bind(&HoughCirclesNodelet::img_disconnectCb, this, _1);
    ros::SubscriberStatusCallback msg_connect_cb    = boost::bind(&HoughCirclesNodelet::msg_connectCb, this, _1);
    ros::SubscriberStatusCallback msg_disconnect_cb = boost::bind(&HoughCirclesNodelet::msg_disconnectCb, this, _1);
    img_pub_ = image_transport::ImageTransport(local_nh_).advertise("image", 1, img_connect_cb, img_disconnect_cb);
    msg_pub_ = local_nh_.advertise<opencv_apps::CircleArrayStamped>("circles", 1, msg_connect_cb, msg_disconnect_cb);

    if( debug_view_ ) {
      subscriber_count_++;
    }

    dynamic_reconfigure::Server<hough_circles::HoughCirclesConfig>::CallbackType f =
      boost::bind(&HoughCirclesNodelet::reconfigureCallback, this, _1, _2);
    srv.setCallback(f);
  }
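The onInit() above only registers the dynamic_reconfigure server; the callback itself is not shown. A minimal sketch of what reconfigureCallback could look like, assuming the generated config exposes canny_threshold and accumulator_threshold (the same fields written back via srv.updateConfig in the do_work() example near the end of this page):

  // Hypothetical sketch: copy the reconfigured values into the members that
  // the detection code and the OpenCV trackbars read.
  void reconfigureCallback(hough_circles::HoughCirclesConfig &config, uint32_t level)
  {
    config_ = config;
    canny_threshold_ = config.canny_threshold;
    accumulator_threshold_ = config.accumulator_threshold;
  }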
Example #2
    virtual void onInit()
    {
        nh_ = getNodeHandle();
        it_ = boost::shared_ptr<image_transport::ImageTransport>(new image_transport::ImageTransport(nh_));
        local_nh_ = ros::NodeHandle("~");

        local_nh_.param("debug_view", debug_view_, false);
        subscriber_count_ = 0;
        prev_stamp_ = ros::Time(0, 0);

        window_name_ = "Hough Lines Demo";
        min_threshold_ = 50;
        max_threshold_ = 150;
        threshold_ = max_threshold_;

        image_transport::SubscriberStatusCallback img_connect_cb    = boost::bind(&HoughLinesNodelet::img_connectCb, this, _1);
        image_transport::SubscriberStatusCallback img_disconnect_cb = boost::bind(&HoughLinesNodelet::img_disconnectCb, this, _1);
        ros::SubscriberStatusCallback msg_connect_cb    = boost::bind(&HoughLinesNodelet::msg_connectCb, this, _1);
        ros::SubscriberStatusCallback msg_disconnect_cb = boost::bind(&HoughLinesNodelet::msg_disconnectCb, this, _1);
        img_pub_ = image_transport::ImageTransport(local_nh_).advertise("image", 1, img_connect_cb, img_disconnect_cb);
        msg_pub_ = local_nh_.advertise<opencv_apps::LineArrayStamped>("lines", 1, msg_connect_cb, msg_disconnect_cb);

        if( debug_view_ ) {
            subscriber_count_++;
        }

        dynamic_reconfigure::Server<hough_lines::HoughLinesConfig>::CallbackType f =
            boost::bind(&HoughLinesNodelet::reconfigureCallback, this, _1, _2);
        srv.setCallback(f);
    }
 PassthroughCarBody(){
   ROS_INFO("START SUBSCRIBING");
   point_sub = n.subscribe("input_points2", 1, &PassthroughCarBody::passthrough_cb, this);
   point_pub = n.advertise<sensor_msgs::PointCloud2>("passthrough_output/points2", 1);
   f = boost::bind(&PassthroughCarBody::dynamic_reconfigure_cb, this, _1, _2);
   server.setCallback(f);
 }
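passthrough_cb is not shown here; a plausible minimal implementation, assuming a PCL PassThrough filter is used to drop the points that belong to the car body (the field name and limits below are illustrative only):

 // Requires <pcl/point_types.h>, <pcl/filters/passthrough.h> and <pcl_conversions/pcl_conversions.h>.
 void passthrough_cb(const sensor_msgs::PointCloud2ConstPtr& msg)
 {
   pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
   pcl::fromROSMsg(*msg, *cloud);

   pcl::PassThrough<pcl::PointXYZ> pass;
   pass.setInputCloud(cloud);
   pass.setFilterFieldName("x");          // assumed axis covering the car body
   pass.setFilterLimits(-1.0, 1.0);       // assumed body extent, in meters
   pass.setFilterLimitsNegative(true);    // keep everything outside the body
   pass.filter(*cloud);

   sensor_msgs::PointCloud2 output;
   pcl::toROSMsg(*cloud, output);
   output.header = msg->header;
   point_pub.publish(output);
 }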
Example #4
        AngularRateControl() : nh_("~"),lastIMU_(0.0),
        lastCommand_(0.0), timeout_(1.5),
        joy_control_("/teleop/twistCommand"), auto_control_("/mux/safeCommand"),
        manual_override_(false), rc_override_(true)
         {
            nh_.param("kp",kp_,1.0);
            nh_.param("ki",ki_,0.0);
            nh_.param("kd",kd_,0.0);
            nh_.param("imax",imax_,1E9);
            nh_.param("period",period_,0.05);
            nh_.param("motor_zero",motor_zero_,0.0);
            nh_.param("input_scale",input_scale_,1.0);
            double dTimeout;
            nh_.param("timeout",dTimeout,1.5);
            timeout_ = ros::Duration(dTimeout);

            Pid_.initPid(kp_,ki_,kd_,imax_,-imax_);

            // Make sure TF is ready
            ros::Duration(0.5).sleep();

            // Subscribe to the velocity command and setup the motor publisher
            imu_sub_ = nh_.subscribe("imu",1,&AngularRateControl::imu_callback,this);
            cmd_sub_ = nh_.subscribe("command",1,&AngularRateControl::cmd_callback,this);
            mux_sub_ = nh_.subscribe("/mux/selected",1,&AngularRateControl::mux_callback,this);
            rc_sub_ = nh_.subscribe("/sense",1,&AngularRateControl::rc_callback,this);
            motor_pub_ = nh_.advertise<geometry_msgs::Twist>("motors",1);
            debug_pub_ = nh_.advertise<geometry_msgs::Point>("debug",1);
            //Create a callback for the PID loop
            pid_loop_ = nh_.createTimer(ros::Duration(period_), &AngularRateControl::do_pid, this);

            f_ = boost::bind(&AngularRateControl::config_callback, this, _1, _2);
            server_.setCallback(f_);

        }
        object_segmentation_ros()
        {
            f = boost::bind(&object_segmentation_ros::configure_callback, this, _1, _2);
            server.setCallback(f);

            std::string get_scene_objects_remap;
            n_.param("get_scene_objects_remap", get_scene_objects_remap, (std::string)"get_scene_objects");
            get_scene_objects_ = n_.advertiseService<brics_3d_msgs::GetSceneObjects::Request, brics_3d_msgs::GetSceneObjects::Response>(get_scene_objects_remap, boost::bind(&object_segmentation_impl::callback_get_scene_objects, &component_implementation_, _1, _2, component_config_));

            object_points_ = n_.advertise<sensor_msgs::PointCloud2>("object_points", 1);
            plane_points_ = n_.advertise<sensor_msgs::PointCloud2>("plane_points", 1);

            n_.param("camera_frame", component_config_.camera_frame, (std::string)"/openni_rgb_optical_frame");
            n_.param("extract_obj_in_rgb_img", component_config_.extract_obj_in_rgb_img, (bool)false);
            n_.param("min_x", component_config_.min_x, (double)0.25);
            n_.param("max_x", component_config_.max_x, (double)1.5);
            n_.param("min_y", component_config_.min_y, (double)-1.0);
            n_.param("max_y", component_config_.max_y, (double)1.0);
            n_.param("min_z", component_config_.min_z, (double)-0.1);
            n_.param("max_z", component_config_.max_z, (double)1.0);
            n_.param("threshold_points_above_lower_plane", component_config_.threshold_points_above_lower_plane, (double)0.01);
            n_.param("downsampling_distance", component_config_.downsampling_distance, (double)0.005);
            n_.param("min_points_per_objects", component_config_.min_points_per_objects, (int)11);
            n_.param("spherical_distance", component_config_.spherical_distance, (double)2.5);
        }
  virtual void onInit()
  {
    nh_ = getNodeHandle();
    it_ = boost::shared_ptr<image_transport::ImageTransport>(new image_transport::ImageTransport(nh_));
    local_nh_ = ros::NodeHandle("~");

    local_nh_.param("debug_view", debug_view_, false);
    subscriber_count_ = 0;
    prev_stamp_ = ros::Time(0, 0);

    image_transport::SubscriberStatusCallback img_connect_cb    = boost::bind(&FaceDetectionNodelet::img_connectCb, this, _1);
    image_transport::SubscriberStatusCallback img_disconnect_cb = boost::bind(&FaceDetectionNodelet::img_disconnectCb, this, _1);
    ros::SubscriberStatusCallback msg_connect_cb    = boost::bind(&FaceDetectionNodelet::msg_connectCb, this, _1);
    ros::SubscriberStatusCallback msg_disconnect_cb = boost::bind(&FaceDetectionNodelet::msg_disconnectCb, this, _1);
    img_pub_ = image_transport::ImageTransport(local_nh_).advertise("image", 1, img_connect_cb, img_disconnect_cb);
    msg_pub_ = local_nh_.advertise<opencv_apps::FaceArrayStamped>("faces", 1, msg_connect_cb, msg_disconnect_cb);
        
    if( debug_view_ ) {
      subscriber_count_++;
    }
    std::string face_cascade_name, eyes_cascade_name;
    local_nh_.param("face_cascade_name", face_cascade_name, std::string("/usr/share/opencv/haarcascades/haarcascade_frontalface_alt.xml"));
    local_nh_.param("eyes_cascade_name", eyes_cascade_name, std::string("/usr/share/opencv/haarcascades/haarcascade_eye_tree_eyeglasses.xml"));
        
    if( !face_cascade_.load( face_cascade_name ) ){ NODELET_ERROR("--Error loading %s", face_cascade_name.c_str()); };
    if( !eyes_cascade_.load( eyes_cascade_name ) ){ NODELET_ERROR("--Error loading %s", eyes_cascade_name.c_str()); };

    dynamic_reconfigure::Server<face_detection::FaceDetectionConfig>::CallbackType f =
      boost::bind(&FaceDetectionNodelet::reconfigureCallback, this, _1, _2);
    srv.setCallback(f);
  }
  virtual void onInit()
  {
    nh_ = getNodeHandle();
    it_ = boost::shared_ptr<image_transport::ImageTransport>(new image_transport::ImageTransport(nh_));
    local_nh_ = ros::NodeHandle("~");

    local_nh_.param("debug_view", debug_view_, false);
    subscriber_count_ = 0;
    prev_stamp_ = ros::Time(0, 0);

    window_name_ = "Demo code to find contours in an image";
    low_threshold_ = 100; // only for canny

    image_transport::SubscriberStatusCallback img_connect_cb    = boost::bind(&FindContoursNodelet::img_connectCb, this, _1);
    image_transport::SubscriberStatusCallback img_disconnect_cb = boost::bind(&FindContoursNodelet::img_disconnectCb, this, _1);
    ros::SubscriberStatusCallback msg_connect_cb    = boost::bind(&FindContoursNodelet::msg_connectCb, this, _1);
    ros::SubscriberStatusCallback msg_disconnect_cb = boost::bind(&FindContoursNodelet::msg_disconnectCb, this, _1);
    img_pub_ = image_transport::ImageTransport(local_nh_).advertise("image", 1, img_connect_cb, img_disconnect_cb);
    msg_pub_ = local_nh_.advertise<opencv_apps::ContourArrayStamped>("contours", 1, msg_connect_cb, msg_disconnect_cb);
        
    if( debug_view_ ) {
      subscriber_count_++;
    }

    dynamic_reconfigure::Server<find_contours::FindContoursConfig>::CallbackType f =
      boost::bind(&FindContoursNodelet::reconfigureCallback, this, _1, _2);
    srv.setCallback(f);
  }
  virtual void onInit()
  {
    nh_ = getNodeHandle();
    it_ = boost::shared_ptr<image_transport::ImageTransport>(new image_transport::ImageTransport(nh_));
    local_nh_ = ros::NodeHandle("~");

    local_nh_.param("debug_view", debug_view_, false);
    subscriber_count_ = 0;
    prev_stamp_ = ros::Time(0, 0);

    window_name_ = "flow";

    image_transport::SubscriberStatusCallback img_connect_cb    = boost::bind(&FBackFlowNodelet::img_connectCb, this, _1);
    image_transport::SubscriberStatusCallback img_disconnect_cb = boost::bind(&FBackFlowNodelet::img_disconnectCb, this, _1);
    ros::SubscriberStatusCallback msg_connect_cb    = boost::bind(&FBackFlowNodelet::msg_connectCb, this, _1);
    ros::SubscriberStatusCallback msg_disconnect_cb = boost::bind(&FBackFlowNodelet::msg_disconnectCb, this, _1);
    img_pub_ = image_transport::ImageTransport(local_nh_).advertise("image", 1, img_connect_cb, img_disconnect_cb);
    msg_pub_ = local_nh_.advertise<opencv_apps::FlowArrayStamped>("flows", 1, msg_connect_cb, msg_disconnect_cb);

    if( debug_view_ ) {
      subscriber_count_++;
    }

    dynamic_reconfigure::Server<fback_flow::FBackFlowConfig>::CallbackType f =
      boost::bind(&FBackFlowNodelet::reconfigureCallback, this, _1, _2);
    srv.setCallback(f);
  }
    eurobot2016_robotclaw_driver_ros() : np_("~")
    {
        f = boost::bind(&eurobot2016_robotclaw_driver_ros::configure_callback, this, _1, _2);
        server.setCallback(f);


        cmd_vel_ = n_.subscribe("cmd_vel", 1, &eurobot2016_robotclaw_driver_impl::topicCallback_cmd_vel, &component_implementation_);

    }
Example #10
    Omni_Vision(int argc, char **argv)
    {
        char * environment;
        if((environment = getenv("AGENT"))==NULL)
        {
            ROS_ERROR("this agent number is not read by robot");
            return ;
        }
        Agent_ID_ = atoi(environment);
        std::stringstream ss;
        ss<<Agent_ID_;
        std::string calibration_path="/home/nubot"+ss.str()+"/nubot_ws/src/nubot/omni_vision/calib_results";
        if(argc>1)
            calibration_path=argv[1];

        ROS_INFO("initialize the omni_vision  process");
        imginfo_     = new OmniImage(calibration_path+"/"+ss.str()+"/ROI.xml");
        tranfer_     = new Transfer(calibration_path+"/"+ss.str()+"/mirror_calib.xml",*imginfo_);
        scanpts_     = new ScanPoints(*imginfo_);
        whites_      = new Whites(*scanpts_,*tranfer_);
        optimise_    = new Optimise(calibration_path+"/errortable.bin",
                                    calibration_path+"/Diff_X.bin",
                                    calibration_path+"/Diff_Y.bin",
                                    *tranfer_);
        glocation_   = new Globallocalization(*optimise_);
        odometry_    = new Odometry();
        location_    = new Localization(*optimise_);
        obstacles_   = new Obstacles(*scanpts_,*tranfer_);
        ball_finder_ = new BallFinder(*tranfer_);
        colorsegment_= new ColorSegment(calibration_path+"/"+ss.str()+"/CTable.dat");

        is_show_ball_=false;
        is_show_obstacles_=false;
        is_show_whites_=false;
        is_show_scan_points=false;
        is_show_result_=false;
        is_robot_stuck_ = false;

        robot.isglobal_=true;
        is_restart_=false;
        is_power_off_=false;
        if(WIDTH_RATIO<1)
            field_image_ = cv::imread(calibration_path+"/"+ss.str()+"/field_mine.bmp");
        else
            field_image_ = cv::imread(calibration_path+"/"+ss.str()+"/field.bmp");

        ros::NodeHandle nh;
        image_transport::ImageTransport it(nh);
        img_sub_= it.subscribe("/camera/image_raw", 1, &Omni_Vision::imageCallback,this);
        ros::NodeHandle local_nh;
        motor_info_sub_ = local_nh.subscribe("/nubotdriver/odoinfo", 1, &Omni_Vision::odometryupdate,this);

        ros::NodeHandle node;
        omin_vision_pub_   = node.advertise<nubot_common::OminiVisionInfo>("/omnivision/OmniVisionInfo",1);
        reconfigureServer_.setCallback(boost::bind(&Omni_Vision::configure, this, _1, _2));
    }
void callback(dynamic_reconfigure::Server<dynamic_reconfigure::TestConfig>& srv, dynamic_reconfigure::TestConfig &config, uint32_t level)
{
  ROS_INFO("Reconfigure request : %i %f %s %i %i Group1:%i Group2: %f %s", config.int_, config.double_, config.str_.c_str(), (int) config.bool_, config.level, config.group1_int, config.group2_double, config.group2_string.c_str());
  
  config.int_ |= 1;
  config.double_ = -config.double_;
  config.str_ += "A";
  config.bool_ = !config.bool_;
  config.level = level;
  dynamic_reconfigure::TestConfig max;
  srv.getConfigMax(max);
  max.int_ = max.int_ + 1;
  srv.setConfigMax(max);
  ROS_INFO("TEST");
  ROS_INFO("Group 2 requested to be set to %d", config.groups.group_one.group2.state);
  ROS_INFO("group2_doube requested to be set to %f", config.groups.group_one.group2.group2_double);

  ROS_INFO("Reconfigured to     : %i %f %s %i %i", config.int_, config.double_, config.str_.c_str(), (int) config.bool_, config.level);
}
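Because this test callback takes the server itself as its first argument, it has to be bound with a reference to the server when it is registered. A minimal usage sketch, assuming the stock dynamic_reconfigure Test.cfg:

#include <ros/ros.h>
#include <boost/bind.hpp>
#include <dynamic_reconfigure/server.h>
#include <dynamic_reconfigure/TestConfig.h>

int main(int argc, char **argv)
{
  ros::init(argc, argv, "dynamic_reconfigure_test_server");
  dynamic_reconfigure::Server<dynamic_reconfigure::TestConfig> srv;
  // Pass the server by reference so the callback can call getConfigMax()/setConfigMax().
  srv.setCallback(boost::bind(&callback, boost::ref(srv), _1, _2));
  ros::spin();
  return 0;
}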
void BeaconKFNode::initializeRos( void )
{
    ros::NodeHandle private_nh("~");
    ros::NodeHandle nh("");

    private_nh.param("world_fixed_frame", _world_fixed_frame, std::string("map"));
    private_nh.param("odometry_frame", _odometry_frame, std::string("odom"));
    private_nh.param("platform_frame", _platform_frame, std::string("platform"));

    //setup platform offset position and angle
    int platform_index;
    std::vector<double> empty_vec;    
    private_nh.param("platform_index", platform_index, 0);
    private_nh.param("platform_x_coords", _platform_x_coords, empty_vec);
    private_nh.param("platform_y_coords", _platform_y_coords, empty_vec); 

    double reference_x;
    double reference_y;
    private_nh.param("reference_x", reference_x, 1.0);
    private_nh.param("reference_y", reference_y, 0.0);
    _reference_coords.push_back(reference_x);
    _reference_coords.push_back(reference_y);
    
    double platform_angle;
    private_nh.param("platform_orientation", platform_angle, 0.0);
    _platform_origin.setX(_platform_x_coords[platform_index]);
    _platform_origin.setY(_platform_y_coords[platform_index]);
    _platform_origin.setZ(0);
    platform_angle = platform_angle*(M_PI/180);
    platform_angle = platform_angle + atan(_reference_coords[1]/_reference_coords[0]);
    _platform_orientation = tf::createQuaternionFromYaw(platform_angle);
    
    //beacon message subscriber callback
    _beacon_sub = nh.subscribe( "beacon_pose", 1, &BeaconKFNode::beaconCallback, this);

    //define timer rates
    double update_period;
    double broadcast_period;
    private_nh.param("update_period", update_period, 1.0);
    private_nh.param("broadcast_period", broadcast_period, 0.05);

    //timer for broadcasting the map correction xfrom    
    _transform_broadcast_timer = private_nh.createTimer(ros::Duration(broadcast_period),
                                           &BeaconKFNode::transformBroadcastCallback,
                                           this);
    
    //timer for updating the EKF
    _update_timer = private_nh.createTimer(ros::Duration(update_period),
                                           &BeaconKFNode::filterUpdateCallback,
                                           this);

    //set dynamic reconfigure callback 
    dr_server.setCallback(boost::bind(&BeaconKFNode::configCallback, this,  _1, _2));
    
}
    common_servo_converter_ros() : np_("~")
    {
        f = boost::bind(&common_servo_converter_ros::configure_callback, this, _1, _2);
        server.setCallback(f);


        output_ = n_.advertise<std_msgs::UInt16>("output", 1);
        input_ = n_.subscribe("input", 1, &common_servo_converter_impl::topicCallback_input, &component_implementation_);

        np_.param("inverted", component_config_.inverted, (int)0);
        np_.param("offset", component_config_.offset, (int)0);
    }
    RectangleDetector(ros::NodeHandle nh = ros::NodeHandle()) : it_(nh), nh_(nh)
    {

        image_sub = new message_filters::Subscriber<sensor_msgs::Image>(nh_, "image", 1);
        line_sub = new message_filters::Subscriber<jsk_perception::LineArray>(nh_, "lines", 1);
        sync = new TimeSynchronizer<sensor_msgs::Image, jsk_perception::LineArray>(*image_sub, *line_sub, 10);
        sync->registerCallback(boost::bind(&RectangleDetector::callback, this, _1, _2));
        image_pub_ = nh_.advertise<sensor_msgs::Image>("rectangle/image", 1);

        dynamic_reconfigure::Server<jsk_perception::RectangleDetectorConfig>::CallbackType f =
            boost::bind(&RectangleDetector::reconfigureCallback, this, _1, _2);
        srv.setCallback(f);
    }
    HoughLines(ros::NodeHandle nh = ros::NodeHandle()) : it_(nh), nh_(nh), subscriber_count_(0)
    {
        image_transport::SubscriberStatusCallback connect_cb    = boost::bind(&HoughLines::connectCb, this, _1);
        image_transport::SubscriberStatusCallback disconnect_cb = boost::bind(&HoughLines::disconnectCb, this, _1);
        img_pub_ = image_transport::ImageTransport(ros::NodeHandle(nh_, "hough")).advertise("image", 1, connect_cb, disconnect_cb);

        out_pub_ = nh.advertise<jsk_perception::LineArray>(nh_.resolveName("lines"), 1);


        dynamic_reconfigure::Server<jsk_perception::HoughLinesConfig>::CallbackType f =
            boost::bind(&HoughLines::reconfigureCallback, this, _1, _2);
        srv.setCallback(f);
    }
Example #16
  /** get initial parameters (only when node starts). */
  void getInitParams(void)
  {
    nh_.param("modulation_freq", modulation_freq_, -1);
    std::string default_addr("");
    nh_.param("ether_addr",  ether_addr_, default_addr);
    cinfo_ = new camera_info_manager::CameraInfoManager(nh_);

    nh_.param("camera_name",  camera_name_, std::string("swissranger"));

    nh_.param("use_filter", use_filter_, static_cast<bool>(USE_FILTER));
    dynamic_reconfigure::Server<swissranger_camera::SwissRangerConfig >::CallbackType f
      = boost::bind(&SRNode::reconfig, this, _1, _2);
    dynsrv.setCallback(f);
  }
Example #17
        kr16_node_ros()
        : as_follow_joint_trajectory_action_(n_, "follow_joint_trajectory_action", boost::bind(&kr16_node_impl::callback_follow_joint_trajectory_action_, &component_implementation_, _1, &as_follow_joint_trajectory_action_), false)
        {
            f = boost::bind(&kr16_node_ros::configure_callback, this, _1, _2);
            server.setCallback(f);
            as_follow_joint_trajectory_action_.start();

            joint_states_ = n_.advertise<sensor_msgs::JointState>("joint_states", 1);
            state_ = n_.advertise<control_msgs::JointTrajectoryControllerState>("state", 1);

            n_.param("robot_ip_address", component_config_.robot_ip_address, (std::string)"192.1.10.20");
            n_.param("robot_description", component_config_.robot_description, (std::string)"/home/ros/");
            n_.param("robot_port", component_config_.robot_port, (int)49152);
        }
  ColorHistogramDescriptorNode() {
    ros::NodeHandle nh;

    dynamic_reconfigure::Server<saliency_detector::color_histogram_descriptor_paramsConfig>::CallbackType cb;
    cb = boost::bind(&ColorHistogramDescriptorNode::configCallback, this,  _1, _2);
    dr_srv.setCallback(cb);

    ros::NodeHandle private_node_handle_("~");

    sub_patch_array =
      nh.subscribe("projected_patch_array", 1, &ColorHistogramDescriptorNode::patchArrayCallback, this);

    pub_named_points =
      nh.advertise<samplereturn_msgs::NamedPointArray>("named_point", 1);

    pub_debug_image =
      nh.advertise<sensor_msgs::Image>("color_debug_image", 1);

  }
Example #19
	ArdroneController(ros::NodeHandle& nh) :
			nh(nh), reconfigure_server(), enabled(false) {
		pub_vel = nh.advertise<geometry_msgs::Twist>("/cmd_vel", 1);
		pub_cmd_marker = nh.advertise<visualization_msgs::Marker>(
				"visualization_marker", 10);

		reconfigure_server.setCallback(
				boost::bind(&ArdroneController::onConfig, this, _1, _2));

		twist.linear.x = twist.linear.y = twist.linear.z = 0;
		twist.angular.x = twist.angular.y = twist.angular.z = 0;
		setGoalPose(0, 0, 0);

		sub_enabled = nh.subscribe<std_msgs::Bool>("/ardrone/enable_controller",
				1,
				boost::bind(&ArdroneController::onEnableController, this, _1));
		sub_pose = nh.subscribe<visnav2013_exercise3::State>(
				"/ardrone/filtered_pose", 1,
				boost::bind(&ArdroneController::onFilteredPose, this, _1));
	}
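onEnableController is bound above but not shown; a minimal sketch, assuming enabled and pub_vel are members and that a zero twist is the desired stop command:

	// Hypothetical handler: toggle the controller and command a hover when disabled.
	void onEnableController(const std_msgs::Bool::ConstPtr& msg) {
		enabled = msg->data;
		if (!enabled) {
			geometry_msgs::Twist stop;  // all fields default-initialize to zero
			pub_vel.publish(stop);
		}
	}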
Example #20
  /*!
  * \brief Default constructor.
  *
  * This description is displayed lower in the doxygen as an extended description along with
  * the above brief description.
  */
  HapticNode(char* argv[]):
      nh_("/")
      , nh_pvt_("~")
      , input_topic_("/cloud_in")
      , output_topic_("/cloud_out")
      //, dyn_srv(ros::NodeHandle("~config")),
  {
    got_first_cloud_ = false;
    now_ = ros::Time::now() - ros::Duration(1.0);

    // Clouds in and out
    sub_cloud_ = nh_.subscribe<sensor_msgs::PointCloud2>
                 (input_topic_, 1, boost::bind(&HapticNode::cloudCallback,this, _1));
    pub_cloud_ = nh_.advertise<sensor_msgs::PointCloud2>(output_topic_, 1);
    pub_marker_cloud_ = nh_.advertise<visualization_msgs::MarkerArray>("/marker_cloud_array", 1);

    // Disparity images in and out:
    sub_disparity_1_ = nh_.subscribe<stereo_msgs::DisparityImage>
                 ("/disparity_1", 1, boost::bind(&HapticNode::disparityCallback, this, _1));

    // Marker topics
    pub_marker_ = nh_.advertise<visualization_msgs::Marker>("/markers", 100);
    pub_marker_array_ = nh_.advertise<visualization_msgs::MarkerArray>("/markers_array", 1);

    // String status topic
    pub_status_ = nh_pvt_.advertise<std_msgs::String>("status", 10);

    // Create timer callback for auto-running of algorithm
    update_timer_ = nh_.createTimer(ros::Duration(0.03333), boost::bind(&HapticNode::timerCallback, this));
    slow_update_timer_ = nh_.createTimer(ros::Duration(0.5), boost::bind(&HapticNode::slowTimerCallback, this));


    // This server is for the user to adjust the algorithm weights
    dyn_cb = boost::bind( &HapticNode::dynamicCallback, this, _1, _2 );
    dyn_srv.setCallback(dyn_cb);

    // Set up the chai world and device
    initializeHaptics();

    ROS_INFO("Finished constructor!");
  }
Example #21
    ForceField(ros::NodeHandle & n_, double & freq_, double & ro_,   Eigen::Vector3d kp_, Eigen::Vector3d kd_, double & laser_max_distance_, double & robot_mass_, double & robot_radius_, std::string & pose_topic_name_, std::string & sonar_topic_name_) :
    n(n_), freq(freq_), ro(ro_), kp(kp_), kd(kd_), laser_max_distance(laser_max_distance_), robot_mass(robot_mass_), robot_radius(robot_radius_), pose_topic_name(pose_topic_name_), sonar_topic_name(sonar_topic_name_)
    {
        std::cout << "In the class" << std::endl ;
        kp_mat << kp.x(), 0, 0,
                0, kp.y(), 0,
                0, 0, kp.z();
        kd_mat << kd.x(), 0, 0,
                0, kd.y(), 0,
                0, 0, kd.z();


        param_callback_type = boost::bind(&ForceField::paramsCallback, this, _1, _2);
        param_server.setCallback(param_callback_type);
        visualization_markers_pub = n.advertise<visualization_msgs::MarkerArray>( "/force_field_markers", 1);
       // repulsive_force_out = n.advertise<geometry_msgs::Twist>( "/feedback_force/repulsive_force", 1);
        force_out = n.advertise<phantom_omni::OmniFeedback>( "/omni1_force_feedback", 1);
        init_flag=false;
        obstacle_readings_sub = n.subscribe("/cloud", 100, &ForceField::sonarCallback, this);
        lastTimeCalled = ros::Time::now().toSec();
    }
    beacon_robot_pose_estimate_ros() : np_("~")
    {
        f = boost::bind(&beacon_robot_pose_estimate_ros::configure_callback, this, _1, _2);
        server.setCallback(f);


        robot1_pose_ = n_.advertise<geometry_msgs::Pose2D>("robot1_pose", 1);
        robot2_pose_ = n_.advertise<geometry_msgs::Pose2D>("robot2_pose", 1);
        input_cloud_ = n_.subscribe("input_cloud", 1, &beacon_robot_pose_estimate_impl::topicCallback_input_cloud, &component_implementation_);
        odom1_ = n_.subscribe("odom1", 1, &beacon_robot_pose_estimate_impl::topicCallback_odom1, &component_implementation_);
        odom2_ = n_.subscribe("odom2", 1, &beacon_robot_pose_estimate_impl::topicCallback_odom2, &component_implementation_);

        np_.param("x_robot1", component_config_.x_robot1, (double)0.0);
        np_.param("y_robot1", component_config_.y_robot1, (double)0.0);
        np_.param("x_robot2", component_config_.x_robot2, (double)0.0);
        np_.param("y_robot2", component_config_.y_robot2, (double)0.0);
        np_.param("world_link", component_config_.world_link, (std::string)"world");
        np_.param("robot1_link", component_config_.robot1_link, (std::string)"robot1_base_link");
        np_.param("robot2_link", component_config_.robot2_link, (std::string)"robot2_base_link");
        np_.param("detection_distance", component_config_.detection_distance, (double)0.07);
    }
        schunk_gripper_ros()
        {
            f = boost::bind(&schunk_gripper_ros::configure_callback, this, _1, _2);
            server.setCallback(f);

            std::string MoveGripper_remap;
            n_.param("MoveGripper_remap", MoveGripper_remap, (std::string)"MoveGripper");
            MoveGripper_ = n_.advertiseService<brics_showcase_industry_interfaces::MoveGripper::Request, brics_showcase_industry_interfaces::MoveGripper::Response>(MoveGripper_remap, boost::bind(&schunk_gripper_impl::callback_MoveGripper, &component_implementation_, _1, _2, component_config_));

            n_.param("dev_string", component_config_.dev_string, (std::string)"/dev/pcan32");
            n_.param("open_pos", component_config_.open_pos, (double)1.0);
            n_.param("close_pos", component_config_.close_pos, (double)1.0);
            n_.param("baudrate", component_config_.baudrate, (int)1000);
            n_.param("modul_id", component_config_.modul_id, (int)12);
            n_.param("speed", component_config_.speed, (double)0.01);
        }
Example #24
  virtual void onInit()
  {
    Nodelet::onInit();
    it_ = boost::shared_ptr<image_transport::ImageTransport>(new image_transport::ImageTransport(*nh_));

    pnh_->param("debug_view", debug_view_, false);
    if (debug_view_) {
      always_subscribe_ = true;
    }
    prev_stamp_ = ros::Time(0, 0);

    window_name_ = "Hull Demo";
    threshold_ = 100;
    
    dynamic_reconfigure::Server<convex_hull::ConvexHullConfig>::CallbackType f =
      boost::bind(&ConvexHullNodelet::reconfigureCallback, this, _1, _2);
    srv.setCallback(f);

    img_pub_ = advertiseImage(*pnh_, "image", 1);
    msg_pub_ = advertise<opencv_apps::ContourArrayStamped>(*pnh_, "hulls", 1);
    onInitPostProcess();
  }
    home_command_manager_ros() : np_("~")
    {
        f = boost::bind(&home_command_manager_ros::configure_callback, this, _1, _2);
        server.setCallback(f);


        command1_ = n_.advertise<std_msgs::Empty>("command1", 1);
        command2_ = n_.advertise<std_msgs::Empty>("command2", 1);
        command3_ = n_.advertise<std_msgs::Empty>("command3", 1);
        command4_ = n_.advertise<std_msgs::Empty>("command4", 1);
        command5_ = n_.advertise<std_msgs::Empty>("command5", 1);
        command6_ = n_.advertise<std_msgs::Empty>("command6", 1);
        command7_ = n_.advertise<std_msgs::Empty>("command7", 1);
        command8_ = n_.advertise<std_msgs::Empty>("command8", 1);
        command9_ = n_.advertise<std_msgs::Empty>("command9", 1);
        command10_ = n_.advertise<std_msgs::Empty>("command10", 1);
        command11_ = n_.advertise<std_msgs::Empty>("command11", 1);
        command12_ = n_.advertise<std_msgs::Empty>("command12", 1);
        command13_ = n_.advertise<std_msgs::Empty>("command13", 1);
        command14_ = n_.advertise<std_msgs::Empty>("command14", 1);
        command_ = n_.subscribe("command", 1, &home_command_manager_impl::topicCallback_command, &component_implementation_);

    }
  virtual void onInit()
  {
    Nodelet::onInit();
    it_ = boost::shared_ptr<image_transport::ImageTransport>(new image_transport::ImageTransport(*nh_));

    pnh_->param("debug_view", debug_view_, false);
    if (debug_view_) {
      always_subscribe_ = true;
    }
    prev_stamp_ = ros::Time(0, 0);

    window_name_ = "Contours";
    threshold_ = 100;

    dynamic_reconfigure::Server<general_contours::GeneralContoursConfig>::CallbackType f =
      boost::bind(&GeneralContoursNodelet::reconfigureCallback, this, _1, _2);
    srv.setCallback(f);
    
    img_pub_ = advertiseImage(*pnh_, "image", 1);
    rects_pub_ = advertise<opencv_apps::RotatedRectArrayStamped>(*pnh_, "rectangles", 1);
    ellipses_pub_ = advertise<opencv_apps::RotatedRectArrayStamped>(*pnh_, "ellipses", 1);

    onInitPostProcess();
  }
Example #27
int RosAriaNode::Setup()
{
  // Note, various objects are allocated here which are never deleted (freed), since Setup() is only supposed to be
  // called once per instance, and these objects need to persist until the process terminates.

  robot = new ArRobot();
  ArArgumentBuilder *args = new ArArgumentBuilder(); //  never freed
  ArArgumentParser *argparser = new ArArgumentParser(args); // Warning never freed
  argparser->loadDefaultArguments(); // adds any arguments given in /etc/Aria.args.  Useful on robots with unusual serial port or baud rate (e.g. pioneer lx)

  // Now add any parameters given via ros params (see RosAriaNode constructor):

  // if serial port parameter contains a ':' character, then interpret it as hostname:tcpport
  // for wireless serial connection. Otherwise, interpret it as a serial port name.
  size_t colon_pos = serial_port.find(":");
  if (colon_pos != std::string::npos)
  {
    args->add("-remoteHost"); // pass robot's hostname/IP address to Aria
    args->add(serial_port.substr(0, colon_pos).c_str());
    //ROS_INFO( "RosAria: using IP: [%s]", serial_port.substr(0, colon_pos).c_str() );
    args->add("-remoteRobotTcpPort"); // pass robot's TCP port to Aria
    args->add(serial_port.substr(colon_pos+1).c_str());
    //ROS_INFO( "RosAria: using port: [%s]", serial_port.substr(colon_pos+1).c_str() );
  }
  else
  {
    args->add("-robotPort"); // pass robot's serial port to Aria
    args->add(serial_port.c_str());
  }

  // if a baud rate was specified in baud parameter
  if(serial_baud != 0)
  {
    args->add("-robotBaud");
    char tmp[100];
    snprintf(tmp, 100, "%d", serial_baud);
    args->add(tmp);
  }
  
  if( debug_aria )
  {
    // turn on all ARIA debugging
    args->add("-robotLogPacketsReceived"); // log received packets
    args->add("-robotLogPacketsSent"); // log sent packets
    args->add("-robotLogVelocitiesReceived"); // log received velocities
    args->add("-robotLogMovementSent");
    args->add("-robotLogMovementReceived");
    ArLog::init(ArLog::File, ArLog::Verbose, aria_log_filename.c_str(), true);
  }


  // Connect to the robot
  conn = new ArRobotConnector(argparser, robot); // warning never freed
  if (!conn->connectRobot()) {
    ROS_ERROR("RosAria: ARIA could not connect to robot! (Check ~port parameter is correct, and permissions on port device.)");
    return 1;
  }

  // causes ARIA to load various robot-specific hardware parameters from the robot parameter file in /usr/local/Aria/params
  if(!Aria::parseArgs())
  {
    ROS_ERROR("RosAria: ARIA error parsing ARIA startup parameters!");
    return 1;
  }

  readParameters();

  // Start dynamic_reconfigure server
  dynamic_reconfigure_server = new dynamic_reconfigure::Server<rosaria::RosAriaConfig>;
  

  /*
   * Lateral and longitudinal velocities are expressed in meters.
   */

  // Setup parameter minimums: TicksMM=10, DriftFactor=-200, RevCount=-32760
  rosaria::RosAriaConfig dynConf_min;
  dynConf_min.trans_accel = robot->getAbsoluteMaxTransAccel() / 1000;
  dynConf_min.trans_decel = robot->getAbsoluteMaxTransDecel() / 1000;
  // TODO: Fix rqt dynamic_reconfigure gui to handle empty intervals
  // Until then, set unit length interval.
  dynConf_min.lat_accel = ((robot->getAbsoluteMaxLatAccel() > 0.0) ? robot->getAbsoluteMaxLatAccel() : 0.1) / 1000;
  dynConf_min.lat_decel = ((robot->getAbsoluteMaxLatDecel() > 0.0) ? robot->getAbsoluteMaxLatDecel() : 0.1) / 1000;
  dynConf_min.rot_accel = robot->getAbsoluteMaxRotAccel() * M_PI/180;
  dynConf_min.rot_decel = robot->getAbsoluteMaxRotDecel() * M_PI/180;
  
  // These lower bounds are set fairly arbitrarily; feel free to adjust them.
  dynConf_min.TicksMM     = 10;
  dynConf_min.DriftFactor = -200;
  dynConf_min.RevCount    = -32760;
  
  dynamic_reconfigure_server->setConfigMin(dynConf_min);
  
  
  // Setup parameter maximums: TicksMM=200, DriftFactor=200, RevCount=32760
  rosaria::RosAriaConfig dynConf_max;
  dynConf_max.trans_accel = robot->getAbsoluteMaxTransAccel() / 1000;
  dynConf_max.trans_decel = robot->getAbsoluteMaxTransDecel() / 1000;
  // TODO: Fix rqt dynamic_reconfigure gui to handle empty intervals
  // Until then, set unit length interval.
  dynConf_max.lat_accel = ((robot->getAbsoluteMaxLatAccel() > 0.0) ? robot->getAbsoluteMaxLatAccel() : 0.1) / 1000;
  dynConf_max.lat_decel = ((robot->getAbsoluteMaxLatDecel() > 0.0) ? robot->getAbsoluteMaxLatDecel() : 0.1) / 1000;
  dynConf_max.rot_accel = robot->getAbsoluteMaxRotAccel() * M_PI/180;
  dynConf_max.rot_decel = robot->getAbsoluteMaxRotDecel() * M_PI/180;
  
  // These upper bounds are set fairly arbitrarily; feel free to increase them.
  dynConf_max.TicksMM     = 200;
  dynConf_max.DriftFactor = 200;
  dynConf_max.RevCount    = 32760;
  
  dynamic_reconfigure_server->setConfigMax(dynConf_max);
  
  // Setup parameter defaults
  rosaria::RosAriaConfig dynConf_default;
  dynConf_default.trans_accel = robot->getTransAccel() / 1000;
  dynConf_default.trans_decel = robot->getTransDecel() / 1000;
  dynConf_default.lat_accel   = robot->getLatAccel() / 1000;
  dynConf_default.lat_decel   = robot->getLatDecel() / 1000;
  dynConf_default.rot_accel   = robot->getRotAccel() * M_PI/180;
  dynConf_default.rot_decel   = robot->getRotDecel() * M_PI/180;

  dynConf_default.TicksMM     = TicksMM;
  dynConf_default.DriftFactor = DriftFactor;
  dynConf_default.RevCount    = RevCount;
  
  dynamic_reconfigure_server->setConfigDefault(dynConf_default);
  
  dynamic_reconfigure_server->setCallback(boost::bind(&RosAriaNode::dynamic_reconfigureCB, this, _1, _2));

  // Enable the motors
  robot->enableMotors();

  // disable sonars on startup
  robot->disableSonar();

  // callback will be called by the ArRobot background processing thread for every SIP data packet received from the robot
  robot->addSensorInterpTask("ROSPublishingTask", 100, &myPublishCB);

  // Initialize bumpers with robot number of bumpers
  bumpers.front_bumpers.resize(robot->getNumFrontBumpers());
  bumpers.rear_bumpers.resize(robot->getNumRearBumpers());

  // Run ArRobot background processing thread
  robot->runAsync(true);

  return 0;
}
Example #28
 FakeCamParameters() {
   server_.setCallback(boost::bind(&FakeCamParameters::reconfigure_cb, this, _1, _2));
 }
  void do_work(const sensor_msgs::ImageConstPtr& msg, const std::string input_frame_from_msg)
  {
    // Work on the image.
    try
    {
      // Convert the image into something opencv can handle.
      cv::Mat frame = cv_bridge::toCvShare(msg, msg->encoding)->image;

      // Messages
      opencv_apps::CircleArrayStamped circles_msg;
      circles_msg.header = msg->header;

      // Do the work
      std::vector<cv::Rect> faces;
      cv::Mat src_gray, edges;

      if ( frame.channels() > 1 ) {
        cv::cvtColor( frame, src_gray, cv::COLOR_BGR2GRAY );
      } else {
        src_gray = frame;
      }

      // Reduce the noise so we avoid false circle detection
      cv::GaussianBlur( src_gray, src_gray, cv::Size(9, 9), 2, 2 );

      // create the main window, and attach the trackbars
      if( debug_view_) {
        cv::namedWindow( window_name_, cv::WINDOW_AUTOSIZE );

        cv::createTrackbar("Canny Threshold", window_name_, &canny_threshold_, max_canny_threshold_, trackbarCallback);
        cv::createTrackbar("Accumulator Threshold", window_name_, &accumulator_threshold_, max_accumulator_threshold_, trackbarCallback);

        if (need_config_update_) {
          config_.canny_threshold = canny_threshold_;
          config_.accumulator_threshold = accumulator_threshold_;
          srv.updateConfig(config_);
          need_config_update_ = false;
        }
      }

      // these parameters cannot be 0, so clamp them here
      canny_threshold_ = std::max(canny_threshold_, 1);
      accumulator_threshold_ = std::max(accumulator_threshold_, 1);

      // run the detection and update the display
      // this vector will hold the detection results
      std::vector<cv::Vec3f> circles;
      // runs the actual detection
      cv::HoughCircles( src_gray, circles, CV_HOUGH_GRADIENT, 1, src_gray.rows/8, canny_threshold_, accumulator_threshold_, 0, 0 );


      // draw the detected circles on the input image
      for( size_t i = 0; i < circles.size(); i++ )
      {
        cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
        int radius = cvRound(circles[i][2]);
        // circle center
        circle( frame, center, 3, cv::Scalar(0,255,0), -1, 8, 0 );
        // circle outline
        circle( frame, center, radius, cv::Scalar(0,0,255), 3, 8, 0 );

        opencv_apps::Circle circle_msg;
        circle_msg.center.x = center.x;
        circle_msg.center.y = center.y;
        circle_msg.radius = radius;
        circles_msg.circles.push_back(circle_msg);
      }

      // shows the results
      if( debug_view_) {
        cv::imshow( window_name_, frame );
        int c = cv::waitKey(1);
      }

      // Publish the image.
      sensor_msgs::Image::Ptr out_img = cv_bridge::CvImage(msg->header, msg->encoding,frame).toImageMsg();
      img_pub_.publish(out_img);
      msg_pub_.publish(circles_msg);
    }
    catch (cv::Exception &e)
    {
      NODELET_ERROR("Image processing error: %s %s %s %i", e.err.c_str(), e.func.c_str(), e.file.c_str(), e.line);
    }

    prev_stamp_ = msg->header.stamp;
  }
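trackbarCallback, referenced from the debug-view code in both do_work() handlers, is not shown; in this pattern it typically only flags that the GUI-edited value should be pushed back through dynamic_reconfigure on the next frame. A minimal sketch, assuming need_config_update_ is a static member (OpenCV already updates the bound int directly):

  // Signature required by cv::createTrackbar: (new trackbar position, user data pointer).
  static void trackbarCallback(int /*pos*/, void* /*userdata*/)
  {
    need_config_update_ = true;
  }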
  void do_work(const sensor_msgs::ImageConstPtr& msg, const std::string input_frame_from_msg)
  {
    // Work on the image.
    try
    {
      // Convert the image into something opencv can handle.
      cv::Mat frame = cv_bridge::toCvShare(msg, msg->encoding)->image;

      // Messages
      opencv_apps::ContourArrayStamped contours_msg;
      contours_msg.header = msg->header;

      // Do the work
      cv::Mat src_gray;

      /// Convert it to gray
      cv::cvtColor( frame, src_gray, cv::COLOR_RGB2GRAY );
      cv::GaussianBlur( src_gray, src_gray, cv::Size(3,3), 0, 0, cv::BORDER_DEFAULT );
      
      /// Create window
      if( debug_view_) {
        cv::namedWindow( window_name_, cv::WINDOW_AUTOSIZE );
      }
      
      cv::Mat canny_output;
      int max_thresh = 255;
      std::vector<std::vector<cv::Point> > contours;
      std::vector<cv::Vec4i> hierarchy;
      cv::RNG rng(12345);

      /// Reduce noise with a kernel 3x3
      cv::blur( src_gray, canny_output, cv::Size(3,3) );

      /// Canny detector
      cv::Canny( canny_output, canny_output, low_threshold_, low_threshold_*2, 3 );

      /// Find contours
      cv::findContours( canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, cv::Point(0, 0) );

      /// Draw contours
      cv::Mat drawing = cv::Mat::zeros( canny_output.size(), CV_8UC3 );
      for( size_t i = 0; i< contours.size(); i++ )
      {
        cv::Scalar color = cv::Scalar( rng.uniform(0, 255), rng.uniform(0,255), rng.uniform(0,255) );
        cv::drawContours( drawing, contours, (int)i, color, 2, 8, hierarchy, 0, cv::Point() );
        
        opencv_apps::Contour contour_msg;
        for ( size_t j = 0; j < contours[i].size(); j++ ) {
          opencv_apps::Point2D point_msg;
          point_msg.x = contours[i][j].x;
          point_msg.y = contours[i][j].y;
          contour_msg.points.push_back(point_msg);
        }
        contours_msg.contours.push_back(contour_msg);
      }

      /// Create a Trackbar for user to enter threshold
      if( debug_view_) {
        if (need_config_update_) {
          config_.canny_low_threshold = low_threshold_;
          srv.updateConfig(config_);
          need_config_update_ = false;
        }
        cv::createTrackbar( "Canny thresh:", window_name_, &low_threshold_, max_thresh, trackbarCallback);

        cv::imshow( window_name_, drawing );
        int c = cv::waitKey(1);
      }

      // Publish the image.
      sensor_msgs::Image::Ptr out_img = cv_bridge::CvImage(msg->header, sensor_msgs::image_encodings::BGR8, drawing).toImageMsg();
      img_pub_.publish(out_img);
      msg_pub_.publish(contours_msg);
    }
    catch (cv::Exception &e)
    {
      NODELET_ERROR("Image processing error: %s %s %s %i", e.err.c_str(), e.func.c_str(), e.file.c_str(), e.line);
    }

    prev_stamp_ = msg->header.stamp;
  }