Example No. 1
    void controller_nodelet::onInit()
    {
        NODELET_INFO("Initialising controller nodelet");
        ros::NodeHandle node = getMTNodeHandle();

        /* Get differential base parameters */
        node.param<float>("diffbase/wheelbase", wheelbase, 0.194f);
        node.param<float>("diffbase/right_wheel/radius", right_wheel_radius, 0.016f);
        node.param<float>("diffbase/left_wheel/radius", left_wheel_radius, 0.016f);
        node.param<int>("diffbase/right_wheel/direction", right_wheel_direction, 1);
        node.param<int>("diffbase/left_wheel/direction", left_wheel_direction, 1);
        node.param<int>("diffbase/external_to_internal_wheelbase_encoder_direction",
                        external_to_internal_wheelbase_encoder_direction, 1);

        /* Initialise controller */
        right_setpt_pub = node.advertise
            <cvra_msgs::MotorControlSetpoint>("right_wheel/setpoint", 10);
        left_setpt_pub = node.advertise
            <cvra_msgs::MotorControlSetpoint>("left_wheel/setpoint", 10);

        cmdvel_sub = node.subscribe("cmd_vel", 10,
            &goldorak_base::controller_nodelet::cmdvel_cb, this);

        NODELET_INFO("Controller nodelet ready");

    }
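The class above is referenced as goldorak_base::controller_nodelet in the subscribe call, so it is presumably exported to pluginlib somewhere in the package. A minimal sketch of what that export typically looks like (not part of the original example):

    // Hypothetical export sketch (not from the original source): registers the
    // nodelet class with pluginlib so a nodelet manager can load it by name.
    #include <nodelet/nodelet.h>
    #include <pluginlib/class_list_macros.h>

    PLUGINLIB_EXPORT_CLASS(goldorak_base::controller_nodelet, nodelet::Nodelet)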
Example No. 2
		virtual void onInit()
		{
			/// Configure publisher and synchronizer.
			nh = getMTNodeHandle();
			pub = nh.advertise<art_common::KinectMsg>("/kinect/compressed", 10);
			synchronizer.registerCallback(&KinectPub::callback_synchronizer, this);
			
			/// Load and configure the Kinect device.
			OpenNIDriver& driver = OpenNIDriver::getInstance();
			driver.updateDeviceList();
			device = driver.getDeviceByIndex(0);
			XnMapOutputMode output_mode;
			output_mode.nXRes = XN_VGA_X_RES;
			output_mode.nYRes = XN_VGA_Y_RES;
			output_mode.nFPS = 30;
			device->setDepthOutputMode(output_mode);
			device->setImageOutputMode(output_mode);
			device->setDepthRegistration(true);
			dynamic_cast<DeviceKinect*>(device.get())->setDebayeringMethod(ImageBayerGRBG::EdgeAwareWeighted);

			/// Register callbacks and start the streams.
			device->registerImageCallback(&KinectPub::cb_image, *this);
			device->registerDepthCallback(&KinectPub::cb_depth, *this);
			device->startImageStream();
			device->startDepthStream();
		}
void ScanToPointCloud::onInit() {
  NODELET_INFO("ScanToPointCloud::onInit: Initializing...");

#ifdef USE_MT_NODE_HANDLE
  ros::NodeHandle &nh = getMTNodeHandle();
  ros::NodeHandle &pnh = getMTPrivateNodeHandle();
#else
  ros::NodeHandle &nh = getNodeHandle();
  ros::NodeHandle &pnh = getPrivateNodeHandle();
#endif

  // Process parameters.
  pnh.param("target_frame", targetFrame, targetFrame);
  NODELET_INFO("ScanToPointCloud::onInit: Using target frame: %s.", targetFrame.data());
  pnh.param("wait_for_transform", waitForTransform, waitForTransform);
  NODELET_INFO("ScanToPointCloud::onInit: Maximum time to wait for transform: %.2f s.", waitForTransform);
  pnh.param("channel_options", channelOptions, channelOptions);
  NODELET_INFO("ScanToPointCloud::onInit: Channel options: %#x.", channelOptions);
  pnh.param("scan_queue", scanQueue, scanQueue);
  NODELET_INFO("ScanToPointCloud::onInit: Scan queue size: %i.", scanQueue);
  pnh.param("point_cloud_queue", pointCloudQueue, pointCloudQueue);
  NODELET_INFO("ScanToPointCloud::onInit: Point cloud queue size: %i.", pointCloudQueue);

  // Subscribe to the scan topic.
  std::string scanTopic = nh.resolveName("scan", true);
  NODELET_INFO("ScanToPointCloud::onInit: Subscribing scan %s.", scanTopic.data());
  scanSubscriber = nh.subscribe<sensor_msgs::LaserScan>(scanTopic, scanQueue, &ScanToPointCloud::scanCallback, this);

  // Advertise scan point cloud.
  std::string cloudTopic = nh.resolveName("cloud", true);
  NODELET_INFO("ScanToPointCloud::onInit: Advertising point cloud %s.", cloudTopic.data());
  pointCloudPublisher = nh.advertise<sensor_msgs::PointCloud2>(cloudTopic, pointCloudQueue, false);
}
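The onInit above only wires up the subscriber and publisher; scanCallback itself is not shown. A minimal sketch of such a callback, assuming laser_geometry does the conversion (hypothetical, not taken from this package):

// Hypothetical callback sketch (not from the original source). It projects each
// incoming LaserScan into a PointCloud2 in the scan's own frame and republishes it;
// the real callback would likely also transform into targetFrame via TF and honor
// channelOptions, which is omitted here.
#include <laser_geometry/laser_geometry.h>

void ScanToPointCloud::scanCallback(const sensor_msgs::LaserScan::ConstPtr &scan)
{
  laser_geometry::LaserProjection projector;
  sensor_msgs::PointCloud2 cloud;
  projector.projectLaser(*scan, cloud);  // simple projection, no TF lookup
  pointCloudPublisher.publish(cloud);
}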
Example No. 4
  /*!
  * \brief Serves as a pseudo constructor for nodelets.
  *
  * This function needs to do the MINIMUM amount of work to get the nodelet running.  Nodelets should not call blocking functions here.
  */
  void onInit()
  {
    // Get nodeHandles
    ros::NodeHandle &nh = getMTNodeHandle();
    ros::NodeHandle &pnh = getMTPrivateNodeHandle();

    // Get a serial number through ros
    int serial;
    pnh.param<int>("serial", serial, 0);
    pg_.setDesiredCamera((uint32_t)serial);

    // Get the location of our camera config yaml
    std::string camera_info_url;
    pnh.param<std::string>("camera_info_url", camera_info_url, "");
    // Get the desired frame_id, set to 'camera' if not found
    pnh.param<std::string>("frame_id", frame_id_, "camera");

    // Do not call the connectCb function until after we are done initializing.
    boost::mutex::scoped_lock scopedLock(connect_mutex_);

    // Start up the dynamic_reconfigure service, note that this needs to stick around after this function ends
    srv_ = boost::make_shared <dynamic_reconfigure::Server<pointgrey_camera_driver::PointGreyConfig> > (pnh);
    dynamic_reconfigure::Server<pointgrey_camera_driver::PointGreyConfig>::CallbackType f =  boost::bind(&pointgrey_camera_driver::PointGreyCameraNodelet::paramCallback, this, _1, _2);
    srv_->setCallback(f);

    // Start the camera info manager and attempt to load any configurations
    std::stringstream cinfo_name;
    cinfo_name << serial;
    cinfo_.reset(new camera_info_manager::CameraInfoManager(nh, cinfo_name.str(), camera_info_url));

    // Publish topics using ImageTransport through camera_info_manager (gives cool things like compression)
    it_.reset(new image_transport::ImageTransport(nh));
    image_transport::SubscriberStatusCallback cb = boost::bind(&PointGreyCameraNodelet::connectCb, this);
    it_pub_ = it_->advertiseCamera("image_raw", 5, cb, cb);

    // Set up diagnostics
    updater_.setHardwareID("pointgrey_camera " + cinfo_name.str());

    // Set up a diagnosed publisher
    double desired_freq;
    pnh.param<double>("desired_freq", desired_freq, 7.0);
    pnh.param<double>("min_freq", min_freq_, desired_freq);
    pnh.param<double>("max_freq", max_freq_, desired_freq);
    double freq_tolerance; // Tolerance before stating error on publish frequency, fractional percent of desired frequencies.
    pnh.param<double>("freq_tolerance", freq_tolerance, 0.1);
    int window_size; // Number of samples to consider in frequency
    pnh.param<int>("window_size", window_size, 100);
    double min_acceptable; // The minimum publishing delay (in seconds) before warning.  Negative values mean future dated messages.
    pnh.param<double>("min_acceptable_delay", min_acceptable, 0.0);
    double max_acceptable; // The maximum publishing delay (in seconds) before warning.
    pnh.param<double>("max_acceptable_delay", max_acceptable, 0.2);
    ros::SubscriberStatusCallback cb2 = boost::bind(&PointGreyCameraNodelet::connectCb, this);
    pub_.reset(new diagnostic_updater::DiagnosedPublisher<wfov_camera_msgs::WFOVImage>(nh.advertise<wfov_camera_msgs::WFOVImage>("image", 5, cb2, cb2),
               updater_,
               diagnostic_updater::FrequencyStatusParam(&min_freq_, &max_freq_, freq_tolerance, window_size),
               diagnostic_updater::TimeStampStatusParam(min_acceptable, max_acceptable)));
  }
Example No. 5
    void spin ()
    {
      trackerViewer_ = boost::shared_ptr<visp_tracker::TrackerViewer>
        (new visp_tracker::TrackerViewer (getMTNodeHandle (),
                                          getMTPrivateNodeHandle (),
                                          exiting_, 5u));
      while (ros::ok () && !exiting_)
        trackerViewer_->spin ();
    }
void PhidgetsImuNodelet::onInit()
{
    NODELET_INFO("Initializing Phidgets IMU Nodelet");

    // TODO: Do we want the single threaded or multithreaded NH?
    ros::NodeHandle nh         = getMTNodeHandle();
    ros::NodeHandle nh_private = getMTPrivateNodeHandle();

    imu_ = new ImuRosI(nh, nh_private);
}
void LaserScanSplitterNodelet::onInit ()
{
  NODELET_INFO("Initializing LaserScanSplitter Nodelet");
  
  // TODO: Do we want the single threaded or multithreaded NH?
  ros::NodeHandle nh         = getMTNodeHandle();
  ros::NodeHandle nh_private = getMTPrivateNodeHandle();

  laser_scan_splitter_ = new LaserScanSplitter(nh, nh_private);  
}
void LaserOrthoProjectorNodelet::onInit ()
{
  NODELET_INFO("Initializing LaserOrthoProjector Nodelet");
  
  // TODO: Do we want the single threaded or multithreaded NH?
  ros::NodeHandle nh         = getMTNodeHandle();
  ros::NodeHandle nh_private = getMTPrivateNodeHandle();

  laser_ortho_projector_ = new scan_tools::LaserOrthoProjector(nh, nh_private);
}
Example No. 9
void asctec::AutoPilotNodelet::onInit ()
{
  NODELET_INFO("Initializing Autopilot Nodelet");

  // TODO: Do we want the single threaded or multithreaded NH?
  ros::NodeHandle nh         = getMTNodeHandle();
  ros::NodeHandle nh_private = getMTPrivateNodeHandle();

  autopilot = new asctec::AutoPilot(nh, nh_private);  
}
Example No. 10
void QuadJoyTeleopNodelet::onInit ()
{
  NODELET_INFO("Initializing QuadJoyTeleop Nodelet");
  
  // TODO: Do we want the single threaded or multithreaded NH?
  ros::NodeHandle nh         = getMTNodeHandle();
  ros::NodeHandle nh_private = getMTPrivateNodeHandle();

  quad_joy_teleop_ = new QuadJoyTeleop(nh, nh_private);  
}
Example No. 11
    void spin ()
    {
      trackerClient_ = boost::shared_ptr<visp_tracker::TrackerClient>
        (new visp_tracker::TrackerClient (getMTNodeHandle (),
                                          getMTPrivateNodeHandle (),
                                          exiting_, 5u));
      if (ros::ok () && !exiting_)
        trackerClient_->spin ();
    }
Example No. 12
void RGBDImageProcNodelet::onInit()
{
  NODELET_INFO("Initializing RGBD Image Proc Nodelet");

  // TODO: Do we want the single threaded or multithreaded NH?
  ros::NodeHandle nh         = getMTNodeHandle();
  ros::NodeHandle nh_private = getMTPrivateNodeHandle();

  rgbd_image_proc_ = new RGBDImageProc(nh, nh_private);  
}
  void onInit()
  {
    nh = getMTNodeHandle();
    nh_priv = getMTPrivateNodeHandle();

    // Parameters
    nh_priv.param("use_backup_estimator_alt", use_backup_estimator_alt, use_backup_estimator_alt);
    nh_priv.param("imu_to_kinect_offset", imu_to_kinect_offset, imu_to_kinect_offset);
    nh_priv.param("max_est_kinect_delta_alt", max_est_kinect_delta_alt, max_est_kinect_delta_alt);
    nh_priv.param("obstacle_height_threshold", obstacle_height_threshold, obstacle_height_threshold);
    nh_priv.param("debug_throttle_rate", debug_throttle_rate, debug_throttle_rate);
    nh_priv.param("z_vel_filt_a", z_vel_filt_a, z_vel_filt_a);
    nh_priv.param("z_vel_filt_b", z_vel_filt_b, z_vel_filt_b);
    // Publishers
    odom_pub = nh_priv.advertise<nav_msgs::Odometry> ("output", 10);
    obstacle_pub = nh_priv.advertise<starmac_kinect::Obstacle> ("obstacle", 10);
    debug_pub = nh_priv.advertise<starmac_kinect::Debug> ("debug", 10);
    marker_pub = nh_priv.advertise<visualization_msgs::Marker> ("marker", 10);
    mask_indices_pub = nh.advertise<pcl::PointIndices> ("mask_indices", 10);
    findplane_indices_pub = nh.advertise<pcl::PointIndices> ("findplane_indices", 10, true);
    // Subscribers
    sub_input_filter.subscribe(nh_priv, "input", max_queue_size);
    sub_indices_filter.subscribe(nh_priv, "indices", max_queue_size);
    sub_model_filter.subscribe(nh_priv, "model", max_queue_size);
    sync_input_indices_e
        = make_shared<Synchronizer<ExactTime<PointCloud2, PointIndices, ModelCoefficients> > > (max_queue_size);
    sync_input_indices_e->connectInput(sub_input_filter, sub_indices_filter, sub_model_filter);
    sync_input_indices_e->registerCallback(bind(&KinectEstimator::cloudIndicesModelCallback, this, _1, _2, _3));

    // Fill out most of the output Odometry message (we'll only be filling in the z pose value)
    odom_msg_output.child_frame_id = "imu";
    odom_msg_output.header.frame_id = "ned";
    odom_msg_output.pose.pose.orientation.x = 0;
    odom_msg_output.pose.pose.orientation.y = 0;
    odom_msg_output.pose.pose.orientation.z = 0;
    odom_msg_output.pose.pose.orientation.w = 1;
    odom_msg_output.pose.pose.position.x = 0;
    odom_msg_output.pose.pose.position.y = 0;

    if (use_backup_estimator_alt)
    {
      est_odom_sub = nh.subscribe("estimator/output", 1, &KinectEstimator::estOdomCallback, this,
                                  ros::TransportHints().tcpNoDelay());
    }

    updateMask(); // force once to start things

  }
Example No. 14
   /*********************************************************
    * @brief Nodelet onInit method.
    * Sets up the parameters and topics.
    *********************************************************/
    virtual void onInit()
    {
        ROS_ERROR("envir: hello from onInit()");
        ros::NodeHandle& nh = getMTNodeHandle();
        ros::NodeHandle& private_nh = getMTPrivateNodeHandle();

        private_nh.getParam("min_y", min_y_);
        private_nh.getParam("max_y", max_y_);
        private_nh.getParam("min_x", min_x_);
        private_nh.getParam("max_x", max_x_);
        private_nh.getParam("max_z", max_z_);
        private_nh.getParam("goal_z", goal_z_);
        private_nh.getParam("z_scale", z_scale_);
        private_nh.getParam("x_scale", x_scale_);
        private_nh.getParam("enabled", enabled_);
        
        receivedRectCoord = false;
        ul_x = ul_y = lr_x = lr_y = 0;
        focal_length_ = 575.8157348632812;
        goaldepthFlag = odomFlag = false;
        
        DIST_TO_GOAL_ = 0.5;
        APPROX_ORDER_ = 4;
        TERMINAL_STATE_.isTerminal = true;
        episodeNum=0;
        stepCnt=0;
        MAX_STEP_CNT_ = 50; // Max number of steps the agent can take to find goal
        
        // Constants for discretizing state space
        ANGLE_DIVIDER_ = 12;
        DIST_DIVIDER_ = 1;
        
        
        /** Agent's GPS location */
        getmodelstate_ = nh.serviceClient<gazebo_msgs::GetModelState>("/gazebo/get_model_state");

        reinforcement_srv_ = nh.advertiseService("agent_environment", &LileeEnvironment::agentenvironmentcb, this);
        respawnerpub_ = private_nh.advertise<gazebo_msgs::ModelState>("/gazebo/set_model_state", 1);
        cmdpub_ = private_nh.advertise<geometry_msgs::Twist>("/cmd_vel_mux/input/navi", 1); //("/cmd_vel", 1);
        rectanglesub_ = nh.subscribe<std_msgs::String>("/location_data", 1, &LileeEnvironment::rectanglecb, this);
        goaldepthsub_ = nh.subscribe("/camera/depth/image_raw", 1, &LileeEnvironment::goaldepthcb, this);
        bumpersub_ = nh.subscribe("/mobile_base/events/bumper", 1, &LileeEnvironment::bumpercb, this);
        
        respawn_goal();
        respawn_agent();
    }
 void ConnectionBasedNodelet::onInit()
 {
   connection_status_ = NOT_SUBSCRIBED;
   nh_.reset (new ros::NodeHandle (getMTNodeHandle ()));
   pnh_.reset (new ros::NodeHandle (getMTPrivateNodeHandle ()));
   pnh_->param("always_subscribe", always_subscribe_, false);
   pnh_->param("verbose_connection", verbose_connection_, false);
   if (!verbose_connection_) {
     nh_->param("verbose_connection", verbose_connection_, false);
   }
   // timer to warn when no connection in a few seconds
   ever_subscribed_ = false;
   timer_ = nh_->createWallTimer(
     ros::WallDuration(5),
     &ConnectionBasedNodelet::warnNeverSubscribedCallback,
     this,
     /*oneshot=*/true);
 }
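The one-shot wall timer above points at warnNeverSubscribedCallback, which is not included in this snippet. A plausible sketch, inferred only from the member names used in onInit (hypothetical, not the library's actual implementation):

 // Hypothetical sketch (not from the original source): warn once if nothing has
 // subscribed within the timer period, since this nodelet family subscribes its
 // inputs lazily and does no work until its outputs have subscribers.
 void ConnectionBasedNodelet::warnNeverSubscribedCallback(const ros::WallTimerEvent& event)
 {
   if (!ever_subscribed_) {
     NODELET_WARN("'%s' has had no subscribers for 5 seconds; it stays idle until its output topics are subscribed.",
                  getName().c_str());
   }
 }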
void ControlMode::onInit()
{
  NODELET_INFO("ControlMode onInit() called");
  nh = getMTNodeHandle();
  nh_priv = getMTPrivateNodeHandle();

  // Diagnostics
  diag_updater.add(getName() + " status", this, &ControlMode::diagnostics);
  diag_updater.setHardwareID("none");
  diag_updater.force_update();

  // Parameters
  nh_priv.param("status_report_rate", status_report_rate, status_report_rate);
  nh_priv.param("control_output_rate", control_output_rate, control_output_rate);
  CHECK_PARAMETER((status_report_rate > 0), "parameter value out of range");
  CHECK_PARAMETER((control_output_rate > 0), "parameter value out of range");

  // Publishers
  control_mode_status_pub = nh_priv.advertise<control_mode_status> ("status", 1, true);
}
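CHECK_PARAMETER is used above but not defined in this snippet; one plausible shape for such a validation macro (hypothetical, the real macro may throw or shut the nodelet down instead):

// Hypothetical sketch (not from the original source) of a parameter-validation
// macro like the one used above: log an error when the condition fails.
#define CHECK_PARAMETER(condition, message)                         \
  do {                                                              \
    if (!(condition)) {                                             \
      NODELET_ERROR("CHECK_PARAMETER failed: %s (%s)", (message),   \
                    #condition);                                    \
    }                                                               \
  } while (0)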
Example No. 17
		virtual void onInit()
		{
			nh = getMTNodeHandle();
			pub = nh.advertise<art_common::KinectMsg>("/kinect/raw", 10);
			sub = nh.subscribe("/kinect/compressed", 10, &KinectSub::cb_sub, this);
		}
void CameraTrackerNodelet::onInit()
{
  //tracker_.reset(new CameraTracker(getNodeHandle(), getPrivateNodeHandle()));
  tracker_.reset(new dvo_ros::CameraDenseTracker(getMTNodeHandle(), getMTPrivateNodeHandle()));
}
Example No. 19
  /*!
  * \brief Connection callback to only do work when someone is listening.
  *
  * This function will connect/disconnect from the camera depending on who is using the output.
  */
  void connectCb()
  {
    NODELET_DEBUG("Connect callback!");
    boost::mutex::scoped_lock scopedLock(connect_mutex_); // Grab the mutex.  Wait until we're done initializing before letting this function through.
    // Check if we should disconnect (there are 0 subscribers to our data)
    if(it_pub_.getNumSubscribers() == 0 && pub_->getPublisher().getNumSubscribers() == 0)
    {
      try
      {
        NODELET_DEBUG("Disconnecting.");
        pubThread_->interrupt();
        pubThread_->join();
        sub_.shutdown();
        NODELET_DEBUG("Stopping camera capture.");
        pg_.stop();
        /*NODELET_DEBUG("Disconnecting from camera.");
        pg_.disconnect();*/
      }
      catch(std::runtime_error& e)
      {
        NODELET_ERROR("%s", e.what());
      }
    }
    else if(!sub_)     // We need to connect
    {
      NODELET_DEBUG("Connecting");
      // Try connecting to the camera
      volatile bool connected = false;
      while(!connected && ros::ok())
      {
        try
        {
          NODELET_DEBUG("Connecting to camera.");
          pg_.connect(); // Probably already connected from the reconfigure thread.  This will not throw if successfully connected.
          connected = true;
        }
        catch(std::runtime_error& e)
        {
          NODELET_ERROR("%s", e.what());
          ros::Duration(1.0).sleep(); // sleep for one second each time
        }
      }

      // Set the timeout for grabbing images.
      double timeout;
      getMTPrivateNodeHandle().param("timeout", timeout, 1.0);
      try
      {
        NODELET_DEBUG("Setting timeout to: %f.", timeout);
        pg_.setTimeout(timeout);
      }
      catch(std::runtime_error& e)
      {
        NODELET_ERROR("%s", e.what());
      }

      // Subscribe to gain and white balance changes
      sub_ = getMTNodeHandle().subscribe("image_exposure_sequence", 10, &pointgrey_camera_driver::PointGreyCameraNodelet::gainWBCallback, this);

      volatile bool started = false;
      while(!started && ros::ok())
      {
        try
        {
          NODELET_DEBUG("Starting camera capture.");
          pg_.start();
          started = true;
        }
        catch(std::runtime_error& e)
        {
          NODELET_ERROR("%s", e.what());
          ros::Duration(1.0).sleep(); // sleep for one second each time
        }
      }

      // Start the thread to loop through and publish messages
      pubThread_.reset(new boost::thread(boost::bind(&pointgrey_camera_driver::PointGreyCameraNodelet::devicePoll, this)));
    }
    else
    {
      NODELET_DEBUG("Do nothing in callback.");
    }
  }