Example #1
int main(int argc, char** argv)
{
  ros::init(argc, argv, "static_image_publisher");
  ros::NodeHandle nh;
  image_transport::ImageTransport it(nh);
  image_transport::CameraPublisher pub = it.advertiseCamera("image", 1);

#if 0
  cv::Mat image = cv::imread(argv[1]); // color
  IplImage ipl = image; // the old CvBridge API needs an IplImage*; use a named lvalue instead of taking the address of a temporary
  sensor_msgs::ImagePtr msg = sensor_msgs::CvBridge::cvToImgMsg(&ipl, "bgr8");
#else
  cv::Mat image = cv::imread(argv[1], 0); // grayscale
  IplImage ipl = image; // the old CvBridge API needs an IplImage*; use a named lvalue instead of taking the address of a temporary
  sensor_msgs::ImagePtr msg = sensor_msgs::CvBridge::cvToImgMsg(&ipl, "mono8");
#endif
  msg->encoding = "bayer_rggb8"; // prosilica: label the mono8 data as raw Bayer

  ros::NodeHandle local_nh("~");
  std::string url;
  local_nh.param("camera_info_url", url, std::string());
  CameraInfoManager manager(nh, "prosilica", url);
  sensor_msgs::CameraInfo cam_info = manager.getCameraInfo();
  cam_info.header.frame_id = "high_def_optical_frame";

  ros::Rate loop_rate(4);
  while (nh.ok()) {
    pub.publish(*msg, cam_info, ros::Time::now());
    ros::spinOnce();
    loop_rate.sleep();
  }
}
Example #2
  // constructor
  PeopleTrackingNode::PeopleTrackingNode(ros::NodeHandle nh)
    : nh_(nh),
      robot_state_(),
      tracker_counter_(0)
  {
    // initialize
    meas_cloud_.points = vector<geometry_msgs::Point32>(1);
    meas_cloud_.points[0].x = 0;
    meas_cloud_.points[0].y = 0;
    meas_cloud_.points[0].z = 0;

    // get parameters
    ros::NodeHandle local_nh("~");
    local_nh.param("fixed_frame", fixed_frame_, string("default"));
    local_nh.param("freq", freq_, 1.0);
    local_nh.param("start_distance_min", start_distance_min_, 0.0);
    local_nh.param("reliability_threshold", reliability_threshold_, 1.0);
    local_nh.param("sys_sigma_pos_x", sys_sigma_.pos_[0], 0.0);
    local_nh.param("sys_sigma_pos_y", sys_sigma_.pos_[1], 0.0);
    local_nh.param("sys_sigma_pos_z", sys_sigma_.pos_[2], 0.0);
    local_nh.param("sys_sigma_vel_x", sys_sigma_.vel_[0], 0.0);
    local_nh.param("sys_sigma_vel_y", sys_sigma_.vel_[1], 0.0);
    local_nh.param("sys_sigma_vel_z", sys_sigma_.vel_[2], 0.0);
    local_nh.param("follow_one_person", follow_one_person_, false);

    // advertise filter output
    people_filter_pub_ = nh_.advertise<cob_perception_msgs::PositionMeasurement>("people_tracker_filter",10);
    // advertise visualization
    people_filter_vis_pub_ = nh_.advertise<sensor_msgs::PointCloud>("people_tracker_filter_visualization",10);
    people_tracker_vis_pub_ = nh_.advertise<sensor_msgs::PointCloud>("people_tracker_measurements_visualization",10);
	ROS_INFO("LOCKED D");
    // register message sequencer
    people_meas_sub_ = nh_.subscribe("people_tracker_measurements", 1, &PeopleTrackingNode::callbackRcv, this);
 
  }
Example #3
Chroma_processing::Chroma_processing()
: it_(nh_)
{
	//Getting the parameters specified by the launch file 
	ros::NodeHandle local_nh("~");
	local_nh.param("image_topic", image_topic, string("/camera/rgb/image_raw"));
	local_nh.param("image_out_topic", image_out_topic, string("/chroma_proc/image"));
	local_nh.param("image_out_dif_topic", image_out_dif_topic, string("/chroma_proc/image_dif"));
	local_nh.param("project_path",path_, string(""));
	local_nh.param("playback_topics", playback_topics, false);
	local_nh.param("display", display, false);
	
	if(playback_topics)
	{
		ROS_INFO_STREAM_NAMED("Chroma_processing","Subscribing at compressed topics \n"); 
		
		image_sub = it_.subscribe(image_topic, 1, 
		  &Chroma_processing::imageCb, this, image_transport::TransportHints("compressed"));
	}
	else
	{
		// Subscribe to input video feed 
		image_sub = it_.subscribe(image_topic, 1, &Chroma_processing::imageCb, this);
		
	} 
	
	image_pub = it_.advertise(image_out_topic, 1); 
	image_pub_dif = it_.advertise(image_out_dif_topic, 1); 
}
Example #4
Depth_processing::Depth_processing()
: it_(nh_)
{
    //Getting the parameters specified by the launch file 
    ros::NodeHandle local_nh("~");
    local_nh.param("depth_topic"		, depth_topic		, string("/camera/depth/image_raw"));
    local_nh.param("depth_out_image_topic"  , depth_out_image_topic , string("/depth_proc/image"));
    local_nh.param("project_path"		,path_			, string(""));
    local_nh.param("playback_topics"	, playback_topics	,false);
    local_nh.param("display"		, display		, false);
    local_nh.param("max_depth"		, max_depth		, DEPTH_MAX);
    local_nh.param("min_depth"		, min_depth		, DEPTH_MIN);
    
    if (playback_topics)
    {
        ROS_INFO_STREAM_NAMED("Depth_processing", "Subscribing to compressed topics\n");

        depth_sub = it_.subscribe(depth_topic, 10,
            &Depth_processing::depthCb, this, image_transport::TransportHints("compressedDepth"));
    }
    else
    {
        depth_sub = it_.subscribe(depth_topic, 10, &Depth_processing::depthCb, this);
    }

    depth_pub = it_.advertise(depth_out_image_topic, 100);
}
Example #5
bool StompNode::run()
{
  rviz_trajectory_pub_ = node_handle_.advertise<visualization_msgs::Marker>("trajectories", 20, true);
  collision_models_interface_.reset(new planning_environment::CollisionModelsInterface("robot_description"));

  ros::NodeHandle stomp_task_nh(node_handle_, "task");
  ros::NodeHandle optimizer_task_nh(node_handle_, "optimizer");

  ros::NodeHandle local_nh("~");
  std::string weights_file, means_file, variances_file;
  ROS_VERIFY(local_nh.getParam("weights_file", weights_file));
  ROS_VERIFY(local_nh.getParam("variances_file", variances_file));
  ROS_VERIFY(local_nh.getParam("means_file", means_file));
  XmlRpc::XmlRpcValue features_xml;
  ROS_VERIFY(local_nh.getParam("features", features_xml));

  int max_rollouts;
  ROS_VERIFY(optimizer_task_nh.getParam("max_rollouts", max_rollouts));

  XmlRpc::XmlRpcValue planning_groups_xml;
  if (!stomp_task_nh.getParam("planning_groups", planning_groups_xml) || planning_groups_xml.getType()!=XmlRpc::XmlRpcValue::TypeArray)
  {
    ROS_ERROR("planning_groups parameter needs to be an array");
    return false;
  }
  for (int i=0; i<planning_groups_xml.size(); ++i)
  {
    std::string name;
    ROS_VERIFY(usc_utilities::getParam(planning_groups_xml[i], "name", name));

    // for this planning group, create a STOMP task
    boost::shared_ptr<StompOptimizationTask> stomp_task;
    stomp_task.reset(new stomp_ros_interface::StompOptimizationTask(stomp_task_nh, name));
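    // initialize() presumably takes (num_threads, num_rollouts); the +1 leaves
    // room for STOMP's extra noise-less rollout.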
    stomp_task->initialize(8, max_rollouts+1);
    stomp_task->setTrajectoryVizPublisher(rviz_trajectory_pub_);

    stomp_task->setFeaturesFromXml(features_xml);
    stomp_task->setFeatureWeightsFromFile(weights_file);
    stomp_task->setFeatureScalingFromFile(means_file, variances_file);

    // TODO: hardcoded control cost weight for now
    stomp_task->setControlCostWeight(0.00001);

    stomp_tasks_.insert(std::make_pair(name, stomp_task));

    ROS_INFO("Initialized STOMP task for group %s", name.c_str());
  }

  stomp_.reset(new stomp::STOMP());

  if (!collision_models_interface_->loadedModels())
    return false;

  plan_path_service_ = node_handle_.advertiseService("plan_path", &StompNode::plan, this);
  return true;
}
Example #6
int main( int argc, char** argv )
{
	ros::init( argc, argv, ROS_PACKAGE_NAME, ros::init_options::NoSigintHandler );
	ros::NodeHandle nh, local_nh("~");

	bool external_trigger = false;
	float frame_rate, publish_rate, aoi_ratio;
	int camera_id, master_gain, image_format, timeout_ms;
	std::string camera_name, color_mode, image_topic;

	local_nh.param<int>( "camera_id", camera_id, 0 );
	local_nh.param<int>( "master_gain", master_gain, 0 );
	local_nh.param<int>( "image_format", image_format, 20 );
	local_nh.param<int>( "timeout_ms", timeout_ms, 100 );
	local_nh.param<bool>( "external_trigger", external_trigger, false );
	local_nh.param<float>( "frame_rate", frame_rate, 30 );
	local_nh.param<float>( "publish_rate", publish_rate, frame_rate );
	local_nh.param<float>( "aoi_ratio", aoi_ratio, 1.0 );
	local_nh.param<std::string>( "camera_name", camera_name, "camera" );
	local_nh.param<std::string>( "image_topic", image_topic, "image_raw" );
	local_nh.param<std::string>( "color_mode", color_mode, "mono8" );

	image_transport::ImageTransport it(nh);
	auto pub = it.advertiseCamera( camera_name + "/" + image_topic, 1 );

	ros::Rate spinner(publish_rate);

	auto available_cameras = ueye::Camera::get_camera_list();
	ROS_INFO("found %lu available cameras", available_cameras.size() );

	for ( auto cam : available_cameras )
		ROS_INFO("id=%d serial='%s' model='%s'", cam.dwCameraID, cam.SerNo, cam.Model );

	auto camera_info = std::find_if( available_cameras.begin(), available_cameras.end(), 
		[&camera_id]( const UEYE_CAMERA_INFO& cam_info ) { return cam_info.dwCameraID == (DWORD)camera_id; } );
	
	if ( camera_info == available_cameras.end() ) 
		{ ROS_ERROR("invalid camera id"); return 0; }
	
	ueye::Camera camera( *camera_info, image_format, frame_rate, color_mode, aoi_ratio );

	camera.set_master_gain( master_gain );
	camera.start_capture( external_trigger );

	while ( ros::ok() )
	{
		const ueye::CameraFrame* frame = camera.get_frame(timeout_ms);
		if (frame) pub.publish( frame->get_image(), camera.get_info() );

		ros::spinOnce();
		spinner.sleep();
	}
}
Example #7
  MonoOdometer(const std::string& transport) : OdometerBase(), replace_(false)
  {
    // Read local parameters
    ros::NodeHandle local_nh("~");
    odometry_params::loadParams(local_nh, visual_odometer_params_);

    ros::NodeHandle nh;
    image_transport::ImageTransport it(nh);
    camera_sub_ = it.subscribeCamera("image", 1, &MonoOdometer::imageCallback, this, transport);

    info_pub_ = local_nh.advertise<VisoInfo>("info", 1);
  }
Example #8
int main(int argc, char **argv)
{
  ros::init(argc, argv, "image_view", ros::init_options::AnonymousName);
  if (ros::names::remap("image") == "image") {
    ROS_WARN("Topic 'image' has not been remapped! Typical command-line usage:\n"
             "\t$ rosrun image_view image_view image:=<image topic> [transport]");
  }

  ros::NodeHandle nh;
  ros::NodeHandle local_nh("~");

  // Default window name is the resolved topic name
  std::string topic = nh.resolveName("image");
  local_nh.param("window_name", g_window_name, topic);

  std::string format_string;
  local_nh.param("filename_format", format_string, std::string("frame%04i.jpg"));
  g_filename_format.parse(format_string);

  // Handle window size
  bool autosize;
  local_nh.param("autosize", autosize, false);
  cv::namedWindow(g_window_name, autosize ? (CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO | CV_GUI_EXPANDED) : 0);
  cv::setMouseCallback(g_window_name, &mouseCb);

  // Start the OpenCV window thread so we don't have to waitKey() somewhere
  cv::startWindowThread();

  // Handle transport
  // priority:
  //    1. command line argument
  //    2. rosparam '~image_transport'
  std::string transport;
  local_nh.param("image_transport", transport, std::string("raw"));
  ros::V_string myargv;
  ros::removeROSArgs(argc, argv, myargv);
  for (size_t i = 1; i < myargv.size(); ++i) {
    if (myargv[i][0] != '-') {
      transport = myargv[i];
      break;
    }
  }
  ROS_INFO_STREAM("Using transport \"" << transport << "\"");
  image_transport::ImageTransport it(nh);
  image_transport::TransportHints hints(transport, ros::TransportHints(), local_nh);
  image_transport::Subscriber sub = it.subscribe(topic, 1, imageCb, hints);

  ros::spin();

  cv::destroyWindow(g_window_name);
  return 0;
}
Example #9
  /**
   * Constructor, subscribes to input topics using image transport and registers
   * callbacks.
   * \param transport The image transport to use
   */
  MonoDepthProcessor(const std::string& transport) :
    image_received_(0), depth_received_(0), image_info_received_(0), depth_info_received_(0), all_received_(0)
  {
    // Read local parameters
    ros::NodeHandle local_nh("~");

    // Resolve topic names
    ros::NodeHandle nh;
    std::string camera_ns = nh.resolveName("camera");
    std::string image_topic = ros::names::clean(camera_ns + "/rgb/image_rect");
    std::string depth_topic = ros::names::clean(camera_ns + "/depth_registered/image_rect");

    std::string image_info_topic = camera_ns + "/rgb/camera_info";
    std::string depth_info_topic = camera_ns + "/depth_registered/camera_info";

    // Subscribe to four input topics.
    ROS_INFO("Subscribing to:\n\t* %s\n\t* %s\n\t* %s\n\t* %s", 
        image_topic.c_str(), depth_topic.c_str(),
        image_info_topic.c_str(), depth_info_topic.c_str());

    image_transport::ImageTransport it(nh);
    image_sub_.subscribe(it, image_topic, 1, transport);
    depth_sub_.subscribe(it, depth_topic, 1, transport);
    image_info_sub_.subscribe(nh, image_info_topic, 1);
    depth_info_sub_.subscribe(nh, depth_info_topic, 1);

    // Complain every 15s if the topics appear unsynchronized
    image_sub_.registerCallback(boost::bind(MonoDepthProcessor::increment, &image_received_));
    depth_sub_.registerCallback(boost::bind(MonoDepthProcessor::increment, &depth_received_));
    image_info_sub_.registerCallback(boost::bind(MonoDepthProcessor::increment, &image_info_received_));
    depth_info_sub_.registerCallback(boost::bind(MonoDepthProcessor::increment, &depth_info_received_));
    check_synced_timer_ = nh.createWallTimer(ros::WallDuration(15.0),
                                             boost::bind(&MonoDepthProcessor::checkInputsSynchronized, this));

    // Synchronize input topics. Optionally do approximate synchronization.
    local_nh.param("queue_size", queue_size_, 5);
    bool approx;
    local_nh.param("approximate_sync", approx, true);
    if (approx)
    {
      approximate_sync_.reset(new ApproximateSync(ApproximatePolicy(queue_size_),
                                                  image_sub_, depth_sub_, image_info_sub_, depth_info_sub_) );
      approximate_sync_->registerCallback(boost::bind(&MonoDepthProcessor::dataCb, this, _1, _2, _3, _4));
    }
    else
    {
      exact_sync_.reset(new ExactSync(ExactPolicy(queue_size_),
                                      image_sub_, depth_sub_, image_info_sub_, depth_info_sub_) );
      exact_sync_->registerCallback(boost::bind(&MonoDepthProcessor::dataCb, this, _1, _2, _3, _4));
    }
  }
Example #10
  /**
   * Constructor, subscribes to input topics using image transport and registers
   * callbacks.
   * \param transport The image transport to use
   */
  StereoProcessor(const std::string& transport) :
    left_received_(0), right_received_(0), left_info_received_(0), right_info_received_(0), all_received_(0)
  {
    // Read local parameters
    ros::NodeHandle local_nh("~");

    // Resolve topic names
    ros::NodeHandle nh;
    std::string stereo_ns = nh.resolveName("stereo");
    std::string left_topic = ros::names::clean(stereo_ns + "/left/" + nh.resolveName("image"));
    std::string right_topic = ros::names::clean(stereo_ns + "/right/" + nh.resolveName("image"));

    std::string left_info_topic = stereo_ns + "/left/camera_info";
    std::string right_info_topic = stereo_ns + "/right/camera_info";

    // Subscribe to four input topics.
    ROS_INFO("Subscribing to:\n\t* %s\n\t* %s\n\t* %s\n\t* %s", 
        left_topic.c_str(), right_topic.c_str(),
        left_info_topic.c_str(), right_info_topic.c_str());

    image_transport::ImageTransport it(nh);
    left_sub_.subscribe(it, left_topic, 1, transport);
    right_sub_.subscribe(it, right_topic, 1, transport);
    left_info_sub_.subscribe(nh, left_info_topic, 1);
    right_info_sub_.subscribe(nh, right_info_topic, 1);

    // Complain every 15s if the topics appear unsynchronized
    left_sub_.registerCallback(boost::bind(StereoProcessor::increment, &left_received_));
    right_sub_.registerCallback(boost::bind(StereoProcessor::increment, &right_received_));
    left_info_sub_.registerCallback(boost::bind(StereoProcessor::increment, &left_info_received_));
    right_info_sub_.registerCallback(boost::bind(StereoProcessor::increment, &right_info_received_));
    check_synced_timer_ = nh.createWallTimer(ros::WallDuration(15.0),
                                             boost::bind(&StereoProcessor::checkInputsSynchronized, this));

    // Synchronize input topics. Optionally do approximate synchronization.
    local_nh.param("queue_size", queue_size_, 5);
    bool approx;
    local_nh.param("approximate_sync", approx, false);
    if (approx)
    {
      approximate_sync_.reset(new ApproximateSync(ApproximatePolicy(queue_size_),
                                                  left_sub_, right_sub_, left_info_sub_, right_info_sub_) );
      approximate_sync_->registerCallback(boost::bind(&StereoProcessor::dataCb, this, _1, _2, _3, _4));
    }
    else
    {
      exact_sync_.reset(new ExactSync(ExactPolicy(queue_size_),
                                      left_sub_, right_sub_, left_info_sub_, right_info_sub_) );
      exact_sync_->registerCallback(boost::bind(&StereoProcessor::dataCb, this, _1, _2, _3, _4));
    }
  }
Example #11
  StereoOdometer(const std::string& transport) : 
    StereoProcessor(transport), OdometerBase(), 
    got_lost_(false), last_motion_small_(false)
  {
    // Read local parameters
    ros::NodeHandle local_nh("~");
    odometry_params::loadParams(local_nh, visual_odometer_params_);

    local_nh.param("motion_threshold", motion_threshold_, 6.0);

    point_cloud_pub_ = local_nh.advertise<PointCloud>("point_cloud", 1);

    reference_motion_ = Matrix::eye(4);
  }
Example #12
int main(int argc, char** argv)
{

  const char * val = ::getenv( "ROS_HOSTNAME");
  std::string hostname;
  if (val == 0)
  {
    hostname =  "ros_master";
  }
  else
  {
    hostname =  std::string(val);
    std::replace( hostname.begin(), hostname.end(), '.', '_');
  }

  // ros::init() must run before any NodeHandle is created
  ros::init(argc, argv, "local_clock_syncroniser_" + hostname );

  ros::NodeHandle nh;

  ros::NodeHandle local_nh("~");
  event_pub.reset(new ros::Publisher(nh.advertise<trigger_sync::Event>("event",1 )));

  const bool use_tcp = false;  // Use TCP or UDP. UDP seems to work better.
  // Keep the subscriber alive for the node's lifetime; a subscriber local to the
  // if/else block would be destroyed immediately and drop the subscription.
  ros::Subscriber event_sub;
  if (use_tcp)
  {
      event_sub = nh.subscribe("/event", 1, &event_msg_callback, ros::TransportHints().tcpNoDelay());
  } else {
      event_sub = nh.subscribe("/event", 1, &event_msg_callback, ros::TransportHints().unreliable());
  }

  local_nh.param("clock_name", clock_name, std::string("clock"));
  clock_name = hostname + "_" +  clock_name;

  ROS_INFO_STREAM("clock name is: '" << clock_name << "'");

  // Set up a timer for sending event messages
  ros::Timer timer = nh.createTimer(ros::Duration(0.2), timerCallback);

  ros::spin();  // blocks until shutdown
  return 0;
}
Example #13
int main(int argc, char **argv)
{
    ros::init(argc, argv, "image_view");

    if (ros::names::remap("image")=="image"){
        ROS_WARN("Topic 'image' has not been remapped! Typical command-line usage:\n"
                 "/t$ rosrun movesense_sensor image_view image:=<image topic>");
    }
    ros::NodeHandle nh;
    ros::NodeHandle local_nh("~");

    // Default window name is the resolved topic name
    std::string topic = nh.resolveName("image");
    local_nh.param("window_name",g_window_name,topic);

    // Handle window size
    bool autosize;
    local_nh.param("autosize",autosize, true);
    cv::namedWindow(g_window_name, autosize ? (CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO | CV_GUI_EXPANDED) : 0);

    // CreateTrackbar
    cvCreateTrackbar( "Depth_Min", g_window_name.c_str(), &MIN_DEPTH, 10000);
    cvCreateTrackbar( "Depth_Max", g_window_name.c_str(), &MAX_DEPTH, 50000);

    // Start the OpenCV window thread so we don't have to waitKey() somewhere
    cv::startWindowThread();

    // Handle transport
    std::string transport;
    local_nh.param("image_transport", transport, std::string("raw"));
    ROS_INFO_STREAM("Using transport \"" << transport << "\"");

    image_transport::ImageTransport it(nh);
    image_transport::TransportHints hints(transport, ros::TransportHints(), local_nh);
    image_transport::Subscriber sub = it.subscribe(topic, 1, imageCallback, hints);

    ros::spin();

    cv::destroyWindow(g_window_name);

    return 0;
}
Example #14
  RemotePtamm(const ros::NodeHandle& nh, const std::string& _transport)
  {
    topic_ = "/preview";
    ros::NodeHandle local_nh("~");
    local_nh.param("window_name", window_name_, topic_);

    transport_ = _transport;

    bool autosize;
    local_nh.param("autosize", autosize, false);
    cv::namedWindow(window_name_, autosize ? 1 : 0);

#ifdef HAVE_GTK
    // Register appropriate handler for when user closes the display window
    GtkWidget *widget = GTK_WIDGET( cvGetWindowHandle(window_name_.c_str()) );
    g_signal_connect(widget, "destroy", G_CALLBACK(destroyNode), NULL);
#endif

    sub_ = NULL;
    subscribe(nh);
  }
Example #15
  ExtractImages(const ros::NodeHandle& nh, const std::string& transport)
    : filename_format_(""), count_(0), _time(ros::Time::now().toSec())
  {
    std::string topic = nh.resolveName("image");
    ros::NodeHandle local_nh("~");

    std::string format_string;
    local_nh.param("filename_format", format_string, std::string("frame%05i.png"));
    filename_format_.parse(format_string);

    local_nh.param("sec_per_frame", sec_per_frame_, 0.1);

    image_transport::ImageTransport it(nh);
    sub_ = it.subscribe(topic, 1, &ExtractImages::image_cb, this, transport);

#if defined(_VIDEO)
    video_writer = 0;
#endif

    ROS_INFO("Initialized sec per frame to %f", sec_per_frame_);
  }
Example #16
int main(int argc, char** argv)
{
  ros::init(argc, argv, "image_saver", ros::init_options::AnonymousName);
  ros::NodeHandle nh;
  image_transport::ImageTransport it(nh);
  std::string topic = nh.resolveName("image");

  Callbacks callbacks;
  // Useful when CameraInfo is being published
  image_transport::CameraSubscriber sub_image_and_camera = it.subscribeCamera(topic, 1,
                                                                              &Callbacks::callbackWithCameraInfo,
                                                                              &callbacks);
  // Useful when CameraInfo is not being published
  image_transport::Subscriber sub_image = it.subscribe(
      topic, 1, boost::bind(&Callbacks::callbackWithoutCameraInfo, &callbacks, _1));

  ros::NodeHandle local_nh("~");
  std::string format_string;
  local_nh.param("filename_format", format_string, std::string("left%04i.%s"));
  local_nh.param("encoding", encoding, std::string("bgr8"));
  local_nh.param("save_all_image", save_all_image, true);
  local_nh.param("request_start_end", request_start_end, false);
  g_format.parse(format_string);
  ros::ServiceServer save = local_nh.advertiseService ("save", service);

  if (request_start_end && !save_all_image)
    ROS_WARN("'request_start_end' is true, so overwriting 'save_all_image' as true");

  // FIXME(unkown): This does not make services appear
  // if (request_start_end) {
    ros::ServiceServer srv_start = local_nh.advertiseService(
      "start", &Callbacks::callbackStartSave, &callbacks);
    ros::ServiceServer srv_end = local_nh.advertiseService(
      "end", &Callbacks::callbackEndSave, &callbacks);
  // }

  ros::spin();
}
Example #17
  ProsilicaNode(const ros::NodeHandle& node_handle)
    : nh_(node_handle),
      it_(nh_),
      cam_(NULL), running_(false), auto_adjust_stream_bytes_per_second_(false),
      count_(0),
      frames_dropped_total_(0), frames_completed_total_(0),
      frames_dropped_acc_(WINDOW_SIZE),
      frames_completed_acc_(WINDOW_SIZE),
      packets_missed_total_(0), packets_received_total_(0),
      packets_missed_acc_(WINDOW_SIZE),
      packets_received_acc_(WINDOW_SIZE)
  {
    // Two-stage initialization: in the constructor we open the requested camera. Most
    // parameters controlling capture are set and streaming started in configure(), the
    // callback to dynamic_reconfigure.
    prosilica::init();

    if (prosilica::numCameras() == 0)
      ROS_WARN("Found no cameras on local subnet");

    // Determine which camera to use. Opening by IP address is preferred, then guid. If both
    // parameters are set we open by IP and verify the guid. If neither are set we default
    // to opening the first available camera.
    ros::NodeHandle local_nh("~");
    unsigned long guid = 0;
    std::string guid_str;
    if (local_nh.getParam("guid", guid_str) && !guid_str.empty())
      guid = strtol(guid_str.c_str(), NULL, 0);

    std::string ip_str;
    if (local_nh.getParam("ip_address", ip_str) && !ip_str.empty()) {
      cam_.reset( new prosilica::Camera(ip_str.c_str()) );

      // Verify guid is the one expected
      unsigned long cam_guid = cam_->guid();
      if (guid != 0 && guid != cam_guid)
        throw prosilica::ProsilicaException(ePvErrBadParameter,
                                            "guid does not match expected");
      guid = cam_guid;
    }
    else {
      if (guid == 0) guid = prosilica::getGuid(0);
      cam_.reset( new prosilica::Camera(guid) );
    }
    hw_id_ = boost::lexical_cast<std::string>(guid);
    ROS_INFO("Found camera, guid = %s", hw_id_.c_str());
    diagnostic_.setHardwareID(hw_id_);

    // Record some attributes of the camera
    tPvUint32 dummy;
    PvAttrRangeUint32(cam_->handle(), "Width", &dummy, &sensor_width_);
    PvAttrRangeUint32(cam_->handle(), "Height", &dummy, &sensor_height_);

    // Try to load intrinsics from on-camera memory.
    loadIntrinsics();

    // Set up self tests and diagnostics.
    // NB: Need to wait until here to construct self_test_, otherwise an exception
    // above from failing to find the camera gives bizarre backtraces
    // (http://answers.ros.org/question/430/trouble-with-prosilica_camera-pvapi).
    self_test_.reset(new self_test::TestRunner);
    self_test_->add( "Info Test", this, &ProsilicaNode::infoTest );
    self_test_->add( "Attribute Test", this, &ProsilicaNode::attributeTest );
    self_test_->add( "Image Test", this, &ProsilicaNode::imageTest );

    diagnostic_.add( "Frequency Status", this, &ProsilicaNode::freqStatus );
    diagnostic_.add( "Frame Statistics", this, &ProsilicaNode::frameStatistics );
    diagnostic_.add( "Packet Statistics", this, &ProsilicaNode::packetStatistics );
    diagnostic_.add( "Packet Error Status", this, &ProsilicaNode::packetErrorStatus );

    diagnostic_timer_ = nh_.createTimer(ros::Duration(0.1), boost::bind(&ProsilicaNode::runDiagnostics, this));

    // Service call for setting calibration.
    set_camera_info_srv_ = nh_.advertiseService("set_camera_info", &ProsilicaNode::setCameraInfo, this);

    // Start dynamic_reconfigure
    reconfigure_server_.setCallback(boost::bind(&ProsilicaNode::configure, this, _1, _2));
  }
Example #18
void event_msg_callback(const trigger_sync::EventConstPtr& event_msg)
{

  ros::Time t = ros::Time::now();
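  // 't' is used below either as the device time (when answering another
  // clock's request) or as our local receive time.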

  trigger_sync::Event new_event_msg = *event_msg;
  // Check for time request
  if (
         event_msg->local_clock_id     != clock_name     // Event was not sent by us
      && event_msg->local_request_time != ros::Time(0)   // Request time is defined
      && event_msg->device_time        == ros::Time(0)   // Device time is undefined
      && event_msg->local_receive_time == ros::Time(0)   // Receive time is undefined
      )
  {

    // Fill out the device time with our local time and republish
    new_event_msg.device_clock_id = clock_name;
    new_event_msg.device_time = t;
    event_pub->publish(new_event_msg);
    return;
  }

  // Check for time request
  if (
         event_msg->local_clock_id == clock_name         // Event was sent by us
      && event_msg->local_request_time != ros::Time(0)   // Request time is defined
      && event_msg->device_time        != ros::Time(0)   // Device  time is defined
      && event_msg->local_receive_time == ros::Time(0)   // Receive time is undefined
      )
  {

    // Fill out the local receive time field of the message
    new_event_msg.local_receive_time = t;

    // Add event to the appropriate clock estimator
    if (sync_map.count(event_msg->device_clock_id) == 0 )
    {
      sync_map[event_msg->device_clock_id].reset(new TriggerSync(event_msg->device_clock_id,event_msg->local_clock_id));

      double switch_period;
      ros::NodeHandle local_nh("~");
      local_nh.param("switch_period", switch_period, 30.0);
      sync_map[event_msg->device_clock_id]->setSwitchPeriod(ros::Duration(switch_period));
    }

    // Add the event to the TriggerSync estimator
    new_event_msg.corrected_local_time = sync_map[event_msg->device_clock_id]->updateTwoWay(
          new_event_msg.local_request_time,
          new_event_msg.device_time,
          new_event_msg.local_receive_time);


    // Publish the event with the corrected local time
    event_pub->publish(new_event_msg);

    printf(" offset_lb = %+15.1fus , offset_ub = %+15.1fus , offset_estimate = %+15.1fus \r",
             (new_event_msg.local_request_time   - new_event_msg.device_time ).toSec()*1.0e6,
             (new_event_msg.local_receive_time   - new_event_msg.device_time ).toSec()*1.0e6,
             (new_event_msg.corrected_local_time - new_event_msg.device_time ).toSec()*1.0e6
             );

    fflush(stdout);


  }
}