Code Example #1
File: tof_camera.cpp  Project: Berntorp/cob_driver
	/// Initializes and opens the time-of-flight camera sensor.
	/// @return <code>false</code> on failure, <code>true</code> otherwise
	bool init()
	{
		if (loadParameters() == false)
		{
			ROS_ERROR("[tof_camera] Could not read all parameters from launch file");
			return false;
		}

		if (tof_camera_->Init(config_directory_, tof_camera_index_) & ipa_CameraSensors::RET_FAILED)
		{

			std::stringstream ss;
			ss << "Initialization of tof camera ";
			ss << tof_camera_index_;
			ss << " failed";
			ROS_ERROR("[tof_camera] %s", ss.str().c_str());
			tof_camera_ = AbstractRangeImagingSensorPtr();
			return false;
		}
	
		if (tof_camera_->Open() & ipa_CameraSensors::RET_FAILED)
		{
			std::stringstream ss;
			ss << "Could not open tof camera ";
			ss << tof_camera_index_;
			ROS_ERROR("[tof_camera] %s", ss.str().c_str());
			tof_camera_ = AbstractRangeImagingSensorPtr();
			return false;
		}

		/// Read camera properties of the ToF range sensor
		ipa_CameraSensors::t_cameraProperty cameraProperty;
		cameraProperty.propertyID = ipa_CameraSensors::PROP_CAMERA_RESOLUTION;
		tof_camera_->GetProperty(&cameraProperty);
		int range_sensor_width = cameraProperty.cameraResolution.xResolution;
		int range_sensor_height = cameraProperty.cameraResolution.yResolution;
		cv::Size range_image_size(range_sensor_width, range_sensor_height);

		/// Setup camera toolbox
		ipa_CameraSensors::CameraSensorToolboxPtr tof_sensor_toolbox = ipa_CameraSensors::CreateCameraSensorToolbox();
		tof_sensor_toolbox->Init(config_directory_, tof_camera_->GetCameraType(), tof_camera_index_, range_image_size);

		cv::Mat intrinsic_mat = tof_sensor_toolbox->GetIntrinsicMatrix(tof_camera_type_, tof_camera_index_);
		cv::Mat distortion_map_X = tof_sensor_toolbox->GetDistortionMapX(tof_camera_type_, tof_camera_index_);
		cv::Mat distortion_map_Y = tof_sensor_toolbox->GetDistortionMapY(tof_camera_type_, tof_camera_index_);
		tof_camera_->SetIntrinsics(intrinsic_mat, distortion_map_X, distortion_map_Y);

		/// Advertise service for other nodes to set intrinsic calibration parameters
		camera_info_service_ = node_handle_.advertiseService("set_camera_info", &CobTofCameraNode::setCameraInfo, this);
		image_service_ = node_handle_.advertiseService("get_images", &CobTofCameraNode::imageSrvCallback, this);
		xyz_image_publisher_ = image_transport_.advertiseCamera("image_xyz", 1);
		grey_image_publisher_ = image_transport_.advertiseCamera("image_grey", 1);
		if(publish_point_cloud_2_) topicPub_pointCloud2_ = node_handle_.advertise<sensor_msgs::PointCloud2>("point_cloud2", 1);
		if(publish_point_cloud_) topicPub_pointCloud_ = node_handle_.advertise<sensor_msgs::PointCloud>("point_cloud", 1);

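		/// Fill the CameraInfo message; the D and K assignments below are left commented out in this snippet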
		cv::Mat d = tof_sensor_toolbox->GetDistortionParameters(tof_camera_type_, tof_camera_index_);
		camera_info_msg_.header.stamp = ros::Time::now();
		camera_info_msg_.header.frame_id = "head_tof_link";
		/*camera_info_msg_.D[0] = d.at<double>(0, 0);
		camera_info_msg_.D[1] = d.at<double>(0, 1);
		camera_info_msg_.D[2] = d.at<double>(0, 2);
		camera_info_msg_.D[3] = d.at<double>(0, 3);
		camera_info_msg_.D[4] = 0;*/
	
		cv::Mat k = tof_sensor_toolbox->GetIntrinsicMatrix(tof_camera_type_, tof_camera_index_);
		/*camera_info_msg_.K[0] = k.at<double>(0, 0);
		camera_info_msg_.K[1] = k.at<double>(0, 1);
		camera_info_msg_.K[2] = k.at<double>(0, 2);
		camera_info_msg_.K[3] = k.at<double>(1, 0);
		camera_info_msg_.K[4] = k.at<double>(1, 1);
		camera_info_msg_.K[5] = k.at<double>(1, 2);
		camera_info_msg_.K[6] = k.at<double>(2, 0);
		camera_info_msg_.K[7] = k.at<double>(2, 1);
		camera_info_msg_.K[8] = k.at<double>(2, 2);*/

		camera_info_msg_.width = range_sensor_width;		
		camera_info_msg_.height = range_sensor_height;

		return true;
	}
Code Example #2
 ImageConverter()
   : it_(nh_)
 {
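   // Subscribe to the camera_patrick image topic; imageCb is invoked for each incoming frame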
   image_sub_ = it_.subscribe("camera_patrick", 1, &ImageConverter::imageCb, this);
   //cv::namedWindow(WINDOW);
 }
Code Example #3
File: color2mono.cpp  Project: silverbullet1/bbauv
	ImageConverter()
		: it_(nh_)
	{
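		// Advertise the mono output image and subscribe to the color input stream; imageCb handles each color frame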
		image_pub_ = it_.advertise("mono_out", 1);
		image_sub_ = it_.subscribe("color_in", 1, &ImageConverter::imageCb, this);
	}
Code Example #4
File: all_cameras.cpp  Project: ipa-mars/care-o-bot
	/// Opens the camera sensor
	bool init()
	{
		if (loadParameters() == false)
		{
			ROS_ERROR("[all_cameras] Could not read parameters from launch file");
			return false;
		}
		
		if (left_color_camera_ && (left_color_camera_->Init(config_directory_, 1) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of left camera (1) failed");
			left_color_camera_ = AbstractColorCameraPtr();
		}

		if (left_color_camera_ && (left_color_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening left color camera (1) failed");
			left_color_camera_ = AbstractColorCameraPtr();
		}
		if (left_color_camera_)
		{
			/// Read camera properties of the left color sensor
			int camera_index = 1;
			ipa_CameraSensors::t_cameraProperty cameraProperty;
			cameraProperty.propertyID = ipa_CameraSensors::PROP_CAMERA_RESOLUTION;
			left_color_camera_->GetProperty(&cameraProperty);
			int color_sensor_width = cameraProperty.cameraResolution.xResolution;
			int color_sensor_height = cameraProperty.cameraResolution.yResolution;
			cv::Size color_image_size(color_sensor_width, color_sensor_height);
			
			/// Setup camera toolbox
			ipa_CameraSensors::CameraSensorToolboxPtr color_sensor_toolbox = ipa_CameraSensors::CreateCameraSensorToolbox();
			color_sensor_toolbox->Init(config_directory_, left_color_camera_->GetCameraType(), camera_index, color_image_size);
	
			cv::Mat d = color_sensor_toolbox->GetDistortionParameters(left_color_camera_intrinsic_type_, left_color_camera_intrinsic_id_);
			left_color_camera_info_msg_.D[0] = d.at<double>(0, 0);
			left_color_camera_info_msg_.D[1] = d.at<double>(0, 1);
			left_color_camera_info_msg_.D[2] = d.at<double>(0, 2);
			left_color_camera_info_msg_.D[3] = d.at<double>(0, 3);
			left_color_camera_info_msg_.D[4] = 0;
	
			cv::Mat k = color_sensor_toolbox->GetIntrinsicMatrix(left_color_camera_intrinsic_type_, left_color_camera_intrinsic_id_);
			left_color_camera_info_msg_.K[0] = k.at<double>(0, 0);
			left_color_camera_info_msg_.K[1] = k.at<double>(0, 1);
			left_color_camera_info_msg_.K[2] = k.at<double>(0, 2);
			left_color_camera_info_msg_.K[3] = k.at<double>(1, 0);
			left_color_camera_info_msg_.K[4] = k.at<double>(1, 1);
			left_color_camera_info_msg_.K[5] = k.at<double>(1, 2);
			left_color_camera_info_msg_.K[6] = k.at<double>(2, 0);
			left_color_camera_info_msg_.K[7] = k.at<double>(2, 1);
			left_color_camera_info_msg_.K[8] = k.at<double>(2, 2);
	
			left_color_camera_info_msg_.width = color_sensor_width;		
			left_color_camera_info_msg_.height = color_sensor_height;		
		}

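		// Initialize and open the right color camera (index 0); reset the pointer on failure so the camera is skipped below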
		if (right_color_camera_ && (right_color_camera_->Init(config_directory_, 0) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of right camera (0) failed");
			right_color_camera_ = AbstractColorCameraPtr();
		}

		if (right_color_camera_ && (right_color_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening right color camera (0) failed");
			right_color_camera_ = AbstractColorCameraPtr();
		}
		if (right_color_camera_)
		{
			int camera_index = 0;
			/// Read camera properties of the right color sensor
			ipa_CameraSensors::t_cameraProperty cameraProperty;
			cameraProperty.propertyID = ipa_CameraSensors::PROP_CAMERA_RESOLUTION;
			right_color_camera_->GetProperty(&cameraProperty);
			int color_sensor_width = cameraProperty.cameraResolution.xResolution;
			int color_sensor_height = cameraProperty.cameraResolution.yResolution;
			cv::Size color_image_size(color_sensor_width, color_sensor_height);

			/// Setup camera toolbox
			ipa_CameraSensors::CameraSensorToolboxPtr color_sensor_toolbox = ipa_CameraSensors::CreateCameraSensorToolbox();
			color_sensor_toolbox->Init(config_directory_, right_color_camera_->GetCameraType(), camera_index, color_image_size);
	
			cv::Mat d = color_sensor_toolbox->GetDistortionParameters(right_color_camera_intrinsic_type_, right_color_camera_intrinsic_id_);
			right_color_camera_info_msg_.D[0] = d.at<double>(0, 0);
			right_color_camera_info_msg_.D[1] = d.at<double>(0, 1);
			right_color_camera_info_msg_.D[2] = d.at<double>(0, 2);
			right_color_camera_info_msg_.D[3] = d.at<double>(0, 3);
			right_color_camera_info_msg_.D[4] = 0;
	
			cv::Mat k = color_sensor_toolbox->GetIntrinsicMatrix(right_color_camera_intrinsic_type_, right_color_camera_intrinsic_id_);
			right_color_camera_info_msg_.K[0] = k.at<double>(0, 0);
			right_color_camera_info_msg_.K[1] = k.at<double>(0, 1);
			right_color_camera_info_msg_.K[2] = k.at<double>(0, 2);
			right_color_camera_info_msg_.K[3] = k.at<double>(1, 0);
			right_color_camera_info_msg_.K[4] = k.at<double>(1, 1);
			right_color_camera_info_msg_.K[5] = k.at<double>(1, 2);
			right_color_camera_info_msg_.K[6] = k.at<double>(2, 0);
			right_color_camera_info_msg_.K[7] = k.at<double>(2, 1);
			right_color_camera_info_msg_.K[8] = k.at<double>(2, 2);
	
			right_color_camera_info_msg_.width = color_sensor_width;		
			right_color_camera_info_msg_.height = color_sensor_height;		
		}

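		// Initialize and open the time-of-flight camera; reset the pointer on failure so it is skipped below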
		if (tof_camera_ && (tof_camera_->Init(config_directory_) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of tof camera (0) failed");
			tof_camera_ = AbstractRangeImagingSensorPtr();
		}

		if (tof_camera_ && (tof_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening tof camera (0) failed");
			tof_camera_ = AbstractRangeImagingSensorPtr();
		}
		if (tof_camera_)
		{
			int camera_index = 0;
			/// Read camera properties of the ToF range sensor
			ipa_CameraSensors::t_cameraProperty cameraProperty;
			cameraProperty.propertyID = ipa_CameraSensors::PROP_CAMERA_RESOLUTION;
			tof_camera_->GetProperty(&cameraProperty);
			int range_sensor_width = cameraProperty.cameraResolution.xResolution;
			int range_sensor_height = cameraProperty.cameraResolution.yResolution;
			cv::Size rangeImageSize(range_sensor_width, range_sensor_height);
	
			/// Setup camera toolbox
			ipa_CameraSensors::CameraSensorToolboxPtr tof_sensor_toolbox = ipa_CameraSensors::CreateCameraSensorToolbox();
			tof_sensor_toolbox->Init(config_directory_, tof_camera_->GetCameraType(), camera_index, rangeImageSize);

			cv::Mat intrinsic_mat = tof_sensor_toolbox->GetIntrinsicMatrix(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			cv::Mat distortion_map_X = tof_sensor_toolbox->GetDistortionMapX(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			cv::Mat distortion_map_Y = tof_sensor_toolbox->GetDistortionMapY(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			tof_camera_->SetIntrinsics(intrinsic_mat, distortion_map_X, distortion_map_Y);

			cv::Mat d = tof_sensor_toolbox->GetDistortionParameters(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			tof_camera_info_msg_.D[0] = d.at<double>(0, 0);
			tof_camera_info_msg_.D[1] = d.at<double>(0, 1);
			tof_camera_info_msg_.D[2] = d.at<double>(0, 2);
			tof_camera_info_msg_.D[3] = d.at<double>(0, 3);
			tof_camera_info_msg_.D[4] = 0;
	
			cv::Mat k = tof_sensor_toolbox->GetIntrinsicMatrix(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			tof_camera_info_msg_.K[0] = k.at<double>(0, 0);
			tof_camera_info_msg_.K[1] = k.at<double>(0, 1);
			tof_camera_info_msg_.K[2] = k.at<double>(0, 2);
			tof_camera_info_msg_.K[3] = k.at<double>(1, 0);
			tof_camera_info_msg_.K[4] = k.at<double>(1, 1);
			tof_camera_info_msg_.K[5] = k.at<double>(1, 2);
			tof_camera_info_msg_.K[6] = k.at<double>(2, 0);
			tof_camera_info_msg_.K[7] = k.at<double>(2, 1);
			tof_camera_info_msg_.K[8] = k.at<double>(2, 2);

			tof_camera_info_msg_.width = range_sensor_width;		
			tof_camera_info_msg_.height = range_sensor_height;		
		}
	
		/// Topics and Services to publish
		if (left_color_camera_) 
		{
			// Adapt name according to camera type
			left_color_image_publisher_ = image_transport_.advertiseCamera(left_color_camera_ns_ + "/left/image_color", 1);
			left_color_camera_info_service_ = node_handle_.advertiseService(left_color_camera_ns_ + "/left/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
		if (right_color_camera_)
		{
			// Adapt name according to camera type
			right_color_image_publisher_ = image_transport_.advertiseCamera(right_color_camera_ns_ + "/right/image_color", 1);
			right_color_camera_info_service_ = node_handle_.advertiseService(right_color_camera_ns_ + "/right/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
		if (tof_camera_)
		{
			grey_tof_image_publisher_ = image_transport_.advertiseCamera(tof_camera_ns_ + "/image_grey", 1);
			xyz_tof_image_publisher_ = image_transport_.advertiseCamera(tof_camera_ns_ + "/image_xyz", 1);
			tof_camera_info_service_ = node_handle_.advertiseService(tof_camera_ns_ + "/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
	
		return true;
	}
Code Example #5
	Filter() : it_(nh_)
	{
		// Subscribe to the input video feed and publish the output video feed
		image_sub_ = it_.subscribe("/radar_raw_will", 1, &Filter::imageCb, this);
		image_pub_ = it_.advertise("/pirate_filter/image", 1);
	}
Code Example #6
File: raycast.cpp  Project: jdalphon/igvc-2012
 ToLaserScan() : it(n){
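     // Subscribe to the segmented camera image and advertise the resulting LaserScan on cam_laser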
     it_sub = it.subscribe("cam_segment", 1, &ToLaserScan::got_frame, this);
     scan_pub = n.advertise<sensor_msgs::LaserScan>("cam_laser", 50);
 }
Code Example #7
  ColorBlobDetector(ros::NodeHandle &n, char **argv) :
    n_(n), it_(n_) {
    
    first = true;
    
//Set default parameters if they are not already defined
    if (!n_.getParam("minCCThreshold", minCCThreshold)) {
      ROS_INFO("Could not retrieve 'minCCThreshold' parameter, setting to default value of '%d'", DEFAULT_MIN_CC_THRESHOLD);
      minCCThreshold = DEFAULT_MIN_CC_THRESHOLD;
    }
    
    if (!n_.getParam("display_image", display_image_)) {
      ROS_INFO("Could not retrieve 'display_image' parameter, setting to default value of '%d'", DEFAULT_DISPLAY_IMAGE);
      display_image_ = DEFAULT_DISPLAY_IMAGE;
    }	
    
    int color_blob_num;
    if (!n_.getParam("color_blob_num", color_blob_num)) {
      ROS_INFO("Could not retrieve 'color_blob_num' parameter, setting to default value of '%d'", DEFAULT_COLOR_BLOB_NUM);
      color_blob_num = DEFAULT_COLOR_BLOB_NUM;
    }
    
    std::string source_identifier;
    if (!n_.getParam("source_identifier", source_identifier)) {
      ROS_INFO("Could not retrieve 'source_identifier' parameter, setting to default value of '%s'", DEFAULT_SOURCE_IDENTIFIER);
      source_identifier = DEFAULT_SOURCE_IDENTIFIER;
    }
    
    // Copy the source identifier into the fixed-size input topic name buffers
    sprintf(hue_topic, "%s", source_identifier.c_str());	// input topic
    sprintf(rgb_topic, "%s", source_identifier.c_str());	// input topic

    sprintf(color_topic, "%s/Feature/ColorBlobs%d", source_identifier.c_str(), color_blob_num);	// output topic
    
    printf("Topic name : %s\n", color_topic);
    
    //hue_sub_ = it_.subscribe(hue_topic, 1, &ColorBlobDetector::hueCallback, this);


    //Gets the most recently published image from the topic and activates the callback method
    rgb_sub_ = it_.subscribe(rgb_topic, 1, &ColorBlobDetector::rgbCallback, this);
    image_pub_ = it_.advertise(color_topic, 1);
    
    // Create the display windows and trackbars if display_image_ is set
    if (display_image_) {
      // Create a window
      namedWindow(color_topic, CV_WINDOW_AUTOSIZE);
      namedWindow("Color Blobs", CV_WINDOW_AUTOSIZE);
      namedWindow("Connected Components", CV_WINDOW_AUTOSIZE);
      
      // create a toolbar
      createTrackbar("Mean Color", color_topic, &mean_color, 255, &on_trackbar);
      //createTrackbar("Other Mean Color", color_topic, &other_mean_color, 255, &on_trackbar);
      createTrackbar("Window Size", color_topic, &window_size, 89, &on_trackbar);
      createTrackbar("Blur Kernel", color_topic, &blurKernel, 25, &on_trackbar);
    }


    // Create matrices to store the three layers of the HSV image
    hue_image = new Mat(height, width, CV_8UC1);
    sat_image = new Mat(height, width, CV_8UC1);
    int_image = new Mat(height, width, CV_8UC1);

    // Create the blurred image and other intermediate buffers
    back_img = new Mat(hue_image->size(), CV_8UC1);
    temp_blurred_image = new Mat(hue_image->size(), CV_8UC1);
    blurred_image = new Mat(hue_image->size(), CV_8UC1);
    color_cc_image = new Mat(hue_image->size(), CV_8UC3, Scalar(0));
    copy_image = new Mat(hue_image->size(), CV_8UC1);
    flipped = new Mat(hue_image->size(), CV_8UC1);
    rgb_image = new Mat(hue_image->size(), CV_8UC3);
    hsv_image = new Mat(hue_image->size(), CV_8UC3);
    
    // The blob tracker records the location of blobs between time frames
    blobTracker = new FeatureBlobTracker(maxNumComponents, width, height);
    blobTracker->initialize();

    comps = vector<ConnectedComp>(); 
    contours = vector<vector<Point> >();
    hierarchy = vector<Vec4i>();

    comps.resize(maxNumComponents);
    is_running = false;

  }
Code Example #8
 Catcher() : it(nh) {
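     // Subscribe to the bottom-camera retranslator topic; showImage is called for each frame and WINDOW names the OpenCV display created below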
     image_sub = it.subscribe("retranslator/cv_bottom", 1, &Catcher::showImage, this);
     cv::namedWindow(WINDOW);
 }
Code Example #9
 ImageConverter()
     : it_(nh_)
 {
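     // Subscribe to typed ocular::HandImage messages on image_in and advertise the converted output on image_out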
     image_sub_ = nh_.subscribe<ocular::HandImage>("image_in", 1, &ImageConverter::imageCb, this);
     image_pub = it_.advertise("image_out", 1);
 }
Code Example #10
File: test.cpp  Project: ucd-robotics/AR.Drone.2.0
 drone_image()
    : it_(nh_)
  {
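     // Subscribe to the AR.Drone raw camera feed; imageCb processes each frame (the republisher is left commented out in this snippet)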
     image_sub_ = it_.subscribe("/ardrone/image_raw", 1, &drone_image::imageCb, this);
     //image_pub_= it_.advertise("/arcv/Image",1);
  }
Code Example #11
File: all_cameras.cpp  Project: phymur/care-o-bot
	/// Opens the camera sensor
	bool init()
	{
		std::string directory = "NULL/";
		std::string tmp_string = "NULL";

		/// Parameters are set within the launch file
		if (node_handle_.getParam("all_cameras/configuration_files", directory) == false)
		{
			ROS_ERROR("Path to xml configuration file not specified");
			return false;
		}

		/// Parameters are set within the launch file
		if (node_handle_.getParam("all_cameras/color_camera_type", tmp_string) == false)
		{
			ROS_ERROR("[all_cameras] Color camera type not specified");
			return false;
		}
		if (tmp_string == "CAM_AVTPIKE")
		{
			 right_color_camera_ = ipa_CameraSensors::CreateColorCamera_AVTPikeCam();
			 left_color_camera_ = ipa_CameraSensors::CreateColorCamera_AVTPikeCam();
		}
		else if (tmp_string == "CAM_PROSILICA") ROS_ERROR("[all_cameras] Color camera type CAM_PROSILICA not yet implemented");
		else
		{
			std::string str = "[all_cameras] Camera type '" + tmp_string + "' unknown, try 'CAM_AVTPIKE' or 'CAM_PROSILICA'";
			ROS_ERROR("%s", str.c_str());
			return false;
		}
	
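		// Initialize and open each camera; on failure the corresponding pointer is set to 0 so that camera is skipped when topics are advertised below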
		if (left_color_camera_ && (left_color_camera_->Init(directory, 1) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of left camera (1) failed");
			left_color_camera_ = 0;
		}

		if (left_color_camera_ && (left_color_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening left color camera (1) failed");
			left_color_camera_ = 0;
		}

		if (right_color_camera_ && (right_color_camera_->Init(directory, 0) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of right camera (0) failed");
			right_color_camera_ = 0;
		}

		if (right_color_camera_ && (right_color_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening right color camera (0) failed");
			right_color_camera_ = 0;
		}

		/// Parameters are set within the launch file
		if (node_handle_.getParam("all_cameras/tof_camera_type", tmp_string) == false)
		{
			ROS_ERROR("[all_cameras] tof camera type not specified");
			return false;
		}
		if (tmp_string == "CAM_SWISSRANGER") tof_camera_ = ipa_CameraSensors::CreateRangeImagingSensor_Swissranger();
		else if(tmp_string == "CAM_PMDCAMCUBE") 
		{
			ROS_ERROR("[all_cameras] tof camera type CAM_PMDCAMCUBE not yet implemented");
		}
		else
		{
			std::string str = "[all_cameras] Camera type '" + tmp_string + "' unknown, try 'CAM_SWISSRANGER'";
			ROS_ERROR("%s", str.c_str());
		}
		
		if (tof_camera_ && (tof_camera_->Init(directory) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of tof camera (0) failed");
			tof_camera_ = 0;
		}

		if (tof_camera_ && (tof_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening tof camera (0) failed");
			tof_camera_ = 0;
		}

		/// Topics and Services to publish
		if (left_color_camera_) 
		{
			left_color_image_publisher_ = image_transport_.advertiseCamera("pike_145C/left/image_raw", 1);
			left_color_camera_info_service_ = node_handle_.advertiseService("pike_145C/left/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
		if (right_color_camera_)
		{
			right_color_image_publisher_ = image_transport_.advertiseCamera("pike_145C/right/image_raw", 1);
			right_color_camera_info_service_ = node_handle_.advertiseService("pike_145C/right/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
		if (tof_camera_)
		{
			grey_tof_image_publisher_ = image_transport_.advertiseCamera("sr4000/image_grey", 1);
			xyz_tof_image_publisher_ = image_transport_.advertiseCamera("sr4000/image_xyz", 1);
			tof_camera_info_service_ = node_handle_.advertiseService("sr4000/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
	
		return true;
	}