Code Example #1
File: camera.cpp Project: amiltonwong/openni2_camera
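 // Adds camera publishers for the registered depth image and for the disparity topics in both
 // namespaces, reusing the base class's subscription callback for connect and disconnect events.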
 DepthSensorStreamManager(ros::NodeHandle& nh, Device& device, std::string rgb_frame_id, std::string depth_frame_id, VideoMode& default_mode) :
   SensorStreamManager(nh, device, SENSOR_DEPTH, "depth", depth_frame_id, default_mode),
   nh_registered_(nh, "depth_registered"),
   it_registered_(nh_registered_),
   active_publisher_(0),
   rgb_frame_id_(rgb_frame_id),
   depth_frame_id_(depth_frame_id)
 {
   depth_registered_publisher_ = it_registered_.advertiseCamera("image_raw", 1, callback_, callback_);
   disparity_publisher_ = it_.advertiseCamera("disparity", 1, callback_, callback_);
   disparity_registered_publisher_ = it_registered_.advertiseCamera("disparity", 1, callback_, callback_);
 }
Code Example #2
File: camera1394v2.cpp Project: sagrawal/Flylab
  /** driver main spin loop */
  void spin(void)
  {
    // the bring up order is tricky
    ros::NodeHandle node;

    // define segmentation fault handler, sometimes libdc1394 craps out
    signal(SIGSEGV, &sigsegv_handler);

    // Define dynamic reconfigure callback, which gets called
    // immediately with level 0xffffffff.  The reconfig() method will
    // set initial parameter values, then open the device if it can.
    dynamic_reconfigure::Server<Config> srv;
    dynamic_reconfigure::Server<Config>::CallbackType f = boost::bind(&Camera1394v2Node::reconfig, this, _1, _2);
    srv.setCallback(f);

    // set up ROS interfaces in camera namespace
    it_ = new image_transport::ImageTransport(camera_nh_);
    image_pub_ = it_->advertiseCamera("image_raw", 1);

    while (node.ok())
      {
        if (state_ != Driver::CLOSED)
          {
            if (read())
              {
                publish();
              }
          }

        ros::spinOnce();
      }

    closeCamera();
  }
Code Example #3
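  // In software-trigger mode, images are served on demand through a polled_camera service;
  // otherwise a streaming camera publisher is advertised and the frame callback publishes each frame.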
  void start()
  {
    if (running_) return;

    if (trigger_mode_ == prosilica::Software) {
      poll_srv_ = polled_camera::advertise(nh_, "request_image", &ProsilicaNode::pollCallback, this);
      // Auto-exposure tends to go wild the first few frames after startup
      // if (auto_expose) normalizeExposure();
    }
    else {
      if ((trigger_mode_ == prosilica::SyncIn1) || (trigger_mode_ == prosilica::SyncIn2)) {
        if (!trig_timestamp_topic_.empty())
          trigger_sub_ = nh_.subscribe(trig_timestamp_topic_, 1, &ProsilicaNode::syncInCallback, this);
      }
      else if (trigger_mode_ == prosilica::FixedRate) {
        if (!trig_timestamp_topic_.empty())
          trigger_sub_ = nh_.subscribe(trig_timestamp_topic_, 1, &ProsilicaNode::syncInCallback, this);
      }
      else {
        assert(trigger_mode_ == prosilica::Freerun);
      }
      cam_->setFrameCallback(boost::bind(&ProsilicaNode::publishImage, this, _1));
      streaming_pub_ = it_.advertiseCamera("image_raw", 1);
    }
    cam_->start(trigger_mode_, prosilica::Continuous);
    running_ = true;
  }
Code Example #4
File: uvc_cam_node.cpp Project: OTL/uvc_cam
	void spin() {

		ros::NodeHandle node;

		// define segmentation fault handler in case the underlying uvc driver craps out
		signal(SIGSEGV, &sigsegv_handler);

		// Define dynamic reconfigure callback, which gets called
		// immediately with level 0xffffffff.  The reconfig() method will
		// set initial parameter values, then open the device if it can.
		dynamic_reconfigure::Server<Config> srv;
		dynamic_reconfigure::Server<Config>::CallbackType f
		= boost::bind(&UVCCamNode::reconfig, this, _1, _2);
		srv.setCallback(f);

		image_pub_ = it_.advertiseCamera("image_raw", 1);

		while (node.ok())
		{
			if (state_ != Driver::CLOSED)
			{
				if (read())
				{
					publish();
				}
			}

			ros::spinOnce();
		}

		closeCamera();
	}
Code Example #5
  StereoVslamNode(const std::string& vocab_tree_file, const std::string& vocab_weights_file,
                  const std::string& calonder_trees_file)
    : it_(nh_), sync_(3),
      vslam_system_(vocab_tree_file, vocab_weights_file),
      detector_(new vslam_system::AnyDetector)
  {
    // Use calonder descriptor
    typedef cv::CalonderDescriptorExtractor<float> Calonder;
    vslam_system_.frame_processor_.setFrameDescriptor(new Calonder(calonder_trees_file));
    
    // Advertise outputs
    cam_marker_pub_ = nh_.advertise<visualization_msgs::Marker>("/vslam/cameras", 1);
    point_marker_pub_ = nh_.advertise<visualization_msgs::Marker>("/vslam/points", 1);
    vo_tracks_pub_ = it_.advertiseCamera("/vslam/vo_tracks/image", 1);
    odom_pub_ = nh_.advertise<nav_msgs::Odometry>("/vo", 1);
    pointcloud_pub_ = nh_.advertise<sensor_msgs::PointCloud2>("/vslam/pointcloud", 1);

    // Synchronize inputs
    l_image_sub_.subscribe(it_, "left/image_rect", 1);
    l_info_sub_ .subscribe(nh_, "left/camera_info", 1);
    r_image_sub_.subscribe(it_, "right/image_rect", 1);
    r_info_sub_ .subscribe(nh_, "right/camera_info", 1);
    sync_.connectInput(l_image_sub_, l_info_sub_, r_image_sub_, r_info_sub_);
    sync_.registerCallback( boost::bind(&StereoVslamNode::imageCb, this, _1, _2, _3, _4) );

    // Dynamic reconfigure for parameters
    ReconfigureServer::CallbackType f = boost::bind(&StereoVslamNode::configCb, this, _1, _2);
    reconfigure_server_.setCallback(f);

  }
Code Example #6
  void start ()
  {
    if (running)
      return;

    cam_->setFrameCallback (boost::bind (&PGRCameraNode::publishImage, this, _1));
    streaming_pub_ = it_.advertiseCamera ("image_raw", 1);

    cam_->start ();
    running = true;
  }
Code Example #7
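  // Synchronizes the raw left/right image and camera_info topics and advertises matching
  // image_raw publishers in the resolved left/right namespaces.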
  StereoSynchronizer () : nh_(), it_(nh_), sync_(15) {

    std::string left_raw = nh_.resolveName("left_raw");
    std::string right_raw = nh_.resolveName("right_raw");
    image_sub_l_.subscribe(it_, left_raw  + "/image_raw", 4);
    info_sub_l_ .subscribe(nh_, left_raw  + "/camera_info", 4);
    image_sub_r_.subscribe(it_, right_raw + "/image_raw", 4);
    info_sub_r_ .subscribe(nh_, right_raw + "/camera_info", 4);

    left_ns_ = nh_.resolveName("left");
    right_ns_ = nh_.resolveName("right");
    ros::NodeHandle cam_l_nh(left_ns_);
    ros::NodeHandle cam_r_nh(right_ns_);
    it_l_ = new image_transport::ImageTransport(cam_l_nh);
    it_r_ = new image_transport::ImageTransport(cam_r_nh);
    pub_left_ = it_l_->advertiseCamera("image_raw", 1);
    pub_right_ = it_r_->advertiseCamera("image_raw", 1);

    sync_.connectInput(image_sub_l_, info_sub_l_, image_sub_r_, info_sub_r_);
    sync_.registerCallback(boost::bind(&StereoSynchronizer::imageCB, this, _1, _2, _3, _4));
  }
Code Example #8
File: camera.cpp Project: amiltonwong/openni2_camera
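  // Advertises image_raw with onSubscriptionChanged bound as both the connect and disconnect
  // callback, then creates the OpenNI stream and applies the default video mode.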
  SensorStreamManager(ros::NodeHandle& nh, Device& device, SensorType type, std::string name, std::string frame_id, VideoMode& default_mode) :
    device_(device),
    default_mode_(default_mode),
    name_(name),
    frame_id_(frame_id),
    running_(false),
    nh_(nh, name_),
    it_(nh_),
    camera_info_manager_(nh_)
  {
    assert(device_.hasSensor(type));

    callback_ = boost::bind(&SensorStreamManager::onSubscriptionChanged, this, _1);
    publisher_ = it_.advertiseCamera("image_raw", 1, callback_, callback_);

    ROS_ERROR_STREAM_COND(stream_.create(device_, type) != STATUS_OK, "Failed to create stream '" << toString(type) << "'!");
    stream_.addNewFrameListener(this);
    ROS_ERROR_STREAM_COND(stream_.setVideoMode(default_mode_) != STATUS_OK, "Failed to set default video mode for stream '" << toString(type) << "'!");
  }
Code Example #9
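    // Creates (on first use) one latched CameraPublisher per response namespace, invokes the
    // driver callback, and publishes the returned image and camera info on success.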
    bool requestCallback (polled_camera::GetPolledImage::Request& req,
                          polled_camera::GetPolledImage::Response& rsp)
    {
        std::string image_topic = req.response_namespace + "/image_raw";
        image_transport::CameraPublisher& pub = client_map_[image_topic];

        if (!pub)
        {
            // Create a latching camera publisher.
            pub = it_.advertiseCamera (image_topic, 1, image_transport::SubscriberStatusCallback(),
                                       boost::bind (&Impl::disconnectCallback, this, _1),
                                       ros::SubscriberStatusCallback(), ros::SubscriberStatusCallback(),
                                       ros::VoidPtr(), true /*latch*/);
            ROS_INFO ("Advertising %s", pub.getTopic().c_str());
        }

        // Correct zero binning values to one for convenience
        req.binning_x = std::max (req.binning_x, (uint32_t) 1);
        req.binning_y = std::max (req.binning_y, (uint32_t) 1);

        /// @todo Use pointers in prep for nodelet drivers?
        sensor_msgs::Image image;
        sensor_msgs::CameraInfo info;
        driver_cb_ (req, rsp, image, info);

        if (rsp.success)
        {
            assert (image.header.stamp == info.header.stamp);
            rsp.stamp = image.header.stamp;
            pub.publish (image, info);
        }
        else
        {
            ROS_ERROR ("Failed to capture requested image, status message: '%s'",
                       rsp.status_message.c_str());
        }

        return true; // Success/failure indicated by rsp.success
    }
Code Example #10
File: all_cameras.cpp Project: Kuhn1981/care-o-bot
	/// Opens the camera sensor
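	/// Reads the camera types from launch-file parameters, initializes and opens each sensor,
	/// and advertises image topics and set_camera_info services for the cameras that opened successfully.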
	bool init()
	{
		std::string directory = "NULL/";
		std::string tmp_string = "NULL";

		/// Parameters are set within the launch file
		if (node_handle_.getParam("all_cameras/configuration_files", directory) == false)
		{
			ROS_ERROR("Path to xml configuration file not specified");
			return false;
		}

		/// Parameters are set within the launch file
		if (node_handle_.getParam("all_cameras/color_camera_type", tmp_string) == false)
		{
			ROS_ERROR("[all_cameras] Color camera type not specified");
			return false;
		}
		if (tmp_string == "CAM_AVTPIKE")
		{
			right_color_camera_ = ipa_CameraSensors::CreateColorCamera_AVTPikeCam();
			left_color_camera_ = ipa_CameraSensors::CreateColorCamera_AVTPikeCam();
		}
		else if (tmp_string == "CAM_PROSILICA") ROS_ERROR("[all_cameras] Color camera type CAM_PROSILICA not yet implemented");
		else
		{
			std::string str = "[all_cameras] Camera type '" + tmp_string + "' unknown, try 'CAM_AVTPIKE' or 'CAM_PROSILICA'";
			ROS_ERROR("%s", str.c_str());
			return false;
		}
	
		if (left_color_camera_ && (left_color_camera_->Init(directory, 1) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of left camera (1) failed");
			left_color_camera_ = 0;
		}

		if (left_color_camera_ && (left_color_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening left color camera (1) failed");
			left_color_camera_ = 0;
		}

		if (right_color_camera_ && (right_color_camera_->Init(directory, 0) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of right camera (0) failed");
			right_color_camera_ = 0;
		}

		if (right_color_camera_ && (right_color_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening right color camera (0) failed");
			right_color_camera_ = 0;
		}

		/// Parameters are set within the launch file
		if (node_handle_.getParam("all_cameras/tof_camera_type", tmp_string) == false)
		{
			ROS_ERROR("[all_cameras] tof camera type not specified");
			return false;
		}
		if (tmp_string == "CAM_SWISSRANGER") tof_camera_ = ipa_CameraSensors::CreateRangeImagingSensor_Swissranger();
		else if(tmp_string == "CAM_PMDCAMCUBE") 
		{
			ROS_ERROR("[all_cameras] tof camera type CAM_PMDCAMCUBE not yet implemented");
		}
		else
		{
			std::string str = "[all_cameras] Camera type '" + tmp_string + "' unknown, try 'CAM_SWISSRANGER'";
			ROS_ERROR("%s", str.c_str());
		}
		
		if (tof_camera_ && (tof_camera_->Init(directory) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of tof camera (0) failed");
			tof_camera_ = 0;
		}

		if (tof_camera_ && (tof_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening tof camera (0) failed");
			tof_camera_ = 0;
		}

		/// Topics and Services to publish
		if (left_color_camera_) 
		{
			left_color_image_publisher_ = image_transport_.advertiseCamera("left/color_camera_stream/image_raw", 1);
			left_color_camera_info_service_ = node_handle_.advertiseService("left/color_camera/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
		if (right_color_camera_)
		{
			right_color_image_publisher_ = image_transport_.advertiseCamera("right/color_camera_stream/image_raw", 1);
			right_color_camera_info_service_ = node_handle_.advertiseService("right/color_camera/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
		if (tof_camera_)
		{
			grey_tof_image_publisher_ = image_transport_.advertiseCamera("grey_tof_data", 1);
			xyz_tof_image_publisher_ = image_transport_.advertiseCamera("xyz_tof_data", 1);
			tof_camera_info_service_ = node_handle_.advertiseService("tof_camera/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
	
		return true;
	}
Code Example #11
File: tof_camera.cpp Project: ipa-mars/care-o-bot
	/// Initializes and opens the time-of-flight camera sensor.
	/// @return <code>false</code> on failure, <code>true</code> otherwise
	bool init()
	{
		if (loadParameters() == false)
		{
			ROS_ERROR("[color_camera] Could not read all parameters from launch file");
			return false;
		}

		if (tof_camera_->Init(config_directory_, tof_camera_index_) & ipa_CameraSensors::RET_FAILED)
		{
			std::stringstream ss;
			ss << "Initialization of tof camera ";
			ss << tof_camera_index_;
			ss << " failed";
			ROS_ERROR("[tof_camera] %s", ss.str().c_str());
			tof_camera_ = AbstractRangeImagingSensorPtr();
			return false;
		}
	
		if (tof_camera_->Open() & ipa_CameraSensors::RET_FAILED)
		{
			std::stringstream ss;
			ss << "Could not open tof camera ";
			ss << tof_camera_index_;
			ROS_ERROR("[tof_camera] %s", ss.str().c_str());
			tof_camera_ = AbstractRangeImagingSensorPtr();
			return false;
		}

		/// Read camera properties of range tof sensor
		ipa_CameraSensors::t_cameraProperty cameraProperty;
		cameraProperty.propertyID = ipa_CameraSensors::PROP_CAMERA_RESOLUTION;
		tof_camera_->GetProperty(&cameraProperty);
		int range_sensor_width = cameraProperty.cameraResolution.xResolution;
		int range_sensor_height = cameraProperty.cameraResolution.yResolution;
		cv::Size range_image_size(range_sensor_width, range_sensor_height);

		/// Setup camera toolbox
		ipa_CameraSensors::CameraSensorToolboxPtr tof_sensor_toolbox = ipa_CameraSensors::CreateCameraSensorToolbox();
		tof_sensor_toolbox->Init(config_directory_, tof_camera_->GetCameraType(), tof_camera_index_, range_image_size);

		cv::Mat intrinsic_mat = tof_sensor_toolbox->GetIntrinsicMatrix(tof_camera_type_, tof_camera_index_);
		cv::Mat distortion_map_X = tof_sensor_toolbox->GetDistortionMapX(tof_camera_type_, tof_camera_index_);
		cv::Mat distortion_map_Y = tof_sensor_toolbox->GetDistortionMapY(tof_camera_type_, tof_camera_index_);
		tof_camera_->SetIntrinsics(intrinsic_mat, distortion_map_X, distortion_map_Y);

		/// Advertise service for other nodes to set intrinsic calibration parameters
		camera_info_service_ = node_handle_.advertiseService("set_camera_info", &CobTofCameraNode::setCameraInfo, this);
		image_service_ = node_handle_.advertiseService("get_images", &CobTofCameraNode::imageSrvCallback, this);
		xyz_image_publisher_ = image_transport_.advertiseCamera("image_xyz", 1);
		grey_image_publisher_ = image_transport_.advertiseCamera("image_grey", 1);

		cv::Mat d = tof_sensor_toolbox->GetDistortionParameters(tof_camera_type_, tof_camera_index_);
		camera_info_msg_.D[0] = d.at<double>(0, 0);
		camera_info_msg_.D[1] = d.at<double>(0, 1);
		camera_info_msg_.D[2] = d.at<double>(0, 2);
		camera_info_msg_.D[3] = d.at<double>(0, 3);
		camera_info_msg_.D[4] = 0;
	
		cv::Mat k = tof_sensor_toolbox->GetIntrinsicMatrix(tof_camera_type_, tof_camera_index_);
		camera_info_msg_.K[0] = k.at<double>(0, 0);
		camera_info_msg_.K[1] = k.at<double>(0, 1);
		camera_info_msg_.K[2] = k.at<double>(0, 2);
		camera_info_msg_.K[3] = k.at<double>(1, 0);
		camera_info_msg_.K[4] = k.at<double>(1, 1);
		camera_info_msg_.K[5] = k.at<double>(1, 2);
		camera_info_msg_.K[6] = k.at<double>(2, 0);
		camera_info_msg_.K[7] = k.at<double>(2, 1);
		camera_info_msg_.K[8] = k.at<double>(2, 2);

		camera_info_msg_.width = range_sensor_width;		
		camera_info_msg_.height = range_sensor_height;

		return true;
	}
Code Example #12
	/// Initializes and opens the time-of-flight camera sensor.
	/// @return <code>false</code> on failure, <code>true</code> otherwise
	bool init()
	{
		int camera_index = -1;
		std::string directory = "NULL/";
		std::string tmp_string = "NULL";

		/// Parameters are set within the launch file
		if (node_handle_.getParam("tof_camera/configuration_files", directory) == false)
		{
			ROS_ERROR("[tof_camera] Path to xml configuration for color camera not specified");
			return false;
		}

		/// Parameters are set within the launch file
		if (node_handle_.getParam("tof_camera/camera_index", camera_index) == false)
		{
			ROS_ERROR("[tof_camera] Tof camera index (0 or 1) not specified");
			return false;
		}

		/// Parameters are set within the launch file
		if (node_handle_.getParam("tof_camera/tof_camera_type", tmp_string) == false)
		{
			ROS_ERROR("[tof_camera] tof camera type not specified");
			return false;
		}
		if (tmp_string == "CAM_SWISSRANGER") tof_camera_ = ipa_CameraSensors::CreateRangeImagingSensor_Swissranger();
		else if (tmp_string == "CAM_PMDCAMCUBE")
		{
			ROS_ERROR("[tof_camera] tof camera type CAM_PMDCAMCUBE not yet implemented");
			return false;
		}
		else
		{
			std::string str = "[tof_camera] Camera type '" + tmp_string + "' unknown, try 'CAM_SWISSRANGER'";
			ROS_ERROR("%s", str.c_str());
			return false;
		}

		if (tof_camera_->Init(directory, camera_index) & ipa_CameraSensors::RET_FAILED)
		{
			std::stringstream ss;
			ss << "Initialization of tof camera ";
			ss << camera_index;
			ss << " failed";
			ROS_ERROR("[tof_camera] %s", ss.str().c_str());
			tof_camera_ = 0;
			return false;
		}

		if (tof_camera_->Open() & ipa_CameraSensors::RET_FAILED)
		{
			std::stringstream ss;
			ss << "Could not open tof camera ";
			ss << camera_index;
			ROS_ERROR("[tof_camera] %s", ss.str().c_str());
			tof_camera_ = 0;
			return false;
		}

		/// Advertise service for other nodes to set intrinsic calibration parameters
		camera_info_service_ = node_handle_.advertiseService("set_camera_info", &CobTofCameraNode::setCameraInfo, this);
		xyz_image_publisher_ = image_transport_.advertiseCamera("xyz_tof_data", 1);
		grey_image_publisher_ = image_transport_.advertiseCamera("grey_tof_data", 1);

		return true;
	}
Code Example #13
File: all_cameras.cpp Project: ipa-mars/care-o-bot
	/// Opens the camera sensor
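	/// Loads parameters, initializes and opens each configured camera, fills the CameraInfo
	/// messages from the calibration toolbox, and advertises camera topics and set_camera_info
	/// services for the cameras that opened successfully.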
	bool init()
	{
		if (loadParameters() == false)
		{
			ROS_ERROR("[all_cameras] Could not read parameters from launch file");
			return false;
		}
		
		if (left_color_camera_ && (left_color_camera_->Init(config_directory_, 1) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of left camera (1) failed");
			left_color_camera_ = AbstractColorCameraPtr();
		}

		if (left_color_camera_ && (left_color_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening left color camera (1) failed");
			left_color_camera_ = AbstractColorCameraPtr();
		}
		if (left_color_camera_)
		{
			/// Read camera properties of range tof sensor
			int camera_index = 1;
			ipa_CameraSensors::t_cameraProperty cameraProperty;
			cameraProperty.propertyID = ipa_CameraSensors::PROP_CAMERA_RESOLUTION;
			left_color_camera_->GetProperty(&cameraProperty);
			int color_sensor_width = cameraProperty.cameraResolution.xResolution;
			int color_sensor_height = cameraProperty.cameraResolution.yResolution;
			cv::Size color_image_size(color_sensor_width, color_sensor_height);
			
			/// Setup camera toolbox
			ipa_CameraSensors::CameraSensorToolboxPtr color_sensor_toolbox = ipa_CameraSensors::CreateCameraSensorToolbox();
			color_sensor_toolbox->Init(config_directory_, left_color_camera_->GetCameraType(), camera_index, color_image_size);
	
			cv::Mat d = color_sensor_toolbox->GetDistortionParameters(left_color_camera_intrinsic_type_, left_color_camera_intrinsic_id_);
			left_color_camera_info_msg_.D[0] = d.at<double>(0, 0);
			left_color_camera_info_msg_.D[1] = d.at<double>(0, 1);
			left_color_camera_info_msg_.D[2] = d.at<double>(0, 2);
			left_color_camera_info_msg_.D[3] = d.at<double>(0, 3);
			left_color_camera_info_msg_.D[4] = 0;
	
			cv::Mat k = color_sensor_toolbox->GetIntrinsicMatrix(left_color_camera_intrinsic_type_, left_color_camera_intrinsic_id_);
			left_color_camera_info_msg_.K[0] = k.at<double>(0, 0);
			left_color_camera_info_msg_.K[1] = k.at<double>(0, 1);
			left_color_camera_info_msg_.K[2] = k.at<double>(0, 2);
			left_color_camera_info_msg_.K[3] = k.at<double>(1, 0);
			left_color_camera_info_msg_.K[4] = k.at<double>(1, 1);
			left_color_camera_info_msg_.K[5] = k.at<double>(1, 2);
			left_color_camera_info_msg_.K[6] = k.at<double>(2, 0);
			left_color_camera_info_msg_.K[7] = k.at<double>(2, 1);
			left_color_camera_info_msg_.K[8] = k.at<double>(2, 2);
	
			left_color_camera_info_msg_.width = color_sensor_width;		
			left_color_camera_info_msg_.height = color_sensor_height;		
		}

		if (right_color_camera_ && (right_color_camera_->Init(config_directory_, 0) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of right camera (0) failed");
			right_color_camera_ = AbstractColorCameraPtr();
		}

		if (right_color_camera_ && (right_color_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening right color camera (0) failed");
			right_color_camera_ = AbstractColorCameraPtr();
		}
		if (right_color_camera_)
		{
			int camera_index = 0;
			/// Read camera properties of range tof sensor
			ipa_CameraSensors::t_cameraProperty cameraProperty;
			cameraProperty.propertyID = ipa_CameraSensors::PROP_CAMERA_RESOLUTION;
			right_color_camera_->GetProperty(&cameraProperty);
			int color_sensor_width = cameraProperty.cameraResolution.xResolution;
			int color_sensor_height = cameraProperty.cameraResolution.yResolution;
			cv::Size color_image_size(color_sensor_width, color_sensor_height);

			/// Setup camera toolbox
			ipa_CameraSensors::CameraSensorToolboxPtr color_sensor_toolbox = ipa_CameraSensors::CreateCameraSensorToolbox();
			color_sensor_toolbox->Init(config_directory_, right_color_camera_->GetCameraType(), camera_index, color_image_size);
	
			cv::Mat d = color_sensor_toolbox->GetDistortionParameters(right_color_camera_intrinsic_type_, right_color_camera_intrinsic_id_);
			right_color_camera_info_msg_.D[0] = d.at<double>(0, 0);
			right_color_camera_info_msg_.D[1] = d.at<double>(0, 1);
			right_color_camera_info_msg_.D[2] = d.at<double>(0, 2);
			right_color_camera_info_msg_.D[3] = d.at<double>(0, 3);
			right_color_camera_info_msg_.D[4] = 0;
	
			cv::Mat k = color_sensor_toolbox->GetIntrinsicMatrix(right_color_camera_intrinsic_type_, right_color_camera_intrinsic_id_);
			right_color_camera_info_msg_.K[0] = k.at<double>(0, 0);
			right_color_camera_info_msg_.K[1] = k.at<double>(0, 1);
			right_color_camera_info_msg_.K[2] = k.at<double>(0, 2);
			right_color_camera_info_msg_.K[3] = k.at<double>(1, 0);
			right_color_camera_info_msg_.K[4] = k.at<double>(1, 1);
			right_color_camera_info_msg_.K[5] = k.at<double>(1, 2);
			right_color_camera_info_msg_.K[6] = k.at<double>(2, 0);
			right_color_camera_info_msg_.K[7] = k.at<double>(2, 1);
			right_color_camera_info_msg_.K[8] = k.at<double>(2, 2);
	
			right_color_camera_info_msg_.width = color_sensor_width;		
			right_color_camera_info_msg_.height = color_sensor_height;		
		}

		if (tof_camera_ && (tof_camera_->Init(config_directory_) & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Initialization of tof camera (0) failed");
			tof_camera_ = AbstractRangeImagingSensorPtr();
		}

		if (tof_camera_ && (tof_camera_->Open() & ipa_CameraSensors::RET_FAILED))
		{
			ROS_WARN("[all_cameras] Opening tof camera (0) failed");
			tof_camera_ = AbstractRangeImagingSensorPtr();
		}
		if (tof_camera_)
		{
			int camera_index = 0;
			/// Read camera properties of range tof sensor
			ipa_CameraSensors::t_cameraProperty cameraProperty;
			cameraProperty.propertyID = ipa_CameraSensors::PROP_CAMERA_RESOLUTION;
			tof_camera_->GetProperty(&cameraProperty);
			int range_sensor_width = cameraProperty.cameraResolution.xResolution;
			int range_sensor_height = cameraProperty.cameraResolution.yResolution;
			cv::Size rangeImageSize(range_sensor_width, range_sensor_height);
	
			/// Setup camera toolbox
			ipa_CameraSensors::CameraSensorToolboxPtr tof_sensor_toolbox = ipa_CameraSensors::CreateCameraSensorToolbox();
			tof_sensor_toolbox->Init(config_directory_, tof_camera_->GetCameraType(), camera_index, rangeImageSize);

			cv::Mat intrinsic_mat = tof_sensor_toolbox->GetIntrinsicMatrix(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			cv::Mat distortion_map_X = tof_sensor_toolbox->GetDistortionMapX(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			cv::Mat distortion_map_Y = tof_sensor_toolbox->GetDistortionMapY(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			tof_camera_->SetIntrinsics(intrinsic_mat, distortion_map_X, distortion_map_Y);

			cv::Mat d = tof_sensor_toolbox->GetDistortionParameters(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			tof_camera_info_msg_.D[0] = d.at<double>(0, 0);
			tof_camera_info_msg_.D[1] = d.at<double>(0, 1);
			tof_camera_info_msg_.D[2] = d.at<double>(0, 2);
			tof_camera_info_msg_.D[3] = d.at<double>(0, 3);
			tof_camera_info_msg_.D[4] = 0;
	
			cv::Mat k = tof_sensor_toolbox->GetIntrinsicMatrix(tof_camera_intrinsic_type_, tof_camera_intrinsic_id_);
			tof_camera_info_msg_.K[0] = k.at<double>(0, 0);
			tof_camera_info_msg_.K[1] = k.at<double>(0, 1);
			tof_camera_info_msg_.K[2] = k.at<double>(0, 2);
			tof_camera_info_msg_.K[3] = k.at<double>(1, 0);
			tof_camera_info_msg_.K[4] = k.at<double>(1, 1);
			tof_camera_info_msg_.K[5] = k.at<double>(1, 2);
			tof_camera_info_msg_.K[6] = k.at<double>(2, 0);
			tof_camera_info_msg_.K[7] = k.at<double>(2, 1);
			tof_camera_info_msg_.K[8] = k.at<double>(2, 2);

			tof_camera_info_msg_.width = range_sensor_width;		
			tof_camera_info_msg_.height = range_sensor_height;		
		}
	
		/// Topics and Services to publish
		if (left_color_camera_) 
		{
			// Adapt name according to camera type
			left_color_image_publisher_ = image_transport_.advertiseCamera(left_color_camera_ns_ + "/left/image_color", 1);
			left_color_camera_info_service_ = node_handle_.advertiseService(left_color_camera_ns_ + "/left/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
		if (right_color_camera_)
		{
			// Adapt name according to camera type
			right_color_image_publisher_ = image_transport_.advertiseCamera(right_color_camera_ns_ + "/right/image_color", 1);
			right_color_camera_info_service_ = node_handle_.advertiseService(right_color_camera_ns_ + "/right/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
		if (tof_camera_)
		{
			grey_tof_image_publisher_ = image_transport_.advertiseCamera(tof_camera_ns_ + "/image_grey", 1);
			xyz_tof_image_publisher_ = image_transport_.advertiseCamera(tof_camera_ns_ + "/image_xyz", 1);
			tof_camera_info_service_ = node_handle_.advertiseService(tof_camera_ns_ + "/set_camera_info", &CobAllCamerasNode::setCameraInfo, this);
		}
	
		return true;
	}
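
All of the examples above share the same core pattern around image_transport::ImageTransport::advertiseCamera(). The following is a minimal, self-contained sketch of that pattern for a ROS1 node (the node name, namespace, frame id, and publish rate are illustrative placeholders, not taken from any of the projects above): construct an ImageTransport on a node handle, call advertiseCamera() to obtain a CameraPublisher, and publish each sensor_msgs::Image together with its sensor_msgs::CameraInfo under a shared timestamp.

#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/CameraInfo.h>

int main(int argc, char** argv)
{
  ros::init(argc, argv, "camera_node");          // node name is a placeholder
  ros::NodeHandle nh("camera");                  // camera namespace, as in the examples above

  // advertiseCamera() pairs an "image_raw" publisher with a "camera_info" publisher.
  image_transport::ImageTransport it(nh);
  image_transport::CameraPublisher pub = it.advertiseCamera("image_raw", 1);

  ros::Rate rate(30.0);                          // illustrative frame rate
  while (ros::ok())
  {
    sensor_msgs::Image image;
    sensor_msgs::CameraInfo info;
    // ... fill image data and calibration info from the driver here ...
    image.header.stamp = ros::Time::now();
    image.header.frame_id = "camera_optical_frame";  // placeholder frame id
    info.header = image.header;                      // image and info should share stamp and frame

    pub.publish(image, info);                    // publishes image_raw and camera_info together
    ros::spinOnce();
    rate.sleep();
  }
  return 0;
}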