Example #1
bool DeinterlaceDriver::Capture( hal::CameraMsg& vImages )
{
    m_Message.Clear();
    m_Input->Capture( m_Message );

    if( m_Message.mutable_image(0)->type() != hal::PB_UNSIGNED_SHORT ) {
      std::cerr << "HAL: Error! Expecting image with depth of 16 bits." << std::endl;
      return false;
    }

    vImages.set_device_time( m_Message.device_time() );

    dc1394_deinterlace_stereo((uint8_t*)m_Message.mutable_image(0)->data().data(),
                              (uint8_t*)m_Buffer, m_nImgWidth*2, m_nImgHeight);

    const unsigned int nImgSize = m_nImgWidth * m_nImgHeight;

    hal::ImageMsg* pbImg = vImages.add_image();
    pbImg->set_width( m_nImgWidth );
    pbImg->set_height( m_nImgHeight );
    pbImg->set_data( m_Buffer, nImgSize );
    pbImg->set_type( hal::PB_UNSIGNED_BYTE );
    pbImg->set_format( m_Message.mutable_image(0)->format() );

    pbImg = vImages.add_image();
    pbImg->set_width( m_nImgWidth );
    pbImg->set_height( m_nImgHeight );
    pbImg->set_data( m_Buffer+nImgSize, nImgSize);
    pbImg->set_type( hal::PB_UNSIGNED_BYTE );
    pbImg->set_format( m_Message.mutable_image(0)->format() );

    return true;
}
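dc1394_deinterlace_stereo above splits the byte-interleaved stereo buffer into two consecutive 8-bit images inside m_Buffer, which the two add_image() calls then expose as separate channels of the same message. A minimal consumer-side sketch of that layout (driver and msg are hypothetical names; hal::CameraMsg and its accessors are the ones used in the examples on this page; error handling omitted):

hal::CameraMsg msg;
if( driver.Capture( msg ) ) {
    // Two 8-bit grayscale channels: the first and second deinterlaced views.
    const hal::ImageMsg& first  = msg.image(0);
    const hal::ImageMsg& second = msg.image(1);
    // first.data() and second.data() each hold width*height bytes.
}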
Example #2
bool OpenCVDriver::Capture(hal::CameraMsg& images_msg) {
  if(!cam_.isOpened()) {
    std::cerr << "HAL: Error reading from camera." << std::endl;
    return false;
  }
  images_msg.set_device_time(Tic());

  cv::Mat temp;
  bool success = cam_.read(temp);
  hal::ImageMsg* pbImg = images_msg.add_image();
  pbImg->set_type(hal::PB_UNSIGNED_BYTE);
  pbImg->set_height(img_height_);
  pbImg->set_width(img_width_);
  pbImg->set_format(force_greyscale_ ? hal::PB_LUMINANCE : hal::PB_BGR);

  if (!success) return false;

  // This may not store the image in contiguous memory which PbMsgs
  // requires, so we might need to copy it
  cv::Mat cv_image;
  if(force_greyscale_) {
    // Frames from cv::VideoCapture arrive as BGR, so convert with CV_BGR2GRAY.
    cvtColor(temp, cv_image, CV_BGR2GRAY);
  } else if (!temp.isContinuous()) {
    temp.copyTo(cv_image);
  } else {
    cv_image = temp;
  }

  pbImg->set_data(static_cast<const unsigned char*>(cv_image.data),
                  cv_image.elemSize() * cv_image.total());
  return true;
}
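The continuity guard above matters because set_data() expects one flat pixel buffer, while a cv::Mat that is a view may carry row padding; cv::Mat::clone() is the usual way to force dense storage. A standalone sketch of that guard (plain OpenCV; the helper name MakeContiguous is hypothetical):

#include <opencv2/core/core.hpp>

// Return a matrix whose pixels are guaranteed to occupy one contiguous block.
cv::Mat MakeContiguous(const cv::Mat& m) {
  return m.isContinuous() ? m : m.clone();  // clone() always allocates densely
}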
Example #3
bool UvcDriver::Capture( hal::CameraMsg& vImages )
{
    vImages.Clear();

    uvc_frame_t* frame = NULL;
    uvc_error_t err = uvc_get_frame(devh_, &frame, 0);
    if( err != UVC_SUCCESS ) {
        uvc_perror(err, "uvc_get_frame");
    } else if( frame ) {
        hal::ImageMsg* pimg = vImages.add_image();
        pimg->set_type( (hal::Type) pbtype );
        pimg->set_format( (hal::Format) pbformat );
        pimg->set_width(frame->width);
        pimg->set_height(frame->height);
        pimg->set_data(frame->data, width_ * height_);
        return true;
    } else {
        std::cout << "No data..." << std::endl;
    }

    // No frame was delivered, so report failure rather than success.
    return false;
}
Example #4
bool JoinCameraDriver::Capture( hal::CameraMsg& vImages )
{
  vImages.Clear();
  const double time = Tic();
  vImages.set_system_time(time);
  vImages.set_device_time(time);
  unsigned activeWorkerCount = 0;

  std::vector<hal::CameraMsg>& results = m_WorkTeam.process();

  int ixResult = 0;
  for( hal::CameraMsg& result : results ) {
    if( m_WorkTeam.m_bWorkerCaptureNotOver[ixResult] ) {
      for( int i = 0; i < result.image_size(); i++ ) {
        vImages.add_image()->Swap(result.mutable_image(i));
      }
      result.Clear();
      // Only workers that actually delivered a capture count as active.
      activeWorkerCount++;
    }
    ixResult++;
  }
  
  return activeWorkerCount == results.size();
}
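The Swap() call above is what keeps the join cheap: protobuf's generated Message::Swap exchanges the contents of two messages, so each image payload is moved into vImages rather than copied. A minimal sketch of the same move between two messages (src and dst are hypothetical; the field accessors are the ones used throughout these examples):

hal::CameraMsg src;   // filled by one worker's Capture()
hal::CameraMsg dst;   // the joined output
for( int i = 0; i < src.image_size(); ++i ) {
  // Swap steals the image payload from src; no pixel data is copied.
  dst.add_image()->Swap( src.mutable_image(i) );
}
src.Clear();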
Example #5
bool ConvertDriver::Capture( hal::CameraMsg& vImages )
{
  m_Message.Clear();
  bool srcGood = m_Input->Capture( m_Message );

  if (!srcGood)
    return false;
  
  // Guess source color coding.
  if( m_nCvType.empty() ) {
    for(int i = 0; i < m_Message.image_size(); ++i) {
      int cvtype = -1;
      hal::Format pbtype = m_Message.image(i).format();
      int channels = 0;

      if( m_Message.image(i).format() == hal::PB_LUMINANCE )
        channels = 1;
      else if( m_Message.image(i).format() == hal::PB_RGB ||
               m_Message.image(i).format() == hal::PB_BGR )
        channels = 3;

      if( channels != 0 ) {
        if( m_Message.image(i).type() == hal::PB_BYTE ||
            m_Message.image(i).type() == hal::PB_UNSIGNED_BYTE )
          cvtype = (channels == 1 ? CV_8UC1 : CV_8UC3);
        else if( m_Message.image(i).type() == hal::PB_UNSIGNED_SHORT ||
                 m_Message.image(i).type() == hal::PB_SHORT )
          cvtype = (channels == 1 ? CV_16UC1 : CV_16UC3);
        else if( m_Message.image(i).type() == hal::PB_FLOAT )
          cvtype = (channels == 1 ? CV_32FC1 : CV_32FC3);
      }

      m_nCvType.push_back(cvtype);
      m_nPbType.push_back(pbtype);

      if( cvtype == -1 ) {
        std::cerr << "HAL: Error! Could not guess source color coding of "
                     "channel " << i << ". Is it RAW?" << std::endl;
      }
    }
  }

  // Prepare return images.
  vImages.set_device_time(m_Message.device_time());
  vImages.set_system_time(m_Message.system_time());

  for(size_t ii = 0; ii < m_nNumChannels; ++ii) {
    hal::ImageMsg* pbImg = vImages.add_image();

    // If the user has specified to convert a single channel only,
    // gate it here
    if (m_iChannel != -1) {
      if (m_iChannel != (int)ii) {
        *pbImg = m_Message.image(ii);
        continue;
      }
    }

    if( m_nCvType[ii] == -1 ) { // this image cannot be converted
      *pbImg = m_Message.image(ii);
      continue;
    }

    const bool resize_requested = (m_Dims.x != 0 || m_Dims.y != 0);
    size_t final_width, final_height;
    if (resize_requested) {
      final_width = m_Dims.x;
      final_height = m_Dims.y;
    } else {
      final_width = m_nOrigImgWidth[ii];
      final_height = m_nOrigImgHeight[ii];
    }
    pbImg->set_width( final_width );
    pbImg->set_height( final_height );
    pbImg->set_type( hal::PB_UNSIGNED_BYTE );
    pbImg->set_format( m_nOutPbType );
    pbImg->mutable_data()->resize(final_width * final_height *
                                 (m_nOutCvType == CV_8UC1 ? 1 : 3) );

    pbImg->set_timestamp( m_Message.image(ii).timestamp() );
    pbImg->set_serial_number( m_Message.image(ii).serial_number() );

    cv::Mat s_origImg(m_nOrigImgHeight[ii], m_nOrigImgWidth[ii], m_nCvType[ii],
                   (void*)m_Message.mutable_image(ii)->data().data());

    cv::Mat sImg;
    if (resize_requested) {
      cv::resize(s_origImg, sImg, cv::Size(final_width, final_height));
      m_nImgWidth[ii] = final_width;
      m_nImgHeight[ii] = final_height;
    } else {
      sImg = s_origImg;
    }

    cv::Mat dImg(final_height, final_width, m_nOutCvType,
                   (void*)pbImg->mutable_data()->data());

    // note: cv::cvtColor cannot convert between depth types and
    // cv::Mat::convertTo cannot change the number of channels
    cv::Mat aux;
    switch( m_nCvType[ii] ) {
      case CV_8UC1:
        if( m_nOutCvType == CV_8UC1 )
          std::copy(sImg.begin<unsigned char>(), sImg.end<unsigned char>(),
                    dImg.begin<unsigned char>());
        else
          cv::cvtColor(sImg, dImg,
            (m_nOutPbType == hal::Format::PB_RGB ? CV_GRAY2RGB : CV_GRAY2BGR));
        break;

      case CV_8UC3:
        if( m_nOutCvType == CV_8UC1 )
          cv::cvtColor(sImg, dImg,
            (m_nPbType[ii] == hal::Format::PB_RGB ? CV_RGB2GRAY : CV_BGR2GRAY));
        else {
          if( m_nPbType[ii] == m_nOutPbType )
            std::copy(sImg.begin<unsigned char>(), sImg.end<unsigned char>(),
                      dImg.begin<unsigned char>());
          else
            cv::cvtColor(sImg, dImg,
              (m_nPbType[ii] == hal::Format::PB_RGB ? CV_RGB2BGR : CV_BGR2RGB));
        }
        break;

      case CV_16UC1:
        sImg.convertTo(aux, CV_64FC1);
        if( m_nOutCvType == CV_8UC1 )
          aux.convertTo(dImg, CV_8UC1, 255. / m_dRange);
        else {
          aux.convertTo(aux, CV_8UC1, 255. / m_dRange);
          cv::cvtColor(aux, dImg,
            (m_nOutPbType == hal::Format::PB_RGB ? CV_GRAY2RGB : CV_GRAY2BGR));
        }
        break;

      case CV_16UC3:
        sImg.convertTo(aux, CV_64FC3);
        if( m_nOutCvType == CV_8UC1 ) {
          aux.convertTo(aux, CV_8UC3, 255. / m_dRange);
          cv::cvtColor(aux, dImg,
            (m_nPbType[ii] == hal::Format::PB_RGB ? CV_RGB2GRAY : CV_BGR2GRAY));
        } else {
          if( m_nPbType[ii] == m_nOutPbType )
            aux.convertTo(dImg, CV_8UC3, 255. / m_dRange);
          else {
            aux.convertTo(aux, CV_8UC3, 255. / m_dRange);
            cv::cvtColor(aux, dImg,
              (m_nPbType[ii] == hal::Format::PB_RGB ? CV_RGB2BGR : CV_BGR2RGB));
          }
        }
        break;

      case CV_32FC1:
        if( m_nOutCvType == CV_8UC1 ) {
          sImg.convertTo(dImg, CV_8UC1, 255. / m_dRange);
        } else {
          sImg.convertTo(aux, CV_8UC1, 255. / m_dRange);
          cv::cvtColor(aux, dImg,
            (m_nOutPbType == hal::Format::PB_RGB ? CV_GRAY2RGB : CV_GRAY2BGR));
        }
        break;

      case CV_32FC3:
        if( m_nOutCvType == CV_8UC1 ) {
          sImg.convertTo(aux, CV_8UC3, 255. / m_dRange);
          cv::cvtColor(aux, dImg,
            (m_nPbType[ii] == hal::Format::PB_RGB ? CV_RGB2GRAY : CV_BGR2GRAY));
        } else {
          if( m_nPbType[ii] == m_nOutPbType )
            sImg.convertTo(dImg, CV_8UC3, 255. / m_dRange);
          else {
            sImg.convertTo(aux, CV_8UC3, 255. / m_dRange);
            cv::cvtColor(aux, dImg,
              (m_nPbType[ii] == hal::Format::PB_RGB ? CV_RGB2BGR : CV_BGR2RGB));
          }
        }
        break;
    }
  }

  return true;
}
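The switch above follows the constraint stated in its comment: cv::cvtColor changes the channel count but not the bit depth, while cv::Mat::convertTo changes the depth but not the channel count, so 16-bit and floating-point color sources are converted in two steps. An isolated sketch of that path (plain OpenCV; the image size and the full 16-bit range used for scaling are assumptions):

// Reduce a 16-bit, 3-channel image to 8-bit grayscale in two steps.
cv::Mat src16(480, 640, CV_16UC3, cv::Scalar(0, 0, 0));
cv::Mat tmp8, gray8;
src16.convertTo(tmp8, CV_8UC3, 255.0 / 65535.0);  // step 1: depth 16u -> 8u
cv::cvtColor(tmp8, gray8, CV_BGR2GRAY);           // step 2: channels 3 -> 1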
Example #6
bool UndistortDriver::Capture( hal::CameraMsg& vImages )
{
  m_InMsg.Clear();
  const bool success = m_Input->Capture( m_InMsg );

  // Sanity check.
  if (static_cast<size_t>(m_InMsg.image_size()) > m_CamModel.size()) {
    fprintf(stderr, "HAL: Error! Expecting %d images but captured %d\n",
             static_cast<int>(m_CamModel.size()), m_InMsg.image_size());
    return false;
  }

  //Transfer the container's timestamps
  vImages.set_device_time(m_InMsg.device_time());
  vImages.set_system_time(m_InMsg.system_time());
  
  if(success) {
    for (int ii = 0; ii < m_InMsg.image_size(); ++ii) {
  
      hal::Image inimg = hal::Image(m_InMsg.image(ii));
      hal::ImageMsg* pimg = vImages.add_image();
      pimg->set_width(inimg.Width());
      pimg->set_height(inimg.Height());

      pimg->set_type( (hal::Type)inimg.Type());
      pimg->set_format( (hal::Format)inimg.Format());
      
      //Transfer the timestamps from the source to the destination
      pimg->set_timestamp(inimg.Timestamp());
      
      unsigned int num_channels = 1;
      if (pimg->format() == hal::PB_LUMINANCE) {
        num_channels = 1;
      } else if (pimg->format() == hal::PB_BGRA ||
                 pimg->format() == hal::PB_RGBA) {
        num_channels = 4;
      } else {
        num_channels = 3;
      }

      hal::Image img = hal::Image(*pimg);

      if (pimg->type() == hal::PB_UNSIGNED_BYTE) {
        pimg->mutable_data()->resize(inimg.Width() * inimg.Height() *
                                     sizeof(unsigned char) * num_channels);
        calibu::Rectify<unsigned char>(
              m_vLuts[ii], inimg.data(),
              reinterpret_cast<unsigned char*>(&pimg->mutable_data()->front()),
              img.Width(), img.Height(), num_channels);
      } else if (pimg->type() == hal::PB_FLOAT) {
        pimg->mutable_data()->resize(inimg.Width() * inimg.Height() *
                                     sizeof(float) * num_channels);
        calibu::Rectify<float>(
              m_vLuts[ii], (float*)inimg.data(),
              reinterpret_cast<float*>(&pimg->mutable_data()->front()),
              img.Width(), img.Height(), num_channels);
      }
    }
  }

  return success;
}
Example #7
bool OpenNI2Driver::Capture( hal::CameraMsg& vImages )
{
  int changedIndex;
  openni::Status rc;
  static double d0;

  rc = openni::OpenNI::waitForAnyStream( &m_streams[0], m_streams.size(), &changedIndex );
  if (rc != openni::STATUS_OK) {
    printf("Wait failed\n");
    return false;
  }

  
  if( m_streams[changedIndex] == &m_depthStream ){
    m_depthStream.readFrame(&m_depthFrame);
    if(  m_colorStream.isValid() ){ 
      m_colorStream.readFrame(&m_colorFrame);
    }
    if( m_irStream.isValid() ){ 
      m_irStream.readFrame(&m_irFrame);
    }
  }

  if( m_streams[changedIndex] == &m_colorStream ){
    m_colorStream.readFrame(&m_colorFrame);
    if( m_depthStream.isValid() ){
      m_depthStream.readFrame(&m_depthFrame);
    } 
    if( m_irStream.isValid() ){ 
      m_irStream.readFrame(&m_irFrame);
    }
  }
  
  if( m_streams[changedIndex] == &m_irStream ){
    if( m_irStream.isValid() ){
      m_irStream.readFrame(&m_irFrame);
    }
  }
  
  // if the channel is on, make sure we get the image
  if( m_colorStream.isValid() ){
    if( !m_colorFrame.isValid() ){
      return false;
    }
  }
  if( m_depthStream.isValid() ){
    if( !m_depthFrame.isValid() ){
      return false;
    }
  }
  if( m_irStream.isValid() ){
    if( !m_irFrame.isValid() ){
      return false;
    }
  }

  //LOG(INFO) << m_dev_sn << " capturing at\t" <<  1000.0/hal::TocMS(d0) << "hz\n";

  if( m_colorStream.isValid() ){ 
    hal::ImageMsg* im = vImages.add_image();
    im->set_timestamp( m_colorFrame.getTimestamp() );
    im->set_width( m_width );
    im->set_height( m_height );
    im->set_type( hal::PB_UNSIGNED_BYTE );
    im->set_format( hal::PB_RGB );
    im->set_data( m_colorFrame.getData(), m_colorFrame.getDataSize() );
  }

  if( m_depthStream.isValid() ){
    hal::ImageMsg* im = vImages.add_image();
    im->set_timestamp( m_depthFrame.getTimestamp() );
    im->set_width( m_width );
    im->set_height( m_height );
    im->set_type( hal::PB_UNSIGNED_SHORT );
    im->set_format( hal::PB_LUMINANCE );
    im->set_data( m_depthFrame.getData(), m_depthFrame.getDataSize() );
  }

  if( m_irStream.isValid() ){
    hal::ImageMsg* im = vImages.add_image();
    im->set_timestamp( m_irFrame.getTimestamp() );
    im->set_width( m_width );
    im->set_height( m_height );
    im->set_type( hal::PB_UNSIGNED_SHORT);
    im->set_format( hal::PB_LUMINANCE );
    uint16_t *scaled = AutoScale(m_irFrame.getData(), m_irFrame.getHeight()*m_irFrame.getWidth());
    im->set_data( scaled, 2*m_irFrame.getHeight()*m_irFrame.getWidth() );
    delete[] scaled;
  }

  if (m_sCameraModel.size() > 0)
    SoftwareAlign(vImages);
  
  return true;
}
Example #8
bool XimeaDriver::Capture(hal::CameraMsg& images)
{
  XI_RETURN error = XI_OK;

  // Software sync -- all cameras.
  if (sync_ == 1) {
    for (HANDLE handle: cam_handles_) {
      xiSetParamInt(handle, XI_PRM_TRG_SOFTWARE, 1);
    }
  }

  // Hardware sync -- only first camera.
  if (sync_ == 2) {
    // Trigger acquisition on Master camera
    xiSetParamInt(cam_handles_[0], XI_PRM_TRG_SOFTWARE, 1);
  }

  // Grab data.
  for (HANDLE handle: cam_handles_) {
    error = xiGetImage(handle, 100, &image_);

    // Bail out if the grab timed out (XI_TIMEOUT == 10).
    if (error == XI_TIMEOUT) {
      return false;
    }
    _CheckError(error, "xiGetImage");

    // Allocate memory.
    hal::ImageMsg* pb_img = images.add_image();

    // Set timestamp from camera.
    images.set_device_time(image_.tsSec + 1e-6*image_.tsUSec);
    images.set_system_time(hal::Tic());
 
    pb_img->set_width(image_.width);
    pb_img->set_height(image_.height);

    if (image_format_ == XI_RAW8) {
      pb_img->set_data(image_.bp, image_width_ * image_height_);
      pb_img->set_type(hal::PB_UNSIGNED_BYTE);
      pb_img->set_format(hal::PB_LUMINANCE);
    } else if (image_format_ == XI_RAW16) {
      pb_img->set_data(image_.bp, 2 * image_width_ * image_height_);
      pb_img->set_type(hal::PB_UNSIGNED_SHORT);
      pb_img->set_format(hal::PB_LUMINANCE);
    } else if (image_format_ == XI_MONO8) {
      pb_img->set_data(image_.bp, image_width_ * image_height_);
      pb_img->set_type(hal::PB_UNSIGNED_BYTE);
      pb_img->set_format(hal::PB_LUMINANCE);
    } else if (image_format_ == XI_MONO16) {
      pb_img->set_data(image_.bp, 2 * image_width_ * image_height_);
      pb_img->set_type(hal::PB_UNSIGNED_SHORT);
      pb_img->set_format(hal::PB_LUMINANCE);
    } else if (image_format_ == XI_RGB24) {
      pb_img->set_data(image_.bp, 3 * image_width_ * image_height_);
      pb_img->set_type(hal::PB_UNSIGNED_BYTE);
      pb_img->set_format(hal::PB_BGR);
    } else if (image_format_ == XI_RGB32) {
      pb_img->set_data(image_.bp, 4 * image_width_ * image_height_);
      pb_img->set_type(hal::PB_UNSIGNED_BYTE);
      pb_img->set_format(hal::PB_RGBA);
    } else {
      std::cerr << "Image format not supported." << std::endl;
    }

  }

  images.set_system_time(hal::Tic());

  return error == XI_OK;

}
Example #9
bool Freenect2Driver::Capture(hal::CameraMsg& vImages) {
	vImages.Clear();
	vImages.set_device_time(Tic());

	libfreenect2::FrameMap frames;
	libfreenect2::FrameMap::const_iterator fit;

	for (size_t i = 0; i < m_devices.size(); ++i) {
		m_devices[i].listener->waitForNewFrame(frames);
		const double time = Tic();
		bool save_rgb = false;

		if ((fit = frames.find(libfreenect2::Frame::Color)) != frames.end()) {
			hal::ImageMsg* pbImg = vImages.add_image();
			pbImg->set_timestamp(time);
			pbImg->set_width(m_nImgWidth);
			pbImg->set_height(m_nImgHeight);
			pbImg->set_serial_number(m_devices[i].serialNumber);

			
			pbImg->set_type(hal::PB_UNSIGNED_BYTE);
			if (m_bColor)
				pbImg->set_format(hal::PB_BGR);
			else
				pbImg->set_format(hal::PB_LUMINANCE);

			const libfreenect2::Frame* frame = fit->second;

			cv::Mat rgbaTrg(frame->height, frame->width, CV_8UC4, frame->data);
			//From http://fixall.online/well--basically-cvtcolor-only-works-with-a-fixed-set/2220695/
			
			std::vector<cv::Mat1b> channels_rgba;
			cv::split(rgbaTrg, channels_rgba);
			// create matrix with first three channels only
			std::vector<cv::Mat1b> channels_rgb(channels_rgba.begin(),
							    channels_rgba.begin()+3);
			cv::Mat3b trg;
			cv::merge(channels_rgb, trg);

			if (frame->height != m_nImgHeight || frame->width != m_nImgWidth)
				cv::resize(trg, trg, cv::Size(m_nImgWidth, m_nImgHeight));
			cv::flip(trg, trg, 1);
			cv::Mat1b gray;
			if (!m_bColor) {
				cv::cvtColor(trg, gray, CV_BGR2GRAY, 1);
				// Only copy the single gray plane.
				pbImg->set_data(gray.ptr<unsigned char>(),
						gray.rows * gray.cols * 1);
			} else {
				pbImg->set_data(trg.ptr<unsigned char>(),
						trg.rows * trg.cols * trg.channels());
			}
			save_rgb = true;
		}

		if ((fit = frames.find(libfreenect2::Frame::Ir)) != frames.end()) {
			const libfreenect2::Frame* frame = fit->second;
			hal::ImageMsg* pbImg = vImages.add_image();
			pbImg->set_timestamp(time);
			pbImg->set_width(frame->width);
			pbImg->set_height(frame->height);
			pbImg->set_serial_number(m_devices[i].serialNumber);

			pbImg->set_type(hal::PB_FLOAT);
			pbImg->set_format(hal::PB_LUMINANCE);

			cv::Mat trg(frame->height, frame->width, CV_32F, frame->data);
			cv::flip(trg, trg, 1);
			pbImg->set_data(trg.ptr<float>(),
					trg.rows * trg.cols * sizeof(float));
		}

		if ((fit = frames.find(libfreenect2::Frame::Depth)) != frames.end()) {
			const libfreenect2::Frame* frame = fit->second;
			cv::Mat trg = cv::Mat(frame->height, frame->width, CV_32FC1,
					frame->data);

			if (save_rgb && m_bAlign) {
				if (m_nImgHeight != IR_IMAGE_HEIGHT
						|| m_nImgWidth != IR_IMAGE_WIDTH)
					m_devices[i].registration->depthToRGBResolution(trg, trg);
			}

			// change rgb and depth image
			hal::ImageMsg* pbImg = vImages.add_image();
			pbImg->set_timestamp(time);
			pbImg->set_width(trg.cols);
			pbImg->set_height(trg.rows);
			pbImg->set_serial_number(m_devices[i].serialNumber);

			pbImg->set_type(hal::PB_FLOAT);
			pbImg->set_format(hal::PB_LUMINANCE);

			cv::flip(trg, trg, 1);
			pbImg->set_data(trg.ptr<float>(),
					trg.rows * trg.cols * sizeof(float));
		}

		m_devices[i].listener->release(frames);
	}

	return true;
}
Example #10
bool Freenect2Driver::Capture(hal::CameraMsg& vImages) {
	vImages.Clear();
	vImages.set_device_time(Tic());

	libfreenect2::FrameMap frames;
	libfreenect2::FrameMap::const_iterator fit;

	for (size_t i = 0; i < m_devices.size(); ++i) {
		m_devices[i].listener->waitForNewFrame(frames);
		const double time = Tic();
		bool save_rgb = false;

		if ((fit = frames.find(libfreenect2::Frame::Color)) != frames.end()) {
			hal::ImageMsg* pbImg = vImages.add_image();
			pbImg->set_timestamp(time);
			pbImg->set_width(m_nImgWidth);
			pbImg->set_height(m_nImgHeight);
			pbImg->set_serial_number(m_devices[i].serialNumber);

			pbImg->set_type(hal::PB_UNSIGNED_BYTE);
			if (m_bColor)
				pbImg->set_format(hal::PB_BGR);
			else
				pbImg->set_format(hal::PB_LUMINANCE);

			const libfreenect2::Frame* frame = fit->second;
			cv::Mat trg(frame->height, frame->width, CV_8UC3, frame->data);
			if (frame->height != m_nImgHeight || frame->width != m_nImgWidth)
				cv::resize(trg, trg, cv::Size(m_nImgWidth, m_nImgHeight));
			if (!m_bColor)
				cv::cvtColor(trg, trg, CV_BGR2GRAY);
			cv::flip(trg, trg, 1);

			pbImg->set_data(trg.ptr<unsigned char>(),
					trg.rows * trg.cols * trg.channels());
			save_rgb = true;
		}

		if ((fit = frames.find(libfreenect2::Frame::Ir)) != frames.end()) {
			const libfreenect2::Frame* frame = fit->second;
			hal::ImageMsg* pbImg = vImages.add_image();
			pbImg->set_timestamp(time);
			pbImg->set_width(frame->width);
			pbImg->set_height(frame->height);
			pbImg->set_serial_number(m_devices[i].serialNumber);

			pbImg->set_type(hal::PB_FLOAT);
			pbImg->set_format(hal::PB_LUMINANCE);

			cv::Mat trg(frame->height, frame->width, CV_32F, frame->data);
			cv::flip(trg, trg, 1);
			pbImg->set_data(trg.ptr<float>(),
					trg.rows * trg.cols * sizeof(float));
		}

		if ((fit = frames.find(libfreenect2::Frame::Depth)) != frames.end()) {
			const libfreenect2::Frame* frame = fit->second;
			cv::Mat trg = cv::Mat(frame->height, frame->width, CV_32FC1,
					frame->data);

			if (save_rgb && m_bAlign) {
				if (m_nImgHeight != IR_IMAGE_HEIGHT
						|| m_nImgWidth != IR_IMAGE_WIDTH)
					m_devices[i].registration->depthToRGBResolution(trg, trg);
			}

			// change rgb and depth image
			hal::ImageMsg* pbImg = vImages.add_image();
			pbImg->set_timestamp(time);
			pbImg->set_width(trg.cols);
			pbImg->set_height(trg.rows);
			pbImg->set_serial_number(m_devices[i].serialNumber);

			pbImg->set_type(hal::PB_FLOAT);
			pbImg->set_format(hal::PB_LUMINANCE);

			cv::flip(trg, trg, 1);
			pbImg->set_data(trg.ptr<float>(),
					trg.rows * trg.cols * sizeof(float));
		}

		m_devices[i].listener->release(frames);
	}

	return true;
}