Example #1
void RealSenseDriver::Start(int vid, int pid, char* sn)
{
  //The RealSense camera can deliver separate RGB and depth streams at different resolutions.
  //For now, use 640x480 for each (note: the RGB format request below asks for 60 fps, while depth uses fps_ = 30).
  //Depth is exposed through separate streams with non-standard UVC GUIDs - use the YUYV-advertised
  //GUID, which really just carries gray16 data.


    width_ = 640;
    height_ = 480;
    fps_ = 30;
    
    if(ctx_) {
        Stop();
    }
    
    uvc_init(&ctx_, NULL);
    if(!ctx_) {
        throw DeviceException("Unable to open UVC library context");
    }

    uvc_error_t find_err;
    if (sn)
      find_err = uvc_find_device(ctx_, &dev_, vid, pid, sn );
    else
      find_err = uvc_find_device(ctx_, &dev_, vid, pid, NULL);

    if (find_err != UVC_SUCCESS) {
        uvc_perror(find_err, "uvc_find_device");
        throw DeviceException("Unable to open RealSense device");
    }
    if(!dev_) {
        throw DeviceException("Unable to open RealSense Device - no pointer returned.");
    }
    
    uvc_error_t open_err = uvc_open2(dev_, &devh_rgb, 0); //Camera 0 is RGB
    if (open_err != UVC_SUCCESS) {
        uvc_perror(open_err, "uvc_open");
        uvc_unref_device(dev_);
        throw DeviceException("Unable to open RealSense RGB device");
    }

    open_err = uvc_open2(dev_, &devh_d, 1); //Camera 1 is depth
    if (open_err != UVC_SUCCESS) {
        uvc_perror(open_err, "uvc_open");
        uvc_unref_device(dev_);
        throw DeviceException("Unable to open RealSense Depth device");
    }
    
  
//    uvc_set_status_callback(devh_, &CameraDriver::AutoControlsCallbackAdapter, this);
    

    
    uvc_error_t mode_err = uvc_get_stream_ctrl_format_size(
                devh_rgb, &ctrl_rgb,
                UVC_COLOR_FORMAT_YUYV,
                width_, height_,
                60);
    
    pbtype_rgb = pb::PB_UNSIGNED_BYTE;
    pbformat_rgb = pb::PB_RGB;
        
    if (mode_err != UVC_SUCCESS) {
        uvc_perror(mode_err, "uvc_get_stream_ctrl_format_size");
        uvc_close(devh_rgb);
        uvc_close(devh_d);
        uvc_unref_device(dev_);
        throw DeviceException("Unable to get RGB device mode.");
    }

    //Use the same frame parameters for both RGB and depth (for now)
    if (useIR) {
        //Depth as mono8
        mode_err = uvc_get_stream_ctrl_format_size(
                    devh_d, &ctrl_d,
                    UVC_FRAME_FORMAT_INVI,
                    width_, height_,
                    fps_);

        pbtype_d = pb::PB_UNSIGNED_BYTE;
    } else {
        //Grayscale depth as yuyv, mono16
        mode_err = uvc_get_stream_ctrl_format_size(
                    devh_d, &ctrl_d,
                    //UVC_FRAME_FORMAT_INVI,
                    UVC_FRAME_FORMAT_YUYV,
                    width_, height_,
                    fps_);

        pbtype_d = pb::PB_UNSIGNED_SHORT;
    }
    
    pbformat_d = pb::PB_LUMINANCE;
        
    if (mode_err != UVC_SUCCESS) {
        uvc_perror(mode_err, "uvc_get_stream_ctrl_format_size");
        uvc_close(devh_rgb);
        uvc_close(devh_d);
        uvc_unref_device(dev_);
        throw DeviceException("Unable to get depth device mode.");
    }

    uvc_error_t stream_ctl_err = uvc_stream_open_ctrl(devh_rgb, &streamh_rgb, &ctrl_rgb);
   
    if (stream_ctl_err != UVC_SUCCESS) {
        uvc_perror(stream_ctl_err, "uvc_stream_open_ctrl");
        uvc_close(devh_rgb);
        uvc_close(devh_d);
        uvc_unref_device(dev_);
        throw DeviceException("Unable to open stream on RGB.");
    }

    stream_ctl_err = uvc_stream_open_ctrl(devh_d, &streamh_d, &ctrl_d);
   
    if (stream_ctl_err != UVC_SUCCESS) {
        uvc_perror(stream_ctl_err, "uvc_stream_open_ctrl");
        uvc_close(devh_rgb);
        uvc_close(devh_d);
        uvc_unref_device(dev_);
        throw DeviceException("Unable to open stream on depth.");
    }

    uvc_error_t stream_start_err = uvc_stream_start(streamh_rgb, NULL, this, 0);
    
    if (stream_start_err != UVC_SUCCESS) {
        uvc_perror(stream_start_err, "uvc_stream_start");
        uvc_close(devh_rgb);
        uvc_close(devh_d);
        uvc_unref_device(dev_);
        throw DeviceException("Unable to start streaming on rgb.");
    }

    stream_start_err = uvc_stream_start(streamh_d, NULL, this, 0);

    if (stream_start_err != UVC_SUCCESS) {
        uvc_perror(stream_start_err, "uvc_stream_start");
        uvc_close(devh_rgb);
        uvc_close(devh_d);
        uvc_unref_device(dev_);
        throw DeviceException("Unable to start streaming on depth.");
    }
    
    if (frame_rgb)
        uvc_free_frame(frame_rgb);
    if (frame_d)
        uvc_free_frame(frame_d);
    
    
    frame_rgb = uvc_allocate_frame(ctrl_rgb.dwMaxVideoFrameSize);
    if(!frame_rgb) {
        throw DeviceException("Unable to allocate RGB frame.");
    }

    frame_d = uvc_allocate_frame(ctrl_d.dwMaxVideoFrameSize);
    if(!frame_d) {
        throw DeviceException("Unable to allocate depth frame.");
    }

    if (useSync) {
        std::cout << "RealSense: Using SCR to sync" << std::endl;
    }
}
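The stream handles opened here are later drained by a getFrame() helper in the Capture() code further down (Example #12), which is not itself part of these listings. Because uvc_stream_start() is called with a NULL callback, frames have to be polled; a minimal sketch of such a helper, assuming stock libuvc's uvc_stream_get_frame(), might look like this:

//Hypothetical polled-read helper (the real getFrame() is not shown in these examples).
//uvc_stream_get_frame() is stock libuvc; the timeout argument is in microseconds and
//its blocking behaviour for special values varies between libuvc versions.
uvc_error_t RealSenseDriver::getFrame(uvc_stream_handle_t* streamh, uvc_frame_t** frame)
{
    *frame = NULL;
    //On success *frame points at a buffer owned by the stream handle - do not free it.
    return uvc_stream_get_frame(streamh, frame, -1 /* assumed: return immediately */);
}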
Example #2
// changed to return original frame instead of returning converted frame even if convert_func is not null.
uvc_frame_t *UVCPreview::draw_preview_one(uvc_frame_t *frame, ANativeWindow **window, convFunc_t convert_func, int pixcelBytes) {
	// ENTER();

	int b = 0;
	pthread_mutex_lock(&preview_mutex);
	{
		b = *window != NULL;
	}
	pthread_mutex_unlock(&preview_mutex);
	if (LIKELY(b)) {
		uvc_frame_t *converted;
		if (convert_func) {
			converted = uvc_allocate_frame(frame->width * frame->height * pixcelBytes);
			if LIKELY(converted) {
				b = convert_func(frame, converted);
				if (!b) {
					pthread_mutex_lock(&preview_mutex);
					copyToSurface(converted, window);
					pthread_mutex_unlock(&preview_mutex);
				} else {
					LOGE("failed converting");
				}
				uvc_free_frame(converted);
			}
		} else {
			pthread_mutex_lock(&preview_mutex);
			copyToSurface(frame, window);
			pthread_mutex_unlock(&preview_mutex);
		}
	}
	return frame; // changed: always hand back the original frame
}
Example #3
//**********************************************************************
//
//**********************************************************************
void UVCPreview::uvc_preview_frame_callback(uvc_frame_t *frame, void *vptr_args) {
	UVCPreview *preview = reinterpret_cast<UVCPreview *>(vptr_args);
	if UNLIKELY(!preview->isRunning() || !frame || !frame->frame_format || !frame->data || !frame->data_bytes) return;
	if (UNLIKELY(
		((frame->frame_format != UVC_FRAME_FORMAT_MJPEG) && (frame->actual_bytes < preview->frameBytes))
		|| (frame->width != preview->frameWidth) || (frame->height != preview->frameHeight) )) {

#if LOCAL_DEBUG
		LOGD("broken frame!:format=%d,actual_bytes=%d/%d(%d,%d/%d,%d)",
			frame->frame_format, frame->actual_bytes, preview->frameBytes,
			frame->width, frame->height, preview->frameWidth, preview->frameHeight);
#endif
		return;
	}
	if (LIKELY(preview->isRunning())) {
		uvc_frame_t *copy = uvc_allocate_frame(frame->data_bytes);
		if (UNLIKELY(!copy)) {
#if LOCAL_DEBUG
			LOGE("uvc_callback:unable to allocate duplicate frame!");
#endif
			return;
		}
		uvc_error_t ret = uvc_duplicate_frame(frame, copy);
		if (UNLIKELY(ret)) {
			uvc_free_frame(copy);
			return;
		}
		preview->addPreviewFrame(copy);
	}
}
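The preceding examples lean on LIKELY()/UNLIKELY() branch hints, which are not part of libuvc. In the UVCCamera sources they are presumably thin wrappers around GCC's __builtin_expect, along these lines (the exact definitions are an assumption):

// Assumed branch-prediction hint macros (typically found in the project's utilbase.h,
// not in libuvc). The outer parentheses are what let spellings like
// "if LIKELY(converted) { ... }" above compile.
#ifndef LIKELY
#define LIKELY(x)   (__builtin_expect(!!(x), 1))
#endif
#ifndef UNLIKELY
#define UNLIKELY(x) (__builtin_expect(!!(x), 0))
#endif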
Example #4
void UVCPreview::do_preview(uvc_stream_ctrl_t *ctrl) {
	ENTER();

	uvc_frame_t *frame = NULL;
	uvc_frame_t *frame_mjpeg = NULL;
	uvc_error_t result = uvc_start_iso_streaming(
		mDeviceHandle, ctrl, uvc_preview_frame_callback, (void *)this);

	if (LIKELY(!result)) {
		clearPreviewFrame();
		pthread_create(&capture_thread, NULL, capture_thread_func, (void *)this);

#if LOCAL_DEBUG
		LOGI("Streaming...");
#endif
		if (frameMode) {
			// MJPEG mode
			while (LIKELY(isRunning())) {
				frame_mjpeg = waitPreviewFrame();
				if (LIKELY(frame_mjpeg)) {
					frame = uvc_allocate_frame(frame_mjpeg->width * frame_mjpeg->height * 2);
					result = uvc_mjpeg2yuyv(frame_mjpeg, frame);   // MJPEG => yuyv
					uvc_free_frame(frame_mjpeg);
					if (LIKELY(!result)) {
						frame = draw_preview_one(frame, &mPreviewWindow, uvc_any2rgbx, 4);
						addCaptureFrame(frame);
					} else {
						uvc_free_frame(frame);
					}
				}
			}
		} else {
			// YUYV mode
			while (LIKELY(isRunning())) {
				frame = waitPreviewFrame();
				if (LIKELY(frame)) {
					frame = draw_preview_one(frame, &mPreviewWindow, uvc_any2rgbx, 4);
					addCaptureFrame(frame);
				}
			}
		}
		pthread_cond_signal(&capture_sync);
#if LOCAL_DEBUG
		LOGI("preview_thread_func:wait for all callbacks complete");
#endif
		uvc_stop_streaming(mDeviceHandle);
#if LOCAL_DEBUG
		LOGI("Streaming finished");
#endif
	} else {
		uvc_perror(result, "failed start_streaming");
	}

	EXIT();
}
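do_preview() hands frames between threads through clearPreviewFrame()/addPreviewFrame()/waitPreviewFrame(), none of which are shown here. A minimal sketch of such a frame queue, built on a pthread mutex and condition variable, could look like the following; the container, names, and shutdown behaviour are assumptions based only on how the functions are called in Examples #3 and #4:

// Hypothetical stand-ins for UVCPreview's frame queue (not the project's real code).
#include <deque>
#include <pthread.h>
#include "libuvc/libuvc.h"

static std::deque<uvc_frame_t *> preview_frames;                  // queued duplicated frames
static pthread_mutex_t queue_mutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t  queue_cond  = PTHREAD_COND_INITIALIZER;

void addPreviewFrame(uvc_frame_t *frame) {
	pthread_mutex_lock(&queue_mutex);
	preview_frames.push_back(frame);      // takes ownership of the duplicated frame
	pthread_cond_signal(&queue_cond);     // wake the preview thread
	pthread_mutex_unlock(&queue_mutex);
}

uvc_frame_t *waitPreviewFrame() {
	pthread_mutex_lock(&queue_mutex);
	while (preview_frames.empty())
		pthread_cond_wait(&queue_cond, &queue_mutex);  // mutex released while waiting
	uvc_frame_t *frame = preview_frames.front();
	preview_frames.pop_front();
	pthread_mutex_unlock(&queue_mutex);
	return frame;                         // caller frees it with uvc_free_frame()
}

A production version would also wake the waiter and return NULL on shutdown, which is what lets the while (isRunning()) loops in Example #4 terminate.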
Example #5
/* This callback function runs once per frame. Use it to perform any
 * quick processing you need, or have it put the frame into your application's
 * input queue. If this function takes too long, you'll start losing frames. */
void cb_rgb(uvc_frame_t *frame, void *ptr) {
  uvc_frame_t *bgr;
  uvc_error_t ret;

  /* We'll convert the image from YUV/JPEG to BGR, so allocate space */
  bgr = uvc_allocate_frame(frame->width * frame->height * 3);
  if (!bgr) {
    printf("unable to allocate bgr frame!");
    return;
  }

  /* Do the BGR conversion */
  ret = uvc_any2bgr(frame, bgr);
  if (ret) {
    uvc_perror(ret, "uvc_any2bgr");
    uvc_free_frame(bgr);
    return;
  }

  /* Call a user function:
   *
   * my_type *my_obj = (*my_type) ptr;
   * my_user_function(ptr, bgr);
   * my_other_function(ptr, bgr->data, bgr->width, bgr->height);
   */

  /* Call a C++ method:
   *
   * my_type *my_obj = (*my_type) ptr;
   * my_obj->my_func(bgr);
   */

  /* Use opencv.highgui to display the image: */
  IplImage* cvImg;
  cvImg = cvCreateImageHeader(
      cvSize(bgr->width, bgr->height),
      IPL_DEPTH_8U,
      3);

  cvSetData(cvImg, bgr->data, bgr->width * 3);

  cvNamedWindow("Test", CV_WINDOW_AUTOSIZE);
  cvShowImage("Test", cvImg);
  cvWaitKey(10);

  cvReleaseImageHeader(&cvImg);

  uvc_free_frame(bgr);
}
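cb_rgb above is meant to be passed to uvc_start_streaming() as the per-frame callback. A minimal, hedged sketch of the standard libuvc open/negotiate/stream sequence around it (VID/PID of 0 match any camera; 640x480 YUYV at 30 fps is just a placeholder request):

/* Sketch of wiring cb_rgb up with libuvc's callback-driven streaming. */
#include <unistd.h>            /* sleep() */
#include "libuvc/libuvc.h"

int run_rgb_preview(void *user_ptr) {
  uvc_context_t *ctx;
  uvc_device_t *dev;
  uvc_device_handle_t *devh;
  uvc_stream_ctrl_t ctrl;

  if (uvc_init(&ctx, NULL) < 0) return -1;
  if (uvc_find_device(ctx, &dev, 0, 0, NULL) < 0) { uvc_exit(ctx); return -1; }
  if (uvc_open(dev, &devh) < 0) { uvc_unref_device(dev); uvc_exit(ctx); return -1; }

  if (uvc_get_stream_ctrl_format_size(devh, &ctrl, UVC_FRAME_FORMAT_YUYV,
                                      640, 480, 30) == UVC_SUCCESS &&
      uvc_start_streaming(devh, &ctrl, cb_rgb, user_ptr, 0) == UVC_SUCCESS) {
    sleep(10);                 /* stream for a while; cb_rgb runs once per frame */
    uvc_stop_streaming(devh);
  }

  uvc_close(devh);
  uvc_unref_device(dev);
  uvc_exit(ctx);
  return 0;
}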
Example #6
void UvcVideo::Start()
{
    uvc_error_t stream_err = uvc_stream_start(strm_, NULL, this, 0);
    
    if (stream_err != UVC_SUCCESS) {
        uvc_perror(stream_err, "uvc_stream_start");
        uvc_close(devh_);
        uvc_unref_device(dev_);
        throw VideoException("Unable to start streaming.");
    }
    
    if (frame_) {
        uvc_free_frame(frame_);
    }
    
    size_bytes = ctrl_.dwMaxVideoFrameSize;
    frame_ = uvc_allocate_frame(size_bytes);
    if(!frame_) {
        throw VideoException("Unable to allocate frame.");
    }
}
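The matching teardown for UvcVideo::Start() is not listed. A sketch of what a Stop() could do with stock libuvc calls, reusing the member names from the example above (the ordering and member set are assumptions):

//Hypothetical counterpart to UvcVideo::Start(); not taken from the original source.
void UvcVideo::Stop()
{
    if (strm_) {
        uvc_stream_stop(strm_);    //halt transfers on this stream
        uvc_stream_close(strm_);   //release the stream handle
        strm_ = NULL;
    }
    if (frame_) {
        uvc_free_frame(frame_);
        frame_ = NULL;
    }
    if (devh_) {
        uvc_close(devh_);
        devh_ = NULL;
    }
    if (dev_) {
        uvc_unref_device(dev_);
        dev_ = NULL;
    }
}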
Example #7
uvc_frame_t *UVCPreview::draw_preview_one(uvc_frame_t *frame, ANativeWindow **window, convFunc_t convert_func, int pixcelBytes) {
	// ENTER();

	uvc_error_t ret;
	ANativeWindow_Buffer buffer;

	int b = 0;
	pthread_mutex_lock(&preview_mutex);
	{
		b = *window != NULL;
	}
	pthread_mutex_unlock(&preview_mutex);
	if (LIKELY(b)) {
		uvc_frame_t *converted;
		if (convert_func) {
			converted = uvc_allocate_frame(frame->width * frame->height * pixcelBytes);
			if LIKELY(converted) {
				b = convert_func(frame, converted);
				if (UNLIKELY(b)) {
					LOGE("failed converting");
					uvc_free_frame(converted);
					converted = NULL;
				}
			}
			// release original frame data(YUYV)
			uvc_free_frame(frame);
			frame = NULL;
		} else {
			converted = frame;
		}
		if (LIKELY(converted)) {
			// draw a frame to the view when success to convert
			pthread_mutex_lock(&preview_mutex);
			copyToSurface(converted, window);
			pthread_mutex_unlock(&preview_mutex);
			return converted; // RETURN(converted, uvc_frame_t *);
		} else {
			LOGE("draw_preview_one:unable to allocate converted frame!");
		}
	}
	return frame; // RETURN(frame, uvc_frame_t *);
}
Example #8
void CameraDriver::OpenCamera(UVCCameraConfig &new_config) {
  assert(state_ == kStopped);

  int vendor_id = strtol(new_config.vendor.c_str(), NULL, 0);
  int product_id = strtol(new_config.product.c_str(), NULL, 0);

  ROS_INFO("Opening camera with vendor=0x%x, product=0x%x, serial=\"%s\", index=%d",
           vendor_id, product_id, new_config.serial.c_str(), new_config.index);

  uvc_device_t **devs;

  // Implement missing index select behavior
  // https://github.com/ros-drivers/libuvc_ros/commit/4f30e9a0
#if libuvc_VERSION     > 00005 /* version > 0.0.5 */
  uvc_error_t find_err = uvc_find_devices(
    ctx_, &devs,
    vendor_id,
    product_id,
    new_config.serial.empty() ? NULL : new_config.serial.c_str());

  if (find_err != UVC_SUCCESS) {
    uvc_perror(find_err, "uvc_find_device");
    return;
  }

  // select device by index
  dev_ = NULL;
  int dev_idx = 0;
  while (devs[dev_idx] != NULL) {
    if(dev_idx == new_config.index) {
      dev_ = devs[dev_idx];
    }
    else {
      uvc_unref_device(devs[dev_idx]);
    }

    dev_idx++;
  }

  if(dev_ == NULL) {
    ROS_ERROR("Unable to find device at index %d", new_config.index);
    return;
  }
#else
  uvc_error_t find_err = uvc_find_device(
    ctx_, &dev_,
    vendor_id,
    product_id,
    new_config.serial.empty() ? NULL : new_config.serial.c_str());

  if (find_err != UVC_SUCCESS) {
    uvc_perror(find_err, "uvc_find_device");
    return;
  }

#endif
  uvc_error_t open_err = uvc_open(dev_, &devh_);

  if (open_err != UVC_SUCCESS) {
    switch (open_err) {
    case UVC_ERROR_ACCESS:
#ifdef __linux__
      ROS_ERROR("Permission denied opening /dev/bus/usb/%03d/%03d",
                uvc_get_bus_number(dev_), uvc_get_device_address(dev_));
#else
      ROS_ERROR("Permission denied opening device %d on bus %d",
                uvc_get_device_address(dev_), uvc_get_bus_number(dev_));
#endif
      break;
    default:
#ifdef __linux__
      ROS_ERROR("Can't open /dev/bus/usb/%03d/%03d: %s (%d)",
                uvc_get_bus_number(dev_), uvc_get_device_address(dev_),
                uvc_strerror(open_err), open_err);
#else
      ROS_ERROR("Can't open device %d on bus %d: %s (%d)",
                uvc_get_device_address(dev_), uvc_get_bus_number(dev_),
                uvc_strerror(open_err), open_err);
#endif
      break;
    }

    uvc_unref_device(dev_);
    return;
  }

  uvc_set_status_callback(devh_, &CameraDriver::AutoControlsCallbackAdapter, this);

  uvc_stream_ctrl_t ctrl;
  uvc_error_t mode_err = uvc_get_stream_ctrl_format_size(
    devh_, &ctrl,
    GetVideoMode(new_config.video_mode),
    new_config.width, new_config.height,
    new_config.frame_rate);

  if (mode_err != UVC_SUCCESS) {
    uvc_perror(mode_err, "uvc_get_stream_ctrl_format_size");
    uvc_close(devh_);
    uvc_unref_device(dev_);
    ROS_ERROR("check video_mode/width/height/frame_rate are available");
    uvc_print_diag(devh_, NULL);
    return;
  }

  uvc_error_t stream_err = uvc_start_streaming(devh_, &ctrl, &CameraDriver::ImageCallbackAdapter, this, 0);

  if (stream_err != UVC_SUCCESS) {
    uvc_perror(stream_err, "uvc_start_streaming");
    uvc_close(devh_);
    uvc_unref_device(dev_);
    return;
  }

  if (rgb_frame_)
    uvc_free_frame(rgb_frame_);

  rgb_frame_ = uvc_allocate_frame(new_config.width * new_config.height * 3);
  assert(rgb_frame_);

  state_ = kRunning;
}
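GetVideoMode() converts the ROS video_mode string into a libuvc pixel format and is not included in this listing. A plausible sketch (the accepted strings are assumptions; the enum values are real libuvc formats):

// Hypothetical sketch of the GetVideoMode() helper used above.
enum uvc_frame_format CameraDriver::GetVideoMode(std::string vmode) {
  if (vmode == "uncompressed") return UVC_COLOR_FORMAT_UNCOMPRESSED;
  if (vmode == "compressed")   return UVC_COLOR_FORMAT_COMPRESSED;
  if (vmode == "yuyv")         return UVC_COLOR_FORMAT_YUYV;
  if (vmode == "uyvy")         return UVC_COLOR_FORMAT_UYVY;
  if (vmode == "rgb")          return UVC_COLOR_FORMAT_RGB;
  if (vmode == "mjpeg")        return UVC_COLOR_FORMAT_MJPEG;
  if (vmode == "gray8")        return UVC_COLOR_FORMAT_GRAY8;
  ROS_ERROR_STREAM("Invalid video_mode '" << vmode << "', falling back to uncompressed");
  return UVC_COLOR_FORMAT_UNCOMPRESSED;
}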
Example #9
/* This callback function runs once per frame. Use it to perform any
 * quick processing you need, or have it put the frame into your application's
 * input queue. If this function takes too long, you'll start losing frames. */
void cb(uvc_frame_t *frame, void *ptr) {
  frame->frame_format = UVC_FRAME_FORMAT_YUYV;
  std::cout << "Frame format" << std::endl;
  std::cout << frame->frame_format << std::endl; 
  uvc_frame_t *greyLeft;
  uvc_frame_t *greyRight;
  uvc_error_t retLeft;
  uvc_error_t retRight;

  /* We'll convert the image from YUV/JPEG to gray8, so allocate space */
  greyLeft = uvc_allocate_frame(frame->width * frame->height);
  greyRight = uvc_allocate_frame(frame->width * frame->height);
  if (!greyLeft) {
    printf("unable to allocate grey left frame!");
    if (greyRight) uvc_free_frame(greyRight);
    return;
  }
  if (!greyRight) {
    printf("unable to allocate grey right frame!");
    uvc_free_frame(greyLeft);
    return;
  }

  /* Split the packed YUYV data into its Y (left) and UV (right) planes */
  retLeft = uvc_yuyv2y(frame, greyLeft);
  retRight = uvc_yuyv2uv(frame, greyRight);
  if (retLeft) {
    uvc_perror(retLeft, "uvc_yuyv2y");
    uvc_free_frame(greyLeft);
    uvc_free_frame(greyRight);
    printf("Error with yuyv to y conversion");
    return;
  }
  if (retRight) {
    uvc_perror(retRight, "uvc_yuyv2uv");
    uvc_free_frame(greyLeft);
    uvc_free_frame(greyRight);
    printf("Error with yuyv to uv conversion");
    return;
  }

  /* Call a user function:
   *
   * my_type *my_obj = (*my_type) ptr;
   * my_user_function(ptr, bgr);
   * my_other_function(ptr, bgr->data, bgr->width, bgr->height);
   */

  /* Call a C++ method:
   *
   * my_type *my_obj = (*my_type) ptr;
   * my_obj->my_func(bgr);
   */

  /* Use opencv.highgui to display the image:
   * 
   * cvImg = cvCreateImageHeader(
   *     cvSize(bgr->width, bgr->height),
   *     IPL_DEPTH_8U,
   *     3);
   *
   * cvSetData(cvImg, bgr->data, bgr->width * 3); 
   *
   * cvNamedWindow("Test", CV_WINDOW_AUTOSIZE);
   * cvShowImage("Test", cvImg);
   * cvWaitKey(10);
   *
   * cvReleaseImageHeader(&cvImg);
   */

  /* Wrap the grayscale buffers without copying. Use image headers so that
     releasing them below does not free libuvc's frame data. */
  IplImage *cvImg, *cvImg2;
  cvImg = cvCreateImageHeader(cvSize(greyLeft->width, greyLeft->height),
		  IPL_DEPTH_8U, 1);
  cvImg2 = cvCreateImageHeader(cvSize(greyRight->width, greyRight->height),
		  IPL_DEPTH_8U, 1);
  cvSetData(cvImg, greyLeft->data, greyLeft->width);
  cvSetData(cvImg2, greyRight->data, greyRight->width);

  cv::Mat left (greyLeft->height, greyLeft->width, CV_8UC1, greyLeft->data);
  cv::Mat right (greyRight->height, greyRight->width, CV_8UC1, greyRight->data);
  //cv::Mat right (cv::cvarrToMat(cvImg2));

  cv::imshow("Test", left);
  cv::imshow("Test2", right);
  cvWaitKey(10);

  cvReleaseImageHeader(&cvImg);
  cvReleaseImageHeader(&cvImg2);

  uvc_free_frame(greyLeft);
  uvc_free_frame(greyRight);
}
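The split above works because packed YUYV stores bytes as Y0 U0 Y1 V0 ...: uvc_yuyv2y() keeps the even (luma) bytes and uvc_yuyv2uv() the odd (chroma) bytes, which is how a camera that packs its left image into Y and its right image into U/V comes apart into two width x height gray8 images. Illustratively (libuvc's real functions also validate formats and fill in the output frame's metadata):

/* Illustration only - not libuvc's implementation. */
#include <stddef.h>
#include <stdint.h>

static void split_yuyv(const uint8_t *yuyv, size_t pixels,
                       uint8_t *y_plane, uint8_t *uv_plane) {
  for (size_t i = 0; i < pixels; ++i) {
    y_plane[i]  = yuyv[2 * i];      /* even bytes: luma (left image) */
    uv_plane[i] = yuyv[2 * i + 1];  /* odd bytes: interleaved chroma (right image) */
  }
}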
Example #10
void UvcDriver::Start(int vid, int pid, char* sn)
{
    width_ = 640;
    height_ = 480;
    fps_ = 30;
    
    if(ctx_) {
        Stop();
    }
    
    uvc_init(&ctx_, NULL);
    if(!ctx_) {
        throw DeviceException("Unable to open UVC Context");
    }
    
    uvc_error_t find_err = uvc_find_device(ctx_, &dev_, vid, pid, sn );
    if (find_err != UVC_SUCCESS) {
        uvc_perror(find_err, "uvc_find_device");
        throw DeviceException("Unable to open UVC Device");
    }
    if(!dev_) {
        throw DeviceException("Unable to open UVC Device - no pointer returned.");
    }
    
    uvc_error_t open_err = uvc_open(dev_, &devh_);
    if (open_err != UVC_SUCCESS) {
        uvc_perror(open_err, "uvc_open");
        uvc_unref_device(dev_);
        throw DeviceException("Unable to open device");
    }



    
//    uvc_set_status_callback(devh_, &CameraDriver::AutoControlsCallbackAdapter, this);
    
    uvc_stream_ctrl_t ctrl;
    uvc_error_t mode_err = uvc_get_stream_ctrl_format_size(
                devh_, &ctrl,
                UVC_COLOR_FORMAT_YUYV,
                width_, height_,
                fps_);
    
    pbtype = hal::PB_UNSIGNED_BYTE;
    pbformat = hal::PB_LUMINANCE;
        
    if (mode_err != UVC_SUCCESS) {
        uvc_perror(mode_err, "uvc_get_stream_ctrl_format_size");
        uvc_close(devh_);
        uvc_unref_device(dev_);
        throw DeviceException("Unable to device mode.");
    }
    
//    uvc_error_t stream_err = uvc_start_iso_streaming(devh_, &ctrl, &UvcDriver::ImageCallbackAdapter, this);
    //uvc_error_t stream_err = uvc_start_iso_streaming(devh_, &ctrl, NULL, this);
    uvc_error_t stream_err = uvc_start_streaming(devh_, &ctrl, NULL, this, 0);
    
    if (stream_err != UVC_SUCCESS) {
        uvc_perror(stream_err, "uvc_start_streaming");
        uvc_close(devh_);
        uvc_unref_device(dev_);
        throw DeviceException("Unable to start streaming.");
    }
    
    if (frame_)
        uvc_free_frame(frame_);
    
    frame_ = uvc_allocate_frame(ctrl.dwMaxVideoFrameSize);
    if(!frame_) {
        throw DeviceException("Unable to allocate frame.");
    }
}
Example #11
File: usb.c  Project: khuey/rr

/* Tests a subset of the USBDEVFS ioctls. Requires libuvc to build:
   https://github.com/ktossell/libuvc
   Therefore this test is not built/run by default. */

#include "util.h"

#include "libuvc/libuvc.h"

static int frame_count = 0;

void cb(uvc_frame_t* frame, __attribute__((unused)) void* ptr) {
  uvc_frame_t* bgr;
  uvc_error_t ret;
  bgr = uvc_allocate_frame(frame->width * frame->height * 3);
  test_assert(bgr != NULL);
  ret = uvc_any2bgr(frame, bgr);
  test_assert(ret == 0);
  ++frame_count;
  uvc_free_frame(bgr);
}

int main(void) {
  uvc_context_t* ctx;
  uvc_device_t* dev;
  uvc_device_handle_t* devh;
  uvc_stream_ctrl_t ctrl;
  uvc_error_t res;
  res = uvc_init(&ctx, NULL);
  test_assert(res >= 0);
Example #12
bool RealSenseDriver::Capture( pb::CameraMsg& vImages )
{
  /* Use a two-image vector: the first image is RGB, the second is gray16 depth (or gray8 when using IR) */
    vImages.Clear();

    uvc_frame_t* frameRGB = NULL;
    uvc_frame_t* frameDepth = NULL;
    uvc_frame_t *rgb; //to hold the YUV->RGB conversion
    uvc_error_t err;
    pb::ImageMsg* pimg;
    uint64_t scrDepth, scrRGB;

    scrDepth = 0;
    scrRGB = 0;
    
    /* Try to sync up the streams
       Based on the SCR value that is shared between the two cameras, it looks like 
       RGB leads (comes before) IR by about 21475451171 units (roughly) between frames
    
       Strategy: Make sure the difference between SCR values for each frame is less than
       frameSyncLimit units.

       Also, depth takes some time to actually start publishing - keep reading RGB until we get
       a good pair

    */

    //Two functions: advanceDepth, advanceRGB
    //Pick up an RGB, wait for a depth, see if diff > synclimit
    //yes? Repeat with new RGB
    //no? Stash both into protobuf and continue

    
    /*Pick up the depth image */
    int gotPair = 0;
    int needRGB = 1;
    int needDepth = 1;
    int advRGB = 0;
    
    int64_t frameSync;
    
    while (!gotPair)
      {
	while (needRGB)
	  {
	    err = getFrame(streamh_rgb, &frameRGB);
	   
	    if ((err == UVC_SUCCESS) && (frameRGB != 0))
	      {
		//Calc the sync
		memcpy(&scrRGB, &frameRGB->capture_time, sizeof(scrRGB));
		needRGB = 0;
	      }
	    usleep(1);
	  }

	//Pick up a Depth
	while (needDepth)
	  {
	    err = getFrame(streamh_d, &frameDepth);
	    if ((err == UVC_SUCCESS) && (frameDepth != 0))
	      {
		//Calc the sync
		memcpy(&scrDepth, &frameDepth->capture_time, sizeof(scrDepth));
		needDepth = 0;
	      }
	    usleep(1);
	  }
	
	frameSync = diffSCR(scrDepth, scrRGB);


	if (!useSync)
	  {
	    //Don't care about the sync, just return the images
	    
	    gotPair = 1;
	    break;
	  }
	
	std::cout << "Sync?: R:" << scrRGB << " D: " << scrDepth << " diffSCR: " << frameSync;
		
	if (frameSync > frameSyncLimitPos)
	  {
	    //Depth is ahead of RGB, advance the RGB stream
	    advRGB++;
	    if (advRGB > 3)
	      {
		//restart, the rgb is out of whack
		needDepth = 1;
		std::cout << " Restarting acq sequence" << std::endl;
	      }
	    else
	      {
		std::cout << " Advancing RGB stream" << std::endl;
	      }
	    needRGB = 1;
	    continue;
	  }
	else if (frameSync < frameSyncLimitNeg)
	  {
	    // depth after RGB by too much, start over
	    needRGB = 1;
	    needDepth = 1;
	    std::cout << " Bad sync, starting over" << std::endl;
	    advRGB = 0;
	    continue;
	  }
	else
	  {
	    //Within limits, exit the loop
	    std::cout << " Good sync" << std::endl;
	    gotPair = 1;
	  }
      }
     
    if(frameRGB)
      {
	pimg = vImages.add_image();
	pimg->set_type( (pb::Type) pbtype_rgb );
	pimg->set_format( (pb::Format) pbformat_rgb );            
	pimg->set_width(frameRGB->width);
	pimg->set_height(frameRGB->height);
	rgb = uvc_allocate_frame(frameRGB->width * frameRGB->height * 3);
	uvc_any2rgb(frameRGB, rgb); //Convert the YUYV to RGB
	pimg->set_data(rgb->data, width_ * height_ * 3); //RGB uint8_t
	    
	uvc_free_frame(rgb);
      }
    
    if(frameDepth)
      {
	pimg = vImages.add_image();
	pimg->set_type( (pb::Type) pbtype_d );
	pimg->set_format( (pb::Format) pbformat_d );            
	pimg->set_width(frameDepth->width);
	pimg->set_height(frameDepth->height);
	    

	if (useIR)
	  {
	    pimg->set_data(frameDepth->data, width_ * height_); //gray uint8_t
	  }
	else
	  {
	    pimg->set_data(frameDepth->data, width_ * height_ * 2); //gray uint16_t
	  }
      }

    
    return true;
}
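diffSCR() and the frameSyncLimitPos/frameSyncLimitNeg thresholds are not part of this listing. Given that the SCR values are unsigned 64-bit counters copied out of capture_time, a plausible signed-difference helper might be as simple as the following (an assumption about the missing code, not the driver's actual implementation):

//Hypothetical helper matching the call diffSCR(scrDepth, scrRGB) above.
#include <cstdint>

static int64_t diffSCR(uint64_t scrA, uint64_t scrB)
{
    //Subtract in unsigned arithmetic, then reinterpret as signed so that
    //counter wrap-around still yields a small signed difference.
    return static_cast<int64_t>(scrA - scrB);
}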