Example #1
int v4l2_init() {
  struct v4l2_capability video_cap;
  if (xioctl(video_fd, VIDIOC_QUERYCAP, &video_cap) == -1)
    return v4l2_error("VIDIOC_QUERYCAP");
  if (!(video_cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    return v4l2_error("No video capture device");
  if (!(video_cap.capabilities & V4L2_CAP_STREAMING))
    return v4l2_error("No capture streaming");
  
  struct v4l2_format video_fmt;
  memset(&video_fmt, 0, sizeof(video_fmt));  // zero the struct so reserved fields are not garbage
  video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  video_fmt.fmt.pix.width       = width;
  video_fmt.fmt.pix.height      = height;
  video_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
  video_fmt.fmt.pix.field       = V4L2_FIELD_ANY;
  if (xioctl(video_fd, VIDIOC_S_FMT, &video_fmt) == -1)
    return v4l2_error("VIDIOC_S_FMT");
    
  // Query V4L2 controls:
  v4l2_query_ctrl(V4L2_CID_BASE, V4L2_CID_LASTP1);
  v4l2_query_ctrl(V4L2_CID_PRIVATE_BASE, V4L2_CID_PRIVATE_BASE + 20);
  v4l2_query_ctrl(V4L2_CID_CAMERA_CLASS_BASE + 1, V4L2_CID_CAMERA_CLASS_BASE + 20);

  // Logitech specific controls:
  v4l2_query_ctrl(V4L2_CID_FOCUS, V4L2_CID_FOCUS + 1);
  v4l2_query_ctrl(V4L2_CID_LED1_MODE, V4L2_CID_LED1_MODE + 1);
  v4l2_query_ctrl(V4L2_CID_LED1_FREQUENCY, V4L2_CID_LED1_FREQUENCY + 1);
  v4l2_query_ctrl(V4L2_CID_DISABLE_PROCESSING, V4L2_CID_DISABLE_PROCESSING + 1);
  v4l2_query_ctrl(V4L2_CID_RAW_BITS_PER_PIXEL, V4L2_CID_RAW_BITS_PER_PIXEL + 1);

  // Initialize memory map
  v4l2_init_mmap();

  return 0;
}
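
Example #1 calls helpers that are not shown here. The following is a hypothetical sketch based on the conventional V4L2 idioms, not the original source: xioctl() as the usual EINTR-retrying ioctl() wrapper, and v4l2_query_ctrl() as a VIDIOC_QUERYCTRL loop over a range of control IDs. The extern video_fd declaration and the printf body are assumptions filled in from the call sites above.

#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

extern int video_fd;  // capture device descriptor, opened elsewhere (assumed)

// Retry ioctl() when it is interrupted by a signal (standard V4L2 idiom).
static int xioctl(int fd, unsigned long request, void *arg) {
  int r;
  do {
    r = ioctl(fd, request, arg);
  } while (r == -1 && errno == EINTR);
  return r;
}

// Enumerate the controls in [start_id, end_id) with VIDIOC_QUERYCTRL,
// skipping IDs the driver does not implement or has disabled.
static void v4l2_query_ctrl(unsigned int start_id, unsigned int end_id) {
  struct v4l2_queryctrl qctrl;
  for (unsigned int id = start_id; id < end_id; id++) {
    memset(&qctrl, 0, sizeof(qctrl));
    qctrl.id = id;
    if (xioctl(video_fd, VIDIOC_QUERYCTRL, &qctrl) == -1)
      continue;  // control not supported by this driver
    if (qctrl.flags & V4L2_CTRL_FLAG_DISABLED)
      continue;  // control exists but is disabled
    printf("control %s: range [%d, %d], default %d\n",
           qctrl.name, qctrl.minimum, qctrl.maximum, qctrl.default_value);
  }
}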
Example #2
int v4l2_init_device (PyCameraObject* self)
{
    struct v4l2_capability cap;
    struct v4l2_format fmt;
    unsigned int min;

    if (-1 == v4l2_xioctl (self->fd, VIDIOC_QUERYCAP, &cap)) {
        if (EINVAL == errno) {
            PyErr_Format(PyExc_SystemError, "%s is not a V4L2 device",
                self->device_name);
            return 0;
        }
        else {
            PyErr_Format(PyExc_SystemError, "ioctl(VIDIOC_QUERYCAP) failure : %d, %s",
                errno, strerror (errno));
            return 0;
        }
    }

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        PyErr_Format(PyExc_SystemError, "%s is not a video capture device",
            self->device_name);
        return 0;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        PyErr_Format(PyExc_SystemError, "%s does not support streaming i/o",
            self->device_name);
        return 0;
    }

    CLEAR (fmt);

    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = self->width;
    fmt.fmt.pix.height = self->height;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;
    
    /* Find the pixel format supported by the camera that takes the least
       processing power to convert to the desired output.  Thus, for YUV out,
       YUYV and YUV420 are tried first, while for RGB and HSV output, the
       packed RGB formats are tried first. */
    switch (self->color_out) {
        case YUV_OUT:
            if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_YUYV)) {
                self->pixelformat = V4L2_PIX_FMT_YUYV;
            } else if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_YUV420)) {
                self->pixelformat = V4L2_PIX_FMT_YUV420;
            } else if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_RGB24)) {
                self->pixelformat = V4L2_PIX_FMT_RGB24;
            } else if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_RGB444)) {
                self->pixelformat = V4L2_PIX_FMT_RGB444;
            } else if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_SBGGR8)) {
                self->pixelformat = V4L2_PIX_FMT_SBGGR8;
            } else {
                PyErr_Format(PyExc_SystemError,
                           "ioctl(VIDIOC_S_FMT) failure: no supported formats");
                return 0;
            }
            break;
        default:
            if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_RGB24)) {
                self->pixelformat = V4L2_PIX_FMT_RGB24;
            } else if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_RGB444)) {
                self->pixelformat = V4L2_PIX_FMT_RGB444;
            } else if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_YUYV)) {
                self->pixelformat = V4L2_PIX_FMT_YUYV;
            } else if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_SBGGR8)) {
                self->pixelformat = V4L2_PIX_FMT_SBGGR8;
            } else if (v4l2_pixelformat(self->fd, &fmt, V4L2_PIX_FMT_YUV420)) {
                self->pixelformat = V4L2_PIX_FMT_YUV420;
            } else {
                PyErr_Format(PyExc_SystemError,
                           "ioctl(VIDIOC_S_FMT) failure: no supported formats");
                return 0;
            }
            break;
    }    

    /* Note VIDIOC_S_FMT may change width and height. */
    self->width = fmt.fmt.pix.width;
    self->height = fmt.fmt.pix.height;
    self->size = self->width * self->height;
    self->pixelformat = fmt.fmt.pix.pixelformat;

    /* Buggy driver paranoia. */
    min = fmt.fmt.pix.width * 2;
    if (fmt.fmt.pix.bytesperline < min)
        fmt.fmt.pix.bytesperline = min;
    min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
    if (fmt.fmt.pix.sizeimage < min)
        fmt.fmt.pix.sizeimage = min;

    v4l2_init_mmap (self);

    return 1;
}
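
Example #2 relies on a format-probing helper, v4l2_pixelformat(), that is not shown. A plausible sketch, assuming the helper asks the driver to switch to the candidate pixel format with VIDIOC_S_FMT and reports success only if the driver kept it (drivers are allowed to substitute a different format):

// Hypothetical sketch: try the candidate pixel format and report whether
// the driver actually accepted it.
static int v4l2_pixelformat(int fd, struct v4l2_format *fmt,
                            unsigned int pixelformat)
{
    fmt->fmt.pix.pixelformat = pixelformat;

    if (-1 == v4l2_xioctl(fd, VIDIOC_S_FMT, fmt))
        return 0;

    // VIDIOC_S_FMT may silently substitute another format; only claim
    // success if the requested pixelformat survived the call.
    return fmt->fmt.pix.pixelformat == pixelformat;
}

Because VIDIOC_S_FMT can also adjust the requested width and height, the caller re-reads fmt.fmt.pix.width and fmt.fmt.pix.height after the probe, exactly as Example #2 does.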