/* It should set the capture resolution Cheated from the openCV cap_libv4l.cpp the method is the following: Turn off the stream (video_disable) Unmap buffers Close the filedescriptor Initialize the camera again with the new resolution */ int setResolution(struct vdIn *vd, int width, int height) { int ret; DBG("setResolution(%d, %d)\n", width, height); vd->streamingState = STREAMING_PAUSED; if(video_disable(vd, STREAMING_PAUSED) == 0) { // do streamoff DBG("Unmap buffers\n"); int i; for(i = 0; i < NB_BUFFER; i++) munmap(vd->mem[i], vd->buf.length); if(CLOSE_VIDEO(vd->fd) == 0) { DBG("Device closed successfully\n"); } vd->width = width; vd->height = height; if(init_v4l2(vd) < 0) { fprintf(stderr, " Init v4L2 failed !! exit fatal \n"); return -1; } else { DBG("reinit done\n"); video_enable(vd); return 0; } } else { DBG("Unable to disable streaming\n"); return -1; } return ret; }
int uvcGrab (struct vdIn *vd) { #define HEADERFRAME1 0xaf int ret; if (!vd->isstreaming) if (video_enable (vd)) goto err; memset (&vd->buf, 0, sizeof (struct v4l2_buffer)); vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; vd->buf.memory = V4L2_MEMORY_MMAP; ret = ioctl (vd->fd, VIDIOC_DQBUF, &vd->buf); if (ret < 0) { fprintf (stderr, "Unable to dequeue buffer (%d).\n", errno); goto err; } switch (vd->formatIn) { case V4L2_PIX_FMT_MJPEG: memcpy (vd->tmpbuffer, vd->mem[vd->buf.index], HEADERFRAME1); memcpy (vd->tmpbuffer + HEADERFRAME1, dht_data, DHT_SIZE); memcpy (vd->tmpbuffer + HEADERFRAME1 + DHT_SIZE, vd->mem[vd->buf.index] + HEADERFRAME1, (vd->buf.bytesused - HEADERFRAME1)); if (debug) fprintf (stderr, "bytes in used %d \n", vd->buf.bytesused); break; case V4L2_PIX_FMT_YUYV: if (vd->buf.bytesused > vd->framesizeIn) memcpy (vd->framebuffer, vd->mem[vd->buf.index], (size_t) vd->framesizeIn); else memcpy (vd->framebuffer, vd->mem[vd->buf.index], (size_t) vd->buf.bytesused); break; default: goto err; break; } ret = ioctl (vd->fd, VIDIOC_QBUF, &vd->buf); if (ret < 0) { fprintf (stderr, "Unable to requeue buffer (%d).\n", errno); goto err; } return 0; err: vd->signalquit = 0; return -1; }
/*
 * Change the capture resolution without closing the device.
 *
 * Sequence: pause streaming -> STREAMOFF -> VIDIOC_G_FMT (read current
 * format) -> patch width/height -> VIDIOC_S_FMT -> STREAMON ->
 * VIDIOC_REQBUFS (re-negotiate the buffer set).
 *
 * Returns 0 on full success; a negative/non-zero ioctl result if G_FMT
 * fails or (via the final `return ret`) if REQBUFS fails; -1 if
 * streaming could not be stopped or restarted. Note that an S_FMT
 * failure is only logged — the function carries on and still tries to
 * re-enable streaming with the old format.
 */
int setResolution(struct vdIn *vd, int width, int height)
{
    int ret;
    DBG("setResolution(%d, %d)\n", width, height);

    vd->streamingState = STREAMING_PAUSED;
    if (video_disable(vd, STREAMING_PAUSED) == 0) {
        /* Read back the currently negotiated format so we only patch
         * the fields we want to change. */
        ret = IOCTL_VIDEO(vd->fd, VIDIOC_G_FMT, &vd->fmt);
        if (ret != 0) {
            DBG("Unable to get current format\n");
            return ret;
        } else {
            DBG("Current size: %d, %d)\n",
                vd->fmt.fmt.pix.width, vd->fmt.fmt.pix.height);
        }

        vd->fmt.fmt.pix.width = width;
        vd->fmt.fmt.pix.height = height;

        /* Apply the new geometry; EBUSY typically means buffers are
         * still in flight. Failure here is non-fatal by design. */
        ret = IOCTL_VIDEO(vd->fd, VIDIOC_S_FMT, &vd->fmt);
        if (ret != 0) {
            DBG("Unable to set the new format code: %d errno: %d\n", ret, errno);
            if (errno == EBUSY)
                DBG("EBUSY: IO is in progress\n");
        } else {
            DBG("New resolution is successfully applied\n");
        }

        if (video_enable(vd) == 0) {
            DBG("Streaming on again\n");
            /* Re-negotiate the mmap buffer set for the new frame size. */
            memset(&vd->rb, 0, sizeof(struct v4l2_requestbuffers));
            vd->rb.count = NB_BUFFER;
            vd->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            vd->rb.memory = V4L2_MEMORY_MMAP;
            ret = IOCTL_VIDEO(vd->fd, VIDIOC_REQBUFS, &vd->rb);
            if (ret < 0) {
                perror("Unable to reallocate buffers");
            }
        } else {
            DBG("Unable to reenable streaming\n");
            return -1;
        }
    } else {
        DBG("Unable to disable streaming\n");
        return -1;
    }
    /* Propagates the REQBUFS result (0 on success). */
    return ret;
}
int OrangePi_Grab(struct vdIn *vd) { #define HEADERFRAME1 0xaf fd_set fds; struct timeval tv; struct OrangePi_v4l2_device *dev = &OrangePi; struct v4l2_buffer buf; memset(&tv , 0 , sizeof(struct timeval)); FD_ZERO(&fds); FD_SET(dev->fd,&fds); tv.tv_sec = dev->timeout; tv.tv_usec = 0; if(select(dev->fd + 1 , &fds , NULL , NULL , &tv) == -1) { printf("ERROR:Waiting for Fram\n"); return -1; } memset(&buf , 0 , sizeof(struct v4l2_buffer)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; if(vd->streamingState == STREAMING_OFF) { if(video_enable(vd)) printf("%s %d\n",__func__,__LINE__); } /* Dequeue buffer */ if(ioctl(dev->fd , VIDIOC_DQBUF , &buf) < 0) { printf("ERROR:VIDIOC_DQBUF\n"); return -1; } memcpy(vd->Y2V, dev->buffers->Raw_buffers[buf.index].start , vd->framesizeIn); // convert_yuv420p_to_yuyv(vd->Y2V,vd->framebuffer,vd->width,vd->height); OrangePi_420PTo422Pack(vd->Y2V,vd->framebuffer,vd->width,vd->height); /* Put buffers */ if(ioctl(dev->fd , VIDIOC_QBUF , &buf) < 0) { printf("ERROR:Bad Put Buffer\n"); return -1; } return 0; }
int CameraManager2::uvcGrab() { int ret; if (vd->isstreaming==0) { if (video_enable()) { goto err; } } this->saving_buffer = true; memset (&vd->buf, 0, sizeof (struct v4l2_buffer)); vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; vd->buf.memory = V4L2_MEMORY_MMAP; ret = ioctl (vd->fd, VIDIOC_DQBUF, &vd->buf); if (ret < 0) { fprintf (stderr, "Unable to dequeue buffer (%d).\n", errno); goto err; } memcpy (vd->tmpbuffer, vd->mem[vd->buf.index], HEADERFRAME1); memcpy (vd->tmpbuffer + HEADERFRAME1, dht_data, DHT_SIZE); memcpy (vd->tmpbuffer + HEADERFRAME1 + DHT_SIZE, vd->mem[vd->buf.index] + HEADERFRAME1, (vd->buf.bytesused - HEADERFRAME1)); if (debug) { fprintf (stderr, "bytes in used %d \n", vd->buf.bytesused); } this->jpg_buffer = vd->tmpbuffer; this->jpg_buffer_size = vd->buf.bytesused + DHT_SIZE; ret = ioctl (vd->fd, VIDIOC_QBUF, &vd->buf); if (ret < 0) { fprintf (stderr, "Unable to requeue buffer (%d).\n", errno); goto err; } this->saving_buffer = false; return 0; err: vd->signalquit = 0; return -1; }
int uvcGrab(struct vdIn *vd) { #define HEADERFRAME1 0xaf int ret; if (!vd->isstreaming) if (video_enable(vd)) goto err; memset(&vd->buf, 0, sizeof(struct v4l2_buffer)); vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; vd->buf.memory = V4L2_MEMORY_MMAP; ret = ioctl(vd->fd, VIDIOC_DQBUF, &vd->buf); if (ret < 0) { perror("Unable to dequeue buffer"); goto err; } switch (vd->formatIn) { case V4L2_PIX_FMT_MJPEG: if (vd->buf.bytesused <= HEADERFRAME1) { /* Prevent crash * on empty image */ fprintf(stderr, "Ignoring empty buffer ...\n"); return 0; } /* memcpy(vd->tmpbuffer, vd->mem[vd->buf.index], vd->buf.bytesused); memcpy (vd->tmpbuffer, vd->mem[vd->buf.index], HEADERFRAME1); memcpy (vd->tmpbuffer + HEADERFRAME1, dht_data, sizeof(dht_data)); memcpy (vd->tmpbuffer + HEADERFRAME1 + sizeof(dht_data), vd->mem[vd->buf.index] + HEADERFRAME1, (vd->buf.bytesused - HEADERFRAME1)); */ memcpy(vd->tmpbuffer, vd->mem[vd->buf.index], vd->buf.bytesused); if (debug) fprintf(stderr, "bytes in used %d \n", vd->buf.bytesused); break; //case V4L2_PIX_FMT_YUYV: default: if (vd->buf.bytesused > vd->framesizeIn) memcpy (vd->framebuffer, vd->mem[vd->buf.index], (size_t) vd->framesizeIn); else memcpy (vd->framebuffer, vd->mem[vd->buf.index], (size_t) vd->buf.bytesused); break; /*default: goto err; break;*/ } ret = ioctl(vd->fd, VIDIOC_QBUF, &vd->buf); if (ret < 0) { perror("Unable to requeue buffer"); goto err; } return 0; err: vd->signalquit = 0; return -1; }
/*
 * Capture `nframes` frames from `dev` and optionally save each one to
 * "<filename_prefix>-NNNNNN.bin".
 *
 * Per frame: DQBUF, print index/flags/size/timestamps, save to file
 * (unless still inside the `skip` window), optional `delay` ms pause,
 * then requeue — except for the last dev->nbufs frames when
 * `do_requeue_last` is false. Prints a throughput summary at the end.
 *
 * Returns -ENOMEM on allocation failure, otherwise the result of
 * video_free_buffers(). NOTE(review): the fps/frame-count summary uses
 * i-1, and the timing baseline is the first frame's dequeue time —
 * presumably intentional (interval count), but worth confirming.
 */
static int video_do_capture(struct device *dev, unsigned int nframes,
                            unsigned int skip, unsigned int delay,
                            const char *filename_prefix, int do_requeue_last)
{
    char *filename = NULL;
    struct timeval start = { 0, 0 };
    struct timeval end, ts;
    struct v4l2_buffer buf;
    unsigned int size;
    unsigned int i;
    FILE *file;
    double bps;
    double fps;
    int ret;

    /* "<prefix>" + '-' + 6 digits + ".bin" + NUL = strlen + 12 bytes. */
    if (filename_prefix != NULL) {
        filename = malloc(strlen(filename_prefix) + 12);
        if (filename == NULL)
            return -ENOMEM;
    }

    /* Start streaming. */
    video_enable(dev, 1);

    size = 0;

    for (i = 0; i < nframes; ++i) {
        /* Dequeue a buffer. */
        memset(&buf, 0, sizeof buf);
        buf.type = dev->type;
        buf.memory = dev->memtype;
        ret = ioctl(dev->fd, VIDIOC_DQBUF, &buf);
        if (ret < 0) {
            if (errno != EIO) {
                printf("Unable to dequeue buffer (%d).\n", errno);
                goto done;
            }
            /* EIO: transient error — rebuild the buffer descriptor and
             * carry on so it still gets requeued below. */
            buf.type = dev->type;
            buf.memory = dev->memtype;
            if (dev->memtype == V4L2_MEMORY_USERPTR)
                buf.m.userptr = (unsigned long)dev->buffers[i].mem;
        }

        /* Sanity-check payload size against the negotiated image size. */
        if (dev->type == V4L2_BUF_TYPE_VIDEO_CAPTURE && dev->imagesize != 0 &&
            buf.bytesused != dev->imagesize)
            printf("Warning: bytes used %u != image size %u\n",
                   buf.bytesused, dev->imagesize);

        size += buf.bytesused;

        gettimeofday(&ts, NULL);
        printf("%u (%u) [%c] %u %u bytes %ld.%06ld %ld.%06ld\n", i, buf.index,
               (buf.flags & V4L2_BUF_FLAG_ERROR) ? 'E' : '-',
               buf.sequence, buf.bytesused,
               buf.timestamp.tv_sec, buf.timestamp.tv_usec,
               ts.tv_sec, ts.tv_usec);

        /* Timing baseline is the dequeue time of the first frame. */
        if (i == 0)
            start = ts;

        /* Save the image. */
        if (dev->type == V4L2_BUF_TYPE_VIDEO_CAPTURE && filename_prefix && !skip) {
            sprintf(filename, "%s-%06u.bin", filename_prefix, i);
            file = fopen(filename, "wb");
            if (file != NULL) {
                ret = fwrite(dev->buffers[buf.index].mem, buf.bytesused, 1, file);
                fclose(file);
            }
        }

        if (skip)
            --skip;

        /* Requeue the buffer. */
        if (delay > 0)
            usleep(delay * 1000);

        fflush(stdout);

        /* Hold back the final dev->nbufs buffers unless requeueing of
         * the last buffers was explicitly requested. */
        if (i == nframes - dev->nbufs && !do_requeue_last)
            continue;

        ret = video_queue_buffer(dev, buf.index);
        if (ret < 0) {
            printf("Unable to requeue buffer (%d).\n", errno);
            goto done;
        }
    }
    gettimeofday(&end, NULL);

    /* Stop streaming. */
    video_enable(dev, 0);

    if (nframes == 0) {
        printf("No frames captured.\n");
        goto done;
    }

    /* Avoid division by zero when start == end. */
    if (end.tv_sec == start.tv_sec && end.tv_usec == start.tv_usec)
        goto done;

    /* end := elapsed time, normalized so tv_usec is non-negative. */
    end.tv_sec -= start.tv_sec;
    end.tv_usec -= start.tv_usec;
    if (end.tv_usec < 0) {
        end.tv_sec--;
        end.tv_usec += 1000000;
    }

    bps = size/(end.tv_usec+1000000.0*end.tv_sec)*1000000.0;
    fps = (i-1)/(end.tv_usec+1000000.0*end.tv_sec)*1000000.0;

    printf("Captured %u frames in %lu.%06lu seconds (%f fps, %f B/s).\n",
           i-1, end.tv_sec, end.tv_usec, fps, bps);

done:
    /* free(NULL) is a no-op, so this is safe on every path. */
    free(filename);
    return video_free_buffers(dev);
}
/*
 * Dequeue one frame, optionally dump it to disk (single raw frame
 * and/or an appended raw stream file), decode/copy it into
 * vd->framebuffer, and requeue the V4L2 buffer.
 *
 * Side effects controlled by vdIn state:
 *  - rawFrameCapture == 1: save exactly one frame to frameNNN.raw, then
 *    clear the flag; == 2: save every frame and track rfs* counters.
 *  - captureFile != NULL: append raw frames to the open stream file;
 *    on write failure the stream is closed and counters reset.
 *
 * MJPEG frames are software-decoded via jpeg_decode(); YUYV frames are
 * copied verbatim (clamped to framesizeIn).
 *
 * Returns 0 on success (including skipped empty frames), -1 on error
 * (and clears vd->signalquit).
 */
int uvcGrab(struct vdIn *vd)
{
#define HEADERFRAME1 0xaf
    int ret;

    /* Lazily start streaming on first grab. */
    if (!vd->isstreaming)
        if (video_enable(vd))
            goto err;

    memset(&vd->buf, 0, sizeof(struct v4l2_buffer));
    vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    vd->buf.memory = V4L2_MEMORY_MMAP;

    ret = ioctl(vd->fd, VIDIOC_DQBUF, &vd->buf);
    if (ret < 0) {
        printf("Unable to dequeue buffer (%d) fd is %d.\n", errno, vd->fd);
        goto err;
    }

    /* Capture a single raw frame */
    if (vd->rawFrameCapture && vd->buf.bytesused > 0) {
        FILE *frame = NULL;
        /* "frame" + 3 digits + ".raw" + NUL = 13 bytes; the counter is
         * taken modulo 1000 so it always fits. */
        char filename[13];
        int ret;  /* shadows the outer ret on purpose — local file I/O only */

        /* Disable frame capturing unless we're in frame stream mode */
        if(vd->rawFrameCapture == 1)
            vd->rawFrameCapture = 0;

        /* Create a file name and open the file */
        sprintf(filename, "frame%03u.raw", vd->fileCounter++ % 1000);
        frame = fopen(filename, "wb");
        if(frame == NULL) {
            perror("Unable to open file for raw frame capturing");
            goto end_capture;
        }

        /* Write the raw data to the file */
        ret = fwrite(vd->mem[vd->buf.index], vd->buf.bytesused, 1, frame);
        if(ret < 1) {
            perror("Unable to write to file");
            goto end_capture;
        }
        printf("Saved raw frame to %s (%u bytes)\n", filename, vd->buf.bytesused);
        if(vd->rawFrameCapture == 2) {
            vd->rfsBytesWritten += vd->buf.bytesused;
            vd->rfsFramesWritten++;
        }

        /* Clean up */
end_capture:
        if(frame)
            fclose(frame);
    }

    /* Capture raw stream data */
    if (vd->captureFile && vd->buf.bytesused > 0) {
        int ret;

        ret = fwrite(vd->mem[vd->buf.index], vd->buf.bytesused, 1, vd->captureFile);
        if (ret < 1) {
            /* Write failure terminates stream capture permanently. */
            perror("Unable to write raw stream to file");
            fprintf(stderr, "Stream capturing terminated.\n");
            fclose(vd->captureFile);
            vd->captureFile = NULL;
            vd->framesWritten = 0;
            vd->bytesWritten = 0;
        } else {
            vd->framesWritten++;
            vd->bytesWritten += vd->buf.bytesused;
            if (debug)
                printf("Appended raw frame to stream file (%u bytes)\n",
                       vd->buf.bytesused);
        }
    }

    switch (vd->formatIn) {
    case V4L2_PIX_FMT_MJPEG:
        if(vd->buf.bytesused <= HEADERFRAME1) {
            /* Prevent crash on empty image */
            /* if(debug)*/
            printf("Ignoring empty buffer ...\n");
            /* NOTE(review): returns without requeueing this buffer —
             * repeated empty frames may starve the driver of buffers. */
            return 0;
        }
        memcpy(vd->tmpbuffer, vd->mem[vd->buf.index], vd->buf.bytesused);

        /* Software JPEG decode into the framebuffer; also updates
         * width/height from the decoded image. */
        if (jpeg_decode(&vd->framebuffer, vd->tmpbuffer, &vd->width, &vd->height) < 0) {
            printf("jpeg decode errors\n");
            goto err;
        }
        if (debug)
            printf("bytes in used %d \n", vd->buf.bytesused);
        break;

    case V4L2_PIX_FMT_YUYV:
        /* Raw copy, clamped to the destination buffer size. */
        if (vd->buf.bytesused > vd->framesizeIn)
            memcpy(vd->framebuffer, vd->mem[vd->buf.index],
                   (size_t) vd->framesizeIn);
        else
            memcpy(vd->framebuffer, vd->mem[vd->buf.index],
                   (size_t) vd->buf.bytesused);
        break;

    default:
        goto err;
        break;
    }

    /* Hand the buffer back to the driver. */
    ret = ioctl(vd->fd, VIDIOC_QBUF, &vd->buf);
    if (ret < 0) {
        printf("Unable to requeue buffer (%d).\n", errno);
        goto err;
    }

    return 0;

err:
    vd->signalquit = 0;
    return -1;
}
/*
 * Capture `nframes` frames from `dev`, verifying, timing, and
 * optionally saving each one (via video_save_image with `pattern`).
 *
 * Per frame: DQBUF (with EIO-tolerant recovery), optional buffer
 * verification, per-frame fps computed from driver timestamps, save
 * (unless inside the `skip` window), optional `delay` ms pause, then
 * requeue via video_queue_buffer(.., fill) — except for the last
 * dev->nbufs frames when `do_requeue_last` is false. Prints a
 * throughput summary computed with CLOCK_MONOTONIC.
 *
 * Returns the result of video_free_buffers() on every path.
 */
int video_do_capture(struct device *dev, unsigned int nframes,
                     unsigned int skip, unsigned int delay,
                     const char *pattern, int do_requeue_last,
                     enum buffer_fill_mode fill)
{
    struct timespec start;
    struct timeval last;
    struct timespec ts;
    struct v4l2_buffer buf;
    unsigned int size;
    unsigned int i;
    double bps;
    double fps;
    int ret;

    /* Start streaming. */
    ret = video_enable(dev, 1);
    if (ret < 0)
        goto done;

    size = 0;
    clock_gettime(CLOCK_MONOTONIC, &start);
    /* `last` tracks the previous frame's driver timestamp (timeval),
     * seeded from the monotonic start time. */
    last.tv_sec = start.tv_sec;
    last.tv_usec = start.tv_nsec / 1000;

    for (i = 0; i < nframes; ++i) {
        /* Dequeue a buffer. */
        memset(&buf, 0, sizeof buf);
        buf.type = dev->type;
        buf.memory = dev->memtype;
        ret = ioctl(dev->fd, VIDIOC_DQBUF, &buf);
        if (ret < 0) {
            if (errno != EIO) {
                printf("Unable to dequeue buffer: %s (%d).\n",
                       strerror(errno), errno);
                goto done;
            }
            /* EIO: transient error — rebuild the buffer descriptor and
             * carry on so it still gets requeued below. */
            buf.type = dev->type;
            buf.memory = dev->memtype;
            if (dev->memtype == V4L2_MEMORY_USERPTR)
                buf.m.userptr = (unsigned long)dev->buffers[i].mem;
        }

        /* Sanity-check payload size against the negotiated image size. */
        if (dev->type == V4L2_BUF_TYPE_VIDEO_CAPTURE && dev->imagesize != 0 &&
            buf.bytesused != dev->imagesize)
            printf("Warning: bytes used %u != image size %u\n",
                   buf.bytesused, dev->imagesize);

        if (dev->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
            video_verify_buffer(dev, buf.index);

        size += buf.bytesused;

        /* Instantaneous fps from consecutive driver timestamps. */
        fps = (buf.timestamp.tv_sec - last.tv_sec) * 1000000
            + buf.timestamp.tv_usec - last.tv_usec;
        fps = fps ? 1000000.0 / fps : 0.0;

        clock_gettime(CLOCK_MONOTONIC, &ts);
        printf("%u (%u) [%c] %u %u bytes %ld.%06ld %ld.%06ld %.3f fps\n",
               i, buf.index,
               (buf.flags & V4L2_BUF_FLAG_ERROR) ? 'E' : '-',
               buf.sequence, buf.bytesused,
               buf.timestamp.tv_sec, buf.timestamp.tv_usec,
               ts.tv_sec, ts.tv_nsec/1000, fps);

        last = buf.timestamp;

        /* Save the image. */
        if (dev->type == V4L2_BUF_TYPE_VIDEO_CAPTURE && pattern && !skip)
            video_save_image(dev, &buf, pattern, i);

        if (skip)
            --skip;

        /* Requeue the buffer. */
        if (delay > 0)
            usleep(delay * 1000);

        fflush(stdout);

        /* Hold back the final dev->nbufs buffers unless requeueing of
         * the last buffers was explicitly requested. */
        if (i == nframes - dev->nbufs && !do_requeue_last)
            continue;

        ret = video_queue_buffer(dev, buf.index, fill);
        if (ret < 0) {
            printf("Unable to requeue buffer: %s (%d).\n",
                   strerror(errno), errno);
            goto done;
        }
    }

    /* Stop streaming. */
    video_enable(dev, 0);

    if (nframes == 0) {
        printf("No frames captured.\n");
        goto done;
    }

    /* Avoid division by zero when no time elapsed; `ts` holds the
     * monotonic time of the last captured frame here. */
    if (ts.tv_sec == start.tv_sec && ts.tv_nsec == start.tv_nsec)
        goto done;

    /* ts := elapsed time, normalized so tv_nsec is non-negative. */
    ts.tv_sec -= start.tv_sec;
    ts.tv_nsec -= start.tv_nsec;
    if (ts.tv_nsec < 0) {
        ts.tv_sec--;
        ts.tv_nsec += 1000000000;
    }

    bps = size/(ts.tv_nsec/1000.0+1000000.0*ts.tv_sec)*1000000.0;
    fps = i/(ts.tv_nsec/1000.0+1000000.0*ts.tv_sec)*1000000.0;

    printf("Captured %u frames in %lu.%06lu seconds (%f fps, %f B/s).\n",
           i, ts.tv_sec, ts.tv_nsec/1000, fps, bps);

done:
    return video_free_buffers(dev);
}
int init_v4l2(struct video_device *vd) { int i; struct v4l2_streamparm setfps; if ((vd->fd = OPEN_VIDEO(vd->device_filename, O_RDWR)) == -1) { log_itf(LOG_ERROR, "Error opening V4L2 interface on %s. errno %d", vd->device_filename, errno); } memset(&vd->cap, 0, sizeof(struct v4l2_capability)); if (xioctl(vd->fd, VIDIOC_QUERYCAP, &vd->cap) < 0) { log_itf(LOG_ERROR, "Error opening device %s: unable to query device.", vd->device_filename); return -1; } if ((vd->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) { log_itf(LOG_ERROR, "Error opening device %s: video capture not supported.", vd->device_filename); return -1; } if (vd->use_streaming) { if (!(vd->cap.capabilities & V4L2_CAP_STREAMING)) { log_itf(LOG_ERROR, "%s does not support streaming I/O", vd->device_filename); return -1; } } else { if (!(vd->cap.capabilities & V4L2_CAP_READWRITE)) { log_itf(LOG_ERROR, "%s does not support read I/O", vd->device_filename); return -1; } } vd->streaming_state = STREAMING_OFF; // set format in memset(&vd->fmt, 0, sizeof(struct v4l2_format)); vd->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; vd->fmt.fmt.pix.width = vd->width; vd->fmt.fmt.pix.height = vd->height; vd->fmt.fmt.pix.pixelformat = vd->format_in; vd->fmt.fmt.pix.field = V4L2_FIELD_ANY; if (xioctl(vd->fd, VIDIOC_S_FMT, &vd->fmt) < 0) { log_itf(LOG_WARNING, "Unable to set format %d, res %dx%d, device %s. 
Trying fallback.", vd->format_in, vd->width, vd->height, vd->device_filename); // Try the fallback format vd->format_in = UVC_FALLBACK_FORMAT; vd->fmt.fmt.pix.pixelformat = vd->format_in; if (xioctl(vd->fd, VIDIOC_S_FMT, &vd->fmt) < 0) { log_itf(LOG_ERROR, "Unable to set fallback format %d, res %dx%d, device %s.", vd->format_in, vd->width, vd->height, vd->device_filename); return -1; } } if ((vd->fmt.fmt.pix.width != vd->width) || (vd->fmt.fmt.pix.height != vd->height)) { log_itf(LOG_WARNING, "The format asked unavailable, so the width %d height %d on device %s.", vd->fmt.fmt.pix.width, vd->fmt.fmt.pix.height, vd->device_filename); vd->width = vd->fmt.fmt.pix.width; vd->height = vd->fmt.fmt.pix.height; // look the format is not part of the deal ??? if (vd->format_in != vd->fmt.fmt.pix.pixelformat) { if (vd->format_in == V4L2_PIX_FMT_MJPEG) { log_itf(LOG_ERROR, "The input device %s does not supports MJPEG mode.\nYou may also try the YUV mode, but it requires a much more CPU power.", vd->device_filename); return -1; } else if (vd->format_in == V4L2_PIX_FMT_YUYV) { log_itf(LOG_ERROR, "The input device %s does not supports YUV mode.", vd->device_filename); return -1; } } else { vd->format_in = vd->fmt.fmt.pix.pixelformat; } } // set framerate memset(&setfps, 0, sizeof(struct v4l2_streamparm)); setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; setfps.parm.capture.timeperframe.numerator = 1; setfps.parm.capture.timeperframe.denominator = vd->fps; if (xioctl(vd->fd, VIDIOC_S_PARM, &setfps) < 0) { log_itf(LOG_ERROR, "Unable to set FPS to %d on device %s.", vd->fps, vd->device_filename); return -1; } // request buffers memset(&vd->rb, 0, sizeof(struct v4l2_requestbuffers)); vd->rb.count = NB_BUFFER; vd->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; vd->rb.memory = V4L2_MEMORY_MMAP; if (xioctl(vd->fd, VIDIOC_REQBUFS, &vd->rb) < 0) { log_itf(LOG_ERROR, "Unable to allocate buffers for device %s.", vd->device_filename); return -1; } // map the buffers for(i = 0; i < NB_BUFFER; i++) { 
memset(&vd->buf, 0, sizeof(struct v4l2_buffer)); vd->buf.index = i; vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; vd->buf.memory = V4L2_MEMORY_MMAP; if (xioctl(vd->fd, VIDIOC_QUERYBUF, &vd->buf)) { log_itf(LOG_ERROR, "Unable to query buffer on device %s.", vd->device_filename); return -1; } vd->mem[i] = mmap(0 /* start anywhere */ , vd->buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, vd->fd, vd->buf.m.offset); if (vd->mem[i] == MAP_FAILED) { log_itf(LOG_ERROR, "Unable to map buffer on device %s.", vd->device_filename); return -1; } DBG("Buffer mapped at address %p for device %s.", vd->mem[i], vd->device_filename); } // Queue the buffers. for(i = 0; i < NB_BUFFER; ++i) { memset(&vd->buf, 0, sizeof(struct v4l2_buffer)); vd->buf.index = i; vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; vd->buf.memory = V4L2_MEMORY_MMAP; if (xioctl(vd->fd, VIDIOC_QBUF, &vd->buf) < 0) { log_itf(LOG_ERROR, "Unable to query buffer on device %s.", vd->device_filename); return -1; } } if (video_enable(vd)) { log_itf(LOG_ERROR, "Unable to enable video for device %s.", vd->device_filename); return -1; } return 0; }