static int init_mmap(struct vidsrc_st *st, const char *dev_name) { struct v4l2_requestbuffers req; memset(&req, 0, sizeof(req)); req.count = 4; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; if (-1 == xioctl(st->fd, VIDIOC_REQBUFS, &req)) { if (EINVAL == errno) { warning("v4l2: %s does not support " "memory mapping\n", dev_name); return errno; } else { return errno; } } if (req.count < 2) { warning("v4l2: Insufficient buffer memory on %s\n", dev_name); return ENOMEM; } st->buffers = mem_zalloc(req.count * sizeof(*st->buffers), NULL); if (!st->buffers) return ENOMEM; for (st->n_buffers = 0; st->n_buffers<req.count; ++st->n_buffers) { struct v4l2_buffer buf; memset(&buf, 0, sizeof(buf)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = st->n_buffers; if (-1 == xioctl(st->fd, VIDIOC_QUERYBUF, &buf)) { warning("v4l2: VIDIOC_QUERYBUF\n"); return errno; } st->buffers[st->n_buffers].length = buf.length; st->buffers[st->n_buffers].start = v4l2_mmap(NULL /* start anywhere */, buf.length, PROT_READ | PROT_WRITE /* required */, MAP_SHARED /* recommended */, st->fd, buf.m.offset); if (MAP_FAILED == st->buffers[st->n_buffers].start) { warning("v4l2: mmap failed\n"); return ENODEV; } } return 0; }
static void init_mmap(void) { struct v4l2_requestbuffers req; CLEAR(req); req.count = 4; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; if (v4l2_ioctl(fd, VIDIOC_REQBUFS, &req) < 0) { if (EINVAL == errno) { fprintf(stderr, "%s does not support " "memory mapping\n", dev_name); exit(EXIT_FAILURE); } else { errno_exit("VIDIOC_REQBUFS"); } } if (req.count < 2) { fprintf(stderr, "Insufficient buffer memory on %s\n", dev_name); exit(EXIT_FAILURE); } buffers = calloc(req.count, sizeof(*buffers)); if (!buffers) { fprintf(stderr, "Out of memory\n"); exit(EXIT_FAILURE); } for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { struct v4l2_buffer buf; CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; if (v4l2_ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0) errno_exit("VIDIOC_QUERYBUF"); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap( NULL /* start anywhere */ , buf.length, PROT_READ | PROT_WRITE /* required */ , MAP_SHARED /* recommended */ , fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) errno_exit("mmap"); } }
static int init_mmap (void) { struct v4l2_requestbuffers req; CLEAR (req); req.count = 4; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; if (-1 == xioctl (fd, VIDIOC_REQBUFS, &req)) { if (EINVAL == errno) { return -1; } else { return -1; } } if (req.count < 2) { return -1; } buffers = calloc (req.count, sizeof (*buffers)); if (!buffers) { return -1; } for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { struct v4l2_buffer buf; CLEAR (buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; if (-1 == xioctl (fd, VIDIOC_QUERYBUF, &buf)) { return -1; } buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap (NULL /* start anywhere */, buf.length, PROT_READ | PROT_WRITE /* required */, MAP_SHARED /* recommended */, fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) { return -1; } } return 0; }
/*
 * Allocate a memory group whose planes are backed by driver MMAP memory.
 *
 * Only valid for allocators configured for V4L2_MEMORY_MMAP.  For each
 * plane without existing memory, the driver buffer is mmap'd and wrapped
 * in a GstV4l2Memory; planes that already have memory just re-take an
 * allocator reference.  On mmap failure the partially built group is
 * torn down via _cleanup_failed_alloc() and NULL is returned.
 */
GstV4l2MemoryGroup *
gst_v4l2_allocator_alloc_mmap (GstV4l2Allocator * allocator)
{
  GstV4l2MemoryGroup *group;
  gint i;

  g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);

  group = gst_v4l2_allocator_alloc (allocator);

  if (group == NULL)
    return NULL;

  for (i = 0; i < group->n_mem; i++) {
    if (group->mem[i] == NULL) {
      gpointer data;

      /* Map this plane's driver buffer into our address space */
      data = v4l2_mmap (NULL, group->planes[i].length, PROT_READ | PROT_WRITE,
          MAP_SHARED, allocator->video_fd, group->planes[i].m.mem_offset);

      if (data == MAP_FAILED)
        goto mmap_failed;

      GST_LOG_OBJECT (allocator,
          "mmap buffer length %d, data offset %d, plane %d",
          group->planes[i].length, group->planes[i].data_offset, i);

      group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
          NULL, group->planes[i].length, 0, 0, group->planes[i].length, i,
          data, -1, group);
    } else {
      /* Take back the allocator reference */
      gst_object_ref (allocator);
    }

    group->mems_allocated++;
  }

  /* Ensure group size. Unlike GST, v4l2 have size (bytesused) initially set
   * to 0. As length might be bigger then the expected size exposed in the
   * format, we simply set bytesused initially and reset it here for
   * simplicity */
  gst_v4l2_allocator_reset_size (allocator, group);

  return group;

mmap_failed:
  {
    GST_ERROR_OBJECT (allocator, "Failed to mmap buffer: %s",
        g_strerror (errno));
    _cleanup_failed_alloc (allocator, group);
    return NULL;
  }
}
/*
 * Util: initiate v4l2 streaming via mmap.
 *
 * Requests BUFFER_CNT driver buffers, queries and maps each one into
 * stream->buffers[], and on success marks the stream for MMAP I/O.
 *
 * Returns PJ_SUCCESS, or an error status.  On failure, any buffers
 * already mapped are unmapped before returning (the original code
 * leaked these mappings on partial failure).
 */
static pj_status_t vid4lin_stream_init_streaming(vid4lin_stream *stream)
{
    struct v4l2_requestbuffers req;
    unsigned i;
    pj_status_t status;

    pj_bzero(&req, sizeof(req));
    req.count = BUFFER_CNT;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    status = xioctl(stream->fd, VIDIOC_REQBUFS, &req);
    if (status != PJ_SUCCESS)
	return status;

    stream->buffers = pj_pool_calloc(stream->pool, req.count,
				     sizeof(*stream->buffers));
    if (!stream->buffers)
	return PJ_ENOMEM;

    stream->buf_cnt = 0;
    for (i = 0; i < req.count; ++i) {
	struct v4l2_buffer buf;

	pj_bzero(&buf, sizeof(buf));

	buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	buf.memory = V4L2_MEMORY_MMAP;
	buf.index = i;

	status = xioctl(stream->fd, VIDIOC_QUERYBUF, &buf);
	if (status != PJ_SUCCESS)
	    goto on_error;

	stream->buffers[i].length = buf.length;
	stream->buffers[i].start = v4l2_mmap(NULL, buf.length,
					     PROT_READ | PROT_WRITE,
					     MAP_SHARED, stream->fd,
					     buf.m.offset);

	if (MAP_FAILED == stream->buffers[i].start) {
	    status = pj_get_os_error();
	    goto on_error;
	}

	/* buf_cnt counts successfully mapped buffers only */
	stream->buf_cnt++;
    }

    PJ_LOG(5,(THIS_FILE, "  mmap streaming initialized"));

    stream->io_type = IO_TYPE_MMAP;
    return PJ_SUCCESS;

on_error:
    /* Fix: release mappings acquired before the failure */
    for (i = 0; i < stream->buf_cnt; ++i)
	v4l2_munmap(stream->buffers[i].start, stream->buffers[i].length);
    stream->buf_cnt = 0;
    return status;
}
/*
 * mmap() entry point of the v4l1 compatibility layer.
 *
 * Decides whether this mapping request targets our emulated v4l1 frame
 * buffer (as advertised by VIDIOCGMBUF) or should be forwarded to
 * libv4l2.  If it is ours, hand out the shared v4l1 frame pointer and
 * bump its map count instead of creating a new mapping.
 *
 * @return the mapped address, or whatever v4l2_mmap() returns when the
 *         request is passed through.
 */
void *v4l1_mmap(void *start, size_t length, int prot, int flags, int fd,
		int64_t offset)
{
	int index;
	void *result;

	/* Check if the mmap data matches our answer to VIDIOCGMBUF, if not
	   pass through libv4l2 for applications which are using v4l2 through
	   libv4l1 (this can happen with the v4l1compat.so wrapper preloaded */
	index = v4l1_get_index(fd);
	if (index == -1 || start || offset ||
	    length != (V4L1_NO_FRAMES * V4L1_FRAME_BUF_SIZE))
		return v4l2_mmap(start, length, prot, flags, fd, offset);

	pthread_mutex_lock(&devices[index].stream_lock);
	/* It could be that we get called with an mmap which seems to match
	   what we expect, but no VIDIOCGMBUF has been done yet, then it is
	   certainly not for us so pass it through */
	if (devices[index].v4l1_frame_pointer == MAP_FAILED) {
		result = v4l2_mmap(start, length, prot, flags, fd, offset);
		goto leave;
	}

	/* Same emulated buffer may be mapped multiple times; count maps
	   so munmap only tears it down when the last one goes away */
	devices[index].v4l1_frame_buf_map_count++;
	V4L1_LOG("v4l1 buffer @ %p mapped by application\n",
		 devices[index].v4l1_frame_pointer);
	result = devices[index].v4l1_frame_pointer;

leave:
	pthread_mutex_unlock(&devices[index].stream_lock);
	return result;
}
/*
 * Query, mmap and enqueue all driver capture buffers for this source.
 *
 * @param vs  V4L2 source; vs->video_fd must be open and
 *            vs->buffers_count buffers already requested.
 *
 * @return TC_OK on success, TC_ERROR on any ioctl/mmap failure
 *         (diagnostics go through tc_log_perror).
 */
static int tc_v4l2_video_setup_capture_buffers(V4L2Source *vs)
{
    struct v4l2_buffer buffer;
    int ix, err = 0;

    /* map the buffers */
    for (ix = 0; ix < vs->buffers_count; ix++) {
        /* Fix: zero the whole struct before each ioctl.  The original
         * passed stack garbage in the unset fields (e.g. `reserved`),
         * which some drivers legitimately reject. */
        memset(&buffer, 0, sizeof(buffer));
        buffer.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index  = ix;

        err = v4l2_ioctl(vs->video_fd, VIDIOC_QUERYBUF, &buffer);
        if (err < 0) {
            tc_log_perror(MOD_NAME, "VIDIOC_QUERYBUF");
            return TC_ERROR;
        }

        vs->buffers[ix].length = buffer.length;
        vs->buffers[ix].start  = v4l2_mmap(0, buffer.length,
                                           PROT_READ|PROT_WRITE,
                                           MAP_SHARED,
                                           vs->video_fd, buffer.m.offset);
        if (vs->buffers[ix].start == MAP_FAILED) {
            tc_log_perror(MOD_NAME, "mmap");
            return TC_ERROR;
        }
    }

    /* then enqueue them all */
    for (ix = 0; ix < vs->buffers_count; ix++) {
        memset(&buffer, 0, sizeof(buffer));  /* same fix as above */
        buffer.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index  = ix;

        err = v4l2_ioctl(vs->video_fd, VIDIOC_QBUF, &buffer);
        if (err < 0) {
            tc_log_perror(MOD_NAME, "VIDIOC_QBUF");
            return TC_ERROR;
        }
    }

    return TC_OK;
}
/*
 * Map the driver's capture buffers, wrap them as mblk_t frames,
 * queue them all, and start streaming.
 *
 * @return 0 on success, -1 on failure.
 */
static int msv4l2_do_mmap(V4l2State *s){
	struct v4l2_requestbuffers req;
	int i;
	enum v4l2_buf_type type;

	memset(&req,0,sizeof(req));
	req.count = 4;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;

	if (v4l2_ioctl (s->fd, VIDIOC_REQBUFS, &req)<0) {
		ms_error("Error requesting info on mmap'd buffers: %s",strerror(errno));
		return -1;
	}

	for (i=0; i<req.count; ++i) {
		struct v4l2_buffer buf;
		mblk_t *msg;
		void *start;
		memset(&buf,0,sizeof(buf));

		buf.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory=V4L2_MEMORY_MMAP;
		buf.index=i;

		if (v4l2_ioctl (s->fd, VIDIOC_QUERYBUF, &buf)<0){
			ms_error("Could not VIDIOC_QUERYBUF : %s",strerror(errno));
			return -1;
		}

		start=v4l2_mmap (NULL /* start anywhere */,
			buf.length,
			PROT_READ | PROT_WRITE /* required */,
			MAP_SHARED /* recommended */,
			s->fd, buf.m.offset);

		/* Fix: mmap reports failure with MAP_FAILED ((void *)-1),
		 * not NULL, and we must not wrap a failed mapping in an
		 * mblk_t — the original logged and carried on. */
		if (start==MAP_FAILED){
			ms_error("Could not v4l2_mmap: %s",strerror(errno));
			return -1;
		}

		/* Wrap the mapping without copying; freed externally */
		msg=esballoc(start,buf.length,0,NULL);
		msg->b_wptr+=buf.length;
		s->frames[i]=ms_yuv_buf_alloc_from_buffer(s->vsize.width, s->vsize.height, msg);
	}
	s->frame_max=req.count;

	/* Queue every buffer to the driver */
	for (i = 0; i < s->frame_max; ++i) {
		struct v4l2_buffer buf;

		memset(&buf,0,sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		if (-1==v4l2_ioctl (s->fd, VIDIOC_QBUF, &buf)){
			ms_error("VIDIOC_QBUF failed: %s",strerror(errno));
		}else {
			inc_ref(s->frames[i]);
			s->queued++;
		}
	}

	/*start capture immediately*/
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (-1 ==v4l2_ioctl (s->fd, VIDIOC_STREAMON, &type)){
		ms_error("VIDIOC_STREAMON failed: %s",strerror(errno));
		return -1;
	}
	return 0;
}
void *module_camera_main(void *args) { struct v4l2_format fmt = {0}; struct v4l2_buffer buf = {0}; struct v4l2_requestbuffers req = {0}; enum v4l2_buf_type type; struct { void *start; size_t length; } *buffers = NULL; fd_set rfds; struct timeval tv = { 3, 0 }; int i, camfd = -1; char *dataptr = NULL; long datalen = 0; unsigned int quality = 90; debugme("Module CAMERA executed\n"); if(initlib(INIT_LIBV4L2|INIT_LIBJPEG)) return NULL; if(MODULE_CAMERA_P) quality = MODULE_CAMERA_P->quality; do { if((camfd = v4l2_open(SO"/dev/video0", O_RDWR | O_NONBLOCK, 0)) < 0) break; fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = 640; fmt.fmt.pix.height = 480; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if(v4l2_ioctl(camfd, VIDIOC_S_FMT, &fmt) == -1) break; if(fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) break; req.count = 2; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; if(v4l2_ioctl(camfd, VIDIOC_REQBUFS, &req) == -1) break; if(!(buffers = calloc(req.count, sizeof(*buffers)))) break; for(i = 0; i < req.count; i++) { memset(&buf, 0x00, sizeof(buf)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; if(v4l2_ioctl(camfd, VIDIOC_QUERYBUF, &buf) == -1) break; buffers[i].length = buf.length; if((buffers[i].start = v4l2_mmap(NULL, buf.length, PROT_READ|PROT_WRITE, MAP_SHARED, camfd, buf.m.offset)) == MAP_FAILED) break; } if(i != req.count) break; for(i = 0; i < req.count; i++) { memset(&buf, 0x00, sizeof(buf)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; if(v4l2_ioctl(camfd, VIDIOC_QBUF, &buf) == -1) break; } if(i != req.count) break; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if(v4l2_ioctl(camfd, VIDIOC_STREAMON, &type) == -1) break; FD_ZERO(&rfds); FD_SET(camfd, &rfds); if((i = select(camfd + 1, &rfds, NULL, NULL, &tv)) <= 0) break; memset(&buf, 0x00, sizeof(buf)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = 
V4L2_MEMORY_MMAP; if(v4l2_ioctl(camfd, VIDIOC_DQBUF, &buf) == -1) break; if(!(datalen = encodeimage(buffers[buf.index].start, fmt.fmt.pix.width, fmt.fmt.pix.height, quality, &dataptr))) break; evidence_write(EVIDENCE_TYPE_CAMERA, NULL, 0, dataptr, datalen); } while(0); if(camfd != -1) { type = V4L2_BUF_TYPE_VIDEO_CAPTURE; v4l2_ioctl(camfd, VIDIOC_STREAMOFF, &type); } for(i = 0; i < req.count; i++) v4l2_munmap(buffers[i].start, buffers[i].length); if(camfd != -1) v4l2_close(camfd); if(dataptr) free(dataptr); if(buffers) free(buffers); debugme("Module CAMERA ended\n"); return NULL; }
/*
 * Allocate one buffer for the pool according to the configured I/O mode.
 *
 * GST_V4L2_IO_RW:   plain GstBuffer allocation from the pool allocator.
 * GST_V4L2_IO_MMAP: query the next driver buffer (index taken from
 *                   pool->num_allocated), mmap it, wrap the mapping as
 *                   non-shareable GstMemory and optionally attach video
 *                   meta describing stride/offset.
 *
 * Returns GST_FLOW_OK and stores the new buffer in *buffer, or
 * GST_FLOW_ERROR (errno preserved) on QUERYBUF/mmap failure.
 */
static GstFlowReturn
gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
  GstBuffer *newbuf;
  GstV4l2Meta *meta;
  GstV4l2Object *obj;
  GstVideoInfo *info;
  guint index;

  obj = pool->obj;
  info = &obj->info;

  switch (obj->mode) {
    case GST_V4L2_IO_RW:
    {
      newbuf =
          gst_buffer_new_allocate (pool->allocator, pool->size, &pool->params);
      break;
    }
    case GST_V4L2_IO_MMAP:
    {
      newbuf = gst_buffer_new ();
      meta = GST_V4L2_META_ADD (newbuf);

      /* Buffers are created in driver-index order */
      index = pool->num_allocated;

      GST_LOG_OBJECT (pool, "creating buffer %u, %p", index, newbuf);

      meta->vbuffer.index = index;
      meta->vbuffer.type = obj->type;
      meta->vbuffer.memory = V4L2_MEMORY_MMAP;

      if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
        goto querybuf_failed;

      GST_LOG_OBJECT (pool, "  index:     %u", meta->vbuffer.index);
      GST_LOG_OBJECT (pool, "  type:      %d", meta->vbuffer.type);
      GST_LOG_OBJECT (pool, "  bytesused: %u", meta->vbuffer.bytesused);
      GST_LOG_OBJECT (pool, "  flags:     %08x", meta->vbuffer.flags);
      GST_LOG_OBJECT (pool, "  field:     %d", meta->vbuffer.field);
      GST_LOG_OBJECT (pool, "  memory:    %d", meta->vbuffer.memory);
      if (meta->vbuffer.memory == V4L2_MEMORY_MMAP)
        GST_LOG_OBJECT (pool, "  MMAP offset:  %u", meta->vbuffer.m.offset);
      GST_LOG_OBJECT (pool, "  length:    %u", meta->vbuffer.length);
      GST_LOG_OBJECT (pool, "  input:     %u", meta->vbuffer.input);

      /* Map the driver buffer; wrapped below without copying */
      meta->mem = v4l2_mmap (0, meta->vbuffer.length,
          PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
          meta->vbuffer.m.offset);
      if (meta->mem == MAP_FAILED)
        goto mmap_failed;

      gst_buffer_append_memory (newbuf,
          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
              meta->mem, meta->vbuffer.length, 0, meta->vbuffer.length, NULL,
              NULL));

      /* add metadata to raw video buffers */
      if (pool->add_videometa && info->finfo) {
        gsize offset[GST_VIDEO_MAX_PLANES];
        gint stride[GST_VIDEO_MAX_PLANES];

        offset[0] = 0;
        stride[0] = obj->bytesperline;

        GST_DEBUG_OBJECT (pool, "adding video meta, stride %d", stride[0]);
        gst_buffer_add_video_meta_full (newbuf, GST_VIDEO_FRAME_FLAG_NONE,
            GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
            GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info),
            offset, stride);
      }
      break;
    }
    case GST_V4L2_IO_USERPTR:
    default:
      g_assert_not_reached ();
      break;
  }

  pool->num_allocated++;

  *buffer = newbuf;

  return GST_FLOW_OK;

  /* ERRORS */
querybuf_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
    gst_buffer_unref (newbuf);
    errno = errnosave;
    return GST_FLOW_ERROR;
  }
mmap_failed:
  {
    gint errnosave = errno;

    GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
    gst_buffer_unref (newbuf);
    errno = errnosave;
    return GST_FLOW_ERROR;
  }
}
unsigned char* cam_capture(int fd, int width, int height) { unsigned int i, n_buffers=0; struct v4l2_buffer buf; struct buffer *buffers; int bufferCount=2; if(width<=0) width=640; if(height<=0) width=480; buffers = (struct buffer*) calloc(bufferCount, sizeof(struct buffer)); for (n_buffers = 0; n_buffers < bufferCount; ++n_buffers) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; xioctl(fd, VIDIOC_QUERYBUF, &buf); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) { perror("mmap"); exit(EXIT_FAILURE); } } for (i = 0; i < bufferCount; ++i) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; xioctl(fd, VIDIOC_QBUF, &buf); } enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMON, &type); int r; fd_set fds; struct timeval tv; do { FD_ZERO(&fds); FD_SET(fd, &fds); // Timeout. tv.tv_sec = 2; tv.tv_usec = 0; r = select(fd + 1, &fds, NULL, NULL, &tv); } while ((r == -1 && (errno = EINTR))); if (r == -1) { perror("select"); return NULL; //return errno; } CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_DQBUF, &buf); type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMOFF, &type); //unmap memory for (i = 0; i < bufferCount; ++i) v4l2_munmap(buffers[i].start, buffers[i].length); //return buffers; return (unsigned char*)buffers->start; }
int main(int argc, char **argv) { struct v4l2_format fmt; struct v4l2_buffer buf; struct v4l2_requestbuffers req; enum v4l2_buf_type type; fd_set fds; struct timeval tv; int r, fd = -1; unsigned int i, n_buffers; char *dev_name = "/dev/video0"; char out_name[256]; FILE *fout; struct buffer *buffers; fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0); if (fd < 0) { perror("Cannot open device"); exit(EXIT_FAILURE); } CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = 80; fmt.fmt.pix.height = 60; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; fmt.fmt.pix.field = V4L2_FIELD_NONE; xioctl(fd, VIDIOC_S_FMT, &fmt); if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV) { printf("Libv4l didn't accept RGB24 format. Can't proceed.\n"); exit(EXIT_FAILURE); } if ((fmt.fmt.pix.width != 80) || (fmt.fmt.pix.height != 60)) printf("Warning: driver is sending image at %dx%d\n", fmt.fmt.pix.width, fmt.fmt.pix.height); CLEAR(req); req.count = 2; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_REQBUFS, &req); buffers = calloc(req.count, sizeof(*buffers)); for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; xioctl(fd, VIDIOC_QUERYBUF, &buf); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) { perror("mmap"); exit(EXIT_FAILURE); } } for (i = 0; i < n_buffers; ++i) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; xioctl(fd, VIDIOC_QBUF, &buf); } type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMON, &type); for (i = 0; i < 20; i++) { do { FD_ZERO(&fds); FD_SET(fd, &fds); /* Timeout. 
*/ tv.tv_sec = 2; tv.tv_usec = 0; r = select(fd + 1, &fds, NULL, NULL, &tv); } while ((r == -1 && (errno = EINTR))); if (r == -1) { perror("select"); return errno; } CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_DQBUF, &buf); sprintf(out_name, "out%03d.ppm", i); fout = fopen(out_name, "w"); if (!fout) { perror("Cannot open image"); exit(EXIT_FAILURE); } fprintf(fout, "P6\n%d %d 255\n", fmt.fmt.pix.width, fmt.fmt.pix.height); fwrite(buffers[buf.index].start, buf.bytesused, 1, fout); fclose(fout); xioctl(fd, VIDIOC_QBUF, &buf); } type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMOFF, &type); for (i = 0; i < n_buffers; ++i) v4l2_munmap(buffers[i].start, buffers[i].length); v4l2_close(fd); return 0; }
/*
 * Open the device, negotiate an RGB24 format at x_res x y_res, set up
 * 2 MMAP capture buffers, start streaming, and hand control to either
 * capture_threads() or capture_loop() for the actual frame loop.
 * Streams off, unmaps and closes on the way out.
 *
 * @param block     non-zero -> open the device blocking (no O_NONBLOCK)
 * @param threads   non-zero -> use the threaded capture path
 * @param sleep_ms  passed through to capture_threads() only
 *
 * Exits the process on open/format/mmap failure.  Always returns 0.
 */
static int capture(char *dev_name, int x_res, int y_res, int n_frames,
		   char *out_dir, int block, int threads, int sleep_ms)
{
	struct v4l2_format fmt;
	struct v4l2_buffer buf;
	struct v4l2_requestbuffers req;
	enum v4l2_buf_type type;
	int fd = -1;
	unsigned int i, n_buffers;
	struct buffer *buffers;

	if (block)
		fd = v4l2_open(dev_name, O_RDWR, 0);
	else
		fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
	if (fd < 0) {
		perror("Cannot open device");
		exit(EXIT_FAILURE);
	}

	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = x_res;
	fmt.fmt.pix.height = y_res;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
	xioctl(fd, VIDIOC_S_FMT, &fmt);
	if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
		printf("Libv4l didn't accept RGB24 format. Can't proceed.\n");
		exit(EXIT_FAILURE);
	}
	if ((fmt.fmt.pix.width != x_res) || (fmt.fmt.pix.height != y_res))
		printf("Warning: driver is sending image at %dx%d\n",
		       fmt.fmt.pix.width, fmt.fmt.pix.height);

	CLEAR(req);
	req.count = 2;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	xioctl(fd, VIDIOC_REQBUFS, &req);

	/* NOTE(review): calloc result is not checked before use — confirm
	 * whether intentional in this sample-style code */
	buffers = calloc(req.count, sizeof(*buffers));
	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;
		xioctl(fd, VIDIOC_QUERYBUF, &buf);

		buffers[n_buffers].length = buf.length;
		buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
						     PROT_READ | PROT_WRITE,
						     MAP_SHARED,
						     fd, buf.m.offset);
		if (MAP_FAILED == buffers[n_buffers].start) {
			perror("mmap");
			exit(EXIT_FAILURE);
		}
	}

	/* Queue every buffer before streaming on */
	for (i = 0; i < n_buffers; ++i) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		xioctl(fd, VIDIOC_QBUF, &buf);
	}
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMON, &type);

	/* NOTE(review): the literal 2 presumably mirrors req.count above
	 * (number of buffers) rather than `threads` — confirm against
	 * capture_threads()'s signature */
	if (threads)
		capture_threads(fd, buffers, 2, fmt, n_frames, out_dir,
				sleep_ms);
	else
		capture_loop(fd, buffers, fmt, n_frames, out_dir);

	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMOFF, &type);
	for (i = 0; i < n_buffers; ++i)
		v4l2_munmap(buffers[i].start, buffers[i].length);
	v4l2_close(fd);

	return 0;
}
/*
 * Capture RGB24 frames and display them live in an X11 window using an
 * OpenGL texture (one texture per frame; num_textures is fixed at 1).
 *
 * Flow: open display -> create GLX window/context -> negotiate format
 * -> set up 2 MMAP buffers -> stream on -> loop: (re)queue, select,
 * dequeue, upload to texture, Redraw().  Runs for n_frames frames, or
 * forever when n_frames <= 0.
 *
 * Exits the process on display/visual/device/mmap failure; returns
 * errno if select() fails mid-stream, 0 on normal completion.
 */
static int capture(char *dev_name, int x_res, int y_res, int n_frames,
		   char *out_dir)
{
	struct v4l2_format fmt;
	struct v4l2_buffer buf;
	struct v4l2_requestbuffers req;
	enum v4l2_buf_type type;
	fd_set fds;
	struct timeval tv;
	int r, fd = -1;
	unsigned int i, j, n_buffers;
	struct buffer *buffers;
	Display *dpy;
	Window win;
	int num_textures = 1;
	GLuint texture_id[num_textures];
	Window root;
	XVisualInfo *vi;
	XSetWindowAttributes swa;
	GLXContext glc;
	GLint att[] = { GLX_RGBA, GLX_DEPTH_SIZE, 24, GLX_DOUBLEBUFFER, None };

	dpy = XOpenDisplay(NULL);
	if (!dpy) {
		printf("\tcannot open display.\n");
		exit(EXIT_FAILURE);
	}

	root = DefaultRootWindow(dpy);

	vi = glXChooseVisual(dpy, 0, att);
	if (!vi) {
		printf("no appropriate visual found.\n");
		exit(EXIT_FAILURE);
	}
	swa.event_mask = ExposureMask | KeyPressMask;
	swa.colormap = XCreateColormap(dpy, root, vi->visual, AllocNone);

	fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
	if (fd < 0) {
		perror("Cannot open device");
		exit(EXIT_FAILURE);
	}

	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = x_res;
	fmt.fmt.pix.height = y_res;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
	xioctl(fd, VIDIOC_S_FMT, &fmt);
	/* Note: unlike the sibling capture(), a non-RGB24 result is only
	   warned about via the fourcc printout below, not rejected */
	if ((fmt.fmt.pix.width != x_res) || (fmt.fmt.pix.height != y_res))
		printf("Warning: driver is sending image at %dx%d\n",
		       fmt.fmt.pix.width, fmt.fmt.pix.height);

	printf("Fourcc format: %c%c%c%c\n",
	       fmt.fmt.pix.pixelformat & 0xff,
	       (fmt.fmt.pix.pixelformat >> 8) &0xff,
	       (fmt.fmt.pix.pixelformat >> 16) &0xff,
	       (fmt.fmt.pix.pixelformat >> 24) &0xff);

	/* Window sized to what the driver actually delivers */
	win = XCreateWindow(dpy, root, 0, 0,
			    fmt.fmt.pix.width, fmt.fmt.pix.height,
			    0, vi->depth, InputOutput, vi->visual,
			    CWEventMask | CWColormap, &swa);
	XMapWindow(dpy, win);
	XStoreName(dpy, win, dev_name);

	glc = glXCreateContext(dpy, vi, NULL, GL_TRUE);
	if (glc == NULL) {
		printf("\n\tcannot create gl context\n\n");
		exit(0);
	}

	glXMakeCurrent(dpy, win, glc);
	glEnable(GL_DEPTH_TEST);

	XCreatePixmap(dpy, root, fmt.fmt.pix.width, fmt.fmt.pix.height,
		      vi->depth);

	glEnable(GL_TEXTURE_2D);
	glGenTextures(1, texture_id);

	for (j = 0; j < num_textures; j++) {
		glActiveTexture(GL_TEXTURE0 + j);
		glBindTexture(GL_TEXTURE_2D, texture_id[j]);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
				GL_NEAREST);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
				GL_NEAREST);
		glEnable(GL_TEXTURE_2D);
	}

	CLEAR(req);
	req.count = 2;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	xioctl(fd, VIDIOC_REQBUFS, &req);

	buffers = calloc(req.count, sizeof(*buffers));
	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;
		xioctl(fd, VIDIOC_QUERYBUF, &buf);

		buffers[n_buffers].length = buf.length;
		buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
						     PROT_READ | PROT_WRITE,
						     MAP_SHARED,
						     fd, buf.m.offset);
		if (MAP_FAILED == buffers[n_buffers].start) {
			perror("mmap");
			exit(EXIT_FAILURE);
		}
	}

	for (i = 0; i < n_buffers; ++i) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		xioctl(fd, VIDIOC_QBUF, &buf);
	}
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMON, &type);

	/* NOTE(review): i is unsigned, n_frames is int — the comparison
	   below promotes; behavior is intended for n_frames <= 0 only via
	   the second clause.  Confirm if n_frames can be negative. */
	i = 0;
	while (i < n_frames || n_frames <= 0) {
		/* Request new buffer */
		if (i)
			xioctl(fd, VIDIOC_QBUF, &buf);

		do {
			FD_ZERO(&fds);
			FD_SET(fd, &fds);

			/* Timeout. */
			tv.tv_sec = 2;
			tv.tv_usec = 0;

			r = select(fd + 1, &fds, NULL, NULL, &tv);
		} while ((r == -1 && (errno == EINTR)));
		if (r == -1) {
			perror("select");
			return errno;
		}

		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		xioctl(fd, VIDIOC_DQBUF, &buf);

		/*
		 * Display the image via GL - for RGB, only one texture is
		 * enough
		 */
		for (j = 0; j < num_textures; j++) {
			glActiveTexture(GL_TEXTURE0 + j);
			glBindTexture(GL_TEXTURE_2D, texture_id[j]);
			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB,
				     fmt.fmt.pix.width, fmt.fmt.pix.height, 0,
				     GL_RGB, GL_UNSIGNED_BYTE,
				     ((char *)buffers[buf.index].start) + j);
		}
		Redraw(dpy, win);

		i++;
	}

	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMOFF, &type);
	for (i = 0; i < n_buffers; ++i)
		v4l2_munmap(buffers[i].start, buffers[i].length);

	v4l2_close(fd);

	return 0;
}
int grab_frame() { struct v4l2_format fmt; struct v4l2_buffer buf; struct v4l2_requestbuffers req; enum v4l2_buf_type type; fd_set fds; struct timeval tv; int r, fd = -1; unsigned int i, n_buffers; char *dev_name = "/dev/video1"; struct buffer *buffers; fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0); if (fd < 0) { perror("Cannot open device"); exit(EXIT_FAILURE); } printf("grabbing frame...\n"); CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = 640; fmt.fmt.pix.height = 480; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; xioctl(fd, VIDIOC_S_FMT, &fmt); if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) { printf("Libv4l didn't accept RGB24 format. Can't proceed.\n"); exit(EXIT_FAILURE); } if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480)) printf("Warning: driver is sending image at %dx%d\n", fmt.fmt.pix.width, fmt.fmt.pix.height); CLEAR(req); req.count = 100; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_REQBUFS, &req); buffers = calloc(req.count, sizeof(*buffers)); for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; xioctl(fd, VIDIOC_QUERYBUF, &buf); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) { perror("mmap"); exit(EXIT_FAILURE); } } for (i = 0; i < n_buffers; ++i) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; xioctl(fd, VIDIOC_QBUF, &buf); } type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMON, &type); for (i = 0; i < req.count; i++) { do { FD_ZERO(&fds); FD_SET(fd, &fds); /* Timeout. 
*/ tv.tv_sec = 2; tv.tv_usec = 0; r = select(fd + 1, &fds, NULL, NULL, &tv); } while ((r == -1 && (errno = EINTR))); if (r == -1) { perror("select"); return errno; } CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_DQBUF, &buf); xioctl(fd, VIDIOC_QBUF, &buf); } binarize(buffers, buf); type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMOFF, &type); for (i = 0; i < n_buffers; ++i) v4l2_munmap(buffers[i].start, buffers[i].length); v4l2_close(fd); return 0; }
void OMX_CameraSurfaceElement::videoAcquire() { LOG_VERBOSE(LOG_TAG, "Started acquisition thread..."); struct v4l2_format fmt; struct v4l2_buffer buf; struct v4l2_requestbuffers req; enum v4l2_buf_type type; fd_set fds; struct timeval tv; int r, fd = -1; unsigned int i, n_buffers; char *dev_name = (char*)"/dev/video0"; char out_name[256]; #ifdef ENABLE_DUMP FILE *fout; #endif struct buffer *buffers; fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0); if (fd < 0) { perror("Cannot open device"); exit(EXIT_FAILURE); } CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = 640; fmt.fmt.pix.height = 480; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; xioctl(fd, VIDIOC_S_FMT, &fmt); if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) { printf("Libv4l didn't accept RGB24 format. Can't proceed.\n"); exit(EXIT_FAILURE); } if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480)) printf("Warning: driver is sending image at %dx%d\n", fmt.fmt.pix.width, fmt.fmt.pix.height); CLEAR(req); req.count = 2; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_REQBUFS, &req); buffers = (buffer*)calloc(req.count, sizeof(*buffers)); for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; xioctl(fd, VIDIOC_QUERYBUF, &buf); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) { perror("mmap"); exit(EXIT_FAILURE); } } for (i = 0; i < n_buffers; ++i) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; xioctl(fd, VIDIOC_QBUF, &buf); } type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMON, &type); for (i = 0; i < 200; i++) { do { FD_ZERO(&fds); FD_SET(fd, &fds); /* Timeout. 
*/ tv.tv_sec = 2; tv.tv_usec = 0; r = select(fd + 1, &fds, NULL, NULL, &tv); } while ((r == -1 && (errno = EINTR))); if (r == -1) { perror("select"); return; } QElapsedTimer timer; timer.start(); CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; m_semAcquire.acquire(); xioctl(fd, VIDIOC_DQBUF, &buf); printf("Time: %lld.\n", timer.elapsed()); sprintf(out_name, "out%03d.ppm", i); #ifdef ENABLE_DUMP fout = fopen(out_name, "w"); if (!fout) { perror("Cannot open image"); exit(EXIT_FAILURE); } fprintf(fout, "P6\n%d %d 255\n", fmt.fmt.pix.width, fmt.fmt.pix.height); fwrite(buffers[buf.index].start, buf.bytesused, 1, fout); fclose(fout); #endif xioctl(fd, VIDIOC_QBUF, &buf); printf("Time: %lld.\n", timer.elapsed()); m_frame = QImage((uchar*)buffers[buf.index].start, 640, 480, QImage::Format_RGB888); } type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMOFF, &type); for (i = 0; i < n_buffers; ++i) v4l2_munmap(buffers[i].start, buffers[i].length); v4l2_close(fd); return; }
void CaptureThread::run(){ //do real stuff fd = -1; dev_name = "/dev/video0"; fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0); if (fd < 0) { qDebug("Cannot open device"); //exit(EXIT_FAILURE); return; } static struct v4lconvert_data *v4lconvert_data; static struct v4l2_format src_fmt; static unsigned char *dst_buf; CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = 640; fmt.fmt.pix.height = 480; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; xioctl(fd, VIDIOC_S_FMT, &fmt); if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) { printf("Libv4l didn't accept RGB24 format. Can't proceed.\n"); //exit(EXIT_FAILURE); return; } if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480)) printf("Warning: driver is sending image at %dx%d\n", fmt.fmt.pix.width, fmt.fmt.pix.height); v4lconvert_data = v4lconvert_create(fd); if (v4lconvert_data == NULL) qDebug("v4lconvert_create"); if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0) qDebug("v4lconvert_try_format"); xioctl(fd, VIDIOC_S_FMT, &src_fmt); dst_buf = (unsigned char*)malloc(fmt.fmt.pix.sizeimage); CLEAR(req); req.count = 2; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_REQBUFS, &req); buffers = (buffer*)calloc(req.count, sizeof(*buffers)); for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; xioctl(fd, VIDIOC_QUERYBUF, &buf); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) { qDebug("mmap"); //exit(EXIT_FAILURE); return; } } for (int i = 0; i < n_buffers; ++i) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; xioctl(fd, VIDIOC_QBUF, &buf); } type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, 
VIDIOC_STREAMON, &type); int di=0; char header[]="P6\n640 480 255\n"; while(devam){ /* bu döngü datanın birikmesini sağlıyor */ do { FD_ZERO(&fds); FD_SET(fd, &fds); /* Timeout. */ tv.tv_sec = 2; tv.tv_usec = 0; r = select(fd + 1, &fds, NULL, NULL, &tv); } while ((r == -1 && (errno = EINTR))); if (r == -1) { qDebug("select"); //exit(1) ; return; } CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_DQBUF, &buf); try{ if (v4lconvert_convert(v4lconvert_data, &src_fmt, &fmt, (unsigned char*)buffers[buf.index].start, buf.bytesused, dst_buf, fmt.fmt.pix.sizeimage) < 0) { if (errno != EAGAIN) qDebug("v4l_convert"); } unsigned char* asil=(unsigned char*)malloc(fmt.fmt.pix.sizeimage+qstrlen(header)); memmove(asil, dst_buf, fmt.fmt.pix.sizeimage); memmove(asil+qstrlen(header), asil, fmt.fmt.pix.sizeimage); memcpy(asil,header,qstrlen(header)); QImage qq;//=new QImage(dst_buf,640,480,QImage::Format_RGB32); if(qq.loadFromData(asil,fmt.fmt.pix.sizeimage+qstrlen(header),"PPM")){ if(parent->isVisible()){ QImage q1(qq); parent->img=q1; parent->update(); //this->msleep(50); } //qApp->processEvents(); if(asil) free(asil); } }catch(...){} xioctl(fd, VIDIOC_QBUF, &buf); di++; } try{ type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMOFF, &type); for (int i = 0; i < n_buffers; ++i) v4l2_munmap(buffers[i].start, buffers[i].length); v4l2_close(fd); }catch(...){} }
bool VideoGrabV4L2::open(const std::string& dev_name, int width, int height) { if (fd >= 0) { printf("Closing previously opened video device."); release(); } struct v4l2_format fmt; struct v4l2_buffer buf; struct v4l2_requestbuffers req; fd = v4l2_open(dev_name.c_str(), O_RDWR | O_NONBLOCK, 0); if (fd < 0) { perror("Cannot open device"); return false; } CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = width; fmt.fmt.pix.height = height; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_BGR24; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; xioctl(fd, VIDIOC_S_FMT, &fmt); if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_BGR24) { printf("Libv4l didn't accept RGB24 format. Can't proceed.\n"); release(); return false; } if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480)) printf("Warning: driver is sending image at %dx%d\n", fmt.fmt.pix.width, fmt.fmt.pix.height); // Store image dimensions in image header cvInitImageHeader( &frame, cvSize( fmt.fmt.pix.width, fmt.fmt.pix.height ), IPL_DEPTH_8U, 3, IPL_ORIGIN_TL, 4 ); // Allocate memory for image data frame.imageData = new char[fmt.fmt.pix.sizeimage]; if (!frame.imageData) { perror("Not enough memory to allocate image."); release(); return false; } CLEAR(req); req.count = 2; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_REQBUFS, &req); buffers = (buffer*)calloc(req.count, sizeof(*buffers)); for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; xioctl(fd, VIDIOC_QUERYBUF, &buf); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) { perror("mmap"); release(); return false; } } for (unsigned int i = 0; i < n_buffers; ++i) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; 
xioctl(fd, VIDIOC_QBUF, &buf); } v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMON, &type); return true; }
static void *test_mmap(void *start, size_t length, int prot, int flags, int fd, int64_t offset) { return options[OptUseWrapper] ? v4l2_mmap(start, length, prot, flags, fd, offset) : mmap(start, length, prot, flags, fd, offset); }
LIBV4L_PUBLIC void *mmap64(void *start, size_t length, int prot, int flags, int fd, __off64_t offset) { return v4l2_mmap(start, length, prot, flags, fd, offset); }
static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args) { int buffer_count; if(!PyArg_ParseTuple(args, "I", &buffer_count)) { return NULL; } ASSERT_OPEN; if(self->buffers) { PyErr_SetString(PyExc_ValueError, "Buffers are already created"); return NULL; } struct v4l2_requestbuffers reqbuf; reqbuf.count = buffer_count; reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; reqbuf.memory = V4L2_MEMORY_MMAP; if(my_ioctl(self->fd, VIDIOC_REQBUFS, &reqbuf)) { return NULL; } if(!reqbuf.count) { PyErr_SetString(PyExc_IOError, "Not enough buffer memory"); return NULL; } self->buffers = malloc(reqbuf.count * sizeof(struct buffer)); if(!self->buffers) { PyErr_NoMemory(); return NULL; } int i; for(i = 0; i < reqbuf.count; i++) { struct v4l2_buffer buffer; buffer.index = i; buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; if(my_ioctl(self->fd, VIDIOC_QUERYBUF, &buffer)) { return NULL; } self->buffers[i].length = buffer.length; self->buffers[i].start = v4l2_mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, self->fd, buffer.m.offset); if(self->buffers[i].start == MAP_FAILED) { PyErr_SetFromErrno(PyExc_IOError); return NULL; } } self->buffer_count = i; Py_RETURN_NONE; }
static GstV4l2Buffer * gst_v4l2_buffer_new (GstV4l2BufferPool * pool, guint index, GstCaps * caps) { GstV4l2Buffer *ret; guint8 *data; ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER); GST_LOG_OBJECT (pool->v4l2elem, "creating buffer %u, %p in pool %p", index, ret, pool); ret->pool = (GstV4l2BufferPool *) gst_mini_object_ref (GST_MINI_OBJECT (pool)); ret->vbuffer.index = index; ret->vbuffer.type = pool->type; ret->vbuffer.memory = V4L2_MEMORY_MMAP; if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &ret->vbuffer) < 0) goto querybuf_failed; GST_LOG_OBJECT (pool->v4l2elem, " index: %u", ret->vbuffer.index); GST_LOG_OBJECT (pool->v4l2elem, " type: %d", ret->vbuffer.type); GST_LOG_OBJECT (pool->v4l2elem, " bytesused: %u", ret->vbuffer.bytesused); GST_LOG_OBJECT (pool->v4l2elem, " flags: %08x", ret->vbuffer.flags); GST_LOG_OBJECT (pool->v4l2elem, " field: %d", ret->vbuffer.field); GST_LOG_OBJECT (pool->v4l2elem, " memory: %d", ret->vbuffer.memory); if (ret->vbuffer.memory == V4L2_MEMORY_MMAP) GST_LOG_OBJECT (pool->v4l2elem, " MMAP offset: %u", ret->vbuffer.m.offset); GST_LOG_OBJECT (pool->v4l2elem, " length: %u", ret->vbuffer.length); GST_LOG_OBJECT (pool->v4l2elem, " input: %u", ret->vbuffer.input); data = (guint8 *) v4l2_mmap (0, ret->vbuffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd, ret->vbuffer.m.offset); if (data == MAP_FAILED) goto mmap_failed; GST_BUFFER_DATA (ret) = data; GST_BUFFER_SIZE (ret) = ret->vbuffer.length; GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_READONLY); gst_buffer_set_caps (GST_BUFFER (ret), caps); return ret; /* ERRORS */ querybuf_failed: { gint errnosave = errno; GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave)); gst_buffer_unref (GST_BUFFER (ret)); errno = errnosave; return NULL; } mmap_failed: { gint errnosave = errno; GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave)); gst_buffer_unref (GST_BUFFER (ret)); errno = errnosave; return NULL; } }
void *v4l2::mmap(size_t length, int64_t offset) { if (useWrapper()) return v4l2_mmap(NULL, length, PROT_READ | PROT_WRITE, MAP_SHARED, m_fd, offset); return ::mmap(NULL, length, PROT_READ | PROT_WRITE, MAP_SHARED, m_fd, offset); }
int Video_in_Manager::StartDeviceInternal(int buffer_count = 10) { if(verbose) printf("StartDeviceInternal\n"); //Check this device has not already been start if(this->fd==-1) { throw std::runtime_error("Device not open"); } //Set other parameters for capture //TODO /* //Query current pixel format self.size_x, self.size_y, self.pixelFmt = self.video.get_format() //Set target frames per second self.fps = self.video.set_fps(reqFps) */ // Create a buffer to store image data in. This must be done before // calling 'start' if v4l2capture is compiled with libv4l2. Otherwise // raises IOError. if(this->pxFmt.length()==0) { //Get current pixel format //TODO int ret = GetFormatInternal(); if(!ret) throw std::runtime_error("Could not determine image format"); } struct v4l2_requestbuffers reqbuf; reqbuf.count = buffer_count; reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; reqbuf.memory = V4L2_MEMORY_MMAP; if(my_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf)) { throw std::runtime_error("VIDIOC_REQBUFS failed"); } if(!reqbuf.count) { throw std::runtime_error("Not enough buffer memory"); } this->buffers = new struct buffer [reqbuf.count]; if(this->buffers == NULL) { throw std::runtime_error("Failed to allocate buffer memory"); } for(unsigned int i = 0; i < reqbuf.count; i++) { struct v4l2_buffer buffer; buffer.index = i; buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer)) { throw std::runtime_error("VIDIOC_QUERYBUF failed"); } this->buffers[i].length = buffer.length; this->buffers[i].start = v4l2_mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset); if(this->buffers[i].start == MAP_FAILED) { throw std::runtime_error("v4l2_mmap failed"); } } this->buffer_counts = reqbuf.count; // Send the buffer to the device. Some devices require this to be done // before calling 'start'. 
for(int i = 0; i < buffer_count; i++) { struct v4l2_buffer buffer; buffer.index = i; buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buffer.memory = V4L2_MEMORY_MMAP; if(my_ioctl(fd, VIDIOC_QBUF, &buffer)) { //This may fail with some devices but does not seem to be harmful. } } // Start the device. This lights the LED if it's a camera that has one. enum v4l2_buf_type type; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if(my_ioctl(fd, VIDIOC_STREAMON, &type)) { throw std::runtime_error("VIDIOC_STREAMON failed"); } this->Test(); this->deviceStarted = 1; if(verbose) printf("Started ok\n"); return 1; }
int main() { // PREPARACAO V4L // VAR struct v4l2_format fmt; struct v4l2_buffer buf; struct v4l2_requestbuffers req; enum v4l2_buf_type type; fd_set fds; struct timeval tv; int r, fd = -1; unsigned int i, n_buffers; char *dev_name = "/dev/video0"; // char out_name[256]; // FILE *fout; // struct buffer *buffers; clock_t getCapTime; // // OPEN fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0); if (fd < 0) { perror("Cannot open device"); exit(EXIT_FAILURE); } // SET FORMAT and DIMM CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = WIDTH; fmt.fmt.pix.height = HEIGHT; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; xioctl(fd, VIDIOC_S_FMT, &fmt); if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) { printf("Libv4l only accept RGB24 format. Can't proceed.\n"); exit(EXIT_FAILURE); } if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480)) printf("Warning: d"); else printf("D"); printf("river is sending image at %dx%d\n", fmt.fmt.pix.width, fmt.fmt.pix.height); // INIT MEMORY MAPPING CLEAR(req); req.count = 2; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_REQBUFS, &req); // CREATE BUFFER buffers = (buffer*) calloc(req.count, sizeof(*buffers)); for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { CLEAR(buf); // QUERY THE STATUS OF THE BUFFER buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; xioctl(fd, VIDIOC_QUERYBUF, &buf); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) { perror("mmap"); exit(EXIT_FAILURE); } } // SET CAP QUEUE DEST for (i = 0; i < n_buffers; ++i) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; xioctl(fd, VIDIOC_QBUF, &buf); } type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // START STREAMING xioctl(fd, 
VIDIOC_STREAMON, &type); //GET CAP for (i = 0; i < (unsigned)NUM_OF_CAPS; i++) { getCapTime = clock(); do { FD_ZERO(&fds); FD_SET(fd, &fds); /* Timeout. */ tv.tv_sec = 2; //delay maximo pra ler a imagem tv.tv_usec = 0; r = select(fd + 1, &fds, NULL, NULL, &tv); } while ((r == -1 && (errno = EINTR))); if (r == -1) { perror("select"); return errno; } CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; // GET CAP xioctl(fd, VIDIOC_DQBUF, &buf); #ifdef SAVE_CAPS sprintf(out_name, "out%03d.ppm", i); fout = fopen(out_name, "w"); if (!fout) { perror("Cannot open image"); exit(EXIT_FAILURE); } fprintf(fout, "P6\n%d %d 255\n", fmt.fmt.pix.width, fmt.fmt.pix.height); fwrite(buffers[buf.index].start, buf.bytesused, 1, fout); fclose(fout); #endif // "PREPARE" NEXT CAP xioctl(fd, VIDIOC_QBUF, &buf); break; } // STOP STREAMING type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMOFF, &type); for (i = 0; i < n_buffers; ++i) v4l2_munmap(buffers[i].start, buffers[i].length); v4l2_close(fd); return 0; }
void StartCapture() { if( mCapturing ) THROW("already capturing!"); mCapturing = true; // grab current frame format v4l2_pix_format fmt = GetFormat(); // from the v4l2 docs: "Buggy driver paranoia." unsigned int min = fmt.width * 2; if (fmt.bytesperline < min) fmt.bytesperline = min; min = fmt.bytesperline * fmt.height; if (fmt.sizeimage < min) fmt.sizeimage = min; const unsigned int bufCount = 4; if( mIO == READ ) { // allocate buffer mBuffers.resize( 1 ); mBuffers[ 0 ].length = fmt.sizeimage; mBuffers[ 0 ].start = new char[ fmt.sizeimage ]; } else { // request buffers v4l2_requestbuffers req; memset( &req, 0, sizeof(req) ); req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = ( mIO == MMAP ? V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR ); req.count = bufCount; xioctl( mFd, VIDIOC_REQBUFS, &req ); if( mIO == USERPTR ) { // allocate buffers mBuffers.resize( req.count ); for( size_t i = 0; i < mBuffers.size(); ++i ) { mBuffers[ i ].length = fmt.sizeimage; mBuffers[ i ].start = new char[ fmt.sizeimage ]; } } else { // mmap buffers mBuffers.resize( req.count ); for( size_t i = 0; i < mBuffers.size(); ++i ) { v4l2_buffer buf; memset( &buf, 0, sizeof(buf) ); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; xioctl( mFd, VIDIOC_QUERYBUF, &buf ); mBuffers[i].length = buf.length; mBuffers[i].start = (char*)v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, mFd, buf.m.offset); if( mBuffers[i].start == MAP_FAILED ) THROW("mmap() failed!"); } } // queue buffers for( size_t i = 0; i < mBuffers.size(); ++i ) { v4l2_buffer buf; memset( &buf, 0, sizeof(buf) ); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.index = i; buf.memory = ( mIO == MMAP ? V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR ); if( mIO == USERPTR ) { buf.m.userptr = (unsigned long)mBuffers[i].start; buf.length = mBuffers[i].length; } xioctl( mFd, VIDIOC_QBUF, &buf ); } // start streaming v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl( mFd, VIDIOC_STREAMON, &type ); } }
VideoCapture::VideoCapture(const std::string& dev, uint32_t w, uint32_t h) { // Video 4 Linux library implementation. #if defined(DUNE_SYS_HAS_LIBV4L2_H) m_fd = v4l2_open(dev.c_str(), O_RDWR | O_NONBLOCK, 0); if (m_fd < 0) throw Error(errno, String::str("failed to open device '%s'", dev.c_str())); // Initialize V4L2 format. m_fmt = new v4l2_format; std::memset(m_fmt, 0, sizeof(v4l2_format)); m_fmt->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; m_fmt->fmt.pix.width = w; m_fmt->fmt.pix.height = h; m_fmt->fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; m_fmt->fmt.pix.field = V4L2_FIELD_INTERLACED; doIoctl(m_fd, VIDIOC_S_FMT, m_fmt); if (m_fmt->fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) throw std::runtime_error("pixel format RGB24 is not supported by device"); // Initialize V4L2 request buffers. m_bfr_req = new v4l2_requestbuffers; std::memset(m_bfr_req, 0, sizeof(v4l2_requestbuffers)); m_bfr_req->count = 2; m_bfr_req->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; m_bfr_req->memory = V4L2_MEMORY_MMAP; doIoctl(m_fd, VIDIOC_REQBUFS, m_bfr_req); m_bfr = new v4l2_buffer; m_bfrs = (Buffer*)calloc(m_bfr_req->count, sizeof(Buffer)); for (unsigned i = 0; i < m_bfr_req->count; ++i) { std::memset(m_bfr, 0, sizeof(v4l2_buffer)); m_bfr->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; m_bfr->memory = V4L2_MEMORY_MMAP; m_bfr->index = i; doIoctl(m_fd, VIDIOC_QUERYBUF, m_bfr); m_bfrs[i].length = m_bfr->length; m_bfrs[i].start = v4l2_mmap(0, m_bfr->length, PROT_READ | PROT_WRITE, MAP_SHARED, m_fd, m_bfr->m.offset); if (MAP_FAILED == m_bfrs[i].start) { perror("mmap"); exit(EXIT_FAILURE); } std::memset(m_bfr, 0, sizeof(v4l2_buffer)); m_bfr->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; m_bfr->memory = V4L2_MEMORY_MMAP; m_bfr->index = i; doIoctl(m_fd, VIDIOC_QBUF, m_bfr); } #else (void)dev; (void)h; (void)w; throw std::runtime_error("VideoCapture is not yet implemented in this system."); #endif }
// Open the configured webcam, negotiate RGB24 (with libv4lconvert as
// fallback), mmap and queue the capture buffers, start streaming, then
// launch the worker thread.  Returns 0 on success, 1 on any setup failure.
int CaptureThread::start() {
    wait();
    devam=false;
    fd = -1;

    // read config
    dev_name = Settings::node();
    width = Settings::width();
    height = Settings::height();
    fps = Settings::fps();
    if (fps>0) {
        delay = 1000/fps;   // inter-frame delay in milliseconds
    } else {
        delay = 0;
    }

    // open webcam device node
    fd = v4l2_open(dev_name.toStdString().c_str(), O_RDWR | O_NONBLOCK, 0);
    if (fd < 0) {
        kError() << "Cannot open device";
        quit();
        return 1;
    }

    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    xioctl(fd, VIDIOC_S_FMT, &fmt);
    if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
        kError() << "Libv4l didn't accept RGB24 format. Can't proceed.";
        quit();
        return 1;
    }
    emit startedCapture(fmt.fmt.pix.width, fmt.fmt.pix.height);

    // let libv4lconvert pick a source format the driver actually supports
    v4lconvert_data = v4lconvert_create(fd);
    if (v4lconvert_data == NULL)
        kDebug() << "v4lconvert_create";
    if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
        kDebug() << "v4lconvert_try_format";
    xioctl(fd, VIDIOC_S_FMT, &src_fmt);
    dst_buf = (unsigned char*)malloc(fmt.fmt.pix.sizeimage);

    // request and mmap two kernel capture buffers
    CLEAR(req);
    req.count = 2;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    xioctl(fd, VIDIOC_REQBUFS, &req);
    buffers = (buffer*)calloc(req.count, sizeof(*buffers));
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        xioctl(fd, VIDIOC_QUERYBUF, &buf);
        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start) {
            kDebug() << "mmap";
            quit();
            return 1;
        }
    }

    // queue every buffer and start streaming
    for (unsigned int i = 0; i < n_buffers; ++i) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        xioctl(fd, VIDIOC_QBUF, &buf);
    }
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMON, &type);
    di=0;
    // PPM header matching the negotiated frame size, consumed by the worker
    sprintf(header,"P6\n%d %d 255\n",fmt.fmt.pix.width,fmt.fmt.pix.height);
    devam=true;

    // start processing video data
    running = true;
    QThread::start();
    return 0;
}
void V4LThread::OpenSource(const char *filename) { struct v4l2_format fmt; struct v4l2_buffer buf; struct v4l2_requestbuffers req; enum v4l2_buf_type type; unsigned int i; recebuf_len = 0; // fd = v4l2_open(filename, O_RDWR | O_NONBLOCK, 0); fd = open(filename, O_RDWR | O_NONBLOCK, 0); if (fd < 0) { qCritical("Cannot open /dev/swradio0 :%d", fd); return; } pixelformat = V4L2_PIX_FMT_SDR_U8; // RTLSDR has limited ioctls in 3.18, expect fail. qCritical("Want Pixelformat : CU08"); CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_SDR_CAPTURE; fmt.fmt.sdr.pixelformat = pixelformat; xioctl(fd, VIDIOC_S_FMT, &fmt); qCritical("Got Pixelformat : %4.4s", (char *)&fmt.fmt.sdr.pixelformat); CLEAR(req); req.count = 8; req.type = V4L2_BUF_TYPE_SDR_CAPTURE; req.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_REQBUFS, &req); buffers = (struct v4l_buffer*) calloc(req.count, sizeof(*buffers)); for (n_buffers = 0; n_buffers < req.count; n_buffers++) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_SDR_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; xioctl(fd, VIDIOC_QUERYBUF, &buf); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if (buffers[n_buffers].start == MAP_FAILED) { qCritical("V4L2 buffer mmap failed"); } } for (i = 0; i < n_buffers; i++) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_SDR_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; xioctl(fd, VIDIOC_QBUF, &buf); } set_sample_rate((double)SAMPLERATE); set_center_freq( centerFreq + (SAMPLERATE / 4) ); // start streaming type = V4L2_BUF_TYPE_SDR_CAPTURE; xioctl(fd, VIDIOC_STREAMON, &type); }
int cameraInit(char *device) { fd = v4l2_open(device, O_RDWR | O_NONBLOCK, 0); if (fd < 0) { perror("Cannot open device"); return -1; } CLEAR(fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = 640; fmt.fmt.pix.height = 480; fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; xioctl(fd, VIDIOC_S_FMT, &fmt); if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_YUV420) { printf("Libv4l didn't accept RGB24 format. Can't proceed.\n"); return -1; } if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480)) printf("Warning: driver is sending image at %dx%d\n", fmt.fmt.pix.width, fmt.fmt.pix.height); CLEAR(req); req.count = 2; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; xioctl(fd, VIDIOC_REQBUFS, &req); buffers = calloc(req.count, sizeof(*buffers)); for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = n_buffers; xioctl(fd, VIDIOC_QUERYBUF, &buf); buffers[n_buffers].length = buf.length; buffers[n_buffers].start = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset); if (MAP_FAILED == buffers[n_buffers].start) { perror("mmap"); return -1; } } for (i = 0; i < n_buffers; ++i) { CLEAR(buf); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = i; xioctl(fd, VIDIOC_QBUF, &buf); } type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMON, &type); return fd; }