/* Release the capture buffers according to the compiled-in I/O method,
 * then free the buffers[] descriptor array itself.
 * READ: one heap buffer; MMAP: driver mappings to munmap; USERPTR:
 * application-owned heap buffers. */
static void deviceUninit(void)
{
    unsigned int i;
    switch (io) {
#ifdef IO_READ
    case IO_METHOD_READ:
        /* read() I/O uses a single heap buffer in slot 0. */
        free(buffers[0].start);
        break;
#endif
#ifdef IO_MMAP
    case IO_METHOD_MMAP:
        /* Unmap every driver-provided buffer; abort the process on failure. */
        for (i = 0; i < n_buffers; ++i)
            if (-1 == v4l2_munmap(buffers[i].start, buffers[i].length))
                errno_exit("munmap");
        break;
#endif
#ifdef IO_USERPTR
    case IO_METHOD_USERPTR:
        /* User-pointer I/O: each buffer was malloc'd by the application. */
        for (i = 0; i < n_buffers; ++i)
            free(buffers[i].start);
        break;
#endif
    }
    /* The descriptor array is freed regardless of the I/O method. */
    free(buffers);
}
/* API: Destroy stream.
 * Stops capture, unmaps every successfully-mapped buffer, closes the
 * device fd and releases the stream's memory pool (which frees the
 * stream object itself). Returns PJ_SUCCESS, or PJ_EINVAL when strm is
 * NULL. */
static pj_status_t vid4lin_stream_destroy(pjmedia_vid_dev_stream *strm)
{
    vid4lin_stream *stream = (vid4lin_stream*)strm;
    unsigned i;

    PJ_ASSERT_RETURN(stream != NULL, PJ_EINVAL);

    /* Make sure capture is stopped before tearing down the buffers. */
    vid4lin_stream_stop(strm);

    PJ_LOG(4, (THIS_FILE, "Destroying v4l2 video stream %s", stream->name));

    /* Unmap only slots that were actually mapped; reset each to
     * MAP_FAILED so a repeated destroy cannot unmap twice. */
    for (i=0; i<stream->buf_cnt; ++i) {
        if (stream->buffers[i].start != MAP_FAILED) {
            v4l2_munmap(stream->buffers[i].start, stream->buffers[i].length);
            stream->buffers[i].start = MAP_FAILED;
        }
    }

    if (stream->fd >= 0) {
        v4l2_close(stream->fd);
        stream->fd = -1;
    }

    /* The stream was allocated from this pool, so this frees it too. */
    pj_pool_release(stream->pool);

    return PJ_SUCCESS;
}
void StopCapture() { if( !mCapturing ) return; mCapturing = false; if( mIO == READ ) { delete[] mBuffers[0].start; } else { // stop streaming v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl( mFd, VIDIOC_STREAMOFF, &type ); if( mIO == USERPTR ) { // free memory for( size_t i = 0; i < mBuffers.size(); ++i ) delete[] mBuffers[i].start; } else { // unmap memory for( size_t i = 0; i < mBuffers.size(); ++i ) if( -1 == v4l2_munmap(mBuffers[i].start, mBuffers[i].length) ) THROW( "munmap() failed!" ); } } }
/* GstBufferPool::free_buffer vfunc: release the per-mode resources
 * behind one pool buffer, then drop the buffer's reference. */
static void
gst_v4l2_buffer_pool_free_buffer (GstBufferPool * bpool, GstBuffer * buffer)
{
  GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
  GstV4l2Object *obj;

  obj = pool->obj;

  switch (obj->mode) {
    case GST_V4L2_IO_RW:
      /* read/write I/O: no mapping to undo. */
      break;
    case GST_V4L2_IO_MMAP:
    {
      GstV4l2Meta *meta;
      gint index;

      /* The meta records which driver buffer this GstBuffer wraps. */
      meta = GST_V4L2_META_GET (buffer);
      g_assert (meta != NULL);

      index = meta->vbuffer.index;
      GST_LOG_OBJECT (pool,
          "mmap buffer %p idx %d (data %p, len %u) freed, unmapping",
          buffer, index, meta->mem, meta->vbuffer.length);
      v4l2_munmap (meta->mem, meta->vbuffer.length);
      /* Clear the pool's slot so the index can't be used again. */
      pool->buffers[index] = NULL;
      break;
    }
    case GST_V4L2_IO_USERPTR:
    default:
      /* USERPTR is not implemented for this pool. */
      g_assert_not_reached ();
      break;
  }
  gst_buffer_unref (buffer);
}
int Video_in_Manager::CloseDeviceInternal() { if(verbose) printf("CloseDeviceInternal\n"); if(this->fd == -1) { throw std::runtime_error("Device not open"); } if(this->deviceStarted) StopDeviceInternal(); if(this->buffers!= NULL) { for(int i = 0; i < this->buffer_counts; i++) { v4l2_munmap(this->buffers[i].start, this->buffers[i].length); } delete [] this->buffers; } this->buffers = NULL; //Release memory v4l2_close(fd); fd = -1; return 1; }
/* Unmap all capture buffers and free the descriptor array.
 * The vd parameter is unused; it is kept for caller compatibility.
 *
 * Bug fixed: the original commented out errno_exit("munmap") on the
 * line after the `if`, which silently made `free(buffers);` the body of
 * that `if` inside the loop — the array was freed on the first munmap
 * failure and then read (and potentially freed again) on subsequent
 * iterations. The munmap result is now deliberately ignored (matching
 * the author's intent of not aborting) and the array is freed exactly
 * once, after the loop. */
void uninit_device(int vd)
{
	unsigned int i;

	(void)vd; /* unused */

	for (i = 0; i < n_buffers; ++i)
		v4l2_munmap(buffers[i].start, buffers[i].length);

	free(buffers);
}
/* Stop streaming, unmap all capture buffers and close the device.
 * NOTE(review): type, fd, i, n_buffers and buffers are not declared
 * here, so they must be file-scope variables; in particular the global
 * `i` is clobbered as the loop counter — confirm that is intended. */
void cameraClose()
{
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < n_buffers; ++i)
        v4l2_munmap(buffers[i].start, buffers[i].length);
    v4l2_close(fd);
}
/* Unmap every buffer previously mapped for this device object. */
static void Video_device_unmap(Video_device *self)
{
    int idx;

    for (idx = 0; idx < self->buffer_count; ++idx)
        v4l2_munmap(self->buffers[idx].start, self->buffers[idx].length);
}
/* Unmap all capture buffers. Returns 0 on success; returns -1 as soon
 * as any v4l2_munmap call fails, in which case the descriptor array is
 * intentionally not freed. */
static int
uninit_device (void)
{
  unsigned int idx;

  for (idx = 0; idx < n_buffers; ++idx) {
    const int rc = v4l2_munmap (buffers[idx].start, buffers[idx].length);
    if (rc == -1)
      return -1;
  }

  free (buffers);
  return 0;
}
/* Finalize hook for a pool-backed V4L2 buffer. Instead of destroying
 * the buffer outright, try to return it to its pool ("resuscitate" it):
 * either requeue it to the driver or push it on the available queue.
 * Only when the pool is shutting down (or requeue fails) is the mmap
 * mapping actually released and the parent finalize chained. */
static void
gst_v4l2_buffer_finalize (GstV4l2Buffer * buffer)
{
  GstV4l2BufferPool *pool;
  gboolean resuscitated = FALSE;
  gint index;

  pool = buffer->pool;

  index = buffer->vbuffer.index;

  GST_LOG_OBJECT (pool->v4l2elem, "finalizing buffer %p %d", buffer, index);

  GST_V4L2_BUFFER_POOL_LOCK (pool);
  if (pool->running) {
    if (pool->requeuebuf) {
      /* Capture-style pool: hand the buffer straight back to the driver. */
      if (!gst_v4l2_buffer_pool_qbuf (pool, buffer)) {
        GST_WARNING ("could not requeue buffer %p %d", buffer, index);
      } else {
        resuscitated = TRUE;
      }
    } else {
      resuscitated = TRUE;
      /* XXX double check this... I think it is ok to not synchronize this
       * w.r.t. destruction of the pool, since the buffer is still live and
       * the buffer holds a ref to the pool.. */
      g_async_queue_push (pool->avail_buffers, buffer);
    }
  } else {
    GST_LOG_OBJECT (pool->v4l2elem, "the pool is shutting down");
  }

  if (resuscitated) {
    /* FIXME: check that the caps didn't change */
    GST_LOG_OBJECT (pool->v4l2elem, "reviving buffer %p, %d", buffer, index);
    /* Re-ref: finalize dropped the last ref, reviving needs a new one. */
    gst_buffer_ref (GST_BUFFER (buffer));
    GST_BUFFER_SIZE (buffer) = 0;
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
    pool->buffers[index] = buffer;
  }

  GST_V4L2_BUFFER_POOL_UNLOCK (pool);

  if (!resuscitated) {
    GST_LOG_OBJECT (pool->v4l2elem,
        "buffer %p (data %p, len %u) not recovered, unmapping",
        buffer, GST_BUFFER_DATA (buffer), buffer->mmap_length);
    /* Drop the ref this buffer held on the pool, then undo the mapping
     * and chain up to the parent class finalize. */
    gst_mini_object_unref (GST_MINI_OBJECT (pool));
    v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), buffer->mmap_length);

    GST_MINI_OBJECT_CLASS (v4l2buffer_parent_class)->finalize (GST_MINI_OBJECT (buffer));
  }
}
/* Release the capture buffers held by this video-source state: unmap
 * each mapping, drop the reference on the buffer array, and reset the
 * buffer count. */
static void uninit_device(struct vidsrc_st *st)
{
	unsigned int idx;

	for (idx = 0; idx < st->n_buffers; ++idx)
		v4l2_munmap(st->buffers[idx].start, st->buffers[idx].length);

	st->buffers = mem_deref(st->buffers);
	st->n_buffers = 0;
}
int CamaraClose(struct camera_context *cam){ enum v4l2_buf_type type; int i; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(cam->fd, VIDIOC_STREAMOFF, &type); for (i = 0; i < cam->buf_data.buf_n; ++i) v4l2_munmap(cam->buf_data.start[i], cam->buf_data.length[i]); v4l2_close(cam->fd); return 0; }
// Stop the capture loop, release the V4L2 resources under the mutex,
// and shut the thread's event loop down. Always returns 0.
int CaptureThread::stop()
{
    running = false;
    devam = false;

    mutex.lock();
    // Halt streaming before unmapping the buffers the driver was filling.
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);
    for (unsigned int idx = 0; idx < n_buffers; ++idx)
        v4l2_munmap(buffers[idx].start, buffers[idx].length);
    v4l2_close(fd);
    fd = -1;
    mutex.unlock();

    quit();
    return 0;
}
void VideoGrabV4L2::release() { delete[] frame.imageData; frame.imageData = 0; frame.width = 0; frame.height = 0; v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(fd, VIDIOC_STREAMOFF, &type); for (unsigned int i = 0; i < n_buffers; ++i) v4l2_munmap(buffers[i].start, buffers[i].length); n_buffers = 0; v4l2_close(fd); fd = -1; }
// Destructor: stop streaming I/O, unmap the capture buffers, free the
// buffer table and close the device.
USBGrabber::~USBGrabber() {
  enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (v4l2_ioctl(mFd, VIDIOC_STREAMOFF, &type) < 0 ) {
    error("Can't stop streaming I/O\n");
  }
  // mBuffers holds the element count of the mBuf array.
  for (size_t i = 0; i < mBuffers; i++) {
    v4l2_munmap(mBuf[i].start, mBuf[i].length);
  }
  free(mBuf);
  v4l2_close(mFd);
  // NOTE(review): mFd is reset to 0, which is itself a valid file
  // descriptor; other teardown code in this area uses -1 as the
  // "closed" sentinel — confirm this is intentional.
  mFd = 0;
}
void V4LThread::CloseSource() { unsigned int i; enum v4l2_buf_type type; // stop streaming type = V4L2_BUF_TYPE_SDR_CAPTURE; xioctl(fd, VIDIOC_STREAMOFF, &type); for (i = 0; i < n_buffers; i++) v4l2_munmap(buffers[i].start, buffers[i].length); v4l2_close(fd); fd = -1; }
// Destructor: release all Video4Linux2 capture resources when the
// library is available at build time; otherwise a no-op.
VideoCapture::~VideoCapture(void)
{
  // Video 4 Linux library implementation.
#if defined(DUNE_SYS_HAS_LIBV4L2_H)
  stop();

  // Unmap the buffers granted by VIDIOC_REQBUFS (count kept in
  // m_bfr_req) before closing the descriptor.
  for (unsigned i = 0; i < m_bfr_req->count; ++i)
    v4l2_munmap(m_bfrs[i].start, m_bfrs[i].length);

  v4l2_close(m_fd);

  free(m_bfrs);
  delete m_bfr_req;
  delete m_bfr;
  delete m_fmt;
#endif
}
/* Stop grabbing: tear down the negotiated image format, stop the
 * capture stream (propagating failure via RETURN_IF_FAILED), unmap all
 * buffers and close the video device. Returns TC_OK on success. */
static int tc_v4l2_video_grab_stop(V4L2Source *vs)
{
    int idx, ret;

    tc_v4l2_teardown_image_format(vs);

    ret = tc_v4l2_capture_stop(vs);
    RETURN_IF_FAILED(ret);

    for (idx = 0; idx < vs->buffers_count; idx++)
        v4l2_munmap(vs->buffers[idx].start, vs->buffers[idx].length);

    v4l2_close(vs->video_fd);
    vs->video_fd = -1;

    return TC_OK;
}
/* Stop capture and unmap every frame buffer held by the V4l2 state.
 * Each frame is an mblk_t whose continuation block (b_cont) wraps the
 * mmap'd region; the mapping length is recovered from the datablock
 * limits (db_lim - db_base). */
static void msv4l2_do_munmap(V4l2State *s){
	int i;
	enum v4l2_buf_type type;
	/*stop capture immediately*/
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (-1 ==v4l2_ioctl (s->fd, VIDIOC_STREAMOFF, &type)){
		ms_error("VIDIOC_STREAMOFF failed: %s",strerror(errno));
	}
	for(i=0;i<s->frame_max;++i){
		mblk_t *msg=s->frames[i]->b_cont;
		/* db_lim - db_base is the size originally handed to mmap. */
		int len=msg->b_datap->db_lim-msg->b_datap->db_base;
		if (v4l2_munmap(msg->b_datap->db_base,len)<0){
			/* Best effort: warn and keep releasing the rest. */
			ms_warning("MSV4l2: Fail to unmap: %s",strerror(errno));
		}
		freemsg(s->frames[i]);
		s->frames[i]=NULL;
	}
}
/* munmap() replacement for the v4l1 compatibility layer.
 * If the region matches this layer's own v4l1 frame buffer (identified
 * by its exact size and pointer), only the map count is decremented —
 * the real mapping stays alive for other users. Any other region is
 * forwarded to libv4l2. Returns 0 on success. */
int v4l1_munmap(void *_start, size_t length)
{
	int index;
	unsigned char *start = _start;

	/* Is this memory ours? */
	if (start != MAP_FAILED &&
			length == (V4L1_FRAME_BUF_SIZE * V4L1_NO_FRAMES)) {
		/* Find the device whose v4l1 frame buffer this is. */
		for (index = 0; index < devices_used; index++)
			if (devices[index].fd != -1 &&
					start == devices[index].v4l1_frame_pointer)
				break;

		if (index != devices_used) {
			int unmapped = 0;

			pthread_mutex_lock(&devices[index].stream_lock);

			/* Redo our checks now that we have the lock, things may have changed */
			if (start == devices[index].v4l1_frame_pointer) {
				if (devices[index].v4l1_frame_buf_map_count > 0)
					devices[index].v4l1_frame_buf_map_count--;

				unmapped = 1;
			}

			pthread_mutex_unlock(&devices[index].stream_lock);

			if (unmapped) {
				V4L1_LOG("v4l1 buffer munmap %p, %d\n", start, (int)length);
				return 0;
			}
		}
	}

	V4L1_LOG("v4l1 unknown munmap %p, %d\n", start, (int)length);

	/* If not pass through libv4l2 for applications which are using v4l2 through
	   libv4l1 (this can happen with the v4l1compat.so wrapper preloaded */
	return v4l2_munmap(start, length);
}
/* Release capture buffers according to the active I/O method, then free
 * the descriptor array.
 * NOTE(review): this switch mixes IO_METHOD_READ with V4L2_MEMORY_MMAP
 * and V4L2_MEMORY_USERPTR, which belong to two different enumerations —
 * confirm that `io` really carries values from both. Also note `i` is a
 * signed int compared against n_buffers, which is unsigned elsewhere in
 * this file. */
static void uninit_device(void)
{
    int i;

    switch (io) {
    case IO_METHOD_READ:
        /* read() I/O uses a single heap buffer in slot 0. */
        free(buffers[0].start);
        break;

    case V4L2_MEMORY_MMAP:
        /* Unmap every driver mapping; abort the process on failure. */
        for (i = 0; i < n_buffers; ++i)
            if (-1 == v4l2_munmap(buffers[i].start, buffers[i].length))
                errno_exit("munmap");
        break;

    case V4L2_MEMORY_USERPTR:
        /* Application-owned buffers. */
        for (i = 0; i < n_buffers; ++i)
            free(buffers[i].start);
        break;
    }

    free(buffers);
}
/* GstAllocator::free vfunc for V4L2 memory. Only top-level (unparented)
 * memory releases its underlying resources: the mmap mapping in MMAP
 * mode, and the dmabuf fd when one is held. The GstV4l2Memory struct is
 * always returned to the slice allocator. */
static void
gst_v4l2_allocator_free (GstAllocator * gallocator, GstMemory * gmem)
{
  GstV4l2Allocator *allocator = (GstV4l2Allocator *) gallocator;
  GstV4l2Memory *mem = (GstV4l2Memory *) gmem;
  GstV4l2MemoryGroup *group = mem->group;

  /* Only free unparented memory */
  if (mem->mem.parent == NULL) {
    GST_LOG_OBJECT (allocator, "freeing plane %i of buffer %u",
        mem->plane, group->buffer.index);

    if (allocator->memory == V4L2_MEMORY_MMAP) {
      if (mem->data)
        v4l2_munmap (mem->data, group->planes[mem->plane].length);
    }

    /* This apply for both mmap with expbuf, and dmabuf imported memory */
    if (mem->dmafd >= 0)
      close (mem->dmafd);
  }

  g_slice_free (GstV4l2Memory, mem);
}
/* Minimal libv4l2 capture example: open /dev/video0, negotiate an
 * 80x60 YUYV format, mmap two buffers, stream 20 frames and write each
 * one as a PPM file (out000.ppm ...), then tear everything down.
 *
 * Bug fixed: the select() retry loop used (errno = EINTR) — an
 * assignment, which retried on *every* error and clobbered errno, so
 * the perror("select") path below was unreachable — instead of the
 * intended (errno == EINTR) comparison. Also frees the buffer
 * descriptor array before returning. */
int main(int argc, char **argv)
{
	struct v4l2_format fmt;
	struct v4l2_buffer buf;
	struct v4l2_requestbuffers req;
	enum v4l2_buf_type type;
	fd_set fds;
	struct timeval tv;
	int r, fd = -1;
	unsigned int i, n_buffers;
	char *dev_name = "/dev/video0";
	char out_name[256];
	FILE *fout;
	struct buffer *buffers;

	fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
	if (fd < 0) {
		perror("Cannot open device");
		exit(EXIT_FAILURE);
	}

	/* Request a small YUYV frame; the driver may adjust the values. */
	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = 80;
	fmt.fmt.pix.height = 60;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	fmt.fmt.pix.field = V4L2_FIELD_NONE;
	xioctl(fd, VIDIOC_S_FMT, &fmt);
	if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV) {
		printf("Libv4l didn't accept RGB24 format. Can't proceed.\n");
		exit(EXIT_FAILURE);
	}
	if ((fmt.fmt.pix.width != 80) || (fmt.fmt.pix.height != 60))
		printf("Warning: driver is sending image at %dx%d\n",
		       fmt.fmt.pix.width, fmt.fmt.pix.height);

	/* Ask the driver for two mmap buffers and map each of them. */
	CLEAR(req);
	req.count = 2;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	xioctl(fd, VIDIOC_REQBUFS, &req);

	buffers = calloc(req.count, sizeof(*buffers));
	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;
		xioctl(fd, VIDIOC_QUERYBUF, &buf);

		buffers[n_buffers].length = buf.length;
		buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
						     PROT_READ | PROT_WRITE,
						     MAP_SHARED, fd, buf.m.offset);
		if (MAP_FAILED == buffers[n_buffers].start) {
			perror("mmap");
			exit(EXIT_FAILURE);
		}
	}

	/* Queue every buffer, then start streaming. */
	for (i = 0; i < n_buffers; ++i) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		xioctl(fd, VIDIOC_QBUF, &buf);
	}
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMON, &type);

	for (i = 0; i < 20; i++) {
		/* Wait for a frame, retrying only if a signal interrupted
		 * select(); any other error is reported below. */
		do {
			FD_ZERO(&fds);
			FD_SET(fd, &fds);

			/* Timeout. */
			tv.tv_sec = 2;
			tv.tv_usec = 0;

			r = select(fd + 1, &fds, NULL, NULL, &tv);
		} while (r == -1 && errno == EINTR);
		if (r == -1) {
			perror("select");
			return errno;
		}

		/* Dequeue a filled buffer and dump it as a binary PPM. */
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		xioctl(fd, VIDIOC_DQBUF, &buf);

		sprintf(out_name, "out%03d.ppm", i);
		fout = fopen(out_name, "w");
		if (!fout) {
			perror("Cannot open image");
			exit(EXIT_FAILURE);
		}
		fprintf(fout, "P6\n%d %d 255\n",
			fmt.fmt.pix.width, fmt.fmt.pix.height);
		fwrite(buffers[buf.index].start, buf.bytesused, 1, fout);
		fclose(fout);

		/* Hand the buffer back to the driver. */
		xioctl(fd, VIDIOC_QBUF, &buf);
	}

	/* Shut down: stop streaming, unmap, close, free bookkeeping. */
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMOFF, &type);
	for (i = 0; i < n_buffers; ++i)
		v4l2_munmap(buffers[i].start, buffers[i].length);
	v4l2_close(fd);
	free(buffers);

	return 0;
}
/* Capture n_frames RGB24 frames (endless when n_frames <= 0) from
 * dev_name at x_res x y_res and display each one as an OpenGL texture
 * in an X11/GLX window.
 * NOTE(review): the out_dir parameter is unused here — confirm whether
 * frame dumping was intended. The GL/X resources and the buffers array
 * are not released on the normal return path. */
static int capture(char *dev_name, int x_res, int y_res, int n_frames,
		   char *out_dir)
{
	struct v4l2_format fmt;
	struct v4l2_buffer buf;
	struct v4l2_requestbuffers req;
	enum v4l2_buf_type type;
	fd_set fds;
	struct timeval tv;
	int r, fd = -1;
	unsigned int i, j, n_buffers;
	struct buffer *buffers;
	Display *dpy;
	Window win;
	int num_textures = 1;
	GLuint texture_id[num_textures];
	Window root;
	XVisualInfo *vi;
	XSetWindowAttributes swa;
	GLXContext glc;
	GLint att[] = { GLX_RGBA, GLX_DEPTH_SIZE, 24, GLX_DOUBLEBUFFER, None };

	/* Bring up the X11/GLX side first. */
	dpy = XOpenDisplay(NULL);
	if (!dpy) {
		printf("\tcannot open display.\n");
		exit(EXIT_FAILURE);
	}
	root = DefaultRootWindow(dpy);
	vi = glXChooseVisual(dpy, 0, att);
	if (!vi) {
		printf("no appropriate visual found.\n");
		exit(EXIT_FAILURE);
	}
	swa.event_mask = ExposureMask | KeyPressMask;
	swa.colormap = XCreateColormap(dpy, root, vi->visual, AllocNone);

	/* Open the capture device and negotiate the format. */
	fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
	if (fd < 0) {
		perror("Cannot open device");
		exit(EXIT_FAILURE);
	}
	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = x_res;
	fmt.fmt.pix.height = y_res;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
	xioctl(fd, VIDIOC_S_FMT, &fmt);
	if ((fmt.fmt.pix.width != x_res) || (fmt.fmt.pix.height != y_res))
		printf("Warning: driver is sending image at %dx%d\n",
		       fmt.fmt.pix.width, fmt.fmt.pix.height);
	printf("Fourcc format: %c%c%c%c\n",
	       fmt.fmt.pix.pixelformat & 0xff,
	       (fmt.fmt.pix.pixelformat >> 8) &0xff,
	       (fmt.fmt.pix.pixelformat >> 16) &0xff,
	       (fmt.fmt.pix.pixelformat >> 24) &0xff);

	/* Window sized to whatever the driver actually granted. */
	win = XCreateWindow(dpy, root, 0, 0, fmt.fmt.pix.width,
			    fmt.fmt.pix.height, 0, vi->depth, InputOutput,
			    vi->visual, CWEventMask | CWColormap, &swa);
	XMapWindow(dpy, win);
	XStoreName(dpy, win, dev_name);
	glc = glXCreateContext(dpy, vi, NULL, GL_TRUE);
	if (glc == NULL) {
		printf("\n\tcannot create gl context\n\n");
		exit(0);
	}
	glXMakeCurrent(dpy, win, glc);
	glEnable(GL_DEPTH_TEST);
	/* NOTE(review): the returned Pixmap is discarded — looks leaked. */
	XCreatePixmap(dpy, root, fmt.fmt.pix.width,
		      fmt.fmt.pix.height, vi->depth);
	glEnable(GL_TEXTURE_2D);
	glGenTextures(1, texture_id);
	for (j = 0; j < num_textures; j++) {
		glActiveTexture(GL_TEXTURE0 + j);
		glBindTexture(GL_TEXTURE_2D, texture_id[j]);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
		glEnable(GL_TEXTURE_2D);
	}

	/* Request and map two mmap capture buffers. */
	CLEAR(req);
	req.count = 2;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	xioctl(fd, VIDIOC_REQBUFS, &req);
	buffers = calloc(req.count, sizeof(*buffers));
	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;
		xioctl(fd, VIDIOC_QUERYBUF, &buf);
		buffers[n_buffers].length = buf.length;
		buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
						     PROT_READ | PROT_WRITE,
						     MAP_SHARED, fd, buf.m.offset);
		if (MAP_FAILED == buffers[n_buffers].start) {
			perror("mmap");
			exit(EXIT_FAILURE);
		}
	}

	/* Queue all buffers and start streaming. */
	for (i = 0; i < n_buffers; ++i) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		xioctl(fd, VIDIOC_QBUF, &buf);
	}
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMON, &type);

	/* Main loop; runs forever when n_frames <= 0. */
	i = 0;
	while (i < n_frames || n_frames <= 0) {
		/* Request new buffer */
		if (i)
			xioctl(fd, VIDIOC_QBUF, &buf);
		/* Wait for a frame, retrying only on signal interruption. */
		do {
			FD_ZERO(&fds);
			FD_SET(fd, &fds);

			/* Timeout. */
			tv.tv_sec = 2;
			tv.tv_usec = 0;

			r = select(fd + 1, &fds, NULL, NULL, &tv);
		} while ((r == -1 && (errno == EINTR)));
		if (r == -1) {
			perror("select");
			return errno;
		}

		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		xioctl(fd, VIDIOC_DQBUF, &buf);

		/*
		 * Display the image via GL - for RGB, only one texture is enough
		 */
		for (j = 0; j < num_textures; j++) {
			glActiveTexture(GL_TEXTURE0 + j);
			glBindTexture(GL_TEXTURE_2D, texture_id[j]);
			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB,
				     fmt.fmt.pix.width, fmt.fmt.pix.height, 0,
				     GL_RGB, GL_UNSIGNED_BYTE,
				     ((char *)buffers[buf.index].start) + j);
		}
		Redraw(dpy, win);

		i++;
	}

	/* Shut the capture side down. */
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMOFF, &type);
	for (i = 0; i < n_buffers; ++i)
		v4l2_munmap(buffers[i].start, buffers[i].length);
	v4l2_close(fd);

	return 0;
}
// Unmap a region, routing through libv4l2's wrapper when it is enabled
// and falling back to the raw system munmap otherwise.
int v4l2::munmap(void *start, size_t length)
{
	return useWrapper() ? v4l2_munmap(start, length)
	                    : ::munmap(start, length);
}
/* Camera module entry point: grab one 640x480 RGB24 frame from
 * /dev/video0 via two mmap buffers, JPEG-encode it (quality from module
 * parameters, default 90) and record it as evidence. All failures fall
 * through the do/while(0) to a common cleanup path. Returns NULL.
 *
 * Bug fixed: the cleanup path unconditionally ran
 * `for(i=0;i<req.count;i++) v4l2_munmap(buffers[i]...)`, dereferencing
 * buffers even when its calloc had failed (NULL) after a successful
 * VIDIOC_REQBUFS — a crash. The loop is now guarded and only unmaps
 * slots that were actually mapped. */
void *module_camera_main(void *args)
{
	struct v4l2_format fmt = {0};
	struct v4l2_buffer buf = {0};
	struct v4l2_requestbuffers req = {0};
	enum v4l2_buf_type type;
	struct { void *start; size_t length; } *buffers = NULL;
	fd_set rfds;
	struct timeval tv = { 3, 0 };
	int i, camfd = -1;
	char *dataptr = NULL;
	long datalen = 0;
	unsigned int quality = 90;

	debugme("Module CAMERA executed\n");

	if(initlib(INIT_LIBV4L2|INIT_LIBJPEG)) return NULL;

	if(MODULE_CAMERA_P) quality = MODULE_CAMERA_P->quality;

	do {
		if((camfd = v4l2_open(SO"/dev/video0", O_RDWR | O_NONBLOCK, 0)) < 0) break;

		/* Negotiate a 640x480 RGB24 frame; bail if the driver refuses. */
		fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		fmt.fmt.pix.width = 640;
		fmt.fmt.pix.height = 480;
		fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
		fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
		if(v4l2_ioctl(camfd, VIDIOC_S_FMT, &fmt) == -1) break;
		if(fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) break;

		/* Request and map two mmap buffers. */
		req.count = 2;
		req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		req.memory = V4L2_MEMORY_MMAP;
		if(v4l2_ioctl(camfd, VIDIOC_REQBUFS, &req) == -1) break;

		if(!(buffers = calloc(req.count, sizeof(*buffers)))) break;

		for(i = 0; i < req.count; i++) {
			memset(&buf, 0x00, sizeof(buf));
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory = V4L2_MEMORY_MMAP;
			buf.index = i;
			if(v4l2_ioctl(camfd, VIDIOC_QUERYBUF, &buf) == -1) break;
			buffers[i].length = buf.length;
			if((buffers[i].start = v4l2_mmap(NULL, buf.length,
					PROT_READ|PROT_WRITE, MAP_SHARED,
					camfd, buf.m.offset)) == MAP_FAILED) break;
		}
		if(i != req.count) break;

		/* Queue every buffer, then start streaming. */
		for(i = 0; i < req.count; i++) {
			memset(&buf, 0x00, sizeof(buf));
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory = V4L2_MEMORY_MMAP;
			buf.index = i;
			if(v4l2_ioctl(camfd, VIDIOC_QBUF, &buf) == -1) break;
		}
		if(i != req.count) break;

		type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		if(v4l2_ioctl(camfd, VIDIOC_STREAMON, &type) == -1) break;

		/* Wait up to 3 seconds for the first frame. */
		FD_ZERO(&rfds);
		FD_SET(camfd, &rfds);
		if((i = select(camfd + 1, &rfds, NULL, NULL, &tv)) <= 0) break;

		memset(&buf, 0x00, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		if(v4l2_ioctl(camfd, VIDIOC_DQBUF, &buf) == -1) break;

		/* Encode the dequeued frame and record it. */
		if(!(datalen = encodeimage(buffers[buf.index].start,
				fmt.fmt.pix.width, fmt.fmt.pix.height,
				quality, &dataptr))) break;

		evidence_write(EVIDENCE_TYPE_CAMERA, NULL, 0, dataptr, datalen);
	} while(0);

	/* Common cleanup: every path above lands here. */
	if(camfd != -1) {
		type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		v4l2_ioctl(camfd, VIDIOC_STREAMOFF, &type);
	}

	/* Unmap only slots that were actually mapped (calloc zeroed the
	 * array, and a failed v4l2_mmap leaves MAP_FAILED in .start). */
	if(buffers) {
		for(i = 0; i < req.count; i++)
			if(buffers[i].start && buffers[i].start != MAP_FAILED)
				v4l2_munmap(buffers[i].start, buffers[i].length);
	}

	if(camfd != -1) v4l2_close(camfd);
	if(dataptr) free(dataptr);
	if(buffers) free(buffers);

	debugme("Module CAMERA ended\n");
	return NULL;
}
/* Close the V4L2 access-demux: dequeue outstanding buffers, stop the
 * stream, free the per-method buffer storage, tear down controls and
 * release the demux state. */
void DemuxClose( vlc_object_t *obj )
{
    demux_t *demux = (demux_t *)obj;
    demux_sys_t *sys = demux->p_sys;
    int fd = sys->i_fd;

    /* Stop video capture */
    switch( sys->io )
    {
        case IO_METHOD_READ:
            /* Nothing to do */
            break;
        case IO_METHOD_MMAP:
        case IO_METHOD_USERPTR:
        {
            /* NOTE: Some buggy drivers hang if buffers are not unmapped before
             * streamoff */
            for( unsigned i = 0; i < sys->i_nbuffers; i++ )
            {
                struct v4l2_buffer buf = {
                    .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
                    .memory = ( sys->io == IO_METHOD_USERPTR ) ?
                    V4L2_MEMORY_USERPTR : V4L2_MEMORY_MMAP,
                };
                v4l2_ioctl( fd, VIDIOC_DQBUF, &buf );
            }
            enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            v4l2_ioctl( sys->i_fd, VIDIOC_STREAMOFF, &buf_type );
            break;
        }
    }

    /* Free Video Buffers */
    if( sys->p_buffers ) {
        switch( sys->io )
        {
            case IO_METHOD_READ:
                /* Single heap buffer used by read() I/O. */
                free( sys->p_buffers[0].start );
                break;
            case IO_METHOD_MMAP:
                for( unsigned i = 0; i < sys->i_nbuffers; ++i )
                    v4l2_munmap( sys->p_buffers[i].start,
                                 sys->p_buffers[i].length );
                break;
            case IO_METHOD_USERPTR:
                for( unsigned i = 0; i < sys->i_nbuffers; ++i )
                    free( sys->p_buffers[i].start );
                break;
        }
        free( sys->p_buffers );
    }

    ControlsDeinit( obj, sys->controls );
    v4l2_close( fd );
    free( sys );
}

/* Demux control callback: advertises the live (non-pausable,
 * non-seekable) nature of the source and answers caching/time queries.
 * Everything else returns VLC_EGENERIC. */
static int DemuxControl( demux_t *demux, int query, va_list args )
{
    switch( query )
    {
        /* Special for access_demux */
        case DEMUX_CAN_PAUSE:
        case DEMUX_CAN_SEEK:
        case DEMUX_CAN_CONTROL_PACE:
            *va_arg( args, bool * ) = false;
            return VLC_SUCCESS;

        case DEMUX_GET_PTS_DELAY:
            /* live-caching is in milliseconds; convert to microseconds. */
            *va_arg(args,int64_t *) = INT64_C(1000)
                * var_InheritInteger( demux, "live-caching" );
            return VLC_SUCCESS;

        case DEMUX_GET_TIME:
            *va_arg( args, int64_t * ) = mdate();
            return VLC_SUCCESS;

        /* TODO implement others */
        default:
            return VLC_EGENERIC;
    }

    return VLC_EGENERIC;
}

/** Gets a frame in read/write mode */
static block_t *BlockRead( vlc_object_t *obj, int fd, size_t size )
{
    block_t *block = block_Alloc( size );
    if( unlikely(block == NULL) )
        return NULL;

    ssize_t val = v4l2_read( fd, block->p_buffer, size );
    if( val == -1 )
    {
        /* The block is released on every error path. */
        block_Release( block );
        switch( errno )
        {
            case EAGAIN:
                return NULL;
            case EIO: /* could be ignored per specification */
                /* fall through */
            default:
                msg_Err( obj, "cannot read frame: %m" );
                return NULL;
        }
    }
    /* Shrink the block to the bytes actually read. */
    block->i_buffer = val;
    return block;
}
// Thread body: open /dev/video0, negotiate RGB24 640x480 (falling back
// to v4lconvert when the driver uses another format), mmap two buffers,
// then loop while `devam` is set: dequeue a frame, convert it, wrap it
// in a PPM header, decode to a QImage and push it to the parent widget.
// On exit, stops streaming and releases the V4L2 resources.
// NOTE(review): the select() retry condition uses (errno = EINTR) — an
// assignment, so it retries on *any* error and the perror path below is
// unreachable; almost certainly (errno == EINTR) was intended.
// NOTE(review): dst_buf and buffers are never freed, and `asil` leaks
// when loadFromData() fails — confirm.
void CaptureThread::run(){
    //do real stuff
    fd = -1;
    dev_name = "/dev/video0";
    fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
    if (fd < 0) {
        qDebug("Cannot open device");
        //exit(EXIT_FAILURE);
        return;
    }
    // Conversion state for drivers that cannot deliver RGB24 natively.
    static struct v4lconvert_data *v4lconvert_data;
    static struct v4l2_format src_fmt;
    static unsigned char *dst_buf;
    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 640;
    fmt.fmt.pix.height = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    xioctl(fd, VIDIOC_S_FMT, &fmt);
    if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
        printf("Libv4l didn't accept RGB24 format. Can't proceed.\n");
        //exit(EXIT_FAILURE);
        return;
    }
    if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480))
        printf("Warning: driver is sending image at %dx%d\n",
               fmt.fmt.pix.width, fmt.fmt.pix.height);
    // Ask v4lconvert for the closest native format and switch to it.
    v4lconvert_data = v4lconvert_create(fd);
    if (v4lconvert_data == NULL)
        qDebug("v4lconvert_create");
    if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
        qDebug("v4lconvert_try_format");
    xioctl(fd, VIDIOC_S_FMT, &src_fmt);
    dst_buf = (unsigned char*)malloc(fmt.fmt.pix.sizeimage);
    // Request and map two mmap capture buffers.
    CLEAR(req);
    req.count = 2;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    xioctl(fd, VIDIOC_REQBUFS, &req);
    buffers = (buffer*)calloc(req.count, sizeof(*buffers));
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        xioctl(fd, VIDIOC_QUERYBUF, &buf);
        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
                                             PROT_READ | PROT_WRITE,
                                             MAP_SHARED, fd, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start) {
            qDebug("mmap");
            //exit(EXIT_FAILURE);
            return;
        }
    }
    // Queue every buffer, then start streaming.
    for (int i = 0; i < n_buffers; ++i) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        xioctl(fd, VIDIOC_QBUF, &buf);
    }
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMON, &type);
    int di=0;
    char header[]="P6\n640 480 255\n";
    while(devam){ /* this loop lets the frame data accumulate */
        do {
            FD_ZERO(&fds);
            FD_SET(fd, &fds);
            /* Timeout. */
            tv.tv_sec = 2;
            tv.tv_usec = 0;
            r = select(fd + 1, &fds, NULL, NULL, &tv);
        } while ((r == -1 && (errno = EINTR))); // NOTE(review): assignment, see header comment
        if (r == -1) {
            qDebug("select");
            //exit(1) ;
            return;
        }
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        xioctl(fd, VIDIOC_DQBUF, &buf);
        try{
            // Convert the raw frame to RGB24 in dst_buf.
            if (v4lconvert_convert(v4lconvert_data, &src_fmt, &fmt,
                                   (unsigned char*)buffers[buf.index].start, buf.bytesused,
                                   dst_buf, fmt.fmt.pix.sizeimage) < 0) {
                if (errno != EAGAIN)
                    qDebug("v4l_convert");
            }
            // Build a PPM in memory: copy pixels in, shift them right by
            // the header length (overlapping move), then write the header.
            unsigned char* asil=(unsigned char*)malloc(fmt.fmt.pix.sizeimage+qstrlen(header));
            memmove(asil, dst_buf, fmt.fmt.pix.sizeimage);
            memmove(asil+qstrlen(header), asil, fmt.fmt.pix.sizeimage);
            memcpy(asil,header,qstrlen(header));
            QImage qq;//=new QImage(dst_buf,640,480,QImage::Format_RGB32);
            if(qq.loadFromData(asil,fmt.fmt.pix.sizeimage+qstrlen(header),"PPM")){
                if(parent->isVisible()){
                    QImage q1(qq);
                    parent->img=q1;
                    parent->update();
                    //this->msleep(50);
                }
                //qApp->processEvents();
                if(asil) free(asil);
            }
        }catch(...){}
        // Hand the buffer back to the driver for the next frame.
        xioctl(fd, VIDIOC_QBUF, &buf);
        di++;
    }
    try{
        // Shutdown: stop streaming, unmap and close.
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        xioctl(fd, VIDIOC_STREAMOFF, &type);
        for (int i = 0; i < n_buffers; ++i)
            v4l2_munmap(buffers[i].start, buffers[i].length);
        v4l2_close(fd);
    }catch(...){}
}
// Acquisition loop: open /dev/video0 at 640x480 RGB24 via two mmap
// buffers, stream 200 frames gated by the m_semAcquire semaphore, and
// publish each one as m_frame (optionally dumping PPMs when ENABLE_DUMP
// is defined). Tears the device down before returning.
// NOTE(review): the select() retry condition uses (errno = EINTR) — an
// assignment, retrying on any error; (errno == EINTR) was almost
// certainly intended.
// NOTE(review): m_frame wraps the mmap'd buffer memory without copying;
// after the final v4l2_munmap below that QImage dangles — confirm.
void OMX_CameraSurfaceElement::videoAcquire()
{
    LOG_VERBOSE(LOG_TAG, "Started acquisition thread...");

    struct v4l2_format fmt;
    struct v4l2_buffer buf;
    struct v4l2_requestbuffers req;
    enum v4l2_buf_type type;
    fd_set fds;
    struct timeval tv;
    int r, fd = -1;
    unsigned int i, n_buffers;
    char *dev_name = (char*)"/dev/video0";
    char out_name[256];
#ifdef ENABLE_DUMP
    FILE *fout;
#endif
    struct buffer *buffers;

    fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
    if (fd < 0) {
        perror("Cannot open device");
        exit(EXIT_FAILURE);
    }

    // Negotiate the capture format; the driver may adjust it.
    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 640;
    fmt.fmt.pix.height = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    xioctl(fd, VIDIOC_S_FMT, &fmt);
    if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
        printf("Libv4l didn't accept RGB24 format. Can't proceed.\n");
        exit(EXIT_FAILURE);
    }
    if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480))
        printf("Warning: driver is sending image at %dx%d\n",
               fmt.fmt.pix.width, fmt.fmt.pix.height);

    // Request and map two mmap capture buffers.
    CLEAR(req);
    req.count = 2;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    xioctl(fd, VIDIOC_REQBUFS, &req);

    buffers = (buffer*)calloc(req.count, sizeof(*buffers));
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        xioctl(fd, VIDIOC_QUERYBUF, &buf);
        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
                                             PROT_READ | PROT_WRITE,
                                             MAP_SHARED, fd, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start) {
            perror("mmap");
            exit(EXIT_FAILURE);
        }
    }

    // Queue every buffer, then start streaming.
    for (i = 0; i < n_buffers; ++i) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        xioctl(fd, VIDIOC_QBUF, &buf);
    }
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMON, &type);

    for (i = 0; i < 200; i++) {
        do {
            FD_ZERO(&fds);
            FD_SET(fd, &fds);
            /* Timeout. */
            tv.tv_sec = 2;
            tv.tv_usec = 0;
            r = select(fd + 1, &fds, NULL, NULL, &tv);
        } while ((r == -1 && (errno = EINTR))); // NOTE(review): assignment, see header comment
        if (r == -1) {
            perror("select");
            return;
        }

        QElapsedTimer timer;
        timer.start();
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        // Wait until a consumer releases the previous frame.
        m_semAcquire.acquire();
        xioctl(fd, VIDIOC_DQBUF, &buf);
        printf("Time: %lld.\n", timer.elapsed());
        sprintf(out_name, "out%03d.ppm", i);
#ifdef ENABLE_DUMP
        fout = fopen(out_name, "w");
        if (!fout) {
            perror("Cannot open image");
            exit(EXIT_FAILURE);
        }
        fprintf(fout, "P6\n%d %d 255\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
        fwrite(buffers[buf.index].start, buf.bytesused, 1, fout);
        fclose(fout);
#endif
        xioctl(fd, VIDIOC_QBUF, &buf);
        printf("Time: %lld.\n", timer.elapsed());
        // Wraps the buffer memory directly — no deep copy here.
        m_frame = QImage((uchar*)buffers[buf.index].start, 640, 480, QImage::Format_RGB888);
    }

    // Shutdown: stop streaming, unmap and close.
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < n_buffers; ++i)
        v4l2_munmap(buffers[i].start, buffers[i].length);
    v4l2_close(fd);
    return;
}
/* Grab frames from /dev/video1: negotiate 640x480 RGB24, map 100 mmap
 * buffers, stream 100 frames (dequeue/requeue), binarize the last one
 * and tear everything down. Returns 0 on success, errno if select()
 * fails.
 *
 * Bug fixed: the select() retry loop used (errno = EINTR) — an
 * assignment, which retried on every error and clobbered errno — instead
 * of the intended (errno == EINTR) comparison. Also frees the buffer
 * descriptor array before returning.
 * NOTE(review): the final frame is requeued via VIDIOC_QBUF before
 * binarize() reads it, so the driver may overwrite it concurrently —
 * confirm whether binarize should run before the last QBUF. */
int grab_frame()
{
	struct v4l2_format fmt;
	struct v4l2_buffer buf;
	struct v4l2_requestbuffers req;
	enum v4l2_buf_type type;
	fd_set fds;
	struct timeval tv;
	int r, fd = -1;
	unsigned int i, n_buffers;
	char *dev_name = "/dev/video1";
	struct buffer *buffers;

	fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
	if (fd < 0) {
		perror("Cannot open device");
		exit(EXIT_FAILURE);
	}

	printf("grabbing frame...\n");

	/* Negotiate 640x480 RGB24; the driver may adjust the size. */
	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = 640;
	fmt.fmt.pix.height = 480;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
	xioctl(fd, VIDIOC_S_FMT, &fmt);
	if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
		printf("Libv4l didn't accept RGB24 format. Can't proceed.\n");
		exit(EXIT_FAILURE);
	}
	if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480))
		printf("Warning: driver is sending image at %dx%d\n",
		       fmt.fmt.pix.width, fmt.fmt.pix.height);

	/* Request and map the capture buffers. */
	CLEAR(req);
	req.count = 100;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	xioctl(fd, VIDIOC_REQBUFS, &req);

	buffers = calloc(req.count, sizeof(*buffers));
	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;
		xioctl(fd, VIDIOC_QUERYBUF, &buf);

		buffers[n_buffers].length = buf.length;
		buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
						     PROT_READ | PROT_WRITE,
						     MAP_SHARED, fd, buf.m.offset);
		if (MAP_FAILED == buffers[n_buffers].start) {
			perror("mmap");
			exit(EXIT_FAILURE);
		}
	}

	/* Queue every buffer, then start streaming. */
	for (i = 0; i < n_buffers; ++i) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		xioctl(fd, VIDIOC_QBUF, &buf);
	}
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMON, &type);

	/* Cycle through req.count frames so the sensor settles. */
	for (i = 0; i < req.count; i++) {
		/* Wait for a frame, retrying only on signal interruption. */
		do {
			FD_ZERO(&fds);
			FD_SET(fd, &fds);

			/* Timeout. */
			tv.tv_sec = 2;
			tv.tv_usec = 0;

			r = select(fd + 1, &fds, NULL, NULL, &tv);
		} while (r == -1 && errno == EINTR);
		if (r == -1) {
			perror("select");
			return errno;
		}

		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		xioctl(fd, VIDIOC_DQBUF, &buf);
		xioctl(fd, VIDIOC_QBUF, &buf);
	}

	/* Process the most recently dequeued frame. */
	binarize(buffers, buf);

	/* Shut down: stop streaming, unmap, close, free bookkeeping. */
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMOFF, &type);
	for (i = 0; i < n_buffers; ++i)
		v4l2_munmap(buffers[i].start, buffers[i].length);
	v4l2_close(fd);
	free(buffers);

	return 0;
}