static int video_device_is_usable(const char *dev, char **shortname) {
    int fd = v4l2_open(dev, O_RDWR);
    if (fd < 0)
        return 0;

    struct v4l2_capability caps;
    if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &caps) != 0)
        goto err_1;

#ifdef V4L2_CAP_DEVICE_CAPS
    const uint32_t device_caps = (caps.capabilities & V4L2_CAP_DEVICE_CAPS)
        ? caps.device_caps
        : caps.capabilities;
#else
    const uint32_t device_caps = caps.capabilities;
#endif // V4L2_CAP_DEVICE_CAPS

    if (!(device_caps & V4L2_CAP_VIDEO_CAPTURE))
        goto err_1;
    if (!(device_caps & V4L2_CAP_READWRITE))
        goto err_1;

    *shortname = g_strdup((char *)caps.card);

    v4l2_close(fd);
    return 1;

err_1:
    v4l2_close(fd);
    return 0;
}
static void v4l2_postprocess(MSFilter *f){
    V4l2State *s=(V4l2State*)f->data;
    if (s->fd!=-1){
        v4l2_do_munmap(s);
        v4l2_close(s);
    }
}
// close and deallocate everything opened by the other functions
int v4lClose(v4lT* s) {
    if( s->fmts ) {
        free(s->fmts);
        s->fmts = 0;
        s->fmtsCount = 0;
    }
    if( s->frmSizes ) {
        free(s->frmSizes);
        s->frmSizes = 0;
        s->frmSizeCount = 0;
    }
    if( s->frmIvals ) {
        free(s->frmIvals);
        s->frmIvals = 0;
        s->frmIvalCount = 0;
    }
    if( s->bufs ) {
        for( unsigned int i = 0; i < s->rqbuf.count; i++ )
            munmap(s->bufs[i].data, s->bufs[i].length);
        free(s->bufs);
        s->bufs = 0;
    }
    v4l2_close(s->cam);
    return 0;
}
/* API: Destroy stream. */
static pj_status_t vid4lin_stream_destroy(pjmedia_vid_dev_stream *strm)
{
    vid4lin_stream *stream = (vid4lin_stream*)strm;
    unsigned i;

    PJ_ASSERT_RETURN(stream != NULL, PJ_EINVAL);

    vid4lin_stream_stop(strm);

    PJ_LOG(4, (THIS_FILE, "Destroying v4l2 video stream %s", stream->name));

    for (i=0; i<stream->buf_cnt; ++i) {
        if (stream->buffers[i].start != MAP_FAILED) {
            v4l2_munmap(stream->buffers[i].start, stream->buffers[i].length);
            stream->buffers[i].start = MAP_FAILED;
        }
    }

    if (stream->fd >= 0) {
        v4l2_close(stream->fd);
        stream->fd = -1;
    }

    pj_pool_release(stream->pool);

    return PJ_SUCCESS;
}
int Video_in_Manager::CloseDeviceInternal()
{
    if(verbose) printf("CloseDeviceInternal\n");
    if(this->fd == -1)
    {
        throw std::runtime_error("Device not open");
    }

    if(this->deviceStarted)
        StopDeviceInternal();

    if(this->buffers != NULL)
    {
        for(int i = 0; i < this->buffer_counts; i++)
        {
            v4l2_munmap(this->buffers[i].start, this->buffers[i].length);
        }
        delete [] this->buffers;
    }
    this->buffers = NULL;

    // Release memory
    v4l2_close(fd);
    fd = -1;
    return 1;
}
void camera_control_backup_system_settings(CameraControl* cc, const char* file)
{
    int AutoAEC = 0;
    int AutoAGC = 0;
    int Gain = 0;
    int Exposure = 0;
    int Contrast = 0;
    int Brightness = 0;

    int fd = open_v4l2_device(cc->cameraID);
    if (fd != -1) {
        AutoAEC = v4l2_get_control(fd, V4L2_CID_EXPOSURE_AUTO);
        AutoAGC = v4l2_get_control(fd, V4L2_CID_AUTOGAIN);
        Gain = v4l2_get_control(fd, V4L2_CID_GAIN);
        Exposure = v4l2_get_control(fd, V4L2_CID_EXPOSURE);
        Contrast = v4l2_get_control(fd, V4L2_CID_CONTRAST);
        Brightness = v4l2_get_control(fd, V4L2_CID_BRIGHTNESS);
        v4l2_close(fd);

        dictionary* ini = dictionary_new(0);
        iniparser_set(ini, "PSEye", 0);
        iniparser_set_int(ini, "PSEye:AutoAEC", AutoAEC);
        iniparser_set_int(ini, "PSEye:AutoAGC", AutoAGC);
        iniparser_set_int(ini, "PSEye:Gain", Gain);
        iniparser_set_int(ini, "PSEye:Exposure", Exposure);
        iniparser_set_int(ini, "PSEye:Contrast", Contrast);
        iniparser_set_int(ini, "PSEye:Brightness", Brightness);
        iniparser_save_ini(ini, file);
        dictionary_del(ini);
    }
}
/** close device */
static void deviceClose(void)
{
    if (-1 == v4l2_close(fd))
        errno_exit("close");
    fd = -1;
}
int main(int argc, char *argv[])
{
    remove("test.264");

    cam = (struct camera *) malloc(sizeof(struct camera));
    if (!cam) {
        printf("malloc camera failure!\n");
        exit(1);
    }

    cam->device_name = (char *)DEVICE;
    cam->buffers = NULL;
    cam->width = SET_WIDTH;
    cam->height = SET_HEIGHT;
    cam->fps = 30; /* set 30 fps */

    framelength = sizeof(unsigned char) * cam->width * cam->height * 2;

    v4l2_init(cam);
    init(Buff);

    /* create the worker threads */
    printf("Making thread...\n");
    thread_create();

    printf("Waiting for thread...\n");
    thread_wait();

    printf("-----------end program------------");
    v4l2_close(cam);
    return 0;
}
static int close_device (void)
{
    if (-1 == v4l2_close (fd))
        return -1;

    fd = -1;
    return 0;
}
status_e UVCVisionCam::deinit()
{
    if (m_dev)
        v4l2_close(&m_dev);
    m_frame.clear();
    return STATUS_SUCCESS;
}
int main(int argc, char **argv)
{
    cam = (Camera *) malloc(sizeof(Camera));
    if (!cam) {
        printf("malloc camera failure!\n");
        exit(1);
    }
    cam->device_name = "/dev/video0";
    cam->buffers = NULL;
    cam->width = 640;
    cam->height = 480;
    cam->display_depth = 5; /* RGB24 */

    enc = (Encoder*)malloc(sizeof(Encoder));

    v4l2_init(cam);
    init_x264_encoder(enc, cam->width, cam->height);

    h264_buf = (uint8_t *) malloc(sizeof(uint8_t) * cam->width * cam->height * 2);

    if (0 != pthread_create(&mythread, NULL, (void *) capture_encode_thread, NULL)) {
        fprintf(stderr, "thread create fail\n");
    }

    pthread_join(mythread, NULL);

    printf("-----------end program------------");
    v4l2_close(cam);
    close_x264_encoder(enc);
    free(h264_buf);
    h264_buf = 0;
    return 0;
}
static int msv4l2_close(V4l2State *s){
    if (s->fd!=-1){
        v4l2_close(s->fd);
        s->fd=-1;
        s->configured=FALSE;
    }
    return 0;
}
void cameraClose()
{
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < n_buffers; ++i)
        v4l2_munmap(buffers[i].start, buffers[i].length);
    v4l2_close(fd);
}
void AccessClose( vlc_object_t *obj )
{
    access_t *access = (access_t *)obj;
    demux_sys_t *sys = (demux_sys_t *)access->p_sys;

    ControlsDeinit( obj, sys->controls );
    v4l2_close( sys->i_fd );
    free( sys );
}
void AccessClose( vlc_object_t *obj )
{
    access_t *access = (access_t *)obj;
    access_sys_t *sys = access->p_sys;

    if (sys->bufv != NULL)
        StopMmap (sys->fd, sys->bufv, sys->bufc);
    ControlsDeinit( obj, sys->controls );
    v4l2_close (sys->fd);
    free( sys );
}
CaptureThread::~CaptureThread()
{
    try {
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        xioctl(fd, VIDIOC_STREAMOFF, &type);
        /*for (int i = 0; i < n_buffers; ++i)
            v4l2_munmap(buffers[i].start, buffers[i].length);*/
        v4l2_close(fd);
    } catch(...) {
    }
    fd = -1;
}
static void
gst_v4l2_allocator_finalize (GObject * obj)
{
  GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;

  GST_LOG_OBJECT (obj, "called");

  v4l2_close (allocator->video_fd);
  gst_atomic_queue_unref (allocator->free_queue);

  G_OBJECT_CLASS (parent_class)->finalize (obj);
}
int v4l1_close(int fd)
{
    int index, result;

    index = v4l1_get_index(fd);
    if (index == -1)
        return SYS_CLOSE(fd);

    /* Abuse stream_lock to stop 2 closes from racing and trying to free
       the resources twice */
    pthread_mutex_lock(&devices[index].stream_lock);
    devices[index].open_count--;
    result = devices[index].open_count != 0;
    pthread_mutex_unlock(&devices[index].stream_lock);

    if (result)
        return v4l2_close(fd);

    /* Free resources */
    if (devices[index].v4l1_frame_pointer != MAP_FAILED) {
        if (devices[index].v4l1_frame_buf_map_count)
            V4L1_LOG("v4l1 capture buffer still mapped: %d times on close()\n",
                    devices[index].v4l1_frame_buf_map_count);
        else
            SYS_MUNMAP(devices[index].v4l1_frame_pointer,
                    V4L1_NO_FRAMES * V4L1_FRAME_BUF_SIZE);
        devices[index].v4l1_frame_pointer = MAP_FAILED;
    }

    /* Remove the fd from our list of managed fds before closing it, because
       as soon as we've done the actual close the fd may be returned by an open
       in another thread and we don't want to intercept calls to this new fd. */
    devices[index].fd = -1;

    result = v4l2_close(fd);

    V4L1_LOG("close: %d\n", fd);

    return result;
}
static void
gst_v4l2_buffer_pool_finalize (GObject * object)
{
  GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (object);

  if (pool->video_fd >= 0)
    v4l2_close (pool->video_fd);

  if (pool->allocator)
    gst_allocator_unref (pool->allocator);

  g_free (pool->buffers);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
void fg_unref(fg_grabber *fg)
{
    // Make sure we free all memory (backwards!)
    if (v4l2_close(fg->fd) != 0)
        fg_debug_error("fg_close(): warning: failed closing device file");

    free(fg->device);
    free(fg->inputs);
    free(fg->tuners);
    free(fg->controls);
    free(fg);
}
int CamaraClose(struct camera_context *cam){
    enum v4l2_buf_type type;
    int i;

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(cam->fd, VIDIOC_STREAMOFF, &type);

    for (i = 0; i < cam->buf_data.buf_n; ++i)
        v4l2_munmap(cam->buf_data.start[i], cam->buf_data.length[i]);

    v4l2_close(cam->fd);
    return 0;
}
int DemuxOpen( vlc_object_t *obj )
{
    demux_t *demux = (demux_t *)obj;

    demux_sys_t *sys = malloc (sizeof (*sys));
    if (unlikely(sys == NULL))
        return VLC_ENOMEM;
    demux->p_sys = sys;

    ParseMRL( obj, demux->psz_location );

    char *path = var_InheritString (obj, CFG_PREFIX"dev");
    if (unlikely(path == NULL))
        goto error; /* probably OOM */
    msg_Dbg (obj, "opening device '%s'", path);

    int rawfd = vlc_open (path, O_RDWR);
    if (rawfd == -1)
    {
        msg_Err (obj, "cannot open device '%s': %m", path);
        free (path);
        goto error;
    }
    free (path);

    int fd = v4l2_fd_open (rawfd, 0);
    if (fd == -1)
    {
        msg_Warn (obj, "cannot initialize user-space library: %m");
        /* fallback to direct kernel mode anyway */
        fd = rawfd;
    }
    sys->fd = fd;

    if (InitVideo (demux, fd))
    {
        v4l2_close (fd);
        goto error;
    }

    sys->controls = ControlsInit (VLC_OBJECT(demux), fd);
    demux->pf_demux = NULL;
    demux->pf_control = DemuxControl;
    demux->info.i_update = 0;
    demux->info.i_title = 0;
    demux->info.i_seekpoint = 0;
    return VLC_SUCCESS;

error:
    free (sys);
    return VLC_EGENERIC;
}
int vx_source_v4l2_close(vx_source* s)
{
    vx_source_v4l2 *source = VX_V4L2_CAST(s);

    // int i = 0;
    // for (i = 0; i < source->_requestbuffers.count; ++i)
    //     v4l2_munmap(source->buffers[i].start, source->buffers[i].length);

    if (source->_fd != 0)
        source->_fd = v4l2_close(source->_fd);

    printf("%s %d\n", __FUNCTION__, __LINE__);

    return 0;
}
static void Video_device_dealloc(Video_device *self)
{
    if(self->fd >= 0)
    {
        if(self->buffers)
        {
            Video_device_unmap(self);
        }

        v4l2_close(self->fd);
    }

    self->ob_type->tp_free((PyObject *)self);
}
static void v4l2_preprocess(MSFilter *f){
    V4l2State *s=(V4l2State*)f->data;
    if (s->fd==-1 && v4l2_open(s)!=0) {
        return;
    }
    if (!s->configured && v4l2_configure(s)!=0){
        return;
    }
    if (v4l2_do_mmap(s)==0){
        ms_message("V4L2 video capture started.");
    }else{
        v4l2_close(s);
    }
    s->start_time=f->ticker->time;
}
static void destructor(void *arg)
{
    struct vidsrc_st *st = arg;

    if (st->run) {
        st->run = false;
        pthread_join(st->thread, NULL);
    }

    stop_capturing(st);
    uninit_device(st);

    if (st->fd >= 0)
        v4l2_close(st->fd);
}
static PyObject *Video_device_close(Video_device *self)
{
    if(self->fd >= 0)
    {
        if(self->buffers)
        {
            Video_device_unmap(self);
        }

        v4l2_close(self->fd);
        self->fd = -1;
    }

    Py_RETURN_NONE;
}
int CaptureThread::stop()
{
    running = false;
    devam = false;
    mutex.lock();

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);
    for (unsigned int i = 0; i < n_buffers; ++i)
        v4l2_munmap(buffers[i].start, buffers[i].length);
    v4l2_close(fd);
    fd = -1;

    mutex.unlock();
    quit();
    return 0;
}
void V4LThread::CloseSource()
{
    unsigned int i;
    enum v4l2_buf_type type;

    // stop streaming
    type = V4L2_BUF_TYPE_SDR_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);

    for (i = 0; i < n_buffers; i++)
        v4l2_munmap(buffers[i].start, buffers[i].length);

    v4l2_close(fd);
    fd = -1;
}
static void v4l2_terminate(struct v4l2_data *data)
{
    if (data->thread) {
        os_event_signal(data->event);
        pthread_join(data->thread, NULL);
        os_event_destroy(data->event);
        data->thread = 0;
    }

    v4l2_destroy_mmap(&data->buffers);

    if (data->dev != -1) {
        v4l2_close(data->dev);
        data->dev = -1;
    }
}