int camera_init(struct picture_t *out_info)
{
	fd_cam = open(CAM_NAME, O_RDWR);
	if (fd_cam < 0) {
		perror("open camera " CAM_NAME);
		return 0;
	}
	if (!get_format())
		goto label_close;

	lib = v4lconvert_create(fd_cam);
	if (!lib) {
		perror("v4lconvert_create");
		goto label_close;
	}
	if (!buf_alloc_mmap())
		goto label_free;

	/* YUV 4:2:0 stores 12 bits per pixel: a full-resolution Y plane
	 * plus quarter-resolution U and V planes, hence w * h * 3/2. */
	YUV420_size = current_pic.width * current_pic.height * 3 / 2;
	if (!(current_pic.buffer = malloc(YUV420_size))) {
		perror("malloc");
		goto label_free;
	}
	*out_info = current_pic;
	return 1;

label_free:
	free_buf_mmap();
	v4lconvert_destroy(lib);
label_close:
	close(fd_cam);
	return 0;
}
// FIXME: return 0/-1 and set errno instead of calling exit(1)
int video_config(struct video_t *vid, struct v4l2_format *fmt)
{
	// vid->fd  : the file descriptor of the video device
	// src_fmt  : will be set to the closest available format to what we want
	// fmt      : must be set to the format we want in output
	int ret;

	// set up the converter
	vid->convert_data = v4lconvert_create(vid->fd);
	if (vid->convert_data == NULL)
		exit(1); // FIXME: "v4lconvert_create", errno
	if (v4lconvert_try_format(vid->convert_data, fmt, &vid->src_fmt) != 0)
		exit(1); // FIXME: "v4lconvert_try_format", errno

	ret = xioctl(vid->fd, VIDIOC_S_FMT, &vid->src_fmt);
	if (ret < 0)
		exit(1); // FIXME: fail gracefully

#ifdef DEBUG
	printf("raw pixfmt: %c%c%c%c %dx%d\n",
	       vid->src_fmt.fmt.pix.pixelformat & 0xff,
	       (vid->src_fmt.fmt.pix.pixelformat >> 8) & 0xff,
	       (vid->src_fmt.fmt.pix.pixelformat >> 16) & 0xff,
	       (vid->src_fmt.fmt.pix.pixelformat >> 24) & 0xff,
	       vid->src_fmt.fmt.pix.width, vid->src_fmt.fmt.pix.height);
#endif

	// allocate space for a raw image
	vid->raw_buffer = malloc(vid->src_fmt.fmt.pix.sizeimage);
	if (!vid->raw_buffer)
		exit(1); // FIXME: out of memory

	// keep the destination format
	memcpy(&vid->vid_fmt, fmt, sizeof(*fmt));
	return 0;
}
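/* A minimal sketch of what the FIXME above asks for: return -1 with errno
 * set instead of calling exit(1). The struct video_t fields and xioctl()
 * are assumed to match the snippet above; this is illustrative only, not
 * the project's actual fix. */
static int video_config_checked(struct video_t *vid, struct v4l2_format *fmt)
{
	vid->convert_data = v4lconvert_create(vid->fd);
	if (vid->convert_data == NULL)
		return -1;	/* errno left as set by v4lconvert_create */

	if (v4lconvert_try_format(vid->convert_data, fmt, &vid->src_fmt) != 0 ||
	    xioctl(vid->fd, VIDIOC_S_FMT, &vid->src_fmt) < 0 ||
	    (vid->raw_buffer = malloc(vid->src_fmt.fmt.pix.sizeimage)) == NULL) {
		int saved = errno;	/* preserve the cause across cleanup */
		v4lconvert_destroy(vid->convert_data);
		vid->convert_data = NULL;
		errno = saved;
		return -1;
	}

	/* keep the destination format */
	memcpy(&vid->vid_fmt, fmt, sizeof(*fmt));
	return 0;
}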
//device file, width, height, channel, std, nb_buf
struct capture_device *init_capture_device(struct video_device *vdev,
		int w, int h, int ch, int s, int nb_buf)
{
	if (vdev->capture != NULL)
		return vdev->capture;

	//create capture device
	dprint(LIBVIDEO_SOURCE_CAP, LIBVIDEO_LOG_DEBUG,
			"CAP: Initialising capture interface\n");
	XMALLOC(vdev->capture, struct capture_device *,
			sizeof(struct capture_device));
	XMALLOC(vdev->capture->mmap, struct mmap *, sizeof(struct mmap));

	//fill in cdev struct
	vdev->capture->mmap->req_buffer_nr = nb_buf;
	vdev->capture->width = w;
	vdev->capture->height = h;
	vdev->capture->channel = ch;
	vdev->capture->std = s;

	setup_capture_actions(vdev);

	if (vdev->v4l_version == V4L2_VERSION) {
		XMALLOC(vdev->capture->convert, struct convert_data *,
				sizeof(struct convert_data));
		vdev->capture->convert->priv = v4lconvert_create(vdev->fd);
	}
static int tc_v4l2_video_setup_image_format(V4L2Source *vs, int width, int height)
{
    int err = 0;

    vs->width = width;
    vs->height = height;

    vs->v4l_convert = v4lconvert_create(vs->video_fd);
    if (!vs->v4l_convert) {
        return TC_ERROR;
    }

    memset(&(vs->v4l_dst_fmt), 0, sizeof(vs->v4l_dst_fmt));
    vs->v4l_dst_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    vs->v4l_dst_fmt.fmt.pix.width = width;
    vs->v4l_dst_fmt.fmt.pix.height = height;
    vs->v4l_dst_fmt.fmt.pix.pixelformat = vs->v4l_dst_csp;

    err = v4lconvert_try_format(vs->v4l_convert,
                                &(vs->v4l_dst_fmt), &(vs->v4l_src_fmt));
    if (err) {
        tc_log_error(MOD_NAME, "unable to match formats: %s",
                     v4lconvert_get_error_message(vs->v4l_convert));
        return TC_ERROR;
    }

    err = v4l2_ioctl(vs->video_fd, VIDIOC_S_FMT, &(vs->v4l_src_fmt));
    if (err < 0) {
        tc_log_error(MOD_NAME, "error while setting the cam image format");
        return TC_ERROR;
    }

    if (!v4lconvert_needs_conversion(vs->v4l_convert,
                                     &(vs->v4l_src_fmt),
                                     &(vs->v4l_dst_fmt))) {
        tc_log_info(MOD_NAME, "fetch frames directly");
        vs->fetch_data = tc_v4l2_fetch_data_memcpy;
        /* In the near future we should aim for zero-copy. -- FR */
    } else {
        char src_fcc[5] = { '\0' };
        char dst_fcc[5] = { '\0' };
        pixfmt_to_fourcc(vs->v4l_src_fmt.fmt.pix.pixelformat, src_fcc);
        pixfmt_to_fourcc(vs->v4l_dst_fmt.fmt.pix.pixelformat, dst_fcc);
        tc_log_info(MOD_NAME, "fetch frames using libv4lconvert "
                    "[%s] -> [%s]", src_fcc, dst_fcc);
        vs->fetch_data = tc_v4l2_fetch_data_v4lconv;
    }

    return TC_OK;
}
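/* pixfmt_to_fourcc() is called above but not shown in the snippet. A
 * plausible implementation simply unpacks the four ASCII bytes of the V4L2
 * fourcc into the caller's 5-byte buffer; sketch only, the module's own
 * helper may differ. */
static void pixfmt_to_fourcc(uint32_t pixelformat, char *fcc)
{
    fcc[0] = pixelformat & 0xff;          /* least significant byte first */
    fcc[1] = (pixelformat >> 8) & 0xff;
    fcc[2] = (pixelformat >> 16) & 0xff;
    fcc[3] = (pixelformat >> 24) & 0xff;
    fcc[4] = '\0';
}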
void Videostreaming::setDevice(QString deviceName)
{
    close();
    deviceName.append(QString::number(deviceNumber, 10));
    if (open(deviceName, false)) {
        emit logDebugHandle("Device Opened - " + deviceName);
        m_convertData = v4lconvert_create(fd());
        m_buftype = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        openSuccess = true;
    } else {
        emit logCriticalHandle("Device Opening Failed - " + deviceName);
    }
}
void V4LCamera::openCaptureDevice()
{
    /// Open the capture device
    _fd = open(_deviceName.toAscii().constData(), O_RDWR | O_NONBLOCK, 0);

    /// Check whether it is open
    if (_fd < 0)
        qFatal("[CAMERA_THREAD::V4L_CAMERA] - openCaptureDevice() - Unable to open device!");

    /// Get the capture device's frame format configuration
    V4LSettings::qioctl(_fd, VIDIOC_G_FMT, &_fmt, "V4LCamera::openCaptureDevice()");

    /// Set up V4L frame buffer data conversion
    _v4lconvert_data = v4lconvert_create(_fd);

#ifdef _DEBUG_CAPTURE_THREADS
    if (_v4lconvert_data == NULL)
        qWarning("[CAMERA_THREAD::V4L_CAMERA] - openCaptureDevice() - v4lconvert_create() returned an error");
    if (v4lconvert_try_format(_v4lconvert_data, &_fmt, &_src_fmt) != 0)
        qWarning("[CAMERA_THREAD::V4L_CAMERA] - openCaptureDevice() - v4lconvert_try_format() returned an error");
#endif //_DEBUG_CAPTURE_THREADS
}
void init_device()
{
	struct v4l2_capability cap;
	int ret;
	int sizeimage;

	if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
		if (EINVAL == errno) {
			fprintf(stderr, "%s is no V4L2 device\n", dev_name);
			perror("EXIT_FAILURE");
			return;
		} else {
			perror("VIDIOC_QUERYCAP");
			return;
		}
	}

	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
		fprintf(stderr, "%s is no video capture device\n", dev_name);
		/*exit(EXIT_FAILURE);*/
		perror("EXIT_FAILURE");
		return;
	}

	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = w;
	fmt.fmt.pix.height = h;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field = V4L2_FIELD_NONE;

	v4lconvert_data = v4lconvert_create(fd);
	if (v4lconvert_data == NULL) {
		perror("v4lconvert_create");
		return;
	}
	if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0) {
		/*errno_exit("v4lconvert_try_format");*/
		perror("v4lconvert_try_format");
		return;
	}

	ret = xioctl(fd, VIDIOC_S_FMT, &src_fmt);
	sizeimage = src_fmt.fmt.pix.sizeimage;
	dst_buf = (unsigned char *)malloc(fmt.fmt.pix.sizeimage);

#ifdef DEBUG
	printf("raw pixfmt: %c%c%c%c %dx%d\n",
	       src_fmt.fmt.pix.pixelformat & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 8) & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 16) & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 24) & 0xff,
	       src_fmt.fmt.pix.width, src_fmt.fmt.pix.height);
#endif

	if (ret < 0) {
		perror("VIDIOC_S_FMT");
		return;
	}

#ifdef DEBUG
	/* Note VIDIOC_S_FMT may change width and height. */
	printf("pixfmt: %c%c%c%c %dx%d\n",
	       fmt.fmt.pix.pixelformat & 0xff,
	       (fmt.fmt.pix.pixelformat >> 8) & 0xff,
	       (fmt.fmt.pix.pixelformat >> 16) & 0xff,
	       (fmt.fmt.pix.pixelformat >> 24) & 0xff,
	       fmt.fmt.pix.width, fmt.fmt.pix.height);
#endif

	w = fmt.fmt.pix.width;
	h = fmt.fmt.pix.height;

	init_mmap();
}
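/* The fourcc-unpacking printf above recurs almost verbatim in several of
 * these snippets; consolidating it into one helper avoids the kind of
 * copy-paste slip the original debug block contained. Hypothetical
 * convenience function, not part of any of the quoted sources. */
static void print_pixfmt(const char *label, const struct v4l2_format *f)
{
	unsigned int p = f->fmt.pix.pixelformat;

	printf("%s: %c%c%c%c %dx%d\n", label,
	       p & 0xff, (p >> 8) & 0xff, (p >> 16) & 0xff, (p >> 24) & 0xff,
	       f->fmt.pix.width, f->fmt.pix.height);
}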
void capture()
{
	unsigned char *yuv420_buf;
	int yuv420_size = fmt.fmt.pix.width * fmt.fmt.pix.height * 3 / 2;
	int src_size, nframe;
	struct v4lconvert_data *lib;

	buffersize = calc_size(fmt.fmt.pix.sizeimage);
	buf_alloc_mmap();

	lib = v4lconvert_create(fd_cam);
	if (!lib) {
		perror("v4lconvert_create");
		exit(1);
	}
	yuv420_buf = malloc(yuv420_size);
	if (!yuv420_buf) {
		perror("malloc");
		exit(1);
	}

	if (ioctl(fd_cam, VIDIOC_STREAMON, &reqbuf.type) < 0) {
		perror("VIDIOC_STREAMON");
		exit(1);
	}

	dst_fmt = fmt;
	dst_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
	if (!v4lconvert_supported_dst_format(dst_fmt.fmt.pix.pixelformat)) {
		puts("v4lconvert_supported_dst_format");
		exit(1);
	}

	for (errno = 0, nframe = 0; nframe < NUM_FRAME; nframe++) {
		struct v4l2_buffer cam_buf = {0};

		cam_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		cam_buf.memory = reqbuf.memory;
		if (ioctl(fd_cam, VIDIOC_DQBUF, &cam_buf) < 0) {
			perror("VIDIOC_DQBUF");
			exit(1);
		}
		printf("DQBUF: index=%d, seq=%d, time=%ld.%06ld\n",
		       cam_buf.index, cam_buf.sequence,
		       (long)cam_buf.timestamp.tv_sec,
		       (long)cam_buf.timestamp.tv_usec);

		src_size = cam_buf.length;
		if (v4lconvert_convert(lib, &fmt, &dst_fmt,
				(void *)buf_pointer[cam_buf.index], src_size,
				yuv420_buf, yuv420_size) <= 0) {
			perror("v4lconvert_convert");
			exit(1);
		}

		cam_buf.length = buffersize;
		if (ioctl(fd_cam, VIDIOC_QBUF, &cam_buf) < 0) {
			perror("VIDIOC_QBUF");
			exit(1);
		}
		write(fd_out, yuv420_buf, yuv420_size);
	}

	if (ioctl(fd_cam, VIDIOC_STREAMOFF, &reqbuf.type) < 0) {
		perror("VIDIOC_STREAMOFF");
		exit(1);
	}

	free(yuv420_buf);
	v4lconvert_destroy(lib);
	free_buf_mmap();
}
int CaptureThread::start()
{
    wait();
    devam = false;
    fd = -1;

    // read config
    dev_name = Settings::node();
    width = Settings::width();
    height = Settings::height();
    fps = Settings::fps();
    if (fps > 0) {
        delay = 1000 / fps;
    } else {
        delay = 0;
    }

    // open webcam device node
    fd = v4l2_open(dev_name.toStdString().c_str(), O_RDWR | O_NONBLOCK, 0);
    if (fd < 0) {
        kError() << "Cannot open device";
        quit();
        return 1;
    }

    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    xioctl(fd, VIDIOC_S_FMT, &fmt);
    if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
        kError() << "Libv4l didn't accept RGB24 format. Can't proceed.";
        quit();
        return 1;
    }
    emit startedCapture(fmt.fmt.pix.width, fmt.fmt.pix.height);

    v4lconvert_data = v4lconvert_create(fd);
    if (v4lconvert_data == NULL)
        kDebug() << "v4lconvert_create";
    if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
        kDebug() << "v4lconvert_try_format";
    xioctl(fd, VIDIOC_S_FMT, &src_fmt);
    dst_buf = (unsigned char*)malloc(fmt.fmt.pix.sizeimage);

    CLEAR(req);
    req.count = 2;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    xioctl(fd, VIDIOC_REQBUFS, &req);

    buffers = (buffer*)calloc(req.count, sizeof(*buffers));
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        xioctl(fd, VIDIOC_QUERYBUF, &buf);
        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
                PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start) {
            kDebug() << "mmap";
            quit();
            return 1;
        }
    }
    for (unsigned int i = 0; i < n_buffers; ++i) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        xioctl(fd, VIDIOC_QBUF, &buf);
    }

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMON, &type);

    di = 0;
    sprintf(header, "P6\n%d %d 255\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
    devam = true;

    // start processing video data
    running = true;
    QThread::start();
    return 0;
}
void CaptureThread::run()
{
    // do the real work
    fd = -1;
    dev_name = "/dev/video0";
    fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
    if (fd < 0) {
        qDebug("Cannot open device");
        //exit(EXIT_FAILURE);
        return;
    }

    static struct v4lconvert_data *v4lconvert_data;
    static struct v4l2_format src_fmt;
    static unsigned char *dst_buf;

    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 640;
    fmt.fmt.pix.height = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    xioctl(fd, VIDIOC_S_FMT, &fmt);
    if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
        printf("Libv4l didn't accept RGB24 format. Can't proceed.\n");
        //exit(EXIT_FAILURE);
        return;
    }
    if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480))
        printf("Warning: driver is sending image at %dx%d\n",
               fmt.fmt.pix.width, fmt.fmt.pix.height);

    v4lconvert_data = v4lconvert_create(fd);
    if (v4lconvert_data == NULL)
        qDebug("v4lconvert_create");
    if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
        qDebug("v4lconvert_try_format");
    xioctl(fd, VIDIOC_S_FMT, &src_fmt);
    dst_buf = (unsigned char*)malloc(fmt.fmt.pix.sizeimage);

    CLEAR(req);
    req.count = 2;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    xioctl(fd, VIDIOC_REQBUFS, &req);

    buffers = (buffer*)calloc(req.count, sizeof(*buffers));
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        xioctl(fd, VIDIOC_QUERYBUF, &buf);
        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
                PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start) {
            qDebug("mmap");
            //exit(EXIT_FAILURE);
            return;
        }
    }
    for (int i = 0; i < n_buffers; ++i) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        xioctl(fd, VIDIOC_QBUF, &buf);
    }
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMON, &type);

    int di = 0;
    char header[] = "P6\n640 480 255\n";
    while (devam) {
        /* this loop waits until data has accumulated */
        do {
            FD_ZERO(&fds);
            FD_SET(fd, &fds);
            /* Timeout. */
            tv.tv_sec = 2;
            tv.tv_usec = 0;
            r = select(fd + 1, &fds, NULL, NULL, &tv);
        } while (r == -1 && errno == EINTR);
        if (r == -1) {
            qDebug("select");
            //exit(1);
            return;
        }

        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        xioctl(fd, VIDIOC_DQBUF, &buf);

        try {
            if (v4lconvert_convert(v4lconvert_data, &src_fmt, &fmt,
                    (unsigned char*)buffers[buf.index].start, buf.bytesused,
                    dst_buf, fmt.fmt.pix.sizeimage) < 0) {
                if (errno != EAGAIN)
                    qDebug("v4l_convert");
            }
            // prepend a PPM header so QImage can parse the raw RGB24 frame
            unsigned char* asil = (unsigned char*)malloc(fmt.fmt.pix.sizeimage + qstrlen(header));
            memmove(asil, dst_buf, fmt.fmt.pix.sizeimage);
            memmove(asil + qstrlen(header), asil, fmt.fmt.pix.sizeimage);
            memcpy(asil, header, qstrlen(header));
            QImage qq; //=new QImage(dst_buf,640,480,QImage::Format_RGB32);
            if (qq.loadFromData(asil, fmt.fmt.pix.sizeimage + qstrlen(header), "PPM")) {
                if (parent->isVisible()) {
                    QImage q1(qq);
                    parent->img = q1;
                    parent->update();
                    //this->msleep(50);
                }
                //qApp->processEvents();
            }
            free(asil);
        } catch (...) {}

        xioctl(fd, VIDIOC_QBUF, &buf);
        di++;
    }

    try {
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        xioctl(fd, VIDIOC_STREAMOFF, &type);
        for (int i = 0; i < n_buffers; ++i)
            v4l2_munmap(buffers[i].start, buffers[i].length);
        v4l2_close(fd);
    } catch (...) {}
}
int v4l2_fd_open(int fd, int v4l2_flags)
{
	int i, index;
	char *lfname;
	struct v4l2_capability cap;
	struct v4l2_format fmt;
	struct v4lconvert_data *convert;

	/* If no log file was set by the app, see if one was specified through
	   the environment */
	if (!v4l2_log_file) {
		lfname = getenv("LIBV4L2_LOG_FILENAME");
		if (lfname)
			v4l2_log_file = fopen(lfname, "w");
	}

	/* check that this is a v4l2 device */
	if (SYS_IOCTL(fd, VIDIOC_QUERYCAP, &cap)) {
		int saved_err = errno;

		V4L2_LOG_ERR("getting capabilities: %s\n", strerror(errno));
		errno = saved_err;
		return -1;
	}

	/* we only add functionality for video capture devices */
	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) ||
	    !(cap.capabilities & (V4L2_CAP_STREAMING | V4L2_CAP_READWRITE)))
		return fd;

	/* Get the current cam format */
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (SYS_IOCTL(fd, VIDIOC_G_FMT, &fmt)) {
		int saved_err = errno;

		V4L2_LOG_ERR("getting pixformat: %s\n", strerror(errno));
		errno = saved_err;
		return -1;
	}

	/* init libv4lconvert */
	convert = v4lconvert_create(fd);
	if (!convert)
		return -1;

	/* So we have a v4l2 capture device, register it in our devices array */
	pthread_mutex_lock(&v4l2_open_mutex);
	for (index = 0; index < V4L2_MAX_DEVICES; index++)
		if (devices[index].fd == -1) {
			devices[index].fd = fd;
			break;
		}
	pthread_mutex_unlock(&v4l2_open_mutex);

	if (index == V4L2_MAX_DEVICES) {
		V4L2_LOG_ERR("attempting to open more than %d video devices\n",
				V4L2_MAX_DEVICES);
		errno = EBUSY;
		return -1;
	}

	devices[index].flags = v4l2_flags;
	if (cap.capabilities & V4L2_CAP_READWRITE)
		devices[index].flags |= V4L2_SUPPORTS_READ;
	if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
		devices[index].flags |= V4L2_USE_READ_FOR_READ;
		/* This device only supports read so the stream gets started by the
		   driver on the first read */
		devices[index].first_frame = V4L2_IGNORE_FIRST_FRAME_ERRORS;
	}
	if (!strcmp((char *)cap.driver, "uvcvideo"))
		devices[index].flags |= V4L2_IS_UVC;

	devices[index].open_count = 1;
	devices[index].src_fmt = fmt;
	devices[index].dest_fmt = fmt;

	/* When a user does a try_fmt with the current dest_fmt and the dest_fmt
	   is a supported one we will align the resolution (see try_fmt for why).
	   Do the same here now, so that a try_fmt on the result of a get_fmt done
	   immediately after open leaves the fmt unchanged. */
	if (v4lconvert_supported_dst_format(
			devices[index].dest_fmt.fmt.pix.pixelformat)) {
		devices[index].dest_fmt.fmt.pix.width &= ~7;	/* align width down to a multiple of 8 */
		devices[index].dest_fmt.fmt.pix.height &= ~1;	/* align height down to a multiple of 2 */
	}

	pthread_mutex_init(&devices[index].stream_lock, NULL);

	devices[index].no_frames = 0;
	devices[index].nreadbuffers = V4L2_DEFAULT_NREADBUFFERS;
	devices[index].convert = convert;
	devices[index].convert_mmap_buf = MAP_FAILED;
	for (i = 0; i < V4L2_MAX_NO_FRAMES; i++) {
		devices[index].frame_pointers[i] = MAP_FAILED;
		devices[index].frame_map_count[i] = 0;
	}
	devices[index].frame_queued = 0;
	devices[index].readbuf = NULL;
	devices[index].readbuf_size = 0;

	if (index >= devices_used)
		devices_used = index + 1;

	V4L2_LOG("open: %d\n", fd);

	return fd;
}
static int vidcap_v4l2_init(const struct vidcap_params *params, void **state)
{
        struct vidcap_v4l2_state *s;
        const char *dev_name = DEFAULT_DEVICE;
        uint32_t pixelformat = 0;
        uint32_t width = 0, height = 0;
        uint32_t numerator = 0, denominator = 0;
        int buffer_count = DEFAULT_BUF_COUNT;

        printf("vidcap_v4l2_init\n");

        if (vidcap_params_get_flags(params) & VIDCAP_FLAG_AUDIO_ANY) {
                return VIDCAP_INIT_AUDIO_NOT_SUPPOTED;
        }

        if (vidcap_params_get_fmt(params) &&
                        strcmp(vidcap_params_get_fmt(params), "help") == 0) {
                show_help();
                return VIDCAP_INIT_NOERR;
        }

        s = (struct vidcap_v4l2_state *) calloc(1, sizeof(struct vidcap_v4l2_state));
        if (s == NULL) {
                printf("Unable to allocate v4l2 capture state\n");
                return VIDCAP_INIT_FAIL;
        }

        s->fd = -1;

        char *tmp = NULL;

        if (vidcap_params_get_fmt(params)) {
                tmp = strdup(vidcap_params_get_fmt(params));
                char *init_fmt = tmp;
                char *save_ptr = NULL;
                char *item;

                while ((item = strtok_r(init_fmt, ":", &save_ptr))) {
                        if (strncmp(item, "dev=", strlen("dev=")) == 0) {
                                dev_name = item + strlen("dev=");
                        } else if (strncmp(item, "fmt=", strlen("fmt=")) == 0) {
                                char *fmt = item + strlen("fmt=");
                                union {
                                        uint32_t fourcc;
                                        char str[4];
                                } str_to_uint;
                                int len = 4;
                                if (strlen(fmt) < 4)
                                        len = strlen(fmt);
                                memset(str_to_uint.str, 0, 4);
                                memcpy(str_to_uint.str, fmt, len);
                                pixelformat = str_to_uint.fourcc;
                        } else if (strncmp(item, "size=", strlen("size=")) == 0) {
                                if (strchr(item, 'x')) {
                                        width = atoi(item + strlen("size="));
                                        height = atoi(strchr(item, 'x') + 1);
                                }
                        } else if (strncmp(item, "tpf=", strlen("tpf=")) == 0) {
                                if (strchr(item, '/')) {
                                        numerator = atoi(item + strlen("tpf="));
                                        denominator = atoi(strchr(item, '/') + 1);
                                }
                        } else if (strncmp(item, "buffers=", strlen("buffers=")) == 0) {
                                buffer_count = atoi(item + strlen("buffers="));
                                assert(buffer_count <= MAX_BUF_COUNT);
                        } else {
                                fprintf(stderr, "[V4L2] Invalid configuration argument: %s\n",
                                                item);
                                goto error;
                        }
                        init_fmt = NULL;
                }
        }

        s->fd = open(dev_name, O_RDWR);
        if (s->fd == -1) {
                fprintf(stderr, "[V4L2] Unable to open input device %s: %s\n",
                                dev_name, strerror(errno));
                goto error;
        }

        int index = 0;
        if (ioctl(s->fd, VIDIOC_S_INPUT, &index) != 0) {
                perror("Could not enable input (VIDIOC_S_INPUT)");
                goto error;
        }

        struct v4l2_capability capability;
        memset(&capability, 0, sizeof(capability));
        if (ioctl(s->fd, VIDIOC_QUERYCAP, &capability) != 0) {
                perror("V4L2: ioctl VIDIOC_QUERYCAP");
                goto error;
        }

        if (!(capability.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
                fprintf(stderr, "%s, %s can't capture\n",
                                capability.card, capability.bus_info);
                goto error;
        }

        if (!(capability.capabilities & V4L2_CAP_STREAMING)) {
                fprintf(stderr, "[V4L2] Streaming capability not present.\n");
                goto error;
        }

        struct v4l2_format fmt;
        memset(&fmt, 0, sizeof(fmt));
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(s->fd, VIDIOC_G_FMT, &fmt) != 0) {
                perror("[V4L2] Unable to get video format");
                goto error;
        }

        struct v4l2_streamparm stream_params;
        memset(&stream_params, 0, sizeof(stream_params));
        stream_params.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(s->fd, VIDIOC_G_PARM, &stream_params) != 0) {
                perror("[V4L2] Unable to get stream params");
                goto error;
        }

        if (pixelformat) {
                fmt.fmt.pix.pixelformat = pixelformat;
        }

        if (width != 0 && height != 0) {
                fmt.fmt.pix.width = width;
                fmt.fmt.pix.height = height;
        }

        fmt.fmt.pix.field = V4L2_FIELD_ANY;
        fmt.fmt.pix.bytesperline = 0;

        if (ioctl(s->fd, VIDIOC_S_FMT, &fmt) != 0) {
                perror("[V4L2] Unable to set video format");
                goto error;
        }

        if (numerator != 0 && denominator != 0) {
                stream_params.parm.capture.timeperframe.numerator = numerator;
                stream_params.parm.capture.timeperframe.denominator = denominator;
                if (ioctl(s->fd, VIDIOC_S_PARM, &stream_params) != 0) {
                        perror("[V4L2] Unable to set stream params");
                        goto error;
                }
        }

        memcpy(&s->src_fmt, &fmt, sizeof(fmt));
        memcpy(&s->dst_fmt, &fmt, sizeof(fmt));

        if (ioctl(s->fd, VIDIOC_G_FMT, &fmt) != 0) {
                perror("[V4L2] Unable to get video format");
                goto error;
        }

        if (ioctl(s->fd, VIDIOC_G_PARM, &stream_params) != 0) {
                perror("[V4L2] Unable to get stream params");
                goto error;
        }

        s->desc.tile_count = 1;

        s->conversion_needed = false;
        switch (fmt.fmt.pix.pixelformat) {
        case V4L2_PIX_FMT_YUYV:
                s->desc.color_spec = YUYV;
                break;
        case V4L2_PIX_FMT_UYVY:
                s->desc.color_spec = UYVY;
                break;
        case V4L2_PIX_FMT_RGB24:
                s->desc.color_spec = RGB;
                break;
        case V4L2_PIX_FMT_RGB32:
                s->desc.color_spec = RGBA;
                break;
        case V4L2_PIX_FMT_MJPEG:
                s->desc.color_spec = MJPG;
                break;
        case V4L2_PIX_FMT_H264:
                s->desc.color_spec = H264;
                break;
        default:
                s->conversion_needed = true;
                s->dst_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
                s->desc.color_spec = RGB;
                break;
        }

        switch (fmt.fmt.pix.field) {
        case V4L2_FIELD_NONE:
                s->desc.interlacing = PROGRESSIVE;
                break;
        case V4L2_FIELD_TOP:
                s->desc.interlacing = UPPER_FIELD_FIRST;
                break;
        case V4L2_FIELD_BOTTOM:
                s->desc.interlacing = LOWER_FIELD_FIRST;
                break;
        case V4L2_FIELD_INTERLACED:
                s->desc.interlacing = INTERLACED_MERGED;
                break;
        case V4L2_FIELD_SEQ_TB:
        case V4L2_FIELD_SEQ_BT:
        case V4L2_FIELD_ALTERNATE:
        case V4L2_FIELD_INTERLACED_TB:
        case V4L2_FIELD_INTERLACED_BT:
        default:
                fprintf(stderr, "[V4L2] Unsupported interlacing format reported from driver.\n");
                goto error;
        }

        s->desc.fps = (double) stream_params.parm.capture.timeperframe.denominator /
                stream_params.parm.capture.timeperframe.numerator;
        s->desc.width = fmt.fmt.pix.width;
        s->desc.height = fmt.fmt.pix.height;

        if (s->conversion_needed) {
                s->convert = v4lconvert_create(s->fd);
        } else {
                s->convert = NULL;
        }

        struct v4l2_requestbuffers reqbuf;
        memset(&reqbuf, 0, sizeof(reqbuf));
        reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        reqbuf.memory = V4L2_MEMORY_MMAP;
        reqbuf.count = buffer_count;

        if (ioctl(s->fd, VIDIOC_REQBUFS, &reqbuf) != 0) {
                if (errno == EINVAL)
                        printf("Video capturing or mmap-streaming is not supported\n");
                else
                        perror("VIDIOC_REQBUFS");
                goto error;
        }
        if (reqbuf.count < 2) {
                /* You may need to free the buffers here. */
                printf("Not enough buffer memory\n");
                goto error;
        }

        for (unsigned int i = 0; i < reqbuf.count; i++) {
                struct v4l2_buffer buf;
                memset(&buf, 0, sizeof(buf));
                buf.type = reqbuf.type;
                buf.memory = V4L2_MEMORY_MMAP;
                buf.index = i;

                if (-1 == ioctl(s->fd, VIDIOC_QUERYBUF, &buf)) {
                        perror("VIDIOC_QUERYBUF");
                        goto error;
                }

                s->buffers[i].length = buf.length; /* remember for munmap() */

                s->buffers[i].start = mmap(NULL, buf.length,
                                PROT_READ | PROT_WRITE, /* recommended */
                                MAP_SHARED,             /* recommended */
                                s->fd, buf.m.offset);

                if (MAP_FAILED == s->buffers[i].start) {
                        /* If you do not exit here you should unmap() and free()
                           the buffers mapped so far. */
                        perror("mmap");
                        goto error;
                }

                buf.flags = 0;
                if (ioctl(s->fd, VIDIOC_QBUF, &buf) != 0) {
                        perror("Unable to enqueue buffer");
                        goto error;
                }
        }

        if (ioctl(s->fd, VIDIOC_STREAMON, &reqbuf.type) != 0) {
                perror("Unable to start stream");
                goto error;
        }

        gettimeofday(&s->t0, NULL);
        s->frames = 0;

        free(tmp);

        *state = s;
        return VIDCAP_INIT_OK;

error:
        free(tmp);
        if (s->fd != -1)
                close(s->fd);
        free(s);
        return VIDCAP_INIT_FAIL;
}
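/* A sketch of the matching teardown for the init above: stop the stream,
 * unmap each buffer, destroy the converter, and close the fd. Field names
 * follow the snippet; the buffer count is passed in rather than guessing at
 * a state field, and the real module's done() function may well differ. */
static void vidcap_v4l2_cleanup(struct vidcap_v4l2_state *s, int buffer_count)
{
        enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

        if (ioctl(s->fd, VIDIOC_STREAMOFF, &type) != 0)
                perror("Unable to stop stream");

        for (int i = 0; i < buffer_count; i++)
                if (s->buffers[i].start != NULL &&
                                s->buffers[i].start != MAP_FAILED)
                        munmap(s->buffers[i].start, s->buffers[i].length);

        if (s->convert)
                v4lconvert_destroy(s->convert);

        close(s->fd);
        free(s);
}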
static void init_device(int w, int h)
{
	struct v4lconvert_data *v4lconvert_data;
	struct v4l2_format src_fmt;	/* raw source format */
	struct v4l2_capability cap;

	if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
		if (EINVAL == errno) {
			fprintf(stderr, "%s is no V4L2 device\n", dev_name);
			exit(EXIT_FAILURE);
		} else {
			errno_exit("VIDIOC_QUERYCAP");
		}
	}

	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
		fprintf(stderr, "%s is no video capture device\n", dev_name);
		exit(EXIT_FAILURE);
	}

	/* libv4l emulates read() on those v4l2 devices that do not support
	   it, so this print is just instructional; it should work regardless */
	printf("device capabilities\n\tread:\t%c\n\tstream:\t%c\n",
	       (cap.capabilities & V4L2_CAP_READWRITE) ? 'Y' : 'N',
	       (cap.capabilities & V4L2_CAP_STREAMING) ? 'Y' : 'N');

	/* set our requested format to V4L2_PIX_FMT_RGB24 */
	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = w;
	fmt.fmt.pix.height = h;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;

	/* libv4l also converts multiple supported formats to
	   V4L2_PIX_FMT_BGR24 or V4L2_PIX_FMT_YUV420, which means the
	   following call should *always* succeed. However, we use the
	   libv4lconvert library to print debugging information telling us
	   whether libv4l will be doing the conversion internally */
	v4lconvert_data = v4lconvert_create(fd);
	if (v4lconvert_data == NULL)
		errno_exit("v4lconvert_create");
	if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
		errno_exit("v4lconvert_try_format");
	printf("\tpixfmt:\t%c%c%c%c (%dx%d)\n",
	       src_fmt.fmt.pix.pixelformat & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 8) & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 16) & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 24) & 0xff,
	       src_fmt.fmt.pix.width, src_fmt.fmt.pix.height);
	printf("application\n\tconv:\t%c\n",
	       v4lconvert_needs_conversion(v4lconvert_data,
					   &src_fmt, &fmt) ? 'Y' : 'N');
	v4lconvert_destroy(v4lconvert_data);

	/* Actually set the pixfmt so that libv4l uses its conversion magic */
	if (v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
		errno_exit("VIDIOC_S_FMT");
	printf("\tpixfmt:\t%c%c%c%c (%dx%d)\n",
	       fmt.fmt.pix.pixelformat & 0xff,
	       (fmt.fmt.pix.pixelformat >> 8) & 0xff,
	       (fmt.fmt.pix.pixelformat >> 16) & 0xff,
	       (fmt.fmt.pix.pixelformat >> 24) & 0xff,
	       fmt.fmt.pix.width, fmt.fmt.pix.height);

	switch (io) {
	case IO_METHOD_READ:
		printf("\tio:\tread\n");
		init_read(fmt.fmt.pix.sizeimage);
		break;
	case V4L2_MEMORY_MMAP:
		printf("\tio:\tmmap\n");
		init_mmap();
		break;
	case V4L2_MEMORY_USERPTR:
		printf("\tio:\tusrptr\n");
		init_userp(fmt.fmt.pix.sizeimage);
		break;
	}
}
static void init_device(int w, int h)
{
	struct v4l2_capability cap;
	int ret;
	int sizeimage;

	if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
		if (EINVAL == errno) {
			fprintf(stderr, "%s is no V4L2 device\n", dev_name);
			exit(EXIT_FAILURE);
		} else {
			errno_exit("VIDIOC_QUERYCAP");
		}
	}

	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
		fprintf(stderr, "%s is no video capture device\n", dev_name);
		exit(EXIT_FAILURE);
	}

	switch (io) {
	case IO_METHOD_READ:
		if (!(cap.capabilities & V4L2_CAP_READWRITE)) {
			fprintf(stderr, "%s does not support read i/o\n", dev_name);
			exit(EXIT_FAILURE);
		}
		break;
	case V4L2_MEMORY_MMAP:
	case V4L2_MEMORY_USERPTR:
		if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
			fprintf(stderr, "%s does not support streaming i/o\n", dev_name);
			exit(EXIT_FAILURE);
		}
		break;
	}

//	if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0)
//		perror("get fmt");

	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = w;
	fmt.fmt.pix.height = h;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;

#ifdef WITH_V4L2_LIB
	v4lconvert_data = v4lconvert_create(fd);
	if (v4lconvert_data == NULL)
		errno_exit("v4lconvert_create");
	if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
		errno_exit("v4lconvert_try_format");
	ret = xioctl(fd, VIDIOC_S_FMT, &src_fmt);
	sizeimage = src_fmt.fmt.pix.sizeimage;
	dst_buf = malloc(fmt.fmt.pix.sizeimage);
	printf("raw pixfmt: %c%c%c%c %dx%d\n",
	       src_fmt.fmt.pix.pixelformat & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 8) & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 16) & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 24) & 0xff,
	       src_fmt.fmt.pix.width, src_fmt.fmt.pix.height);
#else
	ret = xioctl(fd, VIDIOC_S_FMT, &fmt);
	sizeimage = fmt.fmt.pix.sizeimage;
#endif
	if (ret < 0)
		errno_exit("VIDIOC_S_FMT");

	/* Note VIDIOC_S_FMT may change width and height. */
	printf("pixfmt: %c%c%c%c %dx%d\n",
	       fmt.fmt.pix.pixelformat & 0xff,
	       (fmt.fmt.pix.pixelformat >> 8) & 0xff,
	       (fmt.fmt.pix.pixelformat >> 16) & 0xff,
	       (fmt.fmt.pix.pixelformat >> 24) & 0xff,
	       fmt.fmt.pix.width, fmt.fmt.pix.height);

	switch (io) {
	case IO_METHOD_READ:
		init_read(sizeimage);
		break;
	case V4L2_MEMORY_MMAP:
		init_mmap();
		break;
	case V4L2_MEMORY_USERPTR:
		init_userp(sizeimage);
		break;
	}
}
/*}}}*/

static inline int video_initio (opi_video_device_t *dev, int oneshot, opi_video_frameinfo_t *finfo) /*{{{*/
{
	dev->oneshot = oneshot;
	dev->convert = v4lconvert_create (dev->fd);

	if (dev->fd < 0) {
		/* nothing */
	} else if (dev->api == 1) {
		struct video_mbuf vmbuf;
		int r;

		memset (&(dev->src), 0, sizeof (dev->src));
		dev->src.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		dev->src.fmt.pix.width = finfo->width;
		dev->src.fmt.pix.height = finfo->height;
		dev->src.fmt.pix.pixelformat = convert_pal_opi_to_v4l2 (finfo->format);
		dev->src.fmt.pix.colorspace = V4L2_COLORSPACE_SMPTE170M;

		memcpy (&(dev->dst), &(dev->src), sizeof (dev->dst));
		dev->dst.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
		dev->dst.fmt.pix.colorspace = V4L2_COLORSPACE_SRGB;

		vmbuf.size = 0;
		vmbuf.frames = 0;
		while (((r = ioctl (dev->fd, VIDIOCGMBUF, &vmbuf)) == -1) && (errno == EINTR));	/* retry */

		if (r >= 0) {
			/* got memory-map buffer info */
			dev->buffers[0].size = vmbuf.size;
			dev->buffers[0].addr = mmap (NULL, vmbuf.size, PROT_READ | PROT_WRITE, MAP_SHARED, dev->fd, 0);
			if (dev->buffers[0].addr == MAP_FAILED) {
				/* failed to mmap, default to non-mmap */
				dev->use_mmap = 0;
			} else {
				dev->use_mmap = 1;
				dev->n_buffers = 1;
			}
		} else {
			dev->use_mmap = 0;
		}
		return 1;
	} else if (dev->api == 2) {
		int r;

		memset (&(dev->src), 0, sizeof (dev->src));
		dev->src.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		while (((r = ioctl (dev->fd, VIDIOC_G_FMT, &(dev->src))) == -1) && (errno == EINTR));	/* retry */

		memcpy (&(dev->dst), &(dev->src), sizeof (dev->dst));
		dev->dst.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
		dev->dst.fmt.pix.colorspace = V4L2_COLORSPACE_SRGB;

		if (dev->caps & V4L2_CAP_STREAMING) {
			struct v4l2_requestbuffers req;
			struct v4l2_buffer buffer;
			int i;

			memset (&req, 0, sizeof (req));
			req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			req.memory = V4L2_MEMORY_MMAP;
			req.count = oneshot ? 1 : MAX_NBUFS;

			if (ioctl (dev->fd, VIDIOC_REQBUFS, &req) < 0)
				return 0;
			if (req.count < 1)
				return 0;

			dev->n_buffers = 0;
			for (i = 0; i < req.count; ++i) {
				void *addr;

				memset (&buffer, 0, sizeof (buffer));
				buffer.type = req.type;
				buffer.memory = V4L2_MEMORY_MMAP;
				buffer.index = i;

				if (ioctl (dev->fd, VIDIOC_QUERYBUF, &buffer) < 0)
					continue;
				addr = mmap (NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, dev->fd, buffer.m.offset);
				if (addr == MAP_FAILED) {
					continue;
				} else {
					int idx = dev->n_buffers++;

					dev->buffers[idx].addr = addr;
					dev->buffers[idx].size = buffer.length;
					if (!oneshot) {
						while (((r = ioctl (dev->fd, VIDIOC_QBUF, &buffer)) == -1) && (errno == EINTR));	/* retry */
					}
				}
			}
			if (dev->n_buffers <= 0)
				return 0;
			dev->use_mmap = 1;
		} else {
			dev->use_mmap = 0;
		}
		return 1;
	}
	return 0;
}
//============================================================================
//
// v4l2_init_device( THIS , Width , Height )
//
// Initialise the device and associated data structures. This is the most
// complex operation in the code and has to cope with its own MMAP
// handling, whereas V4L did a lot of this for us.
//
// FIXME:: test the READ interface, I only use MMAP cameras ...
//
int gv4l2_init_device(CWEBCAM * _object, int width, int height)
{
	unsigned int min;
	static unsigned int n_buffers = 0;
	int save;

	if (gv4l2_xioctl(THIS->io, VIDIOC_QUERYCAP, &THIS->cap) == -1) {
		gv4l2_debug("VIDIOC_QUERYCAP error");
		return 0;
	}
	if (!(THIS->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
		gv4l2_debug("not a video capture device");
		return 0;
	}
	//
	// We need to choose which IO method to use: we'll try MMAP and,
	// if that fails, fall back to READ
	//
	if (!(THIS->cap.capabilities & V4L2_CAP_STREAMING)) {
		//
		// No MMAP support!
		//
		THIS->use_mmap = 0;
		if (!(THIS->cap.capabilities & V4L2_CAP_READWRITE)) {
			gv4l2_debug("device does not support mmap or read");
			return 0;
		}
	}
	else
		THIS->use_mmap = 1;

	MCLEAR(THIS->cropcap);
	THIS->cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	if (!gv4l2_xioctl(THIS->io, VIDIOC_CROPCAP, &THIS->cropcap)) {
		THIS->crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		THIS->crop.c = THIS->cropcap.defrect;
		if (gv4l2_xioctl(THIS->io, VIDIOC_S_CROP, &THIS->crop) == -1) {
			if (errno == EINVAL) {
				gv4l2_debug("cropping not supported");
			}
		}
	}

	MCLEAR(THIS->fmt);
	THIS->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	if (gv4l2_xioctl(THIS->io, VIDIOC_G_FMT, &THIS->fmt) == -1) {
		gv4l2_debug("Unable to get video formats");
		return 0;
	}

	THIS->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	THIS->fmt.fmt.pix.width = width;
	THIS->fmt.fmt.pix.height = height;
	THIS->fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
	save = THIS->fmt.fmt.pix.pixelformat;
	//
	// The camera format should be picked up from VIDIOC_G_FMT above.
	// FIXME:: do cameras support multiple formats, and so do we want
	// to be able to pick the format??
	//
	// Try the supported formats:
	// a. YUYV
	// b. YUV420
	// c. revert to whatever the camera was set to
	//
	THIS->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	if (gv4l2_xioctl(THIS->io, VIDIOC_S_FMT, &THIS->fmt) == -1) {
		gv4l2_debug("VIDIOC_S_FMT, can't set YUYV, trying YUV420");
		THIS->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
		if (gv4l2_xioctl(THIS->io, VIDIOC_S_FMT, &THIS->fmt) == -1) {
			gv4l2_debug("VIDIOC_S_FMT, can't set YUV420, defaulting");
			THIS->fmt.fmt.pix.pixelformat = save;
		}
	}

	// BM: Final gb.image format
	THIS->format = GB_IMAGE_BGR;	//IMAGE.GetDefaultFormat();

	// BM: Conversion structure
	THIS->convert = v4lconvert_create(THIS->io);

	//THIS->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	//if ( gv4l2_xioctl ( THIS->io, VIDIOC_S_FMT, &THIS->fmt) == -1) {
	//	gv4l2_debug("VIDIOC_S_FMT, unable to set format");
	//	return 0;
	//}
	// THIS->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	// gv4l2_xioctl ( THIS->io, VIDIOC_S_FMT, &THIS->fmt);

	/* Note VIDIOC_S_FMT may change width and height. */
	/* Buggy driver paranoia. */
	min = THIS->fmt.fmt.pix.width * 2;
	if (THIS->fmt.fmt.pix.bytesperline < min)
		THIS->fmt.fmt.pix.bytesperline = min;
	min = THIS->fmt.fmt.pix.bytesperline * THIS->fmt.fmt.pix.height;
	if (THIS->fmt.fmt.pix.sizeimage < min)
		THIS->fmt.fmt.pix.sizeimage = min;

	GB.Alloc(&THIS->frame, THIS->fmt.fmt.pix.width * THIS->fmt.fmt.pix.height
			* (GB_IMAGE_FMT_IS_24_BITS(THIS->format) ? 3 : 4));

	gv4l2_brightness_setup(THIS);
	gv4l2_contrast_setup(THIS);
	gv4l2_color_setup(THIS);
	gv4l2_whiteness_setup(THIS);
	gv4l2_hue_setup(THIS);

	if (!THIS->use_mmap) {
		GB.Alloc(POINTER(&THIS->buffers), sizeof(*THIS->buffers));
		if (!THIS->buffers) {
			gv4l2_debug("Failed to allocate buffer space");
			return 0;
		}
		THIS->buffers[0].length = THIS->fmt.fmt.pix.sizeimage;
		GB.Alloc(POINTER(&THIS->buffers[0].start), THIS->fmt.fmt.pix.sizeimage);
		if (!THIS->buffers[0].start) {
			gv4l2_debug("Failed to allocate buffer space");
			return 0;
		}
		return 1;
	}

	// We don't support USERPTR in Gambas (!)
	// So now we initialise MMAP
	//
	struct v4l2_requestbuffers req;

	MCLEAR(req);
	req.count = 2;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;

	if (gv4l2_xioctl(THIS->io, VIDIOC_REQBUFS, &req) == -1) {
		gv4l2_debug("mmap not supported or error");
		return 0;
	}
	if (req.count < 2) {
		gv4l2_debug("not enough memory for mmap");
		return 0;
	}

	GB.Alloc(POINTER(&THIS->buffers), req.count * sizeof(*THIS->buffers));
	if (!THIS->buffers) {
		gv4l2_debug("no memory for mmap");
		return 0;
	}
	THIS->buffer_count = req.count;

	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		struct v4l2_buffer buf;

		MCLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;

		if (gv4l2_xioctl(THIS->io, VIDIOC_QUERYBUF, &buf) == -1) {
			gv4l2_debug("VIDIOC_QUERYBUF");
			return 0;
		}

		THIS->buffers[n_buffers].length = buf.length;
		THIS->buffers[n_buffers].start = mmap(NULL /* start anywhere */,
				buf.length,
				PROT_READ | PROT_WRITE /* required */,
				MAP_SHARED /* recommended */,
				THIS->io, buf.m.offset);

		if (MAP_FAILED == THIS->buffers[n_buffers].start) {
			gv4l2_debug("mmap failed");
			return 0;
		}
	}
	return 1;
}