Code example #1
File: video.c Project: Ludo6431/iptk
// FIXME: return 0/-1 and set errno
int video_config(struct video_t *vid, struct v4l2_format *fmt) {
    // fd      : the file descriptor of the video device
    // src_fmt : will be set to the format closest to what we want in the output
    // fmt     : must be set with the format we want
    int ret;

    // setup convert
    vid->convert_data = v4lconvert_create(vid->fd);
    if (vid->convert_data == NULL)
        exit(1);//, "v4lconvert_create");  // FIXME errno
    if (v4lconvert_try_format(vid->convert_data, fmt, &vid->src_fmt) != 0)
        exit(1);//, "v4lconvert_try_format");  // FIXME errno
    ret = xioctl(vid->fd, VIDIOC_S_FMT, &vid->src_fmt);
    if(ret<0)
        exit(1);    // FIXME fail

#ifdef DEBUG
    printf("raw pixfmt: %c%c%c%c %dx%d\n",
           vid->src_fmt.fmt.pix.pixelformat & 0xff,
           (vid->src_fmt.fmt.pix.pixelformat >> 8) & 0xff,
           (vid->src_fmt.fmt.pix.pixelformat >> 16) & 0xff,
           (vid->src_fmt.fmt.pix.pixelformat >> 24) & 0xff,
           vid->src_fmt.fmt.pix.width, vid->src_fmt.fmt.pix.height);
#endif

    // allocate space for a raw image
    vid->raw_buffer = malloc(vid->src_fmt.fmt.pix.sizeimage);
    if(!vid->raw_buffer)
        exit(1);    // FIXME out of memory

    // keep the destination format
    memcpy(&vid->vid_fmt, fmt, sizeof(*fmt));

    return 0;
}
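video_config() above only negotiates the format and allocates vid->raw_buffer; the per-frame conversion happens later via v4lconvert_convert(). A minimal sketch of that follow-up step, assuming read()-based I/O and a hypothetical video_grab() helper (neither appears in the original project):

#include <unistd.h>
#include <libv4lconvert.h>

/* Hypothetical follow-up to video_config(): read one raw frame and convert it
 * into the format the application asked for.  Reuses the struct video_t fields
 * set up above (fd, convert_data, raw_buffer, src_fmt, vid_fmt); read()-based
 * I/O is an assumption, the real project may well use mmap streaming instead. */
int video_grab(struct video_t *vid, unsigned char *dst, int dst_size) {
    ssize_t n = read(vid->fd, vid->raw_buffer, vid->src_fmt.fmt.pix.sizeimage);
    if (n < 0)
        return -1;                      /* errno set by read() */

    /* convert from the driver's raw format to the destination format */
    if (v4lconvert_convert(vid->convert_data,
                           &vid->src_fmt, &vid->vid_fmt,
                           vid->raw_buffer, n,
                           dst, dst_size) < 0)
        return -1;                      /* errno set by libv4lconvert */

    return 0;
}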
Code example #2
File: v4l2-input.c Project: francoisDode/v4l4j
//this function takes two struct v4l2_format and a libvideo palette index
//it will check whether the chosen palette can be obtained, either straight
//from the driver or after conversion using libv4lconvert
//it returns the libvideo palette to use in order to get the requested
//libvideo palette, or -1 upon error. Upon return, src & dst will contain
//meaningful values
static int try_image_format(struct capture_device *c, struct v4l2_format *src,
		struct v4l2_format *dst, int palette_idx){
	int index = -1;
	CLEAR(*src);
	CLEAR(*dst);

	dst->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst->fmt.pix.width = c->width;
	dst->fmt.pix.height = c->height;
	dst->fmt.pix.field = V4L2_FIELD_ANY;
	dst->fmt.pix.pixelformat = libvideo_palettes[palette_idx].v4l2_palette;
	if(0 == v4lconvert_try_format(c->convert->priv, dst, src)){
		dprint(LIBVIDEO_SOURCE_CAP, LIBVIDEO_LOG_DEBUG1,
				"CAP: For dest palette %#x (%s - %d) %dx%d - ...\n",\
				dst->fmt.pix.pixelformat,
				libvideo_palettes[palette_idx].name,
				palette_idx,
				dst->fmt.pix.width,
				dst->fmt.pix.height);
		dprint(LIBVIDEO_SOURCE_CAP, LIBVIDEO_LOG_DEBUG1,
				"CAP: libv4lconvert said to use palette %#x %dx%d - ...\n",\
				src->fmt.pix.pixelformat,
				src->fmt.pix.width,
				src->fmt.pix.height);

		if((index = get_palette_index(src->fmt.pix.pixelformat))!= -1){
			dprint(LIBVIDEO_SOURCE_CAP, LIBVIDEO_LOG_DEBUG1,
					"CAP: which is libvideo index %d, palette %s\n",\
					index,
					libvideo_palettes[index].name);


			dprint(LIBVIDEO_SOURCE_CAP, LIBVIDEO_LOG_DEBUG,
					"CAP: libv4lconvert required ? %s\n",
					(v4lconvert_needs_conversion(
							c->convert->priv,
							src,
							dst)==0?"No":"Yes"
					)
				);

		} else {
			dprint(LIBVIDEO_SOURCE_CAP, LIBVIDEO_LOG_DEBUG1,
					"CAP: palette returned by libv4lconvert is unknown to "
					"libvideo\n");
			info("The source image format returned by libv4l_convert is ");
			info("unknown\nPlease let the author know about this error:\n");
			info("Destination palette: %#x (%s)\n",\
						libvideo_palettes[palette_idx].v4l2_palette,
						libvideo_palettes[palette_idx].name);
			info("libv4l_convert source palette: %#x", src->fmt.pix.pixelformat);
			info("See the README file on how to submit bug reports.");
		}
	}

	return index;
}
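The CLEAR() macro used here (and in several later examples) is not shown in the snippets. In V4L2 sample code it is conventionally defined as below; the exact definition in each project may differ:

#include <string.h>

/* conventional V4L2 sample-code helper: zero out a structure */
#define CLEAR(x) memset(&(x), 0, sizeof(x))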
Code example #3
static int tc_v4l2_video_setup_image_format(V4L2Source *vs, int width, int height)
{
    int err = 0;

    vs->width  = width;
    vs->height = height;

    vs->v4l_convert = v4lconvert_create(vs->video_fd);
    if (!vs->v4l_convert) {
        return TC_ERROR;
    }

    memset(&(vs->v4l_dst_fmt), 0, sizeof(vs->v4l_dst_fmt));
    vs->v4l_dst_fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    vs->v4l_dst_fmt.fmt.pix.width       = width;
    vs->v4l_dst_fmt.fmt.pix.height      = height;
    vs->v4l_dst_fmt.fmt.pix.pixelformat = vs->v4l_dst_csp;
	
    err = v4lconvert_try_format(vs->v4l_convert,
                                &(vs->v4l_dst_fmt), &(vs->v4l_src_fmt));
    if (err) {
        tc_log_error(MOD_NAME, "unable to match formats: %s",
                     v4lconvert_get_error_message(vs->v4l_convert));
        return TC_ERROR;
    }

    err = v4l2_ioctl(vs->video_fd, VIDIOC_S_FMT, &(vs->v4l_src_fmt));
    if (err < 0) {
        tc_log_error(MOD_NAME, "error while setting the cam image format");
        return TC_ERROR;            
    }

    if (!v4lconvert_needs_conversion(vs->v4l_convert,
                                    &(vs->v4l_src_fmt),
                                    &(vs->v4l_dst_fmt))) {
        tc_log_info(MOD_NAME, "fetch frames directly");
        vs->fetch_data = tc_v4l2_fetch_data_memcpy;
        /* Into the near future we should aim for zero-copy. -- FR */
    } else {
        char src_fcc[5] = { '\0' };
        char dst_fcc[5] = { '\0' };

        pixfmt_to_fourcc(vs->v4l_src_fmt.fmt.pix.pixelformat, src_fcc);
        pixfmt_to_fourcc(vs->v4l_dst_fmt.fmt.pix.pixelformat, dst_fcc);

        tc_log_info(MOD_NAME, "fetch frames using libv4lconvert "
                              "[%s] -> [%s]",
                              src_fcc, dst_fcc);
        vs->fetch_data = tc_v4l2_fetch_data_v4lconv;
    }

    return TC_OK;
}
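The example above logs the negotiated formats through a pixfmt_to_fourcc() helper that is not included in the snippet. A plausible sketch, unpacking the fourcc bytes the same way the printf() calls in examples #1 and #6 do (the body shown here is an assumption, not the project's actual helper):

#include <stdint.h>

/* Write the four characters of a V4L2 fourcc into a NUL-terminated string. */
static void pixfmt_to_fourcc(uint32_t pixelformat, char fcc[5])
{
    fcc[0] = pixelformat & 0xff;
    fcc[1] = (pixelformat >> 8) & 0xff;
    fcc[2] = (pixelformat >> 16) & 0xff;
    fcc[3] = (pixelformat >> 24) & 0xff;
    fcc[4] = '\0';
}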
Code example #4
int v4lconvert_enum_frameintervals(struct v4lconvert_data *data,
		struct v4l2_frmivalenum *frmival)
{
	int res;
	struct v4l2_format src_fmt, dest_fmt;

	if (!v4lconvert_supported_dst_format(frmival->pixel_format)) {
		if (v4lconvert_supported_dst_fmt_only(data)) {
			errno = EINVAL;
			return -1;
		}
		res = SYS_IOCTL(data->fd, VIDIOC_ENUM_FRAMEINTERVALS, frmival);
		if (res)
			V4LCONVERT_ERR("%s\n", strerror(errno));
		return res;
	}

	/* Check which format we will be using to convert to frmival->pixel_format */
	memset(&dest_fmt, 0, sizeof(dest_fmt));
	dest_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dest_fmt.fmt.pix.pixelformat = frmival->pixel_format;
	dest_fmt.fmt.pix.width = frmival->width;
	dest_fmt.fmt.pix.height = frmival->height;
	res = v4lconvert_try_format(data, &dest_fmt, &src_fmt);
	if (res) {
		V4LCONVERT_ERR("trying format: %s\n", strerror(errno));
		return res;
	}

	/* Check the requested format is supported exactly as requested */
	if (dest_fmt.fmt.pix.pixelformat != frmival->pixel_format ||
			dest_fmt.fmt.pix.width  != frmival->width ||
			dest_fmt.fmt.pix.height != frmival->height) {
		int frmival_pixformat = frmival->pixel_format;
		int dest_pixformat = dest_fmt.fmt.pix.pixelformat;

		V4LCONVERT_ERR("Could not find matching framesize for: %c%c%c%c %dx%d "
				"closest match: %c%c%c%c %dx%d\n",
				frmival_pixformat & 0xff,
				(frmival_pixformat >> 8) & 0xff,
				(frmival_pixformat >> 16) & 0xff,
				frmival_pixformat >> 24,
				frmival->width, frmival->height,
				dest_pixformat & 0xff,
				(dest_pixformat >> 8) & 0xff,
				(dest_pixformat >> 16) & 0xff,
				dest_pixformat >> 24,
				dest_fmt.fmt.pix.width , dest_fmt.fmt.pix.height);
		errno = EINVAL;
		return -1;
	}
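From the caller's point of view, v4lconvert_enum_frameintervals() behaves like the plain VIDIOC_ENUM_FRAMEINTERVALS ioctl: you bump frmival.index until it returns -1. A minimal caller sketch, where the RGB24 pixel format and the 640x480 size are arbitrary example values:

#include <stdio.h>
#include <string.h>
#include <linux/videodev2.h>
#include <libv4lconvert.h>

/* Sketch: list the frame intervals libv4lconvert reports for RGB24 at 640x480.
 * "data" comes from v4lconvert_create(); format and size are example values. */
static void list_rgb24_intervals(struct v4lconvert_data *data)
{
	struct v4l2_frmivalenum frmival;

	memset(&frmival, 0, sizeof(frmival));
	frmival.pixel_format = V4L2_PIX_FMT_RGB24;
	frmival.width  = 640;
	frmival.height = 480;

	while (v4lconvert_enum_frameintervals(data, &frmival) == 0) {
		if (frmival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
			printf("%u/%u s per frame\n",
					frmival.discrete.numerator,
					frmival.discrete.denominator);
		frmival.index++;
	}
}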
Code example #5
File: v4lcamera.cpp Project: sarace77/QeekVision
void V4LCamera::openCaptureDevice() {
    /// Open Capture Device
    _fd = open(_deviceName.toAscii().constData(), O_RDWR | O_NONBLOCK, 0);
    /// Check that it is open
    if (_fd < 0 )
        qFatal("[CAMERA_THREAD::V4L_CAMERA] - openCaptureDevice() - Unable to open device!");
    /// Get Capture Device Frame Format configuration
    V4LSettings::qioctl(_fd, VIDIOC_G_FMT, &_fmt, "V4LCamera::openCaptureDevice()");

    /// Set V4L frame buffer data conversion
    _v4lconvert_data = v4lconvert_create(_fd);
#ifdef _DEBUG_CAPTURE_THREADS
    if (_v4lconvert_data == NULL)
        qWarning("[CAMERA_THREAD::V4L_CAMERA] - openCaptureDevice() - v4lconvert_create() returns error");
    if (v4lconvert_try_format(_v4lconvert_data, &_fmt, &_src_fmt) != 0)
        qWarning("[CAMERA_THREAD::V4L_CAMERA] - openCaptureDevice() - v4lconvert_try_format() returns error");
#endif //_DEBUG_CAPTURE_THREADS
}
Code example #6
File: core.c Project: rsisto/luaRoboEmb
void init_device()
{
    struct v4l2_capability cap;
    int ret;
    int sizeimage;

    if(xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
    {
        if(EINVAL == errno)
        {
            fprintf(stderr, "%s is no V4L2 device\n", dev_name);
            perror("EXIT_FAILURE");
            return;
        } 
        else
        {
            perror("VIDIOC_QUERYCAP");
            return ;
        }
    }

    if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        fprintf(stderr, "%s is no video capture device\n", dev_name);
        /*exit(EXIT_FAILURE);*/
        perror("EXIT_FAILURE");
        return;
    }

    memset(&(fmt), 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = w;
    fmt.fmt.pix.height = h;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    fmt.fmt.pix.field = V4L2_FIELD_NONE;
    
    v4lconvert_data = v4lconvert_create(fd);

    if(v4lconvert_data == NULL)
    {
        perror("v4lconvert_create");
        return;
    }
        
    if(v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
    {
        /*errno_exit("v4lconvert_try_format");*/
        perror("v4lconvert_try_format");
        return;
    }
    
    ret = xioctl(fd, VIDIOC_S_FMT, &src_fmt);
    sizeimage = src_fmt.fmt.pix.sizeimage;
    dst_buf = (unsigned char *)malloc(fmt.fmt.pix.sizeimage);

#ifdef DEBUG

    printf("raw pixfmt: %c%c%c%c %dx%d\n",
               src_fmt.fmt.pix.pixelformat & 0xff,
               (src_fmt.fmt.pix.pixelformat >> 8) & 0xff,
               (src_fmt.fmt.pix.pixelformat >> 16) & 0xff,
               (src_fmt.fmt.pix.pixelformat >> 24) & 0xff,
               src_fmt.fmt.pix.width, src_fmt.fmt.pix.height);
#endif    
    
    if(ret < 0)
    {
        perror("VIDIOC_S_FMT");
        return;
    }
    
#ifdef DEBUG
    printf("pixfmt: %c%c%c%c %dx%d\n",
           fmt.fmt.pix.pixelformat & 0xff,
           (fmt.fmt.pix.pixelformat >> 8) & 0xff,
           (fmt.fmt.pix.pixelformat >> 16) & 0xff,
           (fmt.fmt.pix.pixelformat >> 24) & 0xff,
           fmt.fmt.pix.width, fmt.fmt.pix.height);
    
    /* Note VIDIOC_S_FMT may change width and height. */
#endif

    w = fmt.fmt.pix.width;
    h = fmt.fmt.pix.height;
   
    init_mmap();
}
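Most of these examples go through an xioctl() wrapper whose definition is omitted from the snippets. It is typically just an ioctl() call retried while errno is EINTR; a common sketch (projects built on libv4l2 call v4l2_ioctl() here instead of the raw ioctl()):

#include <errno.h>
#include <sys/ioctl.h>

/* Retry the ioctl while it is interrupted by a signal. */
static int xioctl(int fd, unsigned long request, void *arg)
{
    int r;

    do {
        r = ioctl(fd, request, arg);
    } while (r == -1 && errno == EINTR);

    return r;
}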
Code example #7
File: capturethread.cpp Project: DHalens/kamerka
int CaptureThread::start() {
	wait();

	devam=false;
	fd = -1;

	// read config
	dev_name = Settings::node();
	width    = Settings::width();
	height   = Settings::height();
	fps      = Settings::fps();
	if (fps>0) {
		delay = 1000/fps;
	}
	else { delay = 0; }

	// open webcam device node
	fd = v4l2_open(dev_name.toStdString().c_str(), O_RDWR | O_NONBLOCK, 0);
	if (fd < 0) {
		kError() << "Cannot open device";
		quit();
		return 1;
	}

	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width       = width;
	fmt.fmt.pix.height      = height;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
	xioctl(fd, VIDIOC_S_FMT, &fmt);
	if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
		kError() << "Libv4l didn't accept RGB24 format. Can't proceed.";
		quit();
		return 1;
	}
	emit startedCapture(fmt.fmt.pix.width, fmt.fmt.pix.height);

	v4lconvert_data = v4lconvert_create(fd);
	if (v4lconvert_data == NULL)
		kDebug() << "v4lconvert_create";
	if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
		kDebug() << "v4lconvert_try_format";
	xioctl(fd, VIDIOC_S_FMT, &src_fmt);
	dst_buf = (unsigned char*)malloc(fmt.fmt.pix.sizeimage);

	CLEAR(req);
	req.count = 2;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	xioctl(fd, VIDIOC_REQBUFS, &req);

	buffers = (buffer*)calloc(req.count, sizeof(*buffers));
	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		CLEAR(buf);

		buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory      = V4L2_MEMORY_MMAP;
		buf.index       = n_buffers;

		xioctl(fd, VIDIOC_QUERYBUF, &buf);

		buffers[n_buffers].length = buf.length;
		buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
				PROT_READ | PROT_WRITE, MAP_SHARED,
				fd, buf.m.offset);

		if (MAP_FAILED == buffers[n_buffers].start) {
			kDebug() << "mmap";
			quit();
			return 1;
		}
	}

	for (unsigned int i = 0; i < n_buffers; ++i) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		xioctl(fd, VIDIOC_QBUF, &buf);
	}
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMON, &type);

	di=0;
	sprintf(header,"P6\n%d %d 255\n",fmt.fmt.pix.width,fmt.fmt.pix.height);
	devam=true;

	// start processing video data
	running = true;
	QThread::start();
	return 0;
}
Code example #8
File: capturethread.cpp Project: xian0gang/test
void CaptureThread::run(){
    // do real stuff
    fd = -1;
    dev_name = "/dev/video0";

    fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
    if (fd < 0) {
           qDebug("Cannot open device");
           //exit(EXIT_FAILURE);
           return;
    }


    static struct v4lconvert_data *v4lconvert_data;
    static struct v4l2_format src_fmt;
    static unsigned char *dst_buf;

    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = 640;
    fmt.fmt.pix.height      = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
    xioctl(fd, VIDIOC_S_FMT, &fmt);
    if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24) {
           printf("Libv4l didn't accept RGB24 format. Can't proceed.\n");
           //exit(EXIT_FAILURE);
           return;
    }
    if ((fmt.fmt.pix.width != 640) || (fmt.fmt.pix.height != 480))
           printf("Warning: driver is sending image at %dx%d\n",
                   fmt.fmt.pix.width, fmt.fmt.pix.height);

    v4lconvert_data = v4lconvert_create(fd);
    if (v4lconvert_data == NULL)
        qDebug("v4lconvert_create");
    if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
        qDebug("v4lconvert_try_format");
    xioctl(fd, VIDIOC_S_FMT, &src_fmt);
    dst_buf = (unsigned char*)malloc(fmt.fmt.pix.sizeimage);

    CLEAR(req);
    req.count = 2;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    xioctl(fd, VIDIOC_REQBUFS, &req);

    buffers = (buffer*)calloc(req.count, sizeof(*buffers));
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
           CLEAR(buf);

           buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
           buf.memory      = V4L2_MEMORY_MMAP;
           buf.index       = n_buffers;

           xioctl(fd, VIDIOC_QUERYBUF, &buf);

           buffers[n_buffers].length = buf.length;
           buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
                         PROT_READ | PROT_WRITE, MAP_SHARED,
                         fd, buf.m.offset);

           if (MAP_FAILED == buffers[n_buffers].start) {
                   qDebug("mmap");
                   //exit(EXIT_FAILURE);
                   return;
           }
    }

    for (int i = 0; i < n_buffers; ++i) {
           CLEAR(buf);
           buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
           buf.memory = V4L2_MEMORY_MMAP;
           buf.index = i;
           xioctl(fd, VIDIOC_QBUF, &buf);
    }
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    xioctl(fd, VIDIOC_STREAMON, &type);

    int di=0;
    char header[]="P6\n640 480 255\n";
    while(devam){
        /* this loop waits until data is available */
        do {
                FD_ZERO(&fds);
                FD_SET(fd, &fds);

                /* Timeout. */
                tv.tv_sec = 2;
                tv.tv_usec = 0;

                r = select(fd + 1, &fds, NULL, NULL, &tv);
        } while ((r == -1) && (errno == EINTR));
        if (r == -1) {
                qDebug("select");
                //exit(1) ;
                return;
        }

        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        xioctl(fd, VIDIOC_DQBUF, &buf);

        try{
            
        if (v4lconvert_convert(v4lconvert_data,
                                &src_fmt,
                                &fmt,
                                (unsigned char*)buffers[buf.index].start, buf.bytesused,
                                dst_buf, fmt.fmt.pix.sizeimage) < 0) {
                if (errno != EAGAIN)
                        qDebug("v4l_convert");

        }


        // build a PPM image in memory: header followed by the converted frame
        unsigned char* asil=(unsigned char*)malloc(fmt.fmt.pix.sizeimage+qstrlen(header));
        memmove(asil+qstrlen(header), dst_buf, fmt.fmt.pix.sizeimage);
        memcpy(asil,header,qstrlen(header));

        QImage qq;//=new QImage(dst_buf,640,480,QImage::Format_RGB32);

        if(qq.loadFromData(asil,fmt.fmt.pix.sizeimage+qstrlen(header),"PPM")){
            if(parent->isVisible()){
                QImage q1(qq);
                parent->img=q1;
                parent->update();
              //this->msleep(50);
            }
        //qApp->processEvents();
        }
        free(asil);  // release the PPM buffer even if decoding failed
        }catch(...){}
        xioctl(fd, VIDIOC_QBUF, &buf);
        di++;
   }
    try{
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    xioctl(fd, VIDIOC_STREAMOFF, &type);
    for (int i = 0; i < n_buffers; ++i)
           v4l2_munmap(buffers[i].start, buffers[i].length);

        v4l2_close(fd);
    }catch(...){}
}
Code example #9
File: libv4l2.c Project: zccrs/opencv
int v4l2_ioctl(int fd, unsigned long int request, ...)
{
	void *arg;
	va_list ap;
	int result, index, saved_err;
	int is_capture_request = 0, stream_needs_locking = 0;

	va_start(ap, request);
	arg = va_arg(ap, void *);
	va_end(ap);

	index = v4l2_get_index(fd);
	if (index == -1)
		return SYS_IOCTL(fd, request, arg);

	/* Apparently the kernel and / or glibc ignore the 32 most significant bits
	   when long = 64 bits, and some applications pass an int holding the req to
	   ioctl, causing it to get sign extended, depending upon this behavior */
	request = (unsigned int)request;

	/* Is this a capture request and do we need to take the stream lock? */
	switch (request) {
	case VIDIOC_QUERYCTRL:
	case VIDIOC_G_CTRL:
	case VIDIOC_S_CTRL:
		if (!(devices[index].flags & V4L2_DISABLE_CONVERSION))
			is_capture_request = 1;
		break;
	case VIDIOC_QUERYCAP:
		is_capture_request = 1;
		break;
	case VIDIOC_ENUM_FMT:
		if (((struct v4l2_fmtdesc *)arg)->type == V4L2_BUF_TYPE_VIDEO_CAPTURE &&
				!(devices[index].flags & V4L2_DISABLE_CONVERSION))
			is_capture_request = 1;
		break;
	case VIDIOC_ENUM_FRAMESIZES:
	case VIDIOC_ENUM_FRAMEINTERVALS:
		if (!(devices[index].flags & V4L2_DISABLE_CONVERSION))
			is_capture_request = 1;
		break;
	case VIDIOC_TRY_FMT:
		if (((struct v4l2_format *)arg)->type == V4L2_BUF_TYPE_VIDEO_CAPTURE &&
				!(devices[index].flags & V4L2_DISABLE_CONVERSION))
			is_capture_request = 1;
		break;
	case VIDIOC_S_FMT:
	case VIDIOC_G_FMT:
		if (((struct v4l2_format *)arg)->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
			is_capture_request = 1;
			stream_needs_locking = 1;
		}
		break;
	case VIDIOC_REQBUFS:
		if (((struct v4l2_requestbuffers *)arg)->type ==
				V4L2_BUF_TYPE_VIDEO_CAPTURE) {
			is_capture_request = 1;
			stream_needs_locking = 1;
		}
		break;
	case VIDIOC_QUERYBUF:
	case VIDIOC_QBUF:
	case VIDIOC_DQBUF:
		if (((struct v4l2_buffer *)arg)->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
			is_capture_request = 1;
			stream_needs_locking = 1;
		}
		break;
	case VIDIOC_STREAMON:
	case VIDIOC_STREAMOFF:
		if (*((enum v4l2_buf_type *)arg) == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
			is_capture_request = 1;
			stream_needs_locking = 1;
		}
	}

	if (!is_capture_request) {
		result = SYS_IOCTL(fd, request, arg);
		saved_err = errno;
		v4l2_log_ioctl(request, arg, result);
		errno = saved_err;
		return result;
	}


	if (stream_needs_locking) {
		pthread_mutex_lock(&devices[index].stream_lock);
		/* If this is the first stream related ioctl, and we should only allow
		   libv4lconvert supported destination formats (so that it can do flipping,
		   processing, etc.) and the current destination format is not supported,
		   try setting the format to RGB24 (which is a supported dest. format). */
		if (!(devices[index].flags & V4L2_STREAM_TOUCHED) &&
				!(devices[index].flags & V4L2_DISABLE_CONVERSION) &&
				v4lconvert_supported_dst_fmt_only(devices[index].convert) &&
				!v4lconvert_supported_dst_format(
					devices[index].dest_fmt.fmt.pix.pixelformat)) {
			struct v4l2_format fmt = devices[index].dest_fmt;

			V4L2_LOG("Setting pixelformat to RGB24 (supported_dst_fmt_only)");
			devices[index].flags |= V4L2_STREAM_TOUCHED;
			fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
			pthread_mutex_unlock(&devices[index].stream_lock);
			v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt);
			pthread_mutex_lock(&devices[index].stream_lock);
			V4L2_LOG("Done setting pixelformat (supported_dst_fmt_only)");
		}
		devices[index].flags |= V4L2_STREAM_TOUCHED;
	}

	switch (request) {
	case VIDIOC_QUERYCTRL:
		result = v4lconvert_vidioc_queryctrl(devices[index].convert, arg);
		break;

	case VIDIOC_G_CTRL:
		result = v4lconvert_vidioc_g_ctrl(devices[index].convert, arg);
		break;

	case VIDIOC_S_CTRL:
		result = v4lconvert_vidioc_s_ctrl(devices[index].convert, arg);
		break;

	case VIDIOC_QUERYCAP: {
		struct v4l2_capability *cap = arg;

		result = SYS_IOCTL(devices[index].fd, VIDIOC_QUERYCAP, cap);
		if (result == 0)
			/* We always support read() as we fake it using mmap mode */
			cap->capabilities |= V4L2_CAP_READWRITE;
		break;
	}

	case VIDIOC_ENUM_FMT:
		result = v4lconvert_enum_fmt(devices[index].convert, arg);
		break;

	case VIDIOC_ENUM_FRAMESIZES:
		result = v4lconvert_enum_framesizes(devices[index].convert, arg);
		break;

	case VIDIOC_ENUM_FRAMEINTERVALS:
		result = v4lconvert_enum_frameintervals(devices[index].convert, arg);
		if (result)
			V4L2_LOG("ENUM_FRAMEINTERVALS Error: %s",
					v4lconvert_get_error_message(devices[index].convert));
		break;

	case VIDIOC_TRY_FMT:
		result = v4lconvert_try_format(devices[index].convert, arg, NULL);
		break;

	case VIDIOC_S_FMT: {
		struct v4l2_format src_fmt, *dest_fmt = arg;
		struct v4l2_pix_format req_pix_fmt;

		/* Don't be lazy on uvc cams, as this triggers a bug in the uvcvideo
		   driver in kernel <= 2.6.28 (with certain cams) */
		if (!(devices[index].flags & V4L2_IS_UVC) &&
				v4l2_pix_fmt_identical(&devices[index].dest_fmt, dest_fmt)) {
			*dest_fmt = devices[index].dest_fmt;
			result = 0;
			break;
		}

		if (v4l2_log_file) {
			int pixfmt = dest_fmt->fmt.pix.pixelformat;

			fprintf(v4l2_log_file, "VIDIOC_S_FMT app requesting: %c%c%c%c\n",
					pixfmt & 0xff,
					(pixfmt >> 8) & 0xff,
					(pixfmt >> 16) & 0xff,
					pixfmt >> 24);
		}

		if (devices[index].flags & V4L2_DISABLE_CONVERSION) {
			result = SYS_IOCTL(devices[index].fd, VIDIOC_TRY_FMT,
					dest_fmt);
			src_fmt = *dest_fmt;
		} else {
			result = v4lconvert_try_format(devices[index].convert, dest_fmt,
					&src_fmt);
		}

		if (result) {
			saved_err = errno;
			V4L2_LOG("S_FMT error trying format: %s\n", strerror(errno));
			errno = saved_err;
			break;
		}

		if (src_fmt.fmt.pix.pixelformat != dest_fmt->fmt.pix.pixelformat &&
				v4l2_log_file) {
			int pixfmt = src_fmt.fmt.pix.pixelformat;

			fprintf(v4l2_log_file, "VIDIOC_S_FMT converting from: %c%c%c%c\n",
					pixfmt & 0xff,
					(pixfmt >> 8) & 0xff,
					(pixfmt >> 16) & 0xff,
					pixfmt >> 24);
		}

		/* Maybe after try_format has adjusted width/height etc. to what's
		   available, nothing has changed (on the cam side)? */
		if (!(devices[index].flags & V4L2_IS_UVC) &&
				v4l2_pix_fmt_identical(&devices[index].src_fmt, &src_fmt)) {
			v4l2_set_src_and_dest_format(index, &devices[index].src_fmt,
					dest_fmt);
			result = 0;
			break;
		}

		result = v4l2_check_buffer_change_ok(index);
		if (result)
			break;

		req_pix_fmt = src_fmt.fmt.pix;
		result = SYS_IOCTL(devices[index].fd, VIDIOC_S_FMT, &src_fmt);
		if (result) {
			saved_err = errno;
			V4L2_LOG_ERR("setting pixformat: %s\n", strerror(errno));
			/* Report to the app dest_fmt has not changed */
			*dest_fmt = devices[index].dest_fmt;
			errno = saved_err;
			break;
		}
		/* See if we've gotten what try_fmt promised us
		   (this check should never fail) */
		if (src_fmt.fmt.pix.width != req_pix_fmt.width ||
				src_fmt.fmt.pix.height != req_pix_fmt.height ||
				src_fmt.fmt.pix.pixelformat != req_pix_fmt.pixelformat) {
			V4L2_LOG_ERR("set_fmt gave us a different result than try_fmt!\n");
			/* Not what we expected / wanted, disable conversion */
			*dest_fmt = src_fmt;
		}

		v4l2_set_src_and_dest_format(index, &src_fmt, dest_fmt);
		break;
	}

	case VIDIOC_G_FMT: {
		struct v4l2_format *fmt = arg;

		*fmt = devices[index].dest_fmt;
		result = 0;
		break;
	}

	case VIDIOC_REQBUFS: {
		struct v4l2_requestbuffers *req = arg;

		/* IMPROVEME (maybe?) add support for userptr's? */
		if (req->memory != V4L2_MEMORY_MMAP) {
			errno = EINVAL;
			result = -1;
			break;
		}

		result = v4l2_check_buffer_change_ok(index);
		if (result)
			break;

		/* No more buffers than we can manage, please */
		if (req->count > V4L2_MAX_NO_FRAMES)
			req->count = V4L2_MAX_NO_FRAMES;

		result = SYS_IOCTL(devices[index].fd, VIDIOC_REQBUFS, req);
		if (result < 0)
			break;
		result = 0; /* some drivers return the number of buffers on success */

		devices[index].no_frames = MIN(req->count, V4L2_MAX_NO_FRAMES);
		devices[index].flags &= ~V4L2_BUFFERS_REQUESTED_BY_READ;
		break;
	}

	case VIDIOC_QUERYBUF: {
		struct v4l2_buffer *buf = arg;

		if (devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) {
			result = v4l2_deactivate_read_stream(index);
			if (result)
				break;
		}

		/* Do a real query even when converting to let the driver fill in
		   things like buf->field */
		result = SYS_IOCTL(devices[index].fd, VIDIOC_QUERYBUF, buf);
		if (result || !v4l2_needs_conversion(index))
			break;

		buf->m.offset = V4L2_MMAP_OFFSET_MAGIC | buf->index;
		buf->length = V4L2_FRAME_BUF_SIZE;
		if (devices[index].frame_map_count[buf->index])
			buf->flags |= V4L2_BUF_FLAG_MAPPED;
		else
			buf->flags &= ~V4L2_BUF_FLAG_MAPPED;
		break;
	}

	case VIDIOC_QBUF:
		if (devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) {
			result = v4l2_deactivate_read_stream(index);
			if (result)
				break;
		}

		/* With some drivers the buffers must be mapped before queuing */
		if (v4l2_needs_conversion(index)) {
			result = v4l2_map_buffers(index);
			if (result)
				break;
		}

		result = SYS_IOCTL(devices[index].fd, VIDIOC_QBUF, arg);
		break;

	case VIDIOC_DQBUF: {
		struct v4l2_buffer *buf = arg;

		if (devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) {
			result = v4l2_deactivate_read_stream(index);
			if (result)
				break;
		}

		if (!v4l2_needs_conversion(index)) {
			result = SYS_IOCTL(devices[index].fd, VIDIOC_DQBUF, buf);
			if (result) {
				int saved_err = errno;

				V4L2_LOG_ERR("dequeuing buf: %s\n", strerror(errno));
				errno = saved_err;
			}
			break;
		}

		/* An application can do a DQBUF before mmap-ing in the buffer,
		   but we need the buffer _now_ to write our converted data
		   to it! */
		if (devices[index].convert_mmap_buf == MAP_FAILED) {
			devices[index].convert_mmap_buf = (void *)SYS_MMAP(NULL,
				(size_t)(devices[index].no_frames * V4L2_FRAME_BUF_SIZE),
				PROT_READ | PROT_WRITE,
				MAP_ANONYMOUS | MAP_PRIVATE,
				-1, 0);
			if (devices[index].convert_mmap_buf == MAP_FAILED) {
				saved_err = errno;
				V4L2_LOG_ERR("allocating conversion buffer\n");
				errno = saved_err;
				result = -1;
				break;
			}
		}

		result = v4l2_dequeue_and_convert(index, buf, 0, V4L2_FRAME_BUF_SIZE);
		if (result < 0)
			break;

		buf->bytesused = result;
		buf->m.offset = V4L2_MMAP_OFFSET_MAGIC | buf->index;
		buf->length = V4L2_FRAME_BUF_SIZE;
		if (devices[index].frame_map_count[buf->index])
			buf->flags |= V4L2_BUF_FLAG_MAPPED;
		else
			buf->flags &= ~V4L2_BUF_FLAG_MAPPED;

		result = 0;
		break;
	}

	case VIDIOC_STREAMON:
	case VIDIOC_STREAMOFF:
		if (devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) {
			result = v4l2_deactivate_read_stream(index);
			if (result)
				break;
		}

		if (request == VIDIOC_STREAMON)
			result = v4l2_streamon(index);
		else
			result = v4l2_streamoff(index);
		break;

	default:
		result = SYS_IOCTL(fd, request, arg);
		break;
	}
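On the application side all of this stays transparent: the same VIDIOC_* requests are issued through v4l2_ioctl(), and conversion-aware answers come back. For instance, enumerating the capture pixel formats offered through libv4l2 could look like this sketch (the list_capture_formats() name is hypothetical):

#include <stdio.h>
#include <string.h>
#include <linux/videodev2.h>
#include <libv4l2.h>

/* Sketch: list the capture pixel formats reported through v4l2_ioctl();
 * with conversion enabled this includes emulated formats such as RGB24. */
static void list_capture_formats(int fd)
{
	struct v4l2_fmtdesc desc;

	memset(&desc, 0, sizeof(desc));
	desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	while (v4l2_ioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) {
		printf("%s\n", (const char *)desc.description);
		desc.index++;
	}
}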
Code example #10
File: svv.c Project: engie/robo_gui
static void init_device(int w, int h)
{
	struct v4lconvert_data *v4lconvert_data;
	struct v4l2_format src_fmt;	 /* raw source format */
	struct v4l2_capability cap;

	if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
		if (EINVAL == errno) {
			fprintf(stderr, "%s is no V4L2 device\n",
				dev_name);
			exit(EXIT_FAILURE);
		} else {
			errno_exit("VIDIOC_QUERYCAP");
		}
	}

	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
		fprintf(stderr, "%s is no video capture device\n",
			dev_name);
		exit(EXIT_FAILURE);
	}

	/* libv4l emulates read() on those v4l2 devices that do not support
	it, so this print is just instructional, it should work regardless */
	printf("device capabilities\n\tread:\t%c\n\tstream:\t%c\n",
		(cap.capabilities & V4L2_CAP_READWRITE) ? 'Y' : 'N',
		(cap.capabilities & V4L2_CAP_STREAMING) ? 'Y' : 'N');

	/* set our requested format to V4L2_PIX_FMT_RGB24 */
	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = w;
	fmt.fmt.pix.height = h;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;

	/* libv4l also converts multiple supported formats to V4L2_PIX_FMT_BGR24 or
	V4L2_PIX_FMT_YUV420, which means the following call should *always*
	succeed.

	However, we use the libv4lconvert library to print debugging information
	that tells us whether libv4l will be doing the conversion internally. */
	v4lconvert_data = v4lconvert_create(fd);
	if (v4lconvert_data == NULL)
		errno_exit("v4lconvert_create");
	if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
		errno_exit("v4lconvert_try_format");

	printf("\tpixfmt:\t%c%c%c%c (%dx%d)\n",
		src_fmt.fmt.pix.pixelformat & 0xff,
		(src_fmt.fmt.pix.pixelformat >> 8) & 0xff,
		(src_fmt.fmt.pix.pixelformat >> 16) & 0xff,
		(src_fmt.fmt.pix.pixelformat >> 24) & 0xff,
		src_fmt.fmt.pix.width, src_fmt.fmt.pix.height);

	printf("application\n\tconv:\t%c\n", 
		v4lconvert_needs_conversion(v4lconvert_data,
			&src_fmt,
			&fmt) ? 'Y' : 'N');

	v4lconvert_destroy(v4lconvert_data);

	/* Actually set the pixfmt so that libv4l uses its conversion magic */
	if (v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
		errno_exit("VIDIOC_S_FMT");

	printf("\tpixfmt:\t%c%c%c%c (%dx%d)\n",
		fmt.fmt.pix.pixelformat & 0xff,
		(fmt.fmt.pix.pixelformat >> 8) & 0xff,
		(fmt.fmt.pix.pixelformat >> 16) & 0xff,
		(fmt.fmt.pix.pixelformat >> 24) & 0xff,
		fmt.fmt.pix.width, fmt.fmt.pix.height);

	switch (io) {
	case IO_METHOD_READ:
		printf("\tio:\tio\n");
		init_read(fmt.fmt.pix.sizeimage);
		break;
	case V4L2_MEMORY_MMAP:
		printf("\tio:\tmmap\n");
		init_mmap();
		break;
	case V4L2_MEMORY_USERPTR:
		printf("\tio:\tusrptr\n");
		init_userp(fmt.fmt.pix.sizeimage);
		break;
	}
}
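Because init_device() above set the destination format to RGB24 through libv4l, a later capture step can rely on the read() emulation mentioned in the comments and receive already-converted frames. A minimal sketch under that assumption (buffer handling and error policy are simplified):

#include <stdlib.h>
#include <linux/videodev2.h>
#include <libv4l2.h>

/* Sketch: grab one converted RGB24 frame via libv4l2's read() emulation.
 * The caller owns the returned buffer; NULL means allocation or read failed. */
static unsigned char *grab_one_frame(int fd, const struct v4l2_format *fmt)
{
	unsigned char *frame = malloc(fmt->fmt.pix.sizeimage);

	if (frame && v4l2_read(fd, frame, fmt->fmt.pix.sizeimage) < 0) {
		free(frame);
		return NULL;
	}
	return frame;
}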
Code example #11
File: v4l-ref.c Project: ezequielgarcia/zeta-devel
static void init_device(int w, int h)
{
	struct v4l2_capability cap;
	int ret;
	int sizeimage;

	if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
		if (EINVAL == errno) {
			fprintf(stderr, "%s is no V4L2 device\n",
				dev_name);
			exit(EXIT_FAILURE);
		} else {
			errno_exit("VIDIOC_QUERYCAP");
		}
	}

	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
		fprintf(stderr, "%s is no video capture device\n",
			dev_name);
		exit(EXIT_FAILURE);
	}

	switch (io) {
	case IO_METHOD_READ:
		if (!(cap.capabilities & V4L2_CAP_READWRITE)) {
			fprintf(stderr, "%s does not support read i/o\n",
				dev_name);
			exit(EXIT_FAILURE);
		}
		break;
	case V4L2_MEMORY_MMAP:
	case V4L2_MEMORY_USERPTR:
		if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
			fprintf(stderr,
				"%s does not support streaming i/o\n",
				dev_name);
			exit(EXIT_FAILURE);
		}
		break;
	}


//	if (xioctl(fd, VIDIOC_G_FMT, &fmt) < 0)
//		perror("get fmt");

	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = w;
	fmt.fmt.pix.height = h;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
#ifdef WITH_V4L2_LIB
	v4lconvert_data = v4lconvert_create(fd);
	if (v4lconvert_data == NULL)
		errno_exit("v4lconvert_create");
	if (v4lconvert_try_format(v4lconvert_data, &fmt, &src_fmt) != 0)
		errno_exit("v4lconvert_try_format");
	ret = xioctl(fd, VIDIOC_S_FMT, &src_fmt);
	sizeimage = src_fmt.fmt.pix.sizeimage;
	dst_buf = malloc(fmt.fmt.pix.sizeimage);
	printf("raw pixfmt: %c%c%c%c %dx%d\n",
		src_fmt.fmt.pix.pixelformat & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 8) & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 16) & 0xff,
	       (src_fmt.fmt.pix.pixelformat >> 24) & 0xff,
		src_fmt.fmt.pix.width, src_fmt.fmt.pix.height);
#else
	ret = xioctl(fd, VIDIOC_S_FMT, &fmt);
	sizeimage = fmt.fmt.pix.sizeimage;
#endif

	if (ret < 0)
		errno_exit("VIDIOC_S_FMT");
	/* Note VIDIOC_S_FMT may change width and height. */
	printf("pixfmt: %c%c%c%c %dx%d\n",
		fmt.fmt.pix.pixelformat & 0xff,
	       (fmt.fmt.pix.pixelformat >> 8) & 0xff,
	       (fmt.fmt.pix.pixelformat >> 16) & 0xff,
	       (fmt.fmt.pix.pixelformat >> 24) & 0xff,
		fmt.fmt.pix.width, fmt.fmt.pix.height);

	switch (io) {
	case IO_METHOD_READ:
		init_read(sizeimage);
		break;
	case V4L2_MEMORY_MMAP:
		init_mmap();
		break;
	case V4L2_MEMORY_USERPTR:
		init_userp(sizeimage);
		break;
	}
}