Code Example #1
static noinline int v4l1_compat_get_picture(
					struct video_picture *pict,
					struct inode *inode,
					struct file *file,
					v4l2_kioctl drv)
{
	int err;
	struct v4l2_format *fmt;

	fmt = kzalloc(sizeof(*fmt), GFP_KERNEL);
	if (!fmt) {
		err = -ENOMEM;
		return err;
	}

	pict->brightness = get_v4l_control(inode, file,
					   V4L2_CID_BRIGHTNESS, drv);
	pict->hue = get_v4l_control(inode, file,
				    V4L2_CID_HUE, drv);
	pict->contrast = get_v4l_control(inode, file,
					 V4L2_CID_CONTRAST, drv);
	pict->colour = get_v4l_control(inode, file,
				       V4L2_CID_SATURATION, drv);
	pict->whiteness = get_v4l_control(inode, file,
					  V4L2_CID_WHITENESS, drv);

	fmt->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	err = drv(inode, file, VIDIOC_G_FMT, fmt);
	if (err < 0) {
		dprintk("VIDIOCGPICT / VIDIOC_G_FMT: %d\n", err);
		goto done;
	}

	pict->depth   = ((fmt->fmt.pix.bytesperline << 3)
			 + (fmt->fmt.pix.width - 1))
			 / fmt->fmt.pix.width;
	pict->palette = pixelformat_to_palette(
		fmt->fmt.pix.pixelformat);
done:
	kfree(fmt);
	return err;
}
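
For context, this helper sits behind the V4L1 VIDIOCGPICT ioctl, so a legacy application reaches it with a plain ioctl() call. Below is a minimal user-space sketch of that side; /dev/video0 is an assumed device node and <linux/videodev.h> is the legacy V4L1 header that shipped with kernels of this era (it has since been removed).

/* Minimal sketch of the user-space caller; device node and header are
 * assumptions, not part of the kernel code above. */
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev.h>	/* legacy V4L1 definitions */

int main(void)
{
	struct video_picture pict;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	/* On a V4L2-only driver this ends up in v4l1_compat_get_picture(). */
	if (ioctl(fd, VIDIOCGPICT, &pict) == 0)
		printf("depth=%d palette=%d\n", pict.depth, pict.palette);
	close(fd);
	return 0;
}
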
Code Example #2
File: v4l1-compat.c  Project: mrtos/Logitech-Revue
/*
 *	This function is exported.
 */
int
v4l_compat_translate_ioctl(struct inode         *inode,
                           struct file		*file,
                           int			cmd,
                           void			*arg,
                           v4l2_kioctl          drv)
{
    struct v4l2_capability  *cap2 = NULL;
    struct v4l2_format	*fmt2 = NULL;
    enum v4l2_buf_type      captype = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    struct v4l2_framebuffer fbuf2;
    struct v4l2_input	input2;
    struct v4l2_tuner	tun2;
    struct v4l2_standard	std2;
    struct v4l2_frequency   freq2;
    struct v4l2_audio	aud2;
    struct v4l2_queryctrl	qctrl2;
    struct v4l2_buffer	buf2;
    v4l2_std_id    		sid;
    int i, err = 0;

    switch (cmd) {
    case VIDIOCGCAP:	/* capability */
    {
        struct video_capability *cap = arg;

        cap2 = kzalloc(sizeof(*cap2),GFP_KERNEL);
        memset(cap, 0, sizeof(*cap));
        memset(&fbuf2, 0, sizeof(fbuf2));

        err = drv(inode, file, VIDIOC_QUERYCAP, cap2);
        if (err < 0) {
            dprintk("VIDIOCGCAP / VIDIOC_QUERYCAP: %d\n",err);
            break;
        }
        if (cap2->capabilities & V4L2_CAP_VIDEO_OVERLAY) {
            err = drv(inode, file, VIDIOC_G_FBUF, &fbuf2);
            if (err < 0) {
                dprintk("VIDIOCGCAP / VIDIOC_G_FBUF: %d\n",err);
                memset(&fbuf2, 0, sizeof(fbuf2));
            }
            err = 0;
        }

        memcpy(cap->name, cap2->card,
               min(sizeof(cap->name), sizeof(cap2->card)));
        cap->name[sizeof(cap->name) - 1] = 0;
        if (cap2->capabilities & V4L2_CAP_VIDEO_CAPTURE)
            cap->type |= VID_TYPE_CAPTURE;
        if (cap2->capabilities & V4L2_CAP_TUNER)
            cap->type |= VID_TYPE_TUNER;
        if (cap2->capabilities & V4L2_CAP_VBI_CAPTURE)
            cap->type |= VID_TYPE_TELETEXT;
        if (cap2->capabilities & V4L2_CAP_VIDEO_OVERLAY)
            cap->type |= VID_TYPE_OVERLAY;
        if (fbuf2.capability & V4L2_FBUF_CAP_LIST_CLIPPING)
            cap->type |= VID_TYPE_CLIPPING;

        cap->channels  = count_inputs(inode,file,drv);
        check_size(inode,file,drv,
                   &cap->maxwidth,&cap->maxheight);
        cap->audios    =  0; /* FIXME */
        cap->minwidth  = 48; /* FIXME */
        cap->minheight = 32; /* FIXME */
        break;
    }
    case VIDIOCGFBUF: /*  get frame buffer  */
    {
        struct video_buffer	*buffer = arg;

        memset(buffer, 0, sizeof(*buffer));
        memset(&fbuf2, 0, sizeof(fbuf2));

        err = drv(inode, file, VIDIOC_G_FBUF, &fbuf2);
        if (err < 0) {
            dprintk("VIDIOCGFBUF / VIDIOC_G_FBUF: %d\n",err);
            break;
        }
        buffer->base   = fbuf2.base;
        buffer->height = fbuf2.fmt.height;
        buffer->width  = fbuf2.fmt.width;

        switch (fbuf2.fmt.pixelformat) {
        case V4L2_PIX_FMT_RGB332:
            buffer->depth = 8;
            break;
        case V4L2_PIX_FMT_RGB555:
            buffer->depth = 15;
            break;
        case V4L2_PIX_FMT_RGB565:
            buffer->depth = 16;
            break;
        case V4L2_PIX_FMT_BGR24:
            buffer->depth = 24;
            break;
        case V4L2_PIX_FMT_BGR32:
            buffer->depth = 32;
            break;
        default:
            buffer->depth = 0;
        }
        if (fbuf2.fmt.bytesperline) {
            buffer->bytesperline = fbuf2.fmt.bytesperline;
            if (!buffer->depth && buffer->width)
                buffer->depth   = ((fbuf2.fmt.bytesperline<<3)
                                   + (buffer->width-1) )
                                  /buffer->width;
        } else {
            buffer->bytesperline =
                (buffer->width * buffer->depth + 7) & ~7;
            buffer->bytesperline >>= 3;
        }
        break;
    }
    case VIDIOCSFBUF: /*  set frame buffer  */
    {
        struct video_buffer	*buffer = arg;

        memset(&fbuf2, 0, sizeof(fbuf2));
        fbuf2.base       = buffer->base;
        fbuf2.fmt.height = buffer->height;
        fbuf2.fmt.width  = buffer->width;
        switch (buffer->depth) {
        case 8:
            fbuf2.fmt.pixelformat = V4L2_PIX_FMT_RGB332;
            break;
        case 15:
            fbuf2.fmt.pixelformat = V4L2_PIX_FMT_RGB555;
            break;
        case 16:
            fbuf2.fmt.pixelformat = V4L2_PIX_FMT_RGB565;
            break;
        case 24:
            fbuf2.fmt.pixelformat = V4L2_PIX_FMT_BGR24;
            break;
        case 32:
            fbuf2.fmt.pixelformat = V4L2_PIX_FMT_BGR32;
            break;
        }
        fbuf2.fmt.bytesperline = buffer->bytesperline;
        err = drv(inode, file, VIDIOC_S_FBUF, &fbuf2);
        if (err < 0)
            dprintk("VIDIOCSFBUF / VIDIOC_S_FBUF: %d\n",err);
        break;
    }
    case VIDIOCGWIN: /*  get window or capture dimensions  */
    {
        struct video_window	*win = arg;

        fmt2 = kzalloc(sizeof(*fmt2),GFP_KERNEL);
        memset(win,0,sizeof(*win));

        fmt2->type = V4L2_BUF_TYPE_VIDEO_OVERLAY;
        err = drv(inode, file, VIDIOC_G_FMT, fmt2);
        if (err < 0)
            dprintk("VIDIOCGWIN / VIDIOC_G_WIN: %d\n",err);
        if (err == 0) {
            win->x         = fmt2->fmt.win.w.left;
            win->y         = fmt2->fmt.win.w.top;
            win->width     = fmt2->fmt.win.w.width;
            win->height    = fmt2->fmt.win.w.height;
            win->chromakey = fmt2->fmt.win.chromakey;
            win->clips     = NULL;
            win->clipcount = 0;
            break;
        }

        fmt2->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        err = drv(inode, file, VIDIOC_G_FMT, fmt2);
        if (err < 0) {
            dprintk("VIDIOCGWIN / VIDIOC_G_FMT: %d\n",err);
            break;
        }
        win->x         = 0;
        win->y         = 0;
        win->width     = fmt2->fmt.pix.width;
        win->height    = fmt2->fmt.pix.height;
        win->chromakey = 0;
        win->clips     = NULL;
        win->clipcount = 0;
        break;
    }
    case VIDIOCSWIN: /*  set window and/or capture dimensions  */
    {
        struct video_window	*win = arg;
        int err1,err2;

        fmt2 = kzalloc(sizeof(*fmt2),GFP_KERNEL);
        fmt2->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        drv(inode, file, VIDIOC_STREAMOFF, &fmt2->type);
        err1 = drv(inode, file, VIDIOC_G_FMT, fmt2);
        if (err1 < 0)
            dprintk("VIDIOCSWIN / VIDIOC_G_FMT: %d\n",err);
        if (err1 == 0) {
            fmt2->fmt.pix.width  = win->width;
            fmt2->fmt.pix.height = win->height;
            fmt2->fmt.pix.field  = V4L2_FIELD_ANY;
            fmt2->fmt.pix.bytesperline = 0;
            err = drv(inode, file, VIDIOC_S_FMT, fmt2);
            if (err < 0)
                dprintk("VIDIOCSWIN / VIDIOC_S_FMT #1: %d\n",
                        err);
            win->width  = fmt2->fmt.pix.width;
            win->height = fmt2->fmt.pix.height;
        }

        memset(fmt2,0,sizeof(*fmt2));
        fmt2->type = V4L2_BUF_TYPE_VIDEO_OVERLAY;
        fmt2->fmt.win.w.left    = win->x;
        fmt2->fmt.win.w.top     = win->y;
        fmt2->fmt.win.w.width   = win->width;
        fmt2->fmt.win.w.height  = win->height;
        fmt2->fmt.win.chromakey = win->chromakey;
        fmt2->fmt.win.clips     = (void __user *)win->clips;
        fmt2->fmt.win.clipcount = win->clipcount;
        err2 = drv(inode, file, VIDIOC_S_FMT, fmt2);
        if (err2 < 0)
            dprintk("VIDIOCSWIN / VIDIOC_S_FMT #2: %d\n",err);

        if (err1 != 0 && err2 != 0)
            err = err1;
        break;
    }
    case VIDIOCCAPTURE: /*  turn on/off preview  */
    {
        int *on = arg;

        if (0 == *on) {
            /* dirty hack time.  But v4l1 has no STREAMOFF
             * equivalent in the API, and this one at
             * least comes close ... */
            drv(inode, file, VIDIOC_STREAMOFF, &captype);
        }
        err = drv(inode, file, VIDIOC_OVERLAY, arg);
        if (err < 0)
            dprintk("VIDIOCCAPTURE / VIDIOC_PREVIEW: %d\n",err);
        break;
    }
    case VIDIOCGCHAN: /*  get input information  */
    {
        struct video_channel	*chan = arg;

        memset(&input2,0,sizeof(input2));
        input2.index = chan->channel;
        err = drv(inode, file, VIDIOC_ENUMINPUT, &input2);
        if (err < 0) {
            dprintk("VIDIOCGCHAN / VIDIOC_ENUMINPUT: "
                    "channel=%d err=%d\n",chan->channel,err);
            break;
        }
        chan->channel = input2.index;
        memcpy(chan->name, input2.name,
               min(sizeof(chan->name), sizeof(input2.name)));
        chan->name[sizeof(chan->name) - 1] = 0;
        chan->tuners = (input2.type == V4L2_INPUT_TYPE_TUNER) ? 1 : 0;
        chan->flags = (chan->tuners) ? VIDEO_VC_TUNER : 0;
        switch (input2.type) {
        case V4L2_INPUT_TYPE_TUNER:
            chan->type = VIDEO_TYPE_TV;
            break;
        default:
        case V4L2_INPUT_TYPE_CAMERA:
            chan->type = VIDEO_TYPE_CAMERA;
            break;
        }
        chan->norm = 0;
        err = drv(inode, file, VIDIOC_G_STD, &sid);
        if (err < 0)
            dprintk("VIDIOCGCHAN / VIDIOC_G_STD: %d\n",err);
        if (err == 0) {
            if (sid & V4L2_STD_PAL)
                chan->norm = VIDEO_MODE_PAL;
            if (sid & V4L2_STD_NTSC)
                chan->norm = VIDEO_MODE_NTSC;
            if (sid & V4L2_STD_SECAM)
                chan->norm = VIDEO_MODE_SECAM;
        }
        break;
    }
    case VIDIOCSCHAN: /*  set input  */
    {
        struct video_channel *chan = arg;

        sid = 0;
        err = drv(inode, file, VIDIOC_S_INPUT, &chan->channel);
        if (err < 0)
            dprintk("VIDIOCSCHAN / VIDIOC_S_INPUT: %d\n",err);
        switch (chan->norm) {
        case VIDEO_MODE_PAL:
            sid = V4L2_STD_PAL;
            break;
        case VIDEO_MODE_NTSC:
            sid = V4L2_STD_NTSC;
            break;
        case VIDEO_MODE_SECAM:
            sid = V4L2_STD_SECAM;
            break;
        }
        if (0 != sid) {
            err = drv(inode, file, VIDIOC_S_STD, &sid);
            if (err < 0)
                dprintk("VIDIOCSCHAN / VIDIOC_S_STD: %d\n",err);
        }
        break;
    }
    case VIDIOCGPICT: /*  get tone controls & partial capture format  */
    {
        struct video_picture	*pict = arg;

        pict->brightness = get_v4l_control(inode, file,
                                           V4L2_CID_BRIGHTNESS,drv);
        pict->hue = get_v4l_control(inode, file,
                                    V4L2_CID_HUE, drv);
        pict->contrast = get_v4l_control(inode, file,
                                         V4L2_CID_CONTRAST, drv);
        pict->colour = get_v4l_control(inode, file,
                                       V4L2_CID_SATURATION, drv);
        pict->whiteness = get_v4l_control(inode, file,
                                          V4L2_CID_WHITENESS, drv);

        fmt2 = kzalloc(sizeof(*fmt2),GFP_KERNEL);
        fmt2->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        err = drv(inode, file, VIDIOC_G_FMT, fmt2);
        if (err < 0) {
            dprintk("VIDIOCGPICT / VIDIOC_G_FMT: %d\n",err);
            break;
        }

        pict->depth   = ((fmt2->fmt.pix.bytesperline<<3)
                         + (fmt2->fmt.pix.width-1) )
                        /fmt2->fmt.pix.width;
        pict->palette = pixelformat_to_palette(
                            fmt2->fmt.pix.pixelformat);
        break;
    }
    case VIDIOCSPICT: /*  set tone controls & partial capture format  */
    {
        struct video_picture	*pict = arg;
        int mem_err = 0, ovl_err = 0;

        memset(&fbuf2, 0, sizeof(fbuf2));

        set_v4l_control(inode, file,
                        V4L2_CID_BRIGHTNESS, pict->brightness, drv);
        set_v4l_control(inode, file,
                        V4L2_CID_HUE, pict->hue, drv);
        set_v4l_control(inode, file,
                        V4L2_CID_CONTRAST, pict->contrast, drv);
        set_v4l_control(inode, file,
                        V4L2_CID_SATURATION, pict->colour, drv);
        set_v4l_control(inode, file,
                        V4L2_CID_WHITENESS, pict->whiteness, drv);
        /*
         * V4L1 uses this ioctl to set both memory capture and overlay
         * pixel format, while V4L2 has two different ioctls for this.
         * Some cards may not support one or the other, and may support
         * different pixel formats for memory vs overlay.
         */

        fmt2 = kzalloc(sizeof(*fmt2),GFP_KERNEL);
        fmt2->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        err = drv(inode, file, VIDIOC_G_FMT, fmt2);
        /* If VIDIOC_G_FMT failed, then the driver likely doesn't
           support memory capture.  Trying to set the memory capture
           parameters would be pointless.  */
        if (err < 0) {
            dprintk("VIDIOCSPICT / VIDIOC_G_FMT: %d\n",err);
            mem_err = -1000;  /* didn't even try */
        } else if (fmt2->fmt.pix.pixelformat !=
                   palette_to_pixelformat(pict->palette)) {
            fmt2->fmt.pix.pixelformat = palette_to_pixelformat(
                                            pict->palette);
            mem_err = drv(inode, file, VIDIOC_S_FMT, fmt2);
            if (mem_err < 0)
                dprintk("VIDIOCSPICT / VIDIOC_S_FMT: %d\n",
                        mem_err);
        }

        err = drv(inode, file, VIDIOC_G_FBUF, &fbuf2);
        /* If VIDIOC_G_FBUF failed, then the driver likely doesn't
           support overlay.  Trying to set the overlay parameters
           would be quite pointless.  */
        if (err < 0) {
            dprintk("VIDIOCSPICT / VIDIOC_G_FBUF: %d\n",err);
            ovl_err = -1000;  /* didn't even try */
        } else if (fbuf2.fmt.pixelformat !=
                   palette_to_pixelformat(pict->palette)) {
            fbuf2.fmt.pixelformat = palette_to_pixelformat(
                                        pict->palette);
            ovl_err = drv(inode, file, VIDIOC_S_FBUF, &fbuf2);
            if (ovl_err < 0)
                dprintk("VIDIOCSPICT / VIDIOC_S_FBUF: %d\n",
                        ovl_err);
        }
        if (ovl_err < 0 && mem_err < 0)
            /* ioctl failed, couldn't set either parameter */
            if (mem_err != -1000) {
                err = mem_err;
            } else if (ovl_err == -EPERM) {
                err = 0;
            } else {
                err = ovl_err;
            }
        else
            err = 0;
        break;
    }
    case VIDIOCGTUNER: /*  get tuner information  */
    {
        struct video_tuner	*tun = arg;

        memset(&tun2,0,sizeof(tun2));
        err = drv(inode, file, VIDIOC_G_TUNER, &tun2);
        if (err < 0) {
            dprintk("VIDIOCGTUNER / VIDIOC_G_TUNER: %d\n",err);
            break;
        }
        memcpy(tun->name, tun2.name,
               min(sizeof(tun->name), sizeof(tun2.name)));
        tun->name[sizeof(tun->name) - 1] = 0;
        tun->rangelow = tun2.rangelow;
        tun->rangehigh = tun2.rangehigh;
        tun->flags = 0;
        tun->mode = VIDEO_MODE_AUTO;

        for (i = 0; i < 64; i++) {
            memset(&std2,0,sizeof(std2));
            std2.index = i;
            if (0 != drv(inode, file, VIDIOC_ENUMSTD, &std2))
                break;
            if (std2.id & V4L2_STD_PAL)
                tun->flags |= VIDEO_TUNER_PAL;
            if (std2.id & V4L2_STD_NTSC)
                tun->flags |= VIDEO_TUNER_NTSC;
            if (std2.id & V4L2_STD_SECAM)
                tun->flags |= VIDEO_TUNER_SECAM;
        }

        err = drv(inode, file, VIDIOC_G_STD, &sid);
        if (err < 0)
            dprintk("VIDIOCGTUNER / VIDIOC_G_STD: %d\n",err);
        if (err == 0) {
            if (sid & V4L2_STD_PAL)
                tun->mode = VIDEO_MODE_PAL;
            if (sid & V4L2_STD_NTSC)
                tun->mode = VIDEO_MODE_NTSC;
            if (sid & V4L2_STD_SECAM)
                tun->mode = VIDEO_MODE_SECAM;
        }

        if (tun2.capability & V4L2_TUNER_CAP_LOW)
            tun->flags |= VIDEO_TUNER_LOW;
        if (tun2.rxsubchans & V4L2_TUNER_SUB_STEREO)
            tun->flags |= VIDEO_TUNER_STEREO_ON;
        tun->signal = tun2.signal;
        break;
    }
    case VIDIOCSTUNER: /*  select a tuner input  */
    {
        struct video_tuner	*tun = arg;
        struct v4l2_tuner	t;
        memset(&t,0,sizeof(t));

        t.index=tun->tuner;

        err = drv(inode, file, VIDIOC_S_TUNER, &t);
        if (err < 0)
            dprintk("VIDIOCSTUNER / VIDIOC_S_TUNER: %d\n",err);

        break;
    }
    case VIDIOCGFREQ: /*  get frequency  */
    {
        unsigned long *freq = arg;
        memset(&freq2,0,sizeof(freq2));

        freq2.tuner = 0;
        err = drv(inode, file, VIDIOC_G_FREQUENCY, &freq2);
        if (err < 0)
            dprintk("VIDIOCGFREQ / VIDIOC_G_FREQUENCY: %d\n",err);
        if (0 == err)
            *freq = freq2.frequency;
        break;
    }
    case VIDIOCSFREQ: /*  set frequency  */
    {
        unsigned long *freq = arg;
        memset(&freq2,0,sizeof(freq2));

        drv(inode, file, VIDIOC_G_FREQUENCY, &freq2);
        freq2.frequency = *freq;
        err = drv(inode, file, VIDIOC_S_FREQUENCY, &freq2);
        if (err < 0)
            dprintk("VIDIOCSFREQ / VIDIOC_S_FREQUENCY: %d\n",err);
        break;
    }
    case VIDIOCGAUDIO: /*  get audio properties/controls  */
    {
        struct video_audio	*aud = arg;
        memset(&aud2,0,sizeof(aud2));

        err = drv(inode, file, VIDIOC_G_AUDIO, &aud2);
        if (err < 0) {
            dprintk("VIDIOCGAUDIO / VIDIOC_G_AUDIO: %d\n",err);
            break;
        }
        memcpy(aud->name, aud2.name,
               min(sizeof(aud->name), sizeof(aud2.name)));
        aud->name[sizeof(aud->name) - 1] = 0;
        aud->audio = aud2.index;
        aud->flags = 0;
        i = get_v4l_control(inode, file, V4L2_CID_AUDIO_VOLUME, drv);
        if (i >= 0) {
            aud->volume = i;
            aud->flags |= VIDEO_AUDIO_VOLUME;
        }
        i = get_v4l_control(inode, file, V4L2_CID_AUDIO_BASS, drv);
        if (i >= 0) {
            aud->bass = i;
            aud->flags |= VIDEO_AUDIO_BASS;
        }
        i = get_v4l_control(inode, file, V4L2_CID_AUDIO_TREBLE, drv);
        if (i >= 0) {
            aud->treble = i;
            aud->flags |= VIDEO_AUDIO_TREBLE;
        }
        i = get_v4l_control(inode, file, V4L2_CID_AUDIO_BALANCE, drv);
        if (i >= 0) {
            aud->balance = i;
            aud->flags |= VIDEO_AUDIO_BALANCE;
        }
        i = get_v4l_control(inode, file, V4L2_CID_AUDIO_MUTE, drv);
        if (i >= 0) {
            if (i)
                aud->flags |= VIDEO_AUDIO_MUTE;
            aud->flags |= VIDEO_AUDIO_MUTABLE;
        }
        aud->step = 1;
        qctrl2.id = V4L2_CID_AUDIO_VOLUME;
        if (drv(inode, file, VIDIOC_QUERYCTRL, &qctrl2) == 0 &&
                !(qctrl2.flags & V4L2_CTRL_FLAG_DISABLED))
            aud->step = qctrl2.step;
        aud->mode = 0;

        memset(&tun2,0,sizeof(tun2));
        err = drv(inode, file, VIDIOC_G_TUNER, &tun2);
        if (err < 0) {
            dprintk("VIDIOCGAUDIO / VIDIOC_G_TUNER: %d\n",err);
            err = 0;
            break;
        }

        if (tun2.rxsubchans & V4L2_TUNER_SUB_LANG2)
            aud->mode = VIDEO_SOUND_LANG1 | VIDEO_SOUND_LANG2;
        else if (tun2.rxsubchans & V4L2_TUNER_SUB_STEREO)
            aud->mode = VIDEO_SOUND_STEREO;
        else if (tun2.rxsubchans & V4L2_TUNER_SUB_MONO)
            aud->mode = VIDEO_SOUND_MONO;
        break;
    }
    case VIDIOCSAUDIO: /*  set audio controls  */
    {
        struct video_audio	*aud = arg;

        memset(&aud2,0,sizeof(aud2));
        memset(&tun2,0,sizeof(tun2));

        aud2.index = aud->audio;
        err = drv(inode, file, VIDIOC_S_AUDIO, &aud2);
        if (err < 0) {
            dprintk("VIDIOCSAUDIO / VIDIOC_S_AUDIO: %d\n",err);
            break;
        }

        set_v4l_control(inode, file, V4L2_CID_AUDIO_VOLUME,
                        aud->volume, drv);
        set_v4l_control(inode, file, V4L2_CID_AUDIO_BASS,
                        aud->bass, drv);
        set_v4l_control(inode, file, V4L2_CID_AUDIO_TREBLE,
                        aud->treble, drv);
        set_v4l_control(inode, file, V4L2_CID_AUDIO_BALANCE,
                        aud->balance, drv);
        set_v4l_control(inode, file, V4L2_CID_AUDIO_MUTE,
                        !!(aud->flags & VIDEO_AUDIO_MUTE), drv);

        err = drv(inode, file, VIDIOC_G_TUNER, &tun2);
        if (err < 0)
            dprintk("VIDIOCSAUDIO / VIDIOC_G_TUNER: %d\n",err);
        if (err == 0) {
            switch (aud->mode) {
            default:
            case VIDEO_SOUND_MONO:
            case VIDEO_SOUND_LANG1:
                tun2.audmode = V4L2_TUNER_MODE_MONO;
                break;
            case VIDEO_SOUND_STEREO:
                tun2.audmode = V4L2_TUNER_MODE_STEREO;
                break;
            case VIDEO_SOUND_LANG2:
                tun2.audmode = V4L2_TUNER_MODE_LANG2;
                break;
            }
            err = drv(inode, file, VIDIOC_S_TUNER, &tun2);
            if (err < 0)
                dprintk("VIDIOCSAUDIO / VIDIOC_S_TUNER: %d\n",err);
        }
        err = 0;
        break;
    }
    case VIDIOCMCAPTURE: /*  capture a frame  */
    {
        struct video_mmap	*mm = arg;

        fmt2 = kzalloc(sizeof(*fmt2),GFP_KERNEL);
        memset(&buf2,0,sizeof(buf2));

        fmt2->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        err = drv(inode, file, VIDIOC_G_FMT, fmt2);
        if (err < 0) {
            dprintk("VIDIOCMCAPTURE / VIDIOC_G_FMT: %d\n",err);
            break;
        }
        if (mm->width   != fmt2->fmt.pix.width  ||
                mm->height  != fmt2->fmt.pix.height ||
                palette_to_pixelformat(mm->format) !=
                fmt2->fmt.pix.pixelformat)
        {   /* New capture format...  */
            fmt2->fmt.pix.width = mm->width;
            fmt2->fmt.pix.height = mm->height;
            fmt2->fmt.pix.pixelformat =
                palette_to_pixelformat(mm->format);
            fmt2->fmt.pix.field = V4L2_FIELD_ANY;
            fmt2->fmt.pix.bytesperline = 0;
            err = drv(inode, file, VIDIOC_S_FMT, fmt2);
            if (err < 0) {
                dprintk("VIDIOCMCAPTURE / VIDIOC_S_FMT: %d\n",err);
                break;
            }
        }
        buf2.index = mm->frame;
        buf2.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        err = drv(inode, file, VIDIOC_QUERYBUF, &buf2);
        if (err < 0) {
            dprintk("VIDIOCMCAPTURE / VIDIOC_QUERYBUF: %d\n",err);
            break;
        }
        err = drv(inode, file, VIDIOC_QBUF, &buf2);
        if (err < 0) {
            dprintk("VIDIOCMCAPTURE / VIDIOC_QBUF: %d\n",err);
            break;
        }
        err = drv(inode, file, VIDIOC_STREAMON, &captype);
        if (err < 0)
            dprintk("VIDIOCMCAPTURE / VIDIOC_STREAMON: %d\n",err);
        break;
    }
    case VIDIOCSYNC: /*  wait for a frame  */
    {
        int			*i = arg;

        memset(&buf2,0,sizeof(buf2));
        buf2.index = *i;
        buf2.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        err = drv(inode, file, VIDIOC_QUERYBUF, &buf2);
        if (err < 0) {
            /*  No such buffer */
            dprintk("VIDIOCSYNC / VIDIOC_QUERYBUF: %d\n",err);
            break;
        }
        if (!(buf2.flags & V4L2_BUF_FLAG_MAPPED)) {
            /* Buffer is not mapped  */
            err = -EINVAL;
            break;
        }

        /* make sure capture actually runs so we don't block forever */
        err = drv(inode, file, VIDIOC_STREAMON, &captype);
        if (err < 0) {
            dprintk("VIDIOCSYNC / VIDIOC_STREAMON: %d\n",err);
            break;
        }

        /*  Loop as long as the buffer is queued, but not done  */
        while ((buf2.flags &
                (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))
                == V4L2_BUF_FLAG_QUEUED)
        {
            err = poll_one(file);
            if (err < 0 ||	/* error or sleep was interrupted  */
                    err == 0)	/* timeout? Shouldn't occur.  */
                break;
            err = drv(inode, file, VIDIOC_QUERYBUF, &buf2);
            if (err < 0)
                dprintk("VIDIOCSYNC / VIDIOC_QUERYBUF: %d\n",err);
        }
        if (!(buf2.flags & V4L2_BUF_FLAG_DONE)) /* not done */
            break;
        do {
            err = drv(inode, file, VIDIOC_DQBUF, &buf2);
            if (err < 0)
                dprintk("VIDIOCSYNC / VIDIOC_DQBUF: %d\n",err);
        } while (err == 0 && buf2.index != *i);
        break;
    }

    case VIDIOCGVBIFMT: /* query VBI data capture format */
    {
        struct vbi_format      *fmt = arg;

        fmt2 = kzalloc(sizeof(*fmt2),GFP_KERNEL);
        fmt2->type = V4L2_BUF_TYPE_VBI_CAPTURE;

        err = drv(inode, file, VIDIOC_G_FMT, fmt2);
        if (err < 0) {
            dprintk("VIDIOCGVBIFMT / VIDIOC_G_FMT: %d\n", err);
            break;
        }
        if (fmt2->fmt.vbi.sample_format != V4L2_PIX_FMT_GREY) {
            err = -EINVAL;
            break;
        }
        memset(fmt, 0, sizeof(*fmt));
        fmt->samples_per_line = fmt2->fmt.vbi.samples_per_line;
        fmt->sampling_rate    = fmt2->fmt.vbi.sampling_rate;
        fmt->sample_format    = VIDEO_PALETTE_RAW;
        fmt->start[0]         = fmt2->fmt.vbi.start[0];
        fmt->count[0]         = fmt2->fmt.vbi.count[0];
        fmt->start[1]         = fmt2->fmt.vbi.start[1];
        fmt->count[1]         = fmt2->fmt.vbi.count[1];
        fmt->flags            = fmt2->fmt.vbi.flags & 0x03;
        break;
    }
    case VIDIOCSVBIFMT:
    {
        struct vbi_format      *fmt = arg;

        if (VIDEO_PALETTE_RAW != fmt->sample_format) {
            err = -EINVAL;
            break;
        }

        fmt2 = kzalloc(sizeof(*fmt2),GFP_KERNEL);

        fmt2->type = V4L2_BUF_TYPE_VBI_CAPTURE;
        fmt2->fmt.vbi.samples_per_line = fmt->samples_per_line;
        fmt2->fmt.vbi.sampling_rate    = fmt->sampling_rate;
        fmt2->fmt.vbi.sample_format    = V4L2_PIX_FMT_GREY;
        fmt2->fmt.vbi.start[0]         = fmt->start[0];
        fmt2->fmt.vbi.count[0]         = fmt->count[0];
        fmt2->fmt.vbi.start[1]         = fmt->start[1];
        fmt2->fmt.vbi.count[1]         = fmt->count[1];
        fmt2->fmt.vbi.flags            = fmt->flags;
        err = drv(inode, file, VIDIOC_TRY_FMT, fmt2);
        if (err < 0) {
            dprintk("VIDIOCSVBIFMT / VIDIOC_TRY_FMT: %d\n", err);
            break;
        }

        if (fmt2->fmt.vbi.samples_per_line != fmt->samples_per_line ||
                fmt2->fmt.vbi.sampling_rate    != fmt->sampling_rate    ||
                fmt2->fmt.vbi.sample_format    != V4L2_PIX_FMT_GREY     ||
                fmt2->fmt.vbi.start[0]         != fmt->start[0]         ||
                fmt2->fmt.vbi.count[0]         != fmt->count[0]         ||
                fmt2->fmt.vbi.start[1]         != fmt->start[1]         ||
                fmt2->fmt.vbi.count[1]         != fmt->count[1]         ||
                fmt2->fmt.vbi.flags            != fmt->flags) {
            err = -EINVAL;
            break;
        }
        err = drv(inode, file, VIDIOC_S_FMT, fmt2);
        if (err < 0)
            dprintk("VIDIOCSVBIFMT / VIDIOC_S_FMT: %d\n", err);
        break;
    }

    default:
        err = -ENOIOCTLCMD;
        break;
    }

    kfree(cap2);
    kfree(fmt2);
    return err;
}
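
The exported v4l_compat_translate_ioctl() is meant to be called from a driver's own ioctl path, with the driver's V4L2 handler passed in as the v4l2_kioctl callback so that the translated V4L2 commands are fed back into the same driver. The following is an illustrative sketch only, not taken from the project above; "mydrv_do_ioctl" is a hypothetical handler name.

/*
 * Illustrative sketch: a driver of this era would typically let its
 * video_usercopy() callback fall through to the compatibility layer for
 * V4L1 commands, passing itself as the v4l2_kioctl callback.
 */
static int mydrv_do_ioctl(struct inode *inode, struct file *file,
			  unsigned int cmd, void *arg)
{
	switch (cmd) {
	/* ... native V4L2 ioctls handled here ... */
	default:
		/* Unknown command: let the V4L1 layer translate it and
		 * call back into this handler with V4L2 ioctls. */
		return v4l_compat_translate_ioctl(inode, file, cmd, arg,
						  mydrv_do_ioctl);
	}
}
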
Code Example #3
File: libv4l1.c  Project: Distrotech/v4l-utils
int v4l1_ioctl(int fd, unsigned long int request, ...)
{
	void *arg;
	va_list ap;
	int result, index, saved_err, stream_locked = 0;

	va_start(ap, request);
	arg = va_arg(ap, void *);
	va_end(ap);

	index = v4l1_get_index(fd);
	if (index == -1)
		return SYS_IOCTL(fd, request, arg);

	/* Apparently the kernel and/or glibc ignore the 32 most significant bits
	   when long = 64 bits, and some applications pass an int holding the req to
	   ioctl, causing it to get sign extended, depending upon this behavior */
	request = (unsigned int)request;

	/* do we need to take the stream lock for this ioctl? */
	switch (request) {
	case VIDIOCSPICT:
	case VIDIOCGPICT:
	case VIDIOCSWIN:
	case VIDIOCGWIN:
	case VIDIOCGMBUF:
	case VIDIOCMCAPTURE:
	case VIDIOCSYNC:
	case VIDIOC_S_FMT:
		pthread_mutex_lock(&devices[index].stream_lock);
		stream_locked = 1;
	}

	switch (request) {
	case VIDIOCGCAP: {
		struct video_capability *cap = arg;
		struct v4l2_framebuffer fbuf = { 0, };
		struct v4l2_capability cap2 = { { 0 }, };

		result = v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap2);
		if (result < 0)
			break;

		if (cap2.capabilities & V4L2_CAP_DEVICE_CAPS)
			cap2.capabilities = cap2.device_caps;
		if (cap2.capabilities & V4L2_CAP_VIDEO_OVERLAY) {
			result = v4l2_ioctl(fd, VIDIOC_G_FBUF, &fbuf);
			if (result < 0)
				memset(&fbuf, 0, sizeof(fbuf));
			result = 0;
		}

		memcpy(cap->name, cap2.card,
		       min(sizeof(cap->name), sizeof(cap2.card)));

		cap->name[sizeof(cap->name) - 1] = 0;

		if (cap2.capabilities & V4L2_CAP_VIDEO_CAPTURE)
			cap->type |= VID_TYPE_CAPTURE;
		if (cap2.capabilities & V4L2_CAP_TUNER)
			cap->type |= VID_TYPE_TUNER;
		if (cap2.capabilities & V4L2_CAP_VBI_CAPTURE)
			cap->type |= VID_TYPE_TELETEXT;
		if (cap2.capabilities & V4L2_CAP_VIDEO_OVERLAY)
			cap->type |= VID_TYPE_OVERLAY;
		if (fbuf.capability & V4L2_FBUF_CAP_LIST_CLIPPING)
			cap->type |= VID_TYPE_CLIPPING;

		cap->channels  = count_inputs(fd);
		cap->minwidth  = devices[index].min_width;
		cap->minheight = devices[index].min_height;
		cap->maxwidth  = devices[index].max_width;
		cap->maxheight = devices[index].max_height;
		break;
	}

	case VIDIOCSPICT: {
		struct video_picture *pic = arg;

		devices[index].flags |= V4L1_PIX_FMT_TOUCHED;

		v4l2_set_control(fd, V4L2_CID_BRIGHTNESS, pic->brightness);
		v4l2_set_control(fd, V4L2_CID_HUE, pic->hue);
		v4l2_set_control(fd, V4L2_CID_CONTRAST, pic->contrast);
		v4l2_set_control(fd, V4L2_CID_SATURATION, pic->colour);
		v4l2_set_control(fd, V4L2_CID_WHITENESS, pic->whiteness);

		result = v4l1_set_format(index, devices[index].width,
				devices[index].height, pic->palette, 0);
		break;
	}

	case VIDIOCGPICT: {
		struct video_picture *pic = arg;
		int i;

		/* If our v4l2 pixformat has no corresponding v4l1 palette, and
	   the app has not touched the pixformat so far, try setting a
		   palette which does (and which we emulate when necessary) so
		   that applications which just query the current format and
		   then take whatever they get will work */
		if (!(devices[index].flags & V4L1_PIX_FMT_TOUCHED) &&
		    !pixelformat_to_palette(devices[index].v4l2_pixfmt))
			v4l1_set_format(index, devices[index].width,
					devices[index].height,
					VIDEO_PALETTE_RGB24,
					(devices[index].flags &
					 V4L1_PIX_SIZE_TOUCHED) ? 0 : 1);

		devices[index].flags |= V4L1_PIX_FMT_TOUCHED;

		memset(pic, 0, sizeof(*pic));
		pic->depth = devices[index].depth;
		pic->palette = devices[index].v4l1_pal;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_HUE);
		if (i >= 0)
			pic->hue = i;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_SATURATION);
		if (i >= 0)
			pic->colour = i;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_CONTRAST);
		if (i >= 0)
			pic->contrast = i;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_WHITENESS);
		if (i >= 0)
			pic->whiteness = i;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_BRIGHTNESS);
		if (i >= 0)
			pic->brightness = i;

		result = 0;
		break;
	}

	case VIDIOCSWIN:
	case VIDIOCGWIN: {
		struct video_window *win = arg;

		devices[index].flags |= V4L1_PIX_SIZE_TOUCHED;

		if (request == VIDIOCSWIN)
			result = v4l1_set_format(index, win->width, win->height, -1, 1);
		else
			result = 0;

		if (result == 0) {
			win->x = 0;
			win->y = 0;
			win->width  = devices[index].width;
			win->height = devices[index].height;
			win->flags = 0;
		}
		break;
	}

	case VIDIOCGCHAN: {
		struct video_channel *chan = arg;

		/* Set some defaults */
		chan->tuners = 0;
		chan->flags = 0;
		chan->type = VIDEO_TYPE_CAMERA;
		chan->norm = 0;

		if (devices[index].flags & V4L1_SUPPORTS_ENUMINPUT) {
			struct v4l2_input input2 = { .index = chan->channel };

			result = v4l2_ioctl(fd, VIDIOC_ENUMINPUT, &input2);
			if (result < 0)
				break;

			snprintf(chan->name, sizeof(chan->name), "%s",
				 (char *)input2.name);
			if (input2.type == V4L2_INPUT_TYPE_TUNER) {
				chan->tuners = 1;
				chan->type = VIDEO_TYPE_TV;
				chan->flags = VIDEO_VC_TUNER;
			}
		} else {
			/* No ENUMINPUT support, fake it. */
			if (chan->channel == 0) {
				snprintf(chan->name, sizeof(chan->name),
					 "Camera");
				result = 0;
			} else {
				errno  = EINVAL;
				result = -1;
				break;
			}
		}

		/* In case of no ENUMSTD support, ignore the norm member of the
		   channel struct */
		if (devices[index].flags & V4L1_SUPPORTS_ENUMSTD) {
			v4l2_std_id sid;

			result = v4l2_ioctl(fd, VIDIOC_G_STD, &sid);
			if (result < 0)
				break;

			if (sid & V4L2_STD_PAL)
				chan->norm = VIDEO_MODE_PAL;
			if (sid & V4L2_STD_NTSC)
				chan->norm = VIDEO_MODE_NTSC;
			if (sid & V4L2_STD_SECAM)
				chan->norm = VIDEO_MODE_SECAM;
			if (sid == V4L2_STD_ALL)
				chan->norm = VIDEO_MODE_AUTO;
		}
		break;
	}

	case VIDIOCSCHAN: {
		struct video_channel *chan = arg;

		if (devices[index].flags & V4L1_SUPPORTS_ENUMINPUT) {
			result = v4l2_ioctl(fd, VIDIOC_S_INPUT, &chan->channel);
			if (result < 0)
				break;
		} else {
			/* No ENUMINPUT support, assume a single input */
			if (chan->channel != 0) {
				errno  = EINVAL;
				result = -1;
				break;
			}
			result = 0;
		}

		/* In case of no ENUMSTD support, ignore the norm member of the
		   channel struct */
		if (devices[index].flags & V4L1_SUPPORTS_ENUMSTD) {
			v4l2_std_id sid = 0;

			switch (chan->norm) {
			case VIDEO_MODE_PAL:
				sid = V4L2_STD_PAL;
				break;
			case VIDEO_MODE_NTSC:
				sid = V4L2_STD_NTSC;
				break;
			case VIDEO_MODE_SECAM:
				sid = V4L2_STD_SECAM;
				break;
			case VIDEO_MODE_AUTO:
				sid = V4L2_STD_ALL;
				break;
			}

			if (sid)
				result = v4l2_ioctl(fd, VIDIOC_S_STD, &sid);
		}
		break;
	}

	case VIDIOCGMBUF: {
		/* When VIDIOCGMBUF is done, we don't necessarily know the format the
		   application wants yet (with some apps this is passed for the first
		   time through VIDIOCMCAPTURE), so we just create an anonymous mapping
		   that should be large enough to hold any sort of frame. Note this only
		   takes virtual memory, and does not use memory until actually used. */
		int i;
		struct video_mbuf *mbuf = arg;

		mbuf->size = V4L1_NO_FRAMES * V4L1_FRAME_BUF_SIZE;
		mbuf->frames = V4L1_NO_FRAMES;
		for (i = 0; i < mbuf->frames; i++)
			mbuf->offsets[i] = i * V4L1_FRAME_BUF_SIZE;

		if (devices[index].v4l1_frame_pointer == MAP_FAILED) {
			devices[index].v4l1_frame_pointer = (void *)SYS_MMAP(NULL,
					(size_t)mbuf->size,
					PROT_READ | PROT_WRITE,
					MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
			if (devices[index].v4l1_frame_pointer == MAP_FAILED) {
				saved_err = errno;
				V4L1_LOG_ERR("allocating v4l1 buffer: %s\n", strerror(errno));
				errno = saved_err;
				result = -1;
				break;
			}
			V4L1_LOG("allocated v4l1 buffer @ %p\n",
					devices[index].v4l1_frame_pointer);
		}
		result = 0;
		break;
	}

	case VIDIOCMCAPTURE: {
		struct video_mmap *map = arg;

		devices[index].flags |= V4L1_PIX_FMT_TOUCHED |
			V4L1_PIX_SIZE_TOUCHED;

		result = v4l1_set_format(index, map->width, map->height,
				map->format, 0);
		break;
	}

	case VIDIOCSYNC: {
		int *frame_index = arg;

		if (devices[index].v4l1_frame_pointer == MAP_FAILED ||
				*frame_index < 0 || *frame_index >= V4L1_NO_FRAMES) {
			errno = EINVAL;
			result = -1;
			break;
		}

		result = v4l2_read(devices[index].fd,
				devices[index].v4l1_frame_pointer +
				*frame_index * V4L1_FRAME_BUF_SIZE,
				V4L1_FRAME_BUF_SIZE);
		result = (result > 0) ? 0 : result;
		break;
	}

		/* We are passing through v4l2 calls to libv4l2 for applications which are
		   using v4l2 through libv4l1 (possible with the v4l1compat.so wrapper).

		   So the application could be calling VIDIOC_S_FMT, in this case update
		   our own bookkeeping of the cam's format. Note that this really only is
		   relevant if an application is mixing and matching v4l1 and v4l2 calls,
		   which is crazy, but better safe than sorry. */
	case VIDIOC_S_FMT: {
		struct v4l2_format *fmt2 = arg;

		result = v4l2_ioctl(fd, request, arg);

		if (result == 0 && fmt2->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
			if (devices[index].v4l2_pixfmt != fmt2->fmt.pix.pixelformat) {
				devices[index].v4l2_pixfmt = fmt2->fmt.pix.pixelformat;
				devices[index].v4l1_pal =
					pixelformat_to_palette(fmt2->fmt.pix.pixelformat);
			}
			devices[index].width  = fmt2->fmt.pix.width;
			devices[index].height = fmt2->fmt.pix.height;
		}
		break;
	}

	case VIDIOCGFBUF: {
		struct video_buffer *buffer = arg;
		struct v4l2_framebuffer fbuf = { 0, };

		result = v4l2_ioctl(fd, VIDIOC_G_FBUF, &fbuf);
		if (result < 0)
			break;

		buffer->base = fbuf.base;
		buffer->height = fbuf.fmt.height;
		buffer->width = fbuf.fmt.width;

		switch (fbuf.fmt.pixelformat) {
		case V4L2_PIX_FMT_RGB332:
			buffer->depth = 8;
			break;
		case V4L2_PIX_FMT_RGB555:
			buffer->depth = 15;
			break;
		case V4L2_PIX_FMT_RGB565:
			buffer->depth = 16;
			break;
		case V4L2_PIX_FMT_BGR24:
			buffer->depth = 24;
			break;
		case V4L2_PIX_FMT_BGR32:
			buffer->depth = 32;
			break;
		default:
			buffer->depth = 0;
		}

		if (fbuf.fmt.bytesperline) {
			buffer->bytesperline = fbuf.fmt.bytesperline;
			if (!buffer->depth && buffer->width)
				buffer->depth = ((fbuf.fmt.bytesperline << 3)
						+ (buffer->width - 1))
						/ buffer->width;
		} else {
			buffer->bytesperline =
				(buffer->width * buffer->depth + 7) & ~7;
			buffer->bytesperline >>= 3;
		}
		break;
	}

	case VIDIOCSFBUF: {
		struct video_buffer *buffer = arg;
		struct v4l2_framebuffer fbuf = { 0, };

		fbuf.base = buffer->base;
		fbuf.fmt.height = buffer->height;
		fbuf.fmt.width = buffer->width;

		switch (buffer->depth) {
		case 8:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_RGB332;
			break;
		case 15:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_RGB555;
			break;
		case 16:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_RGB565;
			break;
		case 24:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_BGR24;
			break;
		case 32:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_BGR32;
			break;
		}

		fbuf.fmt.bytesperline = buffer->bytesperline;
		result = v4l2_ioctl(fd, VIDIOC_S_FBUF, &fbuf);
		break;
	}

	case VIDIOCSTUNER: {
		struct video_tuner *tun = arg;
		struct v4l2_tuner t = { 0, };

		t.index = tun->tuner;
		result = v4l2_ioctl(fd, VIDIOC_S_TUNER, &t);

		break;
	}

	case VIDIOCGTUNER: {
		int i;
		struct video_tuner *tun = arg;
		struct v4l2_tuner tun2 = { 0, };
		struct v4l2_standard std2 = { 0, };
		v4l2_std_id sid;

		result = v4l2_ioctl(fd, VIDIOC_G_TUNER, &tun2);
		if (result < 0)
			break;

		memcpy(tun->name, tun2.name,
			min(sizeof(tun->name), sizeof(tun2.name)));
		tun->name[sizeof(tun->name) - 1] = 0;
		tun->rangelow = tun2.rangelow;
		tun->rangehigh = tun2.rangehigh;
		tun->flags = 0;
		tun->mode = VIDEO_MODE_AUTO;

		for (i = 0; i < 64; i++) {
			std2.index = i;
			if (0 != v4l2_ioctl(fd, VIDIOC_ENUMSTD, &std2))
				break;
			if (std2.id & V4L2_STD_PAL)
				tun->flags |= VIDEO_TUNER_PAL;
			if (std2.id & V4L2_STD_NTSC)
				tun->flags |= VIDEO_TUNER_NTSC;
			if (std2.id & V4L2_STD_SECAM)
				tun->flags |= VIDEO_TUNER_SECAM;
		}

		if (v4l2_ioctl(fd, VIDIOC_G_STD, &sid) == 0) {
			if (sid & V4L2_STD_PAL)
				tun->mode = VIDEO_MODE_PAL;
			if (sid & V4L2_STD_NTSC)
				tun->mode = VIDEO_MODE_NTSC;
			if (sid & V4L2_STD_SECAM)
				tun->mode = VIDEO_MODE_SECAM;
		}
		if (tun2.capability & V4L2_TUNER_CAP_LOW)
			tun->flags |= VIDEO_TUNER_LOW;
		if (tun2.rxsubchans & V4L2_TUNER_SUB_STEREO)
			tun->flags |= VIDEO_TUNER_STEREO_ON;
		tun->signal = tun2.signal;

		break;
	}

	case VIDIOCSFREQ: {
		unsigned long *freq = arg;
		struct v4l2_frequency freq2 = { 0, };

		result = v4l2_ioctl(fd, VIDIOC_G_FREQUENCY, &freq2);
		if (result < 0)
			break;

		freq2.frequency = *freq;

		result = v4l2_ioctl(fd, VIDIOC_S_FREQUENCY, &freq2);

		break;
	}

	case VIDIOCGFREQ: {
		unsigned long *freq = arg;
		struct v4l2_frequency freq2 = { 0, };

		freq2.tuner = 0;
		result = v4l2_ioctl(fd, VIDIOC_G_FREQUENCY, &freq2);
		if (result < 0)
			break;
		if (0 == result)
			*freq = freq2.frequency;

		break;
	}

	case VIDIOCCAPTURE: {
		int *on = arg;
		enum v4l2_buf_type captype = V4L2_BUF_TYPE_VIDEO_CAPTURE;

		if (0 == *on) {
			/* dirty hack time.  But v4l1 has no STREAMOFF
			 * equivalent in the API, and this one at
			 * least comes close ... */
			v4l2_ioctl(fd, VIDIOC_STREAMOFF, &captype);
		}

		result = v4l2_ioctl(fd, VIDIOC_OVERLAY, on);

		break;
	}

	case VIDIOCSAUDIO: {
		struct video_audio *aud = arg;
		struct v4l2_audio aud2 = { 0, };
		struct v4l2_tuner tun2 = { 0, };

		aud2.index = aud->audio;
		result = v4l2_ioctl(fd, VIDIOC_S_AUDIO, &aud2);
		if (result < 0)
			break;

		v4l2_set_control(fd, V4L2_CID_AUDIO_VOLUME,
			aud->volume);
		v4l2_set_control(fd, V4L2_CID_AUDIO_BASS,
			aud->bass);
		v4l2_set_control(fd, V4L2_CID_AUDIO_TREBLE,
			aud->treble);
		v4l2_set_control(fd, V4L2_CID_AUDIO_BALANCE,
			aud->balance);
		v4l2_set_control(fd, V4L2_CID_AUDIO_MUTE,
			!!(aud->flags & VIDEO_AUDIO_MUTE));

		result = v4l2_ioctl(fd, VIDIOC_G_TUNER, &tun2);
		if (result == 0) {
			switch (aud->mode) {
			default:
			case VIDEO_SOUND_MONO:
			case VIDEO_SOUND_LANG1:
				tun2.audmode = V4L2_TUNER_MODE_MONO;
				break;
			case VIDEO_SOUND_STEREO:
				tun2.audmode = V4L2_TUNER_MODE_STEREO;
				break;
			case VIDEO_SOUND_LANG2:
				tun2.audmode = V4L2_TUNER_MODE_LANG2;
				break;
			}
			result = v4l2_ioctl(fd, VIDIOC_S_TUNER, &tun2);
		}
		/* Ignore errors modifying the tuner settings. */
		result = 0;
		break;
	}

	case VIDIOCGAUDIO: {
		int i;
		struct video_audio *aud = arg;
		struct v4l2_queryctrl qctrl2;
		struct v4l2_audio aud2 = { 0, };
		struct v4l2_tuner tun2 = { 0, };

		result = v4l2_ioctl(fd, VIDIOC_G_AUDIO, &aud2);
		if (result < 0)
			break;

		memcpy(aud->name, aud2.name,
			min(sizeof(aud->name), sizeof(aud2.name)));
		aud->name[sizeof(aud->name) - 1] = 0;
		aud->audio = aud2.index;
		aud->flags = 0;
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_VOLUME);
		if (i >= 0) {
			aud->volume = i;
			aud->flags |= VIDEO_AUDIO_VOLUME;
		}
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_BASS);
		if (i >= 0) {
			aud->bass = i;
			aud->flags |= VIDEO_AUDIO_BASS;
		}
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_TREBLE);
		if (i >= 0) {
			aud->treble = i;
			aud->flags |= VIDEO_AUDIO_TREBLE;
		}
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_BALANCE);
		if (i >= 0) {
			aud->balance = i;
			aud->flags |= VIDEO_AUDIO_BALANCE;
		}
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_MUTE);
		if (i >= 0) {
			if (i)
				aud->flags |= VIDEO_AUDIO_MUTE;

			aud->flags |= VIDEO_AUDIO_MUTABLE;
		}
		aud->step = 1;
		qctrl2.id = V4L2_CID_AUDIO_VOLUME;
		if (v4l2_ioctl(fd, VIDIOC_QUERYCTRL, &qctrl2) == 0 &&
			!(qctrl2.flags & V4L2_CTRL_FLAG_DISABLED))
			aud->step = qctrl2.step;
		aud->mode = 0;

		result = v4l2_ioctl(fd, VIDIOC_G_TUNER, &tun2);
		if (result < 0) {
			result = 0;
			break;
		}

		if (tun2.rxsubchans & V4L2_TUNER_SUB_LANG2)
			aud->mode = VIDEO_SOUND_LANG1 | VIDEO_SOUND_LANG2;
		else if (tun2.rxsubchans & V4L2_TUNER_SUB_STEREO)
			aud->mode = VIDEO_SOUND_STEREO;
		else if (tun2.rxsubchans & V4L2_TUNER_SUB_MONO)
			aud->mode = VIDEO_SOUND_MONO;

		break;
	}

	case VIDIOCSVBIFMT: {
		struct vbi_format *fmt = arg;
		struct v4l2_format fmt2 = { 0, };

		if (VIDEO_PALETTE_RAW != fmt->sample_format) {
			result = -EINVAL;
			break;
		}

		fmt2.type = V4L2_BUF_TYPE_VBI_CAPTURE;
		fmt2.fmt.vbi.samples_per_line = fmt->samples_per_line;
		fmt2.fmt.vbi.sampling_rate    = fmt->sampling_rate;
		fmt2.fmt.vbi.sample_format    = V4L2_PIX_FMT_GREY;
		fmt2.fmt.vbi.start[0]         = fmt->start[0];
		fmt2.fmt.vbi.count[0]         = fmt->count[0];
		fmt2.fmt.vbi.start[1]         = fmt->start[1];
		fmt2.fmt.vbi.count[1]         = fmt->count[1];
		fmt2.fmt.vbi.flags            = fmt->flags;

		result = v4l2_ioctl(fd, VIDIOC_TRY_FMT, &fmt2);
		if (result < 0)
			break;

		if (fmt2.fmt.vbi.samples_per_line != fmt->samples_per_line ||
		    fmt2.fmt.vbi.sampling_rate    != fmt->sampling_rate    ||
		    fmt2.fmt.vbi.sample_format    != V4L2_PIX_FMT_GREY     ||
		    fmt2.fmt.vbi.start[0]         != fmt->start[0]         ||
		    fmt2.fmt.vbi.count[0]         != fmt->count[0]         ||
		    fmt2.fmt.vbi.start[1]         != fmt->start[1]         ||
		    fmt2.fmt.vbi.count[1]         != fmt->count[1]         ||
		    fmt2.fmt.vbi.flags            != fmt->flags) {
			result = -EINVAL;
			break;
		}
		result = v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt2);
		break;
	}

	case VIDIOCGVBIFMT: {
		struct vbi_format *fmt = arg;
		struct v4l2_format fmt2 = { 0, };

		fmt2.type = V4L2_BUF_TYPE_VBI_CAPTURE;
		result = v4l2_ioctl(fd, VIDIOC_G_FMT, &fmt2);

		if (result < 0)
			break;

		if (fmt2.fmt.vbi.sample_format != V4L2_PIX_FMT_GREY) {
			result = -EINVAL;
			break;
		}

		fmt->samples_per_line = fmt2.fmt.vbi.samples_per_line;
		fmt->sampling_rate    = fmt2.fmt.vbi.sampling_rate;
		fmt->sample_format    = VIDEO_PALETTE_RAW;
		fmt->start[0]         = fmt2.fmt.vbi.start[0];
		fmt->count[0]         = fmt2.fmt.vbi.count[0];
		fmt->start[1]         = fmt2.fmt.vbi.start[1];
		fmt->count[1]         = fmt2.fmt.vbi.count[1];
		fmt->flags            = fmt2.fmt.vbi.flags & 0x03;

		break;
	}

	default:
		/* Pass through libv4l2 for applications which are using v4l2 through
		   libv4l1 (this can happen with the v4l1compat.so wrapper preloaded) */
		result = v4l2_ioctl(fd, request, arg);
		break;
	}

	if (stream_locked)
		pthread_mutex_unlock(&devices[index].stream_lock);

	saved_err = errno;
	v4l1_log_ioctl(request, arg, result);
	errno = saved_err;

	return result;
}
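
Putting the emulation above to use, a legacy capture loop typically runs VIDIOCGMBUF, v4l1_mmap(), VIDIOCMCAPTURE and VIDIOCSYNC in sequence. Below is a minimal sketch against the libv4l1 wrappers; the device node, frame size and palette are illustrative assumptions, and the V4L1 structure definitions come from <libv4l1-videodev.h> as shipped by v4l-utils (older systems used <linux/videodev.h>).

/* Minimal capture sketch; all device parameters are assumptions. */
#include <stdio.h>
#include <fcntl.h>
#include <sys/mman.h>
#include <libv4l1.h>
#include <libv4l1-videodev.h>

int main(void)
{
	struct video_mbuf mbuf;
	struct video_mmap map;
	void *buf;
	int frame = 0;
	int fd = v4l1_open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	if (v4l1_ioctl(fd, VIDIOCGMBUF, &mbuf) < 0)
		return 1;
	/* Maps the anonymous frame buffer set up in the VIDIOCGMBUF case. */
	buf = v4l1_mmap(NULL, mbuf.size, PROT_READ | PROT_WRITE,
			MAP_SHARED, fd, 0);
	if (buf == MAP_FAILED)
		return 1;

	map.frame  = frame;
	map.width  = 320;
	map.height = 240;
	map.format = VIDEO_PALETTE_RGB24;
	if (v4l1_ioctl(fd, VIDIOCMCAPTURE, &map) == 0 &&
	    v4l1_ioctl(fd, VIDIOCSYNC, &frame) == 0)
		printf("frame ready at offset %d\n", mbuf.offsets[frame]);

	v4l1_munmap(buf, mbuf.size);
	v4l1_close(fd);
	return 0;
}
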
Code Example #4
File: libv4l1.c  Project: Distrotech/v4l-utils
int v4l1_open(const char *file, int oflag, ...)
{
	int index, fd;
	char *lfname;
	struct v4l2_capability cap2;
	struct v4l2_format fmt2;
	struct v4l2_input input2;
	struct v4l2_standard standard2;
	int v4l_device = 0;

	/* check if we're opening a video4linux2 device */
	if (!strncmp(file, "/dev/video", 10) || !strncmp(file, "/dev/v4l/", 9)) {
		/* Some apps open the device read only, but we need rw rights as the
		   buffers *MUST* be mapped rw */
		oflag = (oflag & ~O_ACCMODE) | O_RDWR;
		v4l_device = 1;
	}

	/* original open code */
	if (oflag & O_CREAT) {
		va_list ap;
		mode_t mode;

		va_start(ap, oflag);
		mode = va_arg(ap, PROMOTED_MODE_T);

		fd = SYS_OPEN(file, oflag, mode);

		va_end(ap);
	} else {
		fd = SYS_OPEN(file, oflag, 0);
	}

	/* end of original open code */

	if (fd == -1 || !v4l_device)
		return fd;

	/* check that this is a v4l2 device, no need to emulate v4l1 on
	   a v4l1 device */
	if (SYS_IOCTL(fd, VIDIOC_QUERYCAP, &cap2))
		return fd;

	/* If no log file was set by the app, see if one was specified through the
	   environment */
	if (!v4l1_log_file) {
		lfname = getenv("LIBV4L1_LOG_FILENAME");
		if (lfname)
			v4l1_log_file = fopen(lfname, "w");
	}

	/* redirect libv4l2 log messages to our logfile if no libv4l2 logfile is
	   specified */
	if (!v4l2_log_file)
		v4l2_log_file = v4l1_log_file;

	/* Register with libv4l2, as we use that to do format conversion and read()
	   emulation for us */
	if (v4l2_fd_open(fd, 0) == -1) {
		int saved_err = errno;

		SYS_CLOSE(fd);
		errno = saved_err;
		return -1;
	}

	/* Get initial width, height and pixelformat */
	fmt2.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (v4l2_ioctl(fd, VIDIOC_G_FMT, &fmt2)) {
		int saved_err = errno;

		SYS_CLOSE(fd);
		errno = saved_err;
		return -1;
	}

	/* So we have a device on which we can (and want to) emulate v4l1, register
	   it in our devices array */
	pthread_mutex_lock(&v4l1_open_mutex);
	for (index = 0; index < V4L1_MAX_DEVICES; index++)
		if (devices[index].fd == -1) {
			devices[index].fd = fd;
			break;
		}
	pthread_mutex_unlock(&v4l1_open_mutex);

	if (index == V4L1_MAX_DEVICES) {
		V4L1_LOG_ERR("attempting to open more then %d video devices\n",
				V4L1_MAX_DEVICES);
		v4l2_close(fd);
		errno = EBUSY;
		return -1;
	}

	if (index >= devices_used)
		devices_used = index + 1;

	devices[index].flags = 0;
	devices[index].open_count = 1;
	devices[index].v4l1_frame_buf_map_count = 0;
	devices[index].v4l1_frame_pointer = MAP_FAILED;
	devices[index].width  = fmt2.fmt.pix.width;
	devices[index].height = fmt2.fmt.pix.height;
	devices[index].v4l2_pixfmt = fmt2.fmt.pix.pixelformat;
	devices[index].v4l1_pal = pixelformat_to_palette(fmt2.fmt.pix.pixelformat);
	devices[index].depth = ((fmt2.fmt.pix.bytesperline << 3) +
			(fmt2.fmt.pix.width - 1)) / fmt2.fmt.pix.width;

	v4l1_find_min_and_max_size(index, &fmt2);

	/* Check ENUM_INPUT and ENUM_STD support */
	input2.index = 0;
	if (v4l2_ioctl(fd, VIDIOC_ENUMINPUT, &input2) == 0)
		devices[index].flags |= V4L1_SUPPORTS_ENUMINPUT;

	standard2.index = 0;
	if (v4l2_ioctl(fd, VIDIOC_ENUMSTD, &standard2) == 0)
		devices[index].flags |= V4L1_SUPPORTS_ENUMSTD;

	V4L1_LOG("open: %d\n", fd);

	return fd;
}
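
As a usage note, v4l1_open() is a drop-in replacement for open(): the returned fd is then used with v4l1_ioctl(), v4l1_read() and v4l1_close(). A minimal sketch that opens an assumed /dev/video0 and queries the emulated V4L1 capabilities:

/* Minimal usage sketch; "/dev/video0" is an assumed device node. */
#include <stdio.h>
#include <fcntl.h>
#include <libv4l1.h>
#include <libv4l1-videodev.h>

int main(void)
{
	struct video_capability cap;
	int fd = v4l1_open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	if (v4l1_ioctl(fd, VIDIOCGCAP, &cap) == 0)
		printf("%s: %dx%d to %dx%d, %d channel(s)\n", cap.name,
		       cap.minwidth, cap.minheight,
		       cap.maxwidth, cap.maxheight, cap.channels);
	v4l1_close(fd);
	return 0;
}
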