コード例 #1
0
ファイル: capture.c プロジェクト: grimmohe/libfg2
/* Grab a single frame from the device into fr->data.
 * Waits (with FG_READ_TIMEOUT seconds per select()) until the descriptor
 * is readable, then reads one frame and stamps fr->timestamp.
 * Returns 0 on success, -1 on select error, timeout or read error. */
int fg_grab_frame(fg_grabber *fg, fg_frame *fr)
{
    for (;;)
    {
        fd_set readfds;
        struct timeval timeout;
        int ready;

        FD_ZERO(&readfds);
        FD_SET(fg->fd, &readfds);

        timeout.tv_sec  = FG_READ_TIMEOUT;
        timeout.tv_usec = 0;

        ready = select(fg->fd + 1, &readfds, NULL, NULL, &timeout);

        if (ready == -1)
        {
            /* Interrupted by a signal: just retry the wait. */
            if (errno == EINTR)
                continue;

            fg_debug_error("fg_grab_frame(): grabbing frame failed");
            return -1;
        }

        if (ready == 0)
        {
            fg_debug_error("fg_grab_frame(): frame grabbing timeout reached");
            return -1;
        }

        /* Descriptor is readable: attempt to pull one frame. */
        if (v4l2_read(fg->fd, fr->data, fr->length) != -1)
        {
            gettimeofday(&(fr->timestamp), NULL);
            return 0;
        }

        /* EAGAIN means no complete frame yet; go back to waiting. */
        if (errno != EAGAIN)
        {
            fg_debug_error(
                "fg_grab_frame(): error reading from device");
            return -1;
        }
    }

    return -1; /* not reached */
}
コード例 #2
0
ファイル: libv4l1.c プロジェクト: Distrotech/v4l-utils
/* read() entry point for the v4l1 compatibility layer.
 * Descriptors not managed by libv4l1 go straight to the raw syscall;
 * managed ones are read through libv4l2 under the device stream lock. */
ssize_t v4l1_read(int fd, void *buffer, size_t n)
{
	ssize_t bytes;
	int idx = v4l1_get_index(fd);

	/* Not one of our devices: plain system read. */
	if (idx == -1)
		return SYS_READ(fd, buffer, n);

	pthread_mutex_lock(&devices[idx].stream_lock);
	bytes = v4l2_read(fd, buffer, n);
	pthread_mutex_unlock(&devices[idx].stream_lock);

	return bytes;
}
コード例 #3
0
ファイル: access.c プロジェクト: 0xheart0/vlc
/* Poll the device, then read one block of sys->blocksize bytes.
 * Returns NULL on poll failure, allocation failure or read error
 * (a read error also flags end-of-file on the access object). */
static block_t *ReadBlock (access_t *access)
{
    access_sys_t *sys = access->p_sys;

    if (AccessPoll (access))
        return NULL;

    block_t *blk = block_Alloc (sys->blocksize);
    if (unlikely(blk == NULL))
        return NULL;

    ssize_t bytes = v4l2_read (sys->fd, blk->p_buffer, blk->i_buffer);
    if (bytes >= 0)
    {
        /* Shrink the block to what was actually read. */
        blk->i_buffer = bytes;
        return blk;
    }

    block_Release (blk);
    msg_Err (access, "cannot read buffer: %s", vlc_strerror_c(errno));
    access->info.b_eof = true;
    return NULL;
}
コード例 #4
0
ファイル: access.c プロジェクト: CSRedRat/vlc
/* Read up to len bytes from the capture stream.
 * Polls in 500 ms slices so the object's liveness can be re-checked;
 * returns 0 at EOF or object death, -1 on poll failure, otherwise the
 * byte count from v4l2_read() (updating the stream position). */
static ssize_t AccessReadStream( access_t *access, uint8_t *buf, size_t len )
{
    demux_sys_t *sys = (demux_sys_t *)access->p_sys;
    struct pollfd ufd;
    int ret;

    ufd.fd = sys->i_fd;
    ufd.events = POLLIN;

    if( access->info.b_eof )
        return 0;

    /* FIXME: kill timeout and vlc_object_alive() */
    for( ;; )
    {
        if( !vlc_object_alive(access) )
            return 0;

        ufd.revents = 0;
        ret = poll( &ufd, 1, 500 );
        if( ret != 0 )   /* 0 = timeout slice expired, poll again */
            break;
    }

    if( ret < 0 )
    {
        if( errno != EINTR )
            msg_Err( access, "poll error: %m" );
        return -1;
    }

    ret = v4l2_read( sys->i_fd, buf, len );
    if( ret == 0 )
        access->info.b_eof = true;
    else if( ret > 0 )
        access->info.i_pos += ret;

    return ret;
}
コード例 #5
0
// Read up to `size` bytes from the device into `p`.
// Goes through the libv4l2 conversion wrapper when it is enabled,
// otherwise issues the raw read() system call.
int v4l2::read(unsigned char *p, int size)
{
	return useWrapper() ? v4l2_read(m_fd, p, size)
	                    : ::read(m_fd, p, size);
}
コード例 #6
0
/* Read exactly one full image (obj->sizeimage bytes) from the device
 * into @buf using the read() I/O method.
 *
 * Loops: poll the object, then v4l2_read(). Short reads and
 * EAGAIN/EINTR restart the loop; only a full-size read breaks out.
 * On success the buffer is resized to the amount read and GST_FLOW_OK
 * is returned; on poll or read failure the buffer is unmapped, resized
 * to 0, and the error flow is returned. */
static GstFlowReturn
gst_v4l2_do_read (GstV4l2BufferPool * pool, GstBuffer * buf)
{
  GstFlowReturn res;
  GstV4l2Object *obj = pool->obj;
  gint amount;
  GstMapInfo map;
  gint toread;

  toread = obj->sizeimage;

  GST_LOG_OBJECT (pool, "reading %d bytes into buffer %p", toread, buf);

  gst_buffer_map (buf, &map, GST_MAP_WRITE);

  do {
    /* Wait until the device reports a frame is available. */
    if ((res = gst_v4l2_object_poll (obj)) != GST_FLOW_OK)
      goto poll_error;

    amount = v4l2_read (obj->video_fd, map.data, toread);

    if (amount == toread) {
      break;
    } else if (amount == -1) {
      /* Transient conditions: retry the poll/read cycle. */
      if (errno == EAGAIN || errno == EINTR) {
        continue;
      } else
        goto read_error;
    } else {
      /* short reads can happen if a signal interrupts the read */
      continue;
    }
  } while (TRUE);

  GST_LOG_OBJECT (pool, "read %d bytes", amount);
  gst_buffer_unmap (buf, &map);
  gst_buffer_resize (buf, 0, amount);

  return GST_FLOW_OK;

  /* ERRORS */
poll_error:
  {
    GST_DEBUG ("poll error %s", gst_flow_get_name (res));
    goto cleanup;
  }
read_error:
  {
    GST_ELEMENT_ERROR (obj->element, RESOURCE, READ,
        (_("Error reading %d bytes from device '%s'."),
            toread, obj->videodev), GST_ERROR_SYSTEM);
    res = GST_FLOW_ERROR;
    goto cleanup;
  }
cleanup:
  {
    /* Shared error exit: release the mapping and mark the buffer empty. */
    gst_buffer_unmap (buf, &map);
    gst_buffer_resize (buf, 0, 0);
    return res;
  }
}
コード例 #7
0
    // Wait for the next frame and return it.
    //
    // NOTE(review): aTimeout is currently unused — the select() below runs
    // with a fixed 2 s interval and a timeout simply retries forever.
    // There is still no way to signal a timeout to the caller (zero buffer?).
    const Buffer& LockFrame( const float aTimeout = -1.0f )
    {
        if( mIsLocked ) THROW( "already locked!" );
        mIsLocked = true;

        // wait until the descriptor becomes readable
        while( true )
        {
            fd_set fds;
            FD_ZERO( &fds);
            FD_SET( mFd, &fds );

            timeval tv;
            tv.tv_sec = 2;
            tv.tv_usec = 0;

            int r = select( mFd + 1, &fds, NULL, NULL, &tv);
            // BUG FIX: the outer test used to be (-1 == r && EINTR == errno),
            // so a select() failure with any other errno skipped this branch
            // and was silently treated as "fd readable". Enter on any error.
            if( -1 == r )
            {
                if( EINTR == errno ) 
                    continue;
                THROW( "select() error" );
            }

            // timeout: retry
            if( 0 == r ) continue;

            // fd readable
            break;
        }

        if( mIO == READ )
        {
            // read() I/O: frame always lands in the single buffer 0
            if( -1 == v4l2_read( mFd, mBuffers[0].start, mBuffers[0].length) ) 
            {
                if( errno != EAGAIN && errno != EIO )
                    THROW( "read() error" );
            }

            mLockedFrame.start = mBuffers[0].start;
            mLockedFrame.length = mBuffers[0].length;
        }
        else
        {
            // streaming I/O: dequeue a filled buffer from the driver
            memset( &mLockedBuffer, 0, sizeof(mLockedBuffer) );
            mLockedBuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            mLockedBuffer.memory = ( mIO == MMAP ? V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR );
            if( -1 == v4l2_ioctl( mFd, VIDIOC_DQBUF, &mLockedBuffer) ) 
            {
                if( errno != EAGAIN && errno != EIO )
                    THROW( "ioctl() error" );
            }

            size_t i;
            if( mIO == USERPTR )
            {
                // only given pointers, find corresponding index
                for( i = 0; i < mBuffers.size(); ++i )
                {
                    if( mLockedBuffer.m.userptr == (unsigned long)mBuffers[i].start &&
                        mLockedBuffer.length == mBuffers[i].length )
                    {
                        break;
                    }
                }
            }
            else
            {
                i = mLockedBuffer.index;
            }

            if( i >= mBuffers.size() )
                THROW( "buffer index out of range" );

            mLockedFrame.start = mBuffers[i].start;
            mLockedFrame.length = mLockedBuffer.bytesused;            
        }

        return mLockedFrame;
    }
コード例 #8
0
ファイル: demux.c プロジェクト: CSRedRat/vlc
/* Tear down the V4L2 demuxer: stop capture, release the video buffers
 * for whichever I/O method was in use, free the controls and close the
 * device descriptor. */
void DemuxClose( vlc_object_t *obj )
{
    demux_t *demux = (demux_t *)obj;
    demux_sys_t *sys = demux->p_sys;
    int fd = sys->i_fd;

    /* Stop video capture */
    switch( sys->io )
    {
        case IO_METHOD_READ:
            /* Nothing to do */
            break;

        case IO_METHOD_MMAP:
        case IO_METHOD_USERPTR:
        {
            /* NOTE: Some buggy drivers hang if buffers are not unmapped before
             * streamoff */
            for( unsigned i = 0; i < sys->i_nbuffers; i++ )
            {
                struct v4l2_buffer buf = {
                    .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
                    .memory = ( sys->io == IO_METHOD_USERPTR ) ?
                    V4L2_MEMORY_USERPTR : V4L2_MEMORY_MMAP,
                };
                /* Dequeue every buffer; errors are deliberately ignored
                 * since we are shutting down anyway. */
                v4l2_ioctl( fd, VIDIOC_DQBUF, &buf );
            }
            enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            v4l2_ioctl( sys->i_fd, VIDIOC_STREAMOFF, &buf_type );
            break;
        }
    }

    /* Free Video Buffers */
    if( sys->p_buffers ) {
        switch( sys->io )
        {
        case IO_METHOD_READ:
            /* read() mode uses a single heap buffer */
            free( sys->p_buffers[0].start );
            break;

        case IO_METHOD_MMAP:
            for( unsigned i = 0; i < sys->i_nbuffers; ++i )
                v4l2_munmap( sys->p_buffers[i].start,
                             sys->p_buffers[i].length );
            break;

        case IO_METHOD_USERPTR:
            for( unsigned i = 0; i < sys->i_nbuffers; ++i )
               free( sys->p_buffers[i].start );
            break;
        }
        free( sys->p_buffers );
    }

    ControlsDeinit( obj, sys->controls );
    v4l2_close( fd );
    free( sys );
}

/* Handle demux control queries for the live capture source.
 * Pausing, seeking and pace control are never possible; PTS delay comes
 * from the "live-caching" setting; time is simply the current clock. */
static int DemuxControl( demux_t *demux, int query, va_list args )
{
    switch( query )
    {
        /* Special for access_demux */
        case DEMUX_CAN_PAUSE:
        case DEMUX_CAN_SEEK:
        case DEMUX_CAN_CONTROL_PACE:
        {
            bool *flag = va_arg( args, bool * );
            *flag = false;
            return VLC_SUCCESS;
        }

        case DEMUX_GET_PTS_DELAY:
        {
            int64_t *delay = va_arg( args, int64_t * );
            *delay = INT64_C(1000)
                * var_InheritInteger( demux, "live-caching" );
            return VLC_SUCCESS;
        }

        case DEMUX_GET_TIME:
        {
            int64_t *now = va_arg( args, int64_t * );
            *now = mdate();
            return VLC_SUCCESS;
        }

        /* TODO implement others */
        default:
            return VLC_EGENERIC;
    }

    return VLC_EGENERIC;
}

/** Gets a frame in read/write mode */
static block_t *BlockRead( vlc_object_t *obj, int fd, size_t size )
{
    block_t *block = block_Alloc( size );
    if( unlikely(block == NULL) )
        return NULL;

    ssize_t val = v4l2_read( fd, block->p_buffer, size );
    if( val == -1 )
    {
        block_Release( block );
        switch( errno )
        {
            case EAGAIN:
                return NULL;
            case EIO: /* could be ignored per specification */
                /* fall through */
            default:
                msg_Err( obj, "cannot read frame: %m" );
                return NULL;
        }
    }
    block->i_buffer = val;
    return block;
}
コード例 #9
0
ファイル: v4l2convert.c プロジェクト: kangear/v4l-utils
/* Exported read() override: every read on a wrapped descriptor is
 * routed through libv4l2 so format conversion can happen transparently. */
LIBV4L_PUBLIC ssize_t read(int fd, void *buffer, size_t n)
{
	ssize_t ret = v4l2_read(fd, buffer, n);
	return ret;
}
コード例 #10
0
ファイル: v4l2grab.c プロジェクト: twam/v4l2grab
/**
	read single frame

	Reads or dequeues one frame using whichever I/O method was compiled
	in (read(), mmap or user pointers) and hands it to imageProcess().

	Returns 0 when no frame is ready yet (EAGAIN), 1 on success;
	any other error terminates the process via errno_exit().
*/
static int frameRead(void)
{
    struct v4l2_buffer buf;
#ifdef IO_USERPTR
    unsigned int i;
#endif

    switch (io) {
#ifdef IO_READ
    case IO_METHOD_READ:
        if (-1 == v4l2_read(fd, buffers[0].start, buffers[0].length)) {
            switch (errno) {
            case EAGAIN:
                return 0;

            case EIO:
            // Could ignore EIO, see spec.
            // fall through

            default:
                errno_exit("read");
            }
        }

        /* read() delivers no timestamp, so synthesize one from the
         * monotonic clock. */
        struct timespec ts;
        struct timeval timestamp;
        clock_gettime(CLOCK_MONOTONIC,&ts);
        timestamp.tv_sec = ts.tv_sec;
        timestamp.tv_usec = ts.tv_nsec/1000;

        imageProcess(buffers[0].start,timestamp);
        break;
#endif

#ifdef IO_MMAP
    case IO_METHOD_MMAP:
        CLEAR(buf);

        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;

        /* Dequeue a filled buffer from the driver. */
        if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
            switch (errno) {
            case EAGAIN:
                return 0;

            case EIO:
            // Could ignore EIO, see spec
            // fall through

            default:
                errno_exit("VIDIOC_DQBUF");
            }
        }

        assert(buf.index < n_buffers);

        imageProcess(buffers[buf.index].start,buf.timestamp);

        /* Hand the buffer back to the driver for reuse. */
        if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
            errno_exit("VIDIOC_QBUF");

        break;
#endif

#ifdef IO_USERPTR
    case IO_METHOD_USERPTR:
        CLEAR (buf);

        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_USERPTR;

        if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
            switch (errno) {
            case EAGAIN:
                return 0;

            case EIO:
            // Could ignore EIO, see spec.
            // fall through

            default:
                errno_exit("VIDIOC_DQBUF");
            }
        }

        /* The driver only reports the user pointer; map it back to our
         * buffer index by matching start address and length. */
        for (i = 0; i < n_buffers; ++i)
            if (buf.m.userptr == (unsigned long)buffers[i].start && buf.length == buffers[i].length)
                break;

        assert (i < n_buffers);

        imageProcess((void *)buf.m.userptr,buf.timestamp);

        if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
            errno_exit("VIDIOC_QBUF");
        break;
#endif
    }

    return 1;
}
コード例 #11
0
// Read up to bufferSize bytes from the capture device through libv4l2.
size_t V4l2ReadCapture::read(char* buffer, size_t bufferSize)
{
	size_t bytes = v4l2_read(m_fd, buffer,  bufferSize);
	return bytes;
}
コード例 #12
0
ファイル: svv.c プロジェクト: engie/robo_gui
/*
 * Read or dequeue one frame and pass it to process_image().
 * Returns 0 when no frame is ready (EAGAIN), 1 on success; any other
 * error terminates via errno_exit().
 *
 * NOTE(review): the switch mixes enum namespaces — IO_METHOD_READ in the
 * first case vs V4L2_MEMORY_MMAP / V4L2_MEMORY_USERPTR in the others.
 * This only works if `io` (declared elsewhere) actually holds those
 * V4L2_MEMORY_* values and they do not collide with IO_METHOD_READ —
 * TODO confirm against the definition of `io`.
 */
static int read_frame(void)
{
	struct v4l2_buffer buf;
	int i;

	switch (io) {
	case IO_METHOD_READ:
		/* read() I/O: the whole frame lands in buffers[0] */
		i = v4l2_read(fd, buffers[0].start, buffers[0].length);
		if (i < 0) {
			switch (errno) {
			case EAGAIN:
				return 0;
			case EIO:
				/* Could ignore EIO, see spec. */
				/* fall through */
			default:
				errno_exit("read");
			}
		}
		process_image(buffers[0].start, i);
		break;

	case V4L2_MEMORY_MMAP:
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;

		/* Dequeue a filled buffer from the driver. */
		if (v4l2_ioctl(fd, VIDIOC_DQBUF, &buf) < 0) {
			switch (errno) {
			case EAGAIN:
				return 0;
			case EIO:
				/* Could ignore EIO, see spec. */
				/* fall through */
			default:
				errno_exit("VIDIOC_DQBUF");
			}
		}
		assert(buf.index < n_buffers);

		process_image(buffers[buf.index].start, buf.bytesused);

		/* Give the buffer back to the driver for reuse. */
		if (v4l2_ioctl(fd, VIDIOC_QBUF, &buf) < 0)
			errno_exit("VIDIOC_QBUF");
		break;
	case V4L2_MEMORY_USERPTR:
		CLEAR(buf);

		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_USERPTR;

		if (v4l2_ioctl(fd, VIDIOC_DQBUF, &buf) < 0) {
			switch (errno) {
			case EAGAIN:
				return 0;
			case EIO:
				/* Could ignore EIO, see spec. */
				/* fall through */
			default:
				errno_exit("VIDIOC_DQBUF");
			}
		}

		/* Map the returned user pointer back to our buffer index by
		 * matching address and length. */
		for (i = 0; i < n_buffers; ++i)
			if (buf.m.userptr == (unsigned long) buffers[i].start
				&& buf.length == buffers[i].length)
				break;
		assert(i < n_buffers);

		process_image((unsigned char *) buf.m.userptr,
				buf.bytesused);

		if (v4l2_ioctl(fd, VIDIOC_QBUF, &buf) < 0)
			errno_exit("VIDIOC_QBUF");
		break;
	}
	return 1;
}
コード例 #13
0
ファイル: libv4l1.c プロジェクト: Distrotech/v4l-utils
/* Emulate the V4L1 ioctl() interface on top of V4L2 via libv4l2.
 *
 * Requests that touch the capture format or stream state are serialized
 * with the per-device stream lock. Requests we do not recognise are
 * passed through to libv4l2 unchanged (for apps mixing v4l1/v4l2 calls).
 * Returns the ioctl result; errno is preserved across the trace logging.
 */
int v4l1_ioctl(int fd, unsigned long int request, ...)
{
	void *arg;
	va_list ap;
	int result, index, saved_err, stream_locked = 0;

	va_start(ap, request);
	arg = va_arg(ap, void *);
	va_end(ap);

	index = v4l1_get_index(fd);
	if (index == -1)
		return SYS_IOCTL(fd, request, arg);

	/* Apparently the kernel and / or glibc ignore the 32 most significant bits
	   when long = 64 bits, and some applications pass an int holding the req to
	   ioctl, causing it to get sign extended, depending upon this behavior */
	request = (unsigned int)request;

	/* do we need to take the stream lock for this ioctl? */
	switch (request) {
	case VIDIOCSPICT:
	case VIDIOCGPICT:
	case VIDIOCSWIN:
	case VIDIOCGWIN:
	case VIDIOCGMBUF:
	case VIDIOCMCAPTURE:
	case VIDIOCSYNC:
	case VIDIOC_S_FMT:
		pthread_mutex_lock(&devices[index].stream_lock);
		stream_locked = 1;
	}

	switch (request) {
	case VIDIOCGCAP: {
		struct video_capability *cap = arg;
		struct v4l2_framebuffer fbuf = { 0, };
		struct v4l2_capability cap2 = { { 0 }, };

		result = v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap2);
		if (result < 0)
			break;

		if (cap2.capabilities & V4L2_CAP_DEVICE_CAPS)
			cap2.capabilities = cap2.device_caps;
		if (cap2.capabilities & V4L2_CAP_VIDEO_OVERLAY) {
			/* Overlay capable: fetch the framebuffer info for the
			   clipping capability bit; failure is non-fatal. */
			result = v4l2_ioctl(fd, VIDIOC_G_FBUF, &fbuf);
			if (result < 0)
				memset(&fbuf, 0, sizeof(fbuf));
			result = 0;
		}

		memcpy(cap->name, cap2.card,
		       min(sizeof(cap->name), sizeof(cap2.card)));

		cap->name[sizeof(cap->name) - 1] = 0;

		if (cap2.capabilities & V4L2_CAP_VIDEO_CAPTURE)
			cap->type |= VID_TYPE_CAPTURE;
		if (cap2.capabilities & V4L2_CAP_TUNER)
			cap->type |= VID_TYPE_TUNER;
		if (cap2.capabilities & V4L2_CAP_VBI_CAPTURE)
			cap->type |= VID_TYPE_TELETEXT;
		if (cap2.capabilities & V4L2_CAP_VIDEO_OVERLAY)
			cap->type |= VID_TYPE_OVERLAY;
		if (fbuf.capability & V4L2_FBUF_CAP_LIST_CLIPPING)
			cap->type |= VID_TYPE_CLIPPING;

		cap->channels  = count_inputs(fd);
		cap->minwidth  = devices[index].min_width;
		cap->minheight = devices[index].min_height;
		cap->maxwidth  = devices[index].max_width;
		cap->maxheight = devices[index].max_height;
		break;
	}

	case VIDIOCSPICT: {
		struct video_picture *pic = arg;

		devices[index].flags |= V4L1_PIX_FMT_TOUCHED;

		v4l2_set_control(fd, V4L2_CID_BRIGHTNESS, pic->brightness);
		v4l2_set_control(fd, V4L2_CID_HUE, pic->hue);
		v4l2_set_control(fd, V4L2_CID_CONTRAST, pic->contrast);
		v4l2_set_control(fd, V4L2_CID_SATURATION, pic->colour);
		v4l2_set_control(fd, V4L2_CID_WHITENESS, pic->whiteness);

		result = v4l1_set_format(index, devices[index].width,
				devices[index].height, pic->palette, 0);
		break;
	}

	case VIDIOCGPICT: {
		struct video_picture *pic = arg;
		int i;

		/* If our v4l2 pixformat has no corresponding v4l1 palette, and
		   the app has not touched the pixformat sofar, try setting a
		   palette which does (and which we emulate when necessary) so
		   that applications which just query the current format and
		   then take whatever they get will work */
		if (!(devices[index].flags & V4L1_PIX_FMT_TOUCHED) &&
		    !pixelformat_to_palette(devices[index].v4l2_pixfmt))
			v4l1_set_format(index, devices[index].width,
					devices[index].height,
					VIDEO_PALETTE_RGB24,
					(devices[index].flags &
					 V4L1_PIX_SIZE_TOUCHED) ? 0 : 1);

		devices[index].flags |= V4L1_PIX_FMT_TOUCHED;

		memset(pic, 0, sizeof(*pic));
		pic->depth = devices[index].depth;
		pic->palette = devices[index].v4l1_pal;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_HUE);
		if (i >= 0)
			pic->hue = i;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_SATURATION);
		if (i >= 0)
			pic->colour = i;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_CONTRAST);
		if (i >= 0)
			pic->contrast = i;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_WHITENESS);
		if (i >= 0)
			pic->whiteness = i;
		i = v4l2_get_control(devices[index].fd, V4L2_CID_BRIGHTNESS);
		if (i >= 0)
			pic->brightness = i;

		result = 0;
		break;
	}

	case VIDIOCSWIN:
	case VIDIOCGWIN: {
		struct video_window *win = arg;

		devices[index].flags |= V4L1_PIX_SIZE_TOUCHED;

		if (request == VIDIOCSWIN)
			result = v4l1_set_format(index, win->width, win->height, -1, 1);
		else
			result = 0;

		if (result == 0) {
			win->x = 0;
			win->y = 0;
			win->width  = devices[index].width;
			win->height = devices[index].height;
			win->flags = 0;
		}
		break;
	}

	case VIDIOCGCHAN: {
		struct video_channel *chan = arg;

		/* Set some defaults */
		chan->tuners = 0;
		chan->flags = 0;
		chan->type = VIDEO_TYPE_CAMERA;
		chan->norm = 0;

		if (devices[index].flags & V4L1_SUPPORTS_ENUMINPUT) {
			struct v4l2_input input2 = { .index = chan->channel };

			result = v4l2_ioctl(fd, VIDIOC_ENUMINPUT, &input2);
			if (result < 0)
				break;

			snprintf(chan->name, sizeof(chan->name), "%s",
				 (char *)input2.name);
			if (input2.type == V4L2_INPUT_TYPE_TUNER) {
				chan->tuners = 1;
				chan->type = VIDEO_TYPE_TV;
				chan->flags = VIDEO_VC_TUNER;
			}
		} else {
			/* No ENUMINPUT support, fake it. */
			if (chan->channel == 0) {
				snprintf(chan->name, sizeof(chan->name),
					 "Camera");
				result = 0;
			} else {
				errno  = EINVAL;
				result = -1;
				break;
			}
		}

		/* In case of no ENUMSTD support, ignore the norm member of the
		   channel struct */
		if (devices[index].flags & V4L1_SUPPORTS_ENUMSTD) {
			v4l2_std_id sid;

			result = v4l2_ioctl(fd, VIDIOC_G_STD, &sid);
			if (result < 0)
				break;

			if (sid & V4L2_STD_PAL)
				chan->norm = VIDEO_MODE_PAL;
			if (sid & V4L2_STD_NTSC)
				chan->norm = VIDEO_MODE_NTSC;
			if (sid & V4L2_STD_SECAM)
				chan->norm = VIDEO_MODE_SECAM;
			if (sid == V4L2_STD_ALL)
				chan->norm = VIDEO_MODE_AUTO;
		}
		break;
	}

	case VIDIOCSCHAN: {
		struct video_channel *chan = arg;

		if (devices[index].flags & V4L1_SUPPORTS_ENUMINPUT) {
			result = v4l2_ioctl(fd, VIDIOC_S_INPUT, &chan->channel);
			if (result < 0)
				break;
		} else {
			/* No ENUMINPUT support, assume a single input */
			if (chan->channel != 0) {
				errno  = EINVAL;
				result = -1;
				break;
			}
			result = 0;
		}

		/* In case of no ENUMSTD support, ignore the norm member of the
		   channel struct */
		if (devices[index].flags & V4L1_SUPPORTS_ENUMSTD) {
			v4l2_std_id sid = 0;

			switch (chan->norm) {
			case VIDEO_MODE_PAL:
				sid = V4L2_STD_PAL;
				break;
			case VIDEO_MODE_NTSC:
				sid = V4L2_STD_NTSC;
				break;
			case VIDEO_MODE_SECAM:
				sid = V4L2_STD_SECAM;
				break;
			case VIDEO_MODE_AUTO:
				sid = V4L2_STD_ALL;
				break;
			}

			if (sid)
				result = v4l2_ioctl(fd, VIDIOC_S_STD, &sid);
		}
		break;
	}

	case VIDIOCGMBUF: {
		/* When VIDIOCGMBUF is done, we don't necessarrily know the format the
		   application wants yet (with some apps this is passed for the first
		   time through VIDIOCMCAPTURE), so we just create an anonymous mapping
		   that should be large enough to hold any sort of frame. Note this only
		   takes virtual memory, and does not use memory until actually used. */
		int i;
		struct video_mbuf *mbuf = arg;

		mbuf->size = V4L1_NO_FRAMES * V4L1_FRAME_BUF_SIZE;
		mbuf->frames = V4L1_NO_FRAMES;
		for (i = 0; i < mbuf->frames; i++)
			mbuf->offsets[i] = i * V4L1_FRAME_BUF_SIZE;

		if (devices[index].v4l1_frame_pointer == MAP_FAILED) {
			devices[index].v4l1_frame_pointer = (void *)SYS_MMAP(NULL,
					(size_t)mbuf->size,
					PROT_READ | PROT_WRITE,
					MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
			if (devices[index].v4l1_frame_pointer == MAP_FAILED) {
				saved_err = errno;
				V4L1_LOG_ERR("allocating v4l1 buffer: %s\n", strerror(errno));
				errno = saved_err;
				result = -1;
				break;
			}
			V4L1_LOG("allocated v4l1 buffer @ %p\n",
					devices[index].v4l1_frame_pointer);
		}
		result = 0;
		break;
	}

	case VIDIOCMCAPTURE: {
		struct video_mmap *map = arg;

		devices[index].flags |= V4L1_PIX_FMT_TOUCHED |
			V4L1_PIX_SIZE_TOUCHED;

		result = v4l1_set_format(index, map->width, map->height,
				map->format, 0);
		break;
	}

	case VIDIOCSYNC: {
		int *frame_index = arg;

		if (devices[index].v4l1_frame_pointer == MAP_FAILED ||
				*frame_index < 0 || *frame_index >= V4L1_NO_FRAMES) {
			errno = EINVAL;
			result = -1;
			break;
		}

		/* Synchronous capture: read the frame straight into the slot
		   of the emulated mmap buffer the app asked for. */
		result = v4l2_read(devices[index].fd,
				devices[index].v4l1_frame_pointer +
				*frame_index * V4L1_FRAME_BUF_SIZE,
				V4L1_FRAME_BUF_SIZE);
		result = (result > 0) ? 0 : result;
		break;
	}

		/* We are passing through v4l2 calls to libv4l2 for applications which are
		   using v4l2 through libv4l1 (possible with the v4l1compat.so wrapper).

		   So the application could be calling VIDIOC_S_FMT, in this case update
		   our own bookkeeping of the cam's format. Note that this really only is
		   relevant if an application is mixing and matching v4l1 and v4l2 calls,
		   which is crazy, but better safe then sorry. */
	case VIDIOC_S_FMT: {
		struct v4l2_format *fmt2 = arg;

		result = v4l2_ioctl(fd, request, arg);

		if (result == 0 && fmt2->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
			if (devices[index].v4l2_pixfmt != fmt2->fmt.pix.pixelformat) {
				devices[index].v4l2_pixfmt = fmt2->fmt.pix.pixelformat;
				devices[index].v4l1_pal =
					pixelformat_to_palette(fmt2->fmt.pix.pixelformat);
			}
			devices[index].width  = fmt2->fmt.pix.width;
			devices[index].height = fmt2->fmt.pix.height;
		}
		break;
	}

	case VIDIOCGFBUF: {
		struct video_buffer *buffer = arg;
		struct v4l2_framebuffer fbuf = { 0, };

		result = v4l2_ioctl(fd, VIDIOC_G_FBUF, &fbuf);
		if (result < 0)
			break;

		buffer->base = fbuf.base;
		buffer->height = fbuf.fmt.height;
		buffer->width = fbuf.fmt.width;

		switch (fbuf.fmt.pixelformat) {
		case V4L2_PIX_FMT_RGB332:
			buffer->depth = 8;
			break;
		case V4L2_PIX_FMT_RGB555:
			buffer->depth = 15;
			break;
		case V4L2_PIX_FMT_RGB565:
			buffer->depth = 16;
			break;
		case V4L2_PIX_FMT_BGR24:
			buffer->depth = 24;
			break;
		case V4L2_PIX_FMT_BGR32:
			buffer->depth = 32;
			break;
		default:
			buffer->depth = 0;
		}

		if (fbuf.fmt.bytesperline) {
			buffer->bytesperline = fbuf.fmt.bytesperline;
			if (!buffer->depth && buffer->width)
				buffer->depth = ((fbuf.fmt.bytesperline << 3)
						+ (buffer->width - 1))
						/ buffer->width;
		} else {
			/* BUG FIX: this used to be "& 7", which combined with
			   the ">>= 3" below always produced 0. Round the bit
			   count up to a whole byte instead. */
			buffer->bytesperline =
				(buffer->width * buffer->depth + 7) & ~7;
			buffer->bytesperline >>= 3;
		}
		break;
	}

	case VIDIOCSFBUF: {
		struct video_buffer *buffer = arg;
		struct v4l2_framebuffer fbuf = { 0, };

		fbuf.base = buffer->base;
		fbuf.fmt.height = buffer->height;
		fbuf.fmt.width = buffer->width;

		switch (buffer->depth) {
		case 8:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_RGB332;
			break;
		case 15:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_RGB555;
			break;
		case 16:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_RGB565;
			break;
		case 24:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_BGR24;
			break;
		case 32:
			fbuf.fmt.pixelformat = V4L2_PIX_FMT_BGR32;
			break;
		}

		fbuf.fmt.bytesperline = buffer->bytesperline;
		result = v4l2_ioctl(fd, VIDIOC_S_FBUF, &fbuf);
		break;
	}

	case VIDIOCSTUNER: {
		struct video_tuner *tun = arg;
		struct v4l2_tuner t = { 0, };

		t.index = tun->tuner;
		result = v4l2_ioctl(fd, VIDIOC_S_TUNER, &t);

		break;
	}

	case VIDIOCGTUNER: {
		int i;
		struct video_tuner *tun = arg;
		struct v4l2_tuner tun2 = { 0, };
		struct v4l2_standard std2 = { 0, };
		v4l2_std_id sid;

		result = v4l2_ioctl(fd, VIDIOC_G_TUNER, &tun2);
		if (result < 0)
			break;

		memcpy(tun->name, tun2.name,
			min(sizeof(tun->name), sizeof(tun2.name)));
		tun->name[sizeof(tun->name) - 1] = 0;
		tun->rangelow = tun2.rangelow;
		tun->rangehigh = tun2.rangehigh;
		tun->flags = 0;
		tun->mode = VIDEO_MODE_AUTO;

		/* Derive the supported-standards flags from the enumerated
		   standards list (bounded to 64 entries). */
		for (i = 0; i < 64; i++) {
			std2.index = i;
			if (0 != v4l2_ioctl(fd, VIDIOC_ENUMSTD, &std2))
				break;
			if (std2.id & V4L2_STD_PAL)
				tun->flags |= VIDEO_TUNER_PAL;
			if (std2.id & V4L2_STD_NTSC)
				tun->flags |= VIDEO_TUNER_NTSC;
			if (std2.id & V4L2_STD_SECAM)
				tun->flags |= VIDEO_TUNER_SECAM;
		}

		if (v4l2_ioctl(fd, VIDIOC_G_STD, &sid) == 0) {
			if (sid & V4L2_STD_PAL)
				tun->mode = VIDEO_MODE_PAL;
			if (sid & V4L2_STD_NTSC)
				tun->mode = VIDEO_MODE_NTSC;
			if (sid & V4L2_STD_SECAM)
				tun->mode = VIDEO_MODE_SECAM;
		}
		if (tun2.capability & V4L2_TUNER_CAP_LOW)
			tun->flags |= VIDEO_TUNER_LOW;
		if (tun2.rxsubchans & V4L2_TUNER_SUB_STEREO)
			tun->flags |= VIDEO_TUNER_STEREO_ON;
		tun->signal = tun2.signal;

		break;
	}

	case VIDIOCSFREQ: {
		unsigned long *freq = arg;
		struct v4l2_frequency freq2 = { 0, };

		result = v4l2_ioctl(fd, VIDIOC_G_FREQUENCY, &freq2);
		if (result < 0)
			break;

		freq2.frequency = *freq;

		result = v4l2_ioctl(fd, VIDIOC_S_FREQUENCY, &freq2);

		break;
	}

	case VIDIOCGFREQ: {
		unsigned long *freq = arg;
		struct v4l2_frequency freq2 = { 0, };

		freq2.tuner = 0;
		result = v4l2_ioctl(fd, VIDIOC_G_FREQUENCY, &freq2);
		if (result < 0)
			break;
		if (0 == result)
			*freq = freq2.frequency;

		break;
	}

	case VIDIOCCAPTURE: {
		int *on = arg;
		enum v4l2_buf_type captype = V4L2_BUF_TYPE_VIDEO_CAPTURE;

		if (0 == *on) {
		/* dirty hack time.  But v4l1 has no STREAMOFF
		* equivalent in the API, and this one at
		* least comes close ... */
			v4l2_ioctl(fd, VIDIOC_STREAMOFF, &captype);
		}

		result = v4l2_ioctl(fd, VIDIOC_OVERLAY, on);

		break;
	}

	case VIDIOCSAUDIO: {
		struct video_audio *aud = arg;
		struct v4l2_audio aud2 = { 0, };
		struct v4l2_tuner tun2 = { 0, };

		aud2.index = aud->audio;
		result = v4l2_ioctl(fd, VIDIOC_S_AUDIO, &aud2);
		if (result < 0)
			break;

		v4l2_set_control(fd, V4L2_CID_AUDIO_VOLUME,
			aud->volume);
		v4l2_set_control(fd, V4L2_CID_AUDIO_BASS,
			aud->bass);
		v4l2_set_control(fd, V4L2_CID_AUDIO_TREBLE,
			aud->treble);
		v4l2_set_control(fd, V4L2_CID_AUDIO_BALANCE,
			aud->balance);
		v4l2_set_control(fd, V4L2_CID_AUDIO_MUTE,
			!!(aud->flags & VIDEO_AUDIO_MUTE));

		result = v4l2_ioctl(fd, VIDIOC_G_TUNER, &tun2);
		if (result == 0) {
			switch (aud->mode) {
			default:
			case VIDEO_SOUND_MONO:
			case VIDEO_SOUND_LANG1:
				tun2.audmode = V4L2_TUNER_MODE_MONO;
				break;
			case VIDEO_SOUND_STEREO:
				tun2.audmode = V4L2_TUNER_MODE_STEREO;
				break;
			case VIDEO_SOUND_LANG2:
				tun2.audmode = V4L2_TUNER_MODE_LANG2;
				break;
			}
			result = v4l2_ioctl(fd, VIDIOC_S_TUNER, &tun2);
		}
		/* Ignore errors modifying the tuner settings. */
		result = 0;
		break;
	}

	case VIDIOCGAUDIO: {
		int i;
		struct video_audio *aud = arg;
		struct v4l2_queryctrl qctrl2;
		struct v4l2_audio aud2 = { 0, };
		struct v4l2_tuner tun2;

		result = v4l2_ioctl(fd, VIDIOC_G_AUDIO, &aud2);
		if (result < 0)
			break;

		memcpy(aud->name, aud2.name,
			min(sizeof(aud->name), sizeof(aud2.name)));
		aud->name[sizeof(aud->name) - 1] = 0;
		aud->audio = aud2.index;
		aud->flags = 0;
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_VOLUME);
		if (i >= 0) {
			aud->volume = i;
			aud->flags |= VIDEO_AUDIO_VOLUME;
		}
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_BASS);
		if (i >= 0) {
			aud->bass = i;
			aud->flags |= VIDEO_AUDIO_BASS;
		}
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_TREBLE);
		if (i >= 0) {
			aud->treble = i;
			aud->flags |= VIDEO_AUDIO_TREBLE;
		}
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_BALANCE);
		if (i >= 0) {
			aud->balance = i;
			aud->flags |= VIDEO_AUDIO_BALANCE;
		}
		i = v4l2_get_control(fd, V4L2_CID_AUDIO_MUTE);
		if (i >= 0) {
			if (i)
				aud->flags |= VIDEO_AUDIO_MUTE;

			aud->flags |= VIDEO_AUDIO_MUTABLE;
		}
		aud->step = 1;
		qctrl2.id = V4L2_CID_AUDIO_VOLUME;
		if (v4l2_ioctl(fd, VIDIOC_QUERYCTRL, &qctrl2) == 0 &&
			!(qctrl2.flags & V4L2_CTRL_FLAG_DISABLED))
			aud->step = qctrl2.step;
		aud->mode = 0;

		result = v4l2_ioctl(fd, VIDIOC_G_TUNER, &tun2);
		if (result < 0) {
			/* No tuner: leave mode at 0 and report success. */
			result = 0;
			break;
		}

		if (tun2.rxsubchans & V4L2_TUNER_SUB_LANG2)
			aud->mode = VIDEO_SOUND_LANG1 | VIDEO_SOUND_LANG2;
		else if (tun2.rxsubchans & V4L2_TUNER_SUB_STEREO)
			aud->mode = VIDEO_SOUND_STEREO;
		else if (tun2.rxsubchans & V4L2_TUNER_SUB_MONO)
			aud->mode = VIDEO_SOUND_MONO;

		break;
	}

	case VIDIOCSVBIFMT: {
		struct vbi_format *fmt = arg;
		struct v4l2_format fmt2 = { 0, };	/* zero-init: only vbi fields set below */

		if (VIDEO_PALETTE_RAW != fmt->sample_format) {
			result = -EINVAL;
			break;
		}

		fmt2.type = V4L2_BUF_TYPE_VBI_CAPTURE;
		fmt2.fmt.vbi.samples_per_line = fmt->samples_per_line;
		fmt2.fmt.vbi.sampling_rate    = fmt->sampling_rate;
		fmt2.fmt.vbi.sample_format    = V4L2_PIX_FMT_GREY;
		fmt2.fmt.vbi.start[0]         = fmt->start[0];
		fmt2.fmt.vbi.count[0]         = fmt->count[0];
		fmt2.fmt.vbi.start[1]         = fmt->start[1];
		fmt2.fmt.vbi.count[1]         = fmt->count[1];
		fmt2.fmt.vbi.flags            = fmt->flags;

		/* BUG FIX: fmt2 was passed by value through the varargs,
		   but ioctl wrappers expect a pointer argument. */
		result  = v4l2_ioctl(fd, VIDIOC_TRY_FMT, &fmt2);
		if (result < 0)
			break;

		if (fmt2.fmt.vbi.samples_per_line != fmt->samples_per_line ||
		    fmt2.fmt.vbi.sampling_rate    != fmt->sampling_rate    ||
		    fmt2.fmt.vbi.sample_format    != V4L2_PIX_FMT_GREY     ||
		    fmt2.fmt.vbi.start[0]         != fmt->start[0]         ||
		    fmt2.fmt.vbi.count[0]         != fmt->count[0]         ||
		    fmt2.fmt.vbi.start[1]         != fmt->start[1]         ||
		    fmt2.fmt.vbi.count[1]         != fmt->count[1]         ||
		    fmt2.fmt.vbi.flags            != fmt->flags) {
			result = -EINVAL;
			break;
		}
		/* BUG FIX: same by-value/pointer mixup as VIDIOC_TRY_FMT above. */
		result = v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt2);
		break;
	}

	case VIDIOCGVBIFMT: {
		struct vbi_format *fmt = arg;
		struct v4l2_format fmt2 = { 0, };

		fmt2.type = V4L2_BUF_TYPE_VBI_CAPTURE;
		result = v4l2_ioctl(fd, VIDIOC_G_FMT, &fmt2);

		if (result < 0)
			break;

		if (fmt2.fmt.vbi.sample_format != V4L2_PIX_FMT_GREY) {
			result = -EINVAL;
			break;
		}

		fmt->samples_per_line = fmt2.fmt.vbi.samples_per_line;
		fmt->sampling_rate    = fmt2.fmt.vbi.sampling_rate;
		fmt->sample_format    = VIDEO_PALETTE_RAW;
		fmt->start[0]         = fmt2.fmt.vbi.start[0];
		fmt->count[0]         = fmt2.fmt.vbi.count[0];
		fmt->start[1]         = fmt2.fmt.vbi.start[1];
		fmt->count[1]         = fmt2.fmt.vbi.count[1];
		fmt->flags            = fmt2.fmt.vbi.flags & 0x03;

		break;
	}

	default:
		/* Pass through libv4l2 for applications which are using v4l2 through
		   libv4l1 (this can happen with the v4l1compat.so wrapper preloaded */
		result = v4l2_ioctl(fd, request, arg);
		break;
	}

	if (stream_locked)
		pthread_mutex_unlock(&devices[index].stream_lock);

	/* Trace the call without letting the logger clobber errno. */
	saved_err = errno;
	v4l1_log_ioctl(request, arg, result);
	errno = saved_err;

	return result;
}
コード例 #14
0
/* Open a V4L2 capture device for the given video-capture resource.
 *
 * Negotiates resolution/format with the device, allocates |buffer_count|
 * (at least 5) frame buffers, and notifies the plugin via OnDeviceInfo.
 * Completion is always reported asynchronously through |callback|;
 * the synchronous return value is PP_OK_COMPLETIONPENDING unless the
 * resource itself is invalid.
 */
int32_t
ppb_video_capture_open(PP_Resource video_capture, PP_Resource device_ref,
                       const struct PP_VideoCaptureDeviceInfo_Dev *requested_info,
                       uint32_t buffer_count, struct PP_CompletionCallback callback)
{
    int32_t result;
    struct pp_video_capture_s *vc = pp_resource_acquire(video_capture, PP_RESOURCE_VIDEO_CAPTURE);
    if (!vc) {
        trace_error("%s, bad resource\n", __func__);
        return PP_ERROR_BADRESOURCE;
    }

    // Use the device named by |device_ref| when it carries a string longname,
    // otherwise fall back to the configured default device.
    const char *capture_device = default_capture_device;
    struct PP_Var longname = ppb_device_ref_get_longname(device_ref);

    if (longname.type == PP_VARTYPE_STRING)
        capture_device = ppb_var_var_to_utf8(longname, NULL);

    vc->fd = v4l2_open(capture_device, O_RDWR);

    ppb_var_release(longname);

    if (vc->fd < 0) {
        result = PP_ERROR_NOACCESS;
        goto point_1;
    }

    struct v4l2_capability caps;
    if (v4l2_ioctl(vc->fd, VIDIOC_QUERYCAP, &caps) != 0) {
        result = PP_ERROR_FAILED;
        goto point_2;
    }

#ifdef V4L2_CAP_DEVICE_CAPS
    // device_caps is only valid when the driver advertises it.
    const uint32_t device_caps = (caps.capabilities & V4L2_CAP_DEVICE_CAPS) ? caps.device_caps
                                                                            : caps.capabilities;
#else
    const uint32_t device_caps = caps.capabilities;
#endif // V4L2_CAP_DEVICE_CAPS

    if (!(device_caps & V4L2_CAP_VIDEO_CAPTURE)) {
        trace_error("%s, device can't capture\n", __func__);
        result = PP_ERROR_FAILED;
        goto point_2;
    }

    // The capture thread uses v4l2_read(), so the read/write interface is required.
    if (!(device_caps & V4L2_CAP_READWRITE)) {
        trace_error("%s, device doesn't support read/write interface\n", __func__);
        result = PP_ERROR_FAILED;
        goto point_2;
    }

    if (requested_info) {
        vc->width =  requested_info->width;
        vc->height = requested_info->height;
        vc->fps =    requested_info->frames_per_second;
    } else {
        // Defaults when the caller did not request a specific mode.
        vc->width =  640;
        vc->height = 480;
        vc->fps =    15;
    }

    struct v4l2_format fmt = {
        .type =                 V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .fmt.pix.width =        vc->width,
        .fmt.pix.height =       vc->height,
        .fmt.pix.pixelformat =  V4L2_PIX_FMT_YUV420,    // PPAPI hardcodes format to YUV420
        .fmt.pix.field =        V4L2_FIELD_INTERLACED,
    };

    if (v4l2_ioctl(vc->fd, VIDIOC_S_FMT, &fmt) != 0) {
        trace_error("%s, failed to set resolution\n", __func__);
        result = PP_ERROR_FAILED;
        goto point_2;
    }

    // The driver may have adjusted the requested dimensions; take what it gave us.
    vc->width =  fmt.fmt.pix.width;
    vc->height = fmt.fmt.pix.height;

    vc->buffer_size = fmt.fmt.pix.sizeimage;    // buffer size in bytes
    vc->buffer_count = MAX(buffer_count, 5);    // limit lowest number of buffers, just in case

    vc->buffers = calloc(sizeof(*vc->buffers), vc->buffer_count);
    if (!vc->buffers) {
        trace_error("%s, memory allocation failure (1)\n", __func__);
        result = PP_ERROR_FAILED;
        goto point_2;
    }

    vc->buffer_is_free = malloc(sizeof(*vc->buffer_is_free) * vc->buffer_count);
    if (!vc->buffer_is_free) {
        trace_error("%s, memory allocation failure (2)\n", __func__);
        result = PP_ERROR_FAILED;
        goto point_3;
    }

    for (unsigned int k = 0; k < vc->buffer_count; k ++) {
        vc->buffer_is_free[k] = 1;
        vc->buffers[k] = ppb_buffer_create(vc->instance->id, vc->buffer_size);
        if (vc->buffers[k] == 0)
            goto point_4;
    }

    struct PP_VideoCaptureDeviceInfo_Dev info = {
        .width =             vc->width,
        .height =            vc->height,
        .frames_per_second = vc->fps,
    };

    vc->ppp_video_capture_dev->OnDeviceInfo(vc->instance->id, video_capture, &info,
                                            vc->buffer_count, vc->buffers);

    result = PP_OK;
    goto point_1;

point_4:
    // Was reached with |result| uninitialized, which made the completion
    // callback receive an indeterminate value (undefined behavior).
    result = PP_ERROR_FAILED;
    // Release only the buffers that were actually created; calloc() left the
    // remaining slots zeroed.
    for (unsigned int k = 0; k < vc->buffer_count; k ++) {
        if (vc->buffers[k] != 0)
            ppb_core_release_resource(vc->buffers[k]);
    }
    free_and_nullify(vc->buffer_is_free);
point_3:
    free_and_nullify(vc->buffers);
point_2:
    v4l2_close(vc->fd);
    vc->fd = -1;
point_1:
    pp_resource_release(video_capture);
    ppb_core_call_on_main_thread2(0, callback, result, __func__);
    return PP_OK_COMPLETIONPENDING;
}

/* Parameters handed from the capture thread to the main thread when a
 * frame buffer has been filled; consumed by on_buffer_ready_comt(). */
struct on_buffer_ready_param_s {
    PP_Instance                            instance;          // owning plugin instance
    PP_Resource                            video_capture;     // capture resource the buffer belongs to
    uint32_t                               buf_idx;           // index into vc->buffers of the ready buffer
    const struct PPP_VideoCapture_Dev_0_1 *ppp_video_capture_dev; // plugin-side callback interface
};

/* Main-thread trampoline: notifies the plugin that buffer |p->buf_idx| is
 * filled. Always frees the parameter block, including when the instance is
 * already gone — the previous early return leaked |p| in that case. */
static
void
on_buffer_ready_comt(void *user_data, int32_t result)
{
    struct on_buffer_ready_param_s *p = user_data;
    struct pp_instance_s *pp_i = tables_get_pp_instance(p->instance);

    // Only deliver the notification if the instance still exists.
    if (pp_i)
        p->ppp_video_capture_dev->OnBufferReady(p->instance, p->video_capture, p->buf_idx);

    g_slice_free1(sizeof(*p), p);
}

/* Capture worker thread started by ppb_video_capture_start_capture().
 *
 * Loop: pick a free buffer, read one frame from the V4L2 device into it
 * (with the resource lock dropped for the blocking read), then schedule an
 * OnBufferReady notification on the main thread. Exits when
 * vc->terminate_thread is set or the resource disappears.
 *
 * NOTE(review): fd/buffer_size are snapshotted before the loop, so the
 * device must stay open for the thread's lifetime — presumably guaranteed
 * by the extra reference taken in start_capture(); verify.
 */
static
void *
video_capture_thread(void *param)
{
    struct pp_video_capture_s *vc = param;

    // Snapshot immutable fields while we still hold the raw pointer.
    PP_Resource  video_capture = vc->self_id;
    PP_Instance  instance = vc->instance->id;
    const int    fd = vc->fd;
    const size_t buffer_size = vc->buffer_size;

    // Re-acquire through the resource table so the loop holds a proper lock.
    vc = pp_resource_acquire(video_capture, PP_RESOURCE_VIDEO_CAPTURE);
    if (!vc)
        goto gone;

    while (!vc->terminate_thread) {
        // find free buffer
        uint32_t buf_idx = (uint32_t)-1;
        for (uint32_t k = 0; k < vc->buffer_count; k ++) {
            if (vc->buffer_is_free[k]) {
                buf_idx = k;
                vc->buffer_is_free[k] = 0;  // claim it before dropping the lock
                break;
            }
        }

        if (buf_idx == (uint32_t)-1) {
            // all buffers are busy, wait for some to free, with resource unlocked
            pp_resource_release(video_capture);
            usleep(10);  // NOTE(review): 10 microseconds — effectively a busy-wait; confirm intent
            vc = pp_resource_acquire(video_capture, PP_RESOURCE_VIDEO_CAPTURE);
            if (!vc)
                goto gone;
            continue;
        }

        PP_Resource buffer = vc->buffers[buf_idx];
        pp_resource_release(video_capture);

        // wait on v4l2_read() with resource unlocked
        // NOTE(review): the final v4l2_read() result is discarded — a failed
        // read still reports the (stale) buffer as ready; confirm acceptable.
        void *ptr = ppb_buffer_map(buffer);
        RETRY_ON_EINTR(v4l2_read(fd, ptr, buffer_size));
        ppb_buffer_unmap(buffer);

        vc = pp_resource_acquire(video_capture, PP_RESOURCE_VIDEO_CAPTURE);
        if (!vc)
            goto gone;

        // Hand the filled buffer to the plugin on the main thread;
        // on_buffer_ready_comt() frees |p|.
        struct on_buffer_ready_param_s *p = g_slice_alloc(sizeof(*p));
        p->instance =               instance;
        p->video_capture =          video_capture;
        p->buf_idx =                buf_idx;
        p->ppp_video_capture_dev =  vc->ppp_video_capture_dev;
        ppb_core_call_on_main_thread2(0, PP_MakeCCB(on_buffer_ready_comt, p), PP_OK, __func__);
    }

    pp_resource_release(video_capture);
    return NULL;

gone:
    trace_error("%s, resource gone\n", __func__);
    return NULL;
}

/* Start the background capture thread for |video_capture|.
 *
 * Idempotent: returns PP_OK immediately if the thread is already running.
 * Emits STARTING/STARTED status callbacks around thread creation.
 * Returns PP_ERROR_BADRESOURCE for an invalid resource, PP_ERROR_FAILED if
 * the device is closed or the thread cannot be created.
 */
int32_t
ppb_video_capture_start_capture(PP_Resource video_capture)
{
    struct pp_video_capture_s *vc = pp_resource_acquire(video_capture, PP_RESOURCE_VIDEO_CAPTURE);
    if (!vc) {
        trace_error("%s, bad resource\n", __func__);
        return PP_ERROR_BADRESOURCE;
    }

    if (vc->thread_started)
        goto done;

    if (vc->fd < 0) {
        trace_error("%s, device is closed\n", __func__);
        pp_resource_release(video_capture);
        return PP_ERROR_FAILED;
    }

    vc->ppp_video_capture_dev->OnStatus(vc->instance->id, video_capture,
                                        PP_VIDEO_CAPTURE_STATUS_STARTING);

    pp_resource_ref(video_capture); // prevents freeing while thread is still running
    if (pthread_create(&vc->thread, NULL, video_capture_thread, vc) != 0) {
        // Previously unchecked: a failed pthread_create() left thread_started
        // set, leaked the extra reference, and reported STARTED anyway.
        trace_error("%s, can't create capture thread\n", __func__);
        pp_resource_unref(video_capture);   // drop the reference taken for the thread
        vc->ppp_video_capture_dev->OnStatus(vc->instance->id, video_capture,
                                            PP_VIDEO_CAPTURE_STATUS_STOPPED);
        pp_resource_release(video_capture);
        return PP_ERROR_FAILED;
    }
    vc->thread_started = 1;

    vc->ppp_video_capture_dev->OnStatus(vc->instance->id, video_capture,
                                        PP_VIDEO_CAPTURE_STATUS_STARTED);

done:
    pp_resource_release(video_capture);
    return PP_OK;
}

/* Return frame buffer |buffer| to the pool of buffers available to the
 * capture thread. Out-of-range indices are silently ignored. */
int32_t
ppb_video_capture_reuse_buffer(PP_Resource video_capture, uint32_t buffer)
{
    struct pp_video_capture_s *vc;

    vc = pp_resource_acquire(video_capture, PP_RESOURCE_VIDEO_CAPTURE);
    if (vc == NULL) {
        trace_error("%s, bad resource\n", __func__);
        return PP_ERROR_BADRESOURCE;
    }

    const int idx_valid = buffer < vc->buffer_count;
    if (idx_valid)
        vc->buffer_is_free[buffer] = 1;

    pp_resource_release(video_capture);
    return PP_OK;
}

/* Stop the background capture thread for |video_capture|.
 *
 * Idempotent: returns PP_OK immediately if no thread is running.
 * Emits STOPPING/STOPPED status callbacks around thread teardown and drops
 * the extra resource reference taken by start_capture().
 */
int32_t
ppb_video_capture_stop_capture(PP_Resource video_capture)
{
    struct pp_video_capture_s *vc = pp_resource_acquire(video_capture, PP_RESOURCE_VIDEO_CAPTURE);
    if (!vc) {
        trace_error("%s, bad resource\n", __func__);
        return PP_ERROR_BADRESOURCE;
    }

    if (!vc->thread_started)
        goto done;

    vc->ppp_video_capture_dev->OnStatus(vc->instance->id, video_capture,
                                        PP_VIDEO_CAPTURE_STATUS_STOPPING);

    vc->terminate_thread = 1;
    // Copy the thread handle: the resource must be released before join,
    // since the capture thread itself acquires the resource and would
    // otherwise deadlock against us.
    pthread_t thread = vc->thread;

    pp_resource_release(video_capture);

    pthread_join(thread, NULL);

    // Re-acquire; the resource may have been destroyed while we waited.
    vc = pp_resource_acquire(video_capture, PP_RESOURCE_VIDEO_CAPTURE);
    if (!vc) {
        trace_error("%s, resource gone\n", __func__);
        return PP_ERROR_BADRESOURCE;
    }

    vc->thread_started = 0;
    vc->terminate_thread = 0;
    vc->ppp_video_capture_dev->OnStatus(vc->instance->id, video_capture,
                                        PP_VIDEO_CAPTURE_STATUS_STOPPED);

    pp_resource_unref(video_capture);   // remove reference made in start_capture()

done:
    pp_resource_release(video_capture);
    return PP_OK;
}

/* Close the capture resource: stop any running capture thread, then tear
 * down the device state. Safe to call on an already-closed resource. */
void
ppb_video_capture_close(PP_Resource video_capture)
{
    // Ensure the worker thread is gone before destroying device state.
    ppb_video_capture_stop_capture(video_capture);

    struct pp_video_capture_s *vc;

    vc = pp_resource_acquire(video_capture, PP_RESOURCE_VIDEO_CAPTURE);
    if (vc == NULL) {
        trace_error("%s, bad resource\n", __func__);
        return;
    }

    ppb_video_capture_destroy(vc);
    pp_resource_release(video_capture);
}


// trace wrappers
TRACE_WRAPPER
PP_Resource
trace_ppb_video_capture_create(PP_Instance instance)
{
    // Log the call — "__func__ + 6" skips the "trace_" prefix — then forward
    // to the real implementation unchanged.
    trace_info("[PPB] {full} %s instance=%d\n", __func__ + 6, instance);

    return ppb_video_capture_create(instance);
}