Code Example #1
gboolean
gst_v4l2_get_output (GstV4l2Object * v4l2object, gint * output)
{
  gint n;

  GST_DEBUG_OBJECT (v4l2object->element, "trying to get output");

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_OUTPUT, &n) < 0)
    goto output_failed;

  *output = n;

  GST_DEBUG_OBJECT (v4l2object->element, "output: %d", n);

  return TRUE;

  /* ERRORS */
output_failed:
  if (v4l2object->vcap.capabilities & V4L2_CAP_TUNER) {
    /* only give a warning message if driver actually claims to have tuner
     * support
     */
    GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
        (_("Failed to get current output on device '%s'. May be it is a radio device"), v4l2object->videodev), GST_ERROR_SYSTEM);
  }
  return FALSE;
}
Code Example #2
File: msv4l2.c  Project: korobool/linphonecdbus
static void msv4l2_detect(MSWebCamManager *obj){
	struct v4l2_capability cap;
	char devname[32];
	int i;
	for(i=0;i<10;++i){
		int fd;
		snprintf(devname,sizeof(devname),"/dev/video%i",i);
		fd=open(devname,O_RDWR);
		if (fd!=-1){
			if (v4l2_ioctl (fd, VIDIOC_QUERYCAP, &cap)==0) {
				/* is a V4L2 device */
				uint32_t camera_caps = cap.capabilities;
#ifdef V4L2_CAP_DEVICE_CAPS
				if (cap.capabilities & V4L2_CAP_DEVICE_CAPS) {
					camera_caps = cap.device_caps;
				}
#endif
				if (((camera_caps & V4L2_CAP_VIDEO_CAPTURE)
#ifdef V4L2_CAP_VIDEO_CAPTURE_MPLANE
					|| (camera_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
#endif
					) && !((camera_caps & V4L2_CAP_VIDEO_OUTPUT)
#ifdef V4L2_CAP_VIDEO_OUTPUT_MPLANE
					|| (camera_caps & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
#endif
					)) {
					MSWebCam *cam=ms_web_cam_new(&v4l2_card_desc);
					cam->name=ms_strdup(devname);
					ms_web_cam_manager_add_cam(obj,cam);
				}
			}
			close(fd);
		}
	}
}
Code Example #3
File: msv4l2.c  Project: korobool/linphonecdbus
static mblk_t * v4lv2_grab_image(V4l2State *s, int poll_timeout_ms){
	struct v4l2_buffer buf;
	unsigned int k;
	mblk_t *ret=NULL;
	memset(&buf,0,sizeof(buf));

	buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	buf.memory = V4L2_MEMORY_MMAP;
	
	/*queue buffers whose ref count is 1, because they are no longer
	used anywhere in the filter chain */
	for(k=0;k<s->frame_max;++k){
		if (s->frames[k]->b_datap->db_ref==1){
			buf.index=k;
			if (-1==v4l2_ioctl (s->fd, VIDIOC_QBUF, &buf))
				ms_warning("VIDIOC_QBUF %i failed: %s",k,  strerror(errno));
			else {
				ms_debug("v4l2: queue buf %i",k);
				/*increment ref count of queued buffer*/
				inc_ref(s->frames[k]);
				s->queued++;
			}
		}
	}

	if (s->queued){
		ret=v4l2_dequeue_ready_buffer(s,poll_timeout_ms);
	}
	return ret;
}
Code Example #4
static
int
video_device_is_usable(const char *dev, char **shortname)
{
    int fd = v4l2_open(dev, O_RDWR);
    if (fd < 0)
        return 0;

    struct v4l2_capability caps;
    if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &caps) != 0)
        goto err_1;

#ifdef V4L2_CAP_DEVICE_CAPS
    const uint32_t device_caps = (caps.capabilities & V4L2_CAP_DEVICE_CAPS) ? caps.device_caps
                                                                            : caps.capabilities;
#else
    const uint32_t device_caps = caps.capabilities;
#endif // V4L2_CAP_DEVICE_CAPS

    if (!(device_caps & V4L2_CAP_VIDEO_CAPTURE))
        goto err_1;

    if (!(device_caps & V4L2_CAP_READWRITE))
        goto err_1;

    *shortname = g_strdup((char *)caps.card);

    v4l2_close(fd);
    return 1;

err_1:
    v4l2_close(fd);
    return 0;
}
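
A caller of video_device_is_usable() would typically probe the usual /dev/videoN nodes in a loop, much like Code Example #2 does. The sketch below is hypothetical (probe_devices is not part of the original source); it assumes it lives in the same file as the helper above, with GLib available for g_free():

#include <stdio.h>
#include <glib.h>

static void
probe_devices(void)
{
    int i;

    for (i = 0; i < 10; i++) {
        char dev[32];
        char *shortname = NULL;

        snprintf(dev, sizeof(dev), "/dev/video%d", i);
        if (video_device_is_usable(dev, &shortname)) {
            /* shortname was allocated with g_strdup() inside the helper */
            printf("%s: usable capture device (%s)\n", dev, shortname);
            g_free(shortname);
        }
    }
}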
Code Example #5
gboolean
gst_v4l2_set_output (GstV4l2Object * v4l2object, gint output)
{
  GST_DEBUG_OBJECT (v4l2object->element, "trying to set output to %d", output);

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_OUTPUT, &output) < 0)
    goto output_failed;

  return TRUE;

  /* ERRORS */
output_failed:
  if (v4l2object->vcap.capabilities & V4L2_CAP_TUNER) {
    /* only give a warning message if driver actually claims to have tuner
     * support
     */
    GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
        (_("Failed to set output %d on device %s."),
            output, v4l2object->videodev), GST_ERROR_SYSTEM);
  }
  return FALSE;
}
Code Example #6
static int tc_v4l2_video_check_capabilities(V4L2Source *vs)
{
    struct v4l2_capability caps;
    int err = 0;

    err = v4l2_ioctl(vs->video_fd, VIDIOC_QUERYCAP, &caps);
    if (err < 0) {
        tc_log_error(MOD_NAME, "driver does not support querying capabilities");
        return TC_ERROR;
    }

    if (!(caps.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        tc_log_error(MOD_NAME, "driver does not support video capture");
        return TC_ERROR;
    }

    if (!(caps.capabilities & V4L2_CAP_STREAMING)) {
        tc_log_error(MOD_NAME, "driver does not support streaming (mmap) video capture");
        return TC_ERROR;
    }

    if (verbose_flag > TC_INFO) {
        tc_log_info(MOD_NAME, "v4l2 video grabbing, driver = %s, device = %s",
                    caps.driver, caps.card);
    }

    return TC_OK;
}
Code Example #7
File: v4l2-input.c  Project: kmoore134/obs-studio
/*
 * List formats for device
 */
static void v4l2_format_list(int dev, obs_property_t *prop)
{
	struct v4l2_fmtdesc fmt;
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.index = 0;
	struct dstr buffer;
	dstr_init(&buffer);

	obs_property_list_clear(prop);

	while (v4l2_ioctl(dev, VIDIOC_ENUM_FMT, &fmt) == 0) {
		dstr_copy(&buffer, (char *) fmt.description);
		if (fmt.flags & V4L2_FMT_FLAG_EMULATED)
			dstr_cat(&buffer, " (Emulated)");

		if (v4l2_to_obs_video_format(fmt.pixelformat)
				!= VIDEO_FORMAT_NONE) {
			obs_property_list_add_int(prop, buffer.array,
					fmt.pixelformat);
			blog(LOG_INFO, "Pixelformat: %s (available)",
			     buffer.array);
		} else {
			blog(LOG_INFO, "Pixelformat: %s (unavailable)",
			     buffer.array);
		}
		fmt.index++;
	}

	dstr_free(&buffer);
}
Code Example #8
File: v4l2_calls.c  Project: zsx/ossbuild
/******************************************************
 * gst_v4l2_get_frequency():
 *   get the current frequency
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l2_get_frequency (GstV4l2Object * v4l2object,
    gint tunernum, gulong * frequency)
{
  struct v4l2_frequency freq = { 0, };

  GstTunerChannel *channel;

  GST_DEBUG_OBJECT (v4l2object->element, "getting current tuner frequency");

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  channel = gst_tuner_get_channel (GST_TUNER (v4l2object->element));

  freq.tuner = tunernum;
  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_FREQUENCY, &freq) < 0)
    goto freq_failed;

  *frequency = freq.frequency * channel->freq_multiplicator;

  return TRUE;

  /* ERRORS */
freq_failed:
  {
    GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
        (_("Failed to get current tuner frequency for device '%s'."),
            v4l2object->videodev), GST_ERROR_SYSTEM);
    return FALSE;
  }
}
Code Example #9
File: capture.c  Project: grimmohe/libfg2
fg_rect fg_get_capture_window(fg_grabber *fg)
{
    fg_rect rect = { 0, 0, 0, 0 };
    struct v4l2_crop crop;

    crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (v4l2_ioctl(fg->fd, VIDIOC_G_CROP, &crop) == -1)
    {
        if (errno == EINVAL)
        {
            fg_debug_error("fg_get_capture_window(): "
                            "device does not support cropping");
            return rect;
        }
        else
        {
            fg_debug_error("fg_get_capture_window(): "
                            "getting cropping window failed");
            return rect;
        }
    }

    rect.left = crop.c.left;
    rect.top = crop.c.top;
    rect.width = crop.c.width;
    rect.height = crop.c.height;

    return rect;
}
Code Example #10
static int tc_v4l2_video_get_capture_buffer_count(V4L2Source *vs)
{
    struct v4l2_requestbuffers reqbuf;
    int err = 0;

    reqbuf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    reqbuf.count  = TC_V4L2_BUFFERS_NUM;

    err = v4l2_ioctl(vs->video_fd, VIDIOC_REQBUFS, &reqbuf);
    if (err < 0) {
        tc_log_perror(MOD_NAME, "VIDIOC_REQBUFS");
        return TC_ERROR;
    }

    vs->buffers_count = TC_MIN(reqbuf.count, TC_V4L2_BUFFERS_NUM);

    if (vs->buffers_count < 2) {
        tc_log_error(MOD_NAME, "not enough buffers for capture");
        return TC_ERROR;
    }

    if (verbose_flag > TC_INFO) {
        tc_log_info(MOD_NAME, "%i buffers available (maximum supported: %i)",
                    vs->buffers_count, TC_V4L2_BUFFERS_NUM);
    }
    return TC_OK;
}
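
Once VIDIOC_REQBUFS has succeeded, each of the reported buffers is normally queried with VIDIOC_QUERYBUF and mapped into user space before capture can start. The sketch below illustrates that follow-up step; map_capture_buffer is a hypothetical helper, assuming the libv4l2 wrappers (v4l2_ioctl, v4l2_mmap) used throughout these examples:

#include <string.h>
#include <sys/mman.h>
#include <libv4l2.h>
#include <linux/videodev2.h>

static void *
map_capture_buffer(int fd, unsigned int index, size_t *length)
{
    struct v4l2_buffer buf;

    memset(&buf, 0, sizeof(buf));
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index  = index;

    /* ask the driver for the offset and size of this buffer */
    if (v4l2_ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
        return MAP_FAILED;

    *length = buf.length;
    return v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
                     MAP_SHARED, fd, buf.m.offset);
}

The returned pointer (checked against MAP_FAILED) can later be released with v4l2_munmap().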
Code Example #11
static gboolean
start_streaming (GstV4l2BufferPool * pool)
{
  GstV4l2Object *obj = pool->obj;

  switch (obj->mode) {
    case GST_V4L2_IO_RW:
      break;
    case GST_V4L2_IO_MMAP:
    case GST_V4L2_IO_USERPTR:
      GST_DEBUG_OBJECT (pool, "STREAMON");
      if (v4l2_ioctl (pool->video_fd, VIDIOC_STREAMON, &obj->type) < 0)
        goto start_failed;
      break;
    default:
      g_assert_not_reached ();
      break;
  }

  pool->streaming = TRUE;

  return TRUE;

  /* ERRORS */
start_failed:
  {
    GST_ERROR_OBJECT (pool, "error with STREAMON %d (%s)", errno,
        g_strerror (errno));
    return FALSE;
  }
}
Code Example #12
static gboolean
gst_v4l2_decoder_cmd (GstV4l2Object * v4l2object, guint cmd, guint flags)
{
  struct v4l2_decoder_cmd dcmd = { 0, };

  GST_DEBUG_OBJECT (v4l2object->element,
      "sending v4l2 decoder command %u with flags %u", cmd, flags);

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  dcmd.cmd = cmd;
  dcmd.flags = flags;
  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
    goto dcmd_failed;

  return TRUE;

dcmd_failed:
  if (errno == ENOTTY) {
    GST_INFO_OBJECT (v4l2object->element,
        "Failed to send decoder command %u with flags %u for '%s'. (%s)",
        cmd, flags, v4l2object->videodev, g_strerror (errno));
  } else {
    GST_ERROR_OBJECT (v4l2object->element,
        "Failed to send decoder command %u with flags %u for '%s'. (%s)",
        cmd, flags, v4l2object->videodev, g_strerror (errno));
  }
  return FALSE;
}
Code Example #13
File: v4l2_calls.c  Project: zsx/ossbuild
gboolean
gst_v4l2_get_input (GstV4l2Object * v4l2object, gint * input)
{
  gint n;

  GST_DEBUG_OBJECT (v4l2object->element, "trying to get input");

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_INPUT, &n) < 0)
    goto input_failed;

  *input = n;

  GST_DEBUG_OBJECT (v4l2object->element, "input: %d", n);

  return TRUE;

  /* ERRORS */
input_failed:
  {
    GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
        (_("Failed to get current input on device '%s'. May be it is a radio device"), v4l2object->videodev), GST_ERROR_SYSTEM);
    return FALSE;
  }
}
Code Example #14
File: msv4l2.c  Project: Amini-Philips/mediastreamer2
static bool_t v4lv2_try_format( V4l2State *s, struct v4l2_format *fmt, int fmtid){
	
	fmt->type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt->fmt.pix.pixelformat = fmtid;
	fmt->fmt.pix.field = V4L2_FIELD_ANY;

        if (v4l2_ioctl (s->fd, VIDIOC_TRY_FMT, fmt)<0){
		ms_message("VIDIOC_TRY_FMT: %s",strerror(errno));
		return FALSE;
	}
	if (v4l2_ioctl (s->fd, VIDIOC_S_FMT, fmt)<0){
		ms_message("VIDIOC_S_FMT: %s",strerror(errno));
		return FALSE;
	}
	return TRUE;
}
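
A caller of v4lv2_try_format() typically fills in the requested frame size first and then walks a list of candidate pixel formats. The following is a hypothetical sketch (choose_format and its format list are not from the original msv4l2.c), assuming it sits in the same translation unit so that V4l2State, bool_t and v4lv2_try_format() are visible:

static bool_t choose_format(V4l2State *s, struct v4l2_format *fmt, int w, int h){
	memset(fmt, 0, sizeof(*fmt));
	fmt->fmt.pix.width  = w;
	fmt->fmt.pix.height = h;

	/* try candidate formats in order of preference;
	   v4lv2_try_format() fills in type, pixelformat and field */
	if (v4lv2_try_format(s, fmt, V4L2_PIX_FMT_YUV420)) return TRUE;
	if (v4lv2_try_format(s, fmt, V4L2_PIX_FMT_YUYV))   return TRUE;
	if (v4lv2_try_format(s, fmt, V4L2_PIX_FMT_MJPEG))  return TRUE;
	ms_warning("choose_format: no supported pixel format found");
	return FALSE;
}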
Code Example #15
File: v4l2_calls.c  Project: zsx/ossbuild
/******************************************************
 * gst_v4l2_set_attribute():
 *   try to set the value of one specific attribute
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l2_set_attribute (GstV4l2Object * v4l2object,
    int attribute_num, const int value)
{
  struct v4l2_control control = { 0, };

  GST_DEBUG_OBJECT (v4l2object->element, "setting value of attribute %d to %d",
      attribute_num, value);

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  control.id = attribute_num;
  control.value = value;
  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0)
    goto ctrl_failed;

  return TRUE;

  /* ERRORS */
ctrl_failed:
  {
    GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
        (_("Failed to set value %d for control %d on device '%s'."),
            value, attribute_num, v4l2object->videodev), GST_ERROR_SYSTEM);
    return FALSE;
  }
}
Code Example #16
File: v4l2_calls.c  Project: zsx/ossbuild
/******************************************************
 * gst_v4l2_signal_strength():
 *   get the strength of the signal on the current input
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l2_signal_strength (GstV4l2Object * v4l2object,
    gint tunernum, gulong * signal_strength)
{
  struct v4l2_tuner tuner = { 0, };

  GST_DEBUG_OBJECT (v4l2object->element, "trying to get signal strength");

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  tuner.index = tunernum;
  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_TUNER, &tuner) < 0)
    goto tuner_failed;

  *signal_strength = tuner.signal;

  return TRUE;

  /* ERRORS */
tuner_failed:
  {
    GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
        (_("Failed to get signal strength for device '%s'."),
            v4l2object->videodev), GST_ERROR_SYSTEM);
    return FALSE;
  }
}
Code Example #17
File: v4l2_calls.c  Project: zsx/ossbuild
/******************************************************
 * gst_v4l2_get_capabilities():
 *   get the device's capturing capabilities
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l2_get_capabilities (GstV4l2Object * v4l2object)
{
  GstElement *e;

  e = v4l2object->element;

  GST_DEBUG_OBJECT (e, "getting capabilities");

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_QUERYCAP, &v4l2object->vcap) < 0)
    goto cap_failed;

  GST_LOG_OBJECT (e, "driver:      '%s'", v4l2object->vcap.driver);
  GST_LOG_OBJECT (e, "card:        '%s'", v4l2object->vcap.card);
  GST_LOG_OBJECT (e, "bus_info:    '%s'", v4l2object->vcap.bus_info);
  GST_LOG_OBJECT (e, "version:     %08x", v4l2object->vcap.version);
  GST_LOG_OBJECT (e, "capabilites: %08x", v4l2object->vcap.capabilities);

  return TRUE;

  /* ERRORS */
cap_failed:
  {
    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
        (_("Error getting capabilities for device '%s': "
                "It isn't a v4l2 driver. Check if it is a v4l1 driver."),
            v4l2object->videodev), GST_ERROR_SYSTEM);
    return FALSE;
  }
}
Code Example #18
File: capture.c  Project: grimmohe/libfg2
//--------------------------------------------------------------------------
// TODO: fg_*_channel() functions not tested at all yet.
int fg_set_channel( fg_grabber* fg, float freq )
{
    int val, scale;
    struct v4l2_frequency frq;

    if ( !(fg->inputs[fg->input].type & V4L2_INPUT_TYPE_TUNER) )
    {
        fg_debug_error("fg_set_channel(): current source is not a tuner");
        return -1;
    }

    // TODO: is this still correct?
    // The LOW flag means freq is in units of 1/16 kHz, not 1/16 MHz
    if ( fg->tuners[fg->tuner].capability & V4L2_TUNER_CAP_LOW )
        scale = 16000;
    else
        scale = 16;
    val = (int)( freq * scale );

    frq.tuner = fg->inputs[fg->input].tuner;
    frq.type = fg->tuners[fg->tuner].type;
    frq.frequency = val;
    FG_CLEAR(frq.reserved);

    if ( v4l2_ioctl( fg->fd, VIDIOC_S_FREQUENCY, &frq ) < 0 )
    {
        fg_debug_error( "fg_set_channel(): failed to tune channel" );
        return -1;
    }

    return 0;
}
Code Example #19
static gboolean
gst_v4l2_buffer_pool_streamoff (GstV4l2BufferPool * pool)
{
  GstV4l2Object *obj = pool->obj;

  switch (obj->mode) {
    case GST_V4L2_IO_MMAP:
    case GST_V4L2_IO_USERPTR:
    case GST_V4L2_IO_DMABUF:
    case GST_V4L2_IO_DMABUF_IMPORT:
      if (pool->streaming) {
        if (v4l2_ioctl (pool->video_fd, VIDIOC_STREAMOFF, &obj->type) < 0)
          goto streamoff_failed;

        pool->streaming = FALSE;

        GST_DEBUG_OBJECT (pool, "Stopped streaming");
      }
      break;
    default:
      break;
  }

  return TRUE;

streamoff_failed:
  {
    GST_ERROR_OBJECT (pool, "error with STREAMOFF %d (%s)", errno,
        g_strerror (errno));
    return FALSE;
  }
}
Code Example #20
File: v4l.c  Project: flesniak/xilinx-vgain
int v4lCheckFormats(v4lT* s, uint32_t preferredFormat) {
  if( s->fmts )
    free(s->fmts);
  struct v4l2_fmtdesc camFmtDesc;
  memset(&camFmtDesc, 0, sizeof(camFmtDesc));
  camFmtDesc.index = 0;
  camFmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  s->fmtsCount = 0;
  while( v4l2_ioctl(s->cam, VIDIOC_ENUM_FMT, &camFmtDesc) != -1 ) {
    s->fmtsCount++;
    camFmtDesc.index++;
  }
  s->fmts = calloc(s->fmtsCount, sizeof(struct v4l2_fmtdesc));
  int fmtsCaptured = 0;
  while( fmtsCaptured < s->fmtsCount ) {
    s->fmts[fmtsCaptured].index = fmtsCaptured;
    s->fmts[fmtsCaptured].type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if( v4l2_ioctl(s->cam, VIDIOC_ENUM_FMT, s->fmts+fmtsCaptured) == -1 )
      break;
    fmtsCaptured++;
  }
  if( errno != EINVAL || fmtsCaptured < s->fmtsCount ) {
    perror("Error while querying available image formats");
    v4lClose(s);
    return 1;
  }

  s->preferredPixFmtIndex = -1;
  for( int fmt = 0; fmt < s->fmtsCount; fmt++ )
    if( !(s->fmts[fmt].flags & V4L2_FMT_FLAG_EMULATED) && s->fmts[fmt].pixelformat == preferredFormat ) {
      s->preferredPixFmtIndex = fmt;
      break;
    }
  if( s->preferredPixFmtIndex == -1 )
    for( int fmt = 0; fmt < s->fmtsCount; fmt++ )
      if( !(s->fmts[fmt].flags & V4L2_FMT_FLAG_EMULATED) && (s->fmts[fmt].pixelformat == V4L2_PIX_FMT_MJPEG || s->fmts[fmt].pixelformat == V4L2_PIX_FMT_YUYV) ) {
        s->preferredPixFmtIndex = fmt;
        break;
      }
  if( s->preferredPixFmtIndex == -1 ) {
    fprintf(stderr, "No supported video format (YUV/MJPEG)\n");
    v4lClose(s);
    return 1;
  }

  return 0;
}
Code Example #21
File: v4l2-input.c  Project: Bl00drav3n/obs-studio
/*
 * List framerates for device and resolution
 */
static void v4l2_framerate_list(int dev, uint_fast32_t pixelformat,
		uint_fast32_t width, uint_fast32_t height, obs_property_t *prop)
{
	struct v4l2_frmivalenum frmival;
	frmival.pixel_format = pixelformat;
	frmival.width = width;
	frmival.height = height;
	frmival.index = 0;
	struct dstr buffer;
	dstr_init(&buffer);

	obs_property_list_clear(prop);

	obs_property_list_add_int(prop, obs_module_text("LeaveUnchanged"), -1);

	v4l2_ioctl(dev, VIDIOC_ENUM_FRAMEINTERVALS, &frmival);

	switch(frmival.type) {
	case V4L2_FRMIVAL_TYPE_DISCRETE:
		while (v4l2_ioctl(dev, VIDIOC_ENUM_FRAMEINTERVALS,
				&frmival) == 0) {
			float fps = (float) frmival.discrete.denominator /
				frmival.discrete.numerator;
			int pack = v4l2_pack_tuple(frmival.discrete.numerator,
					frmival.discrete.denominator);
			dstr_printf(&buffer, "%.2f", fps);
			obs_property_list_add_int(prop, buffer.array, pack);
			frmival.index++;
		}
		break;
	default:
		blog(LOG_INFO, "Stepwise and Continuous framerates "
			"are currently hardcoded");

		for (const int *packed = v4l2_framerates; *packed; ++packed) {
			int num;
			int denom;
			v4l2_unpack_tuple(&num, &denom, *packed);
			float fps = (float) denom / num;
			dstr_printf(&buffer, "%.2f", fps);
			obs_property_list_add_int(prop, buffer.array, *packed);
		}
		break;
	}

	dstr_free(&buffer);
}
Code Example #22
File: usb_grabber.cpp  Project: acperez/AmbiLed
int USBGrabber::initCaptureFormat() {
  if (v4l2_ioctl(mFd, VIDIOC_G_FMT, &mFmt) < 0) {
    error("failed to determine video format\n");
    return 0;
  }

  return 1;
}
Code Example #23
static int xioctl (int fd, int request, void * arg) {
	int r;

	do r = v4l2_ioctl (fd, request, arg);
	while (-1 == r && EINTR == errno);

	return r;
}
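
xioctl() above is the usual EINTR-safe wrapper around v4l2_ioctl(). A minimal sketch of how it might be used to query a device's capabilities follows; open_and_query is a hypothetical helper, not part of the original snippet:

#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <libv4l2.h>
#include <linux/videodev2.h>

static int open_and_query (const char * dev) {
	struct v4l2_capability cap;
	int fd = v4l2_open (dev, O_RDWR);

	if (fd < 0)
		return -1;

	memset (&cap, 0, sizeof (cap));
	if (xioctl (fd, VIDIOC_QUERYCAP, &cap) < 0) {
		/* retries on EINTR are handled inside xioctl() */
		fprintf (stderr, "VIDIOC_QUERYCAP failed: %s\n", strerror (errno));
		v4l2_close (fd);
		return -1;
	}

	printf ("driver=%s card=%s\n", (char *) cap.driver, (char *) cap.card);
	return fd;
}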
Code Example #24
    std::vector< IO > IOMethods()
    {
        std::vector< IO > supported;

        v4l2_capability cap;
        xioctl( mFd, VIDIOC_QUERYCAP, &cap );

        // test read/write
        if( cap.capabilities & V4L2_CAP_READWRITE )
            supported.push_back( READ );

        if( cap.capabilities & V4L2_CAP_STREAMING )
        {
            v4l2_requestbuffers req;
            int ret = 0;

            // test userptr
            memset( &req, 0, sizeof(req) );
            req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            req.count = 1;
            req.memory = V4L2_MEMORY_USERPTR;
            if( 0 == v4l2_ioctl( mFd, VIDIOC_REQBUFS, &req ) )
            {
                supported.push_back( USERPTR );
                req.count = 0;
                // blind ioctl, some drivers get pissy with count = 0
                v4l2_ioctl( mFd, VIDIOC_REQBUFS, &req );
            }
            
            // test mmap
            memset( &req, 0, sizeof(req) );
            req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            req.count = 1;
            req.memory = V4L2_MEMORY_MMAP;
            if( 0 == v4l2_ioctl( mFd, VIDIOC_REQBUFS, &req ) )
            {
                supported.push_back( MMAP );
                req.count = 0;
                // blind ioctl, some drivers get pissy with count = 0
                v4l2_ioctl( mFd, VIDIOC_REQBUFS, &req );
            }
        }
        
        return supported;
    }
Code Example #25
File: ocv.cpp  Project: turcofran/omfootctrl
int OCV::disable_exposure_auto_priority(const string dev) 
{
  int descriptor = v4l2_open(dev.c_str(), O_RDWR);
  if (descriptor < 0)   // device could not be opened
    return -1;

  v4l2_control c;   // auto exposure control to aperture priority 
  c.id = V4L2_CID_EXPOSURE_AUTO;
  c.value = V4L2_EXPOSURE_APERTURE_PRIORITY; 
  if (v4l2_ioctl(descriptor, VIDIOC_S_CTRL, &c)!=0)
    return -1;
  
  c.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY; // auto priority control to false
  c.value = 0;
  if (v4l2_ioctl(descriptor, VIDIOC_S_CTRL, &c)!=0)
    return -1;
  
  v4l2_close(descriptor);
  return 0;
}
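
Before issuing VIDIOC_S_CTRL it is often worth verifying that the driver actually exposes the control in question. The helper below is a hypothetical sketch (control_is_available is not part of the original ocv.cpp) using the standard VIDIOC_QUERYCTRL ioctl:

#include <string.h>
#include <libv4l2.h>
#include <linux/videodev2.h>

static int control_is_available(int fd, unsigned int id)
{
  struct v4l2_queryctrl qc;

  memset(&qc, 0, sizeof(qc));
  qc.id = id;

  /* an error here means the driver does not know this control id */
  if (v4l2_ioctl(fd, VIDIOC_QUERYCTRL, &qc) != 0)
    return 0;

  /* the control exists but is currently unusable */
  if (qc.flags & V4L2_CTRL_FLAG_DISABLED)
    return 0;

  return 1;
}

For example, disable_exposure_auto_priority() could call control_is_available(descriptor, V4L2_CID_EXPOSURE_AUTO_PRIORITY) before attempting to change that control.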
Code Example #26
File: main.cpp  Project: frc-862/vision
void disableAutoExposure() {
    string vidDevice = "/dev/video";
    vidDevice.append(toString(CAMERA_ID));
    int descriptor = v4l2_open(vidDevice.c_str(), O_RDWR);

    v4l2_control c;
    c.id = V4L2_CID_EXPOSURE_AUTO;
    c.value = camSettings.exposureMode;
    if(v4l2_ioctl(descriptor, VIDIOC_S_CTRL, &c) == 0)
        cout << "Disabled auto exposure" << endl;

    c.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY;
    c.value = 0;
    if(v4l2_ioctl(descriptor, VIDIOC_S_CTRL, &c) == 0)
        cout << "Disabled auto priority" << endl;

    v4l2_close(descriptor);
}
Code Example #27
File: capture.c  Project: grimmohe/libfg2
int fg_get_format(fg_grabber *fg)
{
    if (v4l2_ioctl(fg->fd, VIDIOC_G_FMT, &(fg->format)) == -1)
    {
        fg_debug_error("fg_get_format(): getting video format failed");
        return -1;
    }
    return fg->format.fmt.pix.pixelformat;
}
Code Example #28
bool v4l2::ioctl_exists(unsigned cmd, void *arg)
{
	int err;

	if (useWrapper())
		err = v4l2_ioctl(m_fd, cmd, arg);
	else
		err = ::ioctl(m_fd, cmd, arg);
	return !err || errno != ENOTTY;
}
Code Example #29
File: gstv4l2sink.c  Project: PeterXu/gst-mobile
static void
gst_v4l2sink_sync_crop_fields (GstV4l2Sink * v4l2sink)
{
  if (!v4l2sink->crop_fields_set)
    return;

  if (GST_V4L2_IS_OPEN (v4l2sink->v4l2object)) {

    gint fd = v4l2sink->v4l2object->video_fd;
    struct v4l2_crop crop;

    memset (&crop, 0x00, sizeof (struct v4l2_crop));
    crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;

    if (v4l2_ioctl (fd, VIDIOC_G_CROP, &crop) < 0) {
      GST_WARNING_OBJECT (v4l2sink, "VIDIOC_G_CROP failed");
      return;
    }

    GST_DEBUG_OBJECT (v4l2sink,
        "setting crop: crop_fields_set=0x%02x, top=%d, left=%d, width=%d, height=%d",
        v4l2sink->crop_fields_set,
        v4l2sink->crop.top, v4l2sink->crop.left,
        v4l2sink->crop.width, v4l2sink->crop.height);

    if (v4l2sink->crop_fields_set & RECT_TOP_SET)
      crop.c.top = v4l2sink->crop.top;
    if (v4l2sink->crop_fields_set & RECT_LEFT_SET)
      crop.c.left = v4l2sink->crop.left;
    if (v4l2sink->crop_fields_set & RECT_WIDTH_SET)
      crop.c.width = v4l2sink->crop.width;
    if (v4l2sink->crop_fields_set & RECT_HEIGHT_SET)
      crop.c.height = v4l2sink->crop.height;

    if (v4l2_ioctl (fd, VIDIOC_S_CROP, &crop) < 0) {
      GST_WARNING_OBJECT (v4l2sink, "VIDIOC_S_CROP failed");
      return;
    }

    v4l2sink->crop_fields_set = 0;
    v4l2sink->crop = crop.c;
  }
}
Code Example #30
File: gstv4l2sink.c  Project: PeterXu/gst-mobile
static void
gst_v4l2sink_sync_overlay_fields (GstV4l2Sink * v4l2sink)
{
  if (!v4l2sink->overlay_fields_set)
    return;

  if (GST_V4L2_IS_OPEN (v4l2sink->v4l2object)) {

    gint fd = v4l2sink->v4l2object->video_fd;
    struct v4l2_format format;

    memset (&format, 0x00, sizeof (struct v4l2_format));
    format.type = V4L2_BUF_TYPE_VIDEO_OVERLAY;

    if (v4l2_ioctl (fd, VIDIOC_G_FMT, &format) < 0) {
      GST_WARNING_OBJECT (v4l2sink, "VIDIOC_G_FMT failed");
      return;
    }

    GST_DEBUG_OBJECT (v4l2sink,
        "setting overlay: overlay_fields_set=0x%02x, top=%d, left=%d, width=%d, height=%d",
        v4l2sink->overlay_fields_set,
        v4l2sink->overlay.top, v4l2sink->overlay.left,
        v4l2sink->overlay.width, v4l2sink->overlay.height);

    if (v4l2sink->overlay_fields_set & RECT_TOP_SET)
      format.fmt.win.w.top = v4l2sink->overlay.top;
    if (v4l2sink->overlay_fields_set & RECT_LEFT_SET)
      format.fmt.win.w.left = v4l2sink->overlay.left;
    if (v4l2sink->overlay_fields_set & RECT_WIDTH_SET)
      format.fmt.win.w.width = v4l2sink->overlay.width;
    if (v4l2sink->overlay_fields_set & RECT_HEIGHT_SET)
      format.fmt.win.w.height = v4l2sink->overlay.height;

    if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0) {
      GST_WARNING_OBJECT (v4l2sink, "VIDIOC_S_FMT failed");
      return;
    }

    v4l2sink->overlay_fields_set = 0;
    v4l2sink->overlay = format.fmt.win.w;
  }
}