Example #1 (score: 0)
/*
 * Initialize a V4L2 video source end-to-end: parse the option string,
 * open the device node read/write, then run the capture setup chain
 * (capabilities, image format, stream parameters, buffer count, buffer
 * mapping) and finally start capturing.
 *
 * Returns TC_ERROR if the device cannot be opened; all other steps
 * propagate their status through RETURN_IF_FAILED.
 * NOTE(review): failures after the open appear to leave video_fd open —
 * confirm that RETURN_IF_FAILED (or the caller) performs cleanup.
 */
static int tc_v4l2_video_init(V4L2Source *vs, 
                           int layout, const char *device,
                           int width, int height, int fps,
                           const char *options)
{
    int ret = tc_v4l2_parse_options(vs, layout, options);
    RETURN_IF_FAILED(ret);

    vs->video_fd = v4l2_open(device, O_RDWR, 0);
    if (vs->video_fd < 0) {
        tc_log_error(MOD_NAME, "cannot open video device %s", device);
        return TC_ERROR;
    }

    ret = tc_v4l2_video_check_capabilities(vs);
    RETURN_IF_FAILED(ret);

    ret = tc_v4l2_video_setup_image_format(vs, width, height);
    RETURN_IF_FAILED(ret);

    ret = tc_v4l2_video_setup_stream_parameters(vs, fps);
    RETURN_IF_FAILED(ret);

    ret = tc_v4l2_video_get_capture_buffer_count(vs);
    RETURN_IF_FAILED(ret);

    ret = tc_v4l2_video_setup_capture_buffers(vs);
    RETURN_IF_FAILED(ret);

    return tc_v4l2_capture_start(vs);
}
/*
 * Probe a V4L2 device node for usability.  A device is usable when it can
 * be opened, answers VIDIOC_QUERYCAP, and advertises both video capture
 * and read/write I/O.  On success a g_strdup()'d copy of the card name is
 * stored in *shortname and 1 is returned; otherwise 0.
 */
static int video_device_is_usable(const char *dev, char **shortname)
{
    int usable = 0;
    int fd = v4l2_open(dev, O_RDWR);

    if (fd < 0)
        return 0;

    struct v4l2_capability caps;
    if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &caps) == 0) {
        /* Prefer device_caps (per-node capabilities) when the driver
         * provides them; fall back to the device-wide capability set. */
#ifdef V4L2_CAP_DEVICE_CAPS
        const uint32_t device_caps =
            (caps.capabilities & V4L2_CAP_DEVICE_CAPS) ? caps.device_caps
                                                       : caps.capabilities;
#else
        const uint32_t device_caps = caps.capabilities;
#endif // V4L2_CAP_DEVICE_CAPS

        if ((device_caps & V4L2_CAP_VIDEO_CAPTURE) &&
            (device_caps & V4L2_CAP_READWRITE)) {
            *shortname = g_strdup((char *)caps.card);
            usable = 1;
        }
    }

    v4l2_close(fd);
    return usable;
}
Example #3 (score: 0)
File: v4l2grab.c — Project: twam/v4l2grab
/**
	open device
*/
/*
 * Open the capture device named by the global `deviceName` into the
 * global `fd`.  The path must exist and be a character device; any
 * failure is reported to stderr and terminates the process.
 */
static void deviceOpen(void)
{
    struct stat st;

    /* The node must exist ... */
    if (stat(deviceName, &st) == -1) {
        fprintf(stderr, "Cannot identify '%s': %d, %s\n", deviceName, errno, strerror(errno));
        exit(EXIT_FAILURE);
    }

    /* ... and be a character device. */
    if (!S_ISCHR(st.st_mode)) {
        fprintf(stderr, "%s is no device\n", deviceName);
        exit(EXIT_FAILURE);
    }

    fd = v4l2_open(deviceName, O_RDWR /* required */ | O_NONBLOCK, 0);
    if (fd == -1) {
        fprintf(stderr, "Cannot open '%s': %d, %s\n", deviceName, errno, strerror(errno));
        exit(EXIT_FAILURE);
    }
}
    /*
     * Open the capture device and pick an IO method.  The requested
     * method is used when the device supports it; otherwise the last
     * ("fastest") supported method is chosen.  Throws on any failure.
     */
    VideoCapture( const std::string& device, const IO aIO = MMAP )
    {
        mFd = v4l2_open( device.c_str(), O_RDWR | O_NONBLOCK, 0);
        if( mFd == - 1 )
            THROW( "can't open " << device << ": " << strerror(errno) );

        // make sure this is a capture device
        v4l2_capability cap;
        xioctl( mFd, VIDIOC_QUERYCAP, &cap );
        if( !(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) )
            THROW("not a video capture device!");

        mSupported = IOMethods();
        if( mSupported.empty() ) THROW("no supported IO methods!");

        // scan the supported methods for the one the caller asked for
        bool haveRequested = false;
        for( size_t idx = 0; !haveRequested && idx < mSupported.size(); ++idx )
            haveRequested = ( mSupported[idx] == aIO );

        // use requested IO if supported, otherwise "fastest"
        mIO = haveRequested ? aIO : mSupported.back();

        mCapturing = false;
        mIsLocked = false;
    }
示例#5
0
/*
 * Open and start the V4L2 capture pipeline under v4l2_lock.
 * Returns 0 on success; on failure the adaptor is shut down again via
 * stop_dev()/dev_close().  Idempotent while already opened.
 */
int DeviceV4L2Base::open_dev(int color_model)
{
	v4l2_lock->lock("DeviceV4L2Base::open_dev");
	int result = 0;
	if( !opened )
	{
		/* Bring the device up in stages; stop at the first failure. */
		result = dev_open();
		if( !result )
			result = v4l2_open(color_model);
		if( !result )
			result = start_dev();
		if( !result )
		{
			/* Reset the queues, then launch the buffer-put thread
			 * and the capture thread (Thread::start on this). */
			qbfrs_lock->reset();
			video_lock->reset();
			getq = new DeviceV4L2BufferQ(total_buffers+1);
			put_thread = new DeviceV4L2Put(this);
			put_thread->start();
			done = 0;
			Thread::start();
		}
		else
			printf("DeviceV4L2Base::open_dev failed\n");
	}
	if( result )
	{
		printf("DeviceV4L2Base::open_dev: adaptor open failed\n");
		stop_dev();
		dev_close();
	}
	else
		opened = 1;
	v4l2_lock->unlock();
	return result;
}
示例#6
0
/*
 * Open the named V4L2 device in non-blocking mode and return its fd.
 * The name is also recorded in the global `dev_name`.  The path must
 * exist and be a character device; any failure terminates the process.
 */
int open_device(char *dev_name_)
{
	struct stat st;
	int fd;

	dev_name = dev_name_;

	if (stat(dev_name, &st) == -1) {
		fprintf(stderr, "Cannot identify '%s': %d, %s\n",
		        dev_name, errno, strerror(errno));
		exit(EXIT_FAILURE);
	}
	if (!S_ISCHR(st.st_mode)) {
		fprintf(stderr, "%s is no device\n", dev_name);
		exit(EXIT_FAILURE);
	}

	fd = v4l2_open(dev_name, O_RDWR /* required */ | O_NONBLOCK, 0);
	if (fd == -1) {
		fprintf(stderr, "Cannot open '%s': %d, %s\n",
		        dev_name, errno, strerror(errno));
		exit(EXIT_FAILURE);
	}

	return fd;
}
示例#7
0
/*
 * Open s->dev read/write, non-blocking, and store the descriptor in
 * s->fd.  Returns 0 on success, -1 (with an error log) on failure;
 * s->fd is left untouched on failure.
 */
static int msv4l2_open(V4l2State *s){
	int devfd = v4l2_open(s->dev, O_RDWR | O_NONBLOCK);
	if (devfd < 0){
		ms_error("Could not open %s: %s",s->dev,strerror(errno));
		return -1;
	}
	s->fd = devfd;
	return 0;
}
示例#8
0
/*
 * Open the named V4L2 device into st->fd.
 * Returns 0 on success, errno on failure.
 */
static int vd_open(struct vidsrc_st *st, const char *device)
{
	st->fd = v4l2_open(device, O_RDWR);
	if (st->fd < 0) {
		/* BUGFIX: "%m" prints strerror(errno) and consumes no argument;
		 * the old call passed a stray `errno` argument that never
		 * matched any conversion specifier. */
		warning("v4l2: open %s: %m\n", device);
		return errno;
	}

	return 0;
}
示例#9
0
//open v4l2 device
/*
 * Open a V4L2 device and reset the state's cached format/buffer lists.
 * Returns 0 on success, 1 on failure.
 */
int v4lOpen(v4lT* s, char* device) {
  s->cam = v4l2_open(device, O_RDWR);
  /* BUGFIX: open() returns -1 on failure; 0 is a valid descriptor.
   * The old `<= 0` check wrongly rejected fd 0. */
  if( s->cam < 0 ) {
    perror("Failed to open the video device");
    return 1;
  }
  s->fmts = 0;
  s->frmSizes = 0;
  s->frmIvals = 0;
  s->bufs = 0;
  return 0;
}
示例#10
0
/*
 * Report the current pixel format through *arg, lazily opening and
 * configuring the device on first use.  Returns 0 on success, -1 if
 * the device cannot be opened.
 */
static int v4l2_get_pixfmt(MSFilter *f, void *arg){
	V4l2State *state = (V4l2State *)f->data;

	/* Open and configure on demand. */
	if (state->fd == -1) {
		if (v4l2_open(state) != 0)
			return -1;
		v4l2_configure(state);
	}

	*(MSPixFmt *)arg = state->pix_fmt;
	return 0;
}
Example #11 (score: 0)
File: UVCVisionCam.cpp — Project: aosp/dvp
// interface methods
/*
 * Open the UVC capture device (device 1 on PANDA builds, otherwise 0),
 * reset the frame container and remember the caller's cookie.
 * Returns STATUS_SUCCESS when the device handle is valid, otherwise
 * STATUS_NO_RESOURCES.
 */
status_e UVCVisionCam::init(void *cookie)
{
#if defined(PANDA)
    int device_num = 1;
#else
    int device_num = 0;
#endif
    m_dev = v4l2_open(device_num, V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING, false_e);
    m_frame.clear();
    m_frame.mCookie = cookie;
    return m_dev ? STATUS_SUCCESS : STATUS_NO_RESOURCES;
}
示例#12
0
/*
 * Filter preprocess hook: make sure the device is open and configured,
 * map the capture buffers, and stamp the capture start time.  On mmap
 * failure the device is closed again.
 */
static void v4l2_preprocess(MSFilter *f){
	V4l2State *state = (V4l2State *)f->data;

	if (state->fd == -1 && v4l2_open(state) != 0)
		return;
	if (!state->configured && v4l2_configure(state) != 0)
		return;

	if (v4l2_do_mmap(state) == 0)
		ms_message("V4L2 video capture started.");
	else
		v4l2_close(state);

	state->start_time = f->ticker->time;
}
示例#13
0
/*
 * Open the configured video device in non-blocking mode.
 * Returns 1 on success; throws std::runtime_error when the open fails.
 */
int Video_in_Manager::OpenDeviceInternal()
{
	if(verbose) printf("OpenDeviceInternal\n");

	this->fd = v4l2_open(this->devName.c_str(), O_RDWR | O_NONBLOCK);
	if(this->fd < 0)
		throw std::runtime_error("Error opening device");

	this->deviceStarted = 0;
	if(verbose) printf("Done opening\n");
	return 1;
}
示例#14
0
/*
 * Open the V4L2 input plugin: open the device, query its capabilities,
 * and set up streaming video capture.  Returns 1 on success, 0 on any
 * failure.
 */
static int v4l2_input_open(input_plugin_t *this_gen) {
    v4l2_input_plugin_t *this = (v4l2_input_plugin_t*) this_gen;
    int ret;

    lprintf("Opening %s\n", this->mrl);
    this->fd = v4l2_open(this->mrl, O_RDWR);
    /* BUGFIX: v4l2_open() returns -1 on failure, which is truthy; the old
     * `if (this->fd)` treated a failed open as success. */
    if (this->fd < 0)
        return 0;

    /* TODO: Clean up this mess */
    this->events = xine_event_new_queue(this->stream);

    ret = v4l2_ioctl(this->fd, VIDIOC_QUERYCAP, &(this->cap));
    if (ret < 0) {
        /* BUGFIX: v4l2_ioctl() returns -1 and sets errno; the old code
         * printed strerror(-ret), i.e. always errno 1. */
        xprintf (this->stream->xine, XINE_VERBOSITY_LOG,
                 LOG_MODULE": capability query failed: %s\n", strerror (errno));
        return 0;
    }

    if (this->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
        this->video = malloc(sizeof(v4l2_video_t));
        if (!this->video)                 /* was unchecked */
            return 0;
        this->video->headerSent = 0;
        this->video->bufcount = 0;
    }

    if (!(this->cap.capabilities & V4L2_CAP_STREAMING)) {
        xprintf (this->stream->xine, XINE_VERBOSITY_LOG,
                 LOG_MODULE": device doesn't support streaming - prod the author to support the other methods\n");
        return 0;
    }

    lprintf("Supports streaming. Allocating buffers...\n");
    if (!(this->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        /* TODO: Radio streaming */
        xprintf (this->stream->xine, XINE_VERBOSITY_LOG,
                 LOG_MODULE": sorry, only video is supported for now\n");
        return 0;
    }

    if (v4l2_input_setup_video_streaming(this)) {
        lprintf("Video streaming ready.\n");
        return 1;
    }

    /* TODO: Fallbacks */
    xprintf (this->stream->xine, XINE_VERBOSITY_LOG,
             LOG_MODULE": video streaming setup failed\n");
    return 0;
}
示例#15
0
/*
 * Format selected callback
 */
/*
 * Format-selected callback: rebuild the resolution/framerate/standard/
 * dv-timing properties according to the capabilities of the selected
 * input.  Returns false when the device cannot be opened or queried.
 */
static bool format_selected(obs_properties_t *props, obs_property_t *p,
		obs_data_t *settings)
{
	UNUSED_PARAMETER(p);
	int dev = v4l2_open(obs_data_get_string(settings, "device_id"),
			O_RDWR | O_NONBLOCK);
	if (dev == -1)
		return false;

	int input     = (int) obs_data_get_int(settings, "input");
	uint32_t caps = 0;
	if (v4l2_get_input_caps(dev, input, &caps) < 0) {
		/* BUGFIX: close the device before bailing out; the old code
		 * leaked the descriptor on this path. */
		v4l2_close(dev);
		return false;
	}
	caps &= V4L2_IN_CAP_STD | V4L2_IN_CAP_DV_TIMINGS;

	obs_property_t *resolution = obs_properties_get(props, "resolution");
	obs_property_t *framerate  = obs_properties_get(props, "framerate");
	obs_property_t *standard   = obs_properties_get(props, "standard");
	obs_property_t *dv_timing  = obs_properties_get(props, "dv_timing");

	/* With no STD/DV-timing caps the user picks resolution/framerate
	 * directly; otherwise those come from the standard or timing. */
	obs_property_set_visible(resolution, (!caps) ? true : false);
	obs_property_set_visible(framerate,  (!caps) ? true : false);
	obs_property_set_visible(standard,
			(caps & V4L2_IN_CAP_STD) ? true : false);
	obs_property_set_visible(dv_timing,
			(caps & V4L2_IN_CAP_DV_TIMINGS) ? true : false);

	if (!caps) {
		v4l2_resolution_list(dev, obs_data_get_int(
				settings, "pixelformat"), resolution);
	}
	if (caps & V4L2_IN_CAP_STD)
		v4l2_standard_list(dev, standard);
	if (caps & V4L2_IN_CAP_DV_TIMINGS)
		v4l2_dv_timing_list(dev, dv_timing);

	v4l2_close(dev);

	if (!caps)
		obs_property_modified(resolution, settings);
	if (caps & V4L2_IN_CAP_STD)
		obs_property_modified(standard, settings);
	if (caps & V4L2_IN_CAP_DV_TIMINGS)
		obs_property_modified(dv_timing, settings);

	return true;
}
示例#16
0
/*
 * Input selected callback
 */
/*
 * Input-selected callback: refresh the pixel-format property list for
 * the selected device.  Returns false when the device cannot be opened.
 */
static bool input_selected(obs_properties_t *props, obs_property_t *p,
		obs_data_t *settings)
{
	UNUSED_PARAMETER(p);

	int dev = v4l2_open(obs_data_get_string(settings, "device_id"),
			O_RDWR | O_NONBLOCK);
	if (dev == -1)
		return false;

	obs_property_t *formats = obs_properties_get(props, "pixelformat");
	v4l2_format_list(dev, formats);
	v4l2_close(dev);

	obs_property_modified(formats, settings);
	return true;
}
示例#17
0
/*
 * Put the camera's auto-exposure into aperture-priority mode and turn
 * the auto-priority flag off.  Returns 0 on success, -1 on failure.
 */
int OCV::disable_exposure_auto_priority(const string dev) 
{
  int descriptor = v4l2_open(dev.c_str(), O_RDWR);
  /* BUGFIX: the open result was never checked; the ioctls below would
   * have been issued on -1. */
  if (descriptor < 0)
    return -1;

  v4l2_control c;   // auto exposure control to aperture priority 
  c.id = V4L2_CID_EXPOSURE_AUTO;
  c.value = V4L2_EXPOSURE_APERTURE_PRIORITY; 
  if (v4l2_ioctl(descriptor, VIDIOC_S_CTRL, &c)!=0) {
    v4l2_close(descriptor);   /* BUGFIX: fd was leaked on this path */
    return -1;
  }

  c.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY; // auto priority control to false
  c.value = 0;
  if (v4l2_ioctl(descriptor, VIDIOC_S_CTRL, &c)!=0) {
    v4l2_close(descriptor);   /* BUGFIX: fd was leaked on this path */
    return -1;
  }

  v4l2_close(descriptor);
  return 0;
}
Example #18 (score: 0)
File: main.cpp — Project: frc-862/vision
/*
 * Switch the camera (selected by the global CAMERA_ID) to the exposure
 * mode from camSettings and disable auto-priority exposure.  Progress is
 * reported on stdout; failures of the individual ioctls are silent, as
 * before.
 */
void disableAutoExposure() {
    string vidDevice = "/dev/video";
    vidDevice.append(toString(CAMERA_ID));
    int descriptor = v4l2_open(vidDevice.c_str(), O_RDWR);
    /* BUGFIX: the open result was never checked; the ioctls below were
     * issued on -1 when the device was missing. */
    if (descriptor < 0) {
        cout << "Failed to open " << vidDevice << endl;
        return;
    }

    v4l2_control c;
    c.id = V4L2_CID_EXPOSURE_AUTO;
    c.value = camSettings.exposureMode;
    if(v4l2_ioctl(descriptor, VIDIOC_S_CTRL, &c) == 0)
        cout << "Disabled auto exposure" << endl;

    c.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY;
    c.value = 0;
    if(v4l2_ioctl(descriptor, VIDIOC_S_CTRL, &c) == 0)
        cout << "Disabled auto priority" << endl;

    v4l2_close(descriptor);
}
示例#19
0
/*
 * Open the global dev_name into the global fd.
 * Returns 0 on success, -1 when the path is missing, is not a character
 * device, or cannot be opened.
 */
static int open_device (void) {
	struct stat st;

	/* The node must exist and be a character device. */
	if (stat (dev_name, &st) == -1)
		return -1;
	if (!S_ISCHR (st.st_mode))
		return -1;

	fd = v4l2_open (dev_name, O_RDWR /* required */ | O_NONBLOCK, 0);
	return (fd == -1) ? -1 : 0;
}
示例#20
0
/*
 * Device selected callback
 */
/*
 * Device-selected callback: enable/disable the property group depending
 * on whether the device opens, then refresh the input list.
 * Returns false when the device cannot be opened.
 */
static bool device_selected(obs_properties_t *props, obs_property_t *p,
		obs_data_t *settings)
{
	int dev = v4l2_open(obs_data_get_string(settings, "device_id"),
			O_RDWR | O_NONBLOCK);
	bool opened = (dev != -1);

	v4l2_props_set_enabled(props, p, opened);
	if (!opened)
		return false;

	obs_property_t *inputs = obs_properties_get(props, "input");
	v4l2_input_list(dev, inputs);
	v4l2_close(dev);

	obs_property_modified(inputs, settings);
	return true;
}
示例#21
0
/*
 * Resolution selected callback
 */
/*
 * Resolution-selected callback: refresh the framerate list for the
 * chosen pixel format and resolution.  Returns false when the device
 * cannot be opened.
 */
static bool resolution_selected(obs_properties_t *props, obs_property_t *p,
		obs_data_t *settings)
{
	UNUSED_PARAMETER(p);

	int dev = v4l2_open(obs_data_get_string(settings, "device_id"),
			O_RDWR | O_NONBLOCK);
	if (dev == -1)
		return false;

	int width = 0;
	int height = 0;
	v4l2_unpack_tuple(&width, &height,
			obs_data_get_int(settings, "resolution"));

	obs_property_t *fps_prop = obs_properties_get(props, "framerate");
	v4l2_framerate_list(dev, obs_data_get_int(settings, "pixelformat"),
			width, height, fps_prop);
	v4l2_close(dev);

	obs_property_modified(fps_prop, settings);
	return true;
}
/*
 * Python __init__ for Video_device: parse the device path argument and
 * open it read/write, non-blocking.  Returns 0 on success, -1 (with a
 * Python exception set) on failure.
 */
static int Video_device_init(Video_device *self, PyObject *args,
    PyObject *kwargs)
{
  const char *device_path;

  if(!PyArg_ParseTuple(args, "s", &device_path))
    return -1;

  int fd = v4l2_open(device_path, O_RDWR | O_NONBLOCK);
  if(fd < 0)
    {
      /* Raise IOError carrying errno and the offending path. */
      PyErr_SetFromErrnoWithFilename(PyExc_IOError, (char *)device_path);
      return -1;
    }

  self->fd = fd;
  self->buffers = NULL;
  return 0;
}
示例#23
0
/*
 * Open a USB capture device and run the full setup chain (validation,
 * format negotiation, buffer init, streaming start).
 * Returns 1 on success, 0 on any failure.
 */
int USBGrabber::open(const char* aDevPath, size_t *aWidth, size_t *aHeight, uint32_t *aFormat) {
  if (mFd != 0) {
    error("Device already opened");
    return 0;
  }

  // Open capture device
  mFd = v4l2_open(aDevPath, O_RDWR | O_NONBLOCK, 0);
  if (mFd < 0)
  {
    /* BUGFIX: reset mFd to the "closed" value; v4l2_open returns -1 on
     * failure, and leaving mFd == -1 made every later open() attempt be
     * rejected as "Device already opened". */
    mFd = 0;
    error("Cannot open capturer device\n");
    return 0;
  }

  if (!validateCaptureDevice()) return 0;
  if (!initCaptureFormat()) return 0;
  if (!setCaptureFormat(aWidth, aHeight, aFormat)) return 0;
  if (!getCaptureInfo()) return 0;
  if (!initBuffers()) return 0;
  if (!startStreaming()) return 0;

  return 1;
}
示例#24
0
/*
 * Probe a video device: open it, query and print its capabilities, and
 * enumerate its frame formats.  Always returns 0 (or -1 on bad
 * arguments); exits the process if the device cannot be opened.
 */
int check_videoIn(struct vdIn *vd, char *device)
{
    int ret;
    if (vd == NULL || device == NULL)
        return -1;
    vd->videodevice = (char *) calloc(1, 16 * sizeof(char));
    if (vd->videodevice == NULL)      /* was unchecked */
        return -1;
    /* BUGFIX: pass the full 16-byte buffer size; the old bound of 12
     * silently truncated device paths longer than 11 characters. */
    snprintf(vd->videodevice, 16, "%s", device);
    printf("Device information:\n");
    printf("  Device path:  %s\n", vd->videodevice);
    if ((vd->fd = v4l2_open(vd->videodevice, O_RDWR)) == -1) {
        perror("ERROR opening V4L interface");
        exit(1);
    }
    memset(&vd->cap, 0, sizeof(struct v4l2_capability));
    ret = v4l2_ioctl(vd->fd, VIDIOC_QUERYCAP, &vd->cap);
    if (ret < 0) {
        printf("Error opening device %s: unable to query device.\n",
               vd->videodevice);
        goto fatal;
    }
    /* The checks below only warn; probing continues regardless. */
    if ((vd->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
        printf("Error opening device %s: video capture not supported.\n",
               vd->videodevice);
    }
    if (!(vd->cap.capabilities & V4L2_CAP_STREAMING)) {
        printf("%s does not support streaming i/o\n", vd->videodevice);
    }
    if (!(vd->cap.capabilities & V4L2_CAP_READWRITE)) {
        printf("%s does not support read i/o\n", vd->videodevice);
    }
    enum_frame_formats(vd->fd, NULL, 0);
fatal:
    v4l2_close(vd->fd);
    free(vd->videodevice);
    return 0;
}
示例#25
0
/**
 * Initialize the v4l2 device
 *
 * This function:
 * - tries to open the device
 * - sets pixelformat and requested resolution
 * - sets the requested framerate
 * - maps the buffers
 * - starts the capture thread
 */
static void v4l2_init(struct v4l2_data *data)
{
	uint32_t input_caps;
	int fps_num, fps_denom;

	blog(LOG_INFO, "Start capture from %s", data->device_id);
	data->dev = v4l2_open(data->device_id, O_RDWR | O_NONBLOCK);
	if (data->dev == -1) {
		blog(LOG_ERROR, "Unable to open device");
		goto fail;
	}

	/* set input */
	if (v4l2_set_input(data->dev, &data->input) < 0) {
		blog(LOG_ERROR, "Unable to set input %d", data->input);
		goto fail;
	}
	blog(LOG_INFO, "Input: %d", data->input);
	if (v4l2_get_input_caps(data->dev, -1, &input_caps) < 0) {
		blog(LOG_ERROR, "Unable to get input capabilities");
		goto fail;
	}

	/* set video standard if supported */
	if (input_caps & V4L2_IN_CAP_STD) {
		if (v4l2_set_standard(data->dev, &data->standard) < 0) {
			blog(LOG_ERROR, "Unable to set video standard");
			goto fail;
		}
		/* NOTE(review): -1 appears to mean "derive resolution and
		 * framerate from the standard/timing" — confirm against
		 * v4l2_set_format/v4l2_set_framerate. */
		data->resolution = -1;
		data->framerate  = -1;
	}
	/* set dv timing if supported */
	if (input_caps & V4L2_IN_CAP_DV_TIMINGS) {
		if (v4l2_set_dv_timing(data->dev, &data->dv_timing) < 0) {
			blog(LOG_ERROR, "Unable to set dv timing");
			goto fail;
		}
		data->resolution = -1;
		data->framerate  = -1;
	}

	/* set pixel format and resolution */
	if (v4l2_set_format(data->dev, &data->resolution, &data->pixfmt,
			&data->linesize) < 0) {
		blog(LOG_ERROR, "Unable to set format");
		goto fail;
	}
	if (v4l2_to_obs_video_format(data->pixfmt) == VIDEO_FORMAT_NONE) {
		blog(LOG_ERROR, "Selected video format not supported");
		goto fail;
	}
	/* resolution is a packed (width, height) tuple */
	v4l2_unpack_tuple(&data->width, &data->height, data->resolution);
	blog(LOG_INFO, "Resolution: %dx%d", data->width, data->height);
	blog(LOG_INFO, "Pixelformat: %s", V4L2_FOURCC_STR(data->pixfmt));
	blog(LOG_INFO, "Linesize: %d Bytes", data->linesize);

	/* set framerate */
	if (v4l2_set_framerate(data->dev, &data->framerate) < 0) {
		blog(LOG_ERROR, "Unable to set framerate");
		goto fail;
	}
	/* framerate is a packed (numerator, denominator) interval */
	v4l2_unpack_tuple(&fps_num, &fps_denom, data->framerate);
	blog(LOG_INFO, "Framerate: %.2f fps", (float) fps_denom / fps_num);

	/* map buffers */
	if (v4l2_create_mmap(data->dev, &data->buffers) < 0) {
		blog(LOG_ERROR, "Failed to map buffers");
		goto fail;
	}

	/* start the capture thread */
	if (os_event_init(&data->event, OS_EVENT_TYPE_MANUAL) != 0)
		goto fail;
	if (pthread_create(&data->thread, NULL, v4l2_thread, data) != 0)
		goto fail;
	return;
fail:
	/* v4l2_terminate is expected to release whatever was set up above */
	blog(LOG_ERROR, "Initialization failed");
	v4l2_terminate(data);
}
示例#26
0
/*
 * List available devices
 */
/*
 * Populate `prop` with every usable V4L2 capture device on the system.
 * Nodes that cannot be opened, fail VIDIOC_QUERYCAP, or lack video
 * capture capability are skipped.  If the currently configured device
 * is not found it is re-added to the list but disabled, so the user's
 * selection is preserved in the UI.
 */
static void v4l2_device_list(obs_property_t *prop, obs_data_t *settings)
{
	DIR *dirp;
	struct dirent *dp;
	struct dstr device;
	bool cur_device_found;
	size_t cur_device_index;
	const char *cur_device_name;

#ifdef __FreeBSD__
	dirp = opendir("/dev");
#else
	dirp = opendir("/sys/class/video4linux");
#endif
	if (!dirp)
		return;

	cur_device_found = false;
	cur_device_name  = obs_data_get_string(settings, "device_id");

	obs_property_list_clear(prop);

	dstr_init_copy(&device, "/dev/");

	while ((dp = readdir(dirp)) != NULL) {
		int fd;
		uint32_t caps;
		struct v4l2_capability video_cap;

#ifdef __FreeBSD__
		if (strstr(dp->d_name, "video") == NULL)
			continue;
#endif

		if (dp->d_type == DT_DIR)
			continue;

		/* truncate back to the "/dev/" prefix (5 chars), then
		 * append this directory entry's name */
		dstr_resize(&device, 5);
		dstr_cat(&device, dp->d_name);

		if ((fd = v4l2_open(device.array, O_RDWR | O_NONBLOCK)) == -1) {
			blog(LOG_INFO, "Unable to open %s", device.array);
			continue;
		}

		if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &video_cap) == -1) {
			blog(LOG_INFO, "Failed to query capabilities for %s",
			     device.array);
			v4l2_close(fd);
			continue;
		}

#ifndef V4L2_CAP_DEVICE_CAPS
		caps = video_cap.capabilities;
#else
		/* ... since Linux 3.3 */
		caps = (video_cap.capabilities & V4L2_CAP_DEVICE_CAPS)
			? video_cap.device_caps
			: video_cap.capabilities;
#endif

		if (!(caps & V4L2_CAP_VIDEO_CAPTURE)) {
			blog(LOG_INFO, "%s seems to not support video capture",
			     device.array);
			v4l2_close(fd);
			continue;
		}

		obs_property_list_add_string(prop, (char *) video_cap.card,
				device.array);
		blog(LOG_INFO, "Found device '%s' at %s", video_cap.card,
				device.array);

		/* check if this is the currently used device */
		if (cur_device_name && !strcmp(cur_device_name, device.array))
			cur_device_found = true;

		v4l2_close(fd);
	}

	/* add currently selected device if not present, but disable it ... */
	if (!cur_device_found && cur_device_name && strlen(cur_device_name)) {
		cur_device_index = obs_property_list_add_string(prop,
				cur_device_name, cur_device_name);
		obs_property_list_item_disable(prop, cur_device_index, true);
	}

	closedir(dirp);
	dstr_free(&device);
}
示例#27
0
    /*
     * Open `dev` with libv4l2, request an RGB24 frame of w x h, and set
     * up two memory-mapped capture buffers, each queued ready for
     * streaming.  Throws Error on open failure and runtime_error when
     * the driver refuses RGB24.
     * NOTE(review): m_fmt, m_bfr_req and m_bfr are heap-allocated and
     * not released on the throw paths below — possible leak; confirm
     * the class destructor owns them.
     */
    VideoCapture::VideoCapture(const std::string& dev, uint32_t w, uint32_t h)
    {
      // Video 4 Linux library implementation.
#if defined(DUNE_SYS_HAS_LIBV4L2_H)
      m_fd = v4l2_open(dev.c_str(), O_RDWR | O_NONBLOCK, 0);
      if (m_fd < 0)
        throw Error(errno, String::str("failed to open device '%s'", dev.c_str()));

      // Initialize V4L2 format.
      m_fmt = new v4l2_format;
      std::memset(m_fmt, 0, sizeof(v4l2_format));
      m_fmt->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      m_fmt->fmt.pix.width = w;
      m_fmt->fmt.pix.height = h;
      m_fmt->fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
      m_fmt->fmt.pix.field = V4L2_FIELD_INTERLACED;
      doIoctl(m_fd, VIDIOC_S_FMT, m_fmt);

      // The driver may substitute another format; RGB24 is mandatory here.
      if (m_fmt->fmt.pix.pixelformat != V4L2_PIX_FMT_RGB24)
        throw std::runtime_error("pixel format RGB24 is not supported by device");

      // Initialize V4L2 request buffers.
      m_bfr_req = new v4l2_requestbuffers;
      std::memset(m_bfr_req, 0, sizeof(v4l2_requestbuffers));
      m_bfr_req->count = 2;
      m_bfr_req->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      m_bfr_req->memory = V4L2_MEMORY_MMAP;
      doIoctl(m_fd, VIDIOC_REQBUFS, m_bfr_req);

      m_bfr = new v4l2_buffer;
      m_bfrs = (Buffer*)calloc(m_bfr_req->count, sizeof(Buffer));

      // Query, mmap and queue each driver buffer.
      for (unsigned i = 0; i < m_bfr_req->count; ++i)
      {
        std::memset(m_bfr, 0, sizeof(v4l2_buffer));
        m_bfr->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        m_bfr->memory = V4L2_MEMORY_MMAP;
        m_bfr->index = i;
        doIoctl(m_fd, VIDIOC_QUERYBUF, m_bfr);

        m_bfrs[i].length = m_bfr->length;
        m_bfrs[i].start = v4l2_mmap(0, m_bfr->length,
                                    PROT_READ | PROT_WRITE, MAP_SHARED,
                                    m_fd, m_bfr->m.offset);

        if (MAP_FAILED == m_bfrs[i].start)
        {
          perror("mmap");
          exit(EXIT_FAILURE);
        }

        std::memset(m_bfr, 0, sizeof(v4l2_buffer));
        m_bfr->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        m_bfr->memory = V4L2_MEMORY_MMAP;
        m_bfr->index = i;
        doIoctl(m_fd, VIDIOC_QBUF, m_bfr);
      }

#else
      (void)dev;
      (void)h;
      (void)w;

      throw std::runtime_error("VideoCapture is not yet implemented in this system.");
#endif
    }
/*
 * Capture 20 frames at 80x60 in YUYV from /dev/video0 using mmap
 * streaming I/O and dump each frame to out###.ppm.
 * NOTE(review): the frames are raw YUYV written into a P6 (RGB) PPM
 * container, so the images will not render with correct colors.
 */
int main(int argc, char **argv)
{
        struct v4l2_format              fmt;
        struct v4l2_buffer              buf;
        struct v4l2_requestbuffers      req;
        enum v4l2_buf_type              type;
        fd_set                          fds;
        struct timeval                  tv;
        int                             r, fd = -1;
        unsigned int                    i, n_buffers;
        const char                      *dev_name = "/dev/video0";
        char                            out_name[256];
        FILE                            *fout;
        struct buffer                   *buffers;

        fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
        if (fd < 0) {
                perror("Cannot open device");
                exit(EXIT_FAILURE);
        }

        /* Negotiate format; the driver may adjust it. */
        CLEAR(fmt);
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.fmt.pix.width       = 80;
        fmt.fmt.pix.height      = 60;
        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
        fmt.fmt.pix.field       = V4L2_FIELD_NONE;
        xioctl(fd, VIDIOC_S_FMT, &fmt);
        if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV) {
                /* BUGFIX: the message claimed RGB24; YUYV is requested. */
                printf("Libv4l didn't accept YUYV format. Can't proceed.\n");
                exit(EXIT_FAILURE);
        }
        if ((fmt.fmt.pix.width != 80) || (fmt.fmt.pix.height != 60))
                printf("Warning: driver is sending image at %dx%d\n",
                        fmt.fmt.pix.width, fmt.fmt.pix.height);

        /* Request and map two driver buffers. */
        CLEAR(req);
        req.count = 2;
        req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        req.memory = V4L2_MEMORY_MMAP;
        xioctl(fd, VIDIOC_REQBUFS, &req);

        buffers = calloc(req.count, sizeof(*buffers));
        for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
                CLEAR(buf);

                buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                buf.memory      = V4L2_MEMORY_MMAP;
                buf.index       = n_buffers;

                xioctl(fd, VIDIOC_QUERYBUF, &buf);

                buffers[n_buffers].length = buf.length;
                buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
                              PROT_READ | PROT_WRITE, MAP_SHARED,
                              fd, buf.m.offset);

                if (MAP_FAILED == buffers[n_buffers].start) {
                        perror("mmap");
                        exit(EXIT_FAILURE);
                }
        }

        /* Queue all buffers and start streaming. */
        for (i = 0; i < n_buffers; ++i) {
                CLEAR(buf);
                buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                buf.memory = V4L2_MEMORY_MMAP;
                buf.index = i;
                xioctl(fd, VIDIOC_QBUF, &buf);
        }
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

        xioctl(fd, VIDIOC_STREAMON, &type);
        for (i = 0; i < 20; i++) {
                do {
                        FD_ZERO(&fds);
                        FD_SET(fd, &fds);

                        /* Timeout. */
                        tv.tv_sec = 2;
                        tv.tv_usec = 0;

                        r = select(fd + 1, &fds, NULL, NULL, &tv);
                /* BUGFIX: was `errno = EINTR` (assignment), which made
                 * every select() failure look like an interrupt and
                 * retried forever.  Only retry on EINTR. */
                } while ((r == -1 && (errno == EINTR)));
                if (r == -1) {
                        perror("select");
                        return errno;
                }

                CLEAR(buf);
                buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                buf.memory = V4L2_MEMORY_MMAP;
                xioctl(fd, VIDIOC_DQBUF, &buf);

                sprintf(out_name, "out%03d.ppm", i);
                fout = fopen(out_name, "w");
                if (!fout) {
                        perror("Cannot open image");
                        exit(EXIT_FAILURE);
                }
                fprintf(fout, "P6\n%d %d 255\n",
                        fmt.fmt.pix.width, fmt.fmt.pix.height);
                fwrite(buffers[buf.index].start, buf.bytesused, 1, fout);
                fclose(fout);

                xioctl(fd, VIDIOC_QBUF, &buf);
        }

        /* Stop streaming and release everything. */
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        xioctl(fd, VIDIOC_STREAMOFF, &type);
        for (i = 0; i < n_buffers; ++i)
                v4l2_munmap(buffers[i].start, buffers[i].length);
        v4l2_close(fd);

        return 0;
}
示例#29
0
/*
 * Open a V4L2 capture source.  When `n` is NULL the first enumerated
 * device is used; when `cap` is NULL a capability set is looked up from
 * the enumerated device list.  Sets the format, frame rate, and maps
 * the streaming buffers.  Returns 0 on success, -1 on failure.
 */
int vx_source_v4l2_open(vx_source* s, const char* n,vx_device_capability* cap)
{

    struct vx_source_v4l2 *source = VX_V4L2_CAST(s);
    int i = 0;

    const char* devName = 0;

    /* Resolve the device name: caller-supplied, or first enumerated. */
    if (n == 0) {
        vx_source_enumerate(s,0,0);
        s->enumerate(s);
        if (s->deviceCount) {
            devName = s->devices[0].uuid;
        } else {
            return -1;
        }
    } else {
        devName = n;
    }

    /* Resolve a capability set if none was supplied. */
    if (cap == 0) {
        for(i = 0;i<s->deviceCount;++i) {
            if (0 == strcmp(s->devices[i].uuid,devName)) {
                if (s->devices[i].capabilitiesCount) {
                    cap = &s->devices[i].capabilities[0];
                }
            }
        }
    }

    /* BUGFIX: without this guard, cap->width below dereferenced NULL
     * when no capability could be resolved. */
    if (cap == 0)
        return -1;

    source->frame.frame = 0;

    /* BUGFIX: open the resolved device name; the old code passed `n`,
     * which is NULL when the caller asked for the default device. */
    source->_fd = v4l2_open(devName, O_RDWR|O_NONBLOCK, 0);

    /* another check */
    if (source->_fd < 0)
        return -1;

    // request a format
    memset(&source->_format,0,sizeof(struct v4l2_format));

    source->_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    source->_format.fmt.pix.width            = cap->width;
    source->_format.fmt.pix.height           = cap->height;
    source->_format.fmt.pix.pixelformat      = cap->pixelFormat;

    source->_format.fmt.pix.field            = V4L2_FIELD_INTERLACED;

    // check if we can capture in this format
    ioctl(source->_fd, VIDIOC_S_FMT, &source->_format);

    // post-check: the driver may have substituted another pixel format
    if (source->_format.fmt.pix.pixelformat != cap->pixelFormat)
    {
        printf("libv4l didn't accept 0x%x format for %s. Can't proceed.\n",cap->pixelFormat,devName);
        return -1;
    }

    char fourCC[5]; fourCC[4] = '\0';
    VX_FOURCC_TO_CHAR(cap->pixelFormat,fourCC);

    fprintf(stdout,"Choose %s %dx%d (%d) %s\n",devName,cap->width,cap->height,cap->pixelFormat,fourCC);

    // set the camera speed
    struct v4l2_streamparm streamparm;
    memset (&streamparm, 0, sizeof(struct v4l2_streamparm));
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    streamparm.parm.capture.timeperframe.numerator = cap->speed.numerator;
    streamparm.parm.capture.timeperframe.denominator = cap->speed.denominator;

    ioctl(source->_fd, VIDIOC_S_PARM, &streamparm);

    // request, map, and queue the streaming buffers
    memset(&source->_requestbuffers,0,sizeof(struct v4l2_requestbuffers));
    source->_requestbuffers.count = V4L_BUFFERS_COUNT;
    source->_requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    source->_requestbuffers.memory = V4L2_MEMORY_MMAP;
    if (ioctl(source->_fd, VIDIOC_REQBUFS, &source->_requestbuffers) < 0)
        return -1;

    for (i = 0; i < source->_requestbuffers.count; ++i)
    {
        memset(&source->_buffer,0,sizeof(struct v4l2_buffer));

        source->_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        source->_buffer.memory = V4L2_MEMORY_MMAP;
        source->_buffer.index = i;

        if (-1 == ioctl (source->_fd, VIDIOC_QUERYBUF, &source->_buffer))
        {
            perror ("VIDIOC_QUERYBUF");
            return -1;
        }

        source->memAddress[i] = mmap((void*)0,source->_buffer.length,PROT_READ|PROT_WRITE,MAP_SHARED,source->_fd, source->_buffer.m.offset);

        if (MAP_FAILED == source->memAddress[i]) {
            perror ("mmap");
            return -1;
        }

        if (ioctl(source->_fd, VIDIOC_QBUF, &source->_buffer) < 0) {
            return -1;
        }
    }

    return 0;
}
示例#30
0
/*
 * Capture n_frames frames (forever if n_frames <= 0) from dev_name at
 * x_res x y_res in RGB24 via mmap streaming, displaying each frame in
 * an X11/GLX window as a GL texture.  Returns 0 on success, errno if
 * select() fails; exits on setup failures.
 * NOTE(review): the out_dir parameter is accepted but never used here —
 * confirm whether file output was intended.
 */
static int capture(char *dev_name, int x_res, int y_res, int n_frames,
		   char *out_dir)
{
	struct v4l2_format		fmt;
	struct v4l2_buffer		buf;
	struct v4l2_requestbuffers	req;
	enum v4l2_buf_type		type;
	fd_set				fds;
	struct timeval			tv;
	int				r, fd = -1;
	unsigned int			i, j, n_buffers;
	struct buffer			*buffers;
	Display				*dpy;
	Window				win;
	int				num_textures = 1;
	GLuint				texture_id[num_textures];
	Window				root;
	XVisualInfo			*vi;
	XSetWindowAttributes		swa;
	GLXContext			glc;
	GLint				att[] = {
		GLX_RGBA, GLX_DEPTH_SIZE, 24, GLX_DOUBLEBUFFER, None
	};

	/* Set up the X11 display and a GLX-capable visual first. */
	dpy = XOpenDisplay(NULL);
	if (!dpy) {
		printf("\tcannot open display.\n");
		exit(EXIT_FAILURE);
	}

	root = DefaultRootWindow(dpy);

	vi = glXChooseVisual(dpy, 0, att);
	if (!vi) {
		printf("no appropriate visual found.\n");
		exit(EXIT_FAILURE);
	}
	swa.event_mask = ExposureMask | KeyPressMask;
	swa.colormap   = XCreateColormap(dpy, root, vi->visual, AllocNone);

	fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
	if (fd < 0) {
		perror("Cannot open device");
		exit(EXIT_FAILURE);
	}

	/* Negotiate the capture format; the driver may adjust it. */
	CLEAR(fmt);
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width       = x_res;
	fmt.fmt.pix.height      = y_res;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
	xioctl(fd, VIDIOC_S_FMT, &fmt);
	if ((fmt.fmt.pix.width != x_res) || (fmt.fmt.pix.height != y_res))
		printf("Warning: driver is sending image at %dx%d\n",
			fmt.fmt.pix.width, fmt.fmt.pix.height);

	printf("Fourcc format: %c%c%c%c\n",
		fmt.fmt.pix.pixelformat & 0xff,
		(fmt.fmt.pix.pixelformat >> 8) &0xff,
		(fmt.fmt.pix.pixelformat >> 16) &0xff,
		(fmt.fmt.pix.pixelformat >> 24) &0xff);

	/* Create the output window sized to the negotiated frame. */
	win = XCreateWindow(dpy, root, 0, 0,
			    fmt.fmt.pix.width, fmt.fmt.pix.height, 0, vi->depth,
			    InputOutput, vi->visual, CWEventMask  | CWColormap,
			    &swa);
	XMapWindow(dpy, win);
	XStoreName(dpy, win, dev_name);

	glc = glXCreateContext(dpy, vi, NULL, GL_TRUE);
	if (glc == NULL) {
		printf("\n\tcannot create gl context\n\n");
		exit(0);
	}

	glXMakeCurrent(dpy, win, glc);
	glEnable(GL_DEPTH_TEST);

	XCreatePixmap(dpy, root,
		      fmt.fmt.pix.width, fmt.fmt.pix.height,
		      vi->depth);

	/* NOTE(review): only one texture id is generated here; this is fine
	 * while num_textures == 1 — confirm if ever extended. */
	glEnable(GL_TEXTURE_2D);
	glGenTextures(1, texture_id);
	for (j = 0; j < num_textures; j++) {
		glActiveTexture(GL_TEXTURE0 + j);
		glBindTexture(GL_TEXTURE_2D, texture_id[j]);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
		glEnable(GL_TEXTURE_2D);
	}

	/* Request and mmap two driver buffers. */
	CLEAR(req);
	req.count = 2;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	xioctl(fd, VIDIOC_REQBUFS, &req);

	buffers = calloc(req.count, sizeof(*buffers));
	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		CLEAR(buf);

		buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory      = V4L2_MEMORY_MMAP;
		buf.index       = n_buffers;

		xioctl(fd, VIDIOC_QUERYBUF, &buf);

		buffers[n_buffers].length = buf.length;
		buffers[n_buffers].start = v4l2_mmap(NULL, buf.length,
			      PROT_READ | PROT_WRITE, MAP_SHARED,
			      fd, buf.m.offset);

		if (MAP_FAILED == buffers[n_buffers].start) {
			perror("mmap");
			exit(EXIT_FAILURE);
		}
	}

	/* Queue all buffers and start streaming. */
	for (i = 0; i < n_buffers; ++i) {
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		xioctl(fd, VIDIOC_QBUF, &buf);
	}
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	xioctl(fd, VIDIOC_STREAMON, &type);
	i = 0;

	/* Main loop: re-queue the previous buffer, wait for a frame,
	 * dequeue it and draw it as a texture. */
	while (i < n_frames || n_frames <= 0) {
		/* Request new buffer */
		if (i)
			xioctl(fd, VIDIOC_QBUF, &buf);

		do {
			FD_ZERO(&fds);
			FD_SET(fd, &fds);

			/* Timeout. */
			tv.tv_sec = 2;
			tv.tv_usec = 0;

			r = select(fd + 1, &fds, NULL, NULL, &tv);
		} while ((r == -1 && (errno == EINTR)));
		if (r == -1) {
			perror("select");
			return errno;
		}

		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		xioctl(fd, VIDIOC_DQBUF, &buf);

		/*
		 * Display the image via GL - for RGB, only one texture is enough
		 */
		for (j = 0; j < num_textures; j++) {
			glActiveTexture(GL_TEXTURE0 + j);
			glBindTexture(GL_TEXTURE_2D, texture_id[j]);
			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB,
				fmt.fmt.pix.width, fmt.fmt.pix.height, 0,
				GL_RGB, GL_UNSIGNED_BYTE,
				((char *)buffers[buf.index].start) + j);
		}
		Redraw(dpy, win);

		i++;
	}

	/* Stop streaming and release the buffers. */
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	xioctl(fd, VIDIOC_STREAMOFF, &type);
	for (i = 0; i < n_buffers; ++i)
		v4l2_munmap(buffers[i].start, buffers[i].length);
	v4l2_close(fd);

	return 0;
}