Code example #1
0
File: v4l2capture.c  Project: AchironOS/chromium-2
/* Dequeue one filled capture buffer and return its contents as a Python
 * string.  If `queue` is non-zero the buffer is handed back to the driver
 * (VIDIOC_QBUF) so it can be refilled.
 * Returns NULL with a Python exception set on error. */
static PyObject *Video_device_read_internal(Video_device *self, int queue)
{
  if(!self->buffers)
    {
      ASSERT_OPEN;
      PyErr_SetString(PyExc_ValueError, "Buffers have not been created");
      return NULL;
    }

  /* Zero the whole struct: V4L2 requires reserved/unused fields to be 0,
   * and the original left them as stack garbage. */
  struct v4l2_buffer buffer;
  memset(&buffer, 0, sizeof(buffer));
  buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buffer.memory = V4L2_MEMORY_MMAP;

  if(my_ioctl(self->fd, VIDIOC_DQBUF, &buffer))
    {
      return NULL;
    }

  PyObject *result = PyString_FromStringAndSize(
      self->buffers[buffer.index].start, buffer.bytesused);

  if(!result)
    {
      return NULL;
    }

  if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer))
    {
      /* Don't leak the freshly created string on re-queue failure. */
      Py_DECREF(result);
      return NULL;
    }

  return result;
}
Code example #2
0
/* Python: set_format(size_x, size_y[, yuv420]) -> (width, height)
 * Asks the driver for the given capture resolution via VIDIOC_S_FMT and
 * returns the width/height the driver actually granted (it may adjust).
 * With USE_LIBV4L the optional third argument selects YUV420 over RGB24;
 * otherwise the raw YUYV format is requested. */
static PyObject *Video_device_set_format(Video_device *self, PyObject *args)
{
  int size_x;
  int size_y;
  int yuv420 = 0;

  if(!PyArg_ParseTuple(args, "ii|i", &size_x, &size_y, &yuv420))
    {
      return NULL;
    }

  /* Zero-initialise: VIDIOC_S_FMT reads the whole structure and V4L2
   * requires unset fields to be 0 (was previously stack garbage). */
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  format.fmt.pix.width = size_x;
  format.fmt.pix.height = size_y;
#ifdef USE_LIBV4L
  format.fmt.pix.pixelformat =
    yuv420 ? V4L2_PIX_FMT_YUV420 : V4L2_PIX_FMT_RGB24;
#else
  format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
#endif
  format.fmt.pix.field = V4L2_FIELD_INTERLACED;
  format.fmt.pix.bytesperline = 0;  /* let the driver choose the stride */

  if(my_ioctl(self->fd, VIDIOC_S_FMT, &format))
    {
      return NULL;
    }

  return Py_BuildValue("ii", format.fmt.pix.width, format.fmt.pix.height);
}
Code example #3
0
/* Hand every previously created mmap buffer to the driver (VIDIOC_QBUF)
 * so capture can begin.  Raises ValueError if create_buffers() has not
 * been called; returns None on success. */
static PyObject *Video_device_queue_all_buffers(Video_device *self)
{
  if(!self->buffers)
    {
      ASSERT_OPEN;
      PyErr_SetString(PyExc_ValueError, "Buffers have not been created");
      return NULL;
    }

  int i;
  int buffer_count = self->buffer_count;

  for(i = 0; i < buffer_count; i++)
    {
      /* Zero each request: V4L2 requires unused fields to be 0 (the
       * original queued structs containing stack garbage). */
      struct v4l2_buffer buffer;
      memset(&buffer, 0, sizeof(buffer));
      buffer.index = i;
      buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      buffer.memory = V4L2_MEMORY_MMAP;

      if(my_ioctl(self->fd, VIDIOC_QBUF, &buffer))
	{
	  return NULL;
	}
    }

  Py_RETURN_NONE;
}
Code example #4
0
/* Stop streaming on the open device via VIDIOC_STREAMOFF.
 * Returns None on success, NULL with an exception set on failure. */
static PyObject *Video_device_stop(Video_device *self)
{
  ASSERT_OPEN;

  enum v4l2_buf_type stream_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  if(my_ioctl(self->fd, VIDIOC_STREAMOFF, &stream_type) != 0)
    {
      return NULL;
    }

  Py_RETURN_NONE;
}
Code example #5
0
/* Python: set_fps(fps) -> int
 * Requests `fps` frames per second via VIDIOC_S_PARM (timeperframe = 1/fps)
 * and returns the denominator the driver actually accepted. */
static PyObject *Video_device_set_fps(Video_device *self, PyObject *args)
{
  int fps;

  if(!PyArg_ParseTuple(args, "i", &fps))
    {
      return NULL;
    }

  /* A non-positive fps would put an invalid 0 (or negative) denominator
   * into timeperframe; reject it up front with a clear error. */
  if(fps <= 0)
    {
      PyErr_SetString(PyExc_ValueError, "fps must be a positive integer");
      return NULL;
    }

  struct v4l2_streamparm setfps;
  memset(&setfps, 0, sizeof(struct v4l2_streamparm));
  setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  setfps.parm.capture.timeperframe.numerator = 1;
  setfps.parm.capture.timeperframe.denominator = fps;

  if(my_ioctl(self->fd, VIDIOC_S_PARM, &setfps))
    {
      return NULL;
    }

  return Py_BuildValue("i", setfps.parm.capture.timeperframe.denominator);
}
Code example #6
0
File: v4l2capture.cpp  Project: TimSC/libvideolive
// Print every entry of a menu-type V4L2 control to stdout, one line per
// index between the control's reported minimum and maximum.
static void enumerate_menu (int fd, struct v4l2_queryctrl &queryctrl)
{
	std::cout << "  Menu items:" << std::endl;

	struct v4l2_querymenu querymenu;
	memset (&querymenu, 0, sizeof (querymenu));
	querymenu.id = queryctrl.id;

	querymenu.index = queryctrl.minimum;
	while (querymenu.index <= queryctrl.maximum) {
		// VIDIOC_QUERYMENU returns 0 on success and fills querymenu.name.
		if (my_ioctl (fd, VIDIOC_QUERYMENU, &querymenu) != 0) {
			std::cout << "  Error VIDIOC_QUERYMENU" << std::endl;
		} else {
			std::cout << "  " << querymenu.index << " " << querymenu.name << std::endl;
		}
		querymenu.index++;
	}
}
Code example #7
0
File: v4l2capture.cpp  Project: TimSC/libvideolive
// Stop streaming on the capture device (VIDIOC_STREAMOFF) and clear the
// started flag.  Throws std::runtime_error if the device was never
// started or the ioctl fails.
void Video_in_Manager::StopDeviceInternal()
{
	if(verbose)
		printf("StopDeviceInternal\n");

	if(this->fd == -1)
		throw std::runtime_error("Device not started");

	//Signal V4l2 api
	enum v4l2_buf_type bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if(my_ioctl(this->fd, VIDIOC_STREAMOFF, &bufType) != 0)
		throw std::runtime_error("VIDIOC_STREAMOFF failed");

	this->deviceStarted = 0;
}
Code example #8
0
File: v4l2capture.cpp  Project: TimSC/libvideolive
// Query the device's current capture format via VIDIOC_G_FMT and cache
// frame width/height plus a printable pixel-format name in this->pxFmt.
// Returns 1 on success, 0 if the ioctl fails.
int Video_in_Manager::GetFormatInternal()
{
	// Zero-initialise: V4L2 requires unused fields of the request
	// structure to be 0 (was previously stack garbage).
	struct v4l2_format format;
	memset(&format, 0, sizeof(format));
	format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if(my_ioctl(this->fd, VIDIOC_G_FMT, &format))
	{
		return 0;
	}

	this->frameWidth = format.fmt.pix.width;
	this->frameHeight = format.fmt.pix.height;

	// Map the fourcc to a human-readable name used by the decoder later.
	switch(format.fmt.pix.pixelformat)
	{
	case V4L2_PIX_FMT_MJPEG:
		this->pxFmt = "MJPEG";
		break;
	case V4L2_PIX_FMT_RGB24:
		this->pxFmt = "RGB24";
		break;
	case V4L2_PIX_FMT_YUV420:
		this->pxFmt = "YUV420";
		break;
	case V4L2_PIX_FMT_YVU420:
		this->pxFmt = "YVU420";
		break;
	case V4L2_PIX_FMT_YUYV:
		this->pxFmt = "YUYV";
		break;
	default:
		// Unknown fourcc: report it numerically after the prefix.
		this->pxFmt = "Unknown ";
		std::ostringstream oss;
		oss << format.fmt.pix.pixelformat;
		this->pxFmt.append(oss.str());

		break;
	}

	if(verbose) printf("Current format %s %i %i\n", this->pxFmt.c_str(), this->frameWidth, this->frameHeight);
	return 1;
}
Code example #9
0
/* Python: get_info() -> (driver, card, bus_info, capability_set)
 * Queries VIDIOC_QUERYCAP and returns the driver identification strings
 * plus a set of capability-flag names from the file-scope `capabilities`
 * table.  Returns NULL with an exception set on error. */
static PyObject *Video_device_get_info(Video_device *self)
{
  ASSERT_OPEN;
  struct v4l2_capability caps;

  if(my_ioctl(self->fd, VIDIOC_QUERYCAP, &caps))
    {
      return NULL;
    }

  PyObject *set = PySet_New(NULL);

  if(!set)
    {
      return NULL;
    }

  struct capability *capability = capabilities;

  /* Walk the statically sized table; char* arithmetic instead of the
   * non-standard void* arithmetic used before. */
  while((char *)capability < (char *)capabilities + sizeof(capabilities))
    {
      if(caps.capabilities & capability->id)
	{
	  PyObject *s = PyString_FromString(capability->name);

	  if(!s)
	    {
	      Py_DECREF(set);
	      return NULL;
	    }

	  /* PySet_Add does NOT steal the reference; drop ours to avoid
	   * leaking one string per capability. */
	  int add_failed = PySet_Add(set, s);
	  Py_DECREF(s);

	  if(add_failed)
	    {
	      Py_DECREF(set);
	      return NULL;
	    }
	}

      capability++;
    }

  /* Py_BuildValue's "O" takes its own reference to `set`; release ours
   * so the set is not leaked. */
  PyObject *result = Py_BuildValue("sssO", caps.driver, caps.card,
				   caps.bus_info, set);
  Py_DECREF(set);
  return result;
}
Code example #10
0
File: v4l2capture.cpp  Project: TimSC/libvideolive
// Request a capture resolution and pixel format (by name) via
// VIDIOC_S_FMT, then refresh the cached format from the driver, which
// may have adjusted the request.  Returns 1 on success, 0 on failure.
int Video_in_Manager::SetFormatInternal(class SetFormatParams &args)
{
	if(verbose) printf("SetFormatInternal\n");

	// Zero-initialise: V4L2 requires unused fields of v4l2_format to be
	// 0 (was previously stack garbage).
	struct v4l2_format format;
	memset(&format, 0, sizeof(format));
	format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	format.fmt.pix.width = args.width;
	format.fmt.pix.height = args.height;

	// Map the requested format name to a V4L2 fourcc; default RGB24.
	format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	if(strcmp(args.fmt.c_str(), "MJPEG")==0)
		format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
	if(strcmp(args.fmt.c_str(), "RGB24")==0)
		format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
	if(strcmp(args.fmt.c_str(), "YUV420")==0)
		format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
	if(strcmp(args.fmt.c_str(), "YVU420")==0)
		format.fmt.pix.pixelformat = V4L2_PIX_FMT_YVU420;
	if(strcmp(args.fmt.c_str(), "YUYV")==0)
		format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;

	format.fmt.pix.field = V4L2_FIELD_NONE;
	format.fmt.pix.bytesperline = 0;  // let the driver pick the stride

	if(my_ioctl(this->fd, VIDIOC_S_FMT, &format))
	{
		return 0;
	}

	// Re-read the format the driver actually granted so pxFmt,
	// frameWidth and frameHeight reflect reality, not the request.
	this->GetFormatInternal();

	return 1;
}
Code example #11
0
File: v4l2capture.c  Project: AchironOS/chromium-2
/* Python: set_format(size_x, size_y) -> (width, height)
 * Requests an MJPEG capture format at the given resolution via
 * VIDIOC_S_FMT and returns the width/height the driver granted. */
static PyObject *Video_device_set_format(Video_device *self, PyObject *args)
{
  int size_x;
  int size_y;

  if(!PyArg_ParseTuple(args, "ii", &size_x, &size_y))
    {
      return NULL;
    }

  /* Zero-initialise: V4L2 requires unset fields of v4l2_format to be 0
   * (was previously stack garbage). */
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  format.fmt.pix.width = size_x;
  format.fmt.pix.height = size_y;
  format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
  format.fmt.pix.field = V4L2_FIELD_NONE;
  format.fmt.pix.bytesperline = 0;  /* driver chooses the stride */

  if(my_ioctl(self->fd, VIDIOC_S_FMT, &format))
    {
      return NULL;
    }

  return Py_BuildValue("ii", format.fmt.pix.width, format.fmt.pix.height);
}
Code example #12
0
File: yenicap.c  Project: dhirajkhatiwada1/uludag
/* V4L1/libv4l1 capture demo displayed with SDL.
 * Opens the device given as argv[1] (default /dev/video0), probes whether
 * a V4L2 or V4L1 driver answers, memory-maps the driver's capture buffers
 * and then ping-pongs between frame 0 and frame 1, blitting each captured
 * frame to an SDL window until a key is pressed.
 * Operates entirely on file-scope globals: fd, my_video_dev, capability,
 * v4l2_capability, camDriver, picture, vch, gb_buffers, map, my_buf,
 * mywidth, myheight, screen, event. */
int main(int argc, char** argv)
{
    /* Device path: first CLI argument, or /dev/video0 by default. */
    if(argc == 2) {
        strcpy(my_video_dev, argv[1]);
    } else {
        strcpy(my_video_dev, "/dev/video0");
    }
   
   //if (-1 == (fd = open(my_video_dev, O_RDWR))) {
   if (-1 == (fd = v4l1_open(my_video_dev, O_RDWR))) {
	printf("Error opening device: %s\n", my_video_dev);
	goto error;
   }




    /* Driver probe: try the V4L2 QUERYCAP ioctl first; if it fails, fall
       back to the V4L1 VIDIOCGCAP ioctl.  The "asd N" prints are debug
       breadcrumbs marking which branch was taken. */
    //if( -1 == ioctl(fd, VIDIOC_QUERYCAP, &v4l2_capability) ) {
    if( ioctl(fd, VIDIOC_QUERYCAP, &v4l2_capability) < 0 ) {
        printf("asd  1\n");
        if( -1 == ioctl(fd, VIDIOCGCAP, &capability) ) {
        printf("asd  2\n");
            printf("Error1: ioctl(fd,VIDIOCGCAP,&capability)\n");
            camDriver = DRIVER_NONE;
            goto error;
        } else {
        printf("asd  3\n");
            camDriver = DRIVER_V4L;
        }
    } else {
        printf("asd  4\n");
        camDriver = DRIVER_V4L2;

        /* V4L2 device: re-open through libv4l1 so the compatibility layer
           can translate the V4L1 ioctls used throughout the rest of main. */
        v4l1_close(fd);
        fd = v4l1_open(my_video_dev, O_RDWR);

        if( -1 == my_ioctl(VIDIOCGCAP, &capability)) {
        printf("asd  5\n");
            printf("Error2: ioctl(fd,VIDIOCGCAP,&capability)\n");
            goto error;
        }
    }


//    if( DRIVER_V4L == camDriver ) {

        /* Dump the device capabilities reported by VIDIOCGCAP. */
        printf("\n -----[  VIDIOCGCAP returns ]-----\n");
        printf(" name:      %s\n", capability.name);
        printf(" type:      %i\n", capability.type);
        printf(" channels:  %i\n", capability.channels);
        printf(" audios:    %i\n", capability.audios);
        printf(" maxwidth:  %i\n", capability.maxwidth);
        printf(" maxheight: %i\n", capability.maxheight);
        printf(" minwidth:  %i\n", capability.minwidth);
        printf(" minheight: %i\n", capability.minheight);

//    }


   /* Read and dump the current picture settings (VIDIOCGPICT). */
   if (-1 == my_ioctl(VIDIOCGPICT,&picture)) {
        printf("Error: ioctl(fd,VIDIOCGCPICT,&picture)\n");
        goto error;
   }

	printf("\n -----[  VIDIOCGPICT returns ]-----\n");
	printf(" brightness: %i\n", picture.brightness);
	printf(" hue:        %i\n", picture.hue);
	printf(" colour:     %i\n", picture.colour);
	printf(" contrast:   %i\n", picture.contrast);
	printf(" whiteness:  %i\n", picture.whiteness);
	printf(" depth:      %i\n", picture.depth);

	/* palette_name() renders the numeric palette id as text. */
	char static palet_tipi_str[64];
	palette_name(palet_tipi_str, picture.palette);
	printf(" palette:    %s\n\n", palet_tipi_str);


	

   /* Select input channel 0 (norm left at the driver default). */
   vch.channel = 0;
   // vch.norm = VIDEO_MODE_PAL;
   
   if(-1 == my_ioctl(VIDIOCSCHAN,&vch)) {
        perror("Setting channel\n");
	goto error;
   }
   
   /* Close-on-exec, then query and mmap the driver's capture buffers. */
   fcntl(fd,F_SETFD,FD_CLOEXEC);
   if (-1 == my_ioctl(VIDIOCGMBUF,&gb_buffers)) {
	printf("Error: Error getting buffers\n");
	goto error;
   }

   map = my_mmap(0,gb_buffers.size,PROT_READ|PROT_WRITE,MAP_SHARED,fd,0); 
   if (map == NULL) {
	printf("Error: Mmap returned NULL\n");
	goto error;
   }

   // Set up out capture to use the correct resolution
   
   my_buf.width = mywidth;
   my_buf.height = myheight;
   my_buf.format = VIDEO_PALETTE_RGB24;

   // Set up out video output

   SDL_Init(SDL_INIT_VIDEO);
   screen = SDL_SetVideoMode(mywidth, myheight, 24, SDL_SWSURFACE);
   if ( screen == NULL ) {
	fprintf(stderr, "Couldn't set video mode: %s\n",
	SDL_GetError());
	exit(1);
   }
   SDL_WM_SetCaption("Oy oy oy teve pirogrami", NULL);

   // Tell the capture card to fill frame 0
   
   my_buf.frame = 0;
   if (-1 == my_ioctl(VIDIOCMCAPTURE, &my_buf)) { 
	printf(" ilk my_buf.frame=0 da hata olustu\n");
	// printf("Error: Grabber chip can't sync (no station tuned in?)\n"); 
	goto error;
   }

   // This is the infinate loop
   // We basically:
   //	capture frame 1
   //   sync frame 0
   //   process frame 0
   //	capture frame 0 
   //   sync frame 1
   //   process frame 1
   // For more information, read the programming how-to that came with xawtv
   
   do {

	/* Kick off capture into frame 1 while we consume frame 0. */
	my_buf.frame = 1;
	if (-1 == my_ioctl(VIDIOCMCAPTURE, &my_buf)) {
		printf(" loop icinde frame=1 \n");
		// printf("Error: Grabber chip can't sync (no station tuned in?)\n"); 
		goto error;
	}
 
	/* Wait for frame 0 to finish, then display it. */
	my_buf.frame = 0;
	if (-1 == my_ioctl(VIDIOCSYNC, &my_buf.frame)) {
		 printf("Error on sync!\n"); 
		goto error;
	}

	copytoscreen(map);

	/* Kick off capture into frame 0 while we consume frame 1. */
	my_buf.frame = 0;
	if (-1 == my_ioctl(VIDIOCMCAPTURE, &my_buf)) {
		printf(" loop icinde frame=0 \n");
		// printf("Error: Grabber chip can't sync (no station tuned in?)\n"); 
		goto error;
	}

	/* Wait for frame 1; its data starts at the driver-reported offset. */
	my_buf.frame = 1;
	if (-1 == my_ioctl(VIDIOCSYNC, &my_buf.frame)) {
		printf("Error on sync!\n"); 
		goto error;
	}

	copytoscreen(map + gb_buffers.offsets[1]);
	SDL_PollEvent(&event);
   } while (event.type != SDL_KEYDOWN);

   /* Shared exit path for both success and all goto'ed failures.
      NOTE(review): always returns EXIT_SUCCESS, even on error — verify
      whether callers depend on the exit status. */
   error:

	SDL_Quit();
   	return EXIT_SUCCESS;

}
Code example #13
0
/* Build a linked list of ifi_info structures, one per interface address
 * of the requested family (classic Stevens UNP helper).
 *   family:    address family to report (AF_INET / AF_INET6); addresses
 *              of other families are skipped.
 *   doaliases: non-zero to include interface aliases (e.g. "eth0:1").
 * Returns the head of a my_calloc'ed list; the caller owns and frees it.
 * NOTE(review): sockfd is never closed before returning — confirm whether
 * cleanup is expected from the my_socket wrapper or the caller. */
struct ifi_info *
get_ifi_info(int family, int doaliases)
{
	struct ifi_info		*ifi, *ifihead, **ifipnext;
	int					sockfd, len, lastlen, flags, myflags, idx = 0, hlen = 0;
	char				*ptr, *buf, lastname[IFNAMSIZ], *cptr, *haddr, *sdlname;
	struct ifconf		ifc;
	struct ifreq		*ifr, ifrcopy;
	struct sockaddr_in	*sinptr;
	struct sockaddr_in6	*sin6ptr;

	sockfd = my_socket(AF_INET, SOCK_DGRAM, 0);

	/* SIOCGIFCONF gives no "buffer too small" error on all systems:
	 * grow the buffer until two consecutive calls return the same
	 * length, which means the whole configuration fitted. */
	lastlen = 0;
	len = 100 * sizeof(struct ifreq);	/* initial buffer size guess */
	for ( ; ; ) {
		buf = my_malloc(len);
		ifc.ifc_len = len;
		ifc.ifc_buf = buf;
		if (ioctl(sockfd, SIOCGIFCONF, &ifc) < 0) {
			if (errno != EINVAL || lastlen != 0)
				err_sys("ioctl error");
		} else {
			if (ifc.ifc_len == lastlen)
				break;		/* success, len has not changed */
			lastlen = ifc.ifc_len;
		}
		len += 10 * sizeof(struct ifreq);	/* increment */
		free(buf);
	}
	ifihead = NULL;
	ifipnext = &ifihead;
	lastname[0] = 0;
	sdlname = NULL;
/* end get_ifi_info1 */

/* include get_ifi_info2 */
	/* Walk the variable-length ifreq records returned in buf. */
	for (ptr = buf; ptr < buf + ifc.ifc_len; ) {
		ifr = (struct ifreq *) ptr;

		/* Record size depends on the embedded sockaddr's length. */
#ifdef	HAVE_SOCKADDR_SA_LEN
		len = max(sizeof(struct sockaddr), ifr->ifr_addr.sa_len);
#else
		switch (ifr->ifr_addr.sa_family) {
#ifdef	IPV6
		case AF_INET6:	
			len = sizeof(struct sockaddr_in6);
			break;
#endif
		case AF_INET:	
		default:	
			len = sizeof(struct sockaddr);
			break;
		}
#endif	/* HAVE_SOCKADDR_SA_LEN */
		ptr += sizeof(ifr->ifr_name) + len;	/* for next one in buffer */

#ifdef	HAVE_SOCKADDR_DL_STRUCT
		/* assumes that AF_LINK precedes AF_INET or AF_INET6 */
		if (ifr->ifr_addr.sa_family == AF_LINK) {
			struct sockaddr_dl *sdl = (struct sockaddr_dl *)&ifr->ifr_addr;
			sdlname = ifr->ifr_name;
			idx = sdl->sdl_index;
			haddr = sdl->sdl_data + sdl->sdl_nlen;
			hlen = sdl->sdl_alen;
		}
#endif

		if (ifr->ifr_addr.sa_family != family)
			continue;	/* ignore if not desired address family */

		myflags = 0;
		if ( (cptr = strchr(ifr->ifr_name, ':')) != NULL)
			*cptr = 0;		/* replace colon with null */
		if (strncmp(lastname, ifr->ifr_name, IFNAMSIZ) == 0) {
			if (doaliases == 0)
				continue;	/* already processed this interface */
			myflags = IFI_ALIAS;
		}
		memcpy(lastname, ifr->ifr_name, IFNAMSIZ);

		/* Fetch the flags on a copy so the buffer record stays intact. */
		ifrcopy = *ifr;
		my_ioctl(sockfd, SIOCGIFFLAGS, &ifrcopy);
		flags = ifrcopy.ifr_flags;
		if ((flags & IFF_UP) == 0)
			continue;	/* ignore if interface not up */
/* end get_ifi_info2 */

/* include get_ifi_info3 */
		/* Allocate a node and append it to the singly linked list. */
		ifi = my_calloc(1, sizeof(struct ifi_info));
		*ifipnext = ifi;			/* prev points to this new one */
		ifipnext = &ifi->ifi_next;	/* pointer to next one goes here */

		ifi->ifi_flags = flags;		/* IFF_xxx values */
		ifi->ifi_myflags = myflags;	/* IFI_xxx values */
#if defined(SIOCGIFMTU) && defined(HAVE_STRUCT_IFREQ_IFR_MTU)
		Ioctl(sockfd, SIOCGIFMTU, &ifrcopy);
		ifi->ifi_mtu = ifrcopy.ifr_mtu;
#else
		ifi->ifi_mtu = 0;
#endif
		memcpy(ifi->ifi_name, ifr->ifr_name, IFI_NAME);
		ifi->ifi_name[IFI_NAME-1] = '\0';
		/* If the sockaddr_dl is from a different interface, ignore it */
		if (sdlname == NULL || strcmp(sdlname, ifr->ifr_name) != 0)
			idx = hlen = 0;
		ifi->ifi_index = idx;
		ifi->ifi_hlen = hlen;
		if (ifi->ifi_hlen > IFI_HADDR)
			ifi->ifi_hlen = IFI_HADDR;
		if (hlen)
			memcpy(ifi->ifi_haddr, haddr, ifi->ifi_hlen);
/* end get_ifi_info3 */
/* include get_ifi_info4 */
		/* Copy the unicast address, plus broadcast / point-to-point
		 * destination addresses where the flags say they exist. */
		switch (ifr->ifr_addr.sa_family) {
		case AF_INET:
			sinptr = (struct sockaddr_in *) &ifr->ifr_addr;
			ifi->ifi_addr = my_calloc(1, sizeof(struct sockaddr_in));
			memcpy(ifi->ifi_addr, sinptr, sizeof(struct sockaddr_in));

#ifdef	SIOCGIFBRDADDR
			if (flags & IFF_BROADCAST) {
				my_ioctl(sockfd, SIOCGIFBRDADDR, &ifrcopy);
				sinptr = (struct sockaddr_in *) &ifrcopy.ifr_broadaddr;
				ifi->ifi_brdaddr = my_calloc(1, sizeof(struct sockaddr_in));
				memcpy(ifi->ifi_brdaddr, sinptr, sizeof(struct sockaddr_in));
			}
#endif

#ifdef	SIOCGIFDSTADDR
			if (flags & IFF_POINTOPOINT) {
				my_ioctl(sockfd, SIOCGIFDSTADDR, &ifrcopy);
				sinptr = (struct sockaddr_in *) &ifrcopy.ifr_dstaddr;
				ifi->ifi_dstaddr = my_calloc(1, sizeof(struct sockaddr_in));
				memcpy(ifi->ifi_dstaddr, sinptr, sizeof(struct sockaddr_in));
			}
#endif
			break;

		case AF_INET6:
			sin6ptr = (struct sockaddr_in6 *) &ifr->ifr_addr;
			ifi->ifi_addr = my_calloc(1, sizeof(struct sockaddr_in6));
			memcpy(ifi->ifi_addr, sin6ptr, sizeof(struct sockaddr_in6));

#ifdef	SIOCGIFDSTADDR
			if (flags & IFF_POINTOPOINT) {
				my_ioctl(sockfd, SIOCGIFDSTADDR, &ifrcopy);
				sin6ptr = (struct sockaddr_in6 *) &ifrcopy.ifr_dstaddr;
				ifi->ifi_dstaddr = my_calloc(1, sizeof(struct sockaddr_in6));
				memcpy(ifi->ifi_dstaddr, sin6ptr, sizeof(struct sockaddr_in6));
			}
#endif
			break;

		default:
			break;
		}
	}
	free(buf);
	return(ifihead);	/* pointer to first structure in linked list */
}
Code example #14
0
/* Dequeue one filled capture buffer and return it as a Python string.
 * With libv4l (USE_LIBV4L) the buffer is returned as-is; otherwise the
 * raw YUYV data is converted to packed RGB here.  If `queue` is non-zero
 * the buffer is re-queued for the driver afterwards.
 * Returns NULL with a Python exception set on error. */
static PyObject *Video_device_read_internal(Video_device *self, int queue)
{
  if(!self->buffers)
    {
      ASSERT_OPEN;
      PyErr_SetString(PyExc_ValueError, "Buffers have not been created");
      return NULL;
    }

  /* Zero the whole struct: V4L2 requires reserved/unused fields to be 0
   * (the original passed stack garbage to VIDIOC_DQBUF). */
  struct v4l2_buffer buffer;
  memset(&buffer, 0, sizeof(buffer));
  buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buffer.memory = V4L2_MEMORY_MMAP;

  if(my_ioctl(self->fd, VIDIOC_DQBUF, &buffer))
    {
      return NULL;
    }

#ifdef USE_LIBV4L
  PyObject *result = PyString_FromStringAndSize(
      self->buffers[buffer.index].start, buffer.bytesused);

  if(!result)
    {
      return NULL;
    }
#else
  // Convert buffer from YUYV to RGB.
  // For the byte order, see: http://v4l2spec.bytesex.org/spec/r4339.htm
  // For the color conversion, see: http://v4l2spec.bytesex.org/spec/x2123.htm
  int length = buffer.bytesused * 6 / 4;  /* 4 YUYV bytes -> 6 RGB bytes */
  PyObject *result = PyString_FromStringAndSize(NULL, length);

  if(!result)
    {
      return NULL;
    }

  char *rgb = PyString_AS_STRING(result);
  char *rgb_max = rgb + length;
  unsigned char *yuyv = self->buffers[buffer.index].start;

/* Fixed-point conversion: values are scaled by 256, so shift right by 8
 * after clamping to the representable range. */
#define CLAMP(c) ((c) <= 0 ? 0 : (c) >= 65025 ? 255 : (c) >> 8)
  while(rgb < rgb_max)
    {
      /* One YUYV quad (Y0 U Y1 V) yields two RGB pixels sharing U/V. */
      int u = yuyv[1] - 128;
      int v = yuyv[3] - 128;
      int uv = 100 * u + 208 * v;
      u *= 516;
      v *= 409;

      int y = 298 * (yuyv[0] - 16);
      rgb[0] = CLAMP(y + v);
      rgb[1] = CLAMP(y - uv);
      rgb[2] = CLAMP(y + u);

      y = 298 * (yuyv[2] - 16);
      rgb[3] = CLAMP(y + v);
      rgb[4] = CLAMP(y - uv);
      rgb[5] = CLAMP(y + u);

      rgb += 6;
      yuyv += 4;
    }
#undef CLAMP
#endif

  if(queue && my_ioctl(self->fd, VIDIOC_QBUF, &buffer))
    {
      /* Don't leak the result string on re-queue failure. */
      Py_DECREF(result);
      return NULL;
    }

  return result;
}
Code example #15
0
File: v4l2capture.cpp  Project: TimSC/libvideolive
// Dequeue one frame from the driver, attempt to decode it to the target
// format, append the (decoded or raw) frame plus its metadata to the
// shared ring buffers under the lock, and re-queue the V4L2 buffer.
// Returns 1 on success, 0 if VIDIOC_DQBUF fails (e.g. timeout).
int Video_in_Manager::ReadFrame()
{
	if(this->fd<0)
		throw std::runtime_error("File not open");

	if(this->buffers == NULL)
		throw std::runtime_error("Buffers have not been created");

	// Zero-initialise: V4L2 requires reserved/unused fields of
	// v4l2_buffer to be 0 (was previously stack garbage).
	struct v4l2_buffer buffer;
	memset(&buffer, 0, sizeof(buffer));
	buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	buffer.memory = V4L2_MEMORY_MMAP;

	if(my_ioctl(this->fd, VIDIOC_DQBUF, &buffer, 10000))
	{
		return 0;
	}

	// Try to convert the raw frame into the requested target format.
	unsigned char *rgbBuff = NULL;
	unsigned rgbBuffLen = 0;
	int ok = DecodeFrame((const unsigned char*)this->buffers[buffer.index].start, buffer.bytesused, 
		this->pxFmt.c_str(),
		this->frameWidth,
		this->frameHeight,
		this->targetFmt.c_str(), &rgbBuff, &rgbBuffLen);

	//Return a frame, decoded or not
	pthread_mutex_lock(&this->lock);
				
	class FrameMetaData meta;
	meta.width = this->frameWidth;
	meta.height = this->frameHeight;
	if(ok && rgbBuff != NULL)
	{
		meta.fmt = this->targetFmt;
		meta.buffLen = rgbBuffLen;
		this->decodedFrameBuff.push_back(rgbBuff);
	}
	else
	{
		//Make a copy of un-decodable buffer to return
		unsigned char* buffOut = new unsigned char[buffer.bytesused];
		memcpy(buffOut, this->buffers[buffer.index].start, buffer.bytesused);
		meta.fmt = this->pxFmt;
		meta.buffLen = buffer.bytesused;
		this->decodedFrameBuff.push_back(buffOut);
	}
	meta.sequence = buffer.sequence;
	meta.tv_sec = buffer.timestamp.tv_sec;
	meta.tv_usec = buffer.timestamp.tv_usec;

	this->decodedFrameMetaBuff.push_back(meta);
	// Bound the queue: drop the oldest frames beyond the configured size.
	// NOTE(review): erasing the pointer without freeing it looks like it
	// leaks the dropped frame's buffer unless a consumer has already
	// taken ownership — verify against the consumer side.
	while(this->decodedFrameBuff.size() > this->decodedFrameBuffMaxSize)
	{
		this->decodedFrameBuff.erase(this->decodedFrameBuff.begin());
		this->decodedFrameMetaBuff.erase(this->decodedFrameMetaBuff.begin());
	}
	pthread_mutex_unlock(&this->lock);

	//Queue buffer for next frame
	if(my_ioctl(this->fd, VIDIOC_QBUF, &buffer))
	{
		throw std::runtime_error("VIDIOC_QBUF failed");
	}

	return 1;
}
Code example #16
0
/* Python: create_buffers(buffer_count)
 * Asks the driver for `buffer_count` mmap'ed capture buffers
 * (VIDIOC_REQBUFS; the driver may grant fewer) and memory-maps each one.
 * Raises ValueError if buffers already exist, IOError on driver refusal.
 * Returns None on success. */
static PyObject *Video_device_create_buffers(Video_device *self, PyObject *args)
{
  int buffer_count;

  /* "i" matches the signed C int; the original "I" (unsigned) skipped
   * overflow checking on the conversion. */
  if(!PyArg_ParseTuple(args, "i", &buffer_count))
    {
      return NULL;
    }

  ASSERT_OPEN;

  if(self->buffers)
    {
      PyErr_SetString(PyExc_ValueError, "Buffers are already created");
      return NULL;
    }

  /* Zero-initialise: V4L2 requires reserved fields to be 0. */
  struct v4l2_requestbuffers reqbuf;
  memset(&reqbuf, 0, sizeof(reqbuf));
  reqbuf.count = buffer_count;
  reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  reqbuf.memory = V4L2_MEMORY_MMAP;

  if(my_ioctl(self->fd, VIDIOC_REQBUFS, &reqbuf))
    {
      return NULL;
    }

  if(!reqbuf.count)
    {
      PyErr_SetString(PyExc_IOError, "Not enough buffer memory");
      return NULL;
    }

  self->buffers = malloc(reqbuf.count * sizeof(struct buffer));

  if(!self->buffers)
    {
      PyErr_NoMemory();
      return NULL;
    }

  int i;

  for(i = 0; i < reqbuf.count; i++)
    {
      struct v4l2_buffer buffer;
      memset(&buffer, 0, sizeof(buffer));
      buffer.index = i;
      buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      buffer.memory = V4L2_MEMORY_MMAP;

      if(my_ioctl(self->fd, VIDIOC_QUERYBUF, &buffer))
	{
	  /* Record how many buffers were mapped so far; the original
	   * left buffer_count stale while self->buffers was non-NULL. */
	  self->buffer_count = i;
	  return NULL;
	}

      self->buffers[i].length = buffer.length;
      self->buffers[i].start = v4l2_mmap(NULL, buffer.length,
	  PROT_READ | PROT_WRITE, MAP_SHARED, self->fd, buffer.m.offset);

      if(self->buffers[i].start == MAP_FAILED)
	{
	  self->buffer_count = i;
	  PyErr_SetFromErrno(PyExc_IOError);
	  return NULL;
	}
    }

  self->buffer_count = i;
  Py_RETURN_NONE;
}
Code example #17
0
File: v4l2capture.cpp  Project: TimSC/libvideolive
// Prepare the already-open device for streaming: determine the current
// pixel format if unknown, request and mmap `buffer_count` capture
// buffers (the driver may grant a different number), queue them, and
// issue VIDIOC_STREAMON.  Returns 1 on success; throws on failure.
int Video_in_Manager::StartDeviceInternal(int buffer_count = 10)
{
	if(verbose) printf("StartDeviceInternal\n");
	//Check this device has not already been start
	if(this->fd==-1)
	{
		throw std::runtime_error("Device not open");
	}

	//Set other parameters for capture
	//TODO

	/*
	//Query current pixel format
	self.size_x, self.size_y, self.pixelFmt = self.video.get_format()

	//Set target frames per second
	self.fps = self.video.set_fps(reqFps)
	*/

	// Create a buffer to store image data in. This must be done before
	// calling 'start' if v4l2capture is compiled with libv4l2. Otherwise
	// raises IOError.

	if(this->pxFmt.length()==0)
	{
		// Cache the current format so frames can be decoded later.
		int ret = GetFormatInternal();
		if(!ret) throw std::runtime_error("Could not determine image format");
	}

	// Zero-initialise: V4L2 requires reserved fields to be 0 (the
	// original passed stack garbage to VIDIOC_REQBUFS).
	struct v4l2_requestbuffers reqbuf;
	memset(&reqbuf, 0, sizeof(reqbuf));
	reqbuf.count = buffer_count;
	reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	reqbuf.memory = V4L2_MEMORY_MMAP;

	if(my_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf))
	{
		throw std::runtime_error("VIDIOC_REQBUFS failed");
	}

	// The driver may grant fewer buffers than requested.
	if(!reqbuf.count)
	{
		throw std::runtime_error("Not enough buffer memory");
	}

	this->buffers = new struct buffer [reqbuf.count];

	if(this->buffers == NULL)
	{
		throw std::runtime_error("Failed to allocate buffer memory");
	}

	for(unsigned int i = 0; i < reqbuf.count; i++)
	{
		struct v4l2_buffer buffer;
		memset(&buffer, 0, sizeof(buffer));
		buffer.index = i;
		buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buffer.memory = V4L2_MEMORY_MMAP;

		if(my_ioctl(fd, VIDIOC_QUERYBUF, &buffer))
		{
			throw std::runtime_error("VIDIOC_QUERYBUF failed");
		}

		this->buffers[i].length = buffer.length;
		this->buffers[i].start = v4l2_mmap(NULL, buffer.length,
		PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset);

		if(this->buffers[i].start == MAP_FAILED)
		{
			throw std::runtime_error("v4l2_mmap failed");
		}
	}

	this->buffer_counts = reqbuf.count;

	// Send the buffer to the device. Some devices require this to be done
	// before calling 'start'.
	// Iterate over the count the driver actually granted (reqbuf.count);
	// the original used the requested buffer_count, which could queue
	// indices that were never allocated.
	for(unsigned int i = 0; i < reqbuf.count; i++)
	{
		struct v4l2_buffer buffer;
		memset(&buffer, 0, sizeof(buffer));
		buffer.index = i;
		buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buffer.memory = V4L2_MEMORY_MMAP;

		if(my_ioctl(fd, VIDIOC_QBUF, &buffer))
		{
			//This may fail with some devices but does not seem to be harmful.
		}
	}

	// Start the device. This lights the LED if it's a camera that has one.
	enum v4l2_buf_type type;
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	if(my_ioctl(fd, VIDIOC_STREAMON, &type))
	{
		throw std::runtime_error("VIDIOC_STREAMON failed");
	}

	this->Test();

	this->deviceStarted = 1;
	if(verbose) printf("Started ok\n");	
	return 1;
}