Example #1
//--------------------------------------------------------------------
bool ofUCUtils::getFrameUC(unsigned char ** _pixels) {

	if ( !SUCCESS( unicap_queue_buffer( handle, &buffer ) )) {
		printf("Unicap : Failed to queue a buffer\n");
		return false;
	}
	/*
	 Wait until the image buffer is ready
	 */
	if ( !SUCCESS( unicap_wait_buffer( handle, &returned_buffer ) )) {
		printf("Unicap : Failed to wait for buffer\n");
		return false;
	}

	if( src_pix_fmt != PIX_FMT_RGB24 ){
		/*
		 Convert the returned frame to RGB24 and write it into *_pixels
		 */
		avpicture_fill( src, returned_buffer->data, src_pix_fmt,
		                format.size.width, format.size.height );
		img_convert( dst, PIX_FMT_RGB24, src, src_pix_fmt,
		             format.size.width, format.size.height );
		avpicture_layout( dst, PIX_FMT_RGB24, format.size.width, format.size.height,
		                  *_pixels, format.size.width * format.size.height * 3 );
	}else{
		*_pixels = returned_buffer->data;
	}
	return true;

}
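All of the examples on this page share the same two-step capture pattern: hand an empty unicap_data_buffer_t to the driver with unicap_queue_buffer(), then block in unicap_wait_buffer() until the driver hands back a filled buffer. A minimal sketch of just that pattern, assuming a handle that has already been opened, had a format set, and had capture started:

#include <stdio.h>
#include <stdbool.h>   /* for bool when compiled as C */
#include <unicap.h>

/* Sketch only: captures a single frame into a pre-allocated buffer. */
static bool capture_frame( unicap_handle_t handle,
                           unicap_data_buffer_t *buffer,
                           unicap_data_buffer_t **returned_buffer )
{
	/* Hand the buffer to the driver; it will be filled with the next frame. */
	if( !SUCCESS( unicap_queue_buffer( handle, buffer ) ) )
	{
		fprintf( stderr, "Failed to queue a buffer\n" );
		return false;
	}

	/* Block until the driver returns a filled buffer. */
	if( !SUCCESS( unicap_wait_buffer( handle, returned_buffer ) ) )
	{
		fprintf( stderr, "Failed to wait for buffer\n" );
		return false;
	}

	return true;
}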
Example #2
int main (int argc, char *argv [])
{
    unicap_handle_t handle;
    unicap_device_t device;
    unicap_format_t src_format;
    unicap_format_t dest_format;
    unicap_data_buffer_t src_buffer;
    unicap_data_buffer_t dest_buffer;
    unicap_data_buffer_t *returned_buffer;

    if (argc != 4) {
        fprintf (stderr, "Usage: sender <hostname> <camera name> "
            "<interface>\n");
        exit (1);
    }

    //  Initialise 0MQ infrastructure

    //  1. Set error handler function (to ignore disconnected receivers)
    zmq::set_error_handler (error_handler);

    //  2. Initialise basic infrastructure for 2 threads
    zmq::dispatcher_t dispatcher (2);

    //  3. Initialise local locator (to connect to global locator)
    zmq::locator_t locator (argv [1]);

    //  4. Start one working thread (to send data to receivers)
    zmq::poll_thread_t *pt = zmq::poll_thread_t::create (&dispatcher);

    //  5. Register one API thread (the application thread - the one that
    //     is being executed at the moment)
    zmq::api_thread_t *api = zmq::api_thread_t::create (&dispatcher, &locator);

    //  6.  Define an entry point for the messages. The name of the entry point
    //      is user-defined ("camera name"). Specify that working thread "pt"
    //      will be used to listen to new connections being created as well as
    //      to send frames to existing connections.
    int e_id = api->create_exchange (argv [2], zmq::scope_global, argv [3],
        pt, 1, &pt);
    
    //  Open first available video capture device
    if (!SUCCESS (unicap_enumerate_devices (NULL, &device, 0))) {
        fprintf (stderr, "Could not enumerate devices\n");
        exit (1);
    }
    if (!SUCCESS (unicap_open (&handle, &device))) {
        fprintf (stderr, "Failed to open device: %s\n", device.identifier);
        exit (1);
    }
    printf( "Opened video capture device: %s\n", device.identifier );

    //  Find a suitable video format that we can convert to RGB24
    bool conversion_found = false;
    int index = 0;
    while (SUCCESS (unicap_enumerate_formats (handle, NULL, &src_format,
          index))) {
        printf ("Trying video format: %s\n", src_format.identifier);
        if (ucil_conversion_supported (FOURCC ('R', 'G', 'B', '3'), 
            src_format.fourcc)) {
            conversion_found = true;
            break;
        }
        index++;
    }
    if (!conversion_found) {
        fprintf (stderr, "Could not find a suitable video format\n");
        exit (1);
    }
    src_format.buffer_type = UNICAP_BUFFER_TYPE_USER;
    if (!SUCCESS (unicap_set_format (handle, &src_format))) {
        fprintf (stderr, "Failed to set video format\n");
        exit (1);
    }
    printf ("Using video format: %s [%dx%d]\n", 
        src_format.identifier, 
        src_format.size.width, 
        src_format.size.height);

    //  Clone destination format with equal dimensions, but RGB24 colorspace
    unicap_copy_format (&dest_format, &src_format);
    strcpy (dest_format.identifier, "RGB 24bpp");
    dest_format.fourcc = FOURCC ('R', 'G', 'B', '3');
    dest_format.bpp = 24;
    dest_format.buffer_size = dest_format.size.width *
        dest_format.size.height * 3;
    
    //  Initialise image buffers
    memset (&src_buffer, 0, sizeof (unicap_data_buffer_t));
    src_buffer.data = (unsigned char *)malloc (src_format.buffer_size);
    src_buffer.buffer_size = src_format.buffer_size;
    memset (&dest_buffer, 0, sizeof (unicap_data_buffer_t));
    dest_buffer.data = (unsigned char *)malloc (dest_format.buffer_size);
    dest_buffer.buffer_size = dest_format.buffer_size;
    dest_buffer.format = dest_format;

    //  Start video capture
    if (!SUCCESS (unicap_start_capture (handle))) {
        fprintf (stderr, "Failed to start capture on device: %s\n",
            device.identifier);
        exit (1);
    }

    //  Loop, sending video to defined exchange
    while (1) {

        //  Queue buffer for video capture
        if (!SUCCESS (unicap_queue_buffer (handle, &src_buffer))) {
            fprintf (stderr, "Failed to queue a buffer on device: %s\n",
                device.identifier);
            exit (1);
        }

        //  Wait until buffer is ready
        if (!SUCCESS (unicap_wait_buffer (handle, &returned_buffer))) {
            fprintf (stderr, "Failed to wait for buffer on device: %s\n",
                device.identifier);
            exit (1);
        }

        //  Convert colorspace
        if (!SUCCESS (ucil_convert_buffer (&dest_buffer, &src_buffer))) {
            //  TODO: This fails sometimes for unknown reasons,
            //  just skip the frame for now
            fprintf (stderr, "Failed to convert video buffer\n");
        }

        //  Create ZMQ message
        zmq::message_t msg (dest_format.buffer_size + (2 * sizeof (uint32_t)));
        unsigned char *data = (unsigned char *)msg.data();

        //  Image width in pixels
        zmq::put_uint32 (data, (uint32_t)dest_format.size.width);
        data += sizeof (uint32_t);

        //  Image height in pixels
        zmq::put_uint32 (data, (uint32_t)dest_format.size.height);
        data += sizeof (uint32_t);

        //  RGB24 image data
        memcpy (data, dest_buffer.data, dest_format.buffer_size);

        //  Send message 
        api->send (e_id, msg);
    }
    
    return 0;
}
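On the receiving side, the message body has to be unpacked in the same order it was packed: two 32-bit header fields (width, height) followed by width * height * 3 bytes of RGB24 pixel data. A minimal sketch of that unpacking, assuming a zmq::message_t that has already been received from the exchange above, and assuming the legacy API provides zmq::get_uint32 as the reader-side counterpart of the zmq::put_uint32 helper used by the sender:

//  Sketch only: unpack a frame message produced by the send loop above.
//  zmq::get_uint32 is assumed here to mirror zmq::put_uint32.
static void unpack_frame (zmq::message_t &msg, uint32_t &width,
    uint32_t &height, unsigned char *&rgb)
{
    unsigned char *data = (unsigned char *) msg.data ();

    //  Image width in pixels
    width = zmq::get_uint32 (data);
    data += sizeof (uint32_t);

    //  Image height in pixels
    height = zmq::get_uint32 (data);
    data += sizeof (uint32_t);

    //  The remaining width * height * 3 bytes are the RGB24 pixels
    rgb = data;
}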
Example #3
int UnicapCamera::GetColorImage(cv::Mat* img, char* FileName )
{
	unsigned char *image_buffer = NULL;
	unicap_data_buffer_t buffer;
	unicap_data_buffer_t *returned_buffer;
	int error = 0;

	// Initialize the output image (8-bit, 3-channel)
	CV_Assert(img != 0);
	img->create(m_Format->size.height, m_Format->size.width, CV_8UC3);

	
	// Initialize the image buffer
	memset( &buffer, 0x0, sizeof( unicap_data_buffer_t ) );
	
	if (!m_Format)
	{
		printf("UnicapCamera::Acquire: No format set!\n");
		error =  ERROR_NO_FORMAT_SET;
	}

	if ((error == 0) && (!m_Handle))
	{
		printf("UnicapCamera::Acquire: No Camera handle available.\n");
		error =   ERROR_NOT_OPENED;
	}
	
	// Allocate memory for the image buffer
	if (error == 0)
	{
		if( !( image_buffer = (unsigned char *)malloc( m_Format->buffer_size ) ) )
		{
			printf("UnicapCamera::Acquire: Failed to allocate %d bytes\n" );
			error =   ERROR_NO_MEMORY;
		}

		buffer.data = image_buffer;
		buffer.buffer_size = m_Format->buffer_size;
	}
	
	// Queue the buffer on the (already started) capture device.
	// The buffer gets filled with image data by the device.
	if (error == 0)
	{
		if( !SUCCESS( unicap_queue_buffer( *m_Handle, &buffer ) ) )
		{
			printf("UnicapCamera::Acquire: Failed to queue a buffer on device: %s\n", m_Device->identifier );
			error =   UNSPECIFIED_ERROR;
		}
	}
	
	// Wait until the image buffer is ready
	if (error == 0)
	{
		if( !SUCCESS( unicap_wait_buffer( *m_Handle, &returned_buffer ) ) )
		{
			printf("UnicapCamera::Acquire: Failed to wait for buffer on device: %s\n", m_Device->identifier );
			error =   UNSPECIFIED_ERROR;
		}
	}

	// Check that the device returned a non-empty buffer
	if (error == 0)
	{
		if( !returned_buffer->buffer_size )
		{
			printf("UnicapCamera::Acquire: Returned a buffer size of 0!\n" );
			error = UNSPECIFIED_ERROR;
		}
	}
	if (error == 0)
	{
		ConvertImage(img, returned_buffer);
	}

	if ((error==0) && (FileName)) cvSaveImage(FileName, Img);
	if (image_buffer)
	{
		free( image_buffer );
	}
		
	return error;
}
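A hypothetical call site for GetColorImage(); constructing and opening the UnicapCamera instance is not shown in this example and is assumed to happen elsewhere:

// Sketch only: `camera` is assumed to be an opened, configured UnicapCamera.
cv::Mat frame;
if( camera.GetColorImage( &frame, NULL ) == 0 )   // 0 means success, see the error codes above
	cv::imwrite( "frame.png", frame );            // standard OpenCV image write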
Example #4
int main( int argc, char **argv )
{
   unicap_handle_t handle;
   unicap_device_t device;
   unicap_format_t format_spec;
   unicap_format_t format;
   unicap_data_buffer_t buffer;
   unicap_data_buffer_t *returned_buffer;
	
   int i;

   SDL_Surface *screen;
   SDL_Overlay *overlay;

   int quit=0;

   /*
     Enumerate available video capture devices
    */
   printf( "select video device\n" );
   for( i = 0; SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ); i++ )
   {
      printf( "%i: %s\n", i, device.identifier );
   }
   if( --i > 0 )
   {
      printf( "Select video capture device: " );
      scanf( "%d", &i );
   }

   if( !SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ) )
   {
      fprintf( stderr, "Failed to get info for device '%s'\n", device.identifier );
      exit( 1 );
   }

   /*
     Acquire a handle to selected device
   */
   if( !SUCCESS( unicap_open( &handle, &device ) ) )
   {
      fprintf( stderr, "Failed to open device: %s\n", device.identifier );
      exit( 1 );
   }

   printf( "Opened video capture device: %s\n", device.identifier );

   unicap_void_format( &format_spec );
	
   /*
     Get the list of video formats
   */
   for( i = 0; SUCCESS( unicap_enumerate_formats( handle, 
						  NULL, 
						  &format, i ) ); 
	i++ )
   {
      printf( "%d: %s\n", 
	      i,
	      format.identifier );
   }
   if( --i > 0 )
   {
      printf( "Select video format: " );
      scanf( "%d", &i );
   }
   if( !SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, i ) ) )
   {
      fprintf( stderr, "Failed to get video format %d\n", i );
      exit( 1 );
   }
	
   /*
     If a video format has more than one size, ask for which size to use
   */
   if( format.size_count )
   {
      for( i = 0; i < format.size_count; i++ )
      {
	 printf( "%d: %dx%d\n", i, format.sizes[i].width, format.sizes[i].height );
      }
      do
      {
	 printf( "Select video format size: " );
	 scanf( "%d", &i );
      }while( ( i < 0 ) && ( i > format.size_count ) );
      format.size.width = format.sizes[i].width;
      format.size.height = format.sizes[i].height;
   }

   /*
     Set this video format
   */
   if( !SUCCESS( unicap_set_format( handle, &format ) ) )
   {
      fprintf( stderr, "Failed to set video format\n" );
      exit( 1 );
   }

   /*
     Initialize the image buffer
   */
   memset( &buffer, 0x0, sizeof( unicap_data_buffer_t ) );

   /**
      Init SDL & SDL_Overlay
   **/
   if ( SDL_Init(SDL_INIT_VIDEO) < 0 )
   {
      fprintf(stderr, "Failed to initialize SDL:  %s\n", SDL_GetError());
      exit(1);
   }
	
   atexit(SDL_Quit);

   screen = SDL_SetVideoMode( format.size.width, format.size.height, 32, SDL_HWSURFACE);
   if ( screen == NULL ) {
      fprintf(stderr, "Unable to set video mode: %s\n", SDL_GetError());
      exit(1);
   }
   
   

   overlay = SDL_CreateYUVOverlay( format.size.width, 
				   format.size.height, 
				   format.fourcc, 
				   screen );
   if( overlay == NULL )
   {
      fprintf( stderr, "Unable to create overlay: %s\n", SDL_GetError() );
      exit( 1 );
   }

   /*
     Pass the pointer to the overlay to the unicap data buffer. 
   */
   buffer.data = overlay->pixels[0];	
   buffer.buffer_size = format.size.width * format.size.height * format.bpp / 8;

   /*
     Start the capture process on the device
   */
   if( !SUCCESS( unicap_start_capture( handle ) ) )
   {
      fprintf( stderr, "Failed to start capture on device: %s\n", device.identifier );
      exit( 1 );
   }
	

   while( !quit )
   {
      SDL_Rect rect;
      SDL_Event event;

      rect.x = rect.y = 0;
      rect.w = format.size.width;
      rect.h = format.size.height;
      /*
	Queue the buffer
		  
	The buffer now gets filled with image data by the capture device
      */
      if( !SUCCESS( unicap_queue_buffer( handle, &buffer ) ) )
      {
	 fprintf( stderr, "Failed to queue a buffer on device: %s\n", device.identifier );
	 exit( 1 );
      }
		
      /*
	Wait until the image buffer is ready
      */
      if( !SUCCESS( unicap_wait_buffer( handle, &returned_buffer ) ) )
      {
	 fprintf( stderr, "Failed to wait for buffer on device: %s\n", device.identifier );
      }

      /*
	Display the video data
      */
      SDL_UnlockYUVOverlay( overlay );
      SDL_DisplayYUVOverlay( overlay, &rect );
      SDL_LockYUVOverlay(overlay);

      while( SDL_PollEvent( &event ) )
      {
	 if( event.type == SDL_QUIT )
	 {
	    printf( "Quit\n" );
	    quit=1;
	 }
      }
   }
	
   /*
     Stop the device
   */
   if( !SUCCESS( unicap_stop_capture( handle ) ) )
   {
      fprintf( stderr, "Failed to stop capture on device: %s\n", device.identifier );
   }

   /*
     Close the device 

     This invalidates the handle
   */
   if( !SUCCESS( unicap_close( handle ) ) )
   {
      fprintf( stderr, "Failed to close the device: %s\n", device.identifier );
   }

   SDL_Quit();
	
   return 0;
}
Example #5
int main( int argc, char **argv )
{
	unicap_handle_t handle;
	unicap_device_t device;
	unicap_format_t format_spec;
	unicap_format_t format;
	unicap_data_buffer_t buffer;
	unicap_data_buffer_t *returned_buffer;
	int width, height;
	
	int i;

	SDL_Surface *screen;
	SDL_Overlay *overlay;

	int quit=0;
	int imgcnt = 0;

	printf( "select video device\n" );
	for( i = 0; SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ); i++ )
	{
		printf( "%i: %s\n", i, device.identifier );
	}
	if( --i > 0 )
	{
		printf( "Select video capture device: " );
		scanf( "%d", &i );
	}

	if( !SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ) )
	{
		fprintf( stderr, "Failed to get info for device '%s'\n", device.identifier );
		exit( 1 );
	}

	/*
	  Acquire a handle to this device
	 */
	if( !SUCCESS( unicap_open( &handle, &device ) ) )
	{
		fprintf( stderr, "Failed to open device: %s\n", device.identifier );
		exit( 1 );
	}

	printf( "Opened video capture device: %s\n", device.identifier );

	/*
	  Create a format specification to limit the list of formats returned by 
	  unicap_enumerate_formats to the ones with the color format 'UYVY'
	 */
	unicap_void_format( &format_spec );
	format_spec.fourcc = FOURCC('U','Y','V','Y');
	
	/*
	  Get the list of video formats of the colorformat UYVY
	 */
	for( i = 0; SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, i ) ); i++ )
	{
		printf( "%d: %s [%dx%d]\n", 
				i,
				format.identifier, 
				format.size.width, 
				format.size.height );
	}
	if( --i > 0 )
	{
		printf( "Select video format: " );
		scanf( "%d", &i );
	}
	if( !SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, i ) ) )
	{
		fprintf( stderr, "Failed to get video format\n" );
		exit( 1 );
	}
	
	/*
	  If a video format has more than one size, ask for which size to use
	*/
	if( format.size_count )
	{
		for( i = 0; i < format.size_count; i++ )
		{
			printf( "%d: %dx%d\n", i, format.sizes[i].width, format.sizes[i].height );
		}
		do
		{
			printf( "Select video format size: " );
			scanf( "%d", &i );
		}while( ( i < 0 ) || ( i >= format.size_count ) );
		format.size.width = format.sizes[i].width;
		format.size.height = format.sizes[i].height;
	}

	/*
	  Set this video format
	 */
	if( !SUCCESS( unicap_set_format( handle, &format ) ) )
	{
		fprintf( stderr, "Failed to set video format\n" );
		exit( 1 );
	}

	/*
	  Initialize the image buffer
	 */
	memset( &buffer, 0x0, sizeof( unicap_data_buffer_t ) );

	/**
	   Init SDL & SDL_Overlay
	 **/
	if ( SDL_Init(SDL_INIT_VIDEO) < 0 )
	{
	   fprintf(stderr, "Failed to initialize SDL:  %s\n", SDL_GetError());
	   exit(1);
	}
	
	atexit(SDL_Quit);

	/*
	  Make sure the video window does not get too big. 
	 */
	width = MIN( format.size.width, 800 );
	height = MIN( format.size.height, 600 );

	screen = SDL_SetVideoMode( width, height, 32, SDL_HWSURFACE);
	if ( screen == NULL ) {
	   fprintf(stderr, "Unable to set video mode: %s\n", SDL_GetError());
	   exit(1);
	}
	
	overlay = SDL_CreateYUVOverlay( format.size.width, 
									format.size.height, SDL_UYVY_OVERLAY, screen );
	if( overlay == NULL )
	{
	   fprintf( stderr, "Unable to create overlay: %s\n", SDL_GetError() );
	   exit( 1 );
	}

	/*
	  Pass the pointer to the overlay to the unicap data buffer. 
	 */
	buffer.data = overlay->pixels[0];	
	buffer.buffer_size = format.size.width * format.size.height * format.bpp / 8;
	
	/*
	  Start the capture process on the device
	 */
	if( !SUCCESS( unicap_start_capture( handle ) ) )
	{
		fprintf( stderr, "Failed to start capture on device: %s\n", device.identifier );
		exit( 1 );
	}
	

	while( !quit )
	{
		SDL_Rect rect;
		SDL_Event event;

		rect.x = 0;
		rect.y = 0;
		rect.w = width;
		rect.h = height;
		/*
		  Queue the buffer
		  
		  The buffer now gets filled with image data by the capture device
		*/
		if( !SUCCESS( unicap_queue_buffer( handle, &buffer ) ) )
		{
			fprintf( stderr, "Failed to queue a buffer on device: %s\n", device.identifier );
			exit( 1 );
		}
		
		/*
		  Wait until the image buffer is ready
		*/
		if( !SUCCESS( unicap_wait_buffer( handle, &returned_buffer ) ) )
		{
			fprintf( stderr, "Failed to wait for buffer on device: %s\n", device.identifier );
		}

		/*
		  Display the video data
		 */
		SDL_UnlockYUVOverlay( overlay );
		SDL_DisplayYUVOverlay( overlay, &rect );

		while( SDL_PollEvent( &event ) )
		{
			switch( event.type )
			{
				case SDL_QUIT:
					quit = 1;
					break;
					
				case SDL_MOUSEBUTTONDOWN:
				{
					unsigned char *pixels;
					struct jpeg_compress_struct cinfo;
					struct jpeg_error_mgr jerr;
					FILE *outfile;
					JSAMPROW row_pointer[1];
					int row_stride;
					char filename[128];

					struct timeval t1, t2;
					unsigned long long usecs;
					
					sprintf( filename, "%04d.jpg", imgcnt++ );
					
					cinfo.err = jpeg_std_error(&jerr);
					/* Now we can initialize the JPEG compression object. */
					jpeg_create_compress(&cinfo);
					if ((outfile = fopen( filename, "wb" ) ) == NULL ) 
					{
						fprintf(stderr, "can't open %s\n", "file");
						exit(1);
					}
					jpeg_stdio_dest(&cinfo, outfile);
					
					cinfo.image_width = format.size.width; 	/* image width and height, in pixels */
					cinfo.image_height = format.size.height;
					cinfo.input_components = 3;		/* # of color components per pixel */
					cinfo.in_color_space = JCS_RGB; 	/* colorspace of input image */
					
					jpeg_set_defaults(&cinfo);
					pixels = malloc( format.size.width * format.size.height * 3 );
					uyvy2rgb24( pixels, returned_buffer->data,
						    format.size.width * format.size.height * 3,
						    format.size.width * format.size.height * 2 );

					gettimeofday( &t1, NULL );
					jpeg_start_compress(&cinfo, TRUE);
					while( cinfo.next_scanline < cinfo.image_height )
					{
						row_pointer[0] = &pixels[cinfo.next_scanline * format.size.width * 3 ];
						(void) jpeg_write_scanlines(&cinfo, row_pointer, 1);
					}
					jpeg_finish_compress(&cinfo);
					gettimeofday( &t2, NULL );

					usecs = t2.tv_sec * 1000000LL + t2.tv_usec;
					usecs -= ( t1.tv_sec * 1000000LL + t1.tv_usec );
					
					printf( "Compression took: %lld usec\n", usecs );

					/* After finish_compress, we can close the output file. */
					fclose(outfile);
					jpeg_destroy_compress(&cinfo);
					
					free( pixels );
				}
				
				break;

				default:
					break;
			}
		}
		SDL_LockYUVOverlay(overlay);
	}
	
	/*
	  Stop the device
	 */
	if( !SUCCESS( unicap_stop_capture( handle ) ) )
	{
		fprintf( stderr, "Failed to stop capture on device: %s\n", device.identifier );
	}

	/*
	  Close the device 

	  This invalidates the handle
	 */
	if( !SUCCESS( unicap_close( handle ) ) )
	{
		fprintf( stderr, "Failed to close the device: %s\n", device.identifier );
	}

	SDL_Quit();
	
	return 0;
}
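The mouse-button handler above relies on a uyvy2rgb24() helper from the unicap example sources to turn the captured UYVY frame into packed RGB24 before handing it to libjpeg. A rough stand-in for such a helper, shown only as a sketch (ITU-R BT.601 integer approximation; the real helper may differ):

#include <stddef.h>

static unsigned char clamp_u8( int v )
{
	return (unsigned char)( v < 0 ? 0 : ( v > 255 ? 255 : v ) );
}

/* Sketch only: hypothetical replacement for uyvy2rgb24().  src holds UYVY
   macropixels (U Y0 V Y1 = 2 pixels), dest receives packed RGB24; both sizes
   are in bytes. */
static void uyvy_to_rgb24_sketch( unsigned char *dest, const unsigned char *src,
                                  size_t dest_size, size_t src_size )
{
	size_t i, j;
	for( i = 0, j = 0; i + 3 < src_size && j + 5 < dest_size; i += 4, j += 6 )
	{
		int u  = src[i]     - 128;
		int y0 = src[i + 1] - 16;
		int v  = src[i + 2] - 128;
		int y1 = src[i + 3] - 16;

		/* ITU-R BT.601 integer approximation */
		dest[j]     = clamp_u8( ( 298 * y0 + 409 * v + 128 ) >> 8 );            /* R */
		dest[j + 1] = clamp_u8( ( 298 * y0 - 100 * u - 208 * v + 128 ) >> 8 );  /* G */
		dest[j + 2] = clamp_u8( ( 298 * y0 + 516 * u + 128 ) >> 8 );            /* B */

		dest[j + 3] = clamp_u8( ( 298 * y1 + 409 * v + 128 ) >> 8 );
		dest[j + 4] = clamp_u8( ( 298 * y1 - 100 * u - 208 * v + 128 ) >> 8 );
		dest[j + 5] = clamp_u8( ( 298 * y1 + 516 * u + 128 ) >> 8 );
	}
}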