Example #1
void 
setformat(unicap_handle_t handle, unicap_format_t format)
{
  format.buffer_type = UNICAP_BUFFER_TYPE_SYSTEM;
  if (!SUCCESS(unicap_set_format(handle, &format))) 
    {
      fprintf(stderr, "cannot set format\n");
    }
}
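Example #1 only switches the buffer type and applies the format it is handed. A typical caller would first query the device's current format with unicap_get_format and then pass it in; the wrapper below is a hypothetical sketch of that call sequence, not part of the original example.

/* Hypothetical caller: fetch the current format, then let setformat()
   above switch it to system buffers and apply it. */
void
apply_system_buffers(unicap_handle_t handle)
{
  unicap_format_t format;

  if (!SUCCESS(unicap_get_format(handle, &format)))
    {
      fprintf(stderr, "cannot get current format\n");
      return;
    }

  setformat(handle, format);  /* format is passed by value, as above */
}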
Example #2
static void
set_format (unicap_handle_t handle)
{
   unicap_format_t formats[MAX_FORMATS];
   int format_count;
   unicap_status_t status = STATUS_SUCCESS;
   int f = -1;

   for (format_count = 0; SUCCESS (status) && (format_count < MAX_FORMATS);
	format_count++)
   {
      status = unicap_enumerate_formats (handle, NULL, &formats[format_count],	// (1) enumerate the video formats the device offers
					 format_count);
      if (SUCCESS (status))
      {
	 printf ("%d: %s\n", format_count, formats[format_count].identifier);
      }
      else
      {
	 break;
      }
   }

   if (format_count == 0)
   {
      // no video formats
      return;
   }

   while ((f < 0) || (f >= format_count))
   {
      printf ("Use Format: ");
      scanf ("%d", &f);
   }

   if (formats[f].size_count)
   {
      // (2) the format supports more than one size: list them and let the user pick one
      int i;
      int s = -1;

      for (i = 0; i < formats[f].size_count; i++)
      {
	 printf ("%d: %dx%d\n", i, formats[f].sizes[i].width,
		 formats[f].sizes[i].height);
      }

      while ((s < 0) || (s >= formats[f].size_count))
      {
	 printf ("Select Size: ");
	 scanf ("%d", &s);
      }

      formats[f].size.width = formats[f].sizes[s].width;
      formats[f].size.height = formats[f].sizes[s].height;
   }

   formats[f].buffer_type = UNICAP_BUFFER_TYPE_SYSTEM;

   if (!SUCCESS (unicap_set_format (handle, &formats[f])))	// (3) apply the selected format
   {
      fprintf (stderr, "Failed to set the format!\n");
      exit (-1);
   }
}
Example #3
//--------------------------------------------------------------------
// If a 24 bit video format is found it is preferred; otherwise the first
// format returned by unicap is selected.
//
// It then tries to set the desired width and height; if that fails, it tries
// to find the nearest supported size or falls back to the default width and height.
//
// On V4L devices the 24 bit format is always BGR, so it needs conversion.
// On some V4L devices a non-default width/height is reported as BGR but returned as RGB.
// Color conversion is done with ffmpeg.
void ofUCUtils::set_format(int w, int h) {
	if(!deviceReady)
		return;
	d_width=w;
	d_height=h;
	unicap_format_t formats[MAX_FORMATS];
	int format_count;
	unicap_status_t status = STATUS_SUCCESS;
	int rgb24 = -1;

	ofLog(OF_NOTICE,"ofUCUtils : Available formats for this device:");
	for (format_count = 0; SUCCESS (status) && (format_count < MAX_FORMATS); format_count++) {
		status = unicap_enumerate_formats (handle, NULL, &formats[format_count], format_count);
		if (SUCCESS (status)) {
			if (formats[format_count].bpp == 24) {
				rgb24 = format_count;
			}
			ofLog(OF_NOTICE,
					"ofUCUtils : %d: %s, min size: %dx%d, max size:%dx%d, default size: %dx%d",
					format_count, formats[format_count].identifier,
					formats[format_count].min_size.width,
					formats[format_count].min_size.height,
					formats[format_count].max_size.width,
					formats[format_count].max_size.height,
					formats[format_count].size.width,
					formats[format_count].size.height);
			ofLog(OF_VERBOSE,"ofUCUtils: available sizes for this format:");
			for(int i=0; i<formats[format_count].size_count;i++){
				ofLog(OF_VERBOSE,"          %dx%d",formats[format_count].sizes[i].width,formats[format_count].sizes[i].height);
			}
		}
	}

	if (format_count > 0) {
		int selected_format = 0;
		if (rgb24 != -1)
			selected_format = rgb24;
		else{
			for(selected_format=0;selected_format<format_count;selected_format++){
				format = formats[selected_format];
				if(fourcc_to_pix_fmt(format.fourcc)!=-1)
					break;
			}
		}
		format = formats[selected_format];

		bool exactMatch  = false;
		int sizeDiff = 99999999;
		int mostAproxSize = -1;

		for(int i=0; i<format.size_count;i++){
			if(format.sizes[i].width == w && format.sizes[i].height==h){
				exactMatch=true;
				format.size.width  = format.sizes[i].width;
				format.size.height = format.sizes[i].height;
				break;
			}else{
				if(abs(format.sizes[i].width-w)+abs(format.sizes[i].height-h)<sizeDiff){
					sizeDiff=abs(format.sizes[i].width-w)+abs(format.sizes[i].height-h);
					mostAproxSize=i;
				}
			}
		}
		if(!exactMatch && mostAproxSize!=-1){
			format.size.width  = format.sizes[mostAproxSize].width;
			format.size.height = format.sizes[mostAproxSize].height;

			ofLog(OF_WARNING, "ofUCUtils : Can't set video format %s, with size %dx%d, will use %dx%d",
							format.identifier, w, h,
							format.size.width, format.size.height);
		}else if(format.size_count==0){
			int defaultFormatWidth = format.size.width;
			int defaultFormatHeight = format.size.height;

			format.size.width  = w;
			format.size.height = h;

			ofLog(OF_WARNING, "ofUCUtils : Can't recognize supported video sizes for %s, trying with requested size: %i,%i",
										format.identifier, format.size.width, format.size.height);

			if ( !SUCCESS ( unicap_set_format (handle, &format) ) ) {
				format.size.width  = defaultFormatWidth;
				format.size.height = defaultFormatHeight;

				ofLog(OF_WARNING, "ofUCUtils : Can't set requested size, trying with format defaults: %i,%i",
						defaultFormatWidth, defaultFormatHeight);
			}
			ofLog(OF_WARNING, "ofUCUtils : If this doesn't work try using the reported default size in initGrabber:",
					defaultFormatWidth, defaultFormatHeight);
		}
		if ( !SUCCESS ( unicap_set_format (handle, &format) ) ) {
			ofLog(OF_ERROR, "ofUCUtils : Failed to set alternative video format!");
			return;
		}
		ofLog(OF_NOTICE,"ofUCUtils : Selected format: %s, with size %dx%d\n", format.identifier,
				format.size.width, format.size.height);

		src_pix_fmt=fourcc_to_pix_fmt(format.fourcc);
		if( src_pix_fmt==-1){
			ofLog(OF_ERROR,"ofUCUtils : Format not suported\n");
			return;
		}

		if(src_pix_fmt!=PIX_FMT_RGB24 || !exactMatch){
			src=new AVPicture;
			avpicture_alloc(src,src_pix_fmt,format.size.width,format.size.height);
			dst=new AVPicture;
			avpicture_alloc(dst,PIX_FMT_RGB24,d_width,d_height);

			toRGB_convert_ctx = sws_getContext(
							format.size.width, format.size.height, src_pix_fmt,
							d_width, d_height, PIX_FMT_RGB24,
							VIDEOGRABBER_RESIZE_FLAGS, NULL, NULL, NULL);


			ofLog(OF_NOTICE,"ofUCUtils: Converting to RGB24 (%i,%i)\n",w,h);

			pixels=new unsigned char[d_width*d_height*3];
		}

	   if( !SUCCESS( unicap_get_format( handle, &format ) ) )
	   {
		   ofLog(OF_ERROR, "can't get format" );
		   return;
	   }

		format.buffer_type = UNICAP_BUFFER_TYPE_SYSTEM;

	   if( !SUCCESS( unicap_set_format( handle, &format ) ) )
	   {
		   ofLog(OF_WARNING, "ofUCUtils: Failed to activate SYSTEM_BUFFERS" );
	   }
	}
}
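set_format() above only builds the swscale context (toRGB_convert_ctx) and allocates the src/dst pictures; the per-frame conversion itself presumably happens elsewhere in ofUCUtils and is not part of this listing. The sketch below shows how such a context is typically driven with sws_scale once a captured frame has been copied into src; the function name and signature are assumptions for illustration, not the actual ofUCUtils code.

#include <libavcodec/avcodec.h>    // AVPicture (assumed to be available, as in ofUCUtils)
#include <libswscale/swscale.h>    // sws_scale / SwsContext

// Hypothetical per-frame conversion using the context built in set_format().
// src holds the captured frame in src_pix_fmt, dst receives RGB24 at d_width x d_height.
static void convert_frame_to_rgb(struct SwsContext *toRGB_convert_ctx,
                                 AVPicture *src, AVPicture *dst,
                                 int src_height) {
	// convert one full slice (rows 0 .. src_height) of src into dst
	sws_scale(toRGB_convert_ctx,
	          src->data, src->linesize, 0, src_height,
	          dst->data, dst->linesize);
}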
Example #4
//--------------------------------------------------------------------
// If a 24 bit video format is found it is preferred; otherwise the first
// format returned by unicap is selected.
//
// It then tries to set the desired width and height; if that fails, it tries
// to find the nearest supported size or falls back to the default width and height.
//
// On V4L devices the 24 bit format is always BGR, so it needs conversion.
// On some V4L devices a non-default width/height is reported as BGR but returned as RGB.
// Color conversion is done with ffmpeg.
void ofUCUtils::set_format(int w, int h) {

	unicap_format_t formats[MAX_FORMATS];
	int format_count;
	unicap_status_t status = STATUS_SUCCESS;
	int rgb24 = -1;

	if(verbose) printf("Unicap : Available formats for this device:\n");
	for (format_count = 0; SUCCESS (status) && (format_count < MAX_FORMATS); format_count++) {
		status = unicap_enumerate_formats (handle, NULL, &formats[format_count], format_count);
		if (SUCCESS (status)) {
			if (formats[format_count].bpp == 24) {
				rgb24 = format_count;
			}
			if(verbose) printf (
					"Unicap : %d: %s, min size: %dx%d, max size:%dx%d, default size: %dx%d\n",
					format_count, formats[format_count].identifier,
					formats[format_count].min_size.width,
					formats[format_count].min_size.height,
					formats[format_count].max_size.width,
					formats[format_count].max_size.height,
					formats[format_count].size.width,
					formats[format_count].size.height);
		}
	}

	if (format_count > 0) {
		int selected_format = 0;
		if (rgb24 != -1)
			selected_format = rgb24;
		format = formats[selected_format];

		bool sizeFounded = true;
		bool exactMatch  = false;
		if(w == format.size.width && h == format.size.height){
			exactMatch = true;
		}else if(w <= format.min_size.width && h <= format.min_size.height){
			format.size.width  = format.min_size.width;
			format.size.height = format.min_size.height;
		}else if(w >= format.max_size.width && h >= format.max_size.height){
			format.size.width  = format.max_size.width;
			format.size.height = format.max_size.height;
		}else{
			sizeFounded=false;
		}

		if(sizeFounded){
			if(verbose && !exactMatch)
				printf ("Unicap : Can't set video format %s, with size %dx%d\n",
								format.identifier, w, h);

			if ( !SUCCESS ( unicap_set_format (handle, &format) ) ) {
				printf ("Unicap : Failed to set alternative video format!\n");
				return;
			}
		}else{
			format.size.width  = w;
			format.size.height = h;

			//Try selected size
			if (!SUCCESS (unicap_set_format (handle, &format))) {
				printf ("Unicap : Can't set video format %s, with size %dx%d\n",
						format.identifier, w, h);


				// If selected size doesn't work try to find a supported one
				unicap_format_t format_spec;
				unicap_void_format(&format_spec);

				int nearW				= 9999999;
				int nearH				= 9999999;


				//Try with unicap reported sizes
				if(format.size_count > 0){
					if(verbose)printf ("Unicap : Available sizes: %d\n",format.size_count);

					for(int i = 0; i < format.size_count; i++){
						if(verbose) printf ("%d,%d\n",format.sizes[i].width,format.sizes[i].height);
						if(abs(w-format.sizes[i].width)<abs(w-nearW)){
							nearW = format.sizes[i].width;
							nearH = format.sizes[i].height;
						}
					}
					format.size.width  = nearW;
		            format.size.height = nearH;

		        //Try with stepping
				}else if(format.h_stepping > 1 || format.v_stepping > 1){
					//This is how many diff sizes are available for the format
		            int stepX = format.h_stepping;
		            int stepY = format.v_stepping;
		            for(int x = format.min_size.x; x <= format.max_size.x; x+= stepX)
		            {
		            	if( abs(w-x) < abs(w-nearW) ){
		            		nearW = x;
		            	}
		            }

		            for(int y = format.min_size.y; y <= format.max_size.y; y+= stepY)
		            {
		            	if( abs(h-y) < abs(h-nearH) ){
		            		nearH = y;
		            	}
		            }
		            format.size.width  = nearW;
		            format.size.height = nearH;
				}

				//Try to set the size we found
				sizeFounded = SUCCESS ( unicap_set_format (handle, &format) );

				//If none of the above work, try default size
				if(!sizeFounded){
	       			if ( !SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, selected_format ) ) ) {
						printf("Unicap : Failed to get alternative video format\n");
						return;
					}

					if ( !SUCCESS ( unicap_set_format (handle, &format) ) ) {
						printf ("Unicap : Failed to set alternative video format!\n");
						return;
					}
				}
			}

		}
		if(verbose) printf("Unicap : Selected format: %s, with size %dx%d\n", format.identifier,
				format.size.width, format.size.height);

		src_pix_fmt=fourcc_to_pix_fmt(format.fourcc);
		if( src_pix_fmt==-1){
			printf("Unicap : Format not suported\n");
			return;
		}

		if(src_pix_fmt!=PIX_FMT_RGB24){
			src=new AVPicture;
			avpicture_alloc(src,src_pix_fmt,format.size.width,format.size.height);
			dst=new AVPicture;
			avpicture_alloc(dst,PIX_FMT_RGB24,format.size.width,format.size.height);
			printf("Converting to RGB24");
		}
	}
}
Example #5
File: sender.cpp  Project: tenderlove/quail
int main (int argc, char *argv [])
{
    unicap_handle_t handle;
    unicap_device_t device;
    unicap_format_t src_format;
    unicap_format_t dest_format;
    unicap_data_buffer_t src_buffer;
    unicap_data_buffer_t dest_buffer;
    unicap_data_buffer_t *returned_buffer;

    if (argc != 4) {
        fprintf (stderr, "Usage: sender <hostname> <camera name> "
            "<interface>\n");
        exit (1);
    }

    //  Initialise 0MQ infrastructure

    //  1. Set error handler function (to ignore disconnected receivers)
    zmq::set_error_handler (error_handler);

    //  2. Initialise basic infrastructure for 2 threads
    zmq::dispatcher_t dispatcher (2);

    //  3. Initialise local locator (to connect to global locator)
    zmq::locator_t locator (argv [1]);

    //  4. Start one working thread (to send data to receivers)
    zmq::poll_thread_t *pt = zmq::poll_thread_t::create (&dispatcher);

    //  5. Register one API thread (the application thread - the one that
    //     is being executed at the moment)
    zmq::api_thread_t *api = zmq::api_thread_t::create (&dispatcher, &locator);

    //  6.  Define an entry point for the messages. The name of the entry point
    //      is user-defined ("camera name"). Specify that working thread "pt"
    //      will be used to listen to new connections being created as well as
    //      to send frames to existing connections.
    int e_id = api->create_exchange (argv [2], zmq::scope_global, argv [3],
        pt, 1, &pt);
    
    //  Open first available video capture device
    if (!SUCCESS (unicap_enumerate_devices (NULL, &device, 0))) {
        fprintf (stderr, "Could not enumerate devices\n");
        exit (1);
    }
    if (!SUCCESS (unicap_open (&handle, &device))) {
        fprintf (stderr, "Failed to open device: %s\n", device.identifier);
        exit (1);
    }
    printf( "Opened video capture device: %s\n", device.identifier );

    //  Find a suitable video format that we can convert to RGB24
    bool conversion_found = false;
    int index = 0;
    while (SUCCESS (unicap_enumerate_formats (handle, NULL, &src_format,
          index))) {
        printf ("Trying video format: %s\n", src_format.identifier);
        if (ucil_conversion_supported (FOURCC ('R', 'G', 'B', '3'), 
            src_format.fourcc)) {
            conversion_found = true;
            break;
        }
        index++;
    }
    if (!conversion_found) {
        fprintf (stderr, "Could not find a suitable video format\n");
        exit (1);
    }
    src_format.buffer_type = UNICAP_BUFFER_TYPE_USER;
    if (!SUCCESS (unicap_set_format (handle, &src_format))) {
        fprintf (stderr, "Failed to set video format\n");
        exit (1);
    }
    printf ("Using video format: %s [%dx%d]\n", 
        src_format.identifier, 
        src_format.size.width, 
        src_format.size.height);

    //  Clone destination format with equal dimensions, but RGB24 colorspace
    unicap_copy_format (&dest_format, &src_format);
    strcpy (dest_format.identifier, "RGB 24bpp");
    dest_format.fourcc = FOURCC ('R', 'G', 'B', '3');
    dest_format.bpp = 24;
    dest_format.buffer_size = dest_format.size.width *
        dest_format.size.height * 3;
    
    //  Initialise image buffers
    memset (&src_buffer, 0, sizeof (unicap_data_buffer_t));
    src_buffer.data = (unsigned char *)malloc (src_format.buffer_size);
    src_buffer.buffer_size = src_format.buffer_size;
    memset (&dest_buffer, 0, sizeof (unicap_data_buffer_t));
    dest_buffer.data = (unsigned char *)malloc (dest_format.buffer_size);
    dest_buffer.buffer_size = dest_format.buffer_size;
    dest_buffer.format = dest_format;

    //  Start video capture
    if (!SUCCESS (unicap_start_capture (handle))) {
        fprintf (stderr, "Failed to start capture on device: %s\n",
            device.identifier);
        exit (1);
    }

    //  Loop, sending video to defined exchange
    while (1) {

        //  Queue buffer for video capture
        if (!SUCCESS (unicap_queue_buffer (handle, &src_buffer))) {
            fprintf (stderr, "Failed to queue a buffer on device: %s\n",
                device.identifier);
            exit (1);
        }

        //  Wait until buffer is ready
        if (!SUCCESS (unicap_wait_buffer (handle, &returned_buffer))) {
            fprintf (stderr, "Failed to wait for buffer on device: %s\n",
                device.identifier);
            exit (1);
        }

        //  Convert colorspace
        if (!SUCCESS (ucil_convert_buffer (&dest_buffer, &src_buffer))) {
            //  TODO: This fails sometimes for unknown reasons,
            //  just skip the frame for now
            fprintf (stderr, "Failed to convert video buffer\n");
        }

        //  Create ZMQ message
        zmq::message_t msg (dest_format.buffer_size + (2 * sizeof (uint32_t)));
        unsigned char *data = (unsigned char *)msg.data();

        //  Image width in pixels
        zmq::put_uint32 (data, (uint32_t)dest_format.size.width);
        data += sizeof (uint32_t);

        //  Image height in pixels
        zmq::put_uint32 (data, (uint32_t)dest_format.size.height);
        data += sizeof (uint32_t);

        //  RGB24 image data
        memcpy (data, dest_buffer.data, dest_format.buffer_size);

        //  Send message 
        api->send (e_id, msg);
    }
    
    return 0;
}
Example #6
int main( int argc, char **argv )
{
   unicap_handle_t handle;
   unicap_device_t device;
   unicap_format_t format_spec;
   unicap_format_t format;
   unicap_data_buffer_t buffer;
   unicap_data_buffer_t *returned_buffer;
	
   int i;

   SDL_Surface *screen;
   SDL_Overlay *overlay;

   int quit=0;

   /*
     Enumerate available video capture devices
    */
   printf( "select video device\n" );
   for( i = 0; SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ); i++ )
   {
      printf( "%i: %s\n", i, device.identifier );
   }
   if( --i > 0 )
   {
      printf( "Select video capture device: " );
      scanf( "%d", &i );
   }

   if( !SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ) )
   {
      fprintf( stderr, "Failed to get info for device '%s'\n", device.identifier );
      exit( 1 );
   }

   /*
     Acquire a handle to selected device
   */
   if( !SUCCESS( unicap_open( &handle, &device ) ) )
   {
      fprintf( stderr, "Failed to open device: %s\n", device.identifier );
      exit( 1 );
   }

   printf( "Opened video capture device: %s\n", device.identifier );

   unicap_void_format( &format_spec );
	
   /*
     Get the list of video formats
   */
   for( i = 0; SUCCESS( unicap_enumerate_formats( handle, 
						  NULL, 
						  &format, i ) ); 
	i++ )
   {
      printf( "%d: %s\n", 
	      i,
	      format.identifier );
   }
   if( --i > 0 )
   {
      printf( "Select video format: " );
      scanf( "%d", &i );
   }
   if( !SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, i ) ) )
   {
      fprintf( stderr, "Failed to get video format %d\n", i );
      exit( 1 );
   }
	
   /*
     If a video format has more than one size, ask for which size to use
   */
   if( format.size_count )
   {
      for( i = 0; i < format.size_count; i++ )
      {
	 printf( "%d: %dx%d\n", i, format.sizes[i].width, format.sizes[i].height );
      }
      do
      {
	 printf( "Select video format size: " );
	 scanf( "%d", &i );
      }while( ( i < 0 ) || ( i >= format.size_count ) );
      format.size.width = format.sizes[i].width;
      format.size.height = format.sizes[i].height;
   }

   /*
     Set this video format
   */
   if( !SUCCESS( unicap_set_format( handle, &format ) ) )
   {
      fprintf( stderr, "Failed to set video format\n" );
      exit( 1 );
   }

   /*
     Initialize the image buffer
   */
   memset( &buffer, 0x0, sizeof( unicap_data_buffer_t ) );

   /**
      Init SDL & SDL_Overlay
   **/
   if ( SDL_Init(SDL_INIT_VIDEO) < 0 )
   {
      fprintf(stderr, "Failed to initialize SDL:  %s\n", SDL_GetError());
      exit(1);
   }
	
   atexit(SDL_Quit);

   screen = SDL_SetVideoMode( format.size.width, format.size.height, 32, SDL_HWSURFACE);
   if ( screen == NULL ) {
      fprintf(stderr, "Unable to set video mode: %s\n", SDL_GetError());
      exit(1);
   }
   
   

   overlay = SDL_CreateYUVOverlay( format.size.width, 
				   format.size.height, 
				   format.fourcc, 
				   screen );
   if( overlay == NULL )
   {
      fprintf( stderr, "Unable to create overlay: %s\n", SDL_GetError() );
      exit( 1 );
   }

   /*
     Pass the pointer to the overlay to the unicap data buffer. 
   */
   buffer.data = overlay->pixels[0];	
   buffer.buffer_size = format.size.width * format.size.height * format.bpp / 8;

   /*
     Start the capture process on the device
   */
   if( !SUCCESS( unicap_start_capture( handle ) ) )
   {
      fprintf( stderr, "Failed to start capture on device: %s\n", device.identifier );
      exit( 1 );
   }
	

   while( !quit )
   {
      SDL_Rect rect;
      SDL_Event event;

      rect.x = rect.y = 0;
      rect.w = format.size.width;
      rect.h = format.size.height;
      /*
	Queue the buffer
		  
	The buffer now gets filled with image data by the capture device
      */
      if( !SUCCESS( unicap_queue_buffer( handle, &buffer ) ) )
      {
	 fprintf( stderr, "Failed to queue a buffer on device: %s\n", device.identifier );
	 exit( 1 );
      }
		
      /*
	Wait until the image buffer is ready
      */
      if( !SUCCESS( unicap_wait_buffer( handle, &returned_buffer ) ) )
      {
	 fprintf( stderr, "Failed to wait for buffer on device: %s\n", device.identifier );
      }

      /*
	Display the video data
      */
      SDL_UnlockYUVOverlay( overlay );
      SDL_DisplayYUVOverlay( overlay, &rect );
      SDL_LockYUVOverlay(overlay);

      while( SDL_PollEvent( &event ) )
      {
	 if( event.type == SDL_QUIT )
	 {
	    printf( "Quit\n" );
	    quit=1;
	 }
      }
   }
	
   /*
     Stop the device
   */
   if( !SUCCESS( unicap_stop_capture( handle ) ) )
   {
      fprintf( stderr, "Failed to stop capture on device: %s\n", device.identifier );
   }

   /*
     Close the device 

     This invalidates the handle
   */
   if( !SUCCESS( unicap_close( handle ) ) )
   {
      fprintf( stderr, "Failed to close the device: %s\n", device.identifier );
   }

   SDL_Quit();
	
   return 0;
}
Example #7
int UnicapCamera::Open()
{
	unicap_device_t  device;
	unicap_handle_t  handle;
	unicap_format_t format;
	
	//Initialisation
	if (m_Device == NULL)
	{
		m_Device = (unicap_device_t *)malloc(sizeof(unicap_device_t));
		if (m_Device == NULL)
		{
			printf("UnicapCamera::Open: Error, no memory!\n");
			return ERROR_NO_MEMORY;
		}
	}
	
	if (m_Handle == NULL)
	{
		m_Handle = (unicap_handle_t *)malloc(sizeof(unicap_handle_t));
		if (m_Handle == NULL)
		{
			printf("UnicapCamera::Open: Error, no memory!\n");
			return ERROR_NO_MEMORY;
		}
	}
	if (m_Format == NULL)
	{
		m_Format = (unicap_format_t *)malloc(sizeof(unicap_format_t));
		if (m_Format == NULL)
		{
			printf("UnicapCamera::Open: Error, no memory!\n");
			return ERROR_NO_MEMORY;
		}
	}
	
	
	// Search camera devices
	
	if( !SUCCESS( unicap_enumerate_devices( NULL, &device, 0 ) ) )
	{
		printf("UnicapCamera::Open: No device found!\n");
		return ERROR_NO_CAMERAS_FOUND;
	}
	else
	{
		*m_Device = device;
		printf("UnicapCamera::Open: Device %s found, vendor: %s, controlled by %s, %s\n", m_Device->identifier, m_Device->vendor_name, m_Device->device, m_Device->cpi_layer);
	}

	/*
	  Acquire a handle to this device
	 */
	if( !SUCCESS( unicap_open( &handle, m_Device ) ) )
	{
		printf("UnicapCamera::Open: Failed to open device %s: %s\n", m_Device->identifier, strerror(errno));
		return ERROR_CAMERA_COULD_NOT_BE_OPENED;
	}
	else
	{
		*m_Handle = handle;
		m_CameraActive = true;
		printf("DUnicapCamera::Open:evice %s successfully opened!\n", m_Device->identifier);
		
	}
	
	
	//Set format according to specified resolution	
	if( !SUCCESS( unicap_enumerate_formats( *m_Handle, NULL, &format, m_Resolution ) ) )
	{
		printf("UnicapCamera::Open: Failed to get video format, setting to default\n" );
			
		
		//return UNSPECIFIED_ERROR;;
	}
	else
	{
		*m_Format = format;
		printf("UnicapCamera::Open: Format %s chosen\n", format.identifier );
		printf("UnicapCamera::Open: Setting video format: \nwidth: %d\nheight: %d\nbpp: %d\nFOURCC: %c%c%c%c\n\n", \
		m_Format->size.width,\
		m_Format->size.height,\
		m_Format->bpp, \
		m_Format->fourcc & 0xff, \
		( m_Format->fourcc >> 8 ) & 0xff, \
		( m_Format->fourcc >> 16 ) & 0xff, \
		( m_Format->fourcc >> 24 ) & 0xff  \
		);
		/*
		Set this video format
		*/
		if( !SUCCESS( unicap_set_format( *m_Handle, m_Format ) ) )
		{
			printf("UnicapCamera::Open: Failed to set video format\n" );
			return UNSPECIFIED_ERROR;
		}
	}
	
	unicap_property_t property;

	// Set white balance mode to auto
        strcpy( property.identifier, "white_balance_mode" );
        if( !SUCCESS( unicap_get_property( *m_Handle, &property ) ) )
        {
                fprintf( stderr, "UnicapCamera::Open: Failed to get WB mode property\n" );
                exit( 1 );
        }

        // Set auto on this property
        property.flags = UNICAP_FLAGS_AUTO;//UNICAP_FLAGS_MANUAL;
        unicap_set_property( handle, &property );
	
	if( !SUCCESS( unicap_set_property( *m_Handle, &property ) ) )
        {
                printf( "UnicapCamera::Open: Failed to set property!\n" );
		return UNSPECIFIED_ERROR;
        }
	
	if( !SUCCESS( unicap_get_property( *m_Handle, &property ) ) )
        {
                fprintf( stderr, "UnicapCamera::Open: Failed to get WB mode property\n" );
                exit( 1 );
        }

        printf( "UnicapCamera::Open: Current white balance mode: %s\n", property.flags & UNICAP_FLAGS_AUTO ? "AUTO" : "MANUAL" );
	
	
	
	if( !SUCCESS( unicap_start_capture( *m_Handle ) ) )
		{
			printf( "UnicapCamera::Open: Failed to start capture on device %s\n", m_Device->identifier );
			return   UNSPECIFIED_ERROR;
		}
		
		

		
	return OK;
	
}
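Open() above allocates m_Device, m_Handle and m_Format and starts capturing, but the matching teardown is not part of this listing. Below is a minimal sketch of what a Close() counterpart could look like, reusing only calls that appear elsewhere in these examples (unicap_stop_capture, unicap_close); the member names and return codes are taken from Open(), everything else is an assumption.

int UnicapCamera::Close()
{
	// Release only what Open() actually acquired, in reverse order.
	if (m_Handle != NULL)
	{
		if( !SUCCESS( unicap_stop_capture( *m_Handle ) ) )
			printf("UnicapCamera::Close: Failed to stop capture\n");

		if( !SUCCESS( unicap_close( *m_Handle ) ) )
			printf("UnicapCamera::Close: Failed to close the device\n");

		free(m_Handle);
		m_Handle = NULL;
	}

	if (m_Format != NULL)
	{
		free(m_Format);
		m_Format = NULL;
	}

	if (m_Device != NULL)
	{
		free(m_Device);
		m_Device = NULL;
	}

	m_CameraActive = false;
	return OK;
}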
Example #8
int main( int argc, char **argv )
{
	unicap_handle_t handle;
	unicap_device_t device;
	unicap_format_t format_spec;
	unicap_format_t format;
	unicap_data_buffer_t buffer;
	unicap_data_buffer_t *returned_buffer;
	int width, height;
	
	int i;

	SDL_Surface *screen;
	SDL_Overlay *overlay;

	int quit=0;
	int imgcnt = 0;

	printf( "select video device\n" );
	for( i = 0; SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ); i++ )
	{
		printf( "%i: %s\n", i, device.identifier );
	}
	if( --i > 0 )
	{
		printf( "Select video capture device: " );
		scanf( "%d", &i );
	}

	if( !SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ) )
	{
		fprintf( stderr, "Failed to get info for device '%s'\n", device.identifier );
		exit( 1 );
	}

	/*
	  Acquire a handle to this device
	 */
	if( !SUCCESS( unicap_open( &handle, &device ) ) )
	{
		fprintf( stderr, "Failed to open device: %s\n", device.identifier );
		exit( 1 );
	}

	printf( "Opened video capture device: %s\n", device.identifier );

	/*
	  Create a format specification to limit the list of formats returned by 
	  unicap_enumerate_formats to the ones with the color format 'UYVY'
	 */
	unicap_void_format( &format_spec );
	format_spec.fourcc = FOURCC('U','Y','V','Y');
	
	/*
	  Get the list of video formats of the colorformat UYVY
	 */
	for( i = 0; SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, i ) ); i++ )
	{
		printf( "%d: %s [%dx%d]\n", 
				i,
				format.identifier, 
				format.size.width, 
				format.size.height );
	}
	if( --i > 0 )
	{
		printf( "Select video format: " );
		scanf( "%d", &i );
	}
	if( !SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, i ) ) )
	{
		fprintf( stderr, "Failed to get video format\n" );
		exit( 1 );
	}
	
   /*
     If a video format has more than one size, ask for which size to use
   */
   if( format.size_count )
   {
      for( i = 0; i < format.size_count; i++ )
      {
	 printf( "%d: %dx%d\n", i, format.sizes[i].width, format.sizes[i].height );
      }
      do
      {
	 printf( "Select video format size: " );
	 scanf( "%d", &i );
      }while( ( i < 0 ) || ( i >= format.size_count ) );
      format.size.width = format.sizes[i].width;
      format.size.height = format.sizes[i].height;
   }

	/*
	  Set this video format
	 */
	if( !SUCCESS( unicap_set_format( handle, &format ) ) )
	{
		fprintf( stderr, "Failed to set video format\n" );
		exit( 1 );
	}

	/*
	  Initialize the image buffer
	 */
	memset( &buffer, 0x0, sizeof( unicap_data_buffer_t ) );

	/**
	   Init SDL & SDL_Overlay
	 **/
	if ( SDL_Init(SDL_INIT_VIDEO) < 0 )
	{
	   fprintf(stderr, "Failed to initialize SDL:  %s\n", SDL_GetError());
	   exit(1);
	}
	
	atexit(SDL_Quit);

	/*
	  Make sure the video window does not get too big. 
	 */
	width = MIN( format.size.width, 800 );
	height = MIN( format.size.height, 600 );

	screen = SDL_SetVideoMode( width, height, 32, SDL_HWSURFACE);
	if ( screen == NULL ) {
	   fprintf(stderr, "Unable to set video mode: %s\n", SDL_GetError());
	   exit(1);
	}
	
	overlay = SDL_CreateYUVOverlay( format.size.width, 
									format.size.height, SDL_UYVY_OVERLAY, screen );
	if( overlay == NULL )
	{
	   fprintf( stderr, "Unable to create overlay: %s\n", SDL_GetError() );
	   exit( 1 );
	}

	/*
	  Pass the pointer to the overlay to the unicap data buffer. 
	 */
	buffer.data = overlay->pixels[0];	
   buffer.buffer_size = format.size.width * format.size.height * format.bpp / 8;
	
	/*
	  Start the capture process on the device
	 */
	if( !SUCCESS( unicap_start_capture( handle ) ) )
	{
		fprintf( stderr, "Failed to start capture on device: %s\n", device.identifier );
		exit( 1 );
	}
	

	while( !quit )
	{
		SDL_Rect rect;
		SDL_Event event;

		rect.x = 0;
		rect.y = 0;
		rect.w = width;
		rect.h = height;
		/*
		  Queue the buffer
		  
		  The buffer now gets filled with image data by the capture device
		*/
		if( !SUCCESS( unicap_queue_buffer( handle, &buffer ) ) )
		{
			fprintf( stderr, "Failed to queue a buffer on device: %s\n", device.identifier );
			exit( 1 );
		}
		
		/*
		  Wait until the image buffer is ready
		*/
		if( !SUCCESS( unicap_wait_buffer( handle, &returned_buffer ) ) )
		{
			fprintf( stderr, "Failed to wait for buffer on device: %s\n", device.identifier );
		}

		/*
		  Display the video data
		 */
		SDL_UnlockYUVOverlay( overlay );
		SDL_DisplayYUVOverlay( overlay, &rect );

		while( SDL_PollEvent( &event ) )
		{
			switch( event.type )
			{
				case SDL_QUIT:
					quit = 1;
					break;
					
				case SDL_MOUSEBUTTONDOWN:
				{
					unsigned char *pixels;
					struct jpeg_compress_struct cinfo;
					struct jpeg_error_mgr jerr;
					FILE *outfile;
					JSAMPROW row_pointer[1];
					int row_stride;
					char filename[128];

					struct timeval t1, t2;
					unsigned long long usecs;
					
					sprintf( filename, "%04d.jpg", imgcnt++ );
					
					cinfo.err = jpeg_std_error(&jerr);
					/* Now we can initialize the JPEG compression object. */
					jpeg_create_compress(&cinfo);
					if ((outfile = fopen( filename, "wb" ) ) == NULL ) 
					{
						fprintf(stderr, "can't open %s\n", "file");
						exit(1);
					}
					jpeg_stdio_dest(&cinfo, outfile);
					
					cinfo.image_width = format.size.width; 	/* image width and height, in pixels */
					cinfo.image_height = format.size.height;
					cinfo.input_components = 3;		/* # of color components per pixel */
					cinfo.in_color_space = JCS_RGB; 	/* colorspace of input image */
					
					jpeg_set_defaults(&cinfo);
					pixels = malloc( format.size.width * format.size.height * 3 );
					uyvy2rgb24( pixels, returned_buffer->data,
						    format.size.width * format.size.height * 3,
						    format.size.width * format.size.height * 2 );

					gettimeofday( &t1, NULL );
					jpeg_start_compress(&cinfo, TRUE);
					while( cinfo.next_scanline < cinfo.image_height )
					{
						row_pointer[0] = &pixels[cinfo.next_scanline * format.size.width * 3 ];
						(void) jpeg_write_scanlines(&cinfo, row_pointer, 1);
					}
					jpeg_finish_compress(&cinfo);
					gettimeofday( &t2, NULL );

					usecs = t2.tv_sec * 1000000LL + t2.tv_usec;
					usecs -= ( t1.tv_sec * 1000000LL + t1.tv_usec );
					
					printf( "Compression took: %lld usec\n", usecs );

					/* After finish_compress, we can close the output file. */
					fclose(outfile);
					jpeg_destroy_compress(&cinfo);
					
					free( pixels );
				}
				
				break;
				
				
				default: 
					break;
			}
		}
		SDL_LockYUVOverlay(overlay);
	}
	
	/*
	  Stop the device
	 */
	if( !SUCCESS( unicap_stop_capture( handle ) ) )
	{
		fprintf( stderr, "Failed to stop capture on device: %s\n", device.identifier );
	}

	/*
	  Close the device 

	  This invalidates the handle
	 */
	if( !SUCCESS( unicap_close( handle ) ) )
	{
		fprintf( stderr, "Failed to close the device: %s\n", device.identifier );
	}

	SDL_Quit();
	
	return 0;
}
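The JPEG snapshot branch above relies on a helper uyvy2rgb24() that is not included in this listing. Below is a minimal sketch of such a conversion, assuming the usual UYVY byte order (U0 Y0 V0 Y1 per pixel pair) and BT.601 coefficients; it is an illustration of the idea, not the helper the example actually uses.

/* Minimal UYVY -> RGB24 sketch. The dest_size / source_size parameters
   mirror the arguments used in the example above. */
static unsigned char clamp_byte( int v )
{
	return v < 0 ? 0 : ( v > 255 ? 255 : (unsigned char) v );
}

static void uyvy2rgb24( unsigned char *dest, const unsigned char *source,
			int dest_size, int source_size )
{
	int di = 0, si = 0;

	/* each 4 source bytes (U Y0 V Y1) produce two RGB pixels (6 bytes) */
	while( ( si + 3 ) < source_size && ( di + 5 ) < dest_size )
	{
		int u  = source[si]     - 128;
		int y0 = source[si + 1];
		int v  = source[si + 2] - 128;
		int y1 = source[si + 3];

		/* BT.601 integer approximation of the YUV -> RGB matrix */
		int r_off = ( 91881 * v ) >> 16;
		int g_off = ( 22554 * u + 46802 * v ) >> 16;
		int b_off = ( 116130 * u ) >> 16;

		dest[di++] = clamp_byte( y0 + r_off );
		dest[di++] = clamp_byte( y0 - g_off );
		dest[di++] = clamp_byte( y0 + b_off );
		dest[di++] = clamp_byte( y1 + r_off );
		dest[di++] = clamp_byte( y1 - g_off );
		dest[di++] = clamp_byte( y1 + b_off );

		si += 4;
	}
}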