/* Query the camera's currently configured video format.
 *
 * Fills 'format' with the active pixel format (identifier, FOURCC, bpp),
 * the current capture region, and the width/height bounds reported by the
 * aravis camera.
 *
 * Returns STATUS_SUCCESS on success, STATUS_FAILURE if the camera does not
 * report a pixel format.
 */
static unicap_status_t aravis_get_format( aravis_handle_t handle, unicap_format_t *format )
{
	ArvPixelFormat pixel_fmt = arv_camera_get_pixel_format (handle->camera);

	if (!pixel_fmt)
		return STATUS_FAILURE;

	unicap_void_format (format);
	/* Bounded copy: the original used strcpy(), which would overflow the
	   fixed-size identifier field if a format string ever exceeds it. */
	snprintf (format->identifier, sizeof (format->identifier), "%s",
		  aravis_tools_get_pixel_format_string (pixel_fmt));
	format->fourcc = aravis_tools_get_fourcc (pixel_fmt);
	format->bpp = aravis_tools_get_bpp (pixel_fmt);

	arv_camera_get_region (handle->camera,
			       &format->size.x, &format->size.y,
			       &format->size.width, &format->size.height);
	arv_camera_get_width_bounds (handle->camera,
				     &format->min_size.width, &format->max_size.width);
	arv_camera_get_height_bounds (handle->camera,
				      &format->min_size.height, &format->max_size.height);

	/* bpp is bits per pixel, hence the division by 8 */
	format->buffer_size = format->bpp * format->size.width * format->size.height / 8;
	format->buffer_type = UNICAP_BUFFER_TYPE_SYSTEM;

	return STATUS_SUCCESS;
}
/* Allocate a unicap data buffer sized for a width x height image with the
 * given FOURCC and bits-per-pixel.
 *
 * The returned buffer and its data area are heap-allocated; the caller owns
 * both.  Returns NULL if either allocation fails (the original version
 * dereferenced the malloc() results without checking them).
 */
__HIDDEN__ unicap_data_buffer_t *ucil_allocate_buffer( int width, int height, unsigned int fourcc, int bpp )
{
   unicap_data_buffer_t *buffer;

   buffer = malloc( sizeof( unicap_data_buffer_t ) );
   if( !buffer )
      return NULL;

   unicap_void_format( &buffer->format );
   buffer->format.size.width = width;
   buffer->format.size.height = height;
   buffer->format.fourcc = fourcc;
   buffer->format.bpp = bpp;
   /* bpp is bits per pixel, hence the division by 8 */
   buffer->format.buffer_size = buffer->format.size.width * buffer->format.size.height * buffer->format.bpp / 8;
   buffer->buffer_size = buffer->format.buffer_size;

   buffer->data = malloc( buffer->buffer_size );
   if( !buffer->data )
   {
      free( buffer );
      return NULL;
   }

   return buffer;
}
/* Expose one of the backend's internal image buffers to the caller.
 *
 * Describes the data as packed RGB24 at the backend's current size, points
 * 'data_buffer' at the ring slot following 'current_buffer', and copies that
 * slot's fill timestamp.  The pointed-to data is owned by the backend, not
 * the caller.  The 'b' parameter is unused.
 */
void backend_gtk_get_image_data( gpointer _data, unicap_data_buffer_t *data_buffer, int b )
{
   struct backend_data *data = _data;
   int slot = ( data->current_buffer + 1 ) % NUM_BUFFERS;

   unicap_void_format( &data_buffer->format );
   data_buffer->format.fourcc = UCIL_FOURCC( 'R', 'G', 'B', '3' );
   data_buffer->format.bpp = 24;
   data_buffer->format.size.width = data->format.size.width;
   data_buffer->format.size.height = data->format.size.height;
   /* 3 bytes per pixel for RGB24 */
   data_buffer->format.buffer_size = data->format.size.width * data->format.size.height * 3;
   data_buffer->buffer_size = data_buffer->format.buffer_size;

   data_buffer->data = data->image_data[ slot ];
   memcpy( &data_buffer->fill_time, &data->fill_times[ slot ], sizeof( struct timeval ) );
}
/* Rebuild the cached format list from the camera's available pixel formats.
 *
 * Only pixel formats that map to a known FOURCC are exported; every exported
 * format shares the camera's global width/height bounds and defaults to the
 * maximum size.  On return, handle->n_formats holds the number of entries
 * and, if '_pcount' is non-NULL, it receives the same count.
 *
 * Always returns STATUS_SUCCESS.
 */
static unicap_status_t aravis_reenumerate_formats( aravis_handle_t handle, int *_pcount )
{
	int idx = 0;
	guint i;	/* guint, not int: matches n_pixel_formats and avoids a
			   signed/unsigned comparison in the loop condition */
	guint n_pixel_formats;
	gint64 *pixel_formats;
	int min_width, max_width;
	int min_height, max_height;

	pixel_formats = arv_camera_get_available_pixel_formats (handle->camera, &n_pixel_formats);
	arv_camera_get_width_bounds (handle->camera, &min_width, &max_width);
	arv_camera_get_height_bounds (handle->camera, &min_height, &max_height);

	for (i = 0; i < n_pixel_formats; i++){
		unsigned int fourcc;

		fourcc = aravis_tools_get_fourcc (pixel_formats[i]);
		/* Skip pixel formats with no FOURCC mapping */
		if (fourcc){
			unicap_void_format (&handle->formats[idx]);
			handle->formats[idx].fourcc = fourcc;
			/* Bounded copy: original strcpy() could overflow the
			   fixed-size identifier field. */
			snprintf (handle->formats[idx].identifier,
				  sizeof (handle->formats[idx].identifier), "%s",
				  aravis_tools_get_pixel_format_string (pixel_formats[i]));
			handle->formats[idx].bpp = aravis_tools_get_bpp (pixel_formats[i]);
			handle->formats[idx].min_size.width = min_width;
			handle->formats[idx].min_size.height = min_height;
			handle->formats[idx].max_size.width = max_width;
			handle->formats[idx].max_size.height = max_height;
			handle->formats[idx].size.width = max_width;
			handle->formats[idx].size.height = max_height;
			/* bpp is bits per pixel, hence the division by 8 */
			handle->formats[idx].buffer_size = max_width * max_height * handle->formats[idx].bpp / 8;
			handle->formats[idx].buffer_type = UNICAP_BUFFER_TYPE_SYSTEM;
			idx++;
		}
	}

	g_free (pixel_formats);
	handle->n_formats = idx;

	if( _pcount )
		*_pcount = handle->n_formats;

	return STATUS_SUCCESS;
}
//-------------------------------------------------------------------- // If a 24 bit video format is founded this is the preferred one, if not, the first // returned by unicap is selected. // // Then it tries to set the desired width and height, if these fails, tries find the // nearest size or to set the default width and height. // // On V4L devices 24 bit format is always BGR, so it needs conversion. // On some V4L devices using non-default width/heigth it reports BGR but returns RGB. // ffmpeg color conversion void ofUCUtils::set_format(int w, int h) { unicap_format_t formats[MAX_FORMATS]; int format_count; unicap_status_t status = STATUS_SUCCESS; int rgb24 = -1; if(verbose) printf("Unicap : Available formats for this device:\n"); for (format_count = 0; SUCCESS (status) && (format_count < MAX_FORMATS); format_count++) { status = unicap_enumerate_formats (handle, NULL, &formats[format_count], format_count); if (SUCCESS (status)) { if (formats[format_count].bpp == 8) { rgb24 = format_count; } if(verbose) printf ( "Unicap : %d: %s, min size: %dx%d, max size:%dx%d, default size: %dx%d\n", format_count, formats[format_count].identifier, formats[format_count].min_size.width, formats[format_count].min_size.height, formats[format_count].max_size.width, formats[format_count].max_size.height, formats[format_count].size.width, formats[format_count].size.height); } } if (format_count > 0) { int selected_format = 0; if (rgb24 != -1) selected_format = rgb24; format = formats[selected_format]; bool sizeFounded = true; bool exactMatch = false; if(w == format.size.width && h == format.size.height){ exactMatch = true; }else if(w <= format.min_size.width && h <= format.min_size.height){ format.size.width = format.min_size.width; format.size.height = format.min_size.height; }else if(w >= format.max_size.width && h >= format.max_size.height){ format.size.width = format.max_size.width; format.size.height = format.max_size.height; }else{ sizeFounded=false; } if(sizeFounded){ 
if(verbose && !exactMatch) printf ("Unicap : Can't set video format %s, with size %dx%d\n", format.identifier, w, h); if ( !SUCCESS ( unicap_set_format (handle, &format) ) ) { printf ("Unicap : Failed to set alternative video format!\n"); return; } }else{ format.size.width = w; format.size.height = h; //Try selected size if (!SUCCESS (unicap_set_format (handle, &format))) { printf ("Unicap : Can't set video format %s, with size %dx%d\n", format.identifier, w, h); // If selected size doesn't work try to find a supported one unicap_format_t format_spec; unicap_void_format(&format_spec); int nearW = 9999999; int nearH = 9999999; //Try with unicap reported sizes if(format.size_count > 0){ if(verbose)printf ("Unicap : Available sizes: %d\n",format.size_count); for(int i = 0; i < format.size_count; i++){ if(verbose) printf ("%d,%d\n",format.sizes[i].width,format.sizes[i].height); if(abs(w-format.sizes[i].width)<abs(w-nearW)){ nearW = format.sizes[i].width; nearH = format.sizes[i].height; } } format.size.width = nearW; format.size.height = nearH; //Try with stepping }else if(format.h_stepping > 1 || format.v_stepping > 1){ //This is how many diff sizes are available for the format int stepX = format.h_stepping; int stepY = format.v_stepping; for(int x = format.min_size.x; x <= format.max_size.x; x+= stepX) { if( abs(w-x) < abs(w-nearW) ){ nearW = x; } } for(int y = format.min_size.y; y <= format.max_size.y; y+= stepY) { if( abs(h-y) < abs(h-nearH) ){ nearH = y; } } format.size.width = nearW; format.size.height = nearH; } //Try to set founded size sizeFounded = SUCCESS ( unicap_set_format (handle, &format) ); //If none of the above work, try default size if(!sizeFounded){ if ( !SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, selected_format ) ) ) { printf("Unicap : Failed to get alternative video format\n"); return; } if ( !SUCCESS ( unicap_set_format (handle, &format) ) ) { printf ("Unicap : Failed to set alternative video format!\n"); return; } } } } 
if(verbose) printf("Unicap : Selected format: %s, with size %dx%d\n", format.identifier, format.size.width, format.size.height); src_pix_fmt=fourcc_to_pix_fmt(format.fourcc); if( src_pix_fmt==-1){ printf("Unicap : Format not suported\n"); return; } if(src_pix_fmt!=PIX_FMT_RGB24){ src=new AVPicture; avpicture_alloc(src,src_pix_fmt,format.size.width,format.size.height); dst=new AVPicture; avpicture_alloc(dst,PIX_FMT_RGB24,format.size.width,format.size.height); printf("Converting to RGB24"); } } }
int main( int argc, char **argv ) { unicap_handle_t handle; unicap_device_t device; unicap_format_t format_spec; unicap_format_t format; unicap_data_buffer_t buffer; unicap_data_buffer_t *returned_buffer; int i; SDL_Surface *screen; SDL_Overlay *overlay; int quit=0; /* Enumerate available video capture devices */ printf( "select video device\n" ); for( i = 0; SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ); i++ ) { printf( "%i: %s\n", i, device.identifier ); } if( --i > 0 ) { printf( "Select video capture device: " ); scanf( "%d", &i ); } if( !SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ) ) { fprintf( stderr, "Failed to get info for device '%s'\n", device.identifier ); exit( 1 ); } /* Acquire a handle to selected device */ if( !SUCCESS( unicap_open( &handle, &device ) ) ) { fprintf( stderr, "Failed to open device: %s\n", device.identifier ); exit( 1 ); } printf( "Opened video capture device: %s\n", device.identifier ); unicap_void_format( &format_spec ); /* Get the list of video formats */ for( i = 0; SUCCESS( unicap_enumerate_formats( handle, NULL, &format, i ) ); i++ ) { printf( "%d: %s\n", i, format.identifier ); } if( --i > 0 ) { printf( "Select video format: " ); scanf( "%d", &i ); } if( !SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, i ) ) ) { fprintf( stderr, "Failed to get video format %d\n", i ); exit( 1 ); } /* If a video format has more than one size, ask for which size to use */ if( format.size_count ) { for( i = 0; i < format.size_count; i++ ) { printf( "%d: %dx%d\n", i, format.sizes[i].width, format.sizes[i].height ); } do { printf( "Select video format size: " ); scanf( "%d", &i ); }while( ( i < 0 ) && ( i > format.size_count ) ); format.size.width = format.sizes[i].width; format.size.height = format.sizes[i].height; } /* Set this video format */ if( !SUCCESS( unicap_set_format( handle, &format ) ) ) { fprintf( stderr, "Failed to set video format\n" ); exit( 1 ); } /* Initialize the image buffer */ memset( 
&buffer, 0x0, sizeof( unicap_data_buffer_t ) ); /** Init SDL & SDL_Overlay **/ if ( SDL_Init(SDL_INIT_VIDEO) < 0 ) { fprintf(stderr, "Failed to initialize SDL: %s\n", SDL_GetError()); exit(1); } atexit(SDL_Quit); screen = SDL_SetVideoMode( format.size.width, format.size.height, 32, SDL_HWSURFACE); if ( screen == NULL ) { fprintf(stderr, "Unable to set video mode: %s\n", SDL_GetError()); exit(1); } overlay = SDL_CreateYUVOverlay( format.size.width, format.size.height, format.fourcc, screen ); if( overlay == NULL ) { fprintf( stderr, "Unable to create overlay: %s\n", SDL_GetError() ); exit( 1 ); } /* Pass the pointer to the overlay to the unicap data buffer. */ buffer.data = overlay->pixels[0]; buffer.buffer_size = format.size.width * format.size.height * format.bpp / 8; /* Start the capture process on the device */ if( !SUCCESS( unicap_start_capture( handle ) ) ) { fprintf( stderr, "Failed to start capture on device: %s\n", device.identifier ); exit( 1 ); } while( !quit ) { SDL_Rect rect; SDL_Event event; rect.x = rect.y = 0; rect.w = format.size.width; rect.h = format.size.height; /* Queue the buffer The buffer now gets filled with image data by the capture device */ if( !SUCCESS( unicap_queue_buffer( handle, &buffer ) ) ) { fprintf( stderr, "Failed to queue a buffer on device: %s\n", device.identifier ); exit( 1 ); } /* Wait until the image buffer is ready */ if( !SUCCESS( unicap_wait_buffer( handle, &returned_buffer ) ) ) { fprintf( stderr, "Failed to wait for buffer on device: %s\n", device.identifier ); } /* Display the video data */ SDL_UnlockYUVOverlay( overlay ); SDL_DisplayYUVOverlay( overlay, &rect ); SDL_LockYUVOverlay(overlay); while( SDL_PollEvent( &event ) ) { if( event.type == SDL_QUIT ) { printf( "Quit\n" ); quit=1; } } } /* Stop the device */ if( !SUCCESS( unicap_stop_capture( handle ) ) ) { fprintf( stderr, "Failed to stop capture on device: %s\n", device.identifier ); } /* Close the device This invalidates the handle */ if( !SUCCESS( unicap_close( 
handle ) ) ) { fprintf( stderr, "Failed to close the device: %s\n", device.identifier ); } SDL_Quit(); return 0; }
/* Fill a unicap_format_t from a Python dict.
 *
 * Recognized keys: "identifier" (string), "fourcc" (4-character string),
 * "bpp" (int) and "size" (tuple of two ints).  Missing keys leave the
 * corresponding field at its unicap_void_format() default.  buffer_size is
 * derived from size and bpp.
 *
 * Returns 0 on success, -1 on error (with a Python exception set for type
 * errors).
 */
int parse_video_format( unicap_format_t *format, PyObject *obj )
{
	PyObject *tmp;

	unicap_void_format( format );

	tmp = PyDict_GetItemString( obj, "identifier" );
	if( tmp )
	{
		char *str;
		str = PyString_AsString( tmp );
		if( str )
		{
			/* Bounded copy: original strcpy() could overflow the
			   fixed-size identifier field. */
			snprintf( format->identifier, sizeof( format->identifier ), "%s", str );
		}
		else
		{
			return -1;
		}
	}

	tmp = PyDict_GetItemString( obj, "fourcc" );
	if( tmp )
	{
		char *str = PyString_AsString( tmp );
		/* BUG FIX: the original read str[0..3] unconditionally, reading
		   past the end of the buffer when the string is shorter than 4
		   characters. */
		if( str && ( strlen( str ) >= 4 ) )
		{
			format->fourcc = ((unsigned int)str[0]) |
				(((unsigned int)str[1])<<8) |
				(((unsigned int)str[2])<<16) |
				(((unsigned int)str[3])<<24);
		}
		else
		{
			if( str )
			{
				PyErr_SetString( PyExc_ValueError, "'fourcc' must be a string of 4 characters" );
			}
			return -1;
		}
	}

	tmp = PyDict_GetItemString( obj, "bpp" );
	if( tmp )
	{
		if( !PyInt_Check( tmp ) )
		{
			PyErr_SetString( PyExc_TypeError, "'bpp' must be of type Integer" );
			return -1;
		}
		format->bpp = PyInt_AsLong( tmp );
	}

	tmp = PyDict_GetItemString( obj, "size" );
	if( tmp )
	{
		PyObject *t;
		if( !PyTuple_Check( tmp ) )
		{
			PyErr_SetString( PyExc_TypeError, "'size' must be a tuple of Integers" );
			return -1;
		}
		t = PyTuple_GetItem( tmp, 0 );
		if( !t )
			return -1;
		if( !PyInt_Check( t ) )
		{
			PyErr_SetString( PyExc_TypeError, "'size' must be a tuple of Integers" );
			return -1;
		}
		format->size.width = PyInt_AsLong( t );
		t = PyTuple_GetItem( tmp, 1 );
		if( !t )
			return -1;
		if( !PyInt_Check( t ) )
		{
			PyErr_SetString( PyExc_TypeError, "'size' must be a tuple of Integers" );
			return -1;
		}
		format->size.height = PyInt_AsLong( t );
	}

	/* bpp is bits per pixel, hence the division by 8 */
	format->buffer_size = format->size.width * format->size.height * format->bpp / 8;

	return 0;
}
int main( int argc, char **argv ) { unicap_handle_t handle; unicap_device_t device; unicap_format_t format_spec; unicap_format_t format; unicap_data_buffer_t buffer; unicap_data_buffer_t *returned_buffer; int width, height; int i; SDL_Surface *screen; SDL_Overlay *overlay; int quit=0; int imgcnt = 0; printf( "select video device\n" ); for( i = 0; SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ); i++ ) { printf( "%i: %s\n", i, device.identifier ); } if( --i > 0 ) { printf( "Select video capture device: " ); scanf( "%d", &i ); } if( !SUCCESS( unicap_enumerate_devices( NULL, &device, i ) ) ) { fprintf( stderr, "Failed to get info for device '%s'\n", device.identifier ); exit( 1 ); } /* Acquire a handle to this device */ if( !SUCCESS( unicap_open( &handle, &device ) ) ) { fprintf( stderr, "Failed to open device: %s\n", device.identifier ); exit( 1 ); } printf( "Opened video capture device: %s\n", device.identifier ); /* Create a format specification to limit the list of formats returned by unicap_enumerate_formats to the ones with the color format 'UYVY' */ unicap_void_format( &format_spec ); format_spec.fourcc = FOURCC('U','Y','V','Y'); /* Get the list of video formats of the colorformat UYVY */ for( i = 0; SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, i ) ); i++ ) { printf( "%d: %s [%dx%d]\n", i, format.identifier, format.size.width, format.size.height ); } if( --i > 0 ) { printf( "Select video format: " ); scanf( "%d", &i ); } if( !SUCCESS( unicap_enumerate_formats( handle, &format_spec, &format, i ) ) ) { fprintf( stderr, "Failed to get video format\n" ); exit( 1 ); } /* If a video format has more than one size, ask for which size to use */ if( format.size_count ) { for( i = 0; i < format.size_count; i++ ) { printf( "%d: %dx%d\n", i, format.sizes[i].width, format.sizes[i].height ); } do { printf( "Select video format size: " ); scanf( "%d", &i ); }while( ( i < 0 ) && ( i > format.size_count ) ); format.size.width = format.sizes[i].width; 
format.size.height = format.sizes[i].height; } /* Set this video format */ if( !SUCCESS( unicap_set_format( handle, &format ) ) ) { fprintf( stderr, "Failed to set video format\n" ); exit( 1 ); } /* Initialize the image buffer */ memset( &buffer, 0x0, sizeof( unicap_data_buffer_t ) ); /** Init SDL & SDL_Overlay **/ if ( SDL_Init(SDL_INIT_VIDEO) < 0 ) { fprintf(stderr, "Failed to initialize SDL: %s\n", SDL_GetError()); exit(1); } atexit(SDL_Quit); /* Make sure the video window does not get too big. */ width = MIN( format.size.width, 800 ); height = MIN( format.size.height, 600 ); screen = SDL_SetVideoMode( width, height, 32, SDL_HWSURFACE); if ( screen == NULL ) { fprintf(stderr, "Unable to set video mode: %s\n", SDL_GetError()); exit(1); } overlay = SDL_CreateYUVOverlay( format.size.width, format.size.height, SDL_UYVY_OVERLAY, screen ); if( overlay == NULL ) { fprintf( stderr, "Unable to create overlay: %s\n", SDL_GetError() ); exit( 1 ); } /* Pass the pointer to the overlay to the unicap data buffer. 
*/ buffer.data = overlay->pixels[0]; buffer.buffer_size = format.size.width * format.size.height * format.bpp / 8; /* Start the capture process on the device */ if( !SUCCESS( unicap_start_capture( handle ) ) ) { fprintf( stderr, "Failed to start capture on device: %s\n", device.identifier ); exit( 1 ); } while( !quit ) { SDL_Rect rect; SDL_Event event; rect.x = 0; rect.y = 0; rect.w = width; rect.h = height; /* Queue the buffer The buffer now gets filled with image data by the capture device */ if( !SUCCESS( unicap_queue_buffer( handle, &buffer ) ) ) { fprintf( stderr, "Failed to queue a buffer on device: %s\n", device.identifier ); exit( 1 ); } /* Wait until the image buffer is ready */ if( !SUCCESS( unicap_wait_buffer( handle, &returned_buffer ) ) ) { fprintf( stderr, "Failed to wait for buffer on device: %s\n", device.identifier ); } /* Display the video data */ SDL_UnlockYUVOverlay( overlay ); SDL_DisplayYUVOverlay( overlay, &rect ); while( SDL_PollEvent( &event ) ) { switch( event.type ) { case SDL_QUIT: quit = 1; break; case SDL_MOUSEBUTTONDOWN: { unsigned char *pixels; struct jpeg_compress_struct cinfo; struct jpeg_error_mgr jerr; FILE *outfile; JSAMPROW row_pointer[1]; int row_stride; char filename[128]; struct timeval t1, t2; unsigned long long usecs; sprintf( filename, "%04d.jpg", imgcnt++ ); cinfo.err = jpeg_std_error(&jerr); /* Now we can initialize the JPEG compression object. 
*/ jpeg_create_compress(&cinfo); if ((outfile = fopen( filename, "wb" ) ) == NULL ) { fprintf(stderr, "can't open %s\n", "file"); exit(1); } jpeg_stdio_dest(&cinfo, outfile); cinfo.image_width = format.size.width; /* image width and height, in pixels */ cinfo.image_height = format.size.height; cinfo.input_components = 3; /* # of color components per pixel */ cinfo.in_color_space = JCS_RGB; /* colorspace of input image */ jpeg_set_defaults(&cinfo); pixels = malloc( format.size.width * format.size.height * 3 ); uyvy2rgb24( pixels, returned_buffer->data, format.size.width * format.size.height * 3, format.size.width * format.size.height * 2 ); gettimeofday( &t1, NULL ); jpeg_start_compress(&cinfo, TRUE); while( cinfo.next_scanline < cinfo.image_height ) { row_pointer[0] = &pixels[cinfo.next_scanline * format.size.width * 3 ]; (void) jpeg_write_scanlines(&cinfo, row_pointer, 1); } jpeg_finish_compress(&cinfo); gettimeofday( &t2, NULL ); usecs = t2.tv_sec * 1000000LL + t2.tv_usec; usecs -= ( t1.tv_sec * 1000000LL + t1.tv_usec ); printf( "Compression took: %lld usec\n", usecs ); /* After finish_compress, we can close the output file. */ fclose(outfile); jpeg_destroy_compress(&cinfo); free( pixels ); } break; default: break; } } SDL_LockYUVOverlay(overlay); } /* Stop the device */ if( !SUCCESS( unicap_stop_capture( handle ) ) ) { fprintf( stderr, "Failed to stop capture on device: %s\n", device.identifier ); } /* Close the device This invalidates the handle */ if( !SUCCESS( unicap_close( handle ) ) ) { fprintf( stderr, "Failed to close the device: %s\n", device.identifier ); } SDL_Quit(); return 0; }