Example #1
static void prepare_yuv_buffer( unicap_data_buffer_t *buffer, unicap_format_t *format )
{
   unicap_copy_format( &buffer->format, format );
   buffer->format.fourcc = UCIL_FOURCC( 'I', '4', '2', '0' );
   buffer->format.bpp = 12;
   buffer->format.buffer_size = format->size.width * format->size.height * 12 / 8;
   
   buffer->buffer_size = buffer->format.buffer_size;
   buffer->data = malloc( buffer->buffer_size );
}
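The 12/8 factor comes from I420 being a planar YUV 4:2:0 layout: one full-resolution Y plane plus quarter-resolution U and V planes, i.e. 12 bits per pixel. A minimal sketch of the same size calculation (standalone, with hypothetical dimensions for the check in the comment):

#include <stdlib.h>

/* I420: w*h luma bytes + two (w/2)*(h/2) chroma planes = w*h*3/2 bytes. */
static size_t i420_buffer_size( int width, int height )
{
   return (size_t)width * height                     /* Y plane  */
        + (size_t)( width / 2 ) * ( height / 2 ) * 2; /* U and V  */
}
/* For 640x480 this yields 460800 bytes, matching 640*480*12/8. */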
Example #2
static unicap_status_t theoracpi_enumerate_formats( ucil_theora_input_file_object_t *vobj, unicap_format_t *format, int index )
{
   unicap_status_t status = STATUS_NO_MATCH;
   if( index == 0 )
   {
      unicap_copy_format( format, &vobj->format );
      status = STATUS_SUCCESS;
   }
   
   return status;
}
Example #3
static unicap_status_t aravis_enumerate_formats( aravis_handle_t handle, unicap_format_t *format, int index )
{
	unicap_status_t status = STATUS_NO_MATCH;
	
	if ((index >= 0) && (index < handle->n_formats)){
		unicap_copy_format (format, &handle->formats[index]);
		status = STATUS_SUCCESS;
	}
   
	return status;
}
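Examples #2 and #3 implement the driver side of format enumeration: the caller passes increasing indices until the backend answers STATUS_NO_MATCH. The matching caller-side loop, as a sketch (assuming an already opened unicap_handle_t; the same pattern appears in Example #7 below):

/* Caller side (sketch): list every format the device offers. */
unicap_format_t format;
int index;
for( index = 0; SUCCESS( unicap_enumerate_formats( handle, NULL, &format, index ) ); index++ )
{
   printf( "format %d: %s\n", index, format.identifier );
}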
Example #4
static unicap_status_t aravis_set_format( aravis_handle_t handle, unicap_format_t *format )
{
	ArvPixelFormat fmt = aravis_tools_get_pixel_format (format);
	
	if (fmt == 0)
		return STATUS_INVALID_PARAMETER;
	
	arv_camera_set_pixel_format (handle->camera, fmt);
	arv_camera_set_region (handle->camera, format->size.x, format->size.y, format->size.width, format->size.height);
	unicap_copy_format (&handle->current_format, format);

	return STATUS_SUCCESS;
}
Example #5
static void aravis_stream_callback( aravis_handle_t handle, ArvStreamCallbackType type, ArvBuffer *buffer)
{
	if (type == ARV_STREAM_CALLBACK_TYPE_BUFFER_DONE){
		unicap_data_buffer_t data_buffer;
		unicap_copy_format (&data_buffer.format, &handle->current_format);
		data_buffer.buffer_size = buffer->size;
		data_buffer.data = buffer->data;
		data_buffer.type = UNICAP_BUFFER_TYPE_SYSTEM;
		data_buffer.fill_time.tv_sec =  buffer->timestamp_ns / 1000000000ULL;
		data_buffer.fill_time.tv_usec = (buffer->timestamp_ns % 1000000000ULL) / 1000ULL;
		handle->event_callback (handle->unicap_handle, UNICAP_EVENT_NEW_FRAME, &data_buffer);
		arv_stream_push_buffer (handle->stream, buffer);
	}
}
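The fill-time assignment splits Aravis' nanosecond timestamp into the struct timeval that unicap expects: whole seconds first, then the remainder reduced to microseconds. Pulled out as a standalone helper, purely for illustration:

#include <stdint.h>
#include <sys/time.h>

/* Convert a nanosecond timestamp into a struct timeval (illustrative helper). */
static void ns_to_timeval( uint64_t ns, struct timeval *tv )
{
   tv->tv_sec  = ns / 1000000000ULL;                /* whole seconds     */
   tv->tv_usec = ( ns % 1000000000ULL ) / 1000ULL;  /* remainder in us   */
}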
Example #6
void backend_gtk_update_image( gpointer _data, unicap_data_buffer_t *data_buffer, GError **err )
{
   unicap_data_buffer_t tmp_buffer;
   struct backend_data *data = _data;
   int tmp;

   if( !data_buffer )
   {
      g_warning( "update_image: data_buffer == NULL!\n" );
      return;
   }

   if( ( data_buffer->format.size.width != data->format.size.width ) ||
       ( data_buffer->format.size.height != data->format.size.height ) )
   {
      g_warning( "update_image: data_buffer format mismatch\n" );
      return;
   }

   unicap_copy_format( &tmp_buffer.format, &data_buffer->format );
   tmp_buffer.format.bpp = 24;
   tmp_buffer.format.fourcc = UCIL_FOURCC( 'R', 'G', 'B', '3' );

   tmp = ( data->current_buffer + 1 ) % NUM_BUFFERS;   
   memcpy( &data->fill_times[tmp], &data_buffer->fill_time, sizeof( struct timeval ) );
   tmp_buffer.data = data->image_data[tmp];
   tmp_buffer.buffer_size = data->format.size.width * data->format.size.height * 3;
   if( !data->color_conversion_cb )
   {
      ucil_convert_buffer( &tmp_buffer, data_buffer );
   }
   else
   {
      if( !data->color_conversion_cb( &tmp_buffer, data_buffer, data->color_conversion_data ) )
      {
	 ucil_convert_buffer( &tmp_buffer, data_buffer );
      } 
   }
}
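The call site fixes the contract for data->color_conversion_cb: it receives the RGB24 destination buffer, the source buffer, and an opaque user pointer, and a zero return makes the code fall back to ucil_convert_buffer. A hypothetical callback matching that contract (the signature is inferred from the call site above, not taken from a header):

/* Hypothetical callback; handles one fourcc itself, defers everything else. */
static int my_color_conversion( unicap_data_buffer_t *dest,
                                unicap_data_buffer_t *src,
                                void *user_data )
{
   if( src->format.fourcc != UCIL_FOURCC( 'G', 'R', 'E', 'Y' ) )
      return 0;   /* not handled here: let ucil_convert_buffer do it */

   /* ... custom GREY -> RGB3 conversion into dest->data ... */
   return 1;
}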
Example #7
int main (int argc, char *argv [])
{
    unicap_handle_t handle;
    unicap_device_t device;
    unicap_format_t src_format;
    unicap_format_t dest_format;
    unicap_data_buffer_t src_buffer;
    unicap_data_buffer_t dest_buffer;
    unicap_data_buffer_t *returned_buffer;

    if (argc != 4) {
        fprintf (stderr, "Usage: sender <hostname> <camera name> "
            "<interface>\n");
        exit (1);
    }

    //  Initialise 0MQ infrastructure

    //  1. Set error handler function (to ignore disconnected receivers)
    zmq::set_error_handler (error_handler);

    //  2. Initialise basic infrastructure for 2 threads
    zmq::dispatcher_t dispatcher (2);

    //  3. Initialise local locator (to connect to global locator)
    zmq::locator_t locator (argv [1]);

    //  4. Start one working thread (to send data to receivers)
    zmq::poll_thread_t *pt = zmq::poll_thread_t::create (&dispatcher);

    //  5. Register one API thread (the application thread - the one that
    //     is being executed at the moment)
    zmq::api_thread_t *api = zmq::api_thread_t::create (&dispatcher, &locator);

    //  6.  Define an entry point for the messages. The name of the entry point
    //      is user-defined ("camera name"). Specify that working thread "pt"
    //      will be used to listen to new connections being created as well as
    //      to send frames to existing connections.
    int e_id = api->create_exchange (argv [2], zmq::scope_global, argv [3],
        pt, 1, &pt);
    
    //  Open first available video capture device
    if (!SUCCESS (unicap_enumerate_devices (NULL, &device, 0))) {
        fprintf (stderr, "Could not enumerate devices\n");
        exit (1);
    }
    if (!SUCCESS (unicap_open (&handle, &device))) {
        fprintf (stderr, "Failed to open device: %s\n", device.identifier);
        exit (1);
    }
    printf( "Opened video capture device: %s\n", device.identifier );

    //  Find a suitable video format that we can convert to RGB24
    bool conversion_found = false;
    int index = 0;
    while (SUCCESS (unicap_enumerate_formats (handle, NULL, &src_format,
          index))) {
        printf ("Trying video format: %s\n", src_format.identifier);
        if (ucil_conversion_supported (FOURCC ('R', 'G', 'B', '3'), 
            src_format.fourcc)) {
            conversion_found = true;
            break;
        }
        index++;
    }
    if (!conversion_found) {
        fprintf (stderr, "Could not find a suitable video format\n");
        exit (1);
    }
    src_format.buffer_type = UNICAP_BUFFER_TYPE_USER;
    if (!SUCCESS (unicap_set_format (handle, &src_format))) {
        fprintf (stderr, "Failed to set video format\n");
        exit (1);
    }
    printf ("Using video format: %s [%dx%d]\n", 
        src_format.identifier, 
        src_format.size.width, 
        src_format.size.height);

    //  Clone destination format with equal dimensions, but RGB24 colorspace
    unicap_copy_format (&dest_format, &src_format);
    strcpy (dest_format.identifier, "RGB 24bpp");
    dest_format.fourcc = FOURCC ('R', 'G', 'B', '3');
    dest_format.bpp = 24;
    dest_format.buffer_size = dest_format.size.width *
        dest_format.size.height * 3;
    
    //  Initialise image buffers
    memset (&src_buffer, 0, sizeof (unicap_data_buffer_t));
    src_buffer.data = (unsigned char *)malloc (src_format.buffer_size);
    src_buffer.buffer_size = src_format.buffer_size;
    memset (&dest_buffer, 0, sizeof (unicap_data_buffer_t));
    dest_buffer.data = (unsigned char *)malloc (dest_format.buffer_size);
    dest_buffer.buffer_size = dest_format.buffer_size;
    dest_buffer.format = dest_format;

    //  Start video capture
    if (!SUCCESS (unicap_start_capture (handle))) {
        fprintf (stderr, "Failed to start capture on device: %s\n",
            device.identifier);
        exit (1);
    }

    //  Loop, sending video to defined exchange
    while (1) {

        //  Queue buffer for video capture
        if (!SUCCESS (unicap_queue_buffer (handle, &src_buffer))) {
            fprintf (stderr, "Failed to queue a buffer on device: %s\n",
                device.identifier);
            exit (1);
        }

        //  Wait until buffer is ready
        if (!SUCCESS (unicap_wait_buffer (handle, &returned_buffer))) {
            fprintf (stderr, "Failed to wait for buffer on device: %s\n",
                device.identifier);
            exit (1);
        }

        //  Convert colorspace
        if (!SUCCESS (ucil_convert_buffer (&dest_buffer, &src_buffer))) {
            //  TODO: This fails sometimes for unknown reasons,
            //  just skip the frame for now
            fprintf (stderr, "Failed to convert video buffer\n");
        }

        //  Create ZMQ message
        zmq::message_t msg (dest_format.buffer_size + (2 * sizeof (uint32_t)));
        unsigned char *data = (unsigned char *)msg.data();

        //  Image width in pixels
        zmq::put_uint32 (data, (uint32_t)dest_format.size.width);
        data += sizeof (uint32_t);

        //  Image height in pixels
        zmq::put_uint32 (data, (uint32_t)dest_format.size.height);
        data += sizeof (uint32_t);

        //  RGB24 image data
        memcpy (data, dest_buffer.data, dest_format.buffer_size);

        //  Send message 
        api->send (e_id, msg);
    }
    
    return 0;
}
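Each message is a fixed 8-byte header (width and height as 32-bit integers written with zmq::put_uint32) followed by width*height*3 bytes of RGB24 pixels. A receiver can at least sanity-check an incoming frame against that layout before touching the pixel data; the byte order below is an assumption, since it depends on what put_uint32 writes:

#include <stdint.h>
#include <string.h>

/* Sketch: validate a received frame against the sender's layout.
   Assumes put_uint32 wrote host byte order; adjust with ntohl()
   if the helper uses network byte order. */
static int frame_is_consistent( const unsigned char *data, size_t size )
{
   uint32_t width, height;
   if( size < 2 * sizeof( uint32_t ) )
      return 0;
   memcpy( &width,  data,                      sizeof( uint32_t ) );
   memcpy( &height, data + sizeof( uint32_t ), sizeof( uint32_t ) );
   return size == 2 * sizeof( uint32_t ) + (size_t)width * height * 3;
}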
Example #8
static void *ucil_theora_worker_thread( ucil_theora_input_file_object_t *vobj )
{
   unicap_data_buffer_t new_frame_buffer;

   struct timeval ltime;
   int eos = 0;

   unicap_copy_format( &new_frame_buffer.format, &vobj->format );
   new_frame_buffer.type = UNICAP_BUFFER_TYPE_SYSTEM;
   new_frame_buffer.buffer_size = new_frame_buffer.format.buffer_size;
   new_frame_buffer.data = malloc( new_frame_buffer.format.buffer_size );

   gettimeofday( &ltime, NULL );
   
   while( !vobj->quit_capture_thread )
   {
      struct timespec abs_timeout;
      struct timeval  ctime;
      ogg_page og;
      ogg_packet op;
      size_t bytes;
      int buffer_ready = 0;

      if( !eos && ( ogg_stream_packetout( &vobj->os, &op ) > 0 ) )
      {
	 yuv_buffer yuv;

	 theora_decode_packetin( &vobj->th, &op );
	 theora_decode_YUVout( &vobj->th, &yuv );
	 copy_yuv( new_frame_buffer.data, &yuv, &vobj->ti );

	 buffer_ready = 1;
      } 
      else if( !eos )
      {
	 bytes = buffer_data( vobj->f, &vobj->oy );      
	 if( !bytes )
	 {
	    TRACE( "End of stream\n" );
	    eos = 1;
	    
	 }
	 
	 while( ogg_sync_pageout( &vobj->oy, &og ) > 0 )
	 {
	    ogg_stream_pagein( &vobj->os, &og );
	 }
	 continue;
      }
      else
      {
	 buffer_ready = 1;
      }

      gettimeofday( &ctime, NULL );
      abs_timeout.tv_sec = ctime.tv_sec + 1;
      abs_timeout.tv_nsec = ctime.tv_usec * 1000;      
      if( sem_timedwait( &vobj->sema, &abs_timeout ) )
      {
	 TRACE( "SEM_WAIT FAILED\n" );
	 continue;
      }

      if( buffer_ready && vobj->event_callback )
      {
	 vobj->event_callback( vobj->event_unicap_handle, UNICAP_EVENT_NEW_FRAME, &new_frame_buffer );
	 TRACE( "New frame\n" );
      }
      
      unicap_data_buffer_t *data_buffer = g_queue_pop_head( vobj->in_queue );
      if( data_buffer )
      {
	 unicap_copy_format( &data_buffer->format, &vobj->format );
	 memcpy( data_buffer->data, new_frame_buffer.data, vobj->format.buffer_size );
	 
	 g_queue_push_tail( vobj->out_queue, data_buffer );
      }

      sem_post( &vobj->sema );
      
      if( buffer_ready )
      {
	 gettimeofday( &ctime, NULL );
	 if( ctime.tv_usec < ltime.tv_usec )
	 {
	    ctime.tv_usec += 1000000;
	    ctime.tv_sec -= 1;
	 }
	 
	 ctime.tv_usec -= ltime.tv_usec;
	 ctime.tv_sec -= ltime.tv_sec;
	 
	 if( ( ctime.tv_sec == 0 ) &&
	     ( ctime.tv_usec < vobj->frame_intervall ) )
	 {
	    usleep( vobj->frame_intervall - ctime.tv_usec );
	 }
      
	 gettimeofday( &ltime, NULL );
      }
   }

   free( new_frame_buffer.data );
   return NULL;
}
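The pacing code at the bottom of the loop is a manual timeval subtraction with a microsecond borrow, followed by a usleep that tops the frame up to vobj->frame_intervall. The same arithmetic as a self-contained helper, for clarity:

#include <sys/time.h>

/* Elapsed microseconds between two timevals (illustrative; assumes
   later >= earlier and a gap that fits in a long). */
static long elapsed_usec( const struct timeval *earlier, const struct timeval *later )
{
   long sec  = later->tv_sec  - earlier->tv_sec;
   long usec = later->tv_usec - earlier->tv_usec;
   if( usec < 0 )   /* borrow one second, as the loop above does */
   {
      usec += 1000000;
      sec  -= 1;
   }
   return sec * 1000000 + usec;
}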
Example #9
static unicap_status_t theoracpi_get_format( ucil_theora_input_file_object_t *vobj, unicap_format_t *format )
{
   unicap_copy_format( format, &vobj->format );
   return STATUS_SUCCESS;
}
Example #10
unicap_status_t dcam_dma_wait_buffer( dcam_handle_t dcamhandle )
{
   struct video1394_wait vwait;
   unicap_queue_t *entry;

   int i = 0;
   int ready_buffer;
   
   vwait.channel = dcamhandle->channel_allocated;
   i = vwait.buffer = ( dcamhandle->current_dma_capture_buffer + 1 ) % DCAM_NUM_DMA_BUFFERS;
	
   if( ioctl( dcamhandle->dma_fd, VIDEO1394_IOC_LISTEN_WAIT_BUFFER, &vwait ) != 0 )
      {
	 TRACE( "VIDEO1394_LISTEN_WAIT_BUFFER ioctl failed\n" );
	 TRACE( "channel: %d, buffer: %d\n", vwait.channel, vwait.buffer );
	 TRACE( "error: %s\n", strerror( errno ) );
	 // increase the buffer counter to wait for next buffer on the next try
	 dcamhandle->current_dma_capture_buffer = ( dcamhandle->current_dma_capture_buffer + 1 ) % DCAM_NUM_DMA_BUFFERS;
	 return STATUS_FAILURE;
      }
		
   ready_buffer = ( vwait.buffer + i ) % DCAM_NUM_DMA_BUFFERS;
		
   entry = ucutil_get_front_queue( &dcamhandle->input_queue );
   if( entry )
   {
      unicap_data_buffer_t *data_buffer;
      data_buffer = entry->data;
      memcpy( &data_buffer->fill_time, &vwait.filltime, sizeof( struct timeval ) );
			
      if( data_buffer->type == UNICAP_BUFFER_TYPE_SYSTEM )
      {
	 data_buffer->data = dcamhandle->dma_buffer + i * dcamhandle->buffer_size;
      }
      else
      {
	 memcpy( data_buffer->data, 
		 dcamhandle->dma_buffer + i * dcamhandle->dma_vmmap_frame_size, 
		 dcamhandle->buffer_size );
      }
			
      data_buffer->buffer_size = dcamhandle->buffer_size;
      ucutil_insert_back_queue( &dcamhandle->output_queue, entry );			
      data_buffer = 0;
   }

   {
      unicap_data_buffer_t tmpbuffer;
		   
      tmpbuffer.data = dcamhandle->dma_buffer + i * dcamhandle->buffer_size;
      tmpbuffer.buffer_size = dcamhandle->buffer_size;
      unicap_copy_format( &tmpbuffer.format, 
			  &dcamhandle->v_format_array[dcamhandle->current_format_index] );
      memcpy( &tmpbuffer.fill_time, &vwait.filltime, sizeof( struct timeval ) );
      new_frame_event( dcamhandle, &tmpbuffer );
   }   
		
   for( ; i != ready_buffer; i = ( ( i + 1 ) % DCAM_NUM_DMA_BUFFERS ) )
   {
      entry = ucutil_get_front_queue( &dcamhandle->input_queue );
      if( entry )
      {
	 unicap_data_buffer_t *data_buffer;

	 data_buffer = entry->data;
	 memcpy( &data_buffer->fill_time, &vwait.filltime, sizeof( struct timeval ) );
	 if( data_buffer->type == UNICAP_BUFFER_TYPE_SYSTEM )
	 {
	    data_buffer->data = dcamhandle->dma_buffer + i * dcamhandle->buffer_size;
	 }
	 else
	 {
	    memcpy( data_buffer->data, 
		    dcamhandle->dma_buffer + i * dcamhandle->dma_vmmap_frame_size, 
		    dcamhandle->buffer_size );
	 }
	 data_buffer->buffer_size = dcamhandle->buffer_size;

	 ucutil_insert_back_queue( &dcamhandle->output_queue, entry );

	 data_buffer = 0;
      }

      {
	 unicap_data_buffer_t tmpbuffer;
			   
	 tmpbuffer.data = dcamhandle->dma_buffer + i * dcamhandle->buffer_size;
	 tmpbuffer.buffer_size = dcamhandle->buffer_size;
	 unicap_copy_format( &tmpbuffer.format, 
			     &dcamhandle->v_format_array[dcamhandle->current_format_index] );
	 new_frame_event( dcamhandle, &tmpbuffer );
      }   


      vwait.buffer = i;

      if( ioctl( dcamhandle->dma_fd, VIDEO1394_IOC_LISTEN_QUEUE_BUFFER, &vwait ) < 0 )
      {
	 TRACE( "VIDEO1394_LISTEN_QUEUE_BUFFER ioctl failed\n" );
	 return STATUS_FAILURE;
      }
   }
		
   vwait.buffer = ready_buffer;
		
   if( ioctl( dcamhandle->dma_fd, VIDEO1394_IOC_LISTEN_QUEUE_BUFFER, &vwait ) < 0 )
   {
      TRACE( "VIDEO1394_LISTEN_QUEUE_BUFFER ioctl failed\n" );
      return STATUS_FAILURE;
   }

   dcamhandle->current_dma_capture_buffer = ready_buffer;
   
   return STATUS_SUCCESS;
}
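The function drains a DMA ring: it waits on the next expected slot, then walks every additional buffer the driver reports ready (ready_buffer = (vwait.buffer + i) % DCAM_NUM_DMA_BUFFERS), re-queuing each slot as it goes. The ring traversal on its own, as a sketch (RING_SIZE is a stand-in for DCAM_NUM_DMA_BUFFERS):

#define RING_SIZE 8   /* stand-in for DCAM_NUM_DMA_BUFFERS */

/* Sketch of the ring walk: process and re-queue every slot between the one
   we waited on and the newest ready slot, wrapping modulo the ring size. */
static void drain_ring( int waited_on, int ready_buffer )
{
   int i;
   for( i = waited_on; i != ready_buffer; i = ( i + 1 ) % RING_SIZE )
   {
      /* deliver buffer i, then hand it back to the driver
         (VIDEO1394_IOC_LISTEN_QUEUE_BUFFER in the code above) */
   }
   /* finally re-queue ready_buffer itself and remember it as the
      current capture position */
}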