PROTO_THREAD_ROUTINE(app,nomParams)
{
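  /* Receives a video stream over a Bluetooth (BNEP) TCP link and displays it
     with SDL: socket input -> MPEG4 decoder -> SDL output. The decoder and
     display stages are only built when USE_FFMPEG is defined. */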
  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_stages_input_com_config_t       icc;
  vp_stages_decoder_filter_config_t  dfc;
  vp_stages_output_sdl_config_t      osc;

  vp_com_t                        com;
  vp_com_bluetooth_connection_t   connection;
  vp_com_bluetooth_config_t       config;

  vp_os_memset( &icc,         0, sizeof(vp_stages_input_com_config_t) );
  vp_os_memset( &connection,  0, sizeof(vp_com_bluetooth_connection_t) );
  vp_com_str_to_address("00:01:48:03:70:54",&connection.address);
  vp_stages_fill_default_config(BLUETOOTH_COM_CONFIG, &config, sizeof(config));
  vp_os_memset(&com, 0, sizeof(vp_com_t));
  vp_stages_fill_default_config(DECODER_MPEG4_CONFIG, &dfc, sizeof(dfc));
  vp_stages_fill_default_config(SDL_DECODING_CONFIG, &osc, sizeof(osc));

  com.type                          = VP_COM_BLUETOOTH;
  icc.com                           = &com;
  icc.config                        = (vp_com_config_t*)&config;
  icc.connection                    = (vp_com_connection_t*)&connection;
  icc.socket.type                   = VP_COM_CLIENT;
  icc.socket.protocol               = VP_COM_TCP;
  icc.socket.port                   = 5555;
  icc.buffer_size                   = 6400;

  strcpy(icc.socket.serverHost,"192.168.2.23");

  stages[0].type      = VP_API_INPUT_SOCKET;
  stages[0].cfg       = (void *)&icc;
  stages[0].funcs     = vp_stages_input_com_funcs;
#ifdef USE_FFMPEG
  stages[1].type      = VP_API_FILTER_DECODER;
  stages[1].cfg       = (void *)&dfc;
  stages[1].funcs     = vp_stages_decoder_filter_funcs;

  stages[2].type      = VP_API_OUTPUT_SDL;
  stages[2].cfg       = (void *)&osc;
  stages[2].funcs     = vp_stages_output_sdl_funcs;

  pipeline.nb_stages = NB_STAGES; /* input + decoder + SDL output */
#else
  pipeline.nb_stages = 1;         /* only stages[0] is set up without FFmpeg */
#endif
  pipeline.stages = &stages[0];

  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING));
  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
PROTO_THREAD_ROUTINE(app,argv)
{
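  /* Same viewer over a serial (UART1) link: com input, FFmpeg MPEG4 decoder,
     SDL display. */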
  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_com_t                          com;
  vp_stages_input_com_config_t      icc;
  vp_com_serial_config_t            config;
  vp_stages_decoder_ffmpeg_config_t dfc;
  vp_stages_output_sdl_config_t     osc;


  vp_os_memset(&icc,0,sizeof(vp_stages_input_com_config_t));
  vp_os_memset(&com, 0, sizeof(vp_com_t));
  vp_stages_fill_default_config(UART1_COM_CONFIG, &config, sizeof(config));
  vp_stages_fill_default_config(DECODER_MPEG4_CONFIG, &dfc, sizeof(dfc));
  vp_stages_fill_default_config(SDL_DECODING_CONFIG, &osc, sizeof(osc));


  com.type                = VP_COM_SERIAL;
  icc.com                 = &com;
  icc.socket.type         = VP_COM_CLIENT;
  icc.config              = (vp_com_config_t *)&config;
  icc.buffer_size         = 1024;


  stages[0].type    = VP_API_INPUT_SOCKET;
  stages[0].cfg     = (void *)&icc;
  stages[0].funcs   = vp_stages_input_com_funcs;

  stages[1].type    = VP_API_FILTER_DECODER;
  stages[1].cfg     = (void *)&dfc;
  stages[1].funcs   = vp_stages_decoder_ffmpeg_funcs;

  stages[2].type    = VP_API_OUTPUT_SDL;
  stages[2].cfg     = (void *)&osc;
  stages[2].funcs   = vp_stages_output_sdl_funcs;

  pipeline.nb_stages = NB_STAGES;
  pipeline.stages = &stages[0];


  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING))
    {
      //vp_os_delay( 200 );
    }

  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
PROTO_THREAD_ROUTINE(app,argv)
{
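  /* Wi-Fi variant: connects to the "linksys" network over TCP and feeds the
     received frames straight to a QCIF SDL output -- no decoder stage. */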
  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_stages_input_com_config_t      icc;
  vp_stages_output_sdl_config_t     osc;

  vp_com_t                      com;
  vp_com_wifi_config_t          config;
  vp_com_wifi_connection_t      connection;

  vp_os_memset( &icc,         0, sizeof(vp_stages_input_com_config_t));
  vp_os_memset( &connection,  0, sizeof(vp_com_wifi_connection_t) );
  strcpy(connection.networkName,"linksys");
  vp_stages_fill_default_config(WIFI_COM_CONFIG, &config, sizeof(config));
  vp_os_memset(&com, 0, sizeof(vp_com_t));
  vp_stages_fill_default_config(SDL_DECODING_QCIF_CONFIG, &osc, sizeof(osc));

  com.type                          = VP_COM_WIFI;
  icc.com                           = &com;
  icc.config                        = (vp_com_config_t*)&config;
  icc.connection                    = (vp_com_connection_t*)&connection;
  icc.socket.type                   = VP_COM_CLIENT;
  icc.socket.protocol               = VP_COM_TCP;
  icc.socket.port                   = 5555;
  icc.buffer_size                   = 6400;

  strcpy(icc.socket.serverHost,"192.168.1.23");

  stages[0].type = VP_API_INPUT_SOCKET;
  stages[0].cfg = (void *)&icc;
  stages[0].funcs = vp_stages_input_com_funcs;

  stages[1].type = VP_API_OUTPUT_SDL;
  stages[1].cfg = (void *)&osc;
  stages[1].funcs = vp_stages_output_sdl_funcs;

  pipeline.nb_stages = NB_STAGES;
  pipeline.stages = &stages[0];

  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING));
  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
int
main(int argc, char **argv)
{
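  /* Minimal two-stage pipeline: Bluetooth TCP input dumped to the console
     output stage. */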
  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_stages_input_com_config_t     icc;

  vp_com_t                      com;
  vp_com_bluetooth_config_t     config;
  vp_com_bluetooth_connection_t connection;

  vp_os_memset( &icc,         0, sizeof(vp_stages_input_com_config_t) );
  vp_os_memset( &connection,  0, sizeof(vp_com_bluetooth_connection_t) );
  vp_com_str_to_address("00:01:48:03:70:54",&connection.address);
  vp_stages_fill_default_config(BLUETOOTH_COM_CONFIG, &config, sizeof(config));
  vp_os_memset(&com, 0, sizeof(vp_com_t));

  com.type                          = VP_COM_BLUETOOTH;
  icc.com                           = &com;
  icc.config                        = (vp_com_config_t*)&config;
  icc.connection                    = (vp_com_connection_t*)&connection;
  icc.socket.type                   = VP_COM_CLIENT;
  icc.socket.protocol               = VP_COM_TCP;
  icc.socket.port                   = 5555;
  icc.buffer_size                   = 64;

  strcpy(icc.socket.serverHost,"192.168.2.23");

  stages[0].type = VP_API_INPUT_SOCKET;
  stages[0].cfg = (void *)&icc;
  stages[0].funcs = vp_stages_input_com_funcs;

  stages[1].type = VP_API_OUTPUT_CONSOLE;
  stages[1].cfg = NULL;
  stages[1].funcs = vp_stages_output_console_funcs;

  pipeline.nb_stages = NB_STAGES;
  pipeline.stages = &stages[0];

  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING));
  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
Example #5
DEFINE_THREAD_ROUTINE(video_update_thread, data)
{
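    /* Drone video pipeline: UDP video socket -> VLIB decoder -> YUV420P to
       RGB24 conversion -> export stage (registered under the SDL type). */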
    C_RESULT res;

    vp_api_io_pipeline_t    pipeline;
    vp_api_io_data_t        out;
    vp_api_io_stage_t       stages[NB_STAGES];

    vp_api_picture_t picture;

    video_com_config_t              icc;
    vlib_stage_decoding_config_t    vec;
    vp_stages_yuv2rgb_config_t      yuv2rgbconf;

    /// Picture configuration
    picture.format        = PIX_FMT_YUV420P;
    picture.width         = STREAM_WIDTH;
    picture.height        = STREAM_HEIGHT;
    picture.framerate     = 30;

    picture.y_buf   = (uint8_t *)vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT     );
    picture.cr_buf  = (uint8_t *)vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
    picture.cb_buf  = (uint8_t *)vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );

    picture.y_line_size   = STREAM_WIDTH;
    picture.cb_line_size  = STREAM_WIDTH / 2;
    picture.cr_line_size  = STREAM_WIDTH / 2;

    vp_os_memset(&icc,          0, sizeof( icc ));
    vp_os_memset(&vec,          0, sizeof( vec ));
    vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

    icc.com                 = COM_VIDEO();
    icc.buffer_size         = 100000;
    icc.protocol            = VP_COM_UDP;
    COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

    vec.width               = STREAM_WIDTH;
    vec.height              = STREAM_HEIGHT;
    vec.picture             = &picture;
#ifdef USE_VIDEO_YUV
    vec.luma_only           = FALSE;
#else
    vec.luma_only           = TRUE;
#endif // USE_VIDEO_YUV
    vec.block_mode_enable   = TRUE;

    yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
    if( CAMIF_H_CAMERA_USED == CAMIF_CAMERA_OVTRULY_UPSIDE_DOWN_ONE_BLOCKLINE_LESS )
        yuv2rgbconf.mode = VP_STAGES_YUV2RGB_MODE_UPSIDE_DOWN;

    pipeline.nb_stages = 0;

    stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
    stages[pipeline.nb_stages].cfg     = (void *)&icc;
    stages[pipeline.nb_stages].funcs   = video_com_funcs;

    pipeline.nb_stages++;

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void*)&vec;
    stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

    pipeline.nb_stages++;

    stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
    stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
    stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

    pipeline.nb_stages++;

    stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
    stages[pipeline.nb_stages].cfg     = (void *)&vec;
    stages[pipeline.nb_stages].funcs   = vp_stages_export_funcs;

    pipeline.nb_stages++;

    pipeline.stages = &stages[0];
    PIPELINE_HANDLE pipeline_handle;
    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) ) {
        while( SUCCEED(vp_api_run(&pipeline, &out)) ) {
        }

        vp_api_close(&pipeline, &pipeline_handle);
    }

    return (THREAD_RET)0;
}
PROTO_THREAD_ROUTINE(app,argv)
{
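  /* Sender side: grabs frames from a V4L device and serves them over TCP
     (server socket on port 5555); the encoder stages are commented out, so
     raw frames are sent. */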
  vp_api_io_pipeline_t pipeline;
  vp_api_io_data_t out;
  vp_api_io_stage_t stages[NB_STAGES_MAX];

  clock_t time1, time2;
  uint16_t i;

  time1 = clock();

  vp_os_memset(&ivc,0,sizeof(ivc));
  //vp_os_memset(&vec,0,sizeof(vec));
  vp_os_memset(&occ,0,sizeof(occ));

  ivc.device = "/tmp/video0";
  ivc.width = ACQ_WIDTH;
  ivc.height = ACQ_HEIGHT;
  ivc.vp_api_picture = &picture;

/*   vec.width                               = ACQ_WIDTH; */
/*   vec.height                              = ACQ_HEIGHT; */
/*   vec.block_mode_enable                   = FALSE; */
/*   vec.picture                             = &picture; */

  occ.com                           = COM_VIDEO();
  occ.config                        = COM_CONFIG_VIDEO();
  occ.connection                    = COM_CONNECTION_VIDEO();
  occ.socket.type                   = VP_COM_SERVER;
  occ.socket.protocol               = VP_COM_TCP;
  occ.socket.port                   = 5555;
  occ.buffer_size                   = 320*240*3/2;

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_FILE;
  stages[pipeline.nb_stages].cfg     = (void *)&ivc;
  stages[pipeline.nb_stages++].funcs = vp_stages_input_v4l_funcs;
  
/*   stages[pipeline.nb_stages].type    = VP_API_FILTER_ENCODER; */
/*   stages[pipeline.nb_stages].cfg     = (void *)NULL; */
/*   stages[pipeline.nb_stages++].funcs = buffer_to_picture_funcs; */
  
/*   stages[pipeline.nb_stages].type    = VP_API_FILTER_ENCODER; */
/*   stages[pipeline.nb_stages].cfg     = (void*)&vec; */
/*   stages[pipeline.nb_stages++].funcs = vlib_encoding_funcs; */

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&occ;
  stages[pipeline.nb_stages++].funcs = vp_stages_output_com_funcs;

  pipeline.stages = &stages[0];

  vp_api_open(&pipeline, &pipeline_handle);
 
  out.status = VP_API_STATUS_PROCESSING;

  /////////////////////////////////////////////////////////////////////////////////////////
  i = 0;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING))
    {
      i++;
      PRINT("image %d \n",i);
      //vp_os_delay(50);
    }
  /////////////////////////////////////////////////////////////////////////////////////////

  time2 = clock();
  printf("elapsed time: %.2f s\n", (double)(time2 - time1) / CLOCKS_PER_SEC);
    
  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
Example #7
PROTO_THREAD_ROUTINE(app, params)
{
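  /* Offline MJPEG transcoding: reads ../in.yuv and, depending on 'codec',
     either encodes the pictures to MJPEG or decodes MJPEG back to pictures,
     writing the result to ../temp.mjpg. */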
  uint32_t num_stages = 0;
  vp_api_picture_t picture;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_stages_input_file_config_t     ifc;
  vp_stages_output_file_config_t    ofc;
  // vp_stages_output_sdl_config_t     osc;

  buffer_to_picture_config_t        bpc;
  mjpeg_stage_encoding_config_t     mec;

  picture_to_buffer_config_t        pbc;
  mjpeg_stage_decoding_config_t     dec;

  /// Picture configuration
  picture.format              = PIX_FMT_YUV420P;

  picture.width               = ACQ_WIDTH;
  picture.height              = ACQ_HEIGHT;
  picture.framerate           = 15;

  picture.y_buf               = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT );
  picture.cr_buf              = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );
  picture.cb_buf              = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );

  picture.y_line_size         = ACQ_WIDTH;
  picture.cb_line_size        = ACQ_WIDTH / 2;
  picture.cr_line_size        = ACQ_WIDTH / 2;

  picture.y_pad               = 0;
  picture.c_pad               = 0;

  vp_os_memset(&ifc,0,sizeof(vp_stages_input_file_config_t));
  vp_os_memset(&ofc,0,sizeof(vp_stages_output_file_config_t));

  ifc.name                    = "../in.yuv";
  ifc.buffer_size             = (ACQ_WIDTH*ACQ_HEIGHT*3)/2;

  ofc.name                    = "../temp.mjpg";

  stages[num_stages].type     = VP_API_INPUT_FILE;
  stages[num_stages].cfg      = (void *)&ifc;
  stages[num_stages].funcs    = vp_stages_input_file_funcs;

  num_stages++;

  if( codec == MJPEG_ENCODER )
  {
    bpc.picture         = &picture;

    mec.picture         = &picture;
    mec.out_buffer_size = 4096 * 4;

    stages[num_stages].type      = VP_API_FILTER_DECODER;
    stages[num_stages].cfg       = (void *)&bpc;
    stages[num_stages].funcs     = buffer_to_picture_funcs;

    num_stages++;

    stages[num_stages].type      = MJPEG_ENCODER;
    stages[num_stages].cfg       = (void*)&mec;
    stages[num_stages].funcs     = mjpeg_encoding_funcs;
  }
  else if( codec == MJPEG_DECODER )
  {
    dec.picture         = &picture;
    dec.out_buffer_size = 4096 * 4;

    pbc.picture         = &picture;

    stages[num_stages].type      = MJPEG_DECODER;
    stages[num_stages].cfg       = (void*)&dec;
    stages[num_stages].funcs     = mjpeg_decoding_funcs;

    num_stages++;

    stages[num_stages].type      = VP_API_FILTER_ENCODER;
    stages[num_stages].cfg       = (void *)&pbc;
    stages[num_stages].funcs     = picture_to_buffer_funcs;
  }

  num_stages++;

  stages[num_stages].type      = VP_API_OUTPUT_FILE;
  stages[num_stages].cfg       = (void*)&ofc;
  stages[num_stages].funcs     = vp_stages_output_file_funcs;

  num_stages++;

  pipeline.nb_stages  = num_stages;
  pipeline.stages     = &stages[0];

  PRINT("Pipeline configured with %d stages\n", num_stages);

  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING));

  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
Example #8
DEFINE_THREAD_ROUTINE(video_stage, data)
{
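  /* Standard AR.Drone video stage: UDP input -> (optional recorder) -> VLIB
     decoder -> YUV to RGB24 -> GTK rendering (registered under the SDL
     stage type). */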
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation\n\n");

    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
              loop = SUCCESS;
            }
          }
          else loop = -1; // Finish this thread
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
Example #9
DEFINE_THREAD_ROUTINE(video_stage, data)
{
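	/* LCD variant: the decoder writes RGB565 directly into a 512x512
	   buffer; a mutex/condition pair implements pause and resume. */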
	C_RESULT res;
	
	vp_api_io_pipeline_t    pipeline;
	vp_api_io_data_t        out;
	vp_api_io_stage_t       stages[NB_STAGES];
	
	vp_api_picture_t picture;
	
	vlib_stage_decoding_config_t    vec;

	vp_os_memset(&icc,          0, sizeof( icc ));
	vp_os_memset(&vec,          0, sizeof( vec ));
	vp_os_memset(&picture,      0, sizeof( picture ));

//#ifdef RECORD_VIDEO
//	video_stage_recorder_config_t vrc;
//#endif
	
	/// Picture configuration
	picture.format        = /*PIX_FMT_YUV420P */ PIX_FMT_RGB565;
	
	picture.width         = 512;
	picture.height        = 512;
	picture.framerate     = 15;
	
	picture.y_buf   = vp_os_malloc( picture.width * picture.height * 2);
	picture.cr_buf  = NULL;
	picture.cb_buf  = NULL;

	picture.y_line_size   = picture.width * 2;
	picture.cb_line_size  = 0;
	picture.cr_line_size  = 0;
		
	icc.com                 = COM_VIDEO();
	icc.buffer_size         = 100000;
	icc.protocol            = VP_COM_UDP;
	COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
	
	vec.width               = 512;
	vec.height              = 512;
	vec.picture             = &picture;
	vec.luma_only           = FALSE;
	vec.block_mode_enable   = TRUE;
	
	pipeline.nb_stages = 0;
	
	stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
	stages[pipeline.nb_stages].cfg     = (void *)&icc;
	stages[pipeline.nb_stages].funcs   = video_com_funcs;
	pipeline.nb_stages++;
	
	stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
	stages[pipeline.nb_stages].cfg     = (void*)&vec;
	stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;
	pipeline.nb_stages++;
	
/*
#ifdef RECORD_VIDEO
	stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
	stages[pipeline.nb_stages].cfg     = (void*)&vrc;
	stages[pipeline.nb_stages].funcs   = video_recorder_funcs;
	pipeline.nb_stages++;
#endif
*/
	stages[pipeline.nb_stages].type    = VP_API_OUTPUT_LCD;
	stages[pipeline.nb_stages].cfg     = (void*)&vec;
	stages[pipeline.nb_stages].funcs   = video_stage_funcs;
	pipeline.nb_stages++;
		
	pipeline.stages = &stages[0];
		
	if( !ardrone_tool_exit() )
	{
		PRINT("\nvideo stage thread initialisation\n\n");
		
		// NICK
		video_stage_init();

		res = vp_api_open(&pipeline, &pipeline_handle);

		
		if( VP_SUCCEEDED(res) )
		{

			int loop = VP_SUCCESS;
			out.status = VP_API_STATUS_PROCESSING;
#ifdef RECORD_VIDEO		    
			{
				DEST_HANDLE dest;
				dest.stage = 2;
				dest.pipeline = pipeline_handle;
				vp_api_post_message( dest, PIPELINE_MSG_START, NULL, (void*)NULL);
			}
#endif			

			video_stage_in_pause = FALSE;
			
			while( !ardrone_tool_exit() && (loop == VP_SUCCESS) )
			{

				if(video_stage_in_pause)
				{
					vp_os_mutex_lock(&video_stage_mutex);
					icc.num_retries = VIDEO_MAX_RETRIES;
					vp_os_cond_wait(&video_stage_condition);
					vp_os_mutex_unlock(&video_stage_mutex);
				}

				if( VP_SUCCEEDED(vp_api_run(&pipeline, &out)) ) {
					if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
						loop = VP_SUCCESS;
					}
				}
				else loop = -1; // Finish this thread
			}
			
			vp_api_close(&pipeline, &pipeline_handle);
		}
	}
	
	PRINT("   video stage thread ended\n\n");
	
	return (THREAD_RET)0;
}
PROTO_THREAD_ROUTINE(app,argv)
{
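  /* Bluetooth MJPEG viewer with a fully hand-built BNEP configuration
     (interface, addresses, passkey): socket input -> MJPEG decoder -> SDL. */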
  vp_api_picture_t picture;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_stages_input_com_config_t    icc;
  mjpeg_stage_decoding_config_t   dec;
  vp_stages_output_sdl_config_t   osc;

  vp_com_t                        com;
  vp_com_bluetooth_connection_t   connection;
  vp_com_bluetooth_config_t       config;

  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = ACQ_WIDTH;
  picture.height        = ACQ_HEIGHT;
  picture.framerate     = 15;

  picture.y_buf   = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT );
  picture.cr_buf  = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );
  picture.cb_buf  = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );

  picture.y_line_size   = ACQ_WIDTH;
  picture.cb_line_size  = ACQ_WIDTH / 2;
  picture.cr_line_size  = ACQ_WIDTH / 2;

  picture.y_pad         = 0;
  picture.c_pad         = 0;

  dec.picture         = &picture;
  dec.out_buffer_size = 4096;

  vp_os_memset( &icc,         0, sizeof(vp_stages_input_com_config_t)  );
  vp_os_memset( &osc,         0, sizeof(vp_stages_output_sdl_config_t) );
  vp_os_memset( &connection,  0, sizeof(vp_com_bluetooth_connection_t) );
  vp_os_memset( &config,      0, sizeof(vp_com_bluetooth_config_t)     );
  vp_os_memset( &com,         0, sizeof(vp_com_t)                      );

  vp_com_str_to_address("08:75:48:03:60:34",&connection.address);
  //  vp_com_str_to_address("00:12:1C:FF:A4:EE",&connection.address);

  strcpy(config.itfName,    "bnep0");
  strcpy(config.localHost,  "192.168.2.58");
  strcpy(config.netmask,    "255.255.255.0");
  strcpy(config.broadcast,  "192.168.2.255");
  strcpy(config.gateway,    "192.168.2.0");
  strcpy(config.server,     "192.168.2.0");
  strcpy(config.passkey,    "1234" );
  config.secure = 1;

  com.type                          = VP_COM_BLUETOOTH;

  icc.com                           = &com;
  icc.config                        = (vp_com_config_t*)&config;
  icc.connection                    = (vp_com_connection_t*)&connection;
  icc.socket.type                   = VP_COM_CLIENT;
  icc.socket.protocol               = VP_COM_TCP;
  icc.socket.port                   = 5555;
  icc.buffer_size                   = 1600;
  //  icc.sockopt                       = VP_COM_NON_BLOCKING;

  strcpy(icc.socket.serverHost,"192.168.2.23");

  osc.width           = 320;
  osc.height          = 240;
  osc.bpp             = 16;
  osc.window_width    = 320;
  osc.window_height   = 240;
  osc.pic_width       = ACQ_WIDTH;
  osc.pic_height      = ACQ_HEIGHT;
  osc.y_size          = ACQ_WIDTH*ACQ_HEIGHT;
  osc.c_size          = (ACQ_WIDTH*ACQ_HEIGHT) >> 2;

  stages[0].type                    = VP_API_INPUT_SOCKET;
  stages[0].cfg                     = (void *)&icc;
  stages[0].funcs                   = vp_stages_input_com_funcs;

  stages[1].type                    = VP_API_FILTER_DECODER;
  stages[1].cfg                     = (void*)&dec;
  stages[1].funcs                   = mjpeg_decoding_funcs;

  stages[2].type                    = VP_API_OUTPUT_SDL;
  stages[2].cfg                     = (void *)&osc;
  stages[2].funcs                   = vp_stages_output_sdl_funcs;

  pipeline.nb_stages                = NB_STAGES;
  pipeline.stages                   = &stages[0];

  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING));

  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
PROTO_THREAD_ROUTINE(app,argv)
{
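  /* Raw YUV over TCP: the buffer-to-picture stage reassembles 16-line
     blocklines into full frames before SDL display. */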
  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES_MAX];

  vp_os_memset( &icc,     0, sizeof(icc) );
  vp_os_memset( &bpc,     0, sizeof(bpc) );
  vp_os_memset( &osc,     0, sizeof(osc) );

  icc.com               = COM_VIDEO();
  icc.buffer_size       = ACQ_WIDTH*ACQ_HEIGHT*3/2;
  icc.socket.protocol   = VP_COM_TCP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, 5555, SERVER_HOST);

  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;
  picture.width         = ACQ_WIDTH;
  picture.height        = ACQ_HEIGHT;
  picture.framerate     = 30;
  picture.y_buf         = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT );
  picture.cr_buf        = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );
  picture.cb_buf        = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );
  picture.y_line_size   = ACQ_WIDTH;
  picture.cb_line_size  = ACQ_WIDTH / 2;
  picture.cr_line_size  = ACQ_WIDTH / 2;
  picture.y_pad         = 0;
  picture.c_pad         = 0;

  bpc.picture             = &picture;
  bpc.block_mode_enable   = FALSE;
  bpc.y_buffer_size       = ACQ_WIDTH * ACQ_HEIGHT;
  bpc.y_blockline_size    = ACQ_WIDTH * 16; // each blockline has 16 lines
  bpc.y_current_size      = 0;
  bpc.num_frames          = 0;
  bpc.y_buf_ptr           = NULL;
  bpc.luma_only           = FALSE;
  bpc.cr_buf_ptr          = NULL;
  bpc.cb_buf_ptr          = NULL;

  osc.width           = ACQ_WIDTH;
  osc.height          = ACQ_HEIGHT;
  osc.bpp             = 16;
  osc.window_width    = ACQ_WIDTH;
  osc.window_height   = ACQ_HEIGHT;
  osc.pic_width       = ACQ_WIDTH;
  osc.pic_height      = ACQ_HEIGHT;
  osc.y_size          = ACQ_WIDTH*ACQ_HEIGHT;
  osc.c_size          = (ACQ_WIDTH*ACQ_HEIGHT) >> 2;

  pipeline.nb_stages                 = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages++].funcs = vp_stages_input_com_funcs;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&bpc;
  stages[pipeline.nb_stages++].funcs = vp_stages_buffer_to_picture_funcs;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = (void *)&osc;
  stages[pipeline.nb_stages++].funcs = vp_stages_output_sdl_funcs;

  pipeline.stages                    = &stages[0];

  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING))
    {
    }

  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
DEFINE_THREAD_ROUTINE(video_recorder, data)
{
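  /* Records the encoded video stream (AR.Drone 2 only): TCP video-recorder
     socket -> TCP reassembly -> encoded-frame recorder. */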
  if (1 >= ARDRONE_VERSION ()) // run only on AR.Drone 2 and later
    {
      return (THREAD_RET)0;
    }
  C_RESULT res = C_OK;
    
  vp_api_io_pipeline_t pipeline;
  vp_api_io_data_t out;
  vp_api_io_stage_t stages[3];
  PIPELINE_HANDLE video_recorder_pipeline_handle;
  video_recorder_thread_param_t *video_recorder_thread_param = (video_recorder_thread_param_t *)data;

  video_stage_tcp_config_t tcpConf;
    
  if(video_recorder_thread_param != NULL)
  {
      CHANGE_THREAD_PRIO(video_recorder, video_recorder_thread_param->priority);
      video_stage_encoded_recorder_config.finish_callback = video_recorder_thread_param->finish_callback; 
  }
    
  vp_os_memset (&record_icc, 0x0, sizeof (record_icc));
  record_icc.com = COM_VIDEO ();
  record_icc.buffer_size = (8*1024);
  record_icc.protocol = VP_COM_TCP;
  record_icc.forceNonBlocking = &tcpConf.tcpStageHasMoreData;
  COM_CONFIG_SOCKET_VIDEO (&record_icc.socket, VP_COM_CLIENT, VIDEO_RECORDER_PORT, wifi_ardrone_ip);
  record_icc.timeoutFunc = &video_stage_encoded_recorder_com_timeout;
  record_icc.timeoutFuncAfterSec = 10;
  
  vp_os_memset (&tcpConf, 0, sizeof (tcpConf));
  tcpConf.maxPFramesPerIFrame = 30;
  tcpConf.frameMeanSize = 160*1024;
  tcpConf.latencyDrop = 0;
    
  pipeline.nb_stages = 0;
  pipeline.stages = &stages[0];
    
  // Com
  stages[pipeline.nb_stages].type = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg  = (void *)&record_icc;
  stages[pipeline.nb_stages++].funcs = video_com_funcs;

  // TCP
  stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg  = (void *) &tcpConf;
  stages[pipeline.nb_stages++].funcs = video_stage_tcp_funcs;
    
  // Record
  stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg  = (void *) &video_stage_encoded_recorder_config;
  stages[pipeline.nb_stages++].funcs = video_encoded_recorder_funcs;
    
  while (FALSE == isInit)
    {
      printf ("Waiting for init\n");
      vp_os_delay (100); /* don't busy-wait while initialisation completes */
    }
    
  if (! ardrone_tool_exit ())
    {
      PRINT ("Video recorder thread initialisation\n");
      res = vp_api_open (&pipeline, &video_recorder_pipeline_handle);
        
      if (SUCCEED (res))
        {
          int loop = SUCCESS;
          out.status = VP_API_STATUS_PROCESSING;
            
          while (! ardrone_tool_exit () && (SUCCESS == loop))
            {
              if (video_recorder_in_pause)
                {
                  vp_os_mutex_lock (&video_recorder_mutex);
                  record_icc.num_retries = VIDEO_MAX_RETRIES;
                  vp_os_cond_wait (&video_recorder_condition);
                  vp_os_mutex_unlock (&video_recorder_mutex);
                }

              if (SUCCEED (vp_api_run (&pipeline, &out)))
                {
                  if ((VP_API_STATUS_PROCESSING == out.status) ||
                      (VP_API_STATUS_STILL_RUNNING == out.status))
                    {
                      loop = SUCCESS;
                    }
                  else loop = -1;
                }
              else loop = -1;
            }
          vp_api_close (&pipeline, &video_recorder_pipeline_handle);
        }
    }
  PRINT ("Video recorder thread ended\n");
    
  return (THREAD_RET)res;
}
Example #13
/*
	The video processing thread.
	This function can be kept as it is by most users.
	It automatically receives the video stream in a loop, decodes it, and then
	calls the 'output_rendering_device_stage_transform' function for each decoded frame.
*/
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
  

  /* Picture configuration */
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = DRONE_VIDEO_MAX_WIDTH;
  picture.height        = DRONE_VIDEO_MAX_HEIGHT;
  picture.framerate     = 15;
  //picture.framerate     = 3;

  picture.y_buf   = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );

  picture.y_line_size   = DRONE_VIDEO_MAX_WIDTH;
  picture.cb_line_size  = DRONE_VIDEO_MAX_WIDTH / 2;
  picture.cr_line_size  = DRONE_VIDEO_MAX_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  /* Video socket configuration */
  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;

  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  /* Video decoder configuration */
  /* Size of the buffers used for decoding.
     This must be set to the maximum possible video resolution used by the drone.
     The actual video resolution will be stored by the decoder in vec.controller
     (see vlib_stage_decode.h) */
  vec.width               = DRONE_VIDEO_MAX_WIDTH;
  vec.height              = DRONE_VIDEO_MAX_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;

  /* Video pipeline building */

  pipeline.nb_stages = 0;

  /* Video stream reception */
  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

  /* Video stream decoding */
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  /* YUV to RGB conversion.
     YUV format is used by the video stream protocol.
     Remove this stage if your rendering device can handle
     YUV data directly. */
  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  /* User code */
  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;  /* Set to VP_API_OUTPUT_SDL even if SDL is not used */
  stages[pipeline.nb_stages].cfg     = (void*)&vec;   /* give the decoder information to the renderer */
  stages[pipeline.nb_stages].funcs   = vp_stages_output_rendering_device_funcs;

  pipeline.nb_stages++;
  pipeline.stages = &stages[0];

  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation\n\n");

    // switch to vertical camera
    //ardrone_at_zap(ZAP_CHANNEL_VERT);

    res = vp_api_open(&pipeline, &pipeline_handle);

    if( VP_SUCCEEDED(res) )
    {
      int loop = VP_SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while( !ardrone_tool_exit() && (loop == VP_SUCCESS) )
      {
        if( VP_SUCCEEDED(vp_api_run(&pipeline, &out)) ) {
          if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
            loop = VP_SUCCESS;
          }
        }
        else loop = -1; // Finish this thread
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
Example #14
DEFINE_THREAD_ROUTINE(video_stage, data) {
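    /* Generic ardrone_tool video stage: multisocket input (TCP + UDP), TCP
       reassembly, optional on-drone recording (AR.Drone 1 only), slice
       merging, decoding, plus caller-supplied pre/post-processing stages. */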
    C_RESULT res;

    vp_api_io_pipeline_t pipeline;
    vp_api_io_data_t out;
    
    vp_api_io_stage_t * stages;
    video_stage_merge_slices_config_t merge_slices_cfg;
    
    uint8_t i;
    
    specific_parameters_t * params = (specific_parameters_t *)(data);

    if (1 == params->needSetPriority)
    {
      CHANGE_THREAD_PRIO (video_stage, params->priority);
    }
    
    vp_os_memset(&icc_tcp, 0, sizeof ( icc_tcp));
    vp_os_memset(&icc_udp, 0, sizeof ( icc_udp));
    
    // Video Communication config
    icc_tcp.com = COM_VIDEO();
    icc_tcp.buffer_size = (1024*1024);
    icc_tcp.protocol = VP_COM_TCP;
    COM_CONFIG_SOCKET_VIDEO(&icc_tcp.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
    
    // Video Communication config
    icc_udp.com = COM_VIDEO();
    icc_udp.buffer_size = (1024*1024);
    icc_udp.protocol = VP_COM_UDP;
    COM_CONFIG_SOCKET_VIDEO(&icc_udp.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

    icc.nb_sockets = 2;
    icc.configs = icc_tab;
    icc.forceNonBlocking = &(tcpConf.tcpStageHasMoreData);
    icc_tab[1]  = &icc_tcp;
    icc_tab[0]  = &icc_udp;
    
    icc.buffer_size = (1024*1024);
    
     vp_os_memset(&vec, 0, sizeof ( vec));

     stages = (vp_api_io_stage_t*) (vp_os_calloc(
        NB_STAGES + params->pre_processing_stages_list->length + params->post_processing_stages_list->length,
        sizeof (vp_api_io_stage_t)
    ));
    
    vec.src_picture = params->in_pic;
    vec.dst_picture = params->out_pic;
    
    vp_os_memset(&tcpConf, 0, sizeof ( tcpConf));
    vp_os_memset(&merge_slices_cfg, 0, sizeof ( merge_slices_cfg));
    tcpConf.maxPFramesPerIFrame = 30;
    tcpConf.frameMeanSize = 160*1024;
    tcpConf.tcpStageHasMoreData = FALSE;
    tcpConf.latencyDrop = 1;

	pipeline.name = "video_stage_pipeline";
    pipeline.nb_stages = 0;
    pipeline.stages = &stages[0];

    //ENCODED FRAME PROCESSING STAGES
    stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
    stages[pipeline.nb_stages].cfg     = (void *) &icc;
    stages[pipeline.nb_stages++].funcs = video_com_multisocket_funcs;
	printf("thread_video_stage: adding multisocket_funcs as %d th stage (input type) to video_pipeline of AR.Drone with version %d.%d.%d\n", pipeline.nb_stages, ardroneVersion.majorVersion, ardroneVersion.minorVersion, ardroneVersion.revision );

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void *) &tcpConf;
    stages[pipeline.nb_stages++].funcs = video_stage_tcp_funcs;
	printf("thread_video_stage: adding tcp_funcs to video_pipeline as %d nd stage (filter type) of AR.Drone\n", pipeline.nb_stages );
    
    // Record Encoded video
    if(1 == ARDRONE_VERSION())
    {

        ardrone_academy_stage_recorder_config.dest.pipeline = video_pipeline_handle;
        ardrone_academy_stage_recorder_config.dest.stage    = pipeline.nb_stages;
        stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
        stages[pipeline.nb_stages].cfg     = (void*)&ardrone_academy_stage_recorder_config;
        stages[pipeline.nb_stages++].funcs = ardrone_academy_stage_recorder_funcs;
		printf("thread_video_stage: adding academy_recorder_funcs to video_pipeline as %d rd stage (filter type) of AR.Drone\n", pipeline.nb_stages );

    }
    else
    {
      // Nothing to do for AR.Drone 2 as we have a separated thread for recording
    }

    //PRE-DECODING STAGES ==> recording, ...
    for(i=0;i<params->pre_processing_stages_list->length;i++){
        stages[pipeline.nb_stages].type    = params->pre_processing_stages_list->stages_list[i].type;
        stages[pipeline.nb_stages].cfg     = params->pre_processing_stages_list->stages_list[i].cfg;
        stages[pipeline.nb_stages++].funcs = params->pre_processing_stages_list->stages_list[i].funcs;
    }
	printf("thread_video_stage: adding pre_processing_stages_list to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void *)&merge_slices_cfg;
    stages[pipeline.nb_stages++].funcs = video_stage_merge_slices_funcs;
	printf("thread_video_stage: adding merge_slices_funcs to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    //DECODING STAGES
    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void*) &vec;
    stages[pipeline.nb_stages++].funcs = video_decoding_funcs;
	printf("thread_video_stage: adding video_decoding_funcs to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    //POST-DECODING STAGES ==> transformation, display, ...
    for(i=0;i<params->post_processing_stages_list->length;i++){
        stages[pipeline.nb_stages].type    = params->post_processing_stages_list->stages_list[i].type;
        stages[pipeline.nb_stages].cfg     = params->post_processing_stages_list->stages_list[i].cfg;
        stages[pipeline.nb_stages++].funcs = params->post_processing_stages_list->stages_list[i].funcs;
    }
	printf("thread_video_stage: adding post_processing_stages_list to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );


    if (!ardrone_tool_exit()) {
        PRINT("\nvideo stage thread initialisation\n\n");

        res = vp_api_open(&pipeline, &video_pipeline_handle);

        if (SUCCEED(res)) {
            int loop = SUCCESS;
            out.status = VP_API_STATUS_PROCESSING;

            while (!ardrone_tool_exit() && (loop == SUCCESS)) {
                if (video_stage_in_pause) {
                    vp_os_mutex_lock(&video_stage_mutex);
                    icc.num_retries = VIDEO_MAX_RETRIES;
                    vp_os_cond_wait(&video_stage_condition);
                    vp_os_mutex_unlock(&video_stage_mutex);
                }

                if (SUCCEED(vp_api_run(&pipeline, &out))) {
                    if ((out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING)) {
                        loop = SUCCESS;
                    }
                } else loop = -1; // Finish this thread
            }
            
            vp_os_free(params->pre_processing_stages_list->stages_list);
            vp_os_free(params->post_processing_stages_list->stages_list);
            vp_os_free(params->pre_processing_stages_list);
            vp_os_free(params->post_processing_stages_list);
            
            vp_os_free(params->in_pic);
            vp_os_free(params->out_pic);
            
            vp_os_free(params);
            
            vp_api_close(&pipeline, &video_pipeline_handle);
        }
    }

    vp_os_free(stages); /* stages array was allocated with vp_os_calloc above */

    PRINT("\nvideo stage thread ended\n\n");

    return (THREAD_RET) 0;
}
Example #15
DEFINE_THREAD_ROUTINE(video_stage, data)
{
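  /* Video stage that first sets up a Unix domain socket (/tmp/video) and
     waits for a client to connect before running the usual decode/display
     pipeline. */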
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  PRINT("\n   Record Video\n\n");
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
  
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation\n\n");
    
    if ((sockfd = socket(AF_UNIX, SOCK_STREAM, 0)) < 0)
    {
      printf("error creating socket\n");
    }
    bzero((char *) &serv_addr, sizeof(serv_addr));
    serv_addr.sun_family = AF_UNIX;
    unlink("/tmp/video");
    strcpy(serv_addr.sun_path, "/tmp/video");
    servlen = strlen(serv_addr.sun_path) + sizeof(serv_addr.sun_family);
    if (bind(sockfd, (struct sockaddr *)&serv_addr, servlen) < 0)
    {
      printf("error binding socket\n");
    }

    listen(sockfd, 5);

    clilen = sizeof(serv_addr);

    newsockfd = accept(sockfd, (struct sockaddr *)&serv_addr, &clilen);

    printf("sockfd: %d\n", sockfd);
    
   
   PRINT("\n   Video socket initialisation\n\n");
    
    
    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
              loop = SUCCESS;
            }
          }
          else loop = -1; // Finish this thread
          
          //printf("%s\n", pixbuf_data);
          //printf("!%d  sizeof *\n", sizeof(pixbuf_data));
          
          //printf("!%d --- %d\n", n, errno);
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }
  

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
PROTO_THREAD_ROUTINE(app,argv)
{
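  /* Plays back an MJPEG file given on the command line: file input ->
     MJPEG decoder -> SDL display, with a 30 ms delay per frame. */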
  vp_api_picture_t picture;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_stages_input_file_config_t     ifc;
  vp_stages_output_sdl_config_t     osc;
  mjpeg_stage_decoding_config_t     dec;

  vp_os_memset(&ifc,0,sizeof(vp_stages_input_file_config_t));

  ifc.name            = ((char**)argv)[1];
  ifc.buffer_size     = (ACQ_WIDTH*ACQ_HEIGHT*3)/2;

  osc.width           = WINDOW_WIDTH;
  osc.height          = WINDOW_HEIGHT;
  osc.bpp             = 16;
  osc.window_width    = WINDOW_WIDTH;
  osc.window_height   = WINDOW_HEIGHT;
  osc.pic_width       = ACQ_WIDTH;
  osc.pic_height      = ACQ_HEIGHT;
  osc.y_size          = ACQ_WIDTH*ACQ_HEIGHT;
  osc.c_size          = (ACQ_WIDTH*ACQ_HEIGHT) >> 2;

  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = ACQ_WIDTH;
  picture.height        = ACQ_HEIGHT;
  picture.framerate     = 15;

  picture.y_buf   = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT );
  picture.cr_buf  = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );
  picture.cb_buf  = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );

  picture.y_line_size   = ACQ_WIDTH;
  picture.cb_line_size  = ACQ_WIDTH / 2;
  picture.cr_line_size  = ACQ_WIDTH / 2;

  picture.y_pad         = 0;
  picture.c_pad         = 0;

  dec.picture           = &picture;
  dec.out_buffer_size   = 4096;

  stages[0].type      = VP_API_INPUT_FILE;
  stages[0].cfg       = (void *)&ifc;
  stages[0].funcs     = vp_stages_input_file_funcs;

  stages[1].type      = VP_API_FILTER_DECODER;
  stages[1].cfg       = (void *)&dec;
  stages[1].funcs     = mjpeg_decoding_funcs;

  stages[2].type      = VP_API_OUTPUT_SDL;
  stages[2].cfg       = (void *)&osc;
  stages[2].funcs     = vp_stages_output_sdl_funcs;

  pipeline.nb_stages  = NB_STAGES;
  pipeline.stages     = &stages[0];

  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING))
  {
    vp_os_delay( 30 );
  }

  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
Example #17
DEFINE_THREAD_ROUTINE(video_stage, data)
{
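  /* OpenCV viewer: decodes the drone stream (or a local webcam when
     USEWEBCAM is set), optionally extracts a "bottom" sub-image, shows the
     frames with HighGUI, and can dump timestamped JPEGs to disk. */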
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("Video stage thread initialization\n");

    res = vp_api_open(&pipeline, &pipeline_handle);
   // PRINT("VP_API OPENED\n");
    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;
      cvStartWindowThread();
      #define frontWindow "DroneView"
      cvNamedWindow(frontWindow, CV_WINDOW_AUTOSIZE);
      frontImgStream = cvCreateImage(cvSize(picture.width, picture.height), IPL_DEPTH_8U, 3);

      #define bottomWindow "BomberView"
      if(extractBottomImage)
	cvNamedWindow(bottomWindow, CV_WINDOW_AUTOSIZE);
      bottomImgStream = cvCreateImage(cvSize(bottomWidth, bottomHeight), IPL_DEPTH_8U, 3);

      
      IplImage *frame;
      CvCapture *capture;
      if(USEWEBCAM)
      {
        capture = cvCaptureFromCAM(WEBCAMINDEX);
    
        if(!capture)
        {
          printf("ERROR: Cannot Initialize Webcam.\n");
          loop = !SUCCESS;
        }
      }

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
        if(!USEWEBCAM)
        {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
              if (vec.controller.num_frames==0) continue;
	      /*int i;
              for(i = 0; i < (picture.width)*(picture.height); i++)
              {
                frontImgStream->imageData[i*3] = picture.y_buf[i];
                frontImgStream->imageData[i*3+1] = picture.y_buf[i];
                frontImgStream->imageData[i*3+2] = picture.y_buf[i];
              }
	      */
             cvCvtColor(rgbHeader, frontImgStream, CV_RGB2BGR); //[for colour]

              if(extractBottomImage)
              {
                int j = 0, i;
                for(i = 0; j < bottomHeight*bottomWidth; i = i%picture.width >= bottomWidth-1 ? i - (i%picture.width) + picture.width : i+1)
                {
                  bottomImgStream->imageData[j*3] = frontImgStream->imageData[i*3];
                  bottomImgStream->imageData[j*3+1] = frontImgStream->imageData[i*3+1];
                  bottomImgStream->imageData[j*3+2] = frontImgStream->imageData[i*3+2];
                  frontImgStream->imageData[i*3] = 0;
                  frontImgStream->imageData[i*3+1] = 0;
                  frontImgStream->imageData[i*3+2] = 0;
                  j++;
                }
              }
              
              //cvLine(frontImgStream, cvPoint(picture.width/2, 0), cvPoint(picture.width/2, picture.height), CV_RGB(0,255,0), 1, CV_AA, 0 );
              cvShowImage(frontWindow, frontImgStream);
              if(extractBottomImage)
              cvShowImage(bottomWindow, bottomImgStream);
              
              if(CAPTUREIMAGESTREAM)
              {
                char filename[256];
                struct timeval t;
                gettimeofday(&t, NULL);
                sprintf(filename, "%d.%06d.jpg", (int)t.tv_sec, (int)t.tv_usec);
                if(frontImgStream != NULL && cvSaveImage(filename, frontImgStream, 0))
                  printf("Image dumped to %s\n", filename);
                else
                  printf("Error dumping image...\n");
                if(extractBottomImage)
                {
                  sprintf(filename, "%d.%06dbottom.jpg", (int)t.tv_sec, (int)t.tv_usec);
                  if(bottomImgStream != NULL && cvSaveImage(filename, bottomImgStream, 0))
                    printf("Bottom Image dumped to %s\n", filename);
                  else
                    printf("Error dumping bottom image...\n");
                }
              }
          }
          else loop = -1;
        }
        else //use webcam
        {
          frame = cvQueryFrame(capture);
      
          if(!frame) break;
          
          cvResize(frame, frontImgStream, CV_INTER_LINEAR);
          cvShowImage(frontWindow, frontImgStream);
          
          cvWaitKey(1);
        }
      }

      cvDestroyWindow(frontWindow);
      if(extractBottomImage)
        cvDestroyWindow(bottomWindow);
      cvReleaseImage(&frontImgStream);
      cvReleaseImage(&bottomImgStream);
      if(USEWEBCAM)
        cvReleaseCapture(&capture);
      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
Example #18
int main(int argc, char **argv)
{
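  /* On-board capture: V4L2 camera input (OV77xx or Cresyn), optional VLIB
     encoding, and file and/or TCP-server output, all selected at compile
     time; measures the achieved framerate over the first 50 frames. */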
  vp_api_picture_t  video_picture;
  vp_api_picture_t  buffer_blockline;
#ifdef BLOCK_MODE
  vp_api_picture_t  video_blockline;
#endif
  vp_api_io_pipeline_t pipeline;
  vp_api_io_data_t out;
  vp_api_io_stage_t stages[NB_STAGES_MAX];

  vp_os_memset(&ivc,0,sizeof(ivc));
  vp_os_memset(&ofc,0,sizeof(ofc));
  vp_os_memset(&occ,0,sizeof(occ));


  // CAMIF config
  ivc.camera = "/dev/video1";
  ivc.i2c = "/dev/i2c-0";
#ifdef OV_CAM
  ivc.camera_configure = &camera_configure_ov77xx;
#else
#ifdef CRESYN_CAM
  ivc.camera_configure = &camera_configure_cresyn;
#else  
#error no cam selected
#endif
#endif

  ivc.resolution = VGA;
  ivc.nb_buffers = 8;
  ivc.format                              = V4L2_PIX_FMT_YUV420;
  ivc.y_pad                               = 0;
  ivc.c_pad                               = 0;
  ivc.offset_delta                        = 0;
#ifdef BLOCK_MODE
  ivc.vp_api_blockline                    = &video_blockline;
#endif
  ivc.vp_api_picture                      = &video_picture;
  ivc.use_chrominances                    = 1;
  ivc.framerate				  = 15;

  // BUFFER
#ifdef BLOCK_MODE
  bbc.picture                    = &video_blockline;
#endif

  // ENCODER
  vec.width                               = 320;
  vec.height                              = 240;
#ifdef BLOCK_MODE
  vec.block_mode_enable                   = TRUE;
#else
  vec.block_mode_enable                   = FALSE;
#endif
  vec.picture                             = &video_picture;
 
  // OUTPUT FILE config
  ofc.name = "/tmp/out.yuv";
  
  // COM CONFIG
  occ.com                            = wired_com();
  occ.config                         = wired_config();
  occ.connection                     = wired_connection();
  occ.socket.type                    = VP_COM_SERVER;
  occ.socket.protocol                = VP_COM_TCP;
  occ.socket.port                    = 5555;
  occ.buffer_size                    = 640*480*3/2;

  // build pipeline
  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_CAMIF;
  stages[pipeline.nb_stages].cfg     = (void *)&ivc;
  stages[pipeline.nb_stages++].funcs = V4L2_funcs;

#ifdef BLOCK_MODE
#ifdef BUFFER
  stages[pipeline.nb_stages].type    = VP_API_INPUT_CAMIF;
  stages[pipeline.nb_stages].cfg     = (void *)&bbc;
  stages[pipeline.nb_stages++].funcs =  blockline_to_buffer_funcs;
#endif
#endif

#ifdef ENCODE 
  stages[pipeline.nb_stages].type    = VP_API_FILTER_ENCODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages++].funcs = vlib_encoding_funcs;
#endif

#ifdef FILE_OUTPUT
  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_FILE;
  stages[pipeline.nb_stages].cfg     = (void *)&ofc;
  stages[pipeline.nb_stages++].funcs = vp_stages_output_file_funcs;
#endif

#ifdef ETHERNET_OUTPUT
  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&occ;
  stages[pipeline.nb_stages++].funcs = vp_stages_output_com_funcs;
#endif

  pipeline.stages = &stages[0];

  // launch pipelines
  vp_api_open(&pipeline, &pipeline_handle);
  
  out.status = VP_API_STATUS_PROCESSING;

  /////////////////////////////////////////////////////////////////////////////////////////
  
  uint16_t i = 0;

  struct timeval time1,time2;

  printf("Pipeline launched....\n");
  gettimeofday(&time1,NULL);
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING))
    {
      i++;
      //printf("image %d \n",i);
      if (i>50)
        break;
      //vp_os_delay(20);
    }   
  /////////////////////////////////////////////////////////////////////////////////////////
 
  gettimeofday(&time2,NULL);
  int seconde = time2.tv_sec-time1.tv_sec;
  int ms = time2.tv_usec-time1.tv_usec;
  if (ms<0)
  {
     seconde--;
     ms += 1000000;
  }
  ms = ms/1000;
  float fps = ((float)i*1000)/(float)(seconde*1000+ms);
  printf("temps ecoule : %d.%d / nombre image : %d average fps : %f\n",seconde,ms,i,fps);
  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}