Example #1
DEFINE_THREAD_ROUTINE(video_update_thread, data)
{
    C_RESULT res;

    vp_api_io_pipeline_t    pipeline;
    vp_api_io_data_t        out;
    vp_api_io_stage_t       stages[NB_STAGES];

    vp_api_picture_t picture;

    video_com_config_t              icc;
    vlib_stage_decoding_config_t    vec;
    vp_stages_yuv2rgb_config_t      yuv2rgbconf;

    /// Picture configuration
    picture.format        = PIX_FMT_YUV420P;
    picture.width         = STREAM_WIDTH;
    picture.height        = STREAM_HEIGHT;
    picture.framerate     = 30;

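    /* YUV420P: the two chroma planes are quarter-size, hence the /4 allocations and half-width cb/cr strides. */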
    picture.y_buf   = (uint8_t *)vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT     );
    picture.cr_buf  = (uint8_t *)vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );
    picture.cb_buf  = (uint8_t *)vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT / 4 );

    picture.y_line_size   = STREAM_WIDTH;
    picture.cb_line_size  = STREAM_WIDTH / 2;
    picture.cr_line_size  = STREAM_WIDTH / 2;

    vp_os_memset(&icc,          0, sizeof( icc ));
    vp_os_memset(&vec,          0, sizeof( vec ));
    vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

    icc.com                 = COM_VIDEO();
    icc.buffer_size         = 100000;
    icc.protocol            = VP_COM_UDP;
    COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

    vec.width               = STREAM_WIDTH;
    vec.height              = STREAM_HEIGHT;
    vec.picture             = &picture;
#ifdef USE_VIDEO_YUV
    vec.luma_only           = FALSE;
#else
    vec.luma_only           = TRUE;
#endif // USE_VIDEO_YUV
    vec.block_mode_enable   = TRUE;

    yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
    if( CAMIF_H_CAMERA_USED == CAMIF_CAMERA_OVTRULY_UPSIDE_DOWN_ONE_BLOCKLINE_LESS )
        yuv2rgbconf.mode = VP_STAGES_YUV2RGB_MODE_UPSIDE_DOWN;

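    /* Assemble the pipeline: socket input -> VLIB decoder -> YUV-to-RGB -> renderer output. */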
    pipeline.nb_stages = 0;

    stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
    stages[pipeline.nb_stages].cfg     = (void *)&icc;
    stages[pipeline.nb_stages].funcs   = video_com_funcs;

    pipeline.nb_stages++;

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void*)&vec;
    stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

    pipeline.nb_stages++;

    stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
    stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
    stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

    pipeline.nb_stages++;

    stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
    stages[pipeline.nb_stages].cfg     = (void *)&vec;
    stages[pipeline.nb_stages].funcs   = vp_stages_export_funcs;

    pipeline.nb_stages++;

    pipeline.stages = &stages[0];
    PIPELINE_HANDLE pipeline_handle;
    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) ) {
        while( SUCCEED(vp_api_run(&pipeline, &out)) ) {
        }

        vp_api_close(&pipeline, &pipeline_handle);
    }

    return (THREAD_RET)0;
}
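How such a routine is typically wired into an application (a minimal sketch, assuming the standard ARDroneLib threading macros; the thread-table entry name must match the routine name, and the priority value 20 is illustrative):

/* Declaration, normally in a header */
PROTO_THREAD_ROUTINE(video_update_thread, data);

/* Registration in the application's thread table */
BEGIN_THREAD_TABLE
  THREAD_TABLE_ENTRY(video_update_thread, 20)
END_THREAD_TABLE

/* Start the thread (e.g. from ardrone_tool_init_custom) ... */
START_THREAD(video_update_thread, NULL);

/* ... and join it at shutdown (e.g. from ardrone_tool_shutdown_custom) */
JOIN_THREAD(video_update_thread);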
Example #2
DEFINE_THREAD_ROUTINE(video_stage, data)
{
	C_RESULT res;
	
	vp_api_io_pipeline_t    pipeline;
	vp_api_io_data_t        out;
	vp_api_io_stage_t       stages[NB_STAGES];
	
	vp_api_picture_t picture;
	
	vlib_stage_decoding_config_t    vec;
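	/* Note: icc, pipeline_handle, video_stage_in_pause, video_stage_mutex and
	   video_stage_condition are assumed to be file-scope globals shared with
	   the pause/resume code below. */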

	vp_os_memset(&icc,          0, sizeof( icc ));
	vp_os_memset(&vec,          0, sizeof( vec ));
	vp_os_memset(&picture,      0, sizeof( picture ));

//#ifdef RECORD_VIDEO
//	video_stage_recorder_config_t vrc;
//#endif
	
	/// Picture configuration
	picture.format        = /*PIX_FMT_YUV420P */ PIX_FMT_RGB565;
	
	picture.width         = 512;
	picture.height        = 512;
	picture.framerate     = 15;
	
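	/* PIX_FMT_RGB565 packs two bytes per pixel into y_buf; the chroma planes are unused. */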
	picture.y_buf   = vp_os_malloc( picture.width * picture.height * 2);
	picture.cr_buf  = NULL;
	picture.cb_buf  = NULL;

	picture.y_line_size   = picture.width * 2;
	picture.cb_line_size  = 0;
	picture.cr_line_size  = 0;
		
	icc.com                 = COM_VIDEO();
	icc.buffer_size         = 100000;
	icc.protocol            = VP_COM_UDP;
	COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
	
	vec.width               = 512;
	vec.height              = 512;
	vec.picture             = &picture;
	vec.luma_only           = FALSE;
	vec.block_mode_enable   = TRUE;
	
	pipeline.nb_stages = 0;
	
	stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
	stages[pipeline.nb_stages].cfg     = (void *)&icc;
	stages[pipeline.nb_stages].funcs   = video_com_funcs;
	pipeline.nb_stages++;
	
	stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
	stages[pipeline.nb_stages].cfg     = (void*)&vec;
	stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;
	pipeline.nb_stages++;
	
/*
#ifdef RECORD_VIDEO
	stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
	stages[pipeline.nb_stages].cfg     = (void*)&vrc;
	stages[pipeline.nb_stages].funcs   = video_recorder_funcs;
	pipeline.nb_stages++;
#endif
*/
	stages[pipeline.nb_stages].type    = VP_API_OUTPUT_LCD;
	stages[pipeline.nb_stages].cfg     = (void*)&vec;
	stages[pipeline.nb_stages].funcs   = video_stage_funcs;
	pipeline.nb_stages++;
		
	pipeline.stages = &stages[0];
		
	if( !ardrone_tool_exit() )
	{
		PRINT("\nvideo stage thread initialisation\n\n");
		
		video_stage_init();

		res = vp_api_open(&pipeline, &pipeline_handle);

		
		if( VP_SUCCEEDED(res) )
		{

			int loop = VP_SUCCESS;
			out.status = VP_API_STATUS_PROCESSING;
#ifdef RECORD_VIDEO		    
			{
				DEST_HANDLE dest;
				dest.stage = 2;
				dest.pipeline = pipeline_handle;
				vp_api_post_message( dest, PIPELINE_MSG_START, NULL, (void*)NULL);
			}
#endif			

			video_stage_in_pause = FALSE;
			
			while( !ardrone_tool_exit() && (loop == VP_SUCCESS) )
			{

				if(video_stage_in_pause)
				{
					vp_os_mutex_lock(&video_stage_mutex);
					icc.num_retries = VIDEO_MAX_RETRIES;
					vp_os_cond_wait(&video_stage_condition);
					vp_os_mutex_unlock(&video_stage_mutex);
				}

				if( VP_SUCCEEDED(vp_api_run(&pipeline, &out)) ) {
					if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
						loop = VP_SUCCESS;
					}
				}
				else loop = -1; // Finish this thread
			}
			
			vp_api_close(&pipeline, &pipeline_handle);
		}
	}
	
	PRINT("   video stage thread ended\n\n");
	
	return (THREAD_RET)0;
}
Example #3
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation\n\n");

    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
              loop = SUCCESS;
            }
          }
          else loop = -1; // Finish this thread
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
Example #4
PROTO_THREAD_ROUTINE(app,argv)
{
  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES_MAX];

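  /* icc, bpc, osc, picture and pipeline_handle are assumed to be file-scope globals in this example. */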
  vp_os_memset( &icc,     0, sizeof(icc) );
  vp_os_memset( &bpc,     0, sizeof(bpc) );
  vp_os_memset( &osc,     0, sizeof(osc) );

  icc.com               = COM_VIDEO();
  icc.buffer_size       = ACQ_WIDTH*ACQ_HEIGHT*3/2;
  icc.socket.protocol   = VP_COM_TCP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, 5555, SERVER_HOST);

  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;
  picture.width         = ACQ_WIDTH;
  picture.height        = ACQ_HEIGHT;
  picture.framerate     = 30;
  picture.y_buf         = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT );
  picture.cr_buf        = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );
  picture.cb_buf        = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );
  picture.y_line_size   = ACQ_WIDTH;
  picture.cb_line_size  = ACQ_WIDTH / 2;
  picture.cr_line_size  = ACQ_WIDTH / 2;
  picture.y_pad         = 0;
  picture.c_pad         = 0;

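  /* The buffer-to-picture stage reassembles the raw stream into full frames, one 16-line blockline at a time. */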
  bpc.picture             = &picture;
  bpc.block_mode_enable   = FALSE;
  bpc.y_buffer_size       = ACQ_WIDTH * ACQ_HEIGHT;
  bpc.y_blockline_size    = ACQ_WIDTH * 16; // each blockline has 16 lines
  bpc.y_current_size      = 0;
  bpc.num_frames          = 0;
  bpc.y_buf_ptr           = NULL;
  bpc.luma_only           = FALSE;
  bpc.cr_buf_ptr          = NULL;
  bpc.cb_buf_ptr          = NULL;

  osc.width           = ACQ_WIDTH;
  osc.height          = ACQ_HEIGHT;
  osc.bpp             = 16;
  osc.window_width    = ACQ_WIDTH;
  osc.window_height   = ACQ_HEIGHT;
  osc.pic_width       = ACQ_WIDTH;
  osc.pic_height      = ACQ_HEIGHT;
  osc.y_size          = ACQ_WIDTH*ACQ_HEIGHT;
  osc.c_size          = (ACQ_WIDTH*ACQ_HEIGHT) >> 2;

  pipeline.nb_stages                 = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages++].funcs = vp_stages_input_com_funcs;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&bpc;
  stages[pipeline.nb_stages++].funcs = vp_stages_buffer_to_picture_funcs;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = (void *)&osc;
  stages[pipeline.nb_stages++].funcs = vp_stages_output_sdl_funcs;

  pipeline.stages                    = &stages[0];

  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING))
    {
    }

  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
Example #5
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("Video stage thread initialization\n");

    res = vp_api_open(&pipeline, &pipeline_handle);
   // PRINT("VP_API OPENED\n");
    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;
      cvStartWindowThread();
      #define frontWindow "DroneView"
      cvNamedWindow(frontWindow, CV_WINDOW_AUTOSIZE);
      frontImgStream = cvCreateImage(cvSize(picture.width, picture.height), IPL_DEPTH_8U, 3);

      #define bottomWindow "BomberView"
      if(extractBottomImage)
	cvNamedWindow(bottomWindow, CV_WINDOW_AUTOSIZE);
      bottomImgStream = cvCreateImage(cvSize(bottomWidth, bottomHeight), IPL_DEPTH_8U, 3);

      
      IplImage *frame;
      CvCapture *capture = NULL; // initialised so the cleanup below can test it safely
      if(USEWEBCAM)
      {
        capture = cvCaptureFromCAM(WEBCAMINDEX);
    
        if(!capture)
        {
          printf("ERROR: Cannot Initialize Webcam.\n");
          loop = !SUCCESS;
        }
      }

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
        if(!USEWEBCAM)
        {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
              if (vec.controller.num_frames==0) continue;
	      /*int i;
              for(i = 0; i < (picture.width)*(picture.height); i++)
              {
                frontImgStream->imageData[i*3] = picture.y_buf[i];
                frontImgStream->imageData[i*3+1] = picture.y_buf[i];
                frontImgStream->imageData[i*3+2] = picture.y_buf[i];
              }
	      */
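             /* rgbHeader is assumed to be a global IplImage header wrapping the pipeline's RGB24 output buffer. */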
             cvCvtColor(rgbHeader, frontImgStream, CV_RGB2BGR); //[for colour]

              if(extractBottomImage)
              {
                int j = 0, i;
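                /* Walk the top-left bottomWidth x bottomHeight block of the front image:
                   copy it into bottomImgStream, then blank it in frontImgStream. */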
                for(i = 0; j < bottomHeight*bottomWidth; i = i%picture.width >= bottomWidth-1 ? i - (i%picture.width) + picture.width : i+1)
                {
                  bottomImgStream->imageData[j*3] = frontImgStream->imageData[i*3];
                  bottomImgStream->imageData[j*3+1] = frontImgStream->imageData[i*3+1];
                  bottomImgStream->imageData[j*3+2] = frontImgStream->imageData[i*3+2];
                  frontImgStream->imageData[i*3] = 0;
                  frontImgStream->imageData[i*3+1] = 0;
                  frontImgStream->imageData[i*3+2] = 0;
                  j++;
                }
              }
              
              //cvLine(frontImgStream, cvPoint(picture.width/2, 0), cvPoint(picture.width/2, picture.height), CV_RGB(0,255,0), 1, CV_AA, 0 );
              cvShowImage(frontWindow, frontImgStream);
              if(extractBottomImage)
                cvShowImage(bottomWindow, bottomImgStream);
              
              if(CAPTUREIMAGESTREAM)
              {
                char filename[256];
                struct timeval t;
                gettimeofday(&t, NULL);
                sprintf(filename, "%d.%06d.jpg", (int)t.tv_sec, (int)t.tv_usec);
                if(frontImgStream != NULL && cvSaveImage(filename, frontImgStream, 0))
                  printf("Image dumped to %s\n", filename);
                else
                  printf("Error dumping image...\n");
                if(extractBottomImage)
                {
                  sprintf(filename, "%d.%06dbottom.jpg", (int)t.tv_sec, (int)t.tv_usec);
                  if(bottomImgStream != NULL && cvSaveImage(filename, bottomImgStream, 0))
                    printf("Bottom Image dumped to %s\n", filename);
                  else
                    printf("Error dumping bottom image...\n");
                }
              }
          }
          else loop = -1;
        }
        else //use webcam
        {
          frame = cvQueryFrame(capture);
      
          if(!frame) break;
          
          cvResize(frame, frontImgStream, CV_INTER_LINEAR);
          cvShowImage(frontWindow, frontImgStream);
          
          cvWaitKey(1);
        }
      }

      cvDestroyWindow(frontWindow);
      if(extractBottomImage)
	cvDestroyWindow(bottomWindow);
      cvReleaseImage(&frontImgStream);
      cvReleaseImage(&bottomImgStream);
      if(capture)
        cvReleaseCapture(&capture);
      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
Example #6
DEFINE_THREAD_ROUTINE(video_recorder, data)
{
  if (1 >= ARDRONE_VERSION ()) // run only on AR.Drone 2 and later
    {
      return (THREAD_RET)0;
    }
  C_RESULT res = C_OK;
    
  vp_api_io_pipeline_t pipeline;
  vp_api_io_data_t out;
  vp_api_io_stage_t stages[3];
  PIPELINE_HANDLE video_recorder_pipeline_handle;
  video_recorder_thread_param_t *video_recorder_thread_param = (video_recorder_thread_param_t *)data;

  video_stage_tcp_config_t tcpConf;
    
  if(video_recorder_thread_param != NULL)
  {
      CHANGE_THREAD_PRIO(video_recorder, video_recorder_thread_param->priority);
      video_stage_encoded_recorder_config.finish_callback = video_recorder_thread_param->finish_callback; 
  }
    
  vp_os_memset (&record_icc, 0x0, sizeof (record_icc));
  record_icc.com = COM_VIDEO ();
  record_icc.buffer_size = (8*1024);
  record_icc.protocol = VP_COM_TCP;
  record_icc.forceNonBlocking = &tcpConf.tcpStageHasMoreData;
  COM_CONFIG_SOCKET_VIDEO (&record_icc.socket, VP_COM_CLIENT, VIDEO_RECORDER_PORT, wifi_ardrone_ip);
  record_icc.timeoutFunc = &video_stage_encoded_recorder_com_timeout;
  record_icc.timeoutFuncAfterSec = 10;
  
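  /* record_icc.forceNonBlocking already points into tcpConf; the memset below
     clears the flag but leaves that pointer valid. */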
  vp_os_memset (&tcpConf, 0, sizeof (tcpConf));
  tcpConf.maxPFramesPerIFrame = 30;
  tcpConf.frameMeanSize = 160*1024;
  tcpConf.latencyDrop = 0;
    
  pipeline.nb_stages = 0;
  pipeline.stages = &stages[0];
    
  // Com
  stages[pipeline.nb_stages].type = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg  = (void *)&record_icc;
  stages[pipeline.nb_stages++].funcs = video_com_funcs;

  // TCP
  stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg  = (void *) &tcpConf;
  stages[pipeline.nb_stages++].funcs = video_stage_tcp_funcs;
    
  // Record
  stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg  = (void *) &video_stage_encoded_recorder_config;
  stages[pipeline.nb_stages++].funcs = video_encoded_recorder_funcs;
    
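  /* Busy-waits (printing continuously) until the recorder is initialised elsewhere. */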
  while (FALSE == isInit)
    {
        printf ("Waiting for init\n");
    }
    
  if (! ardrone_tool_exit ())
    {
      PRINT ("Video recorder thread initialisation\n");
      res = vp_api_open (&pipeline, &video_recorder_pipeline_handle);
        
      if (SUCCEED (res))
        {
          int loop = SUCCESS;
          out.status = VP_API_STATUS_PROCESSING;
            
          while (! ardrone_tool_exit () && (SUCCESS == loop))
            {
              if (video_recorder_in_pause)
                {
                  vp_os_mutex_lock (&video_recorder_mutex);
                  record_icc.num_retries = VIDEO_MAX_RETRIES;
                  vp_os_cond_wait (&video_recorder_condition);
                  vp_os_mutex_unlock (&video_recorder_mutex);
                }

              if (SUCCEED (vp_api_run (&pipeline, &out)))
                {
                  if ((VP_API_STATUS_PROCESSING == out.status) ||
                      (VP_API_STATUS_STILL_RUNNING == out.status))
                    {
                      loop = SUCCESS;
                    }
                  else loop = -1;
                }
              else loop = -1;
            }
          vp_api_close (&pipeline, &video_recorder_pipeline_handle);
        }
    }
  PRINT ("Video recorder thread ended\n");
    
  return (THREAD_RET)res;
}
Example #7
/*
	The video processing thread.
	This function can be kept as it is by most users.
	It automatically receives the video stream in a loop, decodes it, and then
	calls the 'output_rendering_device_stage_transform' function for each decoded frame.
*/
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
  

  /* Picture configuration */
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = DRONE_VIDEO_MAX_WIDTH;
  picture.height        = DRONE_VIDEO_MAX_HEIGHT;
  picture.framerate     = 15;
  //picture.framerate     = 3;

  picture.y_buf   = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );

  picture.y_line_size   = DRONE_VIDEO_MAX_WIDTH;
  picture.cb_line_size  = DRONE_VIDEO_MAX_WIDTH / 2;
  picture.cr_line_size  = DRONE_VIDEO_MAX_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  /* Video socket configuration */
  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;

  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  /* Video decoder configuration */
  /* Size of the buffers used for decoding.
     This must be set to the maximum possible video resolution used by the drone.
     The actual video resolution will be stored by the decoder in vec.controller
     (see vlib_stage_decode.h). */
  vec.width               = DRONE_VIDEO_MAX_WIDTH;
  vec.height              = DRONE_VIDEO_MAX_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;

  /* Video pipeline building */
  pipeline.nb_stages = 0;

  /* Video stream reception */
  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

  /* Video stream decoding */
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  /* YUV to RGB conversion.
     YUV format is used by the video stream protocol.
     Remove this stage if your rendering device can handle
     YUV data directly. */
  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  /* User code */
  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;  /* Set to VP_API_OUTPUT_SDL even if SDL is not used */
  stages[pipeline.nb_stages].cfg     = (void*)&vec;   /* give the decoder information to the renderer */
  stages[pipeline.nb_stages].funcs   = vp_stages_output_rendering_device_funcs;

  pipeline.nb_stages++;
  pipeline.stages = &stages[0];

  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation\n\n");

    // switch to vertical camera
    //ardrone_at_zap(ZAP_CHANNEL_VERT);

    res = vp_api_open(&pipeline, &pipeline_handle);

    if( VP_SUCCEEDED(res) )
    {
      int loop = VP_SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while( !ardrone_tool_exit() && (loop == VP_SUCCESS) )
      {
        if( VP_SUCCEEDED(vp_api_run(&pipeline, &out)) ) {
          if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
            loop = VP_SUCCESS;
          }
        }
        else loop = -1; // Finish this thread
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
Example #8
DEFINE_THREAD_ROUTINE(video_stage, data) {
    C_RESULT res;

    vp_api_io_pipeline_t pipeline;
    vp_api_io_data_t out;
    
    vp_api_io_stage_t * stages;
    video_stage_merge_slices_config_t merge_slices_cfg;
    
    uint8_t i;
    
    specific_parameters_t * params = (specific_parameters_t *)(data);

    if (1 == params->needSetPriority)
    {
      CHANGE_THREAD_PRIO (video_stage, params->priority);
    }
    
    vp_os_memset(&icc_tcp, 0, sizeof ( icc_tcp));
    vp_os_memset(&icc_udp, 0, sizeof ( icc_udp));
    
    // Video Communication config
    icc_tcp.com = COM_VIDEO();
    icc_tcp.buffer_size = (1024*1024);
    icc_tcp.protocol = VP_COM_TCP;
    COM_CONFIG_SOCKET_VIDEO(&icc_tcp.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
    
    // Video Communication config
    icc_udp.com = COM_VIDEO();
    icc_udp.buffer_size = (1024*1024);
    icc_udp.protocol = VP_COM_UDP;
    COM_CONFIG_SOCKET_VIDEO(&icc_udp.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

    icc.nb_sockets = 2;
    icc.configs = icc_tab;
    icc.forceNonBlocking = &(tcpConf.tcpStageHasMoreData);
    icc_tab[1]  = &icc_tcp;
    icc_tab[0]  = &icc_udp;
    
    icc.buffer_size = (1024*1024);
    
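    /* icc aggregates both sockets for the multisocket input stage (icc_tab[0] = UDP,
       icc_tab[1] = TCP). icc, icc_tcp, icc_udp, icc_tab, tcpConf and vec are assumed
       to be file-scope globals. */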
    vp_os_memset(&vec, 0, sizeof ( vec));
    vp_os_memset(&merge_slices_cfg, 0, sizeof ( merge_slices_cfg)); // zero the slice-merge config before handing it to its stage

     stages = (vp_api_io_stage_t*) (vp_os_calloc(
        NB_STAGES + params->pre_processing_stages_list->length + params->post_processing_stages_list->length,
        sizeof (vp_api_io_stage_t)
    ));
    
    vec.src_picture = params->in_pic;
    vec.dst_picture = params->out_pic;
    
    vp_os_memset(&tcpConf, 0, sizeof ( tcpConf));
    tcpConf.maxPFramesPerIFrame = 30;
    tcpConf.frameMeanSize = 160*1024;
    tcpConf.tcpStageHasMoreData = FALSE;
    tcpConf.latencyDrop = 1;

	pipeline.name = "video_stage_pipeline";
    pipeline.nb_stages = 0;
    pipeline.stages = &stages[0];

    //ENCODED FRAME PROCESSING STAGES
    stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
    stages[pipeline.nb_stages].cfg     = (void *) &icc;
    stages[pipeline.nb_stages++].funcs = video_com_multisocket_funcs;
    printf("thread_video_stage: adding multisocket_funcs as stage %d (input) of the video pipeline, AR.Drone version %d.%d.%d\n", pipeline.nb_stages, ardroneVersion.majorVersion, ardroneVersion.minorVersion, ardroneVersion.revision);

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void *) &tcpConf;
    stages[pipeline.nb_stages++].funcs = video_stage_tcp_funcs;
    printf("thread_video_stage: adding tcp_funcs as stage %d (filter) of the video pipeline\n", pipeline.nb_stages);
    
    // Record Encoded video
    if(1 == ARDRONE_VERSION())
    {

        ardrone_academy_stage_recorder_config.dest.pipeline = video_pipeline_handle;
        ardrone_academy_stage_recorder_config.dest.stage    = pipeline.nb_stages;
        stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
        stages[pipeline.nb_stages].cfg     = (void*)&ardrone_academy_stage_recorder_config;
        stages[pipeline.nb_stages++].funcs = ardrone_academy_stage_recorder_funcs;
        printf("thread_video_stage: adding academy_recorder_funcs as stage %d (filter) of the video pipeline\n", pipeline.nb_stages);

    }
    else
    {
      // Nothing to do for AR.Drone 2 as we have a separated thread for recording
    }

    //PRE-DECODING STAGES ==> recording, ...
    for(i=0;i<params->pre_processing_stages_list->length;i++){
        stages[pipeline.nb_stages].type    = params->pre_processing_stages_list->stages_list[i].type;
        stages[pipeline.nb_stages].cfg     = params->pre_processing_stages_list->stages_list[i].cfg;
        stages[pipeline.nb_stages++].funcs = params->pre_processing_stages_list->stages_list[i].funcs;
    }
	printf("thread_video_stage: adding pre_processing_stages_list to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void *)&merge_slices_cfg;
    stages[pipeline.nb_stages++].funcs = video_stage_merge_slices_funcs;
	printf("thread_video_stage: adding merge_slices_funcs to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    //DECODING STAGES
    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void*) &vec;
    stages[pipeline.nb_stages++].funcs = video_decoding_funcs;
	printf("thread_video_stage: adding video_decoding_funcs to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    //POST-DECODING STAGES ==> transformation, display, ...
    for(i=0;i<params->post_processing_stages_list->length;i++){
        stages[pipeline.nb_stages].type    = params->post_processing_stages_list->stages_list[i].type;
        stages[pipeline.nb_stages].cfg     = params->post_processing_stages_list->stages_list[i].cfg;
        stages[pipeline.nb_stages++].funcs = params->post_processing_stages_list->stages_list[i].funcs;
    }
	printf("thread_video_stage: adding post_processing_stages_list to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );


    if (!ardrone_tool_exit()) {
        PRINT("\nvideo stage thread initialisation\n\n");

        res = vp_api_open(&pipeline, &video_pipeline_handle);

        if (SUCCEED(res)) {
            int loop = SUCCESS;
            out.status = VP_API_STATUS_PROCESSING;

            while (!ardrone_tool_exit() && (loop == SUCCESS)) {
                if (video_stage_in_pause) {
                    vp_os_mutex_lock(&video_stage_mutex);
                    icc.num_retries = VIDEO_MAX_RETRIES;
                    vp_os_cond_wait(&video_stage_condition);
                    vp_os_mutex_unlock(&video_stage_mutex);
                }

                if (SUCCEED(vp_api_run(&pipeline, &out))) {
                    if ((out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING)) {
                        loop = SUCCESS;
                    }
                } else loop = -1; // Finish this thread
            }
            
            vp_os_free(params->pre_processing_stages_list->stages_list);
            vp_os_free(params->post_processing_stages_list->stages_list);
            vp_os_free(params->pre_processing_stages_list);
            vp_os_free(params->post_processing_stages_list);
            
            vp_os_free(params->in_pic);
            vp_os_free(params->out_pic);
            
            vp_os_free(params);
            
            vp_api_close(&pipeline, &video_pipeline_handle);
        }
    }

    PRINT("\nvideo stage thread ended\n\n");

    return (THREAD_RET) 0;
}
Example #9
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  PRINT("\n   Record Video\n\n");
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
  
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation\n\n");
    
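    /* Create a Unix-domain server socket on /tmp/video and block in accept() until a
       client connects, before opening the pipeline. sockfd, newsockfd, serv_addr,
       servlen and clilen are assumed to be file-scope globals. */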
    if ((sockfd = socket(AF_UNIX, SOCK_STREAM, 0)) < 0)
    {
      printf("error creating socket");
    }
    bzero((char *) &serv_addr, sizeof(serv_addr));
    serv_addr.sun_family = AF_UNIX;
    unlink("/tmp/video");
    strcpy(serv_addr.sun_path, "/tmp/video");
    servlen = strlen(serv_addr.sun_path) + sizeof(serv_addr.sun_family);
    if (bind(sockfd, (struct sockaddr *) &serv_addr, servlen) < 0)
    {
      printf("error binding socket");
    }

    listen(sockfd, 5);

    clilen = sizeof(serv_addr);

    newsockfd = accept(sockfd, (struct sockaddr *) &serv_addr, &clilen);

    printf("accepted connection, sockfd %d\n", sockfd);
    
   
   PRINT("\n   Video socket initialisation\n\n");
    
    
    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
              loop = SUCCESS;
            }
          }
          else loop = -1; // Finish this thread
          
          //printf("%s\n", pixbuf_data);
          //printf("!%d  sizeof *\n", sizeof(pixbuf_data));
          
          //printf("!%d --- %d\n", n, errno);
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }
  

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
Example #10
DEFINE_THREAD_ROUTINE(drone_video_stage_thread, data)
{
  PIPELINE_HANDLE pipeline_handle;

  printf("Vision thread started\n");

  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[DRONE_NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;

  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_video_output_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];

  pthread_mutex_lock(&drone_sharedDataMutex);
  int doRun = drone_doRun;
  pthread_mutex_unlock(&drone_sharedDataMutex);

  /* Processing of a pipeline */
  if (doRun) {
    res = 0;
    res = vp_api_open(&pipeline, &pipeline_handle);

    if (SUCCEED(res)) {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while (doRun && (loop == SUCCESS)) {
          pthread_mutex_lock(&drone_sharedDataMutex);
          doRun = drone_doRun;
          pthread_mutex_unlock(&drone_sharedDataMutex);

          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if ((out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING)) {
              loop = SUCCESS;
            }
          }
          else loop = -1; // Finish this thread if the pipeline fails
      }

      fprintf(stderr, "BUG and WORKAROUND: vp_api_close was never called because it is buggy\n");
//      vp_api_close(&pipeline, &pipeline_handle); // <- TODO: do not call this - fix this function in SDK
    }
  }

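  /* Free the picture planes ourselves, since vp_api_close() is skipped above. */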
  vp_os_free(picture.y_buf);
  vp_os_free(picture.cr_buf);
  vp_os_free(picture.cb_buf);

  printf("Vision thread terminated\n");

  return (THREAD_RET) 0;
}