コード例 #1
0
ファイル: mobile_main.c プロジェクト: LYJHub/AR.FreeFlight
/*
 * Custom shutdown hook invoked by ardrone_tool when the application exits.
 * Joins every optional tracking/input thread (guarded by build flags),
 * then the IHM and video threads, and finally unregisters the control
 * input device. Always returns C_OK.
 */
C_RESULT ardrone_tool_shutdown_custom() {
#ifdef USE_TABLE_PILOTAGE
    JOIN_THREAD(novadem);
#endif // USE_TABLE_PILOTAGE
#ifdef PC_USE_POLARIS
    JOIN_THREAD(polaris);
#endif // PC_USE_POLARIS
#ifdef USE_ARDRONE_VICON
    JOIN_THREAD(vicon);
#endif // USE_ARDRONE_VICON
    JOIN_THREAD(ihm);
    /* The video pipeline thread may be suspended; resume it so it can
     * observe the shutdown request and terminate, then join it. */
    video_stage_resume_thread(); //Resume thread to kill it !
    JOIN_THREAD(video_stage);
    /* AR.Drone 2.x also runs a video recorder thread that must be
     * resumed and joined the same way. */
    if (2 <= ARDRONE_VERSION ())
      {
        video_recorder_resume_thread ();
        JOIN_THREAD (video_recorder);
      }
#ifdef RAW_CAPTURE
    JOIN_THREAD(raw_capture);
#endif
    JOIN_THREAD(remote_console);

    /*ardrone_tool_input_remove( &gamepad );
    ardrone_tool_input_remove( &radioGP );
    ardrone_tool_input_remove( &ps3pad );
    ardrone_tool_input_remove( &wiimote_device );*/
    ardrone_tool_input_remove(&control_device);

    return C_OK;
}
コード例 #2
0
ファイル: mobile_main.c プロジェクト: evenator/senior-project
/*
 * Resumes the AR.Drone engine after a pause: restarts the video
 * pipeline thread, then the core ardrone_tool loop.
 */
void ardroneEngineResume( void )
{
#ifdef DEBUG_THREAD
	PRINT( "%s\n", __FUNCTION__ ); /* trace entry when thread debugging is enabled */
#endif
	video_stage_resume_thread();
	ardrone_tool_resume();
}
コード例 #3
0
ファイル: ardrone_sdk.cpp プロジェクト: andresjhe/JdeRobot
	/*
	 * Custom shutdown hook: joins the JdeRobot update thread before
	 * destroying the driver it runs on, resumes the video pipeline
	 * thread so it can terminate, and unregisters the teleop input.
	 */
	C_RESULT ardrone_tool_shutdown_custom() 
	{
		PRINT("Shutting down ... \n ");
		JOIN_THREAD(update_jde);
		delete jdeDriver; // safe only after update_jde has been joined
		video_stage_resume_thread();
		ardrone_tool_input_remove(&teleop);
		return C_OK;
	}
コード例 #4
0
/*
 * Custom shutdown hook: unregisters the gamepad input device, then
 * resumes the (possibly suspended) video pipeline thread so it can
 * terminate, and joins it. Always returns C_OK.
 */
C_RESULT ardrone_tool_shutdown_custom ()
{
    // Unregister the game controller
    ardrone_tool_input_remove( &gamepad );
    
    video_stage_resume_thread(); //Resume thread to kill it !
    JOIN_THREAD(video_stage);
    
    return C_OK;
}
コード例 #5
0
ファイル: ardrone_testing_tool.c プロジェクト: MorS25/ardrone
/*
 * Custom shutdown hook: resumes and joins the video pipeline thread,
 * and on AR.Drone 2.x also resumes and joins the video recorder
 * thread. Always returns C_OK.
 */
C_RESULT ardrone_tool_shutdown_custom ()
{
    video_stage_resume_thread(); //Resume thread to kill it !
    JOIN_THREAD(video_stage);
    /* The recorder thread exists only on AR.Drone 2.x. */
    if (2 <= ARDRONE_VERSION ())
    {
        video_recorder_resume_thread ();
        JOIN_THREAD (video_recorder);
    }

    return C_OK;
}
コード例 #6
0
ファイル: app.c プロジェクト: lxy0816/Ardrone_video
/*
 * Notifies the tool that the host application resumed: restarts the
 * video pipeline thread (plus the recorder thread on AR.Drone 2.x),
 * then resumes the main ardrone_tool loop.
 */
void parrot_ardrone_notify_resume()
{
	video_stage_resume_thread();

	/* The HD video recorder thread only exists on AR.Drone 2.x. */
	if (IS_LEAST_ARDRONE2)
	{
		video_recorder_resume_thread();
	}

	ardrone_tool_resume();

	LOGD(TAG, "AR.Drone Tool Resume [OK]");
}
コード例 #7
0
ファイル: pilot.c プロジェクト: DisruptiveMind/KARD
/*
 * Custom shutdown hook: resumes and joins the video pipeline thread,
 * joins the Kinect thread, and on AR.Drone 2.x resumes and joins the
 * video recorder thread. Always returns C_OK.
 */
C_RESULT ardrone_tool_shutdown_custom ()
{
    video_stage_resume_thread(); //Resume thread to kill it !
    JOIN_THREAD(video_stage);
    JOIN_THREAD(kinect);

    /* The recorder thread exists only on AR.Drone 2.x. */
    if (2 <= ARDRONE_VERSION ()) {
        video_recorder_resume_thread ();
        JOIN_THREAD (video_recorder);
    }

	//JOIN_THREAD(main_application_thread);
	//ardrone_tool_input_remove(&input_controller);
    return C_OK;
}
コード例 #8
0
/*
 * Custom shutdown hook: joins the King-of-the-Hill game-logic threads
 * first, then resumes and joins the video pipeline thread (and the
 * recorder thread on AR.Drone 2.x). Always returns C_OK.
 */
C_RESULT ardrone_tool_shutdown_custom (){
    
    //King of the Hill threads
    JOIN_THREAD(wiimote_logic);
    JOIN_THREAD(drone_logic);
    JOIN_THREAD(score_logic);
    
    video_stage_resume_thread(); //Resume thread to kill it !
    JOIN_THREAD(video_stage);
    /* The recorder thread exists only on AR.Drone 2.x. */
    if (2 <= ARDRONE_VERSION ())
    {
        video_recorder_resume_thread ();
        JOIN_THREAD (video_recorder);
    }

    return C_OK;
}
コード例 #9
0
C_RESULT ardrone_tool_init_custom(void)
{
    /**
     * Set application default configuration
     *
     * In this example, we use the AR.FreeFlight configuration :
     * - Demo navdata rate (15Hz)
     * - Useful additionnal navdata packets enabled (detection, games, video record, wifi quality estimation ...)
     * - Adaptive video enabled (bitrate_ctrl_mode) -> video bitrate will change according to the available bandwidth
     */
    ardrone_application_default_config.navdata_demo = TRUE;
    //use this -> NAVDATA_OPTION_FULL_MASK
    //or coment the line below if detection doesn't work
    ardrone_application_default_config.navdata_options = NAVDATA_OPTION_FULL_MASK;//(NAVDATA_OPTION_MASK(NAVDATA_DEMO_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_DETECT_TAG) | NAVDATA_OPTION_MASK(NAVDATA_GAMES_TAG) | NAVDATA_OPTION_MASK(NAVDATA_MAGNETO_TAG) | NAVDATA_OPTION_MASK(NAVDATA_HDVIDEO_STREAM_TAG) | NAVDATA_OPTION_MASK(NAVDATA_WIFI_TAG));
    if (IS_ARDRONE2){
        ardrone_application_default_config.video_codec = drone2Codec;
    } else {
        ardrone_application_default_config.video_codec = drone1Codec;
    }
    ardrone_application_default_config.video_channel = videoChannel;
    ardrone_application_default_config.bitrate_ctrl_mode = 1;

    /**
     * Define the number of video stages we'll add before/after decoding
     */
#define EXAMPLE_PRE_STAGES 1
#define EXAMPLE_POST_STAGES 1

    /**
     * Allocate useful structures :
     * - index counter
     * - thread param structure and its substructures
     */
    uint8_t stages_index = 0;

    specific_parameters_t *params = (specific_parameters_t *)vp_os_calloc (1, sizeof (specific_parameters_t));
    specific_stages_t *example_pre_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t));
    specific_stages_t *example_post_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t));
    vp_api_picture_t *in_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t));
    vp_api_picture_t *out_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t));

    /**
     * Fill the vp_api_pictures used for video decodig
     * --> out_picture->format is mandatory for AR.Drone 1 and 2. Other lines are only necessary for AR.Drone 1 video decoding
     */
    in_picture->width = 640; // Drone 1 only : Must be greater than the drone 1 picture size (320)
    in_picture->height = 360; // Drone 1 only : Must be greater that the drone 1 picture size (240)

    out_picture->framerate = 20; // Drone 1 only, must be equal to drone target FPS
    out_picture->format = PIX_FMT_RGB24; // MANDATORY ! Only RGB24, RGB565 are supported
    out_picture->width = in_picture->width;
    out_picture->height = in_picture->height;

    // Alloc Y, CB, CR bufs according to target format
    uint32_t bpp = 0;
    switch (out_picture->format)
    {
    case PIX_FMT_RGB24:
        // One buffer, three bytes per pixel
        bpp = 3;
        out_picture->y_buf = vp_os_malloc ( out_picture->width * out_picture->height * bpp );
        out_picture->cr_buf = NULL;
        out_picture->cb_buf = NULL;
        out_picture->y_line_size = out_picture->width * bpp;
        out_picture->cb_line_size = 0;
        out_picture->cr_line_size = 0;
        break;
    case PIX_FMT_RGB565:
        // One buffer, two bytes per pixel
        bpp = 2;
        out_picture->y_buf = vp_os_malloc ( out_picture->width * out_picture->height * bpp );
        out_picture->cr_buf = NULL;
        out_picture->cb_buf = NULL;
        out_picture->y_line_size = out_picture->width * bpp;
        out_picture->cb_line_size = 0;
        out_picture->cr_line_size = 0;
        break;
    default:
        fprintf (stderr, "Wrong video format, must be either PIX_FMT_RGB565 or PIX_FMT_RGB24\n");
        exit (-1);
        break;
    }

    /**
     * Allocate the stage lists
     *
     * - "pre" stages are called before video decoding is done
     *  -> A pre stage get the encoded video frame (including PaVE header for AR.Drone 2 frames) as input
     *  -> A pre stage MUST NOT modify these data, and MUST pass it to the next stage
     * - Typical "pre" stage : Encoded video recording for AR.Drone 1 (recording for AR.Drone 2 is handled differently)
     *
     * - "post" stages are called after video decoding
     *  -> The first post stage will get the decoded video frame as its input
     *   --> Video frame format depend on out_picture->format value (RGB24 / RGB565)
     *  -> A post stage CAN modify the data, as ardrone_tool won't process it afterwards
     *  -> All following post stages will use the output of the previous stage as their inputs
     * - Typical "post" stage : Display the decoded frame
     */
    example_pre_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (EXAMPLE_PRE_STAGES, sizeof (vp_api_io_stage_t));
    example_post_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (EXAMPLE_POST_STAGES, sizeof (vp_api_io_stage_t));

    /**
     * Fill the PRE stage list
     * - name and type are debug infos only
     * - cfg is the pointer passed as "cfg" in all the stages calls
     * - funcs is the pointer to the stage functions
     */
    stages_index = 0;

    vp_os_memset (&precfg, 0, sizeof (pre_stage_cfg_t));
    strncpy (precfg.outputName, encodedFileName, 255);

    example_pre_stages->stages_list[stages_index].name = "Encoded Dumper"; // Debug info
    example_pre_stages->stages_list[stages_index].type = VP_API_FILTER_DECODER; // Debug info
    example_pre_stages->stages_list[stages_index].cfg  = &precfg;
    example_pre_stages->stages_list[stages_index++].funcs  = pre_stage_funcs;

    example_pre_stages->length = stages_index;

    /**
     * Fill the POST stage list
     * - name and type are debug infos only
     * - cfg is the pointer passed as "cfg" in all the stages calls
     * - funcs is the pointer to the stage functions
     */
    stages_index = 0;

    vp_os_memset (&dispCfg, 0, sizeof (display_stage_cfg_t));
    dispCfg.bpp = bpp;
    dispCfg.decoder_info = in_picture;

    example_post_stages->stages_list[stages_index].name = "Decoded display"; // Debug info
    example_post_stages->stages_list[stages_index].type = VP_API_OUTPUT_SDL; // Debug info
    example_post_stages->stages_list[stages_index].cfg  = &dispCfg;
    example_post_stages->stages_list[stages_index++].funcs  = display_stage_funcs;

    example_post_stages->length = stages_index;

    /**
     * Fill thread params for the ardrone_tool video thread
     *  - in_pic / out_pic are reference to our in_picture / out_picture
     *  - pre/post stages lists are references to our stages lists
     *  - needSetPriority and priority are used to control the video thread priority
     *   -> if needSetPriority is set to 1, the thread will try to set its priority to "priority"
     *   -> if needSetPriority is set to 0, the thread will keep its default priority (best on PC)
     */
    params->in_pic = in_picture;
    params->out_pic = out_picture;
    params->pre_processing_stages_list  = example_pre_stages;
    params->post_processing_stages_list = example_post_stages;
    params->needSetPriority = 0;
    params->priority = 0;
    
    
    //set the tag detection
    ENEMY_COLORS_TYPE enemyColors = ARDRONE_DETECTION_COLOR_ORANGE_BLUE;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT (enemy_colors, &enemyColors, NULL);
    
    CAD_TYPE detectType = CAD_TYPE_MULTIPLE_DETECTION_MODE;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT (detect_type, &detectType, NULL);
    
    uint32_t detectH = TAG_TYPE_MASK (TAG_TYPE_SHELL_TAG_V2);
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT (detections_select_h, &detectH, NULL);
    

    /**
     * Start the video thread (and the video recorder thread for AR.Drone 2)
     */
    START_THREAD(video_stage, params);
    START_THREAD (video_recorder, NULL);
    video_stage_init();
    if (2 <= ARDRONE_VERSION ())
    {
        START_THREAD (video_recorder, NULL);
        video_recorder_init ();
    }
    video_stage_resume_thread();
    
    //King of the Hill threads
    START_THREAD(wiimote_logic, NULL);
    START_THREAD(drone_logic, NULL);
    START_THREAD(score_logic, NULL);
    
    return C_OK;
}
コード例 #10
0
/*
 * Custom init hook: resets global state, registers the gamepad, sets the
 * default drone configuration (codec and stream size per drone
 * generation), builds a single-stage video pipeline that displays the
 * decoded RGB24 frames, and starts the video pipeline thread.
 * Always returns C_OK.
 */
C_RESULT ardrone_tool_init_custom (void)
{
    // Reset global data. Use the SDK wrapper (already used below for
    // dispCfg) instead of bzero, which is legacy and removed in POSIX.1-2008.
    vp_os_memset(&g_navdata, 0, sizeof(g_navdata));
    g_autopilot = FALSE;
    g_pass_button = 0;
    
    // Register a new game controller
    ardrone_tool_input_add( &gamepad );
    
    ardrone_application_default_config.navdata_demo = TRUE;
    
    // NOTE(review): NAVDATA_OPTION_FULL_MASK presumably already covers the
    // individual tag masks ORed in below; kept as-is for safety -- confirm
    // against the SDK before simplifying.
    ardrone_application_default_config.navdata_options = 
        (NAVDATA_OPTION_MASK(NAVDATA_DEMO_TAG) | 
        NAVDATA_OPTION_FULL_MASK |
        NAVDATA_OPTION_MASK(NAVDATA_VISION_DETECT_TAG) | 
        NAVDATA_OPTION_MASK(NAVDATA_GAMES_TAG) | 
        NAVDATA_OPTION_MASK(NAVDATA_MAGNETO_TAG) | 
        NAVDATA_OPTION_MASK(NAVDATA_HDVIDEO_STREAM_TAG) | 
        NAVDATA_OPTION_MASK(NAVDATA_WIFI_TAG));

    vp_api_picture_t *in_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t));
    vp_api_picture_t *out_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t));
    
    // Codec and input stream size depend on the drone generation.
    if (IS_ARDRONE2)
    {
        ardrone_application_default_config.video_codec = H264_360P_CODEC;
        in_picture->width = 640;  
        in_picture->height = 360;
    }
    else
    {
        ardrone_application_default_config.video_codec = P264_CODEC;
        in_picture->width = 320;  
        in_picture->height = 240;
    }
    
    ardrone_application_default_config.video_channel = ZAP_CHANNEL_HORI;
    ardrone_application_default_config.bitrate_ctrl_mode = 1;
    
    
    /**
    * Allocate useful structures :
    * - index counter
    * - thread param structure and its substructures
    */
    uint8_t stages_index = 0;
    
    specific_parameters_t *params = (specific_parameters_t *)vp_os_calloc (1, sizeof (specific_parameters_t));
    specific_stages_t *example_pre_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t));
    specific_stages_t *example_post_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t));
    
    out_picture->framerate = 20; // Drone 1 only, must be equal to drone target FPS
    out_picture->format = PIX_FMT_RGB24; 
    out_picture->width = in_picture->width;
    out_picture->height = in_picture->height;
    
    // One buffer, three bytes per pixel (RGB24)
    out_picture->y_buf = vp_os_malloc ( out_picture->width * out_picture->height * 3 );
    out_picture->cr_buf = NULL;
    out_picture->cb_buf = NULL;
    out_picture->y_line_size = out_picture->width * 3;
    out_picture->cb_line_size = 0;
    out_picture->cr_line_size = 0;
    
    example_pre_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (1, sizeof (vp_api_io_stage_t));
    example_post_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (1, sizeof (vp_api_io_stage_t));
    
    /**
    * Fill the POST stage list
    * - name and type are debug infos only
    * - cfg is the pointer passed as "cfg" in all the stages calls
    * - funcs is the pointer to the stage functions
    */
    
    stages_index = 0;
    
    vp_os_memset (&dispCfg, 0, sizeof (video_cfg_t));
    
    dispCfg.width = in_picture->width;
    dispCfg.height = in_picture->height;
    
    example_post_stages->stages_list[stages_index].name = "Decoded display"; // Debug info
    example_post_stages->stages_list[stages_index].type = VP_API_OUTPUT_SDL; // Debug info
    example_post_stages->stages_list[stages_index].cfg  = &dispCfg;
    example_post_stages->stages_list[stages_index++].funcs  = video_funcs;
    
    example_post_stages->length = stages_index;
    
    
    /**
    * Fill thread params for the ardrone_tool video thread
    *  - in_pic / out_pic are reference to our in_picture / out_picture
    *  - pre/post stages lists are references to our stages lists
    *  - needSetPriority and priority are used to control the video thread priority
    *   -> if needSetPriority is set to 1, the thread will try to set its priority to "priority"
    *   -> if needSetPriority is set to 0, the thread will keep its default priority (best on PC)
    */
    params->in_pic = in_picture;
    params->out_pic = out_picture;
    params->pre_processing_stages_list  = example_pre_stages;
    params->post_processing_stages_list = example_post_stages;
    params->needSetPriority = 0;
    params->priority = 0;
    
    /**
    * Start the video thread (and the video recorder thread for AR.Drone 2)
    */
    START_THREAD(video_stage, params);
    video_stage_init();
    
    video_stage_resume_thread ();
    
    return C_OK;
}
コード例 #11
0
ファイル: ardrone_sdk.cpp プロジェクト: andresjhe/JdeRobot
	 /*
	  * Custom init hook for the JdeRobot ARDrone driver.
	  *
	  * Initializes mutexes, picks codec and stream size per drone
	  * generation, resets the multiconfig profile/application ids,
	  * registers the teleop input, builds a single post-stage video
	  * pipeline that exports decoded frames, starts the video (and, on
	  * AR.Drone 2.x, recorder) threads, and launches the driver thread.
	  * Returns C_OK.
	  *
	  * Fixes: _w/_h were read uninitialized (undefined behavior) when
	  * neither IS_ARDRONE2 nor IS_ARDRONE1 held; sprintf into a fixed
	  * buffer replaced with bounded snprintf.
	  */
	 C_RESULT ardrone_tool_init_custom(void) 
	{
		should_exit = 0;
		vp_os_mutex_init(&navdata_lock);
		vp_os_mutex_init(&video_lock);
		vp_os_mutex_init(&twist_lock);

		//jdeDriver = new ARDroneDriver();
		// Default to drone-1 stream dimensions so the buffer allocations
		// below are well-defined even if version detection fails.
		int _w = D1_STREAM_WIDTH;
		int _h = D1_STREAM_HEIGHT;

		if (IS_ARDRONE2)
		{
			ardrone_application_default_config.video_codec = H264_360P_CODEC;
			_w = D2_STREAM_WIDTH;
			_h = D2_STREAM_HEIGHT;
		}
		else if (IS_ARDRONE1)
		{
			ardrone_application_default_config.video_codec = UVLC_CODEC;
			_w = D1_STREAM_WIDTH;
			_h = D1_STREAM_HEIGHT;
			   
		}
		else
		{
			printf("Something must be really wrong with the SDK!");
		}


		realtime_navdata=false;
		realtime_video=false;
		looprate=LOOPRATE;
		// SET SOME NON-STANDARD DEFAULT VALUES FOR THE DRIVER
		// THESE CAN BE OVERWRITTEN BY ROS PARAMETERS (below)
		ardrone_application_default_config.bitrate_ctrl_mode = VBC_MODE_DISABLED;
		if (IS_ARDRONE2)
		{
			ardrone_application_default_config.max_bitrate = 4000;
		}

		ardrone_application_default_config.navdata_options = NAVDATA_OPTION_FULL_MASK;        
		ardrone_application_default_config.video_channel = ZAP_CHANNEL_HORI;
		ardrone_application_default_config.control_level = (0 << CONTROL_LEVEL_COMBINED_YAW);
		ardrone_application_default_config.flying_mode = FLYING_MODE_FREE_FLIGHT;

		jdeDriver->configureDrone((char*)CONFIG_FILE_PATH);
		cam_state=jdeDriver->getParameter("video_channel",0);

		char buffer[MULTICONFIG_ID_SIZE+1];

		// snprintf bounds the write to the buffer; sprintf could overflow
		// it if usr_id/app_id exceed MULTICONFIG_ID_SIZE-1 characters.
		snprintf(buffer,sizeof buffer,"-%s",usr_id);
		printf("Deleting Profile %s\n",buffer);
		ARDRONE_TOOL_CONFIGURATION_ADDEVENT (profile_id, buffer, NULL);

		snprintf(buffer,sizeof buffer,"-%s",app_id);
		printf("Deleting Application %s\n",buffer);
		ARDRONE_TOOL_CONFIGURATION_ADDEVENT (application_id, buffer, NULL);

		// Now continue with the rest of the initialization

		ardrone_tool_input_add(&teleop);
		uint8_t post_stages_index = 0;

		//Alloc structs
		specific_parameters_t * params             = (specific_parameters_t *)vp_os_calloc(1,sizeof(specific_parameters_t));
		specific_stages_t * driver_pre_stages  = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
		specific_stages_t * driver_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
		vp_api_picture_t  * in_picture             = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));
		vp_api_picture_t  * out_picture            = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));

		in_picture->width          = _w;
		in_picture->height         = _h;

		out_picture->framerate     = 20;
		out_picture->format        = PIX_FMT_RGB24;
		out_picture->width         = _w;
		out_picture->height        = _h;

		// One interleaved RGB24 buffer: three bytes per pixel.
		out_picture->y_buf         = (uint8_t*) vp_os_malloc( _w * _h * 3 );
		out_picture->cr_buf        = NULL;
		out_picture->cb_buf        = NULL;

		out_picture->y_line_size   = _w * 3;
		out_picture->cb_line_size  = 0;
		out_picture->cr_line_size  = 0;

		//Alloc the lists
		driver_pre_stages->stages_list  = NULL;
		driver_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(NB_DRIVER_POST_STAGES,sizeof(vp_api_io_stage_t));


		driver_post_stages->stages_list[post_stages_index].name    = "ExtractData";
		driver_post_stages->stages_list[post_stages_index].type    = VP_API_OUTPUT_SDL;
		driver_post_stages->stages_list[post_stages_index].cfg     = NULL;
		driver_post_stages->stages_list[post_stages_index++].funcs   = vp_stages_export_funcs;

		driver_pre_stages->length  = 0;
		driver_post_stages->length = post_stages_index;

		params->in_pic = in_picture;
		params->out_pic = out_picture;
		params->pre_processing_stages_list  = driver_pre_stages;
		params->post_processing_stages_list = driver_post_stages;
		params->needSetPriority = 1;
		params->priority = 31;
		// Using the provided threaded pipeline implementation from SDK
		START_THREAD(video_stage, params);
		video_stage_init();
		if (ARDRONE_VERSION() >= 2)
		{
			START_THREAD (video_recorder, NULL);
			video_recorder_init ();
			video_recorder_resume_thread();
		}
		// Threads do not start automatically
		video_stage_resume_thread();
		ardrone_tool_set_refresh_time(25);
		//jdeDriver->configure_drone();

		 
		jdeDriver->initInterfaces();
		START_THREAD(update_jde, jdeDriver);
		return C_OK;
	}
コード例 #12
0
/* The delegate object calls this method during initialization of an ARDrone application */
/*
 * The delegate object calls this method during initialization of an
 * ARDrone application.
 *
 * Registers the optional custom pad, starts the GUI / image-processing /
 * file-I/O threads, selects the vertical camera, builds the navigation
 * video pipeline (latency estimator, tracker overlay, GTK display), and
 * starts the video pipeline thread. Returns C_OK.
 *
 * Fixes: draw_trackers_cfg was zeroed with sizeof(draw_trackers_funcs)
 * (the wrong object's size); the tracker stage was missing its debug
 * name; unreachable code after the first return removed.
 */
C_RESULT ardrone_tool_init_custom(void)
{
  
  if (customPad)
	  ardrone_tool_input_add( &fpad );
	  
  //init_gui(0, NULL); /// Creating the GUI 
	START_THREAD(gui, NULL); // Starting the GUI thread 
	START_THREAD(imgproc,NULL);
	START_THREAD(ttiofiles,NULL);

    ardrone_application_default_config.video_channel = ZAP_CHANNEL_VERT;
   // ARDRONE_TOOL_CONFIGURATION_ADDEVENT (video_channel, &ZAP_CHANNEL_VERT, NULL);	

    
    /************************ VIDEO STAGE CONFIG ******************************/

    #define NB_NAVIGATION_POST_STAGES   10
    uint8_t post_stages_index = 0;

    //Alloc structs
    specific_parameters_t * params             = (specific_parameters_t *)vp_os_calloc(1,sizeof(specific_parameters_t));
    specific_stages_t * navigation_pre_stages  = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
    specific_stages_t * navigation_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
    vp_api_picture_t  * in_picture             = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));
    vp_api_picture_t  * out_picture            = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));

    in_picture->width          = STREAM_WIDTH;
    in_picture->height         = STREAM_HEIGHT;

    out_picture->framerate     = 20;
    out_picture->format        = PIX_FMT_RGB24;
    out_picture->width         = STREAM_WIDTH;
    out_picture->height        = STREAM_HEIGHT;

    // One interleaved RGB24 buffer: three bytes per pixel.
    out_picture->y_buf         = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT * 3 );
    out_picture->cr_buf        = NULL;
    out_picture->cb_buf        = NULL;

    out_picture->y_line_size   = STREAM_WIDTH * 3;
    out_picture->cb_line_size  = 0;
    out_picture->cr_line_size  = 0;

    //Alloc the lists
    navigation_pre_stages->stages_list  = NULL;
    navigation_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(NB_NAVIGATION_POST_STAGES,sizeof(vp_api_io_stage_t));

    //Fill the POST-stages------------------------------------------------------
    vp_os_memset(&vlat,         0, sizeof( vlat ));
    vlat.state = 0;
    vlat.last_decoded_frame_info = (void*)&vec;
    navigation_post_stages->stages_list[post_stages_index].name    = "(latency estimator)";
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_FILTER_DECODER;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&vlat;
    navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_latency_estimation_funcs;


    // Fixed: previously zeroed with sizeof(draw_trackers_funcs), which is a
    // different object and may not cover (or may overrun) draw_trackers_cfg.
    vp_os_memset(&draw_trackers_cfg,         0, sizeof( draw_trackers_cfg ));
    draw_trackers_cfg.last_decoded_frame_info = (void*)&vec;
    navigation_post_stages->stages_list[post_stages_index].name    = "(tracker draw)"; // debug info; was left NULL
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_FILTER_DECODER;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&draw_trackers_cfg;
    navigation_post_stages->stages_list[post_stages_index++].funcs   = draw_trackers_funcs;


    vp_os_memset(&gtkconf,         0, sizeof( gtkconf ));
    gtkconf.rowstride               = out_picture->width * 3;
    gtkconf.last_decoded_frame_info = (void*)&vec;
    gtkconf.desired_display_width   = 0;  /* auto */
    gtkconf.desired_display_height  = 0;  /* auto */
    gtkconf.gdk_interpolation_mode  = 0;  /* fastest */
    navigation_post_stages->stages_list[post_stages_index].name    = "(Gtk display)";
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_OUTPUT_SDL;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&gtkconf;
    navigation_post_stages->stages_list[post_stages_index++].funcs   = vp_stages_output_gtk_funcs;

    //Define the list of stages size
    navigation_pre_stages->length  = 0;
    navigation_post_stages->length = post_stages_index;

    params->in_pic = in_picture;
    params->out_pic = out_picture;
    params->pre_processing_stages_list  = navigation_pre_stages;
    params->post_processing_stages_list = navigation_post_stages;
    params->needSetPriority = 0;
    params->priority = 0;

    START_THREAD(video_stage, params);
    video_stage_init();
    video_stage_resume_thread ();
    
    /************************ END OF VIDEO STAGE CONFIG ***********************/
    
    return C_OK;
}