/**
 * Start the drone engine with only a completion callback.
 * Initializes the video stage, then launches the mobile_main thread,
 * handing it the callback as its thread argument.
 */
void ardroneEngineStart ( ardroneEngineCallback callback )
{
#ifdef DEBUG_THREAD
    PRINT( "%s\n", __FUNCTION__ );
#endif
    /* Video must be set up before the main thread starts consuming it. */
    video_stage_init();
    START_THREAD( mobile_main, callback);
}
/**
 * Start the drone engine with a callback plus application/user names.
 * Initializes the video stage, copies the names into a heap-allocated
 * parameter block, and launches the mobile_main thread with it.
 *
 * Fix: the original used unbounded strcpy() into fixed-size struct
 * buffers — a caller-controlled appName/usrName longer than the buffer
 * overflowed the allocation. Copies are now bounded and always
 * NUL-terminated via snprintf. (Assumes appName/usrName members are
 * char arrays, as the original strcpy implied — TODO confirm against
 * mobile_main_param_t's declaration.)
 */
void ardroneEngineStart ( ardroneEngineCallback callback, const char *appName, const char *usrName )
{
#ifdef DEBUG_THREAD
    PRINT( "%s\n", __FUNCTION__ );
#endif
    video_stage_init();

    mobile_main_param_t *param = vp_os_malloc (sizeof (mobile_main_param_t));
    if (NULL == param)
    {
        /* Allocation failed: nothing to start (matches original's silent skip). */
        return;
    }
    param->callback = callback;
    /* Bounded copies: snprintf always NUL-terminates when size > 0. */
    snprintf(param->appName, sizeof(param->appName), "%s", appName);
    snprintf(param->usrName, sizeof(param->usrName), "%s", usrName);
    START_THREAD( mobile_main, param);
}
/**
 * JNI entry point: start the drone engine from the Android layer.
 * Pins the Java object with a global ref, copies all string parameters
 * into a heap-allocated param block, records the recording capability,
 * and launches the app_main thread.
 *
 * Fixes:
 *  - "¶m" was a mangled "&param" (HTML-entity corruption); restored.
 *  - strncpy was called with the full STRING_BUFFER_LENGTH, which does
 *    not NUL-terminate when the source fills the buffer; now bounded to
 *    STRING_BUFFER_LENGTH - 1 so the preceding memset guarantees a
 *    terminator.
 *  - Redundant back-to-back NULL checks collapsed into one early return.
 */
void parrot_ardrone_notify_start(JNIEnv* env, jobject obj, ardroneEngineCallback callback, const char *appName, const char *userName, const char* rootdir, const char* flightdir, int flight_storing_size, academy_download_new_media academy_download_callback_func, VIDEO_RECORDING_CAPABILITY recordingCapability)
{
    video_stage_init();
    video_recorder_init();

    mobile_main_param_t *param = vp_os_malloc(sizeof(mobile_main_param_t));
    if (NULL == param)
    {
        LOGII("paramisnull");
        return;
    }

    /* Keep the Java object alive across threads. */
    param->obj = (*env)->NewGlobalRef(env, obj);
    param->callback = callback;

    /* Zero first, then copy at most LEN-1 bytes: strings always terminated. */
    vp_os_memset(param->app_name, 0, STRING_BUFFER_LENGTH);
    vp_os_memset(param->user_name, 0, STRING_BUFFER_LENGTH);
    vp_os_memset(param->root_dir, 0, STRING_BUFFER_LENGTH);
    vp_os_memset(param->flight_dir, 0, STRING_BUFFER_LENGTH);
    strncpy(param->app_name, appName, STRING_BUFFER_LENGTH - 1);
    strncpy(param->user_name, userName, STRING_BUFFER_LENGTH - 1);
    strncpy(param->root_dir, rootdir, STRING_BUFFER_LENGTH - 1);
    strncpy(param->flight_dir, flightdir, STRING_BUFFER_LENGTH - 1);

    param->flight_storing_size = flight_storing_size;
    param->academy_download_callback_func = academy_download_callback_func;
    ctrldata.recordingCapability = recordingCapability;

    LOGII("startsuccess");
    START_THREAD(app_main, param);
}
C_RESULT ardrone_tool_init_custom(void) { /** * Set application default configuration * * In this example, we use the AR.FreeFlight configuration : * - Demo navdata rate (15Hz) * - Useful additionnal navdata packets enabled (detection, games, video record, wifi quality estimation ...) * - Adaptive video enabled (bitrate_ctrl_mode) -> video bitrate will change according to the available bandwidth */ ardrone_application_default_config.navdata_demo = TRUE; //use this -> NAVDATA_OPTION_FULL_MASK //or coment the line below if detection doesn't work ardrone_application_default_config.navdata_options = NAVDATA_OPTION_FULL_MASK;//(NAVDATA_OPTION_MASK(NAVDATA_DEMO_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_DETECT_TAG) | NAVDATA_OPTION_MASK(NAVDATA_GAMES_TAG) | NAVDATA_OPTION_MASK(NAVDATA_MAGNETO_TAG) | NAVDATA_OPTION_MASK(NAVDATA_HDVIDEO_STREAM_TAG) | NAVDATA_OPTION_MASK(NAVDATA_WIFI_TAG)); if (IS_ARDRONE2){ ardrone_application_default_config.video_codec = drone2Codec; } else { ardrone_application_default_config.video_codec = drone1Codec; } ardrone_application_default_config.video_channel = videoChannel; ardrone_application_default_config.bitrate_ctrl_mode = 1; /** * Define the number of video stages we'll add before/after decoding */ #define EXAMPLE_PRE_STAGES 1 #define EXAMPLE_POST_STAGES 1 /** * Allocate useful structures : * - index counter * - thread param structure and its substructures */ uint8_t stages_index = 0; specific_parameters_t *params = (specific_parameters_t *)vp_os_calloc (1, sizeof (specific_parameters_t)); specific_stages_t *example_pre_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t)); specific_stages_t *example_post_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t)); vp_api_picture_t *in_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t)); vp_api_picture_t *out_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t)); /** * Fill the vp_api_pictures used for video 
decodig * --> out_picture->format is mandatory for AR.Drone 1 and 2. Other lines are only necessary for AR.Drone 1 video decoding */ in_picture->width = 640; // Drone 1 only : Must be greater than the drone 1 picture size (320) in_picture->height = 360; // Drone 1 only : Must be greater that the drone 1 picture size (240) out_picture->framerate = 20; // Drone 1 only, must be equal to drone target FPS out_picture->format = PIX_FMT_RGB24; // MANDATORY ! Only RGB24, RGB565 are supported out_picture->width = in_picture->width; out_picture->height = in_picture->height; // Alloc Y, CB, CR bufs according to target format uint32_t bpp = 0; switch (out_picture->format) { case PIX_FMT_RGB24: // One buffer, three bytes per pixel bpp = 3; out_picture->y_buf = vp_os_malloc ( out_picture->width * out_picture->height * bpp ); out_picture->cr_buf = NULL; out_picture->cb_buf = NULL; out_picture->y_line_size = out_picture->width * bpp; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; break; case PIX_FMT_RGB565: // One buffer, two bytes per pixel bpp = 2; out_picture->y_buf = vp_os_malloc ( out_picture->width * out_picture->height * bpp ); out_picture->cr_buf = NULL; out_picture->cb_buf = NULL; out_picture->y_line_size = out_picture->width * bpp; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; break; default: fprintf (stderr, "Wrong video format, must be either PIX_FMT_RGB565 or PIX_FMT_RGB24\n"); exit (-1); break; } /** * Allocate the stage lists * * - "pre" stages are called before video decoding is done * -> A pre stage get the encoded video frame (including PaVE header for AR.Drone 2 frames) as input * -> A pre stage MUST NOT modify these data, and MUST pass it to the next stage * - Typical "pre" stage : Encoded video recording for AR.Drone 1 (recording for AR.Drone 2 is handled differently) * * - "post" stages are called after video decoding * -> The first post stage will get the decoded video frame as its input * --> Video frame format depend on 
out_picture->format value (RGB24 / RGB565) * -> A post stage CAN modify the data, as ardrone_tool won't process it afterwards * -> All following post stages will use the output of the previous stage as their inputs * - Typical "post" stage : Display the decoded frame */ example_pre_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (EXAMPLE_PRE_STAGES, sizeof (vp_api_io_stage_t)); example_post_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (EXAMPLE_POST_STAGES, sizeof (vp_api_io_stage_t)); /** * Fill the PRE stage list * - name and type are debug infos only * - cfg is the pointer passed as "cfg" in all the stages calls * - funcs is the pointer to the stage functions */ stages_index = 0; vp_os_memset (&precfg, 0, sizeof (pre_stage_cfg_t)); strncpy (precfg.outputName, encodedFileName, 255); example_pre_stages->stages_list[stages_index].name = "Encoded Dumper"; // Debug info example_pre_stages->stages_list[stages_index].type = VP_API_FILTER_DECODER; // Debug info example_pre_stages->stages_list[stages_index].cfg = &precfg; example_pre_stages->stages_list[stages_index++].funcs = pre_stage_funcs; example_pre_stages->length = stages_index; /** * Fill the POST stage list * - name and type are debug infos only * - cfg is the pointer passed as "cfg" in all the stages calls * - funcs is the pointer to the stage functions */ stages_index = 0; vp_os_memset (&dispCfg, 0, sizeof (display_stage_cfg_t)); dispCfg.bpp = bpp; dispCfg.decoder_info = in_picture; example_post_stages->stages_list[stages_index].name = "Decoded display"; // Debug info example_post_stages->stages_list[stages_index].type = VP_API_OUTPUT_SDL; // Debug info example_post_stages->stages_list[stages_index].cfg = &dispCfg; example_post_stages->stages_list[stages_index++].funcs = display_stage_funcs; example_post_stages->length = stages_index; /** * Fill thread params for the ardrone_tool video thread * - in_pic / out_pic are reference to our in_picture / out_picture * - pre/post stages lists are 
references to our stages lists * - needSetPriority and priority are used to control the video thread priority * -> if needSetPriority is set to 1, the thread will try to set its priority to "priority" * -> if needSetPriority is set to 0, the thread will keep its default priority (best on PC) */ params->in_pic = in_picture; params->out_pic = out_picture; params->pre_processing_stages_list = example_pre_stages; params->post_processing_stages_list = example_post_stages; params->needSetPriority = 0; params->priority = 0; //set the tag detection ENEMY_COLORS_TYPE enemyColors = ARDRONE_DETECTION_COLOR_ORANGE_BLUE; ARDRONE_TOOL_CONFIGURATION_ADDEVENT (enemy_colors, &enemyColors, NULL); CAD_TYPE detectType = CAD_TYPE_MULTIPLE_DETECTION_MODE; ARDRONE_TOOL_CONFIGURATION_ADDEVENT (detect_type, &detectType, NULL); uint32_t detectH = TAG_TYPE_MASK (TAG_TYPE_SHELL_TAG_V2); ARDRONE_TOOL_CONFIGURATION_ADDEVENT (detections_select_h, &detectH, NULL); /** * Start the video thread (and the video recorder thread for AR.Drone 2) */ START_THREAD(video_stage, params); START_THREAD (video_recorder, NULL); video_stage_init(); if (2 <= ARDRONE_VERSION ()) { START_THREAD (video_recorder, NULL); video_recorder_init (); } video_stage_resume_thread(); //King of the Hill threads START_THREAD(wiimote_logic, NULL); START_THREAD(drone_logic, NULL); START_THREAD(score_logic, NULL); return C_OK; }
/**
 * Video stage thread: builds a 3-stage pipeline (UDP video socket input ->
 * VLIB decoder -> LCD output), opens it, then runs it in a loop until the
 * tool exits or a stage fails. Supports a pause handshake through
 * video_stage_in_pause / video_stage_mutex / video_stage_condition.
 */
DEFINE_THREAD_ROUTINE(video_stage, data)
{
    C_RESULT res;
    vp_api_io_pipeline_t pipeline;
    vp_api_io_data_t out;
    vp_api_io_stage_t stages[NB_STAGES];
    vp_api_picture_t picture;
    vlib_stage_decoding_config_t vec;

    vp_os_memset(&icc, 0, sizeof( icc ));
    vp_os_memset(&vec, 0, sizeof( vec ));
    vp_os_memset(&picture, 0, sizeof( picture ));

    //#ifdef RECORD_VIDEO
    //    video_stage_recorder_config_t vrc;
    //#endif

    /// Picture configuration
    /* RGB565 -> 2 bytes per pixel; a single interleaved buffer in y_buf. */
    picture.format = /*PIX_FMT_YUV420P */ PIX_FMT_RGB565;
    picture.width = 512;
    picture.height = 512;
    picture.framerate = 15;
    picture.y_buf = vp_os_malloc( picture.width * picture.height * 2);
    picture.cr_buf = NULL;
    picture.cb_buf = NULL;
    picture.y_line_size = picture.width * 2;
    picture.cb_line_size = 0;
    picture.cr_line_size = 0;

    /* UDP video socket toward the drone. */
    icc.com = COM_VIDEO();
    icc.buffer_size = 100000;
    icc.protocol = VP_COM_UDP;
    COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

    /* Decoder configuration: decode into `picture` above. */
    vec.width = 512;
    vec.height = 512;
    vec.picture = &picture;
    vec.luma_only = FALSE;
    vec.block_mode_enable = TRUE;

    /* Assemble the pipeline: socket input, VLIB decoder, LCD output. */
    pipeline.nb_stages = 0;
    stages[pipeline.nb_stages].type = VP_API_INPUT_SOCKET;
    stages[pipeline.nb_stages].cfg = (void *)&icc;
    stages[pipeline.nb_stages].funcs = video_com_funcs;
    pipeline.nb_stages++;
    stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg = (void*)&vec;
    stages[pipeline.nb_stages].funcs = vlib_decoding_funcs;
    pipeline.nb_stages++;
    /*
    #ifdef RECORD_VIDEO
    stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg = (void*)&vrc;
    stages[pipeline.nb_stages].funcs = video_recorder_funcs;
    pipeline.nb_stages++;
    #endif
    */
    /* NOTE(review): the LCD stage reuses &vec as its cfg — presumably the
       output stage reads the decoded-frame info from it; confirm against
       video_stage_funcs. */
    stages[pipeline.nb_stages].type = VP_API_OUTPUT_LCD;
    stages[pipeline.nb_stages].cfg = (void*)&vec;
    stages[pipeline.nb_stages].funcs = video_stage_funcs;
    pipeline.nb_stages++;
    pipeline.stages = &stages[0];

    if( !ardrone_tool_exit() )
    {
        PRINT("\nvideo stage thread initialisation\n\n");

        // NICK
        video_stage_init();

        res = vp_api_open(&pipeline, &pipeline_handle);

        if( VP_SUCCEEDED(res) )
        {
            int loop = VP_SUCCESS;
            out.status = VP_API_STATUS_PROCESSING;

#ifdef RECORD_VIDEO
            {
                DEST_HANDLE dest;
                dest.stage = 2;
                dest.pipeline = pipeline_handle;
                vp_api_post_message( dest, PIPELINE_MSG_START, NULL, (void*)NULL);
            }
#endif

            video_stage_in_pause = FALSE;

            /* Main decode loop: run stages until exit or a stage fails. */
            while( !ardrone_tool_exit() && (loop == VP_SUCCESS) )
            {
                /* Pause handshake: sleep on the condition until resumed. */
                if(video_stage_in_pause)
                {
                    vp_os_mutex_lock(&video_stage_mutex);
                    icc.num_retries = VIDEO_MAX_RETRIES;
                    vp_os_cond_wait(&video_stage_condition);
                    vp_os_mutex_unlock(&video_stage_mutex);
                }

                if( VP_SUCCEEDED(vp_api_run(&pipeline, &out)) )
                {
                    if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) )
                    {
                        loop = VP_SUCCESS;
                    }
                }
                else
                    loop = -1; // Finish this thread
            }

            vp_api_close(&pipeline, &pipeline_handle);
        }
    }

    PRINT(" video stage thread ended\n\n");

    return (THREAD_RET)0;
}
C_RESULT ardrone_tool_init_custom (void) { // Reset global data bzero(&g_navdata, sizeof(g_navdata)); g_autopilot = FALSE; g_pass_button = 0; // Register a new game controller ardrone_tool_input_add( &gamepad ); ardrone_application_default_config.navdata_demo = TRUE; ardrone_application_default_config.navdata_options = (NAVDATA_OPTION_MASK(NAVDATA_DEMO_TAG) | NAVDATA_OPTION_FULL_MASK | NAVDATA_OPTION_MASK(NAVDATA_VISION_DETECT_TAG) | NAVDATA_OPTION_MASK(NAVDATA_GAMES_TAG) | NAVDATA_OPTION_MASK(NAVDATA_MAGNETO_TAG) | NAVDATA_OPTION_MASK(NAVDATA_HDVIDEO_STREAM_TAG) | NAVDATA_OPTION_MASK(NAVDATA_WIFI_TAG)); vp_api_picture_t *in_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t)); vp_api_picture_t *out_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t)); if (IS_ARDRONE2) { ardrone_application_default_config.video_codec = H264_360P_CODEC; in_picture->width = 640; in_picture->height = 360; } else { ardrone_application_default_config.video_codec = P264_CODEC; in_picture->width = 320; in_picture->height = 240; } ardrone_application_default_config.video_channel = ZAP_CHANNEL_HORI; ardrone_application_default_config.bitrate_ctrl_mode = 1; /** * Allocate useful structures : * - index counter * - thread param structure and its substructures */ uint8_t stages_index = 0; specific_parameters_t *params = (specific_parameters_t *)vp_os_calloc (1, sizeof (specific_parameters_t)); specific_stages_t *example_pre_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t)); specific_stages_t *example_post_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t)); out_picture->framerate = 20; // Drone 1 only, must be equal to drone target FPS out_picture->format = PIX_FMT_RGB24; out_picture->width = in_picture->width; out_picture->height = in_picture->height; // One buffer, three bytes per pixel out_picture->y_buf = vp_os_malloc ( out_picture->width * out_picture->height * 3 ); out_picture->cr_buf = NULL; 
out_picture->cb_buf = NULL; out_picture->y_line_size = out_picture->width * 3; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; example_pre_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (1, sizeof (vp_api_io_stage_t)); example_post_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (1, sizeof (vp_api_io_stage_t)); /** * Fill the POST stage list * - name and type are debug infos only * - cfg is the pointer passed as "cfg" in all the stages calls * - funcs is the pointer to the stage functions */ stages_index = 0; vp_os_memset (&dispCfg, 0, sizeof (video_cfg_t)); dispCfg.width = in_picture->width; dispCfg.height = in_picture->height; example_post_stages->stages_list[stages_index].name = "Decoded display"; // Debug info example_post_stages->stages_list[stages_index].type = VP_API_OUTPUT_SDL; // Debug info example_post_stages->stages_list[stages_index].cfg = &dispCfg; example_post_stages->stages_list[stages_index++].funcs = video_funcs; example_post_stages->length = stages_index; /** * Fill thread params for the ardrone_tool video thread * - in_pic / out_pic are reference to our in_picture / out_picture * - pre/post stages lists are references to our stages lists * - needSetPriority and priority are used to control the video thread priority * -> if needSetPriority is set to 1, the thread will try to set its priority to "priority" * -> if needSetPriority is set to 0, the thread will keep its default priority (best on PC) */ params->in_pic = in_picture; params->out_pic = out_picture; params->pre_processing_stages_list = example_pre_stages; params->post_processing_stages_list = example_post_stages; params->needSetPriority = 0; params->priority = 0; /** * Start the video thread (and the video recorder thread for AR.Drone 2) */ START_THREAD(video_stage, params); video_stage_init(); video_stage_resume_thread (); return C_OK; }
C_RESULT ardrone_tool_init_custom(void) { should_exit = 0; vp_os_mutex_init(&navdata_lock); vp_os_mutex_init(&video_lock); vp_os_mutex_init(&twist_lock); //jdeDriver = new ARDroneDriver(); int _w, _h; if (IS_ARDRONE2) { ardrone_application_default_config.video_codec = H264_360P_CODEC; _w = D2_STREAM_WIDTH; _h = D2_STREAM_HEIGHT; } else if (IS_ARDRONE1) { ardrone_application_default_config.video_codec = UVLC_CODEC; _w = D1_STREAM_WIDTH; _h = D1_STREAM_HEIGHT; } else { printf("Something must be really wrong with the SDK!"); } realtime_navdata=false; realtime_video=false; looprate=LOOPRATE; // SET SOME NON-STANDARD DEFAULT VALUES FOR THE DRIVER // THESE CAN BE OVERWRITTEN BY ROS PARAMETERS (below) ardrone_application_default_config.bitrate_ctrl_mode = VBC_MODE_DISABLED; if (IS_ARDRONE2) { ardrone_application_default_config.max_bitrate = 4000; } ardrone_application_default_config.navdata_options = NAVDATA_OPTION_FULL_MASK; ardrone_application_default_config.video_channel = ZAP_CHANNEL_HORI; ardrone_application_default_config.control_level = (0 << CONTROL_LEVEL_COMBINED_YAW); ardrone_application_default_config.flying_mode = FLYING_MODE_FREE_FLIGHT; jdeDriver->configureDrone((char*)CONFIG_FILE_PATH); cam_state=jdeDriver->getParameter("video_channel",0); char buffer[MULTICONFIG_ID_SIZE+1]; sprintf(buffer,"-%s",usr_id); printf("Deleting Profile %s\n",buffer); ARDRONE_TOOL_CONFIGURATION_ADDEVENT (profile_id, buffer, NULL); sprintf(buffer,"-%s",app_id); printf("Deleting Application %s\n",buffer); ARDRONE_TOOL_CONFIGURATION_ADDEVENT (application_id, buffer, NULL); // Now continue with the rest of the initialization ardrone_tool_input_add(&teleop); uint8_t post_stages_index = 0; //Alloc structs specific_parameters_t * params = (specific_parameters_t *)vp_os_calloc(1,sizeof(specific_parameters_t)); specific_stages_t * driver_pre_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t)); specific_stages_t * driver_post_stages = (specific_stages_t*)vp_os_calloc(1, 
sizeof(specific_stages_t)); vp_api_picture_t * in_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); vp_api_picture_t * out_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); in_picture->width = _w; in_picture->height = _h; out_picture->framerate = 20; out_picture->format = PIX_FMT_RGB24; out_picture->width = _w; out_picture->height = _h; out_picture->y_buf = (uint8_t*) vp_os_malloc( _w * _h * 3 ); out_picture->cr_buf = NULL; out_picture->cb_buf = NULL; out_picture->y_line_size = _w * 3; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; //Alloc the lists driver_pre_stages->stages_list = NULL; driver_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(NB_DRIVER_POST_STAGES,sizeof(vp_api_io_stage_t)); driver_post_stages->stages_list[post_stages_index].name = "ExtractData"; driver_post_stages->stages_list[post_stages_index].type = VP_API_OUTPUT_SDL; driver_post_stages->stages_list[post_stages_index].cfg = NULL; driver_post_stages->stages_list[post_stages_index++].funcs = vp_stages_export_funcs; driver_pre_stages->length = 0; driver_post_stages->length = post_stages_index; params->in_pic = in_picture; params->out_pic = out_picture; params->pre_processing_stages_list = driver_pre_stages; params->post_processing_stages_list = driver_post_stages; params->needSetPriority = 1; params->priority = 31; // Using the provided threaded pipeline implementation from SDK START_THREAD(video_stage, params); video_stage_init(); if (ARDRONE_VERSION() >= 2) { START_THREAD (video_recorder, NULL); video_recorder_init (); video_recorder_resume_thread(); } // Threads do not start automatically video_stage_resume_thread(); ardrone_tool_set_refresh_time(25); //jdeDriver->configure_drone(); jdeDriver->initInterfaces(); START_THREAD(update_jde, jdeDriver); return C_OK; }
C_RESULT ardrone_tool_init_custom(void) { ardrone_application_default_config.navdata_options = NAVDATA_OPTION_FULL_MASK /*& ~(NAVDATA_OPTION_MASK(NAVDATA_TRACKERS_SEND_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_OF_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_PERF_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_TAG))*/; if (IS_ARDRONE2) { ardrone_application_default_config.video_codec = H264_360P_CODEC; } else { ardrone_application_default_config.video_codec = UVLC_CODEC; } ardrone_application_default_config.bitrate_ctrl_mode = 1; /// Init specific code for application ardrone_navdata_handler_table[NAVDATA_IHM_PROCESS_INDEX].data = &cfg; // Add inputs //ardrone_tool_input_add( &gamepad ); /*ardrone_tool_input_add( &radioGP ); ardrone_tool_input_add( &ps3pad ); ardrone_tool_input_add( &joystick ); ardrone_tool_input_add( &wiimote_device );*/ load_ini(); printf("Default control : %s (0x%08x, %s)\n", default_control->name, default_control->serial, default_control->filename); ardrone_tool_input_add(&control_device); cfg.default_control = default_control; cfg.devices = devices; #ifdef USE_ARDRONE_VICON START_THREAD(vicon, &cfg); #endif // USE_ARDRONE_VICON START_THREAD(ihm, &cfg); #ifdef RAW_CAPTURE START_THREAD(raw_capture, &cfg); #endif START_THREAD(remote_console, &cfg); /************************ VIDEO STAGE CONFIG ******************************/ #define NB_NAVIGATION_POST_STAGES 10 uint8_t post_stages_index = 0; //Alloc structs specific_parameters_t * params = (specific_parameters_t *)vp_os_calloc(1,sizeof(specific_parameters_t)); specific_stages_t * navigation_pre_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t)); specific_stages_t * navigation_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t)); vp_api_picture_t * in_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); vp_api_picture_t * out_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); in_picture->width = STREAM_WIDTH; 
in_picture->height = STREAM_HEIGHT; out_picture->framerate = 20; out_picture->format = PIX_FMT_RGB24; out_picture->width = STREAM_WIDTH; out_picture->height = STREAM_HEIGHT; out_picture->y_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT * 3 ); out_picture->cr_buf = NULL; out_picture->cb_buf = NULL; out_picture->y_line_size = STREAM_WIDTH * 3; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; //Alloc the lists navigation_pre_stages->stages_list = NULL; navigation_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(NB_NAVIGATION_POST_STAGES,sizeof(vp_api_io_stage_t)); //Fill the POST-stages------------------------------------------------------ vp_os_memset(&vlat, 0, sizeof( vlat )); vlat.state = 0; vlat.last_decoded_frame_info = (void*)&vec; navigation_post_stages->stages_list[post_stages_index].name = "(latency estimator)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&vlat; navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_latency_estimation_funcs; #ifdef RECORD_RAW_VIDEO vp_os_memset(&vrc, 0, sizeof( vrc )); #warning Recording RAW video option enabled in Navigation. vrc.stage = 3; #warning We have to get the stage number an other way vp_os_memset(&vrc, 0, sizeof(vrc)); navigation_post_stages->stages_list[post_stages_index].name = "(raw video recorder)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&vrc; navigation_post_stages->stages_list[post_stages_index++].funcs = video_recorder_funcs; #endif // RECORD_RAW_VIDEO #if defined(FFMPEG_SUPPORT) && defined(RECORD_FFMPEG_VIDEO) #warning Recording FFMPEG (reencoding)video option enabled in Navigation. 
vp_os_memset(&ffmpeg_vrc, 0, sizeof(ffmpeg_vrc)); ffmpeg_vrc.numframes = &vec.controller.num_frames; ffmpeg_vrc.stage = pipeline.nb_stages; navigation_post_stages->stages_list[post_stages_index].name = "(ffmpeg recorder)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&ffmpeg_vrc; navigation_post_stages->stages_list[post_stages_index++].funcs = video_ffmpeg_recorder_funcs; #endif vp_os_memset(&draw_trackers_cfg, 0, sizeof( draw_trackers_funcs )); draw_trackers_cfg.last_decoded_frame_info = (void*)&vec; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&draw_trackers_cfg; navigation_post_stages->stages_list[post_stages_index++].funcs = draw_trackers_funcs; vp_os_memset(>kconf, 0, sizeof( gtkconf )); gtkconf.rowstride = out_picture->width * 3; gtkconf.last_decoded_frame_info = (void*)&vec; gtkconf.desired_display_width = 0; /* auto */ gtkconf.desired_display_height = 0; /* auto */ gtkconf.gdk_interpolation_mode = 0; /* fastest */ navigation_post_stages->stages_list[post_stages_index].name = "(Gtk display)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_OUTPUT_SDL; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)>kconf; navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_output_gtk_funcs; //Define the list of stages size navigation_pre_stages->length = 0; navigation_post_stages->length = post_stages_index; params->in_pic = in_picture; params->out_pic = out_picture; params->pre_processing_stages_list = navigation_pre_stages; params->post_processing_stages_list = navigation_post_stages; params->needSetPriority = 0; params->priority = 0; START_THREAD(video_stage, params); video_stage_init(); if (2 <= ARDRONE_VERSION ()) { START_THREAD (video_recorder, NULL); video_recorder_init (); } else { printf 
("Don't start ... version is %d\n", ARDRONE_VERSION ()); } /************************ END OF VIDEO STAGE CONFIG ***********************/ #ifdef PC_USE_POLARIS START_THREAD(polaris, &cfg); #endif // PC_USE_POLARIS #ifdef USE_TABLE_PILOTAGE START_THREAD(novadem, (void*) ("/dev/ttyUSB0")); #endif // USE_TABLE_PILOTAGE return C_OK; }
/* The delegate object calls this method during initialization of an ARDrone application */ C_RESULT ardrone_tool_init_custom(void) { if (customPad) ardrone_tool_input_add( &fpad ); //init_gui(0, NULL); /// Creating the GUI START_THREAD(gui, NULL); // Starting the GUI thread START_THREAD(imgproc,NULL); START_THREAD(ttiofiles,NULL); ardrone_application_default_config.video_channel = ZAP_CHANNEL_VERT; // ARDRONE_TOOL_CONFIGURATION_ADDEVENT (video_channel, &ZAP_CHANNEL_VERT, NULL); /************************ VIDEO STAGE CONFIG ******************************/ #define NB_NAVIGATION_POST_STAGES 10 uint8_t post_stages_index = 0; //Alloc structs specific_parameters_t * params = (specific_parameters_t *)vp_os_calloc(1,sizeof(specific_parameters_t)); specific_stages_t * navigation_pre_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t)); specific_stages_t * navigation_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t)); vp_api_picture_t * in_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); vp_api_picture_t * out_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); in_picture->width = STREAM_WIDTH; in_picture->height = STREAM_HEIGHT; out_picture->framerate = 20; out_picture->format = PIX_FMT_RGB24; out_picture->width = STREAM_WIDTH; out_picture->height = STREAM_HEIGHT; out_picture->y_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT * 3 ); out_picture->cr_buf = NULL; out_picture->cb_buf = NULL; out_picture->y_line_size = STREAM_WIDTH * 3; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; //Alloc the lists navigation_pre_stages->stages_list = NULL; navigation_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(NB_NAVIGATION_POST_STAGES,sizeof(vp_api_io_stage_t)); //Fill the POST-stages------------------------------------------------------ vp_os_memset(&vlat, 0, sizeof( vlat )); vlat.state = 0; vlat.last_decoded_frame_info = (void*)&vec; 
navigation_post_stages->stages_list[post_stages_index].name = "(latency estimator)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&vlat; navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_latency_estimation_funcs; vp_os_memset(&draw_trackers_cfg, 0, sizeof( draw_trackers_funcs )); draw_trackers_cfg.last_decoded_frame_info = (void*)&vec; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&draw_trackers_cfg; navigation_post_stages->stages_list[post_stages_index++].funcs = draw_trackers_funcs; vp_os_memset(>kconf, 0, sizeof( gtkconf )); gtkconf.rowstride = out_picture->width * 3; gtkconf.last_decoded_frame_info = (void*)&vec; gtkconf.desired_display_width = 0; /* auto */ gtkconf.desired_display_height = 0; /* auto */ gtkconf.gdk_interpolation_mode = 0; /* fastest */ navigation_post_stages->stages_list[post_stages_index].name = "(Gtk display)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_OUTPUT_SDL; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)>kconf; navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_output_gtk_funcs; //Define the list of stages size navigation_pre_stages->length = 0; navigation_post_stages->length = post_stages_index; params->in_pic = in_picture; params->out_pic = out_picture; params->pre_processing_stages_list = navigation_pre_stages; params->post_processing_stages_list = navigation_post_stages; params->needSetPriority = 0; params->priority = 0; START_THREAD(video_stage, params); video_stage_init(); video_stage_resume_thread (); /************************ END OF VIDEO STAGE CONFIG ***********************/ return C_OK; //START_THREAD( video_stage, NULL ); return C_OK; }//*/