/**
 * JNI entry point: start the AR.Drone engine from the Android application.
 *
 * Initializes the video pipeline, copies the caller-supplied identity and
 * storage paths into a heap-allocated parameter block, and launches the
 * app_main thread which takes ownership of that block.
 *
 * @param env                  JNI environment for the calling thread.
 * @param obj                  Java object kept alive via a global reference.
 * @param callback             Engine state callback invoked by app_main.
 * @param appName/userName     Application / user identifiers (truncated to
 *                             STRING_BUFFER_LENGTH-1 characters).
 * @param rootdir/flightdir    Storage directories (same truncation rule).
 * @param flight_storing_size  Flight-data storage budget, forwarded as-is.
 * @param academy_download_callback_func  Academy media-download callback.
 * @param recordingCapability  Stored into the global ctrldata.
 */
void parrot_ardrone_notify_start(JNIEnv* env, jobject obj, ardroneEngineCallback callback,
                                 const char *appName, const char *userName,
                                 const char *rootdir, const char *flightdir,
                                 int flight_storing_size,
                                 academy_download_new_media academy_download_callback_func,
                                 VIDEO_RECORDING_CAPABILITY recordingCapability)
{
    video_stage_init();
    video_recorder_init();

    mobile_main_param_t *param = vp_os_malloc(sizeof(mobile_main_param_t));
    if (NULL == param)
    {
        /* Allocation failed: log and bail out, nothing else we can do. */
        LOGII("paramisnull");
        return;
    }

    /* Global ref so the Java object outlives this JNI call. */
    param->obj = (*env)->NewGlobalRef(env, obj);
    param->callback = callback;

    vp_os_memset(param->app_name,   0, STRING_BUFFER_LENGTH);
    vp_os_memset(param->user_name,  0, STRING_BUFFER_LENGTH);
    vp_os_memset(param->root_dir,   0, STRING_BUFFER_LENGTH);
    vp_os_memset(param->flight_dir, 0, STRING_BUFFER_LENGTH);

    /* Copy at most LENGTH-1 bytes: combined with the memset above this
     * guarantees NUL-termination even for over-long inputs (plain
     * strncpy(dst, src, LENGTH) can leave dst unterminated). */
    strncpy(param->app_name,   appName,   STRING_BUFFER_LENGTH - 1);
    strncpy(param->user_name,  userName,  STRING_BUFFER_LENGTH - 1);
    strncpy(param->root_dir,   rootdir,   STRING_BUFFER_LENGTH - 1);
    strncpy(param->flight_dir, flightdir, STRING_BUFFER_LENGTH - 1);

    param->flight_storing_size = flight_storing_size;
    param->academy_download_callback_func = academy_download_callback_func;
    ctrldata.recordingCapability = recordingCapability;

    LOGII("startsuccess");
    /* app_main takes ownership of param. */
    START_THREAD(app_main, param);
}
C_RESULT ardrone_tool_init_custom(void) { /** * Set application default configuration * * In this example, we use the AR.FreeFlight configuration : * - Demo navdata rate (15Hz) * - Useful additionnal navdata packets enabled (detection, games, video record, wifi quality estimation ...) * - Adaptive video enabled (bitrate_ctrl_mode) -> video bitrate will change according to the available bandwidth */ ardrone_application_default_config.navdata_demo = TRUE; //use this -> NAVDATA_OPTION_FULL_MASK //or coment the line below if detection doesn't work ardrone_application_default_config.navdata_options = NAVDATA_OPTION_FULL_MASK;//(NAVDATA_OPTION_MASK(NAVDATA_DEMO_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_DETECT_TAG) | NAVDATA_OPTION_MASK(NAVDATA_GAMES_TAG) | NAVDATA_OPTION_MASK(NAVDATA_MAGNETO_TAG) | NAVDATA_OPTION_MASK(NAVDATA_HDVIDEO_STREAM_TAG) | NAVDATA_OPTION_MASK(NAVDATA_WIFI_TAG)); if (IS_ARDRONE2){ ardrone_application_default_config.video_codec = drone2Codec; } else { ardrone_application_default_config.video_codec = drone1Codec; } ardrone_application_default_config.video_channel = videoChannel; ardrone_application_default_config.bitrate_ctrl_mode = 1; /** * Define the number of video stages we'll add before/after decoding */ #define EXAMPLE_PRE_STAGES 1 #define EXAMPLE_POST_STAGES 1 /** * Allocate useful structures : * - index counter * - thread param structure and its substructures */ uint8_t stages_index = 0; specific_parameters_t *params = (specific_parameters_t *)vp_os_calloc (1, sizeof (specific_parameters_t)); specific_stages_t *example_pre_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t)); specific_stages_t *example_post_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t)); vp_api_picture_t *in_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t)); vp_api_picture_t *out_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t)); /** * Fill the vp_api_pictures used for video 
decodig * --> out_picture->format is mandatory for AR.Drone 1 and 2. Other lines are only necessary for AR.Drone 1 video decoding */ in_picture->width = 640; // Drone 1 only : Must be greater than the drone 1 picture size (320) in_picture->height = 360; // Drone 1 only : Must be greater that the drone 1 picture size (240) out_picture->framerate = 20; // Drone 1 only, must be equal to drone target FPS out_picture->format = PIX_FMT_RGB24; // MANDATORY ! Only RGB24, RGB565 are supported out_picture->width = in_picture->width; out_picture->height = in_picture->height; // Alloc Y, CB, CR bufs according to target format uint32_t bpp = 0; switch (out_picture->format) { case PIX_FMT_RGB24: // One buffer, three bytes per pixel bpp = 3; out_picture->y_buf = vp_os_malloc ( out_picture->width * out_picture->height * bpp ); out_picture->cr_buf = NULL; out_picture->cb_buf = NULL; out_picture->y_line_size = out_picture->width * bpp; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; break; case PIX_FMT_RGB565: // One buffer, two bytes per pixel bpp = 2; out_picture->y_buf = vp_os_malloc ( out_picture->width * out_picture->height * bpp ); out_picture->cr_buf = NULL; out_picture->cb_buf = NULL; out_picture->y_line_size = out_picture->width * bpp; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; break; default: fprintf (stderr, "Wrong video format, must be either PIX_FMT_RGB565 or PIX_FMT_RGB24\n"); exit (-1); break; } /** * Allocate the stage lists * * - "pre" stages are called before video decoding is done * -> A pre stage get the encoded video frame (including PaVE header for AR.Drone 2 frames) as input * -> A pre stage MUST NOT modify these data, and MUST pass it to the next stage * - Typical "pre" stage : Encoded video recording for AR.Drone 1 (recording for AR.Drone 2 is handled differently) * * - "post" stages are called after video decoding * -> The first post stage will get the decoded video frame as its input * --> Video frame format depend on 
out_picture->format value (RGB24 / RGB565) * -> A post stage CAN modify the data, as ardrone_tool won't process it afterwards * -> All following post stages will use the output of the previous stage as their inputs * - Typical "post" stage : Display the decoded frame */ example_pre_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (EXAMPLE_PRE_STAGES, sizeof (vp_api_io_stage_t)); example_post_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (EXAMPLE_POST_STAGES, sizeof (vp_api_io_stage_t)); /** * Fill the PRE stage list * - name and type are debug infos only * - cfg is the pointer passed as "cfg" in all the stages calls * - funcs is the pointer to the stage functions */ stages_index = 0; vp_os_memset (&precfg, 0, sizeof (pre_stage_cfg_t)); strncpy (precfg.outputName, encodedFileName, 255); example_pre_stages->stages_list[stages_index].name = "Encoded Dumper"; // Debug info example_pre_stages->stages_list[stages_index].type = VP_API_FILTER_DECODER; // Debug info example_pre_stages->stages_list[stages_index].cfg = &precfg; example_pre_stages->stages_list[stages_index++].funcs = pre_stage_funcs; example_pre_stages->length = stages_index; /** * Fill the POST stage list * - name and type are debug infos only * - cfg is the pointer passed as "cfg" in all the stages calls * - funcs is the pointer to the stage functions */ stages_index = 0; vp_os_memset (&dispCfg, 0, sizeof (display_stage_cfg_t)); dispCfg.bpp = bpp; dispCfg.decoder_info = in_picture; example_post_stages->stages_list[stages_index].name = "Decoded display"; // Debug info example_post_stages->stages_list[stages_index].type = VP_API_OUTPUT_SDL; // Debug info example_post_stages->stages_list[stages_index].cfg = &dispCfg; example_post_stages->stages_list[stages_index++].funcs = display_stage_funcs; example_post_stages->length = stages_index; /** * Fill thread params for the ardrone_tool video thread * - in_pic / out_pic are reference to our in_picture / out_picture * - pre/post stages lists are 
references to our stages lists * - needSetPriority and priority are used to control the video thread priority * -> if needSetPriority is set to 1, the thread will try to set its priority to "priority" * -> if needSetPriority is set to 0, the thread will keep its default priority (best on PC) */ params->in_pic = in_picture; params->out_pic = out_picture; params->pre_processing_stages_list = example_pre_stages; params->post_processing_stages_list = example_post_stages; params->needSetPriority = 0; params->priority = 0; //set the tag detection ENEMY_COLORS_TYPE enemyColors = ARDRONE_DETECTION_COLOR_ORANGE_BLUE; ARDRONE_TOOL_CONFIGURATION_ADDEVENT (enemy_colors, &enemyColors, NULL); CAD_TYPE detectType = CAD_TYPE_MULTIPLE_DETECTION_MODE; ARDRONE_TOOL_CONFIGURATION_ADDEVENT (detect_type, &detectType, NULL); uint32_t detectH = TAG_TYPE_MASK (TAG_TYPE_SHELL_TAG_V2); ARDRONE_TOOL_CONFIGURATION_ADDEVENT (detections_select_h, &detectH, NULL); /** * Start the video thread (and the video recorder thread for AR.Drone 2) */ START_THREAD(video_stage, params); START_THREAD (video_recorder, NULL); video_stage_init(); if (2 <= ARDRONE_VERSION ()) { START_THREAD (video_recorder, NULL); video_recorder_init (); } video_stage_resume_thread(); //King of the Hill threads START_THREAD(wiimote_logic, NULL); START_THREAD(drone_logic, NULL); START_THREAD(score_logic, NULL); return C_OK; }
C_RESULT ardrone_tool_init_custom(void) { should_exit = 0; vp_os_mutex_init(&navdata_lock); vp_os_mutex_init(&video_lock); vp_os_mutex_init(&twist_lock); //jdeDriver = new ARDroneDriver(); int _w, _h; if (IS_ARDRONE2) { ardrone_application_default_config.video_codec = H264_360P_CODEC; _w = D2_STREAM_WIDTH; _h = D2_STREAM_HEIGHT; } else if (IS_ARDRONE1) { ardrone_application_default_config.video_codec = UVLC_CODEC; _w = D1_STREAM_WIDTH; _h = D1_STREAM_HEIGHT; } else { printf("Something must be really wrong with the SDK!"); } realtime_navdata=false; realtime_video=false; looprate=LOOPRATE; // SET SOME NON-STANDARD DEFAULT VALUES FOR THE DRIVER // THESE CAN BE OVERWRITTEN BY ROS PARAMETERS (below) ardrone_application_default_config.bitrate_ctrl_mode = VBC_MODE_DISABLED; if (IS_ARDRONE2) { ardrone_application_default_config.max_bitrate = 4000; } ardrone_application_default_config.navdata_options = NAVDATA_OPTION_FULL_MASK; ardrone_application_default_config.video_channel = ZAP_CHANNEL_HORI; ardrone_application_default_config.control_level = (0 << CONTROL_LEVEL_COMBINED_YAW); ardrone_application_default_config.flying_mode = FLYING_MODE_FREE_FLIGHT; jdeDriver->configureDrone((char*)CONFIG_FILE_PATH); cam_state=jdeDriver->getParameter("video_channel",0); char buffer[MULTICONFIG_ID_SIZE+1]; sprintf(buffer,"-%s",usr_id); printf("Deleting Profile %s\n",buffer); ARDRONE_TOOL_CONFIGURATION_ADDEVENT (profile_id, buffer, NULL); sprintf(buffer,"-%s",app_id); printf("Deleting Application %s\n",buffer); ARDRONE_TOOL_CONFIGURATION_ADDEVENT (application_id, buffer, NULL); // Now continue with the rest of the initialization ardrone_tool_input_add(&teleop); uint8_t post_stages_index = 0; //Alloc structs specific_parameters_t * params = (specific_parameters_t *)vp_os_calloc(1,sizeof(specific_parameters_t)); specific_stages_t * driver_pre_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t)); specific_stages_t * driver_post_stages = (specific_stages_t*)vp_os_calloc(1, 
sizeof(specific_stages_t)); vp_api_picture_t * in_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); vp_api_picture_t * out_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); in_picture->width = _w; in_picture->height = _h; out_picture->framerate = 20; out_picture->format = PIX_FMT_RGB24; out_picture->width = _w; out_picture->height = _h; out_picture->y_buf = (uint8_t*) vp_os_malloc( _w * _h * 3 ); out_picture->cr_buf = NULL; out_picture->cb_buf = NULL; out_picture->y_line_size = _w * 3; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; //Alloc the lists driver_pre_stages->stages_list = NULL; driver_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(NB_DRIVER_POST_STAGES,sizeof(vp_api_io_stage_t)); driver_post_stages->stages_list[post_stages_index].name = "ExtractData"; driver_post_stages->stages_list[post_stages_index].type = VP_API_OUTPUT_SDL; driver_post_stages->stages_list[post_stages_index].cfg = NULL; driver_post_stages->stages_list[post_stages_index++].funcs = vp_stages_export_funcs; driver_pre_stages->length = 0; driver_post_stages->length = post_stages_index; params->in_pic = in_picture; params->out_pic = out_picture; params->pre_processing_stages_list = driver_pre_stages; params->post_processing_stages_list = driver_post_stages; params->needSetPriority = 1; params->priority = 31; // Using the provided threaded pipeline implementation from SDK START_THREAD(video_stage, params); video_stage_init(); if (ARDRONE_VERSION() >= 2) { START_THREAD (video_recorder, NULL); video_recorder_init (); video_recorder_resume_thread(); } // Threads do not start automatically video_stage_resume_thread(); ardrone_tool_set_refresh_time(25); //jdeDriver->configure_drone(); jdeDriver->initInterfaces(); START_THREAD(update_jde, jdeDriver); return C_OK; }
C_RESULT ardrone_tool_init_custom(void) { ardrone_application_default_config.navdata_options = NAVDATA_OPTION_FULL_MASK /*& ~(NAVDATA_OPTION_MASK(NAVDATA_TRACKERS_SEND_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_OF_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_PERF_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_TAG))*/; if (IS_ARDRONE2) { ardrone_application_default_config.video_codec = H264_360P_CODEC; } else { ardrone_application_default_config.video_codec = UVLC_CODEC; } ardrone_application_default_config.bitrate_ctrl_mode = 1; /// Init specific code for application ardrone_navdata_handler_table[NAVDATA_IHM_PROCESS_INDEX].data = &cfg; // Add inputs //ardrone_tool_input_add( &gamepad ); /*ardrone_tool_input_add( &radioGP ); ardrone_tool_input_add( &ps3pad ); ardrone_tool_input_add( &joystick ); ardrone_tool_input_add( &wiimote_device );*/ load_ini(); printf("Default control : %s (0x%08x, %s)\n", default_control->name, default_control->serial, default_control->filename); ardrone_tool_input_add(&control_device); cfg.default_control = default_control; cfg.devices = devices; #ifdef USE_ARDRONE_VICON START_THREAD(vicon, &cfg); #endif // USE_ARDRONE_VICON START_THREAD(ihm, &cfg); #ifdef RAW_CAPTURE START_THREAD(raw_capture, &cfg); #endif START_THREAD(remote_console, &cfg); /************************ VIDEO STAGE CONFIG ******************************/ #define NB_NAVIGATION_POST_STAGES 10 uint8_t post_stages_index = 0; //Alloc structs specific_parameters_t * params = (specific_parameters_t *)vp_os_calloc(1,sizeof(specific_parameters_t)); specific_stages_t * navigation_pre_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t)); specific_stages_t * navigation_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t)); vp_api_picture_t * in_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); vp_api_picture_t * out_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t)); in_picture->width = STREAM_WIDTH; 
in_picture->height = STREAM_HEIGHT; out_picture->framerate = 20; out_picture->format = PIX_FMT_RGB24; out_picture->width = STREAM_WIDTH; out_picture->height = STREAM_HEIGHT; out_picture->y_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT * 3 ); out_picture->cr_buf = NULL; out_picture->cb_buf = NULL; out_picture->y_line_size = STREAM_WIDTH * 3; out_picture->cb_line_size = 0; out_picture->cr_line_size = 0; //Alloc the lists navigation_pre_stages->stages_list = NULL; navigation_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(NB_NAVIGATION_POST_STAGES,sizeof(vp_api_io_stage_t)); //Fill the POST-stages------------------------------------------------------ vp_os_memset(&vlat, 0, sizeof( vlat )); vlat.state = 0; vlat.last_decoded_frame_info = (void*)&vec; navigation_post_stages->stages_list[post_stages_index].name = "(latency estimator)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&vlat; navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_latency_estimation_funcs; #ifdef RECORD_RAW_VIDEO vp_os_memset(&vrc, 0, sizeof( vrc )); #warning Recording RAW video option enabled in Navigation. vrc.stage = 3; #warning We have to get the stage number an other way vp_os_memset(&vrc, 0, sizeof(vrc)); navigation_post_stages->stages_list[post_stages_index].name = "(raw video recorder)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&vrc; navigation_post_stages->stages_list[post_stages_index++].funcs = video_recorder_funcs; #endif // RECORD_RAW_VIDEO #if defined(FFMPEG_SUPPORT) && defined(RECORD_FFMPEG_VIDEO) #warning Recording FFMPEG (reencoding)video option enabled in Navigation. 
vp_os_memset(&ffmpeg_vrc, 0, sizeof(ffmpeg_vrc)); ffmpeg_vrc.numframes = &vec.controller.num_frames; ffmpeg_vrc.stage = pipeline.nb_stages; navigation_post_stages->stages_list[post_stages_index].name = "(ffmpeg recorder)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&ffmpeg_vrc; navigation_post_stages->stages_list[post_stages_index++].funcs = video_ffmpeg_recorder_funcs; #endif vp_os_memset(&draw_trackers_cfg, 0, sizeof( draw_trackers_funcs )); draw_trackers_cfg.last_decoded_frame_info = (void*)&vec; navigation_post_stages->stages_list[post_stages_index].type = VP_API_FILTER_DECODER; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)&draw_trackers_cfg; navigation_post_stages->stages_list[post_stages_index++].funcs = draw_trackers_funcs; vp_os_memset(>kconf, 0, sizeof( gtkconf )); gtkconf.rowstride = out_picture->width * 3; gtkconf.last_decoded_frame_info = (void*)&vec; gtkconf.desired_display_width = 0; /* auto */ gtkconf.desired_display_height = 0; /* auto */ gtkconf.gdk_interpolation_mode = 0; /* fastest */ navigation_post_stages->stages_list[post_stages_index].name = "(Gtk display)"; navigation_post_stages->stages_list[post_stages_index].type = VP_API_OUTPUT_SDL; navigation_post_stages->stages_list[post_stages_index].cfg = (void*)>kconf; navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_output_gtk_funcs; //Define the list of stages size navigation_pre_stages->length = 0; navigation_post_stages->length = post_stages_index; params->in_pic = in_picture; params->out_pic = out_picture; params->pre_processing_stages_list = navigation_pre_stages; params->post_processing_stages_list = navigation_post_stages; params->needSetPriority = 0; params->priority = 0; START_THREAD(video_stage, params); video_stage_init(); if (2 <= ARDRONE_VERSION ()) { START_THREAD (video_recorder, NULL); video_recorder_init (); } else { printf 
("Don't start ... version is %d\n", ARDRONE_VERSION ()); } /************************ END OF VIDEO STAGE CONFIG ***********************/ #ifdef PC_USE_POLARIS START_THREAD(polaris, &cfg); #endif // PC_USE_POLARIS #ifdef USE_TABLE_PILOTAGE START_THREAD(novadem, (void*) ("/dev/ttyUSB0")); #endif // USE_TABLE_PILOTAGE return C_OK; }