BOOL _stdcall UpdateDrone()
{
    /* Keeps sending AT commands to control the drone as long as
       everything is OK */
    C_RESULT res = ardrone_tool_update();
    printf("Sent update\n");
    return (VP_SUCCEEDED(res) && ardrone_tool_exit() == FALSE);
}
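Note: this helper is meant to be called repeatedly. A minimal sketch of a caller, assuming ardrone_tool_setup_com() and ardrone_tool_init() have already succeeded as in the examples below (the name RunDroneLoop is illustrative, not part of the SDK):

/* Illustrative caller: pump the SDK until UpdateDrone() reports a failure
   or the tool requests exit. */
void RunDroneLoop(void)
{
    while (UpdateDrone())
    {
        /* application-side work (UI, input polling, ...) goes here */
    }
}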
Example #2
/*void test(void * null){
	while(1){
		printf("Drone control events will be handled here in the future");
		//sleep(2);
	}}*/
/* Includes the Pthread for Win32 library if necessary */
#include <VP_Os/vp_os_signal.h>
#if defined USE_PTHREAD_FOR_WIN32
#pragma comment (lib,"pthreadVC2.lib")
#endif

int main(int argc, char **argv)
{
    C_RESULT res;
    const char* appname = argv[0];
    WSADATA wsaData = {0};
    int iResult = 0;

    /* Initializes the Windows socket subsystem */
    iResult = WSAStartup(MAKEWORD(2, 2), &wsaData);
    if (iResult != 0) {
        wprintf(L"WSAStartup failed: %d\n", iResult);
        return 1;
    }

    res = test_drone_connection(); // check the WiFi connection to the drone
    if(res!=0) {
        printf("%s","Could not detect the drone version ... press <Enter> to try connecting anyway.\n");
        getchar();
        WSACleanup();
        exit(-1);
    }

    init_client();
    res = ardrone_tool_setup_com( NULL ); // set up AT command and video transmission channels
    if( FAILED(res) ) {
        PRINT("Wifi initialization failed.\n");
        return -1;
    }

    res = ardrone_tool_init(argc, argv); // calls ardrone_tool_init_custom()
    while( VP_SUCCEEDED(res) && ardrone_tool_exit() == FALSE ) {
        res = ardrone_tool_update();
    } // repeatedly calls ardrone_tool_update()

    res = ardrone_tool_shutdown();
    closesocket(ConnectSocket);
    WSACleanup();
    system("cls");
    printf("End of SDK Demo for Windows\n");
    getchar();
    return VP_SUCCEEDED(res) ? 0 : -1;
}
Example #3
int main(int argc, char **argv)
{
  C_RESULT res;
  const char* old_locale;
  const char* appname = argv[0];
  int argc_backup = argc;
  char** argv_backup = argv;

  bool_t show_usage = FAILED( ardrone_tool_check_argc_custom(argc) ) ? TRUE : FALSE;

  argc--; argv++;
  while( argc && *argv[0] == '-' )
  {
    if( !strcmp(*argv, "-?") || !strcmp(*argv, "-h") || !strcmp(*argv, "-help") || !strcmp(*argv, "--help") )
    {
      ardrone_tool_usage( appname );
      exit( 0 );
    }
    else if( !ardrone_tool_parse_cmd_line_custom( *argv ) )
    {
      printf("Option %s not recognized\n", *argv);
      show_usage = TRUE;
    }

    argc--; argv++;
  }

  if( show_usage || (argc != 0) )
  {
    ardrone_tool_usage( appname );
    exit(-1);
  }
  
  /* After a first analysis, the arguments are restored so they can be passed to the user-defined functions */
  argc=argc_backup;
  argv=argv_backup;
  
  old_locale = setlocale(LC_NUMERIC, "en_GB.UTF-8");

  if( old_locale == NULL )
  {
    PRINT("You have to install new locales in your dev environment! (avoid the need of conv_coma_to_dot)\n");
    PRINT("As root, do a \"dpkg-reconfigure locales\" and add en_GB.UTF8 to your locale settings\n");
    PRINT("If you have any problem, feel free to contact Pierre Eline ([email protected])\n");
  }
  else
  {
    PRINT("Setting locale to %s\n", old_locale);
  }

  vp_com_wifi_config_t *config = (vp_com_wifi_config_t*)wifi_config();
  if(config)
  {
	  vp_os_memset( &wifi_ardrone_ip[0], 0, sizeof(wifi_ardrone_ip) );
	  printf("===================+> %s\n", config->server);
	  strcpy( &wifi_ardrone_ip[0], config->server);
  }

  if( &custom_main )
  {
    return custom_main(argc, argv);
  }
  else
  {
	res = ardrone_tool_setup_com( NULL );

    if( FAILED(res) )
    {
      PRINT("Wifi initialization failed. It means either:\n");
      PRINT("\t* you're not root (it's mandatory because you can set up wifi connection only as root)\n");
      PRINT("\t* wifi device is not present (on your pc or on your card)\n");
      PRINT("\t* you set the wrong name for wifi interface (for example rausb0 instead of wlan0) \n");
      PRINT("\t* ap is not up (reboot card or remove wifi usb dongle)\n");
      PRINT("\t* wifi device has no antenna\n");
    }
    else
    {
      res = ardrone_tool_init(argc, argv);

      while( SUCCEED(res) && ardrone_tool_exit() == FALSE )
      {
        res = ardrone_tool_update();
      }

      res = ardrone_tool_shutdown();
    }
  }

  if( old_locale != NULL )
  {
    setlocale(LC_NUMERIC, old_locale);
  }

  return SUCCEED(res) ? 0 : -1;
}
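A note on the `if( &custom_main )` test above: taking the address of custom_main can only evaluate to NULL if the symbol is declared weak. A sketch of such a declaration, assuming a GCC/Clang toolchain (an assumption, not taken from the SDK headers):

/* Illustrative weak declaration: if no application defines custom_main,
   its address is NULL and the default branch in main() above runs instead. */
extern int custom_main(int argc, char **argv) __attribute__((weak));
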
DEFINE_THREAD_ROUTINE( navdata_update, nomParams )
{
    C_RESULT res;
    int32_t  i, size;
    uint32_t cks, navdata_cks, sequence = NAVDATA_SEQUENCE_DEFAULT-1;
    struct timeval tv;
#ifdef _WIN32
    int timeout_for_windows=1000/*milliseconds*/;
#endif


    navdata_t* navdata = (navdata_t*) &navdata_buffer[0];

    tv.tv_sec   = 1/*second*/;
    tv.tv_usec  = 0;

    res = C_OK;

    if( VP_FAILED(vp_com_open(COM_NAVDATA(), &navdata_socket, &navdata_read, &navdata_write)) )
    {
        printf("VP_Com : Failed to open socket for navdata\n");
        res = C_FAIL;
    }

    if( VP_SUCCEEDED(res) )
    {
        PRINT("Thread navdata_update in progress...\n");

#ifdef _WIN32
        setsockopt((int32_t)navdata_socket.priv, SOL_SOCKET, SO_RCVTIMEO, (const char*)&timeout_for_windows, sizeof(timeout_for_windows));
        /* Added by Stephane to force the drone start sending data. */
        if(navdata_write)
        {
            int sizeinit = 5;
            navdata_write( (void*)&navdata_socket, (int8_t*)"Init", &sizeinit );
        }
#else
        setsockopt((int32_t)navdata_socket.priv, SOL_SOCKET, SO_RCVTIMEO, (const char*)&tv, sizeof(tv));
#endif


        i = 0;
        while( ardrone_navdata_handler_table[i].init != NULL )
        {
            // If init failed for a handler, we set its process function to NULL.
            // We keep its release function for cleanup.
            if( VP_FAILED( ardrone_navdata_handler_table[i].init(ardrone_navdata_handler_table[i].data) ) )
                ardrone_navdata_handler_table[i].process = NULL;

            i ++;
        }

        navdata_thread_in_pause = FALSE;
        while( VP_SUCCEEDED(res)
                && !ardrone_tool_exit()
                && bContinue )
        {
            if(navdata_thread_in_pause)
            {
                vp_os_mutex_lock(&navdata_client_mutex);
                num_retries = NAVDATA_MAX_RETRIES + 1;
                vp_os_cond_wait(&navdata_client_condition);
                vp_os_mutex_unlock(&navdata_client_mutex);
            }

            if( navdata_read == NULL )
            {
                res = C_FAIL;
                continue;
            }

            size = NAVDATA_MAX_SIZE;
            navdata->header = 0; // Soft reset
            res = navdata_read( (void*)&navdata_socket, (int8_t*)&navdata_buffer[0], &size );

#ifdef _WIN32
            if( size <= 0 )
#else
            if( size == 0 )
#endif
            {
                // timeout
                PRINT("Timeout\n");
                ardrone_navdata_open_server();
                sequence = NAVDATA_SEQUENCE_DEFAULT-1;
                num_retries++;
            }
            else
                num_retries = 0;

            if( VP_SUCCEEDED( res ) )
            {
                if( navdata->header == NAVDATA_HEADER )
                {
                    if( ardrone_get_mask_from_state(navdata->ardrone_state, ARDRONE_COM_WATCHDOG_MASK) )
                    {
                        // Reset the sequence number because of the com watchdog.
                        // This is mandatory because a com watchdog can occur without being detected on the mobile side:
                        //        reconnection is fast enough (less than one second)
                        sequence = NAVDATA_SEQUENCE_DEFAULT-1;

                        if( ardrone_get_mask_from_state(navdata->ardrone_state, ARDRONE_NAVDATA_BOOTSTRAP) == FALSE )
                            ardrone_tool_send_com_watchdog(); // acknowledge
                    }

                    if( navdata->sequence > sequence )
                    {
                        i = 0;

                        ardrone_navdata_unpack_all(&navdata_unpacked, navdata, &navdata_cks);
                        cks = ardrone_navdata_compute_cks( &navdata_buffer[0], size - sizeof(navdata_cks_t) );

                        if( cks == navdata_cks )
                        {
                            while( ardrone_navdata_handler_table[i].init != NULL )
                            {
                                if( ardrone_navdata_handler_table[i].process != NULL )
                                    ardrone_navdata_handler_table[i].process( &navdata_unpacked );

                                i++;
                            }
                        }
                        else
                        {
                            PRINT("[Navdata] Checksum failed : %d (distant) / %d (local)\n", navdata_cks, cks);
                        }
                    }
                    else
                    {
                        PRINT("[Navdata] Sequence pb : %d (distant) / %d (local)\n", navdata->sequence, sequence);
                    }

                    // remaining = sizeof(navdata);

                    sequence = navdata->sequence;
                }
            }
        }

        // Release resources allocated by handlers
        i = 0;
        while( ardrone_navdata_handler_table[i].init != NULL )
        {
            ardrone_navdata_handler_table[i].release();

            i ++;
        }
    }

    vp_com_close(COM_NAVDATA(), &navdata_socket);

    DEBUG_PRINT_SDK("Thread navdata_update ended\n");

    return (THREAD_RET)res;
}
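The loops above drive each entry of ardrone_navdata_handler_table through three hooks: init(data) once at startup, process(&navdata_unpacked) for every checksum-valid packet, and release() at shutdown. A sketch of what one handler could look like; the names are illustrative and the signatures are inferred only from how the table is used above (navdata_unpacked is assumed to be the SDK's navdata_unpacked_t):

/* Illustrative navdata handler matching the call pattern in the thread above. */
static C_RESULT my_handler_init(void *data)
{
    (void)data;   /* per-handler private data passed from the table entry */
    return C_OK;
}

static C_RESULT my_handler_process(const navdata_unpacked_t *nav)
{
    (void)nav;    /* inspect the unpacked navdata here */
    return C_OK;
}

static C_RESULT my_handler_release(void)
{
    return C_OK;
}
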
Example #5
int ardrone_tool_main(int argc, char **argv)
{
  C_RESULT res;
  const char* old_locale;
  const char* appname = argv[0];
  int argc_backup = argc;
  char** argv_backup = argv;
  char * drone_ip_address = NULL;
  struct in_addr drone_ip_address_in;

  bool_t show_usage = FAILED( ardrone_tool_check_argc_custom(argc) ) ? TRUE : FALSE;

  argc--; argv++;
  while( argc && *argv[0] == '-' )
  {
    if( !strcmp(*argv, "-ip") && ( argc > 1 ) )
    {
    	drone_ip_address = *(argv+1);
    	printf("Using custom ip address %s\n",drone_ip_address);
	    argc--; argv++;
    }
    else if( !strcmp(*argv, "-?") || !strcmp(*argv, "-h") || !strcmp(*argv, "-help") || !strcmp(*argv, "--help") )
    {
      ardrone_tool_usage( appname );
      exit( 0 );
    }
    else if( !ardrone_tool_parse_cmd_line_custom( *argv ) )
    {
      printf("Option %s not recognized\n", *argv);
      show_usage = TRUE;
    }

    argc--; argv++;
  }

  if( show_usage || (argc != 0) )
  {
    ardrone_tool_usage( appname );
    exit(-1);
  }
  
  /* After a first analysis, the arguments are restored so they can be passed to the user-defined functions */
  argc=argc_backup;
  argv=argv_backup;
  
  old_locale = setlocale(LC_NUMERIC, "en_GB.UTF-8");

  if( old_locale == NULL )
  {
    PRINT("You have to install new locales in your dev environment! (avoid the need of conv_coma_to_dot)\n");
    PRINT("As root, do a \"dpkg-reconfigure locales\" and add en_GB.UTF8 to your locale settings\n");
  }
  else
  {
    PRINT("Setting locale to %s\n", old_locale);
  }

  vp_com_wifi_config_t *config = (vp_com_wifi_config_t*)wifi_config();
  
  if(config)
  {
	  vp_os_memset( &wifi_ardrone_ip[0], 0, sizeof(wifi_ardrone_ip) );

	  if(drone_ip_address && inet_aton(drone_ip_address,&drone_ip_address_in)!=0)
	  {
	  /* If the drone IP address was given on the command line and is valid */
  	  	printf("===================+> %s\n", drone_ip_address);
        strncpy( &wifi_ardrone_ip[0], drone_ip_address, sizeof(wifi_ardrone_ip)-1);
      }
      else
	  {
		printf("===================+> %s\n", config->server);
		strncpy( &wifi_ardrone_ip[0], config->server, sizeof(wifi_ardrone_ip)-1);
	  }
  }

  while (-1 == getDroneVersion (root_dir, wifi_ardrone_ip, &ardroneVersion))
    {
      printf ("Getting AR.Drone version ...\n");
      vp_os_delay (250);
    }

	res = ardrone_tool_setup_com( NULL );

	if( FAILED(res) )
	{
	  PRINT("Wifi initialization failed. It means either:\n");
	  PRINT("\t* you're not root (it's mandatory because you can set up wifi connection only as root)\n");
	  PRINT("\t* wifi device is not present (on your pc or on your card)\n");
	  PRINT("\t* you set the wrong name for wifi interface (for example rausb0 instead of wlan0) \n");
	  PRINT("\t* ap is not up (reboot card or remove wifi usb dongle)\n");
	  PRINT("\t* wifi device has no antenna\n");
	}
	else
	{
		// Save appname/appid for reconnections
		char *appname = NULL;
		int lastSlashPos;
		/* Cut the invoking name at the last / or \ character on the command line.
		* This avoids using different app_ids for applications called from different directories,
		* e.g. if argv[0] is "Build/Release/ardrone_navigation", appname will point to "ardrone_navigation" only.
		*/
		for (lastSlashPos = strlen (argv[0])-1; lastSlashPos > 0 && argv[0][lastSlashPos] != '/' && argv[0][lastSlashPos] != '\\'; lastSlashPos--);
		appname = &argv[0][lastSlashPos+1];
		ardrone_gen_appid (appname, __SDK_VERSION__, app_id, app_name, sizeof (app_name));
		res = ardrone_tool_init(wifi_ardrone_ip, strlen(wifi_ardrone_ip), NULL, appname, NULL, NULL, NULL, MAX_FLIGHT_STORING_SIZE, NULL);

      while( SUCCEED(res) && ardrone_tool_exit() == FALSE )
      {
        res = ardrone_tool_update();
      }

      res = ardrone_tool_shutdown();
    }

  if( old_locale != NULL )
  {
    setlocale(LC_NUMERIC, old_locale);
  }

  return SUCCEED(res) ? 0 : -1;
}
Example #6
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("Video stage thread initialization\n");

    res = vp_api_open(&pipeline, &pipeline_handle);
   // PRINT("VP_API OPENED\n");
    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;
      cvStartWindowThread();
      #define frontWindow "DroneView"
      cvNamedWindow(frontWindow, CV_WINDOW_AUTOSIZE);
      frontImgStream = cvCreateImage(cvSize(picture.width, picture.height), IPL_DEPTH_8U, 3);

      #define bottomWindow "BomberView"
      if(extractBottomImage)
	cvNamedWindow(bottomWindow, CV_WINDOW_AUTOSIZE);
      bottomImgStream = cvCreateImage(cvSize(bottomWidth, bottomHeight), IPL_DEPTH_8U, 3);

      
      IplImage *frame;
      CvCapture *capture;
      if(USEWEBCAM)
      {
        capture = cvCaptureFromCAM(WEBCAMINDEX);
    
        if(!capture)
        {
          printf("ERROR: Cannot Initialize Webcam.\n");
          loop = !SUCCESS;
        }
      }

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
        if(!USEWEBCAM)
        {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
              if (vec.controller.num_frames==0) continue;
	      /*int i;
              for(i = 0; i < (picture.width)*(picture.height); i++)
              {
                frontImgStream->imageData[i*3] = picture.y_buf[i];
                frontImgStream->imageData[i*3+1] = picture.y_buf[i];
                frontImgStream->imageData[i*3+2] = picture.y_buf[i];
              }
	      */
             cvCvtColor(rgbHeader, frontImgStream, CV_RGB2BGR); //[for colour]

              if(extractBottomImage)
              {
                int j = 0, i;
                for(i = 0; j < bottomHeight*bottomWidth; i = i%picture.width >= bottomWidth-1 ? i - (i%picture.width) + picture.width : i+1)
                {
                  bottomImgStream->imageData[j*3] = frontImgStream->imageData[i*3];
                  bottomImgStream->imageData[j*3+1] = frontImgStream->imageData[i*3+1];
                  bottomImgStream->imageData[j*3+2] = frontImgStream->imageData[i*3+2];
                  frontImgStream->imageData[i*3] = 0;
                  frontImgStream->imageData[i*3+1] = 0;
                  frontImgStream->imageData[i*3+2] = 0;
                  j++;
                }
              }
              
              //cvLine(frontImgStream, cvPoint(picture.width/2, 0), cvPoint(picture.width/2, picture.height), CV_RGB(0,255,0), 1, CV_AA, 0 );
              cvShowImage(frontWindow, frontImgStream);
              if(extractBottomImage)
              cvShowImage(bottomWindow, bottomImgStream);
              
              if(CAPTUREIMAGESTREAM)
              {
                char filename[256];
                struct timeval t;
                gettimeofday(&t, NULL);
                sprintf(filename, "%d.%06d.jpg", (int)t.tv_sec, (int)t.tv_usec);
                if(frontImgStream != NULL && cvSaveImage(filename, cvCloneImage(frontImgStream), 0))
                  printf("Image dumped to %s\n", filename);
                else
                  printf("Error dumping image...\n");
                if(extractBottomImage)
                {
                  sprintf(filename, "%d.%06dbottom.jpg", (int)t.tv_sec, (int)t.tv_usec);
                  if(bottomImgStream != NULL && cvSaveImage(filename, cvCloneImage(bottomImgStream), 0))
                    printf("Bottom Image dumped to %s\n", filename);
                  else
                    printf("Error dumping bottom image...\n");
                }
              }
          }
          else loop = -1;
        }
        else //use webcam
        {
          frame = cvQueryFrame(capture);
      
          if(!frame) break;
          
          cvResize(frame, frontImgStream, CV_INTER_LINEAR);
          cvShowImage(frontWindow, frontImgStream);
          
          cvWaitKey(1);
        }
      }

      cvDestroyWindow(frontWindow);
      if(extractBottomImage)
	cvDestroyWindow(bottomWindow);
      //cvReleaseImage(&imgBottom);
      //cvDestroyWindow(bottomWindow);
      cvReleaseImage(&frontImgStream);
      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
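The hand-written pixel loop above that pulls the embedded bottom-camera image out of the front frame can also be expressed with OpenCV's legacy ROI helpers. A sketch, assuming (as the loop does) that the bottom picture sits in the top-left bottomWidth x bottomHeight corner of the front frame:

#include <opencv/cv.h>

/* Illustrative equivalent of the extraction loop above: copy the top-left
   region of the front frame into the bottom image, then blank it out. */
static void extract_bottom_view(IplImage *front, IplImage *bottom,
                                int bottom_w, int bottom_h)
{
    cvSetImageROI(front, cvRect(0, 0, bottom_w, bottom_h));
    cvCopy(front, bottom, NULL);   /* bottom was created as bottom_w x bottom_h */
    cvZero(front);                 /* cvZero honours the ROI                    */
    cvResetImageROI(front);
}
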
DEFINE_THREAD_ROUTINE(ihm, data)
{
	C_RESULT res;

	WSADATA wsaData = {0};
	int iResult = 0;


	/* Initializes Windows socket subsystem */
	iResult = WSAStartup(MAKEWORD(2, 2), &wsaData);
	if (iResult != 0)
	{
		wprintf(L"WSAStartup failed: %d\n", iResult);
		return 1;
	}

 
	/* Initializes communication sockets */	
	res = test_drone_connection(); // Nick disabled the press enter (wait)
	if(res!=0)
	{
		printf("%s","Could not detect the drone version ... press <Enter> to try connecting anyway.\n");
		getchar();
		//WSACleanup();
		exit(-1);
	}


	res = ardrone_tool_setup_com( NULL );
	if( FAILED(res) )
	{
		PRINT("Wifi initialization failed.\n");
		return -1;
	}


	START_THREAD(video_stage, 0);

	res = ardrone_tool_init(WIFI_ARDRONE_IP, strlen(WIFI_ARDRONE_IP), NULL, ARDRONE_CLIENT_APPNAME, ARDRONE_CLIENT_USRNAME);

	ardrone_tool_set_refresh_time(20); // 20 ms

	ardrone_at_reset_com_watchdog();


	// config
	ardrone_control_config.euler_angle_max = 0.20943951f; // 12 degrees
	ardrone_control_config.video_channel	= ZAP_CHANNEL_VERT;
	ardrone_control_config.video_codec		= UVLC_CODEC; //P264_CODEC;
	ardrone_control_config.navdata_demo		= FALSE;
	ardrone_control_config.altitude_max		= 10000;
	ardrone_control_config.control_vz_max	= 1000.0f;
	ardrone_control_config.outdoor			= FALSE;
	//ardrone_control_config.flight_without_shell = TRUE;


	ARDRONE_TOOL_CONFIGURATION_ADDEVENT(video_channel, &ardrone_control_config.video_channel, (ardrone_tool_configuration_callback) ardrone_demo_config_callback);
	//ARDRONE_TOOL_CONFIGURATION_ADDEVENT(video_channel, &ardrone_control_config.video_channel, NULL);
	//ARDRONE_TOOL_CONFIGURATION_ADDEVENT(video_codec, &ardrone_control_config.video_codec, NULL);
	ARDRONE_TOOL_CONFIGURATION_ADDEVENT (navdata_demo, &ardrone_control_config.navdata_demo, NULL);
	ARDRONE_TOOL_CONFIGURATION_ADDEVENT (altitude_max, &ardrone_control_config.altitude_max, NULL);
	ARDRONE_TOOL_CONFIGURATION_ADDEVENT (control_vz_max, &ardrone_control_config.control_vz_max, NULL);
	//ARDRONE_TOOL_CONFIGURATION_ADDEVENT (outdoor, &ardrone_control_config.outdoor, NULL);
	//ARDRONE_TOOL_CONFIGURATION_ADDEVENT (flight_without_shell, &ardrone_control_config.flight_without_shell, NULL);
	ARDRONE_TOOL_CONFIGURATION_ADDEVENT (euler_angle_max, &ardrone_control_config.euler_angle_max, NULL);


	// flat trim
	ardrone_at_set_flat_trim();


	//SetEvent(ardrone_ready);
	//ardrone_demo_redirect_to_interface = 1;


	while( VP_SUCCEEDED(res) && ardrone_tool_exit() == FALSE )
	{
		res = ardrone_tool_update();
	}

	JOIN_THREAD(video_stage);

	res = ardrone_tool_shutdown();

	WSACleanup();

	return (THREAD_RET)res;
}
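For reference, the euler_angle_max value used above is just the 12-degree tilt limit converted to radians; a quick standalone check of the arithmetic:

#include <stdio.h>

int main(void)
{
    const double pi = 3.14159265358979323846;
    /* 12 degrees in radians, matching the 0.20943951f set in the ihm thread. */
    printf("%.8f\n", 12.0 * pi / 180.0);   /* prints 0.20943951 */
    return 0;
}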
Example #8
/*
	The video processing thread.
	This function can be kept as it is by most users.
	It automatically receives the video stream in a loop, decode it, and then 
		call the 'output_rendering_device_stage_transform' function for each decoded frame.
*/
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
  

  /* Picture configuration */
	  picture.format        = PIX_FMT_YUV420P;

	  picture.width         = DRONE_VIDEO_MAX_WIDTH;
	  picture.height        = DRONE_VIDEO_MAX_HEIGHT;
	  picture.framerate     = 15;
	  //picture.framerate     = 3;

	  picture.y_buf   = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT     );
	  picture.cr_buf  = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );
	  picture.cb_buf  = vp_os_malloc( DRONE_VIDEO_MAX_WIDTH * DRONE_VIDEO_MAX_HEIGHT / 4 );

	  picture.y_line_size   = DRONE_VIDEO_MAX_WIDTH;
	  picture.cb_line_size  = DRONE_VIDEO_MAX_WIDTH / 2;
	  picture.cr_line_size  = DRONE_VIDEO_MAX_WIDTH / 2;

	  vp_os_memset(&icc,          0, sizeof( icc ));
	  vp_os_memset(&vec,          0, sizeof( vec ));
	  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

   /* Video socket configuration */
	  icc.com                 = COM_VIDEO();
	  icc.buffer_size         = 100000;
	  icc.protocol            = VP_COM_UDP;
  
	  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  /* Video decoder configuration */
	  /* Size of the buffers used for decoding 
         This must be set to the maximum possible video resolution used by the drone 
         The actual video resolution will be stored by the decoder in vec.controller 
		 (see vlib_stage_decode.h) */
	  vec.width               = DRONE_VIDEO_MAX_WIDTH;
	  vec.height              = DRONE_VIDEO_MAX_HEIGHT;
	  vec.picture             = &picture;
	  vec.block_mode_enable   = TRUE;
	  vec.luma_only           = FALSE;

	yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;

   /* Video pipeline building */
  
		 pipeline.nb_stages = 0;

		/* Video stream reception */
		stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
		stages[pipeline.nb_stages].cfg     = (void *)&icc;
		stages[pipeline.nb_stages].funcs   = video_com_funcs;

		pipeline.nb_stages++;

		/* Video stream decoding */
		stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
		stages[pipeline.nb_stages].cfg     = (void*)&vec;
		stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

		pipeline.nb_stages++;

		/* YUV to RGB conversion 
		YUV format is used by the video stream protocol
		Remove this stage if your rendering device can handle 
		YUV data directly
		*/
		stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
		stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
		stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

		pipeline.nb_stages++;

		/* User code */  
		stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;  /* Set to VP_API_OUTPUT_SDL even if SDL is not used */
		stages[pipeline.nb_stages].cfg     = (void*)&vec;   /* give the decoder information to the renderer */
		stages[pipeline.nb_stages].funcs   = vp_stages_output_rendering_device_funcs;

		  pipeline.nb_stages++;
		  pipeline.stages = &stages[0];
 
 
		  /* Processing of a pipeline */
			  if( !ardrone_tool_exit() )
			  {
				PRINT("\n   Video stage thread initialisation\n\n");

				// switch to vertical camera
				//ardrone_at_zap(ZAP_CHANNEL_VERT);

				res = vp_api_open(&pipeline, &pipeline_handle);

				if( VP_SUCCEEDED(res) )
				{
				  int loop = VP_SUCCESS;
				  out.status = VP_API_STATUS_PROCESSING;

				  while( !ardrone_tool_exit() && (loop == VP_SUCCESS) )
				  {
					  if( VP_SUCCEEDED(vp_api_run(&pipeline, &out)) ) {
						if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
						  loop = VP_SUCCESS;
						}
					  }
					  else loop = -1; // Finish this thread
				  }

				  vp_api_close(&pipeline, &pipeline_handle);
				}
			}

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
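The comment at the top of this example says the per-frame hook is 'output_rendering_device_stage_transform', wired in as the VP_API_OUTPUT_SDL stage with &vec as its configuration. Its exact signature is not shown here; the sketch below assumes the usual vp_api stage-transform shape (cfg, in, out) and is illustrative only, not the SDK's actual code:

/* Assumed shape of the per-frame rendering hook. */
C_RESULT output_rendering_device_stage_transform(void *cfg,
                                                 vp_api_io_data_t *in,
                                                 vp_api_io_data_t *out)
{
    /* cfg is the &vec passed when the stage was registered, so the decoded
       picture and the real stream resolution (vec.controller) are reachable. */
    vlib_stage_decoding_config_t *dec = (vlib_stage_decoding_config_t *)cfg;
    (void)dec; (void)in; (void)out;
    /* ...hand dec->picture to the rendering device here... */
    return C_OK;
}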
Example #9
DEFINE_THREAD_ROUTINE(video_stage, data) {
    C_RESULT res;

    vp_api_io_pipeline_t pipeline;
    vp_api_io_data_t out;
    
    vp_api_io_stage_t * stages;
    video_stage_merge_slices_config_t merge_slices_cfg;
    
    uint8_t i;
    
    specific_parameters_t * params = (specific_parameters_t *)(data);

    if (1 == params->needSetPriority)
    {
      CHANGE_THREAD_PRIO (video_stage, params->priority);
    }
    
    vp_os_memset(&icc_tcp, 0, sizeof ( icc_tcp));
    vp_os_memset(&icc_udp, 0, sizeof ( icc_udp));
    
    // Video Communication config
    icc_tcp.com = COM_VIDEO();
    icc_tcp.buffer_size = (1024*1024);
    icc_tcp.protocol = VP_COM_TCP;
    COM_CONFIG_SOCKET_VIDEO(&icc_tcp.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
    
    // Video Communication config
    icc_udp.com = COM_VIDEO();
    icc_udp.buffer_size = (1024*1024);
    icc_udp.protocol = VP_COM_UDP;
    COM_CONFIG_SOCKET_VIDEO(&icc_udp.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

    icc.nb_sockets = 2;
    icc.configs = icc_tab;
    icc.forceNonBlocking = &(tcpConf.tcpStageHasMoreData);
    icc_tab[1]  = &icc_tcp;
    icc_tab[0]  = &icc_udp;
    
    icc.buffer_size = (1024*1024);
    
     vp_os_memset(&vec, 0, sizeof ( vec));

     stages = (vp_api_io_stage_t*) (vp_os_calloc(
        NB_STAGES + params->pre_processing_stages_list->length + params->post_processing_stages_list->length,
        sizeof (vp_api_io_stage_t)
    ));
    
    vec.src_picture = params->in_pic;
    vec.dst_picture = params->out_pic;
    
    vp_os_memset(&tcpConf, 0, sizeof ( tcpConf));
    tcpConf.maxPFramesPerIFrame = 30;
    tcpConf.frameMeanSize = 160*1024;
    tcpConf.tcpStageHasMoreData = FALSE;
    tcpConf.latencyDrop = 1;

	pipeline.name = "video_stage_pipeline";
    pipeline.nb_stages = 0;
    pipeline.stages = &stages[0];

    //ENCODED FRAME PROCESSING STAGES
    stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
    stages[pipeline.nb_stages].cfg     = (void *) &icc;
    stages[pipeline.nb_stages++].funcs = video_com_multisocket_funcs;
	printf("thread_video_stage: adding multisocket_funcs as %d th stage (input type) to video_pipeline of AR.Drone with version %d.%d.%d\n", pipeline.nb_stages, ardroneVersion.majorVersion, ardroneVersion.minorVersion, ardroneVersion.revision );

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void *) &tcpConf;
    stages[pipeline.nb_stages++].funcs = video_stage_tcp_funcs;
	printf("thread_video_stage: adding tcp_funcs to video_pipeline as %d nd stage (filter type) of AR.Drone\n", pipeline.nb_stages );
    
    // Record Encoded video
    if(1 == ARDRONE_VERSION())
    {

        ardrone_academy_stage_recorder_config.dest.pipeline = video_pipeline_handle;
        ardrone_academy_stage_recorder_config.dest.stage    = pipeline.nb_stages;
        stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
        stages[pipeline.nb_stages].cfg     = (void*)&ardrone_academy_stage_recorder_config;
        stages[pipeline.nb_stages++].funcs = ardrone_academy_stage_recorder_funcs;
		printf("thread_video_stage: adding academy_recorder_funcs to video_pipeline as %d rd stage (filter type) of AR.Drone\n", pipeline.nb_stages );

    }
    else
    {
      // Nothing to do for AR.Drone 2 as we have a separated thread for recording
    }

    //PRE-DECODING STAGES ==> recording, ...
    for(i=0;i<params->pre_processing_stages_list->length;i++){
        stages[pipeline.nb_stages].type    = params->pre_processing_stages_list->stages_list[i].type;
        stages[pipeline.nb_stages].cfg     = params->pre_processing_stages_list->stages_list[i].cfg;
        stages[pipeline.nb_stages++].funcs = params->pre_processing_stages_list->stages_list[i].funcs;
    }
	printf("thread_video_stage: adding pre_processing_stages_list to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void *)&merge_slices_cfg;
    stages[pipeline.nb_stages++].funcs = video_stage_merge_slices_funcs;
	printf("thread_video_stage: adding merge_slices_funcs to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    //DECODING STAGES
    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void*) &vec;
    stages[pipeline.nb_stages++].funcs = video_decoding_funcs;
	printf("thread_video_stage: adding video_decoding_funcs to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    //POST-DECODING STAGES ==> transformation, display, ...
    for(i=0;i<params->post_processing_stages_list->length;i++){
        stages[pipeline.nb_stages].type    = params->post_processing_stages_list->stages_list[i].type;
        stages[pipeline.nb_stages].cfg     = params->post_processing_stages_list->stages_list[i].cfg;
        stages[pipeline.nb_stages++].funcs = params->post_processing_stages_list->stages_list[i].funcs;
    }
	printf("thread_video_stage: adding post_processing_stages_list to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );


    if (!ardrone_tool_exit()) {
        PRINT("\nvideo stage thread initialisation\n\n");

        res = vp_api_open(&pipeline, &video_pipeline_handle);

        if (SUCCEED(res)) {
            int loop = SUCCESS;
            out.status = VP_API_STATUS_PROCESSING;

            while (!ardrone_tool_exit() && (loop == SUCCESS)) {
                if (video_stage_in_pause) {
                    vp_os_mutex_lock(&video_stage_mutex);
                    icc.num_retries = VIDEO_MAX_RETRIES;
                    vp_os_cond_wait(&video_stage_condition);
                    vp_os_mutex_unlock(&video_stage_mutex);
                }

                if (SUCCEED(vp_api_run(&pipeline, &out))) {
                    if ((out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING)) {
                        loop = SUCCESS;
                    }
                } else loop = -1; // Finish this thread
            }
            
            vp_os_free(params->pre_processing_stages_list->stages_list);
            vp_os_free(params->post_processing_stages_list->stages_list);
            vp_os_free(params->pre_processing_stages_list);
            vp_os_free(params->post_processing_stages_list);
            
            vp_os_free(params->in_pic);
            vp_os_free(params->out_pic);
            
            vp_os_free(params);
            
            vp_api_close(&pipeline, &video_pipeline_handle);
        }
    }

    PRINT("\nvideo stage thread ended\n\n");

    return (THREAD_RET) 0;
}
Example #10
DEFINE_THREAD_ROUTINE(academy_upload, data)
{
	char *directoryList = NULL;
	char *fileList = NULL;
	char dirname[ACADEMY_MAX_FILENAME];
	_ftp_t *academy_ftp = NULL;
	_ftp_status academy_status;
	char *ptr = NULL;
	academy_upload_t *academy = (academy_upload_t *)data;

	printf("Start thread %s\n", __FUNCTION__);

	while( academy_upload_started && !ardrone_tool_exit() )
	{
		vp_os_mutex_lock(&academy->mutex);
		vp_os_memset(&academy->user, 0, sizeof(academy_user_t));
		academy->callback = &academy_upload_private_callback;
		academy->connected = FALSE;
		academy_upload_resetState(academy);
		vp_os_cond_wait(&academy->cond);
		vp_os_mutex_unlock(&academy->mutex);

		while(academy_upload_started && academy->connected)
		{
			academy_status = FTP_FAIL;
			academy_upload_nextState(academy);

			switch(academy->state.state)
			{
				case ACADEMY_STATE_CONNECTION:
					{
						struct dirent *dirent = NULL;
						DIR *dir = NULL;
						academy_status = FTP_FAIL;

						// Check if flight_* directory exist in local dir
						if((dir = opendir(flight_dir)) != NULL)
						{
							struct stat statbuf;
							while(FTP_FAILED(academy_status) && (dirent = readdir(dir)) != NULL)
							{
								if((strncmp(dirent->d_name, "flight_", strlen("flight_")) == 0))
								{
									char local_dir[ACADEMY_MAX_FILENAME];
									sprintf(dirname, "%s", dirent->d_name);
									sprintf(local_dir, "%s/%s", flight_dir, dirname);
									if((stat(local_dir, &statbuf) == 0) && S_ISDIR(statbuf.st_mode))
										academy_status = FTP_SUCCESS;
								}
							}
                            closedir(dir);
						}

						if(FTP_SUCCEDED(academy_status))
						{
							if(academy_ftp == NULL)
								academy_ftp = ftpConnectFromName(ACADEMY_SERVERNAME, ACADEMY_PORT, academy->user.username, academy->user.password, &academy_status);
						}
					}
					break;

				case ACADEMY_STATE_PREPARE_PROCESS:
					academy_status = ftpCd(academy_ftp, "/Uploaded");
					if(FTP_FAILED(academy_status))
					{
						ftpMkdir(academy_ftp, "/Uploaded");
						academy_status = ftpCd(academy_ftp, "/Uploaded");
					}

					if(FTP_SUCCEDED(academy_status))
					{
						academy_status = ftpList(academy_ftp, &directoryList, NULL);
						if(FTP_SUCCEDED(academy_status))
						{
							bool_t found = FALSE;
							char *next_dir = NULL;

							while(!found && (ptr = academy_get_next_item_with_prefix(directoryList, &next_dir, "flight_", TRUE)))
							{
								if(strcmp(ptr, dirname) == 0)
								{
									found = TRUE;
									academy_upload_setState(academy, ACADEMY_STATE_FINISH_PROCESS);
								}
							}

							if(directoryList != NULL)
							{
								vp_os_free(directoryList);
								directoryList = NULL;
							}
						}
					}
					break;

				case ACADEMY_STATE_PROCESS:
					academy_status = ftpCd(academy_ftp, "/Uploading");
					if(FTP_FAILED(academy_status))
					{
						ftpMkdir(academy_ftp, "/Uploading");
						academy_status = ftpCd(academy_ftp, "/Uploading");
					}

					if(FTP_SUCCEDED(academy_status))
					{
						ftpMkdir(academy_ftp, dirname);
						academy_status = ftpCd(academy_ftp, dirname);
						if(FTP_SUCCEDED(academy_status))
						{
							char local_dir[ACADEMY_MAX_FILENAME];
							struct dirent *dirent = NULL;
							DIR *dir = NULL;

							sprintf(local_dir, "%s/%s", flight_dir, dirname);
							if((dir = opendir(local_dir)) != NULL)
							{
								char local_filename[ACADEMY_MAX_FILENAME];
								struct stat statbuf;
								while(FTP_SUCCEDED(academy_status) && ((dirent = readdir(dir)) != NULL))
								{
									if((strncmp(dirent->d_name, "picture_", strlen("picture_")) == 0) || (strncmp(dirent->d_name, "userbox_", strlen("userbox_")) == 0))
									{
										sprintf(local_filename, "%s/%s", local_dir, dirent->d_name);
										if((stat(local_filename, &statbuf) == 0) && !S_ISDIR(statbuf.st_mode))
										{
											PA_DEBUG("Put %s from local directory to server...", dirent->d_name);
											academy_status = ftpPut(academy_ftp, local_filename, dirent->d_name, 1, NULL);
											if(FTP_SUCCEDED(academy_status))
												PA_DEBUG("OK\n");
											else
												PA_DEBUG("ERROR\n");
										}
									}
								}
                                
                                closedir(dir);
							}
						}
					}
					break;

				case ACADEMY_STATE_FINISH_PROCESS:
					{
						char local_dir[ACADEMY_MAX_FILENAME];
						char src[ACADEMY_MAX_FILENAME];
						char dest[ACADEMY_MAX_FILENAME];
						sprintf(src, "/Uploading/%s", dirname);
						sprintf(dest, "/Uploaded/%s", dirname);
						academy_status = ftpRename(academy_ftp, src, dest);

						// Remember to delete the local files as well
						academy_status = FTP_FAIL;
						sprintf(local_dir, "%s/%s", flight_dir, dirname);
						if(!ftw(local_dir, &academy_remove, 1))
						{
							rmdir(local_dir);
							academy_status = FTP_SUCCESS;
						}
					}
					break;

				case ACADEMY_STATE_DISCONNECTION:
					if(academy_ftp != NULL)
						ftpClose(&academy_ftp);
					academy_status = FTP_SUCCESS;
					break;

				default:
					// Nothing to do
					break;
			}

			if(FTP_SUCCEDED(academy_status))
			{
				PA_DEBUG("continue state from %d to %d\n", academy->state.state, (academy->state.state + 1) % ACADEMY_STATE_MAX);
				academy_upload_stateOK(academy);
			}
			else
			{
				PA_DEBUG("stop\n");
				academy_upload_stateERROR(academy);

				if(fileList != NULL)
				{
					vp_os_free(fileList);
					fileList = NULL;
				}

				if(academy_ftp)
					ftpClose(&academy_ftp);

				vp_os_delay(1000);
			}
		}

		if(fileList != NULL)
		{
			vp_os_free(fileList);
			fileList = NULL;
		}

		if(academy_ftp)
			ftpClose(&academy_ftp);
	} // while

	THREAD_RETURN(C_OK);
}
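The cleanup in ACADEMY_STATE_FINISH_PROCESS above walks the local flight directory with ftw() before the rmdir(). The academy_remove callback itself is not shown; with the standard ftw(3) signature it could look roughly like this (hypothetical, not the SDK's actual implementation):

#include <ftw.h>
#include <stdio.h>

/* Hypothetical ftw(3) callback for ftw(local_dir, &academy_remove, 1):
   delete each regular file so the directory can be rmdir()'d afterwards. */
static int academy_remove(const char *fpath, const struct stat *sb, int typeflag)
{
    (void)sb;
    if (typeflag == FTW_F)
        return remove(fpath);   /* a non-zero return stops the walk and fails it */
    return 0;
}
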
/* Includes the Pthread for Win32 Library if necessary */
#include <VP_Os/vp_os_signal.h>
#if defined USE_PTHREAD_FOR_WIN32
#pragma comment (lib,"pthreadVC2.lib")
#endif

int main(int argc, char** argv)
{
  int cnt = 0;

  C_RESULT res;          // functions return value

  WSADATA wsaData = {0};
  int iResult = 0;

  /* Initializes Windows socket subsystem */
  iResult = WSAStartup(MAKEWORD(2, 2), &wsaData);
  if (iResult != 0) { wprintf(L"WSAStartup failed: %d\n", iResult); return 1; }
#ifdef VIDEO_RECORD
  cv_writer = cvCreateVideoWriter("D:\\out.avi",-1,15,cvSize(320,240),1);
#endif
  cv_image = cvCreateImage(cvSize(320,240),IPL_DEPTH_8U,3);
  //Kinect_Init();
	/* 	Initializes communication sockets	*/	
    hSocketThread = CreateThread(NULL,0,Socket_ProcessThread,NULL,0,0);
   // while (1);
		res = test_drone_connection();
		if(res!=0){
			printf("%s","Could not detect the drone version ... press <Enter> to try connecting anyway.\n");
			getchar();
			//WSACleanup(); exit(-1);
		}

		res = ardrone_tool_setup_com( NULL );
		if( FAILED(res) ){  PRINT("Wifi initialization failed.\n");  return -1;	}

	/* Initialises ARDroneTool */
	   res = ardrone_tool_init(argc, argv);

    /*record video*/
     
     
   /* Keeps sending AT commands to control the drone as long as 
		everything is OK */

      while( VP_SUCCEEDED(res) && ardrone_tool_exit() == FALSE) {
#ifdef VIDEO_RECORD
        if ( cnt++ > 3000 )
          cvReleaseVideoWriter(&cv_writer);
#endif
        res = ardrone_tool_update();     
    
      }
      printf("out\n");
   /**/
      //close Socket Thread
    if ( hSocketThread!=NULL ) {
      WaitForSingleObject(hSocketThread,INFINITE);
      CloseHandle(hSocketThread);
    }
      res = ardrone_tool_shutdown();
    //Kinect_UnInit();
	  WSACleanup();

  /* Bye bye */
  cvReleaseVideoWriter(&cv_writer);
	system("cls");
	  printf("End of SDK Demo for Windows\n");
	  getchar();
	  return VP_SUCCEEDED(res) ? 0 : -1;
}
Example #12
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  PRINT("\n   Record Video\n\n");
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
  
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation\n\n");
    
    if ((sockfd = socket(AF_UNIX,SOCK_STREAM,0)) < 0)
   {
       printf("error creating socket");
   }
   bzero((char *) &serv_addr, sizeof(serv_addr));
   serv_addr.sun_family = AF_UNIX;
   unlink("/tmp/video");
   strcpy(serv_addr.sun_path, "/tmp/video");
   servlen=strlen(serv_addr.sun_path) + 
                     sizeof(serv_addr.sun_family);
   if(bind(sockfd,(struct sockaddr *)&serv_addr,servlen)<0)
   {
       printf("error binding socket");
   } 
   
   listen(sockfd,5);
   
   clilen = sizeof(serv_addr);
   
   newsockfd = accept(
        sockfd,(struct sockaddr *)&serv_addr,&clilen);
   
   printf("1 sockfd, %d", sockfd);
    
   
   PRINT("\n   Video socket initialisation\n\n");
    
    
    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
              loop = SUCCESS;
            }
          }
          else loop = -1; // Finish this thread
          
          //printf("%s\n", pixbuf_data);
          //printf("!%d  sizeof *\n", sizeof(pixbuf_data));
          
          //printf("!%d --- %d\n", n, errno);
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }
  

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
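This variant of the video stage publishes frames over a Unix-domain socket bound to /tmp/video. A minimal sketch of a client that connects to it and drains the stream (buffer size and names are illustrative):

#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <sys/un.h>

/* Minimal reader for the /tmp/video stream served by the thread above. */
int main(void)
{
    struct sockaddr_un addr;
    char buf[4096];
    ssize_t n;
    int fd = socket(AF_UNIX, SOCK_STREAM, 0);
    if (fd < 0) { perror("socket"); return 1; }

    memset(&addr, 0, sizeof(addr));
    addr.sun_family = AF_UNIX;
    strcpy(addr.sun_path, "/tmp/video");

    if (connect(fd, (struct sockaddr *)&addr, sizeof(addr)) < 0) {
        perror("connect");
        return 1;
    }
    while ((n = read(fd, buf, sizeof(buf))) > 0) {
        /* consume n bytes of pixel data here */
    }
    close(fd);
    return 0;
}
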
DEFINE_THREAD_ROUTINE( ardrone_control, nomParams )
{
	C_RESULT res_wait_navdata = C_OK;
	C_RESULT res = C_OK;
	uint32_t retry, current_ardrone_state;
	int32_t next_index_in_queue;
	ardrone_control_event_ptr_t  current_event;
	
	retry = 0;
	current_event = NULL;
	
	DEBUG_PRINT_SDK("Thread control in progress...\n");
	control_socket.is_disable = TRUE;
	
	ardrone_control_connect_to_drone();

	while( bContinue 
          && !ardrone_tool_exit() )
	{
		vp_os_mutex_lock(&control_mutex);
		control_waited = TRUE;

		/* Wait for new navdata to be received. */
		res_wait_navdata = vp_os_cond_timed_wait(&control_cond, 1000);
		vp_os_mutex_unlock(&control_mutex);

		/*
		 * In case of timeout on the navdata, we assume that there was a problem
		 * with the Wifi connection.
		 * It is then safer to close and reopen the control socket (TCP 5559) since
		 * some OS might stop giving data but not signal any disconnection.
		 */
		if(VP_FAILED(res_wait_navdata))
		{
			DEBUG_PRINT_SDK("Timeout while waiting for new navdata.\n");
			if(!control_socket.is_disable)
				control_socket.is_disable = TRUE;
		}
			
		if(control_socket.is_disable)
		{
			ardrone_control_connect_to_drone();
		}
		
		if(VP_SUCCEEDED(res_wait_navdata) && (!control_socket.is_disable))
		{
			vp_os_mutex_lock(&control_mutex);
			current_ardrone_state = ardrone_state;
			control_waited = FALSE;
			vp_os_mutex_unlock(&control_mutex);
			
			if( ardrone_tool_exit() ) // Test if we received a signal because we are quitting the application
				THREAD_RETURN( res );
			
 			if( current_event == NULL )
			{
				vp_os_mutex_lock(&event_queue_mutex);
				next_index_in_queue = (end_index_in_queue + 1) & (ARDRONE_CONTROL_MAX_NUM_EVENTS_IN_QUEUE - 1);
				
				if( next_index_in_queue != start_index_in_queue )
				{ // There's an event to process
					current_event = ardrone_control_event_queue[next_index_in_queue];
					if( current_event != NULL )
					{
						if( current_event->ardrone_control_event_start != NULL )
						{
							current_event->ardrone_control_event_start( current_event );
						}
					}
					end_index_in_queue = next_index_in_queue;
					
					retry = 0;
				}
				
				vp_os_mutex_unlock(&event_queue_mutex);
			}
			
			if( current_event != NULL )
			{
				switch( current_event->event )
				{
					case ARDRONE_UPDATE_CONTROL_MODE:
						res = ardrone_control_soft_update_run( current_ardrone_state, (ardrone_control_soft_update_event_t*) current_event );
						break;
						
					case PIC_UPDATE_CONTROL_MODE:
						res = ardrone_control_soft_update_run( current_ardrone_state, (ardrone_control_soft_update_event_t*) current_event );
						break;
						
					case LOGS_GET_CONTROL_MODE:
						break;
						
					case CFG_GET_CONTROL_MODE:
					case CUSTOM_CFG_GET_CONTROL_MODE: /* multiconfiguration support */
						res = ardrone_control_configuration_run( current_ardrone_state, (ardrone_control_configuration_event_t*) current_event );
						break;
						
					case ACK_CONTROL_MODE:
						res = ardrone_control_ack_run( current_ardrone_state, (ardrone_control_ack_event_t *) current_event);
						break;
						
					default:
						break;
				}
				
				if( VP_FAILED(res) )
				{
					retry ++;
					if( retry > current_event->num_retries)
						current_event->status = ARDRONE_CONTROL_EVENT_FINISH_FAILURE;
				}
				else
				{
					retry = 0;
				}
				
				if( current_event->status & ARDRONE_CONTROL_EVENT_FINISH )
				{
 					if( current_event->ardrone_control_event_end != NULL )
						current_event->ardrone_control_event_end( current_event );
					
 					/* Make the thread read a new event on the next loop iteration */
					current_event = NULL;
				}
				else
				{
					/* Not changing 'current_event' makes the loop process the same
					 * event when the next navdata packet arrives. */
				}
			}
		}
  }// while

  /* Stephane : Bug fix - mutexes were previously destroyed by another thread,
  which made ardrone_control crash. */
	  vp_os_mutex_destroy(&event_queue_mutex);
	  vp_os_cond_destroy(&control_cond);
	  vp_os_mutex_destroy(&control_mutex);

  vp_com_close(COM_CONTROL(), &control_socket);

  THREAD_RETURN( res );
}
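The event queue above wraps its indices with `& (ARDRONE_CONTROL_MAX_NUM_EVENTS_IN_QUEUE - 1)`, which only works when the queue length is a power of two. A small standalone illustration of that masking (the value 32 is assumed, not taken from the SDK):

#include <stdio.h>

#define ARDRONE_CONTROL_MAX_NUM_EVENTS_IN_QUEUE 32  /* assumed; must be a power of two */

int main(void)
{
    int idx  = ARDRONE_CONTROL_MAX_NUM_EVENTS_IN_QUEUE - 1;              /* last slot */
    int next = (idx + 1) & (ARDRONE_CONTROL_MAX_NUM_EVENTS_IN_QUEUE - 1);
    printf("%d -> %d\n", idx, next);   /* prints "31 -> 0": the index wraps around */
    return 0;
}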
Example #14
DEFINE_THREAD_ROUTINE(video_stage, data)
{
	C_RESULT res;
	
	vp_api_io_pipeline_t    pipeline;
	vp_api_io_data_t        out;
	vp_api_io_stage_t       stages[NB_STAGES];
	
	vp_api_picture_t picture;
	
	vlib_stage_decoding_config_t    vec;

	vp_os_memset(&icc,          0, sizeof( icc ));
	vp_os_memset(&vec,          0, sizeof( vec ));
	vp_os_memset(&picture,      0, sizeof( picture ));

//#ifdef RECORD_VIDEO
//	video_stage_recorder_config_t vrc;
//#endif
	
	/// Picture configuration
	picture.format        = /*PIX_FMT_YUV420P */ PIX_FMT_RGB565;
	
	picture.width         = 512;
	picture.height        = 512;
	picture.framerate     = 15;
	
	picture.y_buf   = vp_os_malloc( picture.width * picture.height * 2);
	picture.cr_buf  = NULL;
	picture.cb_buf  = NULL;

	picture.y_line_size   = picture.width * 2;
	picture.cb_line_size  = 0;
	picture.cr_line_size  = 0;
		
	icc.com                 = COM_VIDEO();
	icc.buffer_size         = 100000;
	icc.protocol            = VP_COM_UDP;
	COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);
	
	vec.width               = 512;
	vec.height              = 512;
	vec.picture             = &picture;
	vec.luma_only           = FALSE;
	vec.block_mode_enable   = TRUE;
	
	pipeline.nb_stages = 0;
	
	stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
	stages[pipeline.nb_stages].cfg     = (void *)&icc;
	stages[pipeline.nb_stages].funcs   = video_com_funcs;
	pipeline.nb_stages++;
	
	stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
	stages[pipeline.nb_stages].cfg     = (void*)&vec;
	stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;
	pipeline.nb_stages++;
	
/*
#ifdef RECORD_VIDEO
	stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
	stages[pipeline.nb_stages].cfg     = (void*)&vrc;
	stages[pipeline.nb_stages].funcs   = video_recorder_funcs;
	pipeline.nb_stages++;
#endif
*/
	stages[pipeline.nb_stages].type    = VP_API_OUTPUT_LCD;
	stages[pipeline.nb_stages].cfg     = (void*)&vec;
	stages[pipeline.nb_stages].funcs   = video_stage_funcs;
	pipeline.nb_stages++;
		
	pipeline.stages = &stages[0];
		
	if( !ardrone_tool_exit() )
	{
		PRINT("\nvideo stage thread initialisation\n\n");
		
		// NICK
		video_stage_init();

		res = vp_api_open(&pipeline, &pipeline_handle);

		
		if( VP_SUCCEEDED(res) )
		{

			int loop = VP_SUCCESS;
			out.status = VP_API_STATUS_PROCESSING;
#ifdef RECORD_VIDEO		    
			{
				DEST_HANDLE dest;
				dest.stage = 2;
				dest.pipeline = pipeline_handle;
				vp_api_post_message( dest, PIPELINE_MSG_START, NULL, (void*)NULL);
			}
#endif			

			video_stage_in_pause = FALSE;
			
			while( !ardrone_tool_exit() && (loop == VP_SUCCESS) )
			{

				if(video_stage_in_pause)
				{
					vp_os_mutex_lock(&video_stage_mutex);
					icc.num_retries = VIDEO_MAX_RETRIES;
					vp_os_cond_wait(&video_stage_condition);
					vp_os_mutex_unlock(&video_stage_mutex);
				}

				if( VP_SUCCEEDED(vp_api_run(&pipeline, &out)) ) {
					if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
						loop = VP_SUCCESS;
					}
				}
				else loop = -1; // Finish this thread
			}
			
			vp_api_close(&pipeline, &pipeline_handle);
		}
	}
	
	PRINT("   video stage thread ended\n\n");
	
	return (THREAD_RET)0;
}
DEFINE_THREAD_ROUTINE(video_recorder, data)
{
  if (1 >= ARDRONE_VERSION ()) // Run only on AR.Drone 2 and later
    {
      return (THREAD_RET)0;
    }
  C_RESULT res = C_OK;
    
  vp_api_io_pipeline_t pipeline;
  vp_api_io_data_t out;
  vp_api_io_stage_t stages[3];
  PIPELINE_HANDLE video_recorder_pipeline_handle;
  video_recorder_thread_param_t *video_recorder_thread_param = (video_recorder_thread_param_t *)data;

  video_stage_tcp_config_t tcpConf;
    
  if(video_recorder_thread_param != NULL)
  {
      CHANGE_THREAD_PRIO(video_recorder, video_recorder_thread_param->priority);
      video_stage_encoded_recorder_config.finish_callback = video_recorder_thread_param->finish_callback; 
  }
    
  vp_os_memset (&record_icc, 0x0, sizeof (record_icc));
  record_icc.com = COM_VIDEO ();
  record_icc.buffer_size = (8*1024);
  record_icc.protocol = VP_COM_TCP;
  record_icc.forceNonBlocking = &tcpConf.tcpStageHasMoreData;
  COM_CONFIG_SOCKET_VIDEO (&record_icc.socket, VP_COM_CLIENT, VIDEO_RECORDER_PORT, wifi_ardrone_ip);
  record_icc.timeoutFunc = &video_stage_encoded_recorder_com_timeout;
  record_icc.timeoutFuncAfterSec = 10;
  
  vp_os_memset (&tcpConf, 0, sizeof (tcpConf));
  tcpConf.maxPFramesPerIFrame = 30;
  tcpConf.frameMeanSize = 160*1024;
  tcpConf.latencyDrop = 0;
    
  pipeline.nb_stages = 0;
  pipeline.stages = &stages[0];
    
  // Com
  stages[pipeline.nb_stages].type = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg  = (void *)&record_icc;
  stages[pipeline.nb_stages++].funcs = video_com_funcs;

  // TCP
  stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg  = (void *) &tcpConf;
  stages[pipeline.nb_stages++].funcs = video_stage_tcp_funcs;
    
  // Record
  stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg  = (void *) &video_stage_encoded_recorder_config;
  stages[pipeline.nb_stages++].funcs = video_encoded_recorder_funcs;
    
  while (FALSE == isInit)
    {
        printf ("Waiting for init\n");
    }
    
  if (! ardrone_tool_exit ())
    {
      PRINT ("Video recorder thread initialisation\n");
      res = vp_api_open (&pipeline, &video_recorder_pipeline_handle);
        
      if (SUCCEED (res))
        {
          int loop = SUCCESS;
          out.status = VP_API_STATUS_PROCESSING;
            
          while (! ardrone_tool_exit () && (SUCCESS == loop))
            {
              if (video_recorder_in_pause)
                {
                  vp_os_mutex_lock (&video_recorder_mutex);
                  record_icc.num_retries = VIDEO_MAX_RETRIES;
                  vp_os_cond_wait (&video_recorder_condition);
                  vp_os_mutex_unlock (&video_recorder_mutex);
                }

              if (SUCCEED (vp_api_run (&pipeline, &out)))
                {
                  if ((VP_API_STATUS_PROCESSING == out.status) ||
                      (VP_API_STATUS_STILL_RUNNING == out.status))
                    {
                      loop = SUCCESS;
                    }
                  else loop = -1;
                }
              else loop = -1;
            }
          vp_api_close (&pipeline, &video_recorder_pipeline_handle);
        }
    }
  PRINT ("Video recorder thread ended\n");
    
  return (THREAD_RET)res;
}
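The `while (FALSE == isInit)` loop above busy-waits (and prints) until initialisation completes. Since vp_os_delay() is already used elsewhere in these examples, a gentler variant could sleep between checks; a sketch only, assuming the same isInit flag and vp_os_delay() helper:

/* Illustrative variant of the wait-for-init loop: poll at ~10 Hz instead of spinning. */
static void wait_for_init(void)
{
    while (FALSE == isInit)
    {
        printf("Waiting for init\n");
        vp_os_delay(100);   /* milliseconds */
    }
}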
Example #16
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  // The output stage is declared with type VP_API_OUTPUT_SDL, but this example
  // plugs in the GTK rendering functions (vp_stages_output_gtk_funcs).
  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation\n\n");

    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
              loop = SUCCESS;
            }
            else loop = -1; // Stop on any other pipeline status, as the recorder loop above does
          }
          else loop = -1; // Finish this thread
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
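DEFINE_THREAD_ROUTINE only defines the routine; in an ardrone_tool application it still has to be listed in the thread table and started explicitly, which Example #17 below does with START_THREAD/JOIN_THREAD. The fragment below is a minimal sketch of that wiring, assuming the 2.x SDK hook signatures and an illustrative priority value.

#include <VP_Api/vp_api_thread_helper.h>
#include <ardrone_tool/ardrone_tool.h>

PROTO_THREAD_ROUTINE(video_stage, data);   /* routine defined above */

BEGIN_THREAD_TABLE
  THREAD_TABLE_ENTRY(video_stage, 20)      /* priority value is illustrative */
END_THREAD_TABLE

C_RESULT ardrone_tool_init_custom (void)   /* 2.x SDK hook signature (assumed) */
{
  START_THREAD(video_stage, NULL);         /* the routine above ignores its data argument */
  return C_OK;
}

C_RESULT ardrone_tool_shutdown_custom (void)
{
  JOIN_THREAD(video_stage);
  return C_OK;
}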
Example #17
DEFINE_THREAD_ROUTINE(app_main, data)
{

	C_RESULT res = C_FAIL;
	vp_com_wifi_config_t* config = NULL;

	JNIEnv* env = NULL;

	if (g_vm) {
		(*g_vm)->AttachCurrentThread (g_vm, (JNIEnv **) &env, NULL);
	}

	bContinue = TRUE;
	mobile_main_param_t *param = data;

    video_recorder_thread_param_t video_recorder_param;
    video_recorder_param.priority = VIDEO_RECORDER_THREAD_PRIORITY;
    video_recorder_param.finish_callback = param->academy_download_callback_func;

	vp_os_memset(&ardrone_info, 0x0, sizeof(ardrone_info_t));

	while ((config = (vp_com_wifi_config_t *)wifi_config()) != NULL && strlen(config->itfName) == 0)
	{
		//Waiting for wifi initialization
		vp_os_delay(250);

		if (ardrone_tool_exit() == TRUE) {
			if (param != NULL && param->callback != NULL) {
				param->callback(env, param->obj, ARDRONE_MESSAGE_DISCONNECTED);
			}
			if (g_vm) {
				(*g_vm)->DetachCurrentThread (g_vm);
			}
			return (THREAD_RET)0;
		}
	}



	vp_os_memcpy(&ardrone_info.drone_address[0], config->server, strlen(config->server));


    while (-1 == getDroneVersion (param->root_dir, &ardrone_info.drone_address[0], &ardroneVersion))
    {
        LOGD (TAG, "Getting AR.Drone version");
        vp_os_delay (250);
    }

    sprintf(&ardrone_info.drone_version[0], "%u.%u.%u", ardroneVersion.majorVersion, ardroneVersion.minorVersion, ardroneVersion.revision);

    LOGD (TAG, "ARDrone Version : %s\n", &ardrone_info.drone_version[0]);
	LOGI(TAG, "Drone Family: %d", ARDRONE_VERSION());

	res = ardrone_tool_setup_com( NULL );

	if( FAILED(res) )
	{
		LOGII("Setup com failed");
		LOGW(TAG, "Wifi initialization failed. It means either:");
		LOGW(TAG, "\t* you're not root (it's mandatory because you can set up wifi connection only as root)\n");
		LOGW(TAG, "\t* wifi device is not present (on your pc or on your card)\n");
		LOGW(TAG, "\t* you set the wrong name for wifi interface (for example rausb0 instead of wlan0) \n");
		LOGW(TAG, "\t* ap is not up (reboot card or remove wifi usb dongle)\n");
		LOGW(TAG, "\t* wifi device has no antenna\n");

		if (param != NULL && param->callback != NULL) {
			param->callback(env, param->obj, ARDRONE_MESSAGE_ERR_NO_WIFI);
		}
	}
	else
	{
		LOGII("ardrone_tool_setup_com [OK]");
		#define NB_IPHONE_PRE_STAGES 0

		#define NB_IPHONE_POST_STAGES 2

        //Alloc structs
        specific_parameters_t * params         = (specific_parameters_t *)vp_os_calloc(1, sizeof(specific_parameters_t));
        specific_stages_t * iphone_pre_stages  = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
        specific_stages_t * iphone_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
        vp_api_picture_t  * in_picture         = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));
        vp_api_picture_t  * out_picture        = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));


        in_picture->width          = STREAM_WIDTH;
        in_picture->height         = STREAM_HEIGHT;

        out_picture->framerate     = 20;
        out_picture->format        = PIX_FMT_RGB565;
        out_picture->width         = STREAM_WIDTH;
        out_picture->height        = STREAM_HEIGHT;

        // With PIX_FMT_RGB565 the whole frame is packed into y_buf at 2 bytes
        // per pixel (hence the *2 factors); the chroma planes are unused.
        out_picture->y_buf         = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT * 2 );
        out_picture->cr_buf        = NULL;
        out_picture->cb_buf        = NULL;

        out_picture->y_line_size   = STREAM_WIDTH * 2;
        out_picture->cb_line_size  = 0;
        out_picture->cr_line_size  = 0;

        //Define the list of stages size
        iphone_pre_stages->length  = NB_IPHONE_PRE_STAGES;
        iphone_post_stages->length = NB_IPHONE_POST_STAGES;

        //Alloc the lists
        iphone_pre_stages->stages_list  = NULL;
        iphone_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(iphone_post_stages->length,sizeof(vp_api_io_stage_t));

        //Fill the POST-stages------------------------------------------------------
        int postStageNumber = 0;

        // vlat, vec and ovsc are file-scope stage configurations (latency
        // estimation, video decoder and OpenGL output) declared elsewhere
        // in this example's source file.
        vp_os_memset (&vlat, 0x0, sizeof (vlat));
        vlat.state = 0;
        vlat.last_decoded_frame_info = (void *)&vec;
        iphone_post_stages->stages_list[postStageNumber].type  = VP_API_FILTER_DECODER;
        iphone_post_stages->stages_list[postStageNumber].cfg   = (void *)&vlat;
        iphone_post_stages->stages_list[postStageNumber++].funcs = vp_stages_latency_estimation_funcs;

        vp_os_memset (&ovsc, 0x0, sizeof (ovsc));
        ovsc.video_decoder = &vec;
        iphone_post_stages->stages_list[postStageNumber].type  = VP_API_OUTPUT_LCD;
        iphone_post_stages->stages_list[postStageNumber].cfg   = (void *)&ovsc;
        iphone_post_stages->stages_list[postStageNumber++].funcs = opengl_video_stage_funcs;

        params->in_pic = in_picture;
        params->out_pic = out_picture;
        params->pre_processing_stages_list = iphone_pre_stages;
        params->post_processing_stages_list = iphone_post_stages;

#if USE_THREAD_PRIORITIES
        params->needSetPriority = 1;
        params->priority = VIDEO_THREAD_PRIORITY;
#else
        params->needSetPriority = 0;
        params->priority = 0;
#endif


		START_THREAD(video_stage, params);

        if (IS_LEAST_ARDRONE2)
        {
            START_THREAD (video_recorder, (void *)&video_recorder_param);
            LOGD(TAG, "Video recorder thread start [OK]");
        }



		res = ardrone_tool_init(&ardrone_info.drone_address[0], strlen(&ardrone_info.drone_address[0]), NULL, param->app_name, param->user_name, param->root_dir, param->flight_dir, param->flight_storing_size, param->academy_download_callback_func);

		if(SUCCEED(res))
		{

			ardrone_tool_input_add(&virtual_gamepad);

			if (param != NULL && param->callback != NULL) {
				param->callback(env, param->obj, ARDRONE_MESSAGE_CONNECTED_OK);
			}
		} else {
			if (param != NULL && param->callback != NULL) {
				param->callback(env, param->obj, ARDRONE_MESSAGE_UNKNOWN_ERR);
			}

			bContinue = FALSE;
		}

		res = ardrone_tool_set_refresh_time(1000 / kAPS);

#if USE_THREAD_PRIORITIES
        CHANGE_THREAD_PRIO (app_main, AT_THREAD_PRIORITY);
        CHANGE_THREAD_PRIO (navdata_update, NAVDATA_THREAD_PRIORITY);
        CHANGE_THREAD_PRIO (ardrone_control, NAVDATA_THREAD_PRIORITY);
#endif

		while( SUCCEED(res) && bContinue == TRUE )
		{
			ardrone_tool_update();
		}

		JOIN_THREAD(video_stage);

        if (IS_LEAST_ARDRONE2)
        {
            JOIN_THREAD (video_recorder);
        }

	    /* Unregistering for the current device */
	    ardrone_tool_input_remove( &virtual_gamepad );

		res = ardrone_tool_shutdown();
		LOGD(TAG, "AR.Drone tool shutdown [OK]");

		if (param != NULL && param->callback != NULL) {
			param->callback(env, param->obj, ARDRONE_MESSAGE_DISCONNECTED);
		}
	}

	/* Release the JNI global reference before freeing the parameter block:
	   param aliases data, so it must not be dereferenced after vp_os_free(). */
	if (env != NULL && param != NULL) {
		(*env)->DeleteGlobalRef(env, param->obj);
	}

	vp_os_free (data);
	data = NULL;
	param = NULL;

	if (g_vm) {
		(*g_vm)->DetachCurrentThread (g_vm);
	}

	LOGI(TAG, "app_main thread has been stopped.");

	return (THREAD_RET) res;
}
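Throughout the routine, param->callback is invoked with the attached JNIEnv, the stored global jobject and an ARDRONE_MESSAGE_* status code. A minimal sketch of such a callback is shown below; the Java method name onDroneMessage and its (I)V signature are assumptions for illustration, not part of the examples above.

#include <jni.h>

/* Hypothetical connection-status callback matching the three-argument calls
   made above: forwards the ARDRONE_MESSAGE_* code to a Java method. */
void example_ardrone_message_callback (JNIEnv *env, jobject obj, int message)
{
    if (env == NULL || obj == NULL) {
        return;
    }

    jclass cls = (*env)->GetObjectClass (env, obj);
    /* "onDroneMessage" / "(I)V" are illustrative; use the real method of your Java class. */
    jmethodID mid = (*env)->GetMethodID (env, cls, "onDroneMessage", "(I)V");

    if (mid != NULL) {
        (*env)->CallVoidMethod (env, obj, mid, (jint) message);
    }
    (*env)->DeleteLocalRef (env, cls);
}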