Пример #1
0
static void* navdata_loop(void *arg)
{
	uint8_t msg[NAVDATA_BUFFER_SIZE];
    navdata_unpacked_t navdata_unpacked;
    unsigned int cks, navdata_cks, sequence = NAVDATA_SEQUENCE_DEFAULT-1;
    int sockfd = -1, addr_in_size;
	struct sockaddr_in *my_addr, *from;

	INFO("NAVDATA thread starting (thread=%d)...\n", (int)pthread_self());

    navdata_open_server();

	addr_in_size = sizeof(struct sockaddr_in);

    navdata_t* navdata = (navdata_t*) &msg[0];

	from = (struct sockaddr_in *)vp_os_malloc(addr_in_size);
	my_addr = (struct sockaddr_in *)vp_os_malloc(addr_in_size);
    assert(from);
    assert(my_addr);

	vp_os_memset((char *)my_addr,(char)0,addr_in_size);
	my_addr->sin_family = AF_INET;
	my_addr->sin_addr.s_addr = htonl(INADDR_ANY);
	my_addr->sin_port = htons( NAVDATA_PORT );

	if((sockfd = socket (AF_INET, SOCK_DGRAM, 0)) < 0){
        INFO ("socket: %s\n", strerror(errno));
        goto fail;
	};

	if(bind(sockfd, (struct sockaddr *)my_addr, addr_in_size) < 0){
        INFO ("bind: %s\n", strerror(errno));
        goto fail;
	};

	{
		struct timeval tv;
		// 1 second timeout
		tv.tv_sec   = 1;
		tv.tv_usec  = 0;
		setsockopt( sockfd, SOL_SOCKET, SO_RCVTIMEO, &tv, sizeof(tv));
	}

	INFO("[NAVDATA] Ready to receive incomming data\n");

	while ( nav_thread_alive ) {
		int size;
		
	
		
		size = recvfrom (sockfd, &msg[0], NAVDATA_BUFFER_SIZE, 0, (struct sockaddr *)from,
                         (socklen_t *)&addr_in_size);
		
		
		//INFO ("SIZE %d \n", size);
		
	if( size == 0)
            {
                INFO ("Navdata lost connection \n");
                navdata_open_server();
                sequence = NAVDATA_SEQUENCE_DEFAULT-1;
            }
            
//         if (size == 24)
// 	{
// 	     const char cmds[] = "AT*COMWDG=0\r";
//              at_write ((int8_t*)cmds, strlen( cmds ));
// 	     
// 	     const char cmds2[] = "AT*CONFIG=1,\"general:navdata_demo\",\"TRUE\"\r";
//              at_write ((int8_t*)cmds2, strlen( cmds2 ));
// 	}
	
	if( navdata->header == NAVDATA_HEADER )
            {
                mykonos_state = navdata->ardrone_state;
			
                if(ardrone_get_mask_from_state(navdata->ardrone_state, ARDRONE_COM_WATCHDOG_MASK) )
                    { 
                       // INFO ("[NAVDATA] Detect com watchdog\n");
                        sequence = NAVDATA_SEQUENCE_DEFAULT-1; 

                        if( ardrone_get_mask_from_state(navdata->ardrone_state, ARDRONE_NAVDATA_BOOTSTRAP) == FALSE )
                            {
			        //INFO ("[NAVDATA] Send com watchdog\n");

                                const char cmds[] = "AT*COMWDG=0\r";
                                at_write ((int8_t*)cmds, strlen( cmds ));
                            }
                    } 
 
                if( navdata->sequence > sequence ) 
                    { 
                        if ( ardrone_get_mask_from_state( mykonos_state, ARDRONE_NAVDATA_DEMO_MASK ))
                            {
                              ardrone_navdata_unpack_all(&navdata_unpacked, navdata, &navdata_cks);
                                cks = ardrone_navdata_compute_cks( &msg[0], size - sizeof(navdata_cks_t) );

                                if( cks != navdata_cks )
				    {
				      INFO("NAVADATA wrong CKS\n");
				    }
				    //INFO("VALUE = %f \n", navdata_unpacked.navdata_demo.theta);
                            }
                    } 
                else 
                    { 
                        INFO ("[Navdata] Sequence pb : %d (distant) / %d (local)\n", navdata->sequence, sequence); 
                    } 

                sequence = navdata->sequence;
            }
	}
 fail:
    vp_os_free(from);
    vp_os_free(my_addr);

    if (sockfd >= 0){
        close(sockfd);
    }

    if (navdata_udp_socket >= 0){
        close(navdata_udp_socket);
        navdata_udp_socket = -1;
    }

	INFO("NAVDATA thread stopping\n");
    return NULL;
}
Пример #2
0
/**
 * Video pipeline thread: receives the drone video stream over UDP, decodes
 * it with the vlib decoder, converts YUV420P to RGB24 and feeds the output
 * stage, looping until ardrone_tool_exit() or a stage fails.
 *
 * Before opening the pipeline it also creates an AF_UNIX server socket at
 * /tmp/video and blocks in accept() — presumably a local frame consumer
 * connects there. NOTE(review): sockfd, serv_addr, servlen, clilen and
 * newsockfd are not declared in this function, so they appear to be
 * file-scope globals — confirm; newsockfd is never used or closed here.
 *
 * @param data thread argument (unused in this body).
 * @return (THREAD_RET)0 when the thread ends.
 */
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
#ifdef RECORD_VIDEO
  video_stage_recorder_config_t   vrc;
#endif
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  /* YUV420P: full-resolution luma plane, quarter-size chroma planes.
   * NOTE(review): vp_os_malloc results are not checked here. */
  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  /* Input stage: UDP client socket to the drone's video port. */
  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  /* Decoder stage: decodes into 'picture' above. */
  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;
#ifdef RECORD_VIDEO
  vrc.fp = NULL;
#endif

  /* Build the pipeline: socket input -> [recorder] -> vlib decoder ->
   * YUV2RGB -> output stage. */
  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

#ifdef RECORD_VIDEO
  PRINT("\n   Record Video\n\n");
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vrc;
  stages[pipeline.nb_stages].funcs   = video_recorder_funcs;

  pipeline.nb_stages++;
#endif // RECORD_VIDEO
  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  /* NOTE(review): stage type says SDL but funcs are the GTK output
   * functions — looks intentional in this SDK sample, but confirm. */
  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
  
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation\n\n");
    
    /* Local AF_UNIX server; errors are only printed, the code continues
     * regardless (NOTE(review): bind/accept failures are not fatal here). */
    if ((sockfd = socket(AF_UNIX,SOCK_STREAM,0)) < 0)
   {
       printf("error creating socket");
   }
   bzero((char *) &serv_addr, sizeof(serv_addr));
   serv_addr.sun_family = AF_UNIX;
   unlink("/tmp/video");
   strcpy(serv_addr.sun_path, "/tmp/video");
   servlen=strlen(serv_addr.sun_path) + 
                     sizeof(serv_addr.sun_family);
   if(bind(sockfd,(struct sockaddr *)&serv_addr,servlen)<0)
   {
       printf("error binding socket");
   } 
   
   listen(sockfd,5);
   
   clilen = sizeof(serv_addr);
   
   /* Blocks until a local client connects to /tmp/video. */
   newsockfd = accept(
        sockfd,(struct sockaddr *)&serv_addr,&clilen);
   
   printf("1 sockfd, %d", sockfd);
    
   
   PRINT("\n   Video socket initialisation\n\n");
    
    
    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      /* Run the pipeline until the tool exits or a stage reports failure. */
      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
              loop = SUCCESS;
            }
          }
          else loop = -1; // Finish this thread
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }
  

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
Пример #3
0
C_RESULT ardrone_tool_init_custom (void)
{
    // Reset global data
    bzero(&g_navdata, sizeof(g_navdata));
    g_autopilot = FALSE;
    //int32_t altMax = 1400;
    //uint32_t altMin = 1300;
   // ARDRONE_TOOL_CONFIGURATION_ADDEVENT (altitude_max, &altMax, NULL);
    // Register a new game controller
    ardrone_tool_input_add( &gamepad );
    
    ardrone_application_default_config.navdata_demo = TRUE;
    
    ardrone_application_default_config.navdata_options = 
    (NAVDATA_OPTION_MASK(NAVDATA_DEMO_TAG) | 
        NAVDATA_OPTION_MASK(NAVDATA_VISION_DETECT_TAG) | 
        NAVDATA_OPTION_MASK(NAVDATA_GAMES_TAG) | 
        NAVDATA_OPTION_MASK(NAVDATA_MAGNETO_TAG) | 
        NAVDATA_OPTION_MASK(NAVDATA_HDVIDEO_STREAM_TAG) | 
        NAVDATA_OPTION_MASK(NAVDATA_WIFI_TAG));
    
    vp_api_picture_t *in_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t));
    vp_api_picture_t *out_picture = (vp_api_picture_t *)vp_os_calloc (1, sizeof (vp_api_picture_t));
    
    if (IS_ARDRONE2)
    {
        /*ardrone_application_default_config.video_codec = H264_720P_CODEC;
        in_picture->width = 1280;  
        in_picture->height = 720;*/
		ardrone_application_default_config.video_codec = H264_360P_CODEC;
        in_picture->width = 640;  
        in_picture->height = 360;
    }
    else
    {
        ardrone_application_default_config.video_codec = P264_CODEC;
        in_picture->width = 320;  
        in_picture->height = 240;
    }
    
    ardrone_application_default_config.video_channel = ZAP_CHANNEL_HORI;
    ardrone_application_default_config.bitrate_ctrl_mode = 1;
    
    
    /**
    * Allocate useful structures :
    * - index counter
    * - thread param structure and its substructures
    */
    uint8_t stages_index = 0;
    
    specific_parameters_t *params = (specific_parameters_t *)vp_os_calloc (1, sizeof (specific_parameters_t));
    specific_stages_t *example_pre_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t));
    specific_stages_t *example_post_stages = (specific_stages_t *)vp_os_calloc (1, sizeof (specific_stages_t));
    
    out_picture->framerate = 20; // Drone 1 only, must be equal to drone target FPS
    out_picture->format = PIX_FMT_RGB24; 
    out_picture->width = in_picture->width;
    out_picture->height = in_picture->height;
    
    // One buffer, three bytes per pixel
    out_picture->y_buf = vp_os_malloc ( out_picture->width * out_picture->height * 3 );
    out_picture->cr_buf = NULL;
    out_picture->cb_buf = NULL;
    out_picture->y_line_size = out_picture->width * 3;
    out_picture->cb_line_size = 0;
    out_picture->cr_line_size = 0;
    
    example_pre_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (1, sizeof (vp_api_io_stage_t));
    example_post_stages->stages_list = (vp_api_io_stage_t *)vp_os_calloc (1, sizeof (vp_api_io_stage_t));
    
    /**
    * Fill the POST stage list
    * - name and type are debug infos only
    * - cfg is the pointer passed as "cfg" in all the stages calls
    * - funcs is the pointer to the stage functions
    */
    
    stages_index = 0;
    

    vp_os_memset (&dispCfg, 0, sizeof (video_cfg_t));
    
    dispCfg.width = in_picture->width;
    dispCfg.height = in_picture->height;
    
    example_post_stages->stages_list[stages_index].name = "Decoded display"; // Debug info
    example_post_stages->stages_list[stages_index].type = VP_API_OUTPUT_SDL; // Debug info
    example_post_stages->stages_list[stages_index].cfg  = &dispCfg;
    example_post_stages->stages_list[stages_index++].funcs  = video_funcs;
    
    example_post_stages->length = stages_index;
    

    
    /**
    * Fill thread params for the ardrone_tool video thread
    *  - in_pic / out_pic are reference to our in_picture / out_picture
    *  - pre/post stages lists are references to our stages lists
    *  - needSetPriority and priority are used to control the video thread priority
    *   -> if needSetPriority is set to 1, the thread will try to set its priority to "priority"
    *   -> if needSetPriority is set to 0, the thread will keep its default priority (best on PC)
    */
    params->in_pic = in_picture;
    params->out_pic = out_picture;
    params->pre_processing_stages_list  = example_pre_stages;
    params->post_processing_stages_list = example_post_stages;
    params->needSetPriority = 0;
    params->priority = 0;
    
	//bool_t isOutdoor = TRUE;
	//ARDRONE_TOOL_CONFIGURATION_ADDEVENT (outdoor, &isOutdoor, NULL);

    /**
    * Start the video thread (and the video recorder thread for AR.Drone 2)
    */
    START_THREAD(video_stage, params);
    video_stage_init();
    
    //Set max and minimum altitude flight
   	//uint32_t altMin = 500;
    //ARDRONE_TOOL_CONFIGURATION_ADDEVENT (altitude_min, &altMin, NULL);
	

    /** START THE KEYBOARD CONTROL THREAD**/

    //START_THREAD(KeyboardController, NULL); 
    
    video_stage_resume_thread ();
    
    return C_OK;
}
PROTO_THREAD_ROUTINE(app,argv)
{
  vp_api_picture_t picture;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_stages_input_file_config_t     ifc;
  vp_stages_output_sdl_config_t     osc;
  mjpeg_stage_decoding_config_t     dec;

  vp_os_memset(&ifc,0,sizeof(vp_stages_input_file_config_t));

  ifc.name            = ((char**)argv)[1];
  ifc.buffer_size     = (ACQ_WIDTH*ACQ_HEIGHT*3)/2;

  osc.width           = WINDOW_WIDTH;
  osc.height          = WINDOW_HEIGHT;
  osc.bpp             = 16;
  osc.window_width    = WINDOW_WIDTH;
  osc.window_height   = WINDOW_HEIGHT;
  osc.pic_width       = ACQ_WIDTH;
  osc.pic_height      = ACQ_HEIGHT;
  osc.y_size          = ACQ_WIDTH*ACQ_HEIGHT;
  osc.c_size          = (ACQ_WIDTH*ACQ_HEIGHT) >> 2;

  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = ACQ_WIDTH;
  picture.height        = ACQ_HEIGHT;
  picture.framerate     = 15;

  picture.y_buf   = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT );
  picture.cr_buf  = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );
  picture.cb_buf  = vp_os_malloc( ACQ_WIDTH*ACQ_HEIGHT/4 );

  picture.y_line_size   = ACQ_WIDTH;
  picture.cb_line_size  = ACQ_WIDTH / 2;
  picture.cr_line_size  = ACQ_WIDTH / 2;

  picture.y_pad         = 0;
  picture.c_pad         = 0;

  dec.picture           = &picture;
  dec.out_buffer_size   = 4096;

  stages[0].type      = VP_API_INPUT_FILE;
  stages[0].cfg       = (void *)&ifc;
  stages[0].funcs     = vp_stages_input_file_funcs;

  stages[1].type      = VP_API_FILTER_DECODER;
  stages[1].cfg       = (void *)&dec;
  stages[1].funcs     = mjpeg_decoding_funcs;

  stages[2].type      = VP_API_OUTPUT_SDL;
  stages[2].cfg       = (void *)&osc;
  stages[2].funcs     = vp_stages_output_sdl_funcs;

  pipeline.nb_stages  = NB_STAGES;
  pipeline.stages     = &stages[0];

  vp_api_open(&pipeline, &pipeline_handle);
  out.status = VP_API_STATUS_PROCESSING;
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING))
  {
    vp_os_delay( 30 );
  }

  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
/**
 * \brief Runs a control event managing the configuration file.
 * Handles reading the configuration information sent by the drone on the
 * TCP 'control' socket on port 5559, including the multi-configuration
 * (custom configuration list) variant of the exchange.
 * Called by the 'ardrone_control' thread loop in 'ardrone_control.c'.
 *
 * @param ardrone_state Drone state bitfield; ARDRONE_COMMAND_MASK is the
 *                      ACK bit the request/receive protocol toggles.
 * @param event         State-machine context; event->config_state selects
 *                      the case below and is advanced via set_state().
 * @return C_OK while progressing normally, C_FAIL when no data could be
 *         read or the state is unknown.
 */
C_RESULT ardrone_control_configuration_run( uint32_t ardrone_state, ardrone_control_configuration_event_t* event )
{
	int32_t buffer_size;
	char *start_buffer, *buffer;
	char *value, *param, *c;
	bool_t ini_buffer_end, ini_buffer_more;
	C_RESULT res = C_OK;
	int bytes_to_read;

	/* Multiconfiguration support: buffer grown on demand, persists across
	 * calls while the custom configuration list is being received. */
	static char *custom_configuration_list_buffer = NULL;
	static int  custom_configuration_list_buffer_size = 0;
	static int  custom_configuration_list_data_size = 0;
	#define CUSTOM_CFG_BLOCK_SIZE 1024

	int i;

	switch( event->config_state )
	{
		case CONFIG_REQUEST_INI:
			if( ardrone_state & ARDRONE_COMMAND_MASK )
			{
				/* If the ACK bit is set, we must ask the drone to clear it before asking the configuration. */
				DEBUG_CONFIG_RECEIVE("%s %s %i - Requesting ack. bit reset.\n",__FILE__,__FUNCTION__,__LINE__);
				ardrone_at_update_control_mode(ACK_CONTROL_MODE, 0);
			}
			else
			{
				DEBUG_CONFIG_RECEIVE("%s %s %i - Requesting the configuration.\n",__FILE__,__FUNCTION__,__LINE__);
				ini_buffer_index = 0;
				ardrone_at_configuration_get_ctrl_mode();
				set_state(event, CONFIG_RECEIVE_INI);
			}
			break;

		/* multi-configuration support */
		case CUSTOM_CONFIG_REQUEST:
			DEBUG_CONFIG_RECEIVE("%s %s %i\n",__FILE__,__FUNCTION__,__LINE__);
			if( ardrone_state & ARDRONE_COMMAND_MASK )
			{
				DEBUG_CONFIG_RECEIVE("%s %s %i - Requesting ack. bit reset.\n",__FILE__,__FUNCTION__,__LINE__);
				ardrone_at_update_control_mode(ACK_CONTROL_MODE, 0);
			}
			else
			{
				DEBUG_CONFIG_RECEIVE("%s %s %i - Requesting the custom config.\n",__FILE__,__FUNCTION__,__LINE__);
				custom_configuration_list_buffer = NULL;
				custom_configuration_list_buffer_size = 0;
				custom_configuration_list_data_size = 0;
				ardrone_at_custom_configuration_get_ctrl_mode();
				set_state(event, CUSTOM_CONFIG_RECEIVE);
			}
			break;

		case CONFIG_RECEIVE_INI:
			DEBUG_CONFIG_RECEIVE("%s %s %i - Trying to read the control socket.\n",__FILE__,__FUNCTION__,__LINE__);
			// Read data coming from ARDrone.
			// Fix: reserve one byte for the NUL terminator written below —
			// a full read used to make ini_buffer[buffer_size] write one
			// past the end of the array.
			buffer_size = ARDRONE_CONTROL_CONFIGURATION_INI_BUFFER_SIZE - ini_buffer_index - 1;
			res = ardrone_control_read( &ini_buffer[ini_buffer_index], &buffer_size );
			if(VP_SUCCEEDED(res))
			{
				buffer_size += ini_buffer_index;

				ini_buffer[buffer_size]=0;  // Makes sure the buffer ends with a zero

				// Only safe to print as a string after NUL termination above.
				DEBUG_CONFIG_RECEIVE("Received <<%s>>\n",&ini_buffer[ini_buffer_index]);

				// Parse received data, one "key = value" line at a time.
				if( buffer_size > 0 )
				{
					ini_buffer_end  = ini_buffer[buffer_size-1] == '\0'; // End of configuration data
					ini_buffer_more = ini_buffer[buffer_size-1] != '\n'; // Need more configuration data to end parsing current line

					start_buffer = (char*)&ini_buffer[0];
					buffer = strchr(start_buffer, '\n');

					while( buffer != NULL )
					{
						value = start_buffer;
						param = strchr(value, '=');

						*buffer = '\0';

						// Fix: skip malformed lines with no '=' instead of
						// dereferencing a NULL strchr() result.
						if( param != NULL )
						{
							*param  = '\0';

							// Remove spaces at end of strings
							c = param - 1;
							while( *c == ' ' )
							{
								*c = '\0';
								c  = c-1;
							}

							c = buffer-1;
							while( *c == ' ' )
							{
								*c = '\0';
								c  = c-1;
							}

							// Remove spaces at beginning of strings
							while( *value == ' ' )
							{
								value = value + 1;
							}

							param = param + 1;
							while( *param == ' ' )
							{
								param = param + 1;
							}

							DEBUG_CONFIG_RECEIVE(" Decoding from control stream : <%s>=<%s>\n",value,param);
							iniparser_setstring( event->ini_dict, value, param );
						}

						start_buffer = buffer + 1;
						buffer = strchr(start_buffer, '\n');
					}

					if( ini_buffer_end )
					{
						/* Reading the configuration is finished, we ask the
						 * drone to clear the ACK bit */
						ardrone_at_update_control_mode(ACK_CONTROL_MODE, 0);
						set_state(event, CONFIG_RECEIVED);
					}
					else if( ini_buffer_more )
					{
						// Keep the incomplete trailing line at the start of
						// the buffer. Fix: compute the size with pointer
						// arithmetic instead of casting pointers to int32_t,
						// which truncates addresses on 64-bit hosts.
						int32_t size = (int32_t)((char*)&ini_buffer[buffer_size] - start_buffer);
						vp_os_memcpy( &ini_buffer[0], start_buffer, size );
						ini_buffer_index = size;
					}
					else
					{
						/* The previous line was completely consumed -  next data
						 * from the network will be stored at the beginning of the
						 * buffer. */
						ini_buffer_index = 0;
					}
				}
			}
			else
			{
				res = C_FAIL;

				DEBUG_CONFIG_RECEIVE("%s %s %i - no data to read from the control socket.\n",__FILE__,__FUNCTION__,__LINE__);

				/* Re-issue the request once the drone has cleared the ACK bit. */
				if(!(ardrone_state & ARDRONE_COMMAND_MASK))
					set_state(event, CONFIG_REQUEST_INI);
			}
		break;

		case CUSTOM_CONFIG_RECEIVE:
			DEBUG_CONFIG_RECEIVE("%s %s %i - Trying to read the control socket.\n",__FILE__,__FUNCTION__,__LINE__);
			DEBUG_CONFIG_RECEIVE("%s %s %i\n",__FILE__,__FUNCTION__,__LINE__);
			/* Read data until a zero byte is received */

			/* Clear old data from the buffer when receiving the first bytes */
				if (custom_configuration_list_buffer!=NULL && custom_configuration_list_data_size==0){
					vp_os_memset(custom_configuration_list_buffer,0,custom_configuration_list_buffer_size);
				}

			/* Enlarge the buffer if needed */
				if (custom_configuration_list_data_size==custom_configuration_list_buffer_size)
				{
					custom_configuration_list_buffer_size += CUSTOM_CFG_BLOCK_SIZE;
					custom_configuration_list_buffer = vp_os_realloc(custom_configuration_list_buffer,custom_configuration_list_buffer_size);
				}
			/* Read data at the end of the buffer */
				bytes_to_read = custom_configuration_list_buffer_size - custom_configuration_list_data_size;
				res = ardrone_control_read( &custom_configuration_list_buffer[custom_configuration_list_data_size], &bytes_to_read );
				DEBUG_CONFIG_RECEIVE(" Reading %i bytes of the custom config. list\n",bytes_to_read);
				for (i=0;i<bytes_to_read;i++) { DEBUG_CONFIG_RECEIVE("<%i>",custom_configuration_list_buffer[i]); }

			/* Searches a zero among the newly received bytes */
				if (VP_SUCCEEDED(res))
				{
					custom_configuration_list_data_size += bytes_to_read;
					for (i=bytes_to_read;i>0;i--){
						if (custom_configuration_list_buffer[custom_configuration_list_data_size - i] == 0) {

							/* Reading the configuration is finished, we ask the
							 * drone to clear the ACK bit */
							DEBUG_CONFIG_RECEIVE("Finished receiving\n");
							ardrone_at_update_control_mode(ACK_CONTROL_MODE, 0);
							set_state(event, CUSTOM_CONFIG_RECEIVED);

							ardrone_control_reset_custom_configurations_list(available_configurations);

							ardrone_control_read_custom_configurations_list(custom_configuration_list_buffer,
																			custom_configuration_list_data_size,
																			available_configurations);

							/* Clean up */
								vp_os_sfree(&custom_configuration_list_buffer);
								custom_configuration_list_buffer_size = 0;
								custom_configuration_list_data_size = 0;
							res = C_OK;
							break;
						}
					}

				}
				else{
					/* No data are available on the control socket. */
					DEBUG_CONFIG_RECEIVE("%s %s %i - no data to read from the control socket.\n",__FILE__,__FUNCTION__,__LINE__);
					/* Reset the buffer */
					custom_configuration_list_data_size = 0;
					/* The request for the configuration file should have been acknowledged by the drone.
					 * If not, another request is sent.	*/
					if(!(ardrone_state & ARDRONE_COMMAND_MASK))   set_state(event, CUSTOM_CONFIG_REQUEST);
				}

				break;


		case CONFIG_RECEIVED:
		case CUSTOM_CONFIG_RECEIVED:
				/* We finished reading the configuration, we wait for the drone to reset the ACK bit */
				if( ardrone_state & ARDRONE_COMMAND_MASK )
				{
					/* If the ACK bit is set, we must ask the drone to clear it before asking the configuration. */
					PRINT("%s %s %i - Requesting ack. bit reset.\n",__FILE__,__FUNCTION__,__LINE__);
					ardrone_at_update_control_mode(ACK_CONTROL_MODE, 0);
				}
				else
				{
					PRINT("%s %s %i - Finished.\n",__FILE__,__FUNCTION__,__LINE__);
					event->status = ARDRONE_CONTROL_EVENT_FINISH_SUCCESS;
				}
				res = C_OK;
				break;


		default:
			res = C_FAIL;
			break;
	}

	return res;
}
Пример #6
0
C_RESULT ardrone_navdata_file_process( const navdata_unpacked_t* const pnd )
{
  uint32_t i;
  char str[50];
  int32_t* locked_ptr;
  screen_point_t* point_ptr;
  struct timeval time;

  gettimeofday(&time,NULL);

  if( navdata_file_private == NULL )
    return C_FAIL;

  if( ardrone_get_mask_from_state(pnd->ardrone_state, ARDRONE_NAVDATA_BOOTSTRAP) )
    return C_OK;

  if( navdata_file == NULL )
  {
    navdata_file = navdata_file_private;

    if( ardrone_get_mask_from_state(pnd->ardrone_state, ARDRONE_NAVDATA_DEMO_MASK) )
    {
      printf("Receiving navdata demo\n");
    }
    else
    {
      printf("Receiving all navdata\n");
    }
    ardrone_navdata_file_print_version();
  }

  // Handle the case where user asked for a new navdata file
  if( navdata_file != navdata_file_private )
  {
    fclose(navdata_file);
    navdata_file = navdata_file_private;

    if( ardrone_get_mask_from_state(pnd->ardrone_state, ARDRONE_NAVDATA_DEMO_MASK) )
    {
      printf("Receiving navdata demo\n");
    }
    else
    {
      printf("Receiving all navdata\n");
    }
    ardrone_navdata_file_print_version();
  }

	vp_os_memset(&str[0], 0, sizeof(str));

    fprintf( navdata_file,"\n" );
    fprintf( navdata_file, "%u; %u", (unsigned int) pnd->navdata_demo.ctrl_state, (unsigned int) pnd->ardrone_state );

    sprintf( str, "%d.%06d", (int)((pnd->navdata_time.time & TSECMASK) >> TSECDEC), (int)(pnd->navdata_time.time & TUSECMASK) );
    fprintf( navdata_file, ";%s", str );

    fprintf( navdata_file, "; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u; %04u",
                            (unsigned int) pnd->navdata_raw_measures.raw_accs[ACC_X],
                            (unsigned int) pnd->navdata_raw_measures.raw_accs[ACC_Y],
                            (unsigned int) pnd->navdata_raw_measures.raw_accs[ACC_Z],
                            (unsigned int) pnd->navdata_raw_measures.raw_gyros[GYRO_X],
                            (unsigned int) pnd->navdata_raw_measures.raw_gyros[GYRO_Y],
                            (unsigned int) pnd->navdata_raw_measures.raw_gyros[GYRO_Z],
                            (unsigned int) pnd->navdata_raw_measures.raw_gyros_110[0],
                            (unsigned int) pnd->navdata_raw_measures.raw_gyros_110[1],
                            (unsigned int) pnd->navdata_raw_measures.vbat_raw,
                            (unsigned int) pnd->navdata_phys_measures.alim3V3,
                            (unsigned int) pnd->navdata_phys_measures.vrefEpson,
                            (unsigned int) pnd->navdata_phys_measures.vrefIDG,
                            (unsigned int) pnd->navdata_raw_measures.flag_echo_ini,
                            (unsigned int) pnd->navdata_raw_measures.us_debut_echo,
                            (unsigned int) pnd->navdata_raw_measures.us_fin_echo,
                            (unsigned int) pnd->navdata_raw_measures.us_association_echo,
                            (unsigned int) pnd->navdata_raw_measures.us_distance_echo,
                            (unsigned int) pnd->navdata_raw_measures.us_courbe_temps,
                            (unsigned int) pnd->navdata_raw_measures.us_courbe_valeur,
                            (unsigned int) pnd->navdata_raw_measures.us_courbe_ref );

    fprintf( navdata_file, "; %f; %04u; % 5f; % 5f; % 5f; % 6f; % 6f; % 6f",
                            pnd->navdata_phys_measures.accs_temp,
                            (unsigned int)pnd->navdata_phys_measures.gyro_temp,
                            pnd->navdata_phys_measures.phys_accs[ACC_X],
                            pnd->navdata_phys_measures.phys_accs[ACC_Y],
                            pnd->navdata_phys_measures.phys_accs[ACC_Z],
                            pnd->navdata_phys_measures.phys_gyros[GYRO_X],
                            pnd->navdata_phys_measures.phys_gyros[GYRO_Y],
                            pnd->navdata_phys_measures.phys_gyros[GYRO_Z]);

    fprintf( navdata_file, "; % f; % f; % f",
                            pnd->navdata_gyros_offsets.offset_g[GYRO_X],
                            pnd->navdata_gyros_offsets.offset_g[GYRO_Y],
                            pnd->navdata_gyros_offsets.offset_g[GYRO_Z] );

    fprintf( navdata_file, "; % f; % f",
                            pnd->navdata_euler_angles.theta_a,
                            pnd->navdata_euler_angles.phi_a);

    fprintf( navdata_file, ";  %04d; %04d; %04d; %04d; %04d; %06d; %06d; %06d",
                            (int) pnd->navdata_references.ref_theta,
                            (int) pnd->navdata_references.ref_phi,
                            (int) pnd->navdata_references.ref_psi,
                            (int) pnd->navdata_references.ref_theta_I,
                            (int) pnd->navdata_references.ref_phi_I,
                            (int) pnd->navdata_references.ref_pitch,
                            (int) pnd->navdata_references.ref_roll,
                            (int) pnd->navdata_references.ref_yaw );

    fprintf( navdata_file, "; % 8.6f; % 8.6f; % 8.6f",
                            pnd->navdata_trims.euler_angles_trim_theta,
                            pnd->navdata_trims.euler_angles_trim_phi,
                            pnd->navdata_trims.angular_rates_trim_r );

    fprintf( navdata_file, "; %04d; %04d; %04d; %04d; %04d; %04u",
                            (int) pnd->navdata_rc_references.rc_ref_pitch,
                            (int) pnd->navdata_rc_references.rc_ref_roll,
                            (int) pnd->navdata_rc_references.rc_ref_yaw,
                            (int) pnd->navdata_rc_references.rc_ref_gaz,
                            (int) pnd->navdata_rc_references.rc_ref_ag,
                            (unsigned int) ui_get_user_input() );

    fprintf( navdata_file, "; %03u; %03u; %03u; %03u; %03u; %03u; %03u; %03u; %03u; %03d; % f; % f; %03d; %03d; %03d; % f; %03d; %03d; %03d; % f; %04d; %04d; %04d; %04d",
                            (unsigned int) pnd->navdata_pwm.motor1,
                            (unsigned int) pnd->navdata_pwm.motor2,
                            (unsigned int) pnd->navdata_pwm.motor3,
                            (unsigned int) pnd->navdata_pwm.motor4,
                            (unsigned int) pnd->navdata_pwm.sat_motor1,
                            (unsigned int) pnd->navdata_pwm.sat_motor2,
                            (unsigned int) pnd->navdata_pwm.sat_motor3,
                            (unsigned int) pnd->navdata_pwm.sat_motor4,
                            (int) pnd->navdata_pwm.gaz_feed_forward,
                            pnd->navdata_pwm.gaz_altitude,
                            pnd->navdata_pwm.altitude_integral,
                            pnd->navdata_pwm.vz_ref,
                            (int) pnd->navdata_pwm.u_pitch,
                            (int) pnd->navdata_pwm.u_roll,
                            (int) pnd->navdata_pwm.u_yaw,
                            pnd->navdata_pwm.yaw_u_I,
                            (int) pnd->navdata_pwm.u_pitch_planif,
                            (int) pnd->navdata_pwm.u_roll_planif,
                            (int) pnd->navdata_pwm.u_yaw_planif,
                            pnd->navdata_pwm.u_gaz_planif,
                            (int) pnd->navdata_pwm.current_motor1,
                            (int) pnd->navdata_pwm.current_motor2,
                            (int) pnd->navdata_pwm.current_motor3,
                            (int) pnd->navdata_pwm.current_motor4 );

    fprintf( navdata_file, "; %04d; %f; %04d; %04u",
                            (int) pnd->navdata_altitude.altitude_vision,
                            pnd->navdata_altitude.altitude_vz,
                            (int) pnd->navdata_altitude.altitude_ref,
                            (unsigned int) pnd->navdata_altitude.altitude_raw );

   fprintf( navdata_file, "; %f; %f; %f; %f; %f; %04u; %f; %f; %04u",
                            pnd->navdata_altitude.obs_accZ,
                            pnd->navdata_altitude.obs_alt,
                            pnd->navdata_altitude.obs_x.v[0],
                            pnd->navdata_altitude.obs_x.v[1],
                            pnd->navdata_altitude.obs_x.v[2],
                            pnd->navdata_altitude.obs_state,
														pnd->navdata_altitude.est_vb.v[0],
                            pnd->navdata_altitude.est_vb.v[1],
                            pnd->navdata_altitude.est_state );

		vp_os_memset(&str[0], 0, sizeof(str));
    sprintf( str, "%d.%06d", (int)((pnd->navdata_vision.time_capture & TSECMASK) >> TSECDEC), (int)(pnd->navdata_vision.time_capture & TUSECMASK) );

    fprintf( navdata_file, "; % 8.6f; % 8.6f; % 8.6f; %u; %u; % f;% f;% f;% f; % f; % f; % f; %u; % f; % f; % f; % f; % f; % f; % d; %s",
                            pnd->navdata_vision_raw.vision_tx_raw,
                            pnd->navdata_vision_raw.vision_ty_raw,
                            pnd->navdata_vision_raw.vision_tz_raw,
                            (unsigned int) pnd->navdata_vision.vision_state,
                            (unsigned int) pnd->vision_defined,
                            pnd->navdata_vision.vision_phi_trim,
                            pnd->navdata_vision.vision_phi_ref_prop,
                            pnd->navdata_vision.vision_theta_trim,
                            pnd->navdata_vision.vision_theta_ref_prop,
                            pnd->navdata_vision.body_v.x,
                            pnd->navdata_vision.body_v.y,
                            pnd->navdata_vision.body_v.z,
                            (unsigned int) pnd->navdata_vision.new_raw_picture,
                            pnd->navdata_vision.theta_capture,
                            pnd->navdata_vision.phi_capture,
                            pnd->navdata_vision.psi_capture,
                            pnd->navdata_vision.delta_phi,
                            pnd->navdata_vision.delta_theta,
                            pnd->navdata_vision.delta_psi,
                            (int)pnd->navdata_vision.altitude_capture,
                            str );

    fprintf( navdata_file, "; %04u",
                             (unsigned int) pnd->navdata_demo.vbat_flying_percentage );

     fprintf( navdata_file, "; % f; % f; % f",
                             pnd->navdata_demo.theta,
                             pnd->navdata_demo.phi,
                             pnd->navdata_demo.psi );

     fprintf( navdata_file, "; %04d",
                             (int) pnd->navdata_demo.altitude );

     fprintf( navdata_file, "; %f; %f; %f ",
							 pnd->navdata_demo.vx,
                             pnd->navdata_demo.vy,
                             pnd->navdata_demo.vz );

     fprintf( navdata_file, "; %04u", (unsigned int) pnd->navdata_demo.num_frames );

     fprintf( navdata_file, "; %f; %f; %f; %f; %f; %f; %f; %f; %f", pnd->navdata_demo.detection_camera_rot.m11,
																	pnd->navdata_demo.detection_camera_rot.m12,
																	pnd->navdata_demo.detection_camera_rot.m13,
																	pnd->navdata_demo.detection_camera_rot.m21,
																	pnd->navdata_demo.detection_camera_rot.m22,
																	pnd->navdata_demo.detection_camera_rot.m23,
																	pnd->navdata_demo.detection_camera_rot.m31,
																	pnd->navdata_demo.detection_camera_rot.m32,
																	pnd->navdata_demo.detection_camera_rot.m33);

     fprintf( navdata_file, "; %f; %f; %f", pnd->navdata_demo.detection_camera_trans.x,
											pnd->navdata_demo.detection_camera_trans.y,
											pnd->navdata_demo.detection_camera_trans.z);

     fprintf( navdata_file, "; %04u; %04u",
                             (unsigned int) pnd->navdata_demo.detection_tag_index,
                             (unsigned int) pnd->navdata_demo.detection_camera_type);

     fprintf( navdata_file, "; %f; %f; %f; %f; %f; %f; %f; %f; %f", pnd->navdata_demo.drone_camera_rot.m11,
																	pnd->navdata_demo.drone_camera_rot.m12,
																	pnd->navdata_demo.drone_camera_rot.m13,
																	pnd->navdata_demo.drone_camera_rot.m21,
																	pnd->navdata_demo.drone_camera_rot.m22,
																	pnd->navdata_demo.drone_camera_rot.m23,
																	pnd->navdata_demo.drone_camera_rot.m31,
																	pnd->navdata_demo.drone_camera_rot.m32,
																	pnd->navdata_demo.drone_camera_rot.m33);

     fprintf( navdata_file, "; %f; %f; %f", pnd->navdata_demo.drone_camera_trans.x,
											pnd->navdata_demo.drone_camera_trans.y,
											pnd->navdata_demo.drone_camera_trans.z);

     fprintf( navdata_file, "; %d; %f; %f; %f; %f",
											(int)nd_iphone_flag,
											nd_iphone_phi,
											nd_iphone_theta,
											nd_iphone_gaz,
											nd_iphone_yaw);

	   fprintf( navdata_file, "; %d; %d; %d; %d; %d; %f; %d",
			   pnd->navdata_video_stream.quant,
			   pnd->navdata_video_stream.frame_size,
			   pnd->navdata_video_stream.frame_number,
			   pnd->navdata_video_stream.atcmd_ref_seq,
			   pnd->navdata_video_stream.atcmd_mean_ref_gap,
			   pnd->navdata_video_stream.atcmd_var_ref_gap,
			   pnd->navdata_video_stream.atcmd_ref_quality);



    locked_ptr  = (int32_t*) &pnd->navdata_trackers_send.locked[0];
    point_ptr   = (screen_point_t*) &pnd->navdata_trackers_send.point[0];

    for(i = 0; i < DEFAULT_NB_TRACKERS_WIDTH*DEFAULT_NB_TRACKERS_HEIGHT; i++)
    {
      fprintf( navdata_file, "; %d; %u; %u",
                            (int) *locked_ptr++,
                            (unsigned int) point_ptr->x,
                            (unsigned int) point_ptr->y );
      point_ptr++;
    }

    fprintf( navdata_file, "; %u", (unsigned int) pnd->navdata_vision_detect.nb_detected );
    for(i = 0 ; i < 4 ; i++)
    {
      fprintf( navdata_file, "; %u; %u; %u; %u; %u; %u; %f",
                            (unsigned int) pnd->navdata_vision_detect.type[i],
                            (unsigned int) pnd->navdata_vision_detect.xc[i],
                            (unsigned int) pnd->navdata_vision_detect.yc[i],
                            (unsigned int) pnd->navdata_vision_detect.width[i],
                            (unsigned int) pnd->navdata_vision_detect.height[i],
                            (unsigned int) pnd->navdata_vision_detect.dist[i],
                            pnd->navdata_vision_detect.orientation_angle[i]);
    }

    fprintf( navdata_file, "; %f; %f; %f; %f; %f; %f",
                          pnd->navdata_vision_perf.time_szo,
                          pnd->navdata_vision_perf.time_corners,
                          pnd->navdata_vision_perf.time_compute,
                          pnd->navdata_vision_perf.time_tracking,
                          pnd->navdata_vision_perf.time_trans,
                          pnd->navdata_vision_perf.time_update );

    for(i = 0 ; i < NAVDATA_MAX_CUSTOM_TIME_SAVE ; i++)
    {
      fprintf( navdata_file, "; %f", pnd->navdata_vision_perf.time_custom[i]);
	}

    fprintf( navdata_file, "; %d", (int) pnd->navdata_watchdog.watchdog );

    fprintf( navdata_file, "; %u", (unsigned int) num_picture_decoded );

    sprintf( str, "%d.%06d", (int)time.tv_sec, (int)time.tv_usec);
    fprintf( navdata_file, "; %s", str );

  return C_OK;
}
Пример #7
0
/**
 * Main application thread for the Android mobile client.
 *
 * Waits for the wifi interface to come up, fetches the drone version over
 * the network, sets up the communication layer, builds the video pipeline
 * (latency-estimation decoder stage + OpenGL renderer stage), starts the
 * video (and, on AR.Drone 2.x, the video recorder) threads, then pumps
 * ardrone_tool_update() until shutdown. Status transitions are reported to
 * Java through param->callback.
 *
 * @param data  A mobile_main_param_t* — ownership is taken: it is freed
 *              before the thread exits.
 * @return Thread exit status derived from the last tool-layer C_RESULT.
 *
 * Fixes vs. original:
 *  - the Java global reference param->obj was released AFTER
 *    vp_os_free(data), i.e. read through a freed pointer (use-after-free);
 *  - (*env)->DeleteGlobalRef was called even when no JVM was attached and
 *    env was still NULL (NULL dereference).
 */
DEFINE_THREAD_ROUTINE(app_main, data)
{
	C_RESULT res = C_FAIL;
	vp_com_wifi_config_t* config = NULL;

	JNIEnv* env = NULL;

	/* Attach this native thread to the JVM so the Java status callback can
	 * be invoked and the global reference released on exit. */
	if (g_vm) {
		(*g_vm)->AttachCurrentThread (g_vm, (JNIEnv **) &env, NULL);
	}

	bContinue = TRUE;
	mobile_main_param_t *param = data;

	video_recorder_thread_param_t video_recorder_param;
	video_recorder_param.priority = VIDEO_RECORDER_THREAD_PRIORITY;
	video_recorder_param.finish_callback = param->academy_download_callback_func;

	vp_os_memset(&ardrone_info, 0x0, sizeof(ardrone_info_t));

	/* Poll until the wifi interface exists; bail out (reporting a
	 * disconnect) if the tool asks us to exit in the meantime. */
	while ((config = (vp_com_wifi_config_t *)wifi_config()) != NULL && strlen(config->itfName) == 0)
	{
		vp_os_delay(250);

		if (ardrone_tool_exit() == TRUE) {
			if (param != NULL && param->callback != NULL) {
				param->callback(env, param->obj, ARDRONE_MESSAGE_DISCONNECTED);
			}
			return 0;
		}
	}

	/* NOTE(review): assumes config->server fits in drone_address (memcpy is
	 * unbounded w.r.t. the destination) — confirm the buffer size upstream. */
	vp_os_memcpy(&ardrone_info.drone_address[0], config->server, strlen(config->server));

	/* Retry until the drone version can be read (FTP fetch). */
	while (-1 == getDroneVersion (param->root_dir, &ardrone_info.drone_address[0], &ardroneVersion))
	{
		LOGD (TAG, "Getting AR.Drone version");
		vp_os_delay (250);
	}

	sprintf(&ardrone_info.drone_version[0], "%u.%u.%u", ardroneVersion.majorVersion, ardroneVersion.minorVersion, ardroneVersion.revision);

	LOGD (TAG, "ARDrone Version : %s\n", &ardrone_info.drone_version[0]);
	LOGI(TAG, "Drone Family: %d", ARDRONE_VERSION());

	res = ardrone_tool_setup_com( NULL );

	if( FAILED(res) )
	{
		LOGII("Setup com failed");
		LOGW(TAG, "Wifi initialization failed. It means either:");
		LOGW(TAG, "\t* you're not root (it's mandatory because you can set up wifi connection only as root)\n");
		LOGW(TAG, "\t* wifi device is not present (on your pc or on your card)\n");
		LOGW(TAG, "\t* you set the wrong name for wifi interface (for example rausb0 instead of wlan0) \n");
		LOGW(TAG, "\t* ap is not up (reboot card or remove wifi usb dongle)\n");
		LOGW(TAG, "\t* wifi device has no antenna\n");

		if (param != NULL && param->callback != NULL) {
			param->callback(env, param->obj, ARDRONE_MESSAGE_ERR_NO_WIFI);
		}
	}
	else
	{
		LOGII("ardrone_tool_setup_com [OK]");
		#define NB_IPHONE_PRE_STAGES 0

		#define NB_IPHONE_POST_STAGES 2

		/* Allocate the pipeline description structures. */
		specific_parameters_t * params         = (specific_parameters_t *)vp_os_calloc(1, sizeof(specific_parameters_t));
		specific_stages_t * iphone_pre_stages  = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
		specific_stages_t * iphone_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
		vp_api_picture_t  * in_picture         = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));
		vp_api_picture_t  * out_picture        = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));

		in_picture->width          = STREAM_WIDTH;
		in_picture->height         = STREAM_HEIGHT;

		/* Output picture: RGB565 => 2 bytes per pixel, packed in y_buf. */
		out_picture->framerate     = 20;
		out_picture->format        = PIX_FMT_RGB565;
		out_picture->width         = STREAM_WIDTH;
		out_picture->height        = STREAM_HEIGHT;

		out_picture->y_buf         = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT * 2 );
		out_picture->cr_buf        = NULL;
		out_picture->cb_buf        = NULL;

		out_picture->y_line_size   = STREAM_WIDTH * 2;
		out_picture->cb_line_size  = 0;
		out_picture->cr_line_size  = 0;

		/* Declare the number of stages in each list. */
		iphone_pre_stages->length  = NB_IPHONE_PRE_STAGES;
		iphone_post_stages->length = NB_IPHONE_POST_STAGES;

		iphone_pre_stages->stages_list  = NULL;
		iphone_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(iphone_post_stages->length,sizeof(vp_api_io_stage_t));

		/* Post-stage 0: latency estimation around the video decoder. */
		int postStageNumber = 0;

		vp_os_memset (&vlat, 0x0, sizeof (vlat));
		vlat.state = 0;
		vlat.last_decoded_frame_info= (void *)&vec;
		iphone_post_stages->stages_list[postStageNumber].type  = VP_API_FILTER_DECODER;
		iphone_post_stages->stages_list[postStageNumber].cfg   = (void *)&vlat;
		iphone_post_stages->stages_list[postStageNumber++].funcs = vp_stages_latency_estimation_funcs;

		/* Post-stage 1: OpenGL renderer fed by the decoder. */
		vp_os_memset (&ovsc, 0x0, sizeof (ovsc));
		ovsc.video_decoder = &vec;
		iphone_post_stages->stages_list[postStageNumber].type  = VP_API_OUTPUT_LCD;
		iphone_post_stages->stages_list[postStageNumber].cfg   = (void *)&ovsc;
		iphone_post_stages->stages_list[postStageNumber++].funcs = opengl_video_stage_funcs;

		params->in_pic = in_picture;
		params->out_pic = out_picture;
		params->pre_processing_stages_list = iphone_pre_stages;
		params->post_processing_stages_list = iphone_post_stages;

#if USE_THREAD_PRIORITIES
		params->needSetPriority = 1;
		params->priority = VIDEO_THREAD_PRIORITY;
#else
		params->needSetPriority = 0;
		params->priority = 0;
#endif

		START_THREAD(video_stage, params);

		if (IS_LEAST_ARDRONE2)
		{
			START_THREAD (video_recorder, (void *)&video_recorder_param);
			LOGD(TAG, "Video recorder thread start [OK]");
		}

		res = ardrone_tool_init(&ardrone_info.drone_address[0], strlen(&ardrone_info.drone_address[0]), NULL, param->app_name, param->user_name, param->root_dir, param->flight_dir, param->flight_storing_size, param->academy_download_callback_func);

		if(SUCCEED(res))
		{
			ardrone_tool_input_add(&virtual_gamepad);

			if (param != NULL && param->callback != NULL) {
				param->callback(env, param->obj, ARDRONE_MESSAGE_CONNECTED_OK);
			}
		} else {
			if (param != NULL && param->callback != NULL) {
				param->callback(env, param->obj, ARDRONE_MESSAGE_UNKNOWN_ERR);
			}

			bContinue = FALSE;
		}

		res = ardrone_tool_set_refresh_time(1000 / kAPS);

#if USE_THREAD_PRIORITIES
		CHANGE_THREAD_PRIO (app_main, AT_THREAD_PRIORITY);
		CHANGE_THREAD_PRIO (navdata_update, NAVDATA_THREAD_PRIORITY);
		CHANGE_THREAD_PRIO (ardrone_control, NAVDATA_THREAD_PRIORITY);
#endif

		/* Main loop: drive the tool until a disconnect/exit is requested. */
		while( SUCCEED(res) && bContinue == TRUE )
		{
			ardrone_tool_update();
		}

		JOIN_THREAD(video_stage);

		if (IS_LEAST_ARDRONE2)
		{
			JOIN_THREAD (video_recorder);
		}

		/* Unregistering for the current device */
		ardrone_tool_input_remove( &virtual_gamepad );

		res = ardrone_tool_shutdown();
		LOGD(TAG, "AR.Drone tool shutdown [OK]");

		if (param != NULL && param->callback != NULL) {
			param->callback(env, param->obj, ARDRONE_MESSAGE_DISCONNECTED);
		}
	}

	/* Release the Java global reference BEFORE freeing param: the original
	 * code read param->obj after vp_os_free(data), a use-after-free. Also
	 * guard against env being NULL when no JVM was attached. */
	if (env != NULL && param != NULL) {
		(*env)->DeleteGlobalRef(env, param->obj);
	}

	vp_os_free (data);
	data = NULL;
	param = NULL;

	if (g_vm) {
		(*g_vm)->DetachCurrentThread (g_vm);
	}

	LOGI(TAG, "app_main thread has been stopped.");

	return (THREAD_RET) res;
}
/*---------------------------------------------------------------------------------------------------------------------
Tests the network connection to the drone by fetching the drone version number
through the FTP server embedded on the drone.
This is how FreeFlight checks if a drone sofware update is required.

The FTP connection process is a quick and (very)dirty one. It uses FTP passive mode.
---------------------------------------------------------------------------------------------------------------------*/
/**
 * Tests the network connection to the drone by fetching the version file
 * from the drone's embedded FTP server using passive mode.
 *
 * @return 0 on success; a negative code (-1..-7) identifying the first
 *         failing step (init, open, no reply, no PASV ack, unparsable
 *         PASV reply, data-connection init/open failure).
 *
 * NOTE(review): _snprintf() and Sleep() are Windows APIs — presumably this
 * function is only built on Win32; confirm before reusing elsewhere.
 */
int test_drone_connection()
{
	const char * passivdeModeHeader = "\r\n227 PASV ok (";
	vp_com_socket_t ftp_client,ftp_client2;
	char buffer[1024];
	static Write ftp_write = NULL;
	static Read  ftp_read = NULL;
	int bytes_to_send,received_bytes;
	int i,L,x[6],port;
	int timeout_windows = 1000; /*milliseconds*/
	
	vp_os_memset(buffer,0,sizeof(buffer));

	/* Connects to the FTP server */
		wifi_config_socket(&ftp_client,VP_COM_CLIENT,FTP_PORT,WIFI_ARDRONE_IP);
		ftp_client.protocol = VP_COM_TCP;
		if(VP_FAILED(vp_com_init(wifi_com()))) return -1;
		if(VP_FAILED(vp_com_open(wifi_com(), &ftp_client, &ftp_read, &ftp_write))) return -2;
		/* NOTE(review): casting ftp_client.priv through int32_t assumes priv
		 * holds the raw socket descriptor — verify on 64-bit builds. */
		setsockopt((int32_t)ftp_client.priv, 
								SOL_SOCKET, 
								SO_RCVTIMEO, 
								(const char*)&timeout_windows, sizeof(timeout_windows)
								); 

	/* Request version file — entire FTP dialogue sent in one burst. */
		bytes_to_send = _snprintf(buffer,sizeof(buffer),"%s",
			"USER anonymous\r\nCWD /\r\nPWD\r\nTYPE A\r\nPASV\r\nRETR version.txt\r\n");
		ftp_write(&ftp_client,buffer,&bytes_to_send);
		/* Dirty. We should wait for data to arrive with some kind of synchronization
		or make the socket blocking.*/
		Sleep(1000);

	/* Gets the data port */
		received_bytes = sizeof(buffer);
		ftp_read(&ftp_client,buffer,&received_bytes);
		if (received_bytes<1) { vp_com_close(wifi_com(), &ftp_client); return -3; }
		L=received_bytes-strlen(passivdeModeHeader);

	/* Searches for the passive mode acknowlegment from the FTP server */
		for (i=0;i<L;i++) {
			if (strncmp((buffer+i),passivdeModeHeader,strlen(passivdeModeHeader))==0)  break; 
		}
		if (i==L) {
			vp_com_close(wifi_com(), &ftp_client); return -4; 
		}
		i+=strlen(passivdeModeHeader);
		/* PASV reply format: (h1,h2,h3,h4,p1,p2) — port = p1*256 + p2.
		 * NOTE(review): buffer is not guaranteed NUL-terminated when the read
		 * filled it completely; sscanf could scan past the received data —
		 * verify or cap received_bytes at sizeof(buffer)-1 upstream. */
		if (sscanf(buffer+i,"%i,%i,%i,%i,%i,%i)",&x[0],&x[1],&x[2],&x[3],&x[4],&x[5])!=6)
			{ vp_com_close(wifi_com(), &ftp_client); return -5; }
		port=(x[4]<<8)+x[5];

	/* Connects to the FTP server data port */
		wifi_config_socket(&ftp_client2,VP_COM_CLIENT,port,"192.168.1.1");
		ftp_client2.protocol = VP_COM_TCP;
		if(VP_FAILED(vp_com_init(wifi_com()))) 
				{ vp_com_close(wifi_com(), &ftp_client2); return -6; }
		if(VP_FAILED(vp_com_open(wifi_com(), &ftp_client2, &ftp_read, &ftp_write)))
			{ vp_com_close(wifi_com(), &ftp_client2); return -7; }

	/* Gets the data — the version string transferred on the data channel. */
		received_bytes = sizeof(buffer);
		ftp_read(&ftp_client2,buffer,&received_bytes);
		if (received_bytes>0) {
			buffer[min(received_bytes,sizeof(buffer)-1)]=0;
			printf("Drone version %s detected ... press <Enter> to start the application.\n",buffer);
			getchar();
		}
	
	/* Clean up */
		vp_com_close(wifi_com(), &ftp_client);
		vp_com_close(wifi_com(), &ftp_client2);

	return 0;
}
Пример #9
0
/**
 * Camera-to-pipeline benchmark: builds a V4L2 capture pipeline (camera ->
 * optional blockline buffer -> optional encoder -> file and/or socket
 * output), runs it for up to 50 frames, and prints elapsed time and the
 * measured average fps.
 *
 * @return EXIT_SUCCESS always (pipeline errors simply end the loop early).
 */
int main(int argc, char **argv)
{
  vp_api_picture_t  video_picture;
  vp_api_picture_t  buffer_blockline;
#ifdef BLOCK_MODE
  vp_api_picture_t  video_blockline;
#endif
  vp_api_io_pipeline_t pipeline;
  vp_api_io_data_t out;
  vp_api_io_stage_t stages[NB_STAGES_MAX];

  /* Zero the global stage configurations before filling them in. */
  vp_os_memset(&ivc,0,sizeof(ivc));
  vp_os_memset(&ofc,0,sizeof(ofc));
  vp_os_memset(&occ,0,sizeof(occ));


  // CAMIF config
  ivc.camera = "/dev/video1";
  ivc.i2c = "/dev/i2c-0";
#ifdef OV_CAM
  ivc.camera_configure = &camera_configure_ov77xx;
#else
#ifdef CRESYN_CAM
  ivc.camera_configure = &camera_configure_cresyn;
#else  
#error no cam selected
#endif
#endif

  ivc.resolution = VGA;
  ivc.nb_buffers = 8;
  ivc.format                              = V4L2_PIX_FMT_YUV420;
  ivc.y_pad                               = 0;
  ivc.c_pad                               = 0;
  ivc.offset_delta                        = 0;
#ifdef BLOCK_MODE
  ivc.vp_api_blockline                    = &video_blockline;
#endif
  ivc.vp_api_picture                      = &video_picture;
  ivc.use_chrominances                    = 1;
  ivc.framerate				  = 15;

  // BUFFER
#ifdef BLOCK_MODE
  bbc.picture                    = &video_blockline;
#endif

  // ENCODER
  vec.width                               = 320;
  vec.height                              = 240;
#ifdef BLOCK_MODE
  vec.block_mode_enable                   = TRUE;
#else
  vec.block_mode_enable                   = FALSE;
#endif
  vec.picture                             = &video_picture;
 
  // OUTPUT FILE config
  ofc.name = "/tmp/out.yuv";
  
  // COM CONFIG — TCP server on port 5555, buffer sized for one YUV420 VGA frame.
  occ.com                            = wired_com();
  occ.config                         = wired_config();
  occ.connection                     = wired_connection();
  occ.socket.type                    = VP_COM_SERVER;
  occ.socket.protocol                = VP_COM_TCP;
  occ.socket.port                    = 5555;
  occ.buffer_size                    = 640*480*3/2;

  // build pipeline — stage order fixed: capture, buffer, encode, outputs.
  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_CAMIF;
  stages[pipeline.nb_stages].cfg     = (void *)&ivc;
  stages[pipeline.nb_stages++].funcs = V4L2_funcs;

#ifdef BLOCK_MODE
#ifdef BUFFER
  stages[pipeline.nb_stages].type    = VP_API_INPUT_CAMIF;
  stages[pipeline.nb_stages].cfg     = (void *)&bbc;
  stages[pipeline.nb_stages++].funcs =  blockline_to_buffer_funcs;
#endif
#endif

#ifdef ENCODE 
  stages[pipeline.nb_stages].type    = VP_API_FILTER_ENCODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages++].funcs = vlib_encoding_funcs;
#endif

#ifdef FILE_OUTPUT
  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_FILE;
  stages[pipeline.nb_stages].cfg     = (void *)&ofc;
  stages[pipeline.nb_stages++].funcs = vp_stages_output_file_funcs;
#endif

#ifdef ETHERNET_OUTPUT
  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&occ;
  stages[pipeline.nb_stages++].funcs = vp_stages_output_com_funcs;
#endif

  pipeline.stages = &stages[0];

  // launch pipelines
  vp_api_open(&pipeline, &pipeline_handle);
  
  out.status = VP_API_STATUS_PROCESSING;

  /////////////////////////////////////////////////////////////////////////////////////////
  
  uint16_t i = 0;

  struct timeval time1,time2;

  printf("Pipeline launched....\n");
  gettimeofday(&time1,NULL);
  /* Run the pipeline for at most 50 frames while timing it. */
  while(SUCCEED(vp_api_run(&pipeline, &out)) && (out.status == VP_API_STATUS_PROCESSING))
    {
      i++;
      //printf("image %d \n",i);
      if (i>50)
        break;
      //vp_os_delay(20);
    }   
  /////////////////////////////////////////////////////////////////////////////////////////
 
  gettimeofday(&time2,NULL);
  /* Elapsed time as seconds + microseconds, normalised so ms >= 0. */
  int seconde = time2.tv_sec-time1.tv_sec;
  int ms = time2.tv_usec-time1.tv_usec;
  if (ms<0)
  {
     seconde--;
     ms += 1000000;
  }
  ms = ms/1000;
  float fps = ((float)i*1000)/(float)(seconde*1000+ms);
  printf("temps ecoule : %d.%d / nombre image : %d average fps : %f\n",seconde,ms,i,fps);
  vp_api_close(&pipeline, &pipeline_handle);

  return EXIT_SUCCESS;
}
Пример #10
0
/**
 * Generic AR.Drone tool entry point: parses command-line options (-ip,
 * help flags, custom options), resolves the drone's IP address, waits for
 * the drone version, initializes the tool layer and runs its update loop
 * until exit, then shuts down.
 *
 * @return 0 on a successful run, -1 otherwise.
 */
int ardrone_tool_main(int argc, char **argv)
{
  C_RESULT res;
  const char* old_locale;
  const char* appname = argv[0];
  int argc_backup = argc;
  char** argv_backup = argv;
  char * drone_ip_address = NULL;
  struct in_addr drone_ip_address_in;

  bool_t show_usage = FAILED( ardrone_tool_check_argc_custom(argc) ) ? TRUE : FALSE;

  /* Consume leading "-" options; anything unrecognized flags usage. */
  argc--; argv++;
  while( argc && *argv[0] == '-' )
  {
    if( !strcmp(*argv, "-ip") && ( argc > 1 ) )
    {
    	drone_ip_address = *(argv+1);
    	printf("Using custom ip address %s\n",drone_ip_address);
	    argc--; argv++;
    }
    else if( !strcmp(*argv, "-?") || !strcmp(*argv, "-h") || !strcmp(*argv, "-help") || !strcmp(*argv, "--help") )
    {
      ardrone_tool_usage( appname );
      exit( 0 );
    }
    else if( !ardrone_tool_parse_cmd_line_custom( *argv ) )
    {
      printf("Option %s not recognized\n", *argv);
      show_usage = TRUE;
    }

    argc--; argv++;
  }

  /*if( show_usage || (argc != 0) )
  {
    ardrone_tool_usage( appname );
    exit(-1);
  }*/
  
  /* After a first analysis, the arguments are restored so they can be passed to the user-defined functions */
  argc=argc_backup;
  argv=argv_backup;
  
  /* Force a dot-decimal numeric locale so float formatting in AT commands
   * is not broken by comma-based locales. */
  old_locale = setlocale(LC_NUMERIC, "en_GB.UTF-8");

  if( old_locale == NULL )
  {
    PRINT("You have to install new locales in your dev environment! (avoid the need of conv_coma_to_dot)\n");
    PRINT("As root, do a \"dpkg-reconfigure locales\" and add en_GB.UTF8 to your locale settings\n");
  }
  else
  {
    PRINT("Setting locale to %s\n", old_locale);
  }

  vp_com_wifi_config_t *config = (vp_com_wifi_config_t*)wifi_config();
  
  if(config)
  {
	  vp_os_memset( &wifi_ardrone_ip[0], 0, sizeof(wifi_ardrone_ip) );

	  /* Prefer a valid -ip argument; otherwise fall back to the address
	   * reported by the wifi configuration. The memset above plus the
	   * size-1 bound keeps wifi_ardrone_ip NUL-terminated. */
	  if(drone_ip_address && inet_aton(drone_ip_address,&drone_ip_address_in)!=0)
	  {
	  /* If the drone IP address was given on the command line and is valid */
  	  	printf("===================+> %s\n", drone_ip_address);
        strncpy( &wifi_ardrone_ip[0], drone_ip_address, sizeof(wifi_ardrone_ip)-1);
      }
      else
	  {
		printf("===================+> %s\n", config->server);
		strncpy( &wifi_ardrone_ip[0], config->server, sizeof(wifi_ardrone_ip)-1);
	  }
  }

  /* Block until the drone answers with its version. */
  while (-1 == getDroneVersion (root_dir, wifi_ardrone_ip, &ardroneVersion))
    {
      printf ("Getting AR.Drone version ...\n");
      vp_os_delay (250);
    }

	res = ardrone_tool_setup_com( NULL );

	if( FAILED(res) )
	{
	  PRINT("Wifi initialization failed. It means either:\n");
	  PRINT("\t* you're not root (it's mandatory because you can set up wifi connection only as root)\n");
	  PRINT("\t* wifi device is not present (on your pc or on your card)\n");
	  PRINT("\t* you set the wrong name for wifi interface (for example rausb0 instead of wlan0) \n");
	  PRINT("\t* ap is not up (reboot card or remove wifi usb dongle)\n");
	  PRINT("\t* wifi device has no antenna\n");
	}
	else
	{
		// Save appname/appid for reconnections
		// NOTE(review): this inner `appname` shadows the outer one declared
		// above — intentional here, but flagged by -Wshadow.
		char *appname = NULL;
		int lastSlashPos;
		/* Cut the invoking name to the last / or \ character on the command line
		* This avoids using differents app_id for applications called from different directories
		* e.g. if argv[0] is "Build/Release/ardrone_navigation", appname will point to "ardrone_navigation" only
		*/
		for (lastSlashPos = strlen (argv[0])-1; lastSlashPos > 0 && argv[0][lastSlashPos] != '/' && argv[0][lastSlashPos] != '\\'; lastSlashPos--);
		appname = &argv[0][lastSlashPos+1];
		ardrone_gen_appid (appname, __SDK_VERSION__, app_id, app_name, sizeof (app_name));
		res = ardrone_tool_init(wifi_ardrone_ip, strlen(wifi_ardrone_ip), NULL, appname, NULL, NULL, NULL, MAX_FLIGHT_STORING_SIZE, NULL);

      /* Main loop: keep updating the tool layer until it requests exit. */
      while( SUCCEED(res) && ardrone_tool_exit() == FALSE )
      {
        res = ardrone_tool_update();
      }

      res = ardrone_tool_shutdown();
    }

  /* Restore the locale captured before our override. */
  if( old_locale != NULL )
  {
    setlocale(LC_NUMERIC, old_locale);
  }

  return SUCCEED(res) ? 0 : -1;
}
Пример #11
0
/**
 * Reset a timer by zeroing its entire state.
 *
 * @param timer Timer to reinitialise; must be non-NULL.
 * @return Always C_OK.
 */
C_RESULT ardrone_timer_reset(ardrone_timer_t* timer)
{
    vp_os_memset(timer, 0, sizeof(*timer));
    return C_OK;
}
Пример #12
0
DEFINE_THREAD_ROUTINE(drone_video_stage_thread, data)
{
  PIPELINE_HANDLE pipeline_handle;

  printf("Vision thread started\n");

  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[DRONE_NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;

  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_video_output_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];

  pthread_mutex_lock(&drone_sharedDataMutex);
  int doRun = drone_doRun;
  pthread_mutex_unlock(&drone_sharedDataMutex);

  /* Processing of a pipeline */
  if (doRun) {
    res = 0;
    res = vp_api_open(&pipeline, &pipeline_handle);

    if (SUCCEED(res)) {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while (doRun && (loop == SUCCESS)) {
          pthread_mutex_lock(&drone_sharedDataMutex);
          doRun = drone_doRun;
          pthread_mutex_unlock(&drone_sharedDataMutex);

          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if ((out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING)) {
              loop = SUCCESS;
            }
          }
      }

      fprintf(stderr, "BUG and WORKAROUND: vp_api_close was never called because it is buggy\n");
//      vp_api_close(&pipeline, &pipeline_handle); // <- TODO: do not call this - fix this function in SDK
    }
  }

  vp_os_free(picture.y_buf);
  vp_os_free(picture.cr_buf);
  vp_os_free(picture.cb_buf);

  printf("Vision thread terminated\n");

  return (THREAD_RET) 0;
}
/**
 * Parses the custom-configuration list sent by the drone into the
 * per-category output array.
 *
 * The buffer holds INI-like sections: a "[header]" line naming a category
 * followed by "id,description" lines. Parsing is done IN PLACE — section
 * and field terminators inside `buffer` are overwritten with NUL bytes.
 * For each recognized category the previous list is freed and rebuilt
 * via vp_os_realloc.
 *
 * @param buffer                    Raw configuration text (modified in place).
 * @param buffer_size               Number of valid bytes in buffer.
 * @param available_configurations  Output array indexed by category
 *                                  (NB_CONFIG_CATEGORIES entries).
 *
 * The function returns (from inside the loop) whenever the buffer is
 * exhausted or an unknown category header is met.
 */
void ardrone_control_read_custom_configurations_list(/*in*/char * buffer,
													 /*in*/int buffer_size,
													 /*out*/custom_configuration_list_t *available_configurations)
{
	custom_configuration_list_t * current_scope = NULL;
	char id[CUSTOM_CONFIGURATION_ID_LENGTH+1];
	char description[1024];
	int index = 0;
	char * pindex; int i,j;
	char * end_of_buffer;

	index = 0;
	pindex = buffer;
	end_of_buffer = buffer + buffer_size;

	DEBUG_CONFIG_RECEIVE("Decoding %i bytes",buffer_size);
	DEBUG_CONFIG_RECEIVE("\n");

	while(1)
	{
		//DEBUG_CONFIG_RECEIVE("Analysing <"); for (i=index;i<buffer_size;i++) DEBUG_CONFIG_RECEIVE("[%i]",buffer[i]); DEBUG_CONFIG_RECEIVE(">\n");
		/* Go to the beginning of a section */
			while((*pindex)!='[') { index++; pindex++; if (pindex==end_of_buffer) return; }
		/* Search the end of the section name (CR, ASCII 13) and NUL-terminate it in place */
			for (;index<buffer_size;index++) { if (buffer[index]==13 ) { buffer[index]=0; break; } }    if(index==buffer_size) return;
		/* Search the corresponding category */
			for (j=0;j<NB_CONFIG_CATEGORIES;j++){
				if ( strcmp(custom_configuration_headers[j],pindex)==0 ){
					/* Found the category */
					current_scope = &available_configurations[j];
					DEBUG_CONFIG_RECEIVE(" Found Scope <%s>\n",custom_configuration_headers[j]);
					break;
				}
			}
			if (j==NB_CONFIG_CATEGORIES) { DEBUG_CONFIG_RECEIVE("Unknown category."); return ;}
		/* Reset the list — drop any previously stored configurations for this category */
			if (current_scope!=NULL)
			{
				current_scope->nb_configurations = 0;
				if (current_scope->list!=NULL) { vp_os_free(current_scope->list); current_scope->list = NULL; }
			}
		/* Points on the first ID */
			index++;
			pindex=buffer+index;

		/* Read the IDs — each entry is "id,description" terminated by CR/NUL;
		 * a '[' starts the next section, a NUL ends the data */
			while(pindex<end_of_buffer && (*pindex)!='[' && (*pindex)!=0)
			{
				vp_os_memset(id,0,sizeof(id));
				vp_os_memset(description,0,sizeof(description));

				//DEBUG_CONFIG_RECEIVE("Now scanning <%c> %i\n",*pindex,index);
				for (;index<buffer_size;index++) { if (buffer[index]==',' || buffer[index]=='\r') { buffer[index]=0; break; } }   if(index==buffer_size) return;
				/* NOTE(review): strncpy with count == sizeof(id) may leave id
				 * unterminated if the field exactly fills it; the memset above
				 * only guarantees termination for shorter fields — verify. */
				strncpy(id,pindex,sizeof(id));
				index++;
				pindex=buffer+index;
				for (;index<buffer_size;index++) { if (buffer[index]==0 || buffer[index]=='\r') { buffer[index]=0; break; } }   if(index==buffer_size) return;
				strncpy(description,pindex,sizeof(description));
				DEBUG_CONFIG_RECEIVE(" Found ID <%s> description <%s>\n",id,description);
				index++;
				pindex=buffer+index;

				/* Store the found ID */
					/* Increase the size of the list by one element */
					current_scope->list = vp_os_realloc(current_scope->list,sizeof(*current_scope->list)*(current_scope->nb_configurations+1));
					/* Store the new element */
					strncpy(current_scope->list[current_scope->nb_configurations].id,
							id,
							sizeof(current_scope->list[current_scope->nb_configurations].id)  );
					strncpy(current_scope->list[current_scope->nb_configurations].description,
							description,
							sizeof(current_scope->list[current_scope->nb_configurations].description)  );
					current_scope->nb_configurations++;
			}
	}
	return;
}
Пример #14
0
/**
 * Builds the video / vision GTK window and wires up its callbacks.
 *
 * The window contains: the video display frame, a vision-state frame,
 * two rows of tracking-parameter entry boxes, tracking/computing option
 * buttons, and the video-stream controls (codec list, bitrate mode list,
 * manual bitrate entry).  All widgets live in file-scope globals so the
 * rest of the IHM can reach them.  Call once, from the GTK thread.
 */
void create_image_window( void )
{
  int k;

  printf("Creating the Video window.\n");

  /* Top-level window */
  ihm_ImageWin = gtk_window_new( GTK_WINDOW_TOPLEVEL);
  gtk_container_set_border_width(GTK_CONTAINER(ihm_ImageWin), 10);
  gtk_window_set_title(GTK_WINDOW(ihm_ImageWin), ihm_ImageTitle);
  gtk_signal_connect(GTK_OBJECT(ihm_ImageWin), "destroy", G_CALLBACK(ihm_ImageWinDestroy), NULL );

  /* Layout boxes */
  ihm_ImageVBox = gtk_vbox_new(FALSE, 0);
  ihm_VideoStream_VBox = gtk_vbox_new(FALSE, 0);
  ihm_ImageVBoxPT = gtk_vbox_new(FALSE, 0);

  for (k=0; k<NB_IMAGES_H_BOXES; k++)  ihm_ImageHBox[k] = gtk_hbox_new(FALSE, 0);
  /* Frames */
  for (k=0; k<NB_IMAGES_FRAMES; k++)  ihm_ImageFrames[k] = gtk_frame_new( ihm_ImageFrameCaption[k] );
  /* Entry boxes for the vision parameters */
  for (k=0; k<NB_IMAGES_ENTRIES; k++) {
    ihm_ImageEntry[k] = gtk_entry_new();
    gtk_widget_set_size_request(ihm_ImageEntry[k], 80, 20);
  }

  /* Video-stream control widgets */
  for (k=0; k<NB_VIDEO_STREAM_WIDGET; k++)  ihm_VideoStreamLabel[k] = gtk_label_new( ihm_ImageVideoStreamCaption[k] );

  video_bitrateEntry = gtk_entry_new();
  gtk_widget_set_size_request(video_bitrateEntry, 150, 20);

  video_codecList = gtk_combo_box_new_text();
  gtk_combo_box_insert_text( (GtkComboBox*)video_codecList, 0, (const gchar*)"UVLC");
  gtk_combo_box_insert_text( (GtkComboBox*)video_codecList, 1, (const gchar*)"P264");
  gtk_widget_set_size_request(video_codecList, 150, 20);

  video_bitrateModeList = gtk_combo_box_new_text();
  gtk_combo_box_insert_text( (GtkComboBox*)video_bitrateModeList, 0, (const gchar*)"None");
  gtk_combo_box_insert_text( (GtkComboBox*)video_bitrateModeList, 1, (const gchar*)"Adaptative");
  gtk_combo_box_insert_text( (GtkComboBox*)video_bitrateModeList, 2, (const gchar*)"Manual");
  /* BUGFIX: the original sized video_codecList a second time here (copy-paste),
   * leaving video_bitrateModeList at its default size. */
  gtk_widget_set_size_request(video_bitrateModeList, 150, 20);

  for (k=0; k<NB_IMAGES_ENTRIES; k++)  ihm_ImageLabel[k] = gtk_label_new( ihm_ImageEntryCaption[k] );

  /* Creates buttons and links them to callbacks */
  for (k=0; k<NB_IMAGES_BUTTONS; k++)
  {
    ihm_ImageButton[k] = gtk_button_new();
    gtk_button_set_label((GtkButton*)ihm_ImageButton[k] ,ihm_ImageButtonCaption[k]);

    switch (k)
    {
      case UPDATE_VISION_PARAMS_BUTTON:
        g_signal_connect( G_OBJECT(ihm_ImageButton[k]), "clicked", G_CALLBACK(ihm_sendVisionConfigParams), (gpointer)k );
        break;
      case RAW_CAPTURE_BUTTON:
        g_signal_connect( G_OBJECT(ihm_ImageButton[k]), "clicked", G_CALLBACK(ihm_RAWCapture), (gpointer)k );
        break;
      case ZAPPER_BUTTON:
        g_signal_connect( G_OBJECT(ihm_ImageButton[k]), "clicked", G_CALLBACK(ihm_Zapper), (gpointer)k );
        break;
      case FULLSCREEN_BUTTON:
        g_signal_connect( G_OBJECT(ihm_ImageButton[k]), "clicked", G_CALLBACK(ihm_VideoFullScreen), (gpointer)k );
        break;
      default:
        g_signal_connect( G_OBJECT(ihm_ImageButton[k]), "clicked", G_CALLBACK(ihm_ImageButtonCB), (gpointer)k );
      }
  }

  /* NOTE(review): ihm_ImageLabel[] is indexed here with a *button* enum value
   * (RAW_CAPTURE_BUTTON); verify this is the intended label index. */
  GdkColor color;
  gdk_color_parse ("red", &color);
  gtk_widget_modify_text (ihm_ImageLabel[RAW_CAPTURE_BUTTON], GTK_STATE_NORMAL, &color);

  video_bitrateButton = gtk_button_new_with_label( "Send" );
  g_signal_connect(G_OBJECT(video_bitrateButton), "clicked", G_CALLBACK(ihm_send_VideoBitrate), 0 );
  g_signal_connect(G_OBJECT(video_codecList), "changed", G_CALLBACK(ihm_send_VideoCodec), 0 );
  g_signal_connect(G_OBJECT(video_bitrateModeList), "changed", G_CALLBACK(ihm_send_VideoBitrateMode), 0 );

  /* Fills the entries with the default vision parameter values.
   * BUGFIX: the scratch buffer was sized by the number of entries
   * (NB_IMAGES_ENTRIES), not by the length of a formatted int; use a
   * fixed-size buffer and bounded snprintf instead of sprintf. */
  char label_vision_default_val[16];
  tab_vision_config_params[0] = DEFAULT_CS;
  tab_vision_config_params[1] = DEFAULT_NB_PAIRS;
  tab_vision_config_params[2] = DEFAULT_LOSS_PER;
  tab_vision_config_params[3] = DEFAULT_NB_TRACKERS_WIDTH;
  tab_vision_config_params[4] = DEFAULT_NB_TRACKERS_HEIGHT;
  tab_vision_config_params[5] = DEFAULT_SCALE;
  tab_vision_config_params[6] = DEFAULT_TRANSLATION_MAX;
  tab_vision_config_params[7] = DEFAULT_MAX_PAIR_DIST;
  tab_vision_config_params[8] = DEFAULT_NOISE;

  for (k=0; k<NB_IMAGES_ENTRIES; k++)  {
    if (k==FAKE_ENTRY) continue;  /* placeholder slot, no widget content */
    snprintf(label_vision_default_val, sizeof(label_vision_default_val), "%d", tab_vision_config_params[k]);
    gtk_entry_set_text( GTK_ENTRY(ihm_ImageEntry[k]), label_vision_default_val);
  }
  gtk_entry_set_text( GTK_ENTRY(video_bitrateEntry), "frame size (bytes)");

  /* Builds the vision state frame */
  vp_os_memset(label_vision_state_value, 0, sizeof(label_vision_state_value));
  strcat(label_vision_state_value, "Not Connected");
  label_vision_values = (GtkLabel*) gtk_label_new(label_vision_state_value);

  gtk_container_add( GTK_CONTAINER(ihm_ImageFrames[STATE_FRAME]), (GtkWidget*) label_vision_values );

  /* Builds the vision parameters frame */

  /* First line of parameters */
  for (k=CS_ENTRY; k<NB_TH_ENTRY; k++)  {
    gtk_box_pack_start(GTK_BOX(ihm_ImageHBox[TRACKING_PARAM_HBOX1]), ihm_ImageLabel[k], FALSE , FALSE, 0);
    gtk_box_pack_start(GTK_BOX(ihm_ImageHBox[TRACKING_PARAM_HBOX1]), ihm_ImageEntry[k], FALSE , FALSE, 0);
  }
  /* Second line of parameters */
  for (k=NB_TH_ENTRY; k<NB_IMAGES_ENTRIES; k++)  {
    if (k==FAKE_ENTRY) continue;
    gtk_box_pack_start(GTK_BOX(ihm_ImageHBox[TRACKING_PARAM_HBOX2]), ihm_ImageLabel[k], FALSE , FALSE, 0);
    gtk_box_pack_start(GTK_BOX(ihm_ImageHBox[TRACKING_PARAM_HBOX2]), ihm_ImageEntry[k], FALSE , FALSE, 0);
  }

  gtk_box_pack_start(GTK_BOX(ihm_ImageHBox[TRACKING_PARAM_HBOX2]), ihm_ImageLabel[FAKE_ENTRY], FALSE , FALSE, 0); // To fill space
  /* Fuses the two lines in a single VBox */
  gtk_box_pack_start(GTK_BOX(ihm_ImageVBoxPT), ihm_ImageHBox[TRACKING_PARAM_HBOX1], FALSE , FALSE, 0);
  gtk_box_pack_start(GTK_BOX(ihm_ImageVBoxPT), ihm_ImageHBox[TRACKING_PARAM_HBOX2], FALSE , FALSE, 0);

  /* Builds the whole parameter block */
  gtk_container_add(GTK_CONTAINER(ihm_ImageFrames[TRACKING_PARAMETERS_FRAME]), ihm_ImageVBoxPT );
  gtk_box_pack_start(GTK_BOX(ihm_ImageHBox[TRACKING_PARAMS_HBOX]), ihm_ImageFrames[TRACKING_PARAMETERS_FRAME], FALSE , FALSE, 0);
  gtk_box_pack_start(GTK_BOX(ihm_ImageHBox[TRACKING_PARAMS_HBOX]), ihm_ImageButton[UPDATE_VISION_PARAMS_BUTTON], TRUE  , FALSE, 5);

  for (k=TZ_KNOWN_BUTTON; k<=SE3_BUTTON; k++)
    gtk_box_pack_start(GTK_BOX( ihm_ImageHBox[TRACKING_OPTION_HBOX]), ihm_ImageButton[k], TRUE , FALSE, 0);
  for (k=PROJ_OVERSCENE_BUTTON; k<=FLAT_GROUND_BUTTON; k++)
    gtk_box_pack_start(GTK_BOX( ihm_ImageHBox[COMPUTING_OPTION_HBOX]), ihm_ImageButton[k], TRUE , FALSE, 0);

  /* Video stream controls row: codec, bitrate mode, manual bitrate */
  gtk_box_pack_start(GTK_BOX( ihm_ImageHBox[VIDEO_STREAM_HBOX]), ihm_VideoStreamLabel[CODEC_TYPE_LIST], FALSE , FALSE, 0);
  gtk_box_pack_start(GTK_BOX( ihm_ImageHBox[VIDEO_STREAM_HBOX]), video_codecList, TRUE , FALSE, 0);
  gtk_box_pack_start(GTK_BOX( ihm_ImageHBox[VIDEO_STREAM_HBOX]), ihm_VideoStreamLabel[BITRATE_MODE_LIST], FALSE , FALSE, 0);
  gtk_box_pack_start(GTK_BOX( ihm_ImageHBox[VIDEO_STREAM_HBOX]), video_bitrateModeList, TRUE , FALSE, 0);
  gtk_box_pack_start(GTK_BOX( ihm_ImageHBox[VIDEO_STREAM_HBOX]), ihm_VideoStreamLabel[MANUAL_BITRATE_ENTRY], FALSE , FALSE, 0);
  gtk_box_pack_start(GTK_BOX( ihm_ImageHBox[VIDEO_STREAM_HBOX]), video_bitrateEntry, TRUE , FALSE, 0);
  gtk_box_pack_start(GTK_BOX( ihm_ImageHBox[VIDEO_STREAM_HBOX]), video_bitrateButton, TRUE , FALSE, 0);

  gtk_container_add(GTK_CONTAINER( ihm_ImageFrames[TRACKING_OPTION_FRAME]) , ihm_ImageHBox[TRACKING_OPTION_HBOX] );
  gtk_container_add(GTK_CONTAINER( ihm_ImageFrames[COMPUTING_OPTION_FRAME]), ihm_ImageHBox[COMPUTING_OPTION_HBOX] );
  gtk_container_add(GTK_CONTAINER( ihm_ImageFrames[VIDEO_STREAM_FRAME])    , ihm_ImageHBox[VIDEO_STREAM_HBOX] );

  /* Frame where to show buttons controlling how the drone video is displayed */
  displayvbox = gtk_vbox_new(FALSE,0);

  gtk_box_pack_start(GTK_BOX(displayvbox), ihm_ImageButton[RAW_CAPTURE_BUTTON], FALSE  , FALSE, 5);
  gtk_box_pack_start(GTK_BOX(displayvbox), ihm_ImageButton[ZAPPER_BUTTON], FALSE  , FALSE, 5);
  gtk_box_pack_start(GTK_BOX(displayvbox), ihm_ImageButton[FULLSCREEN_BUTTON], FALSE  , FALSE, 5);

  gtk_box_pack_start(GTK_BOX(ihm_ImageHBox[VIDEO_DISPLAY_HBOX]),ihm_VideoStream_VBox,FALSE,FALSE,5);
  gtk_box_pack_start(GTK_BOX(ihm_ImageHBox[VIDEO_DISPLAY_HBOX]),displayvbox,FALSE,FALSE,5);

  gtk_container_add(GTK_CONTAINER( ihm_ImageFrames[VIDEO_DISPLAY_FRAME])    , ihm_ImageHBox[VIDEO_DISPLAY_HBOX] );

  /* Builds the final window */
  gtk_box_pack_start(GTK_BOX(ihm_ImageVBox), ihm_ImageFrames[VIDEO_DISPLAY_FRAME], FALSE, FALSE, 5);
  gtk_box_pack_start(GTK_BOX(ihm_ImageVBox), ihm_ImageFrames[STATE_FRAME], FALSE, FALSE, 5);
  gtk_box_pack_start(GTK_BOX(ihm_ImageVBox), ihm_ImageHBox[TRACKING_PARAMS_HBOX], FALSE, FALSE, 5);
  gtk_box_pack_start(GTK_BOX(ihm_ImageVBox), ihm_ImageFrames[TRACKING_OPTION_FRAME], FALSE, FALSE, 5);
  gtk_box_pack_start(GTK_BOX(ihm_ImageVBox), ihm_ImageFrames[COMPUTING_OPTION_FRAME], FALSE, FALSE, 5);
  gtk_box_pack_start(GTK_BOX(ihm_ImageVBox), ihm_ImageFrames[VIDEO_STREAM_FRAME], FALSE, FALSE, 5);

  gtk_container_add(GTK_CONTAINER(ihm_ImageWin), ihm_ImageVBox);
  image_vision_window_view = WINDOW_HIDE;
  image_vision_window_status = WINDOW_OPENED;

  /* Set the callback for the checkbox inside the main application window */
  g_signal_connect(G_OBJECT(button_show_image), "clicked", G_CALLBACK(ihm_showImage), (gpointer) ihm_ImageWin);
  g_signal_connect(G_OBJECT(button_show_image2), "clicked", G_CALLBACK(ihm_showImage), (gpointer) ihm_ImageWin);
}
Пример #15
0
/* The delegate object calls this method during initialization of an ARDrone application */
C_RESULT ardrone_tool_init_custom(void)
{
  
  if (customPad)
	  ardrone_tool_input_add( &fpad );
	  
  //init_gui(0, NULL); /// Creating the GUI 
	START_THREAD(gui, NULL); // Starting the GUI thread 
	START_THREAD(imgproc,NULL);
	START_THREAD(ttiofiles,NULL);

    ardrone_application_default_config.video_channel = ZAP_CHANNEL_VERT;
   // ARDRONE_TOOL_CONFIGURATION_ADDEVENT (video_channel, &ZAP_CHANNEL_VERT, NULL);	

    
    /************************ VIDEO STAGE CONFIG ******************************/

    #define NB_NAVIGATION_POST_STAGES   10
    uint8_t post_stages_index = 0;

    //Alloc structs
    specific_parameters_t * params             = (specific_parameters_t *)vp_os_calloc(1,sizeof(specific_parameters_t));
    specific_stages_t * navigation_pre_stages  = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
    specific_stages_t * navigation_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
    vp_api_picture_t  * in_picture             = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));
    vp_api_picture_t  * out_picture            = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));

    in_picture->width          = STREAM_WIDTH;
    in_picture->height         = STREAM_HEIGHT;

    out_picture->framerate     = 20;
    out_picture->format        = PIX_FMT_RGB24;
    out_picture->width         = STREAM_WIDTH;
    out_picture->height        = STREAM_HEIGHT;

    out_picture->y_buf         = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT * 3 );
    out_picture->cr_buf        = NULL;
    out_picture->cb_buf        = NULL;

    out_picture->y_line_size   = STREAM_WIDTH * 3;
    out_picture->cb_line_size  = 0;
    out_picture->cr_line_size  = 0;

    //Alloc the lists
    navigation_pre_stages->stages_list  = NULL;
    navigation_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(NB_NAVIGATION_POST_STAGES,sizeof(vp_api_io_stage_t));

    //Fill the POST-stages------------------------------------------------------
    vp_os_memset(&vlat,         0, sizeof( vlat ));
    vlat.state = 0;
    vlat.last_decoded_frame_info = (void*)&vec;
    navigation_post_stages->stages_list[post_stages_index].name    = "(latency estimator)";
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_FILTER_DECODER;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&vlat;
    navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_latency_estimation_funcs;


    vp_os_memset(&draw_trackers_cfg,         0, sizeof( draw_trackers_funcs ));
    draw_trackers_cfg.last_decoded_frame_info = (void*)&vec;
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_FILTER_DECODER;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&draw_trackers_cfg;
    navigation_post_stages->stages_list[post_stages_index++].funcs   = draw_trackers_funcs;


    vp_os_memset(&gtkconf,         0, sizeof( gtkconf ));
    gtkconf.rowstride               = out_picture->width * 3;
    gtkconf.last_decoded_frame_info = (void*)&vec;
    gtkconf.desired_display_width   = 0;  /* auto */
    gtkconf.desired_display_height  = 0;  /* auto */
    gtkconf.gdk_interpolation_mode  = 0;  /* fastest */
    navigation_post_stages->stages_list[post_stages_index].name    = "(Gtk display)";
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_OUTPUT_SDL;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&gtkconf;
    navigation_post_stages->stages_list[post_stages_index++].funcs   = vp_stages_output_gtk_funcs;

    //Define the list of stages size
    navigation_pre_stages->length  = 0;
    navigation_post_stages->length = post_stages_index;

    params->in_pic = in_picture;
    params->out_pic = out_picture;
    params->pre_processing_stages_list  = navigation_pre_stages;
    params->post_processing_stages_list = navigation_post_stages;
    params->needSetPriority = 0;
    params->priority = 0;

    START_THREAD(video_stage, params);
    video_stage_init();
    video_stage_resume_thread ();
    
    /************************ END OF VIDEO STAGE CONFIG ***********************/
    

    return C_OK;
    
  //START_THREAD( video_stage, NULL );
  
  return C_OK;
}//*/
DEFINE_THREAD_ROUTINE(video_stage, data)
{
  C_RESULT res;

  vp_api_io_pipeline_t    pipeline;
  vp_api_io_data_t        out;
  vp_api_io_stage_t       stages[NB_STAGES];

  vp_api_picture_t picture;

  video_com_config_t              icc;
  vlib_stage_decoding_config_t    vec;
  vp_stages_yuv2rgb_config_t      yuv2rgbconf;
  /// Picture configuration
  picture.format        = PIX_FMT_YUV420P;

  picture.width         = QVGA_WIDTH;
  picture.height        = QVGA_HEIGHT;
  picture.framerate     = 30;

  picture.y_buf   = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT     );
  picture.cr_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );
  picture.cb_buf  = vp_os_malloc( QVGA_WIDTH * QVGA_HEIGHT / 4 );

  picture.y_line_size   = QVGA_WIDTH;
  picture.cb_line_size  = QVGA_WIDTH / 2;
  picture.cr_line_size  = QVGA_WIDTH / 2;

  vp_os_memset(&icc,          0, sizeof( icc ));
  vp_os_memset(&vec,          0, sizeof( vec ));
  vp_os_memset(&yuv2rgbconf,  0, sizeof( yuv2rgbconf ));

  icc.com                 = COM_VIDEO();
  icc.buffer_size         = 100000;
  icc.protocol            = VP_COM_UDP;
  COM_CONFIG_SOCKET_VIDEO(&icc.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

  vec.width               = QVGA_WIDTH;
  vec.height              = QVGA_HEIGHT;
  vec.picture             = &picture;
  vec.block_mode_enable   = TRUE;
  vec.luma_only           = FALSE;

  yuv2rgbconf.rgb_format = VP_STAGES_RGB_FORMAT_RGB24;

  pipeline.nb_stages = 0;

  stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
  stages[pipeline.nb_stages].cfg     = (void *)&icc;
  stages[pipeline.nb_stages].funcs   = video_com_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
  stages[pipeline.nb_stages].cfg     = (void*)&vec;
  stages[pipeline.nb_stages].funcs   = vlib_decoding_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_FILTER_YUV2RGB;
  stages[pipeline.nb_stages].cfg     = (void*)&yuv2rgbconf;
  stages[pipeline.nb_stages].funcs   = vp_stages_yuv2rgb_funcs;

  pipeline.nb_stages++;

  stages[pipeline.nb_stages].type    = VP_API_OUTPUT_SDL;
  stages[pipeline.nb_stages].cfg     = NULL;
  stages[pipeline.nb_stages].funcs   = vp_stages_output_gtk_funcs;

  pipeline.nb_stages++;

  pipeline.stages = &stages[0];
  nextChannel = ZAP_CHANNEL_HORI;
 
  /* Processing of a pipeline */
  if( !ardrone_tool_exit() )
  {
    PRINT("\n   Video stage thread initialisation1\n\n");

    res = vp_api_open(&pipeline, &pipeline_handle);

    if( SUCCEED(res) )
    {
      int loop = SUCCESS;
      out.status = VP_API_STATUS_PROCESSING;

      while( !ardrone_tool_exit() && (loop == SUCCESS) )
      {
          if( SUCCEED(vp_api_run(&pipeline, &out)) ) {
            if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) ) {
              loop = SUCCESS;
            }
          }
          else loop = -1; // Finish this thread
      }

      vp_api_close(&pipeline, &pipeline_handle);
    }
  }

  PRINT("   Video stage thread ended\n\n");

  return (THREAD_RET)0;
}
Пример #17
0
/**
 * Navigation-application init hook called by ardrone_tool.
 *
 * Requests the full navdata option set, picks the video codec per drone
 * generation, starts the IHM / capture / console threads, then builds the
 * video pipeline description and starts the video_stage thread (plus the
 * recorder thread on AR.Drone 2).  Returns C_OK.
 */
C_RESULT ardrone_tool_init_custom(void) {
    ardrone_application_default_config.navdata_options = NAVDATA_OPTION_FULL_MASK /*&
        ~(NAVDATA_OPTION_MASK(NAVDATA_TRACKERS_SEND_TAG)
        | NAVDATA_OPTION_MASK(NAVDATA_VISION_OF_TAG)
        | NAVDATA_OPTION_MASK(NAVDATA_VISION_PERF_TAG)
        | NAVDATA_OPTION_MASK(NAVDATA_VISION_TAG))*/;

    /* Codec depends on the drone generation */
    if (IS_ARDRONE2)
    {
        ardrone_application_default_config.video_codec = H264_360P_CODEC;
    }
    else
    {
        ardrone_application_default_config.video_codec = UVLC_CODEC;
    }
    ardrone_application_default_config.bitrate_ctrl_mode = 1;

    /* Init specific code for application */
    ardrone_navdata_handler_table[NAVDATA_IHM_PROCESS_INDEX].data = &cfg;

    /* Input devices */
    load_ini();
    printf("Default control : %s (0x%08x, %s)\n", default_control->name, default_control->serial, default_control->filename);
    ardrone_tool_input_add(&control_device);
    cfg.default_control = default_control;
    cfg.devices = devices;

#ifdef USE_ARDRONE_VICON
    START_THREAD(vicon, &cfg);
#endif // USE_ARDRONE_VICON

    START_THREAD(ihm, &cfg);
#ifdef RAW_CAPTURE
    START_THREAD(raw_capture, &cfg);
#endif
    START_THREAD(remote_console, &cfg);

    /************************ VIDEO STAGE CONFIG ******************************/
    #define NB_NAVIGATION_POST_STAGES   10
    uint8_t post_stages_index = 0;

    /* Allocate the pipeline description structures (freed by video_stage) */
    specific_parameters_t * params             = (specific_parameters_t *)vp_os_calloc(1,sizeof(specific_parameters_t));
    specific_stages_t * navigation_pre_stages  = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
    specific_stages_t * navigation_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
    vp_api_picture_t  * in_picture             = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));
    vp_api_picture_t  * out_picture            = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));

    in_picture->width          = STREAM_WIDTH;
    in_picture->height         = STREAM_HEIGHT;

    out_picture->framerate     = 20;
    out_picture->format        = PIX_FMT_RGB24;
    out_picture->width         = STREAM_WIDTH;
    out_picture->height        = STREAM_HEIGHT;

    /* RGB24: single interleaved plane, 3 bytes per pixel */
    out_picture->y_buf         = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT * 3 );
    out_picture->cr_buf        = NULL;
    out_picture->cb_buf        = NULL;

    out_picture->y_line_size   = STREAM_WIDTH * 3;
    out_picture->cb_line_size  = 0;
    out_picture->cr_line_size  = 0;

    /* Stage lists (no pre-decoding stages) */
    navigation_pre_stages->stages_list  = NULL;
    navigation_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(NB_NAVIGATION_POST_STAGES,sizeof(vp_api_io_stage_t));

    /* POST stage: latency estimator */
    vp_os_memset(&vlat, 0, sizeof( vlat ));
    vlat.state = 0;
    vlat.last_decoded_frame_info = (void*)&vec;
    navigation_post_stages->stages_list[post_stages_index].name    = "(latency estimator)";
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_FILTER_DECODER;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&vlat;
    navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_latency_estimation_funcs;

    #ifdef RECORD_RAW_VIDEO
    /* POST stage: raw video recorder.
     * BUGFIX: a second memset of vrc wiped the vrc.stage value that was
     * assigned just above it; clear once, then configure. */
    vp_os_memset(&vrc, 0, sizeof( vrc ));
    #warning Recording RAW video option enabled in Navigation.
    vrc.stage = 3;
    #warning We have to get the stage number an other way
    navigation_post_stages->stages_list[post_stages_index].name    = "(raw video recorder)";
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_FILTER_DECODER;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&vrc;
    navigation_post_stages->stages_list[post_stages_index++].funcs = video_recorder_funcs;
    #endif // RECORD_RAW_VIDEO


    #if defined(FFMPEG_SUPPORT) && defined(RECORD_FFMPEG_VIDEO)
    #warning Recording FFMPEG (reencoding)video option enabled in Navigation.
    vp_os_memset(&ffmpeg_vrc, 0, sizeof(ffmpeg_vrc));
    ffmpeg_vrc.numframes = &vec.controller.num_frames;
    /* NOTE(review): `pipeline` is not declared in this function; this line
     * cannot compile when this option is enabled — verify intended source. */
    ffmpeg_vrc.stage = pipeline.nb_stages;
    navigation_post_stages->stages_list[post_stages_index].name    = "(ffmpeg recorder)";
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_FILTER_DECODER;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&ffmpeg_vrc;
    navigation_post_stages->stages_list[post_stages_index++].funcs = video_ffmpeg_recorder_funcs;
    #endif

    /* POST stage: tracker overlay.
     * BUGFIX: the memset was sized with sizeof(draw_trackers_funcs)
     * (the funcs table) instead of the config object being cleared. */
    vp_os_memset(&draw_trackers_cfg, 0, sizeof( draw_trackers_cfg ));
    draw_trackers_cfg.last_decoded_frame_info = (void*)&vec;
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_FILTER_DECODER;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&draw_trackers_cfg;
    navigation_post_stages->stages_list[post_stages_index++].funcs = draw_trackers_funcs;

    /* POST stage: Gtk display */
    vp_os_memset(&gtkconf, 0, sizeof( gtkconf ));
    gtkconf.rowstride               = out_picture->width * 3;
    gtkconf.last_decoded_frame_info = (void*)&vec;
    gtkconf.desired_display_width   = 0;  /* auto */
    gtkconf.desired_display_height  = 0;  /* auto */
    gtkconf.gdk_interpolation_mode  = 0;  /* fastest */
    navigation_post_stages->stages_list[post_stages_index].name    = "(Gtk display)";
    navigation_post_stages->stages_list[post_stages_index].type    = VP_API_OUTPUT_SDL;
    navigation_post_stages->stages_list[post_stages_index].cfg     = (void*)&gtkconf;
    navigation_post_stages->stages_list[post_stages_index++].funcs = vp_stages_output_gtk_funcs;

    /* Finalize the stage list sizes */
    navigation_pre_stages->length  = 0;
    navigation_post_stages->length = post_stages_index;

    params->in_pic = in_picture;
    params->out_pic = out_picture;
    params->pre_processing_stages_list  = navigation_pre_stages;
    params->post_processing_stages_list = navigation_post_stages;
    params->needSetPriority = 0;
    params->priority = 0;

    START_THREAD(video_stage, params);
    video_stage_init();
    if (2 <= ARDRONE_VERSION ())
      {
        START_THREAD (video_recorder, NULL);
        video_recorder_init ();
      }
    else
      {
        printf ("Don't start ... version is %d\n", ARDRONE_VERSION ());
      }

    /************************ END OF VIDEO STAGE CONFIG ***********************/

#ifdef PC_USE_POLARIS
    START_THREAD(polaris, &cfg);
#endif // PC_USE_POLARIS
#ifdef USE_TABLE_PILOTAGE
    START_THREAD(novadem, (void*) ("/dev/ttyUSB0"));
#endif // USE_TABLE_PILOTAGE

    return C_OK;
}
Пример #18
0
/**
 * Video pipeline thread (multisocket variant).
 *
 * Builds the pipeline: (TCP+UDP multisocket input) -> tcp reassembly ->
 * [AR.Drone 1 only: academy recorder] -> caller-supplied pre-stages ->
 * slice merge -> decoder -> caller-supplied post-stages, then runs it
 * until shutdown or a stage error.
 *
 * `data` is a specific_parameters_t* allocated by the caller; this thread
 * owns it and frees it (with its pictures and stage lists) on exit from
 * the run loop.
 */
DEFINE_THREAD_ROUTINE(video_stage, data) {
    C_RESULT res;

    vp_api_io_pipeline_t pipeline;
    vp_api_io_data_t out;

    vp_api_io_stage_t * stages;
    video_stage_merge_slices_config_t merge_slices_cfg;

    uint8_t i;

    specific_parameters_t * params = (specific_parameters_t *)(data);

    if (1 == params->needSetPriority)
    {
      CHANGE_THREAD_PRIO (video_stage, params->priority);
    }

    vp_os_memset(&icc_tcp, 0, sizeof ( icc_tcp));
    vp_os_memset(&icc_udp, 0, sizeof ( icc_udp));
    /* BUGFIX: merge_slices_cfg was handed to the merge-slices stage
     * uninitialized; zero it like every other stage config. */
    vp_os_memset(&merge_slices_cfg, 0, sizeof ( merge_slices_cfg));

    // TCP video connection config
    icc_tcp.com = COM_VIDEO();
    icc_tcp.buffer_size = (1024*1024);
    icc_tcp.protocol = VP_COM_TCP;
    COM_CONFIG_SOCKET_VIDEO(&icc_tcp.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

    // UDP video connection config
    icc_udp.com = COM_VIDEO();
    icc_udp.buffer_size = (1024*1024);
    icc_udp.protocol = VP_COM_UDP;
    COM_CONFIG_SOCKET_VIDEO(&icc_udp.socket, VP_COM_CLIENT, VIDEO_PORT, wifi_ardrone_ip);

    /* Multisocket: UDP first, TCP second */
    icc.nb_sockets = 2;
    icc.configs = icc_tab;
    icc.forceNonBlocking = &(tcpConf.tcpStageHasMoreData);
    icc_tab[1]  = &icc_tcp;
    icc_tab[0]  = &icc_udp;

    icc.buffer_size = (1024*1024);

    vp_os_memset(&vec, 0, sizeof ( vec));

    /* Stage array sized for the fixed stages plus the caller's lists;
     * freed by this thread before it returns. */
    stages = (vp_api_io_stage_t*) (vp_os_calloc(
        NB_STAGES + params->pre_processing_stages_list->length + params->post_processing_stages_list->length,
        sizeof (vp_api_io_stage_t)
    ));

    vec.src_picture = params->in_pic;
    vec.dst_picture = params->out_pic;

    vp_os_memset(&tcpConf, 0, sizeof ( tcpConf));
    tcpConf.maxPFramesPerIFrame = 30;
    tcpConf.frameMeanSize = 160*1024;
    tcpConf.tcpStageHasMoreData = FALSE;
    tcpConf.latencyDrop = 1;

    pipeline.name = "video_stage_pipeline";
    pipeline.nb_stages = 0;
    pipeline.stages = &stages[0];

    //ENCODED FRAME PROCESSING STAGES
    stages[pipeline.nb_stages].type    = VP_API_INPUT_SOCKET;
    stages[pipeline.nb_stages].cfg     = (void *) &icc;
    stages[pipeline.nb_stages++].funcs = video_com_multisocket_funcs;
	printf("thread_video_stage: adding multisocket_funcs as %d th stage (input type) to video_pipeline of AR.Drone with version %d.%d.%d\n", pipeline.nb_stages, ardroneVersion.majorVersion, ardroneVersion.minorVersion, ardroneVersion.revision );

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void *) &tcpConf;
    stages[pipeline.nb_stages++].funcs = video_stage_tcp_funcs;
	printf("thread_video_stage: adding tcp_funcs to video_pipeline as %d nd stage (filter type) of AR.Drone\n", pipeline.nb_stages );

    // Record Encoded video (AR.Drone 1 only; AR.Drone 2 uses a dedicated thread)
    if(1 == ARDRONE_VERSION())
    {
        ardrone_academy_stage_recorder_config.dest.pipeline = video_pipeline_handle;
        ardrone_academy_stage_recorder_config.dest.stage    = pipeline.nb_stages;
        stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
        stages[pipeline.nb_stages].cfg     = (void*)&ardrone_academy_stage_recorder_config;
        stages[pipeline.nb_stages++].funcs = ardrone_academy_stage_recorder_funcs;
		printf("thread_video_stage: adding academy_recorder_funcs to video_pipeline as %d rd stage (filter type) of AR.Drone\n", pipeline.nb_stages );
    }

    //PRE-DECODING STAGES ==> recording, ...
    for(i=0;i<params->pre_processing_stages_list->length;i++){
        stages[pipeline.nb_stages].type    = params->pre_processing_stages_list->stages_list[i].type;
        stages[pipeline.nb_stages].cfg     = params->pre_processing_stages_list->stages_list[i].cfg;
        stages[pipeline.nb_stages++].funcs = params->pre_processing_stages_list->stages_list[i].funcs;
    }
	printf("thread_video_stage: adding pre_processing_stages_list to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void *)&merge_slices_cfg;
    stages[pipeline.nb_stages++].funcs = video_stage_merge_slices_funcs;
	printf("thread_video_stage: adding merge_slices_funcs to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    //DECODING STAGES
    stages[pipeline.nb_stages].type    = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg     = (void*) &vec;
    stages[pipeline.nb_stages++].funcs = video_decoding_funcs;
	printf("thread_video_stage: adding video_decoding_funcs to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    //POST-DECODING STAGES ==> transformation, display, ...
    for(i=0;i<params->post_processing_stages_list->length;i++){
        stages[pipeline.nb_stages].type    = params->post_processing_stages_list->stages_list[i].type;
        stages[pipeline.nb_stages].cfg     = params->post_processing_stages_list->stages_list[i].cfg;
        stages[pipeline.nb_stages++].funcs = params->post_processing_stages_list->stages_list[i].funcs;
    }
	printf("thread_video_stage: adding post_processing_stages_list to video_pipeline (now %d stages) of AR.Drone\n", pipeline.nb_stages );

    if (!ardrone_tool_exit()) {
        PRINT("\nvideo stage thread initialisation\n\n");

        res = vp_api_open(&pipeline, &video_pipeline_handle);

        if (SUCCEED(res)) {
            int loop = SUCCESS;
            out.status = VP_API_STATUS_PROCESSING;

            while (!ardrone_tool_exit() && (loop == SUCCESS)) {
                if (video_stage_in_pause) {
                    /* Paused: wait on the condition; bump retries on resume */
                    vp_os_mutex_lock(&video_stage_mutex);
                    icc.num_retries = VIDEO_MAX_RETRIES;
                    vp_os_cond_wait(&video_stage_condition);
                    vp_os_mutex_unlock(&video_stage_mutex);
                }

                if (SUCCEED(vp_api_run(&pipeline, &out))) {
                    if ((out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING)) {
                        loop = SUCCESS;
                    }
                } else loop = -1; // Finish this thread
            }

            /* This thread owns the params structure; release it */
            vp_os_free(params->pre_processing_stages_list->stages_list);
            vp_os_free(params->post_processing_stages_list->stages_list);
            vp_os_free(params->pre_processing_stages_list);
            vp_os_free(params->post_processing_stages_list);

            vp_os_free(params->in_pic);
            vp_os_free(params->out_pic);

            vp_os_free(params);

            vp_api_close(&pipeline, &video_pipeline_handle);
        }
    }

    /* BUGFIX: the stage array was never freed on any path (leak). */
    vp_os_free(stages);

    PRINT("\nvideo stage thread ended\n\n");

    return (THREAD_RET) 0;
}
Пример #19
0
/**
 * @fn ardrone_sendata_unpack_all:
 * @param sendata_unpacked  sendata_unpacked in which to store the sendata.
 * @param sendata One packet read from the port SENDATA.
 * @param cks Out-parameter receiving the checksum carried by the packet.
 * @brief Disassembles a buffer of received sendata, and dispatches
 * it inside 'sendata_unpacked' structure.
 *
 * Walks the option list until the checksum option (which terminates the
 * packet) or an invalid zero-sized option is found.  Returns C_OK on a
 * clean walk, C_FAIL if a zero-sized option is encountered.
 */
C_RESULT ardrone_sendata_unpack_all(sendata_unpacked_t* sendata_unpacked, sendata_t* sendata, uint32_t* cks)
{
  C_RESULT res;
  sendata_cks_t sendata_cks = { 0 };
  sendata_option_t* sendata_option_ptr;

  sendata_option_ptr = (sendata_option_t*) &sendata->options[0];

  vp_os_memset( sendata_unpacked, 0, sizeof(*sendata_unpacked) );

  res = C_OK;

#ifdef DEBUG_SENDATA_C
  printf("Received sendatas tags :");
#endif


  while( sendata_option_ptr != NULL )
    {
      // A zero-sized option would make the walk loop forever; treat as corrupt.
      if( sendata_option_ptr->size == 0 )
	{
	  PRINT("One sendata option (%d) is not a valid option because its size is zero\n", sendata_option_ptr->tag);
	  sendata_option_ptr = NULL;
	  res = C_FAIL;
	}
      else
	{
	  /* NOTE(review): `<=` allows tag == SENDATA_NUM_TAGS into the
	   * refresh mask — confirm against the tag enum whether this
	   * should be a strict `<`. */
	  if( sendata_option_ptr->tag <= SENDATA_NUM_TAGS){
#ifdef DEBUG_SENDATA_C
	    printf("[%d]",sendata_option_ptr->tag);
#endif
	    sendata_unpacked->last_sendata_refresh |= SENDATA_OPTION_MASK(sendata_option_ptr->tag);
	  }

	  switch( sendata_option_ptr->tag )
	    {

#define SENDATA_OPTION(STRUCTURE,NAME,TAG)	\
	      case TAG:							\
		sendata_option_ptr = ardrone_sendata_unpack( sendata_option_ptr, sendata_unpacked->NAME); \
		break;

#define SENDATA_OPTION_DEMO(STRUCTURE,NAME,TAG)  SENDATA_OPTION(STRUCTURE,NAME,TAG)
#define SENDATA_OPTION_CKS(STRUCTURE,NAME,TAG) {}

#include <ardrone_tool/Sendata/sendata_keys.h>

	    case SENDATA_CKS_TAG:
	      sendata_option_ptr = ardrone_sendata_unpack( sendata_option_ptr, sendata_cks );
	      *cks = sendata_cks.cks;
	      sendata_option_ptr = NULL; // Checksum is always the last option
	      break;

	    default:
	      PRINT("Tag %d is an unknown sendata option tag\n", (int) sendata_option_ptr->tag);
	      /* BUGFIX: the pointer was cast through uint32_t for the
	       * arithmetic, truncating addresses on 64-bit platforms.
	       * Advance by bytes via a uint8_t* instead. */
	      sendata_option_ptr = (sendata_option_t *)((uint8_t *)sendata_option_ptr + sendata_option_ptr->size);
	      break;
	    }
	}
    }

#ifdef DEBUG_SENDATA_C
  printf("\n");
#endif

  return res;
}