C_RESULT ittiam_stage_decoding_close(ittiam_stage_decoding_config_t *cfg) {
    if (current_PaVE.video_codec == CODEC_MPEG4_AVC) {
        //H264
        /****************************************************************************/
        /* H264 ====== Reset the memory records
         *****************************************************************************/
        ITTIAM_DEBUG_PRINT("ITTIAM RESET");

        ivd_ctl_reset_ip_t h264_ctl_reset_ip;
        ivd_ctl_reset_op_t h264_ctl_reset_op;

        h264_ctl_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
        h264_ctl_reset_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
        h264_ctl_reset_ip.u4_size = sizeof (ivd_ctl_reset_ip_t);
        h264_ctl_reset_op.u4_size = sizeof (ivd_ctl_reset_op_t);

        if (ih264d_cxa8_api_function(H264_DECHDL, (void*) (&h264_ctl_reset_ip), (void*) (&h264_ctl_reset_op)) == IV_SUCCESS) {
            ITTIAM_DEBUG_PRINT("IVD_CMD_CTL_RESET    [ OK ]");
        } else {
            ITTIAM_DEBUG_PRINT("IVD_CMD_CTL_RESET    [ NOK ] with error %d", (UWORD32) h264_ctl_reset_op.u4_error_code);

        }

        vp_os_free(h264_mem_rec);
        vp_os_free(h264_ps_it_mem);

    } else if (current_PaVE.video_codec == CODEC_MPEG4_VISUAL) {
        //MPEG4
        /****************************************************************************/
        /* MPEG4 ====== Reset the memory records
         *****************************************************************************/
        ITTIAM_DEBUG_PRINT("ITTIAM RESET");

        ivd_ctl_reset_ip_t mpeg4_ctl_reset_ip;
        ivd_ctl_reset_op_t mpeg4_ctl_reset_op;

        mpeg4_ctl_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
        mpeg4_ctl_reset_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
        mpeg4_ctl_reset_ip.u4_size = sizeof (ivd_ctl_reset_ip_t);
        mpeg4_ctl_reset_op.u4_size = sizeof (ivd_ctl_reset_op_t);

        if (imp4d_cxa8_api_function(MPEG4_DECHDL, (void*) (&mpeg4_ctl_reset_ip), (void*) (&mpeg4_ctl_reset_op)) == IV_SUCCESS) {
            ITTIAM_DEBUG_PRINT("IVD_CMD_CTL_RESET    [ OK ]");
        } else {
            ITTIAM_DEBUG_PRINT("IVD_CMD_CTL_RESET    [ NOK ] with error %d", (UWORD32) mpeg4_ctl_reset_op.u4_error_code);

        }

        vp_os_free(mpeg4_mem_rec);
        vp_os_free(mpeg4_ps_it_mem);
    }
    old_num_frame = cfg->num_picture_decoded;
    ITTIAM_DEBUG_PRINT("ITTIAM CLEAN");
    return C_OK;
}
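/* Releases a user list previously returned by ardrone_get_user_list() and
 * resets the caller's pointer to NULL. Safe to call when *users is already NULL. */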
void ardrone_free_user_list (ardrone_users_t **users)
{
    if (NULL != *users)
    {
        if (NULL != (*users)->userList)
        {
            vp_os_free ((*users)->userList);
        }
        vp_os_free (*users);
        *users = NULL;
    }
}
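/* Builds a freshly allocated snapshot of the non-hidden users found in
 * available_configurations[CAT_USER]. Returns NULL if the drone does not
 * support multiconfiguration or if an allocation fails; the caller owns the
 * result and should release it with ardrone_free_user_list(). */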
ardrone_users_t *ardrone_get_user_list(void)
{
    if (0 == droneSupportsMulticonfig)
    {
        return NULL;
    }
    ardrone_users_t *retVal = vp_os_malloc (sizeof (ardrone_users_t));
    if (NULL == retVal)
        return NULL;
    retVal->userList = NULL; // Must start as NULL so the first vp_os_realloc below behaves like a malloc

    // Assume that the user list is up to date.
    // Users whose descriptions start with a dot ('.') are hidden users that must not be shown to the
    // application user (e.g. the default user for each iPhone, or a user specific to a control mode).
    int validUserCount = 0;
    int configIndex;
    for (configIndex = 0; configIndex < available_configurations[CAT_USER].nb_configurations; configIndex++) // Check all existing user_ids
    {
        if ('.' != available_configurations[CAT_USER].list[configIndex].description[0]) // Not a hidden user
        {
            validUserCount++;
            ardrone_user_t *newList = vp_os_realloc (retVal->userList, validUserCount * sizeof (ardrone_user_t));
            if (NULL == newList)
            {
                vp_os_free (retVal->userList); // a failed realloc leaves the old block allocated
                vp_os_free (retVal);
                return NULL;
            }
            retVal->userList = newList;
            strncpy (retVal->userList[validUserCount-1].ident, available_configurations[CAT_USER].list[configIndex].id, MULTICONFIG_ID_SIZE);
            strncpy (retVal->userList[validUserCount-1].description, available_configurations[CAT_USER].list[configIndex].description, USER_NAME_SIZE);
        }
    }
    retVal->userCount = validUserCount;
    return retVal;
}
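/* Usage sketch, not part of the SDK sources above: enumerate the visible users
 * and release the list with ardrone_free_user_list() when done. printf (and
 * <stdio.h>) are assumed here for illustration only. */
static void example_print_user_list(void)
{
    ardrone_users_t *users = ardrone_get_user_list();
    int i;
    if (NULL == users)
        return; /* multiconfig not supported, or allocation failed */
    for (i = 0; i < users->userCount; i++)
        printf("user %s (%s)\n", users->userList[i].ident, users->userList[i].description);
    ardrone_free_user_list(&users);
}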
void videoServer_destroy() {
	uint32_t iterator;
	TClient *curClient;

	/* Do not accept more clients */
	vp_com_close(&globalCom, &videoServerSocket);

	/* Close all client sockets */
	if (videoServer_clientList != NULL) {
		clientList_startIterator(&iterator);
		while((curClient = clientList_getNextClient(videoServer_clientList, &iterator)) != NULL) {
			closeClient(curClient);
		}
		clientList_destroy(videoServer_clientList);
		videoServer_clientList = NULL;
	}

	/* Free resources */
	vp_os_cond_destroy(&frameBufferCond);
	vp_os_mutex_destroy(&frameBufferMutex);
	vp_os_free(frameBuffer);

	videoTranscoder_destroy();
	vp_os_mutex_destroy(&settingsMutex);

	videoServerStarted = FALSE;
}
DEFINE_THREAD_ROUTINE(mobile_main, data)
{
	C_RESULT res = C_FAIL;
	vp_com_wifi_config_t *config = NULL;
	
	mobile_main_param_t *param = (mobile_main_param_t *)data;

	ardroneEngineCallback callback = param->callback;
	vp_os_memset(drone_address, 0x0, sizeof(drone_address));

  // TODO(johnb): Make this autodetect based on network interfaces
	while(((config = (vp_com_wifi_config_t *)wifi_config()) != NULL) && (strcmp(config->itfName, WIFI_ITFNAME) != 0))
	{
		PRINT("Wait WIFI connection !\n");
		vp_os_delay(250);
	}
	
	// Get drone_address
	vp_os_memcpy(drone_address, config->server, strlen(config->server));
	PRINT("Drone address %s\n", drone_address);
	
	// Get iphone_mac_address
	get_iphone_mac_address(config->itfName);
	PRINT("Iphone MAC Address %s\n", iphone_mac_address);
	
	res = ardrone_tool_setup_com( NULL );
	
	if( FAILED(res) )
	{
		PRINT("Wifi initialization failed. It means either:\n");
		PRINT("\t* you're not root (it's mandatory because you can set up wifi connection only as root)\n");
		PRINT("\t* wifi device is not present (on your pc or on your card)\n");
		PRINT("\t* you set the wrong name for wifi interface (for example rausb0 instead of wlan0) \n");
		PRINT("\t* ap is not up (reboot card or remove wifi usb dongle)\n");
		PRINT("\t* wifi device has no antenna\n");
	}
	else
	{
		START_THREAD(video_stage, NULL);
		
		res = ardrone_tool_init(drone_address, strlen(drone_address), NULL, param->appName, param->usrName);
		
		callback(ARDRONE_ENGINE_INIT_OK);
		
		ardrone_tool_set_refresh_time(1000 / kAPS);

		while( SUCCEED(res) && bContinue == TRUE )
		{
			ardrone_tool_update();
		}
		
		JOIN_THREAD(video_stage);

		res = ardrone_tool_shutdown();
	}
	
	vp_os_free (data);
	
	return (THREAD_RET)res;
}
/* Link a program with all currently attached shaders */
GLint opengl_shader_link(GLuint prog)
{
#if defined(DEBUG_SHADER)
    printf("%s : %d\n", __FUNCTION__, __LINE__);
#endif
	GLint status;
	
	glLinkProgram(prog);
	
#if defined(DEBUG_SHADER)
	GLint logLength;
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0)
    {
        GLchar *log = (GLchar *)vp_os_malloc(logLength);
        glGetProgramInfoLog(prog, logLength, &logLength, log);
        printf("Program link log:\n%s\n", log);
        vp_os_free(log);
    }
#endif
    
    glGetProgramiv(prog, GL_LINK_STATUS, &status);
    if (status == GL_FALSE)
		printf("Failed to link program %d\n", prog);
	
	return status;
}
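/* Unlinks 'element' from 'list', patching its neighbours (or list->head) and
 * decrementing list->nb. The element is expected to have been allocated by the
 * list itself: ATCODEC_SYSTEM_PTR() recovers the hidden
 * ATcodec_Sorted_List_header_t stored with the user data, and that header is
 * what actually gets freed. */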
void
ATcodec_Sorted_List_removeElement (ATcodec_Sorted_List_t *list, void *element)
{
  ATcodec_Sorted_List_header_t *system_ptr;
  ATcodec_Sorted_List_header_t *next;
  ATcodec_Sorted_List_header_t *previous;

  VP_OS_ASSERT(element);

  system_ptr = ATCODEC_SYSTEM_PTR(element);

  VP_OS_ASSERT(system_ptr);
  VP_OS_ASSERT(system_ptr->magic == ATCODEC_MAGIC_NUMBER);

  next = system_ptr->next;
  previous = system_ptr->previous;

  if(previous)
    {
      previous->next = next;
    }
  else
    {
      list->head = next;
    }

  if(next)
    next->previous = previous;

  vp_os_free(system_ptr);

  list->nb--;
}
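/* Reads whitespace-separated "x y z" float triplets from 'fp' and builds a
 * singly linked route out of them. Returns NULL (and frees the route_t) when
 * the file contains no usable point. */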
route_t *file_create_route(FILE *fp) {
		route_t *route = (route_t*)vp_os_malloc(sizeof(route_t));

		INIT_ROUTE(route);

		distance_t x, y, z;
		NODE_T *p = NULL;

 		while (1) {
				if (fscanf(fp, "%f %f %f", &x, &y, &z) != 3) /* stop on EOF or on a malformed line */
						break;

				if (route->head == NULL) {
						route->head = (NODE_T*)vp_os_malloc(sizeof(NODE_T));
						p = route->head;
						INIT_NODE(p, x, y, z);
						printw("x %f, y %f, z %f\n", x, y, z);
				}
				else {
						p->next = (NODE_T*)vp_os_malloc(sizeof(NODE_T));
						p = p->next;
						INIT_NODE(p, x, y, z);
						printw("x %f, y %f, z %f\n", x, y, z);
				}
		}

		printw("\n");

		if (route->head == NULL) {
				vp_os_free(route);
				return NULL;
		}

		return route;
}
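/* Scans /proc/cpuinfo line by line for the "neon" feature flag and stores the
 * result in the global neonStatus (left untouched if the file cannot be opened
 * or the scratch buffer cannot be allocated). */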
void checkNeonSupport ()
{
  neon_status_t loc_neonStat = NEON_SUPPORT_FAIL;
  FILE *cpuInfo = fopen ("/proc/cpuinfo", "r");
  if (NULL == cpuInfo)
    {
      return;
    }

  char *neonCheckStrBuf = vp_os_malloc (NEONCHECK_BUFFER_STRING_SIZE * sizeof (char));
  if (NULL == neonCheckStrBuf)
    {
      fclose (cpuInfo);
      return;
    }

  while (NULL != fgets (neonCheckStrBuf, NEONCHECK_BUFFER_STRING_SIZE, cpuInfo))
    {
      char *supportTest = strstr (neonCheckStrBuf, "neon");
      if (NULL != supportTest)
        {
          loc_neonStat = NEON_SUPPORT_OK;
          break;
        }
    }
  
  vp_os_free (neonCheckStrBuf);
  fclose (cpuInfo);

  neonStatus = loc_neonStat;
}
C_RESULT video_stage_decoder_close (video_decoder_config_t *cfg)
{
  C_RESULT res, resVlib, resMp4h264;
  resVlib = vlib_stage_decoding_close (cfg->vlibConf);
  resMp4h264 = mp4h264_close (cfg->mp4h264Conf);
  res = (C_OK == resVlib && C_OK == resMp4h264 ) ? C_OK : C_FAIL;
  vp_os_free (cfg->vlibConf);
  cfg->vlibConf = NULL;
  vp_os_free (cfg->vlibOut);
  cfg->vlibOut = NULL;
  vp_os_free (cfg->mp4h264Conf);
  cfg->mp4h264Conf = NULL;
  vp_os_free (cfg->mp4h264Out);
  cfg->mp4h264Out = NULL;
  return res;
}
C_RESULT
vp_stages_frame_pipe_receiver_close(vp_stages_frame_pipe_config_t *cfg)
{
  vp_os_free (cfg->outPicture.raw);
  cfg->outPicture.raw = NULL;
  vp_os_cond_destroy (&(cfg->buffer_sent));
  vp_os_mutex_destroy (&(cfg->pipe_mut));
  return C_OK;
}
File: video.c (Project: Aidsy/ARDroneSDK)
// Called from the app framework.
void video_deinit()
{
    INFO("terminating OpenGL video rendering...\n");
    //glDeleteTextures(1, &texture);
    if (pixbuf) {
        vp_os_free(pixbuf);
        pixbuf = NULL;
    }
}
void
ATcodec_Buffer_destroy (ATcodec_Buffer_t *s)
{
  vp_os_free(s->data);

  s->data = NULL;
  s->nbElements = 0;
  s->elementSize = 0;
  s->totalSize = 0;
}
void ardrone_control_reset_custom_configurations_list(custom_configuration_list_t *available_configurations)
{
	int i;

	for (i=0;i<NB_CONFIG_CATEGORIES;i++)
	{
		if (available_configurations[i].list) { vp_os_free(available_configurations[i].list); }
		available_configurations[i].list=NULL;
		available_configurations[i].nb_configurations=0;
	}
}
C_RESULT display_stage_close (display_stage_cfg_t *cfg)
{
    // Free all allocated memory
    if (NULL != cfg->frameBuffer)
    {
        vp_os_free (cfg->frameBuffer);
        cfg->frameBuffer = NULL;
    }

    return C_OK;
}
C_RESULT vlib_stage_decoding_close(vlib_stage_decoding_config_t *cfg)
{
  if(!cfg->block_mode_enable)
  {
    vp_os_free(stream.bytes);
    stream.bytes = NULL;
  }
  else
    cfg->controller.in_stream.bytes = NULL;
    
  return video_codec_close( &cfg->controller );
}
void get_iphone_mac_address(const char *itfName)
{
    int                 mib[6];
    size_t              len;
    char                *buf;
    unsigned char       *ptr;
    struct if_msghdr    *ifm;
    struct sockaddr_dl  *sdl;
    
    mib[0] = CTL_NET;
    mib[1] = AF_ROUTE;
    mib[2] = 0;
    mib[3] = AF_LINK;
    mib[4] = NET_RT_IFLIST;
    
    if ((mib[5] = if_nametoindex(itfName)) == 0) 
    {
        printf("Error: if_nametoindex error\n");
        return;
    }
    
    if (sysctl(mib, 6, NULL, &len, NULL, 0) < 0) 
    {
        printf("Error: sysctl, take 1\n");
        return;
    }
    
    if ((buf = vp_os_malloc(len)) == NULL) 
    {
        printf("Could not allocate memory. error!\n");
        return;
    }
    
    if (sysctl(mib, 6, buf, &len, NULL, 0) < 0) {
        printf("Error: sysctl, take 2\n");
        vp_os_free(buf); /* avoid leaking the buffer on this error path */
        return;
    }
    
    ifm = (struct if_msghdr *)buf;
    sdl = (struct sockaddr_dl *)(ifm + 1);
    ptr = (unsigned char *)LLADDR(sdl);
    sprintf(iphone_mac_address, "%02X:%02X:%02X:%02X:%02X:%02X",*ptr, *(ptr+1), *(ptr+2), *(ptr+3), *(ptr+4), *(ptr+5));
    
    if(buf != NULL)
        vp_os_free(buf);
}
C_RESULT output_gtk_stage_close( void *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
#ifdef IMAGE_DEBUG
	PRINT("Closing local video debug...\n");
	cvDestroyWindow("DroneCamera");
#endif
	if (debugImage != NULL) {
		cvReleaseImageHeader(&debugImage);
		debugImage = NULL;
	}
	if (outputImageBuffer != NULL) {
		vp_os_free(outputImageBuffer);
		outputImageBuffer = NULL;
	}
	/* In other filters, memory is not freed. I guess it is done by the VP stage manager */
  return (SUCCESS);
}
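/* Aligned realloc built on top of vp_os_aligned_malloc()/vp_os_free(). It
 * assumes the layout produced by vp_os_aligned_malloc(): the two ints stored
 * just before the returned pointer hold the usable size and the offset back to
 * the raw allocation, which is how the old size is read and the old block freed. */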
void*
vp_os_aligned_realloc(void* ptr, size_t size, size_t align_size)
{
  void* ptr_ret;
  void* aligned_ptr;

  if( size == 0 )
  {
    ptr_ret = NULL;
    if( ptr != NULL )
      vp_os_aligned_free(ptr);
  }
  else
  {
    if( ptr != NULL )
    {
      int* ptr2 = (int*)ptr - 1;
      size_t old_size;

      aligned_ptr = ptr;

      old_size = *ptr2--;

      ptr_ret = vp_os_aligned_malloc(size, align_size);

      // Compute smallest size
      if( size > old_size )
      {
        size = old_size;
      }

      // Copy old data
      vp_os_memcpy( ptr_ret, aligned_ptr, size );

      vp_os_free( ((char*)ptr - *ptr2) );
    }
    else
    {
      ptr_ret = vp_os_aligned_malloc(size, align_size);
    }
  }

  return ptr_ret;
}
static C_RESULT video_codec_close_private( video_controller_t* controller, bool_t keep_stream )
{
  video_utils_close( controller );

  if( controller->blockline_cache != NULL )
  {
    vp_os_aligned_free( controller->blockline_cache );
    controller->blockline_cache = NULL;
  }

  if (controller->cache_mbs != NULL)
  {
    vp_os_free( controller->cache_mbs );
    controller->cache_mbs = NULL;
  }


  // TODO: dynamic codec selection; packetizer_close is not called on a dynamic codec change
  if( keep_stream == FALSE && controller->in_stream.bytes != NULL )
    video_packetizer_close( controller );

  switch( controller->codec_type )
  {
    case UVLC_CODEC:
      uvlc_codec_free( controller );
      break;

    case P263_CODEC:
      p263_codec_free( controller );
      break;

    case P264_CODEC:
      p264_codec_free( controller );
      break;

    default:
      break;
  }

  // Cleanup caches
  video_controller_cleanup( controller );

  return C_OK;
}
static C_RESULT video_codec_close_private( video_controller_t* controller, bool_t keep_stream )
{
  video_utils_close( controller );

  if( controller->blockline_cache != NULL )
  {
    vp_os_aligned_free( controller->blockline_cache );
    controller->blockline_cache = NULL;
  }

  if (controller->cache_mbs != NULL)
  {
    vp_os_free( controller->cache_mbs );
    controller->cache_mbs = NULL;
  }


  if( keep_stream == FALSE && controller->in_stream.bytes != NULL )
    video_packetizer_close( controller );

  switch( controller->codec_type )
  {
    case UVLC_CODEC:
      uvlc_codec_free( controller );
      break;

    case P263_CODEC:
      p263_codec_free( controller );
      break;

    /*case P264_CODEC:
      p264_codec_free( controller );
      break;*/

    default:
      break;
  }

  // Cleanup caches
  video_controller_cleanup( controller );

  return C_OK;
}
C_RESULT vlib_stage_decoding_open(vlib_stage_decoding_config_t *cfg)
{
  // init video decoder with NULL_CODEC
  video_codec_open( &cfg->controller, NULL_CODEC );

  if(cfg->block_mode_enable)
  {
    vp_os_free( cfg->controller.in_stream.bytes );
  }
  else
  {
    stream.bytes  = (uint32_t*)vp_os_malloc(FRAME_MODE_BUFFER_SIZE*sizeof(uint32_t));
    stream.index  = 0;
    stream.used   = 0;
    stream.size   = FRAME_MODE_BUFFER_SIZE*sizeof(uint32_t);
  }

  cfg->num_picture_decoded = 0;

  return C_OK;
}
/* Create and compile a shader from the provided source(s) */
GLint opengl_shader_compile(GLuint *shader, GLenum type, GLsizei count, const char* content_file)
{
#if defined(DEBUG_SHADER)
    printf("%s : %d\n", __FUNCTION__, __LINE__);
#endif
	GLint status;
	const GLchar *sources = (const GLchar *)content_file;

	// get source code
	if (!sources)
	{
		printf("Failed to load vertex shader\n");
		return 0;
	}
	
    *shader = glCreateShader(type);				// create shader
    glShaderSource(*shader, 1, &sources, NULL);	// set source code in the shader
    glCompileShader(*shader);					// compile shader
	
#if defined(DEBUG_SHADER)
	GLint logLength;
    glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0)
    {
        GLchar *log = (GLchar *)vp_os_malloc(logLength);
        glGetShaderInfoLog(*shader, logLength, &logLength, log);
        printf("Shader compile log:\n%s\n", log);
        vp_os_free(log);
    }
#endif
    
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
    if (status == GL_FALSE)
	{
		printf("Failed to compile shader:\n");
		printf("%s\n", sources);
	}
	
	return status;
}
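/* VP API pipeline stage: on the first call it allocates the single output
 * buffer, then on each call it fills it with up to cfg->buffer_size bytes read
 * from cfg->f, rewinding the file when cfg->loop is set and otherwise flagging
 * VP_API_STATUS_ENDED (or VP_API_STATUS_ERROR on a read error). */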
C_RESULT
vp_stages_input_file_stage_transform(vp_stages_input_file_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  
  uint32_t UI32_i=0;
  char c;
  uint32_t y_size, c_size;

  vp_os_mutex_lock(&out->lock);
  if( out->status == VP_API_STATUS_INIT )
  {
    out->numBuffers =  1;
    out->size = cfg->buffer_size;
    out->buffers = (int8_t **) vp_os_malloc (sizeof(int8_t *)+out->size*sizeof(int8_t));
    out->buffers[0] = (int8_t *)(out->buffers+1);
    out->indexBuffer = 0;
    // out->lineSize not used
    out->status = VP_API_STATUS_PROCESSING;
  }

  // work and update status
  if(out->size < (int32_t)cfg->buffer_size || feof(cfg->f))
  {
    if (cfg->loop)
    {
      rewind(cfg->f);
    }
    else
    {
      //vp_os_free(out->buffers);
      out->status = VP_API_STATUS_ENDED;
    }
  }
  else
  {
    if(out->status == VP_API_STATUS_PROCESSING)
      out->size = fread(out->buffers[0], sizeof(int8_t), cfg->buffer_size*sizeof(int8_t), cfg->f);

    if(out->size <= 0)
    {
      if (cfg->loop)
      {
        rewind(cfg->f);
        out->size = fread(out->buffers[0], sizeof(int8_t), cfg->buffer_size*sizeof(int8_t), cfg->f);
      }
      else
      {
        vp_os_free(out->buffers);
        out->status = VP_API_STATUS_ENDED;
      }
    }

    if(ferror(cfg->f))
    {
      PRINT("ferror\n");
      out->status = VP_API_STATUS_ERROR;
    }
  }

  vp_os_mutex_unlock(&out->lock);
  return (VP_SUCCESS);
}
static void on_destroy(GtkWidget *widget, gpointer data)
{
  vp_os_free(gui);
  gtk_main_quit();
}
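/* Recorder stage: passes the incoming picture through unchanged and, while
 * recording is active, feeds it to FFmpeg. Frames are duplicated so that the
 * output file plays back at a constant rate (one frame every 16666 us, i.e.
 * roughly 60 FPS), and the current picture is copied into previous_frame so it
 * can still be used on the next call. */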
C_RESULT video_stage_ffmpeg_recorder_transform(video_stage_ffmpeg_recorder_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
	 time_t temptime;
	 struct timeval tv;
	 struct tm *atm;
	 long long int current_timestamp_us;
	 static long long int first_frame_timestamp_us=0;
	 static int frame_counter=0;
	 int i;
	 int frame_size;
	 static int flag_video_file_open=0;

	 vp_os_mutex_lock( &out->lock );
	 vp_api_picture_t* picture = (vp_api_picture_t *) in->buffers;

	gettimeofday(&tv,NULL);

	 temptime = (time_t)tv.tv_sec;
	 atm = localtime(&temptime);  //atm = localtime(&tv.tv_sec);

	 current_timestamp_us = (long long int)tv.tv_sec * 1000000 + tv.tv_usec; /* widen before multiplying to avoid 32-bit overflow */


  if( out->status == VP_API_STATUS_INIT )
  {
    out->numBuffers   = 1;
    out->indexBuffer  = 0;
    out->lineSize     = NULL;
    //out->buffers      = (int8_t **) vp_os_malloc( sizeof(int8_t *) );
  }

  out->size     = in->size;
  out->status   = in->status;
  out->buffers  = in->buffers;

  if( in->status == VP_API_STATUS_ENDED ) {
    out->status = in->status;
  }
  else if(in->status == VP_API_STATUS_STILL_RUNNING) {
    out->status = VP_API_STATUS_PROCESSING;
  }
  else {
    out->status = in->status;
  }



	if(cfg->startRec==VIDEO_RECORD_HOLD)
	{
		/* Create a new video file */

		sprintf(video_filename_ffmpeg, "%s/video_%04d%02d%02d_%02d%02d%02d_w%i_h%i.mp4",
				VIDEO_FILE_DEFAULT_PATH,
				atm->tm_year+1900, atm->tm_mon+1, atm->tm_mday,
				atm->tm_hour, atm->tm_min, atm->tm_sec,
				picture->width,
				picture->height);

		create_video_file(video_filename_ffmpeg, picture->width,picture->height);
		flag_video_file_open=1;

		cfg->startRec=VIDEO_RECORD_START;

		first_frame_timestamp_us = current_timestamp_us;
		frame_counter=1;
	}

  if( out->size > 0 && out->status == VP_API_STATUS_PROCESSING && cfg->startRec==VIDEO_RECORD_START)
  {
	  frame_size = ( previous_frame.width * previous_frame.height )*3/2;

	  /* Send the previous frame to FFMPEG */
	  if (previous_frame.buffer!=NULL)
		{
		  /* Compute the number of frames to store to achieve 60 FPS
		   * This should be computed using the timestamp of the first frame
		   * to avoid error accumulation.
		   */
			int current_frame_number = (current_timestamp_us - first_frame_timestamp_us) / 16666;
			int nb_frames_to_write = current_frame_number - previous_frame.frame_number;

			if (picture_to_encode!=NULL){
				picture_to_encode->data[0] = picture_to_encode->base[0] = picture->y_buf;
				picture_to_encode->data[1] = picture_to_encode->base[1] = picture->cb_buf;
				picture_to_encode->data[2] = picture_to_encode->base[2] = picture->cr_buf;

				picture_to_encode->linesize[0] = picture->width;
				picture_to_encode->linesize[1] = picture->width/2;
				picture_to_encode->linesize[2] = picture->width/2;
			}

			for (i=0;i<nb_frames_to_write;i++)
			{
				//printf("Storing %i frames\n",nb_frames_to_write);
				write_video_frame(oc, video_st);
			}

			/* Pass info to the next iteration */
			previous_frame.frame_number = current_frame_number;
		}

	  /* Create a buffer to hold the current frame */
		//if (0)
		{
	  if (previous_frame.buffer!=NULL && (previous_frame.width!=picture->width || previous_frame.height!=picture->height))
		{
			vp_os_free(previous_frame.buffer);
			previous_frame.buffer=NULL;
		}
		if (previous_frame.buffer==NULL)
		{
			previous_frame.width = picture->width;
			previous_frame.height = picture->height;
			frame_size = ( previous_frame.width * previous_frame.height )*3/2;
			printf("Allocating previous frame.\n");
			previous_frame.buffer=vp_os_malloc( frame_size );
		}

	/* Copy the current frame in a buffer so it can be encoded at next stage call */
		if (previous_frame.buffer!=NULL)
		{
			char * dest = previous_frame.buffer;
			int size = picture->width*picture->height;
			vp_os_memcpy(dest,picture->y_buf,size);

			dest+=size;
			size /= 4;
			vp_os_memcpy(dest,picture->cb_buf,size);

			dest+=size;
			vp_os_memcpy(dest,picture->cr_buf,size);
		}
		}
  }


  else
	{
		if(cfg->startRec==VIDEO_RECORD_STOP && flag_video_file_open)
		{
			close_video_file();
			flag_video_file_open=0;
		}
	}

  vp_os_mutex_unlock( &out->lock );

  return C_OK;
}
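/* Parses the raw multiconfiguration listing received from the drone. The buffer
 * is expected to contain sections of the form "[CATEGORY]\r" followed by lines
 * "id,description\r"; each recognised category resets the corresponding entry
 * of available_configurations and refills its id/description list. */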
void ardrone_control_read_custom_configurations_list(/*in*/char * buffer,
													 /*in*/int buffer_size,
													 /*out*/custom_configuration_list_t *available_configurations)
{
	custom_configuration_list_t * current_scope = NULL;
	char id[CUSTOM_CONFIGURATION_ID_LENGTH+1];
	char description[1024];
	int index = 0;
	char * pindex; int j;
	char * end_of_buffer;

	index = 0;
	pindex = buffer;
	end_of_buffer = buffer + buffer_size;

	DEBUG_CONFIG_RECEIVE("Decoding %i bytes",buffer_size);
	DEBUG_CONFIG_RECEIVE("\n");

	while(1)
	{
		//DEBUG_CONFIG_RECEIVE("Analysing <"); for (i=index;i<buffer_size;i++) DEBUG_CONFIG_RECEIVE("[%i]",buffer[i]); DEBUG_CONFIG_RECEIVE(">\n");
		/* Go to the beginning of a section */
			while((*pindex)!='[') { index++; pindex++; if (pindex==end_of_buffer) return; }
		/* Search the end of the section name */
			for (;index<buffer_size;index++) { if (buffer[index]=='\r') { buffer[index]=0; break; } }
			if (index==buffer_size) return;
		/* Search the corresponding category */
			for (j=0;j<NB_CONFIG_CATEGORIES;j++){
				if ( strcmp(custom_configuration_headers[j],pindex)==0 ){
					/* Found the category */
					current_scope = &available_configurations[j];
					DEBUG_CONFIG_RECEIVE(" Found Scope <%s>\n",custom_configuration_headers[j]);
					break;
				}
			}
			if (j==NB_CONFIG_CATEGORIES) { DEBUG_CONFIG_RECEIVE("Unknown category."); return ;}
		/* Reset the list */
			if (current_scope!=NULL)
			{
				current_scope->nb_configurations = 0;
				if (current_scope->list!=NULL) { vp_os_free(current_scope->list); current_scope->list = NULL; }
			}
		/* Points on the first ID */
			index++;
			pindex=buffer+index;

		/* Read the IDs */
			while(pindex<end_of_buffer && (*pindex)!='[' && (*pindex)!=0)
			{
				vp_os_memset(id,0,sizeof(id));
				vp_os_memset(description,0,sizeof(description));

				//DEBUG_CONFIG_RECEIVE("Now scanning <%c> %i\n",*pindex,index);
				for (;index<buffer_size;index++) { if (buffer[index]==',' || buffer[index]=='\r') { buffer[index]=0; break; } }
				if (index==buffer_size) return;
				strncpy(id,pindex,sizeof(id)-1); /* id[] was zeroed above, so it stays NUL-terminated */
				index++;
				pindex=buffer+index;
				for (;index<buffer_size;index++) { if (buffer[index]==0 || buffer[index]=='\r') { buffer[index]=0; break; } }
				if (index==buffer_size) return;
				strncpy(description,pindex,sizeof(description)-1); /* same for description[] */
				DEBUG_CONFIG_RECEIVE(" Found ID <%s> description <%s>\n",id,description);
				index++;
				pindex=buffer+index;

				/* Store the found ID */
					/* Increase the size of the list by one element */
					current_scope->list = vp_os_realloc(current_scope->list,sizeof(*current_scope->list)*(current_scope->nb_configurations+1));
					/* Store the new element */
					strncpy(current_scope->list[current_scope->nb_configurations].id,
							id,
							sizeof(current_scope->list[current_scope->nb_configurations].id)  );
					strncpy(current_scope->list[current_scope->nb_configurations].description,
							description,
							sizeof(current_scope->list[current_scope->nb_configurations].description)  );
					current_scope->nb_configurations++;
			}
	}
	return;
}
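/* Sets up the custom video server: allocates the frame packet buffer and its
 * mutex/condition, then creates and binds the TCP listening socket by hand so
 * that SO_REUSEADDR can be set before bind(), which Parrot's vp_com API does
 * not allow. Returns C_FAIL and releases everything on error. */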
C_RESULT videoServer_init() {
	videoServer_clientList = clientList_create(VIDEOSERVER_MAXCLIENTS);

	/* Build frame packet buffer */
	videoServer_framePacketLength = sizeof(VideoFrameHeader) + videoServer_frameWidth * videoServer_frameHeight * (videoServer_frameBpp / 8);
	frameBuffer = vp_os_malloc(videoServer_framePacketLength);

	vp_os_mutex_init(&frameBufferMutex);
	vp_os_cond_init(&frameBufferCond, &frameBufferMutex);
	/* Create server socket */
	videoServerSocket.type = VP_COM_SERVER;
	videoServerSocket.protocol = VP_COM_TCP;
	videoServerSocket.block = VP_COM_DONTWAIT;
	videoServerSocket.is_multicast = 0;
	videoServerSocket.port = VIDEO_SERVER_PORT;
/*	if (FAILED(vp_com_open(&globalCom, &videoServerSocket, NULL, NULL))) {
		vp_os_cond_destroy(&frameBufferCond);
		vp_os_mutex_destroy(&frameBufferMutex);
		vp_os_free(frameBuffer);
		vp_os_mutex_destroy(&clientListMutex);
		PRINT("[VideoServer] Unable to open server socket\n");
		return C_FAIL;
	}*/
	/* We create the server socket manually, as we need to set the reuse flag before binding it and Parrot's SDK doesn't allow that */
	bool_t error = TRUE;
	videoServerSocket.priv = (void *)socket(AF_INET, SOCK_STREAM, 0);
	if ((int)videoServerSocket.priv >= 0) {
		struct sockaddr_in serverAddress;
		/* Try reusing the address */
		int s = 1;
		setsockopt((int)videoServerSocket.priv, SOL_SOCKET, SO_REUSEADDR, &s, sizeof(int));
		/* Bind to address and port */
		bzero((char *)&serverAddress, sizeof(serverAddress));
		serverAddress.sin_family = AF_INET;
		serverAddress.sin_addr.s_addr = INADDR_ANY;
		serverAddress.sin_port = htons(videoServerSocket.port);
		if (bind((int)videoServerSocket.priv, (struct sockaddr *) &serverAddress, sizeof(struct sockaddr_in)) >= 0)
		{
			error = FALSE;
		}
		else
			close((int)videoServerSocket.priv);
	}
	if (error) {
		vp_os_cond_destroy(&frameBufferCond);
		vp_os_mutex_destroy(&frameBufferMutex);
		vp_os_free(frameBuffer);
		if (videoServer_clientList != NULL) {
			clientList_destroy(videoServer_clientList);
			videoServer_clientList = NULL;
		}
		PRINT("[VideoServer] Unable to open server socket\n");
		return C_FAIL;
	}

	/* Set server socket timeout */
	struct timeval tm;
	tm.tv_sec = 0;
	tm.tv_usec = ACCEPT_TIMEOUT * 1e6;
	setsockopt((int32_t)videoServerSocket.priv, SOL_SOCKET, SO_RCVTIMEO, &tm, sizeof(tm));

	videoTranscoder_init();

	videoServerStarted = TRUE;
	vp_os_mutex_init(&settingsMutex);

	return C_OK;
}
static void
ATcodec_Sorted_List_freeElement(void *element)
{
  vp_os_free(ATCODEC_SYSTEM_PTR(element));
}
C_RESULT video_stage_close(vlib_stage_decoding_config_t *cfg)
{
	vp_os_free(video_stage_config.data);
	
	return C_OK;
}