void rtw_mstat_update(const enum mstat_f flags, const MSTAT_STATUS status, u32 sz)
{
	static u32 update_time = 0;
	int peak, alloc;
	int i;

	/* initialization: zero all per-type and per-function counters on first use */
	if (!update_time) {
		for (i = 0; i < mstat_tf_idx(MSTAT_TYPE_MAX); i++) {
			ATOMIC_SET(&(rtw_mem_type_stat[i].alloc), 0);
			ATOMIC_SET(&(rtw_mem_type_stat[i].peak), 0);
			ATOMIC_SET(&(rtw_mem_type_stat[i].alloc_cnt), 0);
			ATOMIC_SET(&(rtw_mem_type_stat[i].alloc_err_cnt), 0);
		}
		for (i = 0; i < mstat_ff_idx(MSTAT_FUNC_MAX); i++) {
			ATOMIC_SET(&(rtw_mem_func_stat[i].alloc), 0);
			ATOMIC_SET(&(rtw_mem_func_stat[i].peak), 0);
			ATOMIC_SET(&(rtw_mem_func_stat[i].alloc_cnt), 0);
			ATOMIC_SET(&(rtw_mem_func_stat[i].alloc_err_cnt), 0);
		}
	}

	switch (status) {
	case MSTAT_ALLOC_SUCCESS:
		/* account the allocation against the type counters and track the peak */
		ATOMIC_INC(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc_cnt));
		alloc = ATOMIC_ADD_RETURN(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc), sz);
		peak = ATOMIC_READ(&(rtw_mem_type_stat[mstat_tf_idx(flags)].peak));
		if (peak < alloc)
			ATOMIC_SET(&(rtw_mem_type_stat[mstat_tf_idx(flags)].peak), alloc);

		/* same bookkeeping for the per-function counters */
		ATOMIC_INC(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc_cnt));
		alloc = ATOMIC_ADD_RETURN(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc), sz);
		peak = ATOMIC_READ(&(rtw_mem_func_stat[mstat_ff_idx(flags)].peak));
		if (peak < alloc)
			ATOMIC_SET(&(rtw_mem_func_stat[mstat_ff_idx(flags)].peak), alloc);
		break;

	case MSTAT_ALLOC_FAIL:
		ATOMIC_INC(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc_err_cnt));
		ATOMIC_INC(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc_err_cnt));
		break;

	case MSTAT_FREE:
		ATOMIC_DEC(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc_cnt));
		ATOMIC_SUB(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc), sz);
		ATOMIC_DEC(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc_cnt));
		ATOMIC_SUB(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc), sz);
		break;
	}

	//if (rtw_get_passing_time_ms(update_time) > 5000) {
	//	rtw_mstat_dump();
		update_time = rtw_get_current_time();
	//}
}
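/*
 * Illustrative sketch only (not part of the original source): how a debug
 * allocation wrapper would typically feed rtw_mstat_update(). The wrapper
 * names, their signatures, and the underlying _rtw_vmalloc()/_rtw_vmfree()
 * calls are assumptions for illustration, kept inside #if 0 so they are
 * never built.
 */
#if 0
void *dbg_rtw_vmalloc(u32 sz, const enum mstat_f flags, const char *func, const int line)
{
	void *p = _rtw_vmalloc(sz);

	/* account the result against both the type and function counters */
	rtw_mstat_update(flags, p ? MSTAT_ALLOC_SUCCESS : MSTAT_ALLOC_FAIL, sz);

	return p;
}

void dbg_rtw_vmfree(void *pbuf, u32 sz, const enum mstat_f flags, const char *func, const int line)
{
	_rtw_vmfree(pbuf, sz);

	/* freeing subtracts the same size that was recorded on allocation */
	rtw_mstat_update(flags, MSTAT_FREE, sz);
}
#endif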
int _rtw_mstat_dump(char *buf, int len)
{
	int cnt = 0;
	int i;
	int value_t[4][mstat_tf_idx(MSTAT_TYPE_MAX)];
	int value_f[4][mstat_ff_idx(MSTAT_FUNC_MAX)];
	int vir_alloc, vir_peak, vir_alloc_err, phy_alloc, phy_peak, phy_alloc_err;
	int tx_alloc, tx_peak, tx_alloc_err, rx_alloc, rx_peak, rx_alloc_err;

	/* snapshot the per-type counters before formatting them */
	for (i = 0; i < mstat_tf_idx(MSTAT_TYPE_MAX); i++) {
		value_t[0][i] = ATOMIC_READ(&(rtw_mem_type_stat[i].alloc));
		value_t[1][i] = ATOMIC_READ(&(rtw_mem_type_stat[i].peak));
		value_t[2][i] = ATOMIC_READ(&(rtw_mem_type_stat[i].alloc_cnt));
		value_t[3][i] = ATOMIC_READ(&(rtw_mem_type_stat[i].alloc_err_cnt));
	}

#if 0
	for (i = 0; i < mstat_ff_idx(MSTAT_FUNC_MAX); i++) {
		value_f[0][i] = ATOMIC_READ(&(rtw_mem_func_stat[i].alloc));
		value_f[1][i] = ATOMIC_READ(&(rtw_mem_func_stat[i].peak));
		value_f[2][i] = ATOMIC_READ(&(rtw_mem_func_stat[i].alloc_cnt));
		value_f[3][i] = ATOMIC_READ(&(rtw_mem_func_stat[i].alloc_err_cnt));
	}
#endif

	cnt += snprintf(buf + cnt, len - cnt, "===================== MSTAT =====================\n");
	cnt += snprintf(buf + cnt, len - cnt, "%4s %10s %10s %10s %10s\n", "TAG", "alloc", "peak", "alloc_cnt", "err_cnt");
	cnt += snprintf(buf + cnt, len - cnt, "-------------------------------------------------\n");
	for (i = 0; i < mstat_tf_idx(MSTAT_TYPE_MAX); i++) {
		cnt += snprintf(buf + cnt, len - cnt, "%4s %10d %10d %10d %10d\n", MSTAT_TYPE_str[i],
			value_t[0][i], value_t[1][i], value_t[2][i], value_t[3][i]);
	}

#if 0
	cnt += snprintf(buf + cnt, len - cnt, "-------------------------------------------------\n");
	for (i = 0; i < mstat_ff_idx(MSTAT_FUNC_MAX); i++) {
		cnt += snprintf(buf + cnt, len - cnt, "%4s %10d %10d %10d %10d\n", MSTAT_FUNC_str[i],
			value_f[0][i], value_f[1][i], value_f[2][i], value_f[3][i]);
	}
#endif

	return cnt;
}
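/*
 * Illustrative sketch only (not part of the original source): one way the
 * buffer-based dump above could be consumed, e.g. from a debug read handler.
 * The buffer size and the pr_info() output path are assumptions for
 * illustration, kept inside #if 0 so they are never built.
 */
#if 0
static void example_mstat_report(void)
{
	char buf[1024];
	int used;

	used = _rtw_mstat_dump(buf, sizeof(buf));
	if (used > 0)
		pr_info("%.*s", used, buf);
}
#endif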
void rtw_mstat_dump(void *sel)
{
	int i;
	int value_t[4][mstat_tf_idx(MSTAT_TYPE_MAX)];
#ifdef RTW_MEM_FUNC_STAT
	int value_f[4][mstat_ff_idx(MSTAT_FUNC_MAX)];
#endif
	int vir_alloc, vir_peak, vir_alloc_err, phy_alloc, phy_peak, phy_alloc_err;
	int tx_alloc, tx_peak, tx_alloc_err, rx_alloc, rx_peak, rx_alloc_err;

	/* snapshot the per-type counters before printing them */
	for (i = 0; i < mstat_tf_idx(MSTAT_TYPE_MAX); i++) {
		value_t[0][i] = ATOMIC_READ(&(rtw_mem_type_stat[i].alloc));
		value_t[1][i] = ATOMIC_READ(&(rtw_mem_type_stat[i].peak));
		value_t[2][i] = ATOMIC_READ(&(rtw_mem_type_stat[i].alloc_cnt));
		value_t[3][i] = ATOMIC_READ(&(rtw_mem_type_stat[i].alloc_err_cnt));
	}

#ifdef RTW_MEM_FUNC_STAT
	for (i = 0; i < mstat_ff_idx(MSTAT_FUNC_MAX); i++) {
		value_f[0][i] = ATOMIC_READ(&(rtw_mem_func_stat[i].alloc));
		value_f[1][i] = ATOMIC_READ(&(rtw_mem_func_stat[i].peak));
		value_f[2][i] = ATOMIC_READ(&(rtw_mem_func_stat[i].alloc_cnt));
		value_f[3][i] = ATOMIC_READ(&(rtw_mem_func_stat[i].alloc_err_cnt));
	}
#endif

	DBG_871X_SEL_NL(sel, "===================== MSTAT =====================\n");
	DBG_871X_SEL_NL(sel, "%4s %10s %10s %10s %10s\n", "TAG", "alloc", "peak", "alloc_cnt", "err_cnt");
	DBG_871X_SEL_NL(sel, "-------------------------------------------------\n");
	for (i = 0; i < mstat_tf_idx(MSTAT_TYPE_MAX); i++) {
		DBG_871X_SEL_NL(sel, "%4s %10d %10d %10d %10d\n", MSTAT_TYPE_str[i],
			value_t[0][i], value_t[1][i], value_t[2][i], value_t[3][i]);
	}

#ifdef RTW_MEM_FUNC_STAT
	DBG_871X_SEL_NL(sel, "-------------------------------------------------\n");
	for (i = 0; i < mstat_ff_idx(MSTAT_FUNC_MAX); i++) {
		DBG_871X_SEL_NL(sel, "%4s %10d %10d %10d %10d\n", MSTAT_FUNC_str[i],
			value_f[0][i], value_f[1][i], value_f[2][i], value_f[3][i]);
	}
#endif
}
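/*
 * Illustrative sketch only (not part of the original source): rtw_mstat_dump()
 * routes its output through DBG_871X_SEL_NL(), so the caller selects the sink
 * by passing a selector. The RTW_DBGDUMP selector used here is an assumption
 * for illustration, kept inside #if 0 so it is never built.
 */
#if 0
static void example_mstat_to_log(void)
{
	/* dump the current counters to the driver's debug log sink */
	rtw_mstat_dump(RTW_DBGDUMP);
}
#endif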
#ifdef PLATFORM_FREEBSD
	free(addr, M_USBDEV);
#endif /* PLATFORM_FREEBSD */
}
#endif /* CONFIG_USB_HCI */

#ifdef DBG_MEM_ALLOC

struct rtw_mem_stat {
	ATOMIC_T alloc;         /* bytes currently allocated */
	ATOMIC_T peak;          /* peak number of bytes allocated */
	ATOMIC_T alloc_cnt;     /* number of outstanding allocations */
	ATOMIC_T alloc_err_cnt; /* number of failed allocation attempts */
};

struct rtw_mem_stat rtw_mem_type_stat[mstat_tf_idx(MSTAT_TYPE_MAX)];
struct rtw_mem_stat rtw_mem_func_stat[mstat_ff_idx(MSTAT_FUNC_MAX)];

char *MSTAT_TYPE_str[] = {
	"VIR",
	"PHY",
	"SKB",
	"USB",
};

char *MSTAT_FUNC_str[] = {
	"UNSP",
	"IO",
	"TXIO",
	"RXIO",
	"TX",