/*
 * rtw_mstat_update - update the per-type and per-function memory statistics.
 *
 * @flags:  mstat_f bitfield; mstat_tf_idx()/mstat_ff_idx() extract the
 *          indices into the type table and the function table respectively.
 * @status: which event occurred (allocation success / failure / free).
 * @sz:     number of bytes involved in the event.
 *
 * The statistics tables are zeroed lazily on the first call.  The original
 * code keyed that off a static "update_time" variable that was never
 * assigned (the assignment was inside commented-out periodic-dump code),
 * so every call re-ran the zeroing loops and wiped the counters; an
 * explicit one-shot flag fixes that.
 */
void rtw_mstat_update(const enum mstat_f flags, const MSTAT_STATUS status, u32 sz)
{
	static u8 initialized = 0;
	int peak, alloc;
	int i;

	/* one-time lazy initialization of both statistics tables */
	if (!initialized) {
		initialized = 1;
		for (i = 0; i < mstat_tf_idx(MSTAT_TYPE_MAX); i++) {
			ATOMIC_SET(&(rtw_mem_type_stat[i].alloc), 0);
			ATOMIC_SET(&(rtw_mem_type_stat[i].peak), 0);
			ATOMIC_SET(&(rtw_mem_type_stat[i].alloc_cnt), 0);
			ATOMIC_SET(&(rtw_mem_type_stat[i].alloc_err_cnt), 0);
		}
		for (i = 0; i < mstat_ff_idx(MSTAT_FUNC_MAX); i++) {
			ATOMIC_SET(&(rtw_mem_func_stat[i].alloc), 0);
			ATOMIC_SET(&(rtw_mem_func_stat[i].peak), 0);
			ATOMIC_SET(&(rtw_mem_func_stat[i].alloc_cnt), 0);
			ATOMIC_SET(&(rtw_mem_func_stat[i].alloc_err_cnt), 0);
		}
	}

	switch (status) {
	case MSTAT_ALLOC_SUCCESS:
		/* Bump outstanding bytes and allocation count, then track the
		 * high-water mark.
		 * NOTE(review): the read-compare-set peak update is not atomic
		 * as a whole; concurrent callers may under-record the peak. */
		ATOMIC_INC(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc_cnt));
		alloc = ATOMIC_ADD_RETURN(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc), sz);
		peak = ATOMIC_READ(&(rtw_mem_type_stat[mstat_tf_idx(flags)].peak));
		if (peak < alloc)
			ATOMIC_SET(&(rtw_mem_type_stat[mstat_tf_idx(flags)].peak), alloc);

		ATOMIC_INC(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc_cnt));
		alloc = ATOMIC_ADD_RETURN(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc), sz);
		peak = ATOMIC_READ(&(rtw_mem_func_stat[mstat_ff_idx(flags)].peak));
		if (peak < alloc)
			ATOMIC_SET(&(rtw_mem_func_stat[mstat_ff_idx(flags)].peak), alloc);
		break;

	case MSTAT_ALLOC_FAIL:
		ATOMIC_INC(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc_err_cnt));
		ATOMIC_INC(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc_err_cnt));
		break;

	case MSTAT_FREE:
		ATOMIC_DEC(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc_cnt));
		ATOMIC_SUB(&(rtw_mem_type_stat[mstat_tf_idx(flags)].alloc), sz);
		ATOMIC_DEC(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc_cnt));
		ATOMIC_SUB(&(rtw_mem_func_stat[mstat_ff_idx(flags)].alloc), sz);
		break;

	default:
		/* unknown status: ignore rather than corrupt the tables */
		break;
	}
}
/*
 * rtw_update_mem_stat - track virtual/physical allocation statistics.
 *
 * @flag: MEM_STAT_* event code (vir/phy x success/fail/free).
 * @sz:   number of bytes involved in the event.
 *
 * Counters are zeroed once on the first call, then every event updates
 * the outstanding-bytes totals, error counts, and high-water marks.  A
 * statistics dump is emitted at most once every 5 seconds.
 *
 * Fixes vs. the original: initialization is keyed on an explicit one-shot
 * flag instead of "update_time == 0" (which re-ran the zeroing — wiping
 * accumulated counters — on every call until the 5-second dump path first
 * assigned update_time); the final switch case gained its missing break
 * and a default case was added.
 */
void rtw_update_mem_stat(u8 flag, u32 sz)
{
	static u8 initialized = 0;
	static u32 update_time = 0;
	int peak, alloc;

	/* one-time zeroing of the debug counters */
	if (!initialized) {
		initialized = 1;
		ATOMIC_SET(&rtw_dbg_mem_stat.vir_alloc, 0);
		ATOMIC_SET(&rtw_dbg_mem_stat.vir_peak, 0);
		ATOMIC_SET(&rtw_dbg_mem_stat.vir_alloc_err, 0);
		ATOMIC_SET(&rtw_dbg_mem_stat.phy_alloc, 0);
		ATOMIC_SET(&rtw_dbg_mem_stat.phy_peak, 0);
		ATOMIC_SET(&rtw_dbg_mem_stat.phy_alloc_err, 0);
	}

	switch (flag) {
	case MEM_STAT_VIR_ALLOC_SUCCESS:
		/* NOTE(review): the read-compare-set peak update is not atomic
		 * as a whole; concurrent callers may under-record the peak. */
		alloc = ATOMIC_ADD_RETURN(&rtw_dbg_mem_stat.vir_alloc, sz);
		peak = ATOMIC_READ(&rtw_dbg_mem_stat.vir_peak);
		if (peak < alloc)
			ATOMIC_SET(&rtw_dbg_mem_stat.vir_peak, alloc);
		break;

	case MEM_STAT_VIR_ALLOC_FAIL:
		ATOMIC_INC(&rtw_dbg_mem_stat.vir_alloc_err);
		break;

	case MEM_STAT_VIR_FREE:
		/* the returned remainder was stored but never used; drop it */
		ATOMIC_SUB(&rtw_dbg_mem_stat.vir_alloc, sz);
		break;

	case MEM_STAT_PHY_ALLOC_SUCCESS:
		alloc = ATOMIC_ADD_RETURN(&rtw_dbg_mem_stat.phy_alloc, sz);
		peak = ATOMIC_READ(&rtw_dbg_mem_stat.phy_peak);
		if (peak < alloc)
			ATOMIC_SET(&rtw_dbg_mem_stat.phy_peak, alloc);
		break;

	case MEM_STAT_PHY_ALLOC_FAIL:
		ATOMIC_INC(&rtw_dbg_mem_stat.phy_alloc_err);
		break;

	case MEM_STAT_PHY_FREE:
		ATOMIC_SUB(&rtw_dbg_mem_stat.phy_alloc, sz);
		break;

	default:
		/* unknown event code: nothing to update */
		break;
	}

	/* rate-limit the statistics dump to once per 5 seconds */
	if (rtw_get_passing_time_ms(update_time) > 5000) {
		rtw_dump_mem_stat();
		update_time = rtw_get_current_time();
	}
}