void sec_gpu_dvfs_handler(int utilization_value) { /*utilization_value is zero mean is gpu going to idle*/ if (utilization_value == 0) return; sgx_dvfs_level = sec_gpu_dvfs_level_from_clk_get(gpu_clock_get()); /* this check for current clock must be find in dvfs table */ if (sgx_dvfs_level < 0) { PVR_LOG(("WARN: current clock: %d MHz not found in DVFS table. so set to max clock", gpu_clock_get())); sec_gpu_vol_clk_change(gdata[BASE_START_LEVEL].clock, gdata[BASE_START_LEVEL].voltage); return; } PVR_DPF((PVR_DBG_MESSAGE, "INFO: AUTO DVFS [%d MHz] <%d, %d>, utilization [%d]", gpu_clock_get(), gdata[sgx_dvfs_level].min_threadhold, gdata[sgx_dvfs_level].max_threadhold, utilization_value)); /* check current level's threadhold value */ if (gdata[sgx_dvfs_level].min_threadhold > utilization_value) { /* need to down current clock */ sgx_dvfs_level = sec_clock_change_down(sgx_dvfs_level, BASE_DOWN_STEP_LEVEL); } else if (gdata[sgx_dvfs_level].max_threadhold < utilization_value) { /* need to up current clock */ sgx_dvfs_level = sec_clock_change_up(sgx_dvfs_level, BASE_UP_STEP_LEVEL); } else sgx_dvfs_down_requirement = gdata[sgx_dvfs_level].stay_total_count; g_g3dfreq = gdata[sgx_dvfs_level].clock; }
int sec_clock_change_down(int level, int step) { sgx_dvfs_down_requirement--; if (sgx_dvfs_down_requirement > 0 ) return level; level += step; if (level > GPU_DVFS_MAX_LEVEL - 1) level = GPU_DVFS_MAX_LEVEL - 1; if (sgx_dvfs_min_lock) { if (level > custom_min_lock_level) level = custom_min_lock_level; } sgx_dvfs_down_requirement = gdata[level].stay_total_count; sec_gpu_vol_clk_change(gdata[level].clock, gdata[level].voltage); if ((g_debug_CCB_Info_Flag % g_debug_CCB_count) == 0) PVR_LOG(("SGX CCB RO : %d, WO : %d, Total : %d", *g_debug_CCB_Info_RO, *g_debug_CCB_Info_WO, g_debug_CCB_Info_WCNT)); g_debug_CCB_Info_WCNT = 0; g_debug_CCB_Info_Flag ++; return level; }
/*
 * sec_gpu_lock_control_proc - set or clear a user min/max GPU clock lock.
 *
 * @bmax:  non-zero to operate on the max-clock lock, zero for the min lock.
 * @value: requested clock; a clock not present in the DVFS table (lookup
 *         returns < 0) means "unlock".
 * @count: caller's write size, returned on success (proc-write convention).
 *
 * Updates sgx_dvfs_max_lock/sgx_dvfs_min_lock and the corresponding
 * custom_*_lock_level globals, then reclocks immediately when the current
 * level violates the lock that remains in force.
 *
 * Returns @count on success, -EINVAL otherwise.
 */
static int sec_gpu_lock_control_proc(int bmax, long value, size_t count)
{
	int lock_level = sec_gpu_dvfs_level_from_clk_get(value);
	int retval = -EINVAL;

	sgx_dvfs_level = sec_gpu_dvfs_level_from_clk_get(gpu_clock_get());

	if (lock_level < 0) {
		/* unlock something */
		if (bmax)
			sgx_dvfs_max_lock = custom_max_lock_level = 0;
		else
			sgx_dvfs_min_lock = custom_min_lock_level = 0;

		/* the opposite lock may still be active: re-apply it now in case
		 * the current level only satisfied the lock we just cleared */
		if (sgx_dvfs_min_lock && (sgx_dvfs_level > custom_min_lock_level)) /* min lock only - likely */
			sec_gpu_vol_clk_change(gdata[custom_min_lock_level].clock, gdata[custom_min_lock_level].voltage);
		else if (sgx_dvfs_max_lock && (sgx_dvfs_level < custom_max_lock_level)) /* max lock only - unlikely */
			sec_gpu_vol_clk_change(gdata[custom_max_lock_level].clock, gdata[custom_max_lock_level].voltage);

		/* only an explicit 0 counts as a valid unlock request; any other
		 * unknown clock value still returns -EINVAL */
		if (value == 0)
			retval = count;
	} else{
		/* lock something */
		if (bmax) {
			sgx_dvfs_max_lock = value;
			custom_max_lock_level = lock_level;
		} else {
			sgx_dvfs_min_lock = value;
			custom_min_lock_level = lock_level;
		}

		/* NOTE: locks hold clock values, so max_lock < min_lock means the
		 * requested ceiling is below the floor - max lock wins */
		if ((sgx_dvfs_max_lock) && (sgx_dvfs_min_lock) && (sgx_dvfs_max_lock < sgx_dvfs_min_lock)){
			/* abnormal status */
			if (sgx_dvfs_max_lock) /* max lock */
				sec_gpu_vol_clk_change(gdata[custom_max_lock_level].clock, gdata[custom_max_lock_level].voltage);
		} else {
			/* normal status: enforce only the lock that was just set.
			 * Level indices run opposite to clocks: a level ABOVE the
			 * max-lock level means the clock is too high, and vice versa */
			if ((bmax) && sgx_dvfs_max_lock && (sgx_dvfs_level < custom_max_lock_level)) /* max lock */
				sec_gpu_vol_clk_change(gdata[custom_max_lock_level].clock, gdata[custom_max_lock_level].voltage);
			if ((!bmax) && sgx_dvfs_min_lock && (sgx_dvfs_level > custom_min_lock_level)) /* min lock */
				sec_gpu_vol_clk_change(gdata[custom_min_lock_level].clock, gdata[custom_min_lock_level].voltage);
		}
		retval = count;
	}
	return retval;
}
int sec_clock_change_up(int level, int step) { level -= step; if (level < 0) level = 0; if (sgx_dvfs_max_lock) { if (level < custom_max_lock_level) level = custom_max_lock_level; } sgx_dvfs_down_requirement = gdata[level].stay_total_count; sec_gpu_vol_clk_change(gdata[level].clock, gdata[level].voltage); if ((g_debug_CCB_Info_Flag % g_debug_CCB_count) == 0) PVR_LOG(("SGX CCB RO : %d, WO : %d, Total : %d", *g_debug_CCB_Info_RO, *g_debug_CCB_Info_WO, g_debug_CCB_Info_WCNT)); g_debug_CCB_Info_WCNT = 0; g_debug_CCB_Info_Flag ++; return level; }
void sec_gpu_dvfs_handler(int utilization_value) { if (custom_threshold_change) sec_custom_threshold_set(); /*utilization_value is zero mean is gpu going to idle*/ if (utilization_value == 0) return; #ifdef CONFIG_ASV_MARGIN_TEST sgx_dvfs_custom_clock = set_g3d_freq; #endif /* this check for custom dvfs setting - 0:auto, others: custom lock clock*/ if (sgx_dvfs_custom_clock) { if (sgx_dvfs_custom_clock != gpu_clock_get()) { sgx_dvfs_level = sec_gpu_dvfs_level_from_clk_get(sgx_dvfs_custom_clock); /* this check for current clock must be find in dvfs table */ if (sgx_dvfs_level < 0) { PVR_LOG(("WARN: custom clock: %d MHz not found in DVFS table", sgx_dvfs_custom_clock)); return; } if (sgx_dvfs_level < MAX_DVFS_LEVEL && sgx_dvfs_level >= 0) { sec_gpu_vol_clk_change(g_gpu_dvfs_data[sgx_dvfs_level].clock, g_gpu_dvfs_data[sgx_dvfs_level].voltage); PVR_LOG(("INFO: CUSTOM DVFS [%d MHz] (%d, %d), utilization [%d] -(%d MHz)", gpu_clock_get(), g_gpu_dvfs_data[sgx_dvfs_level].min_threadhold, g_gpu_dvfs_data[sgx_dvfs_level].max_threadhold, utilization_value, sgx_dvfs_custom_clock )); } else { PVR_LOG(("INFO: CUSTOM DVFS [%d MHz] invalid clock - restore auto mode", sgx_dvfs_custom_clock)); sgx_dvfs_custom_clock = 0; } } } else { sgx_dvfs_level = sec_gpu_dvfs_level_from_clk_get(gpu_clock_get()); /* this check for current clock must be find in dvfs table */ if (sgx_dvfs_level < 0) { PVR_LOG(("WARN: current clock: %d MHz not found in DVFS table. 
so set to max clock", gpu_clock_get())); sec_gpu_vol_clk_change(g_gpu_dvfs_data[BASE_START_LEVEL].clock, g_gpu_dvfs_data[BASE_START_LEVEL].voltage); return; } PVR_DPF((PVR_DBG_MESSAGE, "INFO: AUTO DVFS [%d MHz] <%d, %d>, utilization [%d]", gpu_clock_get(), g_gpu_dvfs_data[sgx_dvfs_level].min_threadhold, g_gpu_dvfs_data[sgx_dvfs_level].max_threadhold, utilization_value)); /* check current level's threadhold value */ if (g_gpu_dvfs_data[sgx_dvfs_level].min_threadhold > utilization_value) { #if defined(USING_BOOST_DOWN_MODE) /* check need Quick up/down change */ if (g_gpu_dvfs_data[sgx_dvfs_level].quick_down_threadhold >= utilization_value) sgx_dvfs_level = sec_clock_change_down(sgx_dvfs_level, BASE_QUICK_DOWN_LEVEL); else #endif /* need to down current clock */ sgx_dvfs_level = sec_clock_change_down(sgx_dvfs_level, BASE_DWON_STEP_LEVEL); } else if (g_gpu_dvfs_data[sgx_dvfs_level].max_threadhold < utilization_value) { #if defined(USING_BOOST_UP_MODE) if (g_gpu_dvfs_data[sgx_dvfs_level].quick_up_threadhold <= utilization_value) sgx_dvfs_level = sec_clock_change_up(sgx_dvfs_level, BASE_QUICK_UP_LEVEL); else #endif /* need to up current clock */ sgx_dvfs_level = sec_clock_change_up(sgx_dvfs_level, BASE_UP_STEP_LEVEL); } else sgx_dvfs_down_requirement = g_gpu_dvfs_data[sgx_dvfs_level].stay_total_count; } g_g3dfreq = g_gpu_dvfs_data[sgx_dvfs_level].clock; }