int spm_set_vcore_dvs_voltage(unsigned int opp) { u8 f26m_req, apsrc_req; u32 target_sta, req; int timeout, r = 0; bool not_existed, not_support; unsigned long flags; if (opp == OPPI_PERF_ULTRA) { f26m_req = 1; apsrc_req = 1; target_sta = VCORE_STA_UHPM; } else if (opp == OPPI_PERF) { f26m_req = 1; apsrc_req = 0; target_sta = VCORE_STA_HPM; } else if (opp == OPPI_LOW_PWR) { f26m_req = 0; apsrc_req = 0; target_sta = VCORE_STA_LPM; } else { return -EINVAL; } spin_lock_irqsave(&__spm_lock, flags); not_existed = is_fw_not_existed(); not_support = is_fw_not_support_uhpm(); if (not_existed || (opp == OPPI_PERF_ULTRA && not_support)) { __go_to_vcore_dvfs(SPM_VCORE_DVFS_EN, f26m_req, apsrc_req); } else { req = spm_read(SPM_PCM_SRC_REQ) & ~(SR_PCM_F26M_REQ | SR_PCM_APSRC_REQ); spm_write(SPM_PCM_SRC_REQ, req | (f26m_req << 1) | apsrc_req); } if (opp < OPPI_LOW_PWR) { /* normal FW fetch time + 1.15->1.05->1.15->1.25V transition time */ timeout = 2 * __spm_vcore_dvfs.pcmdesc->size + 3 * PER_OPP_DVS_US; r = wait_pcm_complete_dvs(get_vcore_sta() == target_sta, timeout); if (r >= 0) { /* DVS pass */ r = 0; } else { spm_err("[VcoreFS] OPP: %u (%u)(%u)\n", opp, not_existed, not_support); spm_dump_vcore_dvs_regs(NULL); BUG(); } } spin_unlock_irqrestore(&__spm_lock, flags); return r; }
/*
 * Kick the vcore-DVFS PCM firmware with @spm_flags.
 *
 * Puts the PMIC wrapper back in the normal phase, loads/starts the
 * firmware with both request bits cleared, and logs the resulting
 * status and request registers.  @spm_data is currently unused.
 */
void spm_go_to_vcore_dvfs(u32 spm_flags, u32 spm_data)
{
	unsigned long irq_flags;

	spin_lock_irqsave(&__spm_lock, irq_flags);

	mt_cpufreq_set_pmic_phase(PMIC_WRAP_PHASE_NORMAL);
	__go_to_vcore_dvfs(spm_flags, 0, 0);

	spm_crit("[VcoreFS] STA: 0x%x, REQ: 0x%x\n",
		 spm_read(SPM_SLEEP_DVFS_STA), spm_read(SPM_PCM_SRC_REQ));

	spin_unlock_irqrestore(&__spm_lock, irq_flags);
}