static int local_pll_clk_set_rate(struct clk *c, unsigned long rate) { struct pll_freq_tbl *nf; struct pll_clk *pll = to_pll_clk(c); u32 mode; mode = readl_relaxed(PLL_MODE_REG(pll)); /* Don't change PLL's rate if it is enabled */ if ((mode & PLL_MODE_MASK) == PLL_MODE_MASK) return -EBUSY; for (nf = pll->freq_tbl; nf->freq_hz != PLL_FREQ_END && nf->freq_hz != rate; nf++) ; if (nf->freq_hz == PLL_FREQ_END) return -EINVAL; writel_relaxed(nf->l_val, PLL_L_REG(pll)); writel_relaxed(nf->m_val, PLL_M_REG(pll)); writel_relaxed(nf->n_val, PLL_N_REG(pll)); __pll_config_reg(PLL_CONFIG_REG(pll), nf, &pll->masks); return 0; }
/*
 * Enable an SR PLL: de-assert reset, then disable bypass, wait for the
 * PLL to lock, and finally enable its output.  Serialized by
 * pll_reg_lock.  Lock detection here is delay-based (no status poll).
 */
int sr_pll_clk_enable(struct clk *c)
{
	u32 mode;
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(c);

	spin_lock_irqsave(&pll_reg_lock, flags);
	mode = readl_relaxed(PLL_MODE_REG(pll));

	/* De-assert active-low PLL reset. */
	mode |= PLL_RESET_N;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Let the reset de-assertion settle before leaving bypass. */
	mb();
	udelay(10);

	/* Disable PLL bypass mode. */
	mode |= PLL_BYPASSNL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Wait for the PLL to lock before enabling the output. */
	mb();
	udelay(60);

	/* Enable PLL output. */
	mode |= PLL_OUTCTRL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Ensure the final write lands before returning. */
	mb();

	spin_unlock_irqrestore(&pll_reg_lock, flags);
	return 0;
}
static enum handoff local_pll_clk_handoff(struct clk *c) { struct pll_clk *pll = to_pll_clk(c); u32 mode = readl_relaxed(PLL_MODE_REG(pll)); u32 mask = PLL_BYPASSNL | PLL_RESET_N | PLL_OUTCTRL; unsigned long parent_rate; u32 lval, mval, nval, userval; if ((mode & mask) != mask) return HANDOFF_DISABLED_CLK; /* Assume bootloaders configure PLL to c->rate */ if (c->rate) return HANDOFF_ENABLED_CLK; parent_rate = clk_get_rate(c->parent); lval = readl_relaxed(PLL_L_REG(pll)); mval = readl_relaxed(PLL_M_REG(pll)); nval = readl_relaxed(PLL_N_REG(pll)); userval = readl_relaxed(PLL_CONFIG_REG(pll)); c->rate = parent_rate * lval; if (pll->masks.mn_en_mask && userval) { if (!nval) nval = 1; c->rate += (parent_rate * mval) / nval; } return HANDOFF_ENABLED_CLK; }
/*
 * Reprogram the PLL to a rate from its frequency table, cycling the
 * PLL off and back on around the register writes if it is in use.
 */
static int local_pll_clk_set_rate(struct clk *c, unsigned long rate)
{
	struct pll_freq_tbl *nf;
	struct pll_clk *pll = to_pll_clk(c);
	unsigned long flags;

	/* Find the frequency-table entry matching the requested rate. */
	for (nf = pll->freq_tbl; nf->freq_hz != PLL_FREQ_END
			&& nf->freq_hz != rate; nf++)
		;

	if (nf->freq_hz == PLL_FREQ_END)
		return -EINVAL;

	/*
	 * Ensure PLL is off before changing rate. For optimization reasons,
	 * assume no downstream clock is actively using it.
	 */
	spin_lock_irqsave(&c->lock, flags);
	if (c->count)
		c->ops->disable(c);

	writel_relaxed(nf->l_val, PLL_L_REG(pll));
	writel_relaxed(nf->m_val, PLL_M_REG(pll));
	writel_relaxed(nf->n_val, PLL_N_REG(pll));

	__pll_config_reg(PLL_CONFIG_REG(pll), nf, &pll->masks);

	if (c->count)
		c->ops->enable(c);

	spin_unlock_irqrestore(&c->lock, flags);
	return 0;
}
/*
 * For optimization reasons, assumes no downstream clocks are actively using
 * it.
 */
static int variable_rate_pll_set_rate(struct clk *c, unsigned long rate)
{
	struct pll_clk *pll = to_pll_clk(c);
	unsigned long irq_flags;
	u32 l;

	/* Only rates produced by round_rate are programmable. */
	if (variable_rate_pll_round_rate(c, rate) != rate)
		return -EINVAL;

	l = rate / pll->src_rate;

	spin_lock_irqsave(&c->lock, irq_flags);

	/* Briefly drop the PLL while the L value is reprogrammed. */
	if (c->count)
		c->ops->disable(c);

	writel_relaxed(l, PLL_L_REG(pll));

	if (c->count)
		c->ops->enable(c);

	spin_unlock_irqrestore(&c->lock, irq_flags);

	return 0;
}
/*
 * Enable an SR PLL by stepping it out of reset, out of bypass, and
 * finally enabling its output via raw mode-register bits.
 * NOTE(review): unlike the other enable variants in this file, this one
 * takes no spinlock and de-asserts reset *before* disabling bypass
 * (while the delay comment describes the opposite order) -- confirm
 * against the target's hardware programming guide.
 */
static int sr_pll_clk_enable(struct clk *clk)
{
	uint32_t mode;
	struct pll_clk *pll = to_pll_clk(clk);

	mode = readl_relaxed(pll->mode_reg);

	/* De-assert active-low PLL reset. */
	mode |= BIT(2);
	writel_relaxed(mode, pll->mode_reg);

	/*
	 * H/W requires a 5us delay between disabling the bypass and
	 * de-asserting the reset. Delay 10us just to be safe.
	 */
	dmb();
	udelay(10);

	/* Disable PLL bypass mode. */
	mode |= BIT(1);
	writel_relaxed(mode, pll->mode_reg);

	/* Wait until PLL is locked. */
	dmb();
	udelay(60);

	/* Enable PLL output. */
	mode |= BIT(0);
	writel_relaxed(mode, pll->mode_reg);

	return 0;
}
static int local_pll_clk_set_rate(struct clk *c, unsigned long rate) { struct pll_freq_tbl *nf; struct pll_clk *pll = to_pll_clk(c); unsigned long flags; for (nf = pll->freq_tbl; nf->freq_hz != PLL_FREQ_END && nf->freq_hz != rate; nf++) ; if (nf->freq_hz == PLL_FREQ_END) return -EINVAL; spin_lock_irqsave(&c->lock, flags); if (c->count) c->ops->disable(c); writel_relaxed(nf->l_val, PLL_L_REG(pll)); writel_relaxed(nf->m_val, PLL_M_REG(pll)); writel_relaxed(nf->n_val, PLL_N_REG(pll)); __pll_config_reg(PLL_CONFIG_REG(pll), nf, &pll->masks); if (c->count) c->ops->enable(c); spin_unlock_irqrestore(&c->lock, flags); return 0; }
/* Gate the PLL via its mode register, serialized by pll_reg_lock. */
static void local_pll_clk_disable(struct clk *c)
{
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(c);

	spin_lock_irqsave(&pll_reg_lock, flags);
	__pll_clk_disable_reg(PLL_MODE_REG(pll));
	spin_unlock_irqrestore(&pll_reg_lock, flags);
}
/* Ungate the PLL via its mode register, serialized by pll_reg_lock. */
static int local_pll_clk_enable(struct clk *clk)
{
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(clk);

	spin_lock_irqsave(&pll_reg_lock, flags);
	__pll_clk_enable_reg(PLL_MODE_REG(pll));
	spin_unlock_irqrestore(&pll_reg_lock, flags);

	return 0;
}
/* Report whether the bootloader left this PLL fully enabled. */
static enum handoff local_pll_clk_handoff(struct clk *c)
{
	struct pll_clk *pll = to_pll_clk(c);
	u32 active = PLL_BYPASSNL | PLL_RESET_N | PLL_OUTCTRL;
	u32 mode_val = readl_relaxed(PLL_MODE_REG(pll));

	return ((mode_val & active) == active) ?
			HANDOFF_ENABLED_CLK : HANDOFF_DISABLED_CLK;
}
static void local_pll_clk_disable(struct clk *c)
{
	struct pll_clk *pll = to_pll_clk(c);
	unsigned long irq_flags;

	/*
	 * Turn the PLL output off, leave test mode disabled, put the PLL
	 * back into bypass mode, and hold it in reset.
	 */
	spin_lock_irqsave(&pll_reg_lock, irq_flags);
	__pll_clk_disable_reg(PLL_MODE_REG(pll));
	spin_unlock_irqrestore(&pll_reg_lock, irq_flags);
}
/*
 * Round a requested rate to the nearest supported table entry at or
 * above it; rates above every entry round down to the highest entry.
 * Returns -EINVAL if there is no frequency table or it is empty.
 */
static long local_pll_clk_round_rate(struct clk *c, unsigned long rate)
{
	struct pll_freq_tbl *nf;
	struct pll_clk *pll = to_pll_clk(c);

	if (!pll->freq_tbl)
		return -EINVAL;

	for (nf = pll->freq_tbl; nf->freq_hz != PLL_FREQ_END; nf++)
		if (nf->freq_hz >= rate)
			return nf->freq_hz;

	/*
	 * Fix: the original unconditionally did "nf--" here, which reads
	 * before the start of the array (undefined behavior) when the
	 * table's first entry is already PLL_FREQ_END.
	 */
	if (nf == pll->freq_tbl)
		return -EINVAL;

	/* Rate exceeds every entry; return the highest supported rate. */
	return nf[-1].freq_hz;
}
/*
 * Clamp the requested rate into [min_rate, max_rate] and round it up
 * to a multiple of the source rate (capped at max_rate).
 */
static long variable_rate_pll_round_rate(struct clk *c, unsigned long rate)
{
	struct pll_clk *pll = to_pll_clk(c);
	unsigned long target = rate;

	if (!pll->src_rate)
		return 0;

	if (target < pll->min_rate)
		target = pll->min_rate;
	if (target > pll->max_rate)
		target = pll->max_rate;

	return min(pll->max_rate,
		   DIV_ROUND_UP(target, pll->src_rate) * pll->src_rate);
}
static int sr2_pll_clk_enable(struct clk *c) { unsigned long flags; struct pll_clk *pll = to_pll_clk(c); int ret = 0, count; u32 mode = readl_relaxed(PLL_MODE_REG(pll)); u32 lockmask = pll->masks.lock_mask ?: PLL_LOCKED_BIT; spin_lock_irqsave(&pll_reg_lock, flags); spm_event(pll->spm_ctrl.spm_base, pll->spm_ctrl.offset, pll->spm_ctrl.event_bit, false); /* Disable PLL bypass mode. */ mode |= PLL_BYPASSNL; writel_relaxed(mode, PLL_MODE_REG(pll)); /* * H/W requires a 5us delay between disabling the bypass and * de-asserting the reset. Delay 10us just to be safe. */ mb(); udelay(10); /* De-assert active-low PLL reset. */ mode |= PLL_RESET_N; writel_relaxed(mode, PLL_MODE_REG(pll)); /* Wait for pll to lock. */ for (count = ENABLE_WAIT_MAX_LOOPS; count > 0; count--) { if (readl_relaxed(PLL_STATUS_REG(pll)) & lockmask) break; udelay(1); } if (!(readl_relaxed(PLL_STATUS_REG(pll)) & lockmask)) pr_err("PLL %s didn't lock after enabling it!\n", c->dbg_name); /* Enable PLL output. */ mode |= PLL_OUTCTRL; writel_relaxed(mode, PLL_MODE_REG(pll)); /* Ensure that the write above goes through before returning. */ mb(); spin_unlock_irqrestore(&pll_reg_lock, flags); return ret; }
/*
 * Enable the 8974-family PLL: take it out of bypass, release reset,
 * poll the status register for lock, then enable the output.  Returns
 * -ETIMEDOUT (leaving the output disabled) if the PLL never locks.
 */
int msm8974_pll_clk_enable(struct clk *c)
{
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(c);
	u32 count, mode;
	int ret = 0;

	spin_lock_irqsave(&pll_reg_lock, flags);
	mode = readl_relaxed(PLL_MODE_REG(pll));

	/* Disable PLL bypass mode. */
	mode |= PLL_BYPASSNL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/*
	 * H/W requires a 5us delay between disabling the bypass and
	 * de-asserting the reset. Delay 10us just to be safe.
	 */
	mb();
	udelay(10);

	/* De-assert active-low PLL reset. */
	mode |= PLL_RESET_N;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Wait for pll to enable. */
	for (count = ENABLE_WAIT_MAX_LOOPS; count > 0; count--) {
		if (readl_relaxed(PLL_STATUS_REG(pll)) & PLL_LOCKED_BIT)
			break;
		udelay(1);
	}

	if (!(readl_relaxed(PLL_STATUS_REG(pll)) & PLL_LOCKED_BIT)) {
		WARN("PLL %s didn't lock after enabling it!\n", c->dbg_name);
		ret = -ETIMEDOUT;
		goto out;
	}

	/* Enable PLL output. */
	mode |= PLL_OUTCTRL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Ensure the write above goes through before returning. */
	mb();

out:
	spin_unlock_irqrestore(&pll_reg_lock, flags);
	return ret;
}
/*
 * One-time hardware setup for a variable-rate PLL: program the divider,
 * output-enable, and MN fields of the user/config register, clear the
 * APC power-down override in the mode register, and load the ALPHA and
 * CONFIG_CTL (and optionally TEST_CTL) values from the descriptor.
 */
static void __variable_rate_pll_init(struct clk *c)
{
	struct pll_clk *pll = to_pll_clk(c);
	u32 regval;

	regval = readl_relaxed(PLL_CONFIG_REG(pll));

	/* Program post/pre dividers only when a mask is declared. */
	if (pll->masks.post_div_mask) {
		regval &= ~pll->masks.post_div_mask;
		regval |= pll->vals.post_div_masked;
	}

	if (pll->masks.pre_div_mask) {
		regval &= ~pll->masks.pre_div_mask;
		regval |= pll->vals.pre_div_masked;
	}

	if (pll->masks.main_output_mask)
		regval |= pll->masks.main_output_mask;

	if (pll->masks.early_output_mask)
		regval |= pll->masks.early_output_mask;

	/* Enable or disable the M/N counter as the descriptor requests. */
	if (pll->vals.enable_mn)
		regval |= pll->masks.mn_en_mask;
	else
		regval &= ~pll->masks.mn_en_mask;

	writel_relaxed(regval, PLL_CONFIG_REG(pll));

	/* Clear the APC power-down bit so software retains control. */
	regval = readl_relaxed(PLL_MODE_REG(pll));
	if (pll->masks.apc_pdn_mask)
		regval &= ~pll->masks.apc_pdn_mask;
	writel_relaxed(regval, PLL_MODE_REG(pll));

	writel_relaxed(pll->vals.alpha_val, PLL_ALPHA_REG(pll));
	writel_relaxed(pll->vals.config_ctl_val, PLL_CFG_CTL_REG(pll));
	if (pll->init_test_ctl) {
		writel_relaxed(pll->vals.test_ctl_lo_val,
				PLL_TEST_CTL_LO_REG(pll));
		writel_relaxed(pll->vals.test_ctl_hi_val,
				PLL_TEST_CTL_HI_REG(pll));
	}

	pll->inited = true;
}
/*
 * Clamp the requested rate into [min_rate, max_rate] and round it up to
 * a multiple of the source rate (capped at max_rate).  Reconfiguring a
 * prepared PLL is refused when no_prepared_reconfig is set.
 */
static long variable_rate_pll_round_rate(struct clk *c, unsigned long rate)
{
	struct pll_clk *pll = to_pll_clk(c);
	unsigned long target = rate;

	if (!pll->src_rate)
		return 0;

	if (pll->no_prepared_reconfig && c->prepare_count)
		return -EINVAL;

	if (target < pll->min_rate)
		target = pll->min_rate;
	if (target > pll->max_rate)
		target = pll->max_rate;

	return min(pll->max_rate,
		   DIV_ROUND_UP(target, pll->src_rate) * pll->src_rate);
}
/*
 * Enable the 8974-family PLL: take it out of bypass, release reset,
 * poll the status register for lock, then enable the output.  Returns
 * -ETIMEDOUT (leaving the output disabled) if the PLL never locks.
 */
int msm8974_pll_clk_enable(struct clk *c)
{
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(c);
	u32 count, mode;
	int ret = 0;

	spin_lock_irqsave(&pll_reg_lock, flags);
	mode = readl_relaxed(PLL_MODE_REG(pll));

	/* Disable PLL bypass mode. */
	mode |= PLL_BYPASSNL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Let the bypass write land, then wait before releasing reset. */
	mb();
	udelay(10);

	/* De-assert active-low PLL reset. */
	mode |= PLL_RESET_N;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Poll the status register until the lock bit sets. */
	for (count = ENABLE_WAIT_MAX_LOOPS; count > 0; count--) {
		if (readl_relaxed(PLL_STATUS_REG(pll)) & PLL_LOCKED_BIT)
			break;
		udelay(1);
	}

	if (!(readl_relaxed(PLL_STATUS_REG(pll)) & PLL_LOCKED_BIT)) {
		WARN("PLL %s didn't lock after enabling it!\n", c->dbg_name);
		ret = -ETIMEDOUT;
		goto out;
	}

	/* Enable PLL output. */
	mode |= PLL_OUTCTRL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Ensure the final write lands before returning. */
	mb();

out:
	spin_unlock_irqrestore(&pll_reg_lock, flags);
	return ret;
}
/* Expose the variable-rate PLL register layout for debug dumps. */
static void __iomem *variable_rate_pll_list_registers(struct clk *c, int n,
				struct clk_register_data **regs, u32 *size)
{
	struct pll_clk *pll = to_pll_clk(c);
	static struct clk_register_data data[] = {
		{"MODE", 0x0},
		{"L", 0x4},
		{"ALPHA", 0x8},
		{"USER_CTL", 0x10},
		{"CONFIG_CTL", 0x14},
		{"STATUS", 0x1C},
	};

	/* Only one register bank exists for this clock. */
	if (n != 0)
		return ERR_PTR(-EINVAL);

	*regs = data;
	*size = ARRAY_SIZE(data);

	return PLL_MODE_REG(pll);
}
/*
 * For optimization reasons, assumes no downstream clocks are actively using
 * it.
 */
static int variable_rate_pll_set_rate(struct clk *c, unsigned long rate)
{
	struct pll_clk *pll = to_pll_clk(c);
	unsigned long flags;
	u32 l_val, regval;

	/* Only rates produced by the round_rate callback are programmable. */
	if (rate != variable_rate_pll_round_rate(c, rate))
		return -EINVAL;

	l_val = rate / pll->src_rate;

	spin_lock_irqsave(&c->lock, flags);

	/* The PLL is briefly disabled while it is reprogrammed. */
	if (c->count)
		c->ops->disable(c);

	/* Set any vco data if present */
	if (pll->data.vco_val) {
		regval = readl_relaxed(PLL_CONFIG_REG(pll));
		if ((rate >= pll->data.min_freq) &&
				(rate <= pll->data.max_freq)) {
			/* Rate falls inside the alternate VCO band. */
			regval |= pll->data.vco_val;
			writel_relaxed(pll->data.config_ctl_val,
					PLL_CFG_CTL_REG(pll));
		} else {
			regval &= ~pll->data.vco_val;
			writel_relaxed(pll->vals.config_ctl_val,
					PLL_CFG_CTL_REG(pll));
		}
		writel_relaxed(regval, PLL_CONFIG_REG(pll));
	}

	writel_relaxed(l_val, PLL_L_REG(pll));

	if (c->count)
		c->ops->enable(c);

	spin_unlock_irqrestore(&c->lock, flags);

	return 0;
}
/* Expose the local PLL register layout for debug dumps. */
static void __iomem *local_pll_clk_list_registers(struct clk *c, int n,
				struct clk_register_data **regs, u32 *size)
{
	/* Not compatible with 8960 & friends */
	struct pll_clk *pll = to_pll_clk(c);
	static struct clk_register_data data[] = {
		{"MODE", 0x0},
		{"L", 0x4},
		{"M", 0x8},
		{"N", 0xC},
		{"USER", 0x10},
		{"CONFIG", 0x14},
		{"STATUS", 0x1C},
	};

	/* Only one register bank exists for this clock. */
	if (n != 0)
		return ERR_PTR(-EINVAL);

	*regs = data;
	*size = ARRAY_SIZE(data);

	return PLL_MODE_REG(pll);
}
/*
 * One-time hardware setup for an HF PLL: program the divider, VCO mode,
 * output-enable, and MN fields of the user/config register, then load
 * CONFIG_CTL from the descriptor.
 */
static void __hf_pll_init(struct clk *c)
{
	struct pll_clk *pll = to_pll_clk(c);
	u32 regval;

	regval = readl_relaxed(PLL_CONFIG_REG(pll));

	/* Program post/pre dividers only when a mask is declared. */
	if (pll->masks.post_div_mask) {
		regval &= ~pll->masks.post_div_mask;
		regval |= pll->vals.post_div_masked;
	}

	if (pll->masks.pre_div_mask) {
		regval &= ~pll->masks.pre_div_mask;
		regval |= pll->vals.pre_div_masked;
	}

	/* Select the VCO operating mode if one is declared. */
	if (pll->masks.vco_mask) {
		regval &= ~pll->masks.vco_mask;
		regval |= pll->vals.vco_mode_masked;
	}

	if (pll->masks.main_output_mask)
		regval |= pll->masks.main_output_mask;

	if (pll->masks.early_output_mask)
		regval |= pll->masks.early_output_mask;

	/* Enable or disable the M/N counter as the descriptor requests. */
	if (pll->vals.enable_mn)
		regval |= pll->masks.mn_en_mask;
	else
		regval &= ~pll->masks.mn_en_mask;

	writel_relaxed(regval, PLL_CONFIG_REG(pll));

	writel_relaxed(pll->vals.config_ctl_val, PLL_CFG_CTL_REG(pll));

	pll->inited = true;
}
/*
 * Enable an SR PLL: de-assert reset, disable bypass after the mandated
 * delay, wait (delay-based) for lock, then enable the output.
 * Serialized by pll_reg_lock.
 */
int sr_pll_clk_enable(struct clk *c)
{
	u32 mode;
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(c);

	spin_lock_irqsave(&pll_reg_lock, flags);
	mode = readl_relaxed(PLL_MODE_REG(pll));

	/* De-assert active-low PLL reset. */
	mode |= PLL_RESET_N;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/*
	 * H/W requires a 5us delay between disabling the bypass and
	 * de-asserting the reset. Delay 10us just to be safe.
	 */
	mb();
	udelay(10);

	/* Disable PLL bypass mode. */
	mode |= PLL_BYPASSNL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Wait until PLL is locked. */
	mb();
	udelay(60);

	/* Enable PLL output. */
	mode |= PLL_OUTCTRL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Ensure that the write above goes through before returning. */
	mb();

	spin_unlock_irqrestore(&pll_reg_lock, flags);

	return 0;
}
static enum handoff variable_rate_pll_handoff(struct clk *c) { struct pll_clk *pll = to_pll_clk(c); u32 mode = readl_relaxed(PLL_MODE_REG(pll)); u32 mask = PLL_BYPASSNL | PLL_RESET_N | PLL_OUTCTRL; u32 lval; pll->src_rate = clk_get_rate(c->parent); if ((mode & mask) != mask) return HANDOFF_DISABLED_CLK; lval = readl_relaxed(PLL_L_REG(pll)); c->rate = pll->src_rate * lval; if (c->rate > pll->max_rate || c->rate < pll->min_rate) { WARN(1, "%s: Out of spec PLL", c->dbg_name); return HANDOFF_DISABLED_CLK; } return HANDOFF_ENABLED_CLK; }
/*
 * Enable an SR/HPM/LP PLL: take it out of bypass and reset in a single
 * mode write, poll for lock, then enable the output.  Returns
 * -ETIMEDOUT (leaving the output disabled) if the PLL never locks.
 */
int sr_hpm_lp_pll_clk_enable(struct clk *c)
{
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(c);
	u32 count, mode;
	int ret = 0;

	spin_lock_irqsave(&pll_reg_lock, flags);

	/* Disable PLL bypass mode and de-assert reset. */
	mode = PLL_BYPASSNL | PLL_RESET_N;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Wait for pll to lock. */
	for (count = ENABLE_WAIT_MAX_LOOPS; count > 0; count--) {
		if (readl_relaxed(PLL_STATUS_REG(pll)) & PLL_LOCKED_BIT)
			break;
		udelay(1);
	}

	if (!(readl_relaxed(PLL_STATUS_REG(pll)) & PLL_LOCKED_BIT)) {
		WARN("PLL %s didn't lock after enabling it!\n", c->dbg_name);
		ret = -ETIMEDOUT;
		goto out;
	}

	/* Enable PLL output. */
	mode |= PLL_OUTCTRL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Ensure the write above goes through before returning. */
	mb();

out:
	spin_unlock_irqrestore(&pll_reg_lock, flags);
	return ret;
}
/*
 * Enable an SR PLL variant that releases reset and bypass together,
 * runs lazy HF-style init, polls for lock, and panics with a full
 * register dump if the PLL never locks.
 */
int sr_pll_clk_enable(struct clk *c)
{
	u32 mode;
	int count;
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(c);
	u32 lockmask = pll->masks.lock_mask ?: PLL_LOCKED_BIT;
	u32 status_reg, user_reg, l_reg, m_reg, n_reg, config_reg;

	spin_lock_irqsave(&pll_reg_lock, flags);

	if (unlikely(!to_pll_clk(c)->inited))
		/* PLL initialization is similar to HF PLL */
		__hf_pll_init(c);

	/* Remove SPM HW event */
	spm_event(pll->spm_ctrl.spm_base, pll->spm_ctrl.offset,
				pll->spm_ctrl.event_bit, false);

	mode = readl_relaxed(PLL_MODE_REG(pll));

	/* De-assert active-low PLL reset. */
	mode |= PLL_RESET_N | PLL_BYPASSNL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* A 100us delay required before locking the PLL */
	mb();
	udelay(100);

	/* Wait for the PLL to lock */
	for (count = ENABLE_WAIT_MAX_LOOPS; count > 0; count--) {
		if (readl_relaxed(PLL_STATUS_REG(pll)) & lockmask)
			break;
		udelay(1);
	}

	if (!(readl_relaxed(PLL_STATUS_REG(pll)) & lockmask)) {
		/* Snapshot every relevant register before panicking. */
		mode = readl_relaxed(PLL_MODE_REG(pll));
		status_reg = readl_relaxed(PLL_STATUS_REG(pll));
		user_reg = readl_relaxed(PLL_CONFIG_REG(pll));
		config_reg = readl_relaxed(PLL_CFG_CTL_REG(pll));
		l_reg = readl_relaxed(PLL_L_REG(pll));
		m_reg = readl_relaxed(PLL_M_REG(pll));
		n_reg = readl_relaxed(PLL_N_REG(pll));
		pr_err("PLL %s didn't lock after enabling for L value 0x%x!\n",
				c->dbg_name, l_reg);
		pr_err("mode register is 0x%x\n", mode);
		pr_err("status register is 0x%x\n", status_reg);
		pr_err("user control register is 0x%x\n", user_reg);
		pr_err("config control register is 0x%x\n", config_reg);
		pr_err("L value register is 0x%x\n", l_reg);
		pr_err("M value register is 0x%x\n", m_reg);
		pr_err("N value control register is 0x%x\n", n_reg);
		if (pll->spm_ctrl.spm_base)
			pr_err("L2 spm_force_event_en 0x%x\n",
				readl_relaxed(pll->spm_ctrl.spm_base
					+ SPM_FORCE_EVENT));
		panic("PLL %s didn't lock after enabling it!\n", c->dbg_name);
	}

	/* Enable PLL output. */
	mode |= PLL_OUTCTRL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Ensure that the write above goes through before returning. */
	mb();

	spin_unlock_irqrestore(&pll_reg_lock, flags);

	return 0;
}
/*
 * Enable an HF PLL: run lazy one-time init, clear the SPM HW event,
 * step the PLL out of bypass and reset with the documented delays,
 * poll for lock (re-checking to filter transient locks), and panic
 * with a full register dump if the PLL never locks.
 */
static int hf_pll_clk_enable(struct clk *c)
{
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(c);
	int ret = 0, count;
	u32 mode;
	u32 lockmask = pll->masks.lock_mask ?: PLL_LOCKED_BIT;
	u32 status_reg, user_reg, l_reg, m_reg, n_reg, config_reg;

	spin_lock_irqsave(&pll_reg_lock, flags);

	/* Lazily apply the one-time register configuration. */
	if (unlikely(!to_pll_clk(c)->inited))
		__hf_pll_init(c);

	/* Remove SPM HW event */
	spm_event(pll->spm_ctrl.spm_base, pll->spm_ctrl.offset,
				pll->spm_ctrl.event_bit, false);

	mode = readl_relaxed(PLL_MODE_REG(pll));

	/* Disable PLL bypass mode. */
	mode |= PLL_BYPASSNL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/*
	 * H/W requires a 5us delay between disabling the bypass and
	 * de-asserting the reset. Use 10us to be sure.
	 */
	mb();
	udelay(10);

	/* De-assert active-low PLL reset. */
	mode |= PLL_RESET_N;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* A 50us delay required before locking the PLL. */
	mb();
	udelay(50);

	/* Wait for pll to lock. */
	for (count = ENABLE_WAIT_MAX_LOOPS; count > 0; count--) {
		if (readl_relaxed(PLL_STATUS_REG(pll)) & lockmask) {
			udelay(1);
			/*
			 * Check again to be sure. This is to avoid
			 * breaking too early if there is a "transient"
			 * lock.
			 */
			if ((readl_relaxed(PLL_STATUS_REG(pll)) & lockmask))
				break;
		}
		udelay(1);
	}

	if (!(readl_relaxed(PLL_STATUS_REG(pll)) & lockmask)) {
		/* Snapshot every relevant register before panicking. */
		mode = readl_relaxed(PLL_MODE_REG(pll));
		status_reg = readl_relaxed(PLL_STATUS_REG(pll));
		user_reg = readl_relaxed(PLL_CONFIG_REG(pll));
		config_reg = readl_relaxed(PLL_CFG_CTL_REG(pll));
		l_reg = readl_relaxed(PLL_L_REG(pll));
		m_reg = readl_relaxed(PLL_M_REG(pll));
		n_reg = readl_relaxed(PLL_N_REG(pll));
		pr_err("PLL %s didn't lock after enabling for L value 0x%x!\n",
				c->dbg_name, l_reg);
		pr_err("mode register is 0x%x\n", mode);
		pr_err("status register is 0x%x\n", status_reg);
		pr_err("user control register is 0x%x\n", user_reg);
		pr_err("config control register is 0x%x\n", config_reg);
		pr_err("L value register is 0x%x\n", l_reg);
		pr_err("M value register is 0x%x\n", m_reg);
		pr_err("N value control register is 0x%x\n", n_reg);
		if (pll->spm_ctrl.spm_base)
			pr_err("L2 spm_force_event_en 0x%x\n",
				readl_relaxed(pll->spm_ctrl.spm_base
					+ SPM_FORCE_EVENT));
		panic("PLL %s didn't lock after enabling it!\n", c->dbg_name);
	}

	/* Enable PLL output. */
	mode |= PLL_OUTCTRL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Ensure that the write above goes through before returning. */
	mb();

	spin_unlock_irqrestore(&pll_reg_lock, flags);

	return ret;
}
static struct clk *local_pll_clk_get_parent(struct clk *c) { return to_pll_clk(c)->parent; }
/*
 * Enable a variable-rate PLL.  Runs lazy one-time init, drives the
 * test-control debug bits as required by the HW documentation, steps
 * the PLL out of bypass and reset, polls for lock (filtering early
 * "transient" locks), and panics with an extensive register dump if
 * the PLL never locks.
 */
static int variable_rate_pll_clk_enable(struct clk *c)
{
	unsigned long flags;
	struct pll_clk *pll = to_pll_clk(c);
	int ret = 0, count;
	u32 mode, testlo;
	u32 lockmask = pll->masks.lock_mask ?: PLL_LOCKED_BIT;
	u32 mode_lock;
	u64 time;
	bool early_lock = false;

	spin_lock_irqsave(&pll_reg_lock, flags);

	/* Lazily apply the one-time register configuration. */
	if (unlikely(!to_pll_clk(c)->inited))
		__variable_rate_pll_init(c);

	mode = readl_relaxed(PLL_MODE_REG(pll));

	/* Set test control bits as required by HW doc */
	if (pll->test_ctl_lo_reg && pll->vals.test_ctl_lo_val &&
		pll->pgm_test_ctl_enable)
		writel_relaxed(pll->vals.test_ctl_lo_val,
				PLL_TEST_CTL_LO_REG(pll));

	/* Enable test_ctl debug */
	mode |= BIT(3);
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Drive TEST_CTL_LO bits [7:6] to 0b11 before bringing the PLL up. */
	testlo = readl_relaxed(PLL_TEST_CTL_LO_REG(pll));
	testlo &= ~BM(7, 6);
	testlo |= 0xC0;
	writel_relaxed(testlo, PLL_TEST_CTL_LO_REG(pll));
	/* Wait for the write to complete */
	mb();

	/* Disable PLL bypass mode. */
	mode |= PLL_BYPASSNL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/*
	 * H/W requires a 5us delay between disabling the bypass and
	 * de-asserting the reset. Use 10us to be sure.
	 */
	mb();
	udelay(10);

	/* De-assert active-low PLL reset. */
	mode |= PLL_RESET_N;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/*
	 * 5us delay mandated by HPG. However, put in a 200us delay here.
	 * This is to address possible locking issues with the PLL exhibit
	 * early "transient" locks about 16us from this point. With this
	 * higher delay, we avoid running into those transients.
	 */
	mb();
	udelay(200);

	/* Clear test control bits */
	if (pll->test_ctl_lo_reg && pll->vals.test_ctl_lo_val &&
		pll->pgm_test_ctl_enable)
		writel_relaxed(0x0, PLL_TEST_CTL_LO_REG(pll));

	time = sched_clock();
	/* Wait for pll to lock. */
	for (count = ENABLE_WAIT_MAX_LOOPS; count > 0; count--) {
		if (readl_relaxed(PLL_STATUS_REG(pll)) & lockmask) {
			udelay(1);
			/*
			 * Check again to be sure. This is to avoid
			 * breaking too early if there is a "transient"
			 * lock.
			 */
			if ((readl_relaxed(PLL_STATUS_REG(pll)) & lockmask))
				break;
			else
				early_lock = true;
		}
		udelay(1);
	}
	time = sched_clock() - time;

	mode_lock = readl_relaxed(PLL_STATUS_REG(pll));

	if (!(mode_lock & lockmask)) {
		pr_err("PLL lock bit detection total wait time: %lld ns",
				time);
		pr_err("PLL %s didn't lock after enabling for L value 0x%x!\n",
				c->dbg_name, readl_relaxed(PLL_L_REG(pll)));
		/*
		 * NOTE(review): the label says "mode" but the STATUS
		 * register is what gets printed -- confirm which was
		 * intended.
		 */
		pr_err("mode register is 0x%x\n",
				readl_relaxed(PLL_STATUS_REG(pll)));
		pr_err("user control register is 0x%x\n",
				readl_relaxed(PLL_CONFIG_REG(pll)));
		pr_err("config control register is 0x%x\n",
				readl_relaxed(PLL_CFG_CTL_REG(pll)));
		pr_err("test control high register is 0x%x\n",
				readl_relaxed(PLL_TEST_CTL_HI_REG(pll)));
		pr_err("test control low register is 0x%x\n",
				readl_relaxed(PLL_TEST_CTL_LO_REG(pll)));
		pr_err("early lock? %s\n", early_lock ? "yes" : "no");

		/*
		 * Sweep TEST_CTL_LO bits [7:6] through all four settings,
		 * dumping the alternate status register at each step.
		 */
		testlo = readl_relaxed(PLL_TEST_CTL_LO_REG(pll));
		testlo &= ~BM(7, 6);
		writel_relaxed(testlo, PLL_TEST_CTL_LO_REG(pll));
		/* Wait for the write to complete */
		mb();
		pr_err("test_ctl_lo = 0x%x, pll status is: 0x%x\n",
				readl_relaxed(PLL_TEST_CTL_LO_REG(pll)),
				readl_relaxed(PLL_ALT_STATUS_REG(pll)));

		testlo = readl_relaxed(PLL_TEST_CTL_LO_REG(pll));
		testlo &= ~BM(7, 6);
		testlo |= 0x40;
		writel_relaxed(testlo, PLL_TEST_CTL_LO_REG(pll));
		/* Wait for the write to complete */
		mb();
		pr_err("test_ctl_lo = 0x%x, pll status is: 0x%x\n",
				readl_relaxed(PLL_TEST_CTL_LO_REG(pll)),
				readl_relaxed(PLL_ALT_STATUS_REG(pll)));

		testlo = readl_relaxed(PLL_TEST_CTL_LO_REG(pll));
		testlo &= ~BM(7, 6);
		testlo |= 0x80;
		writel_relaxed(testlo, PLL_TEST_CTL_LO_REG(pll));
		/* Wait for the write to complete */
		mb();
		pr_err("test_ctl_lo = 0x%x, pll status is: 0x%x\n",
				readl_relaxed(PLL_TEST_CTL_LO_REG(pll)),
				readl_relaxed(PLL_ALT_STATUS_REG(pll)));

		testlo = readl_relaxed(PLL_TEST_CTL_LO_REG(pll));
		testlo &= ~BM(7, 6);
		testlo |= 0xC0;
		writel_relaxed(testlo, PLL_TEST_CTL_LO_REG(pll));
		/* Wait for the write to complete */
		mb();
		pr_err("test_ctl_lo = 0x%x, pll status is: 0x%x\n",
				readl_relaxed(PLL_TEST_CTL_LO_REG(pll)),
				readl_relaxed(PLL_ALT_STATUS_REG(pll)));
		panic("failed to lock %s PLL\n", c->dbg_name);
	}

	/* Enable PLL output. */
	mode |= PLL_OUTCTRL;
	writel_relaxed(mode, PLL_MODE_REG(pll));

	/* Ensure that the write above goes through before returning. */
	mb();

	spin_unlock_irqrestore(&pll_reg_lock, flags);

	return ret;
}
static struct clk *local_pll_clk_get_parent(struct clk *clk) { struct pll_clk *pll = to_pll_clk(clk); return pll->parent; }