static long alpha_pll_round_rate(struct clk *c, unsigned long rate) { struct alpha_pll_clk *pll = to_alpha_pll_clk(c); struct alpha_pll_vco_tbl *v = pll->vco_tbl; int ret; u32 l_val; unsigned long freq_hz; u64 a_val; int i; freq_hz = round_rate_up(pll, rate, &l_val, &a_val); ret = find_vco(pll, freq_hz); if (!IS_ERR_VALUE(ret)) return freq_hz; freq_hz = 0; for (i = 0; i < pll->num_vco; i++) { if (is_better_rate(rate, freq_hz, v[i].min_freq)) freq_hz = v[i].min_freq; if (is_better_rate(rate, freq_hz, v[i].max_freq)) freq_hz = v[i].max_freq; } if (!freq_hz) return -EINVAL; return freq_hz; }
static long __div_round_rate(struct div_data *data, unsigned long rate, struct clk *parent, unsigned int *best_div, unsigned long *best_prate) { unsigned int div, min_div, max_div, _best_div = 1; unsigned long prate, _best_prate = 0, rrate = 0, req_prate, actual_rate; unsigned int numer; rate = max(rate, 1UL); min_div = max(data->min_div, 1U); max_div = min(data->max_div, (unsigned int) (ULONG_MAX / rate)); /* * div values are doubled for half dividers. * Adjust for that by picking a numer of 2. */ numer = data->is_half_divider ? 2 : 1; for (div = min_div; div <= max_div; div++) { if (data->skip_odd_div && (div & 1)) continue; req_prate = mult_frac(rate, div, numer); prate = clk_round_rate(parent, req_prate); if (IS_ERR_VALUE(prate)) break; actual_rate = mult_frac(prate, numer, div); if (is_better_rate(rate, rrate, actual_rate)) { rrate = actual_rate; _best_div = div; _best_prate = prate; } /* * Trying higher dividers is only going to ask the parent for * a higher rate. If it can't even output a rate higher than * the one we request for this divider, the parent is not * going to be able to output an even higher rate required * for a higher divider. So, stop trying higher dividers. */ if (actual_rate < rate) break; if (rrate <= rate + data->rate_margin) break; } if (!rrate) return -EINVAL; if (best_div) *best_div = _best_div; if (best_prate) *best_prate = _best_prate; return rrate; }
static long __div_round_rate(struct clk *c, unsigned long rate, int *best_div) { struct div_clk *d = to_div_clk(c); unsigned int div, min_div, max_div, rrate_div = 1; unsigned long p_rrate, rrate = 0; rate = max(rate, 1UL); if (!d->ops || !d->ops->set_div) min_div = max_div = d->div; else { min_div = max(d->min_div, 1U); max_div = min(d->max_div, (unsigned int) (ULONG_MAX / rate)); } for (div = min_div; div <= max_div; div++) { p_rrate = clk_round_rate(c->parent, rate * div); if (IS_ERR_VALUE(p_rrate)) break; p_rrate /= div; if (is_better_rate(rate, rrate, p_rrate)) { rrate = p_rrate; rrate_div = div; } /* * Trying higher dividers is only going to ask the parent for * a higher rate. If it can't even output a rate higher than * the one we request for this divider, the parent is not * going to be able to output an even higher rate required * for a higher divider. So, stop trying higher dividers. */ if (p_rrate < rate) break; if (rrate <= rate + d->rate_margin) break; } if (!rrate) return -EINVAL; if (best_div) *best_div = rrate_div; return rrate; }
/*
 * mux_round_rate() - round @rate across all parents of a mux clock.
 *
 * Asks every parent to round @rate and keeps the candidate preferred
 * by is_better_rate().  Parents whose clk_round_rate() call fails are
 * skipped rather than having their negative errno treated as a rate.
 *
 * Returns the best achievable rate, or -EINVAL if no parent can
 * provide one.
 */
static long mux_round_rate(struct clk *c, unsigned long rate)
{
	struct mux_clk *mux = to_mux_clk(c);
	int i;
	unsigned long prate, rrate = 0;

	for (i = 0; i < mux->num_parents; i++) {
		prate = clk_round_rate(mux->parents[i].src, rate);
		/*
		 * A failing parent returns a negative errno which, stored
		 * in an unsigned long, looks like a huge rate and could
		 * otherwise win the is_better_rate() comparison. Skip it.
		 */
		if (IS_ERR_VALUE(prate))
			continue;
		if (is_better_rate(rate, rrate, prate))
			rrate = prate;
	}

	if (!rrate)
		return -EINVAL;

	return rrate;
}
/*
 * __div_round_rate() - find the divider/parent-rate pair whose output
 * best matches @rate.
 *
 * Walks the legal divider range, asking the parent clock for the rate
 * each divider would need, and keeps the candidate that
 * is_better_rate() prefers.  The search ends early once the best rate
 * found lies within @rate + data->rate_margin.
 *
 * Returns the achievable rate, or -EINVAL if nothing worked.  On
 * success the winning divider and parent rate are optionally stored
 * through @best_div / @best_prate.
 */
static long __div_round_rate(struct div_data *data, unsigned long rate,
	struct clk *parent, unsigned int *best_div, unsigned long *best_prate)
{
	unsigned int div, min_div, max_div, _best_div = 1;
	unsigned long prate, _best_prate = 0, rrate = 0, req_prate, actual_rate;
	unsigned int numer;

	rate = max(rate, 1UL);

	min_div = max(data->min_div, 1U);
	/* Clamp so the scaled parent-rate request cannot overflow. */
	max_div = min(data->max_div, (unsigned int) (ULONG_MAX / rate));

	/*
	 * div values are doubled for half dividers.
	 * Adjust for that by picking a numer of 2.
	 */
	numer = data->is_half_divider ? 2 : 1;

	for (div = min_div; div <= max_div; div++) {
		if (data->skip_odd_div && (div & 1))
			continue;

		req_prate = mult_frac(rate, div, numer);
		prate = clk_round_rate(parent, req_prate);
		if (IS_ERR_VALUE(prate))
			break;

		actual_rate = mult_frac(prate, numer, div);
		if (is_better_rate(rate, rrate, actual_rate)) {
			rrate = actual_rate;
			_best_div = div;
			_best_prate = prate;
		}

		/*
		 * Trying higher dividers is only going to ask the parent for
		 * a higher rate. If it can't even output a rate higher than
		 * the one we request for this divider, the parent is not
		 * going to be able to output an even higher rate required
		 * for a higher divider. So, stop trying higher dividers.
		 */
		if (actual_rate < rate)
			break;
		if (rrate <= rate + data->rate_margin)
			break;
	}

	if (!rrate)
		return -EINVAL;
	if (best_div)
		*best_div = _best_div;
	if (best_prate)
		*best_prate = _best_prate;

	return rrate;
}
static long __div_round_rate(struct div_data *data, unsigned long rate, struct clk *parent, unsigned int *best_div, unsigned long *best_prate) { unsigned int div, min_div, max_div, _best_div = 1; unsigned long prate, _best_prate = 0, rrate = 0; rate = max(rate, 1UL); min_div = max(data->min_div, 1U); max_div = min(data->max_div, (unsigned int) (ULONG_MAX / rate)); for (div = min_div; div <= max_div; div++) { prate = clk_round_rate(parent, rate * div); if (IS_ERR_VALUE(prate)) break; if (is_better_rate(rate, rrate, prate / div)) { rrate = prate / div; _best_div = div; _best_prate = prate; } /* * Trying higher dividers is only going to ask the parent for * a higher rate. If it can't even output a rate higher than * the one we request for this divider, the parent is not * going to be able to output an even higher rate required * for a higher divider. So, stop trying higher dividers. */ if (prate / div < rate) break; if (rrate <= rate + data->rate_margin) break; } if (!rrate) return -EINVAL; if (best_div) *best_div = _best_div; if (best_prate) *best_prate = _best_prate; return rrate; }
static long __mux_div_round_rate(struct clk_hw *hw, unsigned long rate, struct clk **best_parent, int *best_div, unsigned long *best_prate) { struct mux_div_clk *md = to_mux_div_clk(hw); unsigned int i; unsigned long rrate, best = 0, _best_div = 0, _best_prate = 0; struct clk *_best_parent = 0; int num_parents = __clk_get_num_parents(hw->clk); bool set_parent = __clk_get_flags(hw->clk) & CLK_SET_RATE_PARENT; for (i = 0; i < num_parents; i++) { int div; unsigned long prate; struct clk *p = clk_get_parent_by_index(hw->clk, i); rrate = __div_round_rate(&md->data, rate, p, &div, &prate, set_parent); if (is_better_rate(rate, best, rrate)) { best = rrate; _best_div = div; _best_prate = prate; _best_parent = p; } if (rate <= rrate) break; } if (best_div) *best_div = _best_div; if (best_prate) *best_prate = _best_prate; if (best_parent) *best_parent = _best_parent; if (best) return best; return -EINVAL; }
/*
 * __div_round_rate() - find the divider/parent-rate pair whose output
 * best matches @rate.
 *
 * Walks the legal divider range, asking the parent clock for the rate
 * each divider would need, and keeps the candidate that
 * is_better_rate() prefers.  The search ends early once the best rate
 * found lies within @rate + data->rate_margin.
 *
 * Returns the achievable rate, or -EINVAL if nothing worked.  On
 * success the winning divider and parent rate are optionally stored
 * through @best_div / @best_prate.
 */
static long __div_round_rate(struct div_data *data, unsigned long rate,
	struct clk *parent, unsigned int *best_div, unsigned long *best_prate)
{
	unsigned int div, min_div, max_div, _best_div = 1;
	unsigned long prate, _best_prate = 0, rrate = 0;

	rate = max(rate, 1UL);

	min_div = max(data->min_div, 1U);
	/* Clamp so rate * div below cannot overflow unsigned long. */
	max_div = min(data->max_div, (unsigned int) (ULONG_MAX / rate));

	for (div = min_div; div <= max_div; div++) {
		prate = clk_round_rate(parent, rate * div);
		if (IS_ERR_VALUE(prate))
			break;

		if (is_better_rate(rate, rrate, prate / div)) {
			rrate = prate / div;
			_best_div = div;
			_best_prate = prate;
		}

		/*
		 * Higher dividers only ask the parent for higher rates; if
		 * the parent cannot reach this divider's request it cannot
		 * satisfy any higher divider either, so stop searching.
		 */
		if (prate / div < rate)
			break;
		if (rrate <= rate + data->rate_margin)
			break;
	}

	if (!rrate)
		return -EINVAL;
	if (best_div)
		*best_div = _best_div;
	if (best_prate)
		*best_prate = _best_prate;

	return rrate;
}
static long __mux_div_round_rate(struct clk *c, unsigned long rate, struct clk **best_parent, int *best_div, unsigned long *best_prate) { struct mux_div_clk *md = to_mux_div_clk(c); unsigned int i; unsigned long rrate, best = 0, _best_div = 0, _best_prate = 0; struct clk *_best_parent = 0; for (i = 0; i < md->num_parents; i++) { int div; unsigned long prate; rrate = __div_round_rate(&md->data, rate, md->parents[i].src, &div, &prate); if (is_better_rate(rate, best, rrate)) { best = rrate; _best_div = div; _best_prate = prate; _best_parent = md->parents[i].src; } if (rate <= rrate && rrate <= rate + md->data.rate_margin) break; } if (best_div) *best_div = _best_div; if (best_prate) *best_prate = _best_prate; if (best_parent) *best_parent = _best_parent; if (best) return best; return -EINVAL; }