// SPDX-License-Identifier: GPL-2.0
/*
 * MediaTek common clock driver
 *
 * Copyright (C) 2018 MediaTek Inc.
 * Author: Ryder Lee <ryder.lee@mediatek.com>
 */
10 #include <clk-uclass.h>
14 #include <linux/delay.h>
21 #define CON0_BASE_EN BIT(0)
22 #define CON0_PWR_ON BIT(0)
23 #define CON0_ISO_EN BIT(1)
24 #define CON1_PCW_CHG BIT(31)
26 #define POSTDIV_MASK 0x7
27 #define INTEGER_BITS 7
29 /* scpsys clock off control */
30 #define CLK_SCP_CFG0 0x200
31 #define CLK_SCP_CFG1 0x204
32 #define SCP_ARMCK_OFF_EN GENMASK(9, 0)
33 #define SCP_AXICK_DCM_DIS_EN BIT(0)
34 #define SCP_AXICK_26M_SEL_EN BIT(4)
36 /* shared functions */
39 * In case the rate change propagation to parent clocks is undesirable,
40 * this function is recursively called to find the parent to calculate
41 * the accurate frequency.
43 static ulong mtk_clk_find_parent_rate(struct clk *clk, int id,
44 const struct driver *drv)
46 struct clk parent = { .id = id, };
51 if (uclass_get_device_by_driver(UCLASS_CLK, drv, &dev))
56 parent.dev = clk->dev;
59 return clk_get_rate(&parent);
62 static int mtk_clk_mux_set_parent(void __iomem *base, u32 parent,
63 const struct mtk_composite *mux)
67 while (mux->parent[index] != parent)
68 if (++index == mux->num_parents)
71 if (mux->flags & CLK_MUX_SETCLR_UPD) {
72 val = (mux->mux_mask << mux->mux_shift);
73 writel(val, base + mux->mux_clr_reg);
75 val = (index << mux->mux_shift);
76 writel(val, base + mux->mux_set_reg);
78 if (mux->upd_shift >= 0)
79 writel(BIT(mux->upd_shift), base + mux->upd_reg);
81 /* switch mux to a select parent */
82 val = readl(base + mux->mux_reg);
83 val &= ~(mux->mux_mask << mux->mux_shift);
85 val |= index << mux->mux_shift;
86 writel(val, base + mux->mux_reg);
92 /* apmixedsys functions */
94 static unsigned long __mtk_pll_recalc_rate(const struct mtk_pll_data *pll,
95 u32 fin, u32 pcw, int postdiv)
97 int pcwbits = pll->pcwbits;
103 /* The fractional part of the PLL divider. */
104 ibits = pll->pcwibits ? pll->pcwibits : INTEGER_BITS;
105 pcwfbits = pcwbits > ibits ? pcwbits - ibits : 0;
107 vco = (u64)fin * pcw;
109 if (pcwfbits && (vco & GENMASK(pcwfbits - 1, 0)))
117 return ((unsigned long)vco + postdiv - 1) / postdiv;
121 * MediaTek PLLs are configured through their pcw value. The pcw value
122 * describes a divider in the PLL feedback loop which consists of 7 bits
123 * for the integer part and the remaining bits (if present) for the
124 * fractional part. Also they have a 3 bit power-of-two post divider.
126 static void mtk_pll_set_rate_regs(struct clk *clk, u32 pcw, int postdiv)
128 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
129 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
133 val = readl(priv->base + pll->pd_reg);
134 val &= ~(POSTDIV_MASK << pll->pd_shift);
135 val |= (ffs(postdiv) - 1) << pll->pd_shift;
137 /* postdiv and pcw need to set at the same time if on same register */
138 if (pll->pd_reg != pll->pcw_reg) {
139 writel(val, priv->base + pll->pd_reg);
140 val = readl(priv->base + pll->pcw_reg);
144 val &= ~GENMASK(pll->pcw_shift + pll->pcwbits - 1, pll->pcw_shift);
145 val |= pcw << pll->pcw_shift;
147 if (pll->pcw_chg_reg) {
148 chg = readl(priv->base + pll->pcw_chg_reg);
150 writel(val, priv->base + pll->pcw_reg);
151 writel(chg, priv->base + pll->pcw_chg_reg);
154 writel(val, priv->base + pll->pcw_reg);
161 * mtk_pll_calc_values - calculate good values for a given input frequency.
163 * @pcw: The pcw value (output)
164 * @postdiv: The post divider (output)
165 * @freq: The desired target frequency
167 static void mtk_pll_calc_values(struct clk *clk, u32 *pcw, u32 *postdiv,
170 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
171 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
172 unsigned long fmin = pll->fmin ? pll->fmin : 1000 * MHZ;
177 if (freq > pll->fmax)
180 for (val = 0; val < 5; val++) {
182 if ((u64)freq * *postdiv >= fmin)
186 /* _pcw = freq * postdiv / xtal_rate * 2^pcwfbits */
187 ibits = pll->pcwibits ? pll->pcwibits : INTEGER_BITS;
188 _pcw = ((u64)freq << val) << (pll->pcwbits - ibits);
189 do_div(_pcw, priv->tree->xtal2_rate);
194 static ulong mtk_apmixedsys_set_rate(struct clk *clk, ulong rate)
199 mtk_pll_calc_values(clk, &pcw, &postdiv, rate);
200 mtk_pll_set_rate_regs(clk, pcw, postdiv);
205 static ulong mtk_apmixedsys_get_rate(struct clk *clk)
207 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
208 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
212 postdiv = (readl(priv->base + pll->pd_reg) >> pll->pd_shift) &
214 postdiv = 1 << postdiv;
216 pcw = readl(priv->base + pll->pcw_reg) >> pll->pcw_shift;
217 pcw &= GENMASK(pll->pcwbits - 1, 0);
219 return __mtk_pll_recalc_rate(pll, priv->tree->xtal2_rate,
223 static int mtk_apmixedsys_enable(struct clk *clk)
225 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
226 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
229 r = readl(priv->base + pll->pwr_reg) | CON0_PWR_ON;
230 writel(r, priv->base + pll->pwr_reg);
233 r = readl(priv->base + pll->pwr_reg) & ~CON0_ISO_EN;
234 writel(r, priv->base + pll->pwr_reg);
237 r = readl(priv->base + pll->reg + REG_CON0);
239 writel(r, priv->base + pll->reg + REG_CON0);
243 if (pll->flags & HAVE_RST_BAR) {
244 r = readl(priv->base + pll->reg + REG_CON0);
245 r |= pll->rst_bar_mask;
246 writel(r, priv->base + pll->reg + REG_CON0);
252 static int mtk_apmixedsys_disable(struct clk *clk)
254 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
255 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
258 if (pll->flags & HAVE_RST_BAR) {
259 r = readl(priv->base + pll->reg + REG_CON0);
260 r &= ~pll->rst_bar_mask;
261 writel(r, priv->base + pll->reg + REG_CON0);
264 r = readl(priv->base + pll->reg + REG_CON0);
266 writel(r, priv->base + pll->reg + REG_CON0);
268 r = readl(priv->base + pll->pwr_reg) | CON0_ISO_EN;
269 writel(r, priv->base + pll->pwr_reg);
271 r = readl(priv->base + pll->pwr_reg) & ~CON0_PWR_ON;
272 writel(r, priv->base + pll->pwr_reg);
277 /* topckgen functions */
279 static ulong mtk_factor_recalc_rate(const struct mtk_fixed_factor *fdiv,
282 u64 rate = parent_rate * fdiv->mult;
284 do_div(rate, fdiv->div);
289 static ulong mtk_topckgen_get_factor_rate(struct clk *clk, u32 off)
291 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
292 const struct mtk_fixed_factor *fdiv = &priv->tree->fdivs[off];
295 switch (fdiv->flags & CLK_PARENT_MASK) {
296 case CLK_PARENT_APMIXED:
297 rate = mtk_clk_find_parent_rate(clk, fdiv->parent,
298 DM_GET_DRIVER(mtk_clk_apmixedsys));
300 case CLK_PARENT_TOPCKGEN:
301 rate = mtk_clk_find_parent_rate(clk, fdiv->parent, NULL);
305 rate = priv->tree->xtal_rate;
308 return mtk_factor_recalc_rate(fdiv, rate);
311 static ulong mtk_topckgen_get_mux_rate(struct clk *clk, u32 off)
313 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
314 const struct mtk_composite *mux = &priv->tree->muxes[off];
317 index = readl(priv->base + mux->mux_reg);
318 index &= mux->mux_mask << mux->mux_shift;
319 index = index >> mux->mux_shift;
321 if (mux->parent[index])
322 return mtk_clk_find_parent_rate(clk, mux->parent[index],
325 return priv->tree->xtal_rate;
328 static ulong mtk_topckgen_get_rate(struct clk *clk)
330 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
332 if (clk->id < priv->tree->fdivs_offs)
333 return priv->tree->fclks[clk->id].rate;
334 else if (clk->id < priv->tree->muxes_offs)
335 return mtk_topckgen_get_factor_rate(clk, clk->id -
336 priv->tree->fdivs_offs);
338 return mtk_topckgen_get_mux_rate(clk, clk->id -
339 priv->tree->muxes_offs);
342 static int mtk_topckgen_enable(struct clk *clk)
344 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
345 const struct mtk_composite *mux;
348 if (clk->id < priv->tree->muxes_offs)
351 mux = &priv->tree->muxes[clk->id - priv->tree->muxes_offs];
352 if (mux->gate_shift < 0)
355 /* enable clock gate */
356 if (mux->flags & CLK_MUX_SETCLR_UPD) {
357 val = BIT(mux->gate_shift);
358 writel(val, priv->base + mux->mux_clr_reg);
360 val = readl(priv->base + mux->gate_reg);
361 val &= ~BIT(mux->gate_shift);
362 writel(val, priv->base + mux->gate_reg);
365 if (mux->flags & CLK_DOMAIN_SCPSYS) {
366 /* enable scpsys clock off control */
367 writel(SCP_ARMCK_OFF_EN, priv->base + CLK_SCP_CFG0);
368 writel(SCP_AXICK_DCM_DIS_EN | SCP_AXICK_26M_SEL_EN,
369 priv->base + CLK_SCP_CFG1);
375 static int mtk_topckgen_disable(struct clk *clk)
377 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
378 const struct mtk_composite *mux;
381 if (clk->id < priv->tree->muxes_offs)
384 mux = &priv->tree->muxes[clk->id - priv->tree->muxes_offs];
385 if (mux->gate_shift < 0)
388 /* disable clock gate */
389 if (mux->flags & CLK_MUX_SETCLR_UPD) {
390 val = BIT(mux->gate_shift);
391 writel(val, priv->base + mux->mux_set_reg);
393 val = readl(priv->base + mux->gate_reg);
394 val |= BIT(mux->gate_shift);
395 writel(val, priv->base + mux->gate_reg);
401 static int mtk_topckgen_set_parent(struct clk *clk, struct clk *parent)
403 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
405 if (clk->id < priv->tree->muxes_offs)
408 return mtk_clk_mux_set_parent(priv->base, parent->id,
409 &priv->tree->muxes[clk->id - priv->tree->muxes_offs]);
414 static int mtk_clk_gate_enable(struct clk *clk)
416 struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
417 const struct mtk_gate *gate = &priv->gates[clk->id];
418 u32 bit = BIT(gate->shift);
420 switch (gate->flags & CLK_GATE_MASK) {
421 case CLK_GATE_SETCLR:
422 writel(bit, priv->base + gate->regs->clr_ofs);
424 case CLK_GATE_SETCLR_INV:
425 writel(bit, priv->base + gate->regs->set_ofs);
427 case CLK_GATE_NO_SETCLR:
428 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, 0);
430 case CLK_GATE_NO_SETCLR_INV:
431 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, bit);
441 static int mtk_clk_gate_disable(struct clk *clk)
443 struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
444 const struct mtk_gate *gate = &priv->gates[clk->id];
445 u32 bit = BIT(gate->shift);
447 switch (gate->flags & CLK_GATE_MASK) {
448 case CLK_GATE_SETCLR:
449 writel(bit, priv->base + gate->regs->set_ofs);
451 case CLK_GATE_SETCLR_INV:
452 writel(bit, priv->base + gate->regs->clr_ofs);
454 case CLK_GATE_NO_SETCLR:
455 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, bit);
457 case CLK_GATE_NO_SETCLR_INV:
458 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, 0);
468 static ulong mtk_clk_gate_get_rate(struct clk *clk)
470 struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
471 const struct mtk_gate *gate = &priv->gates[clk->id];
473 switch (gate->flags & CLK_PARENT_MASK) {
474 case CLK_PARENT_APMIXED:
475 return mtk_clk_find_parent_rate(clk, gate->parent,
476 DM_GET_DRIVER(mtk_clk_apmixedsys));
478 case CLK_PARENT_TOPCKGEN:
479 return mtk_clk_find_parent_rate(clk, gate->parent,
480 DM_GET_DRIVER(mtk_clk_topckgen));
484 return priv->tree->xtal_rate;
488 const struct clk_ops mtk_clk_apmixedsys_ops = {
489 .enable = mtk_apmixedsys_enable,
490 .disable = mtk_apmixedsys_disable,
491 .set_rate = mtk_apmixedsys_set_rate,
492 .get_rate = mtk_apmixedsys_get_rate,
495 const struct clk_ops mtk_clk_topckgen_ops = {
496 .enable = mtk_topckgen_enable,
497 .disable = mtk_topckgen_disable,
498 .get_rate = mtk_topckgen_get_rate,
499 .set_parent = mtk_topckgen_set_parent,
502 const struct clk_ops mtk_clk_gate_ops = {
503 .enable = mtk_clk_gate_enable,
504 .disable = mtk_clk_gate_disable,
505 .get_rate = mtk_clk_gate_get_rate,
508 int mtk_common_clk_init(struct udevice *dev,
509 const struct mtk_clk_tree *tree)
511 struct mtk_clk_priv *priv = dev_get_priv(dev);
513 priv->base = dev_read_addr_ptr(dev);
522 int mtk_common_clk_gate_init(struct udevice *dev,
523 const struct mtk_clk_tree *tree,
524 const struct mtk_gate *gates)
526 struct mtk_cg_priv *priv = dev_get_priv(dev);
528 priv->base = dev_read_addr_ptr(dev);