2 * (C) Copyright 2015 Google, Inc
3 * (C) Copyright 2016 Heiko Stuebner <heiko@sntech.de>
5 * SPDX-License-Identifier: GPL-2.0
9 #include <clk-uclass.h>
11 #include <dt-structs.h>
16 #include <asm/arch/clock.h>
17 #include <asm/arch/cru_rk3188.h>
18 #include <asm/arch/grf_rk3188.h>
19 #include <asm/arch/hardware.h>
20 #include <dt-bindings/clock/rk3188-cru.h>
21 #include <dm/device-internal.h>
23 #include <dm/uclass-internal.h>
24 #include <linux/log2.h>
DECLARE_GLOBAL_DATA_PTR;

/*
 * Distinguishes the plain rk3188 CRU from the rk3188a variant; probe()
 * uses this (via driver data) to decide whether the PLLs have a
 * bandwidth-adjust register.
 * NOTE(review): the enumerator list is not visible in this chunk.
 */
enum rk3188_clk_type {

/* Platform data used when the device tree is compiled to C (OF_PLATDATA) */
struct rk3188_clk_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	/* decoded cru node; probe() reads reg[0]/reg[1] as base/size */
	struct dtd_rockchip_rk3188_cru dtd;
	/* All PLLs share the same VCO / output / reference frequency limits */
	VCO_MAX_HZ = 2200U * 1000000,
	VCO_MIN_HZ = 440 * 1000000,
	OUTPUT_MAX_HZ = 2200U * 1000000,
	OUTPUT_MIN_HZ = 30 * 1000000,
	FREF_MAX_HZ = 2200U * 1000000,
	FREF_MIN_HZ = 30 * 1000,

	/* PLL bandwidth-adjust field in con2 (written only when has_bwadj) */
	PLL_BWADJ_MASK = 0x0fff,

	/* PLL lock bits in GRF soc_status0 */
	SOCSTS_DPLL_LOCK = 1 << 5,
	SOCSTS_APLL_LOCK = 1 << 6,
	SOCSTS_CPLL_LOCK = 1 << 7,
	SOCSTS_GPLL_LOCK = 1 << 8,
/*
 * Convert an input rate and a desired output rate into the value for a
 * hardware divider field (the hardware stores "divider - 1").
 *
 * Integer division truncates, so the achieved rate may be higher than
 * requested when output_rate does not evenly divide input_rate; callers
 * assert exactness where it matters.
 *
 * Fix: drop the stray trailing ';' from the expansion. It made the
 * macro unusable anywhere but as a full statement (e.g. inside assert()
 * or a larger expression) and injected an empty statement at every
 * existing call site.
 */
#define RATE_TO_DIV(input_rate, output_rate) \
	((input_rate) / (output_rate) - 1)

/* Inverse of RATE_TO_DIV(): rate produced by a divider field value */
#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))
/*
 * Build a struct pll_div initializer for a target rate: nf is derived
 * from the requested hz and the fixed nr/no, and a _Static_assert
 * verifies at compile time that hz is hit exactly (no rounding loss).
 */
#define PLL_DIVISORS(hz, _nr, _no) {\
	.nr = _nr, .nf = (u32)((u64)hz * _nr * _no / OSC_HZ), .no = _no};\
	_Static_assert(((u64)hz * _nr * _no / OSC_HZ) * OSC_HZ /\
		       (_nr * _no) == hz, #hz "Hz cannot be hit with PLL "\
		       "divisors on line " __stringify(__LINE__));

/* Keep divisors as low as possible to reduce jitter and power usage */
#ifdef CONFIG_SPL_BUILD
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2);
static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 2);
/*
 * Program one PLL's divisors while holding it in reset.
 *
 * @cru:       CRU register block
 * @clk_id:    rk_clk_id selecting which PLL to program
 * @div:       nr/nf/no divisors; VCO = OSC_HZ * nf / nr, output = VCO / no
 * @has_bwadj: true if the PLL has a bandwidth-adjust field (con2)
 *
 * NOTE(review): several lines of this function (braces, the bwadj
 * conditional, delays and the return) are not visible in this chunk;
 * the comments below cover only the visible code.
 */
static int rkclk_set_pll(struct rk3188_cru *cru, enum rk_clk_id clk_id,
			 const struct pll_div *div, bool has_bwadj)
	int pll_id = rk_pll_id(clk_id);
	struct rk3188_pll *pll = &cru->pll[pll_id];
	/* All PLLs have same VCO and output frequency range restrictions. */
	uint vco_hz = OSC_HZ / 1000 * div->nf / div->nr * 1000;
	uint output_hz = vco_hz / div->no;

	debug("PLL at %x: nf=%d, nr=%d, no=%d, vco=%u Hz, output=%u Hz\n",
	      (uint)pll, div->nf, div->nr, div->no, vco_hz, output_hz);
	/* no must be 1 or even for the hardware to produce the target rate */
	assert(vco_hz >= VCO_MIN_HZ && vco_hz <= VCO_MAX_HZ &&
	       output_hz >= OUTPUT_MIN_HZ && output_hz <= OUTPUT_MAX_HZ &&
	       (div->no == 1 || !(div->no % 2)));

	/* hold the PLL in reset while changing divisors */
	rk_setreg(&pll->con3, 1 << PLL_RESET_SHIFT);

	/* hardware fields store divider - 1 */
	rk_clrsetreg(&pll->con0,
		     CLKR_MASK << CLKR_SHIFT | PLL_OD_MASK,
		     ((div->nr - 1) << CLKR_SHIFT) | (div->no - 1));
	rk_clrsetreg(&pll->con1, CLKF_MASK, div->nf - 1);
	/* bandwidth adjust: nf / 2, again stored as value - 1 */
	rk_clrsetreg(&pll->con2, PLL_BWADJ_MASK, (div->nf >> 1) - 1);

	/* return from reset */
	rk_clrreg(&pll->con3, 1 << PLL_RESET_SHIFT);
/*
 * Reprogram the DPLL for the requested SDRAM rate: switch the DPLL to
 * slow mode, set the new divisors, wait for lock, then return to
 * normal mode.
 *
 * NOTE(review): the switch statement selecting the table entry and the
 * function braces are not visible in this chunk.
 */
static int rkclk_configure_ddr(struct rk3188_cru *cru, struct rk3188_grf *grf,
			       unsigned int hz, bool has_bwadj)
	/* supported settings; rate = 24 MHz * nf / (nr * no) */
	static const struct pll_div dpll_cfg[] = {
		{.nf = 25, .nr = 2, .no = 1},	/* 300 MHz */
		{.nf = 400, .nr = 9, .no = 2},	/* ~533.3 MHz */
		{.nf = 500, .nr = 9, .no = 2},	/* ~666.6 MHz */
		{.nf = 100, .nr = 3, .no = 1},	/* 800 MHz */
	case 533000000:	/* actually 533.3 MHz */
	case 666000000:	/* actually 666.6 MHz */
		debug("Unsupported SDRAM frequency");

	/* pll enter slow-mode */
	rk_clrsetreg(&cru->cru_mode_con, DPLL_MODE_MASK << DPLL_MODE_SHIFT,
		     DPLL_MODE_SLOW << DPLL_MODE_SHIFT);

	rkclk_set_pll(cru, CLK_DDR, &dpll_cfg[cfg], has_bwadj);

	/* wait for pll lock */
	while (!(readl(&grf->soc_status0) & SOCSTS_DPLL_LOCK))

	/* PLL enter normal-mode */
	rk_clrsetreg(&cru->cru_mode_con, DPLL_MODE_MASK << DPLL_MODE_SHIFT,
		     DPLL_MODE_NORMAL << DPLL_MODE_SHIFT);
/*
 * Get pll rate by id.
 *
 * Reads the PLL's mode from cru_mode_con; in normal mode the rate is
 * reconstructed from the con0/con1 divisor fields as
 * 24 MHz * nf / (nr * no). Note the integer division truncates
 * fractional rates (e.g. the 533.3 MHz DDR setting reads back rounded).
 *
 * NOTE(review): the remaining mode cases (slow/deep-slow) and the
 * function braces are not visible in this chunk.
 */
static uint32_t rkclk_pll_get_rate(struct rk3188_cru *cru,
				   enum rk_clk_id clk_id)
	int pll_id = rk_pll_id(clk_id);
	struct rk3188_pll *pll = &cru->pll[pll_id];
	/* mode-field shift per clk_id; 0xff marks an id with no PLL mode */
	static u8 clk_shift[CLK_COUNT] = {
		0xff, APLL_MODE_SHIFT, DPLL_MODE_SHIFT, CPLL_MODE_SHIFT,

	con = readl(&cru->cru_mode_con);
	shift = clk_shift[clk_id];
	/* all PLL mode fields share the same width, so APLL's mask works */
	switch ((con >> shift) & APLL_MODE_MASK) {
	case APLL_MODE_NORMAL:
		/* divisor fields store value - 1 */
		con = readl(&pll->con0);
		no = ((con >> CLKOD_SHIFT) & CLKOD_MASK) + 1;
		nr = ((con >> CLKR_SHIFT) & CLKR_MASK) + 1;
		con = readl(&pll->con1);
		nf = ((con >> CLKF_SHIFT) & CLKF_MASK) + 1;

		return (24 * nf / (nr * no)) * 1000000;
/*
 * Return the current rate of an MMC controller clock, derived from the
 * parent rate (gclk_rate) and the divider field read back from the
 * relevant clksel register.
 *
 * NOTE(review): the periph parameter and the switch/case labels
 * selecting between EMMC / MMC0 / SDIO are not visible in this chunk.
 */
static ulong rockchip_mmc_get_clk(struct rk3188_cru *cru, uint gclk_rate,
		con = readl(&cru->cru_clksel_con[12]);
		div = (con >> EMMC_DIV_SHIFT) & EMMC_DIV_MASK;
		con = readl(&cru->cru_clksel_con[11]);
		div = (con >> MMC0_DIV_SHIFT) & MMC0_DIV_MASK;
		con = readl(&cru->cru_clksel_con[12]);
		div = (con >> SDIO_DIV_SHIFT) & SDIO_DIV_MASK;

	return DIV_TO_RATE(gclk_rate, div);
/*
 * Set an MMC controller clock by programming the divider from the
 * parent rate (gclk_rate); returns the rate actually achieved, as read
 * back by rockchip_mmc_get_clk().
 *
 * NOTE(review): the switch/case labels and function braces are not
 * visible in this chunk.
 */
static ulong rockchip_mmc_set_clk(struct rk3188_cru *cru, uint gclk_rate,
				  int periph, uint freq)
	debug("%s: gclk_rate=%u\n", __func__, gclk_rate);
	src_clk_div = RATE_TO_DIV(gclk_rate, freq);
	/* all three MMC divider fields are 6 bits wide */
	assert(src_clk_div <= 0x3f);
		rk_clrsetreg(&cru->cru_clksel_con[12],
			     EMMC_DIV_MASK << EMMC_DIV_SHIFT,
			     src_clk_div << EMMC_DIV_SHIFT);
		rk_clrsetreg(&cru->cru_clksel_con[11],
			     MMC0_DIV_MASK << MMC0_DIV_SHIFT,
			     src_clk_div << MMC0_DIV_SHIFT);
		rk_clrsetreg(&cru->cru_clksel_con[12],
			     SDIO_DIV_MASK << SDIO_DIV_SHIFT,
			     src_clk_div << SDIO_DIV_SHIFT);

	return rockchip_mmc_get_clk(cru, gclk_rate, periph);
/*
 * Return the current rate of a SPI controller clock from the divider
 * in clksel_con[25] and the parent rate (gclk_rate).
 *
 * NOTE(review): the periph parameter and the switch/case labels
 * selecting SPI0 vs SPI1 are not visible in this chunk.
 */
static ulong rockchip_spi_get_clk(struct rk3188_cru *cru, uint gclk_rate,
		con = readl(&cru->cru_clksel_con[25]);
		div = (con >> SPI0_DIV_SHIFT) & SPI0_DIV_MASK;
		con = readl(&cru->cru_clksel_con[25]);
		div = (con >> SPI1_DIV_SHIFT) & SPI1_DIV_MASK;

	return DIV_TO_RATE(gclk_rate, div);
/*
 * Set a SPI controller clock by programming its divider in
 * clksel_con[25]; returns the rate actually achieved, as read back by
 * rockchip_spi_get_clk().
 *
 * NOTE(review): the switch/case labels and function braces are not
 * visible in this chunk.
 */
static ulong rockchip_spi_set_clk(struct rk3188_cru *cru, uint gclk_rate,
				  int periph, uint freq)
	int src_clk_div = RATE_TO_DIV(gclk_rate, freq);

		assert(src_clk_div <= SPI0_DIV_MASK);
		rk_clrsetreg(&cru->cru_clksel_con[25],
			     SPI0_DIV_MASK << SPI0_DIV_SHIFT,
			     src_clk_div << SPI0_DIV_SHIFT);
		assert(src_clk_div <= SPI1_DIV_MASK);
		rk_clrsetreg(&cru->cru_clksel_con[25],
			     SPI1_DIV_MASK << SPI1_DIV_SHIFT,
			     src_clk_div << SPI1_DIV_SHIFT);

	return rockchip_spi_get_clk(cru, gclk_rate, periph);
310 #ifdef CONFIG_SPL_BUILD
311 static void rkclk_init(struct rk3188_cru *cru, struct rk3188_grf *grf,
314 u32 aclk_div, hclk_div, pclk_div, h2p_div;
316 /* pll enter slow-mode */
317 rk_clrsetreg(&cru->cru_mode_con,
318 GPLL_MODE_MASK << GPLL_MODE_SHIFT |
319 CPLL_MODE_MASK << CPLL_MODE_SHIFT,
320 GPLL_MODE_SLOW << GPLL_MODE_SHIFT |
321 CPLL_MODE_SLOW << CPLL_MODE_SHIFT);
324 rkclk_set_pll(cru, CLK_GENERAL, &gpll_init_cfg, has_bwadj);
325 rkclk_set_pll(cru, CLK_CODEC, &cpll_init_cfg, has_bwadj);
327 /* waiting for pll lock */
328 while ((readl(&grf->soc_status0) &
329 (SOCSTS_CPLL_LOCK | SOCSTS_GPLL_LOCK)) !=
330 (SOCSTS_CPLL_LOCK | SOCSTS_GPLL_LOCK))
334 * cpu clock pll source selection and
335 * reparent aclk_cpu_pre from apll to gpll
336 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
338 aclk_div = RATE_TO_DIV(GPLL_HZ, CPU_ACLK_HZ);
339 assert((aclk_div + 1) * CPU_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);
341 rk_clrsetreg(&cru->cru_clksel_con[0],
342 CPU_ACLK_PLL_MASK << CPU_ACLK_PLL_SHIFT |
343 A9_CPU_DIV_MASK << A9_CPU_DIV_SHIFT,
344 CPU_ACLK_PLL_SELECT_GPLL << CPU_ACLK_PLL_SHIFT |
345 aclk_div << A9_CPU_DIV_SHIFT);
347 hclk_div = ilog2(CPU_ACLK_HZ / CPU_HCLK_HZ);
348 assert((1 << hclk_div) * CPU_HCLK_HZ == CPU_ACLK_HZ && hclk_div < 0x3);
349 pclk_div = ilog2(CPU_ACLK_HZ / CPU_PCLK_HZ);
350 assert((1 << pclk_div) * CPU_PCLK_HZ == CPU_ACLK_HZ && pclk_div < 0x4);
351 h2p_div = ilog2(CPU_HCLK_HZ / CPU_H2P_HZ);
352 assert((1 << h2p_div) * CPU_H2P_HZ == CPU_HCLK_HZ && pclk_div < 0x3);
354 rk_clrsetreg(&cru->cru_clksel_con[1],
355 AHB2APB_DIV_MASK << AHB2APB_DIV_SHIFT |
356 CPU_PCLK_DIV_MASK << CPU_PCLK_DIV_SHIFT |
357 CPU_HCLK_DIV_MASK << CPU_HCLK_DIV_SHIFT,
358 h2p_div << AHB2APB_DIV_SHIFT |
359 pclk_div << CPU_PCLK_DIV_SHIFT |
360 hclk_div << CPU_HCLK_DIV_SHIFT);
363 * peri clock pll source selection and
364 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
366 aclk_div = GPLL_HZ / PERI_ACLK_HZ - 1;
367 assert((aclk_div + 1) * PERI_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);
369 hclk_div = ilog2(PERI_ACLK_HZ / PERI_HCLK_HZ);
370 assert((1 << hclk_div) * PERI_HCLK_HZ ==
371 PERI_ACLK_HZ && (hclk_div < 0x4));
373 pclk_div = ilog2(PERI_ACLK_HZ / PERI_PCLK_HZ);
374 assert((1 << pclk_div) * PERI_PCLK_HZ ==
375 PERI_ACLK_HZ && (pclk_div < 0x4));
377 rk_clrsetreg(&cru->cru_clksel_con[10],
378 PERI_PCLK_DIV_MASK << PERI_PCLK_DIV_SHIFT |
379 PERI_HCLK_DIV_MASK << PERI_HCLK_DIV_SHIFT |
380 PERI_ACLK_DIV_MASK << PERI_ACLK_DIV_SHIFT,
381 PERI_SEL_GPLL << PERI_SEL_PLL_SHIFT |
382 pclk_div << PERI_PCLK_DIV_SHIFT |
383 hclk_div << PERI_HCLK_DIV_SHIFT |
384 aclk_div << PERI_ACLK_DIV_SHIFT);
386 /* PLL enter normal-mode */
387 rk_clrsetreg(&cru->cru_mode_con,
388 GPLL_MODE_MASK << GPLL_MODE_SHIFT |
389 CPLL_MODE_MASK << CPLL_MODE_SHIFT,
390 GPLL_MODE_NORMAL << GPLL_MODE_SHIFT |
391 CPLL_MODE_NORMAL << CPLL_MODE_SHIFT);
393 rockchip_mmc_set_clk(cru, PERI_HCLK_HZ, HCLK_SDMMC, 16000000);
/*
 * clk_ops .get_rate: dispatch on clk->id to the PLL / MMC / SPI rate
 * getters. MMC rates are derived from PERI_HCLK_HZ, SPI rates from
 * PERI_PCLK_HZ.
 *
 * NOTE(review): the switch, case labels and return are not visible in
 * this chunk.
 */
static ulong rk3188_clk_get_rate(struct clk *clk)
	struct rk3188_clk_priv *priv = dev_get_priv(clk->dev);
	ulong new_rate, gclk_rate;

	gclk_rate = rkclk_pll_get_rate(priv->cru, CLK_GENERAL);
		new_rate = rkclk_pll_get_rate(priv->cru, clk->id);
		new_rate = rockchip_mmc_get_clk(priv->cru, PERI_HCLK_HZ,
		new_rate = rockchip_spi_get_clk(priv->cru, PERI_PCLK_HZ,
/*
 * clk_ops .set_rate: dispatch on clk->id to the DDR / MMC / SPI rate
 * setters.
 *
 * NOTE(review): the switch, case labels and return are not visible in
 * this chunk.
 */
static ulong rk3188_clk_set_rate(struct clk *clk, ulong rate)
	struct rk3188_clk_priv *priv = dev_get_priv(clk->dev);
	struct rk3188_cru *cru = priv->cru;

		new_rate = rkclk_configure_ddr(priv->cru, priv->grf, rate,
		new_rate = rockchip_mmc_set_clk(cru, PERI_HCLK_HZ,
		new_rate = rockchip_spi_set_clk(cru, PERI_PCLK_HZ,
/* Clock uclass operations exposed by this driver */
static struct clk_ops rk3188_clk_ops = {
	.get_rate = rk3188_clk_get_rate,
	.set_rate = rk3188_clk_set_rate,
/*
 * Read the CRU register base from the device tree (live-tree path).
 * With OF_PLATDATA the address comes from plat->dtd in probe() instead.
 */
static int rk3188_clk_ofdata_to_platdata(struct udevice *dev)
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3188_clk_priv *priv = dev_get_priv(dev);

	priv->cru = (struct rk3188_cru *)dev_get_addr(dev);
/*
 * Probe: look up the GRF syscon, record whether this is the rk3188a
 * (bandwidth-adjust capable) variant from the driver data, and in SPL
 * run the one-time clock initialization.
 *
 * NOTE(review): #else/#endif lines and the return are not visible in
 * this chunk.
 */
static int rk3188_clk_probe(struct udevice *dev)
	struct rk3188_clk_priv *priv = dev_get_priv(dev);
	enum rk3188_clk_type type = dev_get_driver_data(dev);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	if (IS_ERR(priv->grf))
		return PTR_ERR(priv->grf);
	priv->has_bwadj = (type == RK3188A_CRU) ? 1 : 0;

#ifdef CONFIG_SPL_BUILD
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3188_clk_plat *plat = dev_get_platdata(dev);

	/* OF_PLATDATA: reg[0]/reg[1] are the CRU base address and size */
	priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);

	rkclk_init(priv->cru, priv->grf, priv->has_bwadj);
/*
 * Bind: also bind the sysreset driver, which has no device tree node of
 * its own. A bind failure is only logged, not propagated, so the clock
 * driver still comes up without reset support.
 */
static int rk3188_clk_bind(struct udevice *dev)
	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(gd->dm_root, "rk3188_sysreset", "reset", &dev);
		debug("Warning: No rk3188 reset driver: ret=%d\n", ret);
/* Match table; .data selects plain rk3188 vs rk3188a register layout */
static const struct udevice_id rk3188_clk_ids[] = {
	{ .compatible = "rockchip,rk3188-cru", .data = RK3188_CRU },
	{ .compatible = "rockchip,rk3188a-cru", .data = RK3188A_CRU },

/*
 * Driver declaration. NOTE(review): the .id (uclass) line and closing
 * brace are not visible in this chunk.
 */
U_BOOT_DRIVER(rockchip_rk3188_cru) = {
	.name = "rockchip_rk3188_cru",
	.of_match = rk3188_clk_ids,
	.priv_auto_alloc_size = sizeof(struct rk3188_clk_priv),
	.platdata_auto_alloc_size = sizeof(struct rk3188_clk_plat),
	.ops = &rk3188_clk_ops,
	.bind = rk3188_clk_bind,
	.ofdata_to_platdata = rk3188_clk_ofdata_to_platdata,
	.probe = rk3188_clk_probe,