// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd
 * Author: Andy Yan <andy.yan@rock-chips.com>
 * (C) Copyright 2017 Theobroma Systems Design und Consulting GmbH
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <log.h>
#include <malloc.h>
#include <mapmem.h>
#include <syscon.h>
#include <bitfield.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_rk3368.h>
#include <asm/arch-rockchip/hardware.h>
#include <asm/io.h>
#include <dm/device-internal.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3368-cru.h>
#include <linux/delay.h>
#include <linux/stringify.h>

#if CONFIG_IS_ENABLED(OF_PLATDATA)
struct rk3368_clk_plat {
	struct dtd_rockchip_rk3368_cru dtd;
};
#endif

struct pll_div {
	u32 nr;
	u32 nf;
	u32 no;
};

#define OSC_HZ		(24 * 1000 * 1000)
#define APLL_L_HZ	(800 * 1000 * 1000)
#define APLL_B_HZ	(816 * 1000 * 1000)
#define GPLL_HZ		(576 * 1000 * 1000)
#define CPLL_HZ		(400 * 1000 * 1000)

#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

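/*
 * PLL_DIVISORS() derives the feedback divider (nf) for a target rate from
 * the given nr/no pair and statically checks that the requested frequency
 * can be reached exactly from the 24 MHz oscillator.
 */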
#define PLL_DIVISORS(hz, _nr, _no) { \
	.nr = _nr, .nf = (u32)((u64)hz * _nr * _no / OSC_HZ), .no = _no}; \
	_Static_assert(((u64)hz * _nr * _no / OSC_HZ) * OSC_HZ / \
		       (_nr * _no) == hz, #hz "Hz cannot be hit with PLL " \
		       "divisors on line " __stringify(__LINE__));

#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
static const struct pll_div apll_l_init_cfg = PLL_DIVISORS(APLL_L_HZ, 12, 2);
static const struct pll_div apll_b_init_cfg = PLL_DIVISORS(APLL_B_HZ, 1, 2);
#if !defined(CONFIG_TPL_BUILD)
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 1, 2);
static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 6);
#endif
#endif

static ulong rk3368_clk_get_rate(struct clk *clk);

/* Get pll rate by id */
static uint32_t rkclk_pll_get_rate(struct rk3368_cru *cru,
				   enum rk3368_pll_id pll_id)
{
	uint32_t nr, no, nf;
	uint32_t con;
	struct rk3368_pll *pll = &cru->pll[pll_id];

	con = readl(&pll->con3);

	switch ((con & PLL_MODE_MASK) >> PLL_MODE_SHIFT) {
	case PLL_MODE_SLOW:
		return OSC_HZ;
	case PLL_MODE_NORMAL:
		con = readl(&pll->con0);
		no = ((con & PLL_OD_MASK) >> PLL_OD_SHIFT) + 1;
		nr = ((con & PLL_NR_MASK) >> PLL_NR_SHIFT) + 1;
		con = readl(&pll->con1);
		nf = ((con & PLL_NF_MASK) >> PLL_NF_SHIFT) + 1;

		return (24 * nf / (nr * no)) * 1000000;
	case PLL_MODE_DEEP_SLOW:
	default:
		return 32768;
	}
}

#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
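/*
 * Program a PLL: enter slow mode and assert reset, write the NR/OD/NF and
 * BWADJ divisors, release reset, wait for the lock bit and switch the PLL
 * back to normal mode.
 */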
static int rkclk_set_pll(struct rk3368_cru *cru, enum rk3368_pll_id pll_id,
			 const struct pll_div *div)
{
	struct rk3368_pll *pll = &cru->pll[pll_id];
	/* All PLLs have the same VCO and output frequency range restrictions. */
	uint vco_hz = OSC_HZ / 1000 * div->nf / div->nr * 1000;
	uint output_hz = vco_hz / div->no;

	debug("PLL at %p: nf=%d, nr=%d, no=%d, vco=%u Hz, output=%u Hz\n",
	      pll, div->nf, div->nr, div->no, vco_hz, output_hz);

	/* enter slow mode and reset pll */
	rk_clrsetreg(&pll->con3, PLL_MODE_MASK | PLL_RESET_MASK,
		     PLL_RESET << PLL_RESET_SHIFT);

	rk_clrsetreg(&pll->con0, PLL_NR_MASK | PLL_OD_MASK,
		     ((div->nr - 1) << PLL_NR_SHIFT) |
		     ((div->no - 1) << PLL_OD_SHIFT));
	writel((div->nf - 1) << PLL_NF_SHIFT, &pll->con1);
	/*
	 * BWADJ should be set to NF / 2 to ensure the nominal bandwidth.
	 * Compare the RK3368 TRM, section "3.6.4 PLL Bandwidth Adjustment".
	 */
	clrsetbits_le32(&pll->con2, PLL_BWADJ_MASK, (div->nf >> 1) - 1);

	udelay(10);

	/* return from reset */
	rk_clrreg(&pll->con3, PLL_RESET_MASK);

	/* waiting for pll lock */
	while (!(readl(&pll->con1) & PLL_LOCK_STA))
		udelay(1);

	rk_clrsetreg(&pll->con3, PLL_MODE_MASK,
		     PLL_MODE_NORMAL << PLL_MODE_SHIFT);

	return 0;
}
#endif

#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
static void rkclk_init(struct rk3368_cru *cru)
{
	u32 apllb, aplll, dpll, cpll, gpll;

	rkclk_set_pll(cru, APLLB, &apll_b_init_cfg);
	rkclk_set_pll(cru, APLLL, &apll_l_init_cfg);
#if !defined(CONFIG_TPL_BUILD)
	/*
	 * If we plan to return to the boot ROM, we can't increase the
	 * GPLL rate from the SPL stage.
	 */
	rkclk_set_pll(cru, GPLL, &gpll_init_cfg);
	rkclk_set_pll(cru, CPLL, &cpll_init_cfg);
#endif

	apllb = rkclk_pll_get_rate(cru, APLLB);
	aplll = rkclk_pll_get_rate(cru, APLLL);
	dpll = rkclk_pll_get_rate(cru, DPLL);
	cpll = rkclk_pll_get_rate(cru, CPLL);
	gpll = rkclk_pll_get_rate(cru, GPLL);

	debug("%s apllb(%d) apll(%d) dpll(%d) cpll(%d) gpll(%d)\n",
	      __func__, apllb, aplll, dpll, cpll, gpll);
}
#endif

#if !IS_ENABLED(CONFIG_SPL_BUILD) || CONFIG_IS_ENABLED(MMC_SUPPORT)
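/*
 * Read back an MMC clock: decode the parent PLL mux and divider from the
 * CLKSEL_CON register for the given clock ID and return the resulting rate,
 * halved to account for the fixed post-divide by 2.
 */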
static ulong rk3368_mmc_get_clk(struct rk3368_cru *cru, uint clk_id)
{
	u32 div, con, con_id, rate;
	u32 pll_rate;

	switch (clk_id) {
	case HCLK_SDMMC:
		con_id = 50;
		break;
	case HCLK_EMMC:
		con_id = 51;
		break;
	case SCLK_SDIO0:
		con_id = 48;
		break;
	default:
		return -EINVAL;
	}

	con = readl(&cru->clksel_con[con_id]);
	switch (con & MMC_PLL_SEL_MASK) {
	case MMC_PLL_SEL_GPLL:
		pll_rate = rkclk_pll_get_rate(cru, GPLL);
		break;
	case MMC_PLL_SEL_24M:
		pll_rate = OSC_HZ;
		break;
	case MMC_PLL_SEL_CPLL:
		pll_rate = rkclk_pll_get_rate(cru, CPLL);
		break;
	case MMC_PLL_SEL_USBPHY_480M:
	default:
		return -EINVAL;
	}
	div = (con & MMC_CLK_DIV_MASK) >> MMC_CLK_DIV_SHIFT;
	rate = DIV_TO_RATE(pll_rate, div);

	debug("%s: raw rate %d (post-divide by 2)\n", __func__, rate);
	return rate >> 1;
}

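/*
 * Pick an MMC parent and divider: for each candidate parent (CPLL, GPLL,
 * 24 MHz), compute the smallest divider that does not overshoot the target
 * rate and keep the combination yielding the highest achievable rate.
 */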
static ulong rk3368_mmc_find_best_rate_and_parent(struct clk *clk,
						  ulong rate,
						  u32 *best_mux,
						  u32 *best_div)
{
	int i;
	ulong best_rate = 0;
	const ulong MHz = 1000000;
	const struct {
		u32 mux;
		ulong rate;
	} parents[] = {
		{ .mux = MMC_PLL_SEL_CPLL, .rate = CPLL_HZ },
		{ .mux = MMC_PLL_SEL_GPLL, .rate = GPLL_HZ },
		{ .mux = MMC_PLL_SEL_24M, .rate = 24 * MHz }
	};

	debug("%s: target rate %ld\n", __func__, rate);
	for (i = 0; i < ARRAY_SIZE(parents); ++i) {
		/*
		 * Find the largest rate no larger than the target-rate for
		 * the current parent.
		 */
		ulong parent_rate = parents[i].rate;
		u32 div = DIV_ROUND_UP(parent_rate, rate);
		u32 adj_div = div;
		ulong new_rate = parent_rate / adj_div;

		debug("%s: rate %ld, parent-mux %d, parent-rate %ld, div %d\n",
		      __func__, rate, parents[i].mux, parents[i].rate, div);

		/* Skip, if not representable */
		if ((div - 1) > MMC_CLK_DIV_MASK)
			continue;

		/* Skip, if we already have a better (or equal) solution */
		if (new_rate <= best_rate)
			continue;

		/* This is our new best rate. */
		best_rate = new_rate;
		*best_mux = parents[i].mux;
		*best_div = div - 1;
	}

	debug("%s: best_mux = %x, best_div = %d, best_rate = %ld\n",
	      __func__, *best_mux, *best_div, best_rate);

	return best_rate;
}

static ulong rk3368_mmc_set_clk(struct clk *clk, ulong rate)
{
	struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	struct rk3368_cru *cru = priv->cru;
	ulong clk_id = clk->id;
	u32 con_id, mux = 0, div = 0;

	/* Find the best parent and rate */
	rk3368_mmc_find_best_rate_and_parent(clk, rate << 1, &mux, &div);

	switch (clk_id) {
	case HCLK_SDMMC:
		con_id = 50;
		break;
	case HCLK_EMMC:
		con_id = 51;
		break;
	case SCLK_SDIO0:
		con_id = 48;
		break;
	default:
		return -EINVAL;
	}

	rk_clrsetreg(&cru->clksel_con[con_id],
		     MMC_PLL_SEL_MASK | MMC_CLK_DIV_MASK,
		     mux | div);

	return rk3368_mmc_get_clk(cru, clk_id);
}
#endif

#if IS_ENABLED(CONFIG_TPL_BUILD)
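/*
 * Program the DPLL for the requested SDRAM rate; only 1200, 1332 and
 * 1600 MHz are supported.
 */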
static ulong rk3368_ddr_set_clk(struct rk3368_cru *cru, ulong set_rate)
{
	const struct pll_div *dpll_cfg = NULL;
	const ulong MHz = 1000000;

	/* Fout = ((Fin / NR) * NF) / NO */
	static const struct pll_div dpll_1200 = PLL_DIVISORS(1200 * MHz, 1, 1);
	static const struct pll_div dpll_1332 = PLL_DIVISORS(1332 * MHz, 2, 1);
	static const struct pll_div dpll_1600 = PLL_DIVISORS(1600 * MHz, 3, 2);

	switch (set_rate) {
	case 1200 * MHz:
		dpll_cfg = &dpll_1200;
		break;
	case 1332 * MHz:
		dpll_cfg = &dpll_1332;
		break;
	case 1600 * MHz:
		dpll_cfg = &dpll_1600;
		break;
	default:
		pr_err("Unsupported SDRAM frequency: %ld\n", set_rate);
		/* Bail out instead of programming the PLL with a NULL config */
		return -EINVAL;
	}
	rkclk_set_pll(cru, DPLL, dpll_cfg);

	return set_rate;
}
#endif

#if CONFIG_IS_ENABLED(GMAC_ROCKCHIP)
static ulong rk3368_gmac_set_clk(struct rk3368_cru *cru, ulong set_rate)
{
	ulong ret;

	/*
	 * The gmac clock can be derived either from an external clock
	 * or can be generated internally by a divider from SCLK_MAC.
	 */
	if (readl(&cru->clksel_con[43]) & GMAC_MUX_SEL_EXTCLK) {
		/* An external clock will always generate the right rate... */
		ret = set_rate;
	} else {
		u32 con = readl(&cru->clksel_con[43]);
		ulong pll_rate;
		u8 div;

		if (((con >> GMAC_PLL_SHIFT) & GMAC_PLL_MASK) ==
		    GMAC_PLL_SELECT_GENERAL)
			pll_rate = GPLL_HZ;
		else if (((con >> GMAC_PLL_SHIFT) & GMAC_PLL_MASK) ==
			 GMAC_PLL_SELECT_CODEC)
			pll_rate = CPLL_HZ;
		else
			/* CPLL is not set */
			return -EPERM;

		div = DIV_ROUND_UP(pll_rate, set_rate) - 1;
		if (div <= 0x1f)
			rk_clrsetreg(&cru->clksel_con[43], GMAC_DIV_CON_MASK,
				     div << GMAC_DIV_CON_SHIFT);
		else
			debug("Unsupported div for gmac: %d\n", div);

		return DIV_TO_RATE(pll_rate, div);
	}

	return ret;
}
#endif

/*
 * RK3368 SPI clocks have a common divider-width (7 bits) and a single bit
 * to select either CPLL or GPLL as the clock-parent. The locations within
 * the enclosing CLKSEL_CON (i.e. div_shift and sel_shift) are variable.
 */

struct spi_clkreg {
	uint8_t reg;  /* CLKSEL_CON[reg] register in CRU */
	uint8_t div_shift;
	uint8_t sel_shift;
};

/*
 * The entries are numbered relative to their offset from SCLK_SPI0.
 */
static const struct spi_clkreg spi_clkregs[] = {
	[0] = { .reg = 45, .div_shift = 0, .sel_shift = 7, },
	[1] = { .reg = 45, .div_shift = 8, .sel_shift = 15, },
	[2] = { .reg = 46, .div_shift = 8, .sel_shift = 15, },
};

static inline u32 extract_bits(u32 val, unsigned width, unsigned shift)
{
	return (val >> shift) & ((1 << width) - 1);
}

static ulong rk3368_spi_get_clk(struct rk3368_cru *cru, ulong clk_id)
{
	const struct spi_clkreg *spiclk = NULL;
	u32 div, val;

	switch (clk_id) {
	case SCLK_SPI0 ... SCLK_SPI2:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	val = readl(&cru->clksel_con[spiclk->reg]);
	div = extract_bits(val, 7, spiclk->div_shift);

	debug("%s: div 0x%x\n", __func__, div);
	return DIV_TO_RATE(GPLL_HZ, div);
}

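/*
 * Set a SPI clock: program the 7-bit divider derived from GPLL and set the
 * parent-select bit (the read-back path above assumes GPLL as the parent).
 */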
static ulong rk3368_spi_set_clk(struct rk3368_cru *cru, ulong clk_id, uint hz)
{
	const struct spi_clkreg *spiclk = NULL;
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(GPLL_HZ, hz);
	assert(src_clk_div < 127);

	switch (clk_id) {
	case SCLK_SPI0 ... SCLK_SPI2:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	rk_clrsetreg(&cru->clksel_con[spiclk->reg],
		     ((0x7f << spiclk->div_shift) |
		      (0x1 << spiclk->sel_shift)),
		     ((src_clk_div << spiclk->div_shift) |
		      (1 << spiclk->sel_shift)));

	return rk3368_spi_get_clk(cru, clk_id);
}

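/*
 * The SARADC clock is a plain divider from the 24 MHz oscillator,
 * configured in CLKSEL_CON[25].
 */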
static ulong rk3368_saradc_get_clk(struct rk3368_cru *cru)
{
	u32 div, val;

	val = readl(&cru->clksel_con[25]);
	div = bitfield_extract(val, CLK_SARADC_DIV_CON_SHIFT,
			       CLK_SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

static ulong rk3368_saradc_set_clk(struct rk3368_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);

	rk_clrsetreg(&cru->clksel_con[25],
		     CLK_SARADC_DIV_CON_MASK,
		     src_clk_div << CLK_SARADC_DIV_CON_SHIFT);

	return rk3368_saradc_get_clk(cru);
}

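/* clk_ops handlers: dispatch on the clock ID from the rk3368-cru binding */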
static ulong rk3368_clk_get_rate(struct clk *clk)
{
	struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	ulong rate = 0;

	debug("%s: id %ld\n", __func__, clk->id);
	switch (clk->id) {
	case PLL_CPLL:
		rate = rkclk_pll_get_rate(priv->cru, CPLL);
		break;
	case PLL_GPLL:
		rate = rkclk_pll_get_rate(priv->cru, GPLL);
		break;
	case SCLK_SPI0 ... SCLK_SPI2:
		rate = rk3368_spi_get_clk(priv->cru, clk->id);
		break;
#if !IS_ENABLED(CONFIG_SPL_BUILD) || CONFIG_IS_ENABLED(MMC_SUPPORT)
	case HCLK_SDMMC:
	case HCLK_EMMC:
		rate = rk3368_mmc_get_clk(priv->cru, clk->id);
		break;
#endif
	case SCLK_SARADC:
		rate = rk3368_saradc_get_clk(priv->cru);
		break;
	default:
		return -ENOENT;
	}

	return rate;
}

static ulong rk3368_clk_set_rate(struct clk *clk, ulong rate)
{
	__maybe_unused struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	ulong ret = 0;

	debug("%s id:%ld rate:%ld\n", __func__, clk->id, rate);
	switch (clk->id) {
	case SCLK_SPI0 ... SCLK_SPI2:
		ret = rk3368_spi_set_clk(priv->cru, clk->id, rate);
		break;
#if IS_ENABLED(CONFIG_TPL_BUILD)
	case CLK_DDR:
		ret = rk3368_ddr_set_clk(priv->cru, rate);
		break;
#endif
#if !IS_ENABLED(CONFIG_SPL_BUILD) || CONFIG_IS_ENABLED(MMC_SUPPORT)
	case HCLK_SDMMC:
	case HCLK_EMMC:
		ret = rk3368_mmc_set_clk(clk, rate);
		break;
#endif
#if CONFIG_IS_ENABLED(GMAC_ROCKCHIP)
	case SCLK_MAC:
		/* select the external clock */
		ret = rk3368_gmac_set_clk(priv->cru, rate);
		break;
#endif
	case SCLK_SARADC:
		ret = rk3368_saradc_set_clk(priv->cru, rate);
		break;
	default:
		return -ENOENT;
	}

	return ret;
}

static int __maybe_unused rk3368_gmac_set_parent(struct clk *clk, struct clk *parent)
{
	struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	struct rk3368_cru *cru = priv->cru;
	const char *clock_output_name;
	int ret;

	/*
	 * If the requested parent is in the same clock-controller and
	 * the id is SCLK_MAC ("sclk_mac"), switch to the internal
	 * clock.
	 */
	if ((parent->dev == clk->dev) && (parent->id == SCLK_MAC)) {
		debug("%s: switching GMAC to SCLK_MAC\n", __func__);
		rk_clrreg(&cru->clksel_con[43], GMAC_MUX_SEL_EXTCLK);
		return 0;
	}

	/*
	 * Otherwise, we need to check the clock-output-names of the
	 * requested parent to see if the requested id is "ext_gmac".
	 */
	ret = dev_read_string_index(parent->dev, "clock-output-names",
				    parent->id, &clock_output_name);
	if (ret < 0)
		return -ENODATA;

	/* If this is "ext_gmac", switch to the external clock input */
	if (!strcmp(clock_output_name, "ext_gmac")) {
		debug("%s: switching GMAC to external clock\n", __func__);
		rk_setreg(&cru->clksel_con[43], GMAC_MUX_SEL_EXTCLK);
		return 0;
	}

	return -EINVAL;
}

static int __maybe_unused rk3368_clk_set_parent(struct clk *clk, struct clk *parent)
{
	switch (clk->id) {
	case SCLK_MAC:
		return rk3368_gmac_set_parent(clk, parent);
	}

	debug("%s: unsupported clk %ld\n", __func__, clk->id);
	return -ENOENT;
}

static struct clk_ops rk3368_clk_ops = {
	.get_rate = rk3368_clk_get_rate,
	.set_rate = rk3368_clk_set_rate,
#if CONFIG_IS_ENABLED(OF_CONTROL) && !CONFIG_IS_ENABLED(OF_PLATDATA)
	.set_parent = rk3368_clk_set_parent,
#endif
};

static int rk3368_clk_probe(struct udevice *dev)
{
	struct rk3368_clk_priv __maybe_unused *priv = dev_get_priv(dev);
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3368_clk_plat *plat = dev_get_plat(dev);

	priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
#endif
#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
	rkclk_init(priv->cru);
#endif

	return 0;
}

static int rk3368_clk_of_to_plat(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3368_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);
#endif

	return 0;
}

static int rk3368_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rk3368_cru,
						    glb_srst_fst_val);
		priv->glb_srst_snd_value = offsetof(struct rk3368_cru,
						    glb_srst_snd_val);
		dev_set_priv(sys_child, priv);
	}

#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	ret = offsetof(struct rk3368_cru, softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 15);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif

	return ret;
}

static const struct udevice_id rk3368_clk_ids[] = {
	{ .compatible = "rockchip,rk3368-cru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3368_cru) = {
	.name = "rockchip_rk3368_cru",
	.id = UCLASS_CLK,
	.of_match = rk3368_clk_ids,
	.priv_auto = sizeof(struct rk3368_clk_priv),
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	.plat_auto = sizeof(struct rk3368_clk_plat),
#endif
	.of_to_plat = rk3368_clk_of_to_plat,
	.ops = &rk3368_clk_ops,
	.bind = rk3368_clk_bind,
	.probe = rk3368_clk_probe,
};