// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd
 * Author: Andy Yan <andy.yan@rock-chips.com>
 * (C) Copyright 2017 Theobroma Systems Design und Consulting GmbH
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <log.h>
#include <malloc.h>
#include <mapmem.h>
#include <syscon.h>
#include <bitfield.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_rk3368.h>
#include <asm/arch-rockchip/hardware.h>
#include <asm/io.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3368-cru.h>
#include <linux/stringify.h>

#if CONFIG_IS_ENABLED(OF_PLATDATA)
struct rk3368_clk_plat {
	struct dtd_rockchip_rk3368_cru dtd;
};
#endif

struct pll_div {
	u32 nr;
	u32 nf;
	u32 no;
};

#define OSC_HZ		(24 * 1000 * 1000)
#define APLL_L_HZ	(800 * 1000 * 1000)
#define APLL_B_HZ	(816 * 1000 * 1000)
#define GPLL_HZ		(576 * 1000 * 1000)
#define CPLL_HZ		(400 * 1000 * 1000)

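/*
 * CRU divider fields hold the divider value minus one, so e.g.
 * DIV_TO_RATE(GPLL_HZ, 3) is 576 MHz / 4 = 144 MHz.
 */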
#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

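/*
 * PLL_DIVISORS derives NF from the requested rate (given NR and NO) and
 * statically asserts that the rate is hit exactly: e.g. for
 * PLL_DIVISORS(GPLL_HZ, 1, 2), NF = 576 MHz * 1 * 2 / 24 MHz = 48 and
 * 24 MHz * 48 / (1 * 2) = 576 MHz.
 */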
#define PLL_DIVISORS(hz, _nr, _no) { \
	.nr = _nr, .nf = (u32)((u64)hz * _nr * _no / OSC_HZ), .no = _no}; \
	_Static_assert(((u64)hz * _nr * _no / OSC_HZ) * OSC_HZ /\
		       (_nr * _no) == hz, #hz "Hz cannot be hit with PLL " \
		       "divisors on line " __stringify(__LINE__));

#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
static const struct pll_div apll_l_init_cfg = PLL_DIVISORS(APLL_L_HZ, 12, 2);
static const struct pll_div apll_b_init_cfg = PLL_DIVISORS(APLL_B_HZ, 1, 2);
#if !defined(CONFIG_TPL_BUILD)
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 1, 2);
static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 6);
#endif
#endif

static ulong rk3368_clk_get_rate(struct clk *clk);

/* Get pll rate by id */
static uint32_t rkclk_pll_get_rate(struct rk3368_cru *cru,
				   enum rk3368_pll_id pll_id)
{
	uint32_t nr, no, nf;
	uint32_t con;
	struct rk3368_pll *pll = &cru->pll[pll_id];

	con = readl(&pll->con3);

	switch ((con & PLL_MODE_MASK) >> PLL_MODE_SHIFT) {
	case PLL_MODE_SLOW:
		return OSC_HZ;
	case PLL_MODE_NORMAL:
		con = readl(&pll->con0);
		no = ((con & PLL_OD_MASK) >> PLL_OD_SHIFT) + 1;
		nr = ((con & PLL_NR_MASK) >> PLL_NR_SHIFT) + 1;
		con = readl(&pll->con1);
		nf = ((con & PLL_NF_MASK) >> PLL_NF_SHIFT) + 1;

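		/* rate = 24 MHz * NF / (NR * NO) */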
		return (24 * nf / (nr * no)) * 1000000;
	case PLL_MODE_DEEP_SLOW:
	default:
		return 32768;
	}
}

#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
static int rkclk_set_pll(struct rk3368_cru *cru, enum rk3368_pll_id pll_id,
			 const struct pll_div *div)
{
	struct rk3368_pll *pll = &cru->pll[pll_id];
	/* All PLLs have the same VCO and output frequency range restrictions. */
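	/* Divide by 1000 first so the intermediate product fits in 32 bits. */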
	uint vco_hz = OSC_HZ / 1000 * div->nf / div->nr * 1000;
	uint output_hz = vco_hz / div->no;

	debug("PLL at %p: nf=%d, nr=%d, no=%d, vco=%u Hz, output=%u Hz\n",
	      pll, div->nf, div->nr, div->no, vco_hz, output_hz);

	/* enter slow mode and reset pll */
	rk_clrsetreg(&pll->con3, PLL_MODE_MASK | PLL_RESET_MASK,
		     PLL_RESET << PLL_RESET_SHIFT);

	rk_clrsetreg(&pll->con0, PLL_NR_MASK | PLL_OD_MASK,
		     ((div->nr - 1) << PLL_NR_SHIFT) |
		     ((div->no - 1) << PLL_OD_SHIFT));
	writel((div->nf - 1) << PLL_NF_SHIFT, &pll->con1);
	/*
	 * BWADJ should be set to NF / 2 to ensure the nominal bandwidth.
	 * See the RK3368 TRM, section "3.6.4 PLL Bandwidth Adjustment".
	 */
	clrsetbits_le32(&pll->con2, PLL_BWADJ_MASK, (div->nf >> 1) - 1);

	udelay(10);

	/* return from reset */
	rk_clrreg(&pll->con3, PLL_RESET_MASK);

	/* waiting for pll lock */
	while (!(readl(&pll->con1) & PLL_LOCK_STA))
		udelay(1);

	rk_clrsetreg(&pll->con3, PLL_MODE_MASK,
		     PLL_MODE_NORMAL << PLL_MODE_SHIFT);

	return 0;
}
#endif

#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
static void rkclk_init(struct rk3368_cru *cru)
{
	u32 apllb, aplll, dpll, cpll, gpll;

	rkclk_set_pll(cru, APLLB, &apll_b_init_cfg);
	rkclk_set_pll(cru, APLLL, &apll_l_init_cfg);
#if !defined(CONFIG_TPL_BUILD)
	/*
	 * If we plan to return to the boot ROM, we can't increase the
	 * GPLL rate from the SPL stage.
	 */
	rkclk_set_pll(cru, GPLL, &gpll_init_cfg);
	rkclk_set_pll(cru, CPLL, &cpll_init_cfg);
#endif

	apllb = rkclk_pll_get_rate(cru, APLLB);
	aplll = rkclk_pll_get_rate(cru, APLLL);
	dpll = rkclk_pll_get_rate(cru, DPLL);
	cpll = rkclk_pll_get_rate(cru, CPLL);
	gpll = rkclk_pll_get_rate(cru, GPLL);

	debug("%s apllb(%d) apll(%d) dpll(%d) cpll(%d) gpll(%d)\n",
	      __func__, apllb, aplll, dpll, cpll, gpll);
}
#endif

#if !IS_ENABLED(CONFIG_SPL_BUILD) || CONFIG_IS_ENABLED(MMC_SUPPORT)
static ulong rk3368_mmc_get_clk(struct rk3368_cru *cru, uint clk_id)
{
	u32 div, con, con_id, rate;
	u32 pll_rate;

	switch (clk_id) {
	case HCLK_SDMMC:
		con_id = 50;
		break;
	case HCLK_EMMC:
		con_id = 51;
		break;
	case SCLK_SDIO0:
		con_id = 48;
		break;
	default:
		return -EINVAL;
	}

	con = readl(&cru->clksel_con[con_id]);
	switch (con & MMC_PLL_SEL_MASK) {
	case MMC_PLL_SEL_GPLL:
		pll_rate = rkclk_pll_get_rate(cru, GPLL);
		break;
	case MMC_PLL_SEL_24M:
		pll_rate = OSC_HZ;
		break;
	case MMC_PLL_SEL_CPLL:
		pll_rate = rkclk_pll_get_rate(cru, CPLL);
		break;
	case MMC_PLL_SEL_USBPHY_480M:
	default:
		return -EINVAL;
	}
	div = (con & MMC_CLK_DIV_MASK) >> MMC_CLK_DIV_SHIFT;
	rate = DIV_TO_RATE(pll_rate, div);

	debug("%s: raw rate %d (post-divide by 2)\n", __func__, rate);
	return rate >> 1;
}

static ulong rk3368_mmc_find_best_rate_and_parent(struct clk *clk,
						  ulong rate,
						  u32 *best_mux,
						  u32 *best_div)
{
	int i;
	ulong best_rate = 0;
	const ulong MHz = 1000000;
	const struct {
		u32 mux;
		ulong rate;
	} parents[] = {
		{ .mux = MMC_PLL_SEL_CPLL, .rate = CPLL_HZ },
		{ .mux = MMC_PLL_SEL_GPLL, .rate = GPLL_HZ },
		{ .mux = MMC_PLL_SEL_24M, .rate = 24 * MHz }
	};

	debug("%s: target rate %ld\n", __func__, rate);
	for (i = 0; i < ARRAY_SIZE(parents); ++i) {
		/*
		 * Find the largest rate no larger than the target-rate for
		 * the current parent.
		 */
		ulong parent_rate = parents[i].rate;
		u32 div = DIV_ROUND_UP(parent_rate, rate);
		u32 adj_div = div;
		ulong new_rate = parent_rate / adj_div;

		debug("%s: rate %ld, parent-mux %d, parent-rate %ld, div %d\n",
		      __func__, rate, parents[i].mux, parents[i].rate, div);

		/* Skip, if not representable */
		if ((div - 1) > MMC_CLK_DIV_MASK)
			continue;

		/* Skip, if we already have a better (or equal) solution */
		if (new_rate <= best_rate)
			continue;

		/* This is our new best rate. */
		best_rate = new_rate;
		*best_mux = parents[i].mux;
		*best_div = div - 1;
	}

	debug("%s: best_mux = %x, best_div = %d, best_rate = %ld\n",
	      __func__, *best_mux, *best_div, best_rate);

	return best_rate;
}

static ulong rk3368_mmc_set_clk(struct clk *clk, ulong rate)
{
	struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	struct rk3368_cru *cru = priv->cru;
	ulong clk_id = clk->id;
	u32 con_id, mux = 0, div = 0;

	/* Find the best parent and rate */
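	/* The MMC output clock is post-divided by 2, hence the rate << 1. */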
	rk3368_mmc_find_best_rate_and_parent(clk, rate << 1, &mux, &div);

	switch (clk_id) {
	case HCLK_SDMMC:
		con_id = 50;
		break;
	case HCLK_EMMC:
		con_id = 51;
		break;
	case SCLK_SDIO0:
		con_id = 48;
		break;
	default:
		return -EINVAL;
	}

	rk_clrsetreg(&cru->clksel_con[con_id],
		     MMC_PLL_SEL_MASK | MMC_CLK_DIV_MASK,
		     mux | div);

	return rk3368_mmc_get_clk(cru, clk_id);
}
#endif

#if IS_ENABLED(CONFIG_TPL_BUILD)
static ulong rk3368_ddr_set_clk(struct rk3368_cru *cru, ulong set_rate)
{
	const struct pll_div *dpll_cfg = NULL;
	const ulong MHz = 1000000;

	/* Fout = ((Fin / NR) * NF) / NO */
	static const struct pll_div dpll_1200 = PLL_DIVISORS(1200 * MHz, 1, 1);
	static const struct pll_div dpll_1332 = PLL_DIVISORS(1332 * MHz, 2, 1);
	static const struct pll_div dpll_1600 = PLL_DIVISORS(1600 * MHz, 3, 2);

	switch (set_rate) {
	case 1200*MHz:
		dpll_cfg = &dpll_1200;
		break;
	case 1332*MHz:
		dpll_cfg = &dpll_1332;
		break;
	case 1600*MHz:
		dpll_cfg = &dpll_1600;
		break;
	default:
		pr_err("Unsupported SDRAM frequency %ld!\n", set_rate);
		return -EINVAL;
	}
	rkclk_set_pll(cru, DPLL, dpll_cfg);

	return set_rate;
}
#endif

#if CONFIG_IS_ENABLED(GMAC_ROCKCHIP)
static ulong rk3368_gmac_set_clk(struct rk3368_cru *cru, ulong set_rate)
{
	ulong ret;

	/*
	 * The GMAC clock can either be derived from an external clock or be
	 * generated internally by dividing down a PLL (GPLL or CPLL).
	 */
	if (readl(&cru->clksel_con[43]) & GMAC_MUX_SEL_EXTCLK) {
		/* An external clock will always generate the right rate... */
		ret = set_rate;
	} else {
		u32 con = readl(&cru->clksel_con[43]);
		ulong pll_rate;
		u8 div;

		if (((con >> GMAC_PLL_SHIFT) & GMAC_PLL_MASK) ==
		    GMAC_PLL_SELECT_GENERAL)
			pll_rate = GPLL_HZ;
		else if (((con >> GMAC_PLL_SHIFT) & GMAC_PLL_MASK) ==
			 GMAC_PLL_SELECT_CODEC)
			pll_rate = CPLL_HZ;
		else
			/* CPLL is not set */
			return -EPERM;

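		/* 5-bit divider field; the hardware divides by (div + 1) */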
		div = DIV_ROUND_UP(pll_rate, set_rate) - 1;
		if (div <= 0x1f)
			rk_clrsetreg(&cru->clksel_con[43], GMAC_DIV_CON_MASK,
				     div << GMAC_DIV_CON_SHIFT);
		else
			debug("Unsupported div for gmac:%d\n", div);

		return DIV_TO_RATE(pll_rate, div);
	}

	return ret;
}
#endif

/*
 * RK3368 SPI clocks have a common divider-width (7 bits) and a single bit
 * to select either CPLL or GPLL as the clock-parent. The locations within
 * the enclosing CLKSEL_CON (i.e. div_shift and sel_shift) are variable.
 */

struct spi_clkreg {
	uint8_t reg;	/* CLKSEL_CON[reg] register in CRU */
	uint8_t div_shift;
	uint8_t sel_shift;
};

/*
 * The entries are numbered relative to their offset from SCLK_SPI0.
 */
static const struct spi_clkreg spi_clkregs[] = {
	[0] = { .reg = 45, .div_shift = 0, .sel_shift = 7, },
	[1] = { .reg = 45, .div_shift = 8, .sel_shift = 15, },
	[2] = { .reg = 46, .div_shift = 8, .sel_shift = 15, },
};

static inline u32 extract_bits(u32 val, unsigned width, unsigned shift)
{
	return (val >> shift) & ((1 << width) - 1);
}

static ulong rk3368_spi_get_clk(struct rk3368_cru *cru, ulong clk_id)
{
	const struct spi_clkreg *spiclk = NULL;
	u32 div, val;

	switch (clk_id) {
	case SCLK_SPI0 ... SCLK_SPI2:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	val = readl(&cru->clksel_con[spiclk->reg]);
	div = extract_bits(val, 7, spiclk->div_shift);

	debug("%s: div 0x%x\n", __func__, div);
	return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3368_spi_set_clk(struct rk3368_cru *cru, ulong clk_id, uint hz)
{
	const struct spi_clkreg *spiclk = NULL;
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(GPLL_HZ, hz);
	assert(src_clk_div < 127);

	switch (clk_id) {
	case SCLK_SPI0 ... SCLK_SPI2:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	rk_clrsetreg(&cru->clksel_con[spiclk->reg],
		     ((0x7f << spiclk->div_shift) |
		      (0x1 << spiclk->sel_shift)),
		     ((src_clk_div << spiclk->div_shift) |
		      (1 << spiclk->sel_shift)));

	return rk3368_spi_get_clk(cru, clk_id);
}

static ulong rk3368_saradc_get_clk(struct rk3368_cru *cru)
{
	u32 div, val;

	val = readl(&cru->clksel_con[25]);
	div = bitfield_extract(val, CLK_SARADC_DIV_CON_SHIFT,
			       CLK_SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

static ulong rk3368_saradc_set_clk(struct rk3368_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);

	rk_clrsetreg(&cru->clksel_con[25],
		     CLK_SARADC_DIV_CON_MASK,
		     src_clk_div << CLK_SARADC_DIV_CON_SHIFT);

	return rk3368_saradc_get_clk(cru);
}

static ulong rk3368_clk_get_rate(struct clk *clk)
{
	struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	ulong rate = 0;

	debug("%s: id %ld\n", __func__, clk->id);
	switch (clk->id) {
	case PLL_CPLL:
		rate = rkclk_pll_get_rate(priv->cru, CPLL);
		break;
	case PLL_GPLL:
		rate = rkclk_pll_get_rate(priv->cru, GPLL);
		break;
	case SCLK_SPI0 ... SCLK_SPI2:
		rate = rk3368_spi_get_clk(priv->cru, clk->id);
		break;
#if !IS_ENABLED(CONFIG_SPL_BUILD) || CONFIG_IS_ENABLED(MMC_SUPPORT)
	case HCLK_SDMMC:
	case HCLK_EMMC:
		rate = rk3368_mmc_get_clk(priv->cru, clk->id);
		break;
#endif
	case SCLK_SARADC:
		rate = rk3368_saradc_get_clk(priv->cru);
		break;
	default:
		return -ENOENT;
	}

	return rate;
}

static ulong rk3368_clk_set_rate(struct clk *clk, ulong rate)
{
	__maybe_unused struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	ulong ret = 0;

	debug("%s id:%ld rate:%ld\n", __func__, clk->id, rate);
	switch (clk->id) {
	case SCLK_SPI0 ... SCLK_SPI2:
		ret = rk3368_spi_set_clk(priv->cru, clk->id, rate);
		break;
#if IS_ENABLED(CONFIG_TPL_BUILD)
	case CLK_DDR:
		ret = rk3368_ddr_set_clk(priv->cru, rate);
		break;
#endif
#if !IS_ENABLED(CONFIG_SPL_BUILD) || CONFIG_IS_ENABLED(MMC_SUPPORT)
	case HCLK_SDMMC:
	case HCLK_EMMC:
		ret = rk3368_mmc_set_clk(clk, rate);
		break;
#endif
#if CONFIG_IS_ENABLED(GMAC_ROCKCHIP)
	case SCLK_MAC:
		/* set the GMAC clock rate (external clock or internal divider) */
		ret = rk3368_gmac_set_clk(priv->cru, rate);
		break;
#endif
	case SCLK_SARADC:
		ret = rk3368_saradc_set_clk(priv->cru, rate);
		break;
	default:
		return -ENOENT;
	}

	return ret;
}

static int __maybe_unused rk3368_gmac_set_parent(struct clk *clk, struct clk *parent)
{
	struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	struct rk3368_cru *cru = priv->cru;
	const char *clock_output_name;
	int ret;

	/*
	 * If the requested parent is in the same clock-controller and
	 * the id is SCLK_MAC ("sclk_mac"), switch to the internal
	 * clock.
	 */
	if ((parent->dev == clk->dev) && (parent->id == SCLK_MAC)) {
		debug("%s: switching GMAC to SCLK_MAC\n", __func__);
		rk_clrreg(&cru->clksel_con[43], GMAC_MUX_SEL_EXTCLK);
		return 0;
	}

	/*
	 * Otherwise, we need to check the clock-output-names of the
	 * requested parent to see if the requested id is "ext_gmac".
	 */
	ret = dev_read_string_index(parent->dev, "clock-output-names",
				    parent->id, &clock_output_name);
	if (ret < 0)
		return -ENODATA;

	/* If this is "ext_gmac", switch to the external clock input */
	if (!strcmp(clock_output_name, "ext_gmac")) {
		debug("%s: switching GMAC to external clock\n", __func__);
		rk_setreg(&cru->clksel_con[43], GMAC_MUX_SEL_EXTCLK);
		return 0;
	}

	return -EINVAL;
}

static int __maybe_unused rk3368_clk_set_parent(struct clk *clk, struct clk *parent)
{
	switch (clk->id) {
	case SCLK_MAC:
		return rk3368_gmac_set_parent(clk, parent);
	}

	debug("%s: unsupported clk %ld\n", __func__, clk->id);
	return -ENOENT;
}

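/*
 * These operations are reached through the generic clk uclass API (e.g.
 * clk_get_by_index() and clk_set_rate()) rather than called directly.
 * A minimal consumer sketch (device index and rate are illustrative only):
 *
 *	struct clk clk;
 *
 *	if (!clk_get_by_index(dev, 0, &clk))
 *		clk_set_rate(&clk, 24000000);
 */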
static struct clk_ops rk3368_clk_ops = {
	.get_rate = rk3368_clk_get_rate,
	.set_rate = rk3368_clk_set_rate,
#if CONFIG_IS_ENABLED(OF_CONTROL) && !CONFIG_IS_ENABLED(OF_PLATDATA)
	.set_parent = rk3368_clk_set_parent,
#endif
};

static int rk3368_clk_probe(struct udevice *dev)
{
	struct rk3368_clk_priv __maybe_unused *priv = dev_get_priv(dev);
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3368_clk_plat *plat = dev_get_platdata(dev);

	priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
#endif
#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
	rkclk_init(priv->cru);
#endif

	return 0;
}

static int rk3368_clk_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3368_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);
#endif

	return 0;
}

static int rk3368_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rk3368_cru,
						    glb_srst_fst_val);
		priv->glb_srst_snd_value = offsetof(struct rk3368_cru,
						    glb_srst_snd_val);
		sys_child->priv = priv;
	}

#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
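	/* Bind the Rockchip soft-reset driver to the 15 SOFTRST_CON registers */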
	ret = offsetof(struct rk3368_cru, softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 15);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif

	return ret;
}

static const struct udevice_id rk3368_clk_ids[] = {
	{ .compatible = "rockchip,rk3368-cru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3368_cru) = {
	.name = "rockchip_rk3368_cru",
	.id = UCLASS_CLK,
	.of_match = rk3368_clk_ids,
	.priv_auto_alloc_size = sizeof(struct rk3368_clk_priv),
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	.platdata_auto_alloc_size = sizeof(struct rk3368_clk_plat),
#endif
	.ofdata_to_platdata = rk3368_clk_ofdata_to_platdata,
	.ops = &rk3368_clk_ops,
	.bind = rk3368_clk_bind,
	.probe = rk3368_clk_probe,
};