// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd
 * Author: Andy Yan <andy.yan@rock-chips.com>
 * (C) Copyright 2017 Theobroma Systems Design und Consulting GmbH
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <mapmem.h>
#include <syscon.h>
#include <bitfield.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_rk3368.h>
#include <asm/arch/hardware.h>
#include <asm/io.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3368-cru.h>

#if CONFIG_IS_ENABLED(OF_PLATDATA)
struct rk3368_clk_plat {
	struct dtd_rockchip_rk3368_cru dtd;
};
#endif

struct pll_div {
	u32 nr;
	u32 nf;
	u32 no;
};

#define OSC_HZ		(24 * 1000 * 1000)
#define APLL_L_HZ	(800 * 1000 * 1000)
#define APLL_B_HZ	(816 * 1000 * 1000)
#define GPLL_HZ		(576 * 1000 * 1000)
#define CPLL_HZ		(400 * 1000 * 1000)

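/*
 * CRU divider fields hold the divisor minus one, so conversions to and
 * from register values add or subtract one accordingly.
 */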
#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

#define PLL_DIVISORS(hz, _nr, _no) { \
	.nr = _nr, .nf = (u32)((u64)hz * _nr * _no / OSC_HZ), .no = _no}; \
	_Static_assert(((u64)hz * _nr * _no / OSC_HZ) * OSC_HZ / \
		       (_nr * _no) == hz, #hz "Hz cannot be hit with PLL " \
		       "divisors on line " __stringify(__LINE__));
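/*
 * The divisors follow Fout = (OSC_HZ / NR) * NF / NO.  For example,
 * PLL_DIVISORS(GPLL_HZ, 1, 2) yields NF = 576 MHz * 1 * 2 / 24 MHz = 48,
 * i.e. 24 MHz / 1 * 48 / 2 = 576 MHz; the _Static_assert rejects any
 * combination that cannot hit the requested rate exactly.
 */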

#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
static const struct pll_div apll_l_init_cfg = PLL_DIVISORS(APLL_L_HZ, 12, 2);
static const struct pll_div apll_b_init_cfg = PLL_DIVISORS(APLL_B_HZ, 1, 2);
#if !defined(CONFIG_TPL_BUILD)
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 1, 2);
static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 6);
#endif
#endif

static ulong rk3368_clk_get_rate(struct clk *clk);

/* Get pll rate by id */
static uint32_t rkclk_pll_get_rate(struct rk3368_cru *cru,
				   enum rk3368_pll_id pll_id)
{
	uint32_t nr, no, nf;
	uint32_t con;
	struct rk3368_pll *pll = &cru->pll[pll_id];

	con = readl(&pll->con3);

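	/*
	 * A PLL is either bypassed to the 24 MHz oscillator (slow mode),
	 * running normally from its VCO, or bypassed to the 32.768 kHz
	 * clock (deep slow mode); only normal mode uses the divider values.
	 */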
	switch ((con & PLL_MODE_MASK) >> PLL_MODE_SHIFT) {
	case PLL_MODE_SLOW:
		return OSC_HZ;
	case PLL_MODE_NORMAL:
		con = readl(&pll->con0);
		no = ((con & PLL_OD_MASK) >> PLL_OD_SHIFT) + 1;
		nr = ((con & PLL_NR_MASK) >> PLL_NR_SHIFT) + 1;
		con = readl(&pll->con1);
		nf = ((con & PLL_NF_MASK) >> PLL_NF_SHIFT) + 1;

		return (24 * nf / (nr * no)) * 1000000;
	case PLL_MODE_DEEP_SLOW:
	default:
		return 32768;
	}
}

#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
static int rkclk_set_pll(struct rk3368_cru *cru, enum rk3368_pll_id pll_id,
			 const struct pll_div *div)
{
	struct rk3368_pll *pll = &cru->pll[pll_id];
	/* All PLLs have the same VCO and output frequency range restrictions. */
	uint vco_hz = OSC_HZ / 1000 * div->nf / div->nr * 1000;
	uint output_hz = vco_hz / div->no;

	debug("PLL at %p: nf=%d, nr=%d, no=%d, vco=%u Hz, output=%u Hz\n",
	      pll, div->nf, div->nr, div->no, vco_hz, output_hz);

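	/*
	 * CRU registers carry a write-enable mask in their upper 16 bits, so
	 * rk_clrsetreg() can update individual fields with a single write
	 * instead of a read-modify-write of the whole register.
	 */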
	/* enter slow mode and reset pll */
	rk_clrsetreg(&pll->con3, PLL_MODE_MASK | PLL_RESET_MASK,
		     PLL_RESET << PLL_RESET_SHIFT);

	rk_clrsetreg(&pll->con0, PLL_NR_MASK | PLL_OD_MASK,
		     ((div->nr - 1) << PLL_NR_SHIFT) |
		     ((div->no - 1) << PLL_OD_SHIFT));
	writel((div->nf - 1) << PLL_NF_SHIFT, &pll->con1);
	/*
	 * BWADJ should be set to NF / 2 to ensure the nominal bandwidth.
	 * Compare the RK3368 TRM, section "3.6.4 PLL Bandwidth Adjustment".
	 */
	clrsetbits_le32(&pll->con2, PLL_BWADJ_MASK, (div->nf >> 1) - 1);

	udelay(10);

	/* return from reset */
	rk_clrreg(&pll->con3, PLL_RESET_MASK);

	/* waiting for pll lock */
	while (!(readl(&pll->con1) & PLL_LOCK_STA))
		udelay(1);

	rk_clrsetreg(&pll->con3, PLL_MODE_MASK,
		     PLL_MODE_NORMAL << PLL_MODE_SHIFT);

	return 0;
}
#endif

#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
static void rkclk_init(struct rk3368_cru *cru)
{
	u32 apllb, aplll, dpll, cpll, gpll;

	rkclk_set_pll(cru, APLLB, &apll_b_init_cfg);
	rkclk_set_pll(cru, APLLL, &apll_l_init_cfg);
#if !defined(CONFIG_TPL_BUILD)
	/*
	 * If we plan to return to the boot ROM, we can't increase the
	 * GPLL rate from the SPL stage.
	 */
	rkclk_set_pll(cru, GPLL, &gpll_init_cfg);
	rkclk_set_pll(cru, CPLL, &cpll_init_cfg);
#endif

	apllb = rkclk_pll_get_rate(cru, APLLB);
	aplll = rkclk_pll_get_rate(cru, APLLL);
	dpll = rkclk_pll_get_rate(cru, DPLL);
	cpll = rkclk_pll_get_rate(cru, CPLL);
	gpll = rkclk_pll_get_rate(cru, GPLL);

	debug("%s apllb(%d) apll(%d) dpll(%d) cpll(%d) gpll(%d)\n",
	      __func__, apllb, aplll, dpll, cpll, gpll);
}
#endif

#if !IS_ENABLED(CONFIG_SPL_BUILD) || CONFIG_IS_ENABLED(MMC_SUPPORT)
static ulong rk3368_mmc_get_clk(struct rk3368_cru *cru, uint clk_id)
{
	u32 div, con, con_id, rate;
	u32 pll_rate;

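	/*
	 * Each MMC controller has its own CLKSEL_CON register (48 for SDIO0,
	 * 50 for SDMMC, 51 for EMMC) holding the parent-PLL mux and a 7-bit
	 * divider.
	 */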
	switch (clk_id) {
	case HCLK_SDMMC:
		con_id = 50;
		break;
	case HCLK_EMMC:
		con_id = 51;
		break;
	case SCLK_SDIO0:
		con_id = 48;
		break;
	default:
		return -EINVAL;
	}

	con = readl(&cru->clksel_con[con_id]);
	switch (con & MMC_PLL_SEL_MASK) {
	case MMC_PLL_SEL_GPLL:
		pll_rate = rkclk_pll_get_rate(cru, GPLL);
		break;
	case MMC_PLL_SEL_24M:
		pll_rate = OSC_HZ;
		break;
	case MMC_PLL_SEL_CPLL:
		pll_rate = rkclk_pll_get_rate(cru, CPLL);
		break;
	case MMC_PLL_SEL_USBPHY_480M:
	default:
		return -EINVAL;
	}
	div = (con & MMC_CLK_DIV_MASK) >> MMC_CLK_DIV_SHIFT;
	rate = DIV_TO_RATE(pll_rate, div);

	debug("%s: raw rate %d (post-divide by 2)\n", __func__, rate);
	return rate >> 1;
}

static ulong rk3368_mmc_find_best_rate_and_parent(struct clk *clk,
						  ulong rate,
						  u32 *best_mux,
						  u32 *best_div)
{
	int i;
	ulong best_rate = 0;
	const ulong MHz = 1000000;
	const struct {
		u32 mux;
		ulong rate;
	} parents[] = {
		{ .mux = MMC_PLL_SEL_CPLL, .rate = CPLL_HZ },
		{ .mux = MMC_PLL_SEL_GPLL, .rate = GPLL_HZ },
		{ .mux = MMC_PLL_SEL_24M, .rate = 24 * MHz }
	};

	debug("%s: target rate %ld\n", __func__, rate);
	for (i = 0; i < ARRAY_SIZE(parents); ++i) {
		/*
		 * Find the largest rate no larger than the target-rate for
		 * the current parent.
		 */
		ulong parent_rate = parents[i].rate;
		u32 div = DIV_ROUND_UP(parent_rate, rate);
		u32 adj_div = div;
		ulong new_rate = parent_rate / adj_div;

		debug("%s: rate %ld, parent-mux %d, parent-rate %ld, div %d\n",
		      __func__, rate, parents[i].mux, parents[i].rate, div);

		/* Skip, if not representable */
		if ((div - 1) > MMC_CLK_DIV_MASK)
			continue;

		/* Skip, if we already have a better (or equal) solution */
		if (new_rate <= best_rate)
			continue;

		/* This is our new best rate. */
		best_rate = new_rate;
		*best_mux = parents[i].mux;
		*best_div = div - 1;
	}

	debug("%s: best_mux = %x, best_div = %d, best_rate = %ld\n",
	      __func__, *best_mux, *best_div, best_rate);

	return best_rate;
}

static ulong rk3368_mmc_set_clk(struct clk *clk, ulong rate)
{
	struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	struct rk3368_cru *cru = priv->cru;
	ulong clk_id = clk->id;
	u32 con_id, mux = 0, div = 0;

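	/*
	 * rk3368_mmc_get_clk() reports the rate after a fixed divide-by-two,
	 * so search for a parent and divider at twice the requested rate.
	 */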
	/* Find the best parent and rate */
	rk3368_mmc_find_best_rate_and_parent(clk, rate << 1, &mux, &div);

	switch (clk_id) {
	case HCLK_SDMMC:
		con_id = 50;
		break;
	case HCLK_EMMC:
		con_id = 51;
		break;
	case SCLK_SDIO0:
		con_id = 48;
		break;
	default:
		return -EINVAL;
	}

	rk_clrsetreg(&cru->clksel_con[con_id],
		     MMC_PLL_SEL_MASK | MMC_CLK_DIV_MASK,
		     mux | div);

	return rk3368_mmc_get_clk(cru, clk_id);
}
#endif

#if IS_ENABLED(CONFIG_TPL_BUILD)
static ulong rk3368_ddr_set_clk(struct rk3368_cru *cru, ulong set_rate)
{
	const struct pll_div *dpll_cfg = NULL;
	const ulong MHz = 1000000;

	/* Fout = ((Fin / NR) * NF) / NO */
	static const struct pll_div dpll_1200 = PLL_DIVISORS(1200 * MHz, 1, 1);
	static const struct pll_div dpll_1332 = PLL_DIVISORS(1332 * MHz, 2, 1);
	static const struct pll_div dpll_1600 = PLL_DIVISORS(1600 * MHz, 3, 2);

	switch (set_rate) {
	case 1200 * MHz:
		dpll_cfg = &dpll_1200;
		break;
	case 1332 * MHz:
		dpll_cfg = &dpll_1332;
		break;
	case 1600 * MHz:
		dpll_cfg = &dpll_1600;
		break;
	default:
		pr_err("Unsupported SDRAM frequency %ld!\n", set_rate);
	}
	rkclk_set_pll(cru, DPLL, dpll_cfg);

	return set_rate;
}
#endif

#if CONFIG_IS_ENABLED(GMAC_ROCKCHIP)
static ulong rk3368_gmac_set_clk(struct rk3368_cru *cru, ulong set_rate)
{
	ulong ret;

	/*
	 * The GMAC clock can be derived either from an external clock
	 * or can be generated internally by a divider from SCLK_MAC.
	 */
	if (readl(&cru->clksel_con[43]) & GMAC_MUX_SEL_EXTCLK) {
		/* An external clock will always generate the right rate... */
		ret = set_rate;
	} else {
		u32 con = readl(&cru->clksel_con[43]);
		ulong pll_rate;
		u8 div;

		if (((con >> GMAC_PLL_SHIFT) & GMAC_PLL_MASK) ==
		    GMAC_PLL_SELECT_GENERAL)
			pll_rate = GPLL_HZ;
		else if (((con >> GMAC_PLL_SHIFT) & GMAC_PLL_MASK) ==
			 GMAC_PLL_SELECT_CODEC)
			pll_rate = CPLL_HZ;
		else
			/* CPLL is not set */
			return -EPERM;

		div = DIV_ROUND_UP(pll_rate, set_rate) - 1;
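		/* The GMAC divider field is 5 bits wide (maximum divisor 32). */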
		if (div <= 0x1f)
			rk_clrsetreg(&cru->clksel_con[43], GMAC_DIV_CON_MASK,
				     div << GMAC_DIV_CON_SHIFT);
		else
			debug("Unsupported div for gmac:%d\n", div);

		return DIV_TO_RATE(pll_rate, div);
	}

	return ret;
}
#endif

/*
 * RK3368 SPI clocks have a common divider-width (7 bits) and a single bit
 * to select either CPLL or GPLL as the clock-parent. The locations within
 * the enclosing CLKSEL_CON (i.e. div_shift and sel_shift) are variable.
 */

struct spi_clkreg {
	uint8_t reg;	/* CLKSEL_CON[reg] register in CRU */
	uint8_t div_shift;
	uint8_t sel_shift;
};

/*
 * The entries are numbered relative to their offset from SCLK_SPI0.
 */
static const struct spi_clkreg spi_clkregs[] = {
	[0] = { .reg = 45, .div_shift = 0, .sel_shift = 7, },
	[1] = { .reg = 45, .div_shift = 8, .sel_shift = 15, },
	[2] = { .reg = 46, .div_shift = 8, .sel_shift = 15, },
};

static inline u32 extract_bits(u32 val, unsigned width, unsigned shift)
{
	return (val >> shift) & ((1 << width) - 1);
}

static ulong rk3368_spi_get_clk(struct rk3368_cru *cru, ulong clk_id)
{
	const struct spi_clkreg *spiclk = NULL;
	u32 div, val;

	switch (clk_id) {
	case SCLK_SPI0 ... SCLK_SPI2:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	val = readl(&cru->clksel_con[spiclk->reg]);
	div = extract_bits(val, 7, spiclk->div_shift);

	debug("%s: div 0x%x\n", __func__, div);
	return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3368_spi_set_clk(struct rk3368_cru *cru, ulong clk_id, uint hz)
{
	const struct spi_clkreg *spiclk = NULL;
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(GPLL_HZ, hz);
	assert(src_clk_div < 127);

	switch (clk_id) {
	case SCLK_SPI0 ... SCLK_SPI2:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

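	/*
	 * Program the 7-bit divider and set the parent-select bit to 1;
	 * rk3368_spi_get_clk() assumes GPLL is the selected parent.
	 */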
	rk_clrsetreg(&cru->clksel_con[spiclk->reg],
		     ((0x7f << spiclk->div_shift) |
		      (0x1 << spiclk->sel_shift)),
		     ((src_clk_div << spiclk->div_shift) |
		      (1 << spiclk->sel_shift)));

	return rk3368_spi_get_clk(cru, clk_id);
}

static ulong rk3368_saradc_get_clk(struct rk3368_cru *cru)
{
	u32 div, val;

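	/* The SARADC clock is divided down from the 24 MHz oscillator. */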
	val = readl(&cru->clksel_con[25]);
	div = bitfield_extract(val, CLK_SARADC_DIV_CON_SHIFT,
			       CLK_SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

static ulong rk3368_saradc_set_clk(struct rk3368_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);

	rk_clrsetreg(&cru->clksel_con[25],
		     CLK_SARADC_DIV_CON_MASK,
		     src_clk_div << CLK_SARADC_DIV_CON_SHIFT);

	return rk3368_saradc_get_clk(cru);
}

static ulong rk3368_clk_get_rate(struct clk *clk)
{
	struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	ulong rate = 0;

	debug("%s: id %ld\n", __func__, clk->id);
	switch (clk->id) {
	case PLL_CPLL:
		rate = rkclk_pll_get_rate(priv->cru, CPLL);
		break;
	case PLL_GPLL:
		rate = rkclk_pll_get_rate(priv->cru, GPLL);
		break;
	case SCLK_SPI0 ... SCLK_SPI2:
		rate = rk3368_spi_get_clk(priv->cru, clk->id);
		break;
#if !IS_ENABLED(CONFIG_SPL_BUILD) || CONFIG_IS_ENABLED(MMC_SUPPORT)
	case HCLK_SDMMC:
	case HCLK_EMMC:
		rate = rk3368_mmc_get_clk(priv->cru, clk->id);
		break;
#endif
	case SCLK_SARADC:
		rate = rk3368_saradc_get_clk(priv->cru);
		break;
	default:
		return -ENOENT;
	}

	return rate;
}

static ulong rk3368_clk_set_rate(struct clk *clk, ulong rate)
{
	__maybe_unused struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	ulong ret = 0;

	debug("%s id:%ld rate:%ld\n", __func__, clk->id, rate);
	switch (clk->id) {
	case SCLK_SPI0 ... SCLK_SPI2:
		ret = rk3368_spi_set_clk(priv->cru, clk->id, rate);
		break;
#if IS_ENABLED(CONFIG_TPL_BUILD)
	case CLK_DDR:
		ret = rk3368_ddr_set_clk(priv->cru, rate);
		break;
#endif
#if !IS_ENABLED(CONFIG_SPL_BUILD) || CONFIG_IS_ENABLED(MMC_SUPPORT)
	case HCLK_SDMMC:
	case HCLK_EMMC:
		ret = rk3368_mmc_set_clk(clk, rate);
		break;
#endif
#if CONFIG_IS_ENABLED(GMAC_ROCKCHIP)
	case SCLK_MAC:
		/* select the external clock */
		ret = rk3368_gmac_set_clk(priv->cru, rate);
		break;
#endif
	case SCLK_SARADC:
		ret = rk3368_saradc_set_clk(priv->cru, rate);
		break;
	default:
		return -ENOENT;
	}

	return ret;
}

static int __maybe_unused rk3368_gmac_set_parent(struct clk *clk,
						 struct clk *parent)
{
	struct rk3368_clk_priv *priv = dev_get_priv(clk->dev);
	struct rk3368_cru *cru = priv->cru;
	const char *clock_output_name;
	int ret;

	/*
	 * If the requested parent is in the same clock-controller and
	 * the id is SCLK_MAC ("sclk_mac"), switch to the internal
	 * clock.
	 */
	if ((parent->dev == clk->dev) && (parent->id == SCLK_MAC)) {
		debug("%s: switching GMAC to SCLK_MAC\n", __func__);
		rk_clrreg(&cru->clksel_con[43], GMAC_MUX_SEL_EXTCLK);
		return 0;
	}

	/*
	 * Otherwise, we need to check the clock-output-names of the
	 * requested parent to see if the requested id is "ext_gmac".
	 */
	ret = dev_read_string_index(parent->dev, "clock-output-names",
				    parent->id, &clock_output_name);
	if (ret < 0)
		return -ENODATA;

	/* If this is "ext_gmac", switch to the external clock input */
	if (!strcmp(clock_output_name, "ext_gmac")) {
		debug("%s: switching GMAC to external clock\n", __func__);
		rk_setreg(&cru->clksel_con[43], GMAC_MUX_SEL_EXTCLK);
		return 0;
	}

	return -EINVAL;
}

static int __maybe_unused rk3368_clk_set_parent(struct clk *clk,
						struct clk *parent)
{
	switch (clk->id) {
	case SCLK_MAC:
		return rk3368_gmac_set_parent(clk, parent);
	}

	debug("%s: unsupported clk %ld\n", __func__, clk->id);
	return -ENOENT;
}

static int rk3368_clk_enable(struct clk *clk)
{
	switch (clk->id) {
	case SCLK_MAC:
	case SCLK_MAC_RX:
	case SCLK_MAC_TX:
	case SCLK_MACREF:
	case SCLK_MACREF_OUT:
	case ACLK_GMAC:
	case PCLK_GMAC:
		/* Required to successfully probe the Designware GMAC driver */
		return 0;
	}

	debug("%s: unsupported clk %ld\n", __func__, clk->id);
	return -ENOENT;
}

static struct clk_ops rk3368_clk_ops = {
	.get_rate = rk3368_clk_get_rate,
	.set_rate = rk3368_clk_set_rate,
#if CONFIG_IS_ENABLED(OF_CONTROL) && !CONFIG_IS_ENABLED(OF_PLATDATA)
	.set_parent = rk3368_clk_set_parent,
#endif
	.enable = rk3368_clk_enable,
};

static int rk3368_clk_probe(struct udevice *dev)
{
	struct rk3368_clk_priv __maybe_unused *priv = dev_get_priv(dev);
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3368_clk_plat *plat = dev_get_platdata(dev);

	priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
#endif
#if IS_ENABLED(CONFIG_SPL_BUILD) || IS_ENABLED(CONFIG_TPL_BUILD)
	rkclk_init(priv->cru);
#endif

	return 0;
}

static int rk3368_clk_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3368_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);
#endif

	return 0;
}

static int rk3368_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rk3368_cru,
						    glb_srst_fst_val);
		priv->glb_srst_snd_value = offsetof(struct rk3368_cru,
						    glb_srst_snd_val);
		sys_child->priv = priv;
	}

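	/* Register the SOFTRST_CON bank (15 registers) with the reset driver. */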
#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	ret = offsetof(struct rk3368_cru, softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 15);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif

	return ret;
}

static const struct udevice_id rk3368_clk_ids[] = {
	{ .compatible = "rockchip,rk3368-cru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3368_cru) = {
	.name = "rockchip_rk3368_cru",
	.id = UCLASS_CLK,
	.of_match = rk3368_clk_ids,
	.priv_auto_alloc_size = sizeof(struct rk3368_clk_priv),
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	.platdata_auto_alloc_size = sizeof(struct rk3368_clk_plat),
#endif
	.ofdata_to_platdata = rk3368_clk_ofdata_to_platdata,
	.ops = &rk3368_clk_ops,
	.bind = rk3368_clk_bind,
	.probe = rk3368_clk_probe,
};