// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2015 Google, Inc
 * (C) 2017 Theobroma Systems Design und Consulting GmbH
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <log.h>
#include <malloc.h>
#include <mapmem.h>
#include <syscon.h>
#include <bitfield.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru.h>
#include <asm/arch-rockchip/hardware.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3399-cru.h>
#include <linux/delay.h>

#if CONFIG_IS_ENABLED(OF_PLATDATA)
struct rk3399_clk_plat {
	struct dtd_rockchip_rk3399_cru dtd;
};

struct rk3399_pmuclk_plat {
	struct dtd_rockchip_rk3399_pmucru dtd;
};
#endif

struct pll_div {
	u32 refdiv;
	u32 fbdiv;
	u32 postdiv1;
	u32 postdiv2;
	u32 frac;
};

#define RATE_TO_DIV(input_rate, output_rate) \
	((input_rate) / (output_rate) - 1)
#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

#define PLL_DIVISORS(hz, _refdiv, _postdiv1, _postdiv2) {\
	.refdiv = _refdiv,\
	.fbdiv = (u32)((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ),\
	.postdiv1 = _postdiv1, .postdiv2 = _postdiv2};
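/*
 * Illustrative example (assuming a 594 MHz GPLL target and the 24 MHz OSC_HZ):
 * PLL_DIVISORS(594 * MHz, 2, 2, 1) works out to
 * fbdiv = 594 * 2 * 2 * 1 / 24 = 99, i.e.
 * { .refdiv = 2, .fbdiv = 99, .postdiv1 = 2, .postdiv2 = 1 }.
 */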

#if defined(CONFIG_SPL_BUILD)
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2, 1);
static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 2, 2);
#else
static const struct pll_div ppll_init_cfg = PLL_DIVISORS(PPLL_HZ, 2, 2, 1);
#endif

static const struct pll_div apll_l_1600_cfg = PLL_DIVISORS(1600 * MHz, 3, 1, 1);
static const struct pll_div apll_l_600_cfg = PLL_DIVISORS(600 * MHz, 1, 2, 1);

static const struct pll_div *apll_l_cfgs[] = {
	[APLL_L_1600_MHZ] = &apll_l_1600_cfg,
	[APLL_L_600_MHZ] = &apll_l_600_cfg,
};

static const struct pll_div apll_b_600_cfg = PLL_DIVISORS(600 * MHz, 1, 2, 1);
static const struct pll_div *apll_b_cfgs[] = {
	[APLL_B_600_MHZ] = &apll_b_600_cfg,
};

enum {
	/* PLL_CON0 */
	PLL_FBDIV_MASK = 0xfff,
	PLL_FBDIV_SHIFT = 0,

	/* PLL_CON1 */
	PLL_POSTDIV2_SHIFT = 12,
	PLL_POSTDIV2_MASK = 0x7 << PLL_POSTDIV2_SHIFT,
	PLL_POSTDIV1_SHIFT = 8,
	PLL_POSTDIV1_MASK = 0x7 << PLL_POSTDIV1_SHIFT,
	PLL_REFDIV_MASK = 0x3f,
	PLL_REFDIV_SHIFT = 0,

	/* PLL_CON2 */
	PLL_LOCK_STATUS_SHIFT = 31,
	PLL_LOCK_STATUS_MASK = 1 << PLL_LOCK_STATUS_SHIFT,
	PLL_FRACDIV_MASK = 0xffffff,
	PLL_FRACDIV_SHIFT = 0,

	/* PLL_CON3 */
	PLL_MODE_SHIFT = 8,
	PLL_MODE_MASK = 3 << PLL_MODE_SHIFT,
	PLL_MODE_SLOW = 0,
	PLL_MODE_NORM,
	PLL_MODE_DEEP,
	PLL_DSMPD_SHIFT = 3,
	PLL_DSMPD_MASK = 1 << PLL_DSMPD_SHIFT,
	PLL_INTEGER_MODE = 1,

	/* PMUCRU_CLKSEL_CON0 */
	PMU_PCLK_DIV_CON_MASK = 0x1f,
	PMU_PCLK_DIV_CON_SHIFT = 0,

	/* PMUCRU_CLKSEL_CON1 */
	SPI3_PLL_SEL_SHIFT = 7,
	SPI3_PLL_SEL_MASK = 1 << SPI3_PLL_SEL_SHIFT,
	SPI3_PLL_SEL_24M = 0,
	SPI3_PLL_SEL_PPLL = 1,
	SPI3_DIV_CON_SHIFT = 0x0,
	SPI3_DIV_CON_MASK = 0x7f,

	/* PMUCRU_CLKSEL_CON2 */
	I2C_DIV_CON_MASK = 0x7f,
	CLK_I2C8_DIV_CON_SHIFT = 8,
	CLK_I2C0_DIV_CON_SHIFT = 0,

	/* PMUCRU_CLKSEL_CON3 */
	CLK_I2C4_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON0 */
	ACLKM_CORE_L_DIV_CON_SHIFT = 8,
	ACLKM_CORE_L_DIV_CON_MASK = 0x1f << ACLKM_CORE_L_DIV_CON_SHIFT,
	CLK_CORE_L_PLL_SEL_SHIFT = 6,
	CLK_CORE_L_PLL_SEL_MASK = 3 << CLK_CORE_L_PLL_SEL_SHIFT,
	CLK_CORE_L_PLL_SEL_ALPLL = 0x0,
	CLK_CORE_L_PLL_SEL_ABPLL = 0x1,
	CLK_CORE_L_PLL_SEL_DPLL = 0x10,
	CLK_CORE_L_PLL_SEL_GPLL = 0x11,
	CLK_CORE_L_DIV_MASK = 0x1f,
	CLK_CORE_L_DIV_SHIFT = 0,

	/* CLKSEL_CON1 */
	PCLK_DBG_L_DIV_SHIFT = 0x8,
	PCLK_DBG_L_DIV_MASK = 0x1f << PCLK_DBG_L_DIV_SHIFT,
	ATCLK_CORE_L_DIV_SHIFT = 0,
	ATCLK_CORE_L_DIV_MASK = 0x1f << ATCLK_CORE_L_DIV_SHIFT,

	/* CLKSEL_CON2 */
	ACLKM_CORE_B_DIV_CON_SHIFT = 8,
	ACLKM_CORE_B_DIV_CON_MASK = 0x1f << ACLKM_CORE_B_DIV_CON_SHIFT,
	CLK_CORE_B_PLL_SEL_SHIFT = 6,
	CLK_CORE_B_PLL_SEL_MASK = 3 << CLK_CORE_B_PLL_SEL_SHIFT,
	CLK_CORE_B_PLL_SEL_ALPLL = 0x0,
	CLK_CORE_B_PLL_SEL_ABPLL = 0x1,
	CLK_CORE_B_PLL_SEL_DPLL = 0x10,
	CLK_CORE_B_PLL_SEL_GPLL = 0x11,
	CLK_CORE_B_DIV_MASK = 0x1f,
	CLK_CORE_B_DIV_SHIFT = 0,

	/* CLKSEL_CON3 */
	PCLK_DBG_B_DIV_SHIFT = 0x8,
	PCLK_DBG_B_DIV_MASK = 0x1f << PCLK_DBG_B_DIV_SHIFT,
	ATCLK_CORE_B_DIV_SHIFT = 0,
	ATCLK_CORE_B_DIV_MASK = 0x1f << ATCLK_CORE_B_DIV_SHIFT,

	/* CLKSEL_CON14 */
	PCLK_PERIHP_DIV_CON_SHIFT = 12,
	PCLK_PERIHP_DIV_CON_MASK = 0x7 << PCLK_PERIHP_DIV_CON_SHIFT,
	HCLK_PERIHP_DIV_CON_SHIFT = 8,
	HCLK_PERIHP_DIV_CON_MASK = 3 << HCLK_PERIHP_DIV_CON_SHIFT,
	ACLK_PERIHP_PLL_SEL_SHIFT = 7,
	ACLK_PERIHP_PLL_SEL_MASK = 1 << ACLK_PERIHP_PLL_SEL_SHIFT,
	ACLK_PERIHP_PLL_SEL_CPLL = 0,
	ACLK_PERIHP_PLL_SEL_GPLL = 1,
	ACLK_PERIHP_DIV_CON_SHIFT = 0,
	ACLK_PERIHP_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON21 */
	ACLK_EMMC_PLL_SEL_SHIFT = 7,
	ACLK_EMMC_PLL_SEL_MASK = 0x1 << ACLK_EMMC_PLL_SEL_SHIFT,
	ACLK_EMMC_PLL_SEL_GPLL = 0x1,
	ACLK_EMMC_DIV_CON_SHIFT = 0,
	ACLK_EMMC_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON22 */
	CLK_EMMC_PLL_SHIFT = 8,
	CLK_EMMC_PLL_MASK = 0x7 << CLK_EMMC_PLL_SHIFT,
	CLK_EMMC_PLL_SEL_GPLL = 0x1,
	CLK_EMMC_PLL_SEL_24M = 0x5,
	CLK_EMMC_DIV_CON_SHIFT = 0,
	CLK_EMMC_DIV_CON_MASK = 0x7f << CLK_EMMC_DIV_CON_SHIFT,

	/* CLKSEL_CON23 */
	PCLK_PERILP0_DIV_CON_SHIFT = 12,
	PCLK_PERILP0_DIV_CON_MASK = 0x7 << PCLK_PERILP0_DIV_CON_SHIFT,
	HCLK_PERILP0_DIV_CON_SHIFT = 8,
	HCLK_PERILP0_DIV_CON_MASK = 3 << HCLK_PERILP0_DIV_CON_SHIFT,
	ACLK_PERILP0_PLL_SEL_SHIFT = 7,
	ACLK_PERILP0_PLL_SEL_MASK = 1 << ACLK_PERILP0_PLL_SEL_SHIFT,
	ACLK_PERILP0_PLL_SEL_CPLL = 0,
	ACLK_PERILP0_PLL_SEL_GPLL = 1,
	ACLK_PERILP0_DIV_CON_SHIFT = 0,
	ACLK_PERILP0_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON25 */
	PCLK_PERILP1_DIV_CON_SHIFT = 8,
	PCLK_PERILP1_DIV_CON_MASK = 0x7 << PCLK_PERILP1_DIV_CON_SHIFT,
	HCLK_PERILP1_PLL_SEL_SHIFT = 7,
	HCLK_PERILP1_PLL_SEL_MASK = 1 << HCLK_PERILP1_PLL_SEL_SHIFT,
	HCLK_PERILP1_PLL_SEL_CPLL = 0,
	HCLK_PERILP1_PLL_SEL_GPLL = 1,
	HCLK_PERILP1_DIV_CON_SHIFT = 0,
	HCLK_PERILP1_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON26 */
	CLK_SARADC_DIV_CON_SHIFT = 8,
	CLK_SARADC_DIV_CON_MASK = GENMASK(15, 8),
	CLK_SARADC_DIV_CON_WIDTH = 8,

	/* CLKSEL_CON27 */
	CLK_TSADC_SEL_X24M = 0x0,
	CLK_TSADC_SEL_SHIFT = 15,
	CLK_TSADC_SEL_MASK = 1 << CLK_TSADC_SEL_SHIFT,
	CLK_TSADC_DIV_CON_SHIFT = 0,
	CLK_TSADC_DIV_CON_MASK = 0x3ff,

	/* CLKSEL_CON47 & CLKSEL_CON48 */
	ACLK_VOP_PLL_SEL_SHIFT = 6,
	ACLK_VOP_PLL_SEL_MASK = 0x3 << ACLK_VOP_PLL_SEL_SHIFT,
	ACLK_VOP_PLL_SEL_CPLL = 0x1,
	ACLK_VOP_DIV_CON_SHIFT = 0,
	ACLK_VOP_DIV_CON_MASK = 0x1f << ACLK_VOP_DIV_CON_SHIFT,

	/* CLKSEL_CON49 & CLKSEL_CON50 */
	DCLK_VOP_DCLK_SEL_SHIFT = 11,
	DCLK_VOP_DCLK_SEL_MASK = 1 << DCLK_VOP_DCLK_SEL_SHIFT,
	DCLK_VOP_DCLK_SEL_DIVOUT = 0,
	DCLK_VOP_PLL_SEL_SHIFT = 8,
	DCLK_VOP_PLL_SEL_MASK = 3 << DCLK_VOP_PLL_SEL_SHIFT,
	DCLK_VOP_PLL_SEL_VPLL = 0,
	DCLK_VOP_DIV_CON_MASK = 0xff,
	DCLK_VOP_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON58 */
	CLK_SPI_PLL_SEL_WIDTH = 1,
	CLK_SPI_PLL_SEL_MASK = ((1 << CLK_SPI_PLL_SEL_WIDTH) - 1),
	CLK_SPI_PLL_SEL_CPLL = 0,
	CLK_SPI_PLL_SEL_GPLL = 1,
	CLK_SPI_PLL_DIV_CON_WIDTH = 7,
	CLK_SPI_PLL_DIV_CON_MASK = ((1 << CLK_SPI_PLL_DIV_CON_WIDTH) - 1),

	CLK_SPI5_PLL_DIV_CON_SHIFT = 8,
	CLK_SPI5_PLL_SEL_SHIFT = 15,

	/* CLKSEL_CON59 */
	CLK_SPI1_PLL_SEL_SHIFT = 15,
	CLK_SPI1_PLL_DIV_CON_SHIFT = 8,
	CLK_SPI0_PLL_SEL_SHIFT = 7,
	CLK_SPI0_PLL_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON60 */
	CLK_SPI4_PLL_SEL_SHIFT = 15,
	CLK_SPI4_PLL_DIV_CON_SHIFT = 8,
	CLK_SPI2_PLL_SEL_SHIFT = 7,
	CLK_SPI2_PLL_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON61 */
	CLK_I2C_PLL_SEL_MASK = 1,
	CLK_I2C_PLL_SEL_CPLL = 0,
	CLK_I2C_PLL_SEL_GPLL = 1,
	CLK_I2C5_PLL_SEL_SHIFT = 15,
	CLK_I2C5_DIV_CON_SHIFT = 8,
	CLK_I2C1_PLL_SEL_SHIFT = 7,
	CLK_I2C1_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON62 */
	CLK_I2C6_PLL_SEL_SHIFT = 15,
	CLK_I2C6_DIV_CON_SHIFT = 8,
	CLK_I2C2_PLL_SEL_SHIFT = 7,
	CLK_I2C2_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON63 */
	CLK_I2C7_PLL_SEL_SHIFT = 15,
	CLK_I2C7_DIV_CON_SHIFT = 8,
	CLK_I2C3_PLL_SEL_SHIFT = 7,
	CLK_I2C3_DIV_CON_SHIFT = 0,

	/* CRU_SOFTRST_CON4 */
	RESETN_DDR0_REQ_SHIFT = 8,
	RESETN_DDR0_REQ_MASK = 1 << RESETN_DDR0_REQ_SHIFT,
	RESETN_DDRPHY0_REQ_SHIFT = 9,
	RESETN_DDRPHY0_REQ_MASK = 1 << RESETN_DDRPHY0_REQ_SHIFT,
	RESETN_DDR1_REQ_SHIFT = 12,
	RESETN_DDR1_REQ_MASK = 1 << RESETN_DDR1_REQ_SHIFT,
	RESETN_DDRPHY1_REQ_SHIFT = 13,
	RESETN_DDRPHY1_REQ_MASK = 1 << RESETN_DDRPHY1_REQ_SHIFT,
};

#define VCO_MAX_KHZ	(3200 * (MHz / KHz))
#define VCO_MIN_KHZ	(800 * (MHz / KHz))
#define OUTPUT_MAX_KHZ	(3200 * (MHz / KHz))
#define OUTPUT_MIN_KHZ	(16 * (MHz / KHz))

/*
 * The divider restrictions of the PLL in integer mode; these are defined in
 * CRU_*PLL_CON0 or PMUCRU_*PLL_CON0.
 */
#define PLL_DIV_MIN	16
#define PLL_DIV_MAX	3200

/*
 * How to calculate the PLL (from TRM V0.3 Part 1 Page 63):
 * Formulas also embedded within the Fractional PLL Verilog model:
 * If DSMPD = 1 (DSM is disabled, "integer mode")
 * FOUTVCO = FREF / REFDIV * FBDIV
 * FOUTPOSTDIV = FOUTVCO / POSTDIV1 / POSTDIV2
 * Where:
 * FOUTVCO = Fractional PLL non-divided output frequency
 * FOUTPOSTDIV = Fractional PLL divided output frequency
 *               (output of second post divider)
 * FREF = Fractional PLL input reference frequency, (the OSC_HZ 24MHz input)
 * REFDIV = Fractional PLL input reference clock divider
 * FBDIV = Integer value programmed into feedback divide
 *
 */
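/*
 * Worked example using apll_l_600_cfg above (refdiv = 1, fbdiv = 50,
 * postdiv1 = 2, postdiv2 = 1):
 * FOUTVCO = 24 MHz / 1 * 50 = 1200 MHz (within the 800..3200 MHz VCO range)
 * FOUTPOSTDIV = 1200 MHz / 2 / 1 = 600 MHz
 */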
static void rkclk_set_pll(u32 *pll_con, const struct pll_div *div)
{
	/* All 8 PLLs have same VCO and output frequency range restrictions. */
	u32 vco_khz = OSC_HZ / 1000 * div->fbdiv / div->refdiv;
	u32 output_khz = vco_khz / div->postdiv1 / div->postdiv2;

	debug("PLL at %p: fbdiv=%d, refdiv=%d, postdiv1=%d, "
	      "postdiv2=%d, vco=%u khz, output=%u khz\n",
	      pll_con, div->fbdiv, div->refdiv, div->postdiv1,
	      div->postdiv2, vco_khz, output_khz);
	assert(vco_khz >= VCO_MIN_KHZ && vco_khz <= VCO_MAX_KHZ &&
	       output_khz >= OUTPUT_MIN_KHZ && output_khz <= OUTPUT_MAX_KHZ &&
	       div->fbdiv >= PLL_DIV_MIN && div->fbdiv <= PLL_DIV_MAX);

	/*
	 * When powering on or changing the PLL setting, force the PLL into
	 * slow mode first to ensure a stable output clock.
	 */
	rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
		     PLL_MODE_SLOW << PLL_MODE_SHIFT);

	/* use integer mode */
	rk_clrsetreg(&pll_con[3], PLL_DSMPD_MASK,
		     PLL_INTEGER_MODE << PLL_DSMPD_SHIFT);

	rk_clrsetreg(&pll_con[0], PLL_FBDIV_MASK,
		     div->fbdiv << PLL_FBDIV_SHIFT);
	rk_clrsetreg(&pll_con[1],
		     PLL_POSTDIV2_MASK | PLL_POSTDIV1_MASK |
		     PLL_REFDIV_MASK,
		     (div->postdiv2 << PLL_POSTDIV2_SHIFT) |
		     (div->postdiv1 << PLL_POSTDIV1_SHIFT) |
		     (div->refdiv << PLL_REFDIV_SHIFT));

	/* wait for the PLL to lock */
	while (!(readl(&pll_con[2]) & (1 << PLL_LOCK_STATUS_SHIFT)))
		udelay(1);

	/* switch the PLL back to normal mode */
	rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
		     PLL_MODE_NORM << PLL_MODE_SHIFT);
}

static int pll_para_config(u32 freq_hz, struct pll_div *div)
{
	u32 ref_khz = OSC_HZ / KHz, refdiv, fbdiv = 0;
	u32 postdiv1, postdiv2 = 1;
	u32 fref_khz;
	u32 diff_khz, best_diff_khz;
	const u32 max_refdiv = 63, max_fbdiv = 3200, min_fbdiv = 16;
	const u32 max_postdiv1 = 7, max_postdiv2 = 7;
	u32 vco_khz;
	u32 freq_khz = freq_hz / KHz;

	if (!freq_hz) {
		printf("%s: the frequency can't be 0 Hz\n", __func__);
		return -1;
	}

	postdiv1 = DIV_ROUND_UP(VCO_MIN_KHZ, freq_khz);
	if (postdiv1 > max_postdiv1) {
		postdiv2 = DIV_ROUND_UP(postdiv1, max_postdiv1);
		postdiv1 = DIV_ROUND_UP(postdiv1, postdiv2);
	}

	vco_khz = freq_khz * postdiv1 * postdiv2;

	if (vco_khz < VCO_MIN_KHZ || vco_khz > VCO_MAX_KHZ ||
	    postdiv2 > max_postdiv2) {
		printf("%s: cannot find a supported VCO for frequency %u Hz\n",
		       __func__, freq_hz);
		return -1;
	}

	div->postdiv1 = postdiv1;
	div->postdiv2 = postdiv2;

	best_diff_khz = vco_khz;
	for (refdiv = 1; refdiv < max_refdiv && best_diff_khz; refdiv++) {
		fref_khz = ref_khz / refdiv;

		fbdiv = vco_khz / fref_khz;
		if (fbdiv >= max_fbdiv || fbdiv <= min_fbdiv)
			continue;
		diff_khz = vco_khz - fbdiv * fref_khz;
		if (fbdiv + 1 < max_fbdiv && diff_khz > fref_khz / 2) {
			fbdiv++;
			diff_khz = fref_khz - diff_khz;
		}

		if (diff_khz >= best_diff_khz)
			continue;

		best_diff_khz = diff_khz;
		div->refdiv = refdiv;
		div->fbdiv = fbdiv;
	}

	if (best_diff_khz > 4 * (MHz / KHz)) {
		printf("%s: failed to match output frequency %u Hz, "
		       "difference %u Hz exceeds 4 MHz\n", __func__, freq_hz,
		       best_diff_khz * KHz);
		return -1;
	}
	return 0;
}

void rk3399_configure_cpu_l(struct rockchip_cru *cru,
			    enum apll_l_frequencies apll_l_freq)
{
	u32 aclkm_div;
	u32 pclk_dbg_div;
	u32 atclk_div;

	/* Setup cluster L */
	rkclk_set_pll(&cru->apll_l_con[0], apll_l_cfgs[apll_l_freq]);

	aclkm_div = LPLL_HZ / ACLKM_CORE_L_HZ - 1;
	assert((aclkm_div + 1) * ACLKM_CORE_L_HZ == LPLL_HZ &&
	       aclkm_div < 0x1f);

	pclk_dbg_div = LPLL_HZ / PCLK_DBG_L_HZ - 1;
	assert((pclk_dbg_div + 1) * PCLK_DBG_L_HZ == LPLL_HZ &&
	       pclk_dbg_div < 0x1f);

	atclk_div = LPLL_HZ / ATCLK_CORE_L_HZ - 1;
	assert((atclk_div + 1) * ATCLK_CORE_L_HZ == LPLL_HZ &&
	       atclk_div < 0x1f);

	rk_clrsetreg(&cru->clksel_con[0],
		     ACLKM_CORE_L_DIV_CON_MASK | CLK_CORE_L_PLL_SEL_MASK |
		     CLK_CORE_L_DIV_MASK,
		     aclkm_div << ACLKM_CORE_L_DIV_CON_SHIFT |
		     CLK_CORE_L_PLL_SEL_ALPLL << CLK_CORE_L_PLL_SEL_SHIFT |
		     0 << CLK_CORE_L_DIV_SHIFT);

	rk_clrsetreg(&cru->clksel_con[1],
		     PCLK_DBG_L_DIV_MASK | ATCLK_CORE_L_DIV_MASK,
		     pclk_dbg_div << PCLK_DBG_L_DIV_SHIFT |
		     atclk_div << ATCLK_CORE_L_DIV_SHIFT);
}

void rk3399_configure_cpu_b(struct rockchip_cru *cru,
			    enum apll_b_frequencies apll_b_freq)
{
	u32 aclkm_div;
	u32 pclk_dbg_div;
	u32 atclk_div;

	/* Setup cluster B */
	rkclk_set_pll(&cru->apll_b_con[0], apll_b_cfgs[apll_b_freq]);

	aclkm_div = BPLL_HZ / ACLKM_CORE_B_HZ - 1;
	assert((aclkm_div + 1) * ACLKM_CORE_B_HZ == BPLL_HZ &&
	       aclkm_div < 0x1f);

	pclk_dbg_div = BPLL_HZ / PCLK_DBG_B_HZ - 1;
	assert((pclk_dbg_div + 1) * PCLK_DBG_B_HZ == BPLL_HZ &&
	       pclk_dbg_div < 0x1f);

	atclk_div = BPLL_HZ / ATCLK_CORE_B_HZ - 1;
	assert((atclk_div + 1) * ATCLK_CORE_B_HZ == BPLL_HZ &&
	       atclk_div < 0x1f);

	rk_clrsetreg(&cru->clksel_con[2],
		     ACLKM_CORE_B_DIV_CON_MASK | CLK_CORE_B_PLL_SEL_MASK |
		     CLK_CORE_B_DIV_MASK,
		     aclkm_div << ACLKM_CORE_B_DIV_CON_SHIFT |
		     CLK_CORE_B_PLL_SEL_ABPLL << CLK_CORE_B_PLL_SEL_SHIFT |
		     0 << CLK_CORE_B_DIV_SHIFT);

	rk_clrsetreg(&cru->clksel_con[3],
		     PCLK_DBG_B_DIV_MASK | ATCLK_CORE_B_DIV_MASK,
		     pclk_dbg_div << PCLK_DBG_B_DIV_SHIFT |
		     atclk_div << ATCLK_CORE_B_DIV_SHIFT);
}

#define I2C_CLK_REG_MASK(bus) \
	(I2C_DIV_CON_MASK << CLK_I2C ##bus## _DIV_CON_SHIFT | \
	 CLK_I2C_PLL_SEL_MASK << CLK_I2C ##bus## _PLL_SEL_SHIFT)

#define I2C_CLK_REG_VALUE(bus, clk_div) \
	((clk_div - 1) << CLK_I2C ##bus## _DIV_CON_SHIFT | \
	 CLK_I2C_PLL_SEL_GPLL << CLK_I2C ##bus## _PLL_SEL_SHIFT)

#define I2C_CLK_DIV_VALUE(con, bus) \
	((con >> CLK_I2C ##bus## _DIV_CON_SHIFT) & I2C_DIV_CON_MASK)

#define I2C_PMUCLK_REG_MASK(bus) \
	(I2C_DIV_CON_MASK << CLK_I2C ##bus## _DIV_CON_SHIFT)

#define I2C_PMUCLK_REG_VALUE(bus, clk_div) \
	((clk_div - 1) << CLK_I2C ##bus## _DIV_CON_SHIFT)

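/*
 * For example, I2C_CLK_REG_VALUE(1, 8) expands (via token pasting) to
 * ((8 - 1) << CLK_I2C1_DIV_CON_SHIFT |
 *  CLK_I2C_PLL_SEL_GPLL << CLK_I2C1_PLL_SEL_SHIFT),
 * i.e. divide-by-8 from GPLL for the I2C1 controller clock.
 */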
static ulong rk3399_i2c_get_clk(struct rockchip_cru *cru, ulong clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case SCLK_I2C1:
		con = readl(&cru->clksel_con[61]);
		div = I2C_CLK_DIV_VALUE(con, 1);
		break;
	case SCLK_I2C2:
		con = readl(&cru->clksel_con[62]);
		div = I2C_CLK_DIV_VALUE(con, 2);
		break;
	case SCLK_I2C3:
		con = readl(&cru->clksel_con[63]);
		div = I2C_CLK_DIV_VALUE(con, 3);
		break;
	case SCLK_I2C5:
		con = readl(&cru->clksel_con[61]);
		div = I2C_CLK_DIV_VALUE(con, 5);
		break;
	case SCLK_I2C6:
		con = readl(&cru->clksel_con[62]);
		div = I2C_CLK_DIV_VALUE(con, 6);
		break;
	case SCLK_I2C7:
		con = readl(&cru->clksel_con[63]);
		div = I2C_CLK_DIV_VALUE(con, 7);
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3399_i2c_set_clk(struct rockchip_cru *cru, ulong clk_id, uint hz)
{
	int src_clk_div;

	/* i2c0, 4 and 8 are clocked from PPLL; i2c1, 2, 3, 5, 6, 7 from GPLL */
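	/*
	 * Example (assuming a 594 MHz GPLL): a 99 MHz request gives
	 * src_clk_div = 6, so a DIV_CON field value of 5 is programmed below.
	 */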
	src_clk_div = GPLL_HZ / hz;
	assert(src_clk_div - 1 < 127);

	switch (clk_id) {
	case SCLK_I2C1:
		rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(1),
			     I2C_CLK_REG_VALUE(1, src_clk_div));
		break;
	case SCLK_I2C2:
		rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(2),
			     I2C_CLK_REG_VALUE(2, src_clk_div));
		break;
	case SCLK_I2C3:
		rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(3),
			     I2C_CLK_REG_VALUE(3, src_clk_div));
		break;
	case SCLK_I2C5:
		rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(5),
			     I2C_CLK_REG_VALUE(5, src_clk_div));
		break;
	case SCLK_I2C6:
		rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(6),
			     I2C_CLK_REG_VALUE(6, src_clk_div));
		break;
	case SCLK_I2C7:
		rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(7),
			     I2C_CLK_REG_VALUE(7, src_clk_div));
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return rk3399_i2c_get_clk(cru, clk_id);
}

/*
 * RK3399 SPI clocks have a common divider-width (7 bits) and a single bit
 * to select either CPLL or GPLL as the clock-parent. The location within
 * the enclosing CLKSEL_CON (i.e. div_shift and sel_shift) is variable.
 */

struct spi_clkreg {
	u8 reg;  /* CLKSEL_CON[reg] register in CRU */
	u8 div_shift;
	u8 sel_shift;
};

/*
 * The entries are numbered relative to their offset from SCLK_SPI0.
 *
 * Note that SCLK_SPI3 (which is configured via PMUCRU and requires
 * different logic) is not supported.
 */
static const struct spi_clkreg spi_clkregs[] = {
	[0] = { .reg = 59,
		.div_shift = CLK_SPI0_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI0_PLL_SEL_SHIFT, },
	[1] = { .reg = 59,
		.div_shift = CLK_SPI1_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI1_PLL_SEL_SHIFT, },
	[2] = { .reg = 60,
		.div_shift = CLK_SPI2_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI2_PLL_SEL_SHIFT, },
	[3] = { .reg = 60,
		.div_shift = CLK_SPI4_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI4_PLL_SEL_SHIFT, },
	[4] = { .reg = 58,
		.div_shift = CLK_SPI5_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI5_PLL_SEL_SHIFT, },
};
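/*
 * Reading entry [4] together with the enum above: the SPI5 divider lives in
 * bits [14:8] of CLKSEL_CON58 and its parent select in bit 15.
 */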

static ulong rk3399_spi_get_clk(struct rockchip_cru *cru, ulong clk_id)
{
	const struct spi_clkreg *spiclk = NULL;
	u32 div, val;

	switch (clk_id) {
	case SCLK_SPI0 ... SCLK_SPI5:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	val = readl(&cru->clksel_con[spiclk->reg]);
	div = bitfield_extract(val, spiclk->div_shift,
			       CLK_SPI_PLL_DIV_CON_WIDTH);

	return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3399_spi_set_clk(struct rockchip_cru *cru, ulong clk_id, uint hz)
{
	const struct spi_clkreg *spiclk = NULL;
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(GPLL_HZ, hz) - 1;
	assert(src_clk_div < 128);

	switch (clk_id) {
	case SCLK_SPI1 ... SCLK_SPI5:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	rk_clrsetreg(&cru->clksel_con[spiclk->reg],
		     ((CLK_SPI_PLL_DIV_CON_MASK << spiclk->div_shift) |
		      (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)),
		     ((src_clk_div << spiclk->div_shift) |
		      (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)));

	return rk3399_spi_get_clk(cru, clk_id);
}

static ulong rk3399_vop_set_clk(struct rockchip_cru *cru, ulong clk_id, u32 hz)
{
	struct pll_div vpll_config = {0};
	int aclk_vop = 198 * MHz;
	void *aclkreg_addr, *dclkreg_addr;
	u32 div;

	switch (clk_id) {
	case DCLK_VOP0:
		aclkreg_addr = &cru->clksel_con[47];
		dclkreg_addr = &cru->clksel_con[49];
		break;
	case DCLK_VOP1:
		aclkreg_addr = &cru->clksel_con[48];
		dclkreg_addr = &cru->clksel_con[50];
		break;
	default:
		return -EINVAL;
	}
	/* vop aclk source clk: cpll */
	div = CPLL_HZ / aclk_vop;
	assert(div - 1 < 32);

	rk_clrsetreg(aclkreg_addr,
		     ACLK_VOP_PLL_SEL_MASK | ACLK_VOP_DIV_CON_MASK,
		     ACLK_VOP_PLL_SEL_CPLL << ACLK_VOP_PLL_SEL_SHIFT |
		     (div - 1) << ACLK_VOP_DIV_CON_SHIFT);

	/* vop dclk comes from vpll and equals the vpll rate (i.e. div == 1) */
	if (pll_para_config(hz, &vpll_config))
		return -1;

	rkclk_set_pll(&cru->vpll_con[0], &vpll_config);

	rk_clrsetreg(dclkreg_addr,
		     DCLK_VOP_DCLK_SEL_MASK | DCLK_VOP_PLL_SEL_MASK |
		     DCLK_VOP_DIV_CON_MASK,
		     DCLK_VOP_DCLK_SEL_DIVOUT << DCLK_VOP_DCLK_SEL_SHIFT |
		     DCLK_VOP_PLL_SEL_VPLL << DCLK_VOP_PLL_SEL_SHIFT |
		     (1 - 1) << DCLK_VOP_DIV_CON_SHIFT);

	return hz;
}

static ulong rk3399_mmc_get_clk(struct rockchip_cru *cru, uint clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		con = readl(&cru->clksel_con[16]);
		/* the dwmmc controller has an internal divide-by-2 */
		div = 2;
		break;
	case SCLK_EMMC:
		con = readl(&cru->clksel_con[21]);
		div = 1;
		break;
	default:
		return -EINVAL;
	}

	div *= (con & CLK_EMMC_DIV_CON_MASK) >> CLK_EMMC_DIV_CON_SHIFT;
	if ((con & CLK_EMMC_PLL_MASK) >> CLK_EMMC_PLL_SHIFT
	    == CLK_EMMC_PLL_SEL_24M)
		return DIV_TO_RATE(OSC_HZ, div);
	else
		return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3399_mmc_set_clk(struct rockchip_cru *cru,
				ulong clk_id, ulong set_rate)
{
	int src_clk_div;
	int aclk_emmc = 198 * MHz;

	switch (clk_id) {
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		/* Select clk_sdmmc source from GPLL by default */
		/*
		 * The mmc clock has a fixed internal divide-by-2, so
		 * program double the requested rate in the CRU.
		 */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ / 2, set_rate);

		if (src_clk_div > 128) {
			/* use 24MHz source for 400KHz clock */
			src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, set_rate);
			assert(src_clk_div - 1 < 128);
			rk_clrsetreg(&cru->clksel_con[16],
				     CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
				     CLK_EMMC_PLL_SEL_24M << CLK_EMMC_PLL_SHIFT |
				     (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
		} else {
			rk_clrsetreg(&cru->clksel_con[16],
				     CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
				     CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
				     (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
		}
		break;
	case SCLK_EMMC:
		/* Select aclk_emmc source from GPLL */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ, aclk_emmc);
		assert(src_clk_div - 1 < 32);

		rk_clrsetreg(&cru->clksel_con[21],
			     ACLK_EMMC_PLL_SEL_MASK | ACLK_EMMC_DIV_CON_MASK,
			     ACLK_EMMC_PLL_SEL_GPLL << ACLK_EMMC_PLL_SEL_SHIFT |
			     (src_clk_div - 1) << ACLK_EMMC_DIV_CON_SHIFT);

		/* Select clk_emmc source from GPLL too */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ, set_rate);
		assert(src_clk_div - 1 < 128);

		rk_clrsetreg(&cru->clksel_con[22],
			     CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
			     CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
			     (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
		break;
	default:
		return -EINVAL;
	}
	return rk3399_mmc_get_clk(cru, clk_id);
}

static ulong rk3399_gmac_set_clk(struct rockchip_cru *cru, ulong rate)
{
	ulong ret;

	/*
	 * The RGMII CLK can be derived either from an external "clkin"
	 * or be generated internally by dividing down SCLK_MAC.
	 */
	if (readl(&cru->clksel_con[19]) & BIT(4)) {
		/* An external clock will always generate the right rate... */
		ret = rate;
	} else {
		/*
		 * No platform uses an internal clock to date.
		 * Implement this once it becomes necessary and print an error
		 * if someone tries to use it (while it remains unimplemented).
		 */
		pr_err("%s: internal clock is UNIMPLEMENTED\n", __func__);
		ret = 0;
	}

	return ret;
}

#define PMUSGRF_DDR_RGN_CON16 0xff330040
static ulong rk3399_ddr_set_clk(struct rockchip_cru *cru,
				ulong set_rate)
{
	struct pll_div dpll_cfg;

	/* IC ECO bug, need to set this register */
	writel(0xc000c000, PMUSGRF_DDR_RGN_CON16);

	/* clk_ddrc == DPLL = 24MHz / refdiv * fbdiv / postdiv1 / postdiv2 */
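	/* e.g. the 800 MHz entry below: 24 MHz / 1 * 100 / 3 / 1 = 800 MHz */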
	switch (set_rate) {
	case 50 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 12, .postdiv1 = 3, .postdiv2 = 2};
		break;
	case 200 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 50, .postdiv1 = 6, .postdiv2 = 1};
		break;
	case 300 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 2, .fbdiv = 100, .postdiv1 = 4, .postdiv2 = 1};
		break;
	case 400 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 50, .postdiv1 = 3, .postdiv2 = 1};
		break;
	case 666 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 2, .fbdiv = 111, .postdiv1 = 2, .postdiv2 = 1};
		break;
	case 800 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 100, .postdiv1 = 3, .postdiv2 = 1};
		break;
	case 933 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 116, .postdiv1 = 3, .postdiv2 = 1};
		break;
	default:
		pr_err("Unsupported SDRAM frequency %ld!\n", set_rate);
	}
	rkclk_set_pll(&cru->dpll_con[0], &dpll_cfg);

	return set_rate;
}

static ulong rk3399_saradc_get_clk(struct rockchip_cru *cru)
{
	u32 div, val;

	val = readl(&cru->clksel_con[26]);
	div = bitfield_extract(val, CLK_SARADC_DIV_CON_SHIFT,
			       CLK_SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

static ulong rk3399_saradc_set_clk(struct rockchip_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);
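	/* e.g. hz = 1 MHz: src_clk_div = 24 - 1 = 23, read back as 24 MHz / 24 */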

	rk_clrsetreg(&cru->clksel_con[26],
		     CLK_SARADC_DIV_CON_MASK,
		     src_clk_div << CLK_SARADC_DIV_CON_SHIFT);

	return rk3399_saradc_get_clk(cru);
}

static ulong rk3399_clk_get_rate(struct clk *clk)
{
	struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
	ulong rate = 0;

	switch (clk->id) {
	case 0 ... 63:
		return 0;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
	case SCLK_EMMC:
		rate = rk3399_mmc_get_clk(priv->cru, clk->id);
		break;
	case SCLK_I2C1:
	case SCLK_I2C2:
	case SCLK_I2C3:
	case SCLK_I2C5:
	case SCLK_I2C6:
	case SCLK_I2C7:
		rate = rk3399_i2c_get_clk(priv->cru, clk->id);
		break;
	case SCLK_SPI0...SCLK_SPI5:
		rate = rk3399_spi_get_clk(priv->cru, clk->id);
		break;
	case SCLK_UART0:
	case SCLK_UART1:
	case SCLK_UART2:
	case SCLK_UART3:
		return 24000000;
	case PCLK_HDMI_CTRL:
		break;
	case DCLK_VOP0:
	case DCLK_VOP1:
		break;
	case PCLK_EFUSE1024NS:
		break;
	case SCLK_SARADC:
		rate = rk3399_saradc_get_clk(priv->cru);
		break;
	case ACLK_VIO:
	case ACLK_HDCP:
	case ACLK_GIC_PRE:
	case PCLK_DDR:
		break;
	default:
		log_debug("Unknown clock %lu\n", clk->id);
		return -ENOENT;
	}

	return rate;
}

static ulong rk3399_clk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
	ulong ret = 0;

	switch (clk->id) {
	case 0 ... 63:
		return 0;

	case ACLK_PERIHP:
	case HCLK_PERIHP:
	case PCLK_PERIHP:
		return 0;

	case ACLK_PERILP0:
	case HCLK_PERILP0:
	case PCLK_PERILP0:
		return 0;

	case ACLK_CCI:
		return 0;

	case HCLK_PERILP1:
	case PCLK_PERILP1:
		return 0;

	case HCLK_SDMMC:
	case SCLK_SDMMC:
	case SCLK_EMMC:
		ret = rk3399_mmc_set_clk(priv->cru, clk->id, rate);
		break;
	case SCLK_MAC:
		ret = rk3399_gmac_set_clk(priv->cru, rate);
		break;
	case SCLK_I2C1:
	case SCLK_I2C2:
	case SCLK_I2C3:
	case SCLK_I2C5:
	case SCLK_I2C6:
	case SCLK_I2C7:
		ret = rk3399_i2c_set_clk(priv->cru, clk->id, rate);
		break;
	case SCLK_SPI0...SCLK_SPI5:
		ret = rk3399_spi_set_clk(priv->cru, clk->id, rate);
		break;
	case PCLK_HDMI_CTRL:
	case PCLK_VIO_GRF:
		/* the PCLK gates for video are enabled by default */
		break;
	case DCLK_VOP0:
	case DCLK_VOP1:
		ret = rk3399_vop_set_clk(priv->cru, clk->id, rate);
		break;
	case ACLK_VOP1:
	case HCLK_VOP1:
	case HCLK_SD:
		/*
		 * assigned-clocks handling is not required for these, so
		 * return 0 to satisfy clk_set_defaults() during device probe.
		 */
		return 0;
	case SCLK_DDRCLK:
		ret = rk3399_ddr_set_clk(priv->cru, rate);
		break;
	case PCLK_EFUSE1024NS:
		break;
	case SCLK_SARADC:
		ret = rk3399_saradc_set_clk(priv->cru, rate);
		break;
	case ACLK_VIO:
	case ACLK_HDCP:
	case ACLK_GIC_PRE:
	case PCLK_DDR:
		return 0;
	default:
		log_debug("Unknown clock %lu\n", clk->id);
		return -ENOENT;
	}

	return ret;
}

static int __maybe_unused rk3399_gmac_set_parent(struct clk *clk,
						 struct clk *parent)
{
	struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
	const char *clock_output_name;
	int ret;

	/*
	 * If the requested parent is in the same clock-controller and
	 * the id is SCLK_MAC ("clk_gmac"), switch to the internal clock.
	 */
	if (parent->dev == clk->dev && parent->id == SCLK_MAC) {
		debug("%s: switching RGMII to SCLK_MAC\n", __func__);
		rk_clrreg(&priv->cru->clksel_con[19], BIT(4));
		return 0;
	}

	/*
	 * Otherwise, we need to check the clock-output-names of the
	 * requested parent to see if the requested id is "clkin_gmac".
	 */
	ret = dev_read_string_index(parent->dev, "clock-output-names",
				    parent->id, &clock_output_name);
	if (ret < 0)
		return -ENODATA;

	/* If this is "clkin_gmac", switch to the external clock input */
	if (!strcmp(clock_output_name, "clkin_gmac")) {
		debug("%s: switching RGMII to CLKIN\n", __func__);
		rk_setreg(&priv->cru->clksel_con[19], BIT(4));
		return 0;
	}

	return -EINVAL;
}

static int __maybe_unused rk3399_clk_set_parent(struct clk *clk,
						struct clk *parent)
{
	switch (clk->id) {
	case SCLK_RMII_SRC:
		return rk3399_gmac_set_parent(clk, parent);
	}

	debug("%s: unsupported clk %ld\n", __func__, clk->id);
	return -ENOENT;
}

static struct clk_ops rk3399_clk_ops = {
	.get_rate = rk3399_clk_get_rate,
	.set_rate = rk3399_clk_set_rate,
#if CONFIG_IS_ENABLED(OF_CONTROL) && !CONFIG_IS_ENABLED(OF_PLATDATA)
	.set_parent = rk3399_clk_set_parent,
#endif
};

#ifdef CONFIG_SPL_BUILD
static void rkclk_init(struct rockchip_cru *cru)
{
	u32 aclk_div;
	u32 hclk_div;
	u32 pclk_div;

	rk3399_configure_cpu_l(cru, APLL_L_600_MHZ);
	rk3399_configure_cpu_b(cru, APLL_B_600_MHZ);
	/*
	 * Some CRU registers are changed by the bootrom; reset them back to
	 * the reset/default values described in the TRM to avoid confusing
	 * the kernel. Consider the following three writes a bootrom-bug fix.
	 */
	rk_clrsetreg(&cru->clksel_con[12], 0xffff, 0x4101);
	rk_clrsetreg(&cru->clksel_con[19], 0xffff, 0x033f);
	rk_clrsetreg(&cru->clksel_con[56], 0x0003, 0x0003);

	/* configure gpll cpll */
	rkclk_set_pll(&cru->gpll_con[0], &gpll_init_cfg);
	rkclk_set_pll(&cru->cpll_con[0], &cpll_init_cfg);

	/* configure perihp aclk, hclk, pclk */
	aclk_div = GPLL_HZ / PERIHP_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERIHP_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = PERIHP_ACLK_HZ / PERIHP_HCLK_HZ - 1;
	assert((hclk_div + 1) * PERIHP_HCLK_HZ ==
	       PERIHP_ACLK_HZ && (hclk_div < 0x4));

	pclk_div = PERIHP_ACLK_HZ / PERIHP_PCLK_HZ - 1;
	assert((pclk_div + 1) * PERIHP_PCLK_HZ ==
	       PERIHP_ACLK_HZ && (pclk_div < 0x7));

	rk_clrsetreg(&cru->clksel_con[14],
		     PCLK_PERIHP_DIV_CON_MASK | HCLK_PERIHP_DIV_CON_MASK |
		     ACLK_PERIHP_PLL_SEL_MASK | ACLK_PERIHP_DIV_CON_MASK,
		     pclk_div << PCLK_PERIHP_DIV_CON_SHIFT |
		     hclk_div << HCLK_PERIHP_DIV_CON_SHIFT |
		     ACLK_PERIHP_PLL_SEL_GPLL << ACLK_PERIHP_PLL_SEL_SHIFT |
		     aclk_div << ACLK_PERIHP_DIV_CON_SHIFT);

	/* configure perilp0 aclk, hclk, pclk */
	aclk_div = GPLL_HZ / PERILP0_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERILP0_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = PERILP0_ACLK_HZ / PERILP0_HCLK_HZ - 1;
	assert((hclk_div + 1) * PERILP0_HCLK_HZ ==
	       PERILP0_ACLK_HZ && (hclk_div < 0x4));

	pclk_div = PERILP0_ACLK_HZ / PERILP0_PCLK_HZ - 1;
	assert((pclk_div + 1) * PERILP0_PCLK_HZ ==
	       PERILP0_ACLK_HZ && (pclk_div < 0x7));

	rk_clrsetreg(&cru->clksel_con[23],
		     PCLK_PERILP0_DIV_CON_MASK | HCLK_PERILP0_DIV_CON_MASK |
		     ACLK_PERILP0_PLL_SEL_MASK | ACLK_PERILP0_DIV_CON_MASK,
		     pclk_div << PCLK_PERILP0_DIV_CON_SHIFT |
		     hclk_div << HCLK_PERILP0_DIV_CON_SHIFT |
		     ACLK_PERILP0_PLL_SEL_GPLL << ACLK_PERILP0_PLL_SEL_SHIFT |
		     aclk_div << ACLK_PERILP0_DIV_CON_SHIFT);

	/* perilp1 hclk select gpll as source */
	hclk_div = GPLL_HZ / PERILP1_HCLK_HZ - 1;
	assert((hclk_div + 1) * PERILP1_HCLK_HZ ==
	       GPLL_HZ && (hclk_div < 0x1f));

	pclk_div = PERILP1_HCLK_HZ / PERILP1_HCLK_HZ - 1;
	assert((pclk_div + 1) * PERILP1_HCLK_HZ ==
	       PERILP1_HCLK_HZ && (pclk_div < 0x7));

	rk_clrsetreg(&cru->clksel_con[25],
		     PCLK_PERILP1_DIV_CON_MASK | HCLK_PERILP1_DIV_CON_MASK |
		     HCLK_PERILP1_PLL_SEL_MASK,
		     pclk_div << PCLK_PERILP1_DIV_CON_SHIFT |
		     hclk_div << HCLK_PERILP1_DIV_CON_SHIFT |
		     HCLK_PERILP1_PLL_SEL_GPLL << HCLK_PERILP1_PLL_SEL_SHIFT);
}
#endif

static int rk3399_clk_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
	struct rk3399_clk_priv *priv = dev_get_priv(dev);

#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_clk_plat *plat = dev_get_platdata(dev);

	priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
#endif
	rkclk_init(priv->cru);
#endif
	return 0;
}

static int rk3399_clk_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);
#endif
	return 0;
}

static int rk3399_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rockchip_cru,
						    glb_srst_fst_value);
		priv->glb_srst_snd_value = offsetof(struct rockchip_cru,
						    glb_srst_snd_value);
		sys_child->priv = priv;
	}

#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	ret = offsetof(struct rockchip_cru, softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 21);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif

	return 0;
}

static const struct udevice_id rk3399_clk_ids[] = {
	{ .compatible = "rockchip,rk3399-cru" },
	{ }
};

U_BOOT_DRIVER(clk_rk3399) = {
	.name = "rockchip_rk3399_cru",
	.id = UCLASS_CLK,
	.of_match = rk3399_clk_ids,
	.priv_auto_alloc_size = sizeof(struct rk3399_clk_priv),
	.ofdata_to_platdata = rk3399_clk_ofdata_to_platdata,
	.ops = &rk3399_clk_ops,
	.bind = rk3399_clk_bind,
	.probe = rk3399_clk_probe,
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	.platdata_auto_alloc_size = sizeof(struct rk3399_clk_plat),
#endif
};

static ulong rk3399_i2c_get_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case SCLK_I2C0_PMU:
		con = readl(&pmucru->pmucru_clksel[2]);
		div = I2C_CLK_DIV_VALUE(con, 0);
		break;
	case SCLK_I2C4_PMU:
		con = readl(&pmucru->pmucru_clksel[3]);
		div = I2C_CLK_DIV_VALUE(con, 4);
		break;
	case SCLK_I2C8_PMU:
		con = readl(&pmucru->pmucru_clksel[2]);
		div = I2C_CLK_DIV_VALUE(con, 8);
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PPLL_HZ, div);
}

static ulong rk3399_i2c_set_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id,
				   uint hz)
{
	int src_clk_div;

	src_clk_div = PPLL_HZ / hz;
	assert(src_clk_div - 1 < 127);

	switch (clk_id) {
	case SCLK_I2C0_PMU:
		rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(0),
			     I2C_PMUCLK_REG_VALUE(0, src_clk_div));
		break;
	case SCLK_I2C4_PMU:
		rk_clrsetreg(&pmucru->pmucru_clksel[3], I2C_PMUCLK_REG_MASK(4),
			     I2C_PMUCLK_REG_VALUE(4, src_clk_div));
		break;
	case SCLK_I2C8_PMU:
		rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(8),
			     I2C_PMUCLK_REG_VALUE(8, src_clk_div));
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PPLL_HZ, src_clk_div);
}

static ulong rk3399_pwm_get_clk(struct rk3399_pmucru *pmucru)
{
	u32 div, con;

	/* the PWM clock rate is the same as pclk_pmu */
	con = readl(&pmucru->pmucru_clksel[0]);
	div = con & PMU_PCLK_DIV_CON_MASK;

	return DIV_TO_RATE(PPLL_HZ, div);
}

static ulong rk3399_pmuclk_get_rate(struct clk *clk)
{
	struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
	ulong rate = 0;

	switch (clk->id) {
	case PLL_PPLL:
		return PPLL_HZ;
	case PCLK_RKPWM_PMU:
		rate = rk3399_pwm_get_clk(priv->pmucru);
		break;
	case SCLK_I2C0_PMU:
	case SCLK_I2C4_PMU:
	case SCLK_I2C8_PMU:
		rate = rk3399_i2c_get_pmuclk(priv->pmucru, clk->id);
		break;
	default:
		return -ENOENT;
	}

	return rate;
}

static ulong rk3399_pmuclk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
	ulong ret = 0;

	switch (clk->id) {
	case PLL_PPLL:
		/*
		 * This has already been set up and we don't want/need
		 * to change it here. Accept the request though, as the
		 * device-tree has this in an 'assigned-clocks' list.
		 */
		return PPLL_HZ;
	case SCLK_I2C0_PMU:
	case SCLK_I2C4_PMU:
	case SCLK_I2C8_PMU:
		ret = rk3399_i2c_set_pmuclk(priv->pmucru, clk->id, rate);
		break;
	default:
		return -ENOENT;
	}

	return ret;
}

static struct clk_ops rk3399_pmuclk_ops = {
	.get_rate = rk3399_pmuclk_get_rate,
	.set_rate = rk3399_pmuclk_set_rate,
};

#ifndef CONFIG_SPL_BUILD
static void pmuclk_init(struct rk3399_pmucru *pmucru)
{
	u32 pclk_div;

	/* configure pmu pll(ppll) */
	rkclk_set_pll(&pmucru->ppll_con[0], &ppll_init_cfg);

	/* configure pmu pclk */
	pclk_div = PPLL_HZ / PMU_PCLK_HZ - 1;
	rk_clrsetreg(&pmucru->pmucru_clksel[0],
		     PMU_PCLK_DIV_CON_MASK,
		     pclk_div << PMU_PCLK_DIV_CON_SHIFT);
}
#endif

static int rk3399_pmuclk_probe(struct udevice *dev)
{
#if CONFIG_IS_ENABLED(OF_PLATDATA) || !defined(CONFIG_SPL_BUILD)
	struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);
#endif

#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_pmuclk_plat *plat = dev_get_platdata(dev);

	priv->pmucru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
#endif

#ifndef CONFIG_SPL_BUILD
	pmuclk_init(priv->pmucru);
#endif
	return 0;
}

static int rk3399_pmuclk_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);

	priv->pmucru = dev_read_addr_ptr(dev);
#endif
	return 0;
}

static int rk3399_pmuclk_bind(struct udevice *dev)
{
#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	int ret;

	ret = offsetof(struct rk3399_pmucru, pmucru_softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 2);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif
	return 0;
}

static const struct udevice_id rk3399_pmuclk_ids[] = {
	{ .compatible = "rockchip,rk3399-pmucru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3399_pmuclk) = {
	.name = "rockchip_rk3399_pmucru",
	.id = UCLASS_CLK,
	.of_match = rk3399_pmuclk_ids,
	.priv_auto_alloc_size = sizeof(struct rk3399_pmuclk_priv),
	.ofdata_to_platdata = rk3399_pmuclk_ofdata_to_platdata,
	.ops = &rk3399_pmuclk_ops,
	.probe = rk3399_pmuclk_probe,
	.bind = rk3399_pmuclk_bind,
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	.platdata_auto_alloc_size = sizeof(struct rk3399_pmuclk_plat),
#endif
};