// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2015 Google, Inc
 * (C) 2017 Theobroma Systems Design und Consulting GmbH
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <mapmem.h>
#include <syscon.h>
#include <bitfield.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru.h>
#include <asm/arch-rockchip/hardware.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3399-cru.h>

#if CONFIG_IS_ENABLED(OF_PLATDATA)
struct rk3399_clk_plat {
	struct dtd_rockchip_rk3399_cru dtd;
};

struct rk3399_pmuclk_plat {
	struct dtd_rockchip_rk3399_pmucru dtd;
};
#endif

struct pll_div {
	u32 refdiv;
	u32 fbdiv;
	u32 postdiv1;
	u32 postdiv2;
	u32 frac;
};

#define RATE_TO_DIV(input_rate, output_rate) \
	((input_rate) / (output_rate) - 1)
#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))
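/*
 * For illustration: a divider field value of 2 means divide-by-3, so
 * DIV_TO_RATE(1200 MHz, 2) = 400 MHz and RATE_TO_DIV(1200 MHz, 400 MHz) = 2.
 */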

#define PLL_DIVISORS(hz, _refdiv, _postdiv1, _postdiv2) {\
	.refdiv = _refdiv,\
	.fbdiv = (u32)((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ),\
	.postdiv1 = _postdiv1, .postdiv2 = _postdiv2};
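/*
 * For example, PLL_DIVISORS(1600 * MHz, 3, 1, 1) (used for apll_l_1600_cfg
 * below) yields fbdiv = 1600 MHz * 3 * 1 * 1 / 24 MHz = 200, i.e. the VCO
 * runs at 24 MHz / 3 * 200 = 1600 MHz with no post-division.
 */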

#if defined(CONFIG_SPL_BUILD)
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2, 1);
static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 2, 2);
#else
static const struct pll_div ppll_init_cfg = PLL_DIVISORS(PPLL_HZ, 2, 2, 1);
#endif

static const struct pll_div apll_l_1600_cfg = PLL_DIVISORS(1600 * MHz, 3, 1, 1);
static const struct pll_div apll_l_600_cfg = PLL_DIVISORS(600 * MHz, 1, 2, 1);

static const struct pll_div *apll_l_cfgs[] = {
	[APLL_L_1600_MHZ] = &apll_l_1600_cfg,
	[APLL_L_600_MHZ] = &apll_l_600_cfg,
};

static const struct pll_div apll_b_600_cfg = PLL_DIVISORS(600 * MHz, 1, 2, 1);
static const struct pll_div *apll_b_cfgs[] = {
	[APLL_B_600_MHZ] = &apll_b_600_cfg,
};

enum {
	/* PLL_CON0 */
	PLL_FBDIV_MASK = 0xfff,
	PLL_FBDIV_SHIFT = 0,

	/* PLL_CON1 */
	PLL_POSTDIV2_SHIFT = 12,
	PLL_POSTDIV2_MASK = 0x7 << PLL_POSTDIV2_SHIFT,
	PLL_POSTDIV1_SHIFT = 8,
	PLL_POSTDIV1_MASK = 0x7 << PLL_POSTDIV1_SHIFT,
	PLL_REFDIV_MASK = 0x3f,
	PLL_REFDIV_SHIFT = 0,

	/* PLL_CON2 */
	PLL_LOCK_STATUS_SHIFT = 31,
	PLL_LOCK_STATUS_MASK = 1 << PLL_LOCK_STATUS_SHIFT,
	PLL_FRACDIV_MASK = 0xffffff,
	PLL_FRACDIV_SHIFT = 0,

	/* PLL_CON3 */
	PLL_MODE_SHIFT = 8,
	PLL_MODE_MASK = 3 << PLL_MODE_SHIFT,
	PLL_MODE_SLOW = 0,
	PLL_MODE_NORM,
	PLL_MODE_DEEP,
	PLL_DSMPD_SHIFT = 3,
	PLL_DSMPD_MASK = 1 << PLL_DSMPD_SHIFT,
	PLL_INTEGER_MODE = 1,

	/* PMUCRU_CLKSEL_CON0 */
	PMU_PCLK_DIV_CON_MASK = 0x1f,
	PMU_PCLK_DIV_CON_SHIFT = 0,

	/* PMUCRU_CLKSEL_CON1 */
	SPI3_PLL_SEL_SHIFT = 7,
	SPI3_PLL_SEL_MASK = 1 << SPI3_PLL_SEL_SHIFT,
	SPI3_PLL_SEL_24M = 0,
	SPI3_PLL_SEL_PPLL = 1,
	SPI3_DIV_CON_SHIFT = 0x0,
	SPI3_DIV_CON_MASK = 0x7f,

	/* PMUCRU_CLKSEL_CON2 */
	I2C_DIV_CON_MASK = 0x7f,
	CLK_I2C8_DIV_CON_SHIFT = 8,
	CLK_I2C0_DIV_CON_SHIFT = 0,

	/* PMUCRU_CLKSEL_CON3 */
	CLK_I2C4_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON0 */
	ACLKM_CORE_L_DIV_CON_SHIFT = 8,
	ACLKM_CORE_L_DIV_CON_MASK = 0x1f << ACLKM_CORE_L_DIV_CON_SHIFT,
	CLK_CORE_L_PLL_SEL_SHIFT = 6,
	CLK_CORE_L_PLL_SEL_MASK = 3 << CLK_CORE_L_PLL_SEL_SHIFT,
	CLK_CORE_L_PLL_SEL_ALPLL = 0x0,
	CLK_CORE_L_PLL_SEL_ABPLL = 0x1,
	CLK_CORE_L_PLL_SEL_DPLL = 0x10,
	CLK_CORE_L_PLL_SEL_GPLL = 0x11,
	CLK_CORE_L_DIV_MASK = 0x1f,
	CLK_CORE_L_DIV_SHIFT = 0,

	/* CLKSEL_CON1 */
	PCLK_DBG_L_DIV_SHIFT = 0x8,
	PCLK_DBG_L_DIV_MASK = 0x1f << PCLK_DBG_L_DIV_SHIFT,
	ATCLK_CORE_L_DIV_SHIFT = 0,
	ATCLK_CORE_L_DIV_MASK = 0x1f << ATCLK_CORE_L_DIV_SHIFT,

	/* CLKSEL_CON2 */
	ACLKM_CORE_B_DIV_CON_SHIFT = 8,
	ACLKM_CORE_B_DIV_CON_MASK = 0x1f << ACLKM_CORE_B_DIV_CON_SHIFT,
	CLK_CORE_B_PLL_SEL_SHIFT = 6,
	CLK_CORE_B_PLL_SEL_MASK = 3 << CLK_CORE_B_PLL_SEL_SHIFT,
	CLK_CORE_B_PLL_SEL_ALPLL = 0x0,
	CLK_CORE_B_PLL_SEL_ABPLL = 0x1,
	CLK_CORE_B_PLL_SEL_DPLL = 0x10,
	CLK_CORE_B_PLL_SEL_GPLL = 0x11,
	CLK_CORE_B_DIV_MASK = 0x1f,
	CLK_CORE_B_DIV_SHIFT = 0,

	/* CLKSEL_CON3 */
	PCLK_DBG_B_DIV_SHIFT = 0x8,
	PCLK_DBG_B_DIV_MASK = 0x1f << PCLK_DBG_B_DIV_SHIFT,
	ATCLK_CORE_B_DIV_SHIFT = 0,
	ATCLK_CORE_B_DIV_MASK = 0x1f << ATCLK_CORE_B_DIV_SHIFT,

	/* CLKSEL_CON14 */
	PCLK_PERIHP_DIV_CON_SHIFT = 12,
	PCLK_PERIHP_DIV_CON_MASK = 0x7 << PCLK_PERIHP_DIV_CON_SHIFT,
	HCLK_PERIHP_DIV_CON_SHIFT = 8,
	HCLK_PERIHP_DIV_CON_MASK = 3 << HCLK_PERIHP_DIV_CON_SHIFT,
	ACLK_PERIHP_PLL_SEL_SHIFT = 7,
	ACLK_PERIHP_PLL_SEL_MASK = 1 << ACLK_PERIHP_PLL_SEL_SHIFT,
	ACLK_PERIHP_PLL_SEL_CPLL = 0,
	ACLK_PERIHP_PLL_SEL_GPLL = 1,
	ACLK_PERIHP_DIV_CON_SHIFT = 0,
	ACLK_PERIHP_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON21 */
	ACLK_EMMC_PLL_SEL_SHIFT = 7,
	ACLK_EMMC_PLL_SEL_MASK = 0x1 << ACLK_EMMC_PLL_SEL_SHIFT,
	ACLK_EMMC_PLL_SEL_GPLL = 0x1,
	ACLK_EMMC_DIV_CON_SHIFT = 0,
	ACLK_EMMC_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON22 */
	CLK_EMMC_PLL_SHIFT = 8,
	CLK_EMMC_PLL_MASK = 0x7 << CLK_EMMC_PLL_SHIFT,
	CLK_EMMC_PLL_SEL_GPLL = 0x1,
	CLK_EMMC_PLL_SEL_24M = 0x5,
	CLK_EMMC_DIV_CON_SHIFT = 0,
	CLK_EMMC_DIV_CON_MASK = 0x7f << CLK_EMMC_DIV_CON_SHIFT,

	/* CLKSEL_CON23 */
	PCLK_PERILP0_DIV_CON_SHIFT = 12,
	PCLK_PERILP0_DIV_CON_MASK = 0x7 << PCLK_PERILP0_DIV_CON_SHIFT,
	HCLK_PERILP0_DIV_CON_SHIFT = 8,
	HCLK_PERILP0_DIV_CON_MASK = 3 << HCLK_PERILP0_DIV_CON_SHIFT,
	ACLK_PERILP0_PLL_SEL_SHIFT = 7,
	ACLK_PERILP0_PLL_SEL_MASK = 1 << ACLK_PERILP0_PLL_SEL_SHIFT,
	ACLK_PERILP0_PLL_SEL_CPLL = 0,
	ACLK_PERILP0_PLL_SEL_GPLL = 1,
	ACLK_PERILP0_DIV_CON_SHIFT = 0,
	ACLK_PERILP0_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON25 */
	PCLK_PERILP1_DIV_CON_SHIFT = 8,
	PCLK_PERILP1_DIV_CON_MASK = 0x7 << PCLK_PERILP1_DIV_CON_SHIFT,
	HCLK_PERILP1_PLL_SEL_SHIFT = 7,
	HCLK_PERILP1_PLL_SEL_MASK = 1 << HCLK_PERILP1_PLL_SEL_SHIFT,
	HCLK_PERILP1_PLL_SEL_CPLL = 0,
	HCLK_PERILP1_PLL_SEL_GPLL = 1,
	HCLK_PERILP1_DIV_CON_SHIFT = 0,
	HCLK_PERILP1_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON26 */
	CLK_SARADC_DIV_CON_SHIFT = 8,
	CLK_SARADC_DIV_CON_MASK = GENMASK(15, 8),
	CLK_SARADC_DIV_CON_WIDTH = 8,

	/* CLKSEL_CON27 */
	CLK_TSADC_SEL_X24M = 0x0,
	CLK_TSADC_SEL_SHIFT = 15,
	CLK_TSADC_SEL_MASK = 1 << CLK_TSADC_SEL_SHIFT,
	CLK_TSADC_DIV_CON_SHIFT = 0,
	CLK_TSADC_DIV_CON_MASK = 0x3ff,

	/* CLKSEL_CON47 & CLKSEL_CON48 */
	ACLK_VOP_PLL_SEL_SHIFT = 6,
	ACLK_VOP_PLL_SEL_MASK = 0x3 << ACLK_VOP_PLL_SEL_SHIFT,
	ACLK_VOP_PLL_SEL_CPLL = 0x1,
	ACLK_VOP_DIV_CON_SHIFT = 0,
	ACLK_VOP_DIV_CON_MASK = 0x1f << ACLK_VOP_DIV_CON_SHIFT,

	/* CLKSEL_CON49 & CLKSEL_CON50 */
	DCLK_VOP_DCLK_SEL_SHIFT = 11,
	DCLK_VOP_DCLK_SEL_MASK = 1 << DCLK_VOP_DCLK_SEL_SHIFT,
	DCLK_VOP_DCLK_SEL_DIVOUT = 0,
	DCLK_VOP_PLL_SEL_SHIFT = 8,
	DCLK_VOP_PLL_SEL_MASK = 3 << DCLK_VOP_PLL_SEL_SHIFT,
	DCLK_VOP_PLL_SEL_VPLL = 0,
	DCLK_VOP_DIV_CON_MASK = 0xff,
	DCLK_VOP_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON58 */
	CLK_SPI_PLL_SEL_WIDTH = 1,
	CLK_SPI_PLL_SEL_MASK = ((1 << CLK_SPI_PLL_SEL_WIDTH) - 1),
	CLK_SPI_PLL_SEL_CPLL = 0,
	CLK_SPI_PLL_SEL_GPLL = 1,
	CLK_SPI_PLL_DIV_CON_WIDTH = 7,
	CLK_SPI_PLL_DIV_CON_MASK = ((1 << CLK_SPI_PLL_DIV_CON_WIDTH) - 1),

	CLK_SPI5_PLL_DIV_CON_SHIFT = 8,
	CLK_SPI5_PLL_SEL_SHIFT = 15,

	/* CLKSEL_CON59 */
	CLK_SPI1_PLL_SEL_SHIFT = 15,
	CLK_SPI1_PLL_DIV_CON_SHIFT = 8,
	CLK_SPI0_PLL_SEL_SHIFT = 7,
	CLK_SPI0_PLL_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON60 */
	CLK_SPI4_PLL_SEL_SHIFT = 15,
	CLK_SPI4_PLL_DIV_CON_SHIFT = 8,
	CLK_SPI2_PLL_SEL_SHIFT = 7,
	CLK_SPI2_PLL_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON61 */
	CLK_I2C_PLL_SEL_MASK = 1,
	CLK_I2C_PLL_SEL_CPLL = 0,
	CLK_I2C_PLL_SEL_GPLL = 1,
	CLK_I2C5_PLL_SEL_SHIFT = 15,
	CLK_I2C5_DIV_CON_SHIFT = 8,
	CLK_I2C1_PLL_SEL_SHIFT = 7,
	CLK_I2C1_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON62 */
	CLK_I2C6_PLL_SEL_SHIFT = 15,
	CLK_I2C6_DIV_CON_SHIFT = 8,
	CLK_I2C2_PLL_SEL_SHIFT = 7,
	CLK_I2C2_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON63 */
	CLK_I2C7_PLL_SEL_SHIFT = 15,
	CLK_I2C7_DIV_CON_SHIFT = 8,
	CLK_I2C3_PLL_SEL_SHIFT = 7,
	CLK_I2C3_DIV_CON_SHIFT = 0,

	/* CRU_SOFTRST_CON4 */
	RESETN_DDR0_REQ_SHIFT = 8,
	RESETN_DDR0_REQ_MASK = 1 << RESETN_DDR0_REQ_SHIFT,
	RESETN_DDRPHY0_REQ_SHIFT = 9,
	RESETN_DDRPHY0_REQ_MASK = 1 << RESETN_DDRPHY0_REQ_SHIFT,
	RESETN_DDR1_REQ_SHIFT = 12,
	RESETN_DDR1_REQ_MASK = 1 << RESETN_DDR1_REQ_SHIFT,
	RESETN_DDRPHY1_REQ_SHIFT = 13,
	RESETN_DDRPHY1_REQ_MASK = 1 << RESETN_DDRPHY1_REQ_SHIFT,
};

#define VCO_MAX_KHZ	(3200 * (MHz / KHz))
#define VCO_MIN_KHZ	(800 * (MHz / KHz))
#define OUTPUT_MAX_KHZ	(3200 * (MHz / KHz))
#define OUTPUT_MIN_KHZ	(16 * (MHz / KHz))

/*
 * The divider restrictions of the PLL in integer mode; these are defined
 * in CRU_*PLL_CON0 or PMUCRU_*PLL_CON0.
 */
#define PLL_DIV_MIN	16
#define PLL_DIV_MAX	3200

/*
 * How to calculate the PLL (from TRM V0.3 Part 1, Page 63).
 * These formulas are also embedded within the Fractional PLL Verilog model.
 * If DSMPD = 1 (DSM is disabled, "integer mode"):
 * FOUTVCO = FREF / REFDIV * FBDIV
 * FOUTPOSTDIV = FOUTVCO / POSTDIV1 / POSTDIV2
 * Where:
 * FOUTVCO = Fractional PLL non-divided output frequency
 * FOUTPOSTDIV = Fractional PLL divided output frequency
 *               (output of the second post divider)
 * FREF = Fractional PLL input reference frequency (the 24 MHz OSC_HZ input)
 * REFDIV = Fractional PLL input reference clock divider
 * FBDIV = Integer value programmed into the feedback divider
 */
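/*
 * For example, apll_l_600_cfg above (refdiv = 1, fbdiv = 50, postdiv1 = 2,
 * postdiv2 = 1) gives FOUTVCO = 24 MHz / 1 * 50 = 1200 MHz and
 * FOUTPOSTDIV = 1200 MHz / 2 / 1 = 600 MHz.
 */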
static void rkclk_set_pll(u32 *pll_con, const struct pll_div *div)
{
	/* All 8 PLLs have same VCO and output frequency range restrictions. */
	u32 vco_khz = OSC_HZ / 1000 * div->fbdiv / div->refdiv;
	u32 output_khz = vco_khz / div->postdiv1 / div->postdiv2;

	debug("PLL at %p: fbdiv=%d, refdiv=%d, postdiv1=%d, "
	      "postdiv2=%d, vco=%u khz, output=%u khz\n",
	      pll_con, div->fbdiv, div->refdiv, div->postdiv1,
	      div->postdiv2, vco_khz, output_khz);
	assert(vco_khz >= VCO_MIN_KHZ && vco_khz <= VCO_MAX_KHZ &&
	       output_khz >= OUTPUT_MIN_KHZ && output_khz <= OUTPUT_MAX_KHZ &&
	       div->fbdiv >= PLL_DIV_MIN && div->fbdiv <= PLL_DIV_MAX);

	/*
	 * When powering on or changing the PLL setting, the PLL must first
	 * be forced into slow mode to ensure a stable output clock.
	 */
	rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
		     PLL_MODE_SLOW << PLL_MODE_SHIFT);

	/* use integer mode */
	rk_clrsetreg(&pll_con[3], PLL_DSMPD_MASK,
		     PLL_INTEGER_MODE << PLL_DSMPD_SHIFT);

	rk_clrsetreg(&pll_con[0], PLL_FBDIV_MASK,
		     div->fbdiv << PLL_FBDIV_SHIFT);
	rk_clrsetreg(&pll_con[1],
		     PLL_POSTDIV2_MASK | PLL_POSTDIV1_MASK |
		     PLL_REFDIV_MASK | PLL_REFDIV_SHIFT,
		     (div->postdiv2 << PLL_POSTDIV2_SHIFT) |
		     (div->postdiv1 << PLL_POSTDIV1_SHIFT) |
		     (div->refdiv << PLL_REFDIV_SHIFT));

	/* wait for the PLL to lock */
	while (!(readl(&pll_con[2]) & (1 << PLL_LOCK_STATUS_SHIFT)))
		udelay(1);

	/* switch the PLL back to normal mode */
	rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
		     PLL_MODE_NORM << PLL_MODE_SHIFT);
}

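/*
 * For illustration, a request for 148.5 MHz (a typical dclk_vop rate) makes
 * the parameter search below pick postdiv1 = 6 and postdiv2 = 1
 * (VCO = 891 MHz) and then settle on refdiv = 8, fbdiv = 297 for an exact
 * match: 24 MHz / 8 * 297 / 6 / 1 = 148.5 MHz.
 */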
static int pll_para_config(u32 freq_hz, struct pll_div *div)
{
	u32 ref_khz = OSC_HZ / KHz, refdiv, fbdiv = 0;
	u32 postdiv1, postdiv2 = 1;
	u32 fref_khz;
	u32 diff_khz, best_diff_khz;
	const u32 max_refdiv = 63, max_fbdiv = 3200, min_fbdiv = 16;
	const u32 max_postdiv1 = 7, max_postdiv2 = 7;
	u32 vco_khz;
	u32 freq_khz = freq_hz / KHz;

	if (!freq_hz) {
		printf("%s: the frequency can't be 0 Hz\n", __func__);
		return -1;
	}

	postdiv1 = DIV_ROUND_UP(VCO_MIN_KHZ, freq_khz);
	if (postdiv1 > max_postdiv1) {
		postdiv2 = DIV_ROUND_UP(postdiv1, max_postdiv1);
		postdiv1 = DIV_ROUND_UP(postdiv1, postdiv2);
	}

	vco_khz = freq_khz * postdiv1 * postdiv2;

	if (vco_khz < VCO_MIN_KHZ || vco_khz > VCO_MAX_KHZ ||
	    postdiv2 > max_postdiv2) {
		printf("%s: cannot find a supported VCO for frequency %u Hz\n",
		       __func__, freq_hz);
		return -1;
	}

	div->postdiv1 = postdiv1;
	div->postdiv2 = postdiv2;

	best_diff_khz = vco_khz;
	for (refdiv = 1; refdiv < max_refdiv && best_diff_khz; refdiv++) {
		fref_khz = ref_khz / refdiv;

		fbdiv = vco_khz / fref_khz;
		if (fbdiv >= max_fbdiv || fbdiv <= min_fbdiv)
			continue;
		diff_khz = vco_khz - fbdiv * fref_khz;
		if (fbdiv + 1 < max_fbdiv && diff_khz > fref_khz / 2) {
			fbdiv++;
			diff_khz = fref_khz - diff_khz;
		}

		if (diff_khz >= best_diff_khz)
			continue;

		best_diff_khz = diff_khz;
		div->refdiv = refdiv;
		div->fbdiv = fbdiv;
	}

	if (best_diff_khz > 4 * (MHz / KHz)) {
		printf("%s: failed to match output frequency %u Hz, difference %u Hz exceeds 4 MHz\n",
		       __func__, freq_hz, best_diff_khz * KHz);
		return -1;
	}
	return 0;
}

void rk3399_configure_cpu_l(struct rockchip_cru *cru,
			    enum apll_l_frequencies apll_l_freq)
{
	u32 aclkm_div;
	u32 pclk_dbg_div;
	u32 atclk_div;

	/* Setup cluster L */
	rkclk_set_pll(&cru->apll_l_con[0], apll_l_cfgs[apll_l_freq]);

	aclkm_div = LPLL_HZ / ACLKM_CORE_L_HZ - 1;
	assert((aclkm_div + 1) * ACLKM_CORE_L_HZ == LPLL_HZ &&
	       aclkm_div < 0x1f);

	pclk_dbg_div = LPLL_HZ / PCLK_DBG_L_HZ - 1;
	assert((pclk_dbg_div + 1) * PCLK_DBG_L_HZ == LPLL_HZ &&
	       pclk_dbg_div < 0x1f);

	atclk_div = LPLL_HZ / ATCLK_CORE_L_HZ - 1;
	assert((atclk_div + 1) * ATCLK_CORE_L_HZ == LPLL_HZ &&
	       atclk_div < 0x1f);

	rk_clrsetreg(&cru->clksel_con[0],
		     ACLKM_CORE_L_DIV_CON_MASK | CLK_CORE_L_PLL_SEL_MASK |
		     CLK_CORE_L_DIV_MASK,
		     aclkm_div << ACLKM_CORE_L_DIV_CON_SHIFT |
		     CLK_CORE_L_PLL_SEL_ALPLL << CLK_CORE_L_PLL_SEL_SHIFT |
		     0 << CLK_CORE_L_DIV_SHIFT);

	rk_clrsetreg(&cru->clksel_con[1],
		     PCLK_DBG_L_DIV_MASK | ATCLK_CORE_L_DIV_MASK,
		     pclk_dbg_div << PCLK_DBG_L_DIV_SHIFT |
		     atclk_div << ATCLK_CORE_L_DIV_SHIFT);
}

void rk3399_configure_cpu_b(struct rockchip_cru *cru,
			    enum apll_b_frequencies apll_b_freq)
{
	u32 aclkm_div;
	u32 pclk_dbg_div;
	u32 atclk_div;

	/* Setup cluster B */
	rkclk_set_pll(&cru->apll_b_con[0], apll_b_cfgs[apll_b_freq]);

	aclkm_div = BPLL_HZ / ACLKM_CORE_B_HZ - 1;
	assert((aclkm_div + 1) * ACLKM_CORE_B_HZ == BPLL_HZ &&
	       aclkm_div < 0x1f);

	pclk_dbg_div = BPLL_HZ / PCLK_DBG_B_HZ - 1;
	assert((pclk_dbg_div + 1) * PCLK_DBG_B_HZ == BPLL_HZ &&
	       pclk_dbg_div < 0x1f);

	atclk_div = BPLL_HZ / ATCLK_CORE_B_HZ - 1;
	assert((atclk_div + 1) * ATCLK_CORE_B_HZ == BPLL_HZ &&
	       atclk_div < 0x1f);

	rk_clrsetreg(&cru->clksel_con[2],
		     ACLKM_CORE_B_DIV_CON_MASK | CLK_CORE_B_PLL_SEL_MASK |
		     CLK_CORE_B_DIV_MASK,
		     aclkm_div << ACLKM_CORE_B_DIV_CON_SHIFT |
		     CLK_CORE_B_PLL_SEL_ABPLL << CLK_CORE_B_PLL_SEL_SHIFT |
		     0 << CLK_CORE_B_DIV_SHIFT);

	rk_clrsetreg(&cru->clksel_con[3],
		     PCLK_DBG_B_DIV_MASK | ATCLK_CORE_B_DIV_MASK,
		     pclk_dbg_div << PCLK_DBG_B_DIV_SHIFT |
		     atclk_div << ATCLK_CORE_B_DIV_SHIFT);
}

#define I2C_CLK_REG_MASK(bus) \
	(I2C_DIV_CON_MASK << CLK_I2C ##bus## _DIV_CON_SHIFT | \
	 CLK_I2C_PLL_SEL_MASK << CLK_I2C ##bus## _PLL_SEL_SHIFT)

#define I2C_CLK_REG_VALUE(bus, clk_div) \
	((clk_div - 1) << CLK_I2C ##bus## _DIV_CON_SHIFT | \
	 CLK_I2C_PLL_SEL_GPLL << CLK_I2C ##bus## _PLL_SEL_SHIFT)

#define I2C_CLK_DIV_VALUE(con, bus) \
	((con >> CLK_I2C ##bus## _DIV_CON_SHIFT) & I2C_DIV_CON_MASK)

#define I2C_PMUCLK_REG_MASK(bus) \
	(I2C_DIV_CON_MASK << CLK_I2C ##bus## _DIV_CON_SHIFT)

#define I2C_PMUCLK_REG_VALUE(bus, clk_div) \
	((clk_div - 1) << CLK_I2C ##bus## _DIV_CON_SHIFT)
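/*
 * For example (illustrative only): I2C_CLK_REG_MASK(1) expands to
 * 0x7f << CLK_I2C1_DIV_CON_SHIFT | 1 << CLK_I2C1_PLL_SEL_SHIFT (i.e. 0xff),
 * and I2C_CLK_REG_VALUE(1, 6) selects GPLL with a divide-by-6, so SCLK_I2C1
 * runs at GPLL_HZ / 6.
 */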

static ulong rk3399_i2c_get_clk(struct rockchip_cru *cru, ulong clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case SCLK_I2C1:
		con = readl(&cru->clksel_con[61]);
		div = I2C_CLK_DIV_VALUE(con, 1);
		break;
	case SCLK_I2C2:
		con = readl(&cru->clksel_con[62]);
		div = I2C_CLK_DIV_VALUE(con, 2);
		break;
	case SCLK_I2C3:
		con = readl(&cru->clksel_con[63]);
		div = I2C_CLK_DIV_VALUE(con, 3);
		break;
	case SCLK_I2C5:
		con = readl(&cru->clksel_con[61]);
		div = I2C_CLK_DIV_VALUE(con, 5);
		break;
	case SCLK_I2C6:
		con = readl(&cru->clksel_con[62]);
		div = I2C_CLK_DIV_VALUE(con, 6);
		break;
	case SCLK_I2C7:
		con = readl(&cru->clksel_con[63]);
		div = I2C_CLK_DIV_VALUE(con, 7);
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3399_i2c_set_clk(struct rockchip_cru *cru, ulong clk_id, uint hz)
{
	int src_clk_div;

	/* i2c0, 4 and 8 are clocked from PPLL; i2c1, 2, 3, 5, 6 and 7 from GPLL */
	src_clk_div = GPLL_HZ / hz;
	assert(src_clk_div - 1 < 127);

	switch (clk_id) {
	case SCLK_I2C1:
		rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(1),
			     I2C_CLK_REG_VALUE(1, src_clk_div));
		break;
	case SCLK_I2C2:
		rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(2),
			     I2C_CLK_REG_VALUE(2, src_clk_div));
		break;
	case SCLK_I2C3:
		rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(3),
			     I2C_CLK_REG_VALUE(3, src_clk_div));
		break;
	case SCLK_I2C5:
		rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(5),
			     I2C_CLK_REG_VALUE(5, src_clk_div));
		break;
	case SCLK_I2C6:
		rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(6),
			     I2C_CLK_REG_VALUE(6, src_clk_div));
		break;
	case SCLK_I2C7:
		rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(7),
			     I2C_CLK_REG_VALUE(7, src_clk_div));
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return rk3399_i2c_get_clk(cru, clk_id);
}

/*
 * RK3399 SPI clocks have a common divider-width (7 bits) and a single bit
 * to select either CPLL or GPLL as the clock-parent. The locations within
 * the enclosing CLKSEL_CON (i.e. div_shift and sel_shift) are variable.
 */

struct spi_clkreg {
	u8 reg;  /* CLKSEL_CON[reg] register in CRU */
	u8 div_shift;
	u8 sel_shift;
};

/*
 * The entries are numbered relative to their offset from SCLK_SPI0.
 *
 * Note that SCLK_SPI3 (which is configured via PMUCRU and requires
 * different logic) is not supported.
 */
static const struct spi_clkreg spi_clkregs[] = {
	[0] = { .reg = 59,
		.div_shift = CLK_SPI0_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI0_PLL_SEL_SHIFT, },
	[1] = { .reg = 59,
		.div_shift = CLK_SPI1_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI1_PLL_SEL_SHIFT, },
	[2] = { .reg = 60,
		.div_shift = CLK_SPI2_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI2_PLL_SEL_SHIFT, },
	[3] = { .reg = 60,
		.div_shift = CLK_SPI4_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI4_PLL_SEL_SHIFT, },
	[4] = { .reg = 58,
		.div_shift = CLK_SPI5_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI5_PLL_SEL_SHIFT, },
};

static ulong rk3399_spi_get_clk(struct rockchip_cru *cru, ulong clk_id)
{
	const struct spi_clkreg *spiclk = NULL;
	u32 div, val;

	switch (clk_id) {
	case SCLK_SPI0 ... SCLK_SPI5:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	val = readl(&cru->clksel_con[spiclk->reg]);
	div = bitfield_extract(val, spiclk->div_shift,
			       CLK_SPI_PLL_DIV_CON_WIDTH);

	return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3399_spi_set_clk(struct rockchip_cru *cru, ulong clk_id, uint hz)
{
	const struct spi_clkreg *spiclk = NULL;
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(GPLL_HZ, hz) - 1;
	assert(src_clk_div < 128);
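	/*
	 * For example (assuming GPLL_HZ = 594 MHz, purely for illustration):
	 * hz = 99 MHz gives src_clk_div = 5, i.e. 594 MHz / (5 + 1) = 99 MHz.
	 */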

	switch (clk_id) {
	case SCLK_SPI1 ... SCLK_SPI5:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	rk_clrsetreg(&cru->clksel_con[spiclk->reg],
		     ((CLK_SPI_PLL_DIV_CON_MASK << spiclk->div_shift) |
		      (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)),
		     ((src_clk_div << spiclk->div_shift) |
		      (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)));

	return rk3399_spi_get_clk(cru, clk_id);
}

static ulong rk3399_vop_set_clk(struct rockchip_cru *cru, ulong clk_id, u32 hz)
{
	struct pll_div vpll_config = {0};
	int aclk_vop = 198 * MHz;
	void *aclkreg_addr, *dclkreg_addr;
	u32 div;

	switch (clk_id) {
	case DCLK_VOP0:
		aclkreg_addr = &cru->clksel_con[47];
		dclkreg_addr = &cru->clksel_con[49];
		break;
	case DCLK_VOP1:
		aclkreg_addr = &cru->clksel_con[48];
		dclkreg_addr = &cru->clksel_con[50];
		break;
	default:
		return -EINVAL;
	}
	/* vop aclk source clk: cpll */
	div = CPLL_HZ / aclk_vop;
	assert(div - 1 < 32);

	rk_clrsetreg(aclkreg_addr,
		     ACLK_VOP_PLL_SEL_MASK | ACLK_VOP_DIV_CON_MASK,
		     ACLK_VOP_PLL_SEL_CPLL << ACLK_VOP_PLL_SEL_SHIFT |
		     (div - 1) << ACLK_VOP_DIV_CON_SHIFT);

	/* vop dclk is sourced from vpll and runs at the vpll rate (div == 1) */
	if (pll_para_config(hz, &vpll_config))
		return -1;

	rkclk_set_pll(&cru->vpll_con[0], &vpll_config);

	rk_clrsetreg(dclkreg_addr,
		     DCLK_VOP_DCLK_SEL_MASK | DCLK_VOP_PLL_SEL_MASK |
		     DCLK_VOP_DIV_CON_MASK,
		     DCLK_VOP_DCLK_SEL_DIVOUT << DCLK_VOP_DCLK_SEL_SHIFT |
		     DCLK_VOP_PLL_SEL_VPLL << DCLK_VOP_PLL_SEL_SHIFT |
		     (1 - 1) << DCLK_VOP_DIV_CON_SHIFT);

	return hz;
}

static ulong rk3399_mmc_get_clk(struct rockchip_cru *cru, uint clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		con = readl(&cru->clksel_con[16]);
		/* the dwmmc controller has an internal divide-by-2 */
		div = 2;
		break;
	case SCLK_EMMC:
		con = readl(&cru->clksel_con[21]);
		div = 1;
		break;
	default:
		return -EINVAL;
	}

	div *= (con & CLK_EMMC_DIV_CON_MASK) >> CLK_EMMC_DIV_CON_SHIFT;
	if ((con & CLK_EMMC_PLL_MASK) >> CLK_EMMC_PLL_SHIFT
	    == CLK_EMMC_PLL_SEL_24M)
		return DIV_TO_RATE(OSC_HZ, div);
	else
		return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3399_mmc_set_clk(struct rockchip_cru *cru,
				ulong clk_id, ulong set_rate)
{
	int src_clk_div;
	int aclk_emmc = 198 * MHz;

	switch (clk_id) {
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		/* Select clk_sdmmc source from GPLL by default */
		/* the mmc clock has an internal div 2, so provide double the rate in the CRU */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ / 2, set_rate);
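		/*
		 * For example (assuming GPLL_HZ = 594 MHz, illustration only):
		 * set_rate = 50 MHz gives src_clk_div = 6, so the card sees
		 * 594 MHz / 6 / 2 = 49.5 MHz after the internal divider.
		 */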

		if (src_clk_div > 128) {
			/* use 24MHz source for 400KHz clock */
			src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, set_rate);
			assert(src_clk_div - 1 < 128);
			rk_clrsetreg(&cru->clksel_con[16],
				     CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
				     CLK_EMMC_PLL_SEL_24M << CLK_EMMC_PLL_SHIFT |
				     (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
		} else {
			rk_clrsetreg(&cru->clksel_con[16],
				     CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
				     CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
				     (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
		}
		break;
	case SCLK_EMMC:
		/* Select aclk_emmc source from GPLL */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ, aclk_emmc);
		assert(src_clk_div - 1 < 32);

		rk_clrsetreg(&cru->clksel_con[21],
			     ACLK_EMMC_PLL_SEL_MASK | ACLK_EMMC_DIV_CON_MASK,
			     ACLK_EMMC_PLL_SEL_GPLL << ACLK_EMMC_PLL_SEL_SHIFT |
			     (src_clk_div - 1) << ACLK_EMMC_DIV_CON_SHIFT);

		/* Select clk_emmc source from GPLL too */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ, set_rate);
		assert(src_clk_div - 1 < 128);

		rk_clrsetreg(&cru->clksel_con[22],
			     CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
			     CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
			     (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
		break;
	default:
		return -EINVAL;
	}
	return rk3399_mmc_get_clk(cru, clk_id);
}

static ulong rk3399_gmac_set_clk(struct rockchip_cru *cru, ulong rate)
{
	ulong ret;

	/*
	 * The RGMII CLK can be derived either from an external "clkin"
	 * or can be generated internally by a divider from SCLK_MAC.
	 */
	if (readl(&cru->clksel_con[19]) & BIT(4)) {
		/* An external clock will always generate the right rate... */
		ret = rate;
	} else {
		/*
		 * No platform uses an internal clock to date.
		 * Implement this once it becomes necessary and print an error
		 * if someone tries to use it (while it remains unimplemented).
		 */
		pr_err("%s: internal clock is UNIMPLEMENTED\n", __func__);
		ret = 0;
	}

	return ret;
}

#define PMUSGRF_DDR_RGN_CON16 0xff330040
static ulong rk3399_ddr_set_clk(struct rockchip_cru *cru,
				ulong set_rate)
{
	struct pll_div dpll_cfg;

	/* IC ECO bug, need to set this register */
	writel(0xc000c000, PMUSGRF_DDR_RGN_CON16);

	/* clk_ddrc == DPLL = 24MHz / refdiv * fbdiv / postdiv1 / postdiv2 */
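	/* e.g. the 800 MHz entry below: 24 MHz / 1 * 100 / 3 / 1 = 800 MHz */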
	switch (set_rate) {
	case 50 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 12, .postdiv1 = 3, .postdiv2 = 2};
		break;
	case 200 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 50, .postdiv1 = 6, .postdiv2 = 1};
		break;
	case 300 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 2, .fbdiv = 100, .postdiv1 = 4, .postdiv2 = 1};
		break;
	case 400 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 50, .postdiv1 = 3, .postdiv2 = 1};
		break;
	case 666 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 2, .fbdiv = 111, .postdiv1 = 2, .postdiv2 = 1};
		break;
	case 800 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 100, .postdiv1 = 3, .postdiv2 = 1};
		break;
	case 933 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 116, .postdiv1 = 3, .postdiv2 = 1};
		break;
	default:
		pr_err("Unsupported SDRAM frequency %ld!\n", set_rate);
	}
	rkclk_set_pll(&cru->dpll_con[0], &dpll_cfg);

	return set_rate;
}

static ulong rk3399_saradc_get_clk(struct rockchip_cru *cru)
{
	u32 div, val;

	val = readl(&cru->clksel_con[26]);
	div = bitfield_extract(val, CLK_SARADC_DIV_CON_SHIFT,
			       CLK_SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

static ulong rk3399_saradc_set_clk(struct rockchip_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);
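	/* e.g. hz = 1 MHz gives src_clk_div = 23, i.e. 24 MHz / (23 + 1) = 1 MHz */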

	rk_clrsetreg(&cru->clksel_con[26],
		     CLK_SARADC_DIV_CON_MASK,
		     src_clk_div << CLK_SARADC_DIV_CON_SHIFT);

	return rk3399_saradc_get_clk(cru);
}

static ulong rk3399_clk_get_rate(struct clk *clk)
{
	struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
	ulong rate = 0;

	switch (clk->id) {
	case 0 ... 63:
		return 0;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
	case SCLK_EMMC:
		rate = rk3399_mmc_get_clk(priv->cru, clk->id);
		break;
	case SCLK_I2C1:
	case SCLK_I2C2:
	case SCLK_I2C3:
	case SCLK_I2C5:
	case SCLK_I2C6:
	case SCLK_I2C7:
		rate = rk3399_i2c_get_clk(priv->cru, clk->id);
		break;
	case SCLK_SPI0...SCLK_SPI5:
		rate = rk3399_spi_get_clk(priv->cru, clk->id);
		break;
	case SCLK_UART0:
	case SCLK_UART1:
	case SCLK_UART2:
	case SCLK_UART3:
		return 24000000;
	case PCLK_HDMI_CTRL:
		break;
	case DCLK_VOP0:
	case DCLK_VOP1:
		break;
	case PCLK_EFUSE1024NS:
		break;
	case SCLK_SARADC:
		rate = rk3399_saradc_get_clk(priv->cru);
		break;
	case ACLK_VIO:
	case ACLK_HDCP:
	case ACLK_GIC_PRE:
	case PCLK_DDR:
		break;
	default:
		log_debug("Unknown clock %lu\n", clk->id);
		return -ENOENT;
	}

	return rate;
}

static ulong rk3399_clk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
	ulong ret = 0;

	switch (clk->id) {
	case 0 ... 63:
		return 0;

	case ACLK_PERIHP:
	case HCLK_PERIHP:
	case PCLK_PERIHP:
		return 0;

	case ACLK_PERILP0:
	case HCLK_PERILP0:
	case PCLK_PERILP0:
		return 0;

	case ACLK_CCI:
		return 0;

	case HCLK_PERILP1:
	case PCLK_PERILP1:
		return 0;

	case HCLK_SDMMC:
	case SCLK_SDMMC:
	case SCLK_EMMC:
		ret = rk3399_mmc_set_clk(priv->cru, clk->id, rate);
		break;
	case SCLK_MAC:
		ret = rk3399_gmac_set_clk(priv->cru, rate);
		break;
	case SCLK_I2C1:
	case SCLK_I2C2:
	case SCLK_I2C3:
	case SCLK_I2C5:
	case SCLK_I2C6:
	case SCLK_I2C7:
		ret = rk3399_i2c_set_clk(priv->cru, clk->id, rate);
		break;
	case SCLK_SPI0...SCLK_SPI5:
		ret = rk3399_spi_set_clk(priv->cru, clk->id, rate);
		break;
	case PCLK_HDMI_CTRL:
	case PCLK_VIO_GRF:
		/* the PCLK gates for video are enabled by default */
		break;
	case DCLK_VOP0:
	case DCLK_VOP1:
		ret = rk3399_vop_set_clk(priv->cru, clk->id, rate);
		break;
	case SCLK_DDRCLK:
		ret = rk3399_ddr_set_clk(priv->cru, rate);
		break;
	case PCLK_EFUSE1024NS:
		break;
	case SCLK_SARADC:
		ret = rk3399_saradc_set_clk(priv->cru, rate);
		break;
	case ACLK_VIO:
	case ACLK_HDCP:
	case ACLK_GIC_PRE:
	case PCLK_DDR:
		return 0;
	default:
		log_debug("Unknown clock %lu\n", clk->id);
		return -ENOENT;
	}

	return ret;
}

static int __maybe_unused rk3399_gmac_set_parent(struct clk *clk,
						 struct clk *parent)
{
	struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
	const char *clock_output_name;
	int ret;

	/*
	 * If the requested parent is in the same clock-controller and
	 * the id is SCLK_MAC ("clk_gmac"), switch to the internal clock.
	 */
	if (parent->dev == clk->dev && parent->id == SCLK_MAC) {
		debug("%s: switching RGMII to SCLK_MAC\n", __func__);
		rk_clrreg(&priv->cru->clksel_con[19], BIT(4));
		return 0;
	}

	/*
	 * Otherwise, we need to check the clock-output-names of the
	 * requested parent to see if the requested id is "clkin_gmac".
	 */
	ret = dev_read_string_index(parent->dev, "clock-output-names",
				    parent->id, &clock_output_name);
	if (ret < 0)
		return -ENODATA;

	/* If this is "clkin_gmac", switch to the external clock input */
	if (!strcmp(clock_output_name, "clkin_gmac")) {
		debug("%s: switching RGMII to CLKIN\n", __func__);
		rk_setreg(&priv->cru->clksel_con[19], BIT(4));
		return 0;
	}

	return -EINVAL;
}

static int __maybe_unused rk3399_clk_set_parent(struct clk *clk,
						struct clk *parent)
{
	switch (clk->id) {
	case SCLK_RMII_SRC:
		return rk3399_gmac_set_parent(clk, parent);
	}

	debug("%s: unsupported clk %ld\n", __func__, clk->id);
	return -ENOENT;
}

static struct clk_ops rk3399_clk_ops = {
	.get_rate = rk3399_clk_get_rate,
	.set_rate = rk3399_clk_set_rate,
#if CONFIG_IS_ENABLED(OF_CONTROL) && !CONFIG_IS_ENABLED(OF_PLATDATA)
	.set_parent = rk3399_clk_set_parent,
#endif
};

#ifdef CONFIG_SPL_BUILD
static void rkclk_init(struct rockchip_cru *cru)
{
	u32 aclk_div;
	u32 hclk_div;
	u32 pclk_div;

	rk3399_configure_cpu_l(cru, APLL_L_600_MHZ);
	rk3399_configure_cpu_b(cru, APLL_B_600_MHZ);
	/*
	 * Some CRU registers are changed by the bootrom; reset them back to
	 * the reset/default values described in the TRM to avoid confusing
	 * the kernel. Consider these three writes a fix for the bootrom bug.
	 */
	rk_clrsetreg(&cru->clksel_con[12], 0xffff, 0x4101);
	rk_clrsetreg(&cru->clksel_con[19], 0xffff, 0x033f);
	rk_clrsetreg(&cru->clksel_con[56], 0x0003, 0x0003);

	/* configure gpll cpll */
	rkclk_set_pll(&cru->gpll_con[0], &gpll_init_cfg);
	rkclk_set_pll(&cru->cpll_con[0], &cpll_init_cfg);

	/* configure perihp aclk, hclk, pclk */
	aclk_div = GPLL_HZ / PERIHP_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERIHP_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = PERIHP_ACLK_HZ / PERIHP_HCLK_HZ - 1;
	assert((hclk_div + 1) * PERIHP_HCLK_HZ ==
	       PERIHP_ACLK_HZ && (hclk_div < 0x4));

	pclk_div = PERIHP_ACLK_HZ / PERIHP_PCLK_HZ - 1;
	assert((pclk_div + 1) * PERIHP_PCLK_HZ ==
	       PERIHP_ACLK_HZ && (pclk_div < 0x7));

	rk_clrsetreg(&cru->clksel_con[14],
		     PCLK_PERIHP_DIV_CON_MASK | HCLK_PERIHP_DIV_CON_MASK |
		     ACLK_PERIHP_PLL_SEL_MASK | ACLK_PERIHP_DIV_CON_MASK,
		     pclk_div << PCLK_PERIHP_DIV_CON_SHIFT |
		     hclk_div << HCLK_PERIHP_DIV_CON_SHIFT |
		     ACLK_PERIHP_PLL_SEL_GPLL << ACLK_PERIHP_PLL_SEL_SHIFT |
		     aclk_div << ACLK_PERIHP_DIV_CON_SHIFT);

	/* configure perilp0 aclk, hclk, pclk */
	aclk_div = GPLL_HZ / PERILP0_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERILP0_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = PERILP0_ACLK_HZ / PERILP0_HCLK_HZ - 1;
	assert((hclk_div + 1) * PERILP0_HCLK_HZ ==
	       PERILP0_ACLK_HZ && (hclk_div < 0x4));

	pclk_div = PERILP0_ACLK_HZ / PERILP0_PCLK_HZ - 1;
	assert((pclk_div + 1) * PERILP0_PCLK_HZ ==
	       PERILP0_ACLK_HZ && (pclk_div < 0x7));

	rk_clrsetreg(&cru->clksel_con[23],
		     PCLK_PERILP0_DIV_CON_MASK | HCLK_PERILP0_DIV_CON_MASK |
		     ACLK_PERILP0_PLL_SEL_MASK | ACLK_PERILP0_DIV_CON_MASK,
		     pclk_div << PCLK_PERILP0_DIV_CON_SHIFT |
		     hclk_div << HCLK_PERILP0_DIV_CON_SHIFT |
		     ACLK_PERILP0_PLL_SEL_GPLL << ACLK_PERILP0_PLL_SEL_SHIFT |
		     aclk_div << ACLK_PERILP0_DIV_CON_SHIFT);

	/* perilp1 hclk select gpll as source */
	hclk_div = GPLL_HZ / PERILP1_HCLK_HZ - 1;
	assert((hclk_div + 1) * PERILP1_HCLK_HZ ==
	       GPLL_HZ && (hclk_div < 0x1f));

	pclk_div = PERILP1_HCLK_HZ / PERILP1_HCLK_HZ - 1;
	assert((pclk_div + 1) * PERILP1_HCLK_HZ ==
	       PERILP1_HCLK_HZ && (hclk_div < 0x7));

	rk_clrsetreg(&cru->clksel_con[25],
		     PCLK_PERILP1_DIV_CON_MASK | HCLK_PERILP1_DIV_CON_MASK |
		     HCLK_PERILP1_PLL_SEL_MASK,
		     pclk_div << PCLK_PERILP1_DIV_CON_SHIFT |
		     hclk_div << HCLK_PERILP1_DIV_CON_SHIFT |
		     HCLK_PERILP1_PLL_SEL_GPLL << HCLK_PERILP1_PLL_SEL_SHIFT);
}
#endif

static int rk3399_clk_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
	struct rk3399_clk_priv *priv = dev_get_priv(dev);

#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_clk_plat *plat = dev_get_platdata(dev);

	priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
#endif
	rkclk_init(priv->cru);
#endif
	return 0;
}

static int rk3399_clk_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);
#endif
	return 0;
}

static int rk3399_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rockchip_cru,
						    glb_srst_fst_value);
		priv->glb_srst_snd_value = offsetof(struct rockchip_cru,
						    glb_srst_snd_value);
		sys_child->priv = priv;
	}

#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	ret = offsetof(struct rockchip_cru, softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 21);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif

	return 0;
}

static const struct udevice_id rk3399_clk_ids[] = {
	{ .compatible = "rockchip,rk3399-cru" },
	{ }
};

U_BOOT_DRIVER(clk_rk3399) = {
	.name = "rockchip_rk3399_cru",
	.id = UCLASS_CLK,
	.of_match = rk3399_clk_ids,
	.priv_auto_alloc_size = sizeof(struct rk3399_clk_priv),
	.ofdata_to_platdata = rk3399_clk_ofdata_to_platdata,
	.ops = &rk3399_clk_ops,
	.bind = rk3399_clk_bind,
	.probe = rk3399_clk_probe,
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	.platdata_auto_alloc_size = sizeof(struct rk3399_clk_plat),
#endif
};

static ulong rk3399_i2c_get_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case SCLK_I2C0_PMU:
		con = readl(&pmucru->pmucru_clksel[2]);
		div = I2C_CLK_DIV_VALUE(con, 0);
		break;
	case SCLK_I2C4_PMU:
		con = readl(&pmucru->pmucru_clksel[3]);
		div = I2C_CLK_DIV_VALUE(con, 4);
		break;
	case SCLK_I2C8_PMU:
		con = readl(&pmucru->pmucru_clksel[2]);
		div = I2C_CLK_DIV_VALUE(con, 8);
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PPLL_HZ, div);
}

static ulong rk3399_i2c_set_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id,
				   uint hz)
{
	int src_clk_div;

	src_clk_div = PPLL_HZ / hz;
	assert(src_clk_div - 1 < 127);

	switch (clk_id) {
	case SCLK_I2C0_PMU:
		rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(0),
			     I2C_PMUCLK_REG_VALUE(0, src_clk_div));
		break;
	case SCLK_I2C4_PMU:
		rk_clrsetreg(&pmucru->pmucru_clksel[3], I2C_PMUCLK_REG_MASK(4),
			     I2C_PMUCLK_REG_VALUE(4, src_clk_div));
		break;
	case SCLK_I2C8_PMU:
		rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(8),
			     I2C_PMUCLK_REG_VALUE(8, src_clk_div));
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PPLL_HZ, src_clk_div);
}

static ulong rk3399_pwm_get_clk(struct rk3399_pmucru *pmucru)
{
	u32 div, con;

	/* The PWM clock rate is the same as pclk_pmu */
	con = readl(&pmucru->pmucru_clksel[0]);
	div = con & PMU_PCLK_DIV_CON_MASK;

	return DIV_TO_RATE(PPLL_HZ, div);
}

static ulong rk3399_pmuclk_get_rate(struct clk *clk)
{
	struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
	ulong rate = 0;

	switch (clk->id) {
	case PLL_PPLL:
		return PPLL_HZ;
	case PCLK_RKPWM_PMU:
		rate = rk3399_pwm_get_clk(priv->pmucru);
		break;
	case SCLK_I2C0_PMU:
	case SCLK_I2C4_PMU:
	case SCLK_I2C8_PMU:
		rate = rk3399_i2c_get_pmuclk(priv->pmucru, clk->id);
		break;
	default:
		return -ENOENT;
	}

	return rate;
}

static ulong rk3399_pmuclk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
	ulong ret = 0;

	switch (clk->id) {
	case PLL_PPLL:
		/*
		 * This has already been set up and we don't want/need
		 * to change it here. Accept the request though, as the
		 * device-tree has this in an 'assigned-clocks' list.
		 */
		return PPLL_HZ;
	case SCLK_I2C0_PMU:
	case SCLK_I2C4_PMU:
	case SCLK_I2C8_PMU:
		ret = rk3399_i2c_set_pmuclk(priv->pmucru, clk->id, rate);
		break;
	default:
		return -ENOENT;
	}

	return ret;
}

static struct clk_ops rk3399_pmuclk_ops = {
	.get_rate = rk3399_pmuclk_get_rate,
	.set_rate = rk3399_pmuclk_set_rate,
};


#ifndef CONFIG_SPL_BUILD
static void pmuclk_init(struct rk3399_pmucru *pmucru)
{
	u32 pclk_div;

	/* configure pmu pll(ppll) */
	rkclk_set_pll(&pmucru->ppll_con[0], &ppll_init_cfg);

	/* configure pmu pclk */
	pclk_div = PPLL_HZ / PMU_PCLK_HZ - 1;
	rk_clrsetreg(&pmucru->pmucru_clksel[0],
		     PMU_PCLK_DIV_CON_MASK,
		     pclk_div << PMU_PCLK_DIV_CON_SHIFT);
}
#endif
Kever Yang5e79f442016-08-12 17:47:15 +08001360
1361static int rk3399_pmuclk_probe(struct udevice *dev)
1362{
Philipp Tomsich61dff332017-03-24 19:24:24 +01001363#if CONFIG_IS_ENABLED(OF_PLATDATA) || !defined(CONFIG_SPL_BUILD)
Kever Yang5e79f442016-08-12 17:47:15 +08001364 struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);
Philipp Tomsich61dff332017-03-24 19:24:24 +01001365#endif
Kever Yang5e79f442016-08-12 17:47:15 +08001366
Kever Yang5ae2fd92017-02-13 17:38:56 +08001367#if CONFIG_IS_ENABLED(OF_PLATDATA)
1368 struct rk3399_pmuclk_plat *plat = dev_get_platdata(dev);
Kever Yang5e79f442016-08-12 17:47:15 +08001369
Simon Glassc20ee0e2017-08-29 14:15:50 -06001370 priv->pmucru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
Kever Yang5ae2fd92017-02-13 17:38:56 +08001371#endif
1372
1373#ifndef CONFIG_SPL_BUILD
1374 pmuclk_init(priv->pmucru);
1375#endif
Kever Yang5e79f442016-08-12 17:47:15 +08001376 return 0;
1377}
1378
1379static int rk3399_pmuclk_ofdata_to_platdata(struct udevice *dev)
1380{
Kever Yang5ae2fd92017-02-13 17:38:56 +08001381#if !CONFIG_IS_ENABLED(OF_PLATDATA)
Kever Yang5e79f442016-08-12 17:47:15 +08001382 struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);
1383
Philipp Tomsich75c78592017-09-12 17:32:24 +02001384 priv->pmucru = dev_read_addr_ptr(dev);
Kever Yang5ae2fd92017-02-13 17:38:56 +08001385#endif
Kever Yang5e79f442016-08-12 17:47:15 +08001386 return 0;
1387}
1388
static int rk3399_pmuclk_bind(struct udevice *dev)
{
#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	int ret;

	ret = offsetof(struct rk3399_pmucru, pmucru_softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 2);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif
	return 0;
}

static const struct udevice_id rk3399_pmuclk_ids[] = {
	{ .compatible = "rockchip,rk3399-pmucru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3399_pmuclk) = {
	.name = "rockchip_rk3399_pmucru",
	.id = UCLASS_CLK,
	.of_match = rk3399_pmuclk_ids,
	.priv_auto_alloc_size = sizeof(struct rk3399_pmuclk_priv),
	.ofdata_to_platdata = rk3399_pmuclk_ofdata_to_platdata,
	.ops = &rk3399_pmuclk_ops,
	.probe = rk3399_pmuclk_probe,
	.bind = rk3399_pmuclk_bind,
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	.platdata_auto_alloc_size = sizeof(struct rk3399_pmuclk_plat),
#endif
};