Tom Rini83d290c2018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0
Kever Yangb0b3c862016-07-29 10:35:25 +08002/*
3 * (C) Copyright 2015 Google, Inc
Philipp Tomsich8fa69792017-04-20 22:05:49 +02004 * (C) 2017 Theobroma Systems Design und Consulting GmbH
Kever Yangb0b3c862016-07-29 10:35:25 +08005 */
6
7#include <common.h>
8#include <clk-uclass.h>
9#include <dm.h>
Kever Yang5ae2fd92017-02-13 17:38:56 +080010#include <dt-structs.h>
Kever Yangb0b3c862016-07-29 10:35:25 +080011#include <errno.h>
Simon Glassf7ae49f2020-05-10 11:40:05 -060012#include <log.h>
Simon Glass336d4612020-02-03 07:36:16 -070013#include <malloc.h>
Kever Yang5ae2fd92017-02-13 17:38:56 +080014#include <mapmem.h>
Kever Yangb0b3c862016-07-29 10:35:25 +080015#include <syscon.h>
David Wu364fc732017-09-20 14:38:58 +080016#include <bitfield.h>
Kever Yangb0b3c862016-07-29 10:35:25 +080017#include <asm/io.h>
Kever Yang15f09a12019-03-28 11:01:23 +080018#include <asm/arch-rockchip/clock.h>
Jagan Tekib52a1992020-01-09 14:22:17 +053019#include <asm/arch-rockchip/cru.h>
Kever Yang15f09a12019-03-28 11:01:23 +080020#include <asm/arch-rockchip/hardware.h>
Simon Glass0fd3d912020-12-22 19:30:28 -070021#include <dm/device-internal.h>
Kever Yangb0b3c862016-07-29 10:35:25 +080022#include <dm/lists.h>
23#include <dt-bindings/clock/rk3399-cru.h>
Simon Glasscd93d622020-05-10 11:40:13 -060024#include <linux/bitops.h>
Simon Glassc05ed002020-05-10 11:40:11 -060025#include <linux/delay.h>
Kever Yangb0b3c862016-07-29 10:35:25 +080026
Alper Nebi Yasakeb890252020-10-28 00:15:10 +030027DECLARE_GLOBAL_DATA_PTR;
28
Kever Yang5ae2fd92017-02-13 17:38:56 +080029#if CONFIG_IS_ENABLED(OF_PLATDATA)
30struct rk3399_clk_plat {
31 struct dtd_rockchip_rk3399_cru dtd;
Kever Yang5e79f442016-08-12 17:47:15 +080032};
33
Kever Yang5ae2fd92017-02-13 17:38:56 +080034struct rk3399_pmuclk_plat {
35 struct dtd_rockchip_rk3399_pmucru dtd;
36};
37#endif
38
Kever Yangb0b3c862016-07-29 10:35:25 +080039struct pll_div {
40 u32 refdiv;
41 u32 fbdiv;
42 u32 postdiv1;
43 u32 postdiv2;
44 u32 frac;
45};
46
47#define RATE_TO_DIV(input_rate, output_rate) \
Jagan Tekidd7dfa22019-07-15 23:51:10 +053048 ((input_rate) / (output_rate) - 1)
49#define DIV_TO_RATE(input_rate, div) ((input_rate) / ((div) + 1))
Kever Yangb0b3c862016-07-29 10:35:25 +080050
51#define PLL_DIVISORS(hz, _refdiv, _postdiv1, _postdiv2) {\
52 .refdiv = _refdiv,\
53 .fbdiv = (u32)((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ),\
54 .postdiv1 = _postdiv1, .postdiv2 = _postdiv2};
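
/*
 * Illustrative sketch (not part of the driver): how the helper macros above
 * relate divider values and rates.  With OSC_HZ = 24 MHz, the GPLL setting
 * PLL_DIVISORS(GPLL_HZ, 2, 2, 1) below computes
 * fbdiv = GPLL_HZ * 2 * 2 * 1 / OSC_HZ, while DIV_TO_RATE()/RATE_TO_DIV()
 * are the usual "register value is divider minus one" conversions.
 */
#if 0	/* example only, never built */
	u32 div = RATE_TO_DIV(GPLL_HZ, GPLL_HZ / 4);	/* -> 3 */
	ulong rate = DIV_TO_RATE(GPLL_HZ, div);		/* -> GPLL_HZ / 4 */
#endif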
55
56static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2, 1);
57static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 2, 2);
Alper Nebi Yasakeb890252020-10-28 00:15:10 +030058#if !defined(CONFIG_SPL_BUILD)
Kever Yangb0b3c862016-07-29 10:35:25 +080059static const struct pll_div ppll_init_cfg = PLL_DIVISORS(PPLL_HZ, 2, 2, 1);
Philipp Tomsich61dff332017-03-24 19:24:24 +010060#endif
Kever Yangb0b3c862016-07-29 10:35:25 +080061
Jagan Tekidd7dfa22019-07-15 23:51:10 +053062static const struct pll_div apll_l_1600_cfg = PLL_DIVISORS(1600 * MHz, 3, 1, 1);
63static const struct pll_div apll_l_600_cfg = PLL_DIVISORS(600 * MHz, 1, 2, 1);
Kever Yangb0b3c862016-07-29 10:35:25 +080064
65static const struct pll_div *apll_l_cfgs[] = {
66 [APLL_L_1600_MHZ] = &apll_l_1600_cfg,
67 [APLL_L_600_MHZ] = &apll_l_600_cfg,
68};
69
Jagan Tekidd7dfa22019-07-15 23:51:10 +053070static const struct pll_div apll_b_600_cfg = PLL_DIVISORS(600 * MHz, 1, 2, 1);
Christoph Muellneraf765a42018-11-30 20:32:48 +010071static const struct pll_div *apll_b_cfgs[] = {
72 [APLL_B_600_MHZ] = &apll_b_600_cfg,
73};
74
Kever Yangb0b3c862016-07-29 10:35:25 +080075enum {
76 /* PLL_CON0 */
77 PLL_FBDIV_MASK = 0xfff,
78 PLL_FBDIV_SHIFT = 0,
79
80 /* PLL_CON1 */
81 PLL_POSTDIV2_SHIFT = 12,
82 PLL_POSTDIV2_MASK = 0x7 << PLL_POSTDIV2_SHIFT,
83 PLL_POSTDIV1_SHIFT = 8,
84 PLL_POSTDIV1_MASK = 0x7 << PLL_POSTDIV1_SHIFT,
85 PLL_REFDIV_MASK = 0x3f,
86 PLL_REFDIV_SHIFT = 0,
87
88 /* PLL_CON2 */
89 PLL_LOCK_STATUS_SHIFT = 31,
90 PLL_LOCK_STATUS_MASK = 1 << PLL_LOCK_STATUS_SHIFT,
91 PLL_FRACDIV_MASK = 0xffffff,
92 PLL_FRACDIV_SHIFT = 0,
93
94 /* PLL_CON3 */
95 PLL_MODE_SHIFT = 8,
96 PLL_MODE_MASK = 3 << PLL_MODE_SHIFT,
97 PLL_MODE_SLOW = 0,
98 PLL_MODE_NORM,
99 PLL_MODE_DEEP,
100 PLL_DSMPD_SHIFT = 3,
101 PLL_DSMPD_MASK = 1 << PLL_DSMPD_SHIFT,
102 PLL_INTEGER_MODE = 1,
103
104 /* PMUCRU_CLKSEL_CON0 */
105 PMU_PCLK_DIV_CON_MASK = 0x1f,
106 PMU_PCLK_DIV_CON_SHIFT = 0,
107
108 /* PMUCRU_CLKSEL_CON1 */
109 SPI3_PLL_SEL_SHIFT = 7,
110 SPI3_PLL_SEL_MASK = 1 << SPI3_PLL_SEL_SHIFT,
111 SPI3_PLL_SEL_24M = 0,
112 SPI3_PLL_SEL_PPLL = 1,
113 SPI3_DIV_CON_SHIFT = 0x0,
114 SPI3_DIV_CON_MASK = 0x7f,
115
116 /* PMUCRU_CLKSEL_CON2 */
117 I2C_DIV_CON_MASK = 0x7f,
Kever Yang5e79f442016-08-12 17:47:15 +0800118 CLK_I2C8_DIV_CON_SHIFT = 8,
119 CLK_I2C0_DIV_CON_SHIFT = 0,
Kever Yangb0b3c862016-07-29 10:35:25 +0800120
121 /* PMUCRU_CLKSEL_CON3 */
Kever Yang5e79f442016-08-12 17:47:15 +0800122 CLK_I2C4_DIV_CON_SHIFT = 0,
Kever Yangb0b3c862016-07-29 10:35:25 +0800123
124 /* CLKSEL_CON0 */
125 ACLKM_CORE_L_DIV_CON_SHIFT = 8,
126 ACLKM_CORE_L_DIV_CON_MASK = 0x1f << ACLKM_CORE_L_DIV_CON_SHIFT,
127 CLK_CORE_L_PLL_SEL_SHIFT = 6,
128 CLK_CORE_L_PLL_SEL_MASK = 3 << CLK_CORE_L_PLL_SEL_SHIFT,
129 CLK_CORE_L_PLL_SEL_ALPLL = 0x0,
130 CLK_CORE_L_PLL_SEL_ABPLL = 0x1,
 131	CLK_CORE_L_PLL_SEL_DPLL		= 0x2,
 132	CLK_CORE_L_PLL_SEL_GPLL		= 0x3,
133 CLK_CORE_L_DIV_MASK = 0x1f,
134 CLK_CORE_L_DIV_SHIFT = 0,
135
136 /* CLKSEL_CON1 */
137 PCLK_DBG_L_DIV_SHIFT = 0x8,
138 PCLK_DBG_L_DIV_MASK = 0x1f << PCLK_DBG_L_DIV_SHIFT,
139 ATCLK_CORE_L_DIV_SHIFT = 0,
140 ATCLK_CORE_L_DIV_MASK = 0x1f << ATCLK_CORE_L_DIV_SHIFT,
141
Christoph Muellneraf765a42018-11-30 20:32:48 +0100142 /* CLKSEL_CON2 */
143 ACLKM_CORE_B_DIV_CON_SHIFT = 8,
144 ACLKM_CORE_B_DIV_CON_MASK = 0x1f << ACLKM_CORE_B_DIV_CON_SHIFT,
145 CLK_CORE_B_PLL_SEL_SHIFT = 6,
146 CLK_CORE_B_PLL_SEL_MASK = 3 << CLK_CORE_B_PLL_SEL_SHIFT,
147 CLK_CORE_B_PLL_SEL_ALPLL = 0x0,
148 CLK_CORE_B_PLL_SEL_ABPLL = 0x1,
 149	CLK_CORE_B_PLL_SEL_DPLL		= 0x2,
 150	CLK_CORE_B_PLL_SEL_GPLL		= 0x3,
151 CLK_CORE_B_DIV_MASK = 0x1f,
152 CLK_CORE_B_DIV_SHIFT = 0,
153
154 /* CLKSEL_CON3 */
155 PCLK_DBG_B_DIV_SHIFT = 0x8,
156 PCLK_DBG_B_DIV_MASK = 0x1f << PCLK_DBG_B_DIV_SHIFT,
157 ATCLK_CORE_B_DIV_SHIFT = 0,
158 ATCLK_CORE_B_DIV_MASK = 0x1f << ATCLK_CORE_B_DIV_SHIFT,
159
Kever Yangb0b3c862016-07-29 10:35:25 +0800160 /* CLKSEL_CON14 */
161 PCLK_PERIHP_DIV_CON_SHIFT = 12,
162 PCLK_PERIHP_DIV_CON_MASK = 0x7 << PCLK_PERIHP_DIV_CON_SHIFT,
163 HCLK_PERIHP_DIV_CON_SHIFT = 8,
164 HCLK_PERIHP_DIV_CON_MASK = 3 << HCLK_PERIHP_DIV_CON_SHIFT,
165 ACLK_PERIHP_PLL_SEL_SHIFT = 7,
166 ACLK_PERIHP_PLL_SEL_MASK = 1 << ACLK_PERIHP_PLL_SEL_SHIFT,
167 ACLK_PERIHP_PLL_SEL_CPLL = 0,
168 ACLK_PERIHP_PLL_SEL_GPLL = 1,
169 ACLK_PERIHP_DIV_CON_SHIFT = 0,
170 ACLK_PERIHP_DIV_CON_MASK = 0x1f,
171
172 /* CLKSEL_CON21 */
173 ACLK_EMMC_PLL_SEL_SHIFT = 7,
174 ACLK_EMMC_PLL_SEL_MASK = 0x1 << ACLK_EMMC_PLL_SEL_SHIFT,
175 ACLK_EMMC_PLL_SEL_GPLL = 0x1,
176 ACLK_EMMC_DIV_CON_SHIFT = 0,
177 ACLK_EMMC_DIV_CON_MASK = 0x1f,
178
179 /* CLKSEL_CON22 */
180 CLK_EMMC_PLL_SHIFT = 8,
181 CLK_EMMC_PLL_MASK = 0x7 << CLK_EMMC_PLL_SHIFT,
182 CLK_EMMC_PLL_SEL_GPLL = 0x1,
Kever Yangfd4b2dc2016-08-04 11:44:58 +0800183 CLK_EMMC_PLL_SEL_24M = 0x5,
Kever Yangb0b3c862016-07-29 10:35:25 +0800184 CLK_EMMC_DIV_CON_SHIFT = 0,
185 CLK_EMMC_DIV_CON_MASK = 0x7f << CLK_EMMC_DIV_CON_SHIFT,
186
187 /* CLKSEL_CON23 */
188 PCLK_PERILP0_DIV_CON_SHIFT = 12,
189 PCLK_PERILP0_DIV_CON_MASK = 0x7 << PCLK_PERILP0_DIV_CON_SHIFT,
190 HCLK_PERILP0_DIV_CON_SHIFT = 8,
191 HCLK_PERILP0_DIV_CON_MASK = 3 << HCLK_PERILP0_DIV_CON_SHIFT,
192 ACLK_PERILP0_PLL_SEL_SHIFT = 7,
193 ACLK_PERILP0_PLL_SEL_MASK = 1 << ACLK_PERILP0_PLL_SEL_SHIFT,
194 ACLK_PERILP0_PLL_SEL_CPLL = 0,
195 ACLK_PERILP0_PLL_SEL_GPLL = 1,
196 ACLK_PERILP0_DIV_CON_SHIFT = 0,
197 ACLK_PERILP0_DIV_CON_MASK = 0x1f,
198
199 /* CLKSEL_CON25 */
200 PCLK_PERILP1_DIV_CON_SHIFT = 8,
201 PCLK_PERILP1_DIV_CON_MASK = 0x7 << PCLK_PERILP1_DIV_CON_SHIFT,
202 HCLK_PERILP1_PLL_SEL_SHIFT = 7,
203 HCLK_PERILP1_PLL_SEL_MASK = 1 << HCLK_PERILP1_PLL_SEL_SHIFT,
204 HCLK_PERILP1_PLL_SEL_CPLL = 0,
205 HCLK_PERILP1_PLL_SEL_GPLL = 1,
206 HCLK_PERILP1_DIV_CON_SHIFT = 0,
207 HCLK_PERILP1_DIV_CON_MASK = 0x1f,
208
209 /* CLKSEL_CON26 */
210 CLK_SARADC_DIV_CON_SHIFT = 8,
David Wu364fc732017-09-20 14:38:58 +0800211 CLK_SARADC_DIV_CON_MASK = GENMASK(15, 8),
212 CLK_SARADC_DIV_CON_WIDTH = 8,
Kever Yangb0b3c862016-07-29 10:35:25 +0800213
214 /* CLKSEL_CON27 */
215 CLK_TSADC_SEL_X24M = 0x0,
216 CLK_TSADC_SEL_SHIFT = 15,
217 CLK_TSADC_SEL_MASK = 1 << CLK_TSADC_SEL_SHIFT,
218 CLK_TSADC_DIV_CON_SHIFT = 0,
219 CLK_TSADC_DIV_CON_MASK = 0x3ff,
220
221 /* CLKSEL_CON47 & CLKSEL_CON48 */
222 ACLK_VOP_PLL_SEL_SHIFT = 6,
223 ACLK_VOP_PLL_SEL_MASK = 0x3 << ACLK_VOP_PLL_SEL_SHIFT,
224 ACLK_VOP_PLL_SEL_CPLL = 0x1,
225 ACLK_VOP_DIV_CON_SHIFT = 0,
226 ACLK_VOP_DIV_CON_MASK = 0x1f << ACLK_VOP_DIV_CON_SHIFT,
227
228 /* CLKSEL_CON49 & CLKSEL_CON50 */
229 DCLK_VOP_DCLK_SEL_SHIFT = 11,
230 DCLK_VOP_DCLK_SEL_MASK = 1 << DCLK_VOP_DCLK_SEL_SHIFT,
231 DCLK_VOP_DCLK_SEL_DIVOUT = 0,
232 DCLK_VOP_PLL_SEL_SHIFT = 8,
233 DCLK_VOP_PLL_SEL_MASK = 3 << DCLK_VOP_PLL_SEL_SHIFT,
234 DCLK_VOP_PLL_SEL_VPLL = 0,
235 DCLK_VOP_DIV_CON_MASK = 0xff,
236 DCLK_VOP_DIV_CON_SHIFT = 0,
237
Jack Mitchellda0be4e2020-09-17 10:42:06 +0100238 /* CLKSEL_CON57 */
239 PCLK_ALIVE_DIV_CON_SHIFT = 0,
240 PCLK_ALIVE_DIV_CON_MASK = 0x1f << PCLK_ALIVE_DIV_CON_SHIFT,
241
Kever Yangb0b3c862016-07-29 10:35:25 +0800242 /* CLKSEL_CON58 */
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200243 CLK_SPI_PLL_SEL_WIDTH = 1,
 244	CLK_SPI_PLL_SEL_MASK		= ((1 << CLK_SPI_PLL_SEL_WIDTH) - 1),
245 CLK_SPI_PLL_SEL_CPLL = 0,
246 CLK_SPI_PLL_SEL_GPLL = 1,
247 CLK_SPI_PLL_DIV_CON_WIDTH = 7,
248 CLK_SPI_PLL_DIV_CON_MASK = ((1 << CLK_SPI_PLL_DIV_CON_WIDTH) - 1),
249
250 CLK_SPI5_PLL_DIV_CON_SHIFT = 8,
251 CLK_SPI5_PLL_SEL_SHIFT = 15,
Kever Yangb0b3c862016-07-29 10:35:25 +0800252
253 /* CLKSEL_CON59 */
254 CLK_SPI1_PLL_SEL_SHIFT = 15,
255 CLK_SPI1_PLL_DIV_CON_SHIFT = 8,
256 CLK_SPI0_PLL_SEL_SHIFT = 7,
257 CLK_SPI0_PLL_DIV_CON_SHIFT = 0,
258
259 /* CLKSEL_CON60 */
260 CLK_SPI4_PLL_SEL_SHIFT = 15,
261 CLK_SPI4_PLL_DIV_CON_SHIFT = 8,
262 CLK_SPI2_PLL_SEL_SHIFT = 7,
263 CLK_SPI2_PLL_DIV_CON_SHIFT = 0,
264
265 /* CLKSEL_CON61 */
266 CLK_I2C_PLL_SEL_MASK = 1,
267 CLK_I2C_PLL_SEL_CPLL = 0,
268 CLK_I2C_PLL_SEL_GPLL = 1,
269 CLK_I2C5_PLL_SEL_SHIFT = 15,
270 CLK_I2C5_DIV_CON_SHIFT = 8,
271 CLK_I2C1_PLL_SEL_SHIFT = 7,
272 CLK_I2C1_DIV_CON_SHIFT = 0,
273
274 /* CLKSEL_CON62 */
275 CLK_I2C6_PLL_SEL_SHIFT = 15,
276 CLK_I2C6_DIV_CON_SHIFT = 8,
277 CLK_I2C2_PLL_SEL_SHIFT = 7,
278 CLK_I2C2_DIV_CON_SHIFT = 0,
279
280 /* CLKSEL_CON63 */
281 CLK_I2C7_PLL_SEL_SHIFT = 15,
282 CLK_I2C7_DIV_CON_SHIFT = 8,
283 CLK_I2C3_PLL_SEL_SHIFT = 7,
284 CLK_I2C3_DIV_CON_SHIFT = 0,
285
286 /* CRU_SOFTRST_CON4 */
287 RESETN_DDR0_REQ_SHIFT = 8,
288 RESETN_DDR0_REQ_MASK = 1 << RESETN_DDR0_REQ_SHIFT,
289 RESETN_DDRPHY0_REQ_SHIFT = 9,
290 RESETN_DDRPHY0_REQ_MASK = 1 << RESETN_DDRPHY0_REQ_SHIFT,
291 RESETN_DDR1_REQ_SHIFT = 12,
292 RESETN_DDR1_REQ_MASK = 1 << RESETN_DDR1_REQ_SHIFT,
293 RESETN_DDRPHY1_REQ_SHIFT = 13,
294 RESETN_DDRPHY1_REQ_MASK = 1 << RESETN_DDRPHY1_REQ_SHIFT,
295};
296
297#define VCO_MAX_KHZ (3200 * (MHz / KHz))
298#define VCO_MIN_KHZ (800 * (MHz / KHz))
299#define OUTPUT_MAX_KHZ (3200 * (MHz / KHz))
300#define OUTPUT_MIN_KHZ (16 * (MHz / KHz))
301
302/*
 303 * the divider restrictions of the PLLs in integer mode; these are defined in
 304 * CRU_*PLL_CON0 or PMUCRU_*PLL_CON0
305 */
306#define PLL_DIV_MIN 16
307#define PLL_DIV_MAX 3200
308
309/*
 310 * How to calculate the PLL (from TRM V0.3 Part 1 Page 63):
311 * Formulas also embedded within the Fractional PLL Verilog model:
312 * If DSMPD = 1 (DSM is disabled, "integer mode")
313 * FOUTVCO = FREF / REFDIV * FBDIV
314 * FOUTPOSTDIV = FOUTVCO / POSTDIV1 / POSTDIV2
315 * Where:
316 * FOUTVCO = Fractional PLL non-divided output frequency
317 * FOUTPOSTDIV = Fractional PLL divided output frequency
318 * (output of second post divider)
319 * FREF = Fractional PLL input reference frequency, (the OSC_HZ 24MHz input)
320 * REFDIV = Fractional PLL input reference clock divider
321 * FBDIV = Integer value programmed into feedback divide
322 *
323 */
324static void rkclk_set_pll(u32 *pll_con, const struct pll_div *div)
325{
326 /* All 8 PLLs have same VCO and output frequency range restrictions. */
327 u32 vco_khz = OSC_HZ / 1000 * div->fbdiv / div->refdiv;
328 u32 output_khz = vco_khz / div->postdiv1 / div->postdiv2;
329
330 debug("PLL at %p: fbdiv=%d, refdiv=%d, postdiv1=%d, "
331 "postdiv2=%d, vco=%u khz, output=%u khz\n",
332 pll_con, div->fbdiv, div->refdiv, div->postdiv1,
333 div->postdiv2, vco_khz, output_khz);
334 assert(vco_khz >= VCO_MIN_KHZ && vco_khz <= VCO_MAX_KHZ &&
335 output_khz >= OUTPUT_MIN_KHZ && output_khz <= OUTPUT_MAX_KHZ &&
336 div->fbdiv >= PLL_DIV_MIN && div->fbdiv <= PLL_DIV_MAX);
337
338 /*
 339	 * When powering on or changing the PLL setting,
 340	 * we must force the PLL into slow mode to ensure a stable output clock.
341 */
342 rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
343 PLL_MODE_SLOW << PLL_MODE_SHIFT);
344
345 /* use integer mode */
346 rk_clrsetreg(&pll_con[3], PLL_DSMPD_MASK,
347 PLL_INTEGER_MODE << PLL_DSMPD_SHIFT);
348
349 rk_clrsetreg(&pll_con[0], PLL_FBDIV_MASK,
350 div->fbdiv << PLL_FBDIV_SHIFT);
351 rk_clrsetreg(&pll_con[1],
352 PLL_POSTDIV2_MASK | PLL_POSTDIV1_MASK |
 353		     PLL_REFDIV_MASK,
354 (div->postdiv2 << PLL_POSTDIV2_SHIFT) |
355 (div->postdiv1 << PLL_POSTDIV1_SHIFT) |
356 (div->refdiv << PLL_REFDIV_SHIFT));
357
 358	/* wait for the PLL to lock */
359 while (!(readl(&pll_con[2]) & (1 << PLL_LOCK_STATUS_SHIFT)))
360 udelay(1);
361
362 /* pll enter normal mode */
363 rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
364 PLL_MODE_NORM << PLL_MODE_SHIFT);
365}
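
/*
 * Worked example (illustration only): the 800 MHz DPLL entry used by
 * rk3399_ddr_set_clk() below programs refdiv = 1, fbdiv = 100, postdiv1 = 3,
 * postdiv2 = 1.  Applying the integer-mode formula from the comment above:
 *
 *   FOUTVCO     = 24 MHz / 1 * 100 = 2400 MHz  (within the 800..3200 MHz range)
 *   FOUTPOSTDIV = 2400 MHz / 3 / 1 =  800 MHz
 *
 * which also satisfies the VCO/output/fbdiv assertions in rkclk_set_pll().
 */
#if 0	/* example only, never built */
	const struct pll_div dpll_800 = { .refdiv = 1, .fbdiv = 100,
					  .postdiv1 = 3, .postdiv2 = 1 };

	rkclk_set_pll(&cru->dpll_con[0], &dpll_800);	/* 'cru' assumed in scope */
#endif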
366
367static int pll_para_config(u32 freq_hz, struct pll_div *div)
368{
369 u32 ref_khz = OSC_HZ / KHz, refdiv, fbdiv = 0;
370 u32 postdiv1, postdiv2 = 1;
371 u32 fref_khz;
372 u32 diff_khz, best_diff_khz;
373 const u32 max_refdiv = 63, max_fbdiv = 3200, min_fbdiv = 16;
374 const u32 max_postdiv1 = 7, max_postdiv2 = 7;
375 u32 vco_khz;
376 u32 freq_khz = freq_hz / KHz;
377
378 if (!freq_hz) {
379 printf("%s: the frequency can't be 0 Hz\n", __func__);
380 return -1;
381 }
382
383 postdiv1 = DIV_ROUND_UP(VCO_MIN_KHZ, freq_khz);
384 if (postdiv1 > max_postdiv1) {
385 postdiv2 = DIV_ROUND_UP(postdiv1, max_postdiv1);
386 postdiv1 = DIV_ROUND_UP(postdiv1, postdiv2);
387 }
388
389 vco_khz = freq_khz * postdiv1 * postdiv2;
390
391 if (vco_khz < VCO_MIN_KHZ || vco_khz > VCO_MAX_KHZ ||
392 postdiv2 > max_postdiv2) {
 393		printf("%s: cannot find a supported VCO"
 394		       " for frequency %u Hz\n", __func__, freq_hz);
395 return -1;
396 }
397
398 div->postdiv1 = postdiv1;
399 div->postdiv2 = postdiv2;
400
401 best_diff_khz = vco_khz;
402 for (refdiv = 1; refdiv < max_refdiv && best_diff_khz; refdiv++) {
403 fref_khz = ref_khz / refdiv;
404
405 fbdiv = vco_khz / fref_khz;
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530406 if (fbdiv >= max_fbdiv || fbdiv <= min_fbdiv)
Kever Yangb0b3c862016-07-29 10:35:25 +0800407 continue;
408 diff_khz = vco_khz - fbdiv * fref_khz;
409 if (fbdiv + 1 < max_fbdiv && diff_khz > fref_khz / 2) {
410 fbdiv++;
411 diff_khz = fref_khz - diff_khz;
412 }
413
414 if (diff_khz >= best_diff_khz)
415 continue;
416
417 best_diff_khz = diff_khz;
418 div->refdiv = refdiv;
419 div->fbdiv = fbdiv;
420 }
421
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530422 if (best_diff_khz > 4 * (MHz / KHz)) {
Kever Yangb0b3c862016-07-29 10:35:25 +0800423 printf("%s: Failed to match output frequency %u, "
 424		       "difference is %u Hz, which exceeds 4 MHz\n", __func__, freq_hz,
425 best_diff_khz * KHz);
426 return -1;
427 }
428 return 0;
429}
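
/*
 * Usage sketch (mirrors what rk3399_vop_set_clk() does further down): pick
 * integer dividers for an arbitrary target rate, then program a PLL with
 * them.  The 148.5 MHz value is just an example pixel clock.
 */
#if 0	/* example only, never built */
	struct pll_div cfg = {0};

	if (!pll_para_config(148500000, &cfg))
		rkclk_set_pll(&cru->vpll_con[0], &cfg);	/* 'cru' assumed in scope */
#endif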
430
Jagan Tekib52a1992020-01-09 14:22:17 +0530431void rk3399_configure_cpu_l(struct rockchip_cru *cru,
Christoph Muellneraf765a42018-11-30 20:32:48 +0100432 enum apll_l_frequencies apll_l_freq)
Kever Yangb0b3c862016-07-29 10:35:25 +0800433{
434 u32 aclkm_div;
435 u32 pclk_dbg_div;
436 u32 atclk_div;
437
Christoph Muellneraf765a42018-11-30 20:32:48 +0100438 /* Setup cluster L */
Kever Yangb0b3c862016-07-29 10:35:25 +0800439 rkclk_set_pll(&cru->apll_l_con[0], apll_l_cfgs[apll_l_freq]);
440
Christoph Muellneraf765a42018-11-30 20:32:48 +0100441 aclkm_div = LPLL_HZ / ACLKM_CORE_L_HZ - 1;
442 assert((aclkm_div + 1) * ACLKM_CORE_L_HZ == LPLL_HZ &&
Kever Yangb0b3c862016-07-29 10:35:25 +0800443 aclkm_div < 0x1f);
444
Christoph Muellneraf765a42018-11-30 20:32:48 +0100445 pclk_dbg_div = LPLL_HZ / PCLK_DBG_L_HZ - 1;
446 assert((pclk_dbg_div + 1) * PCLK_DBG_L_HZ == LPLL_HZ &&
Kever Yangb0b3c862016-07-29 10:35:25 +0800447 pclk_dbg_div < 0x1f);
448
Christoph Muellneraf765a42018-11-30 20:32:48 +0100449 atclk_div = LPLL_HZ / ATCLK_CORE_L_HZ - 1;
450 assert((atclk_div + 1) * ATCLK_CORE_L_HZ == LPLL_HZ &&
Kever Yangb0b3c862016-07-29 10:35:25 +0800451 atclk_div < 0x1f);
452
453 rk_clrsetreg(&cru->clksel_con[0],
454 ACLKM_CORE_L_DIV_CON_MASK | CLK_CORE_L_PLL_SEL_MASK |
455 CLK_CORE_L_DIV_MASK,
456 aclkm_div << ACLKM_CORE_L_DIV_CON_SHIFT |
457 CLK_CORE_L_PLL_SEL_ALPLL << CLK_CORE_L_PLL_SEL_SHIFT |
458 0 << CLK_CORE_L_DIV_SHIFT);
459
460 rk_clrsetreg(&cru->clksel_con[1],
461 PCLK_DBG_L_DIV_MASK | ATCLK_CORE_L_DIV_MASK,
462 pclk_dbg_div << PCLK_DBG_L_DIV_SHIFT |
463 atclk_div << ATCLK_CORE_L_DIV_SHIFT);
464}
Christoph Muellneraf765a42018-11-30 20:32:48 +0100465
Jagan Tekib52a1992020-01-09 14:22:17 +0530466void rk3399_configure_cpu_b(struct rockchip_cru *cru,
Christoph Muellneraf765a42018-11-30 20:32:48 +0100467 enum apll_b_frequencies apll_b_freq)
468{
469 u32 aclkm_div;
470 u32 pclk_dbg_div;
471 u32 atclk_div;
472
473 /* Setup cluster B */
474 rkclk_set_pll(&cru->apll_b_con[0], apll_b_cfgs[apll_b_freq]);
475
476 aclkm_div = BPLL_HZ / ACLKM_CORE_B_HZ - 1;
477 assert((aclkm_div + 1) * ACLKM_CORE_B_HZ == BPLL_HZ &&
478 aclkm_div < 0x1f);
479
480 pclk_dbg_div = BPLL_HZ / PCLK_DBG_B_HZ - 1;
481 assert((pclk_dbg_div + 1) * PCLK_DBG_B_HZ == BPLL_HZ &&
482 pclk_dbg_div < 0x1f);
483
484 atclk_div = BPLL_HZ / ATCLK_CORE_B_HZ - 1;
485 assert((atclk_div + 1) * ATCLK_CORE_B_HZ == BPLL_HZ &&
486 atclk_div < 0x1f);
487
488 rk_clrsetreg(&cru->clksel_con[2],
489 ACLKM_CORE_B_DIV_CON_MASK | CLK_CORE_B_PLL_SEL_MASK |
490 CLK_CORE_B_DIV_MASK,
491 aclkm_div << ACLKM_CORE_B_DIV_CON_SHIFT |
492 CLK_CORE_B_PLL_SEL_ABPLL << CLK_CORE_B_PLL_SEL_SHIFT |
493 0 << CLK_CORE_B_DIV_SHIFT);
494
495 rk_clrsetreg(&cru->clksel_con[3],
496 PCLK_DBG_B_DIV_MASK | ATCLK_CORE_B_DIV_MASK,
497 pclk_dbg_div << PCLK_DBG_B_DIV_SHIFT |
498 atclk_div << ATCLK_CORE_B_DIV_SHIFT);
499}
500
Kever Yangb0b3c862016-07-29 10:35:25 +0800501#define I2C_CLK_REG_MASK(bus) \
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530502 (I2C_DIV_CON_MASK << CLK_I2C ##bus## _DIV_CON_SHIFT | \
503 CLK_I2C_PLL_SEL_MASK << CLK_I2C ##bus## _PLL_SEL_SHIFT)
Kever Yangb0b3c862016-07-29 10:35:25 +0800504
505#define I2C_CLK_REG_VALUE(bus, clk_div) \
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530506 ((clk_div - 1) << CLK_I2C ##bus## _DIV_CON_SHIFT | \
507 CLK_I2C_PLL_SEL_GPLL << CLK_I2C ##bus## _PLL_SEL_SHIFT)
Kever Yangb0b3c862016-07-29 10:35:25 +0800508
509#define I2C_CLK_DIV_VALUE(con, bus) \
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530510 ((con >> CLK_I2C ##bus## _DIV_CON_SHIFT) & I2C_DIV_CON_MASK)
Kever Yangb0b3c862016-07-29 10:35:25 +0800511
Kever Yang5e79f442016-08-12 17:47:15 +0800512#define I2C_PMUCLK_REG_MASK(bus) \
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530513 (I2C_DIV_CON_MASK << CLK_I2C ##bus## _DIV_CON_SHIFT)
Kever Yang5e79f442016-08-12 17:47:15 +0800514
515#define I2C_PMUCLK_REG_VALUE(bus, clk_div) \
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530516 ((clk_div - 1) << CLK_I2C ##bus## _DIV_CON_SHIFT)
Kever Yang5e79f442016-08-12 17:47:15 +0800517
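
/*
 * Token-pasting example (illustration only): for bus 5 and clk_div = 4,
 * I2C_CLK_REG_MASK(5) expands to
 *   (I2C_DIV_CON_MASK << CLK_I2C5_DIV_CON_SHIFT |
 *    CLK_I2C_PLL_SEL_MASK << CLK_I2C5_PLL_SEL_SHIFT)
 * and I2C_CLK_REG_VALUE(5, 4) programs divider value 3 (divide-by-4) with
 * GPLL as the parent, exactly as rk3399_i2c_set_clk() uses it below.
 */
#if 0	/* example only, never built */
	rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(5),
		     I2C_CLK_REG_VALUE(5, 4));	/* 'cru' assumed in scope */
#endif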
Jagan Tekib52a1992020-01-09 14:22:17 +0530518static ulong rk3399_i2c_get_clk(struct rockchip_cru *cru, ulong clk_id)
Kever Yangb0b3c862016-07-29 10:35:25 +0800519{
520 u32 div, con;
521
522 switch (clk_id) {
523 case SCLK_I2C1:
524 con = readl(&cru->clksel_con[61]);
525 div = I2C_CLK_DIV_VALUE(con, 1);
526 break;
527 case SCLK_I2C2:
528 con = readl(&cru->clksel_con[62]);
529 div = I2C_CLK_DIV_VALUE(con, 2);
530 break;
531 case SCLK_I2C3:
532 con = readl(&cru->clksel_con[63]);
533 div = I2C_CLK_DIV_VALUE(con, 3);
534 break;
535 case SCLK_I2C5:
536 con = readl(&cru->clksel_con[61]);
537 div = I2C_CLK_DIV_VALUE(con, 5);
538 break;
539 case SCLK_I2C6:
540 con = readl(&cru->clksel_con[62]);
541 div = I2C_CLK_DIV_VALUE(con, 6);
542 break;
543 case SCLK_I2C7:
544 con = readl(&cru->clksel_con[63]);
545 div = I2C_CLK_DIV_VALUE(con, 7);
546 break;
547 default:
548 printf("do not support this i2c bus\n");
549 return -EINVAL;
550 }
551
552 return DIV_TO_RATE(GPLL_HZ, div);
553}
554
Jagan Tekib52a1992020-01-09 14:22:17 +0530555static ulong rk3399_i2c_set_clk(struct rockchip_cru *cru, ulong clk_id, uint hz)
Kever Yangb0b3c862016-07-29 10:35:25 +0800556{
557 int src_clk_div;
558
559 /* i2c0,4,8 src clock from ppll, i2c1,2,3,5,6,7 src clock from gpll*/
560 src_clk_div = GPLL_HZ / hz;
561 assert(src_clk_div - 1 < 127);
562
563 switch (clk_id) {
564 case SCLK_I2C1:
565 rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(1),
566 I2C_CLK_REG_VALUE(1, src_clk_div));
567 break;
568 case SCLK_I2C2:
569 rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(2),
570 I2C_CLK_REG_VALUE(2, src_clk_div));
571 break;
572 case SCLK_I2C3:
573 rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(3),
574 I2C_CLK_REG_VALUE(3, src_clk_div));
575 break;
576 case SCLK_I2C5:
577 rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(5),
578 I2C_CLK_REG_VALUE(5, src_clk_div));
579 break;
580 case SCLK_I2C6:
581 rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(6),
582 I2C_CLK_REG_VALUE(6, src_clk_div));
583 break;
584 case SCLK_I2C7:
585 rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(7),
586 I2C_CLK_REG_VALUE(7, src_clk_div));
587 break;
588 default:
589 printf("do not support this i2c bus\n");
590 return -EINVAL;
591 }
592
Philipp Tomsichbeb90a52017-04-20 22:05:50 +0200593 return rk3399_i2c_get_clk(cru, clk_id);
Kever Yangb0b3c862016-07-29 10:35:25 +0800594}
595
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200596/*
597 * RK3399 SPI clocks have a common divider-width (7 bits) and a single bit
 598 * to select either CPLL or GPLL as the clock-parent. The locations within
 599 * the enclosing CLKSEL_CON (i.e. div_shift and sel_shift) are variable.
600 */
601
602struct spi_clkreg {
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530603 u8 reg; /* CLKSEL_CON[reg] register in CRU */
604 u8 div_shift;
605 u8 sel_shift;
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200606};
607
608/*
609 * The entries are numbered relative to their offset from SCLK_SPI0.
610 *
 611 * Note that SCLK_SPI3 (which is configured via PMUCRU and requires
 612 * different logic) is not supported.
613 */
614static const struct spi_clkreg spi_clkregs[] = {
615 [0] = { .reg = 59,
616 .div_shift = CLK_SPI0_PLL_DIV_CON_SHIFT,
617 .sel_shift = CLK_SPI0_PLL_SEL_SHIFT, },
618 [1] = { .reg = 59,
619 .div_shift = CLK_SPI1_PLL_DIV_CON_SHIFT,
620 .sel_shift = CLK_SPI1_PLL_SEL_SHIFT, },
621 [2] = { .reg = 60,
622 .div_shift = CLK_SPI2_PLL_DIV_CON_SHIFT,
623 .sel_shift = CLK_SPI2_PLL_SEL_SHIFT, },
624 [3] = { .reg = 60,
625 .div_shift = CLK_SPI4_PLL_DIV_CON_SHIFT,
626 .sel_shift = CLK_SPI4_PLL_SEL_SHIFT, },
627 [4] = { .reg = 58,
628 .div_shift = CLK_SPI5_PLL_DIV_CON_SHIFT,
629 .sel_shift = CLK_SPI5_PLL_SEL_SHIFT, },
630};
631
Jagan Tekib52a1992020-01-09 14:22:17 +0530632static ulong rk3399_spi_get_clk(struct rockchip_cru *cru, ulong clk_id)
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200633{
634 const struct spi_clkreg *spiclk = NULL;
635 u32 div, val;
636
637 switch (clk_id) {
638 case SCLK_SPI0 ... SCLK_SPI5:
639 spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
640 break;
641
642 default:
Masahiro Yamada9b643e32017-09-16 14:10:41 +0900643 pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200644 return -EINVAL;
645 }
646
647 val = readl(&cru->clksel_con[spiclk->reg]);
Philipp Tomsicha8ee98d2017-11-22 19:45:04 +0100648 div = bitfield_extract(val, spiclk->div_shift,
649 CLK_SPI_PLL_DIV_CON_WIDTH);
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200650
651 return DIV_TO_RATE(GPLL_HZ, div);
652}
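
/*
 * For reference (an assumption about the helper, see include/bitfield.h):
 * bitfield_extract(val, shift, width) is expected to behave like
 * (val >> shift) & GENMASK(width - 1, 0), so for CLKSEL_CON59 the SPI0
 * divider read above is simply bits [6:0] of the register.
 */
#if 0	/* example only, never built */
	u32 val = readl(&cru->clksel_con[59]);		/* 'cru' assumed in scope */
	u32 div = (val >> CLK_SPI0_PLL_DIV_CON_SHIFT) &
		  CLK_SPI_PLL_DIV_CON_MASK;		/* same result as above */
#endif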
653
Jagan Tekib52a1992020-01-09 14:22:17 +0530654static ulong rk3399_spi_set_clk(struct rockchip_cru *cru, ulong clk_id, uint hz)
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200655{
656 const struct spi_clkreg *spiclk = NULL;
657 int src_clk_div;
658
Kever Yang217273c2017-07-27 12:54:02 +0800659 src_clk_div = DIV_ROUND_UP(GPLL_HZ, hz) - 1;
660 assert(src_clk_div < 128);
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200661
662 switch (clk_id) {
663 case SCLK_SPI1 ... SCLK_SPI5:
664 spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
665 break;
666
667 default:
Masahiro Yamada9b643e32017-09-16 14:10:41 +0900668 pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200669 return -EINVAL;
670 }
671
672 rk_clrsetreg(&cru->clksel_con[spiclk->reg],
673 ((CLK_SPI_PLL_DIV_CON_MASK << spiclk->div_shift) |
674 (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)),
675 ((src_clk_div << spiclk->div_shift) |
676 (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)));
677
Philipp Tomsichbeb90a52017-04-20 22:05:50 +0200678 return rk3399_spi_get_clk(cru, clk_id);
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200679}
680
Jagan Tekib52a1992020-01-09 14:22:17 +0530681static ulong rk3399_vop_set_clk(struct rockchip_cru *cru, ulong clk_id, u32 hz)
Kever Yangb0b3c862016-07-29 10:35:25 +0800682{
683 struct pll_div vpll_config = {0};
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530684 int aclk_vop = 198 * MHz;
Kever Yangb0b3c862016-07-29 10:35:25 +0800685 void *aclkreg_addr, *dclkreg_addr;
686 u32 div;
687
688 switch (clk_id) {
689 case DCLK_VOP0:
690 aclkreg_addr = &cru->clksel_con[47];
691 dclkreg_addr = &cru->clksel_con[49];
692 break;
693 case DCLK_VOP1:
694 aclkreg_addr = &cru->clksel_con[48];
695 dclkreg_addr = &cru->clksel_con[50];
696 break;
697 default:
698 return -EINVAL;
699 }
700 /* vop aclk source clk: cpll */
701 div = CPLL_HZ / aclk_vop;
702 assert(div - 1 < 32);
703
704 rk_clrsetreg(aclkreg_addr,
705 ACLK_VOP_PLL_SEL_MASK | ACLK_VOP_DIV_CON_MASK,
706 ACLK_VOP_PLL_SEL_CPLL << ACLK_VOP_PLL_SEL_SHIFT |
707 (div - 1) << ACLK_VOP_DIV_CON_SHIFT);
708
 709	/* the vop dclk is sourced from vpll and equals the vpll rate (div == 1) */
710 if (pll_para_config(hz, &vpll_config))
711 return -1;
712
713 rkclk_set_pll(&cru->vpll_con[0], &vpll_config);
714
715 rk_clrsetreg(dclkreg_addr,
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530716 DCLK_VOP_DCLK_SEL_MASK | DCLK_VOP_PLL_SEL_MASK |
Kever Yangb0b3c862016-07-29 10:35:25 +0800717 DCLK_VOP_DIV_CON_MASK,
718 DCLK_VOP_DCLK_SEL_DIVOUT << DCLK_VOP_DCLK_SEL_SHIFT |
719 DCLK_VOP_PLL_SEL_VPLL << DCLK_VOP_PLL_SEL_SHIFT |
720 (1 - 1) << DCLK_VOP_DIV_CON_SHIFT);
721
722 return hz;
723}
724
Jagan Tekib52a1992020-01-09 14:22:17 +0530725static ulong rk3399_mmc_get_clk(struct rockchip_cru *cru, uint clk_id)
Kever Yangb0b3c862016-07-29 10:35:25 +0800726{
727 u32 div, con;
728
729 switch (clk_id) {
Philipp Tomsich998c61a2017-04-25 09:52:06 +0200730 case HCLK_SDMMC:
Kever Yangb0b3c862016-07-29 10:35:25 +0800731 case SCLK_SDMMC:
732 con = readl(&cru->clksel_con[16]);
Kever Yang3a94d752017-07-27 12:54:01 +0800733		/* the dwmmc controller has an internal divide-by-2 */
734 div = 2;
Kever Yangb0b3c862016-07-29 10:35:25 +0800735 break;
736 case SCLK_EMMC:
Jagan Teki46481082020-05-24 22:13:15 +0530737 con = readl(&cru->clksel_con[22]);
Kever Yang3a94d752017-07-27 12:54:01 +0800738 div = 1;
Kever Yangb0b3c862016-07-29 10:35:25 +0800739 break;
740 default:
741 return -EINVAL;
742 }
Kever Yangb0b3c862016-07-29 10:35:25 +0800743
Kever Yang3a94d752017-07-27 12:54:01 +0800744 div *= (con & CLK_EMMC_DIV_CON_MASK) >> CLK_EMMC_DIV_CON_SHIFT;
Kever Yangfd4b2dc2016-08-04 11:44:58 +0800745 if ((con & CLK_EMMC_PLL_MASK) >> CLK_EMMC_PLL_SHIFT
746 == CLK_EMMC_PLL_SEL_24M)
Kever Yang3a94d752017-07-27 12:54:01 +0800747 return DIV_TO_RATE(OSC_HZ, div);
Kever Yangfd4b2dc2016-08-04 11:44:58 +0800748 else
749 return DIV_TO_RATE(GPLL_HZ, div);
Kever Yangb0b3c862016-07-29 10:35:25 +0800750}
751
Jagan Tekib52a1992020-01-09 14:22:17 +0530752static ulong rk3399_mmc_set_clk(struct rockchip_cru *cru,
Kever Yangb0b3c862016-07-29 10:35:25 +0800753 ulong clk_id, ulong set_rate)
754{
755 int src_clk_div;
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530756 int aclk_emmc = 198 * MHz;
Kever Yangb0b3c862016-07-29 10:35:25 +0800757
758 switch (clk_id) {
Philipp Tomsich998c61a2017-04-25 09:52:06 +0200759 case HCLK_SDMMC:
Kever Yangb0b3c862016-07-29 10:35:25 +0800760 case SCLK_SDMMC:
Kever Yangfd4b2dc2016-08-04 11:44:58 +0800761 /* Select clk_sdmmc source from GPLL by default */
Kever Yang3a94d752017-07-27 12:54:01 +0800762		/* the mmc clock is divided by 2 internally, so request double the rate from the cru */
763 src_clk_div = DIV_ROUND_UP(GPLL_HZ / 2, set_rate);
Kever Yangb0b3c862016-07-29 10:35:25 +0800764
Kever Yang217273c2017-07-27 12:54:02 +0800765 if (src_clk_div > 128) {
Kever Yangfd4b2dc2016-08-04 11:44:58 +0800766 /* use 24MHz source for 400KHz clock */
Kever Yang3a94d752017-07-27 12:54:01 +0800767 src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, set_rate);
Kever Yang217273c2017-07-27 12:54:02 +0800768 assert(src_clk_div - 1 < 128);
Kever Yangfd4b2dc2016-08-04 11:44:58 +0800769 rk_clrsetreg(&cru->clksel_con[16],
770 CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
771 CLK_EMMC_PLL_SEL_24M << CLK_EMMC_PLL_SHIFT |
772 (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
773 } else {
774 rk_clrsetreg(&cru->clksel_con[16],
775 CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
776 CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
777 (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
778 }
Kever Yangb0b3c862016-07-29 10:35:25 +0800779 break;
780 case SCLK_EMMC:
781 /* Select aclk_emmc source from GPLL */
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530782 src_clk_div = DIV_ROUND_UP(GPLL_HZ, aclk_emmc);
Kever Yang217273c2017-07-27 12:54:02 +0800783 assert(src_clk_div - 1 < 32);
Kever Yangb0b3c862016-07-29 10:35:25 +0800784
785 rk_clrsetreg(&cru->clksel_con[21],
786 ACLK_EMMC_PLL_SEL_MASK | ACLK_EMMC_DIV_CON_MASK,
787 ACLK_EMMC_PLL_SEL_GPLL << ACLK_EMMC_PLL_SEL_SHIFT |
788 (src_clk_div - 1) << ACLK_EMMC_DIV_CON_SHIFT);
789
790 /* Select clk_emmc source from GPLL too */
Kever Yang217273c2017-07-27 12:54:02 +0800791 src_clk_div = DIV_ROUND_UP(GPLL_HZ, set_rate);
792 assert(src_clk_div - 1 < 128);
Kever Yangb0b3c862016-07-29 10:35:25 +0800793
794 rk_clrsetreg(&cru->clksel_con[22],
795 CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
796 CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
797 (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
798 break;
799 default:
800 return -EINVAL;
801 }
802 return rk3399_mmc_get_clk(cru, clk_id);
803}
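
/*
 * Worked example (illustration only): asking for a 400 kHz card clock during
 * MMC enumeration.  The GPLL path would need a divider larger than 128, so
 * the code falls back to the 24 MHz oscillator: DIV_ROUND_UP(12 MHz, 400 kHz)
 * = 30, the register is programmed with 29, and the controller's internal
 * divide-by-2 brings the 800 kHz CRU output down to roughly the requested rate.
 */
#if 0	/* example only, never built */
	rk3399_mmc_set_clk(cru, SCLK_SDMMC, 400000);	/* 'cru' assumed in scope */
#endif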
804
Jagan Tekib52a1992020-01-09 14:22:17 +0530805static ulong rk3399_gmac_set_clk(struct rockchip_cru *cru, ulong rate)
Philipp Tomsicha45f17e2018-01-08 13:11:01 +0100806{
807 ulong ret;
808
809 /*
810 * The RGMII CLK can be derived either from an external "clkin"
 811 * or be generated internally by dividing down SCLK_MAC.
812 */
813 if (readl(&cru->clksel_con[19]) & BIT(4)) {
814 /* An external clock will always generate the right rate... */
815 ret = rate;
816 } else {
817 /*
818 * No platform uses an internal clock to date.
819 * Implement this once it becomes necessary and print an error
820 * if someone tries to use it (while it remains unimplemented).
821 */
822 pr_err("%s: internal clock is UNIMPLEMENTED\n", __func__);
823 ret = 0;
824 }
825
826 return ret;
827}
828
Kever Yang5ae2fd92017-02-13 17:38:56 +0800829#define PMUSGRF_DDR_RGN_CON16 0xff330040
Jagan Tekib52a1992020-01-09 14:22:17 +0530830static ulong rk3399_ddr_set_clk(struct rockchip_cru *cru,
Kever Yang5ae2fd92017-02-13 17:38:56 +0800831 ulong set_rate)
832{
833 struct pll_div dpll_cfg;
834
835 /* IC ECO bug, need to set this register */
836 writel(0xc000c000, PMUSGRF_DDR_RGN_CON16);
837
838 /* clk_ddrc == DPLL = 24MHz / refdiv * fbdiv / postdiv1 / postdiv2 */
839 switch (set_rate) {
Jagan Teki09565682019-07-16 17:27:35 +0530840 case 50 * MHz:
841 dpll_cfg = (struct pll_div)
842 {.refdiv = 1, .fbdiv = 12, .postdiv1 = 3, .postdiv2 = 2};
843 break;
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530844 case 200 * MHz:
Kever Yang5ae2fd92017-02-13 17:38:56 +0800845 dpll_cfg = (struct pll_div)
846 {.refdiv = 1, .fbdiv = 50, .postdiv1 = 6, .postdiv2 = 1};
847 break;
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530848 case 300 * MHz:
Kever Yang5ae2fd92017-02-13 17:38:56 +0800849 dpll_cfg = (struct pll_div)
850 {.refdiv = 2, .fbdiv = 100, .postdiv1 = 4, .postdiv2 = 1};
851 break;
Jagan Tekif556d752019-07-16 17:27:36 +0530852 case 400 * MHz:
853 dpll_cfg = (struct pll_div)
854 {.refdiv = 1, .fbdiv = 50, .postdiv1 = 3, .postdiv2 = 1};
855 break;
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530856 case 666 * MHz:
Kever Yang5ae2fd92017-02-13 17:38:56 +0800857 dpll_cfg = (struct pll_div)
858 {.refdiv = 2, .fbdiv = 111, .postdiv1 = 2, .postdiv2 = 1};
859 break;
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530860 case 800 * MHz:
Kever Yang5ae2fd92017-02-13 17:38:56 +0800861 dpll_cfg = (struct pll_div)
862 {.refdiv = 1, .fbdiv = 100, .postdiv1 = 3, .postdiv2 = 1};
863 break;
Jagan Tekidd7dfa22019-07-15 23:51:10 +0530864 case 933 * MHz:
Kever Yang5ae2fd92017-02-13 17:38:56 +0800865 dpll_cfg = (struct pll_div)
866 {.refdiv = 1, .fbdiv = 116, .postdiv1 = 3, .postdiv2 = 1};
867 break;
868 default:
Masahiro Yamada9b643e32017-09-16 14:10:41 +0900869		pr_err("Unsupported SDRAM frequency %ld\n", set_rate);
Kever Yang5ae2fd92017-02-13 17:38:56 +0800870 }
871 rkclk_set_pll(&cru->dpll_con[0], &dpll_cfg);
872
873 return set_rate;
874}
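
/*
 * Quick sanity check of the table above against the PLL formula documented
 * earlier: the 666 MHz entry gives 24 MHz / 2 * 111 / 2 / 1 = 666 MHz, while
 * the 933 MHz entry gives 24 MHz / 1 * 116 / 3 / 1 = 928 MHz, i.e. the "933"
 * setting is intentionally approximate.
 */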
David Wu364fc732017-09-20 14:38:58 +0800875
Jack Mitchellda0be4e2020-09-17 10:42:06 +0100876static ulong rk3399_alive_get_clk(struct rockchip_cru *cru)
877{
878 u32 div, val;
879
880 val = readl(&cru->clksel_con[57]);
881 div = (val & PCLK_ALIVE_DIV_CON_MASK) >>
882 PCLK_ALIVE_DIV_CON_SHIFT;
883
884 return DIV_TO_RATE(GPLL_HZ, div);
885}
886
Jagan Tekib52a1992020-01-09 14:22:17 +0530887static ulong rk3399_saradc_get_clk(struct rockchip_cru *cru)
David Wu364fc732017-09-20 14:38:58 +0800888{
889 u32 div, val;
890
891 val = readl(&cru->clksel_con[26]);
892 div = bitfield_extract(val, CLK_SARADC_DIV_CON_SHIFT,
893 CLK_SARADC_DIV_CON_WIDTH);
894
895 return DIV_TO_RATE(OSC_HZ, div);
896}
897
Jagan Tekib52a1992020-01-09 14:22:17 +0530898static ulong rk3399_saradc_set_clk(struct rockchip_cru *cru, uint hz)
David Wu364fc732017-09-20 14:38:58 +0800899{
900 int src_clk_div;
901
902 src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
903 assert(src_clk_div < 128);
904
905 rk_clrsetreg(&cru->clksel_con[26],
906 CLK_SARADC_DIV_CON_MASK,
907 src_clk_div << CLK_SARADC_DIV_CON_SHIFT);
908
909 return rk3399_saradc_get_clk(cru);
910}
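
/*
 * Example (illustration only): requesting 1 MHz programs
 * DIV_ROUND_UP(24 MHz, 1 MHz) - 1 = 23 into CLKSEL_CON26, and
 * rk3399_saradc_get_clk() then reads back 24 MHz / (23 + 1) = 1 MHz.
 */
#if 0	/* example only, never built */
	rk3399_saradc_set_clk(cru, 1000000);	/* 'cru' assumed in scope */
#endif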
911
Kever Yangb0b3c862016-07-29 10:35:25 +0800912static ulong rk3399_clk_get_rate(struct clk *clk)
913{
914 struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
915 ulong rate = 0;
916
917 switch (clk->id) {
918 case 0 ... 63:
919 return 0;
Philipp Tomsich998c61a2017-04-25 09:52:06 +0200920 case HCLK_SDMMC:
Kever Yangb0b3c862016-07-29 10:35:25 +0800921 case SCLK_SDMMC:
922 case SCLK_EMMC:
923 rate = rk3399_mmc_get_clk(priv->cru, clk->id);
924 break;
925 case SCLK_I2C1:
926 case SCLK_I2C2:
927 case SCLK_I2C3:
928 case SCLK_I2C5:
929 case SCLK_I2C6:
930 case SCLK_I2C7:
931 rate = rk3399_i2c_get_clk(priv->cru, clk->id);
932 break;
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200933 case SCLK_SPI0...SCLK_SPI5:
934 rate = rk3399_spi_get_clk(priv->cru, clk->id);
935 break;
936 case SCLK_UART0:
Christoph Muellner24615432019-05-07 10:58:44 +0200937 case SCLK_UART1:
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200938 case SCLK_UART2:
Christoph Muellner24615432019-05-07 10:58:44 +0200939 case SCLK_UART3:
Philipp Tomsich8fa69792017-04-20 22:05:49 +0200940 return 24000000;
Philipp Tomsichffc1fac2017-04-28 18:33:57 +0200941 case PCLK_HDMI_CTRL:
942 break;
Kever Yangb0b3c862016-07-29 10:35:25 +0800943 case DCLK_VOP0:
944 case DCLK_VOP1:
945 break;
Philipp Tomsicha70feb42017-04-28 17:11:55 +0200946 case PCLK_EFUSE1024NS:
947 break;
David Wu364fc732017-09-20 14:38:58 +0800948 case SCLK_SARADC:
949 rate = rk3399_saradc_get_clk(priv->cru);
950 break;
Simon Glass5328af12019-01-21 14:53:30 -0700951 case ACLK_VIO:
952 case ACLK_HDCP:
953 case ACLK_GIC_PRE:
954 case PCLK_DDR:
955 break;
Jack Mitchellda0be4e2020-09-17 10:42:06 +0100956 case PCLK_ALIVE:
957 case PCLK_WDT:
958 rate = rk3399_alive_get_clk(priv->cru);
959 break;
Kever Yangb0b3c862016-07-29 10:35:25 +0800960 default:
Simon Glass5328af12019-01-21 14:53:30 -0700961 log_debug("Unknown clock %lu\n", clk->id);
Kever Yangb0b3c862016-07-29 10:35:25 +0800962 return -ENOENT;
963 }
964
965 return rate;
966}
967
968static ulong rk3399_clk_set_rate(struct clk *clk, ulong rate)
969{
970 struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
971 ulong ret = 0;
972
973 switch (clk->id) {
974 case 0 ... 63:
975 return 0;
Philipp Tomsichd2f1f1a2018-01-08 14:00:27 +0100976
977 case ACLK_PERIHP:
978 case HCLK_PERIHP:
979 case PCLK_PERIHP:
980 return 0;
981
982 case ACLK_PERILP0:
983 case HCLK_PERILP0:
984 case PCLK_PERILP0:
985 return 0;
986
987 case ACLK_CCI:
988 return 0;
989
990 case HCLK_PERILP1:
991 case PCLK_PERILP1:
992 return 0;
993
Philipp Tomsich998c61a2017-04-25 09:52:06 +0200994 case HCLK_SDMMC:
Kever Yangb0b3c862016-07-29 10:35:25 +0800995 case SCLK_SDMMC:
996 case SCLK_EMMC:
997 ret = rk3399_mmc_set_clk(priv->cru, clk->id, rate);
998 break;
Philipp Tomsich65d83302017-03-24 19:24:25 +0100999 case SCLK_MAC:
Philipp Tomsicha45f17e2018-01-08 13:11:01 +01001000 ret = rk3399_gmac_set_clk(priv->cru, rate);
Philipp Tomsich65d83302017-03-24 19:24:25 +01001001 break;
Kever Yangb0b3c862016-07-29 10:35:25 +08001002 case SCLK_I2C1:
1003 case SCLK_I2C2:
1004 case SCLK_I2C3:
1005 case SCLK_I2C5:
1006 case SCLK_I2C6:
1007 case SCLK_I2C7:
1008 ret = rk3399_i2c_set_clk(priv->cru, clk->id, rate);
1009 break;
Philipp Tomsich8fa69792017-04-20 22:05:49 +02001010 case SCLK_SPI0...SCLK_SPI5:
1011 ret = rk3399_spi_set_clk(priv->cru, clk->id, rate);
1012 break;
Philipp Tomsichffc1fac2017-04-28 18:33:57 +02001013 case PCLK_HDMI_CTRL:
1014 case PCLK_VIO_GRF:
1015 /* the PCLK gates for video are enabled by default */
1016 break;
Kever Yangb0b3c862016-07-29 10:35:25 +08001017 case DCLK_VOP0:
1018 case DCLK_VOP1:
Kever Yang5e79f442016-08-12 17:47:15 +08001019 ret = rk3399_vop_set_clk(priv->cru, clk->id, rate);
Kever Yangb0b3c862016-07-29 10:35:25 +08001020 break;
Jagan Tekib1bcd612020-04-02 17:11:21 +05301021 case ACLK_VOP1:
1022 case HCLK_VOP1:
Jagan Teki96993d72020-04-28 15:30:16 +05301023 case HCLK_SD:
Jagan Teki80e19112020-05-26 11:32:06 +08001024 case SCLK_UPHY0_TCPDCORE:
1025 case SCLK_UPHY1_TCPDCORE:
Jagan Tekib1bcd612020-04-02 17:11:21 +05301026		/*
 1027		 * assigned-clocks handling is not required for these clocks, so
 1028		 * return 0 to satisfy clk_set_defaults() during device probe.
1029 */
1030 return 0;
Kever Yang5ae2fd92017-02-13 17:38:56 +08001031 case SCLK_DDRCLK:
1032 ret = rk3399_ddr_set_clk(priv->cru, rate);
1033 break;
Philipp Tomsicha70feb42017-04-28 17:11:55 +02001034 case PCLK_EFUSE1024NS:
1035 break;
David Wu364fc732017-09-20 14:38:58 +08001036 case SCLK_SARADC:
1037 ret = rk3399_saradc_set_clk(priv->cru, rate);
1038 break;
Simon Glass5328af12019-01-21 14:53:30 -07001039 case ACLK_VIO:
1040 case ACLK_HDCP:
1041 case ACLK_GIC_PRE:
1042 case PCLK_DDR:
1043 return 0;
Kever Yangb0b3c862016-07-29 10:35:25 +08001044 default:
Simon Glass5328af12019-01-21 14:53:30 -07001045 log_debug("Unknown clock %lu\n", clk->id);
Kever Yangb0b3c862016-07-29 10:35:25 +08001046 return -ENOENT;
1047 }
1048
1049 return ret;
1050}
1051
Jagan Tekidd7dfa22019-07-15 23:51:10 +05301052static int __maybe_unused rk3399_gmac_set_parent(struct clk *clk,
1053 struct clk *parent)
Philipp Tomsicha45f17e2018-01-08 13:11:01 +01001054{
1055 struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
1056 const char *clock_output_name;
1057 int ret;
1058
1059 /*
1060 * If the requested parent is in the same clock-controller and
1061 * the id is SCLK_MAC ("clk_gmac"), switch to the internal clock.
1062 */
Jagan Tekidd7dfa22019-07-15 23:51:10 +05301063 if (parent->dev == clk->dev && parent->id == SCLK_MAC) {
Philipp Tomsicha45f17e2018-01-08 13:11:01 +01001064 debug("%s: switching RGMII to SCLK_MAC\n", __func__);
1065 rk_clrreg(&priv->cru->clksel_con[19], BIT(4));
1066 return 0;
1067 }
1068
1069 /*
1070 * Otherwise, we need to check the clock-output-names of the
1071 * requested parent to see if the requested id is "clkin_gmac".
1072 */
1073 ret = dev_read_string_index(parent->dev, "clock-output-names",
1074 parent->id, &clock_output_name);
1075 if (ret < 0)
1076 return -ENODATA;
1077
1078 /* If this is "clkin_gmac", switch to the external clock input */
1079 if (!strcmp(clock_output_name, "clkin_gmac")) {
1080 debug("%s: switching RGMII to CLKIN\n", __func__);
1081 rk_setreg(&priv->cru->clksel_con[19], BIT(4));
1082 return 0;
1083 }
1084
1085 return -EINVAL;
1086}
1087
Jagan Tekidd7dfa22019-07-15 23:51:10 +05301088static int __maybe_unused rk3399_clk_set_parent(struct clk *clk,
1089 struct clk *parent)
Philipp Tomsicha45f17e2018-01-08 13:11:01 +01001090{
1091 switch (clk->id) {
1092 case SCLK_RMII_SRC:
1093 return rk3399_gmac_set_parent(clk, parent);
1094 }
1095
1096 debug("%s: unsupported clk %ld\n", __func__, clk->id);
1097 return -ENOENT;
1098}
1099
Jagan Teki30d09a22020-05-09 22:26:19 +05301100static int rk3399_clk_enable(struct clk *clk)
1101{
1102 struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
1103
1104 switch (clk->id) {
1105 case SCLK_MAC:
1106 rk_clrreg(&priv->cru->clkgate_con[5], BIT(5));
1107 break;
1108 case SCLK_MAC_RX:
1109 rk_clrreg(&priv->cru->clkgate_con[5], BIT(8));
1110 break;
1111 case SCLK_MAC_TX:
1112 rk_clrreg(&priv->cru->clkgate_con[5], BIT(9));
1113 break;
1114 case SCLK_MACREF:
1115 rk_clrreg(&priv->cru->clkgate_con[5], BIT(7));
1116 break;
1117 case SCLK_MACREF_OUT:
1118 rk_clrreg(&priv->cru->clkgate_con[5], BIT(6));
1119 break;
Jagan Tekif7dd12a2020-05-26 11:32:05 +08001120 case SCLK_USB2PHY0_REF:
1121 rk_clrreg(&priv->cru->clkgate_con[6], BIT(5));
1122 break;
1123 case SCLK_USB2PHY1_REF:
1124 rk_clrreg(&priv->cru->clkgate_con[6], BIT(6));
1125 break;
Jagan Teki30d09a22020-05-09 22:26:19 +05301126 case ACLK_GMAC:
1127 rk_clrreg(&priv->cru->clkgate_con[32], BIT(0));
1128 break;
1129 case PCLK_GMAC:
1130 rk_clrreg(&priv->cru->clkgate_con[32], BIT(2));
1131 break;
1132 case SCLK_USB3OTG0_REF:
1133 rk_clrreg(&priv->cru->clkgate_con[12], BIT(1));
1134 break;
1135 case SCLK_USB3OTG1_REF:
1136 rk_clrreg(&priv->cru->clkgate_con[12], BIT(2));
1137 break;
1138 case SCLK_USB3OTG0_SUSPEND:
1139 rk_clrreg(&priv->cru->clkgate_con[12], BIT(3));
1140 break;
1141 case SCLK_USB3OTG1_SUSPEND:
1142 rk_clrreg(&priv->cru->clkgate_con[12], BIT(4));
1143 break;
1144 case ACLK_USB3OTG0:
1145 rk_clrreg(&priv->cru->clkgate_con[30], BIT(1));
1146 break;
1147 case ACLK_USB3OTG1:
1148 rk_clrreg(&priv->cru->clkgate_con[30], BIT(2));
1149 break;
1150 case ACLK_USB3_RKSOC_AXI_PERF:
1151 rk_clrreg(&priv->cru->clkgate_con[30], BIT(3));
1152 break;
1153 case ACLK_USB3:
1154 rk_clrreg(&priv->cru->clkgate_con[12], BIT(0));
1155 break;
1156 case ACLK_USB3_GRF:
1157 rk_clrreg(&priv->cru->clkgate_con[30], BIT(4));
1158 break;
1159 case HCLK_HOST0:
1160 rk_clrreg(&priv->cru->clksel_con[20], BIT(5));
1161 break;
1162 case HCLK_HOST0_ARB:
1163 rk_clrreg(&priv->cru->clksel_con[20], BIT(6));
1164 break;
1165 case HCLK_HOST1:
1166 rk_clrreg(&priv->cru->clksel_con[20], BIT(7));
1167 break;
1168 case HCLK_HOST1_ARB:
1169 rk_clrreg(&priv->cru->clksel_con[20], BIT(8));
1170 break;
Jagan Tekie1b413d2020-05-26 11:32:07 +08001171 case SCLK_UPHY0_TCPDPHY_REF:
1172 rk_clrreg(&priv->cru->clkgate_con[13], BIT(4));
1173 break;
1174 case SCLK_UPHY0_TCPDCORE:
1175 rk_clrreg(&priv->cru->clkgate_con[13], BIT(5));
1176 break;
1177 case SCLK_UPHY1_TCPDPHY_REF:
1178 rk_clrreg(&priv->cru->clkgate_con[13], BIT(6));
1179 break;
1180 case SCLK_UPHY1_TCPDCORE:
1181 rk_clrreg(&priv->cru->clkgate_con[13], BIT(7));
1182 break;
Jagan Teki912f6332020-05-09 22:26:20 +05301183 case SCLK_PCIEPHY_REF:
1184 rk_clrreg(&priv->cru->clksel_con[18], BIT(10));
1185 break;
Jagan Teki30d09a22020-05-09 22:26:19 +05301186 default:
1187 debug("%s: unsupported clk %ld\n", __func__, clk->id);
1188 return -ENOENT;
1189 }
1190
1191 return 0;
1192}
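
/*
 * Note on the register helpers used throughout this file: Rockchip CRU
 * registers carry a write-enable mask in their upper 16 bits, so individual
 * gate/mux bits can be changed without a read-modify-write.  The helpers in
 * asm/arch-rockchip/hardware.h are expected to expand roughly as sketched
 * below (shown for illustration; that header is authoritative).
 */
#if 0	/* example only, never built */
	/* rk_setreg(addr, set)        -> writel((set) << 16 | (set), addr)        */
	/* rk_clrreg(addr, clr)        -> writel((clr) << 16, addr)                */
	/* rk_clrsetreg(addr, clr, s)  -> writel(((clr) | (s)) << 16 | (s), addr)  */
	rk_setreg(&priv->cru->clkgate_con[5], BIT(5));	/* gates SCLK_MAC, as in
							 * rk3399_clk_disable();
							 * 'priv' assumed in scope */
#endif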
1193
1194static int rk3399_clk_disable(struct clk *clk)
1195{
1196 struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
1197
1198 switch (clk->id) {
1199 case SCLK_MAC:
1200 rk_setreg(&priv->cru->clkgate_con[5], BIT(5));
1201 break;
1202 case SCLK_MAC_RX:
1203 rk_setreg(&priv->cru->clkgate_con[5], BIT(8));
1204 break;
1205 case SCLK_MAC_TX:
1206 rk_setreg(&priv->cru->clkgate_con[5], BIT(9));
1207 break;
1208 case SCLK_MACREF:
1209 rk_setreg(&priv->cru->clkgate_con[5], BIT(7));
1210 break;
1211 case SCLK_MACREF_OUT:
1212 rk_setreg(&priv->cru->clkgate_con[5], BIT(6));
1213 break;
Jagan Tekif7dd12a2020-05-26 11:32:05 +08001214 case SCLK_USB2PHY0_REF:
1215 rk_setreg(&priv->cru->clkgate_con[6], BIT(5));
1216 break;
1217 case SCLK_USB2PHY1_REF:
1218 rk_setreg(&priv->cru->clkgate_con[6], BIT(6));
1219 break;
Jagan Teki30d09a22020-05-09 22:26:19 +05301220 case ACLK_GMAC:
1221 rk_setreg(&priv->cru->clkgate_con[32], BIT(0));
1222 break;
1223 case PCLK_GMAC:
1224 rk_setreg(&priv->cru->clkgate_con[32], BIT(2));
1225 break;
1226 case SCLK_USB3OTG0_REF:
1227 rk_setreg(&priv->cru->clkgate_con[12], BIT(1));
1228 break;
1229 case SCLK_USB3OTG1_REF:
1230 rk_setreg(&priv->cru->clkgate_con[12], BIT(2));
1231 break;
1232 case SCLK_USB3OTG0_SUSPEND:
1233 rk_setreg(&priv->cru->clkgate_con[12], BIT(3));
1234 break;
1235 case SCLK_USB3OTG1_SUSPEND:
1236 rk_setreg(&priv->cru->clkgate_con[12], BIT(4));
1237 break;
1238 case ACLK_USB3OTG0:
1239 rk_setreg(&priv->cru->clkgate_con[30], BIT(1));
1240 break;
1241 case ACLK_USB3OTG1:
1242 rk_setreg(&priv->cru->clkgate_con[30], BIT(2));
1243 break;
1244 case ACLK_USB3_RKSOC_AXI_PERF:
1245 rk_setreg(&priv->cru->clkgate_con[30], BIT(3));
1246 break;
1247 case ACLK_USB3:
1248 rk_setreg(&priv->cru->clkgate_con[12], BIT(0));
1249 break;
1250 case ACLK_USB3_GRF:
1251 rk_setreg(&priv->cru->clkgate_con[30], BIT(4));
1252 break;
1253 case HCLK_HOST0:
1254 rk_setreg(&priv->cru->clksel_con[20], BIT(5));
1255 break;
1256 case HCLK_HOST0_ARB:
1257 rk_setreg(&priv->cru->clksel_con[20], BIT(6));
1258 break;
1259 case HCLK_HOST1:
1260 rk_setreg(&priv->cru->clksel_con[20], BIT(7));
1261 break;
1262 case HCLK_HOST1_ARB:
1263 rk_setreg(&priv->cru->clksel_con[20], BIT(8));
1264 break;
Jagan Tekie1b413d2020-05-26 11:32:07 +08001265 case SCLK_UPHY0_TCPDPHY_REF:
1266 rk_setreg(&priv->cru->clkgate_con[13], BIT(4));
1267 break;
1268 case SCLK_UPHY0_TCPDCORE:
1269 rk_setreg(&priv->cru->clkgate_con[13], BIT(5));
1270 break;
1271 case SCLK_UPHY1_TCPDPHY_REF:
1272 rk_setreg(&priv->cru->clkgate_con[13], BIT(6));
1273 break;
1274 case SCLK_UPHY1_TCPDCORE:
1275 rk_setreg(&priv->cru->clkgate_con[13], BIT(7));
1276 break;
Jagan Teki912f6332020-05-09 22:26:20 +05301277 case SCLK_PCIEPHY_REF:
1278 rk_clrreg(&priv->cru->clksel_con[18], BIT(10));
1279 break;
Jagan Teki30d09a22020-05-09 22:26:19 +05301280 default:
1281 debug("%s: unsupported clk %ld\n", __func__, clk->id);
1282 return -ENOENT;
1283 }
1284
1285 return 0;
1286}
1287
Kever Yangb0b3c862016-07-29 10:35:25 +08001288static struct clk_ops rk3399_clk_ops = {
1289 .get_rate = rk3399_clk_get_rate,
1290 .set_rate = rk3399_clk_set_rate,
Philipp Tomsich75b381a2018-01-25 15:27:10 +01001291#if CONFIG_IS_ENABLED(OF_CONTROL) && !CONFIG_IS_ENABLED(OF_PLATDATA)
Philipp Tomsicha45f17e2018-01-08 13:11:01 +01001292 .set_parent = rk3399_clk_set_parent,
Philipp Tomsich75b381a2018-01-25 15:27:10 +01001293#endif
Jagan Teki30d09a22020-05-09 22:26:19 +05301294 .enable = rk3399_clk_enable,
1295 .disable = rk3399_clk_disable,
Kever Yangb0b3c862016-07-29 10:35:25 +08001296};
1297
Jagan Tekib52a1992020-01-09 14:22:17 +05301298static void rkclk_init(struct rockchip_cru *cru)
Kever Yang9f636a22017-10-12 15:27:29 +08001299{
1300 u32 aclk_div;
1301 u32 hclk_div;
1302 u32 pclk_div;
1303
Christoph Muellneraf765a42018-11-30 20:32:48 +01001304 rk3399_configure_cpu_l(cru, APLL_L_600_MHZ);
1305 rk3399_configure_cpu_b(cru, APLL_B_600_MHZ);
Kever Yang9f636a22017-10-12 15:27:29 +08001306 /*
 1307	 * some cru registers are changed by the bootrom; reset them to the
 1308	 * default values described in the TRM to avoid confusing the kernel.
 1309	 * Please consider these three lines a fix for a bootrom bug.
1310 */
1311 rk_clrsetreg(&cru->clksel_con[12], 0xffff, 0x4101);
1312 rk_clrsetreg(&cru->clksel_con[19], 0xffff, 0x033f);
1313 rk_clrsetreg(&cru->clksel_con[56], 0x0003, 0x0003);
1314
1315 /* configure gpll cpll */
1316 rkclk_set_pll(&cru->gpll_con[0], &gpll_init_cfg);
1317 rkclk_set_pll(&cru->cpll_con[0], &cpll_init_cfg);
1318
1319 /* configure perihp aclk, hclk, pclk */
1320 aclk_div = GPLL_HZ / PERIHP_ACLK_HZ - 1;
1321 assert((aclk_div + 1) * PERIHP_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);
1322
1323 hclk_div = PERIHP_ACLK_HZ / PERIHP_HCLK_HZ - 1;
1324 assert((hclk_div + 1) * PERIHP_HCLK_HZ ==
1325 PERIHP_ACLK_HZ && (hclk_div < 0x4));
1326
1327 pclk_div = PERIHP_ACLK_HZ / PERIHP_PCLK_HZ - 1;
1328 assert((pclk_div + 1) * PERIHP_PCLK_HZ ==
1329 PERIHP_ACLK_HZ && (pclk_div < 0x7));
1330
1331 rk_clrsetreg(&cru->clksel_con[14],
1332 PCLK_PERIHP_DIV_CON_MASK | HCLK_PERIHP_DIV_CON_MASK |
1333 ACLK_PERIHP_PLL_SEL_MASK | ACLK_PERIHP_DIV_CON_MASK,
1334 pclk_div << PCLK_PERIHP_DIV_CON_SHIFT |
1335 hclk_div << HCLK_PERIHP_DIV_CON_SHIFT |
1336 ACLK_PERIHP_PLL_SEL_GPLL << ACLK_PERIHP_PLL_SEL_SHIFT |
1337 aclk_div << ACLK_PERIHP_DIV_CON_SHIFT);
1338
1339 /* configure perilp0 aclk, hclk, pclk */
1340 aclk_div = GPLL_HZ / PERILP0_ACLK_HZ - 1;
1341 assert((aclk_div + 1) * PERILP0_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);
1342
1343 hclk_div = PERILP0_ACLK_HZ / PERILP0_HCLK_HZ - 1;
1344 assert((hclk_div + 1) * PERILP0_HCLK_HZ ==
1345 PERILP0_ACLK_HZ && (hclk_div < 0x4));
1346
1347 pclk_div = PERILP0_ACLK_HZ / PERILP0_PCLK_HZ - 1;
1348 assert((pclk_div + 1) * PERILP0_PCLK_HZ ==
1349 PERILP0_ACLK_HZ && (pclk_div < 0x7));
1350
1351 rk_clrsetreg(&cru->clksel_con[23],
1352 PCLK_PERILP0_DIV_CON_MASK | HCLK_PERILP0_DIV_CON_MASK |
1353 ACLK_PERILP0_PLL_SEL_MASK | ACLK_PERILP0_DIV_CON_MASK,
1354 pclk_div << PCLK_PERILP0_DIV_CON_SHIFT |
1355 hclk_div << HCLK_PERILP0_DIV_CON_SHIFT |
1356 ACLK_PERILP0_PLL_SEL_GPLL << ACLK_PERILP0_PLL_SEL_SHIFT |
1357 aclk_div << ACLK_PERILP0_DIV_CON_SHIFT);
1358
 1359	/* perilp1 hclk selects gpll as its source */
1360 hclk_div = GPLL_HZ / PERILP1_HCLK_HZ - 1;
1361 assert((hclk_div + 1) * PERILP1_HCLK_HZ ==
1362 GPLL_HZ && (hclk_div < 0x1f));
1363
1364 pclk_div = PERILP1_HCLK_HZ / PERILP1_HCLK_HZ - 1;
1365 assert((pclk_div + 1) * PERILP1_HCLK_HZ ==
1366 PERILP1_HCLK_HZ && (hclk_div < 0x7));
1367
1368 rk_clrsetreg(&cru->clksel_con[25],
1369 PCLK_PERILP1_DIV_CON_MASK | HCLK_PERILP1_DIV_CON_MASK |
1370 HCLK_PERILP1_PLL_SEL_MASK,
1371 pclk_div << PCLK_PERILP1_DIV_CON_SHIFT |
1372 hclk_div << HCLK_PERILP1_DIV_CON_SHIFT |
1373 HCLK_PERILP1_PLL_SEL_GPLL << HCLK_PERILP1_PLL_SEL_SHIFT);
1374}
Kever Yang9f636a22017-10-12 15:27:29 +08001375
Kever Yangb0b3c862016-07-29 10:35:25 +08001376static int rk3399_clk_probe(struct udevice *dev)
1377{
1378 struct rk3399_clk_priv *priv = dev_get_priv(dev);
Alper Nebi Yasakeb890252020-10-28 00:15:10 +03001379 bool init_clocks = false;
Kever Yangb0b3c862016-07-29 10:35:25 +08001380
Kever Yang5ae2fd92017-02-13 17:38:56 +08001381#if CONFIG_IS_ENABLED(OF_PLATDATA)
Simon Glassc69cda22020-12-03 16:55:20 -07001382 struct rk3399_clk_plat *plat = dev_get_plat(dev);
Kever Yangb0b3c862016-07-29 10:35:25 +08001383
Simon Glassc20ee0e2017-08-29 14:15:50 -06001384 priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
Kever Yang5ae2fd92017-02-13 17:38:56 +08001385#endif
Alper Nebi Yasakeb890252020-10-28 00:15:10 +03001386
1387#if defined(CONFIG_SPL_BUILD)
1388 init_clocks = true;
1389#elif CONFIG_IS_ENABLED(HANDOFF)
1390 if (!(gd->flags & GD_FLG_RELOC)) {
1391 if (!(gd->spl_handoff))
1392 init_clocks = true;
1393 }
Kever Yang5ae2fd92017-02-13 17:38:56 +08001394#endif
Alper Nebi Yasakeb890252020-10-28 00:15:10 +03001395
1396 if (init_clocks)
1397 rkclk_init(priv->cru);
1398
Kever Yangb0b3c862016-07-29 10:35:25 +08001399 return 0;
1400}
1401
Simon Glassd1998a92020-12-03 16:55:21 -07001402static int rk3399_clk_of_to_plat(struct udevice *dev)
Kever Yangb0b3c862016-07-29 10:35:25 +08001403{
Kever Yang5ae2fd92017-02-13 17:38:56 +08001404#if !CONFIG_IS_ENABLED(OF_PLATDATA)
Kever Yangb0b3c862016-07-29 10:35:25 +08001405 struct rk3399_clk_priv *priv = dev_get_priv(dev);
1406
Philipp Tomsich75c78592017-09-12 17:32:24 +02001407 priv->cru = dev_read_addr_ptr(dev);
Kever Yang5ae2fd92017-02-13 17:38:56 +08001408#endif
Kever Yangb0b3c862016-07-29 10:35:25 +08001409 return 0;
1410}
1411
1412static int rk3399_clk_bind(struct udevice *dev)
1413{
1414 int ret;
Kever Yangf24e36d2017-11-03 15:16:13 +08001415 struct udevice *sys_child;
1416 struct sysreset_reg *priv;
Kever Yangb0b3c862016-07-29 10:35:25 +08001417
1418 /* The reset driver does not have a device node, so bind it here */
Kever Yangf24e36d2017-11-03 15:16:13 +08001419 ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
1420 &sys_child);
1421 if (ret) {
1422 debug("Warning: No sysreset driver: ret=%d\n", ret);
1423 } else {
1424 priv = malloc(sizeof(struct sysreset_reg));
Jagan Tekib52a1992020-01-09 14:22:17 +05301425 priv->glb_srst_fst_value = offsetof(struct rockchip_cru,
Kever Yangf24e36d2017-11-03 15:16:13 +08001426 glb_srst_fst_value);
Jagan Tekib52a1992020-01-09 14:22:17 +05301427 priv->glb_srst_snd_value = offsetof(struct rockchip_cru,
Kever Yangf24e36d2017-11-03 15:16:13 +08001428 glb_srst_snd_value);
Simon Glass0fd3d912020-12-22 19:30:28 -07001429 dev_set_priv(sys_child, priv);
Kever Yangf24e36d2017-11-03 15:16:13 +08001430 }
Kever Yangb0b3c862016-07-29 10:35:25 +08001431
Heiko Stuebnera5ada252019-11-09 00:06:30 +01001432#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
Jagan Tekib52a1992020-01-09 14:22:17 +05301433 ret = offsetof(struct rockchip_cru, softrst_con[0]);
Elaine Zhang538f67c2017-12-19 18:22:38 +08001434 ret = rockchip_reset_bind(dev, ret, 21);
1435 if (ret)
 1436		debug("Warning: software reset driver bind failed\n");
1437#endif
1438
Kever Yangb0b3c862016-07-29 10:35:25 +08001439 return 0;
1440}
1441
1442static const struct udevice_id rk3399_clk_ids[] = {
1443 { .compatible = "rockchip,rk3399-cru" },
1444 { }
1445};
1446
1447U_BOOT_DRIVER(clk_rk3399) = {
Kever Yang5ae2fd92017-02-13 17:38:56 +08001448 .name = "rockchip_rk3399_cru",
Kever Yangb0b3c862016-07-29 10:35:25 +08001449 .id = UCLASS_CLK,
1450 .of_match = rk3399_clk_ids,
Simon Glass41575d82020-12-03 16:55:17 -07001451 .priv_auto = sizeof(struct rk3399_clk_priv),
Simon Glassd1998a92020-12-03 16:55:21 -07001452 .of_to_plat = rk3399_clk_of_to_plat,
Kever Yangb0b3c862016-07-29 10:35:25 +08001453 .ops = &rk3399_clk_ops,
1454 .bind = rk3399_clk_bind,
1455 .probe = rk3399_clk_probe,
Kever Yang5ae2fd92017-02-13 17:38:56 +08001456#if CONFIG_IS_ENABLED(OF_PLATDATA)
Simon Glasscaa4daa2020-12-03 16:55:18 -07001457 .plat_auto = sizeof(struct rk3399_clk_plat),
Kever Yang5ae2fd92017-02-13 17:38:56 +08001458#endif
Kever Yangb0b3c862016-07-29 10:35:25 +08001459};
Kever Yang5e79f442016-08-12 17:47:15 +08001460
1461static ulong rk3399_i2c_get_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id)
1462{
1463 u32 div, con;
1464
1465 switch (clk_id) {
1466 case SCLK_I2C0_PMU:
1467 con = readl(&pmucru->pmucru_clksel[2]);
1468 div = I2C_CLK_DIV_VALUE(con, 0);
1469 break;
1470 case SCLK_I2C4_PMU:
1471 con = readl(&pmucru->pmucru_clksel[3]);
1472 div = I2C_CLK_DIV_VALUE(con, 4);
1473 break;
1474 case SCLK_I2C8_PMU:
1475 con = readl(&pmucru->pmucru_clksel[2]);
1476 div = I2C_CLK_DIV_VALUE(con, 8);
1477 break;
1478 default:
1479 printf("do not support this i2c bus\n");
1480 return -EINVAL;
1481 }
1482
1483 return DIV_TO_RATE(PPLL_HZ, div);
1484}
1485
1486static ulong rk3399_i2c_set_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id,
1487 uint hz)
1488{
1489 int src_clk_div;
1490
1491 src_clk_div = PPLL_HZ / hz;
1492 assert(src_clk_div - 1 < 127);
1493
1494 switch (clk_id) {
1495 case SCLK_I2C0_PMU:
1496 rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(0),
1497 I2C_PMUCLK_REG_VALUE(0, src_clk_div));
1498 break;
1499 case SCLK_I2C4_PMU:
1500 rk_clrsetreg(&pmucru->pmucru_clksel[3], I2C_PMUCLK_REG_MASK(4),
1501 I2C_PMUCLK_REG_VALUE(4, src_clk_div));
1502 break;
1503 case SCLK_I2C8_PMU:
1504 rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(8),
1505 I2C_PMUCLK_REG_VALUE(8, src_clk_div));
1506 break;
1507 default:
1508 printf("do not support this i2c bus\n");
1509 return -EINVAL;
1510 }
1511
1512 return DIV_TO_RATE(PPLL_HZ, src_clk_div);
1513}
1514
1515static ulong rk3399_pwm_get_clk(struct rk3399_pmucru *pmucru)
1516{
1517 u32 div, con;
1518
 1519	/* the PWM clock rate is the same as pclk_pmu */
1520 con = readl(&pmucru->pmucru_clksel[0]);
1521 div = con & PMU_PCLK_DIV_CON_MASK;
1522
1523 return DIV_TO_RATE(PPLL_HZ, div);
1524}
1525
1526static ulong rk3399_pmuclk_get_rate(struct clk *clk)
1527{
1528 struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
1529 ulong rate = 0;
1530
1531 switch (clk->id) {
Philipp Tomsich434d5a02018-02-23 17:36:41 +01001532 case PLL_PPLL:
1533 return PPLL_HZ;
Kever Yang5e79f442016-08-12 17:47:15 +08001534 case PCLK_RKPWM_PMU:
Jack Mitchellda0be4e2020-09-17 10:42:06 +01001535 case PCLK_WDT_M0_PMU:
Kever Yang5e79f442016-08-12 17:47:15 +08001536 rate = rk3399_pwm_get_clk(priv->pmucru);
1537 break;
1538 case SCLK_I2C0_PMU:
1539 case SCLK_I2C4_PMU:
1540 case SCLK_I2C8_PMU:
1541 rate = rk3399_i2c_get_pmuclk(priv->pmucru, clk->id);
1542 break;
1543 default:
1544 return -ENOENT;
1545 }
1546
1547 return rate;
1548}
1549
1550static ulong rk3399_pmuclk_set_rate(struct clk *clk, ulong rate)
1551{
1552 struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
1553 ulong ret = 0;
1554
1555 switch (clk->id) {
Philipp Tomsich434d5a02018-02-23 17:36:41 +01001556 case PLL_PPLL:
1557 /*
1558 * This has already been set up and we don't want/need
1559 * to change it here. Accept the request though, as the
1560 * device-tree has this in an 'assigned-clocks' list.
1561 */
1562 return PPLL_HZ;
Kever Yang5e79f442016-08-12 17:47:15 +08001563 case SCLK_I2C0_PMU:
1564 case SCLK_I2C4_PMU:
1565 case SCLK_I2C8_PMU:
1566 ret = rk3399_i2c_set_pmuclk(priv->pmucru, clk->id, rate);
1567 break;
1568 default:
1569 return -ENOENT;
1570 }
1571
1572 return ret;
1573}
1574
1575static struct clk_ops rk3399_pmuclk_ops = {
1576 .get_rate = rk3399_pmuclk_get_rate,
1577 .set_rate = rk3399_pmuclk_set_rate,
1578};
1579
Kever Yang5ae2fd92017-02-13 17:38:56 +08001580#ifndef CONFIG_SPL_BUILD
Kever Yang5e79f442016-08-12 17:47:15 +08001581static void pmuclk_init(struct rk3399_pmucru *pmucru)
1582{
1583 u32 pclk_div;
1584
1585 /* configure pmu pll(ppll) */
1586 rkclk_set_pll(&pmucru->ppll_con[0], &ppll_init_cfg);
1587
1588 /* configure pmu pclk */
1589 pclk_div = PPLL_HZ / PMU_PCLK_HZ - 1;
Kever Yang5e79f442016-08-12 17:47:15 +08001590 rk_clrsetreg(&pmucru->pmucru_clksel[0],
1591 PMU_PCLK_DIV_CON_MASK,
1592 pclk_div << PMU_PCLK_DIV_CON_SHIFT);
1593}
Kever Yang5ae2fd92017-02-13 17:38:56 +08001594#endif
Kever Yang5e79f442016-08-12 17:47:15 +08001595
1596static int rk3399_pmuclk_probe(struct udevice *dev)
1597{
Philipp Tomsich61dff332017-03-24 19:24:24 +01001598#if CONFIG_IS_ENABLED(OF_PLATDATA) || !defined(CONFIG_SPL_BUILD)
Kever Yang5e79f442016-08-12 17:47:15 +08001599 struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);
Philipp Tomsich61dff332017-03-24 19:24:24 +01001600#endif
Kever Yang5e79f442016-08-12 17:47:15 +08001601
Kever Yang5ae2fd92017-02-13 17:38:56 +08001602#if CONFIG_IS_ENABLED(OF_PLATDATA)
Simon Glassc69cda22020-12-03 16:55:20 -07001603 struct rk3399_pmuclk_plat *plat = dev_get_plat(dev);
Kever Yang5e79f442016-08-12 17:47:15 +08001604
Simon Glassc20ee0e2017-08-29 14:15:50 -06001605 priv->pmucru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
Kever Yang5ae2fd92017-02-13 17:38:56 +08001606#endif
1607
1608#ifndef CONFIG_SPL_BUILD
1609 pmuclk_init(priv->pmucru);
1610#endif
Kever Yang5e79f442016-08-12 17:47:15 +08001611 return 0;
1612}
1613
Simon Glassd1998a92020-12-03 16:55:21 -07001614static int rk3399_pmuclk_of_to_plat(struct udevice *dev)
Kever Yang5e79f442016-08-12 17:47:15 +08001615{
Kever Yang5ae2fd92017-02-13 17:38:56 +08001616#if !CONFIG_IS_ENABLED(OF_PLATDATA)
Kever Yang5e79f442016-08-12 17:47:15 +08001617 struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);
1618
Philipp Tomsich75c78592017-09-12 17:32:24 +02001619 priv->pmucru = dev_read_addr_ptr(dev);
Kever Yang5ae2fd92017-02-13 17:38:56 +08001620#endif
Kever Yang5e79f442016-08-12 17:47:15 +08001621 return 0;
1622}
1623
Elaine Zhang538f67c2017-12-19 18:22:38 +08001624static int rk3399_pmuclk_bind(struct udevice *dev)
1625{
Alper Nebi Yasak957a3e52020-10-05 09:57:29 +03001626#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
Elaine Zhang538f67c2017-12-19 18:22:38 +08001627 int ret;
1628
1629 ret = offsetof(struct rk3399_pmucru, pmucru_softrst_con[0]);
1630 ret = rockchip_reset_bind(dev, ret, 2);
1631 if (ret)
 1632		debug("Warning: software reset driver bind failed\n");
1633#endif
1634 return 0;
1635}
1636
Kever Yang5e79f442016-08-12 17:47:15 +08001637static const struct udevice_id rk3399_pmuclk_ids[] = {
1638 { .compatible = "rockchip,rk3399-pmucru" },
1639 { }
1640};
1641
Simon Glassc8a6bc92016-10-01 20:04:51 -06001642U_BOOT_DRIVER(rockchip_rk3399_pmuclk) = {
Kever Yang5ae2fd92017-02-13 17:38:56 +08001643 .name = "rockchip_rk3399_pmucru",
Kever Yang5e79f442016-08-12 17:47:15 +08001644 .id = UCLASS_CLK,
1645 .of_match = rk3399_pmuclk_ids,
Simon Glass41575d82020-12-03 16:55:17 -07001646 .priv_auto = sizeof(struct rk3399_pmuclk_priv),
Simon Glassd1998a92020-12-03 16:55:21 -07001647 .of_to_plat = rk3399_pmuclk_of_to_plat,
Kever Yang5e79f442016-08-12 17:47:15 +08001648 .ops = &rk3399_pmuclk_ops,
1649 .probe = rk3399_pmuclk_probe,
Elaine Zhang538f67c2017-12-19 18:22:38 +08001650 .bind = rk3399_pmuclk_bind,
Kever Yang5ae2fd92017-02-13 17:38:56 +08001651#if CONFIG_IS_ENABLED(OF_PLATDATA)
Simon Glasscaa4daa2020-12-03 16:55:18 -07001652 .plat_auto = sizeof(struct rk3399_pmuclk_plat),
Kever Yang5ae2fd92017-02-13 17:38:56 +08001653#endif
Kever Yang5e79f442016-08-12 17:47:15 +08001654};