blob: a699a3664c335ac738bca1bbaf880f3e786337c4 [file] [log] [blame]
Tom Rini83d290c2018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0+
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +01002/*
3 * Copyright (C) 2017 Weidmüller Interface GmbH & Co. KG
4 * Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
5 *
6 * Copyright (C) 2013 Soren Brinkmann <soren.brinkmann@xilinx.com>
7 * Copyright (C) 2013 Xilinx, Inc. All rights reserved.
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +01008 */
9
10#include <common.h>
11#include <clk-uclass.h>
12#include <dm.h>
Simon Glassf7ae49f2020-05-10 11:40:05 -060013#include <log.h>
Simon Glass336d4612020-02-03 07:36:16 -070014#include <dm/device_compat.h>
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +010015#include <dm/lists.h>
16#include <errno.h>
17#include <asm/io.h>
18#include <asm/arch/clk.h>
19#include <asm/arch/hardware.h>
20#include <asm/arch/sys_proto.h>
21
/* Register bitfield defines */
/* PLL control registers: feedback divider (multiplier) field */
#define PLLCTRL_FBDIV_MASK	0x7f000
#define PLLCTRL_FBDIV_SHIFT	12
/* PLL control registers: forced bypass, power-down and reset bits */
#define PLLCTRL_BPFORCE_MASK	(1 << 4)
#define PLLCTRL_PWRDWN_MASK	2
#define PLLCTRL_PWRDWN_SHIFT	1
#define PLLCTRL_RESET_MASK	1
#define PLLCTRL_RESET_SHIFT	0

/* Largest value a 6-bit clock divider field can hold */
#define ZYNQ_CLK_MAXDIV		0x3f
/* Peripheral clock control: second divider, first divider, source select */
#define CLK_CTRL_DIV1_SHIFT	20
#define CLK_CTRL_DIV1_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV1_SHIFT)
#define CLK_CTRL_DIV0_SHIFT	8
#define CLK_CTRL_DIV0_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV0_SHIFT)
#define CLK_CTRL_SRCSEL_SHIFT	4
#define CLK_CTRL_SRCSEL_MASK	(0x3 << CLK_CTRL_SRCSEL_SHIFT)

/* DDR clock control: divider fields for the 2x and 3x clock domains */
#define CLK_CTRL_DIV2X_SHIFT	26
#define CLK_CTRL_DIV2X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV2X_SHIFT)
#define CLK_CTRL_DIV3X_SHIFT	20
#define CLK_CTRL_DIV3X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV3X_SHIFT)

DECLARE_GLOBAL_DATA_PTR;
45
#ifndef CONFIG_SPL_BUILD
/* GEM RX clock source: routed through MIO pins or EMIO (PL fabric) */
enum zynq_clk_rclk {mio_clk, emio_clk};
#endif

/* Per-device private data */
struct zynq_clk_priv {
	ulong ps_clk_freq;	/* PS reference clock, "ps-clk-frequency" DT prop */
#ifndef CONFIG_SPL_BUILD
	struct clk gem_emio_clk[2];	/* optional EMIO clocks for gem0/gem1 */
#endif
};
56
/*
 * Map a clock ID to the address of the SLCR register that controls it.
 *
 * Clocks that share a single control register (both SDIOs, UARTs, SPIs,
 * CANs) are matched with case ranges.  Any unhandled ID falls back to
 * the debug clock control register, same as the dbg_trc/dbg_apb range.
 */
static void *zynq_clk_get_register(enum zynq_clk id)
{
	switch (id) {
	case armpll_clk:
		return &slcr_base->arm_pll_ctrl;
	case ddrpll_clk:
		return &slcr_base->ddr_pll_ctrl;
	case iopll_clk:
		return &slcr_base->io_pll_ctrl;
	case lqspi_clk:
		return &slcr_base->lqspi_clk_ctrl;
	case smc_clk:
		return &slcr_base->smc_clk_ctrl;
	case pcap_clk:
		return &slcr_base->pcap_clk_ctrl;
	case sdio0_clk ... sdio1_clk:
		return &slcr_base->sdio_clk_ctrl;
	case uart0_clk ... uart1_clk:
		return &slcr_base->uart_clk_ctrl;
	case spi0_clk ... spi1_clk:
		return &slcr_base->spi_clk_ctrl;
#ifndef CONFIG_SPL_BUILD
	case dci_clk:
		return &slcr_base->dci_clk_ctrl;
	case gem0_clk:
		return &slcr_base->gem0_clk_ctrl;
	case gem1_clk:
		return &slcr_base->gem1_clk_ctrl;
	case fclk0_clk:
		return &slcr_base->fpga0_clk_ctrl;
	case fclk1_clk:
		return &slcr_base->fpga1_clk_ctrl;
	case fclk2_clk:
		return &slcr_base->fpga2_clk_ctrl;
	case fclk3_clk:
		return &slcr_base->fpga3_clk_ctrl;
	case can0_clk ... can1_clk:
		return &slcr_base->can_clk_ctrl;
	case dbg_trc_clk ... dbg_apb_clk:
		/* fall through */
#endif
	default:
		return &slcr_base->dbg_clk_ctrl;
	}
}
102
103static enum zynq_clk zynq_clk_get_cpu_pll(u32 clk_ctrl)
104{
105 u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;
106
107 switch (srcsel) {
108 case 2:
109 return ddrpll_clk;
110 case 3:
111 return iopll_clk;
112 case 0 ... 1:
113 default:
114 return armpll_clk;
115 }
116}
117
118static enum zynq_clk zynq_clk_get_peripheral_pll(u32 clk_ctrl)
119{
120 u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;
121
122 switch (srcsel) {
123 case 2:
124 return armpll_clk;
125 case 3:
126 return ddrpll_clk;
127 case 0 ... 1:
128 default:
129 return iopll_clk;
130 }
131}
132
133static ulong zynq_clk_get_pll_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
134{
135 u32 clk_ctrl, reset, pwrdwn, mul, bypass;
136
137 clk_ctrl = readl(zynq_clk_get_register(id));
138
139 reset = (clk_ctrl & PLLCTRL_RESET_MASK) >> PLLCTRL_RESET_SHIFT;
140 pwrdwn = (clk_ctrl & PLLCTRL_PWRDWN_MASK) >> PLLCTRL_PWRDWN_SHIFT;
141 if (reset || pwrdwn)
142 return 0;
143
144 bypass = clk_ctrl & PLLCTRL_BPFORCE_MASK;
145 if (bypass)
146 mul = 1;
147 else
148 mul = (clk_ctrl & PLLCTRL_FBDIV_MASK) >> PLLCTRL_FBDIV_SHIFT;
149
150 return priv->ps_clk_freq * mul;
151}
152
153#ifndef CONFIG_SPL_BUILD
154static enum zynq_clk_rclk zynq_clk_get_gem_rclk(enum zynq_clk id)
155{
156 u32 clk_ctrl, srcsel;
157
158 if (id == gem0_clk)
159 clk_ctrl = readl(&slcr_base->gem0_rclk_ctrl);
160 else
161 clk_ctrl = readl(&slcr_base->gem1_rclk_ctrl);
162
163 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;
164 if (srcsel)
165 return emio_clk;
166 else
167 return mio_clk;
168}
169#endif
170
/*
 * Compute the rate of one of the CPU clock domains (6x4x, 3x2x, 2x, 1x)
 * from the ARM clock control register and the selected CPU PLL.
 *
 * The divider is built up through deliberate switch fallthroughs:
 * cpu_1x is half of cpu_2x, and cpu_3or2x is half of cpu_6or4x.
 */
static ulong zynq_clk_get_cpu_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_621, clk_ctrl, div;
	enum zynq_clk pll;

	clk_ctrl = readl(&slcr_base->arm_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;

	switch (id) {
	case cpu_1x_clk:
		div *= 2;
		/* fall through */
	case cpu_2x_clk:
		/* CLK_621_TRUE selects 6:2:1 (ratio 3) vs 4:2:1 (ratio 2) */
		clk_621 = readl(&slcr_base->clk_621_true) & 1;
		div *= 2 + clk_621;
		break;
	case cpu_3or2x_clk:
		div *= 2;
		/* fall through */
	case cpu_6or4x_clk:
		break;
	default:
		return 0;
	}

	pll = zynq_clk_get_cpu_pll(clk_ctrl);

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, pll), div);
}
201
202#ifndef CONFIG_SPL_BUILD
203static ulong zynq_clk_get_ddr2x_rate(struct zynq_clk_priv *priv)
204{
205 u32 clk_ctrl, div;
206
207 clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);
208
209 div = (clk_ctrl & CLK_CTRL_DIV2X_MASK) >> CLK_CTRL_DIV2X_SHIFT;
210
211 return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
212}
213#endif
214
215static ulong zynq_clk_get_ddr3x_rate(struct zynq_clk_priv *priv)
216{
217 u32 clk_ctrl, div;
218
219 clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);
220
221 div = (clk_ctrl & CLK_CTRL_DIV3X_MASK) >> CLK_CTRL_DIV3X_SHIFT;
222
223 return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
224}
225
226#ifndef CONFIG_SPL_BUILD
227static ulong zynq_clk_get_dci_rate(struct zynq_clk_priv *priv)
228{
229 u32 clk_ctrl, div0, div1;
230
231 clk_ctrl = readl(&slcr_base->dci_clk_ctrl);
232
233 div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
234 div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;
235
236 return DIV_ROUND_CLOSEST(DIV_ROUND_CLOSEST(
237 zynq_clk_get_pll_rate(priv, ddrpll_clk), div0), div1);
238}
239#endif
240
241static ulong zynq_clk_get_peripheral_rate(struct zynq_clk_priv *priv,
242 enum zynq_clk id, bool two_divs)
243{
244 enum zynq_clk pll;
245 u32 clk_ctrl, div0;
246 u32 div1 = 1;
247
248 clk_ctrl = readl(zynq_clk_get_register(id));
249
250 div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
251 if (!div0)
252 div0 = 1;
253
254#ifndef CONFIG_SPL_BUILD
255 if (two_divs) {
256 div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;
257 if (!div1)
258 div1 = 1;
259 }
260#endif
261
262 pll = zynq_clk_get_peripheral_pll(clk_ctrl);
263
264 return
265 DIV_ROUND_CLOSEST(
266 DIV_ROUND_CLOSEST(
267 zynq_clk_get_pll_rate(priv, pll), div0),
268 div1);
269}
270
271#ifndef CONFIG_SPL_BUILD
272static ulong zynq_clk_get_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
273{
Stefan Herbrechtsmeier9bb803d2017-01-17 16:27:31 +0100274 struct clk *parent;
275
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +0100276 if (zynq_clk_get_gem_rclk(id) == mio_clk)
277 return zynq_clk_get_peripheral_rate(priv, id, true);
278
Stefan Herbrechtsmeier9bb803d2017-01-17 16:27:31 +0100279 parent = &priv->gem_emio_clk[id - gem0_clk];
280 if (parent->dev)
281 return clk_get_rate(parent);
282
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +0100283 debug("%s: gem%d emio rx clock source unknown\n", __func__,
284 id - gem0_clk);
285
286 return -ENOSYS;
287}
288
289static unsigned long zynq_clk_calc_peripheral_two_divs(ulong rate,
290 ulong pll_rate,
291 u32 *div0, u32 *div1)
292{
293 long new_err, best_err = (long)(~0UL >> 1);
294 ulong new_rate, best_rate = 0;
295 u32 d0, d1;
296
297 for (d0 = 1; d0 <= ZYNQ_CLK_MAXDIV; d0++) {
298 for (d1 = 1; d1 <= ZYNQ_CLK_MAXDIV >> 1; d1++) {
299 new_rate = DIV_ROUND_CLOSEST(
300 DIV_ROUND_CLOSEST(pll_rate, d0), d1);
301 new_err = abs(new_rate - rate);
302
303 if (new_err < best_err) {
304 *div0 = d0;
305 *div1 = d1;
306 best_err = new_err;
307 best_rate = new_rate;
308 }
309 }
310 }
311
312 return best_rate;
313}
314
315static ulong zynq_clk_set_peripheral_rate(struct zynq_clk_priv *priv,
316 enum zynq_clk id, ulong rate,
317 bool two_divs)
318{
319 enum zynq_clk pll;
320 u32 clk_ctrl, div0 = 0, div1 = 0;
321 ulong pll_rate, new_rate;
322 u32 *reg;
323
324 reg = zynq_clk_get_register(id);
325 clk_ctrl = readl(reg);
326
327 pll = zynq_clk_get_peripheral_pll(clk_ctrl);
328 pll_rate = zynq_clk_get_pll_rate(priv, pll);
329 clk_ctrl &= ~CLK_CTRL_DIV0_MASK;
330 if (two_divs) {
331 clk_ctrl &= ~CLK_CTRL_DIV1_MASK;
332 new_rate = zynq_clk_calc_peripheral_two_divs(rate, pll_rate,
333 &div0, &div1);
334 clk_ctrl |= div1 << CLK_CTRL_DIV1_SHIFT;
335 } else {
336 div0 = DIV_ROUND_CLOSEST(pll_rate, rate);
337 if (div0 > ZYNQ_CLK_MAXDIV)
338 div0 = ZYNQ_CLK_MAXDIV;
339 new_rate = DIV_ROUND_CLOSEST(rate, div0);
340 }
341 clk_ctrl |= div0 << CLK_CTRL_DIV0_SHIFT;
342
343 zynq_slcr_unlock();
344 writel(clk_ctrl, reg);
345 zynq_slcr_lock();
346
347 return new_rate;
348}
349
350static ulong zynq_clk_set_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id,
351 ulong rate)
352{
Stefan Herbrechtsmeier9bb803d2017-01-17 16:27:31 +0100353 struct clk *parent;
354
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +0100355 if (zynq_clk_get_gem_rclk(id) == mio_clk)
356 return zynq_clk_set_peripheral_rate(priv, id, rate, true);
357
Stefan Herbrechtsmeier9bb803d2017-01-17 16:27:31 +0100358 parent = &priv->gem_emio_clk[id - gem0_clk];
359 if (parent->dev)
360 return clk_set_rate(parent, rate);
361
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +0100362 debug("%s: gem%d emio rx clock source unknown\n", __func__,
363 id - gem0_clk);
364
365 return -ENOSYS;
366}
367#endif
368
369#ifndef CONFIG_SPL_BUILD
/*
 * clk_ops .get_rate implementation (full U-Boot build).
 *
 * Dispatches on the clock ID; the case ranges rely on the ordering of
 * enum zynq_clk.  AMBA/APER clocks run at the cpu_1x rate and the DMA
 * clock at the cpu_2x rate.
 */
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case armpll_clk ... iopll_clk:
		return zynq_clk_get_pll_rate(priv, id);
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr2x_clk:
		return zynq_clk_get_ddr2x_rate(priv);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case dci_clk:
		return zynq_clk_get_dci_rate(priv);
	case gem0_clk ... gem1_clk:
		return zynq_clk_get_gem_rate(priv, id);
	case fclk0_clk ... can1_clk:
		/* these clocks have two cascaded divider stages */
		two_divs = true;
		/* fall through */
	case dbg_trc_clk ... dbg_apb_clk:
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, two_divs);
	case dma_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_2x_clk);
	case usb0_aper_clk ... swdt_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}
404
/*
 * clk_ops .set_rate implementation (full U-Boot build).
 *
 * Only generator-driven clocks can be reprogrammed; everything else
 * (PLLs, CPU and DDR domains) returns -ENXIO.
 */
static ulong zynq_clk_set_rate(struct clk *clk, ulong rate)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case gem0_clk ... gem1_clk:
		return zynq_clk_set_gem_rate(priv, id, rate);
	case fclk0_clk ... can1_clk:
		/* these clocks have two cascaded divider stages */
		two_divs = true;
		/* fall through */
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
	case dbg_trc_clk ... dbg_apb_clk:
		return zynq_clk_set_peripheral_rate(priv, id, rate, two_divs);
	default:
		return -ENXIO;
	}
}
425#else
426static ulong zynq_clk_get_rate(struct clk *clk)
427{
428 struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
429 enum zynq_clk id = clk->id;
430
431 switch (id) {
432 case cpu_6or4x_clk ... cpu_1x_clk:
433 return zynq_clk_get_cpu_rate(priv, id);
434 case ddr3x_clk:
435 return zynq_clk_get_ddr3x_rate(priv);
436 case lqspi_clk ... pcap_clk:
437 case sdio0_clk ... spi1_clk:
438 return zynq_clk_get_peripheral_rate(priv, id, 0);
Hannes Schmelzer2aff7222019-02-14 08:54:42 +0100439 case i2c0_aper_clk ... i2c1_aper_clk:
440 return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +0100441 default:
442 return -ENXIO;
443 }
444}
445#endif
446
/* UCLASS_CLK operations; rate setting is only available outside SPL */
static struct clk_ops zynq_clk_ops = {
	.get_rate = zynq_clk_get_rate,
#ifndef CONFIG_SPL_BUILD
	.set_rate = zynq_clk_set_rate,
#endif
};
453
454static int zynq_clk_probe(struct udevice *dev)
455{
456 struct zynq_clk_priv *priv = dev_get_priv(dev);
Stefan Herbrechtsmeier9bb803d2017-01-17 16:27:31 +0100457#ifndef CONFIG_SPL_BUILD
458 unsigned int i;
459 char name[16];
460 int ret;
461
462 for (i = 0; i < 2; i++) {
463 sprintf(name, "gem%d_emio_clk", i);
464 ret = clk_get_by_name(dev, name, &priv->gem_emio_clk[i]);
Simon Glassaa9bb092017-05-30 21:47:29 -0600465 if (ret < 0 && ret != -ENODATA) {
Stefan Herbrechtsmeier9bb803d2017-01-17 16:27:31 +0100466 dev_err(dev, "failed to get %s clock\n", name);
467 return ret;
468 }
469 }
470#endif
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +0100471
Simon Glassda409cc2017-05-17 17:18:09 -0600472 priv->ps_clk_freq = fdtdec_get_uint(gd->fdt_blob, dev_of_offset(dev),
Stefan Herbrechtsmeier3a64b252017-01-17 16:27:29 +0100473 "ps-clk-frequency", 33333333UL);
474
475 return 0;
476}
477
/* Device tree match table */
static const struct udevice_id zynq_clk_ids[] = {
	{ .compatible = "xlnx,ps7-clkc"},
	{}
};
482
/* Zynq-7000 clock controller driver registration */
U_BOOT_DRIVER(zynq_clk) = {
	.name		= "zynq_clk",
	.id		= UCLASS_CLK,
	.of_match	= zynq_clk_ids,
	.ops		= &zynq_clk_ops,
	.priv_auto_alloc_size = sizeof(struct zynq_clk_priv),
	.probe		= zynq_clk_probe,
};