/*
 * Copyright (C) 2013 Altera Corporation <www.altera.com>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <common.h>
#include <asm/io.h>
#include <asm/arch/clock_manager.h>

DECLARE_GLOBAL_DATA_PTR;

static const struct socfpga_clock_manager *clock_manager_base =
		(struct socfpga_clock_manager *)SOCFPGA_CLKMGR_ADDRESS;

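/*
 * Wait until all of the PLL lock bits given in @mask have read back as set
 * for several consecutive polls of the interrupt status register, so a
 * momentary lock indication is not mistaken for a stable lock.
 */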
static void cm_wait_for_lock(uint32_t mask)
{
	register uint32_t inter_val;
	uint32_t retry = 0;
	do {
		inter_val = readl(&clock_manager_base->inter) & mask;
		if (inter_val == mask)
			retry++;
		else
			retry = 0;
		if (retry >= 10)
			break;
	} while (1);
}

/* function to poll the fsm busy bit */
static void cm_wait_for_fsm(void)
{
	while (readl(&clock_manager_base->stat) & CLKMGR_STAT_BUSY)
		;
}

/*
 * function to write the bypass register which requires a poll of the
 * busy bit
 */
static void cm_write_bypass(uint32_t val)
{
	writel(val, &clock_manager_base->bypass);
	cm_wait_for_fsm();
}

/* function to write the ctrl register which requires a poll of the busy bit */
static void cm_write_ctrl(uint32_t val)
{
	writel(val, &clock_manager_base->ctrl);
	cm_wait_for_fsm();
}

/* function to write a clock register that has phase information */
static void cm_write_with_phase(uint32_t value,
				uint32_t reg_address, uint32_t mask)
{
	/* poll until phase is zero */
	while (readl(reg_address) & mask)
		;

	writel(value, reg_address);

	while (readl(reg_address) & mask)
		;
}

/*
 * Setup clocks while making no assumptions about previous state of the clocks.
 *
 * Start by being paranoid and gate all sw managed clocks
 * Put all plls in bypass
 * Put all plls VCO registers back to reset value (bandgap power down).
 * Put peripheral and main pll src to reset value to avoid glitch.
 * Delay 5 us.
 * Deassert bandgap power down and set numerator and denominator
 * Start 7 us timer.
 * set internal dividers
 * Wait for 7 us timer.
 * Enable plls
 * Set external dividers while plls are locking
 * Wait for pll lock
 * Assert/deassert outreset all.
 * Take all plls out of bypass
 * Clear safe mode
 * set source main and peripheral clocks
 * Ungate clocks
 */

void cm_basic_init(const struct cm_config * const cfg)
{
	unsigned long end;

	/* Start by being paranoid and gate all sw managed clocks */

	/*
	 * We need to disable the nandclk and then do another APB access
	 * before gating off the rest of the peripheral clocks.
	 */
	writel(~CLKMGR_PERPLLGRP_EN_NANDCLK_MASK &
	       readl(&clock_manager_base->per_pll.en),
	       &clock_manager_base->per_pll.en);

	/* DO NOT GATE OFF DEBUG CLOCKS & BRIDGE CLOCKS */
	writel(CLKMGR_MAINPLLGRP_EN_DBGTIMERCLK_MASK |
	       CLKMGR_MAINPLLGRP_EN_DBGTRACECLK_MASK |
	       CLKMGR_MAINPLLGRP_EN_DBGCLK_MASK |
	       CLKMGR_MAINPLLGRP_EN_DBGATCLK_MASK |
	       CLKMGR_MAINPLLGRP_EN_S2FUSER0CLK_MASK |
	       CLKMGR_MAINPLLGRP_EN_L4MPCLK_MASK,
	       &clock_manager_base->main_pll.en);

	writel(0, &clock_manager_base->sdr_pll.en);

	/* now we can gate off the rest of the peripheral clocks */
	writel(0, &clock_manager_base->per_pll.en);

	/* Put all plls in bypass */
	cm_write_bypass(CLKMGR_BYPASS_PERPLL | CLKMGR_BYPASS_SDRPLL |
			CLKMGR_BYPASS_MAINPLL);

	/* Put all plls VCO registers back to reset value. */
	writel(CLKMGR_MAINPLLGRP_VCO_RESET_VALUE &
	       ~CLKMGR_MAINPLLGRP_VCO_REGEXTSEL_MASK,
	       &clock_manager_base->main_pll.vco);
	writel(CLKMGR_PERPLLGRP_VCO_RESET_VALUE &
	       ~CLKMGR_PERPLLGRP_VCO_REGEXTSEL_MASK,
	       &clock_manager_base->per_pll.vco);
	writel(CLKMGR_SDRPLLGRP_VCO_RESET_VALUE &
	       ~CLKMGR_SDRPLLGRP_VCO_REGEXTSEL_MASK,
	       &clock_manager_base->sdr_pll.vco);

	/*
	 * The clocks to the flash devices and the L4_MAIN clocks can
	 * glitch when coming out of safe mode if their source values
	 * are different from their reset value. So the trick is to
	 * put them back to their reset state, and change input
	 * after exiting safe mode but before ungating the clocks.
	 */
	writel(CLKMGR_PERPLLGRP_SRC_RESET_VALUE,
	       &clock_manager_base->per_pll.src);
	writel(CLKMGR_MAINPLLGRP_L4SRC_RESET_VALUE,
	       &clock_manager_base->main_pll.l4src);

	/* read back for the required 5 us delay. */
	readl(&clock_manager_base->main_pll.vco);
	readl(&clock_manager_base->per_pll.vco);
	readl(&clock_manager_base->sdr_pll.vco);


	/*
	 * We made sure bgpwrdn was asserted for 5 us. Now deassert BG PWR DN
	 * with numerator and denominator.
	 */
	writel(cfg->main_vco_base, &clock_manager_base->main_pll.vco);
	writel(cfg->peri_vco_base, &clock_manager_base->per_pll.vco);
	writel(cfg->sdram_vco_base, &clock_manager_base->sdr_pll.vco);

	/*
	 * Time starts here. Must wait 7 us from
	 * BGPWRDN_SET(0) to VCO_ENABLE_SET(1).
	 */
	end = timer_get_us() + 7;

	/* main mpu */
	writel(cfg->mpuclk, &clock_manager_base->main_pll.mpuclk);

	/* altera group mpuclk */
	writel(cfg->altera_grp_mpuclk, &clock_manager_base->altera.mpuclk);

	/* main main clock */
	writel(cfg->mainclk, &clock_manager_base->main_pll.mainclk);

	/* main for dbg */
	writel(cfg->dbgatclk, &clock_manager_base->main_pll.dbgatclk);

	/* main for cfgs2fuser0clk */
	writel(cfg->cfg2fuser0clk,
	       &clock_manager_base->main_pll.cfgs2fuser0clk);

	/* Peri emac0 50 MHz default to RMII */
	writel(cfg->emac0clk, &clock_manager_base->per_pll.emac0clk);

	/* Peri emac1 50 MHz default to RMII */
	writel(cfg->emac1clk, &clock_manager_base->per_pll.emac1clk);

	/* Peri QSPI */
	writel(cfg->mainqspiclk, &clock_manager_base->main_pll.mainqspiclk);

	writel(cfg->perqspiclk, &clock_manager_base->per_pll.perqspiclk);

	/* Peri pernandsdmmcclk */
	writel(cfg->mainnandsdmmcclk,
	       &clock_manager_base->main_pll.mainnandsdmmcclk);

	writel(cfg->pernandsdmmcclk,
	       &clock_manager_base->per_pll.pernandsdmmcclk);

	/* Peri perbaseclk */
	writel(cfg->perbaseclk, &clock_manager_base->per_pll.perbaseclk);

	/* Peri s2fuser1clk */
	writel(cfg->s2fuser1clk, &clock_manager_base->per_pll.s2fuser1clk);

	/* 7 us must have elapsed before we can enable the VCO */
	while (timer_get_us() < end)
		;

	/* Enable vco */
	/* main pll vco */
	writel(cfg->main_vco_base | CLKMGR_MAINPLLGRP_VCO_EN,
	       &clock_manager_base->main_pll.vco);

	/* peripheral pll */
	writel(cfg->peri_vco_base | CLKMGR_MAINPLLGRP_VCO_EN,
	       &clock_manager_base->per_pll.vco);

	/* sdram pll vco */
	writel(cfg->sdram_vco_base | CLKMGR_MAINPLLGRP_VCO_EN,
	       &clock_manager_base->sdr_pll.vco);

	/* L3 MP and L3 SP */
	writel(cfg->maindiv, &clock_manager_base->main_pll.maindiv);

	writel(cfg->dbgdiv, &clock_manager_base->main_pll.dbgdiv);

	writel(cfg->tracediv, &clock_manager_base->main_pll.tracediv);

	/* L4 MP, L4 SP, can0, and can1 */
	writel(cfg->perdiv, &clock_manager_base->per_pll.div);

	writel(cfg->gpiodiv, &clock_manager_base->per_pll.gpiodiv);

#define LOCKED_MASK \
	(CLKMGR_INTER_SDRPLLLOCKED_MASK | \
	CLKMGR_INTER_PERPLLLOCKED_MASK | \
	CLKMGR_INTER_MAINPLLLOCKED_MASK)

	cm_wait_for_lock(LOCKED_MASK);

	/* write the sdram clock counters before toggling outreset all */
	writel(cfg->ddrdqsclk & CLKMGR_SDRPLLGRP_DDRDQSCLK_CNT_MASK,
	       &clock_manager_base->sdr_pll.ddrdqsclk);

	writel(cfg->ddr2xdqsclk & CLKMGR_SDRPLLGRP_DDR2XDQSCLK_CNT_MASK,
	       &clock_manager_base->sdr_pll.ddr2xdqsclk);

	writel(cfg->ddrdqclk & CLKMGR_SDRPLLGRP_DDRDQCLK_CNT_MASK,
	       &clock_manager_base->sdr_pll.ddrdqclk);

	writel(cfg->s2fuser2clk & CLKMGR_SDRPLLGRP_S2FUSER2CLK_CNT_MASK,
	       &clock_manager_base->sdr_pll.s2fuser2clk);

	/*
	 * after locking, but before taking out of bypass
	 * assert/deassert outresetall
	 */
	uint32_t mainvco = readl(&clock_manager_base->main_pll.vco);

	/* assert main outresetall */
	writel(mainvco | CLKMGR_MAINPLLGRP_VCO_OUTRESETALL_MASK,
	       &clock_manager_base->main_pll.vco);

	uint32_t periphvco = readl(&clock_manager_base->per_pll.vco);

	/* assert periph outresetall */
	writel(periphvco | CLKMGR_PERPLLGRP_VCO_OUTRESETALL_MASK,
	       &clock_manager_base->per_pll.vco);

	/* assert sdram outresetall */
	writel(cfg->sdram_vco_base | CLKMGR_MAINPLLGRP_VCO_EN |
	       CLKMGR_SDRPLLGRP_VCO_OUTRESETALL,
	       &clock_manager_base->sdr_pll.vco);

	/* deassert main outresetall */
	writel(mainvco & ~CLKMGR_MAINPLLGRP_VCO_OUTRESETALL_MASK,
	       &clock_manager_base->main_pll.vco);

	/* deassert periph outresetall */
	writel(periphvco & ~CLKMGR_PERPLLGRP_VCO_OUTRESETALL_MASK,
	       &clock_manager_base->per_pll.vco);

	/* deassert sdram outresetall */
	writel(cfg->sdram_vco_base | CLKMGR_MAINPLLGRP_VCO_EN,
	       &clock_manager_base->sdr_pll.vco);

	/*
	 * now that we've toggled outreset all, all the clocks
	 * are aligned nicely; so we can change any phase.
	 */
	cm_write_with_phase(cfg->ddrdqsclk,
			    (uint32_t)&clock_manager_base->sdr_pll.ddrdqsclk,
			    CLKMGR_SDRPLLGRP_DDRDQSCLK_PHASE_MASK);

	/* SDRAM DDR2XDQSCLK */
	cm_write_with_phase(cfg->ddr2xdqsclk,
			    (uint32_t)&clock_manager_base->sdr_pll.ddr2xdqsclk,
			    CLKMGR_SDRPLLGRP_DDR2XDQSCLK_PHASE_MASK);

	cm_write_with_phase(cfg->ddrdqclk,
			    (uint32_t)&clock_manager_base->sdr_pll.ddrdqclk,
			    CLKMGR_SDRPLLGRP_DDRDQCLK_PHASE_MASK);

	cm_write_with_phase(cfg->s2fuser2clk,
			    (uint32_t)&clock_manager_base->sdr_pll.s2fuser2clk,
			    CLKMGR_SDRPLLGRP_S2FUSER2CLK_PHASE_MASK);

	/* Take all three PLLs out of bypass when safe mode is cleared. */
	cm_write_bypass(0);

	/* clear safe mode */
	cm_write_ctrl(readl(&clock_manager_base->ctrl) | CLKMGR_CTRL_SAFEMODE);

	/*
	 * now that safe mode is cleared and the clocks are gated,
	 * it is safe to change the source mux for the flashes and the L4_MAIN
	 */
	writel(cfg->persrc, &clock_manager_base->per_pll.src);
	writel(cfg->l4src, &clock_manager_base->main_pll.l4src);

	/* Now ungate non-hw-managed clocks */
	writel(~0, &clock_manager_base->main_pll.en);
	writel(~0, &clock_manager_base->per_pll.en);
	writel(~0, &clock_manager_base->sdr_pll.en);

	/* Clear the loss of lock bits (write 1 to clear) */
	writel(CLKMGR_INTER_SDRPLLLOST_MASK | CLKMGR_INTER_PERPLLLOST_MASK |
	       CLKMGR_INTER_MAINPLLLOST_MASK,
	       &clock_manager_base->inter);
}

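/*
 * The main PLL VCO frequency is the EOSC1 reference divided by (denom + 1)
 * and multiplied by (numer + 1), as programmed in the VCO register.
 */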
static unsigned int cm_get_main_vco_clk_hz(void)
{
	uint32_t reg, clock;

	/* get the main VCO clock */
	reg = readl(&clock_manager_base->main_pll.vco);
	clock = cm_get_osc_clk_hz(1);
	clock /= ((reg & CLKMGR_MAINPLLGRP_VCO_DENOM_MASK) >>
		  CLKMGR_MAINPLLGRP_VCO_DENOM_OFFSET) + 1;
	clock *= ((reg & CLKMGR_MAINPLLGRP_VCO_NUMER_MASK) >>
		  CLKMGR_MAINPLLGRP_VCO_NUMER_OFFSET) + 1;

	return clock;
}

static unsigned int cm_get_per_vco_clk_hz(void)
{
	uint32_t reg, clock = 0;

	/* identify PER PLL clock source */
	reg = readl(&clock_manager_base->per_pll.vco);
	reg = (reg & CLKMGR_PERPLLGRP_VCO_SSRC_MASK) >>
	      CLKMGR_PERPLLGRP_VCO_SSRC_OFFSET;
	if (reg == CLKMGR_VCO_SSRC_EOSC1)
		clock = cm_get_osc_clk_hz(1);
	else if (reg == CLKMGR_VCO_SSRC_EOSC2)
		clock = cm_get_osc_clk_hz(2);
	else if (reg == CLKMGR_VCO_SSRC_F2S)
		clock = cm_get_f2s_per_ref_clk_hz();

	/* get the PER VCO clock */
	reg = readl(&clock_manager_base->per_pll.vco);
	clock /= ((reg & CLKMGR_PERPLLGRP_VCO_DENOM_MASK) >>
		  CLKMGR_PERPLLGRP_VCO_DENOM_OFFSET) + 1;
	clock *= ((reg & CLKMGR_PERPLLGRP_VCO_NUMER_MASK) >>
		  CLKMGR_PERPLLGRP_VCO_NUMER_OFFSET) + 1;

	return clock;
}

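/*
 * The MPU clock is the main VCO output divided by both the altera group and
 * the main PLL group mpuclk counters (each divides by the counter value + 1).
 */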
unsigned long cm_get_mpu_clk_hz(void)
{
	uint32_t reg, clock;

	clock = cm_get_main_vco_clk_hz();

	/* get the MPU clock */
	reg = readl(&clock_manager_base->altera.mpuclk);
	clock /= (reg + 1);
	reg = readl(&clock_manager_base->main_pll.mpuclk);
	clock /= (reg + 1);
	return clock;
}

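/*
 * SDRAM clock: take the selected SDRAM PLL reference (EOSC1, EOSC2 or the
 * FPGA fabric), scale it by the VCO numerator/denominator, then divide by
 * the DDR_DQS clock counter.
 */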
unsigned long cm_get_sdram_clk_hz(void)
{
	uint32_t reg, clock = 0;

	/* identify SDRAM PLL clock source */
	reg = readl(&clock_manager_base->sdr_pll.vco);
	reg = (reg & CLKMGR_SDRPLLGRP_VCO_SSRC_MASK) >>
	      CLKMGR_SDRPLLGRP_VCO_SSRC_OFFSET;
	if (reg == CLKMGR_VCO_SSRC_EOSC1)
		clock = cm_get_osc_clk_hz(1);
	else if (reg == CLKMGR_VCO_SSRC_EOSC2)
		clock = cm_get_osc_clk_hz(2);
	else if (reg == CLKMGR_VCO_SSRC_F2S)
		clock = cm_get_f2s_sdr_ref_clk_hz();

	/* get the SDRAM VCO clock */
	reg = readl(&clock_manager_base->sdr_pll.vco);
	clock /= ((reg & CLKMGR_SDRPLLGRP_VCO_DENOM_MASK) >>
		  CLKMGR_SDRPLLGRP_VCO_DENOM_OFFSET) + 1;
	clock *= ((reg & CLKMGR_SDRPLLGRP_VCO_NUMER_MASK) >>
		  CLKMGR_SDRPLLGRP_VCO_NUMER_OFFSET) + 1;

	/* get the SDRAM (DDR_DQS) clock */
	reg = readl(&clock_manager_base->sdr_pll.ddrdqsclk);
	reg = (reg & CLKMGR_SDRPLLGRP_DDRDQSCLK_CNT_MASK) >>
	      CLKMGR_SDRPLLGRP_DDRDQSCLK_CNT_OFFSET;
	clock /= (reg + 1);

	return clock;
}

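/*
 * L4 SP clock (the UART reference): sourced from either the main clock or
 * the peripheral base clock, then divided by a power-of-two L4 SP divider
 * taken from the main PLL maindiv register.
 */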
unsigned int cm_get_l4_sp_clk_hz(void)
{
	uint32_t reg, clock = 0;

	/* identify the source of L4 SP clock */
	reg = readl(&clock_manager_base->main_pll.l4src);
	reg = (reg & CLKMGR_MAINPLLGRP_L4SRC_L4SP) >>
	      CLKMGR_MAINPLLGRP_L4SRC_L4SP_OFFSET;

	if (reg == CLKMGR_L4_SP_CLK_SRC_MAINPLL) {
		clock = cm_get_main_vco_clk_hz();

		/* get the clock prior to the L4 SP divider (main clk) */
		reg = readl(&clock_manager_base->altera.mainclk);
		clock /= (reg + 1);
		reg = readl(&clock_manager_base->main_pll.mainclk);
		clock /= (reg + 1);
	} else if (reg == CLKMGR_L4_SP_CLK_SRC_PERPLL) {
		clock = cm_get_per_vco_clk_hz();

		/* get the clock prior to the L4 SP divider (periph_base_clk) */
		reg = readl(&clock_manager_base->per_pll.perbaseclk);
		clock /= (reg + 1);
	}

	/* get the L4 SP clock which is supplied to the UART */
	reg = readl(&clock_manager_base->main_pll.maindiv);
	reg = (reg & CLKMGR_MAINPLLGRP_MAINDIV_L4SPCLK_MASK) >>
	      CLKMGR_MAINPLLGRP_MAINDIV_L4SPCLK_OFFSET;
	clock = clock / (1 << reg);

	return clock;
}

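/*
 * MMC controller clock: the selected source (f2s, main or peripheral PLL)
 * is divided by its nand/sdmmc counter and then by the fixed divide-by-4
 * in the SDMMC wrapper.
 */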
unsigned int cm_get_mmc_controller_clk_hz(void)
{
	uint32_t reg, clock = 0;

	/* identify the source of MMC clock */
	reg = readl(&clock_manager_base->per_pll.src);
	reg = (reg & CLKMGR_PERPLLGRP_SRC_SDMMC_MASK) >>
	      CLKMGR_PERPLLGRP_SRC_SDMMC_OFFSET;

	if (reg == CLKMGR_SDMMC_CLK_SRC_F2S) {
		clock = cm_get_f2s_per_ref_clk_hz();
	} else if (reg == CLKMGR_SDMMC_CLK_SRC_MAIN) {
		clock = cm_get_main_vco_clk_hz();

		/* get the SDMMC clock */
		reg = readl(&clock_manager_base->main_pll.mainnandsdmmcclk);
		clock /= (reg + 1);
	} else if (reg == CLKMGR_SDMMC_CLK_SRC_PER) {
		clock = cm_get_per_vco_clk_hz();

		/* get the SDMMC clock */
		reg = readl(&clock_manager_base->per_pll.pernandsdmmcclk);
		clock /= (reg + 1);
	}

	/* further divide by 4 as we have fixed divider at wrapper */
	clock /= 4;
	return clock;
}

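/*
 * QSPI controller clock: same source selection scheme as SDMMC, but using
 * the qspi counters and without the fixed wrapper divider.
 */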
unsigned int cm_get_qspi_controller_clk_hz(void)
{
	uint32_t reg, clock = 0;

	/* identify the source of QSPI clock */
	reg = readl(&clock_manager_base->per_pll.src);
	reg = (reg & CLKMGR_PERPLLGRP_SRC_QSPI_MASK) >>
	      CLKMGR_PERPLLGRP_SRC_QSPI_OFFSET;

	if (reg == CLKMGR_QSPI_CLK_SRC_F2S) {
		clock = cm_get_f2s_per_ref_clk_hz();
	} else if (reg == CLKMGR_QSPI_CLK_SRC_MAIN) {
		clock = cm_get_main_vco_clk_hz();

		/* get the qspi clock */
		reg = readl(&clock_manager_base->main_pll.mainqspiclk);
		clock /= (reg + 1);
	} else if (reg == CLKMGR_QSPI_CLK_SRC_PER) {
		clock = cm_get_per_vco_clk_hz();

		/* get the qspi clock */
		reg = readl(&clock_manager_base->per_pll.perqspiclk);
		clock /= (reg + 1);
	}

	return clock;
}

unsigned int cm_get_spi_controller_clk_hz(void)
{
	uint32_t reg, clock = 0;

	clock = cm_get_per_vco_clk_hz();

	/* get the clock prior to the L4 SP divider (periph_base_clk) */
	reg = readl(&clock_manager_base->per_pll.perbaseclk);
	clock /= (reg + 1);

	return clock;
}

static void cm_print_clock_quick_summary(void)
{
	printf("MPU       %10ld kHz\n", cm_get_mpu_clk_hz() / 1000);
	printf("DDR       %10ld kHz\n", cm_get_sdram_clk_hz() / 1000);
	printf("EOSC1       %8d kHz\n", cm_get_osc_clk_hz(1) / 1000);
	printf("EOSC2       %8d kHz\n", cm_get_osc_clk_hz(2) / 1000);
	printf("F2S_SDR_REF %8d kHz\n", cm_get_f2s_sdr_ref_clk_hz() / 1000);
	printf("F2S_PER_REF %8d kHz\n", cm_get_f2s_per_ref_clk_hz() / 1000);
	printf("MMC         %8d kHz\n", cm_get_mmc_controller_clk_hz() / 1000);
	printf("QSPI        %8d kHz\n", cm_get_qspi_controller_clk_hz() / 1000);
	printf("UART        %8d kHz\n", cm_get_l4_sp_clk_hz() / 1000);
	printf("SPI         %8d kHz\n", cm_get_spi_controller_clk_hz() / 1000);
}

int set_cpu_clk_info(void)
{
	/* Calculate the clock frequencies required for drivers */
	cm_get_l4_sp_clk_hz();
	cm_get_mmc_controller_clk_hz();

	gd->bd->bi_arm_freq = cm_get_mpu_clk_hz() / 1000000;
	gd->bd->bi_dsp_freq = 0;
	gd->bd->bi_ddr_freq = cm_get_sdram_clk_hz() / 1000000;

	return 0;
}

int do_showclocks(cmd_tbl_t *cmdtp, int flag, int argc, char * const argv[])
{
	cm_print_clock_quick_summary();
	return 0;
}

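/* Register the 'clocks' shell command, which prints the clock summary above. */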
U_BOOT_CMD(
	clocks, CONFIG_SYS_MAXARGS, 1, do_showclocks,
	"display clocks",
	""
);