// SPDX-License-Identifier: GPL-2.0+
/*
 * DDR Configuration for AM33xx devices.
 *
 * Copyright (C) 2011 Texas Instruments Incorporated - http://www.ti.com/
 */

#include <asm/arch/cpu.h>
#include <asm/arch/ddr_defs.h>
#include <asm/arch/sys_proto.h>
#include <asm/io.h>
#include <asm/emif.h>

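/*
 * Note: these helpers are normally not called directly by board code; they
 * are typically invoked from the SPL DDR init path (e.g. config_ddr() in the
 * AM33xx/AM43xx emif4 code), which usually programs the command/data macros
 * and I/O control first, then the DDR PHY, the SDRAM timings and finally the
 * SDRAM/EMIF configuration itself. The exact sequence is SoC specific.
 */
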
/**
 * Base address for EMIF instances
 */
static struct emif_reg_struct *emif_reg[2] = {
        (struct emif_reg_struct *)EMIF4_0_CFG_BASE,
        (struct emif_reg_struct *)EMIF4_1_CFG_BASE};

/**
 * Base addresses for DDR PHY cmd/data regs
 */
static struct ddr_cmd_regs *ddr_cmd_reg[2] = {
        (struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
        (struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};

static struct ddr_data_regs *ddr_data_reg[2] = {
        (struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
        (struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};

/**
 * Base address for ddr io control instances
 */
static struct ddr_cmdtctrl *ioctrl_reg = {
        (struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR};

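/**
 * get_mr - read an LPDDR2 mode register through the EMIF
 *
 * Reads mode register @mr_addr for chip select @cs on EMIF @nr. If all four
 * byte lanes report the same value, only that byte is returned; otherwise
 * the raw per-lane value is returned unchanged.
 */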
static inline u32 get_mr(int nr, u32 cs, u32 mr_addr)
{
        u32 mr;

        mr_addr |= cs << EMIF_REG_CS_SHIFT;
        writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);

        mr = readl(&emif_reg[nr]->emif_lpddr2_mode_reg_data);
        debug("get_mr: EMIF1 cs %d mr %08x val 0x%x\n", cs, mr_addr, mr);
        if (((mr & 0x0000ff00) >> 8) == (mr & 0xff) &&
            ((mr & 0x00ff0000) >> 16) == (mr & 0xff) &&
            ((mr & 0xff000000) >> 24) == (mr & 0xff))
                return mr & 0xff;
        else
                return mr;
}

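/**
 * set_mr - write an LPDDR2 mode register through the EMIF
 *
 * Writes @mr_val to mode register @mr_addr for chip select @cs on EMIF @nr.
 */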
static inline void set_mr(int nr, u32 cs, u32 mr_addr, u32 mr_val)
{
        mr_addr |= cs << EMIF_REG_CS_SHIFT;
        writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
        writel(mr_val, &emif_reg[nr]->emif_lpddr2_mode_reg_data);
}

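/**
 * configure_mr - program the LPDDR2 mode registers for one chip select
 *
 * Polls MR0 until device auto-initialization (DAI) completes, then programs
 * MR10, MR1 and MR2 for chip select @cs on EMIF @nr.
 */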
static void configure_mr(int nr, u32 cs)
{
        u32 mr_addr;

        while (get_mr(nr, cs, LPDDR2_MR0) & LPDDR2_MR0_DAI_MASK)
                ;
        set_mr(nr, cs, LPDDR2_MR10, 0x56);

        set_mr(nr, cs, LPDDR2_MR1, 0x43);
        set_mr(nr, cs, LPDDR2_MR2, 0x2);

        mr_addr = LPDDR2_MR2 | EMIF_REG_REFRESH_EN_MASK;
        set_mr(nr, cs, mr_addr, 0x2);
}

/*
 * Configure EMIF4D5 registers and MR registers. For details about these
 * magic values please see the EMIF registers section of the TRM.
 */
void config_sdram_emif4d5(const struct emif_regs *regs, int nr)
{
        writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl);
        writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl_shdw);
        writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);

        writel(regs->temp_alert_config, &emif_reg[nr]->emif_temp_alert_config);
        writel(regs->emif_rd_wr_lvl_rmp_win,
               &emif_reg[nr]->emif_rd_wr_lvl_rmp_win);
        writel(regs->emif_rd_wr_lvl_rmp_ctl,
               &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
        writel(regs->emif_rd_wr_lvl_ctl, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
        writel(regs->emif_rd_wr_exec_thresh,
               &emif_reg[nr]->emif_rd_wr_exec_thresh);

        /*
         * For most SoCs these registers do not need to be changed, so only
         * write to them if a value has been explicitly set.
         */
        if (regs->emif_cos_config) {
                writel(regs->emif_prio_class_serv_map,
                       &emif_reg[nr]->emif_prio_class_serv_map);
                writel(regs->emif_connect_id_serv_1_map,
                       &emif_reg[nr]->emif_connect_id_serv_1_map);
                writel(regs->emif_connect_id_serv_2_map,
                       &emif_reg[nr]->emif_connect_id_serv_2_map);
                writel(regs->emif_cos_config, &emif_reg[nr]->emif_cos_config);
        }

        /*
         * Sequence to ensure that the PHY is in a known state prior to
         * starting hardware leveling. It also latches some state from the
         * EMIF into the PHY.
         */
        writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
        writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
        writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);

        clrbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
                     EMIF_REG_INITREF_DIS_MASK);

        writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
        writel(regs->sdram_config, &cstat->secure_emif_sdram_config);

        /* Wait 1ms because of L3 timeout error */
        udelay(1000);

        writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
        writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);

        /* Perform hardware leveling for DDR3 */
        if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3) {
                writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36) |
                       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
                writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw) |
                       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

                writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);

                /* Enable read leveling */
                writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_ctl);

                /* Wait 1ms because of L3 timeout error */
                udelay(1000);

                /*
                 * Enable full read and write leveling. Wait for the read and
                 * write leveling bit (RDWRLVLFULL_START, bit 31) to clear.
                 */
                while ((readl(&emif_reg[nr]->emif_rd_wr_lvl_ctl) & 0x80000000)
                       != 0)
                        ;

                /* Check the timeout register to see if leveling is complete */
                if ((readl(&emif_reg[nr]->emif_status) & 0x70) != 0)
                        puts("DDR3 H/W leveling incomplete with errors\n");

        } else {
                /* DDR2 */
                configure_mr(nr, 0);
                configure_mr(nr, 1);
        }
}

/**
 * Configure SDRAM
 */
void config_sdram(const struct emif_regs *regs, int nr)
{
#ifdef CONFIG_TI816X
        writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
        writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1);
        writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);
        writel(0x0000613B, &emif_reg[nr]->emif_sdram_ref_ctrl); /* initially a large refresh period */
        writel(0x1000613B, &emif_reg[nr]->emif_sdram_ref_ctrl); /* trigger initialization */
        writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
#else
        if (regs->zq_config) {
                writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
                writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
                writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

                /* Trigger initialization */
                writel(0x00003100, &emif_reg[nr]->emif_sdram_ref_ctrl);
                /* Wait 1ms because of L3 timeout error */
                udelay(1000);

                /* Write the proper sdram_ref_ctrl value */
                writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
                writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
        }
        writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
        writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
        writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

        /* Write REG_COS_COUNT_1, REG_COS_COUNT_2, and REG_PR_OLD_COUNT. */
        if (regs->ocp_config)
                writel(regs->ocp_config, &emif_reg[nr]->emif_l3_config);
#endif
}

/**
 * Set SDRAM timings
 */
void set_sdram_timings(const struct emif_regs *regs, int nr)
{
        writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1);
        writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1_shdw);
        writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2);
        writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2_shdw);
        writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3);
        writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3_shdw);
}

/*
 * Configure EXT PHY registers for software leveling
 */
static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
{
        u32 *ext_phy_ctrl_base = 0;
        u32 *emif_ext_phy_ctrl_base = 0;
        __maybe_unused const u32 *ext_phy_ctrl_const_regs;
        u32 i = 0;
        __maybe_unused u32 size;

        ext_phy_ctrl_base = (u32 *)&(regs->emif_ddr_ext_phy_ctrl_1);
        emif_ext_phy_ctrl_base =
                        (u32 *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);

        /* Configure external phy control timing registers */
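        /*
         * Each external PHY control register is immediately followed by its
         * shadow register in the EMIF register map, so every source value is
         * written twice while the destination pointer advances by two.
         */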
        for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
                writel(*ext_phy_ctrl_base, emif_ext_phy_ctrl_base++);
                /* Update shadow registers */
                writel(*ext_phy_ctrl_base++, emif_ext_phy_ctrl_base++);
        }

#ifdef CONFIG_AM43XX
        /*
         * External PHY control registers 6-24 do not change with DDR
         * frequency. They only need to be set for DDR2 on AM43xx.
         */
        emif_get_ext_phy_ctrl_const_regs(&ext_phy_ctrl_const_regs, &size);

        if (!size)
                return;

        for (i = 0; i < size; i++) {
                writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
                /* Update shadow registers */
                writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
        }
#endif
}

/*
 * Configure EXT PHY registers for hardware leveling
 */
static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
{
        /*
         * Enable hardware leveling on the EMIF. For details about these
         * magic values please see the EMIF registers section of the TRM.
         */
        if (regs->emif_ddr_phy_ctlr_1 & 0x00040000) {
                /* PHY_INVERT_CLKOUT = 1 */
                writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
                writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
        } else {
                /* PHY_INVERT_CLKOUT = 0 */
                writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
                writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
        }

        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw);
        writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23);
        writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw);
        writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24);
        writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw);
        writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25);
        writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30);
        writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35);
        writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw);
        writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
        writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

        /*
         * Sequence to ensure that the PHY is again in a known state after
         * hardware leveling.
         */
        writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
        writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
        writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
}

/**
 * Configure DDR PHY
 */
void config_ddr_phy(const struct emif_regs *regs, int nr)
{
        /*
         * Disable initialization and refreshes for now, until we finish
         * programming the EMIF registers, and set the time between the
         * rising edge of DDR_RESET and the rising edge of DDR_CKE to
         * > 500us per the memory spec. We currently hardcode a value based
         * on a maximum expected frequency of 400MHz.
         */
        writel(EMIF_REG_INITREF_DIS_MASK | 0x3100,
               &emif_reg[nr]->emif_sdram_ref_ctrl);

        writel(regs->emif_ddr_phy_ctlr_1,
               &emif_reg[nr]->emif_ddr_phy_ctrl_1);
        writel(regs->emif_ddr_phy_ctlr_1,
               &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);

        if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5) {
                if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
                        ext_phy_settings_hwlvl(regs, nr);
                else
                        ext_phy_settings_swlvl(regs, nr);
        }
}

/**
 * Configure DDR CMD control registers
 */
void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
{
        if (!cmd)
                return;

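        /* Program the slave ratio and invert-clkout for command macros 0-2 */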
        writel(cmd->cmd0csratio, &ddr_cmd_reg[nr]->cm0csratio);
        writel(cmd->cmd0iclkout, &ddr_cmd_reg[nr]->cm0iclkout);

        writel(cmd->cmd1csratio, &ddr_cmd_reg[nr]->cm1csratio);
        writel(cmd->cmd1iclkout, &ddr_cmd_reg[nr]->cm1iclkout);

        writel(cmd->cmd2csratio, &ddr_cmd_reg[nr]->cm2csratio);
        writel(cmd->cmd2iclkout, &ddr_cmd_reg[nr]->cm2iclkout);
}

/**
 * Configure DDR DATA registers
 */
void config_ddr_data(const struct ddr_data *data, int nr)
{
        int i;

        if (!data)
                return;

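        /*
         * Program each of the DDR_DATA_REGS_NR data macros (one per byte
         * lane) with the same ratio values.
         */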
        for (i = 0; i < DDR_DATA_REGS_NR; i++) {
                writel(data->datardsratio0,
                       &(ddr_data_reg[nr]+i)->dt0rdsratio0);
                writel(data->datawdsratio0,
                       &(ddr_data_reg[nr]+i)->dt0wdsratio0);
                writel(data->datawiratio0,
                       &(ddr_data_reg[nr]+i)->dt0wiratio0);
                writel(data->datagiratio0,
                       &(ddr_data_reg[nr]+i)->dt0giratio0);
                writel(data->datafwsratio0,
                       &(ddr_data_reg[nr]+i)->dt0fwsratio0);
                writel(data->datawrsratio0,
                       &(ddr_data_reg[nr]+i)->dt0wrsratio0);
        }
}

void config_io_ctrl(const struct ctrl_ioregs *ioregs)
{
        if (!ioregs)
                return;

        writel(ioregs->cm0ioctl, &ioctrl_reg->cm0ioctl);
        writel(ioregs->cm1ioctl, &ioctrl_reg->cm1ioctl);
        writel(ioregs->cm2ioctl, &ioctrl_reg->cm2ioctl);
        writel(ioregs->dt0ioctl, &ioctrl_reg->dt0ioctl);
        writel(ioregs->dt1ioctl, &ioctrl_reg->dt1ioctl);
#ifdef CONFIG_AM43XX
        writel(ioregs->dt2ioctrl, &ioctrl_reg->dt2ioctrl);
        writel(ioregs->dt3ioctrl, &ioctrl_reg->dt3ioctrl);
        writel(ioregs->emif_sdram_config_ext,
               &ioctrl_reg->emif_sdram_config_ext);
#endif
}