blob: 5d947a68c3deac731fd019b4599977e3b133fe01 [file] [log] [blame]
Tom Rini83d290c2018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0+
Chandan Nath62d7fe7c2011-10-14 02:58:24 +00002/*
3 * DDR Configuration for AM33xx devices.
4 *
Wolfgang Denk1a459662013-07-08 09:37:19 +02005 * Copyright (C) 2011 Texas Instruments Incorporated - http://www.ti.com/
Chandan Nath62d7fe7c2011-10-14 02:58:24 +00006 */
7
8#include <asm/arch/cpu.h>
9#include <asm/arch/ddr_defs.h>
Satyanarayana, Sandhya6995a282012-08-09 18:29:57 +000010#include <asm/arch/sys_proto.h>
Chandan Nath62d7fe7c2011-10-14 02:58:24 +000011#include <asm/io.h>
Tom Rini7d5eb342012-05-29 09:02:15 -070012#include <asm/emif.h>
Chandan Nath62d7fe7c2011-10-14 02:58:24 +000013
/**
 * Base addresses for the two EMIF controller instances.
 *
 * Indexed by the "nr" parameter used throughout this file (0 or 1).
 * EMIF4_1_CFG_BASE is only meaningful on SoCs with a second EMIF.
 */
static struct emif_reg_struct *emif_reg[2] = {
				(struct emif_reg_struct *)EMIF4_0_CFG_BASE,
				(struct emif_reg_struct *)EMIF4_1_CFG_BASE};
Chandan Nath62d7fe7c2011-10-14 02:58:24 +000020
/**
 * Base addresses for DDR PHY cmd regs, one per EMIF instance.
 */
static struct ddr_cmd_regs *ddr_cmd_reg[2] = {
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};
27
/**
 * Base addresses for DDR PHY data regs, one per EMIF instance.
 */
static struct ddr_data_regs *ddr_data_reg[2] = {
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};
Chandan Nath62d7fe7c2011-10-14 02:58:24 +000031
32/**
33 * Base address for ddr io control instances
34 */
35static struct ddr_cmdtctrl *ioctrl_reg = {
36 (struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR};
37
Lokesh Vutlad3daba12013-12-10 15:02:22 +053038static inline u32 get_mr(int nr, u32 cs, u32 mr_addr)
39{
40 u32 mr;
41
42 mr_addr |= cs << EMIF_REG_CS_SHIFT;
43 writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
44
45 mr = readl(&emif_reg[nr]->emif_lpddr2_mode_reg_data);
46 debug("get_mr: EMIF1 cs %d mr %08x val 0x%x\n", cs, mr_addr, mr);
47 if (((mr & 0x0000ff00) >> 8) == (mr & 0xff) &&
48 ((mr & 0x00ff0000) >> 16) == (mr & 0xff) &&
49 ((mr & 0xff000000) >> 24) == (mr & 0xff))
50 return mr & 0xff;
51 else
52 return mr;
53}
54
55static inline void set_mr(int nr, u32 cs, u32 mr_addr, u32 mr_val)
56{
57 mr_addr |= cs << EMIF_REG_CS_SHIFT;
58 writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
59 writel(mr_val, &emif_reg[nr]->emif_lpddr2_mode_reg_data);
60}
61
/**
 * configure_mr() - program LPDDR2 mode registers for one chip select
 * @nr: EMIF instance index (0 or 1)
 * @cs: chip select to configure
 *
 * Busy-waits for the device auto-initialization (DAI) bit in MR0 to
 * clear before programming the other mode registers.  The MR values are
 * magic numbers; see the EMIF section of the TRM for their meaning.
 */
static void configure_mr(int nr, u32 cs)
{
	u32 mr_addr;

	/* Wait for device auto-initialization to complete */
	while (get_mr(nr, cs, LPDDR2_MR0) & LPDDR2_MR0_DAI_MASK)
		;
	/* MR10: I/O calibration command (0x56 — presumably a ZQ short
	 * calibration per JESD209-2; TODO confirm against the datasheet) */
	set_mr(nr, cs, LPDDR2_MR10, 0x56);

	/* MR1: device feature settings; MR2: read latency */
	set_mr(nr, cs, LPDDR2_MR1, 0x43);
	set_mr(nr, cs, LPDDR2_MR2, 0x2);

	/* Re-issue the MR2 write with the refresh-enable bit set */
	mr_addr = LPDDR2_MR2 | EMIF_REG_REFRESH_EN_MASK;
	set_mr(nr, cs, mr_addr, 0x2);
}
76
/*
 * config_sdram_emif4d5() - configure EMIF4D5 registers and MR registers.
 * @regs: board-specific EMIF register values
 * @nr:   EMIF instance index (0 or 1)
 *
 * For details about these magic values please see the EMIF registers
 * section of the TRM.  The write ordering in this function is
 * significant; do not reorder statements.
 */
void config_sdram_emif4d5(const struct emif_regs *regs, int nr)
{
	/* Power management control: active and shadow copies */
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl);
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl_shdw);
	writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);

	/* Temperature alert and read/write leveling window/control setup */
	writel(regs->temp_alert_config, &emif_reg[nr]->emif_temp_alert_config);
	writel(regs->emif_rd_wr_lvl_rmp_win,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_win);
	writel(regs->emif_rd_wr_lvl_rmp_ctl,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
	writel(regs->emif_rd_wr_lvl_ctl, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
	writel(regs->emif_rd_wr_exec_thresh,
	       &emif_reg[nr]->emif_rd_wr_exec_thresh);

	/*
	 * for most SOCs these registers won't need to be changed so only
	 * write to these registers if someone explicitly has set the
	 * register's value.
	 */
	if(regs->emif_cos_config) {
		writel(regs->emif_prio_class_serv_map, &emif_reg[nr]->emif_prio_class_serv_map);
		writel(regs->emif_connect_id_serv_1_map, &emif_reg[nr]->emif_connect_id_serv_1_map);
		writel(regs->emif_connect_id_serv_2_map, &emif_reg[nr]->emif_connect_id_serv_2_map);
		writel(regs->emif_cos_config, &emif_reg[nr]->emif_cos_config);
	}

	/*
	 * Sequence to ensure that the PHY is in a known state prior to
	 * startting hardware leveling. Also acts as to latch some state from
	 * the EMIF into the PHY.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);

	/* Re-enable initialization/refresh that config_ddr_phy() disabled */
	clrbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
		     EMIF_REG_INITREF_DIS_MASK);

	/*
	 * Mirror sdram_config into the control module's secure copy
	 * (cstat comes from sys_proto.h — presumably the control module
	 * status block; verify against that header).
	 */
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	writel(regs->sdram_config, &cstat->secure_emif_sdram_config);

	/* Wait 1ms because of L3 timeout error */
	udelay(1000);

	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);

	/* Perform hardware leveling for DDR3 */
	if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3) {
		/* Set bit 8 of ext_phy_ctrl_36 (active and shadow) */
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);

		/* Enable read leveling */
		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_ctl);

		/* Wait 1ms because of L3 timeout error */
		udelay(1000);

		/*
		 * Enable full read and write leveling. Wait for read and write
		 * leveling bit to clear RDWRLVLFULL_START bit 31
		 */
		while ((readl(&emif_reg[nr]->emif_rd_wr_lvl_ctl) & 0x80000000)
		      != 0)
			;

		/* Check the timeout register to see if leveling is complete */
		if ((readl(&emif_reg[nr]->emif_status) & 0x70) != 0)
			puts("DDR3 H/W leveling incomplete with errors\n");

	} else {
		/* DDR2: program mode registers for both chip selects */
		configure_mr(nr, 0);
		configure_mr(nr, 1);
	}
}
162
/**
 * config_sdram() - configure SDRAM on EMIF4D-class controllers
 * @regs: board-specific EMIF register values
 * @nr:   EMIF instance index (0 or 1)
 *
 * On TI816X, initialization is triggered via a dedicated refresh-control
 * sequence.  On other SoCs, ZQ calibration and init are only run when
 * the board supplies a non-zero zq_config.
 */
void config_sdram(const struct emif_regs *regs, int nr)
{
#ifdef CONFIG_TI816X
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1);
	writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);
	writel(0x0000613B, &emif_reg[nr]->emif_sdram_ref_ctrl);	/* initially a large refresh period */
	writel(0x1000613B, &emif_reg[nr]->emif_sdram_ref_ctrl);	/* trigger initialization */
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
#else
	if (regs->zq_config) {
		writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
		/* Mirror into the control module's secure copy as well */
		writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
		writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

		/* Trigger initialization */
		writel(0x00003100, &emif_reg[nr]->emif_sdram_ref_ctrl);
		/* Wait 1ms because of L3 timeout error */
		udelay(1000);

		/* Write proper sdram_ref_cref_ctrl value */
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	}
	/* Always (re)program refresh control and SDRAM config */
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

	/* Write REG_COS_COUNT_1, REG_COS_COUNT_2, and REG_PR_OLD_COUNT. */
	if (regs->ocp_config)
		writel(regs->ocp_config, &emif_reg[nr]->emif_l3_config);
#endif
}
199
200/**
201 * Set SDRAM timings
202 */
Matt Porter3ba65f92013-03-15 10:07:03 +0000203void set_sdram_timings(const struct emif_regs *regs, int nr)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000204{
Matt Porter3ba65f92013-03-15 10:07:03 +0000205 writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1);
206 writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1_shdw);
207 writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2);
208 writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2_shdw);
209 writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3);
210 writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3_shdw);
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000211}
212
/*
 * ext_phy_settings_swlvl() - configure EXT PHY registers for software
 * leveling.
 * @regs: board-specific EMIF register values
 * @nr:   EMIF instance index (0 or 1)
 *
 * The EMIF lays out each ext_phy_ctrl register as an active/shadow pair
 * of consecutive words, while the board data holds one value per
 * register; hence the destination pointer advances twice per source
 * value in the copy loops below.
 */
static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
{
	u32 *ext_phy_ctrl_base = 0;
	u32 *emif_ext_phy_ctrl_base = 0;
	__maybe_unused const u32 *ext_phy_ctrl_const_regs;
	u32 i = 0;
	__maybe_unused u32 size;

	/* Treat the ext_phy_ctrl fields as flat u32 arrays for copying */
	ext_phy_ctrl_base = (u32 *)&(regs->emif_ddr_ext_phy_ctrl_1);
	emif_ext_phy_ctrl_base =
			(u32 *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);

	/* Configure external phy control timing registers */
	for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
		/* Active register: source pointer is NOT advanced here */
		writel(*ext_phy_ctrl_base, emif_ext_phy_ctrl_base++);
		/* Update shadow registers (same value, then advance source) */
		writel(*ext_phy_ctrl_base++, emif_ext_phy_ctrl_base++);
	}

#ifdef CONFIG_AM43XX
	/*
	 * External phy 6-24 registers do not change with ddr frequency.
	 * These only need to be set on DDR2 on AM43xx.
	 */
	emif_get_ext_phy_ctrl_const_regs(&ext_phy_ctrl_const_regs, &size);

	if (!size)
		return;

	for (i = 0; i < size; i++) {
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
	}
#endif
}
252
/*
 * ext_phy_settings_hwlvl() - configure EXT PHY registers for hardware
 * leveling (DDR3).
 * @regs: board-specific EMIF register values
 * @nr:   EMIF instance index (0 or 1)
 */
static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
{
	/*
	 * Enable hardware leveling on the EMIF. For details about these
	 * magic values please see the EMIF registers section of the TRM.
	 */
	if (regs->emif_ddr_phy_ctlr_1 & 0x00040000) {
		/* PHY_INVERT_CLKOUT = 1 */
		writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
		writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	} else {
		/* PHY_INVERT_CLKOUT = 0 */
		writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
		writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	}

	/* ext_phy_ctrl 22..36: fixed TRM values, active + shadow each */
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

	/*
	 * Sequence to ensure that the PHY is again in a known state after
	 * hardware leveling.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
}
311
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000312/**
313 * Configure DDR PHY
314 */
Matt Porter3ba65f92013-03-15 10:07:03 +0000315void config_ddr_phy(const struct emif_regs *regs, int nr)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000316{
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530317 /*
Russ Dill335b4e52016-07-21 04:28:32 -0700318 * Disable initialization and refreshes for now until we finish
319 * programming EMIF regs and set time between rising edge of
320 * DDR_RESET to rising edge of DDR_CKE to > 500us per memory spec.
321 * We currently hardcode a value based on a max expected frequency
322 * of 400MHz.
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530323 */
Russ Dill335b4e52016-07-21 04:28:32 -0700324 writel(EMIF_REG_INITREF_DIS_MASK | 0x3100,
325 &emif_reg[nr]->emif_sdram_ref_ctrl);
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530326
Matt Porter3ba65f92013-03-15 10:07:03 +0000327 writel(regs->emif_ddr_phy_ctlr_1,
328 &emif_reg[nr]->emif_ddr_phy_ctrl_1);
329 writel(regs->emif_ddr_phy_ctlr_1,
330 &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530331
Tom Rini7c352cd2015-06-05 15:51:11 +0530332 if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5) {
333 if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
334 ext_phy_settings_hwlvl(regs, nr);
335 else
336 ext_phy_settings_swlvl(regs, nr);
337 }
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000338}
339
340/**
341 * Configure DDR CMD control registers
342 */
Matt Porter3ba65f92013-03-15 10:07:03 +0000343void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000344{
Lokesh Vutla965de8b2013-12-10 15:02:21 +0530345 if (!cmd)
346 return;
347
Matt Porter3ba65f92013-03-15 10:07:03 +0000348 writel(cmd->cmd0csratio, &ddr_cmd_reg[nr]->cm0csratio);
Matt Porter3ba65f92013-03-15 10:07:03 +0000349 writel(cmd->cmd0iclkout, &ddr_cmd_reg[nr]->cm0iclkout);
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000350
Matt Porter3ba65f92013-03-15 10:07:03 +0000351 writel(cmd->cmd1csratio, &ddr_cmd_reg[nr]->cm1csratio);
Matt Porter3ba65f92013-03-15 10:07:03 +0000352 writel(cmd->cmd1iclkout, &ddr_cmd_reg[nr]->cm1iclkout);
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000353
Matt Porter3ba65f92013-03-15 10:07:03 +0000354 writel(cmd->cmd2csratio, &ddr_cmd_reg[nr]->cm2csratio);
Matt Porter3ba65f92013-03-15 10:07:03 +0000355 writel(cmd->cmd2iclkout, &ddr_cmd_reg[nr]->cm2iclkout);
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000356}
357
358/**
359 * Configure DDR DATA registers
360 */
Matt Porter3ba65f92013-03-15 10:07:03 +0000361void config_ddr_data(const struct ddr_data *data, int nr)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000362{
Matt Porter3ba65f92013-03-15 10:07:03 +0000363 int i;
364
Lokesh Vutla965de8b2013-12-10 15:02:21 +0530365 if (!data)
366 return;
367
Matt Porter3ba65f92013-03-15 10:07:03 +0000368 for (i = 0; i < DDR_DATA_REGS_NR; i++) {
369 writel(data->datardsratio0,
370 &(ddr_data_reg[nr]+i)->dt0rdsratio0);
371 writel(data->datawdsratio0,
372 &(ddr_data_reg[nr]+i)->dt0wdsratio0);
373 writel(data->datawiratio0,
374 &(ddr_data_reg[nr]+i)->dt0wiratio0);
375 writel(data->datagiratio0,
376 &(ddr_data_reg[nr]+i)->dt0giratio0);
377 writel(data->datafwsratio0,
378 &(ddr_data_reg[nr]+i)->dt0fwsratio0);
379 writel(data->datawrsratio0,
380 &(ddr_data_reg[nr]+i)->dt0wrsratio0);
Matt Porter3ba65f92013-03-15 10:07:03 +0000381 }
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000382}
383
Lokesh Vutla965de8b2013-12-10 15:02:21 +0530384void config_io_ctrl(const struct ctrl_ioregs *ioregs)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000385{
Lokesh Vutla965de8b2013-12-10 15:02:21 +0530386 if (!ioregs)
387 return;
388
389 writel(ioregs->cm0ioctl, &ioctrl_reg->cm0ioctl);
390 writel(ioregs->cm1ioctl, &ioctrl_reg->cm1ioctl);
391 writel(ioregs->cm2ioctl, &ioctrl_reg->cm2ioctl);
392 writel(ioregs->dt0ioctl, &ioctrl_reg->dt0ioctl);
393 writel(ioregs->dt1ioctl, &ioctrl_reg->dt1ioctl);
394#ifdef CONFIG_AM43XX
395 writel(ioregs->dt2ioctrl, &ioctrl_reg->dt2ioctrl);
396 writel(ioregs->dt3ioctrl, &ioctrl_reg->dt3ioctrl);
397 writel(ioregs->emif_sdram_config_ext,
398 &ioctrl_reg->emif_sdram_config_ext);
399#endif
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000400}