// SPDX-License-Identifier: GPL-2.0+
/*
 * DDR Configuration for AM33xx devices.
 *
 * Copyright (C) 2011 Texas Instruments Incorporated - http://www.ti.com/
 */
7
8#include <asm/arch/cpu.h>
9#include <asm/arch/ddr_defs.h>
Satyanarayana, Sandhya6995a282012-08-09 18:29:57 +000010#include <asm/arch/sys_proto.h>
Chandan Nath62d7fe7c2011-10-14 02:58:24 +000011#include <asm/io.h>
Tom Rini7d5eb342012-05-29 09:02:15 -070012#include <asm/emif.h>
Chandan Nath62d7fe7c2011-10-14 02:58:24 +000013
14/**
15 * Base address for EMIF instances
16 */
Matt Porter3ba65f92013-03-15 10:07:03 +000017static struct emif_reg_struct *emif_reg[2] = {
18 (struct emif_reg_struct *)EMIF4_0_CFG_BASE,
19 (struct emif_reg_struct *)EMIF4_1_CFG_BASE};
Chandan Nath62d7fe7c2011-10-14 02:58:24 +000020
21/**
Matt Porter3ba65f92013-03-15 10:07:03 +000022 * Base addresses for DDR PHY cmd/data regs
Chandan Nath62d7fe7c2011-10-14 02:58:24 +000023 */
Matt Porter3ba65f92013-03-15 10:07:03 +000024static struct ddr_cmd_regs *ddr_cmd_reg[2] = {
25 (struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
26 (struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};
27
28static struct ddr_data_regs *ddr_data_reg[2] = {
29 (struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
30 (struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};
Chandan Nath62d7fe7c2011-10-14 02:58:24 +000031
32/**
33 * Base address for ddr io control instances
34 */
35static struct ddr_cmdtctrl *ioctrl_reg = {
36 (struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR};
37
Lokesh Vutlad3daba12013-12-10 15:02:22 +053038static inline u32 get_mr(int nr, u32 cs, u32 mr_addr)
39{
40 u32 mr;
41
42 mr_addr |= cs << EMIF_REG_CS_SHIFT;
43 writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
44
45 mr = readl(&emif_reg[nr]->emif_lpddr2_mode_reg_data);
46 debug("get_mr: EMIF1 cs %d mr %08x val 0x%x\n", cs, mr_addr, mr);
47 if (((mr & 0x0000ff00) >> 8) == (mr & 0xff) &&
48 ((mr & 0x00ff0000) >> 16) == (mr & 0xff) &&
49 ((mr & 0xff000000) >> 24) == (mr & 0xff))
50 return mr & 0xff;
51 else
52 return mr;
53}
54
55static inline void set_mr(int nr, u32 cs, u32 mr_addr, u32 mr_val)
56{
57 mr_addr |= cs << EMIF_REG_CS_SHIFT;
58 writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
59 writel(mr_val, &emif_reg[nr]->emif_lpddr2_mode_reg_data);
60}
61
62static void configure_mr(int nr, u32 cs)
63{
64 u32 mr_addr;
65
66 while (get_mr(nr, cs, LPDDR2_MR0) & LPDDR2_MR0_DAI_MASK)
67 ;
68 set_mr(nr, cs, LPDDR2_MR10, 0x56);
69
70 set_mr(nr, cs, LPDDR2_MR1, 0x43);
71 set_mr(nr, cs, LPDDR2_MR2, 0x2);
72
73 mr_addr = LPDDR2_MR2 | EMIF_REG_REFRESH_EN_MASK;
74 set_mr(nr, cs, mr_addr, 0x2);
75}
76
77/*
James Doublesinfc46bae2014-12-22 16:26:11 -060078 * Configure EMIF4D5 registers and MR registers For details about these magic
79 * values please see the EMIF registers section of the TRM.
Lokesh Vutlad3daba12013-12-10 15:02:22 +053080 */
81void config_sdram_emif4d5(const struct emif_regs *regs, int nr)
82{
Brad Griffis7b5774e2019-04-29 09:59:31 +053083#ifdef CONFIG_AM43XX
84 struct prm_device_inst *prm_device =
85 (struct prm_device_inst *)PRM_DEVICE_INST;
86#endif
87
Dave Gerlach4800be42014-02-18 07:31:59 -050088 writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl);
89 writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl_shdw);
Lokesh Vutlad3daba12013-12-10 15:02:22 +053090 writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
91
92 writel(regs->temp_alert_config, &emif_reg[nr]->emif_temp_alert_config);
93 writel(regs->emif_rd_wr_lvl_rmp_win,
94 &emif_reg[nr]->emif_rd_wr_lvl_rmp_win);
95 writel(regs->emif_rd_wr_lvl_rmp_ctl,
96 &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
97 writel(regs->emif_rd_wr_lvl_ctl, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
98 writel(regs->emif_rd_wr_exec_thresh,
99 &emif_reg[nr]->emif_rd_wr_exec_thresh);
100
Cooper Jr., Franklin8038b492014-06-27 13:31:15 -0500101 /*
102 * for most SOCs these registers won't need to be changed so only
103 * write to these registers if someone explicitly has set the
104 * register's value.
105 */
106 if(regs->emif_cos_config) {
107 writel(regs->emif_prio_class_serv_map, &emif_reg[nr]->emif_prio_class_serv_map);
108 writel(regs->emif_connect_id_serv_1_map, &emif_reg[nr]->emif_connect_id_serv_1_map);
109 writel(regs->emif_connect_id_serv_2_map, &emif_reg[nr]->emif_connect_id_serv_2_map);
110 writel(regs->emif_cos_config, &emif_reg[nr]->emif_cos_config);
111 }
112
James Doublesinfc46bae2014-12-22 16:26:11 -0600113 /*
114 * Sequence to ensure that the PHY is in a known state prior to
115 * startting hardware leveling. Also acts as to latch some state from
116 * the EMIF into the PHY.
117 */
118 writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
119 writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
120 writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
121
122 clrbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
123 EMIF_REG_INITREF_DIS_MASK);
124
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530125 writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
Dave Gerlachf84880f2014-02-18 07:32:00 -0500126 writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
Russ Dill3325b062016-07-21 04:28:31 -0700127
128 /* Wait 1ms because of L3 timeout error */
129 udelay(1000);
130
James Doublesinfc46bae2014-12-22 16:26:11 -0600131 writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
132 writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
133
Brad Griffis7b5774e2019-04-29 09:59:31 +0530134#ifdef CONFIG_AM43XX
135 /*
136 * Disable EMIF_DEVOFF
137 * -> Cold Boot: This is just rewriting the default register value.
138 * -> RTC Resume: Must disable DEVOFF before leveling.
139 */
140 writel(0, &prm_device->emif_ctrl);
141#endif
142
Tom Rini7c352cd2015-06-05 15:51:11 +0530143 /* Perform hardware leveling for DDR3 */
144 if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3) {
Tom Rini7c352cd2015-06-05 15:51:11 +0530145 writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36) |
146 0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
147 writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw) |
148 0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);
James Doublesinfc46bae2014-12-22 16:26:11 -0600149
Tom Rini7c352cd2015-06-05 15:51:11 +0530150 writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
James Doublesinfc46bae2014-12-22 16:26:11 -0600151
Tom Rini7c352cd2015-06-05 15:51:11 +0530152 /* Enable read leveling */
153 writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
James Doublesinfc46bae2014-12-22 16:26:11 -0600154
Brad Griffis84cf2952019-04-29 09:59:29 +0530155 /* Wait 1ms because of L3 timeout error */
156 udelay(1000);
157
Tom Rini7c352cd2015-06-05 15:51:11 +0530158 /*
159 * Enable full read and write leveling. Wait for read and write
160 * leveling bit to clear RDWRLVLFULL_START bit 31
161 */
162 while ((readl(&emif_reg[nr]->emif_rd_wr_lvl_ctl) & 0x80000000)
163 != 0)
164 ;
James Doublesinfc46bae2014-12-22 16:26:11 -0600165
Tom Rini7c352cd2015-06-05 15:51:11 +0530166 /* Check the timeout register to see if leveling is complete */
167 if ((readl(&emif_reg[nr]->emif_status) & 0x70) != 0)
168 puts("DDR3 H/W leveling incomplete with errors\n");
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530169
Tom Rini7c352cd2015-06-05 15:51:11 +0530170 } else {
171 /* DDR2 */
Lokesh Vutlab5e01ee2013-12-10 15:02:23 +0530172 configure_mr(nr, 0);
173 configure_mr(nr, 1);
174 }
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530175}
176
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000177/**
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000178 * Configure SDRAM
179 */
Matt Porter3ba65f92013-03-15 10:07:03 +0000180void config_sdram(const struct emif_regs *regs, int nr)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000181{
Tom Rini86277332017-05-16 14:46:35 -0400182#ifdef CONFIG_TI816X
183 writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
184 writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1);
185 writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);
186 writel(0x0000613B, &emif_reg[nr]->emif_sdram_ref_ctrl); /* initially a large refresh period */
187 writel(0x1000613B, &emif_reg[nr]->emif_sdram_ref_ctrl); /* trigger initialization */
188 writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
189#else
Tom Rini1c382ea2013-02-26 16:35:33 -0500190 if (regs->zq_config) {
Matt Porter3ba65f92013-03-15 10:07:03 +0000191 writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
Satyanarayana, Sandhya6995a282012-08-09 18:29:57 +0000192 writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
Matt Porter3ba65f92013-03-15 10:07:03 +0000193 writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
Egli, Samuel69b918b2015-12-02 15:27:56 +0100194
195 /* Trigger initialization */
196 writel(0x00003100, &emif_reg[nr]->emif_sdram_ref_ctrl);
197 /* Wait 1ms because of L3 timeout error */
198 udelay(1000);
199
200 /* Write proper sdram_ref_cref_ctrl value */
Matt Porter3ba65f92013-03-15 10:07:03 +0000201 writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
202 writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
Satyanarayana, Sandhya6995a282012-08-09 18:29:57 +0000203 }
Matt Porter3ba65f92013-03-15 10:07:03 +0000204 writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
205 writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
Tom Rinie049b772015-04-02 16:01:33 -0400206 writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
Jyri Sarha8c17cbd2016-12-09 12:29:13 +0200207
208 /* Write REG_COS_COUNT_1, REG_COS_COUNT_2, and REG_PR_OLD_COUNT. */
209 if (regs->ocp_config)
210 writel(regs->ocp_config, &emif_reg[nr]->emif_l3_config);
Tom Rini86277332017-05-16 14:46:35 -0400211#endif
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000212}
213
214/**
215 * Set SDRAM timings
216 */
Matt Porter3ba65f92013-03-15 10:07:03 +0000217void set_sdram_timings(const struct emif_regs *regs, int nr)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000218{
Matt Porter3ba65f92013-03-15 10:07:03 +0000219 writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1);
220 writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1_shdw);
221 writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2);
222 writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2_shdw);
223 writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3);
224 writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3_shdw);
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000225}
226
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530227/*
Tom Rini7c352cd2015-06-05 15:51:11 +0530228 * Configure EXT PHY registers for software leveling
229 */
230static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
231{
232 u32 *ext_phy_ctrl_base = 0;
233 u32 *emif_ext_phy_ctrl_base = 0;
234 __maybe_unused const u32 *ext_phy_ctrl_const_regs;
235 u32 i = 0;
236 __maybe_unused u32 size;
237
238 ext_phy_ctrl_base = (u32 *)&(regs->emif_ddr_ext_phy_ctrl_1);
239 emif_ext_phy_ctrl_base =
240 (u32 *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
241
242 /* Configure external phy control timing registers */
243 for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
244 writel(*ext_phy_ctrl_base, emif_ext_phy_ctrl_base++);
245 /* Update shadow registers */
246 writel(*ext_phy_ctrl_base++, emif_ext_phy_ctrl_base++);
247 }
248
249#ifdef CONFIG_AM43XX
250 /*
251 * External phy 6-24 registers do not change with ddr frequency.
252 * These only need to be set on DDR2 on AM43xx.
253 */
254 emif_get_ext_phy_ctrl_const_regs(&ext_phy_ctrl_const_regs, &size);
255
256 if (!size)
257 return;
258
259 for (i = 0; i < size; i++) {
260 writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
261 /* Update shadow registers */
262 writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
263 }
264#endif
265}
266
267/*
James Doublesinfc46bae2014-12-22 16:26:11 -0600268 * Configure EXT PHY registers for hardware leveling
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530269 */
Tom Rini7c352cd2015-06-05 15:51:11 +0530270static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530271{
James Doublesinfc46bae2014-12-22 16:26:11 -0600272 /*
273 * Enable hardware leveling on the EMIF. For details about these
274 * magic values please see the EMIF registers section of the TRM.
275 */
Brad Griffis82195792019-04-29 09:59:28 +0530276 if (regs->emif_ddr_phy_ctlr_1 & 0x00040000) {
277 /* PHY_INVERT_CLKOUT = 1 */
278 writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
279 writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
280 } else {
281 /* PHY_INVERT_CLKOUT = 0 */
282 writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
283 writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
284 }
285
James Doublesinfc46bae2014-12-22 16:26:11 -0600286 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22);
287 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw);
288 writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23);
289 writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw);
290 writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24);
291 writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw);
292 writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25);
293 writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw);
294 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26);
295 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw);
296 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27);
297 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw);
298 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28);
299 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw);
300 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29);
301 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw);
302 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30);
303 writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw);
304 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31);
305 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw);
306 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32);
307 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw);
308 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33);
309 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw);
310 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34);
311 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw);
312 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35);
313 writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw);
Brad Griffis1dbd9a72019-04-29 09:59:32 +0530314 writel(0x00000077, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
315 writel(0x00000077, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530316
317 /*
James Doublesinfc46bae2014-12-22 16:26:11 -0600318 * Sequence to ensure that the PHY is again in a known state after
319 * hardware leveling.
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530320 */
James Doublesinfc46bae2014-12-22 16:26:11 -0600321 writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
322 writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
323 writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530324}
325
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000326/**
327 * Configure DDR PHY
328 */
Matt Porter3ba65f92013-03-15 10:07:03 +0000329void config_ddr_phy(const struct emif_regs *regs, int nr)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000330{
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530331 /*
Russ Dill335b4e52016-07-21 04:28:32 -0700332 * Disable initialization and refreshes for now until we finish
333 * programming EMIF regs and set time between rising edge of
334 * DDR_RESET to rising edge of DDR_CKE to > 500us per memory spec.
335 * We currently hardcode a value based on a max expected frequency
336 * of 400MHz.
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530337 */
Russ Dill335b4e52016-07-21 04:28:32 -0700338 writel(EMIF_REG_INITREF_DIS_MASK | 0x3100,
339 &emif_reg[nr]->emif_sdram_ref_ctrl);
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530340
Matt Porter3ba65f92013-03-15 10:07:03 +0000341 writel(regs->emif_ddr_phy_ctlr_1,
342 &emif_reg[nr]->emif_ddr_phy_ctrl_1);
343 writel(regs->emif_ddr_phy_ctlr_1,
344 &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);
Lokesh Vutlad3daba12013-12-10 15:02:22 +0530345
Tom Rini7c352cd2015-06-05 15:51:11 +0530346 if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5) {
347 if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
348 ext_phy_settings_hwlvl(regs, nr);
349 else
350 ext_phy_settings_swlvl(regs, nr);
351 }
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000352}
353
354/**
355 * Configure DDR CMD control registers
356 */
Matt Porter3ba65f92013-03-15 10:07:03 +0000357void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000358{
Lokesh Vutla965de8b2013-12-10 15:02:21 +0530359 if (!cmd)
360 return;
361
Matt Porter3ba65f92013-03-15 10:07:03 +0000362 writel(cmd->cmd0csratio, &ddr_cmd_reg[nr]->cm0csratio);
Matt Porter3ba65f92013-03-15 10:07:03 +0000363 writel(cmd->cmd0iclkout, &ddr_cmd_reg[nr]->cm0iclkout);
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000364
Matt Porter3ba65f92013-03-15 10:07:03 +0000365 writel(cmd->cmd1csratio, &ddr_cmd_reg[nr]->cm1csratio);
Matt Porter3ba65f92013-03-15 10:07:03 +0000366 writel(cmd->cmd1iclkout, &ddr_cmd_reg[nr]->cm1iclkout);
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000367
Matt Porter3ba65f92013-03-15 10:07:03 +0000368 writel(cmd->cmd2csratio, &ddr_cmd_reg[nr]->cm2csratio);
Matt Porter3ba65f92013-03-15 10:07:03 +0000369 writel(cmd->cmd2iclkout, &ddr_cmd_reg[nr]->cm2iclkout);
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000370}
371
372/**
373 * Configure DDR DATA registers
374 */
Matt Porter3ba65f92013-03-15 10:07:03 +0000375void config_ddr_data(const struct ddr_data *data, int nr)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000376{
Matt Porter3ba65f92013-03-15 10:07:03 +0000377 int i;
378
Lokesh Vutla965de8b2013-12-10 15:02:21 +0530379 if (!data)
380 return;
381
Matt Porter3ba65f92013-03-15 10:07:03 +0000382 for (i = 0; i < DDR_DATA_REGS_NR; i++) {
383 writel(data->datardsratio0,
384 &(ddr_data_reg[nr]+i)->dt0rdsratio0);
385 writel(data->datawdsratio0,
386 &(ddr_data_reg[nr]+i)->dt0wdsratio0);
387 writel(data->datawiratio0,
388 &(ddr_data_reg[nr]+i)->dt0wiratio0);
389 writel(data->datagiratio0,
390 &(ddr_data_reg[nr]+i)->dt0giratio0);
391 writel(data->datafwsratio0,
392 &(ddr_data_reg[nr]+i)->dt0fwsratio0);
393 writel(data->datawrsratio0,
394 &(ddr_data_reg[nr]+i)->dt0wrsratio0);
Matt Porter3ba65f92013-03-15 10:07:03 +0000395 }
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000396}
397
Lokesh Vutla965de8b2013-12-10 15:02:21 +0530398void config_io_ctrl(const struct ctrl_ioregs *ioregs)
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000399{
Lokesh Vutla965de8b2013-12-10 15:02:21 +0530400 if (!ioregs)
401 return;
402
403 writel(ioregs->cm0ioctl, &ioctrl_reg->cm0ioctl);
404 writel(ioregs->cm1ioctl, &ioctrl_reg->cm1ioctl);
405 writel(ioregs->cm2ioctl, &ioctrl_reg->cm2ioctl);
406 writel(ioregs->dt0ioctl, &ioctrl_reg->dt0ioctl);
407 writel(ioregs->dt1ioctl, &ioctrl_reg->dt1ioctl);
408#ifdef CONFIG_AM43XX
409 writel(ioregs->dt2ioctrl, &ioctrl_reg->dt2ioctrl);
410 writel(ioregs->dt3ioctrl, &ioctrl_reg->dt3ioctrl);
411 writel(ioregs->emif_sdram_config_ext,
412 &ioctrl_reg->emif_sdram_config_ext);
413#endif
Chandan Nath62d7fe7c2011-10-14 02:58:24 +0000414}