// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 * Copyright (c) 2022 Edgeble AI Technologies Pvt. Ltd.
 */
6
7#include <common.h>
8#include <debug_uart.h>
9#include <dm.h>
10#include <ram.h>
11#include <syscon.h>
12#include <asm/io.h>
13#include <asm/arch-rockchip/clock.h>
14#include <asm/arch-rockchip/hardware.h>
15#include <asm/arch-rockchip/cru_rv1126.h>
16#include <asm/arch-rockchip/grf_rv1126.h>
17#include <asm/arch-rockchip/sdram_common.h>
18#include <asm/arch-rockchip/sdram_rv1126.h>
19#include <linux/delay.h>
20
/* define training flag: bit mask selecting which PHY training steps to run */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

/* signal-class selectors for de-skew programming */
#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

/* de-skew modify mode: write absolute value vs apply a signed delta */
#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)
35
/*
 * Driver state: register bases and idle timeouts are only needed while the
 * DRAM is actually being initialized (TPL, or SPL when no TPL exists); the
 * U-Boot-proper build only reports size info via ram_info/pmugrf.
 */
struct dram_info {
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	void __iomem *pctl;		/* DDR controller (UPCTL2) base */
	void __iomem *phy;		/* DDR PHY base */
	struct rv1126_cru *cru;		/* clock/reset unit */
	struct msch_regs *msch;		/* memory scheduler */
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	u32 sr_idle;			/* self-refresh idle timeout */
	u32 pd_idle;			/* power-down idle timeout */
#endif
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
};
51
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))

/* RV1126 fixed peripheral base addresses used before DM is available */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* secure GRF register offsets (reset control lives in SOC_CON13) */
#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;
69
/*
 * Per-frequency parameter sets used for capacity detection; one entry per
 * supported DDR frequency, generated into the .inc files.
 */
struct rv1126_sdram_params sdram_configs[] = {
#if defined(CONFIG_RAM_ROCKCHIP_LPDDR4)
# include "sdram-rv1126-lpddr4-detect-328.inc"
# include "sdram-rv1126-lpddr4-detect-396.inc"
# include "sdram-rv1126-lpddr4-detect-528.inc"
# include "sdram-rv1126-lpddr4-detect-664.inc"
# include "sdram-rv1126-lpddr4-detect-784.inc"
# include "sdram-rv1126-lpddr4-detect-924.inc"
# include "sdram-rv1126-lpddr4-detect-1056.inc"
#else
# include "sdram-rv1126-ddr3-detect-328.inc"
# include "sdram-rv1126-ddr3-detect-396.inc"
# include "sdram-rv1126-ddr3-detect-528.inc"
# include "sdram-rv1126-ddr3-detect-664.inc"
# include "sdram-rv1126-ddr3-detect-784.inc"
# include "sdram-rv1126-ddr3-detect-924.inc"
# include "sdram-rv1126-ddr3-detect-1056.inc"
#endif
};

/* loader parameter blob; indexed via struct sdram_head_info_index_v2 */
u32 common_info[] = {
#include "sdram-rv1126-loader_params.inc"
};
93
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* read/write training results exported to the ddr test tool */
static struct rw_trn_result rw_trn_result;
#endif

/* per-frequency-set-point parameters (one entry per FSP index) */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* cached LPDDR3 MR ODT value */
static u8 lp3_odt_value;

/* write-leveling results: [rank][byte lane], signed deltas */
static s8 wrlvl_result[2][4];
103
104/* DDR configuration 0-9 */
/*
 * DDR configuration 0-9.
 * Encoding (matched against tmp in calculate_ddrconfig()):
 *   bit 8    : rank - 1
 *   bits 7:5 : row - 13
 *   bit 4    : NOTE(review): not built into tmp; presumably an address
 *              interleave variant — confirm against TRM
 *   bit 3    : 1 when bank == 3
 *   bits 2:0 : bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2)  /* 9 */
};
117
118/* DDR configuration 10-21 */
/*
 * DDR configuration 10-21 (DDR4 only).
 * Encoding (matched against tmp in calculate_ddrconfig()):
 *   bit 7    : rank - 1
 *   bits 6:4 : row - 13
 *   bit 3    : bank-group/interleave flag (not part of tmp; compared <=)
 *   bits 2:1 : bus width code
 *   bit 0    : die width (dbw)
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0)  /* 21 */
};
133
134/* DDR configuration 22-28 */
/*
 * DDR configuration 22-28, part 2 of the non-DDR4 table; same bit layout
 * as ddr_cfg_2_rbc[] (index offset by 22 in calculate_ddrconfig()).
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3)  /* 28 */
};
144
/*
 * Maps a DDR4 ddrconfig index (10-21, column 0) to the equivalent
 * non-DDR4 ddrconfig index (column 1); used in both directions by
 * calculate_ddrconfig() and set_ctl_address_map().
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
159
/*
 * Controller address-map register values, one row per ddrconfig (0-28).
 * The 9 words of a row are copied verbatim to ADDRMAP0..ADDRMAP8 by
 * set_ctl_address_map() (sdram_copy_to_reg() from DDR_PCTL2_ADDRMAP0).
 */
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
	 0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}  /* 28 */
};
223
/*
 * DQ mapping table: column 0 is the selector, columns 1-2 are the
 * corresponding values.  NOTE(review): presumably selector -> PHY de-skew
 * register indexes for the A/B byte groups — confirm against the PHY TRM.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
248
/* de-skew register group base offsets: CS0/CS1 x group A/B */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
255
/* PHY register offsets holding write-leveling results: [rank][byte lane] */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
260
/*
 * Base offsets of the per-DQS de-skew register runs, ordered
 * RX then TX, CS0 then CS1, DQS0..DQS3.
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
279
/*
 * Assert/release the controller and PHY resets.
 *
 * @ctl_srstn/@ctl_psrstn: controller core / APB reset request (1 = assert)
 * @phy_srstn/@phy_psrstn: PHY core / APB reset request (1 = assert)
 *
 * Controller resets go through the secure GRF (SOC_CON13); the AXI reset
 * is tied to the controller core reset.  PHY resets go through the CRU.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
291
/*
 * Program the DPLL (pll[1]) to @hz.
 *
 * Sequence: switch DPLL mode to the 24 MHz crystal, program the dividers,
 * optionally enable spread spectrum (SSMOD) when the loader parameters
 * request it, wait (up to ~1 ms) for lock, then switch back to PLL output.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	/* pick post-dividers by target frequency band (VCO constraints) */
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* 24 MHz crystal reference */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		/* fractional mode (dsmpd=0) is required for spread spectrum */
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* wait for PLL lock; give up silently after ~1000 us */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
357
/* Set the DDR clock; the inno PHY runs the PLL at half the DDR data rate. */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
364
/*
 * Derive the ddrconfig index (0-28) from the detected geometry
 * (rank, bus width, die width, column/row/bank counts, 3/4-row flag).
 *
 * The geometry is packed into the same bit layout as the ddr*_cfg_2_rbc
 * tables and matched against them; DDR4 matches (10-21) are finally
 * translated to their non-DDR4 equivalent via d4_rbc_2_d3_rbc[].
 *
 * Returns the ddrconfig index; on no match returns (u32)-1, which the
 * "> 28" check below reports as an error.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;	/* wraps to UINT_MAX: "not found" marker */
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank: try the interleaved configs first */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank, 8 banks: try configs 5-7 first */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-resort fallback for a common single-rank layout */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* translate a DDR4 match to the shared non-DDR4 config index */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
469
/*
 * Open a quasi-dynamic register programming window on the controller
 * (clear sw_done); pair with sw_set_ack() when programming is finished.
 */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}
477
478static void sw_set_ack(struct dram_info *dram)
479{
480 void __iomem *pctl_base = dram->pctl;
481
482 /* set sw_done=1 */
483 writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
484 while (1) {
485 /* wait programming done */
486 if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
487 PCTL2_SW_DONE_ACK)
488 break;
489 }
490}
491
/*
 * Program the controller ADDRMAP0..ADDRMAP8 registers for the detected
 * ddrconfig, then patch in per-chip details: mask out unused row bits,
 * set the LPDDR3 3/4-row flag, DDR4 half-bus mode, and disable the
 * rank-select bit for single-rank parts.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	/* addrmap[] is indexed by the DDR4 config, so map back first */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf: each row bit is an 8-bit field in ADDRMAP6/7 */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			     ((i - 12) * 8 / 32) * 4,
			     0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	/* single rank: park the CS address bit (0x1f = unused) */
	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}
532
/*
 * Configure the PHY internal PLL.
 *
 * Called twice: first with @wait=0 to program the dividers for @freq,
 * then with @wait=1 to power the PLL up and spin until it reports lock
 * (PHY reg 0x90).
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		/* choose feedback/post dividers by frequency band */
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
571
/*
 * DDR3 PHY drive-strength lookup: {register code, ohms}, sorted by
 * strictly descending ohms — set_ds_odt() scans from the end to find
 * the smallest entry >= the requested value.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
597
598static u16 d3_phy_odt_2_ohm[][2] = {
599 {PHY_DDR3_RTT_DISABLE, 0},
600 {PHY_DDR3_RTT_561ohm, 561},
601 {PHY_DDR3_RTT_282ohm, 282},
602 {PHY_DDR3_RTT_188ohm, 188},
603 {PHY_DDR3_RTT_141ohm, 141},
604 {PHY_DDR3_RTT_113ohm, 113},
605 {PHY_DDR3_RTT_94ohm, 94},
606 {PHY_DDR3_RTT_81ohm, 81},
607 {PHY_DDR3_RTT_72ohm, 72},
608 {PHY_DDR3_RTT_64ohm, 64},
609 {PHY_DDR3_RTT_58ohm, 58},
610 {PHY_DDR3_RTT_52ohm, 52},
611 {PHY_DDR3_RTT_48ohm, 48},
612 {PHY_DDR3_RTT_44ohm, 44},
613 {PHY_DDR3_RTT_41ohm, 41},
614 {PHY_DDR3_RTT_38ohm, 38},
615 {PHY_DDR3_RTT_37ohm, 37},
616 {PHY_DDR3_RTT_34ohm, 34},
617 {PHY_DDR3_RTT_32ohm, 32},
618 {PHY_DDR3_RTT_31ohm, 31},
619 {PHY_DDR3_RTT_29ohm, 29},
620 {PHY_DDR3_RTT_28ohm, 28},
621 {PHY_DDR3_RTT_27ohm, 27},
622 {PHY_DDR3_RTT_25ohm, 25}
623};
624
625static u16 d4lp3_phy_drv_2_ohm[][2] = {
626 {PHY_DDR4_LPDDR3_RON_482ohm, 482},
627 {PHY_DDR4_LPDDR3_RON_244ohm, 244},
628 {PHY_DDR4_LPDDR3_RON_162ohm, 162},
629 {PHY_DDR4_LPDDR3_RON_122ohm, 122},
630 {PHY_DDR4_LPDDR3_RON_97ohm, 97},
631 {PHY_DDR4_LPDDR3_RON_81ohm, 81},
632 {PHY_DDR4_LPDDR3_RON_69ohm, 69},
633 {PHY_DDR4_LPDDR3_RON_61ohm, 61},
634 {PHY_DDR4_LPDDR3_RON_54ohm, 54},
635 {PHY_DDR4_LPDDR3_RON_48ohm, 48},
636 {PHY_DDR4_LPDDR3_RON_44ohm, 44},
637 {PHY_DDR4_LPDDR3_RON_40ohm, 40},
638 {PHY_DDR4_LPDDR3_RON_37ohm, 37},
639 {PHY_DDR4_LPDDR3_RON_34ohm, 34},
640 {PHY_DDR4_LPDDR3_RON_32ohm, 32},
641 {PHY_DDR4_LPDDR3_RON_30ohm, 30},
642 {PHY_DDR4_LPDDR3_RON_28ohm, 28},
643 {PHY_DDR4_LPDDR3_RON_27ohm, 27},
644 {PHY_DDR4_LPDDR3_RON_25ohm, 25},
645 {PHY_DDR4_LPDDR3_RON_24ohm, 24},
646 {PHY_DDR4_LPDDR3_RON_23ohm, 23},
647 {PHY_DDR4_LPDDR3_RON_22ohm, 22},
648 {PHY_DDR4_LPDDR3_RON_21ohm, 21}
649};
650
651static u16 d4lp3_phy_odt_2_ohm[][2] = {
652 {PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
653 {PHY_DDR4_LPDDR3_RTT_586ohm, 586},
654 {PHY_DDR4_LPDDR3_RTT_294ohm, 294},
655 {PHY_DDR4_LPDDR3_RTT_196ohm, 196},
656 {PHY_DDR4_LPDDR3_RTT_148ohm, 148},
657 {PHY_DDR4_LPDDR3_RTT_118ohm, 118},
658 {PHY_DDR4_LPDDR3_RTT_99ohm, 99},
659 {PHY_DDR4_LPDDR3_RTT_85ohm, 58},
660 {PHY_DDR4_LPDDR3_RTT_76ohm, 76},
661 {PHY_DDR4_LPDDR3_RTT_67ohm, 67},
662 {PHY_DDR4_LPDDR3_RTT_60ohm, 60},
663 {PHY_DDR4_LPDDR3_RTT_55ohm, 55},
664 {PHY_DDR4_LPDDR3_RTT_50ohm, 50},
665 {PHY_DDR4_LPDDR3_RTT_46ohm, 46},
666 {PHY_DDR4_LPDDR3_RTT_43ohm, 43},
667 {PHY_DDR4_LPDDR3_RTT_40ohm, 40},
668 {PHY_DDR4_LPDDR3_RTT_38ohm, 38},
669 {PHY_DDR4_LPDDR3_RTT_36ohm, 36},
670 {PHY_DDR4_LPDDR3_RTT_34ohm, 34},
671 {PHY_DDR4_LPDDR3_RTT_32ohm, 32},
672 {PHY_DDR4_LPDDR3_RTT_31ohm, 31},
673 {PHY_DDR4_LPDDR3_RTT_29ohm, 29},
674 {PHY_DDR4_LPDDR3_RTT_28ohm, 28},
675 {PHY_DDR4_LPDDR3_RTT_27ohm, 27}
676};
677
678static u16 lp4_phy_drv_2_ohm[][2] = {
679 {PHY_LPDDR4_RON_501ohm, 501},
680 {PHY_LPDDR4_RON_253ohm, 253},
681 {PHY_LPDDR4_RON_168ohm, 168},
682 {PHY_LPDDR4_RON_126ohm, 126},
683 {PHY_LPDDR4_RON_101ohm, 101},
684 {PHY_LPDDR4_RON_84ohm, 84},
685 {PHY_LPDDR4_RON_72ohm, 72},
686 {PHY_LPDDR4_RON_63ohm, 63},
687 {PHY_LPDDR4_RON_56ohm, 56},
688 {PHY_LPDDR4_RON_50ohm, 50},
689 {PHY_LPDDR4_RON_46ohm, 46},
690 {PHY_LPDDR4_RON_42ohm, 42},
691 {PHY_LPDDR4_RON_38ohm, 38},
692 {PHY_LPDDR4_RON_36ohm, 36},
693 {PHY_LPDDR4_RON_33ohm, 33},
694 {PHY_LPDDR4_RON_31ohm, 31},
695 {PHY_LPDDR4_RON_29ohm, 29},
696 {PHY_LPDDR4_RON_28ohm, 28},
697 {PHY_LPDDR4_RON_26ohm, 26},
698 {PHY_LPDDR4_RON_25ohm, 25},
699 {PHY_LPDDR4_RON_24ohm, 24},
700 {PHY_LPDDR4_RON_23ohm, 23},
701 {PHY_LPDDR4_RON_22ohm, 22}
702};
703
704static u16 lp4_phy_odt_2_ohm[][2] = {
705 {PHY_LPDDR4_RTT_DISABLE, 0},
706 {PHY_LPDDR4_RTT_604ohm, 604},
707 {PHY_LPDDR4_RTT_303ohm, 303},
708 {PHY_LPDDR4_RTT_202ohm, 202},
709 {PHY_LPDDR4_RTT_152ohm, 152},
710 {PHY_LPDDR4_RTT_122ohm, 122},
711 {PHY_LPDDR4_RTT_101ohm, 101},
712 {PHY_LPDDR4_RTT_87ohm, 87},
713 {PHY_LPDDR4_RTT_78ohm, 78},
714 {PHY_LPDDR4_RTT_69ohm, 69},
715 {PHY_LPDDR4_RTT_62ohm, 62},
716 {PHY_LPDDR4_RTT_56ohm, 56},
717 {PHY_LPDDR4_RTT_52ohm, 52},
718 {PHY_LPDDR4_RTT_48ohm, 48},
719 {PHY_LPDDR4_RTT_44ohm, 44},
720 {PHY_LPDDR4_RTT_41ohm, 41},
721 {PHY_LPDDR4_RTT_39ohm, 39},
722 {PHY_LPDDR4_RTT_37ohm, 37},
723 {PHY_LPDDR4_RTT_35ohm, 35},
724 {PHY_LPDDR4_RTT_33ohm, 33},
725 {PHY_LPDDR4_RTT_32ohm, 32},
726 {PHY_LPDDR4_RTT_30ohm, 30},
727 {PHY_LPDDR4_RTT_29ohm, 29},
728 {PHY_LPDDR4_RTT_27ohm, 27}
729};
730
731static u32 lp4_odt_calc(u32 odt_ohm)
732{
733 u32 odt;
734
735 if (odt_ohm == 0)
736 odt = LPDDR4_DQODT_DIS;
737 else if (odt_ohm <= 40)
738 odt = LPDDR4_DQODT_40;
739 else if (odt_ohm <= 48)
740 odt = LPDDR4_DQODT_48;
741 else if (odt_ohm <= 60)
742 odt = LPDDR4_DQODT_60;
743 else if (odt_ohm <= 80)
744 odt = LPDDR4_DQODT_80;
745 else if (odt_ohm <= 120)
746 odt = LPDDR4_DQODT_120;
747 else
748 odt = LPDDR4_DQODT_240;
749
750 return odt;
751}
752
753static void *get_ddr_drv_odt_info(u32 dramtype)
754{
755 struct sdram_head_info_index_v2 *index =
756 (struct sdram_head_info_index_v2 *)common_info;
757 void *ddr_info = 0;
758
759 if (dramtype == DDR4)
760 ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
761 else if (dramtype == DDR3)
762 ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
763 else if (dramtype == LPDDR3)
764 ddr_info = (void *)common_info + index->lp3_index.offset * 4;
765 else if (dramtype == LPDDR4)
766 ddr_info = (void *)common_info + index->lp4_index.offset * 4;
767 else
768 printascii("unsupported dram type\n");
769 return ddr_info;
770}
771
/*
 * Program the DRAM-side CA (MR12) and DQ (MR14) reference voltages for
 * the given frequency set point.
 *
 * The raw vref values from the loader parameters are chosen by whether
 * ODT is enabled at @freq_mhz, then converted to the MR12/MR14 encoding:
 * bit 6 selects the range, bits 5:0 the step within it.  The else branch
 * applies a different scale/offset — NOTE(review): presumably the
 * LPDDR4X encoding; confirm against JESD209-4.
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		/* clamp to the encodable range, then pick range bit + step */
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	/* write MR12/MR14 init values inside a quasi-dynamic window */
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}
842
843static void set_ds_odt(struct dram_info *dram,
844 struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
845{
846 void __iomem *phy_base = dram->phy;
847 void __iomem *pctl_base = dram->pctl;
848 u32 dramtype = sdram_params->base.dramtype;
849 struct ddr2_3_4_lp2_3_info *ddr_info;
850 struct lp4_info *lp4_info;
851 u32 i, j, tmp;
852 const u16 (*p_drv)[2];
853 const u16 (*p_odt)[2];
854 u32 drv_info, sr_info;
855 u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
856 u32 phy_odt_ohm, dram_odt_ohm;
857 u32 lp4_pu_cal, phy_lp4_drv_pd_en;
858 u32 phy_odt_up_en, phy_odt_dn_en;
859 u32 sr_dq, sr_clk;
860 u32 freq = sdram_params->base.ddr_freq;
861 u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
862 u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
863 u32 phy_dq_drv = 0;
864 u32 phy_odt_up = 0, phy_odt_dn = 0;
865
866 ddr_info = get_ddr_drv_odt_info(dramtype);
867 lp4_info = (void *)ddr_info;
868
869 if (!ddr_info)
870 return;
871
872 /* dram odt en freq control phy drv, dram odt and phy sr */
873 if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
874 drv_info = ddr_info->drv_when_odtoff;
875 dram_odt_ohm = 0;
876 sr_info = ddr_info->sr_when_odtoff;
877 phy_lp4_drv_pd_en =
878 PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
879 } else {
880 drv_info = ddr_info->drv_when_odten;
881 dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
882 sr_info = ddr_info->sr_when_odten;
883 phy_lp4_drv_pd_en =
884 PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
885 }
886 phy_dq_drv_ohm =
887 DRV_INFO_PHY_DQ_DRV(drv_info);
888 phy_clk_drv_ohm =
889 DRV_INFO_PHY_CLK_DRV(drv_info);
890 phy_ca_drv_ohm =
891 DRV_INFO_PHY_CA_DRV(drv_info);
892
893 sr_dq = DQ_SR_INFO(sr_info);
894 sr_clk = CLK_SR_INFO(sr_info);
895
896 /* phy odt en freq control dram drv and phy odt */
897 if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
898 dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
899 lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
900 phy_odt_ohm = 0;
901 phy_odt_up_en = 0;
902 phy_odt_dn_en = 0;
903 } else {
904 dram_drv_ohm =
905 DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
906 phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
907 phy_odt_up_en =
908 ODT_INFO_PULLUP_EN(ddr_info->odt_info);
909 phy_odt_dn_en =
910 ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
911 lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
912 }
913
914 if (dramtype == LPDDR4) {
915 if (phy_odt_ohm) {
916 phy_odt_up_en = 0;
917 phy_odt_dn_en = 1;
918 }
919 if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
920 dram_caodt_ohm = 0;
921 else
922 dram_caodt_ohm =
923 ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
924 }
925
926 if (dramtype == DDR3) {
927 p_drv = d3_phy_drv_2_ohm;
928 p_odt = d3_phy_odt_2_ohm;
929 } else if (dramtype == LPDDR4) {
930 p_drv = lp4_phy_drv_2_ohm;
931 p_odt = lp4_phy_odt_2_ohm;
932 } else {
933 p_drv = d4lp3_phy_drv_2_ohm;
934 p_odt = d4lp3_phy_odt_2_ohm;
935 }
936
937 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
938 if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
939 phy_dq_drv = **(p_drv + i);
940 break;
941 }
942 if (i == 0)
943 break;
944 }
945 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
946 if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
947 phy_clk_drv = **(p_drv + i);
948 break;
949 }
950 if (i == 0)
951 break;
952 }
953 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
954 if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
955 phy_ca_drv = **(p_drv + i);
956 break;
957 }
958 if (i == 0)
959 break;
960 }
961 if (!phy_odt_ohm)
962 phy_odt = 0;
963 else
964 for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
965 if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
966 phy_odt = **(p_odt + i);
967 break;
968 }
969 if (i == 0)
970 break;
971 }
972
973 if (dramtype != LPDDR4) {
974 if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
975 vref_inner = 0x80;
976 else if (phy_odt_up_en)
977 vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
978 (dram_drv_ohm + phy_odt_ohm);
979 else
980 vref_inner = phy_odt_ohm * 128 /
981 (phy_odt_ohm + dram_drv_ohm);
982
983 if (dramtype != DDR3 && dram_odt_ohm)
984 vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
985 (phy_dq_drv_ohm + dram_odt_ohm);
986 else
987 vref_out = 0x80;
988 } else {
989 /* for lp4 and lp4x*/
990 if (phy_odt_ohm)
991 vref_inner =
992 (PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
993 256) / 1000;
994 else
995 vref_inner =
996 (PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
997 256) / 1000;
998
999 vref_out = 0x80;
1000 }
1001
1002 /* default ZQCALIB bypass mode */
1003 clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1004 clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1005 clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1006 clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1007 if (dramtype == LPDDR4) {
1008 clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1009 clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1010 } else {
1011 clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1012 clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1013 }
1014 /* clk / cmd slew rate */
1015 clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1016
1017 phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1018 if (phy_odt_up_en)
1019 phy_odt_up = phy_odt;
1020 if (phy_odt_dn_en)
1021 phy_odt_dn = phy_odt;
1022
1023 for (i = 0; i < 4; i++) {
1024 j = 0x110 + i * 0x10;
1025 clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1026 clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1027 clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1028 clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1029 writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1030
1031 clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1032 1 << 3, phy_lp4_drv_pd_en << 3);
1033 if (dramtype == LPDDR4)
1034 clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1035 /* dq slew rate */
1036 clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1037 0x1f, sr_dq);
1038 }
1039
1040 /* reg_rx_vref_value_update */
1041 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1042 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1043
1044 /* RAM VREF */
1045 writel(vref_out, PHY_REG(phy_base, 0x105));
1046 if (dramtype == LPDDR3)
1047 udelay(100);
1048
1049 if (dramtype == LPDDR4)
1050 set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1051
1052 if (dramtype == DDR3 || dramtype == DDR4) {
1053 mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1054 DDR_PCTL2_INIT3);
1055 mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1056 } else {
1057 mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1058 DDR_PCTL2_INIT4);
1059 mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1060 }
1061
1062 if (dramtype == DDR3) {
1063 mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1064 if (dram_drv_ohm == 34)
1065 mr1_mr3 |= DDR3_DS_34;
1066
1067 if (dram_odt_ohm == 0)
1068 mr1_mr3 |= DDR3_RTT_NOM_DIS;
1069 else if (dram_odt_ohm <= 40)
1070 mr1_mr3 |= DDR3_RTT_NOM_40;
1071 else if (dram_odt_ohm <= 60)
1072 mr1_mr3 |= DDR3_RTT_NOM_60;
1073 else
1074 mr1_mr3 |= DDR3_RTT_NOM_120;
1075
1076 } else if (dramtype == DDR4) {
1077 mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1078 if (dram_drv_ohm == 48)
1079 mr1_mr3 |= DDR4_DS_48;
1080
1081 if (dram_odt_ohm == 0)
1082 mr1_mr3 |= DDR4_RTT_NOM_DIS;
1083 else if (dram_odt_ohm <= 34)
1084 mr1_mr3 |= DDR4_RTT_NOM_34;
1085 else if (dram_odt_ohm <= 40)
1086 mr1_mr3 |= DDR4_RTT_NOM_40;
1087 else if (dram_odt_ohm <= 48)
1088 mr1_mr3 |= DDR4_RTT_NOM_48;
1089 else if (dram_odt_ohm <= 60)
1090 mr1_mr3 |= DDR4_RTT_NOM_60;
1091 else
1092 mr1_mr3 |= DDR4_RTT_NOM_120;
1093
1094 } else if (dramtype == LPDDR3) {
1095 if (dram_drv_ohm <= 34)
1096 mr1_mr3 |= LPDDR3_DS_34;
1097 else if (dram_drv_ohm <= 40)
1098 mr1_mr3 |= LPDDR3_DS_40;
1099 else if (dram_drv_ohm <= 48)
1100 mr1_mr3 |= LPDDR3_DS_48;
1101 else if (dram_drv_ohm <= 60)
1102 mr1_mr3 |= LPDDR3_DS_60;
1103 else if (dram_drv_ohm <= 80)
1104 mr1_mr3 |= LPDDR3_DS_80;
1105
1106 if (dram_odt_ohm == 0)
1107 lp3_odt_value = LPDDR3_ODT_DIS;
1108 else if (dram_odt_ohm <= 60)
1109 lp3_odt_value = LPDDR3_ODT_60;
1110 else if (dram_odt_ohm <= 120)
1111 lp3_odt_value = LPDDR3_ODT_120;
1112 else
1113 lp3_odt_value = LPDDR3_ODT_240;
1114 } else {/* for lpddr4 and lpddr4x */
1115 /* MR3 for lp4 PU-CAL and PDDS */
1116 mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1117 mr1_mr3 |= lp4_pu_cal;
1118
1119 tmp = lp4_odt_calc(dram_drv_ohm);
1120 if (!tmp)
1121 tmp = LPDDR4_PDDS_240;
1122 mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1123
1124 /* MR11 for lp4 ca odt, dq odt set */
1125 mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1126 DDR_PCTL2_INIT6);
1127 mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1128
1129 mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1130
1131 tmp = lp4_odt_calc(dram_odt_ohm);
1132 mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1133
1134 tmp = lp4_odt_calc(dram_caodt_ohm);
1135 mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1136 sw_set_req(dram);
1137 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1138 DDR_PCTL2_INIT6,
1139 PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1140 mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1141 sw_set_ack(dram);
1142
1143 /* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1144 mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1145 DDR_PCTL2_INIT7);
1146 mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1147 mr22 &= ~LPDDR4_SOC_ODT_MASK;
1148
1149 tmp = lp4_odt_calc(phy_odt_ohm);
1150 mr22 |= tmp;
1151 mr22 = mr22 |
1152 (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1153 LPDDR4_ODTE_CK_SHIFT) |
1154 (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1155 LPDDR4_ODTE_CS_SHIFT) |
1156 (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1157 LPDDR4_ODTD_CA_SHIFT);
1158
1159 sw_set_req(dram);
1160 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1161 DDR_PCTL2_INIT7,
1162 PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1163 mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1164 sw_set_ack(dram);
1165 }
1166
1167 if (dramtype == DDR4 || dramtype == DDR3) {
1168 sw_set_req(dram);
1169 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1170 DDR_PCTL2_INIT3,
1171 PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1172 mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1173 sw_set_ack(dram);
1174 } else {
1175 sw_set_req(dram);
1176 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1177 DDR_PCTL2_INIT4,
1178 PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1179 mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1180 sw_set_ack(dram);
1181 }
1182}
1183
1184static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1185 struct rv1126_sdram_params *sdram_params)
1186{
1187 void __iomem *phy_base = dram->phy;
1188 u32 dramtype = sdram_params->base.dramtype;
1189 struct sdram_head_info_index_v2 *index =
1190 (struct sdram_head_info_index_v2 *)common_info;
1191 struct dq_map_info *map_info;
1192
1193 map_info = (struct dq_map_info *)((void *)common_info +
1194 index->dq_map_index.offset * 4);
1195
1196 if (dramtype <= LPDDR4)
1197 writel((map_info->byte_map[dramtype / 4] >>
1198 ((dramtype % 4) * 8)) & 0xff,
1199 PHY_REG(phy_base, 0x4f));
1200
1201 return 0;
1202}
1203
/*
 * Basic PHY configuration: dq path remap, PHY PLL, the board-supplied
 * PHY register table, byte-lane enables for the detected bus width and
 * training-related vref range selection.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* apply the board/frequency PHY register table (0xFFFFFFFF ends it) */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical lanes carry logical byte 0 and byte 1 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* enable byte lanes in reg 0xf according to bus width (bw) */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1249
1250static int update_refresh_reg(struct dram_info *dram)
1251{
1252 void __iomem *pctl_base = dram->pctl;
1253 u32 ret;
1254
1255 ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1256 writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1257
1258 return 0;
1259}
1260
1261/*
1262 * rank = 1: cs0
1263 * rank = 2: cs1
1264 */
1265int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1266{
1267 u32 ret;
1268 u32 i, temp;
1269 u32 dqmap;
1270
1271 void __iomem *pctl_base = dram->pctl;
1272 struct sdram_head_info_index_v2 *index =
1273 (struct sdram_head_info_index_v2 *)common_info;
1274 struct dq_map_info *map_info;
1275
1276 map_info = (struct dq_map_info *)((void *)common_info +
1277 index->dq_map_index.offset * 4);
1278
1279 if (dramtype == LPDDR2)
1280 dqmap = map_info->lp2_dq0_7_map;
1281 else
1282 dqmap = map_info->lp3_dq0_7_map;
1283
1284 pctl_read_mr(pctl_base, rank, mr_num);
1285
1286 ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1287
1288 if (dramtype != LPDDR4) {
1289 temp = 0;
1290 for (i = 0; i < 8; i++) {
1291 temp = temp | (((ret >> i) & 0x1) <<
1292 ((dqmap >> (i * 4)) & 0xf));
1293 }
1294 } else {
1295 temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1296 }
1297
1298 return temp;
1299}
1300
1301/* before call this function autorefresh should be disabled */
1302void send_a_refresh(struct dram_info *dram)
1303{
1304 void __iomem *pctl_base = dram->pctl;
1305
1306 while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1307 continue;
1308 writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1309}
1310
/*
 * Enter (en != 0) or exit (en == 0) software-requested self-refresh and
 * poll the controller status until the requested state is reached.
 */
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		/* wait for SW (not automatic) self-refresh to be active */
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		/* wait until the controller has left self-refresh */
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
			PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}
1333
/*
 * For every dq group, read back the left/right loop inverse-delay
 * values selected by the dq_sel table and store each into its matching
 * per-group register (destination offsets also come from dq_sel).
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}
1357
/* Pulse PHY reg 0x70 bit4 (with bit1/bit6 cleared) to latch new rx de-skew */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1367
/* Pulse PHY reg 0xc bit6 (after setting reg 2 bit3) to latch new tx de-skew */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1378
/* Pulse PHY reg 0x22 bit6 (after clearing reg 0x25 bit2) to latch CA de-skew */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1388
1389/*
1390 * dir: 0: de-skew = delta_*
1391 * 1: de-skew = reg val - delta_*
1392 * delta_dir: value for differential signal: clk/
1393 * delta_sig: value for single signal: ca/cmd
1394 */
1395static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1396 int delta_sig, u32 cs, u32 dramtype)
1397{
1398 void __iomem *phy_base = dram->phy;
1399 u32 i, cs_en, tmp;
1400 u32 dfi_lp_stat = 0;
1401
1402 if (cs == 0)
1403 cs_en = 1;
1404 else if (cs == 2)
1405 cs_en = 2;
1406 else
1407 cs_en = 3;
1408
1409 if (dramtype == LPDDR4 &&
1410 ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1411 dfi_lp_stat = 1;
1412 setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1413 }
1414 enter_sr(dram, 1);
1415
1416 for (i = 0; i < 0x20; i++) {
1417 if (dir == DESKEW_MDF_ABS_VAL)
1418 tmp = delta_sig;
1419 else
1420 tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1421 delta_sig;
1422 writel(tmp, PHY_REG(phy_base, 0x150 + i));
1423 }
1424
1425 if (dir == DESKEW_MDF_ABS_VAL)
1426 tmp = delta_dif;
1427 else
1428 tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1429 delta_sig + delta_dif;
1430 writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1431 writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1432 if (dramtype == LPDDR4) {
1433 writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1434 writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1435
1436 clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1437 update_ca_prebit(dram);
1438 }
1439 enter_sr(dram, 0);
1440
1441 if (dfi_lp_stat)
1442 clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1443}
1444
1445static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1446{
1447 u32 i, j, offset = 0;
1448 u32 min = 0x3f;
1449 void __iomem *phy_base = dram->phy;
1450 u32 byte_en;
1451
1452 if (signal == SKEW_TX_SIGNAL)
1453 offset = 8;
1454
1455 if (signal == SKEW_CA_SIGNAL) {
1456 for (i = 0; i < 0x20; i++)
1457 min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
1458 } else {
1459 byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1460 for (j = offset; j < offset + rank * 4; j++) {
1461 if (!((byte_en >> (j % 4)) & 1))
1462 continue;
1463 for (i = 0; i < 11; i++)
1464 min = MIN(min,
1465 readl(PHY_REG(phy_base,
1466 dqs_dq_skew_adr[j] +
1467 i)));
1468 }
1469 }
1470
1471 return min;
1472}
1473
1474static u32 low_power_update(struct dram_info *dram, u32 en)
1475{
1476 void __iomem *pctl_base = dram->pctl;
1477 u32 lp_stat = 0;
1478
1479 if (en) {
1480 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1481 } else {
1482 lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1483 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1484 }
1485
1486 return lp_stat;
1487}
1488
1489/*
1490 * signal:
1491 * dir: 0: de-skew = delta_*
1492 * 1: de-skew = reg val - delta_*
1493 * delta_dir: value for differential signal: dqs
1494 * delta_sig: value for single signal: dq/dm
1495 */
1496static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1497 int delta_dif, int delta_sig, u32 rank)
1498{
1499 void __iomem *phy_base = dram->phy;
1500 u32 i, j, tmp, offset;
1501 u32 byte_en;
1502
1503 byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1504
1505 if (signal == SKEW_RX_SIGNAL)
1506 offset = 0;
1507 else
1508 offset = 8;
1509
1510 for (j = offset; j < (offset + rank * 4); j++) {
1511 if (!((byte_en >> (j % 4)) & 1))
1512 continue;
1513 for (i = 0; i < 0x9; i++) {
1514 if (dir == DESKEW_MDF_ABS_VAL)
1515 tmp = delta_sig;
1516 else
1517 tmp = delta_sig + readl(PHY_REG(phy_base,
1518 dqs_dq_skew_adr[j] +
1519 i));
1520 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1521 }
1522 if (dir == DESKEW_MDF_ABS_VAL)
1523 tmp = delta_dif;
1524 else
1525 tmp = delta_dif + readl(PHY_REG(phy_base,
1526 dqs_dq_skew_adr[j] + 9));
1527 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1528 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1529 }
1530 if (signal == SKEW_RX_SIGNAL)
1531 update_dq_rx_prebit(dram);
1532 else
1533 update_dq_tx_prebit(dram);
1534}
1535
/*
 * Run read-gate training for one chip select. Returns 0 on success,
 * non-zero if the done flags don't match the enabled byte lanes.
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save current per-byte odt so it can be restored afterwards */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	/* non-LPDDR4: force a fixed odt during gate training */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* non-zero iff the done flags differ from the enabled lanes */
	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		/* restore the saved odt values */
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1585
/*
 * Run write leveling for one chip select. Spins forever (deliberately)
 * if leveling does not complete within ~1ms. Returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch the current MR1 value so the PHY can drive it during WL */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until every enabled byte lane reports leveling done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1648
/*
 * Alternating 0xaa/0x55 data pattern.
 * NOTE(review): not referenced in this part of the file; presumably
 * used as a training/test data pattern elsewhere — confirm against the
 * rest of the driver.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1655
/*
 * Run the PHY's automatic read training for one chip select.
 * Returns 0 on success, -1 on cs > 1, timeout or a reported training
 * error. For DDR3 with the default 0x80 rx vref, vref is lowered
 * slightly during training and restored at the end.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	/* DDR3 @ default vref: train with a slightly lower rx vref */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original rx vref lowered for training above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1772
/*
 * Run the PHY's automatic write training for one chip select.
 * Spins forever (deliberately) on timeout, returns -1 on a reported
 * training error, 0 on success. For LPDDR3 <= 400MHz, CL/CWL are
 * temporarily forced to 8/4 and restored afterwards. LPDDR4 write vref
 * results are saved into fsp_param for later dfs use.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* LPDDR3 low speed: save CL/CWL and force 8/4 during training */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* wait for the train-done flag */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the CL/CWL saved for LPDDR3 low speed above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1885
1886static int data_training(struct dram_info *dram, u32 cs,
1887 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1888 u32 training_flag)
1889{
1890 u32 ret = 0;
1891
1892 if (training_flag == FULL_TRAINING)
1893 training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1894 WRITE_TRAINING | READ_TRAINING;
1895
1896 if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1897 ret = data_training_wl(dram, cs,
1898 sdram_params->base.dramtype,
1899 sdram_params->ch.cap_info.rank);
1900 if (ret != 0)
1901 goto out;
1902 }
1903
1904 if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1905 ret = data_training_rg(dram, cs,
1906 sdram_params->base.dramtype);
1907 if (ret != 0)
1908 goto out;
1909 }
1910
1911 if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1912 ret = data_training_rd(dram, cs,
1913 sdram_params->base.dramtype,
1914 sdram_params->base.ddr_freq);
1915 if (ret != 0)
1916 goto out;
1917 }
1918
1919 if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1920 ret = data_training_wr(dram, cs,
1921 sdram_params->base.dramtype,
1922 sdram_params->base.ddr_freq, dst_fsp);
1923 if (ret != 0)
1924 goto out;
1925 }
1926
1927out:
1928 return ret;
1929}
1930
1931static int get_wrlvl_val(struct dram_info *dram,
1932 struct rv1126_sdram_params *sdram_params)
1933{
1934 int i, j, clk_skew;
1935 void __iomem *phy_base = dram->phy;
1936 u32 lp_stat;
1937 int ret;
1938
1939 lp_stat = low_power_update(dram, 0);
1940
1941 clk_skew = 0x1f;
1942 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1943 sdram_params->base.dramtype);
1944
1945 ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1946 if (sdram_params->ch.cap_info.rank == 2)
1947 ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1948
1949 for (j = 0; j < 2; j++)
1950 for (i = 0; i < 4; i++)
1951 wrlvl_result[j][i] =
1952 (readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
1953 clk_skew;
1954
1955 low_power_update(dram, lp_stat);
1956
1957 return ret;
1958}
1959
1960#if defined(CONFIG_CMD_DDR_TEST_TOOL)
1961static void init_rw_trn_result_struct(struct rw_trn_result *result,
1962 void __iomem *phy_base, u8 cs_num)
1963{
1964 int i;
1965
1966 result->cs_num = cs_num;
1967 result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1968 PHY_DQ_WIDTH_MASK;
1969 for (i = 0; i < FSP_NUM; i++)
1970 result->fsp_mhz[i] = 0;
1971}
1972
/*
 * Copy the per-dq min/max read and write training window values out of
 * the PHY result registers into the rd/wr result structs, for every
 * byte lane enabled in byte_en.
 */
static void save_rw_trn_min_max(void __iomem *phy_base,
				struct cs_rw_trn_result *rd_result,
				struct cs_rw_trn_result *wr_result,
				u8 byte_en)
{
	u16 phy_ofs;
	u8 dqs;
	u8 dq;

	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
		if ((byte_en & BIT(dqs)) == 0)
			continue;

		/* Channel A or B (low or high 16 bit) */
		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
		/* low or high 8 bit */
		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
		/* rd min/max at +0x15/+0x27, wr min/max at +0x3d/+0x4f */
		for (dq = 0; dq < 8; dq++) {
			rd_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
			rd_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
			wr_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
			wr_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
		}
	}
}
2002
/*
 * Copy the per-cs dq and dqs de-skew registers into the fsp result
 * struct, together with the min_val baseline. rw selects the rx
 * (SKEW_RX_SIGNAL) or tx register bank.
 */
static void save_rw_trn_deskew(void __iomem *phy_base,
			       struct fsp_rw_trn_result *result, u8 cs_num,
			       int min_val, bool rw)
{
	u16 phy_ofs;
	u8 cs;
	u8 dq;

	result->min_val = min_val;

	for (cs = 0; cs < cs_num; cs++) {
		/* cs0 bank at 0x170, cs1 bank at 0x1a0 */
		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
		/* rx registers at +0x1, tx registers at +0x17 */
		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
		for (dq = 0; dq < 8; dq++) {
			result->cs[cs].dqs[0].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + dq));
			result->cs[cs].dqs[1].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
			result->cs[cs].dqs[2].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
			result->cs[cs].dqs[3].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
		}

		/* dqs de-skew sits at +0x8 of each per-byte group */
		result->cs[cs].dqs[0].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x8));
		result->cs[cs].dqs[1].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
		result->cs[cs].dqs[2].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
		result->cs[cs].dqs[3].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
	}
}
2037
/*
 * Tag the training result with DDR_DQ_EYE_FLAG and copy it to the
 * fixed DDR address where the ddr test tool looks for it.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
2043#endif
2044
/*
 * Run the high-frequency training sequence for frequency set point @fsp.
 *
 * Steps visible here:
 *  1. Average the per-byte write-leveling results (wrlvl_result) into a
 *     DQS skew, then derive clock/CA skews per DRAM type.
 *  2. Program per-rank DQS delay registers and run read-gate, read and
 *     write training for each rank.
 *  3. Normalize RX/TX/CA deskew values so the minimum becomes zero.
 *  4. Re-run read-gate training with the adjusted deskews.
 *
 * Returns 0 on success, non-zero if any data_training() call failed.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	/* byte_en: bitmask of active DQ bytes from PHY reg 0xf */
	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	dqs_skew = 0;
	/* sum write-leveling results over all ranks and enabled bytes */
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	/* average over rank count * byte count (bw is log2 of byte lanes) */
	dqs_skew = dqs_skew /
		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));

	/* center the clock around the averaged DQS skew (0x20 = midpoint) */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		/*
		 * LPDDR4: shift clock/CA just enough that the most
		 * negative write-leveling result becomes zero.
		 */
		min_val = 0xff;
		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
			for (i = 0; i < sdram_params->ch.cap_info.bw; i++)
				min_val = MIN(wrlvl_result[j][i], min_val);

		if (min_val < 0) {
			clk_skew = -min_val;
			ca_skew = -min_val;
		} else {
			clk_skew = 0;
			ca_skew = 0;
		}
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* program rank-0 per-byte DQS delays, then train rank 0 */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* same DQS registers, reloaded with rank-1 values */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift RX deskews down so the minimum value becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* shift TX and CA deskews by the smaller of the two minima */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* re-validate read gates after the deskew adjustments */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2157
2158static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2159{
2160 writel(ddrconfig, &dram->msch->deviceconf);
2161 clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2162}
2163
/*
 * Derive the NoC (msch) burst-size/penalty fields from the current data
 * bus width and controller burst length, then write all NoC timing
 * registers.
 *
 * @dram:         driver state (pctl/msch register bases)
 * @sdram_params: channel parameters; noc_timings fields are updated
 *                in place before being written to hardware
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	/* bw in bits (cap_info.bw is log2 of byte lanes, x8 lanes) */
	bw = 8 << sdram_params->ch.cap_info.bw;
	/* burst length from MSTR[19:16], stored as BL/2 in the register */
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	if ((bw / 8 * bl) <= 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;

	/* penalty scaled up when one NoC burst spans multiple DRAM bursts */
	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;

	if (sdram_params->base.dramtype == LPDDR4) {
		/* masked-write size depends on bus width for LPDDR4 */
		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
			(bw == 16) ? 0x1 : 0x2;
		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
	}

	/* commit all NoC timing registers to the memory scheduler */
	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
2205
/*
 * Configure the DDR "split" logic for asymmetric row configurations,
 * where the high 16 bits of the data bus cover fewer rows than the low
 * 16 bits. Computes the capacity boundary, programs split mode/size in
 * the DDR GRF, and disables the msch AXI bypass.
 *
 * If neither CS has a reduced high-16bit row count, the split hardware
 * is left untouched. Always returns 0.
 */
static int split_setup(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 split_size, split_mode;
	u64 cs_cap[2], cap;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
	/* only support the larger cap is in low 16bit */
	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
		/* scale CS0 cap down by the missing row bits */
		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
		      cap_info->cs0_high16bit_row));
	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
		   (cap_info->rank == 2)) {
		if (!cap_info->cs1_high16bit_row)
			cap = cs_cap[0];
		else
			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
			      cap_info->cs1_high16bit_row));
	} else {
		/* no asymmetry: leave split hardware as-is */
		goto out;
	}
	/* split size is expressed in 16MiB units (cap >> 24) */
	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	/* enable split: clear bypass, program mode and size */
	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	/* route msch AXI traffic through the split logic */
	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}
2251
2252static void split_bypass(struct dram_info *dram)
2253{
2254 if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2255 (1 << SPLIT_BYPASS_OFFSET)) != 0)
2256 return;
2257
2258 /* bypass split */
2259 rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2260 (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2261 (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2262 (0x1 << SPLIT_BYPASS_OFFSET) |
2263 (0x0 << SPLIT_SIZE_OFFSET));
2264}
2265
/*
 * Final system-level DRAM configuration: program the msch ddrconfig,
 * store the detected geometry into PMU GRF os_reg[2]/os_reg[3] for later
 * stages, write per-CS capacity to the msch devicesize register, and
 * refresh the NoC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	/* encode geometry into sys_reg2/3 for consumers of os_reg */
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/*
		 * CS address bit position from ADDRMAP0[4:0] plus base
		 * offset (6 + 2); when the CS bit sits above bit 28 the
		 * CS0 region is rounded up to that power of two.
		 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize fields are in units of 64MiB per CS */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
	       (((cs_cap[0] >> 20) / 64) & 0xff),
	       &dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2298
/*
 * Enable DRAM low-power features: hardware low-power request per DRAM
 * type in the DDR GRF, clock-stop (lpckdis), and controller-side
 * power-down / self-refresh depending on the configured idle counts.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	/* NOTE(review): magic value per vendor init sequence — not
	 * decoded here; confirm against the DDR GRF register layout.
	 */
	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/* select the low-power request bit for the DRAM type
	 * (upper halfword 0x7 is the write-enable mask)
	 */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
2329
2330static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2331{
2332 u32 split;
2333
2334 if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2335 (1 << SPLIT_BYPASS_OFFSET)) != 0)
2336 split = 0;
2337 else
2338 split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2339 SPLIT_SIZE_MASK;
2340
2341 sdram_print_ddr_info(&sdram_params->ch.cap_info,
2342 &sdram_params->base, split);
2343}
2344
2345static int sdram_init_(struct dram_info *dram,
2346 struct rv1126_sdram_params *sdram_params, u32 post_init)
2347{
2348 void __iomem *pctl_base = dram->pctl;
2349 void __iomem *phy_base = dram->phy;
2350 u32 ddr4_vref;
2351 u32 mr_tmp;
2352
2353 rkclk_configure_ddr(dram, sdram_params);
2354
2355 rkclk_ddr_reset(dram, 1, 1, 1, 1);
2356 udelay(10);
2357
2358 rkclk_ddr_reset(dram, 1, 1, 1, 0);
2359 phy_cfg(dram, sdram_params);
2360
2361 rkclk_ddr_reset(dram, 1, 1, 0, 0);
2362 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2363
2364 rkclk_ddr_reset(dram, 1, 0, 0, 0);
2365 pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2366 dram->sr_idle, dram->pd_idle);
2367
2368 if (sdram_params->ch.cap_info.bw == 2) {
2369 /* 32bit interface use pageclose */
2370 setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2371 /* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
2372 clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2373 } else {
2374 clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2375 }
2376
2377#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2378 u32 tmp, trefi;
2379
2380 tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2381 trefi = (tmp >> 16) & 0xfff;
2382 writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2383 pctl_base + DDR_PCTL2_RFSHTMG);
2384#endif
2385
2386 <