// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 * Copyright (c) 2022 Edgeble AI Technologies Pvt. Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/hardware.h>
#include <asm/arch-rockchip/cru_rv1126.h>
#include <asm/arch-rockchip/grf_rv1126.h>
#include <asm/arch-rockchip/sdram_common.h>
#include <asm/arch-rockchip/sdram_rv1126.h>
#include <linux/delay.h>
/* training flags */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

struct dram_info {
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	void __iomem *pctl;
	void __iomem *phy;
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	u32 sr_idle;
	u32 pd_idle;
#endif
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
};

#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))

#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;

struct rv1126_sdram_params sdram_configs[] = {
#if defined(CONFIG_RAM_ROCKCHIP_LPDDR4)
# include "sdram-rv1126-lpddr4-detect-328.inc"
# include "sdram-rv1126-lpddr4-detect-396.inc"
# include "sdram-rv1126-lpddr4-detect-528.inc"
# include "sdram-rv1126-lpddr4-detect-664.inc"
# include "sdram-rv1126-lpddr4-detect-784.inc"
# include "sdram-rv1126-lpddr4-detect-924.inc"
# include "sdram-rv1126-lpddr4-detect-1056.inc"
#elif defined(CONFIG_RAM_ROCKCHIP_DDR4)
# include "sdram-rv1126-ddr4-detect-328.inc"
# include "sdram-rv1126-ddr4-detect-396.inc"
# include "sdram-rv1126-ddr4-detect-528.inc"
# include "sdram-rv1126-ddr4-detect-664.inc"
# include "sdram-rv1126-ddr4-detect-784.inc"
# include "sdram-rv1126-ddr4-detect-924.inc"
# include "sdram-rv1126-ddr4-detect-1056.inc"
#else
# include "sdram-rv1126-ddr3-detect-328.inc"
# include "sdram-rv1126-ddr3-detect-396.inc"
# include "sdram-rv1126-ddr3-detect-528.inc"
# include "sdram-rv1126-ddr3-detect-664.inc"
# include "sdram-rv1126-ddr3-detect-784.inc"
# include "sdram-rv1126-ddr3-detect-924.inc"
# include "sdram-rv1126-ddr3-detect-1056.inc"
#endif
};

u32 common_info[] = {
#include "sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
static struct rw_trn_result rw_trn_result;
#endif

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

static s8 wrlvl_result[2][4];

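/*
 * Each ddr_cfg_2_rbc[] entry packs the geometry it can serve. Judging from
 * the comparisons in calculate_ddrconfig(), the layout appears to be:
 * bit [8]: rank - 1, bits [7:5]: cs0_row - 13, bit [4]: set only in the
 * dual-rank/equal-row entries matched by the dedicated loop, bit [3]:
 * 8-bank flag (bk == 3), bits [2:0]: bw + col - 10.
 */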
/* DDR configuration 0-9 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2)  /* 9 */
};

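/*
 * DDR4 entries, matched against the value built in calculate_ddrconfig():
 * bit [7]: rank - 1, bits [6:4]: cs0_row - 13, bit [3]: dual-rank/equal-row
 * flag, bits [2:1]: bw, bit [0]: die bw.
 */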
/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0)  /* 21 */
};

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3)  /* 28 */
};

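/* {DDR4 ddrconfig, equivalent DDR3-style ddrconfig} remap pairs */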
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};

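/*
 * Per-ddrconfig values for the uMCTL2 ADDRMAP0..ADDRMAP8 registers;
 * set_ctl_address_map() copies all nine words starting at ADDRMAP0.
 */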
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
	 0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}  /* 28 */
};

static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	gbl_info = (struct global_info *)((void *)common_info +
					  index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
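	/*
	 * With the 24 MHz reference implied by the divisor below:
	 * Fout = 24 * fbdiv / (refdiv * postdiv1 * postdiv2) MHz,
	 * so solve for fbdiv to land on the requested rate.
	 */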
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* the Inno DDR PHY needs the PLL to run at half the DDR frequency */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}

static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done to 0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done to 1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait for programming to complete */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
		    PCTL2_SW_DONE_ACK)
			break;
	}
}

static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0],
			  ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* set unused row address bits to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			     ((i - 12) * 8 / 32) * 4,
			     0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};

static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};

static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_85ohm, 85},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
};

static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = NULL;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

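	/*
	 * MR12/MR14 VREF encoding: bit 6 selects the range, bits [5:0] the
	 * step. The values above look like tenths of a percent of VDDQ,
	 * which matches the LPDDR4 ranges of 10.0-30.0% (range 0) and
	 * 22.0-42.0% (range 1) in 0.4% steps, hence the /4 below.
	 */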
	if (dramtype == LPDDR4) {
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* dram odt en freq control phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* phy odt en freq control dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4) {
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

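	/*
	 * All three drive tables (and likewise all three ODT tables) have
	 * the same number of entries, so ARRAY_SIZE(d3_phy_drv_2_ohm) and
	 * ARRAY_SIZE(d4lp3_phy_odt_2_ohm) below are valid bounds whichever
	 * table p_drv/p_odt point at.
	 */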
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

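	/*
	 * Receiver VREF: 0x80 is VDDQ/2 on a 256-step scale. With
	 * single-sided termination the switching midpoint follows the
	 * resistor divider formed by the DRAM driver and the PHY ODT,
	 * which is what the expressions below compute.
	 */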
	if (dramtype != LPDDR4) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				     (phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 and lp4x */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		if (dramtype == LPDDR4)
			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				     BIT(5));
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);

	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else { /* for lpddr4 and lpddr4x */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;

		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);

		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

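	/*
	 * Bit 1 of RFSHCTL3 (refresh_update_level in the uMCTL2 layout) is
	 * toggled so that updated refresh registers take effect.
	 */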
	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);

	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

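	/*
	 * dqmap records the board's DQ swizzle; the remap loop below undoes
	 * it so the MR value is returned in logical bit order for
	 * non-LPDDR4 devices.
	 */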
	pctl_read_mr(pctl_base, rank, mr_num);

	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4) {
		temp = 0;
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				       ((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return temp;
}

/* auto-refresh must be disabled before calling this function */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

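	/* DBGSTAT/DBGCMD bits [1:0] are the uMCTL2 per-rank manual refresh controls */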
	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
			PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

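	/*
	 * For each byte group, read back the trained pre-bit delays through
	 * the indirect select/data register pairs (0x2c/0x2e and 0x2d/0x2f)
	 * and copy them into the per-bit deskew registers listed in dq_sel[].
	 */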
	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val + delta_*
 * delta_dif: value for differential signal: clk
 * delta_sig: value for single-ended signal: ca/cmd
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	if (dramtype == LPDDR4 &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		      delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}

static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

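	/*
	 * PWRCTL[3:0] enable the controller low-power features; when
	 * disabling, the previous enables are returned so the caller can
	 * restore them later.
	 */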
	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}

/*
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val + delta_*
 * delta_dif: value for differential signal: dqs
 * delta_sig: value for single-ended signal: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig +
				      readl(PHY_REG(phy_base,
						    dqs_dq_skew_adr[j] + i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif +
			      readl(PHY_REG(phy_base,
					    dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}

static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

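	/*
	 * PHY_0x91 appears to report the per-byte gate-training result;
	 * XORing with the enabled-byte mask leaves 0 only when every
	 * enabled byte trained successfully.
	 */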
	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}

static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable the other cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* re-enable the other cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}

char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};

static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);
	/* only one cs at a time; 0: cs0, 1: cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
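	/*
	 * Assuming the standard uMCTL2 RFSHTMG layout, the upper field holds
	 * t_rfc_nom in units of 32 clocks (hence the * 32) and the lower
	 * field holds t_rfc_min in clocks.
	 */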
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* select the read-train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* start the automatic read training */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* wait for training to finish */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* check the read training state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* exit read training */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}

1781static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1782 u32 mhz, u32 dst_fsp)
1783{
1784 void __iomem *pctl_base = dram->pctl;
1785 void __iomem *phy_base = dram->phy;
1786 u32 trefi_1x, trfc_1x;
1787 u32 dis_auto_zq = 0;
1788 u32 timeout_us = 1000;
1789 u32 cur_fsp;
1790 u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1791
1792 if (dramtype == LPDDR3 && mhz <= 400) {
1793 phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1794 offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1795 cl = readl(PHY_REG(phy_base, offset));
1796 cwl = readl(PHY_REG(phy_base, offset + 2));
1797
1798 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1799 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1800 pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1801 }
1802
1803 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1804
1805 /* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1806 clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1807 /* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1808 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1809 /* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1810 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1811 /* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1812 clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1813 /* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1814 clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1815
1816 /* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1817 clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1818
1819 /* config refresh timing */
1820 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1821 trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1822 DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1823 trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1824 DDR_PCTL2_RFSHTMG) & 0x3ff;
1825 /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1826 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1827 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1828 /* reg_phy_trfc */
1829 clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1830 /* reg_max_refi_cnt */
1831 clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1832
1833 /* choose training cs */
1834 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1835
1836 /* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
1837 /* 0: use the write-leveling result */
1838 /* 1: use regs 0x233, 0x237, 0x2b3 and 0x2b7 */
1839 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1840
1841 /* PHY_0x7a [0] reg_dq_wr_train_auto */
1842 setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1843
1844 /* PHY_0x7a [1] reg_dq_wr_train_en */
1845 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1846
1847 send_a_refresh(dram);
1848
1849 while (1) {
1850 if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1851 break;
1852
1853 udelay(1);
1854 if (timeout_us-- == 0) {
1855 printascii("error: write training timeout\n");
1856 return -1;
1858 }
1859 }
1860
1861 /* Check the write train state */
1862 if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1863 printascii("error: write training error\n");
1864 return -1;
1865 }
1866
1867 /* PHY_0x7a [1] reg_dq_wr_train_en */
1868 clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1869
1870 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1871
1872 /* save LPDDR4 write vref to fsp_param for dfs */
1873 if (dramtype == LPDDR4) {
1874 fsp_param[dst_fsp].vref_dq[cs] =
1875 ((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1876 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1877 /* add range info */
1878 fsp_param[dst_fsp].vref_dq[cs] |=
1879 ((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1880 }
1881
1882 if (dramtype == LPDDR3 && mhz <= 400) {
1883 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1884 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1885 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1886 DDR_PCTL2_INIT3);
1887 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1888 dramtype);
1889 }
1890
1891 return 0;
1892}
1893
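/*
 * Run the training steps requested in training_flag for one chip select.
 * FULL_TRAINING expands to write leveling, then read gate, read and write
 * training (CA training is not part of the expansion).  The sequence
 * stops at the first failing step and returns its error.
 */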
1894static int data_training(struct dram_info *dram, u32 cs,
1895 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1896 u32 training_flag)
1897{
1898 u32 ret = 0;
1899
1900 if (training_flag == FULL_TRAINING)
1901 training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1902 WRITE_TRAINING | READ_TRAINING;
1903
1904 if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1905 ret = data_training_wl(dram, cs,
1906 sdram_params->base.dramtype,
1907 sdram_params->ch.cap_info.rank);
1908 if (ret != 0)
1909 goto out;
1910 }
1911
1912 if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1913 ret = data_training_rg(dram, cs,
1914 sdram_params->base.dramtype);
1915 if (ret != 0)
1916 goto out;
1917 }
1918
1919 if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1920 ret = data_training_rd(dram, cs,
1921 sdram_params->base.dramtype,
1922 sdram_params->base.ddr_freq);
1923 if (ret != 0)
1924 goto out;
1925 }
1926
1927 if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1928 ret = data_training_wr(dram, cs,
1929 sdram_params->base.dramtype,
1930 sdram_params->base.ddr_freq, dst_fsp);
1931 if (ret != 0)
1932 goto out;
1933 }
1934
1935out:
1936 return ret;
1937}
1938
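/*
 * Capture raw write-leveling results.  A known CA/clk deskew bias (0x1f)
 * is applied first, write leveling is run for each rank, and
 * wrlvl_result[][] is filled with the levelled values minus that bias.
 */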
1939static int get_wrlvl_val(struct dram_info *dram,
1940 struct rv1126_sdram_params *sdram_params)
1941{
1942 int i, j, clk_skew;
1943 void __iomem *phy_base = dram->phy;
1944 u32 lp_stat;
1945 int ret;
1946
1947 lp_stat = low_power_update(dram, 0);
1948
1949 clk_skew = 0x1f;
1950 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1951 sdram_params->base.dramtype);
1952
1953 ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1954 if (sdram_params->ch.cap_info.rank == 2)
1955 ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1956
1957 for (j = 0; j < 2; j++)
1958 for (i = 0; i < 4; i++)
1959 wrlvl_result[j][i] =
1960 (readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
1961 clk_skew;
1962
1963 low_power_update(dram, lp_stat);
1964
1965 return ret;
1966}
1967
1968#if defined(CONFIG_CMD_DDR_TEST_TOOL)
1969static void init_rw_trn_result_struct(struct rw_trn_result *result,
1970 void __iomem *phy_base, u8 cs_num)
1971{
1972 int i;
1973
1974 result->cs_num = cs_num;
1975 result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1976 PHY_DQ_WIDTH_MASK;
1977 for (i = 0; i < FSP_NUM; i++)
1978 result->fsp_mhz[i] = 0;
1979}
1980
1981static void save_rw_trn_min_max(void __iomem *phy_base,
1982 struct cs_rw_trn_result *rd_result,
1983 struct cs_rw_trn_result *wr_result,
1984 u8 byte_en)
1985{
1986 u16 phy_ofs;
1987 u8 dqs;
1988 u8 dq;
1989
1990 for (dqs = 0; dqs < BYTE_NUM; dqs++) {
1991 if ((byte_en & BIT(dqs)) == 0)
1992 continue;
1993
1994 /* Channel A or B (low or high 16 bit) */
1995 phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
1996 /* low or high 8 bit */
1997 phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
1998 for (dq = 0; dq < 8; dq++) {
1999 rd_result->dqs[dqs].dq_min[dq] =
2000 readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2001 rd_result->dqs[dqs].dq_max[dq] =
2002 readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2003 wr_result->dqs[dqs].dq_min[dq] =
2004 readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2005 wr_result->dqs[dqs].dq_max[dq] =
2006 readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2007 }
2008 }
2009}
2010
2011static void save_rw_trn_deskew(void __iomem *phy_base,
2012 struct fsp_rw_trn_result *result, u8 cs_num,
2013 int min_val, bool rw)
2014{
2015 u16 phy_ofs;
2016 u8 cs;
2017 u8 dq;
2018
2019 result->min_val = min_val;
2020
2021 for (cs = 0; cs < cs_num; cs++) {
2022 phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2023 phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2024 for (dq = 0; dq < 8; dq++) {
2025 result->cs[cs].dqs[0].dq_deskew[dq] =
2026 readb(PHY_REG(phy_base, phy_ofs + dq));
2027 result->cs[cs].dqs[1].dq_deskew[dq] =
2028 readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2029 result->cs[cs].dqs[2].dq_deskew[dq] =
2030 readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2031 result->cs[cs].dqs[3].dq_deskew[dq] =
2032 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2033 }
2034
2035 result->cs[cs].dqs[0].dqs_deskew =
2036 readb(PHY_REG(phy_base, phy_ofs + 0x8));
2037 result->cs[cs].dqs[1].dqs_deskew =
2038 readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2039 result->cs[cs].dqs[2].dqs_deskew =
2040 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2041 result->cs[cs].dqs[3].dqs_deskew =
2042 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2043 }
2044}
2045
2046static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2047{
2048 result->flag = DDR_DQ_EYE_FLAG;
2049 memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2050}
2051#endif
2052
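/*
 * Training sequence used after a frequency switch: derive clk/CA skew
 * from the averaged write-leveling results, seed the per-byte write DQS
 * defaults (regs 0x233/0x237/0x2b3/0x2b7), then run read gate, read and
 * write training per rank.  The recorded per-bit deskew values are then
 * shifted so the smallest becomes zero, and read gate training is run a
 * final time with the settled skews.
 */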
2053static int high_freq_training(struct dram_info *dram,
2054 struct rv1126_sdram_params *sdram_params,
2055 u32 fsp)
2056{
2057 u32 i, j;
2058 void __iomem *phy_base = dram->phy;
2059 u32 dramtype = sdram_params->base.dramtype;
2060 int min_val;
2061 int dqs_skew, clk_skew, ca_skew;
2062 u8 byte_en;
2063 int ret;
2064
2065 byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
2066 dqs_skew = 0;
2067 for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
2068 for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2069 if ((byte_en & BIT(i)) != 0)
2070 dqs_skew += wrlvl_result[j][i];
2071 }
2072 }
2073 dqs_skew = dqs_skew /
2074 (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));
2075
2076 clk_skew = 0x20 - dqs_skew;
2077 dqs_skew = 0x20;
2078
2079 if (dramtype == LPDDR4) {
2080 min_val = 0xff;
2081 for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
2082 for (i = 0; i < sdram_params->ch.cap_info.bw; i++)
2083 min_val = MIN(wrlvl_result[j][i], min_val);
2084
2085 if (min_val < 0) {
2086 clk_skew = -min_val;
2087 ca_skew = -min_val;
2088 } else {
2089 clk_skew = 0;
2090 ca_skew = 0;
2091 }
2092 } else if (dramtype == LPDDR3) {
2093 ca_skew = clk_skew - 4;
2094 } else {
2095 ca_skew = clk_skew;
2096 }
2097 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
2098 dramtype);
2099
2100 writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
2101 writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
2102 writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2103 writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2104 ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
2105 READ_TRAINING | WRITE_TRAINING);
2106#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2107 rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
2108 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
2109 &rw_trn_result.wr_fsp[fsp].cs[0],
2110 rw_trn_result.byte_en);
2111#endif
2112 if (sdram_params->ch.cap_info.rank == 2) {
2113 writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
2114 writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
2115 writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2116 writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2117 ret |= data_training(dram, 1, sdram_params, fsp,
2118 READ_GATE_TRAINING | READ_TRAINING |
2119 WRITE_TRAINING);
2120#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2121 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
2122 &rw_trn_result.wr_fsp[fsp].cs[1],
2123 rw_trn_result.byte_en);
2124#endif
2125 }
2126 if (ret)
2127 goto out;
2128
2129 record_dq_prebit(dram);
2130
2131 min_val = get_min_value(dram, SKEW_RX_SIGNAL,
2132 sdram_params->ch.cap_info.rank) * -1;
2133 modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2134 min_val, min_val, sdram_params->ch.cap_info.rank);
2135#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2136 save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
2137 rw_trn_result.cs_num, (u8)(min_val * (-1)),
2138 SKEW_RX_SIGNAL);
2139#endif
2140
2141 min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
2142 sdram_params->ch.cap_info.rank),
2143 get_min_value(dram, SKEW_CA_SIGNAL,
2144 sdram_params->ch.cap_info.rank)) * -1;
2145
2146 /* clk = 0, rx all skew -7, tx - min_value */
2147 modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
2148 dramtype);
2149
2150 modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2151 min_val, min_val, sdram_params->ch.cap_info.rank);
2152#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2153 save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
2154 rw_trn_result.cs_num, (u8)(min_val * (-1)),
2155 SKEW_TX_SIGNAL);
2156#endif
2157
2158 ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
2159 if (sdram_params->ch.cap_info.rank == 2)
2160 ret |= data_training(dram, 1, sdram_params, 0,
2161 READ_GATE_TRAINING);
2162out:
2163 return ret;
2164}
2165
2166static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2167{
2168 writel(ddrconfig, &dram->msch->deviceconf);
2169 clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2170}
2171
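/*
 * Derive the NoC burstsize/burstpenalty (and, for LPDDR4, mwrsize and
 * wrtomwr) fields from the effective bus width and the controller burst
 * length, then write the full set of msch timing registers.
 */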
2172static void update_noc_timing(struct dram_info *dram,
2173 struct rv1126_sdram_params *sdram_params)
2174{
2175 void __iomem *pctl_base = dram->pctl;
2176 u32 bw, bl;
2177
2178 bw = 8 << sdram_params->ch.cap_info.bw;
2179 bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2180
2181 /* update the noc timing related to data bus width */
2182 if ((bw / 8 * bl) <= 16)
2183 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2184 else if ((bw / 8 * bl) == 32)
2185 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2186 else if ((bw / 8 * bl) == 64)
2187 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2188 else
2189 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2190
2191 sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2192 (bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2193
2194 if (sdram_params->base.dramtype == LPDDR4) {
2195 sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2196 (bw == 16) ? 0x1 : 0x2;
2197 sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2198 3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2199 }
2200
2201 writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2202 &dram->msch->ddrtiminga0);
2203 writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2204 &dram->msch->ddrtimingb0);
2205 writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2206 &dram->msch->ddrtimingc0);
2207 writel(sdram_params->ch.noc_timings.devtodev0.d32,
2208 &dram->msch->devtodev0);
2209 writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2210 writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2211 &dram->msch->ddr4timing);
2212}
2213
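/*
 * Program the DDR split for asymmetric configurations where the high
 * 16 bits of the bus back less capacity than the low 16 bits: the
 * boundary (in 16 MiB units, from cap >> 24) and the valid-half mode are
 * written to grf_ddrsplit_con, and the msch AXI bypass is cleared.
 */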
2214static int split_setup(struct dram_info *dram,
2215 struct rv1126_sdram_params *sdram_params)
2216{
2217 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2218 u32 dramtype = sdram_params->base.dramtype;
2219 u32 split_size, split_mode;
2220 u64 cs_cap[2], cap;
2221
2222 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
2223 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
2224 /* only configurations with the larger capacity on the low 16 bits are supported */
2225 if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
2226 cap = cs_cap[0] / (1 << (cap_info->cs0_row -
2227 cap_info->cs0_high16bit_row));
2228 } else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
2229 (cap_info->rank == 2)) {
2230 if (!cap_info->cs1_high16bit_row)
2231 cap = cs_cap[0];
2232 else
2233 cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
2234 cap_info->cs1_high16bit_row));
2235 } else {
2236 goto out;
2237 }
2238 split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
2239 if (cap_info->bw == 2)
2240 split_mode = SPLIT_MODE_32_L16_VALID;
2241 else
2242 split_mode = SPLIT_MODE_16_L8_VALID;
2243
2244 rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2245 (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
2246 (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2247 (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2248 (split_mode << SPLIT_MODE_OFFSET) |
2249 (0x0 << SPLIT_BYPASS_OFFSET) |
2250 (split_size << SPLIT_SIZE_OFFSET));
2251
2252 rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
2253 MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
2254 0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);
2255
2256out:
2257 return 0;
2258}
2259
2260static void split_bypass(struct dram_info *dram)
2261{
2262 if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2263 (1 << SPLIT_BYPASS_OFFSET)) != 0)
2264 return;
2265
2266 /* bypass split */
2267 rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2268 (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2269 (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2270 (0x1 << SPLIT_BYPASS_OFFSET) |
2271 (0x0 << SPLIT_SIZE_OFFSET));
2272}
2273
2274static void dram_all_config(struct dram_info *dram,
2275 struct rv1126_sdram_params *sdram_params)
2276{
2277 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2278 u32 dram_type = sdram_params->base.dramtype;
2279 void __iomem *pctl_base = dram->pctl;
2280 u32 sys_reg2 = 0;
2281 u32 sys_reg3 = 0;
2282 u64 cs_cap[2];
2283 u32 cs_pst;
2284
2285 set_ddrconfig(dram, cap_info->ddrconfig);
2286 sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2287 &sys_reg3, 0);
2288 writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2289 writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2290
2291 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2292 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2293
2294 if (cap_info->rank == 2) {
2295 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2296 6 + 2;
2297 if (cs_pst > 28)
2298 cs_cap[0] = 1llu << cs_pst;
2299 }
2300
2301 writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2302 (((cs_cap[0] >> 20) / 64) & 0xff),
2303 &dram->msch->devicesize);
2304 update_noc_timing(dram, sdram_params);
2305}
2306
2307static void enable_low_power(struct dram_info *dram,
2308 struct rv1126_sdram_params *sdram_params)
2309{
2310 void __iomem *pctl_base = dram->pctl;
2311 u32 grf_lp_con;
2312
2313 writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2314
2315 if (sdram_params->base.dramtype == DDR4)
2316 grf_lp_con = (0x7 << 16) | (1 << 1);
2317 else if (sdram_params->base.dramtype == DDR3)
2318 grf_lp_con = (0x7 << 16) | (1 << 0);
2319 else
2320 grf_lp_con = (0x7 << 16) | (1 << 2);
2321
2322 /* enable lpckdis_en */
2323 grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2324 writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2325
2326 /* enable sr, pd */
2327 if (dram->pd_idle == 0)
2328 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2329 else
2330 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2331 if (dram->sr_idle == 0)
2332 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2333 else
2334 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2335 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2336}
2337
2338static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2339{
2340 u32 split;
2341
2342 if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2343 (1 << SPLIT_BYPASS_OFFSET)) != 0)
2344 split = 0;
2345 else
2346 split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2347 SPLIT_SIZE_MASK;
2348
2349 sdram_print_ddr_info(&sdram_params->ch.cap_info,
2350 &sdram_params->base, split);
2351}
2352
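/*
 * One complete init pass: bring the PHY and controller out of reset in
 * stages, apply the register sets, program the LPDDR3/LPDDR4 mode
 * registers, and run read gate training as the basic go/no-go check.
 * post_init != 0 selects the second pass, which also validates cs1 and
 * prints training failures.
 */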
2353static int sdram_init_(struct dram_info *dram,
2354 struct rv1126_sdram_params *sdram_params, u32 post_init)
2355{
2356 void __iomem *pctl_base = dram->pctl;
2357 void __iomem *phy_base = dram->phy;
2358 u32 ddr4_vref;
2359 u32 mr_tmp;
2360
2361 rkclk_configure_ddr(dram, sdram_params);
2362
2363 rkclk_ddr_reset(dram, 1, 1, 1, 1);
2364 udelay(10);
2365
2366 rkclk_ddr_reset(dram, 1, 1, 1, 0);
2367 phy_cfg(dram, sdram_params);
2368
2369 rkclk_ddr_reset(dram, 1, 1, 0, 0);
2370 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2371
2372 rkclk_ddr_reset(dram, 1, 0, 0, 0);
2373 pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2374 dram->sr_idle, dram->pd_idle);
2375
2376 if (sdram_params->ch.cap_info.bw == 2) {
2377 /* the 32-bit interface uses pageclose */
2378 setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2379 /* pageclose = 1 with pageclose_timer = 0 causes errors for LPDDR4 at 328 MHz */
2380 clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2381 } else {
2382 clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2383 }
2384
2385#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2386 u32 tmp, trefi;
2387
2388 tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2389 trefi = (tmp >> 16) & 0xfff;
2390 writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2391 pctl_base + DDR_PCTL2_RFSHTMG);
2392#endif
2393
2394 /* set frequency_mode */
2395 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2396 /* set target_frequency to Frequency 0 */
2397 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2398
2399 set_ds_odt(dram, sdram_params, 0);
2400 sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2401 set_ctl_address_map(dram, sdram_params);
2402
2403 setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2404
2405 rkclk_ddr_reset(dram, 0, 0, 0, 0);
2406
2407 while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
2408 continue;
2409
2410 if (sdram_params->base.dramtype == LPDDR3) {
2411 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2412 } else if (sdram_params->base.dramtype == LPDDR4) {
2413 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2414 /* MR11 */
2415 pctl_write_mr(dram->pctl, 3, 11,
2416 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2417 LPDDR4);
2418 /* MR12 */
2419 pctl_write_mr(dram->pctl, 3, 12,
2420 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2421 LPDDR4);
2422
2423 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2424 /* MR22 */
2425 pctl_write_mr(dram->pctl, 3, 22,
2426 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2427 LPDDR4);
2428 }
2429
2430 if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
2431 if (post_init != 0)
2432 printascii("DTT cs0 error\n");
2433 return -1;
2434 }
2435
2436 if (sdram_params->base.dramtype == LPDDR4) {
2437 mr_tmp = read_mr(dram, 1, 14, LPDDR4);
2438
2439 if (mr_tmp != 0x4d)
2440 return -1;
2441
2442 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2443 /* MR14 */
2444 pctl_write_mr(dram->pctl, 3, 14,
2445 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2446 LPDDR4);
2447 }
2450 if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2451 if (data_training(dram, 1, sdram_params, 0,
2452 READ_GATE_TRAINING) != 0) {
2453 printascii("DTT cs1 error\n");
2454 return -1;
2455 }
2456 }
2457
2458 if (sdram_params->base.dramtype == DDR4) {
2459 ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2460 pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2461 sdram_params->base.dramtype);
2462 }
2463
2464 dram_all_config(dram, sdram_params);
2465 enable_low_power(dram, sdram_params);
2466
2467 return 0;
2468}
2469
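/*
 * Detect the geometry of the attached devices.  LPDDR4/X density is read
 * from MR8; other types probe column/bank/row counts with the common
 * sdram_detect_* helpers.  Bus width is then narrowed down by re-running
 * read gate training with individual byte lanes enabled, and the DQ/byte
 * mapping discovered on the way is recorded for later passes.
 */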
2470static u64 dram_detect_cap(struct dram_info *dram,
2471 struct rv1126_sdram_params *sdram_params,
2472 unsigned char channel)
2473{
2474 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2475 void __iomem *pctl_base = dram->pctl;
2476 void __iomem *phy_base = dram->phy;
2477 u32 mr8;
2478
2479 u32 bktmp;
2480 u32 coltmp;
2481 u32 rowtmp;
2482 u32 cs;
2483 u32 dram_type = sdram_params->base.dramtype;
2484 u32 pwrctl;
2485 u32 i, dq_map;
2486 u32 byte1 = 0, byte0 = 0;
2487 u32 tmp, byte;
2488 struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2489 struct dq_map_info *map_info = (struct dq_map_info *)
2490 ((void *)common_info + index->dq_map_index.offset * 4);
2491
2492 cap_info->bw = dram_type == DDR3 ? 0 : 1;
2493 if (dram_type != LPDDR4) {
2494 if (dram_type != DDR4) {
2495 coltmp = 12;
2496 bktmp = 3;
2497 if (dram_type == LPDDR2)
2498 rowtmp = 15;
2499 else
2500 rowtmp = 16;
2501
2502 if (sdram_detect_col(cap_info, coltmp) != 0)
2503 goto cap_err;
2504
2505 sdram_detect_bank(cap_info, coltmp, bktmp);
2506 if (dram_type != LPDDR3)
2507 sdram_detect_dbw(cap_info, dram_type);
2508 } else {
2509 coltmp = 10;
2510 bktmp = 4;
2511 rowtmp = 17;
2512
2513 cap_info->col = 10;
2514 cap_info->bk = 2;
2515 sdram_detect_bg(cap_info, coltmp);
2516 }
2517
2518 if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2519 goto cap_err;
2520
2521 sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2522 } else {
2523 cap_info->col = 10;
2524 cap_info->bk = 3;
2525 mr8 = read_mr(dram, 1, 8, dram_type);
2526 cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2527 mr8 = (mr8 >> 2) & 0xf;
2528 if (mr8 <= 6) {
2529 cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2530 } else if (mr8 == 0xc) {
2531 cap_info->cs0_row = 13;
2532 } else {
2533 printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2534 goto cap_err;
2535 }
2536 if (cap_info->dbw == 0)
2537 cap_info->cs0_row++;
2538 cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2539 if (cap_info->cs0_row >= 17) {
2540 printascii("Cap ERR: ");
2541 printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2542 goto cap_err;
2545 }
2546 }
2547
2548 pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2549 writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2550
2551 if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2552 cs = 1;
2553 else
2554 cs = 0;
2555 cap_info->rank = cs + 1;
2556
2557 setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2558
2559 tmp = data_training_rg(dram, 0, dram_type) & 0xf;
2560
2561 if (tmp == 0) {
2562 cap_info->bw = 2;
2563 } else {
2564 if (dram_type == DDR3 || dram_type == DDR4) {
2565 dq_map = 0;
2566 byte = 0;
2567 for (i = 0; i < 4; i++) {
2568 if ((tmp & BIT(i)) == 0) {
2569 dq_map |= byte << (i * 2);
2570 byte++;
2571 }
2572 }
2573 cap_info->bw = byte / 2;
2574 for (i = 0; i < 4; i++) {
2575 if ((tmp & BIT(i)) != 0) {
2576 dq_map |= byte << (i * 2);
2577 byte++;
2578 }
2579 }
2580 clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, dq_map << 24);
2581 } else {
2582 dq_map = readl(PHY_REG(phy_base, 0x4f));
2583 for (i = 0; i < 4; i++) {
2584 if (((dq_map >> (i * 2)) & 0x3) == 0)
2585 byte0 = i;
2586 if (((dq_map >> (i * 2)) & 0x3) == 1)
2587 byte1 = i;
2588 }
2589 clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2590 BIT(byte0) | BIT(byte1));
2591 if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2592 cap_info->bw = 1;
2593 else
2594 cap_info->bw = 0;
2595 }
2596 }
2597 if (cap_info->bw > 0)
2598 cap_info->dbw = 1;
2599
2600 writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2601
2602 cap_info->cs0_high16bit_row = cap_info->cs0_row;
2603 if (cs) {
2604 cap_info->cs1_row = cap_info->cs0_row;
2605 cap_info->cs1_high16bit_row = cap_info->cs0_row;
2606 } else {
2607 cap_info->cs1_row = 0;
2608 cap_info->cs1_high16bit_row = 0;
2609 }
2610
2611 if (dram_type == LPDDR3)
2612 sdram_detect_dbw(cap_info, dram_type);
2613
2614 return 0;
2615cap_err:
2616 return -1;
2617}
2618
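/*
 * Determine the true cs1 row count: for each candidate row, write a test
 * pattern just above the cs0 capacity at that row's address bit and
 * accept the first row where the pattern reads back intact without
 * aliasing down to the base of cs1.
 */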
2619static int dram_detect_cs1_row(struct dram_info *dram,
2620 struct rv1126_sdram_params *sdram_params,
2621 unsigned char channel)
2622{
2623 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2624 void __iomem *pctl_base = dram->pctl;
2625 u32 ret = 0;
2626 void __iomem *test_addr;
2627 u32 row, bktmp, coltmp, bw;
2628 u64 cs0_cap;
2629 u32 byte_mask;
2630 u32 cs_pst;
2631 u32 cs_add = 0;
2632 u32 max_row;
2633
2634 if (cap_info->rank == 2) {
2635 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2636 6 + 2;
2637 if (cs_pst < 28)
2638 cs_add = 1;
2639
2640 cs0_cap = 1ull << cs_pst; /* cs_pst can reach 31; avoid a 32-bit shift overflow */
2641
2642 if (sdram_params->base.dramtype == DDR4) {
2643 if (cap_info->dbw == 0)
2644 bktmp = cap_info->bk + 2;
2645 else
2646 bktmp = cap_info->bk + 1;
2647 } else {
2648 bktmp = cap_info->bk;
2649 }
2650 bw = cap_info->bw;
2651 coltmp = cap_info->col;
2652
2653 if (bw == 2)
2654 byte_mask = 0xFFFF;
2655 else
2656 byte_mask = 0xFF;
2657
2658 max_row = (cs_pst == 31) ? 30 : 31;
2659
2660 max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2661
2662 row = (cap_info->cs0_row > max_row) ? max_row :
2663 cap_info->cs0_row;
2664
2665 for (; row > 12; row--) {
2666 test_addr = (void __iomem *)(CFG_SYS_SDRAM_BASE +
2667 (u32)cs0_cap +
2668 (1ul << (row + bktmp + coltmp +
2669 cs_add + bw - 1ul)));
2670
2671 writel(0, CFG_SYS_SDRAM_BASE + (u32)cs0_cap);
2672 writel(PATTERN, test_addr);
2673
2674 if (((readl(test_addr) & byte_mask) ==
2675 (PATTERN & byte_mask)) &&
2676 ((readl(CFG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2677 byte_mask) == 0)) {
2678 ret = row;
2679 break;
2680 }
2681 }
2682 }
2683
2684 return ret;
2685}
2686
2687/* return: 0 = success, other = fail */
2688static int sdram_init_detect(struct dram_info *dram,
2689 struct rv1126_sdram_params *sdram_params)
2690{
2691 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2692 u32 ret;
2693 u32 sys_reg = 0;
2694 u32 sys_reg3 = 0;
2695 struct sdram_head_info_index_v2 *index =
2696 (struct sdram_head_info_index_v2 *)common_info;
2697 struct dq_map_info *map_info;
2698
2699 map_info = (struct dq_map_info *)((void *)common_info +
2700 index->dq_map_index.offset * 4);
2701
2702 if (sdram_init_(dram, sdram_params, 0)) {
2703 if (sdram_params->base.dramtype == DDR3) {
2704 clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
2705 ((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
2706 (0x0 << 0)) << 24);
2707 if (sdram_init_(dram, sdram_params, 0))
2708 return -1;
2709 } else {
2710 return -1;
2711 }
2712 }
2713
2714 if (sdram_params->base.dramtype == DDR3) {
2715 writel(PATTERN, CFG_SYS_SDRAM_BASE);
2716 if (readl(CFG_SYS_SDRAM_BASE) != PATTERN)
2717 return -1;
2718 }
2719
2720 split_bypass(dram);
2721 if (dram_detect_cap(dram, sdram_params, 0) != 0)
2722 return -1;
2723
2724 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2725 sdram_params->base.dramtype);
2726 ret = sdram_init_(dram, sdram_params, 1);
2727 if (ret != 0)
2728 goto out;
2729
2730 cap_info->cs1_row =
2731 dram_detect_cs1_row(dram, sdram_params, 0);
2732 if (cap_info->cs1_row) {
2733 sys_reg = readl(&dram->pmugrf->os_reg[2]);
2734 sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2735 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2736 sys_reg, sys_reg3, 0);
2737 writel(sys_reg, &dram->pmugrf->os_reg[2]);
2738 writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2739 }
2740
2741 sdram_detect_high_row(cap_info);
2742 split_setup(dram, sdram_params);
2743out:
2744 return ret;
2745}
2746
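/*
 * Select the sdram_configs[] entry with the highest frequency not above
 * freq_mhz; freq_mhz == 0 means "use the target frequency f0 from the
 * loader parameters".
 */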
2747struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2748{
2749 u32 i;
2750 u32 offset = 0;
2751 struct ddr2_3_4_lp2_3_info *ddr_info;
2752
2753 if (!freq_mhz) {
2754 ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2755 if (ddr_info)
2756 freq_mhz =
2757 (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2758 DDR_FREQ_MASK;
2759 else
2760 freq_mhz = 0;
2761 }
2762
2763 for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2764 if (sdram_configs[i].base.ddr_freq == 0 ||
2765 freq_mhz < sdram_configs[i].base.ddr_freq)
2766 break;
2767 }
2768 offset = i == 0 ? 0 : i - 1;
2769
2770 return &sdram_configs[offset];
2771}
2772
2773static const u16 pctl_need_update_reg[] = {
2774 DDR_PCTL2_RFSHTMG,
2775 DDR_PCTL2_INIT3,
2776 DDR_PCTL2_INIT4,
2777 DDR_PCTL2_INIT6,
2778 DDR_PCTL2_INIT7,
2779 DDR_PCTL2_DRAMTMG0,
2780 DDR_PCTL2_DRAMTMG1,
2781 DDR_PCTL2_DRAMTMG2,
2782 DDR_PCTL2_DRAMTMG3,
2783 DDR_PCTL2_DRAMTMG4,
2784 DDR_PCTL2_DRAMTMG5,
2785 DDR_PCTL2_DRAMTMG6,
2786 DDR_PCTL2_DRAMTMG7,
2787 DDR_PCTL2_DRAMTMG8,
2788 DDR_PCTL2_DRAMTMG9,
2789 DDR_PCTL2_DRAMTMG12,
2790 DDR_PCTL2_DRAMTMG13,
2791 DDR_PCTL2_DRAMTMG14,
2792 DDR_PCTL2_ZQCTL0,
2793 DDR_PCTL2_DFITMG0,
2794 DDR_PCTL2_ODTCFG
2795};
2796
2797static const u16 phy_need_update_reg[] = {
2798 0x14,
2799 0x18,
2800 0x1c
2801};
2802
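/*
 * Stage a frequency switch: copy the relevant controller timings into
 * the dst_fsp register set, update the matching PHY FSP page (CL/CWL/AL),
 * refresh the drive/ODT settings and, for LPDDR4, pre-write the mode
 * registers while mirroring their values into the PHY (regs 0x17-0x1d).
 */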
2803static void pre_set_rate(struct dram_info *dram,
2804 struct rv1126_sdram_params *sdram_params,
2805 u32 dst_fsp, u32 dst_fsp_lp4)
2806{
2807 u32 i, j, find;
2808 void __iomem *pctl_base = dram->pctl;
2809 void __iomem *phy_base = dram->phy;
2810 u32 phy_offset;
2811 u32 mr_tmp;
2812 u32 dramtype = sdram_params->base.dramtype;
2813
2814 sw_set_req(dram);
2815 /* pctl timing update */
2816 for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2817 for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2818 j++) {
2819 if (sdram_params->pctl_regs.pctl[j][0] ==
2820 pctl_need_update_reg[i]) {
2821 writel(sdram_params->pctl_regs.pctl[j][1],
2822 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2823 pctl_need_update_reg[i]);
2824 find = j;
2825 break;
2826 }
2827 }
2828 }
2829
2830#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2831 u32 tmp, trefi;
2832
2833 tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2834 trefi = (tmp >> 16) & 0xfff;
2835 writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2836 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2837#endif
2838
2839 sw_set_ack(dram);
2840
2841 /* phy timing update */
2842 if (dst_fsp == 0)
2843 phy_offset = 0;
2844 else
2845 phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
2846 /* cl cwl al update */
2847 for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
2848 for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
2849 j++) {
2850 if (sdram_params->phy_regs.phy[j][0] ==
2851 phy_need_update_reg[i]) {
2852 writel(sdram_params->phy_regs.phy[j][1],
2853 phy_base + phy_offset +
2854 phy_need_update_reg[i]);
2855 find = j;
2856 break;
2857 }
2858 }
2859 }
2860
2861 set_ds_odt(dram, sdram_params, dst_fsp);
2862 if (dramtype == LPDDR4) {
2863 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2864 DDR_PCTL2_INIT4);
2865 /* MR13 */
2866 pctl_write_mr(dram->pctl, 3, 13,
2867 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2868 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2869 ((0x2 << 6) >> dst_fsp_lp4), dramtype);
2870 writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2871 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2872 ((0x2 << 6) >> dst_fsp_lp4),
2873 PHY_REG(phy_base, 0x1b));
2874 /* MR3 */
2875 pctl_write_mr(dram->pctl, 3, 3,
2876 mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
2877 PCTL2_MR_MASK,
2878 dramtype);
2879 writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
2880 PHY_REG(phy_base, 0x19));
2881
2882 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2883 DDR_PCTL2_INIT3);
2884 /* MR1 */
2885 pctl_write_mr(dram->pctl, 3, 1,
2886 mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
2887 PCTL2_MR_MASK,
2888 dramtype);
2889 writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
2890 PHY_REG(phy_base, 0x17));
2891 /* MR2 */
2892 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
2893 dramtype);
2894 writel(mr_tmp & PCTL2_MR_MASK,
2895 PHY_REG(phy_base, 0x18));
2896
2897 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2898 DDR_PCTL2_INIT6);
2899 /* MR11 */
2900 pctl_write_mr(dram->pctl, 3, 11,
2901 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2902 dramtype);
2903 writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2904 PHY_REG(phy_base, 0x1a));
2905 /* MR12 */
2906 pctl_write_mr(dram->pctl, 3, 12,
2907 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2908 dramtype);
2909
2910 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2911 DDR_PCTL2_INIT7);
2912 /* MR22 */
2913 pctl_write_mr(dram->pctl, 3, 22,
2914 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2915 dramtype);
2916 writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2917 PHY_REG(phy_base, 0x1d));
2918 /* MR14 */
2919 pctl_write_mr(dram->pctl, 3, 14,
2920 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2921 dramtype);
2922 writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2923 PHY_REG(phy_base, 0x1c));
2924 }
2925
2926 update_noc_timing(dram, sdram_params);
2927}
2928
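/*
 * Record per-FSP parameters for later dynamic frequency scaling: ODT and
 * drive strengths, Vref values and the NoC timings, tagged with FSP_FLAG.
 * copy_fsp_param_to_ddr() later publishes the whole array at
 * FSP_PARAM_STORE_ADDR.
 */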
2929static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
2930 struct rv1126_sdram_params *sdram_params)
2931{
2932 void __iomem *pctl_base = dram->pctl;
2933 void __iomem *phy_base = dram->phy;
2934 struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
2935 u32 temp, temp1;
2936 struct ddr2_3_4_lp2_3_info *ddr_info;
2937
2938 ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
2939
2940 p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
2941
2942 if (sdram_params->base.dramtype == LPDDR4) {
2943 p_fsp_param->rd_odt_up_en = 0;
2944 p_fsp_param->rd_odt_down_en = 1;
2945 } else {
2946 p_fsp_param->rd_odt_up_en =
2947 ODT_INFO_PULLUP_EN(ddr_info->odt_info);
2948 p_fsp_param->rd_odt_down_en =
2949 ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
2950 }
2951
2952 if (p_fsp_param->rd_odt_up_en)
2953 p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
2954 else if (p_fsp_param->rd_odt_down_en)
2955 p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
2956 else
2957 p_fsp_param->rd_odt = 0;
2958 p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
2959 p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
2960 p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
2961 p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
2962 p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
2963
2964 if (sdram_params->base.dramtype == DDR3) {
2965 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2966 DDR_PCTL2_INIT3);
2967 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2968 p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
2969 p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
2970 p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2971 } else if (sdram_params->base.dramtype == DDR4) {
2972 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2973 DDR_PCTL2_INIT3);
2974 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2975 p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
2976 p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
2977 p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2978 } else if (sdram_params->base.dramtype == LPDDR3) {
2979 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2980 DDR_PCTL2_INIT4);
2981 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2982 p_fsp_param->ds_pdds = temp & 0xf;
2983
2984 p_fsp_param->dq_odt = lp3_odt_value;
2985 p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2986 } else if (sdram_params->base.dramtype == LPDDR4) {
2987 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2988 DDR_PCTL2_INIT4);
2989 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2990 p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
2991
2992 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2993 DDR_PCTL2_INIT6);
2994 temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
2995 p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
2996 p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
2997
2998 temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
2999 readl(PHY_REG(phy_base, 0x3ce)));
3000 temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
3001 readl(PHY_REG(phy_base, 0x3de)));
3002 p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
3003 temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
3004 readl(PHY_REG(phy_base, 0x3cf)));
3005 temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
3006 readl(PHY_REG(phy_base, 0x3df)));
3007 p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
3008 p_fsp_param->vref_ca[0] |=
3009 (readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3010 p_fsp_param->vref_ca[1] |=
3011 (readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3012
3013 p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
3014 3) & 0x1;
3015 }
3016
3017 p_fsp_param->noc_timings.ddrtiminga0 =
3018 sdram_params->ch.noc_timings.ddrtiminga0;
3019 p_fsp_param->noc_timings.ddrtimingb0 =
3020 sdram_params->ch.noc_timings.ddrtimingb0;
3021 p_fsp_param->noc_timings.ddrtimingc0 =
3022 sdram_params->ch.noc_timings.ddrtimingc0;
3023 p_fsp_param->noc_timings.devtodev0 =
3024 sdram_params->ch.noc_timings.devtodev0;
3025 p_fsp_param->noc_timings.ddrmode =
3026 sdram_params->ch.noc_timings.ddrmode;
3027 p_fsp_param->noc_timings.ddr4timing =
3028 sdram_params->ch.noc_timings.ddr4timing;
3029 p_fsp_param->noc_timings.agingx0 =
3030 sdram_params->ch.noc_timings.agingx0;
3031 p_fsp_param->noc_timings.aging0 =
3032 sdram_params->ch.noc_timings.aging0;
3033 p_fsp_param->noc_timings.aging1 =
3034 sdram_params->ch.noc_timings.aging1;
3035 p_fsp_param->noc_timings.aging2 =
3036 sdram_params->ch.noc_timings.aging2;
3037 p_fsp_param->noc_timings.aging3 =
3038 sdram_params->ch.noc_timings.aging3;
3039
3040 p_fsp_param->flag = FSP_FLAG;
3041}
3042
3043static void copy_fsp_param_to_ddr(void)
3044{
3045 memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3046 sizeof(fsp_param));
3047}
3048
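/*
 * Patch the density-dependent refresh timings in the controller register
 * set: tRFC is looked up per die capacity and DRAM type, and the derived
 * self-refresh exit times (tXS/tXSR, with ~10 ns of margin) are folded
 * into RFSHTMG, DRAMTMG8 and DRAMTMG14.
 */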
3049static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3050 struct sdram_cap_info *cap_info, u32 dram_type,
3051 u32 freq)
3052{
3053 u64 cs0_cap;
3054 u32 die_cap;
3055 u32 trfc_ns, trfc4_ns;
3056 u32 trfc, txsnr;
3057 u32 txs_abort_fast = 0;
3058 u32 tmp;
3059
3060 cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3061 die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3062
3063 switch (dram_type) {
3064 case DDR3:
3065 if (die_cap <= DIE_CAP_512MBIT)
3066 trfc_ns = 90;
3067 else if (die_cap <= DIE_CAP_1GBIT)
3068 trfc_ns = 110;
3069 else if (die_cap <= DIE_CAP_2GBIT)
3070 trfc_ns = 160;
3071 else if (die_cap <= DIE_CAP_4GBIT)
3072 trfc_ns = 260;
3073 else
3074 trfc_ns = 350;
3075 txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3076 break;
3077
3078 case DDR4:
3079 if (die_cap <= DIE_CAP_2GBIT) {
3080 trfc_ns = 160;
3081 trfc4_ns = 90;
3082 } else if (die_cap <= DIE_CAP_4GBIT) {
3083 trfc_ns = 260;
3084 trfc4_ns = 110;
3085 } else if (die_cap <= DIE_CAP_8GBIT) {
3086 trfc_ns = 350;
3087 trfc4_ns = 160;
3088 } else {
3089 trfc_ns = 550;
3090 trfc4_ns = 260;
3091 }
3092 txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3093 txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3094 break;
3095
3096 case LPDDR3:
3097 if (die_cap <= DIE_CAP_4GBIT)
3098 trfc_ns = 130;
3099 else
3100 trfc_ns = 210;
3101 txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3102 break;
3103
3104 case LPDDR4:
3105 if (die_cap <= DIE_CAP_2GBIT)
3106 trfc_ns = 130;
3107 else if (die_cap <= DIE_CAP_4GBIT)
3108 trfc_ns = 180;
3109 else if (die_cap <= DIE_CAP_8GBIT)
3110 trfc_ns = 280;
3111 else
3112 trfc_ns = 380;
3113 txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3114 break;
3115
3116 default:
3117 return;
3118 }
3119 trfc = (trfc_ns * freq + 999) / 1000;
3120
3121 for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3122 switch (pctl_regs->pctl[i][0]) {
3123 case DDR_PCTL2_RFSHTMG:
3124 tmp = pctl_regs->pctl[i][1];
3125 /* t_rfc_min */
3126 tmp &= ~((u32)0x3ff);
3127 tmp |= ((trfc + 1) / 2) & 0x3ff;
3128 pctl_regs->pctl[i][1] = tmp;
3129 break;
3130
3131 case DDR_PCTL2_DRAMTMG8:
3132 if (dram_type == DDR3 || dram_type == DDR4) {
3133 tmp = pctl_regs->pctl[i][1];
3134 /* t_xs_x32 */
3135 tmp &= ~((u32)0x7f);
3136 tmp |= ((txsnr + 63) / 64) & 0x7f;
3137
3138 if (dram_type == DDR4) {
3139 /* t_xs_abort_x32 */
3140 tmp &= ~((u32)(0x7f << 16));
3141 tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
3142 /* t_xs_fast_x32 */
3143 tmp &= ~((u32)(0x7f << 24));
3144 tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
3145 }
3146
3147 pctl_regs->pctl[i][1] = tmp;
3148 }
3149 break;
3150
3151 case DDR_PCTL2_DRAMTMG14:
3152 if (dram_type == LPDDR3 ||
3153 dram_type == LPDDR4) {
3154 tmp = pctl_regs->pctl[i][1];
3155 /* t_xsr */
3156 tmp &= ~((u32)0xfff);
3157 tmp |= ((txsnr + 1) / 2) & 0xfff;
3158 pctl_regs->pctl[i][1] = tmp;
3159 }
3160 break;
3161
3162 default:
3163 break;
3164 }
3165 }
3166}
3167
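/*
 * Change the DRAM frequency: stage the new timings with pre_set_rate(),
 * enter self-refresh, re-lock the DPLL and PHY PLL at the target rate,
 * switch the controller to dst_fsp, exit self-refresh, rewrite the mode
 * registers for the new speed and re-train via high_freq_training().
 * DDR3/DDR4 DLL-off mode is entered or left according to the DLL flag
 * encoded in the destination INIT3 value.
 */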
3168void ddr_set_rate(struct dram_info *dram,
3169 struct rv1126_sdram_params *sdram_params,
3170 u32 freq, u32 cur_freq, u32 dst_fsp,
3171 u32 dst_fsp_lp4, u32 training_en)
3172{
3173 u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
3174 u32 mr_tmp;
3175 u32 lp_stat;
3176 u32 dramtype = sdram_params->base.dramtype;
3177 struct rv1126_sdram_params *sdram_params_new;
3178 void __iomem *pctl_base = dram->pctl;
3179 void __iomem *phy_base = dram->phy;
3180
3181 lp_stat = low_power_update(dram, 0);
3182 sdram_params_new = get_default_sdram_config(freq);
3183 sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
3184 sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
3185
3186 pctl_modify_trfc(&sdram_params_new->pctl_regs,
3187 &sdram_params->ch.cap_info, dramtype, freq);
3188 pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
3189
3190 while ((readl(pctl_base + DDR_PCTL2_STAT) &
3191 PCTL2_OPERATING_MODE_MASK) ==
3192 PCTL2_OPERATING_MODE_SR)
3193 continue;
3194
3195 dest_dll_off = 0;
3196 dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3197 DDR_PCTL2_INIT3);
3198 if ((dramtype == DDR3 && (dst_init3 & 1)) ||
3199 (dramtype == DDR4 && !(dst_init3 & 1)))
3200 dest_dll_off = 1;
3201
3202 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
3203 cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
3204 DDR_PCTL2_INIT3);
3205 cur_init3 &= PCTL2_MR_MASK;
3206 cur_dll_off = 1;
3207 if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
3208 (dramtype == DDR4 && (cur_init3 & 1)))
3209 cur_dll_off = 0;
3210
3211 if (!cur_dll_off) {
3212 if (dramtype == DDR3)
3213 cur_init3 |= 1;
3214 else
3215 cur_init3 &= ~1;
3216 pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
3217 }
3218
3219 setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3220 PCTL2_DIS_AUTO_REFRESH);
3221 update_refresh_reg(dram);
3222
3223 enter_sr(dram, 1);
3224
3225 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3226 PMUGRF_CON_DDRPHY_BUFFEREN_EN,
3227 &dram->pmugrf->soc_con[0]);
3228 sw_set_req(dram);
3229 clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
3230 PCTL2_DFI_INIT_COMPLETE_EN);
3231 sw_set_ack(dram);
3232
3233 sw_set_req(dram);
3234 if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
3235 setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3236 else
3237 clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3238
3239 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
3240 PCTL2_DIS_SRX_ZQCL);
3241 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
3242 PCTL2_DIS_SRX_ZQCL);
3243 sw_set_ack(dram);
3244
3245 writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
3246 &dram->cru->clkgate_con[21]);
3247 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3248 (0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
3249 (0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
3250 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3251
3252 clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3253 rkclk_set_dpll(dram, freq * MHz / 2);
3254 phy_pll_set(dram, freq * MHz, 0);
3255 phy_pll_set(dram, freq * MHz, 1);
3256 setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3257
3258 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3259 PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
3260 &dram->pmugrf->soc_con[0]);
3261 writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
3262 &dram->cru->clkgate_con[21]);
3263 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3264 (0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
3265 (0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
3266 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3267 while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
3268 PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
3269 continue;
3270
3271 sw_set_req(dram);
3272 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
3273 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
3274 sw_set_ack(dram);
3275 update_refresh_reg(dram);
3276 clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
3277
3278 enter_sr(dram, 0);
3279
3280 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3281 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3282
3283 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
3284 if (dramtype == LPDDR3) {
3285 pctl_write_mr(dram->pctl, 3, 1,
3286 (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
3287 PCTL2_MR_MASK,
3288 dramtype);
3289 pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
3290 dramtype);
3291 pctl_write_mr(dram->pctl, 3, 3,
3292 (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
3293 PCTL2_MR_MASK,
3294 dramtype);
3295 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
3296 } else if ((dramtype == DDR3) || (dramtype == DDR4)) {
3297 pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
3298 dramtype);
3299 if (!dest_dll_off) {
3300 pctl_write_mr(dram->pctl, 3, 0,
3301 ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
3302 PCTL2_MR_MASK) | DDR3_DLL_RESET,
3303 dramtype);
3304 udelay(2);
3305 }
3306 pctl_write_mr(dram->pctl, 3, 0,
3307 (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
3308 PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
3309 dramtype);
3310 pctl_write_mr(dram->pctl, 3, 2,
3311 ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
3312 PCTL2_MR_MASK), dramtype);
3313 if (dramtype == DDR4) {
3314 pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
3315 dramtype);
3316 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3317 DDR_PCTL2_INIT6);
3318 pctl_write_mr(dram->pctl, 3, 4,
3319 (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
3320 PCTL2_MR_MASK,
3321 dramtype);
3322 pctl_write_mr(dram->pctl, 3, 5,
3323 mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
3324 PCTL2_MR_MASK,
3325 dramtype);
3326
3327 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3328 DDR_PCTL2_INIT7);
3329 pctl_write_mr(dram->pctl, 3, 6,
3330 mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
3331 PCTL2_MR_MASK,
3332 dramtype);
3333 }
3334 } else if (dramtype == LPDDR4) {
3335 pctl_write_mr(dram->pctl, 3, 13,
3336 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3337 PCTL2_MR_MASK) & (~(BIT(7)))) |
3338 dst_fsp_lp4 << 7, dramtype);
3339 }
3340 clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3341 PCTL2_DIS_AUTO_REFRESH);
3342 update_refresh_reg(dram);
3343
3344 /* training */
3345 high_freq_training(dram, sdram_params_new, dst_fsp);
3346 low_power_update(dram, lp_stat);
3347
3348 save_fsp_param(dram, dst_fsp, sdram_params_new);
3349}
3350
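/*
 * Walk all four frequency set points (f1, f2, f3, then the final f0 from
 * the loader parameters) so each FSP is trained once and its parameters
 * are captured in fsp_param[].
 */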
3351static void ddr_set_rate_for_fsp(struct dram_info *dram,
3352 struct rv1126_sdram_params *sdram_params)
3353{
3354 struct ddr2_3_4_lp2_3_info *ddr_info;
3355 u32 f0;
3356 u32 dramtype = sdram_params->base.dramtype;
3357 u32 f1, f2, f3;
3358
3359 ddr_info = get_ddr_drv_odt_info(dramtype);
3360 if (!ddr_info)
3361 return;
3362
3363 f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
3364 DDR_FREQ_MASK;
3365
3366 memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3367 memset((void *)&fsp_param, 0, sizeof(fsp_param));
3368
3369 f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3370 DDR_FREQ_MASK;
3371 f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3372 DDR_FREQ_MASK;
3373 f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3374 DDR_FREQ_MASK;
3375
3376 if (get_wrlvl_val(dram, sdram_params))
3377 printascii("get wrlvl value fail\n");
3378
3379 if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3380 printascii("change to: ");
3381 printdec(f1);
3382 printascii("MHz\n");
3383 }
3384 ddr_set_rate(&dram_info, sdram_params, f1,
3385 sdram_params->base.ddr_freq, 1, 1, 1);
3386
3387 if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3388 printascii("change to: ");
3389 printdec(f2);
3390 printascii("MHz\n");
3391 }
3392 ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
3393
3394 if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3395 printascii("change to: ");
3396 printdec(f3);
3397 printascii("MHz\n");
3398 }
3399 ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
3400
3401 if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3402 printascii("change to: ");
3403 printdec(f0);
3404 printascii("MHz(final freq)\n");
3405 }
3406 ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3407}
3408
3409int get_uart_config(void)
3410{
3411 struct sdram_head_info_index_v2 *index =
3412 (struct sdram_head_info_index_v2 *)common_info;
3413 struct global_info *gbl_info;
3414
3415 gbl_info = (struct global_info *)((void *)common_info +
3416 index->global_index.offset * 4);
3417
3418 return gbl_info->uart_info;
3419}
3420
3421/* return: 0 = success, other = fail */
3422static int rv1126_dmc_init(struct udevice *dev)
3423{
3424 struct rv1126_sdram_params *sdram_params;
3425 int ret = 0;
3426 struct sdram_head_info_index_v2 *index =
3427 (struct sdram_head_info_index_v2 *)common_info;
3428 struct global_info *gbl_info;
3429
3430 dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3431 dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3432 dram_info.grf = (void *)GRF_BASE_ADDR;
3433 dram_info.cru = (void *)CRU_BASE_ADDR;
3434 dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3435 dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3436 dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3437
3438#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3439 printascii("extended temp support\n");
3440#endif
3441 if (index->version_info != 2 ||
3442 (index->global_index.size != sizeof(struct global_info) / 4) ||
3443 (index->ddr3_index.size !=
3444 sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3445 (index->ddr4_index.size !=
3446 sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3447 (index->lp3_index.size !=
3448 sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3449 (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3450 (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3451 index->global_index.offset == 0 ||
3452 index->ddr3_index.offset == 0 ||
3453 index->ddr4_index.offset == 0 ||
3454 index->lp3_index.offset == 0 ||
3455 index->lp4_index.offset == 0 ||
3456 index->lp4x_index.offset == 0) {
3457 printascii("common info error\n");
3458 goto error;
3459 }
3460
3461 gbl_info = (struct global_info *)((void *)common_info +
3462 index->global_index.offset * 4);
3463
3464 dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3465 dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3466
3467 sdram_params = &sdram_configs[0];
3468 if (sdram_params->base.dramtype == DDR3 ||
3469 sdram_params->base.dramtype == DDR4) {
3470 if (DDR_2T_INFO(gbl_info->info_2t))
3471 sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3472 else
3473 sdram_params->pctl_regs.pctl[0][1] &=
3474 ~(0x1 << 10);
3475 }
3476 ret = sdram_init_detect(&dram_info, sdram_params);
3477 if (ret) {
3478 sdram_print_dram_type(sdram_params->base.dramtype);
3479 printascii(", ");
3480 printdec(sdram_params->base.ddr_freq);
3481 printascii("MHz\n");
3482 goto error;
3483 }
3484 print_ddr_info(sdram_params);
3485#if defined(CONFIG_CMD_DDR_TEST_TOOL)
3486 init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3487 (u8)sdram_params->ch.cap_info.rank);
3488#endif
3489
3490 ddr_set_rate_for_fsp(&dram_info, sdram_params);
3491 copy_fsp_param_to_ddr();
3492
3493#if defined(CONFIG_CMD_DDR_TEST_TOOL)
3494 save_rw_trn_result_to_ddr(&rw_trn_result);
3495#endif
3496
3497 if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG))
3498 printascii("out\n");
3499
3500 return ret;
3501error:
3502 printascii("error\n");
3503 return (-1);
3504}
3505
3506#endif
3507
3508static int rv1126_dmc_probe(struct udevice *dev)
3509{
3510#if defined(CONFIG_TPL_BUILD) || \
3511 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
3512 if (rv1126_dmc_init(dev))
3513 return 0;
3514#else
3515 struct dram_info *priv = dev_get_priv(dev);
3516
3517 priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
3518 debug("%s: grf=%p\n", __func__, priv->pmugrf);
3519 priv->info.base = CFG_SYS_SDRAM_BASE;
3520 priv->info.size =
3521 rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);
3522#endif
3523 return 0;
3524}
3525
3526static int rv1126_dmc_get_info(struct udevice *dev, struct ram_info *info)
3527{
3528 struct dram_info *priv = dev_get_priv(dev);
3529
3530 *info = priv->info;
3531
3532 return 0;
3533}
3534
3535static struct ram_ops rv1126_dmc_ops = {
3536 .get_info = rv1126_dmc_get_info,
3537};
3538
3539static const struct udevice_id rv1126_dmc_ids[] = {
3540 { .compatible = "rockchip,rv1126-dmc" },
3541 { }
3542};
3543
3544U_BOOT_DRIVER(dmc_rv1126) = {
3545 .name = "rockchip_rv1126_dmc",
3546 .id = UCLASS_RAM,
3547 .of_match = rv1126_dmc_ids,
3548 .ops = &rv1126_dmc_ops,
3549 .probe = rv1126_dmc_probe,
3550 .priv_auto = sizeof(struct dram_info),
3551};