// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2020 MediaTek Inc.
 *
 * Author: Weijie Gao <weijie.gao@mediatek.com>
 */

#include <common.h>
#include <asm/addrspace.h>
#include <asm/cacheops.h>
#include <linux/bitops.h>
#include <linux/io.h>
#include <mach/mc.h>

DECLARE_GLOBAL_DATA_PTR;

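/* Start points for the coarse/fine DQS delay window searches below */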
#define COARSE_MIN_START	6
#define FINE_MIN_START		15
#define COARSE_MAX_START	7
#define FINE_MAX_START		0

#define NUM_OF_CACHELINE	128
#define TEST_PAT_SIZE		(NUM_OF_CACHELINE * CONFIG_SYS_CACHELINE_SIZE)

#define INIT_DQS_VAL		((7 << DQS1_DELAY_COARSE_TUNING_S) | \
				 (4 << DQS1_DELAY_FINE_TUNING_S) | \
				 (7 << DQS0_DELAY_COARSE_TUNING_S) | \
				 (4 << DQS0_DELAY_FINE_TUNING_S))

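/* Issue a MIPS "pref" (prefetch) instruction for the given address */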
static inline void pref_op(int op, const volatile void *addr)
{
	__asm__ __volatile__("pref %0, 0(%1)" : : "i" (op), "r" (addr));
}

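/*
 * Write a known test pattern to DRAM through the uncached (KSEG1) window
 * while the safe INIT_DQS_VAL setting is active, then switch to the DQS
 * setting under test, invalidate and refill the cached (KSEG0) alias and
 * compare the readback. Returns true on the first mismatch found.
 */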
static inline bool dqs_test_error(void __iomem *memc, u32 memsize, u32 dqsval,
				  u32 bias)
{
	u32 *nca, *ca;
	u32 off;
	int i;

	for (off = 0; off < memsize - TEST_PAT_SIZE; off += (memsize >> 6)) {
		nca = (u32 *)KSEG1ADDR(off);
		ca = (u32 *)KSEG0ADDR(off);

		writel(INIT_DQS_VAL, memc + MEMCTL_DDR_DQS_DLY_REG);
		wmb();

		for (i = 0; i < TEST_PAT_SIZE / sizeof(u32); i++)
			ca[i] = 0x1f1f1f1f;

		for (i = 0; i < TEST_PAT_SIZE / sizeof(u32); i++)
			nca[i] = (u32)nca + i + bias;

		writel(dqsval, memc + MEMCTL_DDR_DQS_DLY_REG);
		wmb();

		for (i = 0; i < TEST_PAT_SIZE; i += CONFIG_SYS_CACHELINE_SIZE)
			mips_cache(HIT_INVALIDATE_D, (u8 *)ca + i);
		wmb();

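		/* Refill the cache lines from DRAM using the DQS setting under test */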
		for (i = 0; i < TEST_PAT_SIZE; i += CONFIG_SYS_CACHELINE_SIZE)
			pref_op(0, (u8 *)ca + i);

		for (i = 0; i < TEST_PAT_SIZE / sizeof(u32); i++) {
			if (ca[i] != (u32)nca + i + bias)
				return true;
		}
	}

	return false;
}

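/*
 * Walk the DQS field upwards from initval until the memory test fails;
 * return the last passing value (never below initval), or maxval if no
 * failure occurs.
 */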
static inline int dqs_find_max(void __iomem *memc, u32 memsize, int initval,
			       int maxval, int shift, u32 regval)
{
	int fieldval;
	u32 dqsval;

	for (fieldval = initval; fieldval <= maxval; fieldval++) {
		dqsval = regval | (fieldval << shift);
		if (dqs_test_error(memc, memsize, dqsval, 3))
			return max(fieldval - 1, initval);
	}

	return maxval;
}

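/*
 * Walk the DQS field downwards from initval until the memory test fails;
 * return the last passing value (never above initval), or minval if no
 * failure occurs.
 */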
static inline int dqs_find_min(void __iomem *memc, u32 memsize, int initval,
			       int minval, int shift, u32 regval)
{
	int fieldval;
	u32 dqsval;

	for (fieldval = initval; fieldval >= minval; fieldval--) {
		dqsval = regval | (fieldval << shift);
		if (dqs_test_error(memc, memsize, dqsval, 1))
			return min(fieldval + 1, initval);
	}

	return minval;
}

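/*
 * Calibrate the DQS0 (and, for 16-bit DRAM, DQS1) read delays: probe the
 * upper and lower coarse/fine delay bounds that still read back correctly
 * and program the centre of that working window.
 */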
void ddr_calibrate(void __iomem *memc, u32 memsize, u32 bw)
{
	u32 dqs_coarse_min, dqs_coarse_max, dqs_coarse_val;
	u32 dqs_fine_min, dqs_fine_max, dqs_fine_val;
	u32 dqs_coarse_min_limit, dqs_fine_min_limit;
	u32 dlls, dqs_dll, ddr_cfg2_reg;
	u32 dqs_dly_tmp, dqs_dly, test_dqs, shift;
	u32 rem, mask;
	int i;

	/* Disable Self-refresh */
	clrbits_32(memc + MEMCTL_DDR_SELF_REFRESH_REG, SR_AUTO_EN);

	/* Save DDR_CFG2 and modify its DQS gating window */
	ddr_cfg2_reg = readl(memc + MEMCTL_DDR_CFG2_REG);
	mask = DQS0_GATING_WINDOW_M;
	if (bw == IND_SDRAM_WIDTH_16BIT)
		mask |= DQS1_GATING_WINDOW_M;
	clrbits_32(memc + MEMCTL_DDR_CFG2_REG, mask);

	/* Get minimum available DQS value */
	dlls = readl(memc + MEMCTL_DLL_DBG_REG);
	dlls = (dlls & MST_DLY_SEL_M) >> MST_DLY_SEL_S;

	dqs_dll = dlls >> 4;
	if (dqs_dll <= 8)
		dqs_coarse_min_limit = 8 - dqs_dll;
	else
		dqs_coarse_min_limit = 0;

	dqs_dll = dlls & 0xf;
	if (dqs_dll <= 8)
		dqs_fine_min_limit = 8 - dqs_dll;
	else
		dqs_fine_min_limit = 0;

	/* Initial DQS register value */
	dqs_dly = INIT_DQS_VAL;

	/* Calibrate DQS0 and/or DQS1 */
	for (i = 0; i < bw; i++) {
		shift = i * 8;
		dqs_dly &= ~(0xff << shift);

		/* Find maximum DQS coarse-grain */
		dqs_dly_tmp = dqs_dly | (0xf << shift);
		dqs_coarse_max = dqs_find_max(memc, memsize, COARSE_MAX_START,
					      0xf, 4 + shift, dqs_dly_tmp);

		/* Find maximum DQS fine-grain */
		dqs_dly_tmp = dqs_dly | (dqs_coarse_max << (4 + shift));
		test_dqs = dqs_find_max(memc, memsize, FINE_MAX_START, 0xf,
					shift, dqs_dly_tmp);

		if (test_dqs == FINE_MAX_START) {
			dqs_coarse_max--;
			dqs_fine_max = 0xf;
		} else {
			dqs_fine_max = test_dqs - 1;
		}

		/* Find minimum DQS coarse-grain */
		dqs_dly_tmp = dqs_dly;
		dqs_coarse_min = dqs_find_min(memc, memsize, COARSE_MIN_START,
					      dqs_coarse_min_limit, 4 + shift,
					      dqs_dly_tmp);

		/* Find minimum DQS fine-grain */
		dqs_dly_tmp = dqs_dly | (dqs_coarse_min << (4 + shift));
		test_dqs = dqs_find_min(memc, memsize, FINE_MIN_START,
					dqs_fine_min_limit, shift, dqs_dly_tmp);

		if (test_dqs == FINE_MIN_START + 1) {
			dqs_coarse_min++;
			dqs_fine_min = 0;
		} else {
			dqs_fine_min = test_dqs;
		}

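		/*
		 * The arithmetic below treats one coarse step as eight
		 * fine-tuning steps: an odd coarse sum contributes half a
		 * coarse step (4 fine steps) to the centre, and a fine value
		 * overflowing 0xf carries over as +1 coarse / -8 fine.
		 */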
		/* Calculate central DQS coarse/fine value */
		dqs_coarse_val = (dqs_coarse_max + dqs_coarse_min) >> 1;
		rem = (dqs_coarse_max + dqs_coarse_min) % 2;

		dqs_fine_val = (rem * 4) + ((dqs_fine_max + dqs_fine_min) >> 1);
		if (dqs_fine_val >= 0x10) {
			dqs_coarse_val++;
			dqs_fine_val -= 8;
		}

		/* Save current DQS value */
		dqs_dly |= ((dqs_coarse_val << 4) | dqs_fine_val) << shift;
	}

	/* Set final DQS value */
	writel(dqs_dly, memc + MEMCTL_DDR_DQS_DLY_REG);

	/* Restore DDR_CFG2 */
	writel(ddr_cfg2_reg, memc + MEMCTL_DDR_CFG2_REG);

	/* Enable Self-refresh */
	setbits_32(memc + MEMCTL_DDR_SELF_REFRESH_REG, SR_AUTO_EN);
}
205}