// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2012-2015 Panasonic Corporation
 * Copyright (C) 2015-2017 Socionext Inc.
 * Author: Masahiro Yamada <yamada.masahiro@socionext.com>
 */

#include <common.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/sizes.h>
#include <asm/global_data.h>

#include "sg-regs.h"
#include "soc-info.h"

DECLARE_GLOBAL_DATA_PTR;

struct uniphier_dram_map {
	unsigned long base;
	unsigned long size;
};

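/*
 * Decode the SG_MEMCONF register into a per-channel DRAM map.
 *
 * @dram_map: array filled with the base and size of each DRAM channel
 * @sparse_ch1_base: ch1 base address to use when sparse memory is enabled
 * @have_ch2: true if this SoC has a third DRAM channel
 *
 * Returns 0 on success, or -EINVAL if a MEMCONF size field holds an
 * unsupported value.
 */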
static int uniphier_memconf_decode(struct uniphier_dram_map *dram_map,
				   unsigned long sparse_ch1_base, bool have_ch2)
{
	unsigned long size;
	u32 val;

	val = readl(sg_base + SG_MEMCONF);

	/* set up ch0 */
	dram_map[0].base = CONFIG_SYS_SDRAM_BASE;

	switch (val & SG_MEMCONF_CH0_SZ_MASK) {
	case SG_MEMCONF_CH0_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH0_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH0_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH0_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH0_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch0 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH0_NUM_MASK) == SG_MEMCONF_CH0_NUM_2)
		size *= 2;

	dram_map[0].size = size;

	/* set up ch1 */
	dram_map[1].base = dram_map[0].base + size;

	if (val & SG_MEMCONF_SPARSEMEM) {
		if (dram_map[1].base > sparse_ch1_base) {
			pr_warn("Sparse mem is enabled, but ch0 and ch1 overlap\n");
			pr_warn("Only ch0 is available\n");
			dram_map[1].base = 0;
			return 0;
		}

		dram_map[1].base = sparse_ch1_base;
	}

	switch (val & SG_MEMCONF_CH1_SZ_MASK) {
	case SG_MEMCONF_CH1_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH1_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH1_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH1_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH1_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch1 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH1_NUM_MASK) == SG_MEMCONF_CH1_NUM_2)
		size *= 2;

	dram_map[1].size = size;

	if (!have_ch2 || val & SG_MEMCONF_CH2_DISABLE)
		return 0;

	/* set up ch2 */
	dram_map[2].base = dram_map[1].base + size;

	switch (val & SG_MEMCONF_CH2_SZ_MASK) {
	case SG_MEMCONF_CH2_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH2_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH2_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH2_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH2_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch2 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH2_NUM_MASK) == SG_MEMCONF_CH2_NUM_2)
		size *= 2;

	dram_map[2].size = size;

	return 0;
}

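/*
 * Per-SoC wrappers: only the sparse-memory ch1 base address and the
 * presence of a third DRAM channel differ between SoC generations.
 */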
static int uniphier_ld4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, false);
}

static int uniphier_pro4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xa0000000, false);
}

static int uniphier_pxs2_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, true);
}

struct uniphier_dram_init_data {
	unsigned int soc_id;
	int (*dram_map_get)(struct uniphier_dram_map dram_map[]);
};

static const struct uniphier_dram_init_data uniphier_dram_init_data[] = {
	{
		.soc_id = UNIPHIER_LD4_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PRO4_ID,
		.dram_map_get = uniphier_pro4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_SLD8_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PRO5_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PXS2_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD6B_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD11_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD20_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PXS3_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
};
UNIPHIER_DEFINE_SOCDATA_FUNC(uniphier_get_dram_init_data,
			     uniphier_dram_init_data)

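/*
 * Look up the dram_map_get handler for the running SoC and build the
 * DRAM channel map.
 */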
static int uniphier_dram_map_get(struct uniphier_dram_map *dram_map)
{
	const struct uniphier_dram_init_data *data;

	data = uniphier_get_dram_init_data();
	if (!data) {
		pr_err("unsupported SoC\n");
		return -ENOTSUPP;
	}

	return data->dram_map_get(dram_map);
}

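/*
 * Compute gd->ram_size for relocation: accumulate only the first contiguous
 * run of channels, and cap it at the 32-bit address boundary.
 */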
int dram_init(void)
{
	struct uniphier_dram_map dram_map[3] = {};
	bool valid_bank_found = false;
	unsigned long prev_top;
	int ret, i;

	gd->ram_size = 0;

	ret = uniphier_dram_map_get(dram_map);
	if (ret)
		return ret;

	for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
		unsigned long max_size;

		if (!dram_map[i].size)
			continue;

		/*
		 * U-Boot relocates itself to the tail of the memory region,
		 * but it does not expect sparse memory.  We use the first
		 * contiguous chunk here.
		 */
		if (valid_bank_found && prev_top < dram_map[i].base)
			break;

		/*
		 * Do not use memory that exceeds 32bit address range.  U-Boot
		 * relocates itself to the end of the effectively available RAM.
		 * This could be a problem for DMA engines that do not support
		 * 64bit address (SDMA of SDHCI, UniPhier AV-ether, etc.)
		 */
		if (dram_map[i].base >= 1ULL << 32)
			break;

		max_size = (1ULL << 32) - dram_map[i].base;

		if (dram_map[i].size > max_size) {
			gd->ram_size += max_size;
			break;
		}

		gd->ram_size += dram_map[i].size;

		prev_top = dram_map[i].base + dram_map[i].size;
		valid_bank_found = true;
	}

	/*
	 * LD20 uses the last 64 byte for each channel for dynamic
	 * DDR PHY training
	 */
	if (uniphier_get_soc_id() == UNIPHIER_LD20_ID)
		gd->ram_size -= 64;

	return 0;
}

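/*
 * Report every decoded channel to gd->bd->bi_dram; unlike dram_init(),
 * this keeps banks above the 32-bit boundary so the full, possibly sparse,
 * memory map is visible to later boot stages.
 */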
int dram_init_banksize(void)
{
	struct uniphier_dram_map dram_map[3] = {};
	int ret, i;

	ret = uniphier_dram_map_get(dram_map);
	if (ret)
		return ret;

	for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
		if (i >= ARRAY_SIZE(gd->bd->bi_dram))
			break;

		gd->bd->bi_dram[i].start = dram_map[i].base;
		gd->bd->bi_dram[i].size = dram_map[i].size;
	}

	return 0;
}