blob: 970fa09ef03c0aaa6d76ba01572bb2b590e336a1 [file] [log] [blame]
Tom Rini83d290c2018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0+
Masahiro Yamada5894ca02014-10-03 19:21:06 +09002/*
Masahiro Yamada3e9952b2017-01-28 06:53:43 +09003 * Copyright (C) 2012-2015 Panasonic Corporation
4 * Copyright (C) 2015-2017 Socionext Inc.
5 * Author: Masahiro Yamada <yamada.masahiro@socionext.com>
Masahiro Yamada5894ca02014-10-03 19:21:06 +09006 */
7
8#include <common.h>
Masahiro Yamada0f4ec052017-01-21 18:05:24 +09009#include <linux/errno.h>
Masahiro Yamada3b7fc3f2019-06-29 02:38:04 +090010#include <linux/io.h>
Masahiro Yamadadd74b942017-10-13 19:21:55 +090011#include <linux/kernel.h>
12#include <linux/printk.h>
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090013#include <linux/sizes.h>
Masahiro Yamadadd74b942017-10-13 19:21:55 +090014#include <asm/global_data.h>
Masahiro Yamadacf88aff2015-09-11 20:17:49 +090015
Masahiro Yamada34e29f72019-07-10 20:07:45 +090016#include "init.h"
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090017#include "sg-regs.h"
Masahiro Yamada51ea5a02016-06-17 19:24:29 +090018#include "soc-info.h"
19
Masahiro Yamadacf88aff2015-09-11 20:17:49 +090020DECLARE_GLOBAL_DATA_PTR;
21
/* One DRAM channel as decoded from SG_MEMCONF: physical base and length. */
struct uniphier_dram_map {
	unsigned long base;	/* channel start address */
	unsigned long size;	/* channel size in bytes; 0 means unused */
};
26
Masahiro Yamada6f47c992019-07-10 20:07:43 +090027static int uniphier_memconf_decode(struct uniphier_dram_map *dram_map,
28 unsigned long sparse_ch1_base, bool have_ch2)
Masahiro Yamadacf88aff2015-09-11 20:17:49 +090029{
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090030 unsigned long size;
31 u32 val;
Masahiro Yamadacf88aff2015-09-11 20:17:49 +090032
Masahiro Yamadad41b3582019-07-10 20:07:40 +090033 val = readl(sg_base + SG_MEMCONF);
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090034
35 /* set up ch0 */
Masahiro Yamada04cd4e72017-02-05 10:52:12 +090036 dram_map[0].base = CONFIG_SYS_SDRAM_BASE;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090037
38 switch (val & SG_MEMCONF_CH0_SZ_MASK) {
39 case SG_MEMCONF_CH0_SZ_64M:
40 size = SZ_64M;
41 break;
42 case SG_MEMCONF_CH0_SZ_128M:
43 size = SZ_128M;
44 break;
45 case SG_MEMCONF_CH0_SZ_256M:
46 size = SZ_256M;
47 break;
48 case SG_MEMCONF_CH0_SZ_512M:
49 size = SZ_512M;
50 break;
51 case SG_MEMCONF_CH0_SZ_1G:
52 size = SZ_1G;
53 break;
54 default:
Masahiro Yamada0f5bf092017-02-20 12:09:00 +090055 pr_err("error: invalid value is set to MEMCONF ch0 size\n");
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090056 return -EINVAL;
57 }
58
59 if ((val & SG_MEMCONF_CH0_NUM_MASK) == SG_MEMCONF_CH0_NUM_2)
60 size *= 2;
61
Masahiro Yamada04cd4e72017-02-05 10:52:12 +090062 dram_map[0].size = size;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090063
64 /* set up ch1 */
Masahiro Yamada04cd4e72017-02-05 10:52:12 +090065 dram_map[1].base = dram_map[0].base + size;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090066
67 if (val & SG_MEMCONF_SPARSEMEM) {
Masahiro Yamada6f47c992019-07-10 20:07:43 +090068 if (dram_map[1].base > sparse_ch1_base) {
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090069 pr_warn("Sparse mem is enabled, but ch0 and ch1 overlap\n");
70 pr_warn("Only ch0 is available\n");
Masahiro Yamada04cd4e72017-02-05 10:52:12 +090071 dram_map[1].base = 0;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090072 return 0;
73 }
74
Masahiro Yamada6f47c992019-07-10 20:07:43 +090075 dram_map[1].base = sparse_ch1_base;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090076 }
77
78 switch (val & SG_MEMCONF_CH1_SZ_MASK) {
79 case SG_MEMCONF_CH1_SZ_64M:
80 size = SZ_64M;
81 break;
82 case SG_MEMCONF_CH1_SZ_128M:
83 size = SZ_128M;
84 break;
85 case SG_MEMCONF_CH1_SZ_256M:
86 size = SZ_256M;
87 break;
88 case SG_MEMCONF_CH1_SZ_512M:
89 size = SZ_512M;
90 break;
91 case SG_MEMCONF_CH1_SZ_1G:
92 size = SZ_1G;
93 break;
94 default:
Masahiro Yamada0f5bf092017-02-20 12:09:00 +090095 pr_err("error: invalid value is set to MEMCONF ch1 size\n");
Masahiro Yamada3e9952b2017-01-28 06:53:43 +090096 return -EINVAL;
97 }
98
99 if ((val & SG_MEMCONF_CH1_NUM_MASK) == SG_MEMCONF_CH1_NUM_2)
100 size *= 2;
101
Masahiro Yamada04cd4e72017-02-05 10:52:12 +0900102 dram_map[1].size = size;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900103
Masahiro Yamada6f47c992019-07-10 20:07:43 +0900104 if (!have_ch2 || val & SG_MEMCONF_CH2_DISABLE)
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900105 return 0;
106
107 /* set up ch2 */
Masahiro Yamada04cd4e72017-02-05 10:52:12 +0900108 dram_map[2].base = dram_map[1].base + size;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900109
110 switch (val & SG_MEMCONF_CH2_SZ_MASK) {
111 case SG_MEMCONF_CH2_SZ_64M:
112 size = SZ_64M;
113 break;
114 case SG_MEMCONF_CH2_SZ_128M:
115 size = SZ_128M;
116 break;
117 case SG_MEMCONF_CH2_SZ_256M:
118 size = SZ_256M;
119 break;
120 case SG_MEMCONF_CH2_SZ_512M:
121 size = SZ_512M;
122 break;
123 case SG_MEMCONF_CH2_SZ_1G:
124 size = SZ_1G;
125 break;
126 default:
Masahiro Yamada0f5bf092017-02-20 12:09:00 +0900127 pr_err("error: invalid value is set to MEMCONF ch2 size\n");
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900128 return -EINVAL;
129 }
130
131 if ((val & SG_MEMCONF_CH2_NUM_MASK) == SG_MEMCONF_CH2_NUM_2)
132 size *= 2;
133
Masahiro Yamada04cd4e72017-02-05 10:52:12 +0900134 dram_map[2].size = size;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900135
136 return 0;
Masahiro Yamadacf88aff2015-09-11 20:17:49 +0900137}
Masahiro Yamada5894ca02014-10-03 19:21:06 +0900138
/*
 * LD4-type memory layout: sparse ch1 base at 0xc0000000, no third channel.
 * NOTE(review): 0xc0000000 presumably matches this SoC family's address map
 * for the second DRAM channel — confirm against the SoC datasheet.
 */
static int uniphier_ld4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, false);
}
143
/*
 * Pro4 memory layout: sparse ch1 base at 0xa0000000, no third channel.
 * NOTE(review): 0xa0000000 presumably matches Pro4's address map for the
 * second DRAM channel — confirm against the SoC datasheet.
 */
static int uniphier_pro4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xa0000000, false);
}
148
/*
 * PXs2-type memory layout: sparse ch1 base at 0xc0000000, and a third
 * channel may be present (subject to SG_MEMCONF_CH2_DISABLE).
 */
static int uniphier_pxs2_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, true);
}
153
/* Per-SoC hook: how to build the DRAM map for a given SoC revision ID. */
struct uniphier_dram_init_data {
	unsigned int soc_id;	/* SoC identifier to match */
	int (*dram_map_get)(struct uniphier_dram_map dram_map[]);	/* decoder */
};
158
/*
 * SoC-ID → DRAM-map decoder table.  Most SoCs share the LD4-style layout
 * (0xc0000000 sparse base, 2 channels); Pro4 differs in its ch1 base, and
 * the PXs2-style entries support an optional third channel.
 */
static const struct uniphier_dram_init_data uniphier_dram_init_data[] = {
	{
		.soc_id = UNIPHIER_LD4_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PRO4_ID,
		.dram_map_get = uniphier_pro4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_SLD8_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PRO5_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PXS2_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD6B_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD11_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD20_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PXS3_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
};
/* Defines uniphier_get_dram_init_data(): looks up the running SoC's entry. */
UNIPHIER_DEFINE_SOCDATA_FUNC(uniphier_get_dram_init_data,
			     uniphier_dram_init_data)
199
200static int uniphier_dram_map_get(struct uniphier_dram_map *dram_map)
201{
202 const struct uniphier_dram_init_data *data;
203
204 data = uniphier_get_dram_init_data();
205 if (!data) {
206 pr_err("unsupported SoC\n");
207 return -ENOTSUPP;
208 }
209
210 return data->dram_map_get(dram_map);
211}
212
Masahiro Yamada5894ca02014-10-03 19:21:06 +0900213int dram_init(void)
214{
Masahiro Yamada04cd4e72017-02-05 10:52:12 +0900215 struct uniphier_dram_map dram_map[3] = {};
Masahiro Yamadadf725342019-07-10 20:07:44 +0900216 bool valid_bank_found = false;
217 unsigned long prev_top;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900218 int ret, i;
Masahiro Yamadacf88aff2015-09-11 20:17:49 +0900219
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900220 gd->ram_size = 0;
221
Masahiro Yamada6f47c992019-07-10 20:07:43 +0900222 ret = uniphier_dram_map_get(dram_map);
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900223 if (ret)
224 return ret;
225
Masahiro Yamada04cd4e72017-02-05 10:52:12 +0900226 for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
Masahiro Yamadabe893a52018-01-06 22:59:24 +0900227 unsigned long max_size;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900228
Masahiro Yamada04cd4e72017-02-05 10:52:12 +0900229 if (!dram_map[i].size)
Masahiro Yamadadf725342019-07-10 20:07:44 +0900230 continue;
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900231
232 /*
233 * U-Boot relocates itself to the tail of the memory region,
234 * but it does not expect sparse memory. We use the first
235 * contiguous chunk here.
236 */
Masahiro Yamadadf725342019-07-10 20:07:44 +0900237 if (valid_bank_found && prev_top < dram_map[i].base)
Masahiro Yamada3e9952b2017-01-28 06:53:43 +0900238 break;
239
Masahiro Yamadabe893a52018-01-06 22:59:24 +0900240 /*
241 * Do not use memory that exceeds 32bit address range. U-Boot
242 * relocates itself to the end of the effectively available RAM.
243 * This could be a problem for DMA engines that do not support
244 * 64bit address (SDMA of SDHCI, UniPhier AV-ether, etc.)
245 */
246 if (dram_map[i].base >= 1ULL << 32)
247 break;
248
249 max_size = (1ULL << 32) - dram_map[i].base;
250
251 if (dram_map[i].size > max_size) {
252 gd->ram_size += max_size;
253 break;
254 }
255
Masahiro Yamada04cd4e72017-02-05 10:52:12 +0900256 gd->ram_size += dram_map[i].size;
Masahiro Yamadadf725342019-07-10 20:07:44 +0900257
258 prev_top = dram_map[i].base + dram_map[i].size;
259 valid_bank_found = true;
Masahiro Yamadaac2a1032016-03-29 20:18:45 +0900260 }
261
Masahiro Yamadaa322eb92018-01-06 22:59:26 +0900262 /*
263 * LD20 uses the last 64 byte for each channel for dynamic
264 * DDR PHY training
265 */
266 if (uniphier_get_soc_id() == UNIPHIER_LD20_ID)
267 gd->ram_size -= 64;
268
Masahiro Yamada5894ca02014-10-03 19:21:06 +0900269 return 0;
270}
Masahiro Yamadacf88aff2015-09-11 20:17:49 +0900271
Simon Glass76b00ac2017-03-31 08:40:32 -0600272int dram_init_banksize(void)
Masahiro Yamadacf88aff2015-09-11 20:17:49 +0900273{
Masahiro Yamada04cd4e72017-02-05 10:52:12 +0900274 struct uniphier_dram_map dram_map[3] = {};
Masahiro Yamada34e29f72019-07-10 20:07:45 +0900275 unsigned long base, top;
276 bool valid_bank_found = false;
Masahiro Yamada6f47c992019-07-10 20:07:43 +0900277 int ret, i;
Masahiro Yamadacf88aff2015-09-11 20:17:49 +0900278
Masahiro Yamada6f47c992019-07-10 20:07:43 +0900279 ret = uniphier_dram_map_get(dram_map);
280 if (ret)
281 return ret;
Masahiro Yamadacf88aff2015-09-11 20:17:49 +0900282
Masahiro Yamada04cd4e72017-02-05 10:52:12 +0900283 for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
Masahiro Yamada34e29f72019-07-10 20:07:45 +0900284 if (i < ARRAY_SIZE(gd->bd->bi_dram)) {
285 gd->bd->bi_dram[i].start = dram_map[i].base;
286 gd->bd->bi_dram[i].size = dram_map[i].size;
287 }
Masahiro Yamadacf88aff2015-09-11 20:17:49 +0900288
Masahiro Yamada34e29f72019-07-10 20:07:45 +0900289 if (!dram_map[i].size)
290 continue;
291
292 if (!valid_bank_found)
293 base = dram_map[i].base;
294 top = dram_map[i].base + dram_map[i].size;
295 valid_bank_found = true;
Masahiro Yamadacf88aff2015-09-11 20:17:49 +0900296 }
Simon Glass76b00ac2017-03-31 08:40:32 -0600297
Masahiro Yamada34e29f72019-07-10 20:07:45 +0900298 if (!valid_bank_found)
299 return -EINVAL;
300
301 /* map all the DRAM regions */
302 uniphier_mem_map_init(base, top - base);
303
Simon Glass76b00ac2017-03-31 08:40:32 -0600304 return 0;
Masahiro Yamadacf88aff2015-09-11 20:17:49 +0900305}