// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2012-2015 Panasonic Corporation
 * Copyright (C) 2015-2017 Socionext Inc.
 *   Author: Masahiro Yamada <yamada.masahiro@socionext.com>
 */

#include <common.h>
#include <init.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/sizes.h>
#include <asm/global_data.h>

#include "init.h"
#include "sg-regs.h"
#include "soc-info.h"

DECLARE_GLOBAL_DATA_PTR;

struct uniphier_dram_map {
	unsigned long base;
	unsigned long size;
};

static int uniphier_memconf_decode(struct uniphier_dram_map *dram_map,
				   unsigned long sparse_ch1_base, bool have_ch2)
{
	unsigned long size;
	u32 val;

	val = readl(sg_base + SG_MEMCONF);

	/* set up ch0 */
	dram_map[0].base = 0x80000000;

	switch (val & SG_MEMCONF_CH0_SZ_MASK) {
	case SG_MEMCONF_CH0_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH0_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH0_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH0_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH0_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch0 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH0_NUM_MASK) == SG_MEMCONF_CH0_NUM_2)
		size *= 2;

	dram_map[0].size = size;

	/* set up ch1 */
	dram_map[1].base = dram_map[0].base + size;

	if (val & SG_MEMCONF_SPARSEMEM) {
		if (dram_map[1].base > sparse_ch1_base) {
			pr_warn("Sparse mem is enabled, but ch0 and ch1 overlap\n");
			pr_warn("Only ch0 is available\n");
			dram_map[1].base = 0;
			return 0;
		}

		dram_map[1].base = sparse_ch1_base;
	}

	switch (val & SG_MEMCONF_CH1_SZ_MASK) {
	case SG_MEMCONF_CH1_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH1_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH1_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH1_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH1_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch1 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH1_NUM_MASK) == SG_MEMCONF_CH1_NUM_2)
		size *= 2;

	dram_map[1].size = size;

	if (!have_ch2 || val & SG_MEMCONF_CH2_DISABLE)
		return 0;

	/* set up ch2 */
	dram_map[2].base = dram_map[1].base + size;

	switch (val & SG_MEMCONF_CH2_SZ_MASK) {
	case SG_MEMCONF_CH2_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH2_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH2_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH2_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH2_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch2 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH2_NUM_MASK) == SG_MEMCONF_CH2_NUM_2)
		size *= 2;

	dram_map[2].size = size;

	return 0;
}

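/*
 * Per-SoC wrappers: they differ only in the sparse-memory ch1 base
 * address and in whether a third DRAM channel exists.
 */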
static int uniphier_ld4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, false);
}

static int uniphier_pro4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xa0000000, false);
}

static int uniphier_pxs2_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, true);
}

struct uniphier_dram_init_data {
	unsigned int soc_id;
	int (*dram_map_get)(struct uniphier_dram_map dram_map[]);
};

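/* look-up table from SoC ID to the matching DRAM map decoder */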
160static const struct uniphier_dram_init_data uniphier_dram_init_data[] = {
161 {
162 .soc_id = UNIPHIER_LD4_ID,
163 .dram_map_get = uniphier_ld4_dram_map_get,
164 },
165 {
166 .soc_id = UNIPHIER_PRO4_ID,
167 .dram_map_get = uniphier_pro4_dram_map_get,
168 },
169 {
170 .soc_id = UNIPHIER_SLD8_ID,
171 .dram_map_get = uniphier_ld4_dram_map_get,
172 },
173 {
174 .soc_id = UNIPHIER_PRO5_ID,
175 .dram_map_get = uniphier_ld4_dram_map_get,
176 },
177 {
178 .soc_id = UNIPHIER_PXS2_ID,
179 .dram_map_get = uniphier_pxs2_dram_map_get,
180 },
181 {
182 .soc_id = UNIPHIER_LD6B_ID,
183 .dram_map_get = uniphier_pxs2_dram_map_get,
184 },
185 {
186 .soc_id = UNIPHIER_LD11_ID,
187 .dram_map_get = uniphier_ld4_dram_map_get,
188 },
189 {
190 .soc_id = UNIPHIER_LD20_ID,
191 .dram_map_get = uniphier_pxs2_dram_map_get,
192 },
193 {
194 .soc_id = UNIPHIER_PXS3_ID,
195 .dram_map_get = uniphier_pxs2_dram_map_get,
196 },
197};
198UNIPHIER_DEFINE_SOCDATA_FUNC(uniphier_get_dram_init_data,
199 uniphier_dram_init_data)
200
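/* pick the decoder for the running SoC and fill in the DRAM map */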
static int uniphier_dram_map_get(struct uniphier_dram_map *dram_map)
{
	const struct uniphier_dram_init_data *data;

	data = uniphier_get_dram_init_data();
	if (!data) {
		pr_err("unsupported SoC\n");
		return -ENOTSUPP;
	}

	return data->dram_map_get(dram_map);
}

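/*
 * Set gd->ram_base/ram_size to the first contiguous chunk of DRAM below
 * the 32bit address limit, so that U-Boot relocation and 32bit-only DMA
 * engines stay within it.
 */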
int dram_init(void)
{
	struct uniphier_dram_map dram_map[3] = {};
	bool valid_bank_found = false;
	unsigned long prev_top;
	int ret, i;

	gd->ram_size = 0;

	ret = uniphier_dram_map_get(dram_map);
	if (ret)
		return ret;

	for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
		unsigned long max_size;

		if (!dram_map[i].size)
			continue;

		/*
		 * U-Boot relocates itself to the tail of the memory region,
		 * but it does not expect sparse memory. We use the first
		 * contiguous chunk here.
		 */
		if (valid_bank_found && prev_top < dram_map[i].base)
			break;

		/*
		 * Do not use memory that exceeds the 32bit address range.
		 * U-Boot relocates itself to the end of the effectively
		 * available RAM. This could be a problem for DMA engines
		 * that do not support 64bit addresses (SDMA of SDHCI,
		 * UniPhier AV-ether, etc.)
		 */
		if (dram_map[i].base >= 1ULL << 32)
			break;

		max_size = (1ULL << 32) - dram_map[i].base;

		gd->ram_size = min(dram_map[i].size, max_size);

		if (!valid_bank_found)
			gd->ram_base = dram_map[i].base;

		prev_top = dram_map[i].base + dram_map[i].size;
		valid_bank_found = true;
	}

	/*
	 * LD20 uses the last 64 bytes of each channel for dynamic
	 * DDR PHY training
	 */
	if (uniphier_get_soc_id() == UNIPHIER_LD20_ID)
		gd->ram_size -= 64;

	return 0;
}

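/*
 * Fill in gd->bd->bi_dram[] for all channels and create the memory map
 * covering the range from the first to the last valid DRAM bank.
 */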
int dram_init_banksize(void)
{
	struct uniphier_dram_map dram_map[3] = {};
	unsigned long base, top;
	bool valid_bank_found = false;
	int ret, i;

	ret = uniphier_dram_map_get(dram_map);
	if (ret)
		return ret;

	for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
		if (i < ARRAY_SIZE(gd->bd->bi_dram)) {
			gd->bd->bi_dram[i].start = dram_map[i].base;
			gd->bd->bi_dram[i].size = dram_map[i].size;
		}

		if (!dram_map[i].size)
			continue;

		if (!valid_bank_found)
			base = dram_map[i].base;
		top = dram_map[i].base + dram_map[i].size;
		valid_bank_found = true;
	}

	if (!valid_bank_found)
		return -EINVAL;

	/* map all the DRAM regions */
	uniphier_mem_map_init(base, top - base);

	return 0;
}