// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2014 - 2015 Xilinx, Inc.
 * Michal Simek <michal.simek@xilinx.com>
 */

#include <common.h>
#include <init.h>
#include <time.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>
#include <asm/armv8/mmu.h>
#include <asm/cache.h>
#include <asm/io.h>
#include <zynqmp_firmware.h>

#define ZYNQ_SILICON_VER_MASK	0xF000
#define ZYNQ_SILICON_VER_SHIFT	12

DECLARE_GLOBAL_DATA_PTR;

/*
 * Number of filled static entries and also the first empty
 * slot in zynqmp_mem_map.
 */
#define ZYNQMP_MEM_MAP_USED	4

#if !defined(CONFIG_ZYNQMP_NO_DDR)
#define DRAM_BANKS	CONFIG_NR_DRAM_BANKS
#else
#define DRAM_BANKS	0
#endif

#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
#define TCM_MAP	1
#else
#define TCM_MAP	0
#endif

/* +1 is the end-of-list terminator entry, which must stay empty */
#define ZYNQMP_MEM_MAP_MAX	(ZYNQMP_MEM_MAP_USED + DRAM_BANKS + TCM_MAP + 1)

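/*
 * The static entries cover device/peripheral address ranges and are mapped
 * as strongly-ordered, non-executable device memory. DDR banks and
 * (optionally) the TCM/OCM window are appended at run time by
 * mem_map_fill() below.
 */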
static struct mm_region zynqmp_mem_map[ZYNQMP_MEM_MAP_MAX] = {
	{
		.virt = 0x80000000UL,
		.phys = 0x80000000UL,
		.size = 0x70000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0xf8000000UL,
		.phys = 0xf8000000UL,
		.size = 0x07e00000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0x400000000UL,
		.phys = 0x400000000UL,
		.size = 0x400000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0x1000000000UL,
		.phys = 0x1000000000UL,
		.size = 0xf000000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}
};

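/*
 * Append the run-time discovered regions to zynqmp_mem_map: the TCM/OCM
 * window when CONFIG_DEFINE_TCM_OCM_MMAP is enabled, followed by every
 * populated DDR bank from the board info. Both are mapped as normal,
 * inner-shareable memory.
 */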
void mem_map_fill(void)
{
	int banks = ZYNQMP_MEM_MAP_USED;

#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
	zynqmp_mem_map[banks].virt = 0xffe00000UL;
	zynqmp_mem_map[banks].phys = 0xffe00000UL;
	zynqmp_mem_map[banks].size = 0x00200000UL;
	zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
				      PTE_BLOCK_INNER_SHARE;
	banks = banks + 1;
#endif

#if !defined(CONFIG_ZYNQMP_NO_DDR)
	for (int i = 0; i < CONFIG_NR_DRAM_BANKS; i++) {
		/* A zero-sized bank marks the end of the DDR list */
		if (!gd->bd->bi_dram[i].size)
			break;

		zynqmp_mem_map[banks].virt = gd->bd->bi_dram[i].start;
		zynqmp_mem_map[banks].phys = gd->bd->bi_dram[i].start;
		zynqmp_mem_map[banks].size = gd->bd->bi_dram[i].size;
		zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
					      PTE_BLOCK_INNER_SHARE;
		banks = banks + 1;
	}
#endif
}

struct mm_region *mem_map = zynqmp_mem_map;

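/* Fixed size reserved for the MMU page tables: 0x14000 bytes (80 KiB) */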
u64 get_page_table_size(void)
{
	return 0x14000;
}

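/*
 * Bring up the tightly coupled memory (TCM) of the R5 cores in the
 * requested mode (e.g. TCM_LOCK) and clear it. Any data previously held
 * in the TCM is lost.
 */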
#if defined(CONFIG_SYS_MEM_RSVD_FOR_MMU) || defined(CONFIG_DEFINE_TCM_OCM_MMAP)
void tcm_init(u8 mode)
{
	puts("WARNING: Initializing TCM overwrites TCM content\n");
	initialize_tcm(mode);
	memset((void *)ZYNQMP_TCM_BASE_ADDR, 0, ZYNQMP_TCM_SIZE);
}
#endif

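/*
 * With CONFIG_SYS_MEM_RSVD_FOR_MMU the MMU page tables are placed in the
 * locked TCM instead of being carved out of DDR.
 */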
#ifdef CONFIG_SYS_MEM_RSVD_FOR_MMU
int arm_reserve_mmu(void)
{
	tcm_init(TCM_LOCK);
	gd->arch.tlb_size = PGTABLE_SIZE;
	gd->arch.tlb_addr = ZYNQMP_TCM_BASE_ADDR;

	return 0;
}
#endif

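/* Read the silicon version field directly from the CSU version register */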
static unsigned int zynqmp_get_silicon_version_secure(void)
{
	u32 ver;

	ver = readl(&csu_base->version);
	ver &= ZYNQMP_SILICON_VER_MASK;
	ver >>= ZYNQMP_SILICON_VER_SHIFT;

	return ver;
}

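/*
 * At EL3 the CSU registers are accessible directly; at lower exception
 * levels fall back to the timebase clock: a 50 MHz counter indicates QEMU,
 * anything else is treated as real silicon.
 */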
unsigned int zynqmp_get_silicon_version(void)
{
	if (current_el() == 3)
		return zynqmp_get_silicon_version_secure();

	gd->cpu_clk = get_tbclk();

	switch (gd->cpu_clk) {
	case 50000000:
		return ZYNQMP_CSU_VERSION_QEMU;
	}

	return ZYNQMP_CSU_VERSION_SILICON;
}

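/* Read-modify-write: only the bits selected by mask are updated */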
static int zynqmp_mmio_rawwrite(const u32 address,
				const u32 mask,
				const u32 value)
{
	u32 data;
	u32 value_local = value;
	int ret;

	ret = zynqmp_mmio_read(address, &data);
	if (ret)
		return ret;

	data &= ~mask;
	value_local &= mask;
	value_local |= data;
	writel(value_local, (ulong)address);
	return 0;
}

static int zynqmp_mmio_rawread(const u32 address, u32 *value)
{
	*value = readl((ulong)address);
	return 0;
}

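/*
 * Masked MMIO write. In SPL or at EL3 the register is written directly;
 * otherwise the request is forwarded to the platform firmware through
 * PM_MMIO_WRITE. Without CONFIG_ZYNQMP_FIRMWARE no proxy is available and
 * -EINVAL is returned.
 */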
int zynqmp_mmio_write(const u32 address,
		      const u32 mask,
		      const u32 value)
{
	if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3)
		return zynqmp_mmio_rawwrite(address, mask, value);
#if defined(CONFIG_ZYNQMP_FIRMWARE)
	else
		return xilinx_pm_request(PM_MMIO_WRITE, address, mask,
					 value, 0, NULL);
#endif

	return -EINVAL;
}

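/*
 * MMIO read counterpart of zynqmp_mmio_write(): direct access in SPL or
 * at EL3, PM_MMIO_READ via the platform firmware otherwise.
 */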
int zynqmp_mmio_read(const u32 address, u32 *value)
{
	int ret = -EINVAL;

	if (!value)
		return ret;

	if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3) {
		ret = zynqmp_mmio_rawread(address, value);
	}
#if defined(CONFIG_ZYNQMP_FIRMWARE)
	else {
		u32 ret_payload[PAYLOAD_ARG_CNT];

		ret = xilinx_pm_request(PM_MMIO_READ, address, 0, 0,
					0, ret_payload);
		*value = ret_payload[1];
	}
#endif

	return ret;
}