/*
 * include/asm-arm/macro.h
 *
 * Copyright (C) 2009 Jean-Christophe PLAGNIOL-VILLARD <plagnioj@jcrosoft.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#ifndef __ASM_ARM_MACRO_H__
#define __ASM_ARM_MACRO_H__

#ifdef CONFIG_ARM64
#include <asm/system.h>
#endif

#ifdef __ASSEMBLY__

/*
 * These macros provide a convenient way to write 8, 16 and 32 bit data
 * to any address.
 * Registers r4 and r5 are used; any data in these registers is
 * overwritten by the macros.
 * The macros are valid for any ARM architecture; they do not implement
 * any memory barriers, so caution is recommended when using them while
 * the caches are enabled or on a multi-core system.
 */

.macro	write32, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	str	r5, [r4]
.endm

.macro	write16, addr, data
	ldr	r4, =\addr
	ldrh	r5, =\data
	strh	r5, [r4]
.endm

.macro	write8, addr, data
	ldr	r4, =\addr
	ldrb	r5, =\data
	strb	r5, [r4]
.endm
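
/*
 * Illustrative usage sketch (not part of the original header; the
 * address and data values below are hypothetical). Note that r4 and
 * r5 are clobbered by each invocation:
 *
 *	write32	0x0c000000, 0x12345678	@ word store to 0x0c000000
 *	write16	0x0c000004, 0xabcd	@ halfword store
 *	write8	0x0c000006, 0xef	@ byte store
 */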

/*
 * This macro generates a loop that can be used for delays in the code.
 * Register r4 is used; any data in this register is overwritten by the
 * macro.
 * The macro is valid for any ARM architecture. The actual time spent in
 * the loop will vary from CPU to CPU though.
 */

.macro	wait_timer, time
	ldr	r4, =\time
1:
	nop
	subs	r4, r4, #1
	bcs	1b
.endm
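
/*
 * Illustrative usage sketch (hypothetical count): spin for roughly
 * 10000 iterations of the nop/subs/bcs loop; the resulting wall-clock
 * delay depends on the core's clock frequency and pipeline:
 *
 *	wait_timer	10000
 */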

#ifdef CONFIG_ARM64
/*
 * Register aliases.
 */
lr	.req	x30

/*
 * Branch according to exception level
 */
.macro	switch_el, xreg, el3_label, el2_label, el1_label
	mrs	\xreg, CurrentEL
	cmp	\xreg, 0xc
	b.eq	\el3_label
	cmp	\xreg, 0x8
	b.eq	\el2_label
	cmp	\xreg, 0x4
	b.eq	\el1_label
.endm
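
/*
 * Illustrative usage sketch (hypothetical local labels), using x1 as
 * the scratch register; CurrentEL reads 0xc, 0x8 or 0x4 at EL3, EL2
 * and EL1 respectively:
 *
 *	switch_el x1, 3f, 2f, 1f
 * 3:	...				@ running at EL3
 *	b	0f
 * 2:	...				@ running at EL2
 *	b	0f
 * 1:	...				@ running at EL1
 * 0:
 */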

/*
 * Branch if current processor is a Cortex-A57 core.
 */
.macro	branch_if_a57_core, xreg, a57_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD07		/* Cortex-A57 MPCore processor. */
	b.eq	\a57_label
.endm

/*
 * Branch if current processor is a Cortex-A53 core.
 */
.macro	branch_if_a53_core, xreg, a53_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD03		/* Cortex-A53 MPCore processor. */
	b.eq	\a53_label
.endm
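
/*
 * Illustrative usage sketch (hypothetical labels): both macros compare
 * the primary part number field of MIDR_EL1, so they can gate
 * core-specific setup such as erratum workarounds:
 *
 *	branch_if_a57_core x0, apply_a57_errata
 *	branch_if_a53_core x0, apply_a53_errata
 */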

/*
 * Branch if the current processor is a slave; the processor whose
 * affinity fields are all zero is treated as the master.
 */
.macro	branch_if_slave, xreg, slave_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg, mpidr_el1
	tst	\xreg, #0xff		/* Test Affinity 0 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 1 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 2 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #16
	tst	\xreg, #0xff		/* Test Affinity 3 */
	b.ne	\slave_label
#endif
.endm

/*
 * Branch if the current processor is the master; the processor whose
 * affinity fields are all zero is treated as the master.
 */
.macro	branch_if_master, xreg1, xreg2, master_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg1, mpidr_el1
	lsr	\xreg2, \xreg1, #32
	lsl	\xreg1, \xreg1, #40
	lsr	\xreg1, \xreg1, #40
	orr	\xreg1, \xreg1, \xreg2
	cbz	\xreg1, \master_label
#else
	b	\master_label
#endif
.endm
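
/*
 * Illustrative usage sketch (hypothetical labels), mirroring the usual
 * boot-time dispatch: the master core proceeds while all other cores
 * take the slave path:
 *
 *	branch_if_master x0, x1, master_cpu
 *	branch_if_slave x0, slave_cpu
 */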
142
Alison Wangec6617c2016-11-10 10:49:03 +0800143/*
144 * Switch from EL3 to EL2 for ARMv8
145 * @ep: kernel entry point
146 * @flag: The execution state flag for lower exception
147 * level, ES_TO_AARCH64 or ES_TO_AARCH32
148 * @tmp: temporary register
149 *
150 * For loading 32-bit OS, x1 is machine nr and x2 is ftaddr.
151 * For loading 64-bit OS, x0 is physical address to the FDT blob.
152 * They will be passed to the guest.
153 */
154.macro armv8_switch_to_el2_m, ep, flag, tmp
York Sun40f8dec2014-09-08 12:20:00 -0700155 msr cptr_el3, xzr /* Disable coprocessor traps to EL3 */
Alison Wangec6617c2016-11-10 10:49:03 +0800156 mov \tmp, #CPTR_EL2_RES1
157 msr cptr_el2, \tmp /* Disable coprocessor traps to EL2 */
York Sun40f8dec2014-09-08 12:20:00 -0700158
David Feng148822d2015-03-02 15:29:34 +0800159 /* Initialize Generic Timers */
160 msr cntvoff_el2, xzr
161
York Sun40f8dec2014-09-08 12:20:00 -0700162 /* Initialize SCTLR_EL2
163 *
164 * setting RES1 bits (29,28,23,22,18,16,11,5,4) to 1
165 * and RES0 bits (31,30,27,26,24,21,20,17,15-13,10-6) +
166 * EE,WXN,I,SA,C,A,M to 0
167 */
Alison Wangec6617c2016-11-10 10:49:03 +0800168 ldr \tmp, =(SCTLR_EL2_RES1 | SCTLR_EL2_EE_LE |\
169 SCTLR_EL2_WXN_DIS | SCTLR_EL2_ICACHE_DIS |\
170 SCTLR_EL2_SA_DIS | SCTLR_EL2_DCACHE_DIS |\
171 SCTLR_EL2_ALIGN_DIS | SCTLR_EL2_MMU_DIS)
172 msr sctlr_el2, \tmp
173
174 mov \tmp, sp
175 msr sp_el2, \tmp /* Migrate SP */
176 mrs \tmp, vbar_el3
177 msr vbar_el2, \tmp /* Migrate VBAR */
178
179 /* Check switch to AArch64 EL2 or AArch32 Hypervisor mode */
180 cmp \flag, #ES_TO_AARCH32
181 b.eq 1f
182
183 /*
184 * The next lower exception level is AArch64, 64bit EL2 | HCE |
185 * SMD | RES1 (Bits[5:4]) | Non-secure EL0/EL1.
186 */
187 ldr \tmp, =(SCR_EL3_RW_AARCH64 | SCR_EL3_HCE_EN |\
188 SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
189 SCR_EL3_NS_EN)
190 msr scr_el3, \tmp
York Sun40f8dec2014-09-08 12:20:00 -0700191
192 /* Return to the EL2_SP2 mode from EL3 */
Alison Wangec6617c2016-11-10 10:49:03 +0800193 ldr \tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
194 SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
195 SPSR_EL_M_AARCH64 | SPSR_EL_M_EL2H)
196 msr spsr_el3, \tmp
197 msr elr_el3, \ep
198 eret
199
2001:
201 /*
202 * The next lower exception level is AArch32, 32bit EL2 | HCE |
203 * SMD | RES1 (Bits[5:4]) | Non-secure EL0/EL1.
204 */
205 ldr \tmp, =(SCR_EL3_RW_AARCH32 | SCR_EL3_HCE_EN |\
206 SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
207 SCR_EL3_NS_EN)
208 msr scr_el3, \tmp
209
210 /* Return to AArch32 Hypervisor mode */
211 ldr \tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
212 SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
213 SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
214 SPSR_EL_M_HYP)
215 msr spsr_el3, \tmp
216 msr elr_el3, \ep
York Sun40f8dec2014-09-08 12:20:00 -0700217 eret
218.endm
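
/*
 * Illustrative usage sketch (hypothetical symbol and register choice):
 * jump from EL3 to a 64-bit image at entry_point in AArch64 EL2, using
 * x5 as the scratch register. The macro does not return; it ends in
 * eret:
 *
 *	ldr	x4, =entry_point
 *	armv8_switch_to_el2_m x4, ES_TO_AARCH64, x5
 */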

/*
 * Switch from EL2 to EL1 for ARMv8
 * @ep:     kernel entry point
 * @flag:   The execution state flag for the lower exception
 *          level, ES_TO_AARCH64 or ES_TO_AARCH32
 * @tmp:    temporary register
 *
 * For loading a 32-bit OS, x1 is the machine nr and x2 is the ftaddr.
 * For loading a 64-bit OS, x0 is the physical address of the FDT blob.
 * They will be passed to the guest.
 */
.macro armv8_switch_to_el1_m, ep, flag, tmp
	/* Initialize Generic Timers */
	mrs	\tmp, cnthctl_el2
	/* Enable EL1 access to timers */
	orr	\tmp, \tmp, #(CNTHCTL_EL2_EL1PCEN_EN |\
		CNTHCTL_EL2_EL1PCTEN_EN)
	msr	cnthctl_el2, \tmp
	msr	cntvoff_el2, xzr

	/* Initialize MPID/MPIDR registers */
	mrs	\tmp, midr_el1
	msr	vpidr_el2, \tmp
	mrs	\tmp, mpidr_el1
	msr	vmpidr_el2, \tmp

	/* Disable coprocessor traps */
	mov	\tmp, #CPTR_EL2_RES1
	msr	cptr_el2, \tmp		/* Disable coprocessor traps to EL2 */
	msr	hstr_el2, xzr		/* Disable coprocessor traps to EL2 */
	mov	\tmp, #CPACR_EL1_FPEN_EN
	msr	cpacr_el1, \tmp		/* Enable FP/SIMD at EL1 */

	/* SCTLR_EL1 initialization
	 *
	 * setting RES1 bits (29,28,23,22,20,11) to 1
	 * and RES0 bits (31,30,27,21,17,13,10,6) +
	 * UCI,EE,EOE,WXN,nTWE,nTWI,UCT,DZE,I,UMA,SED,ITD,
	 * CP15BEN,SA0,SA,C,A,M to 0
	 */
	ldr	\tmp, =(SCTLR_EL1_RES1 | SCTLR_EL1_UCI_DIS |\
			SCTLR_EL1_EE_LE | SCTLR_EL1_WXN_DIS |\
			SCTLR_EL1_NTWE_DIS | SCTLR_EL1_NTWI_DIS |\
			SCTLR_EL1_UCT_DIS | SCTLR_EL1_DZE_DIS |\
			SCTLR_EL1_ICACHE_DIS | SCTLR_EL1_UMA_DIS |\
			SCTLR_EL1_SED_EN | SCTLR_EL1_ITD_EN |\
			SCTLR_EL1_CP15BEN_DIS | SCTLR_EL1_SA0_DIS |\
			SCTLR_EL1_SA_DIS | SCTLR_EL1_DCACHE_DIS |\
			SCTLR_EL1_ALIGN_DIS | SCTLR_EL1_MMU_DIS)
	msr	sctlr_el1, \tmp

	mov	\tmp, sp
	msr	sp_el1, \tmp		/* Migrate SP */
	mrs	\tmp, vbar_el2
	msr	vbar_el1, \tmp		/* Migrate VBAR */

	/* Check switch to AArch64 EL1 or AArch32 Supervisor mode */
	cmp	\flag, #ES_TO_AARCH32
	b.eq	1f

	/* Initialize HCR_EL2 */
	ldr	\tmp, =(HCR_EL2_RW_AARCH64 | HCR_EL2_HCD_DIS)
	msr	hcr_el2, \tmp

	/* Return to the EL1_SP1 mode from EL2 */
	ldr	\tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_M_AARCH64 | SPSR_EL_M_EL1H)
	msr	spsr_el2, \tmp
	msr	elr_el2, \ep
	eret

1:
	/* Initialize HCR_EL2 */
	ldr	\tmp, =(HCR_EL2_RW_AARCH32 | HCR_EL2_HCD_DIS)
	msr	hcr_el2, \tmp

	/* Return to AArch32 Supervisor mode from EL2 */
	ldr	\tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
			SPSR_EL_M_SVC)
	msr	spsr_el2, \tmp
	msr	elr_el2, \ep
	eret
.endm
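
/*
 * Illustrative usage sketch (hypothetical register contents): with the
 * entry point already in x3, drop from EL2 into AArch32 Supervisor
 * mode, using x4 as the scratch register:
 *
 *	armv8_switch_to_el1_m x3, ES_TO_AARCH32, x4
 */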

#if defined(CONFIG_GICV3)
.macro	gic_wait_for_interrupt_m xreg1
0:	wfi
	mrs	\xreg1, ICC_IAR1_EL1
	msr	ICC_EOIR1_EL1, \xreg1
	cbnz	\xreg1, 0b
.endm
#elif defined(CONFIG_GICV2)
.macro	gic_wait_for_interrupt_m xreg1, wreg2
0:	wfi
	ldr	\wreg2, [\xreg1, GICC_AIAR]
	str	\wreg2, [\xreg1, GICC_AEOIR]
	and	\wreg2, \wreg2, #0x3ff
	cbnz	\wreg2, 0b
.endm
#endif
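
/*
 * Illustrative usage sketch: both variants spin in wfi until an
 * acknowledged interrupt ID of 0 (SGI 0) is seen. On GICv2 the first
 * argument must hold the GIC CPU interface base address (GICC_BASE is
 * a hypothetical symbol); on GICv3 the system registers are used
 * instead, so no base address is needed:
 *
 *	ldr	x0, =GICC_BASE		@ GICv2 only
 *	gic_wait_for_interrupt_m x0, w1
 */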

#endif /* CONFIG_ARM64 */

#endif /* __ASSEMBLY__ */
#endif /* __ASM_ARM_MACRO_H__ */