/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * relocate - common relocation function for ARM U-Boot
 *
 * Copyright (c) 2013 Albert ARIBAUD <albert.u.boot@aribaud.net>
 */

#include <asm-offsets.h>
#include <config.h>
#include <elf.h>
#include <linux/linkage.h>
#ifdef CONFIG_CPU_V7M
#include <asm/armv7m.h>
#endif

/*
 * Default/weak exception vectors relocation routine
 *
 * This routine covers the standard ARM cases: normal (0x00000000),
 * high (0xffff0000) and VBAR. SoCs which do not comply with any of
 * the standard cases must provide their own, strong, version.
 *
 * In:    r9 = gd (U-Boot global data pointer; GD_RELOCADDR is read from it)
 * Out:   none
 * Clobb: r0, r1; additionally r2-r8, r10 and flags on the copy path
 */

	.section	.text.relocate_vectors,"ax",%progbits
	.weak	relocate_vectors

ENTRY(relocate_vectors)

#ifdef CONFIG_CPU_V7M
	/*
	 * On ARMv7-M we only have to write the new vector address
	 * to VTOR register.
	 */
	ldr	r0, [r9, #GD_RELOCADDR]	/* r0 = gd->relocaddr */
	ldr	r1, =V7M_SCB_BASE
	str	r0, [r1, V7M_SCB_VTOR]
#else
#ifdef CONFIG_HAS_VBAR
	/*
	 * If the ARM processor has the security extensions,
	 * use VBAR to relocate the exception vectors.
	 */
	ldr	r0, [r9, #GD_RELOCADDR]	/* r0 = gd->relocaddr */
	mcr	p15, 0, r0, c12, c0, 0	/* Set VBAR */
#else
	/*
	 * Copy the relocated exception vectors to the
	 * correct address
	 * CP15 c1 V bit gives us the location of the vectors:
	 * 0x00000000 or 0xFFFF0000.
	 */
	ldr	r0, [r9, #GD_RELOCADDR]	/* r0 = gd->relocaddr */
	mrc	p15, 0, r2, c1, c0, 0	/* V bit (bit[13]) in CP15 c1 */
	ands	r2, r2, #(1 << 13)
	ldreq	r1, =0x00000000		/* If V=0 */
	ldrne	r1, =0xFFFF0000		/* If V=1 */
	/*
	 * Copy 16 words (64 bytes) in two 8-register bursts.
	 * NOTE(review): presumably the 8 vector entries plus their
	 * 8-word literal pool — confirm against the vectors table.
	 */
	ldmia	r0!, {r2-r8,r10}
	stmia	r1!, {r2-r8,r10}
	ldmia	r0!, {r2-r8,r10}
	stmia	r1!, {r2-r8,r10}
#endif
#endif
	bx	lr

ENDPROC(relocate_vectors)

66
/*
 * void relocate_code(addr_moni)
 *
 * This function relocates the monitor code.
 *
 * NOTE:
 * To prevent the code below from containing references with an R_ARM_ABS32
 * relocation record type, we never refer to linker-defined symbols directly.
 * Instead, we declare literals which contain their relative location with
 * respect to relocate_code, and at run time, add relocate_code back to them.
 *
 * In:    r0 = addr_moni, destination (relocation target) address
 * Out:   none; returns to caller (lr preserved)
 * Clobb: r0-r4, r10, r11, flags
 */

ENTRY(relocate_code)
	ldr	r1, =__image_copy_start	/* r1 <- SRC &__image_copy_start */
	subs	r4, r0, r1		/* r4 <- relocation offset */
	beq	relocate_done		/* skip relocation: already in place */
	ldr	r2, =__image_copy_end	/* r2 <- SRC &__image_copy_end */

copy_loop:
	/* Copy the image two words at a time until the source end */
	ldmia	r1!, {r10-r11}		/* copy from source address [r1] */
	stmia	r0!, {r10-r11}		/* copy to target address [r0] */
	cmp	r1, r2			/* until source end address [r2] */
	blo	copy_loop

	/*
	 * fix .rel.dyn relocations: walk the Elf32_Rel table and patch
	 * every R_ARM_RELATIVE entry by the relocation offset in r4
	 */
	ldr	r2, =__rel_dyn_start	/* r2 <- SRC &__rel_dyn_start */
	ldr	r3, =__rel_dyn_end	/* r3 <- SRC &__rel_dyn_end */
fixloop:
	ldmia	r2!, {r0-r1}		/* (r0,r1) <- (SRC location,fixup) */
	and	r1, r1, #0xff		/* r1 <- reloc type: low byte of r_info */
	cmp	r1, #R_ARM_RELATIVE
	bne	fixnext			/* only R_ARM_RELATIVE is handled */

	/* relative fix: increase location by offset */
	add	r0, r0, r4		/* r0 <- relocated fixup location */
	ldr	r1, [r0]
	add	r1, r1, r4		/* add offset to the stored value */
	str	r1, [r0]
fixnext:
	cmp	r2, r3
	blo	fixloop

relocate_done:

#ifdef __XSCALE__
	/*
	 * On xscale, icache must be invalidated and write buffers drained,
	 * even with cache disabled - 4.2.7 of xscale core developer's manual
	 */
	mcr	p15, 0, r0, c7, c7, 0	/* invalidate icache */
	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
#endif

	/* ARMv4- don't know bx lr but the assembler fails to see that */

#ifdef __ARM_ARCH_4__
	mov	pc, lr
#else
	bx	lr
#endif

ENDPROC(relocate_code)