/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * (C) Copyright 2008 - 2013 Tensilica Inc.
 * (C) Copyright 2014 - 2016 Cadence Design Systems Inc.
 */
6
7#include <config.h>
8#include <asm/asmmacro.h>
9#include <asm/cacheasm.h>
10#include <asm/regs.h>
11#include <asm/arch/tie.h>
12#include <asm-offsets.h>
13
/*
 * Offsets into the pt_regs structure.
 * Make sure these always match with the structure defined in ptrace.h!
 */
18
#define PT_PC		0	/* EPC1: PC at the point of exception */
#define PT_PS		4	/* PS at the point of exception */
#define PT_DEPC		8	/* double-exception PC */
#define PT_EXCCAUSE	12	/* exception cause code */
#define PT_EXCVADDR	16	/* faulting virtual address */
#define PT_DEBUGCAUSE	20	/* debug exception cause */
#define PT_WMASK	24	/* window mask */
#define PT_LBEG		28	/* zero-overhead loop begin */
#define PT_LEND		32	/* zero-overhead loop end */
#define PT_LCOUNT	36	/* zero-overhead loop count */
#define PT_SAR		40	/* shift-amount register */
#define PT_WINDOWBASE	44	/* WINDOWBASE at entry */
#define PT_WINDOWSTART	48	/* WINDOWSTART at entry */
#define PT_SYSCALL	52	/* syscall number, if any */
#define PT_ICOUNTLEVEL	56	/* ICOUNTLEVEL (single-stepping) */
#define PT_RESERVED	60	/* padding so PT_AREG is 16-byte aligned */
#define PT_AREG		64	/* save area for a0..a15 (16 words) */
#define PT_SIZE		(64 + 64)	/* 64-byte header + 16 address regs */
37
/*
 * Cache attributes are different for full MMU and region protection.
 * CA_WRITEBACK is the TLB attribute value used below to enable
 * write-back caching for the relocated image.
 */

#if XCHAL_HAVE_PTP_MMU
#define CA_WRITEBACK	(0x7)	/* full MMU: write-back cacheable */
#else
#define CA_WRITEBACK	(0x4)	/* region protection: write-back cacheable */
#endif
47
/*
 * Reset vector.
 * Only a trampoline to jump to _start
 * (Note that we have to mark the section writable as the section contains
 * a relocatable literal)
 */

	.section .ResetVector.text, "awx"
	.global _ResetVector
_ResetVector:

	j	1f		# hop over the inline literal below
	.align	4
2:	.long	_start		# literal: absolute address of _start
1:	l32r	a2, 2b		# load _start address from the literal
	jx	a2		# indirect jump (target may be far away)
64
65
/*
 * Processor initialization. We still run in rom space.
 *
 * NOTE: Running in ROM
 *  For Xtensa, we currently don't allow to run some code from ROM but
 *  unpack the data immediately to memory. This requires, for example,
 *  that DDR has been set up before running U-Boot. (See also comments
 *  inline for ways to change it)
 */

	.section .reset.text, "ax"
	.global _start
	.align 4
_start:
	/* Keep a0 = 0 for various initializations */

	movi	a0, 0

	/*
	 * For full MMU cores, put page table at unmapped virtual address.
	 * This ensures that accesses outside the static maps result
	 * in miss exceptions rather than random behaviour.
	 */

#if XCHAL_HAVE_PTP_MMU
	wsr	a0, PTEVADDR
#endif

	/* Disable dbreak debug exceptions (clear every DBREAKC register) */

#if XCHAL_HAVE_DEBUG && XCHAL_NUM_DBREAK > 0
	.set	_index, 0
	.rept	XCHAL_NUM_DBREAK
	wsr	a0, DBREAKC + _index
	.set	_index, _index + 1
	.endr
#endif

	/* Reset windowbase and windowstart */

#if XCHAL_HAVE_WINDOWED
	movi	a3, 1
	wsr	a3, windowstart		/* only the current frame is live */
	wsr	a0, windowbase
	rsync
	movi	a0, 0			/* windowbase might have changed */
#endif

	/*
	 * Vecbase in bitstream may differ from header files
	 * set or check it.
	 */

#if XCHAL_HAVE_VECBASE
	movi	a3, XCHAL_VECBASE_RESET_VADDR	/* VECBASE reset value */
	wsr	a3, VECBASE
#endif

#if XCHAL_HAVE_LOOPS
	/* Disable loops (LCOUNT = 0) */

	wsr	a0, LCOUNT
#endif

	/* Set PS.WOE = 0, PS.EXCM = 0 (for loop), PS.INTLEVEL = EXCM level */

#if XCHAL_HAVE_XEA1
	movi	a2, 1
#else
	movi	a2, XCHAL_EXCM_LEVEL
#endif
	wsr	a2, PS
	rsync

	/* Unlock and invalidate caches */

	___unlock_dcache_all a2, a3
	___invalidate_dcache_all a2, a3
	___unlock_icache_all a2, a3
	___invalidate_icache_all a2, a3

	isync

	/*
	 * Unpack data sections.  Each relocation table entry is three
	 * words: destination start, destination end, source start.
	 */

	movi	a2, __reloc_table_start
	movi	a3, __reloc_table_end

1:	beq	a2, a3, 3f	# no more entries?
	l32i	a4, a2, 0	# start destination (in RAM)
	l32i	a5, a2, 4	# end destination (in RAM)
	l32i	a6, a2, 8	# start source (in ROM)
	addi	a2, a2, 12	# next entry
	beq	a4, a5, 1b	# skip, empty entry
	beq	a4, a6, 1b	# skip, source and destination are the same

	/* If there's memory protection option with 512MB TLB regions and
	 * cache attributes in TLB entries and caching is not inhibited,
	 * enable data/instruction cache for relocated image.
	 */
#if XCHAL_HAVE_SPANNING_WAY && \
	!(CONFIG_IS_ENABLED(SYS_DCACHE_OFF) && \
	  CONFIG_IS_ENABLED(SYS_ICACHE_OFF))
	srli	a7, a4, 29	# a7 = 512MB region of the destination,
	slli	a7, a7, 29	# rounded down to the region base,
	addi	a7, a7, XCHAL_SPANNING_WAY	# plus the spanning way index
#if !CONFIG_IS_ENABLED(SYS_DCACHE_OFF)
	rdtlb1	a8, a7		# read current dTLB attributes,
	srli	a8, a8, 4	# clear the low 4-bit cache-attribute field,
	slli	a8, a8, 4
	addi	a8, a8, CA_WRITEBACK	# set write-back caching,
	wdtlb	a8, a7		# and write the entry back
#endif
#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
	ritlb1	a8, a7		# same procedure for the iTLB
	srli	a8, a8, 4
	slli	a8, a8, 4
	addi	a8, a8, CA_WRITEBACK
	witlb	a8, a7
#endif
	isync
#endif

	/* Copy one word at a time: [a6]... -> [a4]... until a4 reaches a5 */

2:	l32i	a7, a6, 0
	addi	a6, a6, 4
	s32i	a7, a4, 0
	addi	a4, a4, 4
	bltu	a4, a5, 2b
	j	1b

3:	/* All code and initialized data segments have been copied */

	/* Setup PS, PS.WOE = 1, PS.EXCM = 0, PS.INTLEVEL = EXCM level. */

#if __XTENSA_CALL0_ABI__
	movi	a2, XCHAL_EXCM_LEVEL	/* call0 ABI: no window overflow, WOE stays 0 */
#else
	movi	a2, (1<<PS_WOE_BIT) | XCHAL_EXCM_LEVEL
#endif
	wsr	a2, PS
	rsync

	/* Writeback (make the unpacked image visible to instruction fetch) */

	___flush_dcache_all a2, a3

#ifdef __XTENSA_WINDOWED_ABI__
	/*
	 * In windowed ABI caller and call target need to be within the same
	 * gigabyte. Put the rest of the code into the text segment and jump
	 * there.
	 */

	movi	a4, .Lboard_init_code
	jx	a4

	.text
	.align	4
.Lboard_init_code:
#endif

	movi	a0, 0		/* a0 = 0: no return address, ends backtraces */
	/* Initial stack just below U-Boot's text, 16-byte aligned */
	movi	sp, (XTENSA_SYS_TEXT_ADDR - 16) & 0xfffffff0

#ifdef CONFIG_DEBUG_UART
	movi	a4, debug_uart_init
#ifdef __XTENSA_CALL0_ABI__
	callx0	a4
#else
	callx4	a4
#endif
#endif

	/* sp = board_init_f_alloc_reserve(sp): reserve early-malloc/gd area */

	movi	a4, board_init_f_alloc_reserve

#ifdef __XTENSA_CALL0_ABI__
	mov	a2, sp
	callx0	a4
	mov	sp, a2		/* new, lowered stack top */
#else
	mov	a6, sp
	callx4	a4
	movsp	sp, a6		/* movsp keeps spilled window frames consistent */
#endif

	/* board_init_f_init_reserve(base): initialize the reserved area */

	movi	a4, board_init_f_init_reserve

#ifdef __XTENSA_CALL0_ABI__
	callx0	a4
#else
	callx4	a4
#endif

	/*
	 * Call board initialization routine (never returns).
	 */

	movi	a4, board_init_f

#ifdef __XTENSA_CALL0_ABI__
	movi	a2, 0		/* boot_flags argument = 0 */
	callx0	a4
#else
	movi	a6, 0
	callx4	a4
#endif
	/* Never Returns */
	ill			/* trap if it ever does */
274
/*
 * void relocate_code(addr_sp, gd, addr_moni)
 *
 * This "function" does not return, instead it continues in RAM
 * after relocating the monitor code.
 *
 * a2 = addr_sp
 * a3 = gd
 * a4 = destination address
 */
	.text
	.globl relocate_code
	.align	4
relocate_code:
	abi_entry

#ifdef __XTENSA_CALL0_ABI__
	/* call0 ABI: simply switch stacks and call board_init_r(gd, dest) */
	mov	a1, a2		# sp = addr_sp
	mov	a2, a3		# arg0: gd
	mov	a3, a4		# arg1: destination address
	movi	a0, board_init_r
	callx0	a0
#else
	/* We can't movsp here, because the chain of stack frames may cross
	 * the now reserved memory. We need to toss all window frames except
	 * the current, create new pristine stack frame and start from scratch.
	 */
	rsr	a0, windowbase
	ssl	a0		# shift amount = current WINDOWBASE
	movi	a0, 1
	sll	a0, a0		# single bit for our own frame
	wsr	a0, windowstart	# discard all other window frames
	rsync

	movi	a0, 0		# no return address

	/* Reserve 16-byte save area */
	addi	sp, a2, -16
	mov	a6, a3		# arg0: gd
	mov	a7, a4		# arg1: destination address
	movi	a4, board_init_r
	callx4	a4
#endif
	ill			# board_init_r must never return
319
#if XCHAL_HAVE_EXCEPTIONS

/*
 * Exception vectors.
 *
 * Various notes:
 *  - We currently don't use the user exception vector (PS.UM is always 0),
 *    but do define such a vector, just in case. They both jump to the
 *    same exception handler, though.
 *  - We currently only save the bare minimum number of registers:
 *    a0...a15, sar, loop-registers, exception register (epc1, excvaddr,
 *    exccause, depc)
 *  - WINDOWSTART is only saved to identify if registers have been spilled
 *    to the wrong stack (exception stack) while executing the exception
 *    handler.
 */

	.section .KernelExceptionVector.text, "ax"
	.global _KernelExceptionVector
_KernelExceptionVector:

	wsr	a2, EXCSAVE1		# park a2; ExceptionHandler restores it
	movi	a2, ExceptionHandler
	jx	a2			# vectors are tiny; real work is in .text
344
	.section .UserExceptionVector.text, "ax"
	.global _UserExceptionVector
_UserExceptionVector:

	wsr	a2, EXCSAVE1		# park a2; ExceptionHandler restores it
	movi	a2, ExceptionHandler
	jx	a2			# same handler as the kernel vector
352
#if !XCHAL_HAVE_XEA1
	/* Double exception: an exception hit while already handling one.
	 * No recovery is attempted; just report and halt.
	 */
	.section .DoubleExceptionVector.text, "ax"
	.global _DoubleExceptionVector
_DoubleExceptionVector:

#ifdef __XTENSA_CALL0_ABI__
	wsr	a0, EXCSAVE1
	movi	a0, hang	# report and ask user to reset board
	callx0	a0
#else
	wsr	a4, EXCSAVE1
	movi	a4, hang	# report and ask user to reset board
	callx4	a4
#endif
#endif
	/* Does not return here */
369
370
	/*
	 * Common exception handler: builds a pt_regs frame on the stack,
	 * dispatches to the C handler registered in exc_table[EXCCAUSE],
	 * then restores state and returns with rfe.
	 * On entry: original a2 is in EXCSAVE1 (saved by the vector).
	 */
	.text
	.align	4
ExceptionHandler:

	rsr	a2, EXCCAUSE	# find handler

#if XCHAL_HAVE_WINDOWED
	/* Special case for alloca handler (EXCCAUSE 5, raised by movsp) */

	bnei	a2, 5, 1f	# jump if not alloca exception

	addi	a1, a1, -16 - 4	# create a small stack frame
	s32i	a3, a1, 0	# and save a3 (a2 still in excsave1)
	movi	a2, fast_alloca_exception
	jx	a2		# jump to fast_alloca_exception
#endif
	/* All other exceptions go here: */

	/* Create ptrace stack and save a0...a3 */

1:	addi	a2, a1, - PT_SIZE - 16	# frame + 16-byte save area
	s32i	a0, a2, PT_AREG + 0 * 4
	s32i	a1, a2, PT_AREG + 1 * 4
	s32i	a3, a2, PT_AREG + 3 * 4
	rsr	a3, EXCSAVE1		# recover original a2 from the vector
	s32i	a3, a2, PT_AREG + 2 * 4
	mov	a1, a2			# switch sp to the pt_regs frame

	/* Save remaining AR registers */

	s32i	a4, a1, PT_AREG + 4 * 4
	s32i	a5, a1, PT_AREG + 5 * 4
	s32i	a6, a1, PT_AREG + 6 * 4
	s32i	a7, a1, PT_AREG + 7 * 4
	s32i	a8, a1, PT_AREG + 8 * 4
	s32i	a9, a1, PT_AREG + 9 * 4
	s32i	a10, a1, PT_AREG + 10 * 4
	s32i	a11, a1, PT_AREG + 11 * 4
	s32i	a12, a1, PT_AREG + 12 * 4
	s32i	a13, a1, PT_AREG + 13 * 4
	s32i	a14, a1, PT_AREG + 14 * 4
	s32i	a15, a1, PT_AREG + 15 * 4

	/* Save SRs */

#if XCHAL_HAVE_WINDOWED
	rsr	a2, WINDOWSTART
	s32i	a2, a1, PT_WINDOWSTART	# used below to detect spills
#endif

	rsr	a2, SAR
	rsr	a3, EPC1
	rsr	a4, EXCVADDR
	s32i	a2, a1, PT_SAR
	s32i	a3, a1, PT_PC
	s32i	a4, a1, PT_EXCVADDR

#if XCHAL_HAVE_LOOPS
	/* Save loop state; xsr also clears LCOUNT so the handler
	 * can't accidentally continue a zero-overhead loop.
	 */
	movi	a2, 0
	rsr	a3, LBEG
	xsr	a2, LCOUNT
	s32i	a3, a1, PT_LBEG
	rsr	a3, LEND
	s32i	a2, a1, PT_LCOUNT
	s32i	a3, a1, PT_LEND
#endif

	/* Set up C environment and call registered handler */
	/* Setup stack, PS.WOE = 1, PS.EXCM = 0, PS.INTLEVEL = EXCM level. */

	rsr	a2, EXCCAUSE
#if XCHAL_HAVE_XEA1
	movi	a3, (1<<PS_WOE_BIT) | 1
#elif __XTENSA_CALL0_ABI__
	movi	a3, XCHAL_EXCM_LEVEL
#else
	movi	a3, (1<<PS_WOE_BIT) | XCHAL_EXCM_LEVEL
#endif
	xsr	a3, PS		# install new PS, a3 = PS at exception time
	rsync
	s32i	a2, a1, PT_EXCCAUSE
	s32i	a3, a1, PT_PS

	/* Dispatch: exc_table[EXCCAUSE] holds the registered handler */

	movi	a0, exc_table
	addx4	a0, a2, a0
	l32i	a0, a0, 0
#ifdef __XTENSA_CALL0_ABI__
	mov	a2, a1		# Provide stack frame as only argument
	callx0	a0
	l32i	a3, a1, PT_PS	# call0: a3 is caller-saved, reload saved PS
#else
	mov	a6, a1		# Provide stack frame as only argument
	callx4	a0		# windowed: our a3 survives the call
#endif

	/* Restore PS and go to exception mode (PS.EXCM=1) */

	wsr	a3, PS

	/* Restore SR registers */

#if XCHAL_HAVE_LOOPS
	l32i	a2, a1, PT_LBEG
	l32i	a3, a1, PT_LEND
	l32i	a4, a1, PT_LCOUNT
	wsr	a2, LBEG
	wsr	a3, LEND
	wsr	a4, LCOUNT
#endif

	l32i	a2, a1, PT_SAR
	l32i	a3, a1, PT_PC
	wsr	a2, SAR
	wsr	a3, EPC1

#if XCHAL_HAVE_WINDOWED
	/* Do we need to simulate a MOVSP? */

	l32i	a2, a1, PT_WINDOWSTART
	addi	a3, a2, -1
	and	a2, a2, a3		# >1 windowstart bit at entry?
	beqz	a2, 1f		# Skip if regs were spilled before exc.

	rsr	a2, WINDOWSTART
	addi	a3, a2, -1
	and	a2, a2, a3
	bnez	a2, 1f		# Skip if registers aren't spilled now

	/* Frames were spilled while running the handler: copy the
	 * 16-byte save area from below the pt_regs frame (a1 - 16)
	 * to just above it (a1 + PT_SIZE), as movsp would have done.
	 */
	addi	a2, a1, -16
	l32i	a4, a2, 0
	l32i	a5, a2, 4
	s32i	a4, a1, PT_SIZE + 0
	s32i	a5, a1, PT_SIZE + 4
	l32i	a4, a2, 8
	l32i	a5, a2, 12
	s32i	a4, a1, PT_SIZE + 8
	s32i	a5, a1, PT_SIZE + 12
#endif

	/* Restore address registers */

1:	l32i	a15, a1, PT_AREG + 15 * 4
	l32i	a14, a1, PT_AREG + 14 * 4
	l32i	a13, a1, PT_AREG + 13 * 4
	l32i	a12, a1, PT_AREG + 12 * 4
	l32i	a11, a1, PT_AREG + 11 * 4
	l32i	a10, a1, PT_AREG + 10 * 4
	l32i	a9, a1, PT_AREG + 9 * 4
	l32i	a8, a1, PT_AREG + 8 * 4
	l32i	a7, a1, PT_AREG + 7 * 4
	l32i	a6, a1, PT_AREG + 6 * 4
	l32i	a5, a1, PT_AREG + 5 * 4
	l32i	a4, a1, PT_AREG + 4 * 4
	l32i	a3, a1, PT_AREG + 3 * 4
	l32i	a2, a1, PT_AREG + 2 * 4
	l32i	a0, a1, PT_AREG + 0 * 4

	l32i	a1, a1, PT_AREG + 1 * 4	# Remove ptrace stack frame

	rfe

#endif /* XCHAL_HAVE_EXCEPTIONS */
533
#if XCHAL_HAVE_WINDOWED

/*
 * Window overflow and underflow handlers.
 * The handlers must be 64 bytes apart, first starting with the underflow
 * handlers underflow-4 to underflow-12, then the overflow handlers
 * overflow-4 to overflow-12.
 *
 * Note: We rerun the underflow handlers if we hit an exception, so
 * we try to access any page that would cause a page fault early.
 */

	.section .WindowVectors.text, "ax"

/* 4-Register Window Overflow Vector (Handler) */

	.align	64
.global _WindowOverflow4
_WindowOverflow4:
	s32e	a0, a5, -16	# spill a0..a3 of the old window into the
	s32e	a1, a5, -12	# 16-byte save area below a5 (caller's sp)
	s32e	a2, a5, -8
	s32e	a3, a5, -4
	rfwo
558
559
/* 4-Register Window Underflow Vector (Handler) */

	.align	64
.global _WindowUnderflow4
_WindowUnderflow4:
	l32e	a0, a5, -16	# reload a0..a3 from the save area below a5
	l32e	a1, a5, -12
	l32e	a2, a5, -8
	l32e	a3, a5, -4
	rfwu
570
/*
 * fast_alloca_exception — reached from ExceptionHandler for EXCCAUSE 5
 * (alloca/movsp). Register state on entry:
 *
 * a0: a0
 * a1: new stack pointer = a1 - 16 - 4
 * a2: available, saved in excsave1
 * a3: available, saved on stack *a1
 *
 * The numbered comments are byte offsets from _WindowUnderflow4: this
 * code must begin exactly at _WindowUnderflow4 + 16 and fit before the
 * next 64-byte-aligned vector, so do not insert instructions casually.
 */

/* 15*/	.byte	0xff	# pad so the label below lands at offset 16

fast_alloca_exception:	/* must be at _WindowUnderflow4 + 16 */

	/* Set PS.OWB to the current WINDOWBASE (read-modify-write via
	 * two xors) so the eventual rfwu returns to the right window.
	 */
/* 16*/	rsr	a2, PS
/* 19*/	rsr	a3, WINDOWBASE
	/* NOTE(review): width operand is PS_OWB_SHIFT (8), but OWB is a
	 * 4-bit field — confirm the wider extract is intentional/harmless.
	 */
/* 22*/	extui	a2, a2, PS_OWB_SHIFT, PS_OWB_SHIFT
/* 25*/	xor	a2, a2, a3
/* 28*/	rsr	a3, PS
/* 31*/	slli	a2, a2, PS_OWB_SHIFT
/* 34*/	xor	a2, a3, a2
/* 37*/	wsr	a2, PS

/* 40*/	_l32i	a3, a1, 0	# restore a3 saved by ExceptionHandler
/* 43*/	addi	a1, a1, 16 + 4	# drop its small stack frame
/* 46*/	rsr	a2, EXCSAVE1	# restore a2

	/* Rotate back and rerun the matching underflow handler, sized by
	 * the call-increment bits (31:30) of the caller's return address.
	 */
/* 49*/	rotw	-1
/* 52*/	_bbci.l	a4, 31, _WindowUnderflow4	/* 0x: call4 */
/* 55*/	rotw	-1
/* 58*/	_bbci.l	a8, 30, _WindowUnderflow8	/* 10: call8 */
/* 61*/	_j	__WindowUnderflow12		/* 11: call12 */
/* 64*/
601
/* 8-Register Window Overflow Vector (Handler) */

	.align	64
.global _WindowOverflow8
_WindowOverflow8:
	s32e	a0, a9, -16	# spill a0..a3 below a9 (caller's sp)
	l32e	a0, a1, -12	# a0 = sp of the frame one level down
	s32e	a2, a9, -8
	s32e	a1, a9, -12
	s32e	a3, a9, -4
	s32e	a4, a0, -32	# spill a4..a7 below that frame's sp
	s32e	a5, a0, -28
	s32e	a6, a0, -24
	s32e	a7, a0, -20
	rfwo
617
/* 8-Register Window Underflow Vector (Handler) */

	.align	64
.global _WindowUnderflow8
_WindowUnderflow8:
	l32e	a1, a9, -12	# reload a0..a3 from the save area below a9
	l32e	a0, a9, -16
	l32e	a7, a1, -12	# a7 = sp of the frame one level down (temp)
	l32e	a2, a9, -8
	l32e	a4, a7, -32	# reload a4..a7 from below that sp
	l32e	a3, a9, -4
	l32e	a5, a7, -28
	l32e	a6, a7, -24
	l32e	a7, a7, -20	# a7 loaded last, overwriting the temp sp
	rfwu
633
/* 12-Register Window Overflow Vector (Handler) */

	.align	64
.global _WindowOverflow12
_WindowOverflow12:
	s32e	a0, a13, -16	# spill a0..a3 below a13 (caller's sp)
	l32e	a0, a1, -12	# a0 = sp of the frame one level down
	s32e	a1, a13, -12
	s32e	a2, a13, -8
	s32e	a3, a13, -4
	s32e	a4, a0, -48	# spill a4..a11 below that frame's sp
	s32e	a5, a0, -44
	s32e	a6, a0, -40
	s32e	a7, a0, -36
	s32e	a8, a0, -32
	s32e	a9, a0, -28
	s32e	a10, a0, -24
	s32e	a11, a0, -20
	rfwo
653
/* 12-Register Window Underflow Vector (Handler) */

	/* __WindowUnderflow12: alternate entry used by fast_alloca_exception;
	 * the extra rotw sits 3 bytes before the aligned vector proper.
	 */
	.org _WindowOverflow12 + 64 - 3
__WindowUnderflow12:
	rotw	-1
.global _WindowUnderflow12
_WindowUnderflow12:
	l32e	a1, a13, -12	# reload a0..a3 from the save area below a13
	l32e	a0, a13, -16
	l32e	a11, a1, -12	# a11 = sp of the frame one level down (temp)
	l32e	a2, a13, -8
	l32e	a4, a11, -48	# reload a4..a11 from below that sp
	l32e	a8, a11, -32
	l32e	a3, a13, -4
	l32e	a5, a11, -44
	l32e	a6, a11, -40
	l32e	a7, a11, -36
	l32e	a9, a11, -28
	l32e	a10, a11, -24
	l32e	a11, a11, -20	# a11 loaded last, overwriting the temp sp
	rfwu

#endif /* XCHAL_HAVE_WINDOWED */