blob: 8d42ef4823e9fdc2cc6a25cb79612d9b08a98eec [file] [log] [blame]
/*
 * arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This file contains arm architecture specific defines
 * for the different processors.
 *
 * Do not include any C declarations in this file - it is included by
 * assembler source.
 */

#include <config.h>
#include <asm/unified.h>

Matthias Weisserd8834a12011-03-10 21:36:32 +000020/*
21 * Endian independent macros for shifting bytes within registers.
22 */
23#ifndef __ARMEB__
Stefan Agner75d7a0d2014-12-18 18:10:33 +010024#define lspull lsr
25#define lspush lsl
Matthias Weisserd8834a12011-03-10 21:36:32 +000026#define get_byte_0 lsl #0
27#define get_byte_1 lsr #8
28#define get_byte_2 lsr #16
29#define get_byte_3 lsr #24
30#define put_byte_0 lsl #0
31#define put_byte_1 lsl #8
32#define put_byte_2 lsl #16
33#define put_byte_3 lsl #24
34#else
Stefan Agner75d7a0d2014-12-18 18:10:33 +010035#define lspull lsl
36#define lspush lsr
Matthias Weisserd8834a12011-03-10 21:36:32 +000037#define get_byte_0 lsr #24
38#define get_byte_1 lsr #16
39#define get_byte_2 lsr #8
40#define get_byte_3 lsl #0
41#define put_byte_0 lsl #24
42#define put_byte_1 lsl #16
43#define put_byte_2 lsl #8
44#define put_byte_3 lsl #0
45#endif
46
47/*
48 * Data preload for architectures that support it
49 */
50#if defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__) || \
51 defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
52 defined(__ARM_ARCH_6T2__) || defined(__ARM_ARCH_6Z__) || \
53 defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_7A__) || \
54 defined(__ARM_ARCH_7R__)
55#define PLD(code...) code
56#else
57#define PLD(code...)
58#endif
59
Tom Rini431afb42017-03-02 09:59:30 -050060/*
Sergei Antonov583f1b22022-08-21 16:34:20 +030061 * Use 'bx lr' everywhere except ARMv4 (without 'T') where only 'mov pc, lr'
62 * works
Tom Rini431afb42017-03-02 09:59:30 -050063 */
Stefan Agner75d7a0d2014-12-18 18:10:33 +010064 .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
65 .macro ret\c, reg
Sergei Antonov583f1b22022-08-21 16:34:20 +030066
67 /* ARMv4- don't know bx lr but the assembler fails to see that */
68#ifdef __ARM_ARCH_4__
69 mov\c pc, \reg
70#else
Stefan Agner75d7a0d2014-12-18 18:10:33 +010071 .ifeqs "\reg", "lr"
72 bx\c \reg
73 .else
74 mov\c pc, \reg
75 .endif
Sergei Antonov583f1b22022-08-21 16:34:20 +030076#endif
Stefan Agner75d7a0d2014-12-18 18:10:33 +010077 .endm
78 .endr
79
Matthias Weisserd8834a12011-03-10 21:36:32 +000080/*
Stefan Agner75d7a0d2014-12-18 18:10:33 +010081 * Cache aligned, used for optimized memcpy/memset
82 * In the kernel this is only enabled for Feroceon CPU's...
83 * We disable it especially for Thumb builds since those instructions
84 * are not made in a Thumb ready way...
Matthias Weisserd8834a12011-03-10 21:36:32 +000085 */
Tom Rini3a649402017-03-18 09:01:44 -040086#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
Stefan Agner75d7a0d2014-12-18 18:10:33 +010087#define CALGN(code...)
88#else
Matthias Weisserd8834a12011-03-10 21:36:32 +000089#define CALGN(code...) code
Stefan Agner75d7a0d2014-12-18 18:10:33 +010090#endif