/*
 * Copyright (C) 2005 - 2013 Tensilica Inc.
 * Copyright (C) 2014 - 2016 Cadence Design Systems Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#ifndef _XTENSA_ASMMACRO_H
#define _XTENSA_ASMMACRO_H

#include <asm/arch/core.h>

/*
 * Function entry and return macros for supported ABIs.
 */

#if defined(__XTENSA_WINDOWED_ABI__)
#define abi_entry	entry	sp, 16
#define abi_ret		retw
#elif defined(__XTENSA_CALL0_ABI__)
#define abi_entry
#define abi_ret		ret
#else
#error Unsupported Xtensa ABI
#endif
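
/*
 * Example (a minimal sketch, not from this tree): a hypothetical leaf
 * function built on these macros.  From the callee's point of view the
 * first argument and the return value both live in a2 under either ABI,
 * so the body works unchanged whichever pair of definitions is active:
 *
 *	.text
 *	.align	4
 *	.global	add_one			# hypothetical symbol name
 *	.type	add_one, @function
 * add_one:
 *	abi_entry			# 'entry sp, 16' or nothing
 *	addi	a2, a2, 1		# return (first argument + 1)
 *	abi_ret				# 'retw' or 'ret'
 */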

/*
 * Some little helpers for loops. Use zero-overhead-loops
 * where applicable and if supported by the processor.
 *
 * __loopi ar, at, size, incr
 *	ar	register initialized with the start address
 *	at	scratch register used by macro
 *	size	size immediate value
 *	incr	increment
 *
 * __loops ar, as, at, incr_log2[, mask_log2][, cond][, ncond]
 *	ar	register initialized with the start address
 *	as	register initialized with the size
 *	at	scratch register used by macro
 *	incr_log2	increment [in log2]
 *	mask_log2	mask [in log2]
 *	cond		true condition (used in loop'cond')
 *	ncond		false condition (used in b'ncond')
 *
 * __loopt ar, as, at, incr_log2
 *	ar	register initialized with the start address
 *	as	register initialized with the end address
 *	at	scratch register used by macro
 *	incr_log2	increment [in log2]
 *
 * __loop as
 *	restart loop. 'as' register must not have been modified!
 *
 * __endla ar, as, incr
 *	ar	start address (modified)
 *	as	scratch register used by __loops/__loopi macros or
 *		end address used by __loopt macro
 *	incr	increment
 *
 * An illustrative usage sketch follows below.
 */
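
/*
 * Example (a minimal sketch, not from this tree): zeroing a fixed
 * 32-byte block with __loopi/__endla.  The register roles are
 * illustrative assumptions: a3 = start address (modified), a4 = scratch
 * used by the macros, a5 = fill value.
 *
 *	movi	a5, 0			# value to store
 *	__loopi	a3, a4, 32, 4		# 32 bytes, one word per pass
 *	s32i	a5, a3, 0		# store one word at a3
 *	__endla	a3, a4, 4		# advance a3 by 4 and close the loop
 */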

#if XCHAL_HAVE_LOOPS

	.macro	__loopi ar, at, size, incr
	movi	\at, ((\size + \incr - 1) / (\incr))
	loop	\at, 99f
	.endm


	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond
	.ifgt \incr_log2 - 1
	addi	\at, \as, (1 << \incr_log2) - 1
	.ifnc \mask_log2,
	extui	\at, \at, \incr_log2, \mask_log2
	.else
	srli	\at, \at, \incr_log2
	.endif
	.endif
	loop\cond	\at, 99f
	.endm


	.macro	__loopt	ar, as, at, incr_log2
	sub	\at, \as, \ar
	.ifgt \incr_log2 - 1
	addi	\at, \at, (1 << \incr_log2) - 1
	srli	\at, \at, \incr_log2
	.endif
	loop	\at, 99f
	.endm


	.macro	__loop	as
	loop	\as, 99f
	.endm


	.macro	__endl	ar, as
99:
	.endm

#else	/* !XCHAL_HAVE_LOOPS */

	.macro	__loopi ar, at, size, incr
	movi	\at, ((\size + \incr - 1) / (\incr))
	addi	\at, \ar, \size
98:
	.endm


	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond
	.ifnc \mask_log2,
	extui	\at, \as, \incr_log2, \mask_log2
	.else
	.ifnc \ncond,
	srli	\at, \as, \incr_log2
	.endif
	.endif
	.ifnc \ncond,
	b\ncond	\at, 99f

	.endif
	.ifnc \mask_log2,
	slli	\at, \at, \incr_log2
	add	\at, \ar, \at
	.else
	add	\at, \ar, \as
	.endif
98:
	.endm

	.macro	__loopt	ar, as, at, incr_log2
98:
	.endm


	.macro	__loop	as
98:
	.endm


	.macro	__endl	ar, as
	bltu	\ar, \as, 98b
99:
	.endm

#endif	/* XCHAL_HAVE_LOOPS */


	.macro	__endla	ar, as, incr
	addi	\ar, \ar, \incr
	__endl	\ar \as
	.endm
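
/*
 * Example (a minimal sketch, not from this tree): copying a word-aligned
 * buffer with __loopt/__endla.  The register roles are illustrative
 * assumptions: a2 = destination, a3 = source, a4 = source end address,
 * a5/a6 = scratch.  A non-zero length is assumed, since __loopt has no
 * cond/ncond parameters to skip an empty range.
 *
 *	__loopt	a3, a4, a6, 2		# one word (1 << 2 bytes) per pass
 *	l32i	a5, a3, 0		# load word from source
 *	s32i	a5, a2, 0		# store word to destination
 *	addi	a2, a2, 4		# advance destination
 *	__endla	a3, a4, 4		# advance source and close the loop
 */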


#endif /* _XTENSA_ASMMACRO_H */