/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * Cache-handling routines for MIPS CPUs
 *
 * Copyright (c) 2003 Wolfgang Denk <wd@denx.de>
 */

#include <asm-offsets.h>
#include <config.h>
#include <asm/asm.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/addrspace.h>
#include <asm/cacheops.h>
#include <asm/cm.h>

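/*
 * f_fill64: store \val to 64 bytes of memory starting at \dst + \offset,
 * using 8 LONG_S stores on 64-bit builds or 16 on 32-bit builds.
 */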
	.macro	f_fill64 dst, offset, val
	LONG_S	\val, (\offset +  0 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  1 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  2 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  3 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  4 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  5 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  6 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  7 * LONGSIZE)(\dst)
#if LONGSIZE == 4
	LONG_S	\val, (\offset +  8 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  9 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 10 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 11 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 12 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 13 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 14 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 15 * LONGSIZE)(\dst)
#endif
	.endm

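/*
 * cache_loop: perform the cache operation \op on every line in the index
 * range [\curr, \end), advancing \curr by \line_sz bytes per iteration.
 */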
	.macro	cache_loop	curr, end, line_sz, op
10:	cache		\op, 0(\curr)
	PTR_ADDU	\curr, \curr, \line_sz
	bne		\curr, \end, 10b
	.endm

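/*
 * l1_info: probe an L1 cache via CP0 Config1. \off selects the field group
 * to read, MIPS_CONF1_IA_SHF for the I-cache or MIPS_CONF1_DA_SHF for the
 * D-cache. On exit \line_sz holds the line size in bytes and \sz the total
 * cache size in bytes (zero if no cache is present).
 */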
	.macro	l1_info	sz, line_sz, off
	.set	push
	.set	noat

	mfc0	$1, CP0_CONFIG, 1

	/* detect line size */
	srl	\line_sz, $1, \off + MIPS_CONF1_DL_SHF - MIPS_CONF1_DA_SHF
	andi	\line_sz, \line_sz, (MIPS_CONF1_DL >> MIPS_CONF1_DL_SHF)
	move	\sz, zero
	beqz	\line_sz, 10f
	li	\sz, 2
	sllv	\line_sz, \sz, \line_sz

	/* detect associativity */
	srl	\sz, $1, \off + MIPS_CONF1_DA_SHF - MIPS_CONF1_DA_SHF
	andi	\sz, \sz, (MIPS_CONF1_DA >> MIPS_CONF1_DA_SHF)
	addiu	\sz, \sz, 1

	/* sz *= line_sz */
	mul	\sz, \sz, \line_sz

	/* detect log32(sets) */
	srl	$1, $1, \off + MIPS_CONF1_DS_SHF - MIPS_CONF1_DA_SHF
	andi	$1, $1, (MIPS_CONF1_DS >> MIPS_CONF1_DS_SHF)
	addiu	$1, $1, 1
	andi	$1, $1, 0x7

	/* sz <<= log32(sets) */
	sllv	\sz, \sz, $1

	/* sz *= 32 */
	li	$1, 32
	mul	\sz, \sz, $1
10:
	.set	pop
	.endm

/*
 * Changing the kernel mode (kseg0) cacheability must be done while executing
 * from KSEG1, so jump to the KSEG1 alias of change_k0_cca before calling it.
 * change_k0_cca itself clears all hazards when returning.
 */
	.macro	change_k0_cca_kseg1 mode
	PTR_LA	t0, change_k0_cca
	li	t1, CPHYSADDR(~0)
	and	t0, t0, t1
	PTR_LI	t1, CKSEG1
	or	t0, t0, t1
	li	a0, \mode
	jalr	t0
	.endm

/*
 * mips_cache_reset - low level initialisation of the primary caches
 *
 * This routine initialises the primary caches to ensure that they have good
 * parity. It must be called by the ROM before any cached locations are used
 * to prevent the possibility of data with bad parity being written to memory.
 *
 * To initialise the instruction cache it is essential that a source of data
 * with good parity is available. This routine will initialise an area of
 * memory starting at location zero to be used as a source of parity.
 *
 * Note that this function does not follow the standard calling convention &
 * may clobber typically callee-saved registers.
 *
 * RETURNS: N/A
 *
 */
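/* Register assignments used throughout mips_cache_reset */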
#define R_RETURN	s0
#define R_IC_SIZE	s1
#define R_IC_LINE	s2
#define R_DC_SIZE	s3
#define R_DC_LINE	s4
#define R_L2_SIZE	s5
#define R_L2_LINE	s6
#define R_L2_BYPASSED	s7
#define R_L2_L2C	t8
LEAF(mips_cache_reset)
	move	R_RETURN, ra

#ifdef CONFIG_MIPS_L2_CACHE
	/*
	 * For there to be an L2 present, Config2 must be present. If it isn't
	 * then we proceed knowing there's no L2 cache.
	 */
	move	R_L2_SIZE, zero
	move	R_L2_LINE, zero
	move	R_L2_BYPASSED, zero
	move	R_L2_L2C, zero
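	/* Config1.M (bit 31) indicates whether Config2 is implemented */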
	mfc0	t0, CP0_CONFIG, 1
	bgez	t0, l2_probe_done

	/*
	 * From MIPSr6 onwards the L2 cache configuration might not be reported
	 * by Config2. The Config5.L2C bit indicates whether this is the case,
	 * and if it is then we need knowledge of where else to look. For cores
	 * from Imagination Technologies this is a CM GCR.
	 */
# if __mips_isa_rev >= 6
	/* Check that Config5 exists */
	mfc0	t0, CP0_CONFIG, 2
	bgez	t0, l2_probe_cop0
	mfc0	t0, CP0_CONFIG, 3
	bgez	t0, l2_probe_cop0
	mfc0	t0, CP0_CONFIG, 4
	bgez	t0, l2_probe_cop0

	/* Check Config5.L2C is set */
	mfc0	t0, CP0_CONFIG, 5
	and	R_L2_L2C, t0, MIPS_CONF5_L2C
	beqz	R_L2_L2C, l2_probe_cop0

	/* Config5.L2C is set */
#  ifdef CONFIG_MIPS_CM
	/* The CM will provide L2 configuration */
	PTR_LI	t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
	lw	t1, GCR_L2_CONFIG(t0)
	bgez	t1, l2_probe_done

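	/* Compute the L2 line size & total size from the GCR_L2_CONFIG fields */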
	ext	R_L2_LINE, t1, \
		GCR_L2_CONFIG_LINESZ_SHIFT, GCR_L2_CONFIG_LINESZ_BITS
	beqz	R_L2_LINE, l2_probe_done
	li	t2, 2
	sllv	R_L2_LINE, t2, R_L2_LINE

	ext	t2, t1, GCR_L2_CONFIG_ASSOC_SHIFT, GCR_L2_CONFIG_ASSOC_BITS
	addiu	t2, t2, 1
	mul	R_L2_SIZE, R_L2_LINE, t2

	ext	t2, t1, GCR_L2_CONFIG_SETSZ_SHIFT, GCR_L2_CONFIG_SETSZ_BITS
	sllv	R_L2_SIZE, R_L2_SIZE, t2
	li	t2, 64
	mul	R_L2_SIZE, R_L2_SIZE, t2

	/* Bypass the L2 cache so that we can init the L1s early */
	or	t1, t1, GCR_L2_CONFIG_BYPASS
	sw	t1, GCR_L2_CONFIG(t0)
	sync
	li	R_L2_BYPASSED, 1

	/* Zero the L2 tag registers */
	sw	zero, GCR_L2_TAG_ADDR(t0)
	sw	zero, GCR_L2_TAG_ADDR_UPPER(t0)
	sw	zero, GCR_L2_TAG_STATE(t0)
	sw	zero, GCR_L2_TAG_STATE_UPPER(t0)
	sw	zero, GCR_L2_DATA(t0)
	sw	zero, GCR_L2_DATA_UPPER(t0)
	sync
#  else
	/* We don't know how to retrieve L2 configuration on this system */
#  endif
	b	l2_probe_done
# endif

/*
 * For pre-r6 systems, or r6 systems with Config5.L2C==0, probe the L2
 * cache configuration from the cop0 Config2 register.
 */
l2_probe_cop0:
	mfc0	t0, CP0_CONFIG, 2

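	/* Compute the L2 line size & total size from Config2.SL, .SA & .SS */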
	srl	R_L2_LINE, t0, MIPS_CONF2_SL_SHF
	andi	R_L2_LINE, R_L2_LINE, MIPS_CONF2_SL >> MIPS_CONF2_SL_SHF
	beqz	R_L2_LINE, l2_probe_done
	li	t1, 2
	sllv	R_L2_LINE, t1, R_L2_LINE

	srl	t1, t0, MIPS_CONF2_SA_SHF
	andi	t1, t1, MIPS_CONF2_SA >> MIPS_CONF2_SA_SHF
	addiu	t1, t1, 1
	mul	R_L2_SIZE, R_L2_LINE, t1

	srl	t1, t0, MIPS_CONF2_SS_SHF
	andi	t1, t1, MIPS_CONF2_SS >> MIPS_CONF2_SS_SHF
	sllv	R_L2_SIZE, R_L2_SIZE, t1
	li	t1, 64
	mul	R_L2_SIZE, R_L2_SIZE, t1

	/* Attempt to bypass the L2 so that we can init the L1s early */
	or	t0, t0, MIPS_CONF2_L2B
	mtc0	t0, CP0_CONFIG, 2
	ehb
	mfc0	t0, CP0_CONFIG, 2
	and	R_L2_BYPASSED, t0, MIPS_CONF2_L2B

	/* Zero the L2 tag registers */
	mtc0	zero, CP0_TAGLO, 4
	ehb
l2_probe_done:
#endif

#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
	li	R_IC_SIZE, CONFIG_SYS_ICACHE_SIZE
	li	R_IC_LINE, CONFIG_SYS_ICACHE_LINE_SIZE
#else
	l1_info	R_IC_SIZE, R_IC_LINE, MIPS_CONF1_IA_SHF
#endif

#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
	li	R_DC_SIZE, CONFIG_SYS_DCACHE_SIZE
	li	R_DC_LINE, CONFIG_SYS_DCACHE_LINE_SIZE
#else
	l1_info	R_DC_SIZE, R_DC_LINE, MIPS_CONF1_DA_SHF
#endif

#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD

	/* Determine the largest L1 cache size */
#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
#if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE
	li	v0, CONFIG_SYS_ICACHE_SIZE
#else
	li	v0, CONFIG_SYS_DCACHE_SIZE
#endif
#else
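	/* v0 = max(icache size, dcache size), computed branchlessly */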
	move	v0, R_IC_SIZE
	sltu	t1, R_IC_SIZE, R_DC_SIZE
	movn	v0, R_DC_SIZE, t1
#endif
	/*
	 * Now clear that much memory, starting from the uncached alias of the
	 * cache index base.
	 */
	PTR_LI		a0, CKSEG1ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	PTR_ADDU	a1, a0, v0
2:	PTR_ADDIU	a0, 64
	f_fill64	a0, -64, zero
	bne		a0, a1, 2b

#endif /* CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD */

#ifdef CONFIG_MIPS_L2_CACHE
	/*
	 * If the L2 is bypassed, init the L1 first so that we can execute the
	 * rest of the cache initialisation using the L1 instruction cache.
	 */
	bnez	R_L2_BYPASSED, l1_init

l2_init:
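	/* Store the zeroed tag to every line to invalidate the whole L2 */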
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	PTR_ADDU t1, t0, R_L2_SIZE
1:	cache	INDEX_STORE_TAG_SD, 0(t0)
	PTR_ADDU t0, t0, R_L2_LINE
	bne	t0, t1, 1b

	/*
	 * If the L2 was bypassed then we already initialised the L1s before
	 * the L2, so we are now done.
	 */
	bnez	R_L2_BYPASSED, l2_unbypass
#endif

	/*
	 * The TagLo registers used depend upon the CPU implementation, but the
	 * architecture requires that it is safe for software to write to both
	 * TagLo selects 0 & 2 covering supported cases.
	 */
l1_init:
	mtc0	zero, CP0_TAGLO
	mtc0	zero, CP0_TAGLO, 2
	ehb

	/*
	 * The caches are probably in an indeterminate state, so we force good
	 * parity into them by doing an invalidate for each line. If
	 * CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD is set then we'll proceed to
	 * perform a load/fill & a further invalidate for each line, assuming
	 * that the bottom of RAM (having just been cleared) will generate good
	 * parity for the cache.
	 */

	/*
	 * Initialize the I-cache first,
	 */
	blez	R_IC_SIZE, 1f
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	PTR_ADDU t1, t0, R_IC_SIZE
	/* clear tag to invalidate */
	cache_loop	t0, t1, R_IC_LINE, INDEX_STORE_TAG_I
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* fill once, so data field parity is correct */
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	cache_loop	t0, t1, R_IC_LINE, FILL
	/* invalidate again - prudent but not strictly necessary */
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	cache_loop	t0, t1, R_IC_LINE, INDEX_STORE_TAG_I
#endif
	sync

	/*
	 * Enable use of the I-cache by setting Config.K0.
	 */
	change_k0_cca_kseg1 CONF_CM_CACHABLE_NONCOHERENT

	/*
	 * then initialize D-cache.
	 */
1:	blez	R_DC_SIZE, 3f
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	PTR_ADDU t1, t0, R_DC_SIZE
	/* clear all tags */
	cache_loop	t0, t1, R_DC_LINE, INDEX_STORE_TAG_D
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* load from each line (in cached space) */
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
2:	LONG_L	zero, 0(t0)
	PTR_ADDU t0, R_DC_LINE
	bne	t0, t1, 2b
	/* clear all tags */
	PTR_LI	t0, CKSEG0ADDR(CONFIG_MIPS_CACHE_INDEX_BASE)
	cache_loop	t0, t1, R_DC_LINE, INDEX_STORE_TAG_D
#endif
3:

#ifdef CONFIG_MIPS_L2_CACHE
	/* If the L2 isn't bypassed then we're done */
	beqz	R_L2_BYPASSED, return

	/* The L2 is bypassed - go initialise it */
	b	l2_init

l2_unbypass:
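	/* Clear the bypass bit now that the L1 caches are usable */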
# if __mips_isa_rev >= 6
	beqz	R_L2_L2C, 1f

	li	t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
	lw	t1, GCR_L2_CONFIG(t0)
	xor	t1, t1, GCR_L2_CONFIG_BYPASS
	sw	t1, GCR_L2_CONFIG(t0)
	sync
	ehb
	b	2f
# endif
1:	mfc0	t0, CP0_CONFIG, 2
	xor	t0, t0, MIPS_CONF2_L2B
	mtc0	t0, CP0_CONFIG, 2
	ehb

2:
# ifdef CONFIG_MIPS_CM
	/* Config3 must exist for a CM to be present */
	mfc0	t0, CP0_CONFIG, 1
	bgez	t0, 2f
	mfc0	t0, CP0_CONFIG, 2
	bgez	t0, 2f

	/* Check Config3.CMGCR to determine CM presence */
	mfc0	t0, CP0_CONFIG, 3
	and	t0, t0, MIPS_CONF3_CMGCR
	beqz	t0, 2f

	/* Change Config.K0 to a coherent CCA */
	change_k0_cca_kseg1 CONF_CM_CACHABLE_COW

	/*
	 * Join the coherent domain such that the caches of this core are kept
	 * coherent with those of other cores.
	 */
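	/* CM 3 & later take a single enable bit; older CMs a coherence domain mask */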
	PTR_LI	t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
	lw	t1, GCR_REV(t0)
	li	t2, GCR_REV_CM3
	li	t3, GCR_Cx_COHERENCE_EN
	bge	t1, t2, 1f
	li	t3, GCR_Cx_COHERENCE_DOM_EN
1:	sw	t3, GCR_Cx_COHERENCE(t0)
	ehb
2:
# endif
#endif

return:
	/* Ensure all cache operations complete before returning */
	sync
	jr	R_RETURN
	END(mips_cache_reset)

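/*
 * mips_cache_disable: switch kseg0 accesses to uncached by setting Config.K0
 * to CONF_CM_UNCACHED via change_k0_cca, which is called from KSEG1.
 */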
LEAF(mips_cache_disable)
	move	R_RETURN, ra
	change_k0_cca_kseg1 CONF_CM_UNCACHED
	jr	R_RETURN
	END(mips_cache_disable)

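/*
 * change_k0_cca: set the kseg0 cache coherency attribute (Config.K0) to the
 * CCA passed in a0. The jr.hb return clears the execution & instruction
 * hazards created by writing Config.
 */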
LEAF(change_k0_cca)
	mfc0	t0, CP0_CONFIG
#if __mips_isa_rev >= 2
	ins	t0, a0, 0, 3
#else
	xor	a0, a0, t0
	andi	a0, a0, CONF_CM_CMASK
	xor	a0, a0, t0
	move	t0, a0
#endif
	mtc0	t0, CP0_CONFIG

	jr.hb	ra
	END(change_k0_cca)