/*
 * include/asm-ppc/cache.h
 */
#ifndef __ARCH_PPC_CACHE_H
#define __ARCH_PPC_CACHE_H

#include <asm/processor.h>

/* bytes per L1 cache line */
#if defined(CONFIG_MPC8xx)
#define L1_CACHE_SHIFT	4
#elif defined(CONFIG_PPC64BRIDGE)
#define L1_CACHE_SHIFT	7
#elif defined(CONFIG_E500MC)
#define L1_CACHE_SHIFT	6
#else
#define L1_CACHE_SHIFT	5
#endif

#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
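
/*
 * For reference, the selection above yields 16-byte cache lines on MPC8xx
 * (1 << 4), 128-byte lines with CONFIG_PPC64BRIDGE (1 << 7), 64-byte lines
 * on e500mc (1 << 6) and 32-byte lines (1 << 5) on all other PowerPC
 * configurations covered here.
 */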

/*
 * Use the L1 data cache line size value for the minimum DMA buffer alignment
 * on PowerPC.
 */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
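
/*
 * Illustrative sketch only: a driver that shares a buffer with a DMA engine
 * might align (and size) it to ARCH_DMA_MINALIGN so that no unrelated data
 * shares its cache lines, e.g.
 *
 *	static u8 rx_buf[256] __attribute__((aligned(ARCH_DMA_MINALIGN)));
 *
 * The name rx_buf is hypothetical; in U-Boot the cache-aligned buffer
 * helpers in <memalign.h> are typically preferred over open-coding the
 * attribute.
 */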

/*
 * For compatibility reasons, CONFIG_SYS_CACHELINE_SIZE is supported as well.
 */
#ifndef CONFIG_SYS_CACHELINE_SIZE
#define CONFIG_SYS_CACHELINE_SIZE	L1_CACHE_BYTES
#endif

#define L1_CACHE_ALIGN(x)	(((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
#define L1_CACHE_PAGES		8
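
/*
 * Worked example: with 32-byte cache lines (L1_CACHE_SHIFT == 5),
 * L1_CACHE_ALIGN(0x1001) == (0x1001 + 0x1f) & ~0x1f == 0x1020, while an
 * already aligned value such as 0x1000 is returned unchanged.
 */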

#define SMP_CACHE_BYTES	L1_CACHE_BYTES

#ifdef MODULE
#define __cacheline_aligned __attribute__((__aligned__(L1_CACHE_BYTES)))
#else
#define __cacheline_aligned					\
	__attribute__((__aligned__(L1_CACHE_BYTES),		\
			__section__(".data.cacheline_aligned")))
#endif
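
/*
 * Illustrative sketch only: __cacheline_aligned is meant for data that
 * should start on its own cache line, e.g.
 *
 *	static struct some_state state __cacheline_aligned;
 *
 * where struct some_state is a hypothetical structure; the non-MODULE
 * variant additionally groups such objects in the .data.cacheline_aligned
 * section.
 */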

#if defined(__KERNEL__) && !defined(__ASSEMBLY__)
extern void flush_dcache_range(unsigned long start, unsigned long stop);
extern void clean_dcache_range(unsigned long start, unsigned long stop);
extern void invalidate_dcache_range(unsigned long start, unsigned long stop);
extern void flush_dcache(void);
extern void invalidate_dcache(void);
extern void invalidate_icache(void);
#ifdef CONFIG_SYS_INIT_RAM_LOCK
extern void unlock_ram_in_cache(void);
#endif /* CONFIG_SYS_INIT_RAM_LOCK */
#endif /* __ASSEMBLY__ */

#if defined(__KERNEL__) && !defined(__ASSEMBLY__)
int l2cache_init(void);
void enable_cpc(void);
void disable_cpc_sram(void);
#endif
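
/*
 * Illustrative sketch only, using the declarations above: a typical DMA
 * sequence flushes a buffer out of the data cache before handing it to the
 * device and invalidates it again before the CPU reads the result, e.g.
 *
 *	flush_dcache_range((ulong)buf, (ulong)buf + len);
 *	... start DMA to/from buf and wait for completion ...
 *	invalidate_dcache_range((ulong)buf, (ulong)buf + len);
 *
 * buf and len are hypothetical; the exact pattern depends on the transfer
 * direction and the driver.
 */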

/* prep registers for L2 */
#define CACHECRBA	0x80000823	/* Cache configuration register address */
#define L2CACHE_MASK	0x03	/* Mask for 2 L2 Cache bits */
#define L2CACHE_512KB	0x00	/* 512KB */
#define L2CACHE_256KB	0x01	/* 256KB */
#define L2CACHE_1MB	0x02	/* 1MB */
#define L2CACHE_NONE	0x03	/* NONE */
#define L2CACHE_PARITY	0x08	/* Mask for L2 Cache Parity Protected bit */
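
/*
 * Illustrative sketch only: given a value reg read from the cache
 * configuration register at CACHECRBA (the read accessor is board-specific
 * and assumed here), the L2 size bits decode as
 *
 *	switch (reg & L2CACHE_MASK) {
 *	case L2CACHE_512KB: ... 512 KiB L2 ...
 *	case L2CACHE_256KB: ... 256 KiB L2 ...
 *	case L2CACHE_1MB:   ... 1 MiB L2 ...
 *	case L2CACHE_NONE:  ... no L2 fitted ...
 *	}
 *
 * with reg & L2CACHE_PARITY indicating a parity-protected L2.
 */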

#ifdef CONFIG_MPC8xx
/* Cache control on the MPC8xx is provided through some additional
 * special purpose registers.
 */
#define IC_CST		560	/* Instruction cache control/status */
#define IC_ADR		561	/* Address needed for some commands */
#define IC_DAT		562	/* Read-only data register */
#define DC_CST		568	/* Data cache control/status */
#define DC_ADR		569	/* Address needed for some commands */
#define DC_DAT		570	/* Read-only data register */

/* Commands. Only the first few are available to the instruction cache.
 */
#define IDC_ENABLE	0x02000000	/* Cache enable */
#define IDC_DISABLE	0x04000000	/* Cache disable */
#define IDC_LDLCK	0x06000000	/* Load and lock */
#define IDC_UNLINE	0x08000000	/* Unlock line */
#define IDC_UNALL	0x0a000000	/* Unlock all */
#define IDC_INVALL	0x0c000000	/* Invalidate all */

#define DC_FLINE	0x0e000000	/* Flush data cache line */
#define DC_SFWT		0x01000000	/* Set forced writethrough mode */
#define DC_CFWT		0x03000000	/* Clear forced writethrough mode */
#define DC_SLES		0x05000000	/* Set little endian swap mode */
#define DC_CLES		0x07000000	/* Clear little endian swap mode */

/* Status.
 */
#define IDC_ENABLED	0x80000000	/* Cache is enabled */
#define IDC_CERR1	0x00200000	/* Cache error 1 */
#define IDC_CERR2	0x00100000	/* Cache error 2 */
#define IDC_CERR3	0x00080000	/* Cache error 3 */

#define DC_DFWT		0x40000000	/* Data cache is forced write through */
#define DC_LES		0x20000000	/* Caches are little endian mode */

#if !defined(__ASSEMBLY__)
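/* C accessors for the MPC8xx cache control SPRs defined above. */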
static inline uint rd_ic_cst(void)
{
	return mfspr(IC_CST);
}

static inline void wr_ic_cst(uint val)
{
	mtspr(IC_CST, val);
}

static inline void wr_ic_adr(uint val)
{
	mtspr(IC_ADR, val);
}

static inline uint rd_dc_cst(void)
{
	return mfspr(DC_CST);
}

static inline void wr_dc_cst(uint val)
{
	mtspr(DC_CST, val);
}

static inline void wr_dc_adr(uint val)
{
	mtspr(DC_ADR, val);
}
#endif
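
/*
 * Illustrative sketch only, not lifted from U-Boot's MPC8xx cache code: the
 * accessors above can drive the command interface, e.g. invalidating and
 * then enabling the instruction cache with
 *
 *	wr_ic_cst(IDC_INVALL);
 *	wr_ic_cst(IDC_ENABLE);
 *
 * Real code must also respect the sequencing and synchronization
 * requirements documented in the MPC8xx reference manual.
 */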
#endif /* CONFIG_MPC8xx */

#endif