/*
 * include/asm-ppc/cache.h
 */
#ifndef __ARCH_PPC_CACHE_H
#define __ARCH_PPC_CACHE_H

#include <asm/processor.h>

/* bytes per L1 cache line */
#if defined(CONFIG_MPC8xx)
#define	L1_CACHE_SHIFT	4
#elif defined(CONFIG_PPC64BRIDGE)
#define	L1_CACHE_SHIFT	7
#elif defined(CONFIG_E500MC)
#define	L1_CACHE_SHIFT	6
#else
#define	L1_CACHE_SHIFT	5
#endif

#define	L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)

/*
 * Use the L1 data cache line size value for the minimum DMA buffer alignment
 * on PowerPC.
 */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
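/*
 * Usage sketch (illustrative only; "rx_buf" and RX_LEN are hypothetical names,
 * not part of this header): a buffer that is written by DMA would typically be
 * declared with at least this alignment, e.g.
 *
 *	static unsigned char rx_buf[RX_LEN]
 *		__attribute__((__aligned__(ARCH_DMA_MINALIGN)));
 *
 * so that cache maintenance on the buffer cannot touch adjacent data sharing
 * the same cache line.
 */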

/*
 * For compatibility reasons, CONFIG_SYS_CACHELINE_SIZE is supported as well.
 */
#ifndef CONFIG_SYS_CACHELINE_SIZE
#define CONFIG_SYS_CACHELINE_SIZE	L1_CACHE_BYTES
#endif

#define	L1_CACHE_ALIGN(x)	(((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
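/*
 * L1_CACHE_ALIGN() rounds a byte count up to the next cache line boundary.
 * For example, with L1_CACHE_SHIFT of 5 (32-byte lines), L1_CACHE_ALIGN(100)
 * evaluates to 128, while an already aligned value such as 96 is unchanged.
 */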
#define	L1_CACHE_PAGES		8

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

#ifdef MODULE
#define __cacheline_aligned	__attribute__((__aligned__(L1_CACHE_BYTES)))
#else
#define __cacheline_aligned					\
	__attribute__((__aligned__(L1_CACHE_BYTES),		\
			__section__(".data.cacheline_aligned")))
#endif
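/*
 * Usage sketch (illustrative only; "struct cpu_stats" and "stats" are
 * hypothetical): the attribute macro above is applied at the definition, e.g.
 *
 *	static struct cpu_stats stats __cacheline_aligned;
 *
 * which starts the object on an L1_CACHE_BYTES boundary and, outside of
 * modules, places it in the .data.cacheline_aligned section.
 */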

#if defined(__KERNEL__) && !defined(__ASSEMBLY__)
extern void flush_dcache_range(unsigned long start, unsigned long stop);
extern void clean_dcache_range(unsigned long start, unsigned long stop);
extern void invalidate_dcache_range(unsigned long start, unsigned long stop);
extern void flush_dcache(void);
extern void invalidate_dcache(void);
extern void invalidate_icache(void);
#ifdef CONFIG_SYS_INIT_RAM_LOCK
extern void unlock_ram_in_cache(void);
#endif /* CONFIG_SYS_INIT_RAM_LOCK */
#endif /* __ASSEMBLY__ */
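/*
 * Usage sketch (illustrative only; "buf" and "len" are hypothetical): before a
 * device reads a buffer via DMA, dirty lines are written back; before the CPU
 * reads data a device has written, stale lines are discarded:
 *
 *	flush_dcache_range((unsigned long)buf, (unsigned long)buf + len);
 *	invalidate_dcache_range((unsigned long)buf, (unsigned long)buf + len);
 *
 * Both start and stop should fall on L1_CACHE_BYTES boundaries, which is what
 * ARCH_DMA_MINALIGN above provides for properly declared buffers.
 */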

#if defined(__KERNEL__) && !defined(__ASSEMBLY__)
int l2cache_init(void);
void enable_cpc(void);
void disable_cpc_sram(void);
#endif

/* prep registers for L2 */
#define CACHECRBA	0x80000823	/* Cache configuration register address */
#define L2CACHE_MASK	0x03	/* Mask for 2 L2 Cache bits */
#define L2CACHE_512KB	0x00	/* 512KB */
#define L2CACHE_256KB	0x01	/* 256KB */
#define L2CACHE_1MB	0x02	/* 1MB */
#define L2CACHE_NONE	0x03	/* NONE */
#define L2CACHE_PARITY	0x08	/* Mask for L2 Cache Parity Protected bit */

#ifdef CONFIG_MPC8xx
/* Cache control on the MPC8xx is provided through some additional
 * special purpose registers.
 */
#define IC_CST		560	/* Instruction cache control/status */
#define IC_ADR		561	/* Address needed for some commands */
#define IC_DAT		562	/* Read-only data register */
#define DC_CST		568	/* Data cache control/status */
#define DC_ADR		569	/* Address needed for some commands */
#define DC_DAT		570	/* Read-only data register */

/* Commands.  Only the first few are available to the instruction cache.
 */
#define IDC_ENABLE	0x02000000	/* Cache enable */
#define IDC_DISABLE	0x04000000	/* Cache disable */
#define IDC_LDLCK	0x06000000	/* Load and lock */
#define IDC_UNLINE	0x08000000	/* Unlock line */
#define IDC_UNALL	0x0a000000	/* Unlock all */
#define IDC_INVALL	0x0c000000	/* Invalidate all */

#define DC_FLINE	0x0e000000	/* Flush data cache line */
#define DC_SFWT		0x01000000	/* Set forced writethrough mode */
#define DC_CFWT		0x03000000	/* Clear forced writethrough mode */
#define DC_SLES		0x05000000	/* Set little endian swap mode */
#define DC_CLES		0x07000000	/* Clear little endian swap mode */

/* Status.
 */
#define IDC_ENABLED	0x80000000	/* Cache is enabled */
#define IDC_CERR1	0x00200000	/* Cache error 1 */
#define IDC_CERR2	0x00100000	/* Cache error 2 */
#define IDC_CERR3	0x00080000	/* Cache error 3 */

#define DC_DFWT		0x40000000	/* Data cache is forced write through */
#define DC_LES		0x20000000	/* Caches are little endian mode */

#if !defined(__ASSEMBLY__)
static inline uint rd_ic_cst(void)
{
	return mfspr(IC_CST);
}

static inline void wr_ic_cst(uint val)
{
	mtspr(IC_CST, val);
}

static inline void wr_ic_adr(uint val)
{
	mtspr(IC_ADR, val);
}

static inline uint rd_dc_cst(void)
{
	return mfspr(DC_CST);
}

static inline void wr_dc_cst(uint val)
{
	mtspr(DC_CST, val);
}

static inline void wr_dc_adr(uint val)
{
	mtspr(DC_ADR, val);
}
#endif
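/*
 * Usage sketch (illustrative only, not a helper defined here): the accessors
 * above pair with the IDC_* command words, e.g. invalidating the entire
 * instruction cache could look like
 *
 *	wr_ic_cst(IDC_INVALL);
 *
 * and the enable state can be tested with (rd_ic_cst() & IDC_ENABLED).
 */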
#endif /* CONFIG_MPC8xx */

#endif