1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_POWERPC_CACHE_H
3 #define _ASM_POWERPC_CACHE_H
/* bytes per L1 cache line */
#if defined(CONFIG_PPC_8xx)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#define IFETCH_ALIGN_SHIFT	2
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#define IFETCH_ALIGN_SHIFT	4 /* POWER8,9 */
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
/*
 * Platforms without cache-coherent DMA must cacheline-align DMA buffers
 * so cache maintenance on one buffer cannot corrupt a neighbour.
 */
#ifdef CONFIG_NOT_COHERENT_CACHE
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
#endif
40 #if !defined(__ASSEMBLY__)
43 struct ppc_cache_info
{
46 u32 block_size
; /* L1 only */
54 struct ppc_cache_info l1d
;
55 struct ppc_cache_info l1i
;
56 struct ppc_cache_info l2
;
57 struct ppc_cache_info l3
;
60 extern struct ppc64_caches ppc64_caches
;
62 static inline u32
l1_dcache_shift(void)
64 return ppc64_caches
.l1d
.log_block_size
;
67 static inline u32
l1_dcache_bytes(void)
69 return ppc64_caches
.l1d
.block_size
;
72 static inline u32
l1_icache_shift(void)
74 return ppc64_caches
.l1i
.log_block_size
;
77 static inline u32
l1_icache_bytes(void)
79 return ppc64_caches
.l1i
.block_size
;
82 static inline u32
l1_dcache_shift(void)
84 return L1_CACHE_SHIFT
;
87 static inline u32
l1_dcache_bytes(void)
89 return L1_CACHE_BYTES
;
92 static inline u32
l1_icache_shift(void)
94 return L1_CACHE_SHIFT
;
97 static inline u32
l1_icache_bytes(void)
99 return L1_CACHE_BYTES
;
/* Group rarely-written data together to reduce cacheline false sharing. */
#define __read_mostly __section(".data..read_mostly")

#ifdef CONFIG_PPC_BOOK3S_32
/* L2CR/L3CR control-register accessors, implemented in assembly. */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* No L2CR/L3CR on other platforms: read as 0, writes are no-ops. */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
/* Zero the data cache block containing addr (Data Cache Block Zero). */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}
/* Invalidate the data cache block containing addr without writing it back. */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}
/* Flush (write back and invalidate) the data cache block containing addr. */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}
/* Write back the data cache block containing addr, leaving it valid. */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
138 static inline void icbi(void *addr
)
140 asm volatile ("icbi 0, %0" : : "r"(addr
) : "memory");
143 static inline void iccci(void *addr
)
145 asm volatile ("iccci 0, %0" : : "r"(addr
) : "memory");
148 #endif /* !__ASSEMBLY__ */
149 #endif /* __KERNEL__ */
150 #endif /* _ASM_POWERPC_CACHE_H */