#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__
/* bytes per L1 cache line */
#if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#endif
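
/*
 * Example (sketch, not part of the original header): MAX_COPY_PREFETCH
 * caps how many cache lines the copy routines prefetch ahead of the
 * line currently being copied. An illustrative C rendering of the dcbt
 * hinting (variable names hypothetical):
 *
 *	for (i = 0; i < MAX_COPY_PREFETCH; i++)
 *		asm volatile("dcbt 0,%0" : : "r" (src + i * L1_CACHE_BYTES));
 */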

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES
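
/*
 * Example (sketch, not part of the original header): on CONFIG_PPC64 the
 * shift of 7 gives L1_CACHE_BYTES == (1 << 7) == 128. SMP_CACHE_BYTES is
 * what the generic ____cacheline_aligned annotation from linux/cache.h
 * aligns to, keeping hot data from sharing a line with unrelated fields:
 *
 *	struct example_stat {
 *		atomic_t hits;
 *	} ____cacheline_aligned;	// one full cache line per instance
 */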

#if defined(__powerpc64__) && !defined(__ASSEMBLY__)

struct ppc64_caches {
	u32	dsize;			/* L1 d-cache size */
	u32	dline_size;		/* L1 d-cache line size	*/
	u32	log_dline_size;
	u32	dlines_per_page;
	u32	isize;			/* L1 i-cache size */
	u32	iline_size;		/* L1 i-cache line size	*/
	u32	log_iline_size;
	u32	ilines_per_page;
};

extern struct ppc64_caches ppc64_caches;
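
/*
 * Example (sketch, not part of the original header): ppc64_caches is
 * filled in at boot from the device tree, and cache-walking code sizes
 * its strides from it. An illustrative range flush (the kernel's real
 * helper is flush_dcache_range() from asm/cacheflush.h):
 *
 *	void example_flush_range(unsigned long start, unsigned long stop)
 *	{
 *		unsigned long line = ppc64_caches.dline_size;
 *		unsigned long addr;
 *
 *		for (addr = start & ~(line - 1); addr < stop; addr += line)
 *			asm volatile("dcbf 0,%0" : : "r" (addr) : "memory");
 *		asm volatile("sync" : : : "memory");
 *	}
 */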

static inline void logmpp(u64 x)
{
	asm volatile(PPC_LOGMPP(R1) : : "r" (x));
}
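
/*
 * Example (sketch, not part of the original header): logmpp() emits the
 * POWER8 "logmpp" instruction (PPC_LOGMPP from asm/ppc-opcode.h), which
 * asks the Micro Partition Prefetch engine to log cache contents into a
 * buffer so they can be prefetched back in later. The operand packs the
 * buffer's physical address with control bits (constant name below is
 * hypothetical):
 *
 *	logmpp(virt_to_phys(mpp_buf) | MPP_LOG_COMMAND_BITS);
 */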

#endif /* __powerpc64__ && ! __ASSEMBLY__ */

#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync
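
/*
 * Example (sketch, not part of the original header): assembly code that
 * has just stored new instructions can purge stale prefetches before
 * executing them; r3 is assumed to hold a cacheable address:
 *
 *	stw	r4,0(r5)		# write the patched instruction
 *	PURGE_PREFETCHED_INS		# sync; icbi 0,r3; sync; isync
 */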

#else
#define __read_mostly __attribute__((__section__(".data..read_mostly")))
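
/*
 * Example (sketch, not part of the original header): __read_mostly moves
 * a rarely written variable into .data..read_mostly so that frequent
 * writers elsewhere do not bounce its cache line between CPUs:
 *
 *	static unsigned int example_timeout_ms __read_mostly = 100;
 */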

#ifdef CONFIG_6xx
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
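
/*
 * Example (sketch, not part of the original header): on CONFIG_6xx
 * (G3/G4-class) CPUs these accessors read and write the L2/L3 cache
 * control registers; on everything else they compile away. A caller
 * could re-enable the L2 like this (L2CR_L2E is the enable bit from
 * asm/reg.h):
 *
 *	unsigned long l2cr = _get_L2CR();
 *
 *	if (!(l2cr & L2CR_L2E))
 *		_set_L2CR(l2cr | L2CR_L2E);
 */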

#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */