#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__

/* bytes per L1 cache line */
#if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

#if defined(__powerpc64__) && !defined(__ASSEMBLY__)

/*
 * Runtime-probed cache geometry for 64-bit kernels; filled in during
 * early boot from the device tree.
 */
struct ppc64_caches {
	u32	dsize;			/* L1 d-cache size */
	u32	dline_size;		/* L1 d-cache line size */
	u32	log_dline_size;		/* log2 of dline_size */
	u32	dlines_per_page;	/* d-cache lines per page */
	u32	isize;			/* L1 i-cache size */
	u32	iline_size;		/* L1 i-cache line size */
	u32	log_iline_size;		/* log2 of iline_size */
	u32	ilines_per_page;	/* i-cache lines per page */
};

extern struct ppc64_caches ppc64_caches;
#endif /* __powerpc64__ && ! __ASSEMBLY__ */

#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync

#else
#define __read_mostly __attribute__((__section__(".data..read_mostly")))

#ifdef CONFIG_6xx
/* 6xx parts have software-controlled L2/L3; implemented in assembly. */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* No software-visible L2CR/L3CR on other parts: stub these out. */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif

extern void cacheable_memzero(void *p, unsigned int nb);
extern void *cacheable_memcpy(void *, const void *, unsigned int);

#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */