/*
 * include/asm-blackfin/cache.h
 */
#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes for cache
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES
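/*
 * Illustrative note, not part of the original header: with
 * L1_CACHE_SHIFT == 5, L1_CACHE_BYTES evaluates to 1 << 5 == 32,
 * matching the 32-byte line size described above. A hypothetical
 * structure padded out to a full line to avoid false sharing:
 *
 *	struct example_counter {
 *		unsigned long count;
 *	} __attribute__((__aligned__(L1_CACHE_BYTES)));
 *
 * sizeof(struct example_counter) then rounds up to 32 bytes.
 */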
#ifdef CONFIG_SMP
#define __cacheline_aligned
#else
#define ____cacheline_aligned

/*
 * Put cacheline_aligned data into L1 data memory
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
#define __cacheline_aligned				\
	__attribute__((__aligned__(L1_CACHE_BYTES),	\
		__section__(".data_l1.cacheline_aligned")))
#endif

#endif
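/*
 * Illustrative usage, not part of the original header: on a non-SMP
 * build with CONFIG_CACHELINE_ALIGNED_L1 set, a definition such as
 *
 *	static int example_state __cacheline_aligned;
 *
 * is both aligned to L1_CACHE_BYTES and placed in the
 * ".data_l1.cacheline_aligned" section, i.e. in on-chip L1 data
 * memory; on SMP builds __cacheline_aligned expands to nothing and
 * the object stays in the normal data section.
 */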
/*
 * largest L1 cache shift which this arch supports
 */
#define L1_CACHE_SHIFT_MAX	5
#if defined(CONFIG_SMP) && \
    !defined(CONFIG_BFIN_CACHE_COHERENT)
# if defined(CONFIG_BFIN_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
# define __ARCH_SYNC_CORE_ICACHE
# endif
# if defined(CONFIG_BFIN_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
# define __ARCH_SYNC_CORE_DCACHE
# endif
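/*
 * Note, an editorial summary of the conditions above rather than text
 * from the original file: __ARCH_SYNC_CORE_ICACHE and
 * __ARCH_SYNC_CORE_DCACHE are only defined for SMP kernels whose
 * caches are not hardware coherent and which actually enable the
 * respective L1 (or L2) cacheability options; they advertise that the
 * core-synchronization helpers below are available and needed.
 */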
#ifndef __ASSEMBLY__
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);

static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}

void resync_core_dcache(void);
void resync_core_icache(void);
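/*
 * Illustrative pairing, an editorial sketch rather than part of the
 * original file: with software-maintained coherence, a producer core
 * marks a barrier after publishing data and a consumer core checks it
 * before reading, which gives the consumer a chance to resynchronize
 * stale cache lines:
 *
 *	producer core			consumer core
 *	shared->value = v;		smp_check_barrier();
 *	smp_mark_barrier();		v = shared->value;
 *
 * resync_core_dcache()/resync_core_icache() are the heavier hammers
 * for refreshing a core's view, e.g. after a task migrates between
 * cores (an assumption about the callers, not stated in this header).
 */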
#endif	/* !__ASSEMBLY__ */
#endif	/* CONFIG_SMP && !CONFIG_BFIN_CACHE_COHERENT */

#endif	/* __ARCH_BLACKFIN_CACHE_H */