/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */
#ifndef __ARCH_BLACKFIN_CACHE_H
#define __ARCH_BLACKFIN_CACHE_H

#include <linux/linkage.h>	/* for asmlinkage */
/*
 * Bytes per L1 cache line
 * Blackfin loads 32 bytes for cache
 */
#define L1_CACHE_SHIFT	5
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES	L1_CACHE_BYTES
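/*
 * Illustrative sketch (assumption, not from the original header): with
 * L1_CACHE_SHIFT == 5, L1_CACHE_BYTES expands to 32, so a buffer sized
 * and aligned with these macros spans exactly whole cache lines, e.g.
 *
 *	static char scratch_buf[L1_CACHE_BYTES] __aligned(L1_CACHE_BYTES);
 *
 * "scratch_buf" is a hypothetical name used only for illustration.
 */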
#ifdef CONFIG_SMP
#define __cacheline_aligned
#else
#define ____cacheline_aligned
/*
 * Put cacheline_aligned data to L1 data memory
 */
#ifdef CONFIG_CACHELINE_ALIGNED_L1
#define __cacheline_aligned				\
	__attribute__((__aligned__(L1_CACHE_BYTES),	\
		__section__(".data_l1.cacheline_aligned")))
#endif
#endif	/* CONFIG_SMP */
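/*
 * Illustrative sketch (assumption, not from the original header): with
 * CONFIG_CACHELINE_ALIGNED_L1 enabled on a non-SMP build, a declaration
 * such as
 *
 *	static struct foo shared_state __cacheline_aligned;
 *
 * is both aligned to L1_CACHE_BYTES and placed in the
 * .data_l1.cacheline_aligned section, i.e. in on-chip L1 data memory.
 * "struct foo" and "shared_state" are hypothetical names.
 */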
/*
 * largest L1 which this arch supports
 */
#define L1_CACHE_SHIFT_MAX	5
#if defined(CONFIG_SMP) && \
    !defined(CONFIG_BFIN_CACHE_COHERENT)
# if defined(CONFIG_BFIN_EXTMEM_ICACHEABLE) || defined(CONFIG_BFIN_L2_ICACHEABLE)
# define __ARCH_SYNC_CORE_ICACHE
# endif
# if defined(CONFIG_BFIN_EXTMEM_DCACHEABLE) || defined(CONFIG_BFIN_L2_DCACHEABLE)
# define __ARCH_SYNC_CORE_DCACHE
# endif
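/*
 * Without hardware cache coherency, a core's cached view of external or
 * L2 memory can go stale after another core writes it; the helpers
 * below mark/check software cache barriers and resynchronize a core's
 * caches when that happens.
 */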
asmlinkage void __raw_smp_mark_barrier_asm(void);
asmlinkage void __raw_smp_check_barrier_asm(void);
static inline void smp_mark_barrier(void)
{
	__raw_smp_mark_barrier_asm();
}
static inline void smp_check_barrier(void)
{
	__raw_smp_check_barrier_asm();
}
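/*
 * Minimal usage sketch (assumption, not taken from the original
 * header): a writing core marks the barrier after updating shared data
 * and a reading core checks it before use, so its stale cache lines can
 * be resynchronized:
 *
 *	shared->value = new_value;	// on the writing core
 *	smp_mark_barrier();
 *
 *	smp_check_barrier();		// on the reading core
 *	use(shared->value);
 *
 * "shared", "new_value" and "use()" are hypothetical names.
 */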
void resync_core_dcache(void);
void resync_core_icache(void);
#endif

#endif	/* __ARCH_BLACKFIN_CACHE_H */