#ifndef _ASM_SCORE_CACHEFLUSH_H
#define _ASM_SCORE_CACHEFLUSH_H

/* Keep includes the same across arches.  */
#include <linux/mm.h>
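
/*
 * Cache maintenance primitives; the out-of-line implementations live
 * in the architecture's cache support code.
 */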
extern void flush_cache_all(void);
extern void flush_cache_mm(struct mm_struct *mm);
extern void flush_cache_range(struct vm_area_struct *vma,
		unsigned long start, unsigned long end);
extern void flush_cache_page(struct vm_area_struct *vma,
		unsigned long page, unsigned long pfn);
extern void flush_cache_sigtramp(unsigned long addr);
extern void flush_icache_all(void);
extern void flush_icache_range(unsigned long start, unsigned long end);
extern void flush_dcache_range(unsigned long start, unsigned long end);
#define flush_cache_dup_mm(mm)			do {} while (0)
#define flush_dcache_page(page)			do {} while (0)
#define flush_dcache_mmap_lock(mapping)		do {} while (0)
#define flush_dcache_mmap_unlock(mapping)	do {} while (0)
#define flush_cache_vmap(start, end)		do {} while (0)
#define flush_cache_vunmap(start, end)		do {} while (0)
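
/*
 * Post-write icache maintenance for a page: only executable mappings
 * can hold stale instructions, so non-VM_EXEC vmas are skipped.
 */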
static inline void flush_icache_page(struct vm_area_struct *vma,
	struct page *page)
{
	if (vma->vm_flags & VM_EXEC) {
		void *v = page_address(page);
		flush_icache_range((unsigned long) v,
			(unsigned long) v + PAGE_SIZE);
	}
}
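
/*
 * copy_{from,to}_user_page() are plain memcpy()s; the to-user variant
 * also flushes the affected page when the mapping is executable, since
 * callers such as access_process_vm() may be patching code.
 */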
#define copy_from_user_page(vma, page, vaddr, dst, src, len)	\
	memcpy(dst, src, len)

#define copy_to_user_page(vma, page, vaddr, dst, src, len)	\
	do {							\
		memcpy(dst, src, len);				\
		if ((vma->vm_flags & VM_EXEC))			\
			flush_cache_page(vma, vaddr, page_to_pfn(page));\
	} while (0)

#endif /* _ASM_SCORE_CACHEFLUSH_H */