include/asm-generic/cacheflush.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_CACHEFLUSH_H
#define _ASM_GENERIC_CACHEFLUSH_H

struct mm_struct;
struct vm_area_struct;
struct page;
struct address_space;

/*
 * The cache does not need to be flushed when TLB entries change if the
 * cache is mapped to physical memory rather than virtual memory.
 */
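
/*
 * Every helper below is only a no-op fallback. An architecture overrides one
 * by implementing it in its own <asm/cacheflush.h> and defining the matching
 * macro before this file is included, so the #ifndef guard skips the stub
 * here. Illustrative sketch only, not taken from any particular architecture:
 *
 *	extern void flush_icache_range(unsigned long start, unsigned long end);
 *	#define flush_icache_range flush_icache_range
 *	#include <asm-generic/cacheflush.h>
 */
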
#ifndef flush_cache_all
static inline void flush_cache_all(void)
{
}
#endif
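
/*
 * Whole-address-space flushes: flush_cache_mm() is called when an entire
 * user address space is being torn down or remapped, and flush_cache_dup_mm()
 * is its counterpart called before the mm is duplicated at fork().
 */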
#ifndef flush_cache_mm
static inline void flush_cache_mm(struct mm_struct *mm)
{
}
#endif

#ifndef flush_cache_dup_mm
static inline void flush_cache_dup_mm(struct mm_struct *mm)
{
}
#endif
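
/*
 * Ranged flushes: flush_cache_range() covers a span of user addresses within
 * a VMA, flush_cache_page() a single page identified by both its user address
 * and its pfn. They are called before the underlying mapping is changed or
 * removed.
 */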
#ifndef flush_cache_range
static inline void flush_cache_range(struct vm_area_struct *vma,
                                     unsigned long start,
                                     unsigned long end)
{
}
#endif

#ifndef flush_cache_page
static inline void flush_cache_page(struct vm_area_struct *vma,
                                    unsigned long vmaddr,
                                    unsigned long pfn)
{
}
#endif
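
/*
 * flush_dcache_page() deals with D-cache aliasing when the kernel touches a
 * page cache page that may also be mapped into user space. Defining
 * ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE to 0 tells common code that this
 * fallback is a no-op.
 */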
#ifndef ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE
static inline void flush_dcache_page(struct page *page)
{
}
#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE 0
#endif

#ifndef flush_dcache_mmap_lock
static inline void flush_dcache_mmap_lock(struct address_space *mapping)
{
}
#endif

#ifndef flush_dcache_mmap_unlock
static inline void flush_dcache_mmap_unlock(struct address_space *mapping)
{
}
#endif

#ifndef flush_icache_range
static inline void flush_icache_range(unsigned long start, unsigned long end)
{
}
#endif
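
/*
 * When the architecture does not provide a separate variant for user
 * addresses, flush_icache_user_range() simply falls back to
 * flush_icache_range().
 */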
#ifndef flush_icache_user_range
#define flush_icache_user_range flush_icache_range
#endif

#ifndef flush_icache_page
static inline void flush_icache_page(struct vm_area_struct *vma,
                                     struct page *page)
{
}
#endif

#ifndef flush_icache_user_page
static inline void flush_icache_user_page(struct vm_area_struct *vma,
                                          struct page *page,
                                          unsigned long addr, int len)
{
}
#endif
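
/*
 * flush_cache_vmap() and flush_cache_vunmap() bracket the creation and
 * teardown of kernel virtual mappings (vmalloc/vmap areas).
 */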
#ifndef flush_cache_vmap
static inline void flush_cache_vmap(unsigned long start, unsigned long end)
{
}
#endif

#ifndef flush_cache_vunmap
static inline void flush_cache_vunmap(unsigned long start, unsigned long end)
{
}
#endif
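
/*
 * copy_to_user_page() and copy_from_user_page() are used when the kernel
 * reads or modifies another process's page through its own mapping (e.g.
 * ptrace access). The generic copy_to_user_page() performs the copy and then
 * flushes the I-cache for that window so newly written instructions become
 * visible.
 */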
#ifndef copy_to_user_page
#define copy_to_user_page(vma, page, vaddr, dst, src, len)	\
	do { \
		memcpy(dst, src, len); \
		flush_icache_user_page(vma, page, vaddr, len); \
	} while (0)
#endif

#ifndef copy_from_user_page
#define copy_from_user_page(vma, page, vaddr, dst, src, len) \
	memcpy(dst, src, len)
#endif

#endif /* _ASM_GENERIC_CACHEFLUSH_H */