// SPDX-License-Identifier: GPL-2.0-only
/*
 * This file defines C prototypes for the low-level cache assembly functions
 * and populates a vtable for each selected ARM CPU cache type.
 */

#include <linux/types.h>
#include <asm/cacheflush.h>
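
/*
 * Illustrative sketch (not part of this file): with MULTI_CACHE, the
 * __cpuc_*() helpers in asm/cacheflush.h dispatch through a global
 * cpu_cache vtable, which boot-time setup code is expected to point at
 * one of the per-CPU structures defined below, roughly:
 *
 *	cpu_cache = *list->cache;	// in setup_processor()
 *	...
 *	__cpuc_flush_icache_all();	// -> cpu_cache.flush_icache_all()
 */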

#ifdef CONFIG_CPU_CACHE_V4
void v4_flush_icache_all(void);
void v4_flush_kern_cache_all(void);
void v4_flush_user_cache_all(void);
void v4_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v4_coherent_kern_range(unsigned long, unsigned long);
int v4_coherent_user_range(unsigned long, unsigned long);
void v4_flush_kern_dcache_area(void *, size_t);
void v4_dma_map_area(const void *, size_t, int);
void v4_dma_unmap_area(const void *, size_t, int);
void v4_dma_flush_range(const void *, const void *);

struct cpu_cache_fns v4_cache_fns __initconst = {
	.flush_icache_all = v4_flush_icache_all,
	.flush_kern_all = v4_flush_kern_cache_all,
	.flush_kern_louis = v4_flush_kern_cache_all,
	.flush_user_all = v4_flush_user_cache_all,
	.flush_user_range = v4_flush_user_cache_range,
	.coherent_kern_range = v4_coherent_kern_range,
	.coherent_user_range = v4_coherent_user_range,
	.flush_kern_dcache_area = v4_flush_kern_dcache_area,
	.dma_map_area = v4_dma_map_area,
	.dma_unmap_area = v4_dma_unmap_area,
	.dma_flush_range = v4_dma_flush_range,
};
#endif

/* V4 write-back cache "V4WB" */
#ifdef CONFIG_CPU_CACHE_V4WB
void v4wb_flush_icache_all(void);
void v4wb_flush_kern_cache_all(void);
void v4wb_flush_user_cache_all(void);
void v4wb_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v4wb_coherent_kern_range(unsigned long, unsigned long);
int v4wb_coherent_user_range(unsigned long, unsigned long);
void v4wb_flush_kern_dcache_area(void *, size_t);
void v4wb_dma_map_area(const void *, size_t, int);
void v4wb_dma_unmap_area(const void *, size_t, int);
void v4wb_dma_flush_range(const void *, const void *);

struct cpu_cache_fns v4wb_cache_fns __initconst = {
	.flush_icache_all = v4wb_flush_icache_all,
	.flush_kern_all = v4wb_flush_kern_cache_all,
	.flush_kern_louis = v4wb_flush_kern_cache_all,
	.flush_user_all = v4wb_flush_user_cache_all,
	.flush_user_range = v4wb_flush_user_cache_range,
	.coherent_kern_range = v4wb_coherent_kern_range,
	.coherent_user_range = v4wb_coherent_user_range,
	.flush_kern_dcache_area = v4wb_flush_kern_dcache_area,
	.dma_map_area = v4wb_dma_map_area,
	.dma_unmap_area = v4wb_dma_unmap_area,
	.dma_flush_range = v4wb_dma_flush_range,
};
#endif

/* V4 write-through cache "V4WT" */
#ifdef CONFIG_CPU_CACHE_V4WT
void v4wt_flush_icache_all(void);
void v4wt_flush_kern_cache_all(void);
void v4wt_flush_user_cache_all(void);
void v4wt_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v4wt_coherent_kern_range(unsigned long, unsigned long);
int v4wt_coherent_user_range(unsigned long, unsigned long);
void v4wt_flush_kern_dcache_area(void *, size_t);
void v4wt_dma_map_area(const void *, size_t, int);
void v4wt_dma_unmap_area(const void *, size_t, int);
void v4wt_dma_flush_range(const void *, const void *);

struct cpu_cache_fns v4wt_cache_fns __initconst = {
	.flush_icache_all = v4wt_flush_icache_all,
	.flush_kern_all = v4wt_flush_kern_cache_all,
	.flush_kern_louis = v4wt_flush_kern_cache_all,
	.flush_user_all = v4wt_flush_user_cache_all,
	.flush_user_range = v4wt_flush_user_cache_range,
	.coherent_kern_range = v4wt_coherent_kern_range,
	.coherent_user_range = v4wt_coherent_user_range,
	.flush_kern_dcache_area = v4wt_flush_kern_dcache_area,
	.dma_map_area = v4wt_dma_map_area,
	.dma_unmap_area = v4wt_dma_unmap_area,
	.dma_flush_range = v4wt_dma_flush_range,
};
#endif

/* Faraday FA526 cache */
#ifdef CONFIG_CPU_CACHE_FA
void fa_flush_icache_all(void);
void fa_flush_kern_cache_all(void);
void fa_flush_user_cache_all(void);
void fa_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void fa_coherent_kern_range(unsigned long, unsigned long);
int fa_coherent_user_range(unsigned long, unsigned long);
void fa_flush_kern_dcache_area(void *, size_t);
void fa_dma_map_area(const void *, size_t, int);
void fa_dma_unmap_area(const void *, size_t, int);
void fa_dma_flush_range(const void *, const void *);

struct cpu_cache_fns fa_cache_fns __initconst = {
	.flush_icache_all = fa_flush_icache_all,
	.flush_kern_all = fa_flush_kern_cache_all,
	.flush_kern_louis = fa_flush_kern_cache_all,
	.flush_user_all = fa_flush_user_cache_all,
	.flush_user_range = fa_flush_user_cache_range,
	.coherent_kern_range = fa_coherent_kern_range,
	.coherent_user_range = fa_coherent_user_range,
	.flush_kern_dcache_area = fa_flush_kern_dcache_area,
	.dma_map_area = fa_dma_map_area,
	.dma_unmap_area = fa_dma_unmap_area,
	.dma_flush_range = fa_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_CACHE_V6
void v6_flush_icache_all(void);
void v6_flush_kern_cache_all(void);
void v6_flush_user_cache_all(void);
void v6_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v6_coherent_kern_range(unsigned long, unsigned long);
int v6_coherent_user_range(unsigned long, unsigned long);
void v6_flush_kern_dcache_area(void *, size_t);
void v6_dma_map_area(const void *, size_t, int);
void v6_dma_unmap_area(const void *, size_t, int);
void v6_dma_flush_range(const void *, const void *);

struct cpu_cache_fns v6_cache_fns __initconst = {
	.flush_icache_all = v6_flush_icache_all,
	.flush_kern_all = v6_flush_kern_cache_all,
	.flush_kern_louis = v6_flush_kern_cache_all,
	.flush_user_all = v6_flush_user_cache_all,
	.flush_user_range = v6_flush_user_cache_range,
	.coherent_kern_range = v6_coherent_kern_range,
	.coherent_user_range = v6_coherent_user_range,
	.flush_kern_dcache_area = v6_flush_kern_dcache_area,
	.dma_map_area = v6_dma_map_area,
	.dma_unmap_area = v6_dma_unmap_area,
	.dma_flush_range = v6_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_CACHE_V7
void v7_flush_icache_all(void);
void v7_flush_kern_cache_all(void);
void v7_flush_kern_cache_louis(void);
void v7_flush_user_cache_all(void);
void v7_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v7_coherent_kern_range(unsigned long, unsigned long);
int v7_coherent_user_range(unsigned long, unsigned long);
void v7_flush_kern_dcache_area(void *, size_t);
void v7_dma_map_area(const void *, size_t, int);
void v7_dma_unmap_area(const void *, size_t, int);
void v7_dma_flush_range(const void *, const void *);

struct cpu_cache_fns v7_cache_fns __initconst = {
	.flush_icache_all = v7_flush_icache_all,
	.flush_kern_all = v7_flush_kern_cache_all,
	.flush_kern_louis = v7_flush_kern_cache_louis,
	.flush_user_all = v7_flush_user_cache_all,
	.flush_user_range = v7_flush_user_cache_range,
	.coherent_kern_range = v7_coherent_kern_range,
	.coherent_user_range = v7_coherent_user_range,
	.flush_kern_dcache_area = v7_flush_kern_dcache_area,
	.dma_map_area = v7_dma_map_area,
	.dma_unmap_area = v7_dma_unmap_area,
	.dma_flush_range = v7_dma_flush_range,
};

/* Special quirky cache flush function for Broadcom B15 v7 caches */
void b15_flush_kern_cache_all(void);

struct cpu_cache_fns b15_cache_fns __initconst = {
	.flush_icache_all = v7_flush_icache_all,
#ifdef CONFIG_CACHE_B15_RAC
	.flush_kern_all = b15_flush_kern_cache_all,
#else
	.flush_kern_all = v7_flush_kern_cache_all,
#endif
	.flush_kern_louis = v7_flush_kern_cache_louis,
	.flush_user_all = v7_flush_user_cache_all,
	.flush_user_range = v7_flush_user_cache_range,
	.coherent_kern_range = v7_coherent_kern_range,
	.coherent_user_range = v7_coherent_user_range,
	.flush_kern_dcache_area = v7_flush_kern_dcache_area,
	.dma_map_area = v7_dma_map_area,
	.dma_unmap_area = v7_dma_unmap_area,
	.dma_flush_range = v7_dma_flush_range,
};
#endif
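
/*
 * Note: b15_flush_kern_cache_all (see cache-b15-rac.c) presumably has
 * to maintain the B15 readahead cache on top of the ordinary v7 flush;
 * every other slot in the vtable above reuses the stock v7 routine.
 */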

/* The NOP cache is just a set of dummy stubs that by definition do nothing */
#ifdef CONFIG_CPU_CACHE_NOP
void nop_flush_icache_all(void);
void nop_flush_kern_cache_all(void);
void nop_flush_user_cache_all(void);
void nop_flush_user_cache_range(unsigned long start, unsigned long end, unsigned int flags);
void nop_coherent_kern_range(unsigned long start, unsigned long end);
int nop_coherent_user_range(unsigned long, unsigned long);
void nop_flush_kern_dcache_area(void *kaddr, size_t size);
void nop_dma_map_area(const void *start, size_t size, int flags);
void nop_dma_unmap_area(const void *start, size_t size, int flags);
void nop_dma_flush_range(const void *start, const void *end);

struct cpu_cache_fns nop_cache_fns __initconst = {
	.flush_icache_all = nop_flush_icache_all,
	.flush_kern_all = nop_flush_kern_cache_all,
	.flush_kern_louis = nop_flush_kern_cache_all,
	.flush_user_all = nop_flush_user_cache_all,
	.flush_user_range = nop_flush_user_cache_range,
	.coherent_kern_range = nop_coherent_kern_range,
	.coherent_user_range = nop_coherent_user_range,
	.flush_kern_dcache_area = nop_flush_kern_dcache_area,
	.dma_map_area = nop_dma_map_area,
	.dma_unmap_area = nop_dma_unmap_area,
	.dma_flush_range = nop_dma_flush_range,
};
#endif
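
/*
 * A minimal sketch of what the stubs above amount to (the real ones
 * live in assembly, e.g. arch/arm/mm/cache-nop.S): each simply
 * returns, since a core with no cache to maintain needs no maintenance:
 *
 *	void nop_flush_icache_all(void) { }
 *	int nop_coherent_user_range(unsigned long s, unsigned long e)
 *	{
 *		return 0;	// always succeeds
 *	}
 */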

#ifdef CONFIG_CPU_CACHE_V7M
void v7m_flush_icache_all(void);
void v7m_flush_kern_cache_all(void);
void v7m_flush_user_cache_all(void);
void v7m_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void v7m_coherent_kern_range(unsigned long, unsigned long);
int v7m_coherent_user_range(unsigned long, unsigned long);
void v7m_flush_kern_dcache_area(void *, size_t);
void v7m_dma_map_area(const void *, size_t, int);
void v7m_dma_unmap_area(const void *, size_t, int);
void v7m_dma_flush_range(const void *, const void *);

struct cpu_cache_fns v7m_cache_fns __initconst = {
	.flush_icache_all = v7m_flush_icache_all,
	.flush_kern_all = v7m_flush_kern_cache_all,
	.flush_kern_louis = v7m_flush_kern_cache_all,
	.flush_user_all = v7m_flush_user_cache_all,
	.flush_user_range = v7m_flush_user_cache_range,
	.coherent_kern_range = v7m_coherent_kern_range,
	.coherent_user_range = v7m_coherent_user_range,
	.flush_kern_dcache_area = v7m_flush_kern_dcache_area,
	.dma_map_area = v7m_dma_map_area,
	.dma_unmap_area = v7m_dma_unmap_area,
	.dma_flush_range = v7m_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM1020
void arm1020_flush_icache_all(void);
void arm1020_flush_kern_cache_all(void);
void arm1020_flush_user_cache_all(void);
void arm1020_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm1020_coherent_kern_range(unsigned long, unsigned long);
int arm1020_coherent_user_range(unsigned long, unsigned long);
void arm1020_flush_kern_dcache_area(void *, size_t);
void arm1020_dma_map_area(const void *, size_t, int);
void arm1020_dma_unmap_area(const void *, size_t, int);
void arm1020_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm1020_cache_fns __initconst = {
	.flush_icache_all = arm1020_flush_icache_all,
	.flush_kern_all = arm1020_flush_kern_cache_all,
	.flush_kern_louis = arm1020_flush_kern_cache_all,
	.flush_user_all = arm1020_flush_user_cache_all,
	.flush_user_range = arm1020_flush_user_cache_range,
	.coherent_kern_range = arm1020_coherent_kern_range,
	.coherent_user_range = arm1020_coherent_user_range,
	.flush_kern_dcache_area = arm1020_flush_kern_dcache_area,
	.dma_map_area = arm1020_dma_map_area,
	.dma_unmap_area = arm1020_dma_unmap_area,
	.dma_flush_range = arm1020_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM1020E
void arm1020e_flush_icache_all(void);
void arm1020e_flush_kern_cache_all(void);
void arm1020e_flush_user_cache_all(void);
void arm1020e_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm1020e_coherent_kern_range(unsigned long, unsigned long);
int arm1020e_coherent_user_range(unsigned long, unsigned long);
void arm1020e_flush_kern_dcache_area(void *, size_t);
void arm1020e_dma_map_area(const void *, size_t, int);
void arm1020e_dma_unmap_area(const void *, size_t, int);
void arm1020e_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm1020e_cache_fns __initconst = {
	.flush_icache_all = arm1020e_flush_icache_all,
	.flush_kern_all = arm1020e_flush_kern_cache_all,
	.flush_kern_louis = arm1020e_flush_kern_cache_all,
	.flush_user_all = arm1020e_flush_user_cache_all,
	.flush_user_range = arm1020e_flush_user_cache_range,
	.coherent_kern_range = arm1020e_coherent_kern_range,
	.coherent_user_range = arm1020e_coherent_user_range,
	.flush_kern_dcache_area = arm1020e_flush_kern_dcache_area,
	.dma_map_area = arm1020e_dma_map_area,
	.dma_unmap_area = arm1020e_dma_unmap_area,
	.dma_flush_range = arm1020e_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM1022
void arm1022_flush_icache_all(void);
void arm1022_flush_kern_cache_all(void);
void arm1022_flush_user_cache_all(void);
void arm1022_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm1022_coherent_kern_range(unsigned long, unsigned long);
int arm1022_coherent_user_range(unsigned long, unsigned long);
void arm1022_flush_kern_dcache_area(void *, size_t);
void arm1022_dma_map_area(const void *, size_t, int);
void arm1022_dma_unmap_area(const void *, size_t, int);
void arm1022_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm1022_cache_fns __initconst = {
	.flush_icache_all = arm1022_flush_icache_all,
	.flush_kern_all = arm1022_flush_kern_cache_all,
	.flush_kern_louis = arm1022_flush_kern_cache_all,
	.flush_user_all = arm1022_flush_user_cache_all,
	.flush_user_range = arm1022_flush_user_cache_range,
	.coherent_kern_range = arm1022_coherent_kern_range,
	.coherent_user_range = arm1022_coherent_user_range,
	.flush_kern_dcache_area = arm1022_flush_kern_dcache_area,
	.dma_map_area = arm1022_dma_map_area,
	.dma_unmap_area = arm1022_dma_unmap_area,
	.dma_flush_range = arm1022_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM1026
void arm1026_flush_icache_all(void);
void arm1026_flush_kern_cache_all(void);
void arm1026_flush_user_cache_all(void);
void arm1026_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm1026_coherent_kern_range(unsigned long, unsigned long);
int arm1026_coherent_user_range(unsigned long, unsigned long);
void arm1026_flush_kern_dcache_area(void *, size_t);
void arm1026_dma_map_area(const void *, size_t, int);
void arm1026_dma_unmap_area(const void *, size_t, int);
void arm1026_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm1026_cache_fns __initconst = {
	.flush_icache_all = arm1026_flush_icache_all,
	.flush_kern_all = arm1026_flush_kern_cache_all,
	.flush_kern_louis = arm1026_flush_kern_cache_all,
	.flush_user_all = arm1026_flush_user_cache_all,
	.flush_user_range = arm1026_flush_user_cache_range,
	.coherent_kern_range = arm1026_coherent_kern_range,
	.coherent_user_range = arm1026_coherent_user_range,
	.flush_kern_dcache_area = arm1026_flush_kern_dcache_area,
	.dma_map_area = arm1026_dma_map_area,
	.dma_unmap_area = arm1026_dma_unmap_area,
	.dma_flush_range = arm1026_dma_flush_range,
};
#endif

#if defined(CONFIG_CPU_ARM920T) && !defined(CONFIG_CPU_DCACHE_WRITETHROUGH)
void arm920_flush_icache_all(void);
void arm920_flush_kern_cache_all(void);
void arm920_flush_user_cache_all(void);
void arm920_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm920_coherent_kern_range(unsigned long, unsigned long);
int arm920_coherent_user_range(unsigned long, unsigned long);
void arm920_flush_kern_dcache_area(void *, size_t);
void arm920_dma_map_area(const void *, size_t, int);
void arm920_dma_unmap_area(const void *, size_t, int);
void arm920_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm920_cache_fns __initconst = {
	.flush_icache_all = arm920_flush_icache_all,
	.flush_kern_all = arm920_flush_kern_cache_all,
	.flush_kern_louis = arm920_flush_kern_cache_all,
	.flush_user_all = arm920_flush_user_cache_all,
	.flush_user_range = arm920_flush_user_cache_range,
	.coherent_kern_range = arm920_coherent_kern_range,
	.coherent_user_range = arm920_coherent_user_range,
	.flush_kern_dcache_area = arm920_flush_kern_dcache_area,
	.dma_map_area = arm920_dma_map_area,
	.dma_unmap_area = arm920_dma_unmap_area,
	.dma_flush_range = arm920_dma_flush_range,
};
#endif

#if defined(CONFIG_CPU_ARM922T) && !defined(CONFIG_CPU_DCACHE_WRITETHROUGH)
void arm922_flush_icache_all(void);
void arm922_flush_kern_cache_all(void);
void arm922_flush_user_cache_all(void);
void arm922_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm922_coherent_kern_range(unsigned long, unsigned long);
int arm922_coherent_user_range(unsigned long, unsigned long);
void arm922_flush_kern_dcache_area(void *, size_t);
void arm922_dma_map_area(const void *, size_t, int);
void arm922_dma_unmap_area(const void *, size_t, int);
void arm922_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm922_cache_fns __initconst = {
	.flush_icache_all = arm922_flush_icache_all,
	.flush_kern_all = arm922_flush_kern_cache_all,
	.flush_kern_louis = arm922_flush_kern_cache_all,
	.flush_user_all = arm922_flush_user_cache_all,
	.flush_user_range = arm922_flush_user_cache_range,
	.coherent_kern_range = arm922_coherent_kern_range,
	.coherent_user_range = arm922_coherent_user_range,
	.flush_kern_dcache_area = arm922_flush_kern_dcache_area,
	.dma_map_area = arm922_dma_map_area,
	.dma_unmap_area = arm922_dma_unmap_area,
	.dma_flush_range = arm922_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM925T
void arm925_flush_icache_all(void);
void arm925_flush_kern_cache_all(void);
void arm925_flush_user_cache_all(void);
void arm925_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm925_coherent_kern_range(unsigned long, unsigned long);
int arm925_coherent_user_range(unsigned long, unsigned long);
void arm925_flush_kern_dcache_area(void *, size_t);
void arm925_dma_map_area(const void *, size_t, int);
void arm925_dma_unmap_area(const void *, size_t, int);
void arm925_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm925_cache_fns __initconst = {
	.flush_icache_all = arm925_flush_icache_all,
	.flush_kern_all = arm925_flush_kern_cache_all,
	.flush_kern_louis = arm925_flush_kern_cache_all,
	.flush_user_all = arm925_flush_user_cache_all,
	.flush_user_range = arm925_flush_user_cache_range,
	.coherent_kern_range = arm925_coherent_kern_range,
	.coherent_user_range = arm925_coherent_user_range,
	.flush_kern_dcache_area = arm925_flush_kern_dcache_area,
	.dma_map_area = arm925_dma_map_area,
	.dma_unmap_area = arm925_dma_unmap_area,
	.dma_flush_range = arm925_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM926T
void arm926_flush_icache_all(void);
void arm926_flush_kern_cache_all(void);
void arm926_flush_user_cache_all(void);
void arm926_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm926_coherent_kern_range(unsigned long, unsigned long);
int arm926_coherent_user_range(unsigned long, unsigned long);
void arm926_flush_kern_dcache_area(void *, size_t);
void arm926_dma_map_area(const void *, size_t, int);
void arm926_dma_unmap_area(const void *, size_t, int);
void arm926_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm926_cache_fns __initconst = {
	.flush_icache_all = arm926_flush_icache_all,
	.flush_kern_all = arm926_flush_kern_cache_all,
	.flush_kern_louis = arm926_flush_kern_cache_all,
	.flush_user_all = arm926_flush_user_cache_all,
	.flush_user_range = arm926_flush_user_cache_range,
	.coherent_kern_range = arm926_coherent_kern_range,
	.coherent_user_range = arm926_coherent_user_range,
	.flush_kern_dcache_area = arm926_flush_kern_dcache_area,
	.dma_map_area = arm926_dma_map_area,
	.dma_unmap_area = arm926_dma_unmap_area,
	.dma_flush_range = arm926_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM940T
void arm940_flush_icache_all(void);
void arm940_flush_kern_cache_all(void);
void arm940_flush_user_cache_all(void);
void arm940_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm940_coherent_kern_range(unsigned long, unsigned long);
int arm940_coherent_user_range(unsigned long, unsigned long);
void arm940_flush_kern_dcache_area(void *, size_t);
void arm940_dma_map_area(const void *, size_t, int);
void arm940_dma_unmap_area(const void *, size_t, int);
void arm940_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm940_cache_fns __initconst = {
	.flush_icache_all = arm940_flush_icache_all,
	.flush_kern_all = arm940_flush_kern_cache_all,
	.flush_kern_louis = arm940_flush_kern_cache_all,
	.flush_user_all = arm940_flush_user_cache_all,
	.flush_user_range = arm940_flush_user_cache_range,
	.coherent_kern_range = arm940_coherent_kern_range,
	.coherent_user_range = arm940_coherent_user_range,
	.flush_kern_dcache_area = arm940_flush_kern_dcache_area,
	.dma_map_area = arm940_dma_map_area,
	.dma_unmap_area = arm940_dma_unmap_area,
	.dma_flush_range = arm940_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_ARM946E
void arm946_flush_icache_all(void);
void arm946_flush_kern_cache_all(void);
void arm946_flush_user_cache_all(void);
void arm946_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void arm946_coherent_kern_range(unsigned long, unsigned long);
int arm946_coherent_user_range(unsigned long, unsigned long);
void arm946_flush_kern_dcache_area(void *, size_t);
void arm946_dma_map_area(const void *, size_t, int);
void arm946_dma_unmap_area(const void *, size_t, int);
void arm946_dma_flush_range(const void *, const void *);

struct cpu_cache_fns arm946_cache_fns __initconst = {
	.flush_icache_all = arm946_flush_icache_all,
	.flush_kern_all = arm946_flush_kern_cache_all,
	.flush_kern_louis = arm946_flush_kern_cache_all,
	.flush_user_all = arm946_flush_user_cache_all,
	.flush_user_range = arm946_flush_user_cache_range,
	.coherent_kern_range = arm946_coherent_kern_range,
	.coherent_user_range = arm946_coherent_user_range,
	.flush_kern_dcache_area = arm946_flush_kern_dcache_area,
	.dma_map_area = arm946_dma_map_area,
	.dma_unmap_area = arm946_dma_unmap_area,
	.dma_flush_range = arm946_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_XSCALE
void xscale_flush_icache_all(void);
void xscale_flush_kern_cache_all(void);
void xscale_flush_user_cache_all(void);
void xscale_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void xscale_coherent_kern_range(unsigned long, unsigned long);
int xscale_coherent_user_range(unsigned long, unsigned long);
void xscale_flush_kern_dcache_area(void *, size_t);
void xscale_dma_map_area(const void *, size_t, int);
void xscale_dma_unmap_area(const void *, size_t, int);
void xscale_dma_flush_range(const void *, const void *);

struct cpu_cache_fns xscale_cache_fns __initconst = {
	.flush_icache_all = xscale_flush_icache_all,
	.flush_kern_all = xscale_flush_kern_cache_all,
	.flush_kern_louis = xscale_flush_kern_cache_all,
	.flush_user_all = xscale_flush_user_cache_all,
	.flush_user_range = xscale_flush_user_cache_range,
	.coherent_kern_range = xscale_coherent_kern_range,
	.coherent_user_range = xscale_coherent_user_range,
	.flush_kern_dcache_area = xscale_flush_kern_dcache_area,
	.dma_map_area = xscale_dma_map_area,
	.dma_unmap_area = xscale_dma_unmap_area,
	.dma_flush_range = xscale_dma_flush_range,
};

/* The 80200 A0 and A1 need a special quirk for dma_map_area() */
void xscale_80200_A0_A1_dma_map_area(const void *, size_t, int);

struct cpu_cache_fns xscale_80200_A0_A1_cache_fns __initconst = {
	.flush_icache_all = xscale_flush_icache_all,
	.flush_kern_all = xscale_flush_kern_cache_all,
	.flush_kern_louis = xscale_flush_kern_cache_all,
	.flush_user_all = xscale_flush_user_cache_all,
	.flush_user_range = xscale_flush_user_cache_range,
	.coherent_kern_range = xscale_coherent_kern_range,
	.coherent_user_range = xscale_coherent_user_range,
	.flush_kern_dcache_area = xscale_flush_kern_dcache_area,
	.dma_map_area = xscale_80200_A0_A1_dma_map_area,
	.dma_unmap_area = xscale_dma_unmap_area,
	.dma_flush_range = xscale_dma_flush_range,
};
#endif
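
/*
 * Only dma_map_area is overridden in the vtable above; everything else
 * reuses the stock xscale routines, so this variant presumably exists
 * purely to work around a cache-maintenance erratum in those early
 * 80200 steppings.
 */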

#ifdef CONFIG_CPU_XSC3
void xsc3_flush_icache_all(void);
void xsc3_flush_kern_cache_all(void);
void xsc3_flush_user_cache_all(void);
void xsc3_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void xsc3_coherent_kern_range(unsigned long, unsigned long);
int xsc3_coherent_user_range(unsigned long, unsigned long);
void xsc3_flush_kern_dcache_area(void *, size_t);
void xsc3_dma_map_area(const void *, size_t, int);
void xsc3_dma_unmap_area(const void *, size_t, int);
void xsc3_dma_flush_range(const void *, const void *);

struct cpu_cache_fns xsc3_cache_fns __initconst = {
	.flush_icache_all = xsc3_flush_icache_all,
	.flush_kern_all = xsc3_flush_kern_cache_all,
	.flush_kern_louis = xsc3_flush_kern_cache_all,
	.flush_user_all = xsc3_flush_user_cache_all,
	.flush_user_range = xsc3_flush_user_cache_range,
	.coherent_kern_range = xsc3_coherent_kern_range,
	.coherent_user_range = xsc3_coherent_user_range,
	.flush_kern_dcache_area = xsc3_flush_kern_dcache_area,
	.dma_map_area = xsc3_dma_map_area,
	.dma_unmap_area = xsc3_dma_unmap_area,
	.dma_flush_range = xsc3_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_MOHAWK
void mohawk_flush_icache_all(void);
void mohawk_flush_kern_cache_all(void);
void mohawk_flush_user_cache_all(void);
void mohawk_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void mohawk_coherent_kern_range(unsigned long, unsigned long);
int mohawk_coherent_user_range(unsigned long, unsigned long);
void mohawk_flush_kern_dcache_area(void *, size_t);
void mohawk_dma_map_area(const void *, size_t, int);
void mohawk_dma_unmap_area(const void *, size_t, int);
void mohawk_dma_flush_range(const void *, const void *);

struct cpu_cache_fns mohawk_cache_fns __initconst = {
	.flush_icache_all = mohawk_flush_icache_all,
	.flush_kern_all = mohawk_flush_kern_cache_all,
	.flush_kern_louis = mohawk_flush_kern_cache_all,
	.flush_user_all = mohawk_flush_user_cache_all,
	.flush_user_range = mohawk_flush_user_cache_range,
	.coherent_kern_range = mohawk_coherent_kern_range,
	.coherent_user_range = mohawk_coherent_user_range,
	.flush_kern_dcache_area = mohawk_flush_kern_dcache_area,
	.dma_map_area = mohawk_dma_map_area,
	.dma_unmap_area = mohawk_dma_unmap_area,
	.dma_flush_range = mohawk_dma_flush_range,
};
#endif

#ifdef CONFIG_CPU_FEROCEON
void feroceon_flush_icache_all(void);
void feroceon_flush_kern_cache_all(void);
void feroceon_flush_user_cache_all(void);
void feroceon_flush_user_cache_range(unsigned long, unsigned long, unsigned int);
void feroceon_coherent_kern_range(unsigned long, unsigned long);
int feroceon_coherent_user_range(unsigned long, unsigned long);
void feroceon_flush_kern_dcache_area(void *, size_t);
void feroceon_dma_map_area(const void *, size_t, int);
void feroceon_dma_unmap_area(const void *, size_t, int);
void feroceon_dma_flush_range(const void *, const void *);

struct cpu_cache_fns feroceon_cache_fns __initconst = {
	.flush_icache_all = feroceon_flush_icache_all,
	.flush_kern_all = feroceon_flush_kern_cache_all,
	.flush_kern_louis = feroceon_flush_kern_cache_all,
	.flush_user_all = feroceon_flush_user_cache_all,
	.flush_user_range = feroceon_flush_user_cache_range,
	.coherent_kern_range = feroceon_coherent_kern_range,
	.coherent_user_range = feroceon_coherent_user_range,
	.flush_kern_dcache_area = feroceon_flush_kern_dcache_area,
	.dma_map_area = feroceon_dma_map_area,
	.dma_unmap_area = feroceon_dma_unmap_area,
	.dma_flush_range = feroceon_dma_flush_range,
};
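
/*
 * The _range variants below presumably use Feroceon's cache operations
 * on whole address ranges (a single CP15 operation covering start..end)
 * instead of iterating line by line; the remaining slots reuse the
 * standard feroceon routines.
 */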
void feroceon_range_flush_kern_dcache_area(void *, size_t);
void feroceon_range_dma_map_area(const void *, size_t, int);
void feroceon_range_dma_flush_range(const void *, const void *);

struct cpu_cache_fns feroceon_range_cache_fns __initconst = {
	.flush_icache_all = feroceon_flush_icache_all,
	.flush_kern_all = feroceon_flush_kern_cache_all,
	.flush_kern_louis = feroceon_flush_kern_cache_all,
	.flush_user_all = feroceon_flush_user_cache_all,
	.flush_user_range = feroceon_flush_user_cache_range,
	.coherent_kern_range = feroceon_coherent_kern_range,
	.coherent_user_range = feroceon_coherent_user_range,
	.flush_kern_dcache_area = feroceon_range_flush_kern_dcache_area,
	.dma_map_area = feroceon_range_dma_map_area,
	.dma_unmap_area = feroceon_dma_unmap_area,
	.dma_flush_range = feroceon_range_dma_flush_range,
};
#endif