1 /* SPDX-License-Identifier: GPL-2.0-only */
5 * Copyright (C) 2001 Deep Blue Solutions Ltd.
6 * Copyright (C) 2012 ARM Ltd.
9 #include <linux/errno.h>
10 #include <linux/linkage.h>
11 #include <linux/init.h>
12 #include <asm/assembler.h>
13 #include <asm/cpufeature.h>
14 #include <asm/alternative.h>
15 #include <asm/asm-uaccess.h>
18 * flush_icache_range(start,end)
20 * Ensure that the I and D caches are coherent within specified region.
21 * This is typically used when code has been written to a memory region,
22 * and will be executed.
24 * - start - virtual start address of region
25 * - end - virtual end address of region
27 ENTRY(__flush_icache_range)
// NOTE(review): __flush_icache_range falls through into
// __flush_cache_user_range below — the two share one implementation.
// Interior lines appear truncated in this view; do not assume the
// visible instructions are the complete sequence.
31 * __flush_cache_user_range(start,end)
33 * Ensure that the I and D caches are coherent within specified region.
34 * This is typically used when code has been written to a memory region,
35 * and will be executed.
37 * - start - virtual start address of region
38 * - end - virtual end address of region
40 ENTRY(__flush_cache_user_range)
// Enable userspace access via TTBR0 so the "user" cache ops below may
// legitimately fault on user addresses.
41 uaccess_ttbr0_enable x2, x3, x4
// CPUs advertising IDC need no D-cache clean to the PoU for I/D
// coherence, so the clean loop is patched out at runtime.
42 alternative_if ARM64_HAS_CACHE_IDC
45 alternative_else_nop_endif
46 dcache_line_size x2, x3
// Clean each D-cache line to the PoU; on parts with the
// ARM64_WORKAROUND_CLEAN_CACHE erratum, "dc civac" is substituted for
// "dc cvau". The 9f argument is the fault fixup label — TODO confirm
// against the user_alt macro definition.
50 user_alt 9f, "dc cvau, x4", "dc civac, x4", ARM64_WORKAROUND_CLEAN_CACHE
// CPUs advertising DIC need no explicit I-cache invalidation.
57 alternative_if ARM64_HAS_CACHE_DIC
60 alternative_else_nop_endif
// Invalidate the I-cache over [x0, x1); faults branch to label 9.
61 invalidate_icache_by_line x0, x1, x2, x3, 9f
// Drop userspace access again before returning.
64 uaccess_ttbr0_disable x1, x2
69 ENDPROC(__flush_icache_range)
70 ENDPROC(__flush_cache_user_range)
73 * invalidate_icache_range(start,end)
75 * Ensure that the I cache is invalid within specified region.
77 * - start - virtual start address of region
78 * - end - virtual end address of region
80 ENTRY(invalidate_icache_range)
// CPUs advertising DIC do not require I-cache invalidation for I/D
// coherence; the maintenance below is patched out on such parts.
// NOTE(review): the early-return path inside this alternative is not
// visible here (lines truncated).
81 alternative_if ARM64_HAS_CACHE_DIC
85 alternative_else_nop_endif
// Enable userspace access via TTBR0 so the ops may fault on user VAs.
87 uaccess_ttbr0_enable x2, x3, x4
// Invalidate I-cache lines covering [x0, x1); faults branch to label 2.
89 invalidate_icache_by_line x0, x1, x2, x3, 2f
91 uaccess_ttbr0_disable x1, x2
97 ENDPROC(invalidate_icache_range)
100 * __flush_dcache_area(kaddr, size)
102 * Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
103 * are cleaned and invalidated to the PoC.
105 * - kaddr - kernel address
106 * - size - size in question
108 ENTRY(__flush_dcache_area)
// Clean+invalidate ("civac") every D-cache line covering
// [x0, x0 + x1) to the PoC, with the "sy" barrier domain.
109 dcache_by_line_op civac, sy, x0, x1, x2, x3
111 ENDPIPROC(__flush_dcache_area)
114 * __clean_dcache_area_pou(kaddr, size)
116 * Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
117 * are cleaned to the PoU.
119 * - kaddr - kernel address
120 * - size - size in question
122 ENTRY(__clean_dcache_area_pou)
// CPUs advertising IDC need no D-cache clean to the PoU; the op is
// patched out at runtime. NOTE(review): the early-return inside this
// alternative is not visible here (lines truncated).
123 alternative_if ARM64_HAS_CACHE_IDC
126 alternative_else_nop_endif
// Clean ("cvau") each line to the PoU, "ish" barrier domain.
127 dcache_by_line_op cvau, ish, x0, x1, x2, x3
129 ENDPROC(__clean_dcache_area_pou)
132 * __inval_dcache_area(kaddr, size)
134 * Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
135 * are invalidated. Any partial lines at the ends of the interval are
136 * also cleaned to PoC to prevent data loss.
138 * - kaddr - kernel address
139 * - size - size in question
141 ENTRY(__inval_dcache_area)
// NOTE(review): falls through into __dma_inv_area — one implementation
// serves both entry points. Loop branches and some labels are not
// visible in this view (lines truncated).
145 * __dma_inv_area(start, size)
146 * - start - virtual start address of region
147 * - size - size in question
151 dcache_line_size x2, x3
// Partial lines at either end of the interval are cleaned AND
// invalidated ("dc civac") instead of just invalidated, so dirty data
// sharing the line with the interval is not lost.
153 tst x1, x3 // end cache line aligned?
156 dc civac, x1 // clean & invalidate D / U line
157 1: tst x0, x3 // start cache line aligned?
160 dc civac, x0 // clean & invalidate D / U line
// Lines fully inside the interval can simply be invalidated.
162 2: dc ivac, x0 // invalidate D / U line
168 ENDPIPROC(__inval_dcache_area)
169 ENDPROC(__dma_inv_area)
172 * __clean_dcache_area_poc(kaddr, size)
174 * Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
175 * are cleaned to the PoC.
177 * - kaddr - kernel address
178 * - size - size in question
180 ENTRY(__clean_dcache_area_poc)
// NOTE(review): falls through into __dma_clean_area — the two entry
// points share this single implementation.
184 * __dma_clean_area(start, size)
185 * - start - virtual start address of region
186 * - size - size in question
// Clean ("cvac") each D-cache line to the PoC, "sy" barrier domain.
189 dcache_by_line_op cvac, sy, x0, x1, x2, x3
191 ENDPIPROC(__clean_dcache_area_poc)
192 ENDPROC(__dma_clean_area)
195 * __clean_dcache_area_pop(kaddr, size)
197 * Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
198 * are cleaned to the PoP.
200 * - kaddr - kernel address
201 * - size - size in question
203 ENTRY(__clean_dcache_area_pop)
// Without the DC CVAP instruction (ARM64_HAS_DCPOP not set), cleaning
// to the Point of Persistence falls back to cleaning to the Point of
// Coherency, which is at least as far out.
204 alternative_if_not ARM64_HAS_DCPOP
205 b __clean_dcache_area_poc
206 alternative_else_nop_endif
// Clean ("cvap") each D-cache line to the PoP, "sy" barrier domain.
207 dcache_by_line_op cvap, sy, x0, x1, x2, x3
209 ENDPIPROC(__clean_dcache_area_pop)
212 * __dma_flush_area(start, size)
214 * clean & invalidate D / U line
216 * - start - virtual start address of region
217 * - size - size in question
219 ENTRY(__dma_flush_area)
// Clean+invalidate ("civac") each line covering [x0, x0 + x1) to the
// PoC, "sy" barrier domain — the full flush used around DMA.
220 dcache_by_line_op civac, sy, x0, x1, x2, x3
222 ENDPIPROC(__dma_flush_area)
225 * __dma_map_area(start, size, dir)
226 * - start - kernel virtual start address
227 * - size - size of region
228 * - dir - DMA direction
230 ENTRY(__dma_map_area)
// Dispatch on the DMA direction in w2. NOTE(review): the conditional
// branch and tail-call targets after this compare are not visible in
// this view (lines truncated) — presumably clean vs. invalidate
// depending on direction; confirm against the full source.
231 cmp w2, #DMA_FROM_DEVICE
234 ENDPIPROC(__dma_map_area)
237 * __dma_unmap_area(start, size, dir)
238 * - start - kernel virtual start address
239 * - size - size of region
240 * - dir - DMA direction
242 ENTRY(__dma_unmap_area)
// Dispatch on the DMA direction in w2. NOTE(review): the conditional
// branch and tail-call targets after this compare are not visible in
// this view (lines truncated) — presumably invalidation is skipped for
// DMA_TO_DEVICE; confirm against the full source.
243 cmp w2, #DMA_TO_DEVICE
246 ENDPIPROC(__dma_unmap_area)