/*
 * Based on arch/arm/include/asm/assembler.h, arch/arm/mm/proc-macros.S
 *
 * Copyright (C) 1996-2000 Russell King
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif
#ifndef __ASM_ASSEMBLER_H
#define __ASM_ASSEMBLER_H

#include <asm/asm-offsets.h>
#include <asm/cpufeature.h>
#include <asm/mmu_context.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/ptrace.h>
#include <asm/thread_info.h>
/*
 * Enable and disable interrupts.
 */
	.macro	save_and_disable_irq, flags
	mrs	\flags, daif
	msr	daifset, #2
	.endm

	.macro	restore_irq, flags
	msr	daif, \flags
	.endm
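/*
 * Usage sketch (illustrative, not part of the original header; the choice
 * of x3 as the flags register is an assumption):
 *
 *	save_and_disable_irq x3		// x3 = old DAIF, IRQs now masked
 *	...				// critical section
 *	restore_irq x3			// restore the saved mask state
 */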
/*
 * Enable and disable debug exceptions.
 */
	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #1			// clear MDSCR_EL1.SS
	msr	mdscr_el1, \tmp
	isb	// Synchronise with enable_dbg
9990:
	.endm

	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #1			// set MDSCR_EL1.SS
	msr	mdscr_el1, \tmp
9990:
	.endm
/*
 * Enable both debug exceptions and interrupts. This is likely to be
 * faster than two daifclr operations, since writes to this register
 * are self-synchronising.
 */
	.macro	enable_dbg_and_irq
	msr	daifclr, #(8 | 2)
	.endm
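/*
 * Note (added for clarity, not in the original header): the daifset/daifclr
 * immediate is a 4-bit D-A-I-F mask, so #8 targets the Debug bit, #2 the
 * IRQ bit, and #(8 | 2) clears both with one self-synchronising write:
 *
 *	msr	daifclr, #(8 | 2)	// equivalent to enable_dbg + enable_irq
 */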
/*
 * SMP data memory barrier
 */
	.macro	smp_dmb, opt
	dmb	\opt
	.endm
/*
 * Emit an entry into the exception table
 */
	.macro		_asm_extable, from, to
	.pushsection	__ex_table, "a"
	.align		3
	.long		(\from - .), (\to - .)
	.popsection
	.endm

#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l
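/*
 * Usage sketch (illustrative; the label and fixup body are hypothetical,
 * supplied by the caller):
 *
 *	USER(9f, strb w2, [x0])		// a fault here branches to 9:
 *	...
 * 9:	mov	x0, #-EFAULT		// caller's fixup path
 */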
lr	.req	x30		// link register
/*
 * Select code when configured for BE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * Select code when configured for LE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif
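/*
 * Usage sketch (illustrative): byte-swap loaded data only on big-endian
 * kernels, so a single code sequence serves both endiannesses:
 *
 * CPU_BE(	rev	w1, w1	)	// emitted only if CONFIG_CPU_BIG_ENDIAN
 */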
/*
 * Define a macro that constructs a 64-bit value by concatenating two
 * 32-bit registers. Note that on big endian systems the order of the
 * registers is swapped.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm
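/*
 * Usage sketch (illustrative): merge a 64-bit value passed in two 32-bit
 * registers. Call sites list the registers in ascending order; the swapped
 * parameter order above selects the right half on each endianness:
 *
 *	regs_to_64 x2, x2, x3		// x2 = (x3 << 32) | x2 on LE
 */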
/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC when running
 * in core kernel context. In module context, a movz/movk sequence
 * is used, since modules may be loaded far away from the kernel
 * when KASLR is in effect.
 */
/*
 * @dst: destination register (64 bit wide)
 * @sym: name of the symbol
 */
	.macro	adr_l, dst, sym
#ifndef MODULE
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
#else
	movz	\dst, #:abs_g3:\sym
	movk	\dst, #:abs_g2_nc:\sym
	movk	\dst, #:abs_g1_nc:\sym
	movk	\dst, #:abs_g0_nc:\sym
#endif
	.endm
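/*
 * Usage sketch (illustrative; 'memstart_addr' is just an example symbol):
 *
 *	adr_l	x0, memstart_addr	// x0 = runtime address of the symbol
 */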
/*
 * @dst: destination register (32 or 64 bit wide)
 * @sym: name of the symbol
 * @tmp: optional 64-bit scratch register to be used if <dst> is a
 *       32-bit wide register, in which case it cannot be used to hold
 *       the address
 */
	.macro	ldr_l, dst, sym, tmp=
#ifndef MODULE
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
#else
	.ifb	\tmp
	adr_l	\dst, \sym
	ldr	\dst, [\dst]
	.else
	adr_l	\tmp, \sym
	ldr	\dst, [\tmp]
	.endif
#endif
	.endm
/*
 * @src: source register (32 or 64 bit wide)
 * @sym: name of the symbol
 * @tmp: mandatory 64-bit scratch register to calculate the address
 *       while <src> needs to be preserved.
 */
	.macro	str_l, src, sym, tmp
#ifndef MODULE
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
#else
	adr_l	\tmp, \sym
	str	\src, [\tmp]
#endif
	.endm
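/*
 * Usage sketch (illustrative; 'memstart_addr' is just an example symbol):
 *
 *	ldr_l	x1, memstart_addr	// x1 = memstart_addr
 *	str_l	x1, memstart_addr, x2	// memstart_addr = x1, x2 clobbered
 */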
/*
 * @dst: Result of per_cpu(sym, smp_processor_id())
 * @sym: The name of the per-cpu variable
 * @tmp: scratch register
 */
	.macro adr_this_cpu, dst, sym, tmp
	adr_l	\dst, \sym
	mrs	\tmp, tpidr_el1
	add	\dst, \dst, \tmp
	.endm
/*
 * @dst: Result of READ_ONCE(per_cpu(sym, smp_processor_id()))
 * @sym: The name of the per-cpu variable
 * @tmp: scratch register
 */
	.macro ldr_this_cpu dst, sym, tmp
	adr_l	\dst, \sym
	mrs	\tmp, tpidr_el1
	ldr	\dst, [\dst, \tmp]
	.endm
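/*
 * Usage sketch (illustrative; 'irq_stack' and 'irq_count' stand in for any
 * per-cpu symbols visible to assembly):
 *
 *	adr_this_cpu x0, irq_stack, x1	// x0 = this_cpu_ptr(&irq_stack)
 *	ldr_this_cpu x2, irq_count, x1	// x2 = this CPU's irq_count value
 */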
/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm
/*
 * mmid - get context id from mm pointer (mm->context.id)
 */
	.macro	mmid, rd, rn
	ldr	\rd, [\rn, #MM_CONTEXT_ID]
	.endm
/*
 * read_ctr - read CTR_EL0. If the system has mismatched
 * cache line sizes, provide the system wide safe value
 * from arm64_ftr_reg_ctrel0.sys_val
 */
	.macro	read_ctr, reg
alternative_if_not ARM64_MISMATCHED_CACHE_LINE_SIZE
	mrs	\reg, ctr_el0			// read CTR
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
	.endm
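/*
 * Usage sketch (illustrative):
 *
 *	read_ctr x0	// x0 = CTR_EL0, or the system-wide safe value on
 *			// systems with mismatched cache line sizes
 */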
/*
 * raw_dcache_line_size - get the minimum D-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm
/*
 * dcache_line_size - get the safe D-cache line size across all CPUs
 */
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm		\tmp, \tmp, #16, #19	// cache line size encoding
	mov		\reg, #4		// bytes per word
	lsl		\reg, \reg, \tmp	// actual cache line size
	.endm
/*
 * raw_icache_line_size - get the minimum I-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm
/*
 * icache_line_size - get the safe I-cache line size across all CPUs
 */
	.macro	icache_line_size, reg, tmp
	read_ctr	\tmp
	and		\tmp, \tmp, #0xf	// cache line size encoding
	mov		\reg, #4		// bytes per word
	lsl		\reg, \reg, \tmp	// actual cache line size
	.endm
/*
 * tcr_set_idmap_t0sz - update TCR.T0SZ so that we can load the ID map
 */
	.macro	tcr_set_idmap_t0sz, valreg, tmpreg
#ifndef CONFIG_ARM64_VA_BITS_48
	ldr_l	\tmpreg, idmap_t0sz
	bfi	\valreg, \tmpreg, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
#endif
	.endm
/*
 * Macro to perform a data cache maintenance for the interval
 * [kaddr, kaddr + size)
 *
 *	op:		operation passed to dc instruction
 *	domain:		domain used in dsb instruction
 *	kaddr:		starting virtual address of the region
 *	size:		size of the region
 *	Corrupts:	kaddr, size, tmp1, tmp2
 */
	.macro dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
	dcache_line_size \tmp1, \tmp2
	add	\size, \kaddr, \size
	sub	\tmp2, \tmp1, #1
	bic	\kaddr, \kaddr, \tmp2
9998:
	.if	(\op == cvau || \op == cvac)
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \kaddr
alternative_else
	dc	civac, \kaddr
alternative_endif
	.else
	dc	\op, \kaddr
	.endif
	add	\kaddr, \kaddr, \tmp1
	cmp	\kaddr, \size
	b.lo	9998b
	dsb	\domain
	.endm
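/*
 * Usage sketch (illustrative): clean and invalidate the region [x0, x0 + x1)
 * to the point of coherency, with x2/x3 as scratch:
 *
 *	dcache_by_line_op civac, sy, x0, x1, x2, x3
 */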
/*
 * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present
 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1	// Check ID_AA64DFR0_EL1 PMUVer
	sbfx	\tmpreg, \tmpreg, #8, #4
	cmp	\tmpreg, #1			// Skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
9000:
	.endm
/*
 * copy_page - copy src to dest using temp registers t1-t8
 */
	.macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)
	b.ne	9998b
	.endm
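/*
 * Usage sketch (illustrative): copy the page at [x1] to [x0], clobbering
 * x2-x9 as temporaries:
 *
 *	copy_page x0, x1, x2, x3, x4, x5, x6, x7, x8, x9
 */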
/*
 * Annotate a function as position independent, i.e., safe to be called before
 * the kernel virtual mapping is activated.
 */
#define ENDPIPROC(x)			\
	.globl	__pi_##x;		\
	.type	__pi_##x, %function;	\
	.set	__pi_##x, x;		\
	.size	__pi_##x, . - x;	\
	ENDPROC(x)
/*
 * Emit a 64-bit absolute little endian symbol reference in a way that
 * ensures that it will be resolved at build time, even when building a
 * PIE binary. This requires cooperation from the linker script, which
 * must emit the lo32/hi32 halves individually.
 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm
/*
 * mov_q - move an immediate constant into a 64-bit register using
 *         between 2 and 4 movz/movk instructions (depending on the
 *         magnitude and sign of the operand)
 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm
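/*
 * Usage sketch (illustrative; the constants are arbitrary examples):
 *
 *	mov_q	x0, 0x1234		// fits in 31 bits: movz + movk
 *	mov_q	x1, 0x123456789abc	// fits in 47 bits: movz + 2x movk
 *	mov_q	x2, 0x123456789abcdef0	// full 64 bits: movz + 3x movk
 */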
/*
 * Return the current thread_info.
 */
	.macro	get_thread_info, rd
	mrs	\rd, sp_el0
	.endm
/*
 * Errata workaround prior to TTBR0_EL1 update
 *
 *	val:	TTBR value with new BADDR, preserved
 *	tmp0:	temporary register, clobbered
 *	tmp1:	other temporary register, clobbered
 */
	.macro	pre_ttbr0_update_workaround, val, tmp0, tmp1
#ifdef CONFIG_QCOM_FALKOR_ERRATUM_1003
alternative_if ARM64_WORKAROUND_QCOM_FALKOR_E1003
	mrs	\tmp0, ttbr0_el1
	mov	\tmp1, #FALKOR_RESERVED_ASID
	bfi	\tmp0, \tmp1, #48, #16		// reserved ASID + old BADDR
	msr	ttbr0_el1, \tmp0
	isb
	bfi	\tmp0, \val, #0, #48		// reserved ASID + new BADDR
	msr	ttbr0_el1, \tmp0
	isb
alternative_else_nop_endif
#endif
	.endm
/*
 * Errata workaround post TTBR0_EL1 update.
 */
	.macro	post_ttbr0_update_workaround
#ifdef CONFIG_CAVIUM_ERRATUM_27456
alternative_if ARM64_WORKAROUND_CAVIUM_27456
	ic	iallu
	dsb	nsh
	isb
alternative_else_nop_endif
#endif
	.endm
#endif	/* __ASM_ASSEMBLER_H */