mm: hugetlb: fix hugepage memory leak caused by wrong reserve count
[linux/fpc-iii.git] / arch / arm64 / include / asm / assembler.h
blob: 12eff928ef8b38dd18ae3bd157b12eb918f797a6
/*
 * Based on arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
19 #ifndef __ASSEMBLY__
20 #error "Only include this from assembly code"
21 #endif
23 #ifndef __ASM_ASSEMBLER_H
24 #define __ASM_ASSEMBLER_H
26 #include <asm/ptrace.h>
27 #include <asm/thread_info.h>
/*
 * Stack pushing/popping (register pairs only). Equivalent to store decrement
 * before, load increment after.
 */
	.macro	push, xreg1, xreg2
	stp	\xreg1, \xreg2, [sp, #-16]!	// pre-index: sp -= 16, then store pair
	.endm

	.macro	pop, xreg1, xreg2
	ldp	\xreg1, \xreg2, [sp], #16	// post-index: load pair, then sp += 16
	.endm
/*
 * Enable and disable interrupts.
 */
	.macro	disable_irq
	msr	daifset, #2		// set DAIF.I: mask IRQs
	.endm

	.macro	enable_irq
	msr	daifclr, #2		// clear DAIF.I: unmask IRQs
	.endm
/*
 * Enable and disable debug exceptions.
 */
	.macro	disable_dbg
	msr	daifset, #8		// set DAIF.D: mask debug exceptions
	.endm

	.macro	enable_dbg
	msr	daifclr, #8		// clear DAIF.D: unmask debug exceptions
	.endm
/*
 * Turn off single-step for the task whose thread flags are in \flgs.
 * No-op unless TIF_SINGLESTEP is set. \tmp is clobbered.
 */
	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f	// skip unless task is single-stepping
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #1			// clear bit 0 (software-step enable)
	msr	mdscr_el1, \tmp
	isb					// Synchronise with enable_dbg
9990:
	.endm
/*
 * Turn on single-step for the task whose thread flags are in \flgs.
 * No-op unless TIF_SINGLESTEP is set. \tmp is clobbered.
 */
	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f	// skip unless task is single-stepping
	disable_dbg				// mask debug exceptions while MDSCR is updated
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #1			// set bit 0 (software-step enable)
	msr	mdscr_el1, \tmp
9990:
	.endm
/*
 * Enable both debug exceptions and interrupts. This is likely to be
 * faster than two daifclr operations, since writes to this register
 * are self-synchronising.
 */
	.macro	enable_dbg_and_irq
	msr	daifclr, #(8 | 2)	// clear DAIF.D and DAIF.I in one write
	.endm
/*
 * SMP data memory barrier: thin wrapper so callers can pass the barrier
 * domain/type (e.g. ish, sy) as an argument.
 */
	.macro	smp_dmb, opt
	dmb	\opt
	.endm
/*
 * USER(l, insn...): emit a user-space access instruction together with an
 * exception-table entry, so a fault at the instruction is fixed up by
 * branching to label l.
 */
#define USER(l, x...)				\
9999:	x;					\
	.section __ex_table,"a";		\
	.align	3;				\
	.quad	9999b,l;			\
	.previous
/*
 * Register aliases.
 */
lr	.req	x30		// link register
/*
 * Vector entry: each slot in the exception vector table is 2^7 = 128 bytes,
 * hence the alignment before the branch.
 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm
/*
 * Select code when configured for BE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * Select code when configured for LE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif
/*
 * Define a macro that constructs a 64-bit value by concatenating two
 * 32-bit registers. Note that on big endian systems the order of the
 * registers is swapped.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32	// rd = (hbits << 32) | lbits
	.endm
/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC.
 *
 * @dst: destination register (64 bit wide)
 * @sym: name of the symbol
 * @tmp: optional scratch register to be used if <dst> == sp, which
 *       is not allowed in an adrp instruction
 */
	.macro	adr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym			// page address of the symbol ...
	add	\dst, \dst, :lo12:\sym		// ... plus the low 12 bits
	.else
	adrp	\tmp, \sym			// go via \tmp when \dst may be sp
	add	\dst, \tmp, :lo12:\sym
	.endif
	.endm
/*
 * ldr_l: PC-relative load of <sym> into <dst>.
 *
 * @dst: destination register (32 or 64 bit wide)
 * @sym: name of the symbol
 * @tmp: optional 64-bit scratch register to be used if <dst> is a
 *       32-bit wide register, in which case it cannot be used to hold
 *       the address
 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym			// \dst doubles as address scratch
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym			// address built in \tmp instead
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm
/*
 * str_l: PC-relative store of <src> to <sym>.
 *
 * @src: source register (32 or 64 bit wide)
 * @sym: name of the symbol
 * @tmp: mandatory 64-bit scratch register to calculate the address
 *       while <src> needs to be preserved.
 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm
/*
 * Annotate a function as position independent, i.e., safe to be called before
 * the kernel virtual mapping is activated. This emits a __pi_<x> alias with
 * identical value and size so PI callers can reach it by name.
 */
#define ENDPIPROC(x)			\
	.globl	__pi_##x;		\
	.type	__pi_##x, %function;	\
	.set	__pi_##x, x;		\
	.size	__pi_##x, . - x;	\
	ENDPROC(x)
207 #endif /* __ASM_ASSEMBLER_H */