/*
 * Copyright 2018 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
29 #ifndef __ARCH_LIB_HELPERS_H__
30 #define __ARCH_LIB_HELPERS_H__
/* SCTLR_ELx system control register bit definitions */
#define SCTLR_M		(1 << 0)	/* MMU enable */
#define SCTLR_A		(1 << 1)	/* Alignment check enable */
#define SCTLR_C		(1 << 2)	/* Data/unified cache enable */
#define SCTLR_SA	(1 << 3)	/* Stack alignment check enable */
#define SCTLR_NAA	(1 << 6)	/* non-aligned access STA/LDR */
#define SCTLR_I		(1 << 12)	/* Instruction cache enable */
#define SCTLR_ENDB	(1 << 13)	/* Pointer auth (data B) */
#define SCTLR_WXN	(1 << 19)	/* Write permission implies XN */
#define SCTLR_IESB	(1 << 21)	/* Implicit error sync event */
#define SCTLR_EE	(1 << 25)	/* Exception endianness (BE) */
#define SCTLR_ENDA	(1 << 27)	/* Pointer auth (data A) */
#define SCTLR_ENIB	(1 << 30)	/* Pointer auth (insn B) */
#define SCTLR_ENIA	(1 << 31)	/* Pointer auth (insn A) */
/* Bits that are reserved-as-one in SCTLR and must be preserved when set */
#define SCTLR_RES1	((0x3 << 4) | (0x1 << 11) | (0x1 << 16) | \
			 (0x1 << 18) | (0x3 << 22) | (0x3 << 28))
/*
 * DAIF exception mask bits, in the 4-bit immediate encoding used by
 * "msr DAIFSet/DAIFClr, #imm" below.
 */
#define DAIF_DBG_BIT	(1 << 3)	/* D: debug exceptions */
#define DAIF_ABT_BIT	(1 << 2)	/* A: SError/async abort */
#define DAIF_IRQ_BIT	(1 << 1)	/* I: IRQ */
#define DAIF_FIQ_BIT	(1 << 0)	/* F: FIQ */
57 #define MAKE_REGISTER_ACCESSORS(reg) \
58 static inline uint64_t raw_read_##reg(void) \
61 __asm__ __volatile__("mrs %0, " #reg "\n\t" \
62 : "=r" (value) : : "memory"); \
65 static inline void raw_write_##reg(uint64_t value) \
67 __asm__ __volatile__("msr " #reg ", %0\n\t" \
68 : : "r" (value) : "memory"); \
/* Generate accessors for both the EL1 and EL2 instances of a register. */
#define MAKE_REGISTER_ACCESSORS_EL12(reg) \
	MAKE_REGISTER_ACCESSORS(reg##_el1) \
	MAKE_REGISTER_ACCESSORS(reg##_el2)
75 /* Architectural register accessors */
76 MAKE_REGISTER_ACCESSORS_EL12(actlr
)
77 MAKE_REGISTER_ACCESSORS_EL12(afsr0
)
78 MAKE_REGISTER_ACCESSORS_EL12(afsr1
)
79 MAKE_REGISTER_ACCESSORS(aidr_el1
)
80 MAKE_REGISTER_ACCESSORS_EL12(amair
)
81 MAKE_REGISTER_ACCESSORS(ccsidr_el1
)
82 MAKE_REGISTER_ACCESSORS(clidr_el1
)
83 MAKE_REGISTER_ACCESSORS(cntfrq_el0
)
84 MAKE_REGISTER_ACCESSORS(cnthctl_el2
)
85 MAKE_REGISTER_ACCESSORS(cnthp_ctl_el2
)
86 MAKE_REGISTER_ACCESSORS(cnthp_cval_el2
)
87 MAKE_REGISTER_ACCESSORS(cnthp_tval_el2
)
88 MAKE_REGISTER_ACCESSORS(cntkctl_el1
)
89 MAKE_REGISTER_ACCESSORS(cntp_ctl_el0
)
90 MAKE_REGISTER_ACCESSORS(cntp_cval_el0
)
91 MAKE_REGISTER_ACCESSORS(cntp_tval_el0
)
92 MAKE_REGISTER_ACCESSORS(cntpct_el0
)
93 MAKE_REGISTER_ACCESSORS(cntps_ctl_el1
)
94 MAKE_REGISTER_ACCESSORS(cntps_cval_el1
)
95 MAKE_REGISTER_ACCESSORS(cntps_tval_el1
)
96 MAKE_REGISTER_ACCESSORS(cntv_ctl_el0
)
97 MAKE_REGISTER_ACCESSORS(cntv_cval_el0
)
98 MAKE_REGISTER_ACCESSORS(cntv_tval_el0
)
99 MAKE_REGISTER_ACCESSORS(cntvct_el0
)
100 MAKE_REGISTER_ACCESSORS(cntvoff_el2
)
101 MAKE_REGISTER_ACCESSORS(contextidr_el1
)
102 MAKE_REGISTER_ACCESSORS(cpacr_el1
)
103 MAKE_REGISTER_ACCESSORS(cptr_el2
)
104 MAKE_REGISTER_ACCESSORS(csselr_el1
)
105 MAKE_REGISTER_ACCESSORS(ctr_el0
)
106 MAKE_REGISTER_ACCESSORS(currentel
)
107 MAKE_REGISTER_ACCESSORS(daif
)
108 MAKE_REGISTER_ACCESSORS(dczid_el0
)
109 MAKE_REGISTER_ACCESSORS_EL12(elr
)
110 MAKE_REGISTER_ACCESSORS_EL12(esr
)
111 MAKE_REGISTER_ACCESSORS_EL12(far
)
112 MAKE_REGISTER_ACCESSORS(fpcr
)
113 MAKE_REGISTER_ACCESSORS(fpsr
)
114 MAKE_REGISTER_ACCESSORS(hacr_el2
)
115 MAKE_REGISTER_ACCESSORS(hcr_el2
)
116 MAKE_REGISTER_ACCESSORS(hpfar_el2
)
117 MAKE_REGISTER_ACCESSORS(hstr_el2
)
118 MAKE_REGISTER_ACCESSORS(isr_el1
)
119 MAKE_REGISTER_ACCESSORS_EL12(mair
)
120 MAKE_REGISTER_ACCESSORS_EL12(mdcr
)
121 MAKE_REGISTER_ACCESSORS(mdscr_el1
)
122 MAKE_REGISTER_ACCESSORS(midr_el1
)
123 MAKE_REGISTER_ACCESSORS(mpidr_el1
)
124 MAKE_REGISTER_ACCESSORS(nzcv
)
125 MAKE_REGISTER_ACCESSORS(oslar_el1
)
126 MAKE_REGISTER_ACCESSORS(oslsr_el1
)
127 MAKE_REGISTER_ACCESSORS(par_el1
)
128 MAKE_REGISTER_ACCESSORS(revdir_el1
)
129 MAKE_REGISTER_ACCESSORS_EL12(rmr
)
130 MAKE_REGISTER_ACCESSORS_EL12(rvbar
)
131 MAKE_REGISTER_ACCESSORS_EL12(sctlr
)
132 MAKE_REGISTER_ACCESSORS(sp_el0
)
133 MAKE_REGISTER_ACCESSORS(sp_el1
)
134 MAKE_REGISTER_ACCESSORS(spsel
)
135 MAKE_REGISTER_ACCESSORS_EL12(spsr
)
136 MAKE_REGISTER_ACCESSORS(spsr_abt
)
137 MAKE_REGISTER_ACCESSORS(spsr_fiq
)
138 MAKE_REGISTER_ACCESSORS(spsr_irq
)
139 MAKE_REGISTER_ACCESSORS(spsr_und
)
140 MAKE_REGISTER_ACCESSORS_EL12(tcr
)
141 MAKE_REGISTER_ACCESSORS_EL12(tpidr
)
142 MAKE_REGISTER_ACCESSORS_EL12(ttbr0
)
143 MAKE_REGISTER_ACCESSORS(ttbr1_el1
)
144 MAKE_REGISTER_ACCESSORS_EL12(vbar
)
145 MAKE_REGISTER_ACCESSORS(vmpidr_el2
)
146 MAKE_REGISTER_ACCESSORS(vpidr_el2
)
147 MAKE_REGISTER_ACCESSORS(vtcr_el2
)
148 MAKE_REGISTER_ACCESSORS(vttbr_el2
)
150 /* Special DAIF accessor functions */
151 static inline void enable_debug_exceptions(void)
153 __asm__
__volatile__("msr DAIFClr, %0\n\t"
154 : : "i" (DAIF_DBG_BIT
) : "memory");
157 static inline void enable_serror_exceptions(void)
159 __asm__
__volatile__("msr DAIFClr, %0\n\t"
160 : : "i" (DAIF_ABT_BIT
) : "memory");
163 static inline void enable_irq(void)
165 __asm__
__volatile__("msr DAIFClr, %0\n\t"
166 : : "i" (DAIF_IRQ_BIT
) : "memory");
169 static inline void enable_fiq(void)
171 __asm__
__volatile__("msr DAIFClr, %0\n\t"
172 : : "i" (DAIF_FIQ_BIT
) : "memory");
175 static inline void disable_debug_exceptions(void)
177 __asm__
__volatile__("msr DAIFSet, %0\n\t"
178 : : "i" (DAIF_DBG_BIT
) : "memory");
181 static inline void disable_serror_exceptions(void)
183 __asm__
__volatile__("msr DAIFSet, %0\n\t"
184 : : "i" (DAIF_ABT_BIT
) : "memory");
187 static inline void disable_irq(void)
189 __asm__
__volatile__("msr DAIFSet, %0\n\t"
190 : : "i" (DAIF_IRQ_BIT
) : "memory");
193 static inline void disable_fiq(void)
195 __asm__
__volatile__("msr DAIFSet, %0\n\t"
196 : : "i" (DAIF_FIQ_BIT
) : "memory");
/* Cache maintenance system instructions */

/* DC CISW: data cache clean and invalidate by set/way. */
static inline void dccisw(uint64_t cisw)
{
	__asm__ __volatile__("dc cisw, %0\n\t" : : "r" (cisw) : "memory");
}
/* DC CIVAC: data cache clean and invalidate by VA to point of coherency. */
static inline void dccivac(uint64_t civac)
{
	__asm__ __volatile__("dc civac, %0\n\t" : : "r" (civac) : "memory");
}
/* DC CSW: data cache clean by set/way. */
static inline void dccsw(uint64_t csw)
{
	__asm__ __volatile__("dc csw, %0\n\t" : : "r" (csw) : "memory");
}
/* DC CVAC: data cache clean by VA to point of coherency. */
static inline void dccvac(uint64_t cvac)
{
	__asm__ __volatile__("dc cvac, %0\n\t" : : "r" (cvac) : "memory");
}
/* DC CVAU: data cache clean by VA to point of unification. */
static inline void dccvau(uint64_t cvau)
{
	__asm__ __volatile__("dc cvau, %0\n\t" : : "r" (cvau) : "memory");
}
/* DC ISW: data cache invalidate by set/way. */
static inline void dcisw(uint64_t isw)
{
	__asm__ __volatile__("dc isw, %0\n\t" : : "r" (isw) : "memory");
}
/* DC IVAC: data cache invalidate by VA to point of coherency. */
static inline void dcivac(uint64_t ivac)
{
	__asm__ __volatile__("dc ivac, %0\n\t" : : "r" (ivac) : "memory");
}
/* DC ZVA: zero a block of memory by VA (block size given by DCZID_EL0). */
static inline void dczva(uint64_t zva)
{
	__asm__ __volatile__("dc zva, %0\n\t" : : "r" (zva) : "memory");
}
/* IC IALLU: invalidate all instruction caches to point of unification. */
static inline void iciallu(void)
{
	__asm__ __volatile__("ic iallu\n\t" : : : "memory");
}
/* IC IALLUIS: invalidate all instruction caches, inner shareable. */
static inline void icialluis(void)
{
	__asm__ __volatile__("ic ialluis\n\t" : : : "memory");
}
/* IC IVAU: instruction cache invalidate by VA to point of unification. */
static inline void icivau(uint64_t ivau)
{
	__asm__ __volatile__("ic ivau, %0\n\t" : : "r" (ivau) : "memory");
}
/* TLB maintenance instructions */

/* TLBI ALLE1: invalidate all EL1 TLB entries. */
static inline void tlbiall_el1(void)
{
	__asm__ __volatile__("tlbi alle1\n\t" : : : "memory");
}
/* TLBI ALLE2: invalidate all EL2 TLB entries. */
static inline void tlbiall_el2(void)
{
	__asm__ __volatile__("tlbi alle2\n\t" : : : "memory");
}
/* TLBI ALLE1IS: invalidate all EL1 TLB entries, inner shareable. */
static inline void tlbiallis_el1(void)
{
	__asm__ __volatile__("tlbi alle1is\n\t" : : : "memory");
}
/* TLBI ALLE2IS: invalidate all EL2 TLB entries, inner shareable. */
static inline void tlbiallis_el2(void)
{
	__asm__ __volatile__("tlbi alle2is\n\t" : : : "memory");
}
/* TLBI VAAE1: invalidate EL1 TLB entries for the given VA, all ASIDs. */
static inline void tlbivaa_el1(uint64_t va)
{
	__asm__ __volatile__("tlbi vaae1, %0\n\t" : : "r" (va) : "memory");
}
/* data memory barrier */
#define dmb_opt(opt)	asm volatile ("dmb " #opt : : : "memory")
/* data sync barrier */
#define dsb_opt(opt)	asm volatile ("dsb " #opt : : : "memory")
/* instruction sync barrier */
#define isb_opt(opt)	asm volatile ("isb " #opt : : : "memory")

/* Full-system variants: "sy" is the default (and strongest) domain. */
#define dmb()	dmb_opt(sy)
#define dsb()	dsb_opt(sy)
#define isb()	isb_opt()
293 #endif /* __ASSEMBLER__ */
295 #endif /* __ARCH_LIB_HELPERS_H__ */