arch/parisc/include/asm/special_insns.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __PARISC_SPECIAL_INSNS_H
#define __PARISC_SPECIAL_INSNS_H

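/*
 * mfctl - read a PA-RISC control register and return its value.
 * The register name is pasted directly into the "mfctl" instruction text
 * via the # stringizing operator, so it must be a literal token.
 */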
#define mfctl(reg)	({		\
	unsigned long cr;		\
	__asm__ __volatile__(		\
		"mfctl " #reg ",%0" :	\
		 "=r" (cr)		\
	);				\
	cr;				\
})

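/*
 * mtctl - write a general register value into a control register.
 * The "i" constraint requires cr to be a compile-time constant.
 */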
#define mtctl(gr, cr) \
	__asm__ __volatile__("mtctl %0,%1" \
		: /* no outputs */ \
		: "r" (gr), "i" (cr) : "memory")

/* these are here to de-mystify the calling code, and to provide hooks */
/* which I needed for debugging EIEM problems -PB */
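/* Control register 15 is the EIEM (External Interrupt Enable Mask). */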
21 #define get_eiem() mfctl(15)
static inline void set_eiem(unsigned long val)
{
	mtctl(val, 15);
}

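/*
 * mfsp - read a PA-RISC space register and return its value.  As with
 * mfctl(), the register name is stringized into the instruction, so it
 * must be a literal token.
 */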
#define mfsp(reg)	({		\
	unsigned long cr;		\
	__asm__ __volatile__(		\
		"mfsp " #reg ",%0" :	\
		 "=r" (cr)		\
	);				\
	cr;				\
})

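/*
 * mtsp - write a value into a space register.  When the value is a
 * compile-time constant zero, the hardwired zero register %r0 is used
 * directly, so nothing has to be loaded into a general register first.
 */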
#define mtsp(val, cr) \
	{ if (__builtin_constant_p(val) && ((val) == 0)) \
		__asm__ __volatile__("mtsp %%r0,%0" : : "i" (cr) : "memory"); \
	else \
		__asm__ __volatile__("mtsp %0,%1" \
			: /* no outputs */ \
			: "r" (val), "i" (cr) : "memory"); }

#endif /* __PARISC_SPECIAL_INSNS_H */