/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Supervisor Mode Access Prevention support
 *
 * Copyright (C) 2012 Intel Corporation
 * Author: H. Peter Anvin <hpa@linux.intel.com>
 */

#ifndef _ASM_X86_SMAP_H
#define _ASM_X86_SMAP_H

#include <asm/cpufeatures.h>
15 /* "Raw" instruction opcodes */
16 #define __ASM_CLAC ".byte 0x0f,0x01,0xca"
17 #define __ASM_STAC ".byte 0x0f,0x01,0xcb"
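
/*
 * 0f 01 ca is the encoding of CLAC and 0f 01 cb of STAC; emitting the
 * raw bytes keeps this header independent of assembler support for
 * the SMAP instructions.
 */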

#ifdef __ASSEMBLY__

#include <asm/alternative-asm.h>

#ifdef CONFIG_X86_SMAP

#define ASM_CLAC \
	ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_SMAP

#define ASM_STAC \
	ALTERNATIVE "", __ASM_STAC, X86_FEATURE_SMAP

#else /* CONFIG_X86_SMAP */

#define ASM_CLAC
#define ASM_STAC

#endif /* CONFIG_X86_SMAP */

#else /* __ASSEMBLY__ */

#include <asm/alternative.h>

#ifdef CONFIG_X86_SMAP
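
/*
 * With SMAP enabled, supervisor-mode accesses to user pages fault
 * unless EFLAGS.AC is set: stac() opens a user-access window by
 * setting AC, clac() closes it again by clearing AC.
 */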

static __always_inline void clac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", __ASM_CLAC, X86_FEATURE_SMAP);
}

static __always_inline void stac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", __ASM_STAC, X86_FEATURE_SMAP);
}

static __always_inline unsigned long smap_save(void)
{
	unsigned long flags;

	asm volatile ("# smap_save\n\t"
		      ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
		      "pushf; pop %0; " __ASM_CLAC "\n\t"
		      "1:"
		      : "=rm" (flags) : : "memory", "cc");

	return flags;
}

static __always_inline void smap_restore(unsigned long flags)
{
	asm volatile ("# smap_restore\n\t"
		      ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
		      "push %0; popf\n\t"
		      "1:"
		      : : "g" (flags) : "memory", "cc");
}

/* These macros can be used in asm() statements */
#define ASM_CLAC \
	ALTERNATIVE("", __ASM_CLAC, X86_FEATURE_SMAP)
#define ASM_STAC \
	ALTERNATIVE("", __ASM_STAC, X86_FEATURE_SMAP)
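
/*
 * Hypothetical sketch of such an asm() statement, opening a window
 * around a single user load (operands and variable names are
 * illustrative only, not an API defined here):
 *
 *	asm volatile(ASM_STAC "\n"
 *		     "	movl (%1), %0\n"
 *		     ASM_CLAC
 *		     : "=r" (val) : "r" (uaddr) : "memory");
 */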

#else /* CONFIG_X86_SMAP */

static inline void clac(void) { }
static inline void stac(void) { }

static inline unsigned long smap_save(void) { return 0; }
static inline void smap_restore(unsigned long flags) { }

#define ASM_CLAC
#define ASM_STAC

#endif /* CONFIG_X86_SMAP */

#endif /* __ASSEMBLY__ */

#endif /* _ASM_X86_SMAP_H */