/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H

#ifdef __KERNEL__

#include <asm/nops.h>
#include <asm/processor-flags.h>
#include <linux/jump_label.h>

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance.  The solution is
 * to use a variable and mimic reads and writes to it to enforce
 * serialization.
 */
extern unsigned long __force_order;
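
/*
 * Illustrative sketch (editorial addition, not part of this header):
 * because every accessor below names __force_order as a fake memory
 * operand, the compiler sees all control-register asm statements as
 * touching the same variable and keeps them in program order relative
 * to each other, without needing a full "memory" clobber.  Compiled out:
 */
#if 0
static inline void __force_order_example(void)
{
	unsigned long cr0 = native_read_cr0();		/* "writes" __force_order */
	unsigned long cr3 = __native_read_cr3();	/* "writes" it again */

	/* The two asm statements above cannot be swapped by the compiler. */
	(void)cr0;
	(void)cr3;
}
#endif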

void native_write_cr0(unsigned long val);

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

static inline unsigned long __native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist.  Non-existent CR4
	 * is functionally equivalent to CR4 == 0.  Keep it simple and pretend
	 * that CR4 == 0 on CPUs that don't have CR4.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}

void native_write_cr4(unsigned long val);

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 rdpkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction.  Places PKRU contents into EAX,
	 * clears EDX and requires that ecx=0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void wrpkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction.  Loads contents of EAX into PKRU,
	 * requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}

static inline void __write_pkru(u32 pkru)
{
	/*
	 * WRPKRU is relatively expensive compared to RDPKRU.
	 * Avoid WRPKRU when it would not change the value.
	 */
	if (pkru == rdpkru())
		return;

	wrpkru(pkru);
}
#else
static inline u32 rdpkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif
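
/*
 * Illustrative sketch (editorial addition): PKRU holds two bits per
 * protection key, AD (access-disable) at bit 2*key and WD
 * (write-disable) at bit 2*key + 1.  A hypothetical helper that makes
 * one pkey read-only might look like this; the name is ours, not the
 * kernel's.  Compiled out:
 */
#if 0
static inline void example_pkey_make_readonly(int pkey)
{
	u32 val = rdpkru();

	val &= ~(1u << (2 * pkey));	/* clear AD: permit reads */
	val |= 1u << (2 * pkey + 1);	/* set WD: forbid writes */
	__write_pkru(val);		/* no-op if the value is unchanged */
}
#endif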

static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern asmlinkage void native_load_gs_index(unsigned);

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}
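
/*
 * Illustrative sketch (editorial addition): CR4 reads back as a plain
 * bit mask, so callers test individual features with the X86_CR4_*
 * constants from <asm/processor-flags.h>.  Compiled out:
 */
#if 0
static inline int example_cr4_pae_enabled(void)
{
	return !!(__read_cr4() & X86_CR4_PAE);
}
#endif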

#ifdef CONFIG_PARAVIRT_XXL
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

/*
 * Careful!  CR3 contains more than just an address.  You probably want
 * read_cr3_pa() instead.
 */
static inline unsigned long __read_cr3(void)
{
	return __native_read_cr3();
}
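
/*
 * Illustrative sketch (editorial addition): the low bits of CR3 carry
 * PCID/flag state, which is why read_cr3_pa() in <asm/processor.h>
 * masks them off before using the value as a physical address.
 * Roughly (compiled out):
 */
#if 0
static inline unsigned long example_read_cr3_pa(void)
{
	return __read_cr3() & CR3_ADDR_MASK;
}
#endif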

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT_XXL */

static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

static inline void clwb(volatile void *__p)
{
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])", /* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30", /* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}
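
/*
 * Illustrative sketch (editorial addition): callers usually walk a
 * buffer one cache line at a time, in the style of the kernel's
 * clean_cache_range() helper.  A simplified version assuming 64-byte
 * lines (compiled out):
 */
#if 0
static inline void example_clwb_range(void *addr, size_t size)
{
	unsigned long start = (unsigned long)addr & ~63UL;
	unsigned long end = (unsigned long)addr + size;
	unsigned long line;

	for (line = start; line < end; line += 64)
		clwb((void *)line);
}
#endif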

#define nop() asm volatile ("nop")

#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */