/* SPDX-License-Identifier: GPL-2.0 */
/* arch/x86/include/asm/special_insns.h */
#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H

#ifdef __KERNEL__

#include <linux/linkage.h>	/* asmlinkage */
#include <linux/stringify.h>	/* __stringify() */
#include <linux/types.h>	/* u32 */
#include <asm/alternative.h>	/* alternative_io(), ALTERNATIVE_2() */
#include <asm/asm.h>		/* _ASM_EXTABLE() */
#include <asm/cpufeatures.h>	/* X86_FEATURE_* */
#include <asm/nops.h>
/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a variable and mimic reads and writes to it to enforce serialization.
 */
extern unsigned long __force_order;
static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0" : : "r" (val), "m" (__force_order));
}
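
/*
 * Illustrative sketch, not part of the original header: callers typically
 * do a read-modify-write through these accessors, e.g. to temporarily clear
 * CR0.WP (the write-protect bit, X86_CR0_WP from <asm/processor-flags.h>).
 * The helper name below is hypothetical.
 */
#if 0	/* example only */
static inline void example_clear_cr0_wp(void)
{
	unsigned long cr0 = native_read_cr0();

	native_write_cr0(cr0 & ~X86_CR0_WP);	/* CR0.WP is bit 16 */
}
#endif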
static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2" : : "r" (val), "m" (__force_order));
}
static inline unsigned long __native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3" : : "r" (val), "m" (__force_order));
}
static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist.  Non-existent CR4
	 * is functionally equivalent to CR4 == 0.  Keep it simple and pretend
	 * that CR4 == 0 on CPUs that don't have CR4: the "0" (0) input
	 * preloads val with 0, and the exception table entry makes a
	 * faulting read resume at label 2 with val still 0.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}
static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4" : : "r" (val), "m" (__force_order));
}
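
/*
 * Illustrative sketch, not part of the original header: CR4 updates are
 * read-modify-write sequences.  The helper below is hypothetical; the real
 * kernel routes CR4 updates through cr4_set_bits()/cr4_clear_bits() (see
 * <asm/tlbflush.h>), which also maintain a per-CPU shadow of CR4.
 */
#if 0	/* example only */
static inline void example_cr4_set_mask(unsigned long mask)
{
	unsigned long cr4 = native_read_cr4();

	/* skip the (serializing) write if the bits are already set */
	if ((cr4 | mask) != cr4)
		native_write_cr4(cr4 | mask);
}
#endif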
#ifdef CONFIG_X86_64
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif
#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 __read_pkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction.  Places PKRU contents into EAX,
	 * clears EDX and requires that ecx=0.  (Emitted as raw
	 * bytes so that older assemblers can still build this.)
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void __write_pkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction.  Loads contents of EAX into PKRU;
	 * requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}
#else
static inline u32 __read_pkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif
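
/*
 * Illustrative sketch, not part of the original header: PKRU holds two bits
 * per protection key -- access-disable (AD, bit 2N) and write-disable (WD,
 * bit 2N+1) -- so key N occupies the two bits at position 2N.  The helper
 * below is hypothetical and only decodes the register contents.
 */
#if 0	/* example only */
static inline int example_pkey_is_writable(u32 pkru, int pkey)
{
	u32 shift = pkey * 2;	/* 2 PKRU bits per key */

	/* writable iff neither AD nor WD is set for this key */
	return !(pkru & (0x3 << shift));
}
#endif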
static inline void native_wbinvd(void)
{
	asm volatile("wbinvd" : : : "memory");
}

extern asmlinkage void native_load_gs_index(unsigned);
static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}
/*
 * Careful!  CR3 contains more than just an address.  You probably want
 * read_cr3_pa() instead.
 */
static inline unsigned long __read_cr3(void)
{
	return __native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}
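
/*
 * For reference, not part of the original header: <asm/processor.h> builds
 * read_cr3_pa() on top of __read_cr3() by masking off the low bits, which
 * hold flags and, with PCID enabled, the address-space ID:
 */
#if 0	/* example only; CR3_ADDR_MASK comes from <asm/processor-flags.h> */
static inline unsigned long example_read_cr3_pa(void)
{
	return __read_cr3() & CR3_ADDR_MASK;
}
#endif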
static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}
#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT */
static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}
static inline void clwb(volatile void *__p)
{
	/*
	 * View the target as a 64-byte struct so the "+m" constraint
	 * covers the whole cache line, not just the first byte.
	 */
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])",	/* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30",	/* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}
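
/*
 * Illustrative sketch, not part of the original header: persistent-memory
 * callers write back a buffer one cache line at a time.  The helper name
 * and the hard-coded 64-byte line size are assumptions for the example;
 * real code reads the line size from boot_cpu_data.x86_clflush_size.
 */
#if 0	/* example only */
static inline void example_clwb_range(void *addr, unsigned long size)
{
	unsigned long line = 64;	/* assumed cache-line size */
	char *p = (char *)((unsigned long)addr & ~(line - 1));
	char *end = (char *)addr + size;

	for (; p < end; p += line)
		clwb(p);
}
#endif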
#define nop() asm volatile ("nop")

#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */