#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/lockdep.h>
#include <asm/alternative.h>
#include <asm/cpufeatures.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_enhanced_fast_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_unrolled(void *to, const void *from, unsigned len);

static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
        unsigned ret;

        /*
         * If CPU has ERMS feature, use copy_user_enhanced_fast_string.
         * Otherwise, if CPU has rep_good feature, use copy_user_generic_string.
         * Otherwise, use copy_user_generic_unrolled.
         */
        alternative_call_2(copy_user_generic_unrolled,
                           copy_user_generic_string,
                           X86_FEATURE_REP_GOOD,
                           copy_user_enhanced_fast_string,
                           X86_FEATURE_ERMS,
                           ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
                                       "=d" (len)),
                           "1" (to), "2" (from), "3" (len)
                           : "memory", "rcx", "r8", "r9", "r10", "r11");
        return ret;
}
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
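
/*
 * For a compile-time-constant size of at most 16 bytes, the switch below
 * lets the compiler inline one or two mov instructions, each with its own
 * exception-table fixup, instead of calling out to the copy routines.
 * No access_ok() check is done here; that is the caller's job.
 */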
static __always_inline __must_check
int __copy_from_user_nocheck(void *dst, const void __user *src, unsigned size)
{
        int ret = 0;

        if (!__builtin_constant_p(size))
                return copy_user_generic(dst, (__force void *)src, size);
        switch (size) {
        case 1:
                __uaccess_begin();
                __get_user_asm(*(u8 *)dst, (u8 __user *)src,
                               ret, "b", "b", "=q", 1);
                __uaccess_end();
                return ret;
        case 2:
                __uaccess_begin();
                __get_user_asm(*(u16 *)dst, (u16 __user *)src,
                               ret, "w", "w", "=r", 2);
                __uaccess_end();
                return ret;
        case 4:
                __uaccess_begin();
                __get_user_asm(*(u32 *)dst, (u32 __user *)src,
                               ret, "l", "k", "=r", 4);
                __uaccess_end();
                return ret;
        case 8:
                __uaccess_begin();
                __get_user_asm(*(u64 *)dst, (u64 __user *)src,
                               ret, "q", "", "=r", 8);
                __uaccess_end();
                return ret;
        case 10:
                __uaccess_begin();
                __get_user_asm(*(u64 *)dst, (u64 __user *)src,
                               ret, "q", "", "=r", 10);
                if (likely(!ret))
                        __get_user_asm(*(u16 *)(8 + (char *)dst),
                                       (u16 __user *)(8 + (char __user *)src),
                                       ret, "w", "w", "=r", 2);
                __uaccess_end();
                return ret;
        case 16:
                __uaccess_begin();
                __get_user_asm(*(u64 *)dst, (u64 __user *)src,
                               ret, "q", "", "=r", 16);
                if (likely(!ret))
                        __get_user_asm(*(u64 *)(8 + (char *)dst),
                                       (u64 __user *)(8 + (char __user *)src),
                                       ret, "q", "", "=r", 8);
                __uaccess_end();
                return ret;
        default:
                return copy_user_generic(dst, (__force void *)src, size);
        }
}

static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
        might_fault();
        return __copy_from_user_nocheck(dst, src, size);
}
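
/*
 * Typical use (illustrative sketch only, not part of this header):
 * copying a fixed-size structure in from an already-checked user
 * pointer, where a non-zero return means some bytes were not copied:
 *
 *      struct foo kbuf;
 *
 *      if (__copy_from_user(&kbuf, ubuf, sizeof(kbuf)))
 *              return -EFAULT;
 */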
static __always_inline __must_check
int __copy_to_user_nocheck(void __user *dst, const void *src, unsigned size)
{
        int ret = 0;

        if (!__builtin_constant_p(size))
                return copy_user_generic((__force void *)dst, src, size);
        switch (size) {
        case 1:
                __uaccess_begin();
                __put_user_asm(*(u8 *)src, (u8 __user *)dst,
                               ret, "b", "b", "iq", 1);
                __uaccess_end();
                return ret;
        case 2:
                __uaccess_begin();
                __put_user_asm(*(u16 *)src, (u16 __user *)dst,
                               ret, "w", "w", "ir", 2);
                __uaccess_end();
                return ret;
        case 4:
                __uaccess_begin();
                __put_user_asm(*(u32 *)src, (u32 __user *)dst,
                               ret, "l", "k", "ir", 4);
                __uaccess_end();
                return ret;
        case 8:
                __uaccess_begin();
                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
                               ret, "q", "", "er", 8);
                __uaccess_end();
                return ret;
        case 10:
                __uaccess_begin();
                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
                               ret, "q", "", "er", 10);
                if (likely(!ret)) {
                        asm("":::"memory");
                        __put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
                                       ret, "w", "w", "ir", 2);
                }
                __uaccess_end();
                return ret;
        case 16:
                __uaccess_begin();
                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
                               ret, "q", "", "er", 16);
                if (likely(!ret)) {
                        asm("":::"memory");
                        __put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
                                       ret, "q", "", "er", 8);
                }
                __uaccess_end();
                return ret;
        default:
                return copy_user_generic((__force void *)dst, src, size);
        }
}

static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
        might_fault();
        return __copy_to_user_nocheck(dst, src, size);
}
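
/*
 * Copy between two user-space buffers, again without access_ok() checks.
 * The constant-size cases bounce the data through a register-sized
 * temporary, so a fault on either the load or the store side shows up
 * in ret.
 */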
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
        int ret = 0;

        might_fault();
        if (!__builtin_constant_p(size))
                return copy_user_generic((__force void *)dst,
                                         (__force void *)src, size);
        switch (size) {
        case 1: {
                u8 tmp;
                __uaccess_begin();
                __get_user_asm(tmp, (u8 __user *)src,
                               ret, "b", "b", "=q", 1);
                if (likely(!ret))
                        __put_user_asm(tmp, (u8 __user *)dst,
                                       ret, "b", "b", "iq", 1);
                __uaccess_end();
                return ret;
        }
        case 2: {
                u16 tmp;
                __uaccess_begin();
                __get_user_asm(tmp, (u16 __user *)src,
                               ret, "w", "w", "=r", 2);
                if (likely(!ret))
                        __put_user_asm(tmp, (u16 __user *)dst,
                                       ret, "w", "w", "ir", 2);
                __uaccess_end();
                return ret;
        }
        case 4: {
                u32 tmp;
                __uaccess_begin();
                __get_user_asm(tmp, (u32 __user *)src,
                               ret, "l", "k", "=r", 4);
                if (likely(!ret))
                        __put_user_asm(tmp, (u32 __user *)dst,
                                       ret, "l", "k", "ir", 4);
                __uaccess_end();
                return ret;
        }
        case 8: {
                u64 tmp;
                __uaccess_begin();
                __get_user_asm(tmp, (u64 __user *)src,
                               ret, "q", "", "=r", 8);
                if (likely(!ret))
                        __put_user_asm(tmp, (u64 __user *)dst,
                                       ret, "q", "", "er", 8);
                __uaccess_end();
                return ret;
        }
        default:
                return copy_user_generic((__force void *)dst,
                                         (__force void *)src, size);
        }
}
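
/*
 * The _inatomic variants omit might_fault(): they are for callers running
 * with page faults disabled (e.g. under pagefault_disable()), where a
 * fault cannot be serviced and simply makes the copy return a non-zero
 * uncopied-byte count.
 */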
static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
        return __copy_from_user_nocheck(dst, src, size);
}

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
        return __copy_to_user_nocheck(dst, src, size);
}
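
/*
 * Non-caching copy: uses non-temporal stores so that a large copy does
 * not evict useful cache lines. If zerorest is set, the remainder of the
 * destination buffer is zeroed when a fault cuts the copy short.
 */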
extern long __copy_user_nocache(void *dst, const void __user *src,
                                unsigned size, int zerorest);

static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
{
        might_fault();
        return __copy_user_nocache(dst, src, size, 1);
}

static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
                                  unsigned size)
{
        return __copy_user_nocache(dst, src, size, 0);
}
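
/*
 * Fault fixup helper: retries the remaining bytes one at a time and
 * returns the number of bytes that could not be copied.
 */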
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len);

#endif /* _ASM_X86_UACCESS_64_H */