#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/lockdep.h>
#include <linux/kasan-checks.h>
#include <asm/alternative.h>
#include <asm/cpufeatures.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_enhanced_fast_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_unrolled(void *to, const void *from, unsigned len);
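
/*
 * copy_user_generic() picks one of the three implementations above at
 * alternatives-patching time, based on the CPU feature flags named in the
 * comment inside it.  Like the assembly routines it wraps, it returns the
 * number of bytes that could NOT be copied (0 on complete success).
 */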
static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
        unsigned ret;

        /*
         * If CPU has ERMS feature, use copy_user_enhanced_fast_string.
         * Otherwise, if CPU has rep_good feature, use copy_user_generic_string.
         * Otherwise, use copy_user_generic_unrolled.
         */
        alternative_call_2(copy_user_generic_unrolled,
                           copy_user_generic_string,
                           X86_FEATURE_REP_GOOD,
                           copy_user_enhanced_fast_string,
                           X86_FEATURE_ERMS,
                           ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
                                       "=d" (len)),
                           "1" (to), "2" (from), "3" (len)
                           : "memory", "rcx", "r8", "r9", "r10", "r11");
        return ret;
}
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
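
/*
 * __copy_from_user_nocheck() is the workhorse behind the __copy_from_user*
 * variants.  For compile-time-constant sizes of 1, 2, 4, 8, 10 and 16 bytes
 * it inlines the access as one or two mov instructions via __get_user_asm();
 * everything else goes through copy_user_generic().  The return value is 0
 * on success or the number of bytes that could not be copied.  No access_ok()
 * check is done here.
 */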
static __always_inline __must_check
int __copy_from_user_nocheck(void *dst, const void __user *src, unsigned size)
{
        int ret = 0;

        check_object_size(dst, size, false);
        if (!__builtin_constant_p(size))
                return copy_user_generic(dst, (__force void *)src, size);
        switch (size) {
        case 1:
                __uaccess_begin();
                __get_user_asm(*(u8 *)dst, (u8 __user *)src,
                               ret, "b", "b", "=q", 1);
                __uaccess_end();
                return ret;
        case 2:
                __uaccess_begin();
                __get_user_asm(*(u16 *)dst, (u16 __user *)src,
                               ret, "w", "w", "=r", 2);
                __uaccess_end();
                return ret;
        case 4:
                __uaccess_begin();
                __get_user_asm(*(u32 *)dst, (u32 __user *)src,
                               ret, "l", "k", "=r", 4);
                __uaccess_end();
                return ret;
        case 8:
                __uaccess_begin();
                __get_user_asm(*(u64 *)dst, (u64 __user *)src,
                               ret, "q", "", "=r", 8);
                __uaccess_end();
                return ret;
        case 10:
                __uaccess_begin();
                __get_user_asm(*(u64 *)dst, (u64 __user *)src,
                               ret, "q", "", "=r", 10);
                if (likely(!ret))
                        __get_user_asm(*(u16 *)(8 + (char *)dst),
                                       (u16 __user *)(8 + (char __user *)src),
                                       ret, "w", "w", "=r", 2);
                __uaccess_end();
                return ret;
        case 16:
                __uaccess_begin();
                __get_user_asm(*(u64 *)dst, (u64 __user *)src,
                               ret, "q", "", "=r", 16);
                if (likely(!ret))
                        __get_user_asm(*(u64 *)(8 + (char *)dst),
                                       (u64 __user *)(8 + (char __user *)src),
                                       ret, "q", "", "=r", 8);
                __uaccess_end();
                return ret;
        default:
                return copy_user_generic(dst, (__force void *)src, size);
        }
}
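
/*
 * __copy_from_user() adds a KASAN write check and a might_fault() debug
 * annotation on top of __copy_from_user_nocheck().  Like every double-
 * underscore variant in this file it does NOT call access_ok(); the caller
 * must have validated the user pointer already.  Illustrative sketch only
 * (not part of this header):
 *
 *	if (!access_ok(VERIFY_READ, ubuf, len))
 *		return -EFAULT;
 *	if (__copy_from_user(kbuf, ubuf, len))
 *		return -EFAULT;
 */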
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
        might_fault();
        kasan_check_write(dst, size);
        return __copy_from_user_nocheck(dst, src, size);
}
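
/*
 * __copy_to_user_nocheck() mirrors __copy_from_user_nocheck() for the
 * kernel-to-user direction: constant sizes of 1, 2, 4, 8, 10 and 16 bytes
 * are inlined via __put_user_asm(), everything else falls back to
 * copy_user_generic().  check_object_size() provides the hardened-usercopy
 * sanity check on the kernel source buffer.
 */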
static __always_inline __must_check
int __copy_to_user_nocheck(void __user *dst, const void *src, unsigned size)
{
        int ret = 0;

        check_object_size(src, size, true);
        if (!__builtin_constant_p(size))
                return copy_user_generic((__force void *)dst, src, size);
        switch (size) {
        case 1:
                __uaccess_begin();
                __put_user_asm(*(u8 *)src, (u8 __user *)dst,
                               ret, "b", "b", "iq", 1);
                __uaccess_end();
                return ret;
        case 2:
                __uaccess_begin();
                __put_user_asm(*(u16 *)src, (u16 __user *)dst,
                               ret, "w", "w", "ir", 2);
                __uaccess_end();
                return ret;
        case 4:
                __uaccess_begin();
                __put_user_asm(*(u32 *)src, (u32 __user *)dst,
                               ret, "l", "k", "ir", 4);
                __uaccess_end();
                return ret;
        case 8:
                __uaccess_begin();
                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
                               ret, "q", "", "er", 8);
                __uaccess_end();
                return ret;
        case 10:
                __uaccess_begin();
                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
                               ret, "q", "", "er", 10);
                if (likely(!ret)) {
                        asm("":::"memory");
                        __put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
                                       ret, "w", "w", "ir", 2);
                }
                __uaccess_end();
                return ret;
        case 16:
                __uaccess_begin();
                __put_user_asm(*(u64 *)src, (u64 __user *)dst,
                               ret, "q", "", "er", 16);
                if (likely(!ret)) {
                        asm("":::"memory");
                        __put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
                                       ret, "q", "", "er", 8);
                }
                __uaccess_end();
                return ret;
        default:
                return copy_user_generic((__force void *)dst, src, size);
        }
}
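
/*
 * __copy_to_user(): KASAN-checked, might_fault()-annotated wrapper around
 * __copy_to_user_nocheck().  Again, no access_ok() is done here.
 */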
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
        might_fault();
        kasan_check_read(src, size);
        return __copy_to_user_nocheck(dst, src, size);
}
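
/*
 * __copy_in_user() copies user-to-user without access_ok().  Small constant
 * sizes are bounced through a temporary variable with a matching
 * __get_user_asm()/__put_user_asm() pair; anything else goes through
 * copy_user_generic() directly on the two __user pointers.
 */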
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
        int ret = 0;

        might_fault();
        if (!__builtin_constant_p(size))
                return copy_user_generic((__force void *)dst,
                                          (__force void *)src, size);
        switch (size) {
        case 1: {
                u8 tmp;
                __uaccess_begin();
                __get_user_asm(tmp, (u8 __user *)src,
                               ret, "b", "b", "=q", 1);
                if (likely(!ret))
                        __put_user_asm(tmp, (u8 __user *)dst,
                                       ret, "b", "b", "iq", 1);
                __uaccess_end();
                return ret;
        }
        case 2: {
                u16 tmp;
                __uaccess_begin();
                __get_user_asm(tmp, (u16 __user *)src,
                               ret, "w", "w", "=r", 2);
                if (likely(!ret))
                        __put_user_asm(tmp, (u16 __user *)dst,
                                       ret, "w", "w", "ir", 2);
                __uaccess_end();
                return ret;
        }
        case 4: {
                u32 tmp;
                __uaccess_begin();
                __get_user_asm(tmp, (u32 __user *)src,
                               ret, "l", "k", "=r", 4);
                if (likely(!ret))
                        __put_user_asm(tmp, (u32 __user *)dst,
                                       ret, "l", "k", "ir", 4);
                __uaccess_end();
                return ret;
        }
        case 8: {
                u64 tmp;
                __uaccess_begin();
                __get_user_asm(tmp, (u64 __user *)src,
                               ret, "q", "", "=r", 8);
                if (likely(!ret))
                        __put_user_asm(tmp, (u64 __user *)dst,
                                       ret, "q", "", "er", 8);
                __uaccess_end();
                return ret;
        }
        default:
                return copy_user_generic((__force void *)dst,
                                          (__force void *)src, size);
        }
}
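
/*
 * The *_inatomic() variants skip the might_fault() annotation so they can be
 * used where sleeping is not allowed (e.g. with page faults disabled); the
 * caller is responsible for handling the resulting short copies.
 */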
static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
        kasan_check_write(dst, size);
        return __copy_from_user_nocheck(dst, src, size);
}
static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
        kasan_check_read(src, size);
        return __copy_to_user_nocheck(dst, src, size);
}
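
/*
 * __copy_user_nocache() (implemented in assembly) copies from userspace
 * using non-temporal stores, so large transfers do not evict the kernel's
 * cache contents; it too returns the number of uncopied bytes.  The two
 * inline wrappers below differ only in the might_fault() annotation and in
 * the zerorest argument they pass (1 from the sleeping variant, 0 from the
 * inatomic one).
 */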
extern long __copy_user_nocache(void *dst, const void __user *src,
                                unsigned size, int zerorest);
static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
{
        might_fault();
        kasan_check_write(dst, size);
        return __copy_user_nocache(dst, src, size, 1);
}
static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
                                  unsigned size)
{
        kasan_check_write(dst, size);
        return __copy_user_nocache(dst, src, size, 0);
}
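
/*
 * copy_user_handle_tail() is called from the exception paths of the assembly
 * copy routines: it retries the remaining bytes one at a time and returns how
 * many could not be copied, which becomes the routine's return value.
 */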
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len);

#endif /* _ASM_X86_UACCESS_64_H */