#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/lockdep.h>
#include <linux/kasan-checks.h>
#include <asm/alternative.h>
#include <asm/cpufeatures.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_enhanced_fast_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_unrolled(void *to, const void *from, unsigned len);
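/*
 * All three routines live in arch/x86/lib/copy_user_64.S and share one
 * calling convention: destination in %rdi, source in %rsi, length in
 * %rdx, with the return value being the number of bytes that could NOT
 * be copied (0 on success).  "unrolled" is an open-coded 8-byte move
 * loop, "string" is rep movsq plus a movsb tail for X86_FEATURE_REP_GOOD
 * CPUs, and "enhanced_fast_string" is a bare rep movsb for CPUs with
 * ERMS (Enhanced REP MOVSB/STOSB).
 */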
static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
	unsigned ret;

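	/*
	 * alternative_call_2() is patched once at boot by the alternatives
	 * machinery: the call target is rewritten in place according to the
	 * CPU feature bits, so the dispatch costs no runtime branch.
	 */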
	/*
	 * If CPU has ERMS feature, use copy_user_enhanced_fast_string.
	 * Otherwise, if CPU has rep_good feature, use copy_user_generic_string.
	 * Otherwise, use copy_user_generic_unrolled.
	 */
	alternative_call_2(copy_user_generic_unrolled,
			 copy_user_generic_string,
			 X86_FEATURE_REP_GOOD,
			 copy_user_enhanced_fast_string,
			 X86_FEATURE_ERMS,
			 ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
				     "=d" (len)),
			 "1" (to), "2" (from), "3" (len)
44 : "memory", "rcx", "r8", "r9", "r10", "r11");
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);

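/*
 * No access_ok() here: callers must already have validated the user
 * pointer.  The "_nozero" get_user forms below do not clear the
 * destination when a fault hits partway through; callers that need
 * zero-on-fault semantics (copy_from_user()) zero the tail themselves.
 */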
static __always_inline __must_check
int __copy_from_user_nocheck(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	check_object_size(dst, size, false);
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
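	/*
	 * For sizes known at compile time, each case below collapses to a
	 * single mov (or a pair) with an exception-table fixup, which is
	 * cheaper than a call into the generic copy routine.
	 */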
	switch (size) {
	case 1:
		__uaccess_begin();
		__get_user_asm_nozero(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		__uaccess_end();
		return ret;
	case 2:
		__uaccess_begin();
		__get_user_asm_nozero(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		__uaccess_end();
		return ret;
	case 4:
		__uaccess_begin();
		__get_user_asm_nozero(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		__uaccess_end();
		return ret;
	case 8:
		__uaccess_begin();
		__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		__uaccess_end();
		return ret;
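	/*
	 * The 10- and 16-byte cases presumably serve 80-bit x87
	 * extended-precision and 128-bit objects; mechanically each is an
	 * 8-byte move followed by a 2- or 8-byte move for the remainder,
	 * with the second move skipped if the first one faulted.
	 */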
	case 10:
		__uaccess_begin();
		__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (likely(!ret))
			__get_user_asm_nozero(*(u16 *)(8 + (char *)dst),
				       (u16 __user *)(8 + (char __user *)src),
				       ret, "w", "w", "=r", 2);
		__uaccess_end();
		return ret;
	case 16:
		__uaccess_begin();
		__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (likely(!ret))
			__get_user_asm_nozero(*(u64 *)(8 + (char *)dst),
				       (u64 __user *)(8 + (char __user *)src),
				       ret, "q", "", "=r", 8);
		__uaccess_end();
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}

static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	might_fault();
	kasan_check_write(dst, size);
	return __copy_from_user_nocheck(dst, src, size);
}

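/*
 * The "to user" side mirrors the above.  In __put_user_asm() the "er"
 * constraint allows a register or a sign-extended 32-bit immediate for
 * the 64-bit stores, and check_object_size(..., true) tells the
 * hardened-usercopy checker that the kernel object is now the source.
 */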
static __always_inline __must_check
int __copy_to_user_nocheck(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	check_object_size(src, size, true);
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:
		__uaccess_begin();
		__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		__uaccess_end();
		return ret;
	case 2:
		__uaccess_begin();
		__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		__uaccess_end();
		return ret;
	case 4:
		__uaccess_begin();
		__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		__uaccess_end();
		return ret;
	case 8:
		__uaccess_begin();
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "er", 8);
		__uaccess_end();
		return ret;
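	/*
	 * In the split cases below, the empty asm("":::"memory") is a
	 * compiler barrier between the two stores, keeping the accesses
	 * distinct and ordered as far as the compiler is concerned.
	 */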
	case 10:
		__uaccess_begin();
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 10);
		if (likely(!ret)) {
			asm("":::"memory");
			__put_user_asm(*(u16 *)(8 + (char *)src),
				       (u16 __user *)(8 + (char __user *)dst),
				       ret, "w", "w", "ir", 2);
		}
		__uaccess_end();
		return ret;
	case 16:
		__uaccess_begin();
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 16);
		if (likely(!ret)) {
			asm("":::"memory");
			__put_user_asm(*(u64 *)(8 + (char *)src),
				       (u64 __user *)(8 + (char __user *)dst),
				       ret, "q", "", "er", 8);
		}
		__uaccess_end();
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}

static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	might_fault();
	kasan_check_read(src, size);
	return __copy_to_user_nocheck(dst, src, size);
}

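/*
 * User-to-user copy.  Each constant-size case bounces the data through a
 * register-sized kernel temporary: a __get_user_asm() that may fault on
 * src, then a __put_user_asm() that may fault on dst, with ret recording
 * either failure.
 */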
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__uaccess_begin();
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		__uaccess_end();
		return ret;
	}
	case 2: {
		u16 tmp;
		__uaccess_begin();
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		__uaccess_end();
		return ret;
	}

	case 4: {
		u32 tmp;
		__uaccess_begin();
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		__uaccess_end();
		return ret;
	}
	case 8: {
		u64 tmp;
		__uaccess_begin();
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "er", 8);
		__uaccess_end();
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}

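/*
 * The _inatomic variants omit might_fault(): they are meant to run with
 * page faults disabled (e.g. under pagefault_disable()), where a fault
 * simply makes the copy return early instead of sleeping.
 */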
static __must_check __always_inline int
__copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
{
	kasan_check_write(dst, size);
	return __copy_from_user_nocheck(dst, src, size);
}

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	kasan_check_read(src, size);
	return __copy_to_user_nocheck(dst, src, size);
}

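/*
 * __copy_user_nocache() (arch/x86/lib/copy_user_64.S) uses non-temporal
 * movnti stores so that a large copy does not evict the CPU caches.
 * The zerorest flag historically selected whether uncopied destination
 * bytes are cleared after a fault: the faultable wrapper below passes 1,
 * the inatomic one 0.
 */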
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int
__copy_from_user_nocache(void *dst, const void __user *src, unsigned size)
{
	might_fault();
	kasan_check_write(dst, size);
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int
__copy_from_user_inatomic_nocache(void *dst, const void __user *src,
				  unsigned size)
{
	kasan_check_write(dst, size);
	return __copy_user_nocache(dst, src, size, 0);
}

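/*
 * Fault-fixup helper (arch/x86/lib/usercopy_64.c): retries the remainder
 * of a faulted copy byte by byte and returns the number of bytes that
 * still could not be copied.
 */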
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len);

#endif /* _ASM_X86_UACCESS_64_H */