Diffstat (limited to 'arch/x86/include/asm/uaccess_64.h')
-rw-r--r--  arch/x86/include/asm/uaccess_64.h  208
1 file changed, 208 insertions, 0 deletions
diff --git a/arch/x86/include/asm/uaccess_64.h b/arch/x86/include/asm/uaccess_64.h
new file mode 100644
index 00000000000..543ba883cc6
--- /dev/null
+++ b/arch/x86/include/asm/uaccess_64.h
@@ -0,0 +1,208 @@
#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <linux/lockdep.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);

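/*
 * All of the copy routines above (and the inlined variants below)
 * return the number of bytes that could NOT be copied; 0 means
 * complete success.
 */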
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
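	/*
	 * Fast path: for compile-time-constant sizes the switch below
	 * open-codes 1/2/4/8/10/16-byte transfers; any other size goes
	 * through the out-of-line copy_user_generic().
	 */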
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:
		__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		return ret;
	case 2:
		__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		return ret;
	case 4:
		__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		return ret;
	case 8:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}

static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:
		__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			       ret, "b", "b", "iq", 1);
		return ret;
	case 2:
		__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 4:
		__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			       ret, "l", "k", "ir", 4);
		return ret;
	case 8:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 10);
		if (unlikely(ret))
			return ret;
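		/* compiler barrier: keep the 2-byte tail store ordered
		 * after the 8-byte store */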
		asm("":::"memory");
		__put_user_asm(((u16 *)src)[4], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 16);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(((u64 *)src)[1], 1 + (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}

static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "ir", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}

__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);

__must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
					    unsigned size);

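/*
 * The _inatomic variants do not call might_fault() and so may be used
 * where pagefaults are disabled; callers must handle short copies.
 */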
static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}

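/*
 * Cache-bypassing (non-temporal) copy from user space; a non-zero
 * "zerorest" asks the low-level routine to zero the destination tail
 * if the copy faults part-way.
 */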
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int __copy_from_user_nocache(void *dst, const void __user *src,
					   unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int __copy_from_user_inatomic_nocache(void *dst,
						    const void __user *src,
						    unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}

unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* _ASM_X86_UACCESS_64_H */
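
Usage sketch (illustrative only, not part of the file above; the struct and
function names are hypothetical). Because sizeof(*uargs) is a compile-time
constant, the __copy_from_user() call below resolves to the inlined 16-byte
path ("case 16") rather than the out-of-line copy_user_generic():

#include <linux/types.h>
#include <linux/uaccess.h>	/* pulls in uaccess_64.h on x86-64 */

struct example_args {		/* hypothetical ioctl argument block */
	u64 flags;
	u64 addr;
};				/* sizeof == 16: hits "case 16" above */

static int example_fetch_args(struct example_args *kargs,
			      const struct example_args __user *uargs)
{
	/* __copy_from_user() skips access_ok(), so check it ourselves */
	if (!access_ok(VERIFY_READ, uargs, sizeof(*uargs)))
		return -EFAULT;
	/* returns the number of bytes NOT copied; non-zero means fault */
	if (__copy_from_user(kargs, uargs, sizeof(*kargs)))
		return -EFAULT;
	return 0;
}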