 include/asm-x86_64/uaccess.h | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)
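
The change below is a pure syntax cleanup: every __asm__ __volatile__ becomes asm volatile, every __typeof__ becomes typeof, and three "int ret = 0;" lines appear to lose only trailing whitespace. The plain spellings are GNU C keywords that GCC treats identically to the underscored ones; the underscored forms only matter for code that must also build as strict ISO C, which the kernel does not. A hypothetical userspace snippet (not taken from this header) showing the two spellings side by side:

/* Hypothetical illustration, not from uaccess.h: GCC accepts both the
 * double-underscore keywords and the plain ones for inline assembly;
 * the generated code is the same. */
static inline unsigned long read_rflags_old(void)
{
        unsigned long flags;
        __asm__ __volatile__("pushfq ; popq %0" : "=r" (flags));   /* old style */
        return flags;
}

static inline unsigned long read_rflags_new(void)
{
        unsigned long flags;
        asm volatile("pushfq ; popq %0" : "=r" (flags));            /* new style */
        return flags;
}

Both functions should compile to identical instructions, so the patch is not expected to change any generated code.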
diff --git a/include/asm-x86_64/uaccess.h b/include/asm-x86_64/uaccess.h
index 802a4a068ef6..e856570c0689 100644
--- a/include/asm-x86_64/uaccess.h
+++ b/include/asm-x86_64/uaccess.h
@@ -84,7 +84,7 @@ struct exception_table_entry
  */
 
 #define __get_user_x(size,ret,x,ptr) \
-        __asm__ __volatile__("call __get_user_" #size \
+        asm volatile("call __get_user_" #size \
                 :"=a" (ret),"=d" (x) \
                 :"c" (ptr) \
                 :"r8")
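
For reference, the constraints in __get_user_x have fixed meanings in GCC extended asm on x86-64: "=a" and "=d" bind outputs to rax and rdx, "c" binds an input to rcx, and the "r8" clobber tells the compiler the called stub may overwrite that register. A hypothetical, self-contained userspace example using the same output constraints:

/* Hypothetical userspace example (not kernel code): rdtsc returns the
 * timestamp counter in edx:eax, so "=a"/"=d" collect the two halves,
 * just as "=a"/"=d" collect the error code and value in __get_user_x. */
static inline unsigned long long read_tsc(void)
{
        unsigned int lo, hi;
        asm volatile("rdtsc" : "=a" (lo), "=d" (hi));
        return ((unsigned long long)hi << 32) | lo;
}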
@@ -101,7 +101,7 @@ struct exception_table_entry
         case 8: __get_user_x(8,__ret_gu,__val_gu,ptr); break; \
         default: __get_user_bad(); break; \
         } \
-        (x) = (__typeof__(*(ptr)))__val_gu; \
+        (x) = (typeof(*(ptr)))__val_gu; \
         __ret_gu; \
 })
 
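
typeof (like __typeof__) is a GNU C extension that yields the compile-time type of its operand; here it casts the unsigned long fetched by the stub back to the pointed-to type. A minimal sketch of the same idea, using a hypothetical read_once macro name:

/* Hypothetical sketch, not the kernel macro: a GNU C statement expression
 * that fetches through a pointer into a wide temporary and casts the
 * result back with typeof, mirroring (x) = (typeof(*(ptr)))__val_gu above. */
#define read_once(ptr) ({                                       \
        unsigned long __v = (unsigned long)*(ptr);              \
        (typeof(*(ptr)))__v;                                    \
})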
@@ -112,7 +112,7 @@ extern void __put_user_8(void);
 extern void __put_user_bad(void);
 
 #define __put_user_x(size,ret,x,ptr) \
-        __asm__ __volatile__("call __put_user_" #size \
+        asm volatile("call __put_user_" #size \
                 :"=a" (ret) \
                 :"c" (ptr),"d" (x) \
                 :"r8")
@@ -139,7 +139,7 @@ extern void __put_user_bad(void);
 #define __put_user_check(x,ptr,size) \
 ({ \
         int __pu_err; \
-        __typeof__(*(ptr)) __user *__pu_addr = (ptr); \
+        typeof(*(ptr)) __user *__pu_addr = (ptr); \
         switch (size) { \
         case 1: __put_user_x(1,__pu_err,x,__pu_addr); break; \
         case 2: __put_user_x(2,__pu_err,x,__pu_addr); break; \
@@ -173,7 +173,7 @@ struct __large_struct { unsigned long buf[100]; };
  * aliasing issues.
  */
 #define __put_user_asm(x, addr, err, itype, rtype, ltype, errno) \
-        __asm__ __volatile__( \
+        asm volatile( \
                 "1: mov"itype" %"rtype"1,%2\n" \
                 "2:\n" \
                 ".section .fixup,\"ax\"\n" \
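
The asm body truncated by the hunk above follows the kernel's standard exception-table pattern: the "1:" label marks the access that may fault, the .fixup section holds recovery code, and an __ex_table entry pairs the faulting address with its fixup so the page-fault handler can resume at "2:" with an error code. A userspace-compilable sketch of that shape, assuming the section layout and -14 (-EFAULT) value used by kernels of this era; nothing here will actually catch a fault outside the kernel:

/* Illustration only: the shape of __put_user_asm for a single byte.  In
 * userspace the __ex_table section is never consulted, so this only runs
 * correctly when addr is a valid pointer. */
static inline int put_byte(unsigned char val, unsigned char *addr)
{
        int err = 0;
        asm volatile(
                "1:     movb %b1,%2\n"
                "2:\n"
                ".section .fixup,\"ax\"\n"
                "3:     mov %3,%0\n"
                "       jmp 2b\n"
                ".previous\n"
                ".section __ex_table,\"a\"\n"
                "       .align 8\n"
                "       .quad 1b,3b\n"
                ".previous"
                : "=r" (err)
                : "q" (val), "m" (*addr), "i" (-14), "0" (err));
        return err;
}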
@@ -193,7 +193,7 @@ struct __large_struct { unsigned long buf[100]; };
         int __gu_err; \
         unsigned long __gu_val; \
         __get_user_size(__gu_val,(ptr),(size),__gu_err); \
-        (x) = (__typeof__(*(ptr)))__gu_val; \
+        (x) = (typeof(*(ptr)))__gu_val; \
         __gu_err; \
 })
 
@@ -217,7 +217,7 @@ do { \
 } while (0)
 
 #define __get_user_asm(x, addr, err, itype, rtype, ltype, errno) \
-        __asm__ __volatile__( \
+        asm volatile( \
                 "1: mov"itype" %2,%"rtype"1\n" \
                 "2:\n" \
                 ".section .fixup,\"ax\"\n" \
@@ -250,7 +250,7 @@ copy_in_user(void __user *to, const void __user *from, unsigned len);
 static __always_inline __must_check
 int __copy_from_user(void *dst, const void __user *src, unsigned size)
 {
-        int ret = 0; 
+        int ret = 0;
         if (!__builtin_constant_p(size))
                 return copy_user_generic(dst,(__force void *)src,size);
         switch (size) {
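
__builtin_constant_p(size) is the dispatch hinge in the three copy routines touched here: when the length is a compile-time constant, the switch collapses to one fixed-size __get_user_asm/__put_user_asm access, otherwise copy_user_generic handles it. A hypothetical userspace analogue of the same dispatch, with memcpy standing in for copy_user_generic:

#include <string.h>

/* Hypothetical sketch of the constant-size dispatch used by
 * __copy_from_user; plain loads/stores stand in for the __get_user_asm
 * cases and memcpy for the generic path. */
static inline void copy_small(void *dst, const void *src, unsigned size)
{
        if (!__builtin_constant_p(size)) {
                memcpy(dst, src, size);                 /* generic path */
                return;
        }
        switch (size) {
        case 1: *(unsigned char *)dst  = *(const unsigned char *)src;  break;
        case 2: *(unsigned short *)dst = *(const unsigned short *)src; break;
        case 4: *(unsigned int *)dst   = *(const unsigned int *)src;   break;
        case 8: *(unsigned long *)dst  = *(const unsigned long *)src;  break;
        default: memcpy(dst, src, size);                                break;
        }
}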
@@ -280,7 +280,7 @@ int __copy_from_user(void *dst, const void __user *src, unsigned size)
 static __always_inline __must_check
 int __copy_to_user(void __user *dst, const void *src, unsigned size)
 {
-        int ret = 0; 
+        int ret = 0;
         if (!__builtin_constant_p(size))
                 return copy_user_generic((__force void *)dst,src,size);
         switch (size) {
@@ -312,7 +312,7 @@ int __copy_to_user(void __user *dst, const void *src, unsigned size)
 static __always_inline __must_check
 int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
 {
-        int ret = 0; 
+        int ret = 0;
         if (!__builtin_constant_p(size))
                 return copy_user_generic((__force void *)dst,(__force void *)src,size);
         switch (size) {
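
The (__force void *) casts above exist only for the sparse checker: __user tags a pointer as belonging to a different address space, and __force marks the cast as intentional so the checker stays quiet. A sketch, assuming the <linux/compiler.h> definitions of this era (both macros expand to nothing in a normal build and only bite when __CHECKER__ is defined by sparse):

#ifdef __CHECKER__
# define __user         __attribute__((noderef, address_space(1)))
# define __force        __attribute__((force))
#else
# define __user
# define __force
#endif

static inline const void *strip_user(const void __user *p)
{
        return (__force const void *)p; /* deliberate address-space cast */
}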