author	Linus Torvalds <torvalds@linux-foundation.org>	2013-11-11 21:46:06 -0500
committer	Linus Torvalds <torvalds@linux-foundation.org>	2013-11-11 21:46:06 -0500
commit	c2136301e43cbb3b71d0163a9949f30dafcb4590 (patch)
tree	1da35409efcdf88921262410b6985fe4447f595d /arch
parent	986189f9ec81263c982b60433b5b937f1056a631 (diff)
parent	ff47ab4ff3cddfa7bc1b25b990e24abe2ae474ff (diff)
Merge branch 'x86-uaccess-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull x86 uaccess changes from Ingo Molnar:
"A single change that micro-optimizes __copy_*_user_inatomic(), used by
the futex code"
* 'x86-uaccess-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
x86: Add 1/2/4/8 byte optimization to 64bit __copy_{from,to}_user_inatomic
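For readers who want to see the mechanism: the speedup comes from the __builtin_constant_p(size) switch already present in the copy helpers, which the _inatomic variants now reach (see the diff below) instead of always calling the out-of-line copy_user_generic(). What follows is a minimal userspace sketch of that constant-size dispatch pattern; generic_copy() and fixed_copy() are hypothetical stand-ins for copy_user_generic() and the kernel's inline helpers, and the sketch omits the exception-table fault handling the real code has.

	#include <stddef.h>
	#include <stdint.h>
	#include <string.h>

	/* Stand-in for the out-of-line copy_user_generic(); the real
	 * routine handles user-space faults, which this sketch does not. */
	static int generic_copy(void *dst, const void *src, size_t size)
	{
		memcpy(dst, src, size);
		return 0;
	}

	/* Constant-size fast path: because the function is always inlined
	 * and the switch is guarded by __builtin_constant_p(size), a call
	 * such as fixed_copy(&v, p, sizeof(uint32_t)) collapses at compile
	 * time into a single 4-byte load/store with no function call. */
	static inline __attribute__((always_inline))
	int fixed_copy(void *dst, const void *src, size_t size)
	{
		if (!__builtin_constant_p(size))
			return generic_copy(dst, src, size);
		switch (size) {
		case 1:	*(uint8_t *)dst  = *(const uint8_t *)src;	return 0;
		case 2:	*(uint16_t *)dst = *(const uint16_t *)src;	return 0;
		case 4:	*(uint32_t *)dst = *(const uint32_t *)src;	return 0;
		case 8:	*(uint64_t *)dst = *(const uint64_t *)src;	return 0;
		default:
			return generic_copy(dst, src, size);
		}
	}

Compiled with optimization, GCC resolves __builtin_constant_p() and the switch entirely, so a constant-size call leaves only a single move instruction.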
Diffstat (limited to 'arch')
-rw-r--r--	arch/x86/include/asm/uaccess_64.h	24
1 file changed, 18 insertions, 6 deletions
diff --git a/arch/x86/include/asm/uaccess_64.h b/arch/x86/include/asm/uaccess_64.h
index 0acae710fa00..190413d0de57 100644
--- a/arch/x86/include/asm/uaccess_64.h
+++ b/arch/x86/include/asm/uaccess_64.h
@@ -49,11 +49,10 @@ __must_check unsigned long
 copy_in_user(void __user *to, const void __user *from, unsigned len);
 
 static __always_inline __must_check
-int __copy_from_user(void *dst, const void __user *src, unsigned size)
+int __copy_from_user_nocheck(void *dst, const void __user *src, unsigned size)
 {
 	int ret = 0;
 
-	might_fault();
 	if (!__builtin_constant_p(size))
 		return copy_user_generic(dst, (__force void *)src, size);
 	switch (size) {
@@ -93,11 +92,17 @@ int __copy_from_user(void *dst, const void __user *src, unsigned size)
 }
 
 static __always_inline __must_check
-int __copy_to_user(void __user *dst, const void *src, unsigned size)
+int __copy_from_user(void *dst, const void __user *src, unsigned size)
+{
+	might_fault();
+	return __copy_from_user_nocheck(dst, src, size);
+}
+
+static __always_inline __must_check
+int __copy_to_user_nocheck(void __user *dst, const void *src, unsigned size)
 {
 	int ret = 0;
 
-	might_fault();
 	if (!__builtin_constant_p(size))
 		return copy_user_generic((__force void *)dst, src, size);
 	switch (size) {
@@ -137,6 +142,13 @@ int __copy_to_user(void __user *dst, const void *src, unsigned size)
 }
 
 static __always_inline __must_check
+int __copy_to_user(void __user *dst, const void *src, unsigned size)
+{
+	might_fault();
+	return __copy_to_user_nocheck(dst, src, size);
+}
+
+static __always_inline __must_check
 int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
 {
 	int ret = 0;
@@ -192,13 +204,13 @@ int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
 static __must_check __always_inline int
 __copy_from_user_inatomic(void *dst, const void __user *src, unsigned size)
 {
-	return copy_user_generic(dst, (__force const void *)src, size);
+	return __copy_from_user_nocheck(dst, (__force const void *)src, size);
 }
 
 static __must_check __always_inline int
 __copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
 {
-	return copy_user_generic((__force void *)dst, src, size);
+	return __copy_to_user_nocheck((__force void *)dst, src, size);
 }
 
 extern long __copy_user_nocache(void *dst, const void __user *src,
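For context on the futex connection mentioned in the pull summary: the futex code copies a u32 with a compile-time-constant size under pagefault_disable(), so with this patch that copy inlines to a single 4-byte move instead of a call to copy_user_generic(). Below is a kernel-context fragment in that style; the wrapper name get_user_u32_inatomic is hypothetical, not a function in the tree.

	/* Kernel-context sketch: size is the compile-time constant
	 * sizeof(u32), so after this patch the copy is a single inlined
	 * 4-byte move rather than a call to copy_user_generic(). */
	static int get_user_u32_inatomic(u32 *dest, u32 __user *from)
	{
		int ret;

		pagefault_disable();	/* atomic context: faults must not sleep */
		ret = __copy_from_user_inatomic(dest, from, sizeof(u32));
		pagefault_enable();

		return ret ? -EFAULT : 0;
	}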