about | summary | refs | log | tree | commit | diff | stats
path: root/arch/x86/lib/copy_user_64.S
diff options
context:
space:
mode:
author:    Arnd Bergmann <arnd@arndb.de>  2012-10-04 16:57:00 -0400
committer: Arnd Bergmann <arnd@arndb.de>  2012-10-04 16:57:51 -0400
commit:    c37d6154c0b9163c27e53cc1d0be3867b4abd760 (patch)
tree:      7a24522c56d1cb284dff1d3c225bbdaba0901bb5 /arch/x86/lib/copy_user_64.S
parent:    e7a570ff7dff9af6e54ff5e580a61ec7652137a0 (diff)
parent:    8a1ab3155c2ac7fbe5f2038d6e26efeb607a1498 (diff)
Merge branch 'disintegrate-asm-generic' of git://git.infradead.org/users/dhowells/linux-headers into asm-generic
Patches from David Howells <dhowells@redhat.com>: This is to complete part of the UAPI disintegration for which the preparatory patches were pulled recently. Note that there are some fixup patches which are at the base of the branch aimed at you, plus all arches get the asm-generic branch merged in too. * 'disintegrate-asm-generic' of git://git.infradead.org/users/dhowells/linux-headers: UAPI: (Scripted) Disintegrate include/asm-generic UAPI: Fix conditional header installation handling (notably kvm_para.h on m68k) c6x: remove c6x signal.h UAPI: Split compound conditionals containing __KERNEL__ in Arm64 UAPI: Fix the guards on various asm/unistd.h files Signed-off-by: Arnd Bergmann <arnd@arndb.de>
Diffstat (limited to 'arch/x86/lib/copy_user_64.S')
-rw-r--r--  arch/x86/lib/copy_user_64.S  7
1 file changed, 7 insertions(+), 0 deletions(-)
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S
index 5b2995f4557a..a30ca15be21c 100644
--- a/arch/x86/lib/copy_user_64.S
+++ b/arch/x86/lib/copy_user_64.S
@@ -17,6 +17,7 @@
 #include <asm/cpufeature.h>
 #include <asm/alternative-asm.h>
 #include <asm/asm.h>
+#include <asm/smap.h>
 
 /*
  * By placing feature2 after feature1 in altinstructions section, we logically
@@ -130,6 +131,7 @@ ENDPROC(bad_from_user)
  */
 ENTRY(copy_user_generic_unrolled)
 	CFI_STARTPROC
+	ASM_STAC
 	cmpl $8,%edx
 	jb 20f		/* less then 8 bytes, go to byte copy loop */
 	ALIGN_DESTINATION
@@ -177,6 +179,7 @@ ENTRY(copy_user_generic_unrolled)
 	decl %ecx
 	jnz 21b
 23:	xor %eax,%eax
+	ASM_CLAC
 	ret
 
 	.section .fixup,"ax"
@@ -232,6 +235,7 @@ ENDPROC(copy_user_generic_unrolled)
  */
 ENTRY(copy_user_generic_string)
 	CFI_STARTPROC
+	ASM_STAC
 	andl %edx,%edx
 	jz 4f
 	cmpl $8,%edx
@@ -246,6 +250,7 @@ ENTRY(copy_user_generic_string)
3:	rep
 	movsb
4:	xorl %eax,%eax
+	ASM_CLAC
 	ret
 
 	.section .fixup,"ax"
@@ -273,12 +278,14 @@ ENDPROC(copy_user_generic_string)
  */
 ENTRY(copy_user_enhanced_fast_string)
 	CFI_STARTPROC
+	ASM_STAC
 	andl %edx,%edx
 	jz 2f
 	movl %edx,%ecx
1:	rep
 	movsb
2:	xorl %eax,%eax
+	ASM_CLAC
 	ret
 
 	.section .fixup,"ax"