diff options
author     Borislav Petkov <bp@suse.de>      2015-05-13 13:42:23 -0400
committer  Ingo Molnar <mingo@kernel.org>    2015-05-14 01:25:34 -0400
commit     9e6b13f761d5914a8c9b83610e8d459653515c94 (patch)
tree       241fdaea2c9e88c3b230227bc83594e9d2a6ff87
parent     26e7d9dee8a5b6c844178c8e2d91be540ce311c0 (diff)
x86/asm/uaccess: Unify the ALIGN_DESTINATION macro
Pull it up into the header and kill duplicate versions.
Separately, both macros are identical:
35948b2bd3431aee7149e85cfe4becbc /tmp/a
35948b2bd3431aee7149e85cfe4becbc /tmp/b
Signed-off-by: Borislav Petkov <bp@suse.de>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: http://lkml.kernel.org/r/1431538944-27724-3-git-send-email-bp@alien8.de
Signed-off-by: Ingo Molnar <mingo@kernel.org>
-rw-r--r--  arch/x86/include/asm/asm.h           | 25
-rw-r--r--  arch/x86/lib/copy_user_64.S          | 24
-rw-r--r--  arch/x86/lib/copy_user_nocache_64.S  | 24
3 files changed, 25 insertions, 48 deletions
diff --git a/arch/x86/include/asm/asm.h b/arch/x86/include/asm/asm.h
index 7730c1c5c83a..189679aba703 100644
--- a/arch/x86/include/asm/asm.h
+++ b/arch/x86/include/asm/asm.h
@@ -63,6 +63,31 @@
 	_ASM_ALIGN ;				\
 	_ASM_PTR (entry);			\
 	.popsection
+
+	.macro ALIGN_DESTINATION
+	/* check for bad alignment of destination */
+	movl %edi,%ecx
+	andl $7,%ecx
+	jz 102f				/* already aligned */
+	subl $8,%ecx
+	negl %ecx
+	subl %ecx,%edx
+100:	movb (%rsi),%al
+101:	movb %al,(%rdi)
+	incq %rsi
+	incq %rdi
+	decl %ecx
+	jnz 100b
+102:
+	.section .fixup,"ax"
+103:	addl %ecx,%edx			/* ecx is zerorest also */
+	jmp copy_user_handle_tail
+	.previous
+
+	_ASM_EXTABLE(100b,103b)
+	_ASM_EXTABLE(101b,103b)
+	.endm
+
 #else
 # define _ASM_EXTABLE(from,to)			\
 	" .pushsection \"__ex_table\",\"a\"\n"	\
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S
index fa997dfaef24..06ce685c3a5d 100644
--- a/arch/x86/lib/copy_user_64.S
+++ b/arch/x86/lib/copy_user_64.S
@@ -16,30 +16,6 @@
 #include <asm/asm.h>
 #include <asm/smap.h>
 
-	.macro ALIGN_DESTINATION
-	/* check for bad alignment of destination */
-	movl %edi,%ecx
-	andl $7,%ecx
-	jz 102f				/* already aligned */
-	subl $8,%ecx
-	negl %ecx
-	subl %ecx,%edx
-100:	movb (%rsi),%al
-101:	movb %al,(%rdi)
-	incq %rsi
-	incq %rdi
-	decl %ecx
-	jnz 100b
-102:
-	.section .fixup,"ax"
-103:	addl %ecx,%edx			/* ecx is zerorest also */
-	jmp copy_user_handle_tail
-	.previous
-
-	_ASM_EXTABLE(100b,103b)
-	_ASM_EXTABLE(101b,103b)
-	.endm
-
 /* Standard copy_to_user with segment limit checking */
 ENTRY(_copy_to_user)
 	CFI_STARTPROC
diff --git a/arch/x86/lib/copy_user_nocache_64.S b/arch/x86/lib/copy_user_nocache_64.S
index 42eeb12e0cd9..b836a2bace15 100644
--- a/arch/x86/lib/copy_user_nocache_64.S
+++ b/arch/x86/lib/copy_user_nocache_64.S
@@ -14,30 +14,6 @@
 #include <asm/asm.h>
 #include <asm/smap.h>
 
-	.macro ALIGN_DESTINATION
-	/* check for bad alignment of destination */
-	movl %edi,%ecx
-	andl $7,%ecx
-	jz 102f				/* already aligned */
-	subl $8,%ecx
-	negl %ecx
-	subl %ecx,%edx
-100:	movb (%rsi),%al
-101:	movb %al,(%rdi)
-	incq %rsi
-	incq %rdi
-	decl %ecx
-	jnz 100b
-102:
-	.section .fixup,"ax"
-103:	addl %ecx,%edx			/* ecx is zerorest also */
-	jmp copy_user_handle_tail
-	.previous
-
-	_ASM_EXTABLE(100b,103b)
-	_ASM_EXTABLE(101b,103b)
-	.endm
-
 /*
  * copy_user_nocache - Uncached memory copy with exception handling
  * This will force destination/source out of cache for more performance.