aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorBorislav Petkov <bp@suse.de>2015-05-13 13:42:24 -0400
committerIngo Molnar <mingo@kernel.org>2015-05-14 01:25:35 -0400
commitb41e6ec242cba0151f0b32041cfa728e7ca6e0b7 (patch)
tree0b8a9f0c0aeb0dd11af99bfed4d8972baa3e7b5a
parent9e6b13f761d5914a8c9b83610e8d459653515c94 (diff)
x86/asm/uaccess: Get rid of copy_user_nocache_64.S
Move __copy_user_nocache() to arch/x86/lib/copy_user_64.S and kill the containing file. No functionality change. Signed-off-by: Borislav Petkov <bp@suse.de> Cc: Andy Lutomirski <luto@amacapital.net> Cc: Borislav Petkov <bp@alien8.de> Cc: Brian Gerst <brgerst@gmail.com> Cc: Denys Vlasenko <dvlasenk@redhat.com> Cc: H. Peter Anvin <hpa@zytor.com> Cc: Linus Torvalds <torvalds@linux-foundation.org> Cc: Peter Zijlstra <peterz@infradead.org> Cc: Thomas Gleixner <tglx@linutronix.de> Link: http://lkml.kernel.org/r/1431538944-27724-4-git-send-email-bp@alien8.de Signed-off-by: Ingo Molnar <mingo@kernel.org>
-rw-r--r--arch/x86/lib/Makefile2
-rw-r--r--arch/x86/lib/copy_user_64.S92
-rw-r--r--arch/x86/lib/copy_user_nocache_64.S107
3 files changed, 93 insertions, 108 deletions
diff --git a/arch/x86/lib/Makefile b/arch/x86/lib/Makefile
index 1530afb07c85..982989d282ff 100644
--- a/arch/x86/lib/Makefile
+++ b/arch/x86/lib/Makefile
@@ -40,6 +40,6 @@ else
40 lib-y += csum-partial_64.o csum-copy_64.o csum-wrappers_64.o 40 lib-y += csum-partial_64.o csum-copy_64.o csum-wrappers_64.o
41 lib-y += clear_page_64.o copy_page_64.o 41 lib-y += clear_page_64.o copy_page_64.o
42 lib-y += memmove_64.o memset_64.o 42 lib-y += memmove_64.o memset_64.o
43 lib-y += copy_user_64.o copy_user_nocache_64.o 43 lib-y += copy_user_64.o
44 lib-y += cmpxchg16b_emu.o 44 lib-y += cmpxchg16b_emu.o
45endif 45endif
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S
index 06ce685c3a5d..e4b3beee83bd 100644
--- a/arch/x86/lib/copy_user_64.S
+++ b/arch/x86/lib/copy_user_64.S
@@ -242,3 +242,95 @@ ENTRY(copy_user_enhanced_fast_string)
242 _ASM_EXTABLE(1b,12b) 242 _ASM_EXTABLE(1b,12b)
243 CFI_ENDPROC 243 CFI_ENDPROC
244ENDPROC(copy_user_enhanced_fast_string) 244ENDPROC(copy_user_enhanced_fast_string)
245
246/*
247 * copy_user_nocache - Uncached memory copy with exception handling
248 * This will force destination/source out of cache for more performance.
249 */
250ENTRY(__copy_user_nocache)
251 CFI_STARTPROC
252 ASM_STAC
253 cmpl $8,%edx
254	jb 20f		/* less than 8 bytes, go to byte copy loop */
255 ALIGN_DESTINATION
256 movl %edx,%ecx
257 andl $63,%edx
258 shrl $6,%ecx
259 jz 17f
2601: movq (%rsi),%r8
2612: movq 1*8(%rsi),%r9
2623: movq 2*8(%rsi),%r10
2634: movq 3*8(%rsi),%r11
2645: movnti %r8,(%rdi)
2656: movnti %r9,1*8(%rdi)
2667: movnti %r10,2*8(%rdi)
2678: movnti %r11,3*8(%rdi)
2689: movq 4*8(%rsi),%r8
26910: movq 5*8(%rsi),%r9
27011: movq 6*8(%rsi),%r10
27112: movq 7*8(%rsi),%r11
27213: movnti %r8,4*8(%rdi)
27314: movnti %r9,5*8(%rdi)
27415: movnti %r10,6*8(%rdi)
27516: movnti %r11,7*8(%rdi)
276 leaq 64(%rsi),%rsi
277 leaq 64(%rdi),%rdi
278 decl %ecx
279 jnz 1b
28017: movl %edx,%ecx
281 andl $7,%edx
282 shrl $3,%ecx
283 jz 20f
28418: movq (%rsi),%r8
28519: movnti %r8,(%rdi)
286 leaq 8(%rsi),%rsi
287 leaq 8(%rdi),%rdi
288 decl %ecx
289 jnz 18b
29020: andl %edx,%edx
291 jz 23f
292 movl %edx,%ecx
29321: movb (%rsi),%al
29422: movb %al,(%rdi)
295 incq %rsi
296 incq %rdi
297 decl %ecx
298 jnz 21b
29923: xorl %eax,%eax
300 ASM_CLAC
301 sfence
302 ret
303
304 .section .fixup,"ax"
30530: shll $6,%ecx
306 addl %ecx,%edx
307 jmp 60f
30840: lea (%rdx,%rcx,8),%rdx
309 jmp 60f
31050: movl %ecx,%edx
31160: sfence
312 jmp copy_user_handle_tail
313 .previous
314
315 _ASM_EXTABLE(1b,30b)
316 _ASM_EXTABLE(2b,30b)
317 _ASM_EXTABLE(3b,30b)
318 _ASM_EXTABLE(4b,30b)
319 _ASM_EXTABLE(5b,30b)
320 _ASM_EXTABLE(6b,30b)
321 _ASM_EXTABLE(7b,30b)
322 _ASM_EXTABLE(8b,30b)
323 _ASM_EXTABLE(9b,30b)
324 _ASM_EXTABLE(10b,30b)
325 _ASM_EXTABLE(11b,30b)
326 _ASM_EXTABLE(12b,30b)
327 _ASM_EXTABLE(13b,30b)
328 _ASM_EXTABLE(14b,30b)
329 _ASM_EXTABLE(15b,30b)
330 _ASM_EXTABLE(16b,30b)
331 _ASM_EXTABLE(18b,40b)
332 _ASM_EXTABLE(19b,40b)
333 _ASM_EXTABLE(21b,50b)
334 _ASM_EXTABLE(22b,50b)
335 CFI_ENDPROC
336ENDPROC(__copy_user_nocache)
diff --git a/arch/x86/lib/copy_user_nocache_64.S b/arch/x86/lib/copy_user_nocache_64.S
deleted file mode 100644
index b836a2bace15..000000000000
--- a/arch/x86/lib/copy_user_nocache_64.S
+++ /dev/null
@@ -1,107 +0,0 @@
1/*
2 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
3 * Copyright 2002 Andi Kleen, SuSE Labs.
4 * Subject to the GNU Public License v2.
5 *
6 * Functions to copy from and to user space.
7 */
8
9#include <linux/linkage.h>
10#include <asm/dwarf2.h>
11#include <asm/current.h>
12#include <asm/asm-offsets.h>
13#include <asm/thread_info.h>
14#include <asm/asm.h>
15#include <asm/smap.h>
16
17/*
18 * copy_user_nocache - Uncached memory copy with exception handling
19 * This will force destination/source out of cache for more performance.
20 */
21ENTRY(__copy_user_nocache)
22 CFI_STARTPROC
23 ASM_STAC
24 cmpl $8,%edx
25	jb 20f		/* less than 8 bytes, go to byte copy loop */
26 ALIGN_DESTINATION
27 movl %edx,%ecx
28 andl $63,%edx
29 shrl $6,%ecx
30 jz 17f
311: movq (%rsi),%r8
322: movq 1*8(%rsi),%r9
333: movq 2*8(%rsi),%r10
344: movq 3*8(%rsi),%r11
355: movnti %r8,(%rdi)
366: movnti %r9,1*8(%rdi)
377: movnti %r10,2*8(%rdi)
388: movnti %r11,3*8(%rdi)
399: movq 4*8(%rsi),%r8
4010: movq 5*8(%rsi),%r9
4111: movq 6*8(%rsi),%r10
4212: movq 7*8(%rsi),%r11
4313: movnti %r8,4*8(%rdi)
4414: movnti %r9,5*8(%rdi)
4515: movnti %r10,6*8(%rdi)
4616: movnti %r11,7*8(%rdi)
47 leaq 64(%rsi),%rsi
48 leaq 64(%rdi),%rdi
49 decl %ecx
50 jnz 1b
5117: movl %edx,%ecx
52 andl $7,%edx
53 shrl $3,%ecx
54 jz 20f
5518: movq (%rsi),%r8
5619: movnti %r8,(%rdi)
57 leaq 8(%rsi),%rsi
58 leaq 8(%rdi),%rdi
59 decl %ecx
60 jnz 18b
6120: andl %edx,%edx
62 jz 23f
63 movl %edx,%ecx
6421: movb (%rsi),%al
6522: movb %al,(%rdi)
66 incq %rsi
67 incq %rdi
68 decl %ecx
69 jnz 21b
7023: xorl %eax,%eax
71 ASM_CLAC
72 sfence
73 ret
74
75 .section .fixup,"ax"
7630: shll $6,%ecx
77 addl %ecx,%edx
78 jmp 60f
7940: lea (%rdx,%rcx,8),%rdx
80 jmp 60f
8150: movl %ecx,%edx
8260: sfence
83 jmp copy_user_handle_tail
84 .previous
85
86 _ASM_EXTABLE(1b,30b)
87 _ASM_EXTABLE(2b,30b)
88 _ASM_EXTABLE(3b,30b)
89 _ASM_EXTABLE(4b,30b)
90 _ASM_EXTABLE(5b,30b)
91 _ASM_EXTABLE(6b,30b)
92 _ASM_EXTABLE(7b,30b)
93 _ASM_EXTABLE(8b,30b)
94 _ASM_EXTABLE(9b,30b)
95 _ASM_EXTABLE(10b,30b)
96 _ASM_EXTABLE(11b,30b)
97 _ASM_EXTABLE(12b,30b)
98 _ASM_EXTABLE(13b,30b)
99 _ASM_EXTABLE(14b,30b)
100 _ASM_EXTABLE(15b,30b)
101 _ASM_EXTABLE(16b,30b)
102 _ASM_EXTABLE(18b,40b)
103 _ASM_EXTABLE(19b,40b)
104 _ASM_EXTABLE(21b,50b)
105 _ASM_EXTABLE(22b,50b)
106 CFI_ENDPROC
107ENDPROC(__copy_user_nocache)