Diffstat (limited to 'arch/x86/lib/copy_user_nocache_64.S')
 -rw-r--r--  arch/x86/lib/copy_user_nocache_64.S | 107 ----------------------
 1 file changed, 0 insertions(+), 107 deletions(-)
diff --git a/arch/x86/lib/copy_user_nocache_64.S b/arch/x86/lib/copy_user_nocache_64.S
deleted file mode 100644
index b836a2bace15..000000000000
--- a/arch/x86/lib/copy_user_nocache_64.S
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
- * Copyright 2002 Andi Kleen, SuSE Labs.
- * Subject to the GNU Public License v2.
- *
- * Functions to copy from and to user space.
- */
-
-#include <linux/linkage.h>
-#include <asm/dwarf2.h>
-#include <asm/current.h>
-#include <asm/asm-offsets.h>
-#include <asm/thread_info.h>
-#include <asm/asm.h>
-#include <asm/smap.h>
-
-/*
- * copy_user_nocache - Uncached memory copy with exception handling
- * This will force destination/source out of cache for more performance.
- */
-ENTRY(__copy_user_nocache)
-	CFI_STARTPROC
-	ASM_STAC
-	cmpl $8,%edx
-	jb 20f		/* less than 8 bytes, go to byte copy loop */
-	ALIGN_DESTINATION
-	movl %edx,%ecx
-	andl $63,%edx
-	shrl $6,%ecx
-	jz 17f
-1:	movq (%rsi),%r8
-2:	movq 1*8(%rsi),%r9
-3:	movq 2*8(%rsi),%r10
-4:	movq 3*8(%rsi),%r11
-5:	movnti %r8,(%rdi)
-6:	movnti %r9,1*8(%rdi)
-7:	movnti %r10,2*8(%rdi)
-8:	movnti %r11,3*8(%rdi)
-9:	movq 4*8(%rsi),%r8
-10:	movq 5*8(%rsi),%r9
-11:	movq 6*8(%rsi),%r10
-12:	movq 7*8(%rsi),%r11
-13:	movnti %r8,4*8(%rdi)
-14:	movnti %r9,5*8(%rdi)
-15:	movnti %r10,6*8(%rdi)
-16:	movnti %r11,7*8(%rdi)
-	leaq 64(%rsi),%rsi
-	leaq 64(%rdi),%rdi
-	decl %ecx
-	jnz 1b
-17:	movl %edx,%ecx
-	andl $7,%edx
-	shrl $3,%ecx
-	jz 20f
-18:	movq (%rsi),%r8
-19:	movnti %r8,(%rdi)
-	leaq 8(%rsi),%rsi
-	leaq 8(%rdi),%rdi
-	decl %ecx
-	jnz 18b
-20:	andl %edx,%edx
-	jz 23f
-	movl %edx,%ecx
-21:	movb (%rsi),%al
-22:	movb %al,(%rdi)
-	incq %rsi
-	incq %rdi
-	decl %ecx
-	jnz 21b
-23:	xorl %eax,%eax
-	ASM_CLAC
-	sfence
-	ret
-
-	.section .fixup,"ax"
-30:	shll $6,%ecx
-	addl %ecx,%edx
-	jmp 60f
-40:	lea (%rdx,%rcx,8),%rdx
-	jmp 60f
-50:	movl %ecx,%edx
-60:	sfence
-	jmp copy_user_handle_tail
-	.previous
-
-	_ASM_EXTABLE(1b,30b)
-	_ASM_EXTABLE(2b,30b)
-	_ASM_EXTABLE(3b,30b)
-	_ASM_EXTABLE(4b,30b)
-	_ASM_EXTABLE(5b,30b)
-	_ASM_EXTABLE(6b,30b)
-	_ASM_EXTABLE(7b,30b)
-	_ASM_EXTABLE(8b,30b)
-	_ASM_EXTABLE(9b,30b)
-	_ASM_EXTABLE(10b,30b)
-	_ASM_EXTABLE(11b,30b)
-	_ASM_EXTABLE(12b,30b)
-	_ASM_EXTABLE(13b,30b)
-	_ASM_EXTABLE(14b,30b)
-	_ASM_EXTABLE(15b,30b)
-	_ASM_EXTABLE(16b,30b)
-	_ASM_EXTABLE(18b,40b)
-	_ASM_EXTABLE(19b,40b)
-	_ASM_EXTABLE(21b,50b)
-	_ASM_EXTABLE(22b,50b)
-	CFI_ENDPROC
-ENDPROC(__copy_user_nocache)
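
For context on what was removed: __copy_user_nocache copies a byte count held in %edx in three stages: an unrolled main loop that moves one 64-byte cache line (8 quadwords) per iteration using movnti non-temporal stores, a quadword loop for the 8..63-byte remainder, and a byte loop for the final 0..7 bytes. Below is a rough userspace C sketch of that structure, illustrative only: the name copy_nocache_sketch and its signature are invented for this note, the SSE2 intrinsics _mm_stream_si64/_mm_sfence stand in for movnti/sfence, and the sketch omits what makes the kernel routine special (STAC/CLAC user-access bracketing, ALIGN_DESTINATION, and exception-table fault recovery).

#include <stddef.h>
#include <emmintrin.h>	/* _mm_stream_si64(), _mm_sfence() */

/*
 * Illustrative sketch of the deleted routine's copy structure
 * (hypothetical name and signature, not the kernel API). The real
 * __copy_user_nocache also aligns the destination and recovers from
 * faults via the exception table; neither is expressible in plain C,
 * so this version returns 0 ("all bytes copied") unconditionally.
 */
size_t copy_nocache_sketch(void *dst, const void *src, size_t len)
{
	long long *d = dst;
	const long long *s = src;

	/* Main loop: one 64-byte cache line per pass, written with
	 * non-temporal stores as movnti does. */
	while (len >= 64) {
		for (int i = 0; i < 8; i++)
			_mm_stream_si64(&d[i], s[i]);
		d += 8;
		s += 8;
		len -= 64;
	}

	/* Quadword loop for the 8..63-byte remainder. */
	while (len >= 8) {
		_mm_stream_si64(d, *s);
		d++;
		s++;
		len -= 8;
	}

	/* Byte loop for the final 0..7 bytes (ordinary cached stores,
	 * matching the movb tail in the assembly). */
	unsigned char *db = (unsigned char *)d;
	const unsigned char *sb = (const unsigned char *)s;
	while (len--)
		*db++ = *sb++;

	/* Order the non-temporal stores, as the assembly's sfence does. */
	_mm_sfence();
	return 0;	/* the kernel routine returns bytes NOT copied */
}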
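The .fixup section is the part such a sketch cannot express. A fault in the main loop (labels 1-16) lands at 30, which converts the cache lines still pending in %ecx back to bytes (shll $6) and adds the sub-cache-line remainder from %edx; a fault in the quadword loop (18/19) lands at 40, which does the same via lea (%rdx,%rcx,8),%rdx; a fault in the byte loop (21/22) lands at 50, where %ecx already holds the remaining byte count. All three paths reach 60, which fences the pending non-temporal stores with sfence before tailing into copy_user_handle_tail with the number of uncopied bytes in %edx.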