Diffstat (limited to 'arch/x86/lib/copy_user_nocache_64.S')
-rw-r--r--  arch/x86/lib/copy_user_nocache_64.S  107
1 file changed, 0 insertions, 107 deletions
diff --git a/arch/x86/lib/copy_user_nocache_64.S b/arch/x86/lib/copy_user_nocache_64.S
deleted file mode 100644
index b836a2bace15..000000000000
--- a/arch/x86/lib/copy_user_nocache_64.S
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
- * Copyright 2002 Andi Kleen, SuSE Labs.
- * Subject to the GNU Public License v2.
- *
- * Functions to copy from and to user space.
- */
-
-#include <linux/linkage.h>
-#include <asm/dwarf2.h>
-#include <asm/current.h>
-#include <asm/asm-offsets.h>
-#include <asm/thread_info.h>
-#include <asm/asm.h>
-#include <asm/smap.h>
-
-/*
- * copy_user_nocache - Uncached memory copy with exception handling
- * This will force destination/source out of cache for more performance.
- */
-ENTRY(__copy_user_nocache)
-	CFI_STARTPROC
-	ASM_STAC
-	cmpl $8,%edx
-	jb 20f		/* less then 8 bytes, go to byte copy loop */
-	ALIGN_DESTINATION
-	movl %edx,%ecx
-	andl $63,%edx
-	shrl $6,%ecx
-	jz 17f
-1:	movq (%rsi),%r8
-2:	movq 1*8(%rsi),%r9
-3:	movq 2*8(%rsi),%r10
-4:	movq 3*8(%rsi),%r11
-5:	movnti %r8,(%rdi)
-6:	movnti %r9,1*8(%rdi)
-7:	movnti %r10,2*8(%rdi)
-8:	movnti %r11,3*8(%rdi)
-9:	movq 4*8(%rsi),%r8
-10:	movq 5*8(%rsi),%r9
-11:	movq 6*8(%rsi),%r10
-12:	movq 7*8(%rsi),%r11
-13:	movnti %r8,4*8(%rdi)
-14:	movnti %r9,5*8(%rdi)
-15:	movnti %r10,6*8(%rdi)
-16:	movnti %r11,7*8(%rdi)
-	leaq 64(%rsi),%rsi
-	leaq 64(%rdi),%rdi
-	decl %ecx
-	jnz 1b
-17:	movl %edx,%ecx
-	andl $7,%edx
-	shrl $3,%ecx
-	jz 20f
-18:	movq (%rsi),%r8
-19:	movnti %r8,(%rdi)
-	leaq 8(%rsi),%rsi
-	leaq 8(%rdi),%rdi
-	decl %ecx
-	jnz 18b
-20:	andl %edx,%edx
-	jz 23f
-	movl %edx,%ecx
-21:	movb (%rsi),%al
-22:	movb %al,(%rdi)
-	incq %rsi
-	incq %rdi
-	decl %ecx
-	jnz 21b
-23:	xorl %eax,%eax
-	ASM_CLAC
-	sfence
-	ret
-
-	.section .fixup,"ax"
-30:	shll $6,%ecx
-	addl %ecx,%edx
-	jmp 60f
-40:	lea (%rdx,%rcx,8),%rdx
-	jmp 60f
-50:	movl %ecx,%edx
-60:	sfence
-	jmp copy_user_handle_tail
-	.previous
-
-	_ASM_EXTABLE(1b,30b)
-	_ASM_EXTABLE(2b,30b)
-	_ASM_EXTABLE(3b,30b)
-	_ASM_EXTABLE(4b,30b)
-	_ASM_EXTABLE(5b,30b)
-	_ASM_EXTABLE(6b,30b)
-	_ASM_EXTABLE(7b,30b)
-	_ASM_EXTABLE(8b,30b)
-	_ASM_EXTABLE(9b,30b)
-	_ASM_EXTABLE(10b,30b)
-	_ASM_EXTABLE(11b,30b)
-	_ASM_EXTABLE(12b,30b)
-	_ASM_EXTABLE(13b,30b)
-	_ASM_EXTABLE(14b,30b)
-	_ASM_EXTABLE(15b,30b)
-	_ASM_EXTABLE(16b,30b)
-	_ASM_EXTABLE(18b,40b)
-	_ASM_EXTABLE(19b,40b)
-	_ASM_EXTABLE(21b,50b)
-	_ASM_EXTABLE(22b,50b)
-	CFI_ENDPROC
-ENDPROC(__copy_user_nocache)
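
The deleted routine copies in 64-byte chunks of movnti (non-temporal) stores, which bypass the cache, and finishes with an sfence because those stores are weakly ordered. For readers less familiar with that pattern, below is a minimal userspace C sketch of the same idea using SSE2 intrinsics. It is not kernel code: the function name copy_nocache and the plain memcpy tail are illustrative assumptions, and it omits the STAC/CLAC, destination alignment, and exception-table machinery that the assembly needs for copying to and from user space.

	/*
	 * Illustrative sketch only: non-temporal 8-byte stores plus a
	 * trailing store fence, mirroring the movnti/sfence pattern in
	 * __copy_user_nocache above. Assumes x86-64 with SSE2.
	 */
	#include <emmintrin.h>	/* _mm_stream_si64, _mm_sfence */
	#include <stddef.h>
	#include <string.h>

	static void copy_nocache(void *dst, const void *src, size_t len)
	{
		long long *d = dst;
		const long long *s = src;

		/* bulk of the buffer: 8-byte stores that bypass the cache */
		while (len >= 8) {
			_mm_stream_si64(d++, *s++);
			len -= 8;
		}

		/* remaining tail bytes copied normally */
		if (len)
			memcpy(d, s, len);

		/* non-temporal stores are weakly ordered; fence before return */
		_mm_sfence();
	}

The final _mm_sfence() corresponds to the sfence the assembly executes before ret (and in its .fixup path), ensuring the bypass-cache stores are globally visible before the copy is reported complete.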
