diff options
author | Borislav Petkov <bp@suse.de> | 2015-05-13 13:42:24 -0400 |
---|---|---|
committer | Ingo Molnar <mingo@kernel.org> | 2015-05-14 01:25:35 -0400 |
commit | b41e6ec242cba0151f0b32041cfa728e7ca6e0b7 (patch) | |
tree | 0b8a9f0c0aeb0dd11af99bfed4d8972baa3e7b5a | |
parent | 9e6b13f761d5914a8c9b83610e8d459653515c94 (diff) |
x86/asm/uaccess: Get rid of copy_user_nocache_64.S
Move __copy_user_nocache() to arch/x86/lib/copy_user_64.S and
kill the containing file.
No functionality change.
Signed-off-by: Borislav Petkov <bp@suse.de>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: http://lkml.kernel.org/r/1431538944-27724-4-git-send-email-bp@alien8.de
Signed-off-by: Ingo Molnar <mingo@kernel.org>
-rw-r--r-- | arch/x86/lib/Makefile | 2 | ||||
-rw-r--r-- | arch/x86/lib/copy_user_64.S | 92 | ||||
-rw-r--r-- | arch/x86/lib/copy_user_nocache_64.S | 107 |
3 files changed, 93 insertions, 108 deletions
diff --git a/arch/x86/lib/Makefile b/arch/x86/lib/Makefile index 1530afb07c85..982989d282ff 100644 --- a/arch/x86/lib/Makefile +++ b/arch/x86/lib/Makefile | |||
@@ -40,6 +40,6 @@ else | |||
40 | lib-y += csum-partial_64.o csum-copy_64.o csum-wrappers_64.o | 40 | lib-y += csum-partial_64.o csum-copy_64.o csum-wrappers_64.o |
41 | lib-y += clear_page_64.o copy_page_64.o | 41 | lib-y += clear_page_64.o copy_page_64.o |
42 | lib-y += memmove_64.o memset_64.o | 42 | lib-y += memmove_64.o memset_64.o |
43 | lib-y += copy_user_64.o copy_user_nocache_64.o | 43 | lib-y += copy_user_64.o |
44 | lib-y += cmpxchg16b_emu.o | 44 | lib-y += cmpxchg16b_emu.o |
45 | endif | 45 | endif |
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S index 06ce685c3a5d..e4b3beee83bd 100644 --- a/arch/x86/lib/copy_user_64.S +++ b/arch/x86/lib/copy_user_64.S | |||
@@ -242,3 +242,95 @@ ENTRY(copy_user_enhanced_fast_string) | |||
242 | _ASM_EXTABLE(1b,12b) | 242 | _ASM_EXTABLE(1b,12b) |
243 | CFI_ENDPROC | 243 | CFI_ENDPROC |
244 | ENDPROC(copy_user_enhanced_fast_string) | 244 | ENDPROC(copy_user_enhanced_fast_string) |
245 | |||
246 | /* | ||
247 | * copy_user_nocache - Uncached memory copy with exception handling | ||
248 | * This will force destination/source out of cache for more performance. | ||
249 | */ | ||
250 | ENTRY(__copy_user_nocache) | ||
251 | CFI_STARTPROC | ||
252 | ASM_STAC | ||
253 | cmpl $8,%edx | ||
254 | jb 20f /* less than 8 bytes, go to byte copy loop */ | ||
255 | ALIGN_DESTINATION | ||
256 | movl %edx,%ecx | ||
257 | andl $63,%edx | ||
258 | shrl $6,%ecx | ||
259 | jz 17f | ||
260 | 1: movq (%rsi),%r8 | ||
261 | 2: movq 1*8(%rsi),%r9 | ||
262 | 3: movq 2*8(%rsi),%r10 | ||
263 | 4: movq 3*8(%rsi),%r11 | ||
264 | 5: movnti %r8,(%rdi) | ||
265 | 6: movnti %r9,1*8(%rdi) | ||
266 | 7: movnti %r10,2*8(%rdi) | ||
267 | 8: movnti %r11,3*8(%rdi) | ||
268 | 9: movq 4*8(%rsi),%r8 | ||
269 | 10: movq 5*8(%rsi),%r9 | ||
270 | 11: movq 6*8(%rsi),%r10 | ||
271 | 12: movq 7*8(%rsi),%r11 | ||
272 | 13: movnti %r8,4*8(%rdi) | ||
273 | 14: movnti %r9,5*8(%rdi) | ||
274 | 15: movnti %r10,6*8(%rdi) | ||
275 | 16: movnti %r11,7*8(%rdi) | ||
276 | leaq 64(%rsi),%rsi | ||
277 | leaq 64(%rdi),%rdi | ||
278 | decl %ecx | ||
279 | jnz 1b | ||
280 | 17: movl %edx,%ecx | ||
281 | andl $7,%edx | ||
282 | shrl $3,%ecx | ||
283 | jz 20f | ||
284 | 18: movq (%rsi),%r8 | ||
285 | 19: movnti %r8,(%rdi) | ||
286 | leaq 8(%rsi),%rsi | ||
287 | leaq 8(%rdi),%rdi | ||
288 | decl %ecx | ||
289 | jnz 18b | ||
290 | 20: andl %edx,%edx | ||
291 | jz 23f | ||
292 | movl %edx,%ecx | ||
293 | 21: movb (%rsi),%al | ||
294 | 22: movb %al,(%rdi) | ||
295 | incq %rsi | ||
296 | incq %rdi | ||
297 | decl %ecx | ||
298 | jnz 21b | ||
299 | 23: xorl %eax,%eax | ||
300 | ASM_CLAC | ||
301 | sfence | ||
302 | ret | ||
303 | |||
304 | .section .fixup,"ax" | ||
305 | 30: shll $6,%ecx | ||
306 | addl %ecx,%edx | ||
307 | jmp 60f | ||
308 | 40: lea (%rdx,%rcx,8),%rdx | ||
309 | jmp 60f | ||
310 | 50: movl %ecx,%edx | ||
311 | 60: sfence | ||
312 | jmp copy_user_handle_tail | ||
313 | .previous | ||
314 | |||
315 | _ASM_EXTABLE(1b,30b) | ||
316 | _ASM_EXTABLE(2b,30b) | ||
317 | _ASM_EXTABLE(3b,30b) | ||
318 | _ASM_EXTABLE(4b,30b) | ||
319 | _ASM_EXTABLE(5b,30b) | ||
320 | _ASM_EXTABLE(6b,30b) | ||
321 | _ASM_EXTABLE(7b,30b) | ||
322 | _ASM_EXTABLE(8b,30b) | ||
323 | _ASM_EXTABLE(9b,30b) | ||
324 | _ASM_EXTABLE(10b,30b) | ||
325 | _ASM_EXTABLE(11b,30b) | ||
326 | _ASM_EXTABLE(12b,30b) | ||
327 | _ASM_EXTABLE(13b,30b) | ||
328 | _ASM_EXTABLE(14b,30b) | ||
329 | _ASM_EXTABLE(15b,30b) | ||
330 | _ASM_EXTABLE(16b,30b) | ||
331 | _ASM_EXTABLE(18b,40b) | ||
332 | _ASM_EXTABLE(19b,40b) | ||
333 | _ASM_EXTABLE(21b,50b) | ||
334 | _ASM_EXTABLE(22b,50b) | ||
335 | CFI_ENDPROC | ||
336 | ENDPROC(__copy_user_nocache) | ||
diff --git a/arch/x86/lib/copy_user_nocache_64.S b/arch/x86/lib/copy_user_nocache_64.S deleted file mode 100644 index b836a2bace15..000000000000 --- a/arch/x86/lib/copy_user_nocache_64.S +++ /dev/null | |||
@@ -1,107 +0,0 @@ | |||
1 | /* | ||
2 | * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com> | ||
3 | * Copyright 2002 Andi Kleen, SuSE Labs. | ||
4 | * Subject to the GNU Public License v2. | ||
5 | * | ||
6 | * Functions to copy from and to user space. | ||
7 | */ | ||
8 | |||
9 | #include <linux/linkage.h> | ||
10 | #include <asm/dwarf2.h> | ||
11 | #include <asm/current.h> | ||
12 | #include <asm/asm-offsets.h> | ||
13 | #include <asm/thread_info.h> | ||
14 | #include <asm/asm.h> | ||
15 | #include <asm/smap.h> | ||
16 | |||
17 | /* | ||
18 | * copy_user_nocache - Uncached memory copy with exception handling | ||
19 | * This will force destination/source out of cache for more performance. | ||
20 | */ | ||
21 | ENTRY(__copy_user_nocache) | ||
22 | CFI_STARTPROC | ||
23 | ASM_STAC | ||
24 | cmpl $8,%edx | ||
25 | jb 20f /* less than 8 bytes, go to byte copy loop */ | ||
26 | ALIGN_DESTINATION | ||
27 | movl %edx,%ecx | ||
28 | andl $63,%edx | ||
29 | shrl $6,%ecx | ||
30 | jz 17f | ||
31 | 1: movq (%rsi),%r8 | ||
32 | 2: movq 1*8(%rsi),%r9 | ||
33 | 3: movq 2*8(%rsi),%r10 | ||
34 | 4: movq 3*8(%rsi),%r11 | ||
35 | 5: movnti %r8,(%rdi) | ||
36 | 6: movnti %r9,1*8(%rdi) | ||
37 | 7: movnti %r10,2*8(%rdi) | ||
38 | 8: movnti %r11,3*8(%rdi) | ||
39 | 9: movq 4*8(%rsi),%r8 | ||
40 | 10: movq 5*8(%rsi),%r9 | ||
41 | 11: movq 6*8(%rsi),%r10 | ||
42 | 12: movq 7*8(%rsi),%r11 | ||
43 | 13: movnti %r8,4*8(%rdi) | ||
44 | 14: movnti %r9,5*8(%rdi) | ||
45 | 15: movnti %r10,6*8(%rdi) | ||
46 | 16: movnti %r11,7*8(%rdi) | ||
47 | leaq 64(%rsi),%rsi | ||
48 | leaq 64(%rdi),%rdi | ||
49 | decl %ecx | ||
50 | jnz 1b | ||
51 | 17: movl %edx,%ecx | ||
52 | andl $7,%edx | ||
53 | shrl $3,%ecx | ||
54 | jz 20f | ||
55 | 18: movq (%rsi),%r8 | ||
56 | 19: movnti %r8,(%rdi) | ||
57 | leaq 8(%rsi),%rsi | ||
58 | leaq 8(%rdi),%rdi | ||
59 | decl %ecx | ||
60 | jnz 18b | ||
61 | 20: andl %edx,%edx | ||
62 | jz 23f | ||
63 | movl %edx,%ecx | ||
64 | 21: movb (%rsi),%al | ||
65 | 22: movb %al,(%rdi) | ||
66 | incq %rsi | ||
67 | incq %rdi | ||
68 | decl %ecx | ||
69 | jnz 21b | ||
70 | 23: xorl %eax,%eax | ||
71 | ASM_CLAC | ||
72 | sfence | ||
73 | ret | ||
74 | |||
75 | .section .fixup,"ax" | ||
76 | 30: shll $6,%ecx | ||
77 | addl %ecx,%edx | ||
78 | jmp 60f | ||
79 | 40: lea (%rdx,%rcx,8),%rdx | ||
80 | jmp 60f | ||
81 | 50: movl %ecx,%edx | ||
82 | 60: sfence | ||
83 | jmp copy_user_handle_tail | ||
84 | .previous | ||
85 | |||
86 | _ASM_EXTABLE(1b,30b) | ||
87 | _ASM_EXTABLE(2b,30b) | ||
88 | _ASM_EXTABLE(3b,30b) | ||
89 | _ASM_EXTABLE(4b,30b) | ||
90 | _ASM_EXTABLE(5b,30b) | ||
91 | _ASM_EXTABLE(6b,30b) | ||
92 | _ASM_EXTABLE(7b,30b) | ||
93 | _ASM_EXTABLE(8b,30b) | ||
94 | _ASM_EXTABLE(9b,30b) | ||
95 | _ASM_EXTABLE(10b,30b) | ||
96 | _ASM_EXTABLE(11b,30b) | ||
97 | _ASM_EXTABLE(12b,30b) | ||
98 | _ASM_EXTABLE(13b,30b) | ||
99 | _ASM_EXTABLE(14b,30b) | ||
100 | _ASM_EXTABLE(15b,30b) | ||
101 | _ASM_EXTABLE(16b,30b) | ||
102 | _ASM_EXTABLE(18b,40b) | ||
103 | _ASM_EXTABLE(19b,40b) | ||
104 | _ASM_EXTABLE(21b,50b) | ||
105 | _ASM_EXTABLE(22b,50b) | ||
106 | CFI_ENDPROC | ||
107 | ENDPROC(__copy_user_nocache) | ||