aboutsummaryrefslogtreecommitdiffstats
path: root/arch/x86/lib/copy_page_64.S
diff options
context:
space:
mode:
authorIngo Molnar <mingo@kernel.org>2015-05-28 06:21:47 -0400
committerIngo Molnar <mingo@kernel.org>2015-06-02 01:57:48 -0400
commit131484c8da97ed600c18dd9d03b661e8ae052df6 (patch)
tree18293a131e8a40a9a339734259a74d33cbba1186 /arch/x86/lib/copy_page_64.S
parentcdeb6048940fa4bfb429e2f1cba0d28a11e20cd5 (diff)
x86/debug: Remove perpetually broken, unmaintainable dwarf annotations
So the dwarf2 annotations in low level assembly code have become an increasing hindrance: unreadable, messy macros mixed into some of the most security sensitive code paths of the Linux kernel. These debug info annotations don't even buy the upstream kernel anything: dwarf driven stack unwinding has caused problems in the past so it's out of tree, and the upstream kernel only uses the much more robust framepointers based stack unwinding method. In addition to that there's a steady, slow bitrot going on with these annotations, requiring frequent fixups. There's no tooling and no functionality upstream that keeps it correct. So burn down the sick forest, allowing new, healthier growth: 27 files changed, 350 insertions(+), 1101 deletions(-) Someone who has the willingness and time to do this properly can attempt to reintroduce dwarf debuginfo in x86 assembly code plus dwarf unwinding from first principles, with the following conditions: - it should be maximally readable, and maximally low-key to 'ordinary' code reading and maintenance. - find a build time method to insert dwarf annotations automatically in the most common cases, for pop/push instructions that manipulate the stack pointer. This could be done for example via a preprocessing step that just looks for common patterns - plus special annotations for the few cases where we want to depart from the default. We have hundreds of CFI annotations, so automating most of that makes sense. - it should come with build tooling checks that ensure that CFI annotations are sensible. We've seen such efforts from the framepointer side, and there's no reason it couldn't be done on the dwarf side. Cc: Andy Lutomirski <luto@amacapital.net> Cc: Borislav Petkov <bp@alien8.de> Cc: Brian Gerst <brgerst@gmail.com> Cc: Denys Vlasenko <dvlasenk@redhat.com> Cc: Frédéric Weisbecker <fweisbec@gmail.com> Cc: H. 
Peter Anvin <hpa@zytor.com> Cc: Jan Beulich <JBeulich@suse.com> Cc: Josh Poimboeuf <jpoimboe@redhat.com> Cc: Linus Torvalds <torvalds@linux-foundation.org> Cc: Peter Zijlstra <peterz@infradead.org> Cc: Thomas Gleixner <tglx@linutronix.de> Cc: linux-kernel@vger.kernel.org Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'arch/x86/lib/copy_page_64.S')
-rw-r--r-- arch/x86/lib/copy_page_64.S | 11
1 file changed, 0 insertions, 11 deletions
diff --git a/arch/x86/lib/copy_page_64.S b/arch/x86/lib/copy_page_64.S
index 8239dbcbf984..009f98216b7e 100644
--- a/arch/x86/lib/copy_page_64.S
+++ b/arch/x86/lib/copy_page_64.S
@@ -1,7 +1,6 @@
1/* Written 2003 by Andi Kleen, based on a kernel by Evandro Menezes */ 1/* Written 2003 by Andi Kleen, based on a kernel by Evandro Menezes */
2 2
3#include <linux/linkage.h> 3#include <linux/linkage.h>
4#include <asm/dwarf2.h>
5#include <asm/cpufeature.h> 4#include <asm/cpufeature.h>
6#include <asm/alternative-asm.h> 5#include <asm/alternative-asm.h>
7 6
@@ -13,22 +12,16 @@
13 */ 12 */
14 ALIGN 13 ALIGN
15ENTRY(copy_page) 14ENTRY(copy_page)
16 CFI_STARTPROC
17 ALTERNATIVE "jmp copy_page_regs", "", X86_FEATURE_REP_GOOD 15 ALTERNATIVE "jmp copy_page_regs", "", X86_FEATURE_REP_GOOD
18 movl $4096/8, %ecx 16 movl $4096/8, %ecx
19 rep movsq 17 rep movsq
20 ret 18 ret
21 CFI_ENDPROC
22ENDPROC(copy_page) 19ENDPROC(copy_page)
23 20
24ENTRY(copy_page_regs) 21ENTRY(copy_page_regs)
25 CFI_STARTPROC
26 subq $2*8, %rsp 22 subq $2*8, %rsp
27 CFI_ADJUST_CFA_OFFSET 2*8
28 movq %rbx, (%rsp) 23 movq %rbx, (%rsp)
29 CFI_REL_OFFSET rbx, 0
30 movq %r12, 1*8(%rsp) 24 movq %r12, 1*8(%rsp)
31 CFI_REL_OFFSET r12, 1*8
32 25
33 movl $(4096/64)-5, %ecx 26 movl $(4096/64)-5, %ecx
34 .p2align 4 27 .p2align 4
@@ -87,11 +80,7 @@ ENTRY(copy_page_regs)
87 jnz .Loop2 80 jnz .Loop2
88 81
89 movq (%rsp), %rbx 82 movq (%rsp), %rbx
90 CFI_RESTORE rbx
91 movq 1*8(%rsp), %r12 83 movq 1*8(%rsp), %r12
92 CFI_RESTORE r12
93 addq $2*8, %rsp 84 addq $2*8, %rsp
94 CFI_ADJUST_CFA_OFFSET -2*8
95 ret 85 ret
96 CFI_ENDPROC
97ENDPROC(copy_page_regs) 86ENDPROC(copy_page_regs)