author		H. Peter Anvin <hpa@zytor.com>	2009-08-31 16:59:53 -0400
committer	H. Peter Anvin <hpa@zytor.com>	2009-08-31 18:14:47 -0400
commit		79c5dca3619d6ae15815eec14cd7a43db5f38b47 (patch)
tree		f2a10abc4862d1812a08dcb44bc5a7a8a78ec57f
parent		709972b1f6f70535d1fddbe1243a51b90c408a1c (diff)
x86, msr: CFI annotations, cleanups for msr-reg.S
Add CFI annotations for native_{rd,wr}msr_safe_regs().
Simplify the 64-bit implementation: we don't allow the upper half
registers to be set, and so we can use them to carry state across the
operation.
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Cc: Borislav Petkov <petkovbb@gmail.com>
LKML-Reference: <1251705011-18636-1-git-send-email-petkovbb@gmail.com>
-rw-r--r--	arch/x86/lib/msr-reg.S	80
1 file changed, 42 insertions(+), 38 deletions(-)
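For reference, these routines take a u32 gprs[8] array laid out as {eax, ecx, edx, ebx, esp, ebp, esi, edi}; the assembly in the diff below loads and stores that array at offsets 0, 4, 8, 12, 20, 24 and 28. Below is a minimal sketch of a caller, assuming the rdmsr_safe_regs() wrapper that <asm/msr.h> provides around native_rdmsr_safe_regs() at this point in the tree; the helper name example_rdmsr() is made up for illustration and is not part of this patch:

	#include <linux/types.h>
	#include <asm/msr.h>	/* rdmsr_safe_regs(), wrapping native_rdmsr_safe_regs() */

	/* Hypothetical helper: read an MSR through the regs-array interface. */
	static int example_rdmsr(u32 msr, u64 *val)
	{
		/* gprs[] layout: 0=eax, 1=ecx, 2=edx, 3=ebx, 4=esp, 5=ebp, 6=esi, 7=edi */
		u32 gprs[8] = { 0 };
		int err;

		gprs[1] = msr;			/* MSR index is taken from %ecx */
		err = rdmsr_safe_regs(gprs);	/* 0 on success, -EIO if the RDMSR faulted */
		if (!err)
			*val = ((u64)gprs[2] << 32) | gprs[0];	/* result in %edx:%eax */
		return err;
	}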
diff --git a/arch/x86/lib/msr-reg.S b/arch/x86/lib/msr-reg.S
index 51f1bb3f8c79..9e8cdcf5d73c 100644
--- a/arch/x86/lib/msr-reg.S
+++ b/arch/x86/lib/msr-reg.S
@@ -1,5 +1,6 @@
 #include <linux/linkage.h>
 #include <linux/errno.h>
+#include <asm/dwarf2.h>
 #include <asm/asm.h>
 #include <asm/msr.h>
 
@@ -12,10 +13,11 @@
  */
 .macro op_safe_regs op:req
 ENTRY(native_\op\()_safe_regs)
-	push	%rbx
-	push	%rbp
-	push	$0	/* Return value */
-	push	%rdi
+	CFI_STARTPROC
+	pushq_cfi %rbx
+	pushq_cfi %rbp
+	movq	%rdi, %r10	/* Save pointer */
+	xorl	%r11d, %r11d	/* Return value */
 	movl	(%rdi), %eax
 	movl	4(%rdi), %ecx
 	movl	8(%rdi), %edx
@@ -23,27 +25,26 @@ ENTRY(native_\op\()_safe_regs)
 	movl	20(%rdi), %ebp
 	movl	24(%rdi), %esi
 	movl	28(%rdi), %edi
+	CFI_REMEMBER_STATE
 1:	\op
-2:	movl	%edi, %r10d
-	pop	%rdi
-	movl	%eax, (%rdi)
-	movl	%ecx, 4(%rdi)
-	movl	%edx, 8(%rdi)
-	movl	%ebx, 12(%rdi)
-	movl	%ebp, 20(%rdi)
-	movl	%esi, 24(%rdi)
-	movl	%r10d, 28(%rdi)
-	pop	%rax
-	pop	%rbp
-	pop	%rbx
+2:	movl	%eax, (%r10)
+	movl	%r11d, %eax	/* Return value */
+	movl	%ecx, 4(%r10)
+	movl	%edx, 8(%r10)
+	movl	%ebx, 12(%r10)
+	movl	%ebp, 20(%r10)
+	movl	%esi, 24(%r10)
+	movl	%edi, 28(%r10)
+	popq_cfi %rbp
+	popq_cfi %rbx
 	ret
 3:
-	movq	$-EIO, 8(%rsp)
+	CFI_RESTORE_STATE
+	movl	$-EIO, %r11d
 	jmp	2b
-	.section __ex_table,"ax"
-	.balign 4
-	.quad	1b, 3b
-	.previous
+
+	_ASM_EXTABLE(1b, 3b)
+	CFI_ENDPROC
 ENDPROC(native_\op\()_safe_regs)
 .endm
 
@@ -51,12 +52,13 @@ ENDPROC(native_\op\()_safe_regs)
 
 .macro op_safe_regs op:req
 ENTRY(native_\op\()_safe_regs)
-	push	%ebx
-	push	%ebp
-	push	%esi
-	push	%edi
-	push	$0	/* Return value */
-	push	%eax
+	CFI_STARTPROC
+	pushl_cfi %ebx
+	pushl_cfi %ebp
+	pushl_cfi %esi
+	pushl_cfi %edi
+	pushl_cfi $0	/* Return value */
+	pushl_cfi %eax
 	movl	4(%eax), %ecx
 	movl	8(%eax), %edx
 	movl	12(%eax), %ebx
@@ -64,30 +66,32 @@ ENTRY(native_\op\()_safe_regs)
 	movl	24(%eax), %esi
 	movl	28(%eax), %edi
 	movl	(%eax), %eax
+	CFI_REMEMBER_STATE
 1:	\op
-2:	push	%eax
+2:	pushl_cfi %eax
 	movl	4(%esp), %eax
-	pop	(%eax)
+	popl_cfi (%eax)
 	addl	$4, %esp
+	CFI_ADJUST_CFA_OFFSET -4
 	movl	%ecx, 4(%eax)
 	movl	%edx, 8(%eax)
 	movl	%ebx, 12(%eax)
 	movl	%ebp, 20(%eax)
 	movl	%esi, 24(%eax)
 	movl	%edi, 28(%eax)
-	pop	%eax
-	pop	%edi
-	pop	%esi
-	pop	%ebp
-	pop	%ebx
+	popl_cfi %eax
+	popl_cfi %edi
+	popl_cfi %esi
+	popl_cfi %ebp
+	popl_cfi %ebx
 	ret
 3:
+	CFI_RESTORE_STATE
 	movl	$-EIO, 4(%esp)
 	jmp	2b
-	.section __ex_table,"ax"
-	.balign 4
-	.long	1b, 3b
-	.previous
+
+	_ASM_EXTABLE(1b, 3b)
+	CFI_ENDPROC
 ENDPROC(native_\op\()_safe_regs)
 .endm
 