author     Borislav Petkov <petkovbb@googlemail.com>  2009-08-31 03:50:09 -0400
committer  H. Peter Anvin <hpa@zytor.com>             2009-08-31 18:14:26 -0400
commit     132ec92f3f70fe365c1f4b8d46e66cf8a2a16880
tree       f8e3f3ab5541f583030b0bcd5f3f81ca338f77a2
parent     366d19e181be873c70f4aafca3931d77d781ccd7
x86, msr: Add rd/wrmsr interfaces with preset registers
native_{rdmsr,wrmsr}_safe_regs are two new interfaces which allow
presetting of a subset of eight x86 GPRs before executing the rd/wrmsr
instructions. This is needed at least on AMD K8 for accessing an erratum
workaround MSR.

Originally based on an idea by H. Peter Anvin.

Signed-off-by: Borislav Petkov <petkovbb@gmail.com>
LKML-Reference: <1251705011-18636-1-git-send-email-petkovbb@gmail.com>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
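
Usage sketch (not part of this patch): a caller fills a u32 gprs[8] array in
the register order documented in msr-reg.S below - eax, ecx, edx, ebx, esp,
ebp, esi, edi - so the MSR index goes into gprs[1] (%ecx) and whatever
register the erratum MSR expects to be preset, e.g. %edi, into gprs[7]. The
function name, MSR number and %edi value here are hypothetical placeholders;
only the calling convention and the 0/-EIO return come from the patch.

#include <linux/types.h>
#include <asm/msr.h>

/* Hypothetical MSR that must see a magic value in %edi before rdmsr. */
static int example_read_locked_msr(u64 *val)
{
	u32 gprs[8] = { 0 };
	int err;

	gprs[1] = 0xc0011000;		/* %ecx: MSR index (placeholder)       */
	gprs[7] = 0x12345678;		/* %edi: required preset (placeholder) */

	err = rdmsr_safe_regs(gprs);	/* 0 on success, -EIO if rdmsr #GPs    */
	if (!err)
		*val = gprs[0] | ((u64)gprs[2] << 32);	/* %edx:%eax */

	return err;
}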
-rw-r--r--  arch/x86/include/asm/msr.h       | 13
-rw-r--r--  arch/x86/include/asm/paravirt.h  | 16
-rw-r--r--  arch/x86/kernel/paravirt.c       |  2
-rw-r--r--  arch/x86/lib/Makefile            |  1
-rw-r--r--  arch/x86/lib/msr-reg.S (new)     | 98
5 files changed, 130 insertions(+), 0 deletions(-)
diff --git a/arch/x86/include/asm/msr.h b/arch/x86/include/asm/msr.h
index 48ad9d29484a..184d4a113961 100644
--- a/arch/x86/include/asm/msr.h
+++ b/arch/x86/include/asm/msr.h
@@ -113,6 +113,9 @@ notrace static inline int native_write_msr_safe(unsigned int msr,
 
 extern unsigned long long native_read_tsc(void);
 
+extern int native_rdmsr_safe_regs(u32 *regs);
+extern int native_wrmsr_safe_regs(u32 *regs);
+
 static __always_inline unsigned long long __native_read_tsc(void)
 {
 	DECLARE_ARGS(val, low, high);
@@ -189,6 +192,16 @@ static inline int rdmsrl_amd_safe(unsigned msr, unsigned long long *p)
 	return err;
 }
 
+static inline int rdmsr_safe_regs(u32 *regs)
+{
+	return native_rdmsr_safe_regs(regs);
+}
+
+static inline int wrmsr_safe_regs(u32 *regs)
+{
+	return native_wrmsr_safe_regs(regs);
+}
+
 #define rdtscl(low) \
 	((low) = (u32)__native_read_tsc())
 
diff --git a/arch/x86/include/asm/paravirt.h b/arch/x86/include/asm/paravirt.h
index 4fb37c8a0832..1705944e0374 100644
--- a/arch/x86/include/asm/paravirt.h
+++ b/arch/x86/include/asm/paravirt.h
@@ -168,7 +168,9 @@ struct pv_cpu_ops {
 	   err = 0/-EFAULT. wrmsr returns 0/-EFAULT. */
 	u64 (*read_msr_amd)(unsigned int msr, int *err);
 	u64 (*read_msr)(unsigned int msr, int *err);
+	int (*rdmsr_regs)(u32 *regs);
 	int (*write_msr)(unsigned int msr, unsigned low, unsigned high);
+	int (*wrmsr_regs)(u32 *regs);
 
 	u64 (*read_tsc)(void);
 	u64 (*read_pmc)(int counter);
@@ -820,6 +822,12 @@ static inline u64 paravirt_read_msr(unsigned msr, int *err)
 {
 	return PVOP_CALL2(u64, pv_cpu_ops.read_msr, msr, err);
 }
+
+static inline int paravirt_rdmsr_regs(u32 *regs)
+{
+	return PVOP_CALL1(int, pv_cpu_ops.rdmsr_regs, regs);
+}
+
 static inline u64 paravirt_read_msr_amd(unsigned msr, int *err)
 {
 	return PVOP_CALL2(u64, pv_cpu_ops.read_msr_amd, msr, err);
@@ -829,6 +837,11 @@ static inline int paravirt_write_msr(unsigned msr, unsigned low, unsigned high)
 	return PVOP_CALL3(int, pv_cpu_ops.write_msr, msr, low, high);
 }
 
+static inline int paravirt_wrmsr_regs(u32 *regs)
+{
+	return PVOP_CALL1(int, pv_cpu_ops.wrmsr_regs, regs);
+}
+
 /* These should all do BUG_ON(_err), but our headers are too tangled. */
 #define rdmsr(msr, val1, val2)			\
 do {						\
@@ -862,6 +875,9 @@ do { \
 	_err;					\
 })
 
+#define rdmsr_safe_regs(regs)	paravirt_rdmsr_regs(regs)
+#define wrmsr_safe_regs(regs)	paravirt_wrmsr_regs(regs)
+
 static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
 {
 	int err;
diff --git a/arch/x86/kernel/paravirt.c b/arch/x86/kernel/paravirt.c
index 70ec9b951d76..67594af43b38 100644
--- a/arch/x86/kernel/paravirt.c
+++ b/arch/x86/kernel/paravirt.c
@@ -362,8 +362,10 @@ struct pv_cpu_ops pv_cpu_ops = {
 #endif
 	.wbinvd = native_wbinvd,
 	.read_msr = native_read_msr_safe,
+	.rdmsr_regs = native_rdmsr_safe_regs,
 	.read_msr_amd = native_read_msr_amd_safe,
 	.write_msr = native_write_msr_safe,
+	.wrmsr_regs = native_wrmsr_safe_regs,
 	.read_tsc = native_read_tsc,
 	.read_pmc = native_read_pmc,
 	.read_tscp = native_read_tscp,
diff --git a/arch/x86/lib/Makefile b/arch/x86/lib/Makefile
index 07c31899c9c2..b59c0647d809 100644
--- a/arch/x86/lib/Makefile
+++ b/arch/x86/lib/Makefile
@@ -8,6 +8,7 @@ lib-y := delay.o
 lib-y += thunk_$(BITS).o
 lib-y += usercopy_$(BITS).o getuser.o putuser.o
 lib-y += memcpy_$(BITS).o
+lib-y += msr-reg.o
 
 ifeq ($(CONFIG_X86_32),y)
         obj-y += atomic64_32.o
diff --git a/arch/x86/lib/msr-reg.S b/arch/x86/lib/msr-reg.S
new file mode 100644
index 000000000000..51f1bb3f8c79
--- /dev/null
+++ b/arch/x86/lib/msr-reg.S
@@ -0,0 +1,98 @@
+#include <linux/linkage.h>
+#include <linux/errno.h>
+#include <asm/asm.h>
+#include <asm/msr.h>
+
+#ifdef CONFIG_X86_64
+/*
+ * int native_{rdmsr,wrmsr}_safe_regs(u32 gprs[8]);
+ *
+ * reg layout: u32 gprs[eax, ecx, edx, ebx, esp, ebp, esi, edi]
+ *
+ */
+.macro op_safe_regs op:req
+ENTRY(native_\op\()_safe_regs)
+	push	%rbx
+	push	%rbp
+	push	$0		/* Return value */
+	push	%rdi
+	movl	(%rdi), %eax
+	movl	4(%rdi), %ecx
+	movl	8(%rdi), %edx
+	movl	12(%rdi), %ebx
+	movl	20(%rdi), %ebp
+	movl	24(%rdi), %esi
+	movl	28(%rdi), %edi
+1:	\op
+2:	movl	%edi, %r10d
+	pop	%rdi
+	movl	%eax, (%rdi)
+	movl	%ecx, 4(%rdi)
+	movl	%edx, 8(%rdi)
+	movl	%ebx, 12(%rdi)
+	movl	%ebp, 20(%rdi)
+	movl	%esi, 24(%rdi)
+	movl	%r10d, 28(%rdi)
+	pop	%rax
+	pop	%rbp
+	pop	%rbx
+	ret
+3:
+	movq	$-EIO, 8(%rsp)
+	jmp	2b
+	.section __ex_table,"ax"
+	.balign 4
+	.quad	1b, 3b
+	.previous
+ENDPROC(native_\op\()_safe_regs)
+.endm
+
+#else /* X86_32 */
+
+.macro op_safe_regs op:req
+ENTRY(native_\op\()_safe_regs)
+	push	%ebx
+	push	%ebp
+	push	%esi
+	push	%edi
+	push	$0		/* Return value */
+	push	%eax
+	movl	4(%eax), %ecx
+	movl	8(%eax), %edx
+	movl	12(%eax), %ebx
+	movl	20(%eax), %ebp
+	movl	24(%eax), %esi
+	movl	28(%eax), %edi
+	movl	(%eax), %eax
+1:	\op
+2:	push	%eax
+	movl	4(%esp), %eax
+	pop	(%eax)
+	addl	$4, %esp
+	movl	%ecx, 4(%eax)
+	movl	%edx, 8(%eax)
+	movl	%ebx, 12(%eax)
+	movl	%ebp, 20(%eax)
+	movl	%esi, 24(%eax)
+	movl	%edi, 28(%eax)
+	pop	%eax
+	pop	%edi
+	pop	%esi
+	pop	%ebp
+	pop	%ebx
+	ret
+3:
+	movl	$-EIO, 4(%esp)
+	jmp	2b
+	.section __ex_table,"ax"
+	.balign 4
+	.long	1b, 3b
+	.previous
+ENDPROC(native_\op\()_safe_regs)
+.endm
+
+#endif
+
+op_safe_regs rdmsr
+op_safe_regs wrmsr
+
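
The write direction uses the same layout: the low and high halves of the value
go into gprs[0] (%eax) and gprs[2] (%edx), the MSR index into gprs[1] (%ecx),
and any additional required register is preset the same way. A minimal sketch
with the same hypothetical numbers as in the earlier example; both helpers
return 0 on success and -EIO when the instruction faults, matching the $0 push
and the exception-table fixup in msr-reg.S.

static int example_write_locked_msr(u64 val)
{
	u32 gprs[8] = { 0 };

	gprs[0] = (u32)val;		/* %eax: low 32 bits of the value      */
	gprs[1] = 0xc0011000;		/* %ecx: MSR index (placeholder)       */
	gprs[2] = (u32)(val >> 32);	/* %edx: high 32 bits of the value     */
	gprs[7] = 0x12345678;		/* %edi: required preset (placeholder) */

	return wrmsr_safe_regs(gprs);	/* 0 on success, -EIO on fault */
}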