author | H. Peter Anvin <hpa@zytor.com> | 2008-08-26 01:39:15 -0400
---|---|---
committer | H. Peter Anvin <hpa@zytor.com> | 2008-08-26 01:39:15 -0400
commit | 08970fc4e0385790a7b093adfaa4165a189f9eb0 (patch) |
tree | 9a7ca00d88f64ba69abb3cc1cb7a4a803033b12a /include |
parent | 9ea2b82ed6265a31f9a84886d74d8a2ef01b27c8 (diff) |
x86: msr: fix bogus return values from rdmsr_safe/wrmsr_safe
Impact: bogus error codes (+other?) on x86-64
The rdmsr_safe/wrmsr_safe routines use macros to handle the edx:eax arguments, and those macros expand to a variable number of assembly operands. That is inherently incompatible with %digit-style escapes in the inline assembly; replace those with %[name]-style escapes.

This fixes miscompilation on x86-64, which at the very least caused bogus error codes to be returned; it is possible that it caused other corruption as well, but I am not sure.
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
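
For context, the sketch below is not kernel code: DECLARE_RET/SPLIT_RET/SPLIT_VAL are hypothetical stand-ins for the DECLARE_ARGS/EAX_EDX_RET/EAX_EDX_VAL macros used in the patch, whose definitions are not part of this diff. It illustrates why a macro that contributes a different number of operands per target breaks positional escapes: once an extra output operand is in play, a hard-coded reference like %3 can end up naming the "c" (msr) input instead of the "i" (-EFAULT) immediate, which would produce exactly the bogus error codes described above. Symbolic %[name] operands keep their meaning no matter how many operands the macro expands to. The asm here only loads constants into eax/edx; no MSR is accessed.

```c
/*
 * Userspace illustration only -- not the kernel's msr.h.  SPLIT_RET()
 * stands in for EAX_EDX_RET(): it contributes two asm outputs on
 * 64-bit builds and one on 32-bit builds, so any positional %digit
 * reference written after it would shift between the two targets.
 * The symbolic %[err] operand below is immune to that shift.
 */
#include <stdio.h>

#ifdef __x86_64__
/* two separate 32-bit outputs -> two operands */
#define DECLARE_RET(val, lo, hi)  unsigned lo, hi
#define SPLIT_RET(val, lo, hi)    "=a" (lo), "=d" (hi)
#define SPLIT_VAL(val, lo, hi)    (((unsigned long long)(hi) << 32) | (lo))
#else
/* one combined 64-bit output in edx:eax -> one operand */
#define DECLARE_RET(val, lo, hi)  unsigned long long val
#define SPLIT_RET(val, lo, hi)    "=A" (val)
#define SPLIT_VAL(val, lo, hi)    (val)
#endif

/* Mimics the shape of native_read_msr_safe(): a 64-bit value comes back
 * in edx:eax and an error flag comes back through *err.  The asm body
 * just loads constants instead of executing rdmsr. */
static unsigned long long read_fake(int *err)
{
	DECLARE_RET(val, lo, hi);

	asm volatile("movl $0x11223344, %%eax\n\t"
		     "movl $0x55667788, %%edx\n\t"
		     "xorl %[err], %[err]"	/* success path: err = 0 */
		     : [err] "=r" (*err), SPLIT_RET(val, lo, hi));

	return SPLIT_VAL(val, lo, hi);
}

int main(void)
{
	int err;
	unsigned long long val = read_fake(&err);

	printf("val=%#llx err=%d\n", val, err);
	return 0;
}
```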
Diffstat (limited to 'include')
-rw-r--r-- | include/asm-x86/msr.h | 16
1 file changed, 8 insertions, 8 deletions
```diff
diff --git a/include/asm-x86/msr.h b/include/asm-x86/msr.h
index ad5f2decf7f7..2362cfda1fbc 100644
--- a/include/asm-x86/msr.h
+++ b/include/asm-x86/msr.h
@@ -52,14 +52,14 @@ static inline unsigned long long native_read_msr_safe(unsigned int msr,
 {
 	DECLARE_ARGS(val, low, high);
 
-	asm volatile("2: rdmsr ; xor %0,%0\n"
+	asm volatile("2: rdmsr ; xor %[err],%[err]\n"
 		     "1:\n\t"
 		     ".section .fixup,\"ax\"\n\t"
-		     "3: mov %3,%0 ; jmp 1b\n\t"
+		     "3: mov %[fault],%[err] ; jmp 1b\n\t"
 		     ".previous\n\t"
 		     _ASM_EXTABLE(2b, 3b)
-		     : "=r" (*err), EAX_EDX_RET(val, low, high)
-		     : "c" (msr), "i" (-EFAULT));
+		     : [err] "=r" (*err), EAX_EDX_RET(val, low, high)
+		     : "c" (msr), [fault] "i" (-EFAULT));
 	return EAX_EDX_VAL(val, low, high);
 }
 
@@ -73,15 +73,15 @@ static inline int native_write_msr_safe(unsigned int msr,
 			     unsigned low, unsigned high)
 {
 	int err;
-	asm volatile("2: wrmsr ; xor %0,%0\n"
+	asm volatile("2: wrmsr ; xor %[err],%[err]\n"
 		     "1:\n\t"
 		     ".section .fixup,\"ax\"\n\t"
-		     "3: mov %4,%0 ; jmp 1b\n\t"
+		     "3: mov %[fault],%[err] ; jmp 1b\n\t"
 		     ".previous\n\t"
 		     _ASM_EXTABLE(2b, 3b)
-		     : "=a" (err)
+		     : [err] "=a" (err)
 		     : "c" (msr), "0" (low), "d" (high),
-		       "i" (-EFAULT)
+		       [fault] "i" (-EFAULT)
 		     : "memory");
 	return err;
 }
```