author	Glauber de Oliveira Costa <gcosta@redhat.com>	2008-01-30 07:31:07 -0500
committer	Ingo Molnar <mingo@elte.hu>	2008-01-30 07:31:07 -0500
commit	c9dcda5ce46c395c5c99003e259e1973dface640
tree	aec2a6b41de18d2313c2d620c077835d24222480 /include/asm-x86/msr.h
parent	b8d1fae7dbde6a1227fa142acecb48dc3dd63817
x86: change write msr functions interface
This patch changes the native_write_msr() and friends interface to explicitly take two 32-bit registers instead of a 64-bit value. The change will ease the merge with 64-bit code. As the 64-bit value will be passed as two registers anyway on i386, the PVOP_CALL interface has to account for that and use low/high parameters; otherwise, the x86_64 version would be forced to be different.

The change does not make the generated i386 code less efficient: as said above, it would fetch the value from two registers anyway.

Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
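To make the new calling convention concrete, here is a minimal user-space sketch (hypothetical, not part of the patch; write_msr_demo stands in for native_write_msr()) showing the same low/high split the patched wrmsrl macro performs:

#include <stdio.h>

/* Hypothetical stand-in for native_write_msr(): in the patched kernel,
 * the two 32-bit halves land in %eax/%edx before the wrmsr instruction. */
static void write_msr_demo(unsigned int msr, unsigned low, unsigned high)
{
	printf("msr %#x <- high=%#x low=%#x\n", msr, high, low);
}

int main(void)
{
	unsigned long long val = 0x123456789abcdef0ULL;

	/* The same split the patched wrmsrl macro performs:
	 * low 32 bits in one argument, high 32 bits in the other.
	 * The MSR number is illustrative only. */
	write_msr_demo(0x1b, (unsigned)val, (unsigned)(val >> 32));
	return 0;
}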
Diffstat (limited to 'include/asm-x86/msr.h')
-rw-r--r--	include/asm-x86/msr.h	19
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/include/asm-x86/msr.h b/include/asm-x86/msr.h
index effb7319c0a5..cb7222358897 100644
--- a/include/asm-x86/msr.h
+++ b/include/asm-x86/msr.h
@@ -63,13 +63,14 @@ static inline unsigned long long native_read_msr_safe(unsigned int msr,
 	return val;
 }
 
-static inline void native_write_msr(unsigned int msr, unsigned long long val)
+static inline void native_write_msr(unsigned int msr,
+				    unsigned low, unsigned high)
 {
-	asm volatile("wrmsr" : : "c" (msr), "A"(val));
+	asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high));
 }
 
 static inline int native_write_msr_safe(unsigned int msr,
-					unsigned long long val)
+					unsigned low, unsigned high)
 {
 	int err;
 	asm volatile("2: wrmsr ; xorl %0,%0\n"
@@ -82,7 +83,7 @@ static inline int native_write_msr_safe(unsigned int msr,
82 " .long 2b,3b\n\t" 83 " .long 2b,3b\n\t"
83 ".previous" 84 ".previous"
84 : "=a" (err) 85 : "=a" (err)
85 : "c" (msr), "0" ((u32)val), "d" ((u32)(val>>32)), 86 : "c" (msr), "0" (low), "d" (high),
86 "i" (-EFAULT)); 87 "i" (-EFAULT));
87 return err; 88 return err;
88} 89}
@@ -118,20 +119,20 @@ static inline unsigned long long native_read_pmc(int counter)
 		(val2) = (u32)(__val >> 32);			\
 	} while(0)
 
-static inline void wrmsr(u32 __msr, u32 __low, u32 __high)
+static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
 {
-	native_write_msr(__msr, ((u64)__high << 32) | __low);
+	native_write_msr(msr, low, high);
 }
 
 #define rdmsrl(msr,val)						\
 	((val) = native_read_msr(msr))
 
-#define wrmsrl(msr,val) native_write_msr(msr, val)
+#define wrmsrl(msr, val) native_write_msr(msr, (u32)val, (u32)(val >> 32))
 
 /* wrmsr with exception handling */
-static inline int wrmsr_safe(u32 __msr, u32 __low, u32 __high)
+static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
 {
-	return native_write_msr_safe(__msr, ((u64)__high << 32) | __low);
+	return native_write_msr_safe(msr, low, high);
 }
 
 /* rdmsr with exception handling */
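For context, a caller-side sketch (hypothetical; it assumes only the post-patch helpers above, and the MSR number and value are illustrative): existing wrmsr() users keep passing the two 32-bit halves directly, while 64-bit users go through wrmsrl(), which now performs the split at the macro level.

/* Hypothetical caller built on the patched helpers above. */
static void msr_write_example(void)
{
	u64 val = 0x123456789abcdef0ULL;

	/* Callers that already hold the halves pass them straight through. */
	wrmsr(0x1b, (u32)val, (u32)(val >> 32));

	/* 64-bit users rely on wrmsrl(), which now does the split itself. */
	wrmsrl(0x1b, val);
}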