diff options
author | Glauber de Oliveira Costa <gcosta@redhat.com> | 2008-01-30 07:31:07 -0500 |
---|---|---|
committer | Ingo Molnar <mingo@elte.hu> | 2008-01-30 07:31:07 -0500 |
commit | c210d24986dc19e387c10508c0bc2faadadc1a2e (patch) | |
tree | f5e908b69cc89c628858ce1de49f7276eb234822 | |
parent | 56ec1ddcff967e51d98427e4efcbfc90de67efe3 (diff) |
x86: integrate 32-bit and 64-bit code in msr.h
This patch proceeds with the integration of msr.h, making
the code unified, instead of having a version for each architecture.
We stick with the native_* functions, and then paravirt comes for free.
Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
-rw-r--r-- | include/asm-x86/msr.h | 167 |
1 files changed, 47 insertions, 120 deletions
diff --git a/include/asm-x86/msr.h b/include/asm-x86/msr.h index 792fde2e8908..040d3910d891 100644 --- a/include/asm-x86/msr.h +++ b/include/asm-x86/msr.h | |||
@@ -9,6 +9,10 @@ | |||
9 | 9 | ||
10 | #ifdef __KERNEL__ | 10 | #ifdef __KERNEL__ |
11 | #ifndef __ASSEMBLY__ | 11 | #ifndef __ASSEMBLY__ |
12 | |||
13 | #include <asm/asm.h> | ||
14 | #include <asm/errno.h> | ||
15 | |||
12 | static inline unsigned long long native_read_tscp(int *aux) | 16 | static inline unsigned long long native_read_tscp(int *aux) |
13 | { | 17 | { |
14 | unsigned long low, high; | 18 | unsigned long low, high; |
@@ -17,37 +21,36 @@ static inline unsigned long long native_read_tscp(int *aux) | |||
17 | return low | ((u64)high >> 32); | 21 | return low | ((u64)high >> 32); |
18 | } | 22 | } |
19 | 23 | ||
20 | #define rdtscp(low, high, aux) \ | 24 | /* |
21 | do { \ | 25 | * i386 calling convention returns 64-bit value in edx:eax, while |
22 | unsigned long long _val = native_read_tscp(&(aux)); \ | 26 | * x86_64 returns at rax. Also, the "A" constraint does not really |
23 | (low) = (u32)_val; \ | 27 | * mean rdx:rax in x86_64, so we need specialized behaviour for each |
24 | (high) = (u32)(_val >> 32); \ | 28 | * architecture |
25 | } while (0) | 29 | */ |
26 | 30 | #ifdef CONFIG_X86_64 | |
27 | #define rdtscpll(val, aux) (val) = native_read_tscp(&(aux)) | 31 | #define DECLARE_ARGS(val, low, high) unsigned low, high |
28 | #endif | 32 | #define EAX_EDX_VAL(val, low, high) (low | ((u64)(high) << 32)) |
33 | #define EAX_EDX_ARGS(val, low, high) "a" (low), "d" (high) | ||
34 | #define EAX_EDX_RET(val, low, high) "=a" (low), "=d" (high) | ||
35 | #else | ||
36 | #define DECLARE_ARGS(val, low, high) unsigned long long val | ||
37 | #define EAX_EDX_VAL(val, low, high) (val) | ||
38 | #define EAX_EDX_ARGS(val, low, high) "A" (val) | ||
39 | #define EAX_EDX_RET(val, low, high) "=A" (val) | ||
29 | #endif | 40 | #endif |
30 | 41 | ||
31 | #ifdef __i386__ | ||
32 | |||
33 | #ifdef __KERNEL__ | ||
34 | #ifndef __ASSEMBLY__ | ||
35 | |||
36 | #include <asm/asm.h> | ||
37 | #include <asm/errno.h> | ||
38 | |||
39 | static inline unsigned long long native_read_msr(unsigned int msr) | 42 | static inline unsigned long long native_read_msr(unsigned int msr) |
40 | { | 43 | { |
41 | unsigned long long val; | 44 | DECLARE_ARGS(val, low, high); |
42 | 45 | ||
43 | asm volatile("rdmsr" : "=A" (val) : "c" (msr)); | 46 | asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr)); |
44 | return val; | 47 | return EAX_EDX_VAL(val, low, high); |
45 | } | 48 | } |
46 | 49 | ||
47 | static inline unsigned long long native_read_msr_safe(unsigned int msr, | 50 | static inline unsigned long long native_read_msr_safe(unsigned int msr, |
48 | int *err) | 51 | int *err) |
49 | { | 52 | { |
50 | unsigned long long val; | 53 | DECLARE_ARGS(val, low, high); |
51 | 54 | ||
52 | asm volatile("2: rdmsr ; xor %0,%0\n" | 55 | asm volatile("2: rdmsr ; xor %0,%0\n" |
53 | "1:\n\t" | 56 | "1:\n\t" |
@@ -58,10 +61,9 @@ static inline unsigned long long native_read_msr_safe(unsigned int msr, | |||
58 | _ASM_ALIGN "\n\t" | 61 | _ASM_ALIGN "\n\t" |
59 | _ASM_PTR " 2b,3b\n\t" | 62 | _ASM_PTR " 2b,3b\n\t" |
60 | ".previous" | 63 | ".previous" |
61 | : "=r" (*err), "=A" (val) | 64 | : "=r" (*err), EAX_EDX_RET(val, low, high) |
62 | : "c" (msr), "i" (-EFAULT)); | 65 | : "c" (msr), "i" (-EFAULT)); |
63 | 66 | return EAX_EDX_VAL(val, low, high); | |
64 | return val; | ||
65 | } | 67 | } |
66 | 68 | ||
67 | static inline void native_write_msr(unsigned int msr, | 69 | static inline void native_write_msr(unsigned int msr, |
@@ -91,16 +93,18 @@ static inline int native_write_msr_safe(unsigned int msr, | |||
91 | 93 | ||
92 | static inline unsigned long long native_read_tsc(void) | 94 | static inline unsigned long long native_read_tsc(void) |
93 | { | 95 | { |
94 | unsigned long long val; | 96 | DECLARE_ARGS(val, low, high); |
95 | asm volatile("rdtsc" : "=A" (val)); | 97 | |
96 | return val; | 98 | asm volatile("rdtsc" : EAX_EDX_RET(val, low, high)); |
99 | return EAX_EDX_VAL(val, low, high); | ||
97 | } | 100 | } |
98 | 101 | ||
99 | static inline unsigned long long native_read_pmc(int counter) | 102 | static inline unsigned long long native_read_pmc(int counter) |
100 | { | 103 | { |
101 | unsigned long long val; | 104 | DECLARE_ARGS(val, low, high); |
102 | asm volatile("rdpmc" : "=A" (val) : "c" (counter)); | 105 | |
103 | return val; | 106 | asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter)); |
107 | return EAX_EDX_VAL(val, low, high); | ||
104 | } | 108 | } |
105 | 109 | ||
106 | #ifdef CONFIG_PARAVIRT | 110 | #ifdef CONFIG_PARAVIRT |
@@ -128,7 +132,8 @@ static inline void wrmsr(unsigned msr, unsigned low, unsigned high) | |||
128 | #define rdmsrl(msr,val) \ | 132 | #define rdmsrl(msr,val) \ |
129 | ((val) = native_read_msr(msr)) | 133 | ((val) = native_read_msr(msr)) |
130 | 134 | ||
131 | #define wrmsrl(msr, val) native_write_msr(msr, (u32)val, (u32)(val >> 32)) | 135 | #define wrmsrl(msr, val) \ |
136 | native_write_msr(msr, (u32)((u64)(val)), (u32)((u64)(val) >> 32)) | ||
132 | 137 | ||
133 | /* wrmsr with exception handling */ | 138 | /* wrmsr with exception handling */ |
134 | static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high) | 139 | static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high) |
@@ -160,104 +165,25 @@ static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high) | |||
160 | (low) = (u32)_l; \ | 165 | (low) = (u32)_l; \ |
161 | (high) = (u32)(_l >> 32); \ | 166 | (high) = (u32)(_l >> 32); \ |
162 | } while(0) | 167 | } while(0) |
163 | #endif /* !CONFIG_PARAVIRT */ | ||
164 | |||
165 | #endif /* ! __ASSEMBLY__ */ | ||
166 | #endif /* __KERNEL__ */ | ||
167 | |||
168 | #else /* __i386__ */ | ||
169 | |||
170 | #ifndef __ASSEMBLY__ | ||
171 | #include <linux/errno.h> | ||
172 | /* | ||
173 | * Access to machine-specific registers (available on 586 and better only) | ||
174 | * Note: the rd* operations modify the parameters directly (without using | ||
175 | * pointer indirection), this allows gcc to optimize better | ||
176 | */ | ||
177 | |||
178 | #define rdmsr(msr,val1,val2) \ | ||
179 | __asm__ __volatile__("rdmsr" \ | ||
180 | : "=a" (val1), "=d" (val2) \ | ||
181 | : "c" (msr)) | ||
182 | |||
183 | 168 | ||
184 | #define rdmsrl(msr,val) do { unsigned long a__,b__; \ | 169 | #define rdtscp(low, high, aux) \ |
185 | __asm__ __volatile__("rdmsr" \ | 170 | do { \ |
186 | : "=a" (a__), "=d" (b__) \ | 171 | unsigned long long _val = native_read_tscp(&(aux)); \ |
187 | : "c" (msr)); \ | 172 | (low) = (u32)_val; \ |
188 | val = a__ | (b__<<32); \ | 173 | (high) = (u32)(_val >> 32); \ |
189 | } while(0) | 174 | } while (0) |
190 | |||
191 | #define wrmsr(msr,val1,val2) \ | ||
192 | __asm__ __volatile__("wrmsr" \ | ||
193 | : /* no outputs */ \ | ||
194 | : "c" (msr), "a" (val1), "d" (val2)) | ||
195 | |||
196 | #define wrmsrl(msr,val) wrmsr(msr,(__u32)((__u64)(val)),((__u64)(val))>>32) | ||
197 | 175 | ||
198 | #define rdtsc(low,high) \ | 176 | #define rdtscpll(val, aux) (val) = native_read_tscp(&(aux)) |
199 | __asm__ __volatile__("rdtsc" : "=a" (low), "=d" (high)) | ||
200 | 177 | ||
201 | #define rdtscl(low) \ | 178 | #endif /* !CONFIG_PARAVIRT */ |
202 | __asm__ __volatile__ ("rdtsc" : "=a" (low) : : "edx") | ||
203 | 179 | ||
204 | 180 | ||
205 | #define rdtscll(val) do { \ | 181 | #define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32)) |
206 | unsigned int __a,__d; \ | ||
207 | __asm__ __volatile__("rdtsc" : "=a" (__a), "=d" (__d)); \ | ||
208 | (val) = ((unsigned long)__a) | (((unsigned long)__d)<<32); \ | ||
209 | } while(0) | ||
210 | 182 | ||
211 | #define write_tsc(val1,val2) wrmsr(0x10, val1, val2) | 183 | #define write_tsc(val1,val2) wrmsr(0x10, val1, val2) |
212 | 184 | ||
213 | #define write_rdtscp_aux(val) wrmsr(0xc0000103, val, 0) | 185 | #define write_rdtscp_aux(val) wrmsr(0xc0000103, val, 0) |
214 | 186 | ||
215 | #define rdpmc(counter,low,high) \ | ||
216 | __asm__ __volatile__("rdpmc" \ | ||
217 | : "=a" (low), "=d" (high) \ | ||
218 | : "c" (counter)) | ||
219 | |||
220 | |||
221 | #ifdef __KERNEL__ | ||
222 | |||
223 | /* wrmsr with exception handling */ | ||
224 | #define wrmsr_safe(msr,a,b) ({ int ret__; \ | ||
225 | asm volatile("2: wrmsr ; xorl %0,%0\n" \ | ||
226 | "1:\n\t" \ | ||
227 | ".section .fixup,\"ax\"\n\t" \ | ||
228 | "3: movl %4,%0 ; jmp 1b\n\t" \ | ||
229 | ".previous\n\t" \ | ||
230 | ".section __ex_table,\"a\"\n" \ | ||
231 | " .align 8\n\t" \ | ||
232 | " .quad 2b,3b\n\t" \ | ||
233 | ".previous" \ | ||
234 | : "=a" (ret__) \ | ||
235 | : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT)); \ | ||
236 | ret__; }) | ||
237 | |||
238 | #define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32)) | ||
239 | |||
240 | #define rdmsr_safe(msr,a,b) \ | ||
241 | ({ int ret__; \ | ||
242 | asm volatile ("1: rdmsr\n" \ | ||
243 | "2:\n" \ | ||
244 | ".section .fixup,\"ax\"\n" \ | ||
245 | "3: movl %4,%0\n" \ | ||
246 | " jmp 2b\n" \ | ||
247 | ".previous\n" \ | ||
248 | ".section __ex_table,\"a\"\n" \ | ||
249 | " .align 8\n" \ | ||
250 | " .quad 1b,3b\n" \ | ||
251 | ".previous":"=&bDS" (ret__), "=a"(*(a)), "=d"(*(b)) \ | ||
252 | :"c"(msr), "i"(-EIO), "0"(0)); \ | ||
253 | ret__; }) | ||
254 | |||
255 | #endif /* __ASSEMBLY__ */ | ||
256 | |||
257 | #endif /* !__i386__ */ | ||
258 | |||
259 | #ifndef __ASSEMBLY__ | ||
260 | |||
261 | #ifdef CONFIG_SMP | 187 | #ifdef CONFIG_SMP |
262 | void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h); | 188 | void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h); |
263 | void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h); | 189 | void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h); |
@@ -281,7 +207,8 @@ static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h) | |||
281 | return wrmsr_safe(msr_no, l, h); | 207 | return wrmsr_safe(msr_no, l, h); |
282 | } | 208 | } |
283 | #endif /* CONFIG_SMP */ | 209 | #endif /* CONFIG_SMP */ |
284 | #endif /* __KERNEL__ */ | ||
285 | #endif /* __ASSEMBLY__ */ | 210 | #endif /* __ASSEMBLY__ */ |
211 | #endif /* __KERNEL__ */ | ||
212 | |||
286 | 213 | ||
287 | #endif | 214 | #endif |