author	Rusty Russell <rusty@rustcorp.com.au>	2007-05-02 13:27:10 -0400
committer	Andi Kleen <andi@basil.nowhere.org>	2007-05-02 13:27:10 -0400
commit	90a0a06aa81692028864c21f981905fda46b1208 (patch)
tree	516528b328d5288ee057d1eff5491e2ba1b49af1 /include/asm-i386/msr.h
parent	52de74dd3994e165ef1b35c33d54655a6400e30c (diff)
[PATCH] i386: rationalize paravirt wrappers
paravirt.c used to implement native versions of all low-level
functions.  Far cleaner is to have the native versions exposed in the
headers and as inline native_XXX, and if !CONFIG_PARAVIRT, then simply
#define XXX native_XXX.

There are several nice side effects:

1) write_dt_entry() now takes the correct "struct Xgt_desc_struct *"
   not "void *".

2) load_TLS is reintroduced to the for loop, not manually unrolled
   with a #error in case the bounds ever change.

3) Macros become inlines, with type checking.

4) Access to the native versions is trivial for KVM, lguest, Xen and
   others who might want it.

Signed-off-by: Jeremy Fitzhardinge <jeremy@xensource.com>
Signed-off-by: Rusty Russell <rusty@rustcorp.com.au>
Signed-off-by: Andi Kleen <ak@suse.de>
Cc: Andi Kleen <ak@muc.de>
Cc: Avi Kivity <avi@qumranet.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
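[Editor's note: to make the pattern concrete, a minimal sketch of the shape this patch gives each low-level operation; the names do_thing/native_do_thing are hypothetical placeholders, not identifiers from the patch.]

/* The native implementation always lives in the header as an inline;
 * the public name either comes from paravirt.h (CONFIG_PARAVIRT) or
 * is simply defined to the native version. */
static inline unsigned long native_do_thing(unsigned long arg)
{
        return arg;             /* stands in for the real low-level op */
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>       /* provides do_thing() via paravirt_ops */
#else
#define do_thing(arg) native_do_thing(arg)
#endif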
Diffstat (limited to 'include/asm-i386/msr.h')
-rw-r--r--	include/asm-i386/msr.h	163
1 file changed, 110 insertions(+), 53 deletions(-)
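[Editor's note: before the diff itself, a hedged usage sketch of the wrappers it reshapes. The MSR constant is an arbitrary example defined elsewhere in this header; callers read identically whether or not CONFIG_PARAVIRT routes them through paravirt_ops.]

u32 lo, hi;
u64 val;
int err;

rdmsr(MSR_IA32_SYSENTER_CS, lo, hi);    /* 64-bit MSR split into two u32 halves */
rdmsrl(MSR_IA32_SYSENTER_CS, val);      /* same MSR as one 64-bit value */
wrmsr(MSR_IA32_SYSENTER_CS, lo, hi);    /* write the halves back */
err = rdmsr_safe(MSR_IA32_SYSENTER_CS, &lo, &hi);  /* 0, or -EFAULT if rdmsr traps */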
diff --git a/include/asm-i386/msr.h b/include/asm-i386/msr.h
index 2ad3f30b1a68..00acaa8b36bb 100644
--- a/include/asm-i386/msr.h
+++ b/include/asm-i386/msr.h
@@ -1,6 +1,74 @@
 #ifndef __ASM_MSR_H
 #define __ASM_MSR_H
 
+#include <asm/errno.h>
+
+static inline unsigned long long native_read_msr(unsigned int msr)
+{
+        unsigned long long val;
+
+        asm volatile("rdmsr" : "=A" (val) : "c" (msr));
+        return val;
+}
+
+static inline unsigned long long native_read_msr_safe(unsigned int msr,
+                                                      int *err)
+{
+        unsigned long long val;
+
+        asm volatile("2: rdmsr ; xorl %0,%0\n"
+                     "1:\n\t"
+                     ".section .fixup,\"ax\"\n\t"
+                     "3: movl %3,%0 ; jmp 1b\n\t"
+                     ".previous\n\t"
+                     ".section __ex_table,\"a\"\n"
+                     " .align 4\n\t"
+                     " .long 2b,3b\n\t"
+                     ".previous"
+                     : "=r" (*err), "=A" (val)
+                     : "c" (msr), "i" (-EFAULT));
+
+        return val;
+}
+
+static inline void native_write_msr(unsigned int msr, unsigned long long val)
+{
+        asm volatile("wrmsr" : : "c" (msr), "A"(val));
+}
+
+static inline int native_write_msr_safe(unsigned int msr,
+                                        unsigned long long val)
+{
+        int err;
+        asm volatile("2: wrmsr ; xorl %0,%0\n"
+                     "1:\n\t"
+                     ".section .fixup,\"ax\"\n\t"
+                     "3: movl %4,%0 ; jmp 1b\n\t"
+                     ".previous\n\t"
+                     ".section __ex_table,\"a\"\n"
+                     " .align 4\n\t"
+                     " .long 2b,3b\n\t"
+                     ".previous"
+                     : "=a" (err)
+                     : "c" (msr), "0" ((u32)val), "d" ((u32)(val>>32)),
+                       "i" (-EFAULT));
+        return err;
+}
+
+static inline unsigned long long native_read_tsc(void)
+{
+        unsigned long long val;
+        asm volatile("rdtsc" : "=A" (val));
+        return val;
+}
+
+static inline unsigned long long native_read_pmc(void)
+{
+        unsigned long long val;
+        asm volatile("rdpmc" : "=A" (val));
+        return val;
+}
+
 #ifdef CONFIG_PARAVIRT
 #include <asm/paravirt.h>
 #else
@@ -11,22 +79,20 @@
  * pointer indirection), this allows gcc to optimize better
  */
 
-#define rdmsr(msr,val1,val2) \
-        __asm__ __volatile__("rdmsr" \
-                             : "=a" (val1), "=d" (val2) \
-                             : "c" (msr))
+#define rdmsr(msr,val1,val2)                                    \
+        do {                                                    \
+                unsigned long long __val = native_read_msr(msr);\
+                val1 = __val;                                   \
+                val2 = __val >> 32;                             \
+        } while(0)
 
-#define wrmsr(msr,val1,val2) \
-        __asm__ __volatile__("wrmsr" \
-                             : /* no outputs */ \
-                             : "c" (msr), "a" (val1), "d" (val2))
+#define wrmsr(msr,val1,val2)                                    \
+        native_write_msr(msr, ((unsigned long long)val2 << 32) | val1)
 
-#define rdmsrl(msr,val) do { \
-        unsigned long l__,h__; \
-        rdmsr (msr, l__, h__); \
-        val = l__; \
-        val |= ((u64)h__<<32); \
-} while(0)
+#define rdmsrl(msr,val)                                         \
+        do {                                                    \
+                (val) = native_read_msr(msr);                   \
+        } while(0)
 
 static inline void wrmsrl (unsigned long msr, unsigned long long val)
 {
@@ -37,50 +103,41 @@ static inline void wrmsrl (unsigned long msr, unsigned long long val)
 }
 
 /* wrmsr with exception handling */
-#define wrmsr_safe(msr,a,b) ({ int ret__; \
-        asm volatile("2: wrmsr ; xorl %0,%0\n" \
-                     "1:\n\t" \
-                     ".section .fixup,\"ax\"\n\t" \
-                     "3: movl %4,%0 ; jmp 1b\n\t" \
-                     ".previous\n\t" \
-                     ".section __ex_table,\"a\"\n" \
-                     " .align 4\n\t" \
-                     " .long 2b,3b\n\t" \
-                     ".previous" \
-                     : "=a" (ret__) \
-                     : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT));\
-        ret__; })
+#define wrmsr_safe(msr,val1,val2)                               \
+        (native_write_msr_safe(msr, ((unsigned long long)val2 << 32) | val1))
 
 /* rdmsr with exception handling */
-#define rdmsr_safe(msr,a,b) ({ int ret__; \
-        asm volatile("2: rdmsr ; xorl %0,%0\n" \
-                     "1:\n\t" \
-                     ".section .fixup,\"ax\"\n\t" \
-                     "3: movl %4,%0 ; jmp 1b\n\t" \
-                     ".previous\n\t" \
-                     ".section __ex_table,\"a\"\n" \
-                     " .align 4\n\t" \
-                     " .long 2b,3b\n\t" \
-                     ".previous" \
-                     : "=r" (ret__), "=a" (*(a)), "=d" (*(b)) \
-                     : "c" (msr), "i" (-EFAULT));\
-        ret__; })
-
-#define rdtsc(low,high) \
-        __asm__ __volatile__("rdtsc" : "=a" (low), "=d" (high))
-
-#define rdtscl(low) \
-        __asm__ __volatile__("rdtsc" : "=a" (low) : : "edx")
-
-#define rdtscll(val) \
-        __asm__ __volatile__("rdtsc" : "=A" (val))
+#define rdmsr_safe(msr,p1,p2)                                   \
+        ({                                                      \
+                int __err;                                      \
+                unsigned long long __val = native_read_msr_safe(msr, &__err);\
+                (*p1) = __val;                                  \
+                (*p2) = __val >> 32;                            \
+                __err;                                          \
+        })
+
+#define rdtsc(low,high)                                         \
+        do {                                                    \
+                u64 _l = native_read_tsc();                     \
+                (low) = (u32)_l;                                \
+                (high) = _l >> 32;                              \
+        } while(0)
+
+#define rdtscl(low)                                             \
+        do {                                                    \
+                (low) = native_read_tsc();                      \
+        } while(0)
+
+#define rdtscll(val) ((val) = native_read_tsc())
 
 #define write_tsc(val1,val2) wrmsr(0x10, val1, val2)
 
-#define rdpmc(counter,low,high) \
-        __asm__ __volatile__("rdpmc" \
-                             : "=a" (low), "=d" (high) \
-                             : "c" (counter))
+#define rdpmc(counter,low,high)                                 \
+        do {                                                    \
+                u64 _l = native_read_pmc();                     \
+                low = (u32)_l;                                  \
+                high = _l >> 32;                                \
+        } while(0)
 #endif /* !CONFIG_PARAVIRT */
 
 #ifdef CONFIG_SMP