-rw-r--r--   arch/i386/lib/msr-on-cpu.c | 73
-rw-r--r--   include/asm-i386/msr.h     | 12
-rw-r--r--   include/asm-x86_64/msr.h   | 11
3 files changed, 89 insertions, 7 deletions
diff --git a/arch/i386/lib/msr-on-cpu.c b/arch/i386/lib/msr-on-cpu.c
index 1c46bda409ff..7767962f25d3 100644
--- a/arch/i386/lib/msr-on-cpu.c
+++ b/arch/i386/lib/msr-on-cpu.c
@@ -6,6 +6,7 @@
 struct msr_info {
         u32 msr_no;
         u32 l, h;
+        int err;
 };
 
 static void __rdmsr_on_cpu(void *info)
@@ -15,20 +16,38 @@ static void __rdmsr_on_cpu(void *info)
         rdmsr(rv->msr_no, rv->l, rv->h);
 }
 
-void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
+static void __rdmsr_safe_on_cpu(void *info)
 {
+        struct msr_info *rv = info;
+
+        rv->err = rdmsr_safe(rv->msr_no, &rv->l, &rv->h);
+}
+
+static int _rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h, int safe)
+{
+        int err = 0;
         preempt_disable();
         if (smp_processor_id() == cpu)
-                rdmsr(msr_no, *l, *h);
+                if (safe)
+                        err = rdmsr_safe(msr_no, l, h);
+                else
+                        rdmsr(msr_no, *l, *h);
         else {
                 struct msr_info rv;
 
                 rv.msr_no = msr_no;
-                smp_call_function_single(cpu, __rdmsr_on_cpu, &rv, 0, 1);
+                if (safe) {
+                        smp_call_function_single(cpu, __rdmsr_safe_on_cpu,
+                                                 &rv, 0, 1);
+                        err = rv.err;
+                } else {
+                        smp_call_function_single(cpu, __rdmsr_on_cpu, &rv, 0, 1);
+                }
                 *l = rv.l;
                 *h = rv.h;
         }
         preempt_enable();
+        return err;
 }
 
 static void __wrmsr_on_cpu(void *info)
@@ -38,21 +57,63 @@ static void __wrmsr_on_cpu(void *info)
         wrmsr(rv->msr_no, rv->l, rv->h);
 }
 
-void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
+static void __wrmsr_safe_on_cpu(void *info)
 {
+        struct msr_info *rv = info;
+
+        rv->err = wrmsr_safe(rv->msr_no, rv->l, rv->h);
+}
+
+static int _wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h, int safe)
+{
+        int err = 0;
         preempt_disable();
         if (smp_processor_id() == cpu)
-                wrmsr(msr_no, l, h);
+                if (safe)
+                        err = wrmsr_safe(msr_no, l, h);
+                else
+                        wrmsr(msr_no, l, h);
         else {
                 struct msr_info rv;
 
                 rv.msr_no = msr_no;
                 rv.l = l;
                 rv.h = h;
-                smp_call_function_single(cpu, __wrmsr_on_cpu, &rv, 0, 1);
+                if (safe) {
+                        smp_call_function_single(cpu, __wrmsr_safe_on_cpu,
+                                                 &rv, 0, 1);
+                        err = rv.err;
+                } else {
+                        smp_call_function_single(cpu, __wrmsr_on_cpu, &rv, 0, 1);
+                }
         }
         preempt_enable();
+        return err;
+}
+
+void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
+{
+        _wrmsr_on_cpu(cpu, msr_no, l, h, 0);
+}
+
+void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
+{
+        _rdmsr_on_cpu(cpu, msr_no, l, h, 0);
+}
+
+/* These "safe" variants are slower and should be used when the target MSR
+   may not actually exist. */
+int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
+{
+        return _wrmsr_on_cpu(cpu, msr_no, l, h, 1);
+}
+
+int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
+{
+        return _rdmsr_on_cpu(cpu, msr_no, l, h, 1);
 }
 
 EXPORT_SYMBOL(rdmsr_on_cpu);
 EXPORT_SYMBOL(wrmsr_on_cpu);
+EXPORT_SYMBOL(rdmsr_safe_on_cpu);
+EXPORT_SYMBOL(wrmsr_safe_on_cpu);
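
For context (not part of the patch): a minimal caller sketch of how the newly exported helpers might be used from driver code. MY_FEATURE_MSR and my_check_msr() are hypothetical names used only for illustration; only rdmsr_safe_on_cpu() itself comes from the change above.

#include <linux/kernel.h>
#include <linux/types.h>
#include <asm/msr.h>

#define MY_FEATURE_MSR 0x0000019a        /* placeholder MSR number */

/* Read an MSR on a specific CPU. rdmsr_safe_on_cpu() traps the #GP raised
 * when the MSR does not exist on that CPU and returns a nonzero error
 * instead of faulting, so the caller can simply skip the CPU. */
static int my_check_msr(unsigned int cpu)
{
        u32 lo, hi;
        int err;

        err = rdmsr_safe_on_cpu(cpu, MY_FEATURE_MSR, &lo, &hi);
        if (err) {
                printk(KERN_INFO "CPU %u: MSR %#x not readable (err %d)\n",
                       cpu, MY_FEATURE_MSR, err);
                return err;
        }
        printk(KERN_INFO "CPU %u: MSR %#x = %08x:%08x\n",
               cpu, MY_FEATURE_MSR, hi, lo);
        return 0;
}
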
diff --git a/include/asm-i386/msr.h b/include/asm-i386/msr.h
index 9559894c7658..26861df52cc4 100644
--- a/include/asm-i386/msr.h
+++ b/include/asm-i386/msr.h
@@ -77,7 +77,7 @@ static inline unsigned long long native_read_pmc(void)
 #ifdef CONFIG_PARAVIRT
 #include <asm/paravirt.h>
 #else
-
+#include <linux/errno.h>
 /*
  * Access to machine-specific registers (available on 586 and better only)
  * Note: the rd* operations modify the parameters directly (without using
@@ -148,6 +148,8 @@ static inline void wrmsrl (unsigned long msr, unsigned long long val)
 #ifdef CONFIG_SMP
 void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
 void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
+int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
+int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
 #else /* CONFIG_SMP */
 static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
 {
@@ -157,6 +159,14 @@ static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
 {
         wrmsr(msr_no, l, h);
 }
+static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
+{
+        return rdmsr_safe(msr_no, l, h);
+}
+static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
+{
+        return wrmsr_safe(msr_no, l, h);
+}
 #endif /* CONFIG_SMP */
 #endif
 #endif
diff --git a/include/asm-x86_64/msr.h b/include/asm-x86_64/msr.h
index a524f0325673..d5c55b80da54 100644
--- a/include/asm-x86_64/msr.h
+++ b/include/asm-x86_64/msr.h
@@ -4,6 +4,7 @@
 #include <asm/msr-index.h>
 
 #ifndef __ASSEMBLY__
+#include <linux/errno.h>
 /*
  * Access to machine-specific registers (available on 586 and better only)
  * Note: the rd* operations modify the parameters directly (without using
@@ -162,6 +163,8 @@ static inline unsigned int cpuid_edx(unsigned int op)
 #ifdef CONFIG_SMP
 void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
 void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
+int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
+int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
 #else /* CONFIG_SMP */
 static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
 {
@@ -171,6 +174,14 @@ static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
 {
         wrmsr(msr_no, l, h);
 }
+static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
+{
+        return rdmsr_safe(msr_no, l, h);
+}
+static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
+{
+        return wrmsr_safe(msr_no, l, h);
+}
 #endif /* CONFIG_SMP */
 #endif /* __ASSEMBLY__ */
 #endif /* X86_64_MSR_H */
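
Not part of the patch: because the !CONFIG_SMP stubs above fall back to plain rdmsr_safe()/wrmsr_safe() on the current (only) CPU, callers can keep a single code path on both SMP and UP kernels. A hedged sketch; MY_CTL_MSR and my_enable_feature() are placeholder names for illustration only.

#include <linux/cpumask.h>
#include <linux/types.h>
#include <asm/msr.h>

#define MY_CTL_MSR 0x00000123        /* placeholder MSR number */

/* Set bit 0 of MY_CTL_MSR on every online CPU, skipping CPUs where the
 * MSR is not implemented instead of taking an unhandled #GP. */
static void my_enable_feature(void)
{
        unsigned int cpu;
        u32 lo, hi;

        for_each_online_cpu(cpu) {
                if (rdmsr_safe_on_cpu(cpu, MY_CTL_MSR, &lo, &hi))
                        continue;        /* MSR absent on this CPU */
                wrmsr_safe_on_cpu(cpu, MY_CTL_MSR, lo | 1, hi);
        }
}
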