author	Joe Perches <joe@perches.com>	2008-03-23 04:02:51 -0400
committer	Ingo Molnar <mingo@elte.hu>	2008-04-17 11:41:25 -0400
commit	abb0ade013507c93a9a0b263bbb7b0327d7c38db (patch)
tree	d07ac00fce2ad439100a7977401924670879d1bf /include/asm-x86/msr.h
parent	934902b474bdb235a273985ad4c61eb136afe11d (diff)
include/asm-x86/msr.h: checkpatch cleanups - formatting only
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86/msr.h')
-rw-r--r--	include/asm-x86/msr.h	81
1 file changed, 42 insertions(+), 39 deletions(-)
diff --git a/include/asm-x86/msr.h b/include/asm-x86/msr.h
index 3ca29ebebbb1..2c698a2e81f9 100644
--- a/include/asm-x86/msr.h
+++ b/include/asm-x86/msr.h
@@ -16,8 +16,8 @@
 static inline unsigned long long native_read_tscp(unsigned int *aux)
 {
 	unsigned long low, high;
-	asm volatile (".byte 0x0f,0x01,0xf9"
+	asm volatile(".byte 0x0f,0x01,0xf9"
 		: "=a" (low), "=d" (high), "=c" (*aux));
 	return low | ((u64)high >> 32);
 }
 
@@ -29,7 +29,7 @@ static inline unsigned long long native_read_tscp(unsigned int *aux)
  */
 #ifdef CONFIG_X86_64
 #define DECLARE_ARGS(val, low, high) unsigned low, high
-#define EAX_EDX_VAL(val, low, high) (low | ((u64)(high) << 32))
+#define EAX_EDX_VAL(val, low, high) ((low) | ((u64)(high) << 32))
 #define EAX_EDX_ARGS(val, low, high) "a" (low), "d" (high)
 #define EAX_EDX_RET(val, low, high) "=a" (low), "=d" (high)
 #else
@@ -57,7 +57,7 @@ static inline unsigned long long native_read_msr_safe(unsigned int msr,
 		".section .fixup,\"ax\"\n\t"
 		"3: mov %3,%0 ; jmp 1b\n\t"
 		".previous\n\t"
-		_ASM_EXTABLE(2b,3b)
+		_ASM_EXTABLE(2b, 3b)
 		: "=r" (*err), EAX_EDX_RET(val, low, high)
 		: "c" (msr), "i" (-EFAULT));
 	return EAX_EDX_VAL(val, low, high);
@@ -78,10 +78,10 @@ static inline int native_write_msr_safe(unsigned int msr,
 		".section .fixup,\"ax\"\n\t"
 		"3: mov %4,%0 ; jmp 1b\n\t"
 		".previous\n\t"
-		_ASM_EXTABLE(2b,3b)
+		_ASM_EXTABLE(2b, 3b)
 		: "=a" (err)
 		: "c" (msr), "0" (low), "d" (high),
 		  "i" (-EFAULT));
 	return err;
 }
 
@@ -116,23 +116,23 @@ static inline unsigned long long native_read_pmc(int counter)
  * pointer indirection), this allows gcc to optimize better
  */
 
-#define rdmsr(msr,val1,val2) \
-	do { \
-		u64 __val = native_read_msr(msr); \
-		(val1) = (u32)__val; \
-		(val2) = (u32)(__val >> 32); \
-	} while(0)
+#define rdmsr(msr, val1, val2) \
+do { \
+	u64 __val = native_read_msr((msr)); \
+	(val1) = (u32)__val; \
+	(val2) = (u32)(__val >> 32); \
+} while (0)
 
 static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
 {
 	native_write_msr(msr, low, high);
 }
 
-#define rdmsrl(msr,val) \
-	((val) = native_read_msr(msr))
+#define rdmsrl(msr, val) \
+	((val) = native_read_msr((msr)))
 
 #define wrmsrl(msr, val) \
-	native_write_msr(msr, (u32)((u64)(val)), (u32)((u64)(val) >> 32))
+	native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))
 
 /* wrmsr with exception handling */
 static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
@@ -141,14 +141,14 @@ static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
 }
 
 /* rdmsr with exception handling */
-#define rdmsr_safe(msr,p1,p2) \
-	({ \
-		int __err; \
-		u64 __val = native_read_msr_safe(msr, &__err); \
-		(*p1) = (u32)__val; \
-		(*p2) = (u32)(__val >> 32); \
-		__err; \
-	})
+#define rdmsr_safe(msr, p1, p2) \
+({ \
+	int __err; \
+	u64 __val = native_read_msr_safe((msr), &__err); \
+	(*p1) = (u32)__val; \
+	(*p2) = (u32)(__val >> 32); \
+	__err; \
+})
 
 #define rdtscl(low) \
 	((low) = (u32)native_read_tsc())
@@ -156,35 +156,37 @@ static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
 #define rdtscll(val) \
 	((val) = native_read_tsc())
 
-#define rdpmc(counter,low,high) \
-	do { \
-		u64 _l = native_read_pmc(counter); \
-		(low) = (u32)_l; \
-		(high) = (u32)(_l >> 32); \
-	} while(0)
+#define rdpmc(counter, low, high) \
+do { \
+	u64 _l = native_read_pmc((counter)); \
+	(low) = (u32)_l; \
+	(high) = (u32)(_l >> 32); \
+} while (0)
 
 #define rdtscp(low, high, aux) \
-	do { \
-		unsigned long long _val = native_read_tscp(&(aux)); \
-		(low) = (u32)_val; \
-		(high) = (u32)(_val >> 32); \
-	} while (0)
+do { \
+	unsigned long long _val = native_read_tscp(&(aux)); \
+	(low) = (u32)_val; \
+	(high) = (u32)(_val >> 32); \
+} while (0)
 
 #define rdtscpll(val, aux) (val) = native_read_tscp(&(aux))
 
 #endif /* !CONFIG_PARAVIRT */
 
 
-#define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32))
+#define checking_wrmsrl(msr, val) wrmsr_safe((msr), (u32)(val), \
+					     (u32)((val) >> 32))
 
-#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)
+#define write_tsc(val1, val2) wrmsr(0x10, (val1), (val2))
 
-#define write_rdtscp_aux(val) wrmsr(0xc0000103, val, 0)
+#define write_rdtscp_aux(val) wrmsr(0xc0000103, (val), 0)
 
 #ifdef CONFIG_SMP
 void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
 void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
 int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
+
 int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
 #else /* CONFIG_SMP */
 static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
@@ -195,7 +197,8 @@ static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
 {
 	wrmsr(msr_no, l, h);
 }
-static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
+static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
+				    u32 *l, u32 *h)
 {
 	return rdmsr_safe(msr_no, l, h);
 }