author		Joe Perches <joe@perches.com>		2008-03-23 04:03:39 -0400
committer	Ingo Molnar <mingo@elte.hu>		2008-04-17 11:41:28 -0400
commit		c5386c200f55940eeeb827df172edf2e0305f23b (patch)
tree		94bebabc9e5a4d89625a80ecad8c6550437f72d6 /include/asm-x86/system.h
parent		26b7fcc4bde28237a906597a809b149fb06713b0 (diff)
include/asm-x86/system.h: checkpatch cleanups - formatting only
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86/system.h')
-rw-r--r--	include/asm-x86/system.h	104
1 file changed, 51 insertions(+), 53 deletions(-)
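The changes below are pure checkpatch formatting: the first instruction string moves up onto the asm volatile( line, continuation strings align under the opening quote, constraint colons gain a space, and __asm__/__volatile__ spellings become asm/asm volatile. As a standalone illustration of the target style, here is a hypothetical helper (not code from this patch) laid out the post-cleanup way:

/* Hypothetical read_tsc_low(), illustrative only and not part of
 * this patch; builds with gcc on x86. */
static inline unsigned int read_tsc_low(void)
{
	unsigned int lo, hi;

	asm volatile("rdtsc\n\t"		/* TSC -> edx:eax */
		     "movl %%eax, %0\n\t"	/* copy out low half */
		     "movl %%edx, %1"		/* copy out high half */
		     : "=r" (lo), "=r" (hi)	/* outputs */
		     :				/* no inputs */
		     : "eax", "edx");		/* clobbers */
	return lo;
}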
diff --git a/include/asm-x86/system.h b/include/asm-x86/system.h
index 33b0017156a..a2f04cd79b2 100644
--- a/include/asm-x86/system.h
+++ b/include/asm-x86/system.h
@@ -38,35 +38,33 @@ do { \
 	 */ \
 	unsigned long ebx, ecx, edx, esi, edi; \
 	\
-	asm volatile( \
-		"pushfl \n\t"		/* save flags */ \
-		"pushl %%ebp \n\t"	/* save EBP */ \
-		"movl %%esp,%[prev_sp] \n\t"	/* save ESP */ \
-		"movl %[next_sp],%%esp \n\t"	/* restore ESP */ \
-		"movl $1f,%[prev_ip] \n\t"	/* save EIP */ \
-		"pushl %[next_ip] \n\t"		/* restore EIP */ \
-		"jmp __switch_to \n"		/* regparm call */ \
-		"1: \t" \
-		"popl %%ebp \n\t"		/* restore EBP */ \
-		"popfl \n"			/* restore flags */ \
+	asm volatile("pushfl\n\t"		/* save flags */ \
+		     "pushl %%ebp\n\t"		/* save EBP */ \
+		     "movl %%esp,%[prev_sp]\n\t" /* save ESP */ \
+		     "movl %[next_sp],%%esp\n\t" /* restore ESP */ \
+		     "movl $1f,%[prev_ip]\n\t"	/* save EIP */ \
+		     "pushl %[next_ip]\n\t"	/* restore EIP */ \
+		     "jmp __switch_to\n"	/* regparm call */ \
+		     "1:\t" \
+		     "popl %%ebp\n\t"		/* restore EBP */ \
+		     "popfl\n"			/* restore flags */ \
 	\
 	/* output parameters */ \
 	: [prev_sp] "=m" (prev->thread.sp), \
 	  [prev_ip] "=m" (prev->thread.ip), \
 	  "=a" (last), \
 	\
 	  /* clobbered output registers: */ \
 	  "=b" (ebx), "=c" (ecx), "=d" (edx), \
 	  "=S" (esi), "=D" (edi) \
 	\
 	/* input parameters: */ \
 	: [next_sp] "m" (next->thread.sp), \
 	  [next_ip] "m" (next->thread.ip), \
 	\
 	  /* regparm parameters for __switch_to(): */ \
 	  [prev] "a" (prev), \
-	  [next] "d" (next) \
-	); \
+	  [next] "d" (next)); \
 } while (0)
 
 /*
@@ -146,35 +144,34 @@ extern void load_gs_index(unsigned);
  */
 #define loadsegment(seg, value) \
 	asm volatile("\n" \
 		     "1:\t" \
 		     "movl %k0,%%" #seg "\n" \
 		     "2:\n" \
 		     ".section .fixup,\"ax\"\n" \
 		     "3:\t" \
 		     "movl %k1, %%" #seg "\n\t" \
 		     "jmp 2b\n" \
 		     ".previous\n" \
 		     _ASM_EXTABLE(1b,3b) \
 		     : :"r" (value), "r" (0))
 
 
 /*
  * Save a segment register away
  */
 #define savesegment(seg, value) \
 	asm volatile("mov %%" #seg ",%0":"=rm" (value))
 
 static inline unsigned long get_limit(unsigned long segment)
 {
 	unsigned long __limit;
-	__asm__("lsll %1,%0"
-		:"=r" (__limit):"r" (segment));
-	return __limit+1;
+	asm("lsll %1,%0" : "=r" (__limit) : "r" (segment));
+	return __limit + 1;
 }
 
 static inline void native_clts(void)
 {
-	asm volatile ("clts");
+	asm volatile("clts");
 }
 
 /*
@@ -189,43 +186,43 @@ static unsigned long __force_order;
 static inline unsigned long native_read_cr0(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr0,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
 	return val;
 }
 
 static inline void native_write_cr0(unsigned long val)
 {
-	asm volatile("mov %0,%%cr0": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
 }
 
 static inline unsigned long native_read_cr2(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr2,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
 	return val;
 }
 
 static inline void native_write_cr2(unsigned long val)
 {
-	asm volatile("mov %0,%%cr2": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
 }
 
 static inline unsigned long native_read_cr3(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr3,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
 	return val;
 }
 
 static inline void native_write_cr3(unsigned long val)
 {
-	asm volatile("mov %0,%%cr3": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
 }
 
 static inline unsigned long native_read_cr4(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr4,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
 	return val;
 }
 
@@ -237,7 +234,7 @@ static inline unsigned long native_read_cr4_safe(void)
 #ifdef CONFIG_X86_32
 	asm volatile("1: mov %%cr4, %0\n"
 		     "2:\n"
-		     _ASM_EXTABLE(1b,2b)
+		     _ASM_EXTABLE(1b, 2b)
 		     : "=r" (val), "=m" (__force_order) : "0" (0));
 #else
 	val = native_read_cr4();
@@ -247,7 +244,7 @@ static inline unsigned long native_read_cr4_safe(void)
 
 static inline void native_write_cr4(unsigned long val)
 {
-	asm volatile("mov %0,%%cr4": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
 }
 
 #ifdef CONFIG_X86_64
@@ -268,6 +265,7 @@ static inline void native_wbinvd(void)
 {
 	asm volatile("wbinvd": : :"memory");
 }
+
 #ifdef CONFIG_PARAVIRT
 #include <asm/paravirt.h>
 #else
@@ -300,7 +298,7 @@ static inline void clflush(volatile void *__p)
 	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
 }
 
-#define nop() __asm__ __volatile__ ("nop")
+#define nop() asm volatile ("nop")
 
 void disable_hlt(void);
 void enable_hlt(void);
@@ -399,7 +397,7 @@ void default_idle(void);
 # define smp_wmb()	barrier()
 #endif
 #define smp_read_barrier_depends()	read_barrier_depends()
-#define set_mb(var, value) do { (void) xchg(&var, value); } while (0)
+#define set_mb(var, value) do { (void)xchg(&var, value); } while (0)
 #else
 #define smp_mb()	barrier()
 #define smp_rmb()	barrier()
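The set_mb() hunk above is formatting-only, but the macro itself is worth a note: it stores through xchg, and on x86 an xchg with a memory operand is implicitly locked, so the assignment doubles as a full memory barrier. A rough userspace sketch of the same idea using the GCC/Clang atomic builtins (the macro name mirrors the kernel's; the body here is our analogue, not kernel code):

#include <stdio.h>

/* Userspace analogue of set_mb(): publish a value with full ordering.
 * The kernel uses xchg (implicitly locked on x86); a __ATOMIC_SEQ_CST
 * exchange gives comparable store-plus-full-barrier semantics. */
#define set_mb(var, value) \
	do { (void)__atomic_exchange_n(&(var), (value), __ATOMIC_SEQ_CST); } while (0)

int main(void)
{
	int ready = 0;

	set_mb(ready, 1);	/* store + full barrier */
	printf("ready = %d\n", ready);
	return 0;
}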