author		Joe Perches <joe@perches.com>		2008-03-23 04:02:39 -0400
committer	Ingo Molnar <mingo@elte.hu>		2008-04-17 11:41:24 -0400
commit		69cde6512c3a0227878869f9ba8a02cdc72fc253 (patch)
tree		6a87c90b4182497b0296c6bdd00f3a9b36570918
parent		fb444c7b25420d57ce5e31cab486f734705bd278 (diff)
include/asm-x86/local.h: checkpatch cleanups - formatting only
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
-rw-r--r--	include/asm-x86/local.h	| 105
1 file changed, 50 insertions(+), 55 deletions(-)
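
The change is purely mechanical: every __asm__ __volatile__ block becomes the checkpatch-preferred asm volatile spelling, constraint lists gain a space after each colon, and macro arguments gain parentheses. As a hedged standalone sketch (not part of the patch; compilable with gcc on x86-64, where the hard-coded "incq" and the plain variable stand in for the header's _ASM_INC macro and l->a.counter), both spellings are accepted by GCC and compile to the same code:

/* style_sketch.c - illustration only */
static long counter;

static inline void inc_old_style(void)
{
	__asm__ __volatile__(
		"incq %0"
		:"+m" (counter));	/* old spelling, no space after ':' */
}

static inline void inc_new_style(void)
{
	asm volatile("incq %0"
		     : "+m" (counter));	/* same semantics; "+m" marks a
					   read-modify-write memory operand */
}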
diff --git a/include/asm-x86/local.h b/include/asm-x86/local.h
index f852c62b3319..330a72496abd 100644
--- a/include/asm-x86/local.h
+++ b/include/asm-x86/local.h
@@ -18,32 +18,28 @@ typedef struct {
 
 static inline void local_inc(local_t *l)
 {
-	__asm__ __volatile__(
-		_ASM_INC "%0"
-		:"+m" (l->a.counter));
+	asm volatile(_ASM_INC "%0"
+		     : "+m" (l->a.counter));
 }
 
 static inline void local_dec(local_t *l)
 {
-	__asm__ __volatile__(
-		_ASM_DEC "%0"
-		:"+m" (l->a.counter));
+	asm volatile(_ASM_DEC "%0"
+		     : "+m" (l->a.counter));
 }
 
 static inline void local_add(long i, local_t *l)
 {
-	__asm__ __volatile__(
-		_ASM_ADD "%1,%0"
-		:"+m" (l->a.counter)
-		:"ir" (i));
+	asm volatile(_ASM_ADD "%1,%0"
+		     : "+m" (l->a.counter)
+		     : "ir" (i));
 }
 
 static inline void local_sub(long i, local_t *l)
 {
-	__asm__ __volatile__(
-		_ASM_SUB "%1,%0"
-		:"+m" (l->a.counter)
-		:"ir" (i));
+	asm volatile(_ASM_SUB "%1,%0"
+		     : "+m" (l->a.counter)
+		     : "ir" (i));
 }
 
 /**
@@ -59,10 +55,9 @@ static inline int local_sub_and_test(long i, local_t *l)
 {
 	unsigned char c;
 
-	__asm__ __volatile__(
-		_ASM_SUB "%2,%0; sete %1"
-		:"+m" (l->a.counter), "=qm" (c)
-		:"ir" (i) : "memory");
+	asm volatile(_ASM_SUB "%2,%0; sete %1"
+		     : "+m" (l->a.counter), "=qm" (c)
+		     : "ir" (i) : "memory");
 	return c;
 }
 
@@ -78,10 +73,9 @@ static inline int local_dec_and_test(local_t *l)
 {
 	unsigned char c;
 
-	__asm__ __volatile__(
-		_ASM_DEC "%0; sete %1"
-		:"+m" (l->a.counter), "=qm" (c)
-		: : "memory");
+	asm volatile(_ASM_DEC "%0; sete %1"
+		     : "+m" (l->a.counter), "=qm" (c)
+		     : : "memory");
 	return c != 0;
 }
 
@@ -97,10 +91,9 @@ static inline int local_inc_and_test(local_t *l)
 {
 	unsigned char c;
 
-	__asm__ __volatile__(
-		_ASM_INC "%0; sete %1"
-		:"+m" (l->a.counter), "=qm" (c)
-		: : "memory");
+	asm volatile(_ASM_INC "%0; sete %1"
+		     : "+m" (l->a.counter), "=qm" (c)
+		     : : "memory");
 	return c != 0;
 }
 
@@ -117,10 +110,9 @@ static inline int local_add_negative(long i, local_t *l)
 {
 	unsigned char c;
 
-	__asm__ __volatile__(
-		_ASM_ADD "%2,%0; sets %1"
-		:"+m" (l->a.counter), "=qm" (c)
-		:"ir" (i) : "memory");
+	asm volatile(_ASM_ADD "%2,%0; sets %1"
+		     : "+m" (l->a.counter), "=qm" (c)
+		     : "ir" (i) : "memory");
 	return c;
 }
 
@@ -141,10 +133,9 @@ static inline long local_add_return(long i, local_t *l)
 #endif
 	/* Modern 486+ processor */
 	__i = i;
-	__asm__ __volatile__(
-		_ASM_XADD "%0, %1;"
-		:"+r" (i), "+m" (l->a.counter)
-		: : "memory");
+	asm volatile(_ASM_XADD "%0, %1;"
+		     : "+r" (i), "+m" (l->a.counter)
+		     : : "memory");
 	return i + __i;
 
 #ifdef CONFIG_M386
@@ -182,11 +173,11 @@ static inline long local_sub_return(long i, local_t *l)
 #define local_add_unless(l, a, u)				\
 ({								\
 	long c, old;						\
-	c = local_read(l);					\
+	c = local_read((l));					\
 	for (;;) {						\
 		if (unlikely(c == (u)))				\
 			break;					\
 		old = local_cmpxchg((l), c, c + (a));		\
 		if (likely(old == c))				\
 			break;					\
 		c = old;					\
@@ -214,26 +205,30 @@ static inline long local_sub_return(long i, local_t *l)
 
 /* Need to disable preemption for the cpu local counters otherwise we could
    still access a variable of a previous CPU in a non atomic way. */
 #define cpu_local_wrap_v(l)		\
-	({ local_t res__;		\
-	preempt_disable(); 		\
-	res__ = (l);			\
-	preempt_enable();		\
-	res__; })
+({					\
+	local_t res__;			\
+	preempt_disable();		\
+	res__ = (l);			\
+	preempt_enable();		\
+	res__;				\
+})
 #define cpu_local_wrap(l)		\
-	({ preempt_disable();		\
-	l;				\
-	preempt_enable(); })		\
+({					\
+	preempt_disable();		\
+	(l);				\
+	preempt_enable();		\
+})				\
 
-#define cpu_local_read(l)    cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
-#define cpu_local_set(l, i)  cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
-#define cpu_local_inc(l)     cpu_local_wrap(local_inc(&__get_cpu_var(l)))
-#define cpu_local_dec(l)     cpu_local_wrap(local_dec(&__get_cpu_var(l)))
-#define cpu_local_add(i, l)  cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
-#define cpu_local_sub(i, l)  cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))
+#define cpu_local_read(l)    cpu_local_wrap_v(local_read(&__get_cpu_var((l))))
+#define cpu_local_set(l, i)  cpu_local_wrap(local_set(&__get_cpu_var((l)), (i)))
+#define cpu_local_inc(l)     cpu_local_wrap(local_inc(&__get_cpu_var((l))))
+#define cpu_local_dec(l)     cpu_local_wrap(local_dec(&__get_cpu_var((l))))
+#define cpu_local_add(i, l)  cpu_local_wrap(local_add((i), &__get_cpu_var((l))))
+#define cpu_local_sub(i, l)  cpu_local_wrap(local_sub((i), &__get_cpu_var((l))))
 
-#define __cpu_local_inc(l)	cpu_local_inc(l)
-#define __cpu_local_dec(l)	cpu_local_dec(l)
+#define __cpu_local_inc(l)	cpu_local_inc((l))
+#define __cpu_local_dec(l)	cpu_local_dec((l))
 #define __cpu_local_add(i, l)	cpu_local_add((i), (l))
 #define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
 
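
For context, and not part of the patch: a sketch of how the cpu_local_*() wrappers in the final hunk are typically used, assuming the 2008-era per-CPU API (DEFINE_PER_CPU from <linux/percpu.h>); the counter name is hypothetical. The preempt_disable()/preempt_enable() pair inside cpu_local_wrap() keeps the task from migrating to another CPU between looking up its per-CPU variable and updating it, which is exactly the race the header comment warns about.

/* usage_sketch.c - hypothetical caller, illustration only */
#include <linux/percpu.h>
#include <asm/local.h>

static DEFINE_PER_CPU(local_t, page_hits);	/* one counter per CPU */

static void count_page_hit(void)
{
	/* Expands to preempt_disable();
	 * local_inc(&__get_cpu_var(page_hits)); preempt_enable(); */
	cpu_local_inc(page_hits);
}

static long read_this_cpus_hits(void)
{
	return cpu_local_read(page_hits);
}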