author	Joe Perches <joe@perches.com>	2008-03-23 04:01:41 -0400
committer	Ingo Molnar <mingo@elte.hu>	2008-04-17 11:41:21 -0400
commit	78ff12eec42a4141d22dac4fdab04994384f6385 (patch)
tree	e5d5936144a762e375334b7790e1cedd156b294e	/include/asm-x86/atomic_32.h
parent	3c311febfa8cc240e2922931d7403a6bb7f3fa1b (diff)
include/asm-x86/atomic_32.h: checkpatch cleanups - formatting only
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86/atomic_32.h')
-rw-r--r--	include/asm-x86/atomic_32.h	143
1 file changed, 68 insertions(+), 75 deletions(-)
diff --git a/include/asm-x86/atomic_32.h b/include/asm-x86/atomic_32.h
index 437aac801711..21a4825148c0 100644
--- a/include/asm-x86/atomic_32.h
+++ b/include/asm-x86/atomic_32.h
@@ -15,138 +15,133 @@
  * on us. We need to use _exactly_ the address the user gave us,
  * not some alias that contains the same information.
  */
-typedef struct { int counter; } atomic_t;
+typedef struct {
+	int counter;
+} atomic_t;
 
 #define ATOMIC_INIT(i)	{ (i) }
 
 /**
  * atomic_read - read atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically reads the value of @v.
  */
 #define atomic_read(v)		((v)->counter)
 
 /**
  * atomic_set - set atomic variable
  * @v: pointer of type atomic_t
  * @i: required value
  *
  * Atomically sets the value of @v to @i.
  */
-#define atomic_set(v,i)		(((v)->counter) = (i))
+#define atomic_set(v, i)	(((v)->counter) = (i))
 
 /**
  * atomic_add - add integer to atomic variable
  * @i: integer value to add
  * @v: pointer of type atomic_t
  *
  * Atomically adds @i to @v.
  */
-static __inline__ void atomic_add(int i, atomic_t *v)
+static inline void atomic_add(int i, atomic_t *v)
 {
-	__asm__ __volatile__(
-		LOCK_PREFIX "addl %1,%0"
-		:"+m" (v->counter)
-		:"ir" (i));
+	asm volatile(LOCK_PREFIX "addl %1,%0"
+		     : "+m" (v->counter)
+		     : "ir" (i));
 }
 
 /**
  * atomic_sub - subtract integer from atomic variable
  * @i: integer value to subtract
  * @v: pointer of type atomic_t
  *
  * Atomically subtracts @i from @v.
  */
-static __inline__ void atomic_sub(int i, atomic_t *v)
+static inline void atomic_sub(int i, atomic_t *v)
 {
-	__asm__ __volatile__(
-		LOCK_PREFIX "subl %1,%0"
-		:"+m" (v->counter)
-		:"ir" (i));
+	asm volatile(LOCK_PREFIX "subl %1,%0"
+		     : "+m" (v->counter)
+		     : "ir" (i));
 }
 
 /**
  * atomic_sub_and_test - subtract value from variable and test result
  * @i: integer value to subtract
  * @v: pointer of type atomic_t
  *
  * Atomically subtracts @i from @v and returns
  * true if the result is zero, or false for all
  * other cases.
  */
-static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
+static inline int atomic_sub_and_test(int i, atomic_t *v)
 {
 	unsigned char c;
 
-	__asm__ __volatile__(
-		LOCK_PREFIX "subl %2,%0; sete %1"
-		:"+m" (v->counter), "=qm" (c)
-		:"ir" (i) : "memory");
+	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
+		     : "+m" (v->counter), "=qm" (c)
+		     : "ir" (i) : "memory");
 	return c;
 }
 
 /**
  * atomic_inc - increment atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically increments @v by 1.
  */
-static __inline__ void atomic_inc(atomic_t *v)
+static inline void atomic_inc(atomic_t *v)
 {
-	__asm__ __volatile__(
-		LOCK_PREFIX "incl %0"
-		:"+m" (v->counter));
+	asm volatile(LOCK_PREFIX "incl %0"
+		     : "+m" (v->counter));
 }
 
 /**
  * atomic_dec - decrement atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically decrements @v by 1.
  */
-static __inline__ void atomic_dec(atomic_t *v)
+static inline void atomic_dec(atomic_t *v)
 {
-	__asm__ __volatile__(
-		LOCK_PREFIX "decl %0"
-		:"+m" (v->counter));
+	asm volatile(LOCK_PREFIX "decl %0"
+		     : "+m" (v->counter));
 }
 
 /**
  * atomic_dec_and_test - decrement and test
  * @v: pointer of type atomic_t
  *
  * Atomically decrements @v by 1 and
  * returns true if the result is 0, or false for all other
  * cases.
  */
-static __inline__ int atomic_dec_and_test(atomic_t *v)
+static inline int atomic_dec_and_test(atomic_t *v)
 {
 	unsigned char c;
 
-	__asm__ __volatile__(
-		LOCK_PREFIX "decl %0; sete %1"
-		:"+m" (v->counter), "=qm" (c)
-		: : "memory");
+	asm volatile(LOCK_PREFIX "decl %0; sete %1"
+		     : "+m" (v->counter), "=qm" (c)
+		     : : "memory");
 	return c != 0;
 }
 
 /**
  * atomic_inc_and_test - increment and test
  * @v: pointer of type atomic_t
  *
  * Atomically increments @v by 1
  * and returns true if the result is zero, or false for all
  * other cases.
  */
-static __inline__ int atomic_inc_and_test(atomic_t *v)
+static inline int atomic_inc_and_test(atomic_t *v)
 {
 	unsigned char c;
 
-	__asm__ __volatile__(
-		LOCK_PREFIX "incl %0; sete %1"
-		:"+m" (v->counter), "=qm" (c)
-		: : "memory");
+	asm volatile(LOCK_PREFIX "incl %0; sete %1"
+		     : "+m" (v->counter), "=qm" (c)
+		     : : "memory");
 	return c != 0;
 }
 
@@ -154,19 +149,18 @@ static __inline__ int atomic_inc_and_test(atomic_t *v)
  * atomic_add_negative - add and test if negative
  * @v: pointer of type atomic_t
  * @i: integer value to add
  *
  * Atomically adds @i to @v and returns true
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-static __inline__ int atomic_add_negative(int i, atomic_t *v)
+static inline int atomic_add_negative(int i, atomic_t *v)
 {
 	unsigned char c;
 
-	__asm__ __volatile__(
-		LOCK_PREFIX "addl %2,%0; sets %1"
-		:"+m" (v->counter), "=qm" (c)
-		:"ir" (i) : "memory");
+	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
+		     : "+m" (v->counter), "=qm" (c)
+		     : "ir" (i) : "memory");
 	return c;
 }
 
@@ -177,20 +171,19 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
  *
  * Atomically adds @i to @v and returns @i + @v
  */
-static __inline__ int atomic_add_return(int i, atomic_t *v)
+static inline int atomic_add_return(int i, atomic_t *v)
 {
 	int __i;
 #ifdef CONFIG_M386
 	unsigned long flags;
-	if(unlikely(boot_cpu_data.x86 <= 3))
+	if (unlikely(boot_cpu_data.x86 <= 3))
 		goto no_xadd;
 #endif
 	/* Modern 486+ processor */
 	__i = i;
-	__asm__ __volatile__(
-		LOCK_PREFIX "xaddl %0, %1"
-		:"+r" (i), "+m" (v->counter)
-		: : "memory");
+	asm volatile(LOCK_PREFIX "xaddl %0, %1"
+		     : "+r" (i), "+m" (v->counter)
+		     : : "memory");
 	return i + __i;
 
 #ifdef CONFIG_M386
@@ -210,9 +203,9 @@ no_xadd: /* Legacy 386 processor */
  *
  * Atomically subtracts @i from @v and returns @v - @i
  */
-static __inline__ int atomic_sub_return(int i, atomic_t *v)
+static inline int atomic_sub_return(int i, atomic_t *v)
 {
-	return atomic_add_return(-i,v);
+	return atomic_add_return(-i, v);
 }
 
 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
@@ -227,7 +220,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
  * Atomically adds @a to @v, so long as @v was not already @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
@@ -244,17 +237,17 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
 
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
-#define atomic_inc_return(v)  (atomic_add_return(1,v))
-#define atomic_dec_return(v)  (atomic_sub_return(1,v))
+#define atomic_inc_return(v)  (atomic_add_return(1, v))
+#define atomic_dec_return(v)  (atomic_sub_return(1, v))
 
 /* These are x86-specific, used by some header files */
-#define atomic_clear_mask(mask, addr) \
-__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
-: : "r" (~(mask)),"m" (*addr) : "memory")
+#define atomic_clear_mask(mask, addr)				\
+	asm volatile(LOCK_PREFIX "andl %0,%1"			\
+		     : : "r" (~(mask)), "m" (*(addr)) : "memory")
 
-#define atomic_set_mask(mask, addr) \
-__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
-: : "r" (mask),"m" (*(addr)) : "memory")
+#define atomic_set_mask(mask, addr)				\
+	asm volatile(LOCK_PREFIX "orl %0,%1"			\
+		     : : "r" (mask), "m" (*(addr)) : "memory")
 
 /* Atomic operations are already serializing on x86 */
 #define smp_mb__before_atomic_dec()	barrier()
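For orientation, below is a minimal usage sketch of the atomic_t API whose definitions this patch reformats. It is illustrative only and not part of the commit; my_refcount, my_get, my_put, and my_release are hypothetical names, while ATOMIC_INIT, atomic_inc, and atomic_dec_and_test come from the header above.

/* Illustrative sketch, not part of this commit: the classic refcount
 * pattern built from the operations declared above. my_refcount,
 * my_get, my_put, and my_release are hypothetical names. */
#include <asm/atomic.h>

static atomic_t my_refcount = ATOMIC_INIT(1);	/* start with one reference */

static void my_release(void)
{
	/* free the protected resources here */
}

static void my_get(void)
{
	atomic_inc(&my_refcount);	/* take a reference, lock-free */
}

static void my_put(void)
{
	/* atomic_dec_and_test() returns true only on the final put */
	if (atomic_dec_and_test(&my_refcount))
		my_release();
}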