Diffstat (limited to 'include/asm-generic/atomic.h')
-rw-r--r--	include/asm-generic/atomic.h	321
1 file changed, 114 insertions(+), 207 deletions(-)
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index 81d3be459efb..c99c64dc5f3d 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -1,258 +1,165 @@
-#ifndef _ASM_GENERIC_ATOMIC_H
-#define _ASM_GENERIC_ATOMIC_H
 /*
- * Copyright (C) 2005 Silicon Graphics, Inc.
- *	Christoph Lameter
+ * Generic C implementation of atomic counter operations
+ * Originally implemented for MN10300.
  *
- * Allows to provide arch independent atomic definitions without the need to
- * edit all arch specific atomic.h files.
+ * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
  */
+#ifndef __ASM_GENERIC_ATOMIC_H
+#define __ASM_GENERIC_ATOMIC_H
 
-#include <asm/types.h>
+#ifdef CONFIG_SMP
+#error not SMP safe
+#endif
 
 /*
- * Suppport for atomic_long_t
- *
- * Casts for parameters are avoided for existing atomic functions in order to
- * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
- * macros of a platform may have.
+ * Atomic operations that C can't guarantee us. Useful for
+ * resource counting etc..
  */
 
-#if BITS_PER_LONG == 64
-
-typedef atomic64_t atomic_long_t;
-
-#define ATOMIC_LONG_INIT(i)	ATOMIC64_INIT(i)
+#define ATOMIC_INIT(i)	{ (i) }
 
-static inline long atomic_long_read(atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	return (long)atomic64_read(v);
-}
-
-static inline void atomic_long_set(atomic_long_t *l, long i)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	atomic64_set(v, i);
-}
-
-static inline void atomic_long_inc(atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	atomic64_inc(v);
-}
-
-static inline void atomic_long_dec(atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	atomic64_dec(v);
-}
-
-static inline void atomic_long_add(long i, atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	atomic64_add(i, v);
-}
-
-static inline void atomic_long_sub(long i, atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	atomic64_sub(i, v);
-}
-
-static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	return atomic64_sub_and_test(i, v);
-}
-
-static inline int atomic_long_dec_and_test(atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	return atomic64_dec_and_test(v);
-}
-
-static inline int atomic_long_inc_and_test(atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	return atomic64_inc_and_test(v);
-}
-
-static inline int atomic_long_add_negative(long i, atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	return atomic64_add_negative(i, v);
-}
-
-static inline long atomic_long_add_return(long i, atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	return (long)atomic64_add_return(i, v);
-}
-
-static inline long atomic_long_sub_return(long i, atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
+#ifdef __KERNEL__
 
-	return (long)atomic64_sub_return(i, v);
-}
-
-static inline long atomic_long_inc_return(atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	return (long)atomic64_inc_return(v);
-}
-
-static inline long atomic_long_dec_return(atomic_long_t *l)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	return (long)atomic64_dec_return(v);
-}
-
-static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
-{
-	atomic64_t *v = (atomic64_t *)l;
-
-	return (long)atomic64_add_unless(v, a, u);
-}
-
-#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))
-
-#define atomic_long_cmpxchg(l, old, new) \
-	(atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
-#define atomic_long_xchg(v, new) \
-	(atomic64_xchg((atomic64_t *)(v), (new)))
-
-#else  /*  BITS_PER_LONG == 64  */
-
-typedef atomic_t atomic_long_t;
-
-#define ATOMIC_LONG_INIT(i)	ATOMIC_INIT(i)
-static inline long atomic_long_read(atomic_long_t *l)
-{
-	atomic_t *v = (atomic_t *)l;
-
-	return (long)atomic_read(v);
-}
+/**
+ * atomic_read - read atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically reads the value of @v. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
+#define atomic_read(v)	((v)->counter)
 
-static inline void atomic_long_set(atomic_long_t *l, long i)
-{
-	atomic_t *v = (atomic_t *)l;
+/**
+ * atomic_set - set atomic variable
+ * @v: pointer of type atomic_t
+ * @i: required value
+ *
+ * Atomically sets the value of @v to @i. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
+#define atomic_set(v, i) (((v)->counter) = (i))
 
-	atomic_set(v, i);
-}
+#include <asm/system.h>
 
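For reference, the macros above assume the usual single-member definition of atomic_t. A minimal sketch of that type (see linux/types.h; whether counter carries a volatile qualifier has varied across kernel versions):

    typedef struct {
    	int counter;	/* only 24 bits guaranteed useful, per the comment above */
    } atomic_t;

With that layout, atomic_read() and atomic_set() compile to a plain load and store of an aligned int, which is already indivisible with respect to interrupts on the uniprocessor targets this header allows.
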
-static inline void atomic_long_inc(atomic_long_t *l)
+/**
+ * atomic_add_return - add integer to atomic variable
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns the result
+ * Note that the guaranteed useful range of an atomic_t is only 24 bits.
+ */
+static inline int atomic_add_return(int i, atomic_t *v)
 {
-	atomic_t *v = (atomic_t *)l;
-
-	atomic_inc(v);
-}
+	unsigned long flags;
+	int temp;
 
-static inline void atomic_long_dec(atomic_long_t *l)
-{
-	atomic_t *v = (atomic_t *)l;
+	local_irq_save(flags);
+	temp = v->counter;
+	temp += i;
+	v->counter = temp;
+	local_irq_restore(flags);
 
-	atomic_dec(v);
+	return temp;
 }
 
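Every arithmetic operation in the replacement header funnels through this primitive (or its subtracting twin below): interrupts off, plain read-modify-write, interrupts back on. local_irq_save() has no userspace counterpart, so the following compilable model, a sketch only, substitutes a pthread mutex to get the same indivisibility:

    #include <pthread.h>

    typedef struct { int counter; } atomic_t;

    static pthread_mutex_t irq_stand_in = PTHREAD_MUTEX_INITIALIZER;

    /* Userspace model of the UP primitive above: exclude every other
     * path that could touch the counter for the duration of the
     * read-modify-write. */
    static int model_atomic_add_return(int i, atomic_t *v)
    {
    	int temp;

    	pthread_mutex_lock(&irq_stand_in);	/* ~ local_irq_save(flags) */
    	temp = v->counter;
    	temp += i;
    	v->counter = temp;
    	pthread_mutex_unlock(&irq_stand_in);	/* ~ local_irq_restore(flags) */

    	return temp;
    }

On a real uniprocessor kernel the interrupt disable suffices because the only possible interleaving comes from interrupt handlers, which is why the header refuses to build when CONFIG_SMP is set.
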
-static inline void atomic_long_add(long i, atomic_long_t *l)
+/**
+ * atomic_sub_return - subtract integer from atomic variable
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v and returns the result
+ * Note that the guaranteed useful range of an atomic_t is only 24 bits.
+ */
+static inline int atomic_sub_return(int i, atomic_t *v)
 {
-	atomic_t *v = (atomic_t *)l;
-
-	atomic_add(i, v);
-}
+	unsigned long flags;
+	int temp;
 
-static inline void atomic_long_sub(long i, atomic_long_t *l)
-{
-	atomic_t *v = (atomic_t *)l;
+	local_irq_save(flags);
+	temp = v->counter;
+	temp -= i;
+	v->counter = temp;
+	local_irq_restore(flags);
 
-	atomic_sub(i, v);
+	return temp;
 }
 
-static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
+static inline int atomic_add_negative(int i, atomic_t *v)
 {
-	atomic_t *v = (atomic_t *)l;
-
-	return atomic_sub_and_test(i, v);
+	return atomic_add_return(i, v) < 0;
 }
 
-static inline int atomic_long_dec_and_test(atomic_long_t *l)
+static inline void atomic_add(int i, atomic_t *v)
 {
-	atomic_t *v = (atomic_t *)l;
-
-	return atomic_dec_and_test(v);
+	atomic_add_return(i, v);
 }
 
-static inline int atomic_long_inc_and_test(atomic_long_t *l)
+static inline void atomic_sub(int i, atomic_t *v)
 {
-	atomic_t *v = (atomic_t *)l;
-
-	return atomic_inc_and_test(v);
+	atomic_sub_return(i, v);
 }
 
-static inline int atomic_long_add_negative(long i, atomic_long_t *l)
+static inline void atomic_inc(atomic_t *v)
 {
-	atomic_t *v = (atomic_t *)l;
-
-	return atomic_add_negative(i, v);
+	atomic_add_return(1, v);
 }
 
-static inline long atomic_long_add_return(long i, atomic_long_t *l)
+static inline void atomic_dec(atomic_t *v)
 {
-	atomic_t *v = (atomic_t *)l;
-
-	return (long)atomic_add_return(i, v);
+	atomic_sub_return(1, v);
 }
 
-static inline long atomic_long_sub_return(long i, atomic_long_t *l)
-{
-	atomic_t *v = (atomic_t *)l;
+#define atomic_dec_return(v)		atomic_sub_return(1, (v))
+#define atomic_inc_return(v)		atomic_add_return(1, (v))
 
-	return (long)atomic_sub_return(i, v);
-}
+#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
+#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
+#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
 
-static inline long atomic_long_inc_return(atomic_long_t *l)
-{
-	atomic_t *v = (atomic_t *)l;
+#define atomic_add_unless(v, a, u)				\
+({								\
+	int c, old;						\
+	c = atomic_read(v);					\
+	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+		c = old;					\
+	c != (u);						\
+})
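atomic_add_unless() is the one operation here built on compare-and-swap rather than on the irq-disabling primitives: read the counter, then keep retrying atomic_cmpxchg() until either the swap succeeds or the forbidden value u is observed. The same retry loop in portable C11, runnable in userspace (a model of the semantics, not the kernel macro):

    #include <stdatomic.h>
    #include <stdio.h>

    /* Model of atomic_add_unless(v, a, u): add a to *v unless *v == u.
     * Returns nonzero iff the add happened. */
    static int model_add_unless(atomic_int *v, int a, int u)
    {
    	int c = atomic_load(v);

    	while (c != u) {
    		/* On failure, c is reloaded with the current value and
    		 * we go around again, like the cmpxchg loop above. */
    		if (atomic_compare_exchange_weak(v, &c, c + a))
    			return 1;
    	}
    	return 0;
    }

    int main(void)
    {
    	atomic_int refs = 1;

    	/* atomic_inc_not_zero(v) is add_unless(v, 1, 0): take a
    	 * reference only while the object is still live. */
    	if (model_add_unless(&refs, 1, 0))
    		printf("reference taken, refs = %d\n", atomic_load(&refs));
    	return 0;
    }
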
 
-	return (long)atomic_inc_return(v);
-}
+#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
 
-static inline long atomic_long_dec_return(atomic_long_t *l)
+static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 {
-	atomic_t *v = (atomic_t *)l;
+	unsigned long flags;
 
-	return (long)atomic_dec_return(v);
+	mask = ~mask;
+	local_irq_save(flags);
+	*addr &= mask;
+	local_irq_restore(flags);
 }
 
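atomic_clear_mask() operates on a bare unsigned long rather than an atomic_t, and again leans on local_irq_save() to make the ~mask/AND sequence indivisible. In C11 terms the whole body collapses to a single fetch-and (a rough equivalent for illustration, not the kernel interface):

    #include <stdatomic.h>

    /* Model of atomic_clear_mask(mask, addr): atomically clear the
     * bits of mask in *addr. */
    static void model_clear_mask(unsigned long mask, atomic_ulong *addr)
    {
    	atomic_fetch_and(addr, ~mask);
    }
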
-static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
-{
-	atomic_t *v = (atomic_t *)l;
+#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
+#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
 
-	return (long)atomic_add_unless(v, a, u);
-}
+#define cmpxchg_local(ptr, o, n)				       \
+	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+			(unsigned long)(n), sizeof(*(ptr))))
 
-#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
 
-#define atomic_long_cmpxchg(l, old, new) \
-	(atomic_cmpxchg((atomic_t *)(l), (old), (new)))
-#define atomic_long_xchg(v, new) \
-	(atomic_xchg((atomic_t *)(v), (new)))
+/* Assume that atomic operations are already serializing */
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
 
-#endif  /*  BITS_PER_LONG == 64  */
+#include <asm-generic/atomic-long.h>
 
-#endif  /*  _ASM_GENERIC_ATOMIC_H  */
+#endif /* __KERNEL__ */
+#endif /* __ASM_GENERIC_ATOMIC_H */
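The net effect of the patch: architectures with no atomic instructions of their own get a complete atomic_t implementation for free, while the old atomic_long_t wrappers live on in the asm-generic/atomic-long.h header pulled in near the bottom. The "resource counting" use case named in the header comment is the classic consumer of these ops; a self-contained userspace illustration of the pattern (names here are illustrative, not from the patch):

    #include <stdatomic.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct object {
    	atomic_int refcount;
    	/* ... payload ... */
    };

    /* The canonical idiom: whoever drops the count to zero frees the
     * object; fetch_sub returning 1 models atomic_dec_and_test()
     * returning true exactly once. */
    static void object_put(struct object *obj)
    {
    	if (atomic_fetch_sub(&obj->refcount, 1) == 1) {
    		printf("last reference dropped, freeing\n");
    		free(obj);
    	}
    }

    int main(void)
    {
    	struct object *obj = calloc(1, sizeof(*obj));

    	atomic_init(&obj->refcount, 2);	/* two owners */
    	object_put(obj);		/* one reference remains */
    	object_put(obj);		/* frees */
    	return 0;
    }
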