-rw-r--r--  include/asm-alpha/atomic.h      |   1
-rw-r--r--  include/asm-arm/atomic.h        |   1
-rw-r--r--  include/asm-arm26/atomic.h      |   1
-rw-r--r--  include/asm-cris/atomic.h       |   1
-rw-r--r--  include/asm-frv/atomic.h        |   1
-rw-r--r--  include/asm-generic/atomic.h    | 116
-rw-r--r--  include/asm-h8300/atomic.h      |   1
-rw-r--r--  include/asm-i386/atomic.h       |   1
-rw-r--r--  include/asm-ia64/atomic.h       |   1
-rw-r--r--  include/asm-m32r/atomic.h       |   1
-rw-r--r--  include/asm-m68k/atomic.h       |   1
-rw-r--r--  include/asm-m68knommu/atomic.h  |   1
-rw-r--r--  include/asm-mips/atomic.h       |   1
-rw-r--r--  include/asm-parisc/atomic.h     |   1
-rw-r--r--  include/asm-powerpc/atomic.h    |   1
-rw-r--r--  include/asm-s390/atomic.h       |   1
-rw-r--r--  include/asm-sh/atomic.h         |   1
-rw-r--r--  include/asm-sh64/atomic.h       |   1
-rw-r--r--  include/asm-sparc/atomic.h      |   1
-rw-r--r--  include/asm-sparc64/atomic.h    |   1
-rw-r--r--  include/asm-v850/atomic.h       |   1
-rw-r--r--  include/asm-x86_64/atomic.h     |   1
-rw-r--r--  include/asm-xtensa/atomic.h     |   1
-rw-r--r--  include/linux/sched.h           |  25
24 files changed, 144 insertions, 19 deletions
diff --git a/include/asm-alpha/atomic.h b/include/asm-alpha/atomic.h
index 6183eab006d4..cb03bbe92cdf 100644
--- a/include/asm-alpha/atomic.h
+++ b/include/asm-alpha/atomic.h
@@ -216,4 +216,5 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
 
+#include <asm-generic/atomic.h>
 #endif /* _ALPHA_ATOMIC_H */
diff --git a/include/asm-arm/atomic.h b/include/asm-arm/atomic.h
index d586f65c8228..f72b63309bc5 100644
--- a/include/asm-arm/atomic.h
+++ b/include/asm-arm/atomic.h
@@ -205,5 +205,6 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
 #endif
diff --git a/include/asm-arm26/atomic.h b/include/asm-arm26/atomic.h
index a47cadc59686..3074b0e76343 100644
--- a/include/asm-arm26/atomic.h
+++ b/include/asm-arm26/atomic.h
@@ -118,5 +118,6 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
 #endif
diff --git a/include/asm-cris/atomic.h b/include/asm-cris/atomic.h
index 683b05a57d88..2df2c7aa19b7 100644
--- a/include/asm-cris/atomic.h
+++ b/include/asm-cris/atomic.h
@@ -156,4 +156,5 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-frv/atomic.h b/include/asm-frv/atomic.h
index f6539ff569c5..3f54fea2b051 100644
--- a/include/asm-frv/atomic.h
+++ b/include/asm-frv/atomic.h
@@ -426,4 +426,5 @@ extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_ATOMIC_H */
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
new file mode 100644
index 000000000000..e0a28b925ef0
--- /dev/null
+++ b/include/asm-generic/atomic.h
@@ -0,0 +1,116 @@
+#ifndef _ASM_GENERIC_ATOMIC_H
+#define _ASM_GENERIC_ATOMIC_H
+/*
+ * Copyright (C) 2005 Silicon Graphics, Inc.
+ * Christoph Lameter <clameter@sgi.com>
+ *
+ * Allows arch-independent atomic definitions to be provided without the
+ * need to edit all arch-specific atomic.h files.
+ */
+
+
+/*
+ * Support for atomic_long_t
+ *
+ * Casts for parameters are avoided for existing atomic functions in order to
+ * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
+ * macros of a platform may have.
+ */
+
+#if BITS_PER_LONG == 64
+
+typedef atomic64_t atomic_long_t;
+
+#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
+
+static inline long atomic_long_read(atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	return (long)atomic64_read(v);
+}
+
+static inline void atomic_long_set(atomic_long_t *l, long i)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_set(v, i);
+}
+
+static inline void atomic_long_inc(atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_inc(v);
+}
+
+static inline void atomic_long_dec(atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_dec(v);
+}
+
+static inline void atomic_long_add(long i, atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_add(i, v);
+}
+
+static inline void atomic_long_sub(long i, atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_sub(i, v);
+}
+
+#else
+
+typedef atomic_t atomic_long_t;
+
+#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
+static inline long atomic_long_read(atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	return (long)atomic_read(v);
+}
+
+static inline void atomic_long_set(atomic_long_t *l, long i)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_set(v, i);
+}
+
+static inline void atomic_long_inc(atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_inc(v);
+}
+
+static inline void atomic_long_dec(atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_dec(v);
+}
+
+static inline void atomic_long_add(long i, atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_add(i, v);
+}
+
+static inline void atomic_long_sub(long i, atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_sub(i, v);
+}
+
+#endif
+#endif
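For orientation (not part of the patch): the point of the new header is that arch-independent code can keep an atomic counter of native word size without knowing whether atomic_long_t is built on atomic64_t or atomic_t on a given platform. A minimal usage sketch in kernel-style C follows; the counter and function names are hypothetical.

/* Hypothetical example, not from this patch: an arch-independent counter
 * kept in an atomic_long_t. On 64-bit platforms the calls below expand to
 * atomic64_* operations, on 32-bit platforms to atomic_* operations. */
#include <asm/atomic.h>	/* the arch header now pulls in asm-generic/atomic.h */

static atomic_long_t nr_tracked_objects = ATOMIC_LONG_INIT(0);

static void track_object(void)
{
	atomic_long_inc(&nr_tracked_objects);
}

static void untrack_object(void)
{
	atomic_long_dec(&nr_tracked_objects);
}

static long tracked_objects(void)
{
	return atomic_long_read(&nr_tracked_objects);
}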
diff --git a/include/asm-h8300/atomic.h b/include/asm-h8300/atomic.h
index f23d86819ea8..d891541e89c3 100644
--- a/include/asm-h8300/atomic.h
+++ b/include/asm-h8300/atomic.h
@@ -137,4 +137,5 @@ static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __ARCH_H8300_ATOMIC __ */
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index c68557aa04b2..7a5472d77091 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -254,4 +254,5 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-ia64/atomic.h b/include/asm-ia64/atomic.h
index 2fbebf85c31d..15cf7984c48e 100644
--- a/include/asm-ia64/atomic.h
+++ b/include/asm-ia64/atomic.h
@@ -192,4 +192,5 @@ atomic64_add_negative (__s64 i, atomic64_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_IA64_ATOMIC_H */
diff --git a/include/asm-m32r/atomic.h b/include/asm-m32r/atomic.h
index ef1fb8ea4726..70761278b6cb 100644
--- a/include/asm-m32r/atomic.h
+++ b/include/asm-m32r/atomic.h
@@ -313,4 +313,5 @@ static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_M32R_ATOMIC_H */
diff --git a/include/asm-m68k/atomic.h b/include/asm-m68k/atomic.h
index e3c962eeabf3..b8a4e75d679d 100644
--- a/include/asm-m68k/atomic.h
+++ b/include/asm-m68k/atomic.h
@@ -157,4 +157,5 @@ static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __ARCH_M68K_ATOMIC __ */
diff --git a/include/asm-m68knommu/atomic.h b/include/asm-m68knommu/atomic.h
index 3c1cc153c415..1702dbe9318c 100644
--- a/include/asm-m68knommu/atomic.h
+++ b/include/asm-m68knommu/atomic.h
@@ -143,4 +143,5 @@ static inline int atomic_sub_return(int i, atomic_t * v)
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 #define atomic_inc_return(v) atomic_add_return(1,(v))
 
+#include <asm-generic/atomic.h>
 #endif /* __ARCH_M68KNOMMU_ATOMIC __ */
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 55c37c106ef0..92256e43a938 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -713,4 +713,5 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_ATOMIC_H */
diff --git a/include/asm-parisc/atomic.h b/include/asm-parisc/atomic.h
index 983e9a2b6042..64ebd086c40d 100644
--- a/include/asm-parisc/atomic.h
+++ b/include/asm-parisc/atomic.h
@@ -216,4 +216,5 @@ static __inline__ int atomic_read(const atomic_t *v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
 
+#include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index ec4b14468959..ae395a0632a6 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -402,5 +402,6 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 
 #endif /* __powerpc64__ */
 
+#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_ATOMIC_H_ */
diff --git a/include/asm-s390/atomic.h b/include/asm-s390/atomic.h
index b3bd4f679f72..6d07c7df4b40 100644
--- a/include/asm-s390/atomic.h
+++ b/include/asm-s390/atomic.h
@@ -215,5 +215,6 @@ atomic_compare_and_swap(int expected_oldval,int new_val,atomic_t *v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
 
+#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 #endif /* __ARCH_S390_ATOMIC__ */
diff --git a/include/asm-sh/atomic.h b/include/asm-sh/atomic.h
index aabfd334462c..618d8e0de348 100644
--- a/include/asm-sh/atomic.h
+++ b/include/asm-sh/atomic.h
@@ -140,4 +140,5 @@ static __inline__ void atomic_set_mask(unsigned int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __ASM_SH_ATOMIC_H */
diff --git a/include/asm-sh64/atomic.h b/include/asm-sh64/atomic.h
index 927a2bc27b30..f3ce5c0df13a 100644
--- a/include/asm-sh64/atomic.h
+++ b/include/asm-sh64/atomic.h
@@ -152,4 +152,5 @@ static __inline__ void atomic_set_mask(unsigned int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __ASM_SH64_ATOMIC_H */
diff --git a/include/asm-sparc/atomic.h b/include/asm-sparc/atomic.h
index 62bec7ad271c..accb4967e9d2 100644
--- a/include/asm-sparc/atomic.h
+++ b/include/asm-sparc/atomic.h
@@ -159,4 +159,5 @@ static inline int __atomic24_sub(int i, atomic24_t *v)
 
 #endif /* !(__KERNEL__) */
 
+#include <asm-generic/atomic.h>
 #endif /* !(__ARCH_SPARC_ATOMIC__) */
diff --git a/include/asm-sparc64/atomic.h b/include/asm-sparc64/atomic.h
index 3789fe315992..11f5aa5d108c 100644
--- a/include/asm-sparc64/atomic.h
+++ b/include/asm-sparc64/atomic.h
@@ -96,4 +96,5 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 #define smp_mb__after_atomic_inc() barrier()
 #endif
 
+#include <asm-generic/atomic.h>
 #endif /* !(__ARCH_SPARC64_ATOMIC__) */
diff --git a/include/asm-v850/atomic.h b/include/asm-v850/atomic.h
index bede3172ce7f..f5b9ab6f4e70 100644
--- a/include/asm-v850/atomic.h
+++ b/include/asm-v850/atomic.h
@@ -126,4 +126,5 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __V850_ATOMIC_H__ */
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 50db9f39274f..72eb071488c7 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -424,4 +424,5 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-xtensa/atomic.h b/include/asm-xtensa/atomic.h
index 3670cc7695da..e2ce06b101ad 100644
--- a/include/asm-xtensa/atomic.h
+++ b/include/asm-xtensa/atomic.h
@@ -286,6 +286,7 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 
 #endif /* _XTENSA_ATOMIC_H */
diff --git a/include/linux/sched.h b/include/linux/sched.h
index b0ad6f30679e..7da33619d5d0 100644
--- a/include/linux/sched.h
+++ b/include/linux/sched.h
@@ -254,25 +254,12 @@ extern void arch_unmap_area_topdown(struct mm_struct *, unsigned long);
  * The mm counters are not protected by its page_table_lock,
  * so must be incremented atomically.
  */
-#ifdef ATOMIC64_INIT
-#define set_mm_counter(mm, member, value) atomic64_set(&(mm)->_##member, value)
-#define get_mm_counter(mm, member) ((unsigned long)atomic64_read(&(mm)->_##member))
-#define add_mm_counter(mm, member, value) atomic64_add(value, &(mm)->_##member)
-#define inc_mm_counter(mm, member) atomic64_inc(&(mm)->_##member)
-#define dec_mm_counter(mm, member) atomic64_dec(&(mm)->_##member)
-typedef atomic64_t mm_counter_t;
-#else /* !ATOMIC64_INIT */
-/*
- * The counters wrap back to 0 at 2^32 * PAGE_SIZE,
- * that is, at 16TB if using 4kB page size.
- */
-#define set_mm_counter(mm, member, value) atomic_set(&(mm)->_##member, value)
-#define get_mm_counter(mm, member) ((unsigned long)atomic_read(&(mm)->_##member))
-#define add_mm_counter(mm, member, value) atomic_add(value, &(mm)->_##member)
-#define inc_mm_counter(mm, member) atomic_inc(&(mm)->_##member)
-#define dec_mm_counter(mm, member) atomic_dec(&(mm)->_##member)
-typedef atomic_t mm_counter_t;
-#endif /* !ATOMIC64_INIT */
+#define set_mm_counter(mm, member, value) atomic_long_set(&(mm)->_##member, value)
+#define get_mm_counter(mm, member) ((unsigned long)atomic_long_read(&(mm)->_##member))
+#define add_mm_counter(mm, member, value) atomic_long_add(value, &(mm)->_##member)
+#define inc_mm_counter(mm, member) atomic_long_inc(&(mm)->_##member)
+#define dec_mm_counter(mm, member) atomic_long_dec(&(mm)->_##member)
+typedef atomic_long_t mm_counter_t;
 
 #else /* NR_CPUS < CONFIG_SPLIT_PTLOCK_CPUS */
 /*
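With the ATOMIC64_INIT #ifdef gone, the mm counter helpers expand to atomic_long_t operations on every architecture, so the counters are native word sized everywhere. A brief illustrative sketch of how callers would use these macros, assuming the _rss counter field that mm_struct carried at the time; the helper functions below are hypothetical, not part of the patch.

/* Illustrative only: updating and reading an mm counter through the
 * macros above; "rss" expands to the mm->_rss field. */
static void account_one_page(struct mm_struct *mm)
{
	inc_mm_counter(mm, rss);	/* atomic_long_inc(&mm->_rss) */
}

static unsigned long current_rss_pages(struct mm_struct *mm)
{
	return get_mm_counter(mm, rss);	/* (unsigned long)atomic_long_read(&mm->_rss) */
}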