author      Ingo Molnar <mingo@elte.hu>             2006-01-09 18:59:17 -0500
committer   Ingo Molnar <mingo@hera.kernel.org>     2006-01-09 18:59:17 -0500
commit      ffbf670f5cd50501a34a5187981460da2216071e (patch)
tree        f2cd9eeeb59839d15feddf906310bb375474c573 /include
parent      f17578decc40df8fceff82b106582e30bdfb3189 (diff)
[PATCH] mutex subsystem, add atomic_xchg() to all arches
add atomic_xchg() to all the architectures. Needed by the new mutex code.

Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Arjan van de Ven <arjan@infradead.org>
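For context, the macro added here atomically swaps a new value into an atomic_t and returns the old one. Below is a minimal sketch of how an xchg-based lock fastpath could use it; the function names and the 1 = unlocked / 0 = locked convention are illustrative assumptions, not part of this patch.

    #include <asm/atomic.h>

    /*
     * Illustrative sketch only (not from this patch): a lock fastpath
     * built on atomic_xchg().  Assumes an atomic_t count where 1 means
     * unlocked and 0 means locked; the names are hypothetical.
     */
    static inline void example_fastpath_lock(atomic_t *count,
                                             void (*fail_fn)(atomic_t *))
    {
        /* Swap in "locked"; the old value tells us whether it was free. */
        if (atomic_xchg(count, 0) != 1)
            fail_fn(count);         /* contended: take the slow path */
    }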
Diffstat (limited to 'include')
-rw-r--r--   include/asm-alpha/atomic.h       1
-rw-r--r--   include/asm-arm/atomic.h         2
-rw-r--r--   include/asm-arm26/atomic.h       2
-rw-r--r--   include/asm-cris/atomic.h        2
-rw-r--r--   include/asm-frv/atomic.h         1
-rw-r--r--   include/asm-h8300/atomic.h       2
-rw-r--r--   include/asm-i386/atomic.h        1
-rw-r--r--   include/asm-ia64/atomic.h        1
-rw-r--r--   include/asm-m32r/atomic.h        1
-rw-r--r--   include/asm-m68k/atomic.h        1
-rw-r--r--   include/asm-m68knommu/atomic.h   1
-rw-r--r--   include/asm-mips/atomic.h        1
-rw-r--r--   include/asm-parisc/atomic.h      1
-rw-r--r--   include/asm-powerpc/atomic.h     1
-rw-r--r--   include/asm-s390/atomic.h        2
-rw-r--r--   include/asm-sh/atomic.h          2
-rw-r--r--   include/asm-sh64/atomic.h        2
-rw-r--r--   include/asm-sparc/atomic.h       1
-rw-r--r--   include/asm-sparc64/atomic.h     1
-rw-r--r--   include/asm-v850/atomic.h        2
-rw-r--r--   include/asm-x86_64/atomic.h      1
-rw-r--r--   include/asm-xtensa/atomic.h      1
22 files changed, 30 insertions, 0 deletions
diff --git a/include/asm-alpha/atomic.h b/include/asm-alpha/atomic.h
index cb03bbe92cd..fc77f741308 100644
--- a/include/asm-alpha/atomic.h
+++ b/include/asm-alpha/atomic.h
@@ -176,6 +176,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
diff --git a/include/asm-arm/atomic.h b/include/asm-arm/atomic.h
index f72b63309bc..3d7283d8440 100644
--- a/include/asm-arm/atomic.h
+++ b/include/asm-arm/atomic.h
@@ -175,6 +175,8 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 
 #endif /* __LINUX_ARM_ARCH__ */
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
diff --git a/include/asm-arm26/atomic.h b/include/asm-arm26/atomic.h
index 3074b0e7634..1552c865399 100644
--- a/include/asm-arm26/atomic.h
+++ b/include/asm-arm26/atomic.h
@@ -76,6 +76,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
diff --git a/include/asm-cris/atomic.h b/include/asm-cris/atomic.h
index 2df2c7aa19b..0b51a87e553 100644
--- a/include/asm-cris/atomic.h
+++ b/include/asm-cris/atomic.h
@@ -136,6 +136,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
diff --git a/include/asm-frv/atomic.h b/include/asm-frv/atomic.h
index 9c9e9499cfd..a59f684b4f3 100644
--- a/include/asm-frv/atomic.h
+++ b/include/asm-frv/atomic.h
@@ -328,6 +328,7 @@ extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);
 #endif
 
 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
diff --git a/include/asm-h8300/atomic.h b/include/asm-h8300/atomic.h
index d891541e89c..21f54428c86 100644
--- a/include/asm-h8300/atomic.h
+++ b/include/asm-h8300/atomic.h
@@ -95,6 +95,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index 7a5472d7709..de649d3aa2d 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -216,6 +216,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
 }
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-ia64/atomic.h b/include/asm-ia64/atomic.h
index 15cf7984c48..d3e0dfa99e1 100644
--- a/include/asm-ia64/atomic.h
+++ b/include/asm-ia64/atomic.h
@@ -89,6 +89,7 @@ ia64_atomic64_sub (__s64 i, atomic64_t *v)
 }
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
diff --git a/include/asm-m32r/atomic.h b/include/asm-m32r/atomic.h
index 70761278b6c..3122fe106f0 100644
--- a/include/asm-m32r/atomic.h
+++ b/include/asm-m32r/atomic.h
@@ -243,6 +243,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 #define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-m68k/atomic.h b/include/asm-m68k/atomic.h
index b8a4e75d679..a4a84d5c65d 100644
--- a/include/asm-m68k/atomic.h
+++ b/include/asm-m68k/atomic.h
@@ -140,6 +140,7 @@ static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
diff --git a/include/asm-m68knommu/atomic.h b/include/asm-m68knommu/atomic.h
index 1702dbe9318..6c4e4b63e45 100644
--- a/include/asm-m68knommu/atomic.h
+++ b/include/asm-m68knommu/atomic.h
@@ -129,6 +129,7 @@ static inline int atomic_sub_return(int i, atomic_t * v)
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 92256e43a93..94a95872d72 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -289,6 +289,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-parisc/atomic.h b/include/asm-parisc/atomic.h
index 64ebd086c40..2ca56d34aaa 100644
--- a/include/asm-parisc/atomic.h
+++ b/include/asm-parisc/atomic.h
@@ -165,6 +165,7 @@ static __inline__ int atomic_read(const atomic_t *v)
 
 /* exported interface */
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index ae395a0632a..248f9aec959 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -165,6 +165,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-s390/atomic.h b/include/asm-s390/atomic.h
index d82aedf616f..be6fefe223d 100644
--- a/include/asm-s390/atomic.h
+++ b/include/asm-s390/atomic.h
@@ -75,6 +75,8 @@ static __inline__ void atomic_set_mask(unsigned long mask, atomic_t * v)
 	__CS_LOOP(v, mask, "or");
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static __inline__ int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	__asm__ __volatile__(" cs %0,%3,0(%2)\n"
diff --git a/include/asm-sh/atomic.h b/include/asm-sh/atomic.h
index 618d8e0de34..fb627de217f 100644
--- a/include/asm-sh/atomic.h
+++ b/include/asm-sh/atomic.h
@@ -101,6 +101,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
diff --git a/include/asm-sh64/atomic.h b/include/asm-sh64/atomic.h
index f3ce5c0df13..28f2ea9b567 100644
--- a/include/asm-sh64/atomic.h
+++ b/include/asm-sh64/atomic.h
@@ -113,6 +113,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
diff --git a/include/asm-sparc/atomic.h b/include/asm-sparc/atomic.h
index accb4967e9d..e1033170bd3 100644
--- a/include/asm-sparc/atomic.h
+++ b/include/asm-sparc/atomic.h
@@ -20,6 +20,7 @@ typedef struct { volatile int counter; } atomic_t;
 
 extern int __atomic_add_return(int, atomic_t *);
 extern int atomic_cmpxchg(atomic_t *, int, int);
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 extern int atomic_add_unless(atomic_t *, int, int);
 extern void atomic_set(atomic_t *, int);
 
diff --git a/include/asm-sparc64/atomic.h b/include/asm-sparc64/atomic.h
index 11f5aa5d108..25256bdc8aa 100644
--- a/include/asm-sparc64/atomic.h
+++ b/include/asm-sparc64/atomic.h
@@ -72,6 +72,7 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 #define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
diff --git a/include/asm-v850/atomic.h b/include/asm-v850/atomic.h
index f5b9ab6f4e7..166df00457e 100644
--- a/include/asm-v850/atomic.h
+++ b/include/asm-v850/atomic.h
@@ -104,6 +104,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 72eb071488c..6b540237a2f 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -389,6 +389,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 #define atomic64_dec_return(v) (atomic64_sub_return(1,v))
 
 #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-xtensa/atomic.h b/include/asm-xtensa/atomic.h
index e2ce06b101a..fe105a12392 100644
--- a/include/asm-xtensa/atomic.h
+++ b/include/asm-xtensa/atomic.h
@@ -224,6 +224,7 @@ static inline int atomic_sub_return(int i, atomic_t * v)
 #define atomic_add_negative(i,v) (atomic_add_return((i),(v)) < 0)
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value