about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorMike Frysinger <vapier@gentoo.org>2011-07-26 19:09:11 -0400
committerLinus Torvalds <torvalds@linux-foundation.org>2011-07-26 19:49:48 -0400
commit7505cb60c2d26301630b052852d484decf07aef1 (patch)
tree9383737887787553bf90d42ee6886b59d80a52d8
parent00b3c28bd753afb46fb26b2f9e21dd27dacd80d4 (diff)
asm-generic/atomic.h: allow SMP peeps to leverage this
Only a few core funcs need to be implemented for SMP systems, so allow the arches to override them while getting the rest for free. At least, this is enough to allow the Blackfin SMP port to use things.

Signed-off-by: Mike Frysinger <vapier@gentoo.org>
Cc: Arun Sharma <asharma@fb.com>
Cc: Arnd Bergmann <arnd@arndb.de>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
-rw-r--r--include/asm-generic/atomic.h16
1 file changed, 15 insertions(+), 1 deletion(-)
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index 4c56e0daae9f..e37963c1df4d 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -16,7 +16,11 @@
16#define __ASM_GENERIC_ATOMIC_H 16#define __ASM_GENERIC_ATOMIC_H
17 17
18#ifdef CONFIG_SMP 18#ifdef CONFIG_SMP
19#error not SMP safe 19/* Force people to define core atomics */
20# if !defined(atomic_add_return) || !defined(atomic_sub_return) || \
21 !defined(atomic_clear_mask) || !defined(atomic_set_mask)
22# error "SMP requires a little arch-specific magic"
23# endif
20#endif 24#endif
21 25
22/* 26/*
@@ -34,7 +38,9 @@
34 * 38 *
35 * Atomically reads the value of @v. 39 * Atomically reads the value of @v.
36 */ 40 */
41#ifndef atomic_read
37#define atomic_read(v) (*(volatile int *)&(v)->counter) 42#define atomic_read(v) (*(volatile int *)&(v)->counter)
43#endif
38 44
39/** 45/**
40 * atomic_set - set atomic variable 46 * atomic_set - set atomic variable
@@ -55,6 +61,7 @@
55 * 61 *
56 * Atomically adds @i to @v and returns the result 62 * Atomically adds @i to @v and returns the result
57 */ 63 */
64#ifndef atomic_add_return
58static inline int atomic_add_return(int i, atomic_t *v) 65static inline int atomic_add_return(int i, atomic_t *v)
59{ 66{
60 unsigned long flags; 67 unsigned long flags;
@@ -68,6 +75,7 @@ static inline int atomic_add_return(int i, atomic_t *v)
68 75
69 return temp; 76 return temp;
70} 77}
78#endif
71 79
72/** 80/**
73 * atomic_sub_return - subtract integer from atomic variable 81 * atomic_sub_return - subtract integer from atomic variable
@@ -76,6 +84,7 @@ static inline int atomic_add_return(int i, atomic_t *v)
76 * 84 *
77 * Atomically subtracts @i from @v and returns the result 85 * Atomically subtracts @i from @v and returns the result
78 */ 86 */
87#ifndef atomic_sub_return
79static inline int atomic_sub_return(int i, atomic_t *v) 88static inline int atomic_sub_return(int i, atomic_t *v)
80{ 89{
81 unsigned long flags; 90 unsigned long flags;
@@ -89,6 +98,7 @@ static inline int atomic_sub_return(int i, atomic_t *v)
89 98
90 return temp; 99 return temp;
91} 100}
101#endif
92 102
93static inline int atomic_add_negative(int i, atomic_t *v) 103static inline int atomic_add_negative(int i, atomic_t *v)
94{ 104{
@@ -147,6 +157,7 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
147 * 157 *
148 * Atomically clears the bits set in @mask from @v 158 * Atomically clears the bits set in @mask from @v
149 */ 159 */
160#ifndef atomic_clear_mask
150static inline void atomic_clear_mask(unsigned long mask, atomic_t *v) 161static inline void atomic_clear_mask(unsigned long mask, atomic_t *v)
151{ 162{
152 unsigned long flags; 163 unsigned long flags;
@@ -156,6 +167,7 @@ static inline void atomic_clear_mask(unsigned long mask, atomic_t *v)
156 v->counter &= mask; 167 v->counter &= mask;
157 raw_local_irq_restore(flags); 168 raw_local_irq_restore(flags);
158} 169}
170#endif
159 171
160/** 172/**
161 * atomic_set_mask - Atomically set bits in atomic variable 173 * atomic_set_mask - Atomically set bits in atomic variable
@@ -164,6 +176,7 @@ static inline void atomic_clear_mask(unsigned long mask, atomic_t *v)
164 * 176 *
165 * Atomically sets the bits set in @mask in @v 177 * Atomically sets the bits set in @mask in @v
166 */ 178 */
179#ifndef atomic_set_mask
167static inline void atomic_set_mask(unsigned int mask, atomic_t *v) 180static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
168{ 181{
169 unsigned long flags; 182 unsigned long flags;
@@ -172,6 +185,7 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
172 v->counter |= mask; 185 v->counter |= mask;
173 raw_local_irq_restore(flags); 186 raw_local_irq_restore(flags);
174} 187}
188#endif
175 189
176/* Assume that atomic operations are already serializing */ 190/* Assume that atomic operations are already serializing */
177#define smp_mb__before_atomic_dec() barrier() 191#define smp_mb__before_atomic_dec() barrier()