Diffstat (limited to 'include/asm-m68k/atomic.h')
-rw-r--r--  include/asm-m68k/atomic.h  35
1 file changed, 32 insertions, 3 deletions
diff --git a/include/asm-m68k/atomic.h b/include/asm-m68k/atomic.h
index a4a84d5c65d5..862e497c2645 100644
--- a/include/asm-m68k/atomic.h
+++ b/include/asm-m68k/atomic.h
@@ -55,6 +55,7 @@ static inline int atomic_inc_and_test(atomic_t *v)
 }
 
 #ifdef CONFIG_RMW_INSNS
+
 static inline int atomic_add_return(int i, atomic_t *v)
 {
 	int t, tmp;
@@ -82,7 +83,12 @@ static inline int atomic_sub_return(int i, atomic_t *v)
 	: "g" (i), "2" (atomic_read(v)));
 	return t;
 }
+
+#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
 #else /* !CONFIG_RMW_INSNS */
+
 static inline int atomic_add_return(int i, atomic_t * v)
 {
 	unsigned long flags;
@@ -110,6 +116,32 @@ static inline int atomic_sub_return(int i, atomic_t * v)
 
 	return t;
 }
+
+static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+	unsigned long flags;
+	int prev;
+
+	local_irq_save(flags);
+	prev = atomic_read(v);
+	if (prev == old)
+		atomic_set(v, new);
+	local_irq_restore(flags);
+	return prev;
+}
+
+static inline int atomic_xchg(atomic_t *v, int new)
+{
+	unsigned long flags;
+	int prev;
+
+	local_irq_save(flags);
+	prev = atomic_read(v);
+	atomic_set(v, new);
+	local_irq_restore(flags);
+	return prev;
+}
+
 #endif /* !CONFIG_RMW_INSNS */
 
 #define atomic_dec_return(v)	atomic_sub_return(1, (v))
@@ -139,9 +171,6 @@ static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
 	__asm__ __volatile__("orl %1,%0" : "+m" (*v) : "id" (mask));
 }
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
-
 #define atomic_add_unless(v, a, u)	\
 ({	\
 	int c, old;	\
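
For context, below is a minimal, stand-alone C sketch of the compare-and-swap retry loop that these primitives enable, shaped like the atomic_add_unless() visible in the last hunk's context. It is not kernel code: the pthread mutex stands in for the local_irq_save()/local_irq_restore() critical sections that the !CONFIG_RMW_INSNS fallback uses, and the atomic_t, helper names, and main() are illustrative only.

/* Userspace sketch (assumptions noted above), compile with -pthread. */
#include <pthread.h>
#include <stdio.h>

typedef struct { int counter; } atomic_t;

static pthread_mutex_t atomic_lock = PTHREAD_MUTEX_INITIALIZER;

static int atomic_read(atomic_t *v) { return v->counter; }
static void atomic_set(atomic_t *v, int i) { v->counter = i; }

/* Compare-and-swap: store 'new' only if the current value equals 'old';
 * always return the value observed before the (possible) store. */
static int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int prev;

	pthread_mutex_lock(&atomic_lock);   /* stands in for local_irq_save() */
	prev = atomic_read(v);
	if (prev == old)
		atomic_set(v, new);
	pthread_mutex_unlock(&atomic_lock); /* stands in for local_irq_restore() */
	return prev;
}

/* Add 'a' to *v unless it currently holds 'u'; returns nonzero if the add
 * happened.  Same retry-loop shape as the kernel's atomic_add_unless(). */
static int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (c == u)
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (old == c)
			break;          /* swap succeeded */
		c = old;                /* lost a race: retry with the fresh value */
	}
	return c != u;
}

int main(void)
{
	atomic_t refcount = { .counter = 1 };

	/* Take a reference only if the object is still live (count != 0). */
	if (atomic_add_unless(&refcount, 1, 0))
		printf("got reference, count is now %d\n", atomic_read(&refcount));
	return 0;
}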