-rw-r--r--  arch/sh/include/asm/atomic-grb.h  | 46
-rw-r--r--  arch/sh/include/asm/atomic-llsc.h | 27
-rw-r--r--  arch/sh/include/asm/atomic.h      | 73
3 files changed, 29 insertions(+), 117 deletions(-)
diff --git a/arch/sh/include/asm/atomic-grb.h b/arch/sh/include/asm/atomic-grb.h
index 4c5b7dbfcedb..a273c88578fc 100644
--- a/arch/sh/include/asm/atomic-grb.h
+++ b/arch/sh/include/asm/atomic-grb.h
@@ -120,50 +120,4 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 		: "memory" , "r0", "r1");
 }
 
-static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
-{
-	int ret;
-
-	__asm__ __volatile__ (
-		"   .align 2		\n\t"
-		"   mova    1f,   r0	\n\t"
-		"   nop			\n\t"
-		"   mov     r15,  r1	\n\t"
-		"   mov     #-8,  r15	\n\t"
-		"   mov.l   @%1,  %0	\n\t"
-		"   cmp/eq  %2,   %0	\n\t"
-		"   bf      1f		\n\t"
-		"   mov.l   %3,   @%1	\n\t"
-		"1: mov     r1,   r15	\n\t"
-		: "=&r" (ret)
-		: "r" (v), "r" (old), "r" (new)
-		: "memory" , "r0", "r1" , "t");
-
-	return ret;
-}
-
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
-{
-	int ret;
-	unsigned long tmp;
-
-	__asm__ __volatile__ (
-		"   .align 2		\n\t"
-		"   mova    1f,   r0	\n\t"
-		"   nop			\n\t"
-		"   mov     r15,  r1	\n\t"
-		"   mov     #-12, r15	\n\t"
-		"   mov.l   @%2,  %1	\n\t"
-		"   mov     %1,   %0	\n\t"
-		"   cmp/eq  %4,   %0	\n\t"
-		"   bt/s    1f		\n\t"
-		"    add    %3,   %1	\n\t"
-		"   mov.l   %1,   @%2	\n\t"
-		"1: mov     r1,   r15	\n\t"
-		: "=&r" (ret), "=&r" (tmp)
-		: "r" (v), "r" (a), "r" (u)
-		: "memory" , "r0", "r1" , "t");
-
-	return ret != u;
-}
 #endif /* __ASM_SH_ATOMIC_GRB_H */
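For readers skimming the asm removed above: the mova/r15 prologue is the SH gUSA rollback idiom. r0 points at the end label, r15 is loaded with the negative length of the critical section, and the kernel restarts the whole sequence if it is interrupted, so the load, compare, and conditional store behave as one atomic compare-and-exchange (the #-12 variant covers the longer add-unless sequence). A minimal single-threaded C model of what the sequence computes, assuming rollback supplies the atomicity; sim_atomic_cmpxchg is a hypothetical name, not kernel code:

/* Hypothetical model of the removed GRB atomic_cmpxchg(): return the
 * value seen at *counter, storing new only when it equals old. */
static int sim_atomic_cmpxchg(int *counter, int old, int new)
{
	int ret = *counter;	/* mov.l  @%1, %0         */
	if (ret == old)		/* cmp/eq %2,  %0 ; bf 1f */
		*counter = new;	/* mov.l  %3,  @%1        */
	return ret;		/* caller: success iff ret == old */
}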
diff --git a/arch/sh/include/asm/atomic-llsc.h b/arch/sh/include/asm/atomic-llsc.h
index b040e1e08610..4b00b78e3f4f 100644
--- a/arch/sh/include/asm/atomic-llsc.h
+++ b/arch/sh/include/asm/atomic-llsc.h
@@ -104,31 +104,4 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 		: "t");
 }
 
-#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
-
-/**
- * atomic_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
- */
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-
-	return c != (u);
-}
-
 #endif /* __ASM_SH_ATOMIC_LLSC_H */
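The cmpxchg()-based retry loop deleted here is not lost: it moves verbatim into asm/atomic.h below, where the GRB and LL/SC configurations can share one copy. For reference, the same retry discipline restated against portable C11 atomics; this is an illustrative sketch, not the kernel's implementation:

#include <stdatomic.h>

/* Sketch of atomic_add_unless() semantics in C11: add a to *v unless
 * *v == u; return nonzero iff the add was performed. */
static int add_unless(atomic_int *v, int a, int u)
{
	int c = atomic_load(v);
	while (c != u &&
	       !atomic_compare_exchange_weak(v, &c, c + a))
		;	/* failed CAS refreshed c; re-test and retry */
	return c != u;
}

atomic_compare_exchange_weak() rewrites c with the current value on failure, which is exactly the role the old variable plays in the kernel loop.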
diff --git a/arch/sh/include/asm/atomic.h b/arch/sh/include/asm/atomic.h
index b16388d71954..275a448ae8c2 100644
--- a/arch/sh/include/asm/atomic.h
+++ b/arch/sh/include/asm/atomic.h
@@ -25,58 +25,43 @@
 #endif
 
 #define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
+#define atomic_dec_return(v)		atomic_sub_return(1, (v))
+#define atomic_inc_return(v)		atomic_add_return(1, (v))
+#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
+#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
+#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
+#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
 
-#define atomic_dec_return(v)		atomic_sub_return(1,(v))
-#define atomic_inc_return(v)		atomic_add_return(1,(v))
+#define atomic_inc(v)			atomic_add(1, (v))
+#define atomic_dec(v)			atomic_sub(1, (v))
 
-/*
- * atomic_inc_and_test - increment and test
+#define atomic_xchg(v, new)		(xchg(&((v)->counter), new))
+#define atomic_cmpxchg(v, o, n)		(cmpxchg(&((v)->counter), (o), (n)))
+
+/**
+ * atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
  *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
-#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
-
-#define atomic_inc(v) atomic_add(1,(v))
-#define atomic_dec(v) atomic_sub(1,(v))
-
-#if !defined(CONFIG_GUSA_RB) && !defined(CONFIG_CPU_SH4A)
-static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
-{
-	int ret;
-	unsigned long flags;
-
-	local_irq_save(flags);
-	ret = v->counter;
-	if (likely(ret == old))
-		v->counter = new;
-	local_irq_restore(flags);
-
-	return ret;
-}
-
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
-	int ret;
-	unsigned long flags;
-
-	local_irq_save(flags);
-	ret = v->counter;
-	if (ret != u)
-		v->counter += a;
-	local_irq_restore(flags);
-
-	return ret != u;
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+
+	return c != (u);
 }
-#endif /* !CONFIG_GUSA_RB && !CONFIG_CPU_SH4A */
-
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define smp_mb__before_atomic_dec()	smp_mb()
 #define smp_mb__after_atomic_dec()	smp_mb()
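A closing usage note: with the definitions consolidated here, atomic_inc_not_zero() is the usual lookup-side guard for refcounted objects. A hedged example of the pattern; struct obj and obj_get are illustrative names, not part of this patch:

/* Take a reference only while the object is still live: if the count
 * has already dropped to zero, the object may be mid-free. */
struct obj {
	atomic_t refcnt;
};

static struct obj *obj_get(struct obj *o)
{
	if (!atomic_inc_not_zero(&o->refcnt))
		return NULL;	/* count was zero: lookup loses the race */
	return o;
}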