diff options
author | Linus Torvalds <torvalds@linux-foundation.org> | 2014-10-13 09:48:00 -0400 |
---|---|---|
committer | Linus Torvalds <torvalds@linux-foundation.org> | 2014-10-13 09:48:00 -0400 |
commit | dbb885fecc1b1b35e93416bedd24d21bd20f60ed (patch) | |
tree | 9aa92bcc4e3d3594eba0ba85d72b878d85f35a59 /arch/m32r/include | |
parent | d6dd50e07c5bec00db2005969b1a01f8ca3d25ef (diff) | |
parent | 2291059c852706c6f5ffb400366042b7625066cd (diff) |
Merge branch 'locking-arch-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull arch atomic cleanups from Ingo Molnar:
"This is a series kept separate from the main locking tree, which
cleans up and improves various details in the atomics type handling:
- Remove the unused atomic_or_long() method
- Consolidate and compress atomic ops implementations between
architectures, to reduce linecount and to make it easier to add new
ops.
- Rewrite generic atomic support to only require cmpxchg() from an
architecture - generate all other methods from that"
* 'locking-arch-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (23 commits)
locking,arch: Use ACCESS_ONCE() instead of cast to volatile in atomic_read()
locking, mips: Fix atomics
locking, sparc64: Fix atomics
locking,arch: Rewrite generic atomic support
locking,arch,xtensa: Fold atomic_ops
locking,arch,sparc: Fold atomic_ops
locking,arch,sh: Fold atomic_ops
locking,arch,powerpc: Fold atomic_ops
locking,arch,parisc: Fold atomic_ops
locking,arch,mn10300: Fold atomic_ops
locking,arch,mips: Fold atomic_ops
locking,arch,metag: Fold atomic_ops
locking,arch,m68k: Fold atomic_ops
locking,arch,m32r: Fold atomic_ops
locking,arch,ia64: Fold atomic_ops
locking,arch,hexagon: Fold atomic_ops
locking,arch,cris: Fold atomic_ops
locking,arch,avr32: Fold atomic_ops
locking,arch,arm64: Fold atomic_ops
locking,arch,arm: Fold atomic_ops
...
Diffstat (limited to 'arch/m32r/include')
-rw-r--r-- | arch/m32r/include/asm/atomic.h | 145 |
1 files changed, 58 insertions, 87 deletions
diff --git a/arch/m32r/include/asm/atomic.h b/arch/m32r/include/asm/atomic.h
index 8ad0ed4182a5..31bb74adba08 100644
--- a/arch/m32r/include/asm/atomic.h
+++ b/arch/m32r/include/asm/atomic.h
@@ -28,7 +28,7 @@ | |||
28 | * | 28 | * |
29 | * Atomically reads the value of @v. | 29 | * Atomically reads the value of @v. |
30 | */ | 30 | */ |
/* Post-commit form: ACCESS_ONCE() replaces the open-coded cast to
 * (volatile int *) — same single-read guarantee, standard idiom. */
#define atomic_read(v)	ACCESS_ONCE((v)->counter)
32 | 32 | ||
33 | /** | 33 | /** |
34 | * atomic_set - set atomic variable | 34 | * atomic_set - set atomic variable |
@@ -39,85 +39,64 @@ | |||
39 | */ | 39 | */ |
/* Plain store; callers rely on the compiler not to tear an aligned
 * int write on m32r. */
#define atomic_set(v,i)	(((v)->counter) = (i))
41 | 41 | ||
42 | /** | ||
43 | * atomic_add_return - add integer to atomic variable and return it | ||
44 | * @i: integer value to add | ||
45 | * @v: pointer of type atomic_t | ||
46 | * | ||
47 | * Atomically adds @i to @v and return (@i + @v). | ||
48 | */ | ||
49 | static __inline__ int atomic_add_return(int i, atomic_t *v) | ||
50 | { | ||
51 | unsigned long flags; | ||
52 | int result; | ||
53 | |||
54 | local_irq_save(flags); | ||
55 | __asm__ __volatile__ ( | ||
56 | "# atomic_add_return \n\t" | ||
57 | DCACHE_CLEAR("%0", "r4", "%1") | ||
58 | M32R_LOCK" %0, @%1; \n\t" | ||
59 | "add %0, %2; \n\t" | ||
60 | M32R_UNLOCK" %0, @%1; \n\t" | ||
61 | : "=&r" (result) | ||
62 | : "r" (&v->counter), "r" (i) | ||
63 | : "memory" | ||
/*
 * On CONFIG_CHIP_M32700_TS1 the DCACHE_CLEAR() erratum workaround used in
 * the asm bodies below scratches r4, so it must appear in the clobber
 * list — but only on that chip, hence this conditional fragment that is
 * textually appended to each asm's clobbers.
 */
#ifdef CONFIG_CHIP_M32700_TS1
#define __ATOMIC_CLOBBER	, "r4"
#else
#define __ATOMIC_CLOBBER
#endif
/*
 * ATOMIC_OP(op) - generate atomic_##op(i, v).
 *
 * @op is an m32r ALU mnemonic ("add", "sub") stringized into the asm.
 * Atomically applies @op of @i to v->counter: interrupts are disabled
 * and the LOCK/UNLOCK load-store pair (M32R_LOCK/M32R_UNLOCK) performs
 * the read-modify-write.  No value is returned.
 * (Reconstructed from the rendered diff; the stray trailing '\' after
 * the closing brace in the original patch is dropped — it needlessly
 * spliced the following blank line into the macro.)
 */
#define ATOMIC_OP(op)						\
static __inline__ void atomic_##op(int i, atomic_t *v)		\
{								\
	unsigned long flags;					\
	int result;						\
								\
	local_irq_save(flags);					\
	__asm__ __volatile__ (					\
		"# atomic_" #op "		\n\t"		\
		DCACHE_CLEAR("%0", "r4", "%1")			\
		M32R_LOCK" %0, @%1;		\n\t"		\
		#op " %0, %2;			\n\t"		\
		M32R_UNLOCK" %0, @%1;		\n\t"		\
		: "=&r" (result)				\
		: "r" (&v->counter), "r" (i)			\
		: "memory"					\
		__ATOMIC_CLOBBER				\
	);							\
	local_irq_restore(flags);				\
}
/*
 * ATOMIC_OP_RETURN(op) - generate atomic_##op##_return(i, v).
 *
 * Same LOCK/UNLOCK read-modify-write as ATOMIC_OP(), but returns the
 * new value of v->counter (i.e. the result after applying @op of @i).
 */
#define ATOMIC_OP_RETURN(op)					\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)	\
{								\
	unsigned long flags;					\
	int result;						\
								\
	local_irq_save(flags);					\
	__asm__ __volatile__ (					\
		"# atomic_" #op "_return	\n\t"		\
		DCACHE_CLEAR("%0", "r4", "%1")			\
		M32R_LOCK" %0, @%1;		\n\t"		\
		#op " %0, %2;			\n\t"		\
		M32R_UNLOCK" %0, @%1;		\n\t"		\
		: "=&r" (result)				\
		: "r" (&v->counter), "r" (i)			\
		: "memory"					\
		__ATOMIC_CLOBBER				\
	);							\
	local_irq_restore(flags);				\
								\
	return result;						\
}
72 | 91 | ||
73 | /** | 92 | #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) |
74 | * atomic_sub_return - subtract integer from atomic variable and return it | ||
75 | * @i: integer value to subtract | ||
76 | * @v: pointer of type atomic_t | ||
77 | * | ||
78 | * Atomically subtracts @i from @v and return (@v - @i). | ||
79 | */ | ||
80 | static __inline__ int atomic_sub_return(int i, atomic_t *v) | ||
81 | { | ||
82 | unsigned long flags; | ||
83 | int result; | ||
84 | |||
85 | local_irq_save(flags); | ||
86 | __asm__ __volatile__ ( | ||
87 | "# atomic_sub_return \n\t" | ||
88 | DCACHE_CLEAR("%0", "r4", "%1") | ||
89 | M32R_LOCK" %0, @%1; \n\t" | ||
90 | "sub %0, %2; \n\t" | ||
91 | M32R_UNLOCK" %0, @%1; \n\t" | ||
92 | : "=&r" (result) | ||
93 | : "r" (&v->counter), "r" (i) | ||
94 | : "memory" | ||
95 | #ifdef CONFIG_CHIP_M32700_TS1 | ||
96 | , "r4" | ||
97 | #endif /* CONFIG_CHIP_M32700_TS1 */ | ||
98 | ); | ||
99 | local_irq_restore(flags); | ||
100 | |||
101 | return result; | ||
102 | } | ||
103 | 93 | ||
104 | /** | 94 | ATOMIC_OPS(add) |
105 | * atomic_add - add integer to atomic variable | 95 | ATOMIC_OPS(sub) |
106 | * @i: integer value to add | ||
107 | * @v: pointer of type atomic_t | ||
108 | * | ||
109 | * Atomically adds @i to @v. | ||
110 | */ | ||
111 | #define atomic_add(i,v) ((void) atomic_add_return((i), (v))) | ||
112 | 96 | ||
113 | /** | 97 | #undef ATOMIC_OPS |
114 | * atomic_sub - subtract the atomic variable | 98 | #undef ATOMIC_OP_RETURN |
115 | * @i: integer value to subtract | 99 | #undef ATOMIC_OP |
116 | * @v: pointer of type atomic_t | ||
117 | * | ||
118 | * Atomically subtracts @i from @v. | ||
119 | */ | ||
120 | #define atomic_sub(i,v) ((void) atomic_sub_return((i), (v))) | ||
121 | 100 | ||
122 | /** | 101 | /** |
123 | * atomic_sub_and_test - subtract value from variable and test result | 102 | * atomic_sub_and_test - subtract value from variable and test result |
@@ -151,9 +130,7 @@ static __inline__ int atomic_inc_return(atomic_t *v) | |||
151 | : "=&r" (result) | 130 | : "=&r" (result) |
152 | : "r" (&v->counter) | 131 | : "r" (&v->counter) |
153 | : "memory" | 132 | : "memory" |
154 | #ifdef CONFIG_CHIP_M32700_TS1 | 133 | __ATOMIC_CLOBBER |
155 | , "r4" | ||
156 | #endif /* CONFIG_CHIP_M32700_TS1 */ | ||
157 | ); | 134 | ); |
158 | local_irq_restore(flags); | 135 | local_irq_restore(flags); |
159 | 136 | ||
@@ -181,9 +158,7 @@ static __inline__ int atomic_dec_return(atomic_t *v) | |||
181 | : "=&r" (result) | 158 | : "=&r" (result) |
182 | : "r" (&v->counter) | 159 | : "r" (&v->counter) |
183 | : "memory" | 160 | : "memory" |
184 | #ifdef CONFIG_CHIP_M32700_TS1 | 161 | __ATOMIC_CLOBBER |
185 | , "r4" | ||
186 | #endif /* CONFIG_CHIP_M32700_TS1 */ | ||
187 | ); | 162 | ); |
188 | local_irq_restore(flags); | 163 | local_irq_restore(flags); |
189 | 164 | ||
@@ -280,9 +255,7 @@ static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *addr) | |||
280 | : "=&r" (tmp) | 255 | : "=&r" (tmp) |
281 | : "r" (addr), "r" (~mask) | 256 | : "r" (addr), "r" (~mask) |
282 | : "memory" | 257 | : "memory" |
283 | #ifdef CONFIG_CHIP_M32700_TS1 | 258 | __ATOMIC_CLOBBER |
284 | , "r5" | ||
285 | #endif /* CONFIG_CHIP_M32700_TS1 */ | ||
286 | ); | 259 | ); |
287 | local_irq_restore(flags); | 260 | local_irq_restore(flags); |
288 | } | 261 | } |
@@ -302,9 +275,7 @@ static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr) | |||
302 | : "=&r" (tmp) | 275 | : "=&r" (tmp) |
303 | : "r" (addr), "r" (mask) | 276 | : "r" (addr), "r" (mask) |
304 | : "memory" | 277 | : "memory" |
305 | #ifdef CONFIG_CHIP_M32700_TS1 | 278 | __ATOMIC_CLOBBER |
306 | , "r5" | ||
307 | #endif /* CONFIG_CHIP_M32700_TS1 */ | ||
308 | ); | 279 | ); |
309 | local_irq_restore(flags); | 280 | local_irq_restore(flags); |
310 | } | 281 | } |