author     Linus Torvalds <torvalds@linux-foundation.org>   2016-07-25 15:41:29 -0400
committer  Linus Torvalds <torvalds@linux-foundation.org>   2016-07-25 15:41:29 -0400
commit     c86ad14d305d2429c3da19462440bac50c183def (patch)
tree       bd794cd72476661faf82c440063c217bb978ce44 /arch/sparc/include
parent     a2303849a6b4b7ba59667091e00d6bb194071d9a (diff)
parent     f06628638cf6e75f179742b6c1b35076965b9fdd (diff)
Merge branch 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull locking updates from Ingo Molnar:
 "The locking tree was busier in this cycle than the usual pattern - a
  couple of major projects happened to coincide.

  The main changes are:

   - implement the atomic_fetch_{add,sub,and,or,xor}() API natively
     across all SMP architectures (Peter Zijlstra)

   - add atomic_fetch_{inc/dec}() as well, using the generic primitives
     (Davidlohr Bueso)

   - optimize various aspects of rwsems (Jason Low, Davidlohr Bueso,
     Waiman Long)

   - optimize smp_cond_load_acquire() on arm64 and implement LSE based
     atomic{,64}_fetch_{add,sub,and,andnot,or,xor}{,_relaxed,_acquire,_release}()
     on arm64 (Will Deacon)

   - introduce smp_acquire__after_ctrl_dep() and fix various barrier
     mis-uses and bugs (Peter Zijlstra)

   - after discovering ancient spin_unlock_wait() barrier bugs in its
     implementation and usage, strengthen its semantics and update/fix
     usage sites (Peter Zijlstra)

   - optimize mutex_trylock() fastpath (Peter Zijlstra)

   - ... misc fixes and cleanups"

* 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (67 commits)
  locking/atomic: Introduce inc/dec variants for the atomic_fetch_$op() API
  locking/barriers, arch/arm64: Implement LDXR+WFE based smp_cond_load_acquire()
  locking/static_keys: Fix non static symbol Sparse warning
  locking/qspinlock: Use __this_cpu_dec() instead of full-blown this_cpu_dec()
  locking/atomic, arch/tile: Fix tilepro build
  locking/atomic, arch/m68k: Remove comment
  locking/atomic, arch/arc: Fix build
  locking/Documentation: Clarify limited control-dependency scope
  locking/atomic, arch/rwsem: Employ atomic_long_fetch_add()
  locking/atomic, arch/qrwlock: Employ atomic_fetch_add_acquire()
  locking/atomic, arch/mips: Convert to _relaxed atomics
  locking/atomic, arch/alpha: Convert to _relaxed atomics
  locking/atomic: Remove the deprecated atomic_{set,clear}_mask() functions
  locking/atomic: Remove linux/atomic.h:atomic_fetch_or()
  locking/atomic: Implement atomic{,64,_long}_fetch_{add,sub,and,andnot,or,xor}{,_relaxed,_acquire,_release}()
  locking/atomic: Fix atomic64_relaxed() bits
  locking/atomic, arch/xtensa: Implement atomic_fetch_{add,sub,and,or,xor}()
  locking/atomic, arch/x86: Implement atomic{,64}_fetch_{add,sub,and,or,xor}()
  locking/atomic, arch/tile: Implement atomic{,64}_fetch_{add,sub,and,or,xor}()
  locking/atomic, arch/sparc: Implement atomic{,64}_fetch_{add,sub,and,or,xor}()
  ...
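Note on the new API: atomic_fetch_add() and friends return the value the variable held before the operation, while the existing atomic_add_return() family returns the value after it. A minimal sketch (illustrative only, the variable names are made up):

    atomic_t v = ATOMIC_INIT(1);

    int old = atomic_fetch_add(1, &v);   /* old == 1, v is now 2 */
    int new = atomic_add_return(1, &v);  /* new == 3, v is now 3 */

    /* the fetch variants of the bitwise ops suit lock-free flag words: */
    atomic_t flags = ATOMIC_INIT(0);
    if (atomic_fetch_or(0x1, &flags) & 0x1)
        ;   /* bit 0 was already set by another CPU */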
Diffstat (limited to 'arch/sparc/include')
-rw-r--r--  arch/sparc/include/asm/atomic_32.h   | 13 ++++++++++---
-rw-r--r--  arch/sparc/include/asm/atomic_64.h   | 16 ++++++++++++----
-rw-r--r--  arch/sparc/include/asm/spinlock_32.h |  7 +++++--
-rw-r--r--  arch/sparc/include/asm/spinlock_64.h | 10 +++++++---
4 files changed, 34 insertions(+), 12 deletions(-)
diff --git a/arch/sparc/include/asm/atomic_32.h b/arch/sparc/include/asm/atomic_32.h
index 7dcbebbcaec6..ee3f11c43cda 100644
--- a/arch/sparc/include/asm/atomic_32.h
+++ b/arch/sparc/include/asm/atomic_32.h
@@ -20,9 +20,10 @@
 #define ATOMIC_INIT(i)  { (i) }
 
 int atomic_add_return(int, atomic_t *);
-void atomic_and(int, atomic_t *);
-void atomic_or(int, atomic_t *);
-void atomic_xor(int, atomic_t *);
+int atomic_fetch_add(int, atomic_t *);
+int atomic_fetch_and(int, atomic_t *);
+int atomic_fetch_or(int, atomic_t *);
+int atomic_fetch_xor(int, atomic_t *);
 int atomic_cmpxchg(atomic_t *, int, int);
 int atomic_xchg(atomic_t *, int);
 int __atomic_add_unless(atomic_t *, int, int);
@@ -35,7 +36,13 @@ void atomic_set(atomic_t *, int);
 #define atomic_inc(v)           ((void)atomic_add_return(        1, (v)))
 #define atomic_dec(v)           ((void)atomic_add_return(       -1, (v)))
 
+#define atomic_and(i, v)        ((void)atomic_fetch_and((i), (v)))
+#define atomic_or(i, v)         ((void)atomic_fetch_or((i), (v)))
+#define atomic_xor(i, v)        ((void)atomic_fetch_xor((i), (v)))
+
 #define atomic_sub_return(i, v) (atomic_add_return(-(int)(i), (v)))
+#define atomic_fetch_sub(i, v)  (atomic_fetch_add (-(int)(i), (v)))
+
 #define atomic_inc_return(v)    (atomic_add_return(        1, (v)))
 #define atomic_dec_return(v)    (atomic_add_return(       -1, (v)))
 
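On sparc32 the plain bitwise ops are now thin wrappers that discard the fetched value; atomic_or(i, v), for example, behaves like this sketch of the macro's effect (the helper name here is made up):

    static inline void sparc32_atomic_or_sketch(int i, atomic_t *v)
    {
        (void)atomic_fetch_or(i, v);   /* old value is computed but discarded */
    }

Likewise atomic_fetch_sub(i, v) is derived from atomic_fetch_add() by negating the operand, so it still returns the pre-subtraction value.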
diff --git a/arch/sparc/include/asm/atomic_64.h b/arch/sparc/include/asm/atomic_64.h
index f2fbf9e16faf..24827a3f733a 100644
--- a/arch/sparc/include/asm/atomic_64.h
+++ b/arch/sparc/include/asm/atomic_64.h
@@ -28,16 +28,24 @@ void atomic64_##op(long, atomic64_t *);
 int atomic_##op##_return(int, atomic_t *);                              \
 long atomic64_##op##_return(long, atomic64_t *);
 
-#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)
+#define ATOMIC_FETCH_OP(op)                                             \
+int atomic_fetch_##op(int, atomic_t *);                                 \
+long atomic64_fetch_##op(long, atomic64_t *);
+
+#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)
 
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
 
-ATOMIC_OP(and)
-ATOMIC_OP(or)
-ATOMIC_OP(xor)
+#undef ATOMIC_OPS
+#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)
+
+ATOMIC_OPS(and)
+ATOMIC_OPS(or)
+ATOMIC_OPS(xor)
 
 #undef ATOMIC_OPS
+#undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
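For reference, with the redefined ATOMIC_OPS() the bitwise entries now emit both the void and the fetch prototypes. Assuming ATOMIC_OP(op) still declares the void atomic_##op()/atomic64_##op() pair (as in the unchanged part of this header), ATOMIC_OPS(and) expands to roughly:

    void atomic_and(int, atomic_t *);
    void atomic64_and(long, atomic64_t *);
    int  atomic_fetch_and(int, atomic_t *);
    long atomic64_fetch_and(long, atomic64_t *);

The bodies themselves live outside this header, in the sparc64 assembly under arch/sparc/lib/ (not part of this hunk).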
diff --git a/arch/sparc/include/asm/spinlock_32.h b/arch/sparc/include/asm/spinlock_32.h
index bcc98fc35281..d9c5876c6121 100644
--- a/arch/sparc/include/asm/spinlock_32.h
+++ b/arch/sparc/include/asm/spinlock_32.h
@@ -9,12 +9,15 @@
 #ifndef __ASSEMBLY__
 
 #include <asm/psr.h>
+#include <asm/barrier.h>
 #include <asm/processor.h> /* for cpu_relax */
 
 #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)
 
-#define arch_spin_unlock_wait(lock) \
-       do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
+static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
+{
+       smp_cond_load_acquire(&lock->lock, !VAL);
+}
 
 static inline void arch_spin_lock(arch_spinlock_t *lock)
 {
diff --git a/arch/sparc/include/asm/spinlock_64.h b/arch/sparc/include/asm/spinlock_64.h
index 968917694978..87990b7c6b0d 100644
--- a/arch/sparc/include/asm/spinlock_64.h
+++ b/arch/sparc/include/asm/spinlock_64.h
@@ -8,6 +8,9 @@
 
 #ifndef __ASSEMBLY__
 
+#include <asm/processor.h>
+#include <asm/barrier.h>
+
 /* To get debugging spinlocks which detect and catch
  * deadlock situations, set CONFIG_DEBUG_SPINLOCK
  * and rebuild your kernel.
@@ -23,9 +26,10 @@
 
 #define arch_spin_is_locked(lp)        ((lp)->lock != 0)
 
-#define arch_spin_unlock_wait(lp)      \
-       do {    rmb();                  \
-       } while((lp)->lock)
+static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
+{
+       smp_cond_load_acquire(&lock->lock, !VAL);
+}
 
 static inline void arch_spin_lock(arch_spinlock_t *lock)
 {