diff options
author:    Will Deacon <will.deacon@arm.com>          2013-10-09 10:54:27 -0400
committer: Catalin Marinas <catalin.marinas@arm.com>  2013-10-24 10:46:34 -0400
commit:    5686b06cea34e31ec0a549d9b5ac00776e8e8d6d (patch)
tree:      69f5fa9d9ccc89232bea8c1e1337a4217751ba3d /arch/arm64
parent:    52ea2a560a9dba57fe5fd6b4726b1089751accf2 (diff)
arm64: lockref: add support for lockless lockrefs using cmpxchg
Our spinlocks are only 32-bit (2x16-bit tickets) and our cmpxchg can
deal with 8-bytes (as one would hope!).
This patch wires up the cmpxchg-based lockless lockref implementation
for arm64.
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Diffstat (limited to 'arch/arm64')
 arch/arm64/Kconfig                | 1 +
 arch/arm64/include/asm/spinlock.h | 8 ++++++--
 2 files changed, 7 insertions(+), 2 deletions(-)
diff --git a/arch/arm64/Kconfig b/arch/arm64/Kconfig
index c04454876bcb..9e8233b3f188 100644
--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -1,6 +1,7 @@
 config ARM64
 	def_bool y
 	select ARCH_HAS_ATOMIC64_DEC_IF_POSITIVE
+	select ARCH_USE_CMPXCHG_LOCKREF
 	select ARCH_WANT_OPTIONAL_GPIOLIB
 	select ARCH_WANT_COMPAT_IPC_PARSE_VERSION
 	select ARCH_WANT_FRAME_POINTERS
diff --git a/arch/arm64/include/asm/spinlock.h b/arch/arm64/include/asm/spinlock.h
index 525dd535443e..3d5cf064d7a1 100644
--- a/arch/arm64/include/asm/spinlock.h
+++ b/arch/arm64/include/asm/spinlock.h
@@ -92,10 +92,14 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
 	: "memory");
 }
 
+static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
+{
+	return lock.owner == lock.next;
+}
+
 static inline int arch_spin_is_locked(arch_spinlock_t *lock)
 {
-	arch_spinlock_t lockval = ACCESS_ONCE(*lock);
-	return lockval.owner != lockval.next;
+	return !arch_spin_value_unlocked(ACCESS_ONCE(*lock));
 }
 
 static inline int arch_spin_is_contended(arch_spinlock_t *lock)