Diffstat (limited to 'arch/hexagon/include/asm/spinlock.h')
-rw-r--r--	arch/hexagon/include/asm/spinlock.h	10
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/arch/hexagon/include/asm/spinlock.h b/arch/hexagon/include/asm/spinlock.h
index 12ca4ebc0338..a1c55788c5d6 100644
--- a/arch/hexagon/include/asm/spinlock.h
+++ b/arch/hexagon/include/asm/spinlock.h
@@ -23,6 +23,8 @@
 #define _ASM_SPINLOCK_H
 
 #include <asm/irqflags.h>
+#include <asm/barrier.h>
+#include <asm/processor.h>
 
 /*
  * This file is pulled in for SMP builds.
@@ -176,8 +178,12 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
  * SMP spinlocks are intended to allow only a single CPU at the lock
  */
 #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
-#define arch_spin_unlock_wait(lock) \
-	do {while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
+
+static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
+{
+	smp_cond_load_acquire(&lock->lock, !VAL);
+}
+
 #define arch_spin_is_locked(x) ((x)->lock != 0)
 
 #define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
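
Note on the change (not part of the commit itself): the removed macro busy-waited on arch_spin_is_locked() with cpu_relax() and gave no memory-ordering guarantee, whereas smp_cond_load_acquire(&lock->lock, !VAL) spins until the lock word reads zero and then provides ACQUIRE ordering. Below is a minimal open-coded sketch of that intended behaviour, assuming READ_ONCE(), cpu_relax() and smp_acquire__after_ctrl_dep() are available; it illustrates the semantics only, it is not the kernel's actual smp_cond_load_acquire() implementation, and spin_unlock_wait_sketch() is a hypothetical name used just for this example.

/*
 * Illustrative sketch only: roughly what the new
 * arch_spin_unlock_wait() above asks smp_cond_load_acquire() to do.
 */
static inline void spin_unlock_wait_sketch(arch_spinlock_t *lock)
{
	unsigned int val;

	for (;;) {
		val = READ_ONCE(lock->lock);	/* reload the lock word each pass */
		if (!val)			/* the "!VAL" condition: lock is free */
			break;
		cpu_relax();			/* same polite spin as the old macro */
	}
	smp_acquire__after_ctrl_dep();		/* order later accesses after the final read */
}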