author     Andi Kleen <ak@suse.de>              2006-12-06 20:14:00 -0500
committer  Andi Kleen <andi@basil.nowhere.org>  2006-12-06 20:14:00 -0500
commit     87e1652c7863b9ae406ff37f33c7ec2bb494d7b1 (patch)
tree       a87d98504d20aaf6e7dba583f11c730be02ce3d5 /include/asm-x86_64/spinlock.h
parent     a63954b5cad5765e52870bb649992bf636f32a6b (diff)
[PATCH] x86-64: Don't keep interrupts disabled while spinning in spinlocks
Follows i386.
Based on patch from some folks at Google (MikeW, Edward G.?), but
completely redone by AK.
Signed-off-by: Andi Kleen <ak@suse.de>
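
The flags argument is the EFLAGS word the caller saved before disabling interrupts, so bit 0x200 (the IF flag) tells the lock path whether it may execute sti while waiting for the lock. A minimal caller-level sketch of that flow (illustrative only, not code from this patch; my_lock is a hypothetical raw_spinlock_t, and real kernel code would normally go through spin_lock_irqsave() rather than calling the __raw_* helpers directly):

	unsigned long flags;

	local_irq_save(flags);			/* save EFLAGS (including IF), then disable interrupts */
	__raw_spin_lock_flags(&my_lock, flags);	/* may briefly re-enable interrupts while spinning */
	/* ... critical section runs with the lock held and interrupts off ... */
	__raw_spin_unlock(&my_lock);
	local_irq_restore(flags);		/* restore the caller's original IF state */

If the caller already had interrupts disabled before local_irq_save(), bit 0x200 is clear in flags and the lock path keeps them disabled throughout.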
Diffstat (limited to 'include/asm-x86_64/spinlock.h')

-rw-r--r--   include/asm-x86_64/spinlock.h   29
1 files changed, 28 insertions, 1 deletions
diff --git a/include/asm-x86_64/spinlock.h b/include/asm-x86_64/spinlock.h
index 05ef097ba55b..88bf981e73cf 100644
--- a/include/asm-x86_64/spinlock.h
+++ b/include/asm-x86_64/spinlock.h
@@ -36,7 +36,34 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 		"2:\t" : "=m" (lock->slock) : : "memory");
 }
 
-#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
+/*
+ * Same as __raw_spin_lock, but reenable interrupts during spinning.
+ */
+#ifndef CONFIG_PROVE_LOCKING
+static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
+{
+	asm volatile(
+		"\n1:\t"
+		LOCK_PREFIX " ; decl %0\n\t"
+		"jns 5f\n"
+		"testl $0x200, %1\n\t"	/* interrupts were disabled? */
+		"jz 4f\n\t"
+		"sti\n"
+		"3:\t"
+		"rep;nop\n\t"
+		"cmpl $0, %0\n\t"
+		"jle 3b\n\t"
+		"cli\n\t"
+		"jmp 1b\n"
+		"4:\t"
+		"rep;nop\n\t"
+		"cmpl $0, %0\n\t"
+		"jg 1b\n\t"
+		"jmp 4b\n"
+		"5:\n\t"
+		: "+m" (lock->slock) : "r" ((unsigned)flags) : "memory");
+}
+#endif
 
 static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
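
For readability, here is a rough C rendering of the control flow that the new inline assembly implements. This is a sketch only, not code from the patch: raw_spinlock_t, cpu_relax(), local_irq_enable() and local_irq_disable() are the kernel's own helpers standing in for rep;nop, sti and cli, the GCC builtin __sync_sub_and_fetch() stands in for the locked decl, and the function name and X86_EFLAGS_IF definition are local to the sketch.

	#define X86_EFLAGS_IF 0x200	/* IF bit in the caller's saved EFLAGS */

	static inline void raw_spin_lock_flags_sketch(raw_spinlock_t *lock,
						      unsigned long flags)
	{
		for (;;) {
			/* 1: lock; decl %0 -- a non-negative result means we took the lock (jns 5f) */
			if ((int)__sync_sub_and_fetch(&lock->slock, 1) >= 0)
				return;
			if (flags & X86_EFLAGS_IF) {
				/* caller had interrupts enabled: allow them while waiting */
				local_irq_enable();		/* sti */
				while ((int)lock->slock <= 0)	/* 3: rep;nop; cmpl $0,%0; jle 3b */
					cpu_relax();
				local_irq_disable();		/* cli, then jmp 1b to retry */
			} else {
				/* 4: caller had interrupts disabled: spin without touching IF */
				while ((int)lock->slock <= 0)
					cpu_relax();
			}
		}
	}

The key point of the patch is visible in the first branch: the lock is only ever acquired with interrupts disabled, but the busy-wait itself runs with interrupts enabled whenever the caller's saved EFLAGS shows they were enabled before, which keeps interrupt latency bounded on contended locks.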