author		Ingo Molnar <mingo@elte.hu>	2006-07-03 03:24:54 -0400
committer	Linus Torvalds <torvalds@g5.osdl.org>	2006-07-03 18:27:04 -0400
commit		8a25d5debff2daee280e83e09d8c25d67c26a972
tree		3bccfef9acb66fc62863bfd6c16493c5e8c8e394 /include/asm-i386
parent		4ea2176dfa714882e88180b474e4cbcd888b70af
[PATCH] lockdep: prove spinlock rwlock locking correctness
Use the lock validator framework to prove spinlock and rwlock locking
correctness.
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Arjan van de Ven <arjan@linux.intel.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Diffstat (limited to 'include/asm-i386')
-rw-r--r--	include/asm-i386/spinlock.h	7
1 files changed, 7 insertions, 0 deletions
diff --git a/include/asm-i386/spinlock.h b/include/asm-i386/spinlock.h
index 7e29b51bcaa0..87c40f830653 100644
--- a/include/asm-i386/spinlock.h
+++ b/include/asm-i386/spinlock.h
@@ -68,6 +68,12 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 		"=m" (lock->slock) : : "memory");
 }
 
+/*
+ * It is easier for the lock validator if interrupts are not re-enabled
+ * in the middle of a lock-acquire. This is a performance feature anyway
+ * so we turn it off:
+ */
+#ifndef CONFIG_PROVE_LOCKING
 static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
 {
 	alternative_smp(
@@ -75,6 +81,7 @@ static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long fla
 		__raw_spin_lock_string_up,
 		"=m" (lock->slock) : "r" (flags) : "memory");
 }
+#endif
 
 static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
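For readers following the change: with CONFIG_PROVE_LOCKING enabled, __raw_spin_lock_flags() is compiled out entirely, so an irq-saving acquire path has to fall back to the plain __raw_spin_lock(), keeping interrupts disabled for the whole acquire as the lock validator expects. The sketch below shows that caller-side pattern; the function name example_spin_lock_irqsave and its exact surroundings are illustrative, not the actual kernel code.

/*
 * Illustrative sketch (not the real kernel implementation): how an
 * irq-saving lock acquire can cope with __raw_spin_lock_flags()
 * being compiled out under CONFIG_PROVE_LOCKING.
 */
static inline unsigned long example_spin_lock_irqsave(raw_spinlock_t *lock)
{
	unsigned long flags;

	local_irq_save(flags);			/* disable irqs, remember previous state */
#ifdef CONFIG_PROVE_LOCKING
	__raw_spin_lock(lock);			/* irqs stay off for the whole acquire */
#else
	__raw_spin_lock_flags(lock, flags);	/* may briefly re-enable irqs while spinning */
#endif
	return flags;
}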