Diffstat (limited to 'include/asm-x86/spinlock.h')

 -rw-r--r--  include/asm-x86/spinlock.h | 18
 1 file changed, 4 insertions, 14 deletions
diff --git a/include/asm-x86/spinlock.h b/include/asm-x86/spinlock.h
index bc6376f1bc5a..21e89bf92f1c 100644
--- a/include/asm-x86/spinlock.h
+++ b/include/asm-x86/spinlock.h
@@ -20,18 +20,8 @@
  */
 
 #ifdef CONFIG_X86_32
-typedef char _slock_t;
-# define LOCK_INS_DEC "decb"
-# define LOCK_INS_XCH "xchgb"
-# define LOCK_INS_MOV "movb"
-# define LOCK_INS_CMP "cmpb"
 # define LOCK_PTR_REG "a"
 #else
-typedef int _slock_t;
-# define LOCK_INS_DEC "decl"
-# define LOCK_INS_XCH "xchgl"
-# define LOCK_INS_MOV "movl"
-# define LOCK_INS_CMP "cmpl"
 # define LOCK_PTR_REG "D"
 #endif
 
@@ -66,14 +56,14 @@ typedef int _slock_t;
 #if (NR_CPUS < 256)
 static inline int __raw_spin_is_locked(raw_spinlock_t *lock)
 {
-	int tmp = *(volatile signed int *)(&(lock)->slock);
+	int tmp = ACCESS_ONCE(lock->slock);
 
 	return (((tmp >> 8) & 0xff) != (tmp & 0xff));
 }
 
 static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
 {
-	int tmp = *(volatile signed int *)(&(lock)->slock);
+	int tmp = ACCESS_ONCE(lock->slock);
 
 	return (((tmp >> 8) & 0xff) - (tmp & 0xff)) > 1;
 }
@@ -130,14 +120,14 @@ static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
 #else
 static inline int __raw_spin_is_locked(raw_spinlock_t *lock)
 {
-	int tmp = *(volatile signed int *)(&(lock)->slock);
+	int tmp = ACCESS_ONCE(lock->slock);
 
 	return (((tmp >> 16) & 0xffff) != (tmp & 0xffff));
 }
 
 static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
 {
-	int tmp = *(volatile signed int *)(&(lock)->slock);
+	int tmp = ACCESS_ONCE(lock->slock);
 
 	return (((tmp >> 16) & 0xffff) - (tmp & 0xffff)) > 1;
 }
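
The substantive change above swaps the open-coded volatile cast for ACCESS_ONCE(). A minimal user-space sketch (illustration only, not kernel code, assuming ACCESS_ONCE() keeps its usual definition from include/linux/compiler.h; fake_raw_spinlock is a hypothetical stand-in for raw_spinlock_t) showing that both spellings perform the same single volatile load:

#include <stdio.h>

/* Assumed to match include/linux/compiler.h (typeof there; __typeof__ here
 * so the sketch also builds outside GNU C mode). */
#define ACCESS_ONCE(x) (*(volatile __typeof__(x) *)&(x))

struct fake_raw_spinlock { unsigned int slock; };  /* hypothetical stand-in */

int main(void)
{
	struct fake_raw_spinlock lock = { .slock = 0x0100 };

	/* Old form removed by the patch: open-coded volatile cast. */
	int old_style = *(volatile signed int *)(&lock.slock);

	/* New form: the same single volatile read, via the macro. */
	int new_style = ACCESS_ONCE(lock.slock);

	printf("old=%#x new=%#x\n", old_style, new_style);
	return 0;
}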
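
For reference, the helpers being touched decode the x86 ticket-lock word: with NR_CPUS < 256 the 16-bit slock packs two 8-bit tickets, the lock is held whenever the two bytes differ, and it is contended when they differ by more than one. A small user-space sketch of that decoding, using hypothetical names (fake_ticket_lock, sketch_is_locked, sketch_is_contended) and assuming the low byte is the currently served ticket and the high byte the next ticket to hand out:

#include <stdio.h>

struct fake_ticket_lock { unsigned int slock; };  /* hypothetical stand-in */

/* Same test as __raw_spin_is_locked() above: held when next != owner. */
static int sketch_is_locked(const struct fake_ticket_lock *lock)
{
	int tmp = lock->slock;

	return ((tmp >> 8) & 0xff) != (tmp & 0xff);
}

/* Same test as __raw_spin_is_contended(): more than one ticket outstanding. */
static int sketch_is_contended(const struct fake_ticket_lock *lock)
{
	int tmp = lock->slock;

	return (((tmp >> 8) & 0xff) - (tmp & 0xff)) > 1;
}

int main(void)
{
	struct fake_ticket_lock free_lock = { .slock = 0x0000 };  /* next == owner */
	struct fake_ticket_lock held_lock = { .slock = 0x0100 };  /* one ticket out */
	struct fake_ticket_lock busy_lock = { .slock = 0x0300 };  /* three tickets out */

	printf("free: locked=%d contended=%d\n",
	       sketch_is_locked(&free_lock), sketch_is_contended(&free_lock));
	printf("held: locked=%d contended=%d\n",
	       sketch_is_locked(&held_lock), sketch_is_contended(&held_lock));
	printf("busy: locked=%d contended=%d\n",
	       sketch_is_locked(&busy_lock), sketch_is_contended(&busy_lock));
	return 0;
}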