about summary refs log tree commit diff stats
path: root/arch/ia64/include/asm/spinlock.h
diff options
context:
space:
mode:
Diffstat (limited to 'arch/ia64/include/asm/spinlock.h')
-rw-r--r--  arch/ia64/include/asm/spinlock.h  77
1 file changed, 63 insertions(+), 14 deletions(-)
diff --git a/arch/ia64/include/asm/spinlock.h b/arch/ia64/include/asm/spinlock.h
index 0229fb95fb38..13ab71576bc7 100644
--- a/arch/ia64/include/asm/spinlock.h
+++ b/arch/ia64/include/asm/spinlock.h
@@ -120,6 +120,38 @@ do {							\
 #define __raw_read_can_lock(rw)	(*(volatile int *)(rw) >= 0)
 #define __raw_write_can_lock(rw)	(*(volatile int *)(rw) == 0)
 
+#ifdef ASM_SUPPORTED
+
+static __always_inline void
+__raw_read_lock_flags(raw_rwlock_t *lock, unsigned long flags)
+{
+	__asm__ __volatile__ (
+		"tbit.nz p6, p0 = %1,%2\n"
+		"br.few 3f\n"
+		"1:\n"
+		"fetchadd4.rel r2 = [%0], -1;;\n"
+		"(p6) ssm psr.i\n"
+		"2:\n"
+		"hint @pause\n"
+		"ld4 r2 = [%0];;\n"
+		"cmp4.lt p7,p0 = r2, r0\n"
+		"(p7) br.cond.spnt.few 2b\n"
+		"(p6) rsm psr.i\n"
+		";;\n"
+		"3:\n"
+		"fetchadd4.acq r2 = [%0], 1;;\n"
+		"cmp4.lt p7,p0 = r2, r0\n"
+		"(p7) br.cond.spnt.few 1b\n"
+		: : "r"(lock), "r"(flags), "i"(IA64_PSR_I_BIT)
+		: "p6", "p7", "r2", "memory");
+}
+
+#define __raw_read_lock(lock) __raw_read_lock_flags(lock, 0)
+
+#else /* !ASM_SUPPORTED */
+
+#define __raw_read_lock_flags(rw, flags) __raw_read_lock(rw)
+
 #define __raw_read_lock(rw)								\
 do {											\
 	raw_rwlock_t *__read_lock_ptr = (rw);						\
@@ -131,6 +163,8 @@ do {							\
 	}										\
 } while (0)
 
+#endif /* !ASM_SUPPORTED */
+
 #define __raw_read_unlock(rw)					\
 do {								\
 	raw_rwlock_t *__read_lock_ptr = (rw);			\
@@ -138,20 +172,33 @@ do {							\
 } while (0)
 
 #ifdef ASM_SUPPORTED
-#define __raw_write_lock(rw)							\
-do {										\
-	__asm__ __volatile__ (							\
-		"mov ar.ccv = r0\n"						\
-		"dep r29 = -1, r0, 31, 1;;\n"					\
-		"1:\n"								\
-		"ld4 r2 = [%0];;\n"						\
-		"cmp4.eq p0,p7 = r0,r2\n"					\
-		"(p7) br.cond.spnt.few 1b \n"					\
-		"cmpxchg4.acq r2 = [%0], r29, ar.ccv;;\n"			\
-		"cmp4.eq p0,p7 = r0, r2\n"					\
-		"(p7) br.cond.spnt.few 1b;;\n"					\
-		:: "r"(rw) : "ar.ccv", "p7", "r2", "r29", "memory");		\
-} while(0)
+
+static __always_inline void
+__raw_write_lock_flags(raw_rwlock_t *lock, unsigned long flags)
+{
+	__asm__ __volatile__ (
+		"tbit.nz p6, p0 = %1, %2\n"
+		"mov ar.ccv = r0\n"
+		"dep r29 = -1, r0, 31, 1\n"
+		"br.few 3f;;\n"
+		"1:\n"
+		"(p6) ssm psr.i\n"
+		"2:\n"
+		"hint @pause\n"
+		"ld4 r2 = [%0];;\n"
+		"cmp4.eq p0,p7 = r0, r2\n"
+		"(p7) br.cond.spnt.few 2b\n"
+		"(p6) rsm psr.i\n"
+		";;\n"
+		"3:\n"
+		"cmpxchg4.acq r2 = [%0], r29, ar.ccv;;\n"
+		"cmp4.eq p0,p7 = r0, r2\n"
+		"(p7) br.cond.spnt.few 1b;;\n"
+		: : "r"(lock), "r"(flags), "i"(IA64_PSR_I_BIT)
+		: "ar.ccv", "p6", "p7", "r2", "r29", "memory");
+}
+
+#define __raw_write_lock(rw) __raw_write_lock_flags(rw, 0)
+
 #define __raw_write_trylock(rw)						\
 ({									\
@@ -174,6 +221,8 @@ static inline void __raw_write_unlock(raw_rwlock_t *x)
 
 #else /* !ASM_SUPPORTED */
 
+#define __raw_write_lock_flags(l, flags) __raw_write_lock(l)
+
 #define __raw_write_lock(l)								\
 ({											\
 	__u64 ia64_val, ia64_set_val = ia64_dep_mi(-1, 0, 31, 1);			\