Diffstat (limited to 'arch/mips/include/asm/spinlock.h')
 -rw-r--r--   arch/mips/include/asm/spinlock.h   376
 1 file changed, 376 insertions, 0 deletions

diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
new file mode 100644
index 000000000000..bb897016c491
--- /dev/null
+++ b/arch/mips/include/asm/spinlock.h
@@ -0,0 +1,376 @@
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1999, 2000, 06 Ralf Baechle (ralf@linux-mips.org)
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 */
#ifndef _ASM_SPINLOCK_H
#define _ASM_SPINLOCK_H

#include <asm/barrier.h>
#include <asm/war.h>

/*
 * Your basic SMP spinlocks, allowing only a single CPU anywhere
 */

#define __raw_spin_is_locked(x)	((x)->lock != 0)
#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
#define __raw_spin_unlock_wait(x) \
	do { cpu_relax(); } while ((x)->lock)

/*
 * Simple spin lock operations.  There are two variants, one clears IRQs
 * on the local processor, one does not.
 *
 * We make no fairness assumptions.  They have a cost.
 */

static inline void __raw_spin_lock(raw_spinlock_t *lock)
{
	unsigned int tmp;

	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_spin_lock	\n"
		"1:	ll	%1, %2					\n"
		"	bnez	%1, 1b					\n"
		"	li	%1, 1					\n"
		"	sc	%1, %0					\n"
		"	beqzl	%1, 1b					\n"
		"	nop						\n"
		"	.set	reorder					\n"
		: "=m" (lock->lock), "=&r" (tmp)
		: "m" (lock->lock)
		: "memory");
	} else {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_spin_lock	\n"
		"1:	ll	%1, %2					\n"
		"	bnez	%1, 2f					\n"
		"	li	%1, 1					\n"
		"	sc	%1, %0					\n"
		"	beqz	%1, 2f					\n"
		"	nop						\n"
		"	.subsection 2					\n"
		"2:	ll	%1, %2					\n"
		"	bnez	%1, 2b					\n"
		"	li	%1, 1					\n"
		"	b	1b					\n"
		"	nop						\n"
		"	.previous					\n"
		"	.set	reorder					\n"
		: "=m" (lock->lock), "=&r" (tmp)
		: "m" (lock->lock)
		: "memory");
	}

	smp_llsc_mb();
}
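
For readers who don't speak MIPS assembly, the loop above is a test-and-set acquire: ll (load-linked) reads the lock word, a non-zero value means the lock is held, and sc (store-conditional) attempts to write 1, failing if another CPU touched the word in between. A minimal C sketch of the same acquire semantics, using GCC's __sync builtins (illustration only; the kernel needs the inline asm to honour R10000_LLSC_WAR, branch-likely encodings, and the .subsection slow path):

/* Hypothetical C equivalent of the ll/sc acquire loop above. */
static inline void raw_spin_lock_sketch(raw_spinlock_t *lock)
{
	/* Try to atomically change 0 -> 1; on failure, spin read-only
	 * until the holder releases, then try again. */
	while (__sync_lock_test_and_set(&lock->lock, 1))
		while (lock->lock)
			cpu_relax();
	/* __sync_lock_test_and_set has acquire semantics, playing the
	 * role of the smp_llsc_mb() after the asm. */
}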

static inline void __raw_spin_unlock(raw_spinlock_t *lock)
{
	smp_mb();

	__asm__ __volatile__(
	"	.set	noreorder	# __raw_spin_unlock	\n"
	"	sw	$0, %0					\n"
	"	.set\treorder					\n"
	: "=m" (lock->lock)
	: "m" (lock->lock)
	: "memory");
}

static inline unsigned int __raw_spin_trylock(raw_spinlock_t *lock)
{
	unsigned int temp, res;

	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_spin_trylock	\n"
		"1:	ll	%0, %3					\n"
		"	ori	%2, %0, 1				\n"
		"	sc	%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	nop						\n"
		"	andi	%2, %0, 1				\n"
		"	.set	reorder"
		: "=&r" (temp), "=m" (lock->lock), "=&r" (res)
		: "m" (lock->lock)
		: "memory");
	} else {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_spin_trylock	\n"
		"1:	ll	%0, %3					\n"
		"	ori	%2, %0, 1				\n"
		"	sc	%2, %1					\n"
		"	beqz	%2, 2f					\n"
		"	andi	%2, %0, 1				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	nop						\n"
		"	.previous					\n"
		"	.set	reorder"
		: "=&r" (temp), "=m" (lock->lock), "=&r" (res)
		: "m" (lock->lock)
		: "memory");
	}

	smp_llsc_mb();

	return res == 0;
}
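
Note the return convention: res carries the lock bit as it was before the ori/sc, so res == 0 means the lock was free and this CPU now owns it. A hypothetical C equivalent (again via a __sync builtin, and assuming the lock word only ever holds 0 or 1, as __raw_spin_lock keeps it):

/* Hypothetical C equivalent: non-zero return means we took the lock. */
static inline unsigned int raw_spin_trylock_sketch(raw_spinlock_t *lock)
{
	/* Old value 0 -> the lock was free and is now 1 (ours). */
	return __sync_lock_test_and_set(&lock->lock, 1) == 0;
}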

/*
 * Read-write spinlocks, allowing multiple readers but only one writer.
 *
 * NOTE! It is quite common to have readers in interrupts but no interrupt
 * writers. For those circumstances we can "mix" irq-safe locks - any writer
 * needs to get an irq-safe write-lock, but readers can get non-irqsafe
 * read-locks.
 */
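
As an illustration of that mixing rule, consider a hypothetical driver where an interrupt handler only ever reads and process context is the sole writer (all names below are invented for the example):

/* Hypothetical example of the irq-mixing rule described above. */
static DEFINE_RWLOCK(stats_lock);
static unsigned long stats;

/* IRQ context: reader only, so a plain read_lock() suffices. */
static irqreturn_t stats_irq(int irq, void *dev_id)
{
	unsigned long snapshot;

	read_lock(&stats_lock);
	snapshot = stats;
	read_unlock(&stats_lock);

	consume(snapshot);		/* hypothetical consumer */
	return IRQ_HANDLED;
}

/* Process context: the only writer must keep the local IRQ reader out,
 * or the handler could deadlock spinning on its own CPU's write lock. */
static void stats_update(unsigned long v)
{
	unsigned long flags;

	write_lock_irqsave(&stats_lock, flags);
	stats = v;
	write_unlock_irqrestore(&stats_lock, flags);
}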

/*
 * read_can_lock - would read_trylock() succeed?
 * @lock: the rwlock in question.
 */
#define __raw_read_can_lock(rw)	((rw)->lock >= 0)

/*
 * write_can_lock - would write_trylock() succeed?
 * @lock: the rwlock in question.
 */
#define __raw_write_can_lock(rw)	(!(rw)->lock)
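
These two tests encode the whole rwlock state in a single word whose sign bit doubles as the writer flag (the write paths below install 0x80000000 via lui %1, 0x8000) while the low bits count active readers. A hypothetical annotated restatement of the encoding, inferred from the asm that follows:

/*
 * Hypothetical view of the rwlock word:
 *
 *   word == 0                  unlocked
 *   word in 1..0x7fffffff      that many active readers
 *   word == 0x80000000         held by a writer (negative when signed)
 */
static inline int read_can_lock_sketch(raw_rwlock_t *rw)
{
	return (int)rw->lock >= 0;	/* no writer bit set */
}

static inline int write_can_lock_sketch(raw_rwlock_t *rw)
{
	return rw->lock == 0;		/* no readers, no writer */
}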

static inline void __raw_read_lock(raw_rwlock_t *rw)
{
	unsigned int tmp;

	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_read_lock	\n"
		"1:	ll	%1, %2					\n"
		"	bltz	%1, 1b					\n"
		"	addu	%1, 1					\n"
		"	sc	%1, %0					\n"
		"	beqzl	%1, 1b					\n"
		"	nop						\n"
		"	.set	reorder					\n"
		: "=m" (rw->lock), "=&r" (tmp)
		: "m" (rw->lock)
		: "memory");
	} else {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_read_lock	\n"
		"1:	ll	%1, %2					\n"
		"	bltz	%1, 2f					\n"
		"	addu	%1, 1					\n"
		"	sc	%1, %0					\n"
		"	beqz	%1, 1b					\n"
		"	nop						\n"
		"	.subsection 2					\n"
		"2:	ll	%1, %2					\n"
		"	bltz	%1, 2b					\n"
		"	addu	%1, 1					\n"
		"	b	1b					\n"
		"	nop						\n"
		"	.previous					\n"
		"	.set	reorder					\n"
		: "=m" (rw->lock), "=&r" (tmp)
		: "m" (rw->lock)
		: "memory");
	}

	smp_llsc_mb();
}
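
In C terms: spin while the word is negative (bltz, a writer holds the lock), otherwise try to bump the reader count, retrying if the sc fails. A hypothetical equivalent using a compare-and-swap builtin:

/* Hypothetical C equivalent of the reader acquire loop above. */
static inline void raw_read_lock_sketch(raw_rwlock_t *rw)
{
	int old;

	do {
		while ((old = (int)rw->lock) < 0)
			cpu_relax();		/* writer active */
	} while (!__sync_bool_compare_and_swap(&rw->lock, old, old + 1));
}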

/*
 * Note the use of sub, not subu: sub makes the kernel die with an
 * overflow exception if we ever try to unlock an rwlock that is
 * already unlocked or is being held by a writer.
 */
static inline void __raw_read_unlock(raw_rwlock_t *rw)
{
	unsigned int tmp;

	smp_llsc_mb();

	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# __raw_read_unlock	\n"
		"	sub	%1, 1					\n"
		"	sc	%1, %0					\n"
		"	beqzl	%1, 1b					\n"
		: "=m" (rw->lock), "=&r" (tmp)
		: "m" (rw->lock)
		: "memory");
	} else {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_read_unlock	\n"
		"1:	ll	%1, %2					\n"
		"	sub	%1, 1					\n"
		"	sc	%1, %0					\n"
		"	beqz	%1, 2f					\n"
		"	nop						\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	nop						\n"
		"	.previous					\n"
		"	.set	reorder					\n"
		: "=m" (rw->lock), "=&r" (tmp)
		: "m" (rw->lock)
		: "memory");
	}
}
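
A hypothetical C rendering of the release loop follows; the safety net described in the comment comes from the trapping signed sub in the asm (for instance, a writer-held word of 0x80000000, i.e. INT_MIN, overflows when decremented), which a plain C subtraction cannot reproduce:

/* Hypothetical C equivalent of the reader release loop above. */
static inline void raw_read_unlock_sketch(raw_rwlock_t *rw)
{
	unsigned int old;

	do {
		old = rw->lock;
		/* The asm uses a trapping signed sub here; the C
		 * subtraction below has no such debugging trap. */
	} while (!__sync_bool_compare_and_swap(&rw->lock, old, old - 1));
}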

static inline void __raw_write_lock(raw_rwlock_t *rw)
{
	unsigned int tmp;

	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_write_lock	\n"
		"1:	ll	%1, %2					\n"
		"	bnez	%1, 1b					\n"
		"	lui	%1, 0x8000				\n"
		"	sc	%1, %0					\n"
		"	beqzl	%1, 1b					\n"
		"	nop						\n"
		"	.set	reorder					\n"
		: "=m" (rw->lock), "=&r" (tmp)
		: "m" (rw->lock)
		: "memory");
	} else {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_write_lock	\n"
		"1:	ll	%1, %2					\n"
		"	bnez	%1, 2f					\n"
		"	lui	%1, 0x8000				\n"
		"	sc	%1, %0					\n"
		"	beqz	%1, 2f					\n"
		"	nop						\n"
		"	.subsection 2					\n"
		"2:	ll	%1, %2					\n"
		"	bnez	%1, 2b					\n"
		"	lui	%1, 0x8000				\n"
		"	b	1b					\n"
		"	nop						\n"
		"	.previous					\n"
		"	.set	reorder					\n"
		: "=m" (rw->lock), "=&r" (tmp)
		: "m" (rw->lock)
		: "memory");
	}

	smp_llsc_mb();
}
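
The writer path is the spinlock loop with a different token: it waits for a completely idle word (no readers, no writer) and then claims the sign bit. A hypothetical C equivalent:

/* Hypothetical C equivalent of the writer acquire loop above. */
static inline void raw_write_lock_sketch(raw_rwlock_t *rw)
{
	/* Wait until the word is 0, then atomically install the
	 * writer bit, 0x80000000 (what lui %1, 0x8000 builds). */
	while (!__sync_bool_compare_and_swap(&rw->lock, 0, 0x80000000u))
		cpu_relax();
}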

static inline void __raw_write_unlock(raw_rwlock_t *rw)
{
	smp_mb();

	__asm__ __volatile__(
	"				# __raw_write_unlock	\n"
	"	sw	$0, %0					\n"
	: "=m" (rw->lock)
	: "m" (rw->lock)
	: "memory");
}

static inline int __raw_read_trylock(raw_rwlock_t *rw)
{
	unsigned int tmp;
	int ret;

	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_read_trylock	\n"
		"	li	%2, 0					\n"
		"1:	ll	%1, %3					\n"
		"	bltz	%1, 2f					\n"
		"	addu	%1, 1					\n"
		"	sc	%1, %0					\n"
		"	.set	reorder					\n"
		"	beqzl	%1, 1b					\n"
		"	nop						\n"
		__WEAK_LLSC_MB
		"	li	%2, 1					\n"
		"2:							\n"
		: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
		: "m" (rw->lock)
		: "memory");
	} else {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_read_trylock	\n"
		"	li	%2, 0					\n"
		"1:	ll	%1, %3					\n"
		"	bltz	%1, 2f					\n"
		"	addu	%1, 1					\n"
		"	sc	%1, %0					\n"
		"	beqz	%1, 1b					\n"
		"	nop						\n"
		"	.set	reorder					\n"
		__WEAK_LLSC_MB
		"	li	%2, 1					\n"
		"2:							\n"
		: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
		: "m" (rw->lock)
		: "memory");
	}

	return ret;
}

static inline int __raw_write_trylock(raw_rwlock_t *rw)
{
	unsigned int tmp;
	int ret;

	if (R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_write_trylock	\n"
		"	li	%2, 0					\n"
		"1:	ll	%1, %3					\n"
		"	bnez	%1, 2f					\n"
		"	lui	%1, 0x8000				\n"
		"	sc	%1, %0					\n"
		"	beqzl	%1, 1b					\n"
		"	nop						\n"
		__WEAK_LLSC_MB
		"	li	%2, 1					\n"
		"	.set	reorder					\n"
		"2:							\n"
		: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
		: "m" (rw->lock)
		: "memory");
	} else {
		__asm__ __volatile__(
		"	.set	noreorder	# __raw_write_trylock	\n"
		"	li	%2, 0					\n"
		"1:	ll	%1, %3					\n"
		"	bnez	%1, 2f					\n"
		"	lui	%1, 0x8000				\n"
		"	sc	%1, %0					\n"
		"	beqz	%1, 3f					\n"
		"	li	%2, 1					\n"
		"2:							\n"
		__WEAK_LLSC_MB
		"	.subsection 2					\n"
		"3:	b	1b					\n"
		"	li	%2, 0					\n"
		"	.previous					\n"
		"	.set	reorder					\n"
		: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
		: "m" (rw->lock)
		: "memory");
	}

	return ret;
}
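
Both trylock paths follow the same shape: load the word, bail out with ret == 0 if it is busy, otherwise attempt the sc and report success. A hypothetical C equivalent of the write variant:

/* Hypothetical C equivalent: returns 1 if the writer bit was taken. */
static inline int raw_write_trylock_sketch(raw_rwlock_t *rw)
{
	return __sync_bool_compare_and_swap(&rw->lock, 0, 0x80000000u);
}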


#define _raw_spin_relax(lock)	cpu_relax()
#define _raw_read_relax(lock)	cpu_relax()
#define _raw_write_relax(lock)	cpu_relax()

#endif /* _ASM_SPINLOCK_H */