diff options
author | Thomas Gleixner <tglx@linutronix.de> | 2009-11-09 15:01:59 -0500 |
---|---|---|
committer | Thomas Gleixner <tglx@linutronix.de> | 2009-12-14 17:55:32 -0500 |
commit | b7b40ade58e621851896aa261452df99d4e9d99b (patch) | |
tree | 6ff6d2adea28376b86875cd070fbea6d4473d029 /kernel/spinlock.c | |
parent | 3ea6b3d0e6d0ffd91c0f8cadeb69b7133c038b32 (diff) |
locking: Reorder functions in spinlock.c
Separate spin_lock and rw_lock functions. Preempt-RT needs to exclude
the rw_lock functions from being compiled. The reordering allows us to do
that with a single #ifdef.
No functional change.
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Acked-by: Peter Zijlstra <peterz@infradead.org>
Acked-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'kernel/spinlock.c')
-rw-r--r-- | kernel/spinlock.c | 246 |
1 files changed, 123 insertions, 123 deletions
diff --git a/kernel/spinlock.c b/kernel/spinlock.c index 41e042219ff6..e6e136318437 100644 --- a/kernel/spinlock.c +++ b/kernel/spinlock.c | |||
@@ -113,41 +113,6 @@ BUILD_LOCK_OPS(write, rwlock); | |||
113 | 113 | ||
114 | #endif | 114 | #endif |
115 | 115 | ||
116 | #ifdef CONFIG_DEBUG_LOCK_ALLOC | ||
117 | |||
118 | void __lockfunc _spin_lock_nested(spinlock_t *lock, int subclass) | ||
119 | { | ||
120 | preempt_disable(); | ||
121 | spin_acquire(&lock->dep_map, subclass, 0, _RET_IP_); | ||
122 | LOCK_CONTENDED(lock, _raw_spin_trylock, _raw_spin_lock); | ||
123 | } | ||
124 | EXPORT_SYMBOL(_spin_lock_nested); | ||
125 | |||
126 | unsigned long __lockfunc _spin_lock_irqsave_nested(spinlock_t *lock, | ||
127 | int subclass) | ||
128 | { | ||
129 | unsigned long flags; | ||
130 | |||
131 | local_irq_save(flags); | ||
132 | preempt_disable(); | ||
133 | spin_acquire(&lock->dep_map, subclass, 0, _RET_IP_); | ||
134 | LOCK_CONTENDED_FLAGS(lock, _raw_spin_trylock, _raw_spin_lock, | ||
135 | _raw_spin_lock_flags, &flags); | ||
136 | return flags; | ||
137 | } | ||
138 | EXPORT_SYMBOL(_spin_lock_irqsave_nested); | ||
139 | |||
140 | void __lockfunc _spin_lock_nest_lock(spinlock_t *lock, | ||
141 | struct lockdep_map *nest_lock) | ||
142 | { | ||
143 | preempt_disable(); | ||
144 | spin_acquire_nest(&lock->dep_map, 0, 0, nest_lock, _RET_IP_); | ||
145 | LOCK_CONTENDED(lock, _raw_spin_trylock, _raw_spin_lock); | ||
146 | } | ||
147 | EXPORT_SYMBOL(_spin_lock_nest_lock); | ||
148 | |||
149 | #endif | ||
150 | |||
151 | #ifndef CONFIG_INLINE_SPIN_TRYLOCK | 116 | #ifndef CONFIG_INLINE_SPIN_TRYLOCK |
152 | int __lockfunc _spin_trylock(spinlock_t *lock) | 117 | int __lockfunc _spin_trylock(spinlock_t *lock) |
153 | { | 118 | { |
@@ -156,28 +121,20 @@ int __lockfunc _spin_trylock(spinlock_t *lock) | |||
156 | EXPORT_SYMBOL(_spin_trylock); | 121 | EXPORT_SYMBOL(_spin_trylock); |
157 | #endif | 122 | #endif |
158 | 123 | ||
159 | #ifndef CONFIG_INLINE_READ_TRYLOCK | 124 | #ifndef CONFIG_INLINE_SPIN_TRYLOCK_BH |
160 | int __lockfunc _read_trylock(rwlock_t *lock) | 125 | int __lockfunc _spin_trylock_bh(spinlock_t *lock) |
161 | { | ||
162 | return __read_trylock(lock); | ||
163 | } | ||
164 | EXPORT_SYMBOL(_read_trylock); | ||
165 | #endif | ||
166 | |||
167 | #ifndef CONFIG_INLINE_WRITE_TRYLOCK | ||
168 | int __lockfunc _write_trylock(rwlock_t *lock) | ||
169 | { | 126 | { |
170 | return __write_trylock(lock); | 127 | return __spin_trylock_bh(lock); |
171 | } | 128 | } |
172 | EXPORT_SYMBOL(_write_trylock); | 129 | EXPORT_SYMBOL(_spin_trylock_bh); |
173 | #endif | 130 | #endif |
174 | 131 | ||
175 | #ifndef CONFIG_INLINE_READ_LOCK | 132 | #ifndef CONFIG_INLINE_SPIN_LOCK |
176 | void __lockfunc _read_lock(rwlock_t *lock) | 133 | void __lockfunc _spin_lock(spinlock_t *lock) |
177 | { | 134 | { |
178 | __read_lock(lock); | 135 | __spin_lock(lock); |
179 | } | 136 | } |
180 | EXPORT_SYMBOL(_read_lock); | 137 | EXPORT_SYMBOL(_spin_lock); |
181 | #endif | 138 | #endif |
182 | 139 | ||
183 | #ifndef CONFIG_INLINE_SPIN_LOCK_IRQSAVE | 140 | #ifndef CONFIG_INLINE_SPIN_LOCK_IRQSAVE |
@@ -204,84 +161,76 @@ void __lockfunc _spin_lock_bh(spinlock_t *lock) | |||
204 | EXPORT_SYMBOL(_spin_lock_bh); | 161 | EXPORT_SYMBOL(_spin_lock_bh); |
205 | #endif | 162 | #endif |
206 | 163 | ||
207 | #ifndef CONFIG_INLINE_READ_LOCK_IRQSAVE | 164 | #ifndef CONFIG_INLINE_SPIN_UNLOCK |
208 | unsigned long __lockfunc _read_lock_irqsave(rwlock_t *lock) | 165 | void __lockfunc _spin_unlock(spinlock_t *lock) |
209 | { | ||
210 | return __read_lock_irqsave(lock); | ||
211 | } | ||
212 | EXPORT_SYMBOL(_read_lock_irqsave); | ||
213 | #endif | ||
214 | |||
215 | #ifndef CONFIG_INLINE_READ_LOCK_IRQ | ||
216 | void __lockfunc _read_lock_irq(rwlock_t *lock) | ||
217 | { | 166 | { |
218 | __read_lock_irq(lock); | 167 | __spin_unlock(lock); |
219 | } | 168 | } |
220 | EXPORT_SYMBOL(_read_lock_irq); | 169 | EXPORT_SYMBOL(_spin_unlock); |
221 | #endif | 170 | #endif |
222 | 171 | ||
223 | #ifndef CONFIG_INLINE_READ_LOCK_BH | 172 | #ifndef CONFIG_INLINE_SPIN_UNLOCK_IRQRESTORE |
224 | void __lockfunc _read_lock_bh(rwlock_t *lock) | 173 | void __lockfunc _spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags) |
225 | { | 174 | { |
226 | __read_lock_bh(lock); | 175 | __spin_unlock_irqrestore(lock, flags); |
227 | } | 176 | } |
228 | EXPORT_SYMBOL(_read_lock_bh); | 177 | EXPORT_SYMBOL(_spin_unlock_irqrestore); |
229 | #endif | 178 | #endif |
230 | 179 | ||
231 | #ifndef CONFIG_INLINE_WRITE_LOCK_IRQSAVE | 180 | #ifndef CONFIG_INLINE_SPIN_UNLOCK_IRQ |
232 | unsigned long __lockfunc _write_lock_irqsave(rwlock_t *lock) | 181 | void __lockfunc _spin_unlock_irq(spinlock_t *lock) |
233 | { | 182 | { |
234 | return __write_lock_irqsave(lock); | 183 | __spin_unlock_irq(lock); |
235 | } | 184 | } |
236 | EXPORT_SYMBOL(_write_lock_irqsave); | 185 | EXPORT_SYMBOL(_spin_unlock_irq); |
237 | #endif | 186 | #endif |
238 | 187 | ||
239 | #ifndef CONFIG_INLINE_WRITE_LOCK_IRQ | 188 | #ifndef CONFIG_INLINE_SPIN_UNLOCK_BH |
240 | void __lockfunc _write_lock_irq(rwlock_t *lock) | 189 | void __lockfunc _spin_unlock_bh(spinlock_t *lock) |
241 | { | 190 | { |
242 | __write_lock_irq(lock); | 191 | __spin_unlock_bh(lock); |
243 | } | 192 | } |
244 | EXPORT_SYMBOL(_write_lock_irq); | 193 | EXPORT_SYMBOL(_spin_unlock_bh); |
245 | #endif | 194 | #endif |
246 | 195 | ||
247 | #ifndef CONFIG_INLINE_WRITE_LOCK_BH | 196 | #ifndef CONFIG_INLINE_READ_TRYLOCK |
248 | void __lockfunc _write_lock_bh(rwlock_t *lock) | 197 | int __lockfunc _read_trylock(rwlock_t *lock) |
249 | { | 198 | { |
250 | __write_lock_bh(lock); | 199 | return __read_trylock(lock); |
251 | } | 200 | } |
252 | EXPORT_SYMBOL(_write_lock_bh); | 201 | EXPORT_SYMBOL(_read_trylock); |
253 | #endif | 202 | #endif |
254 | 203 | ||
255 | #ifndef CONFIG_INLINE_SPIN_LOCK | 204 | #ifndef CONFIG_INLINE_READ_LOCK |
256 | void __lockfunc _spin_lock(spinlock_t *lock) | 205 | void __lockfunc _read_lock(rwlock_t *lock) |
257 | { | 206 | { |
258 | __spin_lock(lock); | 207 | __read_lock(lock); |
259 | } | 208 | } |
260 | EXPORT_SYMBOL(_spin_lock); | 209 | EXPORT_SYMBOL(_read_lock); |
261 | #endif | 210 | #endif |
262 | 211 | ||
263 | #ifndef CONFIG_INLINE_WRITE_LOCK | 212 | #ifndef CONFIG_INLINE_READ_LOCK_IRQSAVE |
264 | void __lockfunc _write_lock(rwlock_t *lock) | 213 | unsigned long __lockfunc _read_lock_irqsave(rwlock_t *lock) |
265 | { | 214 | { |
266 | __write_lock(lock); | 215 | return __read_lock_irqsave(lock); |
267 | } | 216 | } |
268 | EXPORT_SYMBOL(_write_lock); | 217 | EXPORT_SYMBOL(_read_lock_irqsave); |
269 | #endif | 218 | #endif |
270 | 219 | ||
271 | #ifndef CONFIG_INLINE_SPIN_UNLOCK | 220 | #ifndef CONFIG_INLINE_READ_LOCK_IRQ |
272 | void __lockfunc _spin_unlock(spinlock_t *lock) | 221 | void __lockfunc _read_lock_irq(rwlock_t *lock) |
273 | { | 222 | { |
274 | __spin_unlock(lock); | 223 | __read_lock_irq(lock); |
275 | } | 224 | } |
276 | EXPORT_SYMBOL(_spin_unlock); | 225 | EXPORT_SYMBOL(_read_lock_irq); |
277 | #endif | 226 | #endif |
278 | 227 | ||
279 | #ifndef CONFIG_INLINE_WRITE_UNLOCK | 228 | #ifndef CONFIG_INLINE_READ_LOCK_BH |
280 | void __lockfunc _write_unlock(rwlock_t *lock) | 229 | void __lockfunc _read_lock_bh(rwlock_t *lock) |
281 | { | 230 | { |
282 | __write_unlock(lock); | 231 | __read_lock_bh(lock); |
283 | } | 232 | } |
284 | EXPORT_SYMBOL(_write_unlock); | 233 | EXPORT_SYMBOL(_read_lock_bh); |
285 | #endif | 234 | #endif |
286 | 235 | ||
287 | #ifndef CONFIG_INLINE_READ_UNLOCK | 236 | #ifndef CONFIG_INLINE_READ_UNLOCK |
@@ -292,30 +241,6 @@ void __lockfunc _read_unlock(rwlock_t *lock) | |||
292 | EXPORT_SYMBOL(_read_unlock); | 241 | EXPORT_SYMBOL(_read_unlock); |
293 | #endif | 242 | #endif |
294 | 243 | ||
295 | #ifndef CONFIG_INLINE_SPIN_UNLOCK_IRQRESTORE | ||
296 | void __lockfunc _spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags) | ||
297 | { | ||
298 | __spin_unlock_irqrestore(lock, flags); | ||
299 | } | ||
300 | EXPORT_SYMBOL(_spin_unlock_irqrestore); | ||
301 | #endif | ||
302 | |||
303 | #ifndef CONFIG_INLINE_SPIN_UNLOCK_IRQ | ||
304 | void __lockfunc _spin_unlock_irq(spinlock_t *lock) | ||
305 | { | ||
306 | __spin_unlock_irq(lock); | ||
307 | } | ||
308 | EXPORT_SYMBOL(_spin_unlock_irq); | ||
309 | #endif | ||
310 | |||
311 | #ifndef CONFIG_INLINE_SPIN_UNLOCK_BH | ||
312 | void __lockfunc _spin_unlock_bh(spinlock_t *lock) | ||
313 | { | ||
314 | __spin_unlock_bh(lock); | ||
315 | } | ||
316 | EXPORT_SYMBOL(_spin_unlock_bh); | ||
317 | #endif | ||
318 | |||
319 | #ifndef CONFIG_INLINE_READ_UNLOCK_IRQRESTORE | 244 | #ifndef CONFIG_INLINE_READ_UNLOCK_IRQRESTORE |
320 | void __lockfunc _read_unlock_irqrestore(rwlock_t *lock, unsigned long flags) | 245 | void __lockfunc _read_unlock_irqrestore(rwlock_t *lock, unsigned long flags) |
321 | { | 246 | { |
@@ -340,6 +265,54 @@ void __lockfunc _read_unlock_bh(rwlock_t *lock) | |||
340 | EXPORT_SYMBOL(_read_unlock_bh); | 265 | EXPORT_SYMBOL(_read_unlock_bh); |
341 | #endif | 266 | #endif |
342 | 267 | ||
268 | #ifndef CONFIG_INLINE_WRITE_TRYLOCK | ||
269 | int __lockfunc _write_trylock(rwlock_t *lock) | ||
270 | { | ||
271 | return __write_trylock(lock); | ||
272 | } | ||
273 | EXPORT_SYMBOL(_write_trylock); | ||
274 | #endif | ||
275 | |||
276 | #ifndef CONFIG_INLINE_WRITE_LOCK | ||
277 | void __lockfunc _write_lock(rwlock_t *lock) | ||
278 | { | ||
279 | __write_lock(lock); | ||
280 | } | ||
281 | EXPORT_SYMBOL(_write_lock); | ||
282 | #endif | ||
283 | |||
284 | #ifndef CONFIG_INLINE_WRITE_LOCK_IRQSAVE | ||
285 | unsigned long __lockfunc _write_lock_irqsave(rwlock_t *lock) | ||
286 | { | ||
287 | return __write_lock_irqsave(lock); | ||
288 | } | ||
289 | EXPORT_SYMBOL(_write_lock_irqsave); | ||
290 | #endif | ||
291 | |||
292 | #ifndef CONFIG_INLINE_WRITE_LOCK_IRQ | ||
293 | void __lockfunc _write_lock_irq(rwlock_t *lock) | ||
294 | { | ||
295 | __write_lock_irq(lock); | ||
296 | } | ||
297 | EXPORT_SYMBOL(_write_lock_irq); | ||
298 | #endif | ||
299 | |||
300 | #ifndef CONFIG_INLINE_WRITE_LOCK_BH | ||
301 | void __lockfunc _write_lock_bh(rwlock_t *lock) | ||
302 | { | ||
303 | __write_lock_bh(lock); | ||
304 | } | ||
305 | EXPORT_SYMBOL(_write_lock_bh); | ||
306 | #endif | ||
307 | |||
308 | #ifndef CONFIG_INLINE_WRITE_UNLOCK | ||
309 | void __lockfunc _write_unlock(rwlock_t *lock) | ||
310 | { | ||
311 | __write_unlock(lock); | ||
312 | } | ||
313 | EXPORT_SYMBOL(_write_unlock); | ||
314 | #endif | ||
315 | |||
343 | #ifndef CONFIG_INLINE_WRITE_UNLOCK_IRQRESTORE | 316 | #ifndef CONFIG_INLINE_WRITE_UNLOCK_IRQRESTORE |
344 | void __lockfunc _write_unlock_irqrestore(rwlock_t *lock, unsigned long flags) | 317 | void __lockfunc _write_unlock_irqrestore(rwlock_t *lock, unsigned long flags) |
345 | { | 318 | { |
@@ -364,12 +337,39 @@ void __lockfunc _write_unlock_bh(rwlock_t *lock) | |||
364 | EXPORT_SYMBOL(_write_unlock_bh); | 337 | EXPORT_SYMBOL(_write_unlock_bh); |
365 | #endif | 338 | #endif |
366 | 339 | ||
367 | #ifndef CONFIG_INLINE_SPIN_TRYLOCK_BH | 340 | #ifdef CONFIG_DEBUG_LOCK_ALLOC |
368 | int __lockfunc _spin_trylock_bh(spinlock_t *lock) | 341 | |
342 | void __lockfunc _spin_lock_nested(spinlock_t *lock, int subclass) | ||
369 | { | 343 | { |
370 | return __spin_trylock_bh(lock); | 344 | preempt_disable(); |
345 | spin_acquire(&lock->dep_map, subclass, 0, _RET_IP_); | ||
346 | LOCK_CONTENDED(lock, _raw_spin_trylock, _raw_spin_lock); | ||
371 | } | 347 | } |
372 | EXPORT_SYMBOL(_spin_trylock_bh); | 348 | EXPORT_SYMBOL(_spin_lock_nested); |
349 | |||
350 | unsigned long __lockfunc _spin_lock_irqsave_nested(spinlock_t *lock, | ||
351 | int subclass) | ||
352 | { | ||
353 | unsigned long flags; | ||
354 | |||
355 | local_irq_save(flags); | ||
356 | preempt_disable(); | ||
357 | spin_acquire(&lock->dep_map, subclass, 0, _RET_IP_); | ||
358 | LOCK_CONTENDED_FLAGS(lock, _raw_spin_trylock, _raw_spin_lock, | ||
359 | _raw_spin_lock_flags, &flags); | ||
360 | return flags; | ||
361 | } | ||
362 | EXPORT_SYMBOL(_spin_lock_irqsave_nested); | ||
363 | |||
364 | void __lockfunc _spin_lock_nest_lock(spinlock_t *lock, | ||
365 | struct lockdep_map *nest_lock) | ||
366 | { | ||
367 | preempt_disable(); | ||
368 | spin_acquire_nest(&lock->dep_map, 0, 0, nest_lock, _RET_IP_); | ||
369 | LOCK_CONTENDED(lock, _raw_spin_trylock, _raw_spin_lock); | ||
370 | } | ||
371 | EXPORT_SYMBOL(_spin_lock_nest_lock); | ||
372 | |||
373 | #endif | 373 | #endif |
374 | 374 | ||
375 | notrace int in_lock_functions(unsigned long addr) | 375 | notrace int in_lock_functions(unsigned long addr) |