| author | Linus Torvalds <torvalds@linux-foundation.org> | 2009-12-15 12:02:01 -0500 |
|---|---|---|
| committer | Linus Torvalds <torvalds@linux-foundation.org> | 2009-12-15 12:02:01 -0500 |
| commit | 8f0ddf91f2aeb09602373e400cf8b403e9017210 (patch) | |
| tree | b907c35c79caadafff6ad46a91614e30afd2f967 /include/asm-generic | |
| parent | 050cbb09dac0402672edeaeac06094ef8ff1749a (diff) | |
| parent | b5f91da0a6973bb6f9ff3b91b0e92c0773a458f3 (diff) | |
Merge branch 'core-locking-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip
* 'core-locking-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip: (26 commits)
clockevents: Convert to raw_spinlock
clockevents: Make tick_device_lock static
debugobjects: Convert to raw_spinlocks
perf_event: Convert to raw_spinlock
hrtimers: Convert to raw_spinlocks
genirq: Convert irq_desc.lock to raw_spinlock
smp: Convert smplocks to raw_spinlocks
rtmutes: Convert rtmutex.lock to raw_spinlock
sched: Convert pi_lock to raw_spinlock
sched: Convert cpupri lock to raw_spinlock
sched: Convert rt_runtime_lock to raw_spinlock
sched: Convert rq->lock to raw_spinlock
plist: Make plist debugging raw_spinlock aware
bkl: Fixup core_lock fallout
locking: Cleanup the name space completely
locking: Further name space cleanups
alpha: Fix fallout from locking changes
locking: Implement new raw_spinlock
locking: Convert raw_rwlock functions to arch_rwlock
locking: Convert raw_rwlock to arch_rwlock
...
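In short, the series renames the architecture-level primitives (raw_spinlock_t/raw_rwlock_t become arch_spinlock_t/arch_rwlock_t) and introduces a new raw_spinlock layer above them that always spins. A minimal sketch of the resulting type layering, simplified from include/linux/spinlock_types.h with the debug/lockdep members omitted:

    /* Simplified sketch of the post-series lock layering; the real
     * definitions in include/linux/spinlock_types.h carry additional
     * debug and lockdep fields.
     */

    /* Lowest level: the architecture's lock word (layout is arch-specific). */
    typedef struct {
    	volatile unsigned int slock;
    } arch_spinlock_t;

    /* raw_spinlock_t wraps the arch lock; it always busy-waits. */
    typedef struct raw_spinlock {
    	arch_spinlock_t raw_lock;
    } raw_spinlock_t;

    /* spinlock_t wraps raw_spinlock_t and is the type ordinary code uses. */
    typedef struct spinlock {
    	struct raw_spinlock rlock;
    } spinlock_t;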
Diffstat (limited to 'include/asm-generic')
-rw-r--r-- | include/asm-generic/bitops/atomic.h | 10 |
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/include/asm-generic/bitops/atomic.h b/include/asm-generic/bitops/atomic.h
index c8946465e63a..ecc44a8e2b44 100644
--- a/include/asm-generic/bitops/atomic.h
+++ b/include/asm-generic/bitops/atomic.h
@@ -15,19 +15,19 @@
 # define ATOMIC_HASH_SIZE 4
 # define ATOMIC_HASH(a) (&(__atomic_hash[ (((unsigned long) a)/L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE-1) ]))
 
-extern raw_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
+extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
 
 /* Can't use raw_spin_lock_irq because of #include problems, so
  * this is the substitute */
 #define _atomic_spin_lock_irqsave(l,f) do {	\
-	raw_spinlock_t *s = ATOMIC_HASH(l);	\
+	arch_spinlock_t *s = ATOMIC_HASH(l);	\
 	local_irq_save(f);			\
-	__raw_spin_lock(s);			\
+	arch_spin_lock(s);			\
 } while(0)
 
 #define _atomic_spin_unlock_irqrestore(l,f) do {	\
-	raw_spinlock_t *s = ATOMIC_HASH(l);		\
-	__raw_spin_unlock(s);				\
+	arch_spinlock_t *s = ATOMIC_HASH(l);		\
+	arch_spin_unlock(s);				\
 	local_irq_restore(f);				\
 } while(0)
 