Diffstat (limited to 'arch/arm/include/asm')
 arch/arm/include/asm/mach/irq.h        |  4 ++--
 arch/arm/include/asm/spinlock.h        | 40 ++++++++++++++++++++----------------------
 arch/arm/include/asm/spinlock_types.h  |  8 ++++----
 3 files changed, 26 insertions(+), 26 deletions(-)
diff --git a/arch/arm/include/asm/mach/irq.h b/arch/arm/include/asm/mach/irq.h
index acac5302e4ea..8920b2d6e3b8 100644
--- a/arch/arm/include/asm/mach/irq.h
+++ b/arch/arm/include/asm/mach/irq.h
@@ -26,9 +26,9 @@ extern int show_fiq_list(struct seq_file *, void *);
  */
 #define do_bad_IRQ(irq,desc)				\
 do {							\
-	spin_lock(&desc->lock);				\
+	raw_spin_lock(&desc->lock);			\
 	handle_bad_irq(irq, desc);			\
-	spin_unlock(&desc->lock);			\
+	raw_spin_unlock(&desc->lock);			\
 } while(0)
 
 #endif
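
For orientation, a minimal sketch of how the updated macro would be used by a hypothetical machine-level handler; the function name is illustrative and not part of the patch, and it assumes the core IRQ layer's struct irq_desc (whose lock field is a raw_spinlock_t) and handle_bad_irq():

	static void example_handle_unexpected_irq(unsigned int irq, struct irq_desc *desc)
	{
		/* After this patch, expands to raw_spin_lock(&desc->lock);
		 * handle_bad_irq(irq, desc); raw_spin_unlock(&desc->lock); */
		do_bad_IRQ(irq, desc);
	}
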
diff --git a/arch/arm/include/asm/spinlock.h b/arch/arm/include/asm/spinlock.h
index c13681ac1ede..c91c64cab922 100644
--- a/arch/arm/include/asm/spinlock.h
+++ b/arch/arm/include/asm/spinlock.h
@@ -17,13 +17,13 @@
  * Locked value: 1
  */
 
-#define __raw_spin_is_locked(x)		((x)->lock != 0)
-#define __raw_spin_unlock_wait(lock) \
-	do { while (__raw_spin_is_locked(lock)) cpu_relax(); } while (0)
+#define arch_spin_is_locked(x)		((x)->lock != 0)
+#define arch_spin_unlock_wait(lock) \
+	do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
 
-#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
+#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
 
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static inline void arch_spin_lock(arch_spinlock_t *lock)
 {
 	unsigned long tmp;
 
@@ -43,7 +43,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 	smp_mb();
 }
 
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static inline int arch_spin_trylock(arch_spinlock_t *lock)
 {
 	unsigned long tmp;
 
@@ -63,7 +63,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	}
 }
 
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
 	smp_mb();
 
@@ -86,7 +86,7 @@ static inline void __raw_spin_unlock(raw_spinlock_t *lock)
  * just write zero since the lock is exclusively held.
  */
 
-static inline void __raw_write_lock(raw_rwlock_t *rw)
+static inline void arch_write_lock(arch_rwlock_t *rw)
 {
 	unsigned long tmp;
 
@@ -106,7 +106,7 @@ static inline void __raw_write_lock(raw_rwlock_t *rw)
 	smp_mb();
 }
 
-static inline int __raw_write_trylock(raw_rwlock_t *rw)
+static inline int arch_write_trylock(arch_rwlock_t *rw)
 {
 	unsigned long tmp;
 
@@ -126,7 +126,7 @@ static inline int __raw_write_trylock(raw_rwlock_t *rw)
 	}
 }
 
-static inline void __raw_write_unlock(raw_rwlock_t *rw)
+static inline void arch_write_unlock(arch_rwlock_t *rw)
 {
 	smp_mb();
 
@@ -142,7 +142,7 @@ static inline void __raw_write_unlock(raw_rwlock_t *rw)
 }
 
 /* write_can_lock - would write_trylock() succeed? */
-#define __raw_write_can_lock(x)		((x)->lock == 0)
+#define arch_write_can_lock(x)		((x)->lock == 0)
 
 /*
  * Read locks are a bit more hairy:
@@ -156,7 +156,7 @@ static inline void __raw_write_unlock(raw_rwlock_t *rw)
  * currently active.  However, we know we won't have any write
  * locks.
  */
-static inline void __raw_read_lock(raw_rwlock_t *rw)
+static inline void arch_read_lock(arch_rwlock_t *rw)
 {
 	unsigned long tmp, tmp2;
 
@@ -176,7 +176,7 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 	smp_mb();
 }
 
-static inline void __raw_read_unlock(raw_rwlock_t *rw)
+static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
 	unsigned long tmp, tmp2;
 
@@ -198,7 +198,7 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 	: "cc");
 }
 
-static inline int __raw_read_trylock(raw_rwlock_t *rw)
+static inline int arch_read_trylock(arch_rwlock_t *rw)
 {
 	unsigned long tmp, tmp2 = 1;
 
@@ -215,13 +215,13 @@ static inline int __raw_read_trylock(raw_rwlock_t *rw)
 }
 
 /* read_can_lock - would read_trylock() succeed? */
-#define __raw_read_can_lock(x)		((x)->lock < 0x80000000)
+#define arch_read_can_lock(x)		((x)->lock < 0x80000000)
 
-#define __raw_read_lock_flags(lock, flags) __raw_read_lock(lock)
-#define __raw_write_lock_flags(lock, flags) __raw_write_lock(lock)
+#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
+#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)
 
-#define _raw_spin_relax(lock)	cpu_relax()
-#define _raw_read_relax(lock)	cpu_relax()
-#define _raw_write_relax(lock)	cpu_relax()
+#define arch_spin_relax(lock)	cpu_relax()
+#define arch_read_relax(lock)	cpu_relax()
+#define arch_write_relax(lock)	cpu_relax()
 
 #endif /* __ASM_SPINLOCK_H */
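
As a hedged sketch of where these renamed primitives sit: ordinary kernel code keeps calling spin_lock()/raw_spin_lock(), and it is the generic locking layer that reaches down into the per-architecture arch_spin_lock() defined above. The wrappers below (sketch_ names are illustrative; lockdep, debug checks and UP special cases omitted) show that layering, not the exact upstream implementation:

	/* Simplified sketch of the generic layer calling into the ARM arch_* code. */
	static inline void sketch_raw_spin_lock(raw_spinlock_t *lock)
	{
		preempt_disable();
		arch_spin_lock(&lock->raw_lock);	/* the ldrex/strex loop above */
	}

	static inline void sketch_raw_spin_unlock(raw_spinlock_t *lock)
	{
		arch_spin_unlock(&lock->raw_lock);
		preempt_enable();
	}
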
diff --git a/arch/arm/include/asm/spinlock_types.h b/arch/arm/include/asm/spinlock_types.h
index 43e83f6d2ee5..d14d197ae04a 100644
--- a/arch/arm/include/asm/spinlock_types.h
+++ b/arch/arm/include/asm/spinlock_types.h
@@ -7,14 +7,14 @@
 
 typedef struct {
 	volatile unsigned int lock;
-} raw_spinlock_t;
+} arch_spinlock_t;
 
-#define __RAW_SPIN_LOCK_UNLOCKED	{ 0 }
+#define __ARCH_SPIN_LOCK_UNLOCKED	{ 0 }
 
 typedef struct {
 	volatile unsigned int lock;
-} raw_rwlock_t;
+} arch_rwlock_t;
 
-#define __RAW_RW_LOCK_UNLOCKED		{ 0 }
+#define __ARCH_RW_LOCK_UNLOCKED		{ 0 }
 
 #endif
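
Finally, a simplified sketch (debug and lockdep members omitted; not the literal upstream definitions) of how the generic lock types wrap the per-architecture arch_spinlock_t after this rename, which is what frees the raw_spinlock_t name for the core code:

	/* Sketch: the generic types now embed the architecture-specific type. */
	typedef struct raw_spinlock {
		arch_spinlock_t raw_lock;	/* the ARM type defined above */
	} raw_spinlock_t;

	typedef struct spinlock {
		struct raw_spinlock rlock;
	} spinlock_t;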