diff options
| author | Thomas Gleixner <tglx@linutronix.de> | 2009-12-02 14:01:25 -0500 |
|---|---|---|
| committer | Thomas Gleixner <tglx@linutronix.de> | 2009-12-14 17:55:32 -0500 |
| commit | 0199c4e68d1f02894bdefe4b5d9e9ee4aedd8d62 (patch) | |
| tree | e371d17bd73d64332349debbf45962ec67e7269d /arch/sparc/include | |
| parent | edc35bd72e2079b25f99c5da7d7a65dbbffc4a26 (diff) | |
locking: Convert __raw_spin* functions to arch_spin*
Name space cleanup. No functional change.
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Acked-by: Peter Zijlstra <peterz@infradead.org>
Acked-by: David S. Miller <davem@davemloft.net>
Acked-by: Ingo Molnar <mingo@elte.hu>
Cc: linux-arch@vger.kernel.org
Diffstat (limited to 'arch/sparc/include')
| -rw-r--r-- | arch/sparc/include/asm/spinlock_32.h | 20 | ||||
| -rw-r--r-- | arch/sparc/include/asm/spinlock_64.h | 18 |
2 files changed, 19 insertions, 19 deletions
diff --git a/arch/sparc/include/asm/spinlock_32.h b/arch/sparc/include/asm/spinlock_32.h index b2d8a67f727e..9b0f2f53c81c 100644 --- a/arch/sparc/include/asm/spinlock_32.h +++ b/arch/sparc/include/asm/spinlock_32.h | |||
| @@ -10,12 +10,12 @@ | |||
| 10 | 10 | ||
| 11 | #include <asm/psr.h> | 11 | #include <asm/psr.h> |
| 12 | 12 | ||
| 13 | #define __raw_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) | 13 | #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) |
| 14 | 14 | ||
| 15 | #define __raw_spin_unlock_wait(lock) \ | 15 | #define arch_spin_unlock_wait(lock) \ |
| 16 | do { while (__raw_spin_is_locked(lock)) cpu_relax(); } while (0) | 16 | do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0) |
| 17 | 17 | ||
| 18 | static inline void __raw_spin_lock(arch_spinlock_t *lock) | 18 | static inline void arch_spin_lock(arch_spinlock_t *lock) |
| 19 | { | 19 | { |
| 20 | __asm__ __volatile__( | 20 | __asm__ __volatile__( |
| 21 | "\n1:\n\t" | 21 | "\n1:\n\t" |
| @@ -35,7 +35,7 @@ static inline void __raw_spin_lock(arch_spinlock_t *lock) | |||
| 35 | : "g2", "memory", "cc"); | 35 | : "g2", "memory", "cc"); |
| 36 | } | 36 | } |
| 37 | 37 | ||
| 38 | static inline int __raw_spin_trylock(arch_spinlock_t *lock) | 38 | static inline int arch_spin_trylock(arch_spinlock_t *lock) |
| 39 | { | 39 | { |
| 40 | unsigned int result; | 40 | unsigned int result; |
| 41 | __asm__ __volatile__("ldstub [%1], %0" | 41 | __asm__ __volatile__("ldstub [%1], %0" |
| @@ -45,7 +45,7 @@ static inline int __raw_spin_trylock(arch_spinlock_t *lock) | |||
| 45 | return (result == 0); | 45 | return (result == 0); |
| 46 | } | 46 | } |
| 47 | 47 | ||
| 48 | static inline void __raw_spin_unlock(arch_spinlock_t *lock) | 48 | static inline void arch_spin_unlock(arch_spinlock_t *lock) |
| 49 | { | 49 | { |
| 50 | __asm__ __volatile__("stb %%g0, [%0]" : : "r" (lock) : "memory"); | 50 | __asm__ __volatile__("stb %%g0, [%0]" : : "r" (lock) : "memory"); |
| 51 | } | 51 | } |
| @@ -176,13 +176,13 @@ static inline int arch_read_trylock(raw_rwlock_t *rw) | |||
| 176 | 176 | ||
| 177 | #define __raw_write_unlock(rw) do { (rw)->lock = 0; } while(0) | 177 | #define __raw_write_unlock(rw) do { (rw)->lock = 0; } while(0) |
| 178 | 178 | ||
| 179 | #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock) | 179 | #define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock) |
| 180 | #define __raw_read_lock_flags(rw, flags) __raw_read_lock(rw) | 180 | #define __raw_read_lock_flags(rw, flags) __raw_read_lock(rw) |
| 181 | #define __raw_write_lock_flags(rw, flags) __raw_write_lock(rw) | 181 | #define __raw_write_lock_flags(rw, flags) __raw_write_lock(rw) |
| 182 | 182 | ||
| 183 | #define _raw_spin_relax(lock) cpu_relax() | 183 | #define arch_spin_relax(lock) cpu_relax() |
| 184 | #define _raw_read_relax(lock) cpu_relax() | 184 | #define arch_read_relax(lock) cpu_relax() |
| 185 | #define _raw_write_relax(lock) cpu_relax() | 185 | #define arch_write_relax(lock) cpu_relax() |
| 186 | 186 | ||
| 187 | #define __raw_read_can_lock(rw) (!((rw)->lock & 0xff)) | 187 | #define __raw_read_can_lock(rw) (!((rw)->lock & 0xff)) |
| 188 | #define __raw_write_can_lock(rw) (!(rw)->lock) | 188 | #define __raw_write_can_lock(rw) (!(rw)->lock) |
diff --git a/arch/sparc/include/asm/spinlock_64.h b/arch/sparc/include/asm/spinlock_64.h index 38e16c40efc4..7cf58a2fcda4 100644 --- a/arch/sparc/include/asm/spinlock_64.h +++ b/arch/sparc/include/asm/spinlock_64.h | |||
| @@ -21,13 +21,13 @@ | |||
| 21 | * the spinner sections must be pre-V9 branches. | 21 | * the spinner sections must be pre-V9 branches. |
| 22 | */ | 22 | */ |
| 23 | 23 | ||
| 24 | #define __raw_spin_is_locked(lp) ((lp)->lock != 0) | 24 | #define arch_spin_is_locked(lp) ((lp)->lock != 0) |
| 25 | 25 | ||
| 26 | #define __raw_spin_unlock_wait(lp) \ | 26 | #define arch_spin_unlock_wait(lp) \ |
| 27 | do { rmb(); \ | 27 | do { rmb(); \ |
| 28 | } while((lp)->lock) | 28 | } while((lp)->lock) |
| 29 | 29 | ||
| 30 | static inline void __raw_spin_lock(arch_spinlock_t *lock) | 30 | static inline void arch_spin_lock(arch_spinlock_t *lock) |
| 31 | { | 31 | { |
| 32 | unsigned long tmp; | 32 | unsigned long tmp; |
| 33 | 33 | ||
| @@ -46,7 +46,7 @@ static inline void __raw_spin_lock(arch_spinlock_t *lock) | |||
| 46 | : "memory"); | 46 | : "memory"); |
| 47 | } | 47 | } |
| 48 | 48 | ||
| 49 | static inline int __raw_spin_trylock(arch_spinlock_t *lock) | 49 | static inline int arch_spin_trylock(arch_spinlock_t *lock) |
| 50 | { | 50 | { |
| 51 | unsigned long result; | 51 | unsigned long result; |
| 52 | 52 | ||
| @@ -59,7 +59,7 @@ static inline int __raw_spin_trylock(arch_spinlock_t *lock) | |||
| 59 | return (result == 0UL); | 59 | return (result == 0UL); |
| 60 | } | 60 | } |
| 61 | 61 | ||
| 62 | static inline void __raw_spin_unlock(arch_spinlock_t *lock) | 62 | static inline void arch_spin_unlock(arch_spinlock_t *lock) |
| 63 | { | 63 | { |
| 64 | __asm__ __volatile__( | 64 | __asm__ __volatile__( |
| 65 | " stb %%g0, [%0]" | 65 | " stb %%g0, [%0]" |
| @@ -68,7 +68,7 @@ static inline void __raw_spin_unlock(arch_spinlock_t *lock) | |||
| 68 | : "memory"); | 68 | : "memory"); |
| 69 | } | 69 | } |
| 70 | 70 | ||
| 71 | static inline void __raw_spin_lock_flags(arch_spinlock_t *lock, unsigned long flags) | 71 | static inline void arch_spin_lock_flags(arch_spinlock_t *lock, unsigned long flags) |
| 72 | { | 72 | { |
| 73 | unsigned long tmp1, tmp2; | 73 | unsigned long tmp1, tmp2; |
| 74 | 74 | ||
| @@ -222,9 +222,9 @@ static int inline arch_write_trylock(raw_rwlock_t *lock) | |||
| 222 | #define __raw_read_can_lock(rw) (!((rw)->lock & 0x80000000UL)) | 222 | #define __raw_read_can_lock(rw) (!((rw)->lock & 0x80000000UL)) |
| 223 | #define __raw_write_can_lock(rw) (!(rw)->lock) | 223 | #define __raw_write_can_lock(rw) (!(rw)->lock) |
| 224 | 224 | ||
| 225 | #define _raw_spin_relax(lock) cpu_relax() | 225 | #define arch_spin_relax(lock) cpu_relax() |
| 226 | #define _raw_read_relax(lock) cpu_relax() | 226 | #define arch_read_relax(lock) cpu_relax() |
| 227 | #define _raw_write_relax(lock) cpu_relax() | 227 | #define arch_write_relax(lock) cpu_relax() |
| 228 | 228 | ||
| 229 | #endif /* !(__ASSEMBLY__) */ | 229 | #endif /* !(__ASSEMBLY__) */ |
| 230 | 230 | ||