Diffstat (limited to 'arch/sparc/include/asm/spinlock_32.h'):

 -rw-r--r--   arch/sparc/include/asm/spinlock_32.h   62
 1 file changed, 31 insertions(+), 31 deletions(-)
diff --git a/arch/sparc/include/asm/spinlock_32.h b/arch/sparc/include/asm/spinlock_32.h
index 857630cff636..7f9b9dba38a6 100644
--- a/arch/sparc/include/asm/spinlock_32.h
+++ b/arch/sparc/include/asm/spinlock_32.h
@@ -10,12 +10,12 @@
 
 #include <asm/psr.h>
 
-#define __raw_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)
+#define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)
 
-#define __raw_spin_unlock_wait(lock) \
-do { while (__raw_spin_is_locked(lock)) cpu_relax(); } while (0)
+#define arch_spin_unlock_wait(lock) \
+do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
 
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static inline void arch_spin_lock(arch_spinlock_t *lock)
 {
         __asm__ __volatile__(
         "\n1:\n\t"
@@ -35,7 +35,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
         : "g2", "memory", "cc");
 }
 
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static inline int arch_spin_trylock(arch_spinlock_t *lock)
 {
         unsigned int result;
         __asm__ __volatile__("ldstub [%1], %0"
@@ -45,7 +45,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
         return (result == 0);
 }
 
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
         __asm__ __volatile__("stb %%g0, [%0]" : : "r" (lock) : "memory");
 }
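For context: the spinlock here is a single byte, and sparc32's ldstub instruction atomically loads that byte and stores 0xff into it, so arch_spin_trylock() succeeds exactly when the value it loaded back was zero. A minimal sketch of the same test-and-set protocol in portable GNU C (the demo_* names are illustrative, not kernel API):

/* Sketch of the ldstub-style test-and-set protocol using GCC's
 * __atomic builtins; illustrative only, not the kernel's code. */
typedef struct { unsigned char lock; } demo_spinlock_t;

static inline int demo_spin_trylock(demo_spinlock_t *l)
{
        /* ldstub analogue: atomically fetch the old byte, store 0xff */
        return __atomic_exchange_n(&l->lock, 0xff, __ATOMIC_ACQUIRE) == 0;
}

static inline void demo_spin_lock(demo_spinlock_t *l)
{
        while (!demo_spin_trylock(l))
                ;       /* the kernel calls cpu_relax() while spinning */
}

static inline void demo_spin_unlock(demo_spinlock_t *l)
{
        /* "stb %%g0, [%0]" analogue: store zero to release the byte */
        __atomic_store_n(&l->lock, 0, __ATOMIC_RELEASE);
}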
@@ -65,7 +65,7 @@ static inline void __raw_spin_unlock(raw_spinlock_t *lock)
  * Sort of like atomic_t's on Sparc, but even more clever.
  *
  * ------------------------------------
- * | 24-bit counter           | wlock |  raw_rwlock_t
+ * | 24-bit counter           | wlock |  arch_rwlock_t
  * ------------------------------------
  *  31                       8 7      0
  *
@@ -76,9 +76,9 @@ static inline void __raw_spin_unlock(raw_spinlock_t *lock)
  *
  * Unfortunately this scheme limits us to ~16,000,000 cpus.
  */
-static inline void arch_read_lock(raw_rwlock_t *rw)
+static inline void __arch_read_lock(arch_rwlock_t *rw)
 {
-        register raw_rwlock_t *lp asm("g1");
+        register arch_rwlock_t *lp asm("g1");
         lp = rw;
         __asm__ __volatile__(
         "mov    %%o7, %%g4\n\t"
@@ -89,16 +89,16 @@ static inline void arch_read_lock(raw_rwlock_t *rw)
         : "g2", "g4", "memory", "cc");
 }
 
-#define __raw_read_lock(lock) \
+#define arch_read_lock(lock) \
 do {    unsigned long flags; \
         local_irq_save(flags); \
-        arch_read_lock(lock); \
+        __arch_read_lock(lock); \
         local_irq_restore(flags); \
 } while(0)
 
-static inline void arch_read_unlock(raw_rwlock_t *rw)
+static inline void __arch_read_unlock(arch_rwlock_t *rw)
 {
-        register raw_rwlock_t *lp asm("g1");
+        register arch_rwlock_t *lp asm("g1");
         lp = rw;
         __asm__ __volatile__(
         "mov    %%o7, %%g4\n\t"
@@ -109,16 +109,16 @@ static inline void arch_read_unlock(raw_rwlock_t *rw)
         : "g2", "g4", "memory", "cc");
 }
 
-#define __raw_read_unlock(lock) \
+#define arch_read_unlock(lock) \
 do {    unsigned long flags; \
         local_irq_save(flags); \
-        arch_read_unlock(lock); \
+        __arch_read_unlock(lock); \
         local_irq_restore(flags); \
 } while(0)
 
-static inline void __raw_write_lock(raw_rwlock_t *rw)
+static inline void arch_write_lock(arch_rwlock_t *rw)
 {
-        register raw_rwlock_t *lp asm("g1");
+        register arch_rwlock_t *lp asm("g1");
         lp = rw;
         __asm__ __volatile__(
         "mov    %%o7, %%g4\n\t"
@@ -130,7 +130,7 @@ static inline void __raw_write_lock(raw_rwlock_t *rw)
         *(volatile __u32 *)&lp->lock = ~0U;
 }
 
-static inline int __raw_write_trylock(raw_rwlock_t *rw)
+static inline int arch_write_trylock(arch_rwlock_t *rw)
 {
         unsigned int val;
 
@@ -150,9 +150,9 @@ static inline int __raw_write_trylock(raw_rwlock_t *rw)
         return (val == 0);
 }
 
-static inline int arch_read_trylock(raw_rwlock_t *rw)
+static inline int __arch_read_trylock(arch_rwlock_t *rw)
 {
-        register raw_rwlock_t *lp asm("g1");
+        register arch_rwlock_t *lp asm("g1");
         register int res asm("o0");
         lp = rw;
         __asm__ __volatile__(
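__arch_read_trylock() returns its result in %o0, nonzero meaning the read lock was taken. Its effect on the packed word can be modeled roughly as follows (a hedged, non-atomic C model of logic that actually lives in out-of-line assembly; not kernel code):

/* Rough semantic model of a read-trylock on the packed word:
 * fail if any writer holds the low byte, otherwise bump the
 * 24-bit reader count.  The real helpers serialize with ldstub. */
static int model_read_trylock(unsigned int *word)
{
        if (*word & 0xff)               /* write-lock byte held */
                return 0;
        *word += 1u << 8;               /* one more reader      */
        return 1;                       /* nonzero == acquired  */
}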
@@ -165,27 +165,27 @@ static inline int arch_read_trylock(raw_rwlock_t *rw)
         return res;
 }
 
-#define __raw_read_trylock(lock) \
+#define arch_read_trylock(lock) \
 ({      unsigned long flags; \
         int res; \
         local_irq_save(flags); \
-        res = arch_read_trylock(lock); \
+        res = __arch_read_trylock(lock); \
         local_irq_restore(flags); \
         res; \
 })
 
-#define __raw_write_unlock(rw)  do { (rw)->lock = 0; } while(0)
+#define arch_write_unlock(rw)   do { (rw)->lock = 0; } while(0)
 
-#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
-#define __raw_read_lock_flags(rw, flags)   __raw_read_lock(rw)
-#define __raw_write_lock_flags(rw, flags)  __raw_write_lock(rw)
+#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
+#define arch_read_lock_flags(rw, flags)   arch_read_lock(rw)
+#define arch_write_lock_flags(rw, flags)  arch_write_lock(rw)
 
-#define _raw_spin_relax(lock)   cpu_relax()
-#define _raw_read_relax(lock)   cpu_relax()
-#define _raw_write_relax(lock)  cpu_relax()
+#define arch_spin_relax(lock)   cpu_relax()
+#define arch_read_relax(lock)   cpu_relax()
+#define arch_write_relax(lock)  cpu_relax()
 
-#define __raw_read_can_lock(rw) (!((rw)->lock & 0xff))
-#define __raw_write_can_lock(rw) (!(rw)->lock)
+#define arch_read_can_lock(rw) (!((rw)->lock & 0xff))
+#define arch_write_can_lock(rw) (!(rw)->lock)
 
 #endif /* !(__ASSEMBLY__) */
 
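After this rename the architecture exports arch_spin_*/arch_read_*/arch_write_* operations, which is what the generic locking layer in include/linux/spinlock.h now expects; the __raw_* spellings are gone from the arch side. A small caller sketch showing the trylock convention these macros follow, where a nonzero return means the lock was acquired (demo_peek() is illustrative only; real code goes through the spin_lock()/read_lock() wrappers):

/* Illustrative consumer of the renamed ops; not kernel code. */
static int demo_peek(arch_rwlock_t *rw, const int *src, int *dst)
{
        if (!arch_read_trylock(rw))     /* nonzero == acquired */
                return 0;
        *dst = *src;                    /* read-side critical section */
        arch_read_unlock(rw);
        return 1;
}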
