author     Bart Van Assche <bart.vanassche@gmail.com>   2008-06-28 02:51:35 -0400
committer  Paul Mackerras <paulus@samba.org>             2008-06-30 21:29:00 -0400
commit     89b5810f6ed4b2d42415e5ec656ab6b148cd2bde (patch)
tree       e3116100d5be38f2ebfaa078cd53033d1f612059 /include/asm-powerpc/spinlock.h
parent     fcbc5a976b1cafe2e866871c86d239d57503bfd5 (diff)
powerpc: Make sure that include/asm-powerpc/spinlock.h does not trigger compilation warnings
When compiling kernel modules for ppc that include <linux/spinlock.h>,
gcc prints a warning every time it encounters a function declaration
in which the inline keyword appears after the return type.
This patch puts the inline keyword before the return type, as gcc
expects. It also replaces the __inline__ keyword with inline, as
checkpatch expects.
Signed-off-by: Bart Van Assche <bart.vanassche@gmail.com>
Signed-off-by: Paul Mackerras <paulus@samba.org>
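For illustration only (not part of the commit), here is a minimal stand-alone example of the declaration style the patch removes. The file and function names are hypothetical, and the assumption here is that the warning in question comes from gcc's -Wold-style-declaration check, which -Wextra enables:

/* inline-order-demo.c: hypothetical example, not kernel code.
 * Build with something like: gcc -Wall -Wextra inline-order-demo.c
 */

/* Declaration style the patch removes: the inline keyword follows the
 * return type, so gcc reports a warning along the lines of
 * "'inline' is not at beginning of declaration". */
static int __inline__ trylock_old_style(int *lock)
{
	return *lock == 0;
}

/* Declaration style the patch introduces: the specifier precedes the
 * return type and is spelled "inline", which also keeps checkpatch.pl quiet. */
static inline int trylock_new_style(int *lock)
{
	return *lock == 0;
}

int main(void)
{
	int lock = 0;

	/* Call both variants so -Wall does not also flag them as unused. */
	return (trylock_old_style(&lock) && trylock_new_style(&lock)) ? 0 : 1;
}

Note that the patch changes both the keyword order (for gcc) and the spelling __inline__ -> inline (for checkpatch), as the commit message states.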
Diffstat (limited to 'include/asm-powerpc/spinlock.h')
-rw-r--r--   include/asm-powerpc/spinlock.h   27
1 file changed, 14 insertions(+), 13 deletions(-)
diff --git a/include/asm-powerpc/spinlock.h b/include/asm-powerpc/spinlock.h
index 258c93993190..f56a843f4705 100644
--- a/include/asm-powerpc/spinlock.h
+++ b/include/asm-powerpc/spinlock.h
@@ -54,7 +54,7 @@
  * This returns the old value in the lock, so we succeeded
  * in getting the lock if the return value is 0.
  */
-static __inline__ unsigned long __spin_trylock(raw_spinlock_t *lock)
+static inline unsigned long __spin_trylock(raw_spinlock_t *lock)
 {
 	unsigned long tmp, token;
 
@@ -73,7 +73,7 @@ static __inline__ unsigned long __spin_trylock(raw_spinlock_t *lock)
 	return tmp;
 }
 
-static int __inline__ __raw_spin_trylock(raw_spinlock_t *lock)
+static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
 	CLEAR_IO_SYNC;
 	return __spin_trylock(lock) == 0;
@@ -104,7 +104,7 @@ extern void __rw_yield(raw_rwlock_t *lock);
 #define SHARED_PROCESSOR	0
 #endif
 
-static void __inline__ __raw_spin_lock(raw_spinlock_t *lock)
+static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
 	CLEAR_IO_SYNC;
 	while (1) {
@@ -119,7 +119,8 @@ static void __inline__ __raw_spin_lock(raw_spinlock_t *lock)
 	}
 }
 
-static void __inline__ __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
+static inline
+void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
 {
 	unsigned long flags_dis;
 
@@ -139,7 +140,7 @@ static void __inline__ __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long
 	}
 }
 
-static __inline__ void __raw_spin_unlock(raw_spinlock_t *lock)
+static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
 	SYNC_IO;
 	__asm__ __volatile__("# __raw_spin_unlock\n\t"
@@ -180,7 +181,7 @@ extern void __raw_spin_unlock_wait(raw_spinlock_t *lock);
  * This returns the old value in the lock + 1,
  * so we got a read lock if the return value is > 0.
  */
-static long __inline__ __read_trylock(raw_rwlock_t *rw)
+static inline long __read_trylock(raw_rwlock_t *rw)
 {
 	long tmp;
 
@@ -204,7 +205,7 @@ static long __inline__ __read_trylock(raw_rwlock_t *rw)
  * This returns the old value in the lock,
  * so we got the write lock if the return value is 0.
  */
-static __inline__ long __write_trylock(raw_rwlock_t *rw)
+static inline long __write_trylock(raw_rwlock_t *rw)
 {
 	long tmp, token;
 
@@ -224,7 +225,7 @@ static __inline__ long __write_trylock(raw_rwlock_t *rw)
 	return tmp;
 }
 
-static void __inline__ __raw_read_lock(raw_rwlock_t *rw)
+static inline void __raw_read_lock(raw_rwlock_t *rw)
 {
 	while (1) {
 		if (likely(__read_trylock(rw) > 0))
@@ -238,7 +239,7 @@ static void __inline__ __raw_read_lock(raw_rwlock_t *rw)
 	}
 }
 
-static void __inline__ __raw_write_lock(raw_rwlock_t *rw)
+static inline void __raw_write_lock(raw_rwlock_t *rw)
 {
 	while (1) {
 		if (likely(__write_trylock(rw) == 0))
@@ -252,17 +253,17 @@ static void __inline__ __raw_write_lock(raw_rwlock_t *rw)
 	}
 }
 
-static int __inline__ __raw_read_trylock(raw_rwlock_t *rw)
+static inline int __raw_read_trylock(raw_rwlock_t *rw)
 {
 	return __read_trylock(rw) > 0;
 }
 
-static int __inline__ __raw_write_trylock(raw_rwlock_t *rw)
+static inline int __raw_write_trylock(raw_rwlock_t *rw)
 {
 	return __write_trylock(rw) == 0;
 }
 
-static void __inline__ __raw_read_unlock(raw_rwlock_t *rw)
+static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
 	long tmp;
 
@@ -279,7 +280,7 @@ static void __inline__ __raw_read_unlock(raw_rwlock_t *rw)
 		: "cr0", "memory");
 }
 
-static __inline__ void __raw_write_unlock(raw_rwlock_t *rw)
+static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
 	__asm__ __volatile__("# write_unlock\n\t"
 				LWSYNC_ON_SMP: : :"memory");