author		Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>	2008-03-06 07:45:46 -0500
committer	Ingo Molnar <mingo@elte.hu>	2008-03-21 12:06:15 -0400
commit		3078b79d257054c3697c85936afce38595e7b67b (patch)
tree		bbf090b07a5a9fc5b40a45dfc19ba775098db8c2 /include/asm-x86/cmpxchg_32.h
parent		7d2de1376226eab4660e162e0728eadd18db3c4a (diff)
x86: cast cmpxchg and cmpxchg_local result for 386 and 486
mm/slub.c: In function 'slab_alloc':
mm/slub.c:1637: warning: assignment makes pointer from integer without a cast
mm/slub.c:1637: warning: assignment makes pointer from integer without a cast
mm/slub.c: In function 'slab_free':
mm/slub.c:1796: warning: assignment makes pointer from integer without a cast
mm/slub.c:1796: warning: assignment makes pointer from integer without a cast
A cast is needed in the 386 and 486 fallback code because the operand type is a
pointer here. For every integer type the original cmpxchg code (and the
cmpxchg_local code copied from it) worked fine, but since we are dealing with a
pointer, the result has to be cast back to __typeof__(*(ptr)) in the cmpxchg
and cmpxchg_local macros.
The more recent (586+) code does not have this problem; the cast is already
there.
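To make the warning concrete, here is a minimal sketch of cmpxchg() used with
pointer operands, the situation this patch addresses. It is not taken from
mm/slub.c; the function and variable names are illustrative, and it assumes the
32-bit kernel headers that define cmpxchg() are in scope.

/*
 * Illustrative only: cmpxchg() on a pointer slot, the way the slub
 * freelist code uses it.
 */
static void *take_head(void **freelist, void *old_head, void *new_head)
{
	void *object;

	/*
	 * Expands to "__ret = __cmpxchg(...)" or "__ret = cmpxchg_386(...)"
	 * inside the macro.  __ret is void * here while both helpers return
	 * unsigned long, hence the two "assignment makes pointer from
	 * integer without a cast" warnings per call site before this patch.
	 * With the (__typeof__(*(ptr))) cast added below, the assignment is
	 * clean and the expression already has pointer type.
	 */
	object = cmpxchg(freelist, old_head, new_head);
	return object;
}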
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Reviewed-by: Christoph Lameter <clameter@sgi.com>
Cc: Vegard Nossum <vegard.nossum@gmail.com>
Cc: Pekka Enberg <penberg@cs.helsinki.fi>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'include/asm-x86/cmpxchg_32.h')
-rw-r--r--	include/asm-x86/cmpxchg_32.h	32
1 file changed, 20 insertions(+), 12 deletions(-)
diff --git a/include/asm-x86/cmpxchg_32.h b/include/asm-x86/cmpxchg_32.h
index cea1dae288a7..959fad00dff5 100644
--- a/include/asm-x86/cmpxchg_32.h
+++ b/include/asm-x86/cmpxchg_32.h
@@ -269,22 +269,26 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
 ({								\
 	__typeof__(*(ptr)) __ret;				\
 	if (likely(boot_cpu_data.x86 > 3))			\
-		__ret = __cmpxchg((ptr), (unsigned long)(o),	\
-				(unsigned long)(n), sizeof(*(ptr))); \
+		__ret = (__typeof__(*(ptr)))__cmpxchg((ptr),	\
+				(unsigned long)(o), (unsigned long)(n), \
+				sizeof(*(ptr)));		\
 	else							\
-		__ret = cmpxchg_386((ptr), (unsigned long)(o),	\
-				(unsigned long)(n), sizeof(*(ptr))); \
+		__ret = (__typeof__(*(ptr)))cmpxchg_386((ptr),	\
+				(unsigned long)(o), (unsigned long)(n), \
+				sizeof(*(ptr)));		\
 	__ret;							\
 })
 #define cmpxchg_local(ptr, o, n)				\
 ({								\
 	__typeof__(*(ptr)) __ret;				\
 	if (likely(boot_cpu_data.x86 > 3))			\
-		__ret = __cmpxchg_local((ptr), (unsigned long)(o), \
-				(unsigned long)(n), sizeof(*(ptr))); \
+		__ret = (__typeof__(*(ptr)))__cmpxchg_local((ptr), \
+				(unsigned long)(o), (unsigned long)(n), \
+				sizeof(*(ptr)));		\
 	else							\
-		__ret = cmpxchg_386((ptr), (unsigned long)(o),	\
-				(unsigned long)(n), sizeof(*(ptr))); \
+		__ret = (__typeof__(*(ptr)))cmpxchg_386((ptr),	\
+				(unsigned long)(o), (unsigned long)(n), \
+				sizeof(*(ptr)));		\
 	__ret;							\
 })
 #endif
@@ -301,10 +305,12 @@ extern unsigned long long cmpxchg_486_u64(volatile void *, u64, u64);
 ({								\
 	__typeof__(*(ptr)) __ret;				\
 	if (likely(boot_cpu_data.x86 > 4))			\
-		__ret = __cmpxchg64((ptr), (unsigned long long)(o), \
+		__ret = (__typeof__(*(ptr)))__cmpxchg64((ptr),	\
+				(unsigned long long)(o),	\
 				(unsigned long long)(n));	\
 	else							\
-		__ret = cmpxchg_486_u64((ptr), (unsigned long long)(o), \
+		__ret = (__typeof__(*(ptr)))cmpxchg_486_u64((ptr), \
+				(unsigned long long)(o),	\
 				(unsigned long long)(n));	\
 	__ret;							\
 })
@@ -312,10 +318,12 @@ extern unsigned long long cmpxchg_486_u64(volatile void *, u64, u64);
 ({								\
 	__typeof__(*(ptr)) __ret;				\
 	if (likely(boot_cpu_data.x86 > 4))			\
-		__ret = __cmpxchg64_local((ptr), (unsigned long long)(o), \
+		__ret = (__typeof__(*(ptr)))__cmpxchg64_local((ptr), \
+				(unsigned long long)(o),	\
 				(unsigned long long)(n));	\
 	else							\
-		__ret = cmpxchg_486_u64((ptr), (unsigned long long)(o), \
+		__ret = (__typeof__(*(ptr)))cmpxchg_486_u64((ptr), \
+				(unsigned long long)(o),	\
 				(unsigned long long)(n));	\
 	__ret;							\
 })
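For reference outside the kernel tree, a self-contained sketch of the same
pattern the patch applies: a statement-expression macro whose integer-returning
helper result is cast back to __typeof__(*(ptr)) so that pointer operands
compile without warnings. The names my_cmpxchg and my_cmpxchg_helper are made
up for illustration, and the helper is a plain, non-atomic stand-in, not the
kernel's implementation; it builds with gcc on a 64-bit Linux host where
unsigned long has pointer size.

#include <stdio.h>

/* Non-atomic stand-in for cmpxchg_386(): traffics only in unsigned long. */
static unsigned long my_cmpxchg_helper(volatile void *ptr, unsigned long old,
				       unsigned long new, int size)
{
	unsigned long prev = *(unsigned long *)ptr;

	(void)size;			/* a real helper would switch on size */
	if (prev == old)
		*(unsigned long *)ptr = new;
	return prev;
}

/*
 * The pattern from the patch: cast the helper's unsigned long result back
 * to the type of *ptr, so __ret (and the whole expression) has the
 * caller's type even when that type is a pointer.
 */
#define my_cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))my_cmpxchg_helper((ptr),		\
			(unsigned long)(o), (unsigned long)(n),		\
			sizeof(*(ptr)));				\
	__ret;								\
})

int main(void)
{
	int value = 1;
	int *slot = &value;		/* the word being swapped is a pointer */
	int **pp = &slot;

	/* No "makes pointer from integer" warning: the result is int *. */
	int *old = my_cmpxchg(pp, &value, (int *)0);

	printf("old=%p, slot is now %p\n", (void *)old, (void *)*pp);
	return 0;
}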