author     Linus Torvalds <torvalds@linux-foundation.org>  2012-01-09 16:08:28 -0500
committer  Linus Torvalds <torvalds@linux-foundation.org>  2012-01-09 16:08:28 -0500
commit     6b3da11b3c36fdafce3a72e0e90d6c4e99e9aad5 (patch)
tree       33e64fc453619978c3ecc86c7ec3649db3b4d8dc /arch/x86/include/asm
parent     db0c2bf69aa095d4a6de7b1145f29fe9a7c0f6a3 (diff)
parent     933393f58fef9963eac61db8093689544e29a600 (diff)
Merge branch 'for-3.3' of git://git.kernel.org/pub/scm/linux/kernel/git/tj/percpu
* 'for-3.3' of git://git.kernel.org/pub/scm/linux/kernel/git/tj/percpu:
percpu: Remove irqsafe_cpu_xxx variants
Fix up conflict in arch/x86/include/asm/percpu.h due to clash with
cebef5beed3d ("x86: Fix and improve percpu_cmpxchg{8,16}b_double()")
which edited the (now removed) irqsafe_cpu_cmpxchg*_double code.
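The removed irqsafe_cpu_*() operations get no direct replacement: the series instead requires the plain this_cpu_*() operations to be safe regardless of interrupt state, which they already are on x86 since they compile to single instructions on the per-CPU segment. As a rough illustration of what that means at a call site (the per-CPU counter and the function below are hypothetical, not part of this patch):

#include <linux/percpu.h>

/* Hypothetical per-CPU statistics counter, for illustration only. */
static DEFINE_PER_CPU(unsigned long, demo_rx_packets);

static void demo_count_packet(void)
{
	/*
	 * Before this series, code that could run with interrupts enabled
	 * would have used:
	 *
	 *	irqsafe_cpu_add(demo_rx_packets, 1);
	 *
	 * With the irqsafe variants gone, the plain this_cpu operation
	 * carries the same guarantee, so the call simply becomes:
	 */
	this_cpu_add(demo_rx_packets, 1);
}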
Diffstat (limited to 'arch/x86/include/asm')
-rw-r--r--  arch/x86/include/asm/percpu.h  28
1 file changed, 0 insertions, 28 deletions
diff --git a/arch/x86/include/asm/percpu.h b/arch/x86/include/asm/percpu.h
index 529bf07e806..7a11910a63c 100644
--- a/arch/x86/include/asm/percpu.h
+++ b/arch/x86/include/asm/percpu.h
@@ -414,22 +414,6 @@ do { \
 #define this_cpu_xchg_2(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define this_cpu_xchg_4(pcp, nval)	percpu_xchg_op(pcp, nval)
 
-#define irqsafe_cpu_add_1(pcp, val)	percpu_add_op((pcp), val)
-#define irqsafe_cpu_add_2(pcp, val)	percpu_add_op((pcp), val)
-#define irqsafe_cpu_add_4(pcp, val)	percpu_add_op((pcp), val)
-#define irqsafe_cpu_and_1(pcp, val)	percpu_to_op("and", (pcp), val)
-#define irqsafe_cpu_and_2(pcp, val)	percpu_to_op("and", (pcp), val)
-#define irqsafe_cpu_and_4(pcp, val)	percpu_to_op("and", (pcp), val)
-#define irqsafe_cpu_or_1(pcp, val)	percpu_to_op("or", (pcp), val)
-#define irqsafe_cpu_or_2(pcp, val)	percpu_to_op("or", (pcp), val)
-#define irqsafe_cpu_or_4(pcp, val)	percpu_to_op("or", (pcp), val)
-#define irqsafe_cpu_xor_1(pcp, val)	percpu_to_op("xor", (pcp), val)
-#define irqsafe_cpu_xor_2(pcp, val)	percpu_to_op("xor", (pcp), val)
-#define irqsafe_cpu_xor_4(pcp, val)	percpu_to_op("xor", (pcp), val)
-#define irqsafe_cpu_xchg_1(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_xchg_2(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_xchg_4(pcp, nval)	percpu_xchg_op(pcp, nval)
-
 #ifndef CONFIG_M386
 #define __this_cpu_add_return_1(pcp, val)	percpu_add_return_op(pcp, val)
 #define __this_cpu_add_return_2(pcp, val)	percpu_add_return_op(pcp, val)
@@ -445,9 +429,6 @@ do { \
 #define this_cpu_cmpxchg_2(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 #define this_cpu_cmpxchg_4(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 
-#define irqsafe_cpu_cmpxchg_1(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
-#define irqsafe_cpu_cmpxchg_2(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
-#define irqsafe_cpu_cmpxchg_4(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 #endif /* !CONFIG_M386 */
 
 #ifdef CONFIG_X86_CMPXCHG64
@@ -464,7 +445,6 @@ do { \
 
 #define __this_cpu_cmpxchg_double_4	percpu_cmpxchg8b_double
 #define this_cpu_cmpxchg_double_4	percpu_cmpxchg8b_double
-#define irqsafe_cpu_cmpxchg_double_4	percpu_cmpxchg8b_double
 #endif /* CONFIG_X86_CMPXCHG64 */
 
 /*
@@ -492,13 +472,6 @@ do { \
 #define this_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define this_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 
-#define irqsafe_cpu_add_8(pcp, val)	percpu_add_op((pcp), val)
-#define irqsafe_cpu_and_8(pcp, val)	percpu_to_op("and", (pcp), val)
-#define irqsafe_cpu_or_8(pcp, val)	percpu_to_op("or", (pcp), val)
-#define irqsafe_cpu_xor_8(pcp, val)	percpu_to_op("xor", (pcp), val)
-#define irqsafe_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
-
 /*
  * Pretty complex macro to generate cmpxchg16 instruction. The instruction
  * is not supported on early AMD64 processors so we must be able to emulate
@@ -521,7 +494,6 @@ do { \
 
 #define __this_cpu_cmpxchg_double_8	percpu_cmpxchg16b_double
 #define this_cpu_cmpxchg_double_8	percpu_cmpxchg16b_double
-#define irqsafe_cpu_cmpxchg_double_8	percpu_cmpxchg16b_double
 
 #endif
 
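The this_cpu_cmpxchg_double_{4,8} mappings kept by the last two hunks (backed by the cmpxchg8b/cmpxchg16b helpers, including the emulation path for early AMD64 processors noted in the quoted comment) are what callers use now that the irqsafe_cpu_cmpxchg_double_* names are gone. A rough caller-side sketch follows; the struct, per-CPU variable, and function are illustrative assumptions, not taken from this patch (the real in-tree user of this pattern is the SLUB allocator's freelist/tid pair):

#include <linux/percpu.h>

/*
 * Hypothetical pair of per-CPU words, for illustration only.  The two
 * members must be adjacent and the pair naturally aligned to twice the
 * word size, because the x86 implementation replaces both members with
 * a single cmpxchg8b/cmpxchg16b.
 */
struct demo_pair {
	unsigned long head;
	unsigned long gen;
} __aligned(2 * sizeof(unsigned long));

static DEFINE_PER_CPU(struct demo_pair, demo_pair);

static void demo_publish(unsigned long new_head)
{
	unsigned long old_head, old_gen;

	do {
		old_head = this_cpu_read(demo_pair.head);
		old_gen  = this_cpu_read(demo_pair.gen);
		/*
		 * Both words are replaced atomically; if an interrupt
		 * changed either one between the reads and the
		 * cmpxchg-double, the compare fails and we retry.
		 */
	} while (!this_cpu_cmpxchg_double(demo_pair.head, demo_pair.gen,
					  old_head, old_gen,
					  new_head, old_gen + 1));
}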