author		Christoph Lameter <cl@linux.com>	2011-12-22 12:58:51 -0500
committer	Tejun Heo <tj@kernel.org>	2011-12-22 13:40:20 -0500
commit		933393f58fef9963eac61db8093689544e29a600
tree		719f8b231499aa4ea023bc1a06db4582df5f0965 /arch
parent		ecefc36b41ac0fe92d76273a23faf27b2da13411
percpu: Remove irqsafe_cpu_xxx variants
We simply say that regular this_cpu use must be safe regardless of
preemption and interrupt state. This is no material change for the x86
and s390 implementations of the this_cpu operations. However, arches
that do not provide their own implementation of the this_cpu operations
will now get generated code that disables interrupts instead of
preemption.
-tj: This is part of the ongoing percpu API cleanup. For a detailed
discussion of the subject, please refer to the following thread.
http://thread.gmane.org/gmane.linux.kernel/1222078
Signed-off-by: Christoph Lameter <cl@linux.com>
Signed-off-by: Tejun Heo <tj@kernel.org>
LKML-Reference: <alpine.DEB.2.00.1112221154380.11787@router.home>
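
To make the consequence concrete: below is a minimal sketch of the generic
fallback implied by this change, assuming a macro shaped like the one in
include/linux/percpu.h (the name generic_this_cpu_to_op and the details are
illustrative, not the exact upstream code). Previously the generic op only
disabled preemption; folding in the irqsafe guarantee means disabling
interrupts around the read-modify-write so it cannot race with an interrupt
handler on the same CPU.

/*
 * Hypothetical sketch, not the exact upstream macro.  "op" is a
 * compound assignment such as +=.  Interrupts are disabled so the
 * read-modify-write is atomic w.r.t. interrupt handlers on this CPU.
 */
#define generic_this_cpu_to_op(pcp, val, op)			\
do {								\
	unsigned long flags__;					\
	local_irq_save(flags__);  /* was: preempt_disable() */	\
	*__this_cpu_ptr(&(pcp)) op val;				\
	local_irq_restore(flags__); /* was: preempt_enable() */	\
} while (0)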
Diffstat (limited to 'arch')
-rw-r--r--	arch/s390/include/asm/percpu.h	44
-rw-r--r--	arch/x86/include/asm/percpu.h	28
2 files changed, 22 insertions, 50 deletions
diff --git a/arch/s390/include/asm/percpu.h b/arch/s390/include/asm/percpu.h
index 5325c89a5843..0fbd1899c7b0 100644
--- a/arch/s390/include/asm/percpu.h
+++ b/arch/s390/include/asm/percpu.h
@@ -19,7 +19,7 @@
 #define ARCH_NEEDS_WEAK_PER_CPU
 #endif
 
-#define arch_irqsafe_cpu_to_op(pcp, val, op) \
+#define arch_this_cpu_to_op(pcp, val, op) \
 do { \
 	typedef typeof(pcp) pcp_op_T__; \
 	pcp_op_T__ old__, new__, prev__; \
@@ -41,27 +41,27 @@ do { \
 	preempt_enable(); \
 } while (0)
 
-#define irqsafe_cpu_add_1(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, +)
-#define irqsafe_cpu_add_2(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, +)
-#define irqsafe_cpu_add_4(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, +)
-#define irqsafe_cpu_add_8(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, +)
+#define this_cpu_add_1(pcp, val) arch_this_cpu_to_op(pcp, val, +)
+#define this_cpu_add_2(pcp, val) arch_this_cpu_to_op(pcp, val, +)
+#define this_cpu_add_4(pcp, val) arch_this_cpu_to_op(pcp, val, +)
+#define this_cpu_add_8(pcp, val) arch_this_cpu_to_op(pcp, val, +)
 
-#define irqsafe_cpu_and_1(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, &)
-#define irqsafe_cpu_and_2(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, &)
-#define irqsafe_cpu_and_4(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, &)
-#define irqsafe_cpu_and_8(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, &)
+#define this_cpu_and_1(pcp, val) arch_this_cpu_to_op(pcp, val, &)
+#define this_cpu_and_2(pcp, val) arch_this_cpu_to_op(pcp, val, &)
+#define this_cpu_and_4(pcp, val) arch_this_cpu_to_op(pcp, val, &)
+#define this_cpu_and_8(pcp, val) arch_this_cpu_to_op(pcp, val, &)
 
-#define irqsafe_cpu_or_1(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, |)
-#define irqsafe_cpu_or_2(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, |)
-#define irqsafe_cpu_or_4(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, |)
-#define irqsafe_cpu_or_8(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, |)
+#define this_cpu_or_1(pcp, val) arch_this_cpu_to_op(pcp, val, |)
+#define this_cpu_or_2(pcp, val) arch_this_cpu_to_op(pcp, val, |)
+#define this_cpu_or_4(pcp, val) arch_this_cpu_to_op(pcp, val, |)
+#define this_cpu_or_8(pcp, val) arch_this_cpu_to_op(pcp, val, |)
 
-#define irqsafe_cpu_xor_1(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, ^)
-#define irqsafe_cpu_xor_2(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, ^)
-#define irqsafe_cpu_xor_4(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, ^)
-#define irqsafe_cpu_xor_8(pcp, val) arch_irqsafe_cpu_to_op(pcp, val, ^)
+#define this_cpu_xor_1(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
+#define this_cpu_xor_2(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
+#define this_cpu_xor_4(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
+#define this_cpu_xor_8(pcp, val) arch_this_cpu_to_op(pcp, val, ^)
 
-#define arch_irqsafe_cpu_cmpxchg(pcp, oval, nval) \
+#define arch_this_cpu_cmpxchg(pcp, oval, nval) \
 ({ \
 	typedef typeof(pcp) pcp_op_T__; \
 	pcp_op_T__ ret__; \
@@ -79,10 +79,10 @@ do { \
 	ret__; \
 })
 
-#define irqsafe_cpu_cmpxchg_1(pcp, oval, nval) arch_irqsafe_cpu_cmpxchg(pcp, oval, nval)
-#define irqsafe_cpu_cmpxchg_2(pcp, oval, nval) arch_irqsafe_cpu_cmpxchg(pcp, oval, nval)
-#define irqsafe_cpu_cmpxchg_4(pcp, oval, nval) arch_irqsafe_cpu_cmpxchg(pcp, oval, nval)
-#define irqsafe_cpu_cmpxchg_8(pcp, oval, nval) arch_irqsafe_cpu_cmpxchg(pcp, oval, nval)
+#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
+#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
+#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
+#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
 
 #include <asm-generic/percpu.h>
 
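The s390 conversion is a pure rename because arch_this_cpu_to_op already
provides the stronger guarantee: the update is done as a compare-and-swap
retry loop under preempt_disable(), and a cmpxchg loop either observes the
value an interrupt handler left behind or retries, so it is safe against
interrupts without ever disabling them. A hypothetical sketch of such a
retry loop follows (illustrative only; see the file itself for the macro
body elided from the hunk above):

	/* illustrative cmpxchg retry loop, not the exact s390 code */
	preempt_disable();
	ptr__ = __this_cpu_ptr(&(pcp));
	prev__ = *ptr__;
	do {
		old__ = prev__;
		new__ = old__ op (val);
		prev__ = cmpxchg(ptr__, old__, new__);
	} while (prev__ != old__);
	preempt_enable();
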
diff --git a/arch/x86/include/asm/percpu.h b/arch/x86/include/asm/percpu.h
index 3470c9d0ebba..562ccb5323de 100644
--- a/arch/x86/include/asm/percpu.h
+++ b/arch/x86/include/asm/percpu.h
@@ -414,22 +414,6 @@ do { \
 #define this_cpu_xchg_2(pcp, nval) percpu_xchg_op(pcp, nval)
 #define this_cpu_xchg_4(pcp, nval) percpu_xchg_op(pcp, nval)
 
-#define irqsafe_cpu_add_1(pcp, val) percpu_add_op((pcp), val)
-#define irqsafe_cpu_add_2(pcp, val) percpu_add_op((pcp), val)
-#define irqsafe_cpu_add_4(pcp, val) percpu_add_op((pcp), val)
-#define irqsafe_cpu_and_1(pcp, val) percpu_to_op("and", (pcp), val)
-#define irqsafe_cpu_and_2(pcp, val) percpu_to_op("and", (pcp), val)
-#define irqsafe_cpu_and_4(pcp, val) percpu_to_op("and", (pcp), val)
-#define irqsafe_cpu_or_1(pcp, val) percpu_to_op("or", (pcp), val)
-#define irqsafe_cpu_or_2(pcp, val) percpu_to_op("or", (pcp), val)
-#define irqsafe_cpu_or_4(pcp, val) percpu_to_op("or", (pcp), val)
-#define irqsafe_cpu_xor_1(pcp, val) percpu_to_op("xor", (pcp), val)
-#define irqsafe_cpu_xor_2(pcp, val) percpu_to_op("xor", (pcp), val)
-#define irqsafe_cpu_xor_4(pcp, val) percpu_to_op("xor", (pcp), val)
-#define irqsafe_cpu_xchg_1(pcp, nval) percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_xchg_2(pcp, nval) percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_xchg_4(pcp, nval) percpu_xchg_op(pcp, nval)
-
 #ifndef CONFIG_M386
 #define __this_cpu_add_return_1(pcp, val) percpu_add_return_op(pcp, val)
 #define __this_cpu_add_return_2(pcp, val) percpu_add_return_op(pcp, val)
@@ -445,9 +429,6 @@ do { \
 #define this_cpu_cmpxchg_2(pcp, oval, nval) percpu_cmpxchg_op(pcp, oval, nval)
 #define this_cpu_cmpxchg_4(pcp, oval, nval) percpu_cmpxchg_op(pcp, oval, nval)
 
-#define irqsafe_cpu_cmpxchg_1(pcp, oval, nval) percpu_cmpxchg_op(pcp, oval, nval)
-#define irqsafe_cpu_cmpxchg_2(pcp, oval, nval) percpu_cmpxchg_op(pcp, oval, nval)
-#define irqsafe_cpu_cmpxchg_4(pcp, oval, nval) percpu_cmpxchg_op(pcp, oval, nval)
 #endif /* !CONFIG_M386 */
 
 #ifdef CONFIG_X86_CMPXCHG64
@@ -467,7 +448,6 @@ do { \
 
 #define __this_cpu_cmpxchg_double_4(pcp1, pcp2, o1, o2, n1, n2) percpu_cmpxchg8b_double(pcp1, o1, o2, n1, n2)
 #define this_cpu_cmpxchg_double_4(pcp1, pcp2, o1, o2, n1, n2) percpu_cmpxchg8b_double(pcp1, o1, o2, n1, n2)
-#define irqsafe_cpu_cmpxchg_double_4(pcp1, pcp2, o1, o2, n1, n2) percpu_cmpxchg8b_double(pcp1, o1, o2, n1, n2)
 #endif /* CONFIG_X86_CMPXCHG64 */
 
 /*
@@ -495,13 +475,6 @@ do { \
 #define this_cpu_xchg_8(pcp, nval) percpu_xchg_op(pcp, nval)
 #define this_cpu_cmpxchg_8(pcp, oval, nval) percpu_cmpxchg_op(pcp, oval, nval)
 
-#define irqsafe_cpu_add_8(pcp, val) percpu_add_op((pcp), val)
-#define irqsafe_cpu_and_8(pcp, val) percpu_to_op("and", (pcp), val)
-#define irqsafe_cpu_or_8(pcp, val) percpu_to_op("or", (pcp), val)
-#define irqsafe_cpu_xor_8(pcp, val) percpu_to_op("xor", (pcp), val)
-#define irqsafe_cpu_xchg_8(pcp, nval) percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_cmpxchg_8(pcp, oval, nval) percpu_cmpxchg_op(pcp, oval, nval)
-
 /*
  * Pretty complex macro to generate cmpxchg16 instruction. The instruction
  * is not supported on early AMD64 processors so we must be able to emulate
@@ -532,7 +505,6 @@ do { \
 
 #define __this_cpu_cmpxchg_double_8(pcp1, pcp2, o1, o2, n1, n2) percpu_cmpxchg16b_double(pcp1, o1, o2, n1, n2)
 #define this_cpu_cmpxchg_double_8(pcp1, pcp2, o1, o2, n1, n2) percpu_cmpxchg16b_double(pcp1, o1, o2, n1, n2)
-#define irqsafe_cpu_cmpxchg_double_8(pcp1, pcp2, o1, o2, n1, n2) percpu_cmpxchg16b_double(pcp1, o1, o2, n1, n2)
 
 #endif
 
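For callers the removal is transparent: wherever irqsafe_cpu_add() and
friends were used, the plain this_cpu_add() now carries the same guarantee.
A hypothetical usage sketch (the counter and handler names are made up for
illustration):

#include <linux/percpu.h>
#include <linux/interrupt.h>

static DEFINE_PER_CPU(unsigned long, demo_nr_events);

static irqreturn_t demo_handler(int irq, void *dev_id)
{
	/* formerly irqsafe_cpu_add(demo_nr_events, 1) */
	this_cpu_add(demo_nr_events, 1);
	return IRQ_HANDLED;
}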