 arch/x86/include/asm/percpu.h | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/x86/include/asm/percpu.h b/arch/x86/include/asm/percpu.h
index 8ee45167e817..3788f4649db4 100644
--- a/arch/x86/include/asm/percpu.h
+++ b/arch/x86/include/asm/percpu.h
@@ -414,8 +414,6 @@ do { \
 #define this_cpu_xchg_1(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define this_cpu_xchg_2(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define this_cpu_xchg_4(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define this_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define this_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 
 #define irqsafe_cpu_add_1(pcp, val)	percpu_add_op((pcp), val)
 #define irqsafe_cpu_add_2(pcp, val)	percpu_add_op((pcp), val)
@@ -432,8 +430,6 @@ do { \
 #define irqsafe_cpu_xchg_1(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define irqsafe_cpu_xchg_2(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define irqsafe_cpu_xchg_4(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 
 #ifndef CONFIG_M386
 #define __this_cpu_add_return_1(pcp, val)	percpu_add_return_op(pcp, val)
@@ -475,11 +471,15 @@ do { \
 #define this_cpu_or_8(pcp, val)		percpu_to_op("or", (pcp), val)
 #define this_cpu_xor_8(pcp, val)	percpu_to_op("xor", (pcp), val)
 #define this_cpu_add_return_8(pcp, val)	percpu_add_return_op(pcp, val)
+#define this_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
+#define this_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 
 #define irqsafe_cpu_add_8(pcp, val)	percpu_add_op((pcp), val)
 #define irqsafe_cpu_and_8(pcp, val)	percpu_to_op("and", (pcp), val)
 #define irqsafe_cpu_or_8(pcp, val)	percpu_to_op("or", (pcp), val)
 #define irqsafe_cpu_xor_8(pcp, val)	percpu_to_op("xor", (pcp), val)
+#define irqsafe_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
+#define irqsafe_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 #endif
 
 /* This is not atomic against other CPUs -- CPU preemption needs to be off */
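
Note (not part of the patch): the hunks above move the 8-byte this_cpu_xchg_8/this_cpu_cmpxchg_8 and irqsafe_cpu_xchg_8/irqsafe_cpu_cmpxchg_8 definitions out of the generic section and down next to the other *_8 per-cpu operations, which in this file are built only for 64-bit kernels, so 32-bit builds fall back to the generic implementations. For context, here is a minimal, illustrative sketch of how the generic this_cpu_cmpxchg()/this_cpu_xchg() accessors that dispatch to these size-specific macros are typically used; the names demo_owner, try_claim_cpu_slot and release_cpu_slot are hypothetical and do not appear in this patch.

/*
 * Illustrative only -- not part of the patch.  A per-CPU 64-bit "owner"
 * slot claimed with this_cpu_cmpxchg(); because the variable is 8 bytes
 * wide, the size-dispatching machinery selects the _8 variants touched
 * by this diff on configurations that provide them.
 */
#include <linux/percpu.h>
#include <linux/types.h>

static DEFINE_PER_CPU(u64, demo_owner);

static bool try_claim_cpu_slot(u64 id)
{
	/* Returns the previous value; 0 means the slot was free and is now ours. */
	return this_cpu_cmpxchg(demo_owner, 0, id) == 0;
}

static void release_cpu_slot(void)
{
	/* Unconditionally swap the slot back to "free", discarding the old owner. */
	this_cpu_xchg(demo_owner, 0);
}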
