author    Christoph Lameter <cl@linux.com>    2011-01-11 12:54:53 -0500
committer Tejun Heo <tj@kernel.org>    2011-01-11 12:54:53 -0500
commit    2485b6464cf86a5bc361666838f2439c99c00567 (patch)
tree      2b4961197f15b59c9151752547c78982c9b52dc5 /arch
parent    5b2eef966cb2ae307aa4ef1767f7307774bc96ca (diff)
x86,percpu: Move out of place 64 bit ops into X86_64 section
Some operations that take 64-bit operands are defined in the 32-bit section. Move them into the correct (X86_64-only) section.

Signed-off-by: Christoph Lameter <cl@linux.com>
Signed-off-by: Tejun Heo <tj@kernel.org>
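As context for the change, here is a minimal, hypothetical sketch of how the 8-byte operations touched by this patch are typically reached from common code. The per-cpu variable and function names (demo_seq, demo_update) are illustrative and not part of the patch; the generic this_cpu_xchg()/this_cpu_cmpxchg() wrappers dispatch on operand size, so a u64 operand selects the _8 variants, which after this patch the x86 header only provides on 64-bit builds (32-bit builds fall back to the generic implementations in include/linux/percpu.h).

/*
 * Illustrative sketch (not part of the patch): reaching the 8-byte
 * this_cpu ops through the size-dispatching generic wrappers.
 * demo_seq and demo_update() are hypothetical names.
 */
#include <linux/percpu.h>

static DEFINE_PER_CPU(u64, demo_seq);	/* hypothetical per-cpu counter */

static u64 demo_update(u64 new)
{
	u64 old;

	/* Swap in the new value; returns the previous per-cpu value. */
	old = this_cpu_xchg(demo_seq, new);

	/* Advance only if this CPU's slot still holds the value we wrote. */
	this_cpu_cmpxchg(demo_seq, new, new + 1);

	return old;
}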
Diffstat (limited to 'arch')
-rw-r--r--  arch/x86/include/asm/percpu.h | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/x86/include/asm/percpu.h b/arch/x86/include/asm/percpu.h
index 8ee45167e817..3788f4649db4 100644
--- a/arch/x86/include/asm/percpu.h
+++ b/arch/x86/include/asm/percpu.h
@@ -414,8 +414,6 @@ do { \
 #define this_cpu_xchg_1(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define this_cpu_xchg_2(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define this_cpu_xchg_4(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define this_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define this_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 
 #define irqsafe_cpu_add_1(pcp, val)	percpu_add_op((pcp), val)
 #define irqsafe_cpu_add_2(pcp, val)	percpu_add_op((pcp), val)
@@ -432,8 +430,6 @@ do { \
 #define irqsafe_cpu_xchg_1(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define irqsafe_cpu_xchg_2(pcp, nval)	percpu_xchg_op(pcp, nval)
 #define irqsafe_cpu_xchg_4(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
-#define irqsafe_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 
 #ifndef CONFIG_M386
 #define __this_cpu_add_return_1(pcp, val)	percpu_add_return_op(pcp, val)
@@ -475,11 +471,15 @@ do { \
 #define this_cpu_or_8(pcp, val)	percpu_to_op("or", (pcp), val)
 #define this_cpu_xor_8(pcp, val)	percpu_to_op("xor", (pcp), val)
 #define this_cpu_add_return_8(pcp, val)	percpu_add_return_op(pcp, val)
+#define this_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
+#define this_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 
 #define irqsafe_cpu_add_8(pcp, val)	percpu_add_op((pcp), val)
 #define irqsafe_cpu_and_8(pcp, val)	percpu_to_op("and", (pcp), val)
 #define irqsafe_cpu_or_8(pcp, val)	percpu_to_op("or", (pcp), val)
 #define irqsafe_cpu_xor_8(pcp, val)	percpu_to_op("xor", (pcp), val)
+#define irqsafe_cpu_xchg_8(pcp, nval)	percpu_xchg_op(pcp, nval)
+#define irqsafe_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
 #endif
 
 /* This is not atomic against other CPUs -- CPU preemption needs to be off */
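For orientation, a condensed, illustrative (not verbatim) sketch of how the relevant part of arch/x86/include/asm/percpu.h is laid out after this patch: the 8-byte xchg/cmpxchg helpers now appear only inside the 64-bit-only block, alongside the other operations that assume 64-bit operands.

/* Condensed, illustrative layout of arch/x86/include/asm/percpu.h
 * after this patch; elisions are marked with comments. */

/* ... 1-, 2- and 4-byte this_cpu and irqsafe_cpu ops, valid on all x86 ... */

#ifdef CONFIG_X86_64
/* ... other 64-bit-only ops: add_8, and_8, or_8, xor_8, add_return_8 ... */
#define this_cpu_xchg_8(pcp, nval)		percpu_xchg_op(pcp, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)

#define irqsafe_cpu_xchg_8(pcp, nval)		percpu_xchg_op(pcp, nval)
#define irqsafe_cpu_cmpxchg_8(pcp, oval, nval)	percpu_cmpxchg_op(pcp, oval, nval)
#endif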