author    Tejun Heo <tj@kernel.org>    2014-06-17 19:12:39 -0400
committer Tejun Heo <tj@kernel.org>    2014-06-17 19:12:39 -0400
commit    cadb1c4db2d33e0a818f645cd1963a479dab91e2 (patch)
tree      85ed19e54f45c9e918eaf03e3811a83c7848f4c8
parent    9c28278a24c01c0073fb89e53c1d2a605ab9587d (diff)
percpu: use raw_cpu_*() to define __this_cpu_*()
__this_cpu_*() operations are the same as raw_cpu_*() operations except for the added __this_cpu_preempt_check().  Curiously, these were defined using __pcpu_size_call_*() instead of being layered on top of raw_cpu_*().

Let's layer them so that __this_cpu_*() are defined in terms of raw_cpu_*().  It's simpler and less error-prone this way.

This patch doesn't introduce any functional difference.

Signed-off-by: Tejun Heo <tj@kernel.org>
Acked-by: Christoph Lameter <cl@linux.com>
-rw-r--r--  include/linux/percpu-defs.h | 18 +++++++++---------
1 file changed, 9 insertions(+), 9 deletions(-)
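The layering is easiest to see outside the kernel. Below is a minimal, runnable user-space sketch of the pattern the message describes; check_preempt(), my_raw_*() and my_checked_*() are hypothetical stand-ins for __this_cpu_preempt_check(), raw_cpu_*() and __this_cpu_*(), not the kernel's actual macros:

/*
 * Sketch only: the checked variant is defined on top of the raw variant
 * instead of repeating the size-dispatch machinery.  All names here are
 * hypothetical stand-ins for the kernel macros.
 */
#include <assert.h>
#include <stdio.h>

static int preemption_safe = 1;	/* stand-in for the real context check */

/* ~ __this_cpu_preempt_check(op): complain if called in the wrong context */
#define check_preempt(op)	assert(preemption_safe)

/* ~ raw_cpu_read()/raw_cpu_add(): the underlying unchecked operations */
#define my_raw_read(var)	(var)
#define my_raw_add(var, val)	((var) += (val))

/* Expression-style layering, as in __this_cpu_read(): check, then raw op */
#define my_checked_read(var)	(check_preempt("read"), my_raw_read(var))

/* Statement-style layering, as in __this_cpu_add(): do/while keeps it one statement */
#define my_checked_add(var, val)		\
do {						\
	check_preempt("add");			\
	my_raw_add(var, val);			\
} while (0)

int main(void)
{
	int counter = 40;

	my_checked_add(counter, 2);
	printf("%d\n", my_checked_read(counter));	/* prints 42 */
	return 0;
}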
diff --git a/include/linux/percpu-defs.h b/include/linux/percpu-defs.h
index fd0b9ee19ec8..215917e9a176 100644
--- a/include/linux/percpu-defs.h
+++ b/include/linux/percpu-defs.h
@@ -403,16 +403,16 @@ do { \
  * Generic percpu operations for context that are safe from preemption/interrupts.
  */
 # define __this_cpu_read(pcp) \
-	(__this_cpu_preempt_check("read"),__pcpu_size_call_return(raw_cpu_read_, (pcp)))
+	(__this_cpu_preempt_check("read"),raw_cpu_read(pcp))
 
 # define __this_cpu_write(pcp, val) \
 do { __this_cpu_preempt_check("write"); \
-	__pcpu_size_call(raw_cpu_write_, (pcp), (val)); \
+	raw_cpu_write(pcp, val); \
 } while (0)
 
 # define __this_cpu_add(pcp, val) \
 do { __this_cpu_preempt_check("add"); \
-	__pcpu_size_call(raw_cpu_add_, (pcp), (val)); \
+	raw_cpu_add(pcp, val); \
 } while (0)
 
 # define __this_cpu_sub(pcp, val)	__this_cpu_add((pcp), -(typeof(pcp))(val))
@@ -421,29 +421,29 @@ do { __this_cpu_preempt_check("add"); \
 
 # define __this_cpu_and(pcp, val) \
 do { __this_cpu_preempt_check("and"); \
-	__pcpu_size_call(raw_cpu_and_, (pcp), (val)); \
+	raw_cpu_and(pcp, val); \
 } while (0)
 
 # define __this_cpu_or(pcp, val) \
 do { __this_cpu_preempt_check("or"); \
-	__pcpu_size_call(raw_cpu_or_, (pcp), (val)); \
+	raw_cpu_or(pcp, val); \
 } while (0)
 
 # define __this_cpu_add_return(pcp, val) \
-	(__this_cpu_preempt_check("add_return"),__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val))
+	(__this_cpu_preempt_check("add_return"),raw_cpu_add_return(pcp, val))
 
 #define __this_cpu_sub_return(pcp, val)	__this_cpu_add_return(pcp, -(typeof(pcp))(val))
 #define __this_cpu_inc_return(pcp)	__this_cpu_add_return(pcp, 1)
 #define __this_cpu_dec_return(pcp)	__this_cpu_add_return(pcp, -1)
 
 # define __this_cpu_xchg(pcp, nval) \
-	(__this_cpu_preempt_check("xchg"),__pcpu_size_call_return2(raw_cpu_xchg_, (pcp), nval))
+	(__this_cpu_preempt_check("xchg"),raw_cpu_xchg(pcp, nval))
 
 # define __this_cpu_cmpxchg(pcp, oval, nval) \
-	(__this_cpu_preempt_check("cmpxchg"),__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval))
+	(__this_cpu_preempt_check("cmpxchg"),raw_cpu_cmpxchg(pcp, oval, nval))
 
 # define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
-	(__this_cpu_preempt_check("cmpxchg_double"),__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2)))
+	(__this_cpu_preempt_check("cmpxchg_double"),raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2))
 
 /*
  * this_cpu_*() operations are used for accesses that must be done in a
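One detail visible in the context lines above: __this_cpu_sub() and __this_cpu_sub_return() reuse the add variants by negating through -(typeof(pcp))(val). A small runnable sketch of why converting to the variable's type before negating keeps the subtraction well-defined for unsigned percpu variables; sub_via_add() is a hypothetical illustration in plain GNU C, not kernel code:

/*
 * Sketch: casting val to the target's type first makes the negation wrap
 * via modular arithmetic, so "add the negation" really subtracts even
 * when the variable is unsigned.  Requires GNU C's typeof.
 */
#include <assert.h>

#define sub_via_add(var, val)	((var) += -(typeof(var))(val))

int main(void)
{
	unsigned long count = 10;

	/* 10 + (-(unsigned long)3) wraps modulo 2^64 back to 7 */
	sub_via_add(count, 3);
	assert(count == 7);
	return 0;
}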