author		Tejun Heo <tj@kernel.org>	2014-06-17 19:12:39 -0400
committer	Tejun Heo <tj@kernel.org>	2014-06-17 19:12:39 -0400
commit		dcba4333683c3a0642fd575e475c6c740122a037 (patch)
tree		62f2afd871df4fcab7fb5e6332ebd94421d77214 /include/linux/percpu.h
parent		3b8ed91d6463f48ab180f5ebedc9663eddfa0587 (diff)

percpu: only allow sized arch overrides for {raw|this}_cpu_*() ops

Currently, percpu allows two separate methods for overriding
{raw|this}_cpu_*() ops - for a given operation, an arch can provide a
whole replacement or sized sub-operations to override specific parts of
it.  e.g. an arch can provide this_cpu_add() to replace the whole add
operation, or this_cpu_add_4() to override only the 4 byte operation.

While quite flexible at a glance, the dual-override scheme complicates
the code path for no actual gain.  It complicates the already complex
operation definitions, and if an arch wants to override all sizes, it
can easily provide all the sized variants anyway.  In fact, no arch is
actually making use of whole operation override.

Another oddity is that the __this_cpu_*() operations are defined in
the same way as raw_cpu_*() but ignore full overrides of raw_cpu_*()
and do not allow full operation override themselves, so if an arch
provides whole overrides for the raw_cpu_*() operations, __this_cpu_*()
ends up using the generic implementations anyway.

More importantly, whole-operation overrides take away the layering
between the arch-specific and generic parts, making it impossible for
the generic part to implement arch-independent features on top of
arch-specific overrides.

This patch removes the support for whole operation overrides.  As no
arch is using it, this doesn't cause any actual difference.

Signed-off-by: Tejun Heo <tj@kernel.org>
Acked-by: Christoph Lameter <cl@linux.com>
Diffstat (limited to 'include/linux/percpu.h')
-rw-r--r--	include/linux/percpu.h	94
1 file changed, 5 insertions(+), 89 deletions(-)
diff --git a/include/linux/percpu.h b/include/linux/percpu.h
index 97b207990c45..95d380e5d246 100644
--- a/include/linux/percpu.h
+++ b/include/linux/percpu.h
@@ -226,17 +226,11 @@ do { \
  * safe. Interrupts may occur. If the interrupt modifies the variable
  * too then RMW actions will not be reliable.
  *
- * The arch code can provide optimized functions in two ways:
- *
- * 1. Override the function completely. F.e. define this_cpu_add().
- *    The arch must then ensure that the various scalar format passed
- *    are handled correctly.
- *
- * 2. Provide functions for certain scalar sizes. F.e. provide
- *    this_cpu_add_2() to provide per cpu atomic operations for 2 byte
- *    sized RMW actions. If arch code does not provide operations for
- *    a scalar size then the fallback in the generic code will be
- *    used.
+ * The arch code can provide optimized implementation by defining macros
+ * for certain scalar sizes. F.e. provide this_cpu_add_2() to provide per
+ * cpu atomic operations for 2 byte sized RMW actions. If arch code does
+ * not provide operations for a scalar size then the fallback in the
+ * generic code will be used.
  */
 
 #define _this_cpu_generic_read(pcp) \
@@ -247,7 +241,6 @@ do { \
 	ret__; \
 })
 
-#ifndef this_cpu_read
 # ifndef this_cpu_read_1
 #  define this_cpu_read_1(pcp) _this_cpu_generic_read(pcp)
 # endif
@@ -261,7 +254,6 @@ do { \
 #  define this_cpu_read_8(pcp) _this_cpu_generic_read(pcp)
 # endif
 # define this_cpu_read(pcp) __pcpu_size_call_return(this_cpu_read_, (pcp))
-#endif
 
 #define _this_cpu_generic_to_op(pcp, val, op) \
 do { \
@@ -271,7 +263,6 @@ do { \
 	raw_local_irq_restore(flags); \
 } while (0)
 
-#ifndef this_cpu_write
 # ifndef this_cpu_write_1
 #  define this_cpu_write_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), =)
 # endif
@@ -285,9 +276,7 @@ do { \
 #  define this_cpu_write_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), =)
 # endif
 # define this_cpu_write(pcp, val) __pcpu_size_call(this_cpu_write_, (pcp), (val))
-#endif
 
-#ifndef this_cpu_add
 # ifndef this_cpu_add_1
 #  define this_cpu_add_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), +=)
 # endif
@@ -301,21 +290,11 @@ do { \
 #  define this_cpu_add_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), +=)
 # endif
 # define this_cpu_add(pcp, val) __pcpu_size_call(this_cpu_add_, (pcp), (val))
-#endif
 
-#ifndef this_cpu_sub
 # define this_cpu_sub(pcp, val) this_cpu_add((pcp), -(typeof(pcp))(val))
-#endif
-
-#ifndef this_cpu_inc
 # define this_cpu_inc(pcp) this_cpu_add((pcp), 1)
-#endif
-
-#ifndef this_cpu_dec
 # define this_cpu_dec(pcp) this_cpu_sub((pcp), 1)
-#endif
 
-#ifndef this_cpu_and
 # ifndef this_cpu_and_1
 #  define this_cpu_and_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), &=)
 # endif
@@ -329,9 +308,7 @@ do { \
 #  define this_cpu_and_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), &=)
 # endif
 # define this_cpu_and(pcp, val) __pcpu_size_call(this_cpu_and_, (pcp), (val))
-#endif
 
-#ifndef this_cpu_or
 # ifndef this_cpu_or_1
 #  define this_cpu_or_1(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=)
 # endif
@@ -345,7 +322,6 @@ do { \
 #  define this_cpu_or_8(pcp, val) _this_cpu_generic_to_op((pcp), (val), |=)
 # endif
 # define this_cpu_or(pcp, val) __pcpu_size_call(this_cpu_or_, (pcp), (val))
-#endif
 
 #define _this_cpu_generic_add_return(pcp, val) \
 ({ \
@@ -358,7 +334,6 @@ do { \
 	ret__; \
 })
 
-#ifndef this_cpu_add_return
 # ifndef this_cpu_add_return_1
 #  define this_cpu_add_return_1(pcp, val) _this_cpu_generic_add_return(pcp, val)
 # endif
@@ -372,7 +347,6 @@ do { \
 #  define this_cpu_add_return_8(pcp, val) _this_cpu_generic_add_return(pcp, val)
 # endif
 # define this_cpu_add_return(pcp, val) __pcpu_size_call_return2(this_cpu_add_return_, pcp, val)
-#endif
 
 #define this_cpu_sub_return(pcp, val) this_cpu_add_return(pcp, -(typeof(pcp))(val))
 #define this_cpu_inc_return(pcp) this_cpu_add_return(pcp, 1)
@@ -388,7 +362,6 @@ do { \
 	ret__; \
 })
 
-#ifndef this_cpu_xchg
 # ifndef this_cpu_xchg_1
 #  define this_cpu_xchg_1(pcp, nval) _this_cpu_generic_xchg(pcp, nval)
 # endif
@@ -403,7 +376,6 @@ do { \
 # endif
 # define this_cpu_xchg(pcp, nval) \
 	__pcpu_size_call_return2(this_cpu_xchg_, (pcp), nval)
-#endif
 
 #define _this_cpu_generic_cmpxchg(pcp, oval, nval) \
 ({ \
@@ -417,7 +389,6 @@ do { \
 	ret__; \
 })
 
-#ifndef this_cpu_cmpxchg
 # ifndef this_cpu_cmpxchg_1
 #  define this_cpu_cmpxchg_1(pcp, oval, nval) _this_cpu_generic_cmpxchg(pcp, oval, nval)
 # endif
@@ -432,7 +403,6 @@ do { \
 # endif
 # define this_cpu_cmpxchg(pcp, oval, nval) \
 	__pcpu_size_call_return2(this_cpu_cmpxchg_, pcp, oval, nval)
-#endif
 
 /*
  * cmpxchg_double replaces two adjacent scalars at once. The first
@@ -453,7 +423,6 @@ do { \
 	ret__; \
 })
 
-#ifndef this_cpu_cmpxchg_double
 # ifndef this_cpu_cmpxchg_double_1
 #  define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	_this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
@@ -472,7 +441,6 @@ do { \
 # endif
 # define this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	__pcpu_double_call_return_bool(this_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2))
-#endif
 
 /*
  * Generic percpu operations for contexts where we do not want to do
@@ -484,7 +452,6 @@ do { \
  * or an interrupt occurred and the same percpu variable was modified from
  * the interrupt context.
  */
-#ifndef raw_cpu_read
 # ifndef raw_cpu_read_1
 #  define raw_cpu_read_1(pcp) (*raw_cpu_ptr(&(pcp)))
 # endif
@@ -498,15 +465,12 @@ do { \
 #  define raw_cpu_read_8(pcp) (*raw_cpu_ptr(&(pcp)))
 # endif
 # define raw_cpu_read(pcp) __pcpu_size_call_return(raw_cpu_read_, (pcp))
-#endif
 
 #define raw_cpu_generic_to_op(pcp, val, op) \
 do { \
 	*raw_cpu_ptr(&(pcp)) op val; \
 } while (0)
 
-
-#ifndef raw_cpu_write
 # ifndef raw_cpu_write_1
 #  define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), =)
 # endif
@@ -520,9 +484,7 @@ do { \
 #  define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), =)
 # endif
 # define raw_cpu_write(pcp, val) __pcpu_size_call(raw_cpu_write_, (pcp), (val))
-#endif
 
-#ifndef raw_cpu_add
 # ifndef raw_cpu_add_1
 #  define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=)
 # endif
@@ -536,21 +498,13 @@ do { \
 #  define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=)
 # endif
 # define raw_cpu_add(pcp, val) __pcpu_size_call(raw_cpu_add_, (pcp), (val))
-#endif
 
-#ifndef raw_cpu_sub
 # define raw_cpu_sub(pcp, val) raw_cpu_add((pcp), -(val))
-#endif
 
-#ifndef raw_cpu_inc
 # define raw_cpu_inc(pcp) raw_cpu_add((pcp), 1)
-#endif
 
-#ifndef raw_cpu_dec
 # define raw_cpu_dec(pcp) raw_cpu_sub((pcp), 1)
-#endif
 
-#ifndef raw_cpu_and
 # ifndef raw_cpu_and_1
 #  define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=)
 # endif
@@ -564,9 +518,7 @@ do { \
 #  define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=)
 # endif
 # define raw_cpu_and(pcp, val) __pcpu_size_call(raw_cpu_and_, (pcp), (val))
-#endif
 
-#ifndef raw_cpu_or
 # ifndef raw_cpu_or_1
 #  define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=)
 # endif
@@ -580,7 +532,6 @@ do { \
 #  define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=)
 # endif
 # define raw_cpu_or(pcp, val) __pcpu_size_call(raw_cpu_or_, (pcp), (val))
-#endif
 
 #define raw_cpu_generic_add_return(pcp, val) \
 ({ \
@@ -588,7 +539,6 @@ do { \
 	raw_cpu_read(pcp); \
 })
 
-#ifndef raw_cpu_add_return
 # ifndef raw_cpu_add_return_1
 #  define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
 # endif
@@ -603,7 +553,6 @@ do { \
 # endif
 # define raw_cpu_add_return(pcp, val) \
 	__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val)
-#endif
 
 #define raw_cpu_sub_return(pcp, val) raw_cpu_add_return(pcp, -(typeof(pcp))(val))
 #define raw_cpu_inc_return(pcp) raw_cpu_add_return(pcp, 1)
@@ -616,7 +565,6 @@ do { \
 	ret__; \
 })
 
-#ifndef raw_cpu_xchg
 # ifndef raw_cpu_xchg_1
 #  define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
 # endif
@@ -631,7 +579,6 @@ do { \
 # endif
 # define raw_cpu_xchg(pcp, nval) \
 	__pcpu_size_call_return2(raw_cpu_xchg_, (pcp), nval)
-#endif
 
 #define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
 ({ \
@@ -642,7 +589,6 @@ do { \
 	ret__; \
 })
 
-#ifndef raw_cpu_cmpxchg
 # ifndef raw_cpu_cmpxchg_1
 #  define raw_cpu_cmpxchg_1(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval)
 # endif
@@ -657,7 +603,6 @@ do { \
 # endif
 # define raw_cpu_cmpxchg(pcp, oval, nval) \
 	__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)
-#endif
 
 #define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 ({ \
@@ -671,7 +616,6 @@ do { \
 	(__ret); \
 })
 
-#ifndef raw_cpu_cmpxchg_double
 # ifndef raw_cpu_cmpxchg_double_1
 #  define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
@@ -690,79 +634,51 @@ do { \
 # endif
 # define raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2))
-#endif
 
 /*
  * Generic percpu operations for context that are safe from preemption/interrupts.
 */
-#ifndef __this_cpu_read
 # define __this_cpu_read(pcp) \
 	(__this_cpu_preempt_check("read"),__pcpu_size_call_return(raw_cpu_read_, (pcp)))
-#endif
 
-#ifndef __this_cpu_write
 # define __this_cpu_write(pcp, val) \
 do { __this_cpu_preempt_check("write"); \
 	__pcpu_size_call(raw_cpu_write_, (pcp), (val)); \
 } while (0)
-#endif
 
-#ifndef __this_cpu_add
 # define __this_cpu_add(pcp, val) \
 do { __this_cpu_preempt_check("add"); \
 	__pcpu_size_call(raw_cpu_add_, (pcp), (val)); \
 } while (0)
-#endif
 
-#ifndef __this_cpu_sub
 # define __this_cpu_sub(pcp, val) __this_cpu_add((pcp), -(typeof(pcp))(val))
-#endif
-
-#ifndef __this_cpu_inc
 # define __this_cpu_inc(pcp) __this_cpu_add((pcp), 1)
-#endif
-
-#ifndef __this_cpu_dec
 # define __this_cpu_dec(pcp) __this_cpu_sub((pcp), 1)
-#endif
 
-#ifndef __this_cpu_and
 # define __this_cpu_and(pcp, val) \
 do { __this_cpu_preempt_check("and"); \
 	__pcpu_size_call(raw_cpu_and_, (pcp), (val)); \
 } while (0)
 
-#endif
-
-#ifndef __this_cpu_or
 # define __this_cpu_or(pcp, val) \
 do { __this_cpu_preempt_check("or"); \
 	__pcpu_size_call(raw_cpu_or_, (pcp), (val)); \
 } while (0)
-#endif
 
-#ifndef __this_cpu_add_return
 # define __this_cpu_add_return(pcp, val) \
 	(__this_cpu_preempt_check("add_return"),__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val))
-#endif
 
 #define __this_cpu_sub_return(pcp, val) __this_cpu_add_return(pcp, -(typeof(pcp))(val))
 #define __this_cpu_inc_return(pcp) __this_cpu_add_return(pcp, 1)
 #define __this_cpu_dec_return(pcp) __this_cpu_add_return(pcp, -1)
 
-#ifndef __this_cpu_xchg
 # define __this_cpu_xchg(pcp, nval) \
 	(__this_cpu_preempt_check("xchg"),__pcpu_size_call_return2(raw_cpu_xchg_, (pcp), nval))
-#endif
 
-#ifndef __this_cpu_cmpxchg
 # define __this_cpu_cmpxchg(pcp, oval, nval) \
 	(__this_cpu_preempt_check("cmpxchg"),__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval))
-#endif
 
-#ifndef __this_cpu_cmpxchg_double
 # define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
 	(__this_cpu_preempt_check("cmpxchg_double"),__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2)))
-#endif
 
 #endif /* __LINUX_PERCPU_H */
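[Editor's illustration] For reference, the __pcpu_size_call_return() dispatcher that the surviving "# define this_cpu_read(pcp) ..." line relies on selects the sized variant by sizeof(). A condensed sketch of that helper, as it appears elsewhere in include/linux/percpu.h (pointer verification elided), shown here on the assumption its shape matches the kernel of this era:

#define __pcpu_size_call_return(stem, variable)				\
({									\
	typeof(variable) pscr_ret__;					\
	switch (sizeof(variable)) {					\
	case 1: pscr_ret__ = stem##1(variable); break;			\
	case 2: pscr_ret__ = stem##2(variable); break;			\
	case 4: pscr_ret__ = stem##4(variable); break;			\
	case 8: pscr_ret__ = stem##8(variable); break;			\
	default:							\
		__bad_size_call_parameter(); break;			\
	}								\
	pscr_ret__;							\
})

So for a 4-byte percpu variable, this_cpu_read(x) compiles down to this_cpu_read_4(x) - the arch's macro if one was defined, otherwise _this_cpu_generic_read(x). With the whole-operation override gone, this sized hook is the only arch entry point, which is the layering the commit message argues for.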