Diffstat (limited to 'include/linux/percpu.h')

 include/linux/percpu.h | 205 +++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 204 insertions(+), 1 deletion(-)
diff --git a/include/linux/percpu.h b/include/linux/percpu.h
index 5095b834a6fb..27c3c6fcfad3 100644
--- a/include/linux/percpu.h
+++ b/include/linux/percpu.h
@@ -240,6 +240,21 @@ extern void __bad_size_call_parameter(void);
 	pscr_ret__;						\
 })
 
+#define __pcpu_size_call_return2(stem, variable, ...)		\
+({								\
+	typeof(variable) pscr2_ret__;				\
+	__verify_pcpu_ptr(&(variable));				\
+	switch(sizeof(variable)) {				\
+	case 1: pscr2_ret__ = stem##1(variable, __VA_ARGS__); break;	\
+	case 2: pscr2_ret__ = stem##2(variable, __VA_ARGS__); break;	\
+	case 4: pscr2_ret__ = stem##4(variable, __VA_ARGS__); break;	\
+	case 8: pscr2_ret__ = stem##8(variable, __VA_ARGS__); break;	\
+	default:						\
+		__bad_size_call_parameter(); break;		\
+	}							\
+	pscr2_ret__;						\
+})
+
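Unlike __pcpu_size_call(), which expands to a statement, __pcpu_size_call_return2() is a GNU statement expression that yields a value and forwards extra operands through __VA_ARGS__ to the size-specific helper. A minimal user-space sketch of the same dispatch idiom (all names here are illustrative, not from the patch):

	#include <stdio.h>

	/* Hypothetical per-width helpers, standing in for stem##1 .. stem##8. */
	#define fetch_add_1(var, val)	((var) += (val))
	#define fetch_add_4(var, val)	((var) += (val))

	/* Same shape as __pcpu_size_call_return2(): dispatch on sizeof() and
	 * yield the chosen helper's result from a statement expression. */
	#define fetch_add(var, val)				\
	({							\
		typeof(var) ret__ = 0;				\
		switch (sizeof(var)) {				\
		case 1: ret__ = fetch_add_1(var, val); break;	\
		case 4: ret__ = fetch_add_4(var, val); break;	\
		}						\
		ret__;						\
	})

	int main(void)
	{
		int counter = 5;
		printf("%d\n", fetch_add(counter, 2));	/* prints 7 */
		return 0;
	}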
 #define __pcpu_size_call(stem, variable, ...)			\
 do {								\
 	__verify_pcpu_ptr(&(variable));				\
@@ -402,6 +417,89 @@ do { \
 # define this_cpu_xor(pcp, val)	__pcpu_size_call(this_cpu_xor_, (pcp), (val))
 #endif
 
+#define _this_cpu_generic_add_return(pcp, val)			\
+({								\
+	typeof(pcp) ret__;					\
+	preempt_disable();					\
+	__this_cpu_add(pcp, val);				\
+	ret__ = __this_cpu_read(pcp);				\
+	preempt_enable();					\
+	ret__;							\
+})
+
+#ifndef this_cpu_add_return
+# ifndef this_cpu_add_return_1
+#  define this_cpu_add_return_1(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_2
+#  define this_cpu_add_return_2(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_4
+#  define this_cpu_add_return_4(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_8
+#  define this_cpu_add_return_8(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# define this_cpu_add_return(pcp, val)	__pcpu_size_call_return2(this_cpu_add_return_, pcp, val)
+#endif
+
+#define this_cpu_sub_return(pcp, val)	this_cpu_add_return(pcp, -(val))
+#define this_cpu_inc_return(pcp)	this_cpu_add_return(pcp, 1)
+#define this_cpu_dec_return(pcp)	this_cpu_add_return(pcp, -1)
+
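this_cpu_add_return() and the inc/dec/sub wrappers built on it perform the whole read-modify-write inside a single preempt_disable() bracket and hand back the resulting value, replacing open-coded get_cpu_var()/put_cpu_var() sequences. A sketch of a caller, assuming a hypothetical per-cpu counter:

	#include <linux/kernel.h>
	#include <linux/percpu.h>

	/* Hypothetical per-cpu event counter, not part of this patch. */
	static DEFINE_PER_CPU(unsigned int, nr_events);

	static void note_event(void)
	{
		/* Add and read back as one preemption-safe operation. */
		if (this_cpu_inc_return(nr_events) == 1)
			pr_info("first event on this CPU\n");
	}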
+#define _this_cpu_generic_xchg(pcp, nval)			\
+({	typeof(pcp) ret__;					\
+	preempt_disable();					\
+	ret__ = __this_cpu_read(pcp);				\
+	__this_cpu_write(pcp, nval);				\
+	preempt_enable();					\
+	ret__;							\
+})
+
+#ifndef this_cpu_xchg
+# ifndef this_cpu_xchg_1
+#  define this_cpu_xchg_1(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_2
+#  define this_cpu_xchg_2(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_4
+#  define this_cpu_xchg_4(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_8
+#  define this_cpu_xchg_8(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# define this_cpu_xchg(pcp, nval)	\
+	__pcpu_size_call_return2(this_cpu_xchg_, (pcp), nval)
+#endif
+
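this_cpu_xchg() stores the new value and returns the one it replaced; the generic fallback is just a read plus a write under preempt_disable(). A sketch, assuming a hypothetical per-cpu flag:

	#include <linux/percpu.h>

	/* Hypothetical flag set elsewhere on this CPU; illustration only. */
	static DEFINE_PER_CPU(int, pending);

	static int take_pending(void)
	{
		/* Fetch the old value and clear the flag in one safe step. */
		return this_cpu_xchg(pending, 0);
	}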
+#define _this_cpu_generic_cmpxchg(pcp, oval, nval)		\
+({	typeof(pcp) ret__;					\
+	preempt_disable();					\
+	ret__ = __this_cpu_read(pcp);				\
+	if (ret__ == (oval))					\
+		__this_cpu_write(pcp, nval);			\
+	preempt_enable();					\
+	ret__;							\
+})
+
+#ifndef this_cpu_cmpxchg
+# ifndef this_cpu_cmpxchg_1
+#  define this_cpu_cmpxchg_1(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_2
+#  define this_cpu_cmpxchg_2(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_4
+#  define this_cpu_cmpxchg_4(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_8
+#  define this_cpu_cmpxchg_8(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# define this_cpu_cmpxchg(pcp, oval, nval)	\
+	__pcpu_size_call_return2(this_cpu_cmpxchg_, pcp, oval, nval)
+#endif
+
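As with the full cmpxchg(), the per-cpu variant returns the value it found, so the usual compare-and-retry loop works unchanged. A sketch with an illustrative high-water mark (names are not from the patch):

	#include <linux/percpu.h>

	static DEFINE_PER_CPU(long, hiwater);	/* hypothetical */

	static void update_hiwater(long mark)
	{
		long old = this_cpu_read(hiwater);

		/* Retry until the value we observed is still the one in
		 * place when our store is attempted. */
		while (old < mark) {
			long seen = this_cpu_cmpxchg(hiwater, old, mark);
			if (seen == old)
				break;
			old = seen;
		}
	}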
 /*
  * Generic percpu operations that do not require preemption handling.
  * Either we do not care about races or the caller has the
@@ -529,11 +627,87 @@ do { \
 # define __this_cpu_xor(pcp, val)	__pcpu_size_call(__this_cpu_xor_, (pcp), (val))
 #endif
 
+#define __this_cpu_generic_add_return(pcp, val)		\
+({								\
+	__this_cpu_add(pcp, val);				\
+	__this_cpu_read(pcp);					\
+})
+
+#ifndef __this_cpu_add_return
+# ifndef __this_cpu_add_return_1
+#  define __this_cpu_add_return_1(pcp, val)	__this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef __this_cpu_add_return_2
+#  define __this_cpu_add_return_2(pcp, val)	__this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef __this_cpu_add_return_4
+#  define __this_cpu_add_return_4(pcp, val)	__this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef __this_cpu_add_return_8
+#  define __this_cpu_add_return_8(pcp, val)	__this_cpu_generic_add_return(pcp, val)
+# endif
+# define __this_cpu_add_return(pcp, val)	__pcpu_size_call_return2(__this_cpu_add_return_, pcp, val)
+#endif
+
+#define __this_cpu_sub_return(pcp, val)	__this_cpu_add_return(pcp, -(val))
+#define __this_cpu_inc_return(pcp)	__this_cpu_add_return(pcp, 1)
+#define __this_cpu_dec_return(pcp)	__this_cpu_add_return(pcp, -1)
+
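The __this_cpu_* return variants drop the preempt_disable()/preempt_enable() bracket entirely, so they are only valid where preemption is already off (or the caller otherwise cannot migrate between CPUs). A sketch of such a context, with an illustrative counter:

	#include <linux/percpu.h>

	static DEFINE_PER_CPU(unsigned long, work_done);	/* hypothetical */

	static unsigned long tally_work(void)
	{
		unsigned long total;

		preempt_disable();
		/* Preemption is off, so the cheaper unbracketed form is safe. */
		total = __this_cpu_inc_return(work_done);
		preempt_enable();
		return total;
	}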
+#define __this_cpu_generic_xchg(pcp, nval)			\
+({	typeof(pcp) ret__;					\
+	ret__ = __this_cpu_read(pcp);				\
+	__this_cpu_write(pcp, nval);				\
+	ret__;							\
+})
+
+#ifndef __this_cpu_xchg
+# ifndef __this_cpu_xchg_1
+#  define __this_cpu_xchg_1(pcp, nval)	__this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef __this_cpu_xchg_2
+#  define __this_cpu_xchg_2(pcp, nval)	__this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef __this_cpu_xchg_4
+#  define __this_cpu_xchg_4(pcp, nval)	__this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef __this_cpu_xchg_8
+#  define __this_cpu_xchg_8(pcp, nval)	__this_cpu_generic_xchg(pcp, nval)
+# endif
+# define __this_cpu_xchg(pcp, nval)	\
+	__pcpu_size_call_return2(__this_cpu_xchg_, (pcp), nval)
+#endif
+
+#define __this_cpu_generic_cmpxchg(pcp, oval, nval)		\
+({								\
+	typeof(pcp) ret__;					\
+	ret__ = __this_cpu_read(pcp);				\
+	if (ret__ == (oval))					\
+		__this_cpu_write(pcp, nval);			\
+	ret__;							\
+})
+
+#ifndef __this_cpu_cmpxchg
+# ifndef __this_cpu_cmpxchg_1
+#  define __this_cpu_cmpxchg_1(pcp, oval, nval)	__this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef __this_cpu_cmpxchg_2
+#  define __this_cpu_cmpxchg_2(pcp, oval, nval)	__this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef __this_cpu_cmpxchg_4
+#  define __this_cpu_cmpxchg_4(pcp, oval, nval)	__this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef __this_cpu_cmpxchg_8
+#  define __this_cpu_cmpxchg_8(pcp, oval, nval)	__this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# define __this_cpu_cmpxchg(pcp, oval, nval)	\
+	__pcpu_size_call_return2(__this_cpu_cmpxchg_, pcp, oval, nval)
+#endif
+
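Choosing between this_cpu_cmpxchg() and __this_cpu_cmpxchg() is then purely a question of who pays for the preemption bracket. A sketch assuming hypothetical state values:

	#include <linux/types.h>
	#include <linux/percpu.h>

	#define STATE_IDLE	0	/* hypothetical states, illustration only */
	#define STATE_BUSY	1

	static DEFINE_PER_CPU(int, cpu_state);

	static bool try_claim_cpu(void)
	{
		bool claimed;

		preempt_disable();
		/* Already non-preemptible, so the unbracketed variant suffices. */
		claimed = __this_cpu_cmpxchg(cpu_state, STATE_IDLE,
					     STATE_BUSY) == STATE_IDLE;
		preempt_enable();
		return claimed;
	}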
 /*
  * IRQ safe versions of the per cpu RMW operations. Note that these operations
  * are *not* safe against modification of the same variable from other
  * processors (which one gets when using regular atomic operations)
-. They are guaranteed to be atomic vs. local interrupts and
+ * They are guaranteed to be atomic vs. local interrupts and
  * preemption only.
  */
 #define irqsafe_cpu_generic_to_op(pcp, val, op)		\
@@ -620,4 +794,33 @@ do { \
 # define irqsafe_cpu_xor(pcp, val)	__pcpu_size_call(irqsafe_cpu_xor_, (pcp), (val))
 #endif
 
+#define irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)		\
+({								\
+	typeof(pcp) ret__;					\
+	unsigned long flags;					\
+	local_irq_save(flags);					\
+	ret__ = __this_cpu_read(pcp);				\
+	if (ret__ == (oval))					\
+		__this_cpu_write(pcp, nval);			\
+	local_irq_restore(flags);				\
+	ret__;							\
+})
+
+#ifndef irqsafe_cpu_cmpxchg
+# ifndef irqsafe_cpu_cmpxchg_1
+#  define irqsafe_cpu_cmpxchg_1(pcp, oval, nval)	irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef irqsafe_cpu_cmpxchg_2
+#  define irqsafe_cpu_cmpxchg_2(pcp, oval, nval)	irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef irqsafe_cpu_cmpxchg_4
+#  define irqsafe_cpu_cmpxchg_4(pcp, oval, nval)	irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef irqsafe_cpu_cmpxchg_8
+#  define irqsafe_cpu_cmpxchg_8(pcp, oval, nval)	irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# define irqsafe_cpu_cmpxchg(pcp, oval, nval)	\
+	__pcpu_size_call_return2(irqsafe_cpu_cmpxchg_, (pcp), oval, nval)
+#endif
+
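irqsafe_cpu_cmpxchg() widens the protection from preemption to local interrupts via local_irq_save()/local_irq_restore(), for per-cpu data that is also written from interrupt handlers on the same CPU. A sketch with illustrative names:

	#include <linux/types.h>
	#include <linux/percpu.h>

	/* Hypothetical token shared with this CPU's interrupt handlers. */
	static DEFINE_PER_CPU(int, irq_token);

	static bool grab_token(void)
	{
		/* Safe even if a local IRQ handler races on the same variable. */
		return irqsafe_cpu_cmpxchg(irq_token, 0, 1) == 0;
	}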
 #endif /* __LINUX_PERCPU_H */