author     Tejun Heo <tj@kernel.org>  2010-12-18 09:54:36 -0500
committer  Tejun Heo <tj@kernel.org>  2010-12-18 09:54:36 -0500
commit     05c2d088d0eb904e50460b04d77324c26cef4637
tree       1dab544e05f9021a02e76adcbdb5edf4b31c7d62 /include
parent     3ea9f6833c8f865a221b59ce37d7650dcf3b3e17
parent     8270137a0d50507a5b40f880db636527045b8466
Merge branch 'this_cpu_ops' into for-2.6.38
Diffstat (limited to 'include')
 include/linux/percpu.h | 194
 1 file changed, 163 insertions(+), 31 deletions(-)
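This merge brings the this_cpu_ops work into for-2.6.38. Within include/linux/percpu.h it adds this_cpu_add_return() and its sub_return/inc_return/dec_return wrappers, this_cpu_xchg() and this_cpu_cmpxchg(), their preemption-unsafe __this_cpu_* counterparts, and irqsafe_cpu_cmpxchg(); each comes with a generic fallback used for any operand size an architecture does not override. Brief usage sketches follow the relevant hunks below.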
diff --git a/include/linux/percpu.h b/include/linux/percpu.h
index 4d593defc47d..27c3c6fcfad3 100644
--- a/include/linux/percpu.h
+++ b/include/linux/percpu.h
@@ -417,6 +417,89 @@ do { \
 # define this_cpu_xor(pcp, val)	__pcpu_size_call(this_cpu_or_, (pcp), (val))
 #endif
 
+#define _this_cpu_generic_add_return(pcp, val)				\
+({									\
+	typeof(pcp) ret__;						\
+	preempt_disable();						\
+	__this_cpu_add(pcp, val);					\
+	ret__ = __this_cpu_read(pcp);					\
+	preempt_enable();						\
+	ret__;								\
+})
+
+#ifndef this_cpu_add_return
+# ifndef this_cpu_add_return_1
+#  define this_cpu_add_return_1(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_2
+#  define this_cpu_add_return_2(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_4
+#  define this_cpu_add_return_4(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# ifndef this_cpu_add_return_8
+#  define this_cpu_add_return_8(pcp, val)	_this_cpu_generic_add_return(pcp, val)
+# endif
+# define this_cpu_add_return(pcp, val)	__pcpu_size_call_return2(this_cpu_add_return_, pcp, val)
+#endif
+
+#define this_cpu_sub_return(pcp, val)	this_cpu_add_return(pcp, -(val))
+#define this_cpu_inc_return(pcp)	this_cpu_add_return(pcp, 1)
+#define this_cpu_dec_return(pcp)	this_cpu_add_return(pcp, -1)
+
+#define _this_cpu_generic_xchg(pcp, nval)				\
+({	typeof(pcp) ret__;						\
+	preempt_disable();						\
+	ret__ = __this_cpu_read(pcp);					\
+	__this_cpu_write(pcp, nval);					\
+	preempt_enable();						\
+	ret__;								\
+})
+
+#ifndef this_cpu_xchg
+# ifndef this_cpu_xchg_1
+#  define this_cpu_xchg_1(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_2
+#  define this_cpu_xchg_2(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_4
+#  define this_cpu_xchg_4(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef this_cpu_xchg_8
+#  define this_cpu_xchg_8(pcp, nval)	_this_cpu_generic_xchg(pcp, nval)
+# endif
+# define this_cpu_xchg(pcp, nval)					\
+	__pcpu_size_call_return2(this_cpu_xchg_, (pcp), nval)
+#endif
+
+#define _this_cpu_generic_cmpxchg(pcp, oval, nval)			\
+({	typeof(pcp) ret__;						\
+	preempt_disable();						\
+	ret__ = __this_cpu_read(pcp);					\
+	if (ret__ == (oval))						\
+		__this_cpu_write(pcp, nval);				\
+	preempt_enable();						\
+	ret__;								\
+})
+
+#ifndef this_cpu_cmpxchg
+# ifndef this_cpu_cmpxchg_1
+#  define this_cpu_cmpxchg_1(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_2
+#  define this_cpu_cmpxchg_2(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_4
+#  define this_cpu_cmpxchg_4(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef this_cpu_cmpxchg_8
+#  define this_cpu_cmpxchg_8(pcp, oval, nval)	_this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# define this_cpu_cmpxchg(pcp, oval, nval)	\
+	__pcpu_size_call_return2(this_cpu_cmpxchg_, pcp, oval, nval)
+#endif
+
 /*
  * Generic percpu operations that do not require preemption handling.
  * Either we do not care about races or the caller has the
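The generic fallbacks above bracket a plain __this_cpu_* read-modify-write with preempt_disable()/preempt_enable(), so the result is atomic with respect to preemption on the local CPU, though not against other CPUs. A minimal sketch of a caller, assuming a hypothetical per-cpu counter (ex_event_count and ex_count_event are illustrative names, not part of the patch):

#include <linux/kernel.h>
#include <linux/percpu.h>

static DEFINE_PER_CPU(int, ex_event_count);	/* hypothetical counter */

static void ex_count_event(void)
{
	/*
	 * Add and read back the new value in one preempt-safe step;
	 * no explicit preempt_disable()/preempt_enable() at the call site.
	 */
	if (this_cpu_add_return(ex_event_count, 1) % 64 == 0)
		pr_debug("ex: another 64 events on this cpu\n");
}

On an architecture that supplies this_cpu_add_return_4() this compiles down to the optimized per-cpu instruction; everywhere else the generic version above is used.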
@@ -544,36 +627,6 @@ do { \
 # define __this_cpu_xor(pcp, val)	__pcpu_size_call(__this_cpu_xor_, (pcp), (val))
 #endif
 
-#define _this_cpu_generic_add_return(pcp, val)				\
-({									\
-	typeof(pcp) ret__;						\
-	preempt_disable();						\
-	__this_cpu_add(pcp, val);					\
-	ret__ = __this_cpu_read(pcp);					\
-	preempt_enable();						\
-	ret__;								\
-})
-
-#ifndef this_cpu_add_return
-# ifndef this_cpu_add_return_1
-#  define this_cpu_add_return_1(pcp, val)	_this_cpu_generic_add_return(pcp, val)
-# endif
-# ifndef this_cpu_add_return_2
-#  define this_cpu_add_return_2(pcp, val)	_this_cpu_generic_add_return(pcp, val)
-# endif
-# ifndef this_cpu_add_return_4
-#  define this_cpu_add_return_4(pcp, val)	_this_cpu_generic_add_return(pcp, val)
-# endif
-# ifndef this_cpu_add_return_8
-#  define this_cpu_add_return_8(pcp, val)	_this_cpu_generic_add_return(pcp, val)
-# endif
-# define this_cpu_add_return(pcp, val)	__pcpu_size_call_return2(this_cpu_add_return_, pcp, val)
-#endif
-
-#define this_cpu_sub_return(pcp, val)	this_cpu_add_return(pcp, -(val))
-#define this_cpu_inc_return(pcp)	this_cpu_add_return(pcp, 1)
-#define this_cpu_dec_return(pcp)	this_cpu_add_return(pcp, -1)
-
 #define __this_cpu_generic_add_return(pcp, val)			\
 ({									\
 	__this_cpu_add(pcp, val);					\
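The block removed here is the same _this_cpu_generic_add_return()/this_cpu_add_return() family that the first hunk adds at line 420: the merge keeps a single copy, placed ahead of the __this_cpu_* section rather than inside it.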
@@ -600,11 +653,61 @@ do { \
 #define __this_cpu_inc_return(pcp)	this_cpu_add_return(pcp, 1)
 #define __this_cpu_dec_return(pcp)	this_cpu_add_return(pcp, -1)
 
+#define __this_cpu_generic_xchg(pcp, nval)				\
+({	typeof(pcp) ret__;						\
+	ret__ = __this_cpu_read(pcp);					\
+	__this_cpu_write(pcp, nval);					\
+	ret__;								\
+})
+
+#ifndef __this_cpu_xchg
+# ifndef __this_cpu_xchg_1
+#  define __this_cpu_xchg_1(pcp, nval)	__this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef __this_cpu_xchg_2
+#  define __this_cpu_xchg_2(pcp, nval)	__this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef __this_cpu_xchg_4
+#  define __this_cpu_xchg_4(pcp, nval)	__this_cpu_generic_xchg(pcp, nval)
+# endif
+# ifndef __this_cpu_xchg_8
+#  define __this_cpu_xchg_8(pcp, nval)	__this_cpu_generic_xchg(pcp, nval)
+# endif
+# define __this_cpu_xchg(pcp, nval)					\
+	__pcpu_size_call_return2(__this_cpu_xchg_, (pcp), nval)
+#endif
+
+#define __this_cpu_generic_cmpxchg(pcp, oval, nval)			\
+({									\
+	typeof(pcp) ret__;						\
+	ret__ = __this_cpu_read(pcp);					\
+	if (ret__ == (oval))						\
+		__this_cpu_write(pcp, nval);				\
+	ret__;								\
+})
+
+#ifndef __this_cpu_cmpxchg
+# ifndef __this_cpu_cmpxchg_1
+#  define __this_cpu_cmpxchg_1(pcp, oval, nval)	__this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef __this_cpu_cmpxchg_2
+#  define __this_cpu_cmpxchg_2(pcp, oval, nval)	__this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef __this_cpu_cmpxchg_4
+#  define __this_cpu_cmpxchg_4(pcp, oval, nval)	__this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef __this_cpu_cmpxchg_8
+#  define __this_cpu_cmpxchg_8(pcp, oval, nval)	__this_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# define __this_cpu_cmpxchg(pcp, oval, nval)	\
+	__pcpu_size_call_return2(__this_cpu_cmpxchg_, pcp, oval, nval)
+#endif
+
 /*
  * IRQ safe versions of the per cpu RMW operations. Note that these operations
  * are *not* safe against modification of the same variable from another
  * processors (which one gets when using regular atomic operations)
-. They are guaranteed to be atomic vs. local interrupts and
+ * They are guaranteed to be atomic vs. local interrupts and
  * preemption only.
  */
 #define irqsafe_cpu_generic_to_op(pcp, val, op)			\
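The __this_cpu_* forms omit the preemption bracketing entirely, so they are for callers that already run non-preemptibly. A sketch, with illustrative names and an assumed get_cpu()-style caller, of draining a per-cpu cache slot from such a context:

#include <linux/percpu.h>

struct ex_obj {
	int data;
};

static DEFINE_PER_CPU(struct ex_obj *, ex_cache);	/* hypothetical slot */

/* Caller must already have preemption disabled (e.g. via get_cpu()). */
static struct ex_obj *ex_take_cached(void)
{
	/* Swap NULL into this CPU's slot and return whatever was there. */
	return __this_cpu_xchg(ex_cache, NULL);
}

If the slot can also be refilled from preemptible context, the this_cpu_xchg() form from the first hunk performs the preemption handling itself.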
@@ -691,4 +794,33 @@ do { \
 # define irqsafe_cpu_xor(pcp, val) __pcpu_size_call(irqsafe_cpu_xor_, (val))
 #endif
 
+#define irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)			\
+({									\
+	typeof(pcp) ret__;						\
+	unsigned long flags;						\
+	local_irq_save(flags);						\
+	ret__ = __this_cpu_read(pcp);					\
+	if (ret__ == (oval))						\
+		__this_cpu_write(pcp, nval);				\
+	local_irq_restore(flags);					\
+	ret__;								\
+})
+
+#ifndef irqsafe_cpu_cmpxchg
+# ifndef irqsafe_cpu_cmpxchg_1
+#  define irqsafe_cpu_cmpxchg_1(pcp, oval, nval)	irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef irqsafe_cpu_cmpxchg_2
+#  define irqsafe_cpu_cmpxchg_2(pcp, oval, nval)	irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef irqsafe_cpu_cmpxchg_4
+#  define irqsafe_cpu_cmpxchg_4(pcp, oval, nval)	irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# ifndef irqsafe_cpu_cmpxchg_8
+#  define irqsafe_cpu_cmpxchg_8(pcp, oval, nval)	irqsafe_cpu_generic_cmpxchg(pcp, oval, nval)
+# endif
+# define irqsafe_cpu_cmpxchg(pcp, oval, nval)		\
+	__pcpu_size_call_return2(irqsafe_cpu_cmpxchg_, (pcp), oval, nval)
+#endif
+
 #endif /* __LINUX_PERCPU_H */
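irqsafe_cpu_generic_cmpxchg() wraps the compare-and-store in local_irq_save()/local_irq_restore(), so it is atomic against local interrupts and preemption but, as the comment in the previous hunk notes, still not against other CPUs. A sketch under that constraint, using illustrative names, of a per-cpu token that the local interrupt handler may also try to claim:

#include <linux/types.h>
#include <linux/percpu.h>

static DEFINE_PER_CPU(int, ex_token);	/* hypothetical: 0 = free, 1 = claimed */

/*
 * Try to claim the token; safe even if the local IRQ handler races to
 * claim it.  The token must only ever be touched from its own CPU.
 */
static bool ex_claim_token(void)
{
	return irqsafe_cpu_cmpxchg(ex_token, 0, 1) == 0;
}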