diff options
Diffstat (limited to 'include/linux')
| -rw-r--r-- | include/linux/percpu.h | 331 |
1 files changed, 195 insertions, 136 deletions
diff --git a/include/linux/percpu.h b/include/linux/percpu.h
index e3817d2441b6..4e4d2afcc0c7 100644
--- a/include/linux/percpu.h
+++ b/include/linux/percpu.h
| @@ -243,6 +243,8 @@ do { \ | |||
| 243 | } while (0) | 243 | } while (0) |
| 244 | 244 | ||
| 245 | /* | 245 | /* |
| 246 | * this_cpu operations (C) 2008-2013 Christoph Lameter <cl@linux.com> | ||
| 247 | * | ||
| 246 | * Optimized manipulation for memory allocated through the per cpu | 248 | * Optimized manipulation for memory allocated through the per cpu |
| 247 | * allocator or for addresses of per cpu variables. | 249 | * allocator or for addresses of per cpu variables. |
| 248 | * | 250 | * |
| @@ -296,7 +298,7 @@ do { \ | |||
| 296 | do { \ | 298 | do { \ |
| 297 | unsigned long flags; \ | 299 | unsigned long flags; \ |
| 298 | raw_local_irq_save(flags); \ | 300 | raw_local_irq_save(flags); \ |
| 299 | *__this_cpu_ptr(&(pcp)) op val; \ | 301 | *raw_cpu_ptr(&(pcp)) op val; \ |
| 300 | raw_local_irq_restore(flags); \ | 302 | raw_local_irq_restore(flags); \ |
| 301 | } while (0) | 303 | } while (0) |
| 302 | 304 | ||
| @@ -381,8 +383,8 @@ do { \ | |||
| 381 | typeof(pcp) ret__; \ | 383 | typeof(pcp) ret__; \ |
| 382 | unsigned long flags; \ | 384 | unsigned long flags; \ |
| 383 | raw_local_irq_save(flags); \ | 385 | raw_local_irq_save(flags); \ |
| 384 | __this_cpu_add(pcp, val); \ | 386 | raw_cpu_add(pcp, val); \ |
| 385 | ret__ = __this_cpu_read(pcp); \ | 387 | ret__ = raw_cpu_read(pcp); \ |
| 386 | raw_local_irq_restore(flags); \ | 388 | raw_local_irq_restore(flags); \ |
| 387 | ret__; \ | 389 | ret__; \ |
| 388 | }) | 390 | }) |
| @@ -411,8 +413,8 @@ do { \ | |||
| 411 | ({ typeof(pcp) ret__; \ | 413 | ({ typeof(pcp) ret__; \ |
| 412 | unsigned long flags; \ | 414 | unsigned long flags; \ |
| 413 | raw_local_irq_save(flags); \ | 415 | raw_local_irq_save(flags); \ |
| 414 | ret__ = __this_cpu_read(pcp); \ | 416 | ret__ = raw_cpu_read(pcp); \ |
| 415 | __this_cpu_write(pcp, nval); \ | 417 | raw_cpu_write(pcp, nval); \ |
| 416 | raw_local_irq_restore(flags); \ | 418 | raw_local_irq_restore(flags); \ |
| 417 | ret__; \ | 419 | ret__; \ |
| 418 | }) | 420 | }) |
| @@ -439,9 +441,9 @@ do { \ | |||
| 439 | typeof(pcp) ret__; \ | 441 | typeof(pcp) ret__; \ |
| 440 | unsigned long flags; \ | 442 | unsigned long flags; \ |
| 441 | raw_local_irq_save(flags); \ | 443 | raw_local_irq_save(flags); \ |
| 442 | ret__ = __this_cpu_read(pcp); \ | 444 | ret__ = raw_cpu_read(pcp); \ |
| 443 | if (ret__ == (oval)) \ | 445 | if (ret__ == (oval)) \ |
| 444 | __this_cpu_write(pcp, nval); \ | 446 | raw_cpu_write(pcp, nval); \ |
| 445 | raw_local_irq_restore(flags); \ | 447 | raw_local_irq_restore(flags); \ |
| 446 | ret__; \ | 448 | ret__; \ |
| 447 | }) | 449 | }) |
| @@ -476,7 +478,7 @@ do { \ | |||
| 476 | int ret__; \ | 478 | int ret__; \ |
| 477 | unsigned long flags; \ | 479 | unsigned long flags; \ |
| 478 | raw_local_irq_save(flags); \ | 480 | raw_local_irq_save(flags); \ |
| 479 | ret__ = __this_cpu_generic_cmpxchg_double(pcp1, pcp2, \ | 481 | ret__ = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \ |
| 480 | oval1, oval2, nval1, nval2); \ | 482 | oval1, oval2, nval1, nval2); \ |
| 481 | raw_local_irq_restore(flags); \ | 483 | raw_local_irq_restore(flags); \ |
| 482 | ret__; \ | 484 | ret__; \ |
| @@ -504,12 +506,8 @@ do { \ | |||
| 504 | #endif | 506 | #endif |
| 505 | 507 | ||
| 506 | /* | 508 | /* |
| 507 | * Generic percpu operations for context that are safe from preemption/interrupts. | 509 | * Generic percpu operations for contexts where we do not want to do |
| 508 | * Either we do not care about races or the caller has the | 510 | * any checks for preemption. |
| 509 | * responsibility of handling preemption/interrupt issues. Arch code can still | ||
| 510 | * override these instructions since the arch per cpu code may be more | ||
| 511 | * efficient and may actually get race freeness for free (that is the | ||
| 512 | * case for x86 for example). | ||
| 513 | * | 511 | * |
| 514 | * If there is no other protection through preempt disable and/or | 512 | * If there is no other protection through preempt disable and/or |
| 515 | * disabling interrupts then one of these RMW operations can show unexpected | 513 | * disabling interrupts then one of these RMW operations can show unexpected |
| @@ -517,211 +515,272 @@ do { \ | |||
| 517 | * or an interrupt occurred and the same percpu variable was modified from | 515 | * or an interrupt occurred and the same percpu variable was modified from |
| 518 | * the interrupt context. | 516 | * the interrupt context. |
| 519 | */ | 517 | */ |
| 520 | #ifndef __this_cpu_read | 518 | #ifndef raw_cpu_read |
| 521 | # ifndef __this_cpu_read_1 | 519 | # ifndef raw_cpu_read_1 |
| 522 | # define __this_cpu_read_1(pcp) (*__this_cpu_ptr(&(pcp))) | 520 | # define raw_cpu_read_1(pcp) (*raw_cpu_ptr(&(pcp))) |
| 523 | # endif | 521 | # endif |
| 524 | # ifndef __this_cpu_read_2 | 522 | # ifndef raw_cpu_read_2 |
| 525 | # define __this_cpu_read_2(pcp) (*__this_cpu_ptr(&(pcp))) | 523 | # define raw_cpu_read_2(pcp) (*raw_cpu_ptr(&(pcp))) |
| 526 | # endif | 524 | # endif |
| 527 | # ifndef __this_cpu_read_4 | 525 | # ifndef raw_cpu_read_4 |
| 528 | # define __this_cpu_read_4(pcp) (*__this_cpu_ptr(&(pcp))) | 526 | # define raw_cpu_read_4(pcp) (*raw_cpu_ptr(&(pcp))) |
| 529 | # endif | 527 | # endif |
| 530 | # ifndef __this_cpu_read_8 | 528 | # ifndef raw_cpu_read_8 |
| 531 | # define __this_cpu_read_8(pcp) (*__this_cpu_ptr(&(pcp))) | 529 | # define raw_cpu_read_8(pcp) (*raw_cpu_ptr(&(pcp))) |
| 532 | # endif | 530 | # endif |
| 533 | # define __this_cpu_read(pcp) __pcpu_size_call_return(__this_cpu_read_, (pcp)) | 531 | # define raw_cpu_read(pcp) __pcpu_size_call_return(raw_cpu_read_, (pcp)) |
| 534 | #endif | 532 | #endif |
| 535 | 533 | ||
| 536 | #define __this_cpu_generic_to_op(pcp, val, op) \ | 534 | #define raw_cpu_generic_to_op(pcp, val, op) \ |
| 537 | do { \ | 535 | do { \ |
| 538 | *__this_cpu_ptr(&(pcp)) op val; \ | 536 | *raw_cpu_ptr(&(pcp)) op val; \ |
| 539 | } while (0) | 537 | } while (0) |
| 540 | 538 | ||
| 541 | #ifndef __this_cpu_write | 539 | |
| 542 | # ifndef __this_cpu_write_1 | 540 | #ifndef raw_cpu_write |
| 543 | # define __this_cpu_write_1(pcp, val) __this_cpu_generic_to_op((pcp), (val), =) | 541 | # ifndef raw_cpu_write_1 |
| 542 | # define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), =) | ||
| 544 | # endif | 543 | # endif |
| 545 | # ifndef __this_cpu_write_2 | 544 | # ifndef raw_cpu_write_2 |
| 546 | # define __this_cpu_write_2(pcp, val) __this_cpu_generic_to_op((pcp), (val), =) | 545 | # define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), =) |
| 547 | # endif | 546 | # endif |
| 548 | # ifndef __this_cpu_write_4 | 547 | # ifndef raw_cpu_write_4 |
| 549 | # define __this_cpu_write_4(pcp, val) __this_cpu_generic_to_op((pcp), (val), =) | 548 | # define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), =) |
| 550 | # endif | 549 | # endif |
| 551 | # ifndef __this_cpu_write_8 | 550 | # ifndef raw_cpu_write_8 |
| 552 | # define __this_cpu_write_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), =) | 551 | # define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), =) |
| 553 | # endif | 552 | # endif |
| 554 | # define __this_cpu_write(pcp, val) __pcpu_size_call(__this_cpu_write_, (pcp), (val)) | 553 | # define raw_cpu_write(pcp, val) __pcpu_size_call(raw_cpu_write_, (pcp), (val)) |
| 555 | #endif | 554 | #endif |
| 556 | 555 | ||
| 557 | #ifndef __this_cpu_add | 556 | #ifndef raw_cpu_add |
| 558 | # ifndef __this_cpu_add_1 | 557 | # ifndef raw_cpu_add_1 |
| 559 | # define __this_cpu_add_1(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=) | 558 | # define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=) |
| 560 | # endif | 559 | # endif |
| 561 | # ifndef __this_cpu_add_2 | 560 | # ifndef raw_cpu_add_2 |
| 562 | # define __this_cpu_add_2(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=) | 561 | # define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=) |
| 563 | # endif | 562 | # endif |
| 564 | # ifndef __this_cpu_add_4 | 563 | # ifndef raw_cpu_add_4 |
| 565 | # define __this_cpu_add_4(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=) | 564 | # define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=) |
| 566 | # endif | 565 | # endif |
| 567 | # ifndef __this_cpu_add_8 | 566 | # ifndef raw_cpu_add_8 |
| 568 | # define __this_cpu_add_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=) | 567 | # define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=) |
| 569 | # endif | 568 | # endif |
| 570 | # define __this_cpu_add(pcp, val) __pcpu_size_call(__this_cpu_add_, (pcp), (val)) | 569 | # define raw_cpu_add(pcp, val) __pcpu_size_call(raw_cpu_add_, (pcp), (val)) |
| 571 | #endif | 570 | #endif |
| 572 | 571 | ||
| 573 | #ifndef __this_cpu_sub | 572 | #ifndef raw_cpu_sub |
| 574 | # define __this_cpu_sub(pcp, val) __this_cpu_add((pcp), -(typeof(pcp))(val)) | 573 | # define raw_cpu_sub(pcp, val) raw_cpu_add((pcp), -(typeof(pcp))(val)) |
| 575 | #endif | 574 | #endif |
| 576 | 575 | ||
| 577 | #ifndef __this_cpu_inc | 576 | #ifndef raw_cpu_inc |
| 578 | # define __this_cpu_inc(pcp) __this_cpu_add((pcp), 1) | 577 | # define raw_cpu_inc(pcp) raw_cpu_add((pcp), 1) |
| 579 | #endif | 578 | #endif |
| 580 | 579 | ||
| 581 | #ifndef __this_cpu_dec | 580 | #ifndef raw_cpu_dec |
| 582 | # define __this_cpu_dec(pcp) __this_cpu_sub((pcp), 1) | 581 | # define raw_cpu_dec(pcp) raw_cpu_sub((pcp), 1) |
| 583 | #endif | 582 | #endif |
| 584 | 583 | ||
| 585 | #ifndef __this_cpu_and | 584 | #ifndef raw_cpu_and |
| 586 | # ifndef __this_cpu_and_1 | 585 | # ifndef raw_cpu_and_1 |
| 587 | # define __this_cpu_and_1(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=) | 586 | # define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=) |
| 588 | # endif | 587 | # endif |
| 589 | # ifndef __this_cpu_and_2 | 588 | # ifndef raw_cpu_and_2 |
| 590 | # define __this_cpu_and_2(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=) | 589 | # define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=) |
| 591 | # endif | 590 | # endif |
| 592 | # ifndef __this_cpu_and_4 | 591 | # ifndef raw_cpu_and_4 |
| 593 | # define __this_cpu_and_4(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=) | 592 | # define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=) |
| 594 | # endif | 593 | # endif |
| 595 | # ifndef __this_cpu_and_8 | 594 | # ifndef raw_cpu_and_8 |
| 596 | # define __this_cpu_and_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=) | 595 | # define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=) |
| 597 | # endif | 596 | # endif |
| 598 | # define __this_cpu_and(pcp, val) __pcpu_size_call(__this_cpu_and_, (pcp), (val)) | 597 | # define raw_cpu_and(pcp, val) __pcpu_size_call(raw_cpu_and_, (pcp), (val)) |
| 599 | #endif | 598 | #endif |
| 600 | 599 | ||
| 601 | #ifndef __this_cpu_or | 600 | #ifndef raw_cpu_or |
| 602 | # ifndef __this_cpu_or_1 | 601 | # ifndef raw_cpu_or_1 |
| 603 | # define __this_cpu_or_1(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=) | 602 | # define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=) |
| 604 | # endif | 603 | # endif |
| 605 | # ifndef __this_cpu_or_2 | 604 | # ifndef raw_cpu_or_2 |
| 606 | # define __this_cpu_or_2(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=) | 605 | # define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=) |
| 607 | # endif | 606 | # endif |
| 608 | # ifndef __this_cpu_or_4 | 607 | # ifndef raw_cpu_or_4 |
| 609 | # define __this_cpu_or_4(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=) | 608 | # define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=) |
| 610 | # endif | 609 | # endif |
| 611 | # ifndef __this_cpu_or_8 | 610 | # ifndef raw_cpu_or_8 |
| 612 | # define __this_cpu_or_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=) | 611 | # define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=) |
| 613 | # endif | 612 | # endif |
| 614 | # define __this_cpu_or(pcp, val) __pcpu_size_call(__this_cpu_or_, (pcp), (val)) | 613 | # define raw_cpu_or(pcp, val) __pcpu_size_call(raw_cpu_or_, (pcp), (val)) |
| 615 | #endif | 614 | #endif |
| 616 | 615 | ||
| 617 | #define __this_cpu_generic_add_return(pcp, val) \ | 616 | #define raw_cpu_generic_add_return(pcp, val) \ |
| 618 | ({ \ | 617 | ({ \ |
| 619 | __this_cpu_add(pcp, val); \ | 618 | raw_cpu_add(pcp, val); \ |
| 620 | __this_cpu_read(pcp); \ | 619 | raw_cpu_read(pcp); \ |
| 621 | }) | 620 | }) |
| 622 | 621 | ||
| 623 | #ifndef __this_cpu_add_return | 622 | #ifndef raw_cpu_add_return |
| 624 | # ifndef __this_cpu_add_return_1 | 623 | # ifndef raw_cpu_add_return_1 |
| 625 | # define __this_cpu_add_return_1(pcp, val) __this_cpu_generic_add_return(pcp, val) | 624 | # define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val) |
| 626 | # endif | 625 | # endif |
| 627 | # ifndef __this_cpu_add_return_2 | 626 | # ifndef raw_cpu_add_return_2 |
| 628 | # define __this_cpu_add_return_2(pcp, val) __this_cpu_generic_add_return(pcp, val) | 627 | # define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val) |
| 629 | # endif | 628 | # endif |
| 630 | # ifndef __this_cpu_add_return_4 | 629 | # ifndef raw_cpu_add_return_4 |
| 631 | # define __this_cpu_add_return_4(pcp, val) __this_cpu_generic_add_return(pcp, val) | 630 | # define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val) |
| 632 | # endif | 631 | # endif |
| 633 | # ifndef __this_cpu_add_return_8 | 632 | # ifndef raw_cpu_add_return_8 |
| 634 | # define __this_cpu_add_return_8(pcp, val) __this_cpu_generic_add_return(pcp, val) | 633 | # define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val) |
| 635 | # endif | 634 | # endif |
| 636 | # define __this_cpu_add_return(pcp, val) \ | 635 | # define raw_cpu_add_return(pcp, val) \ |
| 637 | __pcpu_size_call_return2(__this_cpu_add_return_, pcp, val) | 636 | __pcpu_size_call_return2(raw_cpu_add_return_, pcp, val) |
| 638 | #endif | 637 | #endif |
| 639 | 638 | ||
| 640 | #define __this_cpu_sub_return(pcp, val) __this_cpu_add_return(pcp, -(typeof(pcp))(val)) | 639 | #define raw_cpu_sub_return(pcp, val) raw_cpu_add_return(pcp, -(typeof(pcp))(val)) |
| 641 | #define __this_cpu_inc_return(pcp) __this_cpu_add_return(pcp, 1) | 640 | #define raw_cpu_inc_return(pcp) raw_cpu_add_return(pcp, 1) |
| 642 | #define __this_cpu_dec_return(pcp) __this_cpu_add_return(pcp, -1) | 641 | #define raw_cpu_dec_return(pcp) raw_cpu_add_return(pcp, -1) |
| 643 | 642 | ||
| 644 | #define __this_cpu_generic_xchg(pcp, nval) \ | 643 | #define raw_cpu_generic_xchg(pcp, nval) \ |
| 645 | ({ typeof(pcp) ret__; \ | 644 | ({ typeof(pcp) ret__; \ |
| 646 | ret__ = __this_cpu_read(pcp); \ | 645 | ret__ = raw_cpu_read(pcp); \ |
| 647 | __this_cpu_write(pcp, nval); \ | 646 | raw_cpu_write(pcp, nval); \ |
| 648 | ret__; \ | 647 | ret__; \ |
| 649 | }) | 648 | }) |
| 650 | 649 | ||
| 651 | #ifndef __this_cpu_xchg | 650 | #ifndef raw_cpu_xchg |
| 652 | # ifndef __this_cpu_xchg_1 | 651 | # ifndef raw_cpu_xchg_1 |
| 653 | # define __this_cpu_xchg_1(pcp, nval) __this_cpu_generic_xchg(pcp, nval) | 652 | # define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval) |
| 654 | # endif | 653 | # endif |
| 655 | # ifndef __this_cpu_xchg_2 | 654 | # ifndef raw_cpu_xchg_2 |
| 656 | # define __this_cpu_xchg_2(pcp, nval) __this_cpu_generic_xchg(pcp, nval) | 655 | # define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval) |
| 657 | # endif | 656 | # endif |
| 658 | # ifndef __this_cpu_xchg_4 | 657 | # ifndef raw_cpu_xchg_4 |
| 659 | # define __this_cpu_xchg_4(pcp, nval) __this_cpu_generic_xchg(pcp, nval) | 658 | # define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval) |
| 660 | # endif | 659 | # endif |
| 661 | # ifndef __this_cpu_xchg_8 | 660 | # ifndef raw_cpu_xchg_8 |
| 662 | # define __this_cpu_xchg_8(pcp, nval) __this_cpu_generic_xchg(pcp, nval) | 661 | # define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval) |
| 663 | # endif | 662 | # endif |
| 664 | # define __this_cpu_xchg(pcp, nval) \ | 663 | # define raw_cpu_xchg(pcp, nval) \ |
| 665 | __pcpu_size_call_return2(__this_cpu_xchg_, (pcp), nval) | 664 | __pcpu_size_call_return2(raw_cpu_xchg_, (pcp), nval) |
| 666 | #endif | 665 | #endif |
| 667 | 666 | ||
| 668 | #define __this_cpu_generic_cmpxchg(pcp, oval, nval) \ | 667 | #define raw_cpu_generic_cmpxchg(pcp, oval, nval) \ |
| 669 | ({ \ | 668 | ({ \ |
| 670 | typeof(pcp) ret__; \ | 669 | typeof(pcp) ret__; \ |
| 671 | ret__ = __this_cpu_read(pcp); \ | 670 | ret__ = raw_cpu_read(pcp); \ |
| 672 | if (ret__ == (oval)) \ | 671 | if (ret__ == (oval)) \ |
| 673 | __this_cpu_write(pcp, nval); \ | 672 | raw_cpu_write(pcp, nval); \ |
| 674 | ret__; \ | 673 | ret__; \ |
| 675 | }) | 674 | }) |
| 676 | 675 | ||
| 677 | #ifndef __this_cpu_cmpxchg | 676 | #ifndef raw_cpu_cmpxchg |
| 678 | # ifndef __this_cpu_cmpxchg_1 | 677 | # ifndef raw_cpu_cmpxchg_1 |
| 679 | # define __this_cpu_cmpxchg_1(pcp, oval, nval) __this_cpu_generic_cmpxchg(pcp, oval, nval) | 678 | # define raw_cpu_cmpxchg_1(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval) |
| 680 | # endif | 679 | # endif |
| 681 | # ifndef __this_cpu_cmpxchg_2 | 680 | # ifndef raw_cpu_cmpxchg_2 |
| 682 | # define __this_cpu_cmpxchg_2(pcp, oval, nval) __this_cpu_generic_cmpxchg(pcp, oval, nval) | 681 | # define raw_cpu_cmpxchg_2(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval) |
| 683 | # endif | 682 | # endif |
| 684 | # ifndef __this_cpu_cmpxchg_4 | 683 | # ifndef raw_cpu_cmpxchg_4 |
| 685 | # define __this_cpu_cmpxchg_4(pcp, oval, nval) __this_cpu_generic_cmpxchg(pcp, oval, nval) | 684 | # define raw_cpu_cmpxchg_4(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval) |
| 686 | # endif | 685 | # endif |
| 687 | # ifndef __this_cpu_cmpxchg_8 | 686 | # ifndef raw_cpu_cmpxchg_8 |
| 688 | # define __this_cpu_cmpxchg_8(pcp, oval, nval) __this_cpu_generic_cmpxchg(pcp, oval, nval) | 687 | # define raw_cpu_cmpxchg_8(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval) |
| 689 | # endif | 688 | # endif |
| 690 | # define __this_cpu_cmpxchg(pcp, oval, nval) \ | 689 | # define raw_cpu_cmpxchg(pcp, oval, nval) \ |
| 691 | __pcpu_size_call_return2(__this_cpu_cmpxchg_, pcp, oval, nval) | 690 | __pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval) |
| 692 | #endif | 691 | #endif |
| 693 | 692 | ||
| 694 | #define __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 693 | #define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 695 | ({ \ | 694 | ({ \ |
| 696 | int __ret = 0; \ | 695 | int __ret = 0; \ |
| 697 | if (__this_cpu_read(pcp1) == (oval1) && \ | 696 | if (raw_cpu_read(pcp1) == (oval1) && \ |
| 698 | __this_cpu_read(pcp2) == (oval2)) { \ | 697 | raw_cpu_read(pcp2) == (oval2)) { \ |
| 699 | __this_cpu_write(pcp1, (nval1)); \ | 698 | raw_cpu_write(pcp1, (nval1)); \ |
| 700 | __this_cpu_write(pcp2, (nval2)); \ | 699 | raw_cpu_write(pcp2, (nval2)); \ |
| 701 | __ret = 1; \ | 700 | __ret = 1; \ |
| 702 | } \ | 701 | } \ |
| 703 | (__ret); \ | 702 | (__ret); \ |
| 704 | }) | 703 | }) |
| 705 | 704 | ||
| 706 | #ifndef __this_cpu_cmpxchg_double | 705 | #ifndef raw_cpu_cmpxchg_double |
| 707 | # ifndef __this_cpu_cmpxchg_double_1 | 706 | # ifndef raw_cpu_cmpxchg_double_1 |
| 708 | # define __this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 707 | # define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 709 | __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) | 708 | raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) |
| 710 | # endif | 709 | # endif |
| 711 | # ifndef __this_cpu_cmpxchg_double_2 | 710 | # ifndef raw_cpu_cmpxchg_double_2 |
| 712 | # define __this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 711 | # define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 713 | __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) | 712 | raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) |
| 714 | # endif | 713 | # endif |
| 715 | # ifndef __this_cpu_cmpxchg_double_4 | 714 | # ifndef raw_cpu_cmpxchg_double_4 |
| 716 | # define __this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 715 | # define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 717 | __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) | 716 | raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) |
| 718 | # endif | 717 | # endif |
| 719 | # ifndef __this_cpu_cmpxchg_double_8 | 718 | # ifndef raw_cpu_cmpxchg_double_8 |
| 720 | # define __this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 719 | # define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 721 | __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) | 720 | raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) |
| 722 | # endif | 721 | # endif |
| 722 | # define raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | ||
| 723 | __pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2)) | ||
| 724 | #endif | ||
| 725 | |||
| 726 | /* | ||
| 727 | * Generic percpu operations for context that are safe from preemption/interrupts. | ||
| 728 | * Checks will be added here soon. | ||
| 729 | */ | ||
| 730 | #ifndef __this_cpu_read | ||
| 731 | # define __this_cpu_read(pcp) __pcpu_size_call_return(raw_cpu_read_, (pcp)) | ||
| 732 | #endif | ||
| 733 | |||
| 734 | #ifndef __this_cpu_write | ||
| 735 | # define __this_cpu_write(pcp, val) __pcpu_size_call(raw_cpu_write_, (pcp), (val)) | ||
| 736 | #endif | ||
| 737 | |||
| 738 | #ifndef __this_cpu_add | ||
| 739 | # define __this_cpu_add(pcp, val) __pcpu_size_call(raw_cpu_add_, (pcp), (val)) | ||
| 740 | #endif | ||
| 741 | |||
| 742 | #ifndef __this_cpu_sub | ||
| 743 | # define __this_cpu_sub(pcp, val) __this_cpu_add((pcp), -(typeof(pcp))(val)) | ||
| 744 | #endif | ||
| 745 | |||
| 746 | #ifndef __this_cpu_inc | ||
| 747 | # define __this_cpu_inc(pcp) __this_cpu_add((pcp), 1) | ||
| 748 | #endif | ||
| 749 | |||
| 750 | #ifndef __this_cpu_dec | ||
| 751 | # define __this_cpu_dec(pcp) __this_cpu_sub((pcp), 1) | ||
| 752 | #endif | ||
| 753 | |||
| 754 | #ifndef __this_cpu_and | ||
| 755 | # define __this_cpu_and(pcp, val) __pcpu_size_call(raw_cpu_and_, (pcp), (val)) | ||
| 756 | #endif | ||
| 757 | |||
| 758 | #ifndef __this_cpu_or | ||
| 759 | # define __this_cpu_or(pcp, val) __pcpu_size_call(raw_cpu_or_, (pcp), (val)) | ||
| 760 | #endif | ||
| 761 | |||
| 762 | #ifndef __this_cpu_add_return | ||
| 763 | # define __this_cpu_add_return(pcp, val) \ | ||
| 764 | __pcpu_size_call_return2(raw_cpu_add_return_, pcp, val) | ||
| 765 | #endif | ||
| 766 | |||
| 767 | #define __this_cpu_sub_return(pcp, val) __this_cpu_add_return(pcp, -(typeof(pcp))(val)) | ||
| 768 | #define __this_cpu_inc_return(pcp) __this_cpu_add_return(pcp, 1) | ||
| 769 | #define __this_cpu_dec_return(pcp) __this_cpu_add_return(pcp, -1) | ||
| 770 | |||
| 771 | #ifndef __this_cpu_xchg | ||
| 772 | # define __this_cpu_xchg(pcp, nval) \ | ||
| 773 | __pcpu_size_call_return2(raw_cpu_xchg_, (pcp), nval) | ||
| 774 | #endif | ||
| 775 | |||
| 776 | #ifndef __this_cpu_cmpxchg | ||
| 777 | # define __this_cpu_cmpxchg(pcp, oval, nval) \ | ||
| 778 | __pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval) | ||
| 779 | #endif | ||
| 780 | |||
| 781 | #ifndef __this_cpu_cmpxchg_double | ||
| 723 | # define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 782 | # define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 724 | __pcpu_double_call_return_bool(__this_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2)) | 783 | __pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2)) |
| 725 | #endif | 784 | #endif |
| 726 | 785 | ||
| 727 | #endif /* __LINUX_PERCPU_H */ | 786 | #endif /* __LINUX_PERCPU_H */ |
