diff options
Diffstat (limited to 'include/linux/percpu.h')
| -rw-r--r-- | include/linux/percpu.h | 350 |
1 files changed, 214 insertions, 136 deletions
diff --git a/include/linux/percpu.h b/include/linux/percpu.h index e3817d2441b6..e7a0b95ed527 100644 --- a/include/linux/percpu.h +++ b/include/linux/percpu.h | |||
| @@ -173,6 +173,12 @@ extern phys_addr_t per_cpu_ptr_to_phys(void *addr); | |||
| 173 | 173 | ||
| 174 | extern void __bad_size_call_parameter(void); | 174 | extern void __bad_size_call_parameter(void); |
| 175 | 175 | ||
| 176 | #ifdef CONFIG_DEBUG_PREEMPT | ||
| 177 | extern void __this_cpu_preempt_check(const char *op); | ||
| 178 | #else | ||
| 179 | static inline void __this_cpu_preempt_check(const char *op) { } | ||
| 180 | #endif | ||
| 181 | |||
| 176 | #define __pcpu_size_call_return(stem, variable) \ | 182 | #define __pcpu_size_call_return(stem, variable) \ |
| 177 | ({ typeof(variable) pscr_ret__; \ | 183 | ({ typeof(variable) pscr_ret__; \ |
| 178 | __verify_pcpu_ptr(&(variable)); \ | 184 | __verify_pcpu_ptr(&(variable)); \ |
| @@ -243,6 +249,8 @@ do { \ | |||
| 243 | } while (0) | 249 | } while (0) |
| 244 | 250 | ||
| 245 | /* | 251 | /* |
| 252 | * this_cpu operations (C) 2008-2013 Christoph Lameter <cl@linux.com> | ||
| 253 | * | ||
| 246 | * Optimized manipulation for memory allocated through the per cpu | 254 | * Optimized manipulation for memory allocated through the per cpu |
| 247 | * allocator or for addresses of per cpu variables. | 255 | * allocator or for addresses of per cpu variables. |
| 248 | * | 256 | * |
| @@ -296,7 +304,7 @@ do { \ | |||
| 296 | do { \ | 304 | do { \ |
| 297 | unsigned long flags; \ | 305 | unsigned long flags; \ |
| 298 | raw_local_irq_save(flags); \ | 306 | raw_local_irq_save(flags); \ |
| 299 | *__this_cpu_ptr(&(pcp)) op val; \ | 307 | *raw_cpu_ptr(&(pcp)) op val; \ |
| 300 | raw_local_irq_restore(flags); \ | 308 | raw_local_irq_restore(flags); \ |
| 301 | } while (0) | 309 | } while (0) |
| 302 | 310 | ||
| @@ -381,8 +389,8 @@ do { \ | |||
| 381 | typeof(pcp) ret__; \ | 389 | typeof(pcp) ret__; \ |
| 382 | unsigned long flags; \ | 390 | unsigned long flags; \ |
| 383 | raw_local_irq_save(flags); \ | 391 | raw_local_irq_save(flags); \ |
| 384 | __this_cpu_add(pcp, val); \ | 392 | raw_cpu_add(pcp, val); \ |
| 385 | ret__ = __this_cpu_read(pcp); \ | 393 | ret__ = raw_cpu_read(pcp); \ |
| 386 | raw_local_irq_restore(flags); \ | 394 | raw_local_irq_restore(flags); \ |
| 387 | ret__; \ | 395 | ret__; \ |
| 388 | }) | 396 | }) |
| @@ -411,8 +419,8 @@ do { \ | |||
| 411 | ({ typeof(pcp) ret__; \ | 419 | ({ typeof(pcp) ret__; \ |
| 412 | unsigned long flags; \ | 420 | unsigned long flags; \ |
| 413 | raw_local_irq_save(flags); \ | 421 | raw_local_irq_save(flags); \ |
| 414 | ret__ = __this_cpu_read(pcp); \ | 422 | ret__ = raw_cpu_read(pcp); \ |
| 415 | __this_cpu_write(pcp, nval); \ | 423 | raw_cpu_write(pcp, nval); \ |
| 416 | raw_local_irq_restore(flags); \ | 424 | raw_local_irq_restore(flags); \ |
| 417 | ret__; \ | 425 | ret__; \ |
| 418 | }) | 426 | }) |
| @@ -439,9 +447,9 @@ do { \ | |||
| 439 | typeof(pcp) ret__; \ | 447 | typeof(pcp) ret__; \ |
| 440 | unsigned long flags; \ | 448 | unsigned long flags; \ |
| 441 | raw_local_irq_save(flags); \ | 449 | raw_local_irq_save(flags); \ |
| 442 | ret__ = __this_cpu_read(pcp); \ | 450 | ret__ = raw_cpu_read(pcp); \ |
| 443 | if (ret__ == (oval)) \ | 451 | if (ret__ == (oval)) \ |
| 444 | __this_cpu_write(pcp, nval); \ | 452 | raw_cpu_write(pcp, nval); \ |
| 445 | raw_local_irq_restore(flags); \ | 453 | raw_local_irq_restore(flags); \ |
| 446 | ret__; \ | 454 | ret__; \ |
| 447 | }) | 455 | }) |
| @@ -476,7 +484,7 @@ do { \ | |||
| 476 | int ret__; \ | 484 | int ret__; \ |
| 477 | unsigned long flags; \ | 485 | unsigned long flags; \ |
| 478 | raw_local_irq_save(flags); \ | 486 | raw_local_irq_save(flags); \ |
| 479 | ret__ = __this_cpu_generic_cmpxchg_double(pcp1, pcp2, \ | 487 | ret__ = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \ |
| 480 | oval1, oval2, nval1, nval2); \ | 488 | oval1, oval2, nval1, nval2); \ |
| 481 | raw_local_irq_restore(flags); \ | 489 | raw_local_irq_restore(flags); \ |
| 482 | ret__; \ | 490 | ret__; \ |
| @@ -504,12 +512,8 @@ do { \ | |||
| 504 | #endif | 512 | #endif |
| 505 | 513 | ||
| 506 | /* | 514 | /* |
| 507 | * Generic percpu operations for context that are safe from preemption/interrupts. | 515 | * Generic percpu operations for contexts where we do not want to do |
| 508 | * Either we do not care about races or the caller has the | 516 | * any checks for preemption. |
| 509 | * responsibility of handling preemption/interrupt issues. Arch code can still | ||
| 510 | * override these instructions since the arch per cpu code may be more | ||
| 511 | * efficient and may actually get race freeness for free (that is the | ||
| 512 | * case for x86 for example). | ||
| 513 | * | 517 | * |
| 514 | * If there is no other protection through preempt disable and/or | 518 | * If there is no other protection through preempt disable and/or |
| 515 | * disabling interrupts then one of these RMW operations can show unexpected | 519 | * disabling interrupts then one of these RMW operations can show unexpected |
| @@ -517,211 +521,285 @@ do { \ | |||
| 517 | * or an interrupt occurred and the same percpu variable was modified from | 521 | * or an interrupt occurred and the same percpu variable was modified from |
| 518 | * the interrupt context. | 522 | * the interrupt context. |
| 519 | */ | 523 | */ |
| 520 | #ifndef __this_cpu_read | 524 | #ifndef raw_cpu_read |
| 521 | # ifndef __this_cpu_read_1 | 525 | # ifndef raw_cpu_read_1 |
| 522 | # define __this_cpu_read_1(pcp) (*__this_cpu_ptr(&(pcp))) | 526 | # define raw_cpu_read_1(pcp) (*raw_cpu_ptr(&(pcp))) |
| 523 | # endif | 527 | # endif |
| 524 | # ifndef __this_cpu_read_2 | 528 | # ifndef raw_cpu_read_2 |
| 525 | # define __this_cpu_read_2(pcp) (*__this_cpu_ptr(&(pcp))) | 529 | # define raw_cpu_read_2(pcp) (*raw_cpu_ptr(&(pcp))) |
| 526 | # endif | 530 | # endif |
| 527 | # ifndef __this_cpu_read_4 | 531 | # ifndef raw_cpu_read_4 |
| 528 | # define __this_cpu_read_4(pcp) (*__this_cpu_ptr(&(pcp))) | 532 | # define raw_cpu_read_4(pcp) (*raw_cpu_ptr(&(pcp))) |
| 529 | # endif | 533 | # endif |
| 530 | # ifndef __this_cpu_read_8 | 534 | # ifndef raw_cpu_read_8 |
| 531 | # define __this_cpu_read_8(pcp) (*__this_cpu_ptr(&(pcp))) | 535 | # define raw_cpu_read_8(pcp) (*raw_cpu_ptr(&(pcp))) |
| 532 | # endif | 536 | # endif |
| 533 | # define __this_cpu_read(pcp) __pcpu_size_call_return(__this_cpu_read_, (pcp)) | 537 | # define raw_cpu_read(pcp) __pcpu_size_call_return(raw_cpu_read_, (pcp)) |
| 534 | #endif | 538 | #endif |
| 535 | 539 | ||
| 536 | #define __this_cpu_generic_to_op(pcp, val, op) \ | 540 | #define raw_cpu_generic_to_op(pcp, val, op) \ |
| 537 | do { \ | 541 | do { \ |
| 538 | *__this_cpu_ptr(&(pcp)) op val; \ | 542 | *raw_cpu_ptr(&(pcp)) op val; \ |
| 539 | } while (0) | 543 | } while (0) |
| 540 | 544 | ||
| 541 | #ifndef __this_cpu_write | 545 | |
| 542 | # ifndef __this_cpu_write_1 | 546 | #ifndef raw_cpu_write |
| 543 | # define __this_cpu_write_1(pcp, val) __this_cpu_generic_to_op((pcp), (val), =) | 547 | # ifndef raw_cpu_write_1 |
| 548 | # define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), =) | ||
| 544 | # endif | 549 | # endif |
| 545 | # ifndef __this_cpu_write_2 | 550 | # ifndef raw_cpu_write_2 |
| 546 | # define __this_cpu_write_2(pcp, val) __this_cpu_generic_to_op((pcp), (val), =) | 551 | # define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), =) |
| 547 | # endif | 552 | # endif |
| 548 | # ifndef __this_cpu_write_4 | 553 | # ifndef raw_cpu_write_4 |
| 549 | # define __this_cpu_write_4(pcp, val) __this_cpu_generic_to_op((pcp), (val), =) | 554 | # define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), =) |
| 550 | # endif | 555 | # endif |
| 551 | # ifndef __this_cpu_write_8 | 556 | # ifndef raw_cpu_write_8 |
| 552 | # define __this_cpu_write_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), =) | 557 | # define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), =) |
| 553 | # endif | 558 | # endif |
| 554 | # define __this_cpu_write(pcp, val) __pcpu_size_call(__this_cpu_write_, (pcp), (val)) | 559 | # define raw_cpu_write(pcp, val) __pcpu_size_call(raw_cpu_write_, (pcp), (val)) |
| 555 | #endif | 560 | #endif |
| 556 | 561 | ||
| 557 | #ifndef __this_cpu_add | 562 | #ifndef raw_cpu_add |
| 558 | # ifndef __this_cpu_add_1 | 563 | # ifndef raw_cpu_add_1 |
| 559 | # define __this_cpu_add_1(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=) | 564 | # define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=) |
| 560 | # endif | 565 | # endif |
| 561 | # ifndef __this_cpu_add_2 | 566 | # ifndef raw_cpu_add_2 |
| 562 | # define __this_cpu_add_2(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=) | 567 | # define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=) |
| 563 | # endif | 568 | # endif |
| 564 | # ifndef __this_cpu_add_4 | 569 | # ifndef raw_cpu_add_4 |
| 565 | # define __this_cpu_add_4(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=) | 570 | # define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=) |
| 566 | # endif | 571 | # endif |
| 567 | # ifndef __this_cpu_add_8 | 572 | # ifndef raw_cpu_add_8 |
| 568 | # define __this_cpu_add_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), +=) | 573 | # define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), +=) |
| 569 | # endif | 574 | # endif |
| 570 | # define __this_cpu_add(pcp, val) __pcpu_size_call(__this_cpu_add_, (pcp), (val)) | 575 | # define raw_cpu_add(pcp, val) __pcpu_size_call(raw_cpu_add_, (pcp), (val)) |
| 571 | #endif | 576 | #endif |
| 572 | 577 | ||
| 573 | #ifndef __this_cpu_sub | 578 | #ifndef raw_cpu_sub |
| 574 | # define __this_cpu_sub(pcp, val) __this_cpu_add((pcp), -(typeof(pcp))(val)) | 579 | # define raw_cpu_sub(pcp, val) raw_cpu_add((pcp), -(val)) |
| 575 | #endif | 580 | #endif |
| 576 | 581 | ||
| 577 | #ifndef __this_cpu_inc | 582 | #ifndef raw_cpu_inc |
| 578 | # define __this_cpu_inc(pcp) __this_cpu_add((pcp), 1) | 583 | # define raw_cpu_inc(pcp) raw_cpu_add((pcp), 1) |
| 579 | #endif | 584 | #endif |
| 580 | 585 | ||
| 581 | #ifndef __this_cpu_dec | 586 | #ifndef raw_cpu_dec |
| 582 | # define __this_cpu_dec(pcp) __this_cpu_sub((pcp), 1) | 587 | # define raw_cpu_dec(pcp) raw_cpu_sub((pcp), 1) |
| 583 | #endif | 588 | #endif |
| 584 | 589 | ||
| 585 | #ifndef __this_cpu_and | 590 | #ifndef raw_cpu_and |
| 586 | # ifndef __this_cpu_and_1 | 591 | # ifndef raw_cpu_and_1 |
| 587 | # define __this_cpu_and_1(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=) | 592 | # define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=) |
| 588 | # endif | 593 | # endif |
| 589 | # ifndef __this_cpu_and_2 | 594 | # ifndef raw_cpu_and_2 |
| 590 | # define __this_cpu_and_2(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=) | 595 | # define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=) |
| 591 | # endif | 596 | # endif |
| 592 | # ifndef __this_cpu_and_4 | 597 | # ifndef raw_cpu_and_4 |
| 593 | # define __this_cpu_and_4(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=) | 598 | # define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=) |
| 594 | # endif | 599 | # endif |
| 595 | # ifndef __this_cpu_and_8 | 600 | # ifndef raw_cpu_and_8 |
| 596 | # define __this_cpu_and_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), &=) | 601 | # define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), &=) |
| 597 | # endif | 602 | # endif |
| 598 | # define __this_cpu_and(pcp, val) __pcpu_size_call(__this_cpu_and_, (pcp), (val)) | 603 | # define raw_cpu_and(pcp, val) __pcpu_size_call(raw_cpu_and_, (pcp), (val)) |
| 599 | #endif | 604 | #endif |
| 600 | 605 | ||
| 601 | #ifndef __this_cpu_or | 606 | #ifndef raw_cpu_or |
| 602 | # ifndef __this_cpu_or_1 | 607 | # ifndef raw_cpu_or_1 |
| 603 | # define __this_cpu_or_1(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=) | 608 | # define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=) |
| 604 | # endif | 609 | # endif |
| 605 | # ifndef __this_cpu_or_2 | 610 | # ifndef raw_cpu_or_2 |
| 606 | # define __this_cpu_or_2(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=) | 611 | # define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=) |
| 607 | # endif | 612 | # endif |
| 608 | # ifndef __this_cpu_or_4 | 613 | # ifndef raw_cpu_or_4 |
| 609 | # define __this_cpu_or_4(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=) | 614 | # define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=) |
| 610 | # endif | 615 | # endif |
| 611 | # ifndef __this_cpu_or_8 | 616 | # ifndef raw_cpu_or_8 |
| 612 | # define __this_cpu_or_8(pcp, val) __this_cpu_generic_to_op((pcp), (val), |=) | 617 | # define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op((pcp), (val), |=) |
| 613 | # endif | 618 | # endif |
| 614 | # define __this_cpu_or(pcp, val) __pcpu_size_call(__this_cpu_or_, (pcp), (val)) | 619 | # define raw_cpu_or(pcp, val) __pcpu_size_call(raw_cpu_or_, (pcp), (val)) |
| 615 | #endif | 620 | #endif |
| 616 | 621 | ||
| 617 | #define __this_cpu_generic_add_return(pcp, val) \ | 622 | #define raw_cpu_generic_add_return(pcp, val) \ |
| 618 | ({ \ | 623 | ({ \ |
| 619 | __this_cpu_add(pcp, val); \ | 624 | raw_cpu_add(pcp, val); \ |
| 620 | __this_cpu_read(pcp); \ | 625 | raw_cpu_read(pcp); \ |
| 621 | }) | 626 | }) |
| 622 | 627 | ||
| 623 | #ifndef __this_cpu_add_return | 628 | #ifndef raw_cpu_add_return |
| 624 | # ifndef __this_cpu_add_return_1 | 629 | # ifndef raw_cpu_add_return_1 |
| 625 | # define __this_cpu_add_return_1(pcp, val) __this_cpu_generic_add_return(pcp, val) | 630 | # define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val) |
| 626 | # endif | 631 | # endif |
| 627 | # ifndef __this_cpu_add_return_2 | 632 | # ifndef raw_cpu_add_return_2 |
| 628 | # define __this_cpu_add_return_2(pcp, val) __this_cpu_generic_add_return(pcp, val) | 633 | # define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val) |
| 629 | # endif | 634 | # endif |
| 630 | # ifndef __this_cpu_add_return_4 | 635 | # ifndef raw_cpu_add_return_4 |
| 631 | # define __this_cpu_add_return_4(pcp, val) __this_cpu_generic_add_return(pcp, val) | 636 | # define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val) |
| 632 | # endif | 637 | # endif |
| 633 | # ifndef __this_cpu_add_return_8 | 638 | # ifndef raw_cpu_add_return_8 |
| 634 | # define __this_cpu_add_return_8(pcp, val) __this_cpu_generic_add_return(pcp, val) | 639 | # define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val) |
| 635 | # endif | 640 | # endif |
| 636 | # define __this_cpu_add_return(pcp, val) \ | 641 | # define raw_cpu_add_return(pcp, val) \ |
| 637 | __pcpu_size_call_return2(__this_cpu_add_return_, pcp, val) | 642 | __pcpu_size_call_return2(raw_cpu_add_return_, pcp, val) |
| 638 | #endif | 643 | #endif |
| 639 | 644 | ||
| 640 | #define __this_cpu_sub_return(pcp, val) __this_cpu_add_return(pcp, -(typeof(pcp))(val)) | 645 | #define raw_cpu_sub_return(pcp, val) raw_cpu_add_return(pcp, -(typeof(pcp))(val)) |
| 641 | #define __this_cpu_inc_return(pcp) __this_cpu_add_return(pcp, 1) | 646 | #define raw_cpu_inc_return(pcp) raw_cpu_add_return(pcp, 1) |
| 642 | #define __this_cpu_dec_return(pcp) __this_cpu_add_return(pcp, -1) | 647 | #define raw_cpu_dec_return(pcp) raw_cpu_add_return(pcp, -1) |
| 643 | 648 | ||
| 644 | #define __this_cpu_generic_xchg(pcp, nval) \ | 649 | #define raw_cpu_generic_xchg(pcp, nval) \ |
| 645 | ({ typeof(pcp) ret__; \ | 650 | ({ typeof(pcp) ret__; \ |
| 646 | ret__ = __this_cpu_read(pcp); \ | 651 | ret__ = raw_cpu_read(pcp); \ |
| 647 | __this_cpu_write(pcp, nval); \ | 652 | raw_cpu_write(pcp, nval); \ |
| 648 | ret__; \ | 653 | ret__; \ |
| 649 | }) | 654 | }) |
| 650 | 655 | ||
| 651 | #ifndef __this_cpu_xchg | 656 | #ifndef raw_cpu_xchg |
| 652 | # ifndef __this_cpu_xchg_1 | 657 | # ifndef raw_cpu_xchg_1 |
| 653 | # define __this_cpu_xchg_1(pcp, nval) __this_cpu_generic_xchg(pcp, nval) | 658 | # define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval) |
| 654 | # endif | 659 | # endif |
| 655 | # ifndef __this_cpu_xchg_2 | 660 | # ifndef raw_cpu_xchg_2 |
| 656 | # define __this_cpu_xchg_2(pcp, nval) __this_cpu_generic_xchg(pcp, nval) | 661 | # define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval) |
| 657 | # endif | 662 | # endif |
| 658 | # ifndef __this_cpu_xchg_4 | 663 | # ifndef raw_cpu_xchg_4 |
| 659 | # define __this_cpu_xchg_4(pcp, nval) __this_cpu_generic_xchg(pcp, nval) | 664 | # define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval) |
| 660 | # endif | 665 | # endif |
| 661 | # ifndef __this_cpu_xchg_8 | 666 | # ifndef raw_cpu_xchg_8 |
| 662 | # define __this_cpu_xchg_8(pcp, nval) __this_cpu_generic_xchg(pcp, nval) | 667 | # define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval) |
| 663 | # endif | 668 | # endif |
| 664 | # define __this_cpu_xchg(pcp, nval) \ | 669 | # define raw_cpu_xchg(pcp, nval) \ |
| 665 | __pcpu_size_call_return2(__this_cpu_xchg_, (pcp), nval) | 670 | __pcpu_size_call_return2(raw_cpu_xchg_, (pcp), nval) |
| 666 | #endif | 671 | #endif |
| 667 | 672 | ||
| 668 | #define __this_cpu_generic_cmpxchg(pcp, oval, nval) \ | 673 | #define raw_cpu_generic_cmpxchg(pcp, oval, nval) \ |
| 669 | ({ \ | 674 | ({ \ |
| 670 | typeof(pcp) ret__; \ | 675 | typeof(pcp) ret__; \ |
| 671 | ret__ = __this_cpu_read(pcp); \ | 676 | ret__ = raw_cpu_read(pcp); \ |
| 672 | if (ret__ == (oval)) \ | 677 | if (ret__ == (oval)) \ |
| 673 | __this_cpu_write(pcp, nval); \ | 678 | raw_cpu_write(pcp, nval); \ |
| 674 | ret__; \ | 679 | ret__; \ |
| 675 | }) | 680 | }) |
| 676 | 681 | ||
| 677 | #ifndef __this_cpu_cmpxchg | 682 | #ifndef raw_cpu_cmpxchg |
| 678 | # ifndef __this_cpu_cmpxchg_1 | 683 | # ifndef raw_cpu_cmpxchg_1 |
| 679 | # define __this_cpu_cmpxchg_1(pcp, oval, nval) __this_cpu_generic_cmpxchg(pcp, oval, nval) | 684 | # define raw_cpu_cmpxchg_1(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval) |
| 680 | # endif | 685 | # endif |
| 681 | # ifndef __this_cpu_cmpxchg_2 | 686 | # ifndef raw_cpu_cmpxchg_2 |
| 682 | # define __this_cpu_cmpxchg_2(pcp, oval, nval) __this_cpu_generic_cmpxchg(pcp, oval, nval) | 687 | # define raw_cpu_cmpxchg_2(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval) |
| 683 | # endif | 688 | # endif |
| 684 | # ifndef __this_cpu_cmpxchg_4 | 689 | # ifndef raw_cpu_cmpxchg_4 |
| 685 | # define __this_cpu_cmpxchg_4(pcp, oval, nval) __this_cpu_generic_cmpxchg(pcp, oval, nval) | 690 | # define raw_cpu_cmpxchg_4(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval) |
| 686 | # endif | 691 | # endif |
| 687 | # ifndef __this_cpu_cmpxchg_8 | 692 | # ifndef raw_cpu_cmpxchg_8 |
| 688 | # define __this_cpu_cmpxchg_8(pcp, oval, nval) __this_cpu_generic_cmpxchg(pcp, oval, nval) | 693 | # define raw_cpu_cmpxchg_8(pcp, oval, nval) raw_cpu_generic_cmpxchg(pcp, oval, nval) |
| 689 | # endif | 694 | # endif |
| 690 | # define __this_cpu_cmpxchg(pcp, oval, nval) \ | 695 | # define raw_cpu_cmpxchg(pcp, oval, nval) \ |
| 691 | __pcpu_size_call_return2(__this_cpu_cmpxchg_, pcp, oval, nval) | 696 | __pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval) |
| 692 | #endif | 697 | #endif |
| 693 | 698 | ||
| 694 | #define __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 699 | #define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 695 | ({ \ | 700 | ({ \ |
| 696 | int __ret = 0; \ | 701 | int __ret = 0; \ |
| 697 | if (__this_cpu_read(pcp1) == (oval1) && \ | 702 | if (raw_cpu_read(pcp1) == (oval1) && \ |
| 698 | __this_cpu_read(pcp2) == (oval2)) { \ | 703 | raw_cpu_read(pcp2) == (oval2)) { \ |
| 699 | __this_cpu_write(pcp1, (nval1)); \ | 704 | raw_cpu_write(pcp1, (nval1)); \ |
| 700 | __this_cpu_write(pcp2, (nval2)); \ | 705 | raw_cpu_write(pcp2, (nval2)); \ |
| 701 | __ret = 1; \ | 706 | __ret = 1; \ |
| 702 | } \ | 707 | } \ |
| 703 | (__ret); \ | 708 | (__ret); \ |
| 704 | }) | 709 | }) |
| 705 | 710 | ||
| 706 | #ifndef __this_cpu_cmpxchg_double | 711 | #ifndef raw_cpu_cmpxchg_double |
| 707 | # ifndef __this_cpu_cmpxchg_double_1 | 712 | # ifndef raw_cpu_cmpxchg_double_1 |
| 708 | # define __this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 713 | # define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 709 | __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) | 714 | raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) |
| 710 | # endif | 715 | # endif |
| 711 | # ifndef __this_cpu_cmpxchg_double_2 | 716 | # ifndef raw_cpu_cmpxchg_double_2 |
| 712 | # define __this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 717 | # define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 713 | __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) | 718 | raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) |
| 714 | # endif | 719 | # endif |
| 715 | # ifndef __this_cpu_cmpxchg_double_4 | 720 | # ifndef raw_cpu_cmpxchg_double_4 |
| 716 | # define __this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 721 | # define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 717 | __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) | 722 | raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) |
| 718 | # endif | 723 | # endif |
| 719 | # ifndef __this_cpu_cmpxchg_double_8 | 724 | # ifndef raw_cpu_cmpxchg_double_8 |
| 720 | # define __this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 725 | # define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 721 | __this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) | 726 | raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) |
| 722 | # endif | 727 | # endif |
| 728 | # define raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | ||
| 729 | __pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2)) | ||
| 730 | #endif | ||
| 731 | |||
| 732 | /* | ||
| 733 | * Generic percpu operations for context that are safe from preemption/interrupts. | ||
| 734 | */ | ||
| 735 | #ifndef __this_cpu_read | ||
| 736 | # define __this_cpu_read(pcp) \ | ||
| 737 | (__this_cpu_preempt_check("read"),__pcpu_size_call_return(raw_cpu_read_, (pcp))) | ||
| 738 | #endif | ||
| 739 | |||
| 740 | #ifndef __this_cpu_write | ||
| 741 | # define __this_cpu_write(pcp, val) \ | ||
| 742 | do { __this_cpu_preempt_check("write"); \ | ||
| 743 | __pcpu_size_call(raw_cpu_write_, (pcp), (val)); \ | ||
| 744 | } while (0) | ||
| 745 | #endif | ||
| 746 | |||
| 747 | #ifndef __this_cpu_add | ||
| 748 | # define __this_cpu_add(pcp, val) \ | ||
| 749 | do { __this_cpu_preempt_check("add"); \ | ||
| 750 | __pcpu_size_call(raw_cpu_add_, (pcp), (val)); \ | ||
| 751 | } while (0) | ||
| 752 | #endif | ||
| 753 | |||
| 754 | #ifndef __this_cpu_sub | ||
| 755 | # define __this_cpu_sub(pcp, val) __this_cpu_add((pcp), -(typeof(pcp))(val)) | ||
| 756 | #endif | ||
| 757 | |||
| 758 | #ifndef __this_cpu_inc | ||
| 759 | # define __this_cpu_inc(pcp) __this_cpu_add((pcp), 1) | ||
| 760 | #endif | ||
| 761 | |||
| 762 | #ifndef __this_cpu_dec | ||
| 763 | # define __this_cpu_dec(pcp) __this_cpu_sub((pcp), 1) | ||
| 764 | #endif | ||
| 765 | |||
| 766 | #ifndef __this_cpu_and | ||
| 767 | # define __this_cpu_and(pcp, val) \ | ||
| 768 | do { __this_cpu_preempt_check("and"); \ | ||
| 769 | __pcpu_size_call(raw_cpu_and_, (pcp), (val)); \ | ||
| 770 | } while (0) | ||
| 771 | |||
| 772 | #endif | ||
| 773 | |||
| 774 | #ifndef __this_cpu_or | ||
| 775 | # define __this_cpu_or(pcp, val) \ | ||
| 776 | do { __this_cpu_preempt_check("or"); \ | ||
| 777 | __pcpu_size_call(raw_cpu_or_, (pcp), (val)); \ | ||
| 778 | } while (0) | ||
| 779 | #endif | ||
| 780 | |||
| 781 | #ifndef __this_cpu_add_return | ||
| 782 | # define __this_cpu_add_return(pcp, val) \ | ||
| 783 | (__this_cpu_preempt_check("add_return"),__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val)) | ||
| 784 | #endif | ||
| 785 | |||
| 786 | #define __this_cpu_sub_return(pcp, val) __this_cpu_add_return(pcp, -(typeof(pcp))(val)) | ||
| 787 | #define __this_cpu_inc_return(pcp) __this_cpu_add_return(pcp, 1) | ||
| 788 | #define __this_cpu_dec_return(pcp) __this_cpu_add_return(pcp, -1) | ||
| 789 | |||
| 790 | #ifndef __this_cpu_xchg | ||
| 791 | # define __this_cpu_xchg(pcp, nval) \ | ||
| 792 | (__this_cpu_preempt_check("xchg"),__pcpu_size_call_return2(raw_cpu_xchg_, (pcp), nval)) | ||
| 793 | #endif | ||
| 794 | |||
| 795 | #ifndef __this_cpu_cmpxchg | ||
| 796 | # define __this_cpu_cmpxchg(pcp, oval, nval) \ | ||
| 797 | (__this_cpu_preempt_check("cmpxchg"),__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)) | ||
| 798 | #endif | ||
| 799 | |||
| 800 | #ifndef __this_cpu_cmpxchg_double | ||
| 723 | # define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ | 801 | # define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \ |
| 724 | __pcpu_double_call_return_bool(__this_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2)) | 802 | (__this_cpu_preempt_check("cmpxchg_double"),__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, (pcp1), (pcp2), (oval1), (oval2), (nval1), (nval2))) |
| 725 | #endif | 803 | #endif |
| 726 | 804 | ||
| 727 | #endif /* __LINUX_PERCPU_H */ | 805 | #endif /* __LINUX_PERCPU_H */ |
