
locking/atomic: scripts: simplify raw_atomic*() definitions

Currently each ordering variant has several potential definitions,
with a mixture of preprocessor and C definitions, including several
copies of its C prototype, e.g.

| #if defined(arch_atomic_fetch_andnot_acquire)
| #define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
| #elif defined(arch_atomic_fetch_andnot_relaxed)
| static __always_inline int
| raw_atomic_fetch_andnot_acquire(int i, atomic_t *v)
| {
| 	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
| 	__atomic_acquire_fence();
| 	return ret;
| }
| #elif defined(arch_atomic_fetch_andnot)
| #define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
| #else
| static __always_inline int
| raw_atomic_fetch_andnot_acquire(int i, atomic_t *v)
| {
| 	return raw_atomic_fetch_and_acquire(~i, v);
| }
| #endif

Make this a bit simpler by defining the C prototype once, and writing
the various potential definitions as plain C code guarded by ifdeffery.
For example, the above becomes:

| static __always_inline int
| raw_atomic_fetch_andnot_acquire(int i, atomic_t *v)
| {
| #if defined(arch_atomic_fetch_andnot_acquire)
| 	return arch_atomic_fetch_andnot_acquire(i, v);
| #elif defined(arch_atomic_fetch_andnot_relaxed)
| 	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
| 	__atomic_acquire_fence();
| 	return ret;
| #elif defined(arch_atomic_fetch_andnot)
| 	return arch_atomic_fetch_andnot(i, v);
| #else
| 	return raw_atomic_fetch_and_acquire(~i, v);
| #endif
| }

This is far easier to read. As we now always have a single copy of
the C prototype wrapping all the potential definitions, there is an
obvious single location for kerneldoc comments.
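
For illustration, such a comment could sit directly above the single
prototype; the wording below is a sketch, not the actual kerneldoc text:

| /**
|  * raw_atomic_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
|  * @i: int value
|  * @v: pointer to atomic_t
|  *
|  * Atomically updates @v to (@v & ~@i) with acquire ordering.
|  *
|  * Return: the original value of @v.
|  */
| static __always_inline int
| raw_atomic_fetch_andnot_acquire(int i, atomic_t *v)
| {
| 	...
| }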

At the same time, the fallbacks for raw_atomic*_xchg() are made to use
'new' rather than 'i' as the name of the new value. This is what the
existing fallback template used, and is more consistent with the
raw_atomic{_try,}cmpxchg() fallbacks.
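
For example, the acquire fallback in the updated header now reads, with
'new' in place of 'i':

| static __always_inline int
| raw_atomic_xchg_acquire(atomic_t *v, int new)
| {
| #if defined(arch_atomic_xchg_acquire)
| 	return arch_atomic_xchg_acquire(v, new);
| #elif defined(arch_atomic_xchg_relaxed)
| 	int ret = arch_atomic_xchg_relaxed(v, new);
| 	__atomic_acquire_fence();
| 	return ret;
| #elif defined(arch_atomic_xchg)
| 	return arch_atomic_xchg(v, new);
| #else
| 	return raw_xchg_acquire(&v->counter, new);
| #endif
| }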

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Kees Cook <keescook@chromium.org>
Link: https://lore.kernel.org/r/20230605070124.3741859-24-mark.rutland@arm.com


+945 -1121 (whole commit)
include/linux/atomic/atomic-arch-fallback.h (+887 -991)
···
 #define raw_sync_cmpxchg arch_sync_cmpxchg

-#define raw_atomic_read arch_atomic_read
+static __always_inline int
+raw_atomic_read(const atomic_t *v)
+{
+	return arch_atomic_read(v);
+}

-#if defined(arch_atomic_read_acquire)
-#define raw_atomic_read_acquire arch_atomic_read_acquire
-#elif defined(arch_atomic_read)
-#define raw_atomic_read_acquire arch_atomic_read
-#else
 static __always_inline int
 raw_atomic_read_acquire(const atomic_t *v)
 {
+#if defined(arch_atomic_read_acquire)
+	return arch_atomic_read_acquire(v);
+#elif defined(arch_atomic_read)
+	return arch_atomic_read(v);
+#else
 	int ret;

 	if (__native_word(atomic_t)) {
···
 	}

 	return ret;
-}
 #endif
+}

-#define raw_atomic_set arch_atomic_set
+static __always_inline void
+raw_atomic_set(atomic_t *v, int i)
+{
+	arch_atomic_set(v, i);
+}

-#if defined(arch_atomic_set_release)
-#define raw_atomic_set_release arch_atomic_set_release
-#elif defined(arch_atomic_set)
-#define raw_atomic_set_release arch_atomic_set
-#else
 static __always_inline void
 raw_atomic_set_release(atomic_t *v, int i)
 {
+#if defined(arch_atomic_set_release)
+	arch_atomic_set_release(v, i);
+#elif defined(arch_atomic_set)
+	arch_atomic_set(v, i);
+#else
 	if (__native_word(atomic_t)) {
 		smp_store_release(&(v)->counter, i);
 	} else {
 		__atomic_release_fence();
 		raw_atomic_set(v, i);
 	}
-}
 #endif
+}

-#define raw_atomic_add arch_atomic_add
+static __always_inline void
+raw_atomic_add(int i, atomic_t *v)
+{
+	arch_atomic_add(i, v);
+}

-#if defined(arch_atomic_add_return)
-#define raw_atomic_add_return arch_atomic_add_return
-#elif defined(arch_atomic_add_return_relaxed)
 static __always_inline int
 raw_atomic_add_return(int i, atomic_t *v)
 {
+#if defined(arch_atomic_add_return)
+	return arch_atomic_add_return(i, v);
+#elif defined(arch_atomic_add_return_relaxed)
 	int ret;
 	__atomic_pre_full_fence();
 	ret = arch_atomic_add_return_relaxed(i, v);
 	__atomic_post_full_fence();
 	return ret;
-}
 #else
 #error "Unable to define raw_atomic_add_return"
 #endif
+}

-#if defined(arch_atomic_add_return_acquire)
-#define raw_atomic_add_return_acquire arch_atomic_add_return_acquire
-#elif defined(arch_atomic_add_return_relaxed)
 static __always_inline int
 raw_atomic_add_return_acquire(int i, atomic_t *v)
 {
+#if defined(arch_atomic_add_return_acquire)
+	return arch_atomic_add_return_acquire(i, v);
+#elif defined(arch_atomic_add_return_relaxed)
 	int ret = arch_atomic_add_return_relaxed(i, v);
 	__atomic_acquire_fence();
 	return ret;
-}
 #elif defined(arch_atomic_add_return)
-#define raw_atomic_add_return_acquire arch_atomic_add_return
+	return arch_atomic_add_return(i, v);
 #else
 #error "Unable to define raw_atomic_add_return_acquire"
 #endif
+}
···

[diff continues: the same mechanical transformation is applied to every
remaining ordering variant of the raw_atomic*() and raw_atomic64*()
operations; the diff is truncated in the source at
raw_atomic64_fetch_add_acquire()]
*v) 1748 1805 { 1806 + #if defined(arch_atomic64_fetch_add_acquire) 1807 + return arch_atomic64_fetch_add_acquire(i, v); 1808 + #elif defined(arch_atomic64_fetch_add_relaxed) 1749 1809 s64 ret = arch_atomic64_fetch_add_relaxed(i, v); 1750 1810 __atomic_acquire_fence(); 1751 1811 return ret; 1752 - } 1753 1812 #elif defined(arch_atomic64_fetch_add) 1754 - #define raw_atomic64_fetch_add_acquire arch_atomic64_fetch_add 1813 + return arch_atomic64_fetch_add(i, v); 1755 1814 #else 1756 1815 #error "Unable to define raw_atomic64_fetch_add_acquire" 1757 1816 #endif 1817 + } 1758 1818 1759 - #if defined(arch_atomic64_fetch_add_release) 1760 - #define raw_atomic64_fetch_add_release arch_atomic64_fetch_add_release 1761 - #elif defined(arch_atomic64_fetch_add_relaxed) 1762 1819 static __always_inline s64 1763 1820 raw_atomic64_fetch_add_release(s64 i, atomic64_t *v) 1764 1821 { 1822 + #if defined(arch_atomic64_fetch_add_release) 1823 + return arch_atomic64_fetch_add_release(i, v); 1824 + #elif defined(arch_atomic64_fetch_add_relaxed) 1765 1825 __atomic_release_fence(); 1766 1826 return arch_atomic64_fetch_add_relaxed(i, v); 1767 - } 1768 1827 #elif defined(arch_atomic64_fetch_add) 1769 - #define raw_atomic64_fetch_add_release arch_atomic64_fetch_add 1828 + return arch_atomic64_fetch_add(i, v); 1770 1829 #else 1771 1830 #error "Unable to define raw_atomic64_fetch_add_release" 1772 1831 #endif 1832 + } 1773 1833 1834 + static __always_inline s64 1835 + raw_atomic64_fetch_add_relaxed(s64 i, atomic64_t *v) 1836 + { 1774 1837 #if defined(arch_atomic64_fetch_add_relaxed) 1775 - #define raw_atomic64_fetch_add_relaxed arch_atomic64_fetch_add_relaxed 1838 + return arch_atomic64_fetch_add_relaxed(i, v); 1776 1839 #elif defined(arch_atomic64_fetch_add) 1777 - #define raw_atomic64_fetch_add_relaxed arch_atomic64_fetch_add 1840 + return arch_atomic64_fetch_add(i, v); 1778 1841 #else 1779 1842 #error "Unable to define raw_atomic64_fetch_add_relaxed" 1780 1843 #endif 1844 + } 1781 1845 1782 - #define raw_atomic64_sub arch_atomic64_sub 1846 + static __always_inline void 1847 + raw_atomic64_sub(s64 i, atomic64_t *v) 1848 + { 1849 + arch_atomic64_sub(i, v); 1850 + } 1783 1851 1784 - #if defined(arch_atomic64_sub_return) 1785 - #define raw_atomic64_sub_return arch_atomic64_sub_return 1786 - #elif defined(arch_atomic64_sub_return_relaxed) 1787 1852 static __always_inline s64 1788 1853 raw_atomic64_sub_return(s64 i, atomic64_t *v) 1789 1854 { 1855 + #if defined(arch_atomic64_sub_return) 1856 + return arch_atomic64_sub_return(i, v); 1857 + #elif defined(arch_atomic64_sub_return_relaxed) 1790 1858 s64 ret; 1791 1859 __atomic_pre_full_fence(); 1792 1860 ret = arch_atomic64_sub_return_relaxed(i, v); 1793 1861 __atomic_post_full_fence(); 1794 1862 return ret; 1795 - } 1796 1863 #else 1797 1864 #error "Unable to define raw_atomic64_sub_return" 1798 1865 #endif 1866 + } 1799 1867 1800 - #if defined(arch_atomic64_sub_return_acquire) 1801 - #define raw_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire 1802 - #elif defined(arch_atomic64_sub_return_relaxed) 1803 1868 static __always_inline s64 1804 1869 raw_atomic64_sub_return_acquire(s64 i, atomic64_t *v) 1805 1870 { 1871 + #if defined(arch_atomic64_sub_return_acquire) 1872 + return arch_atomic64_sub_return_acquire(i, v); 1873 + #elif defined(arch_atomic64_sub_return_relaxed) 1806 1874 s64 ret = arch_atomic64_sub_return_relaxed(i, v); 1807 1875 __atomic_acquire_fence(); 1808 1876 return ret; 1809 - } 1810 1877 #elif defined(arch_atomic64_sub_return) 1811 - #define 
raw_atomic64_sub_return_acquire arch_atomic64_sub_return 1878 + return arch_atomic64_sub_return(i, v); 1812 1879 #else 1813 1880 #error "Unable to define raw_atomic64_sub_return_acquire" 1814 1881 #endif 1882 + } 1815 1883 1816 - #if defined(arch_atomic64_sub_return_release) 1817 - #define raw_atomic64_sub_return_release arch_atomic64_sub_return_release 1818 - #elif defined(arch_atomic64_sub_return_relaxed) 1819 1884 static __always_inline s64 1820 1885 raw_atomic64_sub_return_release(s64 i, atomic64_t *v) 1821 1886 { 1887 + #if defined(arch_atomic64_sub_return_release) 1888 + return arch_atomic64_sub_return_release(i, v); 1889 + #elif defined(arch_atomic64_sub_return_relaxed) 1822 1890 __atomic_release_fence(); 1823 1891 return arch_atomic64_sub_return_relaxed(i, v); 1824 - } 1825 1892 #elif defined(arch_atomic64_sub_return) 1826 - #define raw_atomic64_sub_return_release arch_atomic64_sub_return 1893 + return arch_atomic64_sub_return(i, v); 1827 1894 #else 1828 1895 #error "Unable to define raw_atomic64_sub_return_release" 1829 1896 #endif 1897 + } 1830 1898 1899 + static __always_inline s64 1900 + raw_atomic64_sub_return_relaxed(s64 i, atomic64_t *v) 1901 + { 1831 1902 #if defined(arch_atomic64_sub_return_relaxed) 1832 - #define raw_atomic64_sub_return_relaxed arch_atomic64_sub_return_relaxed 1903 + return arch_atomic64_sub_return_relaxed(i, v); 1833 1904 #elif defined(arch_atomic64_sub_return) 1834 - #define raw_atomic64_sub_return_relaxed arch_atomic64_sub_return 1905 + return arch_atomic64_sub_return(i, v); 1835 1906 #else 1836 1907 #error "Unable to define raw_atomic64_sub_return_relaxed" 1837 1908 #endif 1909 + } 1838 1910 1839 - #if defined(arch_atomic64_fetch_sub) 1840 - #define raw_atomic64_fetch_sub arch_atomic64_fetch_sub 1841 - #elif defined(arch_atomic64_fetch_sub_relaxed) 1842 1911 static __always_inline s64 1843 1912 raw_atomic64_fetch_sub(s64 i, atomic64_t *v) 1844 1913 { 1914 + #if defined(arch_atomic64_fetch_sub) 1915 + return arch_atomic64_fetch_sub(i, v); 1916 + #elif defined(arch_atomic64_fetch_sub_relaxed) 1845 1917 s64 ret; 1846 1918 __atomic_pre_full_fence(); 1847 1919 ret = arch_atomic64_fetch_sub_relaxed(i, v); 1848 1920 __atomic_post_full_fence(); 1849 1921 return ret; 1850 - } 1851 1922 #else 1852 1923 #error "Unable to define raw_atomic64_fetch_sub" 1853 1924 #endif 1925 + } 1854 1926 1855 - #if defined(arch_atomic64_fetch_sub_acquire) 1856 - #define raw_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire 1857 - #elif defined(arch_atomic64_fetch_sub_relaxed) 1858 1927 static __always_inline s64 1859 1928 raw_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) 1860 1929 { 1930 + #if defined(arch_atomic64_fetch_sub_acquire) 1931 + return arch_atomic64_fetch_sub_acquire(i, v); 1932 + #elif defined(arch_atomic64_fetch_sub_relaxed) 1861 1933 s64 ret = arch_atomic64_fetch_sub_relaxed(i, v); 1862 1934 __atomic_acquire_fence(); 1863 1935 return ret; 1864 - } 1865 1936 #elif defined(arch_atomic64_fetch_sub) 1866 - #define raw_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub 1937 + return arch_atomic64_fetch_sub(i, v); 1867 1938 #else 1868 1939 #error "Unable to define raw_atomic64_fetch_sub_acquire" 1869 1940 #endif 1941 + } 1870 1942 1871 - #if defined(arch_atomic64_fetch_sub_release) 1872 - #define raw_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release 1873 - #elif defined(arch_atomic64_fetch_sub_relaxed) 1874 1943 static __always_inline s64 1875 1944 raw_atomic64_fetch_sub_release(s64 i, atomic64_t *v) 1876 1945 { 1946 + #if 
defined(arch_atomic64_fetch_sub_release) 1947 + return arch_atomic64_fetch_sub_release(i, v); 1948 + #elif defined(arch_atomic64_fetch_sub_relaxed) 1877 1949 __atomic_release_fence(); 1878 1950 return arch_atomic64_fetch_sub_relaxed(i, v); 1879 - } 1880 1951 #elif defined(arch_atomic64_fetch_sub) 1881 - #define raw_atomic64_fetch_sub_release arch_atomic64_fetch_sub 1952 + return arch_atomic64_fetch_sub(i, v); 1882 1953 #else 1883 1954 #error "Unable to define raw_atomic64_fetch_sub_release" 1884 1955 #endif 1956 + } 1885 1957 1958 + static __always_inline s64 1959 + raw_atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v) 1960 + { 1886 1961 #if defined(arch_atomic64_fetch_sub_relaxed) 1887 - #define raw_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub_relaxed 1962 + return arch_atomic64_fetch_sub_relaxed(i, v); 1888 1963 #elif defined(arch_atomic64_fetch_sub) 1889 - #define raw_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub 1964 + return arch_atomic64_fetch_sub(i, v); 1890 1965 #else 1891 1966 #error "Unable to define raw_atomic64_fetch_sub_relaxed" 1892 1967 #endif 1968 + } 1893 1969 1894 - #if defined(arch_atomic64_inc) 1895 - #define raw_atomic64_inc arch_atomic64_inc 1896 - #else 1897 1970 static __always_inline void 1898 1971 raw_atomic64_inc(atomic64_t *v) 1899 1972 { 1973 + #if defined(arch_atomic64_inc) 1974 + arch_atomic64_inc(v); 1975 + #else 1900 1976 raw_atomic64_add(1, v); 1901 - } 1902 1977 #endif 1978 + } 1903 1979 1904 - #if defined(arch_atomic64_inc_return) 1905 - #define raw_atomic64_inc_return arch_atomic64_inc_return 1906 - #elif defined(arch_atomic64_inc_return_relaxed) 1907 1980 static __always_inline s64 1908 1981 raw_atomic64_inc_return(atomic64_t *v) 1909 1982 { 1983 + #if defined(arch_atomic64_inc_return) 1984 + return arch_atomic64_inc_return(v); 1985 + #elif defined(arch_atomic64_inc_return_relaxed) 1910 1986 s64 ret; 1911 1987 __atomic_pre_full_fence(); 1912 1988 ret = arch_atomic64_inc_return_relaxed(v); 1913 1989 __atomic_post_full_fence(); 1914 1990 return ret; 1915 - } 1916 1991 #else 1917 - static __always_inline s64 1918 - raw_atomic64_inc_return(atomic64_t *v) 1919 - { 1920 1992 return raw_atomic64_add_return(1, v); 1921 - } 1922 1993 #endif 1994 + } 1923 1995 1924 - #if defined(arch_atomic64_inc_return_acquire) 1925 - #define raw_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire 1926 - #elif defined(arch_atomic64_inc_return_relaxed) 1927 1996 static __always_inline s64 1928 1997 raw_atomic64_inc_return_acquire(atomic64_t *v) 1929 1998 { 1999 + #if defined(arch_atomic64_inc_return_acquire) 2000 + return arch_atomic64_inc_return_acquire(v); 2001 + #elif defined(arch_atomic64_inc_return_relaxed) 1930 2002 s64 ret = arch_atomic64_inc_return_relaxed(v); 1931 2003 __atomic_acquire_fence(); 1932 2004 return ret; 1933 - } 1934 2005 #elif defined(arch_atomic64_inc_return) 1935 - #define raw_atomic64_inc_return_acquire arch_atomic64_inc_return 2006 + return arch_atomic64_inc_return(v); 1936 2007 #else 1937 - static __always_inline s64 1938 - raw_atomic64_inc_return_acquire(atomic64_t *v) 1939 - { 1940 2008 return raw_atomic64_add_return_acquire(1, v); 1941 - } 1942 2009 #endif 2010 + } 1943 2011 1944 - #if defined(arch_atomic64_inc_return_release) 1945 - #define raw_atomic64_inc_return_release arch_atomic64_inc_return_release 1946 - #elif defined(arch_atomic64_inc_return_relaxed) 1947 2012 static __always_inline s64 1948 2013 raw_atomic64_inc_return_release(atomic64_t *v) 1949 2014 { 2015 + #if defined(arch_atomic64_inc_return_release) 2016 + return 
arch_atomic64_inc_return_release(v); 2017 + #elif defined(arch_atomic64_inc_return_relaxed) 1950 2018 __atomic_release_fence(); 1951 2019 return arch_atomic64_inc_return_relaxed(v); 1952 - } 1953 2020 #elif defined(arch_atomic64_inc_return) 1954 - #define raw_atomic64_inc_return_release arch_atomic64_inc_return 2021 + return arch_atomic64_inc_return(v); 1955 2022 #else 1956 - static __always_inline s64 1957 - raw_atomic64_inc_return_release(atomic64_t *v) 1958 - { 1959 2023 return raw_atomic64_add_return_release(1, v); 1960 - } 1961 2024 #endif 2025 + } 1962 2026 1963 - #if defined(arch_atomic64_inc_return_relaxed) 1964 - #define raw_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed 1965 - #elif defined(arch_atomic64_inc_return) 1966 - #define raw_atomic64_inc_return_relaxed arch_atomic64_inc_return 1967 - #else 1968 2027 static __always_inline s64 1969 2028 raw_atomic64_inc_return_relaxed(atomic64_t *v) 1970 2029 { 2030 + #if defined(arch_atomic64_inc_return_relaxed) 2031 + return arch_atomic64_inc_return_relaxed(v); 2032 + #elif defined(arch_atomic64_inc_return) 2033 + return arch_atomic64_inc_return(v); 2034 + #else 1971 2035 return raw_atomic64_add_return_relaxed(1, v); 1972 - } 1973 2036 #endif 2037 + } 1974 2038 1975 - #if defined(arch_atomic64_fetch_inc) 1976 - #define raw_atomic64_fetch_inc arch_atomic64_fetch_inc 1977 - #elif defined(arch_atomic64_fetch_inc_relaxed) 1978 2039 static __always_inline s64 1979 2040 raw_atomic64_fetch_inc(atomic64_t *v) 1980 2041 { 2042 + #if defined(arch_atomic64_fetch_inc) 2043 + return arch_atomic64_fetch_inc(v); 2044 + #elif defined(arch_atomic64_fetch_inc_relaxed) 1981 2045 s64 ret; 1982 2046 __atomic_pre_full_fence(); 1983 2047 ret = arch_atomic64_fetch_inc_relaxed(v); 1984 2048 __atomic_post_full_fence(); 1985 2049 return ret; 1986 - } 1987 2050 #else 1988 - static __always_inline s64 1989 - raw_atomic64_fetch_inc(atomic64_t *v) 1990 - { 1991 2051 return raw_atomic64_fetch_add(1, v); 1992 - } 1993 2052 #endif 2053 + } 1994 2054 1995 - #if defined(arch_atomic64_fetch_inc_acquire) 1996 - #define raw_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire 1997 - #elif defined(arch_atomic64_fetch_inc_relaxed) 1998 2055 static __always_inline s64 1999 2056 raw_atomic64_fetch_inc_acquire(atomic64_t *v) 2000 2057 { 2058 + #if defined(arch_atomic64_fetch_inc_acquire) 2059 + return arch_atomic64_fetch_inc_acquire(v); 2060 + #elif defined(arch_atomic64_fetch_inc_relaxed) 2001 2061 s64 ret = arch_atomic64_fetch_inc_relaxed(v); 2002 2062 __atomic_acquire_fence(); 2003 2063 return ret; 2004 - } 2005 2064 #elif defined(arch_atomic64_fetch_inc) 2006 - #define raw_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc 2065 + return arch_atomic64_fetch_inc(v); 2007 2066 #else 2008 - static __always_inline s64 2009 - raw_atomic64_fetch_inc_acquire(atomic64_t *v) 2010 - { 2011 2067 return raw_atomic64_fetch_add_acquire(1, v); 2012 - } 2013 2068 #endif 2069 + } 2014 2070 2015 - #if defined(arch_atomic64_fetch_inc_release) 2016 - #define raw_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release 2017 - #elif defined(arch_atomic64_fetch_inc_relaxed) 2018 2071 static __always_inline s64 2019 2072 raw_atomic64_fetch_inc_release(atomic64_t *v) 2020 2073 { 2074 + #if defined(arch_atomic64_fetch_inc_release) 2075 + return arch_atomic64_fetch_inc_release(v); 2076 + #elif defined(arch_atomic64_fetch_inc_relaxed) 2021 2077 __atomic_release_fence(); 2022 2078 return arch_atomic64_fetch_inc_relaxed(v); 2023 - } 2024 2079 #elif defined(arch_atomic64_fetch_inc) 
2025 - #define raw_atomic64_fetch_inc_release arch_atomic64_fetch_inc 2080 + return arch_atomic64_fetch_inc(v); 2026 2081 #else 2027 - static __always_inline s64 2028 - raw_atomic64_fetch_inc_release(atomic64_t *v) 2029 - { 2030 2082 return raw_atomic64_fetch_add_release(1, v); 2031 - } 2032 2083 #endif 2084 + } 2033 2085 2034 - #if defined(arch_atomic64_fetch_inc_relaxed) 2035 - #define raw_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed 2036 - #elif defined(arch_atomic64_fetch_inc) 2037 - #define raw_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc 2038 - #else 2039 2086 static __always_inline s64 2040 2087 raw_atomic64_fetch_inc_relaxed(atomic64_t *v) 2041 2088 { 2042 - return raw_atomic64_fetch_add_relaxed(1, v); 2043 - } 2044 - #endif 2045 - 2046 - #if defined(arch_atomic64_dec) 2047 - #define raw_atomic64_dec arch_atomic64_dec 2089 + #if defined(arch_atomic64_fetch_inc_relaxed) 2090 + return arch_atomic64_fetch_inc_relaxed(v); 2091 + #elif defined(arch_atomic64_fetch_inc) 2092 + return arch_atomic64_fetch_inc(v); 2048 2093 #else 2094 + return raw_atomic64_fetch_add_relaxed(1, v); 2095 + #endif 2096 + } 2097 + 2049 2098 static __always_inline void 2050 2099 raw_atomic64_dec(atomic64_t *v) 2051 2100 { 2101 + #if defined(arch_atomic64_dec) 2102 + arch_atomic64_dec(v); 2103 + #else 2052 2104 raw_atomic64_sub(1, v); 2053 - } 2054 2105 #endif 2106 + } 2055 2107 2056 - #if defined(arch_atomic64_dec_return) 2057 - #define raw_atomic64_dec_return arch_atomic64_dec_return 2058 - #elif defined(arch_atomic64_dec_return_relaxed) 2059 2108 static __always_inline s64 2060 2109 raw_atomic64_dec_return(atomic64_t *v) 2061 2110 { 2111 + #if defined(arch_atomic64_dec_return) 2112 + return arch_atomic64_dec_return(v); 2113 + #elif defined(arch_atomic64_dec_return_relaxed) 2062 2114 s64 ret; 2063 2115 __atomic_pre_full_fence(); 2064 2116 ret = arch_atomic64_dec_return_relaxed(v); 2065 2117 __atomic_post_full_fence(); 2066 2118 return ret; 2067 - } 2068 2119 #else 2069 - static __always_inline s64 2070 - raw_atomic64_dec_return(atomic64_t *v) 2071 - { 2072 2120 return raw_atomic64_sub_return(1, v); 2073 - } 2074 2121 #endif 2122 + } 2075 2123 2076 - #if defined(arch_atomic64_dec_return_acquire) 2077 - #define raw_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire 2078 - #elif defined(arch_atomic64_dec_return_relaxed) 2079 2124 static __always_inline s64 2080 2125 raw_atomic64_dec_return_acquire(atomic64_t *v) 2081 2126 { 2127 + #if defined(arch_atomic64_dec_return_acquire) 2128 + return arch_atomic64_dec_return_acquire(v); 2129 + #elif defined(arch_atomic64_dec_return_relaxed) 2082 2130 s64 ret = arch_atomic64_dec_return_relaxed(v); 2083 2131 __atomic_acquire_fence(); 2084 2132 return ret; 2085 - } 2086 2133 #elif defined(arch_atomic64_dec_return) 2087 - #define raw_atomic64_dec_return_acquire arch_atomic64_dec_return 2134 + return arch_atomic64_dec_return(v); 2088 2135 #else 2089 - static __always_inline s64 2090 - raw_atomic64_dec_return_acquire(atomic64_t *v) 2091 - { 2092 2136 return raw_atomic64_sub_return_acquire(1, v); 2093 - } 2094 2137 #endif 2138 + } 2095 2139 2096 - #if defined(arch_atomic64_dec_return_release) 2097 - #define raw_atomic64_dec_return_release arch_atomic64_dec_return_release 2098 - #elif defined(arch_atomic64_dec_return_relaxed) 2099 2140 static __always_inline s64 2100 2141 raw_atomic64_dec_return_release(atomic64_t *v) 2101 2142 { 2143 + #if defined(arch_atomic64_dec_return_release) 2144 + return arch_atomic64_dec_return_release(v); 2145 + #elif 
defined(arch_atomic64_dec_return_relaxed) 2102 2146 __atomic_release_fence(); 2103 2147 return arch_atomic64_dec_return_relaxed(v); 2104 - } 2105 2148 #elif defined(arch_atomic64_dec_return) 2106 - #define raw_atomic64_dec_return_release arch_atomic64_dec_return 2149 + return arch_atomic64_dec_return(v); 2107 2150 #else 2108 - static __always_inline s64 2109 - raw_atomic64_dec_return_release(atomic64_t *v) 2110 - { 2111 2151 return raw_atomic64_sub_return_release(1, v); 2112 - } 2113 2152 #endif 2153 + } 2114 2154 2115 - #if defined(arch_atomic64_dec_return_relaxed) 2116 - #define raw_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed 2117 - #elif defined(arch_atomic64_dec_return) 2118 - #define raw_atomic64_dec_return_relaxed arch_atomic64_dec_return 2119 - #else 2120 2155 static __always_inline s64 2121 2156 raw_atomic64_dec_return_relaxed(atomic64_t *v) 2122 2157 { 2158 + #if defined(arch_atomic64_dec_return_relaxed) 2159 + return arch_atomic64_dec_return_relaxed(v); 2160 + #elif defined(arch_atomic64_dec_return) 2161 + return arch_atomic64_dec_return(v); 2162 + #else 2123 2163 return raw_atomic64_sub_return_relaxed(1, v); 2124 - } 2125 2164 #endif 2165 + } 2126 2166 2127 - #if defined(arch_atomic64_fetch_dec) 2128 - #define raw_atomic64_fetch_dec arch_atomic64_fetch_dec 2129 - #elif defined(arch_atomic64_fetch_dec_relaxed) 2130 2167 static __always_inline s64 2131 2168 raw_atomic64_fetch_dec(atomic64_t *v) 2132 2169 { 2170 + #if defined(arch_atomic64_fetch_dec) 2171 + return arch_atomic64_fetch_dec(v); 2172 + #elif defined(arch_atomic64_fetch_dec_relaxed) 2133 2173 s64 ret; 2134 2174 __atomic_pre_full_fence(); 2135 2175 ret = arch_atomic64_fetch_dec_relaxed(v); 2136 2176 __atomic_post_full_fence(); 2137 2177 return ret; 2138 - } 2139 2178 #else 2140 - static __always_inline s64 2141 - raw_atomic64_fetch_dec(atomic64_t *v) 2142 - { 2143 2179 return raw_atomic64_fetch_sub(1, v); 2144 - } 2145 2180 #endif 2181 + } 2146 2182 2147 - #if defined(arch_atomic64_fetch_dec_acquire) 2148 - #define raw_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire 2149 - #elif defined(arch_atomic64_fetch_dec_relaxed) 2150 2183 static __always_inline s64 2151 2184 raw_atomic64_fetch_dec_acquire(atomic64_t *v) 2152 2185 { 2186 + #if defined(arch_atomic64_fetch_dec_acquire) 2187 + return arch_atomic64_fetch_dec_acquire(v); 2188 + #elif defined(arch_atomic64_fetch_dec_relaxed) 2153 2189 s64 ret = arch_atomic64_fetch_dec_relaxed(v); 2154 2190 __atomic_acquire_fence(); 2155 2191 return ret; 2156 - } 2157 2192 #elif defined(arch_atomic64_fetch_dec) 2158 - #define raw_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec 2193 + return arch_atomic64_fetch_dec(v); 2159 2194 #else 2160 - static __always_inline s64 2161 - raw_atomic64_fetch_dec_acquire(atomic64_t *v) 2162 - { 2163 2195 return raw_atomic64_fetch_sub_acquire(1, v); 2164 - } 2165 2196 #endif 2197 + } 2166 2198 2167 - #if defined(arch_atomic64_fetch_dec_release) 2168 - #define raw_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release 2169 - #elif defined(arch_atomic64_fetch_dec_relaxed) 2170 2199 static __always_inline s64 2171 2200 raw_atomic64_fetch_dec_release(atomic64_t *v) 2172 2201 { 2202 + #if defined(arch_atomic64_fetch_dec_release) 2203 + return arch_atomic64_fetch_dec_release(v); 2204 + #elif defined(arch_atomic64_fetch_dec_relaxed) 2173 2205 __atomic_release_fence(); 2174 2206 return arch_atomic64_fetch_dec_relaxed(v); 2175 - } 2176 2207 #elif defined(arch_atomic64_fetch_dec) 2177 - #define raw_atomic64_fetch_dec_release 
arch_atomic64_fetch_dec 2208 + return arch_atomic64_fetch_dec(v); 2178 2209 #else 2179 - static __always_inline s64 2180 - raw_atomic64_fetch_dec_release(atomic64_t *v) 2181 - { 2182 2210 return raw_atomic64_fetch_sub_release(1, v); 2183 - } 2184 2211 #endif 2212 + } 2185 2213 2186 - #if defined(arch_atomic64_fetch_dec_relaxed) 2187 - #define raw_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed 2188 - #elif defined(arch_atomic64_fetch_dec) 2189 - #define raw_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec 2190 - #else 2191 2214 static __always_inline s64 2192 2215 raw_atomic64_fetch_dec_relaxed(atomic64_t *v) 2193 2216 { 2217 + #if defined(arch_atomic64_fetch_dec_relaxed) 2218 + return arch_atomic64_fetch_dec_relaxed(v); 2219 + #elif defined(arch_atomic64_fetch_dec) 2220 + return arch_atomic64_fetch_dec(v); 2221 + #else 2194 2222 return raw_atomic64_fetch_sub_relaxed(1, v); 2195 - } 2196 2223 #endif 2224 + } 2197 2225 2198 - #define raw_atomic64_and arch_atomic64_and 2226 + static __always_inline void 2227 + raw_atomic64_and(s64 i, atomic64_t *v) 2228 + { 2229 + arch_atomic64_and(i, v); 2230 + } 2199 2231 2200 - #if defined(arch_atomic64_fetch_and) 2201 - #define raw_atomic64_fetch_and arch_atomic64_fetch_and 2202 - #elif defined(arch_atomic64_fetch_and_relaxed) 2203 2232 static __always_inline s64 2204 2233 raw_atomic64_fetch_and(s64 i, atomic64_t *v) 2205 2234 { 2235 + #if defined(arch_atomic64_fetch_and) 2236 + return arch_atomic64_fetch_and(i, v); 2237 + #elif defined(arch_atomic64_fetch_and_relaxed) 2206 2238 s64 ret; 2207 2239 __atomic_pre_full_fence(); 2208 2240 ret = arch_atomic64_fetch_and_relaxed(i, v); 2209 2241 __atomic_post_full_fence(); 2210 2242 return ret; 2211 - } 2212 2243 #else 2213 2244 #error "Unable to define raw_atomic64_fetch_and" 2214 2245 #endif 2246 + } 2215 2247 2216 - #if defined(arch_atomic64_fetch_and_acquire) 2217 - #define raw_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire 2218 - #elif defined(arch_atomic64_fetch_and_relaxed) 2219 2248 static __always_inline s64 2220 2249 raw_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) 2221 2250 { 2251 + #if defined(arch_atomic64_fetch_and_acquire) 2252 + return arch_atomic64_fetch_and_acquire(i, v); 2253 + #elif defined(arch_atomic64_fetch_and_relaxed) 2222 2254 s64 ret = arch_atomic64_fetch_and_relaxed(i, v); 2223 2255 __atomic_acquire_fence(); 2224 2256 return ret; 2225 - } 2226 2257 #elif defined(arch_atomic64_fetch_and) 2227 - #define raw_atomic64_fetch_and_acquire arch_atomic64_fetch_and 2258 + return arch_atomic64_fetch_and(i, v); 2228 2259 #else 2229 2260 #error "Unable to define raw_atomic64_fetch_and_acquire" 2230 2261 #endif 2262 + } 2231 2263 2232 - #if defined(arch_atomic64_fetch_and_release) 2233 - #define raw_atomic64_fetch_and_release arch_atomic64_fetch_and_release 2234 - #elif defined(arch_atomic64_fetch_and_relaxed) 2235 2264 static __always_inline s64 2236 2265 raw_atomic64_fetch_and_release(s64 i, atomic64_t *v) 2237 2266 { 2267 + #if defined(arch_atomic64_fetch_and_release) 2268 + return arch_atomic64_fetch_and_release(i, v); 2269 + #elif defined(arch_atomic64_fetch_and_relaxed) 2238 2270 __atomic_release_fence(); 2239 2271 return arch_atomic64_fetch_and_relaxed(i, v); 2240 - } 2241 2272 #elif defined(arch_atomic64_fetch_and) 2242 - #define raw_atomic64_fetch_and_release arch_atomic64_fetch_and 2273 + return arch_atomic64_fetch_and(i, v); 2243 2274 #else 2244 2275 #error "Unable to define raw_atomic64_fetch_and_release" 2245 2276 #endif 2277 + } 2246 2278 2279 + static 
__always_inline s64 2280 + raw_atomic64_fetch_and_relaxed(s64 i, atomic64_t *v) 2281 + { 2247 2282 #if defined(arch_atomic64_fetch_and_relaxed) 2248 - #define raw_atomic64_fetch_and_relaxed arch_atomic64_fetch_and_relaxed 2283 + return arch_atomic64_fetch_and_relaxed(i, v); 2249 2284 #elif defined(arch_atomic64_fetch_and) 2250 - #define raw_atomic64_fetch_and_relaxed arch_atomic64_fetch_and 2285 + return arch_atomic64_fetch_and(i, v); 2251 2286 #else 2252 2287 #error "Unable to define raw_atomic64_fetch_and_relaxed" 2253 2288 #endif 2289 + } 2254 2290 2255 - #if defined(arch_atomic64_andnot) 2256 - #define raw_atomic64_andnot arch_atomic64_andnot 2257 - #else 2258 2291 static __always_inline void 2259 2292 raw_atomic64_andnot(s64 i, atomic64_t *v) 2260 2293 { 2294 + #if defined(arch_atomic64_andnot) 2295 + arch_atomic64_andnot(i, v); 2296 + #else 2261 2297 raw_atomic64_and(~i, v); 2262 - } 2263 2298 #endif 2299 + } 2264 2300 2265 - #if defined(arch_atomic64_fetch_andnot) 2266 - #define raw_atomic64_fetch_andnot arch_atomic64_fetch_andnot 2267 - #elif defined(arch_atomic64_fetch_andnot_relaxed) 2268 2301 static __always_inline s64 2269 2302 raw_atomic64_fetch_andnot(s64 i, atomic64_t *v) 2270 2303 { 2304 + #if defined(arch_atomic64_fetch_andnot) 2305 + return arch_atomic64_fetch_andnot(i, v); 2306 + #elif defined(arch_atomic64_fetch_andnot_relaxed) 2271 2307 s64 ret; 2272 2308 __atomic_pre_full_fence(); 2273 2309 ret = arch_atomic64_fetch_andnot_relaxed(i, v); 2274 2310 __atomic_post_full_fence(); 2275 2311 return ret; 2276 - } 2277 2312 #else 2278 - static __always_inline s64 2279 - raw_atomic64_fetch_andnot(s64 i, atomic64_t *v) 2280 - { 2281 2313 return raw_atomic64_fetch_and(~i, v); 2282 - } 2283 2314 #endif 2315 + } 2284 2316 2285 - #if defined(arch_atomic64_fetch_andnot_acquire) 2286 - #define raw_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire 2287 - #elif defined(arch_atomic64_fetch_andnot_relaxed) 2288 2317 static __always_inline s64 2289 2318 raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) 2290 2319 { 2320 + #if defined(arch_atomic64_fetch_andnot_acquire) 2321 + return arch_atomic64_fetch_andnot_acquire(i, v); 2322 + #elif defined(arch_atomic64_fetch_andnot_relaxed) 2291 2323 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); 2292 2324 __atomic_acquire_fence(); 2293 2325 return ret; 2294 - } 2295 2326 #elif defined(arch_atomic64_fetch_andnot) 2296 - #define raw_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot 2327 + return arch_atomic64_fetch_andnot(i, v); 2297 2328 #else 2298 - static __always_inline s64 2299 - raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) 2300 - { 2301 2329 return raw_atomic64_fetch_and_acquire(~i, v); 2302 - } 2303 2330 #endif 2331 + } 2304 2332 2305 - #if defined(arch_atomic64_fetch_andnot_release) 2306 - #define raw_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release 2307 - #elif defined(arch_atomic64_fetch_andnot_relaxed) 2308 2333 static __always_inline s64 2309 2334 raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) 2310 2335 { 2336 + #if defined(arch_atomic64_fetch_andnot_release) 2337 + return arch_atomic64_fetch_andnot_release(i, v); 2338 + #elif defined(arch_atomic64_fetch_andnot_relaxed) 2311 2339 __atomic_release_fence(); 2312 2340 return arch_atomic64_fetch_andnot_relaxed(i, v); 2313 - } 2314 2341 #elif defined(arch_atomic64_fetch_andnot) 2315 - #define raw_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot 2342 + return arch_atomic64_fetch_andnot(i, v); 2316 2343 #else 2317 - 
static __always_inline s64 2318 - raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) 2319 - { 2320 2344 return raw_atomic64_fetch_and_release(~i, v); 2321 - } 2322 2345 #endif 2346 + } 2323 2347 2324 - #if defined(arch_atomic64_fetch_andnot_relaxed) 2325 - #define raw_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed 2326 - #elif defined(arch_atomic64_fetch_andnot) 2327 - #define raw_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot 2328 - #else 2329 2348 static __always_inline s64 2330 2349 raw_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) 2331 2350 { 2351 + #if defined(arch_atomic64_fetch_andnot_relaxed) 2352 + return arch_atomic64_fetch_andnot_relaxed(i, v); 2353 + #elif defined(arch_atomic64_fetch_andnot) 2354 + return arch_atomic64_fetch_andnot(i, v); 2355 + #else 2332 2356 return raw_atomic64_fetch_and_relaxed(~i, v); 2333 - } 2334 2357 #endif 2358 + } 2335 2359 2336 - #define raw_atomic64_or arch_atomic64_or 2360 + static __always_inline void 2361 + raw_atomic64_or(s64 i, atomic64_t *v) 2362 + { 2363 + arch_atomic64_or(i, v); 2364 + } 2337 2365 2338 - #if defined(arch_atomic64_fetch_or) 2339 - #define raw_atomic64_fetch_or arch_atomic64_fetch_or 2340 - #elif defined(arch_atomic64_fetch_or_relaxed) 2341 2366 static __always_inline s64 2342 2367 raw_atomic64_fetch_or(s64 i, atomic64_t *v) 2343 2368 { 2369 + #if defined(arch_atomic64_fetch_or) 2370 + return arch_atomic64_fetch_or(i, v); 2371 + #elif defined(arch_atomic64_fetch_or_relaxed) 2344 2372 s64 ret; 2345 2373 __atomic_pre_full_fence(); 2346 2374 ret = arch_atomic64_fetch_or_relaxed(i, v); 2347 2375 __atomic_post_full_fence(); 2348 2376 return ret; 2349 - } 2350 2377 #else 2351 2378 #error "Unable to define raw_atomic64_fetch_or" 2352 2379 #endif 2380 + } 2353 2381 2354 - #if defined(arch_atomic64_fetch_or_acquire) 2355 - #define raw_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire 2356 - #elif defined(arch_atomic64_fetch_or_relaxed) 2357 2382 static __always_inline s64 2358 2383 raw_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) 2359 2384 { 2385 + #if defined(arch_atomic64_fetch_or_acquire) 2386 + return arch_atomic64_fetch_or_acquire(i, v); 2387 + #elif defined(arch_atomic64_fetch_or_relaxed) 2360 2388 s64 ret = arch_atomic64_fetch_or_relaxed(i, v); 2361 2389 __atomic_acquire_fence(); 2362 2390 return ret; 2363 - } 2364 2391 #elif defined(arch_atomic64_fetch_or) 2365 - #define raw_atomic64_fetch_or_acquire arch_atomic64_fetch_or 2392 + return arch_atomic64_fetch_or(i, v); 2366 2393 #else 2367 2394 #error "Unable to define raw_atomic64_fetch_or_acquire" 2368 2395 #endif 2396 + } 2369 2397 2370 - #if defined(arch_atomic64_fetch_or_release) 2371 - #define raw_atomic64_fetch_or_release arch_atomic64_fetch_or_release 2372 - #elif defined(arch_atomic64_fetch_or_relaxed) 2373 2398 static __always_inline s64 2374 2399 raw_atomic64_fetch_or_release(s64 i, atomic64_t *v) 2375 2400 { 2401 + #if defined(arch_atomic64_fetch_or_release) 2402 + return arch_atomic64_fetch_or_release(i, v); 2403 + #elif defined(arch_atomic64_fetch_or_relaxed) 2376 2404 __atomic_release_fence(); 2377 2405 return arch_atomic64_fetch_or_relaxed(i, v); 2378 - } 2379 2406 #elif defined(arch_atomic64_fetch_or) 2380 - #define raw_atomic64_fetch_or_release arch_atomic64_fetch_or 2407 + return arch_atomic64_fetch_or(i, v); 2381 2408 #else 2382 2409 #error "Unable to define raw_atomic64_fetch_or_release" 2383 2410 #endif 2411 + } 2384 2412 2413 + static __always_inline s64 2414 + raw_atomic64_fetch_or_relaxed(s64 i, atomic64_t 
*v) 2415 + { 2385 2416 #if defined(arch_atomic64_fetch_or_relaxed) 2386 - #define raw_atomic64_fetch_or_relaxed arch_atomic64_fetch_or_relaxed 2417 + return arch_atomic64_fetch_or_relaxed(i, v); 2387 2418 #elif defined(arch_atomic64_fetch_or) 2388 - #define raw_atomic64_fetch_or_relaxed arch_atomic64_fetch_or 2419 + return arch_atomic64_fetch_or(i, v); 2389 2420 #else 2390 2421 #error "Unable to define raw_atomic64_fetch_or_relaxed" 2391 2422 #endif 2423 + } 2392 2424 2393 - #define raw_atomic64_xor arch_atomic64_xor 2425 + static __always_inline void 2426 + raw_atomic64_xor(s64 i, atomic64_t *v) 2427 + { 2428 + arch_atomic64_xor(i, v); 2429 + } 2394 2430 2395 - #if defined(arch_atomic64_fetch_xor) 2396 - #define raw_atomic64_fetch_xor arch_atomic64_fetch_xor 2397 - #elif defined(arch_atomic64_fetch_xor_relaxed) 2398 2431 static __always_inline s64 2399 2432 raw_atomic64_fetch_xor(s64 i, atomic64_t *v) 2400 2433 { 2434 + #if defined(arch_atomic64_fetch_xor) 2435 + return arch_atomic64_fetch_xor(i, v); 2436 + #elif defined(arch_atomic64_fetch_xor_relaxed) 2401 2437 s64 ret; 2402 2438 __atomic_pre_full_fence(); 2403 2439 ret = arch_atomic64_fetch_xor_relaxed(i, v); 2404 2440 __atomic_post_full_fence(); 2405 2441 return ret; 2406 - } 2407 2442 #else 2408 2443 #error "Unable to define raw_atomic64_fetch_xor" 2409 2444 #endif 2445 + } 2410 2446 2411 - #if defined(arch_atomic64_fetch_xor_acquire) 2412 - #define raw_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire 2413 - #elif defined(arch_atomic64_fetch_xor_relaxed) 2414 2447 static __always_inline s64 2415 2448 raw_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) 2416 2449 { 2450 + #if defined(arch_atomic64_fetch_xor_acquire) 2451 + return arch_atomic64_fetch_xor_acquire(i, v); 2452 + #elif defined(arch_atomic64_fetch_xor_relaxed) 2417 2453 s64 ret = arch_atomic64_fetch_xor_relaxed(i, v); 2418 2454 __atomic_acquire_fence(); 2419 2455 return ret; 2420 - } 2421 2456 #elif defined(arch_atomic64_fetch_xor) 2422 - #define raw_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor 2457 + return arch_atomic64_fetch_xor(i, v); 2423 2458 #else 2424 2459 #error "Unable to define raw_atomic64_fetch_xor_acquire" 2425 2460 #endif 2461 + } 2426 2462 2427 - #if defined(arch_atomic64_fetch_xor_release) 2428 - #define raw_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release 2429 - #elif defined(arch_atomic64_fetch_xor_relaxed) 2430 2463 static __always_inline s64 2431 2464 raw_atomic64_fetch_xor_release(s64 i, atomic64_t *v) 2432 2465 { 2466 + #if defined(arch_atomic64_fetch_xor_release) 2467 + return arch_atomic64_fetch_xor_release(i, v); 2468 + #elif defined(arch_atomic64_fetch_xor_relaxed) 2433 2469 __atomic_release_fence(); 2434 2470 return arch_atomic64_fetch_xor_relaxed(i, v); 2435 - } 2436 2471 #elif defined(arch_atomic64_fetch_xor) 2437 - #define raw_atomic64_fetch_xor_release arch_atomic64_fetch_xor 2472 + return arch_atomic64_fetch_xor(i, v); 2438 2473 #else 2439 2474 #error "Unable to define raw_atomic64_fetch_xor_release" 2440 2475 #endif 2476 + } 2441 2477 2478 + static __always_inline s64 2479 + raw_atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v) 2480 + { 2442 2481 #if defined(arch_atomic64_fetch_xor_relaxed) 2443 - #define raw_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor_relaxed 2482 + return arch_atomic64_fetch_xor_relaxed(i, v); 2444 2483 #elif defined(arch_atomic64_fetch_xor) 2445 - #define raw_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor 2484 + return arch_atomic64_fetch_xor(i, v); 2446 2485 #else 2447 2486 #error 
"Unable to define raw_atomic64_fetch_xor_relaxed" 2448 2487 #endif 2449 - 2450 - #if defined(arch_atomic64_xchg) 2451 - #define raw_atomic64_xchg arch_atomic64_xchg 2452 - #elif defined(arch_atomic64_xchg_relaxed) 2453 - static __always_inline s64 2454 - raw_atomic64_xchg(atomic64_t *v, s64 i) 2455 - { 2456 - s64 ret; 2457 - __atomic_pre_full_fence(); 2458 - ret = arch_atomic64_xchg_relaxed(v, i); 2459 - __atomic_post_full_fence(); 2460 - return ret; 2461 2488 } 2462 - #else 2489 + 2463 2490 static __always_inline s64 2464 2491 raw_atomic64_xchg(atomic64_t *v, s64 new) 2465 2492 { 2466 - return raw_xchg(&v->counter, new); 2467 - } 2468 - #endif 2469 - 2470 - #if defined(arch_atomic64_xchg_acquire) 2471 - #define raw_atomic64_xchg_acquire arch_atomic64_xchg_acquire 2493 + #if defined(arch_atomic64_xchg) 2494 + return arch_atomic64_xchg(v, new); 2472 2495 #elif defined(arch_atomic64_xchg_relaxed) 2473 - static __always_inline s64 2474 - raw_atomic64_xchg_acquire(atomic64_t *v, s64 i) 2475 - { 2476 - s64 ret = arch_atomic64_xchg_relaxed(v, i); 2477 - __atomic_acquire_fence(); 2496 + s64 ret; 2497 + __atomic_pre_full_fence(); 2498 + ret = arch_atomic64_xchg_relaxed(v, new); 2499 + __atomic_post_full_fence(); 2478 2500 return ret; 2479 - } 2480 - #elif defined(arch_atomic64_xchg) 2481 - #define raw_atomic64_xchg_acquire arch_atomic64_xchg 2482 2501 #else 2502 + return raw_xchg(&v->counter, new); 2503 + #endif 2504 + } 2505 + 2483 2506 static __always_inline s64 2484 2507 raw_atomic64_xchg_acquire(atomic64_t *v, s64 new) 2485 2508 { 2486 - return raw_xchg_acquire(&v->counter, new); 2487 - } 2488 - #endif 2489 - 2490 - #if defined(arch_atomic64_xchg_release) 2491 - #define raw_atomic64_xchg_release arch_atomic64_xchg_release 2509 + #if defined(arch_atomic64_xchg_acquire) 2510 + return arch_atomic64_xchg_acquire(v, new); 2492 2511 #elif defined(arch_atomic64_xchg_relaxed) 2493 - static __always_inline s64 2494 - raw_atomic64_xchg_release(atomic64_t *v, s64 i) 2495 - { 2496 - __atomic_release_fence(); 2497 - return arch_atomic64_xchg_relaxed(v, i); 2498 - } 2512 + s64 ret = arch_atomic64_xchg_relaxed(v, new); 2513 + __atomic_acquire_fence(); 2514 + return ret; 2499 2515 #elif defined(arch_atomic64_xchg) 2500 - #define raw_atomic64_xchg_release arch_atomic64_xchg 2516 + return arch_atomic64_xchg(v, new); 2501 2517 #else 2518 + return raw_xchg_acquire(&v->counter, new); 2519 + #endif 2520 + } 2521 + 2502 2522 static __always_inline s64 2503 2523 raw_atomic64_xchg_release(atomic64_t *v, s64 new) 2504 2524 { 2505 - return raw_xchg_release(&v->counter, new); 2506 - } 2507 - #endif 2508 - 2509 - #if defined(arch_atomic64_xchg_relaxed) 2510 - #define raw_atomic64_xchg_relaxed arch_atomic64_xchg_relaxed 2525 + #if defined(arch_atomic64_xchg_release) 2526 + return arch_atomic64_xchg_release(v, new); 2527 + #elif defined(arch_atomic64_xchg_relaxed) 2528 + __atomic_release_fence(); 2529 + return arch_atomic64_xchg_relaxed(v, new); 2511 2530 #elif defined(arch_atomic64_xchg) 2512 - #define raw_atomic64_xchg_relaxed arch_atomic64_xchg 2531 + return arch_atomic64_xchg(v, new); 2513 2532 #else 2533 + return raw_xchg_release(&v->counter, new); 2534 + #endif 2535 + } 2536 + 2514 2537 static __always_inline s64 2515 2538 raw_atomic64_xchg_relaxed(atomic64_t *v, s64 new) 2516 2539 { 2540 + #if defined(arch_atomic64_xchg_relaxed) 2541 + return arch_atomic64_xchg_relaxed(v, new); 2542 + #elif defined(arch_atomic64_xchg) 2543 + return arch_atomic64_xchg(v, new); 2544 + #else 2517 2545 return 
raw_xchg_relaxed(&v->counter, new); 2518 - } 2519 2546 #endif 2547 + } 2520 2548 2521 - #if defined(arch_atomic64_cmpxchg) 2522 - #define raw_atomic64_cmpxchg arch_atomic64_cmpxchg 2523 - #elif defined(arch_atomic64_cmpxchg_relaxed) 2524 2549 static __always_inline s64 2525 2550 raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) 2526 2551 { 2552 + #if defined(arch_atomic64_cmpxchg) 2553 + return arch_atomic64_cmpxchg(v, old, new); 2554 + #elif defined(arch_atomic64_cmpxchg_relaxed) 2527 2555 s64 ret; 2528 2556 __atomic_pre_full_fence(); 2529 2557 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); 2530 2558 __atomic_post_full_fence(); 2531 2559 return ret; 2532 - } 2533 2560 #else 2534 - static __always_inline s64 2535 - raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) 2536 - { 2537 2561 return raw_cmpxchg(&v->counter, old, new); 2538 - } 2539 2562 #endif 2563 + } 2540 2564 2541 - #if defined(arch_atomic64_cmpxchg_acquire) 2542 - #define raw_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire 2543 - #elif defined(arch_atomic64_cmpxchg_relaxed) 2544 2565 static __always_inline s64 2545 2566 raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) 2546 2567 { 2568 + #if defined(arch_atomic64_cmpxchg_acquire) 2569 + return arch_atomic64_cmpxchg_acquire(v, old, new); 2570 + #elif defined(arch_atomic64_cmpxchg_relaxed) 2547 2571 s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); 2548 2572 __atomic_acquire_fence(); 2549 2573 return ret; 2550 - } 2551 2574 #elif defined(arch_atomic64_cmpxchg) 2552 - #define raw_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg 2575 + return arch_atomic64_cmpxchg(v, old, new); 2553 2576 #else 2554 - static __always_inline s64 2555 - raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) 2556 - { 2557 2577 return raw_cmpxchg_acquire(&v->counter, old, new); 2558 - } 2559 2578 #endif 2579 + } 2560 2580 2561 - #if defined(arch_atomic64_cmpxchg_release) 2562 - #define raw_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release 2563 - #elif defined(arch_atomic64_cmpxchg_relaxed) 2564 2581 static __always_inline s64 2565 2582 raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) 2566 2583 { 2584 + #if defined(arch_atomic64_cmpxchg_release) 2585 + return arch_atomic64_cmpxchg_release(v, old, new); 2586 + #elif defined(arch_atomic64_cmpxchg_relaxed) 2567 2587 __atomic_release_fence(); 2568 2588 return arch_atomic64_cmpxchg_relaxed(v, old, new); 2569 - } 2570 2589 #elif defined(arch_atomic64_cmpxchg) 2571 - #define raw_atomic64_cmpxchg_release arch_atomic64_cmpxchg 2590 + return arch_atomic64_cmpxchg(v, old, new); 2572 2591 #else 2573 - static __always_inline s64 2574 - raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) 2575 - { 2576 2592 return raw_cmpxchg_release(&v->counter, old, new); 2577 - } 2578 2593 #endif 2594 + } 2579 2595 2580 - #if defined(arch_atomic64_cmpxchg_relaxed) 2581 - #define raw_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg_relaxed 2582 - #elif defined(arch_atomic64_cmpxchg) 2583 - #define raw_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg 2584 - #else 2585 2596 static __always_inline s64 2586 2597 raw_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new) 2587 2598 { 2599 + #if defined(arch_atomic64_cmpxchg_relaxed) 2600 + return arch_atomic64_cmpxchg_relaxed(v, old, new); 2601 + #elif defined(arch_atomic64_cmpxchg) 2602 + return arch_atomic64_cmpxchg(v, old, new); 2603 + #else 2588 2604 return raw_cmpxchg_relaxed(&v->counter, old, new); 2589 - } 2590 2605 #endif 2606 + } 2591 2607 2592 - #if 
defined(arch_atomic64_try_cmpxchg) 2593 - #define raw_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg 2594 - #elif defined(arch_atomic64_try_cmpxchg_relaxed) 2595 2608 static __always_inline bool 2596 2609 raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) 2597 2610 { 2611 + #if defined(arch_atomic64_try_cmpxchg) 2612 + return arch_atomic64_try_cmpxchg(v, old, new); 2613 + #elif defined(arch_atomic64_try_cmpxchg_relaxed) 2598 2614 bool ret; 2599 2615 __atomic_pre_full_fence(); 2600 2616 ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); 2601 2617 __atomic_post_full_fence(); 2602 2618 return ret; 2603 - } 2604 2619 #else 2605 - static __always_inline bool 2606 - raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) 2607 - { 2608 2620 s64 r, o = *old; 2609 2621 r = raw_atomic64_cmpxchg(v, o, new); 2610 2622 if (unlikely(r != o)) 2611 2623 *old = r; 2612 2624 return likely(r == o); 2613 - } 2614 2625 #endif 2626 + } 2615 2627 2616 - #if defined(arch_atomic64_try_cmpxchg_acquire) 2617 - #define raw_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire 2618 - #elif defined(arch_atomic64_try_cmpxchg_relaxed) 2619 2628 static __always_inline bool 2620 2629 raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) 2621 2630 { 2631 + #if defined(arch_atomic64_try_cmpxchg_acquire) 2632 + return arch_atomic64_try_cmpxchg_acquire(v, old, new); 2633 + #elif defined(arch_atomic64_try_cmpxchg_relaxed) 2622 2634 bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); 2623 2635 __atomic_acquire_fence(); 2624 2636 return ret; 2625 - } 2626 2637 #elif defined(arch_atomic64_try_cmpxchg) 2627 - #define raw_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg 2638 + return arch_atomic64_try_cmpxchg(v, old, new); 2628 2639 #else 2629 - static __always_inline bool 2630 - raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) 2631 - { 2632 2640 s64 r, o = *old; 2633 2641 r = raw_atomic64_cmpxchg_acquire(v, o, new); 2634 2642 if (unlikely(r != o)) 2635 2643 *old = r; 2636 2644 return likely(r == o); 2637 - } 2638 2645 #endif 2646 + } 2639 2647 2640 - #if defined(arch_atomic64_try_cmpxchg_release) 2641 - #define raw_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release 2642 - #elif defined(arch_atomic64_try_cmpxchg_relaxed) 2643 2648 static __always_inline bool 2644 2649 raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) 2645 2650 { 2651 + #if defined(arch_atomic64_try_cmpxchg_release) 2652 + return arch_atomic64_try_cmpxchg_release(v, old, new); 2653 + #elif defined(arch_atomic64_try_cmpxchg_relaxed) 2646 2654 __atomic_release_fence(); 2647 2655 return arch_atomic64_try_cmpxchg_relaxed(v, old, new); 2648 - } 2649 2656 #elif defined(arch_atomic64_try_cmpxchg) 2650 - #define raw_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg 2657 + return arch_atomic64_try_cmpxchg(v, old, new); 2651 2658 #else 2652 - static __always_inline bool 2653 - raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) 2654 - { 2655 2659 s64 r, o = *old; 2656 2660 r = raw_atomic64_cmpxchg_release(v, o, new); 2657 2661 if (unlikely(r != o)) 2658 2662 *old = r; 2659 2663 return likely(r == o); 2660 - } 2661 2664 #endif 2665 + } 2662 2666 2663 - #if defined(arch_atomic64_try_cmpxchg_relaxed) 2664 - #define raw_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed 2665 - #elif defined(arch_atomic64_try_cmpxchg) 2666 - #define raw_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg 2667 - #else 2668 2667 static __always_inline bool 2669 2668 
raw_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) 2670 2669 { 2670 + #if defined(arch_atomic64_try_cmpxchg_relaxed) 2671 + return arch_atomic64_try_cmpxchg_relaxed(v, old, new); 2672 + #elif defined(arch_atomic64_try_cmpxchg) 2673 + return arch_atomic64_try_cmpxchg(v, old, new); 2674 + #else 2671 2675 s64 r, o = *old; 2672 2676 r = raw_atomic64_cmpxchg_relaxed(v, o, new); 2673 2677 if (unlikely(r != o)) 2674 2678 *old = r; 2675 2679 return likely(r == o); 2676 - } 2677 2680 #endif 2681 + } 2678 2682 2679 - #if defined(arch_atomic64_sub_and_test) 2680 - #define raw_atomic64_sub_and_test arch_atomic64_sub_and_test 2681 - #else 2682 2683 static __always_inline bool 2683 2684 raw_atomic64_sub_and_test(s64 i, atomic64_t *v) 2684 2685 { 2685 - return raw_atomic64_sub_return(i, v) == 0; 2686 - } 2687 - #endif 2688 - 2689 - #if defined(arch_atomic64_dec_and_test) 2690 - #define raw_atomic64_dec_and_test arch_atomic64_dec_and_test 2686 + #if defined(arch_atomic64_sub_and_test) 2687 + return arch_atomic64_sub_and_test(i, v); 2691 2688 #else 2689 + return raw_atomic64_sub_return(i, v) == 0; 2690 + #endif 2691 + } 2692 + 2692 2693 static __always_inline bool 2693 2694 raw_atomic64_dec_and_test(atomic64_t *v) 2694 2695 { 2695 - return raw_atomic64_dec_return(v) == 0; 2696 - } 2697 - #endif 2698 - 2699 - #if defined(arch_atomic64_inc_and_test) 2700 - #define raw_atomic64_inc_and_test arch_atomic64_inc_and_test 2696 + #if defined(arch_atomic64_dec_and_test) 2697 + return arch_atomic64_dec_and_test(v); 2701 2698 #else 2699 + return raw_atomic64_dec_return(v) == 0; 2700 + #endif 2701 + } 2702 + 2702 2703 static __always_inline bool 2703 2704 raw_atomic64_inc_and_test(atomic64_t *v) 2704 2705 { 2706 + #if defined(arch_atomic64_inc_and_test) 2707 + return arch_atomic64_inc_and_test(v); 2708 + #else 2705 2709 return raw_atomic64_inc_return(v) == 0; 2706 - } 2707 2710 #endif 2711 + } 2708 2712 2709 - #if defined(arch_atomic64_add_negative) 2710 - #define raw_atomic64_add_negative arch_atomic64_add_negative 2711 - #elif defined(arch_atomic64_add_negative_relaxed) 2712 2713 static __always_inline bool 2713 2714 raw_atomic64_add_negative(s64 i, atomic64_t *v) 2714 2715 { 2716 + #if defined(arch_atomic64_add_negative) 2717 + return arch_atomic64_add_negative(i, v); 2718 + #elif defined(arch_atomic64_add_negative_relaxed) 2715 2719 bool ret; 2716 2720 __atomic_pre_full_fence(); 2717 2721 ret = arch_atomic64_add_negative_relaxed(i, v); 2718 2722 __atomic_post_full_fence(); 2719 2723 return ret; 2720 - } 2721 2724 #else 2722 - static __always_inline bool 2723 - raw_atomic64_add_negative(s64 i, atomic64_t *v) 2724 - { 2725 2725 return raw_atomic64_add_return(i, v) < 0; 2726 - } 2727 2726 #endif 2727 + } 2728 2728 2729 - #if defined(arch_atomic64_add_negative_acquire) 2730 - #define raw_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire 2731 - #elif defined(arch_atomic64_add_negative_relaxed) 2732 2729 static __always_inline bool 2733 2730 raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v) 2734 2731 { 2732 + #if defined(arch_atomic64_add_negative_acquire) 2733 + return arch_atomic64_add_negative_acquire(i, v); 2734 + #elif defined(arch_atomic64_add_negative_relaxed) 2735 2735 bool ret = arch_atomic64_add_negative_relaxed(i, v); 2736 2736 __atomic_acquire_fence(); 2737 2737 return ret; 2738 - } 2739 2738 #elif defined(arch_atomic64_add_negative) 2740 - #define raw_atomic64_add_negative_acquire arch_atomic64_add_negative 2739 + return arch_atomic64_add_negative(i, v); 2741 2740 #else 
2742 - static __always_inline bool 2743 - raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v) 2744 - { 2745 2741 return raw_atomic64_add_return_acquire(i, v) < 0; 2746 - } 2747 2742 #endif 2743 + } 2748 2744 2749 - #if defined(arch_atomic64_add_negative_release) 2750 - #define raw_atomic64_add_negative_release arch_atomic64_add_negative_release 2751 - #elif defined(arch_atomic64_add_negative_relaxed) 2752 2745 static __always_inline bool 2753 2746 raw_atomic64_add_negative_release(s64 i, atomic64_t *v) 2754 2747 { 2748 + #if defined(arch_atomic64_add_negative_release) 2749 + return arch_atomic64_add_negative_release(i, v); 2750 + #elif defined(arch_atomic64_add_negative_relaxed) 2755 2751 __atomic_release_fence(); 2756 2752 return arch_atomic64_add_negative_relaxed(i, v); 2757 - } 2758 2753 #elif defined(arch_atomic64_add_negative) 2759 - #define raw_atomic64_add_negative_release arch_atomic64_add_negative 2754 + return arch_atomic64_add_negative(i, v); 2760 2755 #else 2761 - static __always_inline bool 2762 - raw_atomic64_add_negative_release(s64 i, atomic64_t *v) 2763 - { 2764 2756 return raw_atomic64_add_return_release(i, v) < 0; 2765 - } 2766 2757 #endif 2758 + } 2767 2759 2768 - #if defined(arch_atomic64_add_negative_relaxed) 2769 - #define raw_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed 2770 - #elif defined(arch_atomic64_add_negative) 2771 - #define raw_atomic64_add_negative_relaxed arch_atomic64_add_negative 2772 - #else 2773 2760 static __always_inline bool 2774 2761 raw_atomic64_add_negative_relaxed(s64 i, atomic64_t *v) 2775 2762 { 2776 - return raw_atomic64_add_return_relaxed(i, v) < 0; 2777 - } 2778 - #endif 2779 - 2780 - #if defined(arch_atomic64_fetch_add_unless) 2781 - #define raw_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless 2763 + #if defined(arch_atomic64_add_negative_relaxed) 2764 + return arch_atomic64_add_negative_relaxed(i, v); 2765 + #elif defined(arch_atomic64_add_negative) 2766 + return arch_atomic64_add_negative(i, v); 2782 2767 #else 2768 + return raw_atomic64_add_return_relaxed(i, v) < 0; 2769 + #endif 2770 + } 2771 + 2783 2772 static __always_inline s64 2784 2773 raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) 2785 2774 { 2775 + #if defined(arch_atomic64_fetch_add_unless) 2776 + return arch_atomic64_fetch_add_unless(v, a, u); 2777 + #else 2786 2778 s64 c = raw_atomic64_read(v); 2787 2779 2788 2780 do { ··· 2735 2839 } while (!raw_atomic64_try_cmpxchg(v, &c, c + a)); 2736 2840 2737 2841 return c; 2738 - } 2739 2842 #endif 2843 + } 2740 2844 2741 - #if defined(arch_atomic64_add_unless) 2742 - #define raw_atomic64_add_unless arch_atomic64_add_unless 2743 - #else 2744 2845 static __always_inline bool 2745 2846 raw_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) 2746 2847 { 2747 - return raw_atomic64_fetch_add_unless(v, a, u) != u; 2748 - } 2749 - #endif 2750 - 2751 - #if defined(arch_atomic64_inc_not_zero) 2752 - #define raw_atomic64_inc_not_zero arch_atomic64_inc_not_zero 2848 + #if defined(arch_atomic64_add_unless) 2849 + return arch_atomic64_add_unless(v, a, u); 2753 2850 #else 2851 + return raw_atomic64_fetch_add_unless(v, a, u) != u; 2852 + #endif 2853 + } 2854 + 2754 2855 static __always_inline bool 2755 2856 raw_atomic64_inc_not_zero(atomic64_t *v) 2756 2857 { 2757 - return raw_atomic64_add_unless(v, 1, 0); 2758 - } 2759 - #endif 2760 - 2761 - #if defined(arch_atomic64_inc_unless_negative) 2762 - #define raw_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative 2858 + #if defined(arch_atomic64_inc_not_zero)
2859 + return arch_atomic64_inc_not_zero(v); 2763 2860 #else 2861 + return raw_atomic64_add_unless(v, 1, 0); 2862 + #endif 2863 + } 2864 + 2764 2865 static __always_inline bool 2765 2866 raw_atomic64_inc_unless_negative(atomic64_t *v) 2766 2867 { 2868 + #if defined(arch_atomic64_inc_unless_negative) 2869 + return arch_atomic64_inc_unless_negative(v); 2870 + #else 2767 2871 s64 c = raw_atomic64_read(v); 2768 2872 2769 2873 do { ··· 2772 2876 } while (!raw_atomic64_try_cmpxchg(v, &c, c + 1)); 2773 2877 2774 2878 return true; 2775 - } 2776 2879 #endif 2880 + } 2777 2881 2778 - #if defined(arch_atomic64_dec_unless_positive) 2779 - #define raw_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive 2780 - #else 2781 2882 static __always_inline bool 2782 2883 raw_atomic64_dec_unless_positive(atomic64_t *v) 2783 2884 { 2885 + #if defined(arch_atomic64_dec_unless_positive) 2886 + return arch_atomic64_dec_unless_positive(v); 2887 + #else 2784 2888 s64 c = raw_atomic64_read(v); 2785 2889 2786 2890 do { ··· 2789 2893 } while (!raw_atomic64_try_cmpxchg(v, &c, c - 1)); 2790 2894 2791 2895 return true; 2792 - } 2793 2896 #endif 2897 + } 2794 2898 2795 - #if defined(arch_atomic64_dec_if_positive) 2796 - #define raw_atomic64_dec_if_positive arch_atomic64_dec_if_positive 2797 - #else 2798 2899 static __always_inline s64 2799 2900 raw_atomic64_dec_if_positive(atomic64_t *v) 2800 2901 { 2902 + #if defined(arch_atomic64_dec_if_positive) 2903 + return arch_atomic64_dec_if_positive(v); 2904 + #else 2801 2905 s64 dec, c = raw_atomic64_read(v); 2802 2906 2803 2907 do { ··· 2807 2911 } while (!raw_atomic64_try_cmpxchg(v, &c, dec)); 2808 2912 2809 2913 return dec; 2810 - } 2811 2914 #endif 2915 + } 2812 2916 2813 2917 #endif /* _LINUX_ATOMIC_FALLBACK_H */ 2814 - // c2048fccede6fac923252290e2b303949d5dec83 2918 + // 205e090382132f1fc85e48b46e722865f9c81309
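To make the effect of the above concrete: the ifdeffery inside each wrapper is resolved at preprocessing time, so on an architecture that does not provide arch_atomic64_inc_not_zero(), the wrapper above reduces to just its generic fallback branch (a sketch of the preprocessed result; which branch survives depends on the arch_* ops the architecture defines):

| static __always_inline bool
| raw_atomic64_inc_not_zero(atomic64_t *v)
| {
| 	/* #else branch: generic fallback via add_unless */
| 	return raw_atomic64_add_unless(v, 1, 0);
| }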
+25 -25
include/linux/atomic/atomic-instrumented.h
··· 462 462 } 463 463 464 464 static __always_inline int 465 - atomic_xchg(atomic_t *v, int i) 465 + atomic_xchg(atomic_t *v, int new) 466 466 { 467 467 kcsan_mb(); 468 468 instrument_atomic_read_write(v, sizeof(*v)); 469 - return raw_atomic_xchg(v, i); 469 + return raw_atomic_xchg(v, new); 470 470 } 471 471 472 472 static __always_inline int 473 - atomic_xchg_acquire(atomic_t *v, int i) 473 + atomic_xchg_acquire(atomic_t *v, int new) 474 474 { 475 475 instrument_atomic_read_write(v, sizeof(*v)); 476 - return raw_atomic_xchg_acquire(v, i); 476 + return raw_atomic_xchg_acquire(v, new); 477 477 } 478 478 479 479 static __always_inline int 480 - atomic_xchg_release(atomic_t *v, int i) 480 + atomic_xchg_release(atomic_t *v, int new) 481 481 { 482 482 kcsan_release(); 483 483 instrument_atomic_read_write(v, sizeof(*v)); 484 - return raw_atomic_xchg_release(v, i); 484 + return raw_atomic_xchg_release(v, new); 485 485 } 486 486 487 487 static __always_inline int 488 - atomic_xchg_relaxed(atomic_t *v, int i) 488 + atomic_xchg_relaxed(atomic_t *v, int new) 489 489 { 490 490 instrument_atomic_read_write(v, sizeof(*v)); 491 - return raw_atomic_xchg_relaxed(v, i); 491 + return raw_atomic_xchg_relaxed(v, new); 492 492 } 493 493 494 494 static __always_inline int ··· 1103 1103 } 1104 1104 1105 1105 static __always_inline s64 1106 - atomic64_xchg(atomic64_t *v, s64 i) 1106 + atomic64_xchg(atomic64_t *v, s64 new) 1107 1107 { 1108 1108 kcsan_mb(); 1109 1109 instrument_atomic_read_write(v, sizeof(*v)); 1110 - return raw_atomic64_xchg(v, i); 1110 + return raw_atomic64_xchg(v, new); 1111 1111 } 1112 1112 1113 1113 static __always_inline s64 1114 - atomic64_xchg_acquire(atomic64_t *v, s64 i) 1114 + atomic64_xchg_acquire(atomic64_t *v, s64 new) 1115 1115 { 1116 1116 instrument_atomic_read_write(v, sizeof(*v)); 1117 - return raw_atomic64_xchg_acquire(v, i); 1117 + return raw_atomic64_xchg_acquire(v, new); 1118 1118 } 1119 1119 1120 1120 static __always_inline s64 1121 - atomic64_xchg_release(atomic64_t *v, s64 i) 1121 + atomic64_xchg_release(atomic64_t *v, s64 new) 1122 1122 { 1123 1123 kcsan_release(); 1124 1124 instrument_atomic_read_write(v, sizeof(*v)); 1125 - return raw_atomic64_xchg_release(v, i); 1125 + return raw_atomic64_xchg_release(v, new); 1126 1126 } 1127 1127 1128 1128 static __always_inline s64 1129 - atomic64_xchg_relaxed(atomic64_t *v, s64 i) 1129 + atomic64_xchg_relaxed(atomic64_t *v, s64 new) 1130 1130 { 1131 1131 instrument_atomic_read_write(v, sizeof(*v)); 1132 - return raw_atomic64_xchg_relaxed(v, i); 1132 + return raw_atomic64_xchg_relaxed(v, new); 1133 1133 } 1134 1134 1135 1135 static __always_inline s64 ··· 1744 1744 } 1745 1745 1746 1746 static __always_inline long 1747 - atomic_long_xchg(atomic_long_t *v, long i) 1747 + atomic_long_xchg(atomic_long_t *v, long new) 1748 1748 { 1749 1749 kcsan_mb(); 1750 1750 instrument_atomic_read_write(v, sizeof(*v)); 1751 - return raw_atomic_long_xchg(v, i); 1751 + return raw_atomic_long_xchg(v, new); 1752 1752 } 1753 1753 1754 1754 static __always_inline long 1755 - atomic_long_xchg_acquire(atomic_long_t *v, long i) 1755 + atomic_long_xchg_acquire(atomic_long_t *v, long new) 1756 1756 { 1757 1757 instrument_atomic_read_write(v, sizeof(*v)); 1758 - return raw_atomic_long_xchg_acquire(v, i); 1758 + return raw_atomic_long_xchg_acquire(v, new); 1759 1759 } 1760 1760 1761 1761 static __always_inline long 1762 - atomic_long_xchg_release(atomic_long_t *v, long i) 1762 + atomic_long_xchg_release(atomic_long_t *v, long new) 1763 1763 { 1764 1764 kcsan_release();
1765 1765 instrument_atomic_read_write(v, sizeof(*v)); 1766 - return raw_atomic_long_xchg_release(v, i); 1766 + return raw_atomic_long_xchg_release(v, new); 1767 1767 } 1768 1768 1769 1769 static __always_inline long 1770 - atomic_long_xchg_relaxed(atomic_long_t *v, long i) 1770 + atomic_long_xchg_relaxed(atomic_long_t *v, long new) 1771 1771 { 1772 1772 instrument_atomic_read_write(v, sizeof(*v)); 1773 - return raw_atomic_long_xchg_relaxed(v, i); 1773 + return raw_atomic_long_xchg_relaxed(v, new); 1774 1774 } 1775 1775 1776 1776 static __always_inline long ··· 2231 2231 2232 2232 2233 2233 #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */ 2234 - // f6502977180430e61c1a7c4e5e665f04f501fb8d 2234 + // a4c3d2b229f907654cc53cb5d40e80f7fed1ec9c
+13 -13
include/linux/atomic/atomic-long.h
··· 622 622 } 623 623 624 624 static __always_inline long 625 - raw_atomic_long_xchg(atomic_long_t *v, long i) 625 + raw_atomic_long_xchg(atomic_long_t *v, long new) 626 626 { 627 627 #ifdef CONFIG_64BIT 628 - return raw_atomic64_xchg(v, i); 628 + return raw_atomic64_xchg(v, new); 629 629 #else 630 - return raw_atomic_xchg(v, i); 630 + return raw_atomic_xchg(v, new); 631 631 #endif 632 632 } 633 633 634 634 static __always_inline long 635 - raw_atomic_long_xchg_acquire(atomic_long_t *v, long i) 635 + raw_atomic_long_xchg_acquire(atomic_long_t *v, long new) 636 636 { 637 637 #ifdef CONFIG_64BIT 638 - return raw_atomic64_xchg_acquire(v, i); 638 + return raw_atomic64_xchg_acquire(v, new); 639 639 #else 640 - return raw_atomic_xchg_acquire(v, i); 640 + return raw_atomic_xchg_acquire(v, new); 641 641 #endif 642 642 } 643 643 644 644 static __always_inline long 645 - raw_atomic_long_xchg_release(atomic_long_t *v, long i) 645 + raw_atomic_long_xchg_release(atomic_long_t *v, long new) 646 646 { 647 647 #ifdef CONFIG_64BIT 648 - return raw_atomic64_xchg_release(v, i); 648 + return raw_atomic64_xchg_release(v, new); 649 649 #else 650 - return raw_atomic_xchg_release(v, i); 650 + return raw_atomic_xchg_release(v, new); 651 651 #endif 652 652 } 653 653 654 654 static __always_inline long 655 - raw_atomic_long_xchg_relaxed(atomic_long_t *v, long i) 655 + raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new) 656 656 { 657 657 #ifdef CONFIG_64BIT 658 - return raw_atomic64_xchg_relaxed(v, i); 658 + return raw_atomic64_xchg_relaxed(v, new); 659 659 #else 660 - return raw_atomic_xchg_relaxed(v, i); 660 + return raw_atomic_xchg_relaxed(v, new); 661 661 #endif 662 662 } 663 663 ··· 872 872 } 873 873 874 874 #endif /* _LINUX_ATOMIC_LONG_H */ 875 - // ad09f849db0db5b30c82e497eeb9056a394c5f22 875 + // e785d25cc3f220b7d473d36aac9da85dd7eb13a8
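For reference, atomic_long_t is typedef'd to atomic64_t on CONFIG_64BIT kernels and to atomic_t on 32-bit ones, so each raw_atomic_long_*() op above statically forwards to the matching fixed-size op. On a 64-bit build, for example, the relaxed variant effectively becomes (a sketch of the preprocessed result):

| static __always_inline long
| raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
| {
| 	/* CONFIG_64BIT=y branch of the #ifdef above */
| 	return raw_atomic64_xchg_relaxed(v, new);
| }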
+1 -1
scripts/atomic/atomics.tbl
··· 27 27 andnot vF i v 28 28 or vF i v 29 29 xor vF i v 30 - xchg I v i 30 + xchg I v i:new 31 31 cmpxchg I v i:old i:new 32 32 try_cmpxchg B v p:old i:new 33 33 sub_and_test b i v
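In atomics.tbl, an argument written as `type:name` carries an explicit parameter name; a bare `i` yields the default name `i`, while `i:new` yields `new`, matching the cmpxchg and try_cmpxchg lines below it. The effect on the generated prototypes (illustrative):

| xchg  I  v  i      ->  raw_atomic_xchg(atomic_t *v, int i)
| xchg  I  v  i:new  ->  raw_atomic_xchg(atomic_t *v, int new)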
-4
scripts/atomic/fallbacks/acquire
··· 1 1 cat <<EOF 2 - static __always_inline ${ret} 3 - raw_${atomic}_${pfx}${name}${sfx}_acquire(${params}) 4 - { 5 2 ${ret} ret = arch_${atomic}_${pfx}${name}${sfx}_relaxed(${args}); 6 3 __atomic_acquire_fence(); 7 4 return ret; 8 - } 9 5 EOF
-4
scripts/atomic/fallbacks/add_negative
··· 1 1 cat <<EOF 2 - static __always_inline bool 3 - raw_${atomic}_add_negative${order}(${int} i, ${atomic}_t *v) 4 - { 5 2 return raw_${atomic}_add_return${order}(i, v) < 0; 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/add_unless
··· 1 1 cat << EOF 2 - static __always_inline bool 3 - raw_${atomic}_add_unless(${atomic}_t *v, ${int} a, ${int} u) 4 - { 5 2 return raw_${atomic}_fetch_add_unless(v, a, u) != u; 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/andnot
··· 1 1 cat <<EOF 2 - static __always_inline ${ret} 3 - raw_${atomic}_${pfx}andnot${sfx}${order}(${int} i, ${atomic}_t *v) 4 - { 5 2 ${retstmt}raw_${atomic}_${pfx}and${sfx}${order}(~i, v); 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/cmpxchg
··· 1 1 cat <<EOF 2 - static __always_inline ${int} 3 - raw_${atomic}_cmpxchg${order}(${atomic}_t *v, ${int} old, ${int} new) 4 - { 5 2 return raw_cmpxchg${order}(&v->counter, old, new); 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/dec
··· 1 1 cat <<EOF 2 - static __always_inline ${ret} 3 - raw_${atomic}_${pfx}dec${sfx}${order}(${atomic}_t *v) 4 - { 5 2 ${retstmt}raw_${atomic}_${pfx}sub${sfx}${order}(1, v); 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/dec_and_test
··· 1 1 cat <<EOF 2 - static __always_inline bool 3 - raw_${atomic}_dec_and_test(${atomic}_t *v) 4 - { 5 2 return raw_${atomic}_dec_return(v) == 0; 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/dec_if_positive
··· 1 1 cat <<EOF 2 - static __always_inline ${ret} 3 - raw_${atomic}_dec_if_positive(${atomic}_t *v) 4 - { 5 2 ${int} dec, c = raw_${atomic}_read(v); 6 3 7 4 do { ··· 8 11 } while (!raw_${atomic}_try_cmpxchg(v, &c, dec)); 9 12 10 13 return dec; 11 - } 12 14 EOF
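As a usage note, dec_if_positive() returns the decremented value, and a negative return means the counter was already zero or negative and was left untouched, so callers typically test the sign (a hypothetical caller sketch, where 'refs' is an atomic64_t):

| s64 left = raw_atomic64_dec_if_positive(&refs);
| if (left < 0)
| 	return;	/* refs was already <= 0 and was not modified */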
-4
scripts/atomic/fallbacks/dec_unless_positive
··· 1 1 cat <<EOF 2 - static __always_inline bool 3 - raw_${atomic}_dec_unless_positive(${atomic}_t *v) 4 - { 5 2 ${int} c = raw_${atomic}_read(v); 6 3 7 4 do { ··· 7 10 } while (!raw_${atomic}_try_cmpxchg(v, &c, c - 1)); 8 11 9 12 return true; 10 - } 11 13 EOF
-4
scripts/atomic/fallbacks/fence
··· 1 1 cat <<EOF 2 - static __always_inline ${ret} 3 - raw_${atomic}_${pfx}${name}${sfx}(${params}) 4 - { 5 2 ${ret} ret; 6 3 __atomic_pre_full_fence(); 7 4 ret = arch_${atomic}_${pfx}${name}${sfx}_relaxed(${args}); 8 5 __atomic_post_full_fence(); 9 6 return ret; 10 - } 11 7 EOF
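Since the C prototype and ifdeffery are now emitted by gen-atomic-fallback.sh (see below), the order templates above contribute only function bodies. Combined with the cmpxchg template earlier, the fully-ordered cmpxchg wrapper comes out with this shape (a sketch of the generated header, not copied from it):

| static __always_inline int
| raw_atomic_cmpxchg(atomic_t *v, int old, int new)
| {
| #if defined(arch_atomic_cmpxchg)
| 	return arch_atomic_cmpxchg(v, old, new);
| #elif defined(arch_atomic_cmpxchg_relaxed)
| 	int ret;
| 	__atomic_pre_full_fence();
| 	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
| 	__atomic_post_full_fence();
| 	return ret;
| #else
| 	return raw_cmpxchg(&v->counter, old, new);
| #endif
| }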
-4
scripts/atomic/fallbacks/fetch_add_unless
··· 1 1 cat << EOF 2 - static __always_inline ${int} 3 - raw_${atomic}_fetch_add_unless(${atomic}_t *v, ${int} a, ${int} u) 4 - { 5 2 ${int} c = raw_${atomic}_read(v); 6 3 7 4 do { ··· 7 10 } while (!raw_${atomic}_try_cmpxchg(v, &c, c + a)); 8 11 9 12 return c; 10 - } 11 13 EOF
-4
scripts/atomic/fallbacks/inc
··· 1 1 cat <<EOF 2 - static __always_inline ${ret} 3 - raw_${atomic}_${pfx}inc${sfx}${order}(${atomic}_t *v) 4 - { 5 2 ${retstmt}raw_${atomic}_${pfx}add${sfx}${order}(1, v); 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/inc_and_test
··· 1 1 cat <<EOF 2 - static __always_inline bool 3 - raw_${atomic}_inc_and_test(${atomic}_t *v) 4 - { 5 2 return raw_${atomic}_inc_return(v) == 0; 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/inc_not_zero
··· 1 1 cat <<EOF 2 - static __always_inline bool 3 - raw_${atomic}_inc_not_zero(${atomic}_t *v) 4 - { 5 2 return raw_${atomic}_add_unless(v, 1, 0); 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/inc_unless_negative
··· 1 1 cat <<EOF 2 - static __always_inline bool 3 - raw_${atomic}_inc_unless_negative(${atomic}_t *v) 4 - { 5 2 ${int} c = raw_${atomic}_read(v); 6 3 7 4 do { ··· 7 10 } while (!raw_${atomic}_try_cmpxchg(v, &c, c + 1)); 8 11 9 12 return true; 10 - } 11 13 EOF
-4
scripts/atomic/fallbacks/read_acquire
··· 1 1 cat <<EOF 2 - static __always_inline ${ret} 3 - raw_${atomic}_read_acquire(const ${atomic}_t *v) 4 - { 5 2 ${int} ret; 6 3 7 4 if (__native_word(${atomic}_t)) { ··· 9 12 } 10 13 11 14 return ret; 12 - } 13 15 EOF
-4
scripts/atomic/fallbacks/release
··· 1 1 cat <<EOF 2 - static __always_inline ${ret} 3 - raw_${atomic}_${pfx}${name}${sfx}_release(${params}) 4 - { 5 2 __atomic_release_fence(); 6 3 ${retstmt}arch_${atomic}_${pfx}${name}${sfx}_relaxed(${args}); 7 - } 8 4 EOF
-4
scripts/atomic/fallbacks/set_release
··· 1 1 cat <<EOF 2 - static __always_inline void 3 - raw_${atomic}_set_release(${atomic}_t *v, ${int} i) 4 - { 5 2 if (__native_word(${atomic}_t)) { 6 3 smp_store_release(&(v)->counter, i); 7 4 } else { 8 5 __atomic_release_fence(); 9 6 raw_${atomic}_set(v, i); 10 7 } 11 - } 12 8 EOF
-4
scripts/atomic/fallbacks/sub_and_test
··· 1 1 cat <<EOF 2 - static __always_inline bool 3 - raw_${atomic}_sub_and_test(${int} i, ${atomic}_t *v) 4 - { 5 2 return raw_${atomic}_sub_return(i, v) == 0; 6 - } 7 3 EOF
-4
scripts/atomic/fallbacks/try_cmpxchg
··· 1 1 cat <<EOF 2 - static __always_inline bool 3 - raw_${atomic}_try_cmpxchg${order}(${atomic}_t *v, ${int} *old, ${int} new) 4 - { 5 2 ${int} r, o = *old; 6 3 r = raw_${atomic}_cmpxchg${order}(v, o, new); 7 4 if (unlikely(r != o)) 8 5 *old = r; 9 6 return likely(r == o); 10 - } 11 7 EOF
-4
scripts/atomic/fallbacks/xchg
··· 1 1 cat <<EOF 2 - static __always_inline ${int} 3 - raw_${atomic}_xchg${order}(${atomic}_t *v, ${int} new) 4 - { 5 2 return raw_xchg${order}(&v->counter, new); 6 - } 7 3 EOF
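Instantiated for, e.g., the int-sized release variant, this template and the release order fallback land inside a single generated wrapper (a sketch of the generated result):

| static __always_inline int
| raw_atomic_xchg_release(atomic_t *v, int new)
| {
| #if defined(arch_atomic_xchg_release)
| 	return arch_atomic_xchg_release(v, new);
| #elif defined(arch_atomic_xchg_relaxed)
| 	__atomic_release_fence();
| 	return arch_atomic_xchg_relaxed(v, new);
| #elif defined(arch_atomic_xchg)
| 	return arch_atomic_xchg(v, new);
| #else
| 	return raw_xchg_release(&v->counter, new);
| #endif
| }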
+19 -7
scripts/atomic/gen-atomic-fallback.sh
··· 60 60 local name="$1"; shift 61 61 local sfx="$1"; shift 62 62 local order="$1"; shift 63 - local atomic="$1" 63 + local atomic="$1"; shift 64 + local int="$1"; shift 64 65 65 66 local atomicname="${atomic}_${pfx}${name}${sfx}${order}" 66 67 local basename="${atomic}_${pfx}${name}${sfx}" 67 68 68 69 local template="$(find_fallback_template "${pfx}" "${name}" "${sfx}" "${order}")" 70 + 71 + local ret="$(gen_ret_type "${meta}" "${int}")" 72 + local retstmt="$(gen_ret_stmt "${meta}")" 73 + local params="$(gen_params "${int}" "${atomic}" "$@")" 74 + local args="$(gen_args "$@")" 75 + 76 + printf "static __always_inline ${ret}\n" 77 + printf "raw_${atomicname}(${params})\n" 78 + printf "{\n" 69 79 70 80 # Where there is no possible fallback, this order variant is mandatory 71 81 # and must be provided by arch code. Add a comment to the header to ··· 85 75 # define this order variant as a C function without a preprocessor 86 76 # symbol. 87 77 if [ -z ${template} ] && [ -z "${order}" ] && ! meta_has_relaxed "${meta}"; then 88 - printf "#define raw_${atomicname} arch_${atomicname}\n\n" 78 + printf "\t${retstmt}arch_${atomicname}(${args});\n" 79 + printf "}\n\n" 89 80 return 90 81 fi 91 82 92 83 printf "#if defined(arch_${atomicname})\n" 93 - printf "#define raw_${atomicname} arch_${atomicname}\n" 84 + printf "\t${retstmt}arch_${atomicname}(${args});\n" 94 85 95 86 # Allow FULL/ACQUIRE/RELEASE ops to be defined in terms of RELAXED ops 96 87 if [ "${order}" != "_relaxed" ] && meta_has_relaxed "${meta}"; then 97 88 printf "#elif defined(arch_${basename}_relaxed)\n" 98 - gen_order_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "$@" 89 + gen_order_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "${atomic}" "${int}" "$@" 99 90 fi 100 91 101 92 # Allow ACQUIRE/RELEASE/RELAXED ops to be defined in terms of FULL ops 102 93 if [ ! -z "${order}" ]; then 103 94 printf "#elif defined(arch_${basename})\n" 104 - printf "#define raw_${atomicname} arch_${basename}\n" 95 + printf "\t${retstmt}arch_${basename}(${args});\n" 105 96 fi 106 97 107 98 printf "#else\n" 108 99 if [ ! -z "${template}" ]; then 109 - gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "$@" 100 + gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "${atomic}" "${int}" "$@" 110 101 else 111 102 printf "#error \"Unable to define raw_${atomicname}\"\n" 112 103 fi 113 104 114 - printf "#endif\n\n" 105 + printf "#endif\n" 106 + printf "}\n\n" 115 107 } 116 108 117 109
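With the above, even mandatory ops (those with no fallback template, no ordering suffix, and no _relaxed form) are emitted as C functions that simply forward to the arch op, rather than as #defines; e.g. (a sketch of the generated output for one such op):

| static __always_inline void
| raw_atomic64_add(s64 i, atomic64_t *v)
| {
| 	arch_atomic64_add(i, v);
| }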