// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */

#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */

#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */
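
/*
 * Editorial sketch (not part of the generated header): the
 * __atomic_op_*() helpers used above are defined in <linux/atomic.h>
 * and compose a _relaxed primitive with an explicit fence. Roughly:
 *
 *	#define __atomic_op_acquire(op, args...)			\
 *	({								\
 *		typeof(op##_relaxed(args)) __ret = op##_relaxed(args);	\
 *		__atomic_acquire_fence();				\
 *		__ret;							\
 *	})
 *
 * __atomic_op_release() issues __atomic_release_fence() before the
 * relaxed op, and __atomic_op_fence() brackets it with the pre/post
 * full fences, mirroring the open-coded fallbacks below.
 */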

#ifndef atomic_read_acquire
static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic_read_acquire atomic_read_acquire
#endif

#ifndef atomic_set_release
static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic_set_release atomic_set_release
#endif

#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */

#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */

#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */

#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */

#ifndef atomic_inc
static __always_inline void
atomic_inc(atomic_t *v)
{
	atomic_add(1, v);
}
#define atomic_inc atomic_inc
#endif
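
/*
 * Editorial note (not part of the generated header): the sections
 * below use a three-way pattern. If the architecture defines neither
 * atomic_inc_return() nor atomic_inc_return_relaxed(), every ordering
 * variant is synthesized from the atomic_add_return*() family; if it
 * defines only the fully ordered op, that op serves all four
 * orderings; if it defines the _relaxed op, acquire/release/full are
 * built from it with fences. For example, an architecture providing
 * only atomic_add_return_relaxed() ends up with:
 *
 *	atomic_inc_return_relaxed(v)	== atomic_add_return_relaxed(1, v)
 *	atomic_inc_return(v)		== atomic_add_return(1, v),
 *					   itself the relaxed op bracketed
 *					   by the full fences
 */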

#ifndef atomic_inc_return_relaxed
#ifdef atomic_inc_return
#define atomic_inc_return_acquire atomic_inc_return
#define atomic_inc_return_release atomic_inc_return
#define atomic_inc_return_relaxed atomic_inc_return
#endif /* atomic_inc_return */

#ifndef atomic_inc_return
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	return atomic_add_return(1, v);
}
#define atomic_inc_return atomic_inc_return
#endif

#ifndef atomic_inc_return_acquire
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	return atomic_add_return_acquire(1, v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#ifndef atomic_inc_return_release
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	return atomic_add_return_release(1, v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#ifndef atomic_inc_return_relaxed
static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	return atomic_add_return_relaxed(1, v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	int ret = atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#ifndef atomic_inc_return_release
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#ifndef atomic_inc_return
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_inc_return atomic_inc_return
#endif

#endif /* atomic_inc_return_relaxed */

#ifndef atomic_fetch_inc_relaxed
#ifdef atomic_fetch_inc
#define atomic_fetch_inc_acquire atomic_fetch_inc
#define atomic_fetch_inc_release atomic_fetch_inc
#define atomic_fetch_inc_relaxed atomic_fetch_inc
#endif /* atomic_fetch_inc */

#ifndef atomic_fetch_inc
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	return atomic_fetch_add(1, v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#ifndef atomic_fetch_inc_acquire
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	return atomic_fetch_add_acquire(1, v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#ifndef atomic_fetch_inc_release
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	return atomic_fetch_add_release(1, v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#ifndef atomic_fetch_inc_relaxed
static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	return atomic_fetch_add_relaxed(1, v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#ifndef atomic_fetch_inc_release
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#ifndef atomic_fetch_inc
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#endif /* atomic_fetch_inc_relaxed */

#ifndef atomic_dec
static __always_inline void
atomic_dec(atomic_t *v)
{
	atomic_sub(1, v);
}
#define atomic_dec atomic_dec
#endif

#ifndef atomic_dec_return_relaxed
#ifdef atomic_dec_return
#define atomic_dec_return_acquire atomic_dec_return
#define atomic_dec_return_release atomic_dec_return
#define atomic_dec_return_relaxed atomic_dec_return
#endif /* atomic_dec_return */

#ifndef atomic_dec_return
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	return atomic_sub_return(1, v);
}
#define atomic_dec_return atomic_dec_return
#endif

#ifndef atomic_dec_return_acquire
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	return atomic_sub_return_acquire(1, v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#ifndef atomic_dec_return_release
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	return atomic_sub_return_release(1, v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#ifndef atomic_dec_return_relaxed
static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	return atomic_sub_return_relaxed(1, v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	int ret = atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#ifndef atomic_dec_return_release
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#ifndef atomic_dec_return
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_dec_return atomic_dec_return
#endif

#endif /* atomic_dec_return_relaxed */

#ifndef atomic_fetch_dec_relaxed
#ifdef atomic_fetch_dec
#define atomic_fetch_dec_acquire atomic_fetch_dec
#define atomic_fetch_dec_release atomic_fetch_dec
#define atomic_fetch_dec_relaxed atomic_fetch_dec
#endif /* atomic_fetch_dec */

#ifndef atomic_fetch_dec
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	return atomic_fetch_sub(1, v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#ifndef atomic_fetch_dec_acquire
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	return atomic_fetch_sub_acquire(1, v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#ifndef atomic_fetch_dec_release
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	return atomic_fetch_sub_release(1, v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#ifndef atomic_fetch_dec_relaxed
static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	return atomic_fetch_sub_relaxed(1, v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#ifndef atomic_fetch_dec_release
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#ifndef atomic_fetch_dec
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#endif /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */

#ifndef atomic_andnot
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}
#define atomic_andnot atomic_andnot
#endif
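
/*
 * Usage sketch (illustrative, not part of the generated file): per the
 * fallback above, clearing bits with atomic_andnot() is the same as
 * atomic_and() on the complement of the mask. With a hypothetical
 * MY_FLAG bit in an atomic_t state word:
 *
 *	atomic_andnot(MY_FLAG, &state);
 *
 * behaves like
 *
 *	atomic_and(~MY_FLAG, &state);
 */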

#ifndef atomic_fetch_andnot_relaxed
#ifdef atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#endif /* atomic_fetch_andnot */

#ifndef atomic_fetch_andnot
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#ifndef atomic_fetch_andnot_acquire
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot_relaxed
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#endif /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */

#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */

#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */
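
/*
 * Usage sketch (illustrative, not part of the generated file): the
 * try_cmpxchg() form below folds the reload of the old value into the
 * primitive. An update loop written with cmpxchg() as
 *
 *	int old = atomic_read(v);
 *	for (;;) {
 *		int tmp = atomic_cmpxchg(v, old, old + 1);
 *		if (tmp == old)
 *			break;
 *		old = tmp;
 *	}
 *
 * becomes, with try_cmpxchg():
 *
 *	int old = atomic_read(v);
 *	do {
 *	} while (!atomic_try_cmpxchg(v, &old, old + 1));
 *
 * which is the idiom the _unless/_if_positive helpers later in this
 * file are built on.
 */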

#ifndef atomic_try_cmpxchg_relaxed
#ifdef atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
#define atomic_try_cmpxchg_release atomic_try_cmpxchg
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

#ifndef atomic_try_cmpxchg
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#ifndef atomic_try_cmpxchg_acquire
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg_relaxed
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

#else /* atomic_try_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg_acquire
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#endif /* atomic_try_cmpxchg_relaxed */

#ifndef atomic_sub_and_test
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#ifndef atomic_dec_and_test
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#ifndef atomic_inc_and_test
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#ifndef atomic_add_negative
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when the
 * result is greater than or equal to zero.
 */
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#define atomic_add_negative atomic_add_negative
#endif
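
/*
 * Usage sketch (illustrative, not part of the generated file): a
 * hypothetical put_ref() releasing a reference and freeing on the
 * final drop, built on atomic_dec_and_test() above:
 *
 *	static void put_ref(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refs))
 *			free_obj(o);
 *	}
 *
 * struct obj, its refs field and free_obj() are assumptions for the
 * example only; real kernel code would normally use refcount_t.
 */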

#ifndef atomic_fetch_add_unless
/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#ifndef atomic_add_unless
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}
#define atomic_add_unless atomic_add_unless
#endif

#ifndef atomic_inc_not_zero
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	return atomic_add_unless(v, 1, 0);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif
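
/*
 * Usage sketch (illustrative, not part of the generated file): the
 * matching lookup side of the put_ref() sketch earlier, taking a
 * reference only while the count is non-zero, directly from
 * atomic_inc_not_zero():
 *
 *	static bool get_ref(struct obj *o)
 *	{
 *		return atomic_inc_not_zero(&o->refs);
 *	}
 *
 * which, per the fallbacks above, expands to
 * atomic_fetch_add_unless(&o->refs, 1, 0) != 0.
 */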

#ifndef atomic_inc_unless_negative
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#ifndef atomic_dec_unless_positive
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#ifndef atomic_dec_if_positive
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
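
/*
 * Usage sketch (illustrative, not part of the generated file): a
 * hypothetical trylock on a counting resource, using the fact that
 * atomic_dec_if_positive() above returns the decremented value and
 * refuses to store a negative result:
 *
 *	static bool try_take(atomic_t *avail)
 *	{
 *		return atomic_dec_if_positive(avail) >= 0;
 *	}
 */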

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

#ifndef atomic64_set_release
static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic64_set_release atomic64_set_release
#endif

#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */

#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */

#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */

#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_inc
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	atomic64_add(1, v);
}
#define atomic64_inc atomic64_inc
#endif

#ifndef atomic64_inc_return_relaxed
#ifdef atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#define atomic64_inc_return_relaxed atomic64_inc_return
#endif /* atomic64_inc_return */

#ifndef atomic64_inc_return
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	return atomic64_add_return(1, v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#ifndef atomic64_inc_return_acquire
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	return atomic64_add_return_acquire(1, v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	return atomic64_add_return_release(1, v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return_relaxed
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	return atomic64_add_return_relaxed(1, v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_inc_return atomic64_inc_return
#endif

#endif /* atomic64_inc_return_relaxed */

#ifndef atomic64_fetch_inc_relaxed
#ifdef atomic64_fetch_inc
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#ifndef atomic64_fetch_inc
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	return atomic64_fetch_add(1, v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#ifndef atomic64_fetch_inc_acquire
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return atomic64_fetch_add_acquire(1, v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	return atomic64_fetch_add_release(1, v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc_relaxed
static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return atomic64_fetch_add_relaxed(1, v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#endif /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_dec
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	atomic64_sub(1, v);
}
#define atomic64_dec atomic64_dec
#endif

#ifndef atomic64_dec_return_relaxed
#ifdef atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#define atomic64_dec_return_relaxed atomic64_dec_return
#endif /* atomic64_dec_return */

#ifndef atomic64_dec_return
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	return atomic64_sub_return(1, v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#ifndef atomic64_dec_return_acquire
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	return atomic64_sub_return_acquire(1, v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	return atomic64_sub_return_release(1, v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return_relaxed
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	return atomic64_sub_return_relaxed(1, v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_dec_return atomic64_dec_return
#endif

#endif /* atomic64_dec_return_relaxed */

#ifndef atomic64_fetch_dec_relaxed
#ifdef atomic64_fetch_dec
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#ifndef atomic64_fetch_dec
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	return atomic64_fetch_sub(1, v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#ifndef atomic64_fetch_dec_acquire
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return atomic64_fetch_sub_acquire(1, v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	return atomic64_fetch_sub_release(1, v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec_relaxed
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return atomic64_fetch_sub_relaxed(1, v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#endif /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#ifndef atomic64_fetch_and_release
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#ifndef atomic64_fetch_and
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#endif /* atomic64_fetch_and_relaxed */

#ifndef atomic64_andnot
static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	atomic64_and(~i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#ifndef atomic64_fetch_andnot_relaxed
#ifdef atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot_relaxed
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#endif /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */

#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg_relaxed
#ifdef atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_try_cmpxchg
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#ifndef atomic64_try_cmpxchg_acquire
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg_relaxed
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#else /* atomic64_try_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg_acquire
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#endif /* atomic64_try_cmpxchg_relaxed */

#ifndef atomic64_sub_and_test
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#ifndef atomic64_dec_and_test
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#ifndef atomic64_inc_and_test
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#ifndef atomic64_add_negative
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when the
 * result is greater than or equal to zero.
 */
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#define atomic64_add_negative atomic64_add_negative
#endif

#ifndef atomic64_fetch_add_unless
/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#ifndef atomic64_add_unless
/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}
#define atomic64_add_unless atomic64_add_unless
#endif

#ifndef atomic64_inc_not_zero
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	return atomic64_add_unless(v, 1, 0);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#ifndef atomic64_inc_unless_negative
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#ifndef atomic64_dec_unless_positive
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#ifndef atomic64_dec_if_positive
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// 1fac0941c79bf0ae100723cc2ac9b94061f0b67a