// include/linux/atomic-fallback.h (at v5.12-rc2)
// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

#ifndef xchg_relaxed
#define xchg_acquire xchg
#define xchg_release xchg
#define xchg_relaxed xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */

#ifndef cmpxchg_relaxed
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#define cmpxchg_relaxed cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */

#ifndef cmpxchg64_relaxed
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#define cmpxchg64_relaxed cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */

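/*
 * Editor's note, not part of the generated header: the
 * __atomic_op_acquire/release/fence() wrappers used above are defined
 * in <linux/atomic.h>, not here. As a rough sketch of the convention
 * (paraphrased, not a verbatim copy), the acquire form forwards to the
 * _relaxed op and then issues the acquire fence:
 *
 *	#define __atomic_op_acquire(op, args...)			\
 *	({								\
 *		typeof(op##_relaxed(args)) __ret = op##_relaxed(args);	\
 *		__atomic_acquire_fence();				\
 *		__ret;							\
 *	})
 */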
#ifndef try_cmpxchg_relaxed
#ifdef try_cmpxchg
#define try_cmpxchg_acquire try_cmpxchg
#define try_cmpxchg_release try_cmpxchg
#define try_cmpxchg_relaxed try_cmpxchg
#endif /* try_cmpxchg */

#ifndef try_cmpxchg
#define try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* try_cmpxchg */

#ifndef try_cmpxchg_acquire
#define try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = cmpxchg_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* try_cmpxchg_acquire */

#ifndef try_cmpxchg_release
#define try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = cmpxchg_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* try_cmpxchg_release */

#ifndef try_cmpxchg_relaxed
#define try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = cmpxchg_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* try_cmpxchg_relaxed */

#else /* try_cmpxchg_relaxed */

#ifndef try_cmpxchg_acquire
#define try_cmpxchg_acquire(...) \
	__atomic_op_acquire(try_cmpxchg, __VA_ARGS__)
#endif

#ifndef try_cmpxchg_release
#define try_cmpxchg_release(...) \
	__atomic_op_release(try_cmpxchg, __VA_ARGS__)
#endif

#ifndef try_cmpxchg
#define try_cmpxchg(...) \
	__atomic_op_fence(try_cmpxchg, __VA_ARGS__)
#endif

#endif /* try_cmpxchg_relaxed */

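/*
 * Illustrative usage, not part of the generated header: try_cmpxchg()
 * folds the reload-on-failure step into the primitive, which tightens
 * the classic cmpxchg() retry loop. A hypothetical capped increment of
 * a plain int (compiled out; for exposition only):
 */
#if 0
static bool add_capped(int *p, int cap)
{
	int old = READ_ONCE(*p);

	do {
		if (old >= cap)
			return false;
		/* on failure, try_cmpxchg() updated 'old' from *p */
	} while (!try_cmpxchg(p, &old, old + 1));

	return true;
}
#endif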
#define arch_atomic_read atomic_read
#define arch_atomic_read_acquire atomic_read_acquire

#ifndef atomic_read_acquire
static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic_read_acquire atomic_read_acquire
#endif

#define arch_atomic_set atomic_set
#define arch_atomic_set_release atomic_set_release

#ifndef atomic_set_release
static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic_set_release atomic_set_release
#endif

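/*
 * Illustrative usage, not part of the generated header: set_release()
 * publishes data and read_acquire() consumes it, forming a
 * release/acquire pair. A hypothetical producer/consumer (compiled out):
 */
#if 0
static int payload;			/* hypothetical shared data */
static atomic_t ready = ATOMIC_INIT(0);

static void producer(void)
{
	payload = 42;			/* A: write the data */
	atomic_set_release(&ready, 1);	/* B: publish; A ordered before B */
}

static int consumer(void)
{
	if (atomic_read_acquire(&ready))	/* C: flag observed... */
		return payload;			/* D: ...so A is visible */
	return -1;
}
#endif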
#define arch_atomic_add atomic_add

#define arch_atomic_add_return atomic_add_return
#define arch_atomic_add_return_acquire atomic_add_return_acquire
#define arch_atomic_add_return_release atomic_add_return_release
#define arch_atomic_add_return_relaxed atomic_add_return_relaxed

#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */

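/*
 * Note on the pattern above, which repeats throughout this file: an
 * architecture may provide only the _relaxed form of an operation in
 * its asm/atomic.h, e.g. (hypothetical arch code):
 *
 *	static __always_inline int
 *	atomic_add_return_relaxed(int i, atomic_t *v) { ... }
 *	#define atomic_add_return_relaxed atomic_add_return_relaxed
 *
 * The fallbacks then synthesize the _acquire, _release and
 * fully-ordered variants by bracketing the relaxed op with the
 * architecture's __atomic_{acquire,release}_fence() and
 * __atomic_{pre,post}_full_fence() hooks.
 */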
#define arch_atomic_fetch_add atomic_fetch_add
#define arch_atomic_fetch_add_acquire atomic_fetch_add_acquire
#define arch_atomic_fetch_add_release atomic_fetch_add_release
#define arch_atomic_fetch_add_relaxed atomic_fetch_add_relaxed

#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */

#define arch_atomic_sub atomic_sub

#define arch_atomic_sub_return atomic_sub_return
#define arch_atomic_sub_return_acquire atomic_sub_return_acquire
#define arch_atomic_sub_return_release atomic_sub_return_release
#define arch_atomic_sub_return_relaxed atomic_sub_return_relaxed

#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */

#define arch_atomic_fetch_sub atomic_fetch_sub
#define arch_atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#define arch_atomic_fetch_sub_release atomic_fetch_sub_release
#define arch_atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed

#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */

#define arch_atomic_inc atomic_inc

#ifndef atomic_inc
static __always_inline void
atomic_inc(atomic_t *v)
{
	atomic_add(1, v);
}
#define atomic_inc atomic_inc
#endif

#define arch_atomic_inc_return atomic_inc_return
#define arch_atomic_inc_return_acquire atomic_inc_return_acquire
#define arch_atomic_inc_return_release atomic_inc_return_release
#define arch_atomic_inc_return_relaxed atomic_inc_return_relaxed

#ifndef atomic_inc_return_relaxed
#ifdef atomic_inc_return
#define atomic_inc_return_acquire atomic_inc_return
#define atomic_inc_return_release atomic_inc_return
#define atomic_inc_return_relaxed atomic_inc_return
#endif /* atomic_inc_return */

#ifndef atomic_inc_return
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	return atomic_add_return(1, v);
}
#define atomic_inc_return atomic_inc_return
#endif

#ifndef atomic_inc_return_acquire
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	return atomic_add_return_acquire(1, v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#ifndef atomic_inc_return_release
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	return atomic_add_return_release(1, v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#ifndef atomic_inc_return_relaxed
static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	return atomic_add_return_relaxed(1, v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	int ret = atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#ifndef atomic_inc_return_release
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#ifndef atomic_inc_return
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_inc_return atomic_inc_return
#endif

#endif /* atomic_inc_return_relaxed */

#define arch_atomic_fetch_inc atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#define arch_atomic_fetch_inc_release atomic_fetch_inc_release
#define arch_atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc_relaxed
#ifdef atomic_fetch_inc
#define atomic_fetch_inc_acquire atomic_fetch_inc
#define atomic_fetch_inc_release atomic_fetch_inc
#define atomic_fetch_inc_relaxed atomic_fetch_inc
#endif /* atomic_fetch_inc */

#ifndef atomic_fetch_inc
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	return atomic_fetch_add(1, v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#ifndef atomic_fetch_inc_acquire
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	return atomic_fetch_add_acquire(1, v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#ifndef atomic_fetch_inc_release
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	return atomic_fetch_add_release(1, v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#ifndef atomic_fetch_inc_relaxed
static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	return atomic_fetch_add_relaxed(1, v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#ifndef atomic_fetch_inc_release
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#ifndef atomic_fetch_inc
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#endif /* atomic_fetch_inc_relaxed */

#define arch_atomic_dec atomic_dec

#ifndef atomic_dec
static __always_inline void
atomic_dec(atomic_t *v)
{
	atomic_sub(1, v);
}
#define atomic_dec atomic_dec
#endif

#define arch_atomic_dec_return atomic_dec_return
#define arch_atomic_dec_return_acquire atomic_dec_return_acquire
#define arch_atomic_dec_return_release atomic_dec_return_release
#define arch_atomic_dec_return_relaxed atomic_dec_return_relaxed

#ifndef atomic_dec_return_relaxed
#ifdef atomic_dec_return
#define atomic_dec_return_acquire atomic_dec_return
#define atomic_dec_return_release atomic_dec_return
#define atomic_dec_return_relaxed atomic_dec_return
#endif /* atomic_dec_return */

#ifndef atomic_dec_return
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	return atomic_sub_return(1, v);
}
#define atomic_dec_return atomic_dec_return
#endif

#ifndef atomic_dec_return_acquire
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	return atomic_sub_return_acquire(1, v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#ifndef atomic_dec_return_release
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	return atomic_sub_return_release(1, v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#ifndef atomic_dec_return_relaxed
static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	return atomic_sub_return_relaxed(1, v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	int ret = atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#ifndef atomic_dec_return_release
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#ifndef atomic_dec_return
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_dec_return atomic_dec_return
#endif

#endif /* atomic_dec_return_relaxed */

#define arch_atomic_fetch_dec atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#define arch_atomic_fetch_dec_release atomic_fetch_dec_release
#define arch_atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec_relaxed
#ifdef atomic_fetch_dec
#define atomic_fetch_dec_acquire atomic_fetch_dec
#define atomic_fetch_dec_release atomic_fetch_dec
#define atomic_fetch_dec_relaxed atomic_fetch_dec
#endif /* atomic_fetch_dec */

#ifndef atomic_fetch_dec
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	return atomic_fetch_sub(1, v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#ifndef atomic_fetch_dec_acquire
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	return atomic_fetch_sub_acquire(1, v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#ifndef atomic_fetch_dec_release
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	return atomic_fetch_sub_release(1, v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#ifndef atomic_fetch_dec_relaxed
static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	return atomic_fetch_sub_relaxed(1, v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#ifndef atomic_fetch_dec_release
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#ifndef atomic_fetch_dec
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#endif /* atomic_fetch_dec_relaxed */

#define arch_atomic_and atomic_and

#define arch_atomic_fetch_and atomic_fetch_and
#define arch_atomic_fetch_and_acquire atomic_fetch_and_acquire
#define arch_atomic_fetch_and_release atomic_fetch_and_release
#define arch_atomic_fetch_and_relaxed atomic_fetch_and_relaxed

#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */

#define arch_atomic_andnot atomic_andnot

#ifndef atomic_andnot
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}
#define atomic_andnot atomic_andnot
#endif

#define arch_atomic_fetch_andnot atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#define arch_atomic_fetch_andnot_release atomic_fetch_andnot_release
#define arch_atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed

#ifndef atomic_fetch_andnot_relaxed
#ifdef atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#endif /* atomic_fetch_andnot */

#ifndef atomic_fetch_andnot
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#ifndef atomic_fetch_andnot_acquire
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot_relaxed
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#endif /* atomic_fetch_andnot_relaxed */

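/*
 * Illustrative usage, not part of the generated header: since
 * fetch_andnot() returns the old value, it supports an atomic
 * "clear a flag and learn whether it was set" idiom. Hypothetical
 * flag word (compiled out):
 */
#if 0
#define MY_FLAG_PENDING 0x1		/* hypothetical flag bit */

static bool test_and_clear_pending(atomic_t *flags)
{
	/* old value had the bit set iff work was actually pending */
	return atomic_fetch_andnot(MY_FLAG_PENDING, flags) & MY_FLAG_PENDING;
}
#endif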
#define arch_atomic_or atomic_or

#define arch_atomic_fetch_or atomic_fetch_or
#define arch_atomic_fetch_or_acquire atomic_fetch_or_acquire
#define arch_atomic_fetch_or_release atomic_fetch_or_release
#define arch_atomic_fetch_or_relaxed atomic_fetch_or_relaxed

#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */

#define arch_atomic_xor atomic_xor

#define arch_atomic_fetch_xor atomic_fetch_xor
#define arch_atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#define arch_atomic_fetch_xor_release atomic_fetch_xor_release
#define arch_atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed

#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */

#define arch_atomic_xchg atomic_xchg
#define arch_atomic_xchg_acquire atomic_xchg_acquire
#define arch_atomic_xchg_release atomic_xchg_release
#define arch_atomic_xchg_relaxed atomic_xchg_relaxed

#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */

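/*
 * Illustrative usage, not part of the generated header: an
 * unconditional swap is a natural way to drain a counter in one shot.
 * Hypothetical helper (compiled out):
 */
#if 0
static int drain_pending(atomic_t *pending)
{
	/* fully-ordered swap: returns the old count, leaves zero behind */
	return atomic_xchg(pending, 0);
}
#endif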
#define arch_atomic_cmpxchg atomic_cmpxchg
#define arch_atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#define arch_atomic_cmpxchg_release atomic_cmpxchg_release
#define arch_atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed

#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */

#define arch_atomic_try_cmpxchg atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#define arch_atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#define arch_atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed

#ifndef atomic_try_cmpxchg_relaxed
#ifdef atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
#define atomic_try_cmpxchg_release atomic_try_cmpxchg
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

#ifndef atomic_try_cmpxchg
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#ifndef atomic_try_cmpxchg_acquire
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg_relaxed
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

#else /* atomic_try_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg_acquire
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#endif /* atomic_try_cmpxchg_relaxed */

#define arch_atomic_sub_and_test atomic_sub_and_test

#ifndef atomic_sub_and_test
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#define arch_atomic_dec_and_test atomic_dec_and_test

#ifndef atomic_dec_and_test
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#define arch_atomic_inc_and_test atomic_inc_and_test

#ifndef atomic_inc_and_test
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#define arch_atomic_add_negative atomic_add_negative

#ifndef atomic_add_negative
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#define atomic_add_negative atomic_add_negative
#endif

#define arch_atomic_fetch_add_unless atomic_fetch_add_unless

#ifndef atomic_fetch_add_unless
/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#define arch_atomic_add_unless atomic_add_unless

#ifndef atomic_add_unless
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}
#define atomic_add_unless atomic_add_unless
#endif

#define arch_atomic_inc_not_zero atomic_inc_not_zero

#ifndef atomic_inc_not_zero
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	return atomic_add_unless(v, 1, 0);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

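/*
 * Illustrative usage, not part of the generated header:
 * atomic_inc_not_zero() is the classic "take a reference unless the
 * object already died" building block. Hypothetical object type
 * (compiled out):
 */
#if 0
struct obj {
	atomic_t refcount;		/* hypothetical refcount field */
};

static bool obj_get(struct obj *o)
{
	/* fails permanently once the count has dropped to zero */
	return atomic_inc_not_zero(&o->refcount);
}
#endif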
#define arch_atomic_inc_unless_negative atomic_inc_unless_negative

#ifndef atomic_inc_unless_negative
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#define arch_atomic_dec_unless_positive atomic_dec_unless_positive

#ifndef atomic_dec_unless_positive
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#define arch_atomic_dec_if_positive atomic_dec_if_positive

#ifndef atomic_dec_if_positive
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif

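/*
 * Illustrative usage, not part of the generated header:
 * atomic_dec_if_positive() returns the would-be new value, which is
 * negative when the counter was already at or below zero. A
 * hypothetical permit counter (compiled out):
 */
#if 0
static bool take_permit(atomic_t *permits)
{
	/* only decrements when the result would stay >= 0 */
	return atomic_dec_if_positive(permits) >= 0;
}
#endif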
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#define arch_atomic64_read atomic64_read
#define arch_atomic64_read_acquire atomic64_read_acquire

#ifndef atomic64_read_acquire
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

#define arch_atomic64_set atomic64_set
#define arch_atomic64_set_release atomic64_set_release

#ifndef atomic64_set_release
static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic64_set_release atomic64_set_release
#endif

#define arch_atomic64_add atomic64_add

#define arch_atomic64_add_return atomic64_add_return
#define arch_atomic64_add_return_acquire atomic64_add_return_acquire
#define arch_atomic64_add_return_release atomic64_add_return_release
#define arch_atomic64_add_return_relaxed atomic64_add_return_relaxed

#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */

#define arch_atomic64_fetch_add atomic64_fetch_add
#define arch_atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#define arch_atomic64_fetch_add_release atomic64_fetch_add_release
#define arch_atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed

#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */

#define arch_atomic64_sub atomic64_sub

#define arch_atomic64_sub_return atomic64_sub_return
#define arch_atomic64_sub_return_acquire atomic64_sub_return_acquire
#define arch_atomic64_sub_return_release atomic64_sub_return_release
#define arch_atomic64_sub_return_relaxed atomic64_sub_return_relaxed

#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */

#define arch_atomic64_fetch_sub atomic64_fetch_sub
#define arch_atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#define arch_atomic64_fetch_sub_release atomic64_fetch_sub_release
#define arch_atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed

#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */

#define arch_atomic64_inc atomic64_inc

#ifndef atomic64_inc
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	atomic64_add(1, v);
}
#define atomic64_inc atomic64_inc
#endif

#define arch_atomic64_inc_return atomic64_inc_return
#define arch_atomic64_inc_return_acquire atomic64_inc_return_acquire
#define arch_atomic64_inc_return_release atomic64_inc_return_release
#define arch_atomic64_inc_return_relaxed atomic64_inc_return_relaxed

#ifndef atomic64_inc_return_relaxed
#ifdef atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#define atomic64_inc_return_relaxed atomic64_inc_return
#endif /* atomic64_inc_return */

#ifndef atomic64_inc_return
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	return atomic64_add_return(1, v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#ifndef atomic64_inc_return_acquire
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	return atomic64_add_return_acquire(1, v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	return atomic64_add_return_release(1, v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return_relaxed
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	return atomic64_add_return_relaxed(1, v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_inc_return atomic64_inc_return
#endif

#endif /* atomic64_inc_return_relaxed */

#define arch_atomic64_fetch_inc atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#define arch_atomic64_fetch_inc_release atomic64_fetch_inc_release
#define arch_atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc_relaxed
#ifdef atomic64_fetch_inc
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#ifndef atomic64_fetch_inc
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	return atomic64_fetch_add(1, v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#ifndef atomic64_fetch_inc_acquire
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return atomic64_fetch_add_acquire(1, v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	return atomic64_fetch_add_release(1, v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc_relaxed
static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return atomic64_fetch_add_relaxed(1, v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#endif /* atomic64_fetch_inc_relaxed */

#define arch_atomic64_dec atomic64_dec

#ifndef atomic64_dec
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	atomic64_sub(1, v);
}
#define atomic64_dec atomic64_dec
#endif

#define arch_atomic64_dec_return atomic64_dec_return
#define arch_atomic64_dec_return_acquire atomic64_dec_return_acquire
#define arch_atomic64_dec_return_release atomic64_dec_return_release
#define arch_atomic64_dec_return_relaxed atomic64_dec_return_relaxed

#ifndef atomic64_dec_return_relaxed
#ifdef atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#define atomic64_dec_return_relaxed atomic64_dec_return
#endif /* atomic64_dec_return */

#ifndef atomic64_dec_return
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	return atomic64_sub_return(1, v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#ifndef atomic64_dec_return_acquire
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	return atomic64_sub_return_acquire(1, v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	return atomic64_sub_return_release(1, v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return_relaxed
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	return atomic64_sub_return_relaxed(1, v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_dec_return atomic64_dec_return
#endif

#endif /* atomic64_dec_return_relaxed */

#define arch_atomic64_fetch_dec atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#define arch_atomic64_fetch_dec_release atomic64_fetch_dec_release
#define arch_atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec_relaxed
#ifdef atomic64_fetch_dec
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#ifndef atomic64_fetch_dec
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	return atomic64_fetch_sub(1, v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#ifndef atomic64_fetch_dec_acquire
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return atomic64_fetch_sub_acquire(1, v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	return atomic64_fetch_sub_release(1, v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec_relaxed
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return atomic64_fetch_sub_relaxed(1, v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#endif /* atomic64_fetch_dec_relaxed */

#define arch_atomic64_and atomic64_and

#define arch_atomic64_fetch_and atomic64_fetch_and
#define arch_atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#define arch_atomic64_fetch_and_release atomic64_fetch_and_release
#define arch_atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed

#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#ifndef atomic64_fetch_and_release
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#ifndef atomic64_fetch_and
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#endif /* atomic64_fetch_and_relaxed */

#define arch_atomic64_andnot atomic64_andnot

#ifndef atomic64_andnot
static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	atomic64_and(~i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#define arch_atomic64_fetch_andnot atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#define arch_atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#define arch_atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed

#ifndef atomic64_fetch_andnot_relaxed
#ifdef atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot_relaxed
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#endif /* atomic64_fetch_andnot_relaxed */


#define arch_atomic64_fetch_andnot atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#define arch_atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#define arch_atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed

#ifndef atomic64_fetch_andnot_relaxed
#ifdef atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot_relaxed
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#endif /* atomic64_fetch_andnot_relaxed */

#define arch_atomic64_or atomic64_or

#define arch_atomic64_fetch_or atomic64_fetch_or
#define arch_atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#define arch_atomic64_fetch_or_release atomic64_fetch_or_release
#define arch_atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed

#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */

#define arch_atomic64_xor atomic64_xor

#define arch_atomic64_fetch_xor atomic64_fetch_xor
#define arch_atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#define arch_atomic64_fetch_xor_release atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed

#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */
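
/*
 * The fetch_and/fetch_or/fetch_xor families above all return the value
 * the word held before the bitwise update, which is what makes
 * set-and-test idioms race-free: exactly one CPU can observe a bit as
 * previously clear. Hedged sketch with hypothetical names:
 *
 *	old = atomic64_fetch_or(FLAG_PENDING, &flags);
 *	if (!(old & FLAG_PENDING))
 *		first_setter = true;	(only the winning caller sees this)
 */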

#define arch_atomic64_xchg atomic64_xchg
#define arch_atomic64_xchg_acquire atomic64_xchg_acquire
#define arch_atomic64_xchg_release atomic64_xchg_release
#define arch_atomic64_xchg_relaxed atomic64_xchg_relaxed

#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */

#define arch_atomic64_cmpxchg atomic64_cmpxchg
#define arch_atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#define arch_atomic64_cmpxchg_release atomic64_cmpxchg_release
#define arch_atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed

#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */
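
/*
 * atomic64_cmpxchg(v, old, new) stores @new only if @v still holds @old
 * and returns the value actually found, so success is detected by
 * comparing the return value with @old. Hedged sketch of a one-shot
 * claim on a hypothetical owner word (0 meaning unclaimed):
 *
 *	if (atomic64_cmpxchg(&owner, 0, my_id) == 0)
 *		claimed = true;	(this caller won the race)
 */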

#define arch_atomic64_try_cmpxchg atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#define arch_atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#define arch_atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed

#ifndef atomic64_try_cmpxchg_relaxed
#ifdef atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_try_cmpxchg
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#ifndef atomic64_try_cmpxchg_acquire
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg_relaxed
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#else /* atomic64_try_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg_acquire
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#endif /* atomic64_try_cmpxchg_relaxed */
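
/*
 * atomic64_try_cmpxchg() returns true on success and, on failure,
 * writes the value actually found back through @old, so CAS loops need
 * no explicit re-read. Hedged sketch of a bounded increment (counter
 * and LIMIT are hypothetical):
 *
 *	s64 c = atomic64_read(&counter);
 *
 *	do {
 *		if (c >= LIMIT)
 *			return false;
 *	} while (!atomic64_try_cmpxchg(&counter, &c, c + 1));
 *	return true;
 *
 * After a failed attempt, c already holds the fresh value, so the bound
 * is re-checked without another atomic64_read(). The _unless/_if
 * helpers later in this file are built from exactly this loop shape.
 */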

#define arch_atomic64_sub_and_test atomic64_sub_and_test

#ifndef atomic64_sub_and_test
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#define arch_atomic64_dec_and_test atomic64_dec_and_test

#ifndef atomic64_dec_and_test
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#define arch_atomic64_inc_and_test atomic64_inc_and_test

#ifndef atomic64_inc_and_test
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#define arch_atomic64_add_negative atomic64_add_negative

#ifndef atomic64_add_negative
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when the
 * result is greater than or equal to zero.
 */
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#define atomic64_add_negative atomic64_add_negative
#endif

#define arch_atomic64_fetch_add_unless atomic64_fetch_add_unless

#ifndef atomic64_fetch_add_unless
/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#define arch_atomic64_add_unless atomic64_add_unless

#ifndef atomic64_add_unless
/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}
#define atomic64_add_unless atomic64_add_unless
#endif

#define arch_atomic64_inc_not_zero atomic64_inc_not_zero

#ifndef atomic64_inc_not_zero
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	return atomic64_add_unless(v, 1, 0);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif
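
/*
 * atomic64_inc_not_zero() is the usual "take a reference only while the
 * object is live" operation: it never resurrects a counter that has
 * already hit zero. The helpers below (inc_unless_negative,
 * dec_unless_positive, dec_if_positive) reuse the same try_cmpxchg()
 * retry loop, differing only in the predicate checked before each
 * attempt. Hedged lookup sketch (obj and refcnt are hypothetical):
 *
 *	if (obj && !atomic64_inc_not_zero(&obj->refcnt))
 *		obj = NULL;	(already being torn down)
 */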

#define arch_atomic64_inc_unless_negative atomic64_inc_unless_negative

#ifndef atomic64_inc_unless_negative
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#define arch_atomic64_dec_unless_positive atomic64_dec_unless_positive

#ifndef atomic64_dec_unless_positive
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#define arch_atomic64_dec_if_positive atomic64_dec_if_positive

#ifndef atomic64_dec_if_positive
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// d78e6c293c661c15188f0ec05bce45188c8d5892