at v6.4 72 kB view raw
1// SPDX-License-Identifier: GPL-2.0 2 3// Generated by scripts/atomic/gen-atomic-fallback.sh 4// DO NOT MODIFY THIS FILE DIRECTLY 5 6#ifndef _LINUX_ATOMIC_FALLBACK_H 7#define _LINUX_ATOMIC_FALLBACK_H 8 9#include <linux/compiler.h> 10 11#ifndef arch_xchg_relaxed 12#define arch_xchg_acquire arch_xchg 13#define arch_xchg_release arch_xchg 14#define arch_xchg_relaxed arch_xchg 15#else /* arch_xchg_relaxed */ 16 17#ifndef arch_xchg_acquire 18#define arch_xchg_acquire(...) \ 19 __atomic_op_acquire(arch_xchg, __VA_ARGS__) 20#endif 21 22#ifndef arch_xchg_release 23#define arch_xchg_release(...) \ 24 __atomic_op_release(arch_xchg, __VA_ARGS__) 25#endif 26 27#ifndef arch_xchg 28#define arch_xchg(...) \ 29 __atomic_op_fence(arch_xchg, __VA_ARGS__) 30#endif 31 32#endif /* arch_xchg_relaxed */ 33 34#ifndef arch_cmpxchg_relaxed 35#define arch_cmpxchg_acquire arch_cmpxchg 36#define arch_cmpxchg_release arch_cmpxchg 37#define arch_cmpxchg_relaxed arch_cmpxchg 38#else /* arch_cmpxchg_relaxed */ 39 40#ifndef arch_cmpxchg_acquire 41#define arch_cmpxchg_acquire(...) \ 42 __atomic_op_acquire(arch_cmpxchg, __VA_ARGS__) 43#endif 44 45#ifndef arch_cmpxchg_release 46#define arch_cmpxchg_release(...) \ 47 __atomic_op_release(arch_cmpxchg, __VA_ARGS__) 48#endif 49 50#ifndef arch_cmpxchg 51#define arch_cmpxchg(...) \ 52 __atomic_op_fence(arch_cmpxchg, __VA_ARGS__) 53#endif 54 55#endif /* arch_cmpxchg_relaxed */ 56 57#ifndef arch_cmpxchg64_relaxed 58#define arch_cmpxchg64_acquire arch_cmpxchg64 59#define arch_cmpxchg64_release arch_cmpxchg64 60#define arch_cmpxchg64_relaxed arch_cmpxchg64 61#else /* arch_cmpxchg64_relaxed */ 62 63#ifndef arch_cmpxchg64_acquire 64#define arch_cmpxchg64_acquire(...) \ 65 __atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__) 66#endif 67 68#ifndef arch_cmpxchg64_release 69#define arch_cmpxchg64_release(...) \ 70 __atomic_op_release(arch_cmpxchg64, __VA_ARGS__) 71#endif 72 73#ifndef arch_cmpxchg64 74#define arch_cmpxchg64(...) \ 75 __atomic_op_fence(arch_cmpxchg64, __VA_ARGS__) 76#endif 77 78#endif /* arch_cmpxchg64_relaxed */ 79 80#ifndef arch_try_cmpxchg_relaxed 81#ifdef arch_try_cmpxchg 82#define arch_try_cmpxchg_acquire arch_try_cmpxchg 83#define arch_try_cmpxchg_release arch_try_cmpxchg 84#define arch_try_cmpxchg_relaxed arch_try_cmpxchg 85#endif /* arch_try_cmpxchg */ 86 87#ifndef arch_try_cmpxchg 88#define arch_try_cmpxchg(_ptr, _oldp, _new) \ 89({ \ 90 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 91 ___r = arch_cmpxchg((_ptr), ___o, (_new)); \ 92 if (unlikely(___r != ___o)) \ 93 *___op = ___r; \ 94 likely(___r == ___o); \ 95}) 96#endif /* arch_try_cmpxchg */ 97 98#ifndef arch_try_cmpxchg_acquire 99#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \ 100({ \ 101 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 102 ___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \ 103 if (unlikely(___r != ___o)) \ 104 *___op = ___r; \ 105 likely(___r == ___o); \ 106}) 107#endif /* arch_try_cmpxchg_acquire */ 108 109#ifndef arch_try_cmpxchg_release 110#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \ 111({ \ 112 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 113 ___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \ 114 if (unlikely(___r != ___o)) \ 115 *___op = ___r; \ 116 likely(___r == ___o); \ 117}) 118#endif /* arch_try_cmpxchg_release */ 119 120#ifndef arch_try_cmpxchg_relaxed 121#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \ 122({ \ 123 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 124 ___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \ 125 if (unlikely(___r != ___o)) \ 126 *___op = ___r; \ 127 likely(___r == ___o); \ 128}) 129#endif /* arch_try_cmpxchg_relaxed */ 130 131#else /* arch_try_cmpxchg_relaxed */ 132 133#ifndef arch_try_cmpxchg_acquire 134#define arch_try_cmpxchg_acquire(...) \ 135 __atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__) 136#endif 137 138#ifndef arch_try_cmpxchg_release 139#define arch_try_cmpxchg_release(...) \ 140 __atomic_op_release(arch_try_cmpxchg, __VA_ARGS__) 141#endif 142 143#ifndef arch_try_cmpxchg 144#define arch_try_cmpxchg(...) \ 145 __atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__) 146#endif 147 148#endif /* arch_try_cmpxchg_relaxed */ 149 150#ifndef arch_try_cmpxchg64_relaxed 151#ifdef arch_try_cmpxchg64 152#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64 153#define arch_try_cmpxchg64_release arch_try_cmpxchg64 154#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64 155#endif /* arch_try_cmpxchg64 */ 156 157#ifndef arch_try_cmpxchg64 158#define arch_try_cmpxchg64(_ptr, _oldp, _new) \ 159({ \ 160 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 161 ___r = arch_cmpxchg64((_ptr), ___o, (_new)); \ 162 if (unlikely(___r != ___o)) \ 163 *___op = ___r; \ 164 likely(___r == ___o); \ 165}) 166#endif /* arch_try_cmpxchg64 */ 167 168#ifndef arch_try_cmpxchg64_acquire 169#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \ 170({ \ 171 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 172 ___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \ 173 if (unlikely(___r != ___o)) \ 174 *___op = ___r; \ 175 likely(___r == ___o); \ 176}) 177#endif /* arch_try_cmpxchg64_acquire */ 178 179#ifndef arch_try_cmpxchg64_release 180#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \ 181({ \ 182 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 183 ___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \ 184 if (unlikely(___r != ___o)) \ 185 *___op = ___r; \ 186 likely(___r == ___o); \ 187}) 188#endif /* arch_try_cmpxchg64_release */ 189 190#ifndef arch_try_cmpxchg64_relaxed 191#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \ 192({ \ 193 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 194 ___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \ 195 if (unlikely(___r != ___o)) \ 196 *___op = ___r; \ 197 likely(___r == ___o); \ 198}) 199#endif /* arch_try_cmpxchg64_relaxed */ 200 201#else /* arch_try_cmpxchg64_relaxed */ 202 203#ifndef arch_try_cmpxchg64_acquire 204#define arch_try_cmpxchg64_acquire(...) \ 205 __atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__) 206#endif 207 208#ifndef arch_try_cmpxchg64_release 209#define arch_try_cmpxchg64_release(...) \ 210 __atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__) 211#endif 212 213#ifndef arch_try_cmpxchg64 214#define arch_try_cmpxchg64(...) \ 215 __atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__) 216#endif 217 218#endif /* arch_try_cmpxchg64_relaxed */ 219 220#ifndef arch_try_cmpxchg_local 221#define arch_try_cmpxchg_local(_ptr, _oldp, _new) \ 222({ \ 223 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 224 ___r = arch_cmpxchg_local((_ptr), ___o, (_new)); \ 225 if (unlikely(___r != ___o)) \ 226 *___op = ___r; \ 227 likely(___r == ___o); \ 228}) 229#endif /* arch_try_cmpxchg_local */ 230 231#ifndef arch_try_cmpxchg64_local 232#define arch_try_cmpxchg64_local(_ptr, _oldp, _new) \ 233({ \ 234 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ 235 ___r = arch_cmpxchg64_local((_ptr), ___o, (_new)); \ 236 if (unlikely(___r != ___o)) \ 237 *___op = ___r; \ 238 likely(___r == ___o); \ 239}) 240#endif /* arch_try_cmpxchg64_local */ 241 242#ifndef arch_atomic_read_acquire 243static __always_inline int 244arch_atomic_read_acquire(const atomic_t *v) 245{ 246 int ret; 247 248 if (__native_word(atomic_t)) { 249 ret = smp_load_acquire(&(v)->counter); 250 } else { 251 ret = arch_atomic_read(v); 252 __atomic_acquire_fence(); 253 } 254 255 return ret; 256} 257#define arch_atomic_read_acquire arch_atomic_read_acquire 258#endif 259 260#ifndef arch_atomic_set_release 261static __always_inline void 262arch_atomic_set_release(atomic_t *v, int i) 263{ 264 if (__native_word(atomic_t)) { 265 smp_store_release(&(v)->counter, i); 266 } else { 267 __atomic_release_fence(); 268 arch_atomic_set(v, i); 269 } 270} 271#define arch_atomic_set_release arch_atomic_set_release 272#endif 273 274#ifndef arch_atomic_add_return_relaxed 275#define arch_atomic_add_return_acquire arch_atomic_add_return 276#define arch_atomic_add_return_release arch_atomic_add_return 277#define arch_atomic_add_return_relaxed arch_atomic_add_return 278#else /* arch_atomic_add_return_relaxed */ 279 280#ifndef arch_atomic_add_return_acquire 281static __always_inline int 282arch_atomic_add_return_acquire(int i, atomic_t *v) 283{ 284 int ret = arch_atomic_add_return_relaxed(i, v); 285 __atomic_acquire_fence(); 286 return ret; 287} 288#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire 289#endif 290 291#ifndef arch_atomic_add_return_release 292static __always_inline int 293arch_atomic_add_return_release(int i, atomic_t *v) 294{ 295 __atomic_release_fence(); 296 return arch_atomic_add_return_relaxed(i, v); 297} 298#define arch_atomic_add_return_release arch_atomic_add_return_release 299#endif 300 301#ifndef arch_atomic_add_return 302static __always_inline int 303arch_atomic_add_return(int i, atomic_t *v) 304{ 305 int ret; 306 __atomic_pre_full_fence(); 307 ret = arch_atomic_add_return_relaxed(i, v); 308 __atomic_post_full_fence(); 309 return ret; 310} 311#define arch_atomic_add_return arch_atomic_add_return 312#endif 313 314#endif /* arch_atomic_add_return_relaxed */ 315 316#ifndef arch_atomic_fetch_add_relaxed 317#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add 318#define arch_atomic_fetch_add_release arch_atomic_fetch_add 319#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add 320#else /* arch_atomic_fetch_add_relaxed */ 321 322#ifndef arch_atomic_fetch_add_acquire 323static __always_inline int 324arch_atomic_fetch_add_acquire(int i, atomic_t *v) 325{ 326 int ret = arch_atomic_fetch_add_relaxed(i, v); 327 __atomic_acquire_fence(); 328 return ret; 329} 330#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire 331#endif 332 333#ifndef arch_atomic_fetch_add_release 334static __always_inline int 335arch_atomic_fetch_add_release(int i, atomic_t *v) 336{ 337 __atomic_release_fence(); 338 return arch_atomic_fetch_add_relaxed(i, v); 339} 340#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release 341#endif 342 343#ifndef arch_atomic_fetch_add 344static __always_inline int 345arch_atomic_fetch_add(int i, atomic_t *v) 346{ 347 int ret; 348 __atomic_pre_full_fence(); 349 ret = arch_atomic_fetch_add_relaxed(i, v); 350 __atomic_post_full_fence(); 351 return ret; 352} 353#define arch_atomic_fetch_add arch_atomic_fetch_add 354#endif 355 356#endif /* arch_atomic_fetch_add_relaxed */ 357 358#ifndef arch_atomic_sub_return_relaxed 359#define arch_atomic_sub_return_acquire arch_atomic_sub_return 360#define arch_atomic_sub_return_release arch_atomic_sub_return 361#define arch_atomic_sub_return_relaxed arch_atomic_sub_return 362#else /* arch_atomic_sub_return_relaxed */ 363 364#ifndef arch_atomic_sub_return_acquire 365static __always_inline int 366arch_atomic_sub_return_acquire(int i, atomic_t *v) 367{ 368 int ret = arch_atomic_sub_return_relaxed(i, v); 369 __atomic_acquire_fence(); 370 return ret; 371} 372#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire 373#endif 374 375#ifndef arch_atomic_sub_return_release 376static __always_inline int 377arch_atomic_sub_return_release(int i, atomic_t *v) 378{ 379 __atomic_release_fence(); 380 return arch_atomic_sub_return_relaxed(i, v); 381} 382#define arch_atomic_sub_return_release arch_atomic_sub_return_release 383#endif 384 385#ifndef arch_atomic_sub_return 386static __always_inline int 387arch_atomic_sub_return(int i, atomic_t *v) 388{ 389 int ret; 390 __atomic_pre_full_fence(); 391 ret = arch_atomic_sub_return_relaxed(i, v); 392 __atomic_post_full_fence(); 393 return ret; 394} 395#define arch_atomic_sub_return arch_atomic_sub_return 396#endif 397 398#endif /* arch_atomic_sub_return_relaxed */ 399 400#ifndef arch_atomic_fetch_sub_relaxed 401#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub 402#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub 403#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub 404#else /* arch_atomic_fetch_sub_relaxed */ 405 406#ifndef arch_atomic_fetch_sub_acquire 407static __always_inline int 408arch_atomic_fetch_sub_acquire(int i, atomic_t *v) 409{ 410 int ret = arch_atomic_fetch_sub_relaxed(i, v); 411 __atomic_acquire_fence(); 412 return ret; 413} 414#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire 415#endif 416 417#ifndef arch_atomic_fetch_sub_release 418static __always_inline int 419arch_atomic_fetch_sub_release(int i, atomic_t *v) 420{ 421 __atomic_release_fence(); 422 return arch_atomic_fetch_sub_relaxed(i, v); 423} 424#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release 425#endif 426 427#ifndef arch_atomic_fetch_sub 428static __always_inline int 429arch_atomic_fetch_sub(int i, atomic_t *v) 430{ 431 int ret; 432 __atomic_pre_full_fence(); 433 ret = arch_atomic_fetch_sub_relaxed(i, v); 434 __atomic_post_full_fence(); 435 return ret; 436} 437#define arch_atomic_fetch_sub arch_atomic_fetch_sub 438#endif 439 440#endif /* arch_atomic_fetch_sub_relaxed */ 441 442#ifndef arch_atomic_inc 443static __always_inline void 444arch_atomic_inc(atomic_t *v) 445{ 446 arch_atomic_add(1, v); 447} 448#define arch_atomic_inc arch_atomic_inc 449#endif 450 451#ifndef arch_atomic_inc_return_relaxed 452#ifdef arch_atomic_inc_return 453#define arch_atomic_inc_return_acquire arch_atomic_inc_return 454#define arch_atomic_inc_return_release arch_atomic_inc_return 455#define arch_atomic_inc_return_relaxed arch_atomic_inc_return 456#endif /* arch_atomic_inc_return */ 457 458#ifndef arch_atomic_inc_return 459static __always_inline int 460arch_atomic_inc_return(atomic_t *v) 461{ 462 return arch_atomic_add_return(1, v); 463} 464#define arch_atomic_inc_return arch_atomic_inc_return 465#endif 466 467#ifndef arch_atomic_inc_return_acquire 468static __always_inline int 469arch_atomic_inc_return_acquire(atomic_t *v) 470{ 471 return arch_atomic_add_return_acquire(1, v); 472} 473#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire 474#endif 475 476#ifndef arch_atomic_inc_return_release 477static __always_inline int 478arch_atomic_inc_return_release(atomic_t *v) 479{ 480 return arch_atomic_add_return_release(1, v); 481} 482#define arch_atomic_inc_return_release arch_atomic_inc_return_release 483#endif 484 485#ifndef arch_atomic_inc_return_relaxed 486static __always_inline int 487arch_atomic_inc_return_relaxed(atomic_t *v) 488{ 489 return arch_atomic_add_return_relaxed(1, v); 490} 491#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed 492#endif 493 494#else /* arch_atomic_inc_return_relaxed */ 495 496#ifndef arch_atomic_inc_return_acquire 497static __always_inline int 498arch_atomic_inc_return_acquire(atomic_t *v) 499{ 500 int ret = arch_atomic_inc_return_relaxed(v); 501 __atomic_acquire_fence(); 502 return ret; 503} 504#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire 505#endif 506 507#ifndef arch_atomic_inc_return_release 508static __always_inline int 509arch_atomic_inc_return_release(atomic_t *v) 510{ 511 __atomic_release_fence(); 512 return arch_atomic_inc_return_relaxed(v); 513} 514#define arch_atomic_inc_return_release arch_atomic_inc_return_release 515#endif 516 517#ifndef arch_atomic_inc_return 518static __always_inline int 519arch_atomic_inc_return(atomic_t *v) 520{ 521 int ret; 522 __atomic_pre_full_fence(); 523 ret = arch_atomic_inc_return_relaxed(v); 524 __atomic_post_full_fence(); 525 return ret; 526} 527#define arch_atomic_inc_return arch_atomic_inc_return 528#endif 529 530#endif /* arch_atomic_inc_return_relaxed */ 531 532#ifndef arch_atomic_fetch_inc_relaxed 533#ifdef arch_atomic_fetch_inc 534#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc 535#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc 536#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc 537#endif /* arch_atomic_fetch_inc */ 538 539#ifndef arch_atomic_fetch_inc 540static __always_inline int 541arch_atomic_fetch_inc(atomic_t *v) 542{ 543 return arch_atomic_fetch_add(1, v); 544} 545#define arch_atomic_fetch_inc arch_atomic_fetch_inc 546#endif 547 548#ifndef arch_atomic_fetch_inc_acquire 549static __always_inline int 550arch_atomic_fetch_inc_acquire(atomic_t *v) 551{ 552 return arch_atomic_fetch_add_acquire(1, v); 553} 554#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire 555#endif 556 557#ifndef arch_atomic_fetch_inc_release 558static __always_inline int 559arch_atomic_fetch_inc_release(atomic_t *v) 560{ 561 return arch_atomic_fetch_add_release(1, v); 562} 563#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release 564#endif 565 566#ifndef arch_atomic_fetch_inc_relaxed 567static __always_inline int 568arch_atomic_fetch_inc_relaxed(atomic_t *v) 569{ 570 return arch_atomic_fetch_add_relaxed(1, v); 571} 572#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed 573#endif 574 575#else /* arch_atomic_fetch_inc_relaxed */ 576 577#ifndef arch_atomic_fetch_inc_acquire 578static __always_inline int 579arch_atomic_fetch_inc_acquire(atomic_t *v) 580{ 581 int ret = arch_atomic_fetch_inc_relaxed(v); 582 __atomic_acquire_fence(); 583 return ret; 584} 585#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire 586#endif 587 588#ifndef arch_atomic_fetch_inc_release 589static __always_inline int 590arch_atomic_fetch_inc_release(atomic_t *v) 591{ 592 __atomic_release_fence(); 593 return arch_atomic_fetch_inc_relaxed(v); 594} 595#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release 596#endif 597 598#ifndef arch_atomic_fetch_inc 599static __always_inline int 600arch_atomic_fetch_inc(atomic_t *v) 601{ 602 int ret; 603 __atomic_pre_full_fence(); 604 ret = arch_atomic_fetch_inc_relaxed(v); 605 __atomic_post_full_fence(); 606 return ret; 607} 608#define arch_atomic_fetch_inc arch_atomic_fetch_inc 609#endif 610 611#endif /* arch_atomic_fetch_inc_relaxed */ 612 613#ifndef arch_atomic_dec 614static __always_inline void 615arch_atomic_dec(atomic_t *v) 616{ 617 arch_atomic_sub(1, v); 618} 619#define arch_atomic_dec arch_atomic_dec 620#endif 621 622#ifndef arch_atomic_dec_return_relaxed 623#ifdef arch_atomic_dec_return 624#define arch_atomic_dec_return_acquire arch_atomic_dec_return 625#define arch_atomic_dec_return_release arch_atomic_dec_return 626#define arch_atomic_dec_return_relaxed arch_atomic_dec_return 627#endif /* arch_atomic_dec_return */ 628 629#ifndef arch_atomic_dec_return 630static __always_inline int 631arch_atomic_dec_return(atomic_t *v) 632{ 633 return arch_atomic_sub_return(1, v); 634} 635#define arch_atomic_dec_return arch_atomic_dec_return 636#endif 637 638#ifndef arch_atomic_dec_return_acquire 639static __always_inline int 640arch_atomic_dec_return_acquire(atomic_t *v) 641{ 642 return arch_atomic_sub_return_acquire(1, v); 643} 644#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire 645#endif 646 647#ifndef arch_atomic_dec_return_release 648static __always_inline int 649arch_atomic_dec_return_release(atomic_t *v) 650{ 651 return arch_atomic_sub_return_release(1, v); 652} 653#define arch_atomic_dec_return_release arch_atomic_dec_return_release 654#endif 655 656#ifndef arch_atomic_dec_return_relaxed 657static __always_inline int 658arch_atomic_dec_return_relaxed(atomic_t *v) 659{ 660 return arch_atomic_sub_return_relaxed(1, v); 661} 662#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed 663#endif 664 665#else /* arch_atomic_dec_return_relaxed */ 666 667#ifndef arch_atomic_dec_return_acquire 668static __always_inline int 669arch_atomic_dec_return_acquire(atomic_t *v) 670{ 671 int ret = arch_atomic_dec_return_relaxed(v); 672 __atomic_acquire_fence(); 673 return ret; 674} 675#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire 676#endif 677 678#ifndef arch_atomic_dec_return_release 679static __always_inline int 680arch_atomic_dec_return_release(atomic_t *v) 681{ 682 __atomic_release_fence(); 683 return arch_atomic_dec_return_relaxed(v); 684} 685#define arch_atomic_dec_return_release arch_atomic_dec_return_release 686#endif 687 688#ifndef arch_atomic_dec_return 689static __always_inline int 690arch_atomic_dec_return(atomic_t *v) 691{ 692 int ret; 693 __atomic_pre_full_fence(); 694 ret = arch_atomic_dec_return_relaxed(v); 695 __atomic_post_full_fence(); 696 return ret; 697} 698#define arch_atomic_dec_return arch_atomic_dec_return 699#endif 700 701#endif /* arch_atomic_dec_return_relaxed */ 702 703#ifndef arch_atomic_fetch_dec_relaxed 704#ifdef arch_atomic_fetch_dec 705#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec 706#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec 707#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec 708#endif /* arch_atomic_fetch_dec */ 709 710#ifndef arch_atomic_fetch_dec 711static __always_inline int 712arch_atomic_fetch_dec(atomic_t *v) 713{ 714 return arch_atomic_fetch_sub(1, v); 715} 716#define arch_atomic_fetch_dec arch_atomic_fetch_dec 717#endif 718 719#ifndef arch_atomic_fetch_dec_acquire 720static __always_inline int 721arch_atomic_fetch_dec_acquire(atomic_t *v) 722{ 723 return arch_atomic_fetch_sub_acquire(1, v); 724} 725#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire 726#endif 727 728#ifndef arch_atomic_fetch_dec_release 729static __always_inline int 730arch_atomic_fetch_dec_release(atomic_t *v) 731{ 732 return arch_atomic_fetch_sub_release(1, v); 733} 734#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release 735#endif 736 737#ifndef arch_atomic_fetch_dec_relaxed 738static __always_inline int 739arch_atomic_fetch_dec_relaxed(atomic_t *v) 740{ 741 return arch_atomic_fetch_sub_relaxed(1, v); 742} 743#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed 744#endif 745 746#else /* arch_atomic_fetch_dec_relaxed */ 747 748#ifndef arch_atomic_fetch_dec_acquire 749static __always_inline int 750arch_atomic_fetch_dec_acquire(atomic_t *v) 751{ 752 int ret = arch_atomic_fetch_dec_relaxed(v); 753 __atomic_acquire_fence(); 754 return ret; 755} 756#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire 757#endif 758 759#ifndef arch_atomic_fetch_dec_release 760static __always_inline int 761arch_atomic_fetch_dec_release(atomic_t *v) 762{ 763 __atomic_release_fence(); 764 return arch_atomic_fetch_dec_relaxed(v); 765} 766#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release 767#endif 768 769#ifndef arch_atomic_fetch_dec 770static __always_inline int 771arch_atomic_fetch_dec(atomic_t *v) 772{ 773 int ret; 774 __atomic_pre_full_fence(); 775 ret = arch_atomic_fetch_dec_relaxed(v); 776 __atomic_post_full_fence(); 777 return ret; 778} 779#define arch_atomic_fetch_dec arch_atomic_fetch_dec 780#endif 781 782#endif /* arch_atomic_fetch_dec_relaxed */ 783 784#ifndef arch_atomic_fetch_and_relaxed 785#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and 786#define arch_atomic_fetch_and_release arch_atomic_fetch_and 787#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and 788#else /* arch_atomic_fetch_and_relaxed */ 789 790#ifndef arch_atomic_fetch_and_acquire 791static __always_inline int 792arch_atomic_fetch_and_acquire(int i, atomic_t *v) 793{ 794 int ret = arch_atomic_fetch_and_relaxed(i, v); 795 __atomic_acquire_fence(); 796 return ret; 797} 798#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire 799#endif 800 801#ifndef arch_atomic_fetch_and_release 802static __always_inline int 803arch_atomic_fetch_and_release(int i, atomic_t *v) 804{ 805 __atomic_release_fence(); 806 return arch_atomic_fetch_and_relaxed(i, v); 807} 808#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release 809#endif 810 811#ifndef arch_atomic_fetch_and 812static __always_inline int 813arch_atomic_fetch_and(int i, atomic_t *v) 814{ 815 int ret; 816 __atomic_pre_full_fence(); 817 ret = arch_atomic_fetch_and_relaxed(i, v); 818 __atomic_post_full_fence(); 819 return ret; 820} 821#define arch_atomic_fetch_and arch_atomic_fetch_and 822#endif 823 824#endif /* arch_atomic_fetch_and_relaxed */ 825 826#ifndef arch_atomic_andnot 827static __always_inline void 828arch_atomic_andnot(int i, atomic_t *v) 829{ 830 arch_atomic_and(~i, v); 831} 832#define arch_atomic_andnot arch_atomic_andnot 833#endif 834 835#ifndef arch_atomic_fetch_andnot_relaxed 836#ifdef arch_atomic_fetch_andnot 837#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot 838#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot 839#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot 840#endif /* arch_atomic_fetch_andnot */ 841 842#ifndef arch_atomic_fetch_andnot 843static __always_inline int 844arch_atomic_fetch_andnot(int i, atomic_t *v) 845{ 846 return arch_atomic_fetch_and(~i, v); 847} 848#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot 849#endif 850 851#ifndef arch_atomic_fetch_andnot_acquire 852static __always_inline int 853arch_atomic_fetch_andnot_acquire(int i, atomic_t *v) 854{ 855 return arch_atomic_fetch_and_acquire(~i, v); 856} 857#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire 858#endif 859 860#ifndef arch_atomic_fetch_andnot_release 861static __always_inline int 862arch_atomic_fetch_andnot_release(int i, atomic_t *v) 863{ 864 return arch_atomic_fetch_and_release(~i, v); 865} 866#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release 867#endif 868 869#ifndef arch_atomic_fetch_andnot_relaxed 870static __always_inline int 871arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v) 872{ 873 return arch_atomic_fetch_and_relaxed(~i, v); 874} 875#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed 876#endif 877 878#else /* arch_atomic_fetch_andnot_relaxed */ 879 880#ifndef arch_atomic_fetch_andnot_acquire 881static __always_inline int 882arch_atomic_fetch_andnot_acquire(int i, atomic_t *v) 883{ 884 int ret = arch_atomic_fetch_andnot_relaxed(i, v); 885 __atomic_acquire_fence(); 886 return ret; 887} 888#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire 889#endif 890 891#ifndef arch_atomic_fetch_andnot_release 892static __always_inline int 893arch_atomic_fetch_andnot_release(int i, atomic_t *v) 894{ 895 __atomic_release_fence(); 896 return arch_atomic_fetch_andnot_relaxed(i, v); 897} 898#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release 899#endif 900 901#ifndef arch_atomic_fetch_andnot 902static __always_inline int 903arch_atomic_fetch_andnot(int i, atomic_t *v) 904{ 905 int ret; 906 __atomic_pre_full_fence(); 907 ret = arch_atomic_fetch_andnot_relaxed(i, v); 908 __atomic_post_full_fence(); 909 return ret; 910} 911#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot 912#endif 913 914#endif /* arch_atomic_fetch_andnot_relaxed */ 915 916#ifndef arch_atomic_fetch_or_relaxed 917#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or 918#define arch_atomic_fetch_or_release arch_atomic_fetch_or 919#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or 920#else /* arch_atomic_fetch_or_relaxed */ 921 922#ifndef arch_atomic_fetch_or_acquire 923static __always_inline int 924arch_atomic_fetch_or_acquire(int i, atomic_t *v) 925{ 926 int ret = arch_atomic_fetch_or_relaxed(i, v); 927 __atomic_acquire_fence(); 928 return ret; 929} 930#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire 931#endif 932 933#ifndef arch_atomic_fetch_or_release 934static __always_inline int 935arch_atomic_fetch_or_release(int i, atomic_t *v) 936{ 937 __atomic_release_fence(); 938 return arch_atomic_fetch_or_relaxed(i, v); 939} 940#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release 941#endif 942 943#ifndef arch_atomic_fetch_or 944static __always_inline int 945arch_atomic_fetch_or(int i, atomic_t *v) 946{ 947 int ret; 948 __atomic_pre_full_fence(); 949 ret = arch_atomic_fetch_or_relaxed(i, v); 950 __atomic_post_full_fence(); 951 return ret; 952} 953#define arch_atomic_fetch_or arch_atomic_fetch_or 954#endif 955 956#endif /* arch_atomic_fetch_or_relaxed */ 957 958#ifndef arch_atomic_fetch_xor_relaxed 959#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor 960#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor 961#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor 962#else /* arch_atomic_fetch_xor_relaxed */ 963 964#ifndef arch_atomic_fetch_xor_acquire 965static __always_inline int 966arch_atomic_fetch_xor_acquire(int i, atomic_t *v) 967{ 968 int ret = arch_atomic_fetch_xor_relaxed(i, v); 969 __atomic_acquire_fence(); 970 return ret; 971} 972#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire 973#endif 974 975#ifndef arch_atomic_fetch_xor_release 976static __always_inline int 977arch_atomic_fetch_xor_release(int i, atomic_t *v) 978{ 979 __atomic_release_fence(); 980 return arch_atomic_fetch_xor_relaxed(i, v); 981} 982#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release 983#endif 984 985#ifndef arch_atomic_fetch_xor 986static __always_inline int 987arch_atomic_fetch_xor(int i, atomic_t *v) 988{ 989 int ret; 990 __atomic_pre_full_fence(); 991 ret = arch_atomic_fetch_xor_relaxed(i, v); 992 __atomic_post_full_fence(); 993 return ret; 994} 995#define arch_atomic_fetch_xor arch_atomic_fetch_xor 996#endif 997 998#endif /* arch_atomic_fetch_xor_relaxed */ 999 1000#ifndef arch_atomic_xchg_relaxed 1001#define arch_atomic_xchg_acquire arch_atomic_xchg 1002#define arch_atomic_xchg_release arch_atomic_xchg 1003#define arch_atomic_xchg_relaxed arch_atomic_xchg 1004#else /* arch_atomic_xchg_relaxed */ 1005 1006#ifndef arch_atomic_xchg_acquire 1007static __always_inline int 1008arch_atomic_xchg_acquire(atomic_t *v, int i) 1009{ 1010 int ret = arch_atomic_xchg_relaxed(v, i); 1011 __atomic_acquire_fence(); 1012 return ret; 1013} 1014#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire 1015#endif 1016 1017#ifndef arch_atomic_xchg_release 1018static __always_inline int 1019arch_atomic_xchg_release(atomic_t *v, int i) 1020{ 1021 __atomic_release_fence(); 1022 return arch_atomic_xchg_relaxed(v, i); 1023} 1024#define arch_atomic_xchg_release arch_atomic_xchg_release 1025#endif 1026 1027#ifndef arch_atomic_xchg 1028static __always_inline int 1029arch_atomic_xchg(atomic_t *v, int i) 1030{ 1031 int ret; 1032 __atomic_pre_full_fence(); 1033 ret = arch_atomic_xchg_relaxed(v, i); 1034 __atomic_post_full_fence(); 1035 return ret; 1036} 1037#define arch_atomic_xchg arch_atomic_xchg 1038#endif 1039 1040#endif /* arch_atomic_xchg_relaxed */ 1041 1042#ifndef arch_atomic_cmpxchg_relaxed 1043#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg 1044#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg 1045#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg 1046#else /* arch_atomic_cmpxchg_relaxed */ 1047 1048#ifndef arch_atomic_cmpxchg_acquire 1049static __always_inline int 1050arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new) 1051{ 1052 int ret = arch_atomic_cmpxchg_relaxed(v, old, new); 1053 __atomic_acquire_fence(); 1054 return ret; 1055} 1056#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire 1057#endif 1058 1059#ifndef arch_atomic_cmpxchg_release 1060static __always_inline int 1061arch_atomic_cmpxchg_release(atomic_t *v, int old, int new) 1062{ 1063 __atomic_release_fence(); 1064 return arch_atomic_cmpxchg_relaxed(v, old, new); 1065} 1066#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release 1067#endif 1068 1069#ifndef arch_atomic_cmpxchg 1070static __always_inline int 1071arch_atomic_cmpxchg(atomic_t *v, int old, int new) 1072{ 1073 int ret; 1074 __atomic_pre_full_fence(); 1075 ret = arch_atomic_cmpxchg_relaxed(v, old, new); 1076 __atomic_post_full_fence(); 1077 return ret; 1078} 1079#define arch_atomic_cmpxchg arch_atomic_cmpxchg 1080#endif 1081 1082#endif /* arch_atomic_cmpxchg_relaxed */ 1083 1084#ifndef arch_atomic_try_cmpxchg_relaxed 1085#ifdef arch_atomic_try_cmpxchg 1086#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg 1087#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg 1088#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg 1089#endif /* arch_atomic_try_cmpxchg */ 1090 1091#ifndef arch_atomic_try_cmpxchg 1092static __always_inline bool 1093arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new) 1094{ 1095 int r, o = *old; 1096 r = arch_atomic_cmpxchg(v, o, new); 1097 if (unlikely(r != o)) 1098 *old = r; 1099 return likely(r == o); 1100} 1101#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg 1102#endif 1103 1104#ifndef arch_atomic_try_cmpxchg_acquire 1105static __always_inline bool 1106arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new) 1107{ 1108 int r, o = *old; 1109 r = arch_atomic_cmpxchg_acquire(v, o, new); 1110 if (unlikely(r != o)) 1111 *old = r; 1112 return likely(r == o); 1113} 1114#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire 1115#endif 1116 1117#ifndef arch_atomic_try_cmpxchg_release 1118static __always_inline bool 1119arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new) 1120{ 1121 int r, o = *old; 1122 r = arch_atomic_cmpxchg_release(v, o, new); 1123 if (unlikely(r != o)) 1124 *old = r; 1125 return likely(r == o); 1126} 1127#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release 1128#endif 1129 1130#ifndef arch_atomic_try_cmpxchg_relaxed 1131static __always_inline bool 1132arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new) 1133{ 1134 int r, o = *old; 1135 r = arch_atomic_cmpxchg_relaxed(v, o, new); 1136 if (unlikely(r != o)) 1137 *old = r; 1138 return likely(r == o); 1139} 1140#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed 1141#endif 1142 1143#else /* arch_atomic_try_cmpxchg_relaxed */ 1144 1145#ifndef arch_atomic_try_cmpxchg_acquire 1146static __always_inline bool 1147arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new) 1148{ 1149 bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new); 1150 __atomic_acquire_fence(); 1151 return ret; 1152} 1153#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire 1154#endif 1155 1156#ifndef arch_atomic_try_cmpxchg_release 1157static __always_inline bool 1158arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new) 1159{ 1160 __atomic_release_fence(); 1161 return arch_atomic_try_cmpxchg_relaxed(v, old, new); 1162} 1163#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release 1164#endif 1165 1166#ifndef arch_atomic_try_cmpxchg 1167static __always_inline bool 1168arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new) 1169{ 1170 bool ret; 1171 __atomic_pre_full_fence(); 1172 ret = arch_atomic_try_cmpxchg_relaxed(v, old, new); 1173 __atomic_post_full_fence(); 1174 return ret; 1175} 1176#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg 1177#endif 1178 1179#endif /* arch_atomic_try_cmpxchg_relaxed */ 1180 1181#ifndef arch_atomic_sub_and_test 1182/** 1183 * arch_atomic_sub_and_test - subtract value from variable and test result 1184 * @i: integer value to subtract 1185 * @v: pointer of type atomic_t 1186 * 1187 * Atomically subtracts @i from @v and returns 1188 * true if the result is zero, or false for all 1189 * other cases. 1190 */ 1191static __always_inline bool 1192arch_atomic_sub_and_test(int i, atomic_t *v) 1193{ 1194 return arch_atomic_sub_return(i, v) == 0; 1195} 1196#define arch_atomic_sub_and_test arch_atomic_sub_and_test 1197#endif 1198 1199#ifndef arch_atomic_dec_and_test 1200/** 1201 * arch_atomic_dec_and_test - decrement and test 1202 * @v: pointer of type atomic_t 1203 * 1204 * Atomically decrements @v by 1 and 1205 * returns true if the result is 0, or false for all other 1206 * cases. 1207 */ 1208static __always_inline bool 1209arch_atomic_dec_and_test(atomic_t *v) 1210{ 1211 return arch_atomic_dec_return(v) == 0; 1212} 1213#define arch_atomic_dec_and_test arch_atomic_dec_and_test 1214#endif 1215 1216#ifndef arch_atomic_inc_and_test 1217/** 1218 * arch_atomic_inc_and_test - increment and test 1219 * @v: pointer of type atomic_t 1220 * 1221 * Atomically increments @v by 1 1222 * and returns true if the result is zero, or false for all 1223 * other cases. 1224 */ 1225static __always_inline bool 1226arch_atomic_inc_and_test(atomic_t *v) 1227{ 1228 return arch_atomic_inc_return(v) == 0; 1229} 1230#define arch_atomic_inc_and_test arch_atomic_inc_and_test 1231#endif 1232 1233#ifndef arch_atomic_add_negative_relaxed 1234#ifdef arch_atomic_add_negative 1235#define arch_atomic_add_negative_acquire arch_atomic_add_negative 1236#define arch_atomic_add_negative_release arch_atomic_add_negative 1237#define arch_atomic_add_negative_relaxed arch_atomic_add_negative 1238#endif /* arch_atomic_add_negative */ 1239 1240#ifndef arch_atomic_add_negative 1241/** 1242 * arch_atomic_add_negative - Add and test if negative 1243 * @i: integer value to add 1244 * @v: pointer of type atomic_t 1245 * 1246 * Atomically adds @i to @v and returns true if the result is negative, 1247 * or false when the result is greater than or equal to zero. 1248 */ 1249static __always_inline bool 1250arch_atomic_add_negative(int i, atomic_t *v) 1251{ 1252 return arch_atomic_add_return(i, v) < 0; 1253} 1254#define arch_atomic_add_negative arch_atomic_add_negative 1255#endif 1256 1257#ifndef arch_atomic_add_negative_acquire 1258/** 1259 * arch_atomic_add_negative_acquire - Add and test if negative 1260 * @i: integer value to add 1261 * @v: pointer of type atomic_t 1262 * 1263 * Atomically adds @i to @v and returns true if the result is negative, 1264 * or false when the result is greater than or equal to zero. 1265 */ 1266static __always_inline bool 1267arch_atomic_add_negative_acquire(int i, atomic_t *v) 1268{ 1269 return arch_atomic_add_return_acquire(i, v) < 0; 1270} 1271#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire 1272#endif 1273 1274#ifndef arch_atomic_add_negative_release 1275/** 1276 * arch_atomic_add_negative_release - Add and test if negative 1277 * @i: integer value to add 1278 * @v: pointer of type atomic_t 1279 * 1280 * Atomically adds @i to @v and returns true if the result is negative, 1281 * or false when the result is greater than or equal to zero. 1282 */ 1283static __always_inline bool 1284arch_atomic_add_negative_release(int i, atomic_t *v) 1285{ 1286 return arch_atomic_add_return_release(i, v) < 0; 1287} 1288#define arch_atomic_add_negative_release arch_atomic_add_negative_release 1289#endif 1290 1291#ifndef arch_atomic_add_negative_relaxed 1292/** 1293 * arch_atomic_add_negative_relaxed - Add and test if negative 1294 * @i: integer value to add 1295 * @v: pointer of type atomic_t 1296 * 1297 * Atomically adds @i to @v and returns true if the result is negative, 1298 * or false when the result is greater than or equal to zero. 1299 */ 1300static __always_inline bool 1301arch_atomic_add_negative_relaxed(int i, atomic_t *v) 1302{ 1303 return arch_atomic_add_return_relaxed(i, v) < 0; 1304} 1305#define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed 1306#endif 1307 1308#else /* arch_atomic_add_negative_relaxed */ 1309 1310#ifndef arch_atomic_add_negative_acquire 1311static __always_inline bool 1312arch_atomic_add_negative_acquire(int i, atomic_t *v) 1313{ 1314 bool ret = arch_atomic_add_negative_relaxed(i, v); 1315 __atomic_acquire_fence(); 1316 return ret; 1317} 1318#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire 1319#endif 1320 1321#ifndef arch_atomic_add_negative_release 1322static __always_inline bool 1323arch_atomic_add_negative_release(int i, atomic_t *v) 1324{ 1325 __atomic_release_fence(); 1326 return arch_atomic_add_negative_relaxed(i, v); 1327} 1328#define arch_atomic_add_negative_release arch_atomic_add_negative_release 1329#endif 1330 1331#ifndef arch_atomic_add_negative 1332static __always_inline bool 1333arch_atomic_add_negative(int i, atomic_t *v) 1334{ 1335 bool ret; 1336 __atomic_pre_full_fence(); 1337 ret = arch_atomic_add_negative_relaxed(i, v); 1338 __atomic_post_full_fence(); 1339 return ret; 1340} 1341#define arch_atomic_add_negative arch_atomic_add_negative 1342#endif 1343 1344#endif /* arch_atomic_add_negative_relaxed */ 1345 1346#ifndef arch_atomic_fetch_add_unless 1347/** 1348 * arch_atomic_fetch_add_unless - add unless the number is already a given value 1349 * @v: pointer of type atomic_t 1350 * @a: the amount to add to v... 1351 * @u: ...unless v is equal to u. 1352 * 1353 * Atomically adds @a to @v, so long as @v was not already @u. 1354 * Returns original value of @v 1355 */ 1356static __always_inline int 1357arch_atomic_fetch_add_unless(atomic_t *v, int a, int u) 1358{ 1359 int c = arch_atomic_read(v); 1360 1361 do { 1362 if (unlikely(c == u)) 1363 break; 1364 } while (!arch_atomic_try_cmpxchg(v, &c, c + a)); 1365 1366 return c; 1367} 1368#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless 1369#endif 1370 1371#ifndef arch_atomic_add_unless 1372/** 1373 * arch_atomic_add_unless - add unless the number is already a given value 1374 * @v: pointer of type atomic_t 1375 * @a: the amount to add to v... 1376 * @u: ...unless v is equal to u. 1377 * 1378 * Atomically adds @a to @v, if @v was not already @u. 1379 * Returns true if the addition was done. 1380 */ 1381static __always_inline bool 1382arch_atomic_add_unless(atomic_t *v, int a, int u) 1383{ 1384 return arch_atomic_fetch_add_unless(v, a, u) != u; 1385} 1386#define arch_atomic_add_unless arch_atomic_add_unless 1387#endif 1388 1389#ifndef arch_atomic_inc_not_zero 1390/** 1391 * arch_atomic_inc_not_zero - increment unless the number is zero 1392 * @v: pointer of type atomic_t 1393 * 1394 * Atomically increments @v by 1, if @v is non-zero. 1395 * Returns true if the increment was done. 1396 */ 1397static __always_inline bool 1398arch_atomic_inc_not_zero(atomic_t *v) 1399{ 1400 return arch_atomic_add_unless(v, 1, 0); 1401} 1402#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero 1403#endif 1404 1405#ifndef arch_atomic_inc_unless_negative 1406static __always_inline bool 1407arch_atomic_inc_unless_negative(atomic_t *v) 1408{ 1409 int c = arch_atomic_read(v); 1410 1411 do { 1412 if (unlikely(c < 0)) 1413 return false; 1414 } while (!arch_atomic_try_cmpxchg(v, &c, c + 1)); 1415 1416 return true; 1417} 1418#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative 1419#endif 1420 1421#ifndef arch_atomic_dec_unless_positive 1422static __always_inline bool 1423arch_atomic_dec_unless_positive(atomic_t *v) 1424{ 1425 int c = arch_atomic_read(v); 1426 1427 do { 1428 if (unlikely(c > 0)) 1429 return false; 1430 } while (!arch_atomic_try_cmpxchg(v, &c, c - 1)); 1431 1432 return true; 1433} 1434#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive 1435#endif 1436 1437#ifndef arch_atomic_dec_if_positive 1438static __always_inline int 1439arch_atomic_dec_if_positive(atomic_t *v) 1440{ 1441 int dec, c = arch_atomic_read(v); 1442 1443 do { 1444 dec = c - 1; 1445 if (unlikely(dec < 0)) 1446 break; 1447 } while (!arch_atomic_try_cmpxchg(v, &c, dec)); 1448 1449 return dec; 1450} 1451#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive 1452#endif 1453 1454#ifdef CONFIG_GENERIC_ATOMIC64 1455#include <asm-generic/atomic64.h> 1456#endif 1457 1458#ifndef arch_atomic64_read_acquire 1459static __always_inline s64 1460arch_atomic64_read_acquire(const atomic64_t *v) 1461{ 1462 s64 ret; 1463 1464 if (__native_word(atomic64_t)) { 1465 ret = smp_load_acquire(&(v)->counter); 1466 } else { 1467 ret = arch_atomic64_read(v); 1468 __atomic_acquire_fence(); 1469 } 1470 1471 return ret; 1472} 1473#define arch_atomic64_read_acquire arch_atomic64_read_acquire 1474#endif 1475 1476#ifndef arch_atomic64_set_release 1477static __always_inline void 1478arch_atomic64_set_release(atomic64_t *v, s64 i) 1479{ 1480 if (__native_word(atomic64_t)) { 1481 smp_store_release(&(v)->counter, i); 1482 } else { 1483 __atomic_release_fence(); 1484 arch_atomic64_set(v, i); 1485 } 1486} 1487#define arch_atomic64_set_release arch_atomic64_set_release 1488#endif 1489 1490#ifndef arch_atomic64_add_return_relaxed 1491#define arch_atomic64_add_return_acquire arch_atomic64_add_return 1492#define arch_atomic64_add_return_release arch_atomic64_add_return 1493#define arch_atomic64_add_return_relaxed arch_atomic64_add_return 1494#else /* arch_atomic64_add_return_relaxed */ 1495 1496#ifndef arch_atomic64_add_return_acquire 1497static __always_inline s64 1498arch_atomic64_add_return_acquire(s64 i, atomic64_t *v) 1499{ 1500 s64 ret = arch_atomic64_add_return_relaxed(i, v); 1501 __atomic_acquire_fence(); 1502 return ret; 1503} 1504#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire 1505#endif 1506 1507#ifndef arch_atomic64_add_return_release 1508static __always_inline s64 1509arch_atomic64_add_return_release(s64 i, atomic64_t *v) 1510{ 1511 __atomic_release_fence(); 1512 return arch_atomic64_add_return_relaxed(i, v); 1513} 1514#define arch_atomic64_add_return_release arch_atomic64_add_return_release 1515#endif 1516 1517#ifndef arch_atomic64_add_return 1518static __always_inline s64 1519arch_atomic64_add_return(s64 i, atomic64_t *v) 1520{ 1521 s64 ret; 1522 __atomic_pre_full_fence(); 1523 ret = arch_atomic64_add_return_relaxed(i, v); 1524 __atomic_post_full_fence(); 1525 return ret; 1526} 1527#define arch_atomic64_add_return arch_atomic64_add_return 1528#endif 1529 1530#endif /* arch_atomic64_add_return_relaxed */ 1531 1532#ifndef arch_atomic64_fetch_add_relaxed 1533#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add 1534#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add 1535#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add 1536#else /* arch_atomic64_fetch_add_relaxed */ 1537 1538#ifndef arch_atomic64_fetch_add_acquire 1539static __always_inline s64 1540arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) 1541{ 1542 s64 ret = arch_atomic64_fetch_add_relaxed(i, v); 1543 __atomic_acquire_fence(); 1544 return ret; 1545} 1546#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire 1547#endif 1548 1549#ifndef arch_atomic64_fetch_add_release 1550static __always_inline s64 1551arch_atomic64_fetch_add_release(s64 i, atomic64_t *v) 1552{ 1553 __atomic_release_fence(); 1554 return arch_atomic64_fetch_add_relaxed(i, v); 1555} 1556#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release 1557#endif 1558 1559#ifndef arch_atomic64_fetch_add 1560static __always_inline s64 1561arch_atomic64_fetch_add(s64 i, atomic64_t *v) 1562{ 1563 s64 ret; 1564 __atomic_pre_full_fence(); 1565 ret = arch_atomic64_fetch_add_relaxed(i, v); 1566 __atomic_post_full_fence(); 1567 return ret; 1568} 1569#define arch_atomic64_fetch_add arch_atomic64_fetch_add 1570#endif 1571 1572#endif /* arch_atomic64_fetch_add_relaxed */ 1573 1574#ifndef arch_atomic64_sub_return_relaxed 1575#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return 1576#define arch_atomic64_sub_return_release arch_atomic64_sub_return 1577#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return 1578#else /* arch_atomic64_sub_return_relaxed */ 1579 1580#ifndef arch_atomic64_sub_return_acquire 1581static __always_inline s64 1582arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v) 1583{ 1584 s64 ret = arch_atomic64_sub_return_relaxed(i, v); 1585 __atomic_acquire_fence(); 1586 return ret; 1587} 1588#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire 1589#endif 1590 1591#ifndef arch_atomic64_sub_return_release 1592static __always_inline s64 1593arch_atomic64_sub_return_release(s64 i, atomic64_t *v) 1594{ 1595 __atomic_release_fence(); 1596 return arch_atomic64_sub_return_relaxed(i, v); 1597} 1598#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release 1599#endif 1600 1601#ifndef arch_atomic64_sub_return 1602static __always_inline s64 1603arch_atomic64_sub_return(s64 i, atomic64_t *v) 1604{ 1605 s64 ret; 1606 __atomic_pre_full_fence(); 1607 ret = arch_atomic64_sub_return_relaxed(i, v); 1608 __atomic_post_full_fence(); 1609 return ret; 1610} 1611#define arch_atomic64_sub_return arch_atomic64_sub_return 1612#endif 1613 1614#endif /* arch_atomic64_sub_return_relaxed */ 1615 1616#ifndef arch_atomic64_fetch_sub_relaxed 1617#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub 1618#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub 1619#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub 1620#else /* arch_atomic64_fetch_sub_relaxed */ 1621 1622#ifndef arch_atomic64_fetch_sub_acquire 1623static __always_inline s64 1624arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) 1625{ 1626 s64 ret = arch_atomic64_fetch_sub_relaxed(i, v); 1627 __atomic_acquire_fence(); 1628 return ret; 1629} 1630#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire 1631#endif 1632 1633#ifndef arch_atomic64_fetch_sub_release 1634static __always_inline s64 1635arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v) 1636{ 1637 __atomic_release_fence(); 1638 return arch_atomic64_fetch_sub_relaxed(i, v); 1639} 1640#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release 1641#endif 1642 1643#ifndef arch_atomic64_fetch_sub 1644static __always_inline s64 1645arch_atomic64_fetch_sub(s64 i, atomic64_t *v) 1646{ 1647 s64 ret; 1648 __atomic_pre_full_fence(); 1649 ret = arch_atomic64_fetch_sub_relaxed(i, v); 1650 __atomic_post_full_fence(); 1651 return ret; 1652} 1653#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub 1654#endif 1655 1656#endif /* arch_atomic64_fetch_sub_relaxed */ 1657 1658#ifndef arch_atomic64_inc 1659static __always_inline void 1660arch_atomic64_inc(atomic64_t *v) 1661{ 1662 arch_atomic64_add(1, v); 1663} 1664#define arch_atomic64_inc arch_atomic64_inc 1665#endif 1666 1667#ifndef arch_atomic64_inc_return_relaxed 1668#ifdef arch_atomic64_inc_return 1669#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return 1670#define arch_atomic64_inc_return_release arch_atomic64_inc_return 1671#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return 1672#endif /* arch_atomic64_inc_return */ 1673 1674#ifndef arch_atomic64_inc_return 1675static __always_inline s64 1676arch_atomic64_inc_return(atomic64_t *v) 1677{ 1678 return arch_atomic64_add_return(1, v); 1679} 1680#define arch_atomic64_inc_return arch_atomic64_inc_return 1681#endif 1682 1683#ifndef arch_atomic64_inc_return_acquire 1684static __always_inline s64 1685arch_atomic64_inc_return_acquire(atomic64_t *v) 1686{ 1687 return arch_atomic64_add_return_acquire(1, v); 1688} 1689#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire 1690#endif 1691 1692#ifndef arch_atomic64_inc_return_release 1693static __always_inline s64 1694arch_atomic64_inc_return_release(atomic64_t *v) 1695{ 1696 return arch_atomic64_add_return_release(1, v); 1697} 1698#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release 1699#endif 1700 1701#ifndef arch_atomic64_inc_return_relaxed 1702static __always_inline s64 1703arch_atomic64_inc_return_relaxed(atomic64_t *v) 1704{ 1705 return arch_atomic64_add_return_relaxed(1, v); 1706} 1707#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed 1708#endif 1709 1710#else /* arch_atomic64_inc_return_relaxed */ 1711 1712#ifndef arch_atomic64_inc_return_acquire 1713static __always_inline s64 1714arch_atomic64_inc_return_acquire(atomic64_t *v) 1715{ 1716 s64 ret = arch_atomic64_inc_return_relaxed(v); 1717 __atomic_acquire_fence(); 1718 return ret; 1719} 1720#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire 1721#endif 1722 1723#ifndef arch_atomic64_inc_return_release 1724static __always_inline s64 1725arch_atomic64_inc_return_release(atomic64_t *v) 1726{ 1727 __atomic_release_fence(); 1728 return arch_atomic64_inc_return_relaxed(v); 1729} 1730#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release 1731#endif 1732 1733#ifndef arch_atomic64_inc_return 1734static __always_inline s64 1735arch_atomic64_inc_return(atomic64_t *v) 1736{ 1737 s64 ret; 1738 __atomic_pre_full_fence(); 1739 ret = arch_atomic64_inc_return_relaxed(v); 1740 __atomic_post_full_fence(); 1741 return ret; 1742} 1743#define arch_atomic64_inc_return arch_atomic64_inc_return 1744#endif 1745 1746#endif /* arch_atomic64_inc_return_relaxed */ 1747 1748#ifndef arch_atomic64_fetch_inc_relaxed 1749#ifdef arch_atomic64_fetch_inc 1750#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc 1751#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc 1752#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc 1753#endif /* arch_atomic64_fetch_inc */ 1754 1755#ifndef arch_atomic64_fetch_inc 1756static __always_inline s64 1757arch_atomic64_fetch_inc(atomic64_t *v) 1758{ 1759 return arch_atomic64_fetch_add(1, v); 1760} 1761#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc 1762#endif 1763 1764#ifndef arch_atomic64_fetch_inc_acquire 1765static __always_inline s64 1766arch_atomic64_fetch_inc_acquire(atomic64_t *v) 1767{ 1768 return arch_atomic64_fetch_add_acquire(1, v); 1769} 1770#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire 1771#endif 1772 1773#ifndef arch_atomic64_fetch_inc_release 1774static __always_inline s64 1775arch_atomic64_fetch_inc_release(atomic64_t *v) 1776{ 1777 return arch_atomic64_fetch_add_release(1, v); 1778} 1779#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release 1780#endif 1781 1782#ifndef arch_atomic64_fetch_inc_relaxed 1783static __always_inline s64 1784arch_atomic64_fetch_inc_relaxed(atomic64_t *v) 1785{ 1786 return arch_atomic64_fetch_add_relaxed(1, v); 1787} 1788#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed 1789#endif 1790 1791#else /* arch_atomic64_fetch_inc_relaxed */ 1792 1793#ifndef arch_atomic64_fetch_inc_acquire 1794static __always_inline s64 1795arch_atomic64_fetch_inc_acquire(atomic64_t *v) 1796{ 1797 s64 ret = arch_atomic64_fetch_inc_relaxed(v); 1798 __atomic_acquire_fence(); 1799 return ret; 1800} 1801#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire 1802#endif 1803 1804#ifndef arch_atomic64_fetch_inc_release 1805static __always_inline s64 1806arch_atomic64_fetch_inc_release(atomic64_t *v) 1807{ 1808 __atomic_release_fence(); 1809 return arch_atomic64_fetch_inc_relaxed(v); 1810} 1811#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release 1812#endif 1813 1814#ifndef arch_atomic64_fetch_inc 1815static __always_inline s64 1816arch_atomic64_fetch_inc(atomic64_t *v) 1817{ 1818 s64 ret; 1819 __atomic_pre_full_fence(); 1820 ret = arch_atomic64_fetch_inc_relaxed(v); 1821 __atomic_post_full_fence(); 1822 return ret; 1823} 1824#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc 1825#endif 1826 1827#endif /* arch_atomic64_fetch_inc_relaxed */ 1828 1829#ifndef arch_atomic64_dec 1830static __always_inline void 1831arch_atomic64_dec(atomic64_t *v) 1832{ 1833 arch_atomic64_sub(1, v); 1834} 1835#define arch_atomic64_dec arch_atomic64_dec 1836#endif 1837 1838#ifndef arch_atomic64_dec_return_relaxed 1839#ifdef arch_atomic64_dec_return 1840#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return 1841#define arch_atomic64_dec_return_release arch_atomic64_dec_return 1842#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return 1843#endif /* arch_atomic64_dec_return */ 1844 1845#ifndef arch_atomic64_dec_return 1846static __always_inline s64 1847arch_atomic64_dec_return(atomic64_t *v) 1848{ 1849 return arch_atomic64_sub_return(1, v); 1850} 1851#define arch_atomic64_dec_return arch_atomic64_dec_return 1852#endif 1853 1854#ifndef arch_atomic64_dec_return_acquire 1855static __always_inline s64 1856arch_atomic64_dec_return_acquire(atomic64_t *v) 1857{ 1858 return arch_atomic64_sub_return_acquire(1, v); 1859} 1860#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire 1861#endif 1862 1863#ifndef arch_atomic64_dec_return_release 1864static __always_inline s64 1865arch_atomic64_dec_return_release(atomic64_t *v) 1866{ 1867 return arch_atomic64_sub_return_release(1, v); 1868} 1869#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release 1870#endif 1871 1872#ifndef arch_atomic64_dec_return_relaxed 1873static __always_inline s64 1874arch_atomic64_dec_return_relaxed(atomic64_t *v) 1875{ 1876 return arch_atomic64_sub_return_relaxed(1, v); 1877} 1878#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed 1879#endif 1880 1881#else /* arch_atomic64_dec_return_relaxed */ 1882 1883#ifndef arch_atomic64_dec_return_acquire 1884static __always_inline s64 1885arch_atomic64_dec_return_acquire(atomic64_t *v) 1886{ 1887 s64 ret = arch_atomic64_dec_return_relaxed(v); 1888 __atomic_acquire_fence(); 1889 return ret; 1890} 1891#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire 1892#endif 1893 1894#ifndef arch_atomic64_dec_return_release 1895static __always_inline s64 1896arch_atomic64_dec_return_release(atomic64_t *v) 1897{ 1898 __atomic_release_fence(); 1899 return arch_atomic64_dec_return_relaxed(v); 1900} 1901#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release 1902#endif 1903 1904#ifndef arch_atomic64_dec_return 1905static __always_inline s64 1906arch_atomic64_dec_return(atomic64_t *v) 1907{ 1908 s64 ret; 1909 __atomic_pre_full_fence(); 1910 ret = arch_atomic64_dec_return_relaxed(v); 1911 __atomic_post_full_fence(); 1912 return ret; 1913} 1914#define arch_atomic64_dec_return arch_atomic64_dec_return 1915#endif 1916 1917#endif /* arch_atomic64_dec_return_relaxed */ 1918 1919#ifndef arch_atomic64_fetch_dec_relaxed 1920#ifdef arch_atomic64_fetch_dec 1921#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec 1922#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec 1923#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec 1924#endif /* arch_atomic64_fetch_dec */ 1925 1926#ifndef arch_atomic64_fetch_dec 1927static __always_inline s64 1928arch_atomic64_fetch_dec(atomic64_t *v) 1929{ 1930 return arch_atomic64_fetch_sub(1, v); 1931} 1932#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec 1933#endif 1934 1935#ifndef arch_atomic64_fetch_dec_acquire 1936static __always_inline s64 1937arch_atomic64_fetch_dec_acquire(atomic64_t *v) 1938{ 1939 return arch_atomic64_fetch_sub_acquire(1, v); 1940} 1941#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire 1942#endif 1943 1944#ifndef arch_atomic64_fetch_dec_release 1945static __always_inline s64 1946arch_atomic64_fetch_dec_release(atomic64_t *v) 1947{ 1948 return arch_atomic64_fetch_sub_release(1, v); 1949} 1950#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release 1951#endif 1952 1953#ifndef arch_atomic64_fetch_dec_relaxed 1954static __always_inline s64 1955arch_atomic64_fetch_dec_relaxed(atomic64_t *v) 1956{ 1957 return arch_atomic64_fetch_sub_relaxed(1, v); 1958} 1959#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed 1960#endif 1961 1962#else /* arch_atomic64_fetch_dec_relaxed */ 1963 1964#ifndef arch_atomic64_fetch_dec_acquire 1965static __always_inline s64 1966arch_atomic64_fetch_dec_acquire(atomic64_t *v) 1967{ 1968 s64 ret = arch_atomic64_fetch_dec_relaxed(v); 1969 __atomic_acquire_fence(); 1970 return ret; 1971} 1972#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire 1973#endif 1974 1975#ifndef arch_atomic64_fetch_dec_release 1976static __always_inline s64 1977arch_atomic64_fetch_dec_release(atomic64_t *v) 1978{ 1979 __atomic_release_fence(); 1980 return arch_atomic64_fetch_dec_relaxed(v); 1981} 1982#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release 1983#endif 1984 1985#ifndef arch_atomic64_fetch_dec 1986static __always_inline s64 1987arch_atomic64_fetch_dec(atomic64_t *v) 1988{ 1989 s64 ret; 1990 __atomic_pre_full_fence(); 1991 ret = arch_atomic64_fetch_dec_relaxed(v); 1992 __atomic_post_full_fence(); 1993 return ret; 1994} 1995#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec 1996#endif 1997 1998#endif /* arch_atomic64_fetch_dec_relaxed */ 1999 2000#ifndef arch_atomic64_fetch_and_relaxed 2001#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and 2002#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and 2003#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and 2004#else /* arch_atomic64_fetch_and_relaxed */ 2005 2006#ifndef arch_atomic64_fetch_and_acquire 2007static __always_inline s64 2008arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) 2009{ 2010 s64 ret = arch_atomic64_fetch_and_relaxed(i, v); 2011 __atomic_acquire_fence(); 2012 return ret; 2013} 2014#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire 2015#endif 2016 2017#ifndef arch_atomic64_fetch_and_release 2018static __always_inline s64 2019arch_atomic64_fetch_and_release(s64 i, atomic64_t *v) 2020{ 2021 __atomic_release_fence(); 2022 return arch_atomic64_fetch_and_relaxed(i, v); 2023} 2024#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release 2025#endif 2026 2027#ifndef arch_atomic64_fetch_and 2028static __always_inline s64 2029arch_atomic64_fetch_and(s64 i, atomic64_t *v) 2030{ 2031 s64 ret; 2032 __atomic_pre_full_fence(); 2033 ret = arch_atomic64_fetch_and_relaxed(i, v); 2034 __atomic_post_full_fence(); 2035 return ret; 2036} 2037#define arch_atomic64_fetch_and arch_atomic64_fetch_and 2038#endif 2039 2040#endif /* arch_atomic64_fetch_and_relaxed */ 2041 2042#ifndef arch_atomic64_andnot 2043static __always_inline void 2044arch_atomic64_andnot(s64 i, atomic64_t *v) 2045{ 2046 arch_atomic64_and(~i, v); 2047} 2048#define arch_atomic64_andnot arch_atomic64_andnot 2049#endif 2050 2051#ifndef arch_atomic64_fetch_andnot_relaxed 2052#ifdef arch_atomic64_fetch_andnot 2053#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot 2054#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot 2055#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot 2056#endif /* arch_atomic64_fetch_andnot */ 2057 2058#ifndef arch_atomic64_fetch_andnot 2059static __always_inline s64 2060arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) 2061{ 2062 return arch_atomic64_fetch_and(~i, v); 2063} 2064#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot 2065#endif 2066 2067#ifndef arch_atomic64_fetch_andnot_acquire 2068static __always_inline s64 2069arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) 2070{ 2071 return arch_atomic64_fetch_and_acquire(~i, v); 2072} 2073#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire 2074#endif 2075 2076#ifndef arch_atomic64_fetch_andnot_release 2077static __always_inline s64 2078arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) 2079{ 2080 return arch_atomic64_fetch_and_release(~i, v); 2081} 2082#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release 2083#endif 2084 2085#ifndef arch_atomic64_fetch_andnot_relaxed 2086static __always_inline s64 2087arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) 2088{ 2089 return arch_atomic64_fetch_and_relaxed(~i, v); 2090} 2091#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed 2092#endif 2093 2094#else /* arch_atomic64_fetch_andnot_relaxed */ 2095 2096#ifndef arch_atomic64_fetch_andnot_acquire 2097static __always_inline s64 2098arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) 2099{ 2100 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); 2101 __atomic_acquire_fence(); 2102 return ret; 2103} 2104#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire 2105#endif 2106 2107#ifndef arch_atomic64_fetch_andnot_release 2108static __always_inline s64 2109arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) 2110{ 2111 __atomic_release_fence(); 2112 return arch_atomic64_fetch_andnot_relaxed(i, v); 2113} 2114#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release 2115#endif 2116 2117#ifndef arch_atomic64_fetch_andnot 2118static __always_inline s64 2119arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) 2120{ 2121 s64 ret; 2122 __atomic_pre_full_fence(); 2123 ret = arch_atomic64_fetch_andnot_relaxed(i, v); 2124 __atomic_post_full_fence(); 2125 return ret; 2126} 2127#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot 2128#endif 2129 2130#endif /* arch_atomic64_fetch_andnot_relaxed */ 2131 2132#ifndef arch_atomic64_fetch_or_relaxed 2133#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or 2134#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or 2135#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or 2136#else /* arch_atomic64_fetch_or_relaxed */ 2137 2138#ifndef arch_atomic64_fetch_or_acquire 2139static __always_inline s64 2140arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) 2141{ 2142 s64 ret = arch_atomic64_fetch_or_relaxed(i, v); 2143 __atomic_acquire_fence(); 2144 return ret; 2145} 2146#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire 2147#endif 2148 2149#ifndef arch_atomic64_fetch_or_release 2150static __always_inline s64 2151arch_atomic64_fetch_or_release(s64 i, atomic64_t *v) 2152{ 2153 __atomic_release_fence(); 2154 return arch_atomic64_fetch_or_relaxed(i, v); 2155} 2156#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release 2157#endif 2158 2159#ifndef arch_atomic64_fetch_or 2160static __always_inline s64 2161arch_atomic64_fetch_or(s64 i, atomic64_t *v) 2162{ 2163 s64 ret; 2164 __atomic_pre_full_fence(); 2165 ret = arch_atomic64_fetch_or_relaxed(i, v); 2166 __atomic_post_full_fence(); 2167 return ret; 2168} 2169#define arch_atomic64_fetch_or arch_atomic64_fetch_or 2170#endif 2171 2172#endif /* arch_atomic64_fetch_or_relaxed */ 2173 2174#ifndef arch_atomic64_fetch_xor_relaxed 2175#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor 2176#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor 2177#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor 2178#else /* arch_atomic64_fetch_xor_relaxed */ 2179 2180#ifndef arch_atomic64_fetch_xor_acquire 2181static __always_inline s64 2182arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) 2183{ 2184 s64 ret = arch_atomic64_fetch_xor_relaxed(i, v); 2185 __atomic_acquire_fence(); 2186 return ret; 2187} 2188#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire 2189#endif 2190 2191#ifndef arch_atomic64_fetch_xor_release 2192static __always_inline s64 2193arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v) 2194{ 2195 __atomic_release_fence(); 2196 return arch_atomic64_fetch_xor_relaxed(i, v); 2197} 2198#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release 2199#endif 2200 2201#ifndef arch_atomic64_fetch_xor 2202static __always_inline s64 2203arch_atomic64_fetch_xor(s64 i, atomic64_t *v) 2204{ 2205 s64 ret; 2206 __atomic_pre_full_fence(); 2207 ret = arch_atomic64_fetch_xor_relaxed(i, v); 2208 __atomic_post_full_fence(); 2209 return ret; 2210} 2211#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor 2212#endif 2213 2214#endif /* arch_atomic64_fetch_xor_relaxed */ 2215 2216#ifndef arch_atomic64_xchg_relaxed 2217#define arch_atomic64_xchg_acquire arch_atomic64_xchg 2218#define arch_atomic64_xchg_release arch_atomic64_xchg 2219#define arch_atomic64_xchg_relaxed arch_atomic64_xchg 2220#else /* arch_atomic64_xchg_relaxed */ 2221 2222#ifndef arch_atomic64_xchg_acquire 2223static __always_inline s64 2224arch_atomic64_xchg_acquire(atomic64_t *v, s64 i) 2225{ 2226 s64 ret = arch_atomic64_xchg_relaxed(v, i); 2227 __atomic_acquire_fence(); 2228 return ret; 2229} 2230#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire 2231#endif 2232 2233#ifndef arch_atomic64_xchg_release 2234static __always_inline s64 2235arch_atomic64_xchg_release(atomic64_t *v, s64 i) 2236{ 2237 __atomic_release_fence(); 2238 return arch_atomic64_xchg_relaxed(v, i); 2239} 2240#define arch_atomic64_xchg_release arch_atomic64_xchg_release 2241#endif 2242 2243#ifndef arch_atomic64_xchg 2244static __always_inline s64 2245arch_atomic64_xchg(atomic64_t *v, s64 i) 2246{ 2247 s64 ret; 2248 __atomic_pre_full_fence(); 2249 ret = arch_atomic64_xchg_relaxed(v, i); 2250 __atomic_post_full_fence(); 2251 return ret; 2252} 2253#define arch_atomic64_xchg arch_atomic64_xchg 2254#endif 2255 2256#endif /* arch_atomic64_xchg_relaxed */ 2257 2258#ifndef arch_atomic64_cmpxchg_relaxed 2259#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg 2260#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg 2261#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg 2262#else /* arch_atomic64_cmpxchg_relaxed */ 2263 2264#ifndef arch_atomic64_cmpxchg_acquire 2265static __always_inline s64 2266arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) 2267{ 2268 s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); 2269 __atomic_acquire_fence(); 2270 return ret; 2271} 2272#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire 2273#endif 2274 2275#ifndef arch_atomic64_cmpxchg_release 2276static __always_inline s64 2277arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) 2278{ 2279 __atomic_release_fence(); 2280 return arch_atomic64_cmpxchg_relaxed(v, old, new); 2281} 2282#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release 2283#endif 2284 2285#ifndef arch_atomic64_cmpxchg 2286static __always_inline s64 2287arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) 2288{ 2289 s64 ret; 2290 __atomic_pre_full_fence(); 2291 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); 2292 __atomic_post_full_fence(); 2293 return ret; 2294} 2295#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg 2296#endif 2297 2298#endif /* arch_atomic64_cmpxchg_relaxed */ 2299 2300#ifndef arch_atomic64_try_cmpxchg_relaxed 2301#ifdef arch_atomic64_try_cmpxchg 2302#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg 2303#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg 2304#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg 2305#endif /* arch_atomic64_try_cmpxchg */ 2306 2307#ifndef arch_atomic64_try_cmpxchg 2308static __always_inline bool 2309arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) 2310{ 2311 s64 r, o = *old; 2312 r = arch_atomic64_cmpxchg(v, o, new); 2313 if (unlikely(r != o)) 2314 *old = r; 2315 return likely(r == o); 2316} 2317#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg 2318#endif 2319 2320#ifndef arch_atomic64_try_cmpxchg_acquire 2321static __always_inline bool 2322arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) 2323{ 2324 s64 r, o = *old; 2325 r = arch_atomic64_cmpxchg_acquire(v, o, new); 2326 if (unlikely(r != o)) 2327 *old = r; 2328 return likely(r == o); 2329} 2330#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire 2331#endif 2332 2333#ifndef arch_atomic64_try_cmpxchg_release 2334static __always_inline bool 2335arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) 2336{ 2337 s64 r, o = *old; 2338 r = arch_atomic64_cmpxchg_release(v, o, new); 2339 if (unlikely(r != o)) 2340 *old = r; 2341 return likely(r == o); 2342} 2343#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release 2344#endif 2345 2346#ifndef arch_atomic64_try_cmpxchg_relaxed 2347static __always_inline bool 2348arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) 2349{ 2350 s64 r, o = *old; 2351 r = arch_atomic64_cmpxchg_relaxed(v, o, new); 2352 if (unlikely(r != o)) 2353 *old = r; 2354 return likely(r == o); 2355} 2356#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed 2357#endif 2358 2359#else /* arch_atomic64_try_cmpxchg_relaxed */ 2360 2361#ifndef arch_atomic64_try_cmpxchg_acquire 2362static __always_inline bool 2363arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) 2364{ 2365 bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); 2366 __atomic_acquire_fence(); 2367 return ret; 2368} 2369#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire 2370#endif 2371 2372#ifndef arch_atomic64_try_cmpxchg_release 2373static __always_inline bool 2374arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) 2375{ 2376 __atomic_release_fence(); 2377 return arch_atomic64_try_cmpxchg_relaxed(v, old, new); 2378} 2379#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release 2380#endif 2381 2382#ifndef arch_atomic64_try_cmpxchg 2383static __always_inline bool 2384arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) 2385{ 2386 bool ret; 2387 __atomic_pre_full_fence(); 2388 ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); 2389 __atomic_post_full_fence(); 2390 return ret; 2391} 2392#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg 2393#endif 2394 2395#endif /* arch_atomic64_try_cmpxchg_relaxed */ 2396 2397#ifndef arch_atomic64_sub_and_test 2398/** 2399 * arch_atomic64_sub_and_test - subtract value from variable and test result 2400 * @i: integer value to subtract 2401 * @v: pointer of type atomic64_t 2402 * 2403 * Atomically subtracts @i from @v and returns 2404 * true if the result is zero, or false for all 2405 * other cases. 2406 */ 2407static __always_inline bool 2408arch_atomic64_sub_and_test(s64 i, atomic64_t *v) 2409{ 2410 return arch_atomic64_sub_return(i, v) == 0; 2411} 2412#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test 2413#endif 2414 2415#ifndef arch_atomic64_dec_and_test 2416/** 2417 * arch_atomic64_dec_and_test - decrement and test 2418 * @v: pointer of type atomic64_t 2419 * 2420 * Atomically decrements @v by 1 and 2421 * returns true if the result is 0, or false for all other 2422 * cases. 2423 */ 2424static __always_inline bool 2425arch_atomic64_dec_and_test(atomic64_t *v) 2426{ 2427 return arch_atomic64_dec_return(v) == 0; 2428} 2429#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test 2430#endif 2431 2432#ifndef arch_atomic64_inc_and_test 2433/** 2434 * arch_atomic64_inc_and_test - increment and test 2435 * @v: pointer of type atomic64_t 2436 * 2437 * Atomically increments @v by 1 2438 * and returns true if the result is zero, or false for all 2439 * other cases. 2440 */ 2441static __always_inline bool 2442arch_atomic64_inc_and_test(atomic64_t *v) 2443{ 2444 return arch_atomic64_inc_return(v) == 0; 2445} 2446#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test 2447#endif 2448 2449#ifndef arch_atomic64_add_negative_relaxed 2450#ifdef arch_atomic64_add_negative 2451#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative 2452#define arch_atomic64_add_negative_release arch_atomic64_add_negative 2453#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative 2454#endif /* arch_atomic64_add_negative */ 2455 2456#ifndef arch_atomic64_add_negative 2457/** 2458 * arch_atomic64_add_negative - Add and test if negative 2459 * @i: integer value to add 2460 * @v: pointer of type atomic64_t 2461 * 2462 * Atomically adds @i to @v and returns true if the result is negative, 2463 * or false when the result is greater than or equal to zero. 2464 */ 2465static __always_inline bool 2466arch_atomic64_add_negative(s64 i, atomic64_t *v) 2467{ 2468 return arch_atomic64_add_return(i, v) < 0; 2469} 2470#define arch_atomic64_add_negative arch_atomic64_add_negative 2471#endif 2472 2473#ifndef arch_atomic64_add_negative_acquire 2474/** 2475 * arch_atomic64_add_negative_acquire - Add and test if negative 2476 * @i: integer value to add 2477 * @v: pointer of type atomic64_t 2478 * 2479 * Atomically adds @i to @v and returns true if the result is negative, 2480 * or false when the result is greater than or equal to zero. 2481 */ 2482static __always_inline bool 2483arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v) 2484{ 2485 return arch_atomic64_add_return_acquire(i, v) < 0; 2486} 2487#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire 2488#endif 2489 2490#ifndef arch_atomic64_add_negative_release 2491/** 2492 * arch_atomic64_add_negative_release - Add and test if negative 2493 * @i: integer value to add 2494 * @v: pointer of type atomic64_t 2495 * 2496 * Atomically adds @i to @v and returns true if the result is negative, 2497 * or false when the result is greater than or equal to zero. 2498 */ 2499static __always_inline bool 2500arch_atomic64_add_negative_release(s64 i, atomic64_t *v) 2501{ 2502 return arch_atomic64_add_return_release(i, v) < 0; 2503} 2504#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release 2505#endif 2506 2507#ifndef arch_atomic64_add_negative_relaxed 2508/** 2509 * arch_atomic64_add_negative_relaxed - Add and test if negative 2510 * @i: integer value to add 2511 * @v: pointer of type atomic64_t 2512 * 2513 * Atomically adds @i to @v and returns true if the result is negative, 2514 * or false when the result is greater than or equal to zero. 2515 */ 2516static __always_inline bool 2517arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v) 2518{ 2519 return arch_atomic64_add_return_relaxed(i, v) < 0; 2520} 2521#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed 2522#endif 2523 2524#else /* arch_atomic64_add_negative_relaxed */ 2525 2526#ifndef arch_atomic64_add_negative_acquire 2527static __always_inline bool 2528arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v) 2529{ 2530 bool ret = arch_atomic64_add_negative_relaxed(i, v); 2531 __atomic_acquire_fence(); 2532 return ret; 2533} 2534#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire 2535#endif 2536 2537#ifndef arch_atomic64_add_negative_release 2538static __always_inline bool 2539arch_atomic64_add_negative_release(s64 i, atomic64_t *v) 2540{ 2541 __atomic_release_fence(); 2542 return arch_atomic64_add_negative_relaxed(i, v); 2543} 2544#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release 2545#endif 2546 2547#ifndef arch_atomic64_add_negative 2548static __always_inline bool 2549arch_atomic64_add_negative(s64 i, atomic64_t *v) 2550{ 2551 bool ret; 2552 __atomic_pre_full_fence(); 2553 ret = arch_atomic64_add_negative_relaxed(i, v); 2554 __atomic_post_full_fence(); 2555 return ret; 2556} 2557#define arch_atomic64_add_negative arch_atomic64_add_negative 2558#endif 2559 2560#endif /* arch_atomic64_add_negative_relaxed */ 2561 2562#ifndef arch_atomic64_fetch_add_unless 2563/** 2564 * arch_atomic64_fetch_add_unless - add unless the number is already a given value 2565 * @v: pointer of type atomic64_t 2566 * @a: the amount to add to v... 2567 * @u: ...unless v is equal to u. 2568 * 2569 * Atomically adds @a to @v, so long as @v was not already @u. 2570 * Returns original value of @v 2571 */ 2572static __always_inline s64 2573arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) 2574{ 2575 s64 c = arch_atomic64_read(v); 2576 2577 do { 2578 if (unlikely(c == u)) 2579 break; 2580 } while (!arch_atomic64_try_cmpxchg(v, &c, c + a)); 2581 2582 return c; 2583} 2584#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless 2585#endif 2586 2587#ifndef arch_atomic64_add_unless 2588/** 2589 * arch_atomic64_add_unless - add unless the number is already a given value 2590 * @v: pointer of type atomic64_t 2591 * @a: the amount to add to v... 2592 * @u: ...unless v is equal to u. 2593 * 2594 * Atomically adds @a to @v, if @v was not already @u. 2595 * Returns true if the addition was done. 2596 */ 2597static __always_inline bool 2598arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) 2599{ 2600 return arch_atomic64_fetch_add_unless(v, a, u) != u; 2601} 2602#define arch_atomic64_add_unless arch_atomic64_add_unless 2603#endif 2604 2605#ifndef arch_atomic64_inc_not_zero 2606/** 2607 * arch_atomic64_inc_not_zero - increment unless the number is zero 2608 * @v: pointer of type atomic64_t 2609 * 2610 * Atomically increments @v by 1, if @v is non-zero. 2611 * Returns true if the increment was done. 2612 */ 2613static __always_inline bool 2614arch_atomic64_inc_not_zero(atomic64_t *v) 2615{ 2616 return arch_atomic64_add_unless(v, 1, 0); 2617} 2618#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero 2619#endif 2620 2621#ifndef arch_atomic64_inc_unless_negative 2622static __always_inline bool 2623arch_atomic64_inc_unless_negative(atomic64_t *v) 2624{ 2625 s64 c = arch_atomic64_read(v); 2626 2627 do { 2628 if (unlikely(c < 0)) 2629 return false; 2630 } while (!arch_atomic64_try_cmpxchg(v, &c, c + 1)); 2631 2632 return true; 2633} 2634#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative 2635#endif 2636 2637#ifndef arch_atomic64_dec_unless_positive 2638static __always_inline bool 2639arch_atomic64_dec_unless_positive(atomic64_t *v) 2640{ 2641 s64 c = arch_atomic64_read(v); 2642 2643 do { 2644 if (unlikely(c > 0)) 2645 return false; 2646 } while (!arch_atomic64_try_cmpxchg(v, &c, c - 1)); 2647 2648 return true; 2649} 2650#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive 2651#endif 2652 2653#ifndef arch_atomic64_dec_if_positive 2654static __always_inline s64 2655arch_atomic64_dec_if_positive(atomic64_t *v) 2656{ 2657 s64 dec, c = arch_atomic64_read(v); 2658 2659 do { 2660 dec = c - 1; 2661 if (unlikely(dec < 0)) 2662 break; 2663 } while (!arch_atomic64_try_cmpxchg(v, &c, dec)); 2664 2665 return dec; 2666} 2667#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive 2668#endif 2669 2670#endif /* _LINUX_ATOMIC_FALLBACK_H */ 2671// ad2e2b4d168dbc60a73922616047a9bfa446af36