/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * Also, if an architecture has a special barrier for acquire/release, it
 * can implement its own __atomic_op_* helpers and use the same framework
 * for building the variants.
 *
 * If an architecture overrides __atomic_op_acquire() it will probably want
 * to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */
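/*
 * Editor's illustrative sketch, not part of this header: on an
 * architecture that provides only atomic_add_return_relaxed(), the
 * framework above builds the _acquire form as the relaxed operation
 * followed by a barrier, as if one had written the (hypothetical)
 * helper below by hand.
 */
static inline int example_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);	/* unordered RMW */

	/* order the RMW before all subsequent loads and stores */
	smp_mb__after_atomic();
	return ret;
}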
/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */

/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */
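/*
 * Editor's usage note (example_next_seq() is a hypothetical helper, not
 * a kernel function): the fetch_* operations return the value the atomic
 * held *before* the operation, whereas the *_return operations return
 * the new value. Both are fully ordered in their unsuffixed forms.
 */
static inline int example_next_seq(atomic_t *seq)
{
	/* hands out 0, 1, 2, ... on successive calls when *seq starts at 0 */
	return atomic_fetch_inc(seq);
}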
/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */
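/*
 * Editor's illustrative sketch (hypothetical helper, not part of this
 * header): because the fetch_* forms return the old value,
 * atomic_fetch_or() can implement a simple test-and-set on a mask; the
 * caller learns whether any of the bits were already set while setting
 * them.
 */
static inline int example_test_and_set_mask(int mask, atomic_t *flags)
{
	/* non-zero result: at least one bit in @mask was already set */
	return atomic_fetch_or(mask, flags) & mask;
}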
#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */

/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */
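/*
 * Editor's illustrative sketch (hypothetical helper): the classic
 * cmpxchg() loop. atomic_cmpxchg() returns the value it observed; the
 * update succeeded iff that equals the value we expected, otherwise we
 * retry from the observed value. Here: increment, but only while below
 * @limit.
 */
static inline int example_inc_below(atomic_t *v, int limit)
{
	int old, c = atomic_read(v);

	for (;;) {
		if (c >= limit)
			return 0;
		old = atomic_cmpxchg(v, c, c + 1);
		if (likely(old == c))
			return 1;
		c = old;	/* lost a race; retry with the fresh value */
	}
}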
#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
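/*
 * Editor's illustrative sketch (hypothetical helper):
 * atomic_try_cmpxchg() folds the reload into the primitive. It returns
 * true on success and, on failure, writes the value it observed back
 * through the 'old' pointer, so a retry loop needs no explicit re-read.
 * This is the same loop as the cmpxchg() example above:
 */
static inline int example_inc_below_try(atomic_t *v, int limit)
{
	int c = atomic_read(v);

	do {
		if (c >= limit)
			return 0;
		/* on failure, @c has been updated to the observed value */
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return 1;
}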
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	return __atomic_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
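/*
 * Editor's illustrative sketch (hypothetical helper):
 * atomic_inc_not_zero() is the usual building block for "take a
 * reference only if the object is still alive" -- a count that has
 * reached zero must never be resurrected by a plain increment.
 */
static inline int example_tryget(atomic_t *refcount)
{
	/* returns non-zero iff a reference was taken */
	return atomic_inc_not_zero(refcount);
}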
#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif

/**
 * atomic_inc_not_zero_hint - increment if not zero
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment
 *
 * This version of atomic_inc_not_zero() takes a hint about the probable
 * value of the atomic. This lets the processor avoid reading the memory
 * before starting the atomic read/modify/write cycle, lowering the
 * number of bus transactions on some arches.
 *
 * Returns: 0 if the increment was not done, 1 otherwise.
 */
#ifndef atomic_inc_not_zero_hint
static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
{
	int val, c = hint;

	/* sanity test, should be removed by compiler if hint is a constant */
	if (!hint)
		return atomic_inc_not_zero(v);

	do {
		val = atomic_cmpxchg(v, c, c + 1);
		if (val == c)
			return 1;
		c = val;
	} while (c);

	return 0;
}
#endif

#ifndef atomic_inc_unless_negative
static inline int atomic_inc_unless_negative(atomic_t *p)
{
	int v, v1;
	for (v = 0; v >= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v + 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

#ifndef atomic_dec_unless_positive
static inline int atomic_dec_unless_positive(atomic_t *p)
{
	int v, v1;
	for (v = 0; v <= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v - 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#endif
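/*
 * Editor's illustrative sketch (hypothetical helper):
 * atomic_dec_if_positive() only performs the decrement when the result
 * would stay non-negative, which makes it a natural "try to take one
 * token" operation. A negative return value means no token was
 * available.
 */
static inline int example_take_token(atomic_t *tokens)
{
	return atomic_dec_if_positive(tokens) >= 0;
}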
#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))
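/*
 * Editor's illustrative sketch (hypothetical helper):
 * atomic_cond_read_acquire() spins until the condition, written in
 * terms of VAL (the value just read), becomes true. The ACQUIRE
 * ordering ensures that memory accesses issued after the wait cannot be
 * reordered before the load that satisfied the condition.
 */
static inline void example_wait_until_zero(atomic_t *users)
{
	atomic_cond_read_acquire(users, !VAL);	/* wait for *users == 0 */
}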
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */
/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */
#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif

#define atomic64_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */