// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire	atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire	atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic_cond_read_relaxed
#endif

/**
 * raw_atomic_long_read() - atomic load with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically loads the value of @v with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_read() elsewhere.
 *
 * Return: The value loaded from @v.
 */
static __always_inline long
raw_atomic_long_read(const atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_read(v);
#else
	return raw_atomic_read(v);
#endif
}

/**
 * raw_atomic_long_read_acquire() - atomic load with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically loads the value of @v with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_read_acquire() elsewhere.
 *
 * Return: The value loaded from @v.
 */
static __always_inline long
raw_atomic_long_read_acquire(const atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_read_acquire(v);
#else
	return raw_atomic_read_acquire(v);
#endif
}

/**
 * raw_atomic_long_set() - atomic set with relaxed ordering
 * @v: pointer to atomic_long_t
 * @i: long value to assign
 *
 * Atomically sets @v to @i with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_set() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_set(atomic_long_t *v, long i)
{
#ifdef CONFIG_64BIT
	raw_atomic64_set(v, i);
#else
	raw_atomic_set(v, i);
#endif
}

/**
 * raw_atomic_long_set_release() - atomic set with release ordering
 * @v: pointer to atomic_long_t
 * @i: long value to assign
 *
 * Atomically sets @v to @i with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_set_release() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_set_release(atomic_long_t *v, long i)
{
#ifdef CONFIG_64BIT
	raw_atomic64_set_release(v, i);
#else
	raw_atomic_set_release(v, i);
#endif
}

/**
 * raw_atomic_long_add() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_add(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_add(i, v);
#else
	raw_atomic_add(i, v);
#endif
}
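
/*
 * Illustrative sketch (not part of the generated header): a release
 * store paired with an acquire load forms the usual publish/consume
 * pattern. The variables "payload" and "ready" and the helper
 * compute_payload() are hypothetical.
 *
 *	// writer
 *	payload = compute_payload();
 *	raw_atomic_long_set_release(&ready, 1);
 *
 *	// reader
 *	while (!raw_atomic_long_read_acquire(&ready))
 *		cpu_relax();
 *	// the writer's "payload" store is now guaranteed to be visible
 */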

/**
 * raw_atomic_long_add_return() - atomic add with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_return(i, v);
#else
	return raw_atomic_add_return(i, v);
#endif
}

/**
 * raw_atomic_long_add_return_acquire() - atomic add with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_return_acquire(i, v);
#else
	return raw_atomic_add_return_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_add_return_release() - atomic add with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_return_release(i, v);
#else
	return raw_atomic_add_return_release(i, v);
#endif
}

/**
 * raw_atomic_long_add_return_relaxed() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_return_relaxed(i, v);
#else
	return raw_atomic_add_return_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_add() - atomic add with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add(i, v);
#else
	return raw_atomic_fetch_add(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_add_acquire() - atomic add with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_acquire(i, v);
#else
	return raw_atomic_fetch_add_acquire(i, v);
#endif
}
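
/*
 * Illustrative sketch (not part of the generated header): because the
 * fetch_*() forms return the *original* value, fetch_add() can hand out
 * unique sequence numbers. The counter "seq" is hypothetical.
 *
 *	long ticket = raw_atomic_long_fetch_add(1, &seq);
 *	// "ticket" is unique; "seq" now holds ticket + 1
 */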

/**
 * raw_atomic_long_fetch_add_release() - atomic add with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_release(i, v);
#else
	return raw_atomic_fetch_add_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_relaxed(i, v);
#else
	return raw_atomic_fetch_add_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_sub() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_sub(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_sub(i, v);
#else
	raw_atomic_sub(i, v);
#endif
}

/**
 * raw_atomic_long_sub_return() - atomic subtract with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_return(i, v);
#else
	return raw_atomic_sub_return(i, v);
#endif
}

/**
 * raw_atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_return_acquire(i, v);
#else
	return raw_atomic_sub_return_acquire(i, v);
#endif
}
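
/*
 * Illustrative sketch (not part of the generated header): the *_return()
 * forms yield the *updated* value, so sub_return() can report how much
 * remains after consuming a cost. "budget", "cost" and the helper
 * handle_overcommit() are hypothetical.
 *
 *	long left = raw_atomic_long_sub_return(cost, &budget);
 *	if (left < 0)
 *		handle_overcommit();
 */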

/**
 * raw_atomic_long_sub_return_release() - atomic subtract with release ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_return_release(i, v);
#else
	return raw_atomic_sub_return_release(i, v);
#endif
}

/**
 * raw_atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_return_relaxed(i, v);
#else
	return raw_atomic_sub_return_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_sub() - atomic subtract with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_sub(i, v);
#else
	return raw_atomic_fetch_sub(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_sub_acquire(i, v);
#else
	return raw_atomic_fetch_sub_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_sub_release() - atomic subtract with release ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_sub_release(i, v);
#else
	return raw_atomic_fetch_sub_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_sub_relaxed(i, v);
#else
	return raw_atomic_fetch_sub_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_inc() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_inc(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_inc(v);
#else
	raw_atomic_inc(v);
#endif
}

/**
 * raw_atomic_long_inc_return() - atomic increment with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_return(v);
#else
	return raw_atomic_inc_return(v);
#endif
}

/**
 * raw_atomic_long_inc_return_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return_acquire(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_return_acquire(v);
#else
	return raw_atomic_inc_return_acquire(v);
#endif
}

/**
 * raw_atomic_long_inc_return_release() - atomic increment with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return_release(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_return_release(v);
#else
	return raw_atomic_inc_return_release(v);
#endif
}

/**
 * raw_atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_return_relaxed(v);
#else
	return raw_atomic_inc_return_relaxed(v);
#endif
}

/**
 * raw_atomic_long_fetch_inc() - atomic increment with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_inc(v);
#else
	return raw_atomic_fetch_inc(v);
#endif
}
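
/*
 * Illustrative sketch (not part of the generated header): inc_return()
 * is a convenient way to stamp events with a monotonically increasing
 * generation number. The counter "gen" is hypothetical.
 *
 *	long generation = raw_atomic_long_inc_return(&gen);
 */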

/**
 * raw_atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_inc_acquire(v);
#else
	return raw_atomic_fetch_inc_acquire(v);
#endif
}

/**
 * raw_atomic_long_fetch_inc_release() - atomic increment with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc_release(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_inc_release(v);
#else
	return raw_atomic_fetch_inc_release(v);
#endif
}

/**
 * raw_atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_inc_relaxed(v);
#else
	return raw_atomic_fetch_inc_relaxed(v);
#endif
}

/**
 * raw_atomic_long_dec() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_dec(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_dec(v);
#else
	raw_atomic_dec(v);
#endif
}

/**
 * raw_atomic_long_dec_return() - atomic decrement with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_return(v);
#else
	return raw_atomic_dec_return(v);
#endif
}

/**
 * raw_atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return_acquire() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return_acquire(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_return_acquire(v);
#else
	return raw_atomic_dec_return_acquire(v);
#endif
}

/**
 * raw_atomic_long_dec_return_release() - atomic decrement with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return_release() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return_release(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_return_release(v);
#else
	return raw_atomic_dec_return_release(v);
#endif
}

/**
 * raw_atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_return_relaxed() elsewhere.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
raw_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_return_relaxed(v);
#else
	return raw_atomic_dec_return_relaxed(v);
#endif
}

/**
 * raw_atomic_long_fetch_dec() - atomic decrement with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_dec(v);
#else
	return raw_atomic_fetch_dec(v);
#endif
}

/**
 * raw_atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_dec_acquire(v);
#else
	return raw_atomic_fetch_dec_acquire(v);
#endif
}

/**
 * raw_atomic_long_fetch_dec_release() - atomic decrement with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec_release(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_dec_release(v);
#else
	return raw_atomic_fetch_dec_release(v);
#endif
}

/**
 * raw_atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_dec_relaxed(v);
#else
	return raw_atomic_fetch_dec_relaxed(v);
#endif
}

/**
 * raw_atomic_long_and() - atomic bitwise AND with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_and() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_and(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_and(i, v);
#else
	raw_atomic_and(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_and() - atomic bitwise AND with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_and(i, v);
#else
	return raw_atomic_fetch_and(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_and_acquire(i, v);
#else
	return raw_atomic_fetch_and_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_and_release(i, v);
#else
	return raw_atomic_fetch_and_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_and_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_and_relaxed(i, v);
#else
	return raw_atomic_fetch_and_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_andnot() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_andnot(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_andnot(i, v);
#else
	raw_atomic_andnot(i, v);
#endif
}
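
/*
 * Illustrative sketch (not part of the generated header): andnot()
 * clears the bits set in @i in a single atomic step, which avoids a
 * cmpxchg loop computing (@v & ~@i). "flags" and FLAG_BUSY are
 * hypothetical.
 *
 *	raw_atomic_long_andnot(FLAG_BUSY, &flags);	// clear FLAG_BUSY
 */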

/**
 * raw_atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot(i, v);
#else
	return raw_atomic_fetch_andnot(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot_acquire(i, v);
#else
	return raw_atomic_fetch_andnot_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot_release(i, v);
#else
	return raw_atomic_fetch_andnot_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_andnot_relaxed(i, v);
#else
	return raw_atomic_fetch_andnot_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_or() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_or() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_or(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_or(i, v);
#else
	raw_atomic_or(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_or() - atomic bitwise OR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or(i, v);
#else
	return raw_atomic_fetch_or(i, v);
#endif
}
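
/*
 * Illustrative sketch (not part of the generated header): fetch_or()
 * returns the original value, so it can both set a flag and report
 * whether it was already set. FLAG_PENDING and "flags" are hypothetical.
 *
 *	if (raw_atomic_long_fetch_or(FLAG_PENDING, &flags) & FLAG_PENDING)
 *		return;		// someone else already marked it pending
 */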

/**
 * raw_atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or_acquire(i, v);
#else
	return raw_atomic_fetch_or_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or_release(i, v);
#else
	return raw_atomic_fetch_or_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_or_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_or_relaxed(i, v);
#else
	return raw_atomic_fetch_or_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_xor() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xor() elsewhere.
 *
 * Return: Nothing.
 */
static __always_inline void
raw_atomic_long_xor(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	raw_atomic64_xor(i, v);
#else
	raw_atomic_xor(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor(i, v);
#else
	return raw_atomic_fetch_xor(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_acquire(i, v);
#else
	return raw_atomic_fetch_xor_acquire(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_release(i, v);
#else
	return raw_atomic_fetch_xor_release(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_relaxed(i, v);
#else
	return raw_atomic_fetch_xor_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_xchg() - atomic exchange with full ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg(v, new);
#else
	return raw_atomic_xchg(v, new);
#endif
}

/**
 * raw_atomic_long_xchg_acquire() - atomic exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_acquire(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_acquire(v, new);
#else
	return raw_atomic_xchg_acquire(v, new);
#endif
}

/**
 * raw_atomic_long_xchg_release() - atomic exchange with release ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_release(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_release(v, new);
#else
	return raw_atomic_xchg_release(v, new);
#endif
}
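
/*
 * Illustrative sketch (not part of the generated header): xchg() can
 * atomically take ownership of a stashed value, leaving a sentinel
 * behind. "stash" and the helper consume() are hypothetical.
 *
 *	long taken = raw_atomic_long_xchg(&stash, 0);
 *	if (taken)
 *		consume(taken);
 */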

/**
 * raw_atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_relaxed(v, new);
#else
	return raw_atomic_xchg_relaxed(v, new);
#endif
}

/**
 * raw_atomic_long_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg(v, old, new);
#else
	return raw_atomic_cmpxchg(v, old, new);
#endif
}

/**
 * raw_atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_acquire(v, old, new);
#else
	return raw_atomic_cmpxchg_acquire(v, old, new);
#endif
}

/**
 * raw_atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_release(v, old, new);
#else
	return raw_atomic_cmpxchg_release(v, old, new);
#endif
}

/**
 * raw_atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_relaxed(v, old, new);
#else
	return raw_atomic_cmpxchg_relaxed(v, old, new);
#endif
}
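
/*
 * Illustrative sketch (not part of the generated header): a classic
 * cmpxchg() loop, clamping a counter at a maximum. "counter" and
 * MAX_COUNT are hypothetical.
 *
 *	long old = raw_atomic_long_read(&counter);
 *	long seen;
 *
 *	for (;;) {
 *		if (old >= MAX_COUNT)
 *			break;		// already at the cap; do nothing
 *		seen = raw_atomic_long_cmpxchg(&counter, old, old + 1);
 *		if (seen == old)
 *			break;		// the exchange happened
 *		old = seen;		// lost a race; retry with new value
 *	}
 */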

/**
 * raw_atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg(v, (int *)old, new);
#endif
}

/**
 * raw_atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_acquire() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new);
#endif
}

/**
 * raw_atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_release() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_release(v, (int *)old, new);
#endif
}

/**
 * raw_atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_relaxed() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
#endif
}
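
/*
 * Illustrative sketch (not part of the generated header): try_cmpxchg()
 * updates @old on failure, which makes retry loops tighter than with
 * cmpxchg() because the value need not be re-read each iteration.
 * "counter" is hypothetical.
 *
 *	long old = raw_atomic_long_read(&counter);
 *
 *	do {
 *		// the new value is computed from the freshest "old"
 *	} while (!raw_atomic_long_try_cmpxchg(&counter, &old, old + 1));
 */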

/**
 * raw_atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_and_test(i, v);
#else
	return raw_atomic_sub_and_test(i, v);
#endif
}

/**
 * raw_atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_and_test(v);
#else
	return raw_atomic_dec_and_test(v);
#endif
}

/**
 * raw_atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_and_test(v);
#else
	return raw_atomic_inc_and_test(v);
#endif
}

/**
 * raw_atomic_long_add_negative() - atomic add and test if negative with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative(i, v);
#else
	return raw_atomic_add_negative(i, v);
#endif
}

/**
 * raw_atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_acquire() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_acquire(i, v);
#else
	return raw_atomic_add_negative_acquire(i, v);
#endif
}
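
/*
 * Illustrative sketch (not part of the generated header): dec_and_test()
 * is the usual shape of a reference-count drop, where only the thread
 * that takes the count to zero performs cleanup. Real kernel code should
 * normally use refcount_t rather than open-coding this. "obj", its
 * "refs" member and free_object() are hypothetical.
 *
 *	if (raw_atomic_long_dec_and_test(&obj->refs))
 *		free_object(obj);
 */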

/**
 * raw_atomic_long_add_negative_release() - atomic add and test if negative with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_release() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_release(i, v);
#else
	return raw_atomic_add_negative_release(i, v);
#endif
}

/**
 * raw_atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_relaxed() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_relaxed(i, v);
#else
	return raw_atomic_add_negative_relaxed(i, v);
#endif
}

/**
 * raw_atomic_long_fetch_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_unless() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_unless(v, a, u);
#else
	return raw_atomic_fetch_add_unless(v, a, u);
#endif
}

/**
 * raw_atomic_long_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_unless() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_unless(v, a, u);
#else
	return raw_atomic_add_unless(v, a, u);
#endif
}

/**
 * raw_atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_not_zero() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_not_zero(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_not_zero(v);
#else
	return raw_atomic_inc_not_zero(v);
#endif
}
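
/*
 * Illustrative sketch (not part of the generated header): inc_not_zero()
 * supports the lookup-then-get pattern, taking a reference only if the
 * object has not already dropped to zero and begun teardown. "obj" and
 * its "refs" member are hypothetical.
 *
 *	if (!raw_atomic_long_inc_not_zero(&obj->refs))
 *		return NULL;	// object is being destroyed
 */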

/**
 * raw_atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_unless_negative() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_unless_negative(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_unless_negative(v);
#else
	return raw_atomic_inc_unless_negative(v);
#endif
}

/**
 * raw_atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_unless_positive() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_unless_positive(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_unless_positive(v);
#else
	return raw_atomic_dec_unless_positive(v);
#endif
}

/**
 * raw_atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_if_positive() elsewhere.
 *
 * Return: The old value of (@v - 1), regardless of whether @v was updated.
 */
static __always_inline long
raw_atomic_long_dec_if_positive(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_if_positive(v);
#else
	return raw_atomic_dec_if_positive(v);
#endif
}

#endif /* _LINUX_ATOMIC_LONG_H */
// 4ef23f98c73cff96d239896175fd26b10b88899e