Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

locking/atomic: scripts: simplify raw_atomic_long*() definitions

Currently, atomic-long is split into two sections, one defining the
raw_atomic_long_*() ops for CONFIG_64BIT, and one defining the
raw_atomic_long_*() ops for !CONFIG_64BIT.

With many lines elided, this looks like:

| #ifdef CONFIG_64BIT
| ...
| static __always_inline bool
| raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
| {
| return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
| }
| ...
| #else /* CONFIG_64BIT */
| ...
| static __always_inline bool
| raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
| {
| return raw_atomic_try_cmpxchg(v, (int *)old, new);
| }
| ...
| #endif

The two definitions are spread far apart in the file, and duplicate the
prototype, making it hard to have a legible set of kerneldoc comments.

Make this simpler by defining the C prototype once, and writing the two
definitions inline. For example, the above becomes:

| static __always_inline bool
| raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
| {
| #ifdef CONFIG_64BIT
| return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
| #else
| return raw_atomic_try_cmpxchg(v, (int *)old, new);
| #endif
| }

As we now always have a single copy of the C prototype wrapping all the
potential definitions, there is an obvious single location for kerneldoc
comments. As a bonus, both the script and the generated file are
somewhat shorter.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Kees Cook <keescook@chromium.org>
Link: https://lore.kernel.org/r/20230605070124.3741859-23-mark.rutland@arm.com

authored by

Mark Rutland and committed by
Peter Zijlstra
63039946 b916a8c7

+351 -535
+342 -517
include/linux/atomic/atomic-long.h
··· 21 21 #define atomic_long_cond_read_relaxed atomic_cond_read_relaxed 22 22 #endif 23 23 24 + static __always_inline long 25 + raw_atomic_long_read(const atomic_long_t *v) 26 + { 24 27 #ifdef CONFIG_64BIT 25 - 26 - static __always_inline long 27 - raw_atomic_long_read(const atomic_long_t *v) 28 - { 29 28 return raw_atomic64_read(v); 30 - } 31 - 32 - static __always_inline long 33 - raw_atomic_long_read_acquire(const atomic_long_t *v) 34 - { 35 - return raw_atomic64_read_acquire(v); 36 - } 37 - 38 - static __always_inline void 39 - raw_atomic_long_set(atomic_long_t *v, long i) 40 - { 41 - raw_atomic64_set(v, i); 42 - } 43 - 44 - static __always_inline void 45 - raw_atomic_long_set_release(atomic_long_t *v, long i) 46 - { 47 - raw_atomic64_set_release(v, i); 48 - } 49 - 50 - static __always_inline void 51 - raw_atomic_long_add(long i, atomic_long_t *v) 52 - { 53 - raw_atomic64_add(i, v); 54 - } 55 - 56 - static __always_inline long 57 - raw_atomic_long_add_return(long i, atomic_long_t *v) 58 - { 59 - return raw_atomic64_add_return(i, v); 60 - } 61 - 62 - static __always_inline long 63 - raw_atomic_long_add_return_acquire(long i, atomic_long_t *v) 64 - { 65 - return raw_atomic64_add_return_acquire(i, v); 66 - } 67 - 68 - static __always_inline long 69 - raw_atomic_long_add_return_release(long i, atomic_long_t *v) 70 - { 71 - return raw_atomic64_add_return_release(i, v); 72 - } 73 - 74 - static __always_inline long 75 - raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v) 76 - { 77 - return raw_atomic64_add_return_relaxed(i, v); 78 - } 79 - 80 - static __always_inline long 81 - raw_atomic_long_fetch_add(long i, atomic_long_t *v) 82 - { 83 - return raw_atomic64_fetch_add(i, v); 84 - } 85 - 86 - static __always_inline long 87 - raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v) 88 - { 89 - return raw_atomic64_fetch_add_acquire(i, v); 90 - } 91 - 92 - static __always_inline long 93 - raw_atomic_long_fetch_add_release(long i, atomic_long_t *v) 94 - { 95 
- return raw_atomic64_fetch_add_release(i, v); 96 - } 97 - 98 - static __always_inline long 99 - raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v) 100 - { 101 - return raw_atomic64_fetch_add_relaxed(i, v); 102 - } 103 - 104 - static __always_inline void 105 - raw_atomic_long_sub(long i, atomic_long_t *v) 106 - { 107 - raw_atomic64_sub(i, v); 108 - } 109 - 110 - static __always_inline long 111 - raw_atomic_long_sub_return(long i, atomic_long_t *v) 112 - { 113 - return raw_atomic64_sub_return(i, v); 114 - } 115 - 116 - static __always_inline long 117 - raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v) 118 - { 119 - return raw_atomic64_sub_return_acquire(i, v); 120 - } 121 - 122 - static __always_inline long 123 - raw_atomic_long_sub_return_release(long i, atomic_long_t *v) 124 - { 125 - return raw_atomic64_sub_return_release(i, v); 126 - } 127 - 128 - static __always_inline long 129 - raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v) 130 - { 131 - return raw_atomic64_sub_return_relaxed(i, v); 132 - } 133 - 134 - static __always_inline long 135 - raw_atomic_long_fetch_sub(long i, atomic_long_t *v) 136 - { 137 - return raw_atomic64_fetch_sub(i, v); 138 - } 139 - 140 - static __always_inline long 141 - raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v) 142 - { 143 - return raw_atomic64_fetch_sub_acquire(i, v); 144 - } 145 - 146 - static __always_inline long 147 - raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v) 148 - { 149 - return raw_atomic64_fetch_sub_release(i, v); 150 - } 151 - 152 - static __always_inline long 153 - raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v) 154 - { 155 - return raw_atomic64_fetch_sub_relaxed(i, v); 156 - } 157 - 158 - static __always_inline void 159 - raw_atomic_long_inc(atomic_long_t *v) 160 - { 161 - raw_atomic64_inc(v); 162 - } 163 - 164 - static __always_inline long 165 - raw_atomic_long_inc_return(atomic_long_t *v) 166 - { 167 - return raw_atomic64_inc_return(v); 168 - } 
169 - 170 - static __always_inline long 171 - raw_atomic_long_inc_return_acquire(atomic_long_t *v) 172 - { 173 - return raw_atomic64_inc_return_acquire(v); 174 - } 175 - 176 - static __always_inline long 177 - raw_atomic_long_inc_return_release(atomic_long_t *v) 178 - { 179 - return raw_atomic64_inc_return_release(v); 180 - } 181 - 182 - static __always_inline long 183 - raw_atomic_long_inc_return_relaxed(atomic_long_t *v) 184 - { 185 - return raw_atomic64_inc_return_relaxed(v); 186 - } 187 - 188 - static __always_inline long 189 - raw_atomic_long_fetch_inc(atomic_long_t *v) 190 - { 191 - return raw_atomic64_fetch_inc(v); 192 - } 193 - 194 - static __always_inline long 195 - raw_atomic_long_fetch_inc_acquire(atomic_long_t *v) 196 - { 197 - return raw_atomic64_fetch_inc_acquire(v); 198 - } 199 - 200 - static __always_inline long 201 - raw_atomic_long_fetch_inc_release(atomic_long_t *v) 202 - { 203 - return raw_atomic64_fetch_inc_release(v); 204 - } 205 - 206 - static __always_inline long 207 - raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v) 208 - { 209 - return raw_atomic64_fetch_inc_relaxed(v); 210 - } 211 - 212 - static __always_inline void 213 - raw_atomic_long_dec(atomic_long_t *v) 214 - { 215 - raw_atomic64_dec(v); 216 - } 217 - 218 - static __always_inline long 219 - raw_atomic_long_dec_return(atomic_long_t *v) 220 - { 221 - return raw_atomic64_dec_return(v); 222 - } 223 - 224 - static __always_inline long 225 - raw_atomic_long_dec_return_acquire(atomic_long_t *v) 226 - { 227 - return raw_atomic64_dec_return_acquire(v); 228 - } 229 - 230 - static __always_inline long 231 - raw_atomic_long_dec_return_release(atomic_long_t *v) 232 - { 233 - return raw_atomic64_dec_return_release(v); 234 - } 235 - 236 - static __always_inline long 237 - raw_atomic_long_dec_return_relaxed(atomic_long_t *v) 238 - { 239 - return raw_atomic64_dec_return_relaxed(v); 240 - } 241 - 242 - static __always_inline long 243 - raw_atomic_long_fetch_dec(atomic_long_t *v) 244 - { 245 - 
return raw_atomic64_fetch_dec(v); 246 - } 247 - 248 - static __always_inline long 249 - raw_atomic_long_fetch_dec_acquire(atomic_long_t *v) 250 - { 251 - return raw_atomic64_fetch_dec_acquire(v); 252 - } 253 - 254 - static __always_inline long 255 - raw_atomic_long_fetch_dec_release(atomic_long_t *v) 256 - { 257 - return raw_atomic64_fetch_dec_release(v); 258 - } 259 - 260 - static __always_inline long 261 - raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v) 262 - { 263 - return raw_atomic64_fetch_dec_relaxed(v); 264 - } 265 - 266 - static __always_inline void 267 - raw_atomic_long_and(long i, atomic_long_t *v) 268 - { 269 - raw_atomic64_and(i, v); 270 - } 271 - 272 - static __always_inline long 273 - raw_atomic_long_fetch_and(long i, atomic_long_t *v) 274 - { 275 - return raw_atomic64_fetch_and(i, v); 276 - } 277 - 278 - static __always_inline long 279 - raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v) 280 - { 281 - return raw_atomic64_fetch_and_acquire(i, v); 282 - } 283 - 284 - static __always_inline long 285 - raw_atomic_long_fetch_and_release(long i, atomic_long_t *v) 286 - { 287 - return raw_atomic64_fetch_and_release(i, v); 288 - } 289 - 290 - static __always_inline long 291 - raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v) 292 - { 293 - return raw_atomic64_fetch_and_relaxed(i, v); 294 - } 295 - 296 - static __always_inline void 297 - raw_atomic_long_andnot(long i, atomic_long_t *v) 298 - { 299 - raw_atomic64_andnot(i, v); 300 - } 301 - 302 - static __always_inline long 303 - raw_atomic_long_fetch_andnot(long i, atomic_long_t *v) 304 - { 305 - return raw_atomic64_fetch_andnot(i, v); 306 - } 307 - 308 - static __always_inline long 309 - raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v) 310 - { 311 - return raw_atomic64_fetch_andnot_acquire(i, v); 312 - } 313 - 314 - static __always_inline long 315 - raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v) 316 - { 317 - return raw_atomic64_fetch_andnot_release(i, v); 
318 - } 319 - 320 - static __always_inline long 321 - raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v) 322 - { 323 - return raw_atomic64_fetch_andnot_relaxed(i, v); 324 - } 325 - 326 - static __always_inline void 327 - raw_atomic_long_or(long i, atomic_long_t *v) 328 - { 329 - raw_atomic64_or(i, v); 330 - } 331 - 332 - static __always_inline long 333 - raw_atomic_long_fetch_or(long i, atomic_long_t *v) 334 - { 335 - return raw_atomic64_fetch_or(i, v); 336 - } 337 - 338 - static __always_inline long 339 - raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v) 340 - { 341 - return raw_atomic64_fetch_or_acquire(i, v); 342 - } 343 - 344 - static __always_inline long 345 - raw_atomic_long_fetch_or_release(long i, atomic_long_t *v) 346 - { 347 - return raw_atomic64_fetch_or_release(i, v); 348 - } 349 - 350 - static __always_inline long 351 - raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v) 352 - { 353 - return raw_atomic64_fetch_or_relaxed(i, v); 354 - } 355 - 356 - static __always_inline void 357 - raw_atomic_long_xor(long i, atomic_long_t *v) 358 - { 359 - raw_atomic64_xor(i, v); 360 - } 361 - 362 - static __always_inline long 363 - raw_atomic_long_fetch_xor(long i, atomic_long_t *v) 364 - { 365 - return raw_atomic64_fetch_xor(i, v); 366 - } 367 - 368 - static __always_inline long 369 - raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v) 370 - { 371 - return raw_atomic64_fetch_xor_acquire(i, v); 372 - } 373 - 374 - static __always_inline long 375 - raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v) 376 - { 377 - return raw_atomic64_fetch_xor_release(i, v); 378 - } 379 - 380 - static __always_inline long 381 - raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v) 382 - { 383 - return raw_atomic64_fetch_xor_relaxed(i, v); 384 - } 385 - 386 - static __always_inline long 387 - raw_atomic_long_xchg(atomic_long_t *v, long i) 388 - { 389 - return raw_atomic64_xchg(v, i); 390 - } 391 - 392 - static __always_inline long 393 - 
raw_atomic_long_xchg_acquire(atomic_long_t *v, long i) 394 - { 395 - return raw_atomic64_xchg_acquire(v, i); 396 - } 397 - 398 - static __always_inline long 399 - raw_atomic_long_xchg_release(atomic_long_t *v, long i) 400 - { 401 - return raw_atomic64_xchg_release(v, i); 402 - } 403 - 404 - static __always_inline long 405 - raw_atomic_long_xchg_relaxed(atomic_long_t *v, long i) 406 - { 407 - return raw_atomic64_xchg_relaxed(v, i); 408 - } 409 - 410 - static __always_inline long 411 - raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new) 412 - { 413 - return raw_atomic64_cmpxchg(v, old, new); 414 - } 415 - 416 - static __always_inline long 417 - raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new) 418 - { 419 - return raw_atomic64_cmpxchg_acquire(v, old, new); 420 - } 421 - 422 - static __always_inline long 423 - raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new) 424 - { 425 - return raw_atomic64_cmpxchg_release(v, old, new); 426 - } 427 - 428 - static __always_inline long 429 - raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new) 430 - { 431 - return raw_atomic64_cmpxchg_relaxed(v, old, new); 432 - } 433 - 434 - static __always_inline bool 435 - raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new) 436 - { 437 - return raw_atomic64_try_cmpxchg(v, (s64 *)old, new); 438 - } 439 - 440 - static __always_inline bool 441 - raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new) 442 - { 443 - return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new); 444 - } 445 - 446 - static __always_inline bool 447 - raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new) 448 - { 449 - return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new); 450 - } 451 - 452 - static __always_inline bool 453 - raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new) 454 - { 455 - return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new); 456 - } 457 - 458 - static 
__always_inline bool 459 - raw_atomic_long_sub_and_test(long i, atomic_long_t *v) 460 - { 461 - return raw_atomic64_sub_and_test(i, v); 462 - } 463 - 464 - static __always_inline bool 465 - raw_atomic_long_dec_and_test(atomic_long_t *v) 466 - { 467 - return raw_atomic64_dec_and_test(v); 468 - } 469 - 470 - static __always_inline bool 471 - raw_atomic_long_inc_and_test(atomic_long_t *v) 472 - { 473 - return raw_atomic64_inc_and_test(v); 474 - } 475 - 476 - static __always_inline bool 477 - raw_atomic_long_add_negative(long i, atomic_long_t *v) 478 - { 479 - return raw_atomic64_add_negative(i, v); 480 - } 481 - 482 - static __always_inline bool 483 - raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v) 484 - { 485 - return raw_atomic64_add_negative_acquire(i, v); 486 - } 487 - 488 - static __always_inline bool 489 - raw_atomic_long_add_negative_release(long i, atomic_long_t *v) 490 - { 491 - return raw_atomic64_add_negative_release(i, v); 492 - } 493 - 494 - static __always_inline bool 495 - raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v) 496 - { 497 - return raw_atomic64_add_negative_relaxed(i, v); 498 - } 499 - 500 - static __always_inline long 501 - raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 502 - { 503 - return raw_atomic64_fetch_add_unless(v, a, u); 504 - } 505 - 506 - static __always_inline bool 507 - raw_atomic_long_add_unless(atomic_long_t *v, long a, long u) 508 - { 509 - return raw_atomic64_add_unless(v, a, u); 510 - } 511 - 512 - static __always_inline bool 513 - raw_atomic_long_inc_not_zero(atomic_long_t *v) 514 - { 515 - return raw_atomic64_inc_not_zero(v); 516 - } 517 - 518 - static __always_inline bool 519 - raw_atomic_long_inc_unless_negative(atomic_long_t *v) 520 - { 521 - return raw_atomic64_inc_unless_negative(v); 522 - } 523 - 524 - static __always_inline bool 525 - raw_atomic_long_dec_unless_positive(atomic_long_t *v) 526 - { 527 - return raw_atomic64_dec_unless_positive(v); 528 - } 529 - 530 - 
static __always_inline long 531 - raw_atomic_long_dec_if_positive(atomic_long_t *v) 532 - { 533 - return raw_atomic64_dec_if_positive(v); 534 - } 535 - 536 - #else /* CONFIG_64BIT */ 537 - 538 - static __always_inline long 539 - raw_atomic_long_read(const atomic_long_t *v) 540 - { 29 + #else 541 30 return raw_atomic_read(v); 31 + #endif 542 32 } 543 33 544 34 static __always_inline long 545 35 raw_atomic_long_read_acquire(const atomic_long_t *v) 546 36 { 37 + #ifdef CONFIG_64BIT 38 + return raw_atomic64_read_acquire(v); 39 + #else 547 40 return raw_atomic_read_acquire(v); 41 + #endif 548 42 } 549 43 550 44 static __always_inline void 551 45 raw_atomic_long_set(atomic_long_t *v, long i) 552 46 { 47 + #ifdef CONFIG_64BIT 48 + raw_atomic64_set(v, i); 49 + #else 553 50 raw_atomic_set(v, i); 51 + #endif 554 52 } 555 53 556 54 static __always_inline void 557 55 raw_atomic_long_set_release(atomic_long_t *v, long i) 558 56 { 57 + #ifdef CONFIG_64BIT 58 + raw_atomic64_set_release(v, i); 59 + #else 559 60 raw_atomic_set_release(v, i); 61 + #endif 560 62 } 561 63 562 64 static __always_inline void 563 65 raw_atomic_long_add(long i, atomic_long_t *v) 564 66 { 67 + #ifdef CONFIG_64BIT 68 + raw_atomic64_add(i, v); 69 + #else 565 70 raw_atomic_add(i, v); 71 + #endif 566 72 } 567 73 568 74 static __always_inline long 569 75 raw_atomic_long_add_return(long i, atomic_long_t *v) 570 76 { 77 + #ifdef CONFIG_64BIT 78 + return raw_atomic64_add_return(i, v); 79 + #else 571 80 return raw_atomic_add_return(i, v); 81 + #endif 572 82 } 573 83 574 84 static __always_inline long 575 85 raw_atomic_long_add_return_acquire(long i, atomic_long_t *v) 576 86 { 87 + #ifdef CONFIG_64BIT 88 + return raw_atomic64_add_return_acquire(i, v); 89 + #else 577 90 return raw_atomic_add_return_acquire(i, v); 91 + #endif 578 92 } 579 93 580 94 static __always_inline long 581 95 raw_atomic_long_add_return_release(long i, atomic_long_t *v) 582 96 { 97 + #ifdef CONFIG_64BIT 98 + return 
raw_atomic64_add_return_release(i, v); 99 + #else 583 100 return raw_atomic_add_return_release(i, v); 101 + #endif 584 102 } 585 103 586 104 static __always_inline long 587 105 raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v) 588 106 { 107 + #ifdef CONFIG_64BIT 108 + return raw_atomic64_add_return_relaxed(i, v); 109 + #else 589 110 return raw_atomic_add_return_relaxed(i, v); 111 + #endif 590 112 } 591 113 592 114 static __always_inline long 593 115 raw_atomic_long_fetch_add(long i, atomic_long_t *v) 594 116 { 117 + #ifdef CONFIG_64BIT 118 + return raw_atomic64_fetch_add(i, v); 119 + #else 595 120 return raw_atomic_fetch_add(i, v); 121 + #endif 596 122 } 597 123 598 124 static __always_inline long 599 125 raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v) 600 126 { 127 + #ifdef CONFIG_64BIT 128 + return raw_atomic64_fetch_add_acquire(i, v); 129 + #else 601 130 return raw_atomic_fetch_add_acquire(i, v); 131 + #endif 602 132 } 603 133 604 134 static __always_inline long 605 135 raw_atomic_long_fetch_add_release(long i, atomic_long_t *v) 606 136 { 137 + #ifdef CONFIG_64BIT 138 + return raw_atomic64_fetch_add_release(i, v); 139 + #else 607 140 return raw_atomic_fetch_add_release(i, v); 141 + #endif 608 142 } 609 143 610 144 static __always_inline long 611 145 raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v) 612 146 { 147 + #ifdef CONFIG_64BIT 148 + return raw_atomic64_fetch_add_relaxed(i, v); 149 + #else 613 150 return raw_atomic_fetch_add_relaxed(i, v); 151 + #endif 614 152 } 615 153 616 154 static __always_inline void 617 155 raw_atomic_long_sub(long i, atomic_long_t *v) 618 156 { 157 + #ifdef CONFIG_64BIT 158 + raw_atomic64_sub(i, v); 159 + #else 619 160 raw_atomic_sub(i, v); 161 + #endif 620 162 } 621 163 622 164 static __always_inline long 623 165 raw_atomic_long_sub_return(long i, atomic_long_t *v) 624 166 { 167 + #ifdef CONFIG_64BIT 168 + return raw_atomic64_sub_return(i, v); 169 + #else 625 170 return raw_atomic_sub_return(i, v); 
171 + #endif 626 172 } 627 173 628 174 static __always_inline long 629 175 raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v) 630 176 { 177 + #ifdef CONFIG_64BIT 178 + return raw_atomic64_sub_return_acquire(i, v); 179 + #else 631 180 return raw_atomic_sub_return_acquire(i, v); 181 + #endif 632 182 } 633 183 634 184 static __always_inline long 635 185 raw_atomic_long_sub_return_release(long i, atomic_long_t *v) 636 186 { 187 + #ifdef CONFIG_64BIT 188 + return raw_atomic64_sub_return_release(i, v); 189 + #else 637 190 return raw_atomic_sub_return_release(i, v); 191 + #endif 638 192 } 639 193 640 194 static __always_inline long 641 195 raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v) 642 196 { 197 + #ifdef CONFIG_64BIT 198 + return raw_atomic64_sub_return_relaxed(i, v); 199 + #else 643 200 return raw_atomic_sub_return_relaxed(i, v); 201 + #endif 644 202 } 645 203 646 204 static __always_inline long 647 205 raw_atomic_long_fetch_sub(long i, atomic_long_t *v) 648 206 { 207 + #ifdef CONFIG_64BIT 208 + return raw_atomic64_fetch_sub(i, v); 209 + #else 649 210 return raw_atomic_fetch_sub(i, v); 211 + #endif 650 212 } 651 213 652 214 static __always_inline long 653 215 raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v) 654 216 { 217 + #ifdef CONFIG_64BIT 218 + return raw_atomic64_fetch_sub_acquire(i, v); 219 + #else 655 220 return raw_atomic_fetch_sub_acquire(i, v); 221 + #endif 656 222 } 657 223 658 224 static __always_inline long 659 225 raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v) 660 226 { 227 + #ifdef CONFIG_64BIT 228 + return raw_atomic64_fetch_sub_release(i, v); 229 + #else 661 230 return raw_atomic_fetch_sub_release(i, v); 231 + #endif 662 232 } 663 233 664 234 static __always_inline long 665 235 raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v) 666 236 { 237 + #ifdef CONFIG_64BIT 238 + return raw_atomic64_fetch_sub_relaxed(i, v); 239 + #else 667 240 return raw_atomic_fetch_sub_relaxed(i, v); 241 + #endif 668 
242 } 669 243 670 244 static __always_inline void 671 245 raw_atomic_long_inc(atomic_long_t *v) 672 246 { 247 + #ifdef CONFIG_64BIT 248 + raw_atomic64_inc(v); 249 + #else 673 250 raw_atomic_inc(v); 251 + #endif 674 252 } 675 253 676 254 static __always_inline long 677 255 raw_atomic_long_inc_return(atomic_long_t *v) 678 256 { 257 + #ifdef CONFIG_64BIT 258 + return raw_atomic64_inc_return(v); 259 + #else 679 260 return raw_atomic_inc_return(v); 261 + #endif 680 262 } 681 263 682 264 static __always_inline long 683 265 raw_atomic_long_inc_return_acquire(atomic_long_t *v) 684 266 { 267 + #ifdef CONFIG_64BIT 268 + return raw_atomic64_inc_return_acquire(v); 269 + #else 685 270 return raw_atomic_inc_return_acquire(v); 271 + #endif 686 272 } 687 273 688 274 static __always_inline long 689 275 raw_atomic_long_inc_return_release(atomic_long_t *v) 690 276 { 277 + #ifdef CONFIG_64BIT 278 + return raw_atomic64_inc_return_release(v); 279 + #else 691 280 return raw_atomic_inc_return_release(v); 281 + #endif 692 282 } 693 283 694 284 static __always_inline long 695 285 raw_atomic_long_inc_return_relaxed(atomic_long_t *v) 696 286 { 287 + #ifdef CONFIG_64BIT 288 + return raw_atomic64_inc_return_relaxed(v); 289 + #else 697 290 return raw_atomic_inc_return_relaxed(v); 291 + #endif 698 292 } 699 293 700 294 static __always_inline long 701 295 raw_atomic_long_fetch_inc(atomic_long_t *v) 702 296 { 297 + #ifdef CONFIG_64BIT 298 + return raw_atomic64_fetch_inc(v); 299 + #else 703 300 return raw_atomic_fetch_inc(v); 301 + #endif 704 302 } 705 303 706 304 static __always_inline long 707 305 raw_atomic_long_fetch_inc_acquire(atomic_long_t *v) 708 306 { 307 + #ifdef CONFIG_64BIT 308 + return raw_atomic64_fetch_inc_acquire(v); 309 + #else 709 310 return raw_atomic_fetch_inc_acquire(v); 311 + #endif 710 312 } 711 313 712 314 static __always_inline long 713 315 raw_atomic_long_fetch_inc_release(atomic_long_t *v) 714 316 { 317 + #ifdef CONFIG_64BIT 318 + return raw_atomic64_fetch_inc_release(v); 
319 + #else 715 320 return raw_atomic_fetch_inc_release(v); 321 + #endif 716 322 } 717 323 718 324 static __always_inline long 719 325 raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v) 720 326 { 327 + #ifdef CONFIG_64BIT 328 + return raw_atomic64_fetch_inc_relaxed(v); 329 + #else 721 330 return raw_atomic_fetch_inc_relaxed(v); 331 + #endif 722 332 } 723 333 724 334 static __always_inline void 725 335 raw_atomic_long_dec(atomic_long_t *v) 726 336 { 337 + #ifdef CONFIG_64BIT 338 + raw_atomic64_dec(v); 339 + #else 727 340 raw_atomic_dec(v); 341 + #endif 728 342 } 729 343 730 344 static __always_inline long 731 345 raw_atomic_long_dec_return(atomic_long_t *v) 732 346 { 347 + #ifdef CONFIG_64BIT 348 + return raw_atomic64_dec_return(v); 349 + #else 733 350 return raw_atomic_dec_return(v); 351 + #endif 734 352 } 735 353 736 354 static __always_inline long 737 355 raw_atomic_long_dec_return_acquire(atomic_long_t *v) 738 356 { 357 + #ifdef CONFIG_64BIT 358 + return raw_atomic64_dec_return_acquire(v); 359 + #else 739 360 return raw_atomic_dec_return_acquire(v); 361 + #endif 740 362 } 741 363 742 364 static __always_inline long 743 365 raw_atomic_long_dec_return_release(atomic_long_t *v) 744 366 { 367 + #ifdef CONFIG_64BIT 368 + return raw_atomic64_dec_return_release(v); 369 + #else 745 370 return raw_atomic_dec_return_release(v); 371 + #endif 746 372 } 747 373 748 374 static __always_inline long 749 375 raw_atomic_long_dec_return_relaxed(atomic_long_t *v) 750 376 { 377 + #ifdef CONFIG_64BIT 378 + return raw_atomic64_dec_return_relaxed(v); 379 + #else 751 380 return raw_atomic_dec_return_relaxed(v); 381 + #endif 752 382 } 753 383 754 384 static __always_inline long 755 385 raw_atomic_long_fetch_dec(atomic_long_t *v) 756 386 { 387 + #ifdef CONFIG_64BIT 388 + return raw_atomic64_fetch_dec(v); 389 + #else 757 390 return raw_atomic_fetch_dec(v); 391 + #endif 758 392 } 759 393 760 394 static __always_inline long 761 395 raw_atomic_long_fetch_dec_acquire(atomic_long_t *v) 762 396 
{ 397 + #ifdef CONFIG_64BIT 398 + return raw_atomic64_fetch_dec_acquire(v); 399 + #else 763 400 return raw_atomic_fetch_dec_acquire(v); 401 + #endif 764 402 } 765 403 766 404 static __always_inline long 767 405 raw_atomic_long_fetch_dec_release(atomic_long_t *v) 768 406 { 407 + #ifdef CONFIG_64BIT 408 + return raw_atomic64_fetch_dec_release(v); 409 + #else 769 410 return raw_atomic_fetch_dec_release(v); 411 + #endif 770 412 } 771 413 772 414 static __always_inline long 773 415 raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v) 774 416 { 417 + #ifdef CONFIG_64BIT 418 + return raw_atomic64_fetch_dec_relaxed(v); 419 + #else 775 420 return raw_atomic_fetch_dec_relaxed(v); 421 + #endif 776 422 } 777 423 778 424 static __always_inline void 779 425 raw_atomic_long_and(long i, atomic_long_t *v) 780 426 { 427 + #ifdef CONFIG_64BIT 428 + raw_atomic64_and(i, v); 429 + #else 781 430 raw_atomic_and(i, v); 431 + #endif 782 432 } 783 433 784 434 static __always_inline long 785 435 raw_atomic_long_fetch_and(long i, atomic_long_t *v) 786 436 { 437 + #ifdef CONFIG_64BIT 438 + return raw_atomic64_fetch_and(i, v); 439 + #else 787 440 return raw_atomic_fetch_and(i, v); 441 + #endif 788 442 } 789 443 790 444 static __always_inline long 791 445 raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v) 792 446 { 447 + #ifdef CONFIG_64BIT 448 + return raw_atomic64_fetch_and_acquire(i, v); 449 + #else 793 450 return raw_atomic_fetch_and_acquire(i, v); 451 + #endif 794 452 } 795 453 796 454 static __always_inline long 797 455 raw_atomic_long_fetch_and_release(long i, atomic_long_t *v) 798 456 { 457 + #ifdef CONFIG_64BIT 458 + return raw_atomic64_fetch_and_release(i, v); 459 + #else 799 460 return raw_atomic_fetch_and_release(i, v); 461 + #endif 800 462 } 801 463 802 464 static __always_inline long 803 465 raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v) 804 466 { 467 + #ifdef CONFIG_64BIT 468 + return raw_atomic64_fetch_and_relaxed(i, v); 469 + #else 805 470 return 
raw_atomic_fetch_and_relaxed(i, v); 471 + #endif 806 472 } 807 473 808 474 static __always_inline void 809 475 raw_atomic_long_andnot(long i, atomic_long_t *v) 810 476 { 477 + #ifdef CONFIG_64BIT 478 + raw_atomic64_andnot(i, v); 479 + #else 811 480 raw_atomic_andnot(i, v); 481 + #endif 812 482 } 813 483 814 484 static __always_inline long 815 485 raw_atomic_long_fetch_andnot(long i, atomic_long_t *v) 816 486 { 487 + #ifdef CONFIG_64BIT 488 + return raw_atomic64_fetch_andnot(i, v); 489 + #else 817 490 return raw_atomic_fetch_andnot(i, v); 491 + #endif 818 492 } 819 493 820 494 static __always_inline long 821 495 raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v) 822 496 { 497 + #ifdef CONFIG_64BIT 498 + return raw_atomic64_fetch_andnot_acquire(i, v); 499 + #else 823 500 return raw_atomic_fetch_andnot_acquire(i, v); 501 + #endif 824 502 } 825 503 826 504 static __always_inline long 827 505 raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v) 828 506 { 507 + #ifdef CONFIG_64BIT 508 + return raw_atomic64_fetch_andnot_release(i, v); 509 + #else 829 510 return raw_atomic_fetch_andnot_release(i, v); 511 + #endif 830 512 } 831 513 832 514 static __always_inline long 833 515 raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v) 834 516 { 517 + #ifdef CONFIG_64BIT 518 + return raw_atomic64_fetch_andnot_relaxed(i, v); 519 + #else 835 520 return raw_atomic_fetch_andnot_relaxed(i, v); 521 + #endif 836 522 } 837 523 838 524 static __always_inline void 839 525 raw_atomic_long_or(long i, atomic_long_t *v) 840 526 { 527 + #ifdef CONFIG_64BIT 528 + raw_atomic64_or(i, v); 529 + #else 841 530 raw_atomic_or(i, v); 531 + #endif 842 532 } 843 533 844 534 static __always_inline long 845 535 raw_atomic_long_fetch_or(long i, atomic_long_t *v) 846 536 { 537 + #ifdef CONFIG_64BIT 538 + return raw_atomic64_fetch_or(i, v); 539 + #else 847 540 return raw_atomic_fetch_or(i, v); 541 + #endif 848 542 } 849 543 850 544 static __always_inline long 851 545 
raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v) 852 546 { 547 + #ifdef CONFIG_64BIT 548 + return raw_atomic64_fetch_or_acquire(i, v); 549 + #else 853 550 return raw_atomic_fetch_or_acquire(i, v); 551 + #endif 854 552 } 855 553 856 554 static __always_inline long 857 555 raw_atomic_long_fetch_or_release(long i, atomic_long_t *v) 858 556 { 557 + #ifdef CONFIG_64BIT 558 + return raw_atomic64_fetch_or_release(i, v); 559 + #else 859 560 return raw_atomic_fetch_or_release(i, v); 561 + #endif 860 562 } 861 563 862 564 static __always_inline long 863 565 raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v) 864 566 { 567 + #ifdef CONFIG_64BIT 568 + return raw_atomic64_fetch_or_relaxed(i, v); 569 + #else 865 570 return raw_atomic_fetch_or_relaxed(i, v); 571 + #endif 866 572 } 867 573 868 574 static __always_inline void 869 575 raw_atomic_long_xor(long i, atomic_long_t *v) 870 576 { 577 + #ifdef CONFIG_64BIT 578 + raw_atomic64_xor(i, v); 579 + #else 871 580 raw_atomic_xor(i, v); 581 + #endif 872 582 } 873 583 874 584 static __always_inline long 875 585 raw_atomic_long_fetch_xor(long i, atomic_long_t *v) 876 586 { 587 + #ifdef CONFIG_64BIT 588 + return raw_atomic64_fetch_xor(i, v); 589 + #else 877 590 return raw_atomic_fetch_xor(i, v); 591 + #endif 878 592 } 879 593 880 594 static __always_inline long 881 595 raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v) 882 596 { 597 + #ifdef CONFIG_64BIT 598 + return raw_atomic64_fetch_xor_acquire(i, v); 599 + #else 883 600 return raw_atomic_fetch_xor_acquire(i, v); 601 + #endif 884 602 } 885 603 886 604 static __always_inline long 887 605 raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v) 888 606 { 607 + #ifdef CONFIG_64BIT 608 + return raw_atomic64_fetch_xor_release(i, v); 609 + #else 889 610 return raw_atomic_fetch_xor_release(i, v); 611 + #endif 890 612 } 891 613 892 614 static __always_inline long 893 615 raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v) 894 616 { 617 + #ifdef 
CONFIG_64BIT 618 + return raw_atomic64_fetch_xor_relaxed(i, v); 619 + #else 895 620 return raw_atomic_fetch_xor_relaxed(i, v); 621 + #endif 896 622 } 897 623 898 624 static __always_inline long 899 625 raw_atomic_long_xchg(atomic_long_t *v, long i) 900 626 { 627 + #ifdef CONFIG_64BIT 628 + return raw_atomic64_xchg(v, i); 629 + #else 901 630 return raw_atomic_xchg(v, i); 631 + #endif 902 632 } 903 633 904 634 static __always_inline long 905 635 raw_atomic_long_xchg_acquire(atomic_long_t *v, long i) 906 636 { 637 + #ifdef CONFIG_64BIT 638 + return raw_atomic64_xchg_acquire(v, i); 639 + #else 907 640 return raw_atomic_xchg_acquire(v, i); 641 + #endif 908 642 } 909 643 910 644 static __always_inline long 911 645 raw_atomic_long_xchg_release(atomic_long_t *v, long i) 912 646 { 647 + #ifdef CONFIG_64BIT 648 + return raw_atomic64_xchg_release(v, i); 649 + #else 913 650 return raw_atomic_xchg_release(v, i); 651 + #endif 914 652 } 915 653 916 654 static __always_inline long 917 655 raw_atomic_long_xchg_relaxed(atomic_long_t *v, long i) 918 656 { 657 + #ifdef CONFIG_64BIT 658 + return raw_atomic64_xchg_relaxed(v, i); 659 + #else 919 660 return raw_atomic_xchg_relaxed(v, i); 661 + #endif 920 662 } 921 663 922 664 static __always_inline long 923 665 raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new) 924 666 { 667 + #ifdef CONFIG_64BIT 668 + return raw_atomic64_cmpxchg(v, old, new); 669 + #else 925 670 return raw_atomic_cmpxchg(v, old, new); 671 + #endif 926 672 } 927 673 928 674 static __always_inline long 929 675 raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new) 930 676 { 677 + #ifdef CONFIG_64BIT 678 + return raw_atomic64_cmpxchg_acquire(v, old, new); 679 + #else 931 680 return raw_atomic_cmpxchg_acquire(v, old, new); 681 + #endif 932 682 } 933 683 934 684 static __always_inline long 935 685 raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new) 936 686 { 687 + #ifdef CONFIG_64BIT 688 + return raw_atomic64_cmpxchg_release(v, old, 
new); 689 + #else 937 690 return raw_atomic_cmpxchg_release(v, old, new); 691 + #endif 938 692 } 939 693 940 694 static __always_inline long 941 695 raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new) 942 696 { 697 + #ifdef CONFIG_64BIT 698 + return raw_atomic64_cmpxchg_relaxed(v, old, new); 699 + #else 943 700 return raw_atomic_cmpxchg_relaxed(v, old, new); 701 + #endif 944 702 } 945 703 946 704 static __always_inline bool 947 705 raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new) 948 706 { 707 + #ifdef CONFIG_64BIT 708 + return raw_atomic64_try_cmpxchg(v, (s64 *)old, new); 709 + #else 949 710 return raw_atomic_try_cmpxchg(v, (int *)old, new); 711 + #endif 950 712 } 951 713 952 714 static __always_inline bool 953 715 raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new) 954 716 { 717 + #ifdef CONFIG_64BIT 718 + return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new); 719 + #else 955 720 return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new); 721 + #endif 956 722 } 957 723 958 724 static __always_inline bool 959 725 raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new) 960 726 { 727 + #ifdef CONFIG_64BIT 728 + return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new); 729 + #else 961 730 return raw_atomic_try_cmpxchg_release(v, (int *)old, new); 731 + #endif 962 732 } 963 733 964 734 static __always_inline bool 965 735 raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new) 966 736 { 737 + #ifdef CONFIG_64BIT 738 + return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new); 739 + #else 967 740 return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new); 741 + #endif 968 742 } 969 743 970 744 static __always_inline bool 971 745 raw_atomic_long_sub_and_test(long i, atomic_long_t *v) 972 746 { 747 + #ifdef CONFIG_64BIT 748 + return raw_atomic64_sub_and_test(i, v); 749 + #else 973 750 return raw_atomic_sub_and_test(i, v); 751 + #endif 974 752 } 975 753 976 754 static 
__always_inline bool 977 755 raw_atomic_long_dec_and_test(atomic_long_t *v) 978 756 { 757 + #ifdef CONFIG_64BIT 758 + return raw_atomic64_dec_and_test(v); 759 + #else 979 760 return raw_atomic_dec_and_test(v); 761 + #endif 980 762 } 981 763 982 764 static __always_inline bool 983 765 raw_atomic_long_inc_and_test(atomic_long_t *v) 984 766 { 767 + #ifdef CONFIG_64BIT 768 + return raw_atomic64_inc_and_test(v); 769 + #else 985 770 return raw_atomic_inc_and_test(v); 771 + #endif 986 772 } 987 773 988 774 static __always_inline bool 989 775 raw_atomic_long_add_negative(long i, atomic_long_t *v) 990 776 { 777 + #ifdef CONFIG_64BIT 778 + return raw_atomic64_add_negative(i, v); 779 + #else 991 780 return raw_atomic_add_negative(i, v); 781 + #endif 992 782 } 993 783 994 784 static __always_inline bool 995 785 raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v) 996 786 { 787 + #ifdef CONFIG_64BIT 788 + return raw_atomic64_add_negative_acquire(i, v); 789 + #else 997 790 return raw_atomic_add_negative_acquire(i, v); 791 + #endif 998 792 } 999 793 1000 794 static __always_inline bool 1001 795 raw_atomic_long_add_negative_release(long i, atomic_long_t *v) 1002 796 { 797 + #ifdef CONFIG_64BIT 798 + return raw_atomic64_add_negative_release(i, v); 799 + #else 1003 800 return raw_atomic_add_negative_release(i, v); 801 + #endif 1004 802 } 1005 803 1006 804 static __always_inline bool 1007 805 raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v) 1008 806 { 807 + #ifdef CONFIG_64BIT 808 + return raw_atomic64_add_negative_relaxed(i, v); 809 + #else 1009 810 return raw_atomic_add_negative_relaxed(i, v); 811 + #endif 1010 812 } 1011 813 1012 814 static __always_inline long 1013 815 raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 1014 816 { 817 + #ifdef CONFIG_64BIT 818 + return raw_atomic64_fetch_add_unless(v, a, u); 819 + #else 1015 820 return raw_atomic_fetch_add_unless(v, a, u); 821 + #endif 1016 822 } 1017 823 1018 824 static __always_inline bool 
1019 825 raw_atomic_long_add_unless(atomic_long_t *v, long a, long u) 1020 826 { 827 + #ifdef CONFIG_64BIT 828 + return raw_atomic64_add_unless(v, a, u); 829 + #else 1021 830 return raw_atomic_add_unless(v, a, u); 831 + #endif 1022 832 } 1023 833 1024 834 static __always_inline bool 1025 835 raw_atomic_long_inc_not_zero(atomic_long_t *v) 1026 836 { 837 + #ifdef CONFIG_64BIT 838 + return raw_atomic64_inc_not_zero(v); 839 + #else 1027 840 return raw_atomic_inc_not_zero(v); 841 + #endif 1028 842 } 1029 843 1030 844 static __always_inline bool 1031 845 raw_atomic_long_inc_unless_negative(atomic_long_t *v) 1032 846 { 847 + #ifdef CONFIG_64BIT 848 + return raw_atomic64_inc_unless_negative(v); 849 + #else 1033 850 return raw_atomic_inc_unless_negative(v); 851 + #endif 1034 852 } 1035 853 1036 854 static __always_inline bool 1037 855 raw_atomic_long_dec_unless_positive(atomic_long_t *v) 1038 856 { 857 + #ifdef CONFIG_64BIT 858 + return raw_atomic64_dec_unless_positive(v); 859 + #else 1039 860 return raw_atomic_dec_unless_positive(v); 861 + #endif 1040 862 } 1041 863 1042 864 static __always_inline long 1043 865 raw_atomic_long_dec_if_positive(atomic_long_t *v) 1044 866 { 867 + #ifdef CONFIG_64BIT 868 + return raw_atomic64_dec_if_positive(v); 869 + #else 1045 870 return raw_atomic_dec_if_positive(v); 871 + #endif 1046 872 } 1047 873 1048 - #endif /* CONFIG_64BIT */ 1049 874 #endif /* _LINUX_ATOMIC_LONG_H */ 1050 - // 108784846d3bbbb201b8dabe621c5dc30b216206 875 + // ad09f849db0db5b30c82e497eeb9056a394c5f22
+9 -18
scripts/atomic/gen-atomic-long.sh
··· 32 32 done 33 33 } 34 34 35 - #gen_proto_order_variant(meta, pfx, name, sfx, order, atomic, int, arg...) 35 + #gen_proto_order_variant(meta, pfx, name, sfx, order, arg...) 36 36 gen_proto_order_variant() 37 37 { 38 38 local meta="$1"; shift ··· 40 40 local name="$1"; shift 41 41 local sfx="$1"; shift 42 42 local order="$1"; shift 43 - local atomic="$1"; shift 44 - local int="$1"; shift 45 43 46 44 local atomicname="${pfx}${name}${sfx}${order}" 47 45 48 46 local ret="$(gen_ret_type "${meta}" "long")" 49 47 local params="$(gen_params "long" "atomic_long" "$@")" 50 - local argscast="$(gen_args_cast "${int}" "${atomic}" "$@")" 48 + local argscast_32="$(gen_args_cast "int" "atomic" "$@")" 49 + local argscast_64="$(gen_args_cast "s64" "atomic64" "$@")" 51 50 local retstmt="$(gen_ret_stmt "${meta}")" 52 51 53 52 cat <<EOF 54 53 static __always_inline ${ret} 55 54 raw_atomic_long_${atomicname}(${params}) 56 55 { 57 - ${retstmt}raw_${atomic}_${atomicname}(${argscast}); 56 + #ifdef CONFIG_64BIT 57 + ${retstmt}raw_atomic64_${atomicname}(${argscast_64}); 58 + #else 59 + ${retstmt}raw_atomic_${atomicname}(${argscast_32}); 60 + #endif 58 61 } 59 62 60 63 EOF ··· 87 84 #define atomic_long_cond_read_relaxed atomic_cond_read_relaxed 88 85 #endif 89 86 90 - #ifdef CONFIG_64BIT 91 - 92 87 EOF 93 88 94 89 grep '^[a-z]' "$1" | while read name meta args; do 95 - gen_proto "${meta}" "${name}" "atomic64" "s64" ${args} 90 + gen_proto "${meta}" "${name}" ${args} 96 91 done 97 92 98 93 cat <<EOF 99 - #else /* CONFIG_64BIT */ 100 - 101 - EOF 102 - 103 - grep '^[a-z]' "$1" | while read name meta args; do 104 - gen_proto "${meta}" "${name}" "atomic" "int" ${args} 105 - done 106 - 107 - cat <<EOF 108 - #endif /* CONFIG_64BIT */ 109 94 #endif /* _LINUX_ATOMIC_LONG_H */ 110 95 EOF