Linux kernel mirror (for testing): git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

atomics: Provide atomic_add_negative() variants

atomic_add_negative() does not provide the relaxed/acquire/release
variants.

Provide them in preparation for a new scalable reference count algorithm.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20230323102800.101763813@linutronix.de

Authored by Thomas Gleixner; committed by Peter Zijlstra.
e5ab9eff fe15c26e

+309 -18
+199 -9
include/linux/atomic/atomic-arch-fallback.h
··· 1208 1208 #define arch_atomic_inc_and_test arch_atomic_inc_and_test 1209 1209 #endif 1210 1210 1211 + #ifndef arch_atomic_add_negative_relaxed 1212 + #ifdef arch_atomic_add_negative 1213 + #define arch_atomic_add_negative_acquire arch_atomic_add_negative 1214 + #define arch_atomic_add_negative_release arch_atomic_add_negative 1215 + #define arch_atomic_add_negative_relaxed arch_atomic_add_negative 1216 + #endif /* arch_atomic_add_negative */ 1217 + 1211 1218 #ifndef arch_atomic_add_negative 1212 1219 /** 1213 - * arch_atomic_add_negative - add and test if negative 1220 + * arch_atomic_add_negative - Add and test if negative 1214 1221 * @i: integer value to add 1215 1222 * @v: pointer of type atomic_t 1216 1223 * 1217 - * Atomically adds @i to @v and returns true 1218 - * if the result is negative, or false when 1219 - * result is greater than or equal to zero. 1224 + * Atomically adds @i to @v and returns true if the result is negative, 1225 + * or false when the result is greater than or equal to zero. 1220 1226 */ 1221 1227 static __always_inline bool 1222 1228 arch_atomic_add_negative(int i, atomic_t *v) ··· 1231 1225 } 1232 1226 #define arch_atomic_add_negative arch_atomic_add_negative 1233 1227 #endif 1228 + 1229 + #ifndef arch_atomic_add_negative_acquire 1230 + /** 1231 + * arch_atomic_add_negative_acquire - Add and test if negative 1232 + * @i: integer value to add 1233 + * @v: pointer of type atomic_t 1234 + * 1235 + * Atomically adds @i to @v and returns true if the result is negative, 1236 + * or false when the result is greater than or equal to zero. 
1237 + */ 1238 + static __always_inline bool 1239 + arch_atomic_add_negative_acquire(int i, atomic_t *v) 1240 + { 1241 + return arch_atomic_add_return_acquire(i, v) < 0; 1242 + } 1243 + #define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire 1244 + #endif 1245 + 1246 + #ifndef arch_atomic_add_negative_release 1247 + /** 1248 + * arch_atomic_add_negative_release - Add and test if negative 1249 + * @i: integer value to add 1250 + * @v: pointer of type atomic_t 1251 + * 1252 + * Atomically adds @i to @v and returns true if the result is negative, 1253 + * or false when the result is greater than or equal to zero. 1254 + */ 1255 + static __always_inline bool 1256 + arch_atomic_add_negative_release(int i, atomic_t *v) 1257 + { 1258 + return arch_atomic_add_return_release(i, v) < 0; 1259 + } 1260 + #define arch_atomic_add_negative_release arch_atomic_add_negative_release 1261 + #endif 1262 + 1263 + #ifndef arch_atomic_add_negative_relaxed 1264 + /** 1265 + * arch_atomic_add_negative_relaxed - Add and test if negative 1266 + * @i: integer value to add 1267 + * @v: pointer of type atomic_t 1268 + * 1269 + * Atomically adds @i to @v and returns true if the result is negative, 1270 + * or false when the result is greater than or equal to zero. 
1271 + */ 1272 + static __always_inline bool 1273 + arch_atomic_add_negative_relaxed(int i, atomic_t *v) 1274 + { 1275 + return arch_atomic_add_return_relaxed(i, v) < 0; 1276 + } 1277 + #define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed 1278 + #endif 1279 + 1280 + #else /* arch_atomic_add_negative_relaxed */ 1281 + 1282 + #ifndef arch_atomic_add_negative_acquire 1283 + static __always_inline bool 1284 + arch_atomic_add_negative_acquire(int i, atomic_t *v) 1285 + { 1286 + bool ret = arch_atomic_add_negative_relaxed(i, v); 1287 + __atomic_acquire_fence(); 1288 + return ret; 1289 + } 1290 + #define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire 1291 + #endif 1292 + 1293 + #ifndef arch_atomic_add_negative_release 1294 + static __always_inline bool 1295 + arch_atomic_add_negative_release(int i, atomic_t *v) 1296 + { 1297 + __atomic_release_fence(); 1298 + return arch_atomic_add_negative_relaxed(i, v); 1299 + } 1300 + #define arch_atomic_add_negative_release arch_atomic_add_negative_release 1301 + #endif 1302 + 1303 + #ifndef arch_atomic_add_negative 1304 + static __always_inline bool 1305 + arch_atomic_add_negative(int i, atomic_t *v) 1306 + { 1307 + bool ret; 1308 + __atomic_pre_full_fence(); 1309 + ret = arch_atomic_add_negative_relaxed(i, v); 1310 + __atomic_post_full_fence(); 1311 + return ret; 1312 + } 1313 + #define arch_atomic_add_negative arch_atomic_add_negative 1314 + #endif 1315 + 1316 + #endif /* arch_atomic_add_negative_relaxed */ 1234 1317 1235 1318 #ifndef arch_atomic_fetch_add_unless 1236 1319 /** ··· 2424 2329 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test 2425 2330 #endif 2426 2331 2332 + #ifndef arch_atomic64_add_negative_relaxed 2333 + #ifdef arch_atomic64_add_negative 2334 + #define arch_atomic64_add_negative_acquire arch_atomic64_add_negative 2335 + #define arch_atomic64_add_negative_release arch_atomic64_add_negative 2336 + #define arch_atomic64_add_negative_relaxed 
arch_atomic64_add_negative 2337 + #endif /* arch_atomic64_add_negative */ 2338 + 2427 2339 #ifndef arch_atomic64_add_negative 2428 2340 /** 2429 - * arch_atomic64_add_negative - add and test if negative 2341 + * arch_atomic64_add_negative - Add and test if negative 2430 2342 * @i: integer value to add 2431 2343 * @v: pointer of type atomic64_t 2432 2344 * 2433 - * Atomically adds @i to @v and returns true 2434 - * if the result is negative, or false when 2435 - * result is greater than or equal to zero. 2345 + * Atomically adds @i to @v and returns true if the result is negative, 2346 + * or false when the result is greater than or equal to zero. 2436 2347 */ 2437 2348 static __always_inline bool 2438 2349 arch_atomic64_add_negative(s64 i, atomic64_t *v) ··· 2447 2346 } 2448 2347 #define arch_atomic64_add_negative arch_atomic64_add_negative 2449 2348 #endif 2349 + 2350 + #ifndef arch_atomic64_add_negative_acquire 2351 + /** 2352 + * arch_atomic64_add_negative_acquire - Add and test if negative 2353 + * @i: integer value to add 2354 + * @v: pointer of type atomic64_t 2355 + * 2356 + * Atomically adds @i to @v and returns true if the result is negative, 2357 + * or false when the result is greater than or equal to zero. 2358 + */ 2359 + static __always_inline bool 2360 + arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v) 2361 + { 2362 + return arch_atomic64_add_return_acquire(i, v) < 0; 2363 + } 2364 + #define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire 2365 + #endif 2366 + 2367 + #ifndef arch_atomic64_add_negative_release 2368 + /** 2369 + * arch_atomic64_add_negative_release - Add and test if negative 2370 + * @i: integer value to add 2371 + * @v: pointer of type atomic64_t 2372 + * 2373 + * Atomically adds @i to @v and returns true if the result is negative, 2374 + * or false when the result is greater than or equal to zero. 
2375 + */ 2376 + static __always_inline bool 2377 + arch_atomic64_add_negative_release(s64 i, atomic64_t *v) 2378 + { 2379 + return arch_atomic64_add_return_release(i, v) < 0; 2380 + } 2381 + #define arch_atomic64_add_negative_release arch_atomic64_add_negative_release 2382 + #endif 2383 + 2384 + #ifndef arch_atomic64_add_negative_relaxed 2385 + /** 2386 + * arch_atomic64_add_negative_relaxed - Add and test if negative 2387 + * @i: integer value to add 2388 + * @v: pointer of type atomic64_t 2389 + * 2390 + * Atomically adds @i to @v and returns true if the result is negative, 2391 + * or false when the result is greater than or equal to zero. 2392 + */ 2393 + static __always_inline bool 2394 + arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v) 2395 + { 2396 + return arch_atomic64_add_return_relaxed(i, v) < 0; 2397 + } 2398 + #define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed 2399 + #endif 2400 + 2401 + #else /* arch_atomic64_add_negative_relaxed */ 2402 + 2403 + #ifndef arch_atomic64_add_negative_acquire 2404 + static __always_inline bool 2405 + arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v) 2406 + { 2407 + bool ret = arch_atomic64_add_negative_relaxed(i, v); 2408 + __atomic_acquire_fence(); 2409 + return ret; 2410 + } 2411 + #define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire 2412 + #endif 2413 + 2414 + #ifndef arch_atomic64_add_negative_release 2415 + static __always_inline bool 2416 + arch_atomic64_add_negative_release(s64 i, atomic64_t *v) 2417 + { 2418 + __atomic_release_fence(); 2419 + return arch_atomic64_add_negative_relaxed(i, v); 2420 + } 2421 + #define arch_atomic64_add_negative_release arch_atomic64_add_negative_release 2422 + #endif 2423 + 2424 + #ifndef arch_atomic64_add_negative 2425 + static __always_inline bool 2426 + arch_atomic64_add_negative(s64 i, atomic64_t *v) 2427 + { 2428 + bool ret; 2429 + __atomic_pre_full_fence(); 2430 + ret = arch_atomic64_add_negative_relaxed(i, v); 
2431 + __atomic_post_full_fence(); 2432 + return ret; 2433 + } 2434 + #define arch_atomic64_add_negative arch_atomic64_add_negative 2435 + #endif 2436 + 2437 + #endif /* arch_atomic64_add_negative_relaxed */ 2450 2438 2451 2439 #ifndef arch_atomic64_fetch_add_unless 2452 2440 /** ··· 2646 2456 #endif 2647 2457 2648 2458 #endif /* _LINUX_ATOMIC_FALLBACK_H */ 2649 - // b5e87bdd5ede61470c29f7a7e4de781af3770f09 2459 + // 00071fffa021cec66f6290d706d69c91df87bade
+67 -1
include/linux/atomic/atomic-instrumented.h
··· 592 592 return arch_atomic_add_negative(i, v); 593 593 } 594 594 595 + static __always_inline bool 596 + atomic_add_negative_acquire(int i, atomic_t *v) 597 + { 598 + instrument_atomic_read_write(v, sizeof(*v)); 599 + return arch_atomic_add_negative_acquire(i, v); 600 + } 601 + 602 + static __always_inline bool 603 + atomic_add_negative_release(int i, atomic_t *v) 604 + { 605 + kcsan_release(); 606 + instrument_atomic_read_write(v, sizeof(*v)); 607 + return arch_atomic_add_negative_release(i, v); 608 + } 609 + 610 + static __always_inline bool 611 + atomic_add_negative_relaxed(int i, atomic_t *v) 612 + { 613 + instrument_atomic_read_write(v, sizeof(*v)); 614 + return arch_atomic_add_negative_relaxed(i, v); 615 + } 616 + 595 617 static __always_inline int 596 618 atomic_fetch_add_unless(atomic_t *v, int a, int u) 597 619 { ··· 1231 1209 kcsan_mb(); 1232 1210 instrument_atomic_read_write(v, sizeof(*v)); 1233 1211 return arch_atomic64_add_negative(i, v); 1212 + } 1213 + 1214 + static __always_inline bool 1215 + atomic64_add_negative_acquire(s64 i, atomic64_t *v) 1216 + { 1217 + instrument_atomic_read_write(v, sizeof(*v)); 1218 + return arch_atomic64_add_negative_acquire(i, v); 1219 + } 1220 + 1221 + static __always_inline bool 1222 + atomic64_add_negative_release(s64 i, atomic64_t *v) 1223 + { 1224 + kcsan_release(); 1225 + instrument_atomic_read_write(v, sizeof(*v)); 1226 + return arch_atomic64_add_negative_release(i, v); 1227 + } 1228 + 1229 + static __always_inline bool 1230 + atomic64_add_negative_relaxed(s64 i, atomic64_t *v) 1231 + { 1232 + instrument_atomic_read_write(v, sizeof(*v)); 1233 + return arch_atomic64_add_negative_relaxed(i, v); 1234 1234 } 1235 1235 1236 1236 static __always_inline s64 ··· 1874 1830 return arch_atomic_long_add_negative(i, v); 1875 1831 } 1876 1832 1833 + static __always_inline bool 1834 + atomic_long_add_negative_acquire(long i, atomic_long_t *v) 1835 + { 1836 + instrument_atomic_read_write(v, sizeof(*v)); 1837 + return 
arch_atomic_long_add_negative_acquire(i, v); 1838 + } 1839 + 1840 + static __always_inline bool 1841 + atomic_long_add_negative_release(long i, atomic_long_t *v) 1842 + { 1843 + kcsan_release(); 1844 + instrument_atomic_read_write(v, sizeof(*v)); 1845 + return arch_atomic_long_add_negative_release(i, v); 1846 + } 1847 + 1848 + static __always_inline bool 1849 + atomic_long_add_negative_relaxed(long i, atomic_long_t *v) 1850 + { 1851 + instrument_atomic_read_write(v, sizeof(*v)); 1852 + return arch_atomic_long_add_negative_relaxed(i, v); 1853 + } 1854 + 1877 1855 static __always_inline long 1878 1856 atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 1879 1857 { ··· 2149 2083 }) 2150 2084 2151 2085 #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */ 2152 - // 764f741eb77a7ad565dc8d99ce2837d5542e8aee 2086 + // 1b485de9cbaa4900de59e14ee2084357eaeb1c3a
+37 -1
include/linux/atomic/atomic-long.h
··· 479 479 return arch_atomic64_add_negative(i, v); 480 480 } 481 481 482 + static __always_inline bool 483 + arch_atomic_long_add_negative_acquire(long i, atomic_long_t *v) 484 + { 485 + return arch_atomic64_add_negative_acquire(i, v); 486 + } 487 + 488 + static __always_inline bool 489 + arch_atomic_long_add_negative_release(long i, atomic_long_t *v) 490 + { 491 + return arch_atomic64_add_negative_release(i, v); 492 + } 493 + 494 + static __always_inline bool 495 + arch_atomic_long_add_negative_relaxed(long i, atomic_long_t *v) 496 + { 497 + return arch_atomic64_add_negative_relaxed(i, v); 498 + } 499 + 482 500 static __always_inline long 483 501 arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 484 502 { ··· 991 973 return arch_atomic_add_negative(i, v); 992 974 } 993 975 976 + static __always_inline bool 977 + arch_atomic_long_add_negative_acquire(long i, atomic_long_t *v) 978 + { 979 + return arch_atomic_add_negative_acquire(i, v); 980 + } 981 + 982 + static __always_inline bool 983 + arch_atomic_long_add_negative_release(long i, atomic_long_t *v) 984 + { 985 + return arch_atomic_add_negative_release(i, v); 986 + } 987 + 988 + static __always_inline bool 989 + arch_atomic_long_add_negative_relaxed(long i, atomic_long_t *v) 990 + { 991 + return arch_atomic_add_negative_relaxed(i, v); 992 + } 993 + 994 994 static __always_inline long 995 995 arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 996 996 { ··· 1047 1011 1048 1012 #endif /* CONFIG_64BIT */ 1049 1013 #endif /* _LINUX_ATOMIC_LONG_H */ 1050 - // e8f0e08ff072b74d180eabe2ad001282b38c2c88 1014 + // a194c07d7d2f4b0e178d3c118c919775d5d65f50
+1 -1
scripts/atomic/atomics.tbl
··· 33 33 sub_and_test b i v 34 34 dec_and_test b v 35 35 inc_and_test b v 36 - add_negative b i v 36 + add_negative B i v 37 37 add_unless fb v i:a i:u 38 38 inc_not_zero b v 39 39 inc_unless_negative b v
+5 -6
scripts/atomic/fallbacks/add_negative
··· 1 1 cat <<EOF 2 2 /** 3 - * arch_${atomic}_add_negative - add and test if negative 3 + * arch_${atomic}_add_negative${order} - Add and test if negative 4 4 * @i: integer value to add 5 5 * @v: pointer of type ${atomic}_t 6 6 * 7 - * Atomically adds @i to @v and returns true 8 - * if the result is negative, or false when 9 - * result is greater than or equal to zero. 7 + * Atomically adds @i to @v and returns true if the result is negative, 8 + * or false when the result is greater than or equal to zero. 10 9 */ 11 10 static __always_inline bool 12 - arch_${atomic}_add_negative(${int} i, ${atomic}_t *v) 11 + arch_${atomic}_add_negative${order}(${int} i, ${atomic}_t *v) 13 12 { 14 - return arch_${atomic}_add_return(i, v) < 0; 13 + return arch_${atomic}_add_return${order}(i, v) < 0; 15 14 } 16 15 EOF