Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

atomics/treewide: Make conditional inc/dec ops optional

The conditional inc/dec ops differ for atomic_t and atomic64_t:

- atomic_inc_unless_positive() is optional for atomic_t, and doesn't exist for atomic64_t.
- atomic_dec_unless_negative() is optional for atomic_t, and doesn't exist for atomic64_t.
- atomic_dec_if_positive() is optional for atomic_t, and is mandatory for atomic64_t.

Let's make these consistently optional for both. At the same time, let's
clean up the existing fallbacks to use atomic_try_cmpxchg().

The instrumented atomics are updated accordingly.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-18-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>

authored by

Mark Rutland and committed by
Ingo Molnar
b3a2a05f 9837559d

+85 -98
+1
arch/alpha/include/asm/atomic.h
··· 296 296 smp_mb(); 297 297 return old - 1; 298 298 } 299 + #define atomic64_dec_if_positive atomic64_dec_if_positive 299 300 300 301 #endif /* _ALPHA_ATOMIC_H */
+1
arch/arc/include/asm/atomic.h
··· 517 517 518 518 return val; 519 519 } 520 + #define atomic64_dec_if_positive atomic64_dec_if_positive 520 521 521 522 /** 522 523 * atomic64_fetch_add_unless - add unless the number is a given value
+1
arch/arm/include/asm/atomic.h
··· 474 474 475 475 return result; 476 476 } 477 + #define atomic64_dec_if_positive atomic64_dec_if_positive 477 478 478 479 static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a, 479 480 long long u)
+2
arch/arm64/include/asm/atomic.h
··· 159 159 160 160 #define atomic64_andnot atomic64_andnot 161 161 162 + #define atomic64_dec_if_positive atomic64_dec_if_positive 163 + 162 164 #endif 163 165 #endif
-16
arch/ia64/include/asm/atomic.h
··· 215 215 (cmpxchg(&((v)->counter), old, new)) 216 216 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new)) 217 217 218 - static __inline__ long atomic64_dec_if_positive(atomic64_t *v) 219 - { 220 - long c, old, dec; 221 - c = atomic64_read(v); 222 - for (;;) { 223 - dec = c - 1; 224 - if (unlikely(dec < 0)) 225 - break; 226 - old = atomic64_cmpxchg((v), c, dec); 227 - if (likely(old == c)) 228 - break; 229 - c = old; 230 - } 231 - return dec; 232 - } 233 - 234 218 #define atomic_add(i,v) (void)atomic_add_return((i), (v)) 235 219 #define atomic_sub(i,v) (void)atomic_sub_return((i), (v)) 236 220
-23
arch/parisc/include/asm/atomic.h
··· 223 223 ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n))) 224 224 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new)) 225 225 226 - /* 227 - * atomic64_dec_if_positive - decrement by 1 if old value positive 228 - * @v: pointer of type atomic_t 229 - * 230 - * The function returns the old value of *v minus 1, even if 231 - * the atomic variable, v, was not decremented. 232 - */ 233 - static inline long atomic64_dec_if_positive(atomic64_t *v) 234 - { 235 - long c, old, dec; 236 - c = atomic64_read(v); 237 - for (;;) { 238 - dec = c - 1; 239 - if (unlikely(dec < 0)) 240 - break; 241 - old = atomic64_cmpxchg((v), c, dec); 242 - if (likely(old == c)) 243 - break; 244 - c = old; 245 - } 246 - return dec; 247 - } 248 - 249 226 #endif /* !CONFIG_64BIT */ 250 227 251 228
+1
arch/powerpc/include/asm/atomic.h
··· 488 488 489 489 return t; 490 490 } 491 + #define atomic64_dec_if_positive atomic64_dec_if_positive 491 492 492 493 #define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n))) 493 494 #define atomic64_cmpxchg_relaxed(v, o, n) \
-17
arch/s390/include/asm/atomic.h
··· 145 145 146 146 #undef ATOMIC64_OPS 147 147 148 - static inline long atomic64_dec_if_positive(atomic64_t *v) 149 - { 150 - long c, old, dec; 151 - 152 - c = atomic64_read(v); 153 - for (;;) { 154 - dec = c - 1; 155 - if (unlikely(dec < 0)) 156 - break; 157 - old = atomic64_cmpxchg((v), c, dec); 158 - if (likely(old == c)) 159 - break; 160 - c = old; 161 - } 162 - return dec; 163 - } 164 - 165 148 #define atomic64_sub_return(_i, _v) atomic64_add_return(-(long)(_i), _v) 166 149 #define atomic64_fetch_sub(_i, _v) atomic64_fetch_add(-(long)(_i), _v) 167 150 #define atomic64_sub(_i, _v) atomic64_add(-(long)(_i), _v)
+1
arch/sparc/include/asm/atomic_64.h
··· 62 62 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new)) 63 63 64 64 long atomic64_dec_if_positive(atomic64_t *v); 65 + #define atomic64_dec_if_positive atomic64_dec_if_positive 65 66 66 67 #endif /* !(__ARCH_SPARC64_ATOMIC__) */
+1
arch/x86/include/asm/atomic64_32.h
··· 254 254 return r; 255 255 } 256 256 257 + #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive 257 258 static inline long long arch_atomic64_dec_if_positive(atomic64_t *v) 258 259 { 259 260 long long r;
-18
arch/x86/include/asm/atomic64_64.h
··· 191 191 return xchg(&v->counter, new); 192 192 } 193 193 194 - /* 195 - * arch_atomic64_dec_if_positive - decrement by 1 if old value positive 196 - * @v: pointer of type atomic_t 197 - * 198 - * The function returns the old value of *v minus 1, even if 199 - * the atomic variable, v, was not decremented. 200 - */ 201 - static inline long arch_atomic64_dec_if_positive(atomic64_t *v) 202 - { 203 - s64 dec, c = arch_atomic64_read(v); 204 - do { 205 - dec = c - 1; 206 - if (unlikely(dec < 0)) 207 - break; 208 - } while (!arch_atomic64_try_cmpxchg(v, &c, dec)); 209 - return dec; 210 - } 211 - 212 194 static inline void arch_atomic64_and(long i, atomic64_t *v) 213 195 { 214 196 asm volatile(LOCK_PREFIX "andq %1,%0"
+3
include/asm-generic/atomic-instrumented.h
··· 243 243 } 244 244 #endif 245 245 246 + #ifdef arch_atomic64_dec_if_positive 247 + #define atomic64_dec_if_positive atomic64_dec_if_positive 246 248 static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v) 247 249 { 248 250 kasan_check_write(v, sizeof(*v)); 249 251 return arch_atomic64_dec_if_positive(v); 250 252 } 253 + #endif 251 254 252 255 #ifdef arch_atomic_dec_and_test 253 256 #define atomic_dec_and_test atomic_dec_and_test
+1
include/asm-generic/atomic64.h
··· 51 51 #undef ATOMIC64_OP 52 52 53 53 extern long long atomic64_dec_if_positive(atomic64_t *v); 54 + #define atomic64_dec_if_positive atomic64_dec_if_positive 54 55 extern long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n); 55 56 extern long long atomic64_xchg(atomic64_t *v, long long new); 56 57 extern long long atomic64_fetch_add_unless(atomic64_t *v, long long a, long long u);
+73 -24
include/linux/atomic.h
··· 683 683 #endif 684 684 685 685 #ifndef atomic_inc_unless_negative 686 - static inline bool atomic_inc_unless_negative(atomic_t *p) 686 + static inline bool atomic_inc_unless_negative(atomic_t *v) 687 687 { 688 - int v, v1; 689 - for (v = 0; v >= 0; v = v1) { 690 - v1 = atomic_cmpxchg(p, v, v + 1); 691 - if (likely(v1 == v)) 692 - return true; 693 - } 694 - return false; 688 + int c = atomic_read(v); 689 + 690 + do { 691 + if (unlikely(c < 0)) 692 + return false; 693 + } while (!atomic_try_cmpxchg(v, &c, c + 1)); 694 + 695 + return true; 695 696 } 696 697 #endif 697 698 698 699 #ifndef atomic_dec_unless_positive 699 - static inline bool atomic_dec_unless_positive(atomic_t *p) 700 + static inline bool atomic_dec_unless_positive(atomic_t *v) 700 701 { 701 - int v, v1; 702 - for (v = 0; v <= 0; v = v1) { 703 - v1 = atomic_cmpxchg(p, v, v - 1); 704 - if (likely(v1 == v)) 705 - return true; 706 - } 707 - return false; 702 + int c = atomic_read(v); 703 + 704 + do { 705 + if (unlikely(c > 0)) 706 + return false; 707 + } while (!atomic_try_cmpxchg(v, &c, c - 1)); 708 + 709 + return true; 708 710 } 709 711 #endif 710 712 ··· 720 718 #ifndef atomic_dec_if_positive 721 719 static inline int atomic_dec_if_positive(atomic_t *v) 722 720 { 723 - int c, old, dec; 724 - c = atomic_read(v); 725 - for (;;) { 721 + int dec, c = atomic_read(v); 722 + 723 + do { 726 724 dec = c - 1; 727 725 if (unlikely(dec < 0)) 728 726 break; 729 - old = atomic_cmpxchg((v), c, dec); 730 - if (likely(old == c)) 731 - break; 732 - c = old; 733 - } 727 + } while (!atomic_try_cmpxchg(v, &c, dec)); 728 + 734 729 return dec; 735 730 } 736 731 #endif ··· 1286 1287 static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v) 1287 1288 { 1288 1289 return atomic64_fetch_and_release(~i, v); 1290 + } 1291 + #endif 1292 + 1293 + #ifndef atomic64_inc_unless_negative 1294 + static inline bool atomic64_inc_unless_negative(atomic64_t *v) 1295 + { 1296 + long long c = atomic64_read(v); 1297 + 
1298 + do { 1299 + if (unlikely(c < 0)) 1300 + return false; 1301 + } while (!atomic64_try_cmpxchg(v, &c, c + 1)); 1302 + 1303 + return true; 1304 + } 1305 + #endif 1306 + 1307 + #ifndef atomic64_dec_unless_positive 1308 + static inline bool atomic64_dec_unless_positive(atomic64_t *v) 1309 + { 1310 + long long c = atomic64_read(v); 1311 + 1312 + do { 1313 + if (unlikely(c > 0)) 1314 + return false; 1315 + } while (!atomic64_try_cmpxchg(v, &c, c - 1)); 1316 + 1317 + return true; 1318 + } 1319 + #endif 1320 + 1321 + /* 1322 + * atomic64_dec_if_positive - decrement by 1 if old value positive 1323 + * @v: pointer of type atomic64_t 1324 + * 1325 + * The function returns the old value of *v minus 1, even if 1326 + * the atomic64 variable, v, was not decremented. 1327 + */ 1328 + #ifndef atomic64_dec_if_positive 1329 + static inline long long atomic64_dec_if_positive(atomic64_t *v) 1330 + { 1331 + long long dec, c = atomic64_read(v); 1332 + 1333 + do { 1334 + dec = c - 1; 1335 + if (unlikely(dec < 0)) 1336 + break; 1337 + } while (!atomic64_try_cmpxchg(v, &c, dec)); 1338 + 1339 + return dec; 1289 1340 } 1290 1341 #endif 1291 1342