Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

atomic, arch: Audit atomic_{read,set}()

This patch makes sure that atomic_{read,set}() are at least
{READ,WRITE}_ONCE().

We already had the 'requirement' that atomic_read() should use
ACCESS_ONCE(), and most archs had this, but a few were lacking.
All are now converted to use READ_ONCE().

And, by a symmetry and general paranoia argument, upgrade atomic_set()
to use WRITE_ONCE().

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Dmitry Vyukov <dvyukov@google.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: james.hogan@imgtec.com
Cc: linux-kernel@vger.kernel.org
Cc: oleg@redhat.com
Cc: will.deacon@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>

Authored by Peter Zijlstra; committed by Ingo Molnar.

commit 62e8a325 (parent 90fe6514)

+53 -53
+4 -4
arch/alpha/include/asm/atomic.h
··· 17 17 #define ATOMIC_INIT(i) { (i) } 18 18 #define ATOMIC64_INIT(i) { (i) } 19 19 20 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 21 - #define atomic64_read(v) ACCESS_ONCE((v)->counter) 20 + #define atomic_read(v) READ_ONCE((v)->counter) 21 + #define atomic64_read(v) READ_ONCE((v)->counter) 22 22 23 - #define atomic_set(v,i) ((v)->counter = (i)) 24 - #define atomic64_set(v,i) ((v)->counter = (i)) 23 + #define atomic_set(v,i) WRITE_ONCE((v)->counter, (i)) 24 + #define atomic64_set(v,i) WRITE_ONCE((v)->counter, (i)) 25 25 26 26 /* 27 27 * To get proper branch prediction for the main line, we must branch
+4 -4
arch/arc/include/asm/atomic.h
··· 17 17 #include <asm/barrier.h> 18 18 #include <asm/smp.h> 19 19 20 - #define atomic_read(v) ((v)->counter) 20 + #define atomic_read(v) READ_ONCE((v)->counter) 21 21 22 22 #ifdef CONFIG_ARC_HAS_LLSC 23 23 24 - #define atomic_set(v, i) (((v)->counter) = (i)) 24 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 25 25 26 26 #ifdef CONFIG_ARC_STAR_9000923308 27 27 ··· 107 107 #ifndef CONFIG_SMP 108 108 109 109 /* violating atomic_xxx API locking protocol in UP for optimization sake */ 110 - #define atomic_set(v, i) (((v)->counter) = (i)) 110 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 111 111 112 112 #else 113 113 ··· 125 125 unsigned long flags; 126 126 127 127 atomic_ops_lock(flags); 128 - v->counter = i; 128 + WRITE_ONCE(v->counter, i); 129 129 atomic_ops_unlock(flags); 130 130 } 131 131
+2 -2
arch/arm/include/asm/atomic.h
··· 27 27 * strex/ldrex monitor on some implementations. The reason we can use it for 28 28 * atomic_set() is the clrex or dummy strex done on every exception return. 29 29 */ 30 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 31 - #define atomic_set(v,i) (((v)->counter) = (i)) 30 + #define atomic_read(v) READ_ONCE((v)->counter) 31 + #define atomic_set(v,i) WRITE_ONCE(((v)->counter), (i)) 32 32 33 33 #if __LINUX_ARM_ARCH__ >= 6 34 34
+1 -1
arch/arm64/include/asm/atomic.h
··· 54 54 #define ATOMIC_INIT(i) { (i) } 55 55 56 56 #define atomic_read(v) READ_ONCE((v)->counter) 57 - #define atomic_set(v, i) (((v)->counter) = (i)) 57 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 58 58 #define atomic_xchg(v, new) xchg(&((v)->counter), (new)) 59 59 #define atomic_cmpxchg(v, old, new) cmpxchg(&((v)->counter), (old), (new)) 60 60
+2 -2
arch/avr32/include/asm/atomic.h
··· 19 19 20 20 #define ATOMIC_INIT(i) { (i) } 21 21 22 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 23 - #define atomic_set(v, i) (((v)->counter) = i) 22 + #define atomic_read(v) READ_ONCE((v)->counter) 23 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 24 24 25 25 #define ATOMIC_OP_RETURN(op, asm_op, asm_con) \ 26 26 static inline int __atomic_##op##_return(int i, atomic_t *v) \
+2 -2
arch/frv/include/asm/atomic.h
··· 32 32 */ 33 33 34 34 #define ATOMIC_INIT(i) { (i) } 35 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 36 - #define atomic_set(v, i) (((v)->counter) = (i)) 35 + #define atomic_read(v) READ_ONCE((v)->counter) 36 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 37 37 38 38 static inline int atomic_inc_return(atomic_t *v) 39 39 {
+2 -2
arch/h8300/include/asm/atomic.h
··· 11 11 12 12 #define ATOMIC_INIT(i) { (i) } 13 13 14 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 15 - #define atomic_set(v, i) (((v)->counter) = i) 14 + #define atomic_read(v) READ_ONCE((v)->counter) 15 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 16 16 17 17 #include <linux/kernel.h> 18 18
+1 -1
arch/hexagon/include/asm/atomic.h
··· 48 48 * 49 49 * Assumes all word reads on our architecture are atomic. 50 50 */ 51 - #define atomic_read(v) ((v)->counter) 51 + #define atomic_read(v) READ_ONCE((v)->counter) 52 52 53 53 /** 54 54 * atomic_xchg - atomic
+4 -4
arch/ia64/include/asm/atomic.h
··· 21 21 #define ATOMIC_INIT(i) { (i) } 22 22 #define ATOMIC64_INIT(i) { (i) } 23 23 24 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 25 - #define atomic64_read(v) ACCESS_ONCE((v)->counter) 24 + #define atomic_read(v) READ_ONCE((v)->counter) 25 + #define atomic64_read(v) READ_ONCE((v)->counter) 26 26 27 - #define atomic_set(v,i) (((v)->counter) = (i)) 28 - #define atomic64_set(v,i) (((v)->counter) = (i)) 27 + #define atomic_set(v,i) WRITE_ONCE(((v)->counter), (i)) 28 + #define atomic64_set(v,i) WRITE_ONCE(((v)->counter), (i)) 29 29 30 30 #define ATOMIC_OP(op, c_op) \ 31 31 static __inline__ int \
+2 -2
arch/m32r/include/asm/atomic.h
··· 28 28 * 29 29 * Atomically reads the value of @v. 30 30 */ 31 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 31 + #define atomic_read(v) READ_ONCE((v)->counter) 32 32 33 33 /** 34 34 * atomic_set - set atomic variable ··· 37 37 * 38 38 * Atomically sets the value of @v to @i. 39 39 */ 40 - #define atomic_set(v,i) (((v)->counter) = (i)) 40 + #define atomic_set(v,i) WRITE_ONCE(((v)->counter), (i)) 41 41 42 42 #ifdef CONFIG_CHIP_M32700_TS1 43 43 #define __ATOMIC_CLOBBER , "r4"
+2 -2
arch/m68k/include/asm/atomic.h
··· 17 17 18 18 #define ATOMIC_INIT(i) { (i) } 19 19 20 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 21 - #define atomic_set(v, i) (((v)->counter) = i) 20 + #define atomic_read(v) READ_ONCE((v)->counter) 21 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 22 22 23 23 /* 24 24 * The ColdFire parts cannot do some immediate to memory operations,
+1 -1
arch/metag/include/asm/atomic_lnkget.h
··· 3 3 4 4 #define ATOMIC_INIT(i) { (i) } 5 5 6 - #define atomic_set(v, i) ((v)->counter = (i)) 6 + #define atomic_set(v, i) WRITE_ONCE((v)->counter, (i)) 7 7 8 8 #include <linux/compiler.h> 9 9
+1 -1
arch/metag/include/asm/atomic_lock1.h
··· 10 10 11 11 static inline int atomic_read(const atomic_t *v) 12 12 { 13 - return (v)->counter; 13 + return READ_ONCE((v)->counter); 14 14 } 15 15 16 16 /*
+4 -4
arch/mips/include/asm/atomic.h
··· 30 30 * 31 31 * Atomically reads the value of @v. 32 32 */ 33 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 33 + #define atomic_read(v) READ_ONCE((v)->counter) 34 34 35 35 /* 36 36 * atomic_set - set atomic variable ··· 39 39 * 40 40 * Atomically sets the value of @v to @i. 41 41 */ 42 - #define atomic_set(v, i) ((v)->counter = (i)) 42 + #define atomic_set(v, i) WRITE_ONCE((v)->counter, (i)) 43 43 44 44 #define ATOMIC_OP(op, c_op, asm_op) \ 45 45 static __inline__ void atomic_##op(int i, atomic_t * v) \ ··· 315 315 * @v: pointer of type atomic64_t 316 316 * 317 317 */ 318 - #define atomic64_read(v) ACCESS_ONCE((v)->counter) 318 + #define atomic64_read(v) READ_ONCE((v)->counter) 319 319 320 320 /* 321 321 * atomic64_set - set atomic variable 322 322 * @v: pointer of type atomic64_t 323 323 * @i: required value 324 324 */ 325 - #define atomic64_set(v, i) ((v)->counter = (i)) 325 + #define atomic64_set(v, i) WRITE_ONCE((v)->counter, (i)) 326 326 327 327 #define ATOMIC64_OP(op, c_op, asm_op) \ 328 328 static __inline__ void atomic64_##op(long i, atomic64_t * v) \
+2 -2
arch/mn10300/include/asm/atomic.h
··· 34 34 * 35 35 * Atomically reads the value of @v. Note that the guaranteed 36 36 */ 37 - #define atomic_read(v) (ACCESS_ONCE((v)->counter)) 37 + #define atomic_read(v) READ_ONCE((v)->counter) 38 38 39 39 /** 40 40 * atomic_set - set atomic variable ··· 43 43 * 44 44 * Atomically sets the value of @v to @i. Note that the guaranteed 45 45 */ 46 - #define atomic_set(v, i) (((v)->counter) = (i)) 46 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 47 47 48 48 #define ATOMIC_OP(op) \ 49 49 static inline void atomic_##op(int i, atomic_t *v) \
+1 -1
arch/parisc/include/asm/atomic.h
··· 67 67 68 68 static __inline__ int atomic_read(const atomic_t *v) 69 69 { 70 - return ACCESS_ONCE((v)->counter); 70 + return READ_ONCE((v)->counter); 71 71 } 72 72 73 73 /* exported interface */
+2 -2
arch/sh/include/asm/atomic.h
··· 14 14 15 15 #define ATOMIC_INIT(i) { (i) } 16 16 17 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 18 - #define atomic_set(v,i) ((v)->counter = (i)) 17 + #define atomic_read(v) READ_ONCE((v)->counter) 18 + #define atomic_set(v,i) WRITE_ONCE((v)->counter, (i)) 19 19 20 20 #if defined(CONFIG_GUSA_RB) 21 21 #include <asm/atomic-grb.h>
+4 -4
arch/sparc/include/asm/atomic_64.h
··· 14 14 #define ATOMIC_INIT(i) { (i) } 15 15 #define ATOMIC64_INIT(i) { (i) } 16 16 17 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 18 - #define atomic64_read(v) ACCESS_ONCE((v)->counter) 17 + #define atomic_read(v) READ_ONCE((v)->counter) 18 + #define atomic64_read(v) READ_ONCE((v)->counter) 19 19 20 - #define atomic_set(v, i) (((v)->counter) = i) 21 - #define atomic64_set(v, i) (((v)->counter) = i) 20 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 21 + #define atomic64_set(v, i) WRITE_ONCE(((v)->counter), (i)) 22 22 23 23 #define ATOMIC_OP(op) \ 24 24 void atomic_##op(int, atomic_t *); \
+1 -1
arch/tile/include/asm/atomic.h
··· 34 34 */ 35 35 static inline int atomic_read(const atomic_t *v) 36 36 { 37 - return ACCESS_ONCE(v->counter); 37 + return READ_ONCE(v->counter); 38 38 } 39 39 40 40 /**
+3 -3
arch/tile/include/asm/atomic_64.h
··· 24 24 25 25 /* First, the 32-bit atomic ops that are "real" on our 64-bit platform. */ 26 26 27 - #define atomic_set(v, i) ((v)->counter = (i)) 27 + #define atomic_set(v, i) WRITE_ONCE((v)->counter, (i)) 28 28 29 29 /* 30 30 * The smp_mb() operations throughout are to support the fact that ··· 82 82 83 83 #define ATOMIC64_INIT(i) { (i) } 84 84 85 - #define atomic64_read(v) ((v)->counter) 86 - #define atomic64_set(v, i) ((v)->counter = (i)) 85 + #define atomic64_read(v) READ_ONCE((v)->counter) 86 + #define atomic64_set(v, i) WRITE_ONCE((v)->counter, (i)) 87 87 88 88 static inline void atomic64_add(long i, atomic64_t *v) 89 89 {
+2 -2
arch/x86/include/asm/atomic.h
··· 24 24 */ 25 25 static __always_inline int atomic_read(const atomic_t *v) 26 26 { 27 - return ACCESS_ONCE((v)->counter); 27 + return READ_ONCE((v)->counter); 28 28 } 29 29 30 30 /** ··· 36 36 */ 37 37 static __always_inline void atomic_set(atomic_t *v, int i) 38 38 { 39 - v->counter = i; 39 + WRITE_ONCE(v->counter, i); 40 40 } 41 41 42 42 /**
+2 -2
arch/x86/include/asm/atomic64_64.h
··· 18 18 */ 19 19 static inline long atomic64_read(const atomic64_t *v) 20 20 { 21 - return ACCESS_ONCE((v)->counter); 21 + return READ_ONCE((v)->counter); 22 22 } 23 23 24 24 /** ··· 30 30 */ 31 31 static inline void atomic64_set(atomic64_t *v, long i) 32 32 { 33 - v->counter = i; 33 + WRITE_ONCE(v->counter, i); 34 34 } 35 35 36 36 /**
+2 -2
arch/xtensa/include/asm/atomic.h
··· 47 47 * 48 48 * Atomically reads the value of @v. 49 49 */ 50 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 50 + #define atomic_read(v) READ_ONCE((v)->counter) 51 51 52 52 /** 53 53 * atomic_set - set atomic variable ··· 56 56 * 57 57 * Atomically sets the value of @v to @i. 58 58 */ 59 - #define atomic_set(v,i) ((v)->counter = (i)) 59 + #define atomic_set(v,i) WRITE_ONCE((v)->counter, (i)) 60 60 61 61 #if XCHAL_HAVE_S32C1I 62 62 #define ATOMIC_OP(op) \
+2 -2
include/asm-generic/atomic.h
··· 127 127 * Atomically reads the value of @v. 128 128 */ 129 129 #ifndef atomic_read 130 - #define atomic_read(v) ACCESS_ONCE((v)->counter) 130 + #define atomic_read(v) READ_ONCE((v)->counter) 131 131 #endif 132 132 133 133 /** ··· 137 137 * 138 138 * Atomically sets the value of @v to @i. 139 139 */ 140 - #define atomic_set(v, i) (((v)->counter) = (i)) 140 + #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i)) 141 141 142 142 #include <linux/irqflags.h> 143 143