Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

atomic: Provide atomic_{or,xor,and}

Implement atomic logic ops -- atomic_{or,xor,and}.

These will replace the atomic_{set,clear}_mask functions that are
available on some archs.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>

Authored by Peter Zijlstra; committed by Thomas Gleixner.
e6942b7d 2957c035

+19 -68
-1
arch/alpha/include/asm/atomic.h
··· 110 110 ATOMIC_OPS(add) 111 111 ATOMIC_OPS(sub) 112 112 113 - #define CONFIG_ARCH_HAS_ATOMIC_OR 114 113 #define atomic_andnot atomic_andnot 115 114 #define atomic64_andnot atomic64_andnot 116 115
-1
arch/arc/include/asm/atomic.h
··· 144 144 ATOMIC_OPS(add, +=, add) 145 145 ATOMIC_OPS(sub, -=, sub) 146 146 147 - #define CONFIG_ARCH_HAS_ATOMIC_OR 148 147 #define atomic_andnot atomic_andnot 149 148 150 149 ATOMIC_OP(and, &=, and)
-1
arch/arm/include/asm/atomic.h
··· 194 194 ATOMIC_OPS(add, +=, add) 195 195 ATOMIC_OPS(sub, -=, sub) 196 196 197 - #define CONFIG_ARCH_HAS_ATOMIC_OR 198 197 #define atomic_andnot atomic_andnot 199 198 200 199 ATOMIC_OP(and, &=, and)
-1
arch/arm64/include/asm/atomic.h
··· 85 85 ATOMIC_OPS(add, add) 86 86 ATOMIC_OPS(sub, sub) 87 87 88 - #define CONFIG_ARCH_HAS_ATOMIC_OR 89 88 #define atomic_andnot atomic_andnot 90 89 91 90 ATOMIC_OP(and, and)
-2
arch/avr32/include/asm/atomic.h
··· 51 51 (void)__atomic_##op##_return(i, v); \ 52 52 } 53 53 54 - #define CONFIG_ARCH_HAS_ATOMIC_OR 55 - 56 54 ATOMIC_OP(and, and) 57 55 ATOMIC_OP(or, or) 58 56 ATOMIC_OP(xor, eor)
-2
arch/blackfin/include/asm/atomic.h
··· 28 28 #define atomic_add_return(i, v) __raw_atomic_add_asm(&(v)->counter, i) 29 29 #define atomic_sub_return(i, v) __raw_atomic_add_asm(&(v)->counter, -(i)) 30 30 31 - #define CONFIG_ARCH_HAS_ATOMIC_OR 32 - 33 31 #define atomic_or(i, v) (void)__raw_atomic_or_asm(&(v)->counter, i) 34 32 #define atomic_and(i, v) (void)__raw_atomic_and_asm(&(v)->counter, i) 35 33 #define atomic_xor(i, v) (void)__raw_atomic_xor_asm(&(v)->counter, i)
-2
arch/frv/include/asm/atomic.h
··· 192 192 (void)__atomic64_fetch_##op(i, &v->counter); \ 193 193 } 194 194 195 - #define CONFIG_ARCH_HAS_ATOMIC_OR 196 - 197 195 ATOMIC_OP(or) 198 196 ATOMIC_OP(and) 199 197 ATOMIC_OP(xor)
-2
arch/h8300/include/asm/atomic.h
··· 41 41 ATOMIC_OP_RETURN(add, +=) 42 42 ATOMIC_OP_RETURN(sub, -=) 43 43 44 - #define CONFIG_ARCH_HAS_ATOMIC_OR 45 - 46 44 ATOMIC_OP(and, &=) 47 45 ATOMIC_OP(or, |=) 48 46 ATOMIC_OP(xor, ^=)
-2
arch/hexagon/include/asm/atomic.h
··· 132 132 ATOMIC_OPS(add) 133 133 ATOMIC_OPS(sub) 134 134 135 - #define CONFIG_ARCH_HAS_ATOMIC_OR 136 - 137 135 ATOMIC_OP(and) 138 136 ATOMIC_OP(or) 139 137 ATOMIC_OP(xor)
-2
arch/ia64/include/asm/atomic.h
··· 69 69 : ia64_atomic_sub(__ia64_asr_i, v); \ 70 70 }) 71 71 72 - #define CONFIG_ARCH_HAS_ATOMIC_OR 73 - 74 72 ATOMIC_OP(and, &) 75 73 ATOMIC_OP(or, |) 76 74 ATOMIC_OP(xor, ^)
-2
arch/m32r/include/asm/atomic.h
··· 94 94 ATOMIC_OPS(add) 95 95 ATOMIC_OPS(sub) 96 96 97 - #define CONFIG_ARCH_HAS_ATOMIC_OR 98 - 99 97 ATOMIC_OP(and) 100 98 ATOMIC_OP(or) 101 99 ATOMIC_OP(xor)
-2
arch/m68k/include/asm/atomic.h
··· 77 77 ATOMIC_OPS(add, +=, add) 78 78 ATOMIC_OPS(sub, -=, sub) 79 79 80 - #define CONFIG_ARCH_HAS_ATOMIC_OR 81 - 82 80 ATOMIC_OP(and, &=, and) 83 81 ATOMIC_OP(or, |=, or) 84 82 ATOMIC_OP(xor, ^=, eor)
-2
arch/metag/include/asm/atomic_lnkget.h
··· 74 74 ATOMIC_OPS(add) 75 75 ATOMIC_OPS(sub) 76 76 77 - #define CONFIG_ARCH_HAS_ATOMIC_OR 78 - 79 77 ATOMIC_OP(and) 80 78 ATOMIC_OP(or) 81 79 ATOMIC_OP(xor)
-2
arch/mips/include/asm/atomic.h
··· 137 137 ATOMIC_OPS(add, +=, addu) 138 138 ATOMIC_OPS(sub, -=, subu) 139 139 140 - #define CONFIG_ARCH_HAS_ATOMIC_OR 141 - 142 140 ATOMIC_OP(and, &=, and) 143 141 ATOMIC_OP(or, |=, or) 144 142 ATOMIC_OP(xor, ^=, xor)
-2
arch/mn10300/include/asm/atomic.h
··· 89 89 ATOMIC_OPS(add) 90 90 ATOMIC_OPS(sub) 91 91 92 - #define CONFIG_ARCH_HAS_ATOMIC_OR 93 - 94 92 ATOMIC_OP(and) 95 93 ATOMIC_OP(or) 96 94 ATOMIC_OP(xor)
-2
arch/parisc/include/asm/atomic.h
··· 126 126 ATOMIC_OPS(add, +=) 127 127 ATOMIC_OPS(sub, -=) 128 128 129 - #define CONFIG_ARCH_HAS_ATOMIC_OR 130 - 131 129 ATOMIC_OP(and, &=) 132 130 ATOMIC_OP(or, |=) 133 131 ATOMIC_OP(xor, ^=)
-2
arch/powerpc/include/asm/atomic.h
··· 67 67 ATOMIC_OPS(add, add) 68 68 ATOMIC_OPS(sub, subf) 69 69 70 - #define CONFIG_ARCH_HAS_ATOMIC_OR 71 - 72 70 ATOMIC_OP(and, and) 73 71 ATOMIC_OP(or, or) 74 72 ATOMIC_OP(xor, xor)
-2
arch/s390/include/asm/atomic.h
··· 282 282 __ATOMIC64_LOOP(v, i, __ATOMIC64_##OP, __ATOMIC64_NO_BARRIER); \ 283 283 } 284 284 285 - #define CONFIG_ARCH_HAS_ATOMIC_OR 286 - 287 285 ATOMIC64_OP(and, AND) 288 286 ATOMIC64_OP(or, OR) 289 287 ATOMIC64_OP(xor, XOR)
-2
arch/sh/include/asm/atomic-grb.h
··· 48 48 ATOMIC_OPS(add) 49 49 ATOMIC_OPS(sub) 50 50 51 - #define CONFIG_ARCH_HAS_ATOMIC_OR 52 - 53 51 ATOMIC_OP(and) 54 52 ATOMIC_OP(or) 55 53 ATOMIC_OP(xor)
-2
arch/sparc/include/asm/atomic_32.h
··· 17 17 #include <asm/barrier.h> 18 18 #include <asm-generic/atomic64.h> 19 19 20 - #define CONFIG_ARCH_HAS_ATOMIC_OR 21 - 22 20 #define ATOMIC_INIT(i) { (i) } 23 21 24 22 int atomic_add_return(int, atomic_t *);
-2
arch/sparc/include/asm/atomic_64.h
··· 33 33 ATOMIC_OPS(add) 34 34 ATOMIC_OPS(sub) 35 35 36 - #define CONFIG_ARCH_HAS_ATOMIC_OR 37 - 38 36 ATOMIC_OP(and) 39 37 ATOMIC_OP(or) 40 38 ATOMIC_OP(xor)
-2
arch/tile/include/asm/atomic_32.h
··· 41 41 _atomic_##op((unsigned long *)&v->counter, i); \ 42 42 } 43 43 44 - #define CONFIG_ARCH_HAS_ATOMIC_OR 45 - 46 44 ATOMIC_OP(and) 47 45 ATOMIC_OP(or) 48 46 ATOMIC_OP(xor)
-2
arch/tile/include/asm/atomic_64.h
··· 58 58 return oldval; 59 59 } 60 60 61 - #define CONFIG_ARCH_HAS_ATOMIC_OR 62 - 63 61 static inline void atomic_and(int i, atomic_t *v) 64 62 { 65 63 __insn_fetchand4((void *)&v->counter, i);
-2
arch/x86/include/asm/atomic.h
··· 191 191 : "memory"); \ 192 192 } 193 193 194 - #define CONFIG_ARCH_HAS_ATOMIC_OR 195 - 196 194 ATOMIC_OP(and) 197 195 ATOMIC_OP(or) 198 196 ATOMIC_OP(xor)
-2
arch/xtensa/include/asm/atomic.h
··· 145 145 ATOMIC_OPS(add) 146 146 ATOMIC_OPS(sub) 147 147 148 - #define CONFIG_ARCH_HAS_ATOMIC_OR 149 - 150 148 ATOMIC_OP(and) 151 149 ATOMIC_OP(or) 152 150 ATOMIC_OP(xor)
+12 -9
include/asm-generic/atomic.h
··· 102 102 ATOMIC_OP(and, &) 103 103 #endif 104 104 105 - #ifndef atomic_clear_mask 106 - #define atomic_clear_mask(i, v) atomic_and(~(i), (v)) 107 - #endif 108 - 109 105 #ifndef atomic_or 110 - #ifndef CONFIG_ARCH_HAS_ATOMIC_OR 111 - #define CONFIG_ARCH_HAS_ATOMIC_OR 112 - #endif 113 106 ATOMIC_OP(or, |) 114 107 #endif 115 108 116 - #ifndef atomic_set_mask 117 - #define atomic_set_mask(i, v) atomic_or((i), (v)) 109 + #ifndef atomic_xor 110 + ATOMIC_OP(xor, ^) 118 111 #endif 119 112 120 113 #undef ATOMIC_OP_RETURN 121 114 #undef ATOMIC_OP 115 + 116 + static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v) 117 + { 118 + atomic_and(~mask, v); 119 + } 120 + 121 + static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v) 122 + { 123 + atomic_or(mask, v); 124 + } 122 125 123 126 /* 124 127 * Atomic operations that C can't guarantee us. Useful for
+4
include/asm-generic/atomic64.h
··· 32 32 ATOMIC64_OPS(add) 33 33 ATOMIC64_OPS(sub) 34 34 35 + ATOMIC64_OP(and) 36 + ATOMIC64_OP(or) 37 + ATOMIC64_OP(xor) 38 + 35 39 #undef ATOMIC64_OPS 36 40 #undef ATOMIC64_OP_RETURN 37 41 #undef ATOMIC64_OP
-13
include/linux/atomic.h
··· 111 111 } 112 112 #endif 113 113 114 - #ifndef CONFIG_ARCH_HAS_ATOMIC_OR 115 - static inline void atomic_or(int i, atomic_t *v) 116 - { 117 - int old; 118 - int new; 119 - 120 - do { 121 - old = atomic_read(v); 122 - new = old | i; 123 - } while (atomic_cmpxchg(v, old, new) != old); 124 - } 125 - #endif /* #ifndef CONFIG_ARCH_HAS_ATOMIC_OR */ 126 - 127 114 #include <asm-generic/atomic-long.h> 128 115 #ifdef CONFIG_GENERIC_ATOMIC64 129 116 #include <asm-generic/atomic64.h>
+3
lib/atomic64.c
··· 102 102 103 103 ATOMIC64_OPS(add, +=) 104 104 ATOMIC64_OPS(sub, -=) 105 + ATOMIC64_OP(and, &=) 106 + ATOMIC64_OP(or, |=) 107 + ATOMIC64_OP(xor, ^=) 105 108 106 109 #undef ATOMIC64_OPS 107 110 #undef ATOMIC64_OP_RETURN