asm-generic: make atomic_add_unless a function

atomic_add_unless is a macro, so bad things happen if the caller defines
a local variable named c, the same name as the local variable declared
inside the macro. Convert atomic_add_unless to a static inline function
to avoid the name clash. (The bug was triggered by
net/ipv4/netfilter/ipt_CLUSTERIP.c, where clusterip_config_find_get
calls atomic_inc_not_zero.)

Signed-off-by: Mathieu Lacage <mathieu.lacage@inria.fr>
Signed-off-by: Arnd Bergmann <arnd@arndb.de>
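
A minimal user-space sketch of the clash (illustrative only, not kernel
code: the names config, refcount, inc_not_zero and get_ref are made up,
and atomicity is ignored because only the macro scoping matters here):

struct config {
	int refcount;			/* stand-in for atomic_t */
};

/* simplified stand-in for the old statement-expression macro */
#define inc_not_zero(v)			\
({					\
	int c;				\
	c = *(v);			\
	if (c != 0)			\
		*(v) = c + 1;		\
	c != 0;				\
})

/* the same operation written as a function, as this patch does */
static inline int inc_not_zero_fn(int *v)
{
	int c = *v;

	if (c != 0)
		*v = c + 1;
	return c != 0;
}

int get_ref(struct config *c)	/* caller's variable is also named "c" */
{
	/*
	 * inc_not_zero(&c->refcount) would expand to a block that begins
	 * with "int c;", so the "c" in "c->refcount" then refers to the
	 * macro's int and the expansion fails to compile.  The function
	 * keeps its local "c" in a separate scope, so this call is fine:
	 */
	return inc_not_zero_fn(&c->refcount);
}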

Authored by Mathieu Lacage, committed by Arnd Bergmann (8b9d4069, 6b0cd00b)

+17 -17
include/asm-generic/atomic.h
···
 #define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
 #define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
 
-#define atomic_add_unless(v, a, u)				\
-({								\
-	int c, old;						\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
+#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
+
+#define cmpxchg_local(ptr, o, n)				       \
+	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+			(unsigned long)(n), sizeof(*(ptr))))
+
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
+		c = old;
+	return c != u;
+}
 
 #define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
 
···
 	*addr &= mask;
 	raw_local_irq_restore(flags);
 }
-
-#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
-#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
-
-#define cmpxchg_local(ptr, o, n)				       \
-	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
-			(unsigned long)(n), sizeof(*(ptr))))
-
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
 
 /* Assume that atomic operations are already serializing */
 #define smp_mb__before_atomic_dec()	barrier()
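
For reference, a user-space model of the new helper's behaviour (a
sketch, not kernel code: atomic_t is redefined locally and a GCC __sync
builtin stands in for the kernel's cmpxchg machinery). It shows that the
function keeps the macro's return convention: non-zero when the add was
performed, zero when the value already equalled u, so callers such as
atomic_inc_not_zero need no change.

#include <stdio.h>

/* local stand-in for the kernel's atomic_t */
typedef struct { volatile int counter; } atomic_t;

static inline int atomic_read(const atomic_t *v)
{
	return v->counter;
}

/* compare-and-swap on the counter; returns the value previously stored */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return __sync_val_compare_and_swap(&v->counter, old, new);
}

/* same body as the new static inline in the patch */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c != u;
}

#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

int main(void)
{
	atomic_t refcount = { 0 };

	printf("%d\n", atomic_inc_not_zero(&refcount));	/* 0: counter was zero */
	refcount.counter = 1;
	printf("%d\n", atomic_inc_not_zero(&refcount));	/* 1: counter is now 2 */
	return 0;
}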