alpha: no need to include asm/xchg.h twice

We used to generate different helpers for local and full
{cmp,}xchg(); these days the barriers are in arch_{cmp,}xchg()
instead, and the generated helpers are identical for the local and
full cases. There is no need for those parametrized includes of
asm/xchg.h - we might as well insert its contents directly into
asm/cmpxchg.h and do it only once.

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>

Authored by Al Viro, committed by Arnd Bergmann (7193b5c4, 47ac09b9)
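For context, the scheme being removed relied on token pasting: asm/cmpxchg.h defined the ____xchg()/____cmpxchg() macros, pulled in asm/xchg.h once to stamp out the _local helpers, then redefined the macros and pulled it in a second time for the fully ordered ones. A condensed sketch of that old shape, reconstructed from the lines removed in the diff below:

#define ____xchg(type, args...)	__arch_xchg ## type ## _local(args)
#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
#include <asm/xchg.h>	/* first pass: generates __arch_xchg_u8_local() etc. */

/* ... xchg_local()/cmpxchg_local() wrappers ... */

#undef ____xchg
#undef ____cmpxchg
#define ____xchg(type, args...)	__arch_xchg ##type(args)
#define ____cmpxchg(type, args...)	__cmpxchg ##type(args)
#include <asm/xchg.h>	/* second pass: generates __arch_xchg_u8() etc. */

Since the barriers moved into the arch_{cmp,}xchg() wrappers, both passes expanded to identical code, which is what makes the single direct set of definitions below possible.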

+223 -262

arch/alpha/include/asm/cmpxchg.h  (+223 -16)
···
 #define _ALPHA_CMPXCHG_H
 
 /*
- * Atomic exchange routines.
+ * Atomic exchange.
+ * Since it can be used to implement critical sections
+ * it must clobber "memory" (also for interrupts in UP).
  */
 
-#define ____xchg(type, args...)	__arch_xchg ## type ## _local(args)
-#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
-#include <asm/xchg.h>
+static inline unsigned long
+____xchg_u8(volatile char *m, unsigned long val)
+{
+	unsigned long ret, tmp, addr64;
+
+	__asm__ __volatile__(
+	"	andnot	%4,7,%3\n"
+	"	insbl	%1,%4,%1\n"
+	"1:	ldq_l	%2,0(%3)\n"
+	"	extbl	%2,%4,%0\n"
+	"	mskbl	%2,%4,%2\n"
+	"	or	%1,%2,%2\n"
+	"	stq_c	%2,0(%3)\n"
+	"	beq	%2,2f\n"
+	".subsection 2\n"
+	"2:	br	1b\n"
+	".previous"
+	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
+	: "r" ((long)m), "1" (val) : "memory");
+
+	return ret;
+}
+
+static inline unsigned long
+____xchg_u16(volatile short *m, unsigned long val)
+{
+	unsigned long ret, tmp, addr64;
+
+	__asm__ __volatile__(
+	"	andnot	%4,7,%3\n"
+	"	inswl	%1,%4,%1\n"
+	"1:	ldq_l	%2,0(%3)\n"
+	"	extwl	%2,%4,%0\n"
+	"	mskwl	%2,%4,%2\n"
+	"	or	%1,%2,%2\n"
+	"	stq_c	%2,0(%3)\n"
+	"	beq	%2,2f\n"
+	".subsection 2\n"
+	"2:	br	1b\n"
+	".previous"
+	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
+	: "r" ((long)m), "1" (val) : "memory");
+
+	return ret;
+}
+
+static inline unsigned long
+____xchg_u32(volatile int *m, unsigned long val)
+{
+	unsigned long dummy;
+
+	__asm__ __volatile__(
+	"1:	ldl_l %0,%4\n"
+	"	bis $31,%3,%1\n"
+	"	stl_c %1,%2\n"
+	"	beq %1,2f\n"
+	".subsection 2\n"
+	"2:	br 1b\n"
+	".previous"
+	: "=&r" (val), "=&r" (dummy), "=m" (*m)
+	: "rI" (val), "m" (*m) : "memory");
+
+	return val;
+}
+
+static inline unsigned long
+____xchg_u64(volatile long *m, unsigned long val)
+{
+	unsigned long dummy;
+
+	__asm__ __volatile__(
+	"1:	ldq_l %0,%4\n"
+	"	bis $31,%3,%1\n"
+	"	stq_c %1,%2\n"
+	"	beq %1,2f\n"
+	".subsection 2\n"
+	"2:	br 1b\n"
+	".previous"
+	: "=&r" (val), "=&r" (dummy), "=m" (*m)
+	: "rI" (val), "m" (*m) : "memory");
+
+	return val;
+}
+
+/* This function doesn't exist, so you'll get a linker error
+   if something tries to do an invalid xchg(). */
+extern void __xchg_called_with_bad_pointer(void);
+
+static __always_inline unsigned long
+____xchg(volatile void *ptr, unsigned long x, int size)
+{
+	return
+		size == 1 ? ____xchg_u8(ptr, x) :
+		size == 2 ? ____xchg_u16(ptr, x) :
+		size == 4 ? ____xchg_u32(ptr, x) :
+		size == 8 ? ____xchg_u64(ptr, x) :
+			(__xchg_called_with_bad_pointer(), x);
+}
+
+/*
+ * Atomic compare and exchange. Compare OLD with MEM, if identical,
+ * store NEW in MEM. Return the initial value in MEM. Success is
+ * indicated by comparing RETURN with OLD.
+ */
+
+static inline unsigned long
+____cmpxchg_u8(volatile char *m, unsigned char old, unsigned char new)
+{
+	unsigned long prev, tmp, cmp, addr64;
+
+	__asm__ __volatile__(
+	"	andnot	%5,7,%4\n"
+	"	insbl	%1,%5,%1\n"
+	"1:	ldq_l	%2,0(%4)\n"
+	"	extbl	%2,%5,%0\n"
+	"	cmpeq	%0,%6,%3\n"
+	"	beq	%3,2f\n"
+	"	mskbl	%2,%5,%2\n"
+	"	or	%1,%2,%2\n"
+	"	stq_c	%2,0(%4)\n"
+	"	beq	%2,3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
+	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
+
+	return prev;
+}
+
+static inline unsigned long
+____cmpxchg_u16(volatile short *m, unsigned short old, unsigned short new)
+{
+	unsigned long prev, tmp, cmp, addr64;
+
+	__asm__ __volatile__(
+	"	andnot	%5,7,%4\n"
+	"	inswl	%1,%5,%1\n"
+	"1:	ldq_l	%2,0(%4)\n"
+	"	extwl	%2,%5,%0\n"
+	"	cmpeq	%0,%6,%3\n"
+	"	beq	%3,2f\n"
+	"	mskwl	%2,%5,%2\n"
+	"	or	%1,%2,%2\n"
+	"	stq_c	%2,0(%4)\n"
+	"	beq	%2,3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
+	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
+
+	return prev;
+}
+
+static inline unsigned long
+____cmpxchg_u32(volatile int *m, int old, int new)
+{
+	unsigned long prev, cmp;
+
+	__asm__ __volatile__(
+	"1:	ldl_l %0,%5\n"
+	"	cmpeq %0,%3,%1\n"
+	"	beq %1,2f\n"
+	"	mov %4,%1\n"
+	"	stl_c %1,%2\n"
+	"	beq %1,3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br 1b\n"
+	".previous"
+	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
+	: "r"((long) old), "r"(new), "m"(*m) : "memory");
+
+	return prev;
+}
+
+static inline unsigned long
+____cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
+{
+	unsigned long prev, cmp;
+
+	__asm__ __volatile__(
+	"1:	ldq_l %0,%5\n"
+	"	cmpeq %0,%3,%1\n"
+	"	beq %1,2f\n"
+	"	mov %4,%1\n"
+	"	stq_c %1,%2\n"
+	"	beq %1,3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br 1b\n"
+	".previous"
+	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
+	: "r"((long) old), "r"(new), "m"(*m) : "memory");
+
+	return prev;
+}
+
+/* This function doesn't exist, so you'll get a linker error
+   if something tries to do an invalid cmpxchg(). */
+extern void __cmpxchg_called_with_bad_pointer(void);
+
+static __always_inline unsigned long
+____cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
+	    int size)
+{
+	return
+		size == 1 ? ____cmpxchg_u8(ptr, old, new) :
+		size == 2 ? ____cmpxchg_u16(ptr, old, new) :
+		size == 4 ? ____cmpxchg_u32(ptr, old, new) :
+		size == 8 ? ____cmpxchg_u64(ptr, old, new) :
+			(__cmpxchg_called_with_bad_pointer(), old);
+}
 
 #define xchg_local(ptr, x)						\
 ({									\
 	__typeof__(*(ptr)) _x_ = (x);					\
-	(__typeof__(*(ptr))) __arch_xchg_local((ptr), (unsigned long)_x_,\
+	(__typeof__(*(ptr))) ____xchg((ptr), (unsigned long)_x_,	\
 				       sizeof(*(ptr)));			\
 })
 
···
 ({									\
 	__typeof__(*(ptr)) _o_ = (o);					\
 	__typeof__(*(ptr)) _n_ = (n);					\
-	(__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,	\
+	(__typeof__(*(ptr))) ____cmpxchg((ptr), (unsigned long)_o_,	\
 					  (unsigned long)_n_,		\
 					  sizeof(*(ptr)));		\
 })
···
 	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
 	cmpxchg_local((ptr), (o), (n));					\
 })
-
-#undef ____xchg
-#undef ____cmpxchg
-#define ____xchg(type, args...)	__arch_xchg ##type(args)
-#define ____cmpxchg(type, args...)	__cmpxchg ##type(args)
-#include <asm/xchg.h>
 
 /*
  * The leading and the trailing memory barriers guarantee that these
···
 	__typeof__(*(ptr)) _x_ = (x);					\
 	smp_mb();							\
 	__ret = (__typeof__(*(ptr)))					\
-		__arch_xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));	\
+		____xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));	\
 	smp_mb();							\
 	__ret;								\
 })
···
 	__typeof__(*(ptr)) _o_ = (o);					\
 	__typeof__(*(ptr)) _n_ = (n);					\
 	smp_mb();							\
-	__ret = (__typeof__(*(ptr))) __cmpxchg((ptr),			\
+	__ret = (__typeof__(*(ptr))) ____cmpxchg((ptr),			\
 		(unsigned long)_o_, (unsigned long)_n_, sizeof(*(ptr)));\
 	smp_mb();							\
 	__ret;								\
···
 	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
 	arch_cmpxchg((ptr), (o), (n));					\
 })
-
-#undef ____cmpxchg
 
 #endif /* _ALPHA_CMPXCHG_H */
arch/alpha/include/asm/xchg.h  (-246, file deleted)
···
-/* SPDX-License-Identifier: GPL-2.0 */
-#ifndef _ALPHA_CMPXCHG_H
-#error Do not include xchg.h directly!
-#else
-/*
- * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
- * except that local version do not have the expensive memory barrier.
- * So this file is included twice from asm/cmpxchg.h.
- */
-
-/*
- * Atomic exchange.
- * Since it can be used to implement critical sections
- * it must clobber "memory" (also for interrupts in UP).
- */
-
-static inline unsigned long
-____xchg(_u8, volatile char *m, unsigned long val)
-{
-	unsigned long ret, tmp, addr64;
-
-	__asm__ __volatile__(
-	"	andnot	%4,7,%3\n"
-	"	insbl	%1,%4,%1\n"
-	"1:	ldq_l	%2,0(%3)\n"
-	"	extbl	%2,%4,%0\n"
-	"	mskbl	%2,%4,%2\n"
-	"	or	%1,%2,%2\n"
-	"	stq_c	%2,0(%3)\n"
-	"	beq	%2,2f\n"
-	".subsection 2\n"
-	"2:	br	1b\n"
-	".previous"
-	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
-	: "r" ((long)m), "1" (val) : "memory");
-
-	return ret;
-}
-
-static inline unsigned long
-____xchg(_u16, volatile short *m, unsigned long val)
-{
-	unsigned long ret, tmp, addr64;
-
-	__asm__ __volatile__(
-	"	andnot	%4,7,%3\n"
-	"	inswl	%1,%4,%1\n"
-	"1:	ldq_l	%2,0(%3)\n"
-	"	extwl	%2,%4,%0\n"
-	"	mskwl	%2,%4,%2\n"
-	"	or	%1,%2,%2\n"
-	"	stq_c	%2,0(%3)\n"
-	"	beq	%2,2f\n"
-	".subsection 2\n"
-	"2:	br	1b\n"
-	".previous"
-	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
-	: "r" ((long)m), "1" (val) : "memory");
-
-	return ret;
-}
-
-static inline unsigned long
-____xchg(_u32, volatile int *m, unsigned long val)
-{
-	unsigned long dummy;
-
-	__asm__ __volatile__(
-	"1:	ldl_l %0,%4\n"
-	"	bis $31,%3,%1\n"
-	"	stl_c %1,%2\n"
-	"	beq %1,2f\n"
-	".subsection 2\n"
-	"2:	br 1b\n"
-	".previous"
-	: "=&r" (val), "=&r" (dummy), "=m" (*m)
-	: "rI" (val), "m" (*m) : "memory");
-
-	return val;
-}
-
-static inline unsigned long
-____xchg(_u64, volatile long *m, unsigned long val)
-{
-	unsigned long dummy;
-
-	__asm__ __volatile__(
-	"1:	ldq_l %0,%4\n"
-	"	bis $31,%3,%1\n"
-	"	stq_c %1,%2\n"
-	"	beq %1,2f\n"
-	".subsection 2\n"
-	"2:	br 1b\n"
-	".previous"
-	: "=&r" (val), "=&r" (dummy), "=m" (*m)
-	: "rI" (val), "m" (*m) : "memory");
-
-	return val;
-}
-
-/* This function doesn't exist, so you'll get a linker error
-   if something tries to do an invalid xchg(). */
-extern void __xchg_called_with_bad_pointer(void);
-
-static __always_inline unsigned long
-____xchg(, volatile void *ptr, unsigned long x, int size)
-{
-	switch (size) {
-	case 1:
-		return ____xchg(_u8, ptr, x);
-	case 2:
-		return ____xchg(_u16, ptr, x);
-	case 4:
-		return ____xchg(_u32, ptr, x);
-	case 8:
-		return ____xchg(_u64, ptr, x);
-	}
-	__xchg_called_with_bad_pointer();
-	return x;
-}
-
-/*
- * Atomic compare and exchange. Compare OLD with MEM, if identical,
- * store NEW in MEM. Return the initial value in MEM. Success is
- * indicated by comparing RETURN with OLD.
- */
-
-static inline unsigned long
-____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
-{
-	unsigned long prev, tmp, cmp, addr64;
-
-	__asm__ __volatile__(
-	"	andnot	%5,7,%4\n"
-	"	insbl	%1,%5,%1\n"
-	"1:	ldq_l	%2,0(%4)\n"
-	"	extbl	%2,%5,%0\n"
-	"	cmpeq	%0,%6,%3\n"
-	"	beq	%3,2f\n"
-	"	mskbl	%2,%5,%2\n"
-	"	or	%1,%2,%2\n"
-	"	stq_c	%2,0(%4)\n"
-	"	beq	%2,3f\n"
-	"2:\n"
-	".subsection 2\n"
-	"3:	br	1b\n"
-	".previous"
-	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
-	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
-
-	return prev;
-}
-
-static inline unsigned long
-____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
-{
-	unsigned long prev, tmp, cmp, addr64;
-
-	__asm__ __volatile__(
-	"	andnot	%5,7,%4\n"
-	"	inswl	%1,%5,%1\n"
-	"1:	ldq_l	%2,0(%4)\n"
-	"	extwl	%2,%5,%0\n"
-	"	cmpeq	%0,%6,%3\n"
-	"	beq	%3,2f\n"
-	"	mskwl	%2,%5,%2\n"
-	"	or	%1,%2,%2\n"
-	"	stq_c	%2,0(%4)\n"
-	"	beq	%2,3f\n"
-	"2:\n"
-	".subsection 2\n"
-	"3:	br	1b\n"
-	".previous"
-	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
-	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");
-
-	return prev;
-}
-
-static inline unsigned long
-____cmpxchg(_u32, volatile int *m, int old, int new)
-{
-	unsigned long prev, cmp;
-
-	__asm__ __volatile__(
-	"1:	ldl_l %0,%5\n"
-	"	cmpeq %0,%3,%1\n"
-	"	beq %1,2f\n"
-	"	mov %4,%1\n"
-	"	stl_c %1,%2\n"
-	"	beq %1,3f\n"
-	"2:\n"
-	".subsection 2\n"
-	"3:	br 1b\n"
-	".previous"
-	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
-	: "r"((long) old), "r"(new), "m"(*m) : "memory");
-
-	return prev;
-}
-
-static inline unsigned long
-____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
-{
-	unsigned long prev, cmp;
-
-	__asm__ __volatile__(
-	"1:	ldq_l %0,%5\n"
-	"	cmpeq %0,%3,%1\n"
-	"	beq %1,2f\n"
-	"	mov %4,%1\n"
-	"	stq_c %1,%2\n"
-	"	beq %1,3f\n"
-	"2:\n"
-	".subsection 2\n"
-	"3:	br 1b\n"
-	".previous"
-	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
-	: "r"((long) old), "r"(new), "m"(*m) : "memory");
-
-	return prev;
-}
-
-/* This function doesn't exist, so you'll get a linker error
-   if something tries to do an invalid cmpxchg(). */
-extern void __cmpxchg_called_with_bad_pointer(void);
-
-static __always_inline unsigned long
-____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
-	      int size)
-{
-	switch (size) {
-	case 1:
-		return ____cmpxchg(_u8, ptr, old, new);
-	case 2:
-		return ____cmpxchg(_u16, ptr, old, new);
-	case 4:
-		return ____cmpxchg(_u32, ptr, old, new);
-	case 8:
-		return ____cmpxchg(_u64, ptr, old, new);
-	}
-	__cmpxchg_called_with_bad_pointer();
-	return old;
-}
-
-#endif
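As a usage-level illustration (not part of this patch; the caller below is hypothetical), both the local and the fully ordered operations now funnel into the same ____cmpxchg() dispatcher, and only the arch_cmpxchg() wrapper (normally reached through the generic cmpxchg() macro) adds the smp_mb() barriers around it:

/* Hypothetical caller, for illustration only. */
static long counter;

static void example(void)
{
	long seen;

	/* Expands to ____cmpxchg() with no extra barriers. */
	seen = cmpxchg_local(&counter, 0, 1);

	/* Expands to smp_mb(); ____cmpxchg(...); smp_mb(); */
	seen = arch_cmpxchg(&counter, 1, 2);
	(void)seen;
}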