Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

locking/arch: Rename all internal __xchg() names to __arch_xchg()

Decrease the probability of this internal facility being used by
driver code.

Signed-off-by: Andrzej Hajda <andrzej.hajda@intel.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Reviewed-by: Arnd Bergmann <arnd@arndb.de>
Reviewed-by: Andi Shyti <andi.shyti@linux.intel.com>
Acked-by: Geert Uytterhoeven <geert@linux-m68k.org> [m68k]
Acked-by: Palmer Dabbelt <palmer@rivosinc.com> [riscv]
Link: https://lore.kernel.org/r/20230118154450.73842-1-andrzej.hajda@intel.com
Cc: Linus Torvalds <torvalds@linux-foundation.org>

Authored by Andrzej Hajda; committed by Ingo Molnar.
06855063 ee1ee6db

+54 -54
+5 -5
arch/alpha/include/asm/cmpxchg.h
··· 6 6 * Atomic exchange routines. 7 7 */ 8 8 9 - #define ____xchg(type, args...) __xchg ## type ## _local(args) 9 + #define ____xchg(type, args...) __arch_xchg ## type ## _local(args) 10 10 #define ____cmpxchg(type, args...) __cmpxchg ## type ## _local(args) 11 11 #include <asm/xchg.h> 12 12 13 13 #define xchg_local(ptr, x) \ 14 14 ({ \ 15 15 __typeof__(*(ptr)) _x_ = (x); \ 16 - (__typeof__(*(ptr))) __xchg_local((ptr), (unsigned long)_x_, \ 17 - sizeof(*(ptr))); \ 16 + (__typeof__(*(ptr))) __arch_xchg_local((ptr), (unsigned long)_x_,\ 17 + sizeof(*(ptr))); \ 18 18 }) 19 19 20 20 #define arch_cmpxchg_local(ptr, o, n) \ ··· 34 34 35 35 #undef ____xchg 36 36 #undef ____cmpxchg 37 - #define ____xchg(type, args...) __xchg ##type(args) 37 + #define ____xchg(type, args...) __arch_xchg ##type(args) 38 38 #define ____cmpxchg(type, args...) __cmpxchg ##type(args) 39 39 #include <asm/xchg.h> 40 40 ··· 48 48 __typeof__(*(ptr)) _x_ = (x); \ 49 49 smp_mb(); \ 50 50 __ret = (__typeof__(*(ptr))) \ 51 - __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \ 51 + __arch_xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \ 52 52 smp_mb(); \ 53 53 __ret; \ 54 54 })
+2 -2
arch/arc/include/asm/cmpxchg.h
··· 85 85 */ 86 86 #ifdef CONFIG_ARC_HAS_LLSC 87 87 88 - #define __xchg(ptr, val) \ 88 + #define __arch_xchg(ptr, val) \ 89 89 ({ \ 90 90 __asm__ __volatile__( \ 91 91 " ex %0, [%1] \n" /* set new value */ \ ··· 102 102 \ 103 103 switch(sizeof(*(_p_))) { \ 104 104 case 4: \ 105 - _val_ = __xchg(_p_, _val_); \ 105 + _val_ = __arch_xchg(_p_, _val_); \ 106 106 break; \ 107 107 default: \ 108 108 BUILD_BUG(); \
+4 -3
arch/arm/include/asm/cmpxchg.h
··· 25 25 #define swp_is_buggy 26 26 #endif 27 27 28 - static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size) 28 + static inline unsigned long 29 + __arch_xchg(unsigned long x, volatile void *ptr, int size) 29 30 { 30 31 extern void __bad_xchg(volatile void *, int); 31 32 unsigned long ret; ··· 116 115 } 117 116 118 117 #define arch_xchg_relaxed(ptr, x) ({ \ 119 - (__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), \ 120 - sizeof(*(ptr))); \ 118 + (__typeof__(*(ptr)))__arch_xchg((unsigned long)(x), (ptr), \ 119 + sizeof(*(ptr))); \ 121 120 }) 122 121 123 122 #include <asm-generic/cmpxchg-local.h>
+3 -4
arch/arm64/include/asm/cmpxchg.h
··· 62 62 #undef __XCHG_CASE 63 63 64 64 #define __XCHG_GEN(sfx) \ 65 - static __always_inline unsigned long __xchg##sfx(unsigned long x, \ 66 - volatile void *ptr, \ 67 - int size) \ 65 + static __always_inline unsigned long \ 66 + __arch_xchg##sfx(unsigned long x, volatile void *ptr, int size) \ 68 67 { \ 69 68 switch (size) { \ 70 69 case 1: \ ··· 92 93 ({ \ 93 94 __typeof__(*(ptr)) __ret; \ 94 95 __ret = (__typeof__(*(ptr))) \ 95 - __xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \ 96 + __arch_xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \ 96 97 __ret; \ 97 98 }) 98 99
+5 -5
arch/hexagon/include/asm/cmpxchg.h
··· 9 9 #define _ASM_CMPXCHG_H 10 10 11 11 /* 12 - * __xchg - atomically exchange a register and a memory location 12 + * __arch_xchg - atomically exchange a register and a memory location 13 13 * @x: value to swap 14 14 * @ptr: pointer to memory 15 15 * @size: size of the value ··· 19 19 * Note: there was an errata for V2 about .new's and memw_locked. 20 20 * 21 21 */ 22 - static inline unsigned long __xchg(unsigned long x, volatile void *ptr, 23 - int size) 22 + static inline unsigned long 23 + __arch_xchg(unsigned long x, volatile void *ptr, int size) 24 24 { 25 25 unsigned long retval; 26 26 ··· 42 42 * Atomically swap the contents of a register with memory. Should be atomic 43 43 * between multiple CPU's and within interrupts on the same CPU. 44 44 */ 45 - #define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), (ptr), \ 46 - sizeof(*(ptr)))) 45 + #define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__arch_xchg((unsigned long)(v), (ptr), \ 46 + sizeof(*(ptr)))) 47 47 48 48 /* 49 49 * see rt-mutex-design.txt; cmpxchg supposedly checks if *ptr == A and swaps.
+1 -1
arch/ia64/include/asm/cmpxchg.h
··· 5 5 #include <uapi/asm/cmpxchg.h> 6 6 7 7 #define arch_xchg(ptr, x) \ 8 - ({(__typeof__(*(ptr))) __xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));}) 8 + ({(__typeof__(*(ptr))) __arch_xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));}) 9 9 10 10 #define arch_cmpxchg(ptr, o, n) cmpxchg_acq((ptr), (o), (n)) 11 11 #define arch_cmpxchg64(ptr, o, n) cmpxchg_acq((ptr), (o), (n))
+2 -2
arch/ia64/include/uapi/asm/cmpxchg.h
··· 23 23 */ 24 24 extern void ia64_xchg_called_with_bad_pointer(void); 25 25 26 - #define __xchg(x, ptr, size) \ 26 + #define __arch_xchg(x, ptr, size) \ 27 27 ({ \ 28 28 unsigned long __xchg_result; \ 29 29 \ ··· 51 51 52 52 #ifndef __KERNEL__ 53 53 #define xchg(ptr, x) \ 54 - ({(__typeof__(*(ptr))) __xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));}) 54 + ({(__typeof__(*(ptr))) __arch_xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));}) 55 55 #endif 56 56 57 57 /*
+2 -2
arch/loongarch/include/asm/cmpxchg.h
··· 62 62 } 63 63 64 64 static __always_inline unsigned long 65 - __xchg(volatile void *ptr, unsigned long x, int size) 65 + __arch_xchg(volatile void *ptr, unsigned long x, int size) 66 66 { 67 67 switch (size) { 68 68 case 1: ··· 87 87 __typeof__(*(ptr)) __res; \ 88 88 \ 89 89 __res = (__typeof__(*(ptr))) \ 90 - __xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \ 90 + __arch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \ 91 91 \ 92 92 __res; \ 93 93 })
+3 -3
arch/m68k/include/asm/cmpxchg.h
··· 9 9 extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int); 10 10 11 11 #ifndef CONFIG_RMW_INSNS 12 - static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size) 12 + static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size) 13 13 { 14 14 unsigned long flags, tmp; 15 15 ··· 40 40 return x; 41 41 } 42 42 #else 43 - static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size) 43 + static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size) 44 44 { 45 45 switch (size) { 46 46 case 1: ··· 75 75 } 76 76 #endif 77 77 78 - #define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));}) 78 + #define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));}) 79 79 80 80 #include <asm-generic/cmpxchg-local.h> 81 81
+2 -2
arch/mips/include/asm/cmpxchg.h
··· 68 68 unsigned int size); 69 69 70 70 static __always_inline 71 - unsigned long __xchg(volatile void *ptr, unsigned long x, int size) 71 + unsigned long __arch_xchg(volatile void *ptr, unsigned long x, int size) 72 72 { 73 73 switch (size) { 74 74 case 1: ··· 102 102 smp_mb__before_llsc(); \ 103 103 \ 104 104 __res = (__typeof__(*(ptr))) \ 105 - __xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \ 105 + __arch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \ 106 106 \ 107 107 smp_llsc_mb(); \ 108 108 \
+5 -5
arch/openrisc/include/asm/cmpxchg.h
··· 147 147 extern unsigned long __xchg_called_with_bad_pointer(void) 148 148 __compiletime_error("Bad argument size for xchg"); 149 149 150 - static inline unsigned long __xchg(volatile void *ptr, unsigned long with, 151 - int size) 150 + static inline unsigned long 151 + __arch_xchg(volatile void *ptr, unsigned long with, int size) 152 152 { 153 153 switch (size) { 154 154 case 1: ··· 163 163 164 164 #define arch_xchg(ptr, with) \ 165 165 ({ \ 166 - (__typeof__(*(ptr))) __xchg((ptr), \ 167 - (unsigned long)(with), \ 168 - sizeof(*(ptr))); \ 166 + (__typeof__(*(ptr))) __arch_xchg((ptr), \ 167 + (unsigned long)(with), \ 168 + sizeof(*(ptr))); \ 169 169 }) 170 170 171 171 #endif /* __ASM_OPENRISC_CMPXCHG_H */
+2 -2
arch/parisc/include/asm/cmpxchg.h
··· 22 22 23 23 /* optimizer better get rid of switch since size is a constant */ 24 24 static inline unsigned long 25 - __xchg(unsigned long x, volatile void *ptr, int size) 25 + __arch_xchg(unsigned long x, volatile void *ptr, int size) 26 26 { 27 27 switch (size) { 28 28 #ifdef CONFIG_64BIT ··· 49 49 __typeof__(*(ptr)) __ret; \ 50 50 __typeof__(*(ptr)) _x_ = (x); \ 51 51 __ret = (__typeof__(*(ptr))) \ 52 - __xchg((unsigned long)_x_, (ptr), sizeof(*(ptr))); \ 52 + __arch_xchg((unsigned long)_x_, (ptr), sizeof(*(ptr))); \ 53 53 __ret; \ 54 54 }) 55 55
+2 -2
arch/powerpc/include/asm/cmpxchg.h
··· 229 229 return __xchg_u64_local(ptr, x); 230 230 #endif 231 231 } 232 - BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg"); 232 + BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_local"); 233 233 return x; 234 234 } 235 235 ··· 248 248 return __xchg_u64_relaxed(ptr, x); 249 249 #endif 250 250 } 251 - BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_local"); 251 + BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_relaxed"); 252 252 return x; 253 253 } 254 254 #define arch_xchg_local(ptr,x) \
+1 -1
arch/riscv/include/asm/atomic.h
··· 261 261 static __always_inline \ 262 262 c_t arch_atomic##prefix##_xchg(atomic##prefix##_t *v, c_t n) \ 263 263 { \ 264 - return __xchg(&(v->counter), n, size); \ 264 + return __arch_xchg(&(v->counter), n, size); \ 265 265 } \ 266 266 static __always_inline \ 267 267 c_t arch_atomic##prefix##_cmpxchg_relaxed(atomic##prefix##_t *v, \
+2 -2
arch/riscv/include/asm/cmpxchg.h
··· 114 114 _x_, sizeof(*(ptr))); \ 115 115 }) 116 116 117 - #define __xchg(ptr, new, size) \ 117 + #define __arch_xchg(ptr, new, size) \ 118 118 ({ \ 119 119 __typeof__(ptr) __ptr = (ptr); \ 120 120 __typeof__(new) __new = (new); \ ··· 143 143 #define arch_xchg(ptr, x) \ 144 144 ({ \ 145 145 __typeof__(*(ptr)) _x_ = (x); \ 146 - (__typeof__(*(ptr))) __xchg((ptr), _x_, sizeof(*(ptr))); \ 146 + (__typeof__(*(ptr))) __arch_xchg((ptr), _x_, sizeof(*(ptr))); \ 147 147 }) 148 148 149 149 #define xchg32(ptr, x) \
+4 -4
arch/s390/include/asm/cmpxchg.h
··· 14 14 15 15 void __xchg_called_with_bad_pointer(void); 16 16 17 - static __always_inline unsigned long __xchg(unsigned long x, 18 - unsigned long address, int size) 17 + static __always_inline unsigned long 18 + __arch_xchg(unsigned long x, unsigned long address, int size) 19 19 { 20 20 unsigned long old; 21 21 int shift; ··· 77 77 __typeof__(*(ptr)) __ret; \ 78 78 \ 79 79 __ret = (__typeof__(*(ptr))) \ 80 - __xchg((unsigned long)(x), (unsigned long)(ptr), \ 81 - sizeof(*(ptr))); \ 80 + __arch_xchg((unsigned long)(x), (unsigned long)(ptr), \ 81 + sizeof(*(ptr))); \ 82 82 __ret; \ 83 83 }) 84 84
+2 -2
arch/sh/include/asm/cmpxchg.h
··· 22 22 23 23 extern void __xchg_called_with_bad_pointer(void); 24 24 25 - #define __xchg(ptr, x, size) \ 25 + #define __arch_xchg(ptr, x, size) \ 26 26 ({ \ 27 27 unsigned long __xchg__res; \ 28 28 volatile void *__xchg_ptr = (ptr); \ ··· 46 46 }) 47 47 48 48 #define arch_xchg(ptr,x) \ 49 - ((__typeof__(*(ptr)))__xchg((ptr),(unsigned long)(x), sizeof(*(ptr)))) 49 + ((__typeof__(*(ptr)))__arch_xchg((ptr),(unsigned long)(x), sizeof(*(ptr)))) 50 50 51 51 /* This function doesn't exist, so you'll get a linker error 52 52 * if something tries to do an invalid cmpxchg(). */
+2 -2
arch/sparc/include/asm/cmpxchg_32.h
··· 15 15 unsigned long __xchg_u32(volatile u32 *m, u32 new); 16 16 void __xchg_called_with_bad_pointer(void); 17 17 18 - static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr, int size) 18 + static inline unsigned long __arch_xchg(unsigned long x, __volatile__ void * ptr, int size) 19 19 { 20 20 switch (size) { 21 21 case 4: ··· 25 25 return x; 26 26 } 27 27 28 - #define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));}) 28 + #define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));}) 29 29 30 30 /* Emulate cmpxchg() the same way we emulate atomics, 31 31 * by hashing the object address and indexing into an array
+3 -3
arch/sparc/include/asm/cmpxchg_64.h
··· 55 55 #define arch_xchg(ptr,x) \ 56 56 ({ __typeof__(*(ptr)) __ret; \ 57 57 __ret = (__typeof__(*(ptr))) \ 58 - __xchg((unsigned long)(x), (ptr), sizeof(*(ptr))); \ 58 + __arch_xchg((unsigned long)(x), (ptr), sizeof(*(ptr))); \ 59 59 __ret; \ 60 60 }) 61 61 ··· 87 87 return (load32 & mask) >> bit_shift; 88 88 } 89 89 90 - static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr, 91 - int size) 90 + static inline unsigned long 91 + __arch_xchg(unsigned long x, __volatile__ void * ptr, int size) 92 92 { 93 93 switch (size) { 94 94 case 2:
+2 -2
arch/xtensa/include/asm/cmpxchg.h
··· 170 170 } 171 171 172 172 #define arch_xchg(ptr,x) \ 173 - ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr)))) 173 + ((__typeof__(*(ptr)))__arch_xchg((unsigned long)(x),(ptr),sizeof(*(ptr)))) 174 174 175 175 static inline u32 xchg_small(volatile void *ptr, u32 x, int size) 176 176 { ··· 203 203 extern void __xchg_called_with_bad_pointer(void); 204 204 205 205 static __inline__ unsigned long 206 - __xchg(unsigned long x, volatile void * ptr, int size) 206 + __arch_xchg(unsigned long x, volatile void * ptr, int size) 207 207 { 208 208 switch (size) { 209 209 case 1: