Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

powerpc: Rename LWSYNC_ON_SMP to PPC_RELEASE_BARRIER, ISYNC_ON_SMP to PPC_ACQUIRE_BARRIER

For performance reasons we are about to change ISYNC_ON_SMP to sometimes be
lwsync. Now that the macro name doesn't make sense, change it and LWSYNC_ON_SMP
to better explain what the barriers are doing.

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>

Authored by Anton Blanchard and committed by Benjamin Herrenschmidt.
f10e2e5b 66d99b88

+65 -60
+24 -24
arch/powerpc/include/asm/atomic.h
··· 49 49 int t; 50 50 51 51 __asm__ __volatile__( 52 - LWSYNC_ON_SMP 52 + PPC_RELEASE_BARRIER 53 53 "1: lwarx %0,0,%2 # atomic_add_return\n\ 54 54 add %0,%1,%0\n" 55 55 PPC405_ERR77(0,%2) 56 56 " stwcx. %0,0,%2 \n\ 57 57 bne- 1b" 58 - ISYNC_ON_SMP 58 + PPC_ACQUIRE_BARRIER 59 59 : "=&r" (t) 60 60 : "r" (a), "r" (&v->counter) 61 61 : "cc", "memory"); ··· 85 85 int t; 86 86 87 87 __asm__ __volatile__( 88 - LWSYNC_ON_SMP 88 + PPC_RELEASE_BARRIER 89 89 "1: lwarx %0,0,%2 # atomic_sub_return\n\ 90 90 subf %0,%1,%0\n" 91 91 PPC405_ERR77(0,%2) 92 92 " stwcx. %0,0,%2 \n\ 93 93 bne- 1b" 94 - ISYNC_ON_SMP 94 + PPC_ACQUIRE_BARRIER 95 95 : "=&r" (t) 96 96 : "r" (a), "r" (&v->counter) 97 97 : "cc", "memory"); ··· 119 119 int t; 120 120 121 121 __asm__ __volatile__( 122 - LWSYNC_ON_SMP 122 + PPC_RELEASE_BARRIER 123 123 "1: lwarx %0,0,%1 # atomic_inc_return\n\ 124 124 addic %0,%0,1\n" 125 125 PPC405_ERR77(0,%1) 126 126 " stwcx. %0,0,%1 \n\ 127 127 bne- 1b" 128 - ISYNC_ON_SMP 128 + PPC_ACQUIRE_BARRIER 129 129 : "=&r" (t) 130 130 : "r" (&v->counter) 131 131 : "cc", "xer", "memory"); ··· 163 163 int t; 164 164 165 165 __asm__ __volatile__( 166 - LWSYNC_ON_SMP 166 + PPC_RELEASE_BARRIER 167 167 "1: lwarx %0,0,%1 # atomic_dec_return\n\ 168 168 addic %0,%0,-1\n" 169 169 PPC405_ERR77(0,%1) 170 170 " stwcx. %0,0,%1\n\ 171 171 bne- 1b" 172 - ISYNC_ON_SMP 172 + PPC_ACQUIRE_BARRIER 173 173 : "=&r" (t) 174 174 : "r" (&v->counter) 175 175 : "cc", "xer", "memory"); ··· 194 194 int t; 195 195 196 196 __asm__ __volatile__ ( 197 - LWSYNC_ON_SMP 197 + PPC_RELEASE_BARRIER 198 198 "1: lwarx %0,0,%1 # atomic_add_unless\n\ 199 199 cmpw 0,%0,%3 \n\ 200 200 beq- 2f \n\ ··· 202 202 PPC405_ERR77(0,%2) 203 203 " stwcx. 
%0,0,%1 \n\ 204 204 bne- 1b \n" 205 - ISYNC_ON_SMP 205 + PPC_ACQUIRE_BARRIER 206 206 " subf %0,%2,%0 \n\ 207 207 2:" 208 208 : "=&r" (t) ··· 227 227 int t; 228 228 229 229 __asm__ __volatile__( 230 - LWSYNC_ON_SMP 230 + PPC_RELEASE_BARRIER 231 231 "1: lwarx %0,0,%1 # atomic_dec_if_positive\n\ 232 232 cmpwi %0,1\n\ 233 233 addi %0,%0,-1\n\ ··· 235 235 PPC405_ERR77(0,%1) 236 236 " stwcx. %0,0,%1\n\ 237 237 bne- 1b" 238 - ISYNC_ON_SMP 238 + PPC_ACQUIRE_BARRIER 239 239 "\n\ 240 240 2:" : "=&b" (t) 241 241 : "r" (&v->counter) ··· 286 286 long t; 287 287 288 288 __asm__ __volatile__( 289 - LWSYNC_ON_SMP 289 + PPC_RELEASE_BARRIER 290 290 "1: ldarx %0,0,%2 # atomic64_add_return\n\ 291 291 add %0,%1,%0\n\ 292 292 stdcx. %0,0,%2 \n\ 293 293 bne- 1b" 294 - ISYNC_ON_SMP 294 + PPC_ACQUIRE_BARRIER 295 295 : "=&r" (t) 296 296 : "r" (a), "r" (&v->counter) 297 297 : "cc", "memory"); ··· 320 320 long t; 321 321 322 322 __asm__ __volatile__( 323 - LWSYNC_ON_SMP 323 + PPC_RELEASE_BARRIER 324 324 "1: ldarx %0,0,%2 # atomic64_sub_return\n\ 325 325 subf %0,%1,%0\n\ 326 326 stdcx. %0,0,%2 \n\ 327 327 bne- 1b" 328 - ISYNC_ON_SMP 328 + PPC_ACQUIRE_BARRIER 329 329 : "=&r" (t) 330 330 : "r" (a), "r" (&v->counter) 331 331 : "cc", "memory"); ··· 352 352 long t; 353 353 354 354 __asm__ __volatile__( 355 - LWSYNC_ON_SMP 355 + PPC_RELEASE_BARRIER 356 356 "1: ldarx %0,0,%1 # atomic64_inc_return\n\ 357 357 addic %0,%0,1\n\ 358 358 stdcx. %0,0,%1 \n\ 359 359 bne- 1b" 360 - ISYNC_ON_SMP 360 + PPC_ACQUIRE_BARRIER 361 361 : "=&r" (t) 362 362 : "r" (&v->counter) 363 363 : "cc", "xer", "memory"); ··· 394 394 long t; 395 395 396 396 __asm__ __volatile__( 397 - LWSYNC_ON_SMP 397 + PPC_RELEASE_BARRIER 398 398 "1: ldarx %0,0,%1 # atomic64_dec_return\n\ 399 399 addic %0,%0,-1\n\ 400 400 stdcx. 
%0,0,%1\n\ 401 401 bne- 1b" 402 - ISYNC_ON_SMP 402 + PPC_ACQUIRE_BARRIER 403 403 : "=&r" (t) 404 404 : "r" (&v->counter) 405 405 : "cc", "xer", "memory"); ··· 419 419 long t; 420 420 421 421 __asm__ __volatile__( 422 - LWSYNC_ON_SMP 422 + PPC_RELEASE_BARRIER 423 423 "1: ldarx %0,0,%1 # atomic64_dec_if_positive\n\ 424 424 addic. %0,%0,-1\n\ 425 425 blt- 2f\n\ 426 426 stdcx. %0,0,%1\n\ 427 427 bne- 1b" 428 - ISYNC_ON_SMP 428 + PPC_ACQUIRE_BARRIER 429 429 "\n\ 430 430 2:" : "=&r" (t) 431 431 : "r" (&v->counter) ··· 451 451 long t; 452 452 453 453 __asm__ __volatile__ ( 454 - LWSYNC_ON_SMP 454 + PPC_RELEASE_BARRIER 455 455 "1: ldarx %0,0,%1 # atomic_add_unless\n\ 456 456 cmpd 0,%0,%3 \n\ 457 457 beq- 2f \n\ 458 458 add %0,%2,%0 \n" 459 459 " stdcx. %0,0,%1 \n\ 460 460 bne- 1b \n" 461 - ISYNC_ON_SMP 461 + PPC_ACQUIRE_BARRIER 462 462 " subf %0,%2,%0 \n\ 463 463 2:" 464 464 : "=&r" (t)
+10 -6
arch/powerpc/include/asm/bitops.h
··· 78 78 79 79 DEFINE_BITOP(set_bits, or, "", "") 80 80 DEFINE_BITOP(clear_bits, andc, "", "") 81 - DEFINE_BITOP(clear_bits_unlock, andc, LWSYNC_ON_SMP, "") 81 + DEFINE_BITOP(clear_bits_unlock, andc, PPC_RELEASE_BARRIER, "") 82 82 DEFINE_BITOP(change_bits, xor, "", "") 83 83 84 84 static __inline__ void set_bit(int nr, volatile unsigned long *addr) ··· 124 124 return (old & mask); \ 125 125 } 126 126 127 - DEFINE_TESTOP(test_and_set_bits, or, LWSYNC_ON_SMP, ISYNC_ON_SMP, 0) 128 - DEFINE_TESTOP(test_and_set_bits_lock, or, "", ISYNC_ON_SMP, 1) 129 - DEFINE_TESTOP(test_and_clear_bits, andc, LWSYNC_ON_SMP, ISYNC_ON_SMP, 0) 130 - DEFINE_TESTOP(test_and_change_bits, xor, LWSYNC_ON_SMP, ISYNC_ON_SMP, 0) 127 + DEFINE_TESTOP(test_and_set_bits, or, PPC_RELEASE_BARRIER, 128 + PPC_ACQUIRE_BARRIER, 0) 129 + DEFINE_TESTOP(test_and_set_bits_lock, or, "", 130 + PPC_ACQUIRE_BARRIER, 1) 131 + DEFINE_TESTOP(test_and_clear_bits, andc, PPC_RELEASE_BARRIER, 132 + PPC_ACQUIRE_BARRIER, 0) 133 + DEFINE_TESTOP(test_and_change_bits, xor, PPC_RELEASE_BARRIER, 134 + PPC_ACQUIRE_BARRIER, 0) 131 135 132 136 static __inline__ int test_and_set_bit(unsigned long nr, 133 137 volatile unsigned long *addr) ··· 162 158 163 159 static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr) 164 160 { 165 - __asm__ __volatile__(LWSYNC_ON_SMP "" ::: "memory"); 161 + __asm__ __volatile__(PPC_RELEASE_BARRIER "" ::: "memory"); 166 162 __clear_bit(nr, addr); 167 163 } 168 164
+3 -3
arch/powerpc/include/asm/futex.h
··· 11 11 12 12 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \ 13 13 __asm__ __volatile ( \ 14 - LWSYNC_ON_SMP \ 14 + PPC_RELEASE_BARRIER \ 15 15 "1: lwarx %0,0,%2\n" \ 16 16 insn \ 17 17 PPC405_ERR77(0, %2) \ ··· 90 90 return -EFAULT; 91 91 92 92 __asm__ __volatile__ ( 93 - LWSYNC_ON_SMP 93 + PPC_RELEASE_BARRIER 94 94 "1: lwarx %0,0,%2 # futex_atomic_cmpxchg_inatomic\n\ 95 95 cmpw 0,%0,%3\n\ 96 96 bne- 3f\n" 97 97 PPC405_ERR77(0,%2) 98 98 "2: stwcx. %4,0,%2\n\ 99 99 bne- 1b\n" 100 - ISYNC_ON_SMP 100 + PPC_ACQUIRE_BARRIER 101 101 "3: .section .fixup,\"ax\"\n\ 102 102 4: li %0,%5\n\ 103 103 b 3b\n\
+3 -3
arch/powerpc/include/asm/mutex.h
··· 15 15 PPC405_ERR77(0,%1) 16 16 " stwcx. %3,0,%1\n\ 17 17 bne- 1b" 18 - ISYNC_ON_SMP 18 + PPC_ACQUIRE_BARRIER 19 19 "\n\ 20 20 2:" 21 21 : "=&r" (t) ··· 35 35 PPC405_ERR77(0,%1) 36 36 " stwcx. %0,0,%1\n\ 37 37 bne- 1b" 38 - ISYNC_ON_SMP 38 + PPC_ACQUIRE_BARRIER 39 39 : "=&r" (t) 40 40 : "r" (&v->counter) 41 41 : "cc", "memory"); ··· 48 48 int t; 49 49 50 50 __asm__ __volatile__( 51 - LWSYNC_ON_SMP 51 + PPC_RELEASE_BARRIER 52 52 "1: lwarx %0,0,%1 # mutex unlock\n\ 53 53 addic %0,%0,1\n" 54 54 PPC405_ERR77(0,%1)
+13 -12
arch/powerpc/include/asm/spinlock.h
··· 65 65 cmpwi 0,%0,0\n\ 66 66 bne- 2f\n\ 67 67 stwcx. %1,0,%2\n\ 68 - bne- 1b\n\ 69 - isync\n\ 70 - 2:" : "=&r" (tmp) 68 + bne- 1b\n" 69 + PPC_ACQUIRE_BARRIER 70 + "2:" 71 + : "=&r" (tmp) 71 72 : "r" (token), "r" (&lock->slock) 72 73 : "cr0", "memory"); 73 74 ··· 146 145 { 147 146 SYNC_IO; 148 147 __asm__ __volatile__("# arch_spin_unlock\n\t" 149 - LWSYNC_ON_SMP: : :"memory"); 148 + PPC_RELEASE_BARRIER: : :"memory"); 150 149 lock->slock = 0; 151 150 } 152 151 ··· 194 193 ble- 2f\n" 195 194 PPC405_ERR77(0,%1) 196 195 " stwcx. %0,0,%1\n\ 197 - bne- 1b\n\ 198 - isync\n\ 199 - 2:" : "=&r" (tmp) 196 + bne- 1b\n" 197 + PPC_ACQUIRE_BARRIER 198 + "2:" : "=&r" (tmp) 200 199 : "r" (&rw->lock) 201 200 : "cr0", "xer", "memory"); 202 201 ··· 218 217 bne- 2f\n" 219 218 PPC405_ERR77(0,%1) 220 219 " stwcx. %1,0,%2\n\ 221 - bne- 1b\n\ 222 - isync\n\ 223 - 2:" : "=&r" (tmp) 220 + bne- 1b\n" 221 + PPC_ACQUIRE_BARRIER 222 + "2:" : "=&r" (tmp) 224 223 : "r" (token), "r" (&rw->lock) 225 224 : "cr0", "memory"); 226 225 ··· 271 270 272 271 __asm__ __volatile__( 273 272 "# read_unlock\n\t" 274 - LWSYNC_ON_SMP 273 + PPC_RELEASE_BARRIER 275 274 "1: lwarx %0,0,%1\n\ 276 275 addic %0,%0,-1\n" 277 276 PPC405_ERR77(0,%1) ··· 285 284 static inline void arch_write_unlock(arch_rwlock_t *rw) 286 285 { 287 286 __asm__ __volatile__("# write_unlock\n\t" 288 - LWSYNC_ON_SMP: : :"memory"); 287 + PPC_RELEASE_BARRIER: : :"memory"); 289 288 rw->lock = 0; 290 289 } 291 290
+4 -4
arch/powerpc/include/asm/synch.h
··· 37 37 #endif 38 38 39 39 #ifdef CONFIG_SMP 40 - #define ISYNC_ON_SMP "\n\tisync\n" 41 - #define LWSYNC_ON_SMP stringify_in_c(LWSYNC) "\n" 40 + #define PPC_ACQUIRE_BARRIER "\n\tisync\n" 41 + #define PPC_RELEASE_BARRIER stringify_in_c(LWSYNC) "\n" 42 42 #else 43 - #define ISYNC_ON_SMP 44 - #define LWSYNC_ON_SMP 43 + #define PPC_ACQUIRE_BARRIER 44 + #define PPC_RELEASE_BARRIER 45 45 #endif 46 46 47 47 #endif /* __KERNEL__ */
+8 -8
arch/powerpc/include/asm/system.h
··· 232 232 unsigned long prev; 233 233 234 234 __asm__ __volatile__( 235 - LWSYNC_ON_SMP 235 + PPC_RELEASE_BARRIER 236 236 "1: lwarx %0,0,%2 \n" 237 237 PPC405_ERR77(0,%2) 238 238 " stwcx. %3,0,%2 \n\ 239 239 bne- 1b" 240 - ISYNC_ON_SMP 240 + PPC_ACQUIRE_BARRIER 241 241 : "=&r" (prev), "+m" (*(volatile unsigned int *)p) 242 242 : "r" (p), "r" (val) 243 243 : "cc", "memory"); ··· 275 275 unsigned long prev; 276 276 277 277 __asm__ __volatile__( 278 - LWSYNC_ON_SMP 278 + PPC_RELEASE_BARRIER 279 279 "1: ldarx %0,0,%2 \n" 280 280 PPC405_ERR77(0,%2) 281 281 " stdcx. %3,0,%2 \n\ 282 282 bne- 1b" 283 - ISYNC_ON_SMP 283 + PPC_ACQUIRE_BARRIER 284 284 : "=&r" (prev), "+m" (*(volatile unsigned long *)p) 285 285 : "r" (p), "r" (val) 286 286 : "cc", "memory"); ··· 366 366 unsigned int prev; 367 367 368 368 __asm__ __volatile__ ( 369 - LWSYNC_ON_SMP 369 + PPC_RELEASE_BARRIER 370 370 "1: lwarx %0,0,%2 # __cmpxchg_u32\n\ 371 371 cmpw 0,%0,%3\n\ 372 372 bne- 2f\n" 373 373 PPC405_ERR77(0,%2) 374 374 " stwcx. %4,0,%2\n\ 375 375 bne- 1b" 376 - ISYNC_ON_SMP 376 + PPC_ACQUIRE_BARRIER 377 377 "\n\ 378 378 2:" 379 379 : "=&r" (prev), "+m" (*p) ··· 412 412 unsigned long prev; 413 413 414 414 __asm__ __volatile__ ( 415 - LWSYNC_ON_SMP 415 + PPC_RELEASE_BARRIER 416 416 "1: ldarx %0,0,%2 # __cmpxchg_u64\n\ 417 417 cmpd 0,%0,%3\n\ 418 418 bne- 2f\n\ 419 419 stdcx. %4,0,%2\n\ 420 420 bne- 1b" 421 - ISYNC_ON_SMP 421 + PPC_ACQUIRE_BARRIER 422 422 "\n\ 423 423 2:" 424 424 : "=&r" (prev), "+m" (*p)