Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

MIPS: asm: Rename GCC_OFF12_ASM to GCC_OFF_SMALL_ASM

The GCC_OFF12_ASM macro is used for 12-bit immediate constraints,
but we will also use it for 9-bit constraints on MIPS R6, so we
rename it to something more appropriate.

Cc: Maciej W. Rozycki <macro@linux-mips.org>
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>

+93 -93
+15 -15
arch/mips/include/asm/atomic.h
··· 54 54 " sc %0, %1 \n" \ 55 55 " beqzl %0, 1b \n" \ 56 56 " .set mips0 \n" \ 57 - : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ 57 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ 58 58 : "Ir" (i)); \ 59 59 } else if (kernel_uses_llsc) { \ 60 60 int temp; \ ··· 66 66 " " #asm_op " %0, %2 \n" \ 67 67 " sc %0, %1 \n" \ 68 68 " .set mips0 \n" \ 69 - : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ 69 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ 70 70 : "Ir" (i)); \ 71 71 } while (unlikely(!temp)); \ 72 72 } else { \ ··· 97 97 " " #asm_op " %0, %1, %3 \n" \ 98 98 " .set mips0 \n" \ 99 99 : "=&r" (result), "=&r" (temp), \ 100 - "+" GCC_OFF12_ASM() (v->counter) \ 100 + "+" GCC_OFF_SMALL_ASM() (v->counter) \ 101 101 : "Ir" (i)); \ 102 102 } else if (kernel_uses_llsc) { \ 103 103 int temp; \ ··· 110 110 " sc %0, %2 \n" \ 111 111 " .set mips0 \n" \ 112 112 : "=&r" (result), "=&r" (temp), \ 113 - "+" GCC_OFF12_ASM() (v->counter) \ 113 + "+" GCC_OFF_SMALL_ASM() (v->counter) \ 114 114 : "Ir" (i)); \ 115 115 } while (unlikely(!result)); \ 116 116 \ ··· 171 171 "1: \n" 172 172 " .set mips0 \n" 173 173 : "=&r" (result), "=&r" (temp), 174 - "+" GCC_OFF12_ASM() (v->counter) 175 - : "Ir" (i), GCC_OFF12_ASM() (v->counter) 174 + "+" GCC_OFF_SMALL_ASM() (v->counter) 175 + : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) 176 176 : "memory"); 177 177 } else if (kernel_uses_llsc) { 178 178 int temp; ··· 190 190 "1: \n" 191 191 " .set mips0 \n" 192 192 : "=&r" (result), "=&r" (temp), 193 - "+" GCC_OFF12_ASM() (v->counter) 193 + "+" GCC_OFF_SMALL_ASM() (v->counter) 194 194 : "Ir" (i)); 195 195 } else { 196 196 unsigned long flags; ··· 333 333 " scd %0, %1 \n" \ 334 334 " beqzl %0, 1b \n" \ 335 335 " .set mips0 \n" \ 336 - : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ 336 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ 337 337 : "Ir" (i)); \ 338 338 } else if (kernel_uses_llsc) { \ 339 339 long temp; \ ··· 345 345 " " #asm_op " %0, %2 \n" \ 346 346 " 
scd %0, %1 \n" \ 347 347 " .set mips0 \n" \ 348 - : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ 348 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ 349 349 : "Ir" (i)); \ 350 350 } while (unlikely(!temp)); \ 351 351 } else { \ ··· 376 376 " " #asm_op " %0, %1, %3 \n" \ 377 377 " .set mips0 \n" \ 378 378 : "=&r" (result), "=&r" (temp), \ 379 - "+" GCC_OFF12_ASM() (v->counter) \ 379 + "+" GCC_OFF_SMALL_ASM() (v->counter) \ 380 380 : "Ir" (i)); \ 381 381 } else if (kernel_uses_llsc) { \ 382 382 long temp; \ ··· 389 389 " scd %0, %2 \n" \ 390 390 " .set mips0 \n" \ 391 391 : "=&r" (result), "=&r" (temp), \ 392 - "=" GCC_OFF12_ASM() (v->counter) \ 393 - : "Ir" (i), GCC_OFF12_ASM() (v->counter) \ 392 + "=" GCC_OFF_SMALL_ASM() (v->counter) \ 393 + : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \ 394 394 : "memory"); \ 395 395 } while (unlikely(!result)); \ 396 396 \ ··· 452 452 "1: \n" 453 453 " .set mips0 \n" 454 454 : "=&r" (result), "=&r" (temp), 455 - "=" GCC_OFF12_ASM() (v->counter) 456 - : "Ir" (i), GCC_OFF12_ASM() (v->counter) 455 + "=" GCC_OFF_SMALL_ASM() (v->counter) 456 + : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) 457 457 : "memory"); 458 458 } else if (kernel_uses_llsc) { 459 459 long temp; ··· 471 471 "1: \n" 472 472 " .set mips0 \n" 473 473 : "=&r" (result), "=&r" (temp), 474 - "+" GCC_OFF12_ASM() (v->counter) 474 + "+" GCC_OFF_SMALL_ASM() (v->counter) 475 475 : "Ir" (i)); 476 476 } else { 477 477 unsigned long flags;
+17 -17
arch/mips/include/asm/bitops.h
··· 79 79 " " __SC "%0, %1 \n" 80 80 " beqzl %0, 1b \n" 81 81 " .set mips0 \n" 82 - : "=&r" (temp), "=" GCC_OFF12_ASM() (*m) 83 - : "ir" (1UL << bit), GCC_OFF12_ASM() (*m)); 82 + : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m) 83 + : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m)); 84 84 #ifdef CONFIG_CPU_MIPSR2 85 85 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { 86 86 do { ··· 88 88 " " __LL "%0, %1 # set_bit \n" 89 89 " " __INS "%0, %3, %2, 1 \n" 90 90 " " __SC "%0, %1 \n" 91 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) 91 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) 92 92 : "ir" (bit), "r" (~0)); 93 93 } while (unlikely(!temp)); 94 94 #endif /* CONFIG_CPU_MIPSR2 */ ··· 100 100 " or %0, %2 \n" 101 101 " " __SC "%0, %1 \n" 102 102 " .set mips0 \n" 103 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) 103 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) 104 104 : "ir" (1UL << bit)); 105 105 } while (unlikely(!temp)); 106 106 } else ··· 131 131 " " __SC "%0, %1 \n" 132 132 " beqzl %0, 1b \n" 133 133 " .set mips0 \n" 134 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) 134 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) 135 135 : "ir" (~(1UL << bit))); 136 136 #ifdef CONFIG_CPU_MIPSR2 137 137 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { ··· 140 140 " " __LL "%0, %1 # clear_bit \n" 141 141 " " __INS "%0, $0, %2, 1 \n" 142 142 " " __SC "%0, %1 \n" 143 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) 143 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) 144 144 : "ir" (bit)); 145 145 } while (unlikely(!temp)); 146 146 #endif /* CONFIG_CPU_MIPSR2 */ ··· 152 152 " and %0, %2 \n" 153 153 " " __SC "%0, %1 \n" 154 154 " .set mips0 \n" 155 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) 155 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) 156 156 : "ir" (~(1UL << bit))); 157 157 } while (unlikely(!temp)); 158 158 } else ··· 197 197 " " __SC "%0, %1 \n" 198 198 " beqzl %0, 1b \n" 199 199 " .set mips0 \n" 200 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) 200 + : "=&r" (temp), "+" 
GCC_OFF_SMALL_ASM() (*m) 201 201 : "ir" (1UL << bit)); 202 202 } else if (kernel_uses_llsc) { 203 203 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); ··· 210 210 " xor %0, %2 \n" 211 211 " " __SC "%0, %1 \n" 212 212 " .set mips0 \n" 213 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) 213 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) 214 214 : "ir" (1UL << bit)); 215 215 } while (unlikely(!temp)); 216 216 } else ··· 245 245 " beqzl %2, 1b \n" 246 246 " and %2, %0, %3 \n" 247 247 " .set mips0 \n" 248 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) 248 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) 249 249 : "r" (1UL << bit) 250 250 : "memory"); 251 251 } else if (kernel_uses_llsc) { ··· 259 259 " or %2, %0, %3 \n" 260 260 " " __SC "%2, %1 \n" 261 261 " .set mips0 \n" 262 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) 262 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) 263 263 : "r" (1UL << bit) 264 264 : "memory"); 265 265 } while (unlikely(!res)); ··· 313 313 " or %2, %0, %3 \n" 314 314 " " __SC "%2, %1 \n" 315 315 " .set mips0 \n" 316 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) 316 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) 317 317 : "r" (1UL << bit) 318 318 : "memory"); 319 319 } while (unlikely(!res)); ··· 355 355 " beqzl %2, 1b \n" 356 356 " and %2, %0, %3 \n" 357 357 " .set mips0 \n" 358 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) 358 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) 359 359 : "r" (1UL << bit) 360 360 : "memory"); 361 361 #ifdef CONFIG_CPU_MIPSR2 ··· 369 369 " " __EXT "%2, %0, %3, 1 \n" 370 370 " " __INS "%0, $0, %3, 1 \n" 371 371 " " __SC "%0, %1 \n" 372 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) 372 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) 373 373 : "ir" (bit) 374 374 : "memory"); 375 375 } while (unlikely(!temp)); ··· 386 386 " xor %2, %3 \n" 387 387 " " __SC "%2, %1 \n" 388 388 " .set mips0 \n" 389 - : 
"=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) 389 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) 390 390 : "r" (1UL << bit) 391 391 : "memory"); 392 392 } while (unlikely(!res)); ··· 428 428 " beqzl %2, 1b \n" 429 429 " and %2, %0, %3 \n" 430 430 " .set mips0 \n" 431 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) 431 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) 432 432 : "r" (1UL << bit) 433 433 : "memory"); 434 434 } else if (kernel_uses_llsc) { ··· 442 442 " xor %2, %0, %3 \n" 443 443 " " __SC "\t%2, %1 \n" 444 444 " .set mips0 \n" 445 - : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) 445 + : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) 446 446 : "r" (1UL << bit) 447 447 : "memory"); 448 448 } while (unlikely(!res));
+12 -12
arch/mips/include/asm/cmpxchg.h
··· 31 31 " sc %2, %1 \n" 32 32 " beqzl %2, 1b \n" 33 33 " .set mips0 \n" 34 - : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy) 35 - : GCC_OFF12_ASM() (*m), "Jr" (val) 34 + : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy) 35 + : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) 36 36 : "memory"); 37 37 } else if (kernel_uses_llsc) { 38 38 unsigned long dummy; ··· 46 46 " .set arch=r4000 \n" 47 47 " sc %2, %1 \n" 48 48 " .set mips0 \n" 49 - : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), 49 + : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), 50 50 "=&r" (dummy) 51 - : GCC_OFF12_ASM() (*m), "Jr" (val) 51 + : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) 52 52 : "memory"); 53 53 } while (unlikely(!dummy)); 54 54 } else { ··· 82 82 " scd %2, %1 \n" 83 83 " beqzl %2, 1b \n" 84 84 " .set mips0 \n" 85 - : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy) 86 - : GCC_OFF12_ASM() (*m), "Jr" (val) 85 + : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy) 86 + : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) 87 87 : "memory"); 88 88 } else if (kernel_uses_llsc) { 89 89 unsigned long dummy; ··· 95 95 " move %2, %z4 \n" 96 96 " scd %2, %1 \n" 97 97 " .set mips0 \n" 98 - : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), 98 + : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), 99 99 "=&r" (dummy) 100 - : GCC_OFF12_ASM() (*m), "Jr" (val) 100 + : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) 101 101 : "memory"); 102 102 } while (unlikely(!dummy)); 103 103 } else { ··· 158 158 " beqzl $1, 1b \n" \ 159 159 "2: \n" \ 160 160 " .set pop \n" \ 161 - : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \ 162 - : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new) \ 161 + : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \ 162 + : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \ 163 163 : "memory"); \ 164 164 } else if (kernel_uses_llsc) { \ 165 165 __asm__ __volatile__( \ ··· 175 175 " beqz $1, 1b \n" \ 176 176 " .set pop \n" \ 177 177 "2: \n" \ 178 - : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \ 179 - : GCC_OFF12_ASM() (*m), 
"Jr" (old), "Jr" (new) \ 178 + : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \ 179 + : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \ 180 180 : "memory"); \ 181 181 } else { \ 182 182 unsigned long __flags; \
+2 -2
arch/mips/include/asm/compiler.h
··· 17 17 #endif 18 18 19 19 #ifndef CONFIG_CPU_MICROMIPS 20 - #define GCC_OFF12_ASM() "R" 20 + #define GCC_OFF_SMALL_ASM() "R" 21 21 #elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9) 22 - #define GCC_OFF12_ASM() "ZC" 22 + #define GCC_OFF_SMALL_ASM() "ZC" 23 23 #else 24 24 #error "microMIPS compilation unsupported with GCC older than 4.9" 25 25 #endif
+2 -2
arch/mips/include/asm/edac.h
··· 26 26 " sc %0, %1 \n" 27 27 " beqz %0, 1b \n" 28 28 " .set mips0 \n" 29 - : "=&r" (temp), "=" GCC_OFF12_ASM() (*virt_addr) 30 - : GCC_OFF12_ASM() (*virt_addr)); 29 + : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*virt_addr) 30 + : GCC_OFF_SMALL_ASM() (*virt_addr)); 31 31 32 32 virt_addr++; 33 33 }
+8 -8
arch/mips/include/asm/futex.h
··· 45 45 " "__UA_ADDR "\t2b, 4b \n" \ 46 46 " .previous \n" \ 47 47 : "=r" (ret), "=&r" (oldval), \ 48 - "=" GCC_OFF12_ASM() (*uaddr) \ 49 - : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg), \ 48 + "=" GCC_OFF_SMALL_ASM() (*uaddr) \ 49 + : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \ 50 50 "i" (-EFAULT) \ 51 51 : "memory"); \ 52 52 } else if (cpu_has_llsc) { \ ··· 74 74 " "__UA_ADDR "\t2b, 4b \n" \ 75 75 " .previous \n" \ 76 76 : "=r" (ret), "=&r" (oldval), \ 77 - "=" GCC_OFF12_ASM() (*uaddr) \ 78 - : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg), \ 77 + "=" GCC_OFF_SMALL_ASM() (*uaddr) \ 78 + : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \ 79 79 "i" (-EFAULT) \ 80 80 : "memory"); \ 81 81 } else \ ··· 174 174 " "__UA_ADDR "\t1b, 4b \n" 175 175 " "__UA_ADDR "\t2b, 4b \n" 176 176 " .previous \n" 177 - : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr) 178 - : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), 177 + : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr) 178 + : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), 179 179 "i" (-EFAULT) 180 180 : "memory"); 181 181 } else if (cpu_has_llsc) { ··· 203 203 " "__UA_ADDR "\t1b, 4b \n" 204 204 " "__UA_ADDR "\t2b, 4b \n" 205 205 " .previous \n" 206 - : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr) 207 - : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), 206 + : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr) 207 + : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), 208 208 "i" (-EFAULT) 209 209 : "memory"); 210 210 } else
+12 -12
arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h
··· 85 85 " "__beqz"%0, 1b \n" 86 86 " nop \n" 87 87 " .set pop \n" 88 - : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) 89 - : "ir" (~mask), "ir" (value), GCC_OFF12_ASM() (*addr)); 88 + : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) 89 + : "ir" (~mask), "ir" (value), GCC_OFF_SMALL_ASM() (*addr)); 90 90 } 91 91 92 92 /* ··· 106 106 " "__beqz"%0, 1b \n" 107 107 " nop \n" 108 108 " .set pop \n" 109 - : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) 110 - : "ir" (mask), GCC_OFF12_ASM() (*addr)); 109 + : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) 110 + : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr)); 111 111 } 112 112 113 113 /* ··· 127 127 " "__beqz"%0, 1b \n" 128 128 " nop \n" 129 129 " .set pop \n" 130 - : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) 131 - : "ir" (~mask), GCC_OFF12_ASM() (*addr)); 130 + : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) 131 + : "ir" (~mask), GCC_OFF_SMALL_ASM() (*addr)); 132 132 } 133 133 134 134 /* ··· 148 148 " "__beqz"%0, 1b \n" 149 149 " nop \n" 150 150 " .set pop \n" 151 - : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) 152 - : "ir" (mask), GCC_OFF12_ASM() (*addr)); 151 + : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) 152 + : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr)); 153 153 } 154 154 155 155 /* ··· 220 220 " .set arch=r4000 \n" \ 221 221 "1: ll %0, %1 #custom_read_reg32 \n" \ 222 222 " .set pop \n" \ 223 - : "=r" (tmp), "=" GCC_OFF12_ASM() (*address) \ 224 - : GCC_OFF12_ASM() (*address)) 223 + : "=r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address) \ 224 + : GCC_OFF_SMALL_ASM() (*address)) 225 225 226 226 #define custom_write_reg32(address, tmp) \ 227 227 __asm__ __volatile__( \ ··· 231 231 " "__beqz"%0, 1b \n" \ 232 232 " nop \n" \ 233 233 " .set pop \n" \ 234 - : "=&r" (tmp), "=" GCC_OFF12_ASM() (*address) \ 235 - : "0" (tmp), GCC_OFF12_ASM() (*address)) 234 + : "=&r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address) \ 235 + : "0" (tmp), GCC_OFF_SMALL_ASM() (*address)) 236 236 237 237 #endif /* __ASM_REGOPS_H__ */
+1 -1
arch/mips/include/asm/octeon/cvmx-cmd-queue.h
··· 275 275 " lbu %[ticket], %[now_serving]\n" 276 276 "4:\n" 277 277 ".set pop\n" : 278 - [ticket_ptr] "=" GCC_OFF12_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]), 278 + [ticket_ptr] "=" GCC_OFF_SMALL_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]), 279 279 [now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp), 280 280 [my_ticket] "=r"(my_ticket) 281 281 );
+24 -24
arch/mips/include/asm/spinlock.h
··· 89 89 " subu %[ticket], %[ticket], 1 \n" 90 90 " .previous \n" 91 91 " .set pop \n" 92 - : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), 92 + : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), 93 93 [serving_now_ptr] "+m" (lock->h.serving_now), 94 94 [ticket] "=&r" (tmp), 95 95 [my_ticket] "=&r" (my_ticket) ··· 122 122 " subu %[ticket], %[ticket], 1 \n" 123 123 " .previous \n" 124 124 " .set pop \n" 125 - : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), 125 + : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), 126 126 [serving_now_ptr] "+m" (lock->h.serving_now), 127 127 [ticket] "=&r" (tmp), 128 128 [my_ticket] "=&r" (my_ticket) ··· 164 164 " li %[ticket], 0 \n" 165 165 " .previous \n" 166 166 " .set pop \n" 167 - : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), 167 + : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), 168 168 [ticket] "=&r" (tmp), 169 169 [my_ticket] "=&r" (tmp2), 170 170 [now_serving] "=&r" (tmp3) ··· 188 188 " li %[ticket], 0 \n" 189 189 " .previous \n" 190 190 " .set pop \n" 191 - : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), 191 + : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), 192 192 [ticket] "=&r" (tmp), 193 193 [my_ticket] "=&r" (tmp2), 194 194 [now_serving] "=&r" (tmp3) ··· 235 235 " beqzl %1, 1b \n" 236 236 " nop \n" 237 237 " .set reorder \n" 238 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) 239 - : GCC_OFF12_ASM() (rw->lock) 238 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) 239 + : GCC_OFF_SMALL_ASM() (rw->lock) 240 240 : "memory"); 241 241 } else { 242 242 do { ··· 245 245 " bltz %1, 1b \n" 246 246 " addu %1, 1 \n" 247 247 "2: sc %1, %0 \n" 248 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) 249 - : GCC_OFF12_ASM() (rw->lock) 248 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) 249 + : GCC_OFF_SMALL_ASM() (rw->lock) 250 250 : "memory"); 251 251 } while (unlikely(!tmp)); 252 252 } ··· 269 269 " sub %1, 1 \n" 270 270 " sc %1, %0 \n" 271 271 " beqzl %1, 1b \n" 272 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" 
(tmp) 273 - : GCC_OFF12_ASM() (rw->lock) 272 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) 273 + : GCC_OFF_SMALL_ASM() (rw->lock) 274 274 : "memory"); 275 275 } else { 276 276 do { ··· 278 278 "1: ll %1, %2 # arch_read_unlock \n" 279 279 " sub %1, 1 \n" 280 280 " sc %1, %0 \n" 281 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) 282 - : GCC_OFF12_ASM() (rw->lock) 281 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) 282 + : GCC_OFF_SMALL_ASM() (rw->lock) 283 283 : "memory"); 284 284 } while (unlikely(!tmp)); 285 285 } ··· 299 299 " beqzl %1, 1b \n" 300 300 " nop \n" 301 301 " .set reorder \n" 302 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) 303 - : GCC_OFF12_ASM() (rw->lock) 302 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) 303 + : GCC_OFF_SMALL_ASM() (rw->lock) 304 304 : "memory"); 305 305 } else { 306 306 do { ··· 309 309 " bnez %1, 1b \n" 310 310 " lui %1, 0x8000 \n" 311 311 "2: sc %1, %0 \n" 312 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) 313 - : GCC_OFF12_ASM() (rw->lock) 312 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) 313 + : GCC_OFF_SMALL_ASM() (rw->lock) 314 314 : "memory"); 315 315 } while (unlikely(!tmp)); 316 316 } ··· 349 349 __WEAK_LLSC_MB 350 350 " li %2, 1 \n" 351 351 "2: \n" 352 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) 353 - : GCC_OFF12_ASM() (rw->lock) 352 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) 353 + : GCC_OFF_SMALL_ASM() (rw->lock) 354 354 : "memory"); 355 355 } else { 356 356 __asm__ __volatile__( ··· 366 366 __WEAK_LLSC_MB 367 367 " li %2, 1 \n" 368 368 "2: \n" 369 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) 370 - : GCC_OFF12_ASM() (rw->lock) 369 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) 370 + : GCC_OFF_SMALL_ASM() (rw->lock) 371 371 : "memory"); 372 372 } 373 373 ··· 393 393 " li %2, 1 \n" 394 394 " .set reorder \n" 395 395 "2: \n" 396 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) 397 - : GCC_OFF12_ASM() (rw->lock) 
396 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) 397 + : GCC_OFF_SMALL_ASM() (rw->lock) 398 398 : "memory"); 399 399 } else { 400 400 do { ··· 406 406 " sc %1, %0 \n" 407 407 " li %2, 1 \n" 408 408 "2: \n" 409 - : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), 409 + : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), 410 410 "=&r" (ret) 411 - : GCC_OFF12_ASM() (rw->lock) 411 + : GCC_OFF_SMALL_ASM() (rw->lock) 412 412 : "memory"); 413 413 } while (unlikely(!tmp)); 414 414