Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

Enable a suitable ISA for the assembler around ll/sc so that code builds even for processors that don't support the instructions. Plus minor formatting fixes.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>

Authored by Maciej W. Rozycki; committed by Ralf Baechle.
aac8aa77 fded2e50

+118 -32
+8 -4
arch/mips/kernel/semaphore.c
··· 42 42 43 43 if (cpu_has_llsc && R10000_LLSC_WAR) { 44 44 __asm__ __volatile__( 45 - "1: ll %0, %2 \n" 45 + " .set mips2 \n" 46 + "1: ll %0, %2 # __sem_update_count \n" 46 47 " sra %1, %0, 31 \n" 47 48 " not %1 \n" 48 49 " and %1, %0, %1 \n" 49 - " add %1, %1, %3 \n" 50 + " addu %1, %1, %3 \n" 50 51 " sc %1, %2 \n" 51 52 " beqzl %1, 1b \n" 53 + " .set mips0 \n" 52 54 : "=&r" (old_count), "=&r" (tmp), "=m" (sem->count) 53 55 : "r" (incr), "m" (sem->count)); 54 56 } else if (cpu_has_llsc) { 55 57 __asm__ __volatile__( 56 - "1: ll %0, %2 \n" 58 + " .set mips2 \n" 59 + "1: ll %0, %2 # __sem_update_count \n" 57 60 " sra %1, %0, 31 \n" 58 61 " not %1 \n" 59 62 " and %1, %0, %1 \n" 60 - " add %1, %1, %3 \n" 63 + " addu %1, %1, %3 \n" 61 64 " sc %1, %2 \n" 62 65 " beqz %1, 1b \n" 66 + " .set mips0 \n" 63 67 : "=&r" (old_count), "=&r" (tmp), "=m" (sem->count) 64 68 : "r" (incr), "m" (sem->count)); 65 69 } else {
+40
include/asm-mips/atomic.h
··· 62 62 unsigned long temp; 63 63 64 64 __asm__ __volatile__( 65 + " .set mips2 \n" 65 66 "1: ll %0, %1 # atomic_add \n" 66 67 " addu %0, %2 \n" 67 68 " sc %0, %1 \n" 68 69 " beqzl %0, 1b \n" 70 + " .set mips0 \n" 69 71 : "=&r" (temp), "=m" (v->counter) 70 72 : "Ir" (i), "m" (v->counter)); 71 73 } else if (cpu_has_llsc) { 72 74 unsigned long temp; 73 75 74 76 __asm__ __volatile__( 77 + " .set mips2 \n" 75 78 "1: ll %0, %1 # atomic_add \n" 76 79 " addu %0, %2 \n" 77 80 " sc %0, %1 \n" 78 81 " beqz %0, 1b \n" 82 + " .set mips0 \n" 79 83 : "=&r" (temp), "=m" (v->counter) 80 84 : "Ir" (i), "m" (v->counter)); 81 85 } else { ··· 104 100 unsigned long temp; 105 101 106 102 __asm__ __volatile__( 103 + " .set mips2 \n" 107 104 "1: ll %0, %1 # atomic_sub \n" 108 105 " subu %0, %2 \n" 109 106 " sc %0, %1 \n" 110 107 " beqzl %0, 1b \n" 108 + " .set mips0 \n" 111 109 : "=&r" (temp), "=m" (v->counter) 112 110 : "Ir" (i), "m" (v->counter)); 113 111 } else if (cpu_has_llsc) { 114 112 unsigned long temp; 115 113 116 114 __asm__ __volatile__( 115 + " .set mips2 \n" 117 116 "1: ll %0, %1 # atomic_sub \n" 118 117 " subu %0, %2 \n" 119 118 " sc %0, %1 \n" 120 119 " beqz %0, 1b \n" 120 + " .set mips0 \n" 121 121 : "=&r" (temp), "=m" (v->counter) 122 122 : "Ir" (i), "m" (v->counter)); 123 123 } else { ··· 144 136 unsigned long temp; 145 137 146 138 __asm__ __volatile__( 139 + " .set mips2 \n" 147 140 "1: ll %1, %2 # atomic_add_return \n" 148 141 " addu %0, %1, %3 \n" 149 142 " sc %0, %2 \n" 150 143 " beqzl %0, 1b \n" 151 144 " addu %0, %1, %3 \n" 152 145 " sync \n" 146 + " .set mips0 \n" 153 147 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 154 148 : "Ir" (i), "m" (v->counter) 155 149 : "memory"); ··· 159 149 unsigned long temp; 160 150 161 151 __asm__ __volatile__( 152 + " .set mips2 \n" 162 153 "1: ll %1, %2 # atomic_add_return \n" 163 154 " addu %0, %1, %3 \n" 164 155 " sc %0, %2 \n" 165 156 " beqz %0, 1b \n" 166 157 " addu %0, %1, %3 \n" 167 158 " sync \n" 159 + " .set mips0 
\n" 168 160 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 169 161 : "Ir" (i), "m" (v->counter) 170 162 : "memory"); ··· 191 179 unsigned long temp; 192 180 193 181 __asm__ __volatile__( 182 + " .set mips2 \n" 194 183 "1: ll %1, %2 # atomic_sub_return \n" 195 184 " subu %0, %1, %3 \n" 196 185 " sc %0, %2 \n" 197 186 " beqzl %0, 1b \n" 198 187 " subu %0, %1, %3 \n" 199 188 " sync \n" 189 + " .set mips0 \n" 200 190 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 201 191 : "Ir" (i), "m" (v->counter) 202 192 : "memory"); ··· 206 192 unsigned long temp; 207 193 208 194 __asm__ __volatile__( 195 + " .set mips2 \n" 209 196 "1: ll %1, %2 # atomic_sub_return \n" 210 197 " subu %0, %1, %3 \n" 211 198 " sc %0, %2 \n" 212 199 " beqz %0, 1b \n" 213 200 " subu %0, %1, %3 \n" 214 201 " sync \n" 202 + " .set mips0 \n" 215 203 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 216 204 : "Ir" (i), "m" (v->counter) 217 205 : "memory"); ··· 245 229 unsigned long temp; 246 230 247 231 __asm__ __volatile__( 232 + " .set mips2 \n" 248 233 "1: ll %1, %2 # atomic_sub_if_positive\n" 249 234 " subu %0, %1, %3 \n" 250 235 " bltz %0, 1f \n" ··· 253 236 " beqzl %0, 1b \n" 254 237 " sync \n" 255 238 "1: \n" 239 + " .set mips0 \n" 256 240 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 257 241 : "Ir" (i), "m" (v->counter) 258 242 : "memory"); ··· 261 243 unsigned long temp; 262 244 263 245 __asm__ __volatile__( 246 + " .set mips2 \n" 264 247 "1: ll %1, %2 # atomic_sub_if_positive\n" 265 248 " subu %0, %1, %3 \n" 266 249 " bltz %0, 1f \n" ··· 269 250 " beqz %0, 1b \n" 270 251 " sync \n" 271 252 "1: \n" 253 + " .set mips0 \n" 272 254 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 273 255 : "Ir" (i), "m" (v->counter) 274 256 : "memory"); ··· 387 367 unsigned long temp; 388 368 389 369 __asm__ __volatile__( 370 + " .set mips3 \n" 390 371 "1: lld %0, %1 # atomic64_add \n" 391 372 " addu %0, %2 \n" 392 373 " scd %0, %1 \n" 393 374 " beqzl %0, 1b \n" 375 + " .set mips0 \n" 394 376 : "=&r" 
(temp), "=m" (v->counter) 395 377 : "Ir" (i), "m" (v->counter)); 396 378 } else if (cpu_has_llsc) { 397 379 unsigned long temp; 398 380 399 381 __asm__ __volatile__( 382 + " .set mips3 \n" 400 383 "1: lld %0, %1 # atomic64_add \n" 401 384 " addu %0, %2 \n" 402 385 " scd %0, %1 \n" 403 386 " beqz %0, 1b \n" 387 + " .set mips0 \n" 404 388 : "=&r" (temp), "=m" (v->counter) 405 389 : "Ir" (i), "m" (v->counter)); 406 390 } else { ··· 429 405 unsigned long temp; 430 406 431 407 __asm__ __volatile__( 408 + " .set mips3 \n" 432 409 "1: lld %0, %1 # atomic64_sub \n" 433 410 " subu %0, %2 \n" 434 411 " scd %0, %1 \n" 435 412 " beqzl %0, 1b \n" 413 + " .set mips0 \n" 436 414 : "=&r" (temp), "=m" (v->counter) 437 415 : "Ir" (i), "m" (v->counter)); 438 416 } else if (cpu_has_llsc) { 439 417 unsigned long temp; 440 418 441 419 __asm__ __volatile__( 420 + " .set mips3 \n" 442 421 "1: lld %0, %1 # atomic64_sub \n" 443 422 " subu %0, %2 \n" 444 423 " scd %0, %1 \n" 445 424 " beqz %0, 1b \n" 425 + " .set mips0 \n" 446 426 : "=&r" (temp), "=m" (v->counter) 447 427 : "Ir" (i), "m" (v->counter)); 448 428 } else { ··· 469 441 unsigned long temp; 470 442 471 443 __asm__ __volatile__( 444 + " .set mips3 \n" 472 445 "1: lld %1, %2 # atomic64_add_return \n" 473 446 " addu %0, %1, %3 \n" 474 447 " scd %0, %2 \n" 475 448 " beqzl %0, 1b \n" 476 449 " addu %0, %1, %3 \n" 477 450 " sync \n" 451 + " .set mips0 \n" 478 452 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 479 453 : "Ir" (i), "m" (v->counter) 480 454 : "memory"); ··· 484 454 unsigned long temp; 485 455 486 456 __asm__ __volatile__( 457 + " .set mips3 \n" 487 458 "1: lld %1, %2 # atomic64_add_return \n" 488 459 " addu %0, %1, %3 \n" 489 460 " scd %0, %2 \n" 490 461 " beqz %0, 1b \n" 491 462 " addu %0, %1, %3 \n" 492 463 " sync \n" 464 + " .set mips0 \n" 493 465 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 494 466 : "Ir" (i), "m" (v->counter) 495 467 : "memory"); ··· 516 484 unsigned long temp; 517 485 518 486 __asm__ 
__volatile__( 487 + " .set mips3 \n" 519 488 "1: lld %1, %2 # atomic64_sub_return \n" 520 489 " subu %0, %1, %3 \n" 521 490 " scd %0, %2 \n" 522 491 " beqzl %0, 1b \n" 523 492 " subu %0, %1, %3 \n" 524 493 " sync \n" 494 + " .set mips0 \n" 525 495 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 526 496 : "Ir" (i), "m" (v->counter) 527 497 : "memory"); ··· 531 497 unsigned long temp; 532 498 533 499 __asm__ __volatile__( 500 + " .set mips3 \n" 534 501 "1: lld %1, %2 # atomic64_sub_return \n" 535 502 " subu %0, %1, %3 \n" 536 503 " scd %0, %2 \n" 537 504 " beqz %0, 1b \n" 538 505 " subu %0, %1, %3 \n" 539 506 " sync \n" 507 + " .set mips0 \n" 540 508 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 541 509 : "Ir" (i), "m" (v->counter) 542 510 : "memory"); ··· 570 534 unsigned long temp; 571 535 572 536 __asm__ __volatile__( 537 + " .set mips3 \n" 573 538 "1: lld %1, %2 # atomic64_sub_if_positive\n" 574 539 " dsubu %0, %1, %3 \n" 575 540 " bltz %0, 1f \n" ··· 578 541 " beqzl %0, 1b \n" 579 542 " sync \n" 580 543 "1: \n" 544 + " .set mips0 \n" 581 545 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 582 546 : "Ir" (i), "m" (v->counter) 583 547 : "memory"); ··· 586 548 unsigned long temp; 587 549 588 550 __asm__ __volatile__( 551 + " .set mips3 \n" 589 552 "1: lld %1, %2 # atomic64_sub_if_positive\n" 590 553 " dsubu %0, %1, %3 \n" 591 554 " bltz %0, 1f \n" ··· 594 555 " beqz %0, 1b \n" 595 556 " sync \n" 596 557 "1: \n" 558 + " .set mips0 \n" 597 559 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 598 560 : "Ir" (i), "m" (v->counter) 599 561 : "memory");
+50 -24
include/asm-mips/bitops.h
··· 18 18 #if (_MIPS_SZLONG == 32) 19 19 #define SZLONG_LOG 5 20 20 #define SZLONG_MASK 31UL 21 - #define __LL "ll " 22 - #define __SC "sc " 21 + #define __LL "ll " 22 + #define __SC "sc " 23 + #define __SET_MIPS ".set mips2 " 23 24 #define cpu_to_lelongp(x) cpu_to_le32p((__u32 *) (x)) 24 25 #elif (_MIPS_SZLONG == 64) 25 26 #define SZLONG_LOG 6 26 27 #define SZLONG_MASK 63UL 27 - #define __LL "lld " 28 - #define __SC "scd " 28 + #define __LL "lld " 29 + #define __SC "scd " 30 + #define __SET_MIPS ".set mips3 " 29 31 #define cpu_to_lelongp(x) cpu_to_le64p((__u64 *) (x)) 30 32 #endif 31 33 ··· 74 72 75 73 if (cpu_has_llsc && R10000_LLSC_WAR) { 76 74 __asm__ __volatile__( 75 + " " __SET_MIPS " \n" 77 76 "1: " __LL "%0, %1 # set_bit \n" 78 77 " or %0, %2 \n" 79 - " "__SC "%0, %1 \n" 78 + " " __SC "%0, %1 \n" 80 79 " beqzl %0, 1b \n" 80 + " .set mips0 \n" 81 81 : "=&r" (temp), "=m" (*m) 82 82 : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m)); 83 83 } else if (cpu_has_llsc) { 84 84 __asm__ __volatile__( 85 + " " __SET_MIPS " \n" 85 86 "1: " __LL "%0, %1 # set_bit \n" 86 87 " or %0, %2 \n" 87 - " "__SC "%0, %1 \n" 88 + " " __SC "%0, %1 \n" 88 89 " beqz %0, 1b \n" 90 + " .set mips0 \n" 89 91 : "=&r" (temp), "=m" (*m) 90 92 : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m)); 91 93 } else { ··· 138 132 139 133 if (cpu_has_llsc && R10000_LLSC_WAR) { 140 134 __asm__ __volatile__( 135 + " " __SET_MIPS " \n" 141 136 "1: " __LL "%0, %1 # clear_bit \n" 142 137 " and %0, %2 \n" 143 138 " " __SC "%0, %1 \n" 144 139 " beqzl %0, 1b \n" 140 + " .set mips0 \n" 145 141 : "=&r" (temp), "=m" (*m) 146 142 : "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m)); 147 143 } else if (cpu_has_llsc) { 148 144 __asm__ __volatile__( 145 + " " __SET_MIPS " \n" 149 146 "1: " __LL "%0, %1 # clear_bit \n" 150 147 " and %0, %2 \n" 151 148 " " __SC "%0, %1 \n" 152 149 " beqz %0, 1b \n" 150 + " .set mips0 \n" 153 151 : "=&r" (temp), "=m" (*m) 154 152 : "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m)); 155 153 } else { ··· 
201 191 unsigned long temp; 202 192 203 193 __asm__ __volatile__( 194 + " " __SET_MIPS " \n" 204 195 "1: " __LL "%0, %1 # change_bit \n" 205 196 " xor %0, %2 \n" 206 - " "__SC "%0, %1 \n" 197 + " " __SC "%0, %1 \n" 207 198 " beqzl %0, 1b \n" 199 + " .set mips0 \n" 208 200 : "=&r" (temp), "=m" (*m) 209 201 : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m)); 210 202 } else if (cpu_has_llsc) { ··· 214 202 unsigned long temp; 215 203 216 204 __asm__ __volatile__( 205 + " " __SET_MIPS " \n" 217 206 "1: " __LL "%0, %1 # change_bit \n" 218 207 " xor %0, %2 \n" 219 - " "__SC "%0, %1 \n" 208 + " " __SC "%0, %1 \n" 220 209 " beqz %0, 1b \n" 210 + " .set mips0 \n" 221 211 : "=&r" (temp), "=m" (*m) 222 212 : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m)); 223 213 } else { ··· 267 253 unsigned long temp, res; 268 254 269 255 __asm__ __volatile__( 256 + " " __SET_MIPS " \n" 270 257 "1: " __LL "%0, %1 # test_and_set_bit \n" 271 258 " or %2, %0, %3 \n" 272 259 " " __SC "%2, %1 \n" 273 260 " beqzl %2, 1b \n" 274 261 " and %2, %0, %3 \n" 275 262 #ifdef CONFIG_SMP 276 - "sync \n" 263 + " sync \n" 277 264 #endif 265 + " .set mips0 \n" 278 266 : "=&r" (temp), "=m" (*m), "=&r" (res) 279 267 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 280 268 : "memory"); ··· 287 271 unsigned long temp, res; 288 272 289 273 __asm__ __volatile__( 290 - " .set noreorder # test_and_set_bit \n" 291 - "1: " __LL "%0, %1 \n" 274 + " .set push \n" 275 + " .set noreorder \n" 276 + " " __SET_MIPS " \n" 277 + "1: " __LL "%0, %1 # test_and_set_bit \n" 292 278 " or %2, %0, %3 \n" 293 279 " " __SC "%2, %1 \n" 294 280 " beqz %2, 1b \n" 295 281 " and %2, %0, %3 \n" 296 282 #ifdef CONFIG_SMP 297 - "sync \n" 283 + " sync \n" 298 284 #endif 299 - ".set\treorder" 285 + " .set pop \n" 300 286 : "=&r" (temp), "=m" (*m), "=&r" (res) 301 287 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 302 288 : "memory"); ··· 361 343 unsigned long temp, res; 362 344 363 345 __asm__ __volatile__( 346 + " " __SET_MIPS " \n" 364 347 "1: " __LL "%0, %1 # 
test_and_clear_bit \n" 365 348 " or %2, %0, %3 \n" 366 349 " xor %2, %3 \n" 367 - __SC "%2, %1 \n" 350 + " " __SC "%2, %1 \n" 368 351 " beqzl %2, 1b \n" 369 352 " and %2, %0, %3 \n" 370 353 #ifdef CONFIG_SMP 371 354 " sync \n" 372 355 #endif 356 + " .set mips0 \n" 373 357 : "=&r" (temp), "=m" (*m), "=&r" (res) 374 358 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 375 359 : "memory"); ··· 382 362 unsigned long temp, res; 383 363 384 364 __asm__ __volatile__( 385 - " .set noreorder # test_and_clear_bit \n" 386 - "1: " __LL "%0, %1 \n" 365 + " .set push \n" 366 + " .set noreorder \n" 367 + " " __SET_MIPS " \n" 368 + "1: " __LL "%0, %1 # test_and_clear_bit \n" 387 369 " or %2, %0, %3 \n" 388 370 " xor %2, %3 \n" 389 - __SC "%2, %1 \n" 371 + " " __SC "%2, %1 \n" 390 372 " beqz %2, 1b \n" 391 373 " and %2, %0, %3 \n" 392 374 #ifdef CONFIG_SMP 393 375 " sync \n" 394 376 #endif 395 - " .set reorder \n" 377 + " .set pop \n" 396 378 : "=&r" (temp), "=m" (*m), "=&r" (res) 397 379 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 398 380 : "memory"); ··· 457 435 unsigned long temp, res; 458 436 459 437 __asm__ __volatile__( 460 - "1: " __LL " %0, %1 # test_and_change_bit \n" 438 + " " __SET_MIPS " \n" 439 + "1: " __LL "%0, %1 # test_and_change_bit \n" 461 440 " xor %2, %0, %3 \n" 462 - " "__SC "%2, %1 \n" 441 + " " __SC "%2, %1 \n" 463 442 " beqzl %2, 1b \n" 464 443 " and %2, %0, %3 \n" 465 444 #ifdef CONFIG_SMP 466 445 " sync \n" 467 446 #endif 447 + " .set mips0 \n" 468 448 : "=&r" (temp), "=m" (*m), "=&r" (res) 469 449 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 470 450 : "memory"); ··· 477 453 unsigned long temp, res; 478 454 479 455 __asm__ __volatile__( 480 - " .set noreorder # test_and_change_bit \n" 481 - "1: " __LL " %0, %1 \n" 456 + " .set push \n" 457 + " .set noreorder \n" 458 + " " __SET_MIPS " \n" 459 + "1: " __LL "%0, %1 # test_and_change_bit \n" 482 460 " xor %2, %0, %3 \n" 483 - " "__SC "\t%2, %1 \n" 461 + " " __SC "\t%2, %1 \n" 484 462 " beqz %2, 1b \n" 485 463 " and 
%2, %0, %3 \n" 486 464 #ifdef CONFIG_SMP 487 465 " sync \n" 488 466 #endif 489 - " .set reorder \n" 467 + " .set pop \n" 490 468 : "=&r" (temp), "=m" (*m), "=&r" (res) 491 469 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 492 470 : "memory");
+20 -4
include/asm-mips/system.h
··· 176 176 unsigned long dummy; 177 177 178 178 __asm__ __volatile__( 179 + " .set mips2 \n" 179 180 "1: ll %0, %3 # xchg_u32 \n" 180 181 " move %2, %z4 \n" 181 182 " sc %2, %1 \n" ··· 185 184 #ifdef CONFIG_SMP 186 185 " sync \n" 187 186 #endif 187 + " .set mips0 \n" 188 188 : "=&r" (retval), "=m" (*m), "=&r" (dummy) 189 189 : "R" (*m), "Jr" (val) 190 190 : "memory"); ··· 193 191 unsigned long dummy; 194 192 195 193 __asm__ __volatile__( 194 + " .set mips2 \n" 196 195 "1: ll %0, %3 # xchg_u32 \n" 197 196 " move %2, %z4 \n" 198 197 " sc %2, %1 \n" ··· 201 198 #ifdef CONFIG_SMP 202 199 " sync \n" 203 200 #endif 201 + " .set mips0 \n" 204 202 : "=&r" (retval), "=m" (*m), "=&r" (dummy) 205 203 : "R" (*m), "Jr" (val) 206 204 : "memory"); ··· 226 222 unsigned long dummy; 227 223 228 224 __asm__ __volatile__( 225 + " .set mips3 \n" 229 226 "1: lld %0, %3 # xchg_u64 \n" 230 227 " move %2, %z4 \n" 231 228 " scd %2, %1 \n" ··· 235 230 #ifdef CONFIG_SMP 236 231 " sync \n" 237 232 #endif 233 + " .set mips0 \n" 238 234 : "=&r" (retval), "=m" (*m), "=&r" (dummy) 239 235 : "R" (*m), "Jr" (val) 240 236 : "memory"); ··· 243 237 unsigned long dummy; 244 238 245 239 __asm__ __volatile__( 240 + " .set mips3 \n" 246 241 "1: lld %0, %3 # xchg_u64 \n" 247 242 " move %2, %z4 \n" 248 243 " scd %2, %1 \n" ··· 251 244 #ifdef CONFIG_SMP 252 245 " sync \n" 253 246 #endif 247 + " .set mips0 \n" 254 248 : "=&r" (retval), "=m" (*m), "=&r" (dummy) 255 249 : "R" (*m), "Jr" (val) 256 250 : "memory"); ··· 299 291 300 292 if (cpu_has_llsc && R10000_LLSC_WAR) { 301 293 __asm__ __volatile__( 294 + " .set push \n" 302 295 " .set noat \n" 296 + " .set mips2 \n" 303 297 "1: ll %0, %2 # __cmpxchg_u32 \n" 304 298 " bne %0, %z3, 2f \n" 305 299 " move $1, %z4 \n" ··· 312 302 " sync \n" 313 303 #endif 314 304 "2: \n" 315 - " .set at \n" 305 + " .set pop \n" 316 306 : "=&r" (retval), "=m" (*m) 317 307 : "R" (*m), "Jr" (old), "Jr" (new) 318 308 : "memory"); 319 309 } else if (cpu_has_llsc) { 320 310 __asm__ 
__volatile__( 311 + " .set push \n" 321 312 " .set noat \n" 313 + " .set mips2 \n" 322 314 "1: ll %0, %2 # __cmpxchg_u32 \n" 323 315 " bne %0, %z3, 2f \n" 324 316 " move $1, %z4 \n" ··· 330 318 " sync \n" 331 319 #endif 332 320 "2: \n" 333 - " .set at \n" 321 + " .set pop \n" 334 322 : "=&r" (retval), "=m" (*m) 335 323 : "R" (*m), "Jr" (old), "Jr" (new) 336 324 : "memory"); ··· 355 343 356 344 if (cpu_has_llsc) { 357 345 __asm__ __volatile__( 346 + " .set push \n" 358 347 " .set noat \n" 348 + " .set mips3 \n" 359 349 "1: lld %0, %2 # __cmpxchg_u64 \n" 360 350 " bne %0, %z3, 2f \n" 361 351 " move $1, %z4 \n" ··· 368 354 " sync \n" 369 355 #endif 370 356 "2: \n" 371 - " .set at \n" 357 + " .set pop \n" 372 358 : "=&r" (retval), "=m" (*m) 373 359 : "R" (*m), "Jr" (old), "Jr" (new) 374 360 : "memory"); 375 361 } else if (cpu_has_llsc) { 376 362 __asm__ __volatile__( 363 + " .set push \n" 377 364 " .set noat \n" 365 + " .set mips3 \n" 378 366 "1: lld %0, %2 # __cmpxchg_u64 \n" 379 367 " bne %0, %z3, 2f \n" 380 368 " move $1, %z4 \n" ··· 386 370 " sync \n" 387 371 #endif 388 372 "2: \n" 389 - " .set at \n" 373 + " .set pop \n" 390 374 : "=&r" (retval), "=m" (*m) 391 375 : "R" (*m), "Jr" (old), "Jr" (new) 392 376 : "memory");