Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

MIPS: page: Use GPR number macros

Use GPR number macros in uasm code generation parts to
reduce code duplication.

No functional change.

Signed-off-by: Jiaxun Yang <jiaxun.yang@flygoat.com>
Signed-off-by: Thomas Bogendoerfer <tsbogend@alpha.franken.de>

Authored by Jiaxun Yang; committed by Thomas Bogendoerfer.
Commit 6aec8e05 (parent c2fb9fe4)

+95 -107
arch/mips/mm/page.c
··· 24 24 #include <asm/bootinfo.h> 25 25 #include <asm/mipsregs.h> 26 26 #include <asm/mmu_context.h> 27 + #include <asm/regdef.h> 27 28 #include <asm/cpu.h> 28 29 29 30 #ifdef CONFIG_SIBYTE_DMA_PAGEOPS ··· 34 33 #endif 35 34 36 35 #include <asm/uasm.h> 37 - 38 - /* Registers used in the assembled routines. */ 39 - #define ZERO 0 40 - #define AT 2 41 - #define A0 4 42 - #define A1 5 43 - #define A2 6 44 - #define T0 8 45 - #define T1 9 46 - #define T2 10 47 - #define T3 11 48 - #define T9 25 49 - #define RA 31 50 36 51 37 /* Handle labels (which must be positive integers). */ 52 38 enum label_id { ··· 94 106 IS_ENABLED(CONFIG_CPU_DADDI_WORKAROUNDS) && 95 107 r4k_daddiu_bug()) { 96 108 if (off > 0x7fff) { 97 - uasm_i_lui(buf, T9, uasm_rel_hi(off)); 98 - uasm_i_addiu(buf, T9, T9, uasm_rel_lo(off)); 109 + uasm_i_lui(buf, GPR_T9, uasm_rel_hi(off)); 110 + uasm_i_addiu(buf, GPR_T9, GPR_T9, uasm_rel_lo(off)); 99 111 } else 100 - uasm_i_addiu(buf, T9, ZERO, off); 101 - uasm_i_daddu(buf, reg1, reg2, T9); 112 + uasm_i_addiu(buf, GPR_T9, GPR_ZERO, off); 113 + uasm_i_daddu(buf, reg1, reg2, GPR_T9); 102 114 } else { 103 115 if (off > 0x7fff) { 104 - uasm_i_lui(buf, T9, uasm_rel_hi(off)); 105 - uasm_i_addiu(buf, T9, T9, uasm_rel_lo(off)); 106 - UASM_i_ADDU(buf, reg1, reg2, T9); 116 + uasm_i_lui(buf, GPR_T9, uasm_rel_hi(off)); 117 + uasm_i_addiu(buf, GPR_T9, GPR_T9, uasm_rel_lo(off)); 118 + UASM_i_ADDU(buf, reg1, reg2, GPR_T9); 107 119 } else 108 120 UASM_i_ADDIU(buf, reg1, reg2, off); 109 121 } ··· 221 233 static void build_clear_store(u32 **buf, int off) 222 234 { 223 235 if (cpu_has_64bit_gp_regs || cpu_has_64bit_zero_reg) { 224 - uasm_i_sd(buf, ZERO, off, A0); 236 + uasm_i_sd(buf, GPR_ZERO, off, GPR_A0); 225 237 } else { 226 - uasm_i_sw(buf, ZERO, off, A0); 238 + uasm_i_sw(buf, GPR_ZERO, off, GPR_A0); 227 239 } 228 240 } 229 241 ··· 234 246 235 247 if (pref_bias_clear_store) { 236 248 _uasm_i_pref(buf, pref_dst_mode, pref_bias_clear_store + off, 237 - A0); 249 + GPR_A0); 238 
250 } else if (cache_line_size == (half_clear_loop_size << 1)) { 239 251 if (cpu_has_cache_cdex_s) { 240 - uasm_i_cache(buf, Create_Dirty_Excl_SD, off, A0); 252 + uasm_i_cache(buf, Create_Dirty_Excl_SD, off, GPR_A0); 241 253 } else if (cpu_has_cache_cdex_p) { 242 254 if (IS_ENABLED(CONFIG_WAR_R4600_V1_HIT_CACHEOP) && 243 255 cpu_is_r4600_v1_x()) { ··· 249 261 250 262 if (IS_ENABLED(CONFIG_WAR_R4600_V2_HIT_CACHEOP) && 251 263 cpu_is_r4600_v2_x()) 252 - uasm_i_lw(buf, ZERO, ZERO, AT); 264 + uasm_i_lw(buf, GPR_ZERO, GPR_ZERO, GPR_AT); 253 265 254 - uasm_i_cache(buf, Create_Dirty_Excl_D, off, A0); 266 + uasm_i_cache(buf, Create_Dirty_Excl_D, off, GPR_A0); 255 267 } 256 268 } 257 269 } ··· 289 301 290 302 off = PAGE_SIZE - pref_bias_clear_store; 291 303 if (off > 0xffff || !pref_bias_clear_store) 292 - pg_addiu(&buf, A2, A0, off); 304 + pg_addiu(&buf, GPR_A2, GPR_A0, off); 293 305 else 294 - uasm_i_ori(&buf, A2, A0, off); 306 + uasm_i_ori(&buf, GPR_A2, GPR_A0, off); 295 307 296 308 if (IS_ENABLED(CONFIG_WAR_R4600_V2_HIT_CACHEOP) && cpu_is_r4600_v2_x()) 297 - uasm_i_lui(&buf, AT, uasm_rel_hi(0xa0000000)); 309 + uasm_i_lui(&buf, GPR_AT, uasm_rel_hi(0xa0000000)); 298 310 299 311 off = cache_line_size ? 
min(8, pref_bias_clear_store / cache_line_size) 300 312 * cache_line_size : 0; ··· 308 320 build_clear_store(&buf, off); 309 321 off += clear_word_size; 310 322 } while (off < half_clear_loop_size); 311 - pg_addiu(&buf, A0, A0, 2 * off); 323 + pg_addiu(&buf, GPR_A0, GPR_A0, 2 * off); 312 324 off = -off; 313 325 do { 314 326 build_clear_pref(&buf, off); 315 327 if (off == -clear_word_size) 316 - uasm_il_bne(&buf, &r, A0, A2, label_clear_pref); 328 + uasm_il_bne(&buf, &r, GPR_A0, GPR_A2, label_clear_pref); 317 329 build_clear_store(&buf, off); 318 330 off += clear_word_size; 319 331 } while (off < 0); 320 332 321 333 if (pref_bias_clear_store) { 322 - pg_addiu(&buf, A2, A0, pref_bias_clear_store); 334 + pg_addiu(&buf, GPR_A2, GPR_A0, pref_bias_clear_store); 323 335 uasm_l_clear_nopref(&l, buf); 324 336 off = 0; 325 337 do { 326 338 build_clear_store(&buf, off); 327 339 off += clear_word_size; 328 340 } while (off < half_clear_loop_size); 329 - pg_addiu(&buf, A0, A0, 2 * off); 341 + pg_addiu(&buf, GPR_A0, GPR_A0, 2 * off); 330 342 off = -off; 331 343 do { 332 344 if (off == -clear_word_size) 333 - uasm_il_bne(&buf, &r, A0, A2, 345 + uasm_il_bne(&buf, &r, GPR_A0, GPR_A2, 334 346 label_clear_nopref); 335 347 build_clear_store(&buf, off); 336 348 off += clear_word_size; 337 349 } while (off < 0); 338 350 } 339 351 340 - uasm_i_jr(&buf, RA); 352 + uasm_i_jr(&buf, GPR_RA); 341 353 uasm_i_nop(&buf); 342 354 343 355 BUG_ON(buf > &__clear_page_end); ··· 357 369 static void build_copy_load(u32 **buf, int reg, int off) 358 370 { 359 371 if (cpu_has_64bit_gp_regs) { 360 - uasm_i_ld(buf, reg, off, A1); 372 + uasm_i_ld(buf, reg, off, GPR_A1); 361 373 } else { 362 - uasm_i_lw(buf, reg, off, A1); 374 + uasm_i_lw(buf, reg, off, GPR_A1); 363 375 } 364 376 } 365 377 366 378 static void build_copy_store(u32 **buf, int reg, int off) 367 379 { 368 380 if (cpu_has_64bit_gp_regs) { 369 - uasm_i_sd(buf, reg, off, A0); 381 + uasm_i_sd(buf, reg, off, GPR_A0); 370 382 } else { 371 - 
uasm_i_sw(buf, reg, off, A0); 383 + uasm_i_sw(buf, reg, off, GPR_A0); 372 384 } 373 385 } 374 386 ··· 378 390 return; 379 391 380 392 if (pref_bias_copy_load) 381 - _uasm_i_pref(buf, pref_src_mode, pref_bias_copy_load + off, A1); 393 + _uasm_i_pref(buf, pref_src_mode, pref_bias_copy_load + off, GPR_A1); 382 394 } 383 395 384 396 static inline void build_copy_store_pref(u32 **buf, int off) ··· 388 400 389 401 if (pref_bias_copy_store) { 390 402 _uasm_i_pref(buf, pref_dst_mode, pref_bias_copy_store + off, 391 - A0); 403 + GPR_A0); 392 404 } else if (cache_line_size == (half_copy_loop_size << 1)) { 393 405 if (cpu_has_cache_cdex_s) { 394 - uasm_i_cache(buf, Create_Dirty_Excl_SD, off, A0); 406 + uasm_i_cache(buf, Create_Dirty_Excl_SD, off, GPR_A0); 395 407 } else if (cpu_has_cache_cdex_p) { 396 408 if (IS_ENABLED(CONFIG_WAR_R4600_V1_HIT_CACHEOP) && 397 409 cpu_is_r4600_v1_x()) { ··· 403 415 404 416 if (IS_ENABLED(CONFIG_WAR_R4600_V2_HIT_CACHEOP) && 405 417 cpu_is_r4600_v2_x()) 406 - uasm_i_lw(buf, ZERO, ZERO, AT); 418 + uasm_i_lw(buf, GPR_ZERO, GPR_ZERO, GPR_AT); 407 419 408 - uasm_i_cache(buf, Create_Dirty_Excl_D, off, A0); 420 + uasm_i_cache(buf, Create_Dirty_Excl_D, off, GPR_A0); 409 421 } 410 422 } 411 423 } ··· 442 454 443 455 off = PAGE_SIZE - pref_bias_copy_load; 444 456 if (off > 0xffff || !pref_bias_copy_load) 445 - pg_addiu(&buf, A2, A0, off); 457 + pg_addiu(&buf, GPR_A2, GPR_A0, off); 446 458 else 447 - uasm_i_ori(&buf, A2, A0, off); 459 + uasm_i_ori(&buf, GPR_A2, GPR_A0, off); 448 460 449 461 if (IS_ENABLED(CONFIG_WAR_R4600_V2_HIT_CACHEOP) && cpu_is_r4600_v2_x()) 450 - uasm_i_lui(&buf, AT, uasm_rel_hi(0xa0000000)); 462 + uasm_i_lui(&buf, GPR_AT, uasm_rel_hi(0xa0000000)); 451 463 452 464 off = cache_line_size ? 
min(8, pref_bias_copy_load / cache_line_size) * 453 465 cache_line_size : 0; ··· 464 476 uasm_l_copy_pref_both(&l, buf); 465 477 do { 466 478 build_copy_load_pref(&buf, off); 467 - build_copy_load(&buf, T0, off); 479 + build_copy_load(&buf, GPR_T0, off); 468 480 build_copy_load_pref(&buf, off + copy_word_size); 469 - build_copy_load(&buf, T1, off + copy_word_size); 481 + build_copy_load(&buf, GPR_T1, off + copy_word_size); 470 482 build_copy_load_pref(&buf, off + 2 * copy_word_size); 471 - build_copy_load(&buf, T2, off + 2 * copy_word_size); 483 + build_copy_load(&buf, GPR_T2, off + 2 * copy_word_size); 472 484 build_copy_load_pref(&buf, off + 3 * copy_word_size); 473 - build_copy_load(&buf, T3, off + 3 * copy_word_size); 485 + build_copy_load(&buf, GPR_T3, off + 3 * copy_word_size); 474 486 build_copy_store_pref(&buf, off); 475 - build_copy_store(&buf, T0, off); 487 + build_copy_store(&buf, GPR_T0, off); 476 488 build_copy_store_pref(&buf, off + copy_word_size); 477 - build_copy_store(&buf, T1, off + copy_word_size); 489 + build_copy_store(&buf, GPR_T1, off + copy_word_size); 478 490 build_copy_store_pref(&buf, off + 2 * copy_word_size); 479 - build_copy_store(&buf, T2, off + 2 * copy_word_size); 491 + build_copy_store(&buf, GPR_T2, off + 2 * copy_word_size); 480 492 build_copy_store_pref(&buf, off + 3 * copy_word_size); 481 - build_copy_store(&buf, T3, off + 3 * copy_word_size); 493 + build_copy_store(&buf, GPR_T3, off + 3 * copy_word_size); 482 494 off += 4 * copy_word_size; 483 495 } while (off < half_copy_loop_size); 484 - pg_addiu(&buf, A1, A1, 2 * off); 485 - pg_addiu(&buf, A0, A0, 2 * off); 496 + pg_addiu(&buf, GPR_A1, GPR_A1, 2 * off); 497 + pg_addiu(&buf, GPR_A0, GPR_A0, 2 * off); 486 498 off = -off; 487 499 do { 488 500 build_copy_load_pref(&buf, off); 489 - build_copy_load(&buf, T0, off); 501 + build_copy_load(&buf, GPR_T0, off); 490 502 build_copy_load_pref(&buf, off + copy_word_size); 491 - build_copy_load(&buf, T1, off + copy_word_size); 503 + 
build_copy_load(&buf, GPR_T1, off + copy_word_size); 492 504 build_copy_load_pref(&buf, off + 2 * copy_word_size); 493 - build_copy_load(&buf, T2, off + 2 * copy_word_size); 505 + build_copy_load(&buf, GPR_T2, off + 2 * copy_word_size); 494 506 build_copy_load_pref(&buf, off + 3 * copy_word_size); 495 - build_copy_load(&buf, T3, off + 3 * copy_word_size); 507 + build_copy_load(&buf, GPR_T3, off + 3 * copy_word_size); 496 508 build_copy_store_pref(&buf, off); 497 - build_copy_store(&buf, T0, off); 509 + build_copy_store(&buf, GPR_T0, off); 498 510 build_copy_store_pref(&buf, off + copy_word_size); 499 - build_copy_store(&buf, T1, off + copy_word_size); 511 + build_copy_store(&buf, GPR_T1, off + copy_word_size); 500 512 build_copy_store_pref(&buf, off + 2 * copy_word_size); 501 - build_copy_store(&buf, T2, off + 2 * copy_word_size); 513 + build_copy_store(&buf, GPR_T2, off + 2 * copy_word_size); 502 514 build_copy_store_pref(&buf, off + 3 * copy_word_size); 503 515 if (off == -(4 * copy_word_size)) 504 - uasm_il_bne(&buf, &r, A2, A0, label_copy_pref_both); 505 - build_copy_store(&buf, T3, off + 3 * copy_word_size); 516 + uasm_il_bne(&buf, &r, GPR_A2, GPR_A0, label_copy_pref_both); 517 + build_copy_store(&buf, GPR_T3, off + 3 * copy_word_size); 506 518 off += 4 * copy_word_size; 507 519 } while (off < 0); 508 520 509 521 if (pref_bias_copy_load - pref_bias_copy_store) { 510 - pg_addiu(&buf, A2, A0, 522 + pg_addiu(&buf, GPR_A2, GPR_A0, 511 523 pref_bias_copy_load - pref_bias_copy_store); 512 524 uasm_l_copy_pref_store(&l, buf); 513 525 off = 0; 514 526 do { 515 - build_copy_load(&buf, T0, off); 516 - build_copy_load(&buf, T1, off + copy_word_size); 517 - build_copy_load(&buf, T2, off + 2 * copy_word_size); 518 - build_copy_load(&buf, T3, off + 3 * copy_word_size); 527 + build_copy_load(&buf, GPR_T0, off); 528 + build_copy_load(&buf, GPR_T1, off + copy_word_size); 529 + build_copy_load(&buf, GPR_T2, off + 2 * copy_word_size); 530 + build_copy_load(&buf, GPR_T3, off + 3 
* copy_word_size); 519 531 build_copy_store_pref(&buf, off); 520 - build_copy_store(&buf, T0, off); 532 + build_copy_store(&buf, GPR_T0, off); 521 533 build_copy_store_pref(&buf, off + copy_word_size); 522 - build_copy_store(&buf, T1, off + copy_word_size); 534 + build_copy_store(&buf, GPR_T1, off + copy_word_size); 523 535 build_copy_store_pref(&buf, off + 2 * copy_word_size); 524 - build_copy_store(&buf, T2, off + 2 * copy_word_size); 536 + build_copy_store(&buf, GPR_T2, off + 2 * copy_word_size); 525 537 build_copy_store_pref(&buf, off + 3 * copy_word_size); 526 - build_copy_store(&buf, T3, off + 3 * copy_word_size); 538 + build_copy_store(&buf, GPR_T3, off + 3 * copy_word_size); 527 539 off += 4 * copy_word_size; 528 540 } while (off < half_copy_loop_size); 529 - pg_addiu(&buf, A1, A1, 2 * off); 530 - pg_addiu(&buf, A0, A0, 2 * off); 541 + pg_addiu(&buf, GPR_A1, GPR_A1, 2 * off); 542 + pg_addiu(&buf, GPR_A0, GPR_A0, 2 * off); 531 543 off = -off; 532 544 do { 533 - build_copy_load(&buf, T0, off); 534 - build_copy_load(&buf, T1, off + copy_word_size); 535 - build_copy_load(&buf, T2, off + 2 * copy_word_size); 536 - build_copy_load(&buf, T3, off + 3 * copy_word_size); 545 + build_copy_load(&buf, GPR_T0, off); 546 + build_copy_load(&buf, GPR_T1, off + copy_word_size); 547 + build_copy_load(&buf, GPR_T2, off + 2 * copy_word_size); 548 + build_copy_load(&buf, GPR_T3, off + 3 * copy_word_size); 537 549 build_copy_store_pref(&buf, off); 538 - build_copy_store(&buf, T0, off); 550 + build_copy_store(&buf, GPR_T0, off); 539 551 build_copy_store_pref(&buf, off + copy_word_size); 540 - build_copy_store(&buf, T1, off + copy_word_size); 552 + build_copy_store(&buf, GPR_T1, off + copy_word_size); 541 553 build_copy_store_pref(&buf, off + 2 * copy_word_size); 542 - build_copy_store(&buf, T2, off + 2 * copy_word_size); 554 + build_copy_store(&buf, GPR_T2, off + 2 * copy_word_size); 543 555 build_copy_store_pref(&buf, off + 3 * copy_word_size); 544 556 if (off == -(4 * 
copy_word_size)) 545 - uasm_il_bne(&buf, &r, A2, A0, 557 + uasm_il_bne(&buf, &r, GPR_A2, GPR_A0, 546 558 label_copy_pref_store); 547 - build_copy_store(&buf, T3, off + 3 * copy_word_size); 559 + build_copy_store(&buf, GPR_T3, off + 3 * copy_word_size); 548 560 off += 4 * copy_word_size; 549 561 } while (off < 0); 550 562 } 551 563 552 564 if (pref_bias_copy_store) { 553 - pg_addiu(&buf, A2, A0, pref_bias_copy_store); 565 + pg_addiu(&buf, GPR_A2, GPR_A0, pref_bias_copy_store); 554 566 uasm_l_copy_nopref(&l, buf); 555 567 off = 0; 556 568 do { 557 - build_copy_load(&buf, T0, off); 558 - build_copy_load(&buf, T1, off + copy_word_size); 559 - build_copy_load(&buf, T2, off + 2 * copy_word_size); 560 - build_copy_load(&buf, T3, off + 3 * copy_word_size); 561 - build_copy_store(&buf, T0, off); 562 - build_copy_store(&buf, T1, off + copy_word_size); 563 - build_copy_store(&buf, T2, off + 2 * copy_word_size); 564 - build_copy_store(&buf, T3, off + 3 * copy_word_size); 569 + build_copy_load(&buf, GPR_T0, off); 570 + build_copy_load(&buf, GPR_T1, off + copy_word_size); 571 + build_copy_load(&buf, GPR_T2, off + 2 * copy_word_size); 572 + build_copy_load(&buf, GPR_T3, off + 3 * copy_word_size); 573 + build_copy_store(&buf, GPR_T0, off); 574 + build_copy_store(&buf, GPR_T1, off + copy_word_size); 575 + build_copy_store(&buf, GPR_T2, off + 2 * copy_word_size); 576 + build_copy_store(&buf, GPR_T3, off + 3 * copy_word_size); 565 577 off += 4 * copy_word_size; 566 578 } while (off < half_copy_loop_size); 567 - pg_addiu(&buf, A1, A1, 2 * off); 568 - pg_addiu(&buf, A0, A0, 2 * off); 579 + pg_addiu(&buf, GPR_A1, GPR_A1, 2 * off); 580 + pg_addiu(&buf, GPR_A0, GPR_A0, 2 * off); 569 581 off = -off; 570 582 do { 571 - build_copy_load(&buf, T0, off); 572 - build_copy_load(&buf, T1, off + copy_word_size); 573 - build_copy_load(&buf, T2, off + 2 * copy_word_size); 574 - build_copy_load(&buf, T3, off + 3 * copy_word_size); 575 - build_copy_store(&buf, T0, off); 576 - build_copy_store(&buf, T1, 
off + copy_word_size); 577 - build_copy_store(&buf, T2, off + 2 * copy_word_size); 583 + build_copy_load(&buf, GPR_T0, off); 584 + build_copy_load(&buf, GPR_T1, off + copy_word_size); 585 + build_copy_load(&buf, GPR_T2, off + 2 * copy_word_size); 586 + build_copy_load(&buf, GPR_T3, off + 3 * copy_word_size); 587 + build_copy_store(&buf, GPR_T0, off); 588 + build_copy_store(&buf, GPR_T1, off + copy_word_size); 589 + build_copy_store(&buf, GPR_T2, off + 2 * copy_word_size); 578 590 if (off == -(4 * copy_word_size)) 579 - uasm_il_bne(&buf, &r, A2, A0, 591 + uasm_il_bne(&buf, &r, GPR_A2, GPR_A0, 580 592 label_copy_nopref); 581 - build_copy_store(&buf, T3, off + 3 * copy_word_size); 593 + build_copy_store(&buf, GPR_T3, off + 3 * copy_word_size); 582 594 off += 4 * copy_word_size; 583 595 } while (off < 0); 584 596 } 585 597 586 - uasm_i_jr(&buf, RA); 598 + uasm_i_jr(&buf, GPR_RA); 587 599 uasm_i_nop(&buf); 588 600 589 601 BUG_ON(buf > &__copy_page_end);