Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

powerpc: Merge STK_REG/PARAM/FRAMESIZE

Merge the defines of STACKFRAMESIZE, STK_REG, STK_PARAM from different
places.

Signed-off-by: Michael Neuling <mikey@neuling.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>

Authored by Michael Neuling and committed by Benjamin Herrenschmidt.
44ce6a5e 4404a9f9

+93 -114
+5
arch/powerpc/include/asm/ppc_asm.h
··· 181 181 #ifdef __KERNEL__ 182 182 #ifdef CONFIG_PPC64 183 183 184 + #define STACKFRAMESIZE 256 185 + #define STK_REG(i) (112 + ((i)-14)*8) 186 + 187 + #define STK_PARAM(i) (48 + ((i)-3)*8) 188 + 184 189 #define XGLUE(a,b) a##b 185 190 #define GLUE(a,b) XGLUE(a,b) 186 191
-3
arch/powerpc/lib/checksum_64.S
··· 65 65 srwi r3,r3,16 66 66 blr 67 67 68 - #define STACKFRAMESIZE 256 69 - #define STK_REG(i) (112 + ((i)-14)*8) 70 - 71 68 /* 72 69 * Computes the checksum of a memory block at buff, length len, 73 70 * and adds in "sum" (32-bit).
-3
arch/powerpc/lib/copypage_power7.S
··· 20 20 #include <asm/page.h> 21 21 #include <asm/ppc_asm.h> 22 22 23 - #define STACKFRAMESIZE 256 24 - #define STK_REG(i) (112 + ((i)-14)*8) 25 - 26 23 _GLOBAL(copypage_power7) 27 24 /* 28 25 * We prefetch both the source and destination using enhanced touch
-3
arch/powerpc/lib/copyuser_power7.S
··· 19 19 */ 20 20 #include <asm/ppc_asm.h> 21 21 22 - #define STACKFRAMESIZE 256 23 - #define STK_REG(i) (112 + ((i)-14)*8) 24 - 25 22 .macro err1 26 23 100: 27 24 .section __ex_table,"a"
-3
arch/powerpc/lib/memcpy_power7.S
··· 19 19 */ 20 20 #include <asm/ppc_asm.h> 21 21 22 - #define STACKFRAMESIZE 256 23 - #define STK_REG(i) (112 + ((i)-14)*8) 24 - 25 22 _GLOBAL(memcpy_power7) 26 23 #ifdef CONFIG_ALTIVEC 27 24 cmpldi r5,16
+40 -48
arch/powerpc/mm/hash_low_64.S
··· 34 34 * | CR save area (SP + 8) 35 35 * SP ---> +-- Back chain (SP + 0) 36 36 */ 37 - #define STACKFRAMESIZE 256 38 - 39 - /* Save parameters offsets */ 40 - #define STK_PARM(i) (STACKFRAMESIZE + 48 + ((i)-3)*8) 41 - 42 - /* Save non-volatile offsets */ 43 - #define STK_REG(i) (112 + ((i)-14)*8) 44 - 45 37 46 38 #ifndef CONFIG_PPC_64K_PAGES 47 39 ··· 56 64 std r0,16(r1) 57 65 stdu r1,-STACKFRAMESIZE(r1) 58 66 /* Save all params that we need after a function call */ 59 - std r6,STK_PARM(R6)(r1) 60 - std r8,STK_PARM(R8)(r1) 61 - std r9,STK_PARM(R9)(r1) 67 + std r6,STK_PARAM(R6)(r1) 68 + std r8,STK_PARAM(R8)(r1) 69 + std r9,STK_PARAM(R9)(r1) 62 70 63 71 /* Save non-volatile registers. 64 72 * r31 will hold "old PTE" ··· 154 162 /* At this point, r3 contains new PP bits, save them in 155 163 * place of "access" in the param area (sic) 156 164 */ 157 - std r3,STK_PARM(R4)(r1) 165 + std r3,STK_PARAM(R4)(r1) 158 166 159 167 /* Get htab_hash_mask */ 160 168 ld r4,htab_hash_mask@got(2) ··· 184 192 rldicr r3,r0,3,63-3 /* r3 = (hash & mask) << 3 */ 185 193 186 194 /* Call ppc_md.hpte_insert */ 187 - ld r6,STK_PARM(R4)(r1) /* Retrieve new pp bits */ 195 + ld r6,STK_PARAM(R4)(r1) /* Retrieve new pp bits */ 188 196 mr r4,r29 /* Retrieve va */ 189 197 li r7,0 /* !bolted, !secondary */ 190 198 li r8,MMU_PAGE_4K /* page size */ 191 - ld r9,STK_PARM(R9)(r1) /* segment size */ 199 + ld r9,STK_PARAM(R9)(r1) /* segment size */ 192 200 _GLOBAL(htab_call_hpte_insert1) 193 201 bl . 
/* Patched by htab_finish_init() */ 194 202 cmpdi 0,r3,0 ··· 207 215 rldicr r3,r0,3,63-3 /* r0 = (~hash & mask) << 3 */ 208 216 209 217 /* Call ppc_md.hpte_insert */ 210 - ld r6,STK_PARM(R4)(r1) /* Retrieve new pp bits */ 218 + ld r6,STK_PARAM(R4)(r1) /* Retrieve new pp bits */ 211 219 mr r4,r29 /* Retrieve va */ 212 220 li r7,HPTE_V_SECONDARY /* !bolted, secondary */ 213 221 li r8,MMU_PAGE_4K /* page size */ 214 - ld r9,STK_PARM(R9)(r1) /* segment size */ 222 + ld r9,STK_PARAM(R9)(r1) /* segment size */ 215 223 _GLOBAL(htab_call_hpte_insert2) 216 224 bl . /* Patched by htab_finish_init() */ 217 225 cmpdi 0,r3,0 ··· 247 255 * (maybe add eieio may be good still ?) 248 256 */ 249 257 htab_write_out_pte: 250 - ld r6,STK_PARM(R6)(r1) 258 + ld r6,STK_PARAM(R6)(r1) 251 259 std r30,0(r6) 252 260 li r3, 0 253 261 htab_bail: ··· 280 288 /* Call ppc_md.hpte_updatepp */ 281 289 mr r5,r29 /* va */ 282 290 li r6,MMU_PAGE_4K /* page size */ 283 - ld r7,STK_PARM(R9)(r1) /* segment size */ 284 - ld r8,STK_PARM(R8)(r1) /* get "local" param */ 291 + ld r7,STK_PARAM(R9)(r1) /* segment size */ 292 + ld r8,STK_PARAM(R8)(r1) /* get "local" param */ 285 293 _GLOBAL(htab_call_hpte_updatepp) 286 294 bl . /* Patched by htab_finish_init() */ 287 295 ··· 304 312 305 313 htab_pte_insert_failure: 306 314 /* Bail out restoring old PTE */ 307 - ld r6,STK_PARM(R6)(r1) 315 + ld r6,STK_PARAM(R6)(r1) 308 316 std r31,0(r6) 309 317 li r3,-1 310 318 b htab_bail ··· 332 340 std r0,16(r1) 333 341 stdu r1,-STACKFRAMESIZE(r1) 334 342 /* Save all params that we need after a function call */ 335 - std r6,STK_PARM(R6)(r1) 336 - std r8,STK_PARM(R8)(r1) 337 - std r9,STK_PARM(R9)(r1) 343 + std r6,STK_PARAM(R6)(r1) 344 + std r8,STK_PARAM(R8)(r1) 345 + std r9,STK_PARAM(R9)(r1) 338 346 339 347 /* Save non-volatile registers. 
340 348 * r31 will hold "old PTE" ··· 444 452 /* At this point, r3 contains new PP bits, save them in 445 453 * place of "access" in the param area (sic) 446 454 */ 447 - std r3,STK_PARM(R4)(r1) 455 + std r3,STK_PARAM(R4)(r1) 448 456 449 457 /* Get htab_hash_mask */ 450 458 ld r4,htab_hash_mask@got(2) ··· 465 473 andis. r0,r31,_PAGE_COMBO@h 466 474 beq htab_inval_old_hpte 467 475 468 - ld r6,STK_PARM(R6)(r1) 476 + ld r6,STK_PARAM(R6)(r1) 469 477 ori r26,r6,0x8000 /* Load the hidx mask */ 470 478 ld r26,0(r26) 471 479 addi r5,r25,36 /* Check actual HPTE_SUB bit, this */ ··· 487 495 rldicr r3,r0,3,63-3 /* r0 = (hash & mask) << 3 */ 488 496 489 497 /* Call ppc_md.hpte_insert */ 490 - ld r6,STK_PARM(R4)(r1) /* Retrieve new pp bits */ 498 + ld r6,STK_PARAM(R4)(r1) /* Retrieve new pp bits */ 491 499 mr r4,r29 /* Retrieve va */ 492 500 li r7,0 /* !bolted, !secondary */ 493 501 li r8,MMU_PAGE_4K /* page size */ 494 - ld r9,STK_PARM(R9)(r1) /* segment size */ 502 + ld r9,STK_PARAM(R9)(r1) /* segment size */ 495 503 _GLOBAL(htab_call_hpte_insert1) 496 504 bl . /* patched by htab_finish_init() */ 497 505 cmpdi 0,r3,0 ··· 514 522 rldicr r3,r0,3,63-3 /* r0 = (~hash & mask) << 3 */ 515 523 516 524 /* Call ppc_md.hpte_insert */ 517 - ld r6,STK_PARM(R4)(r1) /* Retrieve new pp bits */ 525 + ld r6,STK_PARAM(R4)(r1) /* Retrieve new pp bits */ 518 526 mr r4,r29 /* Retrieve va */ 519 527 li r7,HPTE_V_SECONDARY /* !bolted, secondary */ 520 528 li r8,MMU_PAGE_4K /* page size */ 521 - ld r9,STK_PARM(R9)(r1) /* segment size */ 529 + ld r9,STK_PARAM(R9)(r1) /* segment size */ 522 530 _GLOBAL(htab_call_hpte_insert2) 523 531 bl . 
/* patched by htab_finish_init() */ 524 532 cmpdi 0,r3,0 ··· 551 559 mr r4,r31 /* PTE.pte */ 552 560 li r5,0 /* PTE.hidx */ 553 561 li r6,MMU_PAGE_64K /* psize */ 554 - ld r7,STK_PARM(R9)(r1) /* ssize */ 555 - ld r8,STK_PARM(R8)(r1) /* local */ 562 + ld r7,STK_PARAM(R9)(r1) /* ssize */ 563 + ld r8,STK_PARAM(R8)(r1) /* local */ 556 564 bl .flush_hash_page 557 565 /* Clear out _PAGE_HPTE_SUB bits in the new linux PTE */ 558 566 lis r0,_PAGE_HPTE_SUB@h ··· 568 576 /* Insert slot number & secondary bit in PTE second half, 569 577 * clear _PAGE_BUSY and set approriate HPTE slot bit 570 578 */ 571 - ld r6,STK_PARM(R6)(r1) 579 + ld r6,STK_PARAM(R6)(r1) 572 580 li r0,_PAGE_BUSY 573 581 andc r30,r30,r0 574 582 /* HPTE SUB bit */ ··· 622 630 /* Call ppc_md.hpte_updatepp */ 623 631 mr r5,r29 /* va */ 624 632 li r6,MMU_PAGE_4K /* page size */ 625 - ld r7,STK_PARM(R9)(r1) /* segment size */ 626 - ld r8,STK_PARM(R8)(r1) /* get "local" param */ 633 + ld r7,STK_PARAM(R9)(r1) /* segment size */ 634 + ld r8,STK_PARAM(R8)(r1) /* get "local" param */ 627 635 _GLOBAL(htab_call_hpte_updatepp) 628 636 bl . /* patched by htab_finish_init() */ 629 637 ··· 636 644 /* Clear the BUSY bit and Write out the PTE */ 637 645 li r0,_PAGE_BUSY 638 646 andc r30,r30,r0 639 - ld r6,STK_PARM(R6)(r1) 647 + ld r6,STK_PARAM(R6)(r1) 640 648 std r30,0(r6) 641 649 li r3,0 642 650 b htab_bail ··· 649 657 650 658 htab_pte_insert_failure: 651 659 /* Bail out restoring old PTE */ 652 - ld r6,STK_PARM(R6)(r1) 660 + ld r6,STK_PARAM(R6)(r1) 653 661 std r31,0(r6) 654 662 li r3,-1 655 663 b htab_bail ··· 669 677 std r0,16(r1) 670 678 stdu r1,-STACKFRAMESIZE(r1) 671 679 /* Save all params that we need after a function call */ 672 - std r6,STK_PARM(R6)(r1) 673 - std r8,STK_PARM(R8)(r1) 674 - std r9,STK_PARM(R9)(r1) 680 + std r6,STK_PARAM(R6)(r1) 681 + std r8,STK_PARAM(R8)(r1) 682 + std r9,STK_PARAM(R9)(r1) 675 683 676 684 /* Save non-volatile registers. 
677 685 * r31 will hold "old PTE" ··· 772 780 /* At this point, r3 contains new PP bits, save them in 773 781 * place of "access" in the param area (sic) 774 782 */ 775 - std r3,STK_PARM(R4)(r1) 783 + std r3,STK_PARAM(R4)(r1) 776 784 777 785 /* Get htab_hash_mask */ 778 786 ld r4,htab_hash_mask@got(2) ··· 805 813 rldicr r3,r0,3,63-3 /* r0 = (hash & mask) << 3 */ 806 814 807 815 /* Call ppc_md.hpte_insert */ 808 - ld r6,STK_PARM(R4)(r1) /* Retrieve new pp bits */ 816 + ld r6,STK_PARAM(R4)(r1) /* Retrieve new pp bits */ 809 817 mr r4,r29 /* Retrieve va */ 810 818 li r7,0 /* !bolted, !secondary */ 811 819 li r8,MMU_PAGE_64K 812 - ld r9,STK_PARM(R9)(r1) /* segment size */ 820 + ld r9,STK_PARAM(R9)(r1) /* segment size */ 813 821 _GLOBAL(ht64_call_hpte_insert1) 814 822 bl . /* patched by htab_finish_init() */ 815 823 cmpdi 0,r3,0 ··· 828 836 rldicr r3,r0,3,63-3 /* r0 = (~hash & mask) << 3 */ 829 837 830 838 /* Call ppc_md.hpte_insert */ 831 - ld r6,STK_PARM(R4)(r1) /* Retrieve new pp bits */ 839 + ld r6,STK_PARAM(R4)(r1) /* Retrieve new pp bits */ 832 840 mr r4,r29 /* Retrieve va */ 833 841 li r7,HPTE_V_SECONDARY /* !bolted, secondary */ 834 842 li r8,MMU_PAGE_64K 835 - ld r9,STK_PARM(R9)(r1) /* segment size */ 843 + ld r9,STK_PARAM(R9)(r1) /* segment size */ 836 844 _GLOBAL(ht64_call_hpte_insert2) 837 845 bl . /* patched by htab_finish_init() */ 838 846 cmpdi 0,r3,0 ··· 868 876 * (maybe add eieio may be good still ?) 869 877 */ 870 878 ht64_write_out_pte: 871 - ld r6,STK_PARM(R6)(r1) 879 + ld r6,STK_PARAM(R6)(r1) 872 880 std r30,0(r6) 873 881 li r3, 0 874 882 ht64_bail: ··· 901 909 /* Call ppc_md.hpte_updatepp */ 902 910 mr r5,r29 /* va */ 903 911 li r6,MMU_PAGE_64K 904 - ld r7,STK_PARM(R9)(r1) /* segment size */ 905 - ld r8,STK_PARM(R8)(r1) /* get "local" param */ 912 + ld r7,STK_PARAM(R9)(r1) /* segment size */ 913 + ld r8,STK_PARAM(R8)(r1) /* get "local" param */ 906 914 _GLOBAL(ht64_call_hpte_updatepp) 907 915 bl . 
/* patched by htab_finish_init() */ 908 916 ··· 925 933 926 934 ht64_pte_insert_failure: 927 935 /* Bail out restoring old PTE */ 928 - ld r6,STK_PARM(R6)(r1) 936 + ld r6,STK_PARAM(R6)(r1) 929 937 std r31,0(r6) 930 938 li r3,-1 931 939 b ht64_bail
+13 -15
arch/powerpc/platforms/cell/beat_hvCall.S
··· 22 22 23 23 #include <asm/ppc_asm.h> 24 24 25 - #define STK_PARM(i) (48 + ((i)-3)*8) 26 - 27 25 /* Not implemented on Beat, now */ 28 26 #define HCALL_INST_PRECALL 29 27 #define HCALL_INST_POSTCALL ··· 72 74 mr r6,r7 73 75 mr r7,r8 74 76 mr r8,r9 75 - ld r10,STK_PARM(R10)(r1) 77 + ld r10,STK_PARAM(R10)(r1) 76 78 77 79 HVSC /* invoke the hypervisor */ 78 80 ··· 92 94 93 95 HCALL_INST_PRECALL 94 96 95 - std r4,STK_PARM(R4)(r1) /* save ret buffer */ 97 + std r4,STK_PARAM(R4)(r1) /* save ret buffer */ 96 98 97 99 mr r11,r3 98 100 mr r3,r5 ··· 106 108 107 109 HCALL_INST_POSTCALL 108 110 109 - ld r12,STK_PARM(R4)(r1) 111 + ld r12,STK_PARAM(R4)(r1) 110 112 std r4, 0(r12) 111 113 112 114 lwz r0,8(r1) ··· 123 125 124 126 HCALL_INST_PRECALL 125 127 126 - std r4,STK_PARM(R4)(r1) /* save ret buffer */ 128 + std r4,STK_PARAM(R4)(r1) /* save ret buffer */ 127 129 128 130 mr r11,r3 129 131 mr r3,r5 ··· 137 139 138 140 HCALL_INST_POSTCALL 139 141 140 - ld r12,STK_PARM(R4)(r1) 142 + ld r12,STK_PARAM(R4)(r1) 141 143 std r4, 0(r12) 142 144 std r5, 8(r12) 143 145 ··· 155 157 156 158 HCALL_INST_PRECALL 157 159 158 - std r4,STK_PARM(R4)(r1) /* save ret buffer */ 160 + std r4,STK_PARAM(R4)(r1) /* save ret buffer */ 159 161 160 162 mr r11,r3 161 163 mr r3,r5 ··· 169 171 170 172 HCALL_INST_POSTCALL 171 173 172 - ld r12,STK_PARM(R4)(r1) 174 + ld r12,STK_PARAM(R4)(r1) 173 175 std r4, 0(r12) 174 176 std r5, 8(r12) 175 177 std r6, 16(r12) ··· 188 190 189 191 HCALL_INST_PRECALL 190 192 191 - std r4,STK_PARM(R4)(r1) /* save ret buffer */ 193 + std r4,STK_PARAM(R4)(r1) /* save ret buffer */ 192 194 193 195 mr r11,r3 194 196 mr r3,r5 ··· 202 204 203 205 HCALL_INST_POSTCALL 204 206 205 - ld r12,STK_PARM(R4)(r1) 207 + ld r12,STK_PARAM(R4)(r1) 206 208 std r4, 0(r12) 207 209 std r5, 8(r12) 208 210 std r6, 16(r12) ··· 222 224 223 225 HCALL_INST_PRECALL 224 226 225 - std r4,STK_PARM(R4)(r1) /* save ret buffer */ 227 + std r4,STK_PARAM(R4)(r1) /* save ret buffer */ 226 228 227 229 mr r11,r3 228 230 
mr r3,r5 ··· 236 238 237 239 HCALL_INST_POSTCALL 238 240 239 - ld r12,STK_PARM(R4)(r1) 241 + ld r12,STK_PARAM(R4)(r1) 240 242 std r4, 0(r12) 241 243 std r5, 8(r12) 242 244 std r6, 16(r12) ··· 257 259 258 260 HCALL_INST_PRECALL 259 261 260 - std r4,STK_PARM(R4)(r1) /* save ret buffer */ 262 + std r4,STK_PARAM(R4)(r1) /* save ret buffer */ 261 263 262 264 mr r11,r3 263 265 mr r3,r5 ··· 271 273 272 274 HCALL_INST_POSTCALL 273 275 274 - ld r12,STK_PARM(R4)(r1) 276 + ld r12,STK_PARAM(R4)(r1) 275 277 std r4, 0(r12) 276 278 std r5, 8(r12) 277 279 std r6, 16(r12)
-2
arch/powerpc/platforms/powernv/opal-takeover.S
··· 14 14 #include <asm/asm-offsets.h> 15 15 #include <asm/opal.h> 16 16 17 - #define STK_PARAM(i) (48 + ((i)-3)*8) 18 - 19 17 #define H_HAL_TAKEOVER 0x5124 20 18 #define H_HAL_TAKEOVER_QUERY_MAGIC -1 21 19
+35 -37
arch/powerpc/platforms/pseries/hvCall.S
··· 13 13 #include <asm/asm-offsets.h> 14 14 #include <asm/ptrace.h> 15 15 16 - #define STK_PARM(i) (48 + ((i)-3)*8) 17 - 18 16 #ifdef CONFIG_TRACEPOINTS 19 17 20 18 .section ".toc","aw" ··· 24 26 .section ".text" 25 27 26 28 /* 27 - * precall must preserve all registers. use unused STK_PARM() 29 + * precall must preserve all registers. use unused STK_PARAM() 28 30 * areas to save snapshots and opcode. We branch around this 29 31 * in early init (eg when populating the MMU hashtable) by using an 30 32 * unconditional cpu feature. ··· 38 40 cmpdi r12,0; \ 39 41 beq+ 1f; \ 40 42 mflr r0; \ 41 - std r3,STK_PARM(R3)(r1); \ 42 - std r4,STK_PARM(R4)(r1); \ 43 - std r5,STK_PARM(R5)(r1); \ 44 - std r6,STK_PARM(R6)(r1); \ 45 - std r7,STK_PARM(R7)(r1); \ 46 - std r8,STK_PARM(R8)(r1); \ 47 - std r9,STK_PARM(R9)(r1); \ 48 - std r10,STK_PARM(R10)(r1); \ 43 + std r3,STK_PARAM(R3)(r1); \ 44 + std r4,STK_PARAM(R4)(r1); \ 45 + std r5,STK_PARAM(R5)(r1); \ 46 + std r6,STK_PARAM(R6)(r1); \ 47 + std r7,STK_PARAM(R7)(r1); \ 48 + std r8,STK_PARAM(R8)(r1); \ 49 + std r9,STK_PARAM(R9)(r1); \ 50 + std r10,STK_PARAM(R10)(r1); \ 49 51 std r0,16(r1); \ 50 - addi r4,r1,STK_PARM(FIRST_REG); \ 52 + addi r4,r1,STK_PARAM(FIRST_REG); \ 51 53 stdu r1,-STACK_FRAME_OVERHEAD(r1); \ 52 54 bl .__trace_hcall_entry; \ 53 55 addi r1,r1,STACK_FRAME_OVERHEAD; \ 54 56 ld r0,16(r1); \ 55 - ld r3,STK_PARM(R3)(r1); \ 56 - ld r4,STK_PARM(R4)(r1); \ 57 - ld r5,STK_PARM(R5)(r1); \ 58 - ld r6,STK_PARM(R6)(r1); \ 59 - ld r7,STK_PARM(R7)(r1); \ 60 - ld r8,STK_PARM(R8)(r1); \ 61 - ld r9,STK_PARM(R9)(r1); \ 62 - ld r10,STK_PARM(R10)(r1); \ 57 + ld r3,STK_PARAM(R3)(r1); \ 58 + ld r4,STK_PARAM(R4)(r1); \ 59 + ld r5,STK_PARAM(R5)(r1); \ 60 + ld r6,STK_PARAM(R6)(r1); \ 61 + ld r7,STK_PARAM(R7)(r1); \ 62 + ld r8,STK_PARAM(R8)(r1); \ 63 + ld r9,STK_PARAM(R9)(r1); \ 64 + ld r10,STK_PARAM(R10)(r1); \ 63 65 mtlr r0; \ 64 66 1: 65 67 ··· 77 79 cmpdi r12,0; \ 78 80 beq+ 1f; \ 79 81 mflr r0; \ 80 - ld r6,STK_PARM(R3)(r1); \ 81 - std 
r3,STK_PARM(R3)(r1); \ 82 + ld r6,STK_PARAM(R3)(r1); \ 83 + std r3,STK_PARAM(R3)(r1); \ 82 84 mr r4,r3; \ 83 85 mr r3,r6; \ 84 86 std r0,16(r1); \ ··· 86 88 bl .__trace_hcall_exit; \ 87 89 addi r1,r1,STACK_FRAME_OVERHEAD; \ 88 90 ld r0,16(r1); \ 89 - ld r3,STK_PARM(R3)(r1); \ 91 + ld r3,STK_PARAM(R3)(r1); \ 90 92 mtlr r0; \ 91 93 1: 92 94 ··· 130 132 131 133 HCALL_INST_PRECALL(R5) 132 134 133 - std r4,STK_PARM(R4)(r1) /* Save ret buffer */ 135 + std r4,STK_PARAM(R4)(r1) /* Save ret buffer */ 134 136 135 137 mr r4,r5 136 138 mr r5,r6 ··· 141 143 142 144 HVSC /* invoke the hypervisor */ 143 145 144 - ld r12,STK_PARM(R4)(r1) 146 + ld r12,STK_PARAM(R4)(r1) 145 147 std r4, 0(r12) 146 148 std r5, 8(r12) 147 149 std r6, 16(r12) ··· 166 168 mfcr r0 167 169 stw r0,8(r1) 168 170 169 - std r4,STK_PARM(R4)(r1) /* Save ret buffer */ 171 + std r4,STK_PARAM(R4)(r1) /* Save ret buffer */ 170 172 171 173 mr r4,r5 172 174 mr r5,r6 ··· 177 179 178 180 HVSC /* invoke the hypervisor */ 179 181 180 - ld r12,STK_PARM(R4)(r1) 182 + ld r12,STK_PARAM(R4)(r1) 181 183 std r4, 0(r12) 182 184 std r5, 8(r12) 183 185 std r6, 16(r12) ··· 196 198 197 199 HCALL_INST_PRECALL(R5) 198 200 199 - std r4,STK_PARM(R4)(r1) /* Save ret buffer */ 201 + std r4,STK_PARAM(R4)(r1) /* Save ret buffer */ 200 202 201 203 mr r4,r5 202 204 mr r5,r6 ··· 204 206 mr r7,r8 205 207 mr r8,r9 206 208 mr r9,r10 207 - ld r10,STK_PARM(R11)(r1) /* put arg7 in R10 */ 208 - ld r11,STK_PARM(R12)(r1) /* put arg8 in R11 */ 209 - ld r12,STK_PARM(R13)(r1) /* put arg9 in R12 */ 209 + ld r10,STK_PARAM(R11)(r1) /* put arg7 in R10 */ 210 + ld r11,STK_PARAM(R12)(r1) /* put arg8 in R11 */ 211 + ld r12,STK_PARAM(R13)(r1) /* put arg9 in R12 */ 210 212 211 213 HVSC /* invoke the hypervisor */ 212 214 213 215 mr r0,r12 214 - ld r12,STK_PARM(R4)(r1) 216 + ld r12,STK_PARAM(R4)(r1) 215 217 std r4, 0(r12) 216 218 std r5, 8(r12) 217 219 std r6, 16(r12) ··· 236 238 mfcr r0 237 239 stw r0,8(r1) 238 240 239 - std r4,STK_PARM(R4)(r1) /* Save ret buffer 
*/ 241 + std r4,STK_PARAM(R4)(r1) /* Save ret buffer */ 240 242 241 243 mr r4,r5 242 244 mr r5,r6 ··· 244 246 mr r7,r8 245 247 mr r8,r9 246 248 mr r9,r10 247 - ld r10,STK_PARM(R11)(r1) /* put arg7 in R10 */ 248 - ld r11,STK_PARM(R12)(r1) /* put arg8 in R11 */ 249 - ld r12,STK_PARM(R13)(r1) /* put arg9 in R12 */ 249 + ld r10,STK_PARAM(R11)(r1) /* put arg7 in R10 */ 250 + ld r11,STK_PARAM(R12)(r1) /* put arg8 in R11 */ 251 + ld r12,STK_PARAM(R13)(r1) /* put arg9 in R12 */ 250 252 251 253 HVSC /* invoke the hypervisor */ 252 254 253 255 mr r0,r12 254 - ld r12,STK_PARM(R4)(r1) 256 + ld r12,STK_PARAM(R4)(r1) 255 257 std r4, 0(r12) 256 258 std r5, 8(r12) 257 259 std r6, 16(r12)