Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

powerpc/ppc-opcode: Consolidate powerpc instructions from bpf_jit.h

Move macro definitions of powerpc instructions from bpf_jit.h to
ppc-opcode.h and adapt the users of the macros accordingly. `PPC_MR()`
is defined twice in bpf_jit.h, remove the duplicate one.

Signed-off-by: Balamuruhan S <bala24@linux.ibm.com>
Tested-by: Naveen N. Rao <naveen.n.rao@linux.vnet.ibm.com>
Acked-by: Naveen N. Rao <naveen.n.rao@linux.vnet.ibm.com>
Acked-by: Sandipan Das <sandipan@linux.ibm.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20200624113038.908074-5-bala24@linux.ibm.com

Authored by Balamuruhan S and committed by Michael Ellerman
(commit 06541865, parent 3a181237)

+324 -373
+85
arch/powerpc/include/asm/ppc-opcode.h
··· 79 79 #define IMM_L(i) ((uintptr_t)(i) & 0xffff) 80 80 #define IMM_DS(i) ((uintptr_t)(i) & 0xfffc) 81 81 82 + /* 83 + * 16-bit immediate helper macros: HA() is for use with sign-extending instrs 84 + * (e.g. LD, ADDI). If the bottom 16 bits is "-ve", add another bit into the 85 + * top half to negate the effect (i.e. 0xffff + 1 = 0x(1)0000). 86 + */ 87 + #define IMM_H(i) ((uintptr_t)(i)>>16) 88 + #define IMM_HA(i) (((uintptr_t)(i)>>16) + \ 89 + (((uintptr_t)(i) & 0x8000) >> 15)) 90 + 91 + 82 92 /* opcode and xopcode for instructions */ 83 93 #define OP_TRAP 3 84 94 #define OP_TRAP_64 2 ··· 550 540 #define PPC_RAW_ADD_DOT(t, a, b) (PPC_INST_ADD | ___PPC_RT(t) | ___PPC_RA(a) | ___PPC_RB(b) | 0x1) 551 541 #define PPC_RAW_ADDC(t, a, b) (PPC_INST_ADDC | ___PPC_RT(t) | ___PPC_RA(a) | ___PPC_RB(b)) 552 542 #define PPC_RAW_ADDC_DOT(t, a, b) (PPC_INST_ADDC | ___PPC_RT(t) | ___PPC_RA(a) | ___PPC_RB(b) | 0x1) 543 + #define PPC_RAW_NOP() (PPC_INST_NOP) 544 + #define PPC_RAW_BLR() (PPC_INST_BLR) 545 + #define PPC_RAW_BLRL() (PPC_INST_BLRL) 546 + #define PPC_RAW_MTLR(r) (PPC_INST_MTLR | ___PPC_RT(r)) 547 + #define PPC_RAW_BCTR() (PPC_INST_BCTR) 548 + #define PPC_RAW_MTCTR(r) (PPC_INST_MTCTR | ___PPC_RT(r)) 549 + #define PPC_RAW_ADDI(d, a, i) (PPC_INST_ADDI | ___PPC_RT(d) | ___PPC_RA(a) | IMM_L(i)) 550 + #define PPC_RAW_LI(r, i) PPC_RAW_ADDI(r, 0, i) 551 + #define PPC_RAW_ADDIS(d, a, i) (PPC_INST_ADDIS | ___PPC_RT(d) | ___PPC_RA(a) | IMM_L(i)) 552 + #define PPC_RAW_LIS(r, i) PPC_RAW_ADDIS(r, 0, i) 553 + #define PPC_RAW_STDX(r, base, b) (PPC_INST_STDX | ___PPC_RS(r) | ___PPC_RA(base) | ___PPC_RB(b)) 554 + #define PPC_RAW_STDU(r, base, i) (PPC_INST_STDU | ___PPC_RS(r) | ___PPC_RA(base) | ((i) & 0xfffc)) 555 + #define PPC_RAW_STW(r, base, i) (PPC_INST_STW | ___PPC_RS(r) | ___PPC_RA(base) | IMM_L(i)) 556 + #define PPC_RAW_STWU(r, base, i) (PPC_INST_STWU | ___PPC_RS(r) | ___PPC_RA(base) | IMM_L(i)) 557 + #define PPC_RAW_STH(r, base, i) (PPC_INST_STH | ___PPC_RS(r) | 
___PPC_RA(base) | IMM_L(i)) 558 + #define PPC_RAW_STB(r, base, i) (PPC_INST_STB | ___PPC_RS(r) | ___PPC_RA(base) | IMM_L(i)) 559 + #define PPC_RAW_LBZ(r, base, i) (PPC_INST_LBZ | ___PPC_RT(r) | ___PPC_RA(base) | IMM_L(i)) 560 + #define PPC_RAW_LDX(r, base, b) (PPC_INST_LDX | ___PPC_RT(r) | ___PPC_RA(base) | ___PPC_RB(b)) 561 + #define PPC_RAW_LHZ(r, base, i) (PPC_INST_LHZ | ___PPC_RT(r) | ___PPC_RA(base) | IMM_L(i)) 562 + #define PPC_RAW_LHBRX(r, base, b) (PPC_INST_LHBRX | ___PPC_RT(r) | ___PPC_RA(base) | ___PPC_RB(b)) 563 + #define PPC_RAW_LDBRX(r, base, b) (PPC_INST_LDBRX | ___PPC_RT(r) | ___PPC_RA(base) | ___PPC_RB(b)) 564 + #define PPC_RAW_STWCX(s, a, b) (PPC_INST_STWCX | ___PPC_RS(s) | ___PPC_RA(a) | ___PPC_RB(b)) 565 + #define PPC_RAW_CMPWI(a, i) (PPC_INST_CMPWI | ___PPC_RA(a) | IMM_L(i)) 566 + #define PPC_RAW_CMPDI(a, i) (PPC_INST_CMPDI | ___PPC_RA(a) | IMM_L(i)) 567 + #define PPC_RAW_CMPW(a, b) (PPC_INST_CMPW | ___PPC_RA(a) | ___PPC_RB(b)) 568 + #define PPC_RAW_CMPD(a, b) (PPC_INST_CMPD | ___PPC_RA(a) | ___PPC_RB(b)) 569 + #define PPC_RAW_CMPLWI(a, i) (PPC_INST_CMPLWI | ___PPC_RA(a) | IMM_L(i)) 570 + #define PPC_RAW_CMPLDI(a, i) (PPC_INST_CMPLDI | ___PPC_RA(a) | IMM_L(i)) 571 + #define PPC_RAW_CMPLW(a, b) (PPC_INST_CMPLW | ___PPC_RA(a) | ___PPC_RB(b)) 572 + #define PPC_RAW_CMPLD(a, b) (PPC_INST_CMPLD | ___PPC_RA(a) | ___PPC_RB(b)) 573 + #define PPC_RAW_SUB(d, a, b) (PPC_INST_SUB | ___PPC_RT(d) | ___PPC_RB(a) | ___PPC_RA(b)) 574 + #define PPC_RAW_MULD(d, a, b) (PPC_INST_MULLD | ___PPC_RT(d) | ___PPC_RA(a) | ___PPC_RB(b)) 575 + #define PPC_RAW_MULW(d, a, b) (PPC_INST_MULLW | ___PPC_RT(d) | ___PPC_RA(a) | ___PPC_RB(b)) 576 + #define PPC_RAW_MULHWU(d, a, b) (PPC_INST_MULHWU | ___PPC_RT(d) | ___PPC_RA(a) | ___PPC_RB(b)) 577 + #define PPC_RAW_MULI(d, a, i) (PPC_INST_MULLI | ___PPC_RT(d) | ___PPC_RA(a) | IMM_L(i)) 578 + #define PPC_RAW_DIVWU(d, a, b) (PPC_INST_DIVWU | ___PPC_RT(d) | ___PPC_RA(a) | ___PPC_RB(b)) 579 + #define PPC_RAW_DIVDU(d, a, b) (PPC_INST_DIVDU 
| ___PPC_RT(d) | ___PPC_RA(a) | ___PPC_RB(b)) 580 + #define PPC_RAW_AND(d, a, b) (PPC_INST_AND | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(b)) 581 + #define PPC_RAW_ANDI(d, a, i) (PPC_INST_ANDI | ___PPC_RA(d) | ___PPC_RS(a) | IMM_L(i)) 582 + #define PPC_RAW_AND_DOT(d, a, b) (PPC_INST_ANDDOT | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(b)) 583 + #define PPC_RAW_OR(d, a, b) (PPC_INST_OR | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(b)) 584 + #define PPC_RAW_MR(d, a) PPC_RAW_OR(d, a, a) 585 + #define PPC_RAW_ORI(d, a, i) (PPC_INST_ORI | ___PPC_RA(d) | ___PPC_RS(a) | IMM_L(i)) 586 + #define PPC_RAW_ORIS(d, a, i) (PPC_INST_ORIS | ___PPC_RA(d) | ___PPC_RS(a) | IMM_L(i)) 587 + #define PPC_RAW_XOR(d, a, b) (PPC_INST_XOR | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(b)) 588 + #define PPC_RAW_XORI(d, a, i) (PPC_INST_XORI | ___PPC_RA(d) | ___PPC_RS(a) | IMM_L(i)) 589 + #define PPC_RAW_XORIS(d, a, i) (PPC_INST_XORIS | ___PPC_RA(d) | ___PPC_RS(a) | IMM_L(i)) 590 + #define PPC_RAW_EXTSW(d, a) (PPC_INST_EXTSW | ___PPC_RA(d) | ___PPC_RS(a)) 591 + #define PPC_RAW_SLW(d, a, s) (PPC_INST_SLW | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(s)) 592 + #define PPC_RAW_SLD(d, a, s) (PPC_INST_SLD | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(s)) 593 + #define PPC_RAW_SRW(d, a, s) (PPC_INST_SRW | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(s)) 594 + #define PPC_RAW_SRAW(d, a, s) (PPC_INST_SRAW | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(s)) 595 + #define PPC_RAW_SRAWI(d, a, i) (PPC_INST_SRAWI | ___PPC_RA(d) | ___PPC_RS(a) | __PPC_SH(i)) 596 + #define PPC_RAW_SRD(d, a, s) (PPC_INST_SRD | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(s)) 597 + #define PPC_RAW_SRAD(d, a, s) (PPC_INST_SRAD | ___PPC_RA(d) | ___PPC_RS(a) | ___PPC_RB(s)) 598 + #define PPC_RAW_SRADI(d, a, i) (PPC_INST_SRADI | ___PPC_RA(d) | ___PPC_RS(a) | __PPC_SH64(i)) 599 + #define PPC_RAW_RLWINM(d, a, i, mb, me) \ 600 + (PPC_INST_RLWINM | ___PPC_RA(d) | ___PPC_RS(a) | __PPC_SH(i) | __PPC_MB(mb) | __PPC_ME(me)) 601 + #define PPC_RAW_RLWINM_DOT(d, a, i, mb, me) \ 
602 + (PPC_INST_RLWINM_DOT | ___PPC_RA(d) | ___PPC_RS(a) | __PPC_SH(i) | __PPC_MB(mb) | __PPC_ME(me)) 603 + #define PPC_RAW_RLWIMI(d, a, i, mb, me) \ 604 + (PPC_INST_RLWIMI | ___PPC_RA(d) | ___PPC_RS(a) | __PPC_SH(i) | __PPC_MB(mb) | __PPC_ME(me)) 605 + #define PPC_RAW_RLDICL(d, a, i, mb) (PPC_INST_RLDICL | ___PPC_RA(d) | ___PPC_RS(a) | __PPC_SH64(i) | __PPC_MB64(mb)) 606 + #define PPC_RAW_RLDICR(d, a, i, me) (PPC_INST_RLDICR | ___PPC_RA(d) | ___PPC_RS(a) | __PPC_SH64(i) | __PPC_ME64(me)) 607 + 608 + /* slwi = rlwinm Rx, Ry, n, 0, 31-n */ 609 + #define PPC_RAW_SLWI(d, a, i) PPC_RAW_RLWINM(d, a, i, 0, 31-(i)) 610 + /* srwi = rlwinm Rx, Ry, 32-n, n, 31 */ 611 + #define PPC_RAW_SRWI(d, a, i) PPC_RAW_RLWINM(d, a, 32-(i), i, 31) 612 + /* sldi = rldicr Rx, Ry, n, 63-n */ 613 + #define PPC_RAW_SLDI(d, a, i) PPC_RAW_RLDICR(d, a, i, 63-(i)) 614 + /* srdi = rldicl Rx, Ry, 64-n, n */ 615 + #define PPC_RAW_SRDI(d, a, i) PPC_RAW_RLDICL(d, a, 64-(i), i) 616 + 617 + #define PPC_RAW_NEG(d, a) (PPC_INST_NEG | ___PPC_RT(d) | ___PPC_RA(a)) 553 618 554 619 /* Deal with instructions that older assemblers aren't aware of */ 555 620 #define PPC_BCCTR_FLUSH stringify_in_c(.long PPC_INST_BCCTR_FLUSH)
+14 -152
arch/powerpc/net/bpf_jit.h
··· 19 19 #define FUNCTION_DESCR_SIZE 0 20 20 #endif 21 21 22 - /* 23 - * 16-bit immediate helper macros: HA() is for use with sign-extending instrs 24 - * (e.g. LD, ADDI). If the bottom 16 bits is "-ve", add another bit into the 25 - * top half to negate the effect (i.e. 0xffff + 1 = 0x(1)0000). 26 - */ 27 - #define IMM_H(i) ((uintptr_t)(i)>>16) 28 - #define IMM_HA(i) (((uintptr_t)(i)>>16) + \ 29 - (((uintptr_t)(i) & 0x8000) >> 15)) 30 - 31 22 #define PLANT_INSTR(d, idx, instr) \ 32 23 do { if (d) { (d)[idx] = instr; } idx++; } while (0) 33 24 #define EMIT(instr) PLANT_INSTR(image, ctx->idx, instr) 34 - 35 - #define PPC_NOP() EMIT(PPC_INST_NOP) 36 - #define PPC_BLR() EMIT(PPC_INST_BLR) 37 - #define PPC_BLRL() EMIT(PPC_INST_BLRL) 38 - #define PPC_MTLR(r) EMIT(PPC_INST_MTLR | ___PPC_RT(r)) 39 - #define PPC_BCTR() EMIT(PPC_INST_BCTR) 40 - #define PPC_MTCTR(r) EMIT(PPC_INST_MTCTR | ___PPC_RT(r)) 41 - #define PPC_ADDI(d, a, i) EMIT(PPC_INST_ADDI | ___PPC_RT(d) | \ 42 - ___PPC_RA(a) | IMM_L(i)) 43 - #define PPC_MR(d, a) PPC_OR(d, a, a) 44 - #define PPC_LI(r, i) PPC_ADDI(r, 0, i) 45 - #define PPC_ADDIS(d, a, i) EMIT(PPC_INST_ADDIS | \ 46 - ___PPC_RT(d) | ___PPC_RA(a) | IMM_L(i)) 47 - #define PPC_LIS(r, i) PPC_ADDIS(r, 0, i) 48 - #define PPC_STDX(r, base, b) EMIT(PPC_INST_STDX | ___PPC_RS(r) | \ 49 - ___PPC_RA(base) | ___PPC_RB(b)) 50 - #define PPC_STDU(r, base, i) EMIT(PPC_INST_STDU | ___PPC_RS(r) | \ 51 - ___PPC_RA(base) | ((i) & 0xfffc)) 52 - #define PPC_STW(r, base, i) EMIT(PPC_INST_STW | ___PPC_RS(r) | \ 53 - ___PPC_RA(base) | IMM_L(i)) 54 - #define PPC_STWU(r, base, i) EMIT(PPC_INST_STWU | ___PPC_RS(r) | \ 55 - ___PPC_RA(base) | IMM_L(i)) 56 - #define PPC_STH(r, base, i) EMIT(PPC_INST_STH | ___PPC_RS(r) | \ 57 - ___PPC_RA(base) | IMM_L(i)) 58 - #define PPC_STB(r, base, i) EMIT(PPC_INST_STB | ___PPC_RS(r) | \ 59 - ___PPC_RA(base) | IMM_L(i)) 60 - 61 - #define PPC_LBZ(r, base, i) EMIT(PPC_INST_LBZ | ___PPC_RT(r) | \ 62 - ___PPC_RA(base) | IMM_L(i)) 63 - #define 
PPC_LDX(r, base, b) EMIT(PPC_INST_LDX | ___PPC_RT(r) | \ 64 - ___PPC_RA(base) | ___PPC_RB(b)) 65 - #define PPC_LHZ(r, base, i) EMIT(PPC_INST_LHZ | ___PPC_RT(r) | \ 66 - ___PPC_RA(base) | IMM_L(i)) 67 - #define PPC_LHBRX(r, base, b) EMIT(PPC_INST_LHBRX | ___PPC_RT(r) | \ 68 - ___PPC_RA(base) | ___PPC_RB(b)) 69 - #define PPC_LDBRX(r, base, b) EMIT(PPC_INST_LDBRX | ___PPC_RT(r) | \ 70 - ___PPC_RA(base) | ___PPC_RB(b)) 71 - 72 - #define PPC_BPF_STWCX(s, a, b) EMIT(PPC_INST_STWCX | ___PPC_RS(s) | \ 73 - ___PPC_RA(a) | ___PPC_RB(b)) 74 - #define PPC_CMPWI(a, i) EMIT(PPC_INST_CMPWI | ___PPC_RA(a) | IMM_L(i)) 75 - #define PPC_CMPDI(a, i) EMIT(PPC_INST_CMPDI | ___PPC_RA(a) | IMM_L(i)) 76 - #define PPC_CMPW(a, b) EMIT(PPC_INST_CMPW | ___PPC_RA(a) | \ 77 - ___PPC_RB(b)) 78 - #define PPC_CMPD(a, b) EMIT(PPC_INST_CMPD | ___PPC_RA(a) | \ 79 - ___PPC_RB(b)) 80 - #define PPC_CMPLWI(a, i) EMIT(PPC_INST_CMPLWI | ___PPC_RA(a) | IMM_L(i)) 81 - #define PPC_CMPLDI(a, i) EMIT(PPC_INST_CMPLDI | ___PPC_RA(a) | IMM_L(i)) 82 - #define PPC_CMPLW(a, b) EMIT(PPC_INST_CMPLW | ___PPC_RA(a) | \ 83 - ___PPC_RB(b)) 84 - #define PPC_CMPLD(a, b) EMIT(PPC_INST_CMPLD | ___PPC_RA(a) | \ 85 - ___PPC_RB(b)) 86 - 87 - #define PPC_SUB(d, a, b) EMIT(PPC_INST_SUB | ___PPC_RT(d) | \ 88 - ___PPC_RB(a) | ___PPC_RA(b)) 89 - #define PPC_MULD(d, a, b) EMIT(PPC_INST_MULLD | ___PPC_RT(d) | \ 90 - ___PPC_RA(a) | ___PPC_RB(b)) 91 - #define PPC_MULW(d, a, b) EMIT(PPC_INST_MULLW | ___PPC_RT(d) | \ 92 - ___PPC_RA(a) | ___PPC_RB(b)) 93 - #define PPC_MULHWU(d, a, b) EMIT(PPC_INST_MULHWU | ___PPC_RT(d) | \ 94 - ___PPC_RA(a) | ___PPC_RB(b)) 95 - #define PPC_MULI(d, a, i) EMIT(PPC_INST_MULLI | ___PPC_RT(d) | \ 96 - ___PPC_RA(a) | IMM_L(i)) 97 - #define PPC_DIVWU(d, a, b) EMIT(PPC_INST_DIVWU | ___PPC_RT(d) | \ 98 - ___PPC_RA(a) | ___PPC_RB(b)) 99 - #define PPC_DIVDU(d, a, b) EMIT(PPC_INST_DIVDU | ___PPC_RT(d) | \ 100 - ___PPC_RA(a) | ___PPC_RB(b)) 101 - #define PPC_AND(d, a, b) EMIT(PPC_INST_AND | ___PPC_RA(d) | \ 102 - 
___PPC_RS(a) | ___PPC_RB(b)) 103 - #define PPC_ANDI(d, a, i) EMIT(PPC_INST_ANDI | ___PPC_RA(d) | \ 104 - ___PPC_RS(a) | IMM_L(i)) 105 - #define PPC_AND_DOT(d, a, b) EMIT(PPC_INST_ANDDOT | ___PPC_RA(d) | \ 106 - ___PPC_RS(a) | ___PPC_RB(b)) 107 - #define PPC_OR(d, a, b) EMIT(PPC_INST_OR | ___PPC_RA(d) | \ 108 - ___PPC_RS(a) | ___PPC_RB(b)) 109 - #define PPC_MR(d, a) PPC_OR(d, a, a) 110 - #define PPC_ORI(d, a, i) EMIT(PPC_INST_ORI | ___PPC_RA(d) | \ 111 - ___PPC_RS(a) | IMM_L(i)) 112 - #define PPC_ORIS(d, a, i) EMIT(PPC_INST_ORIS | ___PPC_RA(d) | \ 113 - ___PPC_RS(a) | IMM_L(i)) 114 - #define PPC_XOR(d, a, b) EMIT(PPC_INST_XOR | ___PPC_RA(d) | \ 115 - ___PPC_RS(a) | ___PPC_RB(b)) 116 - #define PPC_XORI(d, a, i) EMIT(PPC_INST_XORI | ___PPC_RA(d) | \ 117 - ___PPC_RS(a) | IMM_L(i)) 118 - #define PPC_XORIS(d, a, i) EMIT(PPC_INST_XORIS | ___PPC_RA(d) | \ 119 - ___PPC_RS(a) | IMM_L(i)) 120 - #define PPC_EXTSW(d, a) EMIT(PPC_INST_EXTSW | ___PPC_RA(d) | \ 121 - ___PPC_RS(a)) 122 - #define PPC_SLW(d, a, s) EMIT(PPC_INST_SLW | ___PPC_RA(d) | \ 123 - ___PPC_RS(a) | ___PPC_RB(s)) 124 - #define PPC_SLD(d, a, s) EMIT(PPC_INST_SLD | ___PPC_RA(d) | \ 125 - ___PPC_RS(a) | ___PPC_RB(s)) 126 - #define PPC_SRW(d, a, s) EMIT(PPC_INST_SRW | ___PPC_RA(d) | \ 127 - ___PPC_RS(a) | ___PPC_RB(s)) 128 - #define PPC_SRAW(d, a, s) EMIT(PPC_INST_SRAW | ___PPC_RA(d) | \ 129 - ___PPC_RS(a) | ___PPC_RB(s)) 130 - #define PPC_SRAWI(d, a, i) EMIT(PPC_INST_SRAWI | ___PPC_RA(d) | \ 131 - ___PPC_RS(a) | __PPC_SH(i)) 132 - #define PPC_SRD(d, a, s) EMIT(PPC_INST_SRD | ___PPC_RA(d) | \ 133 - ___PPC_RS(a) | ___PPC_RB(s)) 134 - #define PPC_SRAD(d, a, s) EMIT(PPC_INST_SRAD | ___PPC_RA(d) | \ 135 - ___PPC_RS(a) | ___PPC_RB(s)) 136 - #define PPC_SRADI(d, a, i) EMIT(PPC_INST_SRADI | ___PPC_RA(d) | \ 137 - ___PPC_RS(a) | __PPC_SH64(i)) 138 - #define PPC_RLWINM(d, a, i, mb, me) EMIT(PPC_INST_RLWINM | ___PPC_RA(d) | \ 139 - ___PPC_RS(a) | __PPC_SH(i) | \ 140 - __PPC_MB(mb) | __PPC_ME(me)) 141 - #define 
PPC_RLWINM_DOT(d, a, i, mb, me) EMIT(PPC_INST_RLWINM_DOT | \ 142 - ___PPC_RA(d) | ___PPC_RS(a) | \ 143 - __PPC_SH(i) | __PPC_MB(mb) | \ 144 - __PPC_ME(me)) 145 - #define PPC_RLWIMI(d, a, i, mb, me) EMIT(PPC_INST_RLWIMI | ___PPC_RA(d) | \ 146 - ___PPC_RS(a) | __PPC_SH(i) | \ 147 - __PPC_MB(mb) | __PPC_ME(me)) 148 - #define PPC_RLDICL(d, a, i, mb) EMIT(PPC_INST_RLDICL | ___PPC_RA(d) | \ 149 - ___PPC_RS(a) | __PPC_SH64(i) | \ 150 - __PPC_MB64(mb)) 151 - #define PPC_RLDICR(d, a, i, me) EMIT(PPC_INST_RLDICR | ___PPC_RA(d) | \ 152 - ___PPC_RS(a) | __PPC_SH64(i) | \ 153 - __PPC_ME64(me)) 154 - 155 - /* slwi = rlwinm Rx, Ry, n, 0, 31-n */ 156 - #define PPC_SLWI(d, a, i) PPC_RLWINM(d, a, i, 0, 31-(i)) 157 - /* srwi = rlwinm Rx, Ry, 32-n, n, 31 */ 158 - #define PPC_SRWI(d, a, i) PPC_RLWINM(d, a, 32-(i), i, 31) 159 - /* sldi = rldicr Rx, Ry, n, 63-n */ 160 - #define PPC_SLDI(d, a, i) PPC_RLDICR(d, a, i, 63-(i)) 161 - /* sldi = rldicl Rx, Ry, 64-n, n */ 162 - #define PPC_SRDI(d, a, i) PPC_RLDICL(d, a, 64-(i), i) 163 - 164 - #define PPC_NEG(d, a) EMIT(PPC_INST_NEG | ___PPC_RT(d) | ___PPC_RA(a)) 165 25 166 26 /* Long jump; (unconditional 'branch') */ 167 27 #define PPC_JMP(dest) EMIT(PPC_INST_BRANCH | \ ··· 35 175 #define PPC_LI32(d, i) do { \ 36 176 if ((int)(uintptr_t)(i) >= -32768 && \ 37 177 (int)(uintptr_t)(i) < 32768) \ 38 - PPC_LI(d, i); \ 178 + EMIT(PPC_RAW_LI(d, i)); \ 39 179 else { \ 40 - PPC_LIS(d, IMM_H(i)); \ 180 + EMIT(PPC_RAW_LIS(d, IMM_H(i))); \ 41 181 if (IMM_L(i)) \ 42 - PPC_ORI(d, d, IMM_L(i)); \ 182 + EMIT(PPC_RAW_ORI(d, d, IMM_L(i))); \ 43 183 } } while(0) 44 184 45 185 #define PPC_LI64(d, i) do { \ ··· 48 188 PPC_LI32(d, i); \ 49 189 else { \ 50 190 if (!((uintptr_t)(i) & 0xffff800000000000ULL)) \ 51 - PPC_LI(d, ((uintptr_t)(i) >> 32) & 0xffff); \ 191 + EMIT(PPC_RAW_LI(d, ((uintptr_t)(i) >> 32) & \ 192 + 0xffff)); \ 52 193 else { \ 53 - PPC_LIS(d, ((uintptr_t)(i) >> 48)); \ 194 + EMIT(PPC_RAW_LIS(d, ((uintptr_t)(i) >> 48))); \ 54 195 if ((uintptr_t)(i) & 
0x0000ffff00000000ULL) \ 55 - PPC_ORI(d, d, \ 56 - ((uintptr_t)(i) >> 32) & 0xffff); \ 196 + EMIT(PPC_RAW_ORI(d, d, \ 197 + ((uintptr_t)(i) >> 32) & 0xffff)); \ 57 198 } \ 58 - PPC_SLDI(d, d, 32); \ 199 + EMIT(PPC_RAW_SLDI(d, d, 32)); \ 59 200 if ((uintptr_t)(i) & 0x00000000ffff0000ULL) \ 60 - PPC_ORIS(d, d, \ 61 - ((uintptr_t)(i) >> 16) & 0xffff); \ 201 + EMIT(PPC_RAW_ORIS(d, d, \ 202 + ((uintptr_t)(i) >> 16) & 0xffff)); \ 62 203 if ((uintptr_t)(i) & 0x000000000000ffffULL) \ 63 - PPC_ORI(d, d, (uintptr_t)(i) & 0xffff); \ 204 + EMIT(PPC_RAW_ORI(d, d, (uintptr_t)(i) & \ 205 + 0xffff)); \ 64 206 } } while (0) 65 207 66 208 #ifdef CONFIG_PPC64 ··· 86 224 #define PPC_BCC(cond, dest) do { \ 87 225 if (is_nearbranch((dest) - (ctx->idx * 4))) { \ 88 226 PPC_BCC_SHORT(cond, dest); \ 89 - PPC_NOP(); \ 227 + EMIT(PPC_RAW_NOP()); \ 90 228 } else { \ 91 229 /* Flip the 'T or F' bit to invert comparison */ \ 92 230 PPC_BCC_SHORT(cond ^ COND_CMP_TRUE, (ctx->idx+2)*4); \
+12 -12
arch/powerpc/net/bpf_jit32.h
··· 72 72 DECLARE_LOAD_FUNC(sk_load_byte); 73 73 DECLARE_LOAD_FUNC(sk_load_byte_msh); 74 74 75 - #define PPC_LBZ_OFFS(r, base, i) do { if ((i) < 32768) PPC_LBZ(r, base, i); \ 76 - else { PPC_ADDIS(r, base, IMM_HA(i)); \ 77 - PPC_LBZ(r, r, IMM_L(i)); } } while(0) 75 + #define PPC_LBZ_OFFS(r, base, i) do { if ((i) < 32768) EMIT(PPC_RAW_LBZ(r, base, i)); \ 76 + else { EMIT(PPC_RAW_ADDIS(r, base, IMM_HA(i))); \ 77 + EMIT(PPC_RAW_LBZ(r, r, IMM_L(i))); } } while(0) 78 78 79 79 #define PPC_LD_OFFS(r, base, i) do { if ((i) < 32768) EMIT(PPC_RAW_LD(r, base, i)); \ 80 - else { PPC_ADDIS(r, base, IMM_HA(i)); \ 80 + else { EMIT(PPC_RAW_ADDIS(r, base, IMM_HA(i))); \ 81 81 EMIT(PPC_RAW_LD(r, r, IMM_L(i))); } } while(0) 82 82 83 83 #define PPC_LWZ_OFFS(r, base, i) do { if ((i) < 32768) EMIT(PPC_RAW_LWZ(r, base, i)); \ 84 - else { PPC_ADDIS(r, base, IMM_HA(i)); \ 84 + else { EMIT(PPC_RAW_ADDIS(r, base, IMM_HA(i))); \ 85 85 EMIT(PPC_RAW_LWZ(r, r, IMM_L(i))); } } while(0) 86 86 87 - #define PPC_LHZ_OFFS(r, base, i) do { if ((i) < 32768) PPC_LHZ(r, base, i); \ 88 - else { PPC_ADDIS(r, base, IMM_HA(i)); \ 89 - PPC_LHZ(r, r, IMM_L(i)); } } while(0) 87 + #define PPC_LHZ_OFFS(r, base, i) do { if ((i) < 32768) EMIT(PPC_RAW_LHZ(r, base, i)); \ 88 + else { EMIT(PPC_RAW_ADDIS(r, base, IMM_HA(i))); \ 89 + EMIT(PPC_RAW_LHZ(r, r, IMM_L(i))); } } while(0) 90 90 91 91 #ifdef CONFIG_PPC64 92 92 #define PPC_LL_OFFS(r, base, i) do { PPC_LD_OFFS(r, base, i); } while(0) ··· 107 107 } while(0) 108 108 #endif 109 109 #else 110 - #define PPC_BPF_LOAD_CPU(r) do { PPC_LI(r, 0); } while(0) 110 + #define PPC_BPF_LOAD_CPU(r) do { EMIT(PPC_RAW_LI(r, 0)); } while(0) 111 111 #endif 112 112 113 113 #define PPC_LHBRX_OFFS(r, base, i) \ 114 - do { PPC_LI32(r, i); PPC_LHBRX(r, r, base); } while(0) 114 + do { PPC_LI32(r, i); EMIT(PPC_RAW_LHBRX(r, r, base)); } while(0) 115 115 #ifdef __LITTLE_ENDIAN__ 116 116 #define PPC_NTOHS_OFFS(r, base, i) PPC_LHBRX_OFFS(r, base, i) 117 117 #else ··· 119 119 #endif 120 120 121 121 
#define PPC_BPF_LL(r, base, i) do { EMIT(PPC_RAW_LWZ(r, base, i)); } while(0) 122 - #define PPC_BPF_STL(r, base, i) do { PPC_STW(r, base, i); } while(0) 123 - #define PPC_BPF_STLU(r, base, i) do { PPC_STWU(r, base, i); } while(0) 122 + #define PPC_BPF_STL(r, base, i) do { EMIT(PPC_RAW_STW(r, base, i)); } while(0) 123 + #define PPC_BPF_STLU(r, base, i) do { EMIT(PPC_RAW_STWU(r, base, i)); } while(0) 124 124 125 125 #define SEEN_DATAREF 0x10000 /* might call external helpers */ 126 126 #define SEEN_XREG 0x20000 /* X reg is used */
+7 -5
arch/powerpc/net/bpf_jit64.h
··· 70 70 */ 71 71 #define PPC_BPF_LL(r, base, i) do { \ 72 72 if ((i) % 4) { \ 73 - PPC_LI(b2p[TMP_REG_2], (i)); \ 74 - PPC_LDX(r, base, b2p[TMP_REG_2]); \ 73 + EMIT(PPC_RAW_LI(b2p[TMP_REG_2], (i)));\ 74 + EMIT(PPC_RAW_LDX(r, base, \ 75 + b2p[TMP_REG_2])); \ 75 76 } else \ 76 77 EMIT(PPC_RAW_LD(r, base, i)); \ 77 78 } while(0) 78 79 #define PPC_BPF_STL(r, base, i) do { \ 79 80 if ((i) % 4) { \ 80 - PPC_LI(b2p[TMP_REG_2], (i)); \ 81 - PPC_STDX(r, base, b2p[TMP_REG_2]); \ 81 + EMIT(PPC_RAW_LI(b2p[TMP_REG_2], (i)));\ 82 + EMIT(PPC_RAW_STDX(r, base, \ 83 + b2p[TMP_REG_2])); \ 82 84 } else \ 83 85 EMIT(PPC_RAW_STD(r, base, i)); \ 84 86 } while(0) 85 - #define PPC_BPF_STLU(r, base, i) do { PPC_STDU(r, base, i); } while(0) 87 + #define PPC_BPF_STLU(r, base, i) do { EMIT(PPC_RAW_STDU(r, base, i)); } while(0) 86 88 87 89 #define SEEN_FUNC 0x1000 /* might call external helpers */ 88 90 #define SEEN_STACK 0x2000 /* uses BPF stack */
+66 -66
arch/powerpc/net/bpf_jit_comp.c
··· 61 61 PPC_LWZ_OFFS(r_scratch1, r_skb, offsetof(struct sk_buff, 62 62 data_len)); 63 63 PPC_LWZ_OFFS(r_HL, r_skb, offsetof(struct sk_buff, len)); 64 - PPC_SUB(r_HL, r_HL, r_scratch1); 64 + EMIT(PPC_RAW_SUB(r_HL, r_HL, r_scratch1)); 65 65 PPC_LL_OFFS(r_D, r_skb, offsetof(struct sk_buff, data)); 66 66 } 67 67 ··· 70 70 * TODO: Could also detect whether first instr. sets X and 71 71 * avoid this (as below, with A). 72 72 */ 73 - PPC_LI(r_X, 0); 73 + EMIT(PPC_RAW_LI(r_X, 0)); 74 74 } 75 75 76 76 /* make sure we dont leak kernel information to user */ 77 77 if (bpf_needs_clear_a(&filter[0])) 78 - PPC_LI(r_A, 0); 78 + EMIT(PPC_RAW_LI(r_A, 0)); 79 79 } 80 80 81 81 static void bpf_jit_build_epilogue(u32 *image, struct codegen_context *ctx) ··· 83 83 int i; 84 84 85 85 if (ctx->seen & (SEEN_MEM | SEEN_DATAREF)) { 86 - PPC_ADDI(1, 1, BPF_PPC_STACKFRAME); 86 + EMIT(PPC_RAW_ADDI(1, 1, BPF_PPC_STACKFRAME)); 87 87 if (ctx->seen & SEEN_DATAREF) { 88 88 PPC_BPF_LL(0, 1, PPC_LR_STKOFF); 89 - PPC_MTLR(0); 89 + EMIT(PPC_RAW_MTLR(0)); 90 90 PPC_BPF_LL(r_D, 1, -(REG_SZ*(32-r_D))); 91 91 PPC_BPF_LL(r_HL, 1, -(REG_SZ*(32-r_HL))); 92 92 } ··· 100 100 } 101 101 /* The RETs have left a return value in R3. 
*/ 102 102 103 - PPC_BLR(); 103 + EMIT(PPC_RAW_BLR()); 104 104 } 105 105 106 106 #define CHOOSE_LOAD_FUNC(K, func) \ ··· 139 139 case BPF_ALU | BPF_ADD | BPF_K: /* A += K; */ 140 140 if (!K) 141 141 break; 142 - PPC_ADDI(r_A, r_A, IMM_L(K)); 142 + EMIT(PPC_RAW_ADDI(r_A, r_A, IMM_L(K))); 143 143 if (K >= 32768) 144 - PPC_ADDIS(r_A, r_A, IMM_HA(K)); 144 + EMIT(PPC_RAW_ADDIS(r_A, r_A, IMM_HA(K))); 145 145 break; 146 146 case BPF_ALU | BPF_SUB | BPF_X: /* A -= X; */ 147 147 ctx->seen |= SEEN_XREG; 148 - PPC_SUB(r_A, r_A, r_X); 148 + EMIT(PPC_RAW_SUB(r_A, r_A, r_X)); 149 149 break; 150 150 case BPF_ALU | BPF_SUB | BPF_K: /* A -= K */ 151 151 if (!K) 152 152 break; 153 - PPC_ADDI(r_A, r_A, IMM_L(-K)); 153 + EMIT(PPC_RAW_ADDI(r_A, r_A, IMM_L(-K))); 154 154 if (K >= 32768) 155 - PPC_ADDIS(r_A, r_A, IMM_HA(-K)); 155 + EMIT(PPC_RAW_ADDIS(r_A, r_A, IMM_HA(-K))); 156 156 break; 157 157 case BPF_ALU | BPF_MUL | BPF_X: /* A *= X; */ 158 158 ctx->seen |= SEEN_XREG; 159 - PPC_MULW(r_A, r_A, r_X); 159 + EMIT(PPC_RAW_MULW(r_A, r_A, r_X)); 160 160 break; 161 161 case BPF_ALU | BPF_MUL | BPF_K: /* A *= K */ 162 162 if (K < 32768) 163 - PPC_MULI(r_A, r_A, K); 163 + EMIT(PPC_RAW_MULI(r_A, r_A, K)); 164 164 else { 165 165 PPC_LI32(r_scratch1, K); 166 - PPC_MULW(r_A, r_A, r_scratch1); 166 + EMIT(PPC_RAW_MULW(r_A, r_A, r_scratch1)); 167 167 } 168 168 break; 169 169 case BPF_ALU | BPF_MOD | BPF_X: /* A %= X; */ 170 170 case BPF_ALU | BPF_DIV | BPF_X: /* A /= X; */ 171 171 ctx->seen |= SEEN_XREG; 172 - PPC_CMPWI(r_X, 0); 172 + EMIT(PPC_RAW_CMPWI(r_X, 0)); 173 173 if (ctx->pc_ret0 != -1) { 174 174 PPC_BCC(COND_EQ, addrs[ctx->pc_ret0]); 175 175 } else { 176 176 PPC_BCC_SHORT(COND_NE, (ctx->idx*4)+12); 177 - PPC_LI(r_ret, 0); 177 + EMIT(PPC_RAW_LI(r_ret, 0)); 178 178 PPC_JMP(exit_addr); 179 179 } 180 180 if (code == (BPF_ALU | BPF_MOD | BPF_X)) { 181 - PPC_DIVWU(r_scratch1, r_A, r_X); 182 - PPC_MULW(r_scratch1, r_X, r_scratch1); 183 - PPC_SUB(r_A, r_A, r_scratch1); 181 + 
EMIT(PPC_RAW_DIVWU(r_scratch1, r_A, r_X)); 182 + EMIT(PPC_RAW_MULW(r_scratch1, r_X, r_scratch1)); 183 + EMIT(PPC_RAW_SUB(r_A, r_A, r_scratch1)); 184 184 } else { 185 - PPC_DIVWU(r_A, r_A, r_X); 185 + EMIT(PPC_RAW_DIVWU(r_A, r_A, r_X)); 186 186 } 187 187 break; 188 188 case BPF_ALU | BPF_MOD | BPF_K: /* A %= K; */ 189 189 PPC_LI32(r_scratch2, K); 190 - PPC_DIVWU(r_scratch1, r_A, r_scratch2); 191 - PPC_MULW(r_scratch1, r_scratch2, r_scratch1); 192 - PPC_SUB(r_A, r_A, r_scratch1); 190 + EMIT(PPC_RAW_DIVWU(r_scratch1, r_A, r_scratch2)); 191 + EMIT(PPC_RAW_MULW(r_scratch1, r_scratch2, r_scratch1)); 192 + EMIT(PPC_RAW_SUB(r_A, r_A, r_scratch1)); 193 193 break; 194 194 case BPF_ALU | BPF_DIV | BPF_K: /* A /= K */ 195 195 if (K == 1) 196 196 break; 197 197 PPC_LI32(r_scratch1, K); 198 - PPC_DIVWU(r_A, r_A, r_scratch1); 198 + EMIT(PPC_RAW_DIVWU(r_A, r_A, r_scratch1)); 199 199 break; 200 200 case BPF_ALU | BPF_AND | BPF_X: 201 201 ctx->seen |= SEEN_XREG; 202 - PPC_AND(r_A, r_A, r_X); 202 + EMIT(PPC_RAW_AND(r_A, r_A, r_X)); 203 203 break; 204 204 case BPF_ALU | BPF_AND | BPF_K: 205 205 if (!IMM_H(K)) 206 - PPC_ANDI(r_A, r_A, K); 206 + EMIT(PPC_RAW_ANDI(r_A, r_A, K)); 207 207 else { 208 208 PPC_LI32(r_scratch1, K); 209 - PPC_AND(r_A, r_A, r_scratch1); 209 + EMIT(PPC_RAW_AND(r_A, r_A, r_scratch1)); 210 210 } 211 211 break; 212 212 case BPF_ALU | BPF_OR | BPF_X: 213 213 ctx->seen |= SEEN_XREG; 214 - PPC_OR(r_A, r_A, r_X); 214 + EMIT(PPC_RAW_OR(r_A, r_A, r_X)); 215 215 break; 216 216 case BPF_ALU | BPF_OR | BPF_K: 217 217 if (IMM_L(K)) 218 - PPC_ORI(r_A, r_A, IMM_L(K)); 218 + EMIT(PPC_RAW_ORI(r_A, r_A, IMM_L(K))); 219 219 if (K >= 65536) 220 - PPC_ORIS(r_A, r_A, IMM_H(K)); 220 + EMIT(PPC_RAW_ORIS(r_A, r_A, IMM_H(K))); 221 221 break; 222 222 case BPF_ANC | SKF_AD_ALU_XOR_X: 223 223 case BPF_ALU | BPF_XOR | BPF_X: /* A ^= X */ 224 224 ctx->seen |= SEEN_XREG; 225 - PPC_XOR(r_A, r_A, r_X); 225 + EMIT(PPC_RAW_XOR(r_A, r_A, r_X)); 226 226 break; 227 227 case BPF_ALU | BPF_XOR | BPF_K: 
/* A ^= K */ 228 228 if (IMM_L(K)) 229 - PPC_XORI(r_A, r_A, IMM_L(K)); 229 + EMIT(PPC_RAW_XORI(r_A, r_A, IMM_L(K))); 230 230 if (K >= 65536) 231 - PPC_XORIS(r_A, r_A, IMM_H(K)); 231 + EMIT(PPC_RAW_XORIS(r_A, r_A, IMM_H(K))); 232 232 break; 233 233 case BPF_ALU | BPF_LSH | BPF_X: /* A <<= X; */ 234 234 ctx->seen |= SEEN_XREG; 235 - PPC_SLW(r_A, r_A, r_X); 235 + EMIT(PPC_RAW_SLW(r_A, r_A, r_X)); 236 236 break; 237 237 case BPF_ALU | BPF_LSH | BPF_K: 238 238 if (K == 0) 239 239 break; 240 240 else 241 - PPC_SLWI(r_A, r_A, K); 241 + EMIT(PPC_RAW_SLWI(r_A, r_A, K)); 242 242 break; 243 243 case BPF_ALU | BPF_RSH | BPF_X: /* A >>= X; */ 244 244 ctx->seen |= SEEN_XREG; 245 - PPC_SRW(r_A, r_A, r_X); 245 + EMIT(PPC_RAW_SRW(r_A, r_A, r_X)); 246 246 break; 247 247 case BPF_ALU | BPF_RSH | BPF_K: /* A >>= K; */ 248 248 if (K == 0) 249 249 break; 250 250 else 251 - PPC_SRWI(r_A, r_A, K); 251 + EMIT(PPC_RAW_SRWI(r_A, r_A, K)); 252 252 break; 253 253 case BPF_ALU | BPF_NEG: 254 - PPC_NEG(r_A, r_A); 254 + EMIT(PPC_RAW_NEG(r_A, r_A)); 255 255 break; 256 256 case BPF_RET | BPF_K: 257 257 PPC_LI32(r_ret, K); ··· 277 277 if (ctx->seen) 278 278 PPC_JMP(exit_addr); 279 279 else 280 - PPC_BLR(); 280 + EMIT(PPC_RAW_BLR()); 281 281 } 282 282 break; 283 283 case BPF_RET | BPF_A: 284 - PPC_MR(r_ret, r_A); 284 + EMIT(PPC_RAW_MR(r_ret, r_A)); 285 285 if (i != flen - 1) { 286 286 if (ctx->seen) 287 287 PPC_JMP(exit_addr); 288 288 else 289 - PPC_BLR(); 289 + EMIT(PPC_RAW_BLR()); 290 290 } 291 291 break; 292 292 case BPF_MISC | BPF_TAX: /* X = A */ 293 - PPC_MR(r_X, r_A); 293 + EMIT(PPC_RAW_MR(r_X, r_A)); 294 294 break; 295 295 case BPF_MISC | BPF_TXA: /* A = X */ 296 296 ctx->seen |= SEEN_XREG; 297 - PPC_MR(r_A, r_X); 297 + EMIT(PPC_RAW_MR(r_A, r_X)); 298 298 break; 299 299 300 300 /*** Constant loads/M[] access ***/ ··· 305 305 PPC_LI32(r_X, K); 306 306 break; 307 307 case BPF_LD | BPF_MEM: /* A = mem[K] */ 308 - PPC_MR(r_A, r_M + (K & 0xf)); 308 + EMIT(PPC_RAW_MR(r_A, r_M + (K & 0xf))); 309 309 
ctx->seen |= SEEN_MEM | (1<<(K & 0xf)); 310 310 break; 311 311 case BPF_LDX | BPF_MEM: /* X = mem[K] */ 312 - PPC_MR(r_X, r_M + (K & 0xf)); 312 + EMIT(PPC_RAW_MR(r_X, r_M + (K & 0xf))); 313 313 ctx->seen |= SEEN_MEM | (1<<(K & 0xf)); 314 314 break; 315 315 case BPF_ST: /* mem[K] = A */ 316 - PPC_MR(r_M + (K & 0xf), r_A); 316 + EMIT(PPC_RAW_MR(r_M + (K & 0xf), r_A)); 317 317 ctx->seen |= SEEN_MEM | (1<<(K & 0xf)); 318 318 break; 319 319 case BPF_STX: /* mem[K] = X */ 320 - PPC_MR(r_M + (K & 0xf), r_X); 320 + EMIT(PPC_RAW_MR(r_M + (K & 0xf), r_X)); 321 321 ctx->seen |= SEEN_XREG | SEEN_MEM | (1<<(K & 0xf)); 322 322 break; 323 323 case BPF_LD | BPF_W | BPF_LEN: /* A = skb->len; */ ··· 346 346 type) != 2); 347 347 PPC_LL_OFFS(r_scratch1, r_skb, offsetof(struct sk_buff, 348 348 dev)); 349 - PPC_CMPDI(r_scratch1, 0); 349 + EMIT(PPC_RAW_CMPDI(r_scratch1, 0)); 350 350 if (ctx->pc_ret0 != -1) { 351 351 PPC_BCC(COND_EQ, addrs[ctx->pc_ret0]); 352 352 } else { 353 353 /* Exit, returning 0; first pass hits here. 
*/ 354 354 PPC_BCC_SHORT(COND_NE, ctx->idx * 4 + 12); 355 - PPC_LI(r_ret, 0); 355 + EMIT(PPC_RAW_LI(r_ret, 0)); 356 356 PPC_JMP(exit_addr); 357 357 } 358 358 if (code == (BPF_ANC | SKF_AD_IFINDEX)) { ··· 383 383 case BPF_ANC | SKF_AD_VLAN_TAG_PRESENT: 384 384 PPC_LBZ_OFFS(r_A, r_skb, PKT_VLAN_PRESENT_OFFSET()); 385 385 if (PKT_VLAN_PRESENT_BIT) 386 - PPC_SRWI(r_A, r_A, PKT_VLAN_PRESENT_BIT); 386 + EMIT(PPC_RAW_SRWI(r_A, r_A, PKT_VLAN_PRESENT_BIT)); 387 387 if (PKT_VLAN_PRESENT_BIT < 7) 388 - PPC_ANDI(r_A, r_A, 1); 388 + EMIT(PPC_RAW_ANDI(r_A, r_A, 1)); 389 389 break; 390 390 case BPF_ANC | SKF_AD_QUEUE: 391 391 BUILD_BUG_ON(sizeof_field(struct sk_buff, ··· 395 395 break; 396 396 case BPF_ANC | SKF_AD_PKTTYPE: 397 397 PPC_LBZ_OFFS(r_A, r_skb, PKT_TYPE_OFFSET()); 398 - PPC_ANDI(r_A, r_A, PKT_TYPE_MAX); 399 - PPC_SRWI(r_A, r_A, 5); 398 + EMIT(PPC_RAW_ANDI(r_A, r_A, PKT_TYPE_MAX)); 399 + EMIT(PPC_RAW_SRWI(r_A, r_A, 5)); 400 400 break; 401 401 case BPF_ANC | SKF_AD_CPU: 402 402 PPC_BPF_LOAD_CPU(r_A); ··· 414 414 /* Load from [K]. 
*/ 415 415 ctx->seen |= SEEN_DATAREF; 416 416 PPC_FUNC_ADDR(r_scratch1, func); 417 - PPC_MTLR(r_scratch1); 417 + EMIT(PPC_RAW_MTLR(r_scratch1)); 418 418 PPC_LI32(r_addr, K); 419 - PPC_BLRL(); 419 + EMIT(PPC_RAW_BLRL()); 420 420 /* 421 421 * Helper returns 'lt' condition on error, and an 422 422 * appropriate return value in r3 ··· 440 440 */ 441 441 ctx->seen |= SEEN_DATAREF | SEEN_XREG; 442 442 PPC_FUNC_ADDR(r_scratch1, func); 443 - PPC_MTLR(r_scratch1); 444 - PPC_ADDI(r_addr, r_X, IMM_L(K)); 443 + EMIT(PPC_RAW_MTLR(r_scratch1)); 444 + EMIT(PPC_RAW_ADDI(r_addr, r_X, IMM_L(K))); 445 445 if (K >= 32768) 446 - PPC_ADDIS(r_addr, r_addr, IMM_HA(K)); 447 - PPC_BLRL(); 446 + EMIT(PPC_RAW_ADDIS(r_addr, r_addr, IMM_HA(K))); 447 + EMIT(PPC_RAW_BLRL()); 448 448 /* If error, cr0.LT set */ 449 449 PPC_BCC(COND_LT, exit_addr); 450 450 break; ··· 489 489 case BPF_JMP | BPF_JGE | BPF_X: 490 490 case BPF_JMP | BPF_JEQ | BPF_X: 491 491 ctx->seen |= SEEN_XREG; 492 - PPC_CMPLW(r_A, r_X); 492 + EMIT(PPC_RAW_CMPLW(r_A, r_X)); 493 493 break; 494 494 case BPF_JMP | BPF_JSET | BPF_X: 495 495 ctx->seen |= SEEN_XREG; 496 - PPC_AND_DOT(r_scratch1, r_A, r_X); 496 + EMIT(PPC_RAW_AND_DOT(r_scratch1, r_A, r_X)); 497 497 break; 498 498 case BPF_JMP | BPF_JEQ | BPF_K: 499 499 case BPF_JMP | BPF_JGT | BPF_K: 500 500 case BPF_JMP | BPF_JGE | BPF_K: 501 501 if (K < 32768) 502 - PPC_CMPLWI(r_A, K); 502 + EMIT(PPC_RAW_CMPLWI(r_A, K)); 503 503 else { 504 504 PPC_LI32(r_scratch1, K); 505 - PPC_CMPLW(r_A, r_scratch1); 505 + EMIT(PPC_RAW_CMPLW(r_A, r_scratch1)); 506 506 } 507 507 break; 508 508 case BPF_JMP | BPF_JSET | BPF_K: 509 509 if (K < 32768) 510 510 /* PPC_ANDI is /only/ dot-form */ 511 - PPC_ANDI(r_scratch1, r_A, K); 511 + EMIT(PPC_RAW_ANDI(r_scratch1, r_A, K)); 512 512 else { 513 513 PPC_LI32(r_scratch1, K); 514 - PPC_AND_DOT(r_scratch1, r_A, 515 - r_scratch1); 514 + EMIT(PPC_RAW_AND_DOT(r_scratch1, r_A, 515 + r_scratch1)); 516 516 } 517 517 break; 518 518 }
+140 -138
arch/powerpc/net/bpf_jit_comp64.c
··· 95 95 * invoked through a tail call. 96 96 */ 97 97 if (ctx->seen & SEEN_TAILCALL) { 98 - PPC_LI(b2p[TMP_REG_1], 0); 98 + EMIT(PPC_RAW_LI(b2p[TMP_REG_1], 0)); 99 99 /* this goes in the redzone */ 100 100 PPC_BPF_STL(b2p[TMP_REG_1], 1, -(BPF_PPC_STACK_SAVE + 8)); 101 101 } else { 102 - PPC_NOP(); 103 - PPC_NOP(); 102 + EMIT(PPC_RAW_NOP()); 103 + EMIT(PPC_RAW_NOP()); 104 104 } 105 105 106 106 #define BPF_TAILCALL_PROLOGUE_SIZE 8 ··· 129 129 130 130 /* Setup frame pointer to point to the bpf stack area */ 131 131 if (bpf_is_seen_register(ctx, BPF_REG_FP)) 132 - PPC_ADDI(b2p[BPF_REG_FP], 1, 133 - STACK_FRAME_MIN_SIZE + ctx->stack_size); 132 + EMIT(PPC_RAW_ADDI(b2p[BPF_REG_FP], 1, 133 + STACK_FRAME_MIN_SIZE + ctx->stack_size)); 134 134 } 135 135 136 136 static void bpf_jit_emit_common_epilogue(u32 *image, struct codegen_context *ctx) ··· 144 144 145 145 /* Tear down our stack frame */ 146 146 if (bpf_has_stack_frame(ctx)) { 147 - PPC_ADDI(1, 1, BPF_PPC_STACKFRAME + ctx->stack_size); 147 + EMIT(PPC_RAW_ADDI(1, 1, BPF_PPC_STACKFRAME + ctx->stack_size)); 148 148 if (ctx->seen & SEEN_FUNC) { 149 149 PPC_BPF_LL(0, 1, PPC_LR_STKOFF); 150 - PPC_MTLR(0); 150 + EMIT(PPC_RAW_MTLR(0)); 151 151 } 152 152 } 153 153 } ··· 157 157 bpf_jit_emit_common_epilogue(image, ctx); 158 158 159 159 /* Move result to r3 */ 160 - PPC_MR(3, b2p[BPF_REG_0]); 160 + EMIT(PPC_RAW_MR(3, b2p[BPF_REG_0])); 161 161 162 - PPC_BLR(); 162 + EMIT(PPC_RAW_BLR()); 163 163 } 164 164 165 165 static void bpf_jit_emit_func_call_hlp(u32 *image, struct codegen_context *ctx, ··· 171 171 /* Load actual entry point from function descriptor */ 172 172 PPC_BPF_LL(b2p[TMP_REG_1], b2p[TMP_REG_2], 0); 173 173 /* ... and move it to LR */ 174 - PPC_MTLR(b2p[TMP_REG_1]); 174 + EMIT(PPC_RAW_MTLR(b2p[TMP_REG_1])); 175 175 /* 176 176 * Load TOC from function descriptor at offset 8. 
177 177 * We can clobber r2 since we get called through a ··· 182 182 #else 183 183 /* We can clobber r12 */ 184 184 PPC_FUNC_ADDR(12, func); 185 - PPC_MTLR(12); 185 + EMIT(PPC_RAW_MTLR(12)); 186 186 #endif 187 - PPC_BLRL(); 187 + EMIT(PPC_RAW_BLRL()); 188 188 } 189 189 190 190 static void bpf_jit_emit_func_call_rel(u32 *image, struct codegen_context *ctx, ··· 206 206 * that PPC_LI64() can emit. 207 207 */ 208 208 for (i = ctx->idx - ctx_idx; i < 5; i++) 209 - PPC_NOP(); 209 + EMIT(PPC_RAW_NOP()); 210 210 211 211 #ifdef PPC64_ELF_ABI_v1 212 212 /* ··· 220 220 PPC_BPF_LL(12, 12, 0); 221 221 #endif 222 222 223 - PPC_MTLR(12); 224 - PPC_BLRL(); 223 + EMIT(PPC_RAW_MTLR(12)); 224 + EMIT(PPC_RAW_BLRL()); 225 225 } 226 226 227 227 static void bpf_jit_emit_tail_call(u32 *image, struct codegen_context *ctx, u32 out) ··· 240 240 * goto out; 241 241 */ 242 242 EMIT(PPC_RAW_LWZ(b2p[TMP_REG_1], b2p_bpf_array, offsetof(struct bpf_array, map.max_entries))); 243 - PPC_RLWINM(b2p_index, b2p_index, 0, 0, 31); 244 - PPC_CMPLW(b2p_index, b2p[TMP_REG_1]); 243 + EMIT(PPC_RAW_RLWINM(b2p_index, b2p_index, 0, 0, 31)); 244 + EMIT(PPC_RAW_CMPLW(b2p_index, b2p[TMP_REG_1])); 245 245 PPC_BCC(COND_GE, out); 246 246 247 247 /* ··· 249 249 * goto out; 250 250 */ 251 251 PPC_BPF_LL(b2p[TMP_REG_1], 1, bpf_jit_stack_tailcallcnt(ctx)); 252 - PPC_CMPLWI(b2p[TMP_REG_1], MAX_TAIL_CALL_CNT); 252 + EMIT(PPC_RAW_CMPLWI(b2p[TMP_REG_1], MAX_TAIL_CALL_CNT)); 253 253 PPC_BCC(COND_GT, out); 254 254 255 255 /* 256 256 * tail_call_cnt++; 257 257 */ 258 - PPC_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], 1); 258 + EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], 1)); 259 259 PPC_BPF_STL(b2p[TMP_REG_1], 1, bpf_jit_stack_tailcallcnt(ctx)); 260 260 261 261 /* prog = array->ptrs[index]; */ 262 - PPC_MULI(b2p[TMP_REG_1], b2p_index, 8); 262 + EMIT(PPC_RAW_MULI(b2p[TMP_REG_1], b2p_index, 8)); 263 263 EMIT(PPC_RAW_ADD(b2p[TMP_REG_1], b2p[TMP_REG_1], b2p_bpf_array)); 264 264 PPC_BPF_LL(b2p[TMP_REG_1], b2p[TMP_REG_1], 
offsetof(struct bpf_array, ptrs)); 265 265 ··· 267 267 * if (prog == NULL) 268 268 * goto out; 269 269 */ 270 - PPC_CMPLDI(b2p[TMP_REG_1], 0); 270 + EMIT(PPC_RAW_CMPLDI(b2p[TMP_REG_1], 0)); 271 271 PPC_BCC(COND_EQ, out); 272 272 273 273 /* goto *(prog->bpf_func + prologue_size); */ 274 274 PPC_BPF_LL(b2p[TMP_REG_1], b2p[TMP_REG_1], offsetof(struct bpf_prog, bpf_func)); 275 275 #ifdef PPC64_ELF_ABI_v1 276 276 /* skip past the function descriptor */ 277 - PPC_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], 278 - FUNCTION_DESCR_SIZE + BPF_TAILCALL_PROLOGUE_SIZE); 277 + EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], 278 + FUNCTION_DESCR_SIZE + BPF_TAILCALL_PROLOGUE_SIZE)); 279 279 #else 280 - PPC_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], BPF_TAILCALL_PROLOGUE_SIZE); 280 + EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], BPF_TAILCALL_PROLOGUE_SIZE)); 281 281 #endif 282 - PPC_MTCTR(b2p[TMP_REG_1]); 282 + EMIT(PPC_RAW_MTCTR(b2p[TMP_REG_1])); 283 283 284 284 /* tear down stack, restore NVRs, ... */ 285 285 bpf_jit_emit_common_epilogue(image, ctx); 286 286 287 - PPC_BCTR(); 287 + EMIT(PPC_RAW_BCTR()); 288 288 /* out: */ 289 289 } 290 290 ··· 344 344 goto bpf_alu32_trunc; 345 345 case BPF_ALU | BPF_SUB | BPF_X: /* (u32) dst -= (u32) src */ 346 346 case BPF_ALU64 | BPF_SUB | BPF_X: /* dst -= src */ 347 - PPC_SUB(dst_reg, dst_reg, src_reg); 347 + EMIT(PPC_RAW_SUB(dst_reg, dst_reg, src_reg)); 348 348 goto bpf_alu32_trunc; 349 349 case BPF_ALU | BPF_ADD | BPF_K: /* (u32) dst += (u32) imm */ 350 350 case BPF_ALU | BPF_SUB | BPF_K: /* (u32) dst -= (u32) imm */ ··· 354 354 imm = -imm; 355 355 if (imm) { 356 356 if (imm >= -32768 && imm < 32768) 357 - PPC_ADDI(dst_reg, dst_reg, IMM_L(imm)); 357 + EMIT(PPC_RAW_ADDI(dst_reg, dst_reg, IMM_L(imm))); 358 358 else { 359 359 PPC_LI32(b2p[TMP_REG_1], imm); 360 360 EMIT(PPC_RAW_ADD(dst_reg, dst_reg, b2p[TMP_REG_1])); ··· 364 364 case BPF_ALU | BPF_MUL | BPF_X: /* (u32) dst *= (u32) src */ 365 365 case BPF_ALU64 | BPF_MUL | BPF_X: /* dst *= src */ 366 
366 if (BPF_CLASS(code) == BPF_ALU) 367 - PPC_MULW(dst_reg, dst_reg, src_reg); 367 + EMIT(PPC_RAW_MULW(dst_reg, dst_reg, src_reg)); 368 368 else 369 - PPC_MULD(dst_reg, dst_reg, src_reg); 369 + EMIT(PPC_RAW_MULD(dst_reg, dst_reg, src_reg)); 370 370 goto bpf_alu32_trunc; 371 371 case BPF_ALU | BPF_MUL | BPF_K: /* (u32) dst *= (u32) imm */ 372 372 case BPF_ALU64 | BPF_MUL | BPF_K: /* dst *= imm */ 373 373 if (imm >= -32768 && imm < 32768) 374 - PPC_MULI(dst_reg, dst_reg, IMM_L(imm)); 374 + EMIT(PPC_RAW_MULI(dst_reg, dst_reg, IMM_L(imm))); 375 375 else { 376 376 PPC_LI32(b2p[TMP_REG_1], imm); 377 377 if (BPF_CLASS(code) == BPF_ALU) 378 - PPC_MULW(dst_reg, dst_reg, 379 - b2p[TMP_REG_1]); 378 + EMIT(PPC_RAW_MULW(dst_reg, dst_reg, 379 + b2p[TMP_REG_1])); 380 380 else 381 - PPC_MULD(dst_reg, dst_reg, 382 - b2p[TMP_REG_1]); 381 + EMIT(PPC_RAW_MULD(dst_reg, dst_reg, 382 + b2p[TMP_REG_1])); 383 383 } 384 384 goto bpf_alu32_trunc; 385 385 case BPF_ALU | BPF_DIV | BPF_X: /* (u32) dst /= (u32) src */ 386 386 case BPF_ALU | BPF_MOD | BPF_X: /* (u32) dst %= (u32) src */ 387 387 if (BPF_OP(code) == BPF_MOD) { 388 - PPC_DIVWU(b2p[TMP_REG_1], dst_reg, src_reg); 389 - PPC_MULW(b2p[TMP_REG_1], src_reg, 390 - b2p[TMP_REG_1]); 391 - PPC_SUB(dst_reg, dst_reg, b2p[TMP_REG_1]); 388 + EMIT(PPC_RAW_DIVWU(b2p[TMP_REG_1], dst_reg, src_reg)); 389 + EMIT(PPC_RAW_MULW(b2p[TMP_REG_1], src_reg, 390 + b2p[TMP_REG_1])); 391 + EMIT(PPC_RAW_SUB(dst_reg, dst_reg, b2p[TMP_REG_1])); 392 392 } else 393 - PPC_DIVWU(dst_reg, dst_reg, src_reg); 393 + EMIT(PPC_RAW_DIVWU(dst_reg, dst_reg, src_reg)); 394 394 goto bpf_alu32_trunc; 395 395 case BPF_ALU64 | BPF_DIV | BPF_X: /* dst /= src */ 396 396 case BPF_ALU64 | BPF_MOD | BPF_X: /* dst %= src */ 397 397 if (BPF_OP(code) == BPF_MOD) { 398 - PPC_DIVDU(b2p[TMP_REG_1], dst_reg, src_reg); 399 - PPC_MULD(b2p[TMP_REG_1], src_reg, 400 - b2p[TMP_REG_1]); 401 - PPC_SUB(dst_reg, dst_reg, b2p[TMP_REG_1]); 398 + EMIT(PPC_RAW_DIVDU(b2p[TMP_REG_1], dst_reg, src_reg)); 399 + 
EMIT(PPC_RAW_MULD(b2p[TMP_REG_1], src_reg, 400 + b2p[TMP_REG_1])); 401 + EMIT(PPC_RAW_SUB(dst_reg, dst_reg, b2p[TMP_REG_1])); 402 402 } else 403 - PPC_DIVDU(dst_reg, dst_reg, src_reg); 403 + EMIT(PPC_RAW_DIVDU(dst_reg, dst_reg, src_reg)); 404 404 break; 405 405 case BPF_ALU | BPF_MOD | BPF_K: /* (u32) dst %= (u32) imm */ 406 406 case BPF_ALU | BPF_DIV | BPF_K: /* (u32) dst /= (u32) imm */ ··· 415 415 switch (BPF_CLASS(code)) { 416 416 case BPF_ALU: 417 417 if (BPF_OP(code) == BPF_MOD) { 418 - PPC_DIVWU(b2p[TMP_REG_2], dst_reg, 419 - b2p[TMP_REG_1]); 420 - PPC_MULW(b2p[TMP_REG_1], 418 + EMIT(PPC_RAW_DIVWU(b2p[TMP_REG_2], 419 + dst_reg, 420 + b2p[TMP_REG_1])); 421 + EMIT(PPC_RAW_MULW(b2p[TMP_REG_1], 421 422 b2p[TMP_REG_1], 422 - b2p[TMP_REG_2]); 423 - PPC_SUB(dst_reg, dst_reg, 424 - b2p[TMP_REG_1]); 423 + b2p[TMP_REG_2])); 424 + EMIT(PPC_RAW_SUB(dst_reg, dst_reg, 425 + b2p[TMP_REG_1])); 425 426 } else 426 - PPC_DIVWU(dst_reg, dst_reg, 427 - b2p[TMP_REG_1]); 427 + EMIT(PPC_RAW_DIVWU(dst_reg, dst_reg, 428 + b2p[TMP_REG_1])); 428 429 break; 429 430 case BPF_ALU64: 430 431 if (BPF_OP(code) == BPF_MOD) { 431 - PPC_DIVDU(b2p[TMP_REG_2], dst_reg, 432 - b2p[TMP_REG_1]); 433 - PPC_MULD(b2p[TMP_REG_1], 432 + EMIT(PPC_RAW_DIVDU(b2p[TMP_REG_2], 433 + dst_reg, 434 + b2p[TMP_REG_1])); 435 + EMIT(PPC_RAW_MULD(b2p[TMP_REG_1], 434 436 b2p[TMP_REG_1], 435 - b2p[TMP_REG_2]); 436 - PPC_SUB(dst_reg, dst_reg, 437 - b2p[TMP_REG_1]); 437 + b2p[TMP_REG_2])); 438 + EMIT(PPC_RAW_SUB(dst_reg, dst_reg, 439 + b2p[TMP_REG_1])); 438 440 } else 439 - PPC_DIVDU(dst_reg, dst_reg, 440 - b2p[TMP_REG_1]); 441 + EMIT(PPC_RAW_DIVDU(dst_reg, dst_reg, 442 + b2p[TMP_REG_1])); 441 443 break; 442 444 } 443 445 goto bpf_alu32_trunc; 444 446 case BPF_ALU | BPF_NEG: /* (u32) dst = -dst */ 445 447 case BPF_ALU64 | BPF_NEG: /* dst = -dst */ 446 - PPC_NEG(dst_reg, dst_reg); 448 + EMIT(PPC_RAW_NEG(dst_reg, dst_reg)); 447 449 goto bpf_alu32_trunc; 448 450 449 451 /* ··· 453 451 */ 454 452 case BPF_ALU | BPF_AND | 
BPF_X: /* (u32) dst = dst & src */ 455 453 case BPF_ALU64 | BPF_AND | BPF_X: /* dst = dst & src */ 456 - PPC_AND(dst_reg, dst_reg, src_reg); 454 + EMIT(PPC_RAW_AND(dst_reg, dst_reg, src_reg)); 457 455 goto bpf_alu32_trunc; 458 456 case BPF_ALU | BPF_AND | BPF_K: /* (u32) dst = dst & imm */ 459 457 case BPF_ALU64 | BPF_AND | BPF_K: /* dst = dst & imm */ 460 458 if (!IMM_H(imm)) 461 - PPC_ANDI(dst_reg, dst_reg, IMM_L(imm)); 459 + EMIT(PPC_RAW_ANDI(dst_reg, dst_reg, IMM_L(imm))); 462 460 else { 463 461 /* Sign-extended */ 464 462 PPC_LI32(b2p[TMP_REG_1], imm); 465 - PPC_AND(dst_reg, dst_reg, b2p[TMP_REG_1]); 463 + EMIT(PPC_RAW_AND(dst_reg, dst_reg, b2p[TMP_REG_1])); 466 464 } 467 465 goto bpf_alu32_trunc; 468 466 case BPF_ALU | BPF_OR | BPF_X: /* dst = (u32) dst | (u32) src */ 469 467 case BPF_ALU64 | BPF_OR | BPF_X: /* dst = dst | src */ 470 - PPC_OR(dst_reg, dst_reg, src_reg); 468 + EMIT(PPC_RAW_OR(dst_reg, dst_reg, src_reg)); 471 469 goto bpf_alu32_trunc; 472 470 case BPF_ALU | BPF_OR | BPF_K:/* dst = (u32) dst | (u32) imm */ 473 471 case BPF_ALU64 | BPF_OR | BPF_K:/* dst = dst | imm */ 474 472 if (imm < 0 && BPF_CLASS(code) == BPF_ALU64) { 475 473 /* Sign-extended */ 476 474 PPC_LI32(b2p[TMP_REG_1], imm); 477 - PPC_OR(dst_reg, dst_reg, b2p[TMP_REG_1]); 475 + EMIT(PPC_RAW_OR(dst_reg, dst_reg, b2p[TMP_REG_1])); 478 476 } else { 479 477 if (IMM_L(imm)) 480 - PPC_ORI(dst_reg, dst_reg, IMM_L(imm)); 478 + EMIT(PPC_RAW_ORI(dst_reg, dst_reg, IMM_L(imm))); 481 479 if (IMM_H(imm)) 482 - PPC_ORIS(dst_reg, dst_reg, IMM_H(imm)); 480 + EMIT(PPC_RAW_ORIS(dst_reg, dst_reg, IMM_H(imm))); 483 481 } 484 482 goto bpf_alu32_trunc; 485 483 case BPF_ALU | BPF_XOR | BPF_X: /* (u32) dst ^= src */ 486 484 case BPF_ALU64 | BPF_XOR | BPF_X: /* dst ^= src */ 487 - PPC_XOR(dst_reg, dst_reg, src_reg); 485 + EMIT(PPC_RAW_XOR(dst_reg, dst_reg, src_reg)); 488 486 goto bpf_alu32_trunc; 489 487 case BPF_ALU | BPF_XOR | BPF_K: /* (u32) dst ^= (u32) imm */ 490 488 case BPF_ALU64 | BPF_XOR | BPF_K: /* 
dst ^= imm */ 491 489 if (imm < 0 && BPF_CLASS(code) == BPF_ALU64) { 492 490 /* Sign-extended */ 493 491 PPC_LI32(b2p[TMP_REG_1], imm); 494 - PPC_XOR(dst_reg, dst_reg, b2p[TMP_REG_1]); 492 + EMIT(PPC_RAW_XOR(dst_reg, dst_reg, b2p[TMP_REG_1])); 495 493 } else { 496 494 if (IMM_L(imm)) 497 - PPC_XORI(dst_reg, dst_reg, IMM_L(imm)); 495 + EMIT(PPC_RAW_XORI(dst_reg, dst_reg, IMM_L(imm))); 498 496 if (IMM_H(imm)) 499 - PPC_XORIS(dst_reg, dst_reg, IMM_H(imm)); 497 + EMIT(PPC_RAW_XORIS(dst_reg, dst_reg, IMM_H(imm))); 500 498 } 501 499 goto bpf_alu32_trunc; 502 500 case BPF_ALU | BPF_LSH | BPF_X: /* (u32) dst <<= (u32) src */ 503 501 /* slw clears top 32 bits */ 504 - PPC_SLW(dst_reg, dst_reg, src_reg); 502 + EMIT(PPC_RAW_SLW(dst_reg, dst_reg, src_reg)); 505 503 /* skip zero extension move, but set address map. */ 506 504 if (insn_is_zext(&insn[i + 1])) 507 505 addrs[++i] = ctx->idx * 4; 508 506 break; 509 507 case BPF_ALU64 | BPF_LSH | BPF_X: /* dst <<= src; */ 510 - PPC_SLD(dst_reg, dst_reg, src_reg); 508 + EMIT(PPC_RAW_SLD(dst_reg, dst_reg, src_reg)); 511 509 break; 512 510 case BPF_ALU | BPF_LSH | BPF_K: /* (u32) dst <<== (u32) imm */ 513 511 /* with imm 0, we still need to clear top 32 bits */ 514 - PPC_SLWI(dst_reg, dst_reg, imm); 512 + EMIT(PPC_RAW_SLWI(dst_reg, dst_reg, imm)); 515 513 if (insn_is_zext(&insn[i + 1])) 516 514 addrs[++i] = ctx->idx * 4; 517 515 break; 518 516 case BPF_ALU64 | BPF_LSH | BPF_K: /* dst <<== imm */ 519 517 if (imm != 0) 520 - PPC_SLDI(dst_reg, dst_reg, imm); 518 + EMIT(PPC_RAW_SLDI(dst_reg, dst_reg, imm)); 521 519 break; 522 520 case BPF_ALU | BPF_RSH | BPF_X: /* (u32) dst >>= (u32) src */ 523 - PPC_SRW(dst_reg, dst_reg, src_reg); 521 + EMIT(PPC_RAW_SRW(dst_reg, dst_reg, src_reg)); 524 522 if (insn_is_zext(&insn[i + 1])) 525 523 addrs[++i] = ctx->idx * 4; 526 524 break; 527 525 case BPF_ALU64 | BPF_RSH | BPF_X: /* dst >>= src */ 528 - PPC_SRD(dst_reg, dst_reg, src_reg); 526 + EMIT(PPC_RAW_SRD(dst_reg, dst_reg, src_reg)); 529 527 break; 530 
528 case BPF_ALU | BPF_RSH | BPF_K: /* (u32) dst >>= (u32) imm */ 531 - PPC_SRWI(dst_reg, dst_reg, imm); 529 + EMIT(PPC_RAW_SRWI(dst_reg, dst_reg, imm)); 532 530 if (insn_is_zext(&insn[i + 1])) 533 531 addrs[++i] = ctx->idx * 4; 534 532 break; 535 533 case BPF_ALU64 | BPF_RSH | BPF_K: /* dst >>= imm */ 536 534 if (imm != 0) 537 - PPC_SRDI(dst_reg, dst_reg, imm); 535 + EMIT(PPC_RAW_SRDI(dst_reg, dst_reg, imm)); 538 536 break; 539 537 case BPF_ALU | BPF_ARSH | BPF_X: /* (s32) dst >>= src */ 540 - PPC_SRAW(dst_reg, dst_reg, src_reg); 538 + EMIT(PPC_RAW_SRAW(dst_reg, dst_reg, src_reg)); 541 539 goto bpf_alu32_trunc; 542 540 case BPF_ALU64 | BPF_ARSH | BPF_X: /* (s64) dst >>= src */ 543 - PPC_SRAD(dst_reg, dst_reg, src_reg); 541 + EMIT(PPC_RAW_SRAD(dst_reg, dst_reg, src_reg)); 544 542 break; 545 543 case BPF_ALU | BPF_ARSH | BPF_K: /* (s32) dst >>= imm */ 546 - PPC_SRAWI(dst_reg, dst_reg, imm); 544 + EMIT(PPC_RAW_SRAWI(dst_reg, dst_reg, imm)); 547 545 goto bpf_alu32_trunc; 548 546 case BPF_ALU64 | BPF_ARSH | BPF_K: /* (s64) dst >>= imm */ 549 547 if (imm != 0) 550 - PPC_SRADI(dst_reg, dst_reg, imm); 548 + EMIT(PPC_RAW_SRADI(dst_reg, dst_reg, imm)); 551 549 break; 552 550 553 551 /* ··· 557 555 case BPF_ALU64 | BPF_MOV | BPF_X: /* dst = src */ 558 556 if (imm == 1) { 559 557 /* special mov32 for zext */ 560 - PPC_RLWINM(dst_reg, dst_reg, 0, 0, 31); 558 + EMIT(PPC_RAW_RLWINM(dst_reg, dst_reg, 0, 0, 31)); 561 559 break; 562 560 } 563 - PPC_MR(dst_reg, src_reg); 561 + EMIT(PPC_RAW_MR(dst_reg, src_reg)); 564 562 goto bpf_alu32_trunc; 565 563 case BPF_ALU | BPF_MOV | BPF_K: /* (u32) dst = imm */ 566 564 case BPF_ALU64 | BPF_MOV | BPF_K: /* dst = (s64) imm */ ··· 574 572 bpf_alu32_trunc: 575 573 /* Truncate to 32-bits */ 576 574 if (BPF_CLASS(code) == BPF_ALU && !fp->aux->verifier_zext) 577 - PPC_RLWINM(dst_reg, dst_reg, 0, 0, 31); 575 + EMIT(PPC_RAW_RLWINM(dst_reg, dst_reg, 0, 0, 31)); 578 576 break; 579 577 580 578 /* ··· 592 590 switch (imm) { 593 591 case 16: 594 592 /* 
Rotate 8 bits left & mask with 0x0000ff00 */ 595 - PPC_RLWINM(b2p[TMP_REG_1], dst_reg, 8, 16, 23); 593 + EMIT(PPC_RAW_RLWINM(b2p[TMP_REG_1], dst_reg, 8, 16, 23)); 596 594 /* Rotate 8 bits right & insert LSB to reg */ 597 - PPC_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 24, 31); 595 + EMIT(PPC_RAW_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 24, 31)); 598 596 /* Move result back to dst_reg */ 599 - PPC_MR(dst_reg, b2p[TMP_REG_1]); 597 + EMIT(PPC_RAW_MR(dst_reg, b2p[TMP_REG_1])); 600 598 break; 601 599 case 32: 602 600 /* ··· 604 602 * 2 bytes are already in their final position 605 603 * -- byte 2 and 4 (of bytes 1, 2, 3 and 4) 606 604 */ 607 - PPC_RLWINM(b2p[TMP_REG_1], dst_reg, 8, 0, 31); 605 + EMIT(PPC_RAW_RLWINM(b2p[TMP_REG_1], dst_reg, 8, 0, 31)); 608 606 /* Rotate 24 bits and insert byte 1 */ 609 - PPC_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 0, 7); 607 + EMIT(PPC_RAW_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 0, 7)); 610 608 /* Rotate 24 bits and insert byte 3 */ 611 - PPC_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 16, 23); 612 - PPC_MR(dst_reg, b2p[TMP_REG_1]); 609 + EMIT(PPC_RAW_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 16, 23)); 610 + EMIT(PPC_RAW_MR(dst_reg, b2p[TMP_REG_1])); 613 611 break; 614 612 case 64: 615 613 /* ··· 621 619 * same across all passes 622 620 */ 623 621 PPC_BPF_STL(dst_reg, 1, bpf_jit_stack_local(ctx)); 624 - PPC_ADDI(b2p[TMP_REG_1], 1, bpf_jit_stack_local(ctx)); 625 - PPC_LDBRX(dst_reg, 0, b2p[TMP_REG_1]); 622 + EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], 1, bpf_jit_stack_local(ctx))); 623 + EMIT(PPC_RAW_LDBRX(dst_reg, 0, b2p[TMP_REG_1])); 626 624 break; 627 625 } 628 626 break; ··· 631 629 switch (imm) { 632 630 case 16: 633 631 /* zero-extend 16 bits into 64 bits */ 634 - PPC_RLDICL(dst_reg, dst_reg, 0, 48); 632 + EMIT(PPC_RAW_RLDICL(dst_reg, dst_reg, 0, 48)); 635 633 if (insn_is_zext(&insn[i + 1])) 636 634 addrs[++i] = ctx->idx * 4; 637 635 break; 638 636 case 32: 639 637 if (!fp->aux->verifier_zext) 640 638 /* zero-extend 32 bits into 64 bits */ 641 - PPC_RLDICL(dst_reg, dst_reg, 
0, 32); 639 + EMIT(PPC_RAW_RLDICL(dst_reg, dst_reg, 0, 32)); 642 640 break; 643 641 case 64: 644 642 /* nop */ ··· 652 650 case BPF_STX | BPF_MEM | BPF_B: /* *(u8 *)(dst + off) = src */ 653 651 case BPF_ST | BPF_MEM | BPF_B: /* *(u8 *)(dst + off) = imm */ 654 652 if (BPF_CLASS(code) == BPF_ST) { 655 - PPC_LI(b2p[TMP_REG_1], imm); 653 + EMIT(PPC_RAW_LI(b2p[TMP_REG_1], imm)); 656 654 src_reg = b2p[TMP_REG_1]; 657 655 } 658 - PPC_STB(src_reg, dst_reg, off); 656 + EMIT(PPC_RAW_STB(src_reg, dst_reg, off)); 659 657 break; 660 658 case BPF_STX | BPF_MEM | BPF_H: /* (u16 *)(dst + off) = src */ 661 659 case BPF_ST | BPF_MEM | BPF_H: /* (u16 *)(dst + off) = imm */ 662 660 if (BPF_CLASS(code) == BPF_ST) { 663 - PPC_LI(b2p[TMP_REG_1], imm); 661 + EMIT(PPC_RAW_LI(b2p[TMP_REG_1], imm)); 664 662 src_reg = b2p[TMP_REG_1]; 665 663 } 666 - PPC_STH(src_reg, dst_reg, off); 664 + EMIT(PPC_RAW_STH(src_reg, dst_reg, off)); 667 665 break; 668 666 case BPF_STX | BPF_MEM | BPF_W: /* *(u32 *)(dst + off) = src */ 669 667 case BPF_ST | BPF_MEM | BPF_W: /* *(u32 *)(dst + off) = imm */ ··· 671 669 PPC_LI32(b2p[TMP_REG_1], imm); 672 670 src_reg = b2p[TMP_REG_1]; 673 671 } 674 - PPC_STW(src_reg, dst_reg, off); 672 + EMIT(PPC_RAW_STW(src_reg, dst_reg, off)); 675 673 break; 676 674 case BPF_STX | BPF_MEM | BPF_DW: /* (u64 *)(dst + off) = src */ 677 675 case BPF_ST | BPF_MEM | BPF_DW: /* *(u64 *)(dst + off) = imm */ ··· 688 686 /* *(u32 *)(dst + off) += src */ 689 687 case BPF_STX | BPF_XADD | BPF_W: 690 688 /* Get EA into TMP_REG_1 */ 691 - PPC_ADDI(b2p[TMP_REG_1], dst_reg, off); 689 + EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], dst_reg, off)); 692 690 tmp_idx = ctx->idx * 4; 693 691 /* load value from memory into TMP_REG_2 */ 694 692 EMIT(PPC_RAW_LWARX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1], 0)); 695 693 /* add value from src_reg into this */ 696 694 EMIT(PPC_RAW_ADD(b2p[TMP_REG_2], b2p[TMP_REG_2], src_reg)); 697 695 /* store result back */ 698 - PPC_BPF_STWCX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1]); 696 + 
EMIT(PPC_RAW_STWCX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1])); 699 697 /* we're done if this succeeded */ 700 698 PPC_BCC_SHORT(COND_NE, tmp_idx); 701 699 break; 702 700 /* *(u64 *)(dst + off) += src */ 703 701 case BPF_STX | BPF_XADD | BPF_DW: 704 - PPC_ADDI(b2p[TMP_REG_1], dst_reg, off); 702 + EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], dst_reg, off)); 705 703 tmp_idx = ctx->idx * 4; 706 704 EMIT(PPC_RAW_LDARX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1], 0)); 707 705 EMIT(PPC_RAW_ADD(b2p[TMP_REG_2], b2p[TMP_REG_2], src_reg)); ··· 714 712 */ 715 713 /* dst = *(u8 *)(ul) (src + off) */ 716 714 case BPF_LDX | BPF_MEM | BPF_B: 717 - PPC_LBZ(dst_reg, src_reg, off); 715 + EMIT(PPC_RAW_LBZ(dst_reg, src_reg, off)); 718 716 if (insn_is_zext(&insn[i + 1])) 719 717 addrs[++i] = ctx->idx * 4; 720 718 break; 721 719 /* dst = *(u16 *)(ul) (src + off) */ 722 720 case BPF_LDX | BPF_MEM | BPF_H: 723 - PPC_LHZ(dst_reg, src_reg, off); 721 + EMIT(PPC_RAW_LHZ(dst_reg, src_reg, off)); 724 722 if (insn_is_zext(&insn[i + 1])) 725 723 addrs[++i] = ctx->idx * 4; 726 724 break; ··· 777 775 else 778 776 bpf_jit_emit_func_call_rel(image, ctx, func_addr); 779 777 /* move return value from r3 to BPF_REG_0 */ 780 - PPC_MR(b2p[BPF_REG_0], 3); 778 + EMIT(PPC_RAW_MR(b2p[BPF_REG_0], 3)); 781 779 break; 782 780 783 781 /* ··· 862 860 case BPF_JMP32 | BPF_JNE | BPF_X: 863 861 /* unsigned comparison */ 864 862 if (BPF_CLASS(code) == BPF_JMP32) 865 - PPC_CMPLW(dst_reg, src_reg); 863 + EMIT(PPC_RAW_CMPLW(dst_reg, src_reg)); 866 864 else 867 - PPC_CMPLD(dst_reg, src_reg); 865 + EMIT(PPC_RAW_CMPLD(dst_reg, src_reg)); 868 866 break; 869 867 case BPF_JMP | BPF_JSGT | BPF_X: 870 868 case BPF_JMP | BPF_JSLT | BPF_X: ··· 876 874 case BPF_JMP32 | BPF_JSLE | BPF_X: 877 875 /* signed comparison */ 878 876 if (BPF_CLASS(code) == BPF_JMP32) 879 - PPC_CMPW(dst_reg, src_reg); 877 + EMIT(PPC_RAW_CMPW(dst_reg, src_reg)); 880 878 else 881 - PPC_CMPD(dst_reg, src_reg); 879 + EMIT(PPC_RAW_CMPD(dst_reg, src_reg)); 882 880 break; 883 881 case BPF_JMP 
| BPF_JSET | BPF_X: 884 882 case BPF_JMP32 | BPF_JSET | BPF_X: 885 883 if (BPF_CLASS(code) == BPF_JMP) { 886 - PPC_AND_DOT(b2p[TMP_REG_1], dst_reg, 887 - src_reg); 884 + EMIT(PPC_RAW_AND_DOT(b2p[TMP_REG_1], dst_reg, 885 + src_reg)); 888 886 } else { 889 887 int tmp_reg = b2p[TMP_REG_1]; 890 888 891 - PPC_AND(tmp_reg, dst_reg, src_reg); 892 - PPC_RLWINM_DOT(tmp_reg, tmp_reg, 0, 0, 893 - 31); 889 + EMIT(PPC_RAW_AND(tmp_reg, dst_reg, src_reg)); 890 + EMIT(PPC_RAW_RLWINM_DOT(tmp_reg, tmp_reg, 0, 0, 891 + 31)); 894 892 } 895 893 break; 896 894 case BPF_JMP | BPF_JNE | BPF_K: ··· 914 912 */ 915 913 if (imm >= 0 && imm < 32768) { 916 914 if (is_jmp32) 917 - PPC_CMPLWI(dst_reg, imm); 915 + EMIT(PPC_RAW_CMPLWI(dst_reg, imm)); 918 916 else 919 - PPC_CMPLDI(dst_reg, imm); 917 + EMIT(PPC_RAW_CMPLDI(dst_reg, imm)); 920 918 } else { 921 919 /* sign-extending load */ 922 920 PPC_LI32(b2p[TMP_REG_1], imm); 923 921 /* ... but unsigned comparison */ 924 922 if (is_jmp32) 925 - PPC_CMPLW(dst_reg, 926 - b2p[TMP_REG_1]); 923 + EMIT(PPC_RAW_CMPLW(dst_reg, 924 + b2p[TMP_REG_1])); 927 925 else 928 - PPC_CMPLD(dst_reg, 929 - b2p[TMP_REG_1]); 926 + EMIT(PPC_RAW_CMPLD(dst_reg, 927 + b2p[TMP_REG_1])); 930 928 } 931 929 break; 932 930 } ··· 947 945 */ 948 946 if (imm >= -32768 && imm < 32768) { 949 947 if (is_jmp32) 950 - PPC_CMPWI(dst_reg, imm); 948 + EMIT(PPC_RAW_CMPWI(dst_reg, imm)); 951 949 else 952 - PPC_CMPDI(dst_reg, imm); 950 + EMIT(PPC_RAW_CMPDI(dst_reg, imm)); 953 951 } else { 954 952 PPC_LI32(b2p[TMP_REG_1], imm); 955 953 if (is_jmp32) 956 - PPC_CMPW(dst_reg, 957 - b2p[TMP_REG_1]); 954 + EMIT(PPC_RAW_CMPW(dst_reg, 955 + b2p[TMP_REG_1])); 958 956 else 959 - PPC_CMPD(dst_reg, 960 - b2p[TMP_REG_1]); 957 + EMIT(PPC_RAW_CMPD(dst_reg, 958 + b2p[TMP_REG_1])); 961 959 } 962 960 break; 963 961 } ··· 966 964 /* andi does not sign-extend the immediate */ 967 965 if (imm >= 0 && imm < 32768) 968 966 /* PPC_ANDI is _only/always_ dot-form */ 969 - PPC_ANDI(b2p[TMP_REG_1], dst_reg, imm); 967 + 
EMIT(PPC_RAW_ANDI(b2p[TMP_REG_1], dst_reg, imm)); 970 968 else { 971 969 int tmp_reg = b2p[TMP_REG_1]; 972 970 973 971 PPC_LI32(tmp_reg, imm); 974 972 if (BPF_CLASS(code) == BPF_JMP) { 975 - PPC_AND_DOT(tmp_reg, dst_reg, 976 - tmp_reg); 973 + EMIT(PPC_RAW_AND_DOT(tmp_reg, dst_reg, 974 + tmp_reg)); 977 975 } else { 978 - PPC_AND(tmp_reg, dst_reg, 979 - tmp_reg); 980 - PPC_RLWINM_DOT(tmp_reg, tmp_reg, 981 - 0, 0, 31); 976 + EMIT(PPC_RAW_AND(tmp_reg, dst_reg, 977 + tmp_reg)); 978 + EMIT(PPC_RAW_RLWINM_DOT(tmp_reg, tmp_reg, 979 + 0, 0, 31)); 982 980 } 983 981 } 984 982 break;