Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

KVM: VMX: Explicitly reference RCX as the vmx_vcpu pointer in asm blobs

Use '%% " _ASM_CX"' instead of '%0' to dereference RCX, i.e. the
'struct vcpu_vmx' pointer, in the VM-Enter asm blobs of vmx_vcpu_run()
and nested_vmx_check_vmentry_hw(). Using the symbolic name means that
adding/removing an output parameter(s) requires "rewriting" almost all
of the asm blob, which makes it nearly impossible to understand what's
being changed in even the most minor patches.

Opportunistically improve the code comments.

Signed-off-by: Sean Christopherson <sean.j.christopherson@intel.com>
Reviewed-by: Andi Kleen <ak@linux.intel.com>
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>

Authored by Sean Christopherson; committed by Paolo Bonzini.
051a2d3e c6ad4597

+50 -42
+3 -3
arch/x86/kvm/vmx/nested.c
···
2759 2759 	asm(
2760 2760 		/* Set HOST_RSP */
2761 2761 		__ex("vmwrite %%" _ASM_SP ", %%" _ASM_DX) "\n\t"
2762      -		"mov %%" _ASM_SP ", %c[host_rsp](%0)\n\t"
     2762 +		"mov %%" _ASM_SP ", %c[host_rsp](%% " _ASM_CX")\n\t"
2763 2763 
2764 2764 		/* Check if vmlaunch or vmresume is needed */
2765      -		"cmpl $0, %c[launched](%0)\n\t"
     2765 +		"cmpl $0, %c[launched](%% " _ASM_CX")\n\t"
2766 2766 		"jne 1f\n\t"
2767 2767 		__ex("vmlaunch") "\n\t"
2768 2768 		"jmp 2f\n\t"
2769 2769 		"1: " __ex("vmresume") "\n\t"
2770 2770 		"2: "
2771 2771 		/* Set vmx->fail accordingly */
2772      -		"setbe %c[fail](%0)\n\t"
     2772 +		"setbe %c[fail](%% " _ASM_CX")\n\t"
2773 2773 
2774 2774 		".pushsection .rodata\n\t"
2775 2775 		".global vmx_early_consistency_check_return\n\t"
+47 -39
arch/x86/kvm/vmx/vmx.c
···
6440 6440 		"push %%" _ASM_DX "; push %%" _ASM_BP ";"
6441 6441 		"push %%" _ASM_CX " \n\t" /* placeholder for guest rcx */
6442 6442 		"push %%" _ASM_CX " \n\t"
6443      -		"cmp %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
     6443 +		"cmp %%" _ASM_SP ", %c[host_rsp](%%" _ASM_CX ") \n\t"
6444 6444 		"je 1f \n\t"
6445      -		"mov %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
     6445 +		"mov %%" _ASM_SP ", %c[host_rsp](%%" _ASM_CX ") \n\t"
6446 6446 		/* Avoid VMWRITE when Enlightened VMCS is in use */
6447 6447 		"test %%" _ASM_SI ", %%" _ASM_SI " \n\t"
6448 6448 		"jz 2f \n\t"
···
6452 6452 		__ex("vmwrite %%" _ASM_SP ", %%" _ASM_DX) "\n\t"
6453 6453 		"1: \n\t"
6454 6454 		/* Reload cr2 if changed */
6455      -		"mov %c[cr2](%0), %%" _ASM_AX " \n\t"
     6455 +		"mov %c[cr2](%%" _ASM_CX "), %%" _ASM_AX " \n\t"
6456 6456 		"mov %%cr2, %%" _ASM_DX " \n\t"
6457 6457 		"cmp %%" _ASM_AX ", %%" _ASM_DX " \n\t"
6458 6458 		"je 3f \n\t"
6459 6459 		"mov %%" _ASM_AX", %%cr2 \n\t"
6460 6460 		"3: \n\t"
6461 6461 		/* Check if vmlaunch or vmresume is needed */
6462      -		"cmpl $0, %c[launched](%0) \n\t"
     6462 +		"cmpl $0, %c[launched](%%" _ASM_CX ") \n\t"
6463 6463 		/* Load guest registers.  Don't clobber flags. */
6464      -		"mov %c[rax](%0), %%" _ASM_AX " \n\t"
6465      -		"mov %c[rbx](%0), %%" _ASM_BX " \n\t"
6466      -		"mov %c[rdx](%0), %%" _ASM_DX " \n\t"
6467      -		"mov %c[rsi](%0), %%" _ASM_SI " \n\t"
6468      -		"mov %c[rdi](%0), %%" _ASM_DI " \n\t"
6469      -		"mov %c[rbp](%0), %%" _ASM_BP " \n\t"
     6464 +		"mov %c[rax](%%" _ASM_CX "), %%" _ASM_AX " \n\t"
     6465 +		"mov %c[rbx](%%" _ASM_CX "), %%" _ASM_BX " \n\t"
     6466 +		"mov %c[rdx](%%" _ASM_CX "), %%" _ASM_DX " \n\t"
     6467 +		"mov %c[rsi](%%" _ASM_CX "), %%" _ASM_SI " \n\t"
     6468 +		"mov %c[rdi](%%" _ASM_CX "), %%" _ASM_DI " \n\t"
     6469 +		"mov %c[rbp](%%" _ASM_CX "), %%" _ASM_BP " \n\t"
6470 6470 #ifdef CONFIG_X86_64
6471      -		"mov %c[r8](%0), %%r8 \n\t"
6472      -		"mov %c[r9](%0), %%r9 \n\t"
6473      -		"mov %c[r10](%0), %%r10 \n\t"
6474      -		"mov %c[r11](%0), %%r11 \n\t"
6475      -		"mov %c[r12](%0), %%r12 \n\t"
6476      -		"mov %c[r13](%0), %%r13 \n\t"
6477      -		"mov %c[r14](%0), %%r14 \n\t"
6478      -		"mov %c[r15](%0), %%r15 \n\t"
     6471 +		"mov %c[r8](%%" _ASM_CX "), %%r8 \n\t"
     6472 +		"mov %c[r9](%%" _ASM_CX "), %%r9 \n\t"
     6473 +		"mov %c[r10](%%" _ASM_CX "), %%r10 \n\t"
     6474 +		"mov %c[r11](%%" _ASM_CX "), %%r11 \n\t"
     6475 +		"mov %c[r12](%%" _ASM_CX "), %%r12 \n\t"
     6476 +		"mov %c[r13](%%" _ASM_CX "), %%r13 \n\t"
     6477 +		"mov %c[r14](%%" _ASM_CX "), %%r14 \n\t"
     6478 +		"mov %c[r15](%%" _ASM_CX "), %%r15 \n\t"
6479 6479 #endif
6480      -		"mov %c[rcx](%0), %%" _ASM_CX " \n\t" /* kills %0 (ecx) */
     6480 +		/* Load guest RCX.  This kills the vmx_vcpu pointer! */
     6481 +		"mov %c[rcx](%%" _ASM_CX "), %%" _ASM_CX " \n\t"
6481 6482 
6482 6483 		/* Enter guest mode */
6483 6484 		"jne 1f \n\t"
···
6486 6485 		"jmp 2f \n\t"
6487 6486 		"1: " __ex("vmresume") "\n\t"
6488 6487 		"2: "
6489      -		/* Save guest registers, load host registers, keep flags */
6490      -		"mov %0, %c[wordsize](%%" _ASM_SP ") \n\t"
6491      -		"pop %0 \n\t"
6492      -		"setbe %c[fail](%0)\n\t"
6493      -		"mov %%" _ASM_AX ", %c[rax](%0) \n\t"
6494      -		"mov %%" _ASM_BX ", %c[rbx](%0) \n\t"
6495      -		__ASM_SIZE(pop) " %c[rcx](%0) \n\t"
6496      -		"mov %%" _ASM_DX ", %c[rdx](%0) \n\t"
6497      -		"mov %%" _ASM_SI ", %c[rsi](%0) \n\t"
6498      -		"mov %%" _ASM_DI ", %c[rdi](%0) \n\t"
6499      -		"mov %%" _ASM_BP ", %c[rbp](%0) \n\t"
     6488 +
     6489 +		/* Save guest's RCX to the stack placeholder (see above) */
     6490 +		"mov %%" _ASM_CX ", %c[wordsize](%%" _ASM_SP ") \n\t"
     6491 +
     6492 +		/* Load host's RCX, i.e. the vmx_vcpu pointer */
     6493 +		"pop %%" _ASM_CX " \n\t"
     6494 +
     6495 +		/* Set vmx->fail based on EFLAGS.{CF,ZF} */
     6496 +		"setbe %c[fail](%%" _ASM_CX ")\n\t"
     6497 +
     6498 +		/* Save all guest registers, including RCX from the stack */
     6499 +		"mov %%" _ASM_AX ", %c[rax](%%" _ASM_CX ") \n\t"
     6500 +		"mov %%" _ASM_BX ", %c[rbx](%%" _ASM_CX ") \n\t"
     6501 +		__ASM_SIZE(pop) " %c[rcx](%%" _ASM_CX ") \n\t"
     6502 +		"mov %%" _ASM_DX ", %c[rdx](%%" _ASM_CX ") \n\t"
     6503 +		"mov %%" _ASM_SI ", %c[rsi](%%" _ASM_CX ") \n\t"
     6504 +		"mov %%" _ASM_DI ", %c[rdi](%%" _ASM_CX ") \n\t"
     6505 +		"mov %%" _ASM_BP ", %c[rbp](%%" _ASM_CX ") \n\t"
6500 6506 #ifdef CONFIG_X86_64
6501      -		"mov %%r8, %c[r8](%0) \n\t"
6502      -		"mov %%r9, %c[r9](%0) \n\t"
6503      -		"mov %%r10, %c[r10](%0) \n\t"
6504      -		"mov %%r11, %c[r11](%0) \n\t"
6505      -		"mov %%r12, %c[r12](%0) \n\t"
6506      -		"mov %%r13, %c[r13](%0) \n\t"
6507      -		"mov %%r14, %c[r14](%0) \n\t"
6508      -		"mov %%r15, %c[r15](%0) \n\t"
     6507 +		"mov %%r8, %c[r8](%%" _ASM_CX ") \n\t"
     6508 +		"mov %%r9, %c[r9](%%" _ASM_CX ") \n\t"
     6509 +		"mov %%r10, %c[r10](%%" _ASM_CX ") \n\t"
     6510 +		"mov %%r11, %c[r11](%%" _ASM_CX ") \n\t"
     6511 +		"mov %%r12, %c[r12](%%" _ASM_CX ") \n\t"
     6512 +		"mov %%r13, %c[r13](%%" _ASM_CX ") \n\t"
     6513 +		"mov %%r14, %c[r14](%%" _ASM_CX ") \n\t"
     6514 +		"mov %%r15, %c[r15](%%" _ASM_CX ") \n\t"
6509 6515 		/*
6510 6516 		 * Clear host registers marked as clobbered to prevent
6511 6517 		 * speculative use.
···
6527 6519 		"xor %%r15d, %%r15d \n\t"
6528 6520 #endif
6529 6521 		"mov %%cr2, %%" _ASM_AX " \n\t"
6530      -		"mov %%" _ASM_AX ", %c[cr2](%0) \n\t"
     6522 +		"mov %%" _ASM_AX ", %c[cr2](%%" _ASM_CX ") \n\t"
6531 6523 
6532 6524 		"xor %%eax, %%eax \n\t"
6533 6525 		"xor %%ebx, %%ebx \n\t"