Commit bb03911f authored by Uros Bizjak, committed by Paolo Bonzini

KVM: VMX: access regs array in vmenter.S in its natural order

Registers in the "regs" array are indexed as rax/rcx/rdx/.../rsi/rdi/r8/...
Reorder access to "regs" array in vmenter.S to follow its natural order.
Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
parent 1c482452
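
For context, the "natural order" mentioned above is the index order of the guest register array from which the VCPU_R* byte offsets used in vmenter.S are derived. The sketch below illustrates that assumed layout; the __VCPU_REGS_* names and the WORD_SIZE-based offset derivation follow the surrounding kernel code but are a from-memory sketch, not part of this commit:

    /* Sketch (assumed): index of each register in the vcpu "regs" array. */
    #define __VCPU_REGS_RAX  0
    #define __VCPU_REGS_RCX  1
    #define __VCPU_REGS_RDX  2
    #define __VCPU_REGS_RBX  3
    #define __VCPU_REGS_RSP  4
    #define __VCPU_REGS_RBP  5
    #define __VCPU_REGS_RSI  6
    #define __VCPU_REGS_RDI  7
    #define __VCPU_REGS_R8   8   /* r8..r15 continue upward on 64-bit */

    /* Assumed word size; vmenter.S scales an index to a byte offset: */
    #define WORD_SIZE        8
    #define VCPU_RCX         (__VCPU_REGS_RCX * WORD_SIZE)

With that layout, touching the registers as rcx, rdx, rbx, rbp, rsi, rdi (rax and rsp are handled separately) walks the array at strictly increasing offsets, which is what the reordered hunks below do.
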
@@ -135,12 +135,12 @@ SYM_FUNC_START(__vmx_vcpu_run)
 	cmpb $0, %bl
 
 	/* Load guest registers. Don't clobber flags. */
-	mov VCPU_RBX(%_ASM_AX), %_ASM_BX
 	mov VCPU_RCX(%_ASM_AX), %_ASM_CX
 	mov VCPU_RDX(%_ASM_AX), %_ASM_DX
+	mov VCPU_RBX(%_ASM_AX), %_ASM_BX
+	mov VCPU_RBP(%_ASM_AX), %_ASM_BP
 	mov VCPU_RSI(%_ASM_AX), %_ASM_SI
 	mov VCPU_RDI(%_ASM_AX), %_ASM_DI
-	mov VCPU_RBP(%_ASM_AX), %_ASM_BP
 #ifdef CONFIG_X86_64
 	mov VCPU_R8 (%_ASM_AX), %r8
 	mov VCPU_R9 (%_ASM_AX), %r9
@@ -168,12 +168,12 @@ SYM_FUNC_START(__vmx_vcpu_run)
 
 	/* Save all guest registers, including RAX from the stack */
 	__ASM_SIZE(pop) VCPU_RAX(%_ASM_AX)
-	mov %_ASM_BX, VCPU_RBX(%_ASM_AX)
 	mov %_ASM_CX, VCPU_RCX(%_ASM_AX)
 	mov %_ASM_DX, VCPU_RDX(%_ASM_AX)
+	mov %_ASM_BX, VCPU_RBX(%_ASM_AX)
+	mov %_ASM_BP, VCPU_RBP(%_ASM_AX)
 	mov %_ASM_SI, VCPU_RSI(%_ASM_AX)
 	mov %_ASM_DI, VCPU_RDI(%_ASM_AX)
-	mov %_ASM_BP, VCPU_RBP(%_ASM_AX)
 #ifdef CONFIG_X86_64
 	mov %r8, VCPU_R8 (%_ASM_AX)
 	mov %r9, VCPU_R9 (%_ASM_AX)
@@ -197,12 +197,12 @@ SYM_FUNC_START(__vmx_vcpu_run)
 	 * free. RSP and RAX are exempt as RSP is restored by hardware during
 	 * VM-Exit and RAX is explicitly loaded with 0 or 1 to return VM-Fail.
 	 */
-1:	xor %ebx, %ebx
-	xor %ecx, %ecx
+1:	xor %ecx, %ecx
 	xor %edx, %edx
+	xor %ebx, %ebx
+	xor %ebp, %ebp
 	xor %esi, %esi
 	xor %edi, %edi
-	xor %ebp, %ebp
 #ifdef CONFIG_X86_64
 	xor %r8d, %r8d
 	xor %r9d, %r9d