Use a memory operand in the CMP instruction to avoid the use of a temporary
register. Use the %eax register to hold VMX_spec_ctrl and use it directly in
the follow-up WRMSR.

The new code saves a few bytes by removing two MOV insns, from:

  2d:	48 8b 7c 24 10       	mov    0x10(%rsp),%rdi
  32:	8b bf 48 18 00 00    	mov    0x1848(%rdi),%edi
  38:	65 8b 35 00 00 00 00 	mov    %gs:0x0(%rip),%esi
  3f:	39 fe                	cmp    %edi,%esi
  41:	74 0b                	je     4e <...>
  43:	b9 48 00 00 00       	mov    $0x48,%ecx
  48:	31 d2                	xor    %edx,%edx
  4a:	89 f8                	mov    %edi,%eax
  4c:	0f 30                	wrmsr

to:

  2d:	48 8b 7c 24 10       	mov    0x10(%rsp),%rdi
  32:	8b 87 48 18 00 00    	mov    0x1848(%rdi),%eax
  38:	65 3b 05 00 00 00 00 	cmp    %gs:0x0(%rip),%eax
  3f:	74 09                	je     4a <...>
  41:	b9 48 00 00 00       	mov    $0x48,%ecx
  46:	31 d2                	xor    %edx,%edx
  48:	0f 30                	wrmsr

No functional change intended.

Signed-off-by: Uros Bizjak
Cc: Sean Christopherson
Cc: Paolo Bonzini
Cc: Thomas Gleixner
Cc: Ingo Molnar
Cc: Borislav Petkov
Cc: Dave Hansen
Cc: "H. Peter Anvin"
---
 arch/x86/kvm/vmx/vmenter.S | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/arch/x86/kvm/vmx/vmenter.S b/arch/x86/kvm/vmx/vmenter.S
index 0a6cf5bff2aa..c65de5de92ab 100644
--- a/arch/x86/kvm/vmx/vmenter.S
+++ b/arch/x86/kvm/vmx/vmenter.S
@@ -118,13 +118,11 @@ SYM_FUNC_START(__vmx_vcpu_run)
 	 * and vmentry.
 	 */
 	mov 2*WORD_SIZE(%_ASM_SP), %_ASM_DI
-	movl VMX_spec_ctrl(%_ASM_DI), %edi
-	movl PER_CPU_VAR(x86_spec_ctrl_current), %esi
-	cmp %edi, %esi
+	movl VMX_spec_ctrl(%_ASM_DI), %eax
+	cmp PER_CPU_VAR(x86_spec_ctrl_current), %eax
 	je .Lspec_ctrl_done
 	mov $MSR_IA32_SPEC_CTRL, %ecx
 	xor %edx, %edx
-	mov %edi, %eax
 	wrmsr
 
 .Lspec_ctrl_done:
-- 
2.50.1