kvm: vmx: Scrub hardware GPRs at VM-exit
author     Jim Mattson <jmattson@google.com>
           Wed, 3 Jan 2018 22:31:38 +0000 (14:31 -0800)
committer  Kleber Sacilotto de Souza <kleber.souza@canonical.com>
           Mon, 5 Feb 2018 15:53:01 +0000 (16:53 +0100)
CVE-2017-5715 (Spectre v2 retpoline)

Guest GPR values are live in the hardware GPRs at VM-exit.  Do not
leave any guest values in hardware GPRs after the guest GPR values are
saved to the vcpu_vmx structure.

This is a partial mitigation for CVE-2017-5715 and CVE-2017-5753.
Specifically, it defeats the Project Zero PoC for CVE-2017-5715.

Suggested-by: Eric Northup <digitaleric@google.com>
Signed-off-by: Jim Mattson <jmattson@google.com>
Reviewed-by: Eric Northup <digitaleric@google.com>
Reviewed-by: Benjamin Serebrin <serebrin@google.com>
Reviewed-by: Andrew Honig <ahonig@google.com>
[Paolo: Add AMD bits, Signed-off-by: Tom Lendacky <thomas.lendacky@amd.com>]
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
(cherry picked from commit 0cb5b30698fdc8f6b4646012e3acb4ddce430788)
Signed-off-by: Andy Whitcroft <apw@canonical.com>
Signed-off-by: Kleber Sacilotto de Souza <kleber.souza@canonical.com>
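
The commit message above describes the mitigation pattern: store the guest's
register values to memory, then clear the hardware registers before any
further host code runs, so no guest-controlled value remains available to
speculative-execution gadgets.  A minimal user-space sketch of that
save-then-scrub pattern follows; it assumes x86-64 and GCC extended asm, and
the struct and function names are hypothetical, not kernel code.

#include <stdint.h>
#include <stdio.h>

struct saved_regs {
	uint64_t r12;
	uint64_t r13;
};

static void save_and_scrub(struct saved_regs *regs)
{
	asm volatile(
		/* stand-in for live guest values left in the GPRs at VM-exit */
		"mov %0, %%r12 \n\t"
		"mov %1, %%r13 \n\t"
		/* save them to memory, as the patch does into the vcpu structure */
		"mov %%r12, %0 \n\t"
		"mov %%r13, %1 \n\t"
		/* scrub: xor of the 32-bit alias zero-extends, clearing all 64 bits */
		"xor %%r12d, %%r12d \n\t"
		"xor %%r13d, %%r13d \n\t"
		: "+m"(regs->r12), "+m"(regs->r13)
		:
		: "r12", "r13");
}

int main(void)
{
	struct saved_regs regs = { .r12 = 0x1234, .r13 = 0x5678 };

	save_and_scrub(&regs);
	printf("saved r12=%#llx r13=%#llx\n",
	       (unsigned long long)regs.r12,
	       (unsigned long long)regs.r13);
	return 0;
}
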
arch/x86/kvm/svm.c
arch/x86/kvm/vmx.c

diff --git a/arch/x86/kvm/svm.c b/arch/x86/kvm/svm.c
index af256b786a70cccd14906fe926e2b790156967a4..b87dfe48e6ba3b4ba1e9933f9b9f878778f34640 100644
--- a/arch/x86/kvm/svm.c
+++ b/arch/x86/kvm/svm.c
@@ -4917,6 +4917,25 @@ static void svm_vcpu_run(struct kvm_vcpu *vcpu)
                "mov %%r13, %c[r13](%[svm]) \n\t"
                "mov %%r14, %c[r14](%[svm]) \n\t"
                "mov %%r15, %c[r15](%[svm]) \n\t"
+#endif
+               /*
+               * Clear host registers marked as clobbered to prevent
+               * speculative use.
+               */
+               "xor %%" _ASM_BX ", %%" _ASM_BX " \n\t"
+               "xor %%" _ASM_CX ", %%" _ASM_CX " \n\t"
+               "xor %%" _ASM_DX ", %%" _ASM_DX " \n\t"
+               "xor %%" _ASM_SI ", %%" _ASM_SI " \n\t"
+               "xor %%" _ASM_DI ", %%" _ASM_DI " \n\t"
+#ifdef CONFIG_X86_64
+               "xor %%r8, %%r8 \n\t"
+               "xor %%r9, %%r9 \n\t"
+               "xor %%r10, %%r10 \n\t"
+               "xor %%r11, %%r11 \n\t"
+               "xor %%r12, %%r12 \n\t"
+               "xor %%r13, %%r13 \n\t"
+               "xor %%r14, %%r14 \n\t"
+               "xor %%r15, %%r15 \n\t"
 #endif
                "pop %%" _ASM_BP
                :
diff --git a/arch/x86/kvm/vmx.c b/arch/x86/kvm/vmx.c
index 6c160a328c537dc10bcd51e1882be1d8134512a4..5a1782af04294e1f942d866432cc5fee90317362 100644
--- a/arch/x86/kvm/vmx.c
+++ b/arch/x86/kvm/vmx.c
@@ -9077,6 +9077,7 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
                /* Save guest registers, load host registers, keep flags */
                "mov %0, %c[wordsize](%%" _ASM_SP ") \n\t"
                "pop %0 \n\t"
+               "setbe %c[fail](%0)\n\t"
                "mov %%" _ASM_AX ", %c[rax](%0) \n\t"
                "mov %%" _ASM_BX ", %c[rbx](%0) \n\t"
                __ASM_SIZE(pop) " %c[rcx](%0) \n\t"
@@ -9093,12 +9094,23 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
                "mov %%r13, %c[r13](%0) \n\t"
                "mov %%r14, %c[r14](%0) \n\t"
                "mov %%r15, %c[r15](%0) \n\t"
+               "xor %%r8d,  %%r8d \n\t"
+               "xor %%r9d,  %%r9d \n\t"
+               "xor %%r10d, %%r10d \n\t"
+               "xor %%r11d, %%r11d \n\t"
+               "xor %%r12d, %%r12d \n\t"
+               "xor %%r13d, %%r13d \n\t"
+               "xor %%r14d, %%r14d \n\t"
+               "xor %%r15d, %%r15d \n\t"
 #endif
                "mov %%cr2, %%" _ASM_AX "   \n\t"
                "mov %%" _ASM_AX ", %c[cr2](%0) \n\t"
 
+               "xor %%eax, %%eax \n\t"
+               "xor %%ebx, %%ebx \n\t"
+               "xor %%esi, %%esi \n\t"
+               "xor %%edi, %%edi \n\t"
                "pop  %%" _ASM_BP "; pop  %%" _ASM_DX " \n\t"
-               "setbe %c[fail](%0) \n\t"
                ".pushsection .rodata \n\t"
                ".global vmx_return \n\t"
                "vmx_return: " _ASM_PTR " 2b \n\t"