@@ -1474,7 +1474,7 @@ static int test_vmxon_bad_cr(int cr_number, unsigned long orig_cr,
unsigned long *flexible_bits)
{
unsigned long required1, disallowed1, val, bit;
- int ret, i;
+ int ret, i, expected;
if (!cr_number) {
required1 = rdmsr(MSR_IA32_VMX_CR0_FIXED0);
@@ -1521,10 +1521,22 @@ static int test_vmxon_bad_cr(int cr_number, unsigned long orig_cr,
if (write_cr_safe(cr_number, val))
continue;
+ /*
+ * CR0.PE==0 and CR4.VMXE==0 result in #UD, all other invalid
+ * CR0/CR4 bits result in #GP. Include CR0.PE even though it's
+ * dead code (see above) for completeness.
+ */
+ if ((cr_number == 0 && bit == X86_CR0_PE) ||
+ (cr_number == 4 && bit == X86_CR4_VMXE))
+ expected = UD_VECTOR;
+ else
+ expected = GP_VECTOR;
+
ret = vmx_on();
- report(ret == UD_VECTOR,
- "VMXON with CR%d bit %d %s should #UD, got '%d'",
- cr_number, i, (required1 & bit) ? "cleared" : "set", ret);
+ report(ret == expected,
+ "VMXON with CR%d bit %d %s should %s, got '%d'",
+ cr_number, i, (required1 & bit) ? "cleared" : "set",
+ expected == UD_VECTOR ? "#UD" : "#GP", ret);
write_cr(cr_number, orig_cr);
Expect #GP, not #UD, when executing with "generic" invalid CR0/CR4 bits, i.e. with invalid bits other than CR0.PE or CR4.VMXE. The PE and VMXE checks are special pre-conditions to VM-Exit and thus #UD, all other CR0/CR4 checks are performed if and only if the CPU isn't already in VMX mode and so #GP. Reported-by: Eric Li <ercli@ucdavis.edu> Fixes: f7b730bc ("nVMX: Add subtest to verify VMXON succeeds/#UDs on good/bad CR0/CR4") Signed-off-by: Sean Christopherson <seanjc@google.com> --- x86/vmx.c | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) base-commit: d8a4f9e5e8d69d4ef257b40d6cd666bd2f63494e