[3/3] nSVM: Test non-MBZ reserved bits in CR3 in long mode

Message ID 20200829004824.4577-4-krish.sadhukhan@oracle.com (mailing list archive)
State New, archived
Series KVM: nSVM: Add checks for CR3 and CR4 reserved bits to svm_set_nested_state() and test CR3 non-MBZ reserved bits | expand

Commit Message

Krish Sadhukhan Aug. 29, 2020, 12:48 a.m. UTC
According to the section "CR3" in APM vol. 2, the non-MBZ reserved bits in
CR3 should be cleared by software rather than set:

	"Reserved Bits. Reserved fields should be cleared to 0 by software
	when writing CR3."

Since these bits are not MBZ, VMRUN is not expected to fail when they are
set. Add tests that set the non-MBZ reserved bits in CR3 in long mode and
verify that VMRUN does not fail, both with and without CR4.PCIDE.

Signed-off-by: Krish Sadhukhan <krish.sadhukhan@oracle.com>
---
 x86/svm.h       |  3 ++-
 x86/svm_tests.c | 53 +++++++++++++++++++++++++++++++++++++++++++++++-------
 2 files changed, 48 insertions(+), 8 deletions(-)
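
The distinction the two masks draw can be summarized as follows: CR3 bits
63:52 are MBZ in long mode, and setting any of them must make VMRUN fail
(the existing test expects SVM_EXIT_ERR), whereas bits 11:5 and 2:0 are
reserved only by software convention when CR4.PCIDE=0 (with PCIDE=1 the low
12 bits hold the PCID), so setting them must not fail VMRUN. Below is a
minimal standalone sketch of that split; the mask values mirror the
x86/svm.h definitions added by this patch, while the helper names are
illustrative and not taken from the tree:

#include <stdbool.h>
#include <stdint.h>

/* Mirrors the definitions this patch adds to x86/svm.h. */
#define SVM_CR3_LONG_MBZ_MASK       0xfff0000000000000ULL /* bits 63:52      */
#define SVM_CR3_LONG_RESERVED_MASK  0x0000000000000fe7ULL /* bits 11:5, 2:0  */

/* Illustrative helper: any MBZ bit set makes the guest state illegal,
 * so VMRUN is expected to fail (SVM_EXIT_ERR in the test framework). */
static bool cr3_violates_mbz(uint64_t cr3)
{
	return (cr3 & SVM_CR3_LONG_MBZ_MASK) != 0;
}

/* Illustrative helper: these bits should be cleared by software, but
 * setting them must not make VMRUN fail, which is what the new tests
 * verify (SVM_EXIT_VMMCALL, or SVM_EXIT_NPF when the NPT Present bit
 * has been cleared on purpose). */
static bool cr3_has_nonmbz_reserved_bits(uint64_t cr3)
{
	return (cr3 & SVM_CR3_LONG_RESERVED_MASK) != 0;
}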

Patch

diff --git a/x86/svm.h b/x86/svm.h
index 15e0f18..465d794 100644
--- a/x86/svm.h
+++ b/x86/svm.h
@@ -325,7 +325,8 @@  struct __attribute__ ((__packed__)) vmcb {
 #define SVM_CR0_SELECTIVE_MASK (X86_CR0_TS | X86_CR0_MP)
 
 #define	SVM_CR0_RESERVED_MASK			0xffffffff00000000U
-#define	SVM_CR3_LONG_RESERVED_MASK		0xfff0000000000000U
+#define	SVM_CR3_LONG_MBZ_MASK			0xfff0000000000000U
+#define	SVM_CR3_LONG_RESERVED_MASK		0x0000000000000fe7U
 #define	SVM_CR4_LEGACY_RESERVED_MASK		0xff88f000U
 #define	SVM_CR4_RESERVED_MASK			0xffffffffff88f000U
 #define	SVM_DR6_RESERVED_MASK			0xffffffffffff1ff0U
diff --git a/x86/svm_tests.c b/x86/svm_tests.c
index 1908c7c..af8684b 100644
--- a/x86/svm_tests.c
+++ b/x86/svm_tests.c
@@ -1891,11 +1891,10 @@  static bool reg_corruption_check(struct svm_test *test)
  * v2 tests
  */
 
 static void basic_guest_main(struct svm_test *test)
 {
 }
 
-
 #define SVM_TEST_REG_RESERVED_BITS(start, end, inc, str_name, reg, val,	\
 				   resv_mask)				\
 {									\
@@ -1913,7 +1912,8 @@  static void basic_guest_main(struct svm_test *test)
         }								\
 }
 
-#define SVM_TEST_CR_RESERVED_BITS(start, end, inc, cr, val, resv_mask)	\
+#define SVM_TEST_CR_RESERVED_BITS(start, end, inc, cr, val, resv_mask,	\
+				  exit_code)				\
 {									\
 	u64 tmp, mask;							\
 	int i;								\
@@ -1933,7 +1933,7 @@  static void basic_guest_main(struct svm_test *test)
 		case 4:							\
 			vmcb->save.cr4 = tmp;				\
 		}							\
-		report(svm_vmrun() == SVM_EXIT_ERR, "Test CR%d %d:%d: %lx",\
+		report(svm_vmrun() == exit_code, "Test CR%d %d:%d: %lx",\
 		    cr, end, start, tmp);				\
 	}								\
 }
@@ -2012,9 +2012,48 @@  static void test_cr3(void)
 	u64 cr3_saved = vmcb->save.cr3;
 
 	SVM_TEST_CR_RESERVED_BITS(0, 63, 1, 3, cr3_saved,
-	    SVM_CR3_LONG_RESERVED_MASK);
+	    SVM_CR3_LONG_MBZ_MASK, SVM_EXIT_ERR);
+
+	vmcb->save.cr3 = cr3_saved & ~SVM_CR3_LONG_MBZ_MASK;
+	report(svm_vmrun() == SVM_EXIT_VMMCALL, "Test CR3 63:0: %lx",
+	    vmcb->save.cr3);
+
+	/*
+	 * CR3 non-MBZ reserved bits based on different modes:
+	 *   [11:5] [2:0] - long mode
+	 */
+	u64 cr4_saved = vmcb->save.cr4;
+
+	/*
+	 * Long mode
+	 */
+	if (this_cpu_has(X86_FEATURE_PCID)) {
+		vmcb->save.cr4 = cr4_saved | X86_CR4_PCIDE;
+		SVM_TEST_CR_RESERVED_BITS(0, 11, 1, 3, cr3_saved,
+		    SVM_CR3_LONG_RESERVED_MASK, SVM_EXIT_VMMCALL);
+
+		vmcb->save.cr3 = cr3_saved & ~SVM_CR3_LONG_RESERVED_MASK;
+		report(svm_vmrun() == SVM_EXIT_VMMCALL, "Test CR3 63:0: %lx",
+		    vmcb->save.cr3);
+	} else {
+		u64 *pml4e = npt_get_pml4e();
+
+		vmcb->save.cr4 = cr4_saved & ~X86_CR4_PCIDE;
+
+		/* Clear P (Present) bit in NPT in order to trigger #NPF */
+		pml4e[0] &= ~1ULL;
+
+		SVM_TEST_CR_RESERVED_BITS(0, 11, 1, 3, cr3_saved,
+		    SVM_CR3_LONG_RESERVED_MASK, SVM_EXIT_NPF);
+
+		pml4e[0] |= 1ULL;
+		vmcb->save.cr3 = cr3_saved & ~SVM_CR3_LONG_RESERVED_MASK;
+		report(svm_vmrun() == SVM_EXIT_VMMCALL, "Test CR3 63:0: %lx",
+		    vmcb->save.cr3);
+	}
 
 	vmcb->save.cr3 = cr3_saved;
+	vmcb->save.cr4 = cr4_saved;
 }
 
 static void test_cr4(void)
@@ -2031,14 +2070,14 @@  static void test_cr4(void)
 	efer &= ~EFER_LME;
 	vmcb->save.efer = efer;
 	SVM_TEST_CR_RESERVED_BITS(12, 31, 1, 4, cr4_saved,
-	    SVM_CR4_LEGACY_RESERVED_MASK);
+	    SVM_CR4_LEGACY_RESERVED_MASK, SVM_EXIT_ERR);
 
 	efer |= EFER_LME;
 	vmcb->save.efer = efer;
 	SVM_TEST_CR_RESERVED_BITS(12, 31, 1, 4, cr4_saved,
-	    SVM_CR4_RESERVED_MASK);
+	    SVM_CR4_RESERVED_MASK, SVM_EXIT_ERR);
 	SVM_TEST_CR_RESERVED_BITS(32, 63, 4, 4, cr4_saved,
-	    SVM_CR4_RESERVED_MASK);
+	    SVM_CR4_RESERVED_MASK, SVM_EXIT_ERR);
 
 	vmcb->save.cr4 = cr4_saved;
 	vmcb->save.efer = efer_saved;
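
For readers who do not want to expand the macro mentally:
SVM_TEST_CR_RESERVED_BITS(start, end, inc, cr, val, resv_mask, exit_code)
walks bit positions start..end in steps of inc, skips positions outside
resv_mask, writes val with that single bit set into the selected control
register in the VMCB, runs the guest, and compares the exit code against the
new exit_code argument (previously hard-coded to SVM_EXIT_ERR). A rough
expansion of the PCIDE=1 call above, reconstructed from the macro's visible
context lines rather than copied from the tree, so details may differ:

/*
 * Approximate expansion of
 *   SVM_TEST_CR_RESERVED_BITS(0, 11, 1, 3, cr3_saved,
 *                             SVM_CR3_LONG_RESERVED_MASK, SVM_EXIT_VMMCALL);
 */
{
	u64 tmp, mask;
	int i;

	for (i = 0; i <= 11; i += 1) {			/* start, end, inc */
		mask = 1ull << i;
		if (!(mask & SVM_CR3_LONG_RESERVED_MASK))
			continue;			/* only reserved bits */
		tmp = cr3_saved | mask;			/* set one reserved bit */
		vmcb->save.cr3 = tmp;			/* cr == 3 */
		/* The expected exit code is now a parameter: SVM_EXIT_ERR for
		 * MBZ bits, SVM_EXIT_VMMCALL (or SVM_EXIT_NPF in the NPT
		 * variant) for non-MBZ reserved bits. */
		report(svm_vmrun() == SVM_EXIT_VMMCALL,
		       "Test CR3 11:0: %lx", tmp);
	}
}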