@@ -329,6 +329,9 @@ struct __attribute__ ((__packed__)) vmcb {
#define SVM_CR3_LONG_RESERVED_MASK 0xfff0000000000fe7U
#define SVM_CR4_LEGACY_RESERVED_MASK 0xffbaf000U
#define SVM_CR4_RESERVED_MASK 0xffffffffffbaf000U
+#define SVM_DR6_RESERVED_MASK 0xffffffffffff1ff0U
+#define SVM_DR7_RESERVED_MASK 0xffffffff0000cc00U
+#define SVM_EFER_RESERVED_MASK 0xffffffffffff0200U
#define MSR_BITMAP_SIZE 8192
@@ -1799,7 +1799,8 @@ static void basic_guest_main(struct svm_test *test)
{
}
-#define SVM_TEST_CR_RESERVED_BITS(start, end, inc, cr, val, resv_mask) \
+#define SVM_TEST_REG_RESERVED_BITS(start, end, inc, str_name, reg, val, \
+ resv_mask) \
{ \
u64 tmp, mask; \
int i; \
@@ -1809,18 +1810,9 @@ static void basic_guest_main(struct svm_test *test)
if (!(mask & resv_mask)) \
continue; \
tmp = val | mask; \
- switch (cr) { \
- case 0: \
- vmcb->save.cr0 = tmp; \
- break; \
- case 3: \
- vmcb->save.cr3 = tmp; \
- break; \
- case 4: \
- vmcb->save.cr4 = tmp; \
- } \
- report(svm_vmrun() == SVM_EXIT_ERR, "Test CR%d %d:%d: %lx",\
- cr, end, start, tmp); \
+ reg = tmp; \
+ report(svm_vmrun() == SVM_EXIT_ERR, "Test %s %d:%d: %lx",\
+ str_name, end, start, tmp); \
} \
}
@@ -1871,7 +1863,7 @@ static void svm_guest_state_test(void)
*/
cr0 = cr0_saved;
- SVM_TEST_CR_RESERVED_BITS(32, 63, 4, 0, cr0_saved,
+ SVM_TEST_REG_RESERVED_BITS(32, 63, 4, "CR0", vmcb->save.cr0, cr0_saved,
SVM_CR0_RESERVED_MASK);
vmcb->save.cr0 = cr0_saved;
@@ -1891,19 +1883,19 @@ static void svm_guest_state_test(void)
vmcb->save.efer = efer;
cr4 |= X86_CR4_PAE;
vmcb->save.cr4 = cr4;
- SVM_TEST_CR_RESERVED_BITS(0, 2, 1, 3, cr3_saved,
+ SVM_TEST_REG_RESERVED_BITS(0, 2, 1, "CR3", vmcb->save.cr3, cr3_saved,
SVM_CR3_LEGACY_PAE_RESERVED_MASK);
cr4 = cr4_saved & ~X86_CR4_PAE;
vmcb->save.cr4 = cr4;
- SVM_TEST_CR_RESERVED_BITS(0, 11, 2, 3, cr3_saved,
+ SVM_TEST_REG_RESERVED_BITS(0, 11, 2, "CR3", vmcb->save.cr3, cr3_saved,
SVM_CR3_LEGACY_RESERVED_MASK);
cr4 |= X86_CR4_PAE;
vmcb->save.cr4 = cr4;
efer |= EFER_LMA;
vmcb->save.efer = efer;
- SVM_TEST_CR_RESERVED_BITS(0, 63, 2, 3, cr3_saved,
+ SVM_TEST_REG_RESERVED_BITS(0, 63, 2, "CR3", vmcb->save.cr3, cr3_saved,
SVM_CR3_LONG_RESERVED_MASK);
vmcb->save.cr4 = cr4_saved;
@@ -1919,18 +1911,45 @@ static void svm_guest_state_test(void)
efer_saved = vmcb->save.efer;
efer &= ~EFER_LMA;
vmcb->save.efer = efer;
- SVM_TEST_CR_RESERVED_BITS(12, 31, 2, 4, cr4_saved,
+ SVM_TEST_REG_RESERVED_BITS(12, 31, 2, "CR4", vmcb->save.cr4, cr4_saved,
SVM_CR4_LEGACY_RESERVED_MASK);
efer |= EFER_LMA;
vmcb->save.efer = efer;
- SVM_TEST_CR_RESERVED_BITS(12, 31, 2, 4, cr4_saved,
+ SVM_TEST_REG_RESERVED_BITS(12, 31, 2, "CR4", vmcb->save.cr4, cr4_saved,
SVM_CR4_RESERVED_MASK);
- SVM_TEST_CR_RESERVED_BITS(32, 63, 4, 4, cr4_saved,
+ SVM_TEST_REG_RESERVED_BITS(32, 63, 4, "CR4", vmcb->save.cr4, cr4_saved,
SVM_CR4_RESERVED_MASK);
vmcb->save.cr4 = cr4_saved;
vmcb->save.efer = efer_saved;
+
+ /*
+ * DR6[63:32] and DR7[63:32] are MBZ
+ */
+ u64 dr_saved = vmcb->save.dr6;
+
+ SVM_TEST_REG_RESERVED_BITS(32, 63, 4, "DR6", vmcb->save.dr6, dr_saved,
+ SVM_DR6_RESERVED_MASK);
+ vmcb->save.dr6 = dr_saved;
+
+ dr_saved = vmcb->save.dr7;
+ SVM_TEST_REG_RESERVED_BITS(32, 63, 4, "DR7", vmcb->save.dr7, dr_saved,
+ SVM_DR7_RESERVED_MASK);
+
+ vmcb->save.dr7 = dr_saved;
+
+ /*
+ * EFER MBZ bits: 63:16, 9
+ */
+ efer_saved = vmcb->save.efer;
+
+ SVM_TEST_REG_RESERVED_BITS(8, 9, 1, "EFER", vmcb->save.efer,
+ efer_saved, SVM_EFER_RESERVED_MASK);
+ SVM_TEST_REG_RESERVED_BITS(16, 63, 4, "EFER", vmcb->save.efer,
+ efer_saved, SVM_EFER_RESERVED_MASK);
+
+ vmcb->save.efer = efer_saved;
}
struct svm_test svm_tests[] = {
According to section "Canonicalization and Consistency Checks" in APM vol. 2,
the following guest state is illegal:

    "DR6[63:32] are not zero."
    "DR7[63:32] are not zero."
    "Any MBZ bit of EFER is set."

Signed-off-by: Krish Sadhukhan <krish.sadhukhan@oracle.com>
---
 x86/svm.h       |  3 +++
 x86/svm_tests.c | 59 ++++++++++++++++++++++++++++++++++++++-------------------
 2 files changed, 42 insertions(+), 20 deletions(-)