Message-ID: <20251024194925.3201933-8-yosry.ahmed@linux.dev>
Date: Fri, 24 Oct 2025 19:49:25 +0000
From: Yosry Ahmed <yosry.ahmed@...ux.dev>
To: Sean Christopherson <seanjc@...gle.com>
Cc: Paolo Bonzini <pbonzini@...hat.com>,
Jim Mattson <jmattson@...gle.com>,
kvm@...r.kernel.org,
linux-kernel@...r.kernel.org,
Yosry Ahmed <yosryahmed@...gle.com>
Subject: [kvm-unit-tests 7/7] x86/svm: Add more selective CR0 write and LMSW test cases
From: Yosry Ahmed <yosryahmed@...gle.com>
Add more test cases that cover:
- The priority between selective and non-selective CR0 intercepts.
- Writes to CR0 that should not be intercepted (e.g. toggling CR0.MP).
- Writes to CR0 using LMSW, which should always be intercepted (even
  when only updating CR0.MP).
Emulator variants of all test cases are added as well.
The new tests exercise bugs fixed by:
https://lore.kernel.org/kvm/20251024192918.3191141-1-yosry.ahmed@linux.dev/.
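For reference, the architectural behavior the new tests expect can be
summarized with the following decision logic (an illustrative sketch of
the behavior described in the APM, not KVM code; the variable names are
made up):

	if (cr0_write_intercept)
		/* The generic CR0 write intercept takes priority */
		exit_code = SVM_EXIT_WRITE_CR0;
	else if (sel_cr0_intercept &&
		 (is_lmsw ||
		  ((new_cr0 ^ old_cr0) & ~(X86_CR0_TS | X86_CR0_MP))))
		/*
		 * LMSW always triggers the selective intercept; MOV-to-CR0
		 * triggers it only when changing bits other than CR0.TS or
		 * CR0.MP.
		 */
		exit_code = SVM_EXIT_CR0_SEL_WRITE;
	else
		; /* No intercept, the write completes in the guest. */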
Signed-off-by: Yosry Ahmed <yosryahmed@...gle.com>
---
x86/svm_tests.c | 76 ++++++++++++++++++++++++++++++++++++++++++++++---
1 file changed, 72 insertions(+), 4 deletions(-)
diff --git a/x86/svm_tests.c b/x86/svm_tests.c
index 71afb38a..2981f459 100644
--- a/x86/svm_tests.c
+++ b/x86/svm_tests.c
@@ -129,20 +129,36 @@ static bool finished_rsm_intercept(struct svm_test *test)
static void prepare_sel_cr0_intercept(struct svm_test *test)
{
+ /* Clear CR0.MP and CR0.CD as the tests will set either of them */
+ vmcb->save.cr0 &= ~X86_CR0_MP;
vmcb->save.cr0 &= ~X86_CR0_CD;
vmcb->control.intercept |= (1ULL << INTERCEPT_SELECTIVE_CR0);
}
+static void prepare_sel_nonsel_cr0_intercepts(struct svm_test *test)
+{
+ /* Clear CR0.MP and CR0.CD as the tests will set either of them */
+ vmcb->save.cr0 &= ~X86_CR0_MP;
+ vmcb->save.cr0 &= ~X86_CR0_CD;
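+ /* Bit 0 of intercept_cr_write is the non-selective CR0 write intercept */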
+ vmcb->control.intercept_cr_write |= (1ULL << 0);
+ vmcb->control.intercept |= (1ULL << INTERCEPT_SELECTIVE_CR0);
+}
+
static void __test_cr0_write_bit(struct svm_test *test, unsigned long bit,
- bool intercept, bool fep)
+ bool is_lmsw, bool intercept, bool fep)
{
+ unsigned short msw;
unsigned long cr0;
cr0 = read_cr0();
cr0 |= bit;
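+ /* LMSW only loads the low 4 bits of CR0 (PE, MP, EM, TS) */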
+ msw = cr0 & 0xfUL;
test->scratch = cr0;
- asm_conditional_fep_safe(fep, "mov %0,%%cr0", "r"(cr0));
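+ /* fep: route the write through KVM's emulator via the forced emulation prefix */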
+ if (is_lmsw)
+ asm_conditional_fep_safe(fep, "lmsw %0", "r"(msw));
+ else
+ asm_conditional_fep_safe(fep, "mov %0,%%cr0", "r"(cr0));
/* This code should be unreachable when an intercept is expected */
report_svm_guest(!intercept, test, "Expected intercept on CR0 write");
@@ -151,12 +167,34 @@ static void __test_cr0_write_bit(struct svm_test *test, unsigned long bit,
/* MOV-to-CR0 updating CR0.CD is intercepted by the selective intercept */
static void test_sel_cr0_write_intercept(struct svm_test *test)
{
- __test_cr0_write_bit(test, X86_CR0_CD, true, false);
+ __test_cr0_write_bit(test, X86_CR0_CD, false, true, false);
}
static void test_sel_cr0_write_intercept_emul(struct svm_test *test)
{
- __test_cr0_write_bit(test, X86_CR0_CD, true, true);
+ __test_cr0_write_bit(test, X86_CR0_CD, false, true, true);
+}
+
+/* MOV-to-CR0 updating CR0.MP is NOT intercepted by the selective intercept */
+static void test_sel_cr0_write_nointercept(struct svm_test *test)
+{
+ __test_cr0_write_bit(test, X86_CR0_MP, false, false, false);
+}
+
+static void test_sel_cr0_write_nointercept_emul(struct svm_test *test)
+{
+ __test_cr0_write_bit(test, X86_CR0_MP, false, false, true);
+}
+
+/* LMSW updating CR0.MP is intercepted by the selective intercept */
+static void test_sel_cr0_lmsw_intercept(struct svm_test *test)
+{
+ __test_cr0_write_bit(test, X86_CR0_MP, true, true, false);
+}
+
+static void test_sel_cr0_lmsw_intercept_emul(struct svm_test *test)
+{
+ __test_cr0_write_bit(test, X86_CR0_MP, true, true, true);
}
static bool check_sel_cr0_intercept(struct svm_test *test)
@@ -165,6 +203,18 @@ static bool check_sel_cr0_intercept(struct svm_test *test)
vmcb->save.cr0 != test->scratch;
}
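+/*
+ * When both the selective and non-selective CR0 write intercepts are
+ * set, the non-selective intercept takes priority.
+ */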
+static bool check_nonsel_cr0_intercept(struct svm_test *test)
+{
+ return vmcb->control.exit_code == SVM_EXIT_WRITE_CR0 &&
+ vmcb->save.cr0 != test->scratch;
+}
+
+static bool check_cr0_nointercept(struct svm_test *test)
+{
+ return vmcb->control.exit_code == SVM_EXIT_VMMCALL &&
+ vmcb->save.cr0 == test->scratch;
+}
+
static void prepare_cr3_intercept(struct svm_test *test)
{
default_prepare(test);
@@ -3473,6 +3523,24 @@ struct svm_test svm_tests[] = {
{ "sel cr0 write intercept emulate", fep_supported,
prepare_sel_cr0_intercept, default_prepare_gif_clear,
test_sel_cr0_write_intercept_emul, default_finished, check_sel_cr0_intercept},
+ { "sel cr0 write intercept priority", default_supported,
+ prepare_sel_nonsel_cr0_intercepts, default_prepare_gif_clear,
+ test_sel_cr0_write_intercept, default_finished, check_nonsel_cr0_intercept},
+ { "sel cr0 write intercept priority emulate", fep_supported,
+ prepare_sel_nonsel_cr0_intercepts, default_prepare_gif_clear,
+ test_sel_cr0_write_intercept_emul, default_finished, check_nonsel_cr0_intercept},
+ { "sel cr0 write nointercept", default_supported,
+ prepare_sel_cr0_intercept, default_prepare_gif_clear,
+ test_sel_cr0_write_nointercept, default_finished, check_cr0_nointercept},
+ { "sel cr0 write nointercept emulate", fep_supported,
+ prepare_sel_cr0_intercept, default_prepare_gif_clear,
+ test_sel_cr0_write_nointercept_emul, default_finished, check_cr0_nointercept},
+ { "sel cr0 lmsw intercept", default_supported,
+ prepare_sel_cr0_intercept, default_prepare_gif_clear,
+ test_sel_cr0_lmsw_intercept, default_finished, check_sel_cr0_intercept},
+ { "sel cr0 lmsw intercept emulate", fep_supported,
+ prepare_sel_cr0_intercept, default_prepare_gif_clear,
+ test_sel_cr0_lmsw_intercept_emul, default_finished, check_sel_cr0_intercept},
{ "cr3 read intercept", default_supported,
prepare_cr3_intercept, default_prepare_gif_clear,
test_cr3_intercept, default_finished, check_cr3_intercept },
--
2.51.1.821.gb6fe4d2222-goog