Message-ID: <20221110203504.1985010-5-seanjc@google.com>
Date: Thu, 10 Nov 2022 20:35:03 +0000
From: Sean Christopherson <seanjc@...gle.com>
To: Dave Hansen <dave.hansen@...ux.intel.com>,
Andy Lutomirski <luto@...nel.org>,
Peter Zijlstra <peterz@...radead.org>,
Thomas Gleixner <tglx@...utronix.de>,
Ingo Molnar <mingo@...hat.com>, Borislav Petkov <bp@...en8.de>,
x86@...nel.org, Andrey Ryabinin <ryabinin.a.a@...il.com>
Cc: "H. Peter Anvin" <hpa@...or.com>,
Alexander Potapenko <glider@...gle.com>,
Andrey Konovalov <andreyknvl@...il.com>,
Dmitry Vyukov <dvyukov@...gle.com>,
Vincenzo Frascino <vincenzo.frascino@....com>,
linux-kernel@...r.kernel.org, kasan-dev@...glegroups.com,
Sean Christopherson <seanjc@...gle.com>,
syzbot+ffb4f000dc2872c93f62@...kaller.appspotmail.com,
syzbot+8cdd16fd5a6c0565e227@...kaller.appspotmail.com
Subject: [PATCH v2 4/5] x86/kasan: Add helpers to align shadow addresses up and down

Add helpers to dedup code for aligning shadow addresses up/down to page
boundaries when translating an address to its shadow.

No functional change intended.

Signed-off-by: Sean Christopherson <seanjc@...gle.com>
---
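A bit of context for reviewers (illustration only, not part of the
patch): kasan_mem_to_shadow() shifts an address right by
KASAN_SHADOW_SCALE_SHIFT and adds KASAN_SHADOW_OFFSET, i.e. one shadow
byte covers eight bytes of memory under generic KASAN.  The standalone
sketch below shows the round-down/round-up semantics the new helpers
encapsulate; it assumes a 64-bit build, mem_to_shadow() is a local
stand-in rather than the kernel function, and the constants mirror the
x86-64 generic KASAN configuration but are hardcoded here purely for
illustration.

#include <stdio.h>

/* Illustrative values; in the kernel these come from Kconfig/headers. */
#define PAGE_SIZE			4096UL
#define KASAN_SHADOW_SCALE_SHIFT	3	/* 8 bytes per shadow byte */
#define KASAN_SHADOW_OFFSET		0xdffffc0000000000UL

/* Same semantics as the kernel's round_down()/round_up() for powers of 2. */
#define round_down(x, y)	((x) & ~((y) - 1))
#define round_up(x, y)		(((x) + (y) - 1) & ~((y) - 1))

/* Local stand-in for kasan_mem_to_shadow(). */
static unsigned long mem_to_shadow(unsigned long va)
{
	return (va >> KASAN_SHADOW_SCALE_SHIFT) + KASAN_SHADOW_OFFSET;
}

int main(void)
{
	/* Arbitrary example range; any kernel virtual address works. */
	unsigned long va = 0xffffc90000001234UL, size = 0x5000;

	/* The rounding kasan_mem_to_shadow_align_down/_up perform. */
	printf("shadow start: 0x%lx\n",
	       round_down(mem_to_shadow(va), PAGE_SIZE));
	printf("shadow end:   0x%lx\n",
	       round_up(mem_to_shadow(va + size), PAGE_SIZE));
	return 0;
}

Note that the rounding is done on the shadow address, not on @va: one
shadow page spans PAGE_SIZE << KASAN_SHADOW_SCALE_SHIFT bytes of real
memory, so page-aligning the original address would not be equivalent.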
arch/x86/mm/kasan_init_64.c | 40 ++++++++++++++++++++-----------------
1 file changed, 22 insertions(+), 18 deletions(-)

diff --git a/arch/x86/mm/kasan_init_64.c b/arch/x86/mm/kasan_init_64.c
index ad7872ae10ed..afc5e129ca7b 100644
--- a/arch/x86/mm/kasan_init_64.c
+++ b/arch/x86/mm/kasan_init_64.c
@@ -316,22 +316,33 @@ void __init kasan_early_init(void)
 	kasan_map_early_shadow(init_top_pgt);
 }
 
+static unsigned long kasan_mem_to_shadow_align_down(unsigned long va)
+{
+	unsigned long shadow = (unsigned long)kasan_mem_to_shadow((void *)va);
+
+	return round_down(shadow, PAGE_SIZE);
+}
+
+static unsigned long kasan_mem_to_shadow_align_up(unsigned long va)
+{
+	unsigned long shadow = (unsigned long)kasan_mem_to_shadow((void *)va);
+
+	return round_up(shadow, PAGE_SIZE);
+}
+
 void __init kasan_populate_shadow_for_vaddr(void *va, size_t size, int nid)
 {
 	unsigned long shadow_start, shadow_end;
 
-	shadow_start = (unsigned long)kasan_mem_to_shadow(va);
-	shadow_start = round_down(shadow_start, PAGE_SIZE);
-	shadow_end = (unsigned long)kasan_mem_to_shadow(va + size);
-	shadow_end = round_up(shadow_end, PAGE_SIZE);
-
+	shadow_start = kasan_mem_to_shadow_align_down((unsigned long)va);
+	shadow_end = kasan_mem_to_shadow_align_up((unsigned long)va + size);
 	kasan_populate_shadow(shadow_start, shadow_end, nid);
 }
 
 void __init kasan_init(void)
 {
+	unsigned long shadow_cea_begin, shadow_cea_end;
 	int i;
-	void *shadow_cea_begin, *shadow_cea_end;
 
 	memcpy(early_top_pgt, init_top_pgt, sizeof(early_top_pgt));
 
@@ -372,16 +383,9 @@ void __init kasan_init(void)
 		map_range(&pfn_mapped[i]);
 	}
 
-	shadow_cea_begin = (void *)CPU_ENTRY_AREA_BASE;
-	shadow_cea_begin = kasan_mem_to_shadow(shadow_cea_begin);
-	shadow_cea_begin = (void *)round_down(
-			(unsigned long)shadow_cea_begin, PAGE_SIZE);
-
-	shadow_cea_end = (void *)(CPU_ENTRY_AREA_BASE +
-				  CPU_ENTRY_AREA_MAP_SIZE);
-	shadow_cea_end = kasan_mem_to_shadow(shadow_cea_end);
-	shadow_cea_end = (void *)round_up(
-			(unsigned long)shadow_cea_end, PAGE_SIZE);
+	shadow_cea_begin = kasan_mem_to_shadow_align_down(CPU_ENTRY_AREA_BASE);
+	shadow_cea_end = kasan_mem_to_shadow_align_up(CPU_ENTRY_AREA_BASE +
						      CPU_ENTRY_AREA_MAP_SIZE);
 
 	kasan_populate_early_shadow(
 		kasan_mem_to_shadow((void *)PAGE_OFFSET + MAXMEM),
@@ -403,9 +407,9 @@ void __init kasan_init(void)
 
 	kasan_populate_early_shadow(
 		kasan_mem_to_shadow((void *)VMALLOC_END + 1),
-		shadow_cea_begin);
+		(void *)shadow_cea_begin);
 
-	kasan_populate_early_shadow(shadow_cea_end,
+	kasan_populate_early_shadow((void *)shadow_cea_end,
 		kasan_mem_to_shadow((void *)__START_KERNEL_map));
 
 	kasan_populate_shadow((unsigned long)kasan_mem_to_shadow(_stext),

--
2.38.1.431.g37b22c650d-goog