Message-Id: <20210930143113.1502553-7-pasha.tatashin@soleen.com>
Date: Thu, 30 Sep 2021 14:31:04 +0000
From: Pasha Tatashin <pasha.tatashin@...een.com>
To: pasha.tatashin@...een.com, sashal@...nel.org,
ebiederm@...ssion.com, kexec@...ts.infradead.org,
linux-kernel@...r.kernel.org, corbet@....net,
catalin.marinas@....com, will@...nel.org,
linux-arm-kernel@...ts.infradead.org, maz@...nel.org,
james.morse@....com, vladimir.murzin@....com,
matthias.bgg@...il.com, linux-mm@...ck.org, mark.rutland@....com,
steve.capper@....com, rfontana@...hat.com, tglx@...utronix.de,
selindag@...il.com, kernelfans@...il.com, akpm@...ux-foundation.org
Subject: [PATCH v18 06/15] arm64: kexec: Use dcache ops macros instead of open-coding

kexec does dcache maintenance when it re-writes all memory. Our
dcache_by_line_op macro depends on reading the sanitised DminLine
from memory. Kexec may have overwritten this, so the kexec code
open-codes the sequence.

dcache_by_line_op is a whole set of macros; it uses dcache_line_size,
which uses read_ctr for the sanitised DminLine. Reading the DminLine
is the first thing dcache_by_line_op does.
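
As a rough illustration (a sketch, not the kernel's exact
dcache_line_size code, and the registers here are arbitrary): the line
size is derived from CTR_EL0.DminLine, bits [19:16], which hold log2 of
the line length in 4-byte words:

	mrs	x1, ctr_el0		// cache type register
	ubfx	x1, x1, #16, #4		// DminLine: log2(words per line)
	mov	x0, #4			// bytes per word
	lsl	x0, x0, x1		// D-cache line size in bytes

read_ctr, however, may load the sanitised CTR value from a kernel
variable in memory rather than reading CTR_EL0 directly, and that
in-memory value is exactly what kexec may already have overwritten.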

Rename dcache_by_line_op to dcache_by_myline_op and take DminLine as
an argument; dcache_by_line_op becomes a thin wrapper that reads the
line size and calls the new macro. Kexec can now use the slightly
smaller macro.
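
For example, a caller that already holds the line size in a register
could use the new macro like this (a sketch; the registers and the
civac operation are chosen only for illustration):

	/* Clean+invalidate [x0, x1) to PoC; x2 = line size, x3 = scratch. */
	dcache_by_myline_op civac, sy, x0, x1, x2, x3

The optional fixup label is omitted, as in the kexec caller below.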

This makes upcoming changes to the dcache maintenance easier on
the eye.

Code generated by the existing callers is unchanged.

Suggested-by: James Morse <james.morse@....com>
Signed-off-by: Pasha Tatashin <pasha.tatashin@...een.com>
Acked-by: Catalin Marinas <catalin.marinas@....com>
---
arch/arm64/include/asm/assembler.h | 30 ++++++++++++++++++++++-------
arch/arm64/kernel/relocate_kernel.S | 13 +++----------
2 files changed, 26 insertions(+), 17 deletions(-)

diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index bfa58409a4d4..d5281f75a58d 100644
--- a/arch/arm64/include/asm/assembler.h
+++ b/arch/arm64/include/asm/assembler.h
@@ -405,19 +405,19 @@ alternative_endif

/*
* Macro to perform a data cache maintenance for the interval
- * [start, end)
+ * [start, end) with dcache line size explicitly provided.
*
* op: operation passed to dc instruction
* domain: domain used in dsb instruciton
* start: starting virtual address of the region
* end: end virtual address of the region
+ * linesz: dcache line size
* fixup: optional label to branch to on user fault
- * Corrupts: start, end, tmp1, tmp2
+ * Corrupts: start, end, tmp
*/
- .macro dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
- dcache_line_size \tmp1, \tmp2
- sub \tmp2, \tmp1, #1
- bic \start, \start, \tmp2
+ .macro dcache_by_myline_op op, domain, start, end, linesz, tmp, fixup
+ sub \tmp, \linesz, #1
+ bic \start, \start, \tmp
.Ldcache_op\@:
.ifc \op, cvau
__dcache_op_workaround_clean_cache \op, \start
@@ -436,7 +436,7 @@ alternative_endif
.endif
.endif
.endif
- add \start, \start, \tmp1
+ add \start, \start, \linesz
cmp \start, \end
b.lo .Ldcache_op\@
dsb \domain
@@ -444,6 +444,22 @@ alternative_endif
_cond_extable .Ldcache_op\@, \fixup
.endm

+/*
+ * Macro to perform a data cache maintenance for the interval
+ * [start, end)
+ *
+ * op: operation passed to dc instruction
+ * domain: domain used in dsb instruciton
+ * start: starting virtual address of the region
+ * end: end virtual address of the region
+ * fixup: optional label to branch to on user fault
+ * Corrupts: start, end, tmp1, tmp2
+ */
+ .macro dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
+ dcache_line_size \tmp1, \tmp2
+ dcache_by_myline_op \op, \domain, \start, \end, \tmp1, \tmp2, \fixup
+ .endm
+
/*
* Macro to perform an instruction cache maintenance for the interval
* [start, end)
diff --git a/arch/arm64/kernel/relocate_kernel.S b/arch/arm64/kernel/relocate_kernel.S
index 8058fabe0a76..8c43779e8cc6 100644
--- a/arch/arm64/kernel/relocate_kernel.S
+++ b/arch/arm64/kernel/relocate_kernel.S
@@ -41,16 +41,9 @@ SYM_CODE_START(arm64_relocate_new_kernel)
tbz x16, IND_SOURCE_BIT, .Ltest_indirection

/* Invalidate dest page to PoC. */
- mov x2, x13
- add x20, x2, #PAGE_SIZE
- sub x1, x15, #1
- bic x2, x2, x1
-2: dc ivac, x2
- add x2, x2, x15
- cmp x2, x20
- b.lo 2b
- dsb sy
-
+ mov x2, x13
+ add x1, x2, #PAGE_SIZE
+ dcache_by_myline_op ivac, sy, x2, x1, x15, x20
copy_page x13, x12, x1, x2, x3, x4, x5, x6, x7, x8
b .Lnext
.Ltest_indirection:
--
2.33.0.800.g4c38ced690-goog