From 887821e1cf32c505d2bc9fb9dcee45cf1e2f65e5 Mon Sep 17 00:00:00 2001
From: Linus Torvalds <torvalds@linux-foundation.org>
Date: Sat, 8 Jun 2024 13:22:31 -0700
Subject: [PATCH] arm64: add 'runtime const' support

Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
---
 arch/arm64/include/asm/runtime-const.h | 75 ++++++++++++++++++++++++++
 arch/arm64/kernel/vmlinux.lds.S        |  3 ++
 2 files changed, 78 insertions(+)
 create mode 100644 arch/arm64/include/asm/runtime-const.h

diff --git a/arch/arm64/include/asm/runtime-const.h b/arch/arm64/include/asm/runtime-const.h
new file mode 100644
index 000000000000..ab5f98bc942e
--- /dev/null
+++ b/arch/arm64/include/asm/runtime-const.h
@@ -0,0 +1,75 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ASM_RUNTIME_CONST_H
+#define _ASM_RUNTIME_CONST_H
+
+#define runtime_const_ptr(sym) ({				\
+	typeof(sym) __ret;					\
+	asm("1:\t"						\
+		"movz %0, #0xcdef\n\t"				\
+		"movk %0, #0x89ab, lsl #16\n\t"			\
+		"movk %0, #0x4567, lsl #32\n\t"			\
+		"movk %0, #0x0123, lsl #48\n\t"			\
+		".pushsection runtime_ptr_" #sym ",\"a\"\n\t"	\
+		".long 1b - .\n\t"				\
+		".popsection"					\
+		:"=r" (__ret));					\
+	__ret; })
+
+#define runtime_const_shift_right_32(val, sym) ({		\
+	unsigned long __ret;					\
+	asm("1:\t"						\
+		"lsr %w0,%w1,#12\n\t"				\
+		".pushsection runtime_shift_" #sym ",\"a\"\n\t"	\
+		".long 1b - .\n\t"				\
+		".popsection"					\
+		:"=r" (__ret)					\
+		:"r" (0u+(val)));				\
+	__ret; })
+
+#define runtime_const_init(type, sym, value) do {		\
+	extern s32 __start_runtime_##type##_##sym[];		\
+	extern s32 __stop_runtime_##type##_##sym[];		\
+	runtime_const_fixup(__runtime_fixup_##type,		\
+		(unsigned long)(value),				\
+		__start_runtime_##type##_##sym,			\
+		__stop_runtime_##type##_##sym);			\
+} while (0)
+
+// 16-bit immediate for wide move (movz and movk) in bits 5..20
+static inline void __runtime_fixup_16(unsigned int *p, unsigned int val)
+{
+	unsigned int insn = *p;
+	insn &= 0xffe0001f;
+	insn |= (val & 0xffff) << 5;
+	*p = insn;
+}
+
+static inline void __runtime_fixup_ptr(void *where, unsigned long val)
+{
+	unsigned int *p = lm_alias(where);
+	__runtime_fixup_16(p, val);
+	__runtime_fixup_16(p+1, val >> 16);
+	__runtime_fixup_16(p+2, val >> 32);
+	__runtime_fixup_16(p+3, val >> 48);
+}
+
+// Immediate value is 6 bits starting at bit #16
+static inline void __runtime_fixup_shift(void *where, unsigned long val)
+{
+	unsigned int *p = lm_alias(where);
+	unsigned int insn = *p;
+	insn &= 0xffc0ffff;
+	insn |= (val & 63) << 16;
+	*p = insn;
+}
+
+static inline void runtime_const_fixup(void (*fn)(void *, unsigned long),
+	unsigned long val, s32 *start, s32 *end)
+{
+	while (start < end) {
+		fn(*start + (void *)start, val);
+		start++;
+	}
+}
+
+#endif
diff --git a/arch/arm64/kernel/vmlinux.lds.S b/arch/arm64/kernel/vmlinux.lds.S
index 755a22d4f840..55a8e310ea12 100644
--- a/arch/arm64/kernel/vmlinux.lds.S
+++ b/arch/arm64/kernel/vmlinux.lds.S
@@ -264,6 +264,9 @@ SECTIONS
 		EXIT_DATA
 	}
 
+	RUNTIME_CONST(shift, d_hash_shift)
+	RUNTIME_CONST(ptr, dentry_hashtable)
+
 	PERCPU_SECTION(L1_CACHE_BYTES)
 	HYPERVISOR_PERCPU_SECTION
 
-- 
2.45.1
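
For readers unfamiliar with the trick: the inline asm in runtime_const_ptr()
emits a movz plus three movk instructions loading an obviously-bogus
placeholder (0x0123456789abcdef) and records each use site in a
runtime_ptr_<sym> section; at boot, __runtime_fixup_ptr() rewrites the imm16
field (bits 5..20) of each of the four instructions with the real value.
Below is a small standalone userspace sketch of that arithmetic, not part of
the patch: the helper names, the instruction words, and the target address
are all local to the example, and it skips the lm_alias() step the kernel
needs to get a writable alias of its own text.

/*
 * Userspace sketch of the movz/movk fixup arithmetic.
 * Build: cc -o fixup-demo fixup-demo.c
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Same operation as the patch's __runtime_fixup_16():
 * replace the imm16 field (bits 5..20) of a movz/movk word. */
static void fixup_16(uint32_t *p, unsigned int val)
{
	uint32_t insn = *p;
	insn &= 0xffe0001f;		/* clear the imm16 field */
	insn |= (val & 0xffff) << 5;	/* insert the new 16-bit chunk */
	*p = insn;
}

/* Same operation as __runtime_fixup_ptr(): spread a 64-bit value
 * across the four instructions, 16 bits per instruction. */
static void fixup_ptr(uint32_t *p, uint64_t val)
{
	fixup_16(p,     val);
	fixup_16(p + 1, val >> 16);
	fixup_16(p + 2, val >> 32);
	fixup_16(p + 3, val >> 48);
}

/* Read the imm16 field back out, for checking. */
static uint64_t imm16(uint32_t insn)
{
	return (insn >> 5) & 0xffff;
}

int main(void)
{
	/* Encodings of the placeholder sequence the macro emits:
	 *   movz x0, #0xcdef
	 *   movk x0, #0x89ab, lsl #16
	 *   movk x0, #0x4567, lsl #32
	 *   movk x0, #0x0123, lsl #48 */
	uint32_t code[4] = { 0xd299bde0, 0xf2b13560, 0xf2c8ace0, 0xf2e02460 };
	uint64_t target = 0xffff800012345678ull;	/* made-up address */

	fixup_ptr(code, target);

	uint64_t rebuilt = imm16(code[0]) | imm16(code[1]) << 16 |
			   imm16(code[2]) << 32 | imm16(code[3]) << 48;
	assert(rebuilt == target);
	printf("patched constant: 0x%016llx\n", (unsigned long long)rebuilt);
	return 0;
}

The two RUNTIME_CONST() sections wired up in the vmlinux.lds.S hunk collect
the use sites for the dcache hash lookup (dentry_hashtable and d_hash_shift).
With this patch's three-argument runtime_const_init(), the dcache setup code
would be expected to run something like
runtime_const_init(ptr, dentry_hashtable, dentry_hashtable) once the table is
allocated, walking the recorded ".long 1b - ." offsets and patching every
use site in place.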