lists.openwall.net   lists  /  announce  owl-users  owl-dev  john-users  john-dev  passwdqc-users  yescrypt  popa3d-users  /  oss-security  kernel-hardening  musl  sabotage  tlsify  passwords  /  crypt-dev  xvendor  /  Bugtraq  Full-Disclosure  linux-kernel  linux-netdev  linux-ext4  linux-hardening  linux-cve-announce  PHC 
Open Source and information security mailing list archives
 
Hash Suite: Windows password security audit tool. GUI, reports in PDF.
[<prev] [next>] [<thread-prev] [thread-next>] [day] [month] [year] [list]
Message-ID: <20251031174220.43458-3-mjguzik@gmail.com>
Date: Fri, 31 Oct 2025 18:42:19 +0100
From: Mateusz Guzik <mjguzik@...il.com>
To: torvalds@...ux-foundation.org
Cc: brauner@...nel.org,
	viro@...iv.linux.org.uk,
	jack@...e.cz,
	linux-kernel@...r.kernel.org,
	linux-fsdevel@...r.kernel.org,
	tglx@...utronix.de,
	pfalcato@...e.de,
	Mateusz Guzik <mjguzik@...il.com>
Subject: [PATCH 2/3] runtime-const: split headers between accessors and fixup; disable for modules

riscv and x86 covered as a POC
---
 .../include/asm/runtime-const-accessors.h     | 151 ++++++++++++++++++
 arch/riscv/include/asm/runtime-const.h        | 142 +---------------
 .../x86/include/asm/runtime-const-accessors.h |  45 ++++++
 arch/x86/include/asm/runtime-const.h          |  38 +----
 4 files changed, 200 insertions(+), 176 deletions(-)
 create mode 100644 arch/riscv/include/asm/runtime-const-accessors.h
 create mode 100644 arch/x86/include/asm/runtime-const-accessors.h

diff --git a/arch/riscv/include/asm/runtime-const-accessors.h b/arch/riscv/include/asm/runtime-const-accessors.h
new file mode 100644
index 000000000000..5b8e0349ee0d
--- /dev/null
+++ b/arch/riscv/include/asm/runtime-const-accessors.h
@@ -0,0 +1,151 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ASM_RISCV_RUNTIME_CONST_ACCESSORS_H
+#define _ASM_RISCV_RUNTIME_CONST_ACCESSORS_H
+
+#ifdef MODULE
+#error "this functionality is not available for modules"
+#endif
+
+#ifdef CONFIG_32BIT
+#define runtime_const_ptr(sym)					\
+({								\
+	typeof(sym) __ret;					\
+	asm_inline(".option push\n\t"				\
+		".option norvc\n\t"				\
+		"1:\t"						\
+		"lui	%[__ret],0x89abd\n\t"			\
+		"addi	%[__ret],%[__ret],-0x211\n\t"		\
+		".option pop\n\t"				\
+		".pushsection runtime_ptr_" #sym ",\"a\"\n\t"	\
+		".long 1b - .\n\t"				\
+		".popsection"					\
+		: [__ret] "=r" (__ret));			\
+	__ret;							\
+})
+#else
+/*
+ * Loading 64-bit constants into a register from immediates is a non-trivial
+ * task on riscv64. To get it somewhat performant, load 32 bits into two
+ * different registers and then combine the results.
+ *
+ * If the processor supports the Zbkb extension, we can combine the final
+ * "slli,slli,srli,add" into the single "pack" instruction. If the processor
+ * doesn't support Zbkb but does support the Zbb extension, we can
+ * combine the final "slli,srli,add" into one instruction "add.uw".
+ */
+#define RISCV_RUNTIME_CONST_64_PREAMBLE				\
+	".option push\n\t"					\
+	".option norvc\n\t"					\
+	"1:\t"							\
+	"lui	%[__ret],0x89abd\n\t"				\
+	"lui	%[__tmp],0x1234\n\t"				\
+	"addiw	%[__ret],%[__ret],-0x211\n\t"			\
+	"addiw	%[__tmp],%[__tmp],0x567\n\t"			\
+
+#define RISCV_RUNTIME_CONST_64_BASE				\
+	"slli	%[__tmp],%[__tmp],32\n\t"			\
+	"slli	%[__ret],%[__ret],32\n\t"			\
+	"srli	%[__ret],%[__ret],32\n\t"			\
+	"add	%[__ret],%[__ret],%[__tmp]\n\t"			\
+
+#define RISCV_RUNTIME_CONST_64_ZBA				\
+	".option push\n\t"					\
+	".option arch,+zba\n\t"					\
+	".option norvc\n\t"					\
+	"slli	%[__tmp],%[__tmp],32\n\t"			\
+	"add.uw %[__ret],%[__ret],%[__tmp]\n\t"			\
+	"nop\n\t"						\
+	"nop\n\t"						\
+	".option pop\n\t"					\
+
+#define RISCV_RUNTIME_CONST_64_ZBKB				\
+	".option push\n\t"					\
+	".option arch,+zbkb\n\t"				\
+	".option norvc\n\t"					\
+	"pack	%[__ret],%[__ret],%[__tmp]\n\t"			\
+	"nop\n\t"						\
+	"nop\n\t"						\
+	"nop\n\t"						\
+	".option pop\n\t"					\
+
+#define RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
+	".option pop\n\t"					\
+	".pushsection runtime_ptr_" #sym ",\"a\"\n\t"		\
+	".long 1b - .\n\t"					\
+	".popsection"						\
+
+#if defined(CONFIG_RISCV_ISA_ZBA) && defined(CONFIG_TOOLCHAIN_HAS_ZBA)	\
+	&& defined(CONFIG_RISCV_ISA_ZBKB)
+#define runtime_const_ptr(sym)						\
+({									\
+	typeof(sym) __ret, __tmp;					\
+	asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE			\
+		ALTERNATIVE_2(						\
+			RISCV_RUNTIME_CONST_64_BASE,			\
+			RISCV_RUNTIME_CONST_64_ZBA,			\
+			0, RISCV_ISA_EXT_ZBA, 1,			\
+			RISCV_RUNTIME_CONST_64_ZBKB,			\
+			0, RISCV_ISA_EXT_ZBKB, 1			\
+		)							\
+		RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
+		: [__ret] "=r" (__ret), [__tmp] "=r" (__tmp));		\
+	__ret;								\
+})
+#elif defined(CONFIG_RISCV_ISA_ZBA) && defined(CONFIG_TOOLCHAIN_HAS_ZBA)
+#define runtime_const_ptr(sym)						\
+({									\
+	typeof(sym) __ret, __tmp;					\
+	asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE			\
+		ALTERNATIVE(						\
+			RISCV_RUNTIME_CONST_64_BASE,			\
+			RISCV_RUNTIME_CONST_64_ZBA,			\
+			0, RISCV_ISA_EXT_ZBA, 1				\
+		)							\
+		RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
+		: [__ret] "=r" (__ret), [__tmp] "=r" (__tmp));		\
+	__ret;								\
+})
+#elif defined(CONFIG_RISCV_ISA_ZBKB)
+#define runtime_const_ptr(sym)						\
+({									\
+	typeof(sym) __ret, __tmp;					\
+	asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE			\
+		ALTERNATIVE(						\
+			RISCV_RUNTIME_CONST_64_BASE,			\
+			RISCV_RUNTIME_CONST_64_ZBKB,			\
+			0, RISCV_ISA_EXT_ZBKB, 1			\
+		)							\
+		RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
+		: [__ret] "=r" (__ret), [__tmp] "=r" (__tmp));		\
+	__ret;								\
+})
+#else
+#define runtime_const_ptr(sym)						\
+({									\
+	typeof(sym) __ret, __tmp;					\
+	asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE			\
+		RISCV_RUNTIME_CONST_64_BASE				\
+		RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
+		: [__ret] "=r" (__ret), [__tmp] "=r" (__tmp));		\
+	__ret;								\
+})
+#endif
+#endif
+
+#define runtime_const_shift_right_32(val, sym)			\
+({								\
+	u32 __ret;						\
+	asm_inline(".option push\n\t"				\
+		".option norvc\n\t"				\
+		"1:\t"						\
+		SRLI " %[__ret],%[__val],12\n\t"		\
+		".option pop\n\t"				\
+		".pushsection runtime_shift_" #sym ",\"a\"\n\t"	\
+		".long 1b - .\n\t"				\
+		".popsection"					\
+		: [__ret] "=r" (__ret)				\
+		: [__val] "r" (val));				\
+	__ret;							\
+})
+
+#endif /* _ASM_RISCV_RUNTIME_CONST_ACCESSORS_H */
diff --git a/arch/riscv/include/asm/runtime-const.h b/arch/riscv/include/asm/runtime-const.h
index d766e2b9e6df..14994be81487 100644
--- a/arch/riscv/include/asm/runtime-const.h
+++ b/arch/riscv/include/asm/runtime-const.h
@@ -11,147 +11,7 @@
 
 #include <linux/uaccess.h>
 
-#ifdef CONFIG_32BIT
-#define runtime_const_ptr(sym)					\
-({								\
-	typeof(sym) __ret;					\
-	asm_inline(".option push\n\t"				\
-		".option norvc\n\t"				\
-		"1:\t"						\
-		"lui	%[__ret],0x89abd\n\t"			\
-		"addi	%[__ret],%[__ret],-0x211\n\t"		\
-		".option pop\n\t"				\
-		".pushsection runtime_ptr_" #sym ",\"a\"\n\t"	\
-		".long 1b - .\n\t"				\
-		".popsection"					\
-		: [__ret] "=r" (__ret));			\
-	__ret;							\
-})
-#else
-/*
- * Loading 64-bit constants into a register from immediates is a non-trivial
- * task on riscv64. To get it somewhat performant, load 32 bits into two
- * different registers and then combine the results.
- *
- * If the processor supports the Zbkb extension, we can combine the final
- * "slli,slli,srli,add" into the single "pack" instruction. If the processor
- * doesn't support Zbkb but does support the Zbb extension, we can
- * combine the final "slli,srli,add" into one instruction "add.uw".
- */
-#define RISCV_RUNTIME_CONST_64_PREAMBLE				\
-	".option push\n\t"					\
-	".option norvc\n\t"					\
-	"1:\t"							\
-	"lui	%[__ret],0x89abd\n\t"				\
-	"lui	%[__tmp],0x1234\n\t"				\
-	"addiw	%[__ret],%[__ret],-0x211\n\t"			\
-	"addiw	%[__tmp],%[__tmp],0x567\n\t"			\
-
-#define RISCV_RUNTIME_CONST_64_BASE				\
-	"slli	%[__tmp],%[__tmp],32\n\t"			\
-	"slli	%[__ret],%[__ret],32\n\t"			\
-	"srli	%[__ret],%[__ret],32\n\t"			\
-	"add	%[__ret],%[__ret],%[__tmp]\n\t"			\
-
-#define RISCV_RUNTIME_CONST_64_ZBA				\
-	".option push\n\t"					\
-	".option arch,+zba\n\t"					\
-	".option norvc\n\t"					\
-	"slli	%[__tmp],%[__tmp],32\n\t"			\
-	"add.uw %[__ret],%[__ret],%[__tmp]\n\t"			\
-	"nop\n\t"						\
-	"nop\n\t"						\
-	".option pop\n\t"					\
-
-#define RISCV_RUNTIME_CONST_64_ZBKB				\
-	".option push\n\t"					\
-	".option arch,+zbkb\n\t"				\
-	".option norvc\n\t"					\
-	"pack	%[__ret],%[__ret],%[__tmp]\n\t"			\
-	"nop\n\t"						\
-	"nop\n\t"						\
-	"nop\n\t"						\
-	".option pop\n\t"					\
-
-#define RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
-	".option pop\n\t"					\
-	".pushsection runtime_ptr_" #sym ",\"a\"\n\t"		\
-	".long 1b - .\n\t"					\
-	".popsection"						\
-
-#if defined(CONFIG_RISCV_ISA_ZBA) && defined(CONFIG_TOOLCHAIN_HAS_ZBA)	\
-	&& defined(CONFIG_RISCV_ISA_ZBKB)
-#define runtime_const_ptr(sym)						\
-({									\
-	typeof(sym) __ret, __tmp;					\
-	asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE			\
-		ALTERNATIVE_2(						\
-			RISCV_RUNTIME_CONST_64_BASE,			\
-			RISCV_RUNTIME_CONST_64_ZBA,			\
-			0, RISCV_ISA_EXT_ZBA, 1,			\
-			RISCV_RUNTIME_CONST_64_ZBKB,			\
-			0, RISCV_ISA_EXT_ZBKB, 1			\
-		)							\
-		RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
-		: [__ret] "=r" (__ret), [__tmp] "=r" (__tmp));		\
-	__ret;								\
-})
-#elif defined(CONFIG_RISCV_ISA_ZBA) && defined(CONFIG_TOOLCHAIN_HAS_ZBA)
-#define runtime_const_ptr(sym)						\
-({									\
-	typeof(sym) __ret, __tmp;					\
-	asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE			\
-		ALTERNATIVE(						\
-			RISCV_RUNTIME_CONST_64_BASE,			\
-			RISCV_RUNTIME_CONST_64_ZBA,			\
-			0, RISCV_ISA_EXT_ZBA, 1				\
-		)							\
-		RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
-		: [__ret] "=r" (__ret), [__tmp] "=r" (__tmp));		\
-	__ret;								\
-})
-#elif defined(CONFIG_RISCV_ISA_ZBKB)
-#define runtime_const_ptr(sym)						\
-({									\
-	typeof(sym) __ret, __tmp;					\
-	asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE			\
-		ALTERNATIVE(						\
-			RISCV_RUNTIME_CONST_64_BASE,			\
-			RISCV_RUNTIME_CONST_64_ZBKB,			\
-			0, RISCV_ISA_EXT_ZBKB, 1			\
-		)							\
-		RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
-		: [__ret] "=r" (__ret), [__tmp] "=r" (__tmp));		\
-	__ret;								\
-})
-#else
-#define runtime_const_ptr(sym)						\
-({									\
-	typeof(sym) __ret, __tmp;					\
-	asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE			\
-		RISCV_RUNTIME_CONST_64_BASE				\
-		RISCV_RUNTIME_CONST_64_POSTAMBLE(sym)			\
-		: [__ret] "=r" (__ret), [__tmp] "=r" (__tmp));		\
-	__ret;								\
-})
-#endif
-#endif
-
-#define runtime_const_shift_right_32(val, sym)			\
-({								\
-	u32 __ret;						\
-	asm_inline(".option push\n\t"				\
-		".option norvc\n\t"				\
-		"1:\t"						\
-		SRLI " %[__ret],%[__val],12\n\t"		\
-		".option pop\n\t"				\
-		".pushsection runtime_shift_" #sym ",\"a\"\n\t"	\
-		".long 1b - .\n\t"				\
-		".popsection"					\
-		: [__ret] "=r" (__ret)				\
-		: [__val] "r" (val));				\
-	__ret;							\
-})
+#include <asm/runtime-const-accessors.h>
 
 #define runtime_const_init(type, sym) do {			\
 	extern s32 __start_runtime_##type##_##sym[];		\
diff --git a/arch/x86/include/asm/runtime-const-accessors.h b/arch/x86/include/asm/runtime-const-accessors.h
new file mode 100644
index 000000000000..4c411bc3cb32
--- /dev/null
+++ b/arch/x86/include/asm/runtime-const-accessors.h
@@ -0,0 +1,45 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ASM_RUNTIME_CONST_ACCESSORS_H
+#define _ASM_RUNTIME_CONST_ACCESSORS_H
+
+#ifdef MODULE
+#error "this functionality is not available for modules"
+#endif
+
+#ifdef __ASSEMBLY__
+
+.macro RUNTIME_CONST_PTR sym reg
+	movq	$0x0123456789abcdef, %\reg
+	1:
+	.pushsection runtime_ptr_\sym, "a"
+	.long	1b - 8 - .
+	.popsection
+.endm
+
+#else /* __ASSEMBLY__ */
+
+#define runtime_const_ptr(sym) ({				\
+	typeof(sym) __ret;					\
+	asm_inline("mov %1,%0\n1:\n"				\
+		".pushsection runtime_ptr_" #sym ",\"a\"\n\t"	\
+		".long 1b - %c2 - .\n"				\
+		".popsection"					\
+		:"=r" (__ret)					\
+		:"i" ((unsigned long)0x0123456789abcdefull),	\
+		 "i" (sizeof(long)));				\
+	__ret; })
+
+// The 'typeof' will create at _least_ a 32-bit type, but
+// will happily also take a bigger type and the 'shrl' will
+// clear the upper bits
+#define runtime_const_shift_right_32(val, sym) ({		\
+	typeof(0u+(val)) __ret = (val);				\
+	asm_inline("shrl $12,%k0\n1:\n"				\
+		".pushsection runtime_shift_" #sym ",\"a\"\n\t"	\
+		".long 1b - 1 - .\n"				\
+		".popsection"					\
+		:"+r" (__ret));					\
+	__ret; })
+
+#endif /* __ASSEMBLY__ */
+#endif
diff --git a/arch/x86/include/asm/runtime-const.h b/arch/x86/include/asm/runtime-const.h
index 8d983cfd06ea..15d67e2bfc96 100644
--- a/arch/x86/include/asm/runtime-const.h
+++ b/arch/x86/include/asm/runtime-const.h
@@ -2,41 +2,9 @@
 #ifndef _ASM_RUNTIME_CONST_H
 #define _ASM_RUNTIME_CONST_H
 
-#ifdef __ASSEMBLY__
-
-.macro RUNTIME_CONST_PTR sym reg
-	movq	$0x0123456789abcdef, %\reg
-	1:
-	.pushsection runtime_ptr_\sym, "a"
-	.long	1b - 8 - .
-	.popsection
-.endm
-
-#else /* __ASSEMBLY__ */
-
-#define runtime_const_ptr(sym) ({				\
-	typeof(sym) __ret;					\
-	asm_inline("mov %1,%0\n1:\n"				\
-		".pushsection runtime_ptr_" #sym ",\"a\"\n\t"	\
-		".long 1b - %c2 - .\n"				\
-		".popsection"					\
-		:"=r" (__ret)					\
-		:"i" ((unsigned long)0x0123456789abcdefull),	\
-		 "i" (sizeof(long)));				\
-	__ret; })
-
-// The 'typeof' will create at _least_ a 32-bit type, but
-// will happily also take a bigger type and the 'shrl' will
-// clear the upper bits
-#define runtime_const_shift_right_32(val, sym) ({		\
-	typeof(0u+(val)) __ret = (val);				\
-	asm_inline("shrl $12,%k0\n1:\n"				\
-		".pushsection runtime_shift_" #sym ",\"a\"\n\t"	\
-		".long 1b - 1 - .\n"				\
-		".popsection"					\
-		:"+r" (__ret));					\
-	__ret; })
+#include <asm/runtime-const-accessors.h>
 
+#ifndef __ASSEMBLY__
 #define runtime_const_init(type, sym) do {		\
 	extern s32 __start_runtime_##type##_##sym[];	\
 	extern s32 __stop_runtime_##type##_##sym[];	\
@@ -70,5 +38,5 @@ static inline void runtime_const_fixup(void (*fn)(void *, unsigned long),
 	}
 }
 
-#endif /* __ASSEMBLY__ */
+#endif /* !__ASSEMBLY__ */
 #endif
-- 
2.34.1


Powered by blists - more mailing lists

Powered by Openwall GNU/*/Linux Powered by OpenVZ