Message-ID: <77n2puylk3pox4s2du6agjiaj3joh2bxtvpjsenf72mentb7bj@siyjjbwxrasv>
Date: Sun, 7 Dec 2025 06:52:44 -0800
From: Josh Poimboeuf <jpoimboe@...nel.org>
To: x86@...nel.org
Cc: linux-kernel@...r.kernel.org, 
	Linus Torvalds <torvalds@...ux-foundation.org>, Ingo Molnar <mingo@...nel.org>, 
	Peter Zijlstra <peterz@...radead.org>, Borislav Petkov <bp@...en8.de>, 
	Thomas Gleixner <tglx@...utronix.de>
Subject: Re: [PATCH 7/8] x86/asm: Remove newlines in alternatives

On Sat, Dec 06, 2025 at 01:41:14PM -0800, Josh Poimboeuf wrote:
> In preparation for converting the alternative implementation to
> assembler macros, remove all newlines from alternative instructions.
> 
> Signed-off-by: Josh Poimboeuf <jpoimboe@...nel.org>

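(For anyone following along: this works because GAS accepts ';' as a
statement separator, equivalent to a newline within a single asm
template.  A minimal sketch, not from the tree, with arbitrarily
chosen instructions; both templates assemble to the same two
statements:

	static inline void sep_demo(void)
	{
		/* newline-separated: two statements */
		asm volatile("movl $1, %%eax\n"
			     "addl $2, %%eax"
			     : : : "eax");

		/* "; "-separated: the same two statements */
		asm volatile("movl $1, %%eax; "
			     "addl $2, %%eax"
			     : : : "eax");
	}

The separator only starts to matter once the string is passed as an
assembler macro argument, where a literal newline would end the
argument.)
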
x86-32 fixes:

diff --git a/arch/x86/include/asm/nospec-branch.h b/arch/x86/include/asm/nospec-branch.h
index 334c6058a1ec..2313f3fe19d0 100644
--- a/arch/x86/include/asm/nospec-branch.h
+++ b/arch/x86/include/asm/nospec-branch.h
@@ -441,18 +441,18 @@ static inline void call_depth_return_thunk(void) {}
  * -mindirect-branch-cs-prefix.
  */
 #define __CS_PREFIX(reg)				\
-	".irp rs,r8,r9,r10,r11,r12,r13,r14,r15\n"	\
-	".ifc \\rs," reg "\n"				\
-	".byte 0x2e\n"					\
-	".endif\n"					\
-	".endr\n"
+	".irp rs,r8,r9,r10,r11,r12,r13,r14,r15; "	\
+	".ifc \\rs," reg "; "				\
+	".byte 0x2e; "					\
+	".endif; "					\
+	".endr; "
 
 /*
  * Inline asm uses the %V modifier which is only in newer GCC
  * which is ensured when CONFIG_MITIGATION_RETPOLINE is defined.
  */
 #define CALL_NOSPEC	__CS_PREFIX("%V[thunk_target]")	\
-			"call __x86_indirect_thunk_%V[thunk_target]\n"
+			"call __x86_indirect_thunk_%V[thunk_target]"
 
 # define THUNK_TARGET(addr) [thunk_target] "r" (addr)
 
@@ -464,30 +464,30 @@ static inline void call_depth_return_thunk(void) {}
  */
 # define CALL_NOSPEC						\
 	ALTERNATIVE_2(						\
-	ANNOTATE_RETPOLINE_SAFE "\n"				\
-	"call *%[thunk_target]\n",				\
-	"       jmp    904f;\n"					\
-	"       .align 16\n"					\
-	"901:	call   903f;\n"					\
-	"902:	pause;\n"					\
-	"    	lfence;\n"					\
-	"       jmp    902b;\n"					\
-	"       .align 16\n"					\
-	"903:	lea    4(%%esp), %%esp;\n"			\
-	"       pushl  %[thunk_target];\n"			\
-	"       ret;\n"						\
-	"       .align 16\n"					\
-	"904:	call   901b;\n",				\
+	ANNOTATE_RETPOLINE_SAFE "; "				\
+	"call *%[thunk_target]; ",				\
+	"       jmp    904f; "					\
+	"       .align 16; "					\
+	"901:	call   903f; "					\
+	"902:	pause; "					\
+	"	lfence; "					\
+	"       jmp    902b; "					\
+	"       .align 16; "					\
+	"903:	lea    4(%%esp), %%esp; "			\
+	"       pushl  %[thunk_target]; "			\
+	"       ret; "						\
+	"       .align 16; "					\
+	"904:	call   901b; ",					\
 	X86_FEATURE_RETPOLINE,					\
-	"lfence;\n"						\
-	ANNOTATE_RETPOLINE_SAFE "\n"				\
-	"call *%[thunk_target]\n",				\
+	"lfence; "						\
+	ANNOTATE_RETPOLINE_SAFE "; "				\
+	"call *%[thunk_target]; ",				\
 	X86_FEATURE_RETPOLINE_LFENCE)
 
 # define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
 #endif
 #else /* No retpoline for C / inline asm */
-# define CALL_NOSPEC "call *%[thunk_target]\n"
+# define CALL_NOSPEC "call *%[thunk_target]; "
 # define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
 #endif
 
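For reference, a hand expansion of the __CS_PREFIX() hunk above
(illustrative, assuming the thunk target lands in %r11): GCC's %V
substitutes the bare register name, the .irp loop walks r8-r15, and
the .ifc emits the 0x2e CS segment-override prefix byte only in the
matching iteration, so the pair boils down to:

	.byte 0x2e; call __x86_indirect_thunk_r11

Keeping that on one logical line with "; " is what allows it to later
be passed as a single assembler macro argument.
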
diff --git a/arch/x86/kernel/irq_32.c b/arch/x86/kernel/irq_32.c
index c7a5d2960d57..9c5bb666bd55 100644
--- a/arch/x86/kernel/irq_32.c
+++ b/arch/x86/kernel/irq_32.c
@@ -53,7 +53,7 @@ DEFINE_PER_CPU_CACHE_HOT(struct irq_stack *, softirq_stack_ptr);
 
 static void call_on_stack(void *func, void *stack)
 {
-	asm volatile("xchgl %[sp], %%esp\n"
+	asm volatile("xchgl %[sp], %%esp; "
 		     CALL_NOSPEC
 		     "movl %[sp], %%esp"
 		     : [sp] "+b" (stack)

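With that, the call_on_stack() template concatenates to a single
logical line.  In the no-retpoline case (CALL_NOSPEC defined as
"call *%[thunk_target]; ") the string handed to the assembler is
roughly:

	xchgl %[sp], %%esp; call *%[thunk_target]; movl %[sp], %%esp

with operands substituted by GCC as usual, and GAS splitting it back
into three statements at the ';' boundaries.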