Message-Id: <20240927194925.707462984@infradead.org>
Date: Fri, 27 Sep 2024 21:49:09 +0200
From: Peter Zijlstra <peterz@...radead.org>
To: x86@...nel.org
Cc: linux-kernel@...r.kernel.org,
peterz@...radead.org,
alyssa.milburn@...el.com,
scott.d.constable@...el.com,
joao@...rdrivepizza.com,
andrew.cooper3@...rix.com,
jpoimboe@...nel.org,
jose.marchesi@...cle.com,
hjl.tools@...il.com,
ndesaulniers@...gle.com,
samitolvanen@...gle.com,
nathan@...nel.org,
ojeda@...nel.org,
kees@...nel.org,
alexei.starovoitov@...il.com
Subject: [PATCH 13/14] x86: BHI stubs
Mostly generated combinatorial stubs used to poison function
argument pointers.
Notably, since this targets eIBRS parts, which do not suffer from
retbleed, the stubs use normal return instructions to save some space.
In total: 6c1 + 6c2 + 6c3 + 6c4 + 1 = 6 + 15 + 20 + 15 + 1 = 57 stubs
(6cK being the binomial coefficient C(6,K); the +1 is the
__bhi_args_all catch-all).
Note: Scott said only 0.6% of the kernel functions take 5 or more
pointer arguments; if any of those turns out to be performance
critical, we can add more stubs.
Note: the nested for loops are horrid and should be fixed.
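As a rough sketch (function name, hash value and stub choice are
illustrative, not taken from this patch; the preamble shape follows the
existing FineIBT preamble), a patched call target would end up looking
something like:

  __cfi_foo:
	endbr64
	subl	$0x12345678, %r10d	# ZF=1, %r10d=0 on a hash match
	je	foo
	ud2
  foo:
	call	__bhi_args_0_1		# 5-byte slot poked by bhi_args()
	...

On the architectural (matching) path ZF is set and the stub's CMOVNEs do
nothing; on a speculative hash mismatch ZF is clear and the stub clamps
the selected pointer registers to the zero left in %r10.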
Signed-off-by: Peter Zijlstra (Intel) <peterz@...radead.org>
---
arch/x86/include/asm/cfi.h | 11
arch/x86/kernel/alternative.c | 94 ++++++
arch/x86/lib/Makefile | 1
arch/x86/lib/bhi.S | 602 ++++++++++++++++++++++++++++++++++++++++++
4 files changed, 708 insertions(+)
--- a/arch/x86/include/asm/cfi.h
+++ b/arch/x86/include/asm/cfi.h
@@ -101,6 +101,17 @@ enum cfi_mode {
extern enum cfi_mode cfi_mode;
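+
+/*
+ * BHI stubs are padded to fixed power-of-two slots (see the .align
+ * directives in arch/x86/lib/bhi.S) so a stub can be addressed by
+ * array index.
+ */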
+typedef u8 bhi_thunk_8[8];
+typedef u8 bhi_thunk_16[16];
+typedef u8 bhi_thunk_32[32];
+
+extern bhi_thunk_8 __bhi_args_6c1[];
+extern bhi_thunk_16 __bhi_args_6c2[];
+extern bhi_thunk_16 __bhi_args_6c3[];
+extern bhi_thunk_32 __bhi_args_6c4[];
+
+extern u8 __bhi_args_all[];
+
struct pt_regs;
#ifdef CONFIG_CFI_CLANG
--- a/arch/x86/kernel/alternative.c
+++ b/arch/x86/kernel/alternative.c
@@ -1039,10 +1039,104 @@ u32 cfi_get_func_hash(void *func)
return hash;
}
+
#endif
#ifdef CONFIG_FINEIBT
+static void *bhi_args_1(u8 args, void *addr)
+{
+ u8 bytes[5];
+
+ for (int i = 0; i < 6; i++) {
+ if (args != BIT(i))
+ continue;
+
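+		/*
+		 * A single CMOVNE is 4 bytes; a 0x2e (CS, ignored in long
+		 * mode) prefix pads it to fill the 5-byte call slot, so the
+		 * one-argument case is inlined at the patch site instead of
+		 * calling a stub.
+		 */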
+ bytes[0] = 0x2e;
+ memcpy(&bytes[1], &__bhi_args_6c1[i], 4);
+
+ text_poke_early(addr, bytes, 5);
+
+ return NULL;
+ }
+
+ return __bhi_args_all;
+}
+
+static void *bhi_args_2(u8 args, void *addr)
+{
+ int x = 0;
+
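+	/*
+	 * Walk the C(6,2) combinations in lexicographic order, mirroring
+	 * the layout of the __bhi_args_6c2 stubs; bhi_args_3() and
+	 * bhi_args_4() do the same for the 6c3 and 6c4 variants.
+	 */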
+ for (int i = 0; i < 6; i++) {
+ for (int j = i + 1; j < 6; j++) {
+ if (args == (BIT(i) | BIT(j)))
+ return &__bhi_args_6c2[x];
+ x++;
+ }
+ }
+
+ return __bhi_args_all;
+}
+
+static void *bhi_args_3(u8 args, void *addr)
+{
+ int x = 0;
+
+ for (int i = 0; i < 6; i++) {
+ for (int j = i + 1; j < 6; j++) {
+ for (int k = j + 1; k < 6; k++) {
+ if (args == (BIT(i) | BIT(j) | BIT(k)))
+ return &__bhi_args_6c3[x];
+ x++;
+ }
+ }
+ }
+
+ return __bhi_args_all;
+}
+
+static void *bhi_args_4(u8 args, void *addr)
+{
+ int x = 0;
+
+ for (int i = 0; i < 6; i++) {
+ for (int j = i + 1; j < 6; j++) {
+ for (int k = j + 1; k < 6; k++) {
+ for (int l = k + 1; l < 6; l++) {
+ if (args == (BIT(i) | BIT(j) | BIT(k) | BIT(l)))
+ return &__bhi_args_6c4[x];
+ x++;
+ }
+ }
+ }
+ }
+
+ return __bhi_args_all;
+}
+
+static void bhi_args(u8 args, void *addr)
+{
+ void *dest = __bhi_args_all;
+
+ if (WARN_ON_ONCE(!args))
+ return;
+
+	switch (hweight8(args)) {
+	case 1:
+		if (!bhi_args_1(args, addr))
+			return;
+		break;
+
+	case 2: dest = bhi_args_2(args, addr); break;
+	case 3: dest = bhi_args_3(args, addr); break;
+	case 4: dest = bhi_args_4(args, addr); break;
+
+	default:
+		break;
+	}
+
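+	/* Poke a 5-byte direct call to the selected stub at the patch site. */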
+ text_poke_early(addr, text_gen_insn(CALL_INSN_OPCODE, addr, dest), 5);
+}
+
static bool cfi_rand __ro_after_init = true;
static u32 cfi_seed __ro_after_init;
--- a/arch/x86/lib/Makefile
+++ b/arch/x86/lib/Makefile
@@ -60,4 +60,5 @@ endif
lib-y += memmove_64.o memset_64.o
lib-y += copy_user_64.o copy_user_uncached_64.o
lib-y += cmpxchg16b_emu.o
+lib-y += bhi.o
endif
--- /dev/null
+++ b/arch/x86/lib/bhi.S
@@ -0,0 +1,602 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+
+#include <linux/linkage.h>
+#include <asm/unwind_hints.h>
+#include <asm/nospec-branch.h>
+
+/*
+ * At the start of a function, launder the function arguments that are
+ * pointers through CMOVcc; this creates a write dependency in the
+ * speculation flow.
+ *
+ * Notably, the CFI preambles calling these will have ZF set and r10 zero
+ * on a hash match, making every CMOVNE here a no-op; on a speculative
+ * mismatch ZF is clear and each selected pointer is overwritten with the
+ * zero in r10.
+ */
+
+.pushsection .noinstr.text, "ax"
+
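+/*
+ * Single-argument stubs: __bhi_args_N clamps the Nth argument register
+ * (rdi, rsi, rdx, rcx, r8, r9 in order).
+ */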
+ .align 8
+SYM_CODE_START(__bhi_args_6c1)
+ ANNOTATE_NOENDBR
+ .align 8
+SYM_INNER_LABEL(__bhi_args_0, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 8
+SYM_INNER_LABEL(__bhi_args_1, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 8
+SYM_INNER_LABEL(__bhi_args_2, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 8
+SYM_INNER_LABEL(__bhi_args_3, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rcx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 8
+SYM_INNER_LABEL(__bhi_args_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 8
+SYM_INNER_LABEL(__bhi_args_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+SYM_CODE_END(__bhi_args_6c1)
+
+
+ .align 16
+SYM_CODE_START(__bhi_args_6c2)
+ ANNOTATE_NOENDBR
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_1, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_2, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rdx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_3, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rcx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_2, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_3, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rcx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_2_3, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_2_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_2_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_3_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_3_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rcx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+SYM_CODE_END(__bhi_args_6c2)
+
+
+ .align 16
+SYM_CODE_START(__bhi_args_6c3)
+ ANNOTATE_NOENDBR
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_1_2, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_1_3, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %rcx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_1_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_1_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_2_3, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_2_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rdx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_2_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rdx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_3_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_3_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rcx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_0_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_2_3, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_2_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_2_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_3_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_3_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rcx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_1_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_2_3_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_2_3_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_2_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdx
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 16
+SYM_INNER_LABEL(__bhi_args_3_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+SYM_CODE_END(__bhi_args_6c3)
+
+
+ .align 32
+SYM_CODE_START(__bhi_args_6c4)
+ ANNOTATE_NOENDBR
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_1_2_3, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_1_2_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_1_2_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_1_3_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_1_3_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %rcx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_1_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_2_3_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_2_3_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_2_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rdx
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_0_3_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_1_2_3_4, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_1_2_3_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_1_2_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_1_3_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rsi
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+ .align 32
+SYM_INNER_LABEL(__bhi_args_2_3_4_5, SYM_L_LOCAL)
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+
+SYM_CODE_END(__bhi_args_6c4)
+
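+/*
+ * Catch-all for 5 or more pointer arguments, and for anything without an
+ * exact stub: clamp all six argument registers and %rsp itself.
+ */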
+SYM_CODE_START(__bhi_args_all)
+ ANNOTATE_NOENDBR
+ UNWIND_HINT_FUNC
+ cmovne %r10, %rdi
+ cmovne %r10, %rsi
+ cmovne %r10, %rdx
+ cmovne %r10, %rcx
+ cmovne %r10, %r8
+ cmovne %r10, %r9
+ cmovne %r10, %rsp
+ ANNOTATE_UNRET_SAFE
+ ret
+ int3
+SYM_CODE_END(__bhi_args_all)
+
+.popsection