Message-Id: <20220723095248.801393697@linuxfoundation.org>
Date: Sat, 23 Jul 2022 11:54:59 +0200
From: Greg Kroah-Hartman <gregkh@...uxfoundation.org>
To: linux-kernel@...r.kernel.org
Cc: Greg Kroah-Hartman <gregkh@...uxfoundation.org>,
stable@...r.kernel.org,
"Peter Zijlstra (Intel)" <peterz@...radead.org>,
Borislav Petkov <bp@...e.de>,
Josh Poimboeuf <jpoimboe@...nel.org>,
Thadeu Lima de Souza Cascardo <cascardo@...onical.com>,
Ben Hutchings <ben@...adent.org.uk>
Subject: [PATCH 5.10 087/148] x86,static_call: Use alternative RET encoding

From: Peter Zijlstra <peterz@...radead.org>

commit ee88d363d15617ff50ac24fab0ffec11113b2aeb upstream.

In addition to teaching static_call about the new way to spell 'RET',
there is an added complication in that static_call() is allowed to
rewrite text before it is known which particular spelling is required.

In order to deal with this, have a static_call-specific fixup in the
apply_return() 'alternative' patching routine that will rewrite the
static_call trampoline to match the definite sequence.

This in turn creates the problem of uniquely identifying static call
trampolines. Currently trampolines are 8 bytes, the first 5 being the
jmp.d32/ret sequence and the final 3 a byte sequence that spells out
'SCT'.

This sequence is used in __static_call_validate() to ensure it is
patching a trampoline and not a random other jmp.d32. That is, false
positives should be rare, and even when they occur they are not a big
concern.

OTOH the new __static_call_fixup() must not have false positives, and
'SCT' decodes to the somewhat weird but semi-plausible sequence:

  push %rbx
  rex.XB push %r12

Additionally, there are SLS concerns with immediate jumps. Combined, it
seems like a good moment to change the signature to a single 3 byte
trap instruction that is unique to this usage and will never be
generated by accident.

As such, change the signature to '0x0f, 0xb9, 0xcc', which decodes to:

  ud1 %esp, %ecx
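
Sketching the resulting 8-byte layouts (opcode bytes as used in the
patch below: 0xe9 jmp.d32, 0xc3 ret, 0xcc int3), the trailing 3 bytes
are always the signature:

  e9 <rel32>        jmp.d32 \func / __x86_return_thunk
  0f b9 cc          ud1 %esp, %ecx

  c3 cc cc cc cc    ret; int3 padding (retinsn[], !X86_FEATURE_RETHUNK)
  0f b9 cc          ud1 %esp, %ecx
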
Signed-off-by: Peter Zijlstra (Intel) <peterz@...radead.org>
Signed-off-by: Borislav Petkov <bp@...e.de>
Reviewed-by: Josh Poimboeuf <jpoimboe@...nel.org>
Signed-off-by: Borislav Petkov <bp@...e.de>
[cascardo: skip validation as introduced by 2105a92748e8 ("static_call,x86: Robustify trampoline patching")]
Signed-off-by: Thadeu Lima de Souza Cascardo <cascardo@...onical.com>
[bwh: Backported to 5.10: adjust context]
Signed-off-by: Ben Hutchings <ben@...adent.org.uk>
Signed-off-by: Greg Kroah-Hartman <gregkh@...uxfoundation.org>
---
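A rough sketch of what the CONFIG_RETPOLINE half of
ARCH_DEFINE_STATIC_CALL_NULL_TRAMP() boils down to with this applied
(simplified: the real __ARCH_DEFINE_STATIC_CALL_TRAMP macro also emits
.globl/.type/.size directives, and "foo" is a hypothetical key name):

asm(".pushsection .static_call.text, \"ax\"	\n"
    ".align 4					\n"
    "__SCT__foo:				\n"	/* hypothetical trampoline symbol */
    "jmp __x86_return_thunk			\n"	/* rewritten by apply_returns() / __static_call_fixup() */
    ".byte 0x0f, 0xb9, 0xcc			\n"	/* ud1 %esp, %ecx: signature + speculation stop */
    ".popsection				\n");

Without CONFIG_RETPOLINE the first 5 bytes are instead the
"ret; int3; nop; nop; nop" sequence, as in the hunk below.
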
arch/x86/include/asm/static_call.h | 17 ++++++++++++++++
arch/x86/kernel/alternative.c | 12 +++++++----
arch/x86/kernel/static_call.c | 38 ++++++++++++++++++++++++++++++++++++-
3 files changed, 62 insertions(+), 5 deletions(-)
--- a/arch/x86/include/asm/static_call.h
+++ b/arch/x86/include/asm/static_call.h
@@ -21,6 +21,16 @@
* relative displacement across sections.
*/
+/*
+ * The trampoline is 8 bytes and of the general form:
+ *
+ * jmp.d32 \func
+ * ud1 %esp, %ecx
+ *
+ * That trailing #UD provides both a speculation stop and serves as a unique
+ * 3 byte signature identifying static call trampolines. Also see tramp_ud[]
+ * and __static_call_fixup().
+ */
#define __ARCH_DEFINE_STATIC_CALL_TRAMP(name, insns) \
asm(".pushsection .static_call.text, \"ax\" \n" \
".align 4 \n" \
@@ -34,8 +44,13 @@
#define ARCH_DEFINE_STATIC_CALL_TRAMP(name, func) \
__ARCH_DEFINE_STATIC_CALL_TRAMP(name, ".byte 0xe9; .long " #func " - (. + 4)")
+#ifdef CONFIG_RETPOLINE
+#define ARCH_DEFINE_STATIC_CALL_NULL_TRAMP(name) \
+ __ARCH_DEFINE_STATIC_CALL_TRAMP(name, "jmp __x86_return_thunk")
+#else
#define ARCH_DEFINE_STATIC_CALL_NULL_TRAMP(name) \
__ARCH_DEFINE_STATIC_CALL_TRAMP(name, "ret; int3; nop; nop; nop")
+#endif
#define ARCH_ADD_TRAMP_KEY(name) \
@@ -44,4 +59,6 @@
".long " STATIC_CALL_KEY_STR(name) " - . \n" \
".popsection \n")
+extern bool __static_call_fixup(void *tramp, u8 op, void *dest);
+
#endif /* _ASM_STATIC_CALL_H */
--- a/arch/x86/kernel/alternative.c
+++ b/arch/x86/kernel/alternative.c
@@ -693,18 +693,22 @@ void __init_or_module noinline apply_ret
s32 *s;
for (s = start; s < end; s++) {
- void *addr = (void *)s + *s;
+ void *dest = NULL, *addr = (void *)s + *s;
struct insn insn;
int len, ret;
u8 bytes[16];
- u8 op1;
+ u8 op;
ret = insn_decode_kernel(&insn, addr);
if (WARN_ON_ONCE(ret < 0))
continue;
- op1 = insn.opcode.bytes[0];
- if (WARN_ON_ONCE(op1 != JMP32_INSN_OPCODE))
+ op = insn.opcode.bytes[0];
+ if (op == JMP32_INSN_OPCODE)
+ dest = addr + insn.length + insn.immediate.value;
+
+ if (__static_call_fixup(addr, op, dest) ||
+ WARN_ON_ONCE(dest != &__x86_return_thunk))
continue;
DPRINTK("return thunk at: %pS (%px) len: %d to: %pS",
--- a/arch/x86/kernel/static_call.c
+++ b/arch/x86/kernel/static_call.c
@@ -11,6 +11,13 @@ enum insn_type {
RET = 3, /* tramp / site cond-tail-call */
};
+/*
+ * ud1 %esp, %ecx - a 3 byte #UD that is unique to trampolines, chosen such
+ * that there is no false-positive trampoline identification while also being a
+ * speculation stop.
+ */
+static const u8 tramp_ud[] = { 0x0f, 0xb9, 0xcc };
+
static const u8 retinsn[] = { RET_INSN_OPCODE, 0xcc, 0xcc, 0xcc, 0xcc };
static void __ref __static_call_transform(void *insn, enum insn_type type, void *func)
@@ -32,7 +39,10 @@ static void __ref __static_call_transfor
break;
case RET:
- code = &retinsn;
+ if (cpu_feature_enabled(X86_FEATURE_RETHUNK))
+ code = text_gen_insn(JMP32_INSN_OPCODE, insn, &__x86_return_thunk);
+ else
+ code = &retinsn;
break;
}
@@ -97,3 +107,29 @@ void arch_static_call_transform(void *si
mutex_unlock(&text_mutex);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);
+
+#ifdef CONFIG_RETPOLINE
+/*
+ * This is called by apply_returns() to fix up static call trampolines,
+ * specifically ARCH_DEFINE_STATIC_CALL_NULL_TRAMP which is recorded as
+ * having a return trampoline.
+ *
+ * The problem is that static_call() is available before determining
+ * X86_FEATURE_RETHUNK and, by implication, running alternatives.
+ *
+ * This means that __static_call_transform() above can have overwritten the
+ * return trampoline and we now need to fix things up to be consistent.
+ */
+bool __static_call_fixup(void *tramp, u8 op, void *dest)
+{
+ if (memcmp(tramp+5, tramp_ud, 3)) {
+ /* Not a trampoline site, not our problem. */
+ return false;
+ }
+
+ if (op == RET_INSN_OPCODE || dest == &__x86_return_thunk)
+ __static_call_transform(tramp, RET, NULL);
+
+ return true;
+}
+#endif
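
A minimal sketch (plain C, not kernel code) of the jmp.d32 displacement
arithmetic that apply_returns() relies on above, matching
"dest = addr + insn.length + insn.immediate.value" where insn.length is
5 for an 0xe9 rel32 jump:

#include <stdint.h>

/* Destination encoded by an e9 rel32 jump located at 'addr'. */
static void *jmp_d32_dest(void *addr, int32_t rel32)
{
	return (uint8_t *)addr + 5 + rel32;	/* next-instruction address + displacement */
}

/* rel32 needed to reach 'dest' from an e9 jump placed at 'addr'. */
static int32_t jmp_d32_rel32(void *addr, void *dest)
{
	return (int32_t)((uint8_t *)dest - ((uint8_t *)addr + 5));
}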