Message-Id: <20190118163913.0414868DCF@newverein.lst.de>
Date: Fri, 18 Jan 2019 17:39:13 +0100 (CET)
From: duwe@....de (Torsten Duwe)
To: Mark Rutland <mark.rutland@....com>,
Will Deacon <will.deacon@....com>,
Catalin Marinas <catalin.marinas@....com>,
Julien Thierry <julien.thierry@....com>,
Steven Rostedt <rostedt@...dmis.org>,
Josh Poimboeuf <jpoimboe@...hat.com>,
Ingo Molnar <mingo@...hat.com>,
Ard Biesheuvel <ard.biesheuvel@...aro.org>,
Arnd Bergmann <arnd@...db.de>,
AKASHI Takahiro <takahiro.akashi@...aro.org>,
Amit Daniel Kachhap <amit.kachhap@....com>
Cc: linux-arm-kernel@...ts.infradead.org, linux-kernel@...r.kernel.org,
live-patching@...r.kernel.org
Subject: [PATCH v7 3/3] arm64: use -fpatchable-function-entry if available
Test whether gcc supports -fpatchable-function-entry and, if so, use it to
promote DYNAMIC_FTRACE to DYNAMIC_FTRACE_WITH_REGS. Amend the generic linker
script so that the new object section holding the patch site locations
(__patchable_function_entries) is collected alongside __mcount_loc, and
define a proper "notrace" attribute that switches the instrumentation off
per function.
Signed-off-by: Torsten Duwe <duwe@...e.de>
---
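[ Illustrative sketch, not part of the patch: with a compiler that accepts
  -fpatchable-function-entry=2 (GCC 8 or later), every function prologue
  gets two NOPs and the address of each patch site is recorded in the
  __patchable_function_entries section, which the vmlinux.lds.h hunk below
  folds into the existing mcount_loc table.  The function names here are
  made up for illustration only. ]

/* build with: gcc -O2 -fpatchable-function-entry=2 -c example.c */
#define notrace __attribute__((patchable_function_entry(0)))

int traced_fn(int x)		/* 2 NOPs emitted, entry recorded    */
{
	return x + 1;
}

notrace int untraced_fn(int x)	/* no NOPs, no entry record emitted  */
{
	return x - 1;
}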
arch/arm64/Kconfig | 2 ++
arch/arm64/Makefile | 5 +++++
include/asm-generic/vmlinux.lds.h | 1 +
include/linux/compiler_types.h | 2 ++
4 files changed, 10 insertions(+)
--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -132,6 +132,8 @@ config ARM64
select HAVE_DEBUG_KMEMLEAK
select HAVE_DMA_CONTIGUOUS
select HAVE_DYNAMIC_FTRACE
+ select HAVE_DYNAMIC_FTRACE_WITH_REGS \
+ if $(cc-option,-fpatchable-function-entry=2)
select HAVE_EFFICIENT_UNALIGNED_ACCESS
select HAVE_FTRACE_MCOUNT_RECORD
select HAVE_FUNCTION_TRACER
--- a/arch/arm64/Makefile
+++ b/arch/arm64/Makefile
@@ -89,6 +89,11 @@ ifeq ($(CONFIG_ARM64_MODULE_PLTS),y)
KBUILD_LDFLAGS_MODULE += -T $(srctree)/arch/arm64/kernel/module.lds
endif
+ifeq ($(CONFIG_DYNAMIC_FTRACE_WITH_REGS),y)
+ KBUILD_CPPFLAGS += -DCC_USING_PATCHABLE_FENTRY
+ CC_FLAGS_FTRACE := -fpatchable-function-entry=2
+endif
+
# Default value
head-y := arch/arm64/kernel/head.o
--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
@@ -113,6 +113,7 @@
#define MCOUNT_REC() . = ALIGN(8); \
__start_mcount_loc = .; \
KEEP(*(__mcount_loc)) \
+ KEEP(*(__patchable_function_entries)) \
__stop_mcount_loc = .;
#else
#define MCOUNT_REC()
--- a/include/linux/compiler_types.h
+++ b/include/linux/compiler_types.h
@@ -112,6 +112,8 @@ struct ftrace_likely_data {
#if defined(CC_USING_HOTPATCH)
#define notrace __attribute__((hotpatch(0, 0)))
+#elif defined(CC_USING_PATCHABLE_FENTRY)
+#define notrace __attribute__((patchable_function_entry(0)))
#else
#define notrace __attribute__((__no_instrument_function__))
#endif
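[ A second sketch, also not part of the patch: the MCOUNT_REC() change only
  widens the __start_mcount_loc..__stop_mcount_loc window that ftrace already
  walks at boot, so no ftrace core changes are needed.  walk_patch_sites() is
  a made-up name and the loop is purely illustrative; the two symbols are the
  ones the linker script provides. ]

extern unsigned long __start_mcount_loc[];
extern unsigned long __stop_mcount_loc[];

static void walk_patch_sites(void (*handle)(unsigned long addr))
{
	unsigned long *p;

	/* each entry is the address of one patchable function entry */
	for (p = __start_mcount_loc; p < __stop_mcount_loc; p++)
		handle(*p);
}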