[<prev] [next>] [<thread-prev] [day] [month] [year] [list]
Message-Id: <20251106094023.1371246-10-yeoreum.yun@arm.com>
Date: Thu, 6 Nov 2025 09:40:23 +0000
From: Yeoreum Yun <yeoreum.yun@....com>
To: catalin.marinas@....com,
will@...nel.org,
maz@...nel.org,
broonie@...nel.org,
oliver.upton@...ux.dev,
miko.lenczewski@....com,
kevin.brodsky@....com,
ardb@...nel.org,
suzuki.poulose@....com,
lpieralisi@...nel.org,
yangyicong@...ilicon.com,
scott@...amperecomputing.com,
joey.gouly@....com,
yuzenghui@...wei.com,
pbonzini@...hat.com,
shuah@...nel.org,
mark.rutland@....com
Cc: linux-arm-kernel@...ts.infradead.org,
linux-kernel@...r.kernel.org,
kvmarm@...ts.linux.dev,
kvm@...r.kernel.org,
linux-kselftest@...r.kernel.org,
Yeoreum Yun <yeoreum.yun@....com>
Subject: [PATCH v11 9/9] arm64: armv8_deprecated: apply FEAT_LSUI for swpX emulation
Apply the FEAT_LSUI unprivileged load/store instructions to emulate the
deprecated AArch32 swpX instructions: SWPT for word-sized SWP, and a
bounded unprivileged compare-and-swap (CAST) loop for byte-sized SWPB.
Signed-off-by: Yeoreum Yun <yeoreum.yun@....com>
---
arch/arm64/kernel/armv8_deprecated.c | 77 +++++++++++++++++++++++++---
1 file changed, 71 insertions(+), 6 deletions(-)
diff --git a/arch/arm64/kernel/armv8_deprecated.c b/arch/arm64/kernel/armv8_deprecated.c
index d15e35f1075c..b8e6d71f766d 100644
--- a/arch/arm64/kernel/armv8_deprecated.c
+++ b/arch/arm64/kernel/armv8_deprecated.c
@@ -13,6 +13,7 @@
#include <linux/uaccess.h>
#include <asm/cpufeature.h>
+#include <asm/lsui.h>
#include <asm/insn.h>
#include <asm/sysreg.h>
#include <asm/system_misc.h>
@@ -86,13 +87,77 @@ static unsigned int __maybe_unused aarch32_check_condition(u32 opcode, u32 psr)
* Rn = address
*/
+/* Arbitrary constant to ensure forward-progress of the loop */
+#define __SWP_LOOPS 4
+
+#ifdef CONFIG_AS_HAS_LSUI
+static __always_inline int
+__lsui_user_swp_asm(unsigned int *data, unsigned int addr)
+{
+ int err = 0;
+ unsigned int temp;
+
+ asm volatile("// __lsui_user_swp_asm\n"
+ __LSUI_PREAMBLE
+ "1: swpt %w1, %w2, [%3]\n"
+ " mov %w1, %w2\n"
+ "2:\n"
+ _ASM_EXTABLE_UACCESS_ERR(1b, 2b, %w0)
+ : "+r" (err), "+r" (*data), "=&r" (temp)
+ : "r" ((unsigned long)addr)
+ : "memory");
+
+ return err;
+}
+
+static __always_inline int
+__lsui_user_swpb_asm(unsigned int *data, unsigned int addr)
+{
+ u8 i, idx;
+ int err = -EAGAIN;
+ u64 __user *addr_al;
+ u64 oldval;
+ union {
+ u64 var;
+ u8 raw[sizeof(u64)];
+ } newval, curval;
+
+ idx = addr & (sizeof(u64) - 1);
+ addr_al = (u64 __user *)ALIGN_DOWN(addr, sizeof(u64));
+
+ for (i = 0; i < __SWP_LOOPS; i++) {
+ if (get_user(oldval, addr_al))
+ return -EFAULT;
+
+ curval.var = newval.var = oldval;
+ newval.raw[idx] = *data;
+
+ asm volatile("// __lsui_user_swpb_asm\n"
+ __LSUI_PREAMBLE
+ "1: cast %x2, %x3, %1\n"
+ "2:\n"
+ _ASM_EXTABLE_UACCESS_ERR(1b, 2b, %w0)
+ : "+r" (err), "+Q" (*addr_al), "+r" (curval.var)
+ : "r" (newval.var)
+ : "memory");
+
+ if (curval.var == oldval) {
+ err = 0;
+ break;
+ }
+ }
+
+ if (!err)
+ *data = curval.raw[idx];
+
+ return err;
+}
+#endif /* CONFIG_AS_HAS_LSUI */
+
/*
* Error-checking SWP macros implemented using ldxr{b}/stxr{b}
*/
-/* Arbitrary constant to ensure forward-progress of the LL/SC loop */
-#define __SWP_LL_SC_LOOPS 4
-
#define LLSC_USER_SWPX(B) \
static __always_inline int \
__llsc_user_swp##B##_asm(unsigned int *data, unsigned int addr) \
@@ -117,7 +182,7 @@ __llsc_user_swp##B##_asm(unsigned int *data, unsigned int addr) \
_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0) \
: "=&r" (err), "+r" (*data), "=&r" (temp), "=&r" (temp2)\
: "r" ((unsigned long)addr), "i" (-EAGAIN), \
- "i" (__SWP_LL_SC_LOOPS) \
+ "i" (__SWP_LOOPS) \
: "memory"); \
uaccess_disable_privileged(); \
\
@@ -128,9 +193,9 @@ LLSC_USER_SWPX()
LLSC_USER_SWPX(b)
#define __user_swp_asm(data, addr) \
- __llsc_user_swp_asm(data, addr)
+ __lsui_llsc_body(user_swp_asm, data, addr)
#define __user_swpb_asm(data, addr) \
- __llsc_user_swpb_asm(data, addr)
+ __lsui_llsc_body(user_swpb_asm, data, addr)
/*
* Bit 22 of the instruction encoding distinguishes between
--
LEVI:{C3F47F37-75D8-414A-A8BA-3980EC8A46D7}
Powered by blists - more mailing lists