Message-Id: <20250611151154.46362-4-yeoreum.yun@arm.com>
Date: Wed, 11 Jun 2025 16:11:51 +0100
From: Yeoreum Yun <yeoreum.yun@....com>
To: catalin.marinas@....com,
will@...nel.org,
broonie@...nel.org,
maz@...nel.org,
oliver.upton@...ux.dev,
ardb@...nel.org,
frederic@...nel.org,
james.morse@....com,
joey.gouly@....com,
scott@...amperecomputing.com
Cc: linux-arm-kernel@...ts.infradead.org,
linux-kernel@...r.kernel.org,
Yeoreum Yun <yeoreum.yun@....com>
Subject: [PATCH v2 3/6] arm64/futex: move futex atomic logic that clears the PAN bit
Move the current futex atomic logic, which uses the LL/SC method with
PSTATE.PAN cleared, to a separate file (futex_ll_sc_u.h) so that this
method is used only when FEAT_LSUI isn't supported.
Signed-off-by: Yeoreum Yun <yeoreum.yun@....com>
---
arch/arm64/include/asm/futex_ll_sc_u.h | 115 +++++++++++++++++++++++++
1 file changed, 115 insertions(+)
create mode 100644 arch/arm64/include/asm/futex_ll_sc_u.h
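Note: below is a minimal sketch of how the futex.h caller might dispatch
between the two paths once FEAT_LSUI support is wired up later in this
series. The ARM64_HAS_LSUI cpucap and the __lsui_futex_atomic_add()
helper are assumed names for illustration, not part of this patch:

static __always_inline int
futex_atomic_add(int oparg, u32 __user *uaddr, int *oval)
{
	/* Assumed cpucap name: prefer unprivileged LSUI atomics when present. */
	if (alternative_has_cap_unlikely(ARM64_HAS_LSUI))
		return __lsui_futex_atomic_add(oparg, uaddr, oval);

	/* Fall back to LL/SC with PSTATE.PAN temporarily cleared. */
	return __ll_sc_u_futex_atomic_add(oparg, uaddr, oval);
}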
diff --git a/arch/arm64/include/asm/futex_ll_sc_u.h b/arch/arm64/include/asm/futex_ll_sc_u.h
new file mode 100644
index 000000000000..6702ba66f1b2
--- /dev/null
+++ b/arch/arm64/include/asm/futex_ll_sc_u.h
@@ -0,0 +1,115 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * Copyright (C) 2025 Arm Ltd.
+ */
+#ifndef __ASM_FUTEX_LL_SC_U_H
+#define __ASM_FUTEX_LL_SC_U_H
+
+#include <linux/uaccess.h>
+#include <linux/stringify.h>
+
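+/*
+ * Generate __ll_sc_u_futex_atomic_{add,or,and,eor}(): load-exclusive the
+ * user word, apply asm_op, then store-exclusive the result. The sequence
+ * retries up to LL_SC_MAX_LOOPS times while the store-exclusive fails and
+ * returns -EAGAIN once that budget is exhausted. A faulting user access
+ * is diverted to label 3 by the exception table entries, which place
+ * -EFAULT in %w0 (ret).
+ */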
+#define FUTEX_ATOMIC_OP(op, asm_op) \
+static __always_inline int \
+__ll_sc_u_futex_atomic_##op(int oparg, u32 __user *uaddr, int *oval) \
+{ \
+ unsigned int loops = LL_SC_MAX_LOOPS; \
+ int ret, val, tmp; \
+ \
+ uaccess_enable_privileged(); \
+ asm volatile("// __ll_sc_u_futex_atomic_" #op "\n" \
+ " prfm pstl1strm, %2\n" \
+ "1: ldxr %w1, %2\n" \
+ " " #asm_op " %w3, %w1, %w5\n" \
+ "2: stlxr %w0, %w3, %2\n" \
+ " cbz %w0, 3f\n" \
+ " sub %w4, %w4, %w0\n" \
+ " cbnz %w4, 1b\n" \
+ " mov %w0, %w6\n" \
+ "3:\n" \
+ " dmb ish\n" \
+ _ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0) \
+ _ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0) \
+ : "=&r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), \
+ "+r" (loops) \
+ : "r" (oparg), "Ir" (-EAGAIN) \
+ : "memory"); \
+ uaccess_disable_privileged(); \
+ \
+ if (!ret) \
+ *oval = val; \
+ \
+ return ret; \
+}
+
+FUTEX_ATOMIC_OP(add, add)
+FUTEX_ATOMIC_OP(or, orr)
+FUTEX_ATOMIC_OP(and, and)
+FUTEX_ATOMIC_OP(eor, eor)
+
+#undef FUTEX_ATOMIC_OP
+
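+/*
+ * Exchange: store oparg unconditionally and return the previous value
+ * through *oval, following the same retry and fault-handling scheme as
+ * the helpers above.
+ */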
+static __always_inline int
+__ll_sc_u_futex_atomic_set(int oparg, u32 __user *uaddr, int *oval)
+{
+ unsigned int loops = LL_SC_MAX_LOOPS;
+ int ret, val;
+
+ uaccess_enable_privileged();
+ asm volatile("//__ll_sc_u_futex_xchg\n"
+ " prfm pstl1strm, %2\n"
+ "1: ldxr %w1, %2\n"
+ "2: stlxr %w0, %w4, %2\n"
+ " cbz %w3, 3f\n"
+ " sub %w3, %w3, %w0\n"
+ " cbnz %w3, 1b\n"
+ " mov %w0, %w5\n"
+ "3:\n"
+ " dmb ish\n"
+ _ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)
+ _ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0)
+ : "=&r" (ret), "=&r" (val), "+Q" (*uaddr), "+r" (loops)
+ : "r" (oparg), "Ir" (-EAGAIN)
+ : "memory");
+ uaccess_disable_privileged();
+
+ if (!ret)
+ *oval = val;
+
+ return ret;
+}
+
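+/*
+ * Compare-and-exchange: if the loaded value doesn't match oldval, branch
+ * straight to label 4, skipping both the store and the DMB; ret stays 0
+ * and the caller sees the observed value in *oval. Otherwise follow the
+ * same retry scheme as above, using tmp (%w3) as the store-exclusive
+ * status.
+ */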
+static __always_inline int
+__ll_sc_u_futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
+{
+ int ret = 0;
+ unsigned int loops = LL_SC_MAX_LOOPS;
+ u32 val, tmp;
+
+ uaccess_enable_privileged();
+ asm volatile("//__ll_sc_u_futex_cmpxchg\n"
+ " prfm pstl1strm, %2\n"
+ "1: ldxr %w1, %2\n"
+ " eor %w3, %w1, %w5\n"
+ " cbnz %w3, 4f\n"
+ "2: stlxr %w3, %w6, %2\n"
+ " cbz %w3, 3f\n"
+ " sub %w4, %w4, %w3\n"
+ " cbnz %w4, 1b\n"
+ " mov %w0, %w7\n"
+ "3:\n"
+ " dmb ish\n"
+ "4:\n"
+ _ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
+ _ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
+ : "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
+ : "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
+ : "memory");
+ uaccess_disable_privileged();
+
+ if (!ret)
+ *oval = val;
+
+ return ret;
+}
+
+#endif /* __ASM_FUTEX_LL_SC_U_H */
--
LEVI:{C3F47F37-75D8-414A-A8BA-3980EC8A46D7}