Message-Id: <20250826145740.92276-5-luxu.kernel@bytedance.com>
Date: Tue, 26 Aug 2025 22:57:40 +0800
From: Xu Lu <luxu.kernel@...edance.com>
To: paul.walmsley@...ive.com,
palmer@...belt.com,
aou@...s.berkeley.edu,
alex@...ti.fr,
ajones@...tanamicro.com,
brs@...osinc.com
Cc: linux-riscv@...ts.infradead.org,
linux-kernel@...r.kernel.org,
Xu Lu <luxu.kernel@...edance.com>
Subject: [RFC PATCH 4/4] riscv: Use Zalasr for smp_load_acquire/smp_store_release
Replace the fence-based instruction sequences in __smp_load_acquire() and
__smp_store_release() with Zalasr load-acquire/store-release instructions,
selected via ALTERNATIVE() on CPUs that implement the Zalasr extension.
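
For example, with this change a 4-byte __smp_store_release() that
previously assembled to the fence-based sequence

	fence rw, w
	sw %0, 0(%1)

is patched at boot, via ALTERNATIVE(), into the Zalasr store-release
form (a sketch, assuming the SW_RL() macro added earlier in this series
expands to the "sw.rl" mnemonic):

	sw.rl %0, 0(%1)
	nop

The trailing nop keeps both alternative sequences the same size.
__smp_load_acquire() is handled symmetrically: "lw; fence r, rw"
becomes "lw.aq; nop".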
Signed-off-by: Xu Lu <luxu.kernel@...edance.com>
---
arch/riscv/include/asm/barrier.h | 79 +++++++++++++++++++++++++++-----
1 file changed, 68 insertions(+), 11 deletions(-)
diff --git a/arch/riscv/include/asm/barrier.h b/arch/riscv/include/asm/barrier.h
index b8c5726d86acb..b1d2a9a85256d 100644
--- a/arch/riscv/include/asm/barrier.h
+++ b/arch/riscv/include/asm/barrier.h
@@ -51,19 +51,76 @@
*/
#define smp_mb__after_spinlock() RISCV_FENCE(iorw, iorw)

-#define __smp_store_release(p, v)					\
-do {									\
-	compiletime_assert_atomic_type(*p);				\
-	RISCV_FENCE(rw, w);						\
-	WRITE_ONCE(*p, v);						\
+extern void __bad_size_call_parameter(void);
+
+#define __smp_store_release(p, v)					\
+do {									\
+	compiletime_assert_atomic_type(*p);				\
+	switch (sizeof(*p)) {						\
+	case 1:								\
+		asm volatile(ALTERNATIVE("fence rw, w\n\tsb %0, 0(%1)\n",	\
+					 SB_RL(%0, %1) "\n\tnop\n",		\
+					 0, RISCV_ISA_EXT_ZALASR, 1)		\
+			     : : "r" (v), "r" (p) : "memory");		\
+		break;							\
+	case 2:								\
+		asm volatile(ALTERNATIVE("fence rw, w\n\tsh %0, 0(%1)\n",	\
+					 SH_RL(%0, %1) "\n\tnop\n",		\
+					 0, RISCV_ISA_EXT_ZALASR, 1)		\
+			     : : "r" (v), "r" (p) : "memory");		\
+		break;							\
+	case 4:								\
+		asm volatile(ALTERNATIVE("fence rw, w\n\tsw %0, 0(%1)\n",	\
+					 SW_RL(%0, %1) "\n\tnop\n",		\
+					 0, RISCV_ISA_EXT_ZALASR, 1)		\
+			     : : "r" (v), "r" (p) : "memory");		\
+		break;							\
+	case 8:								\
+		asm volatile(ALTERNATIVE("fence rw, w\n\tsd %0, 0(%1)\n",	\
+					 SD_RL(%0, %1) "\n\tnop\n",		\
+					 0, RISCV_ISA_EXT_ZALASR, 1)		\
+			     : : "r" (v), "r" (p) : "memory");		\
+		break;							\
+	default:							\
+		__bad_size_call_parameter();				\
+		break;							\
+	}								\
 } while (0)

-#define __smp_load_acquire(p)						\
-({									\
-	typeof(*p) ___p1 = READ_ONCE(*p);				\
-	compiletime_assert_atomic_type(*p);				\
-	RISCV_FENCE(r, rw);						\
-	___p1;								\
+#define __smp_load_acquire(p)						\
+({									\
+	TYPEOF_UNQUAL(*p) val;						\
+	compiletime_assert_atomic_type(*p);				\
+	switch (sizeof(*p)) {						\
+	case 1:								\
+		asm volatile(ALTERNATIVE("lb %0, 0(%1)\n\tfence r, rw\n",	\
+					 LB_AQ(%0, %1) "\n\tnop\n",		\
+					 0, RISCV_ISA_EXT_ZALASR, 1)		\
+			     : "=r" (val) : "r" (p) : "memory");	\
+		break;							\
+	case 2:								\
+		asm volatile(ALTERNATIVE("lh %0, 0(%1)\n\tfence r, rw\n",	\
+					 LH_AQ(%0, %1) "\n\tnop\n",		\
+					 0, RISCV_ISA_EXT_ZALASR, 1)		\
+			     : "=r" (val) : "r" (p) : "memory");	\
+		break;							\
+	case 4:								\
+		asm volatile(ALTERNATIVE("lw %0, 0(%1)\n\tfence r, rw\n",	\
+					 LW_AQ(%0, %1) "\n\tnop\n",		\
+					 0, RISCV_ISA_EXT_ZALASR, 1)		\
+			     : "=r" (val) : "r" (p) : "memory");	\
+		break;							\
+	case 8:								\
+		asm volatile(ALTERNATIVE("ld %0, 0(%1)\n\tfence r, rw\n",	\
+					 LD_AQ(%0, %1) "\n\tnop\n",		\
+					 0, RISCV_ISA_EXT_ZALASR, 1)		\
+			     : "=r" (val) : "r" (p) : "memory");	\
+		break;							\
+	default:							\
+		__bad_size_call_parameter();				\
+		break;							\
+	}								\
+	val;								\
 })

#ifdef CONFIG_RISCV_ISA_ZAWRS
--
2.20.1