Message-Id: <20201215182805.53913-3-ubizjak@gmail.com>
Date:   Tue, 15 Dec 2020 19:28:04 +0100
From:   Uros Bizjak <ubizjak@...il.com>
To:     x86@...nel.org, kvm@...r.kernel.org, linux-kernel@...r.kernel.org
Cc:     Uros Bizjak <ubizjak@...il.com>,
        Thomas Gleixner <tglx@...utronix.de>,
        Ingo Molnar <mingo@...hat.com>, Borislav Petkov <bp@...en8.de>,
        "H. Peter Anvin" <hpa@...or.com>
Subject: [PATCH 2/3] locking/atomic/x86: Introduce arch_try_cmpxchg64()

Add arch_try_cmpxchg64(), similar to arch_try_cmpxchg(), that
operates on 64-bit operands. The function provides the same
interface for 32-bit and 64-bit targets.
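
For illustration only (not part of the patch): the try-variants
return a bool and, on failure, write the observed value back through
the "expected" pointer, so compare-and-exchange loops no longer need
an explicit reload. A minimal userspace model of the same semantics,
using compiler builtins (try_cmpxchg64_model and add_sketch are
made-up names for this sketch):

#include <stdbool.h>
#include <stdint.h>

/* Model of try_cmpxchg64(): true on success; on failure, *expected
 * is refreshed with the value currently in *ptr. */
static bool try_cmpxchg64_model(uint64_t *ptr, uint64_t *expected,
				uint64_t new)
{
	return __atomic_compare_exchange_n(ptr, expected, new, false,
					   __ATOMIC_SEQ_CST,
					   __ATOMIC_SEQ_CST);
}

/* Typical caller: no reload of 'old' inside the loop, because a
 * failed attempt has already updated it. */
static void add_sketch(uint64_t *p, uint64_t inc)
{
	uint64_t old = __atomic_load_n(p, __ATOMIC_RELAXED);

	while (!try_cmpxchg64_model(p, &old, old + inc))
		;
}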

Signed-off-by: Uros Bizjak <ubizjak@...il.com>
Cc: Thomas Gleixner <tglx@...utronix.de>
Cc: Ingo Molnar <mingo@...hat.com>
Cc: Borislav Petkov <bp@...en8.de>
Cc: "H. Peter Anvin" <hpa@...or.com>
---
 arch/x86/include/asm/cmpxchg_32.h | 64 ++++++++++++++++++++++++++++++++++++++++++++++++++++++----------
 arch/x86/include/asm/cmpxchg_64.h |  6 +++
 2 files changed, 60 insertions(+), 10 deletions(-)

diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 0a7fe0321613..8dcde400244e 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -35,15 +35,6 @@ static inline void set_64bit(volatile u64 *ptr, u64 value)
 		     : "memory");
 }
 
-#ifdef CONFIG_X86_CMPXCHG64
-#define arch_cmpxchg64(ptr, o, n)					\
-	((__typeof__(*(ptr)))__cmpxchg64((ptr), (unsigned long long)(o), \
-					 (unsigned long long)(n)))
-#define arch_cmpxchg64_local(ptr, o, n)					\
-	((__typeof__(*(ptr)))__cmpxchg64_local((ptr), (unsigned long long)(o), \
-					       (unsigned long long)(n)))
-#endif
-
 static inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
 {
 	u64 prev;
@@ -71,6 +62,39 @@ static inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
 }
 
-#ifndef CONFIG_X86_CMPXCHG64
+#ifdef CONFIG_X86_CMPXCHG64
+#define arch_cmpxchg64(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg64((ptr), (unsigned long long)(o), \
+					 (unsigned long long)(n)))
+#define arch_cmpxchg64_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg64_local((ptr), (unsigned long long)(o), \
+					       (unsigned long long)(n)))
+#define __raw_try_cmpxchg64(_ptr, _pold, _new, lock)		\
+({								\
+	bool success;						\
+	__typeof__(_ptr) _old = (__typeof__(_ptr))(_pold);	\
+	__typeof__(*(_ptr)) __old = *_old;			\
+	__typeof__(*(_ptr)) __new = (_new);			\
+	asm volatile(lock "cmpxchg8b %1"			\
+		     CC_SET(z)					\
+		     : CC_OUT(z) (success),			\
+		       "+m" (*_ptr),				\
+		       "+A" (__old)				\
+		     : "b" ((unsigned int)__new),		\
+		       "c" ((unsigned int)(__new>>32))		\
+		     : "memory");				\
+	if (unlikely(!success))					\
+		*_old = __old;					\
+	likely(success);					\
+})
+
+#define __try_cmpxchg64(ptr, pold, new)				\
+	__raw_try_cmpxchg64((ptr), (pold), (new), LOCK_PREFIX)
+
+#define arch_try_cmpxchg64(ptr, pold, new) 			\
+	__try_cmpxchg64((ptr), (pold), (new))
+
+#else
+
 /*
  * Building a kernel capable running on 80386 and 80486. It may be necessary
  * to simulate the cmpxchg8b on the 80386 and 80486 CPU.
@@ -108,6 +132,26 @@ static inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
 		       : "memory");				\
 	__ret; })
 
+#define arch_try_cmpxchg64(ptr, po, n)				\
+({								\
+	bool success;						\
+	__typeof__(ptr) _old = (__typeof__(ptr))(po);		\
+	__typeof__(*(ptr)) __old = *_old;			\
+	__typeof__(*(ptr)) __new = (n);				\
+	alternative_io(LOCK_PREFIX_HERE				\
+			"call cmpxchg8b_emu",			\
+			"lock; cmpxchg8b (%%esi)" ,		\
+		       X86_FEATURE_CX8,				\
+		       "+A" (__old),				\
+		       "S" ((ptr)),				\
+		       "b" ((unsigned int)__new),		\
+		       "c" ((unsigned int)(__new>>32))		\
+		       : "memory");				\
+	success = (__old == *_old);				\
+	if (unlikely(!success))					\
+		*_old = __old;					\
+	likely(success);					\
+})
 #endif
 
 #define system_has_cmpxchg_double() boot_cpu_has(X86_FEATURE_CX8)
diff --git a/arch/x86/include/asm/cmpxchg_64.h b/arch/x86/include/asm/cmpxchg_64.h
index 072e5459fe2f..250187ac8248 100644
--- a/arch/x86/include/asm/cmpxchg_64.h
+++ b/arch/x86/include/asm/cmpxchg_64.h
@@ -19,6 +19,12 @@ static inline void set_64bit(volatile u64 *ptr, u64 val)
 	arch_cmpxchg_local((ptr), (o), (n));				\
 })
 
+#define arch_try_cmpxchg64(ptr, po, n)					\
+({									\
+	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
+	arch_try_cmpxchg((ptr), (po), (n));				\
+})
+
 #define system_has_cmpxchg_double() boot_cpu_has(X86_FEATURE_CX16)
 
 #endif /* _ASM_X86_CMPXCHG_64_H */
-- 
2.26.2
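
As an aside (not part of the patch): the CONFIG_X86_CMPXCHG64 fast
path above relies on cmpxchg8b's fixed register conventions: the
expected value is pinned to EDX:EAX (the "+A" constraint), the new
value to ECX:EBX ("c"/"b"), and ZF reports whether the exchange
happened, which CC_SET(z)/CC_OUT(z) expose as a condition-code
output. A standalone model of the same idiom, compilable in
userspace with gcc -m32 (try_cmpxchg8b_model is a made-up name;
"=@ccz" is the flag-output constraint CC_OUT(z) expands to on
compilers that support it):

#include <stdbool.h>
#include <stdint.h>

static bool try_cmpxchg8b_model(volatile uint64_t *ptr,
				uint64_t *expected, uint64_t new)
{
	bool success;
	uint64_t old = *expected;

	/* ZF=1 and memory updated on success; on failure, EDX:EAX is
	 * reloaded with the current memory value. */
	asm volatile("lock; cmpxchg8b %1"
		     : "=@ccz" (success), "+m" (*ptr), "+A" (old)
		     : "b" ((uint32_t)new), "c" ((uint32_t)(new >> 32))
		     : "memory");
	if (!success)
		*expected = old;	/* publish the value that won */
	return success;
}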

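The fallback path, by contrast, cannot assume the instruction
exists: on 80386/80486 (!CONFIG_X86_CMPXCHG64) builds,
alternative_io() emits "call cmpxchg8b_emu" and lets the kernel's
alternatives machinery patch in a native "lock; cmpxchg8b" at boot
when X86_FEATURE_CX8 is present, keeping the same register contract
(pointer in ESI, expected value in EDX:EAX, new value in ECX:EBX).
The feature bit comes from CPUID leaf 1, EDX bit 8; a userspace
check of the same bit, for illustration (cpu_has_cx8 is a made-up
name):

#include <stdbool.h>
#include <cpuid.h>

/* CPUID.01H:EDX bit 8 advertises CMPXCHG8B (the CX8 feature). */
static bool cpu_has_cx8(void)
{
	unsigned int eax, ebx, ecx, edx;

	if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
		return false;
	return edx & (1u << 8);
}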