lists.openwall.net   lists  /  announce  owl-users  owl-dev  john-users  john-dev  passwdqc-users  yescrypt  popa3d-users  /  oss-security  kernel-hardening  musl  sabotage  tlsify  passwords  /  crypt-dev  xvendor  /  Bugtraq  Full-Disclosure  linux-kernel  linux-netdev  linux-ext4  linux-hardening  linux-cve-announce  PHC 
Open Source and information security mailing list archives
 
Hash Suite: Windows password security audit tool. GUI, reports in PDF.
[<prev] [next>] [<thread-prev] [thread-next>] [day] [month] [year] [list]
Date:	Wed, 24 Aug 2011 14:37:03 -0700
From:	Jeremy Fitzhardinge <jeremy@...p.org>
To:	"H. Peter Anvin" <hpa@...or.com>
Cc:	Linus Torvalds <torvalds@...ux-foundation.org>,
	Peter Zijlstra <peterz@...radead.org>,
	Ingo Molnar <mingo@...e.hu>,
	the arch/x86 maintainers <x86@...nel.org>,
	Linux Kernel Mailing List <linux-kernel@...r.kernel.org>,
	Nick Piggin <npiggin@...nel.dk>,
	Jeremy Fitzhardinge <jeremy.fitzhardinge@...rix.com>
Subject: [PATCH 06/12] x86: add cmpxchg_flag() variant

From: Jeremy Fitzhardinge <jeremy.fitzhardinge@...rix.com>

Most callers of cmpxchg() directly compare RETURN with OLD to see if it was
successful.  This results in unnecessary comparisons and conditional
branches, since the cmpxchg instruction already sets the flags
to indicate success/failure.

Add cmpxchg_flag() variants which return a boolean flag directly indicating
success.  Unfortunately an asm() statement can't directly export the status
flags, but sete isn't too bad.

Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@...rix.com>
---
 arch/x86/include/asm/cmpxchg.h    |   48 +++++++++++++++++++++++++++++++-----
 arch/x86/include/asm/cmpxchg_32.h |   14 ++++++++++
 2 files changed, 55 insertions(+), 7 deletions(-)

diff --git a/arch/x86/include/asm/cmpxchg.h b/arch/x86/include/asm/cmpxchg.h
index 0d0d9cd..6013247 100644
--- a/arch/x86/include/asm/cmpxchg.h
+++ b/arch/x86/include/asm/cmpxchg.h
@@ -82,18 +82,18 @@ extern void __xadd_wrong_size(void);
 /*
  * Atomic compare and exchange.  Compare OLD with MEM, if identical,
  * store NEW in MEM.  Return the initial value in MEM.  Success is
- * indicated by comparing RETURN with OLD.
+ * determined by "compare"
  */
-#define __raw_cmpxchg(ptr, old, new, size, lock)			\
+#define __raw_cmpxchg_cmp(ptr, old, new, size, lock, rettype, compare)	\
 ({									\
-	__typeof__(*(ptr)) __ret;					\
+	rettype __ret;							\
 	__typeof__(*(ptr)) __old = (old);				\
 	__typeof__(*(ptr)) __new = (new);				\
 	switch (size) {							\
 	case __X86_CASE_B:						\
 	{								\
 		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
-		asm volatile(lock "cmpxchgb %2,%1"			\
+		asm volatile(lock "cmpxchgb %2,%1; " compare		\
 			     : "=a" (__ret), "+m" (*__ptr)		\
 			     : "q" (__new), "0" (__old)			\
 			     : "memory");				\
@@ -102,7 +102,7 @@ extern void __xadd_wrong_size(void);
 	case __X86_CASE_W:						\
 	{								\
 		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
-		asm volatile(lock "cmpxchgw %2,%1"			\
+		asm volatile(lock "cmpxchgw %2,%1; " compare		\
 			     : "=a" (__ret), "+m" (*__ptr)		\
 			     : "r" (__new), "0" (__old)			\
 			     : "memory");				\
@@ -111,7 +111,7 @@ extern void __xadd_wrong_size(void);
 	case __X86_CASE_L:						\
 	{								\
 		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
-		asm volatile(lock "cmpxchgl %2,%1"			\
+		asm volatile(lock "cmpxchgl %2,%1; " compare		\
 			     : "=a" (__ret), "+m" (*__ptr)		\
 			     : "r" (__new), "0" (__old)			\
 			     : "memory");				\
@@ -120,7 +120,7 @@ extern void __xadd_wrong_size(void);
 	case __X86_CASE_Q:						\
 	{								\
 		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
-		asm volatile(lock "cmpxchgq %2,%1"			\
+		asm volatile(lock "cmpxchgq %2,%1; " compare		\
 			     : "=a" (__ret), "+m" (*__ptr)		\
 			     : "r" (__new), "0" (__old)			\
 			     : "memory");				\
@@ -132,15 +132,40 @@ extern void __xadd_wrong_size(void);
 	__ret;								\
 })
 
+/*
+ * Atomic compare and exchange.  Compare OLD with MEM, if identical,
+ * store NEW in MEM.  Return the initial value in MEM.  Success is
+ * indicated by comparing RETURN with OLD.
+ */
+#define __raw_cmpxchg(ptr, old, new, size, lock)			\
+	__raw_cmpxchg_cmp(ptr, old, new, size, lock, __typeof__(*(ptr)), "")
+
+/*
+ * Atomic compare and exchange.  Compare OLD with MEM, if identical,
+ * store NEW in MEM.  Return the initial value in MEM.  Success is
+ * indicated by a true return.
+ */
+#define __raw_cmpxchg_flag(ptr, old, new, size, lock)			\
+	__raw_cmpxchg_cmp(ptr, old, new, size, lock, unsigned char, "sete %0")
+
 #define __cmpxchg(ptr, old, new, size)					\
 	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
 
+#define __cmpxchg_flag(ptr, old, new, size)				\
+	__raw_cmpxchg_flag((ptr), (old), (new), (size), LOCK_PREFIX)
+
 #define __sync_cmpxchg(ptr, old, new, size)				\
 	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")
 
+#define __sync_cmpxchg_flag(ptr, old, new, size)			\
+	__raw_cmpxchg_flag((ptr), (old), (new), (size), "lock; ")
+
 #define __cmpxchg_local(ptr, old, new, size)				\
 	__raw_cmpxchg((ptr), (old), (new), (size), "")
 
+#define __cmpxchg_local_flag(ptr, old, new, size)		\
+	__raw_cmpxchg_flag((ptr), (old), (new), (size), "")
+
 #ifdef CONFIG_X86_32
 # include "cmpxchg_32.h"
 #else
@@ -151,11 +176,20 @@ extern void __xadd_wrong_size(void);
 #define cmpxchg(ptr, old, new)						\
 	__cmpxchg((ptr), (old), (new), sizeof(*ptr))
 
+#define cmpxchg_flag(ptr, old, new)					\
+	__cmpxchg_flag((ptr), (old), (new), sizeof(*ptr))
+
 #define sync_cmpxchg(ptr, old, new)					\
 	__sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))
 
+#define sync_cmpxchg_flag(ptr, old, new)				\
+	__sync_cmpxchg_flag((ptr), (old), (new), sizeof(*ptr))
+
 #define cmpxchg_local(ptr, old, new)					\
 	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))
+
+#define cmpxchg_local_flag(ptr, old, new)				\
+	__cmpxchg_local_flag((ptr), (old), (new), sizeof(*ptr))
 #endif
 
 #define __xadd(ptr, inc, lock)						\
diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 3b573f6..0797bc6 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -111,6 +111,13 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
 				sizeof(*(ptr)));			\
 	__ret;								\
 })
+
+#define cmpxchg_flag(ptr, o, n)			\
+({						\
+	__typeof__(*(ptr)) __orig = (o);	\
+	cmpxchg((ptr), __orig, (n)) == __orig;	\
+})
+
 #define cmpxchg_local(ptr, o, n)					\
 ({									\
 	__typeof__(*(ptr)) __ret;					\
@@ -124,6 +131,13 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
 				sizeof(*(ptr)));			\
 	__ret;								\
 })
+
+#define cmpxchg_local_flag(ptr, o, n)			\
+({							\
+	__typeof__(*(ptr)) __orig = (o);		\
+	cmpxchg_local((ptr), __orig, (n)) == __orig;	\
+})
+
 #endif
 
 #ifndef CONFIG_X86_CMPXCHG64
-- 
1.7.6

--
To unsubscribe from this list: send the line "unsubscribe linux-kernel" in
the body of a message to majordomo@...r.kernel.org
More majordomo info at  http://vger.kernel.org/majordomo-info.html
Please read the FAQ at  http://www.tux.org/lkml/

Powered by blists - more mailing lists

Powered by Openwall GNU/*/Linux Powered by OpenVZ