Message-Id: <b6707747021ae55d8c37b71f5f4a67b1bacf5e37.1314207974.git.jeremy.fitzhardinge@citrix.com>
Date:	Wed, 24 Aug 2011 10:53:11 -0700
From:	Jeremy Fitzhardinge <jeremy@...p.org>
To:	"H. Peter Anvin" <hpa@...or.com>
Cc:	Linus Torvalds <torvalds@...ux-foundation.org>,
	Peter Zijlstra <peterz@...radead.org>,
	Ingo Molnar <mingo@...e.hu>,
	the arch/x86 maintainers <x86@...nel.org>,
	Linux Kernel Mailing List <linux-kernel@...r.kernel.org>,
	Nick Piggin <npiggin@...nel.dk>,
	Jeremy Fitzhardinge <jeremy.fitzhardinge@...rix.com>
Subject: [PATCH 17/18] x86: add local and sync variants of xadd

From: Jeremy Fitzhardinge <jeremy.fitzhardinge@...rix.com>

Parameterise the xadd() macro on its lock prefix so that, alongside the
default LOCK_PREFIX form (whose lock prefix is patched out when running
on a single CPU), an always-locked xadd_sync() and a never-locked
xadd_local() variant can be defined.

Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@...rix.com>
---
 arch/x86/include/asm/cmpxchg.h |   14 +++++++++-----
 1 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/arch/x86/include/asm/cmpxchg.h b/arch/x86/include/asm/cmpxchg.h
index c99ce79..992ac80 100644
--- a/arch/x86/include/asm/cmpxchg.h
+++ b/arch/x86/include/asm/cmpxchg.h
@@ -193,26 +193,26 @@ extern void __xadd_wrong_size(void);
 	__cmpxchg_local_flag((ptr), (old), (new), sizeof(*ptr))
 #endif
 
-#define xadd(ptr, inc)							\
+#define __xadd(ptr, inc, lock)						\
 	do {								\
 		switch (sizeof(*(ptr))) {				\
 		case __X86_CASE_B:					\
-			asm volatile (LOCK_PREFIX "xaddb %b0, %1\n"	\
+			asm volatile (lock "xaddb %b0, %1\n"		\
 				      : "+r" (inc), "+m" (*(ptr))	\
 				      : : "memory", "cc");		\
 			break;						\
 		case __X86_CASE_W:					\
-			asm volatile (LOCK_PREFIX "xaddw %w0, %1\n"	\
+			asm volatile (lock "xaddw %w0, %1\n"		\
 				      : "+r" (inc), "+m" (*(ptr))	\
 				      : : "memory", "cc");		\
 			break;						\
 		case __X86_CASE_L:					\
-			asm volatile (LOCK_PREFIX "xaddl %0, %1\n"	\
+			asm volatile (lock "xaddl %0, %1\n"		\
 				      : "+r" (inc), "+m" (*(ptr))	\
 				      : : "memory", "cc");		\
 			break;						\
 		case __X86_CASE_Q:					\
-			asm volatile (LOCK_PREFIX "xaddq %q0, %1\n"	\
+			asm volatile (lock "xaddq %q0, %1\n"		\
 				      : "+r" (inc), "+m" (*(ptr))	\
 				      : : "memory", "cc");		\
 			break;						\
@@ -221,4 +221,8 @@ extern void __xadd_wrong_size(void);
 		}							\
 	} while(0)
 
+#define xadd(ptr, inc)		__xadd((ptr), (inc), LOCK_PREFIX)
+#define xadd_sync(ptr, inc)	__xadd((ptr), (inc), "lock; ")
+#define xadd_local(ptr, inc)	__xadd((ptr), (inc), "")
+
 #endif	/* ASM_X86_CMPXCHG_H */
-- 
1.7.6
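
(Not part of the patch -- an illustrative sketch only.)  With the lock
prefix factored out, callers can pick the variant that matches their
concurrency requirements.  The fragment below is hypothetical: the
counters and the fetch_and_add_*() helpers are invented for the example,
and only xadd(), xadd_sync() and xadd_local() come from the patched
cmpxchg.h.

/*
 * Illustrative sketch, not part of this patch.  All identifiers other
 * than xadd(), xadd_sync() and xadd_local() are made up.
 */
#include <asm/cmpxchg.h>

static unsigned long shared_count;	/* updated concurrently by all CPUs */
static unsigned long local_count;	/* assumed only ever touched by one CPU */

static unsigned long fetch_and_add_shared(unsigned long delta)
{
	/*
	 * Default variant: LOCK_PREFIX, so the lock prefix is present on
	 * SMP builds and patched away when running on a single CPU.
	 */
	xadd(&shared_count, delta);
	return delta;		/* xadd hands the old value back in 'delta' */
}

static unsigned long fetch_and_add_sync(unsigned long delta)
{
	/* Always emits "lock", even on UP kernels. */
	xadd_sync(&shared_count, delta);
	return delta;
}

static unsigned long fetch_and_add_local(unsigned long delta)
{
	/* No lock prefix at all: only safe for CPU-local data. */
	xadd_local(&local_count, delta);
	return delta;
}

Because the macro's "inc" operand is read-modify-write ("+r"), the
previous value of the target comes back in the caller's second argument,
which is what makes the fetch-and-add pattern above work.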

