Date:	Wed, 24 Aug 2011 10:53:10 -0700
From:	Jeremy Fitzhardinge <jeremy@...p.org>
To:	"H. Peter Anvin" <hpa@...or.com>
Cc:	Linus Torvalds <torvalds@...ux-foundation.org>,
	Peter Zijlstra <peterz@...radead.org>,
	Ingo Molnar <mingo@...e.hu>,
	the arch/x86 maintainers <x86@...nel.org>,
	Linux Kernel Mailing List <linux-kernel@...r.kernel.org>,
	Nick Piggin <npiggin@...nel.dk>,
	Jeremy Fitzhardinge <jeremy.fitzhardinge@...rix.com>
Subject: [PATCH 16/18] x86: report xchg/cmpxchg/xadd usage errors consistently

From: Jeremy Fitzhardinge <jeremy.fitzhardinge@...rix.com>

Make sure that passing a variable of an unsupported size causes a consistent
link-time failure.  Previously, using a 64-bit value on a 32-bit system
would cause an assembler error, which isn't easy to correlate with a line
of code.
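
For illustration only (not part of the change itself), the reporting
pattern boils down to something like the following self-contained
sketch; all names in it are made up:

/*
 * Standalone sketch of the trick: the default switch case calls a
 * function that is declared but never defined, so an unsupported
 * operand size shows up as a named link failure ("undefined reference
 * to `__example_wrong_size'") instead of an assembler error.
 */
extern void __example_wrong_size(void);	/* deliberately never defined */

#define example_op(ptr)						\
do {								\
	switch (sizeof(*(ptr))) {				\
	case 1:							\
	case 2:							\
	case 4:							\
		/* supported sizes would be handled here */	\
		break;						\
	default:						\
		__example_wrong_size();				\
	}							\
} while (0)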

Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@...rix.com>
---
 arch/x86/include/asm/cmpxchg.h |   44 +++++++++++++++++++++++++++++----------
 1 files changed, 32 insertions(+), 12 deletions(-)

diff --git a/arch/x86/include/asm/cmpxchg.h b/arch/x86/include/asm/cmpxchg.h
index 57d6706..c99ce79 100644
--- a/arch/x86/include/asm/cmpxchg.h
+++ b/arch/x86/include/asm/cmpxchg.h
@@ -3,8 +3,26 @@
 
 #include <asm/alternative.h> /* Provides LOCK_PREFIX */
 
+/* Non-existent functions to indicate usage errors at link time. */
 extern void __xchg_wrong_size(void);
 extern void __cmpxchg_wrong_size(void);
+extern void __xadd_wrong_size(void);
+
+/*
+ * Constants for operation sizes. On 32-bit, the 64-bit size is set to
+ * -1 because sizeof will never return -1, thereby making that switch
+ * case guaranteed dead code which the compiler will eliminate, and
+ * allowing the "missing symbol in the default case" to indicate a
+ * usage error.
+ */
+#define __X86_CASE_B	1
+#define __X86_CASE_W	2
+#define __X86_CASE_L	4
+#ifdef CONFIG_64BIT
+#define __X86_CASE_Q	8
+#else
+#define	__X86_CASE_Q	-1		/* sizeof will never return -1 */
+#endif
 
 /*
  * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
@@ -16,7 +34,7 @@ extern void __cmpxchg_wrong_size(void);
 ({									\
 	__typeof(*(ptr)) __x = (x);					\
 	switch (size) {							\
-	case 1:								\
+	case __X86_CASE_B:						\
 	{								\
 		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
 		asm volatile("xchgb %0,%1"				\
@@ -25,7 +43,7 @@ extern void __cmpxchg_wrong_size(void);
 			     : "memory");				\
 		break;							\
 	}								\
-	case 2:								\
+	case __X86_CASE_W:						\
 	{								\
 		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
 		asm volatile("xchgw %0,%1"				\
@@ -34,7 +52,7 @@ extern void __cmpxchg_wrong_size(void);
 			     : "memory");				\
 		break;							\
 	}								\
-	case 4:								\
+	case __X86_CASE_L:						\
 	{								\
 		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
 		asm volatile("xchgl %0,%1"				\
@@ -43,7 +61,7 @@ extern void __cmpxchg_wrong_size(void);
 			     : "memory");				\
 		break;							\
 	}								\
-	case 8:								\
+	case __X86_CASE_Q:						\
 	{								\
 		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
 		asm volatile("xchgq %0,%1"				\
@@ -72,7 +90,7 @@ extern void __cmpxchg_wrong_size(void);
 	__typeof__(*(ptr)) __old = (old);				\
 	__typeof__(*(ptr)) __new = (new);				\
 	switch (size) {							\
-	case 1:								\
+	case __X86_CASE_B:						\
 	{								\
 		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
 		asm volatile(lock "cmpxchgb %2,%1; " compare		\
@@ -81,7 +99,7 @@ extern void __cmpxchg_wrong_size(void);
 			     : "memory");				\
 		break;							\
 	}								\
-	case 2:								\
+	case __X86_CASE_W:						\
 	{								\
 		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
 		asm volatile(lock "cmpxchgw %2,%1; " compare		\
@@ -90,7 +108,7 @@ extern void __cmpxchg_wrong_size(void);
 			     : "memory");				\
 		break;							\
 	}								\
-	case 4:								\
+	case __X86_CASE_L:						\
 	{								\
 		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
 		asm volatile(lock "cmpxchgl %2,%1; " compare		\
@@ -99,7 +117,7 @@ extern void __cmpxchg_wrong_size(void);
 			     : "memory");				\
 		break;							\
 	}								\
-	case 8:								\
+	case __X86_CASE_Q:						\
 	{								\
 		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
 		asm volatile(lock "cmpxchgq %2,%1; " compare		\
@@ -178,26 +196,28 @@ extern void __cmpxchg_wrong_size(void);
 #define xadd(ptr, inc)							\
 	do {								\
 		switch (sizeof(*(ptr))) {				\
-		case 1:							\
+		case __X86_CASE_B:					\
 			asm volatile (LOCK_PREFIX "xaddb %b0, %1\n"	\
 				      : "+r" (inc), "+m" (*(ptr))	\
 				      : : "memory", "cc");		\
 			break;						\
-		case 2:							\
+		case __X86_CASE_W:					\
 			asm volatile (LOCK_PREFIX "xaddw %w0, %1\n"	\
 				      : "+r" (inc), "+m" (*(ptr))	\
 				      : : "memory", "cc");		\
 			break;						\
-		case 4:							\
+		case __X86_CASE_L:					\
 			asm volatile (LOCK_PREFIX "xaddl %0, %1\n"	\
 				      : "+r" (inc), "+m" (*(ptr))	\
 				      : : "memory", "cc");		\
 			break;						\
-		case 8:							\
+		case __X86_CASE_Q:					\
 			asm volatile (LOCK_PREFIX "xaddq %q0, %1\n"	\
 				      : "+r" (inc), "+m" (*(ptr))	\
 				      : : "memory", "cc");		\
 			break;						\
+		default:						\
+			__xadd_wrong_size();				\
 		}							\
 	} while(0)
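
For illustration only (again not part of the patch), a hypothetical
misuse on a 32-bit build would now fail with a clearly named symbol
rather than an assembler diagnostic:

/* Hypothetical example, assuming a 32-bit (!CONFIG_64BIT) build. */
u64 big_counter;		/* 8 bytes: matches no __X86_CASE_* constant */

void bump_counter(void)
{
	xadd(&big_counter, 1);	/* hits the new default case, so the link
				 * fails with "undefined reference to
				 * `__xadd_wrong_size'" */
}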
 
-- 
1.7.6
