Date:	Thu, 14 Apr 2016 11:08:36 +0800
From:	zengzhaoxiu@....com
To:	linux-kernel@...r.kernel.org
Cc:	Zhaoxiu Zeng <zhaoxiu.zeng@...il.com>,
	Thomas Gleixner <tglx@...utronix.de>,
	Ingo Molnar <mingo@...hat.com>,
	"H. Peter Anvin" <hpa@...or.com>, x86@...nel.org,
	Borislav Petkov <bp@...e.de>,
	Denys Vlasenko <dvlasenk@...hat.com>
Subject: [PATCH V3 09/29] Add x86-specific parity functions

From: Zhaoxiu Zeng <zhaoxiu.zeng@...il.com>

Use the alternatives mechanism to select POPCNT when available, lifted from arch_hweight.

Signed-off-by: Zhaoxiu Zeng <zhaoxiu.zeng@...il.com>
---
 arch/x86/include/asm/arch_hweight.h |   5 ++
 arch/x86/include/asm/arch_parity.h  | 117 ++++++++++++++++++++++++++++++++++++
 arch/x86/include/asm/bitops.h       |   4 +-
 3 files changed, 125 insertions(+), 1 deletion(-)
 create mode 100644 arch/x86/include/asm/arch_parity.h
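
For reference, each helper returns 1 when its argument has an odd number of
set bits and 0 otherwise.  A minimal user-space C sketch of the same
semantics (illustrative only, not part of the patch; the function name is
made up):

	#include <stdio.h>

	/* Plain-C reference: fold the word onto itself until one bit is left. */
	static unsigned int parity32_ref(unsigned int w)
	{
		w ^= w >> 16;	/* fold the high half into the low half */
		w ^= w >> 8;
		w ^= w >> 4;
		w ^= w >> 2;
		w ^= w >> 1;
		return w & 1;	/* 1 if the original word had odd parity */
	}

	int main(void)
	{
		printf("%u %u\n", parity32_ref(0x1), parity32_ref(0x3));	/* prints "1 0" */
		return 0;
	}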

diff --git a/arch/x86/include/asm/arch_hweight.h b/arch/x86/include/asm/arch_hweight.h
index 02e799f..c79d50d 100644
--- a/arch/x86/include/asm/arch_hweight.h
+++ b/arch/x86/include/asm/arch_hweight.h
@@ -63,4 +63,9 @@ static __always_inline unsigned long __arch_hweight64(__u64 w)
 }
 #endif /* CONFIG_X86_32 */
 
+#undef POPCNT32
+#undef POPCNT64
+#undef REG_IN
+#undef REG_OUT
+
 #endif
diff --git a/arch/x86/include/asm/arch_parity.h b/arch/x86/include/asm/arch_parity.h
new file mode 100644
index 0000000..c4d08e2
--- /dev/null
+++ b/arch/x86/include/asm/arch_parity.h
@@ -0,0 +1,117 @@
+#ifndef _ASM_X86_PARITY_H
+#define _ASM_X86_PARITY_H
+
+#include <asm/cpufeatures.h>
+
+/*
+ * The generic versions below use the Parity Flag directly.
+ *
+ * Parity flag - Set if the least-significant byte of the
+ *               result contains an even number of 1 bits;
+ *               cleared otherwise.
+ */
+
+static inline unsigned int __arch_parity4(unsigned int w)
+{
+	unsigned int res = 0;
+
+	asm("test $0xf, %1; setpo %b0"
+		: "+q" (res)
+		: "r" (w)
+		: "cc");
+
+	return res;
+}
+
+static inline unsigned int __arch_parity8(unsigned int w)
+{
+	unsigned int res = 0;
+
+	asm("test %1, %1; setpo %b0"
+		: "+q" (res)
+		: "r" (w)
+		: "cc");
+
+	return res;
+}
+
+static inline unsigned int __arch_parity16(unsigned int w)
+{
+	unsigned int res = 0;
+
+	asm("xor %h1, %b1; setpo %b0"
+		: "+q" (res), "+q" (w)
+		: : "cc");
+
+	return res;
+}
+
+#ifdef CONFIG_64BIT
+/* popcnt %eax, %eax -- redundant REX prefix for alignment */
+#define POPCNT32 ".byte 0xf3,0x40,0x0f,0xb8,0xc0"
+/* popcnt %rax, %rax */
+#define POPCNT64 ".byte 0xf3,0x48,0x0f,0xb8,0xc0"
+#else
+/* popcnt %eax, %eax */
+#define POPCNT32 ".byte 0xf3,0x0f,0xb8,0xc0"
+#endif
+
+static __always_inline unsigned int __arch_parity32(unsigned int w)
+{
+	unsigned int res;
+	unsigned int tmp;
+
+	asm(ALTERNATIVE(
+		"	mov	%%eax, %1	\n"
+		"	shr	$16, %%eax	\n"
+		"	xor	%1, %%eax	\n"
+		"	xor	%%ah, %%al	\n"
+		"	mov	$0, %%eax	\n"
+		"	setpo	%%al	\n",
+		POPCNT32 "			\n"
+		"	and	$1, %%eax	\n",
+		X86_FEATURE_POPCNT)
+		: "=a" (res), "=&r" (tmp)
+		: "a" (w)
+		: "cc");
+
+	return res;
+}
+
+#ifdef CONFIG_X86_32
+static inline unsigned int __arch_parity64(__u64 w)
+{
+	return __arch_parity32((u32)w ^ (u32)(w >> 32));
+}
+#else
+static __always_inline unsigned int __arch_parity64(__u64 w)
+{
+	unsigned int res;
+	__u64 tmp;
+
+	asm(ALTERNATIVE(
+		"	mov	%%rax, %1	\n"
+		"	shr	$32, %%rax	\n"
+		"	xor	%k1, %%eax	\n"
+		"	mov	%%eax, %k1	\n"
+		"	shr	$16, %%eax	\n"
+		"	xor	%k1, %%eax	\n"
+		"	xor	%%ah, %%al	\n"
+		"	mov	$0, %%eax	\n"
+		"	setpo	%%al	\n",
+		POPCNT64 "			\n"
+		"	and	$1, %%eax	\n",
+		X86_FEATURE_POPCNT)
+		: "=a" (res), "=&r" (tmp)
+		: "a" (w)
+		: "cc");
+
+	return res;
+}
+#endif /* CONFIG_X86_32 */
+
+#undef POPCNT32
+#undef POPCNT64
+
+#endif
+
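
(Note, not part of the patch: on CPUs advertising X86_FEATURE_POPCNT the
ALTERNATIVE above replaces the shift/xor/setpo sequence with a single popcnt
followed by "and $1".  A user-space C sketch of that computation using the
compiler builtin, for comparison only:)

	#include <stdio.h>

	/* gcc/clang emit a real popcnt instruction with -mpopcnt or -march=native. */
	static unsigned int parity32_popcnt(unsigned int w)
	{
		return __builtin_popcount(w) & 1;	/* 1 if odd number of set bits */
	}

	static unsigned int parity64_popcnt(unsigned long long w)
	{
		return __builtin_popcountll(w) & 1;
	}

	int main(void)
	{
		printf("%u %u\n", parity32_popcnt(0x7), parity64_popcnt(0xffULL));	/* prints "1 0" */
		return 0;
	}
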
diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index 7766d1c..f5b0122 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -498,9 +498,11 @@ static __always_inline int fls64(__u64 x)
 #include <asm-generic/bitops/sched.h>
 
 #include <asm/arch_hweight.h>
-
 #include <asm-generic/bitops/const_hweight.h>
 
+#include <asm/arch_parity.h>
+#include <asm-generic/bitops/const_parity.h>
+
 #include <asm-generic/bitops/le.h>
 
 #include <asm-generic/bitops/ext2-atomic-setbit.h>
-- 
2.5.0

