Date:	Sun, 23 Mar 2008 01:03:33 -0700
From:	Joe Perches <joe@...ches.com>
To:	Ingo Molnar <mingo@...e.hu>, Thomas Gleixner <tglx@...utronix.de>
Cc:	linux-kernel@...r.kernel.org
Subject: [PATCH 118/148] include/asm-x86/string_32.h: checkpatch cleanups - formatting only

Convert __asm__ __volatile__ to asm volatile, give switch case bodies
their own lines, and add spaces around binary operators.  Formatting
only; no functional change.

Signed-off-by: Joe Perches <joe@...ches.com>
---
 include/asm-x86/string_32.h |  141 +++++++++++++++++++++----------------------
 1 files changed, 70 insertions(+), 71 deletions(-)
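
A stand-alone illustration (hypothetical test file, not part of this
patch): gcc accepts the two spellings interchangeably, so the
conversion below is purely cosmetic.  The dummy outputs are widened to
size_t/void * here so the sketch also builds on x86-64; the kernel
code keeps its original int temporaries.

/* asm-style-demo.c - hypothetical demo, not part of the patch.
 * Build with: gcc -O2 -o demo asm-style-demo.c
 */
#include <stddef.h>
#include <string.h>

/* Old spelling, as removed by this patch. */
static void *memset_old(void *s, char c, size_t count)
{
	size_t d0;
	void *d1;
	__asm__ __volatile__(
		"rep\n\t"
		"stosb"
		: "=&c" (d0), "=&D" (d1)
		: "a" (c), "1" (s), "0" (count)
		: "memory");
	return s;
}

/* New spelling, as introduced by this patch. */
static void *memset_new(void *s, char c, size_t count)
{
	size_t d0;
	void *d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}

int main(void)
{
	char a[16], b[16];

	memset_old(a, 'x', sizeof(a));
	memset_new(b, 'x', sizeof(b));
	return memcmp(a, b, sizeof(a)) != 0;	/* 0 on success */
}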

diff --git a/include/asm-x86/string_32.h b/include/asm-x86/string_32.h
index 32afb63..c18e533 100644
--- a/include/asm-x86/string_32.h
+++ b/include/asm-x86/string_32.h
@@ -3,7 +3,7 @@
 
 #ifdef __KERNEL__
 
-/* Let gcc decide wether to inline or use the out of line functions */
+/* Let gcc decide whether to inline or use the out of line functions */
 
 #define __HAVE_ARCH_STRCPY
 extern char *strcpy(char *dest, const char *src);
@@ -32,16 +32,15 @@ extern size_t strlen(const char *s);
 static __always_inline void *__memcpy(void *to, const void *from, size_t n)
 {
 	int d0, d1, d2;
-	__asm__ __volatile__(
-		"rep ; movsl\n\t"
-		"movl %4,%%ecx\n\t"
-		"andl $3,%%ecx\n\t"
-		"jz 1f\n\t"
-		"rep ; movsb\n\t"
-		"1:"
-		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
-		: "0" (n/4), "g" (n), "1" ((long)to), "2" ((long)from)
-		: "memory");
+	asm volatile("rep ; movsl\n\t"
+		     "movl %4,%%ecx\n\t"
+		     "andl $3,%%ecx\n\t"
+		     "jz 1f\n\t"
+		     "rep ; movsb\n\t"
+		     "1:"
+		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
+		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
+		     : "memory");
 	return to;
 }
 
@@ -74,10 +73,12 @@ static __always_inline void *__constant_memcpy(void *to, const void *from, size_
 		*(int *)to = *(int *)from;
 		*((char *)to + 4) = *((char *)from + 4);
 		return to;
-	case 6: *(int *)to = *(int *)from;
+	case 6:
+		*(int *)to = *(int *)from;
 		*((short *)to + 2) = *((short *)from + 2);
 		return to;
-	case 8: *(int *)to = *(int *)from;
+	case 8:
+		*(int *)to = *(int *)from;
 		*((int *)to + 1) = *((int *)from + 1);
 		return to;
 #endif
@@ -88,54 +89,55 @@ static __always_inline void *__constant_memcpy(void *to, const void *from, size_
 	if (n >= 5 * 4) {
 		/* large block: use rep prefix */
 		int ecx;
-		__asm__ __volatile__(
-			"rep ; movsl"
-			: "=&c" (ecx), "=&D" (edi), "=&S" (esi)
-			: "0" (n/4), "1" (edi), "2" (esi)
-			: "memory"
+		asm volatile("rep ; movsl"
+			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
+			     : "0" (n / 4), "1" (edi), "2" (esi)
+			     : "memory"
 		);
 	} else {
 		/* small block: don't clobber ecx + smaller code */
 		if (n >= 4 * 4)
-			__asm__ __volatile__("movsl"
-					     : "=&D"(edi), "=&S"(esi)
-					     : "0"(edi), "1"(esi)
-					     : "memory");
+			asm volatile("movsl"
+				     : "=&D"(edi), "=&S"(esi)
+				     : "0"(edi), "1"(esi)
+				     : "memory");
 		if (n >= 3 * 4)
-			__asm__ __volatile__("movsl"
-					     : "=&D"(edi), "=&S"(esi)
-					     : "0"(edi), "1"(esi)
-					     : "memory");
+			asm volatile("movsl"
+				     : "=&D"(edi), "=&S"(esi)
+				     : "0"(edi), "1"(esi)
+				     : "memory");
 		if (n >= 2 * 4)
-			__asm__ __volatile__("movsl"
-					     : "=&D"(edi), "=&S"(esi)
-					     : "0"(edi), "1"(esi)
-					     : "memory");
+			asm volatile("movsl"
+				     : "=&D"(edi), "=&S"(esi)
+				     : "0"(edi), "1"(esi)
+				     : "memory");
 		if (n >= 1 * 4)
-			__asm__ __volatile__("movsl"
-					     : "=&D"(edi), "=&S"(esi)
-					     : "0"(edi), "1"(esi)
-					     : "memory");
+			asm volatile("movsl"
+				     : "=&D"(edi), "=&S"(esi)
+				     : "0"(edi), "1"(esi)
+				     : "memory");
 	}
 	switch (n % 4) {
 		/* tail */
 	case 0:
 		return to;
 	case 1:
-		__asm__ __volatile__("movsb"
-				     : "=&D"(edi), "=&S"(esi)
-				     : "0"(edi), "1"(esi)
-				     : "memory");
+		asm volatile("movsb"
+			     : "=&D"(edi), "=&S"(esi)
+			     : "0"(edi), "1"(esi)
+			     : "memory");
 		return to;
-	case 2: __asm__ __volatile__("movsw"
-				     : "=&D"(edi), "=&S"(esi)
-				     : "0"(edi), "1"(esi)
-				     : "memory");
+	case 2:
+		asm volatile("movsw"
+			     : "=&D"(edi), "=&S"(esi)
+			     : "0"(edi), "1"(esi)
+			     : "memory");
 		return to;
-	default: __asm__ __volatile__("movsw\n\tmovsb"
-				      : "=&D"(edi), "=&S"(esi)
-				      : "0"(edi), "1"(esi)
-				      : "memory");
+	default:
+		asm volatile("movsw\n\tmovsb"
+			     : "=&D"(edi), "=&S"(esi)
+			     : "0"(edi), "1"(esi)
+			     : "memory");
 		return to;
 	}
 }
@@ -193,12 +195,11 @@ extern void *memchr(const void *cs, int c, size_t count);
 static inline void *__memset_generic(void *s, char c, size_t count)
 {
 	int d0, d1;
-	__asm__ __volatile__(
-		"rep\n\t"
-		"stosb"
-		: "=&c" (d0), "=&D" (d1)
-		: "a" (c), "1" (s), "0" (count)
-		: "memory");
+	asm volatile("rep\n\t"
+		     "stosb"
+		     : "=&c" (d0), "=&D" (d1)
+		     : "a" (c), "1" (s), "0" (count)
+		     : "memory");
 	return s;
 }
 
@@ -213,18 +214,17 @@ static inline void *__memset_generic(void *s, char c, size_t count)
 static __always_inline void *__constant_c_memset(void *s, unsigned long c, size_t count)
 {
 	int d0, d1;
-	__asm__ __volatile__(
-		"rep ; stosl\n\t"
-		"testb $2,%b3\n\t"
-		"je 1f\n\t"
-		"stosw\n"
-		"1:\ttestb $1,%b3\n\t"
-		"je 2f\n\t"
-		"stosb\n"
-		"2:"
-		: "=&c" (d0), "=&D" (d1)
-		: "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
-		: "memory");
+	asm volatile("rep ; stosl\n\t"
+		     "testb $2,%b3\n\t"
+		     "je 1f\n\t"
+		     "stosw\n"
+		     "1:\ttestb $1,%b3\n\t"
+		     "je 2f\n\t"
+		     "stosb\n"
+		     "2:"
+		     : "=&c" (d0), "=&D" (d1)
+		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
+		     : "memory");
 	return s;
 }
 
@@ -260,13 +260,12 @@ static __always_inline void *__constant_c_and_count_memset(void *s, unsigned lon
 		return s;
 	}
 
-#define COMMON(x)						\
-	__asm__  __volatile__(					\
-		"rep ; stosl"					\
-		x						\
-		: "=&c" (d0), "=&D" (d1)			\
-		: "a" (pattern), "0" (count/4), "1" ((long)s)	\
-		: "memory")
+#define COMMON(x)							\
+	asm volatile("rep ; stosl"					\
+		     x							\
+		     : "=&c" (d0), "=&D" (d1)				\
+		     : "a" (pattern), "0" (count/4), "1" ((long)s)	\
+		     : "memory")
 
 	{
 		int d0, d1;
-- 
1.5.4.rc2
