Date:	Wed, 12 Mar 2008 18:34:34 -0700
From:	Joe Perches <joe@...ches.com>
To:	Ingo Molnar <mingo@...e.hu>, Thomas Gleixner <tglx@...utronix.de>
Cc:	LKML <linux-kernel@...r.kernel.org>
Subject: [PATCH] include/asm-x86/string_32.h - style only

Looked at this file because of __memcpy warnings and thought it could use a style/checkpatch cleanup.

No change in vmlinux.

Signed-off-by: Joe Perches <joe@...ches.com>

 include/asm-x86/string_32.h |  298 +++++++++++++++++++++++++------------------
 1 files changed, 171 insertions(+), 127 deletions(-)
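
For anyone skimming the diff, the churn is almost entirely mechanical and maps to a
handful of CodingStyle/checkpatch rules: the '*' binds to the name in pointer
declarations ("void *to", not "void * to"), spaces after commas and around binary
operators, case labels at the same indent level as the switch, and one statement per
line. A minimal before/after illustration (my own example, not taken from the diff):

	/* before */
	static inline void * copy_word(void * to, const void * from)
	{
		switch (sizeof(int)) {
			case 4: *(int*)to = *(int*)from; return to;
		}
		return to;
	}

	/* after */
	static inline void *copy_word(void *to, const void *from)
	{
		switch (sizeof(int)) {
		case 4:
			*(int *)to = *(int *)from;
			return to;
		}
		return to;
	}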

diff --git a/include/asm-x86/string_32.h b/include/asm-x86/string_32.h
index c5d13a8..3cb2b35 100644
--- a/include/asm-x86/string_32.h
+++ b/include/asm-x86/string_32.h
@@ -29,81 +29,114 @@ extern char *strchr(const char *s, int c);
 #define __HAVE_ARCH_STRLEN
 extern size_t strlen(const char *s);
 
-static __always_inline void * __memcpy(void * to, const void * from, size_t n)
+static __always_inline void *__memcpy(void *to, const void *from, size_t n)
 {
-int d0, d1, d2;
-__asm__ __volatile__(
-	"rep ; movsl\n\t"
-	"movl %4,%%ecx\n\t"
-	"andl $3,%%ecx\n\t"
-	"jz 1f\n\t"
-	"rep ; movsb\n\t"
-	"1:"
-	: "=&c" (d0), "=&D" (d1), "=&S" (d2)
-	: "0" (n/4), "g" (n), "1" ((long) to), "2" ((long) from)
-	: "memory");
-return (to);
+	int d0, d1, d2;
+	__asm__ __volatile__(
+		"rep ; movsl\n\t"
+		"movl %4,%%ecx\n\t"
+		"andl $3,%%ecx\n\t"
+		"jz 1f\n\t"
+		"rep ; movsb\n\t"
+		"1:"
+		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
+		: "0" (n/4), "g" (n), "1" ((long)to), "2" ((long)from)
+		: "memory");
+	return to;
 }
 
 /*
  * This looks ugly, but the compiler can optimize it totally,
  * as the count is constant.
  */
-static __always_inline void * __constant_memcpy(void * to, const void * from, size_t n)
+static __always_inline void *__constant_memcpy(void *to, const void *from, size_t n)
 {
 	long esi, edi;
-	if (!n) return to;
+	if (!n)
+		return to;
 #if 1	/* want to do small copies with non-string ops? */
 	switch (n) {
-		case 1: *(char*)to = *(char*)from; return to;
-		case 2: *(short*)to = *(short*)from; return to;
-		case 4: *(int*)to = *(int*)from; return to;
+	case 1:
+		*(char *)to = *(char *)from;
+		return to;
+	case 2:
+		*(short *)to = *(short *)from;
+		return to;
+	case 4:
+		*(int *)to = *(int *)from;
+		return to;
 #if 1	/* including those doable with two moves? */
-		case 3: *(short*)to = *(short*)from;
-			*((char*)to+2) = *((char*)from+2); return to;
-		case 5: *(int*)to = *(int*)from;
-			*((char*)to+4) = *((char*)from+4); return to;
-		case 6: *(int*)to = *(int*)from;
-			*((short*)to+2) = *((short*)from+2); return to;
-		case 8: *(int*)to = *(int*)from;
-			*((int*)to+1) = *((int*)from+1); return to;
+	case 3:
+		*(short *)to = *(short *)from;
+		*((char *)to + 2) = *((char *)from + 2);
+		return to;
+	case 5:
+		*(int *)to = *(int *)from;
+		*((char *)to + 4) = *((char *)from + 4);
+		return to;
+	case 6: *(int *)to = *(int *)from;
+		*((short *)to + 2) = *((short *)from + 2);
+		return to;
+	case 8: *(int *)to = *(int *)from;
+		*((int *)to + 1) = *((int *)from + 1);
+		return to;
 #endif
 	}
 #endif
-	esi = (long) from;
-	edi = (long) to;
-	if (n >= 5*4) {
+	esi = (long)from;
+	edi = (long)to;
+	if (n >= 5 * 4) {
 		/* large block: use rep prefix */
 		int ecx;
 		__asm__ __volatile__(
 			"rep ; movsl"
 			: "=&c" (ecx), "=&D" (edi), "=&S" (esi)
-			: "0" (n/4), "1" (edi),"2" (esi)
+			: "0" (n/4), "1" (edi), "2" (esi)
 			: "memory"
 		);
 	} else {
 		/* small block: don't clobber ecx + smaller code */
-		if (n >= 4*4) __asm__ __volatile__("movsl"
-			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
-		if (n >= 3*4) __asm__ __volatile__("movsl"
-			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
-		if (n >= 2*4) __asm__ __volatile__("movsl"
-			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
-		if (n >= 1*4) __asm__ __volatile__("movsl"
-			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
+		if (n >= 4 * 4)
+			__asm__ __volatile__("movsl"
+					     : "=&D"(edi), "=&S"(esi)
+					     : "0"(edi), "1"(esi)
+					     : "memory");
+		if (n >= 3 * 4)
+			__asm__ __volatile__("movsl"
+					     : "=&D"(edi), "=&S"(esi)
+					     : "0"(edi), "1"(esi)
+					     : "memory");
+		if (n >= 2 * 4)
+			__asm__ __volatile__("movsl"
+					     : "=&D"(edi), "=&S"(esi)
+					     : "0"(edi), "1"(esi)
+					     : "memory");
+		if (n >= 1 * 4)
+			__asm__ __volatile__("movsl"
+					     : "=&D"(edi), "=&S"(esi)
+					     : "0"(edi), "1"(esi)
+					     : "memory");
 	}
 	switch (n % 4) {
 		/* tail */
-		case 0: return to;
-		case 1: __asm__ __volatile__("movsb"
-			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
-			return to;
-		case 2: __asm__ __volatile__("movsw"
-			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
-			return to;
-		default: __asm__ __volatile__("movsw\n\tmovsb"
-			:"=&D"(edi),"=&S"(esi):"0"(edi),"1"(esi):"memory");
-			return to;
+	case 0:
+		return to;
+	case 1:
+		__asm__ __volatile__("movsb"
+				     : "=&D"(edi), "=&S"(esi)
+				     : "0"(edi), "1"(esi)
+				     : "memory");
+		return to;
+	case 2: __asm__ __volatile__("movsw"
+				     : "=&D"(edi), "=&S"(esi)
+				     : "0"(edi), "1"(esi)
+				     : "memory");
+		return to;
+	default: __asm__ __volatile__("movsw\n\tmovsb"
+				      : "=&D"(edi), "=&S"(esi)
+				      : "0"(edi), "1"(esi)
+				      : "memory");
+		return to;
 	}
 }
 
@@ -117,87 +150,87 @@ static __always_inline void * __constant_memcpy(void * to, const void * from, si
  *	This CPU favours 3DNow strongly (eg AMD Athlon)
  */
 
-static inline void * __constant_memcpy3d(void * to, const void * from, size_t len)
+static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
 {
 	if (len < 512)
 		return __constant_memcpy(to, from, len);
 	return _mmx_memcpy(to, from, len);
 }
 
-static __inline__ void *__memcpy3d(void *to, const void *from, size_t len)
+static inline void *__memcpy3d(void *to, const void *from, size_t len)
 {
 	if (len < 512)
 		return __memcpy(to, from, len);
 	return _mmx_memcpy(to, from, len);
 }
 
-#define memcpy(t, f, n) \
-(__builtin_constant_p(n) ? \
- __constant_memcpy3d((t),(f),(n)) : \
- __memcpy3d((t),(f),(n)))
+#define memcpy(t, f, n)				\
+	(__builtin_constant_p((n))		\
+	 ? __constant_memcpy3d((t), (f), (n))	\
+	 : __memcpy3d((t), (f), (n)))
 
 #else
 
 /*
  *	No 3D Now!
  */
- 
-#define memcpy(t, f, n) \
-(__builtin_constant_p(n) ? \
- __constant_memcpy((t),(f),(n)) : \
- __memcpy((t),(f),(n)))
+
+#define memcpy(t, f, n)				\
+	(__builtin_constant_p((n))		\
+	 ? __constant_memcpy((t), (f), (n))	\
+	 : __memcpy((t), (f), (n)))
 
 #endif
 
 #define __HAVE_ARCH_MEMMOVE
-void *memmove(void * dest,const void * src, size_t n);
+void *memmove(void *dest, const void *src, size_t n);
 
 #define memcmp __builtin_memcmp
 
 #define __HAVE_ARCH_MEMCHR
-extern void *memchr(const void * cs,int c,size_t count);
+extern void *memchr(const void *cs, int c, size_t count);
 
-static inline void * __memset_generic(void * s, char c,size_t count)
+static inline void *__memset_generic(void *s, char c, size_t count)
 {
-int d0, d1;
-__asm__ __volatile__(
-	"rep\n\t"
-	"stosb"
-	: "=&c" (d0), "=&D" (d1)
-	:"a" (c),"1" (s),"0" (count)
-	:"memory");
-return s;
+	int d0, d1;
+	__asm__ __volatile__(
+		"rep\n\t"
+		"stosb"
+		: "=&c" (d0), "=&D" (d1)
+		: "a" (c), "1" (s), "0" (count)
+		: "memory");
+	return s;
 }
 
 /* we might want to write optimized versions of these later */
-#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
+#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))
 
 /*
- * memset(x,0,y) is a reasonably common thing to do, so we want to fill
+ * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
  * things 32 bits at a time even when we don't know the size of the
  * area at compile-time..
  */
-static __always_inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
+static __always_inline void *__constant_c_memset(void *s, unsigned long c, size_t count)
 {
-int d0, d1;
-__asm__ __volatile__(
-	"rep ; stosl\n\t"
-	"testb $2,%b3\n\t"
-	"je 1f\n\t"
-	"stosw\n"
-	"1:\ttestb $1,%b3\n\t"
-	"je 2f\n\t"
-	"stosb\n"
-	"2:"
-	:"=&c" (d0), "=&D" (d1)
-	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
-	:"memory");
-return (s);	
+	int d0, d1;
+	__asm__ __volatile__(
+		"rep ; stosl\n\t"
+		"testb $2,%b3\n\t"
+		"je 1f\n\t"
+		"stosw\n"
+		"1:\ttestb $1,%b3\n\t"
+		"je 2f\n\t"
+		"stosb\n"
+		"2:"
+		: "=&c" (d0), "=&D" (d1)
+		: "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
+		: "memory");
+	return s;
 }
 
 /* Added by Gertjan van Wingerde to make minix and sysv module work */
 #define __HAVE_ARCH_STRNLEN
-extern size_t strnlen(const char * s, size_t count);
+extern size_t strnlen(const char *s, size_t count);
 /* end of additional stuff */
 
 #define __HAVE_ARCH_STRSTR
@@ -207,66 +240,77 @@ extern char *strstr(const char *cs, const char *ct);
  * This looks horribly ugly, but the compiler can optimize it totally,
  * as we by now know that both pattern and count is constant..
  */
-static __always_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
+static __always_inline void *__constant_c_and_count_memset(void *s, unsigned long pattern, size_t count)
 {
 	switch (count) {
+	case 0:
+		return s;
+	case 1:
+		*(unsigned char *)s = pattern & 0xff;
+		return s;
+	case 2:
+		*(unsigned short *)s = pattern & 0xffff;
+		return s;
+	case 3:
+		*(unsigned short *)s = pattern & 0xffff;
+		*((unsigned char *)s + 2) = pattern & 0xff;
+		return s;
+	case 4:
+		*(unsigned long *)s = pattern;
+		return s;
+	}
+
+#define COMMON(x)						\
+	__asm__  __volatile__(					\
+		"rep ; stosl"					\
+		x						\
+		: "=&c" (d0), "=&D" (d1)			\
+		: "a" (pattern), "0" (count/4), "1" ((long)s)	\
+		: "memory")
+
+	{
+		int d0, d1;
+		switch (count % 4) {
 		case 0:
+			COMMON("");
 			return s;
 		case 1:
-			*(unsigned char *)s = pattern & 0xff;
+			COMMON("\n\tstosb");
 			return s;
 		case 2:
-			*(unsigned short *)s = pattern & 0xffff;
-			return s;
-		case 3:
-			*(unsigned short *)s = pattern & 0xffff;
-			*(2+(unsigned char *)s) = pattern & 0xff;
+			COMMON("\n\tstosw");
 			return s;
-		case 4:
-			*(unsigned long *)s = pattern;
+		default:
+			COMMON("\n\tstosw\n\tstosb");
 			return s;
+		}
 	}
-#define COMMON(x) \
-__asm__  __volatile__( \
-	"rep ; stosl" \
-	x \
-	: "=&c" (d0), "=&D" (d1) \
-	: "a" (pattern),"0" (count/4),"1" ((long) s) \
-	: "memory")
-{
-	int d0, d1;
-	switch (count % 4) {
-		case 0: COMMON(""); return s;
-		case 1: COMMON("\n\tstosb"); return s;
-		case 2: COMMON("\n\tstosw"); return s;
-		default: COMMON("\n\tstosw\n\tstosb"); return s;
-	}
-}
-  
+
 #undef COMMON
 }
 
-#define __constant_c_x_memset(s, c, count) \
-(__builtin_constant_p(count) ? \
- __constant_c_and_count_memset((s),(c),(count)) : \
- __constant_c_memset((s),(c),(count)))
+#define __constant_c_x_memset(s, c, count)			\
+	(__builtin_constant_p(count)				\
+	 ? __constant_c_and_count_memset((s), (c), (count))	\
+	 : __constant_c_memset((s), (c), (count)))
 
-#define __memset(s, c, count) \
-(__builtin_constant_p(count) ? \
- __constant_count_memset((s),(c),(count)) : \
- __memset_generic((s),(c),(count)))
+#define __memset(s, c, count)				\
+	(__builtin_constant_p(count)			\
+	 ? __constant_count_memset((s), (c), (count))	\
+	 : __memset_generic((s), (c), (count)))
 
 #define __HAVE_ARCH_MEMSET
-#define memset(s, c, count) \
-(__builtin_constant_p(c) ? \
- __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
- __memset((s),(c),(count)))
+#define memset(s, c, count)						\
+	(__builtin_constant_p(c)					\
+	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), \
+				 (count))				\
+	 : __memset((s), (c), (count)))
 
 /*
  * find the first occurrence of byte 'c', or 1 past the area if none
  */
 #define __HAVE_ARCH_MEMSCAN
-extern void *memscan(void * addr, int c, size_t size);
+extern void *memscan(void *addr, int c, size_t size);
 
 #endif /* __KERNEL__ */
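
As an aside for readers new to this header: the reason both __constant_memcpy() and
__memcpy() exist (and likewise the memset variants) is the __builtin_constant_p()
dispatch in the memcpy/memset macros above, so sizes known at compile time collapse
into the small switch-based helpers while runtime sizes take the rep-string path.
A minimal sketch of how a caller hits each path, assuming kernel code built against
this header; the struct and function names are hypothetical:

	#include <linux/string.h>	/* pulls in asm/string_32.h on i386 */

	struct frame {
		char hdr[8];
		char data[120];
	};

	static void fill_frame(struct frame *f, const void *src, size_t len)
	{
		/*
		 * sizeof(f->hdr) is a compile-time constant, so
		 * __builtin_constant_p() is true and this expands to
		 * __constant_memcpy() -- for n == 8 that is just two
		 * 32-bit moves, no call and no rep prefix.
		 */
		memcpy(f->hdr, src, sizeof(f->hdr));

		/*
		 * len is only known at run time (caller ensures
		 * len <= sizeof(f->data)), so this expands to __memcpy()
		 * (or __memcpy3d() on 3DNow! builds) and uses the
		 * "rep ; movsl" + "rep ; movsb" sequence.
		 */
		memcpy(f->data, (const char *)src + sizeof(f->hdr), len);
	}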
 

