Message-Id: <20200904165216.1799796-4-hch@lst.de>
Date:   Fri,  4 Sep 2020 18:52:11 +0200
From:   Christoph Hellwig <hch@....de>
To:     Paul Walmsley <paul.walmsley@...ive.com>,
        Palmer Dabbelt <palmer@...belt.com>,
        Arnd Bergmann <arnd@...db.de>,
        Alexander Viro <viro@...iv.linux.org.uk>
Cc:     linux-riscv@...ts.infradead.org, linux-kernel@...r.kernel.org,
        linux-arch@...r.kernel.org
Subject: [PATCH 3/8] asm-generic: fix unaligned access handling in raw_copy_{from,to}_user

Use get_unaligned and put_unaligned for the small constant-size cases
in the generic uaccess routines.  This ensures they can be used on
architectures that do not support unaligned loads and stores, while
remaining a no-op on those that do.  It also allows dropping the
CONFIG_64BIT guard around the 8-byte case, as the helpers handle u64
accesses on 32-bit architectures as well.
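
For reference, both helpers boil down to an access the compiler can
lower to whatever the target allows: a single load/store where
hardware unaligned access is legal, byte-wise accesses on
strict-alignment targets.  Below is a minimal user-space sketch of the
packed-struct idiom commonly used to implement them; the names are
illustrative, not the kernel's (and the kernel builds with
-fno-strict-aliasing, so a memcpy-based variant is the strictly
portable alternative):

	#include <stdint.h>

	/*
	 * Going through a pointer to a packed struct tells the compiler
	 * that the address may be unaligned, so it never emits a
	 * trapping load or store on strict-alignment targets.
	 */
	#define sketch_get_unaligned(ptr) ({ \
		const struct { __typeof__(*(ptr)) x; } \
			__attribute__((packed)) *__p = (const void *)(ptr); \
		__p->x; \
	})

	#define sketch_put_unaligned(val, ptr) do { \
		struct { __typeof__(*(ptr)) x; } \
			__attribute__((packed)) *__p = (void *)(ptr); \
		__p->x = (val); \
	} while (0)

	/* e.g. a 4-byte load from an odd offset into a byte buffer: */
	uint32_t load32(const unsigned char *buf)
	{
		return sketch_get_unaligned((const uint32_t *)(buf + 1));
	}

On architectures with efficient unaligned access this compiles to the
same single instruction as a plain dereference, so the switch below
costs nothing there.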

Signed-off-by: Christoph Hellwig <hch@....de>
---
 include/asm-generic/uaccess.h | 20 ++++++++------------
 1 file changed, 8 insertions(+), 12 deletions(-)

diff --git a/include/asm-generic/uaccess.h b/include/asm-generic/uaccess.h
index cc3b2c8b68fab4..768502bbfb154e 100644
--- a/include/asm-generic/uaccess.h
+++ b/include/asm-generic/uaccess.h
@@ -36,19 +36,17 @@ raw_copy_from_user(void *to, const void __user * from, unsigned long n)
 	if (__builtin_constant_p(n)) {
 		switch(n) {
 		case 1:
-			*(u8 *)to = *(u8 __force *)from;
+			*(u8 *)to = get_unaligned((u8 __force *)from);
 			return 0;
 		case 2:
-			*(u16 *)to = *(u16 __force *)from;
+			*(u16 *)to = get_unaligned((u16 __force *)from);
 			return 0;
 		case 4:
-			*(u32 *)to = *(u32 __force *)from;
+			*(u32 *)to = get_unaligned((u32 __force *)from);
 			return 0;
-#ifdef CONFIG_64BIT
 		case 8:
-			*(u64 *)to = *(u64 __force *)from;
+			*(u64 *)to = get_unaligned((u64 __force *)from);
 			return 0;
-#endif
 		}
 	}
 
@@ -62,19 +60,17 @@ raw_copy_to_user(void __user *to, const void *from, unsigned long n)
 	if (__builtin_constant_p(n)) {
 		switch(n) {
 		case 1:
-			*(u8 __force *)to = *(u8 *)from;
+			put_unaligned(*(u8 *)from, (u8 __force *)to);
 			return 0;
 		case 2:
-			*(u16 __force *)to = *(u16 *)from;
+			put_unaligned(*(u16 *)from, (u16 __force *)to);
 			return 0;
 		case 4:
-			*(u32 __force *)to = *(u32 *)from;
+			put_unaligned(*(u32 *)from, (u32 __force *)to);
 			return 0;
-#ifdef CONFIG_64BIT
 		case 8:
-			*(u64 __force *)to = *(u64 *)from;
+			put_unaligned(*(u64 *)from, (u64 __force *)to);
 			return 0;
-#endif
 		default:
 			break;
 		}
-- 
2.28.0
