Message-ID: <152261531782.30503.2451449805347145060.stgit@warthog.procyon.org.uk>
Date: Sun, 01 Apr 2018 21:41:57 +0100
From: David Howells <dhowells@...hat.com>
To: linux-kernel@...r.kernel.org
Subject: [PATCH 17/45] C++: Turn READ_ONCE(), WRITE_ONCE() & co. into inline template functions

Turn READ_ONCE(), WRITE_ONCE() and similar into inline template functions.
This allows the source to be simplified since there doesn't need to be magic
casting and switch-statements inside macroisation.

Signed-off-by: David Howells <dhowells@...hat.com>
---

 include/linux/compiler.h |   93 ++++++++++++++--------------------------------
 1 file changed, 29 insertions(+), 64 deletions(-)

diff --git a/include/linux/compiler.h b/include/linux/compiler.h
index ab4711c63601..f9be82b93e20 100644
--- a/include/linux/compiler.h
+++ b/include/linux/compiler.h
@@ -3,6 +3,8 @@
 #define __LINUX_COMPILER_H
 
 #include <linux/compiler_types.h>
+#include <linux/kasan-checks.h>
+#include <asm/barrier.h>
 
 #ifndef __ASSEMBLY__
 
@@ -168,26 +170,6 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,
 
 #include <uapi/linux/types.h>
 
-#define __READ_ONCE_SIZE \
-({ \
-        switch (size) { \
-        case 1: *(__u8 *)res = *(volatile __u8 *)p; break; \
-        case 2: *(__u16 *)res = *(volatile __u16 *)p; break; \
-        case 4: *(__u32 *)res = *(volatile __u32 *)p; break; \
-        case 8: *(__u64 *)res = *(volatile __u64 *)p; break; \
-        default: \
-                barrier(); \
-                __builtin_memcpy((void *)res, (const void *)p, size); \
-                barrier(); \
-        } \
-})
-
-static __always_inline
-void __read_once_size(const volatile void *p, void *res, int size)
-{
-        __READ_ONCE_SIZE;
-}
-
 #ifdef CONFIG_KASAN
 /*
  * We can't declare function 'inline' because __no_sanitize_address confilcts
@@ -200,26 +182,6 @@ void __read_once_size(const volatile void *p, void *res, int size)
 # define __no_kasan_or_inline __always_inline
 #endif
 
-static __no_kasan_or_inline
-void __read_once_size_nocheck(const volatile void *p, void *res, int size)
-{
-        __READ_ONCE_SIZE;
-}
-
-static __always_inline void __write_once_size(volatile void *p, void *res, int size)
-{
-        switch (size) {
-        case 1: *(volatile __u8 *)p = *(__u8 *)res; break;
-        case 2: *(volatile __u16 *)p = *(__u16 *)res; break;
-        case 4: *(volatile __u32 *)p = *(__u32 *)res; break;
-        case 8: *(volatile __u64 *)p = *(__u64 *)res; break;
-        default:
-                barrier();
-                __builtin_memcpy((void *)p, (const void *)res, size);
-                barrier();
-        }
-}
-
 /*
  * Prevent the compiler from merging or refetching reads or writes. The
  * compiler is also forbidden from reordering successive instances of
@@ -242,27 +204,30 @@ static __always_inline void __write_once_size(volatile void *p, void *res, int s
  * with an explicit memory barrier or atomic instruction that provides the
  * required ordering.
  */
-#include <asm/barrier.h>
-#include <linux/kasan-checks.h>
+template <typename P>
+static inline P READ_ONCE(P &ptr)
+{
+        barrier();
+        P val = *(volatile P *)&ptr;
+        smp_read_barrier_depends(); /* Enforce dependency ordering from x */
+        return val;
+}
 
-#define __READ_ONCE(x, check) \
-({ \
-        union { typeof(x) __val; char __c[1]; } __u; \
-        if (check) \
-                __read_once_size(&(x), __u.__c, sizeof(x)); \
-        else \
-                __read_once_size_nocheck(&(x), __u.__c, sizeof(x)); \
-        smp_read_barrier_depends(); /* Enforce dependency ordering from x */ \
-        __u.__val; \
-})
-#define READ_ONCE(x) __READ_ONCE(x, 1)
+template <typename P>
+static __no_kasan_or_inline P READ_ONCE_NOCHECK(P &ptr)
+{
+        barrier();
+        P val = *(volatile P *)&ptr;
+        smp_read_barrier_depends(); /* Enforce dependency ordering from x */
+        return val;
+}
 
 /*
- * Use READ_ONCE_NOCHECK() instead of READ_ONCE() if you need
- * to hide memory access from KASAN.
+ * This function is not 'inline' because __no_sanitize_address confilcts
+ * with inlining. Attempt to inline it may cause a build failure.
+ * https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67368
+ * '__maybe_unused' allows us to avoid defined-but-not-used warnings.
  */
-#define READ_ONCE_NOCHECK(x) __READ_ONCE(x, 0)
-
 static __no_kasan_or_inline
 unsigned long read_word_at_a_time(const void *addr)
 {
@@ -270,13 +235,13 @@ unsigned long read_word_at_a_time(const void *addr)
         return *(unsigned long *)addr;
 }
 
-#define WRITE_ONCE(x, val) \
-({ \
-        union { typeof(x) __val; char __c[1]; } __u = \
-                { .__val = (__force typeof(x)) (val) }; \
-        __write_once_size(&(x), __u.__c, sizeof(x)); \
-        __u.__val; \
-})
+template <typename P, typename T>
+static inline void WRITE_ONCE(P &ptr, T val)
+{
+        barrier();
+        *(volatile P *)&ptr = val;
+        barrier();
+}
 
 #endif /* __KERNEL__ */
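
For anyone who wants to play with the pattern outside the kernel tree, here is a
minimal user-space sketch of the template-based accessors above. It is not the
patch itself: barrier() is approximated with a compiler-only fence from <atomic>,
smp_read_barrier_depends() is omitted (it is a no-op on everything except Alpha),
and the lower-case names and the main() driver are illustrative only, not taken
from the patch.

/* User-space model of the template READ_ONCE()/WRITE_ONCE() idea.
 * Names below are illustrative, not kernel API. */
#include <atomic>
#include <cstdio>

/* Compiler-only barrier standing in for the kernel's barrier(). */
static inline void compiler_barrier(void)
{
        std::atomic_signal_fence(std::memory_order_seq_cst);
}

/* Model of the READ_ONCE() template: a volatile load so the compiler
 * cannot merge, cache or re-fetch the access. */
template <typename P>
static inline P read_once(P &ptr)
{
        compiler_barrier();
        P val = *(volatile P *)&ptr;
        return val;
}

/* Model of the WRITE_ONCE() template: a volatile store bracketed by
 * compiler barriers, mirroring the hunk above. */
template <typename P, typename T>
static inline void write_once(P &ptr, T val)
{
        compiler_barrier();
        *(volatile P *)&ptr = val;
        compiler_barrier();
}

int main(void)
{
        unsigned long flag = 0;         /* illustrative variable */

        write_once(flag, 1UL);
        unsigned long seen = read_once(flag);
        std::printf("seen=%lu\n", seen);
        return 0;
}

Template argument deduction is what removes the sizeof() switch from
__read_once_size()/__write_once_size(): the compiler derives the access width
from the variable's type, which is the simplification the patch description
refers to.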