Message-Id: <20180625105952.3756-11-mark.rutland@arm.com>
Date:   Mon, 25 Jun 2018 11:59:51 +0100
From:   Mark Rutland <mark.rutland@....com>
To:     linux-kernel@...r.kernel.org, will.deacon@....com,
        peterz@...radead.org, boqun.feng@...il.com
Cc:     Mark Rutland <mark.rutland@....com>, Arnd Bergmann <arnd@...db.de>,
        Andrey Ryabinin <aryabinin@...tuozzo.com>,
        Alexander Potapenko <glider@...gle.com>,
        Dmitry Vyukov <dvyukov@...gle.com>
Subject: [PATCHv2 10/11] atomics: switch to generated instrumentation

As a step towards ensuring the atomic* APIs are consistent, let's switch
to wrappers generated by gen-atomic-instrumented.sh, using the same table
used to generate the fallbacks and atomic-long wrappers.

These are checked in rather than generated with Kbuild, since:

* This allows inspection of the atomics with git grep and ctags on a
  pristine tree, which Linus strongly prefers being able to do.

* The fallbacks are not expected to change very often, and are not
  affected by machine details or configuration options, so regenerating
  them for *every* build is somewhat wasteful.

* These are included by files required *very* early in the build process
  (e.g. for generating bounds.h), and we'd rather not complicate the
  top-level Kbuild file.
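
For reference, the header can be regenerated by hand. Assuming the
table lives alongside the scripts as scripts/atomic/atomics.tbl (the
script path below matches the banner in the generated file; treat this
as a sketch rather than a build rule):

  $ scripts/atomic/gen-atomic-instrumented.sh scripts/atomic/atomics.tbl \
        > include/asm-generic/atomic-instrumented.h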

Generating the atomic headers means that the instrumented wrappers will
remain in sync with the rest of the atomic APIs, and we gain all the
ordering variants of each atomic without having to manually expand them
all.

The KASAN checks are automatically generated based on the function
parameters defined in atomics.tbl. Note that try_cmpxchg() now correctly
treats 'old' as a parameter that may be written to, rather than only
read, as the hand-written instrumentation had assumed.
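
For illustration, the try_cmpxchg() entry in the table looks roughly
like the following (a sketch of the table format: an upper-case 'B'
selects a bool-returning op with acquire/release/relaxed variants, 'v'
is the atomic_t pointer, and the 'p:' prefix marks 'old' as a pointer
the op may write through, which is what drives the extra
kasan_check_write()):

  try_cmpxchg	B	v	p:old	i:new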

Other than the change to try_cmpxchg(), existing code should not be
affected by this patch. The patch introduces instrumentation for all
optional atomics (and ordering variants), along with the ifdeffery this
requires, enabling other architectures to make use of the instrumented
atomics.
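
As a sketch of what opting in looks like (hypothetical arch code, not
part of this patch): an architecture provides arch_-prefixed
implementations of the operations it supports, then pulls in the
generated wrappers, e.g.:

  /* arch/foo/include/asm/atomic.h (hypothetical) */
  static __always_inline int arch_atomic_read(const atomic_t *v)
  {
  	return READ_ONCE(v->counter);
  }
  /* ... remaining arch_atomic*() and arch_cmpxchg*() ops ... */
  #include <asm-generic/atomic-instrumented.h>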

Signed-off-by: Mark Rutland <mark.rutland@....com>
Cc: Boqun Feng <boqun.feng@...il.com>
Cc: Peter Zijlstra <peterz@...radead.org>
Cc: Will Deacon <will.deacon@....com>
Cc: Arnd Bergmann <arnd@...db.de>
Cc: Andrey Ryabinin <aryabinin@...tuozzo.com>
Cc: Alexander Potapenko <glider@...gle.com>
Cc: Dmitry Vyukov <dvyukov@...gle.com>
---
 include/asm-generic/atomic-instrumented.h | 1688 +++++++++++++++++++++++++----
 1 file changed, 1504 insertions(+), 184 deletions(-)

diff --git a/include/asm-generic/atomic-instrumented.h b/include/asm-generic/atomic-instrumented.h
index 0d4b1d3dbc1e..3e126306c33e 100644
--- a/include/asm-generic/atomic-instrumented.h
+++ b/include/asm-generic/atomic-instrumented.h
@@ -1,3 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0
+
+// Generated by ./scripts/atomic/gen-atomic-instrumented.sh
+// DO NOT MODIFY THIS FILE DIRECTLY
+
 /*
  * This file provides wrappers with KASAN instrumentation for atomic operations.
  * To use this functionality an arch's atomic.h file needs to define all
@@ -9,459 +14,1774 @@
  * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
  * double instrumentation.
  */
+#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
+#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
+
+#include <linux/build_bug.h>
+#include <linux/kasan-checks.h>
+
+static inline int
+atomic_read(const atomic_t *v)
+{
+	kasan_check_read(v, sizeof(*v));
+	return arch_atomic_read(v);
+}
+#define atomic_read atomic_read
+
+#if defined(arch_atomic_read_acquire)
+static inline int
+atomic_read_acquire(const atomic_t *v)
+{
+	kasan_check_read(v, sizeof(*v));
+	return arch_atomic_read_acquire(v);
+}
+#define atomic_read_acquire atomic_read_acquire
+#endif
+
+static inline void
+atomic_set(atomic_t *v, int i)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_set(v, i);
+}
+#define atomic_set atomic_set
+
+#if defined(arch_atomic_set_release)
+static inline void
+atomic_set_release(atomic_t *v, int i)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_set_release(v, i);
+}
+#define atomic_set_release atomic_set_release
+#endif
+
+static inline void
+atomic_add(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_add(i, v);
+}
+#define atomic_add atomic_add
+
+#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
+static inline int
+atomic_add_return(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_add_return(i, v);
+}
+#define atomic_add_return atomic_add_return
+#endif
+
+#if defined(arch_atomic_add_return_acquire)
+static inline int
+atomic_add_return_acquire(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_add_return_acquire(i, v);
+}
+#define atomic_add_return_acquire atomic_add_return_acquire
+#endif
+
+#if defined(arch_atomic_add_return_release)
+static inline int
+atomic_add_return_release(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_add_return_release(i, v);
+}
+#define atomic_add_return_release atomic_add_return_release
+#endif
+
+#if defined(arch_atomic_add_return_relaxed)
+static inline int
+atomic_add_return_relaxed(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_add_return_relaxed(i, v);
+}
+#define atomic_add_return_relaxed atomic_add_return_relaxed
+#endif
+
+#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
+static inline int
+atomic_fetch_add(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_add(i, v);
+}
+#define atomic_fetch_add atomic_fetch_add
+#endif
+
+#if defined(arch_atomic_fetch_add_acquire)
+static inline int
+atomic_fetch_add_acquire(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_add_acquire(i, v);
+}
+#define atomic_fetch_add_acquire atomic_fetch_add_acquire
+#endif
+
+#if defined(arch_atomic_fetch_add_release)
+static inline int
+atomic_fetch_add_release(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_add_release(i, v);
+}
+#define atomic_fetch_add_release atomic_fetch_add_release
+#endif
+
+#if defined(arch_atomic_fetch_add_relaxed)
+static inline int
+atomic_fetch_add_relaxed(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_add_relaxed(i, v);
+}
+#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
+#endif
+
+static inline void
+atomic_sub(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_sub(i, v);
+}
+#define atomic_sub atomic_sub
+
+#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
+static inline int
+atomic_sub_return(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_sub_return(i, v);
+}
+#define atomic_sub_return atomic_sub_return
+#endif
+
+#if defined(arch_atomic_sub_return_acquire)
+static inline int
+atomic_sub_return_acquire(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_sub_return_acquire(i, v);
+}
+#define atomic_sub_return_acquire atomic_sub_return_acquire
+#endif
+
+#if defined(arch_atomic_sub_return_release)
+static inline int
+atomic_sub_return_release(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_sub_return_release(i, v);
+}
+#define atomic_sub_return_release atomic_sub_return_release
+#endif
+
+#if defined(arch_atomic_sub_return_relaxed)
+static inline int
+atomic_sub_return_relaxed(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_sub_return_relaxed(i, v);
+}
+#define atomic_sub_return_relaxed atomic_sub_return_relaxed
+#endif
+
+#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
+static inline int
+atomic_fetch_sub(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_sub(i, v);
+}
+#define atomic_fetch_sub atomic_fetch_sub
+#endif
+
+#if defined(arch_atomic_fetch_sub_acquire)
+static inline int
+atomic_fetch_sub_acquire(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_sub_acquire(i, v);
+}
+#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
+#endif
+
+#if defined(arch_atomic_fetch_sub_release)
+static inline int
+atomic_fetch_sub_release(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_sub_release(i, v);
+}
+#define atomic_fetch_sub_release atomic_fetch_sub_release
+#endif
+
+#if defined(arch_atomic_fetch_sub_relaxed)
+static inline int
+atomic_fetch_sub_relaxed(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_sub_relaxed(i, v);
+}
+#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
+#endif
+
+#if defined(arch_atomic_inc)
+static inline void
+atomic_inc(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_inc(v);
+}
+#define atomic_inc atomic_inc
+#endif
+
+#if defined(arch_atomic_inc_return)
+static inline int
+atomic_inc_return(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_inc_return(v);
+}
+#define atomic_inc_return atomic_inc_return
+#endif
+
+#if defined(arch_atomic_inc_return_acquire)
+static inline int
+atomic_inc_return_acquire(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_inc_return_acquire(v);
+}
+#define atomic_inc_return_acquire atomic_inc_return_acquire
+#endif
+
+#if defined(arch_atomic_inc_return_release)
+static inline int
+atomic_inc_return_release(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_inc_return_release(v);
+}
+#define atomic_inc_return_release atomic_inc_return_release
+#endif
+
+#if defined(arch_atomic_inc_return_relaxed)
+static inline int
+atomic_inc_return_relaxed(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_inc_return_relaxed(v);
+}
+#define atomic_inc_return_relaxed atomic_inc_return_relaxed
+#endif
+
+#if defined(arch_atomic_fetch_inc)
+static inline int
+atomic_fetch_inc(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_inc(v);
+}
+#define atomic_fetch_inc atomic_fetch_inc
+#endif
+
+#if defined(arch_atomic_fetch_inc_acquire)
+static inline int
+atomic_fetch_inc_acquire(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_inc_acquire(v);
+}
+#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
+#endif
+
+#if defined(arch_atomic_fetch_inc_release)
+static inline int
+atomic_fetch_inc_release(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_inc_release(v);
+}
+#define atomic_fetch_inc_release atomic_fetch_inc_release
+#endif
+
+#if defined(arch_atomic_fetch_inc_relaxed)
+static inline int
+atomic_fetch_inc_relaxed(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_inc_relaxed(v);
+}
+#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
+#endif
+
+#if defined(arch_atomic_dec)
+static inline void
+atomic_dec(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_dec(v);
+}
+#define atomic_dec atomic_dec
+#endif
+
+#if defined(arch_atomic_dec_return)
+static inline int
+atomic_dec_return(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_dec_return(v);
+}
+#define atomic_dec_return atomic_dec_return
+#endif
+
+#if defined(arch_atomic_dec_return_acquire)
+static inline int
+atomic_dec_return_acquire(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_dec_return_acquire(v);
+}
+#define atomic_dec_return_acquire atomic_dec_return_acquire
+#endif
+
+#if defined(arch_atomic_dec_return_release)
+static inline int
+atomic_dec_return_release(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_dec_return_release(v);
+}
+#define atomic_dec_return_release atomic_dec_return_release
+#endif
+
+#if defined(arch_atomic_dec_return_relaxed)
+static inline int
+atomic_dec_return_relaxed(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_dec_return_relaxed(v);
+}
+#define atomic_dec_return_relaxed atomic_dec_return_relaxed
+#endif
+
+#if defined(arch_atomic_fetch_dec)
+static inline int
+atomic_fetch_dec(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_dec(v);
+}
+#define atomic_fetch_dec atomic_fetch_dec
+#endif
+
+#if defined(arch_atomic_fetch_dec_acquire)
+static inline int
+atomic_fetch_dec_acquire(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_dec_acquire(v);
+}
+#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
+#endif
+
+#if defined(arch_atomic_fetch_dec_release)
+static inline int
+atomic_fetch_dec_release(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_dec_release(v);
+}
+#define atomic_fetch_dec_release atomic_fetch_dec_release
+#endif
+
+#if defined(arch_atomic_fetch_dec_relaxed)
+static inline int
+atomic_fetch_dec_relaxed(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_dec_relaxed(v);
+}
+#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
+#endif
+
+static inline void
+atomic_and(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_and(i, v);
+}
+#define atomic_and atomic_and
+
+#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
+static inline int
+atomic_fetch_and(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_and(i, v);
+}
+#define atomic_fetch_and atomic_fetch_and
+#endif
+
+#if defined(arch_atomic_fetch_and_acquire)
+static inline int
+atomic_fetch_and_acquire(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_and_acquire(i, v);
+}
+#define atomic_fetch_and_acquire atomic_fetch_and_acquire
+#endif
+
+#if defined(arch_atomic_fetch_and_release)
+static inline int
+atomic_fetch_and_release(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_and_release(i, v);
+}
+#define atomic_fetch_and_release atomic_fetch_and_release
+#endif
+
+#if defined(arch_atomic_fetch_and_relaxed)
+static inline int
+atomic_fetch_and_relaxed(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_and_relaxed(i, v);
+}
+#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
+#endif
+
+#if defined(arch_atomic_andnot)
+static inline void
+atomic_andnot(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_andnot(i, v);
+}
+#define atomic_andnot atomic_andnot
+#endif
+
+#if defined(arch_atomic_fetch_andnot)
+static inline int
+atomic_fetch_andnot(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_andnot(i, v);
+}
+#define atomic_fetch_andnot atomic_fetch_andnot
+#endif
+
+#if defined(arch_atomic_fetch_andnot_acquire)
+static inline int
+atomic_fetch_andnot_acquire(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_andnot_acquire(i, v);
+}
+#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
+#endif
+
+#if defined(arch_atomic_fetch_andnot_release)
+static inline int
+atomic_fetch_andnot_release(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_andnot_release(i, v);
+}
+#define atomic_fetch_andnot_release atomic_fetch_andnot_release
+#endif
+
+#if defined(arch_atomic_fetch_andnot_relaxed)
+static inline int
+atomic_fetch_andnot_relaxed(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_andnot_relaxed(i, v);
+}
+#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
+#endif
+
+static inline void
+atomic_or(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_or(i, v);
+}
+#define atomic_or atomic_or
+
+#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
+static inline int
+atomic_fetch_or(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_or(i, v);
+}
+#define atomic_fetch_or atomic_fetch_or
+#endif
+
+#if defined(arch_atomic_fetch_or_acquire)
+static inline int
+atomic_fetch_or_acquire(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_or_acquire(i, v);
+}
+#define atomic_fetch_or_acquire atomic_fetch_or_acquire
+#endif
+
+#if defined(arch_atomic_fetch_or_release)
+static inline int
+atomic_fetch_or_release(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_or_release(i, v);
+}
+#define atomic_fetch_or_release atomic_fetch_or_release
+#endif
+
+#if defined(arch_atomic_fetch_or_relaxed)
+static inline int
+atomic_fetch_or_relaxed(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_or_relaxed(i, v);
+}
+#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
+#endif
+
+static inline void
+atomic_xor(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic_xor(i, v);
+}
+#define atomic_xor atomic_xor
+
+#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
+static inline int
+atomic_fetch_xor(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_xor(i, v);
+}
+#define atomic_fetch_xor atomic_fetch_xor
+#endif
+
+#if defined(arch_atomic_fetch_xor_acquire)
+static inline int
+atomic_fetch_xor_acquire(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_xor_acquire(i, v);
+}
+#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
+#endif
+
+#if defined(arch_atomic_fetch_xor_release)
+static inline int
+atomic_fetch_xor_release(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_xor_release(i, v);
+}
+#define atomic_fetch_xor_release atomic_fetch_xor_release
+#endif
+
+#if defined(arch_atomic_fetch_xor_relaxed)
+static inline int
+atomic_fetch_xor_relaxed(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_xor_relaxed(i, v);
+}
+#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
+#endif
+
+#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
+static inline int
+atomic_xchg(atomic_t *v, int i)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_xchg(v, i);
+}
+#define atomic_xchg atomic_xchg
+#endif
+
+#if defined(arch_atomic_xchg_acquire)
+static inline int
+atomic_xchg_acquire(atomic_t *v, int i)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_xchg_acquire(v, i);
+}
+#define atomic_xchg_acquire atomic_xchg_acquire
+#endif
+
+#if defined(arch_atomic_xchg_release)
+static inline int
+atomic_xchg_release(atomic_t *v, int i)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_xchg_release(v, i);
+}
+#define atomic_xchg_release atomic_xchg_release
+#endif
+
+#if defined(arch_atomic_xchg_relaxed)
+static inline int
+atomic_xchg_relaxed(atomic_t *v, int i)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_xchg_relaxed(v, i);
+}
+#define atomic_xchg_relaxed atomic_xchg_relaxed
+#endif
+
+#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
+static inline int
+atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_cmpxchg(v, old, new);
+}
+#define atomic_cmpxchg atomic_cmpxchg
+#endif
+
+#if defined(arch_atomic_cmpxchg_acquire)
+static inline int
+atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_cmpxchg_acquire(v, old, new);
+}
+#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
+#endif
+
+#if defined(arch_atomic_cmpxchg_release)
+static inline int
+atomic_cmpxchg_release(atomic_t *v, int old, int new)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_cmpxchg_release(v, old, new);
+}
+#define atomic_cmpxchg_release atomic_cmpxchg_release
+#endif
+
+#if defined(arch_atomic_cmpxchg_relaxed)
+static inline int
+atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_cmpxchg_relaxed(v, old, new);
+}
+#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
+#endif
+
+#if defined(arch_atomic_try_cmpxchg)
+static inline bool
+atomic_try_cmpxchg(atomic_t *v, int *old, int new)
+{
+	kasan_check_write(v, sizeof(*v));
+	kasan_check_write(old, sizeof(*old));
+	return arch_atomic_try_cmpxchg(v, old, new);
+}
+#define atomic_try_cmpxchg atomic_try_cmpxchg
+#endif
+
+#if defined(arch_atomic_try_cmpxchg_acquire)
+static inline bool
+atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
+{
+	kasan_check_write(v, sizeof(*v));
+	kasan_check_write(old, sizeof(*old));
+	return arch_atomic_try_cmpxchg_acquire(v, old, new);
+}
+#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
+#endif
+
+#if defined(arch_atomic_try_cmpxchg_release)
+static inline bool
+atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
+{
+	kasan_check_write(v, sizeof(*v));
+	kasan_check_write(old, sizeof(*old));
+	return arch_atomic_try_cmpxchg_release(v, old, new);
+}
+#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
+#endif
+
+#if defined(arch_atomic_try_cmpxchg_relaxed)
+static inline bool
+atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
+{
+	kasan_check_write(v, sizeof(*v));
+	kasan_check_write(old, sizeof(*old));
+	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
+}
+#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
+#endif
+
+#if defined(arch_atomic_sub_and_test)
+static inline bool
+atomic_sub_and_test(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_sub_and_test(i, v);
+}
+#define atomic_sub_and_test atomic_sub_and_test
+#endif
+
+#if defined(arch_atomic_dec_and_test)
+static inline bool
+atomic_dec_and_test(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_dec_and_test(v);
+}
+#define atomic_dec_and_test atomic_dec_and_test
+#endif
+
+#if defined(arch_atomic_inc_and_test)
+static inline bool
+atomic_inc_and_test(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_inc_and_test(v);
+}
+#define atomic_inc_and_test atomic_inc_and_test
+#endif
+
+#if defined(arch_atomic_add_negative)
+static inline bool
+atomic_add_negative(int i, atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_add_negative(i, v);
+}
+#define atomic_add_negative atomic_add_negative
+#endif
+
+#if defined(arch_atomic_fetch_add_unless)
+static inline int
+atomic_fetch_add_unless(atomic_t *v, int a, int u)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_fetch_add_unless(v, a, u);
+}
+#define atomic_fetch_add_unless atomic_fetch_add_unless
+#endif
+
+#if defined(arch_atomic_add_unless)
+static inline bool
+atomic_add_unless(atomic_t *v, int a, int u)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_add_unless(v, a, u);
+}
+#define atomic_add_unless atomic_add_unless
+#endif
+
+#if defined(arch_atomic_inc_not_zero)
+static inline bool
+atomic_inc_not_zero(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_inc_not_zero(v);
+}
+#define atomic_inc_not_zero atomic_inc_not_zero
+#endif
+
+#if defined(arch_atomic_inc_unless_negative)
+static inline bool
+atomic_inc_unless_negative(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_inc_unless_negative(v);
+}
+#define atomic_inc_unless_negative atomic_inc_unless_negative
+#endif
+
+#if defined(arch_atomic_dec_unless_positive)
+static inline bool
+atomic_dec_unless_positive(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_dec_unless_positive(v);
+}
+#define atomic_dec_unless_positive atomic_dec_unless_positive
+#endif
+
+#if defined(arch_atomic_dec_if_positive)
+static inline int
+atomic_dec_if_positive(atomic_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic_dec_if_positive(v);
+}
+#define atomic_dec_if_positive atomic_dec_if_positive
+#endif
+
+static inline s64
+atomic64_read(const atomic64_t *v)
+{
+	kasan_check_read(v, sizeof(*v));
+	return arch_atomic64_read(v);
+}
+#define atomic64_read atomic64_read
+
+#if defined(arch_atomic64_read_acquire)
+static inline s64
+atomic64_read_acquire(const atomic64_t *v)
+{
+	kasan_check_read(v, sizeof(*v));
+	return arch_atomic64_read_acquire(v);
+}
+#define atomic64_read_acquire atomic64_read_acquire
+#endif
+
+static inline void
+atomic64_set(atomic64_t *v, s64 i)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic64_set(v, i);
+}
+#define atomic64_set atomic64_set
+
+#if defined(arch_atomic64_set_release)
+static inline void
+atomic64_set_release(atomic64_t *v, s64 i)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic64_set_release(v, i);
+}
+#define atomic64_set_release atomic64_set_release
+#endif
+
+static inline void
+atomic64_add(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic64_add(i, v);
+}
+#define atomic64_add atomic64_add
+
+#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
+static inline s64
+atomic64_add_return(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_add_return(i, v);
+}
+#define atomic64_add_return atomic64_add_return
+#endif
+
+#if defined(arch_atomic64_add_return_acquire)
+static inline s64
+atomic64_add_return_acquire(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_add_return_acquire(i, v);
+}
+#define atomic64_add_return_acquire atomic64_add_return_acquire
+#endif
+
+#if defined(arch_atomic64_add_return_release)
+static inline s64
+atomic64_add_return_release(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_add_return_release(i, v);
+}
+#define atomic64_add_return_release atomic64_add_return_release
+#endif
+
+#if defined(arch_atomic64_add_return_relaxed)
+static inline s64
+atomic64_add_return_relaxed(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_add_return_relaxed(i, v);
+}
+#define atomic64_add_return_relaxed atomic64_add_return_relaxed
+#endif
+
+#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
+static inline s64
+atomic64_fetch_add(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_add(i, v);
+}
+#define atomic64_fetch_add atomic64_fetch_add
+#endif
+
+#if defined(arch_atomic64_fetch_add_acquire)
+static inline s64
+atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_add_acquire(i, v);
+}
+#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
+#endif
+
+#if defined(arch_atomic64_fetch_add_release)
+static inline s64
+atomic64_fetch_add_release(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_add_release(i, v);
+}
+#define atomic64_fetch_add_release atomic64_fetch_add_release
+#endif
+
+#if defined(arch_atomic64_fetch_add_relaxed)
+static inline s64
+atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_add_relaxed(i, v);
+}
+#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
+#endif
+
+static inline void
+atomic64_sub(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	arch_atomic64_sub(i, v);
+}
+#define atomic64_sub atomic64_sub
 
-#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
-#define _LINUX_ATOMIC_INSTRUMENTED_H
+#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
+static inline s64
+atomic64_sub_return(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_sub_return(i, v);
+}
+#define atomic64_sub_return atomic64_sub_return
+#endif
 
-#include <linux/build_bug.h>
-#include <linux/kasan-checks.h>
+#if defined(arch_atomic64_sub_return_acquire)
+static inline s64
+atomic64_sub_return_acquire(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_sub_return_acquire(i, v);
+}
+#define atomic64_sub_return_acquire atomic64_sub_return_acquire
+#endif
 
-static __always_inline int atomic_read(const atomic_t *v)
+#if defined(arch_atomic64_sub_return_release)
+static inline s64
+atomic64_sub_return_release(s64 i, atomic64_t *v)
 {
-	kasan_check_read(v, sizeof(*v));
-	return arch_atomic_read(v);
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_sub_return_release(i, v);
 }
+#define atomic64_sub_return_release atomic64_sub_return_release
+#endif
 
-static __always_inline s64 atomic64_read(const atomic64_t *v)
+#if defined(arch_atomic64_sub_return_relaxed)
+static inline s64
+atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
 {
-	kasan_check_read(v, sizeof(*v));
-	return arch_atomic64_read(v);
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_sub_return_relaxed(i, v);
 }
+#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
+#endif
 
-static __always_inline void atomic_set(atomic_t *v, int i)
+#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
+static inline s64
+atomic64_fetch_sub(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic_set(v, i);
+	return arch_atomic64_fetch_sub(i, v);
 }
+#define atomic64_fetch_sub atomic64_fetch_sub
+#endif
 
-static __always_inline void atomic64_set(atomic64_t *v, s64 i)
+#if defined(arch_atomic64_fetch_sub_acquire)
+static inline s64
+atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic64_set(v, i);
+	return arch_atomic64_fetch_sub_acquire(i, v);
 }
+#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
+#endif
 
-static __always_inline int atomic_xchg(atomic_t *v, int i)
+#if defined(arch_atomic64_fetch_sub_release)
+static inline s64
+atomic64_fetch_sub_release(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_xchg(v, i);
+	return arch_atomic64_fetch_sub_release(i, v);
 }
+#define atomic64_fetch_sub_release atomic64_fetch_sub_release
+#endif
 
-static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
+#if defined(arch_atomic64_fetch_sub_relaxed)
+static inline s64
+atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_xchg(v, i);
+	return arch_atomic64_fetch_sub_relaxed(i, v);
 }
+#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
+#endif
 
-static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+#if defined(arch_atomic64_inc)
+static inline void
+atomic64_inc(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_cmpxchg(v, old, new);
+	arch_atomic64_inc(v);
 }
+#define atomic64_inc atomic64_inc
+#endif
 
-static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
+#if defined(arch_atomic64_inc_return)
+static inline s64
+atomic64_inc_return(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_cmpxchg(v, old, new);
+	return arch_atomic64_inc_return(v);
 }
+#define atomic64_inc_return atomic64_inc_return
+#endif
 
-#ifdef arch_atomic_try_cmpxchg
-#define atomic_try_cmpxchg atomic_try_cmpxchg
-static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
+#if defined(arch_atomic64_inc_return_acquire)
+static inline s64
+atomic64_inc_return_acquire(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	kasan_check_read(old, sizeof(*old));
-	return arch_atomic_try_cmpxchg(v, old, new);
+	return arch_atomic64_inc_return_acquire(v);
 }
+#define atomic64_inc_return_acquire atomic64_inc_return_acquire
 #endif
 
-#ifdef arch_atomic64_try_cmpxchg
-#define atomic64_try_cmpxchg atomic64_try_cmpxchg
-static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
+#if defined(arch_atomic64_inc_return_release)
+static inline s64
+atomic64_inc_return_release(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	kasan_check_read(old, sizeof(*old));
-	return arch_atomic64_try_cmpxchg(v, old, new);
+	return arch_atomic64_inc_return_release(v);
 }
+#define atomic64_inc_return_release atomic64_inc_return_release
 #endif
 
-#ifdef arch_atomic_fetch_add_unless
-#define atomic_fetch_add_unless atomic_fetch_add_unless
-static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
+#if defined(arch_atomic64_inc_return_relaxed)
+static inline s64
+atomic64_inc_return_relaxed(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_fetch_add_unless(v, a, u);
+	return arch_atomic64_inc_return_relaxed(v);
 }
+#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
 #endif
 
-#ifdef arch_atomic64_fetch_add_unless
-#define atomic64_fetch_add_unless atomic64_fetch_add_unless
-static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
+#if defined(arch_atomic64_fetch_inc)
+static inline s64
+atomic64_fetch_inc(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_fetch_add_unless(v, a, u);
+	return arch_atomic64_fetch_inc(v);
 }
+#define atomic64_fetch_inc atomic64_fetch_inc
 #endif
 
-#ifdef arch_atomic_inc
-#define atomic_inc atomic_inc
-static __always_inline void atomic_inc(atomic_t *v)
+#if defined(arch_atomic64_fetch_inc_acquire)
+static inline s64
+atomic64_fetch_inc_acquire(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic_inc(v);
+	return arch_atomic64_fetch_inc_acquire(v);
 }
+#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
 #endif
 
-#ifdef arch_atomic64_inc
-#define atomic64_inc atomic64_inc
-static __always_inline void atomic64_inc(atomic64_t *v)
+#if defined(arch_atomic64_fetch_inc_release)
+static inline s64
+atomic64_fetch_inc_release(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic64_inc(v);
+	return arch_atomic64_fetch_inc_release(v);
 }
+#define atomic64_fetch_inc_release atomic64_fetch_inc_release
 #endif
 
-#ifdef arch_atomic_dec
-#define atomic_dec atomic_dec
-static __always_inline void atomic_dec(atomic_t *v)
+#if defined(arch_atomic64_fetch_inc_relaxed)
+static inline s64
+atomic64_fetch_inc_relaxed(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic_dec(v);
+	return arch_atomic64_fetch_inc_relaxed(v);
 }
+#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
 #endif
 
-#ifdef atch_atomic64_dec
-#define atomic64_dec
-static __always_inline void atomic64_dec(atomic64_t *v)
+#if defined(arch_atomic64_dec)
+static inline void
+atomic64_dec(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_dec(v);
 }
+#define atomic64_dec atomic64_dec
 #endif
 
-static __always_inline void atomic_add(int i, atomic_t *v)
+#if defined(arch_atomic64_dec_return)
+static inline s64
+atomic64_dec_return(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic_add(i, v);
+	return arch_atomic64_dec_return(v);
 }
+#define atomic64_dec_return atomic64_dec_return
+#endif
 
-static __always_inline void atomic64_add(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_dec_return_acquire)
+static inline s64
+atomic64_dec_return_acquire(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic64_add(i, v);
+	return arch_atomic64_dec_return_acquire(v);
 }
+#define atomic64_dec_return_acquire atomic64_dec_return_acquire
+#endif
 
-static __always_inline void atomic_sub(int i, atomic_t *v)
+#if defined(arch_atomic64_dec_return_release)
+static inline s64
+atomic64_dec_return_release(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic_sub(i, v);
+	return arch_atomic64_dec_return_release(v);
 }
+#define atomic64_dec_return_release atomic64_dec_return_release
+#endif
 
-static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_dec_return_relaxed)
+static inline s64
+atomic64_dec_return_relaxed(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic64_sub(i, v);
+	return arch_atomic64_dec_return_relaxed(v);
 }
+#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
+#endif
 
-static __always_inline void atomic_and(int i, atomic_t *v)
+#if defined(arch_atomic64_fetch_dec)
+static inline s64
+atomic64_fetch_dec(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic_and(i, v);
+	return arch_atomic64_fetch_dec(v);
+}
+#define atomic64_fetch_dec atomic64_fetch_dec
+#endif
+
+#if defined(arch_atomic64_fetch_dec_acquire)
+static inline s64
+atomic64_fetch_dec_acquire(atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_dec_acquire(v);
+}
+#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
+#endif
+
+#if defined(arch_atomic64_fetch_dec_release)
+static inline s64
+atomic64_fetch_dec_release(atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_dec_release(v);
 }
+#define atomic64_fetch_dec_release atomic64_fetch_dec_release
+#endif
+
+#if defined(arch_atomic64_fetch_dec_relaxed)
+static inline s64
+atomic64_fetch_dec_relaxed(atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_dec_relaxed(v);
+}
+#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
+#endif
 
-static __always_inline void atomic64_and(s64 i, atomic64_t *v)
+static inline void
+atomic64_and(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_and(i, v);
 }
+#define atomic64_and atomic64_and
 
-static __always_inline void atomic_or(int i, atomic_t *v)
+#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
+static inline s64
+atomic64_fetch_and(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic_or(i, v);
+	return arch_atomic64_fetch_and(i, v);
 }
+#define atomic64_fetch_and atomic64_fetch_and
+#endif
 
-static __always_inline void atomic64_or(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_fetch_and_acquire)
+static inline s64
+atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic64_or(i, v);
+	return arch_atomic64_fetch_and_acquire(i, v);
 }
+#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
+#endif
 
-static __always_inline void atomic_xor(int i, atomic_t *v)
+#if defined(arch_atomic64_fetch_and_release)
+static inline s64
+atomic64_fetch_and_release(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic_xor(i, v);
+	return arch_atomic64_fetch_and_release(i, v);
 }
+#define atomic64_fetch_and_release atomic64_fetch_and_release
+#endif
 
-static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_fetch_and_relaxed)
+static inline s64
+atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	arch_atomic64_xor(i, v);
+	return arch_atomic64_fetch_and_relaxed(i, v);
 }
+#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
+#endif
 
-#ifdef arch_atomic_inc_return
-#define atomic_inc_return atomic_inc_return
-static __always_inline int atomic_inc_return(atomic_t *v)
+#if defined(arch_atomic64_andnot)
+static inline void
+atomic64_andnot(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_inc_return(v);
+	arch_atomic64_andnot(i, v);
 }
+#define atomic64_andnot atomic64_andnot
 #endif
 
-#ifdef arch_atomic64_in_return
-#define atomic64_inc_return atomic64_inc_return
-static __always_inline s64 atomic64_inc_return(atomic64_t *v)
+#if defined(arch_atomic64_fetch_andnot)
+static inline s64
+atomic64_fetch_andnot(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_inc_return(v);
+	return arch_atomic64_fetch_andnot(i, v);
 }
+#define atomic64_fetch_andnot atomic64_fetch_andnot
 #endif
 
-#ifdef arch_atomic_dec_return
-#define atomic_dec_return atomic_dec_return
-static __always_inline int atomic_dec_return(atomic_t *v)
+#if defined(arch_atomic64_fetch_andnot_acquire)
+static inline s64
+atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_dec_return(v);
+	return arch_atomic64_fetch_andnot_acquire(i, v);
 }
+#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
 #endif
 
-#ifdef arch_atomic64_dec_return
-#define atomic64_dec_return atomic64_dec_return
-static __always_inline s64 atomic64_dec_return(atomic64_t *v)
+#if defined(arch_atomic64_fetch_andnot_release)
+static inline s64
+atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_dec_return(v);
+	return arch_atomic64_fetch_andnot_release(i, v);
 }
+#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
 #endif
 
-#ifdef arch_atomic64_inc_not_zero
-#define atomic64_inc_not_zero atomic64_inc_not_zero
-static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
+#if defined(arch_atomic64_fetch_andnot_relaxed)
+static inline s64
+atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_inc_not_zero(v);
+	return arch_atomic64_fetch_andnot_relaxed(i, v);
 }
+#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
 #endif
 
-#ifdef arch_atomic64_dec_if_positive
-#define atomic64_dec_if_positive atomic64_dec_if_positive
-static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
+static inline void
+atomic64_or(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_dec_if_positive(v);
+	arch_atomic64_or(i, v);
+}
+#define atomic64_or atomic64_or
+
+#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
+static inline s64
+atomic64_fetch_or(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_or(i, v);
 }
+#define atomic64_fetch_or atomic64_fetch_or
 #endif
 
-#ifdef arch_atomic_dec_and_test
-#define atomic_dec_and_test atomic_dec_and_test
-static __always_inline bool atomic_dec_and_test(atomic_t *v)
+#if defined(arch_atomic64_fetch_or_acquire)
+static inline s64
+atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_dec_and_test(v);
+	return arch_atomic64_fetch_or_acquire(i, v);
 }
+#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
 #endif
 
-#ifdef arch_atomic64_dec_and_test
-#define atomic64_dec_and_test atomic64_dec_and_test
-static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
+#if defined(arch_atomic64_fetch_or_release)
+static inline s64
+atomic64_fetch_or_release(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_dec_and_test(v);
+	return arch_atomic64_fetch_or_release(i, v);
 }
+#define atomic64_fetch_or_release atomic64_fetch_or_release
 #endif
 
-#ifdef arch_atomic_inc_and_test
-#define atomic_inc_and_test atomic_inc_and_test
-static __always_inline bool atomic_inc_and_test(atomic_t *v)
+#if defined(arch_atomic64_fetch_or_relaxed)
+static inline s64
+atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_inc_and_test(v);
+	return arch_atomic64_fetch_or_relaxed(i, v);
 }
+#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
 #endif
 
-#ifdef arch_atomic64_inc_and_test
-#define atomic64_inc_and_test atomic64_inc_and_test
-static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
+static inline void
+atomic64_xor(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_inc_and_test(v);
+	arch_atomic64_xor(i, v);
+}
+#define atomic64_xor atomic64_xor
+
+#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
+static inline s64
+atomic64_fetch_xor(s64 i, atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_xor(i, v);
 }
+#define atomic64_fetch_xor atomic64_fetch_xor
 #endif
 
-static __always_inline int atomic_add_return(int i, atomic_t *v)
+#if defined(arch_atomic64_fetch_xor_acquire)
+static inline s64
+atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_add_return(i, v);
+	return arch_atomic64_fetch_xor_acquire(i, v);
 }
+#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
+#endif
 
-static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_fetch_xor_release)
+static inline s64
+atomic64_fetch_xor_release(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_add_return(i, v);
+	return arch_atomic64_fetch_xor_release(i, v);
 }
+#define atomic64_fetch_xor_release atomic64_fetch_xor_release
+#endif
 
-static __always_inline int atomic_sub_return(int i, atomic_t *v)
+#if defined(arch_atomic64_fetch_xor_relaxed)
+static inline s64
+atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_sub_return(i, v);
+	return arch_atomic64_fetch_xor_relaxed(i, v);
 }
+#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
+#endif
 
-static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
+#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
+static inline s64
+atomic64_xchg(atomic64_t *v, s64 i)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_sub_return(i, v);
+	return arch_atomic64_xchg(v, i);
 }
+#define atomic64_xchg atomic64_xchg
+#endif
 
-static __always_inline int atomic_fetch_add(int i, atomic_t *v)
+#if defined(arch_atomic64_xchg_acquire)
+static inline s64
+atomic64_xchg_acquire(atomic64_t *v, s64 i)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_fetch_add(i, v);
+	return arch_atomic64_xchg_acquire(v, i);
 }
+#define atomic64_xchg_acquire atomic64_xchg_acquire
+#endif
 
-static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_xchg_release)
+static inline s64
+atomic64_xchg_release(atomic64_t *v, s64 i)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_fetch_add(i, v);
+	return arch_atomic64_xchg_release(v, i);
 }
+#define atomic64_xchg_release atomic64_xchg_release
+#endif
 
-static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
+#if defined(arch_atomic64_xchg_relaxed)
+static inline s64
+atomic64_xchg_relaxed(atomic64_t *v, s64 i)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_fetch_sub(i, v);
+	return arch_atomic64_xchg_relaxed(v, i);
 }
+#define atomic64_xchg_relaxed atomic64_xchg_relaxed
+#endif
 
-static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
+#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
+static inline s64
+atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_fetch_sub(i, v);
+	return arch_atomic64_cmpxchg(v, old, new);
 }
+#define atomic64_cmpxchg atomic64_cmpxchg
+#endif
 
-static __always_inline int atomic_fetch_and(int i, atomic_t *v)
+#if defined(arch_atomic64_cmpxchg_acquire)
+static inline s64
+atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_fetch_and(i, v);
+	return arch_atomic64_cmpxchg_acquire(v, old, new);
 }
+#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
+#endif
 
-static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_cmpxchg_release)
+static inline s64
+atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_fetch_and(i, v);
+	return arch_atomic64_cmpxchg_release(v, old, new);
 }
+#define atomic64_cmpxchg_release atomic64_cmpxchg_release
+#endif
 
-static __always_inline int atomic_fetch_or(int i, atomic_t *v)
+#if defined(arch_atomic64_cmpxchg_relaxed)
+static inline s64
+atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_fetch_or(i, v);
+	return arch_atomic64_cmpxchg_relaxed(v, old, new);
 }
+#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
+#endif
 
-static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_try_cmpxchg)
+static inline bool
+atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_fetch_or(i, v);
+	kasan_check_write(old, sizeof(*old));
+	return arch_atomic64_try_cmpxchg(v, old, new);
 }
+#define atomic64_try_cmpxchg atomic64_try_cmpxchg
+#endif
 
-static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
+#if defined(arch_atomic64_try_cmpxchg_acquire)
+static inline bool
+atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_fetch_xor(i, v);
+	kasan_check_write(old, sizeof(*old));
+	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
 }
+#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
+#endif
 
-static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_try_cmpxchg_release)
+static inline bool
+atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic64_fetch_xor(i, v);
+	kasan_check_write(old, sizeof(*old));
+	return arch_atomic64_try_cmpxchg_release(v, old, new);
 }
+#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
+#endif
 
-#ifdef arch_atomic_sub_and_test
-#define atomic_sub_and_test atomic_sub_and_test
-static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
+#if defined(arch_atomic64_try_cmpxchg_relaxed)
+static inline bool
+atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_sub_and_test(i, v);
+	kasan_check_write(old, sizeof(*old));
+	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
 }
+#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
 #endif
 
-#ifdef arch_atomic64_sub_and_test
-#define atomic64_sub_and_test atomic64_sub_and_test
-static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_sub_and_test)
+static inline bool
+atomic64_sub_and_test(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_sub_and_test(i, v);
 }
+#define atomic64_sub_and_test atomic64_sub_and_test
 #endif
 
-#ifdef arch_atomic_add_negative
-#define atomic_add_negative atomic_add_negative
-static __always_inline bool atomic_add_negative(int i, atomic_t *v)
+#if defined(arch_atomic64_dec_and_test)
+static inline bool
+atomic64_dec_and_test(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
-	return arch_atomic_add_negative(i, v);
+	return arch_atomic64_dec_and_test(v);
 }
+#define atomic64_dec_and_test atomic64_dec_and_test
 #endif
 
-#ifdef arch_atomic64_add_negative
-#define atomic64_add_negative atomic64_add_negative
-static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_inc_and_test)
+static inline bool
+atomic64_inc_and_test(atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_inc_and_test(v);
+}
+#define atomic64_inc_and_test atomic64_inc_and_test
+#endif
+
+#if defined(arch_atomic64_add_negative)
+static inline bool
+atomic64_add_negative(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_add_negative(i, v);
 }
+#define atomic64_add_negative atomic64_add_negative
+#endif
+
+#if defined(arch_atomic64_fetch_add_unless)
+static inline s64
+atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_fetch_add_unless(v, a, u);
+}
+#define atomic64_fetch_add_unless atomic64_fetch_add_unless
+#endif
+
+#if defined(arch_atomic64_add_unless)
+static inline bool
+atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_add_unless(v, a, u);
+}
+#define atomic64_add_unless atomic64_add_unless
+#endif
+
+#if defined(arch_atomic64_inc_not_zero)
+static inline bool
+atomic64_inc_not_zero(atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_inc_not_zero(v);
+}
+#define atomic64_inc_not_zero atomic64_inc_not_zero
+#endif
+
+#if defined(arch_atomic64_inc_unless_negative)
+static inline bool
+atomic64_inc_unless_negative(atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_inc_unless_negative(v);
+}
+#define atomic64_inc_unless_negative atomic64_inc_unless_negative
 #endif
 
-#define xchg(ptr, new)							\
+#if defined(arch_atomic64_dec_unless_positive)
+static inline bool
+atomic64_dec_unless_positive(atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_dec_unless_positive(v);
+}
+#define atomic64_dec_unless_positive atomic64_dec_unless_positive
+#endif
+
+#if defined(arch_atomic64_dec_if_positive)
+static inline s64
+atomic64_dec_if_positive(atomic64_t *v)
+{
+	kasan_check_write(v, sizeof(*v));
+	return arch_atomic64_dec_if_positive(v);
+}
+#define atomic64_dec_if_positive atomic64_dec_if_positive
+#endif
+
+#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
+#define xchg(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_xchg(__ai_ptr, __VA_ARGS__);				\
+})
+#endif
+
+#if defined(arch_xchg_acquire)
+#define xchg_acquire(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);				\
+})
+#endif
+
+#if defined(arch_xchg_release)
+#define xchg_release(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_xchg_release(__ai_ptr, __VA_ARGS__);				\
+})
+#endif
+
+#if defined(arch_xchg_relaxed)
+#define xchg_relaxed(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);				\
+})
+#endif
+
+#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
+#define cmpxchg(ptr, ...)						\
 ({									\
 	typeof(ptr) __ai_ptr = (ptr);					\
-	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
-	arch_xchg(__ai_ptr, (new));					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
 })
+#endif
+
+#if defined(arch_cmpxchg_acquire)
+#define cmpxchg_acquire(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);				\
+})
+#endif
+
+#if defined(arch_cmpxchg_release)
+#define cmpxchg_release(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);				\
+})
+#endif
+
+#if defined(arch_cmpxchg_relaxed)
+#define cmpxchg_relaxed(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);				\
+})
+#endif
 
-#define cmpxchg(ptr, old, new)						\
+#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
+#define cmpxchg64(ptr, ...)						\
 ({									\
 	typeof(ptr) __ai_ptr = (ptr);					\
-	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
-	arch_cmpxchg(__ai_ptr, (old), (new));				\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
 })
+#endif
+
+#if defined(arch_cmpxchg64_acquire)
+#define cmpxchg64_acquire(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);				\
+})
+#endif
+
+#if defined(arch_cmpxchg64_release)
+#define cmpxchg64_release(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);				\
+})
+#endif
 
-#define sync_cmpxchg(ptr, old, new)					\
+#if defined(arch_cmpxchg64_relaxed)
+#define cmpxchg64_relaxed(ptr, ...)						\
 ({									\
 	typeof(ptr) __ai_ptr = (ptr);					\
-	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
-	arch_sync_cmpxchg(__ai_ptr, (old), (new));			\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);				\
 })
+#endif
 
-#define cmpxchg_local(ptr, old, new)					\
+#define cmpxchg_local(ptr, ...)						\
 ({									\
 	typeof(ptr) __ai_ptr = (ptr);					\
-	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
-	arch_cmpxchg_local(__ai_ptr, (old), (new));			\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);				\
 })
 
-#define cmpxchg64(ptr, old, new)					\
+#define cmpxchg64_local(ptr, ...)						\
 ({									\
 	typeof(ptr) __ai_ptr = (ptr);					\
-	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
-	arch_cmpxchg64(__ai_ptr, (old), (new));				\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);				\
 })
 
-#define cmpxchg64_local(ptr, old, new)					\
+#define sync_cmpxchg(ptr, ...)						\
 ({									\
 	typeof(ptr) __ai_ptr = (ptr);					\
-	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));			\
-	arch_cmpxchg64_local(__ai_ptr, (old), (new));			\
+	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
+	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);				\
 })
 
-#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
+#define cmpxchg_double(ptr, ...)						\
 ({									\
-	typeof(p1) __ai_p1 = (p1);					\
-	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));		\
-	arch_cmpxchg_double(__ai_p1, (p2), (o1), (o2), (n1), (n2));	\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
+	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);				\
 })
 
-#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2)				\
-({										\
-	typeof(p1) __ai_p1 = (p1);						\
-	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));			\
-	arch_cmpxchg_double_local(__ai_p1, (p2), (o1), (o2), (n1), (n2));	\
+
+#define cmpxchg_double_local(ptr, ...)						\
+({									\
+	typeof(ptr) __ai_ptr = (ptr);					\
+	kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
+	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);				\
 })
 
-#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
+#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
-- 
2.11.0
