author     Linus Torvalds <torvalds@linux-foundation.org>  2019-03-06 07:17:17 -0800
committer  Linus Torvalds <torvalds@linux-foundation.org>  2019-03-06 07:17:17 -0800
commit     3478588b5136966c80c571cf0006f08e9e5b8f04 (patch)
tree       186e67cce850bbdaf91aa678406d68fb87c5597c /include
parent     c8f5ed6ef972ed4fd10b0c2e2baec3b6803d3c73 (diff)
parent     28d49e282665e2a51cc91b716937fccfa24d80e1 (diff)
download   linux-stable-3478588b5136966c80c571cf0006f08e9e5b8f04.tar.gz
           linux-stable-3478588b5136966c80c571cf0006f08e9e5b8f04.tar.bz2
           linux-stable-3478588b5136966c80c571cf0006f08e9e5b8f04.zip
Merge branch 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull locking updates from Ingo Molnar:
 "The biggest part of this tree is the new auto-generated atomics API
  wrappers by Mark Rutland.

  The primary motivation was to allow instrumentation without uglifying
  the primary source code.

  The linecount increase comes from adding the auto-generated files to
  the Git space as well:

    include/asm-generic/atomic-instrumented.h | 1689 ++++++++++++++++--
    include/asm-generic/atomic-long.h         | 1174 ++++++++++---
    include/linux/atomic-fallback.h           | 2295 +++++++++++++++++++++++++
    include/linux/atomic.h                    | 1241 +------------

  I preferred this approach, so that the full call stack of the (already
  complex) locking APIs is still fully visible in 'git grep'. But if
  this is excessive we could certainly hide them.

  There's a separate build-time mechanism to determine whether the
  headers are out of date (they should never be stale if we do our job
  right).

  Anyway, nothing from this should be visible to regular kernel
  developers.

  Other changes:

   - Add support for dynamic keys, which removes a source of false
     positives in the workqueue code, among other things (Bart Van
     Assche)

   - Updates to tools/memory-model (Andrea Parri, Paul E. McKenney)

   - qspinlock, wake_q and lockdep micro-optimizations (Waiman Long)

   - misc other updates and enhancements"

* 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (48 commits)
  locking/lockdep: Shrink struct lock_class_key
  locking/lockdep: Add module_param to enable consistency checks
  lockdep/lib/tests: Test dynamic key registration
  lockdep/lib/tests: Fix run_tests.sh
  kernel/workqueue: Use dynamic lockdep keys for workqueues
  locking/lockdep: Add support for dynamic keys
  locking/lockdep: Verify whether lock objects are small enough to be used as class keys
  locking/lockdep: Check data structure consistency
  locking/lockdep: Reuse lock chains that have been freed
  locking/lockdep: Fix a comment in add_chain_cache()
  locking/lockdep: Introduce lockdep_next_lockchain() and lock_chain_count()
  locking/lockdep: Reuse list entries that are no longer in use
  locking/lockdep: Free lock classes that are no longer in use
  locking/lockdep: Update two outdated comments
  locking/lockdep: Make it easy to detect whether or not inside a selftest
  locking/lockdep: Split lockdep_free_key_range() and lockdep_reset_lock()
  locking/lockdep: Initialize the locks_before and locks_after lists earlier
  locking/lockdep: Make zap_class() remove all matching lock order entries
  locking/lockdep: Reorder struct lock_class members
  locking/lockdep: Avoid that add_chain_cache() adds an invalid chain to the cache
  ...
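For reference, every wrapper the generator emits follows the same shape, visible
throughout the hunks below: a KASAN check sized to the object being accessed,
followed by a call into the corresponding arch_ primitive. A minimal sketch,
copied in simplified form from the new atomic-instrumented.h in this diff:

    static inline int
    atomic_add_return(int i, atomic_t *v)
    {
    	kasan_check_write(v, sizeof(*v));	/* instrument the whole atomic_t */
    	return arch_atomic_add_return(i, v);	/* defer to the arch implementation */
    }
    #define atomic_add_return atomic_add_return	/* advertise that this op is provided */

Optional operations and ordering variants (_acquire/_release/_relaxed) are
wrapped in #if defined(arch_<op>) guards, so only primitives the architecture
actually provides are instrumented here; the remaining variants are synthesized
by the generated include/linux/atomic-fallback.h listed in the diffstat.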
Diffstat (limited to 'include')
-rw-r--r--  include/asm-generic/atomic-instrumented.h | 1659
-rw-r--r--  include/asm-generic/atomic-long.h          | 1174
-rw-r--r--  include/linux/atomic-fallback.h            | 2295
-rw-r--r--  include/linux/atomic.h                     | 1241
-rw-r--r--  include/linux/lockdep.h                    |   54
-rw-r--r--  include/linux/percpu-rwsem.h               |   24
-rw-r--r--  include/linux/sched/wake_q.h               |    4
-rw-r--r--  include/linux/workqueue.h                  |   28
8 files changed, 4795 insertions(+), 1684 deletions(-)
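As a hedged illustration of what the instrumentation buys (hypothetical driver
code, not part of this commit): because every wrapper performs a
kasan_check_read()/kasan_check_write() before touching the value, an atomic
operation on freed memory is reported by KASAN like any ordinary access:

    atomic_t *counter = kmalloc(sizeof(*counter), GFP_KERNEL);

    atomic_set(counter, 0);
    kfree(counter);
    atomic_inc(counter);	/* kasan_check_write() in the wrapper flags this use-after-free */

Previously only a subset of the atomics were instrumented this way; the
generated headers below extend the pattern uniformly across the whole API.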
diff --git a/include/asm-generic/atomic-instrumented.h b/include/asm-generic/atomic-instrumented.h
index 0d4b1d3dbc1e..e8730c6b9fe2 100644
--- a/include/asm-generic/atomic-instrumented.h
+++ b/include/asm-generic/atomic-instrumented.h
@@ -1,3 +1,8 @@
+// SPDX-License-Identifier: GPL-2.0
+
+// Generated by scripts/atomic/gen-atomic-instrumented.sh
+// DO NOT MODIFY THIS FILE DIRECTLY
+
/*
* This file provides wrappers with KASAN instrumentation for atomic operations.
* To use this functionality an arch's atomic.h file needs to define all
@@ -9,459 +14,1775 @@
* arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
* double instrumentation.
*/
-
-#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
-#define _LINUX_ATOMIC_INSTRUMENTED_H
+#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
+#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
#include <linux/build_bug.h>
#include <linux/kasan-checks.h>
-static __always_inline int atomic_read(const atomic_t *v)
+static inline int
+atomic_read(const atomic_t *v)
{
kasan_check_read(v, sizeof(*v));
return arch_atomic_read(v);
}
+#define atomic_read atomic_read
-static __always_inline s64 atomic64_read(const atomic64_t *v)
+#if defined(arch_atomic_read_acquire)
+static inline int
+atomic_read_acquire(const atomic_t *v)
{
kasan_check_read(v, sizeof(*v));
- return arch_atomic64_read(v);
+ return arch_atomic_read_acquire(v);
}
+#define atomic_read_acquire atomic_read_acquire
+#endif
-static __always_inline void atomic_set(atomic_t *v, int i)
+static inline void
+atomic_set(atomic_t *v, int i)
{
kasan_check_write(v, sizeof(*v));
arch_atomic_set(v, i);
}
+#define atomic_set atomic_set
-static __always_inline void atomic64_set(atomic64_t *v, s64 i)
+#if defined(arch_atomic_set_release)
+static inline void
+atomic_set_release(atomic_t *v, int i)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic64_set(v, i);
+ arch_atomic_set_release(v, i);
}
+#define atomic_set_release atomic_set_release
+#endif
-static __always_inline int atomic_xchg(atomic_t *v, int i)
+static inline void
+atomic_add(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_xchg(v, i);
+ arch_atomic_add(i, v);
}
+#define atomic_add atomic_add
-static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
+#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
+static inline int
+atomic_add_return(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_xchg(v, i);
+ return arch_atomic_add_return(i, v);
}
+#define atomic_add_return atomic_add_return
+#endif
-static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+#if defined(arch_atomic_add_return_acquire)
+static inline int
+atomic_add_return_acquire(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_cmpxchg(v, old, new);
+ return arch_atomic_add_return_acquire(i, v);
}
+#define atomic_add_return_acquire atomic_add_return_acquire
+#endif
-static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
+#if defined(arch_atomic_add_return_release)
+static inline int
+atomic_add_return_release(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_cmpxchg(v, old, new);
+ return arch_atomic_add_return_release(i, v);
}
+#define atomic_add_return_release atomic_add_return_release
+#endif
-#ifdef arch_atomic_try_cmpxchg
-#define atomic_try_cmpxchg atomic_try_cmpxchg
-static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
+#if defined(arch_atomic_add_return_relaxed)
+static inline int
+atomic_add_return_relaxed(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- kasan_check_read(old, sizeof(*old));
- return arch_atomic_try_cmpxchg(v, old, new);
+ return arch_atomic_add_return_relaxed(i, v);
}
+#define atomic_add_return_relaxed atomic_add_return_relaxed
#endif
-#ifdef arch_atomic64_try_cmpxchg
-#define atomic64_try_cmpxchg atomic64_try_cmpxchg
-static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
+#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
+static inline int
+atomic_fetch_add(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- kasan_check_read(old, sizeof(*old));
- return arch_atomic64_try_cmpxchg(v, old, new);
+ return arch_atomic_fetch_add(i, v);
}
+#define atomic_fetch_add atomic_fetch_add
#endif
-#ifdef arch_atomic_fetch_add_unless
-#define atomic_fetch_add_unless atomic_fetch_add_unless
-static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
+#if defined(arch_atomic_fetch_add_acquire)
+static inline int
+atomic_fetch_add_acquire(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_fetch_add_unless(v, a, u);
+ return arch_atomic_fetch_add_acquire(i, v);
}
+#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif
-#ifdef arch_atomic64_fetch_add_unless
-#define atomic64_fetch_add_unless atomic64_fetch_add_unless
-static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
+#if defined(arch_atomic_fetch_add_release)
+static inline int
+atomic_fetch_add_release(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_fetch_add_unless(v, a, u);
+ return arch_atomic_fetch_add_release(i, v);
}
+#define atomic_fetch_add_release atomic_fetch_add_release
#endif
-#ifdef arch_atomic_inc
-#define atomic_inc atomic_inc
-static __always_inline void atomic_inc(atomic_t *v)
+#if defined(arch_atomic_fetch_add_relaxed)
+static inline int
+atomic_fetch_add_relaxed(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_add_relaxed(i, v);
+}
+#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
+#endif
+
+static inline void
+atomic_sub(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic_sub(i, v);
+}
+#define atomic_sub atomic_sub
+
+#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
+static inline int
+atomic_sub_return(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_sub_return(i, v);
+}
+#define atomic_sub_return atomic_sub_return
+#endif
+
+#if defined(arch_atomic_sub_return_acquire)
+static inline int
+atomic_sub_return_acquire(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_sub_return_acquire(i, v);
+}
+#define atomic_sub_return_acquire atomic_sub_return_acquire
+#endif
+
+#if defined(arch_atomic_sub_return_release)
+static inline int
+atomic_sub_return_release(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_sub_return_release(i, v);
+}
+#define atomic_sub_return_release atomic_sub_return_release
+#endif
+
+#if defined(arch_atomic_sub_return_relaxed)
+static inline int
+atomic_sub_return_relaxed(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_sub_return_relaxed(i, v);
+}
+#define atomic_sub_return_relaxed atomic_sub_return_relaxed
+#endif
+
+#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
+static inline int
+atomic_fetch_sub(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_sub(i, v);
+}
+#define atomic_fetch_sub atomic_fetch_sub
+#endif
+
+#if defined(arch_atomic_fetch_sub_acquire)
+static inline int
+atomic_fetch_sub_acquire(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_sub_acquire(i, v);
+}
+#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
+#endif
+
+#if defined(arch_atomic_fetch_sub_release)
+static inline int
+atomic_fetch_sub_release(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_sub_release(i, v);
+}
+#define atomic_fetch_sub_release atomic_fetch_sub_release
+#endif
+
+#if defined(arch_atomic_fetch_sub_relaxed)
+static inline int
+atomic_fetch_sub_relaxed(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_sub_relaxed(i, v);
+}
+#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
+#endif
+
+#if defined(arch_atomic_inc)
+static inline void
+atomic_inc(atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
arch_atomic_inc(v);
}
+#define atomic_inc atomic_inc
#endif
-#ifdef arch_atomic64_inc
-#define atomic64_inc atomic64_inc
-static __always_inline void atomic64_inc(atomic64_t *v)
+#if defined(arch_atomic_inc_return)
+static inline int
+atomic_inc_return(atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic64_inc(v);
+ return arch_atomic_inc_return(v);
}
+#define atomic_inc_return atomic_inc_return
#endif
-#ifdef arch_atomic_dec
-#define atomic_dec atomic_dec
-static __always_inline void atomic_dec(atomic_t *v)
+#if defined(arch_atomic_inc_return_acquire)
+static inline int
+atomic_inc_return_acquire(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_inc_return_acquire(v);
+}
+#define atomic_inc_return_acquire atomic_inc_return_acquire
+#endif
+
+#if defined(arch_atomic_inc_return_release)
+static inline int
+atomic_inc_return_release(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_inc_return_release(v);
+}
+#define atomic_inc_return_release atomic_inc_return_release
+#endif
+
+#if defined(arch_atomic_inc_return_relaxed)
+static inline int
+atomic_inc_return_relaxed(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_inc_return_relaxed(v);
+}
+#define atomic_inc_return_relaxed atomic_inc_return_relaxed
+#endif
+
+#if defined(arch_atomic_fetch_inc)
+static inline int
+atomic_fetch_inc(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_inc(v);
+}
+#define atomic_fetch_inc atomic_fetch_inc
+#endif
+
+#if defined(arch_atomic_fetch_inc_acquire)
+static inline int
+atomic_fetch_inc_acquire(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_inc_acquire(v);
+}
+#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
+#endif
+
+#if defined(arch_atomic_fetch_inc_release)
+static inline int
+atomic_fetch_inc_release(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_inc_release(v);
+}
+#define atomic_fetch_inc_release atomic_fetch_inc_release
+#endif
+
+#if defined(arch_atomic_fetch_inc_relaxed)
+static inline int
+atomic_fetch_inc_relaxed(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_inc_relaxed(v);
+}
+#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
+#endif
+
+#if defined(arch_atomic_dec)
+static inline void
+atomic_dec(atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
arch_atomic_dec(v);
}
+#define atomic_dec atomic_dec
#endif
-#ifdef atch_atomic64_dec
-#define atomic64_dec
-static __always_inline void atomic64_dec(atomic64_t *v)
+#if defined(arch_atomic_dec_return)
+static inline int
+atomic_dec_return(atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic64_dec(v);
+ return arch_atomic_dec_return(v);
}
+#define atomic_dec_return atomic_dec_return
#endif
-static __always_inline void atomic_add(int i, atomic_t *v)
+#if defined(arch_atomic_dec_return_acquire)
+static inline int
+atomic_dec_return_acquire(atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic_add(i, v);
+ return arch_atomic_dec_return_acquire(v);
}
+#define atomic_dec_return_acquire atomic_dec_return_acquire
+#endif
-static __always_inline void atomic64_add(s64 i, atomic64_t *v)
+#if defined(arch_atomic_dec_return_release)
+static inline int
+atomic_dec_return_release(atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic64_add(i, v);
+ return arch_atomic_dec_return_release(v);
}
+#define atomic_dec_return_release atomic_dec_return_release
+#endif
-static __always_inline void atomic_sub(int i, atomic_t *v)
+#if defined(arch_atomic_dec_return_relaxed)
+static inline int
+atomic_dec_return_relaxed(atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic_sub(i, v);
+ return arch_atomic_dec_return_relaxed(v);
}
+#define atomic_dec_return_relaxed atomic_dec_return_relaxed
+#endif
-static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
+#if defined(arch_atomic_fetch_dec)
+static inline int
+atomic_fetch_dec(atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic64_sub(i, v);
+ return arch_atomic_fetch_dec(v);
}
+#define atomic_fetch_dec atomic_fetch_dec
+#endif
-static __always_inline void atomic_and(int i, atomic_t *v)
+#if defined(arch_atomic_fetch_dec_acquire)
+static inline int
+atomic_fetch_dec_acquire(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_dec_acquire(v);
+}
+#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
+#endif
+
+#if defined(arch_atomic_fetch_dec_release)
+static inline int
+atomic_fetch_dec_release(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_dec_release(v);
+}
+#define atomic_fetch_dec_release atomic_fetch_dec_release
+#endif
+
+#if defined(arch_atomic_fetch_dec_relaxed)
+static inline int
+atomic_fetch_dec_relaxed(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_dec_relaxed(v);
+}
+#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
+#endif
+
+static inline void
+atomic_and(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
arch_atomic_and(i, v);
}
+#define atomic_and atomic_and
-static __always_inline void atomic64_and(s64 i, atomic64_t *v)
+#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
+static inline int
+atomic_fetch_and(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic64_and(i, v);
+ return arch_atomic_fetch_and(i, v);
+}
+#define atomic_fetch_and atomic_fetch_and
+#endif
+
+#if defined(arch_atomic_fetch_and_acquire)
+static inline int
+atomic_fetch_and_acquire(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_and_acquire(i, v);
+}
+#define atomic_fetch_and_acquire atomic_fetch_and_acquire
+#endif
+
+#if defined(arch_atomic_fetch_and_release)
+static inline int
+atomic_fetch_and_release(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_and_release(i, v);
+}
+#define atomic_fetch_and_release atomic_fetch_and_release
+#endif
+
+#if defined(arch_atomic_fetch_and_relaxed)
+static inline int
+atomic_fetch_and_relaxed(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_and_relaxed(i, v);
}
+#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
+#endif
+
+#if defined(arch_atomic_andnot)
+static inline void
+atomic_andnot(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic_andnot(i, v);
+}
+#define atomic_andnot atomic_andnot
+#endif
-static __always_inline void atomic_or(int i, atomic_t *v)
+#if defined(arch_atomic_fetch_andnot)
+static inline int
+atomic_fetch_andnot(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_andnot(i, v);
+}
+#define atomic_fetch_andnot atomic_fetch_andnot
+#endif
+
+#if defined(arch_atomic_fetch_andnot_acquire)
+static inline int
+atomic_fetch_andnot_acquire(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_andnot_acquire(i, v);
+}
+#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
+#endif
+
+#if defined(arch_atomic_fetch_andnot_release)
+static inline int
+atomic_fetch_andnot_release(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_andnot_release(i, v);
+}
+#define atomic_fetch_andnot_release atomic_fetch_andnot_release
+#endif
+
+#if defined(arch_atomic_fetch_andnot_relaxed)
+static inline int
+atomic_fetch_andnot_relaxed(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_andnot_relaxed(i, v);
+}
+#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
+#endif
+
+static inline void
+atomic_or(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
arch_atomic_or(i, v);
}
+#define atomic_or atomic_or
-static __always_inline void atomic64_or(s64 i, atomic64_t *v)
+#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
+static inline int
+atomic_fetch_or(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic64_or(i, v);
+ return arch_atomic_fetch_or(i, v);
+}
+#define atomic_fetch_or atomic_fetch_or
+#endif
+
+#if defined(arch_atomic_fetch_or_acquire)
+static inline int
+atomic_fetch_or_acquire(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_or_acquire(i, v);
+}
+#define atomic_fetch_or_acquire atomic_fetch_or_acquire
+#endif
+
+#if defined(arch_atomic_fetch_or_release)
+static inline int
+atomic_fetch_or_release(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_or_release(i, v);
+}
+#define atomic_fetch_or_release atomic_fetch_or_release
+#endif
+
+#if defined(arch_atomic_fetch_or_relaxed)
+static inline int
+atomic_fetch_or_relaxed(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_fetch_or_relaxed(i, v);
}
+#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
+#endif
-static __always_inline void atomic_xor(int i, atomic_t *v)
+static inline void
+atomic_xor(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
arch_atomic_xor(i, v);
}
+#define atomic_xor atomic_xor
-static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
+#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
+static inline int
+atomic_fetch_xor(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- arch_atomic64_xor(i, v);
+ return arch_atomic_fetch_xor(i, v);
}
+#define atomic_fetch_xor atomic_fetch_xor
+#endif
-#ifdef arch_atomic_inc_return
-#define atomic_inc_return atomic_inc_return
-static __always_inline int atomic_inc_return(atomic_t *v)
+#if defined(arch_atomic_fetch_xor_acquire)
+static inline int
+atomic_fetch_xor_acquire(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_inc_return(v);
+ return arch_atomic_fetch_xor_acquire(i, v);
}
+#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif
-#ifdef arch_atomic64_in_return
-#define atomic64_inc_return atomic64_inc_return
-static __always_inline s64 atomic64_inc_return(atomic64_t *v)
+#if defined(arch_atomic_fetch_xor_release)
+static inline int
+atomic_fetch_xor_release(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_inc_return(v);
+ return arch_atomic_fetch_xor_release(i, v);
}
+#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif
-#ifdef arch_atomic_dec_return
-#define atomic_dec_return atomic_dec_return
-static __always_inline int atomic_dec_return(atomic_t *v)
+#if defined(arch_atomic_fetch_xor_relaxed)
+static inline int
+atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_dec_return(v);
+ return arch_atomic_fetch_xor_relaxed(i, v);
}
+#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
#endif
-#ifdef arch_atomic64_dec_return
-#define atomic64_dec_return atomic64_dec_return
-static __always_inline s64 atomic64_dec_return(atomic64_t *v)
+#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
+static inline int
+atomic_xchg(atomic_t *v, int i)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_dec_return(v);
+ return arch_atomic_xchg(v, i);
}
+#define atomic_xchg atomic_xchg
#endif
-#ifdef arch_atomic64_inc_not_zero
-#define atomic64_inc_not_zero atomic64_inc_not_zero
-static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
+#if defined(arch_atomic_xchg_acquire)
+static inline int
+atomic_xchg_acquire(atomic_t *v, int i)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_inc_not_zero(v);
+ return arch_atomic_xchg_acquire(v, i);
}
+#define atomic_xchg_acquire atomic_xchg_acquire
#endif
-#ifdef arch_atomic64_dec_if_positive
-#define atomic64_dec_if_positive atomic64_dec_if_positive
-static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
+#if defined(arch_atomic_xchg_release)
+static inline int
+atomic_xchg_release(atomic_t *v, int i)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_dec_if_positive(v);
+ return arch_atomic_xchg_release(v, i);
}
+#define atomic_xchg_release atomic_xchg_release
#endif
-#ifdef arch_atomic_dec_and_test
-#define atomic_dec_and_test atomic_dec_and_test
-static __always_inline bool atomic_dec_and_test(atomic_t *v)
+#if defined(arch_atomic_xchg_relaxed)
+static inline int
+atomic_xchg_relaxed(atomic_t *v, int i)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_dec_and_test(v);
+ return arch_atomic_xchg_relaxed(v, i);
}
+#define atomic_xchg_relaxed atomic_xchg_relaxed
#endif
-#ifdef arch_atomic64_dec_and_test
-#define atomic64_dec_and_test atomic64_dec_and_test
-static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
+#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
+static inline int
+atomic_cmpxchg(atomic_t *v, int old, int new)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_dec_and_test(v);
+ return arch_atomic_cmpxchg(v, old, new);
}
+#define atomic_cmpxchg atomic_cmpxchg
#endif
-#ifdef arch_atomic_inc_and_test
-#define atomic_inc_and_test atomic_inc_and_test
-static __always_inline bool atomic_inc_and_test(atomic_t *v)
+#if defined(arch_atomic_cmpxchg_acquire)
+static inline int
+atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_cmpxchg_acquire(v, old, new);
+}
+#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
+#endif
+
+#if defined(arch_atomic_cmpxchg_release)
+static inline int
+atomic_cmpxchg_release(atomic_t *v, int old, int new)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_cmpxchg_release(v, old, new);
+}
+#define atomic_cmpxchg_release atomic_cmpxchg_release
+#endif
+
+#if defined(arch_atomic_cmpxchg_relaxed)
+static inline int
+atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_cmpxchg_relaxed(v, old, new);
+}
+#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
+#endif
+
+#if defined(arch_atomic_try_cmpxchg)
+static inline bool
+atomic_try_cmpxchg(atomic_t *v, int *old, int new)
+{
+ kasan_check_write(v, sizeof(*v));
+ kasan_check_write(old, sizeof(*old));
+ return arch_atomic_try_cmpxchg(v, old, new);
+}
+#define atomic_try_cmpxchg atomic_try_cmpxchg
+#endif
+
+#if defined(arch_atomic_try_cmpxchg_acquire)
+static inline bool
+atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
+{
+ kasan_check_write(v, sizeof(*v));
+ kasan_check_write(old, sizeof(*old));
+ return arch_atomic_try_cmpxchg_acquire(v, old, new);
+}
+#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
+#endif
+
+#if defined(arch_atomic_try_cmpxchg_release)
+static inline bool
+atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
+{
+ kasan_check_write(v, sizeof(*v));
+ kasan_check_write(old, sizeof(*old));
+ return arch_atomic_try_cmpxchg_release(v, old, new);
+}
+#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
+#endif
+
+#if defined(arch_atomic_try_cmpxchg_relaxed)
+static inline bool
+atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
+{
+ kasan_check_write(v, sizeof(*v));
+ kasan_check_write(old, sizeof(*old));
+ return arch_atomic_try_cmpxchg_relaxed(v, old, new);
+}
+#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
+#endif
+
+#if defined(arch_atomic_sub_and_test)
+static inline bool
+atomic_sub_and_test(int i, atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_sub_and_test(i, v);
+}
+#define atomic_sub_and_test atomic_sub_and_test
+#endif
+
+#if defined(arch_atomic_dec_and_test)
+static inline bool
+atomic_dec_and_test(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_dec_and_test(v);
+}
+#define atomic_dec_and_test atomic_dec_and_test
+#endif
+
+#if defined(arch_atomic_inc_and_test)
+static inline bool
+atomic_inc_and_test(atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic_inc_and_test(v);
}
+#define atomic_inc_and_test atomic_inc_and_test
#endif
-#ifdef arch_atomic64_inc_and_test
-#define atomic64_inc_and_test atomic64_inc_and_test
-static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
+#if defined(arch_atomic_add_negative)
+static inline bool
+atomic_add_negative(int i, atomic_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_inc_and_test(v);
+ return arch_atomic_add_negative(i, v);
}
+#define atomic_add_negative atomic_add_negative
#endif
-static __always_inline int atomic_add_return(int i, atomic_t *v)
+#if defined(arch_atomic_fetch_add_unless)
+static inline int
+atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_add_return(i, v);
+ return arch_atomic_fetch_add_unless(v, a, u);
+}
+#define atomic_fetch_add_unless atomic_fetch_add_unless
+#endif
+
+#if defined(arch_atomic_add_unless)
+static inline bool
+atomic_add_unless(atomic_t *v, int a, int u)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_add_unless(v, a, u);
+}
+#define atomic_add_unless atomic_add_unless
+#endif
+
+#if defined(arch_atomic_inc_not_zero)
+static inline bool
+atomic_inc_not_zero(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_inc_not_zero(v);
+}
+#define atomic_inc_not_zero atomic_inc_not_zero
+#endif
+
+#if defined(arch_atomic_inc_unless_negative)
+static inline bool
+atomic_inc_unless_negative(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_inc_unless_negative(v);
+}
+#define atomic_inc_unless_negative atomic_inc_unless_negative
+#endif
+
+#if defined(arch_atomic_dec_unless_positive)
+static inline bool
+atomic_dec_unless_positive(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_dec_unless_positive(v);
+}
+#define atomic_dec_unless_positive atomic_dec_unless_positive
+#endif
+
+#if defined(arch_atomic_dec_if_positive)
+static inline int
+atomic_dec_if_positive(atomic_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic_dec_if_positive(v);
+}
+#define atomic_dec_if_positive atomic_dec_if_positive
+#endif
+
+static inline s64
+atomic64_read(const atomic64_t *v)
+{
+ kasan_check_read(v, sizeof(*v));
+ return arch_atomic64_read(v);
+}
+#define atomic64_read atomic64_read
+
+#if defined(arch_atomic64_read_acquire)
+static inline s64
+atomic64_read_acquire(const atomic64_t *v)
+{
+ kasan_check_read(v, sizeof(*v));
+ return arch_atomic64_read_acquire(v);
+}
+#define atomic64_read_acquire atomic64_read_acquire
+#endif
+
+static inline void
+atomic64_set(atomic64_t *v, s64 i)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_set(v, i);
+}
+#define atomic64_set atomic64_set
+
+#if defined(arch_atomic64_set_release)
+static inline void
+atomic64_set_release(atomic64_t *v, s64 i)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_set_release(v, i);
}
+#define atomic64_set_release atomic64_set_release
+#endif
-static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
+static inline void
+atomic64_add(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_add(i, v);
+}
+#define atomic64_add atomic64_add
+
+#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
+static inline s64
+atomic64_add_return(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_add_return(i, v);
}
+#define atomic64_add_return atomic64_add_return
+#endif
-static __always_inline int atomic_sub_return(int i, atomic_t *v)
+#if defined(arch_atomic64_add_return_acquire)
+static inline s64
+atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_sub_return(i, v);
+ return arch_atomic64_add_return_acquire(i, v);
}
+#define atomic64_add_return_acquire atomic64_add_return_acquire
+#endif
-static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_add_return_release)
+static inline s64
+atomic64_add_return_release(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic64_sub_return(i, v);
+ return arch_atomic64_add_return_release(i, v);
}
+#define atomic64_add_return_release atomic64_add_return_release
+#endif
-static __always_inline int atomic_fetch_add(int i, atomic_t *v)
+#if defined(arch_atomic64_add_return_relaxed)
+static inline s64
+atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_fetch_add(i, v);
+ return arch_atomic64_add_return_relaxed(i, v);
}
+#define atomic64_add_return_relaxed atomic64_add_return_relaxed
+#endif
-static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
+#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
+static inline s64
+atomic64_fetch_add(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_fetch_add(i, v);
}
+#define atomic64_fetch_add atomic64_fetch_add
+#endif
-static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
+#if defined(arch_atomic64_fetch_add_acquire)
+static inline s64
+atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_fetch_sub(i, v);
+ return arch_atomic64_fetch_add_acquire(i, v);
}
+#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
+#endif
+
+#if defined(arch_atomic64_fetch_add_release)
+static inline s64
+atomic64_fetch_add_release(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_add_release(i, v);
+}
+#define atomic64_fetch_add_release atomic64_fetch_add_release
+#endif
-static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_fetch_add_relaxed)
+static inline s64
+atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_add_relaxed(i, v);
+}
+#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
+#endif
+
+static inline void
+atomic64_sub(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_sub(i, v);
+}
+#define atomic64_sub atomic64_sub
+
+#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
+static inline s64
+atomic64_sub_return(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_sub_return(i, v);
+}
+#define atomic64_sub_return atomic64_sub_return
+#endif
+
+#if defined(arch_atomic64_sub_return_acquire)
+static inline s64
+atomic64_sub_return_acquire(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_sub_return_acquire(i, v);
+}
+#define atomic64_sub_return_acquire atomic64_sub_return_acquire
+#endif
+
+#if defined(arch_atomic64_sub_return_release)
+static inline s64
+atomic64_sub_return_release(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_sub_return_release(i, v);
+}
+#define atomic64_sub_return_release atomic64_sub_return_release
+#endif
+
+#if defined(arch_atomic64_sub_return_relaxed)
+static inline s64
+atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_sub_return_relaxed(i, v);
+}
+#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
+#endif
+
+#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
+static inline s64
+atomic64_fetch_sub(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_fetch_sub(i, v);
}
+#define atomic64_fetch_sub atomic64_fetch_sub
+#endif
-static __always_inline int atomic_fetch_and(int i, atomic_t *v)
+#if defined(arch_atomic64_fetch_sub_acquire)
+static inline s64
+atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_fetch_and(i, v);
+ return arch_atomic64_fetch_sub_acquire(i, v);
+}
+#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
+#endif
+
+#if defined(arch_atomic64_fetch_sub_release)
+static inline s64
+atomic64_fetch_sub_release(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_sub_release(i, v);
}
+#define atomic64_fetch_sub_release atomic64_fetch_sub_release
+#endif
-static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_fetch_sub_relaxed)
+static inline s64
+atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_sub_relaxed(i, v);
+}
+#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
+#endif
+
+#if defined(arch_atomic64_inc)
+static inline void
+atomic64_inc(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_inc(v);
+}
+#define atomic64_inc atomic64_inc
+#endif
+
+#if defined(arch_atomic64_inc_return)
+static inline s64
+atomic64_inc_return(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_inc_return(v);
+}
+#define atomic64_inc_return atomic64_inc_return
+#endif
+
+#if defined(arch_atomic64_inc_return_acquire)
+static inline s64
+atomic64_inc_return_acquire(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_inc_return_acquire(v);
+}
+#define atomic64_inc_return_acquire atomic64_inc_return_acquire
+#endif
+
+#if defined(arch_atomic64_inc_return_release)
+static inline s64
+atomic64_inc_return_release(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_inc_return_release(v);
+}
+#define atomic64_inc_return_release atomic64_inc_return_release
+#endif
+
+#if defined(arch_atomic64_inc_return_relaxed)
+static inline s64
+atomic64_inc_return_relaxed(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_inc_return_relaxed(v);
+}
+#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
+#endif
+
+#if defined(arch_atomic64_fetch_inc)
+static inline s64
+atomic64_fetch_inc(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_inc(v);
+}
+#define atomic64_fetch_inc atomic64_fetch_inc
+#endif
+
+#if defined(arch_atomic64_fetch_inc_acquire)
+static inline s64
+atomic64_fetch_inc_acquire(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_inc_acquire(v);
+}
+#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
+#endif
+
+#if defined(arch_atomic64_fetch_inc_release)
+static inline s64
+atomic64_fetch_inc_release(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_inc_release(v);
+}
+#define atomic64_fetch_inc_release atomic64_fetch_inc_release
+#endif
+
+#if defined(arch_atomic64_fetch_inc_relaxed)
+static inline s64
+atomic64_fetch_inc_relaxed(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_inc_relaxed(v);
+}
+#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
+#endif
+
+#if defined(arch_atomic64_dec)
+static inline void
+atomic64_dec(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_dec(v);
+}
+#define atomic64_dec atomic64_dec
+#endif
+
+#if defined(arch_atomic64_dec_return)
+static inline s64
+atomic64_dec_return(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_dec_return(v);
+}
+#define atomic64_dec_return atomic64_dec_return
+#endif
+
+#if defined(arch_atomic64_dec_return_acquire)
+static inline s64
+atomic64_dec_return_acquire(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_dec_return_acquire(v);
+}
+#define atomic64_dec_return_acquire atomic64_dec_return_acquire
+#endif
+
+#if defined(arch_atomic64_dec_return_release)
+static inline s64
+atomic64_dec_return_release(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_dec_return_release(v);
+}
+#define atomic64_dec_return_release atomic64_dec_return_release
+#endif
+
+#if defined(arch_atomic64_dec_return_relaxed)
+static inline s64
+atomic64_dec_return_relaxed(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_dec_return_relaxed(v);
+}
+#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
+#endif
+
+#if defined(arch_atomic64_fetch_dec)
+static inline s64
+atomic64_fetch_dec(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_dec(v);
+}
+#define atomic64_fetch_dec atomic64_fetch_dec
+#endif
+
+#if defined(arch_atomic64_fetch_dec_acquire)
+static inline s64
+atomic64_fetch_dec_acquire(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_dec_acquire(v);
+}
+#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
+#endif
+
+#if defined(arch_atomic64_fetch_dec_release)
+static inline s64
+atomic64_fetch_dec_release(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_dec_release(v);
+}
+#define atomic64_fetch_dec_release atomic64_fetch_dec_release
+#endif
+
+#if defined(arch_atomic64_fetch_dec_relaxed)
+static inline s64
+atomic64_fetch_dec_relaxed(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_dec_relaxed(v);
+}
+#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
+#endif
+
+static inline void
+atomic64_and(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_and(i, v);
+}
+#define atomic64_and atomic64_and
+
+#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
+static inline s64
+atomic64_fetch_and(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_fetch_and(i, v);
}
+#define atomic64_fetch_and atomic64_fetch_and
+#endif
-static __always_inline int atomic_fetch_or(int i, atomic_t *v)
+#if defined(arch_atomic64_fetch_and_acquire)
+static inline s64
+atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_fetch_or(i, v);
+ return arch_atomic64_fetch_and_acquire(i, v);
+}
+#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
+#endif
+
+#if defined(arch_atomic64_fetch_and_release)
+static inline s64
+atomic64_fetch_and_release(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_and_release(i, v);
+}
+#define atomic64_fetch_and_release atomic64_fetch_and_release
+#endif
+
+#if defined(arch_atomic64_fetch_and_relaxed)
+static inline s64
+atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_and_relaxed(i, v);
+}
+#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
+#endif
+
+#if defined(arch_atomic64_andnot)
+static inline void
+atomic64_andnot(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_andnot(i, v);
+}
+#define atomic64_andnot atomic64_andnot
+#endif
+
+#if defined(arch_atomic64_fetch_andnot)
+static inline s64
+atomic64_fetch_andnot(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_andnot(i, v);
+}
+#define atomic64_fetch_andnot atomic64_fetch_andnot
+#endif
+
+#if defined(arch_atomic64_fetch_andnot_acquire)
+static inline s64
+atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_andnot_acquire(i, v);
+}
+#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
+#endif
+
+#if defined(arch_atomic64_fetch_andnot_release)
+static inline s64
+atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_andnot_release(i, v);
+}
+#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
+#endif
+
+#if defined(arch_atomic64_fetch_andnot_relaxed)
+static inline s64
+atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_andnot_relaxed(i, v);
+}
+#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
+#endif
+
+static inline void
+atomic64_or(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_or(i, v);
}
+#define atomic64_or atomic64_or
-static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
+#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
+static inline s64
+atomic64_fetch_or(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_fetch_or(i, v);
}
+#define atomic64_fetch_or atomic64_fetch_or
+#endif
-static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
+#if defined(arch_atomic64_fetch_or_acquire)
+static inline s64
+atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_fetch_xor(i, v);
+ return arch_atomic64_fetch_or_acquire(i, v);
+}
+#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
+#endif
+
+#if defined(arch_atomic64_fetch_or_release)
+static inline s64
+atomic64_fetch_or_release(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_or_release(i, v);
+}
+#define atomic64_fetch_or_release atomic64_fetch_or_release
+#endif
+
+#if defined(arch_atomic64_fetch_or_relaxed)
+static inline s64
+atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_or_relaxed(i, v);
+}
+#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
+#endif
+
+static inline void
+atomic64_xor(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ arch_atomic64_xor(i, v);
}
+#define atomic64_xor atomic64_xor
-static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
+#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
+static inline s64
+atomic64_fetch_xor(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_fetch_xor(i, v);
}
+#define atomic64_fetch_xor atomic64_fetch_xor
+#endif
-#ifdef arch_atomic_sub_and_test
-#define atomic_sub_and_test atomic_sub_and_test
-static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
+#if defined(arch_atomic64_fetch_xor_acquire)
+static inline s64
+atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_sub_and_test(i, v);
+ return arch_atomic64_fetch_xor_acquire(i, v);
}
+#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif
-#ifdef arch_atomic64_sub_and_test
-#define atomic64_sub_and_test atomic64_sub_and_test
-static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_fetch_xor_release)
+static inline s64
+atomic64_fetch_xor_release(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_xor_release(i, v);
+}
+#define atomic64_fetch_xor_release atomic64_fetch_xor_release
+#endif
+
+#if defined(arch_atomic64_fetch_xor_relaxed)
+static inline s64
+atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_xor_relaxed(i, v);
+}
+#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
+#endif
+
+#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
+static inline s64
+atomic64_xchg(atomic64_t *v, s64 i)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_xchg(v, i);
+}
+#define atomic64_xchg atomic64_xchg
+#endif
+
+#if defined(arch_atomic64_xchg_acquire)
+static inline s64
+atomic64_xchg_acquire(atomic64_t *v, s64 i)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_xchg_acquire(v, i);
+}
+#define atomic64_xchg_acquire atomic64_xchg_acquire
+#endif
+
+#if defined(arch_atomic64_xchg_release)
+static inline s64
+atomic64_xchg_release(atomic64_t *v, s64 i)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_xchg_release(v, i);
+}
+#define atomic64_xchg_release atomic64_xchg_release
+#endif
+
+#if defined(arch_atomic64_xchg_relaxed)
+static inline s64
+atomic64_xchg_relaxed(atomic64_t *v, s64 i)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_xchg_relaxed(v, i);
+}
+#define atomic64_xchg_relaxed atomic64_xchg_relaxed
+#endif
+
+#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
+static inline s64
+atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_cmpxchg(v, old, new);
+}
+#define atomic64_cmpxchg atomic64_cmpxchg
+#endif
+
+#if defined(arch_atomic64_cmpxchg_acquire)
+static inline s64
+atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_cmpxchg_acquire(v, old, new);
+}
+#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
+#endif
+
+#if defined(arch_atomic64_cmpxchg_release)
+static inline s64
+atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_cmpxchg_release(v, old, new);
+}
+#define atomic64_cmpxchg_release atomic64_cmpxchg_release
+#endif
+
+#if defined(arch_atomic64_cmpxchg_relaxed)
+static inline s64
+atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_cmpxchg_relaxed(v, old, new);
+}
+#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
+#endif
+
+#if defined(arch_atomic64_try_cmpxchg)
+static inline bool
+atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
+{
+ kasan_check_write(v, sizeof(*v));
+ kasan_check_write(old, sizeof(*old));
+ return arch_atomic64_try_cmpxchg(v, old, new);
+}
+#define atomic64_try_cmpxchg atomic64_try_cmpxchg
+#endif
+
+#if defined(arch_atomic64_try_cmpxchg_acquire)
+static inline bool
+atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
+{
+ kasan_check_write(v, sizeof(*v));
+ kasan_check_write(old, sizeof(*old));
+ return arch_atomic64_try_cmpxchg_acquire(v, old, new);
+}
+#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
+#endif
+
+#if defined(arch_atomic64_try_cmpxchg_release)
+static inline bool
+atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
+{
+ kasan_check_write(v, sizeof(*v));
+ kasan_check_write(old, sizeof(*old));
+ return arch_atomic64_try_cmpxchg_release(v, old, new);
+}
+#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
+#endif
+
+#if defined(arch_atomic64_try_cmpxchg_relaxed)
+static inline bool
+atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
+{
+ kasan_check_write(v, sizeof(*v));
+ kasan_check_write(old, sizeof(*old));
+ return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
+}
+#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
+#endif
+
+#if defined(arch_atomic64_sub_and_test)
+static inline bool
+atomic64_sub_and_test(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_sub_and_test(i, v);
}
+#define atomic64_sub_and_test atomic64_sub_and_test
#endif
-#ifdef arch_atomic_add_negative
-#define atomic_add_negative atomic_add_negative
-static __always_inline bool atomic_add_negative(int i, atomic_t *v)
+#if defined(arch_atomic64_dec_and_test)
+static inline bool
+atomic64_dec_and_test(atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
- return arch_atomic_add_negative(i, v);
+ return arch_atomic64_dec_and_test(v);
}
+#define atomic64_dec_and_test atomic64_dec_and_test
#endif
-#ifdef arch_atomic64_add_negative
-#define atomic64_add_negative atomic64_add_negative
-static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
+#if defined(arch_atomic64_inc_and_test)
+static inline bool
+atomic64_inc_and_test(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_inc_and_test(v);
+}
+#define atomic64_inc_and_test atomic64_inc_and_test
+#endif
+
+#if defined(arch_atomic64_add_negative)
+static inline bool
+atomic64_add_negative(s64 i, atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_add_negative(i, v);
}
+#define atomic64_add_negative atomic64_add_negative
+#endif
+
+#if defined(arch_atomic64_fetch_add_unless)
+static inline s64
+atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_fetch_add_unless(v, a, u);
+}
+#define atomic64_fetch_add_unless atomic64_fetch_add_unless
+#endif
+
+#if defined(arch_atomic64_add_unless)
+static inline bool
+atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_add_unless(v, a, u);
+}
+#define atomic64_add_unless atomic64_add_unless
+#endif
+
+#if defined(arch_atomic64_inc_not_zero)
+static inline bool
+atomic64_inc_not_zero(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_inc_not_zero(v);
+}
+#define atomic64_inc_not_zero atomic64_inc_not_zero
+#endif
+
+#if defined(arch_atomic64_inc_unless_negative)
+static inline bool
+atomic64_inc_unless_negative(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_inc_unless_negative(v);
+}
+#define atomic64_inc_unless_negative atomic64_inc_unless_negative
+#endif
+
+#if defined(arch_atomic64_dec_unless_positive)
+static inline bool
+atomic64_dec_unless_positive(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_dec_unless_positive(v);
+}
+#define atomic64_dec_unless_positive atomic64_dec_unless_positive
+#endif
+
+#if defined(arch_atomic64_dec_if_positive)
+static inline s64
+atomic64_dec_if_positive(atomic64_t *v)
+{
+ kasan_check_write(v, sizeof(*v));
+ return arch_atomic64_dec_if_positive(v);
+}
+#define atomic64_dec_if_positive atomic64_dec_if_positive
+#endif
+
+#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
+#define xchg(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_xchg(__ai_ptr, __VA_ARGS__); \
+})
+#endif
+
+#if defined(arch_xchg_acquire)
+#define xchg_acquire(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_xchg_acquire(__ai_ptr, __VA_ARGS__); \
+})
+#endif
+
+#if defined(arch_xchg_release)
+#define xchg_release(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_xchg_release(__ai_ptr, __VA_ARGS__); \
+})
+#endif
+
+#if defined(arch_xchg_relaxed)
+#define xchg_relaxed(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_xchg_relaxed(__ai_ptr, __VA_ARGS__); \
+})
+#endif
+
+#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
+#define cmpxchg(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg(__ai_ptr, __VA_ARGS__); \
+})
+#endif
+
+#if defined(arch_cmpxchg_acquire)
+#define cmpxchg_acquire(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__); \
+})
+#endif
+
+#if defined(arch_cmpxchg_release)
+#define cmpxchg_release(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg_release(__ai_ptr, __VA_ARGS__); \
+})
#endif
-#define xchg(ptr, new) \
+#if defined(arch_cmpxchg_relaxed)
+#define cmpxchg_relaxed(ptr, ...) \
({ \
typeof(ptr) __ai_ptr = (ptr); \
- kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_xchg(__ai_ptr, (new)); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__); \
})
+#endif
-#define cmpxchg(ptr, old, new) \
+#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
+#define cmpxchg64(ptr, ...) \
({ \
typeof(ptr) __ai_ptr = (ptr); \
- kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg(__ai_ptr, (old), (new)); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg64(__ai_ptr, __VA_ARGS__); \
})
+#endif
-#define sync_cmpxchg(ptr, old, new) \
+#if defined(arch_cmpxchg64_acquire)
+#define cmpxchg64_acquire(ptr, ...) \
({ \
typeof(ptr) __ai_ptr = (ptr); \
- kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_sync_cmpxchg(__ai_ptr, (old), (new)); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__); \
})
+#endif
-#define cmpxchg_local(ptr, old, new) \
+#if defined(arch_cmpxchg64_release)
+#define cmpxchg64_release(ptr, ...) \
({ \
typeof(ptr) __ai_ptr = (ptr); \
- kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg_local(__ai_ptr, (old), (new)); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__); \
})
+#endif
-#define cmpxchg64(ptr, old, new) \
+#if defined(arch_cmpxchg64_relaxed)
+#define cmpxchg64_relaxed(ptr, ...) \
({ \
typeof(ptr) __ai_ptr = (ptr); \
- kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg64(__ai_ptr, (old), (new)); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__); \
})
+#endif
-#define cmpxchg64_local(ptr, old, new) \
+#define cmpxchg_local(ptr, ...) \
({ \
typeof(ptr) __ai_ptr = (ptr); \
- kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg64_local(__ai_ptr, (old), (new)); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg_local(__ai_ptr, __VA_ARGS__); \
})
-#define cmpxchg_double(p1, p2, o1, o2, n1, n2) \
+#define cmpxchg64_local(ptr, ...) \
({ \
- typeof(p1) __ai_p1 = (p1); \
- kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1)); \
- arch_cmpxchg_double(__ai_p1, (p2), (o1), (o2), (n1), (n2)); \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \
})
-#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \
-({ \
- typeof(p1) __ai_p1 = (p1); \
- kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1)); \
- arch_cmpxchg_double_local(__ai_p1, (p2), (o1), (o2), (n1), (n2)); \
+#define sync_cmpxchg(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
+ arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \
+})
+
+#define cmpxchg_double(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
+ arch_cmpxchg_double(__ai_ptr, __VA_ARGS__); \
+})
+
+#define cmpxchg_double_local(ptr, ...) \
+({ \
+ typeof(ptr) __ai_ptr = (ptr); \
+ kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
+ arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__); \
})
-#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
+#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
+// b29b625d5de9280f680e42c7be859b55b15e5f6a
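
A note on the double-word wrappers that close this file: they tell KASAN about 2 * sizeof(*ptr) bytes because the operation updates two adjacent machine words as a unit. A hedged usage sketch, assuming an architecture that provides arch_cmpxchg_double() (e.g. via x86's cmpxchg16b) and using hypothetical names throughout:

/* Hypothetical two-word update; the words must be adjacent and aligned. */
struct two_words {
	unsigned long first;
	unsigned long second;
} __aligned(2 * sizeof(unsigned long));

static bool update_pair(struct two_words *w,
			unsigned long o1, unsigned long o2,
			unsigned long n1, unsigned long n2)
{
	/* Nonzero on success; KASAN sees one 2*sizeof(long) write. */
	return cmpxchg_double(&w->first, &w->second, o1, o2, n1, n2);
}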
diff --git a/include/asm-generic/atomic-long.h b/include/asm-generic/atomic-long.h
index 87d14476edc2..881c7e27af28 100644
--- a/include/asm-generic/atomic-long.h
+++ b/include/asm-generic/atomic-long.h
@@ -1,269 +1,1013 @@
-/* SPDX-License-Identifier: GPL-2.0 */
+// SPDX-License-Identifier: GPL-2.0
+
+// Generated by scripts/atomic/gen-atomic-long.sh
+// DO NOT MODIFY THIS FILE DIRECTLY
+
#ifndef _ASM_GENERIC_ATOMIC_LONG_H
#define _ASM_GENERIC_ATOMIC_LONG_H
-/*
- * Copyright (C) 2005 Silicon Graphics, Inc.
- * Christoph Lameter
- *
- * Allows providing arch-independent atomic definitions without the need to
- * edit all arch specific atomic.h files.
- */
#include <asm/types.h>
-/*
- * Support for atomic_long_t
- *
- * Casts for parameters are avoided for existing atomic functions in order to
- * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
- * macros of a platform may have.
- */
+#ifdef CONFIG_64BIT
+typedef atomic64_t atomic_long_t;
+#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
+#define atomic_long_cond_read_acquire atomic64_cond_read_acquire
+#define atomic_long_cond_read_relaxed atomic64_cond_read_relaxed
+#else
+typedef atomic_t atomic_long_t;
+#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
+#define atomic_long_cond_read_acquire atomic_cond_read_acquire
+#define atomic_long_cond_read_relaxed atomic_cond_read_relaxed
+#endif
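
So atomic_long_t is exactly atomic64_t on 64-bit kernels and atomic_t on 32-bit ones, and every atomic_long_*() call below forwards to the matching native-word operation. A brief usage sketch (the counter name is hypothetical):

#include <linux/atomic.h>

static atomic_long_t nr_bytes = ATOMIC_LONG_INIT(0);

static void account(long delta)
{
	/* Maps to atomic64_add() or atomic_add() depending on CONFIG_64BIT. */
	atomic_long_add(delta, &nr_bytes);
}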
-#if BITS_PER_LONG == 64
+#ifdef CONFIG_64BIT
-typedef atomic64_t atomic_long_t;
+static inline long
+atomic_long_read(const atomic_long_t *v)
+{
+ return atomic64_read(v);
+}
-#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
-#define ATOMIC_LONG_PFX(x) atomic64 ## x
-#define ATOMIC_LONG_TYPE s64
+static inline long
+atomic_long_read_acquire(const atomic_long_t *v)
+{
+ return atomic64_read_acquire(v);
+}
-#else
+static inline void
+atomic_long_set(atomic_long_t *v, long i)
+{
+ atomic64_set(v, i);
+}
-typedef atomic_t atomic_long_t;
+static inline void
+atomic_long_set_release(atomic_long_t *v, long i)
+{
+ atomic64_set_release(v, i);
+}
-#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
-#define ATOMIC_LONG_PFX(x) atomic ## x
-#define ATOMIC_LONG_TYPE int
+static inline void
+atomic_long_add(long i, atomic_long_t *v)
+{
+ atomic64_add(i, v);
+}
-#endif
+static inline long
+atomic_long_add_return(long i, atomic_long_t *v)
+{
+ return atomic64_add_return(i, v);
+}
+
+static inline long
+atomic_long_add_return_acquire(long i, atomic_long_t *v)
+{
+ return atomic64_add_return_acquire(i, v);
+}
+
+static inline long
+atomic_long_add_return_release(long i, atomic_long_t *v)
+{
+ return atomic64_add_return_release(i, v);
+}
+
+static inline long
+atomic_long_add_return_relaxed(long i, atomic_long_t *v)
+{
+ return atomic64_add_return_relaxed(i, v);
+}
+
+static inline long
+atomic_long_fetch_add(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_add(i, v);
+}
+
+static inline long
+atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_add_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_add_release(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_add_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_add_relaxed(i, v);
+}
+
+static inline void
+atomic_long_sub(long i, atomic_long_t *v)
+{
+ atomic64_sub(i, v);
+}
+
+static inline long
+atomic_long_sub_return(long i, atomic_long_t *v)
+{
+ return atomic64_sub_return(i, v);
+}
+
+static inline long
+atomic_long_sub_return_acquire(long i, atomic_long_t *v)
+{
+ return atomic64_sub_return_acquire(i, v);
+}
+
+static inline long
+atomic_long_sub_return_release(long i, atomic_long_t *v)
+{
+ return atomic64_sub_return_release(i, v);
+}
+
+static inline long
+atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
+{
+ return atomic64_sub_return_relaxed(i, v);
+}
+
+static inline long
+atomic_long_fetch_sub(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_sub(i, v);
+}
+
+static inline long
+atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_sub_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_sub_release(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_sub_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_sub_relaxed(i, v);
+}
+
+static inline void
+atomic_long_inc(atomic_long_t *v)
+{
+ atomic64_inc(v);
+}
+
+static inline long
+atomic_long_inc_return(atomic_long_t *v)
+{
+ return atomic64_inc_return(v);
+}
+
+static inline long
+atomic_long_inc_return_acquire(atomic_long_t *v)
+{
+ return atomic64_inc_return_acquire(v);
+}
+
+static inline long
+atomic_long_inc_return_release(atomic_long_t *v)
+{
+ return atomic64_inc_return_release(v);
+}
+
+static inline long
+atomic_long_inc_return_relaxed(atomic_long_t *v)
+{
+ return atomic64_inc_return_relaxed(v);
+}
+
+static inline long
+atomic_long_fetch_inc(atomic_long_t *v)
+{
+ return atomic64_fetch_inc(v);
+}
+
+static inline long
+atomic_long_fetch_inc_acquire(atomic_long_t *v)
+{
+ return atomic64_fetch_inc_acquire(v);
+}
+
+static inline long
+atomic_long_fetch_inc_release(atomic_long_t *v)
+{
+ return atomic64_fetch_inc_release(v);
+}
+
+static inline long
+atomic_long_fetch_inc_relaxed(atomic_long_t *v)
+{
+ return atomic64_fetch_inc_relaxed(v);
+}
+
+static inline void
+atomic_long_dec(atomic_long_t *v)
+{
+ atomic64_dec(v);
+}
+
+static inline long
+atomic_long_dec_return(atomic_long_t *v)
+{
+ return atomic64_dec_return(v);
+}
+
+static inline long
+atomic_long_dec_return_acquire(atomic_long_t *v)
+{
+ return atomic64_dec_return_acquire(v);
+}
+
+static inline long
+atomic_long_dec_return_release(atomic_long_t *v)
+{
+ return atomic64_dec_return_release(v);
+}
+
+static inline long
+atomic_long_dec_return_relaxed(atomic_long_t *v)
+{
+ return atomic64_dec_return_relaxed(v);
+}
+
+static inline long
+atomic_long_fetch_dec(atomic_long_t *v)
+{
+ return atomic64_fetch_dec(v);
+}
+
+static inline long
+atomic_long_fetch_dec_acquire(atomic_long_t *v)
+{
+ return atomic64_fetch_dec_acquire(v);
+}
+
+static inline long
+atomic_long_fetch_dec_release(atomic_long_t *v)
+{
+ return atomic64_fetch_dec_release(v);
+}
+
+static inline long
+atomic_long_fetch_dec_relaxed(atomic_long_t *v)
+{
+ return atomic64_fetch_dec_relaxed(v);
+}
+
+static inline void
+atomic_long_and(long i, atomic_long_t *v)
+{
+ atomic64_and(i, v);
+}
+
+static inline long
+atomic_long_fetch_and(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_and(i, v);
+}
+
+static inline long
+atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_and_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_and_release(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_and_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_and_relaxed(i, v);
+}
+
+static inline void
+atomic_long_andnot(long i, atomic_long_t *v)
+{
+ atomic64_andnot(i, v);
+}
+
+static inline long
+atomic_long_fetch_andnot(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_andnot(i, v);
+}
+
+static inline long
+atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_andnot_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_andnot_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_andnot_relaxed(i, v);
+}
+
+static inline void
+atomic_long_or(long i, atomic_long_t *v)
+{
+ atomic64_or(i, v);
+}
+
+static inline long
+atomic_long_fetch_or(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_or(i, v);
+}
+
+static inline long
+atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_or_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_or_release(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_or_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_or_relaxed(i, v);
+}
+
+static inline void
+atomic_long_xor(long i, atomic_long_t *v)
+{
+ atomic64_xor(i, v);
+}
+
+static inline long
+atomic_long_fetch_xor(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_xor(i, v);
+}
+
+static inline long
+atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_xor_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_xor_release(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_xor_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
+{
+ return atomic64_fetch_xor_relaxed(i, v);
+}
+
+static inline long
+atomic_long_xchg(atomic_long_t *v, long i)
+{
+ return atomic64_xchg(v, i);
+}
+
+static inline long
+atomic_long_xchg_acquire(atomic_long_t *v, long i)
+{
+ return atomic64_xchg_acquire(v, i);
+}
+
+static inline long
+atomic_long_xchg_release(atomic_long_t *v, long i)
+{
+ return atomic64_xchg_release(v, i);
+}
+
+static inline long
+atomic_long_xchg_relaxed(atomic_long_t *v, long i)
+{
+ return atomic64_xchg_relaxed(v, i);
+}
+
+static inline long
+atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
+{
+ return atomic64_cmpxchg(v, old, new);
+}
+
+static inline long
+atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
+{
+ return atomic64_cmpxchg_acquire(v, old, new);
+}
+
+static inline long
+atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
+{
+ return atomic64_cmpxchg_release(v, old, new);
+}
+
+static inline long
+atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
+{
+ return atomic64_cmpxchg_relaxed(v, old, new);
+}
+
+static inline bool
+atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
+{
+ return atomic64_try_cmpxchg(v, (s64 *)old, new);
+}
+
+static inline bool
+atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
+{
+ return atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
+}
+
+static inline bool
+atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
+{
+ return atomic64_try_cmpxchg_release(v, (s64 *)old, new);
+}
+
+static inline bool
+atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
+{
+ return atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
+}
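
The (s64 *) casts in the try_cmpxchg wrappers are the one place this file leans on representation: under CONFIG_64BIT, long and s64 have the same width, so handing a long * to atomic64_try_cmpxchg() is well defined. An illustrative compile-time check of that assumption (not part of the patch) could look like:

/* Sanity check: the casts above are only sound because long == s64 here. */
static inline void atomic_long_assert_widths(void)
{
	BUILD_BUG_ON(sizeof(long) != sizeof(s64));
}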
+
+static inline bool
+atomic_long_sub_and_test(long i, atomic_long_t *v)
+{
+ return atomic64_sub_and_test(i, v);
+}
+
+static inline bool
+atomic_long_dec_and_test(atomic_long_t *v)
+{
+ return atomic64_dec_and_test(v);
+}
+
+static inline bool
+atomic_long_inc_and_test(atomic_long_t *v)
+{
+ return atomic64_inc_and_test(v);
+}
+
+static inline bool
+atomic_long_add_negative(long i, atomic_long_t *v)
+{
+ return atomic64_add_negative(i, v);
+}
+
+static inline long
+atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
+{
+ return atomic64_fetch_add_unless(v, a, u);
+}
+
+static inline bool
+atomic_long_add_unless(atomic_long_t *v, long a, long u)
+{
+ return atomic64_add_unless(v, a, u);
+}
+
+static inline bool
+atomic_long_inc_not_zero(atomic_long_t *v)
+{
+ return atomic64_inc_not_zero(v);
+}
+
+static inline bool
+atomic_long_inc_unless_negative(atomic_long_t *v)
+{
+ return atomic64_inc_unless_negative(v);
+}
+
+static inline bool
+atomic_long_dec_unless_positive(atomic_long_t *v)
+{
+ return atomic64_dec_unless_positive(v);
+}
+
+static inline long
+atomic_long_dec_if_positive(atomic_long_t *v)
+{
+ return atomic64_dec_if_positive(v);
+}
+
+#else /* CONFIG_64BIT */
+
+static inline long
+atomic_long_read(const atomic_long_t *v)
+{
+ return atomic_read(v);
+}
+
+static inline long
+atomic_long_read_acquire(const atomic_long_t *v)
+{
+ return atomic_read_acquire(v);
+}
+
+static inline void
+atomic_long_set(atomic_long_t *v, long i)
+{
+ atomic_set(v, i);
+}
+
+static inline void
+atomic_long_set_release(atomic_long_t *v, long i)
+{
+ atomic_set_release(v, i);
+}
+
+static inline void
+atomic_long_add(long i, atomic_long_t *v)
+{
+ atomic_add(i, v);
+}
+
+static inline long
+atomic_long_add_return(long i, atomic_long_t *v)
+{
+ return atomic_add_return(i, v);
+}
+
+static inline long
+atomic_long_add_return_acquire(long i, atomic_long_t *v)
+{
+ return atomic_add_return_acquire(i, v);
+}
+
+static inline long
+atomic_long_add_return_release(long i, atomic_long_t *v)
+{
+ return atomic_add_return_release(i, v);
+}
+
+static inline long
+atomic_long_add_return_relaxed(long i, atomic_long_t *v)
+{
+ return atomic_add_return_relaxed(i, v);
+}
+
+static inline long
+atomic_long_fetch_add(long i, atomic_long_t *v)
+{
+ return atomic_fetch_add(i, v);
+}
+
+static inline long
+atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
+{
+ return atomic_fetch_add_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_add_release(long i, atomic_long_t *v)
+{
+ return atomic_fetch_add_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
+{
+ return atomic_fetch_add_relaxed(i, v);
+}
+
+static inline void
+atomic_long_sub(long i, atomic_long_t *v)
+{
+ atomic_sub(i, v);
+}
+
+static inline long
+atomic_long_sub_return(long i, atomic_long_t *v)
+{
+ return atomic_sub_return(i, v);
+}
+
+static inline long
+atomic_long_sub_return_acquire(long i, atomic_long_t *v)
+{
+ return atomic_sub_return_acquire(i, v);
+}
+
+static inline long
+atomic_long_sub_return_release(long i, atomic_long_t *v)
+{
+ return atomic_sub_return_release(i, v);
+}
+
+static inline long
+atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
+{
+ return atomic_sub_return_relaxed(i, v);
+}
+
+static inline long
+atomic_long_fetch_sub(long i, atomic_long_t *v)
+{
+ return atomic_fetch_sub(i, v);
+}
+
+static inline long
+atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
+{
+ return atomic_fetch_sub_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_sub_release(long i, atomic_long_t *v)
+{
+ return atomic_fetch_sub_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
+{
+ return atomic_fetch_sub_relaxed(i, v);
+}
+
+static inline void
+atomic_long_inc(atomic_long_t *v)
+{
+ atomic_inc(v);
+}
+
+static inline long
+atomic_long_inc_return(atomic_long_t *v)
+{
+ return atomic_inc_return(v);
+}
+
+static inline long
+atomic_long_inc_return_acquire(atomic_long_t *v)
+{
+ return atomic_inc_return_acquire(v);
+}
+
+static inline long
+atomic_long_inc_return_release(atomic_long_t *v)
+{
+ return atomic_inc_return_release(v);
+}
+
+static inline long
+atomic_long_inc_return_relaxed(atomic_long_t *v)
+{
+ return atomic_inc_return_relaxed(v);
+}
+
+static inline long
+atomic_long_fetch_inc(atomic_long_t *v)
+{
+ return atomic_fetch_inc(v);
+}
+
+static inline long
+atomic_long_fetch_inc_acquire(atomic_long_t *v)
+{
+ return atomic_fetch_inc_acquire(v);
+}
+
+static inline long
+atomic_long_fetch_inc_release(atomic_long_t *v)
+{
+ return atomic_fetch_inc_release(v);
+}
+
+static inline long
+atomic_long_fetch_inc_relaxed(atomic_long_t *v)
+{
+ return atomic_fetch_inc_relaxed(v);
+}
+
+static inline void
+atomic_long_dec(atomic_long_t *v)
+{
+ atomic_dec(v);
+}
+
+static inline long
+atomic_long_dec_return(atomic_long_t *v)
+{
+ return atomic_dec_return(v);
+}
+
+static inline long
+atomic_long_dec_return_acquire(atomic_long_t *v)
+{
+ return atomic_dec_return_acquire(v);
+}
+
+static inline long
+atomic_long_dec_return_release(atomic_long_t *v)
+{
+ return atomic_dec_return_release(v);
+}
+
+static inline long
+atomic_long_dec_return_relaxed(atomic_long_t *v)
+{
+ return atomic_dec_return_relaxed(v);
+}
+
+static inline long
+atomic_long_fetch_dec(atomic_long_t *v)
+{
+ return atomic_fetch_dec(v);
+}
+
+static inline long
+atomic_long_fetch_dec_acquire(atomic_long_t *v)
+{
+ return atomic_fetch_dec_acquire(v);
+}
+
+static inline long
+atomic_long_fetch_dec_release(atomic_long_t *v)
+{
+ return atomic_fetch_dec_release(v);
+}
+
+static inline long
+atomic_long_fetch_dec_relaxed(atomic_long_t *v)
+{
+ return atomic_fetch_dec_relaxed(v);
+}
+
+static inline void
+atomic_long_and(long i, atomic_long_t *v)
+{
+ atomic_and(i, v);
+}
+
+static inline long
+atomic_long_fetch_and(long i, atomic_long_t *v)
+{
+ return atomic_fetch_and(i, v);
+}
+
+static inline long
+atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
+{
+ return atomic_fetch_and_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_and_release(long i, atomic_long_t *v)
+{
+ return atomic_fetch_and_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
+{
+ return atomic_fetch_and_relaxed(i, v);
+}
+
+static inline void
+atomic_long_andnot(long i, atomic_long_t *v)
+{
+ atomic_andnot(i, v);
+}
+
+static inline long
+atomic_long_fetch_andnot(long i, atomic_long_t *v)
+{
+ return atomic_fetch_andnot(i, v);
+}
+
+static inline long
+atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
+{
+ return atomic_fetch_andnot_acquire(i, v);
+}
+
+static inline long
+atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
+{
+ return atomic_fetch_andnot_release(i, v);
+}
+
+static inline long
+atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
+{
+ return atomic_fetch_andnot_relaxed(i, v);
+}
-#define ATOMIC_LONG_READ_OP(mo) \
-static inline long atomic_long_read##mo(const atomic_long_t *l) \
-{ \
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l; \
- \
- return (long)ATOMIC_LONG_PFX(_read##mo)(v); \
-}
-ATOMIC_LONG_READ_OP()
-ATOMIC_LONG_READ_OP(_acquire)
-
-#undef ATOMIC_LONG_READ_OP
-
-#define ATOMIC_LONG_SET_OP(mo) \
-static inline void atomic_long_set##mo(atomic_long_t *l, long i) \
-{ \
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l; \
- \
- ATOMIC_LONG_PFX(_set##mo)(v, i); \
-}
-ATOMIC_LONG_SET_OP()
-ATOMIC_LONG_SET_OP(_release)
-
-#undef ATOMIC_LONG_SET_OP
-
-#define ATOMIC_LONG_ADD_SUB_OP(op, mo) \
-static inline long \
-atomic_long_##op##_return##mo(long i, atomic_long_t *l) \
-{ \
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l; \
- \
- return (long)ATOMIC_LONG_PFX(_##op##_return##mo)(i, v); \
-}
-ATOMIC_LONG_ADD_SUB_OP(add,)
-ATOMIC_LONG_ADD_SUB_OP(add, _relaxed)
-ATOMIC_LONG_ADD_SUB_OP(add, _acquire)
-ATOMIC_LONG_ADD_SUB_OP(add, _release)
-ATOMIC_LONG_ADD_SUB_OP(sub,)
-ATOMIC_LONG_ADD_SUB_OP(sub, _relaxed)
-ATOMIC_LONG_ADD_SUB_OP(sub, _acquire)
-ATOMIC_LONG_ADD_SUB_OP(sub, _release)
-
-#undef ATOMIC_LONG_ADD_SUB_OP
-
-#define atomic_long_cmpxchg_relaxed(l, old, new) \
- (ATOMIC_LONG_PFX(_cmpxchg_relaxed)((ATOMIC_LONG_PFX(_t) *)(l), \
- (old), (new)))
-#define atomic_long_cmpxchg_acquire(l, old, new) \
- (ATOMIC_LONG_PFX(_cmpxchg_acquire)((ATOMIC_LONG_PFX(_t) *)(l), \
- (old), (new)))
-#define atomic_long_cmpxchg_release(l, old, new) \
- (ATOMIC_LONG_PFX(_cmpxchg_release)((ATOMIC_LONG_PFX(_t) *)(l), \
- (old), (new)))
-#define atomic_long_cmpxchg(l, old, new) \
- (ATOMIC_LONG_PFX(_cmpxchg)((ATOMIC_LONG_PFX(_t) *)(l), (old), (new)))
-
-
-#define atomic_long_try_cmpxchg_relaxed(l, old, new) \
- (ATOMIC_LONG_PFX(_try_cmpxchg_relaxed)((ATOMIC_LONG_PFX(_t) *)(l), \
- (ATOMIC_LONG_TYPE *)(old), (ATOMIC_LONG_TYPE)(new)))
-#define atomic_long_try_cmpxchg_acquire(l, old, new) \
- (ATOMIC_LONG_PFX(_try_cmpxchg_acquire)((ATOMIC_LONG_PFX(_t) *)(l), \
- (ATOMIC_LONG_TYPE *)(old), (ATOMIC_LONG_TYPE)(new)))
-#define atomic_long_try_cmpxchg_release(l, old, new) \
- (ATOMIC_LONG_PFX(_try_cmpxchg_release)((ATOMIC_LONG_PFX(_t) *)(l), \
- (ATOMIC_LONG_TYPE *)(old), (ATOMIC_LONG_TYPE)(new)))
-#define atomic_long_try_cmpxchg(l, old, new) \
- (ATOMIC_LONG_PFX(_try_cmpxchg)((ATOMIC_LONG_PFX(_t) *)(l), \
- (ATOMIC_LONG_TYPE *)(old), (ATOMIC_LONG_TYPE)(new)))
-
-
-#define atomic_long_xchg_relaxed(v, new) \
- (ATOMIC_LONG_PFX(_xchg_relaxed)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
-#define atomic_long_xchg_acquire(v, new) \
- (ATOMIC_LONG_PFX(_xchg_acquire)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
-#define atomic_long_xchg_release(v, new) \
- (ATOMIC_LONG_PFX(_xchg_release)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
-#define atomic_long_xchg(v, new) \
- (ATOMIC_LONG_PFX(_xchg)((ATOMIC_LONG_PFX(_t) *)(v), (new)))
-
-static __always_inline void atomic_long_inc(atomic_long_t *l)
-{
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
-
- ATOMIC_LONG_PFX(_inc)(v);
-}
-
-static __always_inline void atomic_long_dec(atomic_long_t *l)
+static inline void
+atomic_long_or(long i, atomic_long_t *v)
{
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
+ atomic_or(i, v);
+}
- ATOMIC_LONG_PFX(_dec)(v);
+static inline long
+atomic_long_fetch_or(long i, atomic_long_t *v)
+{
+ return atomic_fetch_or(i, v);
}
-#define ATOMIC_LONG_FETCH_OP(op, mo) \
-static inline long \
-atomic_long_fetch_##op##mo(long i, atomic_long_t *l) \
-{ \
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l; \
- \
- return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(i, v); \
+static inline long
+atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
+{
+ return atomic_fetch_or_acquire(i, v);
}
-ATOMIC_LONG_FETCH_OP(add, )
-ATOMIC_LONG_FETCH_OP(add, _relaxed)
-ATOMIC_LONG_FETCH_OP(add, _acquire)
-ATOMIC_LONG_FETCH_OP(add, _release)
-ATOMIC_LONG_FETCH_OP(sub, )
-ATOMIC_LONG_FETCH_OP(sub, _relaxed)
-ATOMIC_LONG_FETCH_OP(sub, _acquire)
-ATOMIC_LONG_FETCH_OP(sub, _release)
-ATOMIC_LONG_FETCH_OP(and, )
-ATOMIC_LONG_FETCH_OP(and, _relaxed)
-ATOMIC_LONG_FETCH_OP(and, _acquire)
-ATOMIC_LONG_FETCH_OP(and, _release)
-ATOMIC_LONG_FETCH_OP(andnot, )
-ATOMIC_LONG_FETCH_OP(andnot, _relaxed)
-ATOMIC_LONG_FETCH_OP(andnot, _acquire)
-ATOMIC_LONG_FETCH_OP(andnot, _release)
-ATOMIC_LONG_FETCH_OP(or, )
-ATOMIC_LONG_FETCH_OP(or, _relaxed)
-ATOMIC_LONG_FETCH_OP(or, _acquire)
-ATOMIC_LONG_FETCH_OP(or, _release)
-ATOMIC_LONG_FETCH_OP(xor, )
-ATOMIC_LONG_FETCH_OP(xor, _relaxed)
-ATOMIC_LONG_FETCH_OP(xor, _acquire)
-ATOMIC_LONG_FETCH_OP(xor, _release)
+static inline long
+atomic_long_fetch_or_release(long i, atomic_long_t *v)
+{
+ return atomic_fetch_or_release(i, v);
+}
-#undef ATOMIC_LONG_FETCH_OP
+static inline long
+atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
+{
+ return atomic_fetch_or_relaxed(i, v);
+}
-#define ATOMIC_LONG_FETCH_INC_DEC_OP(op, mo) \
-static inline long \
-atomic_long_fetch_##op##mo(atomic_long_t *l) \
-{ \
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l; \
- \
- return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(v); \
+static inline void
+atomic_long_xor(long i, atomic_long_t *v)
+{
+ atomic_xor(i, v);
}
-ATOMIC_LONG_FETCH_INC_DEC_OP(inc,)
-ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _relaxed)
-ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _acquire)
-ATOMIC_LONG_FETCH_INC_DEC_OP(inc, _release)
-ATOMIC_LONG_FETCH_INC_DEC_OP(dec,)
-ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _relaxed)
-ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _acquire)
-ATOMIC_LONG_FETCH_INC_DEC_OP(dec, _release)
+static inline long
+atomic_long_fetch_xor(long i, atomic_long_t *v)
+{
+ return atomic_fetch_xor(i, v);
+}
-#undef ATOMIC_LONG_FETCH_INC_DEC_OP
+static inline long
+atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
+{
+ return atomic_fetch_xor_acquire(i, v);
+}
-#define ATOMIC_LONG_OP(op) \
-static __always_inline void \
-atomic_long_##op(long i, atomic_long_t *l) \
-{ \
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l; \
- \
- ATOMIC_LONG_PFX(_##op)(i, v); \
+static inline long
+atomic_long_fetch_xor_release(long i, atomic_long_t *v)
+{
+ return atomic_fetch_xor_release(i, v);
}
-ATOMIC_LONG_OP(add)
-ATOMIC_LONG_OP(sub)
-ATOMIC_LONG_OP(and)
-ATOMIC_LONG_OP(andnot)
-ATOMIC_LONG_OP(or)
-ATOMIC_LONG_OP(xor)
+static inline long
+atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
+{
+ return atomic_fetch_xor_relaxed(i, v);
+}
-#undef ATOMIC_LONG_OP
+static inline long
+atomic_long_xchg(atomic_long_t *v, long i)
+{
+ return atomic_xchg(v, i);
+}
-static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
+static inline long
+atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
+ return atomic_xchg_acquire(v, i);
+}
- return ATOMIC_LONG_PFX(_sub_and_test)(i, v);
+static inline long
+atomic_long_xchg_release(atomic_long_t *v, long i)
+{
+ return atomic_xchg_release(v, i);
}
-static inline int atomic_long_dec_and_test(atomic_long_t *l)
+static inline long
+atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
+ return atomic_xchg_relaxed(v, i);
+}
- return ATOMIC_LONG_PFX(_dec_and_test)(v);
+static inline long
+atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
+{
+ return atomic_cmpxchg(v, old, new);
}
-static inline int atomic_long_inc_and_test(atomic_long_t *l)
+static inline long
+atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
+ return atomic_cmpxchg_acquire(v, old, new);
+}
- return ATOMIC_LONG_PFX(_inc_and_test)(v);
+static inline long
+atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
+{
+ return atomic_cmpxchg_release(v, old, new);
}
-static inline int atomic_long_add_negative(long i, atomic_long_t *l)
+static inline long
+atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
+ return atomic_cmpxchg_relaxed(v, old, new);
+}
- return ATOMIC_LONG_PFX(_add_negative)(i, v);
+static inline bool
+atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
+{
+ return atomic_try_cmpxchg(v, (int *)old, new);
}
-#define ATOMIC_LONG_INC_DEC_OP(op, mo) \
-static inline long \
-atomic_long_##op##_return##mo(atomic_long_t *l) \
-{ \
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l; \
- \
- return (long)ATOMIC_LONG_PFX(_##op##_return##mo)(v); \
+static inline bool
+atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
+{
+ return atomic_try_cmpxchg_acquire(v, (int *)old, new);
}
-ATOMIC_LONG_INC_DEC_OP(inc,)
-ATOMIC_LONG_INC_DEC_OP(inc, _relaxed)
-ATOMIC_LONG_INC_DEC_OP(inc, _acquire)
-ATOMIC_LONG_INC_DEC_OP(inc, _release)
-ATOMIC_LONG_INC_DEC_OP(dec,)
-ATOMIC_LONG_INC_DEC_OP(dec, _relaxed)
-ATOMIC_LONG_INC_DEC_OP(dec, _acquire)
-ATOMIC_LONG_INC_DEC_OP(dec, _release)
-#undef ATOMIC_LONG_INC_DEC_OP
+static inline bool
+atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
+{
+ return atomic_try_cmpxchg_release(v, (int *)old, new);
+}
+
+static inline bool
+atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
+{
+ return atomic_try_cmpxchg_relaxed(v, (int *)old, new);
+}
+
+static inline bool
+atomic_long_sub_and_test(long i, atomic_long_t *v)
+{
+ return atomic_sub_and_test(i, v);
+}
+
+static inline bool
+atomic_long_dec_and_test(atomic_long_t *v)
+{
+ return atomic_dec_and_test(v);
+}
+
+static inline bool
+atomic_long_inc_and_test(atomic_long_t *v)
+{
+ return atomic_inc_and_test(v);
+}
+
+static inline bool
+atomic_long_add_negative(long i, atomic_long_t *v)
+{
+ return atomic_add_negative(i, v);
+}
+
+static inline long
+atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
+{
+ return atomic_fetch_add_unless(v, a, u);
+}
+
+static inline bool
+atomic_long_add_unless(atomic_long_t *v, long a, long u)
+{
+ return atomic_add_unless(v, a, u);
+}
-static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
+static inline bool
+atomic_long_inc_not_zero(atomic_long_t *v)
{
- ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;
+ return atomic_inc_not_zero(v);
+}
- return (long)ATOMIC_LONG_PFX(_add_unless)(v, a, u);
+static inline bool
+atomic_long_inc_unless_negative(atomic_long_t *v)
+{
+ return atomic_inc_unless_negative(v);
}
-#define atomic_long_inc_not_zero(l) \
- ATOMIC_LONG_PFX(_inc_not_zero)((ATOMIC_LONG_PFX(_t) *)(l))
+static inline bool
+atomic_long_dec_unless_positive(atomic_long_t *v)
+{
+ return atomic_dec_unless_positive(v);
+}
-#define atomic_long_cond_read_relaxed(v, c) \
- ATOMIC_LONG_PFX(_cond_read_relaxed)((ATOMIC_LONG_PFX(_t) *)(v), (c))
-#define atomic_long_cond_read_acquire(v, c) \
- ATOMIC_LONG_PFX(_cond_read_acquire)((ATOMIC_LONG_PFX(_t) *)(v), (c))
+static inline long
+atomic_long_dec_if_positive(atomic_long_t *v)
+{
+ return atomic_dec_if_positive(v);
+}
-#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
+#endif /* CONFIG_64BIT */
+#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
+// 77558968132ce4f911ad53f6f52ce423006f6268
diff --git a/include/linux/atomic-fallback.h b/include/linux/atomic-fallback.h
new file mode 100644
index 000000000000..a7d240e465c0
--- /dev/null
+++ b/include/linux/atomic-fallback.h
@@ -0,0 +1,2295 @@
+// SPDX-License-Identifier: GPL-2.0
+
+// Generated by scripts/atomic/gen-atomic-fallback.sh
+// DO NOT MODIFY THIS FILE DIRECTLY
+
+#ifndef _LINUX_ATOMIC_FALLBACK_H
+#define _LINUX_ATOMIC_FALLBACK_H
+
+#ifndef xchg_relaxed
+#define xchg_relaxed xchg
+#define xchg_acquire xchg
+#define xchg_release xchg
+#else /* xchg_relaxed */
+
+#ifndef xchg_acquire
+#define xchg_acquire(...) \
+ __atomic_op_acquire(xchg, __VA_ARGS__)
+#endif
+
+#ifndef xchg_release
+#define xchg_release(...) \
+ __atomic_op_release(xchg, __VA_ARGS__)
+#endif
+
+#ifndef xchg
+#define xchg(...) \
+ __atomic_op_fence(xchg, __VA_ARGS__)
+#endif
+
+#endif /* xchg_relaxed */
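
These fallbacks compose the stronger orderings out of a relaxed primitive via the __atomic_op_*() helpers. Quoting their shape from include/linux/atomic.h from memory (so treat this as a sketch rather than the authoritative definition), the acquire variant runs the relaxed op first and fences afterwards:

/* Sketch of the helper the fallbacks rely on; see include/linux/atomic.h. */
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	__atomic_acquire_fence();					\
	__ret;								\
})

__atomic_op_release() mirrors this with the fence before the relaxed op, and __atomic_op_fence() brackets it with full fences on both sides.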
+
+#ifndef cmpxchg_relaxed
+#define cmpxchg_relaxed cmpxchg
+#define cmpxchg_acquire cmpxchg
+#define cmpxchg_release cmpxchg
+#else /* cmpxchg_relaxed */
+
+#ifndef cmpxchg_acquire
+#define cmpxchg_acquire(...) \
+ __atomic_op_acquire(cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef cmpxchg_release
+#define cmpxchg_release(...) \
+ __atomic_op_release(cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef cmpxchg
+#define cmpxchg(...) \
+ __atomic_op_fence(cmpxchg, __VA_ARGS__)
+#endif
+
+#endif /* cmpxchg_relaxed */
+
+#ifndef cmpxchg64_relaxed
+#define cmpxchg64_relaxed cmpxchg64
+#define cmpxchg64_acquire cmpxchg64
+#define cmpxchg64_release cmpxchg64
+#else /* cmpxchg64_relaxed */
+
+#ifndef cmpxchg64_acquire
+#define cmpxchg64_acquire(...) \
+ __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
+#endif
+
+#ifndef cmpxchg64_release
+#define cmpxchg64_release(...) \
+ __atomic_op_release(cmpxchg64, __VA_ARGS__)
+#endif
+
+#ifndef cmpxchg64
+#define cmpxchg64(...) \
+ __atomic_op_fence(cmpxchg64, __VA_ARGS__)
+#endif
+
+#endif /* cmpxchg64_relaxed */
+
+#ifndef atomic_read_acquire
+static inline int
+atomic_read_acquire(const atomic_t *v)
+{
+ return smp_load_acquire(&(v)->counter);
+}
+#define atomic_read_acquire atomic_read_acquire
+#endif
+
+#ifndef atomic_set_release
+static inline void
+atomic_set_release(atomic_t *v, int i)
+{
+ smp_store_release(&(v)->counter, i);
+}
+#define atomic_set_release atomic_set_release
+#endif
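
The read_acquire/set_release pair is the atomic_t flavour of smp_load_acquire()/smp_store_release(), which is enough for classic message passing. A minimal sketch with hypothetical names:

static atomic_t ready = ATOMIC_INIT(0);
static int payload;

static void producer(void)
{
	payload = 42;			/* plain store ...              */
	atomic_set_release(&ready, 1);	/* ... published by the release */
}

static int consumer(void)
{
	if (atomic_read_acquire(&ready))	/* pairs with the release */
		return payload;			/* guaranteed to see 42   */
	return -1;
}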
+
+#ifndef atomic_add_return_relaxed
+#define atomic_add_return_acquire atomic_add_return
+#define atomic_add_return_release atomic_add_return
+#define atomic_add_return_relaxed atomic_add_return
+#else /* atomic_add_return_relaxed */
+
+#ifndef atomic_add_return_acquire
+static inline int
+atomic_add_return_acquire(int i, atomic_t *v)
+{
+ int ret = atomic_add_return_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_add_return_acquire atomic_add_return_acquire
+#endif
+
+#ifndef atomic_add_return_release
+static inline int
+atomic_add_return_release(int i, atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_add_return_relaxed(i, v);
+}
+#define atomic_add_return_release atomic_add_return_release
+#endif
+
+#ifndef atomic_add_return
+static inline int
+atomic_add_return(int i, atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_add_return_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_add_return atomic_add_return
+#endif
+
+#endif /* atomic_add_return_relaxed */
+
+#ifndef atomic_fetch_add_relaxed
+#define atomic_fetch_add_acquire atomic_fetch_add
+#define atomic_fetch_add_release atomic_fetch_add
+#define atomic_fetch_add_relaxed atomic_fetch_add
+#else /* atomic_fetch_add_relaxed */
+
+#ifndef atomic_fetch_add_acquire
+static inline int
+atomic_fetch_add_acquire(int i, atomic_t *v)
+{
+ int ret = atomic_fetch_add_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_fetch_add_acquire atomic_fetch_add_acquire
+#endif
+
+#ifndef atomic_fetch_add_release
+static inline int
+atomic_fetch_add_release(int i, atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_fetch_add_relaxed(i, v);
+}
+#define atomic_fetch_add_release atomic_fetch_add_release
+#endif
+
+#ifndef atomic_fetch_add
+static inline int
+atomic_fetch_add(int i, atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_fetch_add_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_fetch_add atomic_fetch_add
+#endif
+
+#endif /* atomic_fetch_add_relaxed */
+
+#ifndef atomic_sub_return_relaxed
+#define atomic_sub_return_acquire atomic_sub_return
+#define atomic_sub_return_release atomic_sub_return
+#define atomic_sub_return_relaxed atomic_sub_return
+#else /* atomic_sub_return_relaxed */
+
+#ifndef atomic_sub_return_acquire
+static inline int
+atomic_sub_return_acquire(int i, atomic_t *v)
+{
+ int ret = atomic_sub_return_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_sub_return_acquire atomic_sub_return_acquire
+#endif
+
+#ifndef atomic_sub_return_release
+static inline int
+atomic_sub_return_release(int i, atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_sub_return_relaxed(i, v);
+}
+#define atomic_sub_return_release atomic_sub_return_release
+#endif
+
+#ifndef atomic_sub_return
+static inline int
+atomic_sub_return(int i, atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_sub_return_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_sub_return atomic_sub_return
+#endif
+
+#endif /* atomic_sub_return_relaxed */
+
+#ifndef atomic_fetch_sub_relaxed
+#define atomic_fetch_sub_acquire atomic_fetch_sub
+#define atomic_fetch_sub_release atomic_fetch_sub
+#define atomic_fetch_sub_relaxed atomic_fetch_sub
+#else /* atomic_fetch_sub_relaxed */
+
+#ifndef atomic_fetch_sub_acquire
+static inline int
+atomic_fetch_sub_acquire(int i, atomic_t *v)
+{
+ int ret = atomic_fetch_sub_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
+#endif
+
+#ifndef atomic_fetch_sub_release
+static inline int
+atomic_fetch_sub_release(int i, atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_fetch_sub_relaxed(i, v);
+}
+#define atomic_fetch_sub_release atomic_fetch_sub_release
+#endif
+
+#ifndef atomic_fetch_sub
+static inline int
+atomic_fetch_sub(int i, atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_fetch_sub_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_fetch_sub atomic_fetch_sub
+#endif
+
+#endif /* atomic_fetch_sub_relaxed */
+
+#ifndef atomic_inc
+static inline void
+atomic_inc(atomic_t *v)
+{
+ atomic_add(1, v);
+}
+#define atomic_inc atomic_inc
+#endif
+
+#ifndef atomic_inc_return_relaxed
+#ifdef atomic_inc_return
+#define atomic_inc_return_acquire atomic_inc_return
+#define atomic_inc_return_release atomic_inc_return
+#define atomic_inc_return_relaxed atomic_inc_return
+#endif /* atomic_inc_return */
+
+#ifndef atomic_inc_return
+static inline int
+atomic_inc_return(atomic_t *v)
+{
+ return atomic_add_return(1, v);
+}
+#define atomic_inc_return atomic_inc_return
+#endif
+
+#ifndef atomic_inc_return_acquire
+static inline int
+atomic_inc_return_acquire(atomic_t *v)
+{
+ return atomic_add_return_acquire(1, v);
+}
+#define atomic_inc_return_acquire atomic_inc_return_acquire
+#endif
+
+#ifndef atomic_inc_return_release
+static inline int
+atomic_inc_return_release(atomic_t *v)
+{
+ return atomic_add_return_release(1, v);
+}
+#define atomic_inc_return_release atomic_inc_return_release
+#endif
+
+#ifndef atomic_inc_return_relaxed
+static inline int
+atomic_inc_return_relaxed(atomic_t *v)
+{
+ return atomic_add_return_relaxed(1, v);
+}
+#define atomic_inc_return_relaxed atomic_inc_return_relaxed
+#endif
+
+#else /* atomic_inc_return_relaxed */
+
+#ifndef atomic_inc_return_acquire
+static inline int
+atomic_inc_return_acquire(atomic_t *v)
+{
+ int ret = atomic_inc_return_relaxed(v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_inc_return_acquire atomic_inc_return_acquire
+#endif
+
+#ifndef atomic_inc_return_release
+static inline int
+atomic_inc_return_release(atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_inc_return_relaxed(v);
+}
+#define atomic_inc_return_release atomic_inc_return_release
+#endif
+
+#ifndef atomic_inc_return
+static inline int
+atomic_inc_return(atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_inc_return_relaxed(v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_inc_return atomic_inc_return
+#endif
+
+#endif /* atomic_inc_return_relaxed */
+
+#ifndef atomic_fetch_inc_relaxed
+#ifdef atomic_fetch_inc
+#define atomic_fetch_inc_acquire atomic_fetch_inc
+#define atomic_fetch_inc_release atomic_fetch_inc
+#define atomic_fetch_inc_relaxed atomic_fetch_inc
+#endif /* atomic_fetch_inc */
+
+#ifndef atomic_fetch_inc
+static inline int
+atomic_fetch_inc(atomic_t *v)
+{
+ return atomic_fetch_add(1, v);
+}
+#define atomic_fetch_inc atomic_fetch_inc
+#endif
+
+#ifndef atomic_fetch_inc_acquire
+static inline int
+atomic_fetch_inc_acquire(atomic_t *v)
+{
+ return atomic_fetch_add_acquire(1, v);
+}
+#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
+#endif
+
+#ifndef atomic_fetch_inc_release
+static inline int
+atomic_fetch_inc_release(atomic_t *v)
+{
+ return atomic_fetch_add_release(1, v);
+}
+#define atomic_fetch_inc_release atomic_fetch_inc_release
+#endif
+
+#ifndef atomic_fetch_inc_relaxed
+static inline int
+atomic_fetch_inc_relaxed(atomic_t *v)
+{
+ return atomic_fetch_add_relaxed(1, v);
+}
+#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
+#endif
+
+#else /* atomic_fetch_inc_relaxed */
+
+#ifndef atomic_fetch_inc_acquire
+static inline int
+atomic_fetch_inc_acquire(atomic_t *v)
+{
+ int ret = atomic_fetch_inc_relaxed(v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
+#endif
+
+#ifndef atomic_fetch_inc_release
+static inline int
+atomic_fetch_inc_release(atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_fetch_inc_relaxed(v);
+}
+#define atomic_fetch_inc_release atomic_fetch_inc_release
+#endif
+
+#ifndef atomic_fetch_inc
+static inline int
+atomic_fetch_inc(atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_fetch_inc_relaxed(v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_fetch_inc atomic_fetch_inc
+#endif
+
+#endif /* atomic_fetch_inc_relaxed */
+
+#ifndef atomic_dec
+static inline void
+atomic_dec(atomic_t *v)
+{
+ atomic_sub(1, v);
+}
+#define atomic_dec atomic_dec
+#endif
+
+#ifndef atomic_dec_return_relaxed
+#ifdef atomic_dec_return
+#define atomic_dec_return_acquire atomic_dec_return
+#define atomic_dec_return_release atomic_dec_return
+#define atomic_dec_return_relaxed atomic_dec_return
+#endif /* atomic_dec_return */
+
+#ifndef atomic_dec_return
+static inline int
+atomic_dec_return(atomic_t *v)
+{
+ return atomic_sub_return(1, v);
+}
+#define atomic_dec_return atomic_dec_return
+#endif
+
+#ifndef atomic_dec_return_acquire
+static inline int
+atomic_dec_return_acquire(atomic_t *v)
+{
+ return atomic_sub_return_acquire(1, v);
+}
+#define atomic_dec_return_acquire atomic_dec_return_acquire
+#endif
+
+#ifndef atomic_dec_return_release
+static inline int
+atomic_dec_return_release(atomic_t *v)
+{
+ return atomic_sub_return_release(1, v);
+}
+#define atomic_dec_return_release atomic_dec_return_release
+#endif
+
+#ifndef atomic_dec_return_relaxed
+static inline int
+atomic_dec_return_relaxed(atomic_t *v)
+{
+ return atomic_sub_return_relaxed(1, v);
+}
+#define atomic_dec_return_relaxed atomic_dec_return_relaxed
+#endif
+
+#else /* atomic_dec_return_relaxed */
+
+#ifndef atomic_dec_return_acquire
+static inline int
+atomic_dec_return_acquire(atomic_t *v)
+{
+ int ret = atomic_dec_return_relaxed(v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_dec_return_acquire atomic_dec_return_acquire
+#endif
+
+#ifndef atomic_dec_return_release
+static inline int
+atomic_dec_return_release(atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_dec_return_relaxed(v);
+}
+#define atomic_dec_return_release atomic_dec_return_release
+#endif
+
+#ifndef atomic_dec_return
+static inline int
+atomic_dec_return(atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_dec_return_relaxed(v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_dec_return atomic_dec_return
+#endif
+
+#endif /* atomic_dec_return_relaxed */
+
+#ifndef atomic_fetch_dec_relaxed
+#ifdef atomic_fetch_dec
+#define atomic_fetch_dec_acquire atomic_fetch_dec
+#define atomic_fetch_dec_release atomic_fetch_dec
+#define atomic_fetch_dec_relaxed atomic_fetch_dec
+#endif /* atomic_fetch_dec */
+
+#ifndef atomic_fetch_dec
+static inline int
+atomic_fetch_dec(atomic_t *v)
+{
+ return atomic_fetch_sub(1, v);
+}
+#define atomic_fetch_dec atomic_fetch_dec
+#endif
+
+#ifndef atomic_fetch_dec_acquire
+static inline int
+atomic_fetch_dec_acquire(atomic_t *v)
+{
+ return atomic_fetch_sub_acquire(1, v);
+}
+#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
+#endif
+
+#ifndef atomic_fetch_dec_release
+static inline int
+atomic_fetch_dec_release(atomic_t *v)
+{
+ return atomic_fetch_sub_release(1, v);
+}
+#define atomic_fetch_dec_release atomic_fetch_dec_release
+#endif
+
+#ifndef atomic_fetch_dec_relaxed
+static inline int
+atomic_fetch_dec_relaxed(atomic_t *v)
+{
+ return atomic_fetch_sub_relaxed(1, v);
+}
+#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
+#endif
+
+#else /* atomic_fetch_dec_relaxed */
+
+#ifndef atomic_fetch_dec_acquire
+static inline int
+atomic_fetch_dec_acquire(atomic_t *v)
+{
+ int ret = atomic_fetch_dec_relaxed(v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
+#endif
+
+#ifndef atomic_fetch_dec_release
+static inline int
+atomic_fetch_dec_release(atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_fetch_dec_relaxed(v);
+}
+#define atomic_fetch_dec_release atomic_fetch_dec_release
+#endif
+
+#ifndef atomic_fetch_dec
+static inline int
+atomic_fetch_dec(atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_fetch_dec_relaxed(v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_fetch_dec atomic_fetch_dec
+#endif
+
+#endif /* atomic_fetch_dec_relaxed */
+
+#ifndef atomic_fetch_and_relaxed
+#define atomic_fetch_and_acquire atomic_fetch_and
+#define atomic_fetch_and_release atomic_fetch_and
+#define atomic_fetch_and_relaxed atomic_fetch_and
+#else /* atomic_fetch_and_relaxed */
+
+#ifndef atomic_fetch_and_acquire
+static inline int
+atomic_fetch_and_acquire(int i, atomic_t *v)
+{
+ int ret = atomic_fetch_and_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_fetch_and_acquire atomic_fetch_and_acquire
+#endif
+
+#ifndef atomic_fetch_and_release
+static inline int
+atomic_fetch_and_release(int i, atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_fetch_and_relaxed(i, v);
+}
+#define atomic_fetch_and_release atomic_fetch_and_release
+#endif
+
+#ifndef atomic_fetch_and
+static inline int
+atomic_fetch_and(int i, atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_fetch_and_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_fetch_and atomic_fetch_and
+#endif
+
+#endif /* atomic_fetch_and_relaxed */
+
+#ifndef atomic_andnot
+static inline void
+atomic_andnot(int i, atomic_t *v)
+{
+ atomic_and(~i, v);
+}
+#define atomic_andnot atomic_andnot
+#endif
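
atomic_andnot(i, v) clears in *v exactly the bits that are set in i; the fallback spells it as atomic_and(~i, v). A tiny flag-clearing sketch (the flag name is an assumption for illustration):

#define MY_FLAG_BUSY	BIT(0)	/* hypothetical flag bit */

static void clear_busy(atomic_t *flags)
{
	/* Equivalent to atomic_and(~MY_FLAG_BUSY, flags). */
	atomic_andnot(MY_FLAG_BUSY, flags);
}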
+
+#ifndef atomic_fetch_andnot_relaxed
+#ifdef atomic_fetch_andnot
+#define atomic_fetch_andnot_acquire atomic_fetch_andnot
+#define atomic_fetch_andnot_release atomic_fetch_andnot
+#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
+#endif /* atomic_fetch_andnot */
+
+#ifndef atomic_fetch_andnot
+static inline int
+atomic_fetch_andnot(int i, atomic_t *v)
+{
+ return atomic_fetch_and(~i, v);
+}
+#define atomic_fetch_andnot atomic_fetch_andnot
+#endif
+
+#ifndef atomic_fetch_andnot_acquire
+static inline int
+atomic_fetch_andnot_acquire(int i, atomic_t *v)
+{
+ return atomic_fetch_and_acquire(~i, v);
+}
+#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
+#endif
+
+#ifndef atomic_fetch_andnot_release
+static inline int
+atomic_fetch_andnot_release(int i, atomic_t *v)
+{
+ return atomic_fetch_and_release(~i, v);
+}
+#define atomic_fetch_andnot_release atomic_fetch_andnot_release
+#endif
+
+#ifndef atomic_fetch_andnot_relaxed
+static inline int
+atomic_fetch_andnot_relaxed(int i, atomic_t *v)
+{
+ return atomic_fetch_and_relaxed(~i, v);
+}
+#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
+#endif
+
+#else /* atomic_fetch_andnot_relaxed */
+
+#ifndef atomic_fetch_andnot_acquire
+static inline int
+atomic_fetch_andnot_acquire(int i, atomic_t *v)
+{
+ int ret = atomic_fetch_andnot_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
+#endif
+
+#ifndef atomic_fetch_andnot_release
+static inline int
+atomic_fetch_andnot_release(int i, atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_fetch_andnot_relaxed(i, v);
+}
+#define atomic_fetch_andnot_release atomic_fetch_andnot_release
+#endif
+
+#ifndef atomic_fetch_andnot
+static inline int
+atomic_fetch_andnot(int i, atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_fetch_andnot_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_fetch_andnot atomic_fetch_andnot
+#endif
+
+#endif /* atomic_fetch_andnot_relaxed */
+
+#ifndef atomic_fetch_or_relaxed
+#define atomic_fetch_or_acquire atomic_fetch_or
+#define atomic_fetch_or_release atomic_fetch_or
+#define atomic_fetch_or_relaxed atomic_fetch_or
+#else /* atomic_fetch_or_relaxed */
+
+#ifndef atomic_fetch_or_acquire
+static inline int
+atomic_fetch_or_acquire(int i, atomic_t *v)
+{
+ int ret = atomic_fetch_or_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_fetch_or_acquire atomic_fetch_or_acquire
+#endif
+
+#ifndef atomic_fetch_or_release
+static inline int
+atomic_fetch_or_release(int i, atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_fetch_or_relaxed(i, v);
+}
+#define atomic_fetch_or_release atomic_fetch_or_release
+#endif
+
+#ifndef atomic_fetch_or
+static inline int
+atomic_fetch_or(int i, atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_fetch_or_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_fetch_or atomic_fetch_or
+#endif
+
+#endif /* atomic_fetch_or_relaxed */
+
+#ifndef atomic_fetch_xor_relaxed
+#define atomic_fetch_xor_acquire atomic_fetch_xor
+#define atomic_fetch_xor_release atomic_fetch_xor
+#define atomic_fetch_xor_relaxed atomic_fetch_xor
+#else /* atomic_fetch_xor_relaxed */
+
+#ifndef atomic_fetch_xor_acquire
+static inline int
+atomic_fetch_xor_acquire(int i, atomic_t *v)
+{
+ int ret = atomic_fetch_xor_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
+#endif
+
+#ifndef atomic_fetch_xor_release
+static inline int
+atomic_fetch_xor_release(int i, atomic_t *v)
+{
+ __atomic_release_fence();
+ return atomic_fetch_xor_relaxed(i, v);
+}
+#define atomic_fetch_xor_release atomic_fetch_xor_release
+#endif
+
+#ifndef atomic_fetch_xor
+static inline int
+atomic_fetch_xor(int i, atomic_t *v)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_fetch_xor_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_fetch_xor atomic_fetch_xor
+#endif
+
+#endif /* atomic_fetch_xor_relaxed */
+
+#ifndef atomic_xchg_relaxed
+#define atomic_xchg_acquire atomic_xchg
+#define atomic_xchg_release atomic_xchg
+#define atomic_xchg_relaxed atomic_xchg
+#else /* atomic_xchg_relaxed */
+
+#ifndef atomic_xchg_acquire
+static inline int
+atomic_xchg_acquire(atomic_t *v, int i)
+{
+ int ret = atomic_xchg_relaxed(v, i);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_xchg_acquire atomic_xchg_acquire
+#endif
+
+#ifndef atomic_xchg_release
+static inline int
+atomic_xchg_release(atomic_t *v, int i)
+{
+ __atomic_release_fence();
+ return atomic_xchg_relaxed(v, i);
+}
+#define atomic_xchg_release atomic_xchg_release
+#endif
+
+#ifndef atomic_xchg
+static inline int
+atomic_xchg(atomic_t *v, int i)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_xchg_relaxed(v, i);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_xchg atomic_xchg
+#endif
+
+#endif /* atomic_xchg_relaxed */
+
+#ifndef atomic_cmpxchg_relaxed
+#define atomic_cmpxchg_acquire atomic_cmpxchg
+#define atomic_cmpxchg_release atomic_cmpxchg
+#define atomic_cmpxchg_relaxed atomic_cmpxchg
+#else /* atomic_cmpxchg_relaxed */
+
+#ifndef atomic_cmpxchg_acquire
+static inline int
+atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
+{
+ int ret = atomic_cmpxchg_relaxed(v, old, new);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
+#endif
+
+#ifndef atomic_cmpxchg_release
+static inline int
+atomic_cmpxchg_release(atomic_t *v, int old, int new)
+{
+ __atomic_release_fence();
+ return atomic_cmpxchg_relaxed(v, old, new);
+}
+#define atomic_cmpxchg_release atomic_cmpxchg_release
+#endif
+
+#ifndef atomic_cmpxchg
+static inline int
+atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+ int ret;
+ __atomic_pre_full_fence();
+ ret = atomic_cmpxchg_relaxed(v, old, new);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_cmpxchg atomic_cmpxchg
+#endif
+
+#endif /* atomic_cmpxchg_relaxed */
+
+#ifndef atomic_try_cmpxchg_relaxed
+#ifdef atomic_try_cmpxchg
+#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
+#define atomic_try_cmpxchg_release atomic_try_cmpxchg
+#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
+#endif /* atomic_try_cmpxchg */
+
+#ifndef atomic_try_cmpxchg
+static inline bool
+atomic_try_cmpxchg(atomic_t *v, int *old, int new)
+{
+ int r, o = *old;
+ r = atomic_cmpxchg(v, o, new);
+ if (unlikely(r != o))
+ *old = r;
+ return likely(r == o);
+}
+#define atomic_try_cmpxchg atomic_try_cmpxchg
+#endif
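
Unlike cmpxchg(), atomic_try_cmpxchg() returns a bool and, on failure, writes the value it actually observed back through *old — so the usual compare-and-swap loop needs no extra re-read. Canonical usage, shown here as a hypothetical saturating increment:

/* Hypothetical: increment v, but saturate at INT_MAX. */
static void inc_saturated(atomic_t *v)
{
	int old = atomic_read(v);

	do {
		if (old == INT_MAX)
			return;
	} while (!atomic_try_cmpxchg(v, &old, old + 1));
	/* On each failed attempt, 'old' was refreshed by try_cmpxchg. */
}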
+
+#ifndef atomic_try_cmpxchg_acquire
+static inline bool
+atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
+{
+ int r, o = *old;
+ r = atomic_cmpxchg_acquire(v, o, new);
+ if (unlikely(r != o))
+ *old = r;
+ return likely(r == o);
+}
+#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
+#endif
+
+#ifndef atomic_try_cmpxchg_release
+static inline bool
+atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
+{
+ int r, o = *old;
+ r = atomic_cmpxchg_release(v, o, new);
+ if (unlikely(r != o))
+ *old = r;
+ return likely(r == o);
+}
+#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
+#endif
+
+#ifndef atomic_try_cmpxchg_relaxed
+static inline bool
+atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
+{
+ int r, o = *old;
+ r = atomic_cmpxchg_relaxed(v, o, new);
+ if (unlikely(r != o))
+ *old = r;
+ return likely(r == o);
+}
+#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
+#endif
+
+#else /* atomic_try_cmpxchg_relaxed */
+
+#ifndef atomic_try_cmpxchg_acquire
+static inline bool
+atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
+{
+ bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
+#endif
+
+#ifndef atomic_try_cmpxchg_release
+static inline bool
+atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
+{
+ __atomic_release_fence();
+ return atomic_try_cmpxchg_relaxed(v, old, new);
+}
+#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
+#endif
+
+#ifndef atomic_try_cmpxchg
+static inline bool
+atomic_try_cmpxchg(atomic_t *v, int *old, int new)
+{
+ bool ret;
+ __atomic_pre_full_fence();
+ ret = atomic_try_cmpxchg_relaxed(v, old, new);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic_try_cmpxchg atomic_try_cmpxchg
+#endif
+
+#endif /* atomic_try_cmpxchg_relaxed */
+
+#ifndef atomic_sub_and_test
+/**
+ * atomic_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+static inline bool
+atomic_sub_and_test(int i, atomic_t *v)
+{
+ return atomic_sub_return(i, v) == 0;
+}
+#define atomic_sub_and_test atomic_sub_and_test
+#endif
+
+#ifndef atomic_dec_and_test
+/**
+ * atomic_dec_and_test - decrement and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+static inline bool
+atomic_dec_and_test(atomic_t *v)
+{
+ return atomic_dec_return(v) == 0;
+}
+#define atomic_dec_and_test atomic_dec_and_test
+#endif
+
+#ifndef atomic_inc_and_test
+/**
+ * atomic_inc_and_test - increment and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+static inline bool
+atomic_inc_and_test(atomic_t *v)
+{
+ return atomic_inc_return(v) == 0;
+}
+#define atomic_inc_and_test atomic_inc_and_test
+#endif
+
+#ifndef atomic_add_negative
+/**
+ * atomic_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+static inline bool
+atomic_add_negative(int i, atomic_t *v)
+{
+ return atomic_add_return(i, v) < 0;
+}
+#define atomic_add_negative atomic_add_negative
+#endif
+
+#ifndef atomic_fetch_add_unless
+/**
+ * atomic_fetch_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as @v was not already @u.
+ * Returns original value of @v
+ */
+static inline int
+atomic_fetch_add_unless(atomic_t *v, int a, int u)
+{
+ int c = atomic_read(v);
+
+ do {
+ if (unlikely(c == u))
+ break;
+ } while (!atomic_try_cmpxchg(v, &c, c + a));
+
+ return c;
+}
+#define atomic_fetch_add_unless atomic_fetch_add_unless
+#endif
+
+#ifndef atomic_add_unless
+/**
+ * atomic_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns true if the addition was done.
+ */
+static inline bool
+atomic_add_unless(atomic_t *v, int a, int u)
+{
+ return atomic_fetch_add_unless(v, a, u) != u;
+}
+#define atomic_add_unless atomic_add_unless
+#endif
+
+#ifndef atomic_inc_not_zero
+/**
+ * atomic_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1, if @v is non-zero.
+ * Returns true if the increment was done.
+ */
+static inline bool
+atomic_inc_not_zero(atomic_t *v)
+{
+ return atomic_add_unless(v, 1, 0);
+}
+#define atomic_inc_not_zero atomic_inc_not_zero
+#endif
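
atomic_inc_not_zero() is the building block for "take a reference unless the object is already dying" patterns, e.g. an RCU-protected lookup. A hedged sketch with hypothetical types:

/* Hypothetical: pin the object only if its refcount is still live. */
struct obj {
	atomic_t refs;
};

static struct obj *obj_get_live(struct obj *o)
{
	if (o && atomic_inc_not_zero(&o->refs))
		return o;	/* we now hold a reference          */
	return NULL;		/* count was zero: object going away */
}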
+
+#ifndef atomic_inc_unless_negative
+static inline bool
+atomic_inc_unless_negative(atomic_t *v)
+{
+ int c = atomic_read(v);
+
+ do {
+ if (unlikely(c < 0))
+ return false;
+ } while (!atomic_try_cmpxchg(v, &c, c + 1));
+
+ return true;
+}
+#define atomic_inc_unless_negative atomic_inc_unless_negative
+#endif
+
+#ifndef atomic_dec_unless_positive
+static inline bool
+atomic_dec_unless_positive(atomic_t *v)
+{
+ int c = atomic_read(v);
+
+ do {
+ if (unlikely(c > 0))
+ return false;
+ } while (!atomic_try_cmpxchg(v, &c, c - 1));
+
+ return true;
+}
+#define atomic_dec_unless_positive atomic_dec_unless_positive
+#endif
+
+#ifndef atomic_dec_if_positive
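+/**
+ * atomic_dec_if_positive - decrement by 1 if old value positive
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1, so long as the result would not be
+ * negative. Returns the old value of @v minus 1, even if @v was not
+ * decremented.
+ */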
+static inline int
+atomic_dec_if_positive(atomic_t *v)
+{
+ int dec, c = atomic_read(v);
+
+ do {
+ dec = c - 1;
+ if (unlikely(dec < 0))
+ break;
+ } while (!atomic_try_cmpxchg(v, &c, dec));
+
+ return dec;
+}
+#define atomic_dec_if_positive atomic_dec_if_positive
+#endif
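+
+/*
+ * Editor's note, not part of the generated header: a sketch of
+ * atomic_dec_if_positive() as a counting-semaphore style trywait;
+ * the helper name is hypothetical.
+ *
+ *	static bool sem_trywait(atomic_t *count)
+ *	{
+ *		// A negative result means the count was already 0
+ *		// and was left untouched.
+ *		return atomic_dec_if_positive(count) >= 0;
+ *	}
+ */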
+
+#define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
+#define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
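+
+/*
+ * Editor's note, not part of the generated header: these forward to
+ * smp_cond_load_*(), which spin until the condition holds; VAL names
+ * the freshly loaded value inside the condition expression. Sketch:
+ *
+ *	// Wait, with acquire ordering, until another CPU sets bit 0.
+ *	atomic_cond_read_acquire(&state, VAL & 1);
+ */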
+
+#ifdef CONFIG_GENERIC_ATOMIC64
+#include <asm-generic/atomic64.h>
+#endif
+
+#ifndef atomic64_read_acquire
+static inline s64
+atomic64_read_acquire(const atomic64_t *v)
+{
+ return smp_load_acquire(&(v)->counter);
+}
+#define atomic64_read_acquire atomic64_read_acquire
+#endif
+
+#ifndef atomic64_set_release
+static inline void
+atomic64_set_release(atomic64_t *v, s64 i)
+{
+ smp_store_release(&(v)->counter, i);
+}
+#define atomic64_set_release atomic64_set_release
+#endif
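+
+/*
+ * Editor's note, not part of the generated header: read_acquire pairs
+ * with set_release for message-passing style handoff. A sketch with
+ * hypothetical data/ready variables:
+ *
+ *	// producer
+ *	data = 42;
+ *	atomic64_set_release(&ready, 1);
+ *
+ *	// consumer
+ *	while (!atomic64_read_acquire(&ready))
+ *		cpu_relax();
+ *	// data is guaranteed to read as 42 here.
+ */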
+
+#ifndef atomic64_add_return_relaxed
+#define atomic64_add_return_acquire atomic64_add_return
+#define atomic64_add_return_release atomic64_add_return
+#define atomic64_add_return_relaxed atomic64_add_return
+#else /* atomic64_add_return_relaxed */
+
+#ifndef atomic64_add_return_acquire
+static inline s64
+atomic64_add_return_acquire(s64 i, atomic64_t *v)
+{
+ s64 ret = atomic64_add_return_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_add_return_acquire atomic64_add_return_acquire
+#endif
+
+#ifndef atomic64_add_return_release
+static inline s64
+atomic64_add_return_release(s64 i, atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_add_return_relaxed(i, v);
+}
+#define atomic64_add_return_release atomic64_add_return_release
+#endif
+
+#ifndef atomic64_add_return
+static inline s64
+atomic64_add_return(s64 i, atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_add_return_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_add_return atomic64_add_return
+#endif
+
+#endif /* atomic64_add_return_relaxed */
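+
+/*
+ * Editor's note, not part of the generated header: the block above shows
+ * the scheme used throughout this file when an architecture supplies only
+ * the _relaxed form: _acquire is the relaxed op followed by
+ * __atomic_acquire_fence(), _release is __atomic_release_fence() followed
+ * by the relaxed op, and the fully ordered form brackets the relaxed op
+ * with __atomic_pre_full_fence()/__atomic_post_full_fence().
+ */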
+
+#ifndef atomic64_fetch_add_relaxed
+#define atomic64_fetch_add_acquire atomic64_fetch_add
+#define atomic64_fetch_add_release atomic64_fetch_add
+#define atomic64_fetch_add_relaxed atomic64_fetch_add
+#else /* atomic64_fetch_add_relaxed */
+
+#ifndef atomic64_fetch_add_acquire
+static inline s64
+atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
+{
+ s64 ret = atomic64_fetch_add_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
+#endif
+
+#ifndef atomic64_fetch_add_release
+static inline s64
+atomic64_fetch_add_release(s64 i, atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_fetch_add_relaxed(i, v);
+}
+#define atomic64_fetch_add_release atomic64_fetch_add_release
+#endif
+
+#ifndef atomic64_fetch_add
+static inline s64
+atomic64_fetch_add(s64 i, atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_fetch_add_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_fetch_add atomic64_fetch_add
+#endif
+
+#endif /* atomic64_fetch_add_relaxed */
+
+#ifndef atomic64_sub_return_relaxed
+#define atomic64_sub_return_acquire atomic64_sub_return
+#define atomic64_sub_return_release atomic64_sub_return
+#define atomic64_sub_return_relaxed atomic64_sub_return
+#else /* atomic64_sub_return_relaxed */
+
+#ifndef atomic64_sub_return_acquire
+static inline s64
+atomic64_sub_return_acquire(s64 i, atomic64_t *v)
+{
+ s64 ret = atomic64_sub_return_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_sub_return_acquire atomic64_sub_return_acquire
+#endif
+
+#ifndef atomic64_sub_return_release
+static inline s64
+atomic64_sub_return_release(s64 i, atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_sub_return_relaxed(i, v);
+}
+#define atomic64_sub_return_release atomic64_sub_return_release
+#endif
+
+#ifndef atomic64_sub_return
+static inline s64
+atomic64_sub_return(s64 i, atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_sub_return_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_sub_return atomic64_sub_return
+#endif
+
+#endif /* atomic64_sub_return_relaxed */
+
+#ifndef atomic64_fetch_sub_relaxed
+#define atomic64_fetch_sub_acquire atomic64_fetch_sub
+#define atomic64_fetch_sub_release atomic64_fetch_sub
+#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
+#else /* atomic64_fetch_sub_relaxed */
+
+#ifndef atomic64_fetch_sub_acquire
+static inline s64
+atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
+{
+ s64 ret = atomic64_fetch_sub_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
+#endif
+
+#ifndef atomic64_fetch_sub_release
+static inline s64
+atomic64_fetch_sub_release(s64 i, atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_fetch_sub_relaxed(i, v);
+}
+#define atomic64_fetch_sub_release atomic64_fetch_sub_release
+#endif
+
+#ifndef atomic64_fetch_sub
+static inline s64
+atomic64_fetch_sub(s64 i, atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_fetch_sub_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_fetch_sub atomic64_fetch_sub
+#endif
+
+#endif /* atomic64_fetch_sub_relaxed */
+
+#ifndef atomic64_inc
+static inline void
+atomic64_inc(atomic64_t *v)
+{
+ atomic64_add(1, v);
+}
+#define atomic64_inc atomic64_inc
+#endif
+
+#ifndef atomic64_inc_return_relaxed
+#ifdef atomic64_inc_return
+#define atomic64_inc_return_acquire atomic64_inc_return
+#define atomic64_inc_return_release atomic64_inc_return
+#define atomic64_inc_return_relaxed atomic64_inc_return
+#endif /* atomic64_inc_return */
+
+#ifndef atomic64_inc_return
+static inline s64
+atomic64_inc_return(atomic64_t *v)
+{
+ return atomic64_add_return(1, v);
+}
+#define atomic64_inc_return atomic64_inc_return
+#endif
+
+#ifndef atomic64_inc_return_acquire
+static inline s64
+atomic64_inc_return_acquire(atomic64_t *v)
+{
+ return atomic64_add_return_acquire(1, v);
+}
+#define atomic64_inc_return_acquire atomic64_inc_return_acquire
+#endif
+
+#ifndef atomic64_inc_return_release
+static inline s64
+atomic64_inc_return_release(atomic64_t *v)
+{
+ return atomic64_add_return_release(1, v);
+}
+#define atomic64_inc_return_release atomic64_inc_return_release
+#endif
+
+#ifndef atomic64_inc_return_relaxed
+static inline s64
+atomic64_inc_return_relaxed(atomic64_t *v)
+{
+ return atomic64_add_return_relaxed(1, v);
+}
+#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
+#endif
+
+#else /* atomic64_inc_return_relaxed */
+
+#ifndef atomic64_inc_return_acquire
+static inline s64
+atomic64_inc_return_acquire(atomic64_t *v)
+{
+ s64 ret = atomic64_inc_return_relaxed(v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_inc_return_acquire atomic64_inc_return_acquire
+#endif
+
+#ifndef atomic64_inc_return_release
+static inline s64
+atomic64_inc_return_release(atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_inc_return_relaxed(v);
+}
+#define atomic64_inc_return_release atomic64_inc_return_release
+#endif
+
+#ifndef atomic64_inc_return
+static inline s64
+atomic64_inc_return(atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_inc_return_relaxed(v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_inc_return atomic64_inc_return
+#endif
+
+#endif /* atomic64_inc_return_relaxed */
+
+#ifndef atomic64_fetch_inc_relaxed
+#ifdef atomic64_fetch_inc
+#define atomic64_fetch_inc_acquire atomic64_fetch_inc
+#define atomic64_fetch_inc_release atomic64_fetch_inc
+#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
+#endif /* atomic64_fetch_inc */
+
+#ifndef atomic64_fetch_inc
+static inline s64
+atomic64_fetch_inc(atomic64_t *v)
+{
+ return atomic64_fetch_add(1, v);
+}
+#define atomic64_fetch_inc atomic64_fetch_inc
+#endif
+
+#ifndef atomic64_fetch_inc_acquire
+static inline s64
+atomic64_fetch_inc_acquire(atomic64_t *v)
+{
+ return atomic64_fetch_add_acquire(1, v);
+}
+#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
+#endif
+
+#ifndef atomic64_fetch_inc_release
+static inline s64
+atomic64_fetch_inc_release(atomic64_t *v)
+{
+ return atomic64_fetch_add_release(1, v);
+}
+#define atomic64_fetch_inc_release atomic64_fetch_inc_release
+#endif
+
+#ifndef atomic64_fetch_inc_relaxed
+static inline s64
+atomic64_fetch_inc_relaxed(atomic64_t *v)
+{
+ return atomic64_fetch_add_relaxed(1, v);
+}
+#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
+#endif
+
+#else /* atomic64_fetch_inc_relaxed */
+
+#ifndef atomic64_fetch_inc_acquire
+static inline s64
+atomic64_fetch_inc_acquire(atomic64_t *v)
+{
+ s64 ret = atomic64_fetch_inc_relaxed(v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
+#endif
+
+#ifndef atomic64_fetch_inc_release
+static inline s64
+atomic64_fetch_inc_release(atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_fetch_inc_relaxed(v);
+}
+#define atomic64_fetch_inc_release atomic64_fetch_inc_release
+#endif
+
+#ifndef atomic64_fetch_inc
+static inline s64
+atomic64_fetch_inc(atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_fetch_inc_relaxed(v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_fetch_inc atomic64_fetch_inc
+#endif
+
+#endif /* atomic64_fetch_inc_relaxed */
+
+#ifndef atomic64_dec
+static inline void
+atomic64_dec(atomic64_t *v)
+{
+ atomic64_sub(1, v);
+}
+#define atomic64_dec atomic64_dec
+#endif
+
+#ifndef atomic64_dec_return_relaxed
+#ifdef atomic64_dec_return
+#define atomic64_dec_return_acquire atomic64_dec_return
+#define atomic64_dec_return_release atomic64_dec_return
+#define atomic64_dec_return_relaxed atomic64_dec_return
+#endif /* atomic64_dec_return */
+
+#ifndef atomic64_dec_return
+static inline s64
+atomic64_dec_return(atomic64_t *v)
+{
+ return atomic64_sub_return(1, v);
+}
+#define atomic64_dec_return atomic64_dec_return
+#endif
+
+#ifndef atomic64_dec_return_acquire
+static inline s64
+atomic64_dec_return_acquire(atomic64_t *v)
+{
+ return atomic64_sub_return_acquire(1, v);
+}
+#define atomic64_dec_return_acquire atomic64_dec_return_acquire
+#endif
+
+#ifndef atomic64_dec_return_release
+static inline s64
+atomic64_dec_return_release(atomic64_t *v)
+{
+ return atomic64_sub_return_release(1, v);
+}
+#define atomic64_dec_return_release atomic64_dec_return_release
+#endif
+
+#ifndef atomic64_dec_return_relaxed
+static inline s64
+atomic64_dec_return_relaxed(atomic64_t *v)
+{
+ return atomic64_sub_return_relaxed(1, v);
+}
+#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
+#endif
+
+#else /* atomic64_dec_return_relaxed */
+
+#ifndef atomic64_dec_return_acquire
+static inline s64
+atomic64_dec_return_acquire(atomic64_t *v)
+{
+ s64 ret = atomic64_dec_return_relaxed(v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_dec_return_acquire atomic64_dec_return_acquire
+#endif
+
+#ifndef atomic64_dec_return_release
+static inline s64
+atomic64_dec_return_release(atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_dec_return_relaxed(v);
+}
+#define atomic64_dec_return_release atomic64_dec_return_release
+#endif
+
+#ifndef atomic64_dec_return
+static inline s64
+atomic64_dec_return(atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_dec_return_relaxed(v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_dec_return atomic64_dec_return
+#endif
+
+#endif /* atomic64_dec_return_relaxed */
+
+#ifndef atomic64_fetch_dec_relaxed
+#ifdef atomic64_fetch_dec
+#define atomic64_fetch_dec_acquire atomic64_fetch_dec
+#define atomic64_fetch_dec_release atomic64_fetch_dec
+#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
+#endif /* atomic64_fetch_dec */
+
+#ifndef atomic64_fetch_dec
+static inline s64
+atomic64_fetch_dec(atomic64_t *v)
+{
+ return atomic64_fetch_sub(1, v);
+}
+#define atomic64_fetch_dec atomic64_fetch_dec
+#endif
+
+#ifndef atomic64_fetch_dec_acquire
+static inline s64
+atomic64_fetch_dec_acquire(atomic64_t *v)
+{
+ return atomic64_fetch_sub_acquire(1, v);
+}
+#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
+#endif
+
+#ifndef atomic64_fetch_dec_release
+static inline s64
+atomic64_fetch_dec_release(atomic64_t *v)
+{
+ return atomic64_fetch_sub_release(1, v);
+}
+#define atomic64_fetch_dec_release atomic64_fetch_dec_release
+#endif
+
+#ifndef atomic64_fetch_dec_relaxed
+static inline s64
+atomic64_fetch_dec_relaxed(atomic64_t *v)
+{
+ return atomic64_fetch_sub_relaxed(1, v);
+}
+#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
+#endif
+
+#else /* atomic64_fetch_dec_relaxed */
+
+#ifndef atomic64_fetch_dec_acquire
+static inline s64
+atomic64_fetch_dec_acquire(atomic64_t *v)
+{
+ s64 ret = atomic64_fetch_dec_relaxed(v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
+#endif
+
+#ifndef atomic64_fetch_dec_release
+static inline s64
+atomic64_fetch_dec_release(atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_fetch_dec_relaxed(v);
+}
+#define atomic64_fetch_dec_release atomic64_fetch_dec_release
+#endif
+
+#ifndef atomic64_fetch_dec
+static inline s64
+atomic64_fetch_dec(atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_fetch_dec_relaxed(v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_fetch_dec atomic64_fetch_dec
+#endif
+
+#endif /* atomic64_fetch_dec_relaxed */
+
+#ifndef atomic64_fetch_and_relaxed
+#define atomic64_fetch_and_acquire atomic64_fetch_and
+#define atomic64_fetch_and_release atomic64_fetch_and
+#define atomic64_fetch_and_relaxed atomic64_fetch_and
+#else /* atomic64_fetch_and_relaxed */
+
+#ifndef atomic64_fetch_and_acquire
+static inline s64
+atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
+{
+ s64 ret = atomic64_fetch_and_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
+#endif
+
+#ifndef atomic64_fetch_and_release
+static inline s64
+atomic64_fetch_and_release(s64 i, atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_fetch_and_relaxed(i, v);
+}
+#define atomic64_fetch_and_release atomic64_fetch_and_release
+#endif
+
+#ifndef atomic64_fetch_and
+static inline s64
+atomic64_fetch_and(s64 i, atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_fetch_and_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_fetch_and atomic64_fetch_and
+#endif
+
+#endif /* atomic64_fetch_and_relaxed */
+
+#ifndef atomic64_andnot
+static inline void
+atomic64_andnot(s64 i, atomic64_t *v)
+{
+ atomic64_and(~i, v);
+}
+#define atomic64_andnot atomic64_andnot
+#endif
+
+#ifndef atomic64_fetch_andnot_relaxed
+#ifdef atomic64_fetch_andnot
+#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
+#define atomic64_fetch_andnot_release atomic64_fetch_andnot
+#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
+#endif /* atomic64_fetch_andnot */
+
+#ifndef atomic64_fetch_andnot
+static inline s64
+atomic64_fetch_andnot(s64 i, atomic64_t *v)
+{
+ return atomic64_fetch_and(~i, v);
+}
+#define atomic64_fetch_andnot atomic64_fetch_andnot
+#endif
+
+#ifndef atomic64_fetch_andnot_acquire
+static inline s64
+atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
+{
+ return atomic64_fetch_and_acquire(~i, v);
+}
+#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
+#endif
+
+#ifndef atomic64_fetch_andnot_release
+static inline s64
+atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
+{
+ return atomic64_fetch_and_release(~i, v);
+}
+#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
+#endif
+
+#ifndef atomic64_fetch_andnot_relaxed
+static inline s64
+atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
+{
+ return atomic64_fetch_and_relaxed(~i, v);
+}
+#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
+#endif
+
+#else /* atomic64_fetch_andnot_relaxed */
+
+#ifndef atomic64_fetch_andnot_acquire
+static inline s64
+atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
+{
+ s64 ret = atomic64_fetch_andnot_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
+#endif
+
+#ifndef atomic64_fetch_andnot_release
+static inline s64
+atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_fetch_andnot_relaxed(i, v);
+}
+#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
+#endif
+
+#ifndef atomic64_fetch_andnot
+static inline s64
+atomic64_fetch_andnot(s64 i, atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_fetch_andnot_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_fetch_andnot atomic64_fetch_andnot
+#endif
+
+#endif /* atomic64_fetch_andnot_relaxed */
+
+#ifndef atomic64_fetch_or_relaxed
+#define atomic64_fetch_or_acquire atomic64_fetch_or
+#define atomic64_fetch_or_release atomic64_fetch_or
+#define atomic64_fetch_or_relaxed atomic64_fetch_or
+#else /* atomic64_fetch_or_relaxed */
+
+#ifndef atomic64_fetch_or_acquire
+static inline s64
+atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
+{
+ s64 ret = atomic64_fetch_or_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
+#endif
+
+#ifndef atomic64_fetch_or_release
+static inline s64
+atomic64_fetch_or_release(s64 i, atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_fetch_or_relaxed(i, v);
+}
+#define atomic64_fetch_or_release atomic64_fetch_or_release
+#endif
+
+#ifndef atomic64_fetch_or
+static inline s64
+atomic64_fetch_or(s64 i, atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_fetch_or_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_fetch_or atomic64_fetch_or
+#endif
+
+#endif /* atomic64_fetch_or_relaxed */
+
+#ifndef atomic64_fetch_xor_relaxed
+#define atomic64_fetch_xor_acquire atomic64_fetch_xor
+#define atomic64_fetch_xor_release atomic64_fetch_xor
+#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
+#else /* atomic64_fetch_xor_relaxed */
+
+#ifndef atomic64_fetch_xor_acquire
+static inline s64
+atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
+{
+ s64 ret = atomic64_fetch_xor_relaxed(i, v);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
+#endif
+
+#ifndef atomic64_fetch_xor_release
+static inline s64
+atomic64_fetch_xor_release(s64 i, atomic64_t *v)
+{
+ __atomic_release_fence();
+ return atomic64_fetch_xor_relaxed(i, v);
+}
+#define atomic64_fetch_xor_release atomic64_fetch_xor_release
+#endif
+
+#ifndef atomic64_fetch_xor
+static inline s64
+atomic64_fetch_xor(s64 i, atomic64_t *v)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_fetch_xor_relaxed(i, v);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_fetch_xor atomic64_fetch_xor
+#endif
+
+#endif /* atomic64_fetch_xor_relaxed */
+
+#ifndef atomic64_xchg_relaxed
+#define atomic64_xchg_acquire atomic64_xchg
+#define atomic64_xchg_release atomic64_xchg
+#define atomic64_xchg_relaxed atomic64_xchg
+#else /* atomic64_xchg_relaxed */
+
+#ifndef atomic64_xchg_acquire
+static inline s64
+atomic64_xchg_acquire(atomic64_t *v, s64 i)
+{
+ s64 ret = atomic64_xchg_relaxed(v, i);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_xchg_acquire atomic64_xchg_acquire
+#endif
+
+#ifndef atomic64_xchg_release
+static inline s64
+atomic64_xchg_release(atomic64_t *v, s64 i)
+{
+ __atomic_release_fence();
+ return atomic64_xchg_relaxed(v, i);
+}
+#define atomic64_xchg_release atomic64_xchg_release
+#endif
+
+#ifndef atomic64_xchg
+static inline s64
+atomic64_xchg(atomic64_t *v, s64 i)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_xchg_relaxed(v, i);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_xchg atomic64_xchg
+#endif
+
+#endif /* atomic64_xchg_relaxed */
+
+#ifndef atomic64_cmpxchg_relaxed
+#define atomic64_cmpxchg_acquire atomic64_cmpxchg
+#define atomic64_cmpxchg_release atomic64_cmpxchg
+#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
+#else /* atomic64_cmpxchg_relaxed */
+
+#ifndef atomic64_cmpxchg_acquire
+static inline s64
+atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
+{
+ s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
+#endif
+
+#ifndef atomic64_cmpxchg_release
+static inline s64
+atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
+{
+ __atomic_release_fence();
+ return atomic64_cmpxchg_relaxed(v, old, new);
+}
+#define atomic64_cmpxchg_release atomic64_cmpxchg_release
+#endif
+
+#ifndef atomic64_cmpxchg
+static inline s64
+atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
+{
+ s64 ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_cmpxchg_relaxed(v, old, new);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_cmpxchg atomic64_cmpxchg
+#endif
+
+#endif /* atomic64_cmpxchg_relaxed */
+
+#ifndef atomic64_try_cmpxchg_relaxed
+#ifdef atomic64_try_cmpxchg
+#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
+#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
+#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
+#endif /* atomic64_try_cmpxchg */
+
+#ifndef atomic64_try_cmpxchg
+static inline bool
+atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
+{
+ s64 r, o = *old;
+ r = atomic64_cmpxchg(v, o, new);
+ if (unlikely(r != o))
+ *old = r;
+ return likely(r == o);
+}
+#define atomic64_try_cmpxchg atomic64_try_cmpxchg
+#endif
+
+#ifndef atomic64_try_cmpxchg_acquire
+static inline bool
+atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
+{
+ s64 r, o = *old;
+ r = atomic64_cmpxchg_acquire(v, o, new);
+ if (unlikely(r != o))
+ *old = r;
+ return likely(r == o);
+}
+#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
+#endif
+
+#ifndef atomic64_try_cmpxchg_release
+static inline bool
+atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
+{
+ s64 r, o = *old;
+ r = atomic64_cmpxchg_release(v, o, new);
+ if (unlikely(r != o))
+ *old = r;
+ return likely(r == o);
+}
+#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
+#endif
+
+#ifndef atomic64_try_cmpxchg_relaxed
+static inline bool
+atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
+{
+ s64 r, o = *old;
+ r = atomic64_cmpxchg_relaxed(v, o, new);
+ if (unlikely(r != o))
+ *old = r;
+ return likely(r == o);
+}
+#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
+#endif
+
+#else /* atomic64_try_cmpxchg_relaxed */
+
+#ifndef atomic64_try_cmpxchg_acquire
+static inline bool
+atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
+{
+ bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
+ __atomic_acquire_fence();
+ return ret;
+}
+#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
+#endif
+
+#ifndef atomic64_try_cmpxchg_release
+static inline bool
+atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
+{
+ __atomic_release_fence();
+ return atomic64_try_cmpxchg_relaxed(v, old, new);
+}
+#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
+#endif
+
+#ifndef atomic64_try_cmpxchg
+static inline bool
+atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
+{
+ bool ret;
+ __atomic_pre_full_fence();
+ ret = atomic64_try_cmpxchg_relaxed(v, old, new);
+ __atomic_post_full_fence();
+ return ret;
+}
+#define atomic64_try_cmpxchg atomic64_try_cmpxchg
+#endif
+
+#endif /* atomic64_try_cmpxchg_relaxed */
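+
+/*
+ * Editor's note, not part of the generated header: try_cmpxchg() folds
+ * the reload into the failure path, so lock-free update loops need no
+ * explicit re-read. A sketch with a hypothetical transform():
+ *
+ *	s64 old = atomic64_read(&v);
+ *
+ *	do {
+ *		// On failure, old was refreshed with the current value.
+ *	} while (!atomic64_try_cmpxchg(&v, &old, transform(old)));
+ */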
+
+#ifndef atomic64_sub_and_test
+/**
+ * atomic64_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+static inline bool
+atomic64_sub_and_test(s64 i, atomic64_t *v)
+{
+ return atomic64_sub_return(i, v) == 0;
+}
+#define atomic64_sub_and_test atomic64_sub_and_test
+#endif
+
+#ifndef atomic64_dec_and_test
+/**
+ * atomic64_dec_and_test - decrement and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+static inline bool
+atomic64_dec_and_test(atomic64_t *v)
+{
+ return atomic64_dec_return(v) == 0;
+}
+#define atomic64_dec_and_test atomic64_dec_and_test
+#endif
+
+#ifndef atomic64_inc_and_test
+/**
+ * atomic64_inc_and_test - increment and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+static inline bool
+atomic64_inc_and_test(atomic64_t *v)
+{
+ return atomic64_inc_return(v) == 0;
+}
+#define atomic64_inc_and_test atomic64_inc_and_test
+#endif
+
+#ifndef atomic64_add_negative
+/**
+ * atomic64_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when the
+ * result is greater than or equal to zero.
+ */
+static inline bool
+atomic64_add_negative(s64 i, atomic64_t *v)
+{
+ return atomic64_add_return(i, v) < 0;
+}
+#define atomic64_add_negative atomic64_add_negative
+#endif
+
+#ifndef atomic64_fetch_add_unless
+/**
+ * atomic64_fetch_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as @v was not already @u.
+ * Returns the original value of @v.
+ */
+static inline s64
+atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
+{
+ s64 c = atomic64_read(v);
+
+ do {
+ if (unlikely(c == u))
+ break;
+ } while (!atomic64_try_cmpxchg(v, &c, c + a));
+
+ return c;
+}
+#define atomic64_fetch_add_unless atomic64_fetch_add_unless
+#endif
+
+#ifndef atomic64_add_unless
+/**
+ * atomic64_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns true if the addition was done.
+ */
+static inline bool
+atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
+{
+ return atomic64_fetch_add_unless(v, a, u) != u;
+}
+#define atomic64_add_unless atomic64_add_unless
+#endif
+
+#ifndef atomic64_inc_not_zero
+/**
+ * atomic64_inc_not_zero - increment unless the number is zero
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1, if @v is non-zero.
+ * Returns true if the increment was done.
+ */
+static inline bool
+atomic64_inc_not_zero(atomic64_t *v)
+{
+ return atomic64_add_unless(v, 1, 0);
+}
+#define atomic64_inc_not_zero atomic64_inc_not_zero
+#endif
+
+#ifndef atomic64_inc_unless_negative
+static inline bool
+atomic64_inc_unless_negative(atomic64_t *v)
+{
+ s64 c = atomic64_read(v);
+
+ do {
+ if (unlikely(c < 0))
+ return false;
+ } while (!atomic64_try_cmpxchg(v, &c, c + 1));
+
+ return true;
+}
+#define atomic64_inc_unless_negative atomic64_inc_unless_negative
+#endif
+
+#ifndef atomic64_dec_unless_positive
+static inline bool
+atomic64_dec_unless_positive(atomic64_t *v)
+{
+ s64 c = atomic64_read(v);
+
+ do {
+ if (unlikely(c > 0))
+ return false;
+ } while (!atomic64_try_cmpxchg(v, &c, c - 1));
+
+ return true;
+}
+#define atomic64_dec_unless_positive atomic64_dec_unless_positive
+#endif
+
+#ifndef atomic64_dec_if_positive
+static inline s64
+atomic64_dec_if_positive(atomic64_t *v)
+{
+ s64 dec, c = atomic64_read(v);
+
+ do {
+ dec = c - 1;
+ if (unlikely(dec < 0))
+ break;
+ } while (!atomic64_try_cmpxchg(v, &c, dec));
+
+ return dec;
+}
+#define atomic64_dec_if_positive atomic64_dec_if_positive
+#endif
+
+#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
+#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
+
+#endif /* _LINUX_ATOMIC_FALLBACK_H */
+// 25de4a2804d70f57e994fe3b419148658bb5378a
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 1e8e88bdaf09..4c0d009a46f0 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -25,14 +25,6 @@
* See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
*/
-#ifndef atomic_read_acquire
-#define atomic_read_acquire(v) smp_load_acquire(&(v)->counter)
-#endif
-
-#ifndef atomic_set_release
-#define atomic_set_release(v, i) smp_store_release(&(v)->counter, (i))
-#endif
-
/*
* The idea here is to build acquire/release variants by adding explicit
* barriers on top of the relaxed variant. In the case where the relaxed
@@ -79,1238 +71,7 @@
__ret; \
})
-/* atomic_add_return_relaxed */
-#ifndef atomic_add_return_relaxed
-#define atomic_add_return_relaxed atomic_add_return
-#define atomic_add_return_acquire atomic_add_return
-#define atomic_add_return_release atomic_add_return
-
-#else /* atomic_add_return_relaxed */
-
-#ifndef atomic_add_return_acquire
-#define atomic_add_return_acquire(...) \
- __atomic_op_acquire(atomic_add_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic_add_return_release
-#define atomic_add_return_release(...) \
- __atomic_op_release(atomic_add_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic_add_return
-#define atomic_add_return(...) \
- __atomic_op_fence(atomic_add_return, __VA_ARGS__)
-#endif
-#endif /* atomic_add_return_relaxed */
-
-#ifndef atomic_inc
-#define atomic_inc(v) atomic_add(1, (v))
-#endif
-
-/* atomic_inc_return_relaxed */
-#ifndef atomic_inc_return_relaxed
-
-#ifndef atomic_inc_return
-#define atomic_inc_return(v) atomic_add_return(1, (v))
-#define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
-#define atomic_inc_return_acquire(v) atomic_add_return_acquire(1, (v))
-#define atomic_inc_return_release(v) atomic_add_return_release(1, (v))
-#else /* atomic_inc_return */
-#define atomic_inc_return_relaxed atomic_inc_return
-#define atomic_inc_return_acquire atomic_inc_return
-#define atomic_inc_return_release atomic_inc_return
-#endif /* atomic_inc_return */
-
-#else /* atomic_inc_return_relaxed */
-
-#ifndef atomic_inc_return_acquire
-#define atomic_inc_return_acquire(...) \
- __atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic_inc_return_release
-#define atomic_inc_return_release(...) \
- __atomic_op_release(atomic_inc_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic_inc_return
-#define atomic_inc_return(...) \
- __atomic_op_fence(atomic_inc_return, __VA_ARGS__)
-#endif
-#endif /* atomic_inc_return_relaxed */
-
-/* atomic_sub_return_relaxed */
-#ifndef atomic_sub_return_relaxed
-#define atomic_sub_return_relaxed atomic_sub_return
-#define atomic_sub_return_acquire atomic_sub_return
-#define atomic_sub_return_release atomic_sub_return
-
-#else /* atomic_sub_return_relaxed */
-
-#ifndef atomic_sub_return_acquire
-#define atomic_sub_return_acquire(...) \
- __atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic_sub_return_release
-#define atomic_sub_return_release(...) \
- __atomic_op_release(atomic_sub_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic_sub_return
-#define atomic_sub_return(...) \
- __atomic_op_fence(atomic_sub_return, __VA_ARGS__)
-#endif
-#endif /* atomic_sub_return_relaxed */
-
-#ifndef atomic_dec
-#define atomic_dec(v) atomic_sub(1, (v))
-#endif
-
-/* atomic_dec_return_relaxed */
-#ifndef atomic_dec_return_relaxed
-
-#ifndef atomic_dec_return
-#define atomic_dec_return(v) atomic_sub_return(1, (v))
-#define atomic_dec_return_relaxed(v) atomic_sub_return_relaxed(1, (v))
-#define atomic_dec_return_acquire(v) atomic_sub_return_acquire(1, (v))
-#define atomic_dec_return_release(v) atomic_sub_return_release(1, (v))
-#else /* atomic_dec_return */
-#define atomic_dec_return_relaxed atomic_dec_return
-#define atomic_dec_return_acquire atomic_dec_return
-#define atomic_dec_return_release atomic_dec_return
-#endif /* atomic_dec_return */
-
-#else /* atomic_dec_return_relaxed */
-
-#ifndef atomic_dec_return_acquire
-#define atomic_dec_return_acquire(...) \
- __atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic_dec_return_release
-#define atomic_dec_return_release(...) \
- __atomic_op_release(atomic_dec_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic_dec_return
-#define atomic_dec_return(...) \
- __atomic_op_fence(atomic_dec_return, __VA_ARGS__)
-#endif
-#endif /* atomic_dec_return_relaxed */
-
-
-/* atomic_fetch_add_relaxed */
-#ifndef atomic_fetch_add_relaxed
-#define atomic_fetch_add_relaxed atomic_fetch_add
-#define atomic_fetch_add_acquire atomic_fetch_add
-#define atomic_fetch_add_release atomic_fetch_add
-
-#else /* atomic_fetch_add_relaxed */
-
-#ifndef atomic_fetch_add_acquire
-#define atomic_fetch_add_acquire(...) \
- __atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_add_release
-#define atomic_fetch_add_release(...) \
- __atomic_op_release(atomic_fetch_add, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_add
-#define atomic_fetch_add(...) \
- __atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
-#endif
-#endif /* atomic_fetch_add_relaxed */
-
-/* atomic_fetch_inc_relaxed */
-#ifndef atomic_fetch_inc_relaxed
-
-#ifndef atomic_fetch_inc
-#define atomic_fetch_inc(v) atomic_fetch_add(1, (v))
-#define atomic_fetch_inc_relaxed(v) atomic_fetch_add_relaxed(1, (v))
-#define atomic_fetch_inc_acquire(v) atomic_fetch_add_acquire(1, (v))
-#define atomic_fetch_inc_release(v) atomic_fetch_add_release(1, (v))
-#else /* atomic_fetch_inc */
-#define atomic_fetch_inc_relaxed atomic_fetch_inc
-#define atomic_fetch_inc_acquire atomic_fetch_inc
-#define atomic_fetch_inc_release atomic_fetch_inc
-#endif /* atomic_fetch_inc */
-
-#else /* atomic_fetch_inc_relaxed */
-
-#ifndef atomic_fetch_inc_acquire
-#define atomic_fetch_inc_acquire(...) \
- __atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_inc_release
-#define atomic_fetch_inc_release(...) \
- __atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_inc
-#define atomic_fetch_inc(...) \
- __atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
-#endif
-#endif /* atomic_fetch_inc_relaxed */
-
-/* atomic_fetch_sub_relaxed */
-#ifndef atomic_fetch_sub_relaxed
-#define atomic_fetch_sub_relaxed atomic_fetch_sub
-#define atomic_fetch_sub_acquire atomic_fetch_sub
-#define atomic_fetch_sub_release atomic_fetch_sub
-
-#else /* atomic_fetch_sub_relaxed */
-
-#ifndef atomic_fetch_sub_acquire
-#define atomic_fetch_sub_acquire(...) \
- __atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_sub_release
-#define atomic_fetch_sub_release(...) \
- __atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_sub
-#define atomic_fetch_sub(...) \
- __atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
-#endif
-#endif /* atomic_fetch_sub_relaxed */
-
-/* atomic_fetch_dec_relaxed */
-#ifndef atomic_fetch_dec_relaxed
-
-#ifndef atomic_fetch_dec
-#define atomic_fetch_dec(v) atomic_fetch_sub(1, (v))
-#define atomic_fetch_dec_relaxed(v) atomic_fetch_sub_relaxed(1, (v))
-#define atomic_fetch_dec_acquire(v) atomic_fetch_sub_acquire(1, (v))
-#define atomic_fetch_dec_release(v) atomic_fetch_sub_release(1, (v))
-#else /* atomic_fetch_dec */
-#define atomic_fetch_dec_relaxed atomic_fetch_dec
-#define atomic_fetch_dec_acquire atomic_fetch_dec
-#define atomic_fetch_dec_release atomic_fetch_dec
-#endif /* atomic_fetch_dec */
-
-#else /* atomic_fetch_dec_relaxed */
-
-#ifndef atomic_fetch_dec_acquire
-#define atomic_fetch_dec_acquire(...) \
- __atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_dec_release
-#define atomic_fetch_dec_release(...) \
- __atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_dec
-#define atomic_fetch_dec(...) \
- __atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
-#endif
-#endif /* atomic_fetch_dec_relaxed */
-
-/* atomic_fetch_or_relaxed */
-#ifndef atomic_fetch_or_relaxed
-#define atomic_fetch_or_relaxed atomic_fetch_or
-#define atomic_fetch_or_acquire atomic_fetch_or
-#define atomic_fetch_or_release atomic_fetch_or
-
-#else /* atomic_fetch_or_relaxed */
-
-#ifndef atomic_fetch_or_acquire
-#define atomic_fetch_or_acquire(...) \
- __atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_or_release
-#define atomic_fetch_or_release(...) \
- __atomic_op_release(atomic_fetch_or, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_or
-#define atomic_fetch_or(...) \
- __atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
-#endif
-#endif /* atomic_fetch_or_relaxed */
-
-/* atomic_fetch_and_relaxed */
-#ifndef atomic_fetch_and_relaxed
-#define atomic_fetch_and_relaxed atomic_fetch_and
-#define atomic_fetch_and_acquire atomic_fetch_and
-#define atomic_fetch_and_release atomic_fetch_and
-
-#else /* atomic_fetch_and_relaxed */
-
-#ifndef atomic_fetch_and_acquire
-#define atomic_fetch_and_acquire(...) \
- __atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_and_release
-#define atomic_fetch_and_release(...) \
- __atomic_op_release(atomic_fetch_and, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_and
-#define atomic_fetch_and(...) \
- __atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
-#endif
-#endif /* atomic_fetch_and_relaxed */
-
-#ifndef atomic_andnot
-#define atomic_andnot(i, v) atomic_and(~(int)(i), (v))
-#endif
-
-#ifndef atomic_fetch_andnot_relaxed
-
-#ifndef atomic_fetch_andnot
-#define atomic_fetch_andnot(i, v) atomic_fetch_and(~(int)(i), (v))
-#define atomic_fetch_andnot_relaxed(i, v) atomic_fetch_and_relaxed(~(int)(i), (v))
-#define atomic_fetch_andnot_acquire(i, v) atomic_fetch_and_acquire(~(int)(i), (v))
-#define atomic_fetch_andnot_release(i, v) atomic_fetch_and_release(~(int)(i), (v))
-#else /* atomic_fetch_andnot */
-#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
-#define atomic_fetch_andnot_acquire atomic_fetch_andnot
-#define atomic_fetch_andnot_release atomic_fetch_andnot
-#endif /* atomic_fetch_andnot */
-
-#else /* atomic_fetch_andnot_relaxed */
-
-#ifndef atomic_fetch_andnot_acquire
-#define atomic_fetch_andnot_acquire(...) \
- __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_andnot_release
-#define atomic_fetch_andnot_release(...) \
- __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_andnot
-#define atomic_fetch_andnot(...) \
- __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
-#endif
-#endif /* atomic_fetch_andnot_relaxed */
-
-/* atomic_fetch_xor_relaxed */
-#ifndef atomic_fetch_xor_relaxed
-#define atomic_fetch_xor_relaxed atomic_fetch_xor
-#define atomic_fetch_xor_acquire atomic_fetch_xor
-#define atomic_fetch_xor_release atomic_fetch_xor
-
-#else /* atomic_fetch_xor_relaxed */
-
-#ifndef atomic_fetch_xor_acquire
-#define atomic_fetch_xor_acquire(...) \
- __atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_xor_release
-#define atomic_fetch_xor_release(...) \
- __atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
-#endif
-
-#ifndef atomic_fetch_xor
-#define atomic_fetch_xor(...) \
- __atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
-#endif
-#endif /* atomic_fetch_xor_relaxed */
-
-
-/* atomic_xchg_relaxed */
-#ifndef atomic_xchg_relaxed
-#define atomic_xchg_relaxed atomic_xchg
-#define atomic_xchg_acquire atomic_xchg
-#define atomic_xchg_release atomic_xchg
-
-#else /* atomic_xchg_relaxed */
-
-#ifndef atomic_xchg_acquire
-#define atomic_xchg_acquire(...) \
- __atomic_op_acquire(atomic_xchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic_xchg_release
-#define atomic_xchg_release(...) \
- __atomic_op_release(atomic_xchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic_xchg
-#define atomic_xchg(...) \
- __atomic_op_fence(atomic_xchg, __VA_ARGS__)
-#endif
-#endif /* atomic_xchg_relaxed */
-
-/* atomic_cmpxchg_relaxed */
-#ifndef atomic_cmpxchg_relaxed
-#define atomic_cmpxchg_relaxed atomic_cmpxchg
-#define atomic_cmpxchg_acquire atomic_cmpxchg
-#define atomic_cmpxchg_release atomic_cmpxchg
-
-#else /* atomic_cmpxchg_relaxed */
-
-#ifndef atomic_cmpxchg_acquire
-#define atomic_cmpxchg_acquire(...) \
- __atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic_cmpxchg_release
-#define atomic_cmpxchg_release(...) \
- __atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic_cmpxchg
-#define atomic_cmpxchg(...) \
- __atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
-#endif
-#endif /* atomic_cmpxchg_relaxed */
-
-#ifndef atomic_try_cmpxchg
-
-#define __atomic_try_cmpxchg(type, _p, _po, _n) \
-({ \
- typeof(_po) __po = (_po); \
- typeof(*(_po)) __r, __o = *__po; \
- __r = atomic_cmpxchg##type((_p), __o, (_n)); \
- if (unlikely(__r != __o)) \
- *__po = __r; \
- likely(__r == __o); \
-})
-
-#define atomic_try_cmpxchg(_p, _po, _n) __atomic_try_cmpxchg(, _p, _po, _n)
-#define atomic_try_cmpxchg_relaxed(_p, _po, _n) __atomic_try_cmpxchg(_relaxed, _p, _po, _n)
-#define atomic_try_cmpxchg_acquire(_p, _po, _n) __atomic_try_cmpxchg(_acquire, _p, _po, _n)
-#define atomic_try_cmpxchg_release(_p, _po, _n) __atomic_try_cmpxchg(_release, _p, _po, _n)
-
-#else /* atomic_try_cmpxchg */
-#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
-#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
-#define atomic_try_cmpxchg_release atomic_try_cmpxchg
-#endif /* atomic_try_cmpxchg */
-
-/* cmpxchg_relaxed */
-#ifndef cmpxchg_relaxed
-#define cmpxchg_relaxed cmpxchg
-#define cmpxchg_acquire cmpxchg
-#define cmpxchg_release cmpxchg
-
-#else /* cmpxchg_relaxed */
-
-#ifndef cmpxchg_acquire
-#define cmpxchg_acquire(...) \
- __atomic_op_acquire(cmpxchg, __VA_ARGS__)
-#endif
-
-#ifndef cmpxchg_release
-#define cmpxchg_release(...) \
- __atomic_op_release(cmpxchg, __VA_ARGS__)
-#endif
-
-#ifndef cmpxchg
-#define cmpxchg(...) \
- __atomic_op_fence(cmpxchg, __VA_ARGS__)
-#endif
-#endif /* cmpxchg_relaxed */
-
-/* cmpxchg64_relaxed */
-#ifndef cmpxchg64_relaxed
-#define cmpxchg64_relaxed cmpxchg64
-#define cmpxchg64_acquire cmpxchg64
-#define cmpxchg64_release cmpxchg64
-
-#else /* cmpxchg64_relaxed */
-
-#ifndef cmpxchg64_acquire
-#define cmpxchg64_acquire(...) \
- __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
-#endif
-
-#ifndef cmpxchg64_release
-#define cmpxchg64_release(...) \
- __atomic_op_release(cmpxchg64, __VA_ARGS__)
-#endif
-
-#ifndef cmpxchg64
-#define cmpxchg64(...) \
- __atomic_op_fence(cmpxchg64, __VA_ARGS__)
-#endif
-#endif /* cmpxchg64_relaxed */
-
-/* xchg_relaxed */
-#ifndef xchg_relaxed
-#define xchg_relaxed xchg
-#define xchg_acquire xchg
-#define xchg_release xchg
-
-#else /* xchg_relaxed */
-
-#ifndef xchg_acquire
-#define xchg_acquire(...) __atomic_op_acquire(xchg, __VA_ARGS__)
-#endif
-
-#ifndef xchg_release
-#define xchg_release(...) __atomic_op_release(xchg, __VA_ARGS__)
-#endif
-
-#ifndef xchg
-#define xchg(...) __atomic_op_fence(xchg, __VA_ARGS__)
-#endif
-#endif /* xchg_relaxed */
-
-/**
- * atomic_fetch_add_unless - add unless the number is already a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, if @v was not already @u.
- * Returns the original value of @v.
- */
-#ifndef atomic_fetch_add_unless
-static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
- int c = atomic_read(v);
-
- do {
- if (unlikely(c == u))
- break;
- } while (!atomic_try_cmpxchg(v, &c, c + a));
-
- return c;
-}
-#endif
-
-/**
- * atomic_add_unless - add unless the number is already a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, if @v was not already @u.
- * Returns true if the addition was done.
- */
-static inline bool atomic_add_unless(atomic_t *v, int a, int u)
-{
- return atomic_fetch_add_unless(v, a, u) != u;
-}
-
-/**
- * atomic_inc_not_zero - increment unless the number is zero
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1, if @v is non-zero.
- * Returns true if the increment was done.
- */
-#ifndef atomic_inc_not_zero
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-#endif
-
-/**
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#ifndef atomic_inc_and_test
-static inline bool atomic_inc_and_test(atomic_t *v)
-{
- return atomic_inc_return(v) == 0;
-}
-#endif
-
-/**
- * atomic_dec_and_test - decrement and test
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
-#ifndef atomic_dec_and_test
-static inline bool atomic_dec_and_test(atomic_t *v)
-{
- return atomic_dec_return(v) == 0;
-}
-#endif
-
-/**
- * atomic_sub_and_test - subtract value from variable and test result
- * @i: integer value to subtract
- * @v: pointer of type atomic_t
- *
- * Atomically subtracts @i from @v and returns
- * true if the result is zero, or false for all
- * other cases.
- */
-#ifndef atomic_sub_and_test
-static inline bool atomic_sub_and_test(int i, atomic_t *v)
-{
- return atomic_sub_return(i, v) == 0;
-}
-#endif
-
-/**
- * atomic_add_negative - add and test if negative
- * @i: integer value to add
- * @v: pointer of type atomic_t
- *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
- */
-#ifndef atomic_add_negative
-static inline bool atomic_add_negative(int i, atomic_t *v)
-{
- return atomic_add_return(i, v) < 0;
-}
-#endif
-
-#ifndef atomic_inc_unless_negative
-static inline bool atomic_inc_unless_negative(atomic_t *v)
-{
- int c = atomic_read(v);
-
- do {
- if (unlikely(c < 0))
- return false;
- } while (!atomic_try_cmpxchg(v, &c, c + 1));
-
- return true;
-}
-#endif
-
-#ifndef atomic_dec_unless_positive
-static inline bool atomic_dec_unless_positive(atomic_t *v)
-{
- int c = atomic_read(v);
-
- do {
- if (unlikely(c > 0))
- return false;
- } while (!atomic_try_cmpxchg(v, &c, c - 1));
-
- return true;
-}
-#endif
-
-/*
- * atomic_dec_if_positive - decrement by 1 if old value positive
- * @v: pointer of type atomic_t
- *
- * The function returns the old value of *v minus 1, even if
- * the atomic variable, v, was not decremented.
- */
-#ifndef atomic_dec_if_positive
-static inline int atomic_dec_if_positive(atomic_t *v)
-{
- int dec, c = atomic_read(v);
-
- do {
- dec = c - 1;
- if (unlikely(dec < 0))
- break;
- } while (!atomic_try_cmpxchg(v, &c, dec));
-
- return dec;
-}
-#endif
-
-#define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
-#define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
-
-#ifdef CONFIG_GENERIC_ATOMIC64
-#include <asm-generic/atomic64.h>
-#endif
-
-#ifndef atomic64_read_acquire
-#define atomic64_read_acquire(v) smp_load_acquire(&(v)->counter)
-#endif
-
-#ifndef atomic64_set_release
-#define atomic64_set_release(v, i) smp_store_release(&(v)->counter, (i))
-#endif
-
-/* atomic64_add_return_relaxed */
-#ifndef atomic64_add_return_relaxed
-#define atomic64_add_return_relaxed atomic64_add_return
-#define atomic64_add_return_acquire atomic64_add_return
-#define atomic64_add_return_release atomic64_add_return
-
-#else /* atomic64_add_return_relaxed */
-
-#ifndef atomic64_add_return_acquire
-#define atomic64_add_return_acquire(...) \
- __atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_add_return_release
-#define atomic64_add_return_release(...) \
- __atomic_op_release(atomic64_add_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_add_return
-#define atomic64_add_return(...) \
- __atomic_op_fence(atomic64_add_return, __VA_ARGS__)
-#endif
-#endif /* atomic64_add_return_relaxed */
-
-#ifndef atomic64_inc
-#define atomic64_inc(v) atomic64_add(1, (v))
-#endif
-
-/* atomic64_inc_return_relaxed */
-#ifndef atomic64_inc_return_relaxed
-
-#ifndef atomic64_inc_return
-#define atomic64_inc_return(v) atomic64_add_return(1, (v))
-#define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1, (v))
-#define atomic64_inc_return_acquire(v) atomic64_add_return_acquire(1, (v))
-#define atomic64_inc_return_release(v) atomic64_add_return_release(1, (v))
-#else /* atomic64_inc_return */
-#define atomic64_inc_return_relaxed atomic64_inc_return
-#define atomic64_inc_return_acquire atomic64_inc_return
-#define atomic64_inc_return_release atomic64_inc_return
-#endif /* atomic64_inc_return */
-
-#else /* atomic64_inc_return_relaxed */
-
-#ifndef atomic64_inc_return_acquire
-#define atomic64_inc_return_acquire(...) \
- __atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_inc_return_release
-#define atomic64_inc_return_release(...) \
- __atomic_op_release(atomic64_inc_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_inc_return
-#define atomic64_inc_return(...) \
- __atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
-#endif
-#endif /* atomic64_inc_return_relaxed */
-
-
-/* atomic64_sub_return_relaxed */
-#ifndef atomic64_sub_return_relaxed
-#define atomic64_sub_return_relaxed atomic64_sub_return
-#define atomic64_sub_return_acquire atomic64_sub_return
-#define atomic64_sub_return_release atomic64_sub_return
-
-#else /* atomic64_sub_return_relaxed */
-
-#ifndef atomic64_sub_return_acquire
-#define atomic64_sub_return_acquire(...) \
- __atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_sub_return_release
-#define atomic64_sub_return_release(...) \
- __atomic_op_release(atomic64_sub_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_sub_return
-#define atomic64_sub_return(...) \
- __atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
-#endif
-#endif /* atomic64_sub_return_relaxed */
-
-#ifndef atomic64_dec
-#define atomic64_dec(v) atomic64_sub(1, (v))
-#endif
-
-/* atomic64_dec_return_relaxed */
-#ifndef atomic64_dec_return_relaxed
-
-#ifndef atomic64_dec_return
-#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
-#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1, (v))
-#define atomic64_dec_return_acquire(v) atomic64_sub_return_acquire(1, (v))
-#define atomic64_dec_return_release(v) atomic64_sub_return_release(1, (v))
-#else /* atomic64_dec_return */
-#define atomic64_dec_return_relaxed atomic64_dec_return
-#define atomic64_dec_return_acquire atomic64_dec_return
-#define atomic64_dec_return_release atomic64_dec_return
-#endif /* atomic64_dec_return */
-
-#else /* atomic64_dec_return_relaxed */
-
-#ifndef atomic64_dec_return_acquire
-#define atomic64_dec_return_acquire(...) \
- __atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_dec_return_release
-#define atomic64_dec_return_release(...) \
- __atomic_op_release(atomic64_dec_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_dec_return
-#define atomic64_dec_return(...) \
- __atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
-#endif
-#endif /* atomic64_dec_return_relaxed */
-
-
-/* atomic64_fetch_add_relaxed */
-#ifndef atomic64_fetch_add_relaxed
-#define atomic64_fetch_add_relaxed atomic64_fetch_add
-#define atomic64_fetch_add_acquire atomic64_fetch_add
-#define atomic64_fetch_add_release atomic64_fetch_add
-
-#else /* atomic64_fetch_add_relaxed */
-
-#ifndef atomic64_fetch_add_acquire
-#define atomic64_fetch_add_acquire(...) \
- __atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_add_release
-#define atomic64_fetch_add_release(...) \
- __atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_add
-#define atomic64_fetch_add(...) \
- __atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
-#endif
-#endif /* atomic64_fetch_add_relaxed */
-
-/* atomic64_fetch_inc_relaxed */
-#ifndef atomic64_fetch_inc_relaxed
-
-#ifndef atomic64_fetch_inc
-#define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v))
-#define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v))
-#define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v))
-#define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v))
-#else /* atomic64_fetch_inc */
-#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
-#define atomic64_fetch_inc_acquire atomic64_fetch_inc
-#define atomic64_fetch_inc_release atomic64_fetch_inc
-#endif /* atomic64_fetch_inc */
-
-#else /* atomic64_fetch_inc_relaxed */
-
-#ifndef atomic64_fetch_inc_acquire
-#define atomic64_fetch_inc_acquire(...) \
- __atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_inc_release
-#define atomic64_fetch_inc_release(...) \
- __atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_inc
-#define atomic64_fetch_inc(...) \
- __atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
-#endif
-#endif /* atomic64_fetch_inc_relaxed */
-
-/* atomic64_fetch_sub_relaxed */
-#ifndef atomic64_fetch_sub_relaxed
-#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
-#define atomic64_fetch_sub_acquire atomic64_fetch_sub
-#define atomic64_fetch_sub_release atomic64_fetch_sub
-
-#else /* atomic64_fetch_sub_relaxed */
-
-#ifndef atomic64_fetch_sub_acquire
-#define atomic64_fetch_sub_acquire(...) \
- __atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_sub_release
-#define atomic64_fetch_sub_release(...) \
- __atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_sub
-#define atomic64_fetch_sub(...) \
- __atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
-#endif
-#endif /* atomic64_fetch_sub_relaxed */
-
-/* atomic64_fetch_dec_relaxed */
-#ifndef atomic64_fetch_dec_relaxed
-
-#ifndef atomic64_fetch_dec
-#define atomic64_fetch_dec(v) atomic64_fetch_sub(1, (v))
-#define atomic64_fetch_dec_relaxed(v) atomic64_fetch_sub_relaxed(1, (v))
-#define atomic64_fetch_dec_acquire(v) atomic64_fetch_sub_acquire(1, (v))
-#define atomic64_fetch_dec_release(v) atomic64_fetch_sub_release(1, (v))
-#else /* atomic64_fetch_dec */
-#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
-#define atomic64_fetch_dec_acquire atomic64_fetch_dec
-#define atomic64_fetch_dec_release atomic64_fetch_dec
-#endif /* atomic64_fetch_dec */
-
-#else /* atomic64_fetch_dec_relaxed */
-
-#ifndef atomic64_fetch_dec_acquire
-#define atomic64_fetch_dec_acquire(...) \
- __atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_dec_release
-#define atomic64_fetch_dec_release(...) \
- __atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_dec
-#define atomic64_fetch_dec(...) \
- __atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
-#endif
-#endif /* atomic64_fetch_dec_relaxed */
-
-/* atomic64_fetch_or_relaxed */
-#ifndef atomic64_fetch_or_relaxed
-#define atomic64_fetch_or_relaxed atomic64_fetch_or
-#define atomic64_fetch_or_acquire atomic64_fetch_or
-#define atomic64_fetch_or_release atomic64_fetch_or
-
-#else /* atomic64_fetch_or_relaxed */
-
-#ifndef atomic64_fetch_or_acquire
-#define atomic64_fetch_or_acquire(...) \
- __atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_or_release
-#define atomic64_fetch_or_release(...) \
- __atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_or
-#define atomic64_fetch_or(...) \
- __atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
-#endif
-#endif /* atomic64_fetch_or_relaxed */
-
-/* atomic64_fetch_and_relaxed */
-#ifndef atomic64_fetch_and_relaxed
-#define atomic64_fetch_and_relaxed atomic64_fetch_and
-#define atomic64_fetch_and_acquire atomic64_fetch_and
-#define atomic64_fetch_and_release atomic64_fetch_and
-
-#else /* atomic64_fetch_and_relaxed */
-
-#ifndef atomic64_fetch_and_acquire
-#define atomic64_fetch_and_acquire(...) \
- __atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_and_release
-#define atomic64_fetch_and_release(...) \
- __atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_and
-#define atomic64_fetch_and(...) \
- __atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
-#endif
-#endif /* atomic64_fetch_and_relaxed */
-
-#ifndef atomic64_andnot
-#define atomic64_andnot(i, v) atomic64_and(~(long long)(i), (v))
-#endif
-
-#ifndef atomic64_fetch_andnot_relaxed
-
-#ifndef atomic64_fetch_andnot
-#define atomic64_fetch_andnot(i, v) atomic64_fetch_and(~(long long)(i), (v))
-#define atomic64_fetch_andnot_relaxed(i, v) atomic64_fetch_and_relaxed(~(long long)(i), (v))
-#define atomic64_fetch_andnot_acquire(i, v) atomic64_fetch_and_acquire(~(long long)(i), (v))
-#define atomic64_fetch_andnot_release(i, v) atomic64_fetch_and_release(~(long long)(i), (v))
-#else /* atomic64_fetch_andnot */
-#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
-#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
-#define atomic64_fetch_andnot_release atomic64_fetch_andnot
-#endif /* atomic64_fetch_andnot */
-
-#else /* atomic64_fetch_andnot_relaxed */
-
-#ifndef atomic64_fetch_andnot_acquire
-#define atomic64_fetch_andnot_acquire(...) \
- __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_andnot_release
-#define atomic64_fetch_andnot_release(...) \
- __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_andnot
-#define atomic64_fetch_andnot(...) \
- __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
-#endif
-#endif /* atomic64_fetch_andnot_relaxed */
-
-/* atomic64_fetch_xor_relaxed */
-#ifndef atomic64_fetch_xor_relaxed
-#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
-#define atomic64_fetch_xor_acquire atomic64_fetch_xor
-#define atomic64_fetch_xor_release atomic64_fetch_xor
-
-#else /* atomic64_fetch_xor_relaxed */
-
-#ifndef atomic64_fetch_xor_acquire
-#define atomic64_fetch_xor_acquire(...) \
- __atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_xor_release
-#define atomic64_fetch_xor_release(...) \
- __atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_fetch_xor
-#define atomic64_fetch_xor(...) \
- __atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
-#endif
-#endif /* atomic64_fetch_xor_relaxed */
-
-
-/* atomic64_xchg_relaxed */
-#ifndef atomic64_xchg_relaxed
-#define atomic64_xchg_relaxed atomic64_xchg
-#define atomic64_xchg_acquire atomic64_xchg
-#define atomic64_xchg_release atomic64_xchg
-
-#else /* atomic64_xchg_relaxed */
-
-#ifndef atomic64_xchg_acquire
-#define atomic64_xchg_acquire(...) \
- __atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_xchg_release
-#define atomic64_xchg_release(...) \
- __atomic_op_release(atomic64_xchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_xchg
-#define atomic64_xchg(...) \
- __atomic_op_fence(atomic64_xchg, __VA_ARGS__)
-#endif
-#endif /* atomic64_xchg_relaxed */
-
-/* atomic64_cmpxchg_relaxed */
-#ifndef atomic64_cmpxchg_relaxed
-#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
-#define atomic64_cmpxchg_acquire atomic64_cmpxchg
-#define atomic64_cmpxchg_release atomic64_cmpxchg
-
-#else /* atomic64_cmpxchg_relaxed */
-
-#ifndef atomic64_cmpxchg_acquire
-#define atomic64_cmpxchg_acquire(...) \
- __atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_cmpxchg_release
-#define atomic64_cmpxchg_release(...) \
- __atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_cmpxchg
-#define atomic64_cmpxchg(...) \
- __atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
-#endif
-#endif /* atomic64_cmpxchg_relaxed */
-
-#ifndef atomic64_try_cmpxchg
-
-#define __atomic64_try_cmpxchg(type, _p, _po, _n) \
-({ \
- typeof(_po) __po = (_po); \
- typeof(*(_po)) __r, __o = *__po; \
- __r = atomic64_cmpxchg##type((_p), __o, (_n)); \
- if (unlikely(__r != __o)) \
- *__po = __r; \
- likely(__r == __o); \
-})
-
-#define atomic64_try_cmpxchg(_p, _po, _n) __atomic64_try_cmpxchg(, _p, _po, _n)
-#define atomic64_try_cmpxchg_relaxed(_p, _po, _n) __atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
-#define atomic64_try_cmpxchg_acquire(_p, _po, _n) __atomic64_try_cmpxchg(_acquire, _p, _po, _n)
-#define atomic64_try_cmpxchg_release(_p, _po, _n) __atomic64_try_cmpxchg(_release, _p, _po, _n)
-
-#else /* atomic64_try_cmpxchg */
-#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
-#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
-#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
-#endif /* atomic64_try_cmpxchg */
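Unlike atomic64_cmpxchg(), the try_cmpxchg() helpers above write the observed
value back through @_po on failure, so a retry loop does not have to re-read
the variable on each iteration; this is the idiom the fallbacks below are
built on. A minimal sketch (the function name is illustrative):

    /* Add @a to @v with a try_cmpxchg() retry loop. */
    static inline void example_atomic64_add(long long a, atomic64_t *v)
    {
            long long old = atomic64_read(v);

            /* On failure, @old is updated to the value just observed. */
            while (!atomic64_try_cmpxchg(v, &old, old + a))
                    ;
    }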
-
-/**
- * atomic64_fetch_add_unless - add unless the number is already a given value
- * @v: pointer of type atomic64_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, if @v was not already @u.
- * Returns the original value of @v.
- */
-#ifndef atomic64_fetch_add_unless
-static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
- long long u)
-{
- long long c = atomic64_read(v);
-
- do {
- if (unlikely(c == u))
- break;
- } while (!atomic64_try_cmpxchg(v, &c, c + a));
-
- return c;
-}
-#endif
-
-/**
- * atomic64_add_unless - add unless the number is already a given value
- * @v: pointer of type atomic64_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, if @v was not already @u.
- * Returns true if the addition was done.
- */
-static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
-{
- return atomic64_fetch_add_unless(v, a, u) != u;
-}
-
-/**
- * atomic64_inc_not_zero - increment unless the number is zero
- * @v: pointer of type atomic64_t
- *
- * Atomically increments @v by 1, if @v is non-zero.
- * Returns true if the increment was done.
- */
-#ifndef atomic64_inc_not_zero
-#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
-#endif
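atomic64_inc_not_zero() is the usual building block for reference counts whose
last reference may be dropped concurrently: a lookup path must fail rather
than resurrect a dying object. A hedged sketch (struct and helper names are
illustrative):

    struct obj {
            atomic64_t refcount;
    };

    /* Take a reference only if the object is still live. */
    static inline bool obj_tryget(struct obj *o)
    {
            return atomic64_inc_not_zero(&o->refcount);
    }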
-
-/**
- * atomic64_inc_and_test - increment and test
- * @v: pointer of type atomic64_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#ifndef atomic64_inc_and_test
-static inline bool atomic64_inc_and_test(atomic64_t *v)
-{
- return atomic64_inc_return(v) == 0;
-}
-#endif
-
-/**
- * atomic64_dec_and_test - decrement and test
- * @v: pointer of type atomic64_t
- *
- * Atomically decrements @v by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
-#ifndef atomic64_dec_and_test
-static inline bool atomic64_dec_and_test(atomic64_t *v)
-{
- return atomic64_dec_return(v) == 0;
-}
-#endif
-
-/**
- * atomic64_sub_and_test - subtract value from variable and test result
- * @i: integer value to subtract
- * @v: pointer of type atomic64_t
- *
- * Atomically subtracts @i from @v and returns
- * true if the result is zero, or false for all
- * other cases.
- */
-#ifndef atomic64_sub_and_test
-static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
-{
- return atomic64_sub_return(i, v) == 0;
-}
-#endif
-
-/**
- * atomic64_add_negative - add and test if negative
- * @i: integer value to add
- * @v: pointer of type atomic64_t
- *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * the result is greater than or equal to zero.
- */
-#ifndef atomic64_add_negative
-static inline bool atomic64_add_negative(long long i, atomic64_t *v)
-{
- return atomic64_add_return(i, v) < 0;
-}
-#endif
-
-#ifndef atomic64_inc_unless_negative
-static inline bool atomic64_inc_unless_negative(atomic64_t *v)
-{
- long long c = atomic64_read(v);
-
- do {
- if (unlikely(c < 0))
- return false;
- } while (!atomic64_try_cmpxchg(v, &c, c + 1));
-
- return true;
-}
-#endif
-
-#ifndef atomic64_dec_unless_positive
-static inline bool atomic64_dec_unless_positive(atomic64_t *v)
-{
- long long c = atomic64_read(v);
-
- do {
- if (unlikely(c > 0))
- return false;
- } while (!atomic64_try_cmpxchg(v, &c, c - 1));
-
- return true;
-}
-#endif
-
-/*
- * atomic64_dec_if_positive - decrement by 1 if old value positive
- * @v: pointer of type atomic64_t
- *
- * Returns the old value of @v minus 1, even if @v was not
- * decremented.
- */
-#ifndef atomic64_dec_if_positive
-static inline long long atomic64_dec_if_positive(atomic64_t *v)
-{
- long long dec, c = atomic64_read(v);
-
- do {
- dec = c - 1;
- if (unlikely(dec < 0))
- break;
- } while (!atomic64_try_cmpxchg(v, &c, dec));
-
- return dec;
-}
-#endif
-
-#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
-#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
+#include <linux/atomic-fallback.h>
#include <asm-generic/atomic-long.h>
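Every block removed above follows one template: if an architecture supplies
only the _relaxed form of an operation, the fully ordered, _acquire and
_release forms are synthesized around it with fences; if it supplies only the
fully ordered form, the weaker variants simply alias it. The generated
linux/atomic-fallback.h encodes the same rule. A rough sketch of the
synthesis, assuming smp_mb__after_atomic() as the acquire fence (the real
headers use per-architecture fence helpers):

    /* Illustrative only: derive an acquire variant from a relaxed one. */
    #define example_fetch_add_acquire(i, v)                             \
    ({                                                                  \
            long long __ret = atomic64_fetch_add_relaxed(i, v);         \
            smp_mb__after_atomic(); /* upgrade to acquire ordering */   \
            __ret;                                                      \
    })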
diff --git a/include/linux/lockdep.h b/include/linux/lockdep.h
index c5335df2372f..79c3873d58ac 100644
--- a/include/linux/lockdep.h
+++ b/include/linux/lockdep.h
@@ -46,16 +46,22 @@ extern int lock_stat;
#define NR_LOCKDEP_CACHING_CLASSES 2
/*
- * Lock-classes are keyed via unique addresses, by embedding the
- * lockclass-key into the kernel (or module) .data section. (For
- * static locks we use the lock address itself as the key.)
+ * A lockdep key is associated with each lock object. For static locks we use
+ * the lock address itself as the key. Dynamically allocated lock objects can
+ * have a statically or dynamically allocated key. Dynamically allocated lock
+ * keys must be registered before being used and must be unregistered before
+ * the key memory is freed.
*/
struct lockdep_subclass_key {
char __one_byte;
} __attribute__ ((__packed__));
+/* hash_entry is used to keep track of dynamically allocated keys. */
struct lock_class_key {
- struct lockdep_subclass_key subkeys[MAX_LOCKDEP_SUBCLASSES];
+ union {
+ struct hlist_node hash_entry;
+ struct lockdep_subclass_key subkeys[MAX_LOCKDEP_SUBCLASSES];
+ };
};
extern struct lock_class_key __lockdep_no_validate__;
@@ -63,7 +69,8 @@ extern struct lock_class_key __lockdep_no_validate__;
#define LOCKSTAT_POINTS 4
/*
- * The lock-class itself:
+ * The lock-class itself. The order of the structure members matters.
+ * reinit_class() zeroes the key member and all subsequent members.
*/
struct lock_class {
/*
@@ -72,10 +79,19 @@ struct lock_class {
struct hlist_node hash_entry;
/*
- * global list of all lock-classes:
+ * Entry in all_lock_classes when in use. Entry in free_lock_classes
+ * when not in use. Instances that are being freed are on one of the
+ * zapped_classes lists.
*/
struct list_head lock_entry;
+ /*
+ * These fields represent a directed graph of lock dependencies,
+ * to every node we attach a list of "forward" and a list of
+ * "backward" graph nodes.
+ */
+ struct list_head locks_after, locks_before;
+
struct lockdep_subclass_key *key;
unsigned int subclass;
unsigned int dep_gen_id;
@@ -87,13 +103,6 @@ struct lock_class {
struct stack_trace usage_traces[XXX_LOCK_USAGE_STATES];
/*
- * These fields represent a directed graph of lock dependencies,
- * to every node we attach a list of "forward" and a list of
- * "backward" graph nodes.
- */
- struct list_head locks_after, locks_before;
-
- /*
* Generation counter, when doing certain classes of graph walking,
* to ensure that we check one node only once:
*/
@@ -104,7 +113,7 @@ struct lock_class {
unsigned long contention_point[LOCKSTAT_POINTS];
unsigned long contending_point[LOCKSTAT_POINTS];
#endif
-};
+} __no_randomize_layout;
#ifdef CONFIG_LOCK_STAT
struct lock_time {
@@ -178,6 +187,7 @@ static inline void lockdep_copy_map(struct lockdep_map *to,
struct lock_list {
struct list_head entry;
struct lock_class *class;
+ struct lock_class *links_to;
struct stack_trace trace;
int distance;
@@ -264,10 +274,14 @@ extern void lockdep_reset(void);
extern void lockdep_reset_lock(struct lockdep_map *lock);
extern void lockdep_free_key_range(void *start, unsigned long size);
extern asmlinkage void lockdep_sys_exit(void);
+extern void lockdep_set_selftest_task(struct task_struct *task);
extern void lockdep_off(void);
extern void lockdep_on(void);
+extern void lockdep_register_key(struct lock_class_key *key);
+extern void lockdep_unregister_key(struct lock_class_key *key);
+
/*
* These methods are used by specific locking variants (spinlocks,
* rwlocks, mutexes and rwsems) to pass init/acquire/release events
@@ -394,6 +408,10 @@ static inline void lockdep_on(void)
{
}
+static inline void lockdep_set_selftest_task(struct task_struct *task)
+{
+}
+
# define lock_acquire(l, s, t, r, c, n, i) do { } while (0)
# define lock_release(l, n, i) do { } while (0)
# define lock_downgrade(l, i) do { } while (0)
@@ -425,6 +443,14 @@ static inline void lockdep_on(void)
*/
struct lock_class_key { };
+static inline void lockdep_register_key(struct lock_class_key *key)
+{
+}
+
+static inline void lockdep_unregister_key(struct lock_class_key *key)
+{
+}
+
/*
* The lockdep_map takes no space if lockdep is disabled:
*/
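The new lockdep_register_key()/lockdep_unregister_key() pair is what allows a
dynamically allocated object to carry its own lock class rather than share a
single static key across all instances; the workqueue conversion in this
series is the first user. A usage sketch under assumed names (the structure
and helpers are illustrative):

    struct my_obj {
            spinlock_t lock;
            struct lock_class_key key;      /* one lock class per instance */
    };

    static struct my_obj *my_obj_alloc(void)
    {
            struct my_obj *o = kzalloc(sizeof(*o), GFP_KERNEL);

            if (!o)
                    return NULL;
            /* Register the key before first use ... */
            lockdep_register_key(&o->key);
            spin_lock_init(&o->lock);
            lockdep_set_class(&o->lock, &o->key);
            return o;
    }

    static void my_obj_free(struct my_obj *o)
    {
            /* ... and unregister it before the key memory is freed. */
            lockdep_unregister_key(&o->key);
            kfree(o);
    }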
diff --git a/include/linux/percpu-rwsem.h b/include/linux/percpu-rwsem.h
index 71b75643c432..03cb4b6f842e 100644
--- a/include/linux/percpu-rwsem.h
+++ b/include/linux/percpu-rwsem.h
@@ -29,7 +29,7 @@ static struct percpu_rw_semaphore name = { \
extern int __percpu_down_read(struct percpu_rw_semaphore *, int);
extern void __percpu_up_read(struct percpu_rw_semaphore *);
-static inline void percpu_down_read_preempt_disable(struct percpu_rw_semaphore *sem)
+static inline void percpu_down_read(struct percpu_rw_semaphore *sem)
{
might_sleep();
@@ -47,16 +47,10 @@ static inline void percpu_down_read_preempt_disable(struct percpu_rw_semaphore *
__this_cpu_inc(*sem->read_count);
if (unlikely(!rcu_sync_is_idle(&sem->rss)))
__percpu_down_read(sem, false); /* Unconditional memory barrier */
- barrier();
/*
- * The barrier() prevents the compiler from
+ * The preempt_enable() prevents the compiler from
* bleeding the critical section out.
*/
-}
-
-static inline void percpu_down_read(struct percpu_rw_semaphore *sem)
-{
- percpu_down_read_preempt_disable(sem);
preempt_enable();
}
@@ -83,13 +77,9 @@ static inline int percpu_down_read_trylock(struct percpu_rw_semaphore *sem)
return ret;
}
-static inline void percpu_up_read_preempt_enable(struct percpu_rw_semaphore *sem)
+static inline void percpu_up_read(struct percpu_rw_semaphore *sem)
{
- /*
- * The barrier() prevents the compiler from
- * bleeding the critical section out.
- */
- barrier();
+ preempt_disable();
/*
* Same as in percpu_down_read().
*/
@@ -102,12 +92,6 @@ static inline void percpu_up_read_preempt_enable(struct percpu_rw_semaphore *sem
rwsem_release(&sem->rw_sem.dep_map, 1, _RET_IP_);
}
-static inline void percpu_up_read(struct percpu_rw_semaphore *sem)
-{
- preempt_disable();
- percpu_up_read_preempt_enable(sem);
-}
-
extern void percpu_down_write(struct percpu_rw_semaphore *);
extern void percpu_up_write(struct percpu_rw_semaphore *);
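After this simplification, percpu_down_read()/percpu_up_read() are again the
entire read-side API, and readers run with preemption enabled across the
critical section. A typical reader, with an illustrative semaphore name:

    static DEFINE_STATIC_PERCPU_RWSEM(my_sem);

    static void reader(void)
    {
            percpu_down_read(&my_sem);
            /* read-side critical section; may sleep */
            percpu_up_read(&my_sem);
    }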
diff --git a/include/linux/sched/wake_q.h b/include/linux/sched/wake_q.h
index 545f37138057..ad826d2a4557 100644
--- a/include/linux/sched/wake_q.h
+++ b/include/linux/sched/wake_q.h
@@ -51,8 +51,8 @@ static inline void wake_q_init(struct wake_q_head *head)
head->lastp = &head->first;
}
-extern void wake_q_add(struct wake_q_head *head,
- struct task_struct *task);
+extern void wake_q_add(struct wake_q_head *head, struct task_struct *task);
+extern void wake_q_add_safe(struct wake_q_head *head, struct task_struct *task);
extern void wake_up_q(struct wake_q_head *head);
#endif /* _LINUX_SCHED_WAKE_Q_H */
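As described in this series, wake_q_add() takes its own reference on the
task, while the new wake_q_add_safe() consumes a reference the caller already
holds, after which the task pointer must not be touched. A hedged sketch of
the latter (the wrapper name is illustrative):

    static void queue_wakeup(struct wake_q_head *head, struct task_struct *p)
    {
            get_task_struct(p);
            /* Donates our reference to the queue; do not use @p afterwards. */
            wake_q_add_safe(head, p);
    }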
diff --git a/include/linux/workqueue.h b/include/linux/workqueue.h
index 60d673e15632..d9a1a480e920 100644
--- a/include/linux/workqueue.h
+++ b/include/linux/workqueue.h
@@ -390,43 +390,23 @@ extern struct workqueue_struct *system_freezable_wq;
extern struct workqueue_struct *system_power_efficient_wq;
extern struct workqueue_struct *system_freezable_power_efficient_wq;
-extern struct workqueue_struct *
-__alloc_workqueue_key(const char *fmt, unsigned int flags, int max_active,
- struct lock_class_key *key, const char *lock_name, ...) __printf(1, 6);
-
/**
* alloc_workqueue - allocate a workqueue
* @fmt: printf format for the name of the workqueue
* @flags: WQ_* flags
* @max_active: max in-flight work items, 0 for default
- * @args...: args for @fmt
+ * remaining args: args for @fmt
*
* Allocate a workqueue with the specified parameters. For detailed
* information on WQ_* flags, please refer to
* Documentation/core-api/workqueue.rst.
*
- * The __lock_name macro dance is to guarantee that a single lock_class_key
- * doesn't end up with different names, which isn't allowed by lockdep.
- *
* RETURNS:
* Pointer to the allocated workqueue on success, %NULL on failure.
*/
-#ifdef CONFIG_LOCKDEP
-#define alloc_workqueue(fmt, flags, max_active, args...) \
-({ \
- static struct lock_class_key __key; \
- const char *__lock_name; \
- \
- __lock_name = "(wq_completion)"#fmt#args; \
- \
- __alloc_workqueue_key((fmt), (flags), (max_active), \
- &__key, __lock_name, ##args); \
-})
-#else
-#define alloc_workqueue(fmt, flags, max_active, args...) \
- __alloc_workqueue_key((fmt), (flags), (max_active), \
- NULL, NULL, ##args)
-#endif
+struct workqueue_struct *alloc_workqueue(const char *fmt,
+ unsigned int flags,
+ int max_active, ...);
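With alloc_workqueue() now a plain function, the lockdep key handling happens
internally (via the dynamic-key API added above), and callers look the same
with or without CONFIG_LOCKDEP. A typical call, with illustrative names and
flags:

    struct workqueue_struct *wq;

    wq = alloc_workqueue("my_wq", WQ_UNBOUND | WQ_MEM_RECLAIM, 0);
    if (!wq)
            return -ENOMEM;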
/**
* alloc_ordered_workqueue - allocate an ordered workqueue