| field | value | date |
|---|---|---|
| author | Thomas Gleixner <tglx@linutronix.de> | 2023-03-23 21:55:30 +0100 |
| committer | Peter Zijlstra <peterz@infradead.org> | 2023-03-28 10:39:29 +0200 |
| commit | e5ab9eff46b04c5a04778e40d7092fed3fda52ca | |
| tree | 87cac8205ddaf1e0e81fba3be4f0e8d7dcf7f584 /include/linux/atomic/atomic-arch-fallback.h | |
| parent | fe15c26ee26efa11741a7b632e9f23b01aca4cc6 | |
atomics: Provide atomic_add_negative() variants
atomic_add_negative() does not provide the relaxed/acquire/release
variants.
Provide them in preparation for a new scalable reference count algorithm.
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20230323102800.101763813@linutronix.de
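
For a sense of how the new variants are meant to be used: a release-ordered add-and-test is the natural building block for the drop side of a reference count. The sketch below is hypothetical (the `myref` type and `myref_put()` helper are invented for illustration, and this is not the algorithm the changelog alludes to); it mirrors the pattern `refcount_t` already uses, pairing a release-ordered decrement with an acquire barrier in the thread that wins the race to free:

```c
#include <linux/atomic.h>

/*
 * Hypothetical example type: the counter holds "references - 1",
 * so the transition from 0 to -1 marks the final put.
 */
struct myref {
	atomic_t cnt;
};

static void myref_put(struct myref *r, void (*release)(struct myref *))
{
	/*
	 * The release ordering makes all of this thread's prior
	 * accesses to *r visible before the decrement; the acquire
	 * barrier after the control dependency ensures the freeing
	 * thread observes them before tearing the object down.
	 */
	if (atomic_add_negative_release(-1, &r->cnt)) {
		smp_acquire__after_ctrl_dep();
		release(r);
	}
}
```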
Diffstat (limited to 'include/linux/atomic/atomic-arch-fallback.h')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | include/linux/atomic/atomic-arch-fallback.h | 208 |

1 file changed, 199 insertions, 9 deletions
diff --git a/include/linux/atomic/atomic-arch-fallback.h b/include/linux/atomic/atomic-arch-fallback.h
index 77bc5522e61c..4226379a232d 100644
--- a/include/linux/atomic/atomic-arch-fallback.h
+++ b/include/linux/atomic/atomic-arch-fallback.h
@@ -1208,15 +1208,21 @@ arch_atomic_inc_and_test(atomic_t *v)
 #define arch_atomic_inc_and_test arch_atomic_inc_and_test
 #endif
 
+#ifndef arch_atomic_add_negative_relaxed
+#ifdef arch_atomic_add_negative
+#define arch_atomic_add_negative_acquire	arch_atomic_add_negative
+#define arch_atomic_add_negative_release	arch_atomic_add_negative
+#define arch_atomic_add_negative_relaxed	arch_atomic_add_negative
+#endif /* arch_atomic_add_negative */
+
 #ifndef arch_atomic_add_negative
 /**
- * arch_atomic_add_negative - add and test if negative
+ * arch_atomic_add_negative - Add and test if negative
  * @i: integer value to add
  * @v: pointer of type atomic_t
  *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
+ * Atomically adds @i to @v and returns true if the result is negative,
+ * or false when the result is greater than or equal to zero.
  */
 static __always_inline bool
 arch_atomic_add_negative(int i, atomic_t *v)
@@ -1226,6 +1232,95 @@ arch_atomic_add_negative(int i, atomic_t *v)
 #define arch_atomic_add_negative arch_atomic_add_negative
 #endif
 
+#ifndef arch_atomic_add_negative_acquire
+/**
+ * arch_atomic_add_negative_acquire - Add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns true if the result is negative,
+ * or false when the result is greater than or equal to zero.
+ */
+static __always_inline bool
+arch_atomic_add_negative_acquire(int i, atomic_t *v)
+{
+	return arch_atomic_add_return_acquire(i, v) < 0;
+}
+#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
+#endif
+
+#ifndef arch_atomic_add_negative_release
+/**
+ * arch_atomic_add_negative_release - Add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns true if the result is negative,
+ * or false when the result is greater than or equal to zero.
+ */
+static __always_inline bool
+arch_atomic_add_negative_release(int i, atomic_t *v)
+{
+	return arch_atomic_add_return_release(i, v) < 0;
+}
+#define arch_atomic_add_negative_release arch_atomic_add_negative_release
+#endif
+
+#ifndef arch_atomic_add_negative_relaxed
+/**
+ * arch_atomic_add_negative_relaxed - Add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns true if the result is negative,
+ * or false when the result is greater than or equal to zero.
+ */
+static __always_inline bool
+arch_atomic_add_negative_relaxed(int i, atomic_t *v)
+{
+	return arch_atomic_add_return_relaxed(i, v) < 0;
+}
+#define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed
+#endif
+
+#else /* arch_atomic_add_negative_relaxed */
+
+#ifndef arch_atomic_add_negative_acquire
+static __always_inline bool
+arch_atomic_add_negative_acquire(int i, atomic_t *v)
+{
+	bool ret = arch_atomic_add_negative_relaxed(i, v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
+#endif
+
+#ifndef arch_atomic_add_negative_release
+static __always_inline bool
+arch_atomic_add_negative_release(int i, atomic_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic_add_negative_relaxed(i, v);
+}
+#define arch_atomic_add_negative_release arch_atomic_add_negative_release
+#endif
+
+#ifndef arch_atomic_add_negative
+static __always_inline bool
+arch_atomic_add_negative(int i, atomic_t *v)
+{
+	bool ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic_add_negative_relaxed(i, v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic_add_negative arch_atomic_add_negative
+#endif
+
+#endif /* arch_atomic_add_negative_relaxed */
+
 #ifndef arch_atomic_fetch_add_unless
 /**
  * arch_atomic_fetch_add_unless - add unless the number is already a given value
@@ -2329,15 +2424,21 @@ arch_atomic64_inc_and_test(atomic64_t *v)
 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
 #endif
 
+#ifndef arch_atomic64_add_negative_relaxed
+#ifdef arch_atomic64_add_negative
+#define arch_atomic64_add_negative_acquire	arch_atomic64_add_negative
+#define arch_atomic64_add_negative_release	arch_atomic64_add_negative
+#define arch_atomic64_add_negative_relaxed	arch_atomic64_add_negative
+#endif /* arch_atomic64_add_negative */
+
 #ifndef arch_atomic64_add_negative
 /**
- * arch_atomic64_add_negative - add and test if negative
+ * arch_atomic64_add_negative - Add and test if negative
  * @i: integer value to add
  * @v: pointer of type atomic64_t
  *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
+ * Atomically adds @i to @v and returns true if the result is negative,
+ * or false when the result is greater than or equal to zero.
  */
 static __always_inline bool
 arch_atomic64_add_negative(s64 i, atomic64_t *v)
@@ -2347,6 +2448,95 @@ arch_atomic64_add_negative(s64 i, atomic64_t *v)
 #define arch_atomic64_add_negative arch_atomic64_add_negative
 #endif
 
+#ifndef arch_atomic64_add_negative_acquire
+/**
+ * arch_atomic64_add_negative_acquire - Add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically adds @i to @v and returns true if the result is negative,
+ * or false when the result is greater than or equal to zero.
+ */
+static __always_inline bool
+arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
+{
+	return arch_atomic64_add_return_acquire(i, v) < 0;
+}
+#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
+#endif
+
+#ifndef arch_atomic64_add_negative_release
+/**
+ * arch_atomic64_add_negative_release - Add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically adds @i to @v and returns true if the result is negative,
+ * or false when the result is greater than or equal to zero.
+ */
+static __always_inline bool
+arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
+{
+	return arch_atomic64_add_return_release(i, v) < 0;
+}
+#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
+#endif
+
+#ifndef arch_atomic64_add_negative_relaxed
+/**
+ * arch_atomic64_add_negative_relaxed - Add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically adds @i to @v and returns true if the result is negative,
+ * or false when the result is greater than or equal to zero.
+ */
+static __always_inline bool
+arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
+{
+	return arch_atomic64_add_return_relaxed(i, v) < 0;
+}
+#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed
+#endif
+
+#else /* arch_atomic64_add_negative_relaxed */
+
+#ifndef arch_atomic64_add_negative_acquire
+static __always_inline bool
+arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
+{
+	bool ret = arch_atomic64_add_negative_relaxed(i, v);
+	__atomic_acquire_fence();
+	return ret;
+}
+#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
+#endif
+
+#ifndef arch_atomic64_add_negative_release
+static __always_inline bool
+arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
+{
+	__atomic_release_fence();
+	return arch_atomic64_add_negative_relaxed(i, v);
+}
+#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
+#endif
+
+#ifndef arch_atomic64_add_negative
+static __always_inline bool
+arch_atomic64_add_negative(s64 i, atomic64_t *v)
+{
+	bool ret;
+	__atomic_pre_full_fence();
+	ret = arch_atomic64_add_negative_relaxed(i, v);
+	__atomic_post_full_fence();
+	return ret;
+}
+#define arch_atomic64_add_negative arch_atomic64_add_negative
+#endif
+
+#endif /* arch_atomic64_add_negative_relaxed */
+
 #ifndef arch_atomic64_fetch_add_unless
 /**
  * arch_atomic64_fetch_add_unless - add unless the number is already a given value
@@ -2456,4 +2646,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
 #endif
 
 #endif /* _LINUX_ATOMIC_FALLBACK_H */
-// b5e87bdd5ede61470c29f7a7e4de781af3770f09
+// 00071fffa021cec66f6290d706d69c91df87bade
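
Two fallback directions are visible in the patch: an architecture that supplies only the fully ordered arch_atomic_add_negative() can legitimately stand in for all weaker orderings (the #define mapping at the top of each hunk), while one that supplies only the relaxed form gets the stronger orderings synthesized from fences (the #else branch). For readers more at home with C11 atomics than the kernel's fence primitives, here is a rough user-space approximation of the three orderings; this is illustrative only, not kernel code, and the helper names are invented:

```c
#include <stdatomic.h>
#include <stdbool.h>

/*
 * Rough C11 equivalents of the new helpers. fetch_add returns the
 * old value, so the freshly written value is (old + i).
 */
static inline bool add_negative_relaxed(atomic_int *v, int i)
{
	return (atomic_fetch_add_explicit(v, i, memory_order_relaxed) + i) < 0;
}

static inline bool add_negative_acquire(atomic_int *v, int i)
{
	return (atomic_fetch_add_explicit(v, i, memory_order_acquire) + i) < 0;
}

static inline bool add_negative_release(atomic_int *v, int i)
{
	return (atomic_fetch_add_explicit(v, i, memory_order_release) + i) < 0;
}
```

The kernel fallback composes the orderings from explicit fences around the relaxed RMW instead, which lets an architecture provide just one cheap primitive and still get correct acquire, release, and fully ordered variants.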
