Commit e5ab9eff authored by Thomas Gleixner's avatar Thomas Gleixner Committed by Peter Zijlstra

atomics: Provide atomic_add_negative() variants

atomic_add_negative() does not provide the relaxed/acquire/release
variants.

Provide them in preparation for a new scalable reference count algorithm.
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20230323102800.101763813@linutronix.de
parent fe15c26e
...@@ -1208,15 +1208,21 @@ arch_atomic_inc_and_test(atomic_t *v) ...@@ -1208,15 +1208,21 @@ arch_atomic_inc_and_test(atomic_t *v)
#define arch_atomic_inc_and_test arch_atomic_inc_and_test #define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif #endif
#ifndef arch_atomic_add_negative_relaxed
#ifdef arch_atomic_add_negative
/*
 * The architecture supplies only a fully ordered arch_atomic_add_negative().
 * A fully ordered operation is an acceptable implementation of the
 * acquire/release/relaxed orderings as well, so reuse it for all of them.
 */
#define arch_atomic_add_negative_acquire arch_atomic_add_negative
#define arch_atomic_add_negative_release arch_atomic_add_negative
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative
#endif /* arch_atomic_add_negative */
#ifndef arch_atomic_add_negative #ifndef arch_atomic_add_negative
/** /**
* arch_atomic_add_negative - add and test if negative * arch_atomic_add_negative - Add and test if negative
* @i: integer value to add * @i: integer value to add
* @v: pointer of type atomic_t * @v: pointer of type atomic_t
* *
* Atomically adds @i to @v and returns true * Atomically adds @i to @v and returns true if the result is negative,
* if the result is negative, or false when * or false when the result is greater than or equal to zero.
* result is greater than or equal to zero.
*/ */
static __always_inline bool static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v) arch_atomic_add_negative(int i, atomic_t *v)
...@@ -1226,6 +1232,95 @@ arch_atomic_add_negative(int i, atomic_t *v) ...@@ -1226,6 +1232,95 @@ arch_atomic_add_negative(int i, atomic_t *v)
#define arch_atomic_add_negative arch_atomic_add_negative #define arch_atomic_add_negative arch_atomic_add_negative
#endif #endif
#ifndef arch_atomic_add_negative_acquire
/**
 * arch_atomic_add_negative_acquire - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically add @i to @v with acquire ordering. Returns true when the
 * resulting value is negative, false when it is greater than or equal
 * to zero.
 */
static __always_inline bool
arch_atomic_add_negative_acquire(int i, atomic_t *v)
{
	int res = arch_atomic_add_return_acquire(i, v);

	return res < 0;
}
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
#endif

#ifndef arch_atomic_add_negative_release
/**
 * arch_atomic_add_negative_release - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically add @i to @v with release ordering. Returns true when the
 * resulting value is negative, false when it is greater than or equal
 * to zero.
 */
static __always_inline bool
arch_atomic_add_negative_release(int i, atomic_t *v)
{
	int res = arch_atomic_add_return_release(i, v);

	return res < 0;
}
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
#endif

#ifndef arch_atomic_add_negative_relaxed
/**
 * arch_atomic_add_negative_relaxed - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically add @i to @v with no ordering guarantee. Returns true when
 * the resulting value is negative, false when it is greater than or
 * equal to zero.
 */
static __always_inline bool
arch_atomic_add_negative_relaxed(int i, atomic_t *v)
{
	int res = arch_atomic_add_return_relaxed(i, v);

	return res < 0;
}
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed
#endif
#else /* arch_atomic_add_negative_relaxed */

#ifndef arch_atomic_add_negative_acquire
/*
 * Acquire variant: relaxed op first, then the acquire fence, so later
 * accesses cannot be ordered before the atomic operation.
 */
static __always_inline bool
arch_atomic_add_negative_acquire(int i, atomic_t *v)
{
	bool ret = arch_atomic_add_negative_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
#endif

#ifndef arch_atomic_add_negative_release
/*
 * Release variant: release fence first, then the relaxed op, so earlier
 * accesses cannot be ordered after the atomic operation.
 */
static __always_inline bool
arch_atomic_add_negative_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_negative_relaxed(i, v);
}
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
#endif

#ifndef arch_atomic_add_negative
/*
 * Fully ordered variant: bracket the relaxed op with full fences.
 */
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_negative_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#endif /* arch_atomic_add_negative_relaxed */
#ifndef arch_atomic_fetch_add_unless #ifndef arch_atomic_fetch_add_unless
/** /**
* arch_atomic_fetch_add_unless - add unless the number is already a given value * arch_atomic_fetch_add_unless - add unless the number is already a given value
...@@ -2329,15 +2424,21 @@ arch_atomic64_inc_and_test(atomic64_t *v) ...@@ -2329,15 +2424,21 @@ arch_atomic64_inc_and_test(atomic64_t *v)
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif #endif
#ifndef arch_atomic64_add_negative_relaxed
#ifdef arch_atomic64_add_negative
/*
 * The architecture supplies only a fully ordered arch_atomic64_add_negative().
 * A fully ordered operation is an acceptable implementation of the
 * acquire/release/relaxed orderings as well, so reuse it for all of them.
 */
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative
#define arch_atomic64_add_negative_release arch_atomic64_add_negative
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative
#endif /* arch_atomic64_add_negative */
#ifndef arch_atomic64_add_negative #ifndef arch_atomic64_add_negative
/** /**
* arch_atomic64_add_negative - add and test if negative * arch_atomic64_add_negative - Add and test if negative
* @i: integer value to add * @i: integer value to add
* @v: pointer of type atomic64_t * @v: pointer of type atomic64_t
* *
* Atomically adds @i to @v and returns true * Atomically adds @i to @v and returns true if the result is negative,
* if the result is negative, or false when * or false when the result is greater than or equal to zero.
* result is greater than or equal to zero.
*/ */
static __always_inline bool static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v) arch_atomic64_add_negative(s64 i, atomic64_t *v)
...@@ -2347,6 +2448,95 @@ arch_atomic64_add_negative(s64 i, atomic64_t *v) ...@@ -2347,6 +2448,95 @@ arch_atomic64_add_negative(s64 i, atomic64_t *v)
#define arch_atomic64_add_negative arch_atomic64_add_negative #define arch_atomic64_add_negative arch_atomic64_add_negative
#endif #endif
#ifndef arch_atomic64_add_negative_acquire
/**
 * arch_atomic64_add_negative_acquire - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically add @i to @v with acquire ordering. Returns true when the
 * resulting value is negative, false when it is greater than or equal
 * to zero.
 */
static __always_inline bool
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	s64 res = arch_atomic64_add_return_acquire(i, v);

	return res < 0;
}
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
#endif

#ifndef arch_atomic64_add_negative_release
/**
 * arch_atomic64_add_negative_release - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically add @i to @v with release ordering. Returns true when the
 * resulting value is negative, false when it is greater than or equal
 * to zero.
 */
static __always_inline bool
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	s64 res = arch_atomic64_add_return_release(i, v);

	return res < 0;
}
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
#endif

#ifndef arch_atomic64_add_negative_relaxed
/**
 * arch_atomic64_add_negative_relaxed - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically add @i to @v with no ordering guarantee. Returns true when
 * the resulting value is negative, false when it is greater than or
 * equal to zero.
 */
static __always_inline bool
arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
{
	s64 res = arch_atomic64_add_return_relaxed(i, v);

	return res < 0;
}
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed
#endif
#else /* arch_atomic64_add_negative_relaxed */

#ifndef arch_atomic64_add_negative_acquire
/*
 * Acquire variant: relaxed op first, then the acquire fence, so later
 * accesses cannot be ordered before the atomic operation.
 */
static __always_inline bool
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	bool ret = arch_atomic64_add_negative_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
#endif

#ifndef arch_atomic64_add_negative_release
/*
 * Release variant: release fence first, then the relaxed op, so earlier
 * accesses cannot be ordered after the atomic operation.
 */
static __always_inline bool
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_negative_relaxed(i, v);
}
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
#endif

#ifndef arch_atomic64_add_negative
/*
 * Fully ordered variant: bracket the relaxed op with full fences.
 */
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_negative_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif

#endif /* arch_atomic64_add_negative_relaxed */
#ifndef arch_atomic64_fetch_add_unless #ifndef arch_atomic64_fetch_add_unless
/** /**
* arch_atomic64_fetch_add_unless - add unless the number is already a given value * arch_atomic64_fetch_add_unless - add unless the number is already a given value
...@@ -2456,4 +2646,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v) ...@@ -2456,4 +2646,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
#endif #endif
#endif /* _LINUX_ATOMIC_FALLBACK_H */ #endif /* _LINUX_ATOMIC_FALLBACK_H */
// b5e87bdd5ede61470c29f7a7e4de781af3770f09 // 00071fffa021cec66f6290d706d69c91df87bade
...@@ -592,6 +592,28 @@ atomic_add_negative(int i, atomic_t *v) ...@@ -592,6 +592,28 @@ atomic_add_negative(int i, atomic_t *v)
return arch_atomic_add_negative(i, v); return arch_atomic_add_negative(i, v);
} }
/*
 * Instrumented wrappers: report the atomic read-modify-write on @v to the
 * sanitizer instrumentation, then defer to the arch_ implementation.
 */
static __always_inline bool
atomic_add_negative_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_negative_acquire(i, v);
}

static __always_inline bool
atomic_add_negative_release(int i, atomic_t *v)
{
	/* NOTE(review): kcsan_release() presumably marks the release for KCSAN — confirm */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_negative_release(i, v);
}

static __always_inline bool
atomic_add_negative_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_negative_relaxed(i, v);
}
static __always_inline int static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u) atomic_fetch_add_unless(atomic_t *v, int a, int u)
{ {
...@@ -1211,6 +1233,28 @@ atomic64_add_negative(s64 i, atomic64_t *v) ...@@ -1211,6 +1233,28 @@ atomic64_add_negative(s64 i, atomic64_t *v)
return arch_atomic64_add_negative(i, v); return arch_atomic64_add_negative(i, v);
} }
/*
 * Instrumented wrappers: report the atomic read-modify-write on @v to the
 * sanitizer instrumentation, then defer to the arch_ implementation.
 */
static __always_inline bool
atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_negative_acquire(i, v);
}

static __always_inline bool
atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	/* NOTE(review): kcsan_release() presumably marks the release for KCSAN — confirm */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_negative_release(i, v);
}

static __always_inline bool
atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_negative_relaxed(i, v);
}
static __always_inline s64 static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{ {
...@@ -1830,6 +1874,28 @@ atomic_long_add_negative(long i, atomic_long_t *v) ...@@ -1830,6 +1874,28 @@ atomic_long_add_negative(long i, atomic_long_t *v)
return arch_atomic_long_add_negative(i, v); return arch_atomic_long_add_negative(i, v);
} }
/*
 * Instrumented wrappers: report the atomic read-modify-write on @v to the
 * sanitizer instrumentation, then defer to the arch_ implementation.
 */
static __always_inline bool
atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_add_negative_acquire(i, v);
}

static __always_inline bool
atomic_long_add_negative_release(long i, atomic_long_t *v)
{
	/* NOTE(review): kcsan_release() presumably marks the release for KCSAN — confirm */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_add_negative_release(i, v);
}

static __always_inline bool
atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_add_negative_relaxed(i, v);
}
static __always_inline long static __always_inline long
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{ {
...@@ -2083,4 +2149,4 @@ atomic_long_dec_if_positive(atomic_long_t *v) ...@@ -2083,4 +2149,4 @@ atomic_long_dec_if_positive(atomic_long_t *v)
}) })
#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */ #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
// 764f741eb77a7ad565dc8d99ce2837d5542e8aee // 1b485de9cbaa4900de59e14ee2084357eaeb1c3a
...@@ -479,6 +479,24 @@ arch_atomic_long_add_negative(long i, atomic_long_t *v) ...@@ -479,6 +479,24 @@ arch_atomic_long_add_negative(long i, atomic_long_t *v)
return arch_atomic64_add_negative(i, v); return arch_atomic64_add_negative(i, v);
} }
/*
 * atomic_long_t mapping onto atomic64_t (assumes this is the CONFIG_64BIT
 * branch of atomic-long.h — confirm against the enclosing #ifdef, which is
 * outside this hunk).
 */
static __always_inline bool
arch_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_add_negative_acquire(i, v);
}

static __always_inline bool
arch_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
	return arch_atomic64_add_negative_release(i, v);
}

static __always_inline bool
arch_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_add_negative_relaxed(i, v);
}
static __always_inline long static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{ {
...@@ -973,6 +991,24 @@ arch_atomic_long_add_negative(long i, atomic_long_t *v) ...@@ -973,6 +991,24 @@ arch_atomic_long_add_negative(long i, atomic_long_t *v)
return arch_atomic_add_negative(i, v); return arch_atomic_add_negative(i, v);
} }
/*
 * atomic_long_t mapping onto atomic_t (assumes this is the 32-bit branch
 * of atomic-long.h — confirm against the enclosing #else, which is outside
 * this hunk).
 */
static __always_inline bool
arch_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_add_negative_acquire(i, v);
}

static __always_inline bool
arch_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
	return arch_atomic_add_negative_release(i, v);
}

static __always_inline bool
arch_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_add_negative_relaxed(i, v);
}
static __always_inline long static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{ {
...@@ -1011,4 +1047,4 @@ arch_atomic_long_dec_if_positive(atomic_long_t *v) ...@@ -1011,4 +1047,4 @@ arch_atomic_long_dec_if_positive(atomic_long_t *v)
#endif /* CONFIG_64BIT */ #endif /* CONFIG_64BIT */
#endif /* _LINUX_ATOMIC_LONG_H */ #endif /* _LINUX_ATOMIC_LONG_H */
// e8f0e08ff072b74d180eabe2ad001282b38c2c88 // a194c07d7d2f4b0e178d3c118c919775d5d65f50
...@@ -33,7 +33,7 @@ try_cmpxchg B v p:old i:new ...@@ -33,7 +33,7 @@ try_cmpxchg B v p:old i:new
sub_and_test b i v sub_and_test b i v
dec_and_test b v dec_and_test b v
inc_and_test b v inc_and_test b v
add_negative b i v add_negative B i v
add_unless fb v i:a i:u add_unless fb v i:a i:u
inc_not_zero b v inc_not_zero b v
inc_unless_negative b v inc_unless_negative b v
......
cat <<EOF cat <<EOF
/** /**
* arch_${atomic}_add_negative - add and test if negative * arch_${atomic}_add_negative${order} - Add and test if negative
* @i: integer value to add * @i: integer value to add
* @v: pointer of type ${atomic}_t * @v: pointer of type ${atomic}_t
* *
* Atomically adds @i to @v and returns true * Atomically adds @i to @v and returns true if the result is negative,
* if the result is negative, or false when * or false when the result is greater than or equal to zero.
* result is greater than or equal to zero.
*/ */
static __always_inline bool static __always_inline bool
arch_${atomic}_add_negative(${int} i, ${atomic}_t *v) arch_${atomic}_add_negative${order}(${int} i, ${atomic}_t *v)
{ {
return arch_${atomic}_add_return(i, v) < 0; return arch_${atomic}_add_return${order}(i, v) < 0;
} }
EOF EOF
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment