Commit 63ab7bd0 authored by Davidlohr Bueso, committed by Ingo Molnar

locking/asm-generic: Add _{relaxed|acquire|release}() variants for inc/dec atomics

Similar to what we have for regular add/sub calls. For now, no actual arch
implements them, so everyone falls back to the default atomics... iow,
nothing changes. These will be used in future primitives.
Signed-off-by: Davidlohr Bueso <dbueso@suse.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Thomas Gleixner <tglx@linutronix.de>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Will Deacon <will.deacon@arm.com>
Cc: linux-kernel@vger.kernel.org
Link: http://lkml.kernel.org/r/1443643395-17016-2-git-send-email-dave@stgolabs.net
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 82fc167c
@@ -159,19 +159,24 @@ static inline int atomic_long_add_negative(long i, atomic_long_t *l)
	return ATOMIC_LONG_PFX(_add_negative)(i, v);
}
/*
 * Generate atomic_long_{inc,dec}_return{,_relaxed,_acquire,_release}().
 *
 * Each generated wrapper casts the atomic_long_t to the underlying
 * ATOMIC_LONG_PFX(_t) type (atomic_t or atomic64_t depending on
 * BITS_PER_LONG) and forwards to the correspondingly named sized
 * primitive, e.g. atomic_long_inc_return_acquire() ->
 * ATOMIC_LONG_PFX(_inc_return_acquire)().
 *
 * @op: operation name, "inc" or "dec"
 * @mo: memory-ordering suffix: empty (fully ordered), _relaxed,
 *      _acquire or _release
 */
#define ATOMIC_LONG_INC_DEC_OP(op, mo)					\
static inline long							\
atomic_long_##op##_return##mo(atomic_long_t *l)				\
{									\
	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
									\
	return (long)ATOMIC_LONG_PFX(_##op##_return##mo)(v);		\
}
ATOMIC_LONG_INC_DEC_OP(inc,)
ATOMIC_LONG_INC_DEC_OP(inc, _relaxed)
ATOMIC_LONG_INC_DEC_OP(inc, _acquire)
ATOMIC_LONG_INC_DEC_OP(inc, _release)
ATOMIC_LONG_INC_DEC_OP(dec,)
ATOMIC_LONG_INC_DEC_OP(dec, _relaxed)
ATOMIC_LONG_INC_DEC_OP(dec, _acquire)
ATOMIC_LONG_INC_DEC_OP(dec, _release)
#undef ATOMIC_LONG_INC_DEC_OP
static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
...
@@ -90,6 +90,30 @@ static inline int atomic_read_ctrl(const atomic_t *v)
#endif
#endif /* atomic_add_return_relaxed */
/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
/*
 * The arch provides no relaxed atomic_inc_return(): the fully ordered
 * op is the only implementation, so all ordering variants map to it
 * (a stronger ordering is always a correct implementation of a weaker
 * one).
 */
#define atomic_inc_return_relaxed atomic_inc_return
#define atomic_inc_return_acquire atomic_inc_return
#define atomic_inc_return_release atomic_inc_return
#else /* atomic_inc_return_relaxed */
/*
 * The arch provides _relaxed: build whichever of the acquire/release/
 * fully-ordered variants it did not define from the relaxed op via the
 * __atomic_op_{acquire,release,fence}() helpers (defined earlier in
 * this header), which presumably add the required barriers around the
 * ##_relaxed call.
 */
#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...) \
__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...) \
__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return
#define atomic_inc_return(...) \
__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */
/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed atomic_sub_return
@@ -114,6 +138,30 @@ static inline int atomic_read_ctrl(const atomic_t *v)
#endif
#endif /* atomic_sub_return_relaxed */
/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
/*
 * No relaxed arch implementation of atomic_dec_return(): every
 * ordering variant falls back to the fully ordered op.
 */
#define atomic_dec_return_relaxed atomic_dec_return
#define atomic_dec_return_acquire atomic_dec_return
#define atomic_dec_return_release atomic_dec_return
#else /* atomic_dec_return_relaxed */
/*
 * The arch provides _relaxed: synthesize any ordering variants it left
 * undefined by wrapping the relaxed op with the appropriate barrier
 * helpers.
 */
#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...) \
__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...) \
__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return
#define atomic_dec_return(...) \
__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed atomic_xchg
@@ -194,6 +242,31 @@ static inline int atomic_read_ctrl(const atomic_t *v)
#endif
#endif /* atomic64_add_return_relaxed */
/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
/*
 * No relaxed arch implementation of atomic64_inc_return(): every
 * ordering variant falls back to the fully ordered op.
 */
#define atomic64_inc_return_relaxed atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#else /* atomic64_inc_return_relaxed */
/*
 * The arch provides _relaxed: synthesize any ordering variants it left
 * undefined by wrapping the relaxed op with the appropriate barrier
 * helpers.
 */
#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...) \
__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...) \
__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return
#define atomic64_inc_return(...) \
__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */
/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed atomic64_sub_return
@@ -218,6 +291,30 @@ static inline int atomic_read_ctrl(const atomic_t *v)
#endif
#endif /* atomic64_sub_return_relaxed */
/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
/*
 * No relaxed arch implementation of atomic64_dec_return(): every
 * ordering variant falls back to the fully ordered op.
 */
#define atomic64_dec_return_relaxed atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#else /* atomic64_dec_return_relaxed */
/*
 * The arch provides _relaxed: synthesize any ordering variants it left
 * undefined by wrapping the relaxed op with the appropriate barrier
 * helpers.
 */
#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...) \
__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...) \
__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return
#define atomic64_dec_return(...) \
__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */
/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed atomic64_xchg
...
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment