Commit 28aa2bda authored by Peter Zijlstra, committed by Ingo Molnar


locking/atomic: Implement atomic{,64,_long}_fetch_{add,sub,and,andnot,or,xor}{,_relaxed,_acquire,_release}()

Now that all the architectures have implemented support for these new
atomic primitives, add the generic infrastructure to expose and use
them.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Borislav Petkov <bp@suse.de>
Cc: Davidlohr Bueso <dave@stgolabs.net>
Cc: Frederic Weisbecker <fweisbec@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Will Deacon <will.deacon@arm.com>
Cc: linux-arch@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent e1213332
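
As a quick orientation (editorial note, not part of the patch): atomic_<op>_return() returns the value *after* the operation, while the new atomic_fetch_<op>() family returns the value the variable held *before* the operation. A minimal, hypothetical usage sketch in C:

	atomic_t refs = ATOMIC_INIT(1);
	int after, before;

	after  = atomic_add_return(1, &refs);	/* after == 2: value following the add    */
	before = atomic_fetch_add(1, &refs);	/* before == 2: value prior to this add   */
	/* refs now holds 3 */
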
@@ -112,6 +112,40 @@ static __always_inline void atomic_long_dec(atomic_long_t *l)
	ATOMIC_LONG_PFX(_dec)(v);
}
#define ATOMIC_LONG_FETCH_OP(op, mo) \
static inline long \
atomic_long_fetch_##op##mo(long i, atomic_long_t *l) \
{ \
ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l; \
\
return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(i, v); \
}
ATOMIC_LONG_FETCH_OP(add, )
ATOMIC_LONG_FETCH_OP(add, _relaxed)
ATOMIC_LONG_FETCH_OP(add, _acquire)
ATOMIC_LONG_FETCH_OP(add, _release)
ATOMIC_LONG_FETCH_OP(sub, )
ATOMIC_LONG_FETCH_OP(sub, _relaxed)
ATOMIC_LONG_FETCH_OP(sub, _acquire)
ATOMIC_LONG_FETCH_OP(sub, _release)
ATOMIC_LONG_FETCH_OP(and, )
ATOMIC_LONG_FETCH_OP(and, _relaxed)
ATOMIC_LONG_FETCH_OP(and, _acquire)
ATOMIC_LONG_FETCH_OP(and, _release)
ATOMIC_LONG_FETCH_OP(andnot, )
ATOMIC_LONG_FETCH_OP(andnot, _relaxed)
ATOMIC_LONG_FETCH_OP(andnot, _acquire)
ATOMIC_LONG_FETCH_OP(andnot, _release)
ATOMIC_LONG_FETCH_OP(or, )
ATOMIC_LONG_FETCH_OP(or, _relaxed)
ATOMIC_LONG_FETCH_OP(or, _acquire)
ATOMIC_LONG_FETCH_OP(or, _release)
ATOMIC_LONG_FETCH_OP(xor, )
ATOMIC_LONG_FETCH_OP(xor, _relaxed)
ATOMIC_LONG_FETCH_OP(xor, _acquire)
ATOMIC_LONG_FETCH_OP(xor, _release)
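
For reference, and assuming a 64-bit build where ATOMIC_LONG_PFX() maps names into the atomic64_* namespace, one of the instantiations above expands to roughly the following (an illustrative sketch, not literal preprocessor output):

	static inline long
	atomic_long_fetch_add_acquire(long i, atomic_long_t *l)
	{
		atomic64_t *v = (atomic64_t *)l;	/* atomic_long_t aliases atomic64_t here */

		return (long)atomic64_fetch_add_acquire(i, v);
	}
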
#define ATOMIC_LONG_OP(op) \
static __always_inline void \
atomic_long_##op(long i, atomic_long_t *l) \
@@ -124,9 +158,9 @@ atomic_long_##op(long i, atomic_long_t *l) \
ATOMIC_LONG_OP(add)
ATOMIC_LONG_OP(sub)
ATOMIC_LONG_OP(and)
+ATOMIC_LONG_OP(andnot)
ATOMIC_LONG_OP(or)
ATOMIC_LONG_OP(xor)
-ATOMIC_LONG_OP(andnot)

#undef ATOMIC_LONG_OP
...
@@ -61,6 +61,18 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
	return c c_op i; \
}
#define ATOMIC_FETCH_OP(op, c_op) \
static inline int atomic_fetch_##op(int i, atomic_t *v) \
{ \
int c, old; \
\
c = v->counter; \
while ((old = cmpxchg(&v->counter, c, c c_op i)) != c) \
c = old; \
\
return c; \
}
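
Instantiated further down as ATOMIC_FETCH_OP(add, +), the SMP variant above expands to roughly the following cmpxchg() retry loop, which returns the pre-operation value (expansion sketch):

	static inline int atomic_fetch_add(int i, atomic_t *v)
	{
		int c, old;

		c = v->counter;
		/* retry until no other CPU changed the counter between the read and the cmpxchg */
		while ((old = cmpxchg(&v->counter, c, c + i)) != c)
			c = old;

		return c;	/* value observed before the add took effect */
	}
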
#else

#include <linux/irqflags.h>
@@ -88,6 +100,20 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
	return ret; \
}
#define ATOMIC_FETCH_OP(op, c_op) \
static inline int atomic_fetch_##op(int i, atomic_t *v) \
{ \
unsigned long flags; \
int ret; \
\
raw_local_irq_save(flags); \
ret = v->counter; \
v->counter = v->counter c_op i; \
raw_local_irq_restore(flags); \
\
return ret; \
}
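
On !CONFIG_SMP the operation only needs to be atomic with respect to interrupts on the local CPU, so disabling interrupts around a plain read-modify-write suffices. As a sketch, ATOMIC_FETCH_OP(and, &) expands to roughly:

	static inline int atomic_fetch_and(int i, atomic_t *v)
	{
		unsigned long flags;
		int ret;

		raw_local_irq_save(flags);	/* nothing else can run on this CPU now */
		ret = v->counter;		/* remember the old value */
		v->counter = v->counter & i;
		raw_local_irq_restore(flags);

		return ret;
	}
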
#endif /* CONFIG_SMP */

#ifndef atomic_add_return
@@ -98,6 +124,28 @@ ATOMIC_OP_RETURN(add, +)
ATOMIC_OP_RETURN(sub, -)
#endif
#ifndef atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#endif
#ifndef atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#endif
#ifndef atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#endif
#ifndef atomic_fetch_or
#define atomic_fetch_or atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#endif
#ifndef atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)
#endif
#ifndef atomic_and
ATOMIC_OP(and, &)
#endif
@@ -110,6 +158,7 @@ ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)
#endif
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
...
@@ -27,16 +27,23 @@ extern void atomic64_##op(long long a, atomic64_t *v);
#define ATOMIC64_OP_RETURN(op) \
extern long long atomic64_##op##_return(long long a, atomic64_t *v);
-#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)
+#define ATOMIC64_FETCH_OP(op) \
+extern long long atomic64_fetch_##op(long long a, atomic64_t *v);
+
+#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)

-ATOMIC64_OP(and)
-ATOMIC64_OP(or)
-ATOMIC64_OP(xor)
+#undef ATOMIC64_OPS
+#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)
+
+ATOMIC64_OPS(and)
+ATOMIC64_OPS(or)
+ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
+#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
...
@@ -163,6 +163,154 @@
#endif
#endif /* atomic_dec_return_relaxed */
/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed atomic_fetch_add
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#else /* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...) \
__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...) \
__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add
#define atomic_fetch_add(...) \
__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */
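
The __atomic_op_acquire/__atomic_op_release/__atomic_op_fence() wrappers used above are defined earlier in <linux/atomic.h>. They are reproduced here from memory as a hedged sketch, only to make the fallback logic readable: each one calls the architecture's _relaxed primitive and adds the required barriers around it.

	#define __atomic_op_acquire(op, args...) \
	({ \
		typeof(op##_relaxed(args)) __ret = op##_relaxed(args); \
		smp_mb__after_atomic(); \
		__ret; \
	})

	#define __atomic_op_release(op, args...) \
	({ \
		smp_mb__before_atomic(); \
		op##_relaxed(args); \
	})

	#define __atomic_op_fence(op, args...) \
	({ \
		typeof(op##_relaxed(args)) __ret; \
		smp_mb__before_atomic(); \
		__ret = op##_relaxed(args); \
		smp_mb__after_atomic(); \
		__ret; \
	})

So an architecture that only provides atomic_fetch_add_relaxed() gets the _acquire, _release and fully ordered variants generated from the block above.
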
/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...) \
__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...) \
__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...) \
__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */
/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed atomic_fetch_or
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#else /* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...) \
__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...) \
__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or
#define atomic_fetch_or(...) \
__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */
/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed atomic_fetch_and
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#else /* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...) \
__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...) \
__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and
#define atomic_fetch_and(...) \
__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */
#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot
#else /* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...) \
__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...) \
__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...) \
__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */
/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...) \
__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...) \
__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...) \
__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed atomic_xchg
@@ -310,6 +458,26 @@ static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}
static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
return atomic_fetch_and(~i, v);
}
static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
return atomic_fetch_and_relaxed(~i, v);
}
static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
return atomic_fetch_and_acquire(~i, v);
}
static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
return atomic_fetch_and_release(~i, v);
}
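
These fallbacks make the fetch_andnot variants available even where the architecture only provides fetch_and: clearing the bits in i is just an AND with the complement. A hypothetical usage sketch (the flag name, the atomic_t variable and the handler are made up for illustration):

	#define MY_FLAG_PENDING	0x01	/* hypothetical flag bit */

	/* atomically clear the flag and learn whether it was set before */
	if (atomic_fetch_andnot(MY_FLAG_PENDING, &state) & MY_FLAG_PENDING)
		handle_pending_work();
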
#endif

static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
@@ -535,6 +703,154 @@ static inline int atomic_fetch_or(int mask, atomic_t *p)
#endif
#endif /* atomic64_dec_return_relaxed */
/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...) \
__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...) \
__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...) \
__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */
/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...) \
__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...) \
__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...) \
__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */
/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...) \
__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...) \
__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...) \
__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */
/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...) \
__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...) \
__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...) \
__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */
#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#else /* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...) \
__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...) \
__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...) \
__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */
/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...) \
__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...) \
__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...) \
__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */
/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed atomic64_xchg
@@ -588,6 +904,26 @@ static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}
static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
return atomic64_fetch_and(~i, v);
}
static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
return atomic64_fetch_and_relaxed(~i, v);
}
static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
return atomic64_fetch_and_acquire(~i, v);
}
static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
return atomic64_fetch_and_release(~i, v);
}
#endif

#include <asm-generic/atomic-long.h>
...
@@ -96,17 +96,41 @@ long long atomic64_##op##_return(long long a, atomic64_t *v) \
} \
EXPORT_SYMBOL(atomic64_##op##_return);
#define ATOMIC64_FETCH_OP(op, c_op) \
long long atomic64_fetch_##op(long long a, atomic64_t *v) \
{ \
unsigned long flags; \
raw_spinlock_t *lock = lock_addr(v); \
long long val; \
\
raw_spin_lock_irqsave(lock, flags); \
val = v->counter; \
v->counter c_op a; \
raw_spin_unlock_irqrestore(lock, flags); \
return val; \
} \
EXPORT_SYMBOL(atomic64_fetch_##op);
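
lib/atomic64.c is the spinlock-based atomic64_t implementation used by 32-bit architectures without native 64-bit atomics; lock_addr(), defined earlier in the file, picks a spinlock from a small hashed array based on the variable's address. With op=or and c_op=|= the macro above generates roughly the following (expansion sketch):

	long long atomic64_fetch_or(long long a, atomic64_t *v)
	{
		unsigned long flags;
		raw_spinlock_t *lock = lock_addr(v);	/* hashed per-address spinlock */
		long long val;

		raw_spin_lock_irqsave(lock, flags);
		val = v->counter;			/* old value to be returned */
		v->counter |= a;
		raw_spin_unlock_irqrestore(lock, flags);
		return val;
	}
	EXPORT_SYMBOL(atomic64_fetch_or);
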
#define ATOMIC64_OPS(op, c_op) \
	ATOMIC64_OP(op, c_op) \
-	ATOMIC64_OP_RETURN(op, c_op)
+	ATOMIC64_OP_RETURN(op, c_op) \
+	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(add, +=)
ATOMIC64_OPS(sub, -=)
-ATOMIC64_OP(and, &=)
-ATOMIC64_OP(or, |=)
-ATOMIC64_OP(xor, ^=)

#undef ATOMIC64_OPS
+#define ATOMIC64_OPS(op, c_op) \
+	ATOMIC64_OP(op, c_op) \
+	ATOMIC64_OP_RETURN(op, c_op) \
+	ATOMIC64_FETCH_OP(op, c_op)
+
+ATOMIC64_OPS(and, &=)
+ATOMIC64_OPS(or, |=)
+ATOMIC64_OPS(xor, ^=)
+
+#undef ATOMIC64_OPS
+#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
...
@@ -53,11 +53,25 @@ do { \
	BUG_ON(atomic##bit##_read(&v) != r); \
} while (0)
#define TEST_FETCH(bit, op, c_op, val) \
do { \
atomic##bit##_set(&v, v0); \
r = v0; \
r c_op val; \
BUG_ON(atomic##bit##_##op(val, &v) != v0); \
BUG_ON(atomic##bit##_read(&v) != r); \
} while (0)
#define RETURN_FAMILY_TEST(bit, op, c_op, val) \
do { \
	FAMILY_TEST(TEST_RETURN, bit, op, c_op, val); \
} while (0)
#define FETCH_FAMILY_TEST(bit, op, c_op, val) \
do { \
FAMILY_TEST(TEST_FETCH, bit, op, c_op, val); \
} while (0)
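
FAMILY_TEST() (defined earlier in the file) applies the given test macro to the base operation and to its _acquire/_release/_relaxed variants; that description is from memory and hedged. For the base case, FETCH_FAMILY_TEST(, fetch_add, +=, onestwos) boils down to TEST_FETCH expanded as:

	do {
		atomic_set(&v, v0);			/* start from a known value         */
		r = v0;
		r += onestwos;				/* expected value after the op      */
		BUG_ON(atomic_fetch_add(onestwos, &v) != v0);	/* must return the old value */
		BUG_ON(atomic_read(&v) != r);		/* and leave the new value behind   */
	} while (0);
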
#define TEST_ARGS(bit, op, init, ret, expect, args...) \
do { \
	atomic##bit##_set(&v, init); \
@@ -114,6 +128,16 @@ static __init void test_atomic(void)
RETURN_FAMILY_TEST(, sub_return, -=, onestwos);
RETURN_FAMILY_TEST(, sub_return, -=, -one);
FETCH_FAMILY_TEST(, fetch_add, +=, onestwos);
FETCH_FAMILY_TEST(, fetch_add, +=, -one);
FETCH_FAMILY_TEST(, fetch_sub, -=, onestwos);
FETCH_FAMILY_TEST(, fetch_sub, -=, -one);
FETCH_FAMILY_TEST(, fetch_or, |=, v1);
FETCH_FAMILY_TEST(, fetch_and, &=, v1);
FETCH_FAMILY_TEST(, fetch_andnot, &= ~, v1);
FETCH_FAMILY_TEST(, fetch_xor, ^=, v1);
INC_RETURN_FAMILY_TEST(, v0);
DEC_RETURN_FAMILY_TEST(, v0);
@@ -154,6 +178,16 @@ static __init void test_atomic64(void)
RETURN_FAMILY_TEST(64, sub_return, -=, onestwos);
RETURN_FAMILY_TEST(64, sub_return, -=, -one);
FETCH_FAMILY_TEST(64, fetch_add, +=, onestwos);
FETCH_FAMILY_TEST(64, fetch_add, +=, -one);
FETCH_FAMILY_TEST(64, fetch_sub, -=, onestwos);
FETCH_FAMILY_TEST(64, fetch_sub, -=, -one);
FETCH_FAMILY_TEST(64, fetch_or, |=, v1);
FETCH_FAMILY_TEST(64, fetch_and, &=, v1);
FETCH_FAMILY_TEST(64, fetch_andnot, &= ~, v1);
FETCH_FAMILY_TEST(64, fetch_xor, ^=, v1);
INIT(v0);
atomic64_inc(&v);
r += one;
...