Commit e6942b7d authored by Peter Zijlstra, committed by Thomas Gleixner

atomic: Provide atomic_{or,xor,and}

Implement atomic logic ops -- atomic_{or,xor,and}.

These will replace the atomic_{set,clear}_mask functions that are
available on some archs.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent 2957c035
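
For context, the new atomic_or(), atomic_and() and atomic_xor() are read-modify-write logic operations on an atomic_t with no return value; the old atomic_{set,clear}_mask() interfaces become thin, deprecated wrappers around them (see the asm-generic hunk further down). A minimal userspace sketch of the intended semantics, using C11 atomics as a stand-in for the per-arch kernel implementations (the atomic_t wrapper and main() here are purely illustrative, not kernel code):

```c
#include <stdatomic.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's atomic_t. */
typedef struct { _Atomic int counter; } atomic_t;

/* The new ops: atomic read-modify-write, nothing returned. */
static inline void atomic_and(int i, atomic_t *v) { atomic_fetch_and(&v->counter, i); }
static inline void atomic_or(int i, atomic_t *v)  { atomic_fetch_or(&v->counter, i); }
static inline void atomic_xor(int i, atomic_t *v) { atomic_fetch_xor(&v->counter, i); }

/* The old mask interfaces, expressed in terms of the new ops,
 * mirroring the deprecated wrappers this commit adds. */
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v) { atomic_and(~mask, v); }
static inline void atomic_set_mask(unsigned int mask, atomic_t *v)   { atomic_or(mask, v); }

int main(void)
{
	atomic_t v = { .counter = 0x0f };

	atomic_or(0xf0, &v);         /* 0xff */
	atomic_and(0x3f, &v);        /* 0x3f */
	atomic_xor(0x0f, &v);        /* 0x30 */
	atomic_clear_mask(0x10, &v); /* 0x20 */
	printf("0x%02x\n", v.counter);
	return 0;
}
```

Compiled with `gcc -std=c11`, this prints 0x20. In the kernel, each architecture instead expands an ATOMIC_OP()-style macro (or a raw asm helper) into its native LL/SC, cmpxchg or locked-instruction sequence, which is what the hunks below do.
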
@@ -110,7 +110,6 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define atomic_andnot atomic_andnot
 #define atomic64_andnot atomic64_andnot
...
@@ -144,7 +144,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add, +=, add)
 ATOMIC_OPS(sub, -=, sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define atomic_andnot atomic_andnot
 ATOMIC_OP(and, &=, and)
...
@@ -194,7 +194,6 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 ATOMIC_OPS(add, +=, add)
 ATOMIC_OPS(sub, -=, sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define atomic_andnot atomic_andnot
 ATOMIC_OP(and, &=, and)
...
@@ -85,7 +85,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add, add)
 ATOMIC_OPS(sub, sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define atomic_andnot atomic_andnot
 ATOMIC_OP(and, and)
...
@@ -51,8 +51,6 @@ static inline void atomic_##op(int i, atomic_t *v) \
 	(void)__atomic_##op##_return(i, v); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and, and)
 ATOMIC_OP(or, or)
 ATOMIC_OP(xor, eor)
...
@@ -28,8 +28,6 @@ asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);
 #define atomic_add_return(i, v) __raw_atomic_add_asm(&(v)->counter, i)
 #define atomic_sub_return(i, v) __raw_atomic_add_asm(&(v)->counter, -(i))
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define atomic_or(i, v) (void)__raw_atomic_or_asm(&(v)->counter, i)
 #define atomic_and(i, v) (void)__raw_atomic_and_asm(&(v)->counter, i)
 #define atomic_xor(i, v) (void)__raw_atomic_xor_asm(&(v)->counter, i)
...
@@ -192,8 +192,6 @@ static inline void atomic64_##op(long long i, atomic64_t *v) \
 	(void)__atomic64_fetch_##op(i, &v->counter); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(or)
 ATOMIC_OP(and)
 ATOMIC_OP(xor)
...
@@ -41,8 +41,6 @@ static inline void atomic_##op(int i, atomic_t *v) \
 ATOMIC_OP_RETURN(add, +=)
 ATOMIC_OP_RETURN(sub, -=)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and, &=)
 ATOMIC_OP(or, |=)
 ATOMIC_OP(xor, ^=)
...
@@ -132,8 +132,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
...
@@ -69,8 +69,6 @@ ATOMIC_OP(sub, -)
 	: ia64_atomic_sub(__ia64_asr_i, v); \
 })
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and, &)
 ATOMIC_OP(or, |)
 ATOMIC_OP(xor, ^)
...
@@ -94,8 +94,6 @@ static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
...
@@ -77,8 +77,6 @@ static inline int atomic_##op##_return(int i, atomic_t * v) \
 ATOMIC_OPS(add, +=, add)
 ATOMIC_OPS(sub, -=, sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and, &=, and)
 ATOMIC_OP(or, |=, or)
 ATOMIC_OP(xor, ^=, eor)
...
@@ -74,8 +74,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
...
@@ -137,8 +137,6 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \
 ATOMIC_OPS(add, +=, addu)
 ATOMIC_OPS(sub, -=, subu)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and, &=, and)
 ATOMIC_OP(or, |=, or)
 ATOMIC_OP(xor, ^=, xor)
...
@@ -89,8 +89,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
...
@@ -126,8 +126,6 @@ static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add, +=)
 ATOMIC_OPS(sub, -=)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and, &=)
 ATOMIC_OP(or, |=)
 ATOMIC_OP(xor, ^=)
...
@@ -67,8 +67,6 @@ static __inline__ int atomic_##op##_return(int a, atomic_t *v) \
 ATOMIC_OPS(add, add)
 ATOMIC_OPS(sub, subf)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and, and)
 ATOMIC_OP(or, or)
 ATOMIC_OP(xor, xor)
...
@@ -282,8 +282,6 @@ static inline void atomic64_##op(long i, atomic64_t *v) \
 	__ATOMIC64_LOOP(v, i, __ATOMIC64_##OP, __ATOMIC64_NO_BARRIER); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC64_OP(and, AND)
 ATOMIC64_OP(or, OR)
 ATOMIC64_OP(xor, XOR)
...
@@ -48,8 +48,6 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
...
@@ -17,8 +17,6 @@
 #include <asm/barrier.h>
 #include <asm-generic/atomic64.h>
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 #define ATOMIC_INIT(i) { (i) }
 int atomic_add_return(int, atomic_t *);
...
@@ -33,8 +33,6 @@ long atomic64_##op##_return(long, atomic64_t *);
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
...
@@ -41,8 +41,6 @@ static inline void atomic_##op(int i, atomic_t *v) \
 	_atomic_##op((unsigned long *)&v->counter, i); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
...
@@ -58,8 +58,6 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 	return oldval;
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 static inline void atomic_and(int i, atomic_t *v)
 {
 	__insn_fetchand4((void *)&v->counter, i);
...
@@ -191,8 +191,6 @@ static inline void atomic_##op(int i, atomic_t *v) \
 	: "memory"); \
 }
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
...
@@ -145,8 +145,6 @@ static inline int atomic_##op##_return(int i, atomic_t * v) \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
-#define CONFIG_ARCH_HAS_ATOMIC_OR
 ATOMIC_OP(and)
 ATOMIC_OP(or)
 ATOMIC_OP(xor)
...
@@ -102,24 +102,27 @@ ATOMIC_OP_RETURN(sub, -)
 ATOMIC_OP(and, &)
 #endif
-#ifndef atomic_clear_mask
-#define atomic_clear_mask(i, v) atomic_and(~(i), (v))
-#endif
 #ifndef atomic_or
-#ifndef CONFIG_ARCH_HAS_ATOMIC_OR
-#define CONFIG_ARCH_HAS_ATOMIC_OR
-#endif
 ATOMIC_OP(or, |)
 #endif
-#ifndef atomic_set_mask
-#define atomic_set_mask(i, v) atomic_or((i), (v))
+#ifndef atomic_xor
+ATOMIC_OP(xor, ^)
 #endif
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
+static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
+{
+	atomic_and(~mask, v);
+}
+
+static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
+{
+	atomic_or(mask, v);
+}
 /*
  * Atomic operations that C can't guarantee us. Useful for
  * resource counting etc..
...
@@ -32,6 +32,10 @@ extern long long atomic64_##op##_return(long long a, atomic64_t *v);
 ATOMIC64_OPS(add)
 ATOMIC64_OPS(sub)
+ATOMIC64_OP(and)
+ATOMIC64_OP(or)
+ATOMIC64_OP(xor)
 #undef ATOMIC64_OPS
 #undef ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
...
@@ -111,19 +111,6 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 }
 #endif
-#ifndef CONFIG_ARCH_HAS_ATOMIC_OR
-static inline void atomic_or(int i, atomic_t *v)
-{
-	int old;
-	int new;
-
-	do {
-		old = atomic_read(v);
-		new = old | i;
-	} while (atomic_cmpxchg(v, old, new) != old);
-}
-#endif /* #ifndef CONFIG_ARCH_HAS_ATOMIC_OR */
 #include <asm-generic/atomic-long.h>
 #ifdef CONFIG_GENERIC_ATOMIC64
 #include <asm-generic/atomic64.h>
...
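
The hunk above removes the old CONFIG_ARCH_HAS_ATOMIC_OR-guarded fallback, which built atomic_or() out of atomic_cmpxchg(); the asm-generic ATOMIC_OP() macro now generates an equivalent loop for and/or/xor on architectures without a dedicated implementation. A hedged userspace sketch of that compare-and-exchange loop shape, again with C11 atomics standing in for the kernel primitives:

```c
#include <stdatomic.h>

typedef struct { _Atomic int counter; } atomic_t;

/* Same shape as the removed generic atomic_or() fallback: read the
 * old value, compute old | i, retry the cmpxchg until nobody raced us. */
static inline void atomic_or(int i, atomic_t *v)
{
	int old = atomic_load(&v->counter);

	/* On failure, compare_exchange_weak reloads 'old', so the loop
	 * retries with the freshly observed value. */
	while (!atomic_compare_exchange_weak(&v->counter, &old, old | i))
		;
}
```
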
@@ -102,6 +102,9 @@ EXPORT_SYMBOL(atomic64_##op##_return);
 ATOMIC64_OPS(add, +=)
 ATOMIC64_OPS(sub, -=)
+ATOMIC64_OP(and, &=)
+ATOMIC64_OP(or, |=)
+ATOMIC64_OP(xor, ^=)
 #undef ATOMIC64_OPS
 #undef ATOMIC64_OP_RETURN
...