Commit 5b4a2f0f authored by Peter Zijlstra's avatar Peter Zijlstra Committed by Thomas Gleixner

metag: Provide atomic_{or,xor,and}

Implement atomic logic ops -- atomic_{or,xor,and}.

These will replace the atomic_{set,clear}_mask functions that are
available on some archs.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent 74b1bc50
@@ -74,42 +74,24 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
ATOMIC_OPS(add) ATOMIC_OPS(add)
ATOMIC_OPS(sub) ATOMIC_OPS(sub)
#define CONFIG_ARCH_HAS_ATOMIC_OR
ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)
#undef ATOMIC_OPS #undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN #undef ATOMIC_OP_RETURN
#undef ATOMIC_OP #undef ATOMIC_OP
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v) static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
{ {
int temp; atomic_and(~mask, v);
asm volatile (
"1: LNKGETD %0, [%1]\n"
" AND %0, %0, %2\n"
" LNKSETD [%1] %0\n"
" DEFR %0, TXSTAT\n"
" ANDT %0, %0, #HI(0x3f000000)\n"
" CMPT %0, #HI(0x02000000)\n"
" BNZ 1b\n"
: "=&d" (temp)
: "da" (&v->counter), "bd" (~mask)
: "cc");
} }
static inline void atomic_set_mask(unsigned int mask, atomic_t *v) static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
{ {
int temp; atomic_or(mask, v);
asm volatile (
"1: LNKGETD %0, [%1]\n"
" OR %0, %0, %2\n"
" LNKSETD [%1], %0\n"
" DEFR %0, TXSTAT\n"
" ANDT %0, %0, #HI(0x3f000000)\n"
" CMPT %0, #HI(0x02000000)\n"
" BNZ 1b\n"
: "=&d" (temp)
: "da" (&v->counter), "bd" (mask)
: "cc");
} }
static inline int atomic_cmpxchg(atomic_t *v, int old, int new) static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
......
@@ -68,29 +68,22 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
ATOMIC_OPS(add, +=) ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=) ATOMIC_OPS(sub, -=)
ATOMIC_OP(and, &=)
ATOMIC_OP(or, |=)
ATOMIC_OP(xor, ^=)
#undef ATOMIC_OPS #undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN #undef ATOMIC_OP_RETURN
#undef ATOMIC_OP #undef ATOMIC_OP
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v) static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
{ {
unsigned long flags; atomic_and(~mask, v);
__global_lock1(flags);
fence();
v->counter &= ~mask;
__global_unlock1(flags);
} }
static inline void atomic_set_mask(unsigned int mask, atomic_t *v) static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
{ {
unsigned long flags; atomic_or(mask, v);
__global_lock1(flags);
fence();
v->counter |= mask;
__global_unlock1(flags);
} }
static inline int atomic_cmpxchg(atomic_t *v, int old, int new) static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.