Commit 6bbe043b authored by Paul Burton

MIPS: bitops: Implement test_and_set_bit() in terms of _lock variant

The only difference between test_and_set_bit() & test_and_set_bit_lock()
is their memory barrier semantics: the former provides a full
barrier whilst the latter provides only acquire semantics.

We can therefore implement test_and_set_bit() in terms of
test_and_set_bit_lock() with the addition of the extra memory barrier.
Do this in order to avoid duplicating logic.
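
Concretely, the new test_and_set_bit() reduces to a thin wrapper; this
sketch simply mirrors the new body in the diff below:

	static inline int test_and_set_bit(unsigned long nr,
		volatile unsigned long *addr)
	{
		/* Upgrade the _lock variant's acquire ordering to a full barrier. */
		smp_mb__before_llsc();
		return test_and_set_bit_lock(nr, addr);
	}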
Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
parent 27aab272
arch/mips/include/asm/bitops.h
@@ -31,8 +31,6 @@
 void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
-int __mips_test_and_set_bit(unsigned long nr,
-		volatile unsigned long *addr);
 int __mips_test_and_set_bit_lock(unsigned long nr,
 		volatile unsigned long *addr);
 int __mips_test_and_clear_bit(unsigned long nr,
@@ -236,24 +234,22 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 }
 
 /*
- * test_and_set_bit - Set a bit and return its old value
+ * test_and_set_bit_lock - Set a bit and return its old value
  * @nr: Bit to set
  * @addr: Address to count from
  *
- * This operation is atomic and cannot be reordered.
- * It also implies a memory barrier.
+ * This operation is atomic and implies acquire ordering semantics
+ * after the memory operation.
  */
-static inline int test_and_set_bit(unsigned long nr,
+static inline int test_and_set_bit_lock(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
 	int bit = nr & SZLONG_MASK;
 	unsigned long res, temp;
 
-	smp_mb__before_llsc();
-
 	if (!kernel_uses_llsc) {
-		res = __mips_test_and_set_bit(nr, addr);
+		res = __mips_test_and_set_bit_lock(nr, addr);
 	} else if (R10000_LLSC_WAR) {
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -264,7 +260,7 @@ static inline int test_and_set_bit(unsigned long nr,
 		"	beqzl	%2, 1b					\n"
 		"	and	%2, %0, %3				\n"
 		"	.set	pop					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
+		: "=&r" (temp), "+m" (*m), "=&r" (res)
 		: "r" (1UL << bit)
 		: __LLSC_CLOBBER);
 	} else {
@@ -291,56 +287,20 @@ static inline int test_and_set_bit(unsigned long nr,
 }
 
 /*
- * test_and_set_bit_lock - Set a bit and return its old value
+ * test_and_set_bit - Set a bit and return its old value
  * @nr: Bit to set
  * @addr: Address to count from
  *
- * This operation is atomic and implies acquire ordering semantics
- * after the memory operation.
+ * This operation is atomic and cannot be reordered.
+ * It also implies a memory barrier.
  */
-static inline int test_and_set_bit_lock(unsigned long nr,
+static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
-	int bit = nr & SZLONG_MASK;
-	unsigned long res, temp;
-
-	if (!kernel_uses_llsc) {
-		res = __mips_test_and_set_bit_lock(nr, addr);
-	} else if (R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	pop					\n"
-		: "=&r" (temp), "+m" (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: __LLSC_CLOBBER);
-	} else {
-		do {
-			__asm__ __volatile__(
-			"	.set	push				\n"
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1	# test_and_set_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	pop				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: __LLSC_CLOBBER);
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	}
-
-	smp_llsc_mb();
-	return res != 0;
+	smp_mb__before_llsc();
+	return test_and_set_bit_lock(nr, addr);
 }
 
 /*
  * test_and_clear_bit - Clear a bit and return its old value
  * @nr: Bit to clear
arch/mips/lib/bitops.c
@@ -77,32 +77,6 @@ void __mips_change_bit(unsigned long nr, volatile unsigned long *addr)
 EXPORT_SYMBOL(__mips_change_bit);
 
-
-/**
- * __mips_test_and_set_bit - Set a bit and return its old value.  This is
- * called by test_and_set_bit() if it cannot find a faster solution.
- * @nr: Bit to set
- * @addr: Address to count from
- */
-int __mips_test_and_set_bit(unsigned long nr,
-		volatile unsigned long *addr)
-{
-	unsigned long *a = (unsigned long *)addr;
-	unsigned bit = nr & SZLONG_MASK;
-	unsigned long mask;
-	unsigned long flags;
-	int res;
-
-	a += nr >> SZLONG_LOG;
-	mask = 1UL << bit;
-	raw_local_irq_save(flags);
-	res = (mask & *a) != 0;
-	*a |= mask;
-	raw_local_irq_restore(flags);
-	return res;
-}
-EXPORT_SYMBOL(__mips_test_and_set_bit);
-
 
 /**
  * __mips_test_and_set_bit_lock - Set a bit and return its old value.  This is
  * called by test_and_set_bit_lock() if it cannot find a faster solution.
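
For context, here is a hypothetical caller showing where each variant
fits; the names below (my_lock, my_unlock, MY_LOCK_BIT, my_flags) are
illustrative and not part of this patch. test_and_set_bit_lock() pairs
with clear_bit_unlock() for lock-style usage, while test_and_set_bit()
remains the fully ordered general-purpose form:

	#include <linux/bitops.h>
	#include <linux/processor.h>

	#define MY_LOCK_BIT	0		/* hypothetical lock bit */

	static unsigned long my_flags;		/* hypothetical state word */

	static void my_lock(void)
	{
		/* Acquire ordering is sufficient when taking a lock. */
		while (test_and_set_bit_lock(MY_LOCK_BIT, &my_flags))
			cpu_relax();
	}

	static void my_unlock(void)
	{
		/* clear_bit_unlock() provides the matching release ordering. */
		clear_bit_unlock(MY_LOCK_BIT, &my_flags);
	}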