Commit 9511ca19 authored by Will Deacon

arm64: rwlocks: don't fail trylock purely due to contention

STXR can fail for a number of reasons, so don't fail an rwlock trylock
operation simply because the STXR reported failure.

I'm not aware of any issues with the current code, but this makes it
consistent with spin_trylock and also other architectures (e.g. arch/arm).
Reported-by: Catalin Marinas <catalin.marinas@arm.com>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
parent fc9eb93c
...@@ -140,10 +140,11 @@ static inline int arch_write_trylock(arch_rwlock_t *rw) ...@@ -140,10 +140,11 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
unsigned int tmp; unsigned int tmp;
asm volatile( asm volatile(
" ldaxr %w0, %1\n" "1: ldaxr %w0, %1\n"
" cbnz %w0, 1f\n" " cbnz %w0, 2f\n"
" stxr %w0, %w2, %1\n" " stxr %w0, %w2, %1\n"
"1:\n" " cbnz %w0, 1b\n"
"2:\n"
: "=&r" (tmp), "+Q" (rw->lock) : "=&r" (tmp), "+Q" (rw->lock)
: "r" (0x80000000) : "r" (0x80000000)
: "memory"); : "memory");
...@@ -209,11 +210,12 @@ static inline int arch_read_trylock(arch_rwlock_t *rw) ...@@ -209,11 +210,12 @@ static inline int arch_read_trylock(arch_rwlock_t *rw)
unsigned int tmp, tmp2 = 1; unsigned int tmp, tmp2 = 1;
asm volatile( asm volatile(
" ldaxr %w0, %2\n" "1: ldaxr %w0, %2\n"
" add %w0, %w0, #1\n" " add %w0, %w0, #1\n"
" tbnz %w0, #31, 1f\n" " tbnz %w0, #31, 2f\n"
" stxr %w1, %w0, %2\n" " stxr %w1, %w0, %2\n"
"1:\n" " cbnz %w1, 1b\n"
"2:\n"
: "=&r" (tmp), "+r" (tmp2), "+Q" (rw->lock) : "=&r" (tmp), "+r" (tmp2), "+Q" (rw->lock)
: :
: "memory"); : "memory");
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment