Commit 53a42b63 authored by John David Anglin, committed by Helge Deller

parisc: Switch to more fine grained lws locks

Increase the number of lws locks to 256 entries (instead of 16) and
choose the lock entry based on bits 3-11 (instead of 4-7) of the relevant
address.  With this change we achieve more fine-grained locking in
futex syscalls and thus reduce the number of possible stalls.
Signed-off-by: John David Anglin <dave.anglin@bell.net>
Signed-off-by: Helge Deller <deller@gmx.de>
parent 2a7d4eed
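For illustration, here is a minimal user-space C sketch of the old and new
index computations (hypothetical helper names; not kernel code).
lws_lock_start is declared as an array of u32, and each lock entry spans
four words (16 bytes), so both expressions yield word indices that are
multiples of 4:

#include <stdint.h>
#include <stdio.h>

/* Old scheme: hash on address bits 4-7, i.e. at most 16 lock entries. */
static long lock_index_old(uintptr_t uaddr)
{
	return (uaddr & 0xf0) >> 2;	/* word index, multiple of 4 */
}

/* New scheme: a wider mask (0x3f8) spreads nearby futexes across many
 * more lock entries; the result is still a 16-byte-aligned word index. */
static long lock_index_new(uintptr_t uaddr)
{
	return (uaddr & 0x3f8) >> 1;
}

int main(void)
{
	/* Futexes 256 bytes apart have identical bits 4-7, so the old
	 * hash serializes them on one lock; the new hash does not. */
	uintptr_t a = 0x1000, b = 0x1100;
	printf("old: %ld vs %ld\n", lock_index_old(a), lock_index_old(b)); /* 0 vs 0 */
	printf("new: %ld vs %ld\n", lock_index_new(a), lock_index_new(b)); /* 0 vs 128 */
	return 0;
}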
arch/parisc/include/asm/futex.h
@@ -16,7 +16,7 @@ static inline void
 _futex_spin_lock_irqsave(u32 __user *uaddr, unsigned long int *flags)
 {
 	extern u32 lws_lock_start[];
-	long index = ((long)uaddr & 0xf0) >> 2;
+	long index = ((long)uaddr & 0x3f8) >> 1;
 	arch_spinlock_t *s = (arch_spinlock_t *)&lws_lock_start[index];
 	local_irq_save(*flags);
 	arch_spin_lock(s);
@@ -26,7 +26,7 @@ static inline void
 _futex_spin_unlock_irqrestore(u32 __user *uaddr, unsigned long int *flags)
 {
 	extern u32 lws_lock_start[];
-	long index = ((long)uaddr & 0xf0) >> 2;
+	long index = ((long)uaddr & 0x3f8) >> 1;
 	arch_spinlock_t *s = (arch_spinlock_t *)&lws_lock_start[index];
 	arch_spin_unlock(s);
 	local_irq_restore(*flags);
arch/parisc/kernel/syscall.S
@@ -571,8 +571,8 @@ lws_compare_and_swap:
 	ldil	L%lws_lock_start, %r20
 	ldo	R%lws_lock_start(%r20), %r28

-	/* Extract four bits from r26 and hash lock (Bits 4-7) */
-	extru	%r26, 27, 4, %r20
+	/* Extract eight bits from r26 and hash lock (Bits 3-11) */
+	extru	%r26, 28, 8, %r20

 	/* Find lock to use, the hash is either one of 0 to
 	   15, multiplied by 16 (keep it 16-byte aligned)
@@ -761,8 +761,8 @@ cas2_lock_start:
 	ldil	L%lws_lock_start, %r20
 	ldo	R%lws_lock_start(%r20), %r28

-	/* Extract four bits from r26 and hash lock (Bits 4-7) */
-	extru	%r26, 27, 4, %r20
+	/* Extract eight bits from r26 and hash lock (Bits 3-11) */
+	extru	%r26, 28, 8, %r20

 	/* Find lock to use, the hash is either one of 0 to
 	   15, multiplied by 16 (keep it 16-byte aligned)
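A note for readers unfamiliar with PA-RISC bitfield instructions, with an
illustrative C sketch (hypothetical helper, not kernel code): extru r, p,
len, t right-justifies the len-bit field of r whose rightmost bit sits at
big-endian bit position p (bit 0 is the most significant bit of the
32-bit word).

#include <stdint.h>

/* Hypothetical C model of PA-RISC "extru r, p, len, t":
 * right-justify the len-bit field of r ending at big-endian
 * bit position p (bit 0 = MSB of the 32-bit word). */
static uint32_t extru(uint32_t r, int p, int len)
{
	return (r >> (31 - p)) & ((1u << len) - 1);
}

/* Old: extru %r26, 27, 4, %r20  ==  (r26 >> 4) & 0xf   -- address bits 4-7
 * New: extru %r26, 28, 8, %r20  ==  (r26 >> 3) & 0xff  -- 256 hash values
 * The hash is then scaled by 16, the size of one lock entry. */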
@@ -950,7 +950,7 @@ END(sys_call_table64)
 	.align	L1_CACHE_BYTES
 ENTRY(lws_lock_start)
 	/* lws locks */
-	.rept	16
+	.rept	256
 	/* Keep locks aligned at 16-bytes */
 	.word	1
 	.word	0
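Finally, a hypothetical C view of the assembled table (not kernel code;
the remaining pad words of each entry are truncated in the hunk above):
after the patch there are 256 entries of 16 bytes each, with the lock
word initialized to 1, i.e. free for PA-RISC ldcw-based locking, which
is why the 16-byte alignment matters:

#include <stdint.h>

/* Hypothetical C picture of lws_lock_start after the patch.
 * Each .rept iteration emits a 16-byte entry: ".word 1" is the
 * lock word (1 = unlocked for ldcw-style locks) and the
 * remaining words pad the entry so every lock word stays
 * 16-byte aligned, which ldcw requires on older PA-RISC CPUs. */
struct lws_lock_entry {
	uint32_t lock;    /* .word 1 */
	uint32_t pad[3];  /* .word 0, ... */
};

static struct lws_lock_entry lws_lock_table[256] = {
	[0 ... 255] = { .lock = 1 },  /* GCC range designator */
};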