_futex_spin_lock_irqsave(u32 __user *uaddr, unsigned long int *flags)
{
	extern u32 lws_lock_start[];
-	long index = ((long)uaddr & 0xf0) >> 2;
+	long index = ((long)uaddr & 0x3f8) >> 1;
	arch_spinlock_t *s = (arch_spinlock_t *)&lws_lock_start[index];
	local_irq_save(*flags);
	arch_spin_lock(s);
}

_futex_spin_unlock_irqrestore(u32 __user *uaddr, unsigned long int *flags)
{
	extern u32 lws_lock_start[];
-	long index = ((long)uaddr & 0xf0) >> 2;
+	long index = ((long)uaddr & 0x3f8) >> 1;
	arch_spinlock_t *s = (arch_spinlock_t *)&lws_lock_start[index];
	arch_spin_unlock(s);
	local_irq_restore(*flags);
}
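
For clarity, a minimal user-space sketch of the index arithmetic above (an illustration under assumptions, not kernel code; old_index(), new_index() and the example address are invented). The index is in 32-bit words, and since each lock in lws_lock_start occupies 16 bytes (four words), it is always a multiple of 4.

#include <stdint.h>
#include <stdio.h>

/* Sketch only: reproduces the futex.h index arithmetic on a plain
 * integer address.  One lock occupies four 32-bit words (16 bytes),
 * so both variants return a multiple of 4. */
static long old_index(uintptr_t uaddr)	/* 16 locks, hashed on address bits 4-7 */
{
	return (long)((uaddr & 0xf0) >> 2);
}

static long new_index(uintptr_t uaddr)	/* larger table, hashed on address bits 3-9 */
{
	return (long)((uaddr & 0x3f8) >> 1);
}

int main(void)
{
	uintptr_t a = 0x12345678;	/* arbitrary example futex address */

	printf("old scheme: word index %ld (lock #%ld)\n",
	       old_index(a), old_index(a) / 4);
	printf("new scheme: word index %ld (lock #%ld)\n",
	       new_index(a), new_index(a) / 4);
	return 0;
}
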
	ldil	L%lws_lock_start, %r20
	ldo	R%lws_lock_start(%r20), %r28
-	/* Extract four bits from r26 and hash lock (Bits 4-7) */
-	extru	%r26, 27, 4, %r20
+	/* Extract eight bits from r26 and hash lock (Bits 3-10) */
+	extru	%r26, 28, 8, %r20
-	/* Find lock to use, the hash is either one of 0 to
-	   15, multiplied by 16 (keep it 16-byte aligned)
+	/* Find lock to use, the hash is either one of 0 to
+	   255, multiplied by 16 (keep it 16-byte aligned)

	ldil	L%lws_lock_start, %r20
	ldo	R%lws_lock_start(%r20), %r28
-	/* Extract four bits from r26 and hash lock (Bits 4-7) */
-	extru	%r26, 27, 4, %r20
+	/* Extract eight bits from r26 and hash lock (Bits 3-10) */
+	extru	%r26, 28, 8, %r20
-	/* Find lock to use, the hash is either one of 0 to
-	   15, multiplied by 16 (keep it 16-byte aligned)
+	/* Find lock to use, the hash is either one of 0 to
+	   255, multiplied by 16 (keep it 16-byte aligned)
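
On the assembly side, extru %r26, 28, 8, %r20 extracts the 8-bit field whose rightmost bit is PA-RISC bit 28 (PA-RISC numbers bit 0 as the most significant bit), i.e. bits 3-10 of the user address in the usual LSB-0 numbering. A rough C equivalent of that hash plus the multiply-by-16 described in the comment, with invented helper names:

#include <stdint.h>
#include <stdio.h>

/* Sketch: what "extru %r26, 28, 8, %r20" followed by the
 * multiply-by-16 computes.  The extracted hash is (uaddr >> 3) & 0xff
 * (0..255); each lock is 16 bytes, so the byte offset into the lock
 * table is hash << 4. */
static unsigned int lws_hash(uintptr_t uaddr)
{
	return (unsigned int)((uaddr >> 3) & 0xff);
}

static unsigned int lws_lock_byte_offset(uintptr_t uaddr)
{
	return lws_hash(uaddr) << 4;
}

int main(void)
{
	uintptr_t a = 0x12345678;	/* arbitrary example futex address */

	printf("hash %u -> byte offset %u from lws_lock_start\n",
	       lws_hash(a), lws_lock_byte_offset(a));
	return 0;
}
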
	.align	L1_CACHE_BYTES
ENTRY(lws_lock_start)
	/* lws locks */
-	.rept	16
+	.rept	256
	/* Keep locks aligned at 16-bytes */
	.word	1
	.word	0
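
Assuming each entry keeps the 16-byte layout the comment above describes (the excerpt shows only the first two of the four .word slots), the change grows the lock table from 16 x 16 = 256 bytes to 256 x 16 = 4096 bytes. A compile-time sketch of that arithmetic, with an invented C struct standing in for one assembly-defined slot:

#include <assert.h>
#include <stdint.h>

/* Illustrative stand-in for one lws lock slot: a 32-bit lock word
 * (the ".word 1" above, i.e. unlocked) padded out to a 16-byte stride. */
struct lws_lock_slot {
	uint32_t lock_word;
	uint32_t pad[3];
};

/* Old table: 16 slots = 256 bytes.  New table: 256 slots = 4 KiB. */
static_assert(sizeof(struct lws_lock_slot) == 16, "16 bytes per slot");
static_assert(16 * sizeof(struct lws_lock_slot) == 256, "old table size");
static_assert(256 * sizeof(struct lws_lock_slot) == 4096, "new table size");

int main(void)
{
	return 0;
}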