Revert "parisc: Use ldcw instruction for SMP spinlock release barrier"
author: Helge Deller <deller@gmx.de>
Tue, 28 Jul 2020 16:54:40 +0000 (18:54 +0200)
committer: Helge Deller <deller@gmx.de>
Tue, 28 Jul 2020 16:54:40 +0000 (18:54 +0200)
This reverts commit 9e5c602186a692a7e848c0da17aed40f49d30519.
There is no need to use the ldcw instruction as an SMP spinlock release barrier.
Revert it to regain the lost speed.

Signed-off-by: Helge Deller <deller@gmx.de>
Cc: <stable@vger.kernel.org> # v5.2+
arch/parisc/include/asm/spinlock.h
arch/parisc/kernel/entry.S
arch/parisc/kernel/syscall.S

index d2a3337..6f85ca7 100644 (file)
@@ -37,11 +37,7 @@ static inline void arch_spin_unlock(arch_spinlock_t *x)
        volatile unsigned int *a;
 
        a = __ldcw_align(x);
-#ifdef CONFIG_SMP
-       (void) __ldcw(a);
-#else
        mb();
-#endif
        *a = 1;
 }
 
index 4b484ec..06455f1 100644 (file)
        nop
        LDREG           0(\ptp),\pte
        bb,<,n          \pte,_PAGE_PRESENT_BIT,3f
-       LDCW            0(\tmp),\tmp1
        b               \fault
-       stw             \spc,0(\tmp)
+       stw,ma          \spc,0(\tmp)
 99:    ALTERNATIVE(98b, 99b, ALT_COND_NO_SMP, INSN_NOP)
 #endif
 2:     LDREG           0(\ptp),\pte
        .endm
 
        /* Release pa_tlb_lock lock without reloading lock address. */
-       .macro          tlb_unlock0     spc,tmp,tmp1
+       .macro          tlb_unlock0     spc,tmp
 #ifdef CONFIG_SMP
 98:    or,COND(=)      %r0,\spc,%r0
-       LDCW            0(\tmp),\tmp1
-       or,COND(=)      %r0,\spc,%r0
-       stw             \spc,0(\tmp)
+       stw,ma          \spc,0(\tmp)
 99:    ALTERNATIVE(98b, 99b, ALT_COND_NO_SMP, INSN_NOP)
 #endif
        .endm
 
        /* Release pa_tlb_lock lock. */
-       .macro          tlb_unlock1     spc,tmp,tmp1
+       .macro          tlb_unlock1     spc,tmp
 #ifdef CONFIG_SMP
 98:    load_pa_tlb_lock \tmp
 99:    ALTERNATIVE(98b, 99b, ALT_COND_NO_SMP, INSN_NOP)
-       tlb_unlock0     \spc,\tmp,\tmp1
+       tlb_unlock0     \spc,\tmp
 #endif
        .endm
 
@@ -1163,7 +1160,7 @@ dtlb_miss_20w:
        
        idtlbt          pte,prot
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1189,7 +1186,7 @@ nadtlb_miss_20w:
 
        idtlbt          pte,prot
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1223,7 +1220,7 @@ dtlb_miss_11:
 
        mtsp            t1, %sr1        /* Restore sr1 */
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1256,7 +1253,7 @@ nadtlb_miss_11:
 
        mtsp            t1, %sr1        /* Restore sr1 */
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1285,7 +1282,7 @@ dtlb_miss_20:
 
        idtlbt          pte,prot
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1313,7 +1310,7 @@ nadtlb_miss_20:
        
        idtlbt          pte,prot
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1420,7 +1417,7 @@ itlb_miss_20w:
        
        iitlbt          pte,prot
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1444,7 +1441,7 @@ naitlb_miss_20w:
 
        iitlbt          pte,prot
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1478,7 +1475,7 @@ itlb_miss_11:
 
        mtsp            t1, %sr1        /* Restore sr1 */
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1502,7 +1499,7 @@ naitlb_miss_11:
 
        mtsp            t1, %sr1        /* Restore sr1 */
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1532,7 +1529,7 @@ itlb_miss_20:
 
        iitlbt          pte,prot
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1552,7 +1549,7 @@ naitlb_miss_20:
 
        iitlbt          pte,prot
 
-       tlb_unlock1     spc,t0,t1
+       tlb_unlock1     spc,t0
        rfir
        nop
 
@@ -1582,7 +1579,7 @@ dbit_trap_20w:
                
        idtlbt          pte,prot
 
-       tlb_unlock0     spc,t0,t1
+       tlb_unlock0     spc,t0
        rfir
        nop
 #else
@@ -1608,7 +1605,7 @@ dbit_trap_11:
 
        mtsp            t1, %sr1     /* Restore sr1 */
 
-       tlb_unlock0     spc,t0,t1
+       tlb_unlock0     spc,t0
        rfir
        nop
 
@@ -1628,7 +1625,7 @@ dbit_trap_20:
        
        idtlbt          pte,prot
 
-       tlb_unlock0     spc,t0,t1
+       tlb_unlock0     spc,t0
        rfir
        nop
 #endif
index ea505a8..472ce99 100644 (file)
@@ -640,9 +640,7 @@ cas_action:
        sub,<>  %r28, %r25, %r0
 2:     stw     %r24, 0(%r26)
        /* Free lock */
-#ifdef CONFIG_SMP
-       LDCW    0(%sr2,%r20), %r1                       /* Barrier */
-#endif
+       sync
        stw     %r20, 0(%sr2,%r20)
 #if ENABLE_LWS_DEBUG
        /* Clear thread register indicator */
@@ -657,9 +655,7 @@ cas_action:
 3:             
        /* Error occurred on load or store */
        /* Free lock */
-#ifdef CONFIG_SMP
-       LDCW    0(%sr2,%r20), %r1                       /* Barrier */
-#endif
+       sync
        stw     %r20, 0(%sr2,%r20)
 #if ENABLE_LWS_DEBUG
        stw     %r0, 4(%sr2,%r20)
@@ -861,9 +857,7 @@ cas2_action:
 
 cas2_end:
        /* Free lock */
-#ifdef CONFIG_SMP
-       LDCW    0(%sr2,%r20), %r1                       /* Barrier */
-#endif
+       sync
        stw     %r20, 0(%sr2,%r20)
        /* Enable interrupts */
        ssm     PSW_SM_I, %r0
@@ -874,9 +868,7 @@ cas2_end:
 22:
        /* Error occurred on load or store */
        /* Free lock */
-#ifdef CONFIG_SMP
-       LDCW    0(%sr2,%r20), %r1                       /* Barrier */
-#endif
+       sync
        stw     %r20, 0(%sr2,%r20)
        ssm     PSW_SM_I, %r0
        ldo     1(%r0),%r28