Message-Id: <20200817143808.121518283@linuxfoundation.org>
Date: Mon, 17 Aug 2020 17:17:28 +0200
From: Greg Kroah-Hartman <gregkh@...uxfoundation.org>
To: linux-kernel@...r.kernel.org
Cc: Greg Kroah-Hartman <gregkh@...uxfoundation.org>,
stable@...r.kernel.org, Helge Deller <deller@....de>
Subject: [PATCH 5.4 247/270] Revert "parisc: Use ldcw instruction for SMP spinlock release barrier"

From: Helge Deller <deller@....de>

commit 6e9f06ee6c9566f3606d93182ac8f803a148504b upstream.

This reverts commit 9e5c602186a692a7e848c0da17aed40f49d30519.

No need to use the ldcw instruction as SMP spinlock release barrier.
Revert it to gain back speed again.

Signed-off-by: Helge Deller <deller@....de>
Cc: <stable@...r.kernel.org> # v5.2+
Signed-off-by: Greg Kroah-Hartman <gregkh@...uxfoundation.org>
---
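A minimal sketch of arch_spin_unlock() as it reads once the spinlock.h hunk
below is applied (it simply mirrors that hunk; __ldcw_align() and mb() are the
existing parisc helpers): the release path is again a full memory barrier
followed by a plain store, with no ldcw load on the unlock side.

static inline void arch_spin_unlock(arch_spinlock_t *x)
{
	volatile unsigned int *a;

	a = __ldcw_align(x);
	mb();		/* full barrier orders the critical section before the release */
	*a = 1;		/* plain store marks the lock free (ldcw-based locks are free when non-zero) */
}
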
arch/parisc/include/asm/spinlock.h | 4 ---
arch/parisc/kernel/entry.S | 43 +++++++++++++++++--------------------
arch/parisc/kernel/syscall.S | 16 +++----------
3 files changed, 24 insertions(+), 39 deletions(-)
--- a/arch/parisc/include/asm/spinlock.h
+++ b/arch/parisc/include/asm/spinlock.h
@@ -37,11 +37,7 @@ static inline void arch_spin_unlock(arch
volatile unsigned int *a;
a = __ldcw_align(x);
-#ifdef CONFIG_SMP
- (void) __ldcw(a);
-#else
mb();
-#endif
*a = 1;
}
--- a/arch/parisc/kernel/entry.S
+++ b/arch/parisc/kernel/entry.S
@@ -454,9 +454,8 @@
nop
LDREG 0(\ptp),\pte
bb,<,n \pte,_PAGE_PRESENT_BIT,3f
- LDCW 0(\tmp),\tmp1
b \fault
- stw \spc,0(\tmp)
+ stw,ma \spc,0(\tmp)
99: ALTERNATIVE(98b, 99b, ALT_COND_NO_SMP, INSN_NOP)
#endif
2: LDREG 0(\ptp),\pte
@@ -465,22 +464,20 @@
.endm
/* Release pa_tlb_lock lock without reloading lock address. */
- .macro tlb_unlock0 spc,tmp,tmp1
+ .macro tlb_unlock0 spc,tmp
#ifdef CONFIG_SMP
98: or,COND(=) %r0,\spc,%r0
- LDCW 0(\tmp),\tmp1
- or,COND(=) %r0,\spc,%r0
- stw \spc,0(\tmp)
+ stw,ma \spc,0(\tmp)
99: ALTERNATIVE(98b, 99b, ALT_COND_NO_SMP, INSN_NOP)
#endif
.endm
/* Release pa_tlb_lock lock. */
- .macro tlb_unlock1 spc,tmp,tmp1
+ .macro tlb_unlock1 spc,tmp
#ifdef CONFIG_SMP
98: load_pa_tlb_lock \tmp
99: ALTERNATIVE(98b, 99b, ALT_COND_NO_SMP, INSN_NOP)
- tlb_unlock0 \spc,\tmp,\tmp1
+ tlb_unlock0 \spc,\tmp
#endif
.endm
@@ -1163,7 +1160,7 @@ dtlb_miss_20w:
idtlbt pte,prot
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1189,7 +1186,7 @@ nadtlb_miss_20w:
idtlbt pte,prot
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1223,7 +1220,7 @@ dtlb_miss_11:
mtsp t1, %sr1 /* Restore sr1 */
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1256,7 +1253,7 @@ nadtlb_miss_11:
mtsp t1, %sr1 /* Restore sr1 */
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1285,7 +1282,7 @@ dtlb_miss_20:
idtlbt pte,prot
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1313,7 +1310,7 @@ nadtlb_miss_20:
idtlbt pte,prot
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1420,7 +1417,7 @@ itlb_miss_20w:
iitlbt pte,prot
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1444,7 +1441,7 @@ naitlb_miss_20w:
iitlbt pte,prot
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1478,7 +1475,7 @@ itlb_miss_11:
mtsp t1, %sr1 /* Restore sr1 */
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1502,7 +1499,7 @@ naitlb_miss_11:
mtsp t1, %sr1 /* Restore sr1 */
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1532,7 +1529,7 @@ itlb_miss_20:
iitlbt pte,prot
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1552,7 +1549,7 @@ naitlb_miss_20:
iitlbt pte,prot
- tlb_unlock1 spc,t0,t1
+ tlb_unlock1 spc,t0
rfir
nop
@@ -1582,7 +1579,7 @@ dbit_trap_20w:
idtlbt pte,prot
- tlb_unlock0 spc,t0,t1
+ tlb_unlock0 spc,t0
rfir
nop
#else
@@ -1608,7 +1605,7 @@ dbit_trap_11:
mtsp t1, %sr1 /* Restore sr1 */
- tlb_unlock0 spc,t0,t1
+ tlb_unlock0 spc,t0
rfir
nop
@@ -1628,7 +1625,7 @@ dbit_trap_20:
idtlbt pte,prot
- tlb_unlock0 spc,t0,t1
+ tlb_unlock0 spc,t0
rfir
nop
#endif
--- a/arch/parisc/kernel/syscall.S
+++ b/arch/parisc/kernel/syscall.S
@@ -640,9 +640,7 @@ cas_action:
sub,<> %r28, %r25, %r0
2: stw %r24, 0(%r26)
/* Free lock */
-#ifdef CONFIG_SMP
- LDCW 0(%sr2,%r20), %r1 /* Barrier */
-#endif
+ sync
stw %r20, 0(%sr2,%r20)
#if ENABLE_LWS_DEBUG
/* Clear thread register indicator */
@@ -657,9 +655,7 @@ cas_action:
3:
/* Error occurred on load or store */
/* Free lock */
-#ifdef CONFIG_SMP
- LDCW 0(%sr2,%r20), %r1 /* Barrier */
-#endif
+ sync
stw %r20, 0(%sr2,%r20)
#if ENABLE_LWS_DEBUG
stw %r0, 4(%sr2,%r20)
@@ -861,9 +857,7 @@ cas2_action:
cas2_end:
/* Free lock */
-#ifdef CONFIG_SMP
- LDCW 0(%sr2,%r20), %r1 /* Barrier */
-#endif
+ sync
stw %r20, 0(%sr2,%r20)
/* Enable interrupts */
ssm PSW_SM_I, %r0
@@ -874,9 +868,7 @@ cas2_end:
22:
/* Error occurred on load or store */
/* Free lock */
-#ifdef CONFIG_SMP
- LDCW 0(%sr2,%r20), %r1 /* Barrier */
-#endif
+ sync
stw %r20, 0(%sr2,%r20)
ssm PSW_SM_I, %r0
ldo 1(%r0),%r28
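
The syscall.S hunks make the matching change on the LWS compare-and-swap
paths: the lock word is freed by a plain store again, ordered by a sync
instruction instead of an ldcw load. A rough C rendering of that release
sequence, for illustration only (the real code is hand-written assembly in
arch/parisc/kernel/syscall.S, and the helper name here is made up):

static inline void lws_lock_release(volatile unsigned int *lock)
{
	/* "sync" orders the critical-section stores ahead of the release */
	asm volatile("sync" : : : "memory");
	*lock = 1;	/* any non-zero value marks an ldcw-based lock free */
}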