summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorScott Wood <scottwood@freescale.com>2014-03-07 20:48:35 (GMT)
committerJose Rivera <German.Rivera@freescale.com>2014-03-28 13:56:46 (GMT)
commite2929e1e0ab910995090ce0714beab75618db694 (patch)
treee63e07f61bc686cb7e0c4fde92e0a92d87afd69a
parentd80041c4797c0b7d71caa602da4951bbb360f14f (diff)
downloadlinux-fsl-qoriq-e2929e1e0ab910995090ce0714beab75618db694.tar.xz
powerpc/e6500: Make TLB lock recursive
Once special level interrupts are supported, we may take nested TLB misses -- so allow the same thread to acquire the lock recursively.

The lock will not be effective against the nested TLB miss handler trying to write the same entry as the interrupted TLB miss handler, but that's also a problem on non-threaded CPUs that lack TLB write conditional. This will be addressed in the patch that enables crit/mc support by invalidating the TLB on return from level exceptions.

Signed-off-by: Scott Wood <scottwood@freescale.com>
Change-Id: I1c485fe78f289e038c318863f175b5fdc345afe6
Reviewed-on: http://git.am.freescale.net:8181/10264
Tested-by: Review Code-CDREVIEW <CDREVIEW@freescale.com>
Reviewed-by: Bharat Bhushan <Bharat.Bhushan@freescale.com>
Reviewed-by: Jose Rivera <German.Rivera@freescale.com>
-rw-r--r--arch/powerpc/mm/tlb_low_64e.S19
1 file changed, 12 insertions(+), 7 deletions(-)
diff --git a/arch/powerpc/mm/tlb_low_64e.S b/arch/powerpc/mm/tlb_low_64e.S
index 0a06f66..f924972 100644
--- a/arch/powerpc/mm/tlb_low_64e.S
+++ b/arch/powerpc/mm/tlb_low_64e.S
@@ -345,7 +345,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_SMT)
* r14 = page table base
* r13 = PACA
* r11 = tlb_per_core ptr
- * r10 = crap (free to use)
+ * r10 = cpu number
*/
tlb_miss_common_fsl_htw:
/*
@@ -355,21 +355,25 @@ tlb_miss_common_fsl_htw:
* MAS6:IND should be already set based on MAS4
*/
mtocrf 0x01,r11
- addi r10,r11,PACA_TLB_LOCK-1 /* -1 to compensate for low bit set */
+ addi r11,r11,PACA_TLB_LOCK-1 /* -1 to compensate for low bit set */
bf 31,1f /* no lock if TLB_PER_CORE_HAS_LOCK clear */
-2: lbarx r15,0,r10
+2: lbarx r15,0,r11
+ lhz r10,PACAPACAINDEX(r13)
cmpdi r15,0
+ cmpdi cr1,r15,1 /* set cr1.eq = 0 for non-recursive */
bne 3f
- li r15,1
- stbcx. r15,0,r10
+ stbcx. r10,0,r11
bne 2b
+4:
.subsection 1
-3: lbz r15,0(r10)
+3: cmpd cr1,r15,r10 /* recursive lock due to mcheck/crit/etc? */
+ beq cr1,4b /* unlock will happen if cr1.eq = 0 */
+ lbz r15,0(r11)
cmpdi r15,0
bne 3b
b 2b
.previous
-1:
+1: subi r11,r11,PACA_TLB_LOCK-1
mfspr r15,SPRN_MAS2
@@ -445,6 +449,7 @@ tlb_miss_common_fsl_htw:
tlb_miss_done_fsl_htw:
.macro tlb_unlock_fsl_htw
+ beq cr1,1f /* no unlock if lock was recursively grabbed */
mtocrf 0x01,r11
addi r10,r11,PACA_TLB_LOCK-1
li r15,0