author    Laurentiu Tudor <laurentiu.tudor@nxp.com>    2020-01-23 11:19:25 +0000
committer Greg Kroah-Hartman <gregkh@linuxfoundation.org>    2020-04-17 10:48:55 +0200
commit    87279a3576deca065fcf224f9ee89a1eff61a767 (patch)
tree      db4268565b8b1627486b7a1571578a9a9393c4cc
parent    52f1c4257c5ae74fff387c9547e99b71ad443f88 (diff)
powerpc/fsl_booke: Avoid creating duplicate tlb1 entry
[ Upstream commit aa4113340ae6c2811e046f08c2bc21011d20a072 ]

In the current implementation, the call to loadcam_multi() is wrapped
between switch_to_as1() and restore_to_as0() calls, so when it tries to
create its own temporary AS=1 TLB1 entry it ends up duplicating the
existing one created by switch_to_as1(). Add a check to skip creating
the temporary entry if already running in AS=1.

Fixes: d9e1831a4202 ("powerpc/85xx: Load all early TLB entries at once")
Cc: stable@vger.kernel.org # v4.4+
Signed-off-by: Laurentiu Tudor <laurentiu.tudor@nxp.com>
Acked-by: Scott Wood <oss@buserror.net>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20200123111914.2565-1-laurentiu.tudor@nxp.com
Signed-off-by: Sasha Levin <sashal@kernel.org>
arch/powerpc/mm/tlb_nohash_low.S | 12 +++++++++++-
1 file changed, 11 insertions(+), 1 deletion(-)
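
The fix is easier to follow in outline than in assembly, so here is a
rough C sketch of the control flow the patch establishes. This is only
an illustration: read_msr(), create_temp_as1_entry() and
drop_temp_entry_and_return_to_as0() are hypothetical stand-ins for the
instruction sequences in the diff below, while loadcam_entry() and the
(first, num, tmp) argument list match the real routine.

#include <stdbool.h>

/* MSR[IS] is bit 5 of the 32-bit Book-E MSR (0x20), per reg_booke.h. */
#define MSR_IS 0x20

/* Hypothetical stand-ins for what the assembly does inline. */
unsigned long read_msr(void);
void create_temp_as1_entry(int tmp);
void drop_temp_entry_and_return_to_as0(int tmp);
void loadcam_entry(unsigned int index);

void loadcam_multi(int first, int num, int tmp)
{
	/* Sample MSR[IS] once on entry; the assembly keeps this in r11,
	 * which is why r11 is added to loadcam_entry()'s preserved set. */
	bool entered_in_as1 = read_msr() & MSR_IS;

	/* "bne 10f": skip the temporary entry if already in AS=1. */
	if (!entered_in_as1)
		create_temp_as1_entry(tmp);

	/* The "2: bl loadcam_entry" loop over TLBCAM[first..first+num). */
	for (int i = first; i < first + num; i++)
		loadcam_entry(i);

	/* "bne 3f": only tear down what we set up ourselves. */
	if (!entered_in_as1)
		drop_temp_entry_and_return_to_as0(tmp);
}

The key point is that MSR[IS] is sampled exactly once, on entry, and the
same saved value guards both the setup and the teardown, so the two
paths cannot fall out of sync.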
diff --git a/arch/powerpc/mm/tlb_nohash_low.S b/arch/powerpc/mm/tlb_nohash_low.S
index e066a658acac..56f58a362ea5 100644
--- a/arch/powerpc/mm/tlb_nohash_low.S
+++ b/arch/powerpc/mm/tlb_nohash_low.S
@@ -402,7 +402,7 @@ _GLOBAL(set_context)
* extern void loadcam_entry(unsigned int index)
*
* Load TLBCAM[index] entry in to the L2 CAM MMU
- * Must preserve r7, r8, r9, and r10
+ * Must preserve r7, r8, r9, r10 and r11
*/
_GLOBAL(loadcam_entry)
mflr r5
@@ -438,6 +438,10 @@ END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
*/
_GLOBAL(loadcam_multi)
mflr r8
+ /* Don't switch to AS=1 if already there */
+ mfmsr r11
+ andi. r11,r11,MSR_IS
+ bne 10f

/*
* Set up temporary TLB entry that is the same as what we're
@@ -463,6 +467,7 @@ _GLOBAL(loadcam_multi)
mtmsr r6
isync

+10:
mr r9,r3
add r10,r3,r4
2: bl loadcam_entry
@@ -471,6 +476,10 @@ _GLOBAL(loadcam_multi)
mr r3,r9
blt 2b

+ /* Don't return to AS=0 if we were in AS=1 at function start */
+ andi. r11,r11,MSR_IS
+ bne 3f
+
/* Return to AS=0 and clear the temporary entry */
mfmsr r6
rlwinm. r6,r6,0,~(MSR_IS|MSR_DS)
@@ -486,6 +495,7 @@ _GLOBAL(loadcam_multi)
tlbwe
isync

+3:
mtlr r8
blr
#endif