In the current implementation, the call to loadcam_multi() is wrapped
between switch_to_as1() and restore_to_as0() calls, so when it tries
to create its own temporary AS=1 TLB1 entry, it ends up duplicating the
existing one created by switch_to_as1(). Add a check to skip creating
the temporary entry if already running in AS=1.

Fixes: d9e1831a4202 ("powerpc/85xx: Load all early TLB entries at once")
Signed-off-by: Laurentiu Tudor <laurentiu.tudor@nxp.com>
Cc: stable@vger.kernel.org
---
 arch/powerpc/mm/nohash/tlb_low.S | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/arch/powerpc/mm/nohash/tlb_low.S b/arch/powerpc/mm/nohash/tlb_low.S
index 2ca407cedbe7..eaeee402f96e 100644
--- a/arch/powerpc/mm/nohash/tlb_low.S
+++ b/arch/powerpc/mm/nohash/tlb_low.S
@@ -397,7 +397,7 @@ _GLOBAL(set_context)
  * extern void loadcam_entry(unsigned int index)
  *
  * Load TLBCAM[index] entry in to the L2 CAM MMU
- * Must preserve r7, r8, r9, and r10
+ * Must preserve r7, r8, r9, r10 and r11
  */
 _GLOBAL(loadcam_entry)
        mflr    r5
@@ -433,6 +433,10 @@ END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
  */
 _GLOBAL(loadcam_multi)
        mflr    r8
+       /* Don't switch to AS=1 if already there */
+       mfmsr   r11
+       andi.   r11,r11,MSR_IS
+       bne     10f
 
        /*
         * Set up temporary TLB entry that is the same as what we're
@@ -458,6 +462,7 @@ _GLOBAL(loadcam_multi)
        mtmsr   r6
        isync
 
+10:
        mr      r9,r3
        add     r10,r3,r4
 2:     bl      loadcam_entry
@@ -466,6 +471,10 @@ _GLOBAL(loadcam_multi)
        mr      r3,r9
        blt     2b
 
+       /* Don't return to AS=0 if we were in AS=1 at function start */
+       andi.   r11,r11,MSR_IS
+       bne     3f
+
        /* Return to AS=0 and clear the temporary entry */
        mfmsr   r6
        rlwinm. r6,r6,0,~(MSR_IS|MSR_DS)
@@ -481,6 +490,7 @@ _GLOBAL(loadcam_multi)
        tlbwe
        isync
 
+3:
        mtlr    r8
        blr
 #endif
-- 
2.17.1

Reply via email to