Use patch sites and the associated helpers to manage the patching of the
TLB handlers instead of hardcoding it.

Signed-off-by: Christophe Leroy <christophe.le...@c-s.fr>
---
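Note for reviewers (illustrative only, paraphrased and possibly differing
in detail from the in-tree asm/code-patching.h): a patch site is an s32
stored in .rodata by the patch_site assembly macro, holding the offset
from the site back to the instruction to patch. The C side then resolves
and rewrites the instruction roughly along these lines:

	/* Resolve a patch site into the address of the target instruction. */
	static inline unsigned long patch_site_addr(s32 *site)
	{
		return (unsigned long)site + *site;
	}

	/* Clear the 'clr' bits, OR in 'set', write back via patch_instruction(). */
	static inline int modify_instruction_site(s32 *site, unsigned int clr,
						  unsigned int set)
	{
		unsigned int *addr = (unsigned int *)patch_site_addr(site);

		return patch_instruction(addr, (*addr & ~clr) | set);
	}
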
 arch/powerpc/include/asm/mmu-44x.h |  3 +++
 arch/powerpc/kernel/head_44x.S     | 11 +++++------
 arch/powerpc/mm/44x_mmu.c          | 14 +++-----------
 3 files changed, 11 insertions(+), 17 deletions(-)

diff --git a/arch/powerpc/include/asm/mmu-44x.h b/arch/powerpc/include/asm/mmu-44x.h
index 295b3dbb2698..28aa3b339c5e 100644
--- a/arch/powerpc/include/asm/mmu-44x.h
+++ b/arch/powerpc/include/asm/mmu-44x.h
@@ -111,6 +111,9 @@ typedef struct {
        unsigned long   vdso_base;
 } mm_context_t;
 
+/* patch sites */
+extern s32 patch__tlb_44x_hwater_D, patch__tlb_44x_hwater_I;
+
 #endif /* !__ASSEMBLY__ */
 
 #ifndef CONFIG_PPC_EARLY_DEBUG_44x
diff --git a/arch/powerpc/kernel/head_44x.S b/arch/powerpc/kernel/head_44x.S
index 37e4a7cf0065..bf23c19c92d6 100644
--- a/arch/powerpc/kernel/head_44x.S
+++ b/arch/powerpc/kernel/head_44x.S
@@ -40,6 +40,7 @@
 #include <asm/ptrace.h>
 #include <asm/synch.h>
 #include <asm/export.h>
+#include <asm/code-patching-asm.h>
 #include "head_booke.h"
 
 
@@ -382,10 +383,9 @@ interrupt_base:
        /* Increment, rollover, and store TLB index */
        addi    r13,r13,1
 
+       patch_site 0f, patch__tlb_44x_hwater_D
        /* Compare with watermark (instruction gets patched) */
-       .globl tlb_44x_patch_hwater_D
-tlb_44x_patch_hwater_D:
-       cmpwi   0,r13,1                 /* reserve entries */
+0:     cmpwi   0,r13,1                 /* reserve entries */
        ble     5f
        li      r13,0
 5:
@@ -478,10 +478,9 @@ tlb_44x_patch_hwater_D:
        /* Increment, rollover, and store TLB index */
        addi    r13,r13,1
 
+       patch_site 0f, patch__tlb_44x_hwater_I
        /* Compare with watermark (instruction gets patched) */
-       .globl tlb_44x_patch_hwater_I
-tlb_44x_patch_hwater_I:
-       cmpwi   0,r13,1                 /* reserve entries */
+0:     cmpwi   0,r13,1                 /* reserve entries */
        ble     5f
        li      r13,0
 5:
diff --git a/arch/powerpc/mm/44x_mmu.c b/arch/powerpc/mm/44x_mmu.c
index 12d92518e898..ea2b9af08a48 100644
--- a/arch/powerpc/mm/44x_mmu.c
+++ b/arch/powerpc/mm/44x_mmu.c
@@ -29,6 +29,7 @@
 #include <asm/mmu.h>
 #include <asm/page.h>
 #include <asm/cacheflush.h>
+#include <asm/code-patching.h>
 
 #include "mmu_decl.h"
 
@@ -43,22 +44,13 @@ unsigned long tlb_47x_boltmap[1024/8];
 
 static void ppc44x_update_tlb_hwater(void)
 {
-       extern unsigned int tlb_44x_patch_hwater_D[];
-       extern unsigned int tlb_44x_patch_hwater_I[];
-
        /* The TLB miss handlers hard codes the watermark in a cmpli
         * instruction to improve performances rather than loading it
         * from the global variable. Thus, we patch the instructions
         * in the 2 TLB miss handlers when updating the value
         */
-       tlb_44x_patch_hwater_D[0] = (tlb_44x_patch_hwater_D[0] & 0xffff0000) |
-               tlb_44x_hwater;
-       flush_icache_range((unsigned long)&tlb_44x_patch_hwater_D[0],
-                          (unsigned long)&tlb_44x_patch_hwater_D[1]);
-       tlb_44x_patch_hwater_I[0] = (tlb_44x_patch_hwater_I[0] & 0xffff0000) |
-               tlb_44x_hwater;
-       flush_icache_range((unsigned long)&tlb_44x_patch_hwater_I[0],
-                          (unsigned long)&tlb_44x_patch_hwater_I[1]);
+       modify_instruction_site(&patch__tlb_44x_hwater_D, 0xffff, tlb_44x_hwater);
+       modify_instruction_site(&patch__tlb_44x_hwater_I, 0xffff, tlb_44x_hwater);
 }
 
 /*
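
Post-diff note (illustrative, not part of the patch): with the helpers
sketched above, each new modify_instruction_site() call is intended to be
equivalent to the removed open-coded sequence. A hypothetical helper
(example_update_hwater_D is a made-up name) showing what the D-side call
boils down to:

	static void example_update_hwater_D(void)
	{
		unsigned int *insn =
			(unsigned int *)patch_site_addr(&patch__tlb_44x_hwater_D);

		/*
		 * Keep the upper 16 bits (cmpwi opcode/register fields) and
		 * replace the immediate with the current watermark, as the
		 * old code did; patch_instruction() also handles the icache
		 * flush that was previously done with flush_icache_range().
		 */
		patch_instruction(insn, (*insn & 0xffff0000) | tlb_44x_hwater);
	}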
-- 
2.13.3
