Add a separate hcall entry point, kvmppc_hcall. This can be used to deal
with the hcall calling convention, which differs from that of other
interrupts.

Signed-off-by: Nicholas Piggin <npig...@gmail.com>
---
 arch/powerpc/kernel/exceptions-64s.S | 4 ++--
 arch/powerpc/kvm/book3s_64_entry.S   | 4 ++++
 2 files changed, 6 insertions(+), 2 deletions(-)
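
As a side note (not part of the patch; the label names below are made up):
the new kvmppc_hcall symbol currently just falls through into
kvmppc_interrupt, so this change on its own should not alter behaviour.
Having a distinct entry simply gives a later patch somewhere to put
hcall-specific handling ahead of the common path. A minimal GAS-style
powerpc sketch of that fall-through pattern:

	/* Illustrative only; labels, alignment and handling are placeholders. */
	.balign	32
	.globl	hcall_entry
hcall_entry:
	/* hcall-specific setup could be inserted here in a later change */
	/* then fall through into the common path */

	.balign	32
	.globl	common_entry
common_entry:
	/* handling shared by both entry points */
	blr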

diff --git a/arch/powerpc/kernel/exceptions-64s.S b/arch/powerpc/kernel/exceptions-64s.S
index e6f7fc7c61a1..c25395b5921a 100644
--- a/arch/powerpc/kernel/exceptions-64s.S
+++ b/arch/powerpc/kernel/exceptions-64s.S
@@ -2028,13 +2028,13 @@ END_FTR_SECTION_IFSET(CPU_FTR_HAS_PPR)
         * Requires __LOAD_FAR_HANDLER because kvmppc_interrupt lives
         * outside the head section.
         */
-       __LOAD_FAR_HANDLER(r10, kvmppc_interrupt)
+       __LOAD_FAR_HANDLER(r10, kvmppc_hcall)
        mtctr   r10
        ld      r10,PACA_EXGEN+EX_R10(r13)
        bctr
 #else
        ld      r10,PACA_EXGEN+EX_R10(r13)
-       b       kvmppc_interrupt
+       b       kvmppc_hcall
 #endif
 #endif
 
diff --git a/arch/powerpc/kvm/book3s_64_entry.S b/arch/powerpc/kvm/book3s_64_entry.S
index 8e7216f3c3ee..3b894b90862f 100644
--- a/arch/powerpc/kvm/book3s_64_entry.S
+++ b/arch/powerpc/kvm/book3s_64_entry.S
@@ -9,6 +9,10 @@
 /*
  * We come here from the first-level interrupt handlers.
  */
+.global        kvmppc_hcall
+.balign IFETCH_ALIGN_BYTES
+kvmppc_hcall:
+
 .global        kvmppc_interrupt
 .balign IFETCH_ALIGN_BYTES
 kvmppc_interrupt:
-- 
2.23.0
