Implement core support for PSCI. As this is generic code, it doesn't
implement anything really useful (all of the functions simply return
Not Implemented).

This is largely ported from the similar code that exists for ARMv7.
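
The "Not Implemented" stubs are declared weak, so an SoC port can later
supply real implementations without touching this file. For illustration
only (not part of this patch), an override of the CPU_ON stub could look
roughly like the sketch below; plat_cpu_on is a hypothetical platform
helper that powers up the target core and returns a PSCI status in x0,
with the arguments following the PSCI 0.2 SMC64 convention
(x1 = target MPIDR, x2 = entry point, x3 = context ID):

  ENTRY(psci_0_2_cpu_on_64)
         stp     x29, x30, [sp, #-16]!   /* the bl below clobbers the link register */
         bl      plat_cpu_on             /* hypothetical SoC helper, status back in x0 */
         ldp     x29, x30, [sp], #16
         ret
  ENDPROC(psci_0_2_cpu_on_64)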

Signed-off-by: Arnab Basu <arnab_b...@rocketmail.com>
Cc: Bhupesh Sharma <bhupesh.sha...@freescale.com>
Cc: Marc Zyngier <marc.zyng...@arm.com>
---
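Note (not part of this patch): the psci_vectors table added below only
takes effect once it has been installed as the EL3 vector base, which is
left to SoC/board code. Roughly, before dropping to a lower exception
level, something along these lines is needed:

         adr     x0, psci_vectors        /* secure exception vectors from psci.S */
         msr     vbar_el3, x0            /* route EL3 exceptions (e.g. SMC64 calls) here */
         isb
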
 arch/arm/cpu/armv8/Makefile      |   3 +-
 arch/arm/cpu/armv8/psci.S        | 162 +++++++++++++++++++++++++++++++++++++++
 arch/arm/include/asm/armv8/esr.h |  12 +++
 3 files changed, 176 insertions(+), 1 deletion(-)
 create mode 100644 arch/arm/cpu/armv8/psci.S
 create mode 100644 arch/arm/include/asm/armv8/esr.h

diff --git a/arch/arm/cpu/armv8/Makefile b/arch/arm/cpu/armv8/Makefile
index 74c32b2..1c696ea 100644
--- a/arch/arm/cpu/armv8/Makefile
+++ b/arch/arm/cpu/armv8/Makefile
@@ -16,4 +16,5 @@ obj-y += tlb.o
 obj-y  += transition.o
 obj-y  += cpu-dt.o
 
-obj-$(CONFIG_FSL_LSCH3) += fsl-lsch3/
+obj-$(CONFIG_ARMV8_PSCI)       += psci.o
+obj-$(CONFIG_FSL_LSCH3)        += fsl-lsch3/
diff --git a/arch/arm/cpu/armv8/psci.S b/arch/arm/cpu/armv8/psci.S
new file mode 100644
index 0000000..6028020
--- /dev/null
+++ b/arch/arm/cpu/armv8/psci.S
@@ -0,0 +1,162 @@
+/*
+ * (C) Copyright 2014
+ * Arnab Basu <arnab.b...@freescale.com>
+ * (C) Copyright 2015
+ * Arnab Basu <arnab_b...@rocketmail.com>
+ *
+ * Based on arch/arm/cpu/armv7/psci.S
+ *
+ * SPDX-License-Identifier:    GPL-2.0+
+ */
+
+#include <linux/linkage.h>
+#include <asm/psci.h>
+#include <asm/armv8/esr.h>
+
+#define PSCI_FN(__id, __fn) \
+       .quad __id; \
+       .quad __fn
+
+.pushsection ._secure.text, "ax"
+
+ENTRY(psci_0_2_cpu_suspend_64)
+ENTRY(psci_0_2_cpu_on_64)
+ENTRY(psci_0_2_affinity_info_64)
+ENTRY(psci_0_2_migrate_64)
+ENTRY(psci_0_2_migrate_info_up_cpu_64)
+       mov     x0, #ARM_PSCI_RET_NI    /* Return -1 (Not Implemented) */
+       ret
+ENDPROC(psci_0_2_cpu_suspend_64)
+ENDPROC(psci_0_2_cpu_on_64)
+ENDPROC(psci_0_2_affinity_info_64)
+ENDPROC(psci_0_2_migrate_64)
+ENDPROC(psci_0_2_migrate_info_up_cpu_64)
+.weak psci_0_2_cpu_suspend_64
+.weak psci_0_2_cpu_on_64
+.weak psci_0_2_affinity_info_64
+.weak psci_0_2_migrate_64
+.weak psci_0_2_migrate_info_up_cpu_64
+
+ENTRY(psci_0_2_psci_version)
+       mov     x0, #2                  /* Return Major = 0, Minor = 2 */
+       ret
+ENDPROC(psci_0_2_psci_version)
+
+.align 4
+_psci_0_2_table:
+       PSCI_FN(PSCI_0_2_FN_PSCI_VERSION, psci_0_2_psci_version)
+       PSCI_FN(PSCI_0_2_FN64_CPU_SUSPEND, psci_0_2_cpu_suspend_64)
+       PSCI_FN(PSCI_0_2_FN64_CPU_ON, psci_0_2_cpu_on_64)
+       PSCI_FN(PSCI_0_2_FN64_AFFINITY_INFO, psci_0_2_affinity_info_64)
+       PSCI_FN(PSCI_0_2_FN64_MIGRATE, psci_0_2_migrate_64)
+       PSCI_FN(PSCI_0_2_FN64_MIGRATE_INFO_UP_CPU, psci_0_2_migrate_info_up_cpu_64)
+       PSCI_FN(0, 0)
+
+.macro psci_enter
+       stp     x29, x30, [sp, #-16]!
+       stp     x27, x28, [sp, #-16]!
+       stp     x25, x26, [sp, #-16]!
+       stp     x23, x24, [sp, #-16]!
+       stp     x21, x22, [sp, #-16]!
+       stp     x19, x20, [sp, #-16]!
+       str     x18, [sp, #-8]!
+       mrs     x16, sp_el0
+       mrs     x15, elr_el3
+       stp     x15, x16, [sp, #-16]!
+
+       /* Switching to Secure State to Execute U-Boot */
+       mrs     x4, scr_el3
+       bic     x4, x4, #1
+       msr     scr_el3, x4
+.endm
+
+.macro psci_return
+       /* Switching to Non-Secure State to Execute OS */
+       mrs     x4, scr_el3
+       orr     x4, x4, #1
+       msr     scr_el3, x4
+
+       ldp     x15, x16, [sp], #16
+       msr     elr_el3, x15
+       msr     sp_el0, x16
+       ldr     x18, [sp], #8
+       ldp     x19, x20, [sp], #16
+       ldp     x21, x22, [sp], #16
+       ldp     x23, x24, [sp], #16
+       ldp     x25, x26, [sp], #16
+       ldp     x27, x28, [sp], #16
+       ldp     x29, x30, [sp], #16
+       eret
+.endm
+
+ENTRY(_smc_psci)
+       psci_enter
+       adr     x4, _psci_0_2_table
+1:     ldp     x5, x6, [x4]          /* Load PSCI function ID and target PC */
+       cbz     x5, fn_not_found      /* If we reached the end, bail out */
+       cmp     x0, x5                /* If not matching, try next entry */
+       b.eq    fn_call
+       add     x4, x4, #16
+       b       1b
+
+fn_call:
+       blr     x6
+       psci_return
+
+fn_not_found:
+       mov     x0, #ARM_PSCI_RET_NI    /* Return -1 (Not Supported) */
+       psci_return
+ENDPROC(_smc_psci)
+
+ENTRY(unhandled_exception)
+/* Returning to the place that caused the exception has the potential to cause
+ * an endless loop of taking the same exception over and over again. Looping
+ * here seems marginally better.
+ */
+1:      b       1b
+ENDPROC(unhandled_exception)
+
+__handle_sync:
+       str     x4, [sp, #-8]!
+       mrs     x4, esr_el3
+       ubfx    x4, x4, #26, #6
+       cmp     x4, #ESR_EC_SMC64
+       b.eq    smc_found
+       ldr     x4, [sp], #8
+       b       unhandled_exception
+smc_found:
+       ldr     x4, [sp], #8
+       b       _smc_psci
+
+/*
+ * PSCI Exception vectors.
+ */
+       .align  11
+       .globl  psci_vectors
+psci_vectors:
+       .align  7
+       b       unhandled_exception     /* Current EL Synchronous Thread */
+       .align  7
+       b       unhandled_exception     /* Current EL IRQ Thread */
+       .align  7
+       b       unhandled_exception     /* Current EL FIQ Thread */
+       .align  7
+       b       unhandled_exception     /* Current EL Error Thread */
+       .align  7
+       b       unhandled_exception     /* Current EL Synchronous Handler */
+       .align  7
+       b       unhandled_exception     /* Current EL IRQ Handler */
+       .align  7
+       b       unhandled_exception     /* Current EL FIQ Handler */
+       .align  7
+       b       unhandled_exception     /* Current EL Error Handler */
+       .align  7
+       b       __handle_sync           /* Lower EL Synchronous (64b) */
+       .align  7
+       b       unhandled_exception     /* Lower EL IRQ (64b) */
+       .align  7
+       b       unhandled_exception     /* Lower EL FIQ (64b) */
+       .align  7
+       b       unhandled_exception     /* Lower EL Error (64b) */
+
+.popsection
diff --git a/arch/arm/include/asm/armv8/esr.h b/arch/arm/include/asm/armv8/esr.h
new file mode 100644
index 0000000..59d4289
--- /dev/null
+++ b/arch/arm/include/asm/armv8/esr.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright 2015, Arnab Basu <arnab_b...@rocketmail.com>
+ *
+ * SPDX-License-Identifier:     GPL-2.0+
+ */
+
+#ifndef _ARMV8_ESR_H
+#define _ARMV8_ESR_H
+
+#define ESR_EC_SMC64   (0x17)
+
+#endif /* _ARMV8_ESR_H */
-- 
1.9.1
