Add the detection and runtime code for ARM_SMCCC_ARCH_WORKAROUND_1.
Signed-off-by: Julien Grall <julien.gr...@arm.com>
---
Changes in v3:
- Add the missing call to smc #0.

Changes in v2:
- Patch added.
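
Note on the mechanism (illustration only, not part of the patch): the
runtime piece is the __smccc_workaround_1_smc_* vector added to bpi.S
below, which preserves x0-x3 and issues ARM_SMCCC_ARCH_WORKAROUND_1
through SMC. Detection presumably comes down to the standard SMCCC v1.1
probe: ask ARM_SMCCC_ARCH_FEATURES_FID whether
ARM_SMCCC_ARCH_WORKAROUND_1_FID is implemented. The sketch below shows
that probe in isolation; the function ID values come from the SMCCC
v1.1 spec, while smccc_1_1_smc() and struct smccc_res are hypothetical
stand-ins, not Xen's actual helpers.

    #include <stdbool.h>
    #include <stdint.h>

    /* Function IDs per the SMC Calling Convention v1.1. */
    #define ARM_SMCCC_ARCH_FEATURES_FID     0x80000001U
    #define ARM_SMCCC_ARCH_WORKAROUND_1_FID 0x80008000U

    struct smccc_res {
        uint64_t a0, a1, a2, a3;
    };

    /*
     * Stand-in for an SMCCC v1.1 fast call using the SMC conduit.
     * SMCCC v1.1 guarantees x4-x17 are preserved, so only x0-x3 and
     * memory need to be clobbered (AArch64, GCC extended asm).
     */
    static void smccc_1_1_smc(uint32_t fid, uint64_t arg0,
                              struct smccc_res *res)
    {
        register uint64_t x0 asm("x0") = fid;
        register uint64_t x1 asm("x1") = arg0;
        register uint64_t x2 asm("x2") = 0;
        register uint64_t x3 asm("x3") = 0;

        asm volatile ("smc #0"
                      : "+r" (x0), "+r" (x1), "+r" (x2), "+r" (x3)
                      :
                      : "memory");

        res->a0 = x0;
        res->a1 = x1;
        res->a2 = x2;
        res->a3 = x3;
    }

    /* Returns true if the firmware implements ARCH_WORKAROUND_1. */
    static bool has_smccc_arch_workaround_1(void)
    {
        struct smccc_res res;

        smccc_1_1_smc(ARM_SMCCC_ARCH_FEATURES_FID,
                      ARM_SMCCC_ARCH_WORKAROUND_1_FID, &res);

        /* NOT_SUPPORTED and friends are negative; >= 0 means implemented. */
        return (int64_t)res.a0 >= 0;
    }

Only when that probe succeeds does it make sense to install the
__smccc_workaround_1_smc_* vectors in place of the PSCI-based ones.
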
---
 xen/arch/arm/arm64/bpi.S    | 13 +++++++++++++
 xen/arch/arm/cpuerrata.c    | 32 +++++++++++++++++++++++++++++++-
 xen/include/asm-arm/smccc.h |  1 +
 3 files changed, 45 insertions(+), 1 deletion(-)
diff --git a/xen/arch/arm/arm64/bpi.S b/xen/arch/arm/arm64/bpi.S
index 4b7f1dc21f..981fb83a88 100644
--- a/xen/arch/arm/arm64/bpi.S
+++ b/xen/arch/arm/arm64/bpi.S
@@ -16,6 +16,8 @@
  * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */
 
+#include <asm/smccc.h>
+
 .macro ventry target
     .rept 31
     nop
@@ -81,6 +83,17 @@ ENTRY(__psci_hyp_bp_inval_start)
     add sp, sp, #(8 * 18)
 ENTRY(__psci_hyp_bp_inval_end)
 
+ENTRY(__smccc_workaround_1_smc_start)
+    sub sp, sp, #(8 * 4)
+    stp x2, x3, [sp, #(8 * 0)]
+    stp x0, x1, [sp, #(8 * 2)]
+    mov w0, #ARM_SMCCC_ARCH_WORKAROUND_1_FID
+    smc #0
+    ldp x2, x3, [sp, #(8 * 0)]
+    ldp x0, x1, [sp, #(8 * 2)]
+    add sp, sp, #(8 * 4)
+ENTRY(__smccc_workaround_1_smc_end)
+
 /*
  * Local variables:
  * mode: ASM
diff --git a/xen/arch/arm/cpuerrata.c b/xen/arch/arm/cpuerrata.c
index 8d5f8d372a..dec9074422 100644
--- a/xen/arch/arm/cpuerrata.c
+++ b/xen/arch/arm/cpuerrata.c
@@ -147,6 +147,34 @@ install_bp_hardening_vec(const struct arm_cpu_capabilities *entry,
     return ret;
 }
 
+extern char __smccc_workaround_1_smc_start[], __smccc_workaround_1_smc_end[];
+
+static bool
+check_smccc_arch_workaround_1(const struct arm_cpu_capabilities *entry)
+{
+    struct arm_smccc_res res;
+
+    /*
+     * Enable callbacks are called on every CPU based on the
+     * capabilities. So double-check whether the CPU matches the
+     * entry.
+     */
+    if ( !entry->matches(entry) )
+        return false;