Module Name:	src
Committed By:	riastradh
Date:		Wed Mar 1 08:17:24 UTC 2023
Modified Files:
	src/sys/arch/aarch64/aarch64: cpuswitch.S

Log Message:
aarch64: Optimization: Omit needless membar when triggering softint.

When we are triggering a softint, the softint lwp can't already hold
any mutexes.  So any path to mutex_exit(mtx) must go via
mutex_enter(mtx), which is always done with atomic r/m/w, and we need
not issue any explicit barrier between ci->ci_curlwp = softlwp and a
potential load of mtx->mtx_owner in mutex_exit.

PR kern/57240

XXX pullup-9
XXX pullup-10


To generate a diff of this commit:
cvs rdiff -u -r1.40 -r1.41 src/sys/arch/aarch64/aarch64/cpuswitch.S

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.
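For context on the barrier the diff below removes: in the ordinary
cpu_switchto path, the "dmb ish" after the ci_curlwp store is half of
a store-before-load pairing with the barriers in mutex_vector_enter.
The following C11-atomics sketch is a hypothetical model of that
pairing, not NetBSD source: only the field names mtx_owner, ci_curlwp,
and l_cpu mirror the kernel; the struct layouts, the MTX_WAITERS flag,
and every function here are invented for illustration.

	#include <stdatomic.h>
	#include <stdbool.h>
	#include <stdint.h>

	#define MTX_WAITERS	((uintptr_t)1)	/* invented flag bit */

	struct lwp;
	struct cpu { _Atomic(struct lwp *) ci_curlwp; };	/* ~ cpu_info */
	struct lwp { struct cpu *l_cpu; };
	struct mtx { _Atomic uintptr_t mtx_owner; };	/* owner | waiters bit */

	/*
	 * Switching CPU: publish the new curlwp, then (ordinarily) a
	 * store-before-load barrier -- the role the "dmb ish" plays --
	 * so the store is ordered before any later mutex_exit load
	 * performed by newlwp on this CPU.
	 */
	void
	cpu_switchto_sketch(struct cpu *ci, struct lwp *newlwp)
	{
		atomic_store_explicit(&ci->ci_curlwp, newlwp,
		    memory_order_relaxed);
		atomic_thread_fence(memory_order_seq_cst);
	}

	/*
	 * Releasing lwp, some time later on the same CPU: if it sees no
	 * waiters bit it releases with a plain store and skips any
	 * wakeup.  This relaxed load is the "potential load of
	 * mtx->mtx_owner in mutex_exit" from the log message.
	 */
	bool
	mutex_exit_fastpath_sketch(struct mtx *mtx, struct lwp *curlwp)
	{
		uintptr_t owner = atomic_load_explicit(&mtx->mtx_owner,
		    memory_order_relaxed);

		if (owner == (uintptr_t)curlwp) {	/* no waiters bit */
			atomic_store_explicit(&mtx->mtx_owner, (uintptr_t)0,
			    memory_order_release);
			return true;
		}
		return false;	/* waiters present: take the slow path */
	}

	/*
	 * Contending CPU: announce itself with an atomic r/m/w on
	 * mtx_owner, then check whether the owner is still on its CPU
	 * before deciding to sleep rather than spin.
	 */
	bool
	owner_still_running_sketch(struct mtx *mtx)
	{
		struct lwp *l = (struct lwp *)(atomic_fetch_or_explicit(
		    &mtx->mtx_owner, MTX_WAITERS, memory_order_seq_cst) &
		    ~MTX_WAITERS);

		atomic_thread_fence(memory_order_seq_cst);	/* st-before-ld */
		return l != NULL && atomic_load_explicit(&l->l_cpu->ci_curlwp,
		    memory_order_relaxed) == l;
	}

The point of the pairing is the usual store-buffering guarantee: the
releasing CPU cannot miss the waiters bit at the same time as the
contending CPU misses the freshly stored ci_curlwp.  The log message's
argument is that a softint lwp can only reach the mutex_exit load after
a mutex_enter-style atomic r/m/w, so the fence in cpu_switchto_sketch's
position can be omitted on the softint path.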
Modified files:

Index: src/sys/arch/aarch64/aarch64/cpuswitch.S
diff -u src/sys/arch/aarch64/aarch64/cpuswitch.S:1.40 src/sys/arch/aarch64/aarch64/cpuswitch.S:1.41
--- src/sys/arch/aarch64/aarch64/cpuswitch.S:1.40	Thu Feb 23 14:54:57 2023
+++ src/sys/arch/aarch64/aarch64/cpuswitch.S	Wed Mar 1 08:17:24 2023
@@ -1,4 +1,4 @@
-/* $NetBSD: cpuswitch.S,v 1.40 2023/02/23 14:54:57 riastradh Exp $ */
+/* $NetBSD: cpuswitch.S,v 1.41 2023/03/01 08:17:24 riastradh Exp $ */

 /*-
  * Copyright (c) 2014, 2020 The NetBSD Foundation, Inc.
@@ -38,7 +38,7 @@
 #include "opt_ddb.h"
 #include "opt_kasan.h"

-RCSID("$NetBSD: cpuswitch.S,v 1.40 2023/02/23 14:54:57 riastradh Exp $")
+RCSID("$NetBSD: cpuswitch.S,v 1.41 2023/03/01 08:17:24 riastradh Exp $")

 ARMV8_DEFINE_OPTIONS

@@ -224,7 +224,13 @@ ENTRY_NP(cpu_switchto_softint)
 	msr	tpidr_el1, x0		/* curlwp = softlwp; */
 	dmb	ishst			/* for mutex_enter; see cpu_switchto */
 	str	x0, [x20, #CI_CURLWP]	/* curcpu()->ci_curlwp = softlwp; */
-	dmb	ish			/* for mutex_enter; see cpu_switchto */
+	/*
+	 * No need for barrier after ci->ci_curlwp = softlwp -- when we
+	 * enter a softint lwp, it can't be holding any mutexes, so it
+	 * can't release any until after it has acquired them, so we
+	 * need not participate in the protocol with mutex_vector_enter
+	 * barriers here.
+	 */
 	mov	x5, #CPACR_FPEN_NONE
 	msr	cpacr_el1, x5		/* cpacr_el1 = CPACR_FPEN_NONE */
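To pin down where that atomic r/m/w sits relative to the store the new
comment talks about, here is a similarly hypothetical sketch of the
softint path, reusing the declarations from the sketch above.  The CAS
loop is only an assumed stand-in for the real mutex_enter, placed to
show which operation the log message relies on.

	/*
	 * Softint path, reusing the types and helpers from the previous
	 * sketch; not NetBSD source.
	 */
	void
	softint_path_sketch(struct cpu *ci, struct lwp *softlwp,
	    struct mtx *mtx)
	{
		/* cpu_switchto_softint: curlwp is published with no
		 * trailing barrier, per the change above. */
		atomic_store_explicit(&ci->ci_curlwp, softlwp,
		    memory_order_relaxed);

		/*
		 * The softint lwp holds no mutexes here, so before it
		 * can reach the mutex_exit load it must first pass
		 * through an acquiring atomic r/m/w like this one; per
		 * the log message, that r/m/w is why no separate
		 * barrier is needed after the ci_curlwp store.
		 */
		uintptr_t expected = 0;
		while (!atomic_compare_exchange_weak_explicit(
		    &mtx->mtx_owner, &expected, (uintptr_t)softlwp,
		    memory_order_seq_cst, memory_order_relaxed))
			expected = 0;	/* contended: the real code blocks */

		/* ... softint work ..., then release as sketched above. */
		(void)mutex_exit_fastpath_sketch(mtx, softlwp);
	}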