On Fri, May 09, 2025 at 10:33:08PM -0700, Eric Biggers wrote:
>
> Yes, the PowerPC Poly1305 code incorrectly uses VSX without first checking
> crypto_simd_usable().  And PowerPC also doesn't support VSX in softirqs, or at
> least it doesn't claim to (it doesn't override may_use_simd(), so it gets the
> default from include/asm-generic/simd.h which returns false in softirq 
> context).
> Maybe add 'depends on BROKEN' to CRYPTO_POLY1305_P10 for now, and give the
> PowerPC folks (Cc'ed) a chance to fix this before removing the code.

OK this patch works for me:

---8<---
Add a SIMD fallback path for poly1305-p10 by converting the base 2^64
hash state into base 2^44.  In order to ensure that the generic
fallback actually uses base 2^44, add ARCH_SUPPORTS_INT128 to
powerpc and make poly1305-p10 depend on it.

Fixes: ba8f8624fde2 ("crypto: poly1305-p10 - Glue code for optmized Poly1305 implementation for ppc64le")
Signed-off-by: Herbert Xu <herb...@gondor.apana.org.au>

diff --git a/arch/powerpc/Kconfig b/arch/powerpc/Kconfig
index 6722625a406a..651e0c32957a 100644
--- a/arch/powerpc/Kconfig
+++ b/arch/powerpc/Kconfig
@@ -173,6 +173,7 @@ config PPC
        select ARCH_STACKWALK
        select ARCH_SUPPORTS_ATOMIC_RMW
        select ARCH_SUPPORTS_DEBUG_PAGEALLOC    if PPC_BOOK3S || PPC_8xx
+       select ARCH_SUPPORTS_INT128             if PPC64 && CC_HAS_INT128
        select ARCH_USE_BUILTIN_BSWAP
        select ARCH_USE_CMPXCHG_LOCKREF         if PPC64
        select ARCH_USE_MEMTEST
diff --git a/arch/powerpc/lib/crypto/Kconfig b/arch/powerpc/lib/crypto/Kconfig
index ffa541ad6d5d..6761fdb6193c 100644
--- a/arch/powerpc/lib/crypto/Kconfig
+++ b/arch/powerpc/lib/crypto/Kconfig
@@ -9,7 +9,7 @@ config CRYPTO_CHACHA20_P10
 
 config CRYPTO_POLY1305_P10
        tristate
-       depends on PPC64 && CPU_LITTLE_ENDIAN && VSX
+       depends on PPC64 && CPU_LITTLE_ENDIAN && VSX && ARCH_SUPPORTS_INT128
        default CRYPTO_LIB_POLY1305
        select CRYPTO_ARCH_HAVE_LIB_POLY1305
        select CRYPTO_LIB_POLY1305_GENERIC
diff --git a/arch/powerpc/lib/crypto/poly1305-p10-glue.c b/arch/powerpc/lib/crypto/poly1305-p10-glue.c
index 3f1664a724b6..280c10c48c53 100644
--- a/arch/powerpc/lib/crypto/poly1305-p10-glue.c
+++ b/arch/powerpc/lib/crypto/poly1305-p10-glue.c
@@ -6,6 +6,7 @@
  */
 #include <asm/switch_to.h>
 #include <crypto/internal/poly1305.h>
+#include <crypto/internal/simd.h>
 #include <linux/cpufeature.h>
 #include <linux/jump_label.h>
 #include <linux/kernel.h>
@@ -18,6 +19,11 @@ asmlinkage void poly1305_emit_64(const struct poly1305_state *state, const u32 n
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_p10);
 
+static inline bool is_state_base64(struct poly1305_block_state *state)
+{
+       return state->core_r.precomputed_s.r64[2];
+}
+
 static void vsx_begin(void)
 {
        preempt_disable();
@@ -30,12 +36,35 @@ static void vsx_end(void)
        preempt_enable();
 }
 
+static void convert_to_base2_44(struct poly1305_block_state *state)
+{
+       u8 raw_key[POLY1305_BLOCK_SIZE];
+       u64 h0, h1, h2;
+
+       if (!is_state_base64(state))
+               return;
+
+       state->core_r.precomputed_s.r64[2] = 0;
+       put_unaligned_le64(state->core_r.key.r64[0], raw_key + 0);
+       put_unaligned_le64(state->core_r.key.r64[1], raw_key + 8);
+       poly1305_core_setkey(&state->core_r, raw_key);
+
+       h0 = state->h.h64[0];
+       h1 = state->h.h64[1];
+       h2 = state->h.h64[2];
+       state->h.h64[0] = h0 & 0xfffffffffffULL;
+       state->h.h64[1] = h0 >> 44 | (h1 & 0xffffffULL) << 20;
+       state->h.h64[2] = h1 >> 24 | h2 << 40;
+}
+
 void poly1305_block_init_arch(struct poly1305_block_state *dctx,
                              const u8 raw_key[POLY1305_BLOCK_SIZE])
 {
-       if (!static_key_enabled(&have_p10))
+       dctx->core_r.precomputed_s.r64[2] = 0;
+       if (!static_key_enabled(&have_p10) || !crypto_simd_usable())
                return poly1305_block_init_generic(dctx, raw_key);
 
+       dctx->core_r.precomputed_s.r64[2] = 1;
        dctx->h = (struct poly1305_state){};
        dctx->core_r.key.r64[0] = get_unaligned_le64(raw_key + 0);
        dctx->core_r.key.r64[1] = get_unaligned_le64(raw_key + 8);
@@ -45,8 +74,11 @@ EXPORT_SYMBOL_GPL(poly1305_block_init_arch);
 void poly1305_blocks_arch(struct poly1305_block_state *state, const u8 *src,
                          unsigned int len, u32 padbit)
 {
-       if (!static_key_enabled(&have_p10))
+       if (!static_key_enabled(&have_p10) || !is_state_base64(state) ||
+           !crypto_simd_usable()) {
+               convert_to_base2_44(state);
                return poly1305_blocks_generic(state, src, len, padbit);
+       }
        vsx_begin();
        if (len >= POLY1305_BLOCK_SIZE * 4) {
                poly1305_p10le_4blocks(state, src, len);
@@ -66,7 +98,10 @@ void poly1305_emit_arch(const struct poly1305_state *state,
                        u8 digest[POLY1305_DIGEST_SIZE],
                        const u32 nonce[4])
 {
-       if (!static_key_enabled(&have_p10))
+       struct poly1305_block_state *dctx =
+               container_of(state, struct poly1305_block_state, h);
+
+       if (!static_key_enabled(&have_p10) || !is_state_base64(dctx))
                return poly1305_emit_generic(state, digest, nonce);
        poly1305_emit_64(state, nonce, digest);
 }
-- 
Email: Herbert Xu <herb...@gondor.apana.org.au>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt

Reply via email to