Signed-off-by: Richard Henderson <richard.hender...@linaro.org>
---
 host/include/generic/host/crypto/clmul.h | 17 ++++
 include/crypto/clmul.h                   | 61 +++++++++++++++++++
 crypto/clmul.c                           | 76 ++++++++++++++++++++++++
 crypto/meson.build                       |  9 ++-
 4 files changed, 160 insertions(+), 3 deletions(-)
 create mode 100644 host/include/generic/host/crypto/clmul.h
 create mode 100644 include/crypto/clmul.h
 create mode 100644 crypto/clmul.c
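For reference: carry-less multiplication is binary long multiplication with the
carries dropped, i.e. multiplication of polynomials over GF(2). A standalone
sketch of a single 8x8->16 multiply, equivalent to one 16-bit lane of
clmul_8x4_even_gen() below (clmul_8x8_ref is an illustrative name, not part of
the patch):

    #include <assert.h>
    #include <stdint.h>

    /* One 8x8->16 carry-less multiply: shift-and-XOR, no carries. */
    static uint16_t clmul_8x8_ref(uint8_t n, uint8_t m)
    {
        uint16_t r = 0;

        for (int i = 0; i < 8; i++) {
            if (n & (1u << i)) {
                r ^= (uint16_t)(m << i);
            }
        }
        return r;
    }

    int main(void)
    {
        /* (x + 1) * (x + 1) = x^2 + 1 over GF(2): no middle term. */
        assert(clmul_8x8_ref(0x03, 0x03) == 0x05);
        /* Squaring interleaves zeros between the operand's bits. */
        assert(clmul_8x8_ref(0xff, 0xff) == 0x5555);
        return 0;
    }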
diff --git a/host/include/generic/host/crypto/clmul.h b/host/include/generic/host/crypto/clmul.h
new file mode 100644
index 0000000000..694705f703
--- /dev/null
+++ b/host/include/generic/host/crypto/clmul.h
@@ -0,0 +1,17 @@
+/*
+ * No host specific carry-less multiply acceleration.
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ */
+
+#ifndef GENERIC_HOST_CRYPTO_CLMUL_H
+#define GENERIC_HOST_CRYPTO_CLMUL_H
+
+/* Defer everything to the generic routines. */
+#define clmul_8x8_low clmul_8x8_low_gen
+#define clmul_8x4_even clmul_8x4_even_gen
+#define clmul_8x4_odd clmul_8x4_odd_gen
+#define clmul_8x8_even clmul_8x8_even_gen
+#define clmul_8x8_odd clmul_8x8_odd_gen
+#define clmul_8x8_packed clmul_8x8_packed_gen
+
+#endif /* GENERIC_HOST_CRYPTO_CLMUL_H */
diff --git a/include/crypto/clmul.h b/include/crypto/clmul.h
new file mode 100644
index 0000000000..7f19205d6f
--- /dev/null
+++ b/include/crypto/clmul.h
@@ -0,0 +1,61 @@
+/*
+ * Carry-less multiply
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ *
+ * Copyright (C) 2023 Linaro, Ltd.
+ */
+
+#ifndef CRYPTO_CLMUL_H
+#define CRYPTO_CLMUL_H
+
+#include "qemu/int128.h"
+
+/**
+ * clmul_8x8_low:
+ *
+ * Perform eight 8x8->8 carry-less multiplies.
+ */
+uint64_t clmul_8x8_low_gen(uint64_t, uint64_t);
+
+/**
+ * clmul_8x4_even:
+ *
+ * Perform four 8x8->16 carry-less multiplies.
+ * The odd bytes of the inputs are ignored.
+ */
+uint64_t clmul_8x4_even_gen(uint64_t, uint64_t);
+
+/**
+ * clmul_8x4_odd:
+ *
+ * Perform four 8x8->16 carry-less multiplies.
+ * The even bytes of the inputs are ignored.
+ */
+uint64_t clmul_8x4_odd_gen(uint64_t, uint64_t);
+
+/**
+ * clmul_8x8_even:
+ *
+ * Perform eight 8x8->16 carry-less multiplies.
+ * The odd bytes of the inputs are ignored.
+ */
+Int128 clmul_8x8_even_gen(Int128, Int128);
+
+/**
+ * clmul_8x8_odd:
+ *
+ * Perform eight 8x8->16 carry-less multiplies.
+ * The even bytes of the inputs are ignored.
+ */
+Int128 clmul_8x8_odd_gen(Int128, Int128);
+
+/**
+ * clmul_8x8_packed:
+ *
+ * Perform eight 8x8->16 carry-less multiplies.
+ */
+Int128 clmul_8x8_packed_gen(uint64_t, uint64_t);
+
+#include "host/crypto/clmul.h"
+
+#endif /* CRYPTO_CLMUL_H */
diff --git a/crypto/clmul.c b/crypto/clmul.c
new file mode 100644
index 0000000000..866704e751
--- /dev/null
+++ b/crypto/clmul.c
@@ -0,0 +1,76 @@
+/*
+ * Generic carry-less multiply operations.
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ */
+
+#include "qemu/osdep.h"
+#include "crypto/clmul.h"
+
+uint64_t clmul_8x8_low_gen(uint64_t n, uint64_t m)
+{
+    uint64_t r = 0;
+
+    for (int i = 0; i < 8; ++i) {
+        uint64_t mask = (n & 0x0101010101010101ull) * 0xff;
+        r ^= m & mask;
+        m = (m << 1) & 0xfefefefefefefefeull;
+        n >>= 1;
+    }
+    return r;
+}
+
+uint64_t clmul_8x4_even_gen(uint64_t n, uint64_t m)
+{
+    uint64_t r = 0;
+
+    n &= 0x00ff00ff00ff00ffull;
+    m &= 0x00ff00ff00ff00ffull;
+
+    for (int i = 0; i < 8; ++i) {
+        uint64_t mask = (n & 0x0001000100010001ull) * 0xffff;
+        r ^= m & mask;
+        n >>= 1;
+        m <<= 1;
+    }
+    return r;
+}
+
+uint64_t clmul_8x4_odd_gen(uint64_t n, uint64_t m)
+{
+    return clmul_8x4_even_gen(n >> 8, m >> 8);
+}
+
+Int128 clmul_8x8_even_gen(Int128 n, Int128 m)
+{
+    uint64_t rl, rh;
+
+    rl = clmul_8x4_even_gen(int128_getlo(n), int128_getlo(m));
+    rh = clmul_8x4_even_gen(int128_gethi(n), int128_gethi(m));
+    return int128_make128(rl, rh);
+}
+
+Int128 clmul_8x8_odd_gen(Int128 n, Int128 m)
+{
+    uint64_t rl, rh;
+
+    rl = clmul_8x4_odd_gen(int128_getlo(n), int128_getlo(m));
+    rh = clmul_8x4_odd_gen(int128_gethi(n), int128_gethi(m));
+    return int128_make128(rl, rh);
+}
+
+static uint64_t unpack_8_to_16(uint64_t x)
+{
+    return  (x & 0x000000ff)
+         | ((x & 0x0000ff00) << 8)
+         | ((x & 0x00ff0000) << 16)
+         | ((x & 0xff000000) << 24);
+}
+
+Int128 clmul_8x8_packed_gen(uint64_t n, uint64_t m)
+{
+    uint64_t rl, rh;
+
+    rl = clmul_8x4_even_gen(unpack_8_to_16(n), unpack_8_to_16(m));
+    rh = clmul_8x4_even_gen(unpack_8_to_16(n >> 32), unpack_8_to_16(m >> 32));
+    return int128_make128(rl, rh);
+}
diff --git a/crypto/meson.build b/crypto/meson.build
index 5f03a30d34..9ac1a89802 100644
--- a/crypto/meson.build
+++ b/crypto/meson.build
@@ -48,9 +48,12 @@ if have_afalg
 endif
 crypto_ss.add(when: gnutls, if_true: files('tls-cipher-suites.c'))
 
-util_ss.add(files('sm4.c'))
-util_ss.add(files('aes.c'))
-util_ss.add(files('init.c'))
+util_ss.add(files(
+  'aes.c',
+  'clmul.c',
+  'init.c',
+  'sm4.c',
+))
 if gnutls.found()
   util_ss.add(gnutls)
 endif
-- 
2.34.1
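A possible caller, sketched only for illustration (pmul_bytes is a hypothetical
helper, not part of this series): a target that needs a per-byte polynomial
multiply can process eight bytes per call through the new interface, where
clmul_8x8_low resolves to clmul_8x8_low_gen via the generic host header unless
a host-specific header overrides it:

    #include "qemu/osdep.h"
    #include "crypto/clmul.h"

    /* Hypothetical helper: carry-less multiply of corresponding bytes,
     * eight bytes (one uint64_t) per call to clmul_8x8_low(). */
    static void pmul_bytes(uint64_t *d, const uint64_t *n,
                           const uint64_t *m, size_t n_u64)
    {
        for (size_t i = 0; i < n_u64; i++) {
            d[i] = clmul_8x8_low(n[i], m[i]);
        }
    }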