Add a primitive for InvSubBytes + InvShiftRows + AddRoundKey + InvMixColumns.
Signed-off-by: Richard Henderson <richard.hender...@linaro.org>
---
 host/include/generic/host/aes-round.h |  4 ++++
 include/crypto/aes-round.h            | 21 +++++++++++++++++++++
 crypto/aes.c                          | 20 ++++++++++++++++++++
 3 files changed, 45 insertions(+)

diff --git a/host/include/generic/host/aes-round.h b/host/include/generic/host/aes-round.h
index 848436379d..84f82e53d8 100644
--- a/host/include/generic/host/aes-round.h
+++ b/host/include/generic/host/aes-round.h
@@ -25,6 +25,10 @@ void aesdec_IMC_accel(AESState *, const AESState *, bool)
 void aesdec_ISB_ISR_accel(AESState *, const AESState *, bool)
     QEMU_ERROR("unsupported accel");
 
+void aesdec_ISB_ISR_AK_IMC_accel(AESState *, const AESState *,
+                                 const AESState *, bool)
+    QEMU_ERROR("unsupported accel");
+
 void aesdec_ISB_ISR_IMC_AK_accel(AESState *, const AESState *,
                                  const AESState *, bool)
     QEMU_ERROR("unsupported accel");
diff --git a/include/crypto/aes-round.h b/include/crypto/aes-round.h
index 352687ce11..b48b87671c 100644
--- a/include/crypto/aes-round.h
+++ b/include/crypto/aes-round.h
@@ -113,6 +113,27 @@ static inline void aesdec_IMC(AESState *r, const AESState *st, bool be)
     }
 }
 
+/*
+ * Perform InvSubBytes + InvShiftRows + AddRoundKey + InvMixColumns.
+ */
+
+void aesdec_ISB_ISR_AK_IMC_gen(AESState *ret, const AESState *st,
+                               const AESState *rk);
+void aesdec_ISB_ISR_AK_IMC_genrev(AESState *ret, const AESState *st,
+                                  const AESState *rk);
+
+static inline void aesdec_ISB_ISR_AK_IMC(AESState *r, const AESState *st,
+                                         const AESState *rk, bool be)
+{
+    if (HAVE_AES_ACCEL) {
+        aesdec_ISB_ISR_AK_IMC_accel(r, st, rk, be);
+    } else if (HOST_BIG_ENDIAN == be) {
+        aesdec_ISB_ISR_AK_IMC_gen(r, st, rk);
+    } else {
+        aesdec_ISB_ISR_AK_IMC_genrev(r, st, rk);
+    }
+}
+
 /*
  * Perform InvSubBytes + InvShiftRows + InvMixColumns + AddRoundKey.
  */
diff --git a/crypto/aes.c b/crypto/aes.c
index 1696086868..c0e4bc5580 100644
--- a/crypto/aes.c
+++ b/crypto/aes.c
@@ -1571,6 +1571,26 @@ void aesdec_ISB_ISR_IMC_AK_genrev(AESState *r, const AESState *st,
     aesdec_ISB_ISR_IMC_AK_swap(r, st, rk, true);
 }
 
+void aesdec_ISB_ISR_AK_IMC_gen(AESState *r, const AESState *st,
+                               const AESState *rk)
+{
+    AESState t;
+
+    aesdec_ISB_ISR_gen(&t, st);
+    t.v ^= rk->v;
+    aesdec_IMC_gen(r, &t);
+}
+
+void aesdec_ISB_ISR_AK_IMC_genrev(AESState *r, const AESState *st,
+                                  const AESState *rk)
+{
+    AESState t;
+
+    aesdec_ISB_ISR_genrev(&t, st);
+    t.v ^= rk->v;
+    aesdec_IMC_genrev(r, &t);
+}
+
 /**
  * Expand the cipher key into the encryption key schedule.
  */
-- 
2.34.1
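
For reference, this ordering is the straight (non-equivalent) inverse
cipher round from FIPS-197, where AddRoundKey sits between
InvShiftRows/InvSubBytes and InvMixColumns, as opposed to the existing
ISB_ISR_IMC_AK primitive, which matches the equivalent-inverse-cipher
ordering.  Below is a caller sketch showing how a full AES-128 block
decrypt could be built from it; the aes128_decrypt_block() wrapper, the
rk[11] layout (round keys already reversed into decryption order), and
the use of aesdec_ISB_ISR() for the final round are illustrative
assumptions, not part of this patch.

/*
 * Illustrative sketch only: one AES-128 block decrypt composed from
 * the aes-round.h primitives.  Assumes rk[0..10] holds the round keys
 * in decryption order and that the helpers tolerate r == st (the
 * generic implementations use a local temporary).
 */
#include "qemu/osdep.h"
#include "crypto/aes-round.h"

static void aes128_decrypt_block(AESState *out, const AESState *in,
                                 const AESState rk[11], bool be)
{
    AESState t = *in;
    int i;

    /* Initial AddRoundKey with the last encryption round key. */
    t.v ^= rk[0].v;

    /* Nine middle rounds, each exactly the new primitive. */
    for (i = 1; i < 10; i++) {
        aesdec_ISB_ISR_AK_IMC(&t, &t, &rk[i], be);
    }

    /* Final round omits InvMixColumns. */
    aesdec_ISB_ISR(&t, &t, be);
    t.v ^= rk[10].v;

    *out = t;
}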