@@ -23,6 +23,9 @@ void aesdec_IMC_accel(AESState *, const AESState *, bool)
 void aesdec_ISB_ISR_AK_accel(AESState *, const AESState *,
                              const AESState *, bool)
     QEMU_ERROR("unsupported accel");
+void aesdec_ISB_ISR_AK_IMC_accel(AESState *, const AESState *,
+                                 const AESState *, bool)
+    QEMU_ERROR("unsupported accel");
 void aesdec_ISB_ISR_IMC_AK_accel(AESState *, const AESState *,
                                  const AESState *, bool)
     QEMU_ERROR("unsupported accel");
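For readers unfamiliar with the stub pattern above: QEMU_ERROR wraps GCC's
"error" function attribute, so the build fails only if a call to the stub
survives optimization. The generic header defines HAVE_AES_ACCEL as false,
so the accel branch of each inline dispatcher is constant-folded away and
the stub is never referenced. A minimal standalone sketch of the mechanism
(the macro body matches qemu/compiler.h; the example functions are
hypothetical):

#define QEMU_ERROR(X) __attribute__((error(X)))

/* Hypothetical stub: any call reachable after optimization breaks the build. */
void example_accel(void) QEMU_ERROR("unsupported accel");

static inline void example_dispatch(void)
{
    if (0) {                /* stands in for HAVE_AES_ACCEL == false */
        example_accel();    /* dead code: eliminated, no build error */
    }
}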
@@ -119,6 +119,27 @@ static inline void aesdec_ISB_ISR_AK(AESState *r, const AESState *st,
     }
 }
 
+/*
+ * Perform InvSubBytes + InvShiftRows + AddRoundKey + InvMixColumns.
+ */
+
+void aesdec_ISB_ISR_AK_IMC_gen(AESState *ret, const AESState *st,
+                               const AESState *rk);
+void aesdec_ISB_ISR_AK_IMC_genrev(AESState *ret, const AESState *st,
+                                  const AESState *rk);
+
+static inline void aesdec_ISB_ISR_AK_IMC(AESState *r, const AESState *st,
+                                         const AESState *rk, bool be)
+{
+    if (HAVE_AES_ACCEL) {
+        aesdec_ISB_ISR_AK_IMC_accel(r, st, rk, be);
+    } else if (HOST_BIG_ENDIAN == be) {
+        aesdec_ISB_ISR_AK_IMC_gen(r, st, rk);
+    } else {
+        aesdec_ISB_ISR_AK_IMC_genrev(r, st, rk);
+    }
+}
+
 /*
  * Perform InvSubBytes + InvShiftRows + InvMixColumns + AddRoundKey.
  */
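For context, the new ordering is exactly the round body of the straight
FIPS-197 inverse cipher: each middle round applies InvShiftRows,
InvSubBytes, AddRoundKey, then InvMixColumns (InvSubBytes and InvShiftRows
act on individual bytes, so their relative order does not matter). A
hypothetical sketch, not part of the patch, of a whole-block decrypt built
on the new dispatcher; the helper name, the round-key layout, the 64-bit
"d" view of AESState, and be=false (little-endian state layout) are
assumptions made for illustration:

/* Decrypt one block with round keys rk[0..rounds] in decryption order. */
static void aes_decrypt_block_sketch(AESState *st, const AESState *rk,
                                     int rounds)
{
    /* Initial AddRoundKey is a plain XOR with the first key. */
    st->d[0] ^= rk[0].d[0];
    st->d[1] ^= rk[0].d[1];

    /* Each middle round maps 1:1 onto the new primitive. */
    for (int i = 1; i < rounds; i++) {
        aesdec_ISB_ISR_AK_IMC(st, st, &rk[i], false);
    }

    /* The final round omits InvMixColumns. */
    aesdec_ISB_ISR_AK(st, st, &rk[rounds], false);
}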
@@ -1564,6 +1564,20 @@ void aesdec_ISB_ISR_IMC_AK_genrev(AESState *r, const AESState *st,
     aesdec_ISB_ISR_IMC_AK_swap(r, st, rk, true);
 }
 
+void aesdec_ISB_ISR_AK_IMC_gen(AESState *ret, const AESState *st,
+                               const AESState *rk)
+{
+    aesdec_ISB_ISR_AK_gen(ret, st, rk);
+    aesdec_IMC_gen(ret, ret);
+}
+
+void aesdec_ISB_ISR_AK_IMC_genrev(AESState *ret, const AESState *st,
+                                  const AESState *rk)
+{
+    aesdec_ISB_ISR_AK_genrev(ret, st, rk);
+    aesdec_IMC_genrev(ret, ret);
+}
+
 /**
  * Expand the cipher key into the encryption key schedule.
  */
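The generic version simply composes the two existing helpers. A host
backend could do the same with native instructions; as a sketch (an
assumption, not part of this patch), on x86 AESDECLAST performs
InvShiftRows + InvSubBytes + AddRoundKey and AESIMC performs
InvMixColumns, so an accelerated hook might look like:

#include <immintrin.h>   /* compile with -maes */

/*
 * Sketch only: byte-order handling (the "be" flag) is omitted; a real
 * backend must byte-swap state and key when guest and host layouts
 * differ.
 */
static inline void aesdec_ISB_ISR_AK_IMC_accel_sketch(AESState *ret,
                                                      const AESState *st,
                                                      const AESState *rk,
                                                      bool be)
{
    __m128i t = _mm_loadu_si128((const __m128i *)st);
    __m128i k = _mm_loadu_si128((const __m128i *)rk);

    (void)be;                        /* see note above */
    t = _mm_aesdeclast_si128(t, k);  /* InvShiftRows + InvSubBytes + AddRoundKey */
    t = _mm_aesimc_si128(t);         /* InvMixColumns */
    _mm_storeu_si128((__m128i *)ret, t);
}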
Add a primitive for InvSubBytes + InvShiftRows + AddRoundKey +
InvMixColumns.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 host/include/generic/host/aes-round.h |  3 +++
 include/crypto/aes-round.h            | 21 +++++++++++++++++++++
 crypto/aes.c                          | 14 ++++++++++++++
 3 files changed, 38 insertions(+)