[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[PATCH 21/35] crypto: Add aesenc_SB_SR_MC_AK
From: Richard Henderson
Subject: [PATCH 21/35] crypto: Add aesenc_SB_SR_MC_AK
Date: Fri, 2 Jun 2023 19:34:12 -0700
Add a primitive for SubBytes + ShiftRows + MixColumns + AddRoundKey.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
host/include/generic/host/aes-round.h | 4 ++
include/crypto/aes-round.h | 21 ++++++++++
crypto/aes.c | 56 +++++++++++++++++++++++++++
3 files changed, 81 insertions(+)
diff --git a/host/include/generic/host/aes-round.h b/host/include/generic/host/aes-round.h
index 1e9b97d274..dc2c751ac3 100644
--- a/host/include/generic/host/aes-round.h
+++ b/host/include/generic/host/aes-round.h
@@ -15,6 +15,10 @@ void aesenc_MC_accel(AESState *, const AESState *, bool)
void aesenc_SB_SR_accel(AESState *, const AESState *, bool)
QEMU_ERROR("unsupported accel");
+void aesenc_SB_SR_MC_AK_accel(AESState *, const AESState *,
+ const AESState *, bool)
+ QEMU_ERROR("unsupported accel");
+
void aesdec_IMC_accel(AESState *, const AESState *, bool)
QEMU_ERROR("unsupported accel");
diff --git a/include/crypto/aes-round.h b/include/crypto/aes-round.h
index 2d962ede0b..aefa17fcc3 100644
--- a/include/crypto/aes-round.h
+++ b/include/crypto/aes-round.h
@@ -56,6 +56,27 @@ static inline void aesenc_MC(AESState *r, const AESState *st, bool be)
}
}
+/*
+ * Perform SubBytes + ShiftRows + MixColumns + AddRoundKey.
+ */
+
+void aesenc_SB_SR_MC_AK_gen(AESState *ret, const AESState *st,
+ const AESState *rk);
+void aesenc_SB_SR_MC_AK_genrev(AESState *ret, const AESState *st,
+ const AESState *rk);
+
+/*
+ * Dispatch to the host-accelerated implementation when available;
+ * otherwise pick the generic C implementation whose byte layout
+ * matches the requested endianness @be of the state/round key.
+ */
+static inline void aesenc_SB_SR_MC_AK(AESState *r, const AESState *st,
+ const AESState *rk, bool be)
+{
+ if (HAVE_AES_ACCEL) {
+ aesenc_SB_SR_MC_AK_accel(r, st, rk, be);
+ } else if (HOST_BIG_ENDIAN == be) {
+ /* State layout matches host endianness: no index swapping needed. */
+ aesenc_SB_SR_MC_AK_gen(r, st, rk);
+ } else {
+ /* State layout is opposite the host's: use the byte-reversing path. */
+ aesenc_SB_SR_MC_AK_genrev(r, st, rk);
+ }
+}
+
/*
* Perform InvSubBytes + InvShiftRows.
*/
diff --git a/crypto/aes.c b/crypto/aes.c
index 4e654e5404..6172495b46 100644
--- a/crypto/aes.c
+++ b/crypto/aes.c
@@ -1356,6 +1356,62 @@ void aesenc_MC_genrev(AESState *r, const AESState *st)
aesenc_MC_swap(r, st, true);
}
+/*
+ * Perform SubBytes + ShiftRows + MixColumns + AddRoundKey.
+ *
+ * The AES_SH_x constants give the ShiftRows source index for each
+ * output byte; each AES_TeN table lookup applies the S-box (SubBytes)
+ * and contributes one column of the MixColumns matrix, so XOR-ing the
+ * four table entries yields one fully mixed output word.  The final
+ * XOR with @rk is AddRoundKey.
+ */
+static inline void
+aesenc_SB_SR_MC_AK_swap(AESState *r, const AESState *st,
+ const AESState *rk, bool swap)
+{
+ /*
+ * When @swap is set, the state is stored with the opposite byte
+ * order: XOR with 0xf reverses a byte index within the 16-byte
+ * state, XOR with 3 reverses a word index within the four 32-bit
+ * words.  When @swap is clear, both masks are zero (no-ops).
+ */
+ int swap_b = swap * 0xf;
+ int swap_w = swap * 0x3;
+ bool be = HOST_BIG_ENDIAN ^ swap;
+ uint32_t w0, w1, w2, w3;
+
+ w0 = (AES_Te0[st->b[swap_b ^ AES_SH_0]] ^
+ AES_Te1[st->b[swap_b ^ AES_SH_1]] ^
+ AES_Te2[st->b[swap_b ^ AES_SH_2]] ^
+ AES_Te3[st->b[swap_b ^ AES_SH_3]]);
+
+ w1 = (AES_Te0[st->b[swap_b ^ AES_SH_4]] ^
+ AES_Te1[st->b[swap_b ^ AES_SH_5]] ^
+ AES_Te2[st->b[swap_b ^ AES_SH_6]] ^
+ AES_Te3[st->b[swap_b ^ AES_SH_7]]);
+
+ w2 = (AES_Te0[st->b[swap_b ^ AES_SH_8]] ^
+ AES_Te1[st->b[swap_b ^ AES_SH_9]] ^
+ AES_Te2[st->b[swap_b ^ AES_SH_A]] ^
+ AES_Te3[st->b[swap_b ^ AES_SH_B]]);
+
+ w3 = (AES_Te0[st->b[swap_b ^ AES_SH_C]] ^
+ AES_Te1[st->b[swap_b ^ AES_SH_D]] ^
+ AES_Te2[st->b[swap_b ^ AES_SH_E]] ^
+ AES_Te3[st->b[swap_b ^ AES_SH_F]]);
+
+ /* Note that AES_TeX is encoded for big-endian. */
+ if (!be) {
+ w0 = bswap32(w0);
+ w1 = bswap32(w1);
+ w2 = bswap32(w2);
+ w3 = bswap32(w3);
+ }
+
+ /* AddRoundKey, with word indices mirrored when @swap is set. */
+ r->w[swap_w ^ 0] = rk->w[swap_w ^ 0] ^ w0;
+ r->w[swap_w ^ 1] = rk->w[swap_w ^ 1] ^ w1;
+ r->w[swap_w ^ 2] = rk->w[swap_w ^ 2] ^ w2;
+ r->w[swap_w ^ 3] = rk->w[swap_w ^ 3] ^ w3;
+}
+
+/* Full encrypt round for state/round key stored in host byte order. */
+void aesenc_SB_SR_MC_AK_gen(AESState *r, const AESState *st,
+ const AESState *rk)
+{
+ aesenc_SB_SR_MC_AK_swap(r, st, rk, false);
+}
+
+/* As above, but with byte/word order reversed relative to the host. */
+void aesenc_SB_SR_MC_AK_genrev(AESState *r, const AESState *st,
+ const AESState *rk)
+{
+ aesenc_SB_SR_MC_AK_swap(r, st, rk, true);
+}
+
/* Perform InvSubBytes + InvShiftRows. */
static inline void
aesdec_ISB_ISR_swap(AESState *r, const AESState *st, bool swap)
--
2.34.1
- [PATCH 06/35] target/arm: Demultiplex AESE and AESMC, (continued)
- [PATCH 06/35] target/arm: Demultiplex AESE and AESMC, Richard Henderson, 2023/06/02
- [PATCH 07/35] target/arm: Use aesenc_SB_SR, Richard Henderson, 2023/06/02
- [PATCH 10/35] crypto: Add aesdec_ISB_ISR, Richard Henderson, 2023/06/02
- [PATCH 08/35] target/ppc: Use aesenc_SB_SR, Richard Henderson, 2023/06/02
- [PATCH 12/35] target/arm: Use aesdec_ISB_ISR, Richard Henderson, 2023/06/02
- [PATCH 09/35] target/riscv: Use aesenc_SB_SR, Richard Henderson, 2023/06/02
- [PATCH 11/35] target/i386: Use aesdec_ISB_ISR, Richard Henderson, 2023/06/02
- [PATCH 13/35] target/ppc: Use aesdec_ISB_ISR, Richard Henderson, 2023/06/02
- [PATCH 17/35] crypto: Add aesdec_IMC, Richard Henderson, 2023/06/02
- [PATCH 21/35] crypto: Add aesenc_SB_SR_MC_AK, Richard Henderson <=
- [PATCH 23/35] target/ppc: Use aesenc_SB_SR_MC_AK, Richard Henderson, 2023/06/02
- [PATCH 24/35] target/riscv: Use aesenc_SB_SR_MC_AK, Richard Henderson, 2023/06/02
- [PATCH 26/35] target/i386: Use aesdec_ISB_ISR_IMC_AK, Richard Henderson, 2023/06/02
- [PATCH 25/35] crypto: Add aesdec_ISB_ISR_IMC_AK, Richard Henderson, 2023/06/02
- [PATCH 14/35] target/riscv: Use aesdec_ISB_ISR, Richard Henderson, 2023/06/02
- [PATCH 27/35] target/riscv: Use aesdec_ISB_ISR_IMC_AK, Richard Henderson, 2023/06/02
- [PATCH 30/35] host/include/i386: Implement aes-round.h, Richard Henderson, 2023/06/02
- [PATCH 15/35] crypto: Add aesenc_MC, Richard Henderson, 2023/06/02
- [PATCH 31/35] host/include/aarch64: Implement aes-round.h, Richard Henderson, 2023/06/02