[PATCH 15/35] crypto: Add aesenc_MC
From: Richard Henderson
Subject: [PATCH 15/35] crypto: Add aesenc_MC
Date: Fri, 2 Jun 2023 19:34:06 -0700
Add a primitive for MixColumns.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 host/include/generic/host/aes-round.h |  3 ++
 include/crypto/aes-round.h            | 18 +++++++++
 crypto/aes.c                          | 58 +++++++++++++++++++++++++++
 3 files changed, 79 insertions(+)
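
[Note for readers, not part of the patch: MixColumns treats each four-byte
column of the AES state as a polynomial over GF(2^8) and multiplies it by
{03}x^3 + {01}x^2 + {01}x + {02}.  Below is a minimal, self-contained sketch
of the transform on one column using the usual xtime() doubling; the helper
names are hypothetical and chosen for illustration only.

#include <stdint.h>

/* Multiply by {02} in GF(2^8), reducing modulo the AES polynomial 0x11b. */
static uint8_t xtime(uint8_t x)
{
    return (x << 1) ^ ((x & 0x80) ? 0x1b : 0);
}

/*
 * MixColumns on one column:
 * out[i] = {02}a[i] ^ {03}a[i+1] ^ {01}a[i+2] ^ {01}a[i+3],
 * indices mod 4, where {03}x = xtime(x) ^ x.
 */
static void mix_one_column(uint8_t c[4])
{
    uint8_t a0 = c[0], a1 = c[1], a2 = c[2], a3 = c[3];

    c[0] = xtime(a0) ^ (xtime(a1) ^ a1) ^ a2 ^ a3;    /* {02}{03}{01}{01} */
    c[1] = a0 ^ xtime(a1) ^ (xtime(a2) ^ a2) ^ a3;    /* {01}{02}{03}{01} */
    c[2] = a0 ^ a1 ^ xtime(a2) ^ (xtime(a3) ^ a3);    /* {01}{01}{02}{03} */
    c[3] = (xtime(a0) ^ a0) ^ a1 ^ a2 ^ xtime(a3);    /* {03}{01}{01}{02} */
}

The table-driven code in the patch computes the same result: AES_mc_rot[x]
packs the {02}, {01}, {01}, {03} multiples of a byte into one 32-bit word,
so each column becomes four table loads combined with three rotates and
three XORs.]
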
diff --git a/host/include/generic/host/aes-round.h b/host/include/generic/host/aes-round.h
index cb4fed61fe..7c48db24b6 100644
--- a/host/include/generic/host/aes-round.h
+++ b/host/include/generic/host/aes-round.h
@@ -9,6 +9,9 @@
#define HAVE_AES_ACCEL false
#define ATTR_AES_ACCEL
+void aesenc_MC_accel(AESState *, const AESState *, bool)
+ QEMU_ERROR("unsupported accel");
+
void aesenc_SB_SR_accel(AESState *, const AESState *, bool)
QEMU_ERROR("unsupported accel");
diff --git a/include/crypto/aes-round.h b/include/crypto/aes-round.h
index ff1914bd63..f25e9572a3 100644
--- a/include/crypto/aes-round.h
+++ b/include/crypto/aes-round.h
@@ -38,6 +38,24 @@ static inline void aesenc_SB_SR(AESState *r, const AESState *st, bool be)
}
}
+/*
+ * Perform MixColumns.
+ */
+
+void aesenc_MC_gen(AESState *ret, const AESState *st);
+void aesenc_MC_genrev(AESState *ret, const AESState *st);
+
+static inline void aesenc_MC(AESState *r, const AESState *st, bool be)
+{
+ if (HAVE_AES_ACCEL) {
+ aesenc_MC_accel(r, st, be);
+ } else if (HOST_BIG_ENDIAN == be) {
+ aesenc_MC_gen(r, st);
+ } else {
+ aesenc_MC_genrev(r, st);
+ }
+}
+
/*
* Perform InvSubBytes + InvShiftRows.
*/
diff --git a/crypto/aes.c b/crypto/aes.c
index 937377647f..c7123eddd5 100644
--- a/crypto/aes.c
+++ b/crypto/aes.c
@@ -28,6 +28,8 @@
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "qemu/osdep.h"
+#include "qemu/bswap.h"
+#include "qemu/bitops.h"
#include "crypto/aes.h"
#include "crypto/aes-round.h"
@@ -1298,6 +1300,62 @@ void aesenc_SB_SR_genrev(AESState *r, const AESState *st)
aesenc_SB_SR_swap(r, st, true);
}
+/* Perform MixColumns. */
+static inline void
+aesenc_MC_swap(AESState *r, const AESState *st, bool swap)
+{
+ int swap_b = swap * 0xf;
+ int swap_w = swap * 0x3;
+ bool be = HOST_BIG_ENDIAN ^ swap;
+ uint32_t t;
+
+ t = ( AES_mc_rot[st->b[swap_b ^ 0x0]] ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0x1]], 8) ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0x2]], 16) ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0x3]], 24));
+ if (be) {
+ t = bswap32(t);
+ }
+ r->w[swap_w ^ 0] = t;
+
+ t = ( AES_mc_rot[st->b[swap_b ^ 0x4]] ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0x5]], 8) ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0x6]], 16) ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0x7]], 24));
+ if (be) {
+ t = bswap32(t);
+ }
+ r->w[swap_w ^ 1] = t;
+
+ t = ( AES_mc_rot[st->b[swap_b ^ 0x8]] ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0x9]], 8) ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0xA]], 16) ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0xB]], 24));
+ if (be) {
+ t = bswap32(t);
+ }
+ r->w[swap_w ^ 2] = t;
+
+ t = ( AES_mc_rot[st->b[swap_b ^ 0xC]] ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0xD]], 8) ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0xE]], 16) ^
+ rol32(AES_mc_rot[st->b[swap_b ^ 0xF]], 24));
+ if (be) {
+ t = bswap32(t);
+ }
+ r->w[swap_w ^ 3] = t;
+}
+
+void aesenc_MC_gen(AESState *r, const AESState *st)
+{
+ aesenc_MC_swap(r, st, false);
+}
+
+void aesenc_MC_genrev(AESState *r, const AESState *st)
+{
+ aesenc_MC_swap(r, st, true);
+}
+
/* Perform InvSubBytes + InvShiftRows. */
static inline void
aesdec_ISB_ISR_swap(AESState *r, const AESState *st, bool swap)
--
2.34.1