From: Richard Henderson
Subject: [PATCH v4 32/57] tcg/sparc64: Rename tcg_out_movi_imm32 to tcg_out_movi_u32
Date: Wed, 3 May 2023 08:06:31 +0100
Emphasize that the constant is unsigned.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/sparc64/tcg-target.c.inc | 24 ++++++++++--------------
1 file changed, 10 insertions(+), 14 deletions(-)
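
[Editorial note, not part of the patch: the renamed helper emits SETHI, which loads bits [31:10] of the constant while clearing the low 10 bits and the upper 32 bits of the 64-bit register, followed by an optional OR of the remaining low 10 bits. A minimal standalone sketch of that decomposition, in plain C written for this note with made-up names:

#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint32_t arg = 0xdeadbeefu;          /* arbitrary example constant */

    /* SETHI: bits [31:10] of the constant; the low 10 bits and the
     * upper 32 bits of the 64-bit register become zero. */
    uint64_t ret = (uint64_t)(arg & 0xfffffc00u);

    /* OR in the low 10 bits, skipped when they are all zero. */
    if (arg & 0x3ff) {
        ret |= arg & 0x3ff;
    }

    /* The register now holds the unsigned 32-bit value, zero-extended. */
    assert(ret == (uint64_t)arg);
    return 0;
}
]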
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index 2e6127d506..e244209890 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -399,22 +399,18 @@ static void tcg_out_sethi(TCGContext *s, TCGReg ret, uint32_t arg)
     tcg_out32(s, SETHI | INSN_RD(ret) | ((arg & 0xfffffc00) >> 10));
 }
 
+/* A 13-bit constant sign-extended to 64 bits. */
 static void tcg_out_movi_s13(TCGContext *s, TCGReg ret, int32_t arg)
 {
     tcg_out_arithi(s, ret, TCG_REG_G0, arg, ARITH_OR);
 }
 
-static void tcg_out_movi_imm32(TCGContext *s, TCGReg ret, int32_t arg)
+/* A 32-bit constant zero-extended to 64 bits. */
+static void tcg_out_movi_u32(TCGContext *s, TCGReg ret, uint32_t arg)
 {
-    if (check_fit_i32(arg, 13)) {
-        /* A 13-bit constant sign-extended to 64-bits. */
-        tcg_out_movi_s13(s, ret, arg);
-    } else {
-        /* A 32-bit constant zero-extended to 64 bits. */
-        tcg_out_sethi(s, ret, arg);
-        if (arg & 0x3ff) {
-            tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
-        }
+    tcg_out_sethi(s, ret, arg);
+    if (arg & 0x3ff) {
+        tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
     }
 }
 
@@ -433,7 +429,7 @@ static void tcg_out_movi_int(TCGContext *s, TCGType type, TCGReg ret,
 
     /* A 32-bit constant, or 32-bit zero-extended to 64-bits. */
     if (type == TCG_TYPE_I32 || arg == (uint32_t)arg) {
-        tcg_out_movi_imm32(s, ret, arg);
+        tcg_out_movi_u32(s, ret, arg);
         return;
     }
 
@@ -477,13 +473,13 @@ static void tcg_out_movi_int(TCGContext *s, TCGType type, TCGReg ret,
     /* A 64-bit constant decomposed into 2 32-bit pieces. */
     if (check_fit_i32(lo, 13)) {
         hi = (arg - lo) >> 32;
-        tcg_out_movi_imm32(s, ret, hi);
+        tcg_out_movi_u32(s, ret, hi);
         tcg_out_arithi(s, ret, ret, 32, SHIFT_SLLX);
         tcg_out_arithi(s, ret, ret, lo, ARITH_ADD);
     } else {
         hi = arg >> 32;
-        tcg_out_movi_imm32(s, ret, hi);
-        tcg_out_movi_imm32(s, scratch, lo);
+        tcg_out_movi_u32(s, ret, hi);
+        tcg_out_movi_u32(s, scratch, lo);
         tcg_out_arithi(s, ret, ret, 32, SHIFT_SLLX);
         tcg_out_arith(s, ret, ret, scratch, ARITH_OR);
     }
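
[Editorial note, not part of the patch: the last hunk is the 64-bit path, where the constant is split into two 32-bit halves that are each built with the renamed zero-extending helper. A quick standalone check of the two reassembly strategies used there, in illustrative C only, with example values chosen for this note:

#include <assert.h>
#include <stdint.h>

int main(void)
{
    /* Case 1: the low 32 bits fit in a signed 13-bit immediate, so the
     * emitted sequence is movi_u32 hi; sllx ret, 32; add ret, lo. */
    uint64_t arg = 0x1234567800000123ull;
    int64_t lo = (int32_t)arg;                 /* sign-extended low half */
    assert(lo >= -4096 && lo < 4096);          /* fits in simm13 */
    uint32_t hi = (uint32_t)((arg - lo) >> 32);
    assert((((uint64_t)hi << 32) + (uint64_t)lo) == arg);

    /* Case 2: otherwise both halves are built as zero-extended 32-bit
     * constants (the second one in a scratch register) and combined
     * with sllx + or. */
    arg = 0x123456789abcdef0ull;
    hi = (uint32_t)(arg >> 32);
    uint32_t lo32 = (uint32_t)arg;
    assert((((uint64_t)hi << 32) | lo32) == arg);

    return 0;
}
]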
--
2.34.1