[PATCH v5 37/54] tcg: Merge tcg_out_helper_load_regs into caller
From: Richard Henderson
Subject: [PATCH v5 37/54] tcg: Merge tcg_out_helper_load_regs into caller
Date: Mon, 15 May 2023 07:32:56 -0700
Now that tcg_out_helper_load_regs is not recursive, we can
merge it into its only caller, tcg_out_helper_load_slots.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
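As context for review: the loop being merged in depends on argument slots
being ordered so that register slots come before stack slots; a backward
scan can then flush every stack-bound value first (freeing its source
register) before any register-to-register shuffling happens. Below is a
minimal standalone sketch of that shape; slot_is_reg, emit_store_to_stack,
and emit_reg_moves are hypothetical stand-ins, not the real TCG helpers.

#include <stddef.h>

typedef struct {
    int dst;    /* abstract argument slot number */
    int src;    /* register currently holding the value */
} Move;

/* Hypothetical stand-ins for arg_slot_reg_p() and the emitters. */
static int slot_is_reg(int slot) { return slot < 4; }
static void emit_store_to_stack(const Move *m) { (void)m; }
static void emit_reg_moves(Move *mov, size_t n) { (void)mov; (void)n; }

static void load_slots(Move *mov, size_t nmov)
{
    size_t i;

    /* Scan backward: stack slots sort after register slots. */
    for (i = nmov; i-- > 0; ) {
        if (slot_is_reg(mov[i].dst)) {
            /* mov[0..i] all target registers; emit them as a group. */
            emit_reg_moves(mov, i + 1);
            return;
        }
        /* Store to the stack; mov[i].src is free from here on. */
        emit_store_to_stack(&mov[i]);
    }
}

In the real code that final register group is handled by the switch (nmov)
cascade, which uses the tcg_out_movext helpers to resolve any overlap
between sources and destinations.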
tcg/tcg.c | 89 +++++++++++++++++++++++++------------------------------
1 file changed, 41 insertions(+), 48 deletions(-)
diff --git a/tcg/tcg.c b/tcg/tcg.c
index 8688248284..ff7aec23e7 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -5223,12 +5223,50 @@ static int tcg_out_helper_stk_ofs(TCGType type, unsigned slot)
     return ofs;
 }
 
-static void tcg_out_helper_load_regs(TCGContext *s,
-                                     unsigned nmov, TCGMovExtend *mov,
-                                     const TCGLdstHelperParam *parm)
+static void tcg_out_helper_load_slots(TCGContext *s,
+                                      unsigned nmov, TCGMovExtend *mov,
+                                      const TCGLdstHelperParam *parm)
 {
+    unsigned i;
     TCGReg dst3;
 
+    /*
+     * Start from the end, storing to the stack first.
+     * This frees those registers, so we need not consider overlap.
+     */
+    for (i = nmov; i-- > 0; ) {
+        unsigned slot = mov[i].dst;
+
+        if (arg_slot_reg_p(slot)) {
+            goto found_reg;
+        }
+
+        TCGReg src = mov[i].src;
+        TCGType dst_type = mov[i].dst_type;
+        MemOp dst_mo = dst_type == TCG_TYPE_I32 ? MO_32 : MO_64;
+
+        /* The argument is going onto the stack; extend into scratch. */
+        if ((mov[i].src_ext & MO_SIZE) != dst_mo) {
+            tcg_debug_assert(parm->ntmp != 0);
+            mov[i].dst = src = parm->tmp[0];
+            tcg_out_movext1(s, &mov[i]);
+        }
+
+        tcg_out_st(s, dst_type, src, TCG_REG_CALL_STACK,
+                   tcg_out_helper_stk_ofs(dst_type, slot));
+    }
+    return;
+
+ found_reg:
+    /*
+     * The remaining arguments are in registers.
+     * Convert slot numbers to argument registers.
+     */
+    nmov = i + 1;
+    for (i = 0; i < nmov; ++i) {
+        mov[i].dst = tcg_target_call_iarg_regs[mov[i].dst];
+    }
+
     switch (nmov) {
     case 4:
         /* The backend must have provided enough temps for the worst case. */
@@ -5269,51 +5307,6 @@ static void tcg_out_helper_load_regs(TCGContext *s,
     }
 }
 
-static void tcg_out_helper_load_slots(TCGContext *s,
-                                      unsigned nmov, TCGMovExtend *mov,
-                                      const TCGLdstHelperParam *parm)
-{
-    unsigned i;
-
-    /*
-     * Start from the end, storing to the stack first.
-     * This frees those registers, so we need not consider overlap.
-     */
-    for (i = nmov; i-- > 0; ) {
-        unsigned slot = mov[i].dst;
-
-        if (arg_slot_reg_p(slot)) {
-            goto found_reg;
-        }
-
-        TCGReg src = mov[i].src;
-        TCGType dst_type = mov[i].dst_type;
-        MemOp dst_mo = dst_type == TCG_TYPE_I32 ? MO_32 : MO_64;
-
-        /* The argument is going onto the stack; extend into scratch. */
-        if ((mov[i].src_ext & MO_SIZE) != dst_mo) {
-            tcg_debug_assert(parm->ntmp != 0);
-            mov[i].dst = src = parm->tmp[0];
-            tcg_out_movext1(s, &mov[i]);
-        }
-
-        tcg_out_st(s, dst_type, src, TCG_REG_CALL_STACK,
-                   tcg_out_helper_stk_ofs(dst_type, slot));
-    }
-    return;
-
- found_reg:
-    /*
-     * The remaining arguments are in registers.
-     * Convert slot numbers to argument registers.
-     */
-    nmov = i + 1;
-    for (i = 0; i < nmov; ++i) {
-        mov[i].dst = tcg_target_call_iarg_regs[mov[i].dst];
-    }
-    tcg_out_helper_load_regs(s, nmov, mov, parm);
-}
-
 static void tcg_out_helper_load_imm(TCGContext *s, unsigned slot,
                                     TCGType type, tcg_target_long imm,
                                     const TCGLdstHelperParam *parm)
--
2.34.1