Subject: [PATCH v2 33/54] tcg: Introduce arg_slot_stk_ofs
From: Richard Henderson
Date: Mon, 10 Apr 2023 18:04:51 -0700
Unify all computation of argument stack offsets in one function,
arg_slot_stk_ofs().  This requires that we adjust ref_slot to be
in the same units as arg_slot, by adding max_reg_slots during
init_call_layout.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
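As a standalone illustration of the mapping that arg_slot_stk_ofs()
centralizes, here is a minimal sketch; the register count, stack bias,
and word size below are assumed values chosen for the example (modeled
on a 64-bit host with six integer argument registers), not taken from
any particular TCG target:

#include <assert.h>
#include <stdio.h>

/* Assumed stand-ins for the target parameters the patch uses. */
#define NUM_IARG_REGS       6    /* ARRAY_SIZE(tcg_target_call_iarg_regs) */
#define CALL_STACK_OFFSET   0    /* TCG_TARGET_CALL_STACK_OFFSET */
#define WORD_SIZE           8    /* sizeof(tcg_target_long) */
#define STATIC_ARGS_SIZE  128    /* TCG_STATIC_CALL_ARGS_SIZE */

/* Slots 0..NUM_IARG_REGS-1 are passed in registers. */
static int arg_slot_reg_p(unsigned arg_slot)
{
    return arg_slot < NUM_IARG_REGS;
}

/* Mirror of the new arg_slot_stk_ofs(): rebase the slot index past
   the register slots, then scale to a byte offset from the fixed
   stack bias. */
static int arg_slot_stk_ofs(unsigned arg_slot)
{
    unsigned max = STATIC_ARGS_SIZE / WORD_SIZE;
    unsigned stk_slot = arg_slot - NUM_IARG_REGS;

    assert(stk_slot < max);
    return CALL_STACK_OFFSET + stk_slot * WORD_SIZE;
}

int main(void)
{
    /* Walk across the register/stack boundary. */
    for (unsigned slot = 4; slot < 10; slot++) {
        if (arg_slot_reg_p(slot)) {
            printf("slot %u -> register %u\n", slot, slot);
        } else {
            printf("slot %u -> stack offset %d\n",
                   slot, arg_slot_stk_ofs(slot));
        }
    }
    return 0;
}

With these assumed values, slots 0-5 land in registers, slot 6 is the
first stack slot (byte offset 0), and slot 8 maps to offset 16.  The
same arithmetic now serves ref_slot as well: once init_call_layout
adds max_reg_slots, a by-reference value in the third stack slot
carries ref_slot == 8 rather than 2, and arg_slot_stk_ofs(8) produces
the same (8 - 6) * 8 = 16 that the old open-coded
TCG_TARGET_CALL_STACK_OFFSET + 2 * sizeof(tcg_target_long) did.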
tcg/tcg.c | 29 +++++++++++++++++------------
1 file changed, 17 insertions(+), 12 deletions(-)
diff --git a/tcg/tcg.c b/tcg/tcg.c
index fa28db0188..057423c121 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -816,6 +816,15 @@ static inline bool arg_slot_reg_p(unsigned arg_slot)
     return arg_slot < nreg;
 }
 
+static inline int arg_slot_stk_ofs(unsigned arg_slot)
+{
+    unsigned max = TCG_STATIC_CALL_ARGS_SIZE / sizeof(tcg_target_long);
+    unsigned stk_slot = arg_slot - ARRAY_SIZE(tcg_target_call_iarg_regs);
+
+    tcg_debug_assert(stk_slot < max);
+    return TCG_TARGET_CALL_STACK_OFFSET + stk_slot * sizeof(tcg_target_long);
+}
+
 typedef struct TCGCumulativeArgs {
     int arg_idx;                /* tcg_gen_callN args[] */
     int info_in_idx;            /* TCGHelperInfo in[] */
@@ -1055,6 +1064,7 @@ static void init_call_layout(TCGHelperInfo *info)
         }
     }
     assert(ref_base + cum.ref_slot <= max_stk_slots);
+    ref_base += max_reg_slots;
 
     if (ref_base != 0) {
         for (int i = cum.info_in_idx - 1; i >= 0; --i) {
@@ -4826,7 +4836,7 @@ static void load_arg_reg(TCGContext *s, TCGReg reg, TCGTemp *ts,
     }
 }
 
-static void load_arg_stk(TCGContext *s, int stk_slot, TCGTemp *ts,
+static void load_arg_stk(TCGContext *s, unsigned arg_slot, TCGTemp *ts,
                          TCGRegSet allocated_regs)
 {
     /*
@@ -4836,8 +4846,7 @@ static void load_arg_stk(TCGContext *s, int stk_slot, TCGTemp *ts,
      */
     temp_load(s, ts, tcg_target_available_regs[ts->type], allocated_regs, 0);
     tcg_out_st(s, ts->type, ts->reg, TCG_REG_CALL_STACK,
-               TCG_TARGET_CALL_STACK_OFFSET +
-               stk_slot * sizeof(tcg_target_long));
+               arg_slot_stk_ofs(arg_slot));
 }
 
 static void load_arg_normal(TCGContext *s, const TCGCallArgumentLoc *l,
@@ -4848,18 +4857,16 @@ static void load_arg_normal(TCGContext *s, const TCGCallArgumentLoc *l,
         load_arg_reg(s, reg, ts, *allocated_regs);
         tcg_regset_set_reg(*allocated_regs, reg);
     } else {
-        load_arg_stk(s, l->arg_slot - ARRAY_SIZE(tcg_target_call_iarg_regs),
-                     ts, *allocated_regs);
+        load_arg_stk(s, l->arg_slot, ts, *allocated_regs);
     }
 }
 
-static void load_arg_ref(TCGContext *s, int arg_slot, TCGReg ref_base,
+static void load_arg_ref(TCGContext *s, unsigned arg_slot, TCGReg ref_base,
                          intptr_t ref_off, TCGRegSet *allocated_regs)
 {
     TCGReg reg;
-    int stk_slot = arg_slot - ARRAY_SIZE(tcg_target_call_iarg_regs);
 
-    if (stk_slot < 0) {
+    if (arg_slot_reg_p(arg_slot)) {
         reg = tcg_target_call_iarg_regs[arg_slot];
         tcg_reg_free(s, reg, *allocated_regs);
         tcg_out_addi_ptr(s, reg, ref_base, ref_off);
@@ -4869,8 +4876,7 @@ static void load_arg_ref(TCGContext *s, int arg_slot, TCGReg ref_base,
                             *allocated_regs, 0, false);
         tcg_out_addi_ptr(s, reg, ref_base, ref_off);
         tcg_out_st(s, TCG_TYPE_PTR, reg, TCG_REG_CALL_STACK,
-                   TCG_TARGET_CALL_STACK_OFFSET
-                   + stk_slot * sizeof(tcg_target_long));
+                   arg_slot_stk_ofs(arg_slot));
     }
 }
 
@@ -4900,8 +4906,7 @@ static void tcg_reg_alloc_call(TCGContext *s, TCGOp *op)
         case TCG_CALL_ARG_BY_REF:
             load_arg_stk(s, loc->ref_slot, ts, allocated_regs);
             load_arg_ref(s, loc->arg_slot, TCG_REG_CALL_STACK,
-                         TCG_TARGET_CALL_STACK_OFFSET
-                         + loc->ref_slot * sizeof(tcg_target_long),
+                         arg_slot_stk_ofs(loc->ref_slot),
                          &allocated_regs);
             break;
         case TCG_CALL_ARG_BY_REF_N:
--
2.34.1