[PATCH v2 21/35] tcg/sparc64: Pass TCGCond to tcg_out_cmp
From: Richard Henderson
Subject: [PATCH v2 21/35] tcg/sparc64: Pass TCGCond to tcg_out_cmp
Date: Sat, 28 Oct 2023 12:45:08 -0700
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/sparc64/tcg-target.c.inc | 21 +++++++++++----------
1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index e16b25e309..10fb8a1a0d 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -646,7 +646,8 @@ static void tcg_out_bpcc(TCGContext *s, int scond, int flags, TCGLabel *l)
tcg_out_bpcc0(s, scond, flags, off19);
}
-static void tcg_out_cmp(TCGContext *s, TCGReg c1, int32_t c2, int c2const)
+static void tcg_out_cmp(TCGContext *s, TCGCond cond,
+ TCGReg c1, int32_t c2, int c2const)
{
tcg_out_arithc(s, TCG_REG_G0, c1, c2, c2const, ARITH_SUBCC);
}
@@ -654,7 +655,7 @@ static void tcg_out_cmp(TCGContext *s, TCGReg c1, int32_t c2, int c2const)
static void tcg_out_brcond_i32(TCGContext *s, TCGCond cond, TCGReg arg1,
int32_t arg2, int const_arg2, TCGLabel *l)
{
- tcg_out_cmp(s, arg1, arg2, const_arg2);
+ tcg_out_cmp(s, cond, arg1, arg2, const_arg2);
tcg_out_bpcc(s, tcg_cond_to_bcond[cond], BPCC_ICC | BPCC_PT, l);
tcg_out_nop(s);
}
@@ -671,7 +672,7 @@ static void tcg_out_movcond_i32(TCGContext *s, TCGCond cond, TCGReg ret,
TCGReg c1, int32_t c2, int c2const,
int32_t v1, int v1const)
{
- tcg_out_cmp(s, c1, c2, c2const);
+ tcg_out_cmp(s, cond, c1, c2, c2const);
tcg_out_movcc(s, cond, MOVCC_ICC, ret, v1, v1const);
}
@@ -691,7 +692,7 @@ static void tcg_out_brcond_i64(TCGContext *s, TCGCond cond, TCGReg arg1,
tcg_out32(s, INSN_OP(0) | INSN_OP2(3) | BPR_PT | INSN_RS1(arg1)
| INSN_COND(rcond) | off16);
} else {
- tcg_out_cmp(s, arg1, arg2, const_arg2);
+ tcg_out_cmp(s, cond, arg1, arg2, const_arg2);
tcg_out_bpcc(s, tcg_cond_to_bcond[cond], BPCC_XCC | BPCC_PT, l);
}
tcg_out_nop(s);
@@ -715,7 +716,7 @@ static void tcg_out_movcond_i64(TCGContext *s, TCGCond cond, TCGReg ret,
if (c2 == 0 && rcond && (!v1const || check_fit_i32(v1, 10))) {
tcg_out_movr(s, rcond, ret, c1, v1, v1const);
} else {
- tcg_out_cmp(s, c1, c2, c2const);
+ tcg_out_cmp(s, cond, c1, c2, c2const);
tcg_out_movcc(s, cond, MOVCC_XCC, ret, v1, v1const);
}
}
@@ -759,13 +760,13 @@ static void tcg_out_setcond_i32(TCGContext *s, TCGCond cond, TCGReg ret,
/* FALLTHRU */
default:
- tcg_out_cmp(s, c1, c2, c2const);
+ tcg_out_cmp(s, cond, c1, c2, c2const);
tcg_out_movi_s13(s, ret, 0);
tcg_out_movcc(s, cond, MOVCC_ICC, ret, neg ? -1 : 1, 1);
return;
}
- tcg_out_cmp(s, c1, c2, c2const);
+ tcg_out_cmp(s, cond, c1, c2, c2const);
if (cond == TCG_COND_LTU) {
if (neg) {
/* 0 - 0 - C = -C = (C ? -1 : 0) */
@@ -799,7 +800,7 @@ static void tcg_out_setcond_i64(TCGContext *s, TCGCond cond, TCGReg ret,
c2 = c1, c2const = 0, c1 = TCG_REG_G0;
/* FALLTHRU */
case TCG_COND_LTU:
- tcg_out_cmp(s, c1, c2, c2const);
+ tcg_out_cmp(s, cond, c1, c2, c2const);
tcg_out_arith(s, ret, TCG_REG_G0, TCG_REG_G0, ARITH_ADDXC);
return;
default:
@@ -814,7 +815,7 @@ static void tcg_out_setcond_i64(TCGContext *s, TCGCond cond, TCGReg ret,
tcg_out_movi_s13(s, ret, 0);
tcg_out_movr(s, rcond, ret, c1, neg ? -1 : 1, 1);
} else {
- tcg_out_cmp(s, c1, c2, c2const);
+ tcg_out_cmp(s, cond, c1, c2, c2const);
tcg_out_movi_s13(s, ret, 0);
tcg_out_movcc(s, cond, MOVCC_XCC, ret, neg ? -1 : 1, 1);
}
@@ -1102,7 +1103,7 @@ static TCGLabelQemuLdst *prepare_host_addr(TCGContext *s, HostAddress *h,
tcg_out_movi_s32(s, TCG_REG_T3, compare_mask);
tcg_out_arith(s, TCG_REG_T3, addr_reg, TCG_REG_T3, ARITH_AND);
}
- tcg_out_cmp(s, TCG_REG_T2, TCG_REG_T3, 0);
+ tcg_out_cmp(s, TCG_COND_NE, TCG_REG_T2, TCG_REG_T3, 0);
ldst = new_ldst_label(s);
ldst->is_ld = is_ld;
--
2.34.1
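
For context, this patch only threads the condition through the callers; the new
cond argument is still unused inside tcg_out_cmp itself. Below is a minimal
sketch of how a follow-up in this series could consume it, assuming an
is_tst_cond() helper and the backend's ARITH_ANDCC opcode as used elsewhere in
the series for TCG_COND_TST{EQ,NE} support; neither appears in this diff:

static void tcg_out_cmp(TCGContext *s, TCGCond cond,
                        TCGReg c1, int32_t c2, int c2const)
{
    /*
     * Test conditions (TSTEQ/TSTNE) compare c1 & c2 against zero, so
     * emit ANDCC for them; all other conditions keep the SUBCC compare.
     * Either way only the condition codes are wanted, so the arithmetic
     * result is discarded into %g0.
     */
    tcg_out_arithc(s, TCG_REG_G0, c1, c2, c2const,
                   is_tst_cond(cond) ? ARITH_ANDCC : ARITH_SUBCC);
}

Note also why the prepare_host_addr() hunk passes TCG_COND_NE explicitly
rather than a caller-supplied condition: the softmmu TLB comparison always
branches to the slow path when the tag does not match, so the condition at
that call site is fixed.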