[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[PATCH v4 04/54] tcg/i386: Drop r0+r1 local variables from tcg_out_tlb_load
From: |
Richard Henderson |
Subject: |
[PATCH v4 04/54] tcg/i386: Drop r0+r1 local variables from tcg_out_tlb_load |
Date: |
Wed, 3 May 2023 07:56:39 +0100 |
Use TCG_REG_L[01] constants directly.
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/i386/tcg-target.c.inc | 32 ++++++++++++++++----------------
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index 909eecd4a3..78160f453b 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -1810,8 +1810,6 @@ static inline void tcg_out_tlb_load(TCGContext *s, TCGReg
addrlo, TCGReg addrhi,
int mem_index, MemOp opc,
tcg_insn_unit **label_ptr, int which)
{
- const TCGReg r0 = TCG_REG_L0;
- const TCGReg r1 = TCG_REG_L1;
TCGType ttype = TCG_TYPE_I32;
TCGType tlbtype = TCG_TYPE_I32;
int trexw = 0, hrexw = 0, tlbrexw = 0;
@@ -1835,15 +1833,15 @@ static inline void tcg_out_tlb_load(TCGContext *s,
TCGReg addrlo, TCGReg addrhi,
}
}
- tcg_out_mov(s, tlbtype, r0, addrlo);
- tcg_out_shifti(s, SHIFT_SHR + tlbrexw, r0,
+ tcg_out_mov(s, tlbtype, TCG_REG_L0, addrlo);
+ tcg_out_shifti(s, SHIFT_SHR + tlbrexw, TCG_REG_L0,
TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
- tcg_out_modrm_offset(s, OPC_AND_GvEv + trexw, r0, TCG_AREG0,
+ tcg_out_modrm_offset(s, OPC_AND_GvEv + trexw, TCG_REG_L0, TCG_AREG0,
TLB_MASK_TABLE_OFS(mem_index) +
offsetof(CPUTLBDescFast, mask));
- tcg_out_modrm_offset(s, OPC_ADD_GvEv + hrexw, r0, TCG_AREG0,
+ tcg_out_modrm_offset(s, OPC_ADD_GvEv + hrexw, TCG_REG_L0, TCG_AREG0,
TLB_MASK_TABLE_OFS(mem_index) +
offsetof(CPUTLBDescFast, table));
@@ -1851,19 +1849,21 @@ static inline void tcg_out_tlb_load(TCGContext *s,
TCGReg addrlo, TCGReg addrhi,
copy the address and mask. For lesser alignments, check that we don't
cross pages for the complete access. */
if (a_bits >= s_bits) {
- tcg_out_mov(s, ttype, r1, addrlo);
+ tcg_out_mov(s, ttype, TCG_REG_L1, addrlo);
} else {
- tcg_out_modrm_offset(s, OPC_LEA + trexw, r1, addrlo, s_mask - a_mask);
+ tcg_out_modrm_offset(s, OPC_LEA + trexw, TCG_REG_L1,
+ addrlo, s_mask - a_mask);
}
tlb_mask = (target_ulong)TARGET_PAGE_MASK | a_mask;
- tgen_arithi(s, ARITH_AND + trexw, r1, tlb_mask, 0);
+ tgen_arithi(s, ARITH_AND + trexw, TCG_REG_L1, tlb_mask, 0);
- /* cmp 0(r0), r1 */
- tcg_out_modrm_offset(s, OPC_CMP_GvEv + trexw, r1, r0, which);
+ /* cmp 0(TCG_REG_L0), TCG_REG_L1 */
+ tcg_out_modrm_offset(s, OPC_CMP_GvEv + trexw,
+ TCG_REG_L1, TCG_REG_L0, which);
/* Prepare for both the fast path add of the tlb addend, and the slow
path function argument setup. */
- tcg_out_mov(s, ttype, r1, addrlo);
+ tcg_out_mov(s, ttype, TCG_REG_L1, addrlo);
/* jne slow_path */
tcg_out_opc(s, OPC_JCC_long + JCC_JNE, 0, 0, 0);
@@ -1871,8 +1871,8 @@ static inline void tcg_out_tlb_load(TCGContext *s, TCGReg
addrlo, TCGReg addrhi,
s->code_ptr += 4;
if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
- /* cmp 4(r0), addrhi */
- tcg_out_modrm_offset(s, OPC_CMP_GvEv, addrhi, r0, which + 4);
+ /* cmp 4(TCG_REG_L0), addrhi */
+ tcg_out_modrm_offset(s, OPC_CMP_GvEv, addrhi, TCG_REG_L0, which + 4);
/* jne slow_path */
tcg_out_opc(s, OPC_JCC_long + JCC_JNE, 0, 0, 0);
@@ -1882,8 +1882,8 @@ static inline void tcg_out_tlb_load(TCGContext *s, TCGReg
addrlo, TCGReg addrhi,
/* TLB Hit. */
- /* add addend(r0), r1 */
- tcg_out_modrm_offset(s, OPC_ADD_GvEv + hrexw, r1, r0,
+ /* add addend(TCG_REG_L0), TCG_REG_L1 */
+ tcg_out_modrm_offset(s, OPC_ADD_GvEv + hrexw, TCG_REG_L1, TCG_REG_L0,
offsetof(CPUTLBEntry, addend));
}
--
2.34.1
- [PATCH v4 00/54] tcg: Simplify calls to load/store helpers, Richard Henderson, 2023/05/03
- [PATCH v4 01/54] tcg/i386: Rationalize args to tcg_out_qemu_{ld,st}, Richard Henderson, 2023/05/03
- [PATCH v4 02/54] tcg/i386: Generalize multi-part load overlap test, Richard Henderson, 2023/05/03
- [PATCH v4 05/54] tcg/i386: Introduce tcg_out_testi, Richard Henderson, 2023/05/03
- [PATCH v4 04/54] tcg/i386: Drop r0+r1 local variables from tcg_out_tlb_load,
Richard Henderson <=
- [PATCH v4 03/54] tcg/i386: Introduce HostAddress, Richard Henderson, 2023/05/03
- [PATCH v4 06/54] tcg/i386: Introduce prepare_host_addr, Richard Henderson, 2023/05/03
- [PATCH v4 07/54] tcg/i386: Use indexed addressing for softmmu fast path, Richard Henderson, 2023/05/03
- [PATCH v4 08/54] tcg/aarch64: Rationalize args to tcg_out_qemu_{ld, st}, Richard Henderson, 2023/05/03
- [PATCH v4 09/54] tcg/aarch64: Introduce HostAddress, Richard Henderson, 2023/05/03
- [PATCH v4 11/54] tcg/arm: Rationalize args to tcg_out_qemu_{ld,st}, Richard Henderson, 2023/05/03
- [PATCH v4 10/54] tcg/aarch64: Introduce prepare_host_addr, Richard Henderson, 2023/05/03
- [PATCH v4 12/54] tcg/arm: Introduce HostAddress, Richard Henderson, 2023/05/03
- [PATCH v4 13/54] tcg/arm: Introduce prepare_host_addr, Richard Henderson, 2023/05/03
- [PATCH v4 14/54] tcg/loongarch64: Rationalize args to tcg_out_qemu_{ld, st}, Richard Henderson, 2023/05/03