[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[PATCH v2 2/9] accel/tcg: Move TLB_WATCHPOINT to TLB_SLOW_FLAGS_MASK
From: Richard Henderson
Subject: [PATCH v2 2/9] accel/tcg: Move TLB_WATCHPOINT to TLB_SLOW_FLAGS_MASK
Date: Wed, 21 Jun 2023 14:18:55 +0200
This frees up one bit of the primary tlb flags without
impacting the TLB_NOTDIRTY logic.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
include/exec/cpu-all.h | 8 ++++----
accel/tcg/cputlb.c | 18 ++++++++++++++----
2 files changed, 18 insertions(+), 8 deletions(-)
diff --git a/include/exec/cpu-all.h b/include/exec/cpu-all.h
index 4422f4bb07..b5618613cc 100644
--- a/include/exec/cpu-all.h
+++ b/include/exec/cpu-all.h
@@ -325,8 +325,6 @@ CPUArchState *cpu_copy(CPUArchState *env);
#define TLB_NOTDIRTY (1 << (TARGET_PAGE_BITS_MIN - 2))
/* Set if TLB entry is an IO callback. */
#define TLB_MMIO (1 << (TARGET_PAGE_BITS_MIN - 3))
-/* Set if TLB entry contains a watchpoint. */
-#define TLB_WATCHPOINT (1 << (TARGET_PAGE_BITS_MIN - 4))
/* Set if the slow path must be used; more flags in CPUTLBEntryFull. */
#define TLB_FORCE_SLOW (1 << (TARGET_PAGE_BITS_MIN - 5))
/* Set if TLB entry writes ignored. */
@@ -338,7 +336,7 @@ CPUArchState *cpu_copy(CPUArchState *env);
*/
#define TLB_FLAGS_MASK \
(TLB_INVALID_MASK | TLB_NOTDIRTY | TLB_MMIO \
- | TLB_WATCHPOINT | TLB_FORCE_SLOW | TLB_DISCARD_WRITE)
+ | TLB_FORCE_SLOW | TLB_DISCARD_WRITE)
/*
* Flags stored in CPUTLBEntryFull.slow_flags[x].
@@ -346,8 +344,10 @@ CPUArchState *cpu_copy(CPUArchState *env);
*/
/* Set if TLB entry requires byte swap. */
#define TLB_BSWAP (1 << 0)
+/* Set if TLB entry contains a watchpoint. */
+#define TLB_WATCHPOINT (1 << 1)
-#define TLB_SLOW_FLAGS_MASK TLB_BSWAP
+#define TLB_SLOW_FLAGS_MASK (TLB_BSWAP | TLB_WATCHPOINT)
/* The two sets of flags must not overlap. */
QEMU_BUILD_BUG_ON(TLB_FLAGS_MASK & TLB_SLOW_FLAGS_MASK);
diff --git a/accel/tcg/cputlb.c b/accel/tcg/cputlb.c
index b40ce5ea0f..152c4e9994 100644
--- a/accel/tcg/cputlb.c
+++ b/accel/tcg/cputlb.c
@@ -1984,7 +1984,7 @@ static void *atomic_mmu_lookup(CPUArchState *env, target_ulong addr,
*/
goto stop_the_world;
}
- /* Collect TLB_WATCHPOINT for read. */
+ /* Collect tlb flags for read. */
tlb_addr |= tlbe->addr_read;
/* Notice an IO access or a needs-MMU-lookup access */
@@ -2001,9 +2001,19 @@ static void *atomic_mmu_lookup(CPUArchState *env, target_ulong addr,
notdirty_write(env_cpu(env), addr, size, full, retaddr);
}
- if (unlikely(tlb_addr & TLB_WATCHPOINT)) {
- cpu_check_watchpoint(env_cpu(env), addr, size, full->attrs,
- BP_MEM_READ | BP_MEM_WRITE, retaddr);
+ if (unlikely(tlb_addr & TLB_FORCE_SLOW)) {
+ int wp_flags = 0;
+
+ if (full->slow_flags[MMU_DATA_STORE] & TLB_WATCHPOINT) {
+ wp_flags |= BP_MEM_WRITE;
+ }
+ if (full->slow_flags[MMU_DATA_LOAD] & TLB_WATCHPOINT) {
+ wp_flags |= BP_MEM_READ;
+ }
+ if (wp_flags) {
+ cpu_check_watchpoint(env_cpu(env), addr, size,
+ full->attrs, wp_flags, retaddr);
+ }
}
return hostaddr;
--
2.34.1
- [PATCH v2 0/9] {tcg,aarch64}: Add TLB_CHECK_ALIGNED, Richard Henderson, 2023/06/21
- [PATCH v2 1/9] accel/tcg: Store some tlb flags in CPUTLBEntryFull, Richard Henderson, 2023/06/21
- [PATCH v2 4/9] target/arm: Support 32-byte alignment in pow2_align, Richard Henderson, 2023/06/21
- [PATCH v2 2/9] accel/tcg: Move TLB_WATCHPOINT to TLB_SLOW_FLAGS_MASK,
Richard Henderson <=
- [PATCH v2 5/9] exec/memattrs: Remove target_tlb_bit*, Richard Henderson, 2023/06/21
- [PATCH v2 9/9] target/arm: Do memory type alignment check when translation enabled, Richard Henderson, 2023/06/21
- [PATCH v2 7/9] accel/tcg: Add TLB_CHECK_ALIGNED, Richard Henderson, 2023/06/21
- [PATCH v2 8/9] target/arm: Do memory type alignment check when translation disabled, Richard Henderson, 2023/06/21
- [PATCH v2 3/9] accel/tcg: Renumber TLB_DISCARD_WRITE, Richard Henderson, 2023/06/21
- [PATCH v2 6/9] accel/tcg: Add tlb_fill_flags to CPUTLBEntryFull, Richard Henderson, 2023/06/21