Allow the target to set TLB flags that apply to all of the
comparators. Remove MemTxAttrs.byte_swap, as the bit is not
relevant to memory transactions, only to the page mapping.
Adjust target/sparc to set TLB_BSWAP directly.
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Message-id: 20240301204110.656742-4-richard.henderson@linaro.org
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
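
For readers following the hunks below, here is a minimal standalone
sketch of the new flow (this is not QEMU code: apart from TLB_BSWAP and
the tlb_fill_flags field, every name is hypothetical). The target's
fill hook records per-page flags in tlb_fill_flags, and the generic
TLB code ORs them into the comparator flags alongside its own:

    /* Hypothetical standalone model of the tlb_fill_flags mechanism. */
    #include <stdint.h>
    #include <stdio.h>

    #define TLB_INVALID_MASK  (1u << 0)
    #define TLB_BSWAP         (1u << 1)   /* invert endianness for this page */

    /* Stand-in for CPUTLBEntryFull: only the fields this sketch needs. */
    typedef struct {
        uint64_t phys_addr;
        uint8_t lg_page_size;
        uint8_t tlb_fill_flags;  /* extra flags requested by the fill hook */
    } PageFull;

    /* Target-side hook (sparc-like): mark an inverted-endian mapping. */
    static void target_fill(PageFull *full, int invert_endian)
    {
        if (invert_endian) {
            full->tlb_fill_flags |= TLB_BSWAP;
        }
    }

    /* Generic side: fold the target-requested flags into the flags that
     * will be applied to the TLB comparators. */
    static uint32_t make_read_flags(const PageFull *full, unsigned page_bits)
    {
        uint32_t read_flags = full->tlb_fill_flags;

        if (full->lg_page_size < page_bits) {
            /* Sub-page mapping: repeat the MMU check on every access. */
            read_flags |= TLB_INVALID_MASK;
        }
        return read_flags;
    }

    int main(void)
    {
        PageFull full = { .phys_addr = 0x1000, .lg_page_size = 12 };

        target_fill(&full, 1);
        printf("read_flags = %#x\n", (unsigned)make_read_flags(&full, 12));
        return 0;
    }

The point of the design, as the commit message says, is that a per-page
property like byte swapping lives in the TLB entry rather than in the
transaction attributes, which stay reserved for bus-level properties.
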
" prot=%x idx=%d\n",
addr, full->phys_addr, prot, mmu_idx);
- read_flags = 0;
+ read_flags = full->tlb_fill_flags;
if (full->lg_page_size < TARGET_PAGE_BITS) {
/* Repeat the MMU check and TLB fill on every access. */
read_flags |= TLB_INVALID_MASK;
}
- if (full->attrs.byte_swap) {
- read_flags |= TLB_BSWAP;
- }
is_ram = memory_region_is_ram(section->mr);
is_romd = memory_region_is_romd(section->mr);
unsigned int memory:1;
/* Requester ID (for MSI for example) */
unsigned int requester_id:16;
- /* Invert endianness for this page */
- unsigned int byte_swap:1;
} MemTxAttrs;
/* Bus masters which don't specify any attributes will get this,
/* @lg_page_size contains the log2 of the page size. */
uint8_t lg_page_size;
+ /* Additional tlb flags requested by tlb_fill. */
+ uint8_t tlb_fill_flags;
+
/*
* Additional tlb flags for use by the slow path. If non-zero,
* the corresponding CPUTLBEntry comparator must have TLB_FORCE_SLOW.
int do_fault = 0;
if (TTE_IS_IE(env->dtlb[i].tte)) {
- full->attrs.byte_swap = true;
+ full->tlb_fill_flags |= TLB_BSWAP;
}
/* access ok? */