FIELD(TBFLAG_A32, STACKCHECK, 22, 1)
/* Bit usage when in AArch64 state */
-FIELD(TBFLAG_A64, TBI0, 0, 1)
-FIELD(TBFLAG_A64, TBI1, 1, 1)
+FIELD(TBFLAG_A64, TBII, 0, 2)
FIELD(TBFLAG_A64, SVEEXC_EL, 2, 2)
FIELD(TBFLAG_A64, ZCR_LEN, 4, 4)
FIELD(TBFLAG_A64, PAUTH_ACTIVE, 8, 1)
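
For readers unfamiliar with the registerfields machinery: FIELD(TBFLAG_A64, TBII, 0, 2) declares a single 2-bit field in bits [1:0] of the TB flags, replacing the former pair of 1-bit TBI0/TBI1 fields, and FIELD_DP32()/FIELD_EX32() deposit and extract it. As a rough standalone sketch (not part of the patch; the constant and helper names below are invented for illustration), the deposit/extract pair amounts to:

#include <stdint.h>
#include <stdio.h>

/* Invented constants mirroring what FIELD(TBFLAG_A64, TBII, 0, 2) expands to. */
#define TBII_SHIFT  0
#define TBII_LENGTH 2
#define TBII_MASK   (((1u << TBII_LENGTH) - 1) << TBII_SHIFT)

/* Roughly what FIELD_DP32(flags, TBFLAG_A64, TBII, val) computes. */
static uint32_t tbii_deposit(uint32_t flags, uint32_t val)
{
    return (flags & ~TBII_MASK) | ((val << TBII_SHIFT) & TBII_MASK);
}

/* Roughly what FIELD_EX32(flags, TBFLAG_A64, TBII) computes. */
static uint32_t tbii_extract(uint32_t flags)
{
    return (flags & TBII_MASK) >> TBII_SHIFT;
}

int main(void)
{
    uint32_t flags = 0;
    /* TBI1 lands in bit 1, TBI0 in bit 0; here TBI1 = 1, TBI0 = 0. */
    flags = tbii_deposit(flags, (1u << 1) | 0u);
    printf("TBII = %u\n", tbii_extract(flags));   /* prints 2 */
    return 0;
}
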
*pc = env->pc;
flags = FIELD_DP32(flags, TBFLAG_ANY, AARCH64_STATE, 1);
/* Get control bits for tagged addresses */
- flags = FIELD_DP32(flags, TBFLAG_A64, TBI0,
+ flags = FIELD_DP32(flags, TBFLAG_A64, TBII,
+ (arm_regime_tbi1(env, mmu_idx) << 1) |
arm_regime_tbi0(env, mmu_idx));
- flags = FIELD_DP32(flags, TBFLAG_A64, TBI1,
- arm_regime_tbi1(env, mmu_idx));
if (cpu_isar_feature(aa64_sve, cpu)) {
int sve_el = sve_exception_el(env, current_el);
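
The packed value written into the TB flags is simply (TBI1 << 1) | TBI0, so TBII takes the values 0 (neither), 1 (TBI0 only), 2 (TBI1 only) or 3 (both). A minimal decode sketch, again illustrative only and not part of the patch:

#include <stdbool.h>

/* Recover the per-TTBR bits from the packed TBII value.  TBI0 governs the
 * low half of the address space (bit 55 == 0, TTBR0); TBI1 governs the
 * high half (bit 55 == 1, TTBR1). */
static void tbii_decode(int tbii, bool *tbi0, bool *tbi1)
{
    *tbi0 = tbii & 1;
    *tbi1 = (tbii >> 1) & 1;
}
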
*/
static void gen_a64_set_pc(DisasContext *s, TCGv_i64 src)
{
+ /* Note that TBII is TBI1:TBI0. */
+ int tbi = s->tbii;
if (s->current_el <= 1) {
/* Test if NEITHER or BOTH TBI values are set. If so, no need to
* examine bit 55 of address, can just generate code.
* If mixed, then test via generated code
*/
- if (s->tbi0 && s->tbi1) {
+ if (tbi == 3) {
TCGv_i64 tmp_reg = tcg_temp_new_i64();
/* Both bits set, sign extension from bit 55 into [63:56] will
 * cover both cases
 */
tcg_gen_shli_i64(tmp_reg, src, 8);
tcg_gen_sari_i64(cpu_pc, tmp_reg, 8);
tcg_temp_free_i64(tmp_reg);
- } else if (!s->tbi0 && !s->tbi1) {
+ } else if (tbi == 0) {
/* Neither bit set, just load it as-is */
tcg_gen_mov_i64(cpu_pc, src);
} else {
tcg_gen_andi_i64(tcg_bit55, src, (1ull << 55));
- if (s->tbi0) {
+ if (tbi == 1) {
/* tbi0==1, tbi1==0, so 0-fill upper byte if bit 55 = 0 */
tcg_gen_andi_i64(tcg_tmpval, src,
0x00FFFFFFFFFFFFFFull);
tcg_temp_free_i64(tcg_tmpval);
}
} else { /* EL > 1 */
- if (s->tbi0) {
+ if (tbi != 0) {
/* Force tag byte to all zero */
tcg_gen_andi_i64(cpu_pc, src, 0x00FFFFFFFFFFFFFFull);
} else {
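
Taken together, the branches above reduce to the following plain-C equivalent of the tag handling that gen_a64_set_pc() emits as TCG ops for EL0/EL1 (a sketch only: the function name is invented, and the TBI1-only branch is not visible in the hunk above and is assumed to mirror the TBI0-only case with the test on bit 55 inverted). For EL > 1 the new code simply clears the tag byte whenever tbii != 0.

#include <stdint.h>
#include <stdbool.h>

/* Sketch of PC canonicalisation for EL0/EL1; not part of the patch. */
static uint64_t set_pc_el01_sketch(uint64_t src, int tbii)
{
    bool bit55 = (src >> 55) & 1;

    switch (tbii) {
    case 3:  /* TBI0 and TBI1 both set: sign-extend bit 55 into [63:56] */
        return bit55 ? (src | 0xFF00000000000000ull)
                     : (src & 0x00FFFFFFFFFFFFFFull);
    case 0:  /* neither set: use the address as-is */
        return src;
    case 1:  /* TBI0 only: clear the tag byte when bit 55 == 0 */
        return bit55 ? src : (src & 0x00FFFFFFFFFFFFFFull);
    default: /* TBI1 only (tbii == 2): assumed to set the tag byte when bit 55 == 1 */
        return bit55 ? (src | 0xFF00000000000000ull) : src;
    }
}
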
dc->condexec_cond = 0;
core_mmu_idx = FIELD_EX32(tb_flags, TBFLAG_ANY, MMUIDX);
dc->mmu_idx = core_to_arm_mmu_idx(env, core_mmu_idx);
- dc->tbi0 = FIELD_EX32(tb_flags, TBFLAG_A64, TBI0);
- dc->tbi1 = FIELD_EX32(tb_flags, TBFLAG_A64, TBI1);
+ dc->tbii = FIELD_EX32(tb_flags, TBFLAG_A64, TBII);
dc->current_el = arm_mmu_idx_to_el(dc->mmu_idx);
#if !defined(CONFIG_USER_ONLY)
dc->user = (dc->current_el == 0);
int user;
#endif
ARMMMUIdx mmu_idx; /* MMU index to use for normal loads/stores */
- bool tbi0; /* TBI0 for EL0/1 or TBI for EL2/3 */
- bool tbi1; /* TBI1 for EL0/1, not used for EL2/3 */
+ uint8_t tbii; /* TBI1|TBI0 for EL0/1 or TBI for EL2/3 */
bool ns; /* Use non-secure CPREG bank on access */
int fp_excp_el; /* FP exception EL or 0 if enabled */
int sve_excp_el; /* SVE exception EL or 0 if enabled */