tcg_gen_extu_i32_i64(t, v);
tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
(dst & 1 ? 0 : 32), 32);
- tcg_temp_free_i64(t);
gen_update_fprs_dirty(dc, dst);
}
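/* 32-bit carry from the preceding add: set when the result is unsigned-less-than one of the addends. */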
carry_32 = tcg_temp_new_i32();
tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
-#if TARGET_LONG_BITS == 64
- tcg_temp_free_i32(cc_src1_32);
- tcg_temp_free_i32(cc_src2_32);
-#endif
-
return carry_32;
}
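/* 32-bit borrow from the preceding sub: set when the first operand is unsigned-less-than the second. */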
carry_32 = tcg_temp_new_i32();
tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
-#if TARGET_LONG_BITS == 64
- tcg_temp_free_i32(cc_src1_32);
- tcg_temp_free_i32(cc_src2_32);
-#endif
-
return carry_32;
}
generated the carry in the first place. */
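/* Redo the flag-setting add as the low half of a double-word add; the high half then yields src1 + src2 + carry, and the low result is discarded. */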
carry = tcg_temp_new();
tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
- tcg_temp_free(carry);
goto add_done;
}
carry_32 = gen_add32_carry32();
tcg_gen_add_tl(dst, src1, src2);
tcg_gen_add_tl(dst, dst, carry);
- tcg_temp_free_i32(carry_32);
-#if TARGET_LONG_BITS == 64
- tcg_temp_free(carry);
-#endif
-
add_done:
if (update_cc) {
tcg_gen_mov_tl(cpu_cc_src, src1);
generated the carry in the first place. */
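/* Likewise for subtract: the high half of the double-word sub yields src1 - src2 - borrow, and the low result is discarded. */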
carry = tcg_temp_new();
tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
- tcg_temp_free(carry);
goto sub_done;
}
carry_32 = gen_sub32_carry32();
tcg_gen_sub_tl(dst, src1, src2);
tcg_gen_sub_tl(dst, dst, carry);
- tcg_temp_free_i32(carry_32);
-#if TARGET_LONG_BITS == 64
- tcg_temp_free(carry);
-#endif
-
sub_done:
if (update_cc) {
tcg_gen_mov_tl(cpu_cc_src, src1);
tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
zero, cpu_cc_src2);
- tcg_temp_free(zero);
// b2 = T0 & 1;
// env->y = (b2 << 31) | (env->y >> 1);
gen_mov_reg_N(t0, cpu_psr);
gen_mov_reg_V(r_temp, cpu_psr);
tcg_gen_xor_tl(t0, t0, r_temp);
- tcg_temp_free(r_temp);
// T0 = (b1 << 31) | (T0 >> 1);
// src1 = T0;
tcg_gen_shli_tl(t0, t0, 31);
tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
- tcg_temp_free(t0);
tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
}
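/* Compute the full 64-bit product; the high 32 bits are written back to the Y register below. */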
tcg_gen_mul_i64(dst, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
-
tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
tcg_gen_xor_tl(dst, dst, t0);
gen_mov_reg_Z(t0, src);
tcg_gen_or_tl(dst, dst, t0);
- tcg_temp_free(t0);
}
// N ^ V
gen_mov_reg_V(t0, src);
gen_mov_reg_N(dst, src);
tcg_gen_xor_tl(dst, dst, t0);
- tcg_temp_free(t0);
}
// C | Z
gen_mov_reg_Z(t0, src);
gen_mov_reg_C(dst, src);
tcg_gen_or_tl(dst, dst, t0);
- tcg_temp_free(t0);
}
// !0: FCC0 | FCC1
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_or_tl(dst, dst, t0);
- tcg_temp_free(t0);
}
// 1 or 2: FCC0 ^ FCC1
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_xor_tl(dst, dst, t0);
- tcg_temp_free(t0);
}
// 1: FCC0 & !FCC1
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_andc_tl(dst, dst, t0);
- tcg_temp_free(t0);
}
// 2: !FCC0 & FCC1
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_andc_tl(dst, t0, dst);
- tcg_temp_free(t0);
}
// 3: FCC0 & FCC1
gen_mov_reg_FCC0(dst, src, fcc_offset);
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, t0);
- tcg_temp_free(t0);
}
// 0: !(FCC0 | FCC1)
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_or_tl(dst, dst, t0);
tcg_gen_xori_tl(dst, dst, 0x1);
- tcg_temp_free(t0);
}
// 0 or 3: !(FCC0 ^ FCC1)
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_xor_tl(dst, dst, t0);
tcg_gen_xori_tl(dst, dst, 0x1);
- tcg_temp_free(t0);
}
// !1: !(FCC0 & !FCC1)
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_andc_tl(dst, dst, t0);
tcg_gen_xori_tl(dst, dst, 0x1);
- tcg_temp_free(t0);
}
// !2: !(!FCC0 & FCC1)
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_andc_tl(dst, t0, dst);
tcg_gen_xori_tl(dst, dst, 0x1);
- tcg_temp_free(t0);
}
// !3: !(FCC0 & FCC1)
gen_mov_reg_FCC1(t0, src, fcc_offset);
tcg_gen_and_tl(dst, dst, t0);
tcg_gen_xori_tl(dst, dst, 0x1);
- tcg_temp_free(t0);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
t = tcg_const_tl(pc1);
z = tcg_const_tl(0);
tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
- tcg_temp_free(t);
- tcg_temp_free(z);
dc->pc = DYNAMIC_PC;
}
TCGv zero = tcg_const_tl(0);
tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
-
- tcg_temp_free(npc0);
- tcg_temp_free(npc1);
- tcg_temp_free(zero);
}
/* call this function before using the condition register as it may
save_state(dc);
t = tcg_const_i32(which);
gen_helper_raise_exception(cpu_env, t);
- tcg_temp_free_i32(t);
dc->base.is_jmp = DISAS_NORETURN;
}
{
TCGv_i32 r_mask = tcg_const_i32(mask);
gen_helper_check_align(cpu_env, addr, r_mask);
- tcg_temp_free_i32(r_mask);
}
static inline void gen_mov_pc_npc(DisasContext *dc)
TCGv m1 = tcg_const_tl(0xff);
gen_address_mask(dc, addr);
tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
- tcg_temp_free(m1);
}
/* asi moves */
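/* The generic ASI load helper returns 64 bits; truncate to the target word size. */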
TCGv_i64 t64 = tcg_temp_new_i64();
gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
tcg_gen_trunc_i64_tl(dst, t64);
- tcg_temp_free_i64(t64);
}
#endif
- tcg_temp_free_i32(r_mop);
- tcg_temp_free_i32(r_asi);
}
break;
}
tcg_gen_add_tl(saddr, saddr, four);
tcg_gen_add_tl(daddr, daddr, four);
}
-
- tcg_temp_free(saddr);
- tcg_temp_free(daddr);
- tcg_temp_free(four);
- tcg_temp_free_i32(tmp);
}
break;
#endif
TCGv_i64 t64 = tcg_temp_new_i64();
tcg_gen_extu_tl_i64(t64, src);
gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
- tcg_temp_free_i64(t64);
}
#endif
- tcg_temp_free_i32(r_mop);
- tcg_temp_free_i32(r_asi);
/* A write to a TLB register may alter page maps. End the TB. */
dc->npc = DYNAMIC_PC;
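/* Atomic compare-and-swap: memory is updated with rd's value only on a match with cmpv, and rd always receives the old memory contents. */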
tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
da.mem_idx, da.memop);
gen_store_gpr(dc, rd, oldv);
- tcg_temp_free(oldv);
break;
default:
/* ??? Should be DAE_invalid_asi. */
s64 = tcg_const_i64(0xff);
gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
- tcg_temp_free_i64(s64);
- tcg_temp_free_i32(r_mop);
- tcg_temp_free_i32(r_asi);
tcg_gen_trunc_i64_tl(dst, t64);
- tcg_temp_free_i64(t64);
/* End the TB. */
dc->npc = DYNAMIC_PC;
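/* The first 64-bit half was loaded into d64; it is copied into the even register only after the second half loads. */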
tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
da.memop | MO_ALIGN_4);
tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
- tcg_temp_free_i64(d64);
break;
default:
g_assert_not_reached();
tcg_gen_add_tl(addr, addr, eight);
memop = da.memop;
}
- tcg_temp_free(eight);
} else {
gen_exception(dc, TT_ILL_INSN);
}
gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
d32 = gen_dest_fpr_F(dc);
tcg_gen_extrl_i64_i32(d32, d64);
- tcg_temp_free_i64(d64);
gen_store_fpr_F(dc, rd, d32);
break;
case 8:
tcg_gen_addi_tl(addr, addr, 8);
gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
- tcg_temp_free_i64(d64);
break;
default:
g_assert_not_reached();
}
- tcg_temp_free_i32(r_mop);
- tcg_temp_free_i32(r_asi);
}
break;
}
tcg_gen_add_tl(addr, addr, eight);
memop = da.memop;
}
- tcg_temp_free(eight);
} else {
gen_exception(dc, TT_ILL_INSN);
}
} else {
tcg_gen_extr32_i64(hi, lo, tmp);
}
- tcg_temp_free_i64(tmp);
}
break;
save_state(dc);
gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
- tcg_temp_free_i32(r_asi);
- tcg_temp_free_i32(r_mop);
/* See above. */
if ((da.memop & MO_BSWAP) == MO_TE) {
} else {
tcg_gen_extr32_i64(hi, lo, tmp);
}
- tcg_temp_free_i64(tmp);
}
break;
}
}
gen_address_mask(dc, addr);
tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
- tcg_temp_free_i64(t64);
}
break;
save_state(dc);
gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
- tcg_temp_free_i32(r_mop);
- tcg_temp_free_i32(r_asi);
- tcg_temp_free_i64(t64);
}
break;
}
tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
da.mem_idx, da.memop);
gen_store_gpr(dc, rd, oldv);
- tcg_temp_free(oldv);
break;
default:
/* ??? Should be DAE_invalid_asi. */
switch (da.type) {
case GET_ASI_EXCP:
- tcg_temp_free_i64(t64);
return;
case GET_ASI_DIRECT:
gen_address_mask(dc, addr);
save_state(dc);
gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
- tcg_temp_free_i32(r_mop);
- tcg_temp_free_i32(r_asi);
}
break;
}
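/* Split the 64-bit result into the register pair: rd | 1 receives the low word, rd the high word. */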
tcg_gen_extr_i64_i32(lo, hi, t64);
- tcg_temp_free_i64(t64);
gen_store_gpr(dc, rd | 1, lo);
gen_store_gpr(dc, rd, hi);
}
tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
tcg_gen_add_tl(d_addr, d_addr, eight);
}
-
- tcg_temp_free(d_addr);
- tcg_temp_free(eight);
}
break;
default:
save_state(dc);
gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
- tcg_temp_free_i32(r_mop);
- tcg_temp_free_i32(r_asi);
}
break;
}
-
- tcg_temp_free_i64(t64);
}
#endif
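/* Fold the 64-bit comparison down to a 32-bit 0/1 value so movcond_i32 can be used below. */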
TCGv_i64 c64 = tcg_temp_new_i64();
tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
tcg_gen_extrl_i64_i32(c32, c64);
- tcg_temp_free_i64(c64);
}
s1 = gen_load_fpr_F(dc, rs);
tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
- tcg_temp_free_i32(c32);
- tcg_temp_free_i32(zero);
gen_store_fpr_F(dc, rd, dst);
}
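/* Widen the 32-bit offset to pointer width and add it to the trap-state pointer. */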
TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
- tcg_temp_free_ptr(r_tl_tmp);
}
-
- tcg_temp_free_i32(r_tl);
}
#endif
tcg_gen_neg_tl(t1, t1);
tcg_gen_or_tl(lo2, lo2, t1);
tcg_gen_and_tl(dst, dst, lo2);
-
- tcg_temp_free(lo1);
- tcg_temp_free(lo2);
- tcg_temp_free(t1);
- tcg_temp_free(t2);
}
static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
tcg_gen_neg_tl(tmp, tmp);
}
tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
-
- tcg_temp_free(tmp);
}
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
tcg_gen_shri_tl(t2, t2, 1);
tcg_gen_or_tl(dst, t1, t2);
-
- tcg_temp_free(t1);
- tcg_temp_free(t2);
- tcg_temp_free(shift);
}
#endif
}
gen_helper_raise_exception(cpu_env, trap);
- tcg_temp_free_i32(trap);
if (cond == 8) {
/* An unconditional trap ends the TB. */
}
gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
r_const);
- tcg_temp_free_ptr(r_tickptr);
- tcg_temp_free_i32(r_const);
gen_store_gpr(dc, rd, cpu_dst);
if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
/* I/O operations in icount mode must end the TB */
}
gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
r_const);
- tcg_temp_free_ptr(r_tickptr);
- tcg_temp_free_i32(r_const);
gen_store_gpr(dc, rd, cpu_dst);
if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
/* I/O operations in icount mode must end the TB */
gen_load_trap_state_at_tl(r_tsptr, cpu_env);
tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tpc));
- tcg_temp_free_ptr(r_tsptr);
}
break;
case 1: // tnpc
gen_load_trap_state_at_tl(r_tsptr, cpu_env);
tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tnpc));
- tcg_temp_free_ptr(r_tsptr);
}
break;
case 2: // tstate
gen_load_trap_state_at_tl(r_tsptr, cpu_env);
tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tstate));
- tcg_temp_free_ptr(r_tsptr);
}
break;
case 3: // tt
gen_load_trap_state_at_tl(r_tsptr, cpu_env);
tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tt));
- tcg_temp_free_ptr(r_tsptr);
}
break;
case 4: // tick
}
gen_helper_tick_get_count(cpu_tmp0, cpu_env,
r_tickptr, r_const);
- tcg_temp_free_ptr(r_tickptr);
- tcg_temp_free_i32(r_const);
if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
/* I/O operations in icount mode must end the TB */
dc->base.is_jmp = DISAS_EXIT;
}
gen_helper_tick_set_limit(r_tickptr,
cpu_tick_cmpr);
- tcg_temp_free_ptr(r_tickptr);
/* End TB to handle timer interrupt */
dc->base.is_jmp = DISAS_EXIT;
}
}
gen_helper_tick_set_count(r_tickptr,
cpu_tmp0);
- tcg_temp_free_ptr(r_tickptr);
/* End TB to handle timer interrupt */
dc->base.is_jmp = DISAS_EXIT;
}
}
gen_helper_tick_set_limit(r_tickptr,
cpu_stick_cmpr);
- tcg_temp_free_ptr(r_tickptr);
/* End TB to handle timer interrupt */
dc->base.is_jmp = DISAS_EXIT;
}
gen_load_trap_state_at_tl(r_tsptr, cpu_env);
tcg_gen_st_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tpc));
- tcg_temp_free_ptr(r_tsptr);
}
break;
case 1: // tnpc
gen_load_trap_state_at_tl(r_tsptr, cpu_env);
tcg_gen_st_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tnpc));
- tcg_temp_free_ptr(r_tsptr);
}
break;
case 2: // tstate
tcg_gen_st_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state,
tstate));
- tcg_temp_free_ptr(r_tsptr);
}
break;
case 3: // tt
gen_load_trap_state_at_tl(r_tsptr, cpu_env);
tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tt));
- tcg_temp_free_ptr(r_tsptr);
}
break;
case 4: // tick
}
gen_helper_tick_set_count(r_tickptr,
cpu_tmp0);
- tcg_temp_free_ptr(r_tickptr);
/* End TB to handle timer interrupt */
dc->base.is_jmp = DISAS_EXIT;
}
}
gen_helper_tick_set_limit(r_tickptr,
cpu_hstick_cmpr);
- tcg_temp_free_ptr(r_tickptr);
/* End TB to handle timer interrupt */
dc->base.is_jmp = DISAS_EXIT;
}
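/* The low word of the 64-bit load goes to rd + 1; shift the high word down for rd. */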
gen_store_gpr(dc, rd + 1, cpu_val);
tcg_gen_shri_i64(t64, t64, 32);
tcg_gen_trunc_i64_tl(cpu_val, t64);
- tcg_temp_free_i64(t64);
tcg_gen_ext32u_tl(cpu_val, cpu_val);
}
break;
tcg_gen_qemu_ld_i64(t64, cpu_addr,
dc->mem_idx, MO_TEUQ);
gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
- tcg_temp_free_i64(t64);
break;
}
#endif
tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
MO_TEUQ | MO_ALIGN_4);
gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
- tcg_temp_free_i64(cpu_src1_64);
- tcg_temp_free_i64(cpu_src2_64);
break;
case 0x23: /* lddf, load double fpreg */
gen_address_mask(dc, cpu_addr);
t64 = tcg_temp_new_i64();
tcg_gen_concat_tl_i64(t64, lo, cpu_val);
tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
- tcg_temp_free_i64(t64);
}
break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)