We will need a backend interface for performing 8-bit zero-extend.
Use it in tcg_reg_alloc_op in the meantime.
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
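---

For orientation, below is a self-contained sketch of the dispatch shape this
patch introduces. The names used here (ToyOpcode, emit_ext8u, emit_generic,
reg_alloc_op) are hypothetical stand-ins, not QEMU code; only the control
flow mirrors the patch: tcg_reg_alloc_op routes the ext8u opcodes straight
to the new per-backend hook, so each backend's tcg_out_op switch can treat
those opcodes as unreachable, as the hunks below do.

#include <stdio.h>

typedef enum {
    OP_EXT8U,   /* stand-in for INDEX_op_ext8u_i32 / INDEX_op_ext8u_i64 */
    OP_OTHER,   /* everything still handled by the generic path */
} ToyOpcode;

/* Stand-in for the per-backend tcg_out_ext8u() hook added by this patch:
 * keep the low 8 bits of the source, clear everything above. */
static void emit_ext8u(int rd, int rs)
{
    printf("zero-extend low 8 bits of r%d into r%d\n", rs, rd);
}

/* Stand-in for a backend's generic tcg_out_op(). */
static void emit_generic(ToyOpcode opc, int rd, int rs)
{
    printf("generic op %d: r%d, r%d\n", opc, rd, rs);
}

/* Mirrors the new switch in tcg_reg_alloc_op(): ext8u goes to the
 * dedicated hook, all other opcodes still take the generic path. */
static void reg_alloc_op(ToyOpcode opc, int rd, int rs)
{
    switch (opc) {
    case OP_EXT8U:
        emit_ext8u(rd, rs);
        break;
    default:
        emit_generic(opc, rd, rs);
        break;
    }
}

int main(void)
{
    reg_alloc_op(OP_EXT8U, 0, 1);
    reg_alloc_op(OP_OTHER, 2, 3);
    return 0;
}

The per-backend implementations in the hunks below either emit a dedicated
zero-extend instruction or an AND with 0xff, matching this contract.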
tcg_out_ubfm(s, 0, rd, rn, 0, bits);
}
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+ tcg_out_uxt(s, MO_8, rd, rn);
+}
+
static void tcg_out_addsubi(TCGContext *s, int ext, TCGReg rd,
TCGReg rn, int64_t aimm)
{
case INDEX_op_ext32s_i64:
tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a1);
break;
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext8u_i32:
- tcg_out_uxt(s, MO_8, a0, a1);
- break;
case INDEX_op_ext16u_i64:
case INDEX_op_ext16u_i32:
tcg_out_uxt(s, MO_16, a0, a1);
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_ext8s_i64:
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
default:
g_assert_not_reached();
}
tcg_out32(s, 0x06af0070 | (COND_AL << 28) | (rd << 12) | rn);
}
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+ tcg_out_dat_imm(s, COND_AL, ARITH_AND, rd, rn, 0xff);
+}
+
static void __attribute__((unused))
-tcg_out_ext8u(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
+tcg_out_ext8u_cond(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
{
tcg_out_dat_imm(s, cond, ARITH_AND, rd, rn, 0xff);
}
DEFINE_TCG_OUT_ARG(tcg_out_arg_imm32, uint32_t, tcg_out_movi32,
(tcg_out_movi32(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
-DEFINE_TCG_OUT_ARG(tcg_out_arg_reg8, TCGReg, tcg_out_ext8u,
- (tcg_out_ext8u(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
+DEFINE_TCG_OUT_ARG(tcg_out_arg_reg8, TCGReg, tcg_out_ext8u_cond,
+ (tcg_out_ext8u_cond(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
DEFINE_TCG_OUT_ARG(tcg_out_arg_reg16, TCGReg, tcg_out_ext16u,
(tcg_out_ext16u(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
DEFINE_TCG_OUT_ARG(tcg_out_arg_reg32, TCGReg, tcg_out_mov_reg, )
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8u_i32:
default:
g_assert_not_reached();
}
tcg_out_shifti(s, SHIFT_ROL + P_DATA16, reg, 8);
}
-static inline void tcg_out_ext8u(TCGContext *s, int dest, int src)
+static void tcg_out_ext8u(TCGContext *s, TCGReg dest, TCGReg src)
{
/* movzbl */
tcg_debug_assert(src < 4 || TCG_TARGET_REG_BITS == 64);
OP_32_64(ext16s):
tcg_out_ext16s(s, a0, a1, rexw);
break;
- OP_32_64(ext8u):
- tcg_out_ext8u(s, a0, a1);
- break;
OP_32_64(ext16u):
tcg_out_ext16u(s, a0, a1);
break;
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_ext8s_i64:
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
default:
g_assert_not_reached();
}
tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
break;
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- tcg_out_ext8u(s, a0, a1);
- break;
-
case INDEX_op_ext16s_i32:
case INDEX_op_ext16s_i64:
tcg_out_ext16s(s, a0, a1);
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_ext8s_i64:
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
default:
g_assert_not_reached();
}
tcg_out_opc_reg(s, OPC_SEB, rd, TCG_REG_ZERO, rs);
}
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_opc_imm(s, OPC_ANDI, rd, rs, 0xff);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
if (i < ARRAY_SIZE(tcg_target_call_iarg_regs)) {
tmp = tcg_target_call_iarg_regs[i];
}
- tcg_out_opc_imm(s, OPC_ANDI, tmp, arg, 0xff);
+ tcg_out_ext8u(s, tmp, arg);
return tcg_out_call_iarg_reg(s, i, tmp);
}
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_ext8s_i64:
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
default:
g_assert_not_reached();
}
tcg_out32(s, EXTSB | RA(dst) | RS(src));
}
+static void tcg_out_ext8u(TCGContext *s, TCGReg dst, TCGReg src)
+{
+ tcg_out32(s, ANDI | SAI(src, dst, 0xff));
+}
+
static inline void tcg_out_ext16s(TCGContext *s, TCGReg dst, TCGReg src)
{
tcg_out32(s, EXTSH | RA(dst) | RS(src));
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_ext8s_i64:
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
default:
g_assert_not_reached();
}
tcg_out_qemu_st(s, args, true);
break;
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- tcg_out_ext8u(s, a0, a1);
- break;
-
case INDEX_op_ext16u_i32:
case INDEX_op_ext16u_i64:
tcg_out_ext16u(s, a0, a1);
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_ext8s_i64:
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
default:
g_assert_not_reached();
}
tcg_out_insn(s, RRE, LGBR, dest, src);
}
-static void tgen_ext8u(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
+static void tcg_out_ext8u(TCGContext *s, TCGReg dest, TCGReg src)
{
tcg_out_insn(s, RRE, LLGCR, dest, src);
}
return;
}
if ((val & valid) == 0xff) {
- tgen_ext8u(s, TCG_TYPE_I64, dest, dest);
+ tcg_out_ext8u(s, dest, dest);
return;
}
if ((val & valid) == 0xffff) {
}
switch (opc & MO_SIZE) {
case MO_UB:
- tgen_ext8u(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
+ tcg_out_ext8u(s, TCG_REG_R4, data_reg);
break;
case MO_UW:
tgen_ext16u(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
case INDEX_op_ext16s_i32:
tgen_ext16s(s, TCG_TYPE_I32, args[0], args[1]);
break;
- case INDEX_op_ext8u_i32:
- tgen_ext8u(s, TCG_TYPE_I32, args[0], args[1]);
- break;
case INDEX_op_ext16u_i32:
tgen_ext16u(s, TCG_TYPE_I32, args[0], args[1]);
break;
case INDEX_op_ext32s_i64:
tgen_ext32s(s, args[0], args[1]);
break;
- case INDEX_op_ext8u_i64:
- tgen_ext8u(s, TCG_TYPE_I64, args[0], args[1]);
- break;
case INDEX_op_ext16u_i64:
tgen_ext16u(s, TCG_TYPE_I64, args[0], args[1]);
break;
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_ext8s_i64:
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
default:
g_assert_not_reached();
}
g_assert_not_reached();
}
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_arithi(s, rd, rs, 0xff, ARITH_AND);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
*/
switch (op & MO_SIZE) {
case MO_8:
- tcg_out_arithi(s, r, r, 0xff, ARITH_AND);
+ tcg_out_ext8u(s, r, r);
break;
case MO_16:
tcg_out_arithi(s, r, r, 16, SHIFT_SLL);
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_ext8s_i64:
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
default:
g_assert_not_reached();
}
static void tcg_out_movi(TCGContext *s, TCGType type,
TCGReg ret, tcg_target_long arg);
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
+static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
case INDEX_op_ext8s_i64:
tcg_out_ext8s(s, TCG_TYPE_I64, new_args[0], new_args[1]);
break;
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
+ tcg_out_ext8u(s, new_args[0], new_args[1]);
+ break;
default:
if (def->flags & TCG_OPF_VECTOR) {
tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
}
}
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_debug_assert(TCG_TARGET_HAS_ext8u_i64);
+ tcg_out_op_rr(s, INDEX_op_ext8u_i64, rd, rs);
+ } else {
+ tcg_debug_assert(TCG_TARGET_HAS_ext8u_i32);
+ tcg_out_op_rr(s, INDEX_op_ext8u_i32, rd, rs);
+ }
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
CASE_32_64(neg) /* Optional (TCG_TARGET_HAS_neg_*). */
CASE_32_64(not) /* Optional (TCG_TARGET_HAS_not_*). */
- CASE_32_64(ext8u) /* Optional (TCG_TARGET_HAS_ext8u_*). */
CASE_32_64(ext16s) /* Optional (TCG_TARGET_HAS_ext16s_*). */
CASE_32_64(ext16u) /* Optional (TCG_TARGET_HAS_ext16u_*). */
CASE_64(ext32s) /* Optional (TCG_TARGET_HAS_ext32s_i64). */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_ext8s_i64:
+ case INDEX_op_ext8u_i32:
+ case INDEX_op_ext8u_i64:
default:
g_assert_not_reached();
}