#define SZR (TCG_TARGET_REG_BITS / 8)
#define TCG_CT_CONST_S16 0x100
+#define TCG_CT_CONST_U16 0x200
#define TCG_CT_CONST_S32 0x400
#define TCG_CT_CONST_U32 0x800
#define TCG_CT_CONST_ZERO 0x1000
#define TCG_CT_CONST_MONE 0x2000
#define TCG_CT_CONST_WSZ 0x4000
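+/* Constant usable as a compare immediate; the exact form depends on cond. */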
+#define TCG_CT_CONST_CMP 0x8000
#define ALL_GENERAL_REGS 0xffffffffu
#define ALL_VECTOR_REGS 0xffffffff00000000ull
        sval = (int32_t)sval;
    }
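+    /*
+     * cmpi/cmpdi take a sign-extended 16-bit immediate while cmpli/cmpldi
+     * take a zero-extended one: equality can use either form, but signed
+     * and unsigned orderings each require the matching extension.
+     */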
+    if (ct & TCG_CT_CONST_CMP) {
+        switch (cond) {
+        case TCG_COND_EQ:
+        case TCG_COND_NE:
+            ct |= TCG_CT_CONST_S16 | TCG_CT_CONST_U16;
+            break;
+        case TCG_COND_LT:
+        case TCG_COND_GE:
+        case TCG_COND_LE:
+        case TCG_COND_GT:
+            ct |= TCG_CT_CONST_S16;
+            break;
+        case TCG_COND_LTU:
+        case TCG_COND_GEU:
+        case TCG_COND_LEU:
+        case TCG_COND_GTU:
+            ct |= TCG_CT_CONST_U16;
+            break;
+        default:
+            g_assert_not_reached();
+        }
+    }
+
    if ((ct & TCG_CT_CONST_S16) && sval == (int16_t)sval) {
        return 1;
    }
+    if ((ct & TCG_CT_CONST_U16) && uval == (uint16_t)uval) {
+        return 1;
+    }
    if ((ct & TCG_CT_CONST_S32) && sval == (int32_t)sval) {
        return 1;
    }
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64 || type == TCG_TYPE_I32);
-    /* Simplify the comparisons below wrt CMPI. */
+    /*
+     * Simplify the comparisons below wrt CMPI.
+     * All of the tests are 16-bit, so a 32-bit sign extend always works.
+     */
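+     * For example, a TCG_TYPE_I32 arg2 of 0xffff8000 becomes -0x8000 here.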
+     */
    if (type == TCG_TYPE_I32) {
        arg2 = (int32_t)arg2;
    }
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotr_i32:
-    case INDEX_op_setcond_i32:
-    case INDEX_op_negsetcond_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i64:
-    case INDEX_op_setcond_i64:
-    case INDEX_op_negsetcond_i64:
        return C_O1_I2(r, r, ri);
    case INDEX_op_mul_i32:
    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
-        return C_O0_I2(r, ri);
-
+        return C_O0_I2(r, rC);
+    case INDEX_op_setcond_i32:
+    case INDEX_op_setcond_i64:
+    case INDEX_op_negsetcond_i32:
+    case INDEX_op_negsetcond_i64:
+        return C_O1_I2(r, r, rC);
    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
-        return C_O1_I4(r, r, ri, rZ, rZ);
+        return C_O1_I4(r, r, rC, rZ, rZ);
+
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, 0, rZ);
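
The rC constraint letter used above is presumably registered through the
backend's companion constraint files; a minimal sketch, assuming
tcg/ppc/tcg-target-con-str.h and tcg/ppc/tcg-target-con-set.h follow their
existing CONST()/constraint-set pattern:

    /* tcg-target-con-str.h: map the 'C' letter to the new constraint bit. */
    CONST('C', TCG_CT_CONST_CMP)

    /* tcg-target-con-set.h: declare the argument-constraint sets used above. */
    C_O0_I2(r, rC)
    C_O1_I2(r, r, rC)
    C_O1_I4(r, r, rC, rZ, rZ)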