@@ -85,10 +85,10 @@ DEF(brcond_i32, 0, 2, 2, TCG_OPF_BB_END | TCG_OPF_COND_BRANCH)
DEF(add2_i32, 2, 4, 0, IMPL(TCG_TARGET_HAS_add2_i32))
DEF(sub2_i32, 2, 4, 0, IMPL(TCG_TARGET_HAS_sub2_i32))
-DEF(mulu2_i32, 2, 2, 0, IMPL(TCG_TARGET_HAS_mulu2_i32))
-DEF(muls2_i32, 2, 2, 0, IMPL(TCG_TARGET_HAS_muls2_i32))
-DEF(muluh_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_muluh_i32))
-DEF(mulsh_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_mulsh_i32))
+DEF(mulu2_i32, 2, 2, 0, IMPL(TCG_TARGET_HAS_mulu2(TCG_TYPE_I32)))
+DEF(muls2_i32, 2, 2, 0, IMPL(TCG_TARGET_HAS_muls2(TCG_TYPE_I32)))
+DEF(muluh_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_muluh(TCG_TYPE_I32)))
+DEF(mulsh_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_mulsh(TCG_TYPE_I32)))
DEF(brcond2_i32, 0, 4, 2,
TCG_OPF_BB_END | TCG_OPF_COND_BRANCH | IMPL(TCG_TARGET_REG_BITS == 32))
DEF(setcond2_i32, 1, 4, 1, IMPL(TCG_TARGET_REG_BITS == 32))
@@ -183,10 +183,10 @@ DEF(ctpop_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ctpop(TCG_TYPE_I64)))
DEF(add2_i64, 2, 4, 0, IMPL64 | IMPL(TCG_TARGET_HAS_add2_i64))
DEF(sub2_i64, 2, 4, 0, IMPL64 | IMPL(TCG_TARGET_HAS_sub2_i64))
-DEF(mulu2_i64, 2, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_mulu2_i64))
-DEF(muls2_i64, 2, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_muls2_i64))
-DEF(muluh_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_muluh_i64))
-DEF(mulsh_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_mulsh_i64))
+DEF(mulu2_i64, 2, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_mulu2(TCG_TYPE_I64)))
+DEF(muls2_i64, 2, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_muls2(TCG_TYPE_I64)))
+DEF(muluh_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_muluh(TCG_TYPE_I64)))
+DEF(mulsh_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_mulsh(TCG_TYPE_I64)))
#define DATA64_ARGS (TCG_TARGET_REG_BITS == 64 ? 1 : 2)
@@ -18,6 +18,10 @@
#define TCG_TARGET_HAS_ctpop(T) 0
#define TCG_TARGET_HAS_ctz(T) 1
#define TCG_TARGET_HAS_div(T) 1
+#define TCG_TARGET_HAS_muls2(T) 0
+#define TCG_TARGET_HAS_mulu2(T) 0
+#define TCG_TARGET_HAS_mulsh(T) (T == TCG_TYPE_I64)
+#define TCG_TARGET_HAS_muluh(T) (T == TCG_TYPE_I64)
#define TCG_TARGET_HAS_rem(T) 1
#define TCG_TARGET_HAS_rot(T) 1
@@ -40,10 +44,6 @@
#define TCG_TARGET_HAS_negsetcond_i32 1
#define TCG_TARGET_HAS_add2_i32 1
#define TCG_TARGET_HAS_sub2_i32 1
-#define TCG_TARGET_HAS_mulu2_i32 0
-#define TCG_TARGET_HAS_muls2_i32 0
-#define TCG_TARGET_HAS_muluh_i32 0
-#define TCG_TARGET_HAS_mulsh_i32 0
#define TCG_TARGET_HAS_extr_i64_i32 0
#define TCG_TARGET_HAS_qemu_st8_i32 0
@@ -60,10 +60,6 @@
#define TCG_TARGET_HAS_negsetcond_i64 1
#define TCG_TARGET_HAS_add2_i64 1
#define TCG_TARGET_HAS_sub2_i64 1
-#define TCG_TARGET_HAS_mulu2_i64 0
-#define TCG_TARGET_HAS_muls2_i64 0
-#define TCG_TARGET_HAS_muluh_i64 1
-#define TCG_TARGET_HAS_mulsh_i64 1
/*
* Without FEAT_LSE2, we must use LDXP+STXP to implement atomic 128-bit load,
@@ -29,6 +29,10 @@ extern bool use_neon_instructions;
#define TCG_TARGET_HAS_ctpop(T) 0
#define TCG_TARGET_HAS_ctz(T) use_armv7_instructions
#define TCG_TARGET_HAS_div(T) use_idiv_instructions
+#define TCG_TARGET_HAS_muls2(T) 1
+#define TCG_TARGET_HAS_mulu2(T) 1
+#define TCG_TARGET_HAS_mulsh(T) 0
+#define TCG_TARGET_HAS_muluh(T) 0
#define TCG_TARGET_HAS_rem(T) 0
#define TCG_TARGET_HAS_rot(T) 1
@@ -49,10 +53,6 @@ extern bool use_neon_instructions;
#define TCG_TARGET_HAS_sextract_i32 use_armv7_instructions
#define TCG_TARGET_HAS_extract2_i32 1
#define TCG_TARGET_HAS_negsetcond_i32 1
-#define TCG_TARGET_HAS_mulu2_i32 1
-#define TCG_TARGET_HAS_muls2_i32 1
-#define TCG_TARGET_HAS_muluh_i32 0
-#define TCG_TARGET_HAS_mulsh_i32 0
#define TCG_TARGET_HAS_qemu_st8_i32 0
#define TCG_TARGET_HAS_qemu_ldst_i128 0
@@ -31,6 +31,10 @@
#define TCG_TARGET_HAS_ctpop(T) have_popcnt
#define TCG_TARGET_HAS_ctz(T) 1
#define TCG_TARGET_HAS_div2(T) 1
+#define TCG_TARGET_HAS_muls2(T) 1
+#define TCG_TARGET_HAS_mulu2(T) 1
+#define TCG_TARGET_HAS_mulsh(T) 0
+#define TCG_TARGET_HAS_muluh(T) 0
#define TCG_TARGET_HAS_rot(T) 1
/* optional integer and vector instructions */
@@ -52,10 +56,6 @@
#define TCG_TARGET_HAS_negsetcond_i32 1
#define TCG_TARGET_HAS_add2_i32 1
#define TCG_TARGET_HAS_sub2_i32 1
-#define TCG_TARGET_HAS_mulu2_i32 1
-#define TCG_TARGET_HAS_muls2_i32 1
-#define TCG_TARGET_HAS_muluh_i32 0
-#define TCG_TARGET_HAS_mulsh_i32 0
#if TCG_TARGET_REG_BITS == 64
/* Keep 32-bit values zero-extended in a register. */
@@ -73,10 +73,6 @@
#define TCG_TARGET_HAS_negsetcond_i64 1
#define TCG_TARGET_HAS_add2_i64 1
#define TCG_TARGET_HAS_sub2_i64 1
-#define TCG_TARGET_HAS_mulu2_i64 1
-#define TCG_TARGET_HAS_muls2_i64 1
-#define TCG_TARGET_HAS_muluh_i64 0
-#define TCG_TARGET_HAS_mulsh_i64 0
#define TCG_TARGET_HAS_qemu_st8_i32 0
#else
#define TCG_TARGET_HAS_qemu_st8_i32 1
@@ -15,6 +15,10 @@
#define TCG_TARGET_HAS_ctpop(T) 0
#define TCG_TARGET_HAS_ctz(T) 1
#define TCG_TARGET_HAS_div(T) 1
+#define TCG_TARGET_HAS_muls2(T) 0
+#define TCG_TARGET_HAS_mulu2(T) 0
+#define TCG_TARGET_HAS_mulsh(T) 1
+#define TCG_TARGET_HAS_muluh(T) 1
#define TCG_TARGET_HAS_rem(T) 1
#define TCG_TARGET_HAS_rot(T) 1
@@ -33,10 +37,6 @@
#define TCG_TARGET_HAS_extract2_i32 0
#define TCG_TARGET_HAS_add2_i32 0
#define TCG_TARGET_HAS_sub2_i32 0
-#define TCG_TARGET_HAS_mulu2_i32 0
-#define TCG_TARGET_HAS_muls2_i32 0
-#define TCG_TARGET_HAS_muluh_i32 1
-#define TCG_TARGET_HAS_mulsh_i32 1
#define TCG_TARGET_HAS_ext8s_i32 1
#define TCG_TARGET_HAS_ext16s_i32 1
#define TCG_TARGET_HAS_ext8u_i32 1
@@ -60,10 +60,6 @@
#define TCG_TARGET_HAS_ext32u_i64 1
#define TCG_TARGET_HAS_add2_i64 0
#define TCG_TARGET_HAS_sub2_i64 0
-#define TCG_TARGET_HAS_mulu2_i64 0
-#define TCG_TARGET_HAS_muls2_i64 0
-#define TCG_TARGET_HAS_muluh_i64 1
-#define TCG_TARGET_HAS_mulsh_i64 1
#define TCG_TARGET_HAS_qemu_ldst_i128 (cpuinfo & CPUINFO_LSX)
@@ -44,6 +44,10 @@ extern bool use_mips32r2_instructions;
#define TCG_TARGET_HAS_ctpop(T) 0
#define TCG_TARGET_HAS_ctz(T) 0
#define TCG_TARGET_HAS_div(T) 1
+#define TCG_TARGET_HAS_muls2(T) (!use_mips32r6_instructions)
+#define TCG_TARGET_HAS_mulu2(T) (!use_mips32r6_instructions)
+#define TCG_TARGET_HAS_mulsh(T) 1
+#define TCG_TARGET_HAS_muluh(T) 1
#define TCG_TARGET_HAS_rem(T) 1
#define TCG_TARGET_HAS_rot(T) use_mips32r2_instructions
@@ -55,10 +59,6 @@ extern bool use_mips32r2_instructions;
#define TCG_TARGET_HAS_not(T) 1
#define TCG_TARGET_HAS_orc(T) 0
-#define TCG_TARGET_HAS_mulu2_i32 (!use_mips32r6_instructions)
-#define TCG_TARGET_HAS_muls2_i32 (!use_mips32r6_instructions)
-#define TCG_TARGET_HAS_muluh_i32 1
-#define TCG_TARGET_HAS_mulsh_i32 1
#define TCG_TARGET_HAS_negsetcond_i32 0
#if TCG_TARGET_REG_BITS == 64
@@ -67,10 +67,6 @@ extern bool use_mips32r2_instructions;
#define TCG_TARGET_HAS_extr_i64_i32 1
#define TCG_TARGET_HAS_add2_i64 0
#define TCG_TARGET_HAS_sub2_i64 0
-#define TCG_TARGET_HAS_mulu2_i64 (!use_mips32r6_instructions)
-#define TCG_TARGET_HAS_muls2_i64 (!use_mips32r6_instructions)
-#define TCG_TARGET_HAS_muluh_i64 1
-#define TCG_TARGET_HAS_mulsh_i64 1
#define TCG_TARGET_HAS_ext32s_i64 1
#define TCG_TARGET_HAS_ext32u_i64 1
#define TCG_TARGET_HAS_negsetcond_i64 0
@@ -22,6 +22,10 @@
#define TCG_TARGET_HAS_ctpop(T) have_isa_2_06
#define TCG_TARGET_HAS_ctz(T) have_isa_3_00
#define TCG_TARGET_HAS_div(T) 1
+#define TCG_TARGET_HAS_muls2(T) 0
+#define TCG_TARGET_HAS_mulu2(T) 0
+#define TCG_TARGET_HAS_mulsh(T) 1
+#define TCG_TARGET_HAS_muluh(T) 1
#define TCG_TARGET_HAS_rem(T) 1
#define TCG_TARGET_HAS_rot(T) 1
@@ -44,10 +48,6 @@
#define TCG_TARGET_HAS_sextract_i32 0
#define TCG_TARGET_HAS_extract2_i32 0
#define TCG_TARGET_HAS_negsetcond_i32 1
-#define TCG_TARGET_HAS_mulu2_i32 0
-#define TCG_TARGET_HAS_muls2_i32 0
-#define TCG_TARGET_HAS_muluh_i32 1
-#define TCG_TARGET_HAS_mulsh_i32 1
#define TCG_TARGET_HAS_qemu_st8_i32 0
#if TCG_TARGET_REG_BITS == 64
@@ -67,10 +67,6 @@
#define TCG_TARGET_HAS_negsetcond_i64 1
#define TCG_TARGET_HAS_add2_i64 1
#define TCG_TARGET_HAS_sub2_i64 1
-#define TCG_TARGET_HAS_mulu2_i64 0
-#define TCG_TARGET_HAS_muls2_i64 0
-#define TCG_TARGET_HAS_muluh_i64 1
-#define TCG_TARGET_HAS_mulsh_i64 1
#endif
#define TCG_TARGET_HAS_qemu_ldst_i128 \
@@ -15,6 +15,10 @@
#define TCG_TARGET_HAS_ctpop(T) (cpuinfo & CPUINFO_ZBB)
#define TCG_TARGET_HAS_ctz(T) (cpuinfo & CPUINFO_ZBB)
#define TCG_TARGET_HAS_div(T) 1
+#define TCG_TARGET_HAS_muls2(T) 0
+#define TCG_TARGET_HAS_mulu2(T) 0
+#define TCG_TARGET_HAS_mulsh(T) (T == TCG_TYPE_I64)
+#define TCG_TARGET_HAS_muluh(T) (T == TCG_TYPE_I64)
#define TCG_TARGET_HAS_rem(T) 1
#define TCG_TARGET_HAS_rot(T) (cpuinfo & CPUINFO_ZBB)
@@ -33,10 +37,6 @@
#define TCG_TARGET_HAS_extract2_i32 0
#define TCG_TARGET_HAS_add2_i32 1
#define TCG_TARGET_HAS_sub2_i32 1
-#define TCG_TARGET_HAS_mulu2_i32 0
-#define TCG_TARGET_HAS_muls2_i32 0
-#define TCG_TARGET_HAS_muluh_i32 0
-#define TCG_TARGET_HAS_mulsh_i32 0
#define TCG_TARGET_HAS_ext8s_i32 1
#define TCG_TARGET_HAS_ext16s_i32 1
#define TCG_TARGET_HAS_ext8u_i32 1
@@ -59,10 +59,6 @@
#define TCG_TARGET_HAS_ext32u_i64 1
#define TCG_TARGET_HAS_add2_i64 1
#define TCG_TARGET_HAS_sub2_i64 1
-#define TCG_TARGET_HAS_mulu2_i64 0
-#define TCG_TARGET_HAS_muls2_i64 0
-#define TCG_TARGET_HAS_muluh_i64 1
-#define TCG_TARGET_HAS_mulsh_i64 1
#define TCG_TARGET_HAS_qemu_ldst_i128 0
@@ -34,6 +34,10 @@ extern uint64_t s390_facilities[3];
#define TCG_TARGET_HAS_ctpop(T) 1
#define TCG_TARGET_HAS_ctz(T) 0
#define TCG_TARGET_HAS_div2(T) 1
+#define TCG_TARGET_HAS_muls2(T) (T == TCG_TYPE_I64 && HAVE_FACILITY(MISC_INSN_EXT2))
+#define TCG_TARGET_HAS_mulu2(T) (T == TCG_TYPE_I64)
+#define TCG_TARGET_HAS_mulsh(T) 0
+#define TCG_TARGET_HAS_muluh(T) 0
#define TCG_TARGET_HAS_rot(T) 1
/* optional integer and vector instructions */
@@ -55,10 +59,6 @@ extern uint64_t s390_facilities[3];
#define TCG_TARGET_HAS_negsetcond_i32 1
#define TCG_TARGET_HAS_add2_i32 1
#define TCG_TARGET_HAS_sub2_i32 1
-#define TCG_TARGET_HAS_mulu2_i32 0
-#define TCG_TARGET_HAS_muls2_i32 0
-#define TCG_TARGET_HAS_muluh_i32 0
-#define TCG_TARGET_HAS_mulsh_i32 0
#define TCG_TARGET_HAS_extr_i64_i32 0
#define TCG_TARGET_HAS_qemu_st8_i32 0
@@ -75,10 +75,6 @@ extern uint64_t s390_facilities[3];
#define TCG_TARGET_HAS_negsetcond_i64 1
#define TCG_TARGET_HAS_add2_i64 1
#define TCG_TARGET_HAS_sub2_i64 1
-#define TCG_TARGET_HAS_mulu2_i64 1
-#define TCG_TARGET_HAS_muls2_i64 HAVE_FACILITY(MISC_INSN_EXT2)
-#define TCG_TARGET_HAS_muluh_i64 0
-#define TCG_TARGET_HAS_mulsh_i64 0
#define TCG_TARGET_HAS_qemu_ldst_i128 1
@@ -19,6 +19,10 @@ extern bool use_vis3_instructions;
#define TCG_TARGET_HAS_ctpop(T) 0
#define TCG_TARGET_HAS_ctz(T) 0
#define TCG_TARGET_HAS_div(T) 1
+#define TCG_TARGET_HAS_muls2(T) (T == TCG_TYPE_I32)
+#define TCG_TARGET_HAS_mulu2(T) (T == TCG_TYPE_I32)
+#define TCG_TARGET_HAS_mulsh(T) 0
+#define TCG_TARGET_HAS_muluh(T) (T == TCG_TYPE_I64 && use_vis3_instructions)
#define TCG_TARGET_HAS_rem(T) 0
#define TCG_TARGET_HAS_rot(T) 0
@@ -41,10 +45,6 @@ extern bool use_vis3_instructions;
#define TCG_TARGET_HAS_negsetcond_i32 1
#define TCG_TARGET_HAS_add2_i32 1
#define TCG_TARGET_HAS_sub2_i32 1
-#define TCG_TARGET_HAS_mulu2_i32 1
-#define TCG_TARGET_HAS_muls2_i32 1
-#define TCG_TARGET_HAS_muluh_i32 0
-#define TCG_TARGET_HAS_mulsh_i32 0
#define TCG_TARGET_HAS_qemu_st8_i32 0
#define TCG_TARGET_HAS_extr_i64_i32 0
@@ -61,10 +61,6 @@ extern bool use_vis3_instructions;
#define TCG_TARGET_HAS_negsetcond_i64 1
#define TCG_TARGET_HAS_add2_i64 1
#define TCG_TARGET_HAS_sub2_i64 1
-#define TCG_TARGET_HAS_mulu2_i64 0
-#define TCG_TARGET_HAS_muls2_i64 0
-#define TCG_TARGET_HAS_muluh_i64 use_vis3_instructions
-#define TCG_TARGET_HAS_mulsh_i64 0
#define TCG_TARGET_HAS_qemu_ldst_i128 0
@@ -25,10 +25,6 @@
#define TCG_TARGET_HAS_negsetcond_i64 0
#define TCG_TARGET_HAS_add2_i64 0
#define TCG_TARGET_HAS_sub2_i64 0
-#define TCG_TARGET_HAS_mulu2_i64 0
-#define TCG_TARGET_HAS_muls2_i64 0
-#define TCG_TARGET_HAS_muluh_i64 0
-#define TCG_TARGET_HAS_mulsh_i64 0
/* Turn some undef macros into true macros. */
#define TCG_TARGET_HAS_add2_i32 1
#define TCG_TARGET_HAS_sub2_i32 1
@@ -13,6 +13,10 @@
#define TCG_TARGET_HAS_ctpop(T) 1
#define TCG_TARGET_HAS_ctz(T) 1
#define TCG_TARGET_HAS_div(T) 1
+#define TCG_TARGET_HAS_muls2(T) 1
+#define TCG_TARGET_HAS_mulu2(T) 1
+#define TCG_TARGET_HAS_mulsh(T) 0
+#define TCG_TARGET_HAS_muluh(T) 0
#define TCG_TARGET_HAS_rem(T) 1
#define TCG_TARGET_HAS_rot(T) 1
@@ -33,9 +37,6 @@
#define TCG_TARGET_HAS_sextract_i32 1
#define TCG_TARGET_HAS_extract2_i32 0
#define TCG_TARGET_HAS_negsetcond_i32 0
-#define TCG_TARGET_HAS_muls2_i32 1
-#define TCG_TARGET_HAS_muluh_i32 0
-#define TCG_TARGET_HAS_mulsh_i32 0
#define TCG_TARGET_HAS_qemu_st8_i32 0
#if TCG_TARGET_REG_BITS == 64
@@ -51,17 +52,10 @@
#define TCG_TARGET_HAS_ext16u_i64 1
#define TCG_TARGET_HAS_ext32u_i64 1
#define TCG_TARGET_HAS_negsetcond_i64 0
-#define TCG_TARGET_HAS_muls2_i64 1
#define TCG_TARGET_HAS_add2_i32 1
#define TCG_TARGET_HAS_sub2_i32 1
-#define TCG_TARGET_HAS_mulu2_i32 1
#define TCG_TARGET_HAS_add2_i64 1
#define TCG_TARGET_HAS_sub2_i64 1
-#define TCG_TARGET_HAS_mulu2_i64 1
-#define TCG_TARGET_HAS_muluh_i64 0
-#define TCG_TARGET_HAS_mulsh_i64 0
-#else
-#define TCG_TARGET_HAS_mulu2_i32 1
#endif /* TCG_TARGET_REG_BITS == 64 */
#define TCG_TARGET_HAS_qemu_ldst_i128 0
@@ -1191,9 +1191,9 @@ void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_mulu2_i32) {
+ if (TCG_TARGET_HAS_mulu2(TCG_TYPE_I32)) {
tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
- } else if (TCG_TARGET_HAS_muluh_i32) {
+ } else if (TCG_TARGET_HAS_muluh(TCG_TYPE_I32)) {
TCGv_i32 t = tcg_temp_ebb_new_i32();
tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
@@ -1215,9 +1215,9 @@ void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_muls2_i32) {
+ if (TCG_TARGET_HAS_muls2(TCG_TYPE_I32)) {
tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
- } else if (TCG_TARGET_HAS_mulsh_i32) {
+ } else if (TCG_TARGET_HAS_mulsh(TCG_TYPE_I32)) {
TCGv_i32 t = tcg_temp_ebb_new_i32();
tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
@@ -3099,58 +3099,73 @@ void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_mulu2_i64) {
- tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
- } else if (TCG_TARGET_HAS_muluh_i64) {
- TCGv_i64 t = tcg_temp_ebb_new_i64();
- tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
- tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
- tcg_gen_mov_i64(rl, t);
- tcg_temp_free_i64(t);
- } else {
- TCGv_i64 t0 = tcg_temp_ebb_new_i64();
- tcg_gen_mul_i64(t0, arg1, arg2);
- gen_helper_muluh_i64(rh, arg1, arg2);
- tcg_gen_mov_i64(rl, t0);
- tcg_temp_free_i64(t0);
+ TCGv_i64 t;
+
+ if (TCG_TARGET_REG_BITS == 64) {
+ if (TCG_TARGET_HAS_mulu2(TCG_TYPE_I64)) {
+ tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
+ return;
+ }
+ if (TCG_TARGET_HAS_muluh(TCG_TYPE_I64)) {
+ t = tcg_temp_ebb_new_i64();
+ tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
+ tcg_gen_mov_i64(rl, t);
+ tcg_temp_free_i64(t);
+ return;
+ }
}
+ t = tcg_temp_ebb_new_i64();
+ tcg_gen_mul_i64(t, arg1, arg2);
+ gen_helper_muluh_i64(rh, arg1, arg2);
+ tcg_gen_mov_i64(rl, t);
+ tcg_temp_free_i64(t);
}
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_muls2_i64) {
- tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
- } else if (TCG_TARGET_HAS_mulsh_i64) {
- TCGv_i64 t = tcg_temp_ebb_new_i64();
- tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
- tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
- tcg_gen_mov_i64(rl, t);
- tcg_temp_free_i64(t);
- } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
- TCGv_i64 t0 = tcg_temp_ebb_new_i64();
- TCGv_i64 t1 = tcg_temp_ebb_new_i64();
- TCGv_i64 t2 = tcg_temp_ebb_new_i64();
- TCGv_i64 t3 = tcg_temp_ebb_new_i64();
- tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
- /* Adjust for negative inputs. */
- tcg_gen_sari_i64(t2, arg1, 63);
- tcg_gen_sari_i64(t3, arg2, 63);
- tcg_gen_and_i64(t2, t2, arg2);
- tcg_gen_and_i64(t3, t3, arg1);
- tcg_gen_sub_i64(rh, t1, t2);
- tcg_gen_sub_i64(rh, rh, t3);
- tcg_gen_mov_i64(rl, t0);
- tcg_temp_free_i64(t0);
- tcg_temp_free_i64(t1);
- tcg_temp_free_i64(t2);
- tcg_temp_free_i64(t3);
- } else {
- TCGv_i64 t0 = tcg_temp_ebb_new_i64();
- tcg_gen_mul_i64(t0, arg1, arg2);
- gen_helper_mulsh_i64(rh, arg1, arg2);
- tcg_gen_mov_i64(rl, t0);
- tcg_temp_free_i64(t0);
+ TCGv_i64 t;
+
+ if (TCG_TARGET_REG_BITS == 64) {
+ if (TCG_TARGET_HAS_muls2(TCG_TYPE_I64)) {
+ tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
+ return;
+ }
+ if (TCG_TARGET_HAS_mulsh(TCG_TYPE_I64)) {
+ t = tcg_temp_ebb_new_i64();
+ tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
+ tcg_gen_mov_i64(rl, t);
+ tcg_temp_free_i64(t);
+ return;
+ }
+ if (TCG_TARGET_HAS_mulu2(TCG_TYPE_I64) ||
+ TCG_TARGET_HAS_muluh(TCG_TYPE_I64)) {
+ TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t1 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t2 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t3 = tcg_temp_ebb_new_i64();
+ tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
+ /* Adjust for negative inputs. */
+ tcg_gen_sari_i64(t2, arg1, 63);
+ tcg_gen_sari_i64(t3, arg2, 63);
+ tcg_gen_and_i64(t2, t2, arg2);
+ tcg_gen_and_i64(t3, t3, arg1);
+ tcg_gen_sub_i64(rh, t1, t2);
+ tcg_gen_sub_i64(rh, rh, t3);
+ tcg_gen_mov_i64(rl, t0);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
+ tcg_temp_free_i64(t2);
+ tcg_temp_free_i64(t3);
+ return;
+ }
}
+ t = tcg_temp_ebb_new_i64();
+ tcg_gen_mul_i64(t, arg1, arg2);
+ gen_helper_mulsh_i64(rh, arg1, arg2);
+ tcg_gen_mov_i64(rl, t);
+ tcg_temp_free_i64(t);
}
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
@@ -2235,13 +2235,13 @@ bool tcg_op_supported(TCGOpcode op, TCGType type)
case INDEX_op_sub2_i32:
return TCG_TARGET_HAS_sub2_i32;
case INDEX_op_mulu2_i32:
- return TCG_TARGET_HAS_mulu2_i32;
+ return TCG_TARGET_HAS_mulu2(TCG_TYPE_I32);
case INDEX_op_muls2_i32:
- return TCG_TARGET_HAS_muls2_i32;
+ return TCG_TARGET_HAS_muls2(TCG_TYPE_I32);
case INDEX_op_muluh_i32:
- return TCG_TARGET_HAS_muluh_i32;
+ return TCG_TARGET_HAS_muluh(TCG_TYPE_I32);
case INDEX_op_mulsh_i32:
- return TCG_TARGET_HAS_mulsh_i32;
+ return TCG_TARGET_HAS_mulsh(TCG_TYPE_I32);
case INDEX_op_ext8s_i32:
return TCG_TARGET_HAS_ext8s_i32;
case INDEX_op_ext16s_i32:
@@ -2369,13 +2369,13 @@ bool tcg_op_supported(TCGOpcode op, TCGType type)
case INDEX_op_sub2_i64:
return TCG_TARGET_HAS_sub2_i64;
case INDEX_op_mulu2_i64:
- return TCG_TARGET_HAS_mulu2_i64;
+ return TCG_TARGET_REG_BITS == 64 && TCG_TARGET_HAS_mulu2(TCG_TYPE_I64);
case INDEX_op_muls2_i64:
- return TCG_TARGET_HAS_muls2_i64;
+ return TCG_TARGET_REG_BITS == 64 && TCG_TARGET_HAS_muls2(TCG_TYPE_I64);
case INDEX_op_muluh_i64:
- return TCG_TARGET_HAS_muluh_i64;
+ return TCG_TARGET_REG_BITS == 64 && TCG_TARGET_HAS_muluh(TCG_TYPE_I64);
case INDEX_op_mulsh_i64:
- return TCG_TARGET_HAS_mulsh_i64;
+ return TCG_TARGET_REG_BITS == 64 && TCG_TARGET_HAS_mulsh(TCG_TYPE_I64);
case INDEX_op_mov_vec:
case INDEX_op_dup_vec:
@@ -4013,22 +4013,22 @@ liveness_pass_1(TCGContext *s)
case INDEX_op_mulu2_i32:
opc_new = INDEX_op_mul_i32;
opc_new2 = INDEX_op_muluh_i32;
- have_opc_new2 = TCG_TARGET_HAS_muluh_i32;
+ have_opc_new2 = TCG_TARGET_HAS_muluh(TCG_TYPE_I32);
goto do_mul2;
case INDEX_op_muls2_i32:
opc_new = INDEX_op_mul_i32;
opc_new2 = INDEX_op_mulsh_i32;
- have_opc_new2 = TCG_TARGET_HAS_mulsh_i32;
+ have_opc_new2 = TCG_TARGET_HAS_mulsh(TCG_TYPE_I32);
goto do_mul2;
case INDEX_op_mulu2_i64:
opc_new = INDEX_op_mul_i64;
opc_new2 = INDEX_op_muluh_i64;
- have_opc_new2 = TCG_TARGET_HAS_muluh_i64;
+ have_opc_new2 = TCG_TARGET_HAS_muluh(TCG_TYPE_I64);
goto do_mul2;
case INDEX_op_muls2_i64:
opc_new = INDEX_op_mul_i64;
opc_new2 = INDEX_op_mulsh_i64;
- have_opc_new2 = TCG_TARGET_HAS_mulsh_i64;
+ have_opc_new2 = TCG_TARGET_HAS_mulsh(TCG_TYPE_I64);
goto do_mul2;
do_mul2:
nb_iargs = 2;
@@ -673,20 +673,16 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
tci_write_reg64(regs, r1, r0, T1 - T2);
break;
#endif
-#if TCG_TARGET_HAS_mulu2_i32
case INDEX_op_mulu2_i32:
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
tmp64 = (uint64_t)(uint32_t)regs[r2] * (uint32_t)regs[r3];
tci_write_reg64(regs, r1, r0, tmp64);
break;
-#endif
-#if TCG_TARGET_HAS_muls2_i32
case INDEX_op_muls2_i32:
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
tmp64 = (int64_t)(int32_t)regs[r2] * (int32_t)regs[r3];
tci_write_reg64(regs, r1, r0, tmp64);
break;
-#endif
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
CASE_32_64(ext8s)
tci_args_rr(insn, &r0, &r1);
@@ -774,18 +770,14 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
tci_args_rr(insn, &r0, &r1);
regs[r0] = ctpop64(regs[r1]);
break;
-#if TCG_TARGET_HAS_mulu2_i64
case INDEX_op_mulu2_i64:
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
mulu64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
break;
-#endif
-#if TCG_TARGET_HAS_muls2_i64
case INDEX_op_muls2_i64:
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
muls64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
break;
-#endif
#if TCG_TARGET_HAS_add2_i64
case INDEX_op_add2_i64:
tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
Signed-off-by: Richard Henderson <richard.henderson@linaro.org> --- include/tcg/tcg-opc.h | 16 ++--- tcg/aarch64/tcg-target-has.h | 12 ++-- tcg/arm/tcg-target-has.h | 8 +-- tcg/i386/tcg-target-has.h | 12 ++-- tcg/loongarch64/tcg-target-has.h | 12 ++-- tcg/mips/tcg-target-has.h | 12 ++-- tcg/ppc/tcg-target-has.h | 12 ++-- tcg/riscv/tcg-target-has.h | 12 ++-- tcg/s390x/tcg-target-has.h | 12 ++-- tcg/sparc64/tcg-target-has.h | 12 ++-- tcg/tcg-has.h | 4 -- tcg/tci/tcg-target-has.h | 14 ++-- tcg/tcg-op.c | 115 +++++++++++++++++-------------- tcg/tcg.c | 24 +++---- tcg/tci.c | 8 --- 15 files changed, 125 insertions(+), 160 deletions(-)