@@ -45,7 +45,6 @@ DEF(br, 0, 0, 1, TCG_OPF_BB_END)
DEF(mb, 0, 0, 1, 0)
DEF(mov_i32, 1, 1, 0, TCG_OPF_NOT_PRESENT)
-DEF(movi_i32, 1, 0, 1, TCG_OPF_NOT_PRESENT)
DEF(setcond_i32, 1, 2, 1, 0)
DEF(movcond_i32, 1, 4, 1, IMPL(TCG_TARGET_HAS_movcond_i32))
/* load/store */
@@ -110,7 +109,6 @@ DEF(ctz_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_ctz_i32))
DEF(ctpop_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_ctpop_i32))
DEF(mov_i64, 1, 1, 0, TCG_OPF_64BIT | TCG_OPF_NOT_PRESENT)
-DEF(movi_i64, 1, 0, 1, TCG_OPF_64BIT | TCG_OPF_NOT_PRESENT)
DEF(setcond_i64, 1, 2, 1, IMPL64)
DEF(movcond_i64, 1, 4, 1, IMPL64 | IMPL(TCG_TARGET_HAS_movcond_i64))
/* load/store */
@@ -215,7 +213,6 @@ DEF(qemu_st_i64, 0, TLADDR_ARGS + DATA64_ARGS, 1,
#define IMPLVEC TCG_OPF_VECTOR | IMPL(TCG_TARGET_MAYBE_vec)
DEF(mov_vec, 1, 1, 0, TCG_OPF_VECTOR | TCG_OPF_NOT_PRESENT)
-DEF(dupi_vec, 1, 0, 1, TCG_OPF_VECTOR | TCG_OPF_NOT_PRESENT)
DEF(dup_vec, 1, 1, 0, IMPLVEC)
DEF(dup2_vec, 1, 2, 0, IMPLVEC | IMPL(TCG_TARGET_REG_BITS == 32))
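With movi_i32, movi_i64 and dupi_vec removed from the opcode table, a front end can no longer emit a dedicated move-immediate op. Constants are instead expected to enter the IR as interned constant temporaries and be copied with an ordinary mov; a minimal sketch of the integer expander under that assumption (tcg_constant_i32() comes from the surrounding series, not from this diff):

/* Sketch only, not part of this diff: the movi expander reduced to a
   mov from an interned constant temporary. */
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}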
@@ -2262,8 +2262,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
- case INDEX_op_movi_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
g_assert_not_reached();
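The same pattern recurs in each backend hunk below: the deleted movi/dupi cases were already unreachable, because the register allocator materialises constants itself through tcg_out_movi() (and the vector equivalents) rather than through tcg_out_op(). Simplified, the constant branch of temp_load() in tcg.c does roughly this (integer case only, sketched from context):

    case TEMP_VAL_CONST:
        /* Load an interned constant directly into a freshly allocated register. */
        reg = tcg_reg_alloc(s, desired_regs, allocated_regs,
                            preferred_regs, ts->indirect_base);
        tcg_out_movi(s, ts->type, reg, ts->val);
        ts->mem_coherent = 0;
        break;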
@@ -2471,7 +2469,6 @@ static void tcg_out_vec_op(TCGContext *s, TCGOpcode opc,
break;
case INDEX_op_mov_vec: /* Always emitted via tcg_out_mov. */
- case INDEX_op_dupi_vec: /* Always emitted via tcg_out_movi. */
case INDEX_op_dup_vec: /* Always emitted via tcg_out_dup_vec. */
default:
g_assert_not_reached();
@@ -2068,7 +2068,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
break;
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
@@ -2678,8 +2678,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
break;
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
- case INDEX_op_movi_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
@@ -2965,7 +2963,6 @@ static void tcg_out_vec_op(TCGContext *s, TCGOpcode opc,
break;
case INDEX_op_mov_vec: /* Always emitted via tcg_out_mov. */
- case INDEX_op_dupi_vec: /* Always emitted via tcg_out_movi. */
case INDEX_op_dup_vec: /* Always emitted via tcg_out_dup_vec. */
default:
g_assert_not_reached();
@@ -2155,8 +2155,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
break;
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
- case INDEX_op_movi_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
@@ -1099,10 +1099,6 @@ void tcg_optimize(TCGContext *s)
CASE_OP_32_64_VEC(mov):
tcg_opt_gen_mov(s, op, op->args[0], op->args[1]);
break;
- CASE_OP_32_64(movi):
- case INDEX_op_dupi_vec:
- tcg_opt_gen_movi(s, &temps_used, op, op->args[0], op->args[1]);
- break;
case INDEX_op_dup_vec:
if (arg_is_const(op->args[1])) {
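With the explicit CASE_OP_32_64(movi) and dupi_vec dispatch gone from tcg_optimize(), constants produced by folding presumably still funnel through tcg_opt_gen_movi(), which now has to rewrite the op as a mov from a constant temporary instead of leaving a movi behind. A hedged sketch of that helper, assuming the tcg_constant_internal() and init_ts_info() helpers from the surrounding series:

static void tcg_opt_gen_movi(TCGContext *s, TCGTempSet *temps_used,
                             TCGOp *op, TCGArg dst, uint64_t val)
{
    const TCGOpDef *def = &tcg_op_defs[op->opc];
    TCGType type;
    TCGTemp *tv;

    if (def->flags & TCG_OPF_VECTOR) {
        type = TCGOP_VECL(op) + TCG_TYPE_V64;
    } else if (def->flags & TCG_OPF_64BIT) {
        type = TCG_TYPE_I64;
    } else {
        type = TCG_TYPE_I32;
    }

    /* Convert movi to mov with a constant temp. */
    tv = tcg_constant_internal(type, val);
    init_ts_info(temps_used, tv);
    tcg_opt_gen_mov(s, op, dst, temp_arg(tv));
}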
@@ -2967,8 +2967,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
- case INDEX_op_movi_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
@@ -3314,7 +3312,6 @@ static void tcg_out_vec_op(TCGContext *s, TCGOpcode opc,
return;
case INDEX_op_mov_vec: /* Always emitted via tcg_out_mov. */
- case INDEX_op_dupi_vec: /* Always emitted via tcg_out_movi. */
case INDEX_op_dup_vec: /* Always emitted via tcg_out_dup_vec. */
default:
g_assert_not_reached();
@@ -1606,8 +1606,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
- case INDEX_op_movi_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
g_assert_not_reached();
@@ -2310,8 +2310,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
- case INDEX_op_movi_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
@@ -1591,8 +1591,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
- case INDEX_op_movi_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
@@ -83,7 +83,6 @@ bool tcg_can_emit_vecop_list(const TCGOpcode *list,
case INDEX_op_xor_vec:
case INDEX_op_mov_vec:
case INDEX_op_dup_vec:
- case INDEX_op_dupi_vec:
case INDEX_op_dup2_vec:
case INDEX_op_ld_vec:
case INDEX_op_st_vec:
@@ -1463,7 +1463,6 @@ bool tcg_op_supported(TCGOpcode op)
return TCG_TARGET_HAS_goto_ptr;
case INDEX_op_mov_i32:
- case INDEX_op_movi_i32:
case INDEX_op_setcond_i32:
case INDEX_op_brcond_i32:
case INDEX_op_ld8u_i32:
@@ -1557,7 +1556,6 @@ bool tcg_op_supported(TCGOpcode op)
return TCG_TARGET_REG_BITS == 32;
case INDEX_op_mov_i64:
- case INDEX_op_movi_i64:
case INDEX_op_setcond_i64:
case INDEX_op_brcond_i64:
case INDEX_op_ld8u_i64:
@@ -1663,7 +1661,6 @@ bool tcg_op_supported(TCGOpcode op)
case INDEX_op_mov_vec:
case INDEX_op_dup_vec:
- case INDEX_op_dupi_vec:
case INDEX_op_dupm_vec:
case INDEX_op_ld_vec:
case INDEX_op_st_vec:
@@ -3488,7 +3485,7 @@ static void tcg_reg_alloc_bb_end(TCGContext *s, TCGRegSet allocated_regs)
}
/*
- * Specialized code generation for INDEX_op_movi_*.
+ * Specialized code generation for INDEX_op_mov_* with a constant.
*/
static void tcg_reg_alloc_do_movi(TCGContext *s, TCGTemp *ots,
tcg_target_ulong val, TCGLifeData arg_life,
@@ -3511,14 +3508,6 @@ static void tcg_reg_alloc_do_movi(TCGContext *s, TCGTemp *ots,
}
}
-static void tcg_reg_alloc_movi(TCGContext *s, const TCGOp *op)
-{
- TCGTemp *ots = arg_temp(op->args[0]);
- tcg_target_ulong val = op->args[1];
-
- tcg_reg_alloc_do_movi(s, ots, val, op->life, op->output_pref[0]);
-}
-
/*
* Specialized code generation for INDEX_op_mov_*.
*/
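tcg_reg_alloc_movi() can go because the generic mov path already reaches tcg_reg_alloc_do_movi() whenever the source temporary carries a constant. Roughly, the constant branch inside tcg_reg_alloc_mov() looks like this (sketched from context, not the verbatim function):

static void tcg_reg_alloc_mov(TCGContext *s, const TCGOp *op)
{
    TCGTemp *ots = arg_temp(op->args[0]);
    TCGTemp *ts = arg_temp(op->args[1]);
    TCGLifeData arg_life = op->life;

    if (ts->val_type == TEMP_VAL_CONST) {
        /* Propagate the constant or generate a store-immediate. */
        tcg_target_ulong val = ts->val;
        if (IS_DEAD_ARG(1)) {
            temp_dead(s, ts);
        }
        tcg_reg_alloc_do_movi(s, ots, val, arg_life, op->output_pref[0]);
        return;
    }

    /* ... non-constant sources continue through the register path ... */
}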
@@ -4301,11 +4290,6 @@ int tcg_gen_code(TCGContext *s, TranslationBlock *tb)
case INDEX_op_mov_vec:
tcg_reg_alloc_mov(s, op);
break;
- case INDEX_op_movi_i32:
- case INDEX_op_movi_i64:
- case INDEX_op_dupi_vec:
- tcg_reg_alloc_movi(s, op);
- break;
case INDEX_op_dup_vec:
tcg_reg_alloc_dup(s, op);
break;
@@ -815,8 +815,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
break;
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
- case INDEX_op_movi_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();