diff --git a/tcg/tcg.c b/tcg/tcg.c
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -105,6 +105,7 @@ static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
static void tcg_out_movi(TCGContext *s, TCGType type,
TCGReg ret, tcg_target_long arg);
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
@@ -4496,11 +4497,21 @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
}
/* emit instruction */
- if (def->flags & TCG_OPF_VECTOR) {
- tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
- new_args, const_args);
- } else {
- tcg_out_op(s, op->opc, new_args, const_args);
+ switch (op->opc) {
+ case INDEX_op_ext8s_i32:
+ tcg_out_ext8s(s, TCG_TYPE_I32, new_args[0], new_args[1]);
+ break;
+ case INDEX_op_ext8s_i64:
+ tcg_out_ext8s(s, TCG_TYPE_I64, new_args[0], new_args[1]);
+ break;
+ default:
+ if (def->flags & TCG_OPF_VECTOR) {
+ tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
+ new_args, const_args);
+ } else {
+ tcg_out_op(s, op->opc, new_args, const_args);
+ }
+ break;
}
/* move the outputs in the correct register if needed */
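As a reference for what each backend hook must now compute, the contract
can be sketched in plain C.  This is illustrative commentary, not code
from the patch: the instructions emitted by tcg_out_ext8s(s, type, ret,
arg) must leave in 'ret' the value this function returns for 'arg'.

    #include <stdint.h>

    typedef enum { TCG_TYPE_I32, TCG_TYPE_I64 } TCGType;  /* simplified */

    /* Reference semantics of the tcg_out_ext8s hook (sketch only). */
    static int64_t ext8s_reference(TCGType type, int64_t arg)
    {
        int8_t low = (int8_t)arg;       /* keep only the low byte */
        if (type == TCG_TYPE_I32) {
            return (int32_t)low;        /* a sign-extended 32-bit value */
        }
        return low;                     /* sign-extended to all 64 bits */
    }
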
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -1419,6 +1419,11 @@ static inline void tcg_out_sxt(TCGContext *s, TCGType ext, MemOp s_bits,
tcg_out_sbfm(s, ext, rd, rn, 0, bits);
}
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rn)
+{
+ tcg_out_sxt(s, type, MO_8, rd, rn);
+}
+
static inline void tcg_out_uxt(TCGContext *s, MemOp s_bits,
TCGReg rd, TCGReg rn)
{
@@ -2230,10 +2235,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
break;
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8s_i32:
- tcg_out_sxt(s, ext, MO_8, a0, a1);
- break;
case INDEX_op_ext16s_i64:
case INDEX_op_ext16s_i32:
tcg_out_sxt(s, ext, MO_16, a0, a1);
@@ -2310,6 +2311,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8s_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -958,10 +958,10 @@ static void tcg_out_udiv(TCGContext *s, ARMCond cond,
tcg_out32(s, 0x0730f010 | (cond << 28) | (rd << 16) | rn | (rm << 8));
}
-static void tcg_out_ext8s(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
+static void tcg_out_ext8s(TCGContext *s, TCGType t, TCGReg rd, TCGReg rn)
{
/* sxtb */
- tcg_out32(s, 0x06af0070 | (cond << 28) | (rd << 12) | rn);
+ tcg_out32(s, 0x06af0070 | (COND_AL << 28) | (rd << 12) | rn);
}
static void __attribute__((unused))
@@ -1533,7 +1533,7 @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
datahi = lb->datahi_reg;
switch (opc & MO_SSIZE) {
case MO_SB:
- tcg_out_ext8s(s, COND_AL, datalo, TCG_REG_R0);
+ tcg_out_ext8s(s, TCG_TYPE_I32, datalo, TCG_REG_R0);
break;
case MO_SW:
tcg_out_ext16s(s, COND_AL, datalo, TCG_REG_R0);
@@ -2244,9 +2244,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_bswap32(s, COND_AL, args[0], args[1]);
break;
- case INDEX_op_ext8s_i32:
- tcg_out_ext8s(s, COND_AL, args[0], args[1]);
- break;
case INDEX_op_ext16s_i32:
tcg_out_ext16s(s, COND_AL, args[0], args[1]);
break;
@@ -2301,6 +2298,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
default:
g_assert_not_reached();
}
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -1266,8 +1266,9 @@ static inline void tcg_out_ext8u(TCGContext *s, int dest, int src)
tcg_out_modrm(s, OPC_MOVZBL + P_REXB_RM, dest, src);
}
-static void tcg_out_ext8s(TCGContext *s, int dest, int src, int rexw)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
{
+ int rexw = type == TCG_TYPE_I32 ? 0 : P_REXW;
/* movsbl */
tcg_debug_assert(src < 4 || TCG_TARGET_REG_BITS == 64);
tcg_out_modrm(s, OPC_MOVSBL + P_REXB_RM + rexw, dest, src);
@@ -1929,7 +1930,7 @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
data_reg = l->datalo_reg;
switch (opc & MO_SSIZE) {
case MO_SB:
- tcg_out_ext8s(s, data_reg, TCG_REG_EAX, rexw);
+ tcg_out_ext8s(s, l->type, data_reg, TCG_REG_EAX);
break;
case MO_SW:
tcg_out_ext16s(s, data_reg, TCG_REG_EAX, rexw);
@@ -2669,9 +2670,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_modrm(s, OPC_GRP3_Ev + rexw, EXT3_NOT, a0);
break;
- OP_32_64(ext8s):
- tcg_out_ext8s(s, a0, a1, rexw);
- break;
OP_32_64(ext16s):
tcg_out_ext16s(s, a0, a1, rexw);
break;
@@ -2840,6 +2838,8 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8s_i64:
default:
g_assert_not_reached();
}
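Two details in the i386 version are worth spelling out (commentary, not
part of the patch).  The debug assert reflects that on 32-bit x86 only
AL/CL/DL/BL (register numbers below 4) have byte-addressable forms,
while on x86-64 a REX prefix makes any low byte addressable.  The rexw
flag chooses between the two MOVSX destination widths; in C terms:

    #include <stdint.h>

    /* What the two MOVSX forms compute (a sketch; rexw mirrors P_REXW). */
    static uint64_t movsx_b(uint64_t src, int rexw)
    {
        int8_t b = (int8_t)src;
        if (rexw) {
            return (uint64_t)(int64_t)b;   /* movsbq: sign into all 64 bits */
        }
        return (uint32_t)(int32_t)b;       /* movsbl: high 32 bits zeroed */
    }
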
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -441,7 +441,7 @@ static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg)
tcg_out_opc_bstrpick_d(s, ret, arg, 0, 31);
}
-static void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
tcg_out_opc_sext_b(s, ret, arg);
}
@@ -893,7 +893,7 @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
switch (opc & MO_SSIZE) {
case MO_SB:
- tcg_out_ext8s(s, l->datalo_reg, TCG_REG_A0);
+ tcg_out_ext8s(s, type, l->datalo_reg, TCG_REG_A0);
break;
case MO_SW:
tcg_out_ext16s(s, l->datalo_reg, TCG_REG_A0);
@@ -1246,11 +1246,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
break;
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- tcg_out_ext8s(s, a0, a1);
- break;
-
case INDEX_op_ext8u_i32:
case INDEX_op_ext8u_i64:
tcg_out_ext8u(s, a0, a1);
@@ -1627,6 +1622,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8s_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -552,6 +552,12 @@ static void tcg_out_movi(TCGContext *s, TCGType type,
}
}
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+ tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
+ tcg_out_opc_reg(s, OPC_SEB, rd, TCG_REG_ZERO, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -2245,10 +2251,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_not_i64:
i1 = OPC_NOR;
goto do_unary;
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- i1 = OPC_SEB;
- goto do_unary;
case INDEX_op_ext16s_i32:
case INDEX_op_ext16s_i64:
i1 = OPC_SEH;
@@ -2419,6 +2421,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8s_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -774,7 +774,7 @@ static inline void tcg_out_rlw(TCGContext *s, int op, TCGReg ra, TCGReg rs,
tcg_out32(s, op | RA(ra) | RS(rs) | SH(sh) | MB(mb) | ME(me));
}
-static inline void tcg_out_ext8s(TCGContext *s, TCGReg dst, TCGReg src)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dst, TCGReg src)
{
tcg_out32(s, EXTSB | RA(dst) | RS(src));
}
@@ -2625,7 +2625,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ld8s_i32:
case INDEX_op_ld8s_i64:
tcg_out_mem_long(s, LBZ, LBZX, args[0], args[1], args[2]);
- tcg_out_ext8s(s, args[0], args[0]);
+ tcg_out_ext8s(s, TCG_TYPE_REG, args[0], args[0]);
break;
case INDEX_op_ld16u_i32:
case INDEX_op_ld16u_i64:
@@ -2973,10 +2973,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_st(s, args, true);
break;
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- tcg_out_ext8s(s, args[0], args[1]);
- break;
case INDEX_op_ext16s_i32:
case INDEX_op_ext16s_i64:
tcg_out_ext16s(s, args[0], args[1]);
@@ -3124,6 +3120,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8s_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -585,7 +585,7 @@ static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg)
tcg_out_opc_imm(s, OPC_SRLI, ret, ret, 32);
}
-static void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 24);
tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 24);
@@ -1612,11 +1612,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_ext32u(s, a0, a1);
break;
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- tcg_out_ext8s(s, a0, a1);
- break;
-
case INDEX_op_ext16s_i32:
case INDEX_op_ext16s_i64:
tcg_out_ext16s(s, a0, a1);
@@ -1651,6 +1646,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8s_i64:
default:
g_assert_not_reached();
}
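Without the Zbb extension RISC-V has no byte sign-extend instruction,
so the backend shifts the byte to the top of a 32-bit word and
arithmetic-shifts it back.  Since SLLIW/SRAIW results are themselves
sign-extended to 64 bits on RV64, the same two-instruction sequence is
valid for both TCG_TYPE_I32 and TCG_TYPE_I64, which is why the hook can
ignore its type argument.  The same trick in C (a sketch, not QEMU code,
relying on the arithmetic right shift all supported compilers provide):

    #include <stdint.h>

    /* Shift-based byte sign extension, as emitted via SLLIW/SRAIW. */
    static int32_t ext8s_by_shifts(uint32_t arg)
    {
        return (int32_t)(arg << 24) >> 24;  /* push byte to bit 31, pull back */
    }
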
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -1092,7 +1092,7 @@ static inline void tcg_out_risbg(TCGContext *s, TCGReg dest, TCGReg src,
tcg_out16(s, (ofs << 8) | (RIEf_RISBG & 0xff));
}
-static void tgen_ext8s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
{
tcg_out_insn(s, RRE, LGBR, dest, src);
}
@@ -2233,9 +2233,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
break;
- case INDEX_op_ext8s_i32:
- tgen_ext8s(s, TCG_TYPE_I32, args[0], args[1]);
- break;
case INDEX_op_ext16s_i32:
tgen_ext16s(s, TCG_TYPE_I32, args[0], args[1]);
break;
@@ -2537,9 +2534,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
break;
- case INDEX_op_ext8s_i64:
- tgen_ext8s(s, TCG_TYPE_I64, args[0], args[1]);
- break;
case INDEX_op_ext16s_i64:
tgen_ext16s(s, TCG_TYPE_I64, args[0], args[1]);
break;
@@ -2644,6 +2638,8 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8s_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -496,6 +496,11 @@ static void tcg_out_movi(TCGContext *s, TCGType type,
tcg_out_movi_int(s, type, ret, arg, false, TCG_REG_T2);
}
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+ g_assert_not_reached();
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -1700,6 +1705,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8s_i64:
default:
g_assert_not_reached();
}
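sparc64 defines TCG_TARGET_HAS_ext8s_i32 and TCG_TARGET_HAS_ext8s_i64
as 0, so the middle-end never emits these opcodes and the hook can
simply assert.  When a backend lacks the operation, the expansion to a
shift pair already happens at opcode-generation time, roughly like this
(a simplified sketch of the behavior of tcg/tcg-op.c):

    /* Simplified sketch of the existing middle-end fallback. */
    void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
    {
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
        } else {
            tcg_gen_shli_i32(ret, arg, 24);
            tcg_gen_sari_i32(ret, ret, 24);
        }
    }
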
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -557,6 +557,24 @@ static void tcg_out_movi(TCGContext *s, TCGType type,
}
}
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+ switch (type) {
+ case TCG_TYPE_I32:
+ tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
+ tcg_out_op_rr(s, INDEX_op_ext8s_i32, rd, rs);
+ break;
+#if TCG_TARGET_REG_BITS == 64
+ case TCG_TYPE_I64:
+ tcg_debug_assert(TCG_TARGET_HAS_ext8s_i64);
+ tcg_out_op_rr(s, INDEX_op_ext8s_i64, rd, rs);
+ break;
+#endif
+ default:
+ g_assert_not_reached();
+ }
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -715,7 +733,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
CASE_32_64(neg) /* Optional (TCG_TARGET_HAS_neg_*). */
CASE_32_64(not) /* Optional (TCG_TARGET_HAS_not_*). */
- CASE_32_64(ext8s) /* Optional (TCG_TARGET_HAS_ext8s_*). */
CASE_32_64(ext8u) /* Optional (TCG_TARGET_HAS_ext8u_*). */
CASE_32_64(ext16s) /* Optional (TCG_TARGET_HAS_ext16s_*). */
CASE_32_64(ext16u) /* Optional (TCG_TARGET_HAS_ext16u_*). */
@@ -795,6 +812,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+ case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+ case INDEX_op_ext8s_i64:
default:
g_assert_not_reached();
}
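Under TCI the "target instructions" are TCG opcodes themselves, so the
new hook simply re-encodes the same rr-format op that tcg_out_op used
to handle, and the asserts mirror the runtime TCG_TARGET_HAS_ext8s_*
configuration.  On the consuming side the interpreter performs the
extension in plain C, roughly as follows (a sketch modeled on tci.c):

    #include <stdint.h>

    /* What the interpreter does when it decodes ext8s r0, r1. */
    static void tci_ext8s(uint64_t *regs, int r0, int r1)
    {
        regs[r0] = (int8_t)regs[r1];   /* implicit sign extension to 64 bits */
    }
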
We will need a backend interface for performing 8-bit sign-extension.
Use it in tcg_reg_alloc_op in the meantime.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg.c                        | 21 ++++++++++++++++-----
 tcg/aarch64/tcg-target.c.inc     | 11 +++++++----
 tcg/arm/tcg-target.c.inc         | 10 ++++------
 tcg/i386/tcg-target.c.inc        | 10 +++++-----
 tcg/loongarch64/tcg-target.c.inc | 11 ++++-------
 tcg/mips/tcg-target.c.inc        | 12 ++++++++----
 tcg/ppc/tcg-target.c.inc         | 10 ++++------
 tcg/riscv/tcg-target.c.inc       |  9 +++------
 tcg/s390x/tcg-target.c.inc       | 10 +++-------
 tcg/sparc64/tcg-target.c.inc     |  7 +++++++
 tcg/tci/tcg-target.c.inc         | 21 ++++++++++++++++++++-
 11 files changed, 81 insertions(+), 51 deletions(-)