@@ -21,3 +21,16 @@ NCP 00 - ---- 111 ---------------------- # CBcc
SETHI 00 rd:5 100 i:22
CALL 01 i:s30
+
+Tcc_r 10 0 cond:4 111010 rs1:5 0 cc:1 0000000 rs2:5
+{
+ # For v7, the entire simm13 field is present, but masked to 7 bits.
+ # For v8, [12:7] are reserved. However, a compatibility note for
+ # the Tcc insn in the v9 manual suggests that the v8 reserved field
+ # was ignored and did not produce traps.
+ Tcc_i_v7 10 0 cond:4 111010 rs1:5 1 ------ i:7
+
+ # For v9, bits [12:11] are cc1 and cc0 (and cc0 must be 0).
+ # Bits [10:8] are reserved and the OSA2011 manual says they must be 0.
+ Tcc_i_v9 10 0 cond:4 111010 rs1:5 1 cc:1 0 000 i:8
+}
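To make the overlap group above concrete, here is a plain-C sketch of how the two immediate forms pick apart a raw instruction word. The field positions are taken from the patterns above; the standalone helper itself is hypothetical and not part of the patch.

    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical field extraction for the i=1 forms of Tcc. */
    static bool tcc_imm_fields(uint32_t insn, bool is_v9, int *cc, int *sw_trap)
    {
        if (!is_v9) {
            /* v7/v8: simm13 is present but only the low 7 bits are used. */
            *cc = 0;
            *sw_trap = insn & 0x7f;
            return true;
        }
        /* v9: cc0 (bit 11) and the reserved bits [10:8] must be zero. */
        if (insn & 0x0f00) {
            return false;           /* no pattern in the group matches */
        }
        *cc = (insn >> 12) & 1;     /* cc1 selects %icc vs %xcc */
        *sw_trap = insn & 0xff;     /* 8-bit software trap number */
        return true;
    }

In the decodetree version the same effect falls out of pattern order: Tcc_i_v7 is listed first, trans_Tcc_i_v7 rejects it when avail_64, and decode then falls through to Tcc_i_v9, which insists on the zero bits.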
@@ -3042,6 +3042,81 @@ static bool trans_SETHI(DisasContext *dc, arg_SETHI *a)
return advance_pc(dc);
}
+static bool do_tcc(DisasContext *dc, int cond, int cc,
+ int rs1, bool imm, int rs2_or_imm)
+{
+ int mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
+ ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
+ DisasCompare cmp;
+ TCGLabel *lab;
+ TCGv_i32 trap;
+
+ /* Trap never. */
+ if (cond == 0) {
+ return advance_pc(dc);
+ }
+
+ /*
+ * Immediate traps are the most common case. Since this value is
+ * live across the branch, it really pays to evaluate the constant.
+ */
+ if (rs1 == 0 && (imm || rs2_or_imm == 0)) {
+ trap = tcg_constant_i32((rs2_or_imm & mask) + TT_TRAP);
+ } else {
+ trap = tcg_temp_new_i32();
+ tcg_gen_trunc_tl_i32(trap, gen_load_gpr(dc, rs1));
+ if (imm) {
+ tcg_gen_addi_i32(trap, trap, rs2_or_imm);
+ } else {
+ TCGv_i32 t2 = tcg_temp_new_i32();
+ tcg_gen_trunc_tl_i32(t2, gen_load_gpr(dc, rs2_or_imm));
+ tcg_gen_add_i32(trap, trap, t2);
+ }
+ tcg_gen_andi_i32(trap, trap, mask);
+ tcg_gen_addi_i32(trap, trap, TT_TRAP);
+ }
+
+ /* Trap always. */
+ if (cond == 8) {
+ save_state(dc);
+ gen_helper_raise_exception(tcg_env, trap);
+ dc->base.is_jmp = DISAS_NORETURN;
+ return true;
+ }
+
+ /* Conditional trap. */
+ flush_cond(dc);
+ lab = delay_exceptionv(dc, trap);
+ gen_compare(&cmp, cc, cond, dc);
+ tcg_gen_brcond_tl(cmp.cond, cmp.c1, cmp.c2, lab);
+
+ return advance_pc(dc);
+}
+
+static bool trans_Tcc_r(DisasContext *dc, arg_Tcc_r *a)
+{
+ if (avail_32(dc) && a->cc) {
+ return false;
+ }
+ return do_tcc(dc, a->cond, a->cc, a->rs1, false, a->rs2);
+}
+
+static bool trans_Tcc_i_v7(DisasContext *dc, arg_Tcc_i_v7 *a)
+{
+ if (avail_64(dc)) {
+ return false;
+ }
+ return do_tcc(dc, a->cond, 0, a->rs1, true, a->i);
+}
+
+static bool trans_Tcc_i_v9(DisasContext *dc, arg_Tcc_i_v9 *a)
+{
+ if (avail_32(dc)) {
+ return false;
+ }
+ return do_tcc(dc, a->cond, a->cc, a->rs1, true, a->i);
+}
+
#define CHECK_IU_FEATURE(dc, FEATURE) \
if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
goto illegal_insn;
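For reference, the trap-number arithmetic that do_tcc() turns into TCG ops can be written out in ordinary C as below. The constants assume the usual QEMU definitions (TT_TRAP 0x80, V8_TRAP_MASK 0x7f, UA2005_HTRAP_MASK 0xff); the helper is illustrative only, not part of the patch.

    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative mirror of the value do_tcc() computes at runtime. */
    static uint32_t tcc_trap_type(uint32_t rs1_val, uint32_t rs2_or_imm, bool hyper)
    {
        uint32_t mask = hyper ? 0xff : 0x7f;            /* UA2005 vs V8 mask */
        return ((rs1_val + rs2_or_imm) & mask) + 0x80;  /* 0x80 == TT_TRAP */
    }

For the common case of, say, "ta 0x6d" with rs1 == %g0, everything is known at translate time: (0x6d & 0x7f) + 0x80 == 0xed, which is why do_tcc() special-cases rs1 == 0 and folds the whole value into a tcg_constant_i32.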
@@ -3072,85 +3147,7 @@ static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
TCGv cpu_dst = tcg_temp_new();
TCGv cpu_tmp0;
- if (xop == 0x3a) { /* generate trap */
- int cond = GET_FIELD(insn, 3, 6);
- TCGv_i32 trap;
- TCGLabel *l1 = NULL;
- int mask;
-
- if (cond == 0) {
- /* Trap never. */
- break;
- }
-
- save_state(dc);
-
- if (cond != 8) {
- /* Conditional trap. */
- DisasCompare cmp;
-#ifdef TARGET_SPARC64
- /* V9 icc/xcc */
- int cc = GET_FIELD_SP(insn, 11, 12);
- if (cc == 0) {
- gen_compare(&cmp, 0, cond, dc);
- } else if (cc == 2) {
- gen_compare(&cmp, 1, cond, dc);
- } else {
- goto illegal_insn;
- }
-#else
- gen_compare(&cmp, 0, cond, dc);
-#endif
- l1 = gen_new_label();
- tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
- cmp.c1, cmp.c2, l1);
- }
-
- mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
- ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
-
- /* Don't use the normal temporaries, as they may well have
- gone out of scope with the branch above. While we're
- doing that we might as well pre-truncate to 32-bit. */
- trap = tcg_temp_new_i32();
-
- rs1 = GET_FIELD_SP(insn, 14, 18);
- if (IS_IMM) {
- rs2 = GET_FIELD_SP(insn, 0, 7);
- if (rs1 == 0) {
- tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
- /* Signal that the trap value is fully constant. */
- mask = 0;
- } else {
- TCGv t1 = gen_load_gpr(dc, rs1);
- tcg_gen_trunc_tl_i32(trap, t1);
- tcg_gen_addi_i32(trap, trap, rs2);
- }
- } else {
- TCGv t1, t2;
- rs2 = GET_FIELD_SP(insn, 0, 4);
- t1 = gen_load_gpr(dc, rs1);
- t2 = gen_load_gpr(dc, rs2);
- tcg_gen_add_tl(t1, t1, t2);
- tcg_gen_trunc_tl_i32(trap, t1);
- }
- if (mask != 0) {
- tcg_gen_andi_i32(trap, trap, mask);
- tcg_gen_addi_i32(trap, trap, TT_TRAP);
- }
-
- gen_helper_raise_exception(tcg_env, trap);
-
- if (cond == 8) {
- /* An unconditional trap ends the TB. */
- dc->base.is_jmp = DISAS_NORETURN;
- goto jmp_insn;
- } else {
- /* A conditional trap falls through to the next insn. */
- gen_set_label(l1);
- break;
- }
- } else if (xop == 0x28) {
+ if (xop == 0x28) {
rs1 = GET_FIELD(insn, 13, 17);
switch(rs1) {
case 0: /* rdy */
Use the new delay_exceptionv function in the implementation.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 target/sparc/insns.decode |  13 ++++
 target/sparc/translate.c  | 155 +++++++++++++++++++-------------------
 2 files changed, 89 insertions(+), 79 deletions(-)
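The structural change the commit message describes, sketched as a comment (schematic only, not literal TCG output; it assumes delay_exceptionv() queues the raise to be emitted after the straight-line code of the TB, and TT_TRAP == 0x80):

    /*
     * Conditional trap, e.g. "te %icc, 5":
     *
     *     gen_compare(&cmp, 0, cond, dc)    evaluate the %icc condition
     *     brcond taken -> L_trap            L_trap from delay_exceptionv(dc, trap)
     *     advance_pc(dc)                    fall through, the TB continues
     *     ...
     * L_trap:                               emitted out of line, presumably at TB end
     *     raise_exception(env, 0x85)        0x80 + 5
     *
     * The pre-conversion code instead raised the exception inline and used an
     * inverted brcond to jump around it, ending or continuing the TB by hand.
     */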