@@ -2509,9 +2509,6 @@ static void tcg_target_qemu_prologue(TCGContext *s)
 
     tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
     tcg_out32(s, MTSPR | RS(tcg_target_call_iarg_regs[1]) | CTR);
-    if (USE_REG_TB) {
-        tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_TB, tcg_target_call_iarg_regs[1]);
-    }
     tcg_out32(s, BCCTR | BO_ALWAYS);
 
     /* Epilogue */
@@ -2529,7 +2526,13 @@ static void tcg_target_qemu_prologue(TCGContext *s)
 
 static void tcg_out_tb_start(TCGContext *s)
 {
-    /* nothing to do */
+    /* Load TCG_REG_TB. */
+    if (USE_REG_TB) {
+        /* bcl 20,31,$+4 (preferred form for getting nia) */
+        tcg_out32(s, BC | BO_ALWAYS | BI(7, CR_SO) | 0x4 | LK);
+        tcg_out32(s, MFSPR | RT(TCG_REG_TB) | LR);
+        tcg_out32(s, ADDI | TAI(TCG_REG_TB, TCG_REG_TB, -4));
+    }
 }
 
 static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
@@ -2542,32 +2545,22 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
 {
     uintptr_t ptr = get_jmp_target_addr(s, which);
 
+    /* Direct branch will be patched by tb_target_set_jmp_target. */
+    set_jmp_insn_offset(s, which);
+    tcg_out32(s, NOP);
+
+    /* When branch is out of range, fall through to indirect. */
     if (USE_REG_TB) {
         ptrdiff_t offset = tcg_tbrel_diff(s, (void *)ptr);
-        tcg_out_mem_long(s, LD, LDX, TCG_REG_TB, TCG_REG_TB, offset);
-
-        /* TODO: Use direct branches when possible. */
-        set_jmp_insn_offset(s, which);
-        tcg_out32(s, MTSPR | RS(TCG_REG_TB) | CTR);
-
-        tcg_out32(s, BCCTR | BO_ALWAYS);
-
-        /* For the unlinked case, need to reset TCG_REG_TB. */
-        set_jmp_reset_offset(s, which);
-        tcg_out_mem_long(s, ADDI, ADD, TCG_REG_TB, TCG_REG_TB,
-                         -tcg_current_code_size(s));
+        tcg_out_mem_long(s, LD, LDX, TCG_REG_TMP1, TCG_REG_TB, offset);
     } else {
-        /* Direct branch will be patched by tb_target_set_jmp_target. */
-        set_jmp_insn_offset(s, which);
-        tcg_out32(s, NOP);
-
-        /* When branch is out of range, fall through to indirect. */
         tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP1, ptr - (int16_t)ptr);
         tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP1, TCG_REG_TMP1, (int16_t)ptr);
-        tcg_out32(s, MTSPR | RS(TCG_REG_TMP1) | CTR);
-        tcg_out32(s, BCCTR | BO_ALWAYS);
-        set_jmp_reset_offset(s, which);
     }
+
+    tcg_out32(s, MTSPR | RS(TCG_REG_TMP1) | CTR);
+    tcg_out32(s, BCCTR | BO_ALWAYS);
+    set_jmp_reset_offset(s, which);
 }
 
 void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
@@ -2577,10 +2570,6 @@ void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
     intptr_t diff = addr - jmp_rx;
     tcg_insn_unit insn;
 
-    if (USE_REG_TB) {
-        return;
-    }
-
     if (in_range_b(diff)) {
         insn = B | (diff & 0x3fffffc);
     } else {
@@ -2600,9 +2589,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     switch (opc) {
     case INDEX_op_goto_ptr:
         tcg_out32(s, MTSPR | RS(args[0]) | CTR);
-        if (USE_REG_TB) {
-            tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_TB, args[0]);
-        }
         tcg_out32(s, ADDI | TAI(TCG_REG_R3, 0, 0));
         tcg_out32(s, BCCTR | BO_ALWAYS);
         break;