diff --git a/include/tcg/tcg-ldst.h b/include/tcg/tcg-ldst.h
--- a/include/tcg/tcg-ldst.h
+++ b/include/tcg/tcg-ldst.h
@@ -26,38 +26,38 @@
#define TCG_LDST_H
/* Value zero-extended to tcg register size. */
-tcg_target_ulong helper_ldub_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldub_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr);
-tcg_target_ulong helper_lduw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_lduw_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr);
-tcg_target_ulong helper_ldul_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldul_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr);
-uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr,
+uint64_t helper_ldq_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr);
-Int128 helper_ld16_mmu(CPUArchState *env, target_ulong addr,
+Int128 helper_ld16_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr);
/* Value sign-extended to tcg register size. */
-tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr);
-tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr);
-tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr);
/*
* Value extended to at least uint32_t, so that some ABIs do not require
* zero-extension from uint8_t or uint16_t.
*/
-void helper_stb_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stb_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
MemOpIdx oi, uintptr_t retaddr);
-void helper_stw_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stw_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
MemOpIdx oi, uintptr_t retaddr);
-void helper_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stl_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
MemOpIdx oi, uintptr_t retaddr);
-void helper_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
+void helper_stq_mmu(CPUArchState *env, uint64_t addr, uint64_t val,
MemOpIdx oi, uintptr_t retaddr);
-void helper_st16_mmu(CPUArchState *env, target_ulong addr, Int128 val,
+void helper_st16_mmu(CPUArchState *env, uint64_t addr, Int128 val,
MemOpIdx oi, uintptr_t retaddr);
#endif /* TCG_LDST_H */
diff --git a/accel/tcg/cputlb.c b/accel/tcg/cputlb.c
--- a/accel/tcg/cputlb.c
+++ b/accel/tcg/cputlb.c
@@ -2357,7 +2357,7 @@ static uint8_t do_ld1_mmu(CPUArchState *env, target_ulong addr, MemOpIdx oi,
return do_ld_1(env, &l.page[0], l.mmu_idx, access_type, ra);
}
-tcg_target_ulong helper_ldub_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldub_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr)
{
tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_8);
@@ -2388,7 +2388,7 @@ static uint16_t do_ld2_mmu(CPUArchState *env, target_ulong addr, MemOpIdx oi,
return ret;
}
-tcg_target_ulong helper_lduw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_lduw_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr)
{
tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_16);
@@ -2415,7 +2415,7 @@ static uint32_t do_ld4_mmu(CPUArchState *env, target_ulong addr, MemOpIdx oi,
return ret;
}
-tcg_target_ulong helper_ldul_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldul_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr)
{
tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_32);
@@ -2442,7 +2442,7 @@ static uint64_t do_ld8_mmu(CPUArchState *env, target_ulong addr, MemOpIdx oi,
return ret;
}
-uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr,
+uint64_t helper_ldq_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr)
{
tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_64);
@@ -2454,19 +2454,19 @@ uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr,
* avoid this for 64-bit data, or for 32-bit data on 32-bit host.
*/
-tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr)
{
return (int8_t)helper_ldub_mmu(env, addr, oi, retaddr);
}
-tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr)
{
return (int16_t)helper_lduw_mmu(env, addr, oi, retaddr);
}
-tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t retaddr)
{
return (int32_t)helper_ldul_mmu(env, addr, oi, retaddr);
@@ -2534,7 +2534,7 @@ static Int128 do_ld16_mmu(CPUArchState *env, target_ulong addr,
return ret;
}
-Int128 helper_ld16_mmu(CPUArchState *env, target_ulong addr,
+Int128 helper_ld16_mmu(CPUArchState *env, uint64_t addr,
uint32_t oi, uintptr_t retaddr)
{
tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_128);
@@ -2842,7 +2842,7 @@ static void do_st_8(CPUArchState *env, MMULookupPageData *p, uint64_t val,
}
}
-void helper_stb_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stb_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
MemOpIdx oi, uintptr_t ra)
{
MMULookupLocals l;
@@ -2877,7 +2877,7 @@ static void do_st2_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
do_st_1(env, &l.page[1], b, l.mmu_idx, ra);
}
-void helper_stw_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stw_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
MemOpIdx oi, uintptr_t retaddr)
{
tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_16);
@@ -2904,7 +2904,7 @@ static void do_st4_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
(void) do_st_leN(env, &l.page[1], val, l.mmu_idx, l.memop, ra);
}
-void helper_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stl_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
MemOpIdx oi, uintptr_t retaddr)
{
tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_32);
@@ -2931,7 +2931,7 @@ static void do_st8_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
(void) do_st_leN(env, &l.page[1], val, l.mmu_idx, l.memop, ra);
}
-void helper_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
+void helper_stq_mmu(CPUArchState *env, uint64_t addr, uint64_t val,
MemOpIdx oi, uintptr_t retaddr)
{
tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_64);
@@ -2999,7 +2999,7 @@ static void do_st16_mmu(CPUArchState *env, target_ulong addr, Int128 val,
}
}
-void helper_st16_mmu(CPUArchState *env, target_ulong addr, Int128 val,
+void helper_st16_mmu(CPUArchState *env, uint64_t addr, Int128 val,
MemOpIdx oi, uintptr_t retaddr)
{
tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_128);
diff --git a/accel/tcg/user-exec.c b/accel/tcg/user-exec.c
--- a/accel/tcg/user-exec.c
+++ b/accel/tcg/user-exec.c
@@ -920,13 +920,13 @@ static uint8_t do_ld1_mmu(CPUArchState *env, abi_ptr addr,
return ret;
}
-tcg_target_ulong helper_ldub_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldub_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t ra)
{
return do_ld1_mmu(env, addr, get_memop(oi), ra);
}
-tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t ra)
{
return (int8_t)do_ld1_mmu(env, addr, get_memop(oi), ra);
@@ -953,7 +953,7 @@ static uint16_t do_ld2_he_mmu(CPUArchState *env, abi_ptr addr,
return ret;
}
-tcg_target_ulong helper_lduw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_lduw_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
@@ -965,7 +965,7 @@ tcg_target_ulong helper_lduw_mmu(CPUArchState *env, target_ulong addr,
return ret;
}
-tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
@@ -1014,7 +1014,7 @@ static uint32_t do_ld4_he_mmu(CPUArchState *env, abi_ptr addr,
return ret;
}
-tcg_target_ulong helper_ldul_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldul_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
@@ -1026,7 +1026,7 @@ tcg_target_ulong helper_ldul_mmu(CPUArchState *env, target_ulong addr,
return ret;
}
-tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
@@ -1075,7 +1075,7 @@ static uint64_t do_ld8_he_mmu(CPUArchState *env, abi_ptr addr,
return ret;
}
-uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr,
+uint64_t helper_ldq_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
@@ -1124,7 +1124,7 @@ static Int128 do_ld16_he_mmu(CPUArchState *env, abi_ptr addr,
return ret;
}
-Int128 helper_ld16_mmu(CPUArchState *env, target_ulong addr,
+Int128 helper_ld16_mmu(CPUArchState *env, uint64_t addr,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
@@ -1182,7 +1182,7 @@ static void do_st1_mmu(CPUArchState *env, abi_ptr addr, uint8_t val,
clear_helper_retaddr();
}
-void helper_stb_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stb_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
MemOpIdx oi, uintptr_t ra)
{
do_st1_mmu(env, addr, val, get_memop(oi), ra);
@@ -1206,7 +1206,7 @@ static void do_st2_he_mmu(CPUArchState *env, abi_ptr addr, uint16_t val,
clear_helper_retaddr();
}
-void helper_stw_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stw_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
@@ -1248,7 +1248,7 @@ static void do_st4_he_mmu(CPUArchState *env, abi_ptr addr, uint32_t val,
clear_helper_retaddr();
}
-void helper_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stl_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
@@ -1290,7 +1290,7 @@ static void do_st8_he_mmu(CPUArchState *env, abi_ptr addr, uint64_t val,
clear_helper_retaddr();
}
-void helper_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
+void helper_stq_mmu(CPUArchState *env, uint64_t addr, uint64_t val,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
@@ -1332,7 +1332,7 @@ static void do_st16_he_mmu(CPUArchState *env, abi_ptr addr, Int128 val,
clear_helper_retaddr();
}
-void helper_st16_mmu(CPUArchState *env, target_ulong addr, Int128 val,
+void helper_st16_mmu(CPUArchState *env, uint64_t addr, Int128 val,
MemOpIdx oi, uintptr_t ra)
{
MemOp mop = get_memop(oi);
diff --git a/tcg/tcg.c b/tcg/tcg.c
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -768,7 +768,7 @@ static TCGHelperInfo info_helper_ld32_mmu = {
.flags = TCG_CALL_NO_WG,
.typemask = dh_typemask(ttl, 0) /* return tcg_target_ulong */
| dh_typemask(env, 1)
- | dh_typemask(tl, 2) /* target_ulong addr */
+ | dh_typemask(i64, 2) /* uint64_t addr */
| dh_typemask(i32, 3) /* unsigned oi */
| dh_typemask(ptr, 4) /* uintptr_t ra */
};
@@ -777,7 +777,7 @@ static TCGHelperInfo info_helper_ld64_mmu = {
.flags = TCG_CALL_NO_WG,
.typemask = dh_typemask(i64, 0) /* return uint64_t */
| dh_typemask(env, 1)
- | dh_typemask(tl, 2) /* target_ulong addr */
+ | dh_typemask(i64, 2) /* uint64_t addr */
| dh_typemask(i32, 3) /* unsigned oi */
| dh_typemask(ptr, 4) /* uintptr_t ra */
};
@@ -786,7 +786,7 @@ static TCGHelperInfo info_helper_ld128_mmu = {
.flags = TCG_CALL_NO_WG,
.typemask = dh_typemask(i128, 0) /* return Int128 */
| dh_typemask(env, 1)
- | dh_typemask(tl, 2) /* target_ulong addr */
+ | dh_typemask(i64, 2) /* uint64_t addr */
| dh_typemask(i32, 3) /* unsigned oi */
| dh_typemask(ptr, 4) /* uintptr_t ra */
};
@@ -795,7 +795,7 @@ static TCGHelperInfo info_helper_st32_mmu = {
.flags = TCG_CALL_NO_WG,
.typemask = dh_typemask(void, 0)
| dh_typemask(env, 1)
- | dh_typemask(tl, 2) /* target_ulong addr */
+ | dh_typemask(i64, 2) /* uint64_t addr */
| dh_typemask(i32, 3) /* uint32_t data */
| dh_typemask(i32, 4) /* unsigned oi */
| dh_typemask(ptr, 5) /* uintptr_t ra */
@@ -805,7 +805,7 @@ static TCGHelperInfo info_helper_st64_mmu = {
.flags = TCG_CALL_NO_WG,
.typemask = dh_typemask(void, 0)
| dh_typemask(env, 1)
- | dh_typemask(tl, 2) /* target_ulong addr */
+ | dh_typemask(i64, 2) /* uint64_t addr */
| dh_typemask(i64, 3) /* uint64_t data */
| dh_typemask(i32, 4) /* unsigned oi */
| dh_typemask(ptr, 5) /* uintptr_t ra */
@@ -815,7 +815,7 @@ static TCGHelperInfo info_helper_st128_mmu = {
.flags = TCG_CALL_NO_WG,
.typemask = dh_typemask(void, 0)
| dh_typemask(env, 1)
- | dh_typemask(tl, 2) /* target_ulong addr */
+ | dh_typemask(i64, 2) /* uint64_t addr */
| dh_typemask(i128, 3) /* Int128 data */
| dh_typemask(i32, 4) /* unsigned oi */
| dh_typemask(ptr, 5) /* uintptr_t ra */
@@ -5456,11 +5456,26 @@ static void tcg_out_ld_helper_args(TCGContext *s, const TCGLabelQemuLdst *ldst,
next_arg = 1;
loc = &info->in[next_arg];
- nmov = tcg_out_helper_add_mov(mov, loc, TCG_TYPE_TL, TCG_TYPE_TL,
- ldst->addrlo_reg, ldst->addrhi_reg);
- next_arg += nmov;
+ if (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 64) {
+ nmov = tcg_out_helper_add_mov(mov, loc, TCG_TYPE_I64, TCG_TYPE_TL,
+ ldst->addrlo_reg, ldst->addrhi_reg);
+ tcg_out_helper_load_slots(s, nmov, mov, parm);
+ next_arg += nmov;
+ } else {
+ /*
+ * 32-bit host with 32-bit guest: zero-extend the guest address
+ * to 64-bits for the helper by storing the low part, then
+ * load a zero for the high part.
+ */
+ tcg_out_helper_add_mov(mov, loc + HOST_BIG_ENDIAN,
+ TCG_TYPE_I32, TCG_TYPE_I32,
+ ldst->addrlo_reg, -1);
+ tcg_out_helper_load_slots(s, 1, mov, parm);
- tcg_out_helper_load_slots(s, nmov, mov, parm);
+ tcg_out_helper_load_imm(s, loc[!HOST_BIG_ENDIAN].arg_slot,
+ TCG_TYPE_I32, 0, parm);
+ next_arg += 2;
+ }
switch (info->out_kind) {
case TCG_CALL_RET_NORMAL:
@@ -5614,10 +5629,24 @@ static void tcg_out_st_helper_args(TCGContext *s, const TCGLabelQemuLdst *ldst,
/* Handle addr argument. */
loc = &info->in[next_arg];
- n = tcg_out_helper_add_mov(mov, loc, TCG_TYPE_TL, TCG_TYPE_TL,
- ldst->addrlo_reg, ldst->addrhi_reg);
- next_arg += n;
- nmov += n;
+ if (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 64) {
+ n = tcg_out_helper_add_mov(mov, loc, TCG_TYPE_I64, TCG_TYPE_TL,
+ ldst->addrlo_reg, ldst->addrhi_reg);
+ next_arg += n;
+ nmov += n;
+ } else {
+ /*
+ * 32-bit host with 32-bit guest: zero-extend the guest address
+ * to 64-bits for the helper by storing the low part. Later,
+ * after we have processed the register inputs, we will load a
+ * zero for the high part.
+ */
+ tcg_out_helper_add_mov(mov, loc + HOST_BIG_ENDIAN,
+ TCG_TYPE_I32, TCG_TYPE_I32,
+ ldst->addrlo_reg, -1);
+ next_arg += 2;
+ nmov += 1;
+ }
/* Handle data argument. */
loc = &info->in[next_arg];
@@ -5662,6 +5691,11 @@ static void tcg_out_st_helper_args(TCGContext *s, const TCGLabelQemuLdst *ldst,
g_assert_not_reached();
}
+ if (TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32) {
+ loc = &info->in[1 + !HOST_BIG_ENDIAN];
+ tcg_out_helper_load_imm(s, loc->arg_slot, TCG_TYPE_I32, 0, parm);
+ }
+
tcg_out_helper_load_common_args(s, ldst, parm, info, next_arg);
}
Always pass the target address as uint64_t.
Adjust tcg_out_{ld,st}_helper_args to match.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 include/tcg/tcg-ldst.h | 26 +++++++++---------
 accel/tcg/cputlb.c     | 26 +++++++++---------
 accel/tcg/user-exec.c  | 26 +++++++++---------
 tcg/tcg.c              | 62 ++++++++++++++++++++++++++++++++----------
 4 files changed, 87 insertions(+), 53 deletions(-)
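
As a minimal sketch of the zero-extension scheme used in the 32-bit-host path of
tcg_out_ld_helper_args above (not part of the patch; split_addr_for_helper and
slot are hypothetical names used only for illustration, and HOST_BIG_ENDIAN is
redefined here just to keep the example self-contained): the helper's uint64_t
addr argument is assembled from two 32-bit call slots, with the guest address in
the low half and an explicit zero in the high half, the slot order depending on
host endianness.

#include <stdint.h>

#ifndef HOST_BIG_ENDIAN
#define HOST_BIG_ENDIAN 0   /* assume a little-endian host for this sketch */
#endif

/*
 * Fill the two 32-bit argument slots that together form the helper's
 * uint64_t addr parameter, mirroring the loc[HOST_BIG_ENDIAN] and
 * loc[!HOST_BIG_ENDIAN] indexing in the patch.
 */
static void split_addr_for_helper(uint32_t guest_addr, uint32_t slot[2])
{
    /* The low half of the 64-bit argument carries the guest address. */
    slot[HOST_BIG_ENDIAN] = guest_addr;
    /* The high half is an explicit zero, i.e. the zero-extension. */
    slot[!HOST_BIG_ENDIAN] = 0;
}

On a little-endian host, split_addr_for_helper(0x8000, slot) leaves slot[0] ==
0x8000 and slot[1] == 0; a big-endian host swaps the two slots.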