accel/tcg: Widen tcg-ldst.h addresses to uint64_t
Always pass the target address as uint64_t.
Adjust tcg_out_{ld,st}_helper_args to match.

Reviewed-by: Alex Bennée <alex.bennee@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
parent c9ad8d27ca
commit 24e46e6c9d
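The diff below is large but mechanical: every slow-path load/store helper now takes the guest address as a fixed uint64_t instead of the build-dependent target_ulong. As a standalone illustration of why this is safe for 32-bit guests (a minimal sketch, not QEMU code; model_ldub_mmu and guest_ram are made-up names), an unsigned 32-bit address simply zero-extends when converted to the wider parameter:

/* Minimal sketch only; does not use the real QEMU helper API. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in for a helper whose address parameter was widened from a
 * build-dependent 32/64-bit type to uint64_t.  guest_ram models guest
 * memory; address translation and bounds checks are omitted. */
static uint8_t model_ldub_mmu(const uint8_t *guest_ram, uint64_t addr)
{
    return guest_ram[addr];
}

int main(void)
{
    uint8_t guest_ram[16] = { [4] = 0xab };
    uint32_t addr32 = 4;            /* a 32-bit guest address */

    /* Implicit conversion zero-extends the unsigned 32-bit address,
     * so the widened helper sees exactly the same value. */
    assert(model_ldub_mmu(guest_ram, addr32) == 0xab);
    printf("loaded 0x%02x\n", model_ldub_mmu(guest_ram, addr32));
    return 0;
}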
@@ -2367,7 +2367,7 @@ static uint8_t do_ld1_mmu(CPUArchState *env, target_ulong addr, MemOpIdx oi,
     return do_ld_1(env, &l.page[0], l.mmu_idx, access_type, ra);
 }
 
-tcg_target_ulong helper_ldub_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldub_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
     tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_8);
@@ -2398,7 +2398,7 @@ static uint16_t do_ld2_mmu(CPUArchState *env, target_ulong addr, MemOpIdx oi,
     return ret;
 }
 
-tcg_target_ulong helper_lduw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_lduw_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
     tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_16);
@@ -2425,7 +2425,7 @@ static uint32_t do_ld4_mmu(CPUArchState *env, target_ulong addr, MemOpIdx oi,
     return ret;
 }
 
-tcg_target_ulong helper_ldul_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldul_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
     tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_32);
@@ -2452,7 +2452,7 @@ static uint64_t do_ld8_mmu(CPUArchState *env, target_ulong addr, MemOpIdx oi,
     return ret;
 }
 
-uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr,
+uint64_t helper_ldq_mmu(CPUArchState *env, uint64_t addr,
                         MemOpIdx oi, uintptr_t retaddr)
 {
     tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_64);
@@ -2464,19 +2464,19 @@ uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr,
  * avoid this for 64-bit data, or for 32-bit data on 32-bit host.
  */
 
-tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
     return (int8_t)helper_ldub_mmu(env, addr, oi, retaddr);
 }
 
-tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
     return (int16_t)helper_lduw_mmu(env, addr, oi, retaddr);
 }
 
-tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
     return (int32_t)helper_ldul_mmu(env, addr, oi, retaddr);
@@ -2544,7 +2544,7 @@ static Int128 do_ld16_mmu(CPUArchState *env, target_ulong addr,
     return ret;
 }
 
-Int128 helper_ld16_mmu(CPUArchState *env, target_ulong addr,
+Int128 helper_ld16_mmu(CPUArchState *env, uint64_t addr,
                        uint32_t oi, uintptr_t retaddr)
 {
     tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_128);
@@ -2860,7 +2860,7 @@ static void do_st_8(CPUArchState *env, MMULookupPageData *p, uint64_t val,
     }
 }
 
-void helper_stb_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stb_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
                     MemOpIdx oi, uintptr_t ra)
 {
     MMULookupLocals l;
@@ -2895,7 +2895,7 @@ static void do_st2_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
     do_st_1(env, &l.page[1], b, l.mmu_idx, ra);
 }
 
-void helper_stw_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stw_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
                     MemOpIdx oi, uintptr_t retaddr)
 {
     tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_16);
@@ -2922,7 +2922,7 @@ static void do_st4_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
     (void) do_st_leN(env, &l.page[1], val, l.mmu_idx, l.memop, ra);
 }
 
-void helper_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stl_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
                     MemOpIdx oi, uintptr_t retaddr)
 {
     tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_32);
@@ -2949,7 +2949,7 @@ static void do_st8_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
     (void) do_st_leN(env, &l.page[1], val, l.mmu_idx, l.memop, ra);
 }
 
-void helper_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
+void helper_stq_mmu(CPUArchState *env, uint64_t addr, uint64_t val,
                     MemOpIdx oi, uintptr_t retaddr)
 {
     tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_64);
@@ -3017,7 +3017,7 @@ static void do_st16_mmu(CPUArchState *env, target_ulong addr, Int128 val,
     }
 }
 
-void helper_st16_mmu(CPUArchState *env, target_ulong addr, Int128 val,
+void helper_st16_mmu(CPUArchState *env, uint64_t addr, Int128 val,
                      MemOpIdx oi, uintptr_t retaddr)
 {
     tcg_debug_assert((get_memop(oi) & MO_SIZE) == MO_128);
@@ -920,13 +920,13 @@ static uint8_t do_ld1_mmu(CPUArchState *env, abi_ptr addr,
     return ret;
 }
 
-tcg_target_ulong helper_ldub_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldub_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t ra)
 {
     return do_ld1_mmu(env, addr, get_memop(oi), ra);
 }
 
-tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t ra)
 {
     return (int8_t)do_ld1_mmu(env, addr, get_memop(oi), ra);
@@ -953,7 +953,7 @@ static uint16_t do_ld2_he_mmu(CPUArchState *env, abi_ptr addr,
     return ret;
 }
 
-tcg_target_ulong helper_lduw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_lduw_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -965,7 +965,7 @@ tcg_target_ulong helper_lduw_mmu(CPUArchState *env, target_ulong addr,
     return ret;
 }
 
-tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -1014,7 +1014,7 @@ static uint32_t do_ld4_he_mmu(CPUArchState *env, abi_ptr addr,
     return ret;
 }
 
-tcg_target_ulong helper_ldul_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldul_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -1026,7 +1026,7 @@ tcg_target_ulong helper_ldul_mmu(CPUArchState *env, target_ulong addr,
     return ret;
 }
 
-tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -1075,7 +1075,7 @@ static uint64_t do_ld8_he_mmu(CPUArchState *env, abi_ptr addr,
     return ret;
 }
 
-uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr,
+uint64_t helper_ldq_mmu(CPUArchState *env, uint64_t addr,
                         MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -1124,7 +1124,7 @@ static Int128 do_ld16_he_mmu(CPUArchState *env, abi_ptr addr,
     return ret;
 }
 
-Int128 helper_ld16_mmu(CPUArchState *env, target_ulong addr,
+Int128 helper_ld16_mmu(CPUArchState *env, uint64_t addr,
                        MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -1182,7 +1182,7 @@ static void do_st1_mmu(CPUArchState *env, abi_ptr addr, uint8_t val,
     clear_helper_retaddr();
 }
 
-void helper_stb_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stb_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
                     MemOpIdx oi, uintptr_t ra)
 {
     do_st1_mmu(env, addr, val, get_memop(oi), ra);
@@ -1206,7 +1206,7 @@ static void do_st2_he_mmu(CPUArchState *env, abi_ptr addr, uint16_t val,
     clear_helper_retaddr();
 }
 
-void helper_stw_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stw_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
                     MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -1248,7 +1248,7 @@ static void do_st4_he_mmu(CPUArchState *env, abi_ptr addr, uint32_t val,
     clear_helper_retaddr();
 }
 
-void helper_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stl_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
                     MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -1290,7 +1290,7 @@ static void do_st8_he_mmu(CPUArchState *env, abi_ptr addr, uint64_t val,
     clear_helper_retaddr();
 }
 
-void helper_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
+void helper_stq_mmu(CPUArchState *env, uint64_t addr, uint64_t val,
                     MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -1332,7 +1332,7 @@ static void do_st16_he_mmu(CPUArchState *env, abi_ptr addr, Int128 val,
     clear_helper_retaddr();
 }
 
-void helper_st16_mmu(CPUArchState *env, target_ulong addr, Int128 val,
+void helper_st16_mmu(CPUArchState *env, uint64_t addr, Int128 val,
                      MemOpIdx oi, uintptr_t ra)
 {
     MemOp mop = get_memop(oi);
@@ -26,38 +26,38 @@
 #define TCG_LDST_H
 
 /* Value zero-extended to tcg register size. */
-tcg_target_ulong helper_ldub_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldub_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr);
-tcg_target_ulong helper_lduw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_lduw_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr);
-tcg_target_ulong helper_ldul_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldul_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr);
-uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr,
+uint64_t helper_ldq_mmu(CPUArchState *env, uint64_t addr,
                         MemOpIdx oi, uintptr_t retaddr);
-Int128 helper_ld16_mmu(CPUArchState *env, target_ulong addr,
+Int128 helper_ld16_mmu(CPUArchState *env, uint64_t addr,
                        MemOpIdx oi, uintptr_t retaddr);
 
 /* Value sign-extended to tcg register size. */
-tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsb_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr);
-tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsw_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr);
-tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, target_ulong addr,
+tcg_target_ulong helper_ldsl_mmu(CPUArchState *env, uint64_t addr,
                                  MemOpIdx oi, uintptr_t retaddr);
 
 /*
  * Value extended to at least uint32_t, so that some ABIs do not require
  * zero-extension from uint8_t or uint16_t.
  */
-void helper_stb_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stb_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
                     MemOpIdx oi, uintptr_t retaddr);
-void helper_stw_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stw_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
                     MemOpIdx oi, uintptr_t retaddr);
-void helper_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
+void helper_stl_mmu(CPUArchState *env, uint64_t addr, uint32_t val,
                     MemOpIdx oi, uintptr_t retaddr);
-void helper_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
+void helper_stq_mmu(CPUArchState *env, uint64_t addr, uint64_t val,
                     MemOpIdx oi, uintptr_t retaddr);
-void helper_st16_mmu(CPUArchState *env, target_ulong addr, Int128 val,
+void helper_st16_mmu(CPUArchState *env, uint64_t addr, Int128 val,
                      MemOpIdx oi, uintptr_t retaddr);
 
 #endif /* TCG_LDST_H */
tcg/tcg.c
@@ -865,7 +865,7 @@ static TCGHelperInfo info_helper_ld32_mmu = {
     .flags = TCG_CALL_NO_WG,
     .typemask = dh_typemask(ttl, 0)  /* return tcg_target_ulong */
               | dh_typemask(env, 1)
-              | dh_typemask(tl, 2)   /* target_ulong addr */
+              | dh_typemask(i64, 2)  /* uint64_t addr */
               | dh_typemask(i32, 3)  /* unsigned oi */
               | dh_typemask(ptr, 4)  /* uintptr_t ra */
 };
@@ -874,7 +874,7 @@ static TCGHelperInfo info_helper_ld64_mmu = {
     .flags = TCG_CALL_NO_WG,
     .typemask = dh_typemask(i64, 0)  /* return uint64_t */
               | dh_typemask(env, 1)
-              | dh_typemask(tl, 2)   /* target_ulong addr */
+              | dh_typemask(i64, 2)  /* uint64_t addr */
               | dh_typemask(i32, 3)  /* unsigned oi */
               | dh_typemask(ptr, 4)  /* uintptr_t ra */
 };
@@ -883,7 +883,7 @@ static TCGHelperInfo info_helper_ld128_mmu = {
     .flags = TCG_CALL_NO_WG,
     .typemask = dh_typemask(i128, 0) /* return Int128 */
               | dh_typemask(env, 1)
-              | dh_typemask(tl, 2)   /* target_ulong addr */
+              | dh_typemask(i64, 2)  /* uint64_t addr */
               | dh_typemask(i32, 3)  /* unsigned oi */
               | dh_typemask(ptr, 4)  /* uintptr_t ra */
 };
@@ -892,7 +892,7 @@ static TCGHelperInfo info_helper_st32_mmu = {
     .flags = TCG_CALL_NO_WG,
     .typemask = dh_typemask(void, 0)
               | dh_typemask(env, 1)
-              | dh_typemask(tl, 2)   /* target_ulong addr */
+              | dh_typemask(i64, 2)  /* uint64_t addr */
               | dh_typemask(i32, 3)  /* uint32_t data */
               | dh_typemask(i32, 4)  /* unsigned oi */
               | dh_typemask(ptr, 5)  /* uintptr_t ra */
@@ -902,7 +902,7 @@ static TCGHelperInfo info_helper_st64_mmu = {
     .flags = TCG_CALL_NO_WG,
     .typemask = dh_typemask(void, 0)
               | dh_typemask(env, 1)
-              | dh_typemask(tl, 2)   /* target_ulong addr */
+              | dh_typemask(i64, 2)  /* uint64_t addr */
               | dh_typemask(i64, 3)  /* uint64_t data */
               | dh_typemask(i32, 4)  /* unsigned oi */
               | dh_typemask(ptr, 5)  /* uintptr_t ra */
@@ -912,7 +912,7 @@ static TCGHelperInfo info_helper_st128_mmu = {
     .flags = TCG_CALL_NO_WG,
     .typemask = dh_typemask(void, 0)
               | dh_typemask(env, 1)
-              | dh_typemask(tl, 2)   /* target_ulong addr */
+              | dh_typemask(i64, 2)  /* uint64_t addr */
               | dh_typemask(i128, 3) /* Int128 data */
               | dh_typemask(i32, 4)  /* unsigned oi */
               | dh_typemask(ptr, 5)  /* uintptr_t ra */
@@ -5597,11 +5597,26 @@ static void tcg_out_ld_helper_args(TCGContext *s, const TCGLabelQemuLdst *ldst,
     next_arg = 1;
 
     loc = &info->in[next_arg];
-    nmov = tcg_out_helper_add_mov(mov, loc, TCG_TYPE_TL, TCG_TYPE_TL,
-                                  ldst->addrlo_reg, ldst->addrhi_reg);
-    next_arg += nmov;
-
-    tcg_out_helper_load_slots(s, nmov, mov, parm);
+    if (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 64) {
+        nmov = tcg_out_helper_add_mov(mov, loc, TCG_TYPE_I64, TCG_TYPE_TL,
+                                      ldst->addrlo_reg, ldst->addrhi_reg);
+        tcg_out_helper_load_slots(s, nmov, mov, parm);
+        next_arg += nmov;
+    } else {
+        /*
+         * 32-bit host with 32-bit guest: zero-extend the guest address
+         * to 64-bits for the helper by storing the low part, then
+         * load a zero for the high part.
+         */
+        tcg_out_helper_add_mov(mov, loc + HOST_BIG_ENDIAN,
+                               TCG_TYPE_I32, TCG_TYPE_I32,
+                               ldst->addrlo_reg, -1);
+        tcg_out_helper_load_slots(s, 1, mov, parm);
+
+        tcg_out_helper_load_imm(s, loc[!HOST_BIG_ENDIAN].arg_slot,
+                                TCG_TYPE_I32, 0, parm);
+        next_arg += 2;
+    }
 
     switch (info->out_kind) {
     case TCG_CALL_RET_NORMAL:
@@ -5755,10 +5770,24 @@ static void tcg_out_st_helper_args(TCGContext *s, const TCGLabelQemuLdst *ldst,
 
     /* Handle addr argument. */
     loc = &info->in[next_arg];
-    n = tcg_out_helper_add_mov(mov, loc, TCG_TYPE_TL, TCG_TYPE_TL,
-                               ldst->addrlo_reg, ldst->addrhi_reg);
-    next_arg += n;
-    nmov += n;
+    if (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 64) {
+        n = tcg_out_helper_add_mov(mov, loc, TCG_TYPE_I64, TCG_TYPE_TL,
+                                   ldst->addrlo_reg, ldst->addrhi_reg);
+        next_arg += n;
+        nmov += n;
+    } else {
+        /*
+         * 32-bit host with 32-bit guest: zero-extend the guest address
+         * to 64-bits for the helper by storing the low part. Later,
+         * after we have processed the register inputs, we will load a
+         * zero for the high part.
+         */
+        tcg_out_helper_add_mov(mov, loc + HOST_BIG_ENDIAN,
+                               TCG_TYPE_I32, TCG_TYPE_I32,
+                               ldst->addrlo_reg, -1);
+        next_arg += 2;
+        nmov += 1;
+    }
 
     /* Handle data argument. */
     loc = &info->in[next_arg];
@@ -5803,6 +5832,11 @@ static void tcg_out_st_helper_args(TCGContext *s, const TCGLabelQemuLdst *ldst,
         g_assert_not_reached();
     }
 
+    if (TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32) {
+        loc = &info->in[1 + !HOST_BIG_ENDIAN];
+        tcg_out_helper_load_imm(s, loc->arg_slot, TCG_TYPE_I32, 0, parm);
+    }
+
     tcg_out_helper_load_common_args(s, ldst, parm, info, next_arg);
 }
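For 32-bit hosts, the interesting part of the tcg.c change above is how a uint64_t address argument is built from a 32-bit guest address register: the argument occupies two 32-bit call slots, the low half receives the address and the high half is loaded with a constant zero, with slot order selected by HOST_BIG_ENDIAN. A small standalone model of that slot layout follows (a sketch only; MODEL_HOST_BIG_ENDIAN and fill_addr_slots are hypothetical names, not QEMU APIs):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for HOST_BIG_ENDIAN; set to 1 to model a
 * big-endian host. */
#define MODEL_HOST_BIG_ENDIAN 0

/* The 64-bit address argument spans two consecutive 32-bit slots.
 * Mirroring the patch: the guest address goes into the slot holding the
 * low half, and the other slot is filled with a constant zero. */
static void fill_addr_slots(uint32_t slot[2], uint32_t guest_addr)
{
    slot[MODEL_HOST_BIG_ENDIAN]  = guest_addr;  /* low half  */
    slot[!MODEL_HOST_BIG_ENDIAN] = 0;           /* high half */
}

int main(void)
{
    uint32_t slots[2];
    fill_addr_slots(slots, 0xdeadbeefu);

    /* Reassemble the argument as the helper would receive it. */
    uint64_t addr = ((uint64_t)slots[!MODEL_HOST_BIG_ENDIAN] << 32)
                  | slots[MODEL_HOST_BIG_ENDIAN];
    printf("helper sees addr = 0x%" PRIx64 "\n", addr);
    return 0;
}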