cputlb: Do unaligned store recursion to outermost function

This is less tricky than for loads, because we always fall
back to single byte stores to implement unaligned stores.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Signed-off-by: Alex Bennée <alex.bennee@linaro.org>
Tested-by: Mark Cave-Ayland <mark.cave-ayland@ilande.co.uk>
Richard Henderson, 2019-04-25 21:12:59 -07:00, committed by Alex Bennée
parent 2dd9260678
commit 4601f8d10d
1 changed file with 4 additions and 4 deletions
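The point of the change, roughly: store_helper is now forced inline into each sized store helper, so having its unaligned slow path call itself would amount to recursive inlining of an always_inline function, and the constant size would not get specialised away. Routing the byte-at-a-time fallback through the outermost, out-of-line helper_ret_stb_mmu breaks that cycle, and the byte helper simply re-enters store_helper with size == 1. A minimal standalone sketch of the pattern, not QEMU code (the names store_worker, stb_outermost and fake_ram are illustrative; the noinline attribute just makes the boundary explicit in a self-contained example):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Stand-in for guest memory; the real code goes through the TLB. */
static uint8_t fake_ram[64];

static void stb_outermost(uintptr_t addr, uint8_t val);

/*
 * Size-parameterised worker, forced inline into each sized entry point.
 * Its unaligned fallback must not call itself: it calls the non-inlined
 * byte-store entry point instead, which re-enters with size == 1.
 */
static inline void __attribute__((always_inline))
store_worker(uintptr_t addr, uint64_t val, size_t size, bool big_endian)
{
    if (size > 1 && (addr & (size - 1))) {
        /* Unaligned: fall back to single-byte stores. */
        for (size_t i = 0; i < size; ++i) {
            uint8_t val8 = big_endian
                ? val >> (((size - 1) - i) * 8)  /* big-endian extract */
                : val >> (i * 8);                /* little-endian extract */
            /* Recurse via the outermost helper, not via store_worker(). */
            stb_outermost(addr + i, val8);
        }
        return;
    }
    /* Simplified aligned path; the real fast path writes through the TLB. */
    for (size_t i = 0; i < size; ++i) {
        fake_ram[addr + i] = val >> (i * 8);
    }
}

/*
 * Outermost byte-store entry point; in QEMU this role is played by the
 * out-of-line helper_ret_stb_mmu().
 */
static void __attribute__((noinline)) stb_outermost(uintptr_t addr, uint8_t val)
{
    store_worker(addr, val, 1, false);
}

int main(void)
{
    store_worker(8, 0x11223344u, 4, false);  /* aligned: direct path */
    store_worker(3, 0xAABBu, 2, false);      /* unaligned: byte-store loop */
    return fake_ram[3] == 0xBB && fake_ram[4] == 0xAA ? 0 : 1;
}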


@@ -1413,9 +1413,9 @@ tcg_target_ulong helper_be_ldsl_mmu(CPUArchState *env, target_ulong addr,
  * Store Helpers
  */
 
-static void store_helper(CPUArchState *env, target_ulong addr, uint64_t val,
-                         TCGMemOpIdx oi, uintptr_t retaddr, size_t size,
-                         bool big_endian)
+static inline void __attribute__((always_inline))
+store_helper(CPUArchState *env, target_ulong addr, uint64_t val,
+             TCGMemOpIdx oi, uintptr_t retaddr, size_t size, bool big_endian)
 {
     uintptr_t mmu_idx = get_mmuidx(oi);
     uintptr_t index = tlb_index(env, mmu_idx, addr);
@@ -1514,7 +1514,7 @@ static void store_helper(CPUArchState *env, target_ulong addr, uint64_t val,
                 /* Little-endian extract.  */
                 val8 = val >> (i * 8);
             }
-            store_helper(env, addr + i, val8, oi, retaddr, 1, big_endian);
+            helper_ret_stb_mmu(env, addr + i, val8, oi, retaddr);
         }
         return;
     }