target/arm: Drop manual handling of set/clear_helper_retaddr
Since we converted back to cpu_*_data_ra, we do not need to do this
ourselves.

Reviewed-by: Peter Maydell <peter.maydell@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Message-id: 20200508154359.7494-9-richard.henderson@linaro.org
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
commit f32e2ab65f
parent 6799ce7b0e
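For context (not part of the patch itself): set_helper_retaddr()/clear_helper_retaddr() record the host return address so that the user-mode-only fault path can unwind out of a helper that touches guest memory. After the earlier conversion of these SVE helpers to the cpu_*_data_ra accessors, which take the return address as an explicit argument, the manual bracketing no longer buys anything, so this patch deletes it, along with the no-op stubs that existed only to keep the softmmu build compiling. A minimal sketch of the before/after shape, using a hypothetical load_one() helper rather than code from this patch:

    /* Sketch only: "load_one" is hypothetical, not a QEMU function. */

    /* Before: the access was bracketed so a user-only fault could unwind. */
    static void load_one_old(CPUARMState *env, void *vd, target_ulong addr,
                             uintptr_t ra)
    {
        set_helper_retaddr(ra);
        *(uint16_t *)vd = cpu_lduw_data_ra(env, addr, ra);
        clear_helper_retaddr();
    }

    /* After: cpu_lduw_data_ra() already receives 'ra', so the explicit
     * set/clear calls are redundant and are simply dropped. */
    static void load_one_new(CPUARMState *env, void *vd, target_ulong addr,
                             uintptr_t ra)
    {
        *(uint16_t *)vd = cpu_lduw_data_ra(env, addr, ra);
    }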
target/arm/sve_helper.c

@@ -4133,12 +4133,6 @@ static intptr_t max_for_page(target_ulong base, intptr_t mem_off,
     return MIN(split, mem_max - mem_off) + mem_off;
 }
 
-#ifndef CONFIG_USER_ONLY
-/* These are normally defined only for CONFIG_USER_ONLY in <exec/cpu_ldst.h> */
-static inline void set_helper_retaddr(uintptr_t ra) { }
-static inline void clear_helper_retaddr(void) { }
-#endif
-
 /*
  * The result of tlb_vaddr_to_host for user-only is just g2h(x),
  * which is always non-null.  Elide the useless test.
@@ -4180,7 +4174,6 @@ static void sve_ld1_r(CPUARMState *env, void *vg, const target_ulong addr,
         return;
     }
     mem_off = reg_off >> diffsz;
-    set_helper_retaddr(retaddr);
 
     /*
      * If the (remaining) load is entirely within a single page, then:
@@ -4195,7 +4188,6 @@ static void sve_ld1_r(CPUARMState *env, void *vg, const target_ulong addr,
     if (test_host_page(host)) {
         mem_off = host_fn(vd, vg, host - mem_off, mem_off, mem_max);
         tcg_debug_assert(mem_off == mem_max);
-        clear_helper_retaddr();
         /* After having taken any fault, zero leading inactive elements. */
         swap_memzero(vd, reg_off);
         return;
@@ -4246,7 +4238,6 @@ static void sve_ld1_r(CPUARMState *env, void *vg, const target_ulong addr,
     }
 #endif
 
-    clear_helper_retaddr();
     memcpy(vd, &scratch, reg_max);
 }
 
@@ -4306,7 +4297,6 @@ static void sve_ld2_r(CPUARMState *env, void *vg, target_ulong addr,
     intptr_t i, oprsz = simd_oprsz(desc);
     ARMVectorReg scratch[2] = { };
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; ) {
         uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
         do {
@@ -4318,7 +4308,6 @@ static void sve_ld2_r(CPUARMState *env, void *vg, target_ulong addr,
             addr += 2 * size;
         } while (i & 15);
     }
-    clear_helper_retaddr();
 
     /* Wait until all exceptions have been raised to write back. */
     memcpy(&env->vfp.zregs[rd], &scratch[0], oprsz);
@@ -4333,7 +4322,6 @@ static void sve_ld3_r(CPUARMState *env, void *vg, target_ulong addr,
     intptr_t i, oprsz = simd_oprsz(desc);
     ARMVectorReg scratch[3] = { };
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; ) {
         uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
         do {
@@ -4346,7 +4334,6 @@ static void sve_ld3_r(CPUARMState *env, void *vg, target_ulong addr,
             addr += 3 * size;
         } while (i & 15);
     }
-    clear_helper_retaddr();
 
     /* Wait until all exceptions have been raised to write back. */
     memcpy(&env->vfp.zregs[rd], &scratch[0], oprsz);
@@ -4362,7 +4349,6 @@ static void sve_ld4_r(CPUARMState *env, void *vg, target_ulong addr,
     intptr_t i, oprsz = simd_oprsz(desc);
     ARMVectorReg scratch[4] = { };
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; ) {
         uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
         do {
@@ -4376,7 +4362,6 @@ static void sve_ld4_r(CPUARMState *env, void *vg, target_ulong addr,
             addr += 4 * size;
         } while (i & 15);
     }
-    clear_helper_retaddr();
 
     /* Wait until all exceptions have been raised to write back. */
     memcpy(&env->vfp.zregs[rd], &scratch[0], oprsz);
@@ -4483,7 +4468,6 @@ static void sve_ldff1_r(CPUARMState *env, void *vg, const target_ulong addr,
         return;
     }
     mem_off = reg_off >> diffsz;
-    set_helper_retaddr(retaddr);
 
     /*
      * If the (remaining) load is entirely within a single page, then:
@@ -4498,7 +4482,6 @@ static void sve_ldff1_r(CPUARMState *env, void *vg, const target_ulong addr,
     if (test_host_page(host)) {
         mem_off = host_fn(vd, vg, host - mem_off, mem_off, mem_max);
         tcg_debug_assert(mem_off == mem_max);
-        clear_helper_retaddr();
         /* After any fault, zero any leading inactive elements. */
         swap_memzero(vd, reg_off);
         return;
@@ -4541,7 +4524,6 @@ static void sve_ldff1_r(CPUARMState *env, void *vg, const target_ulong addr,
     }
 #endif
 
-    clear_helper_retaddr();
     record_fault(env, reg_off, reg_max);
 }
 
@@ -4687,7 +4669,6 @@ static void sve_st1_r(CPUARMState *env, void *vg, target_ulong addr,
     intptr_t i, oprsz = simd_oprsz(desc);
     void *vd = &env->vfp.zregs[rd];
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; ) {
         uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
         do {
@@ -4698,7 +4679,6 @@ static void sve_st1_r(CPUARMState *env, void *vg, target_ulong addr,
             addr += msize;
         } while (i & 15);
     }
-    clear_helper_retaddr();
 }
 
 static void sve_st2_r(CPUARMState *env, void *vg, target_ulong addr,
@@ -4711,7 +4691,6 @@ static void sve_st2_r(CPUARMState *env, void *vg, target_ulong addr,
     void *d1 = &env->vfp.zregs[rd];
     void *d2 = &env->vfp.zregs[(rd + 1) & 31];
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; ) {
         uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
         do {
@@ -4723,7 +4702,6 @@ static void sve_st2_r(CPUARMState *env, void *vg, target_ulong addr,
             addr += 2 * msize;
         } while (i & 15);
     }
-    clear_helper_retaddr();
 }
 
 static void sve_st3_r(CPUARMState *env, void *vg, target_ulong addr,
@@ -4737,7 +4715,6 @@ static void sve_st3_r(CPUARMState *env, void *vg, target_ulong addr,
     void *d2 = &env->vfp.zregs[(rd + 1) & 31];
     void *d3 = &env->vfp.zregs[(rd + 2) & 31];
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; ) {
         uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
         do {
@@ -4750,7 +4727,6 @@ static void sve_st3_r(CPUARMState *env, void *vg, target_ulong addr,
             addr += 3 * msize;
         } while (i & 15);
     }
-    clear_helper_retaddr();
 }
 
 static void sve_st4_r(CPUARMState *env, void *vg, target_ulong addr,
@@ -4765,7 +4741,6 @@ static void sve_st4_r(CPUARMState *env, void *vg, target_ulong addr,
     void *d3 = &env->vfp.zregs[(rd + 2) & 31];
     void *d4 = &env->vfp.zregs[(rd + 3) & 31];
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; ) {
         uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
         do {
@@ -4779,7 +4754,6 @@ static void sve_st4_r(CPUARMState *env, void *vg, target_ulong addr,
             addr += 4 * msize;
         } while (i & 15);
     }
-    clear_helper_retaddr();
 }
 
 #define DO_STN_1(N, NAME, ESIZE) \
@@ -4875,7 +4849,6 @@ static void sve_ld1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
     intptr_t i, oprsz = simd_oprsz(desc);
     ARMVectorReg scratch = { };
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; ) {
         uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
         do {
@@ -4886,7 +4859,6 @@ static void sve_ld1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
             i += 4, pg >>= 4;
         } while (i & 15);
     }
-    clear_helper_retaddr();
 
     /* Wait until all exceptions have been raised to write back. */
     memcpy(vd, &scratch, oprsz);
@@ -4900,7 +4872,6 @@ static void sve_ld1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
     intptr_t i, oprsz = simd_oprsz(desc) / 8;
     ARMVectorReg scratch = { };
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; i++) {
         uint8_t pg = *(uint8_t *)(vg + H1(i));
         if (likely(pg & 1)) {
@@ -4908,7 +4879,6 @@ static void sve_ld1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
             tlb_fn(env, &scratch, i * 8, base + (off << scale), ra);
         }
     }
-    clear_helper_retaddr();
 
     /* Wait until all exceptions have been raised to write back. */
     memcpy(vd, &scratch, oprsz * 8);
@@ -5080,13 +5050,11 @@ static inline void sve_ldff1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
     reg_off = find_next_active(vg, 0, reg_max, MO_32);
     if (likely(reg_off < reg_max)) {
         /* Perform one normal read, which will fault or not. */
-        set_helper_retaddr(ra);
         addr = off_fn(vm, reg_off);
         addr = base + (addr << scale);
         tlb_fn(env, vd, reg_off, addr, ra);
 
         /* The rest of the reads will be non-faulting. */
-        clear_helper_retaddr();
     }
 
     /* After any fault, zero the leading predicated false elements. */
@@ -5122,13 +5090,11 @@ static inline void sve_ldff1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
     reg_off = find_next_active(vg, 0, reg_max, MO_64);
     if (likely(reg_off < reg_max)) {
         /* Perform one normal read, which will fault or not. */
-        set_helper_retaddr(ra);
         addr = off_fn(vm, reg_off);
         addr = base + (addr << scale);
         tlb_fn(env, vd, reg_off, addr, ra);
 
         /* The rest of the reads will be non-faulting. */
-        clear_helper_retaddr();
     }
 
     /* After any fault, zero the leading predicated false elements. */
@@ -5240,7 +5206,6 @@ static void sve_st1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
     const int scale = extract32(desc, SIMD_DATA_SHIFT + MEMOPIDX_SHIFT, 2);
     intptr_t i, oprsz = simd_oprsz(desc);
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; ) {
         uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
         do {
@@ -5251,7 +5216,6 @@ static void sve_st1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
             i += 4, pg >>= 4;
         } while (i & 15);
     }
-    clear_helper_retaddr();
 }
 
 static void sve_st1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
@@ -5261,7 +5225,6 @@ static void sve_st1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
     const int scale = extract32(desc, SIMD_DATA_SHIFT + MEMOPIDX_SHIFT, 2);
    intptr_t i, oprsz = simd_oprsz(desc) / 8;
 
-    set_helper_retaddr(ra);
     for (i = 0; i < oprsz; i++) {
         uint8_t pg = *(uint8_t *)(vg + H1(i));
         if (likely(pg & 1)) {
@@ -5269,7 +5232,6 @@ static void sve_st1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
             tlb_fn(env, vd, i * 8, base + (off << scale), ra);
         }
     }
-    clear_helper_retaddr();
 }
 
 #define DO_ST1_ZPZ_S(MEM, OFS) \