target/arm: Drop manual handling of set/clear_helper_retaddr

Since we converted back to cpu_*_data_ra, we do not need to
do this ourselves.

Backports commit f32e2ab65f3a0fc03d58936709e5a565c4b0db50 from qemu
This commit is contained in:
Richard Henderson 2021-02-25 20:20:21 -05:00 committed by Lioncash
parent 2e03f74a53
commit f430a399d4

View file

@@ -4095,13 +4095,6 @@ static intptr_t max_for_page(target_ulong base, intptr_t mem_off,
return MIN(split, mem_max - mem_off) + mem_off;
}
static inline void set_helper_retaddr(uintptr_t ra)
{
#ifdef CONFIG_USER_ONLY
helper_retaddr = ra;
#endif
}
/*
* The result of tlb_vaddr_to_host for user-only is just g2h(x),
* which is always non-null. Elide the useless test.
@@ -4143,7 +4136,6 @@ static void sve_ld1_r(CPUARMState *env, void *vg, const target_ulong addr,
return;
}
mem_off = reg_off >> diffsz;
set_helper_retaddr(retaddr);
/*
* If the (remaining) load is entirely within a single page, then:
@@ -4158,7 +4150,6 @@ static void sve_ld1_r(CPUARMState *env, void *vg, const target_ulong addr,
if (test_host_page(host)) {
mem_off = host_fn(vd, vg, host - mem_off, mem_off, mem_max);
tcg_debug_assert(mem_off == mem_max);
set_helper_retaddr(0);
/* After having taken any fault, zero leading inactive elements. */
swap_memzero(vd, reg_off);
return;
@@ -4209,7 +4200,6 @@ static void sve_ld1_r(CPUARMState *env, void *vg, const target_ulong addr,
}
#endif
set_helper_retaddr(0);
memcpy(vd, &scratch, reg_max);
}
@@ -4269,7 +4259,6 @@ static void sve_ld2_r(CPUARMState *env, void *vg, target_ulong addr,
intptr_t i, oprsz = simd_oprsz(desc);
ARMVectorReg scratch[2] = { };
set_helper_retaddr(ra);
for (i = 0; i < oprsz; ) {
uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
do {
@@ -4281,7 +4270,6 @@ static void sve_ld2_r(CPUARMState *env, void *vg, target_ulong addr,
addr += 2 * size;
} while (i & 15);
}
set_helper_retaddr(0);
/* Wait until all exceptions have been raised to write back. */
memcpy(&env->vfp.zregs[rd], &scratch[0], oprsz);
@@ -4296,7 +4284,6 @@ static void sve_ld3_r(CPUARMState *env, void *vg, target_ulong addr,
intptr_t i, oprsz = simd_oprsz(desc);
ARMVectorReg scratch[3] = { };
set_helper_retaddr(ra);
for (i = 0; i < oprsz; ) {
uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
do {
@@ -4309,7 +4296,6 @@ static void sve_ld3_r(CPUARMState *env, void *vg, target_ulong addr,
addr += 3 * size;
} while (i & 15);
}
set_helper_retaddr(0);
/* Wait until all exceptions have been raised to write back. */
memcpy(&env->vfp.zregs[rd], &scratch[0], oprsz);
@@ -4325,7 +4311,6 @@ static void sve_ld4_r(CPUARMState *env, void *vg, target_ulong addr,
intptr_t i, oprsz = simd_oprsz(desc);
ARMVectorReg scratch[4] = { };
set_helper_retaddr(ra);
for (i = 0; i < oprsz; ) {
uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
do {
@@ -4339,7 +4324,6 @@ static void sve_ld4_r(CPUARMState *env, void *vg, target_ulong addr,
addr += 4 * size;
} while (i & 15);
}
set_helper_retaddr(0);
/* Wait until all exceptions have been raised to write back. */
memcpy(&env->vfp.zregs[rd], &scratch[0], oprsz);
@@ -4446,7 +4430,6 @@ static void sve_ldff1_r(CPUARMState *env, void *vg, const target_ulong addr,
return;
}
mem_off = reg_off >> diffsz;
set_helper_retaddr(retaddr);
/*
* If the (remaining) load is entirely within a single page, then:
@@ -4461,7 +4444,6 @@ static void sve_ldff1_r(CPUARMState *env, void *vg, const target_ulong addr,
if (test_host_page(host)) {
mem_off = host_fn(vd, vg, host - mem_off, mem_off, mem_max);
tcg_debug_assert(mem_off == mem_max);
set_helper_retaddr(0);
/* After any fault, zero any leading inactive elements. */
swap_memzero(vd, reg_off);
return;
@@ -4504,7 +4486,6 @@ static void sve_ldff1_r(CPUARMState *env, void *vg, const target_ulong addr,
}
#endif
set_helper_retaddr(0);
record_fault(env, reg_off, reg_max);
}
@@ -4651,7 +4632,6 @@ static void sve_st1_r(CPUARMState *env, void *vg, target_ulong addr,
intptr_t i, oprsz = simd_oprsz(desc);
void *vd = &env->vfp.zregs[rd];
set_helper_retaddr(ra);
for (i = 0; i < oprsz; ) {
uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
do {
@@ -4662,7 +4642,6 @@ static void sve_st1_r(CPUARMState *env, void *vg, target_ulong addr,
addr += msize;
} while (i & 15);
}
set_helper_retaddr(0);
}
static void sve_st2_r(CPUARMState *env, void *vg, target_ulong addr,
@@ -4675,7 +4654,6 @@ static void sve_st2_r(CPUARMState *env, void *vg, target_ulong addr,
void *d1 = &env->vfp.zregs[rd];
void *d2 = &env->vfp.zregs[(rd + 1) & 31];
set_helper_retaddr(ra);
for (i = 0; i < oprsz; ) {
uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
do {
@@ -4687,7 +4665,6 @@ static void sve_st2_r(CPUARMState *env, void *vg, target_ulong addr,
addr += 2 * msize;
} while (i & 15);
}
set_helper_retaddr(0);
}
static void sve_st3_r(CPUARMState *env, void *vg, target_ulong addr,
@@ -4701,7 +4678,6 @@ static void sve_st3_r(CPUARMState *env, void *vg, target_ulong addr,
void *d2 = &env->vfp.zregs[(rd + 1) & 31];
void *d3 = &env->vfp.zregs[(rd + 2) & 31];
set_helper_retaddr(ra);
for (i = 0; i < oprsz; ) {
uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
do {
@@ -4714,7 +4690,6 @@ static void sve_st3_r(CPUARMState *env, void *vg, target_ulong addr,
addr += 3 * msize;
} while (i & 15);
}
set_helper_retaddr(0);
}
static void sve_st4_r(CPUARMState *env, void *vg, target_ulong addr,
@@ -4729,7 +4704,6 @@ static void sve_st4_r(CPUARMState *env, void *vg, target_ulong addr,
void *d3 = &env->vfp.zregs[(rd + 2) & 31];
void *d4 = &env->vfp.zregs[(rd + 3) & 31];
set_helper_retaddr(ra);
for (i = 0; i < oprsz; ) {
uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
do {
@@ -4743,7 +4717,6 @@ static void sve_st4_r(CPUARMState *env, void *vg, target_ulong addr,
addr += 4 * msize;
} while (i & 15);
}
set_helper_retaddr(0);
}
#define DO_STN_1(N, NAME, ESIZE) \
@@ -4839,7 +4812,6 @@ static void sve_ld1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
intptr_t i, oprsz = simd_oprsz(desc);
ARMVectorReg scratch = { };
set_helper_retaddr(ra);
for (i = 0; i < oprsz; ) {
uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
do {
@@ -4850,7 +4822,6 @@ static void sve_ld1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
i += 4, pg >>= 4;
} while (i & 15);
}
set_helper_retaddr(0);
/* Wait until all exceptions have been raised to write back. */
memcpy(vd, &scratch, oprsz);
@@ -4864,7 +4835,6 @@ static void sve_ld1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
intptr_t i, oprsz = simd_oprsz(desc) / 8;
ARMVectorReg scratch = { };
set_helper_retaddr(ra);
for (i = 0; i < oprsz; i++) {
uint8_t pg = *(uint8_t *)(vg + H1(i));
if (likely(pg & 1)) {
@@ -4872,7 +4842,6 @@ static void sve_ld1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
tlb_fn(env, &scratch, i * 8, base + (off << scale), ra);
}
}
set_helper_retaddr(0);
/* Wait until all exceptions have been raised to write back. */
memcpy(vd, &scratch, oprsz * 8);
@@ -5044,13 +5013,11 @@ static inline void sve_ldff1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
reg_off = find_next_active(vg, 0, reg_max, MO_32);
if (likely(reg_off < reg_max)) {
/* Perform one normal read, which will fault or not. */
set_helper_retaddr(ra);
addr = off_fn(vm, reg_off);
addr = base + (addr << scale);
tlb_fn(env, vd, reg_off, addr, ra);
/* The rest of the reads will be non-faulting. */
set_helper_retaddr(0);
}
/* After any fault, zero the leading predicated false elements. */
@@ -5086,13 +5053,11 @@ static inline void sve_ldff1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
reg_off = find_next_active(vg, 0, reg_max, MO_64);
if (likely(reg_off < reg_max)) {
/* Perform one normal read, which will fault or not. */
set_helper_retaddr(ra);
addr = off_fn(vm, reg_off);
addr = base + (addr << scale);
tlb_fn(env, vd, reg_off, addr, ra);
/* The rest of the reads will be non-faulting. */
set_helper_retaddr(0);
}
/* After any fault, zero the leading predicated false elements. */
@@ -5204,7 +5169,6 @@ static void sve_st1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
const int scale = extract32(desc, SIMD_DATA_SHIFT + MEMOPIDX_SHIFT, 2);
intptr_t i, oprsz = simd_oprsz(desc);
set_helper_retaddr(ra);
for (i = 0; i < oprsz; ) {
uint16_t pg = *(uint16_t *)(vg + H1_2(i >> 3));
do {
@@ -5215,7 +5179,6 @@ static void sve_st1_zs(CPUARMState *env, void *vd, void *vg, void *vm,
i += 4, pg >>= 4;
} while (i & 15);
}
set_helper_retaddr(0);
}
static void sve_st1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
@@ -5225,7 +5188,6 @@ static void sve_st1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
const int scale = extract32(desc, SIMD_DATA_SHIFT + MEMOPIDX_SHIFT, 2);
intptr_t i, oprsz = simd_oprsz(desc) / 8;
set_helper_retaddr(ra);
for (i = 0; i < oprsz; i++) {
uint8_t pg = *(uint8_t *)(vg + H1(i));
if (likely(pg & 1)) {
@@ -5233,7 +5195,6 @@ static void sve_st1_zd(CPUARMState *env, void *vd, void *vg, void *vm,
tlb_fn(env, vd, i * 8, base + (off << scale), ra);
}
}
set_helper_retaddr(0);
}
#define DO_ST1_ZPZ_S(MEM, OFS) \