target-arm: implement BE32 mode in system emulation

System emulation only has a little-endian target; BE32 mode
is implemented by adjusting the low bits of the address
for every byte and halfword load and store. 64-bit accesses
flip the low and high words.

Backports commit e334bd3190f6c4ca12f1d40d316dc471c70009ab from qemu
This commit is contained in:
Paolo Bonzini 2018-02-21 02:47:17 -05:00 committed by Lioncash
parent aa5be4d6ca
commit 7f23f7004d
No known key found for this signature in database
GPG key ID: 4E3C3CC1031BA9C7
2 changed files with 87 additions and 28 deletions

View file

@ -2009,9 +2009,8 @@ static inline bool bswap_code(bool sctlr_b)
#endif
sctlr_b;
#else
    /* All code access in ARM is little endian, and there are no loaders
     * doing swaps that need to be reversed
     */
return 0;
#endif

View file

@ -940,6 +940,12 @@ static inline void store_reg_from_load(DisasContext *s, int reg, TCGv_i32 var)
}
}
/* Compile-time flag: 1 when building the user-mode-only emulator,
 * 0 for full system emulation.  The BE32 address/data adjustments
 * below are guarded with !IS_USER_ONLY so they compile away entirely
 * in user mode, where big-endian code uses MO_BE instead.
 */
#ifdef CONFIG_USER_ONLY
#define IS_USER_ONLY 1
#else
#define IS_USER_ONLY 0
#endif
/* Abstractions of "generate code to do a guest load/store for
* AArch32", where a vaddr is always 32 bits (and is zero
* extended if we're a 64 bit core) and data is also
@ -949,54 +955,94 @@ static inline void store_reg_from_load(DisasContext *s, int reg, TCGv_i32 var)
*/
#if TARGET_LONG_BITS == 32
/* Generate a 32-bit-or-narrower AArch32 guest load (32-bit address form).
 * In system-mode BE32 (SCTLR.B set) the memory image is byte-reversed
 * within each word, so sub-word accesses XOR the low address bits:
 * BE32_XOR is 3 for byte, 2 for halfword, 0 for word (no adjustment).
 */
#define DO_GEN_LD(SUFF, OPC, BE32_XOR)                                  \
static inline void gen_aa32_ld##SUFF(DisasContext *s, TCGv_i32 val, TCGv_i32 addr, int index) \
{                                                                       \
    TCGContext *tcg_ctx = s->uc->tcg_ctx;                               \
    TCGMemOp opc = (OPC) | s->be_data;                                  \
    /* Not needed for user-mode BE32, where we use MO_BE instead. */    \
    if (!IS_USER_ONLY && s->sctlr_b && BE32_XOR) {                      \
        TCGv addr_be = tcg_temp_new(tcg_ctx);                           \
        tcg_gen_xori_i32(tcg_ctx, addr_be, addr, BE32_XOR);             \
        tcg_gen_qemu_ld_i32(s->uc, val, addr_be, index, opc);           \
        tcg_temp_free(tcg_ctx, addr_be);                                \
        return;                                                         \
    }                                                                   \
    tcg_gen_qemu_ld_i32(s->uc, val, addr, index, opc);                  \
}
/* Generate a 32-bit-or-narrower AArch32 guest store (32-bit address form).
 * BE32_XOR adjusts the low address bits for system-mode BE32 sub-word
 * accesses: 3 for byte, 2 for halfword, 0 for word (no adjustment).
 */
#define DO_GEN_ST(SUFF, OPC, BE32_XOR)                                  \
static inline void gen_aa32_st##SUFF(DisasContext *s, TCGv_i32 val, TCGv_i32 addr, int index) \
{                                                                       \
    TCGContext *tcg_ctx = s->uc->tcg_ctx;                               \
    TCGMemOp opc = (OPC) | s->be_data;                                  \
    /* Not needed for user-mode BE32, where we use MO_BE instead. */    \
    if (!IS_USER_ONLY && s->sctlr_b && BE32_XOR) {                      \
        TCGv addr_be = tcg_temp_new(tcg_ctx);                           \
        tcg_gen_xori_i32(tcg_ctx, addr_be, addr, BE32_XOR);             \
        tcg_gen_qemu_st_i32(s->uc, val, addr_be, index, opc);           \
        tcg_temp_free(tcg_ctx, addr_be);                                \
        return;                                                         \
    }                                                                   \
    tcg_gen_qemu_st_i32(s->uc, val, addr, index, opc);                  \
}
/* Generate a 64-bit AArch32 guest load (32-bit address form). */
static inline void gen_aa32_ld64(DisasContext *s, TCGv_i64 val, TCGv_i32 addr, int index)
{
    TCGContext *ctx = s->uc->tcg_ctx;
    TCGMemOp memop = MO_Q | s->be_data;

    tcg_gen_qemu_ld_i64(s->uc, val, addr, index, memop);

    /* Not needed for user-mode BE32, where we use MO_BE instead. */
    if (!IS_USER_ONLY && s->sctlr_b) {
        /* System-mode BE32: a doubleword access flips its two words. */
        tcg_gen_rotri_i64(ctx, val, val, 32);
    }
}
/* Generate a 64-bit AArch32 guest store (32-bit address form). */
static inline void gen_aa32_st64(DisasContext *s, TCGv_i64 val, TCGv_i32 addr, int index)
{
    TCGContext *ctx = s->uc->tcg_ctx;
    TCGMemOp memop = MO_Q | s->be_data;

    /* Not needed for user-mode BE32, where we use MO_BE instead. */
    if (!IS_USER_ONLY && s->sctlr_b) {
        /* System-mode BE32: store the word-swapped value via a temp. */
        TCGv_i64 swapped = tcg_temp_new_i64(ctx);
        tcg_gen_rotri_i64(ctx, swapped, val, 32);
        tcg_gen_qemu_st_i64(s->uc, swapped, addr, index, memop);
        tcg_temp_free_i64(ctx, swapped);
    } else {
        tcg_gen_qemu_st_i64(s->uc, val, addr, index, memop);
    }
}
#else
/* 64-bit-guest-address variant: zero-extend the 32-bit AArch32 address,
 * apply the BE32 low-bit XOR (3 byte / 2 halfword / 0 word) in system
 * mode, then perform the 32-bit-or-narrower load.
 */
#define DO_GEN_LD(SUFF, OPC, BE32_XOR)                                  \
static inline void gen_aa32_ld##SUFF(DisasContext *s, TCGv_i32 val, TCGv_i32 addr, int index) \
{                                                                       \
    TCGContext *tcg_ctx = s->uc->tcg_ctx;                               \
    TCGMemOp opc = (OPC) | s->be_data;                                  \
    TCGv addr64 = tcg_temp_new(tcg_ctx);                                \
    tcg_gen_extu_i32_i64(tcg_ctx, addr64, addr);                        \
    /* Not needed for user-mode BE32, where we use MO_BE instead. */    \
    if (!IS_USER_ONLY && s->sctlr_b && BE32_XOR) {                      \
        tcg_gen_xori_i64(tcg_ctx, addr64, addr64, BE32_XOR);            \
    }                                                                   \
    tcg_gen_qemu_ld_i32(s->uc, val, addr64, index, opc);                \
    tcg_temp_free(tcg_ctx, addr64);                                     \
}
/* 64-bit-guest-address variant: zero-extend the 32-bit AArch32 address,
 * apply the BE32 low-bit XOR (3 byte / 2 halfword / 0 word) in system
 * mode, then perform the 32-bit-or-narrower store.
 */
#define DO_GEN_ST(SUFF, OPC, BE32_XOR)                                  \
static inline void gen_aa32_st##SUFF(DisasContext *s, TCGv_i32 val, TCGv_i32 addr, int index) \
{                                                                       \
    TCGContext *tcg_ctx = s->uc->tcg_ctx;                               \
    TCGMemOp opc = (OPC) | s->be_data;                                  \
    TCGv addr64 = tcg_temp_new(tcg_ctx);                                \
    tcg_gen_extu_i32_i64(tcg_ctx, addr64, addr);                        \
    /* Not needed for user-mode BE32, where we use MO_BE instead. */    \
    if (!IS_USER_ONLY && s->sctlr_b && BE32_XOR) {                      \
        tcg_gen_xori_i64(tcg_ctx, addr64, addr64, BE32_XOR);            \
    }                                                                   \
    tcg_gen_qemu_st_i32(s->uc, val, addr64, index, opc);                \
    tcg_temp_free(tcg_ctx, addr64);                                     \
}
/* Generate a 64-bit AArch32 guest load (64-bit address form).
 * NOTE(review): a diff hunk marker had swallowed the function's opening
 * lines; the brace/tcg_ctx/opc/addr64 prologue below is reconstructed
 * from the parallel gen_aa32_st64 and the macro bodies above — confirm
 * against upstream commit e334bd3190f6.
 */
static inline void gen_aa32_ld64(DisasContext *s, TCGv_i64 val, TCGv_i32 addr, int index)
{
    TCGContext *tcg_ctx = s->uc->tcg_ctx;
    TCGMemOp opc = MO_Q | s->be_data;
    TCGv addr64 = tcg_temp_new(tcg_ctx);
    tcg_gen_extu_i32_i64(tcg_ctx, addr64, addr);
    tcg_gen_qemu_ld_i64(s->uc, val, addr64, index, opc);
    /* Not needed for user-mode BE32, where we use MO_BE instead. */
    if (!IS_USER_ONLY && s->sctlr_b) {
        /* System-mode BE32: a doubleword access flips its two words. */
        tcg_gen_rotri_i64(tcg_ctx, val, val, 32);
    }
    tcg_temp_free(tcg_ctx, addr64);
}
@ -1015,23 +1066,32 @@ static inline void gen_aa32_st64(DisasContext *s, TCGv_i64 val, TCGv_i32 addr, i
TCGMemOp opc = MO_Q | s->be_data;
TCGv addr64 = tcg_temp_new(tcg_ctx);
tcg_gen_extu_i32_i64(tcg_ctx, addr64, addr);
tcg_gen_qemu_st_i64(s->uc, val, addr64, index, opc);
/* Not needed for user-mode BE32, where we use MO_BE instead. */
if (!IS_USER_ONLY && s->sctlr_b) {
TCGv tmp = tcg_temp_new(tcg_ctx);
tcg_gen_rotri_i64(tcg_ctx, tmp, val, 32);
tcg_gen_qemu_st_i64(s->uc, tmp, addr64, index, opc);
tcg_temp_free(tcg_ctx, tmp);
} else {
tcg_gen_qemu_st_i64(s->uc, val, addr64, index, opc);
}
tcg_temp_free(tcg_ctx, addr64);
}
#endif
/* Instantiate the load/store helpers.  Third argument is the BE32
 * address XOR: 3 for byte accesses, 2 for halfword, 0 for word. */
DO_GEN_LD(8s, MO_SB, 3)
DO_GEN_LD(8u, MO_UB, 3)
DO_GEN_LD(16s, MO_SW, 2)
DO_GEN_LD(16u, MO_UW, 2)
DO_GEN_LD(32u, MO_UL, 0)
/* 'a' variants include an alignment check */
DO_GEN_LD(16ua, MO_UW | MO_ALIGN, 2)
DO_GEN_LD(32ua, MO_UL | MO_ALIGN, 0)
DO_GEN_ST(8, MO_UB, 3)
DO_GEN_ST(16, MO_UW, 2)
DO_GEN_ST(32, MO_UL, 0)
static inline void gen_set_pc_im(DisasContext *s, target_ulong val)
{