Mirror of https://github.com/yuzu-emu/unicorn.git
target-arm: raise exception on misaligned LDREX operands
QEMU does not generally perform alignment checks. However, the ARM ARM requires implementation of alignment exceptions for a number of cases, including LDREX, and Windows-on-ARM relies on this.

This change adds plumbing to enable alignment checks on loads using MO_ALIGN, a do_unaligned_access hook to raise the exception (data abort), and uses the new aligned loads in LDREX (for all but single-byte loads).

Backports commit 30901475b91ef1f46304404ab4bfe89097f61b96 from qemu
Parent: 3664e1ab8a
Commit: e1701b069f
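What this buys an emulator user: before this change a misaligned LDREX simply performed the load; afterwards it takes a data abort, as the ARM ARM requires. Below is a minimal sketch against unicorn's C API. The mapping address, register value, and the exact error code the abort surfaces as (UC_ERR_EXCEPTION) are assumptions for illustration, not taken from this commit.

#include <stdio.h>
#include <unicorn/unicorn.h>

int main(void)
{
    /* LDREX r0, [r1] (ARM encoding 0xE1910F9F), little-endian bytes */
    const uint8_t code[] = { 0x9f, 0x0f, 0x91, 0xe1 };
    const uint64_t base = 0x10000;           /* assumed mapping address */
    uint32_t r1 = (uint32_t)base + 0x102;    /* deliberately misaligned */
    uc_engine *uc;
    uc_err err;

    /* Error checking omitted for brevity. */
    uc_open(UC_ARCH_ARM, UC_MODE_ARM, &uc);
    uc_mem_map(uc, base, 0x1000, UC_PROT_ALL);
    uc_mem_write(uc, base, code, sizeof(code));
    uc_reg_write(uc, UC_ARM_REG_R1, &r1);

    /* Before this commit the load silently succeeded; with the MO_ALIGN
     * plumbing the misaligned LDREX raises a data abort, which unicorn
     * reports as an emulation error (assumed: UC_ERR_EXCEPTION). */
    err = uc_emu_start(uc, base, base + sizeof(code), 0, 0);
    printf("uc_emu_start: %s\n", uc_strerror(err));

    uc_close(uc);
    return 0;
}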
qemu/aarch64.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_aarch64
 #define arm_release arm_release_aarch64
 #define arm_tlb_fill arm_tlb_fill_aarch64
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_aarch64
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_aarch64
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_aarch64
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_aarch64
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_aarch64

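The same two defines recur in every per-target header below. Unicorn links several QEMU targets into one library, so each generated header (see the header_gen.py hunk further down) renames every global QEMU symbol with a target suffix, and a new global such as these two must be added to all of the headers and to the generator's symbol table to avoid duplicate-symbol link errors. The effect, using the aarch64 build as an example:

/* written in target-arm code as ... */
void arm_cpu_do_unaligned_access(CPUState *cs, vaddr vaddr, int is_write,
                                 int is_user, uintptr_t retaddr);

/* ... but compiled, via the define above, as */
void arm_cpu_do_unaligned_access_aarch64(CPUState *cs, vaddr vaddr, int is_write,
                                         int is_user, uintptr_t retaddr);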
qemu/aarch64eb.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_aarch64eb
 #define arm_release arm_release_aarch64eb
 #define arm_tlb_fill arm_tlb_fill_aarch64eb
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_aarch64eb
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_aarch64eb
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_aarch64eb
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_aarch64eb
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_aarch64eb

qemu/arm.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_arm
 #define arm_release arm_release_arm
 #define arm_tlb_fill arm_tlb_fill_arm
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_arm
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_arm
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_arm
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_arm
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_arm

qemu/armeb.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_armeb
 #define arm_release arm_release_armeb
 #define arm_tlb_fill arm_tlb_fill_armeb
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_armeb
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_armeb
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_armeb
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_armeb
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_armeb

qemu/header_gen.py
@@ -30,6 +30,8 @@ symbols = (
     'address_space_stq_be',
     'arm_release',
     'arm_tlb_fill',
+    'arm_regime_using_lpae_format',
+    'arm_cpu_do_unaligned_access',
     'aarch64_sync_32_to_64',
     'aarch64_sync_64_to_32',
     'aarch64_tb_set_jmp_target',

qemu/m68k.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_m68k
 #define arm_release arm_release_m68k
 #define arm_tlb_fill arm_tlb_fill_m68k
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_m68k
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_m68k
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_m68k
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_m68k
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_m68k

qemu/mips.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_mips
 #define arm_release arm_release_mips
 #define arm_tlb_fill arm_tlb_fill_mips
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_mips
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_mips
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_mips
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_mips
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_mips

qemu/mips64.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_mips64
 #define arm_release arm_release_mips64
 #define arm_tlb_fill arm_tlb_fill_mips64
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_mips64
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_mips64
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_mips64
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_mips64
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_mips64

qemu/mips64el.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_mips64el
 #define arm_release arm_release_mips64el
 #define arm_tlb_fill arm_tlb_fill_mips64el
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_mips64el
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_mips64el
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_mips64el
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_mips64el
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_mips64el

qemu/mipsel.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_mipsel
 #define arm_release arm_release_mipsel
 #define arm_tlb_fill arm_tlb_fill_mipsel
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_mipsel
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_mipsel
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_mipsel
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_mipsel
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_mipsel

qemu/powerpc.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_powerpc
 #define arm_release arm_release_powerpc
 #define arm_tlb_fill arm_tlb_fill_powerpc
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_powerpc
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_powerpc
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_powerpc
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_powerpc
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_powerpc

qemu/sparc.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_sparc
 #define arm_release arm_release_sparc
 #define arm_tlb_fill arm_tlb_fill_sparc
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_sparc
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_sparc
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_sparc
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_sparc
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_sparc

qemu/sparc64.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_sparc64
 #define arm_release arm_release_sparc64
 #define arm_tlb_fill arm_tlb_fill_sparc64
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_sparc64
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_sparc64
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_sparc64
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_sparc64
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_sparc64

qemu/target-arm/cpu.c
@@ -1282,6 +1282,7 @@ static void arm_cpu_class_init(struct uc_struct *uc, ObjectClass *oc, void *data
     cc->handle_mmu_fault = arm_cpu_handle_mmu_fault;
 #else
     cc->do_interrupt = arm_cpu_do_interrupt;
+    cc->do_unaligned_access = arm_cpu_do_unaligned_access;
     cc->get_phys_page_debug = arm_cpu_get_phys_page_debug;
     // UNICORN: Commented out
     //cc->vmsd = &vmstate_arm_cpu;

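This registration is the glue between the generic alignment check and the ARM fault code: when a memory op tagged MO_ALIGN sees a misaligned address, the softmmu slow path of this QEMU generation dispatches through the CPUClass hook, roughly as sketched below (illustrative, not the literal unicorn source):

/* Sketch of the dispatch in the softmmu load/store slow path. */
static inline void cpu_unaligned_access(CPUState *cpu, vaddr addr,
                                        int is_write, int is_user,
                                        uintptr_t retaddr)
{
    CPUClass *cc = CPU_GET_CLASS(cpu->uc, cpu);

    /* With this commit, ARM CPUs reach arm_cpu_do_unaligned_access() here. */
    cc->do_unaligned_access(cpu, addr, is_write, is_user, retaddr);
}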
qemu/target-arm/helper.c
@@ -5339,6 +5339,14 @@ static inline bool regime_using_lpae_format(CPUARMState *env,
     return false;
 }
 
+/* Returns true if the translation regime is using LPAE format page tables.
+ * Used when raising alignment exceptions, whose FSR changes depending on
+ * whether the long or short descriptor format is in use. */
+bool arm_regime_using_lpae_format(CPUARMState *env, ARMMMUIdx mmu_idx)
+{
+    return regime_using_lpae_format(env, mmu_idx);
+}
+
 static inline bool regime_is_user(CPUARMState *env, ARMMMUIdx mmu_idx)
 {
     switch (mmu_idx) {

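Note that regime_using_lpae_format() itself remains a static inline private to helper.c; the exported arm_regime_using_lpae_format() wrapper exists only so that op_helper.c can ask the same question when it builds the DFSR for an alignment fault.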
qemu/target-arm/internals.h
@@ -443,4 +443,11 @@ struct ARMMMUFaultInfo {
 bool arm_tlb_fill(CPUState *cpu, vaddr address, int rw, int mmu_idx,
                   uint32_t *fsr, ARMMMUFaultInfo *fi);
 
+/* Return true if the translation regime is using LPAE format page tables */
+bool arm_regime_using_lpae_format(CPUARMState *env, ARMMMUIdx mmu_idx);
+
+/* Raise a data fault alignment exception for the specified virtual address */
+void arm_cpu_do_unaligned_access(CPUState *cs, vaddr vaddr, int is_write,
+                                 int is_user, uintptr_t retaddr);
+
 #endif

qemu/target-arm/op_helper.c
@@ -126,7 +126,45 @@ void tlb_fill(CPUState *cs, target_ulong addr, int is_write, int mmu_idx,
         raise_exception(env, exc, syn, target_el);
     }
 }
-#endif
+
+/* Raise a data fault alignment exception for the specified virtual address */
+void arm_cpu_do_unaligned_access(CPUState *cs, vaddr vaddr, int is_write,
+                                 int is_user, uintptr_t retaddr)
+{
+    ARMCPU *cpu = ARM_CPU(cs->uc, cs);
+    CPUARMState *env = &cpu->env;
+    int target_el;
+    bool same_el;
+
+    if (retaddr) {
+        /* now we have a real cpu fault */
+        cpu_restore_state(cs, retaddr);
+    }
+
+    target_el = exception_target_el(env);
+    same_el = (arm_current_el(env) == target_el);
+
+    env->exception.vaddress = vaddr;
+
+    /* the DFSR for an alignment fault depends on whether we're using
+     * the LPAE long descriptor format, or the short descriptor format
+     */
+    if (arm_regime_using_lpae_format(env, cpu_mmu_index(env, false))) {
+        env->exception.fsr = 0x21;
+    } else {
+        env->exception.fsr = 0x1;
+    }
+
+    if (is_write == 1 && arm_feature(env, ARM_FEATURE_V6)) {
+        env->exception.fsr |= (1 << 11);
+    }
+
+    raise_exception(env, EXCP_DATA_ABORT,
+                    syn_data_abort(same_el, 0, 0, 0, is_write == 1, 0x21),
+                    target_el);
+}
+
+#endif /* !defined(CONFIG_USER_ONLY) */
 
 uint32_t HELPER(add_setq)(CPUARMState *env, uint32_t a, uint32_t b)
 {

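The two fsr constants are the "alignment fault" encodings of the two DFSR formats: in the long-descriptor (LPAE) format the status code lives in FSR[5:0], where alignment fault is 0b100001 (0x21); in the short-descriptor format the FS field is split across FSR[10] and FSR[3:0], where alignment fault is 0b00001. FSR bit 11 (WnR) flags a write on ARMv6 and later, and the 0x21 passed to syn_data_abort() is the matching DFSC in the exception syndrome. A small illustrative decoder, not part of the patch, for the values written above:

#include <stdbool.h>
#include <stdint.h>

/* Illustrative only: extract the fault-status code from a DFSR value
 * in either descriptor format, as produced by the code above. */
static uint32_t dfsr_fault_status(uint32_t fsr, bool lpae)
{
    if (lpae) {
        return fsr & 0x3f;                        /* STATUS = FSR[5:0] */
    }
    return ((fsr >> 10) & 1) << 4 | (fsr & 0xf);  /* FS = FSR[10]:FSR[3:0] */
}

/* dfsr_fault_status(0x21, true)  == 0x21 (alignment fault, long format)
 * dfsr_fault_status(0x01, false) == 0x01 (alignment fault, short format) */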
qemu/target-arm/translate.c
@@ -955,13 +955,13 @@ static inline void store_reg_from_load(DisasContext *s, int reg, TCGv_i32 var)
 #define DO_GEN_LD(SUFF, OPC) \
 static inline void gen_aa32_ld##SUFF(DisasContext *s, TCGv_i32 val, TCGv_i32 addr, int index) \
 { \
-    tcg_gen_qemu_ld_i32(s->uc, val, addr, index, OPC); \
+    tcg_gen_qemu_ld_i32(s->uc, val, addr, index, (OPC)); \
 }
 
 #define DO_GEN_ST(SUFF, OPC) \
 static inline void gen_aa32_st##SUFF(DisasContext *s, TCGv_i32 val, TCGv_i32 addr, int index) \
 { \
-    tcg_gen_qemu_st_i32(s->uc, val, addr, index, OPC); \
+    tcg_gen_qemu_st_i32(s->uc, val, addr, index, (OPC)); \
 }
 
 static inline void gen_aa32_ld64(DisasContext *s, TCGv_i64 val, TCGv_i32 addr, int index)

@@ -1021,6 +1021,9 @@ DO_GEN_LD(8u, MO_UB)
 DO_GEN_LD(16s, MO_TESW)
 DO_GEN_LD(16u, MO_TEUW)
 DO_GEN_LD(32u, MO_TEUL)
+/* 'a' variants include an alignment check */
+DO_GEN_LD(16ua, MO_TEUW | MO_ALIGN)
+DO_GEN_LD(32ua, MO_TEUL | MO_ALIGN)
 DO_GEN_ST(8, MO_UB)
 DO_GEN_ST(16, MO_TEUW)
 DO_GEN_ST(32, MO_TEUL)

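Spelled out, DO_GEN_LD(16ua, MO_TEUW | MO_ALIGN) expands to roughly the following; the compound memop argument is also why the first hunk above wraps OPC in parentheses (routine macro hygiene):

/* Expansion of DO_GEN_LD(16ua, MO_TEUW | MO_ALIGN): */
static inline void gen_aa32_ld16ua(DisasContext *s, TCGv_i32 val,
                                   TCGv_i32 addr, int index)
{
    /* MO_ALIGN makes the softmmu slow path raise do_unaligned_access
     * when addr is not naturally aligned for the 16-bit access. */
    tcg_gen_qemu_ld_i32(s->uc, val, addr, index, (MO_TEUW | MO_ALIGN));
}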
@@ -7562,11 +7565,11 @@ static void gen_load_exclusive(DisasContext *s, int rt, int rt2,
         gen_aa32_ld8u(s, tmp, addr, get_mem_index(s));
         break;
     case 1:
-        gen_aa32_ld16u(s, tmp, addr, get_mem_index(s));
+        gen_aa32_ld16ua(s, tmp, addr, get_mem_index(s));
         break;
     case 2:
     case 3:
-        gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
+        gen_aa32_ld32ua(s, tmp, addr, get_mem_index(s));
         break;
     default:
         abort();

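The size switch covers the whole exclusive-load family: case 0 is LDREXB, whose byte access can never be misaligned and so keeps the unchecked gen_aa32_ld8u; case 1 (LDREXH) and cases 2/3 (LDREX, and the first word of LDREXD) now go through the checking "ua" variants. This is the "all but single-byte loads" of the commit message.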
qemu/x86_64.h
@@ -24,6 +24,8 @@
 #define address_space_stq_be address_space_stq_be_x86_64
 #define arm_release arm_release_x86_64
 #define arm_tlb_fill arm_tlb_fill_x86_64
+#define arm_regime_using_lpae_format arm_regime_using_lpae_format_x86_64
+#define arm_cpu_do_unaligned_access arm_cpu_do_unaligned_access_x86_64
 #define aarch64_sync_32_to_64 aarch64_sync_32_to_64_x86_64
 #define aarch64_sync_64_to_32 aarch64_sync_64_to_32_x86_64
 #define aarch64_tb_set_jmp_target aarch64_tb_set_jmp_target_x86_64