target/arm: Replace s->pc with s->base.pc_next

We must update s->base.pc_next when we return from the translate_insn
hook to the main translator loop. By incrementing s->base.pc_next
immediately after reading the insn word, "pc_next" contains the address
of the next instruction throughout translation.

All remaining uses of s->pc are referencing the address of the next insn,
so this is now a simple global replacement. Remove the "s->pc" field.
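
The pattern is easy to model outside the tree. Below is a minimal, self-contained sketch of the idea (simplified stand-in types and a dummy fetch, not QEMU's actual translator API): the per-insn translate step advances base.pc_next by the instruction size immediately after the fetch, so every later reference to "the next instruction" reads that one field and the outer loop no longer needs a separate re-sync.

    #include <stdint.h>
    #include <stdio.h>

    /* Simplified stand-ins for QEMU's DisasContextBase/DisasContext. */
    typedef struct {
        uint64_t pc_first;   /* start address of the translation block */
        uint64_t pc_next;    /* address of the next insn to translate */
    } DisasContextBase;

    typedef struct {
        DisasContextBase base;
        uint32_t insn;       /* current instruction word */
    } DisasContext;

    /* Dummy 4-byte code fetch, standing in for arm_ldl_code(). */
    static uint32_t fetch_insn(uint64_t addr)
    {
        return (uint32_t)addr;
    }

    /* The pattern this commit relies on: bump base.pc_next right after
     * the fetch, so it already names the *next* insn for the rest of
     * translation (return addresses, branch-not-taken targets, ...). */
    static void translate_insn(DisasContext *s)
    {
        uint64_t pc_curr = s->base.pc_next;   /* address of this insn */

        s->insn = fetch_insn(pc_curr);
        s->base.pc_next += 4;                 /* now the next insn's address */

        printf("insn at %#llx, link register would get %#llx\n",
               (unsigned long long)pc_curr,
               (unsigned long long)s->base.pc_next);
    }

    int main(void)
    {
        DisasContext dc = { .base = { .pc_first = 0x1000, .pc_next = 0x1000 } };

        for (int i = 0; i < 3; i++) {
            translate_insn(&dc);   /* the loop itself never touches pc_next */
        }
        return 0;
    }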

Backports commit a04159166b880b505ccadc16f2fe84169806883d from qemu
Author:    Richard Henderson, 2019-11-18 17:22:44 -05:00 (committed by Lioncash)
Parent:    7d1fcef722
Commit:    00fbadf637
3 changed files with 80 additions and 91 deletions

@@ -396,7 +396,7 @@ static void gen_exception_internal(DisasContext *s, int excp)
 static void gen_exception_internal_insn(DisasContext *s, int offset, int excp)
 {
-    gen_a64_set_pc_im(s, s->pc - offset);
+    gen_a64_set_pc_im(s, s->base.pc_next - offset);
     gen_exception_internal(s, excp);
     s->base.is_jmp = DISAS_NORETURN;
 }
@@ -404,7 +404,7 @@ static void gen_exception_internal_insn(DisasContext *s, int offset, int excp)
 static void gen_exception_insn(DisasContext *s, int offset, int excp,
                                uint32_t syndrome, uint32_t target_el)
 {
-    gen_a64_set_pc_im(s, s->pc - offset);
+    gen_a64_set_pc_im(s, s->base.pc_next - offset);
     gen_exception(s, excp, syndrome, target_el);
     s->base.is_jmp = DISAS_NORETURN;
 }
@@ -415,7 +415,7 @@ static void gen_exception_bkpt_insn(DisasContext *s, int offset,
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
     TCGv_i32 tcg_syn;

-    gen_a64_set_pc_im(s, s->pc - offset);
+    gen_a64_set_pc_im(s, s->base.pc_next - offset);
     tcg_syn = tcg_const_i32(tcg_ctx, syndrome);
     gen_helper_exception_bkpt_insn(tcg_ctx, tcg_ctx->cpu_env, tcg_syn);
     tcg_temp_free_i32(tcg_ctx, tcg_syn);
@@ -1429,7 +1429,7 @@ static void disas_uncond_b_imm(DisasContext *s, uint32_t insn)
     if (insn & (1U << 31)) {
         /* BL Branch with link */
-        tcg_gen_movi_i64(tcg_ctx, cpu_reg(s, 30), s->pc);
+        tcg_gen_movi_i64(tcg_ctx, cpu_reg(s, 30), s->base.pc_next);
     }

     /* B Branch / BL Branch with link */
@@ -1463,7 +1463,7 @@ static void disas_comp_b_imm(DisasContext *s, uint32_t insn)
     tcg_gen_brcondi_i64(tcg_ctx, op ? TCG_COND_NE : TCG_COND_EQ,
                         tcg_cmp, 0, label_match);
-    gen_goto_tb(s, 0, s->pc);
+    gen_goto_tb(s, 0, s->base.pc_next);
     gen_set_label(tcg_ctx, label_match);
     gen_goto_tb(s, 1, addr);
 }
@@ -1495,7 +1495,7 @@ static void disas_test_b_imm(DisasContext *s, uint32_t insn)
     tcg_gen_brcondi_i64(tcg_ctx, op ? TCG_COND_NE : TCG_COND_EQ,
                         tcg_cmp, 0, label_match);
     tcg_temp_free_i64(tcg_ctx, tcg_cmp);
-    gen_goto_tb(s, 0, s->pc);
+    gen_goto_tb(s, 0, s->base.pc_next);
     gen_set_label(tcg_ctx, label_match);
     gen_goto_tb(s, 1, addr);
 }
@@ -1524,7 +1524,7 @@ static void disas_cond_b_imm(DisasContext *s, uint32_t insn)
         /* genuinely conditional branches */
         TCGLabel *label_match = gen_new_label(tcg_ctx);
         arm_gen_test_cc(s, cond, label_match);
-        gen_goto_tb(s, 0, s->pc);
+        gen_goto_tb(s, 0, s->base.pc_next);
         gen_set_label(tcg_ctx, label_match);
         gen_goto_tb(s, 1, addr);
     } else {
@@ -1683,7 +1683,7 @@ static void handle_sync(DisasContext *s, uint32_t insn,
          * any pending interrupts immediately.
          */
         reset_btype(s);
-        gen_goto_tb(s, 0, s->pc);
+        gen_goto_tb(s, 0, s->base.pc_next);
         return;

     case 7: /* SB */
@@ -1695,7 +1695,7 @@ static void handle_sync(DisasContext *s, uint32_t insn,
          * MB and end the TB instead.
          */
         tcg_gen_mb(tcg_ctx, TCG_MO_ALL | TCG_BAR_SC);
-        gen_goto_tb(s, 0, s->pc);
+        gen_goto_tb(s, 0, s->base.pc_next);
         return;

     default:
@@ -2218,7 +2218,7 @@ static void disas_uncond_b_reg(DisasContext *s, uint32_t insn)
         gen_a64_set_pc(s, dst);
         /* BLR also needs to load return address */
         if (opc == 1) {
-            tcg_gen_movi_i64(tcg_ctx, cpu_reg(s, 30), s->pc);
+            tcg_gen_movi_i64(tcg_ctx, cpu_reg(s, 30), s->base.pc_next);
         }
         break;
@@ -2245,7 +2245,7 @@ static void disas_uncond_b_reg(DisasContext *s, uint32_t insn)
         gen_a64_set_pc(s, dst);
         /* BLRAA also needs to load return address */
         if (opc == 9) {
-            tcg_gen_movi_i64(tcg_ctx, cpu_reg(s, 30), s->pc);
+            tcg_gen_movi_i64(tcg_ctx, cpu_reg(s, 30), s->base.pc_next);
         }
         break;
@@ -14357,20 +14357,20 @@ static void disas_a64_insn(CPUARMState *env, DisasContext *s)
     TCGContext *tcg_ctx = env->uc->tcg_ctx;

     // Unicorn: end address tells us to stop emulation
-    if (s->pc == s->uc->addr_end) {
+    if (s->base.pc_next == s->uc->addr_end) {
         // imitate WFI instruction to halt emulation
         s->base.is_jmp = DISAS_WFI;
         return;
     }

-    s->pc_curr = s->pc;
-    insn = arm_ldl_code(env, s->pc, s->sctlr_b);
+    s->pc_curr = s->base.pc_next;
+    insn = arm_ldl_code(env, s->base.pc_next, s->sctlr_b);
     s->insn = insn;
-    s->pc += 4;
+    s->base.pc_next += 4;

     // Unicorn: trace this instruction on request
-    if (HOOK_EXISTS_BOUNDED(env->uc, UC_HOOK_CODE, s->pc - 4)) {
-        gen_uc_tracecode(tcg_ctx, 4, UC_HOOK_CODE_IDX, env->uc, s->pc - 4);
+    if (HOOK_EXISTS_BOUNDED(env->uc, UC_HOOK_CODE, s->pc_curr)) {
+        gen_uc_tracecode(tcg_ctx, 4, UC_HOOK_CODE_IDX, env->uc, s->pc_curr);
         // the callback might want to stop emulation immediately
         check_exit_request(tcg_ctx);
     }
@@ -14473,7 +14473,6 @@ static void aarch64_tr_init_disas_context(DisasContextBase *dcbase,
     dc->uc = env->uc;
     dc->isar = &arm_cpu->isar;
-    dc->pc = dc->base.pc_first;
     dc->condjmp = 0;

     dc->aarch64 = 1;
@@ -14547,7 +14546,7 @@ static void aarch64_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
     DisasContext *dc = container_of(dcbase, DisasContext, base);
     TCGContext *tcg_ctx = cpu->uc->tcg_ctx;

-    tcg_gen_insn_start(tcg_ctx, dc->pc, 0, 0);
+    tcg_gen_insn_start(tcg_ctx, dc->base.pc_next, 0, 0);
     dc->insn_start = tcg_last_op(tcg_ctx);
 }
@@ -14558,7 +14557,7 @@ static bool aarch64_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
     TCGContext *tcg_ctx = cpu->uc->tcg_ctx;

     if (bp->flags & BP_CPU) {
-        gen_a64_set_pc_im(dc, dc->pc);
+        gen_a64_set_pc_im(dc, dc->base.pc_next);
         gen_helper_check_breakpoints(tcg_ctx, tcg_ctx->cpu_env);
         /* End the TB early; it likely won't be executed */
         dc->base.is_jmp = DISAS_TOO_MANY;
@@ -14569,7 +14568,7 @@ static bool aarch64_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
            to for it to be properly cleared -- thus we
            increment the PC here so that the logic setting
            tb->size below does the right thing. */
-        dc->pc += 4;
+        dc->base.pc_next += 4;
         dc->base.is_jmp = DISAS_NORETURN;
     }
@@ -14599,7 +14598,6 @@ static void aarch64_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
         disas_a64_insn(env, dc);
     }

-    dc->base.pc_next = dc->pc;
     translator_loop_temp_check(&dc->base);
 }
@@ -14616,7 +14614,7 @@ static void aarch64_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
          */
         switch (dc->base.is_jmp) {
         default:
-            gen_a64_set_pc_im(dc, dc->pc);
+            gen_a64_set_pc_im(dc, dc->base.pc_next);
             /* fall through */
         case DISAS_EXIT:
         case DISAS_JUMP:
@@ -14633,11 +14631,11 @@ static void aarch64_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
         switch (dc->base.is_jmp) {
         case DISAS_NEXT:
         case DISAS_TOO_MANY:
-            gen_goto_tb(dc, 1, dc->pc);
+            gen_goto_tb(dc, 1, dc->base.pc_next);
             break;
         default:
         case DISAS_UPDATE:
-            gen_a64_set_pc_im(dc, dc->pc);
+            gen_a64_set_pc_im(dc, dc->base.pc_next);
             /* fall through */
         case DISAS_EXIT:
             tcg_gen_exit_tb(tcg_ctx, NULL, 0);
@@ -14649,11 +14647,11 @@ static void aarch64_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
         case DISAS_SWI:
             break;
         case DISAS_WFE:
-            gen_a64_set_pc_im(dc, dc->pc);
+            gen_a64_set_pc_im(dc, dc->base.pc_next);
             gen_helper_wfe(tcg_ctx, tcg_ctx->cpu_env);
             break;
         case DISAS_YIELD:
-            gen_a64_set_pc_im(dc, dc->pc);
+            gen_a64_set_pc_im(dc, dc->base.pc_next);
             gen_helper_yield(tcg_ctx, tcg_ctx->cpu_env);
             break;
         case DISAS_WFI:
@@ -14663,7 +14661,7 @@ static void aarch64_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
              */
             TCGv_i32 tmp = tcg_const_i32(tcg_ctx, 4);

-            gen_a64_set_pc_im(dc, dc->pc);
+            gen_a64_set_pc_im(dc, dc->base.pc_next);
             gen_helper_wfi(tcg_ctx, tcg_ctx->cpu_env, tmp);
             tcg_temp_free_i32(tcg_ctx, tmp);
             /* The helper doesn't necessarily throw an exception, but we
@@ -14674,9 +14672,6 @@ static void aarch64_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
             }
         }
     }
-
-    /* Functions above can change dc->pc, so re-align db->pc_next */
-    dc->base.pc_next = dc->pc;
 }

 static void aarch64_tr_disas_log(const DisasContextBase *dcbase,

@@ -1101,7 +1101,7 @@ static inline void gen_blxns(DisasContext *s, int rm)
      * We do however need to set the PC, because the blxns helper reads it.
      * The blxns helper may throw an exception.
      */
-    gen_set_pc_im(s, s->pc);
+    gen_set_pc_im(s, s->base.pc_next);
     gen_helper_v7m_blxns(tcg_ctx, tcg_ctx->cpu_env, var);
     tcg_temp_free_i32(tcg_ctx, var);
     s->base.is_jmp = DISAS_EXIT;
@@ -1295,7 +1295,7 @@ static inline void gen_hvc(DisasContext *s, int imm16)
      * for single stepping.)
      */
     s->svc_imm = imm16;
-    gen_set_pc_im(s, s->pc);
+    gen_set_pc_im(s, s->base.pc_next);
     s->base.is_jmp = DISAS_HVC;
 }
@@ -1311,14 +1311,14 @@ static inline void gen_smc(DisasContext *s)
     tmp = tcg_const_i32(tcg_ctx, syn_aa32_smc());
     gen_helper_pre_smc(tcg_ctx, tcg_ctx->cpu_env, tmp);
     tcg_temp_free_i32(tcg_ctx, tmp);
-    gen_set_pc_im(s, s->pc);
+    gen_set_pc_im(s, s->base.pc_next);
     s->base.is_jmp = DISAS_SMC;
 }

 static void gen_exception_internal_insn(DisasContext *s, int offset, int excp)
 {
     gen_set_condexec(s);
-    gen_set_pc_im(s, s->pc - offset);
+    gen_set_pc_im(s, s->base.pc_next - offset);
     gen_exception_internal(s, excp);
     s->base.is_jmp = DISAS_NORETURN;
 }
@@ -1327,7 +1327,7 @@ static void gen_exception_insn(DisasContext *s, int offset, int excp,
                                int syn, uint32_t target_el)
 {
     gen_set_condexec(s);
-    gen_set_pc_im(s, s->pc - offset);
+    gen_set_pc_im(s, s->base.pc_next - offset);
     gen_exception(s, excp, syn, target_el);
     s->base.is_jmp = DISAS_NORETURN;
 }
@@ -1338,7 +1338,7 @@ static void gen_exception_bkpt_insn(DisasContext *s, int offset, uint32_t syn)
     TCGv_i32 tcg_syn;

     gen_set_condexec(s);
-    gen_set_pc_im(s, s->pc - offset);
+    gen_set_pc_im(s, s->base.pc_next - offset);
     tcg_syn = tcg_const_i32(tcg_ctx, syn);
     gen_helper_exception_bkpt_insn(tcg_ctx, tcg_ctx->cpu_env, tcg_syn);
     tcg_temp_free_i32(tcg_ctx, tcg_syn);
@@ -1349,7 +1349,7 @@ static void gen_exception_bkpt_insn(DisasContext *s, int offset, uint32_t syn)
 static inline void gen_lookup_tb(DisasContext *s)
 {
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
-    tcg_gen_movi_i32(tcg_ctx, tcg_ctx->cpu_R[15], s->pc);
+    tcg_gen_movi_i32(tcg_ctx, tcg_ctx->cpu_R[15], s->base.pc_next);
     s->base.is_jmp = DISAS_EXIT;
 }
@@ -3020,7 +3020,7 @@ static inline bool use_goto_tb(DisasContext *s, target_ulong dest)
 {
 #ifndef CONFIG_USER_ONLY
     return (s->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
-           ((s->pc - 1) & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
+           ((s->base.pc_next - 1) & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
 #else
     return true;
 #endif
@@ -3414,17 +3414,17 @@ static void gen_nop_hint(DisasContext *s, int val)
      */
     case 1: /* yield */
         if (!(tb_cflags(s->base.tb) & CF_PARALLEL)) {
-            gen_set_pc_im(s, s->pc);
+            gen_set_pc_im(s, s->base.pc_next);
             s->base.is_jmp = DISAS_YIELD;
         }
         break;
     case 3: /* wfi */
-        gen_set_pc_im(s, s->pc);
+        gen_set_pc_im(s, s->base.pc_next);
         s->base.is_jmp = DISAS_WFI;
         break;
     case 2: /* wfe */
         if (!(tb_cflags(s->base.tb) & CF_PARALLEL)) {
-            gen_set_pc_im(s, s->pc);
+            gen_set_pc_im(s, s->base.pc_next);
             s->base.is_jmp = DISAS_WFE;
         }
         break;
@@ -7401,7 +7401,7 @@ static int disas_coproc_insn(DisasContext *s, uint32_t insn)
             if (isread) {
                 return 1;
             }
-            gen_set_pc_im(s, s->pc);
+            gen_set_pc_im(s, s->base.pc_next);
             s->base.is_jmp = DISAS_WFI;
             return 0;
         default:
@@ -7895,8 +7895,8 @@ static void disas_arm_insn(DisasContext *s, unsigned int insn)
     }

     // Unicorn: trace this instruction on request
-    if (HOOK_EXISTS_BOUNDED(s->uc, UC_HOOK_CODE, s->pc - 4)) {
-        gen_uc_tracecode(tcg_ctx, 4, UC_HOOK_CODE_IDX, s->uc, s->pc - 4);
+    if (HOOK_EXISTS_BOUNDED(s->uc, UC_HOOK_CODE, s->pc_curr)) {
+        gen_uc_tracecode(tcg_ctx, 4, UC_HOOK_CODE_IDX, s->uc, s->pc_curr);
         // the callback might want to stop emulation immediately
         check_exit_request(tcg_ctx);
     }
@@ -7988,7 +7988,7 @@ static void disas_arm_insn(DisasContext *s, unsigned int insn)
                  * self-modifying code correctly and also to take
                  * any pending interrupts immediately.
                  */
-                gen_goto_tb(s, 0, s->pc);
+                gen_goto_tb(s, 0, s->base.pc_next);
                 return;
             case 7: /* sb */
                 if ((insn & 0xf) || !dc_isar_feature(aa32_sb, s)) {
@@ -7999,7 +7999,7 @@ static void disas_arm_insn(DisasContext *s, unsigned int insn)
                  * for TCG; MB and end the TB instead.
                  */
                 tcg_gen_mb(tcg_ctx, TCG_MO_ALL | TCG_BAR_SC);
-                gen_goto_tb(s, 0, s->pc);
+                gen_goto_tb(s, 0, s->base.pc_next);
                 return;
             default:
                 goto illegal_op;
@@ -8055,7 +8055,7 @@ static void disas_arm_insn(DisasContext *s, unsigned int insn)
             int32_t offset;

             tmp = tcg_temp_new_i32(tcg_ctx);
-            tcg_gen_movi_i32(tcg_ctx, tmp, s->pc);
+            tcg_gen_movi_i32(tcg_ctx, tmp, s->base.pc_next);
             store_reg(s, 14, tmp);
             /* Sign-extend the 24-bit offset */
             offset = (((int32_t)insn) << 8) >> 8;
@@ -8240,7 +8240,7 @@ static void disas_arm_insn(DisasContext *s, unsigned int insn)
             /* branch link/exchange thumb (blx) */
             tmp = load_reg(s, rm);
             tmp2 = tcg_temp_new_i32(tcg_ctx);
-            tcg_gen_movi_i32(tcg_ctx, tmp2, s->pc);
+            tcg_gen_movi_i32(tcg_ctx, tmp2, s->base.pc_next);
             store_reg(s, 14, tmp2);
             gen_bx(s, tmp);
             break;
@@ -9400,7 +9400,7 @@ static void disas_arm_insn(DisasContext *s, unsigned int insn)
             /* branch (and link) */
             if (insn & (1 << 24)) {
                 tmp = tcg_temp_new_i32(tcg_ctx);
-                tcg_gen_movi_i32(tcg_ctx, tmp, s->pc);
+                tcg_gen_movi_i32(tcg_ctx, tmp, s->base.pc_next);
                 store_reg(s, 14, tmp);
             }
             offset = sextract32(insn << 2, 0, 26);
@@ -9422,7 +9422,7 @@ static void disas_arm_insn(DisasContext *s, unsigned int insn)
             break;
         case 0xf:
             /* swi */
-            gen_set_pc_im(s, s->pc);
+            gen_set_pc_im(s, s->base.pc_next);
             s->svc_imm = extract32(insn, 0, 24);
             s->base.is_jmp = DISAS_SWI;
             break;
@@ -10506,7 +10506,7 @@ static void disas_thumb2_insn(DisasContext *s, uint32_t insn)

         if (insn & (1 << 14)) {
             /* Branch and link. */
-            tcg_gen_movi_i32(tcg_ctx, tcg_ctx->cpu_R[14], s->pc | 1);
+            tcg_gen_movi_i32(tcg_ctx, tcg_ctx->cpu_R[14], s->base.pc_next | 1);
         }

         offset += read_pc(s);
@@ -10629,7 +10629,7 @@ static void disas_thumb2_insn(DisasContext *s, uint32_t insn)
                  * and also to take any pending interrupts
                  * immediately.
                  */
-                gen_goto_tb(s, 0, s->pc);
+                gen_goto_tb(s, 0, s->base.pc_next);
                 break;
             case 7: /* sb */
                 if ((insn & 0xf) || !dc_isar_feature(aa32_sb, s)) {
@@ -10640,7 +10640,7 @@ static void disas_thumb2_insn(DisasContext *s, uint32_t insn)
                  * for TCG; MB and end the TB instead.
                  */
                 tcg_gen_mb(tcg_ctx, TCG_MO_ALL | TCG_BAR_SC);
-                gen_goto_tb(s, 0, s->pc);
+                gen_goto_tb(s, 0, s->base.pc_next);
                 break;
             default:
                 goto illegal_op;
@@ -11302,7 +11302,7 @@ static void disas_thumb_insn(DisasContext *s, uint32_t insn)
             /* BLX/BX */
             tmp = load_reg(s, rm);
             if (link) {
-                val = (uint32_t)s->pc | 1;
+                val = (uint32_t)s->base.pc_next | 1;
                 tmp2 = tcg_temp_new_i32(tcg_ctx);
                 tcg_gen_movi_i32(tcg_ctx, tmp2, val);
                 store_reg(s, 14, tmp2);
@@ -11876,7 +11876,7 @@ static void disas_thumb_insn(DisasContext *s, uint32_t insn)

         if (cond == 0xf) {
             /* swi */
-            gen_set_pc_im(s, s->pc);
+            gen_set_pc_im(s, s->base.pc_next);
             s->svc_imm = extract32(insn, 0, 8);
             s->base.is_jmp = DISAS_SWI;
             break;
@@ -11905,7 +11905,7 @@ static void disas_thumb_insn(DisasContext *s, uint32_t insn)
             tcg_gen_andi_i32(tcg_ctx, tmp, tmp, 0xfffffffc);
             tmp2 = tcg_temp_new_i32(tcg_ctx);
-            tcg_gen_movi_i32(tcg_ctx, tmp2, s->pc | 1);
+            tcg_gen_movi_i32(tcg_ctx, tmp2, s->base.pc_next | 1);
             store_reg(s, 14, tmp2);
             gen_bx(s, tmp);
             break;
@@ -11930,7 +11930,7 @@ static void disas_thumb_insn(DisasContext *s, uint32_t insn)
             tcg_gen_addi_i32(tcg_ctx, tmp, tmp, offset);
             tmp2 = tcg_temp_new_i32(tcg_ctx);
-            tcg_gen_movi_i32(tcg_ctx, tmp2, s->pc | 1);
+            tcg_gen_movi_i32(tcg_ctx, tmp2, s->base.pc_next | 1);
             store_reg(s, 14, tmp2);
             gen_bx(s, tmp);
         } else {
@@ -11950,16 +11950,16 @@ undef:
 static bool insn_crosses_page(CPUARMState *env, DisasContext *s)
 {
-    /* Return true if the insn at dc->pc might cross a page boundary.
+    /* Return true if the insn at dc->base.pc_next might cross a page boundary.
      * (False positives are OK, false negatives are not.)
      * We know this is a Thumb insn, and our caller ensures we are
-     * only called if dc->pc is less than 4 bytes from the page
+     * only called if dc->base.pc_next is less than 4 bytes from the page
      * boundary, so we cross the page if the first 16 bits indicate
      * that this is a 32 bit insn.
      */
-    uint16_t insn = arm_lduw_code(env, s->pc, s->sctlr_b);
+    uint16_t insn = arm_lduw_code(env, s->base.pc_next, s->sctlr_b);

-    return !thumb_insn_is_16bit(s, s->pc, insn);
+    return !thumb_insn_is_16bit(s, s->base.pc_next, insn);
 }

 static void arm_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
@@ -11973,7 +11973,6 @@ static void arm_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
     dc->uc = cs->uc;
     dc->isar = &cpu->isar;
-    dc->pc = dc->base.pc_first;
     dc->condjmp = 0;

     dc->aarch64 = 0;
@@ -12107,7 +12106,7 @@ static void arm_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
     DisasContext *dc = container_of(dcbase, DisasContext, base);
     TCGContext *tcg_ctx = cpu->uc->tcg_ctx;

-    tcg_gen_insn_start(tcg_ctx, dc->pc,
+    tcg_gen_insn_start(tcg_ctx, dc->base.pc_next,
                        (dc->condexec_cond << 4) | (dc->condexec_mask >> 1),
                        0);
     dc->insn_start = tcg_last_op(tcg_ctx);
@@ -12121,7 +12120,7 @@ static bool arm_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
     if (bp->flags & BP_CPU) {
         gen_set_condexec(dc);
-        gen_set_pc_im(dc, dc->pc);
+        gen_set_pc_im(dc, dc->base.pc_next);
         gen_helper_check_breakpoints(tcg_ctx, tcg_ctx->cpu_env);
         /* End the TB early; it's likely not going to be executed */
         dc->base.is_jmp = DISAS_TOO_MANY;
@@ -12134,7 +12133,7 @@ static bool arm_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
            tb->size below does the right thing. */
         /* TODO: Advance PC by correct instruction length to
          * avoid disassembler error messages */
-        dc->pc += 2;
+        dc->base.pc_next += 2;
         dc->base.is_jmp = DISAS_NORETURN;
     }
@@ -12145,7 +12144,7 @@ static bool arm_pre_translate_insn(DisasContext *dc)
 {
 #ifdef CONFIG_USER_ONLY
     /* Intercept jump to the magic kernel page. */
-    if (dc->pc >= 0xffff0000) {
+    if (dc->base.pc_next >= 0xffff0000) {
         /* We always get here via a jump, so know we are not in a
            conditional execution block. */
         gen_exception_internal(dc, EXCP_KERNEL_TRAP);
@@ -12155,7 +12154,7 @@ static bool arm_pre_translate_insn(DisasContext *dc)
 #endif

     // Unicorn: end address tells us to stop emulation
-    if (dc->pc == dc->uc->addr_end) {
+    if (dc->base.pc_next == dc->uc->addr_end) {
         // imitate WFI instruction to halt emulation
         dc->base.is_jmp = DISAS_WFI;
         return true;
@@ -12188,7 +12187,6 @@ static void arm_post_translate_insn(DisasContext *dc)
         gen_set_label(tcg_ctx, dc->condlabel);
         dc->condjmp = 0;
     }
-    dc->base.pc_next = dc->pc;
     translator_loop_temp_check(&dc->base);
 }
@@ -12202,10 +12200,10 @@ static void arm_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
         return;
     }

-    dc->pc_curr = dc->pc;
-    insn = arm_ldl_code(env, dc->pc, dc->sctlr_b);
+    dc->pc_curr = dc->base.pc_next;
+    insn = arm_ldl_code(env, dc->base.pc_next, dc->sctlr_b);
     dc->insn = insn;
-    dc->pc += 4;
+    dc->base.pc_next += 4;
     disas_arm_insn(dc, insn);

     arm_post_translate_insn(dc);
@@ -12272,15 +12270,15 @@ static void thumb_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
         return;
     }

-    dc->pc_curr = dc->pc;
-    insn = arm_lduw_code(env, dc->pc, dc->sctlr_b);
-    is_16bit = thumb_insn_is_16bit(dc, dc->pc, insn);
-    dc->pc += 2;
+    dc->pc_curr = dc->base.pc_next;
+    insn = arm_lduw_code(env, dc->base.pc_next, dc->sctlr_b);
+    is_16bit = thumb_insn_is_16bit(dc, dc->base.pc_next, insn);
+    dc->base.pc_next += 2;
     if (!is_16bit) {
-        uint32_t insn2 = arm_lduw_code(env, dc->pc, dc->sctlr_b);
+        uint32_t insn2 = arm_lduw_code(env, dc->base.pc_next, dc->sctlr_b);

         insn = insn << 16 | insn2;
-        dc->pc += 2;
+        dc->base.pc_next += 2;
     }
     dc->insn = insn;
@@ -12298,8 +12296,8 @@ static void thumb_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)

     // Unicorn: trace this instruction on request
     const uint32_t insn_size = is_16bit ? 2 : 4;
-    if (HOOK_EXISTS_BOUNDED(dc->uc, UC_HOOK_CODE, dc->pc - insn_size)) {
-        gen_uc_tracecode(tcg_ctx, insn_size, UC_HOOK_CODE_IDX, dc->uc, dc->pc - insn_size);
+    if (HOOK_EXISTS_BOUNDED(dc->uc, UC_HOOK_CODE, dc->base.pc_next - insn_size)) {
+        gen_uc_tracecode(tcg_ctx, insn_size, UC_HOOK_CODE_IDX, dc->uc, dc->base.pc_next - insn_size);
         // the callback might want to stop emulation immediately
         check_exit_request(tcg_ctx);
     }
@@ -12336,8 +12334,8 @@ static void thumb_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
      * but isn't very efficient).
      */
     if (dc->base.is_jmp == DISAS_NEXT
-        && (dc->pc - dc->page_start >= TARGET_PAGE_SIZE
-            || (dc->pc - dc->page_start >= TARGET_PAGE_SIZE - 3
+        && (dc->base.pc_next - dc->page_start >= TARGET_PAGE_SIZE
+            || (dc->base.pc_next - dc->page_start >= TARGET_PAGE_SIZE - 3
                 && insn_crosses_page(env, dc)))) {
         dc->base.is_jmp = DISAS_TOO_MANY;
     }
@@ -12383,7 +12381,7 @@ static void arm_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
         case DISAS_NEXT:
         case DISAS_TOO_MANY:
         case DISAS_UPDATE:
-            gen_set_pc_im(dc, dc->pc);
+            gen_set_pc_im(dc, dc->base.pc_next);
             /* fall through */
         default:
             /* FIXME: Single stepping a WFI insn will not halt the CPU. */
@@ -12404,13 +12402,13 @@ static void arm_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
         switch(dc->base.is_jmp) {
         case DISAS_NEXT:
         case DISAS_TOO_MANY:
-            gen_goto_tb(dc, 1, dc->pc);
+            gen_goto_tb(dc, 1, dc->base.pc_next);
             break;
         case DISAS_JUMP:
             gen_goto_ptr(dc);
             break;
         case DISAS_UPDATE:
-            gen_set_pc_im(dc, dc->pc);
+            gen_set_pc_im(dc, dc->base.pc_next);
             /* fall through */
         default:
             /* indicate that the hash table must be used to find the next TB */
@@ -12456,15 +12454,12 @@ static void arm_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
         gen_set_label(tcg_ctx, dc->condlabel);
         gen_set_condexec(dc);
         if (unlikely(is_singlestepping(dc))) {
-            gen_set_pc_im(dc, dc->pc);
+            gen_set_pc_im(dc, dc->base.pc_next);
             gen_singlestep_exception(dc);
         } else {
-            gen_goto_tb(dc, 1, dc->pc);
+            gen_goto_tb(dc, 1, dc->base.pc_next);
         }
     }
-
-    /* Functions above can change dc->pc, so re-align db->pc_next */
-    dc->base.pc_next = dc->pc;
 }

 static void arm_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)

@@ -9,7 +9,6 @@ typedef struct DisasContext {
     DisasContextBase base;
     const ARMISARegisters *isar;

-    target_ulong pc;
     /* The address of the current instruction being translated. */
     target_ulong pc_curr;
     target_ulong page_start;