Mirror of https://github.com/yuzu-emu/unicorn.git (synced 2024-12-23 18:15:30 +00:00)
tcg: Clean up direct block chaining data fields
Briefly describe in a comment how direct block chaining is done. It should help in understanding the following data fields.

Rename some fields in the TranslationBlock and TCGContext structures to better reflect their purpose (dropping the excessive 'tb_' prefix in TranslationBlock but keeping it in TCGContext):

    tb_next_offset => jmp_reset_offset
    tb_jmp_offset  => jmp_insn_offset
    tb_next        => jmp_target_addr
    jmp_next       => jmp_list_next
    jmp_first      => jmp_list_first

Avoid using a magic constant as an invalid offset which is used to indicate that there's no n-th jump generated.

Backports commit f309101c26b59641fc1aa8fb2a98a5441cdaea03 from qemu
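Not part of the patch, but a minimal sketch of what the last point buys (assuming the usual QEMU headers are in scope; the helper name is hypothetical): once the sentinel has a name, "was the n-th jump ever emitted?" reads as a check against TB_JMP_RESET_OFFSET_INVALID rather than a bare 0xffff.

    /* Hypothetical helper, for illustration only: a jump slot counts as
     * "not generated" while its reset offset still holds the sentinel. */
    static inline bool tb_has_direct_jump(const TranslationBlock *tb, int n)
    {
        return tb->jmp_reset_offset[n] != TB_JMP_RESET_OFFSET_INVALID;
    }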
This commit is contained in:
parent bb0b055a99
commit e60c24cecf
@@ -256,20 +256,32 @@ struct TranslationBlock {
     struct TranslationBlock *page_next[2];
     tb_page_addr_t page_addr[2];

-    /* the following data are used to directly call another TB from
-       the code of this one. */
-    uint16_t tb_next_offset[2]; /* offset of original jump target */
+    /* The following data are used to directly call another TB from
+     * the code of this one. This can be done either by emitting direct or
+     * indirect native jump instructions. These jumps are reset so that the TB
+     * just continue its execution. The TB can be linked to another one by
+     * setting one of the jump targets (or patching the jump instruction). Only
+     * two of such jumps are supported.
+     */
+    uint16_t jmp_reset_offset[2]; /* offset of original jump target */
+#define TB_JMP_RESET_OFFSET_INVALID 0xffff /* indicates no jump generated */
 #ifdef USE_DIRECT_JUMP
-    uint16_t tb_jmp_offset[2]; /* offset of jump instruction */
+    uint16_t jmp_insn_offset[2]; /* offset of native jump instruction */
 #else
-    uintptr_t tb_next[2]; /* address of jump generated code */
+    uintptr_t jmp_target_addr[2]; /* target address for indirect jump */
 #endif
-    /* list of TBs jumping to this one. This is a circular list using
-       the two least significant bits of the pointers to tell what is
-       the next pointer: 0 = jmp_next[0], 1 = jmp_next[1], 2 =
-       jmp_first */
-    struct TranslationBlock *jmp_next[2];
-    struct TranslationBlock *jmp_first;
+    /* Each TB has an assosiated circular list of TBs jumping to this one.
+     * jmp_list_first points to the first TB jumping to this one.
+     * jmp_list_next is used to point to the next TB in a list.
+     * Since each TB can have two jumps, it can participate in two lists.
+     * The two least significant bits of a pointer are used to choose which
+     * data field holds a pointer to the next TB:
+     * 0 => jmp_list_next[0], 1 => jmp_list_next[1], 2 => jmp_list_first.
+     * In other words, 0/1 tells which jump is used in the pointed TB,
+     * and 2 means that this is a pointer back to the target TB of this list.
+     */
+    struct TranslationBlock *jmp_list_next[2];
+    struct TranslationBlock *jmp_list_first;
 };

 typedef struct TBContext TBContext;
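To make the new comment concrete, here is a sketch of following the tag bits when walking the list (illustration only, not code from the patch; the function name and callback are hypothetical). The tb_phys_invalidate hunk further down does the same walk inline.

    /* Visit every TB that jumps into 'tb_target'.  Each link carries a
     * 2-bit tag in its low bits: 0/1 name which of the pointed-to TB's two
     * outgoing jumps is threaded through this list, 2 marks the link that
     * points back at the list head (the jump target itself). */
    static void tb_jmp_list_walk(TranslationBlock *tb_target,
                                 void (*visit)(TranslationBlock *tb, int n))
    {
        TranslationBlock *link = tb_target->jmp_list_first;

        for (;;) {
            uintptr_t n = (uintptr_t)link & 3;
            TranslationBlock *tb = (TranslationBlock *)((uintptr_t)link & ~3);

            if (n == 2) {
                break;              /* back at the head: end of the list */
            }
            visit(tb, (int)n);      /* tb's n-th jump targets tb_target */
            link = tb->jmp_list_next[n];
        }
    }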
@@ -334,7 +346,7 @@ void tb_set_jmp_target1(uintptr_t jmp_addr, uintptr_t addr);
 static inline void tb_set_jmp_target(TranslationBlock *tb,
                                      int n, uintptr_t addr)
 {
-    uint16_t offset = tb->tb_jmp_offset[n];
+    uint16_t offset = tb->jmp_insn_offset[n];
     tb_set_jmp_target1((uintptr_t)((char*)tb->tc_ptr + offset), addr);
 }
@@ -344,7 +356,7 @@ static inline void tb_set_jmp_target(TranslationBlock *tb,
 static inline void tb_set_jmp_target(TranslationBlock *tb,
                                      int n, uintptr_t addr)
 {
-    tb->tb_next[n] = addr;
+    tb->jmp_target_addr[n] = addr;
 }

 #endif
@@ -353,7 +365,7 @@ static inline void tb_add_jump(TranslationBlock *tb, int n,
                                TranslationBlock *tb_next)
 {
     /* NOTE: this test is only needed for thread safety */
-    if (!tb->jmp_next[n]) {
+    if (!tb->jmp_list_next[n]) {
         qemu_log_mask_and_addr(CPU_LOG_EXEC, tb->pc,
                                "Linking TBs %p [" TARGET_FMT_lx
                                "] index %d -> %p [" TARGET_FMT_lx "]\n",
@@ -363,8 +375,8 @@ static inline void tb_add_jump(TranslationBlock *tb, int n,
         tb_set_jmp_target(tb, n, (uintptr_t)tb_next->tc_ptr);

         /* add in TB jmp circular list */
-        tb->jmp_next[n] = tb_next->jmp_first;
-        tb_next->jmp_first = (TranslationBlock *)((uintptr_t)(tb) | (n));
+        tb->jmp_list_next[n] = tb_next->jmp_list_first;
+        tb_next->jmp_list_first = (TranslationBlock *)((uintptr_t)tb | n);
     }
 }
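For intuition, a hand-worked example (not from the patch): link TB A into target T through A's jump 0, then TB B through B's jump 1, and the tagged list reads as follows.

    /* Illustration only, low two pointer bits written as "| n":
     *
     *   T->jmp_list_first   == B | 1   (most recently linked TB sits at the head)
     *   B->jmp_list_next[1] == A | 0
     *   A->jmp_list_next[0] == T | 2   (tag 2: back at the target, end of list)
     *
     * A walk starting from T therefore visits (B, jump 1), then (A, jump 0),
     * and stops at the tag-2 link pointing back at T. */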
@@ -1310,12 +1310,13 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
 #ifndef USE_DIRECT_JUMP
 #error "USE_DIRECT_JUMP required for aarch64"
 #endif
-        tcg_debug_assert(s->tb_jmp_offset != NULL); /* consistency for USE_DIRECT_JUMP */
-        s->tb_jmp_offset[a0] = tcg_current_code_size(s);
+        /* consistency for USE_DIRECT_JUMP */
+        tcg_debug_assert(s->tb_jmp_insn_offset != NULL);
+        s->tb_jmp_insn_offset[a0] = tcg_current_code_size(s);
         /* actual branch destination will be patched by
            aarch64_tb_set_jmp_target later, beware retranslation. */
         tcg_out_goto_noaddr(s);
-        s->tb_next_offset[a0] = tcg_current_code_size(s);
+        s->tb_jmp_reset_offset[a0] = tcg_current_code_size(s);
         break;

     case INDEX_op_br:
@@ -1665,17 +1665,17 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_goto(s, COND_AL, tb_ret_addr);
         break;
     case INDEX_op_goto_tb:
-        if (s->tb_jmp_offset) {
+        if (s->tb_jmp_insn_offset) {
             /* Direct jump method */
-            s->tb_jmp_offset[args[0]] = tcg_current_code_size(s);
+            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
             tcg_out_b_noaddr(s, COND_AL);
         } else {
             /* Indirect jump method */
-            intptr_t ptr = (intptr_t)(s->tb_next + args[0]);
+            intptr_t ptr = (intptr_t)(s->tb_jmp_target_addr + args[0]);
             tcg_out_movi32(s, COND_AL, TCG_REG_R0, ptr & ~0xfff);
             tcg_out_ld32_12(s, COND_AL, TCG_REG_PC, TCG_REG_R0, ptr & 0xfff);
         }
-        s->tb_next_offset[args[0]] = tcg_current_code_size(s);
+        s->tb_jmp_reset_offset[args[0]] = tcg_current_code_size(s);
         break;
     case INDEX_op_br:
         tcg_out_goto_label(s, COND_AL, arg_label(s, args[0]));
@@ -1891,7 +1891,7 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_jmp(s, s->tb_ret_addr);
         break;
     case INDEX_op_goto_tb:
-        if (s->tb_jmp_offset) {
+        if (s->tb_jmp_insn_offset) {
             /* direct jump method */
             int gap;
             /* jump displacement must be aligned for atomic patching;
@@ -1902,14 +1902,14 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
                 tcg_out_nopn(s, gap - 1);
             }
             tcg_out8(s, OPC_JMP_long); /* jmp im */
-            s->tb_jmp_offset[args[0]] = tcg_current_code_size(s);
+            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
             tcg_out32(s, 0);
         } else {
             /* indirect jump method */
             tcg_out_modrm_offset(s, OPC_GRP5, EXT5_JMPN_Ev, -1,
-                                 (intptr_t)(s->tb_next + args[0]));
+                                 (intptr_t)(s->tb_jmp_target_addr + args[0]));
         }
-        s->tb_next_offset[args[0]] = tcg_current_code_size(s);
+        s->tb_jmp_reset_offset[args[0]] = tcg_current_code_size(s);
         break;
     case INDEX_op_br:
         tcg_out_jxx(s, JCC_JMP, arg_label(s, args[0]), 0);
@@ -1397,19 +1397,19 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
         }
         break;
     case INDEX_op_goto_tb:
-        if (s->tb_jmp_offset) {
+        if (s->tb_jmp_insn_offset) {
             /* direct jump method */
-            s->tb_jmp_offset[a0] = tcg_current_code_size(s);
+            s->tb_jmp_insn_offset[a0] = tcg_current_code_size(s);
             /* Avoid clobbering the address during retranslation. */
             tcg_out32(s, OPC_J | (*(uint32_t *)s->code_ptr & 0x3ffffff));
         } else {
             /* indirect jump method */
             tcg_out_ld(s, TCG_TYPE_PTR, TCG_TMP0, TCG_REG_ZERO,
-                       (uintptr_t)(s->tb_next + a0));
+                       (uintptr_t)(s->tb_jmp_target_addr + a0));
             tcg_out_opc_reg(s, OPC_JR, 0, TCG_TMP0, 0);
         }
         tcg_out_nop(s);
-        s->tb_next_offset[a0] = tcg_current_code_size(s);
+        s->tb_jmp_reset_offset[a0] = tcg_current_code_size(s);
         break;
     case INDEX_op_br:
         tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO,
@@ -1879,17 +1879,17 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
         tcg_out_b(s, 0, tb_ret_addr);
         break;
     case INDEX_op_goto_tb:
-        tcg_debug_assert(s->tb_jmp_offset);
+        tcg_debug_assert(s->tb_jmp_insn_offset);
         /* Direct jump. Ensure the next insns are 8-byte aligned. */
         if ((uintptr_t)s->code_ptr & 7) {
             tcg_out32(s, NOP);
         }
-        s->tb_jmp_offset[args[0]] = tcg_current_code_size(s);
+        s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
         /* To be replaced by either a branch+nop or a load into TMP1. */
         s->code_ptr += 2;
         tcg_out32(s, MTSPR | RS(TCG_REG_TMP1) | CTR);
         tcg_out32(s, BCCTR | BO_ALWAYS);
-        s->tb_next_offset[args[0]] = tcg_current_code_size(s);
+        s->tb_jmp_reset_offset[args[0]] = tcg_current_code_size(s);
         break;
     case INDEX_op_br:
         {
@@ -1722,7 +1722,7 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
         break;

     case INDEX_op_goto_tb:
-        if (s->tb_jmp_offset) {
+        if (s->tb_jmp_insn_offset) {
             /* branch displacement must be aligned for atomic patching;
              * see if we need to add extra nop before branch
              */
@@ -1730,15 +1730,16 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
                 tcg_out16(s, NOP);
             }
             tcg_out16(s, RIL_BRCL | (S390_CC_ALWAYS << 4));
-            s->tb_jmp_offset[args[0]] = tcg_current_code_size(s);
+            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
             s->code_ptr += 2;
         } else {
-            /* load address stored at s->tb_next + args[0] */
-            tcg_out_ld_abs(s, TCG_TYPE_PTR, TCG_TMP0, s->tb_next + args[0]);
+            /* load address stored at s->tb_jmp_target_addr + args[0] */
+            tcg_out_ld_abs(s, TCG_TYPE_PTR, TCG_TMP0,
+                           s->tb_jmp_target_addr + args[0]);
             /* and go there */
             tcg_out_insn(s, RR, BCR, S390_CC_ALWAYS, TCG_TMP0);
         }
-        s->tb_next_offset[args[0]] = tcg_current_code_size(s);
+        s->tb_jmp_reset_offset[args[0]] = tcg_current_code_size(s);
         break;

     OP_32_64(ld8u):
@@ -1231,18 +1231,19 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         }
         break;
     case INDEX_op_goto_tb:
-        if (s->tb_jmp_offset) {
+        if (s->tb_jmp_insn_offset) {
             /* direct jump method */
-            s->tb_jmp_offset[a0] = tcg_current_code_size(s);
+            s->tb_jmp_insn_offset[a0] = tcg_current_code_size(s);
             /* Make sure to preserve links during retranslation. */
             tcg_out32(s, CALL | (*s->code_ptr & ~INSN_OP(-1)));
         } else {
             /* indirect jump method */
-            tcg_out_ld_ptr(s, TCG_REG_T1, (uintptr_t)(s->tb_next + a0));
+            tcg_out_ld_ptr(s, TCG_REG_T1,
+                           (uintptr_t)(s->tb_jmp_target_addr + a0));
             tcg_out_arithi(s, TCG_REG_G0, TCG_REG_T1, 0, JMPL);
         }
         tcg_out_nop(s);
-        s->tb_next_offset[a0] = tcg_current_code_size(s);
+        s->tb_jmp_reset_offset[a0] = tcg_current_code_size(s);
         break;
     case INDEX_op_br:
         tcg_out_bpcc(s, COND_A, BPCC_PT, arg_label(s, a0));
@@ -638,9 +638,9 @@ struct TCGContext {

     /* goto_tb support */
     tcg_insn_unit *code_buf;
-    uintptr_t *tb_next;
-    uint16_t *tb_next_offset;
-    uint16_t *tb_jmp_offset; /* != NULL if USE_DIRECT_JUMP */
+    uint16_t *tb_jmp_reset_offset; /* tb->jmp_reset_offset */
+    uint16_t *tb_jmp_insn_offset; /* tb->jmp_insn_offset if USE_DIRECT_JUMP */
+    uintptr_t *tb_jmp_target_addr; /* tb->jmp_target_addr if !USE_DIRECT_JUMP */

     /* liveness analysis */
     uint16_t *op_dead_args; /* for each operation, each bit tells if the
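Every per-target INDEX_op_goto_tb hunk above drives these three pointers the same way; the following schematic is only a summary of that shared shape, not code from any backend, and emit_patchable_jump()/emit_load_and_jump() are hypothetical stand-ins for the real per-target instruction emitters.

    /* Hypothetical stand-ins for a target's real instruction emitters. */
    extern void emit_patchable_jump(TCGContext *s);
    extern void emit_load_and_jump(TCGContext *s, uintptr_t *addr_slot);

    /* Schematic only: the shape every INDEX_op_goto_tb case follows. */
    static void emit_goto_tb(TCGContext *s, int which)
    {
        if (s->tb_jmp_insn_offset) {
            /* direct jump: record where the patchable jump instruction starts */
            s->tb_jmp_insn_offset[which] = tcg_current_code_size(s);
            emit_patchable_jump(s);
        } else {
            /* indirect jump: go through the address slot that
             * tb_set_jmp_target fills in at link time */
            emit_load_and_jump(s, s->tb_jmp_target_addr + which);
        }
        /* either way, record where execution resumes once this link is reset */
        s->tb_jmp_reset_offset[which] = tcg_current_code_size(s);
    }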
@@ -1011,7 +1011,7 @@ static inline void tb_jmp_remove(TranslationBlock *tb, int n)
     TranslationBlock *tb1, **ptb;
     unsigned int n1;

-    ptb = &tb->jmp_next[n];
+    ptb = &tb->jmp_list_next[n];
     tb1 = *ptb;
     if (tb1) {
         /* find tb(n) in circular list */
@@ -1023,15 +1023,15 @@ static inline void tb_jmp_remove(TranslationBlock *tb, int n)
                 break;
             }
             if (n1 == 2) {
-                ptb = &tb1->jmp_first;
+                ptb = &tb1->jmp_list_first;
             } else {
-                ptb = &tb1->jmp_next[n1];
+                ptb = &tb1->jmp_list_next[n1];
             }
         }
         /* now we can suppress tb(n) from the list */
-        *ptb = tb->jmp_next[n];
+        *ptb = tb->jmp_list_next[n];

-        tb->jmp_next[n] = NULL;
+        tb->jmp_list_next[n] = NULL;
     }
 }
@@ -1039,7 +1039,8 @@ static inline void tb_jmp_remove(TranslationBlock *tb, int n)
    another TB */
 static inline void tb_reset_jump(TranslationBlock *tb, int n)
 {
-    tb_set_jmp_target(tb, n, (uintptr_t)((char*)tb->tc_ptr + tb->tb_next_offset[n]));
+    uintptr_t addr = (uintptr_t)(tb->tc_ptr + tb->jmp_reset_offset[n]);
+    tb_set_jmp_target(tb, n, addr);
 }

 /* invalidate one TB */
@@ -1083,19 +1084,21 @@ void tb_phys_invalidate(struct uc_struct *uc,
     tb_jmp_remove(tb, 1);

     /* suppress any remaining jumps to this TB */
-    tb1 = tb->jmp_first;
+    tb1 = tb->jmp_list_first;
     for (;;) {
         n1 = (uintptr_t)tb1 & 3;
         if (n1 == 2) {
             break;
         }
         tb1 = (TranslationBlock *)((uintptr_t)tb1 & ~3);
-        tb2 = tb1->jmp_next[n1];
+        tb2 = tb1->jmp_list_next[n1];
         tb_reset_jump(tb1, n1);
-        tb1->jmp_next[n1] = NULL;
+        tb1->jmp_list_next[n1] = NULL;
         tb1 = tb2;
     }
-    tb->jmp_first = (TranslationBlock *)((uintptr_t)tb | 2); /* fail safe */
+
+    /* fail safe */
+    tb->jmp_list_first = (TranslationBlock *)((uintptr_t)tb | 2);

     tcg_ctx->tb_ctx.tb_phys_invalidate_count++;
 }
@@ -1214,15 +1217,15 @@ TranslationBlock *tb_gen_code(CPUState *cpu,
     //trace_translate_block(tb, tb->pc, tb->tc_ptr);

     /* generate machine code */
-    tb->tb_next_offset[0] = 0xffff;
-    tb->tb_next_offset[1] = 0xffff;
-    tcg_ctx->tb_next_offset = tb->tb_next_offset;
+    tb->jmp_reset_offset[0] = TB_JMP_RESET_OFFSET_INVALID;
+    tb->jmp_reset_offset[1] = TB_JMP_RESET_OFFSET_INVALID;
+    tcg_ctx->tb_jmp_reset_offset = tb->jmp_reset_offset;
 #ifdef USE_DIRECT_JUMP
-    tcg_ctx->tb_jmp_offset = tb->tb_jmp_offset;
-    tcg_ctx->tb_next = NULL;
+    tcg_ctx->tb_jmp_insn_offset = tb->jmp_insn_offset;
+    tcg_ctx->tb_jmp_target_addr = NULL;
 #else
-    tcg_ctx->tb_jmp_offset = NULL;
-    tcg_ctx->tb_next = tb->tb_next;
+    tcg_ctx->tb_jmp_insn_offset = NULL;
+    tcg_ctx->tb_jmp_target_addr = tb->jmp_target_addr;
 #endif

 #ifdef CONFIG_PROFILER
@@ -1599,15 +1602,15 @@ static void tb_link_page(struct uc_struct *uc,
         tb->page_addr[1] = -1;
     }

-    tb->jmp_first = (TranslationBlock *)((uintptr_t)tb | 2);
-    tb->jmp_next[0] = NULL;
-    tb->jmp_next[1] = NULL;
+    tb->jmp_list_first = (TranslationBlock *)((uintptr_t)tb | 2);
+    tb->jmp_list_next[0] = NULL;
+    tb->jmp_list_next[1] = NULL;

     /* init original jump addresses */
-    if (tb->tb_next_offset[0] != 0xffff) {
+    if (tb->jmp_reset_offset[0] != TB_JMP_RESET_OFFSET_INVALID) {
         tb_reset_jump(tb, 0);
     }
-    if (tb->tb_next_offset[1] != 0xffff) {
+    if (tb->jmp_reset_offset[1] != TB_JMP_RESET_OFFSET_INVALID) {
         tb_reset_jump(tb, 1);
     }
@@ -1794,9 +1797,9 @@ void dump_exec_info(FILE *f, fprintf_function cpu_fprintf)
         if (tb->page_addr[1] != -1) {
             cross_page++;
         }
-        if (tb->tb_next_offset[0] != 0xffff) {
+        if (tb->jmp_reset_offset[0] != TB_JMP_RESET_OFFSET_INVALID) {
             direct_jmp_count++;
-            if (tb->tb_next_offset[1] != 0xffff) {
+            if (tb->jmp_reset_offset[1] != TB_JMP_RESET_OFFSET_INVALID) {
                 direct_jmp2_count++;
             }
         }