Mirror of https://github.com/yuzu-emu/unicorn.git (synced 2024-12-23 01:15:37 +00:00)
target/i386: move cpu_tmp2_i32 to DisasContext
Backports commit 6bd48f6f206b6f32a5bbeebc3ae6886d4f587981 from qemu
This commit is contained in:
parent c13337d1bc
commit 781e6bde41
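In rough outline, the patch gives each DisasContext its own 32-bit scratch temporary instead of going through the shared TCGContext field. A condensed view of the pattern, drawn from the hunks below (illustrative fragment, not a complete listing):

/* New DisasContext field (replaces the TCGContext-level cpu_tmp2_i32): */
TCGv_i32 tmp2_i32;

/* Allocated once per translation in i386_tr_init_disas_context(): */
dc->tmp2_i32 = tcg_temp_new_i32(tcg_ctx);

/* Typical call-site change, before ... */
TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
/* ... and after: */
tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);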
@@ -125,6 +125,7 @@ typedef struct DisasContext {
     TCGv tmp4;
     TCGv_ptr ptr0;
     TCGv_ptr ptr1;
+    TCGv_i32 tmp2_i32;
 
     sigjmp_buf jmpbuf;
     struct uc_struct *uc;
@@ -722,22 +723,21 @@ static void gen_check_io(DisasContext *s, TCGMemOp ot, target_ulong cur_eip,
 {
     target_ulong next_eip;
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
 
     // Unicorn: allow all I/O instructions
     return;
 
     if (s->pe && (s->cpl > s->iopl || s->vm86)) {
-        tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
+        tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
         switch (ot) {
         case MO_8:
-            gen_helper_check_iob(tcg_ctx, tcg_ctx->cpu_env, cpu_tmp2_i32);
+            gen_helper_check_iob(tcg_ctx, tcg_ctx->cpu_env, s->tmp2_i32);
            break;
         case MO_16:
-            gen_helper_check_iow(tcg_ctx, tcg_ctx->cpu_env, cpu_tmp2_i32);
+            gen_helper_check_iow(tcg_ctx, tcg_ctx->cpu_env, s->tmp2_i32);
            break;
         case MO_32:
-            gen_helper_check_iol(tcg_ctx, tcg_ctx->cpu_env, cpu_tmp2_i32);
+            gen_helper_check_iol(tcg_ctx, tcg_ctx->cpu_env, s->tmp2_i32);
            break;
         default:
            tcg_abort();
@@ -748,8 +748,8 @@ static void gen_check_io(DisasContext *s, TCGMemOp ot, target_ulong cur_eip,
         gen_jmp_im(s, cur_eip);
         svm_flags |= (1 << (4 + ot));
         next_eip = s->pc - s->cs_base;
-        tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-        gen_helper_svm_check_io(tcg_ctx, tcg_ctx->cpu_env, cpu_tmp2_i32,
+        tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+        gen_helper_svm_check_io(tcg_ctx, tcg_ctx->cpu_env, s->tmp2_i32,
                                 tcg_const_i32(tcg_ctx, svm_flags),
                                 tcg_const_i32(tcg_ctx, next_eip - cur_eip));
     }
@@ -1283,7 +1283,6 @@ static void gen_bpt_io(DisasContext *s, TCGv_i32 t_port, int ot)
 static inline void gen_ins(DisasContext *s, TCGMemOp ot)
 {
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
     TCGv *cpu_regs = tcg_ctx->cpu_regs;
 
     gen_string_movl_A0_EDI(s);
@@ -1291,33 +1290,32 @@ static inline void gen_ins(DisasContext *s, TCGMemOp ot)
        case of page fault. */
     tcg_gen_movi_tl(tcg_ctx, s->T0, 0);
     gen_op_st_v(s, ot, s->T0, s->A0);
-    tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, cpu_regs[R_EDX]);
-    tcg_gen_andi_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
-    gen_helper_in_func(s, ot, s->T0, cpu_tmp2_i32);
+    tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, cpu_regs[R_EDX]);
+    tcg_gen_andi_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, 0xffff);
+    gen_helper_in_func(s, ot, s->T0, s->tmp2_i32);
     gen_op_st_v(s, ot, s->T0, s->A0);
     gen_op_movl_T0_Dshift(s, ot);
     gen_op_add_reg_T0(s, s->aflag, R_EDI);
-    gen_bpt_io(s, cpu_tmp2_i32, ot);
+    gen_bpt_io(s, s->tmp2_i32, ot);
 }
 
 static inline void gen_outs(DisasContext *s, TCGMemOp ot)
 {
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
     TCGv_i32 cpu_tmp3_i32 = tcg_ctx->cpu_tmp3_i32;
     TCGv *cpu_regs = tcg_ctx->cpu_regs;
 
     gen_string_movl_A0_ESI(s);
     gen_op_ld_v(s, ot, s->T0, s->A0);
 
-    tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, cpu_regs[R_EDX]);
-    tcg_gen_andi_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
+    tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, cpu_regs[R_EDX]);
+    tcg_gen_andi_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, 0xffff);
     tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp3_i32, s->T0);
-    gen_helper_out_func(s, ot, cpu_tmp2_i32, cpu_tmp3_i32);
+    gen_helper_out_func(s, ot, s->tmp2_i32, cpu_tmp3_i32);
 
     gen_op_movl_T0_Dshift(s, ot);
     gen_op_add_reg_T0(s, s->aflag, R_ESI);
-    gen_bpt_io(s, cpu_tmp2_i32, ot);
+    gen_bpt_io(s, s->tmp2_i32, ot);
 }
 
 /* same method as Valgrind : we generate jumps to current or next
@@ -1564,7 +1562,6 @@ static void gen_shift_flags(DisasContext *s, TCGMemOp ot, TCGv result,
     TCGv_i32 z32, s32, oldop;
     TCGv z_tl;
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
     TCGv_i32 cpu_tmp3_i32 = tcg_ctx->cpu_tmp3_i32;
     TCGv_i32 cpu_cc_op = tcg_ctx->cpu_cc_op;
     TCGv cpu_cc_dst = tcg_ctx->cpu_cc_dst;
@@ -1589,7 +1586,7 @@ static void gen_shift_flags(DisasContext *s, TCGMemOp ot, TCGv result,
     tcg_temp_free(tcg_ctx, z_tl);
 
     /* Get the two potential CC_OP values into temporaries. */
-    tcg_gen_movi_i32(tcg_ctx, cpu_tmp2_i32, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot);
+    tcg_gen_movi_i32(tcg_ctx, s->tmp2_i32, (is_right ? CC_OP_SARB : CC_OP_SHLB) + ot);
     if (s->cc_op == CC_OP_DYNAMIC) {
         oldop = cpu_cc_op;
     } else {
@@ -1601,7 +1598,7 @@ static void gen_shift_flags(DisasContext *s, TCGMemOp ot, TCGv result,
     z32 = tcg_const_i32(tcg_ctx, 0);
     s32 = tcg_temp_new_i32(tcg_ctx);
     tcg_gen_trunc_tl_i32(tcg_ctx, s32, count);
-    tcg_gen_movcond_i32(tcg_ctx, TCG_COND_NE, cpu_cc_op, s32, z32, cpu_tmp2_i32, oldop);
+    tcg_gen_movcond_i32(tcg_ctx, TCG_COND_NE, cpu_cc_op, s32, z32, s->tmp2_i32, oldop);
     tcg_temp_free_i32(tcg_ctx, z32);
     tcg_temp_free_i32(tcg_ctx, s32);
 
@@ -1694,7 +1691,6 @@ static void gen_rot_rm_T1(DisasContext *s, TCGMemOp ot, int op1, int is_right)
     target_ulong mask = (ot == MO_64 ? 0x3f : 0x1f);
     TCGv_i32 t0, t1;
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
     TCGv_i32 cpu_tmp3_i32 = tcg_ctx->cpu_tmp3_i32;
     TCGv_i32 cpu_cc_op = tcg_ctx->cpu_cc_op;
     TCGv cpu_cc_dst = tcg_ctx->cpu_cc_dst;
@@ -1722,14 +1718,14 @@ static void gen_rot_rm_T1(DisasContext *s, TCGMemOp ot, int op1, int is_right)
     do_long:
 #ifdef TARGET_X86_64
     case MO_32:
-        tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
+        tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
         tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp3_i32, s->T1);
         if (is_right) {
-            tcg_gen_rotr_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
+            tcg_gen_rotr_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, cpu_tmp3_i32);
         } else {
-            tcg_gen_rotl_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
+            tcg_gen_rotl_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, cpu_tmp3_i32);
         }
-        tcg_gen_extu_i32_tl(tcg_ctx, s->T0, cpu_tmp2_i32);
+        tcg_gen_extu_i32_tl(tcg_ctx, s->T0, s->tmp2_i32);
         break;
 #endif
     default:
@@ -1769,10 +1765,10 @@ static void gen_rot_rm_T1(DisasContext *s, TCGMemOp ot, int op1, int is_right)
         t0 = tcg_const_i32(tcg_ctx, 0);
         t1 = tcg_temp_new_i32(tcg_ctx);
         tcg_gen_trunc_tl_i32(tcg_ctx, t1, s->T1);
-        tcg_gen_movi_i32(tcg_ctx, cpu_tmp2_i32, CC_OP_ADCOX);
+        tcg_gen_movi_i32(tcg_ctx, s->tmp2_i32, CC_OP_ADCOX);
         tcg_gen_movi_i32(tcg_ctx, cpu_tmp3_i32, CC_OP_EFLAGS);
         tcg_gen_movcond_i32(tcg_ctx, TCG_COND_NE, cpu_cc_op, t1, t0,
-                            cpu_tmp2_i32, cpu_tmp3_i32);
+                            s->tmp2_i32, cpu_tmp3_i32);
         tcg_temp_free_i32(tcg_ctx, t0);
         tcg_temp_free_i32(tcg_ctx, t1);
 
@@ -1789,10 +1785,6 @@ static void gen_rot_rm_im(DisasContext *s, TCGMemOp ot, int op1, int op2,
     TCGv cpu_cc_dst = tcg_ctx->cpu_cc_dst;
     TCGv cpu_cc_src2 = tcg_ctx->cpu_cc_src2;
 
-#ifdef TARGET_X86_64
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
-#endif
-
     /* load */
     if (op1 == OR_TMP0) {
         gen_op_ld_v(s, ot, s->T0, s->A0);
@@ -1805,13 +1797,13 @@ static void gen_rot_rm_im(DisasContext *s, TCGMemOp ot, int op1, int op2,
     switch (ot) {
 #ifdef TARGET_X86_64
     case MO_32:
-        tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
+        tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
         if (is_right) {
-            tcg_gen_rotri_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, op2);
+            tcg_gen_rotri_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, op2);
         } else {
-            tcg_gen_rotli_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, op2);
+            tcg_gen_rotli_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, op2);
         }
-        tcg_gen_extu_i32_tl(tcg_ctx, s->T0, cpu_tmp2_i32);
+        tcg_gen_extu_i32_tl(tcg_ctx, s->T0, s->tmp2_i32);
         break;
 #endif
     default:
@@ -2304,7 +2296,6 @@ static void gen_bndck(CPUX86State *env, DisasContext *s, int modrm,
 {
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
     TCGv_i64 cpu_tmp1_i64 = tcg_ctx->cpu_tmp1_i64;
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
 
     TCGv ea = gen_lea_modrm_1(s, gen_lea_modrm_0(env, s, modrm));
 
@@ -2313,8 +2304,8 @@ static void gen_bndck(CPUX86State *env, DisasContext *s, int modrm,
         tcg_gen_ext32u_i64(tcg_ctx, cpu_tmp1_i64, cpu_tmp1_i64);
     }
     tcg_gen_setcond_i64(tcg_ctx, cond, cpu_tmp1_i64, cpu_tmp1_i64, bndv);
-    tcg_gen_extrl_i64_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp1_i64);
-    gen_helper_bndck(tcg_ctx, tcg_ctx->cpu_env, cpu_tmp2_i32);
+    tcg_gen_extrl_i64_i32(tcg_ctx, s->tmp2_i32, cpu_tmp1_i64);
+    gen_helper_bndck(tcg_ctx, tcg_ctx->cpu_env, s->tmp2_i32);
 }
 
 /* used for LEA and MOV AX, mem */
@@ -2499,11 +2490,10 @@ static inline void gen_op_movl_seg_T0_vm(DisasContext *s, int seg_reg)
 static void gen_movl_seg_T0(DisasContext *s, int seg_reg)
 {
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
 
     if (s->pe && !s->vm86) {
-        tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-        gen_helper_load_seg(tcg_ctx, tcg_ctx->cpu_env, tcg_const_i32(tcg_ctx, seg_reg), cpu_tmp2_i32);
+        tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+        gen_helper_load_seg(tcg_ctx, tcg_ctx->cpu_env, tcg_const_i32(tcg_ctx, seg_reg), s->tmp2_i32);
         /* abort translation because the addseg value may change or
            because ss32 may change. For R_SS, translation must always
            stop as a special handling must be done to disable hardware
@@ -2952,8 +2942,8 @@ static inline void gen_op_movl(DisasContext *s, int d_offset, int s_offset)
 {
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
 
-    tcg_gen_ld_i32(tcg_ctx, tcg_ctx->cpu_tmp2_i32, tcg_ctx->cpu_env, s_offset);
-    tcg_gen_st_i32(tcg_ctx, tcg_ctx->cpu_tmp2_i32, tcg_ctx->cpu_env, d_offset);
+    tcg_gen_ld_i32(tcg_ctx, s->tmp2_i32, tcg_ctx->cpu_env, s_offset);
+    tcg_gen_st_i32(tcg_ctx, s->tmp2_i32, tcg_ctx->cpu_env, d_offset);
 }
 
 static inline void gen_op_movq_env_0(DisasContext *s, int d_offset)
@@ -3497,7 +3487,6 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
     TCGMemOp ot;
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
     TCGv_ptr cpu_env = tcg_ctx->cpu_env;
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
     TCGv_i32 cpu_tmp3_i32 = tcg_ctx->cpu_tmp3_i32;
     TCGv_i64 cpu_tmp1_i64 = tcg_ctx->cpu_tmp1_i64;
     TCGv cpu_cc_dst = tcg_ctx->cpu_cc_dst;
@@ -3617,8 +3606,8 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
             gen_ldst_modrm(env, s, modrm, MO_32, OR_TMP0, 0);
             tcg_gen_addi_ptr(tcg_ctx, s->ptr0, cpu_env,
                              offsetof(CPUX86State,fpregs[reg].mmx));
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-            gen_helper_movl_mm_T0_mmx(tcg_ctx, s->ptr0, cpu_tmp2_i32);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+            gen_helper_movl_mm_T0_mmx(tcg_ctx, s->ptr0, s->tmp2_i32);
         }
         break;
     case 0x16e: /* movd xmm, ea */
@@ -3634,8 +3623,8 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
             gen_ldst_modrm(env, s, modrm, MO_32, OR_TMP0, 0);
             tcg_gen_addi_ptr(tcg_ctx, s->ptr0, cpu_env,
                              offsetof(CPUX86State,xmm_regs[reg]));
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-            gen_helper_movl_mm_T0_xmm(tcg_ctx, s->ptr0, cpu_tmp2_i32);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+            gen_helper_movl_mm_T0_xmm(tcg_ctx, s->ptr0, s->tmp2_i32);
         }
         break;
     case 0x6f: /* movq mm, ea */
@@ -3936,15 +3925,15 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
         rm = (modrm & 7) | REX_B(s);
         tcg_gen_addi_ptr(tcg_ctx, s->ptr0, cpu_env,
                          offsetof(CPUX86State,xmm_regs[rm]));
-        gen_helper_movmskps(tcg_ctx, cpu_tmp2_i32, cpu_env, s->ptr0);
-        tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[reg], cpu_tmp2_i32);
+        gen_helper_movmskps(tcg_ctx, s->tmp2_i32, cpu_env, s->ptr0);
+        tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[reg], s->tmp2_i32);
         break;
     case 0x150: /* movmskpd */
         rm = (modrm & 7) | REX_B(s);
         tcg_gen_addi_ptr(tcg_ctx, s->ptr0, cpu_env,
                          offsetof(CPUX86State,xmm_regs[rm]));
-        gen_helper_movmskpd(tcg_ctx, cpu_tmp2_i32, cpu_env, s->ptr0);
-        tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[reg], cpu_tmp2_i32);
+        gen_helper_movmskpd(tcg_ctx, s->tmp2_i32, cpu_env, s->ptr0);
+        tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[reg], s->tmp2_i32);
         break;
     case 0x02a: /* cvtpi2ps */
     case 0x12a: /* cvtpi2pd */
@@ -3978,8 +3967,8 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
         tcg_gen_addi_ptr(tcg_ctx, s->ptr0, cpu_env, op1_offset);
         if (ot == MO_32) {
             SSEFunc_0_epi sse_fn_epi = sse_op_table3ai[(b >> 8) & 1];
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-            sse_fn_epi(tcg_ctx, cpu_env, s->ptr0, cpu_tmp2_i32);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+            sse_fn_epi(tcg_ctx, cpu_env, s->ptr0, s->tmp2_i32);
         } else {
 #ifdef TARGET_X86_64
             SSEFunc_0_epl sse_fn_epl = sse_op_table3aq[(b >> 8) & 1];
@@ -4042,8 +4031,8 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
         if (ot == MO_32) {
             SSEFunc_i_ep sse_fn_i_ep =
                 sse_op_table3bi[((b >> 7) & 2) | (b & 1)];
-            sse_fn_i_ep(tcg_ctx, cpu_tmp2_i32, cpu_env, s->ptr0);
-            tcg_gen_extu_i32_tl(tcg_ctx, s->T0, cpu_tmp2_i32);
+            sse_fn_i_ep(tcg_ctx, s->tmp2_i32, cpu_env, s->ptr0);
+            tcg_gen_extu_i32_tl(tcg_ctx, s->T0, s->tmp2_i32);
         } else {
 #ifdef TARGET_X86_64
             SSEFunc_l_ep sse_fn_l_ep =
@@ -4122,14 +4111,14 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
         if (b1) {
             rm = (modrm & 7) | REX_B(s);
             tcg_gen_addi_ptr(tcg_ctx, s->ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
-            gen_helper_pmovmskb_xmm(tcg_ctx, cpu_tmp2_i32, cpu_env, s->ptr0);
+            gen_helper_pmovmskb_xmm(tcg_ctx, s->tmp2_i32, cpu_env, s->ptr0);
         } else {
             rm = (modrm & 7);
             tcg_gen_addi_ptr(tcg_ctx, s->ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
-            gen_helper_pmovmskb_mmx(tcg_ctx, cpu_tmp2_i32, cpu_env, s->ptr0);
+            gen_helper_pmovmskb_mmx(tcg_ctx, s->tmp2_i32, cpu_env, s->ptr0);
         }
         reg = ((modrm >> 3) & 7) | rex_r;
-        tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[reg], cpu_tmp2_i32);
+        tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[reg], s->tmp2_i32);
         break;
 
     case 0x138:
@@ -4169,9 +4158,9 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
                 break;
             case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
             case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
-                tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                     s->mem_index, MO_LEUL);
-                tcg_gen_st_i32(tcg_ctx, cpu_tmp2_i32, cpu_env, op2_offset +
+                tcg_gen_st_i32(tcg_ctx, s->tmp2_i32, cpu_env, op2_offset +
                                offsetof(ZMMReg, ZMM_L(0)));
                 break;
             case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
@@ -4233,9 +4222,9 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
             ot = MO_64;
         }
 
-        tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, cpu_regs[reg]);
+        tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, cpu_regs[reg]);
         gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
-        gen_helper_crc32(tcg_ctx, s->T0, cpu_tmp2_i32,
+        gen_helper_crc32(tcg_ctx, s->T0, s->tmp2_i32,
                          s->T0, tcg_const_i32(tcg_ctx, 8 << ot));
 
         ot = mo_64_32(s->dflag);
@@ -4363,11 +4352,11 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
         gen_ldst_modrm(env, s, modrm, ot, OR_TMP0, 0);
         switch (ot) {
         default:
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
             tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp3_i32, cpu_regs[R_EDX]);
-            tcg_gen_mulu2_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp3_i32,
-                              cpu_tmp2_i32, cpu_tmp3_i32);
-            tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[s->vex_v], cpu_tmp2_i32);
+            tcg_gen_mulu2_i32(tcg_ctx, s->tmp2_i32, cpu_tmp3_i32,
+                              s->tmp2_i32, cpu_tmp3_i32);
+            tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[s->vex_v], s->tmp2_i32);
             tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[reg], cpu_tmp3_i32);
             break;
 #ifdef TARGET_X86_64
@@ -4615,13 +4604,13 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
             break;
         case 0x16:
             if (ot == MO_32) { /* pextrd */
-                tcg_gen_ld_i32(tcg_ctx, cpu_tmp2_i32, cpu_env,
+                tcg_gen_ld_i32(tcg_ctx, s->tmp2_i32, cpu_env,
                                offsetof(CPUX86State,
                                         xmm_regs[reg].ZMM_L(val & 3)));
                 if (mod == 3) {
-                    tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[rm], cpu_tmp2_i32);
+                    tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[rm], s->tmp2_i32);
                 } else {
-                    tcg_gen_qemu_st_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    tcg_gen_qemu_st_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUL);
                 }
             } else { /* pextrq */
@@ -4662,14 +4651,14 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
             break;
         case 0x21: /* insertps */
            if (mod == 3) {
-                tcg_gen_ld_i32(tcg_ctx, cpu_tmp2_i32, cpu_env,
+                tcg_gen_ld_i32(tcg_ctx, s->tmp2_i32, cpu_env,
                                offsetof(CPUX86State,xmm_regs[rm]
                                         .ZMM_L((val >> 6) & 3)));
             } else {
-                tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                     s->mem_index, MO_LEUL);
             }
-            tcg_gen_st_i32(tcg_ctx, cpu_tmp2_i32, cpu_env,
+            tcg_gen_st_i32(tcg_ctx, s->tmp2_i32, cpu_env,
                            offsetof(CPUX86State,xmm_regs[reg]
                                     .ZMM_L((val >> 4) & 3)));
             if ((val >> 0) & 1)
@@ -4692,12 +4681,12 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
         case 0x22:
             if (ot == MO_32) { /* pinsrd */
                 if (mod == 3) {
-                    tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, cpu_regs[rm]);
+                    tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, cpu_regs[rm]);
                 } else {
-                    tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUL);
                 }
-                tcg_gen_st_i32(tcg_ctx, cpu_tmp2_i32, cpu_env,
+                tcg_gen_st_i32(tcg_ctx, s->tmp2_i32, cpu_env,
                                offsetof(CPUX86State,
                                         xmm_regs[reg].ZMM_L(val & 3)));
             } else { /* pinsrq */
@@ -4774,9 +4763,9 @@ static void gen_sse(CPUX86State *env, DisasContext *s, int b,
         if (ot == MO_64) {
             tcg_gen_rotri_tl(tcg_ctx, s->T0, s->T0, b & 63);
         } else {
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-            tcg_gen_rotri_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, b & 31);
-            tcg_gen_extu_i32_tl(tcg_ctx, s->T0, cpu_tmp2_i32);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+            tcg_gen_rotri_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, b & 31);
+            tcg_gen_extu_i32_tl(tcg_ctx, s->T0, s->tmp2_i32);
         }
         gen_op_mov_reg_v(s, ot, reg, s->T0);
         break;
@@ -4958,7 +4947,6 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
     target_ulong pc_start = s->base.pc_next;
     TCGContext *tcg_ctx = s->uc->tcg_ctx;
     TCGv_ptr cpu_env = tcg_ctx->cpu_env;
-    TCGv_i32 cpu_tmp2_i32 = tcg_ctx->cpu_tmp2_i32;
     TCGv_i32 cpu_tmp3_i32 = tcg_ctx->cpu_tmp3_i32;
     TCGv_i64 cpu_tmp1_i64 = tcg_ctx->cpu_tmp1_i64;
     TCGv cpu_cc_dst = tcg_ctx->cpu_cc_dst;
@@ -5431,11 +5419,11 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             break;
         default:
         case MO_32:
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
             tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp3_i32, cpu_regs[R_EAX]);
-            tcg_gen_mulu2_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp3_i32,
-                              cpu_tmp2_i32, cpu_tmp3_i32);
-            tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[R_EAX], cpu_tmp2_i32);
+            tcg_gen_mulu2_i32(tcg_ctx, s->tmp2_i32, cpu_tmp3_i32,
+                              s->tmp2_i32, cpu_tmp3_i32);
+            tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[R_EAX], s->tmp2_i32);
             tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[R_EDX], cpu_tmp3_i32);
             tcg_gen_mov_tl(tcg_ctx, cpu_cc_dst, cpu_regs[R_EAX]);
             tcg_gen_mov_tl(tcg_ctx, cpu_cc_src, cpu_regs[R_EDX]);
@@ -5482,16 +5470,16 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             break;
         default:
         case MO_32:
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
             tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp3_i32, cpu_regs[R_EAX]);
-            tcg_gen_muls2_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp3_i32,
-                              cpu_tmp2_i32, cpu_tmp3_i32);
-            tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[R_EAX], cpu_tmp2_i32);
+            tcg_gen_muls2_i32(tcg_ctx, s->tmp2_i32, cpu_tmp3_i32,
+                              s->tmp2_i32, cpu_tmp3_i32);
+            tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[R_EAX], s->tmp2_i32);
             tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[R_EDX], cpu_tmp3_i32);
-            tcg_gen_sari_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, 31);
+            tcg_gen_sari_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, 31);
             tcg_gen_mov_tl(tcg_ctx, cpu_cc_dst, cpu_regs[R_EAX]);
-            tcg_gen_sub_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
-            tcg_gen_extu_i32_tl(tcg_ctx, cpu_cc_src, cpu_tmp2_i32);
+            tcg_gen_sub_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, cpu_tmp3_i32);
+            tcg_gen_extu_i32_tl(tcg_ctx, cpu_cc_src, s->tmp2_i32);
             set_cc_op(s, CC_OP_MULL);
             break;
 #ifdef TARGET_X86_64
@@ -5612,13 +5600,13 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             gen_op_ld_v(s, MO_16, s->T0, s->A0);
         do_lcall:
             if (s->pe && !s->vm86) {
-                tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-                gen_helper_lcall_protected(tcg_ctx, cpu_env, cpu_tmp2_i32, s->T1,
+                tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+                gen_helper_lcall_protected(tcg_ctx, cpu_env, s->tmp2_i32, s->T1,
                                            tcg_const_i32(tcg_ctx, dflag - 1),
                                            tcg_const_tl(tcg_ctx, s->pc - s->cs_base));
             } else {
-                tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-                gen_helper_lcall_real(tcg_ctx, cpu_env, cpu_tmp2_i32, s->T1,
+                tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+                gen_helper_lcall_real(tcg_ctx, cpu_env, s->tmp2_i32, s->T1,
                                       tcg_const_i32(tcg_ctx, dflag - 1),
                                       tcg_const_i32(tcg_ctx, s->pc - s->cs_base));
             }
@@ -5639,8 +5627,8 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             gen_op_ld_v(s, MO_16, s->T0, s->A0);
         do_ljmp:
             if (s->pe && !s->vm86) {
-                tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-                gen_helper_ljmp_protected(tcg_ctx, cpu_env, cpu_tmp2_i32, s->T1,
+                tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+                gen_helper_ljmp_protected(tcg_ctx, cpu_env, s->tmp2_i32, s->T1,
                                           tcg_const_tl(tcg_ctx, s->pc - s->cs_base));
             } else {
                 gen_op_movl_seg_T0_vm(s, R_CS);
@@ -5759,15 +5747,15 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             break;
 #endif
         case MO_32:
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
             tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp3_i32, s->T1);
-            tcg_gen_muls2_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp3_i32,
-                              cpu_tmp2_i32, cpu_tmp3_i32);
-            tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[reg], cpu_tmp2_i32);
-            tcg_gen_sari_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, 31);
+            tcg_gen_muls2_i32(tcg_ctx, s->tmp2_i32, cpu_tmp3_i32,
+                              s->tmp2_i32, cpu_tmp3_i32);
+            tcg_gen_extu_i32_tl(tcg_ctx, cpu_regs[reg], s->tmp2_i32);
+            tcg_gen_sari_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, 31);
             tcg_gen_mov_tl(tcg_ctx, cpu_cc_dst, cpu_regs[reg]);
-            tcg_gen_sub_i32(tcg_ctx, cpu_tmp2_i32, cpu_tmp2_i32, cpu_tmp3_i32);
-            tcg_gen_extu_i32_tl(tcg_ctx, cpu_cc_src, cpu_tmp2_i32);
+            tcg_gen_sub_i32(tcg_ctx, s->tmp2_i32, s->tmp2_i32, cpu_tmp3_i32);
+            tcg_gen_extu_i32_tl(tcg_ctx, cpu_cc_src, s->tmp2_i32);
             break;
         default:
             tcg_gen_ext16s_tl(tcg_ctx, s->T0, s->T0);
@@ -6385,14 +6373,14 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
 
                 switch(op >> 4) {
                 case 0:
-                    tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUL);
-                    gen_helper_flds_FT0(tcg_ctx, cpu_env, cpu_tmp2_i32);
+                    gen_helper_flds_FT0(tcg_ctx, cpu_env, s->tmp2_i32);
                     break;
                 case 1:
-                    tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUL);
-                    gen_helper_fildl_FT0(tcg_ctx, cpu_env, cpu_tmp2_i32);
+                    gen_helper_fildl_FT0(tcg_ctx, cpu_env, s->tmp2_i32);
                     break;
                 case 2:
                     tcg_gen_qemu_ld_i64(s->uc, cpu_tmp1_i64, s->A0,
@@ -6401,9 +6389,9 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
                     break;
                 case 3:
                 default:
-                    tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LESW);
-                    gen_helper_fildl_FT0(tcg_ctx, cpu_env, cpu_tmp2_i32);
+                    gen_helper_fildl_FT0(tcg_ctx, cpu_env, s->tmp2_i32);
                     break;
                 }
 
@@ -6424,14 +6412,14 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             case 0:
                 switch(op >> 4) {
                 case 0:
-                    tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUL);
-                    gen_helper_flds_ST0(tcg_ctx, cpu_env, cpu_tmp2_i32);
+                    gen_helper_flds_ST0(tcg_ctx, cpu_env, s->tmp2_i32);
                     break;
                 case 1:
-                    tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUL);
-                    gen_helper_fildl_ST0(tcg_ctx, cpu_env, cpu_tmp2_i32);
+                    gen_helper_fildl_ST0(tcg_ctx, cpu_env, s->tmp2_i32);
                     break;
                 case 2:
                     tcg_gen_qemu_ld_i64(s->uc, cpu_tmp1_i64, s->A0,
@@ -6440,9 +6428,9 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
                     break;
                 case 3:
                 default:
-                    tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LESW);
-                    gen_helper_fildl_ST0(tcg_ctx, cpu_env, cpu_tmp2_i32);
+                    gen_helper_fildl_ST0(tcg_ctx, cpu_env, s->tmp2_i32);
                     break;
                 }
                 break;
@@ -6450,8 +6438,8 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
                 /* XXX: the corresponding CPUID bit must be tested ! */
                 switch(op >> 4) {
                 case 1:
-                    gen_helper_fisttl_ST0(tcg_ctx, cpu_tmp2_i32, cpu_env);
-                    tcg_gen_qemu_st_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    gen_helper_fisttl_ST0(tcg_ctx, s->tmp2_i32, cpu_env);
+                    tcg_gen_qemu_st_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUL);
                     break;
                 case 2:
@@ -6461,8 +6449,8 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
                     break;
                 case 3:
                 default:
-                    gen_helper_fistt_ST0(tcg_ctx, cpu_tmp2_i32, cpu_env);
-                    tcg_gen_qemu_st_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    gen_helper_fistt_ST0(tcg_ctx, s->tmp2_i32, cpu_env);
+                    tcg_gen_qemu_st_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUW);
                     break;
                 }
@@ -6471,13 +6459,13 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             default:
                 switch(op >> 4) {
                 case 0:
-                    gen_helper_fsts_ST0(tcg_ctx, cpu_tmp2_i32, cpu_env);
-                    tcg_gen_qemu_st_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    gen_helper_fsts_ST0(tcg_ctx, s->tmp2_i32, cpu_env);
+                    tcg_gen_qemu_st_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUL);
                     break;
                 case 1:
-                    gen_helper_fistl_ST0(tcg_ctx, cpu_tmp2_i32, cpu_env);
-                    tcg_gen_qemu_st_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    gen_helper_fistl_ST0(tcg_ctx, s->tmp2_i32, cpu_env);
+                    tcg_gen_qemu_st_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUL);
                     break;
                 case 2:
@@ -6487,8 +6475,8 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
                     break;
                 case 3:
                 default:
-                    gen_helper_fist_ST0(tcg_ctx, cpu_tmp2_i32, cpu_env);
-                    tcg_gen_qemu_st_i32(s->uc, cpu_tmp2_i32, s->A0,
+                    gen_helper_fist_ST0(tcg_ctx, s->tmp2_i32, cpu_env);
+                    tcg_gen_qemu_st_i32(s->uc, s->tmp2_i32, s->A0,
                                         s->mem_index, MO_LEUW);
                     break;
                 }
@@ -6503,9 +6491,9 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             }
             else if(op == 0x0d) /* fldcw mem */
             {
-                tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0,
+                tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0,
                                     s->mem_index, MO_LEUW);
-                gen_helper_fldcw(tcg_ctx, cpu_env, cpu_tmp2_i32);
+                gen_helper_fldcw(tcg_ctx, cpu_env, s->tmp2_i32);
             }
             else if(op == 0x0e) /* fnstenv mem */
             {
@@ -6513,8 +6501,8 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             }
             else if(op == 0x0f) /* fnstcw mem */
             {
-                gen_helper_fnstcw(tcg_ctx, cpu_tmp2_i32, cpu_env);
-                tcg_gen_qemu_st_i32(s->uc, cpu_tmp2_i32, s->A0,
+                gen_helper_fnstcw(tcg_ctx, s->tmp2_i32, cpu_env);
+                tcg_gen_qemu_st_i32(s->uc, s->tmp2_i32, s->A0,
                                     s->mem_index, MO_LEUW);
             }
             else if(op == 0x1d) /* fldt mem */
@@ -6536,8 +6524,8 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             }
             else if(op == 0x2f) /* fnstsw mem */
             {
-                gen_helper_fnstsw(tcg_ctx, cpu_tmp2_i32, cpu_env);
-                tcg_gen_qemu_st_i32(s->uc, cpu_tmp2_i32, s->A0,
+                gen_helper_fnstsw(tcg_ctx, s->tmp2_i32, cpu_env);
+                tcg_gen_qemu_st_i32(s->uc, s->tmp2_i32, s->A0,
                                     s->mem_index, MO_LEUW);
             }
             else if(op == 0x3c) /* fbld */
@@ -6820,8 +6808,8 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
         case 0x3c: /* df/4 */
             switch(rm) {
             case 0:
-                gen_helper_fnstsw(tcg_ctx, cpu_tmp2_i32, cpu_env);
-                tcg_gen_extu_i32_tl(tcg_ctx, s->T0, cpu_tmp2_i32);
+                gen_helper_fnstsw(tcg_ctx, s->tmp2_i32, cpu_env);
+                tcg_gen_extu_i32_tl(tcg_ctx, s->T0, s->tmp2_i32);
                 gen_op_mov_reg_v(s, MO_16, R_EAX, s->T0);
                 break;
             default:
@@ -6964,10 +6952,10 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
         tcg_gen_movi_tl(tcg_ctx, s->T0, val);
         gen_check_io(s, ot, pc_start - s->cs_base,
                      SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
-        tcg_gen_movi_i32(tcg_ctx, cpu_tmp2_i32, val);
-        gen_helper_in_func(s, ot, s->T1, cpu_tmp2_i32);
+        tcg_gen_movi_i32(tcg_ctx, s->tmp2_i32, val);
+        gen_helper_in_func(s, ot, s->T1, s->tmp2_i32);
         gen_op_mov_reg_v(s, ot, R_EAX, s->T1);
-        gen_bpt_io(s, cpu_tmp2_i32, ot);
+        gen_bpt_io(s, s->tmp2_i32, ot);
         break;
     case 0xe6:
     case 0xe7:
@@ -6977,10 +6965,10 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
         gen_check_io(s, ot, pc_start - s->cs_base,
                      svm_is_rep(prefixes));
         gen_op_mov_v_reg(s, ot, s->T1, R_EAX);
-        tcg_gen_movi_i32(tcg_ctx, cpu_tmp2_i32, val);
+        tcg_gen_movi_i32(tcg_ctx, s->tmp2_i32, val);
         tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp3_i32, s->T1);
-        gen_helper_out_func(s, ot, cpu_tmp2_i32, cpu_tmp3_i32);
-        gen_bpt_io(s, cpu_tmp2_i32, ot);
+        gen_helper_out_func(s, ot, s->tmp2_i32, cpu_tmp3_i32);
+        gen_bpt_io(s, s->tmp2_i32, ot);
 
         break;
     case 0xec:
@@ -6989,10 +6977,10 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
         tcg_gen_ext16u_tl(tcg_ctx, s->T0, cpu_regs[R_EDX]);
         gen_check_io(s, ot, pc_start - s->cs_base,
                      SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
-        tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-        gen_helper_in_func(s, ot, s->T1, cpu_tmp2_i32);
+        tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+        gen_helper_in_func(s, ot, s->T1, s->tmp2_i32);
         gen_op_mov_reg_v(s, ot, R_EAX, s->T1);
-        gen_bpt_io(s, cpu_tmp2_i32, ot);
+        gen_bpt_io(s, s->tmp2_i32, ot);
         break;
     case 0xee:
     case 0xef:
@@ -7002,10 +6990,10 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
                      svm_is_rep(prefixes));
         gen_op_mov_v_reg(s, ot, s->T1, R_EAX);
 
-        tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
+        tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
         tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp3_i32, s->T1);
-        gen_helper_out_func(s, ot, cpu_tmp2_i32, cpu_tmp3_i32);
-        gen_bpt_io(s, cpu_tmp2_i32, ot);
+        gen_helper_out_func(s, ot, s->tmp2_i32, cpu_tmp3_i32);
+        gen_bpt_io(s, s->tmp2_i32, ot);
         break;
 
         /************************/
@@ -7287,12 +7275,12 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
         tcg_gen_ori_tl(tcg_ctx, cpu_cc_src, cpu_cc_src, CC_C);
         break;
     case 0xfc: /* cld */
-        tcg_gen_movi_i32(tcg_ctx, cpu_tmp2_i32, 1);
-        tcg_gen_st_i32(tcg_ctx, cpu_tmp2_i32, cpu_env, offsetof(CPUX86State, df));
+        tcg_gen_movi_i32(tcg_ctx, s->tmp2_i32, 1);
+        tcg_gen_st_i32(tcg_ctx, s->tmp2_i32, cpu_env, offsetof(CPUX86State, df));
         break;
     case 0xfd: /* std */
-        tcg_gen_movi_i32(tcg_ctx, cpu_tmp2_i32, -1);
-        tcg_gen_st_i32(tcg_ctx, cpu_tmp2_i32, cpu_env, offsetof(CPUX86State, df));
+        tcg_gen_movi_i32(tcg_ctx, s->tmp2_i32, -1);
+        tcg_gen_st_i32(tcg_ctx, s->tmp2_i32, cpu_env, offsetof(CPUX86State, df));
         break;
 
         /************************/
@@ -7626,11 +7614,11 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             goto illegal_op;
         gen_op_mov_v_reg(s, ot, s->T0, reg);
         gen_lea_modrm(env, s, modrm);
-        tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
+        tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
         if (ot == MO_16) {
-            gen_helper_boundw(tcg_ctx, cpu_env, s->A0, cpu_tmp2_i32);
+            gen_helper_boundw(tcg_ctx, cpu_env, s->A0, s->tmp2_i32);
         } else {
-            gen_helper_boundl(tcg_ctx, cpu_env, s->A0, cpu_tmp2_i32);
+            gen_helper_boundl(tcg_ctx, cpu_env, s->A0, s->tmp2_i32);
         }
         break;
     case 0x1c8: case 0x1c9: case 0x1ca: case 0x1cb:
@@ -7810,8 +7798,8 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
         } else {
             gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
             gen_ldst_modrm(env, s, modrm, MO_16, OR_TMP0, 0);
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-            gen_helper_lldt(tcg_ctx, cpu_env, cpu_tmp2_i32);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+            gen_helper_lldt(tcg_ctx, cpu_env, s->tmp2_i32);
         }
         break;
     case 1: /* str */
@@ -7830,8 +7818,8 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
         } else {
             gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
             gen_ldst_modrm(env, s, modrm, MO_16, OR_TMP0, 0);
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, s->T0);
-            gen_helper_ltr(tcg_ctx, cpu_env, cpu_tmp2_i32);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, s->T0);
+            gen_helper_ltr(tcg_ctx, cpu_env, s->tmp2_i32);
         }
         break;
     case 4: /* verr */
@@ -7931,8 +7919,8 @@ case 0x101:
                           | PREFIX_REPZ | PREFIX_REPNZ))) {
                 goto illegal_op;
             }
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, cpu_regs[R_ECX]);
-            gen_helper_xgetbv(tcg_ctx, cpu_tmp1_i64, cpu_env, cpu_tmp2_i32);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, cpu_regs[R_ECX]);
+            gen_helper_xgetbv(tcg_ctx, cpu_tmp1_i64, cpu_env, s->tmp2_i32);
             tcg_gen_extr_i64_tl(tcg_ctx, cpu_regs[R_EAX], cpu_regs[R_EDX], cpu_tmp1_i64);
             break;
 
@@ -7948,8 +7936,8 @@ case 0x101:
             }
             tcg_gen_concat_tl_i64(tcg_ctx, cpu_tmp1_i64, cpu_regs[R_EAX],
                                   cpu_regs[R_EDX]);
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, cpu_regs[R_ECX]);
-            gen_helper_xsetbv(tcg_ctx, cpu_env, cpu_tmp2_i32, cpu_tmp1_i64);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, cpu_regs[R_ECX]);
+            gen_helper_xsetbv(tcg_ctx, cpu_env, s->tmp2_i32, cpu_tmp1_i64);
             /* End TB because translation flags may change. */
             gen_jmp_im(s, s->pc - s->cs_base);
             gen_eob(s);
@@ -8112,8 +8100,8 @@ case 0x101:
             if (prefixes & PREFIX_LOCK) {
                 goto illegal_op;
             }
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, cpu_regs[R_ECX]);
-            gen_helper_rdpkru(tcg_ctx, cpu_tmp1_i64, cpu_env, cpu_tmp2_i32);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, cpu_regs[R_ECX]);
+            gen_helper_rdpkru(tcg_ctx, cpu_tmp1_i64, cpu_env, s->tmp2_i32);
             tcg_gen_extr_i64_tl(tcg_ctx, cpu_regs[R_EAX], cpu_regs[R_EDX], cpu_tmp1_i64);
             break;
         case 0xef: /* wrpkru */
@@ -8122,8 +8110,8 @@ case 0x101:
             }
             tcg_gen_concat_tl_i64(tcg_ctx, cpu_tmp1_i64, cpu_regs[R_EAX],
                                   cpu_regs[R_EDX]);
-            tcg_gen_trunc_tl_i32(tcg_ctx, cpu_tmp2_i32, cpu_regs[R_ECX]);
-            gen_helper_wrpkru(tcg_ctx, cpu_env, cpu_tmp2_i32, cpu_tmp1_i64);
+            tcg_gen_trunc_tl_i32(tcg_ctx, s->tmp2_i32, cpu_regs[R_ECX]);
+            gen_helper_wrpkru(tcg_ctx, cpu_env, s->tmp2_i32, cpu_tmp1_i64);
             break;
         CASE_MODRM_OP(6): /* lmsw */
             if (s->cpl != 0) {
@@ -8606,14 +8594,14 @@ case 0x101:
             if (b & 2) {
                 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                 gen_op_mov_v_reg(s, ot, s->T0, rm);
-                tcg_gen_movi_i32(tcg_ctx, cpu_tmp2_i32, reg);
-                gen_helper_set_dr(tcg_ctx, tcg_ctx->cpu_env, cpu_tmp2_i32, s->T0);
+                tcg_gen_movi_i32(tcg_ctx, s->tmp2_i32, reg);
+                gen_helper_set_dr(tcg_ctx, tcg_ctx->cpu_env, s->tmp2_i32, s->T0);
                 gen_jmp_im(s, s->pc - s->cs_base);
                 gen_eob(s);
             } else {
                 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
-                tcg_gen_movi_i32(tcg_ctx, cpu_tmp2_i32, reg);
-                gen_helper_get_dr(tcg_ctx, s->T0, tcg_ctx->cpu_env, cpu_tmp2_i32);
+                tcg_gen_movi_i32(tcg_ctx, s->tmp2_i32, reg);
+                gen_helper_get_dr(tcg_ctx, s->T0, tcg_ctx->cpu_env, s->tmp2_i32);
                 gen_op_mov_reg_v(s, ot, rm, s->T0);
             }
         }
@@ -8680,8 +8668,8 @@ case 0x101:
             break;
         }
         gen_lea_modrm(env, s, modrm);
-        tcg_gen_qemu_ld_i32(s->uc, cpu_tmp2_i32, s->A0, s->mem_index, MO_LEUL);
-        gen_helper_ldmxcsr(tcg_ctx, cpu_env, cpu_tmp2_i32);
+        tcg_gen_qemu_ld_i32(s->uc, s->tmp2_i32, s->A0, s->mem_index, MO_LEUL);
+        gen_helper_ldmxcsr(tcg_ctx, cpu_env, s->tmp2_i32);
         break;
 
     CASE_MODRM_MEM_OP(3): /* stmxcsr */
@@ -8808,8 +8796,8 @@ case 0x101:
         TCGv base, treg, src, dst;
 
         /* Preserve hflags bits by testing CR4 at runtime. */
-        tcg_gen_movi_i32(tcg_ctx, cpu_tmp2_i32, CR4_FSGSBASE_MASK);
-        gen_helper_cr4_testbit(tcg_ctx, cpu_env, cpu_tmp2_i32);
+        tcg_gen_movi_i32(tcg_ctx, s->tmp2_i32, CR4_FSGSBASE_MASK);
+        gen_helper_cr4_testbit(tcg_ctx, cpu_env, s->tmp2_i32);
 
         base = cpu_seg_base[modrm & 8 ? R_GS : R_FS];
         treg = cpu_regs[(modrm & 7) | REX_B(s)];
@@ -9135,7 +9123,7 @@ static void i386_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
     dc->tmp4 = tcg_temp_new(tcg_ctx);
 
     tcg_ctx->cpu_tmp1_i64 = tcg_temp_new_i64(tcg_ctx);
-    tcg_ctx->cpu_tmp2_i32 = tcg_temp_new_i32(tcg_ctx);
+    dc->tmp2_i32 = tcg_temp_new_i32(tcg_ctx);
     tcg_ctx->cpu_tmp3_i32 = tcg_temp_new_i32(tcg_ctx);
     dc->ptr0 = tcg_temp_new_ptr(tcg_ctx);
     dc->ptr1 = tcg_temp_new_ptr(tcg_ctx);
@@ -827,7 +827,7 @@ struct TCGContext {
     TCGv cpu_cc_src2;
 
     /* qemu/target-i386/translate.c: local register indexes (only used inside old micro ops) */
-    TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
+    TCGv_i32 cpu_tmp3_i32;
     TCGv_i64 cpu_tmp1_i64;
 
     /* qemu/tcg/i386/tcg-target.c */