mirror of
https://github.com/yuzu-emu/unicorn.git
synced 2024-12-31 23:35:38 +00:00
target/arm: implement SM3 instructions
This implements emulation of the new SM3 instructions that have been added as an optional extension to the ARMv8 Crypto Extensions in ARMv8.2. Backports commit 80d6f4c6bbb718f343a832df8dee15329cc7686c from QEMU.
This commit is contained in:
parent
72078a7674
commit
78d15a9cd0
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_aarch64
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_aarch64
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_aarch64
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_aarch64
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_aarch64
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_aarch64
|
||||
#define helper_dc_zva helper_dc_zva_aarch64
|
||||
#define helper_div_i32 helper_div_i32_aarch64
|
||||
#define helper_div_i64 helper_div_i64_aarch64
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_aarch64eb
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_aarch64eb
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_aarch64eb
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_aarch64eb
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_aarch64eb
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_aarch64eb
|
||||
#define helper_dc_zva helper_dc_zva_aarch64eb
|
||||
#define helper_div_i32 helper_div_i32_aarch64eb
|
||||
#define helper_div_i64 helper_div_i64_aarch64eb
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_arm
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_arm
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_arm
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_arm
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_arm
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_arm
|
||||
#define helper_dc_zva helper_dc_zva_arm
|
||||
#define helper_div_i32 helper_div_i32_arm
|
||||
#define helper_div_i64 helper_div_i64_arm
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_armeb
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_armeb
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_armeb
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_armeb
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_armeb
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_armeb
|
||||
#define helper_dc_zva helper_dc_zva_armeb
|
||||
#define helper_div_i32 helper_div_i32_armeb
|
||||
#define helper_div_i64 helper_div_i64_armeb
|
||||
|
|
|
@ -1607,6 +1607,9 @@ symbols = (
|
|||
'helper_crypto_sha512h2',
|
||||
'helper_crypto_sha512su0',
|
||||
'helper_crypto_sha512su1',
|
||||
'helper_crypto_sm3partw1',
|
||||
'helper_crypto_sm3partw2',
|
||||
'helper_crypto_sm3tt',
|
||||
'helper_dc_zva',
|
||||
'helper_div_i32',
|
||||
'helper_div_i64',
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_m68k
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_m68k
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_m68k
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_m68k
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_m68k
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_m68k
|
||||
#define helper_dc_zva helper_dc_zva_m68k
|
||||
#define helper_div_i32 helper_div_i32_m68k
|
||||
#define helper_div_i64 helper_div_i64_m68k
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_mips
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_mips
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_mips
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_mips
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_mips
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_mips
|
||||
#define helper_dc_zva helper_dc_zva_mips
|
||||
#define helper_div_i32 helper_div_i32_mips
|
||||
#define helper_div_i64 helper_div_i64_mips
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_mips64
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_mips64
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_mips64
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_mips64
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_mips64
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_mips64
|
||||
#define helper_dc_zva helper_dc_zva_mips64
|
||||
#define helper_div_i32 helper_div_i32_mips64
|
||||
#define helper_div_i64 helper_div_i64_mips64
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_mips64el
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_mips64el
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_mips64el
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_mips64el
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_mips64el
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_mips64el
|
||||
#define helper_dc_zva helper_dc_zva_mips64el
|
||||
#define helper_div_i32 helper_div_i32_mips64el
|
||||
#define helper_div_i64 helper_div_i64_mips64el
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_mipsel
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_mipsel
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_mipsel
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_mipsel
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_mipsel
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_mipsel
|
||||
#define helper_dc_zva helper_dc_zva_mipsel
|
||||
#define helper_div_i32 helper_div_i32_mipsel
|
||||
#define helper_div_i64 helper_div_i64_mipsel
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_powerpc
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_powerpc
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_powerpc
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_powerpc
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_powerpc
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_powerpc
|
||||
#define helper_dc_zva helper_dc_zva_powerpc
|
||||
#define helper_div_i32 helper_div_i32_powerpc
|
||||
#define helper_div_i64 helper_div_i64_powerpc
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_sparc
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_sparc
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_sparc
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_sparc
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_sparc
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_sparc
|
||||
#define helper_dc_zva helper_dc_zva_sparc
|
||||
#define helper_div_i32 helper_div_i32_sparc
|
||||
#define helper_div_i64 helper_div_i64_sparc
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_sparc64
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_sparc64
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_sparc64
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_sparc64
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_sparc64
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_sparc64
|
||||
#define helper_dc_zva helper_dc_zva_sparc64
|
||||
#define helper_div_i32 helper_div_i32_sparc64
|
||||
#define helper_div_i64 helper_div_i64_sparc64
|
||||
|
|
|
@ -1319,6 +1319,7 @@ enum arm_features {
|
|||
ARM_FEATURE_SVE, /* has Scalable Vector Extension */
|
||||
ARM_FEATURE_V8_SHA512, /* implements SHA512 part of v8 Crypto Extensions */
|
||||
ARM_FEATURE_V8_SHA3, /* implements SHA3 part of v8 Crypto Extensions */
|
||||
ARM_FEATURE_V8_SM3, /* implements SM3 part of v8 Crypto Extensions */
|
||||
};
|
||||
|
||||
static inline int arm_feature(CPUARMState *env, int feature)
|
||||
|
|
|
@ -550,3 +550,128 @@ void HELPER(crypto_sha512su1)(void *vd, void *vn, void *vm)
|
|||
rd[0] += s1_512(rn[0]) + rm[0];
|
||||
rd[1] += s1_512(rn[1]) + rm[1];
|
||||
}
|
||||
|
||||
void HELPER(crypto_sm3partw1)(void *vd, void *vn, void *vm)
|
||||
{
|
||||
uint64_t *rd = vd;
|
||||
uint64_t *rn = vn;
|
||||
uint64_t *rm = vm;
|
||||
union CRYPTO_STATE d;
|
||||
union CRYPTO_STATE n;
|
||||
union CRYPTO_STATE m;
|
||||
uint32_t t;
|
||||
|
||||
d.l[0] = rd[0];
|
||||
d.l[1] = rd[1];
|
||||
|
||||
n.l[0] = rn[0];
|
||||
n.l[1] = rn[1];
|
||||
|
||||
m.l[0] = rm[0];
|
||||
m.l[1] = rm[1];
|
||||
|
||||
t = CR_ST_WORD(d, 0) ^ CR_ST_WORD(n, 0) ^ ror32(CR_ST_WORD(m, 1), 17);
|
||||
CR_ST_WORD(d, 0) = t ^ ror32(t, 17) ^ ror32(t, 9);
|
||||
|
||||
t = CR_ST_WORD(d, 1) ^ CR_ST_WORD(n, 1) ^ ror32(CR_ST_WORD(m, 2), 17);
|
||||
CR_ST_WORD(d, 1) = t ^ ror32(t, 17) ^ ror32(t, 9);
|
||||
|
||||
t = CR_ST_WORD(d, 2) ^ CR_ST_WORD(n, 2) ^ ror32(CR_ST_WORD(m, 3), 17);
|
||||
CR_ST_WORD(d, 2) = t ^ ror32(t, 17) ^ ror32(t, 9);
|
||||
|
||||
t = CR_ST_WORD(d, 3) ^ CR_ST_WORD(n, 3) ^ ror32(CR_ST_WORD(d, 0), 17);
|
||||
CR_ST_WORD(d, 3) = t ^ ror32(t, 17) ^ ror32(t, 9);
|
||||
|
||||
rd[0] = d.l[0];
|
||||
rd[1] = d.l[1];
|
||||
}
|
||||
|
||||
void HELPER(crypto_sm3partw2)(void *vd, void *vn, void *vm)
|
||||
{
|
||||
uint64_t *rd = vd;
|
||||
uint64_t *rn = vn;
|
||||
uint64_t *rm = vm;
|
||||
union CRYPTO_STATE d;
|
||||
union CRYPTO_STATE n;
|
||||
union CRYPTO_STATE m;
|
||||
uint32_t t;
|
||||
|
||||
d.l[0] = rd[0];
|
||||
d.l[1] = rd[1];
|
||||
|
||||
n.l[0] = rn[0];
|
||||
n.l[1] = rn[1];
|
||||
|
||||
m.l[0] = rm[0];
|
||||
m.l[1] = rm[1];
|
||||
|
||||
t = CR_ST_WORD(n, 0) ^ ror32(CR_ST_WORD(m, 0), 25);
|
||||
|
||||
CR_ST_WORD(d, 0) ^= t;
|
||||
CR_ST_WORD(d, 1) ^= CR_ST_WORD(n, 1) ^ ror32(CR_ST_WORD(m, 1), 25);
|
||||
CR_ST_WORD(d, 2) ^= CR_ST_WORD(n, 2) ^ ror32(CR_ST_WORD(m, 2), 25);
|
||||
CR_ST_WORD(d, 3) ^= CR_ST_WORD(n, 3) ^ ror32(CR_ST_WORD(m, 3), 25) ^
|
||||
ror32(t, 17) ^ ror32(t, 2) ^ ror32(t, 26);
|
||||
|
||||
rd[0] = d.l[0];
|
||||
rd[1] = d.l[1];
|
||||
}
|
||||
|
||||
/*
 * SM3TT1A/SM3TT1B/SM3TT2A/SM3TT2B: one round of the SM3 compression
 * function.  opcode selects the variant (0=TT1A, 1=TT1B, 2=TT2A, 3=TT2B);
 * imm2 selects which 32-bit lane of Vm supplies the message word.
 */
void HELPER(crypto_sm3tt)(void *vd, void *vn, void *vm, uint32_t imm2,
                          uint32_t opcode)
{
    uint64_t *rd = vd;
    uint64_t *rn = vn;
    uint64_t *rm = vm;
    union CRYPTO_STATE d;
    union CRYPTO_STATE n;
    union CRYPTO_STATE m;
    uint32_t t;

    d.l[0] = rd[0];
    d.l[1] = rd[1];
    n.l[0] = rn[0];
    n.l[1] = rn[1];
    m.l[0] = rm[0];
    m.l[1] = rm[1];

    /* imm2 is a 2-bit field in the encoding; anything else is a decode bug. */
    assert(imm2 < 4);

    switch (opcode) {
    case 0: /* SM3TT1A */
    case 2: /* SM3TT2A */
        t = par(CR_ST_WORD(d, 3), CR_ST_WORD(d, 2), CR_ST_WORD(d, 1));
        break;
    case 1: /* SM3TT1B */
        t = maj(CR_ST_WORD(d, 3), CR_ST_WORD(d, 2), CR_ST_WORD(d, 1));
        break;
    case 3: /* SM3TT2B */
        t = cho(CR_ST_WORD(d, 3), CR_ST_WORD(d, 2), CR_ST_WORD(d, 1));
        break;
    default:
        g_assert_not_reached();
    }

    t += CR_ST_WORD(d, 0) + CR_ST_WORD(m, imm2);

    /* Rotate the working state down one lane; lane 3 receives the result. */
    CR_ST_WORD(d, 0) = CR_ST_WORD(d, 1);

    if (opcode < 2) {
        /* SM3TT1A, SM3TT1B */
        t += CR_ST_WORD(n, 3) ^ ror32(CR_ST_WORD(d, 3), 20);

        CR_ST_WORD(d, 1) = ror32(CR_ST_WORD(d, 2), 23);
    } else {
        /* SM3TT2A, SM3TT2B: apply the P0 permutation to the sum. */
        t += CR_ST_WORD(n, 3);
        t ^= rol32(t, 9) ^ rol32(t, 17);

        CR_ST_WORD(d, 1) = ror32(CR_ST_WORD(d, 2), 13);
    }

    CR_ST_WORD(d, 2) = CR_ST_WORD(d, 3);
    CR_ST_WORD(d, 3) = t;

    rd[0] = d.l[0];
    rd[1] = d.l[1];
}
|
||||
|
|
|
@ -541,6 +541,10 @@ DEF_HELPER_FLAGS_3(crypto_sha512h2, TCG_CALL_NO_RWG, void, ptr, ptr, ptr)
|
|||
DEF_HELPER_FLAGS_2(crypto_sha512su0, TCG_CALL_NO_RWG, void, ptr, ptr)
|
||||
DEF_HELPER_FLAGS_3(crypto_sha512su1, TCG_CALL_NO_RWG, void, ptr, ptr, ptr)
|
||||
|
||||
DEF_HELPER_FLAGS_5(crypto_sm3tt, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32, i32)
|
||||
DEF_HELPER_FLAGS_3(crypto_sm3partw1, TCG_CALL_NO_RWG, void, ptr, ptr, ptr)
|
||||
DEF_HELPER_FLAGS_3(crypto_sm3partw2, TCG_CALL_NO_RWG, void, ptr, ptr, ptr)
|
||||
|
||||
DEF_HELPER_FLAGS_3(crc32_arm, TCG_CALL_NO_RWG_SE, i32, i32, i32, i32)
|
||||
DEF_HELPER_FLAGS_3(crc32c, TCG_CALL_NO_RWG_SE, i32, i32, i32, i32)
|
||||
DEF_HELPER_2(dc_zva, void, env, i64)
|
||||
|
|
|
@ -11779,8 +11779,19 @@ static void disas_crypto_three_reg_sha512(DisasContext *s, uint32_t insn)
|
|||
break;
|
||||
}
|
||||
} else {
|
||||
unallocated_encoding(s);
|
||||
return;
|
||||
switch (opcode) {
|
||||
case 0: /* SM3PARTW1 */
|
||||
feature = ARM_FEATURE_V8_SM3;
|
||||
genfn = gen_helper_crypto_sm3partw1;
|
||||
break;
|
||||
case 1: /* SM3PARTW2 */
|
||||
feature = ARM_FEATURE_V8_SM3;
|
||||
genfn = gen_helper_crypto_sm3partw2;
|
||||
break;
|
||||
default:
|
||||
unallocated_encoding(s);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (!arm_dc_feature(s, feature)) {
|
||||
|
@ -11895,6 +11906,9 @@ static void disas_crypto_four_reg(DisasContext *s, uint32_t insn)
|
|||
case 1: /* BCAX */
|
||||
feature = ARM_FEATURE_V8_SHA3;
|
||||
break;
|
||||
case 2: /* SM3SS1 */
|
||||
feature = ARM_FEATURE_V8_SM3;
|
||||
break;
|
||||
default:
|
||||
unallocated_encoding(s);
|
||||
return;
|
||||
|
@ -11942,7 +11956,33 @@ static void disas_crypto_four_reg(DisasContext *s, uint32_t insn)
|
|||
tcg_temp_free_i64(tcg_ctx, tcg_res[0]);
|
||||
tcg_temp_free_i64(tcg_ctx, tcg_res[1]);
|
||||
} else {
|
||||
g_assert_not_reached();
|
||||
TCGv_i32 tcg_op1, tcg_op2, tcg_op3, tcg_res, tcg_zero;
|
||||
|
||||
tcg_op1 = tcg_temp_new_i32(tcg_ctx);
|
||||
tcg_op2 = tcg_temp_new_i32(tcg_ctx);
|
||||
tcg_op3 = tcg_temp_new_i32(tcg_ctx);
|
||||
tcg_res = tcg_temp_new_i32(tcg_ctx);
|
||||
tcg_zero = tcg_const_i32(tcg_ctx, 0);
|
||||
|
||||
read_vec_element_i32(s, tcg_op1, rn, 3, MO_32);
|
||||
read_vec_element_i32(s, tcg_op2, rm, 3, MO_32);
|
||||
read_vec_element_i32(s, tcg_op3, ra, 3, MO_32);
|
||||
|
||||
tcg_gen_rotri_i32(tcg_ctx, tcg_res, tcg_op1, 20);
|
||||
tcg_gen_add_i32(tcg_ctx, tcg_res, tcg_res, tcg_op2);
|
||||
tcg_gen_add_i32(tcg_ctx, tcg_res, tcg_res, tcg_op3);
|
||||
tcg_gen_rotri_i32(tcg_ctx, tcg_res, tcg_res, 25);
|
||||
|
||||
write_vec_element_i32(s, tcg_zero, rd, 0, MO_32);
|
||||
write_vec_element_i32(s, tcg_zero, rd, 1, MO_32);
|
||||
write_vec_element_i32(s, tcg_zero, rd, 2, MO_32);
|
||||
write_vec_element_i32(s, tcg_res, rd, 3, MO_32);
|
||||
|
||||
tcg_temp_free_i32(tcg_ctx, tcg_op1);
|
||||
tcg_temp_free_i32(tcg_ctx, tcg_op2);
|
||||
tcg_temp_free_i32(tcg_ctx, tcg_op3);
|
||||
tcg_temp_free_i32(tcg_ctx, tcg_res);
|
||||
tcg_temp_free_i32(tcg_ctx, tcg_zero);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -11992,6 +12032,48 @@ static void disas_crypto_xar(DisasContext *s, uint32_t insn)
|
|||
tcg_temp_free_i64(tcg_ctx, tcg_res[1]);
|
||||
}
|
||||
|
||||
/* Crypto three-reg imm2
|
||||
* 31 21 20 16 15 14 13 12 11 10 9 5 4 0
|
||||
* +-----------------------+------+-----+------+--------+------+------+
|
||||
* | 1 1 0 0 1 1 1 0 0 1 0 | Rm | 1 0 | imm2 | opcode | Rn | Rd |
|
||||
* +-----------------------+------+-----+------+--------+------+------+
|
||||
*/
|
||||
static void disas_crypto_three_reg_imm2(DisasContext *s, uint32_t insn)
|
||||
{
|
||||
TCGContext *tcg_ctx = s->uc->tcg_ctx;
|
||||
int opcode = extract32(insn, 10, 2);
|
||||
int imm2 = extract32(insn, 12, 2);
|
||||
int rm = extract32(insn, 16, 5);
|
||||
int rn = extract32(insn, 5, 5);
|
||||
int rd = extract32(insn, 0, 5);
|
||||
TCGv_ptr tcg_rd_ptr, tcg_rn_ptr, tcg_rm_ptr;
|
||||
TCGv_i32 tcg_imm2, tcg_opcode;
|
||||
|
||||
if (!arm_dc_feature(s, ARM_FEATURE_V8_SM3)) {
|
||||
unallocated_encoding(s);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!fp_access_check(s)) {
|
||||
return;
|
||||
}
|
||||
|
||||
tcg_rd_ptr = vec_full_reg_ptr(s, rd);
|
||||
tcg_rn_ptr = vec_full_reg_ptr(s, rn);
|
||||
tcg_rm_ptr = vec_full_reg_ptr(s, rm);
|
||||
tcg_imm2 = tcg_const_i32(tcg_ctx, imm2);
|
||||
tcg_opcode = tcg_const_i32(tcg_ctx, opcode);
|
||||
|
||||
gen_helper_crypto_sm3tt(tcg_ctx, tcg_rd_ptr, tcg_rn_ptr, tcg_rm_ptr, tcg_imm2,
|
||||
tcg_opcode);
|
||||
|
||||
tcg_temp_free_ptr(tcg_ctx, tcg_rd_ptr);
|
||||
tcg_temp_free_ptr(tcg_ctx, tcg_rn_ptr);
|
||||
tcg_temp_free_ptr(tcg_ctx, tcg_rm_ptr);
|
||||
tcg_temp_free_i32(tcg_ctx, tcg_imm2);
|
||||
tcg_temp_free_i32(tcg_ctx, tcg_opcode);
|
||||
}
|
||||
|
||||
/* C3.6 Data processing - SIMD, inc Crypto
|
||||
*
|
||||
* As the decode gets a little complex we are using a table based
|
||||
|
@ -12025,6 +12107,7 @@ static const AArch64DecodeTable data_proc_simd[] = {
|
|||
{ 0xcec08000, 0xfffff000, disas_crypto_two_reg_sha512 },
|
||||
{ 0xce000000, 0xff808000, disas_crypto_four_reg },
|
||||
{ 0xce800000, 0xffe00000, disas_crypto_xar },
|
||||
{ 0xce408000, 0xffe0c000, disas_crypto_three_reg_imm2 },
|
||||
{ 0x00000000, 0x00000000, NULL }
|
||||
};
|
||||
|
||||
|
|
|
@ -1601,6 +1601,9 @@
|
|||
#define helper_crypto_sha512h2 helper_crypto_sha512h2_x86_64
|
||||
#define helper_crypto_sha512su0 helper_crypto_sha512su0_x86_64
|
||||
#define helper_crypto_sha512su1 helper_crypto_sha512su1_x86_64
|
||||
#define helper_crypto_sm3partw1 helper_crypto_sm3partw1_x86_64
|
||||
#define helper_crypto_sm3partw2 helper_crypto_sm3partw2_x86_64
|
||||
#define helper_crypto_sm3tt helper_crypto_sm3tt_x86_64
|
||||
#define helper_dc_zva helper_dc_zva_x86_64
|
||||
#define helper_div_i32 helper_div_i32_x86_64
|
||||
#define helper_div_i64 helper_div_i64_x86_64
|
||||
|
|
Loading…
Reference in a new issue