target/riscv: add vector index load and store instructions

Vector indexed operations add the contents of each element of the
vector offset operand specified by vs2 to the base effective address
to give the effective address of each element.

Backports f732560e3551c0823cee52efba993fbb8f689a36
This commit is contained in:
LIU Zhiwei 2021-02-26 02:59:41 -05:00 committed by Lioncash
parent c7a17d04a2
commit 887c29bc79
7 changed files with 399 additions and 0 deletions

View file

@ -6174,6 +6174,41 @@ riscv_symbols = (
'helper_vsse_v_h',
'helper_vsse_v_w',
'helper_vsse_v_d',
# Vector indexed load helpers (vlx{b,h,w,e} signed, vlx{bu,hu,wu} unsigned),
# one symbol per supported element width suffix (_b/_h/_w/_d).
'helper_vlxb_v_b',
'helper_vlxb_v_h',
'helper_vlxb_v_w',
'helper_vlxb_v_d',
'helper_vlxh_v_h',
'helper_vlxh_v_w',
'helper_vlxh_v_d',
'helper_vlxw_v_w',
'helper_vlxw_v_d',
'helper_vlxe_v_b',
'helper_vlxe_v_h',
'helper_vlxe_v_w',
'helper_vlxe_v_d',
'helper_vlxbu_v_b',
'helper_vlxbu_v_h',
'helper_vlxbu_v_w',
'helper_vlxbu_v_d',
'helper_vlxhu_v_h',
'helper_vlxhu_v_w',
'helper_vlxhu_v_d',
'helper_vlxwu_v_w',
'helper_vlxwu_v_d',
# Vector indexed store helpers (vsx{b,h,w,e}).
'helper_vsxb_v_b',
'helper_vsxb_v_h',
'helper_vsxb_v_w',
'helper_vsxb_v_d',
'helper_vsxh_v_h',
'helper_vsxh_v_w',
'helper_vsxh_v_d',
'helper_vsxw_v_w',
'helper_vsxw_v_d',
'helper_vsxe_v_b',
'helper_vsxe_v_h',
'helper_vsxe_v_w',
'helper_vsxe_v_d',
'pmp_hart_has_privs',
'pmpaddr_csr_read',
'pmpaddr_csr_write',

View file

@ -3628,6 +3628,41 @@
#define helper_vsse_v_h helper_vsse_v_h_riscv32
#define helper_vsse_v_w helper_vsse_v_w_riscv32
#define helper_vsse_v_d helper_vsse_v_d_riscv32
/*
 * Vector indexed load/store helpers: append a _riscv32 suffix to each
 * symbol so the riscv32 and riscv64 targets can coexist in one binary
 * without link-time symbol clashes.
 */
#define helper_vlxb_v_b helper_vlxb_v_b_riscv32
#define helper_vlxb_v_h helper_vlxb_v_h_riscv32
#define helper_vlxb_v_w helper_vlxb_v_w_riscv32
#define helper_vlxb_v_d helper_vlxb_v_d_riscv32
#define helper_vlxh_v_h helper_vlxh_v_h_riscv32
#define helper_vlxh_v_w helper_vlxh_v_w_riscv32
#define helper_vlxh_v_d helper_vlxh_v_d_riscv32
#define helper_vlxw_v_w helper_vlxw_v_w_riscv32
#define helper_vlxw_v_d helper_vlxw_v_d_riscv32
#define helper_vlxe_v_b helper_vlxe_v_b_riscv32
#define helper_vlxe_v_h helper_vlxe_v_h_riscv32
#define helper_vlxe_v_w helper_vlxe_v_w_riscv32
#define helper_vlxe_v_d helper_vlxe_v_d_riscv32
#define helper_vlxbu_v_b helper_vlxbu_v_b_riscv32
#define helper_vlxbu_v_h helper_vlxbu_v_h_riscv32
#define helper_vlxbu_v_w helper_vlxbu_v_w_riscv32
#define helper_vlxbu_v_d helper_vlxbu_v_d_riscv32
#define helper_vlxhu_v_h helper_vlxhu_v_h_riscv32
#define helper_vlxhu_v_w helper_vlxhu_v_w_riscv32
#define helper_vlxhu_v_d helper_vlxhu_v_d_riscv32
#define helper_vlxwu_v_w helper_vlxwu_v_w_riscv32
#define helper_vlxwu_v_d helper_vlxwu_v_d_riscv32
#define helper_vsxb_v_b helper_vsxb_v_b_riscv32
#define helper_vsxb_v_h helper_vsxb_v_h_riscv32
#define helper_vsxb_v_w helper_vsxb_v_w_riscv32
#define helper_vsxb_v_d helper_vsxb_v_d_riscv32
#define helper_vsxh_v_h helper_vsxh_v_h_riscv32
#define helper_vsxh_v_w helper_vsxh_v_w_riscv32
#define helper_vsxh_v_d helper_vsxh_v_d_riscv32
#define helper_vsxw_v_w helper_vsxw_v_w_riscv32
#define helper_vsxw_v_d helper_vsxw_v_d_riscv32
#define helper_vsxe_v_b helper_vsxe_v_b_riscv32
#define helper_vsxe_v_h helper_vsxe_v_h_riscv32
#define helper_vsxe_v_w helper_vsxe_v_w_riscv32
#define helper_vsxe_v_d helper_vsxe_v_d_riscv32
#define pmp_hart_has_privs pmp_hart_has_privs_riscv32
#define pmpaddr_csr_read pmpaddr_csr_read_riscv32
#define pmpaddr_csr_write pmpaddr_csr_write_riscv32

View file

@ -3628,6 +3628,41 @@
#define helper_vsse_v_h helper_vsse_v_h_riscv64
#define helper_vsse_v_w helper_vsse_v_w_riscv64
#define helper_vsse_v_d helper_vsse_v_d_riscv64
/*
 * Vector indexed load/store helpers: append a _riscv64 suffix to each
 * symbol so the riscv32 and riscv64 targets can coexist in one binary
 * without link-time symbol clashes.
 */
#define helper_vlxb_v_b helper_vlxb_v_b_riscv64
#define helper_vlxb_v_h helper_vlxb_v_h_riscv64
#define helper_vlxb_v_w helper_vlxb_v_w_riscv64
#define helper_vlxb_v_d helper_vlxb_v_d_riscv64
#define helper_vlxh_v_h helper_vlxh_v_h_riscv64
#define helper_vlxh_v_w helper_vlxh_v_w_riscv64
#define helper_vlxh_v_d helper_vlxh_v_d_riscv64
#define helper_vlxw_v_w helper_vlxw_v_w_riscv64
#define helper_vlxw_v_d helper_vlxw_v_d_riscv64
#define helper_vlxe_v_b helper_vlxe_v_b_riscv64
#define helper_vlxe_v_h helper_vlxe_v_h_riscv64
#define helper_vlxe_v_w helper_vlxe_v_w_riscv64
#define helper_vlxe_v_d helper_vlxe_v_d_riscv64
#define helper_vlxbu_v_b helper_vlxbu_v_b_riscv64
#define helper_vlxbu_v_h helper_vlxbu_v_h_riscv64
#define helper_vlxbu_v_w helper_vlxbu_v_w_riscv64
#define helper_vlxbu_v_d helper_vlxbu_v_d_riscv64
#define helper_vlxhu_v_h helper_vlxhu_v_h_riscv64
#define helper_vlxhu_v_w helper_vlxhu_v_w_riscv64
#define helper_vlxhu_v_d helper_vlxhu_v_d_riscv64
#define helper_vlxwu_v_w helper_vlxwu_v_w_riscv64
#define helper_vlxwu_v_d helper_vlxwu_v_d_riscv64
#define helper_vsxb_v_b helper_vsxb_v_b_riscv64
#define helper_vsxb_v_h helper_vsxb_v_h_riscv64
#define helper_vsxb_v_w helper_vsxb_v_w_riscv64
#define helper_vsxb_v_d helper_vsxb_v_d_riscv64
#define helper_vsxh_v_h helper_vsxh_v_h_riscv64
#define helper_vsxh_v_w helper_vsxh_v_w_riscv64
#define helper_vsxh_v_d helper_vsxh_v_d_riscv64
#define helper_vsxw_v_w helper_vsxw_v_w_riscv64
#define helper_vsxw_v_d helper_vsxw_v_d_riscv64
#define helper_vsxe_v_b helper_vsxe_v_b_riscv64
#define helper_vsxe_v_h helper_vsxe_v_h_riscv64
#define helper_vsxe_v_w helper_vsxe_v_w_riscv64
#define helper_vsxe_v_d helper_vsxe_v_d_riscv64
#define pmp_hart_has_privs pmp_hart_has_privs_riscv64
#define pmpaddr_csr_read pmpaddr_csr_read_riscv64
#define pmpaddr_csr_write pmpaddr_csr_write_riscv64

View file

@ -192,3 +192,38 @@ DEF_HELPER_6(vsse_v_b, void, ptr, ptr, tl, tl, env, i32)
DEF_HELPER_6(vsse_v_h, void, ptr, ptr, tl, tl, env, i32)
DEF_HELPER_6(vsse_v_w, void, ptr, ptr, tl, tl, env, i32)
DEF_HELPER_6(vsse_v_d, void, ptr, ptr, tl, tl, env, i32)
/*
 * Vector indexed load/store helper declarations. Argument order is
 * (vd, v0 mask, base address, vs2 index vector, env, desc); the name
 * encodes memory width then SEW, e.g. vlxb_v_h loads bytes into
 * 16-bit elements.
 */
DEF_HELPER_6(vlxb_v_b, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxb_v_h, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxb_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxb_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxh_v_h, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxh_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxh_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxw_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxw_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxe_v_b, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxe_v_h, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxe_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxe_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxbu_v_b, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxbu_v_h, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxbu_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxbu_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxhu_v_h, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxhu_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxhu_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxwu_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vlxwu_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxb_v_b, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxb_v_h, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxb_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxb_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxh_v_h, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxh_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxh_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxw_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxw_v_d, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxe_v_b, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxe_v_h, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxe_v_w, void, ptr, ptr, tl, ptr, env, i32)
DEF_HELPER_6(vsxe_v_d, void, ptr, ptr, tl, ptr, env, i32)

View file

@ -243,6 +243,19 @@ vssh_v ... 010 . ..... ..... 101 ..... 0100111 @r_nfvm
vssw_v ... 010 . ..... ..... 110 ..... 0100111 @r_nfvm
vsse_v ... 010 . ..... ..... 111 ..... 0100111 @r_nfvm
# Vector indexed load insns.
vlxb_v ... 111 . ..... ..... 000 ..... 0000111 @r_nfvm
vlxh_v ... 111 . ..... ..... 101 ..... 0000111 @r_nfvm
vlxw_v ... 111 . ..... ..... 110 ..... 0000111 @r_nfvm
vlxe_v ... 011 . ..... ..... 111 ..... 0000111 @r_nfvm
vlxbu_v ... 011 . ..... ..... 000 ..... 0000111 @r_nfvm
vlxhu_v ... 011 . ..... ..... 101 ..... 0000111 @r_nfvm
vlxwu_v ... 011 . ..... ..... 110 ..... 0000111 @r_nfvm
# Vector ordered-indexed and unordered-indexed store insns.
vsxb_v ... -11 . ..... ..... 000 ..... 0100111 @r_nfvm
vsxh_v ... -11 . ..... ..... 101 ..... 0100111 @r_nfvm
vsxw_v ... -11 . ..... ..... 110 ..... 0100111 @r_nfvm
vsxe_v ... -11 . ..... ..... 111 ..... 0100111 @r_nfvm
# *** new major opcode OP-V ***
vsetvli 0 ........... ..... 111 ..... 1010111 @r2_zimm
vsetvl 1000000 ..... ..... 111 ..... 1010111 @r

View file

@ -437,3 +437,133 @@ GEN_VEXT_TRANS(vssb_v, 0, rnfvm, st_stride_op, st_stride_check)
GEN_VEXT_TRANS(vssh_v, 1, rnfvm, st_stride_op, st_stride_check)
GEN_VEXT_TRANS(vssw_v, 2, rnfvm, st_stride_op, st_stride_check)
GEN_VEXT_TRANS(vsse_v, 3, rnfvm, st_stride_op, st_stride_check)
/*
 *** index load and store
 */

/*
 * Prototype of the generated-helper call shared by all indexed
 * load/store translations: (tcg_ctx, vd ptr, mask ptr, base GPR value,
 * vs2 index-vector ptr, env, desc).
 */
typedef void gen_helper_ldst_index(TCGContext *, TCGv_ptr, TCGv_ptr, TCGv,
                                   TCGv_ptr, TCGv_env, TCGv_i32);

/*
 * Emit the TCG code for one vector indexed load/store: build pointers
 * into cpu_env for vd, the mask register (v0) and the index register
 * (vs2), read the base address from GPR rs1, then call the helper fn.
 * The whole operation is branched over when vl == 0. Always returns
 * true (the insn was translated).
 */
static bool ldst_index_trans(uint32_t vd, uint32_t rs1, uint32_t vs2,
                             uint32_t data, gen_helper_ldst_index *fn,
                             DisasContext *s)
{
    TCGContext *tcg_ctx = s->uc->tcg_ctx;
    TCGv_ptr dest, mask, index;
    TCGv base;
    TCGv_i32 desc;

    /* Nothing to do when vl == 0: jump past the helper call. */
    TCGLabel *over = gen_new_label(tcg_ctx);
    tcg_gen_brcondi_tl(tcg_ctx, TCG_COND_EQ, tcg_ctx->cpu_vl_risc, 0, over);

    dest = tcg_temp_new_ptr(tcg_ctx);
    mask = tcg_temp_new_ptr(tcg_ctx);
    index = tcg_temp_new_ptr(tcg_ctx);
    base = tcg_temp_new(tcg_ctx);
    /* desc packs vlen/8 and the VDATA fields for the helper. */
    desc = tcg_const_i32(tcg_ctx, simd_desc(0, s->vlen / 8, data));

    gen_get_gpr(s, base, rs1);
    tcg_gen_addi_ptr(tcg_ctx, dest, tcg_ctx->cpu_env, vreg_ofs(s, vd));
    tcg_gen_addi_ptr(tcg_ctx, index, tcg_ctx->cpu_env, vreg_ofs(s, vs2));
    tcg_gen_addi_ptr(tcg_ctx, mask, tcg_ctx->cpu_env, vreg_ofs(s, 0));

    fn(tcg_ctx, dest, mask, base, index, tcg_ctx->cpu_env, desc);

    tcg_temp_free_ptr(tcg_ctx, dest);
    tcg_temp_free_ptr(tcg_ctx, mask);
    tcg_temp_free_ptr(tcg_ctx, index);
    tcg_temp_free(tcg_ctx, base);
    tcg_temp_free_i32(tcg_ctx, desc);
    gen_set_label(tcg_ctx, over);
    return true;
}
/*
 * Translate one indexed-load variant. seq selects the load flavour
 * (0..6: vlxb/vlxh/vlxw/vlxe/vlxbu/vlxhu/vlxwu) and s->sew the element
 * width; unsupported (seq, sew) combinations have NULL entries and
 * make the translation fail.
 */
static bool ld_index_op(DisasContext *s, arg_rnfvm *a, uint8_t seq)
{
    /* Row = load variant (seq), column = SEW (b/h/w/d). */
    static gen_helper_ldst_index * const ld_fns[7][4] = {
        { gen_helper_vlxb_v_b, gen_helper_vlxb_v_h,
          gen_helper_vlxb_v_w, gen_helper_vlxb_v_d },
        { NULL, gen_helper_vlxh_v_h,
          gen_helper_vlxh_v_w, gen_helper_vlxh_v_d },
        { NULL, NULL,
          gen_helper_vlxw_v_w, gen_helper_vlxw_v_d },
        { gen_helper_vlxe_v_b, gen_helper_vlxe_v_h,
          gen_helper_vlxe_v_w, gen_helper_vlxe_v_d },
        { gen_helper_vlxbu_v_b, gen_helper_vlxbu_v_h,
          gen_helper_vlxbu_v_w, gen_helper_vlxbu_v_d },
        { NULL, gen_helper_vlxhu_v_h,
          gen_helper_vlxhu_v_w, gen_helper_vlxhu_v_d },
        { NULL, NULL,
          gen_helper_vlxwu_v_w, gen_helper_vlxwu_v_d },
    };
    gen_helper_ldst_index *gen = ld_fns[seq][s->sew];
    uint32_t data = 0;

    if (gen == NULL) {
        return false;
    }

    /* Pack the per-insn parameters the helper reads out of desc. */
    data = FIELD_DP32(data, VDATA, MLEN, s->mlen);
    data = FIELD_DP32(data, VDATA, VM, a->vm);
    data = FIELD_DP32(data, VDATA, LMUL, s->lmul);
    data = FIELD_DP32(data, VDATA, NF, a->nf);
    return ldst_index_trans(a->rd, a->rs1, a->rs2, data, gen, s);
}
/*
 * Legality checks for indexed loads: valid vtype, destination may not
 * overlap the mask register when masked, register numbers fit the
 * current LMUL, and nf is in range.
 */
static bool ld_index_check(DisasContext *s, arg_rnfvm* a)
{
    if (!vext_check_isa_ill(s)) {
        return false;
    }
    if (!vext_check_overlap_mask(s, a->rd, a->vm, false)) {
        return false;
    }
    if (!vext_check_reg(s, a->rd, false)) {
        return false;
    }
    if (!vext_check_reg(s, a->rs2, false)) {
        return false;
    }
    return vext_check_nf(s, a->nf);
}
/* The seq argument (0..6) selects the row in ld_index_op's table. */
GEN_VEXT_TRANS(vlxb_v, 0, rnfvm, ld_index_op, ld_index_check)
GEN_VEXT_TRANS(vlxh_v, 1, rnfvm, ld_index_op, ld_index_check)
GEN_VEXT_TRANS(vlxw_v, 2, rnfvm, ld_index_op, ld_index_check)
GEN_VEXT_TRANS(vlxe_v, 3, rnfvm, ld_index_op, ld_index_check)
GEN_VEXT_TRANS(vlxbu_v, 4, rnfvm, ld_index_op, ld_index_check)
GEN_VEXT_TRANS(vlxhu_v, 5, rnfvm, ld_index_op, ld_index_check)
GEN_VEXT_TRANS(vlxwu_v, 6, rnfvm, ld_index_op, ld_index_check)
/*
 * Translate one indexed-store variant. seq selects the store flavour
 * (0..3: vsxb/vsxh/vsxw/vsxe) and s->sew the element width; NULL
 * entries mark (seq, sew) combinations that do not exist.
 */
static bool st_index_op(DisasContext *s, arg_rnfvm *a, uint8_t seq)
{
    /* Row = store variant (seq), column = SEW (b/h/w/d). */
    static gen_helper_ldst_index * const st_fns[4][4] = {
        { gen_helper_vsxb_v_b, gen_helper_vsxb_v_h,
          gen_helper_vsxb_v_w, gen_helper_vsxb_v_d },
        { NULL, gen_helper_vsxh_v_h,
          gen_helper_vsxh_v_w, gen_helper_vsxh_v_d },
        { NULL, NULL,
          gen_helper_vsxw_v_w, gen_helper_vsxw_v_d },
        { gen_helper_vsxe_v_b, gen_helper_vsxe_v_h,
          gen_helper_vsxe_v_w, gen_helper_vsxe_v_d },
    };
    gen_helper_ldst_index *gen = st_fns[seq][s->sew];
    uint32_t data = 0;

    if (gen == NULL) {
        return false;
    }

    /* Pack the per-insn parameters the helper reads out of desc. */
    data = FIELD_DP32(data, VDATA, MLEN, s->mlen);
    data = FIELD_DP32(data, VDATA, VM, a->vm);
    data = FIELD_DP32(data, VDATA, LMUL, s->lmul);
    data = FIELD_DP32(data, VDATA, NF, a->nf);
    return ldst_index_trans(a->rd, a->rs1, a->rs2, data, gen, s);
}
/*
 * Legality checks for indexed stores: valid vtype, source/index
 * register numbers fit the current LMUL, and nf is in range. No mask
 * overlap check is needed since rd is only read.
 */
static bool st_index_check(DisasContext *s, arg_rnfvm* a)
{
    if (!vext_check_isa_ill(s)) {
        return false;
    }
    if (!vext_check_reg(s, a->rd, false)) {
        return false;
    }
    if (!vext_check_reg(s, a->rs2, false)) {
        return false;
    }
    return vext_check_nf(s, a->nf);
}
/* The seq argument (0..3) selects the row in st_index_op's table. */
GEN_VEXT_TRANS(vsxb_v, 0, rnfvm, st_index_op, st_index_check)
GEN_VEXT_TRANS(vsxh_v, 1, rnfvm, st_index_op, st_index_check)
GEN_VEXT_TRANS(vsxw_v, 2, rnfvm, st_index_op, st_index_check)
GEN_VEXT_TRANS(vsxe_v, 3, rnfvm, st_index_op, st_index_check)

View file

@ -462,3 +462,119 @@ GEN_VEXT_ST_US(vse_v_b, int8_t, int8_t , ste_b)
GEN_VEXT_ST_US(vse_v_h, int16_t, int16_t, ste_h)
GEN_VEXT_ST_US(vse_v_w, int32_t, int32_t, ste_w)
GEN_VEXT_ST_US(vse_v_d, int64_t, int64_t, ste_d)
/*
 *** index: access vector element from indexed memory
 */

/*
 * Effective-address calculation for element idx: base plus the idx-th
 * entry of the vs2 offset vector (read as a signed ETYPE, so narrow
 * offsets are sign-extended by the integer promotion).
 */
typedef target_ulong vext_get_index_addr(target_ulong base,
                                         uint32_t idx, void *vs2);

#define GEN_VEXT_GET_INDEX_ADDR(NAME, ETYPE, H) \
static target_ulong NAME(target_ulong base, \
                         uint32_t idx, void *vs2) \
{ \
    return (base + *((ETYPE *)vs2 + H(idx))); \
}

/* One accessor per offset-element width; H* handle host endianness. */
GEN_VEXT_GET_INDEX_ADDR(idx_b, int8_t, H1)
GEN_VEXT_GET_INDEX_ADDR(idx_h, int16_t, H2)
GEN_VEXT_GET_INDEX_ADDR(idx_w, int32_t, H4)
GEN_VEXT_GET_INDEX_ADDR(idx_d, int64_t, H8)
/*
 * Common loop for vector indexed loads and stores.
 *
 * For each active element (unmasked, or vm set) the address is
 * base + vs2[element] as computed by get_index_addr; nf consecutive
 * segment fields of msz bytes are then transferred by ldst_elem.
 * All accesses are probed first so a fault is raised before any
 * element is modified. For loads, clear_elem zeroes the tail elements
 * past vl; stores pass clear_elem == NULL.
 */
static inline void
vext_ldst_index(void *vd, void *v0, target_ulong base,
                void *vs2, CPURISCVState *env, uint32_t desc,
                vext_get_index_addr get_index_addr,
                vext_ldst_elem_fn *ldst_elem,
                clear_fn *clear_elem,
                uint32_t esz, uint32_t msz, uintptr_t ra,
                MMUAccessType access_type)
{
    uint32_t elem, seg;
    uint32_t nf = vext_nf(desc);
    uint32_t vm = vext_vm(desc);
    uint32_t mlen = vext_mlen(desc);
    uint32_t vlmax = vext_maxsz(desc) / esz;

    /* Pass 1: probe every access so faults precede any side effect. */
    for (elem = 0; elem < env->vl; elem++) {
        if (!vm && !vext_elem_mask(v0, mlen, elem)) {
            continue;
        }
        probe_pages(env, get_index_addr(base, elem, vs2), nf * msz, ra,
                    access_type);
    }

    /* Pass 2: transfer nf segment fields per active element. */
    for (elem = 0; elem < env->vl; elem++) {
        if (!vm && !vext_elem_mask(v0, mlen, elem)) {
            continue;
        }
        for (seg = 0; seg < nf; seg++) {
            abi_ptr addr = get_index_addr(base, elem, vs2) + seg * msz;
            ldst_elem(env, addr, elem + seg * vlmax, vd, ra);
        }
    }

    /* Zero the tail elements (loads only). */
    if (clear_elem) {
        for (seg = 0; seg < nf; seg++) {
            clear_elem(vd, env->vl + seg * vlmax, env->vl * esz, vlmax * esz);
        }
    }
}
/*
 * Instantiate one indexed-load helper. MTYPE is the memory element
 * type and ETYPE the register element type; LOAD_FN moves one MTYPE
 * from memory into an ETYPE register element, INDEX_FN computes the
 * per-element address from vs2, and CLEAR_FN zeroes tail elements.
 */
#define GEN_VEXT_LD_INDEX(NAME, MTYPE, ETYPE, INDEX_FN, LOAD_FN, CLEAR_FN) \
void HELPER(NAME)(void *vd, void *v0, target_ulong base, \
                  void *vs2, CPURISCVState *env, uint32_t desc) \
{ \
    vext_ldst_index(vd, v0, base, vs2, env, desc, INDEX_FN, \
                    LOAD_FN, CLEAR_FN, sizeof(ETYPE), sizeof(MTYPE), \
                    GETPC(), MMU_DATA_LOAD); \
}

GEN_VEXT_LD_INDEX(vlxb_v_b, int8_t, int8_t, idx_b, ldb_b, clearb)
GEN_VEXT_LD_INDEX(vlxb_v_h, int8_t, int16_t, idx_h, ldb_h, clearh)
GEN_VEXT_LD_INDEX(vlxb_v_w, int8_t, int32_t, idx_w, ldb_w, clearl)
GEN_VEXT_LD_INDEX(vlxb_v_d, int8_t, int64_t, idx_d, ldb_d, clearq)
GEN_VEXT_LD_INDEX(vlxh_v_h, int16_t, int16_t, idx_h, ldh_h, clearh)
GEN_VEXT_LD_INDEX(vlxh_v_w, int16_t, int32_t, idx_w, ldh_w, clearl)
GEN_VEXT_LD_INDEX(vlxh_v_d, int16_t, int64_t, idx_d, ldh_d, clearq)
GEN_VEXT_LD_INDEX(vlxw_v_w, int32_t, int32_t, idx_w, ldw_w, clearl)
GEN_VEXT_LD_INDEX(vlxw_v_d, int32_t, int64_t, idx_d, ldw_d, clearq)
GEN_VEXT_LD_INDEX(vlxe_v_b, int8_t, int8_t, idx_b, lde_b, clearb)
GEN_VEXT_LD_INDEX(vlxe_v_h, int16_t, int16_t, idx_h, lde_h, clearh)
GEN_VEXT_LD_INDEX(vlxe_v_w, int32_t, int32_t, idx_w, lde_w, clearl)
GEN_VEXT_LD_INDEX(vlxe_v_d, int64_t, int64_t, idx_d, lde_d, clearq)
GEN_VEXT_LD_INDEX(vlxbu_v_b, uint8_t, uint8_t, idx_b, ldbu_b, clearb)
GEN_VEXT_LD_INDEX(vlxbu_v_h, uint8_t, uint16_t, idx_h, ldbu_h, clearh)
GEN_VEXT_LD_INDEX(vlxbu_v_w, uint8_t, uint32_t, idx_w, ldbu_w, clearl)
GEN_VEXT_LD_INDEX(vlxbu_v_d, uint8_t, uint64_t, idx_d, ldbu_d, clearq)
GEN_VEXT_LD_INDEX(vlxhu_v_h, uint16_t, uint16_t, idx_h, ldhu_h, clearh)
GEN_VEXT_LD_INDEX(vlxhu_v_w, uint16_t, uint32_t, idx_w, ldhu_w, clearl)
GEN_VEXT_LD_INDEX(vlxhu_v_d, uint16_t, uint64_t, idx_d, ldhu_d, clearq)
GEN_VEXT_LD_INDEX(vlxwu_v_w, uint32_t, uint32_t, idx_w, ldwu_w, clearl)
GEN_VEXT_LD_INDEX(vlxwu_v_d, uint32_t, uint64_t, idx_d, ldwu_d, clearq)
/*
 * Instantiate one indexed-store helper. STORE_FN writes one MTYPE-
 * sized value taken from an ETYPE register element; no CLEAR_FN is
 * passed (NULL) since stores never touch the destination tail.
 */
#define GEN_VEXT_ST_INDEX(NAME, MTYPE, ETYPE, INDEX_FN, STORE_FN)\
void HELPER(NAME)(void *vd, void *v0, target_ulong base, \
                  void *vs2, CPURISCVState *env, uint32_t desc) \
{ \
    vext_ldst_index(vd, v0, base, vs2, env, desc, INDEX_FN, \
                    STORE_FN, NULL, sizeof(ETYPE), sizeof(MTYPE),\
                    GETPC(), MMU_DATA_STORE); \
}

GEN_VEXT_ST_INDEX(vsxb_v_b, int8_t, int8_t, idx_b, stb_b)
GEN_VEXT_ST_INDEX(vsxb_v_h, int8_t, int16_t, idx_h, stb_h)
GEN_VEXT_ST_INDEX(vsxb_v_w, int8_t, int32_t, idx_w, stb_w)
GEN_VEXT_ST_INDEX(vsxb_v_d, int8_t, int64_t, idx_d, stb_d)
GEN_VEXT_ST_INDEX(vsxh_v_h, int16_t, int16_t, idx_h, sth_h)
GEN_VEXT_ST_INDEX(vsxh_v_w, int16_t, int32_t, idx_w, sth_w)
GEN_VEXT_ST_INDEX(vsxh_v_d, int16_t, int64_t, idx_d, sth_d)
GEN_VEXT_ST_INDEX(vsxw_v_w, int32_t, int32_t, idx_w, stw_w)
GEN_VEXT_ST_INDEX(vsxw_v_d, int32_t, int64_t, idx_d, stw_d)
GEN_VEXT_ST_INDEX(vsxe_v_b, int8_t, int8_t, idx_b, ste_b)
GEN_VEXT_ST_INDEX(vsxe_v_h, int16_t, int16_t, idx_h, ste_h)
GEN_VEXT_ST_INDEX(vsxe_v_w, int32_t, int32_t, idx_w, ste_w)
GEN_VEXT_ST_INDEX(vsxe_v_d, int64_t, int64_t, idx_d, ste_d)