Merge tag 'pull-loongarch-20230506' of https://gitlab.com/gaosong/qemu into staging

Add LoongArch LSX instructions.

# -----BEGIN PGP SIGNATURE-----
#
# iLMEAAEIAB0WIQS4/x2g0v3LLaCcbCxAov/yOSY+3wUCZFXxGwAKCRBAov/yOSY+
# 39EoA/0Uy2DPz6g7J5+9tcIRk9jLrp36aYQJ9J8zRJd226YFvHSfiBWSIteMFOEX
# Z0Jx1bL6N97KK/HA74Nx++x0kVuplEGp1s5cO/odL3gYy8RaJm23p9iaDa0D/UaB
# ygLvXtuzN4unDFP5EF/wa9zRkDb7qX2iBBvc8OIal7eT4dDX+g==
# =gyVU
# -----END PGP SIGNATURE-----
# gpg: Signature made Sat 06 May 2023 07:18:03 AM BST
# gpg:                using RSA key B8FF1DA0D2FDCB2DA09C6C2C40A2FFF239263EDF
# gpg: Good signature from "Song Gao <m17746591750@163.com>" [unknown]
# gpg: WARNING: This key is not certified with a trusted signature!
# gpg:          There is no indication that the signature belongs to the owner.
# Primary key fingerprint: B8FF 1DA0 D2FD CB2D A09C  6C2C 40A2 FFF2 3926 3EDF

* tag 'pull-loongarch-20230506' of https://gitlab.com/gaosong/qemu: (45 commits)
  hw/intc: don't use target_ulong for LoongArch ipi
  target/loongarch: CPUCFG support LSX
  target/loongarch: Use {set/get}_gpr replace to cpu_fpr
  target/loongarch: Implement vldi
  target/loongarch: Implement vld vst
  target/loongarch: Implement vilvl vilvh vextrins vshuf
  target/loongarch: Implement vreplve vpack vpick
  target/loongarch: Implement vinsgr2vr vpickve2gr vreplgr2vr
  target/loongarch: Implement vbitsel vset
  target/loongarch: Implement vfcmp
  target/loongarch: Implement vseq vsle vslt
  target/loongarch: Implement LSX fpu fcvt instructions
  target/loongarch: Implement LSX fpu arith instructions
  target/loongarch: Implement vfrstp
  target/loongarch: Implement vbitclr vbitset vbitrev
  target/loongarch: Implement vpcnt
  target/loongarch: Implement vclo vclz
  target/loongarch: Implement vssrlrn vssrarn
  target/loongarch: Implement vssrln vssran
  target/loongarch: Implement vsrlrn vsrarn
  ...

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Richard Henderson <richard.henderson@linaro.org>  2023-05-06 08:11:52 +01:00
commit 792f77f376
20 changed files with 9989 additions and 58 deletions

---- next changed file ----

@@ -50,7 +50,7 @@ static uint64_t loongarch_ipi_readl(void *opaque, hwaddr addr, unsigned size)
     return ret;
 }
 
-static void send_ipi_data(CPULoongArchState *env, target_ulong val, target_ulong addr)
+static void send_ipi_data(CPULoongArchState *env, uint64_t val, hwaddr addr)
 {
     int i, mask = 0, data = 0;

---- next changed file ----

@@ -128,7 +128,7 @@ static void setup_sigframe(CPULoongArchState *env,
     fpu_ctx = (struct target_fpu_context *)(info + 1);
     for (i = 0; i < 32; ++i) {
-        __put_user(env->fpr[i], &fpu_ctx->regs[i]);
+        __put_user(env->fpr[i].vreg.D(0), &fpu_ctx->regs[i]);
     }
     __put_user(read_fcc(env), &fpu_ctx->fcc);
     __put_user(env->fcsr0, &fpu_ctx->fcsr);
@@ -193,7 +193,7 @@ static void restore_sigframe(CPULoongArchState *env,
     uint64_t fcc;
 
     for (i = 0; i < 32; ++i) {
-        __get_user(env->fpr[i], &fpu_ctx->regs[i]);
+        __get_user(env->fpr[i].vreg.D(0), &fpu_ctx->regs[i]);
     }
     __get_user(fcc, &fpu_ctx->fcc);
     write_fcc(env, fcc);

---- next changed file ----

@@ -52,6 +52,7 @@ static const char * const excp_names[] = {
     [EXCCODE_FPE] = "Floating Point Exception",
     [EXCCODE_DBP] = "Debug breakpoint",
     [EXCCODE_BCE] = "Bound Check Exception",
+    [EXCCODE_SXD] = "128 bit vector instructions Disable exception",
 };
 
 const char *loongarch_exception_name(int32_t exception)
@@ -187,6 +188,7 @@ static void loongarch_cpu_do_interrupt(CPUState *cs)
     case EXCCODE_FPD:
     case EXCCODE_FPE:
     case EXCCODE_BCE:
+    case EXCCODE_SXD:
         env->CSR_BADV = env->pc;
         QEMU_FALLTHROUGH;
     case EXCCODE_ADEM:
@@ -386,6 +388,7 @@ static void loongarch_la464_initfn(Object *obj)
     data = FIELD_DP32(data, CPUCFG2, FP_SP, 1);
     data = FIELD_DP32(data, CPUCFG2, FP_DP, 1);
     data = FIELD_DP32(data, CPUCFG2, FP_VER, 1);
+    data = FIELD_DP32(data, CPUCFG2, LSX, 1),
     data = FIELD_DP32(data, CPUCFG2, LLFTP, 1);
     data = FIELD_DP32(data, CPUCFG2, LLFTP_VER, 1);
     data = FIELD_DP32(data, CPUCFG2, LAM, 1);
@@ -656,7 +659,7 @@ void loongarch_cpu_dump_state(CPUState *cs, FILE *f, int flags)
     /* fpr */
     if (flags & CPU_DUMP_FPU) {
         for (i = 0; i < 32; i++) {
-            qemu_fprintf(f, " %s %016" PRIx64, fregnames[i], env->fpr[i]);
+            qemu_fprintf(f, " %s %016" PRIx64, fregnames[i], env->fpr[i].vreg.D(0));
             if ((i & 3) == 3) {
                 qemu_fprintf(f, "\n");
             }

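For context: EXCCODE_SXD is raised when an LSX instruction is decoded while
CSR.EUEN.SXE is clear. The guard itself lives in the new LSX translator
sources, which are not shown on this page; a plausible sketch of it, mirroring
the existing CHECK_FPE pattern and using the HW_FLAGS_EUEN_SXE flag added in
the cpu.h hunks below, would be:

    /* Assumed sketch only -- the real macro is in the LSX translator code. */
    #define CHECK_SXE do { \
            if ((ctx->base.tb->flags & HW_FLAGS_EUEN_SXE) == 0) { \
                generate_exception(ctx, EXCCODE_SXD); \
                return true; \
            } \
        } while (0)
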
---- next changed file ----

@@ -8,6 +8,7 @@
 #ifndef LOONGARCH_CPU_H
 #define LOONGARCH_CPU_H
 
+#include "qemu/int128.h"
 #include "exec/cpu-defs.h"
 #include "fpu/softfloat-types.h"
 #include "hw/registerfields.h"
@@ -54,6 +55,10 @@ FIELD(FCSR0, CAUSE, 24, 5)
     do { \
         (REG) = FIELD_DP32(REG, FCSR0, CAUSE, V); \
     } while (0)
+#define UPDATE_FP_CAUSE(REG, V) \
+    do { \
+        (REG) |= FIELD_DP32(0, FCSR0, CAUSE, V); \
+    } while (0)
 
 #define GET_FP_ENABLES(REG) FIELD_EX32(REG, FCSR0, ENABLES)
 #define SET_FP_ENABLES(REG, V) \
@@ -241,6 +246,24 @@ FIELD(TLB_MISC, ASID, 1, 10)
 FIELD(TLB_MISC, VPPN, 13, 35)
 FIELD(TLB_MISC, PS, 48, 6)
 
+#define LSX_LEN (128)
+typedef union VReg {
+    int8_t   B[LSX_LEN / 8];
+    int16_t  H[LSX_LEN / 16];
+    int32_t  W[LSX_LEN / 32];
+    int64_t  D[LSX_LEN / 64];
+    uint8_t  UB[LSX_LEN / 8];
+    uint16_t UH[LSX_LEN / 16];
+    uint32_t UW[LSX_LEN / 32];
+    uint64_t UD[LSX_LEN / 64];
+    Int128   Q[LSX_LEN / 128];
+} VReg;
+
+typedef union fpr_t fpr_t;
+union fpr_t {
+    VReg vreg;
+};
+
 struct LoongArchTLB {
     uint64_t tlb_misc;
     /* Fields corresponding to CSR_TLBELO0/1 */
@@ -253,7 +276,7 @@ typedef struct CPUArchState {
     uint64_t gpr[32];
     uint64_t pc;
 
-    uint64_t fpr[32];
+    fpr_t fpr[32];
     float_status fp_status;
     bool cf[8];
@@ -400,6 +423,7 @@ static inline int cpu_mmu_index(CPULoongArchState *env, bool ifetch)
 #define HW_FLAGS_PLV_MASK R_CSR_CRMD_PLV_MASK /* 0x03 */
 #define HW_FLAGS_CRMD_PG R_CSR_CRMD_PG_MASK /* 0x10 */
 #define HW_FLAGS_EUEN_FPE 0x04
+#define HW_FLAGS_EUEN_SXE 0x08
 
 static inline void cpu_get_tb_cpu_state(CPULoongArchState *env,
                                         target_ulong *pc,
@@ -410,6 +434,7 @@ static inline void cpu_get_tb_cpu_state(CPULoongArchState *env,
     *cs_base = 0;
     *flags = env->CSR_CRMD & (R_CSR_CRMD_PLV_MASK | R_CSR_CRMD_PG_MASK);
     *flags |= FIELD_EX64(env->CSR_EUEN, CSR_EUEN, FPE) * HW_FLAGS_EUEN_FPE;
+    *flags |= FIELD_EX64(env->CSR_EUEN, CSR_EUEN, SXE) * HW_FLAGS_EUEN_SXE;
 }
 
 void loongarch_cpu_list(void);

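The per-lane accessors used in several hunks on this page (env->fpr[n].vreg.D(0)
and friends) are index macros over the VReg union above; they are part of the
series but not of the hunks shown here. A rough sketch, assuming the usual QEMU
pattern of flipping element indices on big-endian hosts so that D(0) always
names the architectural low 64-bit lane (i.e. what the old scalar uint64_t
fpr[n] held), would be:

    /* Assumed sketch only -- the real definitions are in a file not shown here. */
    #if HOST_BIG_ENDIAN
    #define B(x)  B[15 - (x)]
    #define H(x)  H[7 - (x)]
    #define W(x)  W[3 - (x)]
    #define D(x)  D[1 - (x)]
    #else
    #define B(x)  B[x]
    #define H(x)  H[x]
    #define W(x)  W[x]
    #define D(x)  D[x]
    #endif
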
---- next changed file ----

@@ -21,11 +21,21 @@ static inline int plus_1(DisasContext *ctx, int x)
return x + 1;
}
static inline int shl_1(DisasContext *ctx, int x)
{
return x << 1;
}
static inline int shl_2(DisasContext *ctx, int x)
{
return x << 2;
}
static inline int shl_3(DisasContext *ctx, int x)
{
return x << 3;
}
#define CSR_NAME(REG) \
[LOONGARCH_CSR_##REG] = (#REG)
@@ -784,3 +794,904 @@ PCADD_INSN(pcaddi)
PCADD_INSN(pcalau12i)
PCADD_INSN(pcaddu12i)
PCADD_INSN(pcaddu18i)
#define INSN_LSX(insn, type) \
static bool trans_##insn(DisasContext *ctx, arg_##type * a) \
{ \
output_##type(ctx, a, #insn); \
return true; \
}
static void output_cv(DisasContext *ctx, arg_cv *a,
const char *mnemonic)
{
output(ctx, mnemonic, "fcc%d, v%d", a->cd, a->vj);
}
static void output_vvv(DisasContext *ctx, arg_vvv *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, v%d, v%d", a->vd, a->vj, a->vk);
}
static void output_vv_i(DisasContext *ctx, arg_vv_i *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, v%d, 0x%x", a->vd, a->vj, a->imm);
}
static void output_vv(DisasContext *ctx, arg_vv *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, v%d", a->vd, a->vj);
}
static void output_vvvv(DisasContext *ctx, arg_vvvv *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, v%d, v%d, v%d", a->vd, a->vj, a->vk, a->va);
}
static void output_vr_i(DisasContext *ctx, arg_vr_i *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, r%d, 0x%x", a->vd, a->rj, a->imm);
}
static void output_vr_ii(DisasContext *ctx, arg_vr_ii *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, r%d, 0x%x, 0x%x", a->vd, a->rj, a->imm, a->imm2);
}
static void output_rv_i(DisasContext *ctx, arg_rv_i *a, const char *mnemonic)
{
output(ctx, mnemonic, "r%d, v%d, 0x%x", a->rd, a->vj, a->imm);
}
static void output_vr(DisasContext *ctx, arg_vr *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, r%d", a->vd, a->rj);
}
static void output_vvr(DisasContext *ctx, arg_vvr *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, v%d, r%d", a->vd, a->vj, a->rk);
}
static void output_vrr(DisasContext *ctx, arg_vrr *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, r%d, r%d", a->vd, a->rj, a->rk);
}
static void output_v_i(DisasContext *ctx, arg_v_i *a, const char *mnemonic)
{
output(ctx, mnemonic, "v%d, 0x%x", a->vd, a->imm);
}
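
/*
 * For illustration only (not part of the patch): with the INSN_LSX macro and
 * the output_* helpers above, an entry such as INSN_LSX(vadd_b, vvv) expands
 * to a disassembler hook roughly equivalent to:
 *
 *     static bool trans_vadd_b(DisasContext *ctx, arg_vvv *a)
 *     {
 *         output_vvv(ctx, a, "vadd_b");
 *         return true;
 *     }
 *
 * i.e. it prints the mnemonic followed by "v%d, v%d, v%d" for vd, vj, vk.
 */
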
INSN_LSX(vadd_b, vvv)
INSN_LSX(vadd_h, vvv)
INSN_LSX(vadd_w, vvv)
INSN_LSX(vadd_d, vvv)
INSN_LSX(vadd_q, vvv)
INSN_LSX(vsub_b, vvv)
INSN_LSX(vsub_h, vvv)
INSN_LSX(vsub_w, vvv)
INSN_LSX(vsub_d, vvv)
INSN_LSX(vsub_q, vvv)
INSN_LSX(vaddi_bu, vv_i)
INSN_LSX(vaddi_hu, vv_i)
INSN_LSX(vaddi_wu, vv_i)
INSN_LSX(vaddi_du, vv_i)
INSN_LSX(vsubi_bu, vv_i)
INSN_LSX(vsubi_hu, vv_i)
INSN_LSX(vsubi_wu, vv_i)
INSN_LSX(vsubi_du, vv_i)
INSN_LSX(vneg_b, vv)
INSN_LSX(vneg_h, vv)
INSN_LSX(vneg_w, vv)
INSN_LSX(vneg_d, vv)
INSN_LSX(vsadd_b, vvv)
INSN_LSX(vsadd_h, vvv)
INSN_LSX(vsadd_w, vvv)
INSN_LSX(vsadd_d, vvv)
INSN_LSX(vsadd_bu, vvv)
INSN_LSX(vsadd_hu, vvv)
INSN_LSX(vsadd_wu, vvv)
INSN_LSX(vsadd_du, vvv)
INSN_LSX(vssub_b, vvv)
INSN_LSX(vssub_h, vvv)
INSN_LSX(vssub_w, vvv)
INSN_LSX(vssub_d, vvv)
INSN_LSX(vssub_bu, vvv)
INSN_LSX(vssub_hu, vvv)
INSN_LSX(vssub_wu, vvv)
INSN_LSX(vssub_du, vvv)
INSN_LSX(vhaddw_h_b, vvv)
INSN_LSX(vhaddw_w_h, vvv)
INSN_LSX(vhaddw_d_w, vvv)
INSN_LSX(vhaddw_q_d, vvv)
INSN_LSX(vhaddw_hu_bu, vvv)
INSN_LSX(vhaddw_wu_hu, vvv)
INSN_LSX(vhaddw_du_wu, vvv)
INSN_LSX(vhaddw_qu_du, vvv)
INSN_LSX(vhsubw_h_b, vvv)
INSN_LSX(vhsubw_w_h, vvv)
INSN_LSX(vhsubw_d_w, vvv)
INSN_LSX(vhsubw_q_d, vvv)
INSN_LSX(vhsubw_hu_bu, vvv)
INSN_LSX(vhsubw_wu_hu, vvv)
INSN_LSX(vhsubw_du_wu, vvv)
INSN_LSX(vhsubw_qu_du, vvv)
INSN_LSX(vaddwev_h_b, vvv)
INSN_LSX(vaddwev_w_h, vvv)
INSN_LSX(vaddwev_d_w, vvv)
INSN_LSX(vaddwev_q_d, vvv)
INSN_LSX(vaddwod_h_b, vvv)
INSN_LSX(vaddwod_w_h, vvv)
INSN_LSX(vaddwod_d_w, vvv)
INSN_LSX(vaddwod_q_d, vvv)
INSN_LSX(vsubwev_h_b, vvv)
INSN_LSX(vsubwev_w_h, vvv)
INSN_LSX(vsubwev_d_w, vvv)
INSN_LSX(vsubwev_q_d, vvv)
INSN_LSX(vsubwod_h_b, vvv)
INSN_LSX(vsubwod_w_h, vvv)
INSN_LSX(vsubwod_d_w, vvv)
INSN_LSX(vsubwod_q_d, vvv)
INSN_LSX(vaddwev_h_bu, vvv)
INSN_LSX(vaddwev_w_hu, vvv)
INSN_LSX(vaddwev_d_wu, vvv)
INSN_LSX(vaddwev_q_du, vvv)
INSN_LSX(vaddwod_h_bu, vvv)
INSN_LSX(vaddwod_w_hu, vvv)
INSN_LSX(vaddwod_d_wu, vvv)
INSN_LSX(vaddwod_q_du, vvv)
INSN_LSX(vsubwev_h_bu, vvv)
INSN_LSX(vsubwev_w_hu, vvv)
INSN_LSX(vsubwev_d_wu, vvv)
INSN_LSX(vsubwev_q_du, vvv)
INSN_LSX(vsubwod_h_bu, vvv)
INSN_LSX(vsubwod_w_hu, vvv)
INSN_LSX(vsubwod_d_wu, vvv)
INSN_LSX(vsubwod_q_du, vvv)
INSN_LSX(vaddwev_h_bu_b, vvv)
INSN_LSX(vaddwev_w_hu_h, vvv)
INSN_LSX(vaddwev_d_wu_w, vvv)
INSN_LSX(vaddwev_q_du_d, vvv)
INSN_LSX(vaddwod_h_bu_b, vvv)
INSN_LSX(vaddwod_w_hu_h, vvv)
INSN_LSX(vaddwod_d_wu_w, vvv)
INSN_LSX(vaddwod_q_du_d, vvv)
INSN_LSX(vavg_b, vvv)
INSN_LSX(vavg_h, vvv)
INSN_LSX(vavg_w, vvv)
INSN_LSX(vavg_d, vvv)
INSN_LSX(vavg_bu, vvv)
INSN_LSX(vavg_hu, vvv)
INSN_LSX(vavg_wu, vvv)
INSN_LSX(vavg_du, vvv)
INSN_LSX(vavgr_b, vvv)
INSN_LSX(vavgr_h, vvv)
INSN_LSX(vavgr_w, vvv)
INSN_LSX(vavgr_d, vvv)
INSN_LSX(vavgr_bu, vvv)
INSN_LSX(vavgr_hu, vvv)
INSN_LSX(vavgr_wu, vvv)
INSN_LSX(vavgr_du, vvv)
INSN_LSX(vabsd_b, vvv)
INSN_LSX(vabsd_h, vvv)
INSN_LSX(vabsd_w, vvv)
INSN_LSX(vabsd_d, vvv)
INSN_LSX(vabsd_bu, vvv)
INSN_LSX(vabsd_hu, vvv)
INSN_LSX(vabsd_wu, vvv)
INSN_LSX(vabsd_du, vvv)
INSN_LSX(vadda_b, vvv)
INSN_LSX(vadda_h, vvv)
INSN_LSX(vadda_w, vvv)
INSN_LSX(vadda_d, vvv)
INSN_LSX(vmax_b, vvv)
INSN_LSX(vmax_h, vvv)
INSN_LSX(vmax_w, vvv)
INSN_LSX(vmax_d, vvv)
INSN_LSX(vmin_b, vvv)
INSN_LSX(vmin_h, vvv)
INSN_LSX(vmin_w, vvv)
INSN_LSX(vmin_d, vvv)
INSN_LSX(vmax_bu, vvv)
INSN_LSX(vmax_hu, vvv)
INSN_LSX(vmax_wu, vvv)
INSN_LSX(vmax_du, vvv)
INSN_LSX(vmin_bu, vvv)
INSN_LSX(vmin_hu, vvv)
INSN_LSX(vmin_wu, vvv)
INSN_LSX(vmin_du, vvv)
INSN_LSX(vmaxi_b, vv_i)
INSN_LSX(vmaxi_h, vv_i)
INSN_LSX(vmaxi_w, vv_i)
INSN_LSX(vmaxi_d, vv_i)
INSN_LSX(vmini_b, vv_i)
INSN_LSX(vmini_h, vv_i)
INSN_LSX(vmini_w, vv_i)
INSN_LSX(vmini_d, vv_i)
INSN_LSX(vmaxi_bu, vv_i)
INSN_LSX(vmaxi_hu, vv_i)
INSN_LSX(vmaxi_wu, vv_i)
INSN_LSX(vmaxi_du, vv_i)
INSN_LSX(vmini_bu, vv_i)
INSN_LSX(vmini_hu, vv_i)
INSN_LSX(vmini_wu, vv_i)
INSN_LSX(vmini_du, vv_i)
INSN_LSX(vmul_b, vvv)
INSN_LSX(vmul_h, vvv)
INSN_LSX(vmul_w, vvv)
INSN_LSX(vmul_d, vvv)
INSN_LSX(vmuh_b, vvv)
INSN_LSX(vmuh_h, vvv)
INSN_LSX(vmuh_w, vvv)
INSN_LSX(vmuh_d, vvv)
INSN_LSX(vmuh_bu, vvv)
INSN_LSX(vmuh_hu, vvv)
INSN_LSX(vmuh_wu, vvv)
INSN_LSX(vmuh_du, vvv)
INSN_LSX(vmulwev_h_b, vvv)
INSN_LSX(vmulwev_w_h, vvv)
INSN_LSX(vmulwev_d_w, vvv)
INSN_LSX(vmulwev_q_d, vvv)
INSN_LSX(vmulwod_h_b, vvv)
INSN_LSX(vmulwod_w_h, vvv)
INSN_LSX(vmulwod_d_w, vvv)
INSN_LSX(vmulwod_q_d, vvv)
INSN_LSX(vmulwev_h_bu, vvv)
INSN_LSX(vmulwev_w_hu, vvv)
INSN_LSX(vmulwev_d_wu, vvv)
INSN_LSX(vmulwev_q_du, vvv)
INSN_LSX(vmulwod_h_bu, vvv)
INSN_LSX(vmulwod_w_hu, vvv)
INSN_LSX(vmulwod_d_wu, vvv)
INSN_LSX(vmulwod_q_du, vvv)
INSN_LSX(vmulwev_h_bu_b, vvv)
INSN_LSX(vmulwev_w_hu_h, vvv)
INSN_LSX(vmulwev_d_wu_w, vvv)
INSN_LSX(vmulwev_q_du_d, vvv)
INSN_LSX(vmulwod_h_bu_b, vvv)
INSN_LSX(vmulwod_w_hu_h, vvv)
INSN_LSX(vmulwod_d_wu_w, vvv)
INSN_LSX(vmulwod_q_du_d, vvv)
INSN_LSX(vmadd_b, vvv)
INSN_LSX(vmadd_h, vvv)
INSN_LSX(vmadd_w, vvv)
INSN_LSX(vmadd_d, vvv)
INSN_LSX(vmsub_b, vvv)
INSN_LSX(vmsub_h, vvv)
INSN_LSX(vmsub_w, vvv)
INSN_LSX(vmsub_d, vvv)
INSN_LSX(vmaddwev_h_b, vvv)
INSN_LSX(vmaddwev_w_h, vvv)
INSN_LSX(vmaddwev_d_w, vvv)
INSN_LSX(vmaddwev_q_d, vvv)
INSN_LSX(vmaddwod_h_b, vvv)
INSN_LSX(vmaddwod_w_h, vvv)
INSN_LSX(vmaddwod_d_w, vvv)
INSN_LSX(vmaddwod_q_d, vvv)
INSN_LSX(vmaddwev_h_bu, vvv)
INSN_LSX(vmaddwev_w_hu, vvv)
INSN_LSX(vmaddwev_d_wu, vvv)
INSN_LSX(vmaddwev_q_du, vvv)
INSN_LSX(vmaddwod_h_bu, vvv)
INSN_LSX(vmaddwod_w_hu, vvv)
INSN_LSX(vmaddwod_d_wu, vvv)
INSN_LSX(vmaddwod_q_du, vvv)
INSN_LSX(vmaddwev_h_bu_b, vvv)
INSN_LSX(vmaddwev_w_hu_h, vvv)
INSN_LSX(vmaddwev_d_wu_w, vvv)
INSN_LSX(vmaddwev_q_du_d, vvv)
INSN_LSX(vmaddwod_h_bu_b, vvv)
INSN_LSX(vmaddwod_w_hu_h, vvv)
INSN_LSX(vmaddwod_d_wu_w, vvv)
INSN_LSX(vmaddwod_q_du_d, vvv)
INSN_LSX(vdiv_b, vvv)
INSN_LSX(vdiv_h, vvv)
INSN_LSX(vdiv_w, vvv)
INSN_LSX(vdiv_d, vvv)
INSN_LSX(vdiv_bu, vvv)
INSN_LSX(vdiv_hu, vvv)
INSN_LSX(vdiv_wu, vvv)
INSN_LSX(vdiv_du, vvv)
INSN_LSX(vmod_b, vvv)
INSN_LSX(vmod_h, vvv)
INSN_LSX(vmod_w, vvv)
INSN_LSX(vmod_d, vvv)
INSN_LSX(vmod_bu, vvv)
INSN_LSX(vmod_hu, vvv)
INSN_LSX(vmod_wu, vvv)
INSN_LSX(vmod_du, vvv)
INSN_LSX(vsat_b, vv_i)
INSN_LSX(vsat_h, vv_i)
INSN_LSX(vsat_w, vv_i)
INSN_LSX(vsat_d, vv_i)
INSN_LSX(vsat_bu, vv_i)
INSN_LSX(vsat_hu, vv_i)
INSN_LSX(vsat_wu, vv_i)
INSN_LSX(vsat_du, vv_i)
INSN_LSX(vexth_h_b, vv)
INSN_LSX(vexth_w_h, vv)
INSN_LSX(vexth_d_w, vv)
INSN_LSX(vexth_q_d, vv)
INSN_LSX(vexth_hu_bu, vv)
INSN_LSX(vexth_wu_hu, vv)
INSN_LSX(vexth_du_wu, vv)
INSN_LSX(vexth_qu_du, vv)
INSN_LSX(vsigncov_b, vvv)
INSN_LSX(vsigncov_h, vvv)
INSN_LSX(vsigncov_w, vvv)
INSN_LSX(vsigncov_d, vvv)
INSN_LSX(vmskltz_b, vv)
INSN_LSX(vmskltz_h, vv)
INSN_LSX(vmskltz_w, vv)
INSN_LSX(vmskltz_d, vv)
INSN_LSX(vmskgez_b, vv)
INSN_LSX(vmsknz_b, vv)
INSN_LSX(vldi, v_i)
INSN_LSX(vand_v, vvv)
INSN_LSX(vor_v, vvv)
INSN_LSX(vxor_v, vvv)
INSN_LSX(vnor_v, vvv)
INSN_LSX(vandn_v, vvv)
INSN_LSX(vorn_v, vvv)
INSN_LSX(vandi_b, vv_i)
INSN_LSX(vori_b, vv_i)
INSN_LSX(vxori_b, vv_i)
INSN_LSX(vnori_b, vv_i)
INSN_LSX(vsll_b, vvv)
INSN_LSX(vsll_h, vvv)
INSN_LSX(vsll_w, vvv)
INSN_LSX(vsll_d, vvv)
INSN_LSX(vslli_b, vv_i)
INSN_LSX(vslli_h, vv_i)
INSN_LSX(vslli_w, vv_i)
INSN_LSX(vslli_d, vv_i)
INSN_LSX(vsrl_b, vvv)
INSN_LSX(vsrl_h, vvv)
INSN_LSX(vsrl_w, vvv)
INSN_LSX(vsrl_d, vvv)
INSN_LSX(vsrli_b, vv_i)
INSN_LSX(vsrli_h, vv_i)
INSN_LSX(vsrli_w, vv_i)
INSN_LSX(vsrli_d, vv_i)
INSN_LSX(vsra_b, vvv)
INSN_LSX(vsra_h, vvv)
INSN_LSX(vsra_w, vvv)
INSN_LSX(vsra_d, vvv)
INSN_LSX(vsrai_b, vv_i)
INSN_LSX(vsrai_h, vv_i)
INSN_LSX(vsrai_w, vv_i)
INSN_LSX(vsrai_d, vv_i)
INSN_LSX(vrotr_b, vvv)
INSN_LSX(vrotr_h, vvv)
INSN_LSX(vrotr_w, vvv)
INSN_LSX(vrotr_d, vvv)
INSN_LSX(vrotri_b, vv_i)
INSN_LSX(vrotri_h, vv_i)
INSN_LSX(vrotri_w, vv_i)
INSN_LSX(vrotri_d, vv_i)
INSN_LSX(vsllwil_h_b, vv_i)
INSN_LSX(vsllwil_w_h, vv_i)
INSN_LSX(vsllwil_d_w, vv_i)
INSN_LSX(vextl_q_d, vv)
INSN_LSX(vsllwil_hu_bu, vv_i)
INSN_LSX(vsllwil_wu_hu, vv_i)
INSN_LSX(vsllwil_du_wu, vv_i)
INSN_LSX(vextl_qu_du, vv)
INSN_LSX(vsrlr_b, vvv)
INSN_LSX(vsrlr_h, vvv)
INSN_LSX(vsrlr_w, vvv)
INSN_LSX(vsrlr_d, vvv)
INSN_LSX(vsrlri_b, vv_i)
INSN_LSX(vsrlri_h, vv_i)
INSN_LSX(vsrlri_w, vv_i)
INSN_LSX(vsrlri_d, vv_i)
INSN_LSX(vsrar_b, vvv)
INSN_LSX(vsrar_h, vvv)
INSN_LSX(vsrar_w, vvv)
INSN_LSX(vsrar_d, vvv)
INSN_LSX(vsrari_b, vv_i)
INSN_LSX(vsrari_h, vv_i)
INSN_LSX(vsrari_w, vv_i)
INSN_LSX(vsrari_d, vv_i)
INSN_LSX(vsrln_b_h, vvv)
INSN_LSX(vsrln_h_w, vvv)
INSN_LSX(vsrln_w_d, vvv)
INSN_LSX(vsran_b_h, vvv)
INSN_LSX(vsran_h_w, vvv)
INSN_LSX(vsran_w_d, vvv)
INSN_LSX(vsrlni_b_h, vv_i)
INSN_LSX(vsrlni_h_w, vv_i)
INSN_LSX(vsrlni_w_d, vv_i)
INSN_LSX(vsrlni_d_q, vv_i)
INSN_LSX(vsrani_b_h, vv_i)
INSN_LSX(vsrani_h_w, vv_i)
INSN_LSX(vsrani_w_d, vv_i)
INSN_LSX(vsrani_d_q, vv_i)
INSN_LSX(vsrlrn_b_h, vvv)
INSN_LSX(vsrlrn_h_w, vvv)
INSN_LSX(vsrlrn_w_d, vvv)
INSN_LSX(vsrarn_b_h, vvv)
INSN_LSX(vsrarn_h_w, vvv)
INSN_LSX(vsrarn_w_d, vvv)
INSN_LSX(vsrlrni_b_h, vv_i)
INSN_LSX(vsrlrni_h_w, vv_i)
INSN_LSX(vsrlrni_w_d, vv_i)
INSN_LSX(vsrlrni_d_q, vv_i)
INSN_LSX(vsrarni_b_h, vv_i)
INSN_LSX(vsrarni_h_w, vv_i)
INSN_LSX(vsrarni_w_d, vv_i)
INSN_LSX(vsrarni_d_q, vv_i)
INSN_LSX(vssrln_b_h, vvv)
INSN_LSX(vssrln_h_w, vvv)
INSN_LSX(vssrln_w_d, vvv)
INSN_LSX(vssran_b_h, vvv)
INSN_LSX(vssran_h_w, vvv)
INSN_LSX(vssran_w_d, vvv)
INSN_LSX(vssrln_bu_h, vvv)
INSN_LSX(vssrln_hu_w, vvv)
INSN_LSX(vssrln_wu_d, vvv)
INSN_LSX(vssran_bu_h, vvv)
INSN_LSX(vssran_hu_w, vvv)
INSN_LSX(vssran_wu_d, vvv)
INSN_LSX(vssrlni_b_h, vv_i)
INSN_LSX(vssrlni_h_w, vv_i)
INSN_LSX(vssrlni_w_d, vv_i)
INSN_LSX(vssrlni_d_q, vv_i)
INSN_LSX(vssrani_b_h, vv_i)
INSN_LSX(vssrani_h_w, vv_i)
INSN_LSX(vssrani_w_d, vv_i)
INSN_LSX(vssrani_d_q, vv_i)
INSN_LSX(vssrlni_bu_h, vv_i)
INSN_LSX(vssrlni_hu_w, vv_i)
INSN_LSX(vssrlni_wu_d, vv_i)
INSN_LSX(vssrlni_du_q, vv_i)
INSN_LSX(vssrani_bu_h, vv_i)
INSN_LSX(vssrani_hu_w, vv_i)
INSN_LSX(vssrani_wu_d, vv_i)
INSN_LSX(vssrani_du_q, vv_i)
INSN_LSX(vssrlrn_b_h, vvv)
INSN_LSX(vssrlrn_h_w, vvv)
INSN_LSX(vssrlrn_w_d, vvv)
INSN_LSX(vssrarn_b_h, vvv)
INSN_LSX(vssrarn_h_w, vvv)
INSN_LSX(vssrarn_w_d, vvv)
INSN_LSX(vssrlrn_bu_h, vvv)
INSN_LSX(vssrlrn_hu_w, vvv)
INSN_LSX(vssrlrn_wu_d, vvv)
INSN_LSX(vssrarn_bu_h, vvv)
INSN_LSX(vssrarn_hu_w, vvv)
INSN_LSX(vssrarn_wu_d, vvv)
INSN_LSX(vssrlrni_b_h, vv_i)
INSN_LSX(vssrlrni_h_w, vv_i)
INSN_LSX(vssrlrni_w_d, vv_i)
INSN_LSX(vssrlrni_d_q, vv_i)
INSN_LSX(vssrlrni_bu_h, vv_i)
INSN_LSX(vssrlrni_hu_w, vv_i)
INSN_LSX(vssrlrni_wu_d, vv_i)
INSN_LSX(vssrlrni_du_q, vv_i)
INSN_LSX(vssrarni_b_h, vv_i)
INSN_LSX(vssrarni_h_w, vv_i)
INSN_LSX(vssrarni_w_d, vv_i)
INSN_LSX(vssrarni_d_q, vv_i)
INSN_LSX(vssrarni_bu_h, vv_i)
INSN_LSX(vssrarni_hu_w, vv_i)
INSN_LSX(vssrarni_wu_d, vv_i)
INSN_LSX(vssrarni_du_q, vv_i)
INSN_LSX(vclo_b, vv)
INSN_LSX(vclo_h, vv)
INSN_LSX(vclo_w, vv)
INSN_LSX(vclo_d, vv)
INSN_LSX(vclz_b, vv)
INSN_LSX(vclz_h, vv)
INSN_LSX(vclz_w, vv)
INSN_LSX(vclz_d, vv)
INSN_LSX(vpcnt_b, vv)
INSN_LSX(vpcnt_h, vv)
INSN_LSX(vpcnt_w, vv)
INSN_LSX(vpcnt_d, vv)
INSN_LSX(vbitclr_b, vvv)
INSN_LSX(vbitclr_h, vvv)
INSN_LSX(vbitclr_w, vvv)
INSN_LSX(vbitclr_d, vvv)
INSN_LSX(vbitclri_b, vv_i)
INSN_LSX(vbitclri_h, vv_i)
INSN_LSX(vbitclri_w, vv_i)
INSN_LSX(vbitclri_d, vv_i)
INSN_LSX(vbitset_b, vvv)
INSN_LSX(vbitset_h, vvv)
INSN_LSX(vbitset_w, vvv)
INSN_LSX(vbitset_d, vvv)
INSN_LSX(vbitseti_b, vv_i)
INSN_LSX(vbitseti_h, vv_i)
INSN_LSX(vbitseti_w, vv_i)
INSN_LSX(vbitseti_d, vv_i)
INSN_LSX(vbitrev_b, vvv)
INSN_LSX(vbitrev_h, vvv)
INSN_LSX(vbitrev_w, vvv)
INSN_LSX(vbitrev_d, vvv)
INSN_LSX(vbitrevi_b, vv_i)
INSN_LSX(vbitrevi_h, vv_i)
INSN_LSX(vbitrevi_w, vv_i)
INSN_LSX(vbitrevi_d, vv_i)
INSN_LSX(vfrstp_b, vvv)
INSN_LSX(vfrstp_h, vvv)
INSN_LSX(vfrstpi_b, vv_i)
INSN_LSX(vfrstpi_h, vv_i)
INSN_LSX(vfadd_s, vvv)
INSN_LSX(vfadd_d, vvv)
INSN_LSX(vfsub_s, vvv)
INSN_LSX(vfsub_d, vvv)
INSN_LSX(vfmul_s, vvv)
INSN_LSX(vfmul_d, vvv)
INSN_LSX(vfdiv_s, vvv)
INSN_LSX(vfdiv_d, vvv)
INSN_LSX(vfmadd_s, vvvv)
INSN_LSX(vfmadd_d, vvvv)
INSN_LSX(vfmsub_s, vvvv)
INSN_LSX(vfmsub_d, vvvv)
INSN_LSX(vfnmadd_s, vvvv)
INSN_LSX(vfnmadd_d, vvvv)
INSN_LSX(vfnmsub_s, vvvv)
INSN_LSX(vfnmsub_d, vvvv)
INSN_LSX(vfmax_s, vvv)
INSN_LSX(vfmax_d, vvv)
INSN_LSX(vfmin_s, vvv)
INSN_LSX(vfmin_d, vvv)
INSN_LSX(vfmaxa_s, vvv)
INSN_LSX(vfmaxa_d, vvv)
INSN_LSX(vfmina_s, vvv)
INSN_LSX(vfmina_d, vvv)
INSN_LSX(vflogb_s, vv)
INSN_LSX(vflogb_d, vv)
INSN_LSX(vfclass_s, vv)
INSN_LSX(vfclass_d, vv)
INSN_LSX(vfsqrt_s, vv)
INSN_LSX(vfsqrt_d, vv)
INSN_LSX(vfrecip_s, vv)
INSN_LSX(vfrecip_d, vv)
INSN_LSX(vfrsqrt_s, vv)
INSN_LSX(vfrsqrt_d, vv)
INSN_LSX(vfcvtl_s_h, vv)
INSN_LSX(vfcvth_s_h, vv)
INSN_LSX(vfcvtl_d_s, vv)
INSN_LSX(vfcvth_d_s, vv)
INSN_LSX(vfcvt_h_s, vvv)
INSN_LSX(vfcvt_s_d, vvv)
INSN_LSX(vfrint_s, vv)
INSN_LSX(vfrint_d, vv)
INSN_LSX(vfrintrm_s, vv)
INSN_LSX(vfrintrm_d, vv)
INSN_LSX(vfrintrp_s, vv)
INSN_LSX(vfrintrp_d, vv)
INSN_LSX(vfrintrz_s, vv)
INSN_LSX(vfrintrz_d, vv)
INSN_LSX(vfrintrne_s, vv)
INSN_LSX(vfrintrne_d, vv)
INSN_LSX(vftint_w_s, vv)
INSN_LSX(vftint_l_d, vv)
INSN_LSX(vftintrm_w_s, vv)
INSN_LSX(vftintrm_l_d, vv)
INSN_LSX(vftintrp_w_s, vv)
INSN_LSX(vftintrp_l_d, vv)
INSN_LSX(vftintrz_w_s, vv)
INSN_LSX(vftintrz_l_d, vv)
INSN_LSX(vftintrne_w_s, vv)
INSN_LSX(vftintrne_l_d, vv)
INSN_LSX(vftint_wu_s, vv)
INSN_LSX(vftint_lu_d, vv)
INSN_LSX(vftintrz_wu_s, vv)
INSN_LSX(vftintrz_lu_d, vv)
INSN_LSX(vftint_w_d, vvv)
INSN_LSX(vftintrm_w_d, vvv)
INSN_LSX(vftintrp_w_d, vvv)
INSN_LSX(vftintrz_w_d, vvv)
INSN_LSX(vftintrne_w_d, vvv)
INSN_LSX(vftintl_l_s, vv)
INSN_LSX(vftinth_l_s, vv)
INSN_LSX(vftintrml_l_s, vv)
INSN_LSX(vftintrmh_l_s, vv)
INSN_LSX(vftintrpl_l_s, vv)
INSN_LSX(vftintrph_l_s, vv)
INSN_LSX(vftintrzl_l_s, vv)
INSN_LSX(vftintrzh_l_s, vv)
INSN_LSX(vftintrnel_l_s, vv)
INSN_LSX(vftintrneh_l_s, vv)
INSN_LSX(vffint_s_w, vv)
INSN_LSX(vffint_s_wu, vv)
INSN_LSX(vffint_d_l, vv)
INSN_LSX(vffint_d_lu, vv)
INSN_LSX(vffintl_d_w, vv)
INSN_LSX(vffinth_d_w, vv)
INSN_LSX(vffint_s_l, vvv)
INSN_LSX(vseq_b, vvv)
INSN_LSX(vseq_h, vvv)
INSN_LSX(vseq_w, vvv)
INSN_LSX(vseq_d, vvv)
INSN_LSX(vseqi_b, vv_i)
INSN_LSX(vseqi_h, vv_i)
INSN_LSX(vseqi_w, vv_i)
INSN_LSX(vseqi_d, vv_i)
INSN_LSX(vsle_b, vvv)
INSN_LSX(vsle_h, vvv)
INSN_LSX(vsle_w, vvv)
INSN_LSX(vsle_d, vvv)
INSN_LSX(vslei_b, vv_i)
INSN_LSX(vslei_h, vv_i)
INSN_LSX(vslei_w, vv_i)
INSN_LSX(vslei_d, vv_i)
INSN_LSX(vsle_bu, vvv)
INSN_LSX(vsle_hu, vvv)
INSN_LSX(vsle_wu, vvv)
INSN_LSX(vsle_du, vvv)
INSN_LSX(vslei_bu, vv_i)
INSN_LSX(vslei_hu, vv_i)
INSN_LSX(vslei_wu, vv_i)
INSN_LSX(vslei_du, vv_i)
INSN_LSX(vslt_b, vvv)
INSN_LSX(vslt_h, vvv)
INSN_LSX(vslt_w, vvv)
INSN_LSX(vslt_d, vvv)
INSN_LSX(vslti_b, vv_i)
INSN_LSX(vslti_h, vv_i)
INSN_LSX(vslti_w, vv_i)
INSN_LSX(vslti_d, vv_i)
INSN_LSX(vslt_bu, vvv)
INSN_LSX(vslt_hu, vvv)
INSN_LSX(vslt_wu, vvv)
INSN_LSX(vslt_du, vvv)
INSN_LSX(vslti_bu, vv_i)
INSN_LSX(vslti_hu, vv_i)
INSN_LSX(vslti_wu, vv_i)
INSN_LSX(vslti_du, vv_i)
#define output_vfcmp(C, PREFIX, SUFFIX) \
{ \
(C)->info->fprintf_func((C)->info->stream, "%08x %s%s\t%d, f%d, f%d", \
(C)->insn, PREFIX, SUFFIX, a->vd, \
a->vj, a->vk); \
}
static bool output_vvv_fcond(DisasContext *ctx, arg_vvv_fcond * a,
const char *suffix)
{
bool ret = true;
switch (a->fcond) {
case 0x0:
output_vfcmp(ctx, "vfcmp_caf_", suffix);
break;
case 0x1:
output_vfcmp(ctx, "vfcmp_saf_", suffix);
break;
case 0x2:
output_vfcmp(ctx, "vfcmp_clt_", suffix);
break;
case 0x3:
output_vfcmp(ctx, "vfcmp_slt_", suffix);
break;
case 0x4:
output_vfcmp(ctx, "vfcmp_ceq_", suffix);
break;
case 0x5:
output_vfcmp(ctx, "vfcmp_seq_", suffix);
break;
case 0x6:
output_vfcmp(ctx, "vfcmp_cle_", suffix);
break;
case 0x7:
output_vfcmp(ctx, "vfcmp_sle_", suffix);
break;
case 0x8:
output_vfcmp(ctx, "vfcmp_cun_", suffix);
break;
case 0x9:
output_vfcmp(ctx, "vfcmp_sun_", suffix);
break;
case 0xA:
output_vfcmp(ctx, "vfcmp_cult_", suffix);
break;
case 0xB:
output_vfcmp(ctx, "vfcmp_sult_", suffix);
break;
case 0xC:
output_vfcmp(ctx, "vfcmp_cueq_", suffix);
break;
case 0xD:
output_vfcmp(ctx, "vfcmp_sueq_", suffix);
break;
case 0xE:
output_vfcmp(ctx, "vfcmp_cule_", suffix);
break;
case 0xF:
output_vfcmp(ctx, "vfcmp_sule_", suffix);
break;
case 0x10:
output_vfcmp(ctx, "vfcmp_cne_", suffix);
break;
case 0x11:
output_vfcmp(ctx, "vfcmp_sne_", suffix);
break;
case 0x14:
output_vfcmp(ctx, "vfcmp_cor_", suffix);
break;
case 0x15:
output_vfcmp(ctx, "vfcmp_sor_", suffix);
break;
case 0x18:
output_vfcmp(ctx, "vfcmp_cune_", suffix);
break;
case 0x19:
output_vfcmp(ctx, "vfcmp_sune_", suffix);
break;
default:
ret = false;
}
return ret;
}
#define LSX_FCMP_INSN(suffix) \
static bool trans_vfcmp_cond_##suffix(DisasContext *ctx, \
arg_vvv_fcond * a) \
{ \
return output_vvv_fcond(ctx, a, #suffix); \
}
LSX_FCMP_INSN(s)
LSX_FCMP_INSN(d)
INSN_LSX(vbitsel_v, vvvv)
INSN_LSX(vbitseli_b, vv_i)
INSN_LSX(vseteqz_v, cv)
INSN_LSX(vsetnez_v, cv)
INSN_LSX(vsetanyeqz_b, cv)
INSN_LSX(vsetanyeqz_h, cv)
INSN_LSX(vsetanyeqz_w, cv)
INSN_LSX(vsetanyeqz_d, cv)
INSN_LSX(vsetallnez_b, cv)
INSN_LSX(vsetallnez_h, cv)
INSN_LSX(vsetallnez_w, cv)
INSN_LSX(vsetallnez_d, cv)
INSN_LSX(vinsgr2vr_b, vr_i)
INSN_LSX(vinsgr2vr_h, vr_i)
INSN_LSX(vinsgr2vr_w, vr_i)
INSN_LSX(vinsgr2vr_d, vr_i)
INSN_LSX(vpickve2gr_b, rv_i)
INSN_LSX(vpickve2gr_h, rv_i)
INSN_LSX(vpickve2gr_w, rv_i)
INSN_LSX(vpickve2gr_d, rv_i)
INSN_LSX(vpickve2gr_bu, rv_i)
INSN_LSX(vpickve2gr_hu, rv_i)
INSN_LSX(vpickve2gr_wu, rv_i)
INSN_LSX(vpickve2gr_du, rv_i)
INSN_LSX(vreplgr2vr_b, vr)
INSN_LSX(vreplgr2vr_h, vr)
INSN_LSX(vreplgr2vr_w, vr)
INSN_LSX(vreplgr2vr_d, vr)
INSN_LSX(vreplve_b, vvr)
INSN_LSX(vreplve_h, vvr)
INSN_LSX(vreplve_w, vvr)
INSN_LSX(vreplve_d, vvr)
INSN_LSX(vreplvei_b, vv_i)
INSN_LSX(vreplvei_h, vv_i)
INSN_LSX(vreplvei_w, vv_i)
INSN_LSX(vreplvei_d, vv_i)
INSN_LSX(vbsll_v, vv_i)
INSN_LSX(vbsrl_v, vv_i)
INSN_LSX(vpackev_b, vvv)
INSN_LSX(vpackev_h, vvv)
INSN_LSX(vpackev_w, vvv)
INSN_LSX(vpackev_d, vvv)
INSN_LSX(vpackod_b, vvv)
INSN_LSX(vpackod_h, vvv)
INSN_LSX(vpackod_w, vvv)
INSN_LSX(vpackod_d, vvv)
INSN_LSX(vpickev_b, vvv)
INSN_LSX(vpickev_h, vvv)
INSN_LSX(vpickev_w, vvv)
INSN_LSX(vpickev_d, vvv)
INSN_LSX(vpickod_b, vvv)
INSN_LSX(vpickod_h, vvv)
INSN_LSX(vpickod_w, vvv)
INSN_LSX(vpickod_d, vvv)
INSN_LSX(vilvl_b, vvv)
INSN_LSX(vilvl_h, vvv)
INSN_LSX(vilvl_w, vvv)
INSN_LSX(vilvl_d, vvv)
INSN_LSX(vilvh_b, vvv)
INSN_LSX(vilvh_h, vvv)
INSN_LSX(vilvh_w, vvv)
INSN_LSX(vilvh_d, vvv)
INSN_LSX(vshuf_b, vvvv)
INSN_LSX(vshuf_h, vvv)
INSN_LSX(vshuf_w, vvv)
INSN_LSX(vshuf_d, vvv)
INSN_LSX(vshuf4i_b, vv_i)
INSN_LSX(vshuf4i_h, vv_i)
INSN_LSX(vshuf4i_w, vv_i)
INSN_LSX(vshuf4i_d, vv_i)
INSN_LSX(vpermi_w, vv_i)
INSN_LSX(vextrins_d, vv_i)
INSN_LSX(vextrins_w, vv_i)
INSN_LSX(vextrins_h, vv_i)
INSN_LSX(vextrins_b, vv_i)
INSN_LSX(vld, vr_i)
INSN_LSX(vst, vr_i)
INSN_LSX(vldx, vrr)
INSN_LSX(vstx, vrr)
INSN_LSX(vldrepl_d, vr_i)
INSN_LSX(vldrepl_w, vr_i)
INSN_LSX(vldrepl_h, vr_i)
INSN_LSX(vldrepl_b, vr_i)
INSN_LSX(vstelm_d, vr_ii)
INSN_LSX(vstelm_w, vr_ii)
INSN_LSX(vstelm_h, vr_ii)
INSN_LSX(vstelm_b, vr_ii)

---- next changed file ----

@@ -33,7 +33,7 @@ void restore_fp_status(CPULoongArchState *env)
     set_flush_to_zero(0, &env->fp_status);
 }
 
-static int ieee_ex_to_loongarch(int xcpt)
+int ieee_ex_to_loongarch(int xcpt)
 {
     int ret = 0;
     if (xcpt & float_flag_invalid) {

---- next changed file ----

@@ -69,7 +69,7 @@ static int loongarch_gdb_get_fpu(CPULoongArchState *env,
                                  GByteArray *mem_buf, int n)
 {
     if (0 <= n && n < 32) {
-        return gdb_get_reg64(mem_buf, env->fpr[n]);
+        return gdb_get_reg64(mem_buf, env->fpr[n].vreg.D(0));
     } else if (n == 32) {
         uint64_t val = read_fcc(env);
         return gdb_get_reg64(mem_buf, val);
@@ -85,7 +85,7 @@ static int loongarch_gdb_set_fpu(CPULoongArchState *env,
     int length = 0;
 
     if (0 <= n && n < 32) {
-        env->fpr[n] = ldq_p(mem_buf);
+        env->fpr[n].vreg.D(0) = ldq_p(mem_buf);
         length = 8;
     } else if (n == 32) {
         uint64_t val = ldq_p(mem_buf);

---- next changed file ----

@@ -130,3 +130,569 @@ DEF_HELPER_4(ldpte, void, env, tl, tl, i32)
DEF_HELPER_1(ertn, void, env)
DEF_HELPER_1(idle, void, env)
#endif
/* LoongArch LSX */
DEF_HELPER_4(vhaddw_h_b, void, env, i32, i32, i32)
DEF_HELPER_4(vhaddw_w_h, void, env, i32, i32, i32)
DEF_HELPER_4(vhaddw_d_w, void, env, i32, i32, i32)
DEF_HELPER_4(vhaddw_q_d, void, env, i32, i32, i32)
DEF_HELPER_4(vhaddw_hu_bu, void, env, i32, i32, i32)
DEF_HELPER_4(vhaddw_wu_hu, void, env, i32, i32, i32)
DEF_HELPER_4(vhaddw_du_wu, void, env, i32, i32, i32)
DEF_HELPER_4(vhaddw_qu_du, void, env, i32, i32, i32)
DEF_HELPER_4(vhsubw_h_b, void, env, i32, i32, i32)
DEF_HELPER_4(vhsubw_w_h, void, env, i32, i32, i32)
DEF_HELPER_4(vhsubw_d_w, void, env, i32, i32, i32)
DEF_HELPER_4(vhsubw_q_d, void, env, i32, i32, i32)
DEF_HELPER_4(vhsubw_hu_bu, void, env, i32, i32, i32)
DEF_HELPER_4(vhsubw_wu_hu, void, env, i32, i32, i32)
DEF_HELPER_4(vhsubw_du_wu, void, env, i32, i32, i32)
DEF_HELPER_4(vhsubw_qu_du, void, env, i32, i32, i32)
DEF_HELPER_FLAGS_4(vaddwev_h_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_w_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_d_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_q_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_h_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_w_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_d_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_q_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwev_h_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwev_w_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwev_d_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwev_q_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwod_h_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwod_w_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwod_d_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwod_q_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_h_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_w_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_d_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_q_du, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_h_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_w_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_d_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_q_du, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwev_h_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwev_w_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwev_d_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwev_q_du, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwod_h_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwod_w_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwod_d_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsubwod_q_du, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_h_bu_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_w_hu_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_d_wu_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwev_q_du_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_h_bu_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_w_hu_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_d_wu_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vaddwod_q_du_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavg_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavg_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavg_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavg_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavg_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavg_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavg_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavg_du, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavgr_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavgr_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavgr_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavgr_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavgr_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavgr_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavgr_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vavgr_du, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vabsd_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vabsd_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vabsd_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vabsd_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vabsd_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vabsd_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vabsd_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vabsd_du, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vadda_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vadda_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vadda_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vadda_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmini_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmini_h, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmini_w, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmini_d, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmini_bu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmini_hu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmini_wu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmini_du, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmaxi_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmaxi_h, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmaxi_w, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmaxi_d, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmaxi_bu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmaxi_hu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmaxi_wu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmaxi_du, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vmuh_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmuh_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmuh_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmuh_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmuh_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmuh_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmuh_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmuh_du, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwev_h_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwev_w_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwev_d_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwod_h_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwod_w_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwod_d_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwev_h_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwev_w_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwev_d_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwod_h_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwod_w_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwod_d_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwev_h_bu_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwev_w_hu_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwev_d_wu_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwod_h_bu_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwod_w_hu_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmulwod_d_wu_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmadd_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmadd_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmadd_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmadd_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmsub_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmsub_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmsub_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmsub_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwev_h_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwev_w_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwev_d_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwod_h_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwod_w_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwod_d_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwev_h_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwev_w_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwev_d_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwod_h_bu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwod_w_hu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwod_d_wu, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwev_h_bu_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwev_w_hu_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwev_d_wu_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwod_h_bu_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwod_w_hu_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vmaddwod_d_wu_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_4(vdiv_b, void, env, i32, i32, i32)
DEF_HELPER_4(vdiv_h, void, env, i32, i32, i32)
DEF_HELPER_4(vdiv_w, void, env, i32, i32, i32)
DEF_HELPER_4(vdiv_d, void, env, i32, i32, i32)
DEF_HELPER_4(vdiv_bu, void, env, i32, i32, i32)
DEF_HELPER_4(vdiv_hu, void, env, i32, i32, i32)
DEF_HELPER_4(vdiv_wu, void, env, i32, i32, i32)
DEF_HELPER_4(vdiv_du, void, env, i32, i32, i32)
DEF_HELPER_4(vmod_b, void, env, i32, i32, i32)
DEF_HELPER_4(vmod_h, void, env, i32, i32, i32)
DEF_HELPER_4(vmod_w, void, env, i32, i32, i32)
DEF_HELPER_4(vmod_d, void, env, i32, i32, i32)
DEF_HELPER_4(vmod_bu, void, env, i32, i32, i32)
DEF_HELPER_4(vmod_hu, void, env, i32, i32, i32)
DEF_HELPER_4(vmod_wu, void, env, i32, i32, i32)
DEF_HELPER_4(vmod_du, void, env, i32, i32, i32)
DEF_HELPER_FLAGS_4(vsat_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vsat_h, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vsat_w, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vsat_d, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vsat_bu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vsat_hu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vsat_wu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vsat_du, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_3(vexth_h_b, void, env, i32, i32)
DEF_HELPER_3(vexth_w_h, void, env, i32, i32)
DEF_HELPER_3(vexth_d_w, void, env, i32, i32)
DEF_HELPER_3(vexth_q_d, void, env, i32, i32)
DEF_HELPER_3(vexth_hu_bu, void, env, i32, i32)
DEF_HELPER_3(vexth_wu_hu, void, env, i32, i32)
DEF_HELPER_3(vexth_du_wu, void, env, i32, i32)
DEF_HELPER_3(vexth_qu_du, void, env, i32, i32)
DEF_HELPER_FLAGS_4(vsigncov_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsigncov_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsigncov_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vsigncov_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_3(vmskltz_b, void, env, i32, i32)
DEF_HELPER_3(vmskltz_h, void, env, i32, i32)
DEF_HELPER_3(vmskltz_w, void, env, i32, i32)
DEF_HELPER_3(vmskltz_d, void, env, i32, i32)
DEF_HELPER_3(vmskgez_b, void, env, i32, i32)
DEF_HELPER_3(vmsknz_b, void, env, i32, i32)
DEF_HELPER_FLAGS_4(vnori_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_4(vsllwil_h_b, void, env, i32, i32, i32)
DEF_HELPER_4(vsllwil_w_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsllwil_d_w, void, env, i32, i32, i32)
DEF_HELPER_3(vextl_q_d, void, env, i32, i32)
DEF_HELPER_4(vsllwil_hu_bu, void, env, i32, i32, i32)
DEF_HELPER_4(vsllwil_wu_hu, void, env, i32, i32, i32)
DEF_HELPER_4(vsllwil_du_wu, void, env, i32, i32, i32)
DEF_HELPER_3(vextl_qu_du, void, env, i32, i32)
DEF_HELPER_4(vsrlr_b, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlr_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlr_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlr_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlri_b, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlri_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlri_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlri_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrar_b, void, env, i32, i32, i32)
DEF_HELPER_4(vsrar_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrar_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrar_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrari_b, void, env, i32, i32, i32)
DEF_HELPER_4(vsrari_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrari_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrari_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrln_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrln_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrln_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsran_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsran_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsran_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlni_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlni_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlni_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlni_d_q, void, env, i32, i32, i32)
DEF_HELPER_4(vsrani_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrani_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrani_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrani_d_q, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlrn_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlrn_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlrn_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrarn_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrarn_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrarn_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlrni_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlrni_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlrni_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrlrni_d_q, void, env, i32, i32, i32)
DEF_HELPER_4(vsrarni_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vsrarni_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vsrarni_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vsrarni_d_q, void, env, i32, i32, i32)
DEF_HELPER_4(vssrln_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrln_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrln_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssran_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssran_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssran_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrln_bu_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrln_hu_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrln_wu_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssran_bu_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssran_hu_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssran_wu_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlni_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlni_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlni_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlni_d_q, void, env, i32, i32, i32)
DEF_HELPER_4(vssrani_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrani_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrani_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrani_d_q, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlni_bu_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlni_hu_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlni_wu_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlni_du_q, void, env, i32, i32, i32)
DEF_HELPER_4(vssrani_bu_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrani_hu_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrani_wu_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrani_du_q, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrn_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrn_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrn_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarn_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarn_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarn_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrn_bu_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrn_hu_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrn_wu_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarn_bu_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarn_hu_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarn_wu_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrni_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrni_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrni_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrni_d_q, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarni_b_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarni_h_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarni_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarni_d_q, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrni_bu_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrni_hu_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrni_wu_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrlrni_du_q, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarni_bu_h, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarni_hu_w, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarni_wu_d, void, env, i32, i32, i32)
DEF_HELPER_4(vssrarni_du_q, void, env, i32, i32, i32)
DEF_HELPER_3(vclo_b, void, env, i32, i32)
DEF_HELPER_3(vclo_h, void, env, i32, i32)
DEF_HELPER_3(vclo_w, void, env, i32, i32)
DEF_HELPER_3(vclo_d, void, env, i32, i32)
DEF_HELPER_3(vclz_b, void, env, i32, i32)
DEF_HELPER_3(vclz_h, void, env, i32, i32)
DEF_HELPER_3(vclz_w, void, env, i32, i32)
DEF_HELPER_3(vclz_d, void, env, i32, i32)
DEF_HELPER_3(vpcnt_b, void, env, i32, i32)
DEF_HELPER_3(vpcnt_h, void, env, i32, i32)
DEF_HELPER_3(vpcnt_w, void, env, i32, i32)
DEF_HELPER_3(vpcnt_d, void, env, i32, i32)
DEF_HELPER_FLAGS_4(vbitclr_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitclr_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitclr_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitclr_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitclri_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitclri_h, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitclri_w, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitclri_d, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitset_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitset_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitset_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitset_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitseti_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitseti_h, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitseti_w, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitseti_d, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitrev_b, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitrev_h, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitrev_w, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitrev_d, TCG_CALL_NO_RWG, void, ptr, ptr, ptr, i32)
DEF_HELPER_FLAGS_4(vbitrevi_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitrevi_h, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitrevi_w, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vbitrevi_d, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_4(vfrstp_b, void, env, i32, i32, i32)
DEF_HELPER_4(vfrstp_h, void, env, i32, i32, i32)
DEF_HELPER_4(vfrstpi_b, void, env, i32, i32, i32)
DEF_HELPER_4(vfrstpi_h, void, env, i32, i32, i32)
DEF_HELPER_4(vfadd_s, void, env, i32, i32, i32)
DEF_HELPER_4(vfadd_d, void, env, i32, i32, i32)
DEF_HELPER_4(vfsub_s, void, env, i32, i32, i32)
DEF_HELPER_4(vfsub_d, void, env, i32, i32, i32)
DEF_HELPER_4(vfmul_s, void, env, i32, i32, i32)
DEF_HELPER_4(vfmul_d, void, env, i32, i32, i32)
DEF_HELPER_4(vfdiv_s, void, env, i32, i32, i32)
DEF_HELPER_4(vfdiv_d, void, env, i32, i32, i32)
DEF_HELPER_5(vfmadd_s, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfmadd_d, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfmsub_s, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfmsub_d, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfnmadd_s, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfnmadd_d, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfnmsub_s, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfnmsub_d, void, env, i32, i32, i32, i32)
DEF_HELPER_4(vfmax_s, void, env, i32, i32, i32)
DEF_HELPER_4(vfmax_d, void, env, i32, i32, i32)
DEF_HELPER_4(vfmin_s, void, env, i32, i32, i32)
DEF_HELPER_4(vfmin_d, void, env, i32, i32, i32)
DEF_HELPER_4(vfmaxa_s, void, env, i32, i32, i32)
DEF_HELPER_4(vfmaxa_d, void, env, i32, i32, i32)
DEF_HELPER_4(vfmina_s, void, env, i32, i32, i32)
DEF_HELPER_4(vfmina_d, void, env, i32, i32, i32)
DEF_HELPER_3(vflogb_s, void, env, i32, i32)
DEF_HELPER_3(vflogb_d, void, env, i32, i32)
DEF_HELPER_3(vfclass_s, void, env, i32, i32)
DEF_HELPER_3(vfclass_d, void, env, i32, i32)
DEF_HELPER_3(vfsqrt_s, void, env, i32, i32)
DEF_HELPER_3(vfsqrt_d, void, env, i32, i32)
DEF_HELPER_3(vfrecip_s, void, env, i32, i32)
DEF_HELPER_3(vfrecip_d, void, env, i32, i32)
DEF_HELPER_3(vfrsqrt_s, void, env, i32, i32)
DEF_HELPER_3(vfrsqrt_d, void, env, i32, i32)
DEF_HELPER_3(vfcvtl_s_h, void, env, i32, i32)
DEF_HELPER_3(vfcvth_s_h, void, env, i32, i32)
DEF_HELPER_3(vfcvtl_d_s, void, env, i32, i32)
DEF_HELPER_3(vfcvth_d_s, void, env, i32, i32)
DEF_HELPER_4(vfcvt_h_s, void, env, i32, i32, i32)
DEF_HELPER_4(vfcvt_s_d, void, env, i32, i32, i32)
DEF_HELPER_3(vfrintrne_s, void, env, i32, i32)
DEF_HELPER_3(vfrintrne_d, void, env, i32, i32)
DEF_HELPER_3(vfrintrz_s, void, env, i32, i32)
DEF_HELPER_3(vfrintrz_d, void, env, i32, i32)
DEF_HELPER_3(vfrintrp_s, void, env, i32, i32)
DEF_HELPER_3(vfrintrp_d, void, env, i32, i32)
DEF_HELPER_3(vfrintrm_s, void, env, i32, i32)
DEF_HELPER_3(vfrintrm_d, void, env, i32, i32)
DEF_HELPER_3(vfrint_s, void, env, i32, i32)
DEF_HELPER_3(vfrint_d, void, env, i32, i32)
DEF_HELPER_3(vftintrne_w_s, void, env, i32, i32)
DEF_HELPER_3(vftintrne_l_d, void, env, i32, i32)
DEF_HELPER_3(vftintrz_w_s, void, env, i32, i32)
DEF_HELPER_3(vftintrz_l_d, void, env, i32, i32)
DEF_HELPER_3(vftintrp_w_s, void, env, i32, i32)
DEF_HELPER_3(vftintrp_l_d, void, env, i32, i32)
DEF_HELPER_3(vftintrm_w_s, void, env, i32, i32)
DEF_HELPER_3(vftintrm_l_d, void, env, i32, i32)
DEF_HELPER_3(vftint_w_s, void, env, i32, i32)
DEF_HELPER_3(vftint_l_d, void, env, i32, i32)
DEF_HELPER_3(vftintrz_wu_s, void, env, i32, i32)
DEF_HELPER_3(vftintrz_lu_d, void, env, i32, i32)
DEF_HELPER_3(vftint_wu_s, void, env, i32, i32)
DEF_HELPER_3(vftint_lu_d, void, env, i32, i32)
DEF_HELPER_4(vftintrne_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vftintrz_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vftintrp_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vftintrm_w_d, void, env, i32, i32, i32)
DEF_HELPER_4(vftint_w_d, void, env, i32, i32, i32)
DEF_HELPER_3(vftintrnel_l_s, void, env, i32, i32)
DEF_HELPER_3(vftintrneh_l_s, void, env, i32, i32)
DEF_HELPER_3(vftintrzl_l_s, void, env, i32, i32)
DEF_HELPER_3(vftintrzh_l_s, void, env, i32, i32)
DEF_HELPER_3(vftintrpl_l_s, void, env, i32, i32)
DEF_HELPER_3(vftintrph_l_s, void, env, i32, i32)
DEF_HELPER_3(vftintrml_l_s, void, env, i32, i32)
DEF_HELPER_3(vftintrmh_l_s, void, env, i32, i32)
DEF_HELPER_3(vftintl_l_s, void, env, i32, i32)
DEF_HELPER_3(vftinth_l_s, void, env, i32, i32)
DEF_HELPER_3(vffint_s_w, void, env, i32, i32)
DEF_HELPER_3(vffint_d_l, void, env, i32, i32)
DEF_HELPER_3(vffint_s_wu, void, env, i32, i32)
DEF_HELPER_3(vffint_d_lu, void, env, i32, i32)
DEF_HELPER_3(vffintl_d_w, void, env, i32, i32)
DEF_HELPER_3(vffinth_d_w, void, env, i32, i32)
DEF_HELPER_4(vffint_s_l, void, env, i32, i32, i32)
DEF_HELPER_FLAGS_4(vseqi_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vseqi_h, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vseqi_w, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vseqi_d, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslei_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslei_h, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslei_w, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslei_d, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslei_bu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslei_hu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslei_wu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslei_du, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslti_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslti_h, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslti_w, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslti_d, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslti_bu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslti_hu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslti_wu, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_FLAGS_4(vslti_du, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_5(vfcmp_c_s, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfcmp_s_s, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfcmp_c_d, void, env, i32, i32, i32, i32)
DEF_HELPER_5(vfcmp_s_d, void, env, i32, i32, i32, i32)
DEF_HELPER_FLAGS_4(vbitseli_b, TCG_CALL_NO_RWG, void, ptr, ptr, i64, i32)
DEF_HELPER_3(vsetanyeqz_b, void, env, i32, i32)
DEF_HELPER_3(vsetanyeqz_h, void, env, i32, i32)
DEF_HELPER_3(vsetanyeqz_w, void, env, i32, i32)
DEF_HELPER_3(vsetanyeqz_d, void, env, i32, i32)
DEF_HELPER_3(vsetallnez_b, void, env, i32, i32)
DEF_HELPER_3(vsetallnez_h, void, env, i32, i32)
DEF_HELPER_3(vsetallnez_w, void, env, i32, i32)
DEF_HELPER_3(vsetallnez_d, void, env, i32, i32)
DEF_HELPER_4(vpackev_b, void, env, i32, i32, i32)
DEF_HELPER_4(vpackev_h, void, env, i32, i32, i32)
DEF_HELPER_4(vpackev_w, void, env, i32, i32, i32)
DEF_HELPER_4(vpackev_d, void, env, i32, i32, i32)
DEF_HELPER_4(vpackod_b, void, env, i32, i32, i32)
DEF_HELPER_4(vpackod_h, void, env, i32, i32, i32)
DEF_HELPER_4(vpackod_w, void, env, i32, i32, i32)
DEF_HELPER_4(vpackod_d, void, env, i32, i32, i32)
DEF_HELPER_4(vpickev_b, void, env, i32, i32, i32)
DEF_HELPER_4(vpickev_h, void, env, i32, i32, i32)
DEF_HELPER_4(vpickev_w, void, env, i32, i32, i32)
DEF_HELPER_4(vpickev_d, void, env, i32, i32, i32)
DEF_HELPER_4(vpickod_b, void, env, i32, i32, i32)
DEF_HELPER_4(vpickod_h, void, env, i32, i32, i32)
DEF_HELPER_4(vpickod_w, void, env, i32, i32, i32)
DEF_HELPER_4(vpickod_d, void, env, i32, i32, i32)
DEF_HELPER_4(vilvl_b, void, env, i32, i32, i32)
DEF_HELPER_4(vilvl_h, void, env, i32, i32, i32)
DEF_HELPER_4(vilvl_w, void, env, i32, i32, i32)
DEF_HELPER_4(vilvl_d, void, env, i32, i32, i32)
DEF_HELPER_4(vilvh_b, void, env, i32, i32, i32)
DEF_HELPER_4(vilvh_h, void, env, i32, i32, i32)
DEF_HELPER_4(vilvh_w, void, env, i32, i32, i32)
DEF_HELPER_4(vilvh_d, void, env, i32, i32, i32)
DEF_HELPER_5(vshuf_b, void, env, i32, i32, i32, i32)
DEF_HELPER_4(vshuf_h, void, env, i32, i32, i32)
DEF_HELPER_4(vshuf_w, void, env, i32, i32, i32)
DEF_HELPER_4(vshuf_d, void, env, i32, i32, i32)
DEF_HELPER_4(vshuf4i_b, void, env, i32, i32, i32)
DEF_HELPER_4(vshuf4i_h, void, env, i32, i32, i32)
DEF_HELPER_4(vshuf4i_w, void, env, i32, i32, i32)
DEF_HELPER_4(vshuf4i_d, void, env, i32, i32, i32)
DEF_HELPER_4(vpermi_w, void, env, i32, i32, i32)
DEF_HELPER_4(vextrins_b, void, env, i32, i32, i32)
DEF_HELPER_4(vextrins_h, void, env, i32, i32, i32)
DEF_HELPER_4(vextrins_w, void, env, i32, i32, i32)
DEF_HELPER_4(vextrins_d, void, env, i32, i32, i32)
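The declarations above only fix the call interface between generated TCG code and the out-of-line helpers; the helper bodies live in lsx_helper.c, whose diff is suppressed further below. As a rough illustration (not the QEMU implementation), a DEF_HELPER_FLAGS_4 line such as vseqi_b expands to approximately "void helper_vseqi_b(void *vd, void *vj, uint64_t imm, uint32_t desc)", and its semantics can be modelled in plain standalone C like this (16-byte vectors and a sign-extended 5-bit immediate are assumptions taken from the byte-element form):

/* Illustrative plain-C model of a vseqi.b-style compare: every byte of vd
 * becomes all-ones if the corresponding byte of vj equals the sign-extended
 * 5-bit immediate, else zero.  Not QEMU code; the real helper is in
 * lsx_helper.c. */
#include <stdint.h>
#include <stdio.h>

static void vseqi_b_model(uint8_t vd[16], const uint8_t vj[16], int64_t imm)
{
    int64_t si5 = imm & 0x1f;
    if (si5 & 0x10) {
        si5 -= 32;                               /* sign-extend the low 5 bits */
    }
    for (int i = 0; i < 16; i++) {
        vd[i] = ((int8_t)vj[i] == si5) ? 0xff : 0x00;
    }
}

int main(void)
{
    uint8_t vj[16] = { 0x00, 0x01, 0xff, 0x03 }; /* remaining bytes are 0 */
    uint8_t vd[16];

    vseqi_b_model(vd, vj, -1);                   /* compare each byte with -1 */
    printf("vd[2] = 0x%02x, vd[3] = 0x%02x\n", vd[2], vd[3]);
    return 0;
}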


@ -17,18 +17,29 @@
static bool gen_fff(DisasContext *ctx, arg_fff *a,
void (*func)(TCGv, TCGv_env, TCGv, TCGv))
{
TCGv dest = get_fpr(ctx, a->fd);
TCGv src1 = get_fpr(ctx, a->fj);
TCGv src2 = get_fpr(ctx, a->fk);
CHECK_FPE;
func(cpu_fpr[a->fd], cpu_env, cpu_fpr[a->fj], cpu_fpr[a->fk]);
func(dest, cpu_env, src1, src2);
set_fpr(a->fd, dest);
return true;
}
static bool gen_ff(DisasContext *ctx, arg_ff *a,
void (*func)(TCGv, TCGv_env, TCGv))
{
TCGv dest = get_fpr(ctx, a->fd);
TCGv src = get_fpr(ctx, a->fj);
CHECK_FPE;
func(cpu_fpr[a->fd], cpu_env, cpu_fpr[a->fj]);
func(dest, cpu_env, src);
set_fpr(a->fd, dest);
return true;
}
@ -37,61 +48,98 @@ static bool gen_muladd(DisasContext *ctx, arg_ffff *a,
int flag)
{
TCGv_i32 tflag = tcg_constant_i32(flag);
TCGv dest = get_fpr(ctx, a->fd);
TCGv src1 = get_fpr(ctx, a->fj);
TCGv src2 = get_fpr(ctx, a->fk);
TCGv src3 = get_fpr(ctx, a->fa);
CHECK_FPE;
func(cpu_fpr[a->fd], cpu_env, cpu_fpr[a->fj],
cpu_fpr[a->fk], cpu_fpr[a->fa], tflag);
func(dest, cpu_env, src1, src2, src3, tflag);
set_fpr(a->fd, dest);
return true;
}
static bool trans_fcopysign_s(DisasContext *ctx, arg_fcopysign_s *a)
{
TCGv dest = get_fpr(ctx, a->fd);
TCGv src1 = get_fpr(ctx, a->fk);
TCGv src2 = get_fpr(ctx, a->fj);
CHECK_FPE;
tcg_gen_deposit_i64(cpu_fpr[a->fd], cpu_fpr[a->fk], cpu_fpr[a->fj], 0, 31);
tcg_gen_deposit_i64(dest, src1, src2, 0, 31);
set_fpr(a->fd, dest);
return true;
}
static bool trans_fcopysign_d(DisasContext *ctx, arg_fcopysign_d *a)
{
TCGv dest = get_fpr(ctx, a->fd);
TCGv src1 = get_fpr(ctx, a->fk);
TCGv src2 = get_fpr(ctx, a->fj);
CHECK_FPE;
tcg_gen_deposit_i64(cpu_fpr[a->fd], cpu_fpr[a->fk], cpu_fpr[a->fj], 0, 63);
tcg_gen_deposit_i64(dest, src1, src2, 0, 63);
set_fpr(a->fd, dest);
return true;
}
static bool trans_fabs_s(DisasContext *ctx, arg_fabs_s *a)
{
TCGv dest = get_fpr(ctx, a->fd);
TCGv src = get_fpr(ctx, a->fj);
CHECK_FPE;
tcg_gen_andi_i64(cpu_fpr[a->fd], cpu_fpr[a->fj], MAKE_64BIT_MASK(0, 31));
gen_nanbox_s(cpu_fpr[a->fd], cpu_fpr[a->fd]);
tcg_gen_andi_i64(dest, src, MAKE_64BIT_MASK(0, 31));
gen_nanbox_s(dest, dest);
set_fpr(a->fd, dest);
return true;
}
static bool trans_fabs_d(DisasContext *ctx, arg_fabs_d *a)
{
TCGv dest = get_fpr(ctx, a->fd);
TCGv src = get_fpr(ctx, a->fj);
CHECK_FPE;
tcg_gen_andi_i64(cpu_fpr[a->fd], cpu_fpr[a->fj], MAKE_64BIT_MASK(0, 63));
tcg_gen_andi_i64(dest, src, MAKE_64BIT_MASK(0, 63));
set_fpr(a->fd, dest);
return true;
}
static bool trans_fneg_s(DisasContext *ctx, arg_fneg_s *a)
{
TCGv dest = get_fpr(ctx, a->fd);
TCGv src = get_fpr(ctx, a->fj);
CHECK_FPE;
tcg_gen_xori_i64(cpu_fpr[a->fd], cpu_fpr[a->fj], 0x80000000);
gen_nanbox_s(cpu_fpr[a->fd], cpu_fpr[a->fd]);
tcg_gen_xori_i64(dest, src, 0x80000000);
gen_nanbox_s(dest, dest);
set_fpr(a->fd, dest);
return true;
}
static bool trans_fneg_d(DisasContext *ctx, arg_fneg_d *a)
{
TCGv dest = get_fpr(ctx, a->fd);
TCGv src = get_fpr(ctx, a->fj);
CHECK_FPE;
tcg_gen_xori_i64(cpu_fpr[a->fd], cpu_fpr[a->fj], 0x8000000000000000LL);
tcg_gen_xori_i64(dest, src, 0x8000000000000000LL);
set_fpr(a->fd, dest);
return true;
}
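Every translator above now follows the same shape: read the source FPRs into temporaries with get_fpr(), compute, and write the result back with set_fpr(); single-precision results are additionally passed through gen_nanbox_s(). As a minimal standalone sketch of that nanbox convention (the all-ones upper half is an assumption about gen_nanbox_s, which is not part of this hunk), fneg.s on a 64-bit FPR can be modelled as:

/* Plain-C sketch of the nanboxing convention for single-precision values kept
 * in 64-bit FPRs: the upper 32 bits are forced to all ones, mirroring what
 * trans_fneg_s() above emits (xori 0x80000000 followed by gen_nanbox_s). */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

static uint64_t nanbox_s(uint64_t reg)
{
    return reg | 0xffffffff00000000ull;
}

static uint64_t fneg_s_model(uint64_t fj)
{
    return nanbox_s(fj ^ 0x80000000ull);   /* flip the float32 sign bit */
}

int main(void)
{
    uint64_t fj = 0x000000003f800000ull;   /* 1.0f in the low 32 bits */

    printf("fneg.s -> 0x%016" PRIx64 "\n", fneg_s_model(fj));
    return 0;
}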


@ -25,17 +25,19 @@ static uint32_t get_fcmp_flags(int cond)
static bool trans_fcmp_cond_s(DisasContext *ctx, arg_fcmp_cond_s *a)
{
TCGv var;
TCGv var, src1, src2;
uint32_t flags;
void (*fn)(TCGv, TCGv_env, TCGv, TCGv, TCGv_i32);
CHECK_FPE;
var = tcg_temp_new();
src1 = get_fpr(ctx, a->fj);
src2 = get_fpr(ctx, a->fk);
fn = (a->fcond & 1 ? gen_helper_fcmp_s_s : gen_helper_fcmp_c_s);
flags = get_fcmp_flags(a->fcond >> 1);
fn(var, cpu_env, cpu_fpr[a->fj], cpu_fpr[a->fk], tcg_constant_i32(flags));
fn(var, cpu_env, src1, src2, tcg_constant_i32(flags));
tcg_gen_st8_tl(var, cpu_env, offsetof(CPULoongArchState, cf[a->cd]));
return true;
@ -43,17 +45,19 @@ static bool trans_fcmp_cond_s(DisasContext *ctx, arg_fcmp_cond_s *a)
static bool trans_fcmp_cond_d(DisasContext *ctx, arg_fcmp_cond_d *a)
{
TCGv var;
TCGv var, src1, src2;
uint32_t flags;
void (*fn)(TCGv, TCGv_env, TCGv, TCGv, TCGv_i32);
CHECK_FPE;
var = tcg_temp_new();
src1 = get_fpr(ctx, a->fj);
src2 = get_fpr(ctx, a->fk);
fn = (a->fcond & 1 ? gen_helper_fcmp_s_d : gen_helper_fcmp_c_d);
flags = get_fcmp_flags(a->fcond >> 1);
fn(var, cpu_env, cpu_fpr[a->fj], cpu_fpr[a->fk], tcg_constant_i32(flags));
fn(var, cpu_env, src1, src2, tcg_constant_i32(flags));
tcg_gen_st8_tl(var, cpu_env, offsetof(CPULoongArchState, cf[a->cd]));
return true;


@ -13,6 +13,7 @@ static void maybe_nanbox_load(TCGv freg, MemOp mop)
static bool gen_fload_i(DisasContext *ctx, arg_fr_i *a, MemOp mop)
{
TCGv addr = gpr_src(ctx, a->rj, EXT_NONE);
TCGv dest = get_fpr(ctx, a->fd);
CHECK_FPE;
@ -22,8 +23,9 @@ static bool gen_fload_i(DisasContext *ctx, arg_fr_i *a, MemOp mop)
addr = temp;
}
tcg_gen_qemu_ld_tl(cpu_fpr[a->fd], addr, ctx->mem_idx, mop);
maybe_nanbox_load(cpu_fpr[a->fd], mop);
tcg_gen_qemu_ld_tl(dest, addr, ctx->mem_idx, mop);
maybe_nanbox_load(dest, mop);
set_fpr(a->fd, dest);
return true;
}
@ -31,6 +33,7 @@ static bool gen_fload_i(DisasContext *ctx, arg_fr_i *a, MemOp mop)
static bool gen_fstore_i(DisasContext *ctx, arg_fr_i *a, MemOp mop)
{
TCGv addr = gpr_src(ctx, a->rj, EXT_NONE);
TCGv src = get_fpr(ctx, a->fd);
CHECK_FPE;
@ -40,7 +43,8 @@ static bool gen_fstore_i(DisasContext *ctx, arg_fr_i *a, MemOp mop)
addr = temp;
}
tcg_gen_qemu_st_tl(cpu_fpr[a->fd], addr, ctx->mem_idx, mop);
tcg_gen_qemu_st_tl(src, addr, ctx->mem_idx, mop);
return true;
}
@ -48,14 +52,16 @@ static bool gen_floadx(DisasContext *ctx, arg_frr *a, MemOp mop)
{
TCGv src1 = gpr_src(ctx, a->rj, EXT_NONE);
TCGv src2 = gpr_src(ctx, a->rk, EXT_NONE);
TCGv dest = get_fpr(ctx, a->fd);
TCGv addr;
CHECK_FPE;
addr = tcg_temp_new();
tcg_gen_add_tl(addr, src1, src2);
tcg_gen_qemu_ld_tl(cpu_fpr[a->fd], addr, ctx->mem_idx, mop);
maybe_nanbox_load(cpu_fpr[a->fd], mop);
tcg_gen_qemu_ld_tl(dest, addr, ctx->mem_idx, mop);
maybe_nanbox_load(dest, mop);
set_fpr(a->fd, dest);
return true;
}
@ -64,13 +70,14 @@ static bool gen_fstorex(DisasContext *ctx, arg_frr *a, MemOp mop)
{
TCGv src1 = gpr_src(ctx, a->rj, EXT_NONE);
TCGv src2 = gpr_src(ctx, a->rk, EXT_NONE);
TCGv src3 = get_fpr(ctx, a->fd);
TCGv addr;
CHECK_FPE;
addr = tcg_temp_new();
tcg_gen_add_tl(addr, src1, src2);
tcg_gen_qemu_st_tl(cpu_fpr[a->fd], addr, ctx->mem_idx, mop);
tcg_gen_qemu_st_tl(src3, addr, ctx->mem_idx, mop);
return true;
}
@ -79,6 +86,7 @@ static bool gen_fload_gt(DisasContext *ctx, arg_frr *a, MemOp mop)
{
TCGv src1 = gpr_src(ctx, a->rj, EXT_NONE);
TCGv src2 = gpr_src(ctx, a->rk, EXT_NONE);
TCGv dest = get_fpr(ctx, a->fd);
TCGv addr;
CHECK_FPE;
@ -86,8 +94,9 @@ static bool gen_fload_gt(DisasContext *ctx, arg_frr *a, MemOp mop)
addr = tcg_temp_new();
gen_helper_asrtgt_d(cpu_env, src1, src2);
tcg_gen_add_tl(addr, src1, src2);
tcg_gen_qemu_ld_tl(cpu_fpr[a->fd], addr, ctx->mem_idx, mop);
maybe_nanbox_load(cpu_fpr[a->fd], mop);
tcg_gen_qemu_ld_tl(dest, addr, ctx->mem_idx, mop);
maybe_nanbox_load(dest, mop);
set_fpr(a->fd, dest);
return true;
}
@ -96,6 +105,7 @@ static bool gen_fstore_gt(DisasContext *ctx, arg_frr *a, MemOp mop)
{
TCGv src1 = gpr_src(ctx, a->rj, EXT_NONE);
TCGv src2 = gpr_src(ctx, a->rk, EXT_NONE);
TCGv src3 = get_fpr(ctx, a->fd);
TCGv addr;
CHECK_FPE;
@ -103,7 +113,7 @@ static bool gen_fstore_gt(DisasContext *ctx, arg_frr *a, MemOp mop)
addr = tcg_temp_new();
gen_helper_asrtgt_d(cpu_env, src1, src2);
tcg_gen_add_tl(addr, src1, src2);
tcg_gen_qemu_st_tl(cpu_fpr[a->fd], addr, ctx->mem_idx, mop);
tcg_gen_qemu_st_tl(src3, addr, ctx->mem_idx, mop);
return true;
}
@ -112,6 +122,7 @@ static bool gen_fload_le(DisasContext *ctx, arg_frr *a, MemOp mop)
{
TCGv src1 = gpr_src(ctx, a->rj, EXT_NONE);
TCGv src2 = gpr_src(ctx, a->rk, EXT_NONE);
TCGv dest = get_fpr(ctx, a->fd);
TCGv addr;
CHECK_FPE;
@ -119,8 +130,9 @@ static bool gen_fload_le(DisasContext *ctx, arg_frr *a, MemOp mop)
addr = tcg_temp_new();
gen_helper_asrtle_d(cpu_env, src1, src2);
tcg_gen_add_tl(addr, src1, src2);
tcg_gen_qemu_ld_tl(cpu_fpr[a->fd], addr, ctx->mem_idx, mop);
maybe_nanbox_load(cpu_fpr[a->fd], mop);
tcg_gen_qemu_ld_tl(dest, addr, ctx->mem_idx, mop);
maybe_nanbox_load(dest, mop);
set_fpr(a->fd, dest);
return true;
}
@ -129,6 +141,7 @@ static bool gen_fstore_le(DisasContext *ctx, arg_frr *a, MemOp mop)
{
TCGv src1 = gpr_src(ctx, a->rj, EXT_NONE);
TCGv src2 = gpr_src(ctx, a->rk, EXT_NONE);
TCGv src3 = get_fpr(ctx, a->fd);
TCGv addr;
CHECK_FPE;
@ -136,7 +149,7 @@ static bool gen_fstore_le(DisasContext *ctx, arg_frr *a, MemOp mop)
addr = tcg_temp_new();
gen_helper_asrtle_d(cpu_env, src1, src2);
tcg_gen_add_tl(addr, src1, src2);
tcg_gen_qemu_st_tl(cpu_fpr[a->fd], addr, ctx->mem_idx, mop);
tcg_gen_qemu_st_tl(src3, addr, ctx->mem_idx, mop);
return true;
}


@ -10,14 +10,17 @@ static const uint32_t fcsr_mask[4] = {
static bool trans_fsel(DisasContext *ctx, arg_fsel *a)
{
TCGv zero = tcg_constant_tl(0);
TCGv dest = get_fpr(ctx, a->fd);
TCGv src1 = get_fpr(ctx, a->fj);
TCGv src2 = get_fpr(ctx, a->fk);
TCGv cond;
CHECK_FPE;
cond = tcg_temp_new();
tcg_gen_ld8u_tl(cond, cpu_env, offsetof(CPULoongArchState, cf[a->ca]));
tcg_gen_movcond_tl(TCG_COND_EQ, cpu_fpr[a->fd], cond, zero,
cpu_fpr[a->fj], cpu_fpr[a->fk]);
tcg_gen_movcond_tl(TCG_COND_EQ, dest, cond, zero, src1, src2);
set_fpr(a->fd, dest);
return true;
}
@ -25,15 +28,16 @@ static bool trans_fsel(DisasContext *ctx, arg_fsel *a)
static bool gen_f2f(DisasContext *ctx, arg_ff *a,
void (*func)(TCGv, TCGv), bool nanbox)
{
TCGv dest = cpu_fpr[a->fd];
TCGv src = cpu_fpr[a->fj];
TCGv dest = get_fpr(ctx, a->fd);
TCGv src = get_fpr(ctx, a->fj);
CHECK_FPE;
func(dest, src);
if (nanbox) {
gen_nanbox_s(cpu_fpr[a->fd], cpu_fpr[a->fd]);
gen_nanbox_s(dest, dest);
}
set_fpr(a->fd, dest);
return true;
}
@ -42,10 +46,13 @@ static bool gen_r2f(DisasContext *ctx, arg_fr *a,
void (*func)(TCGv, TCGv))
{
TCGv src = gpr_src(ctx, a->rj, EXT_NONE);
TCGv dest = get_fpr(ctx, a->fd);
CHECK_FPE;
func(cpu_fpr[a->fd], src);
func(dest, src);
set_fpr(a->fd, dest);
return true;
}
@ -53,10 +60,11 @@ static bool gen_f2r(DisasContext *ctx, arg_rf *a,
void (*func)(TCGv, TCGv))
{
TCGv dest = gpr_dst(ctx, a->rd, EXT_NONE);
TCGv src = get_fpr(ctx, a->fj);
CHECK_FPE;
func(dest, cpu_fpr[a->fj]);
func(dest, src);
gen_set_gpr(a->rd, dest, EXT_NONE);
return true;
@ -124,11 +132,12 @@ static void gen_movfrh2gr_s(TCGv dest, TCGv src)
static bool trans_movfr2cf(DisasContext *ctx, arg_movfr2cf *a)
{
TCGv t0;
TCGv src = get_fpr(ctx, a->fj);
CHECK_FPE;
t0 = tcg_temp_new();
tcg_gen_andi_tl(t0, cpu_fpr[a->fj], 0x1);
tcg_gen_andi_tl(t0, src, 0x1);
tcg_gen_st8_tl(t0, cpu_env, offsetof(CPULoongArchState, cf[a->cd & 0x7]));
return true;
@ -136,10 +145,14 @@ static bool trans_movfr2cf(DisasContext *ctx, arg_movfr2cf *a)
static bool trans_movcf2fr(DisasContext *ctx, arg_movcf2fr *a)
{
TCGv dest = get_fpr(ctx, a->fd);
CHECK_FPE;
tcg_gen_ld8u_tl(cpu_fpr[a->fd], cpu_env,
tcg_gen_ld8u_tl(dest, cpu_env,
offsetof(CPULoongArchState, cf[a->cj & 0x7]));
set_fpr(a->fd, dest);
return true;
}

File diff suppressed because it is too large.


@ -485,3 +485,814 @@ ldpte 0000 01100100 01 ........ ..... 00000 @j_i
ertn 0000 01100100 10000 01110 00000 00000 @empty
idle 0000 01100100 10001 ............... @i15
dbcl 0000 00000010 10101 ............... @i15
#
# LSX Fields
#
%i9s3 10:s9 !function=shl_3
%i10s2 10:s10 !function=shl_2
%i11s1 10:s11 !function=shl_1
%i8s3 10:s8 !function=shl_3
%i8s2 10:s8 !function=shl_2
%i8s1 10:s8 !function=shl_1
#
# LSX Argument sets
#
&vv vd vj
&cv cd vj
&vvv vd vj vk
&vv_i vd vj imm
&vvvv vd vj vk va
&vvv_fcond vd vj vk fcond
&vr_i vd rj imm
&rv_i rd vj imm
&vr vd rj
&vvr vd vj rk
&vrr vd rj rk
&vr_ii vd rj imm imm2
&v_i vd imm
#
# LSX Formats
#
@vv .... ........ ..... ..... vj:5 vd:5 &vv
@cv .... ........ ..... ..... vj:5 .. cd:3 &cv
@vvv .... ........ ..... vk:5 vj:5 vd:5 &vvv
@vv_ui1 .... ........ ..... .... imm:1 vj:5 vd:5 &vv_i
@vv_ui2 .... ........ ..... ... imm:2 vj:5 vd:5 &vv_i
@vv_ui3 .... ........ ..... .. imm:3 vj:5 vd:5 &vv_i
@vv_ui4 .... ........ ..... . imm:4 vj:5 vd:5 &vv_i
@vv_ui5 .... ........ ..... imm:5 vj:5 vd:5 &vv_i
@vv_ui6 .... ........ .... imm:6 vj:5 vd:5 &vv_i
@vv_ui7 .... ........ ... imm:7 vj:5 vd:5 &vv_i
@vv_ui8 .... ........ .. imm:8 vj:5 vd:5 &vv_i
@vv_i5 .... ........ ..... imm:s5 vj:5 vd:5 &vv_i
@vvvv .... ........ va:5 vk:5 vj:5 vd:5 &vvvv
@vvv_fcond .... ........ fcond:5 vk:5 vj:5 vd:5 &vvv_fcond
@vr_ui4 .... ........ ..... . imm:4 rj:5 vd:5 &vr_i
@vr_ui3 .... ........ ..... .. imm:3 rj:5 vd:5 &vr_i
@vr_ui2 .... ........ ..... ... imm:2 rj:5 vd:5 &vr_i
@vr_ui1 .... ........ ..... .... imm:1 rj:5 vd:5 &vr_i
@rv_ui4 .... ........ ..... . imm:4 vj:5 rd:5 &rv_i
@rv_ui3 .... ........ ..... .. imm:3 vj:5 rd:5 &rv_i
@rv_ui2 .... ........ ..... ... imm:2 vj:5 rd:5 &rv_i
@rv_ui1 .... ........ ..... .... imm:1 vj:5 rd:5 &rv_i
@vr .... ........ ..... ..... rj:5 vd:5 &vr
@vvr .... ........ ..... rk:5 vj:5 vd:5 &vvr
@vr_i9 .... ........ . ......... rj:5 vd:5 &vr_i imm=%i9s3
@vr_i10 .... ........ .......... rj:5 vd:5 &vr_i imm=%i10s2
@vr_i11 .... ....... ........... rj:5 vd:5 &vr_i imm=%i11s1
@vr_i12 .... ...... imm:s12 rj:5 vd:5 &vr_i
@vr_i8i1 .... ........ . imm2:1 ........ rj:5 vd:5 &vr_ii imm=%i8s3
@vr_i8i2 .... ........ imm2:2 ........ rj:5 vd:5 &vr_ii imm=%i8s2
@vr_i8i3 .... ....... imm2:3 ........ rj:5 vd:5 &vr_ii imm=%i8s1
@vr_i8i4 .... ...... imm2:4 imm:s8 rj:5 vd:5 &vr_ii
@vrr .... ........ ..... rk:5 rj:5 vd:5 &vrr
@v_i13 .... ........ .. imm:13 vd:5 &v_i
vadd_b 0111 00000000 10100 ..... ..... ..... @vvv
vadd_h 0111 00000000 10101 ..... ..... ..... @vvv
vadd_w 0111 00000000 10110 ..... ..... ..... @vvv
vadd_d 0111 00000000 10111 ..... ..... ..... @vvv
vadd_q 0111 00010010 11010 ..... ..... ..... @vvv
vsub_b 0111 00000000 11000 ..... ..... ..... @vvv
vsub_h 0111 00000000 11001 ..... ..... ..... @vvv
vsub_w 0111 00000000 11010 ..... ..... ..... @vvv
vsub_d 0111 00000000 11011 ..... ..... ..... @vvv
vsub_q 0111 00010010 11011 ..... ..... ..... @vvv
vaddi_bu 0111 00101000 10100 ..... ..... ..... @vv_ui5
vaddi_hu 0111 00101000 10101 ..... ..... ..... @vv_ui5
vaddi_wu 0111 00101000 10110 ..... ..... ..... @vv_ui5
vaddi_du 0111 00101000 10111 ..... ..... ..... @vv_ui5
vsubi_bu 0111 00101000 11000 ..... ..... ..... @vv_ui5
vsubi_hu 0111 00101000 11001 ..... ..... ..... @vv_ui5
vsubi_wu 0111 00101000 11010 ..... ..... ..... @vv_ui5
vsubi_du 0111 00101000 11011 ..... ..... ..... @vv_ui5
vneg_b 0111 00101001 11000 01100 ..... ..... @vv
vneg_h 0111 00101001 11000 01101 ..... ..... @vv
vneg_w 0111 00101001 11000 01110 ..... ..... @vv
vneg_d 0111 00101001 11000 01111 ..... ..... @vv
vsadd_b 0111 00000100 01100 ..... ..... ..... @vvv
vsadd_h 0111 00000100 01101 ..... ..... ..... @vvv
vsadd_w 0111 00000100 01110 ..... ..... ..... @vvv
vsadd_d 0111 00000100 01111 ..... ..... ..... @vvv
vsadd_bu 0111 00000100 10100 ..... ..... ..... @vvv
vsadd_hu 0111 00000100 10101 ..... ..... ..... @vvv
vsadd_wu 0111 00000100 10110 ..... ..... ..... @vvv
vsadd_du 0111 00000100 10111 ..... ..... ..... @vvv
vssub_b 0111 00000100 10000 ..... ..... ..... @vvv
vssub_h 0111 00000100 10001 ..... ..... ..... @vvv
vssub_w 0111 00000100 10010 ..... ..... ..... @vvv
vssub_d 0111 00000100 10011 ..... ..... ..... @vvv
vssub_bu 0111 00000100 11000 ..... ..... ..... @vvv
vssub_hu 0111 00000100 11001 ..... ..... ..... @vvv
vssub_wu 0111 00000100 11010 ..... ..... ..... @vvv
vssub_du 0111 00000100 11011 ..... ..... ..... @vvv
vhaddw_h_b 0111 00000101 01000 ..... ..... ..... @vvv
vhaddw_w_h 0111 00000101 01001 ..... ..... ..... @vvv
vhaddw_d_w 0111 00000101 01010 ..... ..... ..... @vvv
vhaddw_q_d 0111 00000101 01011 ..... ..... ..... @vvv
vhaddw_hu_bu 0111 00000101 10000 ..... ..... ..... @vvv
vhaddw_wu_hu 0111 00000101 10001 ..... ..... ..... @vvv
vhaddw_du_wu 0111 00000101 10010 ..... ..... ..... @vvv
vhaddw_qu_du 0111 00000101 10011 ..... ..... ..... @vvv
vhsubw_h_b 0111 00000101 01100 ..... ..... ..... @vvv
vhsubw_w_h 0111 00000101 01101 ..... ..... ..... @vvv
vhsubw_d_w 0111 00000101 01110 ..... ..... ..... @vvv
vhsubw_q_d 0111 00000101 01111 ..... ..... ..... @vvv
vhsubw_hu_bu 0111 00000101 10100 ..... ..... ..... @vvv
vhsubw_wu_hu 0111 00000101 10101 ..... ..... ..... @vvv
vhsubw_du_wu 0111 00000101 10110 ..... ..... ..... @vvv
vhsubw_qu_du 0111 00000101 10111 ..... ..... ..... @vvv
vaddwev_h_b 0111 00000001 11100 ..... ..... ..... @vvv
vaddwev_w_h 0111 00000001 11101 ..... ..... ..... @vvv
vaddwev_d_w 0111 00000001 11110 ..... ..... ..... @vvv
vaddwev_q_d 0111 00000001 11111 ..... ..... ..... @vvv
vaddwod_h_b 0111 00000010 00100 ..... ..... ..... @vvv
vaddwod_w_h 0111 00000010 00101 ..... ..... ..... @vvv
vaddwod_d_w 0111 00000010 00110 ..... ..... ..... @vvv
vaddwod_q_d 0111 00000010 00111 ..... ..... ..... @vvv
vsubwev_h_b 0111 00000010 00000 ..... ..... ..... @vvv
vsubwev_w_h 0111 00000010 00001 ..... ..... ..... @vvv
vsubwev_d_w 0111 00000010 00010 ..... ..... ..... @vvv
vsubwev_q_d 0111 00000010 00011 ..... ..... ..... @vvv
vsubwod_h_b 0111 00000010 01000 ..... ..... ..... @vvv
vsubwod_w_h 0111 00000010 01001 ..... ..... ..... @vvv
vsubwod_d_w 0111 00000010 01010 ..... ..... ..... @vvv
vsubwod_q_d 0111 00000010 01011 ..... ..... ..... @vvv
vaddwev_h_bu 0111 00000010 11100 ..... ..... ..... @vvv
vaddwev_w_hu 0111 00000010 11101 ..... ..... ..... @vvv
vaddwev_d_wu 0111 00000010 11110 ..... ..... ..... @vvv
vaddwev_q_du 0111 00000010 11111 ..... ..... ..... @vvv
vaddwod_h_bu 0111 00000011 00100 ..... ..... ..... @vvv
vaddwod_w_hu 0111 00000011 00101 ..... ..... ..... @vvv
vaddwod_d_wu 0111 00000011 00110 ..... ..... ..... @vvv
vaddwod_q_du 0111 00000011 00111 ..... ..... ..... @vvv
vsubwev_h_bu 0111 00000011 00000 ..... ..... ..... @vvv
vsubwev_w_hu 0111 00000011 00001 ..... ..... ..... @vvv
vsubwev_d_wu 0111 00000011 00010 ..... ..... ..... @vvv
vsubwev_q_du 0111 00000011 00011 ..... ..... ..... @vvv
vsubwod_h_bu 0111 00000011 01000 ..... ..... ..... @vvv
vsubwod_w_hu 0111 00000011 01001 ..... ..... ..... @vvv
vsubwod_d_wu 0111 00000011 01010 ..... ..... ..... @vvv
vsubwod_q_du 0111 00000011 01011 ..... ..... ..... @vvv
vaddwev_h_bu_b 0111 00000011 11100 ..... ..... ..... @vvv
vaddwev_w_hu_h 0111 00000011 11101 ..... ..... ..... @vvv
vaddwev_d_wu_w 0111 00000011 11110 ..... ..... ..... @vvv
vaddwev_q_du_d 0111 00000011 11111 ..... ..... ..... @vvv
vaddwod_h_bu_b 0111 00000100 00000 ..... ..... ..... @vvv
vaddwod_w_hu_h 0111 00000100 00001 ..... ..... ..... @vvv
vaddwod_d_wu_w 0111 00000100 00010 ..... ..... ..... @vvv
vaddwod_q_du_d 0111 00000100 00011 ..... ..... ..... @vvv
vavg_b 0111 00000110 01000 ..... ..... ..... @vvv
vavg_h 0111 00000110 01001 ..... ..... ..... @vvv
vavg_w 0111 00000110 01010 ..... ..... ..... @vvv
vavg_d 0111 00000110 01011 ..... ..... ..... @vvv
vavg_bu 0111 00000110 01100 ..... ..... ..... @vvv
vavg_hu 0111 00000110 01101 ..... ..... ..... @vvv
vavg_wu 0111 00000110 01110 ..... ..... ..... @vvv
vavg_du 0111 00000110 01111 ..... ..... ..... @vvv
vavgr_b 0111 00000110 10000 ..... ..... ..... @vvv
vavgr_h 0111 00000110 10001 ..... ..... ..... @vvv
vavgr_w 0111 00000110 10010 ..... ..... ..... @vvv
vavgr_d 0111 00000110 10011 ..... ..... ..... @vvv
vavgr_bu 0111 00000110 10100 ..... ..... ..... @vvv
vavgr_hu 0111 00000110 10101 ..... ..... ..... @vvv
vavgr_wu 0111 00000110 10110 ..... ..... ..... @vvv
vavgr_du 0111 00000110 10111 ..... ..... ..... @vvv
vabsd_b 0111 00000110 00000 ..... ..... ..... @vvv
vabsd_h 0111 00000110 00001 ..... ..... ..... @vvv
vabsd_w 0111 00000110 00010 ..... ..... ..... @vvv
vabsd_d 0111 00000110 00011 ..... ..... ..... @vvv
vabsd_bu 0111 00000110 00100 ..... ..... ..... @vvv
vabsd_hu 0111 00000110 00101 ..... ..... ..... @vvv
vabsd_wu 0111 00000110 00110 ..... ..... ..... @vvv
vabsd_du 0111 00000110 00111 ..... ..... ..... @vvv
vadda_b 0111 00000101 11000 ..... ..... ..... @vvv
vadda_h 0111 00000101 11001 ..... ..... ..... @vvv
vadda_w 0111 00000101 11010 ..... ..... ..... @vvv
vadda_d 0111 00000101 11011 ..... ..... ..... @vvv
vmax_b 0111 00000111 00000 ..... ..... ..... @vvv
vmax_h 0111 00000111 00001 ..... ..... ..... @vvv
vmax_w 0111 00000111 00010 ..... ..... ..... @vvv
vmax_d 0111 00000111 00011 ..... ..... ..... @vvv
vmaxi_b 0111 00101001 00000 ..... ..... ..... @vv_i5
vmaxi_h 0111 00101001 00001 ..... ..... ..... @vv_i5
vmaxi_w 0111 00101001 00010 ..... ..... ..... @vv_i5
vmaxi_d 0111 00101001 00011 ..... ..... ..... @vv_i5
vmax_bu 0111 00000111 01000 ..... ..... ..... @vvv
vmax_hu 0111 00000111 01001 ..... ..... ..... @vvv
vmax_wu 0111 00000111 01010 ..... ..... ..... @vvv
vmax_du 0111 00000111 01011 ..... ..... ..... @vvv
vmaxi_bu 0111 00101001 01000 ..... ..... ..... @vv_ui5
vmaxi_hu 0111 00101001 01001 ..... ..... ..... @vv_ui5
vmaxi_wu 0111 00101001 01010 ..... ..... ..... @vv_ui5
vmaxi_du 0111 00101001 01011 ..... ..... ..... @vv_ui5
vmin_b 0111 00000111 00100 ..... ..... ..... @vvv
vmin_h 0111 00000111 00101 ..... ..... ..... @vvv
vmin_w 0111 00000111 00110 ..... ..... ..... @vvv
vmin_d 0111 00000111 00111 ..... ..... ..... @vvv
vmini_b 0111 00101001 00100 ..... ..... ..... @vv_i5
vmini_h 0111 00101001 00101 ..... ..... ..... @vv_i5
vmini_w 0111 00101001 00110 ..... ..... ..... @vv_i5
vmini_d 0111 00101001 00111 ..... ..... ..... @vv_i5
vmin_bu 0111 00000111 01100 ..... ..... ..... @vvv
vmin_hu 0111 00000111 01101 ..... ..... ..... @vvv
vmin_wu 0111 00000111 01110 ..... ..... ..... @vvv
vmin_du 0111 00000111 01111 ..... ..... ..... @vvv
vmini_bu 0111 00101001 01100 ..... ..... ..... @vv_ui5
vmini_hu 0111 00101001 01101 ..... ..... ..... @vv_ui5
vmini_wu 0111 00101001 01110 ..... ..... ..... @vv_ui5
vmini_du 0111 00101001 01111 ..... ..... ..... @vv_ui5
vmul_b 0111 00001000 01000 ..... ..... ..... @vvv
vmul_h 0111 00001000 01001 ..... ..... ..... @vvv
vmul_w 0111 00001000 01010 ..... ..... ..... @vvv
vmul_d 0111 00001000 01011 ..... ..... ..... @vvv
vmuh_b 0111 00001000 01100 ..... ..... ..... @vvv
vmuh_h 0111 00001000 01101 ..... ..... ..... @vvv
vmuh_w 0111 00001000 01110 ..... ..... ..... @vvv
vmuh_d 0111 00001000 01111 ..... ..... ..... @vvv
vmuh_bu 0111 00001000 10000 ..... ..... ..... @vvv
vmuh_hu 0111 00001000 10001 ..... ..... ..... @vvv
vmuh_wu 0111 00001000 10010 ..... ..... ..... @vvv
vmuh_du 0111 00001000 10011 ..... ..... ..... @vvv
vmulwev_h_b 0111 00001001 00000 ..... ..... ..... @vvv
vmulwev_w_h 0111 00001001 00001 ..... ..... ..... @vvv
vmulwev_d_w 0111 00001001 00010 ..... ..... ..... @vvv
vmulwev_q_d 0111 00001001 00011 ..... ..... ..... @vvv
vmulwod_h_b 0111 00001001 00100 ..... ..... ..... @vvv
vmulwod_w_h 0111 00001001 00101 ..... ..... ..... @vvv
vmulwod_d_w 0111 00001001 00110 ..... ..... ..... @vvv
vmulwod_q_d 0111 00001001 00111 ..... ..... ..... @vvv
vmulwev_h_bu 0111 00001001 10000 ..... ..... ..... @vvv
vmulwev_w_hu 0111 00001001 10001 ..... ..... ..... @vvv
vmulwev_d_wu 0111 00001001 10010 ..... ..... ..... @vvv
vmulwev_q_du 0111 00001001 10011 ..... ..... ..... @vvv
vmulwod_h_bu 0111 00001001 10100 ..... ..... ..... @vvv
vmulwod_w_hu 0111 00001001 10101 ..... ..... ..... @vvv
vmulwod_d_wu 0111 00001001 10110 ..... ..... ..... @vvv
vmulwod_q_du 0111 00001001 10111 ..... ..... ..... @vvv
vmulwev_h_bu_b 0111 00001010 00000 ..... ..... ..... @vvv
vmulwev_w_hu_h 0111 00001010 00001 ..... ..... ..... @vvv
vmulwev_d_wu_w 0111 00001010 00010 ..... ..... ..... @vvv
vmulwev_q_du_d 0111 00001010 00011 ..... ..... ..... @vvv
vmulwod_h_bu_b 0111 00001010 00100 ..... ..... ..... @vvv
vmulwod_w_hu_h 0111 00001010 00101 ..... ..... ..... @vvv
vmulwod_d_wu_w 0111 00001010 00110 ..... ..... ..... @vvv
vmulwod_q_du_d 0111 00001010 00111 ..... ..... ..... @vvv
vmadd_b 0111 00001010 10000 ..... ..... ..... @vvv
vmadd_h 0111 00001010 10001 ..... ..... ..... @vvv
vmadd_w 0111 00001010 10010 ..... ..... ..... @vvv
vmadd_d 0111 00001010 10011 ..... ..... ..... @vvv
vmsub_b 0111 00001010 10100 ..... ..... ..... @vvv
vmsub_h 0111 00001010 10101 ..... ..... ..... @vvv
vmsub_w 0111 00001010 10110 ..... ..... ..... @vvv
vmsub_d 0111 00001010 10111 ..... ..... ..... @vvv
vmaddwev_h_b 0111 00001010 11000 ..... ..... ..... @vvv
vmaddwev_w_h 0111 00001010 11001 ..... ..... ..... @vvv
vmaddwev_d_w 0111 00001010 11010 ..... ..... ..... @vvv
vmaddwev_q_d 0111 00001010 11011 ..... ..... ..... @vvv
vmaddwod_h_b 0111 00001010 11100 ..... ..... ..... @vvv
vmaddwod_w_h 0111 00001010 11101 ..... ..... ..... @vvv
vmaddwod_d_w 0111 00001010 11110 ..... ..... ..... @vvv
vmaddwod_q_d 0111 00001010 11111 ..... ..... ..... @vvv
vmaddwev_h_bu 0111 00001011 01000 ..... ..... ..... @vvv
vmaddwev_w_hu 0111 00001011 01001 ..... ..... ..... @vvv
vmaddwev_d_wu 0111 00001011 01010 ..... ..... ..... @vvv
vmaddwev_q_du 0111 00001011 01011 ..... ..... ..... @vvv
vmaddwod_h_bu 0111 00001011 01100 ..... ..... ..... @vvv
vmaddwod_w_hu 0111 00001011 01101 ..... ..... ..... @vvv
vmaddwod_d_wu 0111 00001011 01110 ..... ..... ..... @vvv
vmaddwod_q_du 0111 00001011 01111 ..... ..... ..... @vvv
vmaddwev_h_bu_b 0111 00001011 11000 ..... ..... ..... @vvv
vmaddwev_w_hu_h 0111 00001011 11001 ..... ..... ..... @vvv
vmaddwev_d_wu_w 0111 00001011 11010 ..... ..... ..... @vvv
vmaddwev_q_du_d 0111 00001011 11011 ..... ..... ..... @vvv
vmaddwod_h_bu_b 0111 00001011 11100 ..... ..... ..... @vvv
vmaddwod_w_hu_h 0111 00001011 11101 ..... ..... ..... @vvv
vmaddwod_d_wu_w 0111 00001011 11110 ..... ..... ..... @vvv
vmaddwod_q_du_d 0111 00001011 11111 ..... ..... ..... @vvv
vdiv_b 0111 00001110 00000 ..... ..... ..... @vvv
vdiv_h 0111 00001110 00001 ..... ..... ..... @vvv
vdiv_w 0111 00001110 00010 ..... ..... ..... @vvv
vdiv_d 0111 00001110 00011 ..... ..... ..... @vvv
vdiv_bu 0111 00001110 01000 ..... ..... ..... @vvv
vdiv_hu 0111 00001110 01001 ..... ..... ..... @vvv
vdiv_wu 0111 00001110 01010 ..... ..... ..... @vvv
vdiv_du 0111 00001110 01011 ..... ..... ..... @vvv
vmod_b 0111 00001110 00100 ..... ..... ..... @vvv
vmod_h 0111 00001110 00101 ..... ..... ..... @vvv
vmod_w 0111 00001110 00110 ..... ..... ..... @vvv
vmod_d 0111 00001110 00111 ..... ..... ..... @vvv
vmod_bu 0111 00001110 01100 ..... ..... ..... @vvv
vmod_hu 0111 00001110 01101 ..... ..... ..... @vvv
vmod_wu 0111 00001110 01110 ..... ..... ..... @vvv
vmod_du 0111 00001110 01111 ..... ..... ..... @vvv
vsat_b 0111 00110010 01000 01 ... ..... ..... @vv_ui3
vsat_h 0111 00110010 01000 1 .... ..... ..... @vv_ui4
vsat_w 0111 00110010 01001 ..... ..... ..... @vv_ui5
vsat_d 0111 00110010 0101 ...... ..... ..... @vv_ui6
vsat_bu 0111 00110010 10000 01 ... ..... ..... @vv_ui3
vsat_hu 0111 00110010 10000 1 .... ..... ..... @vv_ui4
vsat_wu 0111 00110010 10001 ..... ..... ..... @vv_ui5
vsat_du 0111 00110010 1001 ...... ..... ..... @vv_ui6
vexth_h_b 0111 00101001 11101 11000 ..... ..... @vv
vexth_w_h 0111 00101001 11101 11001 ..... ..... @vv
vexth_d_w 0111 00101001 11101 11010 ..... ..... @vv
vexth_q_d 0111 00101001 11101 11011 ..... ..... @vv
vexth_hu_bu 0111 00101001 11101 11100 ..... ..... @vv
vexth_wu_hu 0111 00101001 11101 11101 ..... ..... @vv
vexth_du_wu 0111 00101001 11101 11110 ..... ..... @vv
vexth_qu_du 0111 00101001 11101 11111 ..... ..... @vv
vsigncov_b 0111 00010010 11100 ..... ..... ..... @vvv
vsigncov_h 0111 00010010 11101 ..... ..... ..... @vvv
vsigncov_w 0111 00010010 11110 ..... ..... ..... @vvv
vsigncov_d 0111 00010010 11111 ..... ..... ..... @vvv
vmskltz_b 0111 00101001 11000 10000 ..... ..... @vv
vmskltz_h 0111 00101001 11000 10001 ..... ..... @vv
vmskltz_w 0111 00101001 11000 10010 ..... ..... @vv
vmskltz_d 0111 00101001 11000 10011 ..... ..... @vv
vmskgez_b 0111 00101001 11000 10100 ..... ..... @vv
vmsknz_b 0111 00101001 11000 11000 ..... ..... @vv
vldi 0111 00111110 00 ............. ..... @v_i13
vand_v 0111 00010010 01100 ..... ..... ..... @vvv
vor_v 0111 00010010 01101 ..... ..... ..... @vvv
vxor_v 0111 00010010 01110 ..... ..... ..... @vvv
vnor_v 0111 00010010 01111 ..... ..... ..... @vvv
vandn_v 0111 00010010 10000 ..... ..... ..... @vvv
vorn_v 0111 00010010 10001 ..... ..... ..... @vvv
vandi_b 0111 00111101 00 ........ ..... ..... @vv_ui8
vori_b 0111 00111101 01 ........ ..... ..... @vv_ui8
vxori_b 0111 00111101 10 ........ ..... ..... @vv_ui8
vnori_b 0111 00111101 11 ........ ..... ..... @vv_ui8
vsll_b 0111 00001110 10000 ..... ..... ..... @vvv
vsll_h 0111 00001110 10001 ..... ..... ..... @vvv
vsll_w 0111 00001110 10010 ..... ..... ..... @vvv
vsll_d 0111 00001110 10011 ..... ..... ..... @vvv
vslli_b 0111 00110010 11000 01 ... ..... ..... @vv_ui3
vslli_h 0111 00110010 11000 1 .... ..... ..... @vv_ui4
vslli_w 0111 00110010 11001 ..... ..... ..... @vv_ui5
vslli_d 0111 00110010 1101 ...... ..... ..... @vv_ui6
vsrl_b 0111 00001110 10100 ..... ..... ..... @vvv
vsrl_h 0111 00001110 10101 ..... ..... ..... @vvv
vsrl_w 0111 00001110 10110 ..... ..... ..... @vvv
vsrl_d 0111 00001110 10111 ..... ..... ..... @vvv
vsrli_b 0111 00110011 00000 01 ... ..... ..... @vv_ui3
vsrli_h 0111 00110011 00000 1 .... ..... ..... @vv_ui4
vsrli_w 0111 00110011 00001 ..... ..... ..... @vv_ui5
vsrli_d 0111 00110011 0001 ...... ..... ..... @vv_ui6
vsra_b 0111 00001110 11000 ..... ..... ..... @vvv
vsra_h 0111 00001110 11001 ..... ..... ..... @vvv
vsra_w 0111 00001110 11010 ..... ..... ..... @vvv
vsra_d 0111 00001110 11011 ..... ..... ..... @vvv
vsrai_b 0111 00110011 01000 01 ... ..... ..... @vv_ui3
vsrai_h 0111 00110011 01000 1 .... ..... ..... @vv_ui4
vsrai_w 0111 00110011 01001 ..... ..... ..... @vv_ui5
vsrai_d 0111 00110011 0101 ...... ..... ..... @vv_ui6
vrotr_b 0111 00001110 11100 ..... ..... ..... @vvv
vrotr_h 0111 00001110 11101 ..... ..... ..... @vvv
vrotr_w 0111 00001110 11110 ..... ..... ..... @vvv
vrotr_d 0111 00001110 11111 ..... ..... ..... @vvv
vrotri_b 0111 00101010 00000 01 ... ..... ..... @vv_ui3
vrotri_h 0111 00101010 00000 1 .... ..... ..... @vv_ui4
vrotri_w 0111 00101010 00001 ..... ..... ..... @vv_ui5
vrotri_d 0111 00101010 0001 ...... ..... ..... @vv_ui6
vsllwil_h_b 0111 00110000 10000 01 ... ..... ..... @vv_ui3
vsllwil_w_h 0111 00110000 10000 1 .... ..... ..... @vv_ui4
vsllwil_d_w 0111 00110000 10001 ..... ..... ..... @vv_ui5
vextl_q_d 0111 00110000 10010 00000 ..... ..... @vv
vsllwil_hu_bu 0111 00110000 11000 01 ... ..... ..... @vv_ui3
vsllwil_wu_hu 0111 00110000 11000 1 .... ..... ..... @vv_ui4
vsllwil_du_wu 0111 00110000 11001 ..... ..... ..... @vv_ui5
vextl_qu_du 0111 00110000 11010 00000 ..... ..... @vv
vsrlr_b 0111 00001111 00000 ..... ..... ..... @vvv
vsrlr_h 0111 00001111 00001 ..... ..... ..... @vvv
vsrlr_w 0111 00001111 00010 ..... ..... ..... @vvv
vsrlr_d 0111 00001111 00011 ..... ..... ..... @vvv
vsrlri_b 0111 00101010 01000 01 ... ..... ..... @vv_ui3
vsrlri_h 0111 00101010 01000 1 .... ..... ..... @vv_ui4
vsrlri_w 0111 00101010 01001 ..... ..... ..... @vv_ui5
vsrlri_d 0111 00101010 0101 ...... ..... ..... @vv_ui6
vsrar_b 0111 00001111 00100 ..... ..... ..... @vvv
vsrar_h 0111 00001111 00101 ..... ..... ..... @vvv
vsrar_w 0111 00001111 00110 ..... ..... ..... @vvv
vsrar_d 0111 00001111 00111 ..... ..... ..... @vvv
vsrari_b 0111 00101010 10000 01 ... ..... ..... @vv_ui3
vsrari_h 0111 00101010 10000 1 .... ..... ..... @vv_ui4
vsrari_w 0111 00101010 10001 ..... ..... ..... @vv_ui5
vsrari_d 0111 00101010 1001 ...... ..... ..... @vv_ui6
vsrln_b_h 0111 00001111 01001 ..... ..... ..... @vvv
vsrln_h_w 0111 00001111 01010 ..... ..... ..... @vvv
vsrln_w_d 0111 00001111 01011 ..... ..... ..... @vvv
vsran_b_h 0111 00001111 01101 ..... ..... ..... @vvv
vsran_h_w 0111 00001111 01110 ..... ..... ..... @vvv
vsran_w_d 0111 00001111 01111 ..... ..... ..... @vvv
vsrlni_b_h 0111 00110100 00000 1 .... ..... ..... @vv_ui4
vsrlni_h_w 0111 00110100 00001 ..... ..... ..... @vv_ui5
vsrlni_w_d 0111 00110100 0001 ...... ..... ..... @vv_ui6
vsrlni_d_q 0111 00110100 001 ....... ..... ..... @vv_ui7
vsrani_b_h 0111 00110101 10000 1 .... ..... ..... @vv_ui4
vsrani_h_w 0111 00110101 10001 ..... ..... ..... @vv_ui5
vsrani_w_d 0111 00110101 1001 ...... ..... ..... @vv_ui6
vsrani_d_q 0111 00110101 101 ....... ..... ..... @vv_ui7
vsrlrn_b_h 0111 00001111 10001 ..... ..... ..... @vvv
vsrlrn_h_w 0111 00001111 10010 ..... ..... ..... @vvv
vsrlrn_w_d 0111 00001111 10011 ..... ..... ..... @vvv
vsrarn_b_h 0111 00001111 10101 ..... ..... ..... @vvv
vsrarn_h_w 0111 00001111 10110 ..... ..... ..... @vvv
vsrarn_w_d 0111 00001111 10111 ..... ..... ..... @vvv
vsrlrni_b_h 0111 00110100 01000 1 .... ..... ..... @vv_ui4
vsrlrni_h_w 0111 00110100 01001 ..... ..... ..... @vv_ui5
vsrlrni_w_d 0111 00110100 0101 ...... ..... ..... @vv_ui6
vsrlrni_d_q 0111 00110100 011 ....... ..... ..... @vv_ui7
vsrarni_b_h 0111 00110101 11000 1 .... ..... ..... @vv_ui4
vsrarni_h_w 0111 00110101 11001 ..... ..... ..... @vv_ui5
vsrarni_w_d 0111 00110101 1101 ...... ..... ..... @vv_ui6
vsrarni_d_q 0111 00110101 111 ....... ..... ..... @vv_ui7
vssrln_b_h 0111 00001111 11001 ..... ..... ..... @vvv
vssrln_h_w 0111 00001111 11010 ..... ..... ..... @vvv
vssrln_w_d 0111 00001111 11011 ..... ..... ..... @vvv
vssran_b_h 0111 00001111 11101 ..... ..... ..... @vvv
vssran_h_w 0111 00001111 11110 ..... ..... ..... @vvv
vssran_w_d 0111 00001111 11111 ..... ..... ..... @vvv
vssrln_bu_h 0111 00010000 01001 ..... ..... ..... @vvv
vssrln_hu_w 0111 00010000 01010 ..... ..... ..... @vvv
vssrln_wu_d 0111 00010000 01011 ..... ..... ..... @vvv
vssran_bu_h 0111 00010000 01101 ..... ..... ..... @vvv
vssran_hu_w 0111 00010000 01110 ..... ..... ..... @vvv
vssran_wu_d 0111 00010000 01111 ..... ..... ..... @vvv
vssrlni_b_h 0111 00110100 10000 1 .... ..... ..... @vv_ui4
vssrlni_h_w 0111 00110100 10001 ..... ..... ..... @vv_ui5
vssrlni_w_d 0111 00110100 1001 ...... ..... ..... @vv_ui6
vssrlni_d_q 0111 00110100 101 ....... ..... ..... @vv_ui7
vssrani_b_h 0111 00110110 00000 1 .... ..... ..... @vv_ui4
vssrani_h_w 0111 00110110 00001 ..... ..... ..... @vv_ui5
vssrani_w_d 0111 00110110 0001 ...... ..... ..... @vv_ui6
vssrani_d_q 0111 00110110 001 ....... ..... ..... @vv_ui7
vssrlni_bu_h 0111 00110100 11000 1 .... ..... ..... @vv_ui4
vssrlni_hu_w 0111 00110100 11001 ..... ..... ..... @vv_ui5
vssrlni_wu_d 0111 00110100 1101 ...... ..... ..... @vv_ui6
vssrlni_du_q 0111 00110100 111 ....... ..... ..... @vv_ui7
vssrani_bu_h 0111 00110110 01000 1 .... ..... ..... @vv_ui4
vssrani_hu_w 0111 00110110 01001 ..... ..... ..... @vv_ui5
vssrani_wu_d 0111 00110110 0101 ...... ..... ..... @vv_ui6
vssrani_du_q 0111 00110110 011 ....... ..... ..... @vv_ui7
vssrlrn_b_h 0111 00010000 00001 ..... ..... ..... @vvv
vssrlrn_h_w 0111 00010000 00010 ..... ..... ..... @vvv
vssrlrn_w_d 0111 00010000 00011 ..... ..... ..... @vvv
vssrarn_b_h 0111 00010000 00101 ..... ..... ..... @vvv
vssrarn_h_w 0111 00010000 00110 ..... ..... ..... @vvv
vssrarn_w_d 0111 00010000 00111 ..... ..... ..... @vvv
vssrlrn_bu_h 0111 00010000 10001 ..... ..... ..... @vvv
vssrlrn_hu_w 0111 00010000 10010 ..... ..... ..... @vvv
vssrlrn_wu_d 0111 00010000 10011 ..... ..... ..... @vvv
vssrarn_bu_h 0111 00010000 10101 ..... ..... ..... @vvv
vssrarn_hu_w 0111 00010000 10110 ..... ..... ..... @vvv
vssrarn_wu_d 0111 00010000 10111 ..... ..... ..... @vvv
vssrlrni_b_h 0111 00110101 00000 1 .... ..... ..... @vv_ui4
vssrlrni_h_w 0111 00110101 00001 ..... ..... ..... @vv_ui5
vssrlrni_w_d 0111 00110101 0001 ...... ..... ..... @vv_ui6
vssrlrni_d_q 0111 00110101 001 ....... ..... ..... @vv_ui7
vssrarni_b_h 0111 00110110 10000 1 .... ..... ..... @vv_ui4
vssrarni_h_w 0111 00110110 10001 ..... ..... ..... @vv_ui5
vssrarni_w_d 0111 00110110 1001 ...... ..... ..... @vv_ui6
vssrarni_d_q 0111 00110110 101 ....... ..... ..... @vv_ui7
vssrlrni_bu_h 0111 00110101 01000 1 .... ..... ..... @vv_ui4
vssrlrni_hu_w 0111 00110101 01001 ..... ..... ..... @vv_ui5
vssrlrni_wu_d 0111 00110101 0101 ...... ..... ..... @vv_ui6
vssrlrni_du_q 0111 00110101 011 ....... ..... ..... @vv_ui7
vssrarni_bu_h 0111 00110110 11000 1 .... ..... ..... @vv_ui4
vssrarni_hu_w 0111 00110110 11001 ..... ..... ..... @vv_ui5
vssrarni_wu_d 0111 00110110 1101 ...... ..... ..... @vv_ui6
vssrarni_du_q 0111 00110110 111 ....... ..... ..... @vv_ui7
vclo_b 0111 00101001 11000 00000 ..... ..... @vv
vclo_h 0111 00101001 11000 00001 ..... ..... @vv
vclo_w 0111 00101001 11000 00010 ..... ..... @vv
vclo_d 0111 00101001 11000 00011 ..... ..... @vv
vclz_b 0111 00101001 11000 00100 ..... ..... @vv
vclz_h 0111 00101001 11000 00101 ..... ..... @vv
vclz_w 0111 00101001 11000 00110 ..... ..... @vv
vclz_d 0111 00101001 11000 00111 ..... ..... @vv
vpcnt_b 0111 00101001 11000 01000 ..... ..... @vv
vpcnt_h 0111 00101001 11000 01001 ..... ..... @vv
vpcnt_w 0111 00101001 11000 01010 ..... ..... @vv
vpcnt_d 0111 00101001 11000 01011 ..... ..... @vv
vbitclr_b 0111 00010000 11000 ..... ..... ..... @vvv
vbitclr_h 0111 00010000 11001 ..... ..... ..... @vvv
vbitclr_w 0111 00010000 11010 ..... ..... ..... @vvv
vbitclr_d 0111 00010000 11011 ..... ..... ..... @vvv
vbitclri_b 0111 00110001 00000 01 ... ..... ..... @vv_ui3
vbitclri_h 0111 00110001 00000 1 .... ..... ..... @vv_ui4
vbitclri_w 0111 00110001 00001 ..... ..... ..... @vv_ui5
vbitclri_d 0111 00110001 0001 ...... ..... ..... @vv_ui6
vbitset_b 0111 00010000 11100 ..... ..... ..... @vvv
vbitset_h 0111 00010000 11101 ..... ..... ..... @vvv
vbitset_w 0111 00010000 11110 ..... ..... ..... @vvv
vbitset_d 0111 00010000 11111 ..... ..... ..... @vvv
vbitseti_b 0111 00110001 01000 01 ... ..... ..... @vv_ui3
vbitseti_h 0111 00110001 01000 1 .... ..... ..... @vv_ui4
vbitseti_w 0111 00110001 01001 ..... ..... ..... @vv_ui5
vbitseti_d 0111 00110001 0101 ...... ..... ..... @vv_ui6
vbitrev_b 0111 00010001 00000 ..... ..... ..... @vvv
vbitrev_h 0111 00010001 00001 ..... ..... ..... @vvv
vbitrev_w 0111 00010001 00010 ..... ..... ..... @vvv
vbitrev_d 0111 00010001 00011 ..... ..... ..... @vvv
vbitrevi_b 0111 00110001 10000 01 ... ..... ..... @vv_ui3
vbitrevi_h 0111 00110001 10000 1 .... ..... ..... @vv_ui4
vbitrevi_w 0111 00110001 10001 ..... ..... ..... @vv_ui5
vbitrevi_d 0111 00110001 1001 ...... ..... ..... @vv_ui6
vfrstp_b 0111 00010010 10110 ..... ..... ..... @vvv
vfrstp_h 0111 00010010 10111 ..... ..... ..... @vvv
vfrstpi_b 0111 00101001 10100 ..... ..... ..... @vv_ui5
vfrstpi_h 0111 00101001 10101 ..... ..... ..... @vv_ui5
vfadd_s 0111 00010011 00001 ..... ..... ..... @vvv
vfadd_d 0111 00010011 00010 ..... ..... ..... @vvv
vfsub_s 0111 00010011 00101 ..... ..... ..... @vvv
vfsub_d 0111 00010011 00110 ..... ..... ..... @vvv
vfmul_s 0111 00010011 10001 ..... ..... ..... @vvv
vfmul_d 0111 00010011 10010 ..... ..... ..... @vvv
vfdiv_s 0111 00010011 10101 ..... ..... ..... @vvv
vfdiv_d 0111 00010011 10110 ..... ..... ..... @vvv
vfmadd_s 0000 10010001 ..... ..... ..... ..... @vvvv
vfmadd_d 0000 10010010 ..... ..... ..... ..... @vvvv
vfmsub_s 0000 10010101 ..... ..... ..... ..... @vvvv
vfmsub_d 0000 10010110 ..... ..... ..... ..... @vvvv
vfnmadd_s 0000 10011001 ..... ..... ..... ..... @vvvv
vfnmadd_d 0000 10011010 ..... ..... ..... ..... @vvvv
vfnmsub_s 0000 10011101 ..... ..... ..... ..... @vvvv
vfnmsub_d 0000 10011110 ..... ..... ..... ..... @vvvv
vfmax_s 0111 00010011 11001 ..... ..... ..... @vvv
vfmax_d 0111 00010011 11010 ..... ..... ..... @vvv
vfmin_s 0111 00010011 11101 ..... ..... ..... @vvv
vfmin_d 0111 00010011 11110 ..... ..... ..... @vvv
vfmaxa_s 0111 00010100 00001 ..... ..... ..... @vvv
vfmaxa_d 0111 00010100 00010 ..... ..... ..... @vvv
vfmina_s 0111 00010100 00101 ..... ..... ..... @vvv
vfmina_d 0111 00010100 00110 ..... ..... ..... @vvv
vflogb_s 0111 00101001 11001 10001 ..... ..... @vv
vflogb_d 0111 00101001 11001 10010 ..... ..... @vv
vfclass_s 0111 00101001 11001 10101 ..... ..... @vv
vfclass_d 0111 00101001 11001 10110 ..... ..... @vv
vfsqrt_s 0111 00101001 11001 11001 ..... ..... @vv
vfsqrt_d 0111 00101001 11001 11010 ..... ..... @vv
vfrecip_s 0111 00101001 11001 11101 ..... ..... @vv
vfrecip_d 0111 00101001 11001 11110 ..... ..... @vv
vfrsqrt_s 0111 00101001 11010 00001 ..... ..... @vv
vfrsqrt_d 0111 00101001 11010 00010 ..... ..... @vv
vfcvtl_s_h 0111 00101001 11011 11010 ..... ..... @vv
vfcvth_s_h 0111 00101001 11011 11011 ..... ..... @vv
vfcvtl_d_s 0111 00101001 11011 11100 ..... ..... @vv
vfcvth_d_s 0111 00101001 11011 11101 ..... ..... @vv
vfcvt_h_s 0111 00010100 01100 ..... ..... ..... @vvv
vfcvt_s_d 0111 00010100 01101 ..... ..... ..... @vvv
vfrint_s 0111 00101001 11010 01101 ..... ..... @vv
vfrint_d 0111 00101001 11010 01110 ..... ..... @vv
vfrintrm_s 0111 00101001 11010 10001 ..... ..... @vv
vfrintrm_d 0111 00101001 11010 10010 ..... ..... @vv
vfrintrp_s 0111 00101001 11010 10101 ..... ..... @vv
vfrintrp_d 0111 00101001 11010 10110 ..... ..... @vv
vfrintrz_s 0111 00101001 11010 11001 ..... ..... @vv
vfrintrz_d 0111 00101001 11010 11010 ..... ..... @vv
vfrintrne_s 0111 00101001 11010 11101 ..... ..... @vv
vfrintrne_d 0111 00101001 11010 11110 ..... ..... @vv
vftint_w_s 0111 00101001 11100 01100 ..... ..... @vv
vftint_l_d 0111 00101001 11100 01101 ..... ..... @vv
vftintrm_w_s 0111 00101001 11100 01110 ..... ..... @vv
vftintrm_l_d 0111 00101001 11100 01111 ..... ..... @vv
vftintrp_w_s 0111 00101001 11100 10000 ..... ..... @vv
vftintrp_l_d 0111 00101001 11100 10001 ..... ..... @vv
vftintrz_w_s 0111 00101001 11100 10010 ..... ..... @vv
vftintrz_l_d 0111 00101001 11100 10011 ..... ..... @vv
vftintrne_w_s 0111 00101001 11100 10100 ..... ..... @vv
vftintrne_l_d 0111 00101001 11100 10101 ..... ..... @vv
vftint_wu_s 0111 00101001 11100 10110 ..... ..... @vv
vftint_lu_d 0111 00101001 11100 10111 ..... ..... @vv
vftintrz_wu_s 0111 00101001 11100 11100 ..... ..... @vv
vftintrz_lu_d 0111 00101001 11100 11101 ..... ..... @vv
vftint_w_d 0111 00010100 10011 ..... ..... ..... @vvv
vftintrm_w_d 0111 00010100 10100 ..... ..... ..... @vvv
vftintrp_w_d 0111 00010100 10101 ..... ..... ..... @vvv
vftintrz_w_d 0111 00010100 10110 ..... ..... ..... @vvv
vftintrne_w_d 0111 00010100 10111 ..... ..... ..... @vvv
vftintl_l_s 0111 00101001 11101 00000 ..... ..... @vv
vftinth_l_s 0111 00101001 11101 00001 ..... ..... @vv
vftintrml_l_s 0111 00101001 11101 00010 ..... ..... @vv
vftintrmh_l_s 0111 00101001 11101 00011 ..... ..... @vv
vftintrpl_l_s 0111 00101001 11101 00100 ..... ..... @vv
vftintrph_l_s 0111 00101001 11101 00101 ..... ..... @vv
vftintrzl_l_s 0111 00101001 11101 00110 ..... ..... @vv
vftintrzh_l_s 0111 00101001 11101 00111 ..... ..... @vv
vftintrnel_l_s 0111 00101001 11101 01000 ..... ..... @vv
vftintrneh_l_s 0111 00101001 11101 01001 ..... ..... @vv
vffint_s_w 0111 00101001 11100 00000 ..... ..... @vv
vffint_s_wu 0111 00101001 11100 00001 ..... ..... @vv
vffint_d_l 0111 00101001 11100 00010 ..... ..... @vv
vffint_d_lu 0111 00101001 11100 00011 ..... ..... @vv
vffintl_d_w 0111 00101001 11100 00100 ..... ..... @vv
vffinth_d_w 0111 00101001 11100 00101 ..... ..... @vv
vffint_s_l 0111 00010100 10000 ..... ..... ..... @vvv
vseq_b 0111 00000000 00000 ..... ..... ..... @vvv
vseq_h 0111 00000000 00001 ..... ..... ..... @vvv
vseq_w 0111 00000000 00010 ..... ..... ..... @vvv
vseq_d 0111 00000000 00011 ..... ..... ..... @vvv
vseqi_b 0111 00101000 00000 ..... ..... ..... @vv_i5
vseqi_h 0111 00101000 00001 ..... ..... ..... @vv_i5
vseqi_w 0111 00101000 00010 ..... ..... ..... @vv_i5
vseqi_d 0111 00101000 00011 ..... ..... ..... @vv_i5
vsle_b 0111 00000000 00100 ..... ..... ..... @vvv
vsle_h 0111 00000000 00101 ..... ..... ..... @vvv
vsle_w 0111 00000000 00110 ..... ..... ..... @vvv
vsle_d 0111 00000000 00111 ..... ..... ..... @vvv
vslei_b 0111 00101000 00100 ..... ..... ..... @vv_i5
vslei_h 0111 00101000 00101 ..... ..... ..... @vv_i5
vslei_w 0111 00101000 00110 ..... ..... ..... @vv_i5
vslei_d 0111 00101000 00111 ..... ..... ..... @vv_i5
vsle_bu 0111 00000000 01000 ..... ..... ..... @vvv
vsle_hu 0111 00000000 01001 ..... ..... ..... @vvv
vsle_wu 0111 00000000 01010 ..... ..... ..... @vvv
vsle_du 0111 00000000 01011 ..... ..... ..... @vvv
vslei_bu 0111 00101000 01000 ..... ..... ..... @vv_ui5
vslei_hu 0111 00101000 01001 ..... ..... ..... @vv_ui5
vslei_wu 0111 00101000 01010 ..... ..... ..... @vv_ui5
vslei_du 0111 00101000 01011 ..... ..... ..... @vv_ui5
vslt_b 0111 00000000 01100 ..... ..... ..... @vvv
vslt_h 0111 00000000 01101 ..... ..... ..... @vvv
vslt_w 0111 00000000 01110 ..... ..... ..... @vvv
vslt_d 0111 00000000 01111 ..... ..... ..... @vvv
vslti_b 0111 00101000 01100 ..... ..... ..... @vv_i5
vslti_h 0111 00101000 01101 ..... ..... ..... @vv_i5
vslti_w 0111 00101000 01110 ..... ..... ..... @vv_i5
vslti_d 0111 00101000 01111 ..... ..... ..... @vv_i5
vslt_bu 0111 00000000 10000 ..... ..... ..... @vvv
vslt_hu 0111 00000000 10001 ..... ..... ..... @vvv
vslt_wu 0111 00000000 10010 ..... ..... ..... @vvv
vslt_du 0111 00000000 10011 ..... ..... ..... @vvv
vslti_bu 0111 00101000 10000 ..... ..... ..... @vv_ui5
vslti_hu 0111 00101000 10001 ..... ..... ..... @vv_ui5
vslti_wu 0111 00101000 10010 ..... ..... ..... @vv_ui5
vslti_du 0111 00101000 10011 ..... ..... ..... @vv_ui5
vfcmp_cond_s 0000 11000101 ..... ..... ..... ..... @vvv_fcond
vfcmp_cond_d 0000 11000110 ..... ..... ..... ..... @vvv_fcond
vbitsel_v 0000 11010001 ..... ..... ..... ..... @vvvv
vbitseli_b 0111 00111100 01 ........ ..... ..... @vv_ui8
vseteqz_v 0111 00101001 11001 00110 ..... 00 ... @cv
vsetnez_v 0111 00101001 11001 00111 ..... 00 ... @cv
vsetanyeqz_b 0111 00101001 11001 01000 ..... 00 ... @cv
vsetanyeqz_h 0111 00101001 11001 01001 ..... 00 ... @cv
vsetanyeqz_w 0111 00101001 11001 01010 ..... 00 ... @cv
vsetanyeqz_d 0111 00101001 11001 01011 ..... 00 ... @cv
vsetallnez_b 0111 00101001 11001 01100 ..... 00 ... @cv
vsetallnez_h 0111 00101001 11001 01101 ..... 00 ... @cv
vsetallnez_w 0111 00101001 11001 01110 ..... 00 ... @cv
vsetallnez_d 0111 00101001 11001 01111 ..... 00 ... @cv
vinsgr2vr_b 0111 00101110 10111 0 .... ..... ..... @vr_ui4
vinsgr2vr_h 0111 00101110 10111 10 ... ..... ..... @vr_ui3
vinsgr2vr_w 0111 00101110 10111 110 .. ..... ..... @vr_ui2
vinsgr2vr_d 0111 00101110 10111 1110 . ..... ..... @vr_ui1
vpickve2gr_b 0111 00101110 11111 0 .... ..... ..... @rv_ui4
vpickve2gr_h 0111 00101110 11111 10 ... ..... ..... @rv_ui3
vpickve2gr_w 0111 00101110 11111 110 .. ..... ..... @rv_ui2
vpickve2gr_d 0111 00101110 11111 1110 . ..... ..... @rv_ui1
vpickve2gr_bu 0111 00101111 00111 0 .... ..... ..... @rv_ui4
vpickve2gr_hu 0111 00101111 00111 10 ... ..... ..... @rv_ui3
vpickve2gr_wu 0111 00101111 00111 110 .. ..... ..... @rv_ui2
vpickve2gr_du 0111 00101111 00111 1110 . ..... ..... @rv_ui1
vreplgr2vr_b 0111 00101001 11110 00000 ..... ..... @vr
vreplgr2vr_h 0111 00101001 11110 00001 ..... ..... @vr
vreplgr2vr_w 0111 00101001 11110 00010 ..... ..... @vr
vreplgr2vr_d 0111 00101001 11110 00011 ..... ..... @vr
vreplve_b 0111 00010010 00100 ..... ..... ..... @vvr
vreplve_h 0111 00010010 00101 ..... ..... ..... @vvr
vreplve_w 0111 00010010 00110 ..... ..... ..... @vvr
vreplve_d 0111 00010010 00111 ..... ..... ..... @vvr
vreplvei_b 0111 00101111 01111 0 .... ..... ..... @vv_ui4
vreplvei_h 0111 00101111 01111 10 ... ..... ..... @vv_ui3
vreplvei_w 0111 00101111 01111 110 .. ..... ..... @vv_ui2
vreplvei_d 0111 00101111 01111 1110 . ..... ..... @vv_ui1
vbsll_v 0111 00101000 11100 ..... ..... ..... @vv_ui5
vbsrl_v 0111 00101000 11101 ..... ..... ..... @vv_ui5
vpackev_b 0111 00010001 01100 ..... ..... ..... @vvv
vpackev_h 0111 00010001 01101 ..... ..... ..... @vvv
vpackev_w 0111 00010001 01110 ..... ..... ..... @vvv
vpackev_d 0111 00010001 01111 ..... ..... ..... @vvv
vpackod_b 0111 00010001 10000 ..... ..... ..... @vvv
vpackod_h 0111 00010001 10001 ..... ..... ..... @vvv
vpackod_w 0111 00010001 10010 ..... ..... ..... @vvv
vpackod_d 0111 00010001 10011 ..... ..... ..... @vvv
vpickev_b 0111 00010001 11100 ..... ..... ..... @vvv
vpickev_h 0111 00010001 11101 ..... ..... ..... @vvv
vpickev_w 0111 00010001 11110 ..... ..... ..... @vvv
vpickev_d 0111 00010001 11111 ..... ..... ..... @vvv
vpickod_b 0111 00010010 00000 ..... ..... ..... @vvv
vpickod_h 0111 00010010 00001 ..... ..... ..... @vvv
vpickod_w 0111 00010010 00010 ..... ..... ..... @vvv
vpickod_d 0111 00010010 00011 ..... ..... ..... @vvv
vilvl_b 0111 00010001 10100 ..... ..... ..... @vvv
vilvl_h 0111 00010001 10101 ..... ..... ..... @vvv
vilvl_w 0111 00010001 10110 ..... ..... ..... @vvv
vilvl_d 0111 00010001 10111 ..... ..... ..... @vvv
vilvh_b 0111 00010001 11000 ..... ..... ..... @vvv
vilvh_h 0111 00010001 11001 ..... ..... ..... @vvv
vilvh_w 0111 00010001 11010 ..... ..... ..... @vvv
vilvh_d 0111 00010001 11011 ..... ..... ..... @vvv
vshuf_b 0000 11010101 ..... ..... ..... ..... @vvvv
vshuf_h 0111 00010111 10101 ..... ..... ..... @vvv
vshuf_w 0111 00010111 10110 ..... ..... ..... @vvv
vshuf_d 0111 00010111 10111 ..... ..... ..... @vvv
vshuf4i_b 0111 00111001 00 ........ ..... ..... @vv_ui8
vshuf4i_h 0111 00111001 01 ........ ..... ..... @vv_ui8
vshuf4i_w 0111 00111001 10 ........ ..... ..... @vv_ui8
vshuf4i_d 0111 00111001 11 ........ ..... ..... @vv_ui8
vpermi_w 0111 00111110 01 ........ ..... ..... @vv_ui8
vextrins_d 0111 00111000 00 ........ ..... ..... @vv_ui8
vextrins_w 0111 00111000 01 ........ ..... ..... @vv_ui8
vextrins_h 0111 00111000 10 ........ ..... ..... @vv_ui8
vextrins_b 0111 00111000 11 ........ ..... ..... @vv_ui8
vld 0010 110000 ............ ..... ..... @vr_i12
vst 0010 110001 ............ ..... ..... @vr_i12
vldx 0011 10000100 00000 ..... ..... ..... @vrr
vstx 0011 10000100 01000 ..... ..... ..... @vrr
vldrepl_d 0011 00000001 0 ......... ..... ..... @vr_i9
vldrepl_w 0011 00000010 .......... ..... ..... @vr_i10
vldrepl_h 0011 0000010 ........... ..... ..... @vr_i11
vldrepl_b 0011 000010 ............ ..... ..... @vr_i12
vstelm_d 0011 00010001 0 . ........ ..... ..... @vr_i8i1
vstelm_w 0011 00010010 .. ........ ..... ..... @vr_i8i2
vstelm_h 0011 0001010 ... ........ ..... ..... @vr_i8i3
vstelm_b 0011 000110 .... ........ ..... ..... @vr_i8i4
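Each line above is a decodetree pattern: fixed opcode bits plus the named fields laid out by the @... formats, with %i9s3-style fields running the raw bit range through shl_3()/shl_2()/shl_1() so the vldrepl/vstelm offsets are already scaled to bytes when they reach the translator. QEMU consumes the generated decoder (decode-insns.c.inc); the hand-written sketch below only illustrates the same field extraction, and the 0x700a0000 constant is nothing more than the fixed bits of the vadd_b pattern packed into a word:

/* Hand-rolled illustration of the field extraction the generated decoder
 * performs for the @vvv format (vd/vj/vk in bits 4:0, 9:5, 14:10) and for the
 * %i9s3 scaled immediate used by vldrepl.d.  Illustrative only. */
#include <stdint.h>
#include <stdio.h>

static void decode_vvv(uint32_t insn, int *vd, int *vj, int *vk)
{
    *vd = insn & 0x1f;           /* bits  4:0  */
    *vj = (insn >> 5) & 0x1f;    /* bits  9:5  */
    *vk = (insn >> 10) & 0x1f;   /* bits 14:10 */
}

static int decode_i9s3(uint32_t insn)
{
    int32_t si9 = (int32_t)(insn << 13) >> 23;   /* bits 18:10, sign-extended */

    return si9 * 8;                              /* !function=shl_3: byte offset */
}

int main(void)
{
    uint32_t insn = 0x700a0000 | (3 << 10) | (2 << 5) | 1;   /* vadd.b: vd=1, vj=2, vk=3 */
    int vd, vj, vk;

    decode_vvv(insn, &vd, &vj, &vk);
    printf("vd=%d vj=%d vk=%d\n", vd, vj, vk);

    /* si9 = -1 in bits 18:10 becomes a byte offset of -8 after shl_3 */
    printf("vldrepl.d offset = %d\n", decode_i9s3(0x1ffu << 10));
    return 0;
}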


@ -21,6 +21,28 @@
/* Global bit for huge page */
#define LOONGARCH_HGLOBAL_SHIFT 12
#if HOST_BIG_ENDIAN
#define B(x) B[15 - (x)]
#define H(x) H[7 - (x)]
#define W(x) W[3 - (x)]
#define D(x) D[1 - (x)]
#define UB(x) UB[15 - (x)]
#define UH(x) UH[7 - (x)]
#define UW(x) UW[3 - (x)]
#define UD(x) UD[1 - (x)]
#define Q(x) Q[x]
#else
#define B(x) B[x]
#define H(x) H[x]
#define W(x) W[x]
#define D(x) D[x]
#define UB(x) UB[x]
#define UH(x) UH[x]
#define UW(x) UW[x]
#define UD(x) UD[x]
#define Q(x) Q[x]
#endif
void loongarch_translate_init(void);
void loongarch_cpu_dump_state(CPUState *cpu, FILE *f, int flags);
@ -31,6 +53,7 @@ void G_NORETURN do_raise_exception(CPULoongArchState *env,
const char *loongarch_exception_name(int32_t exception);
int ieee_ex_to_loongarch(int xcpt);
void restore_fp_status(CPULoongArchState *env);
#ifndef CONFIG_USER_ONLY
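The HOST_BIG_ENDIAN block above keeps vector element numbering guest-ordered: index 0 of B()/H()/W()/D() always refers to the least-significant lane of the 128-bit register, so on a big-endian host the array subscript is mirrored. A standalone sketch of that idea follows, using a union that is only a simplified stand-in for the real VReg type:

/* Standalone sketch: with a union over one 128-bit register, D_EL(0) must mean
 * the least-significant 64 bits on any host, so big-endian hosts mirror the
 * subscript exactly as the B()/H()/W()/D() macros above do. */
#include <stdint.h>
#include <stdio.h>

typedef union {
    uint8_t  B[16];
    uint64_t D[2];
} VRegModel;

#if defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
#define B_EL(x) B[15 - (x)]
#define D_EL(x) D[1 - (x)]
#else
#define B_EL(x) B[x]
#define D_EL(x) D[x]
#endif

int main(void)
{
    VRegModel v = { 0 };

    v.D_EL(0) = 0x1122334455667788ull;   /* low doubleword of the vector */
    v.D_EL(1) = 0x99aabbccddeeff00ull;   /* high doubleword */

    /* Prints 0x88 on both little- and big-endian hosts: B_EL(0) is always the
     * least-significant byte of the low doubleword. */
    printf("B_EL(0) = 0x%02x\n", v.B_EL(0));
    return 0;
}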

File diff suppressed because it is too large.


@ -10,6 +10,72 @@
#include "migration/cpu.h"
#include "internals.h"
static const VMStateDescription vmstate_fpu_reg = {
.name = "fpu_reg",
.version_id = 1,
.minimum_version_id = 1,
.fields = (VMStateField[]) {
VMSTATE_UINT64(UD(0), VReg),
VMSTATE_END_OF_LIST()
}
};
#define VMSTATE_FPU_REGS(_field, _state, _start) \
VMSTATE_STRUCT_SUB_ARRAY(_field, _state, _start, 32, 0, \
vmstate_fpu_reg, fpr_t)
static bool fpu_needed(void *opaque)
{
LoongArchCPU *cpu = opaque;
return FIELD_EX64(cpu->env.cpucfg[2], CPUCFG2, FP);
}
static const VMStateDescription vmstate_fpu = {
.name = "cpu/fpu",
.version_id = 1,
.minimum_version_id = 1,
.needed = fpu_needed,
.fields = (VMStateField[]) {
VMSTATE_FPU_REGS(env.fpr, LoongArchCPU, 0),
VMSTATE_UINT32(env.fcsr0, LoongArchCPU),
VMSTATE_BOOL_ARRAY(env.cf, LoongArchCPU, 8),
VMSTATE_END_OF_LIST()
},
};
static const VMStateDescription vmstate_lsxh_reg = {
.name = "lsxh_reg",
.version_id = 1,
.minimum_version_id = 1,
.fields = (VMStateField[]) {
VMSTATE_UINT64(UD(1), VReg),
VMSTATE_END_OF_LIST()
}
};
#define VMSTATE_LSXH_REGS(_field, _state, _start) \
VMSTATE_STRUCT_SUB_ARRAY(_field, _state, _start, 32, 0, \
vmstate_lsxh_reg, fpr_t)
static bool lsx_needed(void *opaque)
{
LoongArchCPU *cpu = opaque;
return FIELD_EX64(cpu->env.cpucfg[2], CPUCFG2, LSX);
}
static const VMStateDescription vmstate_lsx = {
.name = "cpu/lsx",
.version_id = 1,
.minimum_version_id = 1,
.needed = lsx_needed,
.fields = (VMStateField[]) {
VMSTATE_LSXH_REGS(env.fpr, LoongArchCPU, 0),
VMSTATE_END_OF_LIST()
},
};
/* TLB state */
const VMStateDescription vmstate_tlb = {
.name = "cpu/tlb",
@ -24,18 +90,13 @@ const VMStateDescription vmstate_tlb = {
};
/* LoongArch CPU state */
const VMStateDescription vmstate_loongarch_cpu = {
.name = "cpu",
.version_id = 0,
.minimum_version_id = 0,
.version_id = 1,
.minimum_version_id = 1,
.fields = (VMStateField[]) {
VMSTATE_UINTTL_ARRAY(env.gpr, LoongArchCPU, 32),
VMSTATE_UINTTL(env.pc, LoongArchCPU),
VMSTATE_UINT64_ARRAY(env.fpr, LoongArchCPU, 32),
VMSTATE_UINT32(env.fcsr0, LoongArchCPU),
VMSTATE_BOOL_ARRAY(env.cf, LoongArchCPU, 8),
/* Remaining CSRs */
VMSTATE_UINT64(env.CSR_CRMD, LoongArchCPU),
@ -99,4 +160,8 @@ const VMStateDescription vmstate_loongarch_cpu = {
VMSTATE_END_OF_LIST()
},
.subsections = (const VMStateDescription*[]) {
&vmstate_fpu,
&vmstate_lsx,
}
};
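With each register widened to 128 bits, the saved state is split: the new "cpu/fpu" subsection migrates UD(0) of every register (plus fcsr0 and the condition flags) when CPUCFG2.FP is set, and "cpu/lsx" adds UD(1) only when CPUCFG2.LSX is set, which is why the top-level version_id is bumped and the old flat fpr array leaves the main field list. A rough standalone model of that split, ignoring fcsr0/cf and assuming a two-doubleword layout:

/* Rough model of the migration split: the "fpu" part serialises only the low
 * doubleword of each 128-bit register, the optional "lsx" part adds the high
 * doubleword.  Plain C, illustrative only. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef struct {
    uint64_t ud[2];                          /* ud[0] = UD(0), ud[1] = UD(1) */
} Reg128;

static size_t save_regs(uint8_t *buf, const Reg128 fpr[32], bool has_lsx)
{
    size_t n = 0;

    for (int i = 0; i < 32; i++) {           /* "cpu/fpu" subsection */
        memcpy(buf + n, &fpr[i].ud[0], 8);
        n += 8;
    }
    if (has_lsx) {                           /* "cpu/lsx" subsection */
        for (int i = 0; i < 32; i++) {
            memcpy(buf + n, &fpr[i].ud[1], 8);
            n += 8;
        }
    }
    return n;
}

int main(void)
{
    Reg128 fpr[32] = { 0 };
    uint8_t buf[32 * 16];

    printf("fpu only: %zu bytes, fpu+lsx: %zu bytes\n",
           save_regs(buf, fpr, false), save_regs(buf, fpr, true));
    return 0;
}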


@ -11,6 +11,7 @@ loongarch_tcg_ss.add(files(
'op_helper.c',
'translate.c',
'gdbstub.c',
'lsx_helper.c',
))
loongarch_tcg_ss.add(zlib)


@ -8,6 +8,8 @@
#include "qemu/osdep.h"
#include "cpu.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-op-gvec.h"
#include "exec/translator.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
@ -21,7 +23,6 @@
/* Global register indices */
TCGv cpu_gpr[32], cpu_pc;
static TCGv cpu_lladdr, cpu_llval;
TCGv_i64 cpu_fpr[32];
#include "exec/gen-icount.h"
@ -29,16 +30,43 @@ TCGv_i64 cpu_fpr[32];
#define DISAS_EXIT DISAS_TARGET_1
#define DISAS_EXIT_UPDATE DISAS_TARGET_2
static inline int vec_full_offset(int regno)
{
return offsetof(CPULoongArchState, fpr[regno]);
}
static inline void get_vreg64(TCGv_i64 dest, int regno, int index)
{
tcg_gen_ld_i64(dest, cpu_env,
offsetof(CPULoongArchState, fpr[regno].vreg.D(index)));
}
static inline void set_vreg64(TCGv_i64 src, int regno, int index)
{
tcg_gen_st_i64(src, cpu_env,
offsetof(CPULoongArchState, fpr[regno].vreg.D(index)));
}
static inline int plus_1(DisasContext *ctx, int x)
{
return x + 1;
}
static inline int shl_1(DisasContext *ctx, int x)
{
return x << 1;
}
static inline int shl_2(DisasContext *ctx, int x)
{
return x << 2;
}
static inline int shl_3(DisasContext *ctx, int x)
{
return x << 3;
}
/*
* On LoongArch, the upper 32 bits are undefined ("can be any value").
* QEMU chooses to nanbox, because it is most likely to show guest bugs early.
@ -71,6 +99,7 @@ static void loongarch_tr_init_disas_context(DisasContextBase *dcbase,
CPUState *cs)
{
int64_t bound;
CPULoongArchState *env = cs->env_ptr;
DisasContext *ctx = container_of(dcbase, DisasContext, base);
ctx->page_start = ctx->base.pc_first & TARGET_PAGE_MASK;
@ -85,6 +114,10 @@ static void loongarch_tr_init_disas_context(DisasContextBase *dcbase,
bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
if (FIELD_EX64(env->cpucfg[2], CPUCFG2, LSX)) {
ctx->vl = LSX_LEN;
}
ctx->zero = tcg_constant_tl(0);
}
@ -157,6 +190,20 @@ static void gen_set_gpr(int reg_num, TCGv t, DisasExtend dst_ext)
}
}
static TCGv get_fpr(DisasContext *ctx, int reg_num)
{
TCGv t = tcg_temp_new();
tcg_gen_ld_i64(t, cpu_env,
offsetof(CPULoongArchState, fpr[reg_num].vreg.D(0)));
return t;
}
static void set_fpr(int reg_num, TCGv val)
{
tcg_gen_st_i64(val, cpu_env,
offsetof(CPULoongArchState, fpr[reg_num].vreg.D(0)));
}
#include "decode-insns.c.inc"
#include "insn_trans/trans_arith.c.inc"
#include "insn_trans/trans_shift.c.inc"
@ -171,6 +218,7 @@ static void gen_set_gpr(int reg_num, TCGv t, DisasExtend dst_ext)
#include "insn_trans/trans_fmemory.c.inc"
#include "insn_trans/trans_branch.c.inc"
#include "insn_trans/trans_privileged.c.inc"
#include "insn_trans/trans_lsx.c.inc"
static void loongarch_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
@ -250,11 +298,6 @@ void loongarch_translate_init(void)
regnames[i]);
}
for (i = 0; i < 32; i++) {
int off = offsetof(CPULoongArchState, fpr[i]);
cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env, off, fregnames[i]);
}
cpu_pc = tcg_global_mem_new(cpu_env, offsetof(CPULoongArchState, pc), "pc");
cpu_lladdr = tcg_global_mem_new(cpu_env,
offsetof(CPULoongArchState, lladdr), "lladdr");
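Since cpu_fpr[] is no longer a bank of TCG globals, the new get_fpr()/set_fpr() helpers above reach the scalar FPRs by loading and storing 64 bits at an offset into the CPU state, namely the low doubleword of each 128-bit fpr entry, while vec_full_offset() gives the start of the whole vector for gvec operations. A minimal sketch of that addressing, using a simplified stand-in for CPULoongArchState:

/* Sketch of the offset-based FPR addressing: the scalar register is the low
 * doubleword of a 128-bit entry inside the CPU state, so the translator emits
 * loads/stores at offsetof(env, fpr[n].D[0]) instead of using TCG globals.
 * EnvModel is a simplified stand-in, not the real CPULoongArchState. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef union {
    uint64_t D[2];
    uint8_t  B[16];
} VRegModel;

typedef struct {
    uint64_t gpr[32];
    VRegModel fpr[32];
} EnvModel;

int main(void)
{
    /* Offsets the emitted 64-bit load/store and the gvec ops would use for f3. */
    printf("fpr[3] low doubleword at offset %zu\n",
           offsetof(EnvModel, fpr[3].D[0]));
    printf("fpr[3] full vector at offset %zu\n",
           offsetof(EnvModel, fpr[3]));
    return 0;
}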


@ -31,6 +31,7 @@ typedef struct DisasContext {
uint32_t opcode;
uint16_t mem_idx;
uint16_t plv;
int vl; /* Vector length */
TCGv zero;
} DisasContext;