target/riscv: Adjust vsetvl according to XLEN

Signed-off-by: LIU Zhiwei <zhiwei_liu@c-sky.com>
Reviewed-by: Richard Henderson <richard.henderson@linaro.org>
Reviewed-by: Alistair Francis <alistair.francis@wdc.com>
Message-id: 20220120122050.41546-17-zhiwei_liu@c-sky.com
Signed-off-by: Alistair Francis <alistair.francis@wdc.com>

target/riscv/cpu.h

@@ -491,6 +491,11 @@ static inline RISCVMXL cpu_recompute_xl(CPURISCVState *env)
 }
 #endif
 
+static inline int riscv_cpu_xlen(CPURISCVState *env)
+{
+    return 16 << env->xl;
+}
+
 /*
  * Encode LMUL to lmul as follows:
  *   LMUL vlmul lmul
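
For reference, env->xl holds the MXL encoding (1 = RV32, 2 = RV64, 3 = RV128), so 16 << env->xl yields the effective XLEN in bits. A minimal standalone sketch of that mapping (not part of the patch):

    /* Sketch only: the MXL -> XLEN mapping that riscv_cpu_xlen() relies on. */
    #include <stdio.h>

    int main(void)
    {
        for (int mxl = 1; mxl <= 3; mxl++) {
            /* 16 << 1 = 32 (RV32), 16 << 2 = 64 (RV64), 16 << 3 = 128 (RV128) */
            printf("MXL=%d -> XLEN=%d\n", mxl, 16 << mxl);
        }
        return 0;
    }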

target/riscv/vector_helper.c

@@ -36,8 +36,11 @@ target_ulong HELPER(vsetvl)(CPURISCVState *env, target_ulong s1,
     uint64_t lmul = FIELD_EX64(s2, VTYPE, VLMUL);
     uint16_t sew = 8 << FIELD_EX64(s2, VTYPE, VSEW);
     uint8_t ediv = FIELD_EX64(s2, VTYPE, VEDIV);
-    bool vill = FIELD_EX64(s2, VTYPE, VILL);
-    target_ulong reserved = FIELD_EX64(s2, VTYPE, RESERVED);
+    int xlen = riscv_cpu_xlen(env);
+    bool vill = (s2 >> (xlen - 1)) & 0x1;
+    target_ulong reserved = s2 &
+                            MAKE_64BIT_MASK(R_VTYPE_RESERVED_SHIFT,
+                                            xlen - 1 - R_VTYPE_RESERVED_SHIFT);
 
     if (lmul & 4) {
         /* Fractional LMUL. */
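
With the extraction now tied to the runtime XLEN rather than the width of target_ulong, vill is read from bit xlen - 1 and the reserved mask stops just below it. A standalone sketch of that arithmetic (not part of the patch); RESERVED_SHIFT is a placeholder assumption standing in for R_VTYPE_RESERVED_SHIFT, whose real value comes from the VTYPE field layout in cpu_bits.h:

    #include <inttypes.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Same definition as QEMU's MAKE_64BIT_MASK in include/qemu/bitops.h. */
    #define MAKE_64BIT_MASK(shift, length) \
        (((~0ULL) >> (64 - (length))) << (shift))

    #define RESERVED_SHIFT 10   /* placeholder assumption, not the real constant */

    static bool vtype_vill(uint64_t s2, int xlen)
    {
        /* vill is the most significant vtype bit for the current XLEN. */
        return (s2 >> (xlen - 1)) & 0x1;
    }

    static uint64_t vtype_reserved(uint64_t s2, int xlen)
    {
        /* Reserved bits run from RESERVED_SHIFT up to, but not including, vill. */
        return s2 & MAKE_64BIT_MASK(RESERVED_SHIFT, xlen - 1 - RESERVED_SHIFT);
    }

    int main(void)
    {
        uint64_t vtype = 1ULL << 31;   /* only bit 31 set */

        /* On RV32 this is the vill bit; on RV64 the same bit lands in reserved. */
        printf("xlen=32: vill=%d reserved=0x%" PRIx64 "\n",
               vtype_vill(vtype, 32), vtype_reserved(vtype, 32));
        printf("xlen=64: vill=%d reserved=0x%" PRIx64 "\n",
               vtype_vill(vtype, 64), vtype_reserved(vtype, 64));
        return 0;
    }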