tcg/tci: Split out tci_args_rr

Reviewed-by: Philippe Mathieu-Daudé <f4bug@amsat.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Richard Henderson 2021-01-29 13:05:01 -10:00
parent cdd9799b25
commit fc4a62f65c

@@ -184,6 +184,13 @@ static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
  * s = signed ldst offset
  */
 
+static void tci_args_rr(const uint8_t **tb_ptr,
+                        TCGReg *r0, TCGReg *r1)
+{
+    *r0 = tci_read_r(tb_ptr);
+    *r1 = tci_read_r(tb_ptr);
+}
+
 static void tci_args_rrs(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, int32_t *i2)
 {
@@ -422,9 +429,8 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
             break;
 #endif
         CASE_32_64(mov)
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, t1);
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = regs[r1];
             break;
         case INDEX_op_tci_movi_i32:
             t0 = *tb_ptr++;
@@ -635,58 +641,50 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
 #endif /* TCG_TARGET_REG_BITS == 32 */
 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
         CASE_32_64(ext8s)
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, (int8_t)t1);
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = (int8_t)regs[r1];
             break;
 #endif
 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
         CASE_32_64(ext16s)
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, (int16_t)t1);
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = (int16_t)regs[r1];
             break;
 #endif
 #if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
         CASE_32_64(ext8u)
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, (uint8_t)t1);
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = (uint8_t)regs[r1];
             break;
 #endif
 #if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
         CASE_32_64(ext16u)
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, (uint16_t)t1);
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = (uint16_t)regs[r1];
             break;
 #endif
 #if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
         CASE_32_64(bswap16)
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, bswap16(t1));
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = bswap16(regs[r1]);
             break;
 #endif
 #if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
         CASE_32_64(bswap32)
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, bswap32(t1));
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = bswap32(regs[r1]);
             break;
 #endif
 #if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
         CASE_32_64(not)
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, ~t1);
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = ~regs[r1];
             break;
 #endif
 #if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
         CASE_32_64(neg)
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, -t1);
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = -regs[r1];
             break;
 #endif
 #if TCG_TARGET_REG_BITS == 64
@@ -799,21 +797,18 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
             break;
         case INDEX_op_ext32s_i64:
         case INDEX_op_ext_i32_i64:
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, (int32_t)t1);
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = (int32_t)regs[r1];
             break;
         case INDEX_op_ext32u_i64:
         case INDEX_op_extu_i32_i64:
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, (uint32_t)t1);
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = (uint32_t)regs[r1];
             break;
 #if TCG_TARGET_HAS_bswap64_i64
         case INDEX_op_bswap64_i64:
-            t0 = *tb_ptr++;
-            t1 = tci_read_rval(regs, &tb_ptr);
-            tci_write_reg(regs, t0, bswap64(t1));
+            tci_args_rr(&tb_ptr, &r0, &r1);
+            regs[r0] = bswap64(regs[r1]);
             break;
 #endif
 #endif /* TCG_TARGET_REG_BITS == 64 */