From 5c13983e23de4095e2dfa8bc52333ef40ebe40db Mon Sep 17 00:00:00 2001
From: Richard Henderson <richard.henderson@linaro.org>
Date: Tue, 6 Jun 2023 10:19:36 +0100
Subject: [PATCH] target/arm: Sink gen_mte_check1 into load/store_exclusive

No need to duplicate this check across multiple call sites.

Reviewed-by: Peter Maydell <peter.maydell@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Message-id: 20230530191438.411344-9-richard.henderson@linaro.org
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
---
 target/arm/tcg/translate-a64.c | 44 ++++++++++++++++------------------
 1 file changed, 21 insertions(+), 23 deletions(-)

diff --git a/target/arm/tcg/translate-a64.c b/target/arm/tcg/translate-a64.c
index 35eac7729b..729947b11a 100644
--- a/target/arm/tcg/translate-a64.c
+++ b/target/arm/tcg/translate-a64.c
@@ -2369,11 +2369,16 @@ static void disas_b_exc_sys(DisasContext *s, uint32_t insn)
  * races in multi-threaded linux-user and when MTTCG softmmu is
  * enabled.
  */
-static void gen_load_exclusive(DisasContext *s, int rt, int rt2,
-                               TCGv_i64 addr, int size, bool is_pair)
+static void gen_load_exclusive(DisasContext *s, int rt, int rt2, int rn,
+                               int size, bool is_pair)
 {
     int idx = get_mem_index(s);
     MemOp memop;
+    TCGv_i64 dirty_addr, clean_addr;
+
+    s->is_ldex = true;
+    dirty_addr = cpu_reg_sp(s, rn);
+    clean_addr = gen_mte_check1(s, dirty_addr, false, rn != 31, size);
 
     g_assert(size <= 3);
     if (is_pair) {
@@ -2381,7 +2386,7 @@ static void gen_load_exclusive(DisasContext *s, int rt, int rt2,
         if (size == 2) {
             /* The pair must be single-copy atomic for the doubleword. */
             memop = finalize_memop(s, MO_64 | MO_ALIGN);
-            tcg_gen_qemu_ld_i64(cpu_exclusive_val, addr, idx, memop);
+            tcg_gen_qemu_ld_i64(cpu_exclusive_val, clean_addr, idx, memop);
             if (s->be_data == MO_LE) {
                 tcg_gen_extract_i64(cpu_reg(s, rt), cpu_exclusive_val, 0, 32);
                 tcg_gen_extract_i64(cpu_reg(s, rt2), cpu_exclusive_val, 32, 32);
@@ -2400,7 +2405,7 @@ static void gen_load_exclusive(DisasContext *s, int rt, int rt2,
 
             memop = finalize_memop_atom(s, MO_128 | MO_ALIGN_16,
                                         MO_ATOM_IFALIGN_PAIR);
-            tcg_gen_qemu_ld_i128(t16, addr, idx, memop);
+            tcg_gen_qemu_ld_i128(t16, clean_addr, idx, memop);
 
             if (s->be_data == MO_LE) {
                 tcg_gen_extr_i128_i64(cpu_exclusive_val,
@@ -2414,14 +2419,14 @@ static void gen_load_exclusive(DisasContext *s, int rt, int rt2,
         }
     } else {
         memop = finalize_memop(s, size | MO_ALIGN);
-        tcg_gen_qemu_ld_i64(cpu_exclusive_val, addr, idx, memop);
+        tcg_gen_qemu_ld_i64(cpu_exclusive_val, clean_addr, idx, memop);
         tcg_gen_mov_i64(cpu_reg(s, rt), cpu_exclusive_val);
     }
-    tcg_gen_mov_i64(cpu_exclusive_addr, addr);
+    tcg_gen_mov_i64(cpu_exclusive_addr, clean_addr);
 }
 
 static void gen_store_exclusive(DisasContext *s, int rd, int rt, int rt2,
-                                TCGv_i64 addr, int size, int is_pair)
+                                int rn, int size, int is_pair)
 {
     /* if (env->exclusive_addr == addr && env->exclusive_val == [addr]
      * && (!is_pair || env->exclusive_high == [addr + datasize])) {
@@ -2437,9 +2442,12 @@ static void gen_store_exclusive(DisasContext *s, int rd, int rt, int rt2,
      */
     TCGLabel *fail_label = gen_new_label();
     TCGLabel *done_label = gen_new_label();
-    TCGv_i64 tmp;
+    TCGv_i64 tmp, dirty_addr, clean_addr;
 
-    tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_exclusive_addr, fail_label);
+    dirty_addr = cpu_reg_sp(s, rn);
+    clean_addr = gen_mte_check1(s, dirty_addr, true, rn != 31, size);
+
+    tcg_gen_brcond_i64(TCG_COND_NE, clean_addr, cpu_exclusive_addr, fail_label);
 
     tmp = tcg_temp_new_i64();
     if (is_pair) {
@@ -2627,9 +2635,7 @@ static void disas_ldst_excl(DisasContext *s, uint32_t insn)
         if (is_lasr) {
             tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
         }
-        clean_addr = gen_mte_check1(s, cpu_reg_sp(s, rn),
-                                    true, rn != 31, size);
-        gen_store_exclusive(s, rs, rt, rt2, clean_addr, size, false);
+        gen_store_exclusive(s, rs, rt, rt2, rn, size, false);
         return;
 
     case 0x4: /* LDXR */
@@ -2637,10 +2643,7 @@ static void disas_ldst_excl(DisasContext *s, uint32_t insn)
         if (rn == 31) {
             gen_check_sp_alignment(s);
         }
-        clean_addr = gen_mte_check1(s, cpu_reg_sp(s, rn),
-                                    false, rn != 31, size);
-        s->is_ldex = true;
-        gen_load_exclusive(s, rt, rt2, clean_addr, size, false);
+        gen_load_exclusive(s, rt, rt2, rn, size, false);
         if (is_lasr) {
             tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
         }
@@ -2692,9 +2695,7 @@ static void disas_ldst_excl(DisasContext *s, uint32_t insn)
             if (is_lasr) {
                 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
             }
-            clean_addr = gen_mte_check1(s, cpu_reg_sp(s, rn),
-                                        true, rn != 31, size);
-            gen_store_exclusive(s, rs, rt, rt2, clean_addr, size, true);
+            gen_store_exclusive(s, rs, rt, rt2, rn, size, true);
             return;
         }
         if (rt2 == 31
@@ -2711,10 +2712,7 @@ static void disas_ldst_excl(DisasContext *s, uint32_t insn)
             if (rn == 31) {
                 gen_check_sp_alignment(s);
             }
-            clean_addr = gen_mte_check1(s, cpu_reg_sp(s, rn),
-                                        false, rn != 31, size);
-            s->is_ldex = true;
-            gen_load_exclusive(s, rt, rt2, clean_addr, size, true);
+            gen_load_exclusive(s, rt, rt2, rn, size, true);
             if (is_lasr) {
                 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
             }
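
The net shape of the change, condensed from the hunks above into a before/after
sketch of the LDXR call path (illustrative only, not a literal excerpt of the
file):

    /* Before: each call site in disas_ldst_excl() cleaned the address itself. */
    clean_addr = gen_mte_check1(s, cpu_reg_sp(s, rn), false, rn != 31, size);
    s->is_ldex = true;
    gen_load_exclusive(s, rt, rt2, clean_addr, size, false);

    /* After: the call site passes only the register index... */
    gen_load_exclusive(s, rt, rt2, rn, size, false);

    /* ...and gen_load_exclusive() derives and MTE-checks the address on entry. */
    s->is_ldex = true;
    dirty_addr = cpu_reg_sp(s, rn);
    clean_addr = gen_mte_check1(s, dirty_addr, false, rn != 31, size);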