This fixes a bug: these two insns (STZG and STZ2G) should have been using atomic 16-byte stores, since MTE is ARMv8.5 and LSE2 is mandatory from ARMv8.4.

Reviewed-by: Peter Maydell <peter.mayd...@linaro.org>
Signed-off-by: Richard Henderson <richard.hender...@linaro.org>
---
 target/arm/tcg/translate-a64.c | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/target/arm/tcg/translate-a64.c b/target/arm/tcg/translate-a64.c
index 6bb68618a0..51f9d227e7 100644
--- a/target/arm/tcg/translate-a64.c
+++ b/target/arm/tcg/translate-a64.c
@@ -4070,15 +4070,18 @@ static void disas_ldst_tag(DisasContext *s, uint32_t insn)
     if (is_zero) {
         TCGv_i64 clean_addr = clean_data_tbi(s, addr);
-        TCGv_i64 tcg_zero = tcg_constant_i64(0);
+        TCGv_i64 zero64 = tcg_constant_i64(0);
+        TCGv_i128 zero128 = tcg_temp_new_i128();
         int mem_index = get_mem_index(s);
-        int i, n = (1 + is_pair) << LOG2_TAG_GRANULE;
+        MemOp mop = finalize_memop(s, MO_128 | MO_ALIGN);
 
-        tcg_gen_qemu_st_i64(tcg_zero, clean_addr, mem_index,
-                            MO_UQ | MO_ALIGN_16);
-        for (i = 8; i < n; i += 8) {
-            tcg_gen_addi_i64(clean_addr, clean_addr, 8);
-            tcg_gen_qemu_st_i64(tcg_zero, clean_addr, mem_index, MO_UQ);
+        tcg_gen_concat_i64_i128(zero128, zero64, zero64);
+
+        /* This is 1 or 2 atomic 16-byte operations. */
+        tcg_gen_qemu_st_i128(zero128, clean_addr, mem_index, mop);
+        if (is_pair) {
+            tcg_gen_addi_i64(clean_addr, clean_addr, 16);
+            tcg_gen_qemu_st_i128(zero128, clean_addr, mem_index, mop);
         }
     }
-- 
2.34.1
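
For readers less familiar with the i128 TCG interfaces, the idiom the patch
switches to reduces to the sketch below. The helper name gen_store_zero16()
and its standalone framing are invented for illustration only; everything it
calls (tcg_constant_i64, tcg_temp_new_i128, tcg_gen_concat_i64_i128,
tcg_gen_qemu_st_i128, get_mem_index, and translate-a64.c's finalize_memop)
is the same machinery used in the hunk above.

    /*
     * Illustrative sketch, not part of the patch: emit one aligned
     * 16-byte store of zero at 'addr'.  The helper name is hypothetical;
     * the calls are the TCG interfaces used in the hunk above.
     */
    static void gen_store_zero16(DisasContext *s, TCGv_i64 addr)
    {
        TCGv_i64 zero64 = tcg_constant_i64(0);
        TCGv_i128 zero128 = tcg_temp_new_i128();
        /* Request a single naturally aligned 16-byte access; finalize_memop()
           folds in the endianness/atomicity attributes the a64 front end
           applies to guest memory operations. */
        MemOp mop = finalize_memop(s, MO_128 | MO_ALIGN);

        /* Build a 128-bit zero from two 64-bit halves, then store it. */
        tcg_gen_concat_i64_i128(zero128, zero64, zero64);
        tcg_gen_qemu_st_i128(zero128, addr, get_mem_index(s), mop);
    }

Emitting the is_pair case as two such 16-byte stores, rather than one 32-byte
access, mirrors the guarantee being modelled: each 16-byte granule is written
with a single atomic operation, as the in-line comment in the patch notes.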