Interpret the variable argument placement in the caller. Mark the
argument registers const, because they must be passed to
add_qemu_ldst_label unmodified. This requires a bit of local variable
renaming, because addrlo was being modified.
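
For illustration, here is the caller-side decode this puts in place for
the 64-bit load opcode -- the INDEX_op_qemu_ld_i64 case from the
tcg_out_op hunk below, with comments added here to spell out each
configuration; a -1 argument marks a register half that is unused in
that configuration:

    case INDEX_op_qemu_ld_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            /* 64-bit host: one data register, one address register.  */
            tcg_out_qemu_ld(s, args[0], -1, args[1], -1,
                            args[2], TCG_TYPE_I64);
        } else if (TARGET_LONG_BITS == 32) {
            /* 32-bit host, 32-bit guest: data pair, single address.  */
            tcg_out_qemu_ld(s, args[0], args[1], args[2], -1,
                            args[3], TCG_TYPE_I64);
        } else {
            /* 32-bit host, 64-bit guest: data pair and address pair.  */
            tcg_out_qemu_ld(s, args[0], args[1], args[2], args[3],
                            args[4], TCG_TYPE_I64);
        }
        break;

The i32 and store cases decode the same layout in the same way.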

Pass data_type instead of is64 -- there are several places where we
already convert back from bool to type. Clean things up by using type
throughout.

Signed-off-by: Richard Henderson <richard.hender...@linaro.org>
---
 tcg/ppc/tcg-target.c.inc | 164 +++++++++++++++++++++------------------
 1 file changed, 89 insertions(+), 75 deletions(-)

diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index 77abb7d20c..90093a6509 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -2118,7 +2118,8 @@ static TCGReg tcg_out_tlb_read(TCGContext *s, MemOp opc,
 /* Record the context of a call to the out of line helper code for the slow
    path for a load or store, so that we can later generate the correct
    helper code.  */
-static void add_qemu_ldst_label(TCGContext *s, bool is_ld, MemOpIdx oi,
+static void add_qemu_ldst_label(TCGContext *s, bool is_ld,
+                                TCGType type, MemOpIdx oi,
                                 TCGReg datalo_reg, TCGReg datahi_reg,
                                 TCGReg addrlo_reg, TCGReg addrhi_reg,
                                 tcg_insn_unit *raddr, tcg_insn_unit *lptr)
@@ -2126,6 +2127,7 @@ static void add_qemu_ldst_label(TCGContext *s, bool is_ld, MemOpIdx oi,
     TCGLabelQemuLdst *label = new_ldst_label(s);
 
     label->is_ld = is_ld;
+    label->type = type;
     label->oi = oi;
     label->datalo_reg = datalo_reg;
     label->datahi_reg = datahi_reg;
@@ -2288,30 +2290,19 @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
 
 #endif /* SOFTMMU */
 
-static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
+static void tcg_out_qemu_ld(TCGContext *s,
+                            const TCGReg datalo, const TCGReg datahi,
+                            const TCGReg addrlo, const TCGReg addrhi,
+                            const MemOpIdx oi, TCGType data_type)
 {
-    TCGReg datalo, datahi, addrlo, rbase;
-    TCGReg addrhi __attribute__((unused));
-    MemOpIdx oi;
-    MemOp opc, s_bits;
+    MemOp opc = get_memop(oi);
+    MemOp s_bits = opc & MO_SIZE;
+    TCGReg rbase, index;
+
 #ifdef CONFIG_SOFTMMU
-    int mem_index;
     tcg_insn_unit *label_ptr;
-#else
-    unsigned a_bits;
-#endif
 
-    datalo = *args++;
-    datahi = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0);
-    addrlo = *args++;
-    addrhi = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0);
-    oi = *args++;
-    opc = get_memop(oi);
-    s_bits = opc & MO_SIZE;
-
-#ifdef CONFIG_SOFTMMU
-    mem_index = get_mmuidx(oi);
-    addrlo = tcg_out_tlb_read(s, opc, addrlo, addrhi, mem_index, true);
+    index = tcg_out_tlb_read(s, opc, addrlo, addrhi, get_mmuidx(oi), true);
 
     /* Load a pointer into the current opcode w/conditional branch-link. */
     label_ptr = s->code_ptr;
@@ -2319,80 +2310,71 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
 
     rbase = TCG_REG_R3;
 #else /* !CONFIG_SOFTMMU */
-    a_bits = get_alignment_bits(opc);
+    unsigned a_bits = get_alignment_bits(opc);
     if (a_bits) {
         tcg_out_test_alignment(s, true, addrlo, addrhi, a_bits);
     }
     rbase = guest_base ? TCG_GUEST_BASE_REG : 0;
     if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
         tcg_out_ext32u(s, TCG_REG_TMP1, addrlo);
-        addrlo = TCG_REG_TMP1;
+        index = TCG_REG_TMP1;
+    } else {
+        index = addrlo;
     }
 #endif
 
     if (TCG_TARGET_REG_BITS == 32 && s_bits == MO_64) {
         if (opc & MO_BSWAP) {
-            tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4));
-            tcg_out32(s, LWBRX | TAB(datalo, rbase, addrlo));
+            tcg_out32(s, ADDI | TAI(TCG_REG_R0, index, 4));
+            tcg_out32(s, LWBRX | TAB(datalo, rbase, index));
             tcg_out32(s, LWBRX | TAB(datahi, rbase, TCG_REG_R0));
         } else if (rbase != 0) {
-            tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4));
-            tcg_out32(s, LWZX | TAB(datahi, rbase, addrlo));
+            tcg_out32(s, ADDI | TAI(TCG_REG_R0, index, 4));
+            tcg_out32(s, LWZX | TAB(datahi, rbase, index));
             tcg_out32(s, LWZX | TAB(datalo, rbase, TCG_REG_R0));
-        } else if (addrlo == datahi) {
-            tcg_out32(s, LWZ | TAI(datalo, addrlo, 4));
-            tcg_out32(s, LWZ | TAI(datahi, addrlo, 0));
+        } else if (index == datahi) {
+            tcg_out32(s, LWZ | TAI(datalo, index, 4));
+            tcg_out32(s, LWZ | TAI(datahi, index, 0));
         } else {
-            tcg_out32(s, LWZ | TAI(datahi, addrlo, 0));
-            tcg_out32(s, LWZ | TAI(datalo, addrlo, 4));
+            tcg_out32(s, LWZ | TAI(datahi, index, 0));
+            tcg_out32(s, LWZ | TAI(datalo, index, 4));
         }
     } else {
         uint32_t insn = qemu_ldx_opc[opc & (MO_BSWAP | MO_SSIZE)];
         if (!have_isa_2_06 && insn == LDBRX) {
-            tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4));
-            tcg_out32(s, LWBRX | TAB(datalo, rbase, addrlo));
+            tcg_out32(s, ADDI | TAI(TCG_REG_R0, index, 4));
+            tcg_out32(s, LWBRX | TAB(datalo, rbase, index));
             tcg_out32(s, LWBRX | TAB(TCG_REG_R0, rbase, TCG_REG_R0));
             tcg_out_rld(s, RLDIMI, datalo, TCG_REG_R0, 32, 0);
         } else if (insn) {
-            tcg_out32(s, insn | TAB(datalo, rbase, addrlo));
+            tcg_out32(s, insn | TAB(datalo, rbase, index));
         } else {
             insn = qemu_ldx_opc[opc & (MO_SIZE | MO_BSWAP)];
-            tcg_out32(s, insn | TAB(datalo, rbase, addrlo));
+            tcg_out32(s, insn | TAB(datalo, rbase, index));
             tcg_out_movext(s, TCG_TYPE_REG, datalo,
                            TCG_TYPE_REG, opc & MO_SSIZE, datalo);
         }
     }
 
 #ifdef CONFIG_SOFTMMU
-    add_qemu_ldst_label(s, true, oi, datalo, datahi, addrlo, addrhi,
-                        s->code_ptr, label_ptr);
+    add_qemu_ldst_label(s, true, data_type, oi, datalo, datahi,
+                        addrlo, addrhi, s->code_ptr, label_ptr);
 #endif
 }
 
-static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
+static void tcg_out_qemu_st(TCGContext *s,
+                            const TCGReg datalo, const TCGReg datahi,
+                            const TCGReg addrlo, const TCGReg addrhi,
+                            const MemOpIdx oi, TCGType data_type)
 {
-    TCGReg datalo, datahi, addrlo, rbase;
-    TCGReg addrhi __attribute__((unused));
-    MemOpIdx oi;
-    MemOp opc, s_bits;
+    MemOp opc = get_memop(oi);
+    MemOp s_bits = opc & MO_SIZE;
+    TCGReg rbase, index;
+
 #ifdef CONFIG_SOFTMMU
-    int mem_index;
     tcg_insn_unit *label_ptr;
-#else
-    unsigned a_bits;
-#endif
 
-    datalo = *args++;
-    datahi = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0);
-    addrlo = *args++;
-    addrhi = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0);
-    oi = *args++;
-    opc = get_memop(oi);
-    s_bits = opc & MO_SIZE;
-
-#ifdef CONFIG_SOFTMMU
-    mem_index = get_mmuidx(oi);
-    addrlo = tcg_out_tlb_read(s, opc, addrlo, addrhi, mem_index, false);
+    index = tcg_out_tlb_read(s, opc, addrlo, addrhi, get_mmuidx(oi), false);
 
     /* Load a pointer into the current opcode w/conditional branch-link. */
     label_ptr = s->code_ptr;
@@ -2400,45 +2382,47 @@ static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
 
     rbase = TCG_REG_R3;
 #else /* !CONFIG_SOFTMMU */
-    a_bits = get_alignment_bits(opc);
+    unsigned a_bits = get_alignment_bits(opc);
     if (a_bits) {
         tcg_out_test_alignment(s, false, addrlo, addrhi, a_bits);
     }
     rbase = guest_base ? TCG_GUEST_BASE_REG : 0;
     if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
         tcg_out_ext32u(s, TCG_REG_TMP1, addrlo);
-        addrlo = TCG_REG_TMP1;
+        index = TCG_REG_TMP1;
+    } else {
+        index = addrlo;
     }
 #endif
 
     if (TCG_TARGET_REG_BITS == 32 && s_bits == MO_64) {
         if (opc & MO_BSWAP) {
-            tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4));
-            tcg_out32(s, STWBRX | SAB(datalo, rbase, addrlo));
+            tcg_out32(s, ADDI | TAI(TCG_REG_R0, index, 4));
+            tcg_out32(s, STWBRX | SAB(datalo, rbase, index));
             tcg_out32(s, STWBRX | SAB(datahi, rbase, TCG_REG_R0));
         } else if (rbase != 0) {
-            tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4));
-            tcg_out32(s, STWX | SAB(datahi, rbase, addrlo));
+            tcg_out32(s, ADDI | TAI(TCG_REG_R0, index, 4));
+            tcg_out32(s, STWX | SAB(datahi, rbase, index));
             tcg_out32(s, STWX | SAB(datalo, rbase, TCG_REG_R0));
         } else {
-            tcg_out32(s, STW | TAI(datahi, addrlo, 0));
-            tcg_out32(s, STW | TAI(datalo, addrlo, 4));
+            tcg_out32(s, STW | TAI(datahi, index, 0));
+            tcg_out32(s, STW | TAI(datalo, index, 4));
         }
     } else {
         uint32_t insn = qemu_stx_opc[opc & (MO_BSWAP | MO_SIZE)];
 
         if (!have_isa_2_06 && insn == STDBRX) {
-            tcg_out32(s, STWBRX | SAB(datalo, rbase, addrlo));
-            tcg_out32(s, ADDI | TAI(TCG_REG_TMP1, addrlo, 4));
+            tcg_out32(s, STWBRX | SAB(datalo, rbase, index));
+            tcg_out32(s, ADDI | TAI(TCG_REG_TMP1, index, 4));
             tcg_out_shri64(s, TCG_REG_R0, datalo, 32);
             tcg_out32(s, STWBRX | SAB(TCG_REG_R0, rbase, TCG_REG_TMP1));
         } else {
-            tcg_out32(s, insn | SAB(datalo, rbase, addrlo));
+            tcg_out32(s, insn | SAB(datalo, rbase, index));
         }
     }
 
 #ifdef CONFIG_SOFTMMU
-    add_qemu_ldst_label(s, false, oi, datalo, datahi, addrlo, addrhi,
-                        s->code_ptr, label_ptr);
+    add_qemu_ldst_label(s, false, data_type, oi, datalo, datahi,
+                        addrlo, addrhi, s->code_ptr, label_ptr);
 #endif
 }
 
@@ -2972,16 +2956,46 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         break;
 
     case INDEX_op_qemu_ld_i32:
-        tcg_out_qemu_ld(s, args, false);
+        if (TCG_TARGET_REG_BITS >= TARGET_LONG_BITS) {
+            tcg_out_qemu_ld(s, args[0], -1, args[1], -1,
+                            args[2], TCG_TYPE_I32);
+        } else {
+            tcg_out_qemu_ld(s, args[0], -1, args[1], args[2],
+                            args[3], TCG_TYPE_I32);
+        }
         break;
     case INDEX_op_qemu_ld_i64:
-        tcg_out_qemu_ld(s, args, true);
+        if (TCG_TARGET_REG_BITS == 64) {
+            tcg_out_qemu_ld(s, args[0], -1, args[1], -1,
+                            args[2], TCG_TYPE_I64);
+        } else if (TARGET_LONG_BITS == 32) {
+            tcg_out_qemu_ld(s, args[0], args[1], args[2], -1,
+                            args[3], TCG_TYPE_I64);
+        } else {
+            tcg_out_qemu_ld(s, args[0], args[1], args[2], args[3],
+                            args[4], TCG_TYPE_I64);
+        }
         break;
     case INDEX_op_qemu_st_i32:
-        tcg_out_qemu_st(s, args, false);
+        if (TCG_TARGET_REG_BITS >= TARGET_LONG_BITS) {
+            tcg_out_qemu_st(s, args[0], -1, args[1], -1,
+                            args[2], TCG_TYPE_I32);
+        } else {
+            tcg_out_qemu_st(s, args[0], -1, args[1], args[2],
+                            args[3], TCG_TYPE_I32);
+        }
         break;
    case INDEX_op_qemu_st_i64:
-        tcg_out_qemu_st(s, args, true);
+        if (TCG_TARGET_REG_BITS == 64) {
+            tcg_out_qemu_st(s, args[0], -1, args[1], -1,
+                            args[2], TCG_TYPE_I64);
+        } else if (TARGET_LONG_BITS == 32) {
+            tcg_out_qemu_st(s, args[0], args[1], args[2], -1,
+                            args[3], TCG_TYPE_I64);
+        } else {
+            tcg_out_qemu_st(s, args[0], args[1], args[2], args[3],
+                            args[4], TCG_TYPE_I64);
+        }
         break;
 
     case INDEX_op_setcond_i32:
-- 
2.34.1