Signed-off-by: Richard Henderson <richard.hender...@linaro.org>
---
 tcg/aarch64/tcg-target-con-set.h |  4 +--
 tcg/aarch64/tcg-target.c.inc     | 58 +++++++++++++++++++++-----------
 2 files changed, 41 insertions(+), 21 deletions(-)
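Not part of the patch itself: a minimal, self-contained C sketch of the condition semantics the new mapping relies on. TSTEQ/TSTNE test whether (a & b) is zero or non-zero, which is exactly what the EQ/NE flags report after ANDS against XZR (the TST alias emitted by the new tcg_out_cmp() path). The helper names below are illustrative only.

/* Illustrative only -- not QEMU code.  Models what the backend now emits
 * for bit-test conditions: ANDS xzr, a, b (i.e. TST a, b) sets Z from
 * (a & b), so COND_EQ is taken for TSTEQ ((a & b) == 0) and COND_NE for
 * TSTNE ((a & b) != 0). */
#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

static bool tsteq(uint64_t a, uint64_t b) { return (a & b) == 0; }
static bool tstne(uint64_t a, uint64_t b) { return (a & b) != 0; }

int main(void)
{
    assert(tstne(0x10, 0x10));   /* bit 4 set   -> TSTNE taken */
    assert(tsteq(0x0f, 0x10));   /* bit 4 clear -> TSTEQ taken */
    /* The two conditions are complements, like COND_EQ/COND_NE. */
    assert(tsteq(0x1234, 0xff00) != tstne(0x1234, 0xff00));
    return 0;
}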
diff --git a/tcg/aarch64/tcg-target-con-set.h b/tcg/aarch64/tcg-target-con-set.h
index 3fdee26a3d..a927cd11a2 100644
--- a/tcg/aarch64/tcg-target-con-set.h
+++ b/tcg/aarch64/tcg-target-con-set.h
@@ -10,7 +10,7 @@
  * tcg-target-con-str.h; the constraint combination is inclusive or.
  */
 C_O0_I1(r)
-C_O0_I2(r, rA)
+C_O0_I2(r, rAL)
 C_O0_I2(rZ, r)
 C_O0_I2(w, r)
 C_O0_I3(rZ, rZ, r)
@@ -31,6 +31,6 @@ C_O1_I2(w, w, wN)
 C_O1_I2(w, w, wO)
 C_O1_I2(w, w, wZ)
 C_O1_I3(w, w, w, w)
-C_O1_I4(r, r, rA, rZ, rZ)
+C_O1_I4(r, r, rAL, rZ, rZ)
 C_O2_I1(r, r, r)
 C_O2_I4(r, r, rZ, rZ, rA, rMZ)
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index a3efa1e67a..2a748b3ee3 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -344,6 +344,9 @@ static const enum aarch64_cond_code tcg_cond_to_aarch64[] = {
     [TCG_COND_GTU] = COND_HI,
     [TCG_COND_GEU] = COND_HS,
     [TCG_COND_LEU] = COND_LS,
+    /* bit test */
+    [TCG_COND_TSTEQ] = COND_EQ,
+    [TCG_COND_TSTNE] = COND_NE,
 };
 
 typedef enum {
@@ -1341,20 +1344,35 @@ static inline void tcg_out_dep(TCGContext *s, TCGType ext, TCGReg rd,
     tcg_out_bfm(s, ext, rd, rn, a, b);
 }
 
-static void tcg_out_cmp(TCGContext *s, TCGType ext, TCGReg a,
+static void tcg_out_cmp(TCGContext *s, TCGType ext, TCGCond cond, TCGReg a,
                         tcg_target_long b, bool const_b)
 {
-    if (const_b) {
-        /* Using CMP or CMN aliases. */
-        if (b >= 0) {
-            tcg_out_insn(s, 3401, SUBSI, ext, TCG_REG_XZR, a, b);
-        } else {
-            tcg_out_insn(s, 3401, ADDSI, ext, TCG_REG_XZR, a, -b);
+    if (is_tst_cond(cond)) {
+        if (const_b) {
+            if (is_limm(b)) {
+                tcg_out_logicali(s, I3404_ANDSI, 0, TCG_REG_XZR, a, b);
+                return;
+            }
+            tcg_out_movi(s, ext, TCG_REG_TMP0, b);
+            b = TCG_REG_TMP0;
         }
-    } else {
-        /* Using CMP alias SUBS wzr, Wn, Wm */
-        tcg_out_insn(s, 3502, SUBS, ext, TCG_REG_XZR, a, b);
+        tcg_out_insn(s, 3510, ANDS, ext, TCG_REG_XZR, a, b);
+        return;
     }
+
+    if (const_b) {
+        if (is_aimm(b)) {
+            tcg_out_insn(s, 3401, SUBSI, ext, TCG_REG_XZR, a, b);
+            return;
+        }
+        if (is_aimm(-b)) {
+            tcg_out_insn(s, 3401, ADDSI, ext, TCG_REG_XZR, a, -b);
+            return;
+        }
+        tcg_out_movi(s, ext, TCG_REG_TMP0, b);
+        b = TCG_REG_TMP0;
+    }
+    tcg_out_insn(s, 3502, SUBS, ext, TCG_REG_XZR, a, b);
 }
 
 static void tcg_out_goto(TCGContext *s, const tcg_insn_unit *target)
@@ -1401,7 +1419,7 @@ static void tcg_out_brcond(TCGContext *s, TCGType ext, TCGCond c, TCGArg a,
         need_cmp = false;
     } else {
         need_cmp = true;
-        tcg_out_cmp(s, ext, a, b, b_const);
+        tcg_out_cmp(s, ext, c, a, b, b_const);
     }
 
     if (!l->has_value) {
@@ -1574,7 +1592,7 @@ static void tcg_out_cltz(TCGContext *s, TCGType ext, TCGReg d,
     } else {
         AArch64Insn sel = I3506_CSEL;
 
-        tcg_out_cmp(s, ext, a0, 0, 1);
+        tcg_out_cmp(s, ext, TCG_COND_NE, a0, 0, 1);
         tcg_out_insn(s, 3507, CLZ, ext, TCG_REG_TMP0, a1);
 
         if (const_b) {
@@ -1719,7 +1737,7 @@ static TCGLabelQemuLdst *prepare_host_addr(TCGContext *s, HostAddress *h,
                      addr_adj, compare_mask);
 
         /* Perform the address comparison. */
-        tcg_out_cmp(s, addr_type, TCG_REG_TMP0, TCG_REG_TMP2, 0);
+        tcg_out_cmp(s, addr_type, TCG_COND_NE, TCG_REG_TMP0, TCG_REG_TMP2, 0);
 
         /* If not equal, we jump to the slow path. */
         ldst->label_ptr[0] = s->code_ptr;
@@ -2275,7 +2293,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         a2 = (int32_t)a2;
         /* FALLTHRU */
     case INDEX_op_setcond_i64:
-        tcg_out_cmp(s, ext, a1, a2, c2);
+        tcg_out_cmp(s, ext, args[3], a1, a2, c2);
         /* Use CSET alias of CSINC Wd, WZR, WZR, invert(cond). */
         tcg_out_insn(s, 3506, CSINC, TCG_TYPE_I32, a0, TCG_REG_XZR,
                      TCG_REG_XZR, tcg_invert_cond(args[3]));
@@ -2285,7 +2303,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         a2 = (int32_t)a2;
         /* FALLTHRU */
     case INDEX_op_negsetcond_i64:
-        tcg_out_cmp(s, ext, a1, a2, c2);
+        tcg_out_cmp(s, ext, args[3], a1, a2, c2);
         /* Use CSETM alias of CSINV Wd, WZR, WZR, invert(cond). */
         tcg_out_insn(s, 3506, CSINV, ext, a0, TCG_REG_XZR,
                      TCG_REG_XZR, tcg_invert_cond(args[3]));
@@ -2295,7 +2313,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         a2 = (int32_t)a2;
         /* FALLTHRU */
     case INDEX_op_movcond_i64:
-        tcg_out_cmp(s, ext, a1, a2, c2);
+        tcg_out_cmp(s, ext, args[5], a1, a2, c2);
         tcg_out_insn(s, 3506, CSEL, ext, a0, REG0(3), REG0(4), args[5]);
         break;
 
@@ -2895,11 +2913,13 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
     case INDEX_op_add_i64:
     case INDEX_op_sub_i32:
     case INDEX_op_sub_i64:
+        return C_O1_I2(r, r, rA);
+
     case INDEX_op_setcond_i32:
     case INDEX_op_setcond_i64:
     case INDEX_op_negsetcond_i32:
    case INDEX_op_negsetcond_i64:
-        return C_O1_I2(r, r, rA);
+        return C_O1_I2(r, r, rAL);
 
     case INDEX_op_mul_i32:
     case INDEX_op_mul_i64:
@@ -2949,11 +2969,11 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
 
     case INDEX_op_brcond_i32:
     case INDEX_op_brcond_i64:
-        return C_O0_I2(r, rA);
+        return C_O0_I2(r, rAL);
 
     case INDEX_op_movcond_i32:
     case INDEX_op_movcond_i64:
-        return C_O1_I4(r, r, rA, rZ, rZ);
+        return C_O1_I4(r, r, rAL, rZ, rZ);
 
     case INDEX_op_qemu_ld_a32_i32:
     case INDEX_op_qemu_ld_a64_i32:
-- 
2.34.1