Trivial, as we simply need to load a different constant in the conditional move: -1 (all ones, via MVN with immediate 0) instead of 1.
Reviewed-by: Peter Maydell <peter.mayd...@linaro.org>
Signed-off-by: Richard Henderson <richard.hender...@linaro.org>
---
 tcg/arm/tcg-target.h     | 2 +-
 tcg/arm/tcg-target.c.inc | 9 +++++++++
 2 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/tcg/arm/tcg-target.h b/tcg/arm/tcg-target.h
index b076d033a9..b064bbda9f 100644
--- a/tcg/arm/tcg-target.h
+++ b/tcg/arm/tcg-target.h
@@ -122,7 +122,7 @@ extern bool use_neon_instructions;
 #define TCG_TARGET_HAS_mulsh_i32        0
 #define TCG_TARGET_HAS_div_i32          use_idiv_instructions
 #define TCG_TARGET_HAS_rem_i32          0
-#define TCG_TARGET_HAS_negsetcond_i32   0
+#define TCG_TARGET_HAS_negsetcond_i32   1
 #define TCG_TARGET_HAS_qemu_st8_i32     0
 
 #define TCG_TARGET_HAS_qemu_ldst_i128   0
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index 83e286088f..162df38c73 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -1975,6 +1975,14 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_dat_imm(s, tcg_cond_to_arm_cond[tcg_invert_cond(args[3])],
                         ARITH_MOV, args[0], 0, 0);
         break;
+    case INDEX_op_negsetcond_i32:
+        tcg_out_dat_rIN(s, COND_AL, ARITH_CMP, ARITH_CMN, 0,
+                        args[1], args[2], const_args[2]);
+        tcg_out_dat_imm(s, tcg_cond_to_arm_cond[args[3]],
+                        ARITH_MVN, args[0], 0, 0);
+        tcg_out_dat_imm(s, tcg_cond_to_arm_cond[tcg_invert_cond(args[3])],
+                        ARITH_MOV, args[0], 0, 0);
+        break;
 
     case INDEX_op_brcond2_i32:
         c = tcg_out_cmp2(s, args, const_args);
@@ -2112,6 +2120,7 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
     case INDEX_op_add_i32:
     case INDEX_op_sub_i32:
     case INDEX_op_setcond_i32:
+    case INDEX_op_negsetcond_i32:
        return C_O1_I2(r, r, rIN);
 
     case INDEX_op_and_i32:
-- 
2.34.1
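
For readers less familiar with the opcode: the new case emits a compare followed by two conditional constant loads, MVN #0 (i.e. ~0 == -1) when the condition holds and MOV #0 otherwise. A minimal C sketch of the operation's semantics, specialised to "equal" (the helper name is illustrative only, not part of TCG or of this patch):

/* negsetcond: dest = -(a cond b).
 * The result is all ones (-1) when the condition holds, else 0,
 * which is exactly what the conditional MVN/MOV pair produces. */
static inline int32_t negsetcond_eq_i32(int32_t a, int32_t b)
{
    return -(int32_t)(a == b);
}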