Hi,
Thanks for the review and suggestions.
>> Please do not drop A - B -> A + (-B) from fold-const as match.pd
>> doesn't implement all of fold-const.c negate_expr_p support.
Done.
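For example (just an illustration, not part of the patch; the function
name is made up), negate_expr_p lets fold-const.c negate a
multiplication through its constant operand:

  int f (int a, int b) { return a - b * 5; }  /* A - B -> A + (-B), i.e. a + b * -5 */

match.pd has no equivalent of that negate_expr_p reasoning yet, which
is why the A - B -> A + (-B) fold stays in fold-const.c.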
>> which is more expensive. This means that we miss a
>> (bit_and (bit_not @0) INTEGER_CST@1)
Should we have this pattern implemented in match.pd?
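For concreteness (illustration only; the function name is made up),
that shape corresponds to source like:

  unsigned masked_not (unsigned x) { return ~x & 0xffu; }

i.e. (bit_and (bit_not @0) INTEGER_CST@1) with @0 = x and @1 = 0xff.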
>> negate_expr_p doesn't capture everything
>> fold-const.c does so moving the above isn't a good idea.
Dropped the pattern. I was working on some more patterns that used
negate_expr_p and will drop all of them as well.
>> fold-const.c only handles constant C, so we only need the 2nd pattern.
Yeah. I thought that even the variable case would be optimized in a
similar manner and hence had that pattern as well.
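To spell out the difference with a made-up example:

  int f1 (int a)        { return (a + a) * 3; }  /* constant C: folds to a * 6 */
  int f2 (int a, int b) { return (a + a) * b; }  /* variable: the dropped 1st pattern */

Only the constant form is handled by fold-const.c, so only that one is
moved to match.pd.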
Please find attached the patch modified as per the suggestions.
Please review it and let me know if any further modifications are
needed.
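For reference, a sketch of inputs the new bit_and pattern should now
simplify (not a testsuite addition; functions are made up):

  int g1 (int x, int y) { return x & (x ^ y); }  /* -> x & ~y */
  int g2 (int x, int y) { return (y ^ x) & x; }  /* the :c flags catch the swapped forms too */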
Thanks,
Naveen
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index ee9b349..c34d462 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -9739,18 +9739,6 @@ fold_binary_loc (location_t loc,
negate_expr (arg0)),
tem);
- /* (A + A) * C -> A * 2 * C */
- if (TREE_CODE (arg0) == PLUS_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1), 0))
- return fold_build2_loc (loc, MULT_EXPR, type,
- omit_one_operand_loc (loc, type,
- TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1)),
- fold_build2_loc (loc, MULT_EXPR, type,
- build_int_cst (type, 2) , arg1));
-
/* ((T) (X /[ex] C)) * C cancels out if the conversion is
sign-changing only. */
if (TREE_CODE (arg1) == INTEGER_CST
@@ -9940,45 +9928,6 @@ fold_binary_loc (location_t loc,
build_zero_cst (TREE_TYPE (tem)));
}
- /* Fold (X ^ Y) & Y as ~X & Y. */
- if (TREE_CODE (arg0) == BIT_XOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- {
- tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
- fold_convert_loc (loc, type, arg1));
- }
- /* Fold (X ^ Y) & X as ~Y & X. */
- if (TREE_CODE (arg0) == BIT_XOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- {
- tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
- fold_convert_loc (loc, type, arg1));
- }
- /* Fold X & (X ^ Y) as X & ~Y. */
- if (TREE_CODE (arg1) == BIT_XOR_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- {
- tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_convert_loc (loc, type, arg0),
- fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
- }
- /* Fold X & (Y ^ X) as ~Y & X. */
- if (TREE_CODE (arg1) == BIT_XOR_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- {
- tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
- fold_convert_loc (loc, type, arg0));
- }
-
/* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
multiple of 1 << CST. */
if (TREE_CODE (arg1) == INTEGER_CST)
diff --git a/gcc/match.pd b/gcc/match.pd
index f6c5c07..bd33ea2 100644
--- a/gcc/match.pd
+++ b/gcc/match.pd
@@ -492,6 +492,12 @@ DEFINE_INT_AND_FLOAT_ROUND_FN (RINT)
(if (wi::bit_not (@2) == @1)
(bit_xor @0 @1)))
+/* Fold X & (X ^ Y) as X & ~Y. */
+(simplify
+ (bit_and:c (convert? @0) (convert? (bit_xor:c @0 @1)))
+ (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+ (convert (bit_and @0 (bit_not @1)))))
+
/* X % Y is smaller than Y. */
(for cmp (lt ge)
(simplify
@@ -1608,6 +1614,11 @@ DEFINE_INT_AND_FLOAT_ROUND_FN (RINT)
(if (SCALAR_FLOAT_TYPE_P (type))
(mult @0 { build_real (type, dconst2); })))
+/* Convert (A + A) * C -> A * 2 * C. */
+(simplify
+ (mult (convert? (plus @0 @0)) INTEGER_CST@1)
+ (mult (convert @0) (mult { build_int_cst (TREE_TYPE (@1), 2); } @1)))
+
(simplify
(minus integer_zerop @1)
(negate @1))