Hi,

Please find attached the revised patch that moves the duplicate folding
patterns (posted in the earlier part of this series) from fold-const.c to match.pd.

Please review it and let me know if any further changes are needed.

Thanks,
Naveen
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index de45a2c..b36e2f5 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -9232,26 +9232,6 @@ fold_binary_loc (location_t loc,
       return NULL_TREE;
 
     case PLUS_EXPR:
-      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
-	{
-	  /* X + (X / CST) * -CST is X % CST.  */
-	  if (TREE_CODE (arg1) == MULT_EXPR
-	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
-	      && operand_equal_p (arg0,
-				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
-	    {
-	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
-	      tree cst1 = TREE_OPERAND (arg1, 1);
-	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
-				      cst1, cst0);
-	      if (sum && integer_zerop (sum))
-		return fold_convert_loc (loc, type,
-					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
-						      TREE_TYPE (arg0), arg0,
-						      cst0));
-	    }
-	}
-
       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
 	 one.  Make sure the type is not saturating and has the signedness of
 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
@@ -9692,28 +9672,6 @@ fold_binary_loc (location_t loc,
 			    fold_convert_loc (loc, type,
 					      TREE_OPERAND (arg0, 0)));
 
-      if (! FLOAT_TYPE_P (type))
-	{
-	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
-	     any power of 2 minus 1.  */
-	  if (TREE_CODE (arg0) == BIT_AND_EXPR
-	      && TREE_CODE (arg1) == BIT_AND_EXPR
-	      && operand_equal_p (TREE_OPERAND (arg0, 0),
-				  TREE_OPERAND (arg1, 0), 0))
-	    {
-	      tree mask0 = TREE_OPERAND (arg0, 1);
-	      tree mask1 = TREE_OPERAND (arg1, 1);
-	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
-
-	      if (operand_equal_p (tem, mask1, 0))
-		{
-		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
-				     TREE_OPERAND (arg0, 0), mask1);
-		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
-		}
-	    }
-	}
-
       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
 	 signed zeros are involved.  */
@@ -10013,28 +9971,6 @@ fold_binary_loc (location_t loc,
 				    arg1);
 	}
 
-      /* (X & ~Y) | (~X & Y) is X ^ Y */
-      if (TREE_CODE (arg0) == BIT_AND_EXPR
-	  && TREE_CODE (arg1) == BIT_AND_EXPR)
-        {
-	  tree a0, a1, l0, l1, n0, n1;
-
-	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
-	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
-
-	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
-	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
-	  
-	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
-	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
-	  
-	  if ((operand_equal_p (n0, a0, 0)
-	       && operand_equal_p (n1, a1, 0))
-	      || (operand_equal_p (n0, a1, 0)
-		  && operand_equal_p (n1, a0, 0)))
-	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
-	}
-
       /* See if this can be simplified into a rotate first.  If that
 	 is unsuccessful continue in the association code.  */
       goto bit_rotate;
diff --git a/gcc/match.pd b/gcc/match.pd
index f3813d8..5ee345e 100644
--- a/gcc/match.pd
+++ b/gcc/match.pd
@@ -324,6 +324,42 @@ along with GCC; see the file COPYING3.  If not see
     (if (real_isinteger (&TREE_REAL_CST (@1), &n) && (n & 1) == 0)
      (pows @0 @1))))))
 
+/* Fold X + (X / CST) * -CST to X % CST.  */
+(simplify
+ (plus (convert? @0) (convert? (mult (trunc_div @0 @1) (negate @1))))
+  (if (INTEGRAL_TYPE_P (type)
+       && tree_nop_conversion_p (type, TREE_TYPE (@0)))
+   (trunc_mod (convert @0) (convert @1))))
+(simplify
+ (plus (convert? @0) (convert? (mult (trunc_div @0 INTEGER_CST@1) INTEGER_CST@2)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
+       && wi::add (@1, @2) == 0)
+   (trunc_mod (convert @0) (convert @1))))
+
+/* Fold (A & ~B) - (A & B) into (A ^ B) - B.  */
+(simplify
+ (minus (bit_and:s @0 (bit_not @1)) (bit_and:s @0 @1))
+  (if (! FLOAT_TYPE_P (type))
+   (minus (bit_xor @0 @1) @1)))
+(simplify
+ (minus (bit_and:s @0 INTEGER_CST@2) (bit_and:s @0 INTEGER_CST@1))
+ (if (! FLOAT_TYPE_P (type)
+      && wi::eq_p (const_unop (BIT_NOT_EXPR, TREE_TYPE (@2), @2), @1))
+  (minus (bit_xor @0 @1) @1)))
+
+/* Simplify (X & ~Y) | (~X & Y) -> X ^ Y.  */
+(simplify
+ (bit_ior (bit_and:c @0 (bit_not @1)) (bit_and:c (bit_not @0) @1))
+  (bit_xor @0 @1))
+(simplify
+ (bit_ior (bit_and:c @0 INTEGER_CST@2) (bit_and:c (bit_not @0) INTEGER_CST@1))
+  (if (wi::eq_p (const_unop (BIT_NOT_EXPR, TREE_TYPE (@2), @2), @1))
+   (bit_xor @0 @1)))
+(simplify
+ (bit_ior (bit_and:c INTEGER_CST@0 (bit_not @1)) (bit_and:c (bit_not INTEGER_CST@2) @1))
+  (if (wi::eq_p (const_unop (BIT_NOT_EXPR, TREE_TYPE (@2), @2), @0))
+   (bit_xor @0 @1)))
+
 /* X % Y is smaller than Y.  */
 (for cmp (lt ge)
  (simplify
@@ -637,10 +673,10 @@ along with GCC; see the file COPYING3.  If not see
 (match (logical_inverted_value @0)
  (bit_xor truth_valued_p@0 integer_truep))
 
-/* X & !X -> 0.  */
+/* X & !X or X & ~X -> 0.  */
 (simplify
  (bit_and:c @0 (logical_inverted_value @0))
- { build_zero_cst (type); })
+  { build_zero_cst (type); })
 /* X | !X and X ^ !X -> 1, , if X is truth-valued.  */
 (for op (bit_ior bit_xor)
  (simplify

Reply via email to