The following picks up the patch from last December, refactoring
aff_combination_expand to not use gimple_assign_rhs_to_tree
but analyze GIMPLE stmts directly.

Last December I was stuck at

FAIL: gcc.dg/tree-ssa/ivopts-lt-2.c scan-tree-dump-times ivopts "PHI" 1
FAIL: gcc.dg/tree-ssa/ivopts-lt-2.c scan-tree-dump-times ivopts "p_[0-9]* 
<" 1

where IVOPTs relied on expanding _1 as (long unsigned) i_6(D) which
doesn't add anything interesting affine-combination-wise.  I now
chose to simply retain that particular "expansion" and mark it
with ???, likewise the other point I made about hiding conversions.

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied to trunk.

Leaves us with two necessary and one stupid caller of 
gimple_assign_rhs_to_tree (will fix that).

Richard.

2019-05-16  Richard Biener  <rguent...@suse.de>

        * tree-affine.c (expr_to_aff_combination): New function split
        out from...
        (tree_to_aff_combination): ... here.
        (aff_combination_expand): Avoid building a GENERIC tree.

Index: gcc/tree-affine.c
===================================================================
--- gcc/tree-affine.c   (revision 271282)
+++ gcc/tree-affine.c   (working copy)
@@ -259,104 +259,66 @@ aff_combination_convert (aff_tree *comb,
     }
 }
 
-/* Splits EXPR into an affine combination of parts.  */
+/* Tries to handle OP0 CODE OP1 as affine combination of parts.  Returns
+   true when that was successful and returns the combination in COMB.  */
 
-void
-tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
+static bool
+expr_to_aff_combination (aff_tree *comb, tree_code code, tree type,
+                        tree op0, tree op1 = NULL_TREE)
 {
   aff_tree tmp;
-  enum tree_code code;
-  tree cst, core, toffset;
   poly_int64 bitpos, bitsize, bytepos;
-  machine_mode mode;
-  int unsignedp, reversep, volatilep;
-
-  STRIP_NOPS (expr);
 
-  code = TREE_CODE (expr);
   switch (code)
     {
     case POINTER_PLUS_EXPR:
-      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
-      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
+      tree_to_aff_combination (op0, type, comb);
+      tree_to_aff_combination (op1, sizetype, &tmp);
       aff_combination_add (comb, &tmp);
-      return;
+      return true;
 
     case PLUS_EXPR:
     case MINUS_EXPR:
-      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
-      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
+      tree_to_aff_combination (op0, type, comb);
+      tree_to_aff_combination (op1, type, &tmp);
       if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
       aff_combination_add (comb, &tmp);
-      return;
+      return true;
 
     case MULT_EXPR:
-      cst = TREE_OPERAND (expr, 1);
-      if (TREE_CODE (cst) != INTEGER_CST)
+      if (TREE_CODE (op1) != INTEGER_CST)
        break;
-      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
-      aff_combination_scale (comb, wi::to_widest (cst));
-      return;
+      tree_to_aff_combination (op0, type, comb);
+      aff_combination_scale (comb, wi::to_widest (op1));
+      return true;
 
     case NEGATE_EXPR:
-      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+      tree_to_aff_combination (op0, type, comb);
       aff_combination_scale (comb, -1);
-      return;
+      return true;
 
     case BIT_NOT_EXPR:
       /* ~x = -x - 1 */
-      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+      tree_to_aff_combination (op0, type, comb);
       aff_combination_scale (comb, -1);
       aff_combination_add_cst (comb, -1);
-      return;
-
-    case ADDR_EXPR:
-      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
-      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
-       {
-         expr = TREE_OPERAND (expr, 0);
-         tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
-         tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
-         aff_combination_add (comb, &tmp);
-         return;
-       }
-      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
-                                 &toffset, &mode, &unsignedp, &reversep,
-                                 &volatilep);
-      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
-       break;
-      aff_combination_const (comb, type, bytepos);
-      if (TREE_CODE (core) == MEM_REF)
-       {
-         tree mem_offset = TREE_OPERAND (core, 1);
-         aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
-         core = TREE_OPERAND (core, 0);
-       }
-      else
-       core = build_fold_addr_expr (core);
-
-      if (TREE_CODE (core) == ADDR_EXPR)
-       aff_combination_add_elt (comb, core, 1);
-      else
-       {
-         tree_to_aff_combination (core, type, &tmp);
-         aff_combination_add (comb, &tmp);
-       }
-      if (toffset)
-       {
-         tree_to_aff_combination (toffset, type, &tmp);
-         aff_combination_add (comb, &tmp);
-       }
-      return;
+      return true;
 
     CASE_CONVERT:
       {
-       tree otype = TREE_TYPE (expr);
-       tree inner = TREE_OPERAND (expr, 0);
+       tree otype = type;
+       tree inner = op0;
        tree itype = TREE_TYPE (inner);
        enum tree_code icode = TREE_CODE (inner);
 
+       /* STRIP_NOPS  */
+       if (tree_nop_conversion_p (otype, itype))
+         {
+           tree_to_aff_combination (op0, type, comb);
+           return true;
+         }
+
        /* In principle this is a valid folding, but it isn't necessarily
           an optimization, so do it here and not in fold_unary.  */
        if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
@@ -376,9 +338,7 @@ tree_to_aff_combination (tree expr, tree
              {
                op0 = fold_convert (otype, op0);
                op1 = fold_convert (otype, op1);
-               expr = fold_build2 (icode, otype, op0, op1);
-               tree_to_aff_combination (expr, type, comb);
-               return;
+               return expr_to_aff_combination (comb, icode, otype, op0, op1);
              }
            wide_int minv, maxv;
            /* If inner type has wrapping overflow behavior, fold conversion
@@ -399,15 +359,102 @@ tree_to_aff_combination (tree expr, tree
                  {
                    op0 = fold_convert (otype, op0);
                    op1 = fold_convert (otype, op1);
-                   expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
-                   tree_to_aff_combination (expr, type, comb);
-                   return;
+                   return expr_to_aff_combination (comb, MINUS_EXPR, otype,
+                                                   op0, op1);
                  }
              }
          }
       }
       break;
 
+    default:;
+    }
+
+  return false;
+}
+
+/* Splits EXPR into an affine combination of parts.  */
+
+void
+tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
+{
+  aff_tree tmp;
+  enum tree_code code;
+  tree core, toffset;
+  poly_int64 bitpos, bitsize, bytepos;
+  machine_mode mode;
+  int unsignedp, reversep, volatilep;
+
+  STRIP_NOPS (expr);
+
+  code = TREE_CODE (expr);
+  switch (code)
+    {
+    case POINTER_PLUS_EXPR:
+    case PLUS_EXPR:
+    case MINUS_EXPR:
+    case MULT_EXPR:
+      if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0),
+                                  TREE_OPERAND (expr, 1)))
+       return;
+      break;
+
+    case NEGATE_EXPR:
+    case BIT_NOT_EXPR:
+      if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0)))
+       return;
+      break;
+
+    CASE_CONVERT:
+      /* ???  TREE_TYPE (expr) should be equal to type here, but IVOPTS
+        calls this with not showing an outer widening cast.  */
+      if (expr_to_aff_combination (comb, code,
+                                  TREE_TYPE (expr), TREE_OPERAND (expr, 0)))
+       {
+         aff_combination_convert (comb, type);
+         return;
+       }
+      break;
+
+    case ADDR_EXPR:
+      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
+      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
+       {
+         expr = TREE_OPERAND (expr, 0);
+         tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+         tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
+         aff_combination_add (comb, &tmp);
+         return;
+       }
+      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
+                                 &toffset, &mode, &unsignedp, &reversep,
+                                 &volatilep);
+      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
+       break;
+      aff_combination_const (comb, type, bytepos);
+      if (TREE_CODE (core) == MEM_REF)
+       {
+         tree mem_offset = TREE_OPERAND (core, 1);
+         aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
+         core = TREE_OPERAND (core, 0);
+       }
+      else
+       core = build_fold_addr_expr (core);
+
+      if (TREE_CODE (core) == ADDR_EXPR)
+       aff_combination_add_elt (comb, core, 1);
+      else
+       {
+         tree_to_aff_combination (core, type, &tmp);
+         aff_combination_add (comb, &tmp);
+       }
+      if (toffset)
+       {
+         tree_to_aff_combination (toffset, type, &tmp);
+         aff_combination_add (comb, &tmp);
+       }
+      return;
+
     default:
       {
        if (poly_int_tree_p (expr))
@@ -665,7 +712,7 @@ aff_combination_expand (aff_tree *comb A
 {
   unsigned i;
   aff_tree to_add, current, curre;
-  tree e, rhs;
+  tree e;
   gimple *def;
   widest_int scale;
   struct name_expansion *exp;
@@ -715,20 +762,38 @@ aff_combination_expand (aff_tree *comb A
            case PLUS_EXPR:
            case MINUS_EXPR:
            case MULT_EXPR:
+             if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
+                                           gimple_assign_rhs1 (def),
+                                           gimple_assign_rhs2 (def)))
+               continue;
+             break;
            case NEGATE_EXPR:
            case BIT_NOT_EXPR:
+             if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
+                                           gimple_assign_rhs1 (def)))
+               continue;
+             break;
            CASE_CONVERT:
-             rhs = gimple_assign_rhs_to_tree (def);
+             if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
+                                           gimple_assign_rhs1 (def)))
+               /* This makes us always expand conversions which we did
+                  in the past and makes gcc.dg/tree-ssa/ivopts-lt-2.c
+                  PASS, eliminating one induction variable in IVOPTs.
+                  ???  But it is really excessive and we should try
+                  harder to do without it.  */
+               aff_combination_elt (&current, TREE_TYPE (name),
+                                    fold_convert (TREE_TYPE (name),
+                                                  gimple_assign_rhs1 (def)));
              break;
            case ADDR_EXPR:
            case INTEGER_CST:
            case POLY_INT_CST:
-             rhs = gimple_assign_rhs1 (def);
+             tree_to_aff_combination (gimple_assign_rhs1 (def),
+                                      TREE_TYPE (name), &current);
              break;
            default:
              continue;
            }
-         tree_to_aff_combination (rhs, TREE_TYPE (name), &current);
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          if (!*cache)

Reply via email to