This moves another fold-const.c folding to the GIMPLE level.
In PR59058 it was noticed we fail to optimize

  vect_vec_iv_.16_57 = VIEW_CONVERT_EXPR<vector(8) short int>(vect_vec_iv_.15_55);
  vect_b.17_58 = VIEW_CONVERT_EXPR<vector(8) unsigned short>(vect_vec_iv_.16_57);

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied.

Richard.

2013-11-21  Richard Biener  <rguenther@suse.de>

        * tree-ssa-forwprop.c (simplify_vce): New function.
        (ssa_forward_propagate_and_combine): Call it.

Index: gcc/tree-ssa-forwprop.c
===================================================================
*** gcc/tree-ssa-forwprop.c     (revision 205121)
--- gcc/tree-ssa-forwprop.c     (working copy)
*************** combine_conversions (gimple_stmt_iterato
*** 2994,2999 ****
--- 2994,3062 ----
    return 0;
  }
  
+ /* Combine VIEW_CONVERT_EXPRs with their defining statement.  */
+ 
+ static bool
+ simplify_vce (gimple_stmt_iterator *gsi)
+ {
+   gimple stmt = gsi_stmt (*gsi);
+   tree type = TREE_TYPE (gimple_assign_lhs (stmt));
+ 
+   /* Drop VIEW_CONVERT_EXPRs that are no-ops for the type system.  */
+   tree op = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
+   if (useless_type_conversion_p (type, TREE_TYPE (op)))
+     {
+       gimple_assign_set_rhs1 (stmt, op);
+       update_stmt (stmt);
+       return true;
+     }
+ 
+   if (TREE_CODE (op) != SSA_NAME)
+     return false;
+ 
+   gimple def_stmt = SSA_NAME_DEF_STMT (op);
+   if (!is_gimple_assign (def_stmt))
+     return false;
+ 
+   tree def_op = gimple_assign_rhs1 (def_stmt);
+   switch (gimple_assign_rhs_code (def_stmt))
+     {
+     CASE_CONVERT:
+       /* Strip integral conversions that do not change the precision.  */
+       if ((INTEGRAL_TYPE_P (TREE_TYPE (op))
+          || POINTER_TYPE_P (TREE_TYPE (op)))
+         && (INTEGRAL_TYPE_P (TREE_TYPE (def_op))
+             || POINTER_TYPE_P (TREE_TYPE (def_op)))
+         && (TYPE_PRECISION (TREE_TYPE (op))
+             == TYPE_PRECISION (TREE_TYPE (def_op))))
+       {
+         TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) = def_op;
+         update_stmt (stmt);
+         return true;
+       }
+       break;
+ 
+     case VIEW_CONVERT_EXPR:
+       /* Series of VIEW_CONVERT_EXPRs on register operands can
+        be contracted.  */
+       if (TREE_CODE (TREE_OPERAND (def_op, 0)) == SSA_NAME)
+       {
+         if (useless_type_conversion_p (type,
+                                        TREE_TYPE (TREE_OPERAND (def_op, 0))))
+           gimple_assign_set_rhs1 (stmt, TREE_OPERAND (def_op, 0));
+         else
+           TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)
+               = TREE_OPERAND (def_op, 0);
+         update_stmt (stmt);
+         return true;
+       }
+       /* FALLTHRU -- nothing to do if the inner operand is not an SSA name.  */
+     default:;
+     }
+ 
+   return false;
+ }
+ 
  /* Combine an element access with a shuffle.  Returns true if there were
     any changes made, else it returns false.  */
   
*************** ssa_forward_propagate_and_combine (void)
*** 3491,3496 ****
--- 3554,3561 ----
                      
                    changed = did_something != 0;
                  }
+               else if (code == VIEW_CONVERT_EXPR)
+                 changed = simplify_vce (&gsi);
                else if (code == VEC_PERM_EXPR)
                  {
                    int did_something = simplify_permutation (&gsi);

Reply via email to