load_p is set and used to indicate whether the stmt is a memory
operation, not whether it is only a load.  The following renames it
to ldst_p to avoid this confusion.  It also replaces checking for
a VUSE with checking STMT_VINFO_DATA_REF, since VUSE checking
doesn't work for pattern-matched stores where no virtual operands
are present.  Where we want to distinguish between loads and stores
we now check DR_IS_READ/WRITE on the data reference.
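
In other words, the classification now keys off the recorded data
reference rather than virtual operands.  Roughly, as a simplified
sketch of the idea rather than the exact code from the patch
(stmt_info stands for the stmt_vec_info at hand):

  bool ldst_p = STMT_VINFO_DATA_REF (stmt_info) != nullptr;
  if (ldst_p)
    {
      data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
      if (DR_IS_READ (dr))
        ;  /* Handle the load case.  */
      else  /* DR_IS_WRITE (dr) holds.  */
        ;  /* Handle the store case.  */
    }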

I had made a classification mistake in the .MASK_STORE support,
and that mistake hits further complications when dealing with
single-lane SLP.
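
For reference, a masked store appears in GIMPLE as an
internal-function call along the lines of (schematic only; the
argument names are invented):

  .MASK_STORE (addr_3, 32B, mask_5, vect_val_7);

Before this change only rhs_code was set to CFN_MASK_STORE for such
a call, while load_p stayed false, so it was not classified as a
memory operation.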

Bootstrapped and tested on x86_64-unknown-linux-gnu, pushed.

        * tree-vect-slp.cc (vect_build_slp_tree_1): Rename
        load_p to ldst_p, fix mistakes and rely on
        STMT_VINFO_DATA_REF.
---
 gcc/tree-vect-slp.cc | 42 ++++++++++++++++++++++++------------------
 1 file changed, 24 insertions(+), 18 deletions(-)

diff --git a/gcc/tree-vect-slp.cc b/gcc/tree-vect-slp.cc
index 0b1c2233017..0cf6e02285e 100644
--- a/gcc/tree-vect-slp.cc
+++ b/gcc/tree-vect-slp.cc
@@ -984,7 +984,7 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
   bool need_same_oprnds = false;
   tree vectype = NULL_TREE, first_op1 = NULL_TREE;
   stmt_vec_info first_load = NULL, prev_first_load = NULL;
-  bool first_stmt_load_p = false, load_p = false;
+  bool first_stmt_ldst_p = false, ldst_p = false;
   bool first_stmt_phi_p = false, phi_p = false;
   bool maybe_soft_fail = false;
   tree soft_fail_nunits_vectype = NULL_TREE;
@@ -1074,9 +1074,12 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
          if (cfn == CFN_MASK_LOAD
              || cfn == CFN_GATHER_LOAD
              || cfn == CFN_MASK_GATHER_LOAD)
-           load_p = true;
+           ldst_p = true;
          else if (cfn == CFN_MASK_STORE)
-           rhs_code = CFN_MASK_STORE;
+           {
+             ldst_p = true;
+             rhs_code = CFN_MASK_STORE;
+           }
          else if ((internal_fn_p (cfn)
                    && !vectorizable_internal_fn_p (as_internal_fn (cfn)))
                   || gimple_call_tail_p (call_stmt)
@@ -1102,7 +1105,7 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
       else
        {
          rhs_code = gimple_assign_rhs_code (stmt);
-         load_p = gimple_vuse (stmt);
+         ldst_p = STMT_VINFO_DATA_REF (stmt_info) != nullptr;
        }
 
       /* Check the operation.  */
@@ -1110,7 +1113,7 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
        {
          *node_vectype = vectype;
          first_stmt_code = rhs_code;
-         first_stmt_load_p = load_p;
+         first_stmt_ldst_p = ldst_p;
          first_stmt_phi_p = phi_p;
 
          /* Shift arguments should be equal in all the packed stmts for a
@@ -1144,7 +1147,7 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
               need_same_oprnds = true;
               first_op1 = gimple_assign_rhs2 (stmt);
             }
-         else if (!load_p
+         else if (!ldst_p
                   && rhs_code == BIT_FIELD_REF)
            {
              tree vec = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
@@ -1207,7 +1210,7 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
                        || rhs_code == INDIRECT_REF
                        || rhs_code == COMPONENT_REF
                        || rhs_code == MEM_REF)))
-             || first_stmt_load_p != load_p
+             || first_stmt_ldst_p != ldst_p
              || first_stmt_phi_p != phi_p)
            {
              if (dump_enabled_p ())
@@ -1222,7 +1225,7 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
              continue;
            }
 
-         if (!load_p
+         if (!ldst_p
              && first_stmt_code == BIT_FIELD_REF
              && (TREE_OPERAND (gimple_assign_rhs1 (first_stmt_info->stmt), 0)
                  != TREE_OPERAND (gimple_assign_rhs1 (stmt_info->stmt), 0)))
@@ -1291,12 +1294,13 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
       /* Grouped store or load.  */
       if (STMT_VINFO_GROUPED_ACCESS (stmt_info))
        {
-         if (!load_p)
+         gcc_assert (ldst_p);
+         if (DR_IS_WRITE (STMT_VINFO_DATA_REF (stmt_info)))
            {
              /* Store.  */
              gcc_assert (rhs_code == CFN_MASK_STORE
-                         || REFERENCE_CLASS_P (lhs));
-             ;
+                         || REFERENCE_CLASS_P (lhs)
+                         || DECL_P (lhs));
            }
          else
            {
@@ -1321,10 +1325,11 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
               else
                 prev_first_load = first_load;
            }
-        } /* Grouped access.  */
-      else
+       }
+      /* Non-grouped store or load.  */
+      else if (ldst_p)
        {
-         if (load_p
+         if (DR_IS_READ (STMT_VINFO_DATA_REF (stmt_info))
              && rhs_code != CFN_GATHER_LOAD
              && rhs_code != CFN_MASK_GATHER_LOAD
              /* Not grouped loads are handled as externals for BB
@@ -1345,10 +1350,11 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
              matches[0] = false;
              return false;
            }
-
-         /* Not memory operation.  */
-         if (!load_p
-             && !phi_p
+       }
+      /* Not memory operation.  */
+      else
+       {
+         if (!phi_p
              && rhs_code.is_tree_code ()
              && TREE_CODE_CLASS (tree_code (rhs_code)) != tcc_binary
              && TREE_CODE_CLASS (tree_code (rhs_code)) != tcc_unary
-- 
2.35.3
