This fixes a bogus alignment computation for negative strided
grouped accesses, which we can now vectorize, but only through SLP.
The biasing for negative strided non-grouped accesses as supported
via VMAT_CONTIGUOUS_REVERSE does not apply there and instead causes
the alignment analysis to compute incorrect misalignment.  The
following restricts the biasing to non-grouped accesses for the
moment, leaving bigger refactoring for another time.
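
To illustrate the bias in question: for a backward-running DR the
vector access actually starts TYPE_VECTOR_SUBPARTS - 1 elements
before the address recorded in the DR, so the misalignment is offset
by that many element sizes.  A minimal standalone sketch of the
arithmetic, with concrete values assumed (an illustration only, not
the GCC code itself):

  #include <stdio.h>

  int main (void)
  {
    /* Stand-ins for TYPE_VECTOR_SUBPARTS (vectype) and the element
       TYPE_SIZE_UNIT; assume a vector of 2 doubles.  */
    int nunits = 2;
    int elt_size = 8;
    int misalignment = 0;  /* hypothetical misalignment of the DR */
    /* PLUS because STEP is negative: the vector access begins
       nunits - 1 elements before the DR's address.  */
    misalignment += (nunits - 1) * -elt_size;
    printf ("biased misalignment: %d\n", misalignment);  /* prints -8 */
    return 0;
  }

For VMAT_STRIDED_SLP no such reversed vector load is emitted, which
is why applying the bias to grouped accesses makes the computed
misalignment wrong.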

Bootstrapped on x86_64-unknown-linux-gnu, testing in progress.

2021-03-25  Richard Biener  <rguent...@suse.de>

        PR tree-optimization/96109
        * tree-vect-data-refs.c (vect_compute_data_ref_alignment):
        Only bias non-grouped negative stride accesses.

        * gcc.dg/vect/pr96109.c: New testcase.
---
 gcc/testsuite/gcc.dg/vect/pr96109.c | 31 +++++++++++++++++++++++++++++
 gcc/tree-vect-data-refs.c           |  7 ++++++-
 2 files changed, 37 insertions(+), 1 deletion(-)
 create mode 100644 gcc/testsuite/gcc.dg/vect/pr96109.c

diff --git a/gcc/testsuite/gcc.dg/vect/pr96109.c b/gcc/testsuite/gcc.dg/vect/pr96109.c
new file mode 100644
index 00000000000..b9bc85fa593
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/vect/pr96109.c
@@ -0,0 +1,31 @@
+/* { dg-require-effective-target vect_double } */
+
+#include "tree-vect.h"
+
+static double x[1024], y[1024];
+
+void __attribute__((noipa))
+foo ()
+{
+  for (int i = 0; i < 511; ++i)
+    {
+      x[2*i] = y[1022 - 2*i - 1];
+      x[2*i+1] = y[1022 - 2*i];
+    }
+}
+
+int main()
+{
+  check_vect ();
+  for (int i = 0; i < 1024; ++i)
+    x[i] = 0, y[i] = i;
+  foo ();
+  for (int i = 0; i < 1022; ++i)
+    if (x[i] != y[1022 - (i^1)])
+      __builtin_abort ();
+  if (x[1022] != 0 || x[1023] != 0)
+    __builtin_abort ();
+  return 0;
+}
+
+/* { dg-final { scan-tree-dump "Vectorizing an unaligned access" "vect" { target vect_unaligned_possible } } } */
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 97c8577ebe7..ee266ba62a8 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -1109,7 +1109,12 @@ vect_compute_data_ref_alignment (vec_info *vinfo, dr_vec_info *dr_info)
   /* If this is a backward running DR then first access in the larger
      vectype actually is N-1 elements before the address in the DR.
      Adjust misalign accordingly.  */
-  if (tree_int_cst_sgn (drb->step) < 0)
+  /* ???  The only negative stride grouped access we support is via
+     VMAT_STRIDED_SLP and that is confused by this premature adjustment
+     of misalignment which really should be done on the
+     VMAT_CONTIGUOUS_REVERSE code path instead.  */
+  if (!STMT_VINFO_GROUPED_ACCESS (stmt_info)
+      && tree_int_cst_sgn (drb->step) < 0)
     /* PLUS because STEP is negative.  */
     misalignment += ((TYPE_VECTOR_SUBPARTS (vectype) - 1)
                     * -TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (vectype))));
-- 
2.26.2
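
For reference, the testcase's verification loop relies on the two
stores in foo collapsing to x[i] = y[1022 - (i^1)], since i^1 merely
flips the low bit of the index.  A standalone sanity check of that
identity (illustration only, not part of the patch):

  #include <assert.h>

  int main (void)
  {
    for (int k = 0; k < 511; ++k)
      {
        /* even i = 2k:    x[2k]   = y[1022 - 2k - 1]  */
        assert (1022 - ((2 * k) ^ 1) == 1022 - 2 * k - 1);
        /* odd  i = 2k+1:  x[2k+1] = y[1022 - 2k]      */
        assert (1022 - ((2 * k + 1) ^ 1) == 1022 - 2 * k);
      }
    return 0;
  }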
