The following works around an unfortunate interaction with ranger
and immediate use walking. An actual solution needs more thought.
Bootstrapped and tested on x86_64-unknown-linux-gnu, pushed.
PR tree-optimization/122502
* tree-scalar-evolution.cc (final_value_replacement_loop):
Avoid folding from within FOR_EACH_IMM_USE_STMT due to active
ranger.
* gcc.dg/torture/pr122502.c: New testcase.
---
gcc/testsuite/gcc.dg/torture/pr122502.c | 21 +++++++++++++++++++++
gcc/tree-scalar-evolution.cc | 10 ++++++++--
2 files changed, 29 insertions(+), 2 deletions(-)
create mode 100644 gcc/testsuite/gcc.dg/torture/pr122502.c
diff --git a/gcc/testsuite/gcc.dg/torture/pr122502.c
b/gcc/testsuite/gcc.dg/torture/pr122502.c
new file mode 100644
index 00000000000..5e2cb2e8163
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr122502.c
@@ -0,0 +1,21 @@
+/* { dg-do compile } */
+
+short int *ts;
+
+void
+c2 (unsigned long long int s4, int ns)
+{
+ short int *b2 = (short int *)&ns;
+
+ while (ns != 0)
+ {
+ int xn;
+
+ for (xn = 0; xn < 3; ++xn)
+ for (*b2 = 0; *b2 < 2; ++*b2)
+ s4 += xn;
+ if (s4 != 0)
+ b2 = ts;
+ ++ns;
+ }
+}
diff --git a/gcc/tree-scalar-evolution.cc b/gcc/tree-scalar-evolution.cc
index 7907893b916..9f82abc4b81 100644
--- a/gcc/tree-scalar-evolution.cc
+++ b/gcc/tree-scalar-evolution.cc
@@ -3995,11 +3995,17 @@ final_value_replacement_loop (class loop *loop)
{
gimple *use_stmt;
imm_use_iterator imm_iter;
+ auto_vec<gimple *, 4> to_fold;
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, rslt)
+ if (!stmt_can_throw_internal (cfun, use_stmt))
+ to_fold.safe_push (use_stmt);
+ /* Delay folding until after the immediate use walk is completed
+ as we have an active ranger and that might walk immediate
+ uses of rslt again. See PR122502. */
+ for (gimple *use_stmt : to_fold)
{
gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
- if (!stmt_can_throw_internal (cfun, use_stmt)
- && fold_stmt (&gsi, follow_all_ssa_edges))
+ if (fold_stmt (&gsi, follow_all_ssa_edges))
update_stmt (gsi_stmt (gsi));
}
}
--
2.51.0