https://gcc.gnu.org/g:7e697dba4f5917062ef25d13ee1bafe023cc914d

commit 7e697dba4f5917062ef25d13ee1bafe023cc914d
Author: Alexandre Oliva <ol...@adacore.com>
Date:   Sat Jan 18 00:40:51 2025 -0300

    [ifcombine] avoid dropping tree_could_trap_p [PR118514]
    
    Unlike other access patterns, BIT_FIELD_REFs aren't regarded as
    possibly trapping merely because they reference out-of-bounds bits.
    
    So, when decode_field_reference finds a load that could trap, but
    whose inner object can't, bail out if the access reaches past the
    inner object's size.
    
    This may drop some optimizations on VLAs that would be safe if we
    managed to reuse the same pre-existing load for both compares, but
    those get optimized later anyway (as in the new testcase).  The cases
    of most interest are those that merge separate fields, and they
    necessarily involve new BIT_FIELD_REF loads.
    
    
    for  gcc/ChangeLog
    
            PR tree-optimization/118514
            * gimple-fold.cc (decode_field_reference): Refuse to consider
            BIT_FIELD_REF of non-trapping object if the original load
            could trap for being out-of-bounds.
            (make_bit_field_load): Check that replacement loads could trap
            as much as the original loads.
            (fold_truth_andor_for_ifcombine): Require combined loads to
            have the same trapping status.
            * tree-eh.cc (bit_field_ref_in_bounds_p): New.
            (tree_could_trap_p): Use it for BIT_FIELD_REFs of decls.
            * tree-eh.h (bit_field_ref_in_bounds_p): Declare.
    
    for  gcc/testsuite/ChangeLog
    
            PR tree-optimization/118514
            * gcc.dg/field-merge-23.c: New.
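
For illustration only (not part of the commit), here is a minimal sketch of
the kind of adjacent-field compares that ifcombine's field merging targets;
the struct and function names below are made up:

    /* Field merging may combine the two compares into a single wider load
       covering both bit-fields (lowered as a BIT_FIELD_REF) plus one
       masked compare.  The patch ensures such a replacement load is not
       treated as less trap-prone than the loads it replaces.  */
    struct flags { unsigned lo : 4; unsigned hi : 4; };

    int
    both_match_p (const struct flags *p)
    {
      return p->lo == 1 && p->hi == 2;
    }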

Diff:
---
 gcc/gimple-fold.cc                    | 37 +++++++++++++++++++++++++----------
 gcc/testsuite/gcc.dg/field-merge-23.c | 19 ++++++++++++++++++
 gcc/tree-eh.cc                        | 30 +++++++++++++++++++++++++++-
 gcc/tree-eh.h                         |  1 +
 4 files changed, 76 insertions(+), 11 deletions(-)

diff --git a/gcc/gimple-fold.cc b/gcc/gimple-fold.cc
index 3c971a29ef04..cd9d35034918 100644
--- a/gcc/gimple-fold.cc
+++ b/gcc/gimple-fold.cc
@@ -7686,10 +7686,14 @@ decode_field_reference (tree *pexp, HOST_WIDE_INT *pbitsize,
       || bs <= shiftrt
       || offset != 0
       || TREE_CODE (inner) == PLACEHOLDER_EXPR
-      /* Reject out-of-bound accesses (PR79731).  */
-      || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
-         && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
-                              bp + bs) < 0)
+      /* Reject out-of-bound accesses (PR79731).  BIT_FIELD_REFs aren't flagged
+        as tree_could_trap_p just because of out-of-range bits, so don't even
+        try to optimize loads that could trap if they access out-of-range bits
+        of an object that could not trap (PR118514).  */
+      || ((! AGGREGATE_TYPE_P (TREE_TYPE (inner))
+          || (load && tree_could_trap_p (gimple_assign_rhs1 (load))
+              && !tree_could_trap_p (inner)))
+         && !bit_field_ref_in_bounds_p (inner, bs, bp))
       || (INTEGRAL_TYPE_P (TREE_TYPE (inner))
          && !type_has_mode_precision_p (TREE_TYPE (inner))))
     return NULL_TREE;
@@ -7859,6 +7863,11 @@ make_bit_field_load (location_t loc, tree inner, tree orig_inner, tree type,
       gimple *new_stmt = gsi_stmt (i);
       if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
+      gcc_checking_assert (! (gimple_assign_load_p (point)
+                             && gimple_assign_load_p (new_stmt))
+                          || (tree_could_trap_p (gimple_assign_rhs1 (point))
+                              == tree_could_trap_p (gimple_assign_rhs1
+                                                    (new_stmt))));
     }
 
   gimple_stmt_iterator gsi = gsi_for_stmt (point);
@@ -8090,8 +8099,9 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
       return 0;
     }
 
-  /* Prepare to turn compares of signed quantities with zero into
-     sign-bit tests.  */
+  /* Prepare to turn compares of signed quantities with zero into sign-bit
+     tests.  We need not worry about *_reversep here for these compare
+     rewrites: loads will have already been reversed before compares.  */
   bool lsignbit = false, rsignbit = false;
   if ((lcode == LT_EXPR || lcode == GE_EXPR)
       && integer_zerop (lr_arg)
@@ -8198,10 +8208,11 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
      the rhs's.  If one is a load and the other isn't, we have to be
      conservative and avoid the optimization, otherwise we could get
      SRAed fields wrong.  */
-  if (volatilep || ll_reversep != rl_reversep)
+  if (volatilep)
     return 0;
 
-  if (! operand_equal_p (ll_inner, rl_inner, 0))
+  if (ll_reversep != rl_reversep
+      || ! operand_equal_p (ll_inner, rl_inner, 0))
     {
       /* Try swapping the operands.  */
       if (ll_reversep != rr_reversep
@@ -8221,8 +8232,12 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
       std::swap (rl_loc, rr_loc);
     }
 
+  /* Check that the loads that we're trying to combine have the same vuse and
+     same trapping status.  */
   if ((ll_load && rl_load)
-      ? gimple_vuse (ll_load) != gimple_vuse (rl_load)
+      ? (gimple_vuse (ll_load) != gimple_vuse (rl_load)
+        || (tree_could_trap_p (gimple_assign_rhs1 (ll_load))
+            != tree_could_trap_p (gimple_assign_rhs1 (rl_load))))
       : (!ll_load != !rl_load))
     return 0;
 
@@ -8275,7 +8290,9 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
   else if (lr_reversep != rr_reversep
           || ! operand_equal_p (lr_inner, rr_inner, 0)
           || ((lr_load && rr_load)
-              ? gimple_vuse (lr_load) != gimple_vuse (rr_load)
+              ? (gimple_vuse (lr_load) != gimple_vuse (rr_load)
+                 || (tree_could_trap_p (gimple_assign_rhs1 (lr_load))
+                     != tree_could_trap_p (gimple_assign_rhs1 (rr_load))))
               : (!lr_load != !rr_load)))
     return 0;
 
diff --git a/gcc/testsuite/gcc.dg/field-merge-23.c b/gcc/testsuite/gcc.dg/field-merge-23.c
new file mode 100644
index 000000000000..96604d43c9de
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/field-merge-23.c
@@ -0,0 +1,19 @@
+/* { dg-do run } */
+/* { dg-options "-O" } */
+
+/* PR tree-optimization/118514 */
+
+/* Check that we don't create BIT_FIELD_REFs that could trap, because they are
+   assumed not to trap and could be pulled out of loops.  */
+
+int a, c = 1;
+unsigned char b[1], d;
+int main() {
+  while (a || !c) {
+    signed char e = b[1000000000];
+    d = e < 0 || b[1000000000] > 1;
+    if (d)
+      __builtin_abort ();
+  }
+  return 0;
+}
diff --git a/gcc/tree-eh.cc b/gcc/tree-eh.cc
index 1033b124e4df..2eeaa075bdb4 100644
--- a/gcc/tree-eh.cc
+++ b/gcc/tree-eh.cc
@@ -2646,6 +2646,27 @@ range_in_array_bounds_p (tree ref)
   return true;
 }
 
+/* Return true iff a BIT_FIELD_REF <EXPR, SIZE, OFFSET> would access a bit
+   range that is known to be in bounds for EXPR's type.  */
+
+bool
+bit_field_ref_in_bounds_p (tree expr, poly_uint64 size, poly_uint64 offset)
+{
+  tree expr_size_tree;
+  poly_uint64 expr_size_max, min = offset, wid = size, max;
+
+  expr_size_tree = TYPE_SIZE (TREE_TYPE (expr));
+  if (!expr_size_tree || !poly_int_tree_p (expr_size_tree, &expr_size_max))
+    return false;
+
+  max = min + wid;
+  if (maybe_lt (max, min)
+      || maybe_lt (expr_size_max, max))
+    return false;
+
+  return true;
+}
+
 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
    location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
    This routine expects only GIMPLE lhs or rhs input.  */
@@ -2688,10 +2709,17 @@ tree_could_trap_p (tree expr)
  restart:
   switch (code)
     {
+    case BIT_FIELD_REF:
+      if (DECL_P (TREE_OPERAND (expr, 0))
+         && !bit_field_ref_in_bounds_p (TREE_OPERAND (expr, 0),
+                                        bit_field_size (expr),
+                                        bit_field_offset (expr)))
+       return true;
+      /* Fall through.  */
+
     case COMPONENT_REF:
     case REALPART_EXPR:
     case IMAGPART_EXPR:
-    case BIT_FIELD_REF:
     case VIEW_CONVERT_EXPR:
     case WITH_SIZE_EXPR:
       expr = TREE_OPERAND (expr, 0);
diff --git a/gcc/tree-eh.h b/gcc/tree-eh.h
index 69fe193f1b82..1c6f70b24fab 100644
--- a/gcc/tree-eh.h
+++ b/gcc/tree-eh.h
@@ -36,6 +36,7 @@ extern void redirect_eh_dispatch_edge (geh_dispatch *, edge, basic_block);
 extern bool operation_could_trap_helper_p (enum tree_code, bool, bool, bool,
                                           bool, tree, bool *);
 extern bool operation_could_trap_p (enum tree_code, bool, bool, tree);
+extern bool bit_field_ref_in_bounds_p (tree, poly_uint64, poly_uint64);
 extern bool tree_could_trap_p (tree);
 extern tree rewrite_to_non_trapping_overflow (tree);
 extern bool stmt_could_throw_p (function *, gimple *);
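
As an aside (not from the patch), the bounds test performed by
bit_field_ref_in_bounds_p can be modeled with plain unsigned arithmetic.
The helper below is a simplified, self-contained stand-in for the
poly_uint64-based GCC code, with made-up names:

    #include <stdbool.h>

    /* The accessed bit range [offset, offset + size) must not wrap around
       and must lie within the object's size in bits (compare the maybe_lt
       checks in the patch).  */
    static bool
    bit_range_in_bounds (unsigned long long object_bits,
                         unsigned long long size,
                         unsigned long long offset)
    {
      unsigned long long end = offset + size;
      if (end < offset)   /* Wrap-around: out of bounds.  */
        return false;
      return end <= object_bits;
    }

    int
    main (void)
    {
      /* For the testcase's "unsigned char b[1]" (8 bits), reading 8 bits
         at bit offset 8000000000 is out of bounds, so the corresponding
         BIT_FIELD_REF could trap.  */
      return bit_range_in_bounds (8ULL, 8ULL, 8000000000ULL) ? 1 : 0;
    }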
