On Mon, 13 Jan 2025, Alexandre Oliva wrote:

> 
> Add logic to check and extend constants compared with bitfields, so
> that fields are only compared with constants they could actually
> equal.  This involves making sure the signedness doesn't change
> between loads and conversions before shifts: we'd need to carry a lot
> more data to deal with all the possibilities.
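[ Not part of the patch, just an illustration modeled on the new
  field-merge-21.c test below: a shifted narrow load such as
  ((signed char) s.b) >> 2 spans only 6 bits, so it can only ever equal
  constants that are sign extensions of a 6-bit value (-32 .. 31):

    struct S { unsigned short a, b; } s;

    int can_be_true (void)    /* -27 fits in 6 signed bits */
    {
      return ((signed char) s.b) >> 2 == -27;
    }

    int always_false (void)   /* -91 does not */
    {
      return ((signed char) s.b) >> 2 == -91;
    }
]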
> 
> Regstrapped on x86_64-linux-gnu.  Ok to install?

OK.

Thanks,
Richard.

> 
> for  gcc/ChangeLog
> 
>       PR tree-optimization/118456
>       * gimple-fold.cc (decode_field_reference): Punt if shifting
>       after changing signedness.
>       (fold_truth_andor_for_ifcombine): Check extension bits in
>       constants before clipping.
> 
> for  gcc/testsuite/ChangeLog
> 
>       PR tree-optimization/118456
>       * gcc.dg/field-merge-21.c: New.
>       * gcc.dg/field-merge-22.c: New.
> ---
>  gcc/gimple-fold.cc                    |   40 ++++++++++++++++++++++++-
>  gcc/testsuite/gcc.dg/field-merge-21.c |   53 +++++++++++++++++++++++++++++++++
>  gcc/testsuite/gcc.dg/field-merge-22.c |   31 +++++++++++++++++++
>  3 files changed, 122 insertions(+), 2 deletions(-)
>  create mode 100644 gcc/testsuite/gcc.dg/field-merge-21.c
>  create mode 100644 gcc/testsuite/gcc.dg/field-merge-22.c
> 
> diff --git a/gcc/gimple-fold.cc b/gcc/gimple-fold.cc
> index 93ed8b3abb056..5b1fbe6db1df3 100644
> --- a/gcc/gimple-fold.cc
> +++ b/gcc/gimple-fold.cc
> @@ -7712,6 +7712,18 @@ decode_field_reference (tree *pexp, HOST_WIDE_INT *pbitsize,
>  
>    if (shiftrt)
>      {
> +      /* Punt if we're shifting by more than the loaded bitfield (after
> +      adjustment), or if there's a shift after a change of signedness.
> +      When comparing this field with a constant, we'll check that the
> +      constant is a proper sign- or zero-extension (depending on signedness)
> +      of a value that would fit in the selected portion of the bitfield.  A
> +      shift after a change of signedness would make the extension
> +      non-uniform, and we can't deal with that (yet ???).  See
> +      gcc.dg/field-merge-22.c for a test that would go wrong.  */
> +      if (*pbitsize <= shiftrt
> +       || (convert_before_shift
> +           && outer_type && unsignedp != TYPE_UNSIGNED (outer_type)))
> +     return NULL_TREE;
>        if (!*preversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
>       *pbitpos += shiftrt;
>        *pbitsize -= shiftrt;
> @@ -8512,13 +8524,25 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
>       and bit position.  */
>    if (l_const.get_precision ())
>      {
> +      /* Before clipping upper bits of the right-hand operand of the compare,
> +      check that they're sign or zero extensions, depending on how the
> +      left-hand operand would be extended.  */
> +      bool l_non_ext_bits = false;
> +      if (ll_bitsize < lr_bitsize)
> +     {
> +       wide_int zext = wi::zext (l_const, ll_bitsize);
> +       if ((ll_unsignedp ? zext : wi::sext (l_const, ll_bitsize)) == l_const)
> +         l_const = zext;
> +       else
> +         l_non_ext_bits = true;
> +     }
>        /* We're doing bitwise equality tests, so don't bother with sign
>        extensions.  */
>        l_const = wide_int::from (l_const, lnprec, UNSIGNED);
>        if (ll_and_mask.get_precision ())
>       l_const &= wide_int::from (ll_and_mask, lnprec, UNSIGNED);
>        l_const <<= xll_bitpos;
> -      if ((l_const & ~ll_mask) != 0)
> +      if (l_non_ext_bits || (l_const & ~ll_mask) != 0)
>       {
>         warning_at (lloc, OPT_Wtautological_compare,
>                     "comparison is always %d", wanted_code == NE_EXPR);
> @@ -8530,11 +8554,23 @@ fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
>        again.  */
>        gcc_checking_assert (r_const.get_precision ());
>  
> +      /* Before clipping upper bits of the right-hand operand of the compare,
> +      check that they're sign or zero extensions, depending on how the
> +      left-hand operand would be extended.  */
> +      bool r_non_ext_bits = false;
> +      if (rl_bitsize < rr_bitsize)
> +     {
> +       wide_int zext = wi::zext (r_const, rl_bitsize);
> +       if ((rl_unsignedp ? zext : wi::sext (r_const, rl_bitsize)) == r_const)
> +         r_const = zext;
> +       else
> +         r_non_ext_bits = true;
> +     }
>        r_const = wide_int::from (r_const, lnprec, UNSIGNED);
>        if (rl_and_mask.get_precision ())
>       r_const &= wide_int::from (rl_and_mask, lnprec, UNSIGNED);
>        r_const <<= xrl_bitpos;
> -      if ((r_const & ~rl_mask) != 0)
> +      if (r_non_ext_bits || (r_const & ~rl_mask) != 0)
>       {
>         warning_at (rloc, OPT_Wtautological_compare,
>                     "comparison is always %d", wanted_code == NE_EXPR);
> diff --git a/gcc/testsuite/gcc.dg/field-merge-21.c b/gcc/testsuite/gcc.dg/field-merge-21.c
> new file mode 100644
> index 0000000000000..042b2123eb63e
> --- /dev/null
> +++ b/gcc/testsuite/gcc.dg/field-merge-21.c
> @@ -0,0 +1,53 @@
> +/* { dg-do run } */
> +/* { dg-options "-O2" } */
> +
> +/* PR tree-optimization/118456 */
> +/* Check that shifted fields compared with constants compare correctly even
> +   if the constant contains sign-extension bits not present in the bit
> +   range.  */
> +
> +struct S { unsigned long long o; unsigned short a, b; } s;
> +
> +__attribute__((noipa)) int
> +foo (void)
> +{
> +  return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == -27;
> +}
> +
> +__attribute__((noipa)) int
> +bar (void)
> +{
> +  return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == -91;
> +}
> +
> +__attribute__((noipa)) int
> +bars (void)
> +{
> +  return ((unsigned char) s.a) >> 3 == 17 && ((signed char) s.b) >> 2 == 37;
> +}
> +
> +__attribute__((noipa)) int
> +baz (void)
> +{
> +  return ((unsigned char) s.a) >> 3 == 49 && ((signed char) s.b) >> 2 == -27;
> +}
> +
> +__attribute__((noipa)) int
> +bazs (void)
> +{
> +  return ((unsigned char) s.a) >> 3 == (unsigned char) -15 && ((signed char) s.b) >> 2 == -27;
> +}
> +
> +int
> +main ()
> +{
> +  s.a = 17 << 3;
> +  s.b = (unsigned short)(-27u << 2);
> +  if (foo () != 1
> +      || bar () != 0
> +      || bars () != 0
> +      || baz () != 0
> +      || bazs () != 0)
> +    __builtin_abort ();
> +  return 0;
> +}
> diff --git a/gcc/testsuite/gcc.dg/field-merge-22.c b/gcc/testsuite/gcc.dg/field-merge-22.c
> new file mode 100644
> index 0000000000000..45b29c0bccaff
> --- /dev/null
> +++ b/gcc/testsuite/gcc.dg/field-merge-22.c
> @@ -0,0 +1,31 @@
> +/* { dg-do run } */
> +/* { dg-options "-O2" } */
> +
> +/* PR tree-optimization/118456 */
> +/* Check that compares with constants take into account sign/zero extension of
> +   both the bitfield and the shifting type.  */
> +
> +#define shift (__CHAR_BIT__ - 4)
> +
> +struct S {
> +  signed char a : shift + 2;
> +  signed char b : shift + 2;
> +  short ignore[0];
> +} s;
> +
> +__attribute__((noipa)) int
> +foo (void)
> +{
> +  return ((unsigned char) s.a) >> shift == 15
> +    && ((unsigned char) s.b) >> shift == 0;
> +}
> +
> +int
> +main ()
> +{
> +  s.a = -1;
> +  s.b = 1;
> +  if (foo () != 1)
> +    __builtin_abort ();
> +  return 0;
> +}
> 
> 
> 
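[ For anyone reading along: a standalone C model, not the GCC internals,
  of the extension test the patch performs with wi::zext/wi::sext on the
  compared constant (assumes bits < 64):

    #include <assert.h>

    /* Does C equal the sign or zero extension (per UNSIGNEDP) of its
       low BITS bits?  */
    static int
    is_extension (unsigned long long c, unsigned bits, int unsignedp)
    {
      unsigned long long mask = (1ULL << bits) - 1;
      unsigned long long zext = c & mask;
      unsigned long long sext = zext;
      if ((zext >> (bits - 1)) & 1)
        sext |= ~mask;              /* replicate the sign bit upward */
      return (unsignedp ? zext : sext) == c;
    }

    int
    main (void)
    {
      /* Matches foo and bar in field-merge-21.c: -27 sign-extends from
         6 bits, -91 does not, so the latter compare is always false.  */
      assert (is_extension ((unsigned long long) -27, 6, 0));
      assert (!is_extension ((unsigned long long) -91, 6, 0));
      return 0;
    }
]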

-- 
Richard Biener <rguent...@suse.de>
SUSE Software Solutions Germany GmbH,
Frankenstrasse 146, 90461 Nuernberg, Germany;
GF: Ivo Totev, Andrew McDonald, Werner Knoblich; (HRB 36809, AG Nuernberg)
