Hi!

While working on the next PR, I've noticed we only fold this on generic, not gimple.
Fixed thusly, bootstrapped/regtested on x86_64-linux and i686-linux, ok for trunk?

2018-09-12  Jakub Jelinek  <ja...@redhat.com>

	PR tree-optimization/87287
	* fold-const.c (fold_binary_loc) <case EQ_EXPR>: Move signed modulo
	X % C == 0 to X % (unsigned) C == 0 optimization to ...
	* match.pd (X % C == 0): ... here.  New optimization.

	* gcc.dg/tree-ssa/pr87287.c: New test.

--- gcc/match.pd.jj	2018-08-29 13:32:54.427224535 +0200
+++ gcc/match.pd	2018-09-12 16:25:01.385733828 +0200
@@ -470,7 +470,15 @@ (define_operator_list COND_TERNARY
        && TYPE_OVERFLOW_UNDEFINED (type)
        && wi::multiple_of_p (wi::to_wide (@1), wi::to_wide (@2),
			     TYPE_SIGN (type)))
-   { build_zero_cst (type); })))
+   { build_zero_cst (type); }))
+ /* For (X % C) == 0, if X is signed and C is power of 2, use unsigned
+    modulo and comparison, since it is simpler and equivalent.  */
+ (for cmp (eq ne)
+  (simplify
+   (cmp (mod @0 integer_pow2p@2) integer_zerop@1)
+   (if (!TYPE_UNSIGNED (TREE_TYPE (@0)))
+    (with { tree utype = unsigned_type_for (TREE_TYPE (@0)); }
+     (cmp (mod (convert:utype @0) (convert:utype @2)) (convert:utype @1)))))))
 
 /* X % -C is the same as X % C.  */
 (simplify
--- gcc/fold-const.c.jj	2018-09-12 11:18:26.000000000 +0200
+++ gcc/fold-const.c	2018-09-12 16:19:39.289018098 +0200
@@ -10661,28 +10661,6 @@ fold_binary_loc (location_t loc, enum tr
 	    }
 	}
 
-      /* If this is an NE or EQ comparison of zero against the result of a
-	 signed MOD operation whose second operand is a power of 2, make
-	 the MOD operation unsigned since it is simpler and equivalent.  */
-      if (integer_zerop (arg1)
-	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
-	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
-	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
-	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
-	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
-	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
-	{
-	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
-	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
-					 fold_convert_loc (loc, newtype,
-							   TREE_OPERAND (arg0, 0)),
-					 fold_convert_loc (loc, newtype,
-							   TREE_OPERAND (arg0, 1)));
-
-	  return fold_build2_loc (loc, code, type, newmod,
-				  fold_convert_loc (loc, newtype, arg1));
-	}
-
       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where C1 is a
 	 valid shift constant, and C2 is a power of two, i.e. a single
 	 bit.  */
--- gcc/testsuite/gcc.dg/tree-ssa/pr87287.c.jj	2018-09-12 16:37:38.817307646 +0200
+++ gcc/testsuite/gcc.dg/tree-ssa/pr87287.c	2018-09-12 16:36:50.293103713 +0200
@@ -0,0 +1,34 @@
+/* PR tree-optimization/87287 */
+/* { dg-options "-O2 -fdump-tree-cddce1" } */
+/* { dg-final { scan-tree-dump-not " % 16" "cddce1" } } */
+/* { dg-final { scan-tree-dump-times " & 15" 4 "cddce1" } } */
+
+void f0 (void);
+
+int
+f1 (int x)
+{
+  return x % 16 == 0;
+}
+
+int
+f2 (int x)
+{
+  int y = x % 16;
+  return y != 0;
+}
+
+void
+f3 (int x)
+{
+  if (x % 16 != 0)
+    f0 ();
+}
+
+void
+f4 (int x)
+{
+  int y = x % 16;
+  if (y == 0)
+    f0 ();
+}

	Jakub
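
For reference, a minimal standalone check (not part of the patch; the helper
names below are invented for illustration) of the equivalence the new match.pd
pattern relies on: for signed x and a power-of-two divisor such as 16,
x % 16 == 0 agrees with (unsigned) x % 16 == 0 and with the
((unsigned) x & 15) == 0 form that later passes produce, including at the
type boundaries:

/* Illustration only: verify that the signed modulo test, the unsigned
   modulo test and the mask test all agree over a range of values and at
   the extremes of int.  */
#include <assert.h>
#include <limits.h>

static int mod_signed (int x)    { return x % 16 == 0; }
static int mod_unsigned (int x)  { return (unsigned) x % 16u == 0; }
static int mask_low_bits (int x) { return ((unsigned) x & 15u) == 0; }

int
main (void)
{
  for (int x = -4096; x <= 4096; x++)
    {
      assert (mod_signed (x) == mod_unsigned (x));
      assert (mod_unsigned (x) == mask_low_bits (x));
    }
  assert (mod_signed (INT_MIN) == mask_low_bits (INT_MIN));
  assert (mod_signed (INT_MAX) == mask_low_bits (INT_MAX));
  return 0;
}

This is also what the testcase's scan-tree-dump directives rely on: with the
pattern available in match.pd, the % 16 in f1-f4 should be gone from the
cddce1 dump in favour of the & 15 form, even when the modulo result only
reaches the comparison through a separate GIMPLE statement as in f2 and f4.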