On Mon, Dec 8, 2014 at 3:45 PM, Marc Glisse <marc.gli...@inria.fr> wrote:
> On Tue, 18 Nov 2014, Richard Biener wrote:
>
>>> I'll try to replace some more TYPE_MODE during stage3...
>>
>>
>> Btw, a convenience would be to be able to write
>>
>> HONOR_NANS (type)
>>
>> thus effectively making HONOR_* inline functions with a machine_mode
>> and a type overload (and the type overload properly looking at
>> element types).
>
>
> Making those functions inline is not easy, because real.h and tree.h don't
> include each other. Here is a version with the functions not inline. I was
> tempted to also overload on gcond const* (for the cases that call
> gimple_cond_lhs), but the arguments were always gimple and not gcond*, so I
> didn't.
>
> Passes bootstrap+testsuite on x86_64-linux-gnu.

Ok.

Thanks,
Richard.
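
For reference, here is the shape of the change, condensed from the real.h
and real.c hunks below (the bodies are written inline here for brevity;
the patch itself declares the overloads in real.h and defines them out of
line in real.c, which it makes include rtl.h and options.h):

  /* Old: a macro, usable only on a machine_mode.  */
  #define HONOR_NANS(MODE) \
    (MODE_HAS_NANS (MODE) && !flag_finite_math_only)

  /* New: three overloads, so callers can pass a mode, a tree or an rtx.  */
  bool
  HONOR_NANS (machine_mode m)
  {
    return MODE_HAS_NANS (m) && !flag_finite_math_only;
  }

  bool
  HONOR_NANS (const_tree t)
  {
    /* element_mode looks through vector and complex types to the scalar
       element, so this is safe where TYPE_MODE would give the vector
       mode.  */
    return HONOR_NANS (element_mode (t));
  }

  bool
  HONOR_NANS (const_rtx x)
  {
    return HONOR_NANS (GET_MODE (x));
  }

Call sites can then shrink from HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
or HONOR_NANS (element_mode (arg)) to a uniform HONOR_NANS (arg).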

> 2014-12-08  Marc Glisse  <marc.gli...@inria.fr>
>
>         * real.h (HONOR_NANS): Replace macro with 3 overloaded declarations.
>         * real.c: Include rtl.h and options.h.
>         (HONOR_NANS): Define three overloads.
>         * builtins.c (fold_builtin_classify, fold_builtin_unordered_cmp):
>         Simplify argument of HONOR_NANS.
>         * fold-const.c (combine_comparisons, fold_truth_not_expr,
>         fold_cond_expr_with_comparison, merge_truthop_with_opposite_arm,
>         fold_comparison, fold_binary_loc): Likewise.
>         * ifcvt.c (noce_try_move, noce_try_minmax): Likewise.
>         * ipa-inline-analysis.c (add_clause,
>         set_cond_stmt_execution_predicate): Likewise.
>         * match.pd: Likewise.
>         * rtlanal.c (may_trap_p_1): Likewise.
>         * simplify-rtx.c (simplify_const_relational_operation): Likewise.
>         * tree-if-conv.c (parse_predicate): Likewise.
>         * tree-ssa-ccp.c (valid_lattice_transition): Likewise.
>         * tree-ssa-ifcombine.c (ifcombine_ifandif): Likewise.
>         * tree-ssa-phiopt.c (minmax_replacement, neg_replacement): Likewise.
>         * tree-ssa-reassoc.c (eliminate_using_constants): Likewise.
>         * tree-ssa-tail-merge.c (gimple_equal_p): Likewise.
>
> --
> Marc Glisse
> Index: builtins.c
> ===================================================================
> --- builtins.c  (revision 218467)
> +++ builtins.c  (working copy)
> @@ -9641,34 +9641,34 @@ fold_builtin_classify (location_t loc, t
>                                integer_minus_one_node, integer_one_node);
>             tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
>                                isinf_call, tmp,
>                                integer_zero_node);
>           }
>
>         return tmp;
>        }
>
>      case BUILT_IN_ISFINITE:
> -      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
> +      if (!HONOR_NANS (arg)
>           && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
>         return omit_one_operand_loc (loc, type, integer_one_node, arg);
>
>        if (TREE_CODE (arg) == REAL_CST)
>         {
>           r = TREE_REAL_CST (arg);
>           return real_isfinite (&r) ? integer_one_node : integer_zero_node;
>         }
>
>        return NULL_TREE;
>
>      case BUILT_IN_ISNAN:
> -      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
> +      if (!HONOR_NANS (arg))
>         return omit_one_operand_loc (loc, type, integer_zero_node, arg);
>
>        if (TREE_CODE (arg) == REAL_CST)
>         {
>           r = TREE_REAL_CST (arg);
>           return real_isnan (&r) ? integer_one_node : integer_zero_node;
>         }
>
>        arg = builtin_save_expr (arg);
>        return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
> @@ -9782,27 +9782,26 @@ fold_builtin_unordered_cmp (location_t l
>    else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
>      cmp_type = type0;
>    else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
>      cmp_type = type1;
>
>    arg0 = fold_convert_loc (loc, cmp_type, arg0);
>    arg1 = fold_convert_loc (loc, cmp_type, arg1);
>
>    if (unordered_code == UNORDERED_EXPR)
>      {
> -      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
> +      if (!HONOR_NANS (arg0))
>         return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
>        return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
>      }
>
> -  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
> -                                                  : ordered_code;
> +  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
>    return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
>                       fold_build2_loc (loc, code, type, arg0, arg1));
>  }
>
>  /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
>     arithmetics if it can never overflow, or into internal functions that
>     return both result of arithmetics and overflowed boolean flag in
>     a complex integer result, or some other check for overflow.  */
>
>  static tree
> Index: fold-const.c
> ===================================================================
> --- fold-const.c        (revision 218467)
> +++ fold-const.c        (working copy)
> @@ -2585,21 +2585,21 @@ compcode_to_comparison (enum comparison_
>     and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
>     the possibility of trapping if the mode has NaNs, and return NULL_TREE
>     if this makes the transformation invalid.  */
>
>  tree
>  combine_comparisons (location_t loc,
>                      enum tree_code code, enum tree_code lcode,
>                      enum tree_code rcode, tree truth_type,
>                      tree ll_arg, tree lr_arg)
>  {
> -  bool honor_nans = HONOR_NANS (element_mode (ll_arg));
> +  bool honor_nans = HONOR_NANS (ll_arg);
>    enum comparison_code lcompcode = comparison_to_compcode (lcode);
>    enum comparison_code rcompcode = comparison_to_compcode (rcode);
>    int compcode;
>
>    switch (code)
>      {
>      case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
>        compcode = lcompcode & rcompcode;
>        break;
>
> @@ -3369,21 +3369,21 @@ fold_truth_not_expr (location_t loc, tre
>
>    if (TREE_CODE_CLASS (code) == tcc_comparison)
>      {
>        tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
>        if (FLOAT_TYPE_P (op_type)
>           && flag_trapping_math
>           && code != ORDERED_EXPR && code != UNORDERED_EXPR
>           && code != NE_EXPR && code != EQ_EXPR)
>         return NULL_TREE;
>
> -      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
> +      code = invert_tree_comparison (code, HONOR_NANS (op_type));
>        if (code == ERROR_MARK)
>         return NULL_TREE;
>
>        return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
>                          TREE_OPERAND (arg, 1));
>      }
>
>    switch (code)
>      {
>      case INTEGER_CST:
> @@ -4981,55 +4981,55 @@ fold_cond_expr_with_comparison (location
>         case NE_EXPR:
>           return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
>         case LE_EXPR:
>         case LT_EXPR:
>         case UNLE_EXPR:
>         case UNLT_EXPR:
>           /* In C++ a ?: expression can be an lvalue, so put the
>              operand which will be used if they are equal first
>              so that we can convert this back to the
>              corresponding COND_EXPR.  */
> -         if (!HONOR_NANS (element_mode (arg1)))
> +         if (!HONOR_NANS (arg1))
>             {
>               comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
>               comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
>               tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
>                     ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
>                     : fold_build2_loc (loc, MIN_EXPR, comp_type,
>                                    comp_op1, comp_op0);
>               return pedantic_non_lvalue_loc (loc,
>                                           fold_convert_loc (loc, type, tem));
>             }
>           break;
>         case GE_EXPR:
>         case GT_EXPR:
>         case UNGE_EXPR:
>         case UNGT_EXPR:
> -         if (!HONOR_NANS (element_mode (arg1)))
> +         if (!HONOR_NANS (arg1))
>             {
>               comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
>               comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
>               tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
>                     ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
>                     : fold_build2_loc (loc, MAX_EXPR, comp_type,
>                                    comp_op1, comp_op0);
>               return pedantic_non_lvalue_loc (loc,
>                                           fold_convert_loc (loc, type, tem));
>             }
>           break;
>         case UNEQ_EXPR:
> -         if (!HONOR_NANS (element_mode (arg1)))
> +         if (!HONOR_NANS (arg1))
>             return pedantic_non_lvalue_loc (loc,
>                                         fold_convert_loc (loc, type, arg2));
>           break;
>         case LTGT_EXPR:
> -         if (!HONOR_NANS (element_mode (arg1)))
> +         if (!HONOR_NANS (arg1))
>             return pedantic_non_lvalue_loc (loc,
>                                         fold_convert_loc (loc, type, arg1));
>           break;
>         default:
>           gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
>           break;
>         }
>      }
>
>    /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
> @@ -5310,21 +5310,21 @@ merge_truthop_with_opposite_arm (locatio
>    if (lhs_code == truthop_code && !rhs_only)
>      {
>        tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
>        if (newlhs != NULL_TREE)
>         {
>           lhs = newlhs;
>           lhs_code = TREE_CODE (lhs);
>         }
>      }
>
> -  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
> +  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
>    if (inv_code == rhs_code
>        && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
>        && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
>      return lhs;
>    if (!rhs_only && inv_code == lhs_code
>        && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
>        && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
>      return rhs;
>    if (rhs != orig_rhs || lhs != orig_lhs)
>      return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
> @@ -9247,36 +9247,36 @@ fold_comparison (location_t loc, enum tr
>      }
>
>    /* Simplify comparison of something with itself.  (For IEEE
>       floating-point, we can only do some of these simplifications.)  */
>    if (operand_equal_p (arg0, arg1, 0))
>      {
>        switch (code)
>         {
>         case EQ_EXPR:
>           if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
> -             || ! HONOR_NANS (element_mode (arg0)))
> +             || ! HONOR_NANS (arg0))
>             return constant_boolean_node (1, type);
>           break;
>
>         case GE_EXPR:
>         case LE_EXPR:
>           if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
> -             || ! HONOR_NANS (element_mode (arg0)))
> +             || ! HONOR_NANS (arg0))
>             return constant_boolean_node (1, type);
>           return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
>
>         case NE_EXPR:
>           /* For NE, we can only do this simplification if integer
>              or we don't honor IEEE floating point NaNs.  */
>           if (FLOAT_TYPE_P (TREE_TYPE (arg0))
> -             && HONOR_NANS (element_mode (arg0)))
> +             && HONOR_NANS (arg0))
>             break;
>           /* ... fall through ...  */
>         case GT_EXPR:
>         case LT_EXPR:
>           return constant_boolean_node (0, type);
>         default:
>           gcc_unreachable ();
>         }
>      }
>
> @@ -10741,21 +10741,21 @@ fold_binary_loc (location_t loc,
>               if (tem != NULL_TREE)
>                 {
>                   tem = fold_convert_loc (loc, type, tem);
>                   return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
>                 }
>             }
>
>           /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
>              This is not the same for NaNs or if signed zeros are
>              involved.  */
> -         if (!HONOR_NANS (element_mode (arg0))
> +         if (!HONOR_NANS (arg0)
>                && !HONOR_SIGNED_ZEROS (element_mode (arg0))
>               && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
>               && TREE_CODE (arg1) == COMPLEX_CST
>               && real_zerop (TREE_REALPART (arg1)))
>             {
>               tree rtype = TREE_TYPE (TREE_TYPE (arg0));
>               if (real_onep (TREE_IMAGPART (arg1)))
>                 return
>                   fold_build2_loc (loc, COMPLEX_EXPR, type,
>                                negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
> @@ -11673,41 +11673,41 @@ fold_binary_loc (location_t loc,
>
>           /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
>              NaNs or Infinities.  */
>           if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
>                || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
>                || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
>             {
>               tree arg00 = CALL_EXPR_ARG (arg0, 0);
>               tree arg01 = CALL_EXPR_ARG (arg1, 0);
>
> -             if (! HONOR_NANS (element_mode (arg00))
> +             if (! HONOR_NANS (arg00)
>                   && ! HONOR_INFINITIES (element_mode (arg00))
>                   && operand_equal_p (arg00, arg01, 0))
>                 {
>                   tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
>
>                   if (cosfn != NULL_TREE)
>                     return build_call_expr_loc (loc, cosfn, 1, arg00);
>                 }
>             }
>
>           /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
>              NaNs or Infinities.  */
>           if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
>                || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
>                || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
>             {
>               tree arg00 = CALL_EXPR_ARG (arg0, 0);
>               tree arg01 = CALL_EXPR_ARG (arg1, 0);
>
> -             if (! HONOR_NANS (element_mode (arg00))
> +             if (! HONOR_NANS (arg00)
>                   && ! HONOR_INFINITIES (element_mode (arg00))
>                   && operand_equal_p (arg00, arg01, 0))
>                 {
>                   tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
>
>                   if (cosfn != NULL_TREE)
>                     {
>                       tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
>                       return fold_build2_loc (loc, RDIV_EXPR, type,
>                                           build_real (type, dconst1),
> @@ -12835,36 +12835,36 @@ fold_binary_loc (location_t loc,
>               if (TREE_CODE (arg01) == INTEGER_CST
>                   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
>                 fold_overflow_warning (("assuming signed overflow does not "
>                                         "occur when assuming that "
>                                         "(X + c) < X is always false"),
>                                        WARN_STRICT_OVERFLOW_ALL);
>               return constant_boolean_node (0, type);
>             }
>
>           /* Convert (X - c) <= X to true.  */
> -         if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
> +         if (!HONOR_NANS (arg1)
>               && code == LE_EXPR
>               && ((code0 == MINUS_EXPR && is_positive >= 0)
>                   || (code0 == PLUS_EXPR && is_positive <= 0)))
>             {
>               if (TREE_CODE (arg01) == INTEGER_CST
>                   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
>                 fold_overflow_warning (("assuming signed overflow does not "
>                                         "occur when assuming that "
>                                         "(X - c) <= X is always true"),
>                                        WARN_STRICT_OVERFLOW_ALL);
>               return constant_boolean_node (1, type);
>             }
>
>           /* Convert (X + c) >= X to true.  */
> -         if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
> +         if (!HONOR_NANS (arg1)
>               && code == GE_EXPR
>               && ((code0 == PLUS_EXPR && is_positive >= 0)
>                   || (code0 == MINUS_EXPR && is_positive <= 0)))
>             {
>               if (TREE_CODE (arg01) == INTEGER_CST
>                   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
>                 fold_overflow_warning (("assuming signed overflow does not "
>                                         "occur when assuming that "
>                                         "(X + c) >= X is always true"),
>                                        WARN_STRICT_OVERFLOW_ALL);
> @@ -13057,21 +13057,21 @@ fold_binary_loc (location_t loc,
>         return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
>                             build2 (GE_EXPR, type,
>                                     TREE_OPERAND (arg0, 0), tem),
>                             build2 (LE_EXPR, type,
>                                     TREE_OPERAND (arg0, 0), arg1));
>
>        /* Convert ABS_EXPR<x> >= 0 to true.  */
>        strict_overflow_p = false;
>        if (code == GE_EXPR
>           && (integer_zerop (arg1)
> -             || (! HONOR_NANS (element_mode (arg0))
> +             || (! HONOR_NANS (arg0)
>                   && real_zerop (arg1)))
>           && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
>         {
>           if (strict_overflow_p)
>             fold_overflow_warning (("assuming signed overflow does not occur "
>                                     "when simplifying comparison of "
>                                     "absolute value and zero"),
>                                    WARN_STRICT_OVERFLOW_CONDITIONAL);
>           return omit_one_operand_loc (loc, type,
>                                        constant_boolean_node (true, type),
> Index: ifcvt.c
> ===================================================================
> --- ifcvt.c     (revision 218467)
> +++ ifcvt.c     (working copy)
> @@ -1055,21 +1055,21 @@ noce_try_move (struct noce_if_info *if_i
>    rtx cond = if_info->cond;
>    enum rtx_code code = GET_CODE (cond);
>    rtx y;
>    rtx_insn *seq;
>
>    if (code != NE && code != EQ)
>      return FALSE;
>
>    /* This optimization isn't valid if either A or B could be a NaN
>       or a signed zero.  */
> -  if (HONOR_NANS (GET_MODE (if_info->x))
> +  if (HONOR_NANS (if_info->x)
>        || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
>      return FALSE;
>
>    /* Check whether the operands of the comparison are A and in
>       either order.  */
>    if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
>         && rtx_equal_p (if_info->b, XEXP (cond, 1)))
>        || (rtx_equal_p (if_info->a, XEXP (cond, 1))
>           && rtx_equal_p (if_info->b, XEXP (cond, 0))))
>      {
> @@ -1948,21 +1948,21 @@ noce_try_minmax (struct noce_if_info *if
>  {
>    rtx cond, target;
>    rtx_insn *earliest, *seq;
>    enum rtx_code code, op;
>    int unsignedp;
>
>    /* ??? Reject modes with NaNs or signed zeros since we don't know how
>       they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
>       to get the target to tell us...  */
>    if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
> -      || HONOR_NANS (GET_MODE (if_info->x)))
> +      || HONOR_NANS (if_info->x))
>      return FALSE;
>
>    cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
>    if (!cond)
>      return FALSE;
>
>    /* Verify the condition is of the form we expect, and canonicalize
>       the comparison code.  */
>    code = GET_CODE (cond);
>    if (rtx_equal_p (XEXP (cond, 0), if_info->a))
> Index: ipa-inline-analysis.c
> ===================================================================
> --- ipa-inline-analysis.c       (revision 218467)
> +++ ipa-inline-analysis.c       (working copy)
> @@ -372,23 +372,22 @@ add_clause (conditions conditions, struc
>         if (clause & (1 << c2))
>           {
>             condition *cc1 =
>               &(*conditions)[c1 - predicate_first_dynamic_condition];
>             condition *cc2 =
>               &(*conditions)[c2 - predicate_first_dynamic_condition];
>             if (cc1->operand_num == cc2->operand_num
>                 && cc1->val == cc2->val
>                 && cc2->code != IS_NOT_CONSTANT
>                 && cc2->code != CHANGED
> -               && cc1->code == invert_tree_comparison
> -                               (cc2->code,
> -                                HONOR_NANS (TYPE_MODE (TREE_TYPE (cc1->val)))))
> +               && cc1->code == invert_tree_comparison (cc2->code,
> +                                                       HONOR_NANS (cc1->val)))
>               return;
>           }
>      }
>
>
>    /* We run out of variants.  Be conservative in positive direction.  */
>    if (i2 == MAX_CLAUSES)
>      return;
>    /* Keep clauses in decreasing order. This makes equivalence testing easy.  */
>    p->clause[i2 + 1] = 0;
> @@ -1755,23 +1754,21 @@ set_cond_stmt_execution_predicate (struc
>      return;
>    if (!is_gimple_ip_invariant (gimple_cond_rhs (last)))
>      return;
>    op = gimple_cond_lhs (last);
>    /* TODO: handle conditionals like
>       var = op0 < 4;
>       if (var != 0).  */
>    if (unmodified_parm_or_parm_agg_item (info, last, op, &index, &aggpos))
>      {
>        code = gimple_cond_code (last);
> -      inverted_code
> -       = invert_tree_comparison (code,
> -                                 HONOR_NANS (TYPE_MODE (TREE_TYPE (op))));
> +      inverted_code = invert_tree_comparison (code, HONOR_NANS (op));
>
>        FOR_EACH_EDGE (e, ei, bb->succs)
>         {
>           enum tree_code this_code = (e->flags & EDGE_TRUE_VALUE
>                                       ? code : inverted_code);
>           /* invert_tree_comparison will return ERROR_MARK on FP
>              comparsions that are not EQ/NE instead of returning proper
>              unordered one.  Be sure it is not confused with NON_CONSTANT.  */
>           if (this_code != ERROR_MARK)
>             {
> Index: match.pd
> ===================================================================
> --- match.pd    (revision 218467)
> +++ match.pd    (working copy)
> @@ -66,35 +66,34 @@ along with GCC; see the file COPYING3.
>   (if (fold_real_zero_addition_p (type, @1, 1))
>    (non_lvalue @0)))
>
>  /* Simplify x - x.
>     This is unsafe for certain floats even in non-IEEE formats.
>     In IEEE, it is unsafe because it does wrong for NaNs.
>     Also note that operand_equal_p is always false if an operand
>     is volatile.  */
>  (simplify
>   (minus @0 @0)
> - (if (!FLOAT_TYPE_P (type) || !HONOR_NANS (element_mode (type)))
> + (if (!FLOAT_TYPE_P (type) || !HONOR_NANS (type))
>    { build_zero_cst (type); }))
>
>  (simplify
>   (mult @0 integer_zerop@1)
>   @1)
>
>  /* Maybe fold x * 0 to 0.  The expressions aren't the same
>     when x is NaN, since x * 0 is also NaN.  Nor are they the
>     same in modes with signed zeros, since multiplying a
>     negative value by 0 gives -0, not +0.  */
>  (simplify
>   (mult @0 real_zerop@1)
> - (if (!HONOR_NANS (element_mode (type))
> -      && !HONOR_SIGNED_ZEROS (element_mode (type)))
> + (if (!HONOR_NANS (type) && !HONOR_SIGNED_ZEROS (element_mode (type)))
>    @1))
>
>  /* In IEEE floating point, x*1 is not equivalent to x for snans.
>     Likewise for complex arithmetic with signed zeros.  */
>  (simplify
>   (mult @0 real_onep)
>   (if (!HONOR_SNANS (element_mode (type))
>        && (!HONOR_SIGNED_ZEROS (element_mode (type))
>            || !COMPLEX_FLOAT_TYPE_P (type)))
>    (non_lvalue @0)))
> @@ -143,30 +142,30 @@ along with GCC; see the file COPYING3.
>     (if (overflow_p
>          && (TYPE_UNSIGNED (type)
>             || mul != wi::min_value (TYPE_PRECISION (type), SIGNED)))
>      { build_zero_cst (type); }))))
>
>  /* Optimize A / A to 1.0 if we don't care about
>     NaNs or Infinities.  */
>  (simplify
>   (rdiv @0 @0)
>   (if (FLOAT_TYPE_P (type)
> -      && ! HONOR_NANS (element_mode (type))
> +      && ! HONOR_NANS (type)
>        && ! HONOR_INFINITIES (element_mode (type)))
>    { build_one_cst (type); }))
>
>  /* Optimize -A / A to -1.0 if we don't care about
>     NaNs or Infinities.  */
>  (simplify
>   (rdiv:c @0 (negate @0))
>   (if (FLOAT_TYPE_P (type)
> -      && ! HONOR_NANS (element_mode (type))
> +      && ! HONOR_NANS (type)
>        && ! HONOR_INFINITIES (element_mode (type)))
>    { build_minus_one_cst (type); }))
>
>  /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
>  (simplify
>   (rdiv @0 real_onep)
>   (if (!HONOR_SNANS (element_mode (type)))
>    (non_lvalue @0)))
>
>  /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
> @@ -898,29 +897,29 @@ along with GCC; see the file COPYING3.
>      For now implement what forward_propagate_comparison did.  */
>   (simplify
>    (bit_not (cmp @0 @1))
>    (if (VECTOR_TYPE_P (type)
>         || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1))
>     /* Comparison inversion may be impossible for trapping math,
>        invert_tree_comparison will tell us.  But we can't use
>        a computed operator in the replacement tree thus we have
>        to play the trick below.  */
>     (with { enum tree_code ic = invert_tree_comparison
> -             (cmp, HONOR_NANS (element_mode (@0))); }
> +             (cmp, HONOR_NANS (@0)); }
>      (if (ic == icmp)
>       (icmp @0 @1))
>      (if (ic == ncmp)
>       (ncmp @0 @1)))))
>   (simplify
>    (bit_xor (cmp @0 @1) integer_truep)
>    (with { enum tree_code ic = invert_tree_comparison
> -            (cmp, HONOR_NANS (element_mode (@0))); }
> +            (cmp, HONOR_NANS (@0)); }
>     (if (ic == icmp)
>      (icmp @0 @1))
>     (if (ic == ncmp)
>      (ncmp @0 @1)))))
>
>
>  /* Simplification of math builtins.  */
>
>  (define_operator_list LOG BUILT_IN_LOGF BUILT_IN_LOG BUILT_IN_LOGL)
>  (define_operator_list EXP BUILT_IN_EXPF BUILT_IN_EXP BUILT_IN_EXPL)
> Index: real.c
> ===================================================================
> --- real.c      (revision 218467)
> +++ real.c      (working copy)
> @@ -23,20 +23,22 @@
>  #include "system.h"
>  #include "coretypes.h"
>  #include "tm.h"
>  #include "tree.h"
>  #include "diagnostic-core.h"
>  #include "real.h"
>  #include "realmpfr.h"
>  #include "tm_p.h"
>  #include "dfp.h"
>  #include "wide-int.h"
> +#include "rtl.h"
> +#include "options.h"
>
>  /* The floating point model used internally is not exactly IEEE 754
>     compliant, and close to the description in the ISO C99 standard,
>     section 5.2.4.2.2 Characteristics of floating types.
>
>     Specifically
>
>         x = s * b^e * \sum_{k=1}^p f_k * b^{-k}
>
>         where
> @@ -4975,10 +4977,32 @@ get_max_float (const struct real_format
>          doubles.  The value of the long double is the sum of the
>          values of the two parts.  The most significant part is
>          required to be the value of the long double rounded to the
>          nearest double.  Rounding means we need a slightly smaller
>          value for LDBL_MAX.  */
>        buf[4 + fmt->pnan / 4] = "7bde"[fmt->pnan % 4];
>      }
>
>    gcc_assert (strlen (buf) < len);
>  }
> +
> +/* True if mode M has a NaN representation and
> +   the treatment of NaN operands is important.  */
> +
> +bool
> +HONOR_NANS (machine_mode m)
> +{
> +  return MODE_HAS_NANS (m) && !flag_finite_math_only;
> +}
> +
> +bool
> +HONOR_NANS (const_tree t)
> +{
> +  return HONOR_NANS (element_mode (t));
> +}
> +
> +bool
> +HONOR_NANS (const_rtx x)
> +{
> +  return HONOR_NANS (GET_MODE (x));
> +}
> +
> Index: real.h
> ===================================================================
> --- real.h      (revision 218467)
> +++ real.h      (working copy)
> @@ -193,22 +193,23 @@ extern const struct real_format *
>    (FLOAT_MODE_P (MODE) && FLOAT_MODE_FORMAT (MODE)->has_signed_zero)
>  #define MODE_HAS_SIGN_DEPENDENT_ROUNDING(MODE) \
>    (FLOAT_MODE_P (MODE) \
>     && FLOAT_MODE_FORMAT (MODE)->has_sign_dependent_rounding)
>
>  /* True if the given mode has a NaN representation and the treatment of
>     NaN operands is important.  Certain optimizations, such as folding
>     x * 0 into 0, are not correct for NaN operands, and are normally
>     disabled for modes with NaNs.  The user can ask for them to be
>     done anyway using the -funsafe-math-optimizations switch.  */
> -#define HONOR_NANS(MODE) \
> -  (MODE_HAS_NANS (MODE) && !flag_finite_math_only)
> +extern bool HONOR_NANS (machine_mode);
> +extern bool HONOR_NANS (const_tree);
> +extern bool HONOR_NANS (const_rtx);
>
>  /* Like HONOR_NANs, but true if we honor signaling NaNs (or sNaNs).  */
>  #define HONOR_SNANS(MODE) (flag_signaling_nans && HONOR_NANS (MODE))
>
>  /* As for HONOR_NANS, but true if the mode can represent infinity and
>     the treatment of infinite values is important.  */
>  #define HONOR_INFINITIES(MODE) \
>    (MODE_HAS_INFINITIES (MODE) && !flag_finite_math_only)
>
>  /* Like HONOR_NANS, but true if the given mode distinguishes between
> Index: rtlanal.c
> ===================================================================
> --- rtlanal.c   (revision 218467)
> +++ rtlanal.c   (working copy)
> @@ -2545,42 +2545,42 @@ may_trap_p_1 (const_rtx x, unsigned flag
>      case LT:
>      case LTGT:
>      case COMPARE:
>        /* Some floating point comparisons may trap.  */
>        if (!flag_trapping_math)
>         break;
>        /* ??? There is no machine independent way to check for tests that trap
>          when COMPARE is used, though many targets do make this distinction.
>          For instance, sparc uses CCFPE for compares which generate exceptions
>          and CCFP for compares which do not generate exceptions.  */
> -      if (HONOR_NANS (GET_MODE (x)))
> +      if (HONOR_NANS (x))
>         return 1;
>        /* But often the compare has some CC mode, so check operand
>          modes as well.  */
> -      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
> -         || HONOR_NANS (GET_MODE (XEXP (x, 1))))
> +      if (HONOR_NANS (XEXP (x, 0))
> +         || HONOR_NANS (XEXP (x, 1)))
>         return 1;
>        break;
>
>      case EQ:
>      case NE:
>        if (HONOR_SNANS (GET_MODE (x)))
>         return 1;
>        /* Often comparison is CC mode, so check operand modes.  */
>        if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
>           || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
>         return 1;
>        break;
>
>      case FIX:
>        /* Conversion of floating point might trap.  */
> -      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
> +      if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
>         return 1;
>        break;
>
>      case NEG:
>      case ABS:
>      case SUBREG:
>        /* These operations don't trap even with floating point.  */
>        break;
>
>      default:
> Index: simplify-rtx.c
> ===================================================================
> --- simplify-rtx.c      (revision 218467)
> +++ simplify-rtx.c      (working copy)
> @@ -4721,21 +4721,21 @@ simplify_const_relational_operation (enu
>    if (! HONOR_NANS (mode) && code == ORDERED)
>      return const_true_rtx;
>
>    if (! HONOR_NANS (mode) && code == UNORDERED)
>      return const0_rtx;
>
>    /* For modes without NaNs, if the two operands are equal, we know the
>       result except if they have side-effects.  Even with NaNs we know
>       the result of unordered comparisons and, if signaling NaNs are
>       irrelevant, also the result of LT/GT/LTGT.  */
> -  if ((! HONOR_NANS (GET_MODE (trueop0))
> +  if ((! HONOR_NANS (trueop0)
>         || code == UNEQ || code == UNLE || code == UNGE
>         || ((code == LT || code == GT || code == LTGT)
>            && ! HONOR_SNANS (GET_MODE (trueop0))))
>        && rtx_equal_p (trueop0, trueop1)
>        && ! side_effects_p (trueop0))
>      return comparison_result (code, CMP_EQ);
>
>    /* If the operands are floating-point constants, see if we can fold
>       the result.  */
>    if (CONST_DOUBLE_AS_FLOAT_P (trueop0)
> Index: tree-if-conv.c
> ===================================================================
> --- tree-if-conv.c      (revision 218467)
> +++ tree-if-conv.c      (working copy)
> @@ -304,21 +304,21 @@ parse_predicate (tree cond, tree *op0, t
>           return gimple_assign_rhs_code (s);
>         }
>
>        else if (gimple_assign_rhs_code (s) == TRUTH_NOT_EXPR)
>         {
>           tree op = gimple_assign_rhs1 (s);
>           tree type = TREE_TYPE (op);
>           enum tree_code code = parse_predicate (op, op0, op1);
>
>           return code == ERROR_MARK ? ERROR_MARK
> -           : invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
> +           : invert_tree_comparison (code, HONOR_NANS (type));
>         }
>
>        return ERROR_MARK;
>      }
>
>    if (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison)
>      {
>        *op0 = TREE_OPERAND (cond, 0);
>        *op1 = TREE_OPERAND (cond, 1);
>        return TREE_CODE (cond);
> Index: tree-ssa-ccp.c
> ===================================================================
> --- tree-ssa-ccp.c      (revision 218467)
> +++ tree-ssa-ccp.c      (working copy)
> @@ -451,38 +451,38 @@ valid_lattice_transition (ccp_prop_value
>    /* At least the kinds and types should agree now.  */
>    if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
>        || !types_compatible_p (TREE_TYPE (old_val.value),
>                               TREE_TYPE (new_val.value)))
>      return false;
>
>    /* For floats and !HONOR_NANS allow transitions from (partial) NaN
>       to non-NaN.  */
>    tree type = TREE_TYPE (new_val.value);
>    if (SCALAR_FLOAT_TYPE_P (type)
> -      && !HONOR_NANS (TYPE_MODE (type)))
> +      && !HONOR_NANS (type))
>      {
>        if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
>         return true;
>      }
>    else if (VECTOR_FLOAT_TYPE_P (type)
> -          && !HONOR_NANS (TYPE_MODE (TREE_TYPE (type))))
> +          && !HONOR_NANS (type))
>      {
>        for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
>         if (!REAL_VALUE_ISNAN
>                (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
>             && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
>                                  VECTOR_CST_ELT (new_val.value, i), 0))
>           return false;
>        return true;
>      }
>    else if (COMPLEX_FLOAT_TYPE_P (type)
> -          && !HONOR_NANS (TYPE_MODE (TREE_TYPE (type))))
> +          && !HONOR_NANS (type))
>      {
>        if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
>           && !operand_equal_p (TREE_REALPART (old_val.value),
>                                TREE_REALPART (new_val.value), 0))
>         return false;
>        if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
>           && !operand_equal_p (TREE_IMAGPART (old_val.value),
>                                TREE_IMAGPART (new_val.value), 0))
>         return false;
>        return true;
> Index: tree-ssa-ifcombine.c
> ===================================================================
> --- tree-ssa-ifcombine.c        (revision 218467)
> +++ tree-ssa-ifcombine.c        (working copy)
> @@ -512,26 +512,26 @@ ifcombine_ifandif (basic_block inner_con
>    else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
>           && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
>      {
>        tree t;
>        enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
>        enum tree_code outer_cond_code = gimple_cond_code (outer_cond);
>
>        /* Invert comparisons if necessary (and possible).  */
>        if (inner_inv)
>         inner_cond_code = invert_tree_comparison (inner_cond_code,
> -         HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (inner_cond)))));
> +         HONOR_NANS (gimple_cond_lhs (inner_cond)));
>        if (inner_cond_code == ERROR_MARK)
>         return false;
>        if (outer_inv)
>         outer_cond_code = invert_tree_comparison (outer_cond_code,
> -         HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (outer_cond)))));
> +         HONOR_NANS (gimple_cond_lhs (outer_cond)));
>        if (outer_cond_code == ERROR_MARK)
>         return false;
>        /* Don't return false so fast, try maybe_fold_or_comparisons?  */
>
>        if (!(t = maybe_fold_and_comparisons (inner_cond_code,
>                                             gimple_cond_lhs (inner_cond),
>                                             gimple_cond_rhs (inner_cond),
>                                             outer_cond_code,
>                                             gimple_cond_lhs (outer_cond),
>                                             gimple_cond_rhs (outer_cond))))
> Index: tree-ssa-phiopt.c
> ===================================================================
> --- tree-ssa-phiopt.c   (revision 218467)
> +++ tree-ssa-phiopt.c   (working copy)
> @@ -925,21 +925,21 @@ minmax_replacement (basic_block cond_bb,
>    gcond *cond;
>    gassign *new_stmt;
>    edge true_edge, false_edge;
>    enum tree_code cmp, minmax, ass_code;
>    tree smaller, larger, arg_true, arg_false;
>    gimple_stmt_iterator gsi, gsi_from;
>
>    type = TREE_TYPE (PHI_RESULT (phi));
>
>    /* The optimization may be unsafe due to NaNs.  */
> -  if (HONOR_NANS (TYPE_MODE (type)))
> +  if (HONOR_NANS (type))
>      return false;
>
>    cond = as_a <gcond *> (last_stmt (cond_bb));
>    cmp = gimple_cond_code (cond);
>
>    /* This transformation is only valid for order comparisons.  Record which
>       operand is smaller/larger if the result of the comparison is true.  */
>    if (cmp == LT_EXPR || cmp == LE_EXPR)
>      {
>        smaller = gimple_cond_lhs (cond);
> @@ -1348,22 +1348,21 @@ neg_replacement (basic_block cond_bb, ba
>    invert = false_edge->dest == middle_bb;
>
>    /* Unlike abs_replacement, we can handle arbitrary conditionals here.  */
>    cond = last_stmt (cond_bb);
>    cond_code = gimple_cond_code (cond);
>
>    /* If inversion is needed, first try to invert the test since
>       that's cheapest.  */
>    if (invert)
>      {
> -      bool honor_nans
> -       = HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (cond))));
> +      bool honor_nans = HONOR_NANS (gimple_cond_lhs (cond));
>        enum tree_code new_code = invert_tree_comparison (cond_code, honor_nans);
>
>        /* If invert_tree_comparison was successful, then use its return
>          value as the new code and note that inversion is no longer
>          needed.  */
>        if (new_code != ERROR_MARK)
>         {
>           cond_code = new_code;
>           invert = false;
>         }
> Index: tree-ssa-reassoc.c
> ===================================================================
> --- tree-ssa-reassoc.c  (revision 218467)
> +++ tree-ssa-reassoc.c  (working copy)
> @@ -959,21 +959,21 @@ eliminate_using_constants (enum tree_cod
>                   if (dump_file && (dump_flags & TDF_DETAILS))
>                     fprintf (dump_file, "Found | 0, removing\n");
>                   ops->pop ();
>                   reassociate_stats.ops_eliminated++;
>                 }
>             }
>           break;
>         case MULT_EXPR:
>           if (integer_zerop (oelast->op)
>               || (FLOAT_TYPE_P (type)
> -                 && !HONOR_NANS (TYPE_MODE (type))
> +                 && !HONOR_NANS (type)
>                   && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
>                   && real_zerop (oelast->op)))
>             {
>               if (ops->length () != 1)
>                 {
>                   if (dump_file && (dump_flags & TDF_DETAILS))
>                     fprintf (dump_file, "Found * 0, removing all other ops\n");
>
>                   reassociate_stats.ops_eliminated += ops->length () - 1;
>                   ops->truncate (1);
> Index: tree-ssa-tail-merge.c
> ===================================================================
> --- tree-ssa-tail-merge.c       (revision 218467)
> +++ tree-ssa-tail-merge.c       (working copy)
> @@ -1190,22 +1190,21 @@ gimple_equal_p (same_succ same_succ, gim
>        t2 = gimple_cond_rhs (s2);
>        if (!gimple_operand_equal_value_p (t1, t2))
>         return false;
>
>        code1 = gimple_expr_code (s1);
>        code2 = gimple_expr_code (s2);
>        inv_cond = (bitmap_bit_p (same_succ->inverse, bb1->index)
>                   != bitmap_bit_p (same_succ->inverse, bb2->index));
>        if (inv_cond)
>         {
> -         bool honor_nans
> -           = HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (s1))));
> +         bool honor_nans = HONOR_NANS (t1);
>           code2 = invert_tree_comparison (code2, honor_nans);
>         }
>        return code1 == code2;
>
>      default:
>        return false;
>      }
>  }
>
>  /* Let GSI skip backwards over local defs.  Return the earliest vuse in VUSE.
>
