gcc/ChangeLog:

2020-03-30  Martin Liska  <mli...@suse.cz>

        * expr.c (expand_expr_real_2): Add gcc_unreachable; we should not
        reach this path anymore.
        (do_store_flag): Likewise.
        * internal-fn.c (expand_vect_cond_optab_fn): New.
        (expand_VCOND): Likewise.
        (expand_VCONDU): Likewise.
        (expand_VCONDEQ): Likewise.
        (expand_vect_cond_mask_optab_fn): Likewise.
        (expand_VCOND_MASK): Likewise.
        * internal-fn.def (VCOND): New.
        (VCONDU): Likewise.
        (VCONDEQ): Likewise.
        (VCOND_MASK): Likewise.
        * optabs.c (expand_vec_cond_mask_expr): Removed.
        (expand_vec_cond_expr): Likewise.
        (vector_compare_rtx): No longer static.
        * optabs.h (expand_vec_cond_expr): Removed.
        (vector_compare_rtx): New declaration.
        * passes.def: Add pass_gimple_isel.
        * tree-cfg.c (verify_gimple_assign_ternary): Add a new GIMPLE
        check: after vector lowering the first operand of a VEC_COND_EXPR
        must not be a GENERIC comparison.
        * tree-pass.h (make_pass_gimple_isel): New.
        * tree-ssa-forwprop.c (pass_forwprop::execute): Do not forward
        propagate into already lowered VEC_COND_EXPRs.
        * tree-vect-generic.c (expand_vector_divmod): Build the comparison
        into an SSA_NAME instead of a GENERIC condition.
        (expand_vector_condition): Expand a tcc_comparison condition of a
        VEC_COND_EXPR into an SSA_NAME.
        (gimple_expand_vec_cond_expr): New.
        (gimple_expand_vec_cond_exprs): New.
        (class pass_gimple_isel): New.
        (make_pass_gimple_isel): New.
---
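For reference, the new isel pass runs late and rewrites each VEC_COND_EXPR
assignment into a call to one of the new internal functions (IFN_VCOND,
IFN_VCONDU, IFN_VCONDEQ or IFN_VCOND_MASK); the expander then maps the call
directly to the corresponding optab instead of going through the removed
expand_vec_cond_expr.  Roughly, with illustrative SSA names and assuming
the target provides a vcond_mask pattern for the modes involved, a
statement such as

  x_5 = VEC_COND_EXPR <mask_1, a_2, b_3>;

becomes, before expansion,

  x_5 = .VCOND_MASK (mask_1, a_2, b_3);

A minimal GNU C sketch of the kind of code that produces such vector masks
and selects (type name, function name and target capabilities are only
illustrative; no testcase is added by this patch):

  typedef int v4si __attribute__ ((vector_size (16)));

  /* Element-wise maximum: the comparison yields an all-ones/all-zeros
     mask per element; such masks and mask-based selects are what the
     VEC_COND_EXPR and vec_cmp paths touched by this patch handle.  */
  v4si
  vec_max (v4si a, v4si b)
  {
    v4si mask = a > b;
    return (a & mask) | (b & ~mask);
  }
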
 gcc/expr.c              |  25 +----
 gcc/internal-fn.c       |  98 +++++++++++++++++
 gcc/internal-fn.def     |   5 +
 gcc/optabs.c            | 124 +--------------------
 gcc/optabs.h            |   7 +-
 gcc/passes.def          |   1 +
 gcc/tree-cfg.c          |   8 ++
 gcc/tree-pass.h         |   1 +
 gcc/tree-ssa-forwprop.c |   6 +
 gcc/tree-vect-generic.c | 237 +++++++++++++++++++++++++++++++++++++++-
 10 files changed, 358 insertions(+), 154 deletions(-)

diff --git a/gcc/expr.c b/gcc/expr.c
index 6b75028e7f1..b284f723220 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -9242,17 +9242,8 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
       if (temp != 0)
        return temp;

-      /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
-        and similarly for MAX <x, y>.  */
       if (VECTOR_TYPE_P (type))
-       {
-         tree t0 = make_tree (type, op0);
-         tree t1 = make_tree (type, op1);
-         tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
-                                   type, t0, t1);
-         return expand_vec_cond_expr (type, comparison, t0, t1,
-                                      original_target);
-       }
+       gcc_unreachable ();

       /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */
@@ -9841,10 +9832,6 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
        return temp;
       }
- case VEC_COND_EXPR:
-      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
-      return target;
-
     case VEC_DUPLICATE_EXPR:
       op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
       target = expand_vector_broadcast (mode, op0);
@@ -12175,8 +12162,7 @@ do_store_flag (sepops ops, rtx target, machine_mode mode)
   STRIP_NOPS (arg1);

   /* For vector typed comparisons emit code to generate the desired
-     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
-     expander for this.  */
+     all-ones or all-zeros mask.  */
   if (TREE_CODE (ops->type) == VECTOR_TYPE)
     {
       tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
@@ -12184,12 +12170,7 @@ do_store_flag (sepops ops, rtx target, machine_mode mode)
          && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
        return expand_vec_cmp_expr (ops->type, ifexp, target);
       else
-       {
-         tree if_true = constant_boolean_node (true, ops->type);
-         tree if_false = constant_boolean_node (false, ops->type);
-         return expand_vec_cond_expr (ops->type, ifexp, if_true,
-                                      if_false, target);
-       }
+       gcc_unreachable ();
     }

   /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
diff --git a/gcc/internal-fn.c b/gcc/internal-fn.c
index 5e9aa60721e..aa41b4f6870 100644
--- a/gcc/internal-fn.c
+++ b/gcc/internal-fn.c
@@ -49,6 +49,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "gimple-ssa.h"
 #include "tree-phinodes.h"
 #include "ssa-iterators.h"
+#include "explow.h"
/* The names of each internal function, indexed by function number. */
 const char *const internal_fn_name_array[] = {
@@ -2548,6 +2549,103 @@ expand_mask_store_optab_fn (internal_fn, gcall *stmt, convert_optab optab)

 #define expand_mask_store_lanes_optab_fn expand_mask_store_optab_fn

+/* Expand VCOND, VCONDU and VCONDEQ internal functions.
+   The expansion of STMT is based on the associated vcond, vcondu or
+   vcondeq optab.  */
+
+static void
+expand_vect_cond_optab_fn (internal_fn ifn, gcall *stmt)
+{
+  class expand_operand ops[6];
+  insn_code icode;
+  tree lhs = gimple_call_lhs (stmt);
+  tree op0a = gimple_call_arg (stmt, 0);
+  tree op0b = gimple_call_arg (stmt, 1);
+  tree op1 = gimple_call_arg (stmt, 2);
+  tree op2 = gimple_call_arg (stmt, 3);
+  enum tree_code tcode = (tree_code) int_cst_value (gimple_call_arg (stmt, 4));
+
+  tree vec_cond_type = TREE_TYPE (lhs);
+  tree op_mode = TREE_TYPE (op0a);
+  bool unsignedp = TYPE_UNSIGNED (op_mode);
+
+  machine_mode mode = TYPE_MODE (vec_cond_type);
+  machine_mode cmp_op_mode = TYPE_MODE (op_mode);
+
+  enum optab_tag optab;
+  switch (ifn)
+    {
+    case IFN_VCOND:
+      optab = vcond_optab;
+      break;
+    case IFN_VCONDU:
+      optab = vcondu_optab;
+      break;
+    case IFN_VCONDEQ:
+      optab = vcondeq_optab;
+      break;
+    default:
+      gcc_unreachable ();
+    }
+
+  icode = convert_optab_handler (optab, mode, cmp_op_mode);
+  rtx comparison
+    = vector_compare_rtx (VOIDmode, tcode, op0a, op0b, unsignedp, icode, 4);
+  rtx rtx_op1 = expand_normal (op1);
+  rtx rtx_op2 = expand_normal (op2);
+
+  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
+  create_output_operand (&ops[0], target, mode);
+  create_input_operand (&ops[1], rtx_op1, mode);
+  create_input_operand (&ops[2], rtx_op2, mode);
+  create_fixed_operand (&ops[3], comparison);
+  create_fixed_operand (&ops[4], XEXP (comparison, 0));
+  create_fixed_operand (&ops[5], XEXP (comparison, 1));
+  expand_insn (icode, 6, ops);
+}
+
+#define expand_VCOND expand_vect_cond_optab_fn
+#define expand_VCONDU expand_vect_cond_optab_fn
+#define expand_VCONDEQ expand_vect_cond_optab_fn
+
+/* Expand the VCOND_MASK internal function.
+   The expansion of STMT uses the vcond_mask optab.  */
+
+static void
+expand_vect_cond_mask_optab_fn (internal_fn, gcall *stmt)
+{
+  class expand_operand ops[4];
+
+  tree lhs = gimple_call_lhs (stmt);
+  tree op0 = gimple_call_arg (stmt, 0);
+  tree op1 = gimple_call_arg (stmt, 1);
+  tree op2 = gimple_call_arg (stmt, 2);
+  tree vec_cond_type = TREE_TYPE (lhs);
+
+  machine_mode mode = TYPE_MODE (vec_cond_type);
+  machine_mode mask_mode = TYPE_MODE (TREE_TYPE (op0));
+
+  enum insn_code icode = convert_optab_handler (vcond_mask_optab, mode, mask_mode);
+  rtx mask, rtx_op1, rtx_op2;
+
+  gcc_assert (icode != CODE_FOR_nothing);
+
+  mask = expand_normal (op0);
+  rtx_op1 = expand_normal (op1);
+  rtx_op2 = expand_normal (op2);
+
+  mask = force_reg (mask_mode, mask);
+  rtx_op1 = force_reg (GET_MODE (rtx_op1), rtx_op1);
+
+  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
+  create_output_operand (&ops[0], target, mode);
+  create_input_operand (&ops[1], rtx_op1, mode);
+  create_input_operand (&ops[2], rtx_op2, mode);
+  create_input_operand (&ops[3], mask, mask_mode);
+  expand_insn (icode, 4, ops);
+}
+
+#define expand_VCOND_MASK expand_vect_cond_mask_optab_fn
+
 static void
 expand_ABNORMAL_DISPATCHER (internal_fn, gcall *)
 {
diff --git a/gcc/internal-fn.def b/gcc/internal-fn.def
index 1d190d492ff..5602619fd2a 100644
--- a/gcc/internal-fn.def
+++ b/gcc/internal-fn.def
@@ -319,6 +319,11 @@ DEF_INTERNAL_FN (TSAN_FUNC_EXIT, ECF_NOVOPS | ECF_LEAF | ECF_NOTHROW, NULL)
 DEF_INTERNAL_FN (VA_ARG, ECF_NOTHROW | ECF_LEAF, NULL)
 DEF_INTERNAL_FN (VEC_CONVERT, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
+DEF_INTERNAL_FN(VCOND, ECF_NOTHROW | ECF_LEAF, NULL)
+DEF_INTERNAL_FN(VCONDU, ECF_NOTHROW | ECF_LEAF, NULL)
+DEF_INTERNAL_FN(VCONDEQ, ECF_NOTHROW | ECF_LEAF, NULL)
+DEF_INTERNAL_FN(VCOND_MASK, ECF_NOTHROW | ECF_LEAF, NULL)
+
 /* An unduplicable, uncombinable function.  Generally used to preserve
    a CFG property in the face of jump threading, tail merging or
    other such optimizations.  The first argument distinguishes
diff --git a/gcc/optabs.c b/gcc/optabs.c
index 6d0b76c13ba..184827fdf4e 100644
--- a/gcc/optabs.c
+++ b/gcc/optabs.c
@@ -5442,7 +5442,7 @@ get_rtx_code (enum tree_code tcode, bool unsignedp)
    first comparison operand for insn ICODE.  Do not generate the
    compare instruction itself.  */
-static rtx
+rtx
 vector_compare_rtx (machine_mode cmp_mode, enum tree_code tcode,
                    tree t_op0, tree t_op1, bool unsignedp,
                    enum insn_code icode, unsigned int opno)
@@ -5809,128 +5809,6 @@ expand_vec_perm_var (machine_mode mode, rtx v0, rtx v1, rtx sel, rtx target)
   return tmp;
 }
-/* Generate insns for a VEC_COND_EXPR with mask, given its TYPE and its
-   three operands.  */
-
-rtx
-expand_vec_cond_mask_expr (tree vec_cond_type, tree op0, tree op1, tree op2,
-                          rtx target)
-{
-  class expand_operand ops[4];
-  machine_mode mode = TYPE_MODE (vec_cond_type);
-  machine_mode mask_mode = TYPE_MODE (TREE_TYPE (op0));
-  enum insn_code icode = get_vcond_mask_icode (mode, mask_mode);
-  rtx mask, rtx_op1, rtx_op2;
-
-  if (icode == CODE_FOR_nothing)
-    return 0;
-
-  mask = expand_normal (op0);
-  rtx_op1 = expand_normal (op1);
-  rtx_op2 = expand_normal (op2);
-
-  mask = force_reg (mask_mode, mask);
-  rtx_op1 = force_reg (GET_MODE (rtx_op1), rtx_op1);
-
-  create_output_operand (&ops[0], target, mode);
-  create_input_operand (&ops[1], rtx_op1, mode);
-  create_input_operand (&ops[2], rtx_op2, mode);
-  create_input_operand (&ops[3], mask, mask_mode);
-  expand_insn (icode, 4, ops);
-
-  return ops[0].value;
-}
-
-/* Generate insns for a VEC_COND_EXPR, given its TYPE and its
-   three operands.  */
-
-rtx
-expand_vec_cond_expr (tree vec_cond_type, tree op0, tree op1, tree op2,
-                     rtx target)
-{
-  class expand_operand ops[6];
-  enum insn_code icode;
-  rtx comparison, rtx_op1, rtx_op2;
-  machine_mode mode = TYPE_MODE (vec_cond_type);
-  machine_mode cmp_op_mode;
-  bool unsignedp;
-  tree op0a, op0b;
-  enum tree_code tcode;
-
-  if (COMPARISON_CLASS_P (op0))
-    {
-      op0a = TREE_OPERAND (op0, 0);
-      op0b = TREE_OPERAND (op0, 1);
-      tcode = TREE_CODE (op0);
-    }
-  else
-    {
-      gcc_assert (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (op0)));
-      if (get_vcond_mask_icode (mode, TYPE_MODE (TREE_TYPE (op0)))
-         != CODE_FOR_nothing)
-       return expand_vec_cond_mask_expr (vec_cond_type, op0, op1,
-                                         op2, target);
-      /* Fake op0 < 0.  */
-      else
-       {
-         gcc_assert (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (op0)))
-                     == MODE_VECTOR_INT);
-         op0a = op0;
-         op0b = build_zero_cst (TREE_TYPE (op0));
-         tcode = LT_EXPR;
-       }
-    }
-  cmp_op_mode = TYPE_MODE (TREE_TYPE (op0a));
-  unsignedp = TYPE_UNSIGNED (TREE_TYPE (op0a));
-
-
-  gcc_assert (known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (cmp_op_mode))
-             && known_eq (GET_MODE_NUNITS (mode),
-                          GET_MODE_NUNITS (cmp_op_mode)));
-
-  icode = get_vcond_icode (mode, cmp_op_mode, unsignedp);
-  if (icode == CODE_FOR_nothing)
-    {
-      if (tcode == LT_EXPR
-         && op0a == op0
-         && TREE_CODE (op0) == VECTOR_CST)
-       {
-         /* A VEC_COND_EXPR condition could be folded from EQ_EXPR/NE_EXPR
-            into a constant when only get_vcond_eq_icode is supported.
-            Verify < 0 and != 0 behave the same and change it to NE_EXPR.  */
-         unsigned HOST_WIDE_INT nelts;
-         if (!VECTOR_CST_NELTS (op0).is_constant (&nelts))
-           {
-             if (VECTOR_CST_STEPPED_P (op0))
-               return 0;
-             nelts = vector_cst_encoded_nelts (op0);
-           }
-         for (unsigned int i = 0; i < nelts; ++i)
-           if (tree_int_cst_sgn (vector_cst_elt (op0, i)) == 1)
-             return 0;
-         tcode = NE_EXPR;
-       }
-      if (tcode == EQ_EXPR || tcode == NE_EXPR)
-       icode = get_vcond_eq_icode (mode, cmp_op_mode);
-      if (icode == CODE_FOR_nothing)
-       return 0;
-    }
-
-  comparison = vector_compare_rtx (VOIDmode, tcode, op0a, op0b, unsignedp,
-                                  icode, 4);
-  rtx_op1 = expand_normal (op1);
-  rtx_op2 = expand_normal (op2);
-
-  create_output_operand (&ops[0], target, mode);
-  create_input_operand (&ops[1], rtx_op1, mode);
-  create_input_operand (&ops[2], rtx_op2, mode);
-  create_fixed_operand (&ops[3], comparison);
-  create_fixed_operand (&ops[4], XEXP (comparison, 0));
-  create_fixed_operand (&ops[5], XEXP (comparison, 1));
-  expand_insn (icode, 6, ops);
-  return ops[0].value;
-}
-
 /* Generate VEC_SERIES_EXPR <OP0, OP1>, returning a value of mode VMODE.
    Use TARGET for the result if nonnull and convenient.  */
diff --git a/gcc/optabs.h b/gcc/optabs.h
index 5bd19503a0a..7c2ec257cb0 100644
--- a/gcc/optabs.h
+++ b/gcc/optabs.h
@@ -321,9 +321,6 @@ extern rtx expand_vec_perm_const (machine_mode, rtx, rtx,
 /* Generate code for vector comparison.  */
 extern rtx expand_vec_cmp_expr (tree, tree, rtx);
-/* Generate code for VEC_COND_EXPR. */
-extern rtx expand_vec_cond_expr (tree, tree, tree, tree, rtx);
-
 /* Generate code for VEC_SERIES_EXPR.  */
 extern rtx expand_vec_series_expr (machine_mode, rtx, rtx, rtx);
@@ -364,5 +361,9 @@ extern void expand_jump_insn (enum insn_code icode, unsigned int nops,
                              class expand_operand *ops);
extern enum rtx_code get_rtx_code (enum tree_code tcode, bool unsignedp);
+extern rtx vector_compare_rtx (machine_mode cmp_mode, enum tree_code tcode,
+                              tree t_op0, tree t_op1, bool unsignedp,
+                              enum insn_code icode, unsigned int opno);
+
#endif /* GCC_OPTABS_H */
diff --git a/gcc/passes.def b/gcc/passes.def
index 92cbe587a8a..e9f59d756c9 100644
--- a/gcc/passes.def
+++ b/gcc/passes.def
@@ -398,6 +398,7 @@ along with GCC; see the file COPYING3.  If not see
   NEXT_PASS (pass_cleanup_eh);
   NEXT_PASS (pass_lower_resx);
   NEXT_PASS (pass_nrv);
+  NEXT_PASS (pass_gimple_isel);
   NEXT_PASS (pass_cleanup_cfg_post_optimizing);
   NEXT_PASS (pass_warn_function_noreturn);
   NEXT_PASS (pass_gen_hsail);
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index d06a479e570..16ff06fbf88 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -4199,6 +4199,14 @@ verify_gimple_assign_ternary (gassign *stmt)
          debug_generic_expr (rhs1_type);
          return true;
        }
+      else if (cfun->curr_properties & PROP_gimple_lvec
+              && TREE_CODE_CLASS (TREE_CODE (rhs1)) == tcc_comparison)
+       {
+         error ("the first argument of %<VEC_COND_EXPR%> cannot be "
+                "a %<GENERIC%> tree comparison expression");
+         debug_generic_expr (rhs1);
+         return true;
+       }
       /* Fallthrough.  */
     case COND_EXPR:
       if (!is_gimple_val (rhs1)
diff --git a/gcc/tree-pass.h b/gcc/tree-pass.h
index 576b3f67434..4efece1b35b 100644
--- a/gcc/tree-pass.h
+++ b/gcc/tree-pass.h
@@ -626,6 +626,7 @@ extern gimple_opt_pass *make_pass_local_fn_summary (gcc::context *ctxt);
 extern gimple_opt_pass *make_pass_update_address_taken (gcc::context *ctxt);
 extern gimple_opt_pass *make_pass_convert_switch (gcc::context *ctxt);
 extern gimple_opt_pass *make_pass_lower_vaarg (gcc::context *ctxt);
+extern gimple_opt_pass *make_pass_gimple_isel (gcc::context *ctxt);
/* Current optimization pass. */
 extern opt_pass *current_pass;
diff --git a/gcc/tree-ssa-forwprop.c b/gcc/tree-ssa-forwprop.c
index 759baf56897..fce392e204c 100644
--- a/gcc/tree-ssa-forwprop.c
+++ b/gcc/tree-ssa-forwprop.c
@@ -3125,6 +3125,12 @@ pass_forwprop::execute (function *fun)
                    if (code == COND_EXPR
                        || code == VEC_COND_EXPR)
                      {
+                       /* Do not propagate into VEC_COND_EXPRs once the
+                          vector lowering pass has run.  */
+                       if (code == VEC_COND_EXPR
+                           && (fun->curr_properties & PROP_gimple_lvec))
+                         break;
+
                        /* In this case the entire COND_EXPR is in rhs1. */
                        if (forward_propagate_into_cond (&gsi))
                          {
diff --git a/gcc/tree-vect-generic.c b/gcc/tree-vect-generic.c
index a7fe83da0e3..8f6d63f01c5 100644
--- a/gcc/tree-vect-generic.c
+++ b/gcc/tree-vect-generic.c
@@ -694,12 +694,14 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0,
          if (addend == NULL_TREE
              && expand_vec_cond_expr_p (type, type, LT_EXPR))
            {
-             tree zero, cst, cond, mask_type;
-             gimple *stmt;
+             tree zero, cst, mask_type, mask;
+             gimple *stmt, *cond;

              mask_type = truth_type_for (type);
              zero = build_zero_cst (type);
-             cond = build2 (LT_EXPR, mask_type, op0, zero);
+             mask = make_ssa_name (mask_type);
+             cond = gimple_build_assign (mask, LT_EXPR, op0, zero);
+             gsi_insert_before (gsi, cond, GSI_SAME_STMT);
              tree_vector_builder vec (type, nunits, 1);
              for (i = 0; i < nunits; i++)
                vec.quick_push (build_int_cst (TREE_TYPE (type),
@@ -707,8 +709,8 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0,
                                                << shifts[i]) - 1));
              cst = vec.build ();
              addend = make_ssa_name (type);
-             stmt = gimple_build_assign (addend, VEC_COND_EXPR, cond,
-                                         cst, zero);
+             stmt
+               = gimple_build_assign (addend, VEC_COND_EXPR, mask, cst, zero);
              gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
            }
        }
@@ -964,7 +966,17 @@ expand_vector_condition (gimple_stmt_iterator *gsi)
     }

   if (expand_vec_cond_expr_p (type, TREE_TYPE (a1), TREE_CODE (a)))
-    return;
+    {
+      if (a_is_comparison)
+       {
+         a = gimplify_build2 (gsi, TREE_CODE (a), TREE_TYPE (a), a1, a2);
+         gimple_assign_set_rhs1 (stmt, a);
+         update_stmt (stmt);
+         return;
+       }
+      gcc_assert (TREE_CODE (a) == SSA_NAME || TREE_CODE (a) == VECTOR_CST);
+      return;
+    }

   /* Handle vector boolean types with bitmasks.  If there is a comparison
      and we can expand the comparison into the vector boolean bitmask,
@@ -2241,6 +2253,176 @@ expand_vector_operations (void)
   return cfg_changed ? TODO_cleanup_cfg : 0;
 }

+/* Expand the VEC_COND_EXPR gimple assignment at *GSI into a call to an
+   internal function based on the type of the selected expansion.  */
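+/* For instance (SSA names are illustrative): when the mask operand is a
+   vector boolean SSA_NAME that is not defined by a comparison and the
+   target provides a vcond_mask pattern for the modes involved, the
+   statement
+     x_5 = VEC_COND_EXPR <mask_1, a_2, b_3>;
+   is rewritten into the internal-function call
+     x_5 = .VCOND_MASK (mask_1, a_2, b_3);  */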
+
+static gimple *
+gimple_expand_vec_cond_expr (gimple_stmt_iterator *gsi,
+                            hash_map<tree, unsigned int> *vec_cond_ssa_name_uses)
+{
+  tree lhs, op0a = NULL_TREE, op0b = NULL_TREE;
+  enum tree_code code;
+  enum tree_code tcode;
+  machine_mode cmp_op_mode;
+  bool unsignedp;
+  enum insn_code icode;
+  imm_use_iterator imm_iter;
+
+  /* Only consider code == GIMPLE_ASSIGN.  */
+  gassign *stmt = dyn_cast<gassign *> (gsi_stmt (*gsi));
+  if (!stmt)
+    return NULL;
+
+  code = gimple_assign_rhs_code (stmt);
+  if (code != VEC_COND_EXPR)
+    return NULL;
+
+  tree op0 = gimple_assign_rhs1 (stmt);
+  tree op1 = gimple_assign_rhs2 (stmt);
+  tree op2 = gimple_assign_rhs3 (stmt);
+  lhs = gimple_assign_lhs (stmt);
+  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
+
+  gcc_assert (!COMPARISON_CLASS_P (op0));
+  if (TREE_CODE (op0) == SSA_NAME)
+    {
+      unsigned int used_vec_cond_exprs = 0;
+      unsigned int *slot = vec_cond_ssa_name_uses->get (op0);
+      if (slot)
+       used_vec_cond_exprs = *slot;
+      else
+       {
+         gimple *use_stmt;
+         FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, op0)
+           {
+             gassign *assign = dyn_cast<gassign *> (use_stmt);
+             if (assign != NULL
+                 && gimple_assign_rhs_code (assign) == VEC_COND_EXPR
+                 && gimple_assign_rhs1 (assign) == op0)
+               used_vec_cond_exprs++;
+           }
+         vec_cond_ssa_name_uses->put (op0, used_vec_cond_exprs);
+       }
+
+      gassign *def_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (op0));
+      if (def_stmt)
+       {
+         tcode = gimple_assign_rhs_code (def_stmt);
+         op0a = gimple_assign_rhs1 (def_stmt);
+         op0b = gimple_assign_rhs2 (def_stmt);
+
+         tree op0a_type = TREE_TYPE (op0a);
+         if (used_vec_cond_exprs >= 2
+             && (get_vcond_mask_icode (mode, TYPE_MODE (op0a_type))
+                 != CODE_FOR_nothing)
+             && expand_vec_cmp_expr_p (op0a_type, TREE_TYPE (lhs), tcode))
+           {
+             /* Keep the SSA name and use vcond_mask.  */
+             tcode = TREE_CODE (op0);
+           }
+       }
+      else
+       tcode = TREE_CODE (op0);
+    }
+  else
+    tcode = TREE_CODE (op0);
+
+  if (TREE_CODE_CLASS (tcode) != tcc_comparison)
+    {
+      gcc_assert (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (op0)));
+      if (get_vcond_mask_icode (mode, TYPE_MODE (TREE_TYPE (op0)))
+         != CODE_FOR_nothing)
+       return gimple_build_call_internal (IFN_VCOND_MASK, 3, op0, op1, op2);
+      /* Fake op0 < 0.  */
+      else
+       {
+         gcc_assert (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (op0)))
+                     == MODE_VECTOR_INT);
+         op0a = op0;
+         op0b = build_zero_cst (TREE_TYPE (op0));
+         tcode = LT_EXPR;
+       }
+    }
+  cmp_op_mode = TYPE_MODE (TREE_TYPE (op0a));
+  unsignedp = TYPE_UNSIGNED (TREE_TYPE (op0a));
+
+
+  gcc_assert (known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (cmp_op_mode))
+             && known_eq (GET_MODE_NUNITS (mode),
+                          GET_MODE_NUNITS (cmp_op_mode)));
+
+  icode = get_vcond_icode (mode, cmp_op_mode, unsignedp);
+  if (icode == CODE_FOR_nothing)
+    {
+      if (tcode == LT_EXPR
+         && op0a == op0
+         && TREE_CODE (op0) == VECTOR_CST)
+       {
+         /* A VEC_COND_EXPR condition could be folded from EQ_EXPR/NE_EXPR
+            into a constant when only get_vcond_eq_icode is supported.
+            Verify < 0 and != 0 behave the same and change it to NE_EXPR.  */
+         unsigned HOST_WIDE_INT nelts;
+         if (!VECTOR_CST_NELTS (op0).is_constant (&nelts))
+           {
+             if (VECTOR_CST_STEPPED_P (op0))
+               gcc_unreachable ();
+             nelts = vector_cst_encoded_nelts (op0);
+           }
+         for (unsigned int i = 0; i < nelts; ++i)
+           if (tree_int_cst_sgn (vector_cst_elt (op0, i)) == 1)
+             gcc_unreachable ();
+         tcode = NE_EXPR;
+       }
+      if (tcode == EQ_EXPR || tcode == NE_EXPR)
+       {
+         tree tcode_tree = build_int_cst (integer_type_node, tcode);
+         return gimple_build_call_internal (IFN_VCONDEQ, 5, op0a, op0b, op1,
+                                            op2, tcode_tree);
+       }
+    }
+
+  gcc_assert (icode != CODE_FOR_nothing);
+  tree tcode_tree = build_int_cst (integer_type_node, tcode);
+  return gimple_build_call_internal (unsignedp ? IFN_VCONDU : IFN_VCOND,
+                                    5, op0a, op0b, op1, op2, tcode_tree);
+}
+
+/* Iterate over all gimple statements and try to expand
+   VEC_COND_EXPR assignments.  */
+
+static unsigned int
+gimple_expand_vec_cond_exprs (void)
+{
+  gimple_stmt_iterator gsi;
+  basic_block bb;
+  bool cfg_changed = false;
+  hash_map<tree, unsigned int> vec_cond_ssa_name_uses;
+
+  FOR_EACH_BB_FN (bb, cfun)
+    {
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       {
+         gimple *g = gimple_expand_vec_cond_expr (&gsi,
+                                                  &vec_cond_ssa_name_uses);
+         if (g != NULL)
+           {
+             tree lhs = gimple_assign_lhs (gsi_stmt (gsi));
+             gimple_set_lhs (g, lhs);
+             gsi_replace (&gsi, g, false);
+           }
+         /* ???  If we do not cleanup EH then we will ICE in
+            verification.  But in reality we have created wrong-code
+            as we did not properly transition EH info and edges to
+            the piecewise computations.  */
+         if (maybe_clean_eh_stmt (gsi_stmt (gsi))
+             && gimple_purge_dead_eh_edges (bb))
+           cfg_changed = true;
+       }
+    }
+
+  return cfg_changed ? TODO_cleanup_cfg : 0;
+}
+
 namespace {
const pass_data pass_data_lower_vector =
@@ -2324,4 +2506,47 @@ make_pass_lower_vector_ssa (gcc::context *ctxt)
   return new pass_lower_vector_ssa (ctxt);
 }
+namespace {
+
+const pass_data pass_data_gimple_isel =
+{
+  GIMPLE_PASS, /* type */
+  "isel", /* name */
+  OPTGROUP_VEC, /* optinfo_flags */
+  TV_NONE, /* tv_id */
+  PROP_cfg, /* properties_required */
+  0, /* properties_provided */
+  0, /* properties_destroyed */
+  0, /* todo_flags_start */
+  TODO_update_ssa, /* todo_flags_finish */
+};
+
+class pass_gimple_isel : public gimple_opt_pass
+{
+public:
+  pass_gimple_isel (gcc::context *ctxt)
+    : gimple_opt_pass (pass_data_gimple_isel, ctxt)
+  {}
+
+  /* opt_pass methods: */
+  virtual bool gate (function *)
+    {
+      return true;
+    }
+
+  virtual unsigned int execute (function *)
+    {
+      return gimple_expand_vec_cond_exprs ();
+    }
+
+}; // class pass_gimple_isel
+
+} // anon namespace
+
+gimple_opt_pass *
+make_pass_gimple_isel (gcc::context *ctxt)
+{
+  return new pass_gimple_isel (ctxt);
+}
+
 #include "gt-tree-vect-generic.h"
--
2.26.2
