Richard, Jakub,
this updates patch 1 to use the target-insns.def mechanism of detecting
conditionally-implemented instructions. Otherwise it's the same as yesterday's
patch. To recap:
1) Moved the subcodes to an enumeration in internal-fn.h
2) Remove ECF_LEAF
3) Added check in initialize_ctrl_altering
4) tracer code continues to only look at the last stmt of a block
I looked at fnsplit and do not believe I need changes there. That's changing
things like:
if (cheap test)
do cheap thing
else
do complex thing
to break out the else part into a separate function. That's fine -- it'll copy
the whole CFG of interest.
ok?
nathan
2015-10-26 Nathan Sidwell <nat...@codesourcery.com>
* internal-fn.c (expand_UNIQUE): New.
* internal-fn.h (enum ifn_unique_kind): New.
* internal-fn.def (IFN_UNIQUE): New.
* target-insns.def (unique): Define.
* gimple.h (gimple_call_internal_unique_p): New.
* gimple.c (gimple_call_same_target_p): Check internal fn
uniqueness.
* tracer.c (ignore_bb_p): Check for IFN_UNIQUE call.
* tree-ssa-threadedge.c
(record_temporary_equivalences_from_stmts): Likewise.
* tree-cfg.c (gimple_call_initialize_ctrl_altering): Likewise.
Index: gcc/target-insns.def
===================================================================
--- gcc/target-insns.def (revision 229276)
+++ gcc/target-insns.def (working copy)
@@ -89,5 +93,6 @@ DEF_TARGET_INSN (stack_protect_test, (rt
DEF_TARGET_INSN (store_multiple, (rtx x0, rtx x1, rtx x2))
DEF_TARGET_INSN (tablejump, (rtx x0, rtx x1))
DEF_TARGET_INSN (trap, (void))
+DEF_TARGET_INSN (unique, (void))
DEF_TARGET_INSN (untyped_call, (rtx x0, rtx x1, rtx x2))
DEF_TARGET_INSN (untyped_return, (rtx x0, rtx x1))
Index: gcc/gimple.c
===================================================================
--- gcc/gimple.c (revision 229276)
+++ gcc/gimple.c (working copy)
@@ -1346,7 +1346,8 @@ gimple_call_same_target_p (const gimple
{
if (gimple_call_internal_p (c1))
return (gimple_call_internal_p (c2)
- && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
+ && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
+ && !gimple_call_internal_unique_p (as_a <const gcall *> (c1)));
else
return (gimple_call_fn (c1) == gimple_call_fn (c2)
|| (gimple_call_fndecl (c1)
Index: gcc/gimple.h
===================================================================
--- gcc/gimple.h (revision 229276)
+++ gcc/gimple.h (working copy)
@@ -2895,6 +2895,21 @@ gimple_call_internal_fn (const gimple *g
return gimple_call_internal_fn (gc);
}
+/* Return true if this internal gimple call is unique.  */
+
+static inline bool
+gimple_call_internal_unique_p (const gcall *gs)
+{
+ return gimple_call_internal_fn (gs) == IFN_UNIQUE;
+}
+
+static inline bool
+gimple_call_internal_unique_p (const gimple *gs)
+{
+ const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
+ return gimple_call_internal_unique_p (gc);
+}
+
/* If CTRL_ALTERING_P is true, mark GIMPLE_CALL S to be a stmt
that could alter control flow. */
Index: gcc/internal-fn.c
===================================================================
--- gcc/internal-fn.c (revision 229276)
+++ gcc/internal-fn.c (working copy)
@@ -1958,6 +1958,30 @@ expand_VA_ARG (gcall *stmt ATTRIBUTE_UNU
gcc_unreachable ();
}
+/* Expand the IFN_UNIQUE function according to its first argument. */
+
+static void
+expand_UNIQUE (gcall *stmt)
+{
+ rtx pattern = NULL_RTX;
+ enum ifn_unique_kind kind
+ = (enum ifn_unique_kind) TREE_INT_CST_LOW (gimple_call_arg (stmt, 0));
+
+ switch (kind)
+ {
+ default:
+ gcc_unreachable ();
+
+ case IFN_UNIQUE_UNSPEC:
+ if (targetm.have_unique ())
+ pattern = targetm.gen_unique ();
+ break;
+ }
+
+ if (pattern)
+ emit_insn (pattern);
+}
+
/* Routines to expand each internal function, indexed by function number.
Each routine has the prototype:
Index: gcc/internal-fn.h
===================================================================
--- gcc/internal-fn.h (revision 229276)
+++ gcc/internal-fn.h (working copy)
@@ -20,6 +20,11 @@ along with GCC; see the file COPYING3.
#ifndef GCC_INTERNAL_FN_H
#define GCC_INTERNAL_FN_H
+/* INTEGER_CST values for IFN_UNIQUE function arg-0. */
+enum ifn_unique_kind {
+ IFN_UNIQUE_UNSPEC /* Undifferentiated UNIQUE. */
+};
+
/* Initialize internal function tables. */
extern void init_internal_fns ();
Index: gcc/internal-fn.def
===================================================================
--- gcc/internal-fn.def (revision 229276)
+++ gcc/internal-fn.def (working copy)
@@ -65,3 +65,10 @@ DEF_INTERNAL_FN (SUB_OVERFLOW, ECF_CONST
DEF_INTERNAL_FN (MUL_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (TSAN_FUNC_EXIT, ECF_NOVOPS | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (VA_ARG, ECF_NOTHROW | ECF_LEAF, NULL)
+
+/* An unduplicable, uncombinable function. Generally used to preserve
+ a CFG property in the face of jump threading, tail merging or
+ other such optimizations. The first argument distinguishes
+ between uses. See internal-fn.h for usage. */
+DEF_INTERNAL_FN (UNIQUE, ECF_NOTHROW, NULL)
Index: gcc/tracer.c
===================================================================
--- gcc/tracer.c (revision 229276)
+++ gcc/tracer.c (working copy)
@@ -93,18 +93,24 @@ bb_seen_p (basic_block bb)
static bool
ignore_bb_p (const_basic_block bb)
{
- gimple *g;
-
if (bb->index < NUM_FIXED_BLOCKS)
return true;
if (optimize_bb_for_size_p (bb))
return true;
- /* A transaction is a single entry multiple exit region. It must be
- duplicated in its entirety or not at all. */
- g = last_stmt (CONST_CAST_BB (bb));
- if (g && gimple_code (g) == GIMPLE_TRANSACTION)
- return true;
+ if (gimple *g = last_stmt (CONST_CAST_BB (bb)))
+ {
+ /* A transaction is a single entry multiple exit region. It
+ must be duplicated in its entirety or not at all. */
+ if (gimple_code (g) == GIMPLE_TRANSACTION)
+ return true;
+
+ /* An IFN_UNIQUE call must be duplicated as part of its group,
+ or not at all. */
+ if (is_gimple_call (g) && gimple_call_internal_p (g)
+ && gimple_call_internal_unique_p (g))
+ return true;
+ }
return false;
}
Index: gcc/tree-ssa-threadedge.c
===================================================================
--- gcc/tree-ssa-threadedge.c (revision 229276)
+++ gcc/tree-ssa-threadedge.c (working copy)
@@ -283,6 +283,13 @@ record_temporary_equivalences_from_stmts
&& gimple_asm_volatile_p (as_a <gasm *> (stmt)))
return NULL;
+ /* If the statement is a unique builtin, we cannot thread
+ through here. */
+ if (gimple_code (stmt) == GIMPLE_CALL
+ && gimple_call_internal_p (stmt)
+ && gimple_call_internal_unique_p (stmt))
+ return NULL;
+
/* If duplicating this block is going to cause too much code
expansion, then do not thread through this block. */
stmt_count++;
Index: gcc/tree-cfg.c
===================================================================
--- gcc/tree-cfg.c (revision 229276)
+++ gcc/tree-cfg.c (working copy)
@@ -487,7 +487,11 @@ gimple_call_initialize_ctrl_altering (gi
|| ((flags & ECF_TM_BUILTIN)
&& is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
/* BUILT_IN_RETURN call is same as return statement. */
- || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
+ || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
+ /* IFN_UNIQUE should be the last insn, to make checking for it
+ as cheap as possible. */
+ || (gimple_call_internal_p (stmt)
+ && gimple_call_internal_unique_p (stmt)))
gimple_call_set_ctrl_altering (stmt, true);
else
gimple_call_set_ctrl_altering (stmt, false);