Hi!
Here is an incremental patch to avoid traversing constexpr_call_table when
-freflection is enabled but, since the last clearing, no metafunctions have
been called and no calls that invoked them have been cached.
diff --git a/gcc/cp/constexpr.cc b/gcc/cp/constexpr.cc
index 18c8529dc07..0786a456722 100644
--- a/gcc/cp/constexpr.cc
+++ b/gcc/cp/constexpr.cc
@@ -1532,6 +1532,20 @@
uid_sensitive_constexpr_evaluation_checker::evaluation_restricted_p () const
static GTY (()) hash_table<constexpr_call_hasher> *constexpr_call_table;
+/* True if any constexpr_call_table entry has metafns_called flag set
+ and some non-NULL/error_mark_node cached result. */
+
+static GTY(()) bool metafns_cached;
+
+/* True if metafns_called could be true in any of the currently pending
+ constant evaluations. */
+
+static bool metafns_called;
+
+/* Number of currently pending cxx_eval_outermost_constexpr_expr invocations.
*/
+
+static int outermost_constexpr_count;
+
/* Compute a hash value for a constexpr call representation. */
inline hashval_t
@@ -3868,6 +3882,7 @@ cxx_eval_call_expression (const constexpr_ctx *ctx, tree
t,
return t;
}
ctx->global->metafns_called = true;
+ metafns_called = true;
tree e = process_metafunction (ctx, fun, t, non_constant_p, overflow_p,
jump_target);
if (*jump_target)
@@ -4502,20 +4517,22 @@ cxx_eval_call_expression (const constexpr_ctx *ctx,
tree t,
result = void_node;
if (entry)
{
- entry->result (ctx->manifestly_const_eval)
- = cacheable ? result : error_mark_node;
+ tree res = cacheable ? result : error_mark_node;
+ entry->result (ctx->manifestly_const_eval) = res;
- if (result != error_mark_node
+ if (res != error_mark_node
&& ctx->manifestly_const_eval == mce_unknown)
{
/* Evaluation succeeded and was independent of whether we're in a
manifestly constant-evaluated context, so we can also reuse
this result when evaluating this call with a fixed context. */
if (!entry->result (mce_true))
- entry->result (mce_true) = entry->result (mce_unknown);
+ entry->result (mce_true) = res;
if (!entry->result (mce_false))
- entry->result (mce_false) = entry->result (mce_unknown);
+ entry->result (mce_false) = res;
}
+ if (entry->metafns_called && res != error_mark_node)
+ metafns_cached = true;
}
}
@@ -10722,6 +10739,7 @@ cxx_eval_outermost_constant_expr (tree t, bool
allow_non_constant,
if (manifestly_const_eval == mce_true)
instantiate_constexpr_fns (r);
+ ++outermost_constexpr_count;
tree jmp_target = NULL_TREE;
r = cxx_eval_constant_expression (&ctx, r, vc_prvalue,
&non_constant_p, &overflow_p,
@@ -10883,6 +10901,8 @@ cxx_eval_outermost_constant_expr (tree t, bool
allow_non_constant,
if (!non_constant_p && !constexpr_dtor)
verify_constant (r, allow_non_constant, &non_constant_p, &overflow_p);
+ if (--outermost_constexpr_count == 0)
+ metafns_called = false;
/* After verify_constant because reduced_constant_expression_p can unset
CONSTRUCTOR_NO_CLEARING. */
@@ -11228,9 +11248,13 @@ clear_constexpr_call_cache_entry (constexpr_call
**slot, bool)
void
maybe_clear_constexpr_call_cache ()
{
- if (flag_reflection && constexpr_call_table)
- constexpr_call_table->traverse <bool,
- clear_constexpr_call_cache_entry> (false);
+ if (!flag_reflection
+ || constexpr_call_table == NULL
+ || (!metafns_cached && !metafns_called))
+ return;
+ constexpr_call_table->traverse <bool,
+ clear_constexpr_call_cache_entry> (false);
+ metafns_cached = false;
}
/* Internal function handling expressions in templates for
Jakub