Hi, this patch adds logic for determining the dynamic type of a memory
location based on constructor calls or vtable stores, as discussed at the
Cauldron.  It is based on the existing code in ipa-prop that looks for type
changes, but it newly handles the constructor calls too and fills in the
polymorphic_call_context.
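To make the intended behavior concrete, here is the logic of the new
testcases in plain form (slightly simplified - the dg- directives and the
extra direct call that forces the FE to emit B::t's body are left out; the
comments describe the outcome I expect, not a guarantee):

  struct A { virtual int t () { return 1; } };
  struct B : A { B (); virtual int t () { return 2; } };
  void test2 (struct A *);

  int
  m ()
  {
    struct A *a = new B;
    a->t ();         // The ctor call determines the dynamic type, so this
                     // is devirtualized to B::t directly (by FRE).
    test2 (a);
    return a->t ();  // The external call may change the type by placement
                     // new, so only a speculative devirtualization to B::t
                     // is done here.
  }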
One of the main differences is that I now conservatively assume that every
function call may change the dynamic type of any memory location, but I
still fill in the speculative info.  This is shown in the attached
testcases, where the first call is devirtualized fully (since we know the
type after construction) but the second call is only devirtualized
speculatively, because we expect the external call to do something evil.
This is clearly a bit too conservative when the pointer does not escape,
but I do not implement that analysis - and it would be hard, since the
pointer becomes non-escaping only later.  I also make no attempt to track
type changes interprocedurally, though that may make sense to do.

Bootstrapped/regtested x86_64-linux, will commit it after a bit of further
testing.  The patch adds about 1000 additional speculative
devirtualizations and 800 additional non-speculative ones during early
passes for libxul.  Once ipa-prop is converted to the new interface, I
expect this to be noticeably more useful.

Honza

	* ipa-devirt.c: Include gimple-pretty-print.h.
	(referenced_from_vtable_p): Exclude DECL_EXTERNAL from further tests.
	(type_change_info): New structure based on ipa-prop variant.
	(noncall_stmt_may_be_vtbl_ptr_store): New predicate based on ipa-prop
	variant.
	(extr_type_from_vtbl_ptr_store): New function based on ipa-prop
	variant.
	(record_known_type): New function.
	(check_stmt_for_type_change): New function.
	(get_dynamic_type): New function.
	* ipa-prop.c (ipa_analyze_call_uses): Use get_dynamic_type.
	* tree-ssa-pre.c: Include ipa-utils.h.
	(eliminate_dom_walker::before_dom_children): Use ipa-devirt
	machinery; sanity check with ipa-prop devirtualization.

Index: testsuite/g++.dg/ipa/devirt-35.C
===================================================================
--- testsuite/g++.dg/ipa/devirt-35.C	(revision 0)
+++ testsuite/g++.dg/ipa/devirt-35.C	(revision 0)
@@ -0,0 +1,23 @@
+/* { dg-options "-O2 -fdump-ipa-devirt-details -fdump-tree-fre1-details" } */
+struct A {virtual int t(void) {return 1;}};
+struct B:A {B(); virtual int t(void) {return 2;}};
+void test2(struct A *);
+int
+m(struct B *b)
+{
+  struct A *a = new (B);
+  a->t();                   // This call should be devirtualized by
+                            // FRE because we know the type from the ctor call.
+  ((struct B *)a)->B::t();  // Make devirt possible; the C++ FE won't
+                            // produce an inline body without this.
+  test2(a);
+  return a->t();            // This call should be devirtualized speculatively because
+                            // test2 may change the dynamic type of *a by placement new.
+                            // The C++ standard is a bit imprecise about this.
+}
+/* { dg-final { scan-tree-dump "converting indirect call to function virtual int B::t" "fre1" } } */
+/* { dg-final { scan-ipa-dump "to virtual int B::t" "devirt" } } */
+/* { dg-final { scan-ipa-dump "1 speculatively devirtualized" "devirt" } } */
+/* { dg-final { cleanup-ipa-dump "devirt" } } */
+/* { dg-final { cleanup-tree-dump "fre1" } } */
+
Index: testsuite/g++.dg/ipa/devirt-36.C
===================================================================
--- testsuite/g++.dg/ipa/devirt-36.C	(revision 0)
+++ testsuite/g++.dg/ipa/devirt-36.C	(revision 0)
@@ -0,0 +1,25 @@
+/* { dg-options "-O2 -fdump-ipa-devirt-details -fdump-tree-fre1-details" } */
+struct A {virtual int t(void) {return 1;}};
+struct B:A {B(); virtual int t(void) {return 2;}};
+struct C {int a; struct B b;};
+void test2(struct A *);
+int
+m(struct B *b)
+{
+  struct C *c = new (C);
+  struct A *a = &c->b;
+  a->t();                   // This call should be devirtualized by
+                            // FRE because we know the type from the ctor call.
+  ((struct B *)a)->B::t();  // Make devirt possible; the C++ FE won't
+                            // produce an inline body without this.
+  test2(a);
+  return a->t();            // This call should be devirtualized speculatively because
+                            // test2 may change the dynamic type of *a by placement new.
+                            // The C++ standard is a bit imprecise about this.
+}
+/* { dg-final { scan-tree-dump "converting indirect call to function virtual int B::t" "fre1" } } */
+/* { dg-final { scan-ipa-dump "to virtual int B::t" "devirt" } } */
+/* { dg-final { scan-ipa-dump "1 speculatively devirtualized" "devirt" } } */
+/* { dg-final { cleanup-ipa-dump "devirt" } } */
+/* { dg-final { cleanup-tree-dump "fre1" } } */
+
Index: ipa-utils.h
===================================================================
--- ipa-utils.h	(revision 213709)
+++ ipa-utils.h	(working copy)
@@ -95,7 +95,7 @@ tree get_polymorphic_call_info (tree, tr
 				HOST_WIDE_INT *,
 				ipa_polymorphic_call_context *,
 				gimple call = NULL);
-bool get_dynamic_type (tree, ipa_polymorphic_call_context *, tree, gimple);
+bool get_dynamic_type (tree, ipa_polymorphic_call_context *, tree, tree, gimple);
 bool get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *,
 					       tree, tree, HOST_WIDE_INT);
 bool decl_maybe_in_construction_p (tree, tree, gimple, tree);
Index: ipa-devirt.c
===================================================================
--- ipa-devirt.c	(revision 213709)
+++ ipa-devirt.c	(working copy)
@@ -131,6 +131,7 @@ along with GCC; see the file COPYING3.
 #include "tree-dfa.h"
 #include "demangle.h"
 #include "dbgcnt.h"
+#include "gimple-pretty-print.h"
 #include "stor-layout.h"
 #include "intl.h"
 #include "hash-map.h"
@@ -1323,6 +1324,7 @@ referenced_from_vtable_p (struct cgraph_
   bool found = false;
 
   if (node->externally_visible
+      || DECL_EXTERNAL (node->decl)
       || node->used_from_other_partition)
     return true;
 
@@ -2436,6 +2438,504 @@ get_polymorphic_call_info (tree fndecl,
   return base_pointer;
 }
 
+/* Structure to be passed in between get_dynamic_type and
+   check_stmt_for_type_change.  */
+
+struct type_change_info
+{
+  /* Offset into the object where there is the virtual method pointer we are
+     looking for.  */
+  HOST_WIDE_INT offset;
+  /* The declaration or SSA_NAME pointer of the base that we are checking for
+     type change.  */
+  tree instance;
+  /* The reference to the virtual table pointer used.  */
+  tree vtbl_ptr_ref;
+  tree otr_type;
+  /* If we actually can tell the type that the object has changed to, it is
+     stored in this field.  Otherwise it remains NULL_TREE.  */
+  tree known_current_type;
+  HOST_WIDE_INT known_current_offset;
+
+  /* Set to true if a dynamic type change has been detected.  */
+  bool type_maybe_changed;
+  /* Set to true if multiple types have been encountered.  known_current_type
+     must be disregarded in that case.  */
+  bool multiple_types_encountered;
+  /* Set to true if we possibly missed some dynamic type changes and we should
+     consider the result to be speculative.  */
+  bool speculative;
+  bool seen_unanalyzed_store;
+};
+
+/* Return true if STMT is not a call and can modify a virtual method table
+   pointer.  We take advantage of the fact that vtable stores must appear
+   within constructor and destructor functions.  */
+
+bool
+noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
+{
+  if (is_gimple_assign (stmt))
+    {
+      tree lhs = gimple_assign_lhs (stmt);
+
+      if (gimple_clobber_p (stmt))
+        return false;
+      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
+        {
+          if (flag_strict_aliasing
+              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
+            return false;
+
+          if (TREE_CODE (lhs) == COMPONENT_REF
+              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
+            return false;
+          /* In the future we might want to use get_base_ref_and_offset to find
+             if there is a field corresponding to the offset and if so, proceed
+             almost as if it was a component ref.  */
+        }
+    }
+
+  /* Code unification may mess with inline stacks.  */
+  if (cfun->after_inlining)
+    return true;
+
+  /* Walk the inline stack and watch out for ctors/dtors.
+     TODO: Maybe we can require the store to appear in the toplevel
+     block of the CTOR/DTOR.  */
+  for (tree block = gimple_block (stmt); block && TREE_CODE (block) == BLOCK;
+       block = BLOCK_SUPERCONTEXT (block))
+    if (BLOCK_ABSTRACT_ORIGIN (block)
+        && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
+      {
+        tree fn = BLOCK_ABSTRACT_ORIGIN (block);
+
+        if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
+          return false;
+        return (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
+                && (DECL_CXX_CONSTRUCTOR_P (fn)
+                    || DECL_CXX_DESTRUCTOR_P (fn)));
+      }
+  return false;
+}
+
+/* If STMT can be proved to be an assignment to the virtual method table
+   pointer of the instance described by TCI and the type associated with
+   the new table can be identified, return the type and store its offset
+   into *TYPE_OFFSET.  Otherwise return NULL_TREE.  */
+
+static tree
+extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
+                               HOST_WIDE_INT *type_offset)
+{
+  HOST_WIDE_INT offset, size, max_size;
+  tree lhs, rhs, base, binfo;
+
+  if (!gimple_assign_single_p (stmt))
+    return NULL_TREE;
+
+  lhs = gimple_assign_lhs (stmt);
+  rhs = gimple_assign_rhs1 (stmt);
+  if (TREE_CODE (lhs) != COMPONENT_REF
+      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
+    return NULL_TREE;
+
+  if (tci->vtbl_ptr_ref && operand_equal_p (lhs, tci->vtbl_ptr_ref, 0))
+    ;
+  else
+    {
+      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
+      if (offset != tci->offset
+          || size != POINTER_SIZE
+          || max_size != POINTER_SIZE)
+        return NULL_TREE;
+      if (TREE_CODE (base) == MEM_REF)
+        {
+          if (!operand_equal_p (tci->instance, TREE_OPERAND (base, 0), 0)
+              || !integer_zerop (TREE_OPERAND (base, 1)))
+            return NULL_TREE;
+        }
+      else if (!operand_equal_p (tci->instance, base, 0)
+               || tci->offset)
+        return NULL_TREE;
+    }
+
+  binfo = vtable_pointer_value_to_binfo (rhs);
+
+  if (!binfo)
+    return NULL_TREE;
+  *type_offset = tree_to_shwi (BINFO_OFFSET (binfo)) * BITS_PER_UNIT;
+  if (TYPE_BINFO (BINFO_TYPE (binfo)) == binfo)
+    return BINFO_TYPE (binfo);
+
+  /* TODO: Figure out the type containing BINFO.  */
+  return NULL_TREE;
+}
+
+/* Record a dynamic type change of the instance described by TCI to TYPE
+   at OFFSET.  */
+
+void
+record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset)
+{
+  if (dump_file)
+    {
+      if (type)
+        {
+          fprintf (dump_file, "  Recording type: ");
+          print_generic_expr (dump_file, type, TDF_SLIM);
+          fprintf (dump_file, " at offset %i\n", (int)offset);
+        }
+      else
+        fprintf (dump_file, "  Recording unknown type\n");
+    }
+  if (tci->type_maybe_changed
+      && (type != tci->known_current_type
+          || offset != tci->known_current_offset))
+    tci->multiple_types_encountered = true;
+  tci->known_current_type = type;
+  tci->known_current_offset = offset;
+  tci->type_maybe_changed = true;
+}
+
+/* Callback of walk_aliased_vdefs and a helper function for
+   get_dynamic_type to check whether a particular statement may modify
+   the virtual table pointer, and if possible also determine the new type of
+   the (sub-)object.  It stores its result into DATA, which points to a
+   type_change_info structure.  */
+
+static bool
+check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
+{
+  gimple stmt = SSA_NAME_DEF_STMT (vdef);
+  struct type_change_info *tci = (struct type_change_info *) data;
+  tree fn;
+
+  /* If we already gave up, just terminate the rest of the walk.  */
+  if (tci->multiple_types_encountered)
+    return true;
+
+  if (is_gimple_call (stmt))
+    {
+      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
+        return false;
+
+      /* Check for a constructor call.  */
+      if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
+          && DECL_CXX_CONSTRUCTOR_P (fn)
+          && TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
+          && gimple_call_num_args (stmt))
+        {
+          tree op = walk_ssa_copies (gimple_call_arg (stmt, 0));
+          tree type = method_class_type (TREE_TYPE (fn));
+          HOST_WIDE_INT offset = 0, size, max_size;
+
+          if (dump_file)
+            {
+              fprintf (dump_file, "  Checking constructor call: ");
+              print_gimple_stmt (dump_file, stmt, 0, 0);
+            }
+
+          /* See if the THIS parameter seems like the instance pointer.  */
+          if (TREE_CODE (op) == ADDR_EXPR)
+            {
+              op = get_ref_base_and_extent (TREE_OPERAND (op, 0),
+                                            &offset, &size, &max_size);
+              if (size != max_size || max_size == -1)
+                {
+                  tci->speculative = true;
+                  return false;
+                }
+              if (op && TREE_CODE (op) == MEM_REF)
+                {
+                  if (!tree_fits_shwi_p (TREE_OPERAND (op, 1)))
+                    {
+                      tci->speculative = true;
+                      return false;
+                    }
+                  offset += tree_to_shwi (TREE_OPERAND (op, 1))
+                            * BITS_PER_UNIT;
+                  op = TREE_OPERAND (op, 0);
+                }
+              else
+                {
+                  tci->speculative = true;
+                  return false;
+                }
+              op = walk_ssa_copies (op);
+            }
+          if (operand_equal_p (op, tci->instance, 0)
+              && TYPE_SIZE (type)
+              && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
+              && tree_fits_shwi_p (TYPE_SIZE (type))
+              && tree_to_shwi (TYPE_SIZE (type)) + offset > tci->offset)
+            {
+              record_known_type (tci, type, tci->offset - offset);
+              return true;
+            }
+        }
+      /* Calls may possibly change the dynamic type by placement new.  Assume
+         it will not happen, but make the result speculative only.  */
+      if (dump_file)
+        {
+          fprintf (dump_file, "  Function call may change dynamic type:");
+          print_gimple_stmt (dump_file, stmt, 0, 0);
+        }
+      tci->speculative = true;
+      return false;
+    }
+  /* Check for an inlined virtual table store.  */
+  else if (noncall_stmt_may_be_vtbl_ptr_store (stmt))
+    {
+      tree type;
+      HOST_WIDE_INT offset = 0;
+      if (dump_file)
+        {
+          fprintf (dump_file, "  Checking vtbl store: ");
+          print_gimple_stmt (dump_file, stmt, 0, 0);
+        }
+
+      type = extr_type_from_vtbl_ptr_store (stmt, tci, &offset);
+      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
+      if (!type)
+        {
+          if (dump_file)
+            fprintf (dump_file, "  Unanalyzed store may change type.\n");
+          tci->seen_unanalyzed_store = true;
+          tci->speculative = true;
+        }
+      else
+        record_known_type (tci, type, offset);
+      return true;
+    }
+  else
+    return false;
+}
+
+/* CONTEXT is the polymorphic call context obtained from
+   get_polymorphic_call_info.  OTR_OBJECT is the pointer to the instance as
+   returned by OBJ_TYPE_REF_OBJECT.  INSTANCE is the pointer to the outer
+   instance as returned by get_polymorphic_call_info.  If the type of the
+   instance is not fully determined (either OUTER_TYPE is unknown or
+   MAYBE_IN_CONSTRUCTION/MAYBE_DERIVED_TYPE is set), try to walk memory
+   writes and find the actual construction of the instance.
+
+   We do not include this analysis in the context analysis itself, because
+   it needs memory SSA to be fully built and the walk may be expensive.
+   So it is not suitable for use within fold_stmt and similar uses.  */
+
+bool
+get_dynamic_type (tree instance,
+                  ipa_polymorphic_call_context *context,
+                  tree otr_object,
+                  tree otr_type,
+                  gimple call)
+{
+  struct type_change_info tci;
+  ao_ref ao;
+  bool function_entry_reached = false;
+  tree instance_ref = NULL;
+  gimple stmt = call;
+
+  if (!context->maybe_in_construction && !context->maybe_derived_type)
+    return false;
+
+  /* We need to obtain the reference to the virtual table pointer.  It is
+     better to look it up in the code rather than build our own.  This
+     requires a bit of pattern matching, but we end up verifying that what
+     we found is correct.
+
+     What we pattern match is:
+
+       tmp = instance->_vptr.A;    // vtbl ptr load
+       tmp2 = tmp[otr_token];      // vtable lookup
+       OBJ_TYPE_REF(tmp2;instance->0) (instance);
+
+     We want to start the alias oracle walk from the vtbl pointer load,
+     but we may not be able to identify it, for example, when PRE moved the
+     load around.  */
+
+  if (gimple_code (call) == GIMPLE_CALL)
+    {
+      tree ref = gimple_call_fn (call);
+      HOST_WIDE_INT offset2, size, max_size;
+
+      if (TREE_CODE (ref) == OBJ_TYPE_REF)
+        {
+          ref = OBJ_TYPE_REF_EXPR (ref);
+          ref = walk_ssa_copies (ref);
+
+          /* Check if the definition looks like a vtable lookup.  */
+          if (TREE_CODE (ref) == SSA_NAME
+              && !SSA_NAME_IS_DEFAULT_DEF (ref)
+              && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref))
+              && TREE_CODE (gimple_assign_rhs1
+                             (SSA_NAME_DEF_STMT (ref))) == MEM_REF)
+            {
+              ref = get_base_address
+                     (TREE_OPERAND (gimple_assign_rhs1
+                                     (SSA_NAME_DEF_STMT (ref)), 0));
+              ref = walk_ssa_copies (ref);
+              /* Find the base address of the lookup and see if it looks like
+                 a vptr load.  */
+              if (TREE_CODE (ref) == SSA_NAME
+                  && !SSA_NAME_IS_DEFAULT_DEF (ref)
+                  && gimple_assign_load_p (SSA_NAME_DEF_STMT (ref)))
+                {
+                  tree ref_exp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (ref));
+                  tree base_ref = get_ref_base_and_extent
+                                   (ref_exp, &offset2, &size, &max_size);
+
+                  /* Finally verify that what we found looks like a read from
+                     OTR_OBJECT or from INSTANCE with the expected offset.  */
+                  if (base_ref
+                      && TREE_CODE (base_ref) == MEM_REF
+                      && ((offset2 == context->offset
+                           && TREE_OPERAND (base_ref, 0) == instance)
+                          || (!offset2 && TREE_OPERAND (base_ref, 0) == otr_object)))
+                    {
+                      stmt = SSA_NAME_DEF_STMT (ref);
+                      instance_ref = ref_exp;
+                    }
+                }
+            }
+        }
+    }
+
+  /* If we failed to look up the reference in code, build our own.  */
+  if (!instance_ref)
+    {
+      /* If the statement in question does not use memory, we can't tell
+         anything.  */
+      if (!gimple_vuse (stmt))
+        return false;
+      ao_ref_init_from_ptr_and_size (&ao, otr_object, NULL);
+    }
+  else
+    /* Otherwise use the real reference.  */
+    ao_ref_init (&ao, instance_ref);
+
+  /* We look for the vtbl pointer read.  */
+  ao.size = POINTER_SIZE;
+  ao.max_size = ao.size;
+  ao.ref_alias_set
+    = get_deref_alias_set (TREE_TYPE (BINFO_VTABLE (TYPE_BINFO (otr_type))));
+
+  if (dump_file)
+    {
+      fprintf (dump_file, "Determining dynamic type for call: ");
+      print_gimple_stmt (dump_file, call, 0, 0);
+      fprintf (dump_file, "  Starting walk at: ");
+      print_gimple_stmt (dump_file, stmt, 0, 0);
+      fprintf (dump_file, "  instance pointer: ");
+      print_generic_expr (dump_file, otr_object, TDF_SLIM);
+      fprintf (dump_file, "  Outer instance pointer: ");
+      print_generic_expr (dump_file, instance, TDF_SLIM);
+      fprintf (dump_file, " offset: %i (bits)", (int)context->offset);
+      fprintf (dump_file, " vtbl reference: ");
+      print_generic_expr (dump_file, instance_ref, TDF_SLIM);
+      fprintf (dump_file, "\n");
+    }
+
+  tci.offset = context->offset;
+  tci.instance = instance;
+  tci.vtbl_ptr_ref = instance_ref;
+  gcc_assert (TREE_CODE (instance) != MEM_REF);
+  tci.known_current_type = NULL_TREE;
+  tci.known_current_offset = 0;
+  tci.otr_type = otr_type;
+  tci.type_maybe_changed = false;
+  tci.multiple_types_encountered = false;
+  tci.speculative = false;
+  tci.seen_unanalyzed_store = false;
+
+  walk_aliased_vdefs (&ao, gimple_vuse (stmt), check_stmt_for_type_change,
+                      &tci, NULL, &function_entry_reached);
+
+  /* If we did not find any type changing statements, we may still drop the
+     maybe_in_construction flag if the context already has an outer type.
+
+     Here we make special assumptions about both constructors and
+     destructors, which are the only functions that are allowed to alter
+     the VMT pointers.  It assumes that destructors begin with assignments
+     into all VMT pointers and that constructors essentially look in the
+     following way:
+
+     1) The very first thing they do is that they call constructors of
+     ancestor sub-objects that have them.
+
+     2) Then the VMT pointers of this object and all its ancestors are set
+     to new values corresponding to the type of the constructor.
+
+     3) Only afterwards, other stuff such as constructors of member
+     sub-objects and the code written by the user is run.  Only this may
+     include calling virtual functions, directly or indirectly.
+
+     4) Placement new cannot be used to change the type of non-POD
+     statically allocated variables.
+
+     There is no way to call a constructor of an ancestor sub-object in any
+     other way.
+
+     This means that we do not have to care whether constructors get the
+     correct type information because they will always change it (in fact,
+     if we define the type to be given by the VMT pointer, it is undefined).
+
+     The most important fact to derive from the above is that if, for some
+     statement in section 3, we try to detect whether the dynamic type
+     has changed, we can safely ignore all calls as we examine the function
+     body backwards until we reach statements in section 2 because these
+     calls cannot be ancestor constructors or destructors (if the input is
+     not bogus) and so do not change the dynamic type (this holds true only
+     for automatically allocated objects but at the moment we devirtualize
+     only these).  We then must detect that statements in section 2 change
+     the dynamic type and can try to derive the new type.  That is enough
+     and we can stop; we will never see calls into constructors of
+     sub-objects in this code.
+
+     Therefore if the static outer type was found (context->outer_type)
+     we can safely ignore tci.speculative that is set on calls and give up
+     only if there was a dynamic type store that may affect the given
+     variable (seen_unanalyzed_store).  */
+
+  if (!tci.type_maybe_changed)
+    {
+      if (!context->outer_type || tci.seen_unanalyzed_store)
+        return false;
+      if (context->maybe_in_construction)
+        context->maybe_in_construction = false;
+      if (dump_file)
+        fprintf (dump_file, "  No dynamic type change found.\n");
+      return true;
+    }
+
+  if (tci.known_current_type
+      && !function_entry_reached
+      && !tci.multiple_types_encountered)
+    {
+      if (!tci.speculative)
+        {
+          context->outer_type = tci.known_current_type;
+          context->offset = tci.known_current_offset;
+          context->maybe_in_construction = false;
+          context->maybe_derived_type = false;
+          if (dump_file)
+            fprintf (dump_file, "  Determined dynamic type.\n");
+        }
+      else if (!context->speculative_outer_type
+               || context->speculative_maybe_derived_type)
+        {
+          context->speculative_outer_type = tci.known_current_type;
+          context->speculative_offset = tci.known_current_offset;
+          context->speculative_maybe_derived_type = false;
+          if (dump_file)
+            fprintf (dump_file, "  Determined speculative dynamic type.\n");
+        }
+    }
+  else if (dump_file)
+    fprintf (dump_file, "  Found multiple types.\n");
+
+  return true;
+}
+
 /* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
    Lookup their respecitve virtual methods for OTR_TOKEN and OTR_TYPE
    and insert them to NODES.
@@ -2516,6 +3016,7 @@ devirt_variable_node_removal_hook (varpo
 }
 
 /* Record about how many calls would benefit from given type to be final.  */
+
 struct odr_type_warn_count
 {
   tree type;
@@ -2524,6 +3025,7 @@ struct odr_type_warn_count
 };
 
 /* Record about how many calls would benefit from given method to be final.  */
+
 struct decl_warn_count
 {
   tree decl;
@@ -2532,6 +3034,7 @@ struct decl_warn_count
 };
 
 /* Information about type and decl warnings.  */
+
 struct final_warning_record
 {
   gcov_type dyn_count;
Index: ipa-prop.c
===================================================================
--- ipa-prop.c	(revision 213709)
+++ ipa-prop.c	(working copy)
@@ -2337,11 +2337,46 @@ ipa_analyze_call_uses (struct func_body_
 	  && !virtual_method_call_p (target)))
     return;
 
+  struct cgraph_edge *cs = fbi->node->get_edge (call);
   /* If we previously turned the call into a direct call, there is no
      need to analyze.  */
-  struct cgraph_edge *cs = fbi->node->get_edge (call);
   if (cs && !cs->indirect_unknown_callee)
     return;
+
+  if (cs->indirect_info->polymorphic)
+    {
+      tree otr_type;
+      HOST_WIDE_INT otr_token;
+      ipa_polymorphic_call_context context;
+      tree instance;
+      tree target = gimple_call_fn (call);
+
+      instance = get_polymorphic_call_info (current_function_decl,
+					    target,
+					    &otr_type, &otr_token,
+					    &context, call);
+
+      if (get_dynamic_type (instance, &context,
+			    OBJ_TYPE_REF_OBJECT (target),
+			    otr_type, call))
+	{
+	  gcc_assert (TREE_CODE (otr_type) == RECORD_TYPE);
+	  cs->indirect_info->polymorphic = true;
+	  cs->indirect_info->param_index = -1;
+	  cs->indirect_info->otr_token = otr_token;
+	  cs->indirect_info->otr_type = otr_type;
+	  cs->indirect_info->outer_type = context.outer_type;
+	  cs->indirect_info->speculative_outer_type = context.speculative_outer_type;
+	  cs->indirect_info->offset = context.offset;
+	  cs->indirect_info->speculative_offset = context.speculative_offset;
+	  cs->indirect_info->maybe_in_construction
+	    = context.maybe_in_construction;
+	  cs->indirect_info->maybe_derived_type = context.maybe_derived_type;
+	  cs->indirect_info->speculative_maybe_derived_type
+	    = context.speculative_maybe_derived_type;
+	}
+    }
+
   if (TREE_CODE (target) == SSA_NAME)
     ipa_analyze_indirect_call_uses (fbi, call, target);
   else if (virtual_method_call_p (target))
Index: tree-ssa-pre.c
===================================================================
--- tree-ssa-pre.c	(revision 213709)
+++ tree-ssa-pre.c	(working copy)
@@ -64,6 +64,7 @@ along with GCC; see the file COPYING3.
 #include "domwalk.h"
 #include "ipa-prop.h"
 #include "tree-ssa-propagate.h"
+#include "ipa-utils.h"
 
 /* TODO:
 
@@ -4360,12 +4361,41 @@ eliminate_dom_walker::before_dom_childre
 	    {
 	      tree fn = gimple_call_fn (stmt);
 	      if (fn
-		  && TREE_CODE (fn) == OBJ_TYPE_REF
-		  && TREE_CODE (OBJ_TYPE_REF_EXPR (fn)) == SSA_NAME)
+		  && flag_devirtualize
+		  && virtual_method_call_p (fn))
 		{
-		  fn = ipa_intraprocedural_devirtualization (stmt);
-		  if (fn && dbg_cnt (devirt))
+		  tree otr_type;
+		  HOST_WIDE_INT otr_token;
+		  ipa_polymorphic_call_context context;
+		  tree instance;
+		  bool final;
+
+		  instance = get_polymorphic_call_info (current_function_decl,
+							fn,
+							&otr_type, &otr_token, &context, stmt);
+
+		  get_dynamic_type (instance, &context,
+				    OBJ_TYPE_REF_OBJECT (fn), otr_type, stmt);
+
+		  vec <cgraph_node *>targets
+		    = possible_polymorphic_call_targets (obj_type_ref_class (fn),
+							 tree_to_uhwi
+							   (OBJ_TYPE_REF_TOKEN (fn)),
+							 context,
+							 &final);
+		  if (dump_enabled_p ())
+		    dump_possible_polymorphic_call_targets (dump_file,
+							    obj_type_ref_class (fn),
+							    tree_to_uhwi
+							      (OBJ_TYPE_REF_TOKEN (fn)),
+							    context);
+		  if (final && targets.length () <= 1 && dbg_cnt (devirt))
 		    {
+		      tree fn;
+		      if (targets.length () == 1)
+			fn = targets[0]->decl;
+		      else
+			fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
 		      if (dump_enabled_p ())
 			{
 			  location_t loc = gimple_location_safe (stmt);
@@ -4377,6 +4407,8 @@ eliminate_dom_walker::before_dom_childre
 		      gimple_call_set_fndecl (stmt, fn);
 		      gimple_set_modified (stmt, true);
 		    }
+		  else
+		    gcc_assert (!ipa_intraprocedural_devirtualization (stmt));
 		}
 	    }
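As a side note for reviewers: the big comment before the walk in
get_dynamic_type relies on the usual C++ constructor lowering.  Here is a
hand-written illustration of the ordering it assumes (this is a sketch of
what the analysis expects, not compiler output):

  struct A { virtual void f (); };
  struct B : A
  {
    int member;
    B () : member (0)
    {
      // The compiled B::B() is assumed to
      //  1) call A::A() for the base sub-object first,
      //  2) then store B's vtable pointer(s) - this is the store that
      //     extr_type_from_vtbl_ptr_store recognizes,
      //  3) only then run member initializers and this user code, so any
      //     calls seen while walking backwards from here down to the vtbl
      //     store cannot be ancestor ctors/dtors and can be ignored.
    }
  };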