Hello Josef,

On Tue, Nov 05 2024, Josef Melcr wrote:
> Hi!
>
> On 11/5/24 12:06, Martin Jambor wrote:
>> +/* Copy information from SRC_JF to DST_JF which correspond to call graph edges
>> +   SRC and DST.  */
>> +
>> +static void
>> +ipa_duplicate_jump_function (cgraph_edge *src, cgraph_edge *dst,
>> +                         ipa_jump_func *src_jf, ipa_jump_func *dst_jf)
>
> I am not sure this function copies over all necessary information, e.g. 
> jump function type. It will work fine when used in the duplication hook, 
> as the dst jump functions are created through
>
> new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
>
> so all other information is copied over, but I don't think that's the 
> case when using this function on its own. When a jump function is 
> constructed through different means, will the contents of the jump 
> functions still match ?

Thanks for reading through the patch.  I am happy that I was wrong when
reviewing the current situation rather than when writing the original
code.  Though of course mainly I need to pay more attention to details.

We still want to have the split for the purposes of propagating stuff to
OpenMP outlined regions, and for that I have prepared the patch below.
I am just not sure whether it makes sense to push this now or wait for
stage1 of GCC 16.  The patch does pass bootstrap and testing on
x86_64-linux.
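
To illustrate the intended stand-alone use, here is only a rough
sketch (the gomp-edge creation code does not exist yet and CS/NEW_CS
are made-up names): jump functions of a newly created edge NEW_CS
modeling the transfer to an outlined region could be seeded from an
existing edge CS along these lines:

  /* Hypothetical example, not part of the patch: CS is an already
     analyzed edge, NEW_CS the planned gomp edge.  */
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  ipa_edge_args *new_args = ipa_edge_args_sum->get_create (new_cs);
  unsigned count = vec_safe_length (args->jump_functions);
  if (count)
    {
      vec_safe_grow_cleared (new_args->jump_functions, count, true);
      for (unsigned i = 0; i < count; i++)
        ipa_duplicate_jump_function (cs, new_cs,
                                     ipa_get_ith_jump_func (args, i),
                                     ipa_get_ith_jump_func (new_args, i));
    }

(Since ipa_duplicate_jump_function is static in this patch, such code
would either live in ipa-prop.cc or the function would need to be
exported later.)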

Honza, any preferences?  (Independently of whether it is OK to be
pushed.)

Thanks,

Martin

---------- 8< -------------------- 8< -------------------- 8< ----------

When reviewing various IPA bits and pieces I falsely assumed that
jump function duplication missed copying important bits.  In fact it
relies on vec_safe_copy-ing all data in the vector of jump functions
and then only fixes up the few fields it needs to.

Perhaps more importantly, we do want a function to copy one individual
jump function to form jump functions for planned call-graph edges that
model transfer of control to OpenMP outlined regions through calls to
gomp functions.

Therefore, this patch introduces such a function and makes
ipa_edge_args_sum_t::duplicate just allocate the new vectors and then
use the new function to copy the data.

gcc/ChangeLog:

2024-11-12  Martin Jambor  <mjam...@suse.cz>

        * ipa-prop.cc (ipa_duplicate_jump_function): New function.
        (ipa_edge_args_sum_t::duplicate): Move individual jump function
        copying to ipa_duplicate_jump_function.
---
 gcc/ipa-prop.cc | 186 ++++++++++++++++++++++++++++--------------------
 1 file changed, 110 insertions(+), 76 deletions(-)

diff --git a/gcc/ipa-prop.cc b/gcc/ipa-prop.cc
index 599181d0a94..e1c4c798f3f 100644
--- a/gcc/ipa-prop.cc
+++ b/gcc/ipa-prop.cc
@@ -4497,99 +4497,96 @@ ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
     }
 }
 
-/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
-   reference count data strucutres accordingly.  */
+/* Copy information from SRC_JF to DST_JF which correspond to call graph edges
+   SRC and DST.  */
 
-void
-ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
-                               ipa_edge_args *old_args, ipa_edge_args *new_args)
+static void
+ipa_duplicate_jump_function (cgraph_edge *src, cgraph_edge *dst,
+                            ipa_jump_func *src_jf, ipa_jump_func *dst_jf)
 {
-  unsigned int i;
+  dst_jf->agg.items = vec_safe_copy (src_jf->agg.items);
+  dst_jf->agg.by_ref = src_jf->agg.by_ref;
 
-  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
-  if (old_args->polymorphic_call_contexts)
-    new_args->polymorphic_call_contexts
-      = vec_safe_copy (old_args->polymorphic_call_contexts);
+  /* We can avoid calling ipa_set_jfunc_vr since it would only look up the
+     place in the hash_table where the source m_vr resides.  */
+  dst_jf->m_vr = src_jf->m_vr;
 
-  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
+  if (src_jf->type == IPA_JF_CONST)
     {
-      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
-      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
+      ipa_set_jf_cst_copy (dst_jf, src_jf);
+      struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
 
-      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
-
-      if (src_jf->type == IPA_JF_CONST)
+      if (!src_rdesc)
+       dst_jf->value.constant.rdesc = NULL;
+      else if (src->caller == dst->caller)
        {
-         struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
-
-         if (!src_rdesc)
-           dst_jf->value.constant.rdesc = NULL;
-         else if (src->caller == dst->caller)
+         /* Creation of a speculative edge.  If the source edge is the one
+            grabbing a reference, we must create a new (duplicate)
+            reference description.  Otherwise they refer to the same
+            description corresponding to a reference taken in a function
+            src->caller is inlined to.  In that case we just must
+            increment the refcount.  */
+         if (src_rdesc->cs == src)
            {
-             /* Creation of a speculative edge.  If the source edge is the one
-                grabbing a reference, we must create a new (duplicate)
-                reference description.  Otherwise they refer to the same
-                description corresponding to a reference taken in a function
-                src->caller is inlined to.  In that case we just must
-                increment the refcount.  */
-             if (src_rdesc->cs == src)
-               {
-                  symtab_node *n = symtab_node_for_jfunc (src_jf);
-                  gcc_checking_assert (n);
-                  ipa_ref *ref
-                    = src->caller->find_reference (n, src->call_stmt,
-                                                   src->lto_stmt_uid,
-                                                   IPA_REF_ADDR);
-                  gcc_checking_assert (ref);
-                  dst->caller->clone_reference (ref, ref->stmt);
+             symtab_node *n = symtab_node_for_jfunc (src_jf);
+             gcc_checking_assert (n);
+             ipa_ref *ref
+               = src->caller->find_reference (n, src->call_stmt,
+                                              src->lto_stmt_uid,
+                                              IPA_REF_ADDR);
+             gcc_checking_assert (ref);
+             dst->caller->clone_reference (ref, ref->stmt);
 
-                  ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
-                  dst_rdesc->cs = dst;
-                  dst_rdesc->refcount = src_rdesc->refcount;
-                  dst_rdesc->next_duplicate = NULL;
-                  dst_jf->value.constant.rdesc = dst_rdesc;
-               }
-             else
-               {
-                 src_rdesc->refcount++;
-                 dst_jf->value.constant.rdesc = src_rdesc;
-               }
-           }
-         else if (src_rdesc->cs == src)
-           {
-             struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
+             ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
-             dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
-             src_rdesc->next_duplicate = dst_rdesc;
+             dst_rdesc->next_duplicate = NULL;
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
          else
            {
-             struct ipa_cst_ref_desc *dst_rdesc;
-             /* This can happen during inlining, when a JFUNC can refer to a
-                reference taken in a function up in the tree of inline clones.
-                We need to find the duplicate that refers to our tree of
-                inline clones.  */
-
-             gcc_assert (dst->caller->inlined_to);
-             for (dst_rdesc = src_rdesc->next_duplicate;
-                  dst_rdesc;
-                  dst_rdesc = dst_rdesc->next_duplicate)
-               {
-                 struct cgraph_node *top;
-                 top = dst_rdesc->cs->caller->inlined_to
-                   ? dst_rdesc->cs->caller->inlined_to
-                   : dst_rdesc->cs->caller;
-                 if (dst->caller->inlined_to == top)
-                   break;
-               }
-             gcc_assert (dst_rdesc);
-             dst_jf->value.constant.rdesc = dst_rdesc;
+             src_rdesc->refcount++;
+             dst_jf->value.constant.rdesc = src_rdesc;
            }
        }
-      else if (dst_jf->type == IPA_JF_PASS_THROUGH
-              && src->caller == dst->caller)
+      else if (src_rdesc->cs == src)
+       {
+         struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
+         dst_rdesc->cs = dst;
+         dst_rdesc->refcount = src_rdesc->refcount;
+         dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
+         src_rdesc->next_duplicate = dst_rdesc;
+         dst_jf->value.constant.rdesc = dst_rdesc;
+       }
+      else
+       {
+         struct ipa_cst_ref_desc *dst_rdesc;
+         /* This can happen during inlining, when a JFUNC can refer to a
+            reference taken in a function up in the tree of inline clones.
+            We need to find the duplicate that refers to our tree of
+            inline clones.  */
+
+         gcc_assert (dst->caller->inlined_to);
+         for (dst_rdesc = src_rdesc->next_duplicate;
+              dst_rdesc;
+              dst_rdesc = dst_rdesc->next_duplicate)
+           {
+             struct cgraph_node *top;
+             top = dst_rdesc->cs->caller->inlined_to
+               ? dst_rdesc->cs->caller->inlined_to
+               : dst_rdesc->cs->caller;
+             if (dst->caller->inlined_to == top)
+               break;
+           }
+         gcc_assert (dst_rdesc);
+         dst_jf->value.constant.rdesc = dst_rdesc;
+       }
+    }
+  else if (src_jf->type == IPA_JF_PASS_THROUGH)
+    {
+      dst_jf->type = IPA_JF_PASS_THROUGH;
+      dst_jf->value.pass_through = src_jf->value.pass_through;
+      if (src->caller == dst->caller)
        {
          struct cgraph_node *inline_root = dst->caller->inlined_to
            ? dst->caller->inlined_to : dst->caller;
@@ -4604,6 +4601,43 @@ ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
            }
        }
     }
+  else if (src_jf->type == IPA_JF_ANCESTOR)
+    {
+      dst_jf->type = IPA_JF_ANCESTOR;
+      dst_jf->value.ancestor = src_jf->value.ancestor;
+    }
+  else
+    gcc_assert (src_jf->type == IPA_JF_UNKNOWN);
+}
+
+/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
+   reference count data structures accordingly.  */
+
+void
+ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
+                               ipa_edge_args *old_args, ipa_edge_args *new_args)
+{
+  unsigned int i;
+
+  if (old_args->polymorphic_call_contexts)
+    new_args->polymorphic_call_contexts
+      = vec_safe_copy (old_args->polymorphic_call_contexts);
+
+  if (!vec_safe_length (old_args->jump_functions))
+    {
+      new_args->jump_functions = NULL;
+      return;
+    }
+  vec_safe_grow_cleared (new_args->jump_functions,
+                        old_args->jump_functions->length (), true);
+
+  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
+    {
+      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
+      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
+
+      ipa_duplicate_jump_function (src, dst, src_jf, dst_jf);
+    }
 }
 
 /* Analyze newly added function into callgraph.  */
-- 
2.47.0

