This patch makes cfgcleanup optimize jumps to returns.  There are three
cases this handles:

-- A jump to a return; this is simplified to just that return.
-- A conditional branch to a return; simplified to a conditional return.
-- A conditional branch that falls through to a return.  This is simplified
   to a conditional return (with the condition inverted), falling through
   to a jump to the original destination.  That jump can then be optimized
   further, as usual.

This handles all the cases that the current function.c code does, and a few
that it misses.


2016-05-03  Segher Boessenkool  <seg...@kernel.crashing.org>

        * cfgcleanup.c (bb_is_just_return): New function.
        (try_optimize_cfg): Simplify jumps to return, branches to return,
        and branches around return.

---
 gcc/cfgcleanup.c | 123 +++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 123 insertions(+)

diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c
index 19583a7..4f87811 100644
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -2606,6 +2606,35 @@ trivially_empty_bb_p (basic_block bb)
     }
 }
 
+/* Return true if BB contains just a return and possibly a USE of the
+   return value.  Fill in *RET and *USE with the return and use insns
+   if any are found, otherwise NULL.  */
+
+static bool
+bb_is_just_return (basic_block bb, rtx_insn **ret, rtx_insn **use)
+{
+  *ret = *use = NULL;
+  rtx_insn *insn;
+
+  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
+    return false;
+
+  FOR_BB_INSNS (bb, insn)
+    if (NONDEBUG_INSN_P (insn))
+      {
+       if (!*ret && ANY_RETURN_P (PATTERN (insn)))
+         *ret = insn;
+       else if (!*ret && !*use && GET_CODE (PATTERN (insn)) == USE
+           && REG_P (XEXP (PATTERN (insn), 0))
+           && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
+         *use = insn;
+       else
+         return false;
+      }
+
+  return !!*ret;
+}
+
 /* Do simple CFG optimizations - basic block merging, simplifying of jump
    instructions etc.  Return nonzero if changes were made.  */
 
@@ -2792,6 +2821,100 @@ try_optimize_cfg (int mode)
                      }
                }
 
+             /* Try to change a branch to a return to just that return.  */
+             rtx_insn *ret, *use;
+             if (single_succ_p (b)
+                 && onlyjump_p (BB_END (b))
+                 && bb_is_just_return (single_succ (b), &ret, &use))
+               {
+                 if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+                                    PATTERN (ret), 0))
+                   {
+                     if (use)
+                       emit_insn_before (copy_insn (PATTERN (use)),
+                                         BB_END (b));
+                     if (dump_file)
+                       fprintf (dump_file, "Changed jump %d->%d to return.\n",
+                                           b->index, single_succ (b)->index);
+                     redirect_edge_succ (single_succ_edge (b),
+                                         EXIT_BLOCK_PTR_FOR_FN (cfun));
+                     single_succ_edge (b)->flags &= ~EDGE_CROSSING;
+                     changed_here = true;
+                   }
+               }
+
+             /* Try to change a conditional branch to a return to the
+                respective conditional return.  */
+             if (EDGE_COUNT (b->succs) == 2
+                 && any_condjump_p (BB_END (b))
+                 && bb_is_just_return (BRANCH_EDGE (b)->dest, &ret, &use))
+               {
+                 if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+                                    PATTERN (ret), 0))
+                   {
+                     if (use)
+                       emit_insn_before (copy_insn (PATTERN (use)),
+                                         BB_END (b));
+                     if (dump_file)
+                       fprintf (dump_file, "Changed conditional jump %d->%d "
+                                           "to conditional return.\n",
+                                           b->index,
+                                           BRANCH_EDGE (b)->dest->index);
+                     redirect_edge_succ (BRANCH_EDGE (b),
+                                         EXIT_BLOCK_PTR_FOR_FN (cfun));
+                     BRANCH_EDGE (b)->flags &= ~EDGE_CROSSING;
+                     changed_here = true;
+                   }
+               }
+
+             /* Try to flip a conditional branch that falls through to
+                a return so that it becomes a conditional return and a
+                new jump to the original branch target.  */
+             if (EDGE_COUNT (b->succs) == 2
+                 && any_condjump_p (BB_END (b))
+                 && bb_is_just_return (FALLTHRU_EDGE (b)->dest, &ret, &use))
+               {
+                 if (invert_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+                                  JUMP_LABEL (BB_END (b)), 0))
+                   {
+                     basic_block new_ft = BRANCH_EDGE (b)->dest;
+                     if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+                                        PATTERN (ret), 0))
+                       {
+                         if (use)
+                           emit_insn_before (copy_insn (PATTERN (use)),
+                                             BB_END (b));
+                         if (dump_file)
+                           fprintf (dump_file, "Changed conditional jump "
+                                               "%d->%d to conditional return, "
+                                               "adding fall-through jump.\n",
+                                               b->index,
+                                               BRANCH_EDGE (b)->dest->index);
+                         redirect_edge_succ (BRANCH_EDGE (b),
+                                             EXIT_BLOCK_PTR_FOR_FN (cfun));
+                         BRANCH_EDGE (b)->flags &= ~EDGE_CROSSING;
+                         std::swap (BRANCH_EDGE (b)->probability,
+                                    FALLTHRU_EDGE (b)->probability);
+                         update_br_prob_note (b);
+                         basic_block jb = force_nonfallthru (FALLTHRU_EDGE (b));
+                         notice_new_block (jb);
+                         if (!redirect_jump (as_a <rtx_jump_insn *> (BB_END (jb)),
+                                             block_label (new_ft), 0))
+                           gcc_unreachable ();
+                         redirect_edge_succ (single_succ_edge (jb), new_ft);
+                         changed_here = true;
+                       }
+                     else
+                       {
+                         /* Invert the jump back to what it was.  This should
+                            never fail.  */
+                         if (!invert_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+                                           JUMP_LABEL (BB_END (b)), 0))
+                           gcc_unreachable ();
+                       }
+                   }
+               }
+
              /* Simplify branch over branch.  */
              if ((mode & CLEANUP_EXPENSIVE)
                   && !(mode & CLEANUP_CFGLAYOUT)
-- 
1.9.3

Reply via email to