Rename the `emit_unlikely_jump' function to `alpha_emit_unlikely_jump' so
as to avoid namespace pollution, update callers accordingly, and export it
for use in the machine description.  Also make it return the insn emitted.

        gcc/
        * config/alpha/alpha-protos.h (alpha_emit_unlikely_jump): New 
        prototype.
        * config/alpha/alpha.cc (emit_unlikely_jump): Rename to...
        (alpha_emit_unlikely_jump): ... this.  Return the insn emitted.
        (alpha_split_atomic_op, alpha_split_compare_and_swap)
        (alpha_split_compare_and_swap_12, alpha_split_atomic_exchange)
        (alpha_split_atomic_exchange_12): Update call sites accordingly.
---
No change from v1.
---
 gcc/config/alpha/alpha-protos.h |    1 +
 gcc/config/alpha/alpha.cc       |   19 ++++++++++---------
 2 files changed, 11 insertions(+), 9 deletions(-)

gcc-alpha-emit-unlikely-jump-export.diff
Index: gcc/gcc/config/alpha/alpha-protos.h
===================================================================
--- gcc.orig/gcc/config/alpha/alpha-protos.h
+++ gcc/gcc/config/alpha/alpha-protos.h
@@ -59,6 +59,7 @@ extern rtx alpha_expand_zap_mask (HOST_W
 extern void alpha_expand_builtin_vector_binop (rtx (*)(rtx, rtx, rtx),
                                               machine_mode,
                                               rtx, rtx, rtx);
+extern rtx alpha_emit_unlikely_jump (rtx, rtx);
 extern void alpha_expand_builtin_establish_vms_condition_handler (rtx, rtx);
 extern void alpha_expand_builtin_revert_vms_condition_handler (rtx);
 
Index: gcc/gcc/config/alpha/alpha.cc
===================================================================
--- gcc.orig/gcc/config/alpha/alpha.cc
+++ gcc/gcc/config/alpha/alpha.cc
@@ -4420,12 +4420,13 @@ alpha_expand_builtin_vector_binop (rtx (
 /* A subroutine of the atomic operation splitters.  Jump to LABEL if
    COND is true.  Mark the jump as unlikely to be taken.  */
 
-static void
-emit_unlikely_jump (rtx cond, rtx label)
+rtx
+alpha_emit_unlikely_jump (rtx cond, rtx label)
 {
   rtx x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
   rtx_insn *insn = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
   add_reg_br_prob_note (insn, profile_probability::very_unlikely ());
+  return insn;
 }
 
 /* Subroutines of the atomic operation splitters.  Emit barriers
@@ -4517,7 +4518,7 @@ alpha_split_atomic_op (enum rtx_code cod
   emit_insn (gen_store_conditional (mode, cond, mem, scratch));
 
   x = gen_rtx_EQ (DImode, cond, const0_rtx);
-  emit_unlikely_jump (x, label);
+  alpha_emit_unlikely_jump (x, label);
 
   alpha_post_atomic_barrier (model);
 }
@@ -4567,7 +4568,7 @@ alpha_split_compare_and_swap (rtx operan
       emit_insn (gen_rtx_SET (cond, x));
       x = gen_rtx_EQ (DImode, cond, const0_rtx);
     }
-  emit_unlikely_jump (x, label2);
+  alpha_emit_unlikely_jump (x, label2);
 
   emit_move_insn (cond, newval);
   emit_insn (gen_store_conditional
@@ -4576,7 +4577,7 @@ alpha_split_compare_and_swap (rtx operan
   if (!is_weak)
     {
       x = gen_rtx_EQ (DImode, cond, const0_rtx);
-      emit_unlikely_jump (x, label1);
+      alpha_emit_unlikely_jump (x, label1);
     }
 
   if (!is_mm_relaxed (mod_f))
@@ -4679,7 +4680,7 @@ alpha_split_compare_and_swap_12 (rtx ope
       emit_insn (gen_rtx_SET (cond, x));
       x = gen_rtx_EQ (DImode, cond, const0_rtx);
     }
-  emit_unlikely_jump (x, label2);
+  alpha_emit_unlikely_jump (x, label2);
 
   emit_insn (gen_mskxl (cond, scratch, mask, addr));
 
@@ -4691,7 +4692,7 @@ alpha_split_compare_and_swap_12 (rtx ope
   if (!is_weak)
     {
       x = gen_rtx_EQ (DImode, cond, const0_rtx);
-      emit_unlikely_jump (x, label1);
+      alpha_emit_unlikely_jump (x, label1);
     }
 
   if (!is_mm_relaxed (mod_f))
@@ -4731,7 +4732,7 @@ alpha_split_atomic_exchange (rtx operand
   emit_insn (gen_store_conditional (mode, cond, mem, scratch));
 
   x = gen_rtx_EQ (DImode, cond, const0_rtx);
-  emit_unlikely_jump (x, label);
+  alpha_emit_unlikely_jump (x, label);
 
   alpha_post_atomic_barrier (model);
 }
@@ -4805,7 +4806,7 @@ alpha_split_atomic_exchange_12 (rtx oper
   emit_insn (gen_store_conditional (DImode, scratch, mem, scratch));
 
   x = gen_rtx_EQ (DImode, scratch, const0_rtx);
-  emit_unlikely_jump (x, label);
+  alpha_emit_unlikely_jump (x, label);
 
   alpha_post_atomic_barrier (model);
 }

Reply via email to