Some of the compare-and-branch patterns rely on CC as a scratch
register in some of their alternative expansions.  This is fine:
when combine forms the combined compare-and-branch patterns, it
eliminates a write to CC, so CC is dead at that point anyway.
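
For illustration (an RTL sketch with a made-up register, not taken
from the patch), combine merges a pair of insns such as

    (set (reg:CC CC_REGNUM)
         (compare:CC (reg:DI x0) (const_int 0)))
    (set (pc) (if_then_else (eq (reg:CC CC_REGNUM) (const_int 0))
                            (label_ref L) (pc)))

into a single compare-and-branch

    (parallel [(set (pc) (if_then_else (eq (reg:DI x0) (const_int 0))
                                       (label_ref L) (pc)))
               (clobber (reg:CC CC_REGNUM))])

Since the insn that set CC is deleted in the process, the added
clobber cannot clobber a live value.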

Standardize on the cc clobber for all such patterns, so that we don't
wind up with conflicting pnum_clobbers requirements between them.
This fixes an assert:

0xa1fffe fancy_abort(char const*, int, char const*)
        ../../gcc/diagnostics/context.cc:1640
0x81340e patch_jump_insn
        ../../gcc/cfgrtl.cc:1303
0xc0eafe redirect_branch_edge
        ../../gcc/cfgrtl.cc:1330
0xc0f372 cfg_layout_redirect_edge_and_branch
        ../../gcc/cfgrtl.cc:4736
0xbfb6b9 redirect_edge_and_branch(edge_def*, basic_block_def*)
        ../../gcc/cfghooks.cc:391
0x1fa9310 try_forward_edges
        ../../gcc/cfgcleanup.cc:561
0x1fa9310 try_optimize_cfg
        ../../gcc/cfgcleanup.cc:2931
0x1fa9310 cleanup_cfg(int)
        ../../gcc/cfgcleanup.cc:3143
0x1fe11e8 rest_of_handle_cse
        ../../gcc/cse.cc:7591
0x1fe11e8 execute
        ../../gcc/cse.cc:7622

where the choice between aarch64_tbzltdi1 and aarch64_cbltdi
resulted in a recog failure.
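
For context, recog reports through its pnum_clobbers out-parameter
that a pattern would match if extra clobbers were added, and the
caller then rewraps the insn in a PARALLEL.  A rough sketch of that
protocol, modelled on existing callers such as combine's
recog_for_combine (not code from this patch):

    /* Ask whether the body matches, possibly with extra clobbers.  */
    int num_clobbers = 0;
    int icode = recog (PATTERN (insn), insn, &num_clobbers);
    if (icode >= 0 && num_clobbers > 0)
      {
        /* Wrap the original body in a PARALLEL and append the
           clobbers the pattern requires.  */
        rtx pat = gen_rtx_PARALLEL (VOIDmode,
                                    rtvec_alloc (num_clobbers + 1));
        XVECEXP (pat, 0, 0) = PATTERN (insn);
        add_clobbers (pat, icode);
        PATTERN (insn) = pat;
      }

If one candidate pattern spells out the cc clobber and a competing
one does not, the same jump can recognize differently before and
after redirection, and re-recognition can fail.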

Because this removes the direct expansion to the TARGET_CMPBR
patterns, those patterns can now be formed only by combine; and
because the rtx costing is wrong, combine won't form them, so the
CMPBR patterns won't be used.  To be fixed in a subsequent patch.

gcc:
        PR target/121385
        * config/aarch64/aarch64.cc (aarch64_gen_compare_zero_and_branch):
        Add cc clobber when expanding to CB<EQL>.
        * config/aarch64/aarch64.md (cbranch<GPI>4): Don't directly
        expand to TARGET_CMPBR patterns.
        (*aarch64_cbz<optab><mode>): Add cc clobber.
        ("aarch64_cb<INT_CMP><GPI>): Likewise.
        ("aarch64_cb<INT_CMP><SHORT>): Likewise.
---
 gcc/config/aarch64/aarch64.cc | 11 ++++++++++-
 gcc/config/aarch64/aarch64.md | 23 ++++++++++-------------
 2 files changed, 20 insertions(+), 14 deletions(-)

diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc
index b7d26cded8b..ff9243ea732 100644
--- a/gcc/config/aarch64/aarch64.cc
+++ b/gcc/config/aarch64/aarch64.cc
@@ -2901,7 +2901,16 @@ aarch64_gen_compare_zero_and_branch (rtx_code code, rtx x,
 
   x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
                            gen_rtx_LABEL_REF (Pmode, label), pc_rtx);
-  return gen_rtx_SET (pc_rtx, x);
+  x = gen_rtx_SET (pc_rtx, x);
+
+  if (!aarch64_track_speculation)
+    {
+      rtx c = gen_rtx_REG (CCmode, CC_REGNUM);
+      c = gen_rtx_CLOBBER (VOIDmode, c);
+      x = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x, c));
+    }
+
+  return x;
 }
 
 /* Return an rtx that branches to LABEL based on the value of bit BITNUM of X.
diff --git a/gcc/config/aarch64/aarch64.md b/gcc/config/aarch64/aarch64.md
index 7def6d3ff36..b947da977c3 100644
--- a/gcc/config/aarch64/aarch64.md
+++ b/gcc/config/aarch64/aarch64.md
@@ -755,16 +755,9 @@
                           (pc)))]
   ""
   {
-    if (TARGET_CMPBR && aarch64_cb_rhs (GET_CODE (operands[0]), operands[2]))
-      {
-       /* The branch is supported natively.  */
-      }
-    else
-      {
-        operands[1] = aarch64_gen_compare_reg (GET_CODE (operands[0]),
-                                              operands[1], operands[2]);
-        operands[2] = const0_rtx;
-      }
+    operands[1] = aarch64_gen_compare_reg (GET_CODE (operands[0]),
+                                          operands[1], operands[2]);
+    operands[2] = const0_rtx;
   }
 )
 
@@ -794,11 +787,13 @@
 )
 
 ;; For an EQ/NE comparison against zero, emit `CBZ`/`CBNZ`
+;; The clobber is present for consistency with the other compare-and-branch patterns.
 (define_insn "*aarch64_cbz<optab><mode>"
   [(set (pc) (if_then_else (EQL (match_operand:GPI 0 "register_operand" "r")
                                (const_int 0))
                           (label_ref (match_operand 1))
-                          (pc)))]
+                          (pc)))
+   (clobber (reg:CC CC_REGNUM))]
   "!aarch64_track_speculation"
   {
     if (get_attr_length (insn) == 8)
@@ -877,7 +872,8 @@
                             (match_operand:GPI 1 "nonmemory_operand"
                               "r<INT_CMP:cmpbr_imm_constraint>"))
                           (label_ref (match_operand 2))
-                          (pc)))]
+                          (pc)))
+   (clobber (reg:CC CC_REGNUM))]
   "TARGET_CMPBR && aarch64_cb_rhs (<INT_CMP:CODE>, operands[1])"
   {
     return (get_attr_far_branch (insn) == FAR_BRANCH_NO)
@@ -908,7 +904,8 @@
                             (match_operand:SHORT 0 "register_operand" "r")
                             (match_operand:SHORT 1 "aarch64_reg_or_zero" "rZ"))
                           (label_ref (match_operand 2))
-                          (pc)))]
+                          (pc)))
+   (clobber (reg:CC CC_REGNUM))]
   "TARGET_CMPBR"
   {
     return (get_attr_far_branch (insn) == FAR_BRANCH_NO)
-- 
2.43.0
