As the partial antic computation ignores back-edges (it is a union
dataflow problem and would thus blow up when working over back-edges),
we can avoid iterating altogether by processing blocks in the correct
order (postorder).
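
To illustrate why a single pass is enough, here is a tiny stand-alone
sketch (not GCC code; the toy CFG, bit sets and helper data are purely
illustrative): a union dataflow problem solved in one postorder walk,
with the back-edge simply dropped.

/* Hypothetical stand-alone sketch: a union dataflow problem over a
   small CFG, solved in one pass by visiting blocks in postorder so
   that every forward-edge successor is processed before the block
   itself.  The back-edge 3 -> 1 is deliberately ignored.  */

#include <stdio.h>

#define NBLOCKS 4

/* succ[b] lists forward-edge successors of block b; -1 terminates.  */
static const int succ[NBLOCKS][3] = {
  { 1, -1, -1 },      /* 0 -> 1          */
  { 2, 3, -1 },       /* 1 -> 2, 1 -> 3  */
  { 3, -1, -1 },      /* 2 -> 3          */
  { -1, -1, -1 },     /* 3 (back-edge 3 -> 1 dropped)  */
};

/* Postorder of the CFG: every forward-edge successor of a block
   appears earlier in this array than the block itself.  */
static const int postorder[NBLOCKS] = { 3, 2, 1, 0 };

/* gen[b] is a bit set of values generated in block b; out[b] becomes
   gen[b] unioned with the out sets of all forward-edge successors.  */
static const unsigned gen[NBLOCKS] = { 0x1, 0x2, 0x4, 0x8 };
static unsigned out[NBLOCKS];

int
main (void)
{
  /* One pass suffices: when we reach block b, every forward-edge
     successor already has its final out[] value.  */
  for (int i = 0; i < NBLOCKS; i++)
    {
      int b = postorder[i];
      out[b] = gen[b];
      for (int j = 0; succ[b][j] != -1; j++)
        out[b] |= out[succ[b][j]];
    }

  for (int b = 0; b < NBLOCKS; b++)
    printf ("out[%d] = 0x%x\n", b, out[b]);
  return 0;
}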

Bootstrapped on x86_64-unknown-linux-gnu, testing in progress.

Queued for stage1.

Richard.

2016-04-14  Richard Biener  <rguent...@suse.de>

        * tree-ssa-pre.c (postorder, postorder_num): Make locals ...
        (compute_antic): ... here.  For partial antic use regular
        postorder and scrap iteration.
        (compute_partial_antic_aux): Remove unused return value.
        (init_pre): Do not allocate postorder.
        (fini_pre): Do not free postorder.

Index: gcc/tree-ssa-pre.c
===================================================================
*** gcc/tree-ssa-pre.c  (revision 234970)
--- gcc/tree-ssa-pre.c  (working copy)
*************** typedef struct bb_bitmap_sets
*** 432,441 ****
  #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
  
  
- /* Basic block list in postorder.  */
- static int *postorder;
- static int postorder_num;
- 
  /* This structure is used to keep track of statistics on what
     optimization PRE was able to perform.  */
  static struct
--- 432,437 ----
*************** compute_antic_aux (basic_block block, bo
*** 2209,2219 ****
                                  - ANTIC_IN[BLOCK])
  
  */
! static bool
  compute_partial_antic_aux (basic_block block,
                           bool block_has_abnormal_pred_edge)
  {
-   bool changed = false;
    bitmap_set_t old_PA_IN;
    bitmap_set_t PA_OUT;
    edge e;
--- 2228,2237 ----
                                  - ANTIC_IN[BLOCK])
  
  */
! static void
  compute_partial_antic_aux (basic_block block,
                           bool block_has_abnormal_pred_edge)
  {
    bitmap_set_t old_PA_IN;
    bitmap_set_t PA_OUT;
    edge e;
*************** compute_partial_antic_aux (basic_block b
*** 2312,2320 ****
  
    dependent_clean (PA_IN (block), ANTIC_IN (block));
  
-   if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
-     changed = true;
- 
   maybe_dump_sets:
    if (dump_file && (dump_flags & TDF_DETAILS))
      {
--- 2330,2335 ----
*************** compute_partial_antic_aux (basic_block b
*** 2327,2333 ****
      bitmap_set_free (old_PA_IN);
    if (PA_OUT)
      bitmap_set_free (PA_OUT);
-   return changed;
  }
  
  /* Compute ANTIC and partial ANTIC sets.  */
--- 2342,2347 ----
*************** compute_antic (void)
*** 2349,2371 ****
  
    FOR_ALL_BB_FN (block, cfun)
      {
        FOR_EACH_EDGE (e, ei, block->preds)
        if (e->flags & EDGE_ABNORMAL)
          {
            bitmap_set_bit (has_abnormal_preds, block->index);
            break;
          }
  
-       BB_VISITED (block) = 0;
- 
        /* While we are here, give empty ANTIC_IN sets to each block.  */
        ANTIC_IN (block) = bitmap_set_new ();
!       PA_IN (block) = bitmap_set_new ();
      }
  
    /* At the exit block we anticipate nothing.  */
    BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
  
    sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
    bitmap_ones (worklist);
    while (changed)
--- 2363,2395 ----
  
    FOR_ALL_BB_FN (block, cfun)
      {
+       BB_VISITED (block) = 0;
+ 
        FOR_EACH_EDGE (e, ei, block->preds)
        if (e->flags & EDGE_ABNORMAL)
          {
            bitmap_set_bit (has_abnormal_preds, block->index);
+ 
+           /* We also anticipate nothing.  */
+           BB_VISITED (block) = 1;
            break;
          }
  
        /* While we are here, give empty ANTIC_IN sets to each block.  */
        ANTIC_IN (block) = bitmap_set_new ();
!       if (do_partial_partial)
!       PA_IN (block) = bitmap_set_new ();
      }
  
    /* At the exit block we anticipate nothing.  */
    BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
  
+   /* For ANTIC computation we need a postorder that also guarantees that
+      a block with a single successor is visited after its successor.
+      RPO on the inverted CFG has this property.  */
+   int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
+   int postorder_num = inverted_post_order_compute (postorder);
+ 
    sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
    bitmap_ones (worklist);
    while (changed)
*************** compute_antic (void)
*** 2403,2441 ****
  
    if (do_partial_partial)
      {
!       bitmap_ones (worklist);
!       num_iterations = 0;
!       changed = true;
!       while (changed)
        {
!         if (dump_file && (dump_flags & TDF_DETAILS))
!           fprintf (dump_file, "Starting iteration %d\n", num_iterations);
!         num_iterations++;
!         changed = false;
!         for (i = postorder_num - 1 ; i >= 0; i--)
!           {
!             if (bitmap_bit_p (worklist, postorder[i]))
!               {
!                 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
!                 bitmap_clear_bit (worklist, block->index);
!                 if (compute_partial_antic_aux (block,
!                                                bitmap_bit_p (has_abnormal_preds,
!                                                              block->index)))
!                   {
!                     FOR_EACH_EDGE (e, ei, block->preds)
!                       bitmap_set_bit (worklist, e->src->index);
!                     changed = true;
!                   }
!               }
!           }
!         /* Theoretically possible, but *highly* unlikely.  */
!         gcc_checking_assert (num_iterations < 500);
        }
-       statistics_histogram_event (cfun, "compute_partial_antic iterations",
-                                 num_iterations);
      }
    sbitmap_free (has_abnormal_preds);
    sbitmap_free (worklist);
  }
  
  
--- 2427,2447 ----
  
    if (do_partial_partial)
      {
!       /* For partial antic we ignore backedges and thus we do not need
!          to perform any iteration when we process blocks in postorder.  */
!       postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
!       for (i = postorder_num - 1 ; i >= 0; i--)
        {
!         basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
!         compute_partial_antic_aux (block,
!                                    bitmap_bit_p (has_abnormal_preds,
!                                                  block->index));
        }
      }
+ 
    sbitmap_free (has_abnormal_preds);
    sbitmap_free (worklist);
+   free (postorder);
  }
  
  
*************** init_pre (void)
*** 4694,4705 ****
    connect_infinite_loops_to_exit ();
    memset (&pre_stats, 0, sizeof (pre_stats));
  
-   /* For ANTIC computation we need a postorder that also guarantees that
-      a block with a single successor is visited after its successor.
-      RPO on the inverted CFG has this property.  */
-   postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-   postorder_num = inverted_post_order_compute (postorder);
- 
    alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
  
    calculate_dominance_info (CDI_DOMINATORS);
--- 4738,4743 ----
*************** init_pre (void)
*** 4722,4728 ****
  static void
  fini_pre ()
  {
-   free (postorder);
    value_expressions.release ();
    BITMAP_FREE (inserted_exprs);
    bitmap_obstack_release (&grand_bitmap_obstack);
--- 4760,4765 ----
