The patch simplifies usage of the profile_count and profile_probability types:
it adds *, *=, / and /= operators and uses them instead of apply_scale calls
with a constant factor.
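
For example, call sites that scaled by a constant factor via apply_scale can
now be written with the new operators (these lines are taken from the hunks
below):

    /* Before.  */
    jump_block->count = jump_block->count.apply_scale (1, 2);
    e->probability = profile_probability::guessed_always ().apply_scale (1, 8);

    /* After.  */
    jump_block->count /= 2;
    e->probability = profile_probability::guessed_always () / 8;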

The patch bootstraps on x86_64-linux-gnu and survives regression tests.

Ready to be installed?
Thanks,
Martin

gcc/ChangeLog:

        * bb-reorder.cc (find_traces_1_round): Use the new profile_count
        and profile_probability operators.
        (better_edge_p): Likewise.
        * cfgloop.cc (find_subloop_latch_edge_by_profile): Likewise.
        * cfgloopmanip.cc (scale_loop_profile): Likewise.
        * cfgrtl.cc (force_nonfallthru_and_redirect): Likewise.
        * cgraph.cc (cgraph_edge::maybe_hot_p): Likewise.
        * config/sh/sh.cc (expand_cbranchdi4): Likewise.
        * dojump.cc (do_compare_rtx_and_jump): Likewise.
        * final.cc (compute_alignments): Likewise.
        * ipa-cp.cc (update_counts_for_self_gen_clones): Likewise.
        (decide_about_value): Likewise.
        * ipa-inline-analysis.cc (do_estimate_edge_time): Likewise.
        * loop-unroll.cc (unroll_loop_runtime_iterations): Likewise.
        * modulo-sched.cc (sms_schedule): Likewise.
        * omp-expand.cc (extract_omp_for_update_vars): Likewise.
        (expand_omp_ordered_sink): Likewise.
        (expand_omp_for_ordered_loops): Likewise.
        (expand_omp_for_static_nochunk): Likewise.
        * predict.cc (maybe_hot_count_p): Likewise.
        (probably_never_executed): Likewise.
        (set_even_probabilities): Likewise.
        (handle_missing_profiles): Likewise.
        (expensive_function_p): Likewise.
        * profile-count.h (profile_probability::operator*): New.
        (profile_probability::operator*=): Likewise.
        (profile_probability::operator/): Likewise.
        (profile_probability::operator/=): Likewise.
        (profile_count::operator*): Likewise.
        (profile_count::operator*=): Likewise.
        (profile_count::operator/): Likewise.
        (profile_count::operator/=): Likewise.
        * profile.cc (compute_branch_probabilities): Use the new operators.
        * stmt.cc (emit_case_dispatch_table): Likewise.
        * symtab-thunks.cc (expand_thunk): Likewise.
        * tree-ssa-loop-manip.cc (tree_transform_and_unroll_loop): Likewise.
        * tree-ssa-sink.cc (select_best_block): Likewise.
        * tree-switch-conversion.cc
        (switch_decision_tree::analyze_switch_statement): Likewise.
        (switch_decision_tree::balance_case_nodes): Likewise.
        (switch_decision_tree::emit_case_nodes): Likewise.
        * tree-vect-loop.cc (scale_profile_for_vect_loop): Likewise.
---
 gcc/bb-reorder.cc             |  6 ++--
 gcc/cfgloop.cc                |  2 +-
 gcc/cfgloopmanip.cc           |  5 ++--
 gcc/cfgrtl.cc                 |  4 +--
 gcc/cgraph.cc                 |  5 ++--
 gcc/config/sh/sh.cc           |  2 +-
 gcc/dojump.cc                 |  2 +-
 gcc/final.cc                  | 12 +++-----
 gcc/ipa-cp.cc                 | 10 +++----
 gcc/ipa-inline-analysis.cc    |  2 +-
 gcc/loop-unroll.cc            |  8 +++---
 gcc/modulo-sched.cc           | 20 ++++++-------
 gcc/omp-expand.cc             | 24 ++++++----------
 gcc/predict.cc                | 17 ++++++-----
 gcc/profile-count.h           | 46 ++++++++++++++++++++++++++++--
 gcc/profile.cc                |  5 ++--
 gcc/stmt.cc                   |  5 ++--
 gcc/symtab-thunks.cc          | 10 +++----
 gcc/tree-ssa-loop-manip.cc    | 11 ++++----
 gcc/tree-ssa-sink.cc          |  3 +-
 gcc/tree-switch-conversion.cc | 53 +++++++++++++++++------------------
 gcc/tree-vect-loop.cc         |  5 ++--
 22 files changed, 137 insertions(+), 120 deletions(-)

diff --git a/gcc/bb-reorder.cc b/gcc/bb-reorder.cc
index d20ccb83aa6..6600f44d4d7 100644
--- a/gcc/bb-reorder.cc
+++ b/gcc/bb-reorder.cc
@@ -761,7 +761,7 @@ find_traces_1_round (int branch_th, profile_count count_th,
                            & EDGE_CAN_FALLTHRU)
                        && !(single_succ_edge (e->dest)->flags & EDGE_COMPLEX)
                        && single_succ (e->dest) == best_edge->dest
-                       && (e->dest->count.apply_scale (2, 1)
+                       && (e->dest->count * 2
                            >= best_edge->count () || for_size))
                      {
                        best_edge = e;
@@ -944,7 +944,7 @@ better_edge_p (const_basic_block bb, const_edge e, profile_probability prob,
 
   /* The BEST_* values do not have to be best, but can be a bit smaller than
      maximum values.  */
-  profile_probability diff_prob = best_prob.apply_scale (1, 10);
+  profile_probability diff_prob = best_prob / 10;
 
   /* The smaller one is better to keep the original order.  */
   if (optimize_function_for_size_p (cfun))
@@ -966,7 +966,7 @@ better_edge_p (const_basic_block bb, const_edge e, profile_probability prob,
     is_better_edge = false;
   else
     {
-      profile_count diff_count = best_count.apply_scale (1, 10);
+      profile_count diff_count = best_count / 10;
       if (count < best_count - diff_count
          || (!best_count.initialized_p ()
              && count.nonzero_p ()))
diff --git a/gcc/cfgloop.cc b/gcc/cfgloop.cc
index 5ffcc77d93f..57bf7b1855d 100644
--- a/gcc/cfgloop.cc
+++ b/gcc/cfgloop.cc
@@ -619,7 +619,7 @@ find_subloop_latch_edge_by_profile (vec<edge> latches)
     }
 
   if (!tcount.initialized_p () || !(tcount.ipa () > HEAVY_EDGE_MIN_SAMPLES)
-      || (tcount - mcount).apply_scale (HEAVY_EDGE_RATIO, 1) > tcount)
+      || (tcount - mcount) * HEAVY_EDGE_RATIO > tcount)
     return NULL;
 
   if (dump_file)
diff --git a/gcc/cfgloopmanip.cc b/gcc/cfgloopmanip.cc
index b4357c03e86..a1ac1146445 100644
--- a/gcc/cfgloopmanip.cc
+++ b/gcc/cfgloopmanip.cc
@@ -563,8 +563,7 @@ scale_loop_profile (class loop *loop, profile_probability p,
 
          /* Probability of exit must be 1/iterations.  */
          count_delta = e->count ();
-         e->probability = profile_probability::always ()
-                                   .apply_scale (1, iteration_bound);
+         e->probability = profile_probability::always () / iteration_bound;
          other_e->probability = e->probability.invert ();
 
          /* In code below we only handle the following two updates.  */
@@ -586,7 +585,7 @@ scale_loop_profile (class loop *loop, profile_probability p,
         we look at the actual profile, if it is available.  */
       p = profile_probability::always ();
 
-      count_in = count_in.apply_scale (iteration_bound, 1);
+      count_in *= iteration_bound;
       p = count_in.probability_in (loop->header->count);
       if (!(p > profile_probability::never ()))
        p = profile_probability::very_unlikely ();
diff --git a/gcc/cfgrtl.cc b/gcc/cfgrtl.cc
index 74ea14efc61..693941d41e8 100644
--- a/gcc/cfgrtl.cc
+++ b/gcc/cfgrtl.cc
@@ -1686,8 +1686,8 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
         add also edge from asm goto bb to target.  */
       if (asm_goto_edge)
        {
-         new_edge->probability = new_edge->probability.apply_scale (1, 2);
-         jump_block->count = jump_block->count.apply_scale (1, 2);
+         new_edge->probability /= 2;
+         jump_block->count /= 2;
          edge new_edge2 = make_edge (new_edge->src, target,
                                      e->flags & ~EDGE_FALLTHRU);
          new_edge2->probability = probability - new_edge->probability;
diff --git a/gcc/cgraph.cc b/gcc/cgraph.cc
index 4bb9e7ba6af..20a0b770596 100644
--- a/gcc/cgraph.cc
+++ b/gcc/cgraph.cc
@@ -2935,11 +2935,10 @@ cgraph_edge::maybe_hot_p (void)
     return false;
   if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE)
     {
-      if (count.apply_scale (2, 1) < where->count.apply_scale (3, 1))
+      if (count * 2 < where->count * 3)
        return false;
     }
-  else if (count.apply_scale (param_hot_bb_frequency_fraction , 1)
-          < where->count)
+  else if (count * param_hot_bb_frequency_fraction < where->count)
     return false;
   return true;
 }
diff --git a/gcc/config/sh/sh.cc b/gcc/config/sh/sh.cc
index 8d4056338a5..d05901678be 100644
--- a/gcc/config/sh/sh.cc
+++ b/gcc/config/sh/sh.cc
@@ -2178,7 +2178,7 @@ expand_cbranchdi4 (rtx *operands, enum rtx_code comparison)
          && prob.to_reg_br_prob_base () >= (int) (REG_BR_PROB_BASE * 3 / 8U)
          && prob.to_reg_br_prob_base () <= (int) (REG_BR_PROB_BASE * 5 / 8U))
        {
-         msw_taken_prob = prob.apply_scale (1, 2);
+         msw_taken_prob = prob / 2;
          msw_skip_prob = rev_prob.apply_scale (REG_BR_PROB_BASE,
                                                rev_prob.to_reg_br_prob_base ()
                                                + REG_BR_PROB_BASE);
diff --git a/gcc/dojump.cc b/gcc/dojump.cc
index 17a73da7448..2af0cd1aca3 100644
--- a/gcc/dojump.cc
+++ b/gcc/dojump.cc
@@ -1131,7 +1131,7 @@ do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
              profile_probability cprob
                = profile_probability::guessed_always ();
              if (first_code == UNORDERED)
-               cprob = cprob.apply_scale (1, 100);
+               cprob /= 100;
              else if (first_code == ORDERED)
                cprob = cprob.apply_scale (99, 100);
              else
diff --git a/gcc/final.cc b/gcc/final.cc
index a9868861bd2..c5427d2110d 100644
--- a/gcc/final.cc
+++ b/gcc/final.cc
@@ -642,8 +642,7 @@ compute_alignments (void)
       flow_loops_dump (dump_file, NULL, 1);
     }
   loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
-  profile_count count_threshold = cfun->cfg->count_max.apply_scale
-                (1, param_align_threshold);
+  profile_count count_threshold = cfun->cfg->count_max / param_align_threshold;
 
   if (dump_file)
     {
@@ -710,10 +709,9 @@ compute_alignments (void)
 
       if (!has_fallthru
          && (branch_count > count_threshold
-             || (bb->count > bb->prev_bb->count.apply_scale (10, 1)
+             || (bb->count > bb->prev_bb->count * 10
                  && (bb->prev_bb->count
-                     <= ENTRY_BLOCK_PTR_FOR_FN (cfun)
-                          ->count.apply_scale (1, 2)))))
+                     <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count / 2))))
        {
          align_flags alignment = JUMP_ALIGN (label);
          if (dump_file)
@@ -727,9 +725,7 @@ compute_alignments (void)
               && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
          && optimize_bb_for_speed_p (bb)
          && branch_count + fallthru_count > count_threshold
-         && (branch_count
-             > fallthru_count.apply_scale
-                   (param_align_loop_iterations, 1)))
+         && (branch_count > fallthru_count * param_align_loop_iterations))
        {
          align_flags alignment = LOOP_ALIGN (label);
          if (dump_file)
diff --git a/gcc/ipa-cp.cc b/gcc/ipa-cp.cc
index 11f4a327b99..316c174bede 100644
--- a/gcc/ipa-cp.cc
+++ b/gcc/ipa-cp.cc
@@ -4651,8 +4651,7 @@ update_counts_for_self_gen_clones (cgraph_node *orig_node,
     {
       profile_count orig_count = n->count;
       profile_count new_count
-       = (redist_sum.apply_scale (1, self_gen_clones.length ())
-          + other_edges_count[i]);
+       = (redist_sum / self_gen_clones.length () + other_edges_count[i]);
       new_count = lenient_count_portion_handling (new_count, orig_node);
       n->count = new_count;
       profile_count::adjust_for_ipa_scaling (&new_count, &orig_count);
@@ -4684,7 +4683,7 @@ update_counts_for_self_gen_clones (cgraph_node *orig_node,
            for (cgraph_edge *e = cs; e; e = get_next_cgraph_edge_clone (e))
              if (e->callee->ultimate_alias_target () == orig_node
                  && processed_edges.contains (e))
-               e->count = e->count.apply_scale (1, den);
+               e->count /= den;
        }
     }
 
@@ -4712,8 +4711,7 @@ update_counts_for_self_gen_clones (cgraph_node *orig_node,
              && desc.unproc_orig_rec_edges > 0)
            {
              desc.count = n->count - desc.count;
-             desc.count
-               = desc.count.apply_scale (1, desc.unproc_orig_rec_edges);
+             desc.count /= desc.unproc_orig_rec_edges;
              adjust_clone_incoming_counts (n, &desc);
            }
          else if (dump_file)
@@ -6083,7 +6081,7 @@ decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
       if (node->count.ipa ().nonzero_p ())
        {
          unsigned dem = self_gen_clones->length () + 1;
-         rec_count_sum = node->count.ipa ().apply_scale (1, dem);
+         rec_count_sum = node->count.ipa () / dem;
        }
       else
        rec_count_sum = profile_count::zero ();
diff --git a/gcc/ipa-inline-analysis.cc b/gcc/ipa-inline-analysis.cc
index 11d8d09ee43..1ca685d1b0e 100644
--- a/gcc/ipa-inline-analysis.cc
+++ b/gcc/ipa-inline-analysis.cc
@@ -254,7 +254,7 @@ do_estimate_edge_time (struct cgraph_edge *edge, sreal *ret_nonspec_time)
      probability that caller will call the callee is low however, since it
      may hurt optimization of the caller's hot path.  */
   if (edge->count.ipa ().initialized_p () && edge->maybe_hot_p ()
-      && (edge->count.ipa ().apply_scale (2, 1)
+      && (edge->count.ipa () * 2
          > (edge->caller->inlined_to
             ? edge->caller->inlined_to->count.ipa ()
             : edge->caller->count.ipa ())))
diff --git a/gcc/loop-unroll.cc b/gcc/loop-unroll.cc
index 69df1ae84b7..1956c54609c 100644
--- a/gcc/loop-unroll.cc
+++ b/gcc/loop-unroll.cc
@@ -978,7 +978,7 @@ unroll_loop_runtime_iterations (class loop *loop)
   /* Compute count increments for each switch block and initialize
      innermost switch block.  Switch blocks and peeled loop copies are built
      from innermost outward.  */
-  iter_count = new_count = swtch->count.apply_scale (1, max_unroll + 1);
+  iter_count = new_count = swtch->count / (max_unroll + 1);
   swtch->count = new_count;
 
   for (i = 0; i < n_peel; i++)
@@ -995,7 +995,7 @@ unroll_loop_runtime_iterations (class loop *loop)
 
       /* Create item for switch.  */
       unsigned j = n_peel - i - (extra_zero_check ? 0 : 1);
-      p = profile_probability::always ().apply_scale (1, i + 2);
+      p = profile_probability::always () / (i + 2);
 
       preheader = split_edge (loop_preheader_edge (loop));
       /* Add in count of edge from switch block.  */
@@ -1021,12 +1021,12 @@ unroll_loop_runtime_iterations (class loop *loop)
   if (extra_zero_check)
     {
       /* Add branch for zero iterations.  */
-      p = profile_probability::always ().apply_scale (1, max_unroll + 1);
+      p = profile_probability::always () / (max_unroll + 1);
       swtch = ezc_swtch;
       preheader = split_edge (loop_preheader_edge (loop));
       /* Recompute count adjustments since initial peel copy may
         have exited and reduced those values that were computed above.  */
-      iter_count = swtch->count.apply_scale (1, max_unroll + 1);
+      iter_count = swtch->count / (max_unroll + 1);
       /* Add in count of edge from switch block.  */
       preheader->count += iter_count;
       branch_code = compare_and_jump_seq (copy_rtx (niter), const0_rtx, EQ,
diff --git a/gcc/modulo-sched.cc b/gcc/modulo-sched.cc
index 1e1fa7055a2..2c95c0995ee 100644
--- a/gcc/modulo-sched.cc
+++ b/gcc/modulo-sched.cc
@@ -1439,10 +1439,10 @@ sms_schedule (void)
 
      /* Perform SMS only on loops that their average count is above threshold.  */
 
-      if ( latch_edge->count () > profile_count::zero ()
-          && (latch_edge->count()
-             < single_exit (loop)->count ().apply_scale
-                                (param_sms_loop_average_count_threshold, 1)))
+      if (latch_edge->count () > profile_count::zero ()
+         && (latch_edge->count ()
+             < (single_exit (loop)->count ()
+                * param_sms_loop_average_count_threshold)))
        {
          if (dump_file)
            {
@@ -1464,12 +1464,12 @@ sms_schedule (void)
         }
 
       /* Make sure this is a doloop.  */
-      if ( !(count_reg = doloop_register_get (head, tail)))
-      {
-        if (dump_file)
-          fprintf (dump_file, "SMS doloop_register_get failed\n");
-       continue;
-      }
+      if (!(count_reg = doloop_register_get (head, tail)))
+       {
+         if (dump_file)
+           fprintf (dump_file, "SMS doloop_register_get failed\n");
+         continue;
+       }
 
       /* Don't handle BBs with calls or barriers
         or !single_set with the exception of do-loop control part insns.
diff --git a/gcc/omp-expand.cc b/gcc/omp-expand.cc
index ee708314793..a1c021dd39b 100644
--- a/gcc/omp-expand.cc
+++ b/gcc/omp-expand.cc
@@ -3118,8 +3118,7 @@ extract_omp_for_update_vars (struct omp_for_data *fd, tree *nonrect_bounds,
       if (i < fd->collapse - 1)
        {
          e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
-         e->probability
-           = profile_probability::guessed_always ().apply_scale (1, 8);
+         e->probability = profile_probability::guessed_always () / 8;
 
          struct omp_for_data_loop *l = &fd->loops[i + 1];
          if (l->m1 == NULL_TREE || l->outer != 1)
@@ -3238,8 +3237,7 @@ extract_omp_for_update_vars (struct omp_for_data *fd, tree *nonrect_bounds,
                if (update_bb == NULL)
                  update_bb = this_bb;
                e = make_edge (this_bb, bb, EDGE_FALSE_VALUE);
-               e->probability
-                 = profile_probability::guessed_always ().apply_scale (1, 8);
+               e->probability = profile_probability::guessed_always () / 8;
                if (prev_bb == NULL)
                  set_immediate_dominator (CDI_DOMINATORS, this_bb, bb);
                prev_bb = this_bb;
@@ -3531,7 +3529,7 @@ expand_omp_ordered_sink (gimple_stmt_iterator *gsi, struct omp_for_data *fd,
                                   GSI_CONTINUE_LINKING);
   gsi_insert_after (gsi, gimple_build_cond_empty (cond), GSI_NEW_STMT);
   edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
-  e3->probability = profile_probability::guessed_always ().apply_scale (1, 8);
+  e3->probability = profile_probability::guessed_always () / 8;
   e1->probability = e3->probability.invert ();
   e1->flags = EDGE_TRUE_VALUE;
   set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
@@ -3685,7 +3683,7 @@ expand_omp_for_ordered_loops (struct omp_for_data *fd, tree *counts,
       remove_edge (e1);
       make_edge (body_bb, new_header, EDGE_FALLTHRU);
       e3->flags = EDGE_FALSE_VALUE;
-      e3->probability = profile_probability::guessed_always ().apply_scale (1, 8);
+      e3->probability = profile_probability::guessed_always () / 8;
       e1 = make_edge (new_header, new_body, EDGE_TRUE_VALUE);
       e1->probability = e3->probability.invert ();
 
@@ -5482,16 +5480,14 @@ expand_omp_for_static_nochunk (struct omp_region *region,
   ep->probability = profile_probability::guessed_always ().apply_scale (3, 4);
   ep = find_edge (entry_bb, second_bb);
   ep->flags = EDGE_TRUE_VALUE;
-  ep->probability = profile_probability::guessed_always ().apply_scale (1, 4);
+  ep->probability = profile_probability::guessed_always () / 4;
   if (fourth_bb)
     {
       ep = make_edge (third_bb, fifth_bb, EDGE_FALSE_VALUE);
-      ep->probability
-       = profile_probability::guessed_always ().apply_scale (1, 2);
+      ep->probability = profile_probability::guessed_always () / 2;
       ep = find_edge (third_bb, fourth_bb);
       ep->flags = EDGE_TRUE_VALUE;
-      ep->probability
-       = profile_probability::guessed_always ().apply_scale (1, 2);
+      ep->probability = profile_probability::guessed_always () / 2;
       ep = find_edge (fourth_bb, fifth_bb);
       redirect_edge_and_branch (ep, sixth_bb);
     }
@@ -5502,12 +5498,10 @@ expand_omp_for_static_nochunk (struct omp_region *region,
   if (exit1_bb)
     {
       ep = make_edge (exit_bb, exit2_bb, EDGE_FALSE_VALUE);
-      ep->probability
-       = profile_probability::guessed_always ().apply_scale (1, 2);
+      ep->probability = profile_probability::guessed_always () / 2;
       ep = find_edge (exit_bb, exit1_bb);
       ep->flags = EDGE_TRUE_VALUE;
-      ep->probability
-       = profile_probability::guessed_always ().apply_scale (1, 2);
+      ep->probability = profile_probability::guessed_always () / 2;
       ep = find_edge (exit1_bb, exit2_bb);
       redirect_edge_and_branch (ep, exit3_bb);
     }
diff --git a/gcc/predict.cc b/gcc/predict.cc
index 5734e4c8516..62da149e2b6 100644
--- a/gcc/predict.cc
+++ b/gcc/predict.cc
@@ -172,7 +172,7 @@ maybe_hot_count_p (struct function *fun, profile_count count)
       if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
          && count < (ENTRY_BLOCK_PTR_FOR_FN (fun)->count.apply_scale (2, 3)))
        return false;
-      if (count.apply_scale (param_hot_bb_frequency_fraction, 1)
+      if (count * param_hot_bb_frequency_fraction
          < ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
        return false;
       return true;
@@ -219,7 +219,7 @@ probably_never_executed (struct function *fun, profile_count count)
   if (count.precise_p () && profile_status_for_fn (fun) == PROFILE_READ)
     {
       const int unlikely_frac = param_unlikely_bb_count_fraction;
-      if (count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
+      if (count * unlikely_frac >= profile_info->runs)
        return false;
       return true;
     }
@@ -916,12 +916,12 @@ set_even_probabilities (basic_block bb,
            else
              {
                profile_probability remainder = prob.invert ();
-               remainder -= profile_probability::very_unlikely ()
-                 .apply_scale (unlikely_count, 1);
+               remainder -= (profile_probability::very_unlikely ()
+                             * unlikely_count);
                int count = nedges - unlikely_count - 1;
                gcc_assert (count >= 0);
 
-               e->probability = remainder.apply_scale (1, count);
+               e->probability = remainder / count;
              }
          }
        else
@@ -940,7 +940,7 @@ set_even_probabilities (basic_block bb,
            if (unlikely_edges != NULL && unlikely_edges->contains (e))
              e->probability = profile_probability::very_unlikely ();
            else
-             e->probability = all.apply_scale (1, scale);
+             e->probability = all / scale;
          }
        else
          e->probability = profile_probability::never ();
@@ -3619,7 +3619,7 @@ handle_missing_profiles (void)
 
       if (call_count > 0
           && fn && fn->cfg
-          && call_count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
+         && call_count * unlikely_frac >= profile_info->runs)
         {
           drop_profile (node, call_count);
           worklist.safe_push (node);
@@ -3684,8 +3684,7 @@ expensive_function_p (int threshold)
   if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.nonzero_p ())
     return true;
 
-  profile_count limit = ENTRY_BLOCK_PTR_FOR_FN
-                          (cfun)->count.apply_scale (threshold, 1);
+  profile_count limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count * threshold;
   profile_count sum = profile_count::zero ();
   FOR_EACH_BB_FN (bb, cfun)
     {
diff --git a/gcc/profile-count.h b/gcc/profile-count.h
index 7c66f8dd8a7..be6e2d57cf7 100644
--- a/gcc/profile-count.h
+++ b/gcc/profile-count.h
@@ -185,7 +185,7 @@ public:
   static profile_probability very_unlikely ()
     {
       /* Be consistent with PROB_VERY_UNLIKELY in predict.h.  */
-      profile_probability r = guessed_always ().apply_scale (1, 2000);
+      profile_probability r = guessed_always () / 2000;
       r.m_val--;
       return r;
     }
@@ -193,14 +193,14 @@ public:
   static profile_probability unlikely ()
     {
       /* Be consistent with PROB_VERY_LIKELY in predict.h.  */
-      profile_probability r = guessed_always ().apply_scale (1, 5);
+      profile_probability r = guessed_always () / 5;
       r.m_val--;
       return r;
     }
 
   static profile_probability even ()
     {
-      return guessed_always ().apply_scale (1, 2);
+      return guessed_always () / 2;
     }
 
   static profile_probability very_likely ()
@@ -600,6 +600,26 @@ public:
      return initialized_p () && other.initialized_p () && m_val >= other.m_val;
     }
 
+  profile_probability operator* (int64_t num) const
+    {
+      return apply_scale (num, 1);
+    }
+
+  profile_probability operator*= (int64_t num)
+    {
+      return *this = *this * num;
+    }
+
+  profile_probability operator/ (int64_t den) const
+    {
+      return apply_scale (1, den);
+    }
+
+  profile_probability operator/= (int64_t den)
+    {
+      return *this = *this / den;
+    }
+
   /* Get the value of the count.  */
   uint32_t value () const { return m_val; }
 
@@ -992,6 +1012,26 @@ public:
       return ipa ().initialized_p () && ipa ().m_val >= (uint64_t) other;
     }
 
+  profile_count operator* (int64_t num) const
+    {
+      return apply_scale (num, 1);
+    }
+
+  profile_count operator*= (int64_t num)
+    {
+      return *this = *this * num;
+    }
+
+  profile_count operator/ (int64_t den) const
+    {
+      return apply_scale (1, den);
+    }
+
+  profile_count operator/= (int64_t den)
+    {
+      return *this = *this / den;
+    }
+
   /* Return true when value is not zero and can be used for scaling. 
      This is different from *this > 0 because that requires counter to
      be IPA.  */
diff --git a/gcc/profile.cc b/gcc/profile.cc
index a67cce5b199..08af512cbca 100644
--- a/gcc/profile.cc
+++ b/gcc/profile.cc
@@ -716,7 +716,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability
-                   = profile_probability::guessed_always ().apply_scale (1, total);
+                   = profile_probability::guessed_always () / total;
                else
                  e->probability = profile_probability::never ();
            }
@@ -724,8 +724,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
            {
              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
-               e->probability
-                = profile_probability::guessed_always ().apply_scale (1, total);
+               e->probability = profile_probability::guessed_always () / total;
            }
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
diff --git a/gcc/stmt.cc b/gcc/stmt.cc
index ea78a505b0b..11cc70f0013 100644
--- a/gcc/stmt.cc
+++ b/gcc/stmt.cc
@@ -822,9 +822,8 @@ emit_case_dispatch_table (tree index_expr, tree index_type,
          through the indirect jump or the direct conditional jump
          before that. Split the probability of reaching the
          default label among these two jumps.  */
-      new_default_prob
-       = conditional_probability (default_prob.apply_scale (1, 2), base);
-      default_prob = default_prob.apply_scale (1, 2);
+      new_default_prob = conditional_probability (default_prob / 2, base);
+      default_prob /= 2;
       base -= default_prob;
     }
   else
diff --git a/gcc/symtab-thunks.cc b/gcc/symtab-thunks.cc
index 73f810dc217..a61515e3a3f 100644
--- a/gcc/symtab-thunks.cc
+++ b/gcc/symtab-thunks.cc
@@ -579,11 +579,11 @@ expand_thunk (cgraph_node *node, bool output_asm_thunks,
                     adjustment, because that's why we're emitting a
                     thunk.  */
                  then_bb = create_basic_block (NULL, bb);
-                 then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
+                 then_bb->count = cfg_count - cfg_count / 16;
                  return_bb = create_basic_block (NULL, then_bb);
                  return_bb->count = cfg_count;
                  else_bb = create_basic_block (NULL, else_bb);
-                 else_bb->count = cfg_count.apply_scale (1, 16);
+                 else_bb->count = cfg_count / 16;
                  add_bb_to_loop (then_bb, bb->loop_father);
                  add_bb_to_loop (return_bb, bb->loop_father);
                  add_bb_to_loop (else_bb, bb->loop_father);
@@ -594,11 +594,9 @@ expand_thunk (cgraph_node *node, bool output_asm_thunks,
                                            NULL_TREE, NULL_TREE);
                  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
                  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
-                 e->probability = profile_probability::guessed_always ()
-                                       .apply_scale (1, 16);
+                 e->probability = profile_probability::guessed_always () / 16;
                  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
-                 e->probability = profile_probability::guessed_always ()
-                                       .apply_scale (1, 16);
+                 e->probability = profile_probability::guessed_always () / 16;
                  make_single_succ_edge (return_bb,
                                         EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
diff --git a/gcc/tree-ssa-loop-manip.cc b/gcc/tree-ssa-loop-manip.cc
index 66964254cb5..5d435637e98 100644
--- a/gcc/tree-ssa-loop-manip.cc
+++ b/gcc/tree-ssa-loop-manip.cc
@@ -1421,8 +1421,8 @@ tree_transform_and_unroll_loop (class loop *loop, unsigned factor,
        }
 
       basic_block rest = new_exit->dest;
-      new_exit->probability = profile_probability::always ()
-       .apply_scale (1, new_est_niter + 1);
+      new_exit->probability
+       = (profile_probability::always () / (new_est_niter + 1));
 
       rest->count += new_exit->count ();
 
@@ -1463,8 +1463,7 @@ tree_transform_and_unroll_loop (class loop *loop, unsigned factor,
          && TREE_CODE (desc->niter) == INTEGER_CST)
        {
          /* The + 1 converts latch counts to iteration counts.  */
-         profile_count new_header_count
-           = (in_count.apply_scale (new_est_niter + 1, 1));
+         profile_count new_header_count = in_count * (new_est_niter + 1);
          basic_block *body = get_loop_body (loop);
          scale_bbs_frequencies_profile_count (body, loop->num_nodes,
                                               new_header_count,
@@ -1502,8 +1501,8 @@ tree_transform_and_unroll_loop (class loop *loop, unsigned factor,
            e->dest->count / e->src->count ~= new e->probability
 
         for every outgoing edge e of NEW_EXIT->src.  */
-      profile_probability new_exit_prob = profile_probability::always ()
-       .apply_scale (1, new_est_niter + 1);
+      profile_probability new_exit_prob
+       = profile_probability::always () / (new_est_niter + 1);
       change_edge_frequency (new_exit, new_exit_prob);
     }
 
diff --git a/gcc/tree-ssa-sink.cc b/gcc/tree-ssa-sink.cc
index 1c226406feb..27fae2858d2 100644
--- a/gcc/tree-ssa-sink.cc
+++ b/gcc/tree-ssa-sink.cc
@@ -230,8 +230,7 @@ select_best_block (basic_block early_bb,
   if (bb_loop_depth (best_bb) == bb_loop_depth (early_bb)
       /* If result of comparsion is unknown, prefer EARLY_BB.
         Thus use !(...>=..) rather than (...<...)  */
-      && !(best_bb->count.apply_scale (100, 1)
-          >= early_bb->count.apply_scale (threshold, 1)))
+      && !(best_bb->count * 100 >= early_bb->count * threshold))
     return best_bb;
 
   /* No better block found, so return EARLY_BB, which happens to be the
diff --git a/gcc/tree-switch-conversion.cc b/gcc/tree-switch-conversion.cc
index e14b4e6c94a..cef26a9878e 100644
--- a/gcc/tree-switch-conversion.cc
+++ b/gcc/tree-switch-conversion.cc
@@ -1782,7 +1782,7 @@ switch_decision_tree::analyze_switch_statement ()
       tree high = CASE_HIGH (elt);
 
       profile_probability p
-       = case_edge->probability.apply_scale (1, (intptr_t) (case_edge->aux));
+       = case_edge->probability / ((intptr_t) (case_edge->aux));
       clusters.quick_push (new simple_cluster (low, high, elt, case_edge->dest,
                                               p));
       m_case_bbs.quick_push (case_edge->dest);
@@ -2061,7 +2061,7 @@ switch_decision_tree::balance_case_nodes (case_tree_node **head,
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
-         profile_probability pivot_prob = prob.apply_scale (1, 2);
+         profile_probability pivot_prob = prob / 2;
 
          /* Find the place in the list that bisects the list's total cost,
             where ranges count as 2.  */
@@ -2263,12 +2263,11 @@ switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
              redirect_edge_succ (single_pred_edge (test_bb),
                                  single_succ_edge (bb)->dest);
 
-             p = ((node->m_right->m_c->m_subtree_prob
-                   + default_prob.apply_scale (1, 2))
+             p = ((node->m_right->m_c->m_subtree_prob + default_prob / 2)
                   / (node->m_c->m_subtree_prob + default_prob));
              bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
                                            GT_EXPR, test_bb, p, loc);
-             default_prob = default_prob.apply_scale (1, 2);
+             default_prob /= 2;
 
              /* Handle the left-hand subtree.  */
              bb = emit_case_nodes (bb, index, node->m_left,
@@ -2297,11 +2296,11 @@ switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
          if (node->m_right->has_child ()
              || !node->m_right->m_c->is_single_value_p ())
            {
-             p = (default_prob.apply_scale (1, 2)
+             p = ((default_prob / 2)
                   / (node->m_c->m_subtree_prob + default_prob));
              bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
                                            LT_EXPR, m_default_bb, p, loc);
-             default_prob = default_prob.apply_scale (1, 2);
+             default_prob /= 2;
 
              bb = emit_case_nodes (bb, index, node->m_right, default_prob,
                                    index_type, loc);
@@ -2324,11 +2323,11 @@ switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
          if (node->m_left->has_child ()
              || !node->m_left->m_c->is_single_value_p ())
            {
-             p = (default_prob.apply_scale (1, 2)
+             p = ((default_prob / 2)
                   / (node->m_c->m_subtree_prob + default_prob));
              bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
                                            GT_EXPR, m_default_bb, p, loc);
-                 default_prob = default_prob.apply_scale (1, 2);
+             default_prob /= 2;
 
              bb = emit_case_nodes (bb, index, node->m_left, default_prob,
                                    index_type, loc);
@@ -2361,29 +2360,29 @@ switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
           profile_probability right_prob = profile_probability::never ();
           if (node->m_right)
             right_prob = node->m_right->m_c->m_subtree_prob;
-         p = ((right_prob + default_prob.apply_scale (1, 2))
-              / (node->m_c->m_subtree_prob + default_prob));
+          p = ((right_prob + default_prob / 2)
+               / (node->m_c->m_subtree_prob + default_prob));
 
-         bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
-                                       GT_EXPR, test_bb, p, loc);
-         default_prob = default_prob.apply_scale (1, 2);
+          bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
+                                        GT_EXPR, test_bb, p, loc);
+          default_prob /= 2;
 
-         /* Value belongs to this node or to the left-hand subtree.  */
-         p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
-         bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
-                                       GE_EXPR, node->m_c->m_case_bb, p, loc);
+          /* Value belongs to this node or to the left-hand subtree.  */
+          p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
+          bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
+                                        GE_EXPR, node->m_c->m_case_bb, p, loc);
 
-         /* Handle the left-hand subtree.  */
-         bb = emit_case_nodes (bb, index, node->m_left,
-                               default_prob, index_type, loc);
+          /* Handle the left-hand subtree.  */
+          bb = emit_case_nodes (bb, index, node->m_left, default_prob,
+                                index_type, loc);
 
-         /* If the left-hand subtree fell through,
-            don't let it fall into the right-hand subtree.  */
-         if (bb && m_default_bb)
-           emit_jump (bb, m_default_bb);
+          /* If the left-hand subtree fell through,
+             don't let it fall into the right-hand subtree.  */
+          if (bb && m_default_bb)
+            emit_jump (bb, m_default_bb);
 
-         bb = emit_case_nodes (test_bb, index, node->m_right,
-                               default_prob, index_type, loc);
+          bb = emit_case_nodes (test_bb, index, node->m_right, default_prob,
+                                index_type, loc);
        }
       else
        {
diff --git a/gcc/tree-vect-loop.cc b/gcc/tree-vect-loop.cc
index ab7dade1c74..304623c305a 100644
--- a/gcc/tree-vect-loop.cc
+++ b/gcc/tree-vect-loop.cc
@@ -9342,13 +9342,12 @@ scale_profile_for_vect_loop (class loop *loop, unsigned vf)
         in loop's preheader.  */
       if (!(freq_e == profile_count::zero ()))
         freq_e = freq_e.force_nonzero ();
-      p = freq_e.apply_scale (new_est_niter + 1, 1).probability_in (freq_h);
+      p = (freq_e * (new_est_niter + 1)).probability_in (freq_h);
       scale_loop_frequencies (loop, p);
     }
 
   edge exit_e = single_exit (loop);
-  exit_e->probability = profile_probability::always ()
-                                .apply_scale (1, new_est_niter + 1);
+  exit_e->probability = profile_probability::always () / (new_est_niter + 1);
 
   edge exit_l = single_pred_edge (loop->latch);
   profile_probability prob = exit_l->probability;
-- 
2.36.0
