Fixed ChangeLog entries.

pf3gnuchains/gcc-fork.git: gcc/predict.c
index 6a887be..6353fb9 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -1,5 +1,5 @@
 /* Branch prediction routines for the GNU compiler.
-   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
+   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
    Free Software Foundation, Inc.
 
 This file is part of GCC.
@@ -66,8 +66,10 @@ along with GCC; see the file COPYING3.  If not see
 static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;
 
-/* Random guesstimation given names.  */
-#define PROB_VERY_UNLIKELY     (REG_BR_PROB_BASE / 100 - 1)
+/* Random guesstimation given names.
+   PROB_VERY_UNLIKELY should be small enough so that a basic block
+   predicted by it gets below HOT_BB_FREQUENCY_FRACTION.  */
+#define PROB_VERY_UNLIKELY     (REG_BR_PROB_BASE / 2000 - 1)
 #define PROB_EVEN              (REG_BR_PROB_BASE / 2)
 #define PROB_VERY_LIKELY       (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
 #define PROB_ALWAYS            (REG_BR_PROB_BASE)
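
Note: a quick sketch of the arithmetic behind the new value, assuming
REG_BR_PROB_BASE is 10000 and the default hot-bb-frequency-fraction is 1000
(both taken from the surrounding GCC sources of this era, not from the patch
itself):

    /* Illustrative arithmetic only; compiles with any C compiler.  */
    #include <stdio.h>

    int
    main (void)
    {
      const int base = 10000;                /* REG_BR_PROB_BASE           */
      const int old_prob = base / 100 - 1;   /* 99 -> roughly 1%           */
      const int new_prob = base / 2000 - 1;  /*  4 -> 0.04%                */
      const double hot_cut = 1.0 / 1000;     /* hot-bb-frequency-fraction  */

      /* A "very unlikely" block should fall below the hot-frequency cutoff;
         only the new value does.  */
      printf ("old=%.4f new=%.4f cutoff=%.4f\n",
              (double) old_prob / base, (double) new_prob / base, hot_cut);
      return 0;
    }
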
@@ -108,7 +110,8 @@ static const struct predictor_info predictor_info[]= {
 #undef DEF_PREDICTOR
 
 /* Return TRUE if frequency FREQ is considered to be hot.  */
-static bool
+
+static inline bool
 maybe_hot_frequency_p (int freq)
 {
   if (!profile_info || !flag_branch_probabilities)
@@ -118,22 +121,57 @@ maybe_hot_frequency_p (int freq)
       if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
         return true;
     }
+  if (profile_status == PROFILE_ABSENT)
+    return true;
   if (freq < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
     return false;
   return true;
 }
 
+/* Return TRUE if profile count COUNT is considered to be hot.  */
+
+static inline bool
+maybe_hot_count_p (gcov_type count)
+{
+  if (profile_status != PROFILE_READ)
+    return true;
+  /* Code executed at most once is not hot.  */
+  if (profile_info->runs >= count)
+    return false;
+  return (count
+         > profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION));
+}
+
 /* Return true in case BB can be CPU intensive and should be optimized
    for maximal performance.  */
 
 bool
 maybe_hot_bb_p (const_basic_block bb)
 {
+  if (profile_status == PROFILE_READ)
+    return maybe_hot_count_p (bb->count);
+  return maybe_hot_frequency_p (bb->frequency);
+}
+
+/* Return true if the call can be hot.  */
+
+bool
+cgraph_maybe_hot_edge_p (struct cgraph_edge *edge)
+{
   if (profile_info && flag_branch_probabilities
-      && (bb->count
-         < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
+      && (edge->count
+         <= profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
     return false;
-  return maybe_hot_frequency_p (bb->frequency);
+  if (lookup_attribute ("cold", DECL_ATTRIBUTES (edge->callee->decl))
+      || lookup_attribute ("cold", DECL_ATTRIBUTES (edge->caller->decl)))
+    return false;
+  if (lookup_attribute ("hot", DECL_ATTRIBUTES (edge->caller->decl)))
+    return true;
+  if (flag_guess_branch_prob
+      && edge->frequency <= (CGRAPH_FREQ_BASE
+                            / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
+    return false;
+  return true;
 }
 
 /* Return true in case BB can be CPU intensive and should be optimized
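
Note: a minimal sketch of the count test that maybe_hot_bb_p now delegates to,
with made-up profile numbers (sum_max, runs, and the 10000 default for
hot-bb-count-fraction are stand-ins, not values taken from this patch):

    #include <stdbool.h>

    typedef long long gcov_type;

    /* Mirrors maybe_hot_count_p above for one hypothetical profile.  */
    static bool
    hot_count_sketch (gcov_type count)
    {
      const gcov_type runs = 100;          /* profile_info->runs            */
      const gcov_type sum_max = 10000000;  /* profile_info->sum_max         */
      const int hot_fraction = 10000;      /* HOT_BB_COUNT_FRACTION default */

      /* Executed at most once per training run: never hot.  */
      if (runs >= count)
        return false;
      /* Otherwise hot means more than a fraction of the hottest counter.  */
      return count > sum_max / hot_fraction;
    }
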
@@ -142,42 +180,181 @@ maybe_hot_bb_p (const_basic_block bb)
 bool
 maybe_hot_edge_p (edge e)
 {
-  if (profile_info && flag_branch_probabilities
-      && (e->count
-         < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
-    return false;
+  if (profile_status == PROFILE_READ)
+    return maybe_hot_count_p (e->count);
   return maybe_hot_frequency_p (EDGE_FREQUENCY (e));
 }
 
-/* Return true in case BB is cold and should be optimized for size.  */
-
+/* Return true in case BB is probably never executed.  */
 bool
-probably_cold_bb_p (const_basic_block bb)
+probably_never_executed_bb_p (const_basic_block bb)
 {
-  if (profile_info && flag_branch_probabilities
-      && (bb->count
-         < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
-    return true;
+  if (profile_info && flag_branch_probabilities)
+    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
   if ((!profile_info || !flag_branch_probabilities)
       && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
     return true;
-  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
+  return false;
+}
+
+/* Return true when function FUN should always be optimized for size.  */
+
+bool
+optimize_function_for_size_p (struct function *fun)
+{
+  return (optimize_size
+         || (fun && (fun->function_frequency
+                     == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)));
+}
+
+/* Return true when function FUN should always be optimized for speed.  */
+
+bool
+optimize_function_for_speed_p (struct function *fun)
+{
+  return !optimize_function_for_size_p (fun);
+}
+
+/* Return TRUE when BB should be optimized for size.  */
+
+bool
+optimize_bb_for_size_p (const_basic_block bb)
+{
+  return optimize_function_for_size_p (cfun) || !maybe_hot_bb_p (bb);
+}
+
+/* Return TRUE when BB should be optimized for speed.  */
+
+bool
+optimize_bb_for_speed_p (const_basic_block bb)
+{
+  return !optimize_bb_for_size_p (bb);
+}
+
+/* Return TRUE when edge E should be optimized for size.  */
+
+bool
+optimize_edge_for_size_p (edge e)
+{
+  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
+}
+
+/* Return TRUE when edge E should be optimized for speed.  */
+
+bool
+optimize_edge_for_speed_p (edge e)
+{
+  return !optimize_edge_for_size_p (e);
+}
+
+/* Return TRUE when currently expanded insns should be optimized for size.  */
+
+bool
+optimize_insn_for_size_p (void)
+{
+  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
+}
+
+/* Return TRUE when currently expanded insns should be optimized for speed.  */
+
+bool
+optimize_insn_for_speed_p (void)
+{
+  return !optimize_insn_for_size_p ();
+}
+
+/* Return TRUE when LOOP should be optimized for size.  */
+
+bool
+optimize_loop_for_size_p (struct loop *loop)
+{
+  return optimize_bb_for_size_p (loop->header);
+}
+
+/* Return TRUE when LOOP should be optimized for speed.  */
+
+bool
+optimize_loop_for_speed_p (struct loop *loop)
+{
+  return optimize_bb_for_speed_p (loop->header);
+}
+
+/* Return TRUE when LOOP nest should be optimized for speed.  */
+
+bool
+optimize_loop_nest_for_speed_p (struct loop *loop)
+{
+  struct loop *l = loop;
+  if (optimize_loop_for_speed_p (loop))
     return true;
+  l = loop->inner;
+  while (l && l != loop)
+    {
+      if (optimize_loop_for_speed_p (l))
+        return true;
+      if (l->inner)
+        l = l->inner;
+      else if (l->next)
+        l = l->next;
+      else
+        {
+         while (l != loop && !l->next)
+           l = loop_outer (l);
+         if (l != loop)
+           l = l->next;
+       }
+    }
   return false;
 }
 
-/* Return true in case BB is probably never executed.  */
+/* Return TRUE when LOOP nest should be optimized for size.  */
+
 bool
-probably_never_executed_bb_p (const_basic_block bb)
+optimize_loop_nest_for_size_p (struct loop *loop)
 {
-  if (profile_info && flag_branch_probabilities)
-    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
-  if ((!profile_info || !flag_branch_probabilities)
-      && cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
+  return !optimize_loop_nest_for_speed_p (loop);
+}
+
+/* Return true when edge E is likely to be well predictable by the
+   branch predictor.  */
+
+bool
+predictable_edge_p (edge e)
+{
+  if (profile_status == PROFILE_ABSENT)
+    return false;
+  if ((e->probability
+       <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
+      || (REG_BR_PROB_BASE - e->probability
+          <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
     return true;
   return false;
 }
 
+
+/* Set up RTL expansion according to the profile of basic block BB.  */
+
+void
+rtl_profile_for_bb (basic_block bb)
+{
+  crtl->maybe_hot_insn_p = maybe_hot_bb_p (bb);
+}
+
+/* Set up RTL expansion according to the profile of edge E.  */
+
+void
+rtl_profile_for_edge (edge e)
+{
+  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
+}
+
+/* Set RTL expansion to default mode (i.e. when profile info is not known).  */
+void
+default_rtl_profile (void)
+{
+  crtl->maybe_hot_insn_p = true;
+}
+
 /* Return true if the one of outgoing edges is already predicted by
    PREDICTOR.  */
 
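
Note: a sketch of how a hypothetical GCC pass might use the new per-block
interface instead of the global optimize_size flag.  optimize_bb_for_size_p,
rtl_profile_for_bb and default_rtl_profile are the functions added above;
basic_block and the two emit_* helpers are assumptions for illustration and
would need GCC's internal headers:

    static void
    expand_block_sketch (basic_block bb)
    {
      /* Make optimize_insn_for_size_p () reflect this block's hotness.  */
      rtl_profile_for_bb (bb);

      if (optimize_bb_for_size_p (bb))
        emit_small_sequence ();    /* hypothetical size-optimized helper   */
      else
        emit_fast_sequence ();     /* hypothetical speed-optimized helper  */

      /* Restore the neutral setting for code emitted outside any block.  */
      default_rtl_profile ();
    }
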
@@ -477,7 +654,7 @@ combine_predictions_for_insn (rtx insn, basic_block bb)
   rtx *pnote;
   rtx note;
   int best_probability = PROB_EVEN;
-  int best_predictor = END_PREDICTORS;
+  enum br_predictor best_predictor = END_PREDICTORS;
   int combined_probability = REG_BR_PROB_BASE / 2;
   int d;
   bool first_match = false;
@@ -500,7 +677,8 @@ combine_predictions_for_insn (rtx insn, basic_block bb)
   for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
     if (REG_NOTE_KIND (note) == REG_BR_PRED)
       {
-       int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
+       enum br_predictor predictor = ((enum br_predictor)
+                                      INTVAL (XEXP (XEXP (note, 0), 0)));
        int probability = INTVAL (XEXP (XEXP (note, 0), 1));
 
        found = true;
@@ -546,7 +724,8 @@ combine_predictions_for_insn (rtx insn, basic_block bb)
     {
       if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
        {
-         int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
+         enum br_predictor predictor = ((enum br_predictor)
+                                        INTVAL (XEXP (XEXP (*pnote, 0), 0)));
          int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
 
          dump_prediction (dump_file, predictor, probability, bb,
@@ -588,7 +767,7 @@ static void
 combine_predictions_for_bb (basic_block bb)
 {
   int best_probability = PROB_EVEN;
-  int best_predictor = END_PREDICTORS;
+  enum br_predictor best_predictor = END_PREDICTORS;
   int combined_probability = REG_BR_PROB_BASE / 2;
   int d;
   bool first_match = false;
@@ -636,15 +815,40 @@ combine_predictions_for_bb (basic_block bb)
         by predictor with smallest index.  */
       for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
        {
-         int predictor = pred->ep_predictor;
+         enum br_predictor predictor = pred->ep_predictor;
          int probability = pred->ep_probability;
 
          if (pred->ep_edge != first)
            probability = REG_BR_PROB_BASE - probability;
 
          found = true;
+         /* First match heuristics would be wildly confused if we predicted
+            both directions.  */
          if (best_predictor > predictor)
-           best_probability = probability, best_predictor = predictor;
+           {
+              struct edge_prediction *pred2;
+             int prob = probability;
+
+              for (pred2 = (struct edge_prediction *) *preds; pred2; pred2 = pred2->ep_next)
+              if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
+                {
+                  int probability2 = pred2->ep_probability;
+
+                  if (pred2->ep_edge != first)
+                    probability2 = REG_BR_PROB_BASE - probability2;
+
+                  if ((probability < REG_BR_PROB_BASE / 2) != 
+                      (probability2 < REG_BR_PROB_BASE / 2))
+                    break;
+
+                  /* If the same predictor later gave a better result, go for it! */
+                  if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
+                      || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
+                    prob = probability2;
+                }
+             if (!pred2)
+               best_probability = prob, best_predictor = predictor;
+           }
 
          d = (combined_probability * probability
               + (REG_BR_PROB_BASE - combined_probability)
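
Note: a worked example of the agreement check above, with made-up probabilities
and REG_BR_PROB_BASE taken as 10000 (a constant from the surrounding sources,
not from this patch).  Suppose one instance of a predictor gives the first
outgoing edge 8000 (80% taken) while another instance of the same predictor
gives it only 3000 (30% taken, i.e. 70% not taken).  The two values fall on
opposite sides of REG_BR_PROB_BASE / 2, the inner loop exits with pred2
non-null, and that predictor is not used for first-match.  Had the values been
8000 and 9000, they would agree on the direction and the stronger 9000 would be
kept.
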
@@ -686,7 +890,7 @@ combine_predictions_for_bb (basic_block bb)
     {
       for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
        {
-         int predictor = pred->ep_predictor;
+         enum br_predictor predictor = pred->ep_predictor;
          int probability = pred->ep_probability;
 
          if (pred->ep_edge != EDGE_SUCC (bb, 0))
@@ -712,8 +916,6 @@ predict_loops (void)
   loop_iterator li;
   struct loop *loop;
 
-  scev_initialize ();
-
   /* Try to predict out blocks in a loop that are not part of a
      natural loop.  */
   FOR_EACH_LOOP (li, loop, 0)
@@ -836,8 +1038,6 @@ predict_loops (void)
       /* Free basic blocks from get_loop_body.  */
       free (bbs);
     }
-
-  scev_finalize ();
 }
 
 /* Attempt to predict probabilities of BB outgoing edges using local
@@ -1086,9 +1286,10 @@ expr_expected_value (tree expr, bitmap visited)
 }
 
 \f
-/* Get rid of all builtin_expect calls we no longer need.  */
-static void
-strip_builtin_expect (void)
+/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
+   we no longer need.  */
+static unsigned int
+strip_predict_hints (void)
 {
   basic_block bb;
   gimple ass_stmt;
@@ -1097,28 +1298,34 @@ strip_builtin_expect (void)
   FOR_EACH_BB (bb)
     {
       gimple_stmt_iterator bi;
-      for (bi = gsi_start_bb (bb); !gsi_end_p (bi); gsi_next (&bi))
+      for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
        {
          gimple stmt = gsi_stmt (bi);
-         tree fndecl;
-
-         if (gimple_code (stmt) != GIMPLE_CALL)
-           continue;
 
-         fndecl = gimple_call_fndecl (stmt);
-
-         if (fndecl
-             && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
-             && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
-             && gimple_call_num_args (stmt) == 2)
+         if (gimple_code (stmt) == GIMPLE_PREDICT)
+           {
+             gsi_remove (&bi, true);
+             continue;
+           }
+         else if (gimple_code (stmt) == GIMPLE_CALL)
            {
-             var = gimple_call_lhs (stmt);
-             ass_stmt = gimple_build_assign (var, gimple_call_arg (stmt, 0));
+             tree fndecl = gimple_call_fndecl (stmt);
 
-             gsi_replace (&bi, ass_stmt, true);
+             if (fndecl
+                 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+                 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
+                 && gimple_call_num_args (stmt) == 2)
+               {
+                 var = gimple_call_lhs (stmt);
+                 ass_stmt = gimple_build_assign (var, gimple_call_arg (stmt, 0));
+
+                 gsi_replace (&bi, ass_stmt, true);
+               }
            }
+         gsi_next (&bi);
        }
     }
+  return 0;
 }
 \f
 /* Predict using opcode of the last statement in basic block.  */
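
Note: a source-level sketch of what strip_predict_hints does to a
__builtin_expect call (the testcase is hypothetical and the GIMPLE spelling is
simplified):

    /* Hypothetical testcase, not part of this patch.  */
    extern void error_path (void);

    void
    check (void *ptr)
    {
      /* Before the pass the hint is still visible to the predictors.  */
      if (__builtin_expect (ptr == 0, 0))
        error_path ();
      /* After the pass the builtin call is gone: its result is replaced by a
         plain copy of the first argument (roughly tmp = (ptr == 0)), and any
         GIMPLE_PREDICT statements that carried the hint are removed.  */
    }
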
@@ -1337,6 +1544,16 @@ static void
 tree_bb_level_predictions (void)
 {
   basic_block bb;
+  bool has_return_edges = false;
+  edge e;
+  edge_iterator ei;
+
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+    if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
+      {
+        has_return_edges = true;
+       break;
+      }
 
   apply_return_prediction ();
 
@@ -1344,14 +1561,15 @@ tree_bb_level_predictions (void)
     {
       gimple_stmt_iterator gsi;
 
-      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;
 
          if (is_gimple_call (stmt))
            {
-             if (gimple_call_flags (stmt) & ECF_NORETURN)
+             if ((gimple_call_flags (stmt) & ECF_NORETURN)
+                 && has_return_edges)
                predict_paths_leading_to (bb, PRED_NORETURN,
                                          NOT_TAKEN);
              decl = gimple_call_fndecl (stmt);
@@ -1365,11 +1583,9 @@ tree_bb_level_predictions (void)
            {
              predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
                                        gimple_predict_outcome (stmt));
-             gsi_remove (&gsi, true);
-             continue;
+             /* Keep GIMPLE_PREDICT around so early inlining will propagate
+                hints to callers.  */
            }
-
-         gsi_next (&gsi);
        }
     }
 }
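
Note: the new has_return_edges guard matters for functions like the
hypothetical one below, where every path ends in a noreturn call.  Such a
function has no normal return edge, so predicting the paths towards its
noreturn calls as not taken would carry no useful information:

    /* Hypothetical testcase, not part of this patch.  */
    extern void fatal (const char *msg) __attribute__ ((noreturn));

    void
    always_fails (int x)
    {
      if (x)
        fatal ("x set");
      fatal ("x clear");
    }
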
@@ -1388,16 +1604,96 @@ assert_is_empty (const void *key ATTRIBUTE_UNUSED, void **value,
 }
 #endif
 
-/* Predict branch probabilities and estimate profile of the tree CFG.  */
-static unsigned int
+/* Predict branch probabilities and estimate profile for basic block BB.  */
+
+static void
+tree_estimate_probability_bb (basic_block bb)
+{
+  edge e;
+  edge_iterator ei;
+  gimple last;
+
+  FOR_EACH_EDGE (e, ei, bb->succs)
+    {
+      /* Predict early returns to be probable, as we've already taken
+        care for error returns and other cases are often used for
+        fast paths through function.
+
+        Since we've already removed the return statements, we are
+        looking for CFG like:
+
+        if (conditional)
+        {
+        ..
+        goto return_block
+        }
+        some other blocks
+        return_block:
+        return_stmt.  */
+      if (e->dest != bb->next_bb
+         && e->dest != EXIT_BLOCK_PTR
+         && single_succ_p (e->dest)
+         && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
+         && (last = last_stmt (e->dest)) != NULL
+         && gimple_code (last) == GIMPLE_RETURN)
+       {
+         edge e1;
+         edge_iterator ei1;
+
+         if (single_succ_p (bb))
+           {
+             FOR_EACH_EDGE (e1, ei1, bb->preds)
+               if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
+                   && !predicted_by_p (e1->src, PRED_CONST_RETURN)
+                   && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
+                 predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
+           }
+         else
+           if (!predicted_by_p (e->src, PRED_NULL_RETURN)
+               && !predicted_by_p (e->src, PRED_CONST_RETURN)
+               && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
+             predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
+       }
+
+      /* Look for block we are guarding (ie we dominate it,
+        but it doesn't postdominate us).  */
+      if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
+         && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
+         && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
+       {
+         gimple_stmt_iterator bi;
+
+         /* The call heuristic claims that a guarded function call
+            is improbable.  This is because such calls are often used
+            to signal exceptional situations such as printing error
+            messages.  */
+         for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
+              gsi_next (&bi))
+           {
+             gimple stmt = gsi_stmt (bi);
+             if (is_gimple_call (stmt)
+                 /* Constant and pure calls are hardly used to signalize
+                    something exceptional.  */
+                 && gimple_has_side_effects (stmt))
+               {
+                 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
+                 break;
+               }
+           }
+       }
+    }
+  tree_predict_by_opcode (bb);
+}
+
+/* Predict branch probabilities and estimate profile of the tree CFG.
+   This function can be called from the loop optimizers to recompute
+   the profile information.  */
+
+void
 tree_estimate_probability (void)
 {
   basic_block bb;
 
-  loop_optimizer_init (0);
-  if (dump_file && (dump_flags & TDF_DETAILS))
-    flow_loops_dump (dump_file, NULL, 0);
-
   add_noreturn_fake_exit_edges ();
   connect_infinite_loops_to_exit ();
   /* We use loop_niter_by_eval, which requires that the loops have
@@ -1407,87 +1703,14 @@ tree_estimate_probability (void)
 
   bb_predictions = pointer_map_create ();
   tree_bb_level_predictions ();
-
-  mark_irreducible_loops ();
   record_loop_exits ();
+
   if (number_of_loops () > 1)
     predict_loops ();
 
   FOR_EACH_BB (bb)
-    {
-      edge e;
-      edge_iterator ei;
-
-      FOR_EACH_EDGE (e, ei, bb->succs)
-       {
-         /* Predict early returns to be probable, as we've already taken
-            care for error returns and other cases are often used for
-            fast paths through function. 
-
-            Since we've already removed the return statements, we are
-            looking for CFG like:
+    tree_estimate_probability_bb (bb);
 
-              if (conditional)
-                {
-                  ..
-                  goto return_block
-                }
-              some other blocks
-            return_block:
-              return_stmt.  */
-         if (e->dest != bb->next_bb
-             && e->dest != EXIT_BLOCK_PTR
-             && single_succ_p (e->dest)
-             && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
-             && gimple_code (last_stmt (e->dest)) == GIMPLE_RETURN)
-           {
-             edge e1;
-             edge_iterator ei1;
-
-             if (single_succ_p (bb))
-               {
-                 FOR_EACH_EDGE (e1, ei1, bb->preds)
-                   if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
-                       && !predicted_by_p (e1->src, PRED_CONST_RETURN)
-                       && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
-                     predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
-               }
-              else
-               if (!predicted_by_p (e->src, PRED_NULL_RETURN)
-                   && !predicted_by_p (e->src, PRED_CONST_RETURN)
-                   && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
-                 predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
-           }
-
-         /* Look for block we are guarding (ie we dominate it,
-            but it doesn't postdominate us).  */
-         if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
-             && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
-             && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
-           {
-             gimple_stmt_iterator bi;
-
-             /* The call heuristic claims that a guarded function call
-                is improbable.  This is because such calls are often used
-                to signal exceptional situations such as printing error
-                messages.  */
-             for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
-                  gsi_next (&bi))
-               {
-                 gimple stmt = gsi_stmt (bi);
-                 if (is_gimple_call (stmt)
-                     /* Constant and pure calls are hardly used to signalize
-                        something exceptional.  */
-                     && gimple_has_side_effects (stmt))
-                   {
-                     predict_edge_def (e, PRED_CALL, NOT_TAKEN);
-                     break;
-                   }
-               }
-           }
-       }
-      tree_predict_by_opcode (bb);
-    }
   FOR_EACH_BB (bb)
     combine_predictions_for_bb (bb);
 
@@ -1497,10 +1720,34 @@ tree_estimate_probability (void)
   pointer_map_destroy (bb_predictions);
   bb_predictions = NULL;
 
-  strip_builtin_expect ();
   estimate_bb_frequencies ();
   free_dominance_info (CDI_POST_DOMINATORS);
   remove_fake_exit_edges ();
+}
+
+/* Predict branch probabilities and estimate profile of the tree CFG.
+   This is the driver function for PASS_PROFILE.  */
+
+static unsigned int
+tree_estimate_probability_driver (void)
+{
+  unsigned nb_loops;
+
+  loop_optimizer_init (0);
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    flow_loops_dump (dump_file, NULL, 0);
+
+  mark_irreducible_loops ();
+
+  nb_loops = number_of_loops ();
+  if (nb_loops > 1)
+    scev_initialize ();
+
+  tree_estimate_probability ();
+
+  if (nb_loops > 1)
+    scev_finalize ();
+
   loop_optimizer_finalize ();
   if (dump_file && (dump_flags & TDF_DETAILS))
     gimple_dump_cfg (dump_file, dump_flags);
@@ -1833,7 +2080,7 @@ estimate_bb_frequencies (void)
   basic_block bb;
   sreal freq_max;
 
-  if (!flag_branch_probabilities || !counts_to_freqs ())
+  if (profile_status != PROFILE_READ || !counts_to_freqs ())
     {
       static int real_values_initialized = 0;
 
@@ -1966,7 +2213,7 @@ build_predict_expr (enum br_predictor predictor, enum prediction taken)
 {
   tree t = build1 (PREDICT_EXPR, void_type_node,
                   build_int_cst (NULL, predictor));
-  PREDICT_EXPR_OUTCOME (t) = taken;
+  SET_PREDICT_EXPR_OUTCOME (t, taken);
   return t;
 }
 
@@ -1982,7 +2229,26 @@ struct gimple_opt_pass pass_profile =
   GIMPLE_PASS,
   "profile",                           /* name */
   gate_estimate_probability,           /* gate */
-  tree_estimate_probability,           /* execute */
+  tree_estimate_probability_driver,    /* execute */
+  NULL,                                        /* sub */
+  NULL,                                        /* next */
+  0,                                   /* static_pass_number */
+  TV_BRANCH_PROB,                      /* tv_id */
+  PROP_cfg,                            /* properties_required */
+  0,                                   /* properties_provided */
+  0,                                   /* properties_destroyed */
+  0,                                   /* todo_flags_start */
+  TODO_ggc_collect | TODO_verify_ssa                   /* todo_flags_finish */
+ }
+};
+
+struct gimple_opt_pass pass_strip_predict_hints = 
+{
+ {
+  GIMPLE_PASS,
+  NULL,                                        /* name */
+  NULL,                                        /* gate */
+  strip_predict_hints,                 /* execute */
   NULL,                                        /* sub */
   NULL,                                        /* next */
   0,                                   /* static_pass_number */