+\f
+/* Check whether this is the last basic block of function. Commonly
+ there is one extra common cleanup block. */
+static bool
+last_basic_block_p (basic_block bb)
+{
+ if (bb == EXIT_BLOCK_PTR)
+ return false;
+
+ /* BB is last either when the exit block follows it directly, or when
+ only a single-successor cleanup block separates it from the exit
+ and that successor itself falls through to the exit. */
+ return (bb->next_bb == EXIT_BLOCK_PTR
+ || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
+ && bb->succ && !bb->succ->succ_next
+ && bb->succ->dest->next_bb == EXIT_BLOCK_PTR));
+}
+\f
+/* This is used to carry information about basic blocks. It is
+ attached to the AUX field of the standard CFG block. */
+
+typedef struct block_info_def
+{
+ /* Estimated frequency of execution of basic_block. */
+ sreal frequency;
+
+ /* Link threading the queue of basic blocks still to process in
+ propagate_freq. */
+ basic_block next;
+
+ /* True if block needs to be visited in propagate_freq. */
+ unsigned int tovisit:1;
+
+ /* Number of predecessors we need to visit first. */
+ int npredecessors;
+} *block_info;
+
<comment>
+/* Similar information for edges. */
+typedef struct edge_info_def
+{
+ /* In case edge is a loopback edge, the probability the edge will be
+ reached in case the header is. Estimated number of iterations of
+ the loop can then be computed as 1 / (1 - back_edge_prob). */
+ sreal back_edge_prob;
+ /* True if the edge is a loopback edge in the natural loop. */
+ unsigned int back_edge:1;
+} *edge_info;
</comment>
+/* Similar information for edges. */
+typedef struct edge_info_def
+{
+ /* In case edge is a loopback edge, the probability the edge will be
+ reached in case header is. Estimated number of iterations of the
+ loop can be then computed as 1 / (1 - back_edge_prob). */
+ sreal back_edge_prob;
+ /* True if the edge is a loopback edge in the natural loop. */
+ unsigned int back_edge:1;
+} *edge_info;
+
+/* Accessors for the per-block and per-edge information stored in the
+ CFG aux fields by estimate_bb_frequencies. */
+#define BLOCK_INFO(B) ((block_info) (B)->aux)
+#define EDGE_INFO(E) ((edge_info) (E)->aux)
+
+/* Helper function for estimate_bb_frequencies.
+ Propagate the frequencies for LOOP. */
+
+static void
+propagate_freq (struct loop *loop)
+{
+ basic_block head = loop->header;
+ basic_block bb;
+ basic_block last;
+ edge e;
+ basic_block nextbb;
+
+ /* For each basic block we need to visit, count the number of its
+ predecessors that also need visiting first. DFS back edges are
+ ignored so the walk below terminates; a predecessor reached only
+ through a non-back edge of an irreducible region is reported. */
+ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+ {
+ if (BLOCK_INFO (bb)->tovisit)
+ {
+ int count = 0;
+
+ for (e = bb->pred; e; e = e->pred_next)
+ if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
+ count++;
+ else if (BLOCK_INFO (e->src)->tovisit
+ && dump_file && !EDGE_INFO (e)->back_edge)
+ fprintf (dump_file,
+ "Irreducible region hit, ignoring edge to %i->%i\n",
+ e->src->index, bb->index);
+ BLOCK_INFO (bb)->npredecessors = count;
+ }
+ }
+
+ /* The loop header is always reached; start it at frequency 1 so all
+ other frequencies are relative to one header execution. */
+ memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
+ last = head;
+ /* Process blocks in topological order, walking the work queue that
+ is threaded through BLOCK_INFO ()->next. */
+ for (bb = head; bb; bb = nextbb)
+ {
+ sreal cyclic_probability, frequency;
+
+ memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
+ memcpy (&frequency, &real_zero, sizeof (real_zero));
+
+ nextbb = BLOCK_INFO (bb)->next;
+ BLOCK_INFO (bb)->next = NULL;
+
+ /* Compute frequency of basic block. */
+ if (bb != head)
+ {
+#ifdef ENABLE_CHECKING
+ /* Every forward-edge predecessor must already be processed. */
+ for (e = bb->pred; e; e = e->pred_next)
+ if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
+ abort ();
+#endif
+
+ /* Accumulate the loopback probability into CYCLIC_PROBABILITY
+ and the forward-edge contributions into FREQUENCY. */
+ for (e = bb->pred; e; e = e->pred_next)
+ if (EDGE_INFO (e)->back_edge)
+ {
+ sreal_add (&cyclic_probability, &cyclic_probability,
+ &EDGE_INFO (e)->back_edge_prob);
+ }
+ else if (!(e->flags & EDGE_DFS_BACK))
+ {
+ sreal tmp;
+
+ /* frequency += (e->probability
+ * BLOCK_INFO (e->src)->frequency /
+ REG_BR_PROB_BASE); */
+
+ sreal_init (&tmp, e->probability, 0);
+ sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
+ sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
+ sreal_add (&frequency, &frequency, &tmp);
+ }
+
+ if (sreal_compare (&cyclic_probability, &real_zero) == 0)
+ {
+ /* No loopback edges: the frequency is simply the sum of
+ the incoming forward-edge contributions. */
+ memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
+ sizeof (frequency));
+ }
+ else
+ {
+ /* Clamp the cyclic probability so the divisor computed
+ below as (1 - cyclic_probability) never becomes zero or
+ negative. */
+ if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
+ {
+ memcpy (&cyclic_probability, &real_almost_one,
+ sizeof (real_almost_one));
+ }
+
+ /* BLOCK_INFO (bb)->frequency = frequency
+ / (1 - cyclic_probability) */
+
+ sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
+ sreal_div (&BLOCK_INFO (bb)->frequency,
+ &frequency, &cyclic_probability);
+ }
+ }
+
+ BLOCK_INFO (bb)->tovisit = 0;
+
+ /* Compute back edge frequencies. */
+ for (e = bb->succ; e; e = e->succ_next)
+ if (e->dest == head)
+ {
+ sreal tmp;
+
+ /* EDGE_INFO (e)->back_edge_prob
+ = ((e->probability * BLOCK_INFO (bb)->frequency)
+ / REG_BR_PROB_BASE); */
+
+ sreal_init (&tmp, e->probability, 0);
+ sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
+ sreal_mul (&EDGE_INFO (e)->back_edge_prob,
+ &tmp, &real_inv_br_prob_base);
+ }
+
+ /* Propagate to successor blocks: once a successor's last pending
+ predecessor has been processed, append it to the work queue. */
+ for (e = bb->succ; e; e = e->succ_next)
+ if (!(e->flags & EDGE_DFS_BACK)
+ && BLOCK_INFO (e->dest)->npredecessors)
+ {
+ BLOCK_INFO (e->dest)->npredecessors--;
+ if (!BLOCK_INFO (e->dest)->npredecessors)
+ {
+ if (!nextbb)
+ nextbb = e->dest;
+ else
+ BLOCK_INFO (last)->next = e->dest;
+
+ last = e->dest;
+ }
+ }
+ }
+}
+
+/* Estimate probabilities of loopback edges in loops at same nest level. */
+
+static void
+estimate_loops_at_level (struct loop *first_loop)
+{
+ struct loop *loop;
+
+ for (loop = first_loop; loop; loop = loop->next)
+ {
+ edge e;
+ basic_block *bbs;
+ unsigned i;
+
+ /* Recurse into inner loops first so their back edge probabilities
+ are available when this loop's frequencies are propagated. */
+ estimate_loops_at_level (loop->inner);
+
+ if (loop->latch->succ) /* Do not do this for dummy function loop. */
+ {
+ /* Find current loop back edge and mark it. */
+ e = loop_latch_edge (loop);
+ EDGE_INFO (e)->back_edge = 1;
+ }
+
+ /* Mark the whole loop body for visiting, then propagate the
+ frequencies through it. */
+ bbs = get_loop_body (loop);
+ for (i = 0; i < loop->num_nodes; i++)
+ BLOCK_INFO (bbs[i])->tovisit = 1;
+ free (bbs);
+ propagate_freq (loop);
+ }
+}
+
+/* Convert counts measured by profile driven feedback to frequencies.
+ Return nonzero iff there was any nonzero execution count. */
+
+static int
+counts_to_freqs (void)
+{
+ gcov_type count_max, true_count_max = 0;
+ basic_block bb;
+
+ FOR_EACH_BB (bb)
+ true_count_max = MAX (bb->count, true_count_max);
+
+ /* Avoid dividing by zero below when every count is zero. */
+ count_max = MAX (true_count_max, 1);
+ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+ /* Scale into the 0..BB_FREQ_MAX range, rounding to nearest by
+ adding half the divisor before truncating. */
+ bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
+ return true_count_max;
+}
+
+/* Return true if function is likely to be expensive, so there is no point to
+ optimize performance of prologue, epilogue or do inlining at the expense
+ of code size growth. THRESHOLD is the limit of number of instructions
+ function can execute at average to be still considered not expensive. */
+
+bool
+expensive_function_p (int threshold)
+{
+ unsigned int sum = 0;
+ basic_block bb;
+ unsigned int limit;
+
+ /* We can not compute accurately for large thresholds due to scaled
+ frequencies. */
+ if (threshold > BB_FREQ_MAX)
+ abort ();
+
+ /* Frequencies are out of range. This either means that function contains
+ internal loop executing more than BB_FREQ_MAX times or profile feedback
+ is available and function has not been executed at all. */
+ if (ENTRY_BLOCK_PTR->frequency == 0)
+ return true;
+
+ /* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
+ limit = ENTRY_BLOCK_PTR->frequency * threshold;
+ FOR_EACH_BB (bb)
+ {
+ rtx insn;
+
+ /* Weight each active insn by its block's frequency and bail out
+ as soon as the running total crosses the limit. */
+ for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
+ insn = NEXT_INSN (insn))
+ if (active_insn_p (insn))
+ {
+ sum += bb->frequency;
+ if (sum > limit)
+ return true;
+ }
+ }
+
+ return false;
+}
+
+/* Estimate basic blocks frequency by given branch probabilities. */
+
+static void
+estimate_bb_frequencies (struct loops *loops)
+{
+ basic_block bb;
+ sreal freq_max;
+
+ /* Estimate only when real profile counts are unavailable or all zero;
+ otherwise counts_to_freqs has already set the frequencies. */
+ if (!flag_branch_probabilities || !counts_to_freqs ())
+ {
+ static int real_values_initialized = 0;
+
+ /* Initialize the sreal constants only once per compilation. */
+ if (!real_values_initialized)
+ {
+ real_values_initialized = 1;
+ sreal_init (&real_zero, 0, 0);
+ sreal_init (&real_one, 1, 0);
+ sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
+ sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
+ sreal_init (&real_one_half, 1, -1);
+ sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
+ sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
+ }
+
+ mark_dfs_back_edges ();
+
+ /* The entry block always reaches its single successor. */
+ ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;
+
+ /* Set up block info for each basic block. */
+ alloc_aux_for_blocks (sizeof (struct block_info_def));
+ alloc_aux_for_edges (sizeof (struct edge_info_def));
+ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+ {
+ edge e;
+
+ BLOCK_INFO (bb)->tovisit = 0;
+ for (e = bb->succ; e; e = e->succ_next)
+ {
+ /* back_edge_prob = e->probability / REG_BR_PROB_BASE. */
+ sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
+ sreal_mul (&EDGE_INFO (e)->back_edge_prob,
+ &EDGE_INFO (e)->back_edge_prob,
+ &real_inv_br_prob_base);
+ }
+ }
+
+ /* First compute probabilities locally for each loop from innermost
+ to outermost to examine probabilities for back edges. */
+ estimate_loops_at_level (loops->tree_root);
+
+ /* Find the largest computed frequency to normalize against. */
+ memcpy (&freq_max, &real_zero, sizeof (real_zero));
+ FOR_EACH_BB (bb)
+ if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
+ memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
+
+ /* Scale every block's frequency so the maximum becomes BB_FREQ_MAX,
+ rounding to nearest by adding one half before truncation. */
+ sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
+ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+ {
+ sreal tmp;
+
+ sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
+ sreal_add (&tmp, &tmp, &real_one_half);
+ bb->frequency = sreal_to_int (&tmp);
+ }
+
+ free_aux_for_blocks ();
+ free_aux_for_edges ();
+ }
+ compute_function_frequency ();
+ if (flag_reorder_functions)
+ choose_function_section ();
+}
+
+/* Decide whether function is hot, cold or unlikely executed. */
+static void
+compute_function_frequency (void)
+{
+ basic_block bb;
+
+ /* Without profile feedback, keep the default classification. */
+ if (!profile_info || !flag_branch_probabilities)
+ return;
+ /* Start from the coldest classification and upgrade it as blocks
+ with higher execution counts are encountered. */
+ cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
+ FOR_EACH_BB (bb)
+ {
+ if (maybe_hot_bb_p (bb))
+ {
+ /* A single hot block makes the whole function hot. */
+ cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
+ return;
+ }
+ if (!probably_never_executed_bb_p (bb))
+ cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
+ }
+}
+
+/* Choose appropriate section for the function. */
+static void
+choose_function_section (void)
+{
+ /* Keep the current placement when a section is already assigned, the
+ target lacks named-section support, or the function is one-only. */
+ if (DECL_SECTION_NAME (current_function_decl)
+ || !targetm.have_named_sections
+ /* Theoretically we can split the gnu.linkonce text section too,
+ but this requires more work as the frequency needs to match
+ for all generated objects so we need to merge the frequency
+ of all instances. For now just never set frequency for these. */
+ || DECL_ONE_ONLY (current_function_decl))
+ return;
+ if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
+ DECL_SECTION_NAME (current_function_decl) =
+ build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
+ if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
+ DECL_SECTION_NAME (current_function_decl) =
+ build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
+ UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
+}
+
+
+/* Pass descriptor for the tree-level branch probability estimation pass. */
+struct tree_opt_pass pass_profile =
+{
+ "profile", /* name */
+ NULL, /* gate */
+ tree_estimate_probability, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_BRANCH_PROB, /* tv_id */
+ PROP_cfg, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
+};