/* Basic block reordering routines for the GNU compiler.
- Copyright (C) 2000 Free Software Foundation, Inc.
+ Copyright (C) 2000, 2002, 2003 Free Software Foundation, Inc.
- This file is part of GNU CC.
+ This file is part of GCC.
- GNU CC is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
+ GCC is free software; you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
- GNU CC is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
+ GCC is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
You should have received a copy of the GNU General Public License
- along with GNU CC; see the file COPYING. If not, write to
- the Free Software Foundation, 59 Temple Place - Suite 330,
- Boston, MA 02111-1307, USA. */
-
-/* References:
-
- "Profile Guided Code Positioning"
- Pettis and Hanson; PLDI '90.
-
- TODO:
-
- (1) Consider:
-
- if (p) goto A; // predict taken
- foo ();
- A:
- if (q) goto B; // predict taken
- bar ();
- B:
- baz ();
- return;
-
- We'll currently reorder this as
-
- if (!p) goto C;
- A:
- if (!q) goto D;
- B:
- baz ();
- return;
- D:
- bar ();
- goto B;
- C:
- foo ();
- goto A;
-
- A better ordering is
-
- if (!p) goto C;
- if (!q) goto D;
- B:
- baz ();
- return;
- C:
- foo ();
- if (q) goto B;
- D:
- bar ();
- goto B;
-
- This requires that we be able to duplicate the jump at A, and
- adjust the graph traversal such that greedy placement doesn't
- fix D before C is considered.
-
- (2) Coordinate with shorten_branches to minimize the number of
- long branches.
-
- (3) Invent a method by which sufficiently non-predicted code can
- be moved to either the end of the section or another section
- entirely. Some sort of NOTE_INSN note would work fine.
-
- This completely scroggs all debugging formats, so the user
- would have to explicitly ask for it.
+ along with GCC; see the file COPYING. If not, write to the Free
+ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+ 02111-1307, USA. */
+
+/* This (greedy) algorithm constructs traces in several rounds.
+ The construction starts from "seeds". The seed for the first round
+   is the entry point of the function.  When there is more than one seed,
+   the one with the lowest key in the heap is selected first
+   (see function bb_to_key).  Then the algorithm repeatedly adds the most
+ probable successor to the end of a trace. Finally it connects the traces.
+
+ There are two parameters: Branch Threshold and Exec Threshold.
+   If the probability of an edge to a successor of the actual basic block is
+   lower than Branch Threshold or the frequency of the successor is lower than
+   Exec Threshold the successor will be the seed in one of the next rounds.
+ Each round has these parameters lower than the previous one.
+ The last round has to have these parameters set to zero
+ so that the remaining blocks are picked up.
+
+ The algorithm selects the most probable successor from all unvisited
+ successors and successors that have been added to this trace.
+   The other successors (that have not been "sent" to the next round) will be
+   other seeds for this round and the secondary traces will start in them.
+ If the successor has not been visited in this trace it is added to the trace
+ (however, there is some heuristic for simple branches).
+ If the successor has been visited in this trace the loop has been found.
+ If the loop has many iterations the loop is rotated so that the
+ source block of the most probable edge going out from the loop
+ is the last block of the trace.
+   If the loop has few iterations and there is no edge from the last block of
+   the loop going out of the loop, the loop header is duplicated.
+ Finally, the construction of the trace is terminated.
+
+ When connecting traces it first checks whether there is an edge from the
+ last block of one trace to the first block of another trace.
+ When there are still some unconnected traces it checks whether there exists
+ a basic block BB such that BB is a successor of the last bb of one trace
+ and BB is a predecessor of the first block of another trace. In this case,
+ BB is duplicated and the traces are connected through this duplicate.
+   The remaining traces are simply connected so there will be a jump to the
+   beginning of the next trace.
+
+
+ References:
+
+ "Software Trace Cache"
+ A. Ramirez, J. Larriba-Pey, C. Navarro, J. Torrellas and M. Valero; 1999
+ http://citeseer.nj.nec.com/15361.html
+
*/
#include "config.h"
#include "system.h"
-#include "tree.h"
+#include "coretypes.h"
+#include "tm.h"
#include "rtl.h"
-#include "tm_p.h"
-#include "hard-reg-set.h"
#include "basic-block.h"
-#include "insn-config.h"
-#include "regs.h"
#include "flags.h"
#include "output.h"
-#include "function.h"
-#include "except.h"
-#include "toplev.h"
-#include "recog.h"
-#include "insn-flags.h"
-#include "expr.h"
-#include "obstack.h"
-
+#include "cfglayout.h"
+#include "fibheap.h"
+#include "target.h"
-#ifndef HAVE_epilogue
-#define HAVE_epilogue 0
-#endif
+/* The number of rounds. */
+#define N_ROUNDS 4
+/* Branch thresholds in thousandths (per mille) of the REG_BR_PROB_BASE. */
+static int branch_threshold[N_ROUNDS] = {400, 200, 100, 0};
-/* The contents of the current function definition are allocated
- in this obstack, and all are freed at the end of the function.
- For top-level functions, this is temporary_obstack.
- Separate obstacks are made for nested functions. */
+/* Exec thresholds in thousandths (per mille) of the frequency of bb 0. */
+static int exec_threshold[N_ROUNDS] = {500, 200, 50, 0};
-extern struct obstack flow_obstack;
+/* If edge frequency is lower than DUPLICATION_THRESHOLD per mille of entry
+ block the edge destination is not duplicated while connecting traces. */
+#define DUPLICATION_THRESHOLD 100
+/* Length of unconditional jump instruction. */
+static int uncond_jump_length;
-/* Structure to hold information about lexical scopes. */
-typedef struct scope_def
+/* Structure to hold needed information for each basic block. */
+typedef struct bbro_basic_block_data_def
{
- int level;
-
- /* The NOTE_INSN_BLOCK_BEG that started this scope. */
- rtx note_beg;
-
- /* The NOTE_INSN_BLOCK_END that ended this scope. */
- rtx note_end;
+ /* Which trace is the bb start of (-1 means it is not a start of a trace). */
+ int start_of_trace;
- /* The bb containing note_beg (if any). */
- basic_block bb_beg;
+ /* Which trace is the bb end of (-1 means it is not an end of a trace). */
+ int end_of_trace;
- /* The bb containing note_end (if any). */
- basic_block bb_end;
+ /* Which heap is BB in (if any)? */
+ fibheap_t heap;
- /* List of basic blocks contained within this scope. */
- basic_block *bbs;
+ /* Which heap node is BB in (if any)? */
+ fibnode_t node;
+} bbro_basic_block_data;
- /* Number of blocks contained within this scope. */
- int num_bbs;
+/* The current size of the following dynamic array. */
+static int array_size;
- /* The outer scope or NULL if outermost scope. */
- struct scope_def *outer;
+/* The array which holds needed information for basic blocks. */
+static bbro_basic_block_data *bbd;
- /* The first inner scope or NULL if innermost scope. */
- struct scope_def *inner;
+/* To avoid frequent reallocation the size of arrays is greater than needed,
+ the number of elements is (not less than) 1.25 * size_wanted. */
+#define GET_ARRAY_SIZE(X) ((((X) / 4) + 1) * 5)
- /* The last inner scope or NULL if innermost scope. */
- struct scope_def *inner_last;
+/* Free the memory and set the pointer to NULL. */
+#define FREE(P) \
+ do { if (P) { free (P); P = 0; } else { abort (); } } while (0)
- /* Link to the next (sibling) scope. */
- struct scope_def *next;
-} *scope;
-
-
-/* Structure to hold information about the scope forest. */
-typedef struct
+/* Structure for holding information about a trace. */
+struct trace
{
- /* Number of trees in forest. */
- int num_trees;
+ /* First and last basic block of the trace. */
+ basic_block first, last;
- /* List of tree roots. */
- scope *trees;
-} scope_forest_info;
+ /* The round of the STC creation which this trace was found in. */
+ int round;
-/* Structure to hold information about the blocks during reordering. */
-typedef struct reorder_block_def
-{
- rtx eff_head;
- rtx eff_end;
- scope scope;
- basic_block next;
- int index;
- int visited;
-} *reorder_block_def;
-
-#define RBI(BB) ((reorder_block_def) (BB)->aux)
+ /* The length (i.e. the number of basic blocks) of the trace. */
+ int length;
+};
+/* Maximum frequency and count of one of the entry blocks. */
+int max_entry_frequency;
+gcov_type max_entry_count;
/* Local function prototypes. */
-static rtx skip_insns_after_block PARAMS ((basic_block));
-static void record_effective_endpoints PARAMS ((void));
-static void make_reorder_chain PARAMS ((void));
-static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
-static rtx label_for_bb PARAMS ((basic_block));
-static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
-static void fixup_reorder_chain PARAMS ((void));
-static void relate_bbs_with_scopes PARAMS ((scope));
-static scope make_new_scope PARAMS ((int, rtx));
-static void build_scope_forest PARAMS ((scope_forest_info *));
-static void remove_scope_notes PARAMS ((void));
-static void insert_intra_1 PARAMS ((scope, rtx *));
-static void insert_intra_bb_scope_notes PARAMS ((basic_block));
-static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
-static void rebuild_scope_notes PARAMS ((scope_forest_info *));
-static void free_scope_forest_1 PARAMS ((scope));
-static void free_scope_forest PARAMS ((scope_forest_info *));
-void dump_scope_forest PARAMS ((scope_forest_info *));
-static void dump_scope_forest_1 PARAMS ((scope, int));
-static rtx get_next_bb_note PARAMS ((rtx));
-static rtx get_prev_bb_note PARAMS ((rtx));
-
-void verify_insn_chain PARAMS ((void));
+static void find_traces PARAMS ((int *, struct trace *));
+static basic_block rotate_loop PARAMS ((edge, struct trace *, int));
+static void mark_bb_visited PARAMS ((basic_block, int));
+static void find_traces_1_round PARAMS ((int, int, gcov_type,
+ struct trace *, int *, int,
+ fibheap_t *));
+static basic_block copy_bb PARAMS ((basic_block, edge,
+ basic_block, int));
+static fibheapkey_t bb_to_key PARAMS ((basic_block));
+static bool better_edge_p PARAMS ((basic_block, edge, int, int,
+ int, int));
+static void connect_traces PARAMS ((int, struct trace *));
+static bool copy_bb_p PARAMS ((basic_block, int));
+static int get_uncond_jump_length PARAMS ((void));
\f
-/* Skip over inter-block insns occurring after BB which are typically
- associated with BB (e.g., barriers). If there are any such insns,
- we return the last one. Otherwise, we return the end of BB. */
+/* Find the traces for Software Trace Cache. Chain each trace through
+ RBI()->next. Store the number of traces to N_TRACES and description of
+ traces to TRACES. */
-static rtx
-skip_insns_after_block (bb)
- basic_block bb;
+static void
+find_traces (n_traces, traces)
+ int *n_traces;
+ struct trace *traces;
{
- rtx insn, last_insn, next_head;
-
- next_head = NULL_RTX;
- if (bb->index + 1 != n_basic_blocks)
- next_head = BASIC_BLOCK (bb->index + 1)->head;
+ int i;
+ edge e;
+ fibheap_t heap;
- for (last_insn = bb->end; (insn = NEXT_INSN (last_insn)); last_insn = insn)
+ /* Insert entry points of function into heap. */
+ heap = fibheap_new ();
+ max_entry_frequency = 0;
+ max_entry_count = 0;
+ for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
{
- if (insn == next_head)
- break;
-
- switch (GET_CODE (insn))
- {
- case BARRIER:
- continue;
+ bbd[e->dest->index].heap = heap;
+ bbd[e->dest->index].node = fibheap_insert (heap, bb_to_key (e->dest),
+ e->dest);
+ if (e->dest->frequency > max_entry_frequency)
+ max_entry_frequency = e->dest->frequency;
+ if (e->dest->count > max_entry_count)
+ max_entry_count = e->dest->count;
+ }
- case NOTE:
- switch (NOTE_LINE_NUMBER (insn))
- {
- case NOTE_INSN_LOOP_END:
- case NOTE_INSN_BLOCK_END:
- case NOTE_INSN_DELETED:
- case NOTE_INSN_DELETED_LABEL:
- continue;
-
- default:
- break;
- }
- break;
+ /* Find the traces. */
+ for (i = 0; i < N_ROUNDS; i++)
+ {
+ gcov_type count_threshold;
- case CODE_LABEL:
- if (NEXT_INSN (insn)
- && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
- && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
- || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
- {
- insn = NEXT_INSN (insn);
- continue;
- }
- break;
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "STC - round %d\n", i + 1);
- default:
- break;
- }
+ if (max_entry_count < INT_MAX / 1000)
+ count_threshold = max_entry_count * exec_threshold[i] / 1000;
+ else
+ count_threshold = max_entry_count / 1000 * exec_threshold[i];
- break;
+ find_traces_1_round (REG_BR_PROB_BASE * branch_threshold[i] / 1000,
+ max_entry_frequency * exec_threshold[i] / 1000,
+ count_threshold, traces, n_traces, i, &heap);
}
+ fibheap_delete (heap);
- return last_insn;
-}
-
-
-/* Locate the effective beginning and end of the insn chain for each
- block, as defined by skip_insns_after_block above. */
-
-static void
-record_effective_endpoints ()
-{
- rtx next_insn = get_insns ();
- int i;
-
- for (i = 0; i < n_basic_blocks; ++i)
+ if (rtl_dump_file)
{
- basic_block bb = BASIC_BLOCK (i);
- rtx end;
-
- RBI (bb)->eff_head = next_insn;
- end = skip_insns_after_block (bb);
- RBI (bb)->eff_end = end;
- next_insn = NEXT_INSN (end);
+ for (i = 0; i < *n_traces; i++)
+ {
+ basic_block bb;
+ fprintf (rtl_dump_file, "Trace %d (round %d): ", i + 1,
+ traces[i].round + 1);
+ for (bb = traces[i].first; bb != traces[i].last; bb = RBI (bb)->next)
+ fprintf (rtl_dump_file, "%d [%d] ", bb->index, bb->frequency);
+ fprintf (rtl_dump_file, "%d [%d]\n", bb->index, bb->frequency);
+ }
+ fflush (rtl_dump_file);
}
}
+/* Rotate loop whose back edge is BACK_EDGE in the tail of trace TRACE
+ (with sequential number TRACE_N). */
-/* Compute an ordering for a subgraph beginning with block BB. Record the
- ordering in RBI()->index and chained through RBI()->next. */
-
-static void
-make_reorder_chain ()
+static basic_block
+rotate_loop (back_edge, trace, trace_n)
+ edge back_edge;
+ struct trace *trace;
+ int trace_n;
{
- basic_block last_block = NULL;
- basic_block prev = NULL;
- int nbb_m1 = n_basic_blocks - 1;
-
- /* If we've not got epilogue in RTL, we must fallthru to the exit.
- Force the last block to be at the end. */
- /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
- end of the function for stack unwinding purposes. */
- if (! HAVE_epilogue)
- {
- last_block = BASIC_BLOCK (nbb_m1);
- RBI (last_block)->visited = 1;
- nbb_m1 -= 1;
- }
-
- /* Loop until we've placed every block. */
+ basic_block bb;
+
+ /* Information about the best end (end after rotation) of the loop. */
+ basic_block best_bb = NULL;
+ edge best_edge = NULL;
+ int best_freq = -1;
+ gcov_type best_count = -1;
+ /* The best edge is preferred when its destination is not visited yet
+ or is a start block of some trace. */
+ bool is_preferred = false;
+
+ /* Find the most frequent edge that goes out from current trace. */
+ bb = back_edge->dest;
do
{
- int i;
- basic_block next = NULL;
-
- /* Find the next unplaced block. */
- /* ??? Get rid of this loop, and track which blocks are not yet
- placed more directly, so as to avoid the O(N^2) worst case.
- Perhaps keep a doubly-linked list of all to-be-placed blocks;
- remove from the list as we place. The head of that list is
- what we're looking for here. */
-
- for (i = 0; i <= nbb_m1; ++i)
+ edge e;
+ for (e = bb->succ; e; e = e->succ_next)
+ if (e->dest != EXIT_BLOCK_PTR
+ && RBI (e->dest)->visited != trace_n
+ && (e->flags & EDGE_CAN_FALLTHRU)
+ && !(e->flags & EDGE_COMPLEX))
{
- basic_block bb = BASIC_BLOCK (i);
- if (! RBI (bb)->visited)
+ if (is_preferred)
{
- next = bb;
- break;
+ /* The best edge is preferred. */
+ if (!RBI (e->dest)->visited
+ || bbd[e->dest->index].start_of_trace >= 0)
+ {
+ /* The current edge E is also preferred. */
+ int freq = EDGE_FREQUENCY (e);
+ if (freq > best_freq || e->count > best_count)
+ {
+ best_freq = freq;
+ best_count = e->count;
+ best_edge = e;
+ best_bb = bb;
+ }
+ }
+ }
+ else
+ {
+ if (!RBI (e->dest)->visited
+ || bbd[e->dest->index].start_of_trace >= 0)
+ {
+ /* The current edge E is preferred. */
+ is_preferred = true;
+ best_freq = EDGE_FREQUENCY (e);
+ best_count = e->count;
+ best_edge = e;
+ best_bb = bb;
+ }
+ else
+ {
+ int freq = EDGE_FREQUENCY (e);
+ if (!best_edge || freq > best_freq || e->count > best_count)
+ {
+ best_freq = freq;
+ best_count = e->count;
+ best_edge = e;
+ best_bb = bb;
+ }
+ }
}
}
- if (! next)
- abort ();
-
- prev = make_reorder_chain_1 (next, prev);
+ bb = RBI (bb)->next;
}
- while (RBI (prev)->index < nbb_m1);
+ while (bb != back_edge->dest);
- /* Terminate the chain. */
- if (! HAVE_epilogue)
+ if (best_bb)
{
- RBI (prev)->next = last_block;
- RBI (last_block)->index = RBI (prev)->index + 1;
- prev = last_block;
- }
- RBI (prev)->next = NULL;
-}
-
-/* A helper function for make_reorder_chain.
-
- We do not follow EH edges, or non-fallthru edges to noreturn blocks.
- These are assumed to be the error condition and we wish to cluster
- all of them at the very end of the function for the benefit of cache
- locality for the rest of the function.
-
- ??? We could do slightly better by noticing earlier that some subgraph
- has all paths leading to noreturn functions, but for there to be more
- than one block in such a subgraph is rare. */
-
-static basic_block
-make_reorder_chain_1 (bb, prev)
- basic_block bb;
- basic_block prev;
-{
- edge e;
- basic_block next;
- rtx note;
+ /* Rotate the loop so that the BEST_EDGE goes out from the last block of
+ the trace. */
+ if (back_edge->dest == trace->first)
+ {
+ trace->first = RBI (best_bb)->next;
+ }
+ else
+ {
+ basic_block prev_bb;
- /* Mark this block visited. */
- if (prev)
- {
- int new_index;
+ for (prev_bb = trace->first;
+ RBI (prev_bb)->next != back_edge->dest;
+ prev_bb = RBI (prev_bb)->next)
+ ;
+ RBI (prev_bb)->next = RBI (best_bb)->next;
- restart:
- RBI (prev)->next = bb;
- new_index = RBI (prev)->index + 1;
- RBI (bb)->index = new_index;
+ /* Try to get rid of uncond jump to cond jump. */
+ if (prev_bb->succ && !prev_bb->succ->succ_next)
+ {
+ basic_block header = prev_bb->succ->dest;
- if (rtl_dump_file && prev->index + 1 != bb->index)
- fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
- bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
+ /* Duplicate HEADER if it is a small block containing cond jump
+ in the end. */
+ if (any_condjump_p (header->end) && copy_bb_p (header, 0))
+ {
+ copy_bb (header, prev_bb->succ, prev_bb, trace_n);
+ }
+ }
+ }
}
else
- RBI (bb)->index = 0;
- RBI (bb)->visited = 1;
- prev = bb;
-
- if (bb->succ == NULL)
- return prev;
-
- /* Find the most probable block. */
-
- next = NULL;
- if (any_condjump_p (bb->end)
- && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
- {
- int taken, probability;
- edge e_taken, e_fall;
-
- probability = INTVAL (XEXP (note, 0));
- taken = probability > REG_BR_PROB_BASE / 2;
-
- /* Find the normal taken edge and the normal fallthru edge.
- Note that there may in fact be other edges due to
- asynchronous_exceptions. */
-
- e_taken = e_fall = NULL;
- for (e = bb->succ; e ; e = e->succ_next)
- if (e->flags & EDGE_FALLTHRU)
- e_fall = e;
- else if (! (e->flags & EDGE_EH))
- e_taken = e;
-
- next = (taken ? e_taken : e_fall)->dest;
- }
-
- /* In the absence of a prediction, disturb things as little as possible
- by selecting the old "next" block from the list of successors. If
- there had been a fallthru edge, that will be the one. */
- if (! next)
{
- for (e = bb->succ; e ; e = e->succ_next)
- if (e->dest->index == bb->index + 1)
- {
- if ((e->flags & EDGE_FALLTHRU)
- || (e->dest->succ
- && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
- next = e->dest;
- break;
- }
- }
-
- /* Make sure we didn't select a silly next block. */
- if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
- next = NULL;
-
- /* Recurse on the successors. Unroll the last call, as the normal
- case is exactly one or two edges, and we can tail recurse. */
- for (e = bb->succ; e; e = e->succ_next)
- if (e->dest != EXIT_BLOCK_PTR
- && ! RBI (e->dest)->visited
- && e->dest->succ
- && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
- {
- if (next)
- {
- prev = make_reorder_chain_1 (next, prev);
- next = RBI (e->dest)->visited ? NULL : e->dest;
- }
- else
- next = e->dest;
- }
- if (next)
- {
- bb = next;
- goto restart;
+ /* We have not found suitable loop tail so do no rotation. */
+ best_bb = back_edge->src;
}
-
- return prev;
+ RBI (best_bb)->next = NULL;
+ return best_bb;
}
+/* This function marks BB that it was visited in trace number TRACE. */
-/* Locate or create a label for a given basic block. */
-
-static rtx
-label_for_bb (bb)
+static void
+mark_bb_visited (bb, trace)
basic_block bb;
+ int trace;
{
- rtx label = bb->head;
-
- if (GET_CODE (label) != CODE_LABEL)
+ RBI (bb)->visited = trace;
+ if (bbd[bb->index].heap)
{
- if (rtl_dump_file)
- fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
- bb->index, RBI (bb)->index);
-
- label = emit_label_before (gen_label_rtx (), label);
- if (bb->head == RBI (bb)->eff_head)
- RBI (bb)->eff_head = label;
- bb->head = label;
+ fibheap_delete_node (bbd[bb->index].heap, bbd[bb->index].node);
+ bbd[bb->index].heap = NULL;
+ bbd[bb->index].node = NULL;
}
-
- return label;
}
+/* One round of finding traces.  Find traces for BRANCH_TH and EXEC_TH i.e. do
+   not include basic blocks whose probability is lower than BRANCH_TH or whose
+   frequency is lower than EXEC_TH into traces (or count is lower than
+ COUNT_TH). It stores the new traces into TRACES and modifies the number of
+ traces *N_TRACES. Sets the round (which the trace belongs to) to ROUND. It
+ expects that starting basic blocks are in *HEAP and at the end it deletes
+ *HEAP and stores starting points for the next round into new *HEAP. */
-/* Emit a jump to BB after insn AFTER. */
-
-static rtx
-emit_jump_to_block_after (bb, after)
- basic_block bb;
- rtx after;
+static void
+find_traces_1_round (branch_th, exec_th, count_th, traces, n_traces, round,
+ heap)
+ int branch_th;
+ int exec_th;
+ gcov_type count_th;
+ struct trace *traces;
+ int *n_traces;
+ int round;
+ fibheap_t *heap;
{
- rtx jump;
+ /* Heap for discarded basic blocks which are possible starting points for
+ the next round. */
+ fibheap_t new_heap = fibheap_new ();
- if (bb != EXIT_BLOCK_PTR)
+ while (!fibheap_empty (*heap))
{
- rtx label = label_for_bb (bb);
- jump = emit_jump_insn_after (gen_jump (label), after);
- JUMP_LABEL (jump) = label;
- LABEL_NUSES (label) += 1;
+ basic_block bb;
+ struct trace *trace;
+ edge best_edge, e;
+ fibheapkey_t key;
- if (rtl_dump_file)
- fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
- bb->index, RBI (bb)->index);
- }
- else
- {
-#ifdef HAVE_return
- if (! HAVE_return)
- abort ();
- jump = emit_jump_insn_after (gen_return (), after);
+ bb = fibheap_extract_min (*heap);
+ bbd[bb->index].heap = NULL;
+ bbd[bb->index].node = NULL;
if (rtl_dump_file)
- fprintf (rtl_dump_file, "Emitting return\n");
-#else
- abort ();
-#endif
- }
-
- return jump;
-}
+ fprintf (rtl_dump_file, "Getting bb %d\n", bb->index);
+ /* If the BB's frequency is too low send BB to the next round. */
+ if (bb->frequency < exec_th || bb->count < count_th
+ || ((round < N_ROUNDS - 1) && probably_never_executed_bb_p (bb)))
+ {
+ int key = bb_to_key (bb);
+ bbd[bb->index].heap = new_heap;
+ bbd[bb->index].node = fibheap_insert (new_heap, key, bb);
+
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file,
+ " Possible start point of next round: %d (key: %d)\n",
+ bb->index, key);
+ continue;
+ }
-/* Given a reorder chain, rearrange the code to match. */
+ trace = traces + *n_traces;
+ trace->first = bb;
+ trace->round = round;
+ trace->length = 0;
+ (*n_traces)++;
-static void
-fixup_reorder_chain ()
-{
- basic_block bb, last_bb;
+ do
+ {
+ int prob, freq;
- /* First do the bulk reordering -- rechain the blocks without regard to
- the needed changes to jumps and labels. */
+ /* The probability and frequency of the best edge. */
+ int best_prob = INT_MIN / 2;
+ int best_freq = INT_MIN / 2;
- last_bb = BASIC_BLOCK (0);
- bb = RBI (last_bb)->next;
- while (bb)
- {
- rtx last_e = RBI (last_bb)->eff_end;
- rtx curr_h = RBI (bb)->eff_head;
+ best_edge = NULL;
+ mark_bb_visited (bb, *n_traces);
+ trace->length++;
- NEXT_INSN (last_e) = curr_h;
- PREV_INSN (curr_h) = last_e;
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Basic block %d was visited in trace %d\n",
+ bb->index, *n_traces - 1);
- last_bb = bb;
- bb = RBI (bb)->next;
- }
- NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
- set_last_insn (RBI (last_bb)->eff_end);
+ /* Select the successor that will be placed after BB. */
+ for (e = bb->succ; e; e = e->succ_next)
+ {
+ if (e->flags & EDGE_FAKE)
+ abort ();
- /* Now add jumps and labels as needed to match the blocks new
- outgoing edges. */
+ if (e->dest == EXIT_BLOCK_PTR)
+ continue;
- for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
- {
- edge e_fall, e_taken, e;
- rtx jump_insn, barrier_insn, bb_end_insn;
- basic_block nb;
+ if (RBI (e->dest)->visited
+ && RBI (e->dest)->visited != *n_traces)
+ continue;
- if (bb->succ == NULL)
- continue;
+ prob = e->probability;
+ freq = EDGE_FREQUENCY (e);
- /* Find the old fallthru edge, and another non-EH edge for
- a taken jump. */
- e_taken = e_fall = NULL;
- for (e = bb->succ; e ; e = e->succ_next)
- if (e->flags & EDGE_FALLTHRU)
- e_fall = e;
- else if (! (e->flags & EDGE_EH))
- e_taken = e;
-
- bb_end_insn = bb->end;
- if (GET_CODE (bb_end_insn) == JUMP_INSN)
- {
- if (any_uncondjump_p (bb_end_insn))
- {
- /* If the destination is still not next, nothing to do. */
- if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
+ /* Edge that cannot be fallthru or improbable or infrequent
+ successor (ie. it is unsuitable successor). */
+ if (!(e->flags & EDGE_CAN_FALLTHRU) || (e->flags & EDGE_COMPLEX)
+ || prob < branch_th || freq < exec_th || e->count < count_th)
continue;
- /* Otherwise, we can remove the jump and cleanup the edge. */
- tidy_fallthru_edge (e_taken, bb, e_taken->dest);
- RBI (bb)->eff_end = skip_insns_after_block (bb);
- RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
-
- if (rtl_dump_file)
- fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
- bb->index, RBI (bb)->index);
- continue;
+ if (better_edge_p (bb, e, prob, freq, best_prob, best_freq))
+ {
+ best_edge = e;
+ best_prob = prob;
+ best_freq = freq;
+ }
}
- else if (any_condjump_p (bb_end_insn))
+
+ /* If the best destination has multiple predecessors, and can be
+ duplicated cheaper than a jump, don't allow it to be added
+ to a trace. We'll duplicate it when connecting traces. */
+ if (best_edge && best_edge->dest->pred->pred_next
+ && copy_bb_p (best_edge->dest, 0))
+ best_edge = NULL;
+
+ /* Add all non-selected successors to the heaps. */
+ for (e = bb->succ; e; e = e->succ_next)
{
- /* If the old fallthru is still next, nothing to do. */
- if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
- || (RBI (bb)->index == n_basic_blocks - 1
- && e_fall->dest == EXIT_BLOCK_PTR))
+ if (e == best_edge
+ || e->dest == EXIT_BLOCK_PTR
+ || RBI (e->dest)->visited)
continue;
- /* There is one special case: if *neither* block is next,
- such as happens at the very end of a function, then we'll
- need to add a new unconditional jump. Choose the taken
- edge based on known or assumed probability. */
- if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
+ key = bb_to_key (e->dest);
+
+ if (bbd[e->dest->index].heap)
{
- rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
- if (note
- && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
- && invert_jump (bb_end_insn,
- label_for_bb (e_fall->dest), 0))
+ /* E->DEST is already in some heap. */
+ if (key != bbd[e->dest->index].node->key)
{
- e_fall->flags &= ~EDGE_FALLTHRU;
- e_taken->flags |= EDGE_FALLTHRU;
- e = e_fall, e_fall = e_taken, e_taken = e;
+ if (rtl_dump_file)
+ {
+ fprintf (rtl_dump_file,
+ "Changing key for bb %d from %ld to %ld.\n",
+ e->dest->index,
+ (long) bbd[e->dest->index].node->key,
+ key);
+ }
+ fibheap_replace_key (bbd[e->dest->index].heap,
+ bbd[e->dest->index].node, key);
}
}
-
- /* Otherwise we can try to invert the jump. This will
- basically never fail, however, keep up the pretense. */
- else if (invert_jump (bb_end_insn,
- label_for_bb (e_fall->dest), 0))
+ else
{
- e_fall->flags &= ~EDGE_FALLTHRU;
- e_taken->flags |= EDGE_FALLTHRU;
- continue;
+ fibheap_t which_heap = *heap;
+
+ prob = e->probability;
+ freq = EDGE_FREQUENCY (e);
+
+ if (!(e->flags & EDGE_CAN_FALLTHRU)
+ || (e->flags & EDGE_COMPLEX)
+ || prob < branch_th || freq < exec_th
+ || e->count < count_th)
+ {
+ if (round < N_ROUNDS - 1)
+ which_heap = new_heap;
+ }
+
+ bbd[e->dest->index].heap = which_heap;
+ bbd[e->dest->index].node = fibheap_insert (which_heap,
+ key, e->dest);
+
+ if (rtl_dump_file)
+ {
+ fprintf (rtl_dump_file,
+ " Possible start of %s round: %d (key: %ld)\n",
+ (which_heap == new_heap) ? "next" : "this",
+ e->dest->index, (long) key);
+ }
+
}
}
- else if (returnjump_p (bb_end_insn))
- continue;
- else
+
+ if (best_edge) /* Suitable successor was found. */
{
- /* Otherwise we have some switch or computed jump. In the
- 99% case, there should not have been a fallthru edge. */
- if (! e_fall)
- continue;
-#ifdef CASE_DROPS_THROUGH
- /* Except for VAX. Since we didn't have predication for the
- tablejump, the fallthru block should not have moved. */
- if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
- continue;
- bb_end_insn = skip_insns_after_block (bb);
-#else
- abort ();
-#endif
+ if (RBI (best_edge->dest)->visited == *n_traces)
+ {
+ /* We do nothing with one basic block loops. */
+ if (best_edge->dest != bb)
+ {
+ if (EDGE_FREQUENCY (best_edge)
+ > 4 * best_edge->dest->frequency / 5)
+ {
+ /* The loop has at least 4 iterations. If the loop
+ header is not the first block of the function
+ we can rotate the loop. */
+
+ if (best_edge->dest != ENTRY_BLOCK_PTR->next_bb)
+ {
+ if (rtl_dump_file)
+ {
+ fprintf (rtl_dump_file,
+ "Rotating loop %d - %d\n",
+ best_edge->dest->index, bb->index);
+ }
+ RBI (bb)->next = best_edge->dest;
+ bb = rotate_loop (best_edge, trace, *n_traces);
+ }
+ }
+ else
+ {
+ /* The loop has less than 4 iterations. */
+
+ /* Check whether there is another edge from BB. */
+ edge another_edge;
+ for (another_edge = bb->succ;
+ another_edge;
+ another_edge = another_edge->succ_next)
+ if (another_edge != best_edge)
+ break;
+
+ if (!another_edge && copy_bb_p (best_edge->dest,
+ !optimize_size))
+ {
+ bb = copy_bb (best_edge->dest, best_edge, bb,
+ *n_traces);
+ }
+ }
+ }
+
+ /* Terminate the trace. */
+ break;
+ }
+ else
+ {
+ /* Check for a situation
+
+ A
+ /|
+ B |
+ \|
+ C
+
+ where
+ EDGE_FREQUENCY (AB) + EDGE_FREQUENCY (BC)
+ >= EDGE_FREQUENCY (AC).
+ (i.e. 2 * B->frequency >= EDGE_FREQUENCY (AC) )
+ Best ordering is then A B C.
+
+ This situation is created for example by:
+
+ if (A) B;
+ C;
+
+ */
+
+ for (e = bb->succ; e; e = e->succ_next)
+ if (e != best_edge
+ && (e->flags & EDGE_CAN_FALLTHRU)
+ && !(e->flags & EDGE_COMPLEX)
+ && !RBI (e->dest)->visited
+ && !e->dest->pred->pred_next
+ && e->dest->succ
+ && (e->dest->succ->flags & EDGE_CAN_FALLTHRU)
+ && !(e->dest->succ->flags & EDGE_COMPLEX)
+ && !e->dest->succ->succ_next
+ && e->dest->succ->dest == best_edge->dest
+ && 2 * e->dest->frequency >= EDGE_FREQUENCY (best_edge))
+ {
+ best_edge = e;
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Selecting BB %d\n",
+ best_edge->dest->index);
+ break;
+ }
+
+ RBI (bb)->next = best_edge->dest;
+ bb = best_edge->dest;
+ }
}
}
- else
+ while (best_edge);
+ trace->last = bb;
+ bbd[trace->first->index].start_of_trace = *n_traces - 1;
+ bbd[trace->last->index].end_of_trace = *n_traces - 1;
+
+ /* The trace is terminated so we have to recount the keys in heap
+ (some block can have a lower key because now one of its predecessors
+ is an end of the trace). */
+ for (e = bb->succ; e; e = e->succ_next)
{
- /* No fallthru implies a noreturn function with EH edges, or
- something similarly bizarre. In any case, we don't need to
- do anything. */
- if (! e_fall)
+ if (e->dest == EXIT_BLOCK_PTR
+ || RBI (e->dest)->visited)
continue;
- /* If the fallthru block is still next, nothing to do. */
- if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
- || (RBI (bb)->index == n_basic_blocks - 1
- && e_fall->dest == EXIT_BLOCK_PTR))
- continue;
-
- /* We need a new jump insn. If the block has only one outgoing
- edge, then we can stuff the new jump insn in directly. */
- if (bb->succ->succ_next == NULL)
+ if (bbd[e->dest->index].heap)
{
- e_fall->flags &= ~EDGE_FALLTHRU;
-
- jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
- bb->end = jump_insn;
- barrier_insn = emit_barrier_after (jump_insn);
- RBI (bb)->eff_end = barrier_insn;
- continue;
+ key = bb_to_key (e->dest);
+ if (key != bbd[e->dest->index].node->key)
+ {
+ if (rtl_dump_file)
+ {
+ fprintf (rtl_dump_file,
+ "Changing key for bb %d from %ld to %ld.\n",
+ e->dest->index,
+ (long) bbd[e->dest->index].node->key, key);
+ }
+ fibheap_replace_key (bbd[e->dest->index].heap,
+ bbd[e->dest->index].node,
+ key);
+ }
}
}
-
- /* We got here if we need to add a new jump insn in a new block
- across the edge e_fall. */
-
- jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
- barrier_insn = emit_barrier_after (jump_insn);
-
- VARRAY_GROW (basic_block_info, ++n_basic_blocks);
- create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
-
- nb = BASIC_BLOCK (n_basic_blocks - 1);
- nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
- nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
- nb->local_set = 0;
-
- COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
- COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
-
- nb->aux = xmalloc (sizeof (struct reorder_block_def));
- RBI (nb)->eff_head = nb->head;
- RBI (nb)->eff_end = barrier_insn;
- RBI (nb)->scope = RBI (bb)->scope;
- RBI (nb)->index = RBI (bb)->index + 1;
- RBI (nb)->visited = 1;
- RBI (nb)->next = RBI (bb)->next;
- RBI (bb)->next = nb;
-
- /* Link to new block. */
- make_edge (NULL, nb, e_fall->dest, 0);
- redirect_edge_succ (e_fall, nb);
-
- /* Don't process this new block. */
- bb = nb;
-
- /* Fix subsequent reorder block indices to reflect new block. */
- while ((nb = RBI (nb)->next) != NULL)
- RBI (nb)->index += 1;
}
- /* Put basic_block_info in the new order. */
- for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
- {
- bb->index = RBI (bb)->index;
- BASIC_BLOCK (bb->index) = bb;
- }
-}
+ fibheap_delete (*heap);
+ /* "Return" the new heap. */
+ *heap = new_heap;
+}
-/* Perform sanity checks on the insn chain.
- 1. Check that next/prev pointers are consistent in both the forward and
- reverse direction.
- 2. Count insns in chain, going both directions, and check if equal.
- 3. Check that get_last_insn () returns the actual end of chain. */
+/* Create a duplicate of the basic block OLD_BB and redirect edge E to it, add
+ it to trace after BB, mark OLD_BB visited and update pass' data structures
+ (TRACE is the number of the trace which OLD_BB is duplicated to). */
-void
-verify_insn_chain ()
+static basic_block
+copy_bb (old_bb, e, bb, trace)
+ basic_block old_bb;
+ edge e;
+ basic_block bb;
+ int trace;
{
- rtx x,
- prevx,
- nextx;
- int insn_cnt1,
- insn_cnt2;
-
- prevx = NULL;
- insn_cnt1 = 1;
- for (x = get_insns (); x; x = NEXT_INSN (x))
+ basic_block new_bb;
+
+ new_bb = cfg_layout_duplicate_bb (old_bb, e);
+ if (e->dest != new_bb)
+ abort ();
+ if (RBI (e->dest)->visited)
+ abort ();
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file,
+ "Duplicated bb %d (created bb %d)\n",
+ old_bb->index, new_bb->index);
+ RBI (new_bb)->visited = trace;
+ RBI (new_bb)->next = RBI (bb)->next;
+ RBI (bb)->next = new_bb;
+
+ if (new_bb->index >= array_size || last_basic_block > array_size)
{
- if (PREV_INSN (x) != prevx)
+ int i;
+ int new_size;
+
+ new_size = MAX (last_basic_block, new_bb->index + 1);
+ new_size = GET_ARRAY_SIZE (new_size);
+ bbd = xrealloc (bbd, new_size * sizeof (bbro_basic_block_data));
+ for (i = array_size; i < new_size; i++)
{
- fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
- fprintf (stderr, "previous insn:\n");
- debug_rtx (prevx);
- fprintf (stderr, "current insn:\n");
- debug_rtx (x);
- abort ();
+ bbd[i].start_of_trace = -1;
+ bbd[i].end_of_trace = -1;
+ bbd[i].heap = NULL;
+ bbd[i].node = NULL;
}
- ++insn_cnt1;
- prevx = x;
- }
-
- if (prevx != get_last_insn ())
- {
- fprintf (stderr, "last_insn corrupt.\n");
- abort ();
- }
+ array_size = new_size;
- nextx = NULL;
- insn_cnt2 = 1;
- for (x = get_last_insn (); x; x = PREV_INSN (x))
- {
- if (NEXT_INSN (x) != nextx)
+ if (rtl_dump_file)
{
- fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
- fprintf (stderr, "current insn:\n");
- debug_rtx (x);
- fprintf (stderr, "next insn:\n");
- debug_rtx (nextx);
- abort ();
+ fprintf (rtl_dump_file,
+ "Growing the dynamic array to %d elements.\n",
+ array_size);
}
- ++insn_cnt2;
- nextx = x;
}
- if (insn_cnt1 != insn_cnt2)
- {
- fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
- insn_cnt1, insn_cnt2);
- abort ();
- }
+ return new_bb;
}
-static rtx
-get_next_bb_note (x)
- rtx x;
-{
- while (x)
- {
- if (NOTE_INSN_BASIC_BLOCK_P (x))
- return x;
- x = NEXT_INSN (x);
- }
- return NULL;
-}
-
-
-static rtx
-get_prev_bb_note (x)
- rtx x;
-{
- while (x)
- {
- if (NOTE_INSN_BASIC_BLOCK_P (x))
- return x;
- x = PREV_INSN (x);
- }
- return NULL;
-}
-
-
-/* Determine and record the relationships between basic blocks and
- scopes in scope tree S. */
+/* Compute and return the key (for the heap) of the basic block BB. */
-static void
-relate_bbs_with_scopes (s)
- scope s;
+static fibheapkey_t
+bb_to_key (bb)
+ basic_block bb;
{
- scope p;
- int i, bbi1, bbi2, bbs_spanned;
- rtx bbnote;
+ edge e;
- for (p = s->inner; p; p = p->next)
- relate_bbs_with_scopes (p);
+ int priority = 0;
- bbi1 = bbi2 = -1;
- bbs_spanned = 0;
+ /* Do not start in probably never executed blocks. */
+ if (probably_never_executed_bb_p (bb))
+ return BB_FREQ_MAX;
- /* If the begin and end notes are both inside the same basic block,
- or if they are both outside of basic blocks, then we know immediately
- how they are related. Otherwise, we need to poke around to make the
- determination. */
- if (s->bb_beg != s->bb_end)
- {
- if (s->bb_beg && s->bb_end)
- {
- /* Both notes are in different bbs. This implies that all the
- basic blocks spanned by the pair of notes are contained in
- this scope. */
- bbi1 = s->bb_beg->index;
- bbi2 = s->bb_end->index;
- bbs_spanned = 1;
- }
- else if (! s->bb_beg)
- {
- /* First note is outside of a bb. If the scope spans more than
- one basic block, then they all are contained within this
- scope. Otherwise, this scope is contained within the basic
- block. */
- bbnote = get_next_bb_note (s->note_beg);
- if (! bbnote)
- abort ();
- if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
- {
- bbs_spanned = 0;
- s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
- }
- else
- {
- bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
- bbi2 = s->bb_end->index;
- s->bb_end = NULL;
- bbs_spanned = 1;
- }
- }
- else /* ! s->bb_end */
- {
- /* Second note is outside of a bb. If the scope spans more than
- one basic block, then they all are contained within this
- scope. Otherwise, this scope is contained within the basic
- block. */
- bbnote = get_prev_bb_note (s->note_end);
- if (! bbnote)
- abort ();
- if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
- {
- bbs_spanned = 0;
- s->bb_end = NOTE_BASIC_BLOCK (bbnote);
- }
- else
- {
- bbi1 = s->bb_beg->index;
- bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
- s->bb_beg = NULL;
- bbs_spanned = 1;
- }
- }
- }
- else
+ /* Prefer blocks whose predecessor is an end of some trace
+ or whose predecessor edge is EDGE_DFS_BACK. */
+ for (e = bb->pred; e; e = e->pred_next)
{
- if (s->bb_beg)
- /* Both notes are in the same bb, which implies the block
- contains this scope. */
- bbs_spanned = 0;
- else
+ if ((e->src != ENTRY_BLOCK_PTR && bbd[e->src->index].end_of_trace >= 0)
+ || (e->flags & EDGE_DFS_BACK))
{
- rtx x1, x2;
- /* Both notes are outside of any bbs. This implies that all the
- basic blocks spanned by the pair of notes are contained in
- this scope.
- There is a degenerate case to consider. If the notes do not
- span any basic blocks, then it is an empty scope that can
- safely be deleted or ignored. Mark these with level = -1. */
-
- x1 = get_next_bb_note (s->note_beg);
- x2 = get_prev_bb_note (s->note_end);
- if (! (x1 && x2))
- {
- s->level = -1;
- bbs_spanned = 0;
- }
- else
- {
- bbi1 = NOTE_BASIC_BLOCK (x1)->index;
- bbi2 = NOTE_BASIC_BLOCK (x2)->index;
- bbs_spanned = 1;
- }
- }
- }
-
- /* If the scope spans one or more basic blocks, we record them. We
- only record the bbs that are immediately contained within this
- scope. Note that if a scope is contained within a bb, we can tell
- by checking that bb_beg = bb_end and that they are non-null. */
- if (bbs_spanned)
- {
- int j = 0;
-
- s->num_bbs = 0;
- for (i = bbi1; i <= bbi2; i++)
- if (! RBI (BASIC_BLOCK (i))->scope)
- s->num_bbs++;
+ int edge_freq = EDGE_FREQUENCY (e);
- s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
- for (i = bbi1; i <= bbi2; i++)
- {
- basic_block curr_bb = BASIC_BLOCK (i);
- if (! RBI (curr_bb)->scope)
- {
- s->bbs[j++] = curr_bb;
- RBI (curr_bb)->scope = s;
- }
+ if (edge_freq > priority)
+ priority = edge_freq;
}
}
- else
- s->num_bbs = 0;
-}
+ if (priority)
+ /* The block with priority should have a significantly lower key. */
+ return -(100 * BB_FREQ_MAX + 100 * priority + bb->frequency);
+ return -bb->frequency;
+}
-/* Allocate and initialize a new scope structure with scope level LEVEL,
- and record the NOTE beginning the scope. */
+/* Return true when the edge E from basic block BB is better than the temporary
+ best edge (details are in function). The probability of edge E is PROB. The
+ frequency of the successor is FREQ. The current best probability is
+ BEST_PROB, the best frequency is BEST_FREQ.
+ The edge is considered to be equivalent when PROB does not differ much from
+ BEST_PROB; similarly for frequency. */
-static scope
-make_new_scope (level, note)
- int level;
- rtx note;
+static bool
+better_edge_p (bb, e, prob, freq, best_prob, best_freq)
+ basic_block bb;
+ edge e;
+ int prob;
+ int freq;
+ int best_prob;
+ int best_freq;
{
- scope new_scope = xcalloc (1, sizeof (struct scope_def));
- new_scope->level = level;
- new_scope->note_beg = note;
- return new_scope;
-}
+ bool is_better_edge;
+
+ /* The BEST_* values do not have to be best, but can be a bit smaller than
+ maximum values. */
+ int diff_prob = best_prob / 10;
+ int diff_freq = best_freq / 10;
+
+ if (prob > best_prob + diff_prob)
+ /* The edge has higher probability than the temporary best edge. */
+ is_better_edge = true;
+ else if (prob < best_prob - diff_prob)
+ /* The edge has lower probability than the temporary best edge. */
+ is_better_edge = false;
+ else if (freq < best_freq - diff_freq)
+ /* The edge and the temporary best edge have almost equivalent
+ probabilities. The higher frequency of a successor now means
+ that there is another edge going into that successor.
+ This successor has lower frequency so it is better. */
+ is_better_edge = true;
+ else if (freq > best_freq + diff_freq)
+ /* This successor has higher frequency so it is worse. */
+ is_better_edge = false;
+ else if (e->dest->prev_bb == bb)
+ /* The edges have equivalent probabilities and the successors
+ have equivalent frequencies. Select the previous successor. */
+ is_better_edge = true;
+ else
+ is_better_edge = false;
+ return is_better_edge;
+}
-/* Build a forest representing the scope structure of the function.
- Return a pointer to a structure describing the forest. */
+/* Connect traces in array TRACES, N_TRACES is the count of traces. */
static void
-build_scope_forest (forest)
- scope_forest_info *forest;
+connect_traces (n_traces, traces)
+ int n_traces;
+ struct trace *traces;
{
- rtx x;
- int level, bbi, i;
- basic_block curr_bb;
- scope root, curr_scope = 0;
-
- forest->num_trees = 0;
- forest->trees = NULL;
- level = -1;
- root = NULL;
- curr_bb = NULL;
- bbi = 0;
- for (x = get_insns (); x; x = NEXT_INSN (x))
+ int i;
+ bool *connected;
+ int last_trace;
+ int freq_threshold;
+ gcov_type count_threshold;
+
+ freq_threshold = max_entry_frequency * DUPLICATION_THRESHOLD / 1000;
+ if (max_entry_count < INT_MAX / 1000)
+ count_threshold = max_entry_count * DUPLICATION_THRESHOLD / 1000;
+ else
+ count_threshold = max_entry_count / 1000 * DUPLICATION_THRESHOLD;
+
+ connected = xcalloc (n_traces, sizeof (bool));
+ last_trace = -1;
+ for (i = 0; i < n_traces; i++)
{
- if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
- curr_bb = BASIC_BLOCK (bbi);
+ int t = i;
+ int t2;
+ edge e, best;
+ int best_len;
+
+ if (connected[t])
+ continue;
- if (GET_CODE (x) == NOTE)
+ connected[t] = true;
+
+ /* Find the predecessor traces. */
+ for (t2 = t; t2 > 0;)
{
- if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
+ best = NULL;
+ best_len = 0;
+ for (e = traces[t2].first->pred; e; e = e->pred_next)
{
- if (root)
- {
- scope new_scope;
- if (! curr_scope)
- abort();
- level++;
- new_scope = make_new_scope (level, x);
- new_scope->outer = curr_scope;
- new_scope->next = NULL;
- if (! curr_scope->inner)
- {
- curr_scope->inner = new_scope;
- curr_scope->inner_last = new_scope;
- }
- else
- {
- curr_scope->inner_last->next = new_scope;
- curr_scope->inner_last = new_scope;
- }
- curr_scope = curr_scope->inner_last;
- }
- else
+ int si = e->src->index;
+
+ if (e->src != ENTRY_BLOCK_PTR
+ && (e->flags & EDGE_CAN_FALLTHRU)
+ && !(e->flags & EDGE_COMPLEX)
+ && bbd[si].end_of_trace >= 0
+ && !connected[bbd[si].end_of_trace]
+ && (!best
+ || e->probability > best->probability
+ || (e->probability == best->probability
+ && traces[bbd[si].end_of_trace].length > best_len)))
{
- int ntrees = forest->num_trees;
- level++;
- curr_scope = make_new_scope (level, x);
- root = curr_scope;
- forest->trees = xrealloc (forest->trees,
- sizeof (scope) * (ntrees + 1));
- forest->trees[forest->num_trees++] = root;
+ best = e;
+ best_len = traces[bbd[si].end_of_trace].length;
}
- curr_scope->bb_beg = curr_bb;
}
- else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
+ if (best)
{
- curr_scope->bb_end = curr_bb;
- curr_scope->note_end = x;
- level--;
- curr_scope = curr_scope->outer;
- if (level == -1)
- root = NULL;
+ RBI (best->src)->next = best->dest;
+ t2 = bbd[best->src->index].end_of_trace;
+ connected[t2] = true;
+ if (rtl_dump_file)
+ {
+ fprintf (rtl_dump_file, "Connection: %d %d\n",
+ best->src->index, best->dest->index);
+ }
}
- } /* if note */
-
- if (curr_bb && curr_bb->end == x)
- {
- curr_bb = NULL;
- bbi++;
+ else
+ break;
}
- } /* for */
-
- for (i = 0; i < forest->num_trees; i++)
- relate_bbs_with_scopes (forest->trees[i]);
-}
-
-
-/* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
- the insn chain. */
-
-static void
-remove_scope_notes ()
-{
- rtx x, next;
- basic_block currbb = NULL;
-
- for (x = get_insns (); x; x = next)
- {
- next = NEXT_INSN (x);
- if (NOTE_INSN_BASIC_BLOCK_P (x))
- currbb = NOTE_BASIC_BLOCK (x);
+ if (last_trace >= 0)
+ RBI (traces[last_trace].last)->next = traces[t2].first;
+ last_trace = t;
- if (GET_CODE (x) == NOTE
- && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
- || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
+ /* Find the successor traces. */
+ while (1)
{
- /* Check if the scope note happens to be the end of a bb. */
- if (currbb && x == currbb->end)
- currbb->end = PREV_INSN (x);
- if (currbb && x == currbb->head)
- abort ();
-
- if (PREV_INSN (x))
+ /* Find the continuation of the chain. */
+ best = NULL;
+ best_len = 0;
+ for (e = traces[t].last->succ; e; e = e->succ_next)
{
- NEXT_INSN (PREV_INSN (x)) = next;
- PREV_INSN (next) = PREV_INSN (x);
-
- NEXT_INSN (x) = NULL;
- PREV_INSN (x) = NULL;
+ int di = e->dest->index;
+
+ if (e->dest != EXIT_BLOCK_PTR
+ && (e->flags & EDGE_CAN_FALLTHRU)
+ && !(e->flags & EDGE_COMPLEX)
+ && bbd[di].start_of_trace >= 0
+ && !connected[bbd[di].start_of_trace]
+ && (!best
+ || e->probability > best->probability
+ || (e->probability == best->probability
+ && traces[bbd[di].start_of_trace].length > best_len)))
+ {
+ best = e;
+ best_len = traces[bbd[di].start_of_trace].length;
+ }
}
- else
- abort ();
- }
- }
-}
-
-
-/* Insert scope note pairs for a contained scope tree S after insn IP. */
-
-static void
-insert_intra_1 (s, ip)
- scope s;
- rtx *ip;
-{
- scope p;
-
- if (NOTE_BLOCK (s->note_beg))
- {
- *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
- NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
- }
- for (p = s->inner; p; p = p->next)
- insert_intra_1 (p, ip);
-
- if (NOTE_BLOCK (s->note_beg))
- {
- *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
- NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
- }
-}
-
-
-/* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
- scopes that are contained within BB. */
-
-static void
-insert_intra_bb_scope_notes (bb)
- basic_block bb;
-{
- scope s = RBI (bb)->scope;
- scope p;
- rtx ip;
-
- if (! s)
- return;
-
- ip = bb->head;
- if (GET_CODE (ip) == CODE_LABEL)
- ip = NEXT_INSN (ip);
-
- for (p = s->inner; p; p = p->next)
- {
- if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
- insert_intra_1 (p, &ip);
- }
-}
-
-
-/* Given two consecutive basic blocks BB1 and BB2 with different scopes,
- insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
- notes before BB2 such that the notes are correctly balanced. If BB1 or
- BB2 is NULL, we are inserting scope notes for the first and last basic
- blocks, respectively. */
-
-static void
-insert_inter_bb_scope_notes (bb1, bb2)
- basic_block bb1;
- basic_block bb2;
-{
- rtx ip;
- scope com;
-
- /* It is possible that a basic block is not contained in any scope.
- In that case, we either open or close a scope but not both. */
- if (bb1 && bb2)
- {
- scope s1 = RBI (bb1)->scope;
- scope s2 = RBI (bb2)->scope;
- if (! s1 && ! s2)
- return;
- if (! s1)
- bb1 = NULL;
- else if (! s2)
- bb2 = NULL;
- }
-
- /* Find common ancestor scope. */
- if (bb1 && bb2)
- {
- scope s1 = RBI (bb1)->scope;
- scope s2 = RBI (bb2)->scope;
- while (s1 != s2)
- {
- if (! (s1 && s2))
- abort ();
- if (s1->level > s2->level)
- s1 = s1->outer;
- else if (s2->level > s1->level)
- s2 = s2->outer;
- else
+ if (best)
{
- s1 = s1->outer;
- s2 = s2->outer;
+ if (rtl_dump_file)
+ {
+ fprintf (rtl_dump_file, "Connection: %d %d\n",
+ best->src->index, best->dest->index);
+ }
+ t = bbd[best->dest->index].start_of_trace;
+ RBI (traces[last_trace].last)->next = traces[t].first;
+ connected[t] = true;
+ last_trace = t;
}
- }
- com = s1;
- }
- else
- com = NULL;
+ else
+ {
+ /* Try to connect the traces by duplication of 1 block. */
+ edge e2;
+ basic_block next_bb = NULL;
+ bool try_copy = false;
+
+ for (e = traces[t].last->succ; e; e = e->succ_next)
+ if (e->dest != EXIT_BLOCK_PTR
+ && (e->flags & EDGE_CAN_FALLTHRU)
+ && !(e->flags & EDGE_COMPLEX)
+ && (!best || e->probability > best->probability))
+ {
+ edge best2 = NULL;
+ int best2_len = 0;
+
+ /* If the destination is a start of a trace which is only
+ one block long, then no need to search the successor
+ blocks of the trace. Accept it. */
+ if (bbd[e->dest->index].start_of_trace >= 0
+ && traces[bbd[e->dest->index].start_of_trace].length
+ == 1)
+ {
+ best = e;
+ try_copy = true;
+ continue;
+ }
+
+ for (e2 = e->dest->succ; e2; e2 = e2->succ_next)
+ {
+ int di = e2->dest->index;
+
+ if (e2->dest == EXIT_BLOCK_PTR
+ || ((e2->flags & EDGE_CAN_FALLTHRU)
+ && !(e2->flags & EDGE_COMPLEX)
+ && bbd[di].start_of_trace >= 0
+ && !connected[bbd[di].start_of_trace]
+ && (EDGE_FREQUENCY (e2) >= freq_threshold)
+ && (e2->count >= count_threshold)
+ && (!best2
+ || e2->probability > best2->probability
+ || (e2->probability == best2->probability
+ && traces[bbd[di].start_of_trace].length
+ > best2_len))))
+ {
+ best = e;
+ best2 = e2;
+ if (e2->dest != EXIT_BLOCK_PTR)
+ best2_len = traces[bbd[di].start_of_trace].length;
+ else
+ best2_len = INT_MAX;
+ next_bb = e2->dest;
+ try_copy = true;
+ }
+ }
+ }
+
+ /* Copy tiny blocks always; copy larger blocks only when the
+ edge is traversed frequently enough. */
+ if (try_copy
+ && copy_bb_p (best->dest,
+ !optimize_size
+ && EDGE_FREQUENCY (best) >= freq_threshold
+ && best->count >= count_threshold))
+ {
+ basic_block new_bb;
- /* Close scopes. */
- if (bb1)
- {
- scope s = RBI (bb1)->scope;
- ip = RBI (bb1)->eff_end;
- while (s != com)
- {
- if (NOTE_BLOCK (s->note_beg))
- {
- ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
- NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
- }
- s = s->outer;
- }
- }
+ if (rtl_dump_file)
+ {
+ fprintf (rtl_dump_file, "Connection: %d %d ",
+ traces[t].last->index, best->dest->index);
+ if (!next_bb)
+ fputc ('\n', rtl_dump_file);
+ else if (next_bb == EXIT_BLOCK_PTR)
+ fprintf (rtl_dump_file, "exit\n");
+ else
+ fprintf (rtl_dump_file, "%d\n", next_bb->index);
+ }
- /* Open scopes. */
- if (bb2)
- {
- scope s = RBI (bb2)->scope;
- ip = bb2->head;
- while (s != com)
- {
- if (NOTE_BLOCK (s->note_beg))
- {
- ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
- NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
+ new_bb = copy_bb (best->dest, best, traces[t].last, t);
+ traces[t].last = new_bb;
+ if (next_bb && next_bb != EXIT_BLOCK_PTR)
+ {
+ t = bbd[next_bb->index].start_of_trace;
+ RBI (traces[last_trace].last)->next = traces[t].first;
+ connected[t] = true;
+ last_trace = t;
+ }
+ else
+ break; /* Stop finding the successor traces. */
+ }
+ else
+ break; /* Stop finding the successor traces. */
}
- s = s->outer;
}
}
-}
-
-
-/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
- on the scope forest and the newly reordered basic blocks. */
-
-static void
-rebuild_scope_notes (forest)
- scope_forest_info *forest;
-{
- int i;
-
- if (forest->num_trees == 0)
- return;
- /* Start by opening the scopes before the first basic block. */
- insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
-
- /* Then, open and close scopes as needed between blocks. */
- for (i = 0; i < n_basic_blocks - 1; i++)
+ if (rtl_dump_file)
{
- basic_block bb1 = BASIC_BLOCK (i);
- basic_block bb2 = BASIC_BLOCK (i + 1);
- if (RBI (bb1)->scope != RBI (bb2)->scope)
- insert_inter_bb_scope_notes (bb1, bb2);
- insert_intra_bb_scope_notes (bb1);
+ basic_block bb;
+
+ fprintf (rtl_dump_file, "Final order:\n");
+ for (bb = traces[0].first; bb; bb = RBI (bb)->next)
+ fprintf (rtl_dump_file, "%d ", bb->index);
+ fprintf (rtl_dump_file, "\n");
+ fflush (rtl_dump_file);
}
- /* Finally, close the scopes after the last basic block. */
- insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
- insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
+ FREE (connected);
}
+/* Return true when BB can and should be copied. CODE_MAY_GROW is true
+ when code size is allowed to grow by duplication. */
-/* Free the storage associated with the scope tree at S. */
-
-static void
-free_scope_forest_1 (s)
- scope s;
+static bool
+copy_bb_p (bb, code_may_grow)
+ basic_block bb;
+ int code_may_grow;
{
- scope p, next;
-
- for (p = s->inner; p; p = next)
+ int size = 0;
+ int max_size = uncond_jump_length;
+ rtx insn;
+
+ if (!bb->frequency)
+ return false;
+ if (!bb->pred || !bb->pred->pred_next)
+ return false;
+ if (!cfg_layout_can_duplicate_bb_p (bb))
+ return false;
+
+ if (code_may_grow && maybe_hot_bb_p (bb))
+ max_size *= 8;
+
+ for (insn = bb->head; insn != NEXT_INSN (bb->end);
+ insn = NEXT_INSN (insn))
{
- next = p->next;
- free_scope_forest_1 (p);
+ if (INSN_P (insn))
+ size += get_attr_length (insn);
}
- if (s->bbs)
- free (s->bbs);
- free (s);
-}
-
-
-/* Free the storage associated with the scope forest. */
-
-static void
-free_scope_forest (forest)
- scope_forest_info *forest;
-{
- int i;
- for (i = 0; i < forest->num_trees; i++)
- free_scope_forest_1 (forest->trees[i]);
-}
-
+ if (size <= max_size)
+ return true;
-/* Visualize the scope forest. */
-
-void
-dump_scope_forest (forest)
- scope_forest_info *forest;
-{
- if (forest->num_trees == 0)
- fprintf (stderr, "\n< Empty scope forest >\n");
- else
+ if (rtl_dump_file)
{
- int i;
- fprintf (stderr, "\n< Scope forest >\n");
- for (i = 0; i < forest->num_trees; i++)
- dump_scope_forest_1 (forest->trees[i], 0);
+ fprintf (rtl_dump_file,
+ "Block %d can't be copied because its size = %d.\n",
+ bb->index, size);
}
-}
+ return false;
+}
-/* Recursive portion of dump_scope_forest. */
+/* Return the length of unconditional jump instruction. */
-static void
-dump_scope_forest_1 (s, indent)
- scope s;
- int indent;
+static int
+get_uncond_jump_length ()
{
- scope p;
- int i;
+ rtx label, jump;
+ int length;
- if (s->bb_beg != NULL && s->bb_beg == s->bb_end
- && RBI (s->bb_beg)->scope
- && RBI (s->bb_beg)->scope->level + 1 == s->level)
- {
- fprintf (stderr, "%*s", indent, "");
- fprintf (stderr, "BB%d:\n", s->bb_beg->index);
- }
-
- fprintf (stderr, "%*s", indent, "");
- fprintf (stderr, "{ level %d (block %p)\n", s->level,
- (PTR) NOTE_BLOCK (s->note_beg));
+ label = emit_label_before (gen_label_rtx (), get_insns ());
+ jump = emit_jump_insn (gen_jump (label));
- fprintf (stderr, "%*s%s", indent, "", "bbs:");
- for (i = 0; i < s->num_bbs; i++)
- fprintf (stderr, " %d", s->bbs[i]->index);
- fprintf (stderr, "\n");
-
- for (p = s->inner; p; p = p->next)
- dump_scope_forest_1 (p, indent + 2);
+ length = get_attr_length (jump);
- fprintf (stderr, "%*s", indent, "");
- fprintf (stderr, "}\n");
+ delete_insn (jump);
+ delete_insn (label);
+ return length;
}
-
/* Reorder basic blocks. The main entry point to this file. */
void
reorder_basic_blocks ()
{
- scope_forest_info forest;
+ int n_traces;
int i;
+ struct trace *traces;
if (n_basic_blocks <= 1)
return;
- /* We do not currently handle correct re-placement of EH notes. */
- for (i = 0; i < n_basic_blocks; i++)
- {
- edge e;
- for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
- if (e->flags & EDGE_EH)
- return;
- }
-
- for (i = 0; i < n_basic_blocks; i++)
- BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
-
- EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
+ if ((* targetm.cannot_modify_jumps_p) ())
+ return;
- build_scope_forest (&forest);
- remove_scope_notes ();
+ cfg_layout_initialize (NULL);
- record_effective_endpoints ();
- make_reorder_chain ();
- fixup_reorder_chain ();
+ set_edge_can_fallthru_flag ();
+ mark_dfs_back_edges ();
-#ifdef ENABLE_CHECKING
- verify_insn_chain ();
-#endif
+ /* We are estimating the length of uncond jump insn only once since the code
+ for getting the insn length always returns the minimal length now. */
+ if (uncond_jump_length == 0)
+ uncond_jump_length = get_uncond_jump_length ();
- rebuild_scope_notes (&forest);
- free_scope_forest (&forest);
- reorder_blocks ();
+ /* We need to know some information for each basic block. */
+ array_size = GET_ARRAY_SIZE (last_basic_block);
+ bbd = xmalloc (array_size * sizeof (bbro_basic_block_data));
+ for (i = 0; i < array_size; i++)
+ {
+ bbd[i].start_of_trace = -1;
+ bbd[i].end_of_trace = -1;
+ bbd[i].heap = NULL;
+ bbd[i].node = NULL;
+ }
- for (i = 0; i < n_basic_blocks; i++)
- free (BASIC_BLOCK (i)->aux);
+ traces = xmalloc (n_basic_blocks * sizeof (struct trace));
+ n_traces = 0;
+ find_traces (&n_traces, traces);
+ connect_traces (n_traces, traces);
+ FREE (traces);
+ FREE (bbd);
- free (EXIT_BLOCK_PTR->aux);
+ if (rtl_dump_file)
+ dump_flow_info (rtl_dump_file);
-#ifdef ENABLE_CHECKING
- verify_flow_info ();
-#endif
+ cfg_layout_finalize ();
}