"Profile Guided Code Positioning"
Pettis and Hanson; PLDI '90.
+
+ TODO:
+
+ (1) Consider:
+
+ if (p) goto A; // predict taken
+ foo ();
+ A:
+ if (q) goto B; // predict taken
+ bar ();
+ B:
+ baz ();
+ return;
+
+ We'll currently reorder this as
+
+ if (!p) goto C;
+ A:
+ if (!q) goto D;
+ B:
+ baz ();
+ return;
+ D:
+ bar ();
+ goto B;
+ C:
+ foo ();
+ goto A;
+
+ A better ordering is
+
+ if (!p) goto C;
+ if (!q) goto D;
+ B:
+ baz ();
+ return;
+ C:
+ foo ();
+ if (q) goto B;
+ D:
+ bar ();
+ goto B;
+
+ This requires that we be able to duplicate the jump at A, and
+ adjust the graph traversal such that greedy placement doesn't
+ fix D before C is considered.
+
+ (2) Coordinate with shorten_branches to minimize the number of
+ long branches.
+
+ (3) Invent a method by which sufficiently non-predicted code can
+ be moved to either the end of the section or another section
+ entirely. Some sort of NOTE_INSN note would work fine.
+
+ This completely scroggs all debugging formats, so the user
+ would have to explicitly ask for it.
*/
#include "config.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
+#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
-#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
-#include "except.h"
#include "toplev.h"
#include "recog.h"
-#include "insn-flags.h"
#include "expr.h"
#include "obstack.h"
+#ifndef HAVE_epilogue
+#define HAVE_epilogue 0
+#endif
+
+
/* The contents of the current function definition are allocated
in this obstack, and all are freed at the end of the function.
For top-level functions, this is temporary_obstack.
Separate obstacks are made for nested functions. */
-extern struct obstack *function_obstack;
+extern struct obstack flow_obstack;
-typedef struct reorder_block_def {
- int flags;
- int index;
- basic_block add_jump;
- edge succ;
- rtx end;
- int block_begin;
- int block_end;
- rtx eff_head;
- rtx eff_end;
-} *reorder_block_def;
+/* Structure to hold information about lexical scopes. */
+typedef struct scope_def
+{
+ int level;
-static struct reorder_block_def rbd_init
-= {
- 0, /* flags */
- 0, /* index */
- NULL, /* add_jump */
- NULL, /* succ */
- NULL_RTX, /* end */
- 0, /* block_begin */
- 0, /* block_end */
- NULL_RTX, /* eff_head */
- NULL_RTX /* eff_end */
-};
-
-
-#define REORDER_BLOCK_HEAD 0x1
-#define REORDER_BLOCK_VISITED 0x2
-
-#define REORDER_BLOCK_FLAGS(bb) \
- ((reorder_block_def) (bb)->aux)->flags
+ /* The NOTE_INSN_BLOCK_BEG that started this scope. */
+ rtx note_beg;
-#define REORDER_BLOCK_INDEX(bb) \
- ((reorder_block_def) (bb)->aux)->index
+ /* The NOTE_INSN_BLOCK_END that ended this scope. */
+ rtx note_end;
-#define REORDER_BLOCK_ADD_JUMP(bb) \
- ((reorder_block_def) (bb)->aux)->add_jump
+ /* The bb containing note_beg (if any). */
+ basic_block bb_beg;
-#define REORDER_BLOCK_SUCC(bb) \
- ((reorder_block_def) (bb)->aux)->succ
+ /* The bb containing note_end (if any). */
+ basic_block bb_end;
-#define REORDER_BLOCK_OLD_END(bb) \
- ((reorder_block_def) (bb)->aux)->end
+ /* List of basic blocks contained within this scope. */
+ basic_block *bbs;
-#define REORDER_BLOCK_BEGIN(bb) \
- ((reorder_block_def) (bb)->aux)->block_begin
+ /* Number of blocks contained within this scope. */
+ int num_bbs;
-#define REORDER_BLOCK_END(bb) \
- ((reorder_block_def) (bb)->aux)->block_end
+ /* The outer scope or NULL if outermost scope. */
+ struct scope_def *outer;
-#define REORDER_BLOCK_EFF_HEAD(bb) \
- ((reorder_block_def) (bb)->aux)->eff_head
+ /* The first inner scope or NULL if innermost scope. */
+ struct scope_def *inner;
-#define REORDER_BLOCK_EFF_END(bb) \
- ((reorder_block_def) (bb)->aux)->eff_end
+ /* The last inner scope or NULL if innermost scope. */
+ struct scope_def *inner_last;
+ /* Link to the next (sibling) scope. */
+ struct scope_def *next;
+} *scope;
-static int reorder_index;
-static basic_block reorder_last_visited;
-enum reorder_skip_type {REORDER_SKIP_BEFORE, REORDER_SKIP_AFTER,
- REORDER_SKIP_BLOCK_END};
+/* Structure to hold information about the scope forest. */
+typedef struct
+{
+ /* Number of trees in forest. */
+ int num_trees;
+
+ /* List of tree roots. */
+ scope *trees;
+} scope_forest_info;
+
+/* Structure to hold information about the blocks during reordering. */
+typedef struct reorder_block_def
+{
+ rtx eff_head;
+ rtx eff_end;
+ scope scope;
+ basic_block next;
+ int index;
+ int visited;
+} *reorder_block_def;
+
+#define RBI(BB) ((reorder_block_def) (BB)->aux)
/* Local function prototypes. */
-static rtx skip_insns_between_block PARAMS ((basic_block,
- enum reorder_skip_type));
-static basic_block get_common_dest PARAMS ((basic_block, basic_block));
-static basic_block chain_reorder_blocks PARAMS ((edge, basic_block));
-static void make_reorder_chain PARAMS ((basic_block));
+static rtx skip_insns_after_block PARAMS ((basic_block));
+static void record_effective_endpoints PARAMS ((void));
+static void make_reorder_chain PARAMS ((void));
+static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
+static rtx label_for_bb PARAMS ((basic_block));
+static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
static void fixup_reorder_chain PARAMS ((void));
-#ifdef ENABLE_CHECKING
-static void verify_insn_chain PARAMS ((void));
-#endif
-
-/* Skip over insns BEFORE or AFTER BB which are typically associated with
- basic block BB. */
+static void relate_bbs_with_scopes PARAMS ((scope));
+static scope make_new_scope PARAMS ((int, rtx));
+static void build_scope_forest PARAMS ((scope_forest_info *));
+static void remove_scope_notes PARAMS ((void));
+static void insert_intra_1 PARAMS ((scope, rtx *));
+static void insert_intra_bb_scope_notes PARAMS ((basic_block));
+static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
+static void rebuild_scope_notes PARAMS ((scope_forest_info *));
+static void free_scope_forest_1 PARAMS ((scope));
+static void free_scope_forest PARAMS ((scope_forest_info *));
+void dump_scope_forest PARAMS ((scope_forest_info *));
+static void dump_scope_forest_1 PARAMS ((scope, int));
+static rtx get_next_bb_note PARAMS ((rtx));
+static rtx get_prev_bb_note PARAMS ((rtx));
+
+void verify_insn_chain PARAMS ((void));
+\f
+/* Skip over inter-block insns occurring after BB which are typically
+ associated with BB (e.g., barriers). If there are any such insns,
+ we return the last one. Otherwise, we return the end of BB. */
static rtx
-skip_insns_between_block (bb, skip_type)
+skip_insns_after_block (bb)
basic_block bb;
- enum reorder_skip_type skip_type;
{
- rtx insn, last_insn;
+ rtx insn, last_insn, next_head;
- if (skip_type == REORDER_SKIP_BEFORE)
- {
- if (bb == ENTRY_BLOCK_PTR)
- return 0;
+ next_head = NULL_RTX;
+ if (bb->index + 1 != n_basic_blocks)
+ next_head = BASIC_BLOCK (bb->index + 1)->head;
- last_insn = bb->head;
- for (insn = PREV_INSN (bb->head);
- insn && insn != BASIC_BLOCK (bb->index - 1)->end;
- last_insn = insn, insn = PREV_INSN (insn))
- {
- if (NEXT_INSN (insn) != last_insn)
- break;
-
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_END
- && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
- && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
- continue;
-
- break;
- }
- }
- else
+ for (last_insn = bb->end; (insn = NEXT_INSN (last_insn)); last_insn = insn)
{
- last_insn = bb->end;
+ if (insn == next_head)
+ break;
- if (bb == EXIT_BLOCK_PTR)
- return 0;
-
- for (insn = NEXT_INSN (bb->end);
- insn;
- last_insn = insn, insn = NEXT_INSN (insn))
+ switch (GET_CODE (insn))
{
- if (bb->index + 1 != n_basic_blocks
- && insn == BASIC_BLOCK (bb->index + 1)->head)
- break;
+ case BARRIER:
+ continue;
- if (GET_CODE (insn) == BARRIER
- || GET_CODE (insn) == JUMP_INSN
- || (GET_CODE (insn) == NOTE
- && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
- || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)))
- continue;
+ case NOTE:
+ switch (NOTE_LINE_NUMBER (insn))
+ {
+ case NOTE_INSN_LOOP_END:
+ case NOTE_INSN_BLOCK_END:
+ case NOTE_INSN_DELETED:
+ case NOTE_INSN_DELETED_LABEL:
+ continue;
- if (GET_CODE (insn) == CODE_LABEL
+ default:
+ break;
+ }
+ break;
+
+ case CODE_LABEL:
+ if (NEXT_INSN (insn)
&& GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
&& (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
- || GET_CODE (PATTERN
- (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
+ || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
{
insn = NEXT_INSN (insn);
continue;
}
+ break;
- /* Skip to next non-deleted insn. */
- if (GET_CODE (insn) == NOTE
- && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED
- || NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL))
- continue;
-
+ default:
break;
}
- if (skip_type == REORDER_SKIP_BLOCK_END)
- {
- int found_block_end = 0;
-
- for (; insn; last_insn = insn, insn = NEXT_INSN (insn))
- {
- if (bb->index + 1 != n_basic_blocks
- && insn == BASIC_BLOCK (bb->index + 1)->head)
- break;
-
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
- {
- found_block_end = 1;
- continue;
- }
-
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
- continue;
-
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) >= 0
- && NEXT_INSN (insn)
- && GET_CODE (NEXT_INSN (insn)) == NOTE
- && (NOTE_LINE_NUMBER (NEXT_INSN (insn))
- == NOTE_INSN_BLOCK_END))
- continue;
- break;
- }
-
- if (! found_block_end)
- last_insn = 0;
- }
+ break;
}
return last_insn;
}
-/* Return common destination for blocks BB0 and BB1. */
+/* Locate the effective beginning and end of the insn chain for each
+ block, as defined by skip_insns_after_block above. */
-static basic_block
-get_common_dest (bb0, bb1)
- basic_block bb0, bb1;
+static void
+record_effective_endpoints ()
{
- edge e0, e1;
-
- for (e0 = bb0->succ; e0; e0 = e0->succ_next)
+ rtx next_insn = get_insns ();
+ int i;
+
+ for (i = 0; i < n_basic_blocks; ++i)
{
- for (e1 = bb1->succ; e1; e1 = e1->succ_next)
- {
- if (e0->dest == e1->dest)
- {
- return e0->dest;
- }
- }
+ basic_block bb = BASIC_BLOCK (i);
+ rtx end;
+
+ RBI (bb)->eff_head = next_insn;
+ end = skip_insns_after_block (bb);
+ RBI (bb)->eff_end = end;
+ next_insn = NEXT_INSN (end);
}
- return 0;
}
-/* Move the destination block for edge E after chain end block CEB
- Adding jumps and labels is deferred until fixup_reorder_chain. */
+/* Compute an ordering for the blocks of the current function and record
+   the ordering in RBI()->index, chained through RBI()->next.  The actual
+   per-subgraph traversal is done by make_reorder_chain_1.  */
-static basic_block
-chain_reorder_blocks (e, ceb)
- edge e;
- basic_block ceb;
+static void
+make_reorder_chain ()
{
- basic_block sb = e->src;
- basic_block db = e->dest;
- rtx cebe_insn, cebbe_insn, dbh_insn, dbe_insn;
- edge ee, last_edge;
-
- enum cond_types {NO_COND, PREDICT_THEN_WITH_ELSE, PREDICT_ELSE,
- PREDICT_THEN_NO_ELSE, PREDICT_NOT_THEN_NO_ELSE};
- enum cond_types cond_type;
- enum cond_block_types {NO_COND_BLOCK, THEN_BLOCK, ELSE_BLOCK,
- NO_ELSE_BLOCK};
- enum cond_block_types cond_block_type;
-
- if (rtl_dump_file)
- fprintf (rtl_dump_file,
- "Edge from basic block %d to basic block %d last visited %d\n",
- sb->index, db->index, ceb->index);
-
- dbh_insn = REORDER_BLOCK_EFF_HEAD (db);
- cebe_insn = REORDER_BLOCK_EFF_END (ceb);
- cebbe_insn = skip_insns_between_block (ceb, REORDER_SKIP_BLOCK_END);
-
- {
- int block_begins = 0;
- rtx insn;
-
- for (insn = dbh_insn; insn && insn != db->end; insn = NEXT_INSN (insn))
- {
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
- {
- block_begins += 1;
- break;
- }
- }
- REORDER_BLOCK_BEGIN (sb) = block_begins;
- }
-
- if (cebbe_insn)
+ basic_block last_block = NULL;
+ basic_block prev = NULL;
+ int nbb_m1 = n_basic_blocks - 1;
+
+ /* If we've not got epilogue in RTL, we must fallthru to the exit.
+ Force the last block to be at the end. */
+ /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
+ end of the function for stack unwinding purposes. */
+ if (! HAVE_epilogue)
{
- int block_ends = 0;
- rtx insn;
-
- for (insn = cebe_insn; insn; insn = NEXT_INSN (insn))
- {
- if (PREV_INSN (insn) == cebbe_insn)
- break;
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
- {
- block_ends += 1;
- continue;
- }
- }
- REORDER_BLOCK_END (ceb) = block_ends;
+ last_block = BASIC_BLOCK (nbb_m1);
+ RBI (last_block)->visited = 1;
+ nbb_m1 -= 1;
}
- /* Blocks are in original order. */
- if (sb->index == ceb->index
- && ceb->index + 1 == db->index && NEXT_INSN (cebe_insn))
- return db;
-
- /* Get the type of block and type of condition. */
- cond_type = NO_COND;
- cond_block_type = NO_COND_BLOCK;
- if (GET_CODE (sb->end) == JUMP_INSN && ! simplejump_p (sb->end)
- && condjump_p (sb->end))
+ /* Loop until we've placed every block. */
+ do
{
- if (e->flags & EDGE_FALLTHRU)
- cond_block_type = THEN_BLOCK;
- else if (get_common_dest (sb->succ->dest, sb))
- cond_block_type = NO_ELSE_BLOCK;
- else
- cond_block_type = ELSE_BLOCK;
+ int i;
+ basic_block next = NULL;
- if (sb->succ->succ_next
- && get_common_dest (sb->succ->dest, sb))
- {
- if (cond_block_type == THEN_BLOCK)
- {
- if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
- & REORDER_BLOCK_VISITED))
- cond_type = PREDICT_THEN_NO_ELSE;
- else
- cond_type = PREDICT_NOT_THEN_NO_ELSE;
- }
- else if (cond_block_type == NO_ELSE_BLOCK)
- {
- if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
- & REORDER_BLOCK_VISITED))
- cond_type = PREDICT_NOT_THEN_NO_ELSE;
- else
- cond_type = PREDICT_THEN_NO_ELSE;
- }
- }
- else
+ /* Find the next unplaced block. */
+ /* ??? Get rid of this loop, and track which blocks are not yet
+ placed more directly, so as to avoid the O(N^2) worst case.
+ Perhaps keep a doubly-linked list of all to-be-placed blocks;
+ remove from the list as we place. The head of that list is
+ what we're looking for here. */
+
+ for (i = 0; i <= nbb_m1; ++i)
{
- if (cond_block_type == THEN_BLOCK)
+ basic_block bb = BASIC_BLOCK (i);
+ if (! RBI (bb)->visited)
{
- if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
- & REORDER_BLOCK_VISITED))
- cond_type = PREDICT_THEN_WITH_ELSE;
- else
- cond_type = PREDICT_ELSE;
- }
- else if (cond_block_type == ELSE_BLOCK
- && sb->succ->dest != EXIT_BLOCK_PTR)
- {
- if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
- & REORDER_BLOCK_VISITED))
- cond_type = PREDICT_ELSE;
- else
- cond_type = PREDICT_THEN_WITH_ELSE;
+ next = bb;
+ break;
}
}
- }
-
- if (rtl_dump_file)
- {
- static const char * cond_type_str [] = {"not cond jump", "predict then",
- "predict else",
- "predict then w/o else",
- "predict not then w/o else"};
- static const char * cond_block_type_str [] = {"not then or else block",
- "then block",
- "else block",
- "then w/o else block"};
+ if (! next)
+ abort ();
- fprintf (rtl_dump_file, " %s (looking at %s)\n",
- cond_type_str[(int)cond_type],
- cond_block_type_str[(int)cond_block_type]);
+ prev = make_reorder_chain_1 (next, prev);
}
+ while (RBI (prev)->index < nbb_m1);
- /* Reflect that then block will move and we'll jump to it. */
- if (cond_block_type != THEN_BLOCK
- && (cond_type == PREDICT_ELSE
- || cond_type == PREDICT_NOT_THEN_NO_ELSE))
+ /* Terminate the chain. */
+ if (! HAVE_epilogue)
{
- if (rtl_dump_file)
- fprintf (rtl_dump_file,
- " then jump from block %d to block %d\n",
- sb->index, sb->succ->dest->index);
-
- /* Jump to reordered then block. */
- REORDER_BLOCK_ADD_JUMP (sb) = sb->succ->dest;
+ RBI (prev)->next = last_block;
+ RBI (last_block)->index = RBI (prev)->index + 1;
+ prev = last_block;
}
-
- /* Reflect that then block will jump back when we have no else. */
- if (cond_block_type != THEN_BLOCK
- && cond_type == PREDICT_NOT_THEN_NO_ELSE)
- {
- for (ee = sb->succ->dest->succ;
- ee && ! (ee->flags & EDGE_FALLTHRU);
- ee = ee->succ_next)
- continue;
+ RBI (prev)->next = NULL;
+}
- if (ee && ! (GET_CODE (sb->succ->dest->end) == JUMP_INSN
- && ! simplejump_p (sb->succ->dest->end)))
- {
- REORDER_BLOCK_ADD_JUMP (sb->succ->dest) = ee->dest;
- }
- }
+/* A helper function for make_reorder_chain.
+
+ We do not follow EH edges, or non-fallthru edges to noreturn blocks.
+ These are assumed to be the error condition and we wish to cluster
+ all of them at the very end of the function for the benefit of cache
+ locality for the rest of the function.
- /* Reflect that else block will jump back. */
- if (cond_block_type == ELSE_BLOCK
- && (cond_type == PREDICT_THEN_WITH_ELSE || cond_type == PREDICT_ELSE))
+ ??? We could do slightly better by noticing earlier that some subgraph
+ has all paths leading to noreturn functions, but for there to be more
+ than one block in such a subgraph is rare. */
+
+static basic_block
+make_reorder_chain_1 (bb, prev)
+ basic_block bb;
+ basic_block prev;
+{
+ edge e;
+ basic_block next;
+ rtx note;
+
+ /* Mark this block visited. */
+ if (prev)
{
- last_edge=db->succ;
+ int new_index;
- if (last_edge
- && last_edge->dest != EXIT_BLOCK_PTR
- && GET_CODE (last_edge->dest->head) == CODE_LABEL
- && ! (GET_CODE (db->end) == JUMP_INSN))
- {
- if (rtl_dump_file)
- fprintf (rtl_dump_file,
- " else jump from block %d to block %d\n",
- db->index, last_edge->dest->index);
+ restart:
+ RBI (prev)->next = bb;
+ new_index = RBI (prev)->index + 1;
+ RBI (bb)->index = new_index;
- REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
- }
+ if (rtl_dump_file && prev->index + 1 != bb->index)
+ fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
+ bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
}
+ else
+ RBI (bb)->index = 0;
+ RBI (bb)->visited = 1;
+ prev = bb;
- /* This block's successor has already been reordered. This can happen
- when we reorder a chain starting at a then or else. */
- for (last_edge = db->succ;
- last_edge && ! (last_edge->flags & EDGE_FALLTHRU);
- last_edge = last_edge->succ_next)
- continue;
+ if (bb->succ == NULL)
+ return prev;
+
+ /* Find the most probable block. */
- if (last_edge
- && last_edge->dest != EXIT_BLOCK_PTR
- && (REORDER_BLOCK_FLAGS (last_edge->dest)
- & REORDER_BLOCK_VISITED))
+ next = NULL;
+ if (any_condjump_p (bb->end)
+ && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
{
- if (rtl_dump_file)
- fprintf (rtl_dump_file,
- " end of chain jump from block %d to block %d\n",
- db->index, last_edge->dest->index);
+ int taken, probability;
+ edge e_taken, e_fall;
- REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
- }
+ probability = INTVAL (XEXP (note, 0));
+ taken = probability > REG_BR_PROB_BASE / 2;
- dbh_insn = REORDER_BLOCK_EFF_HEAD (db);
- cebe_insn = REORDER_BLOCK_EFF_END (ceb);
- dbe_insn = REORDER_BLOCK_EFF_END (db);
+ /* Find the normal taken edge and the normal fallthru edge.
- /* Leave behind any lexical block markers. */
- if (debug_info_level > DINFO_LEVEL_TERSE
- && ceb->index + 1 < db->index)
- {
- rtx insn, last_insn = get_last_insn ();
- insn = NEXT_INSN (ceb->end);
- if (! insn)
- insn = REORDER_BLOCK_OLD_END (ceb);
+ Note, conditional jumps with other side effects may not
+ be fully optimized. In this case it is possible for
+ the conditional jump to branch to the same location as
+ the fallthru path.
+
+ We should probably work to improve optimization of that
+ case; however, it seems silly not to also deal with such
+ problems here if they happen to occur. */
- if (NEXT_INSN (cebe_insn) == 0)
- set_last_insn (cebe_insn);
- for (; insn && insn != db->head/*dbh_insn*/;
- insn = NEXT_INSN (insn))
+ e_taken = e_fall = NULL;
+ for (e = bb->succ; e ; e = e->succ_next)
{
- if (GET_CODE (insn) == NOTE
- && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
- {
- cebe_insn = emit_note_after (NOTE_INSN_BLOCK_BEG, cebe_insn);
- delete_insn (insn);
- }
- if (GET_CODE (insn) == NOTE
- && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
- {
- cebe_insn = emit_note_after (NOTE_INSN_BLOCK_END, cebe_insn);
- delete_insn (insn);
- }
+ if (e->flags & EDGE_FALLTHRU)
+ e_fall = e;
+ if (! (e->flags & EDGE_EH))
+ e_taken = e;
}
- set_last_insn (last_insn);
+
+ next = (taken ? e_taken : e_fall)->dest;
+ }
+
+ /* In the absence of a prediction, disturb things as little as possible
+ by selecting the old "next" block from the list of successors. If
+ there had been a fallthru edge, that will be the one. */
+ if (! next)
+ {
+ for (e = bb->succ; e ; e = e->succ_next)
+ if (e->dest->index == bb->index + 1)
+ {
+ if ((e->flags & EDGE_FALLTHRU)
+ || (e->dest->succ
+ && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
+ next = e->dest;
+ break;
+ }
}
- /* Rechain predicted block. */
- NEXT_INSN (cebe_insn) = dbh_insn;
- PREV_INSN (dbh_insn) = cebe_insn;
+ /* Make sure we didn't select a silly next block. */
+ if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
+ next = NULL;
- REORDER_BLOCK_OLD_END (db) = NEXT_INSN (dbe_insn);
- if (db->index != n_basic_blocks - 1)
- NEXT_INSN (dbe_insn) = 0;
+ /* Recurse on the successors. Unroll the last call, as the normal
+ case is exactly one or two edges, and we can tail recurse. */
+ for (e = bb->succ; e; e = e->succ_next)
+ if (e->dest != EXIT_BLOCK_PTR
+ && ! RBI (e->dest)->visited
+ && e->dest->succ
+ && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
+ {
+ if (next)
+ {
+ prev = make_reorder_chain_1 (next, prev);
+ next = RBI (e->dest)->visited ? NULL : e->dest;
+ }
+ else
+ next = e->dest;
+ }
+ if (next)
+ {
+ bb = next;
+ goto restart;
+ }
- return db;
+ return prev;
}
-/* Reorder blocks starting at block BB. */
+/* Locate or create a label for a given basic block. */
-static void
-make_reorder_chain (bb)
+static rtx
+label_for_bb (bb)
basic_block bb;
{
- edge e;
- basic_block visited_edge = NULL;
- rtx block_end;
- int probability;
+ rtx label = bb->head;
- if (bb == EXIT_BLOCK_PTR)
- return;
-
- /* Find the most probable block. */
- e = bb->succ;
- block_end = bb->end;
- if (GET_CODE (block_end) == JUMP_INSN && condjump_p (block_end))
+ if (GET_CODE (label) != CODE_LABEL)
{
- rtx note = find_reg_note (block_end, REG_BR_PROB, 0);
-
- if (note)
- probability = INTVAL (XEXP (note, 0));
- else
- probability = 0;
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
+ bb->index, RBI (bb)->index);
- if (probability >= REG_BR_PROB_BASE / 2)
- e = bb->succ->succ_next;
+ label = emit_label_before (gen_label_rtx (), label);
+ if (bb->head == RBI (bb)->eff_head)
+ RBI (bb)->eff_head = label;
+ bb->head = label;
}
- /* Add chosen successor to chain and recurse on it. */
- if (e && e->dest != EXIT_BLOCK_PTR
- && e->dest != e->src
- && (! (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
- || (REORDER_BLOCK_FLAGS (e->dest) == REORDER_BLOCK_HEAD)))
- {
- if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
- {
- REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_HEAD;
- REORDER_BLOCK_INDEX (bb) = reorder_index++;
- REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
- }
+ return label;
+}
+
- if (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
- REORDER_BLOCK_FLAGS (e->dest) &= ~REORDER_BLOCK_HEAD;
-
- REORDER_BLOCK_SUCC (bb) = e;
+/* Emit a jump to BB after insn AFTER. */
- visited_edge = e->dest;
+static rtx
+emit_jump_to_block_after (bb, after)
+ basic_block bb;
+ rtx after;
+{
+ rtx jump;
- reorder_last_visited = chain_reorder_blocks (e, bb);
+ if (bb != EXIT_BLOCK_PTR)
+ {
+ rtx label = label_for_bb (bb);
+ jump = emit_jump_insn_after (gen_jump (label), after);
+ JUMP_LABEL (jump) = label;
+ LABEL_NUSES (label) += 1;
- if (e->dest
- && ! (REORDER_BLOCK_FLAGS (e->dest)
- & REORDER_BLOCK_VISITED))
- make_reorder_chain (e->dest);
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
+ bb->index, RBI (bb)->index);
}
else
{
- if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
- {
- REORDER_BLOCK_INDEX (bb) = reorder_index++;
- REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
- }
- }
-
- /* Recurse on the successors. */
- for (e = bb->succ; e; e = e->succ_next)
- {
- if (e->dest && e->dest == EXIT_BLOCK_PTR)
- continue;
+#ifdef HAVE_return
+ if (! HAVE_return)
+ abort ();
+ jump = emit_jump_insn_after (gen_return (), after);
- if (e->dest
- && e->dest != e->src
- && e->dest != visited_edge
- && ! (REORDER_BLOCK_FLAGS (e->dest)
- & REORDER_BLOCK_VISITED))
- {
- reorder_last_visited
- = chain_reorder_blocks (e, reorder_last_visited);
- make_reorder_chain (e->dest);
- }
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Emitting return\n");
+#else
+ abort ();
+#endif
}
+
+ return jump;
}
-/* Fixup jumps and labels after reordering basic blocks. */
+/* Given a reorder chain, rearrange the code to match. */
static void
fixup_reorder_chain ()
{
- int i, j;
- rtx insn;
- int orig_num_blocks = n_basic_blocks;
-
- /* Set the new last insn. */
- {
- int max_val = 0;
- int max_index = 0;
- for (j = 0; j < n_basic_blocks; j++)
- {
- int val = REORDER_BLOCK_INDEX (BASIC_BLOCK (j));
- if (val > max_val)
- {
- max_val = val;
- max_index = j;
- }
- }
- insn = REORDER_BLOCK_EFF_END (BASIC_BLOCK (max_index));
- NEXT_INSN (insn) = NULL_RTX;
- set_last_insn (insn);
- }
+ basic_block bb, last_bb;
+
+ /* First do the bulk reordering -- rechain the blocks without regard to
+ the needed changes to jumps and labels. */
- /* Add jumps and labels to fixup blocks. */
- for (i = 0; i < orig_num_blocks; i++)
+ last_bb = BASIC_BLOCK (0);
+ bb = RBI (last_bb)->next;
+ while (bb)
{
- int need_block = 0;
- basic_block bbi = BASIC_BLOCK (i);
- if (REORDER_BLOCK_ADD_JUMP (bbi))
- {
- rtx label_insn, jump_insn, barrier_insn;
+ rtx last_e = RBI (last_bb)->eff_end;
+ rtx curr_h = RBI (bb)->eff_head;
- if (GET_CODE (REORDER_BLOCK_ADD_JUMP (bbi)->head) == CODE_LABEL)
- label_insn = REORDER_BLOCK_ADD_JUMP (bbi)->head;
- else
+ NEXT_INSN (last_e) = curr_h;
+ PREV_INSN (curr_h) = last_e;
+
+ last_bb = bb;
+ bb = RBI (bb)->next;
+ }
+ NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
+ set_last_insn (RBI (last_bb)->eff_end);
+
+ /* Now add jumps and labels as needed to match the blocks new
+ outgoing edges. */
+
+ for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
+ {
+ edge e_fall, e_taken, e;
+ rtx jump_insn, barrier_insn, bb_end_insn;
+ basic_block nb;
+
+ if (bb->succ == NULL)
+ continue;
+
+ /* Find the old fallthru edge, and another non-EH edge for
+ a taken jump. */
+ e_taken = e_fall = NULL;
+ for (e = bb->succ; e ; e = e->succ_next)
+ if (e->flags & EDGE_FALLTHRU)
+ e_fall = e;
+ else if (! (e->flags & EDGE_EH))
+ e_taken = e;
+
+ bb_end_insn = bb->end;
+ if (GET_CODE (bb_end_insn) == JUMP_INSN)
+ {
+ if (any_uncondjump_p (bb_end_insn))
{
- rtx new_label = gen_label_rtx ();
- label_insn = emit_label_before (new_label,
- REORDER_BLOCK_ADD_JUMP (bbi)->head);
- REORDER_BLOCK_ADD_JUMP (bbi)->head = label_insn;
- }
+ /* If the destination is still not next, nothing to do. */
+ if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
+ continue;
+
+ /* Otherwise, we can remove the jump and cleanup the edge. */
+ tidy_fallthru_edge (e_taken, bb, e_taken->dest);
+ RBI (bb)->eff_end = skip_insns_after_block (bb);
+ RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
- if (GET_CODE (bbi->end) != JUMP_INSN)
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
+ bb->index, RBI (bb)->index);
+ continue;
+ }
+ else if (any_condjump_p (bb_end_insn))
{
- jump_insn = emit_jump_insn_after (gen_jump (label_insn),
- bbi->end);
- bbi->end = jump_insn;
- need_block = 0;
+ /* If the old fallthru is still next, nothing to do. */
+ if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
+ || (RBI (bb)->index == n_basic_blocks - 1
+ && e_fall->dest == EXIT_BLOCK_PTR))
+ continue;
+
+ /* There is one special case: if *neither* block is next,
+ such as happens at the very end of a function, then we'll
+ need to add a new unconditional jump. Choose the taken
+ edge based on known or assumed probability. */
+ if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
+ {
+ rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
+ if (note
+ && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
+ && invert_jump (bb_end_insn,
+ label_for_bb (e_fall->dest), 0))
+ {
+ e_fall->flags &= ~EDGE_FALLTHRU;
+ e_taken->flags |= EDGE_FALLTHRU;
+ e = e_fall, e_fall = e_taken, e_taken = e;
+ }
+ }
+
+ /* Otherwise we can try to invert the jump. This will
+ basically never fail, however, keep up the pretense. */
+ else if (invert_jump (bb_end_insn,
+ label_for_bb (e_fall->dest), 0))
+ {
+ e_fall->flags &= ~EDGE_FALLTHRU;
+ e_taken->flags |= EDGE_FALLTHRU;
+ continue;
+ }
}
+ else if (returnjump_p (bb_end_insn))
+ continue;
else
{
- jump_insn = emit_jump_insn_after (gen_jump (label_insn),
- REORDER_BLOCK_EFF_END (bbi));
- need_block = 1;
+ /* Otherwise we have some switch or computed jump. In the
+ 99% case, there should not have been a fallthru edge. */
+ if (! e_fall)
+ continue;
+#ifdef CASE_DROPS_THROUGH
+ /* Except for VAX. Since we didn't have predication for the
+ tablejump, the fallthru block should not have moved. */
+ if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
+ continue;
+ bb_end_insn = skip_insns_after_block (bb);
+#else
+ abort ();
+#endif
}
+ }
+ else
+ {
+ /* No fallthru implies a noreturn function with EH edges, or
+ something similarly bizarre. In any case, we don't need to
+ do anything. */
+ if (! e_fall)
+ continue;
- JUMP_LABEL (jump_insn) = label_insn;
- ++LABEL_NUSES (label_insn);
- barrier_insn = emit_barrier_after (jump_insn);
+ /* If the fallthru block is still next, nothing to do. */
+ if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
+ || (RBI (bb)->index == n_basic_blocks - 1
+ && e_fall->dest == EXIT_BLOCK_PTR))
+ continue;
- /* Add block for jump. Typically this is when a then is not
- predicted and we are jumping to the moved then block. */
- if (need_block)
+ /* We need a new jump insn. If the block has only one outgoing
+ edge, then we can stuff the new jump insn in directly. */
+ if (bb->succ->succ_next == NULL)
{
- basic_block nb;
-
- VARRAY_GROW (basic_block_info, ++n_basic_blocks);
- create_basic_block (n_basic_blocks - 1, jump_insn,
- jump_insn, NULL);
- nb = BASIC_BLOCK (n_basic_blocks - 1);
- nb->global_live_at_start
- = OBSTACK_ALLOC_REG_SET (function_obstack);
- nb->global_live_at_end
- = OBSTACK_ALLOC_REG_SET (function_obstack);
-
- COPY_REG_SET (nb->global_live_at_start,
- bbi->global_live_at_start);
- COPY_REG_SET (nb->global_live_at_end,
- bbi->global_live_at_start);
- BASIC_BLOCK (nb->index)->local_set = 0;
-
- nb->aux = xcalloc (1, sizeof (struct reorder_block_def));
- REORDER_BLOCK_INDEX (BASIC_BLOCK (n_basic_blocks - 1))
- = REORDER_BLOCK_INDEX (bbi) + 1;
- /* Relink to new block. */
- nb->succ = bbi->succ;
- nb->succ->src = nb;
-
- make_edge (NULL, bbi, nb, 0);
- bbi->succ->succ_next
- = bbi->succ->succ_next->succ_next;
- nb->succ->succ_next = 0;
- /* Fix reorder block index to reflect new block. */
- for (j = 0; j < n_basic_blocks - 1; j++)
- {
- basic_block bbj = BASIC_BLOCK (j);
- if (REORDER_BLOCK_INDEX (bbj)
- >= REORDER_BLOCK_INDEX (bbi) + 1)
- REORDER_BLOCK_INDEX (bbj)++;
- }
+ e_fall->flags &= ~EDGE_FALLTHRU;
+
+ jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
+ bb->end = jump_insn;
+ barrier_insn = emit_barrier_after (jump_insn);
+ RBI (bb)->eff_end = barrier_insn;
+ continue;
}
}
+
+ /* We got here if we need to add a new jump insn in a new block
+ across the edge e_fall. */
+
+ jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
+ barrier_insn = emit_barrier_after (jump_insn);
+
+ VARRAY_GROW (basic_block_info, ++n_basic_blocks);
+ create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
+
+ nb = BASIC_BLOCK (n_basic_blocks - 1);
+ nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ nb->local_set = 0;
+
+ COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
+ COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
+
+ nb->aux = xmalloc (sizeof (struct reorder_block_def));
+ RBI (nb)->eff_head = nb->head;
+ RBI (nb)->eff_end = barrier_insn;
+ RBI (nb)->scope = RBI (bb)->scope;
+ RBI (nb)->index = RBI (bb)->index + 1;
+ RBI (nb)->visited = 1;
+ RBI (nb)->next = RBI (bb)->next;
+ RBI (bb)->next = nb;
+
+ /* Link to new block. */
+ make_edge (NULL, nb, e_fall->dest, 0);
+ redirect_edge_succ (e_fall, nb);
+
+ /* Don't process this new block. */
+ bb = nb;
+
+ /* Fix subsequent reorder block indices to reflect new block. */
+ while ((nb = RBI (nb)->next) != NULL)
+ RBI (nb)->index += 1;
+ }
+
+ /* Put basic_block_info in the new order. */
+ for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
+ {
+ bb->index = RBI (bb)->index;
+ BASIC_BLOCK (bb->index) = bb;
}
}
reverse direction.
2. Count insns in chain, going both directions, and check if equal.
3. Check that get_last_insn () returns the actual end of chain. */
-#ifdef ENABLE_CHECKING
-static void
+
+void
verify_insn_chain ()
{
rtx x,
abort ();
}
}
-#endif
-/* Reorder basic blocks. */
+/* Return the first NOTE_INSN_BASIC_BLOCK note at or after X, or NULL
+   if the end of the insn chain is reached first.  */
+
+static rtx
+get_next_bb_note (x)
+     rtx x;
+{
+  for (; x; x = NEXT_INSN (x))
+    if (NOTE_INSN_BASIC_BLOCK_P (x))
+      return x;
+
+  return NULL;
+}
-void
-reorder_basic_blocks ()
+
+/* Return the first NOTE_INSN_BASIC_BLOCK note at or before X, or NULL
+   if the start of the insn chain is reached first.  */
+
+static rtx
+get_prev_bb_note (x)
+     rtx x;
 {
-  int i, j;
-  struct loops loops_info;
-  int num_loops;
+  while (x)
+    {
+      if (NOTE_INSN_BASIC_BLOCK_P (x))
+	return x;
+      x = PREV_INSN (x);
+    }
+  return NULL;
+}
- if (profile_arc_flag)
- return;
- if (n_basic_blocks <= 1)
- return;
+/* Determine and record the relationships between basic blocks and
+ scopes in scope tree S. */
- /* Exception edges are not currently handled. */
- for (i = 0; i < n_basic_blocks; i++)
+static void
+relate_bbs_with_scopes (s)
+ scope s;
+{
+ scope p;
+ int i, bbi1, bbi2, bbs_spanned;
+ rtx bbnote;
+
+ for (p = s->inner; p; p = p->next)
+ relate_bbs_with_scopes (p);
+
+ bbi1 = bbi2 = -1;
+ bbs_spanned = 0;
+
+ /* If the begin and end notes are both inside the same basic block,
+ or if they are both outside of basic blocks, then we know immediately
+ how they are related. Otherwise, we need to poke around to make the
+ determination. */
+ if (s->bb_beg != s->bb_end)
{
- edge e;
+ if (s->bb_beg && s->bb_end)
+ {
+ /* Both notes are in different bbs. This implies that all the
+ basic blocks spanned by the pair of notes are contained in
+ this scope. */
+ bbi1 = s->bb_beg->index;
+ bbi2 = s->bb_end->index;
+ bbs_spanned = 1;
+ }
+ else if (! s->bb_beg)
+ {
+ /* First note is outside of a bb. If the scope spans more than
+ one basic block, then they all are contained within this
+ scope. Otherwise, this scope is contained within the basic
+ block. */
+ bbnote = get_next_bb_note (s->note_beg);
+ if (! bbnote)
+ abort ();
+ if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
+ {
+ bbs_spanned = 0;
+ s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
+ }
+ else
+ {
+ bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
+ bbi2 = s->bb_end->index;
+ s->bb_end = NULL;
+ bbs_spanned = 1;
+ }
+ }
+ else /* ! s->bb_end */
+ {
+ /* Second note is outside of a bb. If the scope spans more than
+ one basic block, then they all are contained within this
+ scope. Otherwise, this scope is contained within the basic
+ block. */
+ bbnote = get_prev_bb_note (s->note_end);
+ if (! bbnote)
+ abort ();
+ if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
+ {
+ bbs_spanned = 0;
+ s->bb_end = NOTE_BASIC_BLOCK (bbnote);
+ }
+ else
+ {
+ bbi1 = s->bb_beg->index;
+ bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
+ s->bb_beg = NULL;
+ bbs_spanned = 1;
+ }
+ }
+ }
+ else
+ {
+ if (s->bb_beg)
+ /* Both notes are in the same bb, which implies the block
+ contains this scope. */
+ bbs_spanned = 0;
+ else
+ {
+ rtx x1, x2;
+ /* Both notes are outside of any bbs. This implies that all the
+ basic blocks spanned by the pair of notes are contained in
+ this scope.
+ There is a degenerate case to consider. If the notes do not
+ span any basic blocks, then it is an empty scope that can
+ safely be deleted or ignored. Mark these with level = -1. */
+
+ x1 = get_next_bb_note (s->note_beg);
+ x2 = get_prev_bb_note (s->note_end);
+ if (! (x1 && x2))
+ {
+ s->level = -1;
+ bbs_spanned = 0;
+ }
+ else
+ {
+ bbi1 = NOTE_BASIC_BLOCK (x1)->index;
+ bbi2 = NOTE_BASIC_BLOCK (x2)->index;
+ bbs_spanned = 1;
+ }
+ }
+ }
- for (e = BASIC_BLOCK (i)->succ; e && ! (e->flags & EDGE_EH);
- e = e->succ_next)
- continue;
+ /* If the scope spans one or more basic blocks, we record them. We
+ only record the bbs that are immediately contained within this
+ scope. Note that if a scope is contained within a bb, we can tell
+ by checking that bb_beg = bb_end and that they are non-null. */
+ if (bbs_spanned)
+ {
+ int j = 0;
- if (e && (e->flags & EDGE_EH))
- return;
+ s->num_bbs = 0;
+ for (i = bbi1; i <= bbi2; i++)
+ if (! RBI (BASIC_BLOCK (i))->scope)
+ s->num_bbs++;
+
+ s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
+ for (i = bbi1; i <= bbi2; i++)
+ {
+ basic_block curr_bb = BASIC_BLOCK (i);
+ if (! RBI (curr_bb)->scope)
+ {
+ s->bbs[j++] = curr_bb;
+ RBI (curr_bb)->scope = s;
+ }
+ }
}
+ else
+ s->num_bbs = 0;
+}
+
- reorder_index = 0;
+/* Allocate and initialize a new scope structure with scope level LEVEL,
+ and record the NOTE beginning the scope. */
- /* Find natural loops using the CFG. */
- num_loops = flow_loops_find (&loops_info);
+static scope
+make_new_scope (level, note)
+     int level;
+     rtx note;
+{
+  scope s;
+
+  /* xcalloc zeroes the node, so all links and fields start out clear.  */
+  s = xcalloc (1, sizeof (struct scope_def));
+  s->level = level;
+  s->note_beg = note;
+
+  return s;
+}
- /* Dump loop information. */
- flow_loops_dump (&loops_info, rtl_dump_file, 0);
- /* Estimate using heuristics if no profiling info is available. */
- if (! flag_branch_probabilities)
- estimate_probability (&loops_info);
+/* Build a forest representing the scope structure of the function.
+ Return a pointer to a structure describing the forest. */
- reorder_last_visited = BASIC_BLOCK (0);
+static void
+build_scope_forest (forest)
+     scope_forest_info *forest;
+{
+  rtx x;
+  int level, bbi, i;
+  basic_block curr_bb;
+  scope root, curr_scope = 0;
+
+  forest->num_trees = 0;
+  forest->trees = NULL;
+  level = -1;
+  root = NULL;
+  curr_bb = NULL;
+  bbi = 0;
+  /* Walk the whole insn chain once, tracking which basic block each
+     insn lies in and keeping a stack of open scopes via the outer
+     links.  */
+  for (x = get_insns (); x; x = NEXT_INSN (x))
+    {
+      if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
+	curr_bb = BASIC_BLOCK (bbi);
-  for (i = 0; i < n_basic_blocks; i++)
+      if (GET_CODE (x) == NOTE)
+	{
+	  if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
+	    {
+	      /* A BLOCK_BEG note opens a new scope: either a child of
+		 the currently open scope, or, when no tree is open,
+		 the root of a new tree in the forest.  */
+	      if (root)
+		{
+		  scope new_scope;
+		  if (! curr_scope)
+		    abort();
+		  level++;
+		  new_scope = make_new_scope (level, x);
+		  new_scope->outer = curr_scope;
+		  new_scope->next = NULL;
+		  /* Append as the last child of the open scope.  */
+		  if (! curr_scope->inner)
+		    {
+		      curr_scope->inner = new_scope;
+		      curr_scope->inner_last = new_scope;
+		    }
+		  else
+		    {
+		      curr_scope->inner_last->next = new_scope;
+		      curr_scope->inner_last = new_scope;
+		    }
+		  curr_scope = curr_scope->inner_last;
+		}
+	      else
+		{
+		  int ntrees = forest->num_trees;
+		  level++;
+		  curr_scope = make_new_scope (level, x);
+		  root = curr_scope;
+		  /* Grow the forest's root array by one.  */
+		  forest->trees = xrealloc (forest->trees,
+					    sizeof (scope) * (ntrees + 1));
+		  forest->trees[forest->num_trees++] = root;
+		}
+	      /* NULL when the note falls outside any basic block;
+		 relate_bbs_with_scopes sorts that case out later.  */
+	      curr_scope->bb_beg = curr_bb;
+	    }
+	  else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
+	    {
+	      /* A BLOCK_END note closes the innermost open scope;
+		 when LEVEL drops below zero the tree is complete.  */
+	      curr_scope->bb_end = curr_bb;
+	      curr_scope->note_end = x;
+	      level--;
+	      curr_scope = curr_scope->outer;
+	      if (level == -1)
+		root = NULL;
+	    }
+	} /* if note */
+
+      if (curr_bb && curr_bb->end == x)
+	{
+	  curr_bb = NULL;
+	  bbi++;
+	}
+
+    } /* for */
+
+  /* Record which basic blocks each scope immediately contains.  */
+  for (i = 0; i < forest->num_trees; i++)
+    relate_bbs_with_scopes (forest->trees[i]);
+}
+
+
+/* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
+ the insn chain. */
+
+static void
+remove_scope_notes ()
+{
+  rtx x, next;
+  basic_block currbb = NULL;
+
+  /* The old notes become invalid once blocks move, so they are
+     stripped here and recreated by rebuild_scope_notes afterward.  */
+  for (x = get_insns (); x; x = next)
     {
-      basic_block bbi = BASIC_BLOCK (i);
-      bbi->aux = xcalloc (1, sizeof (struct reorder_block_def));
-      *((struct reorder_block_def *)bbi->aux) = rbd_init;
-      REORDER_BLOCK_EFF_END (bbi)
-	= skip_insns_between_block (bbi, REORDER_SKIP_AFTER);
-      if (i == 0)
-	REORDER_BLOCK_EFF_HEAD (bbi) = get_insns ();
-      else
+      next = NEXT_INSN (x);
+      /* Track the basic block containing X so block boundaries can be
+	 fixed up if a scope note is also a block's last insn.  */
+      if (NOTE_INSN_BASIC_BLOCK_P (x))
+	currbb = NOTE_BASIC_BLOCK (x);
+
+      if (GET_CODE (x) == NOTE
+	  && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
+	      || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
 	{
-	  rtx prev_eff_end = REORDER_BLOCK_EFF_END (BASIC_BLOCK (i - 1));
-	  REORDER_BLOCK_EFF_HEAD (bbi) = NEXT_INSN (prev_eff_end);
+	  /* Check if the scope note happens to be the end of a bb.  */
+	  if (currbb && x == currbb->end)
+	    currbb->end = PREV_INSN (x);
+	  /* A scope note must never begin a basic block.  */
+	  if (currbb && x == currbb->head)
+	    abort ();
+
+	  /* Unlink the note from the doubly linked insn chain.  */
+	  if (PREV_INSN (x))
+	    {
+	      NEXT_INSN (PREV_INSN (x)) = next;
+	      PREV_INSN (next) = PREV_INSN (x);
+
+	      NEXT_INSN (x) = NULL;
+	      PREV_INSN (x) = NULL;
+	    }
+	  else
+	    abort ();
 	}
     }
-
-  make_reorder_chain (BASIC_BLOCK (0));
+}
- fixup_reorder_chain ();
-#ifdef ENABLE_CHECKING
- verify_insn_chain ();
-#endif
+/* Insert scope note pairs for a contained scope tree S after insn IP. */
- /* Put basic_block_info in new order. */
- for (i = 0; i < n_basic_blocks - 1; i++)
+static void
+insert_intra_1 (s, ip)
+     scope s;
+     rtx *ip;
+{
+  scope p;
+
+  /* Emit notes only for scopes that have a BLOCK attached.  */
+  if (NOTE_BLOCK (s->note_beg))
+    {
+      *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
+      NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
+    }
+
+  /* Recurse so inner scopes nest between this pair of notes.  */
+  for (p = s->inner; p; p = p->next)
+    insert_intra_1 (p, ip);
+
+  /* Deliberately test note_beg again (not note_end): the END note
+     must be emitted exactly when the BEG note above was, so the pair
+     stays balanced.  */
+  if (NOTE_BLOCK (s->note_beg))
+    {
+      *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
+      NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
+    }
+}
+
+
+/* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
+ scopes that are contained within BB. */
+
+static void
+insert_intra_bb_scope_notes (bb)
+     basic_block bb;
+{
+  scope s = RBI (bb)->scope;
+  scope p;
+  rtx ip;
+
+  /* Nothing to do for a block outside every scope.  */
+  if (! s)
+    return;
+
+  /* Insertion point: just past BB's label, if any, so the notes fall
+     inside the block.  */
+  ip = bb->head;
+  if (GET_CODE (ip) == CODE_LABEL)
+    ip = NEXT_INSN (ip);
+
+  for (p = s->inner; p; p = p->next)
     {
-      for (j = i; i != REORDER_BLOCK_INDEX (BASIC_BLOCK (j)); j++)
-	continue;
+      /* Only child scopes that begin and end inside BB itself are
+	 emitted here; scopes spanning several blocks are handled by
+	 insert_inter_bb_scope_notes.  */
+      if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
+	insert_intra_1 (p, &ip);
+    }
+}
- if (REORDER_BLOCK_INDEX (BASIC_BLOCK (j)) == i
- && i != j)
+
+/* Given two consecutive basic blocks BB1 and BB2 with different scopes,
+ insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
+ notes before BB2 such that the notes are correctly balanced. If BB1 or
+ BB2 is NULL, we are inserting scope notes for the first and last basic
+ blocks, respectively. */
+
+static void
+insert_inter_bb_scope_notes (bb1, bb2)
+     basic_block bb1;
+     basic_block bb2;
+{
+  rtx ip;
+  scope com;
+
+  /* It is possible that a basic block is not contained in any scope.
+     In that case, we either open or close a scope but not both.  */
+  if (bb1 && bb2)
+    {
+      scope s1 = RBI (bb1)->scope;
+      scope s2 = RBI (bb2)->scope;
+      if (! s1 && ! s2)
+	return;
+      if (! s1)
+	bb1 = NULL;
+      else if (! s2)
+	bb2 = NULL;
+    }
+
+  /* Find the common ancestor scope: walk whichever side is deeper up
+     its outer links until both chains meet.  Scopes at or above the
+     common ancestor stay open across the block boundary.  */
+  if (bb1 && bb2)
+    {
+      scope s1 = RBI (bb1)->scope;
+      scope s2 = RBI (bb2)->scope;
+      while (s1 != s2)
 	{
-	  basic_block tempbb;
-	  int temprbi;
-	  int rbi = REORDER_BLOCK_INDEX (BASIC_BLOCK (j));
-
-	  temprbi = BASIC_BLOCK (rbi)->index;
-	  BASIC_BLOCK (rbi)->index = BASIC_BLOCK (j)->index;
-	  BASIC_BLOCK (j)->index = temprbi;
-	  tempbb = BASIC_BLOCK (rbi);
-	  BASIC_BLOCK (rbi) = BASIC_BLOCK (j);
-	  BASIC_BLOCK (j) = tempbb;
+	  if (! (s1 && s2))
+	    abort ();
+	  if (s1->level > s2->level)
+	    s1 = s1->outer;
+	  else if (s2->level > s1->level)
+	    s2 = s2->outer;
+	  else
+	    {
+	      s1 = s1->outer;
+	      s2 = s2->outer;
+	    }
 	}
+      com = s1;
     }
+  else
+    com = NULL;
+
+  /* Close scopes: emit a BLOCK_END note after BB1 for each scope from
+     BB1's innermost up to, but not including, the common ancestor.  */
+  if (bb1)
+    {
+      scope s = RBI (bb1)->scope;
+      ip = RBI (bb1)->eff_end;
+      while (s != com)
+	{
+	  /* Notes are only emitted for scopes with a BLOCK attached.  */
+	  if (NOTE_BLOCK (s->note_beg))
+	    {
+	      ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
+	      NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
+	    }
+	  s = s->outer;
+	}
+    }
+
+  /* Open scopes: emit a BLOCK_BEG note before BB2 for each scope from
+     BB2's innermost up to, but not including, the common ancestor.  */
+  if (bb2)
+    {
+      scope s = RBI (bb2)->scope;
+      ip = bb2->head;
+      while (s != com)
+	{
+	  if (NOTE_BLOCK (s->note_beg))
+	    {
+	      ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
+	      NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
+	    }
+	  s = s->outer;
+	}
+    }
+}
+
+
+/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
+ on the scope forest and the newly reordered basic blocks. */
+
+static void
+rebuild_scope_notes (forest)
+     scope_forest_info *forest;
+{
+  int blk;
+  basic_block last;
+
+  if (forest->num_trees == 0)
+    return;
+
+  /* Open the scopes that enclose the first basic block.  */
+  insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
+
+  /* At each boundary between adjacent blocks, close and reopen scopes
+     as needed, and emit the notes wholly contained in the earlier
+     block.  */
+  for (blk = 0; blk < n_basic_blocks - 1; blk++)
+    {
+      basic_block cur = BASIC_BLOCK (blk);
+      basic_block nxt = BASIC_BLOCK (blk + 1);
+
+      if (RBI (cur)->scope != RBI (nxt)->scope)
+	insert_inter_bb_scope_notes (cur, nxt);
+      insert_intra_bb_scope_notes (cur);
+    }
+
+  /* Close whatever remains open after the final block.  */
+  last = BASIC_BLOCK (n_basic_blocks - 1);
+  insert_inter_bb_scope_notes (last, NULL);
+  insert_intra_bb_scope_notes (last);
+}
+
+
+/* Free the storage associated with the scope tree at S. */
+
+static void
+free_scope_forest_1 (s)
+     scope s;
+{
+  scope child, nxt;
+
+  /* Release the subtrees first, saving each sibling link before the
+     node holding it is freed.  */
+  for (child = s->inner; child; child = nxt)
+    {
+      nxt = child->next;
+      free_scope_forest_1 (child);
+    }
+
+  if (s->bbs)
+    free (s->bbs);
+  free (s);
+}
+
+
+/* Free the storage associated with the scope forest. */
+
+static void
+free_scope_forest (forest)
+     scope_forest_info *forest;
+{
+  int t;
+
+  /* Each tree in the forest is released recursively.  */
+  for (t = 0; t < forest->num_trees; t++)
+    free_scope_forest_1 (forest->trees[t]);
+}
+
+
+/* Visualize the scope forest. */
+
+void
+dump_scope_forest (forest)
+     scope_forest_info *forest;
+{
+  int t;
+
+  if (forest->num_trees == 0)
+    {
+      fprintf (stderr, "\n< Empty scope forest >\n");
+      return;
+    }
+
+  fprintf (stderr, "\n< Scope forest >\n");
+  for (t = 0; t < forest->num_trees; t++)
+    dump_scope_forest_1 (forest->trees[t], 0);
+}
+
+
+/* Recursive portion of dump_scope_forest. */
+
+static void
+dump_scope_forest_1 (s, indent)
+     scope s;
+     int indent;
+{
+  scope p;
+  int i;
+
+  /* When S fits inside a single basic block and sits directly beneath
+     the scope recorded for that block, print the block number as a
+     heading for it.  */
+  if (s->bb_beg != NULL && s->bb_beg == s->bb_end
+      && RBI (s->bb_beg)->scope
+      && RBI (s->bb_beg)->scope->level + 1 == s->level)
+    {
+      fprintf (stderr, "%*s", indent, "");
+      fprintf (stderr, "BB%d:\n", s->bb_beg->index);
+    }
+
+  fprintf (stderr, "%*s", indent, "");
+  fprintf (stderr, "{ level %d (block %p)\n", s->level,
+	   (PTR) NOTE_BLOCK (s->note_beg));
+
+  /* List the basic blocks immediately contained in this scope.  */
+  fprintf (stderr, "%*s%s", indent, "", "bbs:");
+  for (i = 0; i < s->num_bbs; i++)
+    fprintf (stderr, " %d", s->bbs[i]->index);
+  fprintf (stderr, "\n");
+
+  /* Recurse into child scopes with extra indentation.  */
+  for (p = s->inner; p; p = p->next)
+    dump_scope_forest_1 (p, indent + 2);
+
+  fprintf (stderr, "%*s", indent, "");
+  fprintf (stderr, "}\n");
+}
+
+
+/* Reorder basic blocks. The main entry point to this file. */
+
+void
+reorder_basic_blocks ()
+{
+  scope_forest_info forest;
+  int i;
+
+  /* A single basic block cannot be usefully reordered.  */
+  if (n_basic_blocks <= 1)
+    return;
+
+  /* Attach a zeroed reorder_block_def to every block, including the
+     exit block, for use through the RBI accessor.  */
+  for (i = 0; i < n_basic_blocks; i++)
+    BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
+
+  EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
+
+  /* Record the scope structure, then strip the scope notes; they are
+     rebuilt from the forest once the blocks are rearranged.  */
+  build_scope_forest (&forest);
+  remove_scope_notes ();
+
+  record_effective_endpoints ();
+  make_reorder_chain ();
+  fixup_reorder_chain ();
 #ifdef ENABLE_CHECKING
-  verify_flow_info ();
+  verify_insn_chain ();
 #endif
+  rebuild_scope_notes (&forest);
+  free_scope_forest (&forest);
+  reorder_blocks ();
+
   for (i = 0; i < n_basic_blocks; i++)
     free (BASIC_BLOCK (i)->aux);
-  /* Free loop information. */
-  flow_loops_free (&loops_info);
+  free (EXIT_BLOCK_PTR->aux);
+#ifdef ENABLE_CHECKING
+  verify_flow_info ();
+#endif
 }
-