"Profile Guided Code Positioning"
Pettis and Hanson; PLDI '90.
+
+ TODO:
+
+ (1) Consider:
+
+ if (p) goto A; // predict taken
+ foo ();
+ A:
+ if (q) goto B; // predict taken
+ bar ();
+ B:
+ baz ();
+ return;
+
+ We'll currently reorder this as
+
+ if (!p) goto C;
+ A:
+ if (!q) goto D;
+ B:
+ baz ();
+ return;
+ D:
+ bar ();
+ goto B;
+ C:
+ foo ();
+ goto A;
+
+ A better ordering is
+
+ if (!p) goto C;
+ if (!q) goto D;
+ B:
+ baz ();
+ return;
+ C:
+ foo ();
+ if (q) goto B;
+ D:
+ bar ();
+ goto B;
+
+ This requires that we be able to duplicate the jump at A, and
+ adjust the graph traversal such that greedy placement doesn't
+ fix D before C is considered.
+
+ (2) Coordinate with shorten_branches to minimize the number of
+ long branches.
+
+ (3) Invent a method by which sufficiently non-predicted code can
+ be moved to either the end of the section or another section
+ entirely. Some sort of NOTE_INSN note would work fine.
+
+ This completely scroggs all debugging formats, so the user
+ would have to explicitly ask for it.
*/
#include "config.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
+#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
-#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
+#ifndef HAVE_epilogue
+#define HAVE_epilogue 0
+#endif
+
+
/* The contents of the current function definition are allocated
in this obstack, and all are freed at the end of the function.
For top-level functions, this is temporary_obstack.
Separate obstacks are made for nested functions. */
-extern struct obstack *function_obstack;
+extern struct obstack flow_obstack;
/* Structure to hold information about lexical scopes. */
struct scope_def *next;
} *scope;
+
/* Structure to hold information about the scope forest. */
typedef struct
{
scope *trees;
} scope_forest_info;
-
-typedef struct reorder_block_def {
- int flags;
- int index;
- basic_block add_jump;
- edge succ;
+/* Structure to hold information about the blocks during reordering. */
+typedef struct reorder_block_def
+{
rtx eff_head;
rtx eff_end;
scope scope;
+ basic_block next;
+ int index;
+ int visited;
} *reorder_block_def;
-static struct reorder_block_def rbd_init
-= {
- 0, /* flags */
- 0, /* index */
- NULL, /* add_jump */
- NULL, /* succ */
- NULL_RTX, /* eff_head */
- NULL_RTX, /* eff_end */
- NULL /* scope */
-};
-
-
-#define REORDER_BLOCK_HEAD 0x1
-#define REORDER_BLOCK_VISITED 0x2
-
-#define REORDER_BLOCK_FLAGS(bb) \
- ((reorder_block_def) (bb)->aux)->flags
-
-#define REORDER_BLOCK_INDEX(bb) \
- ((reorder_block_def) (bb)->aux)->index
-
-#define REORDER_BLOCK_ADD_JUMP(bb) \
- ((reorder_block_def) (bb)->aux)->add_jump
-
-#define REORDER_BLOCK_SUCC(bb) \
- ((reorder_block_def) (bb)->aux)->succ
-
-#define REORDER_BLOCK_EFF_HEAD(bb) \
- ((reorder_block_def) (bb)->aux)->eff_head
-
-#define REORDER_BLOCK_EFF_END(bb) \
- ((reorder_block_def) (bb)->aux)->eff_end
-
-#define REORDER_BLOCK_SCOPE(bb) \
- ((reorder_block_def) (bb)->aux)->scope
-
-
-static int reorder_index;
-static basic_block reorder_last_visited;
-
-enum reorder_skip_type {REORDER_SKIP_BEFORE, REORDER_SKIP_AFTER,
- REORDER_SKIP_BLOCK_END};
+#define RBI(BB) ((reorder_block_def) (BB)->aux)
/* Local function prototypes. */
-static rtx skip_insns_between_block PARAMS ((basic_block,
- enum reorder_skip_type));
-static basic_block get_common_dest PARAMS ((basic_block, basic_block));
-static basic_block chain_reorder_blocks PARAMS ((edge, basic_block));
-static void make_reorder_chain PARAMS ((basic_block));
+static rtx skip_insns_after_block PARAMS ((basic_block));
+static void record_effective_endpoints PARAMS ((void));
+static void make_reorder_chain PARAMS ((void));
+static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
+static rtx label_for_bb PARAMS ((basic_block));
+static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
static void fixup_reorder_chain PARAMS ((void));
-#ifdef ENABLE_CHECKING
-static void verify_insn_chain PARAMS ((void));
-#endif
static void relate_bbs_with_scopes PARAMS ((scope));
static scope make_new_scope PARAMS ((int, rtx));
static void build_scope_forest PARAMS ((scope_forest_info *));
static void free_scope_forest PARAMS ((scope_forest_info *));
void dump_scope_forest PARAMS ((scope_forest_info *));
static void dump_scope_forest_1 PARAMS ((scope, int));
+static rtx get_next_bb_note PARAMS ((rtx));
+static rtx get_prev_bb_note PARAMS ((rtx));
-/* Skip over insns BEFORE or AFTER BB which are typically associated with
- basic block BB. */
+void verify_insn_chain PARAMS ((void));
+\f
+/* Skip over inter-block insns occurring after BB which are typically
+ associated with BB (e.g., barriers). If there are any such insns,
+ we return the last one. Otherwise, we return the end of BB. */
static rtx
-skip_insns_between_block (bb, skip_type)
+skip_insns_after_block (bb)
basic_block bb;
- enum reorder_skip_type skip_type;
{
- rtx insn, last_insn;
+ rtx insn, last_insn, next_head;
- if (skip_type == REORDER_SKIP_BEFORE)
- {
- if (bb == ENTRY_BLOCK_PTR)
- return 0;
+ next_head = NULL_RTX;
+ if (bb->index + 1 != n_basic_blocks)
+ next_head = BASIC_BLOCK (bb->index + 1)->head;
- last_insn = bb->head;
- for (insn = PREV_INSN (bb->head);
- insn && insn != BASIC_BLOCK (bb->index - 1)->end;
- last_insn = insn, insn = PREV_INSN (insn))
- {
- if (NEXT_INSN (insn) != last_insn)
- break;
-
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_END
- && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
- && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
- continue;
-
- break;
- }
- }
- else
+ for (last_insn = bb->end; (insn = NEXT_INSN (last_insn)); last_insn = insn)
{
- last_insn = bb->end;
+ if (insn == next_head)
+ break;
- if (bb == EXIT_BLOCK_PTR)
- return 0;
-
- for (insn = NEXT_INSN (bb->end);
- insn;
- last_insn = insn, insn = NEXT_INSN (insn))
+ switch (GET_CODE (insn))
{
- if (bb->index + 1 != n_basic_blocks
- && insn == BASIC_BLOCK (bb->index + 1)->head)
- break;
+ case BARRIER:
+ continue;
- if (GET_CODE (insn) == BARRIER
- || GET_CODE (insn) == JUMP_INSN
- || (GET_CODE (insn) == NOTE
- && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
- || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)))
- continue;
+ case NOTE:
+ switch (NOTE_LINE_NUMBER (insn))
+ {
+ case NOTE_INSN_LOOP_END:
+ case NOTE_INSN_BLOCK_END:
+ case NOTE_INSN_DELETED:
+ case NOTE_INSN_DELETED_LABEL:
+ continue;
+
+ default:
+ break;
+ }
+ break;
- if (GET_CODE (insn) == CODE_LABEL
+ case CODE_LABEL:
+ if (NEXT_INSN (insn)
&& GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
&& (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
- || GET_CODE (PATTERN
- (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
+ || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
{
insn = NEXT_INSN (insn);
continue;
}
+ break;
- /* Skip to next non-deleted insn. */
- if (GET_CODE (insn) == NOTE
- && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED
- || NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL))
- continue;
-
+ default:
break;
}
- if (skip_type == REORDER_SKIP_BLOCK_END)
- {
- int found_block_end = 0;
+ break;
+ }
- for (; insn; last_insn = insn, insn = NEXT_INSN (insn))
- {
- if (bb->index + 1 != n_basic_blocks
- && insn == BASIC_BLOCK (bb->index + 1)->head)
- break;
+ return last_insn;
+}
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
- {
- found_block_end = 1;
- continue;
- }
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
- continue;
+/* Locate the effective beginning and end of the insn chain for each
+ block, as defined by skip_insns_after_block above. */
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) >= 0
- && NEXT_INSN (insn)
- && GET_CODE (NEXT_INSN (insn)) == NOTE
- && (NOTE_LINE_NUMBER (NEXT_INSN (insn))
- == NOTE_INSN_BLOCK_END))
- continue;
- break;
- }
+static void
+record_effective_endpoints ()
+{
+ rtx next_insn = get_insns ();
+ int i;
+
+ for (i = 0; i < n_basic_blocks; ++i)
+ {
+ basic_block bb = BASIC_BLOCK (i);
+ rtx end;
- if (! found_block_end)
- last_insn = 0;
- }
+ RBI (bb)->eff_head = next_insn;
+ end = skip_insns_after_block (bb);
+ RBI (bb)->eff_end = end;
+ next_insn = NEXT_INSN (end);
}
-
- return last_insn;
}
-/* Return common destination for blocks BB0 and BB1. */
+/* Compute an ordering for a subgraph beginning with block BB. Record the
+ ordering in RBI()->index and chained through RBI()->next. */
-static basic_block
-get_common_dest (bb0, bb1)
- basic_block bb0, bb1;
+static void
+make_reorder_chain ()
{
- edge e0, e1;
+ basic_block last_block = NULL;
+ basic_block prev = NULL;
+ int nbb_m1 = n_basic_blocks - 1;
+
+  /* If we do not have an epilogue in RTL, we must fallthru to the exit.
+ Force the last block to be at the end. */
+ /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
+ end of the function for stack unwinding purposes. */
+ if (! HAVE_epilogue)
+ {
+ last_block = BASIC_BLOCK (nbb_m1);
+ RBI (last_block)->visited = 1;
+ nbb_m1 -= 1;
+ }
- for (e0 = bb0->succ; e0; e0 = e0->succ_next)
+ /* Loop until we've placed every block. */
+ do
{
- for (e1 = bb1->succ; e1; e1 = e1->succ_next)
+ int i;
+ basic_block next = NULL;
+
+ /* Find the next unplaced block. */
+ /* ??? Get rid of this loop, and track which blocks are not yet
+ placed more directly, so as to avoid the O(N^2) worst case.
+ Perhaps keep a doubly-linked list of all to-be-placed blocks;
+ remove from the list as we place. The head of that list is
+ what we're looking for here. */
+
+ for (i = 0; i <= nbb_m1; ++i)
{
- if (e0->dest == e1->dest)
+ basic_block bb = BASIC_BLOCK (i);
+ if (! RBI (bb)->visited)
{
- return e0->dest;
+ next = bb;
+ break;
}
}
+ if (! next)
+ abort ();
+
+ prev = make_reorder_chain_1 (next, prev);
+ }
+ while (RBI (prev)->index < nbb_m1);
+
+ /* Terminate the chain. */
+ if (! HAVE_epilogue)
+ {
+ RBI (prev)->next = last_block;
+ RBI (last_block)->index = RBI (prev)->index + 1;
+ prev = last_block;
}
- return 0;
+ RBI (prev)->next = NULL;
}
+/* A helper function for make_reorder_chain.
-/* Move the destination block for edge E after chain end block CEB
- Adding jumps and labels is deferred until fixup_reorder_chain. */
+ We do not follow EH edges, or non-fallthru edges to noreturn blocks.
+ These are assumed to be the error condition and we wish to cluster
+ all of them at the very end of the function for the benefit of cache
+ locality for the rest of the function.
+
+ ??? We could do slightly better by noticing earlier that some subgraph
+ has all paths leading to noreturn functions, but for there to be more
+ than one block in such a subgraph is rare. */
static basic_block
-chain_reorder_blocks (e, ceb)
- edge e;
- basic_block ceb;
+make_reorder_chain_1 (bb, prev)
+ basic_block bb;
+ basic_block prev;
{
- basic_block sb = e->src;
- basic_block db = e->dest;
- rtx cebe_insn, dbh_insn, dbe_insn;
- edge ee, last_edge;
- edge e_fallthru, e_jump;
-
- enum cond_types {NO_COND, PREDICT_THEN_WITH_ELSE, PREDICT_ELSE,
- PREDICT_THEN_NO_ELSE, PREDICT_NOT_THEN_NO_ELSE};
- enum cond_types cond_type;
- enum cond_block_types {NO_COND_BLOCK, THEN_BLOCK, ELSE_BLOCK,
- NO_ELSE_BLOCK};
- enum cond_block_types cond_block_type;
-
- if (rtl_dump_file)
- fprintf (rtl_dump_file,
- "Edge from basic block %d to basic block %d last visited %d\n",
- sb->index, db->index, ceb->index);
- cebe_insn = REORDER_BLOCK_EFF_END (ceb);
-
- /* Blocks are in original order. */
- if (sb->index == ceb->index
- && ceb->index + 1 == db->index && NEXT_INSN (cebe_insn))
- return db;
-
- e_fallthru = e_jump = e;
-
- /* Get the type of block and type of condition. */
- cond_type = NO_COND;
- cond_block_type = NO_COND_BLOCK;
- if (GET_CODE (sb->end) == JUMP_INSN && ! simplejump_p (sb->end)
- && condjump_p (sb->end))
+ edge e;
+ basic_block next;
+ rtx note;
+
+ /* Mark this block visited. */
+ if (prev)
{
- if (e->flags & EDGE_FALLTHRU)
- {
- if (e == sb->succ)
- e_jump = sb->succ->succ_next;
- else if (e == sb->succ->succ_next)
- e_jump = sb->succ;
- else
- abort ();
- }
- else
- {
- if (e == sb->succ)
- e_fallthru = sb->succ->succ_next;
- else if (e == sb->succ->succ_next)
- e_fallthru = sb->succ;
- else
- abort ();
- }
+ int new_index;
- if (e->flags & EDGE_FALLTHRU)
- cond_block_type = THEN_BLOCK;
- else if (get_common_dest (e_fallthru->dest, sb))
- cond_block_type = NO_ELSE_BLOCK;
- else
- cond_block_type = ELSE_BLOCK;
+ restart:
+ RBI (prev)->next = bb;
+ new_index = RBI (prev)->index + 1;
+ RBI (bb)->index = new_index;
- if (get_common_dest (e_fallthru->dest, sb))
- {
- if (cond_block_type == THEN_BLOCK)
- {
- if (! (REORDER_BLOCK_FLAGS (e->dest)
- & REORDER_BLOCK_VISITED))
- cond_type = PREDICT_THEN_NO_ELSE;
- else
- cond_type = PREDICT_NOT_THEN_NO_ELSE;
- }
- else if (cond_block_type == NO_ELSE_BLOCK)
- {
- if (! (REORDER_BLOCK_FLAGS (e->dest)
- & REORDER_BLOCK_VISITED))
- cond_type = PREDICT_NOT_THEN_NO_ELSE;
- else
- cond_type = PREDICT_THEN_NO_ELSE;
- }
- }
- else
- {
- if (cond_block_type == THEN_BLOCK)
- {
- if (! (REORDER_BLOCK_FLAGS (e->dest)
- & REORDER_BLOCK_VISITED))
- cond_type = PREDICT_THEN_WITH_ELSE;
- else
- cond_type = PREDICT_ELSE;
- }
- else if (cond_block_type == ELSE_BLOCK
- && e_fallthru->dest != EXIT_BLOCK_PTR)
- {
- if (! (REORDER_BLOCK_FLAGS (e->dest)
- & REORDER_BLOCK_VISITED))
- cond_type = PREDICT_ELSE;
- else
- cond_type = PREDICT_THEN_WITH_ELSE;
- }
- }
- }
-
- if (rtl_dump_file)
- {
- static const char * cond_type_str [] = {"not cond jump", "predict then",
- "predict else",
- "predict then w/o else",
- "predict not then w/o else"};
- static const char * cond_block_type_str [] = {"not then or else block",
- "then block",
- "else block",
- "then w/o else block"};
-
- fprintf (rtl_dump_file, " %s (looking at %s)\n",
- cond_type_str[(int)cond_type],
- cond_block_type_str[(int)cond_block_type]);
+ if (rtl_dump_file && prev->index + 1 != bb->index)
+ fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
+ bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
}
+ else
+ RBI (bb)->index = 0;
+ RBI (bb)->visited = 1;
+ prev = bb;
- /* Reflect that then block will move and we'll jump to it. */
- if (cond_block_type != THEN_BLOCK
- && (cond_type == PREDICT_ELSE
- || cond_type == PREDICT_NOT_THEN_NO_ELSE))
- {
- if (rtl_dump_file)
- fprintf (rtl_dump_file,
- " then jump from block %d to block %d\n",
- sb->index, e_fallthru->dest->index);
+ if (bb->succ == NULL)
+ return prev;
- /* Jump to reordered then block. */
- REORDER_BLOCK_ADD_JUMP (sb) = e_fallthru->dest;
- }
-
- /* Reflect that then block will jump back when we have no else. */
- if (cond_block_type != THEN_BLOCK
- && cond_type == PREDICT_NOT_THEN_NO_ELSE)
+ /* Find the most probable block. */
+
+ next = NULL;
+ if (any_condjump_p (bb->end)
+ && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
{
- basic_block jbb = e_fallthru->dest;
- for (ee = jbb->succ;
- ee && ! (ee->flags & EDGE_FALLTHRU);
- ee = ee->succ_next)
- continue;
+ int taken, probability;
+ edge e_taken, e_fall;
- if (ee && ! (GET_CODE (jbb->end) == JUMP_INSN
- && ! simplejump_p (jbb->end)))
- {
- REORDER_BLOCK_ADD_JUMP (jbb) = ee->dest;
- }
- }
+ probability = INTVAL (XEXP (note, 0));
+ taken = probability > REG_BR_PROB_BASE / 2;
- /* Reflect that else block will jump back. */
- if (cond_block_type == ELSE_BLOCK
- && (cond_type == PREDICT_THEN_WITH_ELSE || cond_type == PREDICT_ELSE))
- {
- last_edge=db->succ;
+ /* Find the normal taken edge and the normal fallthru edge.
+ Note that there may in fact be other edges due to
+ asynchronous_exceptions. */
- if (last_edge
- && last_edge->dest != EXIT_BLOCK_PTR
- && GET_CODE (last_edge->dest->head) == CODE_LABEL
- && ! (GET_CODE (db->end) == JUMP_INSN))
- {
- if (rtl_dump_file)
- fprintf (rtl_dump_file,
- " else jump from block %d to block %d\n",
- db->index, last_edge->dest->index);
+ e_taken = e_fall = NULL;
+ for (e = bb->succ; e ; e = e->succ_next)
+ if (e->flags & EDGE_FALLTHRU)
+ e_fall = e;
+ else if (! (e->flags & EDGE_EH))
+ e_taken = e;
- REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
- }
+ next = (taken ? e_taken : e_fall)->dest;
}
- /* This block's successor has already been reordered. This can happen
- when we reorder a chain starting at a then or else. */
- for (last_edge = db->succ;
- last_edge && ! (last_edge->flags & EDGE_FALLTHRU);
- last_edge = last_edge->succ_next)
- continue;
-
- if (last_edge
- && last_edge->dest != EXIT_BLOCK_PTR
- && (REORDER_BLOCK_FLAGS (last_edge->dest)
- & REORDER_BLOCK_VISITED))
+ /* In the absence of a prediction, disturb things as little as possible
+ by selecting the old "next" block from the list of successors. If
+ there had been a fallthru edge, that will be the one. */
+ if (! next)
{
- if (rtl_dump_file)
- fprintf (rtl_dump_file,
- " end of chain jump from block %d to block %d\n",
- db->index, last_edge->dest->index);
-
- REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
+ for (e = bb->succ; e ; e = e->succ_next)
+ if (e->dest->index == bb->index + 1)
+ {
+ if ((e->flags & EDGE_FALLTHRU)
+ || (e->dest->succ
+ && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
+ next = e->dest;
+ break;
+ }
}
- dbh_insn = REORDER_BLOCK_EFF_HEAD (db);
- cebe_insn = REORDER_BLOCK_EFF_END (ceb);
- dbe_insn = REORDER_BLOCK_EFF_END (db);
+ /* Make sure we didn't select a silly next block. */
+ if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
+ next = NULL;
- /* Rechain predicted block. */
- NEXT_INSN (cebe_insn) = dbh_insn;
- PREV_INSN (dbh_insn) = cebe_insn;
-
- if (db->index != n_basic_blocks - 1)
- NEXT_INSN (dbe_insn) = 0;
+ /* Recurse on the successors. Unroll the last call, as the normal
+ case is exactly one or two edges, and we can tail recurse. */
+ for (e = bb->succ; e; e = e->succ_next)
+ if (e->dest != EXIT_BLOCK_PTR
+ && ! RBI (e->dest)->visited
+ && e->dest->succ
+ && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
+ {
+ if (next)
+ {
+ prev = make_reorder_chain_1 (next, prev);
+ next = RBI (e->dest)->visited ? NULL : e->dest;
+ }
+ else
+ next = e->dest;
+ }
+ if (next)
+ {
+ bb = next;
+ goto restart;
+ }
- return db;
+ return prev;
}
-/* Reorder blocks starting at block BB. */
+/* Locate or create a label for a given basic block. */
-static void
-make_reorder_chain (bb)
+static rtx
+label_for_bb (bb)
basic_block bb;
{
- edge e;
- basic_block visited_edge = NULL;
- rtx block_end;
- int probability;
-
- if (bb == EXIT_BLOCK_PTR)
- return;
+ rtx label = bb->head;
- /* Find the most probable block. */
- e = bb->succ;
- block_end = bb->end;
- if (GET_CODE (block_end) == JUMP_INSN && condjump_p (block_end))
+ if (GET_CODE (label) != CODE_LABEL)
{
- rtx note = find_reg_note (block_end, REG_BR_PROB, 0);
-
- if (note)
- probability = INTVAL (XEXP (note, 0));
- else
- probability = 0;
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
+ bb->index, RBI (bb)->index);
- if (probability >= REG_BR_PROB_BASE / 2)
- e = bb->succ->succ_next;
+ label = emit_label_before (gen_label_rtx (), label);
+ if (bb->head == RBI (bb)->eff_head)
+ RBI (bb)->eff_head = label;
+ bb->head = label;
}
- /* Add chosen successor to chain and recurse on it. */
- if (e && e->dest != EXIT_BLOCK_PTR
- && e->dest != e->src
- && (! (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
- || (REORDER_BLOCK_FLAGS (e->dest) == REORDER_BLOCK_HEAD)))
- {
- if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
- {
- REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_HEAD;
- REORDER_BLOCK_INDEX (bb) = reorder_index++;
- REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
- }
+ return label;
+}
- if (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
- REORDER_BLOCK_FLAGS (e->dest) &= ~REORDER_BLOCK_HEAD;
-
- REORDER_BLOCK_SUCC (bb) = e;
- visited_edge = e->dest;
+/* Emit a jump to BB after insn AFTER. */
- reorder_last_visited = chain_reorder_blocks (e, bb);
+static rtx
+emit_jump_to_block_after (bb, after)
+ basic_block bb;
+ rtx after;
+{
+ rtx jump;
- if (e->dest
- && ! (REORDER_BLOCK_FLAGS (e->dest)
- & REORDER_BLOCK_VISITED))
- make_reorder_chain (e->dest);
- }
- else
+ if (bb != EXIT_BLOCK_PTR)
{
- if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
- {
- REORDER_BLOCK_INDEX (bb) = reorder_index++;
- REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
- }
- }
+ rtx label = label_for_bb (bb);
+ jump = emit_jump_insn_after (gen_jump (label), after);
+ JUMP_LABEL (jump) = label;
+ LABEL_NUSES (label) += 1;
- /* Recurse on the successors. */
- for (e = bb->succ; e; e = e->succ_next)
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
+ bb->index, RBI (bb)->index);
+ }
+ else
{
- if (e->dest && e->dest == EXIT_BLOCK_PTR)
- continue;
+#ifdef HAVE_return
+ if (! HAVE_return)
+ abort ();
+ jump = emit_jump_insn_after (gen_return (), after);
- if (e->dest
- && e->dest != e->src
- && e->dest != visited_edge
- && ! (REORDER_BLOCK_FLAGS (e->dest)
- & REORDER_BLOCK_VISITED))
- {
- reorder_last_visited
- = chain_reorder_blocks (e, reorder_last_visited);
- make_reorder_chain (e->dest);
- }
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Emitting return\n");
+#else
+ abort ();
+#endif
}
+
+ return jump;
}
-/* Fixup jumps and labels after reordering basic blocks. */
+/* Given a reorder chain, rearrange the code to match. */
static void
fixup_reorder_chain ()
{
- int i, j;
- rtx insn;
- int orig_num_blocks = n_basic_blocks;
-
- /* Set the new last insn. */
- {
- int max_val = 0;
- int max_index = 0;
- for (j = 0; j < n_basic_blocks; j++)
- {
- int val = REORDER_BLOCK_INDEX (BASIC_BLOCK (j));
- if (val > max_val)
- {
- max_val = val;
- max_index = j;
- }
- }
- insn = REORDER_BLOCK_EFF_END (BASIC_BLOCK (max_index));
- NEXT_INSN (insn) = NULL_RTX;
- set_last_insn (insn);
- }
+ basic_block bb, last_bb;
+
+ /* First do the bulk reordering -- rechain the blocks without regard to
+ the needed changes to jumps and labels. */
- /* Add jumps and labels to fixup blocks. */
- for (i = 0; i < orig_num_blocks; i++)
+ last_bb = BASIC_BLOCK (0);
+ bb = RBI (last_bb)->next;
+ while (bb)
{
- int need_block = 0;
- basic_block bbi = BASIC_BLOCK (i);
- if (REORDER_BLOCK_ADD_JUMP (bbi))
- {
- rtx label_insn, jump_insn, barrier_insn;
+ rtx last_e = RBI (last_bb)->eff_end;
+ rtx curr_h = RBI (bb)->eff_head;
- if (GET_CODE (REORDER_BLOCK_ADD_JUMP (bbi)->head) == CODE_LABEL)
- label_insn = REORDER_BLOCK_ADD_JUMP (bbi)->head;
- else
+ NEXT_INSN (last_e) = curr_h;
+ PREV_INSN (curr_h) = last_e;
+
+ last_bb = bb;
+ bb = RBI (bb)->next;
+ }
+ NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
+ set_last_insn (RBI (last_bb)->eff_end);
+
+  /* Now add jumps and labels as needed to match the blocks' new
+ outgoing edges. */
+
+ for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
+ {
+ edge e_fall, e_taken, e;
+ rtx jump_insn, barrier_insn, bb_end_insn;
+ basic_block nb;
+
+ if (bb->succ == NULL)
+ continue;
+
+ /* Find the old fallthru edge, and another non-EH edge for
+ a taken jump. */
+ e_taken = e_fall = NULL;
+ for (e = bb->succ; e ; e = e->succ_next)
+ if (e->flags & EDGE_FALLTHRU)
+ e_fall = e;
+ else if (! (e->flags & EDGE_EH))
+ e_taken = e;
+
+ bb_end_insn = bb->end;
+ if (GET_CODE (bb_end_insn) == JUMP_INSN)
+ {
+ if (any_uncondjump_p (bb_end_insn))
{
- rtx new_label = gen_label_rtx ();
- label_insn = emit_label_before (new_label,
- REORDER_BLOCK_ADD_JUMP (bbi)->head);
- REORDER_BLOCK_ADD_JUMP (bbi)->head = label_insn;
- }
+ /* If the destination is still not next, nothing to do. */
+ if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
+ continue;
+
+ /* Otherwise, we can remove the jump and cleanup the edge. */
+ tidy_fallthru_edge (e_taken, bb, e_taken->dest);
+ RBI (bb)->eff_end = skip_insns_after_block (bb);
+ RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
- if (GET_CODE (bbi->end) != JUMP_INSN)
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
+ bb->index, RBI (bb)->index);
+ continue;
+ }
+ else if (any_condjump_p (bb_end_insn))
{
- jump_insn = emit_jump_insn_after (gen_jump (label_insn),
- bbi->end);
- bbi->end = jump_insn;
- need_block = 0;
+ /* If the old fallthru is still next, nothing to do. */
+ if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
+ || (RBI (bb)->index == n_basic_blocks - 1
+ && e_fall->dest == EXIT_BLOCK_PTR))
+ continue;
+
+ /* There is one special case: if *neither* block is next,
+ such as happens at the very end of a function, then we'll
+ need to add a new unconditional jump. Choose the taken
+ edge based on known or assumed probability. */
+ if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
+ {
+ rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
+ if (note
+ && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
+ && invert_jump (bb_end_insn,
+ label_for_bb (e_fall->dest), 0))
+ {
+ e_fall->flags &= ~EDGE_FALLTHRU;
+ e_taken->flags |= EDGE_FALLTHRU;
+ e = e_fall, e_fall = e_taken, e_taken = e;
+ }
+ }
+
+ /* Otherwise we can try to invert the jump. This will
+	      basically never fail; however, keep up the pretense. */
+ else if (invert_jump (bb_end_insn,
+ label_for_bb (e_fall->dest), 0))
+ {
+ e_fall->flags &= ~EDGE_FALLTHRU;
+ e_taken->flags |= EDGE_FALLTHRU;
+ continue;
+ }
}
+ else if (returnjump_p (bb_end_insn))
+ continue;
else
{
- jump_insn = emit_jump_insn_after (gen_jump (label_insn),
- REORDER_BLOCK_EFF_END (bbi));
- need_block = 1;
+ /* Otherwise we have some switch or computed jump. In the
+ 99% case, there should not have been a fallthru edge. */
+ if (! e_fall)
+ continue;
+#ifdef CASE_DROPS_THROUGH
+ /* Except for VAX. Since we didn't have predication for the
+ tablejump, the fallthru block should not have moved. */
+ if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
+ continue;
+ bb_end_insn = skip_insns_after_block (bb);
+#else
+ abort ();
+#endif
}
+ }
+ else
+ {
+ /* No fallthru implies a noreturn function with EH edges, or
+ something similarly bizarre. In any case, we don't need to
+ do anything. */
+ if (! e_fall)
+ continue;
- JUMP_LABEL (jump_insn) = label_insn;
- ++LABEL_NUSES (label_insn);
- barrier_insn = emit_barrier_after (jump_insn);
+ /* If the fallthru block is still next, nothing to do. */
+ if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
+ || (RBI (bb)->index == n_basic_blocks - 1
+ && e_fall->dest == EXIT_BLOCK_PTR))
+ continue;
- /* Add block for jump. Typically this is when a then is not
- predicted and we are jumping to the moved then block. */
- if (need_block)
+ /* We need a new jump insn. If the block has only one outgoing
+ edge, then we can stuff the new jump insn in directly. */
+ if (bb->succ->succ_next == NULL)
{
- basic_block nb;
-
- VARRAY_GROW (basic_block_info, ++n_basic_blocks);
- create_basic_block (n_basic_blocks - 1, jump_insn,
- jump_insn, NULL);
- nb = BASIC_BLOCK (n_basic_blocks - 1);
- nb->global_live_at_start
- = OBSTACK_ALLOC_REG_SET (function_obstack);
- nb->global_live_at_end
- = OBSTACK_ALLOC_REG_SET (function_obstack);
-
- COPY_REG_SET (nb->global_live_at_start,
- bbi->global_live_at_start);
- COPY_REG_SET (nb->global_live_at_end,
- bbi->global_live_at_start);
- BASIC_BLOCK (nb->index)->local_set = 0;
-
- nb->aux = xcalloc (1, sizeof (struct reorder_block_def));
- REORDER_BLOCK_INDEX (nb) = REORDER_BLOCK_INDEX (bbi) + 1;
- /* Relink to new block. */
- nb->succ = bbi->succ;
- nb->succ->src = nb;
-
- make_edge (NULL, bbi, nb, 0);
- bbi->succ->succ_next
- = bbi->succ->succ_next->succ_next;
- nb->succ->succ_next = 0;
- /* Fix reorder block index to reflect new block. */
- for (j = 0; j < n_basic_blocks - 1; j++)
- {
- basic_block bbj = BASIC_BLOCK (j);
- if (REORDER_BLOCK_INDEX (bbj)
- >= REORDER_BLOCK_INDEX (bbi) + 1)
- REORDER_BLOCK_INDEX (bbj)++;
- }
- REORDER_BLOCK_SCOPE (nb) = REORDER_BLOCK_SCOPE (bbi);
- REORDER_BLOCK_EFF_HEAD (nb) = nb->head;
- REORDER_BLOCK_EFF_END (nb) = barrier_insn;
+ e_fall->flags &= ~EDGE_FALLTHRU;
+
+ jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
+ bb->end = jump_insn;
+ barrier_insn = emit_barrier_after (jump_insn);
+ RBI (bb)->eff_end = barrier_insn;
+ continue;
}
- else
- REORDER_BLOCK_EFF_END (bbi) = barrier_insn;
}
+
+ /* We got here if we need to add a new jump insn in a new block
+ across the edge e_fall. */
+
+ jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
+ barrier_insn = emit_barrier_after (jump_insn);
+
+ VARRAY_GROW (basic_block_info, ++n_basic_blocks);
+ create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
+
+ nb = BASIC_BLOCK (n_basic_blocks - 1);
+ nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ nb->local_set = 0;
+
+ COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
+ COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
+
+ nb->aux = xmalloc (sizeof (struct reorder_block_def));
+ RBI (nb)->eff_head = nb->head;
+ RBI (nb)->eff_end = barrier_insn;
+ RBI (nb)->scope = RBI (bb)->scope;
+ RBI (nb)->index = RBI (bb)->index + 1;
+ RBI (nb)->visited = 1;
+ RBI (nb)->next = RBI (bb)->next;
+ RBI (bb)->next = nb;
+
+ /* Link to new block. */
+ make_edge (NULL, nb, e_fall->dest, 0);
+ redirect_edge_succ (e_fall, nb);
+
+ /* Don't process this new block. */
+ bb = nb;
+
+ /* Fix subsequent reorder block indices to reflect new block. */
+ while ((nb = RBI (nb)->next) != NULL)
+ RBI (nb)->index += 1;
+ }
+
+ /* Put basic_block_info in the new order. */
+ for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
+ {
+ bb->index = RBI (bb)->index;
+ BASIC_BLOCK (bb->index) = bb;
}
}
reverse direction.
2. Count insns in chain, going both directions, and check if equal.
3. Check that get_last_insn () returns the actual end of chain. */
-#ifdef ENABLE_CHECKING
-static void
+
+void
verify_insn_chain ()
{
rtx x,
abort ();
}
}
-#endif
static rtx
get_next_bb_note (x)
{
while (x)
{
- if (GET_CODE (x) == NOTE
- && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
+ if (NOTE_INSN_BASIC_BLOCK_P (x))
return x;
x = NEXT_INSN (x);
}
{
while (x)
{
- if (GET_CODE (x) == NOTE
- && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
+ if (NOTE_INSN_BASIC_BLOCK_P (x))
return x;
x = PREV_INSN (x);
}
}
}
-
/* If the scope spans one or more basic blocks, we record them. We
only record the bbs that are immediately contained within this
scope. Note that if a scope is contained within a bb, we can tell
s->num_bbs = 0;
for (i = bbi1; i <= bbi2; i++)
- if (! REORDER_BLOCK_SCOPE (BASIC_BLOCK (i)))
+ if (! RBI (BASIC_BLOCK (i))->scope)
s->num_bbs++;
- s->bbs = xcalloc (s->num_bbs, sizeof (struct basic_block_def));
+ s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
for (i = bbi1; i <= bbi2; i++)
{
basic_block curr_bb = BASIC_BLOCK (i);
- if (! REORDER_BLOCK_SCOPE (curr_bb))
+ if (! RBI (curr_bb)->scope)
{
s->bbs[j++] = curr_bb;
- REORDER_BLOCK_SCOPE (curr_bb) = s;
+ RBI (curr_bb)->scope = s;
}
}
}
scope new_scope = xcalloc (1, sizeof (struct scope_def));
new_scope->level = level;
new_scope->note_beg = note;
- new_scope->note_end = NULL;
- new_scope->bb_beg = NULL;
- new_scope->bb_end = NULL;
- new_scope->inner = NULL;
- new_scope->inner_last = NULL;
- new_scope->outer = NULL;
- new_scope->next = NULL;
- new_scope->num_bbs = 0;
- new_scope->bbs = NULL;
return new_scope;
}
rtx x;
int level, bbi, i;
basic_block curr_bb;
- scope root, curr_scope;
+ scope root, curr_scope = 0;
forest->num_trees = 0;
forest->trees = NULL;
level++;
curr_scope = make_new_scope (level, x);
root = curr_scope;
- if (ntrees == 0)
- forest->trees = xcalloc (1, sizeof (scope));
- else
- forest->trees = xrealloc (forest->trees,
- sizeof (scope) * (ntrees + 1));
+ forest->trees = xrealloc (forest->trees,
+ sizeof (scope) * (ntrees + 1));
forest->trees[forest->num_trees++] = root;
}
curr_scope->bb_beg = curr_bb;
for (x = get_insns (); x; x = next)
{
next = NEXT_INSN (x);
- if (GET_CODE (x) == NOTE
- && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
+ if (NOTE_INSN_BASIC_BLOCK_P (x))
currbb = NOTE_BASIC_BLOCK (x);
if (GET_CODE (x) == NOTE
/* Insert scope note pairs for a contained scope tree S after insn IP. */
+
static void
insert_intra_1 (s, ip)
scope s;
insert_intra_bb_scope_notes (bb)
basic_block bb;
{
- scope s = REORDER_BLOCK_SCOPE (bb);
+ scope s = RBI (bb)->scope;
scope p;
rtx ip;
In that case, we either open or close a scope but not both. */
if (bb1 && bb2)
{
- scope s1 = REORDER_BLOCK_SCOPE (bb1);
- scope s2 = REORDER_BLOCK_SCOPE (bb2);
+ scope s1 = RBI (bb1)->scope;
+ scope s2 = RBI (bb2)->scope;
if (! s1 && ! s2)
return;
if (! s1)
/* Find common ancestor scope. */
if (bb1 && bb2)
{
- scope s1 = REORDER_BLOCK_SCOPE (bb1);
- scope s2 = REORDER_BLOCK_SCOPE (bb2);
+ scope s1 = RBI (bb1)->scope;
+ scope s2 = RBI (bb2)->scope;
while (s1 != s2)
{
if (! (s1 && s2))
/* Close scopes. */
if (bb1)
{
- scope s = REORDER_BLOCK_SCOPE (bb1);
- ip = REORDER_BLOCK_EFF_END (bb1);
+ scope s = RBI (bb1)->scope;
+ ip = RBI (bb1)->eff_end;
while (s != com)
{
if (NOTE_BLOCK (s->note_beg))
/* Open scopes. */
if (bb2)
{
- scope s = REORDER_BLOCK_SCOPE (bb2);
+ scope s = RBI (bb2)->scope;
ip = bb2->head;
while (s != com)
{
{
basic_block bb1 = BASIC_BLOCK (i);
basic_block bb2 = BASIC_BLOCK (i + 1);
- if (REORDER_BLOCK_SCOPE (bb1) != REORDER_BLOCK_SCOPE (bb2))
+ if (RBI (bb1)->scope != RBI (bb2)->scope)
insert_inter_bb_scope_notes (bb1, bb2);
insert_intra_bb_scope_notes (bb1);
}
int i;
if (s->bb_beg != NULL && s->bb_beg == s->bb_end
- && REORDER_BLOCK_SCOPE (s->bb_beg)
- && REORDER_BLOCK_SCOPE (s->bb_beg)->level + 1 == s->level)
+ && RBI (s->bb_beg)->scope
+ && RBI (s->bb_beg)->scope->level + 1 == s->level)
{
fprintf (stderr, "%*s", indent, "");
fprintf (stderr, "BB%d:\n", s->bb_beg->index);
fprintf (stderr, "%*s", indent, "");
fprintf (stderr, "{ level %d (block %p)\n", s->level,
- NOTE_BLOCK (s->note_beg));
+ (PTR) NOTE_BLOCK (s->note_beg));
fprintf (stderr, "%*s%s", indent, "", "bbs:");
for (i = 0; i < s->num_bbs; i++)
}
-/* Reorder basic blocks. */
+/* Reorder basic blocks. The main entry point to this file. */
void
reorder_basic_blocks ()
{
- int i, j;
- struct loops loops_info;
- int num_loops;
scope_forest_info forest;
-
- if (profile_arc_flag)
- return;
+ int i;
if (n_basic_blocks <= 1)
return;
- /* Exception edges are not currently handled. */
- for (i = 0; i < n_basic_blocks; i++)
- {
- edge e;
-
- for (e = BASIC_BLOCK (i)->succ; e && ! (e->flags & EDGE_EH);
- e = e->succ_next)
- continue;
-
- if (e && (e->flags & EDGE_EH))
- return;
- }
-
- reorder_index = 0;
-
- /* Find natural loops using the CFG. */
- num_loops = flow_loops_find (&loops_info);
-
- /* Dump loop information. */
- flow_loops_dump (&loops_info, rtl_dump_file, 0);
-
- reorder_last_visited = BASIC_BLOCK (0);
+ /* We do not currently handle correct re-placement of EH notes.
+ But that does not matter unless we intend to use them. */
+ if (flag_exceptions)
+ for (i = 0; i < n_basic_blocks; i++)
+ {
+ edge e;
+ for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
+ if (e->flags & EDGE_EH)
+ return;
+ }
for (i = 0; i < n_basic_blocks; i++)
- {
- basic_block bbi = BASIC_BLOCK (i);
- bbi->aux = xcalloc (1, sizeof (struct reorder_block_def));
- *((struct reorder_block_def *)bbi->aux) = rbd_init;
- }
+ BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
+
+ EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
build_scope_forest (&forest);
remove_scope_notes ();
- for (i = 0; i < n_basic_blocks; i++)
- {
- basic_block bbi = BASIC_BLOCK (i);
- REORDER_BLOCK_EFF_END (bbi)
- = skip_insns_between_block (bbi, REORDER_SKIP_AFTER);
- if (i == 0)
- REORDER_BLOCK_EFF_HEAD (bbi) = get_insns ();
- else
- {
- rtx prev_eff_end = REORDER_BLOCK_EFF_END (BASIC_BLOCK (i - 1));
- REORDER_BLOCK_EFF_HEAD (bbi) = NEXT_INSN (prev_eff_end);
- }
- }
-
- /* If we've not got epilogue in RTL, we must fallthru to the exit.
- Force the last block to be at the end. */
- /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
- end of the function for stack unwinding purposes. */
-
-#ifndef HAVE_epilogue
-#define HAVE_epilogue 0
-#endif
-
- if (! HAVE_epilogue)
- {
- basic_block last = BASIC_BLOCK (n_basic_blocks - 1);
- REORDER_BLOCK_INDEX (last) = n_basic_blocks - 1;
- REORDER_BLOCK_FLAGS (last) |= REORDER_BLOCK_VISITED;
- }
-
- make_reorder_chain (BASIC_BLOCK (0));
-
+ record_effective_endpoints ();
+ make_reorder_chain ();
fixup_reorder_chain ();
#ifdef ENABLE_CHECKING
verify_insn_chain ();
#endif
- /* Put basic_block_info in new order. */
- for (i = 0; i < n_basic_blocks - 1; i++)
- {
- for (j = i; i != REORDER_BLOCK_INDEX (BASIC_BLOCK (j)); j++)
- continue;
-
- if (REORDER_BLOCK_INDEX (BASIC_BLOCK (j)) == i
- && i != j)
- {
- basic_block tempbb;
- int temprbi;
- int rbi = REORDER_BLOCK_INDEX (BASIC_BLOCK (j));
-
- temprbi = BASIC_BLOCK (rbi)->index;
- BASIC_BLOCK (rbi)->index = BASIC_BLOCK (j)->index;
- BASIC_BLOCK (j)->index = temprbi;
- tempbb = BASIC_BLOCK (rbi);
- BASIC_BLOCK (rbi) = BASIC_BLOCK (j);
- BASIC_BLOCK (j) = tempbb;
- }
- }
-
rebuild_scope_notes (&forest);
free_scope_forest (&forest);
reorder_blocks ();
-#ifdef ENABLE_CHECKING
- verify_flow_info ();
-#endif
-
for (i = 0; i < n_basic_blocks; i++)
free (BASIC_BLOCK (i)->aux);
- /* Free loop information. */
- flow_loops_free (&loops_info);
+ free (EXIT_BLOCK_PTR->aux);
+#ifdef ENABLE_CHECKING
+ verify_flow_info ();
+#endif
}
-