/* Basic block reordering routines for the GNU compiler.
- Copyright (C) 2000, 2001, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
+#include "obstack.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
-#include "obstack.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "alloc-pool.h"
#include "flags.h"
-/* The contents of the current function definition are allocated
- in this obstack, and all are freed at the end of the function. */
-extern struct obstack flow_obstack;
-
/* Holds the interesting trailing notes for the function. */
rtx cfg_layout_function_footer, cfg_layout_function_header;
void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (rtx);
-static void update_unlikely_executed_notes (basic_block);
\f
rtx
unlink_insn_chain (rtx first, rtx last)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
insn = NEXT_INSN (insn))
continue;
- if (!insn)
- abort (); /* No basic blocks at all? */
+ /* No basic blocks at all? */
+ gcc_assert (insn);
+
if (PREV_INSN (insn))
cfg_layout_function_header =
unlink_insn_chain (get_insns (), PREV_INSN (insn));
locator greater than corresponding block_locators_locs value and smaller
than the following one. Similarly for the other properties. */
static GTY(()) varray_type block_locators_locs;
-static GTY(()) varray_type block_locators_blocks;
+static GTY(()) VEC(tree,gc) *block_locators_blocks;
static GTY(()) varray_type line_locators_locs;
static GTY(()) varray_type line_locators_lines;
static GTY(()) varray_type file_locators_locs;
prologue_locator = epilogue_locator = 0;
VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
- VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
+ block_locators_blocks = VEC_alloc (tree, gc, 32);
VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
for (insn = get_insns (); insn; insn = next)
{
+ int active = 0;
+
next = NEXT_INSN (insn);
- if ((active_insn_p (insn)
- && GET_CODE (PATTERN (insn)) != ADDR_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
- || !NEXT_INSN (insn)
+ if (NOTE_P (insn))
+ {
+ gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
+ && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
+ if (NOTE_LINE_NUMBER (insn) > 0)
+ {
+ expanded_location xloc;
+ NOTE_EXPANDED_LOCATION (xloc, insn);
+ line_number = xloc.line;
+ file_name = xloc.file;
+ }
+ }
+ else
+ active = (active_insn_p (insn)
+ && GET_CODE (PATTERN (insn)) != ADDR_VEC
+ && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
+
+ check_block_change (insn, &block);
+
+ if (active
+ || !next
|| (!prologue_locator && file_name))
{
if (last_block != block)
{
loc++;
VARRAY_PUSH_INT (block_locators_locs, loc);
- VARRAY_PUSH_TREE (block_locators_blocks, block);
+ VEC_safe_push (tree, gc, block_locators_blocks, block);
last_block = block;
}
if (last_line_number != line_number)
VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
last_file_name = file_name;
}
+ if (!prologue_locator && file_name)
+ prologue_locator = loc;
+ if (!next)
+ epilogue_locator = loc;
+ if (active)
+ INSN_LOCATOR (insn) = loc;
}
- if (!prologue_locator && file_name)
- prologue_locator = loc;
- if (!NEXT_INSN (insn))
- epilogue_locator = loc;
- if (active_insn_p (insn))
- INSN_LOCATOR (insn) = loc;
- else if (NOTE_P (insn))
- {
- switch (NOTE_LINE_NUMBER (insn))
- {
- case NOTE_INSN_BLOCK_BEG:
- case NOTE_INSN_BLOCK_END:
- abort ();
-
- default:
- if (NOTE_LINE_NUMBER (insn) > 0)
- {
- expanded_location xloc;
- NOTE_EXPANDED_LOCATION (xloc, insn);
- line_number = xloc.line;
- file_name = xloc.file;
- }
- break;
- }
- }
-
- check_block_change (insn, &block);
}
/* Tag the blocks with a depth number so that change_scope can find
}
\f
/* Return scope resulting from combination of S1 and S2. */
-tree
+static tree
choose_inner_scope (tree s1, tree s2)
{
if (!s1)
while (ts1 != ts2)
{
- if (ts1 == NULL || ts2 == NULL)
- abort ();
+ gcc_assert (ts1 && ts2);
if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
ts1 = BLOCK_SUPERCONTEXT (ts1);
else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
break;
}
}
- return VARRAY_TREE (block_locators_blocks, min);
+ return VEC_index (tree, block_locators_blocks, min);
}
/* Return line number of the statement specified by the locator. */
{
tree this_block;
+ /* Avoid putting scope notes between jump table and its label. */
+ if (JUMP_P (insn)
+ && (GET_CODE (PATTERN (insn)) == ADDR_VEC
+ || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
+ continue;
+
this_block = insn_scope (insn);
/* For sequences compute scope resulting from merging all scopes
of instructions nested inside. */
}
}
- if (index != n_basic_blocks)
- abort ();
+ gcc_assert (index == n_basic_blocks);
NEXT_INSN (insn) = cfg_layout_function_footer;
if (cfg_layout_function_footer)
edge e_fall, e_taken, e;
rtx bb_end_insn;
basic_block nb;
- basic_block old_bb;
+ edge_iterator ei;
- if (bb->succ == NULL)
+ if (EDGE_COUNT (bb->succs) == 0)
continue;
/* Find the old fallthru edge, and another non-EH edge for
a taken jump. */
e_taken = e_fall = NULL;
- for (e = bb->succ; e ; e = e->succ_next)
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
if (e->flags & EDGE_FALLTHRU)
e_fall = e;
else if (! (e->flags & EDGE_EH))
{
rtx note;
edge e_fake;
+ bool redirected;
e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
- if (!redirect_jump (BB_END (bb), block_label (bb), 0))
- abort ();
+ redirected = redirect_jump (BB_END (bb),
+ block_label (bb), 0);
+ gcc_assert (redirected);
+
note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
if (note)
{
{
e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
- if (!could_fall_through (e_taken->src, e_taken->dest))
- abort ();
+ gcc_assert (could_fall_through
+ (e_taken->src, e_taken->dest));
#endif
e_taken->flags |= EDGE_FALLTHRU;
update_br_prob_note (bb);
/* If the "jumping" edge is a crossing edge, and the fall
through edge is non-crossing, leave things as they are. */
- else if (e_taken->crossing_edge && !e_fall->crossing_edge)
+ else if ((e_taken->flags & EDGE_CROSSING)
+ && !(e_fall->flags & EDGE_CROSSING))
continue;
/* Otherwise we can try to invert the jump. This will
{
e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
- if (!could_fall_through (e_taken->src, e_taken->dest))
- abort ();
+ gcc_assert (could_fall_through
+ (e_taken->src, e_taken->dest));
#endif
e_taken->flags |= EDGE_FALLTHRU;
update_br_prob_note (bb);
continue;
}
}
- else if (returnjump_p (bb_end_insn))
- continue;
else
{
- /* Otherwise we have some switch or computed jump. In the
- 99% case, there should not have been a fallthru edge. */
- if (! e_fall)
- continue;
-
-#ifdef CASE_DROPS_THROUGH
- /* Except for VAX. Since we didn't have predication for the
- tablejump, the fallthru block should not have moved. */
- if (bb->rbi->next == e_fall->dest)
- continue;
- bb_end_insn = skip_insns_after_block (bb);
-#else
- abort ();
-#endif
+ /* Otherwise we have some return, switch or computed
+ jump. In the 99% case, there should not have been a
+ fallthru edge. */
+ gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
+ continue;
}
}
else
nb->rbi->next = bb->rbi->next;
bb->rbi->next = nb;
/* Don't process this new block. */
- old_bb = bb;
bb = nb;
/* Make sure new bb is tagged for correct section (same as
- fall-thru source). */
- e_fall->src->partition = bb->pred->src->partition;
- if (flag_reorder_blocks_and_partition)
- {
- if (bb->pred->src->partition == COLD_PARTITION)
- {
- rtx new_note;
- rtx note = BB_HEAD (e_fall->src);
-
- while (!INSN_P (note)
- && note != BB_END (e_fall->src))
- note = NEXT_INSN (note);
-
- new_note = emit_note_before
- (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
- note);
- NOTE_BASIC_BLOCK (new_note) = bb;
- }
- if (JUMP_P (BB_END (bb))
- && !any_condjump_p (BB_END (bb))
- && bb->succ->crossing_edge )
- REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
- (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
- }
+ fall-thru source, since you cannot fall through across
+ section boundaries). */
+ BB_COPY_PARTITION (e_fall->src, single_pred (bb));
+ if (flag_reorder_blocks_and_partition
+ && targetm.have_named_sections
+ && JUMP_P (BB_END (bb))
+ && !any_condjump_p (BB_END (bb))
+ && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
+ REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
+ (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
}
}
bb->index = index;
BASIC_BLOCK (index) = bb;
- update_unlikely_executed_notes (bb);
-
bb->prev_bb = prev_bb;
prev_bb->next_bb = bb;
}
FOR_EACH_BB (bb)
{
edge e;
- for (e = bb->succ; e && !(e->flags & EDGE_FALLTHRU); e = e->succ_next)
- continue;
+ edge_iterator ei;
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e->flags & EDGE_FALLTHRU)
+ break;
+
if (e && !can_fallthru (e->src, e->dest))
force_nonfallthru (e);
}
}
\f
-/* Update the basic block number information in any
- NOTE_INSN_UNLIKELY_EXECUTED_CODE notes within the basic block. */
-
-static void
-update_unlikely_executed_notes (basic_block bb)
-{
- rtx cur_insn;
-
- for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
- cur_insn = NEXT_INSN (cur_insn))
- if (NOTE_P (cur_insn)
- && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
- NOTE_BASIC_BLOCK (cur_insn) = bb;
-}
-\f
/* Perform sanity checks on the insn chain.
1. Check that next/prev pointers are consistent in both the forward and
reverse direction.
for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
x != 0;
prevx = x, insn_cnt1++, x = NEXT_INSN (x))
- if (PREV_INSN (x) != prevx)
- abort ();
+ gcc_assert (PREV_INSN (x) == prevx);
- if (prevx != get_last_insn ())
- abort ();
+ gcc_assert (prevx == get_last_insn ());
for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
x != 0;
nextx = x, insn_cnt2++, x = PREV_INSN (x))
- if (NEXT_INSN (x) != nextx)
- abort ();
+ gcc_assert (NEXT_INSN (x) == nextx);
- if (insn_cnt1 != insn_cnt2)
- abort ();
+ gcc_assert (insn_cnt1 == insn_cnt2);
}
\f
/* If we have assembler epilogues, the block falling through to exit must
fixup_fallthru_exit_predecessor (void)
{
edge e;
+ edge_iterator ei;
basic_block bb = NULL;
- /* This transformation is not valid before reload, because we might separate
- a call from the instruction that copies the return value. */
- if (! reload_completed)
- abort ();
+ /* This transformation is not valid before reload, because we might
+ separate a call from the instruction that copies the return
+ value. */
+ gcc_assert (reload_completed);
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
if (e->flags & EDGE_FALLTHRU)
bb = e->src;
in first BB, we may want to copy the block. */
case NOTE_INSN_PROLOGUE_END:
- case NOTE_INSN_LOOP_VTOP:
- case NOTE_INSN_LOOP_CONT:
case NOTE_INSN_LOOP_BEG:
case NOTE_INSN_LOOP_END:
/* Strip down the loop notes - we don't really want to keep
case NOTE_INSN_BASIC_BLOCK:
break;
- /* There is no purpose to duplicate prologue. */
- case NOTE_INSN_BLOCK_BEG:
- case NOTE_INSN_BLOCK_END:
- /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
- reordering is in the progress. */
- case NOTE_INSN_EH_REGION_BEG:
- case NOTE_INSN_EH_REGION_END:
- /* Should never exist at BB duplication time. */
- abort ();
- break;
case NOTE_INSN_REPEATED_LINE_NUMBER:
- case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
+ case NOTE_INSN_SWITCH_TEXT_SECTIONS:
emit_note_copy (insn);
break;
default:
- if (NOTE_LINE_NUMBER (insn) < 0)
- abort ();
+ /* All other notes should have already been eliminated.
+ */
+ gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);
+
/* It is possible that no_line_number is set and the note
won't be emitted. */
emit_note_copy (insn);
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
insn = NEXT_INSN (last);
insn ? get_last_insn () : NULL,
EXIT_BLOCK_PTR->prev_bb);
+ BB_COPY_PARTITION (new_bb, bb);
if (bb->rbi->header)
{
insn = bb->rbi->header;
if (bb->global_live_at_start)
{
- new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
- new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ new_bb->global_live_at_start = ALLOC_REG_SET (®_obstack);
+ new_bb->global_live_at_end = ALLOC_REG_SET (®_obstack);
COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
}
{
basic_block bb;
- /* Our algorithm depends on fact that there are no dead jumptables
- around the code. */
- alloc_rbi_pool ();
-
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
initialize_bb_rbi (bb);
#ifdef ENABLE_CHECKING
verify_insn_chain ();
#endif
-
- free_rbi_pool ();
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
bb->rbi = NULL;
for (i = 0; i < n; i++)
{
/* In case we should redirect abnormal edge during duplication, fail. */
- for (e = bbs[i]->succ; e; e = e->succ_next)
+ edge_iterator ei;
+ FOR_EACH_EDGE (e, ei, bbs[i]->succs)
if ((e->flags & EDGE_ABNORMAL)
&& e->dest->rbi->duplicated)
{
void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
- edge *edges, unsigned n_edges, edge *new_edges,
+ edge *edges, unsigned num_edges, edge *new_edges,
struct loop *base)
{
unsigned i, j;
}
/* Redirect edges. */
- for (j = 0; j < n_edges; j++)
+ for (j = 0; j < num_edges; j++)
new_edges[j] = NULL;
for (i = 0; i < n; i++)
{
+ edge_iterator ei;
new_bb = new_bbs[i];
bb = bbs[i];
- for (e = new_bb->succ; e; e = e->succ_next)
+ FOR_EACH_EDGE (e, ei, new_bb->succs)
{
- for (j = 0; j < n_edges; j++)
+ for (j = 0; j < num_edges; j++)
if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
new_edges[j] = e;