/* Basic block reordering routines for the GNU compiler.
- Copyright (C) 2000, 2001, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (rtx);
-static void update_unlikely_executed_notes (basic_block);
\f
rtx
unlink_insn_chain (rtx first, rtx last)
locator greater than corresponding block_locators_locs value and smaller
than the following one. Similarly for the other properties. */
static GTY(()) varray_type block_locators_locs;
-static GTY(()) varray_type block_locators_blocks;
+static GTY(()) VEC(tree,gc) *block_locators_blocks;
static GTY(()) varray_type line_locators_locs;
static GTY(()) varray_type line_locators_lines;
static GTY(()) varray_type file_locators_locs;
prologue_locator = epilogue_locator = 0;
VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
- VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
+ block_locators_blocks = VEC_alloc (tree, gc, 32);
VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
{
loc++;
VARRAY_PUSH_INT (block_locators_locs, loc);
- VARRAY_PUSH_TREE (block_locators_blocks, block);
+ VEC_safe_push (tree, gc, block_locators_blocks, block);
last_block = block;
}
if (last_line_number != line_number)
break;
}
}
- return VARRAY_TREE (block_locators_blocks, min);
+ return VEC_index (tree, block_locators_blocks, min);
}
/* Return line number of the statement specified by the locator. */
{
tree this_block;
+ /* Avoid putting scope notes between jump table and its label. */
+ if (JUMP_P (insn)
+ && (GET_CODE (PATTERN (insn)) == ADDR_VEC
+ || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
+ continue;
+
this_block = insn_scope (insn);
/* For sequences compute scope resulting from merging all scopes
of instructions nested inside. */
edge e_fall, e_taken, e;
rtx bb_end_insn;
basic_block nb;
- basic_block old_bb;
edge_iterator ei;
if (EDGE_COUNT (bb->succs) == 0)
}
else
{
-#ifndef CASE_DROPS_THROUGH
/* Otherwise we have some return, switch or computed
jump. In the 99% case, there should not have been a
fallthru edge. */
gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
continue;
-#else
- if (returnjump_p (bb_end_insn) || !e_fall)
- continue;
- /* Except for VAX. Since we didn't have predication for the
- tablejump, the fallthru block should not have moved. */
- if (bb->rbi->next == e_fall->dest)
- continue;
- bb_end_insn = skip_insns_after_block (bb);
-#endif
}
}
else
nb->rbi->next = bb->rbi->next;
bb->rbi->next = nb;
/* Don't process this new block. */
- old_bb = bb;
bb = nb;
      /* Make sure new bb is tagged for correct section (same as
	 fall-thru source, since you cannot fall-through across
	 section boundaries).  */
- BB_COPY_PARTITION (e_fall->src, EDGE_PRED (bb, 0)->src);
+ BB_COPY_PARTITION (e_fall->src, single_pred (bb));
if (flag_reorder_blocks_and_partition
- && targetm.have_named_sections)
- {
- if (BB_PARTITION (EDGE_PRED (bb, 0)->src) == BB_COLD_PARTITION)
- {
- rtx new_note;
- rtx note = BB_HEAD (e_fall->src);
-
- while (!INSN_P (note)
- && note != BB_END (e_fall->src))
- note = NEXT_INSN (note);
-
- new_note = emit_note_before
- (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
- note);
- NOTE_BASIC_BLOCK (new_note) = bb;
- }
- if (JUMP_P (BB_END (bb))
- && !any_condjump_p (BB_END (bb))
- && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
- REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
- (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
- }
+ && targetm.have_named_sections
+ && JUMP_P (BB_END (bb))
+ && !any_condjump_p (BB_END (bb))
+ && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
+ REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
+ (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
}
}
bb->index = index;
BASIC_BLOCK (index) = bb;
- update_unlikely_executed_notes (bb);
-
bb->prev_bb = prev_bb;
prev_bb->next_bb = bb;
}
}
}
\f
-/* Update the basic block number information in any
- NOTE_INSN_UNLIKELY_EXECUTED_CODE notes within the basic block. */
-
-static void
-update_unlikely_executed_notes (basic_block bb)
-{
- rtx cur_insn;
-
- for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
- cur_insn = NEXT_INSN (cur_insn))
- if (NOTE_P (cur_insn)
- && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
- NOTE_BASIC_BLOCK (cur_insn) = bb;
-}
-\f
/* Perform sanity checks on the insn chain.
1. Check that next/prev pointers are consistent in both the forward and
reverse direction.
break;
case NOTE_INSN_REPEATED_LINE_NUMBER:
- case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
+ case NOTE_INSN_SWITCH_TEXT_SECTIONS:
emit_note_copy (insn);
break;
if (bb->global_live_at_start)
{
- new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (®_obstack);
- new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (®_obstack);
+ new_bb->global_live_at_start = ALLOC_REG_SET (®_obstack);
+ new_bb->global_live_at_end = ALLOC_REG_SET (®_obstack);
COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
}
{
basic_block bb;
- /* Our algorithm depends on fact that there are no dead jumptables
- around the code. */
- alloc_rbi_pool ();
-
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
initialize_bb_rbi (bb);
#ifdef ENABLE_CHECKING
verify_insn_chain ();
#endif
-
- free_rbi_pool ();
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
bb->rbi = NULL;
void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
- edge *edges, unsigned n_edges, edge *new_edges,
+ edge *edges, unsigned num_edges, edge *new_edges,
struct loop *base)
{
unsigned i, j;
}
/* Redirect edges. */
- for (j = 0; j < n_edges; j++)
+ for (j = 0; j < num_edges; j++)
new_edges[j] = NULL;
for (i = 0; i < n; i++)
{
FOR_EACH_EDGE (e, ei, new_bb->succs)
{
- for (j = 0; j < n_edges; j++)
+ for (j = 0; j < num_edges; j++)
if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
new_edges[j] = e;