#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
+#include "insn-config.h"
+#include "recog.h"
#include "toplev.h"
-
#include "obstack.h"
+#include "tm_p.h"
/* Store the data structures necessary for depth-first search. */
struct depth_first_search_dsS {
\f
/* Return true if the block has no effect and only forwards control flow to
   its single destination.  */

bool
forwarder_block_p (bb)
     basic_block bb;
{
  rtx insn;

  /* The entry and exit blocks, and any block with zero or multiple
     successors, can never be a simple forwarder.  */
  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
      || !bb->succ || bb->succ->succ_next)
    return false;

  /* Any active insn before the final one gives the block an effect of
     its own.  */
  for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && active_insn_p (insn))
      return false;

  /* The final insn is harmless if it is not a real insn at all, is a
     simple jump (see simplejump_p) to the single destination, or is
     otherwise inactive.  */
  return (!INSN_P (insn)
	  || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
	  || !active_insn_p (insn));
}
/* Return nonzero if we can reach target from src by falling through. */
+
bool
can_fallthru (src, target)
basic_block src, target;
if (src->index + 1 == target->index && !active_insn_p (insn2))
insn2 = next_active_insn (insn2);
+
/* ??? Later we may add code to move jump tables offline. */
return next_active_insn (insn) == insn2;
}
SET_BIT (visited, dest->index);
pre[dest->index] = prenum++;
-
if (dest->succ)
{
/* Since the DEST node has been visited for the first
/* Add fake edges to the function exit for any non constant and non noreturn
calls, volatile inline assembly in the bitmap of blocks specified by
- BLOCKS or to the whole CFG if BLOCKS is zero. Return the nuber of blocks
+ BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
that were split.
The goal is to expose cases in which entering a basic block does not imply
basic_block *bbs;
bool check_last_block = false;
- /* Map bb indicies into basic block pointers since split_block
+ /* Map bb indices into basic block pointers since split_block
will renumber the basic blocks. */
bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
{
for (i = 0; i < n_basic_blocks; i++)
bbs[bb_num++] = BASIC_BLOCK (i);
+
check_last_block = true;
}
else
- {
- EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
- {
- bbs[bb_num++] = BASIC_BLOCK (i);
- if (i == n_basic_blocks - 1)
- check_last_block = true;
- });
- }
+ EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
+ {
+ bbs[bb_num++] = BASIC_BLOCK (i);
+ if (i == n_basic_blocks - 1)
+ check_last_block = true;
+ });
/* In the last basic block, before epilogue generation, there will be
a fallthru edge to EXIT. Special care is required if the last insn
spanning tree in the case that the call doesn't return.
Handle this by adding a dummy instruction in a new last basic block. */
- if (check_last_block
- && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
+ if (check_last_block)
{
- edge e;
- for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
- if (e->dest == EXIT_BLOCK_PTR)
- break;
- insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
- commit_edge_insertions ();
- }
+ basic_block bb = BASIC_BLOCK (n_basic_blocks - 1);
+ rtx insn = bb->end;
+
+ /* Back up past insns that must be kept in the same block as a call. */
+ while (insn != bb->head
+ && keep_with_call_p (insn))
+ insn = PREV_INSN (insn);
+
+ if (need_fake_edge_p (insn))
+ {
+ edge e;
+
+ for (e = bb->succ; e; e = e->succ_next)
+ if (e->dest == EXIT_BLOCK_PTR)
+ break;
+ insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
+ commit_edge_insertions ();
+ }
+ }
/* Now add fake edges to the function exit for any non constant
calls since there is no way that we can determine if they will
if (need_fake_edge_p (insn))
{
edge e;
+ rtx split_at_insn = insn;
+
+ /* Don't split the block between a call and an insn that should
+ remain in the same block as the call. */
+ if (GET_CODE (insn) == CALL_INSN)
+ while (split_at_insn != bb->end
+ && keep_with_call_p (NEXT_INSN (split_at_insn)))
+ split_at_insn = NEXT_INSN (split_at_insn);
+
+ /* The handling above of the final block before the epilogue
+ should be enough to verify that there is no edge to the exit
+ block in CFG already. Calling make_edge in such case would
+ cause us to mark that edge as fake and remove it later. */
- /* The above condition should be enought to verify that there is
- no edge to the exit block in CFG already. Calling make_edge in
- such case would make us to mark that edge as fake and remove it
- later. */
#ifdef ENABLE_CHECKING
- if (insn == bb->end)
+ if (split_at_insn == bb->end)
for (e = bb->succ; e; e = e->succ_next)
if (e->dest == EXIT_BLOCK_PTR)
abort ();
/* Note that the following may create a new basic block
and renumber the existing basic blocks. */
- e = split_block (bb, insn);
+ e = split_block (bb, split_at_insn);
if (e)
blocks_split++;
make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
+
if (insn == bb->head)
break;
}
free (bbs);
return blocks_split;
}
+
/* Find unreachable blocks. An unreachable block will have 0 in
the reachable bit in block->flags. A non-zero value indicates the
block is reachable. */
BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
/* Add our starting points to the worklist. Almost always there will
- be only one. It isn't inconcievable that we might one day directly
+ be only one. It isn't inconceivable that we might one day directly
support Fortran alternate entry points. */
for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
for (e = bb->succ; e; e = e->succ_next)
num_edges++;
}
+
/* Don't forget successors of the entry block. */
for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
num_edges++;
/* Follow successors of the entry block, and register these edges. */
for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
- {
- elist->index_to_edge[num_edges] = e;
- num_edges++;
- }
+ elist->index_to_edge[num_edges++] = e;
for (x = 0; x < n_basic_blocks; x++)
{
/* Follow all successors of blocks, and register these edges. */
for (e = bb->succ; e; e = e->succ_next)
- {
- elist->index_to_edge[num_edges] = e;
- num_edges++;
- }
+ elist->index_to_edge[num_edges++] = e;
}
+
return elist;
}
struct edge_list *elist;
{
int x;
+
fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
elist->num_blocks - 2, elist->num_edges);
fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
continue;
}
+
if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
fprintf (f, "*p* Pred for index %d should be %d not %d\n",
index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
}
}
+
for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
{
pred = e->src->index;
fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
continue;
}
+
if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
fprintf (f, "*p* Pred for index %d should be %d not %d\n",
index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
fprintf (f, "*p* Succ for index %d should be %d not %d\n",
index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
}
+
/* We've verified that all the edges are in the list, now let's make sure
   there are no spurious edges in the list.  */
{
basic_block p = BASIC_BLOCK (pred);
basic_block s = BASIC_BLOCK (succ);
-
int found_edge = 0;
for (e = p->succ; e; e = e->succ_next)
found_edge = 1;
break;
}
+
for (e = s->pred; e; e = e->pred_next)
if (e->src == p)
{
found_edge = 1;
break;
}
+
if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
== EDGE_INDEX_NO_EDGE && found_edge != 0)
fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
BASIC_BLOCK (succ)));
}
+
for (succ = 0; succ < n_basic_blocks; succ++)
{
basic_block p = ENTRY_BLOCK_PTR;
basic_block s = BASIC_BLOCK (succ);
-
int found_edge = 0;
for (e = p->succ; e; e = e->succ_next)
found_edge = 1;
break;
}
+
for (e = s->pred; e; e = e->pred_next)
if (e->src == p)
{
found_edge = 1;
break;
}
+
if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
== EDGE_INDEX_NO_EDGE && found_edge != 0)
fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
BASIC_BLOCK (succ)));
}
+
for (pred = 0; pred < n_basic_blocks; pred++)
{
basic_block p = BASIC_BLOCK (pred);
basic_block s = EXIT_BLOCK_PTR;
-
int found_edge = 0;
for (e = p->succ; e; e = e->succ_next)
found_edge = 1;
break;
}
+
for (e = s->pred; e; e = e->pred_next)
if (e->src == p)
{
found_edge = 1;
break;
}
+
if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
== EDGE_INDEX_NO_EDGE && found_edge != 0)
fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
basic_block pred, succ;
{
int x;
+
for (x = 0; x < NUM_EDGES (edge_list); x++)
- {
- if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
- && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
- return x;
- }
+ if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
+ && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
+ return x;
+
return (EDGE_INDEX_NO_EDGE);
}
for (i = 0; i < num_edges; i++)
fprintf (file, "%d->%d ", edge_list[i]->src->index,
edge_list[i]->dest->index);
+
fputs ("}\n", file);
}
basic_block bb;
{
edge e;
+
for (e = bb->succ; e;)
{
edge tmp = e;
+
e = e->succ_next;
if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
remove_edge (tmp);
connect_infinite_loops_to_exit ()
{
basic_block unvisited_block;
+ struct depth_first_search_dsS dfs_ds;
/* Perform depth-first search in the reverse graph to find nodes
reachable from the exit block. */
- struct depth_first_search_dsS dfs_ds;
-
flow_dfs_compute_reverse_init (&dfs_ds);
flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
if (!unvisited_block)
break;
+
make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
}
flow_dfs_compute_reverse_finish (&dfs_ds);
-
return;
}
\f
/* Compute reverse top sort order */
+
void
flow_reverse_top_sort_order_compute (rts_order)
int *rts_order;
SET_BIT (visited, dest->index);
if (dest->succ)
- {
- /* Since the DEST node has been visited for the first
- time, check its successors. */
- stack[sp++] = dest->succ;
- }
+ /* Since the DEST node has been visited for the first
+ time, check its successors. */
+ stack[sp++] = dest->succ;
else
rts_order[postnum++] = dest->index;
}
SET_BIT (visited, dest->index);
if (dfs_order)
- dfs_order[dfsnum++] = dest->index;
+ dfs_order[dfsnum] = dest->index;
+
+ dfsnum++;
if (dest->succ)
- {
- /* Since the DEST node has been visited for the first
- time, check its successors. */
- stack[sp++] = dest->succ;
- }
- else
- {
- /* There are no successors for the DEST node so assign
- its reverse completion number. */
- if (rc_order)
- rc_order[rcnum--] = dest->index;
- }
+ /* Since the DEST node has been visited for the first
+ time, check its successors. */
+ stack[sp++] = dest->succ;
+ else if (rc_order)
+ /* There are no successors for the DEST node so assign
+ its reverse completion number. */
+ rc_order[rcnum--] = dest->index;
}
else
{
- if (! e->succ_next && src != ENTRY_BLOCK_PTR)
- {
- /* There are no more successors for the SRC node
- so assign its reverse completion number. */
- if (rc_order)
- rc_order[rcnum--] = src->index;
- }
+ if (! e->succ_next && src != ENTRY_BLOCK_PTR
+ && rc_order)
+ /* There are no more successors for the SRC node
+ so assign its reverse completion number. */
+ rc_order[rcnum--] = src->index;
if (e->succ_next)
stack[sp - 1] = e->succ_next;
/* There are some nodes left in the CFG that are unreachable. */
if (dfsnum < n_basic_blocks)
abort ();
+
return dfsnum;
}
/* Node of the depth-first spanning tree used by
   flow_preorder_transversal_compute.  A node's index in the backing
   array identifies the corresponding basic block.  */
struct dfst_node
{
  unsigned nnodes;		/* Number of children currently recorded.  */
  struct dfst_node **node;	/* Children, sized by the block's successor
				   count; NULL when the block has none.  */
  struct dfst_node *up;		/* Parent in the spanning tree.  */
};
+
/* Compute a preorder transversal ordering such that a sub-tree which
   is the source of a cross edge appears before the sub-tree which is
   the destination of the cross edge.  This allows for easy detection
   of all the entry blocks for a loop.

   The ordering is computed by:

   1) Generating a depth first spanning tree.

   2) Walking the resulting tree from right to left.

   The result is stored through POT_ORDER, which must have room for
   one int per basic block.  */

void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;
{
  edge e;
  edge *stack;
  int i;
  int max_successors;
  int sp;
  sbitmap visited;
  struct dfst_node *node;
  struct dfst_node *dfst;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  */
  dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
				       sizeof (struct dfst_node));

  for (i = 0; i < n_basic_blocks; i++)
    {
      /* Size each node's child array by its successor count; a block
	 can gain at most one tree child per CFG successor.  */
      max_successors = 0;
      for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
	max_successors++;

      dfst[i].node
	= (max_successors
	   ? (struct dfst_node **) xcalloc (max_successors,
					    sizeof (struct dfst_node *))
	   : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Add the destination to the preorder tree.  */
	  if (src != ENTRY_BLOCK_PTR)
	    {
	      dfst[src->index].node[dfst[src->index].nnodes++]
		= &dfst[dest->index];
	      dfst[dest->index].up = &dfst[src->index];
	    }

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	}

      else if (e->succ_next)
	stack[sp - 1] = e->succ_next;
      else
	sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder transversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[0];
  pot_order[i++] = 0;

  while (node)
    {
      if (node->nnodes)
	{
	  /* Descend into the rightmost unvisited child; NNODES doubles
	     as a cursor that counts down to zero as children are taken.  */
	  node = node->node[--node->nnodes];
	  /* The block number is the node's offset within DFST.  */
	  pot_order[i++] = node - dfst;
	}
      else
	node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < n_basic_blocks; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}
+
/* Compute the depth first search order on the _reverse_ graph and
store in the array DFS_ORDER, marking the nodes visited in VISITED.
Returns the number of nodes visited.
depth_first_search_ds data;
{
/* Allocate stack for back-tracking up CFG. */
- data->stack =
- (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
- * sizeof (basic_block));
+ data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
+ * sizeof (basic_block));
data->sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
basic_block bb;
{
data->stack[data->sp++] = bb;
- return;
+ SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
}
-/* Continue the depth-first search through the reverse graph starting
- with the block at the stack's top and ending when the stack is
- empty. Visited nodes are marked. Returns an unvisited basic
- block, or NULL if there is none available. */
+/* Continue the depth-first search through the reverse graph starting with the
+ block at the stack's top and ending when the stack is empty. Visited nodes
+ are marked. Returns an unvisited basic block, or NULL if there is none
+ available. */
static basic_block
flow_dfs_compute_reverse_execute (data)
{
bb = data->stack[--data->sp];
- /* Mark that we have visited this node. */
- if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
- {
- SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
-
- /* Perform depth-first search on adjacent vertices. */
- for (e = bb->pred; e; e = e->pred_next)
- flow_dfs_compute_reverse_add_bb (data, e->src);
- }
+ /* Perform depth-first search on adjacent vertices. */
+ for (e = bb->pred; e; e = e->pred_next)
+ if (!TEST_BIT (data->visited_blocks,
+ e->src->index - (INVALID_BLOCK + 1)))
+ flow_dfs_compute_reverse_add_bb (data, e->src);
}
/* Determine if there are unvisited basic blocks. */
- for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
+ for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
if (!TEST_BIT (data->visited_blocks, i))
return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
+
return NULL;
}
{
free (data->stack);
sbitmap_free (data->visited_blocks);
- return;
}