* cfganal.c (flow_reverse_top_sort_order_compute):
Renamed to post_order_compute and additional parameter added which
allows the inclusion of entry and exit blocks into list.
(mark_dfs_back_edges): Fixed comment.
(flow_depth_first_order_compute): Renamed to
pre_and_rev_post_order_compute and additional parameter added which
allows the inclusion of entry and exit blocks into list.
* global.c (set_up_bb_rts_numbers): Call to
flow_reverse_top_sort_order_compute renamed to
post_order_compute.
* var-tracking.c (vt_stack_adjustments): Fixed comment.
(vt_find_locations): Call to
flow_depth_first_order_compute renamed to
pre_and_rev_post_order_compute.
* cfgloop.c (flow_find_loops): Ditto.
* tree-ssa-reassoc.c (init_reassoc): Ditto.
* df.c (df_analyze_1, df_analyze_subcfg): Calls to
flow_reverse_top_sort_order_compute renamed to post_order_compute
and calls to flow_depth_first_order_compute renamed to
pre_and_rev_post_order_compute.
* basic-block.h: Ditto.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@108874 138bc75d-0d04-0410-961f-82ee72b054a4
+2005-12-20 Kenneth Zadeck <zadeck@naturalbridge.com>
+
+ * cfganal.c (flow_reverse_top_sort_order_compute):
+ Renamed to post_order_compute and additional parameter added which
+ allows the inclusion of entry and exit blocks into list.
+ (mark_dfs_back_edges): Fixed comment.
+ (flow_depth_first_order_compute): Renamed to
+ pre_and_rev_post_order_compute and additional parameter added which
+ allows the inclusion of entry and exit blocks into list.
+ * global.c (set_up_bb_rts_numbers): Call to
+ flow_reverse_top_sort_order_compute renamed to
+ post_order_compute.
+ * var-tracking.c (vt_stack_adjustments): Fixed comment.
+ (vt_find_locations): Call to
+ flow_depth_first_order_compute renamed to
+ pre_and_rev_post_order_compute.
+ * cfgloop.c (flow_find_loops): Ditto.
+ * tree-ssa-reassoc.c (init_reassoc): Ditto.
+ * df.c (df_analyze_1, df_analyze_subcfg): Calls to
+ flow_reverse_top_sort_order_compute renamed to post_order_compute
+ and calls to flow_depth_first_order_compute renamed to
+ pre_and_rev_post_order_compute.
+ * basic-block.h: Ditto.
+
+
2005-12-20 Roger Sayle <roger@eyesopen.com>
Paolo Bonzini <bonzini@gnu.org>
extern void redirect_edge_pred (edge, basic_block);
extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block);
extern void clear_bb_flags (void);
-extern void flow_reverse_top_sort_order_compute (int *);
-extern int flow_depth_first_order_compute (int *, int *);
+extern int post_order_compute (int *, bool);
+extern int pre_and_rev_post_order_compute (int *, int *, bool);
extern int dfs_enumerate_from (basic_block, int,
bool (*)(basic_block, void *),
basic_block *, int, void *);
Steven Muchnick
Morgan Kaufmann, 1997
- and heavily borrowed from flow_depth_first_order_compute. */
+ and heavily borrowed from pre_and_rev_post_order_compute. */
bool
mark_dfs_back_edges (void)
return;
}
\f
-/* Compute reverse top sort order. */
+/* Compute reverse top sort order.
+ This is computing a post order numbering of the graph. */
-void
-flow_reverse_top_sort_order_compute (int *rts_order)
+int
+post_order_compute (int *post_order, bool include_entry_exit)
{
edge_iterator *stack;
int sp;
- int postnum = 0;
+ int post_order_num = 0;
sbitmap visited;
+ if (include_entry_exit)
+ post_order[post_order_num++] = EXIT_BLOCK;
+
/* Allocate stack for back-tracking up CFG. */
stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
sp = 0;
time, check its successors. */
stack[sp++] = ei_start (dest->succs);
else
- rts_order[postnum++] = dest->index;
+ post_order[post_order_num++] = dest->index;
}
else
{
if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
- rts_order[postnum++] = src->index;
+ post_order[post_order_num++] = src->index;
if (!ei_one_before_end_p (ei))
ei_next (&stack[sp - 1]);
}
}
+ if (include_entry_exit)
+ post_order[post_order_num++] = ENTRY_BLOCK;
+
free (stack);
sbitmap_free (visited);
+ return post_order_num;
}
/* Compute the depth first search order and store in the array
- DFS_ORDER if nonzero, marking the nodes visited in VISITED. If
- RC_ORDER is nonzero, return the reverse completion number for each
+ PRE_ORDER if nonzero, marking the nodes visited in VISITED. If
+ REV_POST_ORDER is nonzero, return the reverse completion number for each
node. Returns the number of nodes visited. A depth first search
tries to get as far away from the starting point as quickly as
- possible. */
+ possible.
+
+ pre_order is a really a preorder numbering of the graph.
+ rev_post_order is really a reverse postorder numbering of the graph.
+ */
int
-flow_depth_first_order_compute (int *dfs_order, int *rc_order)
+pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
+ bool include_entry_exit)
{
edge_iterator *stack;
int sp;
- int dfsnum = 0;
- int rcnum = n_basic_blocks - 1 - NUM_FIXED_BLOCKS;
+ int pre_order_num = 0;
+ int rev_post_order_num = n_basic_blocks - 1;
sbitmap visited;
/* Allocate stack for back-tracking up CFG. */
stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
sp = 0;
+ if (include_entry_exit)
+ {
+ if (pre_order)
+ pre_order[pre_order_num] = ENTRY_BLOCK;
+ pre_order_num++;
+ if (rev_post_order)
+ rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
+ }
+ else
+ rev_post_order_num -= NUM_FIXED_BLOCKS;
+
/* Allocate bitmap to track nodes that have been visited. */
visited = sbitmap_alloc (last_basic_block);
/* Mark that we have visited the destination. */
SET_BIT (visited, dest->index);
- if (dfs_order)
- dfs_order[dfsnum] = dest->index;
+ if (pre_order)
+ pre_order[pre_order_num] = dest->index;
- dfsnum++;
+ pre_order_num++;
if (EDGE_COUNT (dest->succs) > 0)
/* Since the DEST node has been visited for the first
time, check its successors. */
stack[sp++] = ei_start (dest->succs);
- else if (rc_order)
+ else if (rev_post_order)
/* There are no successors for the DEST node so assign
its reverse completion number. */
- rc_order[rcnum--] = dest->index;
+ rev_post_order[rev_post_order_num--] = dest->index;
}
else
{
if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR
- && rc_order)
+ && rev_post_order)
/* There are no more successors for the SRC node
so assign its reverse completion number. */
- rc_order[rcnum--] = src->index;
+ rev_post_order[rev_post_order_num--] = src->index;
if (!ei_one_before_end_p (ei))
ei_next (&stack[sp - 1]);
free (stack);
sbitmap_free (visited);
- /* The number of nodes visited should be the number of blocks minus
- the entry and exit blocks which are not visited here. */
- gcc_assert (dfsnum == n_basic_blocks - NUM_FIXED_BLOCKS);
+ if (include_entry_exit)
+ {
+ if (pre_order)
+ pre_order[pre_order_num] = EXIT_BLOCK;
+ pre_order_num++;
+ if (rev_post_order)
+ rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
+ /* The number of nodes visited should be the number of blocks. */
+ gcc_assert (pre_order_num == n_basic_blocks);
+ }
+ else
+ /* The number of nodes visited should be the number of blocks minus
+ the entry and exit blocks which are not visited here. */
+ gcc_assert (pre_order_num == n_basic_blocks - NUM_FIXED_BLOCKS);
- return dfsnum;
+ return pre_order_num;
}
/* Compute the depth first search order on the _reverse_ graph and
natural loops will be found before inner natural loops. */
dfs_order = xmalloc (n_basic_blocks * sizeof (int));
rc_order = xmalloc (n_basic_blocks * sizeof (int));
- flow_depth_first_order_compute (dfs_order, rc_order);
+ pre_and_rev_post_order_compute (dfs_order, rc_order, false);
/* Save CFG derived information to avoid recomputing it. */
loops->cfg.dfs_order = dfs_order;
df->rc_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
df->rts_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
- flow_depth_first_order_compute (df->dfs_order, df->rc_order);
- flow_reverse_top_sort_order_compute (df->rts_order);
+ pre_and_rev_post_order_compute (df->dfs_order, df->rc_order, false);
+ post_order_compute (df->rts_order, false);
if (aflags & DF_RD)
{
/* Compute the sets of gens and kills for the defs of each bb. */
df->rc_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
df->rts_order = xmalloc (sizeof (int) * n_basic_blocks - NUM_FIXED_BLOCKS);
- flow_depth_first_order_compute (df->dfs_order, df->rc_order);
- flow_reverse_top_sort_order_compute (df->rts_order);
+ pre_and_rev_post_order_compute (df->dfs_order, df->rc_order, false);
+ post_order_compute (df->rts_order, false);
n_blocks = prune_to_subcfg (df->dfs_order, n_basic_blocks - NUM_FIXED_BLOCKS, blocks);
prune_to_subcfg (df->rc_order, n_basic_blocks - NUM_FIXED_BLOCKS, blocks);
int *rts_order;
rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
- flow_reverse_top_sort_order_compute (rts_order);
+ post_order_compute (rts_order, false);
for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
free (rts_order);
/* Reverse RPO (Reverse Post Order) will give us something where
deeper loops come later. */
- flow_depth_first_order_compute (NULL, bbs);
+ pre_and_rev_post_order_compute (NULL, bbs, false);
bb_rank = xcalloc (last_basic_block + 1, sizeof (unsigned int));
operand_rank = htab_create (511, operand_entry_hash,
/* Compute stack adjustments for all blocks by traversing DFS tree.
Return true when the adjustments on all incoming edges are consistent.
- Heavily borrowed from flow_depth_first_order_compute. */
+ Heavily borrowed from pre_and_rev_post_order_compute. */
static bool
vt_stack_adjustments (void)
so that the data-flow runs faster. */
rc_order = xmalloc ((n_basic_blocks - NUM_FIXED_BLOCKS) * sizeof (int));
bb_order = xmalloc (last_basic_block * sizeof (int));
- flow_depth_first_order_compute (NULL, rc_order);
+ pre_and_rev_post_order_compute (NULL, rc_order, false);
for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
bb_order[rc_order[i]] = i;
free (rc_order);