1 /* Control flow graph analysis code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file contains various simple utilities to analyze the CFG. */
26 #include "hard-reg-set.h"
27 #include "basic-block.h"
32 /* Store the data structures necessary for depth-first search. */
33 struct depth_first_search_dsS {
34 /* stack for backtracking during the algorithm */
37 /* number of edges in the stack. That is, positions 0, ..., sp-1
41 /* record of basic blocks already seen by depth-first search */
42 sbitmap visited_blocks;
/* Convenience typedef; note it hides a pointer, per this file's
   existing convention.  Clients manipulate the structure only through
   the flow_dfs_compute_reverse_* routines below.  */
44 typedef struct depth_first_search_dsS *depth_first_search_ds;
/* Forward declarations for the file-local helpers defined below.
   PARAMS is the K&R/ISO prototype-compatibility macro used throughout
   this era of GCC.  */
46 static void flow_dfs_compute_reverse_init
47 PARAMS ((depth_first_search_ds));
48 static void flow_dfs_compute_reverse_add_bb
49 PARAMS ((depth_first_search_ds, basic_block));
50 static basic_block flow_dfs_compute_reverse_execute
51 PARAMS ((depth_first_search_ds));
52 static void flow_dfs_compute_reverse_finish
53 PARAMS ((depth_first_search_ds));
54 static void remove_fake_successors PARAMS ((basic_block));
55 static bool need_fake_edge_p PARAMS ((rtx));
57 /* Return true if the block has no effect and only forwards control flow to
58 its single destination. */
61 forwarder_block_p (bb)
/* Reject the entry/exit blocks and any block with zero or multiple
   successor edges -- a forwarder has exactly one successor.  */
66 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
67 || !bb->succ || bb->succ->succ_next)
/* Scan the body (everything before the final insn); any active insn
   means the block has an effect of its own.  */
70 for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
71 if (INSN_P (insn) && active_insn_p (insn))
/* The final insn may only be inactive or a simple unconditional jump
   for the block to qualify as a forwarder.  */
74 return (!INSN_P (insn)
75 || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
76 || !active_insn_p (insn));
79 /* Return nonzero if we can reach target from src by falling through. */
82 can_fallthru (src, target)
83 basic_block src, target;
86 rtx insn2 = target->head;
/* Fallthru requires TARGET to immediately follow SRC in block index
   order; skip a leading inactive insn in TARGET if so.  */
88 if (src->index + 1 == target->index && !active_insn_p (insn2))
89 insn2 = next_active_insn (insn2);
91 /* ??? Later we may add code to move jump tables offline. */
/* INSN is presumably SRC's last insn (its declaration is not visible
   here) -- fallthru holds when the next active insn after it is
   TARGET's first active insn.  */
92 return next_active_insn (insn) == insn2;
95 /* Mark the back edges in DFS traversal.
96 Return non-zero if a loop (natural or otherwise) is present.
97 Inspired by Depth_First_Search_PP described in:
99 Advanced Compiler Design and Implementation
101 Morgan Kaufmann, 1997
103 and heavily borrowed from flow_depth_first_order_compute. */
106 mark_dfs_back_edges ()
117 /* Allocate the preorder and postorder number arrays. */
118 pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
119 post = (int *) xcalloc (n_basic_blocks, sizeof (int));
121 /* Allocate stack for back-tracking up CFG. */
122 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
125 /* Allocate bitmap to track nodes that have been visited. */
126 visited = sbitmap_alloc (n_basic_blocks);
128 /* None of the nodes in the CFG have been visited yet. */
129 sbitmap_zero (visited);
131 /* Push the first edge on to the stack. */
132 stack[sp++] = ENTRY_BLOCK_PTR->succ;
140 /* Look at the edge on the top of the stack. */
/* Clear any stale back-edge flag; it is re-derived below.  */
144 e->flags &= ~EDGE_DFS_BACK;
146 /* Check if the edge destination has been visited yet. */
147 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
149 /* Mark that we have visited the destination. */
150 SET_BIT (visited, dest->index);
152 pre[dest->index] = prenum++;
155 /* Since the DEST node has been visited for the first
156 time, check its successors. */
157 stack[sp++] = dest->succ;
160 post[dest->index] = postnum++;
/* A back edge is one whose destination is an ancestor on the DFS
   stack: DEST was visited no later than SRC (pre order) and DEST's
   post number has not yet been assigned.  */
164 if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
165 && pre[src->index] >= pre[dest->index]
166 && post[dest->index] == 0)
167 e->flags |= EDGE_DFS_BACK, found = true;
/* All of SRC's successors processed: assign its post number.  */
169 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
170 post[src->index] = postnum++;
/* Advance to SRC's next successor edge.  */
173 stack[sp - 1] = e->succ_next;
182 sbitmap_free (visited);
187 /* Return true if we need to add fake edge to exit.
188 Helper function for the flow_call_edges_add. */
191 need_fake_edge_p (insn)
/* A call needs a fake edge unless it provably returns normally:
   sibling calls, noreturn calls, always-return calls, and const/pure
   calls are excluded.  */
197 if ((GET_CODE (insn) == CALL_INSN
198 && !SIBLING_CALL_P (insn)
199 && !find_reg_note (insn, REG_NORETURN, NULL)
200 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
201 && !CONST_OR_PURE_CALL_P (insn)))
/* Volatile inline assembly (standalone, in a PARALLEL, or an
   ASM_INPUT) may also not fall through, so it needs a fake edge
   too.  */
204 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
205 && MEM_VOLATILE_P (PATTERN (insn)))
206 || (GET_CODE (PATTERN (insn)) == PARALLEL
207 && asm_noperands (insn) != -1
208 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
209 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
212 /* Add fake edges to the function exit for any non constant and non noreturn
213 calls, volatile inline assembly in the bitmap of blocks specified by
214 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
217 The goal is to expose cases in which entering a basic block does not imply
218 that all subsequent instructions must be executed. */
221 flow_call_edges_add (blocks)
225 int blocks_split = 0;
228 bool check_last_block = false;
230 /* Map bb indices into basic block pointers since split_block
231 will renumber the basic blocks. */
233 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
/* BLOCKS == 0 means process the whole CFG (the branch selecting this
   path is not visible in this view).  */
237 for (i = 0; i < n_basic_blocks; i++)
238 bbs[bb_num++] = BASIC_BLOCK (i);
240 check_last_block = true;
/* Otherwise collect only the blocks named in the BLOCKS bitmap.  */
244 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
246 bbs[bb_num++] = BASIC_BLOCK (i);
247 if (i == n_basic_blocks - 1)
248 check_last_block = true;
251 /* In the last basic block, before epilogue generation, there will be
252 a fallthru edge to EXIT. Special care is required if the last insn
253 of the last basic block is a call because make_edge folds duplicate
254 edges, which would result in the fallthru edge also being marked
255 fake, which would result in the fallthru edge being removed by
256 remove_fake_edges, which would result in an invalid CFG.
258 Moreover, we can't elide the outgoing fake edge, since the block
259 profiler needs to take this into account in order to solve the minimal
260 spanning tree in the case that the call doesn't return.
262 Handle this by adding a dummy instruction in a new last basic block. */
264 && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
/* Find the last block's edge to EXIT and queue a no-op USE insn on
   it; committing the insertion splits the edge, creating the dummy
   last block.  */
268 for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
269 if (e->dest == EXIT_BLOCK_PTR)
272 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
273 commit_edge_insertions ();
276 /* Now add fake edges to the function exit for any non constant
277 calls since there is no way that we can determine if they will
280 for (i = 0; i < bb_num; i++)
282 basic_block bb = bbs[i];
/* Walk the block's insns from last to first.  */
286 for (insn = bb->end; ; insn = prev_insn)
288 prev_insn = PREV_INSN (insn);
289 if (need_fake_edge_p (insn))
293 /* The above condition should be enough to verify that there is
294 no edge to the exit block in CFG already. Calling make_edge
295 in such case would make us to mark that edge as fake and
298 #ifdef ENABLE_CHECKING
300 for (e = bb->succ; e; e = e->succ_next)
301 if (e->dest == EXIT_BLOCK_PTR)
305 /* Note that the following may create a new basic block
306 and renumber the existing basic blocks. */
307 e = split_block (bb, insn);
311 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
/* Stop once we've walked back to the block's first insn.  */
314 if (insn == bb->head)
326 /* Find unreachable blocks. An unreachable block will have 0 in
327 the reachable bit in block->flags. A non-zero value indicates the
328 block is reachable. */
331 find_unreachable_blocks ()
/* Worklist of reachable blocks still to be expanded; TOS is the
   stack pointer into it.  */
335 basic_block *tos, *worklist;
338 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
340 /* Clear all the reachability flags. */
342 for (i = 0; i < n; ++i)
343 BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
345 /* Add our starting points to the worklist. Almost always there will
346 be only one. It isn't inconceivable that we might one day directly
347 support Fortran alternate entry points. */
349 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
353 /* Mark the block reachable. */
354 e->dest->flags |= BB_REACHABLE;
357 /* Iterate: find everything reachable from what we've already seen. */
359 while (tos != worklist)
361 basic_block b = *--tos;
/* Push each not-yet-reached successor and flag it reachable so it
   is pushed at most once.  */
363 for (e = b->succ; e; e = e->succ_next)
364 if (!(e->dest->flags & BB_REACHABLE))
367 e->dest->flags |= BB_REACHABLE;
374 /* Functions to access an edge list with a vector representation.
375 Enough data is kept such that given an index number, the
376 pred and succ that edge represents can be determined, or
377 given a pred and a succ, its index number can be returned.
378 This allows algorithms which consume a lot of memory to
379 represent the normally full matrix of edge (pred,succ) with a
380 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
381 wasted space in the client code due to sparse flow graphs. */
383 /* This functions initializes the edge list. Basically the entire
384 flowgraph is processed, and all edges are assigned a number,
385 and the data structure is filled in. */
390 struct edge_list *elist;
396 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
400 /* Determine the number of edges in the flow graph by counting successor
401 edges on each basic block. */
402 for (x = 0; x < n_basic_blocks; x++)
404 basic_block bb = BASIC_BLOCK (x);
406 for (e = bb->succ; e; e = e->succ_next)
410 /* Don't forget successors of the entry block. */
411 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
/* First pass counted the edges; now allocate and fill the vector.  */
414 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
415 elist->num_blocks = block_count;
416 elist->num_edges = num_edges;
417 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
/* NUM_EDGES is presumably reset to 0 on an elided line before being
   reused as the fill index below.  */
421 /* Follow successors of the entry block, and register these edges. */
422 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
423 elist->index_to_edge[num_edges++] = e;
425 for (x = 0; x < n_basic_blocks; x++)
427 basic_block bb = BASIC_BLOCK (x);
429 /* Follow all successors of blocks, and register these edges. */
430 for (e = bb->succ; e; e = e->succ_next)
431 elist->index_to_edge[num_edges++] = e;
437 /* This function free's memory associated with an edge list. */
440 free_edge_list (elist)
441 struct edge_list *elist;
/* Release the index vector; the elist structure itself is presumably
   freed on an elided line.  */
445 free (elist->index_to_edge);
450 /* This function provides debug output showing an edge list. */
453 print_edge_list (f, elist)
455 struct edge_list *elist;
459 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
460 elist->num_blocks - 2, elist->num_edges);
/* Print each edge as "index - edge(pred,succ)", spelling out the
   synthetic entry/exit blocks by name.  */
462 for (x = 0; x < elist->num_edges; x++)
464 fprintf (f, " %-4d - edge(", x);
465 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
466 fprintf (f, "entry,");
468 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
470 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
471 fprintf (f, "exit)\n");
473 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
477 /* This function provides an internal consistency check of an edge list,
478 verifying that all edges are present, and that there are no
482 verify_edge_list (f, elist)
484 struct edge_list *elist;
486 int x, pred, succ, index;
/* Pass 1: every CFG successor edge must have an index in the list,
   and that index must map back to the same pred/succ pair.  */
489 for (x = 0; x < n_basic_blocks; x++)
491 basic_block bb = BASIC_BLOCK (x);
493 for (e = bb->succ; e; e = e->succ_next)
495 pred = e->src->index;
496 succ = e->dest->index;
497 index = EDGE_INDEX (elist, e->src, e->dest);
498 if (index == EDGE_INDEX_NO_EDGE)
500 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
504 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
505 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
506 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
507 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
508 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
509 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
/* Same check for the entry block's successors, which are not covered
   by the per-basic-block loop above.  */
513 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
515 pred = e->src->index;
516 succ = e->dest->index;
517 index = EDGE_INDEX (elist, e->src, e->dest);
518 if (index == EDGE_INDEX_NO_EDGE)
520 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
524 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
525 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
526 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
527 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
528 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
529 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
532 /* We've verified that all the edges are in the list, no lets make sure
533 there are no spurious edges in the list. */
/* Pass 2: for every (pred, succ) pair of ordinary blocks, the list's
   answer must agree with whether the CFG actually has such an edge
   (FOUND_EDGE is presumably set in the elided loop bodies).  */
535 for (pred = 0; pred < n_basic_blocks; pred++)
536 for (succ = 0; succ < n_basic_blocks; succ++)
538 basic_block p = BASIC_BLOCK (pred);
539 basic_block s = BASIC_BLOCK (succ);
542 for (e = p->succ; e; e = e->succ_next)
549 for (e = s->pred; e; e = e->pred_next)
556 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
557 == EDGE_INDEX_NO_EDGE && found_edge != 0)
558 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
560 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
561 != EDGE_INDEX_NO_EDGE && found_edge == 0)
562 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
563 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
564 BASIC_BLOCK (succ)));
/* Same agreement check for (entry, succ) pairs.  */
567 for (succ = 0; succ < n_basic_blocks; succ++)
569 basic_block p = ENTRY_BLOCK_PTR;
570 basic_block s = BASIC_BLOCK (succ);
573 for (e = p->succ; e; e = e->succ_next)
580 for (e = s->pred; e; e = e->pred_next)
587 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
588 == EDGE_INDEX_NO_EDGE && found_edge != 0)
589 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
591 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
592 != EDGE_INDEX_NO_EDGE && found_edge == 0)
593 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
594 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
595 BASIC_BLOCK (succ)));
/* And for (pred, exit) pairs.  */
598 for (pred = 0; pred < n_basic_blocks; pred++)
600 basic_block p = BASIC_BLOCK (pred);
601 basic_block s = EXIT_BLOCK_PTR;
604 for (e = p->succ; e; e = e->succ_next)
611 for (e = s->pred; e; e = e->pred_next)
618 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
619 == EDGE_INDEX_NO_EDGE && found_edge != 0)
620 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
622 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
623 != EDGE_INDEX_NO_EDGE && found_edge == 0)
624 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
625 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
630 /* This routine will determine what, if any, edge there is between
631 a specified predecessor and successor. */
634 find_edge_index (edge_list, pred, succ)
635 struct edge_list *edge_list;
636 basic_block pred, succ;
/* Linear scan of the edge vector; returns the matching index, or
   EDGE_INDEX_NO_EDGE when no (pred, succ) edge is recorded.  */
640 for (x = 0; x < NUM_EDGES (edge_list); x++)
641 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
642 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
645 return (EDGE_INDEX_NO_EDGE);
648 /* Dump the list of basic blocks in the bitmap NODES. */
651 flow_nodes_print (str, nodes, file)
/* Prints "STR { n1 n2 ... " -- one index per set bit in NODES.  */
661 fprintf (file, "%s { ", str);
662 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
666 /* Dump the list of edges in the array EDGE_LIST. */
669 flow_edge_list_print (str, edge_list, num_edges, file)
671 const edge *edge_list;
/* Prints "STR { src->dest src->dest ... " for the first NUM_EDGES
   entries of EDGE_LIST.  */
680 fprintf (file, "%s { ", str);
681 for (i = 0; i < num_edges; i++)
682 fprintf (file, "%d->%d ", edge_list[i]->src->index,
683 edge_list[i]->dest->index);
689 /* This routine will remove any fake successor edges for a basic block.
690 When the edge is removed, it is also removed from whatever predecessor
694 remove_fake_successors (bb)
/* Iterate manually (no e = e->succ_next in the for header) because
   removing an edge invalidates it; the elided body presumably saves
   the next pointer before removal.  */
699 for (e = bb->succ; e;)
704 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
709 /* This routine will remove all fake edges from the flow graph. If
710 we remove all fake successors, it will automatically remove all
711 fake predecessors. */
718 for (x = 0; x < n_basic_blocks; x++)
719 remove_fake_successors (BASIC_BLOCK (x));
721 /* We've handled all successors except the entry block's. */
722 remove_fake_successors (ENTRY_BLOCK_PTR);
725 /* This function will add a fake edge between any block which has no
726 successors, and the exit block. Some data flow equations require these
730 add_noreturn_fake_exit_edges ()
/* A block with a NULL successor list ends in something that never
   returns; give it a FAKE edge to exit so dataflow converges.  */
734 for (x = 0; x < n_basic_blocks; x++)
735 if (BASIC_BLOCK (x)->succ == NULL)
736 make_single_succ_edge (BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
739 /* This function adds a fake edge between any infinite loops to the
740 exit block. Some optimizations require a path from each node to
743 See also Morgan, Figure 3.10, pp. 82-83.
745 The current implementation is ugly, not attempting to minimize the
746 number of inserted fake edges. To reduce the number of fake edges
747 to insert, add fake edges from _innermost_ loops containing only
748 nodes not reachable from the exit block. */
751 connect_infinite_loops_to_exit ()
753 basic_block unvisited_block;
754 struct depth_first_search_dsS dfs_ds;
756 /* Perform depth-first search in the reverse graph to find nodes
757 reachable from the exit block. */
758 flow_dfs_compute_reverse_init (&dfs_ds);
759 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
761 /* Repeatedly add fake edges, updating the unreachable nodes. */
/* Each iteration finds one block that cannot reach exit, connects it
   to exit with a FAKE edge, and resumes the reverse DFS from it;
   stop when every block can reach exit.  */
764 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
765 if (!unvisited_block)
768 make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
769 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
772 flow_dfs_compute_reverse_finish (&dfs_ds);
776 /* Compute reverse top sort order */
779 flow_reverse_top_sort_order_compute (rts_order)
787 /* Allocate stack for back-tracking up CFG. */
788 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
791 /* Allocate bitmap to track nodes that have been visited. */
792 visited = sbitmap_alloc (n_basic_blocks);
794 /* None of the nodes in the CFG have been visited yet. */
795 sbitmap_zero (visited);
797 /* Push the first edge on to the stack. */
798 stack[sp++] = ENTRY_BLOCK_PTR->succ;
806 /* Look at the edge on the top of the stack. */
811 /* Check if the edge destination has been visited yet. */
812 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
814 /* Mark that we have visited the destination. */
815 SET_BIT (visited, dest->index);
818 /* Since the DEST node has been visited for the first
819 time, check its successors. */
820 stack[sp++] = dest->succ;
/* DEST has no successors: record it immediately in postorder.  */
822 rts_order[postnum++] = dest->index;
/* All of SRC's successors done: record SRC in postorder.  */
826 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
827 rts_order[postnum++] = src->index;
830 stack[sp - 1] = e->succ_next;
837 sbitmap_free (visited);
840 /* Compute the depth first search order and store in the array
841 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
842 RC_ORDER is non-zero, return the reverse completion number for each
843 node. Returns the number of nodes visited. A depth first search
844 tries to get as far away from the starting point as quickly as
848 flow_depth_first_order_compute (dfs_order, rc_order)
/* Reverse completion numbers are assigned from the top down.  */
855 int rcnum = n_basic_blocks - 1;
858 /* Allocate stack for back-tracking up CFG. */
859 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
862 /* Allocate bitmap to track nodes that have been visited. */
863 visited = sbitmap_alloc (n_basic_blocks);
865 /* None of the nodes in the CFG have been visited yet. */
866 sbitmap_zero (visited);
868 /* Push the first edge on to the stack. */
869 stack[sp++] = ENTRY_BLOCK_PTR->succ;
877 /* Look at the edge on the top of the stack. */
882 /* Check if the edge destination has been visited yet. */
883 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
885 /* Mark that we have visited the destination. */
886 SET_BIT (visited, dest->index);
/* Record DEST's preorder (discovery) position.  */
889 dfs_order[dfsnum] = dest->index;
894 /* Since the DEST node has been visited for the first
895 time, check its successors. */
896 stack[sp++] = dest->succ;
898 /* There are no successors for the DEST node so assign
899 its reverse completion number. */
900 rc_order[rcnum--] = dest->index;
904 if (! e->succ_next && src != ENTRY_BLOCK_PTR
906 /* There are no more successors for the SRC node
907 so assign its reverse completion number. */
908 rc_order[rcnum--] = src->index;
911 stack[sp - 1] = e->succ_next;
918 sbitmap_free (visited);
920 /* The number of nodes visited should not be greater than
922 if (dfsnum > n_basic_blocks)
925 /* There are some nodes left in the CFG that are unreachable. */
926 if (dfsnum < n_basic_blocks)
/* Node of the depth-first spanning tree built below: child-pointer
   array plus an up (parent) link.  The struct's opening line is not
   visible in this view.  */
935 struct dfst_node **node;
936 struct dfst_node *up;
939 /* Compute a preorder transversal ordering such that a sub-tree which
940 is the source of a cross edge appears before the sub-tree which is
941 the destination of the cross edge. This allows for easy detection
942 of all the entry blocks for a loop.
944 The ordering is compute by:
946 1) Generating a depth first spanning tree.
948 2) Walking the resulting tree from right to left. */
951 flow_preorder_transversal_compute (pot_order)
960 struct dfst_node *node;
961 struct dfst_node *dfst;
963 /* Allocate stack for back-tracking up CFG. */
964 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
967 /* Allocate the tree. */
968 dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
969 sizeof (struct dfst_node));
/* Size each tree node's child array by its block's successor count
   (MAX_SUCCESSORS is presumably computed in the elided loop body).  */
971 for (i = 0; i < n_basic_blocks; i++)
974 for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
979 ? (struct dfst_node **) xcalloc (max_successors,
980 sizeof (struct dfst_node *))
984 /* Allocate bitmap to track nodes that have been visited. */
985 visited = sbitmap_alloc (n_basic_blocks);
987 /* None of the nodes in the CFG have been visited yet. */
988 sbitmap_zero (visited);
990 /* Push the first edge on to the stack. */
991 stack[sp++] = ENTRY_BLOCK_PTR->succ;
998 /* Look at the edge on the top of the stack. */
1003 /* Check if the edge destination has been visited yet. */
1004 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
1006 /* Mark that we have visited the destination. */
1007 SET_BIT (visited, dest->index);
1009 /* Add the destination to the preorder tree. */
1010 if (src != ENTRY_BLOCK_PTR)
1012 dfst[src->index].node[dfst[src->index].nnodes++]
1013 = &dfst[dest->index];
1014 dfst[dest->index].up = &dfst[src->index];
1018 /* Since the DEST node has been visited for the first
1019 time, check its successors. */
1020 stack[sp++] = dest->succ;
1023 else if (e->succ_next)
1024 stack[sp - 1] = e->succ_next;
1030 sbitmap_free (visited);
1032 /* Record the preorder transversal order by
1033 walking the tree from right to left. */
/* Rightmost-child-first walk; node - dfst recovers the basic block
   index from the tree-node pointer.  */
1043 node = node->node[--node->nnodes];
1044 pot_order[i++] = node - dfst;
1050 /* Free the tree. */
1052 for (i = 0; i < n_basic_blocks; i++)
1054 free (dfst[i].node);
1059 /* Compute the depth first search order on the _reverse_ graph and
1060 store in the array DFS_ORDER, marking the nodes visited in VISITED.
1061 Returns the number of nodes visited.
1063 The computation is split into three pieces:
1065 flow_dfs_compute_reverse_init () creates the necessary data
1068 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1069 structures. The block will start the search.
1071 flow_dfs_compute_reverse_execute () continues (or starts) the
1072 search using the block on the top of the stack, stopping when the
1075 flow_dfs_compute_reverse_finish () destroys the necessary data
1078 Thus, the user will probably call ..._init(), call ..._add_bb() to
1079 add a beginning basic block to the stack, call ..._execute(),
1080 possibly add another bb to the stack and again call ..._execute(),
1081 ..., and finally call _finish(). */
1083 /* Initialize the data structures used for depth-first search on the
1084 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
1085 added to the basic block stack. DATA is the current depth-first
1086 search context. If INITIALIZE_STACK is non-zero, there is an
1087 element on the stack. */
1090 flow_dfs_compute_reverse_init (data)
1091 depth_first_search_ds data;
1093 /* Allocate stack for back-tracking up CFG. */
/* The - (INVALID_BLOCK + 1) offset makes room for the synthetic
   entry/exit block indices, which are negative; the same bias is
   applied to every index into visited_blocks below.  */
1094 data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
1095 * sizeof (basic_block));
1098 /* Allocate bitmap to track nodes that have been visited. */
1099 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
1101 /* None of the nodes in the CFG have been visited yet. */
1102 sbitmap_zero (data->visited_blocks);
1107 /* Add the specified basic block to the top of the dfs data
1108 structures. When the search continues, it will start at the
1112 flow_dfs_compute_reverse_add_bb (data, bb)
1113 depth_first_search_ds data;
/* Push BB and mark it visited, biasing the index as in _init.  */
1116 data->stack[data->sp++] = bb;
1117 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
1120 /* Continue the depth-first search through the reverse graph starting with the
1121 block at the stack's top and ending when the stack is empty. Visited nodes
1122 are marked. Returns an unvisited basic block, or NULL if there is none
1126 flow_dfs_compute_reverse_execute (data)
1127 depth_first_search_ds data;
/* Standard iterative DFS, but over predecessor edges (the reverse
   graph).  */
1133 while (data->sp > 0)
1135 bb = data->stack[--data->sp];
1137 /* Perform depth-first search on adjacent vertices. */
1138 for (e = bb->pred; e; e = e->pred_next)
1139 if (!TEST_BIT (data->visited_blocks,
1140 e->src->index - (INVALID_BLOCK + 1)))
1141 flow_dfs_compute_reverse_add_bb (data, e->src);
1144 /* Determine if there are unvisited basic blocks. */
/* Scan from the highest index down; undo the bias when converting
   the bitmap position back to a block.  */
1145 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
1146 if (!TEST_BIT (data->visited_blocks, i))
1147 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
1152 /* Destroy the data structures needed for depth-first search on the
1156 flow_dfs_compute_reverse_finish (data)
1157 depth_first_search_ds data;
/* Free the visited bitmap; the stack is presumably freed on an
   elided line.  */
1160 sbitmap_free (data->visited_blocks);