1 /* Control flow graph building code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* find_basic_blocks divides the current function's rtl into basic
23 blocks and constructs the CFG. The blocks are recorded in the
24 basic_block_info array; the CFG exists in the edge structures
25 referenced by the blocks.
27 find_basic_blocks also finds any unreachable loops and deletes them.
29 Available functionality:
32 - Local CFG construction
33 find_sub_basic_blocks */
37 #include "coretypes.h"
41 #include "hard-reg-set.h"
42 #include "basic-block.h"
/* Forward declarations for the file-local CFG-construction helpers
   defined below.  */
51 static int count_basic_blocks (rtx);
52 static void find_basic_blocks_1 (rtx);
53 static rtx find_label_refs (rtx, rtx);
54 static void make_edges (rtx, basic_block, basic_block, int);
55 static void make_label_edge (sbitmap *, basic_block, rtx, int);
56 static void find_bb_boundaries (basic_block);
57 static void compute_outgoing_frequencies (basic_block);
59 /* Return true if insn is something that should be contained inside basic
   block (i.e. an ordinary insn, as opposed to a label, barrier or
   jump-table body).  NOTE(review): the switch's case labels are elided
   in this view; the two visible returns appear to handle a CODE_LABEL
   (first) and a JUMP_INSN (second) — confirm against the full file.  */
63 inside_basic_block_p (rtx insn)
65 switch (GET_CODE (insn))
68 /* Avoid creating of basic block for jumptables.  A label immediately
   followed by an ADDR_VEC/ADDR_DIFF_VEC jump is table data, not a
   block head.  */
69 return (NEXT_INSN (insn) == 0
70 || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
71 || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
72 && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));
/* A jump whose pattern is an ADDR_VEC/ADDR_DIFF_VEC is the jump-table
   body itself and lives outside any basic block.  */
75 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
76 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
91 /* Return true if INSN may cause control flow transfer, so it should be last in
   its basic block.  NOTE(review): case labels and some branches are
   elided in this view.  */
95 control_flow_insn_p (rtx insn)
99 switch (GET_CODE (insn))
106 /* Jump insn always causes control transfer except for tablejumps.  */
107 return (GET_CODE (PATTERN (insn)) != ADDR_VEC
108 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
111 /* Noreturn and sibling call instructions terminate the basic blocks
112 (but only if they happen unconditionally).  */
113 if ((SIBLING_CALL_P (insn)
114 || find_reg_note (insn, REG_NORETURN, 0))
115 && GET_CODE (PATTERN (insn)) != COND_EXEC)
117 /* Call insn may return to the nonlocal goto handler.  A missing
   REG_EH_REGION note, or one with a nonnegative region number, means
   the call may perform a nonlocal goto.  */
118 return ((nonlocal_goto_handler_labels
119 && (0 == (note = find_reg_note (insn, REG_EH_REGION,
121 || INTVAL (XEXP (note, 0)) >= 0))
123 || can_throw_internal (insn));
/* Ordinary insns transfer control only via non-call exceptions.  */
126 return (flag_non_call_exceptions && can_throw_internal (insn));
129 /* It is nonsense to reach barrier when looking for the
130 end of basic block, but before dead code is eliminated
139 /* Count the basic blocks of the function.  Walks the insn chain once,
    bumping the count each time the current block is terminated by a
    label/barrier or by a control-flow insn.  */
142 count_basic_blocks (rtx f)
145 bool saw_insn = false;
148 for (insn = f; insn; insn = NEXT_INSN (insn))
150 /* Code labels and barriers cause the current basic block to be
151 terminated at the previous real insn.  */
152 if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
154 count++, saw_insn = false;
156 /* Start basic block if needed.  */
157 if (!saw_insn && inside_basic_block_p (insn))
160 /* Control flow insn causes current basic block to be terminated.  */
161 if (saw_insn && control_flow_insn_p (insn))
162 count++, saw_insn = false;
168 /* The rest of the compiler works a bit smoother when we don't have to
169 check for the edge case of do-nothing functions with no basic blocks.
    Emit a harmless USE so at least one block exists.  */
172 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
179 /* Scan a list of insns for labels referred to other than by jumps.
180 This is used to scan the alternatives of a call placeholder.
    Returns LVL extended with an EXPR_LIST node per such label.  */
183 find_label_refs (rtx f, rtx lvl)
187 for (insn = f; insn; insn = NEXT_INSN (insn))
188 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
192 /* Make a list of all labels referred to other than by jumps
193 (which just don't have the REG_LABEL notes).
195 Make a special exception for labels followed by an ADDR*VEC,
196 as this would be a part of the tablejump setup code.
198 Make a special exception to registers loaded with label
199 values just before jump insns that use them.  */
201 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
202 if (REG_NOTE_KIND (note) == REG_LABEL)
204 rtx lab = XEXP (note, 0), next;
/* Skip: label is part of a jump table.  */
206 if ((next = next_nonnote_insn (lab)) != NULL
207 && GET_CODE (next) == JUMP_INSN
208 && (GET_CODE (PATTERN (next)) == ADDR_VEC
209 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
/* Skip: "label" is really a deleted-label NOTE.  */
211 else if (GET_CODE (lab) == NOTE)
/* Skip: label value feeds the immediately following jump.  */
213 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
214 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
/* Otherwise record the label as address-taken.  */
217 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
224 /* Create an edge between two basic blocks.  FLAGS are auxiliary information
225 about the edge that is accumulated between calls.  */
227 /* Create an edge from a basic block SRC to the block containing LABEL,
    silently ignoring non-labels and never-emitted labels.  */
230 make_label_edge (sbitmap *edge_cache, basic_block src, rtx label, int flags)
232 if (GET_CODE (label) != CODE_LABEL)
235 /* If the label was never emitted, this insn is junk, but avoid a
236 crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
237 as a result of a syntax error and a diagnostic has already been
    printed.  */
240 if (INSN_UID (label) == 0)
243 cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
246 /* Create the EH edges generated by INSN: one abnormal edge from SRC to
    each handler reachable from INSN.  Call insns additionally get
    EDGE_ABNORMAL_CALL.  */
249 rtl_make_eh_edge (sbitmap *edge_cache, basic_block src, rtx insn)
251 int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
254 handlers = reachable_handlers (insn);
256 for (i = handlers; i; i = XEXP (i, 1))
257 make_label_edge (edge_cache, src, XEXP (i, 0),
258 EDGE_ABNORMAL | EDGE_EH | is_call);
/* reachable_handlers allocated the list; release it here.  */
260 free_INSN_LIST_list (&handlers);
263 /* Identify the edges between basic blocks MIN to MAX.
265 NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
266 that are otherwise unreachable may be reachable with a non-local goto.
268 BB_EH_END is an array indexed by basic block number in which we record
269 the list of exception regions active at the end of the basic block.
    NOTE(review): several interior lines are elided in this view.  */
272 make_edges (rtx label_value_list, basic_block min, basic_block max, int update_p)
275 sbitmap *edge_cache = NULL;
277 /* Assume no computed jump; revise as we create edges.  */
278 current_function_has_computed_jump = 0;
280 /* If we are partitioning hot and cold basic blocks into separate
281 sections, we cannot assume there is no computed jump.  */
283 if (flag_reorder_blocks_and_partition)
284 current_function_has_computed_jump = 1;
286 /* Heavy use of computed goto in machine-generated code can lead to
287 nearly fully-connected CFGs.  In that case we spend a significant
288 amount of time searching the edge lists for duplicates.  */
289 if (forced_labels || label_value_list || cfun->max_jumptable_ents > 100)
291 edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
292 sbitmap_vector_zero (edge_cache, last_basic_block);
/* Seed the cache with the edges that already exist.  */
295 FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
299 for (e = bb->succ; e ; e = e->succ_next)
300 if (e->dest != EXIT_BLOCK_PTR)
301 SET_BIT (edge_cache[bb->index], e->dest->index);
305 /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
306 is always the entry.  */
307 if (min == ENTRY_BLOCK_PTR->next_bb)
308 cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, min,
311 FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
315 int force_fallthru = 0;
/* Alternate entry points get an edge from ENTRY.  */
317 if (GET_CODE (BB_HEAD (bb)) == CODE_LABEL
318 && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
319 cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
321 /* Examine the last instruction of the block, and discover the
322 ways we can leave the block.  */
325 code = GET_CODE (insn);
328 if (code == JUMP_INSN)
332 /* Recognize exception handling placeholders.  */
333 if (GET_CODE (PATTERN (insn)) == RESX)
334 rtl_make_eh_edge (edge_cache, bb, insn);
336 /* Recognize a non-local goto as a branch outside the
    current function.  */
338 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
341 /* Recognize a tablejump and do the right thing.  */
342 else if (tablejump_p (insn, NULL, &tmp))
347 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
348 vec = XVEC (PATTERN (tmp), 0);
350 vec = XVEC (PATTERN (tmp), 1);
352 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
353 make_label_edge (edge_cache, bb,
354 XEXP (RTVEC_ELT (vec, j), 0), 0);
356 /* Some targets (eg, ARM) emit a conditional jump that also
357 contains the out-of-range target.  Scan for these and
358 add an edge if necessary.  */
359 if ((tmp = single_set (insn)) != NULL
360 && SET_DEST (tmp) == pc_rtx
361 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
362 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
363 make_label_edge (edge_cache, bb,
364 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
366 #ifdef CASE_DROPS_THROUGH
367 /* Silly VAXen.  The ADDR_VEC is going to be in the way of
368 us naturally detecting fallthru into the next block.  */
373 /* If this is a computed jump, then mark it as reaching
374 everything on the label_value_list and forced_labels list.  */
375 else if (computed_jump_p (insn))
377 current_function_has_computed_jump = 1;
379 for (x = label_value_list; x; x = XEXP (x, 1))
380 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
382 for (x = forced_labels; x; x = XEXP (x, 1))
383 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
386 /* A return instruction creates an edge to the exit block.  */
387 else if (returnjump_p (insn))
388 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
390 /* Otherwise, we have a plain conditional or unconditional jump.  */
393 if (! JUMP_LABEL (insn))
395 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
399 /* If this is a sibling call insn, then this is in effect a combined call
400 and return, and so we need an edge to the exit block.  No need to
401 worry about EH edges, since we wouldn't have created the sibling call
402 in the first place.  */
403 if (code == CALL_INSN && SIBLING_CALL_P (insn))
404 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
405 EDGE_SIBCALL | EDGE_ABNORMAL);
407 /* If this is a CALL_INSN, then mark it as reaching the active EH
408 handler for this CALL_INSN.  If we're handling non-call
409 exceptions then any insn can reach any of the active handlers.
410 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
411 else if (code == CALL_INSN || flag_non_call_exceptions)
413 /* Add any appropriate EH edges.  */
414 rtl_make_eh_edge (edge_cache, bb, insn);
416 if (code == CALL_INSN && nonlocal_goto_handler_labels)
418 /* ??? This could be made smarter: in some cases it's possible
419 to tell that certain calls will not do a nonlocal goto.
420 For example, if the nested functions that do the nonlocal
421 gotos do not have their addresses taken, then only calls to
422 those functions or to other nested functions that use them
423 could possibly do nonlocal gotos.  */
425 /* We do know that a REG_EH_REGION note with a value less
426 than 0 is guaranteed not to perform a non-local goto.  */
427 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
429 if (!note || INTVAL (XEXP (note, 0)) >= 0)
430 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
431 make_label_edge (edge_cache, bb, XEXP (x, 0),
432 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
436 /* Find out if we can drop through to the next block.  Skip over
    non-basic-block notes after the last insn.  */
437 insn = NEXT_INSN (insn);
439 && GET_CODE (insn) == NOTE
440 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
441 insn = NEXT_INSN (insn);
443 if (!insn || (bb->next_bb == EXIT_BLOCK_PTR && force_fallthru))
444 cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
445 else if (bb->next_bb != EXIT_BLOCK_PTR)
447 if (force_fallthru || insn == BB_HEAD (bb->next_bb))
448 cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
/* Release the duplicate-suppression cache, if we allocated one.  */
453 sbitmap_vector_free (edge_cache);
456 /* Find all basic blocks of the function whose first insn is F.
458 Collect and return a list of labels whose addresses are taken.  This
459 will be used in make_edges for use with computed gotos.
    NOTE(review): interior lines are elided in this view; the results
    are published via label_value_list / tail_recursion_label_list.  */
462 find_basic_blocks_1 (rtx f)
465 rtx bb_note = NULL_RTX;
470 basic_block prev = ENTRY_BLOCK_PTR;
472 /* We process the instructions in a slightly different way than we did
473 previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
474 closed out the previous block, so that it gets attached at the proper
475 place.  Since this form should be equivalent to the previous,
476 count_basic_blocks continues to use the old form as a check.  */
478 for (insn = f; insn; insn = next)
480 enum rtx_code code = GET_CODE (insn);
482 next = NEXT_INSN (insn);
/* A label or barrier closes out the block accumulated so far.  */
484 if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
487 prev = create_basic_block_structure (head, end, bb_note, prev);
488 head = end = NULL_RTX;
492 if (inside_basic_block_p (insn))
494 if (head == NULL_RTX)
/* A control-flow insn also closes out the current block.  */
499 if (head && control_flow_insn_p (insn))
501 prev = create_basic_block_structure (head, end, bb_note, prev);
502 head = end = NULL_RTX;
510 int kind = NOTE_LINE_NUMBER (insn);
512 /* Look for basic block notes with which to keep the
513 basic_block_info pointers stable.  Unthread the note now;
514 we'll put it back at the right place in create_basic_block.
515 Or not at all if we've already found a note in this block.  */
516 if (kind == NOTE_INSN_BASIC_BLOCK)
518 if (bb_note == NULL_RTX)
521 next = delete_insn (insn);
533 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
535 /* Scan each of the alternatives for label refs.  */
536 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
537 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
538 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
539 /* Record its tail recursion label, if any.  */
540 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
541 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
549 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
553 /* Make a list of all labels referred to other than by jumps.
555 Make a special exception for labels followed by an ADDR*VEC,
556 as this would be a part of the tablejump setup code.
558 Make a special exception to registers loaded with label
559 values just before jump insns that use them.
    (Same filtering logic as find_label_refs above.)  */
561 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
562 if (REG_NOTE_KIND (note) == REG_LABEL)
564 rtx lab = XEXP (note, 0), next;
566 if ((next = next_nonnote_insn (lab)) != NULL
567 && GET_CODE (next) == JUMP_INSN
568 && (GET_CODE (PATTERN (next)) == ADDR_VEC
569 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
571 else if (GET_CODE (lab) == NOTE)
573 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
574 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
577 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
/* Close out the final block, if any insns remain accumulated.  */
582 if (head != NULL_RTX)
583 create_basic_block_structure (head, end, bb_note, prev);
585 delete_insn (bb_note);
/* Sanity check against the count made by count_basic_blocks.  */
587 if (last_basic_block != n_basic_blocks)
590 label_value_list = lvl;
591 tail_recursion_label_list = trll;
592 clear_aux_for_blocks ();
596 /* Find basic blocks of the current function.
597 F is the first insn of the function and NREGS the number of register
    numbers in use (unused here).  Entry point that ties together
    counting, block construction, and edge discovery.  */
601 find_basic_blocks (rtx f, int nregs ATTRIBUTE_UNUSED,
602 FILE *file ATTRIBUTE_UNUSED)
606 timevar_push (TV_CFG);
608 /* Flush out existing data.  */
609 if (basic_block_info != NULL)
613 /* Clear bb->aux on all extant basic blocks.  We'll use this as a
614 tag for reuse during create_basic_block, just in case some pass
615 copies around basic block notes improperly.  */
619 basic_block_info = NULL;
622 n_basic_blocks = count_basic_blocks (f);
623 last_basic_block = 0;
/* Reset the entry/exit links; blocks are re-threaded below.  */
624 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
625 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
627 /* Size the basic block table.  The actual structures will be allocated
628 by find_basic_blocks_1, since we want to keep the structure pointers
629 stable across calls to find_basic_blocks.  */
630 /* ??? This whole issue would be much simpler if we called find_basic_blocks
631 exactly once, and thereafter we don't have a single long chain of
632 instructions at all until close to the end of compilation when we
633 actually lay them out.  */
635 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
637 find_basic_blocks_1 (f);
639 /* Discover the edges of our cfg.  */
640 make_edges (label_value_list, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);
642 /* Do very simple cleanup now, for the benefit of code that runs between
643 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
644 tidy_fallthru_edges ();
646 #ifdef ENABLE_CHECKING
649 timevar_pop (TV_CFG);
652 /* State of basic block as seen by find_sub_basic_blocks.
    BLOCK_NEW: freshly created by splitting; BLOCK_ORIGINAL: untouched
    pre-existing block; BLOCK_TO_SPLIT: marked for boundary re-scan.  */
653 enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};
/* The per-block state is stashed in bb->aux via integer/pointer casts.  */
655 #define STATE(BB) (enum state) ((size_t) (BB)->aux)
656 #define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
658 /* Scan basic block BB for possible BB boundaries inside the block
659 and create new basic blocks in the progress.  Splits at interior
    code labels and after control-flow insns.  */
662 find_bb_boundaries (basic_block bb)
664 rtx insn = BB_HEAD (bb);
665 rtx end = BB_END (bb);
666 rtx flow_transfer_insn = NULL_RTX;
667 edge fallthru = NULL;
/* A single-insn block has no interior to scan.  */
669 if (insn == BB_END (bb))
/* Skip the block's own label; only interior labels force a split.  */
672 if (GET_CODE (insn) == CODE_LABEL)
673 insn = NEXT_INSN (insn);
675 /* Scan insn chain and try to find new basic block boundaries.  */
678 enum rtx_code code = GET_CODE (insn);
680 /* On code label, split current basic block.  */
681 if (code == CODE_LABEL)
683 fallthru = split_block (bb, PREV_INSN (insn));
684 if (flow_transfer_insn)
685 BB_END (bb) = flow_transfer_insn;
/* A pending control transfer means the label is not fallen into.  */
688 remove_edge (fallthru);
689 flow_transfer_insn = NULL_RTX;
690 if (LABEL_ALT_ENTRY_P (insn))
691 make_edge (ENTRY_BLOCK_PTR, bb, 0);
694 /* In case we've previously seen an insn that effects a control
695 flow transfer, split the block.  */
696 if (flow_transfer_insn && inside_basic_block_p (insn))
698 fallthru = split_block (bb, PREV_INSN (insn));
699 BB_END (bb) = flow_transfer_insn;
701 remove_edge (fallthru);
702 flow_transfer_insn = NULL_RTX;
705 if (control_flow_insn_p (insn))
706 flow_transfer_insn = insn;
709 insn = NEXT_INSN (insn);
712 /* In case expander replaced normal insn by sequence terminating by
713 return and barrier, or possibly other sequence not behaving like
714 ordinary jump, we need to take care and move basic block boundary.  */
715 if (flow_transfer_insn)
716 BB_END (bb) = flow_transfer_insn;
718 /* We've possibly replaced the conditional jump by conditional jump
719 followed by cleanup at fallthru edge, so the outgoing edges may
    be dead; remove them.  */
721 purge_dead_edges (bb);
724 /* Assume that frequency of basic block B is known.  Compute frequencies
725 and probabilities of outgoing edges.  Handles the two-successor
    (conditional branch with REG_BR_PROB note) and single-successor
    cases.  */
728 compute_outgoing_frequencies (basic_block b)
/* Exactly two successors: split the count per the branch-probability
   note on the final jump.  */
732 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
734 rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
740 probability = INTVAL (XEXP (note, 0));
742 e->probability = probability;
743 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
/* The fallthru edge gets the complementary probability/count.  */
745 f = FALLTHRU_EDGE (b);
746 f->probability = REG_BR_PROB_BASE - probability;
747 f->count = b->count - e->count;
/* Exactly one successor: it inherits everything.  */
750 if (b->succ && !b->succ->succ_next)
753 e->probability = REG_BR_PROB_BASE;
758 /* Assume that someone emitted code with control flow instructions to the
759 basic block.  Update the data structure.  BLOCKS marks the blocks
    that need re-scanning.  */
762 find_many_sub_basic_blocks (sbitmap blocks)
764 basic_block bb, min, max;
/* Tag each block's aux with its split state from BLOCKS.  */
768 TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
771 if (STATE (bb) == BLOCK_TO_SPLIT)
772 find_bb_boundaries (bb);
/* Locate the first and last affected blocks.  */
775 if (STATE (bb) != BLOCK_ORIGINAL)
779 for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
780 if (STATE (bb) != BLOCK_ORIGINAL)
783 /* Now re-scan and wire in all edges.  This expect simple (conditional)
784 jumps at the end of each new basic blocks.  */
785 make_edges (NULL, min, max, 1);
787 /* Update branch probabilities.  Expect only (un)conditional jumps
788 to be created with only the forward edges.  */
789 FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
793 if (STATE (bb) == BLOCK_ORIGINAL)
795 if (STATE (bb) == BLOCK_NEW)
/* New block: recompute count/frequency from incoming edges.  */
799 for (e = bb->pred; e; e = e->pred_next)
801 bb->count += e->count;
802 bb->frequency += EDGE_FREQUENCY (e);
806 compute_outgoing_frequencies (bb);
813 /* Like above but for single basic block only. */
816 find_sub_basic_blocks (basic_block bb)
818 basic_block min, max, b;
819 basic_block next = bb->next_bb;
822 find_bb_boundaries (bb);
825 /* Now re-scan and wire in all edges. This expect simple (conditional)
826 jumps at the end of each new basic blocks. */
827 make_edges (NULL, min, max, 1);
829 /* Update branch probabilities. Expect only (un)conditional jumps
830 to be created with only the forward edges. */
831 FOR_BB_BETWEEN (b, min, max->next_bb, next_bb)
839 for (e = b->pred; e; e = e->pred_next)
841 b->count += e->count;
842 b->frequency += EDGE_FREQUENCY (e);
846 compute_outgoing_frequencies (b);