1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, rtl_delete_block,rtl_split_block,
31 - Infrastructure to determine quickly basic block for insn
32 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36 - Edge splitting and committing to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - Dumping and debugging
39 print_rtl_with_bb, dump_bb, debug_bb, debug_bb_n
40 - Consistency checking
42 - CFG updating after constant propagation
43 purge_dead_edges, purge_all_dead_edges */
47 #include "coretypes.h"
51 #include "hard-reg-set.h"
52 #include "basic-block.h"
61 #include "insn-config.h"
62 #include "cfglayout.h"
64 /* Stubs in case we don't have a return insn. */
67 #define gen_return() NULL_RTX
70 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
71 /* ??? Should probably be using LABEL_NUSES instead. It would take a
72 bit of surgery to be able to use or co-opt the routines in jump. */
74 rtx tail_recursion_label_list;
76 static int can_delete_note_p PARAMS ((rtx));
77 static int can_delete_label_p PARAMS ((rtx));
78 static void commit_one_edge_insertion PARAMS ((edge, int));
79 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
80 static rtx last_loop_beg_note PARAMS ((rtx));
81 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
82 basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
83 static basic_block rtl_split_edge PARAMS ((edge));
84 static void rtl_verify_flow_info PARAMS ((void));
85 static edge cfg_layout_split_block PARAMS ((basic_block, void *));
86 static bool cfg_layout_redirect_edge_and_branch PARAMS ((edge, basic_block));
87 static basic_block cfg_layout_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
88 static void cfg_layout_delete_block PARAMS ((basic_block));
89 static void rtl_delete_block PARAMS ((basic_block));
90 static basic_block rtl_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
91 static bool rtl_redirect_edge_and_branch PARAMS ((edge, basic_block));
92 static edge rtl_split_block PARAMS ((basic_block, void *));
94 /* Return true if NOTE is not one of the ones that must be kept paired,
95 so that we may simply delete it. */
98 can_delete_note_p (note)
/* Only these three note kinds are freely deletable; all other note
   kinds may carry pairing/region semantics and must be kept.  */
101 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
102 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
103 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
106 /* True if a given label can be deleted. */
109 can_delete_label_p (label)
/* A label may be removed only when nothing outside the insn stream can
   still reference it: it is not marked preserved, has no user-visible
   name, and appears on neither forced_labels nor label_value_list.  */
112 return (!LABEL_PRESERVE_P (label)
113 /* User declared labels must be preserved. */
114 && LABEL_NAME (label) == 0
115 && !in_expr_list_p (forced_labels, label)
116 && !in_expr_list_p (label_value_list, label));
119 /* Delete INSN by patching it out. Return the next insn. */
125 rtx next = NEXT_INSN (insn);
127 bool really_delete = true;
129 if (GET_CODE (insn) == CODE_LABEL)
131 /* Some labels can't be directly removed from the INSN chain, as they
132 might be references via variables, constant pool etc.
133 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
134 if (! can_delete_label_p (insn))
136 const char *name = LABEL_NAME (insn);
138 really_delete = false;
139 PUT_CODE (insn, NOTE);
140 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
141 NOTE_SOURCE_FILE (insn) = name;
/* A deleted label may still be on the nonlocal-goto handler list;
   make sure it is dropped from there as well.  */
144 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
149 /* If this insn has already been deleted, something is very wrong. */
150 if (INSN_DELETED_P (insn))
153 INSN_DELETED_P (insn) = 1;
156 /* If deleting a jump, decrement the use count of the label. Deleting
157 the label itself should happen in the normal course of block merging. */
158 if (GET_CODE (insn) == JUMP_INSN
160 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
161 LABEL_NUSES (JUMP_LABEL (insn))--;
163 /* Also if deleting an insn that references a label. */
164 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
165 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
166 LABEL_NUSES (XEXP (note, 0))--;
/* Deleting a tablejump: every label still listed in the dispatch
   vector (ADDR_VEC/ADDR_DIFF_VEC) loses one use.  */
168 if (GET_CODE (insn) == JUMP_INSN
169 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
170 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
172 rtx pat = PATTERN (insn);
173 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
174 int len = XVECLEN (pat, diff_vec_p);
177 for (i = 0; i < len; i++)
179 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
181 /* When deleting code in bulk (e.g. removing many unreachable
182 blocks) we can delete a label that's a target of the vector
183 before deleting the vector itself. */
184 if (GET_CODE (label) != NOTE)
185 LABEL_NUSES (label)--;
192 /* Like delete_insn but also purge dead edges from BB. */
194 delete_insn_and_edges (insn)
/* Edges can only become dead when we delete the insn that ends its
   block, so purge only in that case.  */
201 && BLOCK_FOR_INSN (insn)
202 && BLOCK_FOR_INSN (insn)->end == insn)
204 x = delete_insn (insn);
206 purge_dead_edges (BLOCK_FOR_INSN (insn));
210 /* Unlink a chain of insns between START and FINISH, leaving notes
211 that must be paired. */
214 delete_insn_chain (start, finish)
219 /* Unchain the insns one by one. It would be quicker to delete all of these
220 with a single unchaining, rather than one at a time, but we need to keep
224 next = NEXT_INSN (start);
/* Paired notes (see can_delete_note_p) are stepped over, everything
   else is removed via delete_insn.  */
225 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
228 next = delete_insn (start);
236 /* Like delete_insn but also purge dead edges from BB. */
238 delete_insn_chain_and_edges (first, last)
/* As in delete_insn_and_edges: purging is needed only if LAST ends
   its basic block.  */
244 && BLOCK_FOR_INSN (last)
245 && BLOCK_FOR_INSN (last)->end == last)
247 delete_insn_chain (first, last);
249 purge_dead_edges (BLOCK_FOR_INSN (last));
252 /* Create a new basic block consisting of the instructions between HEAD and END
253 inclusive. This function is designed to allow fast BB construction - reuses
254 the note and basic block struct in BB_NOTE, if any and do not grow
255 BASIC_BLOCK chain and should be used directly only by CFG construction code.
256 END can be NULL in to create new empty basic block before HEAD. Both END
257 and HEAD can be NULL to create basic block at the end of INSN chain.
258 AFTER is the basic block we should be put after. */
261 create_basic_block_structure (head, end, bb_note, after)
262 rtx head, end, bb_note;
/* Reuse path: BB_NOTE already carries a basic_block struct from an
   earlier CFG build that we can thread back into the chain.  */
268 && ! RTX_INTEGRATED_P (bb_note)
269 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
272 /* If we found an existing note, thread it back onto the chain. */
276 if (GET_CODE (head) == CODE_LABEL)
280 after = PREV_INSN (head);
284 if (after != bb_note && NEXT_INSN (after) != bb_note)
285 reorder_insns_nobb (bb_note, bb_note, after);
289 /* Otherwise we must create a note and a basic block structure. */
295 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
296 else if (GET_CODE (head) == CODE_LABEL && end)
298 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
304 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
/* Wire the note and block together, register the block in the
   BASIC_BLOCK array and the block chain, and set BLOCK_FOR_INSN for
   every insn it contains.  */
310 NOTE_BASIC_BLOCK (bb_note) = bb;
313 /* Always include the bb note in the block. */
314 if (NEXT_INSN (end) == bb_note)
319 bb->index = last_basic_block++;
321 link_block (bb, after);
322 BASIC_BLOCK (bb->index) = bb;
323 update_bb_for_insn (bb);
325 /* Tag the block so that we know it has been used when considering
326 other basic block notes. */
332 /* Create new basic block consisting of instructions in between HEAD and END
333 and place it to the BB chain after block AFTER. END can be NULL in to
334 create new empty basic block before HEAD. Both END and HEAD can be NULL to
335 create basic block at the end of INSN chain. */
338 create_basic_block (head, end, after)
344 /* Place the new block just after the end. */
/* Unlike create_basic_block_structure, this grows the BASIC_BLOCK
   varray before delegating (bb_note passed as NULL = always fresh).  */
345 VARRAY_GROW (basic_block_info, last_basic_block+1);
349 bb = create_basic_block_structure (head, end, NULL, after);
354 /* Delete the insns in a (non-live) block. We physically delete every
355 non-deleted-note insn, and update the flow graph appropriately.
357 Return nonzero if we deleted an exception handler. */
359 /* ??? Preserving all such notes strikes me as wrong. It would be nice
360 to post-process the stream to remove empty blocks, loops, ranges, etc. */
363 flow_delete_block_noexpunge (b)
368 /* If the head of this block is a CODE_LABEL, then it might be the
369 label for an exception handler which can't be reached.
371 We need to remove the label from the exception_handler_label list
372 and remove the associated NOTE_INSN_EH_REGION_BEG and
373 NOTE_INSN_EH_REGION_END notes. */
375 /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
376 hanging before the block. */
/* Scan backwards from the block head, demoting such notes to
   NOTE_INSN_DELETED, stopping at the first non-note.  */
378 for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
380 if (GET_CODE (insn) != NOTE)
382 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
383 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
384 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
389 never_reached_warning (insn, b->end);
391 if (GET_CODE (insn) == CODE_LABEL)
392 maybe_remove_eh_handler (insn);
394 /* Include any jump table following the basic block. */
396 if (tablejump_p (end, NULL, &tmp))
399 /* Include any barrier that may follow the basic block. */
400 tmp = next_nonnote_insn (end);
401 if (tmp && GET_CODE (tmp) == BARRIER)
404 /* Selectively delete the entire chain. */
406 delete_insn_chain (insn, end);
408 /* Remove the edges into and out of this block. Note that there may
409 indeed be edges in, if we are removing an unreachable loop. */
410 while (b->pred != NULL)
411 remove_edge (b->pred);
412 while (b->succ != NULL)
413 remove_edge (b->succ);
/* NOTE(review): fragment of flow_delete_block — deletes the block's
   insns and edges via the _noexpunge variant, then (per the comment
   below) also unlinks the block from the basic-block array.  */
423 flow_delete_block_noexpunge (b);
425 /* Remove the basic block from the array. */
429 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
432 compute_bb_for_insn ()
/* Walk each block's insns starting from its head, stamping the
   owning block into BLOCK_FOR_INSN.  */
441 for (insn = bb->head; ; insn = NEXT_INSN (insn))
443 BLOCK_FOR_INSN (insn) = bb;
450 /* Release the basic_block_for_insn array. */
/* Clear the back-pointer on every insn; barriers are skipped since
   they never belong to a block.  */
456 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
457 if (GET_CODE (insn) != BARRIER)
458 BLOCK_FOR_INSN (insn) = NULL;
461 /* Update insns block within BB. */
464 update_bb_for_insn (bb)
/* Re-stamp BB into every non-barrier insn between bb->head and the
   block end.  */
469 for (insn = bb->head; ; insn = NEXT_INSN (insn))
471 if (GET_CODE (insn) != BARRIER)
472 set_block_for_insn (insn, bb);
478 /* Split a block BB after insn INSN creating a new fallthru edge.
479 Return the new edge. Note that to keep other parts of the compiler happy,
480 this function renumbers all the basic blocks so that the new
481 one has a number one greater than the block split. */
484 rtl_split_block (bb, insnp)
493 /* There is no point splitting the block after its end. */
497 /* Create the new basic block. */
/* NEW_BB receives everything after INSN and inherits BB's profile
   counts and loop depth.  */
498 new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
499 new_bb->count = bb->count;
500 new_bb->frequency = bb->frequency;
501 new_bb->loop_depth = bb->loop_depth;
504 /* Redirect the outgoing edges. */
505 new_bb->succ = bb->succ;
507 for (e = new_bb->succ; e; e = e->succ_next)
510 new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
/* If liveness info is being maintained, recompute it for the two
   halves using propagate_block.  */
512 if (bb->global_live_at_start)
514 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
515 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
516 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
518 /* We now have to calculate which registers are live at the end
519 of the split basic block and at the start of the new basic
520 block. Start with those registers that are known to be live
521 at the end of the original basic block and get
522 propagate_block to determine which registers are live. */
523 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
524 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
525 COPY_REG_SET (bb->global_live_at_end,
526 new_bb->global_live_at_start);
527 #ifdef HAVE_conditional_execution
528 /* In the presence of conditional execution we are not able to update
529 liveness precisely. */
530 if (reload_completed)
532 bb->flags |= BB_DIRTY;
533 new_bb->flags |= BB_DIRTY;
541 /* Blocks A and B are to be merged into a single block A. The insns
542 are already contiguous, hence `nomove'. */
545 merge_blocks_nomove (a, b)
548 rtx b_head = b->head, b_end = b->end, a_end = a->end;
549 rtx del_first = NULL_RTX, del_last = NULL_RTX;
553 /* If there was a CODE_LABEL beginning B, delete it. */
554 if (GET_CODE (b_head) == CODE_LABEL)
556 /* Detect basic blocks with nothing but a label. This can happen
557 in particular at the end of a function. */
561 del_first = del_last = b_head;
562 b_head = NEXT_INSN (b_head);
565 /* Delete the basic block note and handle blocks containing just that
567 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
575 b_head = NEXT_INSN (b_head);
578 /* If there was a jump out of A, delete it. */
579 if (GET_CODE (a_end) == JUMP_INSN)
/* Walk back over notes to find where the deletion range should start
   (the jump plus any cc0 setter feeding it).  */
583 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
584 if (GET_CODE (prev) != NOTE
585 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
592 /* If this was a conditional jump, we need to also delete
593 the insn that set cc0. */
594 if (only_sets_cc0_p (prev))
598 prev = prev_nonnote_insn (prev);
605 a_end = PREV_INSN (del_first);
607 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
608 del_first = NEXT_INSN (a_end);
610 /* Normally there should only be one successor of A and that is B, but
611 partway though the merge of blocks for conditional_execution we'll
612 be merging a TEST block with THEN and ELSE successors. Free the
613 whole lot of them and hope the caller knows what they're doing. */
615 remove_edge (a->succ);
617 /* Adjust the edges out of B for the new owner. */
618 for (e = b->succ; e; e = e->succ_next)
621 a->flags |= b->flags;
623 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
624 b->pred = b->succ = NULL;
625 a->global_live_at_end = b->global_live_at_end;
629 /* Delete everything marked above as well as crap that might be
630 hanging out between the two blocks. */
631 delete_insn_chain (del_first, del_last);
633 /* Reassociate the insns of B with A. */
638 for (x = a_end; x != b_end; x = NEXT_INSN (x))
639 set_block_for_insn (x, a);
641 set_block_for_insn (b_end, a);
649 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
/* The exit block never gets a label; otherwise a fresh label is
   emitted before the current head and becomes the new head.  */
656 if (block == EXIT_BLOCK_PTR)
659 if (GET_CODE (block->head) != CODE_LABEL)
661 block->head = emit_label_before (gen_label_rtx (), block->head);
667 /* Attempt to perform edge redirection by replacing possibly complex jump
668 instruction by unconditional jump or removing jump completely. This can
669 apply only if all edges now point to the same block. The parameters and
670 return values are equivalent to redirect_edge_and_branch. */
673 try_redirect_by_replacing_jump (e, target)
677 basic_block src = e->src;
678 rtx insn = src->end, kill_from;
683 /* Verify that all targets will be TARGET. */
684 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
685 if (tmp->dest != target && tmp != e)
688 if (tmp || !onlyjump_p (insn))
690 if ((!optimize || flow2_completed) && tablejump_p (insn, NULL, NULL))
693 /* Avoid removing branch with side effects. */
694 set = single_set (insn);
695 if (!set || side_effects_p (set))
698 /* In case we zap a conditional jump, we'll need to kill
699 the cc0 setter too. */
702 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
703 kill_from = PREV_INSN (insn);
706 /* See if we can create the fallthru edge. */
/* Case 1: TARGET can be reached by falling through — delete the jump
   entirely.  */
707 if (can_fallthru (src, target))
710 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
713 /* Selectively unlink whole insn chain. */
714 delete_insn_chain (kill_from, PREV_INSN (target->head));
717 /* If this already is simplejump, redirect it. */
/* Case 2: already a simple jump — just retarget it.  */
718 else if (simplejump_p (insn))
720 if (e->dest == target)
723 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
724 INSN_UID (insn), e->dest->index, target->index);
725 if (!redirect_jump (insn, block_label (target), 0))
727 if (target == EXIT_BLOCK_PTR)
733 /* Cannot do anything for target exit block. */
734 else if (target == EXIT_BLOCK_PTR)
737 /* Or replace possibly complicated jump insn by simple jump insn. */
/* Case 3: replace the complex jump by a fresh simple jump to TARGET.  */
740 rtx target_label = block_label (target);
741 rtx barrier, label, table;
743 emit_jump_insn_after (gen_jump (target_label), insn);
744 JUMP_LABEL (src->end) = target_label;
745 LABEL_NUSES (target_label)++;
747 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
748 INSN_UID (insn), INSN_UID (src->end));
751 delete_insn_chain (kill_from, insn);
753 /* Recognize a tablejump that we are converting to a
754 simple jump and remove its associated CODE_LABEL
755 and ADDR_VEC or ADDR_DIFF_VEC. */
756 if (tablejump_p (insn, &label, &table))
757 delete_insn_chain (label, table);
759 barrier = next_nonnote_insn (src->end);
760 if (!barrier || GET_CODE (barrier) != BARRIER)
761 emit_barrier_after (src->end);
764 /* Keep only one edge out and set proper flags. */
765 while (src->succ->succ_next)
766 remove_edge (src->succ);
769 e->flags = EDGE_FALLTHRU;
773 e->probability = REG_BR_PROB_BASE;
774 e->count = src->count;
776 /* We don't want a block to end on a line-number note since that has
777 the potential of changing the code between -g and not -g. */
778 while (GET_CODE (e->src->end) == NOTE
779 && NOTE_LINE_NUMBER (e->src->end) >= 0)
780 delete_insn (e->src->end);
782 if (e->dest != target)
783 redirect_edge_succ (e, target);
788 /* Return last loop_beg note appearing after INSN, before start of next
789 basic block. Return INSN if there are no such notes.
791 When emitting jump to redirect a fallthru edge, it should always appear
792 after the LOOP_BEG notes, as loop optimizer expect loop to either start by
793 fallthru edge or jump following the LOOP_BEG note jumping to the loop exit
797 last_loop_beg_note (insn)
/* Scan the run of notes after INSN, stopping at the next basic-block
   note (or any non-note), remembering the last LOOP_BEG seen.  */
802 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
803 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
804 insn = NEXT_INSN (insn))
805 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
811 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
812 expense of adding new instructions or reordering basic blocks.
814 Function can be also called with edge destination equivalent to the TARGET.
815 Then it should try the simplifications and do nothing if none is possible.
817 Return true if transformation succeeded. We still return false in case E
818 already destinated TARGET and we didn't managed to simplify instruction
822 rtl_redirect_edge_and_branch (e, target)
827 rtx old_label = e->dest->head;
828 basic_block src = e->src;
/* Abnormal-call and EH edges cannot be retargeted by rewriting jumps.  */
831 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
834 if (try_redirect_by_replacing_jump (e, target))
837 /* Do this fast path late, as we want above code to simplify for cases
838 where called on single edge leaving basic block containing nontrivial
840 else if (e->dest == target)
843 /* We can only redirect non-fallthru edges of jump insn. */
844 if (e->flags & EDGE_FALLTHRU)
846 else if (GET_CODE (insn) != JUMP_INSN)
849 /* Recognize a tablejump and adjust all matching cases. */
/* Every vector entry still pointing at OLD_LABEL is rewritten to the
   new target's label, keeping LABEL_NUSES counts balanced.  */
850 if (tablejump_p (insn, NULL, &tmp))
854 rtx new_label = block_label (target);
856 if (target == EXIT_BLOCK_PTR)
858 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
859 vec = XVEC (PATTERN (tmp), 0);
861 vec = XVEC (PATTERN (tmp), 1);
863 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
864 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
866 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
867 --LABEL_NUSES (old_label);
868 ++LABEL_NUSES (new_label);
871 /* Handle casesi dispatch insns */
872 if ((tmp = single_set (insn)) != NULL
873 && SET_DEST (tmp) == pc_rtx
874 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
875 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
876 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
878 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
880 --LABEL_NUSES (old_label);
881 ++LABEL_NUSES (new_label);
886 /* ?? We may play the games with moving the named labels from
887 one basic block to the other in case only one computed_jump is
889 if (computed_jump_p (insn)
890 /* A return instruction can't be redirected. */
891 || returnjump_p (insn))
894 /* If the insn doesn't go where we think, we're confused. */
895 if (JUMP_LABEL (insn) != old_label)
898 /* If the substitution doesn't succeed, die. This can happen
899 if the back end emitted unrecognizable instructions or if
900 target is exit block on some arches. */
901 if (!redirect_jump (insn, block_label (target), 0))
903 if (target == EXIT_BLOCK_PTR)
910 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
911 e->src->index, e->dest->index, target->index);
913 if (e->dest != target)
914 redirect_edge_succ_nodup (e, target);
919 /* Like force_nonfallthru below, but additionally performs redirection
920 Used by redirect_edge_and_branch_force. */
923 force_nonfallthru_and_redirect (e, target)
927 basic_block jump_block, new_bb = NULL, src = e->src;
930 int abnormal_edge_flags = 0;
932 /* In the case the last instruction is conditional jump to the next
933 instruction, first redirect the jump itself and then continue
934 by creating an basic block afterwards to redirect fallthru edge. */
935 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
936 && any_condjump_p (e->src->end)
937 /* When called from cfglayout, fallthru edges do not
938 neccessarily go to the next block. */
939 && e->src->next_bb == e->dest
940 && JUMP_LABEL (e->src->end) == e->dest->head)
943 edge b = unchecked_make_edge (e->src, target, 0);
945 if (!redirect_jump (e->src->end, block_label (target), 0))
947 note = find_reg_note (e->src->end, REG_BR_PROB, NULL_RTX);
/* Split the profile between the new taken edge B and the remaining
   fallthru edge E according to the branch-probability note.  */
950 int prob = INTVAL (XEXP (note, 0));
952 b->probability = prob;
953 b->count = e->count * prob / REG_BR_PROB_BASE;
/* NOTE(review): `e->probability -= e->probability' always zeroes
   e->probability; verify this is intended rather than subtracting
   b->probability.  */
954 e->probability -= e->probability;
955 e->count -= b->count;
956 if (e->probability < 0)
963 if (e->flags & EDGE_ABNORMAL)
965 /* Irritating special case - fallthru edge to the same block as abnormal
967 We can't redirect abnormal edge, but we still can split the fallthru
968 one and create separate abnormal edge to original destination.
969 This allows bb-reorder to make such edge non-fallthru. */
970 if (e->dest != target)
972 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
973 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
975 else if (!(e->flags & EDGE_FALLTHRU))
977 else if (e->src == ENTRY_BLOCK_PTR)
979 /* We can't redirect the entry block. Create an empty block at the
980 start of the function which we use to add the new jump. */
982 basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
984 /* Change the existing edge's source to be the new block, and add
985 a new edge from the entry block to the new block. */
987 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
995 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
/* If E is not the sole successor (or we must preserve an abnormal
   sibling), a fresh jump block is interposed; otherwise the jump is
   emitted at the end of E's source block directly.  */
998 if (e->src->succ->succ_next || abnormal_edge_flags)
1000 /* Create the new structures. */
1002 /* Position the new block correctly relative to loop notes. */
1003 note = last_loop_beg_note (e->src->end);
1004 note = NEXT_INSN (note);
1006 /* ... and ADDR_VECs. */
1008 && GET_CODE (note) == CODE_LABEL
1010 && GET_CODE (NEXT_INSN (note)) == JUMP_INSN
1011 && (GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_DIFF_VEC
1012 || GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_VEC))
1013 note = NEXT_INSN (NEXT_INSN (note));
1015 jump_block = create_basic_block (note, NULL, e->src);
1016 jump_block->count = e->count;
1017 jump_block->frequency = EDGE_FREQUENCY (e);
1018 jump_block->loop_depth = target->loop_depth;
1020 if (target->global_live_at_start)
1022 jump_block->global_live_at_start
1023 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1024 jump_block->global_live_at_end
1025 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1026 COPY_REG_SET (jump_block->global_live_at_start,
1027 target->global_live_at_start);
1028 COPY_REG_SET (jump_block->global_live_at_end,
1029 target->global_live_at_start);
1033 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1034 new_edge->probability = e->probability;
1035 new_edge->count = e->count;
1037 /* Redirect old edge. */
1038 redirect_edge_pred (e, jump_block);
1039 e->probability = REG_BR_PROB_BASE;
1041 new_bb = jump_block;
1044 jump_block = e->src;
1046 e->flags &= ~EDGE_FALLTHRU;
/* Jumping to the exit block is done with a return insn (stubbed to
   NULL_RTX when the target has no return pattern).  */
1047 if (target == EXIT_BLOCK_PTR)
1050 emit_jump_insn_after (gen_return (), jump_block->end);
1056 rtx label = block_label (target);
1057 emit_jump_insn_after (gen_jump (label), jump_block->end);
1058 JUMP_LABEL (jump_block->end) = label;
1059 LABEL_NUSES (label)++;
1062 emit_barrier_after (jump_block->end);
1063 redirect_edge_succ_nodup (e, target);
1065 if (abnormal_edge_flags)
1066 make_edge (src, target, abnormal_edge_flags);
1071 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1072 (and possibly create new basic block) to make edge non-fallthru.
1073 Return newly created BB or NULL if none. */
1076 force_nonfallthru (e)
/* Same destination, just forced through an explicit jump.  */
1079 return force_nonfallthru_and_redirect (e, e->dest);
1082 /* Redirect edge even at the expense of creating new jump insn or
1083 basic block. Return new basic block if created, NULL otherwise.
1084 Abort if conversion is impossible. */
1087 rtl_redirect_edge_and_branch_force (e, target)
/* Try the cheap in-place redirection first.  */
1091 if (redirect_edge_and_branch (e, target)
1092 || e->dest == target)
1095 /* In case the edge redirection failed, try to force it to be non-fallthru
1096 and redirect newly created simplejump. */
1097 return force_nonfallthru_and_redirect (e, target);
1100 /* The given edge should potentially be a fallthru edge. If that is in
1101 fact true, delete the jump and barriers that are in the way. */
1104 tidy_fallthru_edge (e, b, c)
1110 /* ??? In a late-running flow pass, other folks may have deleted basic
1111 blocks by nopping out blocks, leaving multiple BARRIERs between here
1112 and the target label. They ought to be chastized and fixed.
1114 We can also wind up with a sequence of undeletable labels between
1115 one block and the next.
1117 So search through a sequence of barriers, labels, and notes for
1118 the head of block C and assert that we really do fall through. */
1120 for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
1124 /* Remove what will soon cease being the jump insn from the source block.
1125 If block B consisted only of this single jump, turn it into a deleted
1128 if (GET_CODE (q) == JUMP_INSN
1130 && (any_uncondjump_p (q)
1131 || (b->succ == e && e->succ_next == NULL)))
1134 /* If this was a conditional jump, we need to also delete
1135 the insn that set cc0. */
1136 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1142 /* We don't want a block to end on a line-number note since that has
1143 the potential of changing the code between -g and not -g. */
1144 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1148 /* Selectively unlink the sequence. */
/* Everything between the kept position Q and C's head is dead.  */
1149 if (q != PREV_INSN (c->head))
1150 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
1152 e->flags |= EDGE_FALLTHRU;
1155 /* Fix up edges that now fall through, or rather should now fall through
1156 but previously required a jump around now deleted blocks. Simplify
1157 the search by only examining blocks numerically adjacent, since this
1158 is how find_basic_blocks created them. */
1161 tidy_fallthru_edges ()
/* Nothing to tidy in an empty function body.  */
1165 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1168 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1174 /* We care about simple conditional or unconditional jumps with
1177 If we had a conditional branch to the next instruction when
1178 find_basic_blocks was called, then there will only be one
1179 out edge for the block which ended with the conditional
1180 branch (since we do not create duplicate edges).
1182 Furthermore, the edge will be marked as a fallthru because we
1183 merge the flags for the duplicate edges. So we do not want to
1184 check that the edge is not a FALLTHRU edge. */
1186 if ((s = b->succ) != NULL
1187 && ! (s->flags & EDGE_COMPLEX)
1188 && s->succ_next == NULL
1190 /* If the jump insn has side effects, we can't tidy the edge. */
1191 && (GET_CODE (b->end) != JUMP_INSN
1192 || onlyjump_p (b->end)))
1193 tidy_fallthru_edge (s, b, c);
1197 /* Helper function for split_edge. Return true in case edge BB2 to BB1
1198 is back edge of syntactic loop. */
1201 back_edge_of_syntactic_loop_p (bb1, bb2)
1202 basic_block bb1, bb2;
1211 /* ??? Could we guarantee that bb indices are monotone, so that we could
1212 just compare them? */
/* First establish that BB1 precedes BB2 in the block chain.  */
1213 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
/* Then count LOOP_BEG/LOOP_END note nesting between the two blocks;
   a net-open loop means the edge closes a syntactic loop.  */
1219 for (insn = bb1->end; insn != bb2->head && count >= 0;
1220 insn = NEXT_INSN (insn))
1221 if (GET_CODE (insn) == NOTE)
1223 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1225 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1232 /* Split a (typically critical) edge. Return the new block.
1233 Abort on abnormal edges.
1235 ??? The code generally expects to be called on critical edges.
1236 The case of a block ending in an unconditional jump to a
1237 block with multiple predecessors is not handled optimally. */
1240 rtl_split_edge (edge_in)
1246 /* Abnormal edges cannot be split. */
1247 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1250 /* We are going to place the new block in front of edge destination.
1251 Avoid existence of fallthru predecessors. */
1252 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1256 for (e = edge_in->dest->pred; e; e = e->pred_next)
1257 if (e->flags & EDGE_FALLTHRU)
1261 force_nonfallthru (e);
1264 /* Create the basic block note.
1266 Where we place the note can have a noticeable impact on the generated
1267 code. Consider this cfg:
1277 If we need to insert an insn on the edge from block 0 to block 1,
1278 we want to ensure the instructions we insert are outside of any
1279 loop notes that physically sit between block 0 and block 1. Otherwise
1280 we confuse the loop optimizer into thinking the loop is a phony. */
/* Choose the insertion point: before a LOOP_BEG note preceding the
   destination head (unless this is a syntactic back edge), else at
   the destination head itself.  */
1282 if (edge_in->dest != EXIT_BLOCK_PTR
1283 && PREV_INSN (edge_in->dest->head)
1284 && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
1285 && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
1286 == NOTE_INSN_LOOP_BEG)
1287 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1288 before = PREV_INSN (edge_in->dest->head);
1289 else if (edge_in->dest != EXIT_BLOCK_PTR)
1290 before = edge_in->dest->head;
1294 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1295 bb->count = edge_in->count;
1296 bb->frequency = EDGE_FREQUENCY (edge_in);
1298 /* ??? This info is likely going to be out of date very soon. */
1299 if (edge_in->dest->global_live_at_start)
1301 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1302 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1303 COPY_REG_SET (bb->global_live_at_start,
1304 edge_in->dest->global_live_at_start);
1305 COPY_REG_SET (bb->global_live_at_end,
1306 edge_in->dest->global_live_at_start);
1309 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1311 /* For non-fallthry edges, we must adjust the predecessor's
1312 jump instruction to target our new block. */
1313 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1315 if (!redirect_edge_and_branch (edge_in, bb))
1319 redirect_edge_succ (edge_in, bb);
1324 /* Queue instructions for insertion on an edge between two basic blocks.
1325 The new instructions and basic blocks (if any) will not appear in the
1326 CFG until commit_edge_insertions is called. */
1329 insert_insn_on_edge (pattern, e)
1333 /* We cannot insert instructions on an abnormal critical edge.
1334 It will be easier to find the culprit if we die now. */
1335 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
/* Append PATTERN to any insns already queued on this edge; e->insns
   accumulates the pending sequence until commit time.  */
1338 if (e->insns == NULL_RTX)
1341 push_to_sequence (e->insns);
1343 emit_insn (pattern);
/* Save the (possibly extended) sequence back on the edge.  */
1345 e->insns = get_insns ();
1349 /* Update the CFG for the instructions queued on edge E. */
/* If WATCH_CALLS is true, take care not to separate a call from the
   insn that stores its return value.  NOTE(review): listing is elided;
   several statements of the original function are missing here.  */
1352 commit_one_edge_insertion (e, watch_calls)
1356 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1357 basic_block bb = NULL;
1359 /* Pull the insns off the edge now since the edge might go away. */
1361 e->insns = NULL_RTX;
1363 /* Special case -- avoid inserting code between call and storing
1364 its return value. */
/* Applies only to a fallthru edge into a single-predecessor block
   whose source ends in a CALL_INSN.  */
1365 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1366 && e->src != ENTRY_BLOCK_PTR
1367 && GET_CODE (e->src->end) == CALL_INSN)
1369 rtx next = next_nonnote_insn (e->src->end);
1371 after = e->dest->head;
1372 /* The first insn after the call may be a stack pop, skip it. */
1374 && keep_with_call_p (next))
1377 next = next_nonnote_insn (next);
1381 if (!before && !after)
1383 /* Figure out where to put these things. If the destination has
1384 one predecessor, insert there. Except for the exit block. */
1385 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1389 /* Get the location correct wrt a code label, and "nice" wrt
1390 a basic block note, and before everything else. */
1392 if (GET_CODE (tmp) == CODE_LABEL)
1393 tmp = NEXT_INSN (tmp);
1394 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1395 tmp = NEXT_INSN (tmp);
1396 if (tmp == bb->head)
1399 after = PREV_INSN (tmp);
1401 after = get_last_insn ();
1404 /* If the source has one successor and the edge is not abnormal,
1405 insert there. Except for the entry block. */
1406 else if ((e->flags & EDGE_ABNORMAL) == 0
1407 && e->src->succ->succ_next == NULL
1408 && e->src != ENTRY_BLOCK_PTR)
1412 /* It is possible to have a non-simple jump here. Consider a target
1413 where some forms of unconditional jumps clobber a register. This
1414 happens on the fr30 for example.
1416 We know this block has a single successor, so we can just emit
1417 the queued insns before the jump. */
/* Back up over any NOTE_INSN_LOOP_BEG notes so the new code lands
   outside the loop notes.  */
1418 if (GET_CODE (bb->end) == JUMP_INSN)
1419 for (before = bb->end;
1420 GET_CODE (PREV_INSN (before)) == NOTE
1421 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1422 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1426 /* We'd better be fallthru, or we've lost track of what's what. */
1427 if ((e->flags & EDGE_FALLTHRU) == 0)
1433 /* Otherwise we must split the edge. */
1436 bb = split_edge (e);
1441 /* Now that we've found the spot, do the insertion. */
1445 emit_insn_before (insns, before);
1446 last = prev_nonnote_insn (before);
1449 last = emit_insn_after (insns, after);
/* If the inserted sequence ends in a return, the edge must now go to
   EXIT; mark it non-fallthru and terminate with a barrier.  */
1451 if (returnjump_p (last))
1453 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1454 This is not currently a problem because this only happens
1455 for the (single) epilogue, which already has a fallthru edge
1459 if (e->dest != EXIT_BLOCK_PTR
1460 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1463 e->flags &= ~EDGE_FALLTHRU;
1464 emit_barrier_after (last);
1467 delete_insn (before);
1469 else if (GET_CODE (last) == JUMP_INSN)
1472 /* Mark the basic block for find_sub_basic_blocks. */
1479 commit_edge_insertions ()
1483 bool changed = false;
1485 #ifdef ENABLE_CHECKING
1486 verify_flow_info ();
1489 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1493 for (e = bb->succ; e; e = next)
1495 next = e->succ_next;
1499 commit_one_edge_insertion (e, false);
1507 blocks = sbitmap_alloc (last_basic_block);
1508 sbitmap_zero (blocks);
1512 SET_BIT (blocks, bb->index);
1513 /* Check for forgotten bb->aux values before commit_edge_insertions
1515 if (bb->aux != &bb->aux)
1519 find_many_sub_basic_blocks (blocks);
1520 sbitmap_free (blocks);
1523 /* Update the CFG for all queued instructions, taking special care not to
1524 insert code between a call and the insn storing its return value. */
1527 commit_edge_insertions_watch_calls ()
1531 bool changed = false;
1533 #ifdef ENABLE_CHECKING
1534 verify_flow_info ();
/* Identical to commit_edge_insertions except that each edge is
   committed with watch_calls == true.  */
1537 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1541 for (e = bb->succ; e; e = next)
1543 next = e->succ_next;
1547 commit_one_edge_insertion (e, true);
1555 blocks = sbitmap_alloc (last_basic_block);
1556 sbitmap_zero (blocks);
1560 SET_BIT (blocks, bb->index);
1561 /* Check for forgotten bb->aux values before commit_edge_insertions
1563 if (bb->aux != &bb->aux)
1567 find_many_sub_basic_blocks (blocks);
1568 sbitmap_free (blocks);
1571 /* Print out one basic block with live information at start and end. */
/* NOTE(review): the signatures of dump_bb, debug_bb and debug_bb_n are
   elided from this listing; only the bodies below are visible.  */
1582 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
1583 bb->index, bb->loop_depth);
1584 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
1587 fputs (";; Predecessors: ", outf);
1588 for (e = bb->pred; e; e = e->pred_next)
1589 dump_edge_info (outf, e, 0);
1592 fputs (";; Registers live at start:", outf);
1593 dump_regset (bb->global_live_at_start, outf);
/* Print every insn of the block, head through end inclusive.  */
1596 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1597 insn = NEXT_INSN (insn))
1598 print_rtl_single (outf, insn);
1600 fputs (";; Registers live at end:", outf);
1601 dump_regset (bb->global_live_at_end, outf);
1604 fputs (";; Successors: ", outf);
1605 for (e = bb->succ; e; e = e->succ_next)
1606 dump_edge_info (outf, e, 1);
/* debug_bb: dump BB to stderr (for use from a debugger).  */
1614 dump_bb (bb, stderr);
/* debug_bb_n: dump basic block number N to stderr.  */
1621 basic_block bb = BASIC_BLOCK (n);
1622 dump_bb (bb, stderr);
1626 /* Like print_rtl, but also print out live information for the start of each
1630 print_rtl_with_bb (outf, rtx_first)
1637 fprintf (outf, "(nil)\n");
1640 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
/* Per-insn-UID maps: START/END record the block beginning/ending at an
   insn; IN_BB_P tracks whether an insn is in zero, one, or many blocks.  */
1641 int max_uid = get_max_uid ();
1643 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1645 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1646 enum bb_state *in_bb_p
1647 = (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));
1651 FOR_EACH_BB_REVERSE (bb)
1655 start[INSN_UID (bb->head)] = bb;
1656 end[INSN_UID (bb->end)] = bb;
1657 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
1659 enum bb_state state = IN_MULTIPLE_BB;
1661 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1663 in_bb_p[INSN_UID (x)] = state;
/* Walk the full insn chain, annotating block starts/ends and flagging
   stray insns.  */
1670 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1674 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1676 fprintf (outf, ";; Start of basic block %d, registers live:",
1678 dump_regset (bb->global_live_at_start, outf);
1682 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1683 && GET_CODE (tmp_rtx) != NOTE
1684 && GET_CODE (tmp_rtx) != BARRIER)
1685 fprintf (outf, ";; Insn is not within a basic block\n");
1686 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1687 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1689 did_output = print_rtl_single (outf, tmp_rtx);
1691 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1693 fprintf (outf, ";; End of basic block %d, registers live:\n",
1695 dump_regset (bb->global_live_at_end, outf);
/* Epilogue delay-slot insns live outside the normal chain; dump them
   separately.  */
1708 if (current_function_epilogue_delay_list != 0)
1710 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1711 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1712 tmp_rtx = XEXP (tmp_rtx, 1))
1713 print_rtl_single (outf, XEXP (tmp_rtx, 0));
/* Sync the REG_BR_PROB note on BB's final jump with the probability
   recorded on the CFG branch edge; no-op if BB does not end in a jump
   or the note already matches.  */
1718 update_br_prob_note (bb)
1722 if (GET_CODE (bb->end) != JUMP_INSN)
1724 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
1725 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1727 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1730 /* Verify the CFG consistency. This function checks some CFG invariants and
1731 aborts when something is wrong. Hope that this function will help to
1732 convert many optimization passes to keep the CFG consistent.
1734 Currently it does the following checks:
1736 - test head/end pointers
1737 - overlapping of basic blocks
1738 - edge list correctness
1739 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1740 - tails of basic blocks (ensure that boundary is necessary)
1741 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1742 and NOTE_INSN_BASIC_BLOCK
1743 - check that all insns are in the basic blocks
1744 (except the switch handling code, barriers and notes)
1745 - check that all returns are followed by barriers
1747 In future it can be extended to check a lot of other stuff as well
1748 (reachability of basic blocks, life information, etc. etc.). */
/* NOTE(review): this listing is elided; many statements of the original
   function are missing between the visible lines.  */
1751 rtl_verify_flow_info ()
1753 const int max_uid = get_max_uid ();
1754 const rtx rtx_first = get_insns ();
1755 rtx last_head = get_last_insn ();
1756 basic_block *bb_info, *last_visited;
1757 size_t *edge_checksum;
1759 int num_bb_notes, err = 0;
1760 basic_block bb, last_bb_seen;
/* Index arrays are offset by 2 so ENTRY (-2) and EXIT (-1) fit.  */
1762 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1763 last_visited = (basic_block *) xcalloc (last_basic_block + 2,
1764 sizeof (basic_block));
1765 edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));
1767 /* Check bb chain & numbers. */
1768 last_bb_seen = ENTRY_BLOCK_PTR;
1769 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
1771 if (bb != EXIT_BLOCK_PTR
1772 && bb != BASIC_BLOCK (bb->index))
1774 error ("bb %d on wrong place", bb->index);
1778 if (bb->prev_bb != last_bb_seen)
1780 error ("prev_bb of %d should be %d, not %d",
1781 bb->index, last_bb_seen->index, bb->prev_bb->index);
/* Verify that each block's head/end insns actually appear in the insn
   chain, and that no insn belongs to two blocks.  */
1788 FOR_EACH_BB_REVERSE (bb)
1790 rtx head = bb->head;
1793 /* Verify the end of the basic block is in the INSN chain. */
1794 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1800 error ("end insn %d for block %d not found in the insn stream",
1801 INSN_UID (end), bb->index);
1805 /* Work backwards from the end to the head of the basic block
1806 to verify the head is in the RTL chain. */
1807 for (; x != NULL_RTX; x = PREV_INSN (x))
1809 /* While walking over the insn chain, verify insns appear
1810 in only one basic block and initialize the BB_INFO array
1811 used by other passes. */
1812 if (bb_info[INSN_UID (x)] != NULL)
1814 error ("insn %d is in multiple basic blocks (%d and %d)",
1815 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1819 bb_info[INSN_UID (x)] = bb;
1826 error ("head insn %d for block %d not found in the insn stream",
1827 INSN_UID (head), bb->index);
1834 /* Now check the basic blocks (boundaries etc.) */
1835 FOR_EACH_BB_REVERSE (bb)
1837 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
/* REG_BR_PROB note on a two-way conditional jump must match the
   probability stored on the branch edge.  */
1841 if (INSN_P (bb->end)
1842 && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
1843 && bb->succ && bb->succ->succ_next
1844 && any_condjump_p (bb->end))
1846 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1848 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
1849 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1855 error ("verify_flow_info: Wrong count of block %i %i",
1856 bb->index, (int)bb->count);
1859 if (bb->frequency < 0)
1861 error ("verify_flow_info: Wrong frequency of block %i %i",
1862 bb->index, bb->frequency);
/* Check every successor edge: no duplicates, sane probability/count,
   and classify it (fallthru / branch / EH / abnormal call).  */
1865 for (e = bb->succ; e; e = e->succ_next)
1867 if (last_visited [e->dest->index + 2] == bb)
1869 error ("verify_flow_info: Duplicate edge %i->%i",
1870 e->src->index, e->dest->index);
1873 if (e->probability < 0 || e->probability > REG_BR_PROB_BASE)
1875 error ("verify_flow_info: Wrong probability of edge %i->%i %i",
1876 e->src->index, e->dest->index, e->probability);
1881 error ("verify_flow_info: Wrong count of edge %i->%i %i",
1882 e->src->index, e->dest->index, (int)e->count);
1886 last_visited [e->dest->index + 2] = bb;
1888 if (e->flags & EDGE_FALLTHRU)
1891 if ((e->flags & ~(EDGE_DFS_BACK | EDGE_CAN_FALLTHRU | EDGE_IRREDUCIBLE_LOOP)) == 0)
1894 if (e->flags & EDGE_ABNORMAL_CALL)
1897 if (e->flags & EDGE_EH)
1899 else if (e->flags & EDGE_ABNORMAL)
/* A fallthru edge between real blocks must connect adjacent blocks and
   carry nothing but notes/jump-table data between them.  */
1902 if ((e->flags & EDGE_FALLTHRU)
1903 && e->src != ENTRY_BLOCK_PTR
1904 && e->dest != EXIT_BLOCK_PTR)
1908 if (e->src->next_bb != e->dest)
1911 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
1912 e->src->index, e->dest->index);
1916 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
1917 insn = NEXT_INSN (insn))
1918 if (GET_CODE (insn) == BARRIER
1919 #ifndef CASE_DROPS_THROUGH
1922 || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
1926 error ("verify_flow_info: Incorrect fallthru %i->%i",
1927 e->src->index, e->dest->index);
1928 fatal_insn ("wrong insn in the fallthru edge", insn);
1935 error ("verify_flow_info: Basic block %d succ edge is corrupted",
1937 fprintf (stderr, "Predecessor: ");
1938 dump_edge_info (stderr, e, 0);
1939 fprintf (stderr, "\nSuccessor: ");
1940 dump_edge_info (stderr, e, 1);
1941 fprintf (stderr, "\n");
/* Checksum edges by pointer: added here on the successor side,
   subtracted on the predecessor side; any per-block residue means the
   succ and pred lists disagree.  */
1945 edge_checksum[e->dest->index + 2] += (size_t) e;
1948 if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
1949 && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
1951 error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1955 && (GET_CODE (bb->end) != JUMP_INSN
1956 || (n_branch > 1 && (any_uncondjump_p (bb->end)
1957 || any_condjump_p (bb->end)))))
1959 error ("Too many outgoing branch edges from bb %i", bb->index);
1962 if (n_fallthru && any_uncondjump_p (bb->end))
1964 error ("Fallthru edge after unconditional jump %i", bb->index);
1967 if (n_branch != 1 && any_uncondjump_p (bb->end))
1969 error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
1972 if (n_branch != 1 && any_condjump_p (bb->end)
1973 && JUMP_LABEL (bb->end) != bb->next_bb->head)
1975 error ("Wrong amount of branch edges after conditional jump %i", bb->index);
1978 if (n_call && GET_CODE (bb->end) != CALL_INSN)
1980 error ("Call edges for non-call insn in bb %i", bb->index);
1984 && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
1985 && (GET_CODE (bb->end) != JUMP_INSN
1986 || any_condjump_p (bb->end)
1987 || any_uncondjump_p (bb->end)))
1989 error ("Abnormal edges for no purpose in bb %i", bb->index);
1997 /* Ensure existence of barrier in BB with no fallthru edges. */
1998 for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
1999 insn = NEXT_INSN (insn))
2001 || (GET_CODE (insn) == NOTE
2002 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
2004 error ("missing barrier after block %i", bb->index);
/* Walk predecessor edges, completing the edge checksum.  */
2010 for (e = bb->pred; e; e = e->pred_next)
2014 error ("basic block %d pred edge is corrupted", bb->index);
2015 fputs ("Predecessor: ", stderr);
2016 dump_edge_info (stderr, e, 0);
2017 fputs ("\nSuccessor: ", stderr);
2018 dump_edge_info (stderr, e, 1);
2019 fputc ('\n', stderr);
2022 edge_checksum[e->dest->index + 2] -= (size_t) e;
/* Each insn inside the block must have BLOCK_FOR_INSN pointing back
   at the block.  */
2025 for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
2026 if (BLOCK_FOR_INSN (x) != bb)
2029 if (! BLOCK_FOR_INSN (x))
2031 ("insn %d inside basic block %d but block_for_insn is NULL",
2032 INSN_UID (x), bb->index);
2035 ("insn %d inside basic block %d but block_for_insn is %i",
2036 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2041 /* OK pointers are correct. Now check the header of basic
2042 block. It ought to contain optional CODE_LABEL followed
2043 by NOTE_BASIC_BLOCK. */
2045 if (GET_CODE (x) == CODE_LABEL)
2049 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2057 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2059 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2065 /* Do checks for empty blocks here. */
/* Scan the block body: no stray basic-block notes, and no control-flow
   insn anywhere but at the end.  */
2068 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2070 if (NOTE_INSN_BASIC_BLOCK_P (x))
2072 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2073 INSN_UID (x), bb->index);
2080 if (control_flow_insn_p (x))
2082 error ("in basic block %d:", bb->index);
2083 fatal_insn ("flow control insn inside a basic block", x);
2088 /* Complete edge checksumming for ENTRY and EXIT. */
2092 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
2093 edge_checksum[e->dest->index + 2] += (size_t) e;
2095 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2096 edge_checksum[e->dest->index + 2] -= (size_t) e;
2099 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2100 if (edge_checksum[bb->index + 2])
2102 error ("basic block %i edge lists are corrupted", bb->index);
2107 last_bb_seen = ENTRY_BLOCK_PTR;
/* Final linear scan of the whole insn stream: bb notes are numbered
   consecutively, and any insn outside all blocks must be jump-table
   data or a non-deletable label.  */
2109 for (x = rtx_first; x; x = NEXT_INSN (x))
2111 if (NOTE_INSN_BASIC_BLOCK_P (x))
2113 bb = NOTE_BASIC_BLOCK (x);
2116 if (bb != last_bb_seen->next_bb)
2117 internal_error ("basic blocks not numbered consecutively");
2122 if (!bb_info[INSN_UID (x)])
2124 switch (GET_CODE (x))
2131 /* An addr_vec is placed outside any basic block. */
2133 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
2134 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2135 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2138 /* But in any case, non-deletable labels can appear anywhere. */
2142 fatal_insn ("insn outside basic block", x);
2147 && GET_CODE (x) == JUMP_INSN
2148 && returnjump_p (x) && ! condjump_p (x)
2149 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
2150 fatal_insn ("return not followed by barrier", x);
2153 if (num_bb_notes != n_basic_blocks)
2155 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2156 num_bb_notes, n_basic_blocks);
2159 internal_error ("verify_flow_info failed");
2163 free (last_visited);
2164 free (edge_checksum);
2167 /* Assume that the preceding pass has possibly eliminated jump instructions
2168 or converted the unconditional jumps. Eliminate the edges from CFG.
2169 Return true if any edges are eliminated. */
/* NOTE(review): listing is elided; several statements of the original
   function are missing between the visible lines.  */
2172 purge_dead_edges (bb)
2176 rtx insn = bb->end, note;
2177 bool purged = false;
2179 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2180 if (GET_CODE (insn) == INSN
2181 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2185 if (! may_trap_p (PATTERN (insn))
2186 || ((eqnote = find_reg_equal_equiv_note (insn))
2187 && ! may_trap_p (XEXP (eqnote, 0))))
2188 remove_note (insn, note);
2191 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2192 for (e = bb->succ; e; e = next)
2194 next = e->succ_next;
2195 if (e->flags & EDGE_EH)
2197 if (can_throw_internal (bb->end))
2200 else if (e->flags & EDGE_ABNORMAL_CALL)
2202 if (GET_CODE (bb->end) == CALL_INSN
2203 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2204 || INTVAL (XEXP (note, 0)) >= 0))
2211 bb->flags |= BB_DIRTY;
/* Case 1: the block ends in a jump insn.  */
2215 if (GET_CODE (insn) == JUMP_INSN)
2220 /* We do care only about conditional jumps and simplejumps. */
2221 if (!any_condjump_p (insn)
2222 && !returnjump_p (insn)
2223 && !simplejump_p (insn))
2226 /* Branch probability/prediction notes are defined only for
2227 condjumps. We've possibly turned condjump into simplejump. */
2228 if (simplejump_p (insn))
2230 note = find_reg_note (insn, REG_BR_PROB, NULL);
2232 remove_note (insn, note);
2233 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2234 remove_note (insn, note);
2237 for (e = bb->succ; e; e = next)
2239 next = e->succ_next;
2241 /* Keep abnormal flags from leaking out of computed jumps turned
2242 into simplejumps. */
2244 e->flags &= ~EDGE_ABNORMAL;
2246 /* See if this edge is one we should keep. */
2247 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2248 /* A conditional jump can fall through into the next
2249 block, so we should keep the edge. */
2251 else if (e->dest != EXIT_BLOCK_PTR
2252 && e->dest->head == JUMP_LABEL (insn))
2253 /* If the destination block is the target of the jump,
2256 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2257 /* If the destination block is the exit block, and this
2258 instruction is a return, then keep the edge. */
2260 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2261 /* Keep the edges that correspond to exceptions thrown by
2262 this instruction. */
2265 /* We do not need this edge. */
2266 bb->flags |= BB_DIRTY;
2271 if (!bb->succ || !purged)
2275 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
2280 /* Redistribute probabilities. */
2281 if (!bb->succ->succ_next)
2283 bb->succ->probability = REG_BR_PROB_BASE;
2284 bb->succ->count = bb->count;
/* Two successors remain: split count between branch and fallthru
   edges according to the REG_BR_PROB note.  */
2288 note = find_reg_note (insn, REG_BR_PROB, NULL);
2292 b = BRANCH_EDGE (bb);
2293 f = FALLTHRU_EDGE (bb);
2294 b->probability = INTVAL (XEXP (note, 0));
2295 f->probability = REG_BR_PROB_BASE - b->probability;
2296 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2297 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
/* Case 2: the block ends in a sibling call.  */
2302 else if (GET_CODE (insn) == CALL_INSN && SIBLING_CALL_P (insn))
2304 /* First, there should not be any EH or ABCALL edges resulting
2305 from non-local gotos and the like. If there were, we shouldn't
2306 have created the sibcall in the first place. Second, there
2307 should of course never have been a fallthru edge. */
2308 if (!bb->succ || bb->succ->succ_next)
2310 if (bb->succ->flags != EDGE_SIBCALL)
/* Case 3: no jump at the end.  */
2316 /* If we don't see a jump insn, we don't know exactly why the block would
2317 have been broken at this point. Look for a simple, non-fallthru edge,
2318 as these are only created by conditional branches. If we find such an
2319 edge we know that there used to be a jump here and can then safely
2320 remove all non-fallthru edges. */
2321 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
2328 for (e = bb->succ; e; e = next)
2330 next = e->succ_next;
2331 if (!(e->flags & EDGE_FALLTHRU))
2333 bb->flags |= BB_DIRTY;
2339 if (!bb->succ || bb->succ->succ_next)
2342 bb->succ->probability = REG_BR_PROB_BASE;
2343 bb->succ->count = bb->count;
2346 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
2351 /* Search all basic blocks for potentially dead edges and purge them. Return
2352 true if some edge has been eliminated. */
/* If UPDATE_LIFE_P, recompute liveness for the blocks whose edges
   changed.  */
2355 purge_all_dead_edges (update_life_p)
2364 blocks = sbitmap_alloc (last_basic_block);
2365 sbitmap_zero (blocks);
2370 bool purged_here = purge_dead_edges (bb);
2372 purged |= purged_here;
/* Remember which blocks changed so liveness is only updated there.  */
2373 if (purged_here && update_life_p)
2374 SET_BIT (blocks, bb->index);
2377 if (update_life_p && purged)
2378 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2379 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2380 | PROP_KILL_DEAD_CODE);
2383 sbitmap_free (blocks);
2387 /* Same as split_block but update cfg_layout structures. */
2389 cfg_layout_split_block (bb, insnp)
2395 edge fallthru = rtl_split_block (bb, insn);
/* The footer (insns kept after the block in layout mode) moves to the
   new (second) half of the split; the first half loses it.  */
2397 alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
2398 RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
2399 RBI (fallthru->src)->footer = NULL;
2404 /* Redirect edge E to DEST, keeping the cfglayout invariants (no
2405 simplejumps in the stream; fallthru edges need not be adjacent). */
2406 cfg_layout_redirect_edge_and_branch (e, dest)
2410 basic_block src = e->src;
2411 basic_block old_next_bb = src->next_bb;
2414 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
2415 in the case the basic block appears to be in sequence. Avoid this
/* Temporarily hide the successor so rtl_redirect_edge_and_branch
   cannot treat the blocks as physically adjacent; restored below.  */
2418 src->next_bb = NULL;
2419 if (e->flags & EDGE_FALLTHRU)
2421 /* Redirect any branch edges unified with the fallthru one. */
2422 if (GET_CODE (src->end) == JUMP_INSN
2423 && JUMP_LABEL (src->end) == e->dest->head)
2425 if (!redirect_jump (src->end, block_label (dest), 0))
2428 /* In case we are redirecting fallthru edge to the branch edge
2429 of conditional jump, remove it. */
2430 if (src->succ->succ_next
2431 && !src->succ->succ_next->succ_next)
2433 edge s = e->succ_next ? e->succ_next : src->succ;
2435 && any_condjump_p (src->end)
2436 && onlyjump_p (src->end))
2437 delete_insn (src->end);
2439 redirect_edge_succ_nodup (e, dest);
/* Non-fallthru edge: let the plain RTL redirection do the work.  */
2444 ret = rtl_redirect_edge_and_branch (e, dest);
2446 /* We don't want simplejumps in the insn stream during cfglayout. */
2447 if (simplejump_p (src->end))
2449 delete_insn (src->end);
2450 delete_barrier (NEXT_INSN (src->end));
2451 src->succ->flags |= EDGE_FALLTHRU;
2453 src->next_bb = old_next_bb;
2458 /* Simple wrapper as we always can redirect fallthru edges. */
/* In cfglayout mode redirection never needs to split the edge, so this
   just delegates and treats failure as an internal error.  */
2460 cfg_layout_redirect_edge_and_branch_force (e, dest)
2464 if (!cfg_layout_redirect_edge_and_branch (e, dest))
2469 /* Same as flow_delete_block but update cfg_layout structures. */
/* Splices BB's layout header/footer insns back into the main insn
   chain before deleting the block, then hands any leftover insns to
   the next block's header (or the function footer).  NOTE(review):
   listing is elided; several statements are missing here.  */
2471 cfg_layout_delete_block (bb)
2474 rtx insn, next, prev = PREV_INSN (bb->head), *to, remaints;
/* Link the block's header insns in front of bb->head.  */
2476 if (RBI (bb)->header)
2480 NEXT_INSN (prev) = RBI (bb)->header;
2482 set_first_insn (RBI (bb)->header);
2483 PREV_INSN (RBI (bb)->header) = prev;
2484 insn = RBI (bb)->header;
2485 while (NEXT_INSN (insn))
2486 insn = NEXT_INSN (insn);
2487 NEXT_INSN (insn) = next;
2488 PREV_INSN (next) = insn;
/* Link the block's footer insns after bb->end.  */
2490 next = NEXT_INSN (bb->end);
2491 if (RBI (bb)->footer)
2494 NEXT_INSN (insn) = RBI (bb)->footer;
2495 PREV_INSN (RBI (bb)->footer) = insn;
2496 while (NEXT_INSN (insn))
2497 insn = NEXT_INSN (insn);
2498 NEXT_INSN (insn) = next;
2500 PREV_INSN (next) = insn;
2502 set_last_insn (insn);
/* Choose where surviving insns will be reattached after deletion.  */
2504 if (bb->next_bb != EXIT_BLOCK_PTR)
2505 to = &RBI(bb->next_bb)->header;
2507 to = &cfg_layout_function_footer;
2508 rtl_delete_block (bb);
2511 prev = NEXT_INSN (prev);
2513 prev = get_insns ();
2515 next = PREV_INSN (next);
2517 next = get_last_insn ();
/* If insns survived the deletion, unlink them and append to *TO.
   ("remaints" is a historical typo for "remains".)  */
2519 if (next && NEXT_INSN (next) != prev)
2521 remaints = unlink_insn_chain (prev, next);
2523 while (NEXT_INSN (insn))
2524 insn = NEXT_INSN (insn);
2525 NEXT_INSN (insn) = *to;
2527 PREV_INSN (*to) = insn;
2532 /* Implementation of CFG manipulation for linearized RTL. */
2533 struct cfg_hooks rtl_cfg_hooks = {
2534 rtl_verify_flow_info,
2535 rtl_redirect_edge_and_branch,
2536 rtl_redirect_edge_and_branch_force,
2542 /* Implementation of CFG manipulation for cfg layout RTL, where
2543 basic blocks connected via fallthru edges do not have to be adjacent.
2544 This representation will hopefully become the default one in a future
2545 version of the compiler. */
2546 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
2547 NULL, /* verify_flow_info. */
2548 cfg_layout_redirect_edge_and_branch,
2549 cfg_layout_redirect_edge_and_branch_force,
2550 cfg_layout_delete_block,
2551 cfg_layout_split_block,
2552 NULL /* split_edge. */