1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, rtl_delete_block,rtl_split_block,
31 - Infrastructure to determine quickly basic block for insn
32 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36 - Edge splitting and committing to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - CFG updating after constant propagation
39 purge_dead_edges, purge_all_dead_edges */
43 #include "coretypes.h"
47 #include "hard-reg-set.h"
48 #include "basic-block.h"
57 #include "insn-config.h"
58 #include "cfglayout.h"
60 /* Stubs in case we don't have a return insn. */
63 #define gen_return() NULL_RTX
66 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
67 /* ??? Should probably be using LABEL_NUSES instead. It would take a
68 bit of surgery to be able to use or co-opt the routines in jump. */
70 rtx tail_recursion_label_list;
72 static int can_delete_note_p PARAMS ((rtx));
73 static int can_delete_label_p PARAMS ((rtx));
74 static void commit_one_edge_insertion PARAMS ((edge, int));
75 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
76 static rtx last_loop_beg_note PARAMS ((rtx));
77 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
78 basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
79 static basic_block rtl_split_edge PARAMS ((edge));
80 static int rtl_verify_flow_info PARAMS ((void));
81 static edge cfg_layout_split_block PARAMS ((basic_block, void *));
82 static bool cfg_layout_redirect_edge_and_branch PARAMS ((edge, basic_block));
83 static basic_block cfg_layout_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
84 static void cfg_layout_delete_block PARAMS ((basic_block));
85 static void rtl_delete_block PARAMS ((basic_block));
86 static basic_block rtl_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
87 static bool rtl_redirect_edge_and_branch PARAMS ((edge, basic_block));
88 static edge rtl_split_block PARAMS ((basic_block, void *));
89 static void rtl_dump_bb PARAMS ((basic_block, FILE *));
90 static int rtl_verify_flow_info_1 PARAMS ((void));
92 /* Return true if NOTE is not one of the ones that must be kept paired,
93 so that we may simply delete it. */
/* NOTE(review): this view of the file is missing lines -- the `static int'
   return type and K&R parameter declaration are absent.  Code kept verbatim.  */
96 can_delete_note_p (note)
/* Standalone note kinds (deleted-insn, basic-block and prediction markers)
   have no paired partner note and may simply be removed.  */
99 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
100 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
101 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
104 /* True if a given label can be deleted. */
107 can_delete_label_p (label)
/* A label is deletable only when nothing outside the insn chain can refer
   to it: it is not marked preserved, carries no user-declared name, and is
   on neither the forced_labels nor the label_value_list expression lists.  */
110 return (!LABEL_PRESERVE_P (label)
111 /* User declared labels must be preserved. */
112 && LABEL_NAME (label) == 0
113 && !in_expr_list_p (forced_labels, label)
114 && !in_expr_list_p (label_value_list, label));
117 /* Delete INSN by patching it out. Return the next insn. */
/* NOTE(review): lines are missing from this view (function header, braces,
   some declarations); the code below is kept verbatim.  */
123 rtx next = NEXT_INSN (insn);
125 bool really_delete = true;
127 if (GET_CODE (insn) == CODE_LABEL)
129 /* Some labels can't be directly removed from the INSN chain, as they
130 might be references via variables, constant pool etc.
131 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
132 if (! can_delete_label_p (insn))
134 const char *name = LABEL_NAME (insn);
/* Keep the rtx in the chain but demote it to a deleted-label note,
   remembering the label's name in the note's source-file slot.  */
136 really_delete = false;
137 PUT_CODE (insn, NOTE);
138 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
139 NOTE_SOURCE_FILE (insn) = name;
142 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
147 /* If this insn has already been deleted, something is very wrong. */
148 if (INSN_DELETED_P (insn))
151 INSN_DELETED_P (insn) = 1;
154 /* If deleting a jump, decrement the use count of the label. Deleting
155 the label itself should happen in the normal course of block merging. */
156 if (GET_CODE (insn) == JUMP_INSN
158 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
159 LABEL_NUSES (JUMP_LABEL (insn))--;
161 /* Also if deleting an insn that references a label. */
162 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
163 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
164 LABEL_NUSES (XEXP (note, 0))--;
/* Jump tables: drop a use count for every label the vector targets.  */
166 if (GET_CODE (insn) == JUMP_INSN
167 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
168 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
170 rtx pat = PATTERN (insn);
171 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
172 int len = XVECLEN (pat, diff_vec_p);
175 for (i = 0; i < len; i++)
177 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
179 /* When deleting code in bulk (e.g. removing many unreachable
180 blocks) we can delete a label that's a target of the vector
181 before deleting the vector itself. */
182 if (GET_CODE (label) != NOTE)
183 LABEL_NUSES (label)--;
190 /* Like delete_insn but also purge dead edges from BB. */
192 delete_insn_and_edges (insn)
/* Only when INSN ends its basic block can its removal invalidate outgoing
   edges, so the purge below is conditional on that (guard partially elided
   in this view).  */
199 && BLOCK_FOR_INSN (insn)
200 && BLOCK_FOR_INSN (insn)->end == insn)
202 x = delete_insn (insn);
204 purge_dead_edges (BLOCK_FOR_INSN (insn));
208 /* Unlink a chain of insns between START and FINISH, leaving notes
209 that must be paired. */
212 delete_insn_chain (start, finish)
217 /* Unchain the insns one by one. It would be quicker to delete all of these
218 with a single unchaining, rather than one at a time, but we need to keep
/* Paired notes (e.g. region begin/end) are skipped rather than deleted;
   everything else is removed via delete_insn.  */
222 next = NEXT_INSN (start);
223 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
226 next = delete_insn (start);
234 /* Like delete_insn but also purge dead edges from BB. */
236 delete_insn_chain_and_edges (first, last)
/* As with delete_insn_and_edges: purge only if LAST ends its block,
   since that is when successor edges may become dead.  */
242 && BLOCK_FOR_INSN (last)
243 && BLOCK_FOR_INSN (last)->end == last)
245 delete_insn_chain (first, last);
247 purge_dead_edges (BLOCK_FOR_INSN (last));
250 /* Create a new basic block consisting of the instructions between HEAD and END
251 inclusive. This function is designed to allow fast BB construction - reuses
252 the note and basic block struct in BB_NOTE, if any and do not grow
253 BASIC_BLOCK chain and should be used directly only by CFG construction code.
254 END can be NULL in to create new empty basic block before HEAD. Both END
255 and HEAD can be NULL to create basic block at the end of INSN chain.
256 AFTER is the basic block we should be put after. */
259 create_basic_block_structure (head, end, bb_note, after)
260 rtx head, end, bb_note;
/* Reuse path: an existing, non-integrated bb_note that already carries a
   basic_block struct is threaded back into the insn chain.  */
266 && ! RTX_INTEGRATED_P (bb_note)
267 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
270 /* If we found an existing note, thread it back onto the chain. */
274 if (GET_CODE (head) == CODE_LABEL)
278 after = PREV_INSN (head);
282 if (after != bb_note && NEXT_INSN (after) != bb_note)
283 reorder_insns_nobb (bb_note, bb_note, after);
287 /* Otherwise we must create a note and a basic block structure. */
/* The basic-block note is emitted after the last insn, after a leading
   CODE_LABEL, or before HEAD, depending on what HEAD/END look like.  */
293 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
294 else if (GET_CODE (head) == CODE_LABEL && end)
296 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
302 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
308 NOTE_BASIC_BLOCK (bb_note) = bb;
311 /* Always include the bb note in the block. */
312 if (NEXT_INSN (end) == bb_note)
/* Register the new block: assign index, link after AFTER, record it in
   the BASIC_BLOCK array, and set BLOCK_FOR_INSN for its insns.  */
317 bb->index = last_basic_block++;
319 link_block (bb, after);
320 BASIC_BLOCK (bb->index) = bb;
321 update_bb_for_insn (bb);
323 /* Tag the block so that we know it has been used when considering
324 other basic block notes. */
330 /* Create new basic block consisting of instructions in between HEAD and END
331 and place it to the BB chain after block AFTER. END can be NULL in to
332 create new empty basic block before HEAD. Both END and HEAD can be NULL to
333 create basic block at the end of INSN chain. */
336 create_basic_block (head, end, after)
342 /* Place the new block just after the end. */
/* Unlike create_basic_block_structure, this grows the BASIC_BLOCK varray
   before delegating the actual construction.  */
343 VARRAY_GROW (basic_block_info, last_basic_block+1);
347 bb = create_basic_block_structure (head, end, NULL, after);
352 /* Delete the insns in a (non-live) block. We physically delete every
353 non-deleted-note insn, and update the flow graph appropriately.
355 Return nonzero if we deleted an exception handler. */
357 /* ??? Preserving all such notes strikes me as wrong. It would be nice
358 to post-process the stream to remove empty blocks, loops, ranges, etc. */
361 flow_delete_block_noexpunge (b)
366 /* If the head of this block is a CODE_LABEL, then it might be the
367 label for an exception handler which can't be reached.
369 We need to remove the label from the exception_handler_label list
370 and remove the associated NOTE_INSN_EH_REGION_BEG and
371 NOTE_INSN_EH_REGION_END notes. */
373 /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
374 hanging before the block. */
376 for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
378 if (GET_CODE (insn) != NOTE)
380 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
381 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
382 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED
387 never_reached_warning (insn, b->end);
389 if (GET_CODE (insn) == CODE_LABEL)
390 maybe_remove_eh_handler (insn);
392 /* Include any jump table following the basic block. */
394 if (tablejump_p (end, NULL, &tmp))
397 /* Include any barrier that may follow the basic block. */
398 tmp = next_nonnote_insn (end);
399 if (tmp && GET_CODE (tmp) == BARRIER)
402 /* Selectively delete the entire chain. */
404 delete_insn_chain (insn, end);
406 /* Remove the edges into and out of this block. Note that there may
407 indeed be edges in, if we are removing an unreachable loop. */
408 while (b->pred != NULL)
409 remove_edge (b->pred);
410 while (b->succ != NULL)
411 remove_edge (b->succ);
/* NOTE(review): the lines below belong to the separate flow_delete_block
   wrapper, whose header and remaining body are elided in this view.  */
421 flow_delete_block_noexpunge (b);
423 /* Remove the basic block from the array. */
427 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
430 compute_bb_for_insn ()
/* Walks each block's insns from head onward, stamping the owning block
   (loop exit condition elided in this view).  */
439 for (insn = bb->head; ; insn = NEXT_INSN (insn))
441 BLOCK_FOR_INSN (insn) = bb;
448 /* Release the basic_block_for_insn array. */
/* Clears BLOCK_FOR_INSN for every non-barrier insn in the function.  */
454 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
455 if (GET_CODE (insn) != BARRIER)
456 BLOCK_FOR_INSN (insn) = NULL;
459 /* Update insns block within BB. */
462 update_bb_for_insn (bb)
/* Re-stamp BLOCK_FOR_INSN to BB for the block's insns, skipping barriers
   (loop exit condition elided in this view).  */
467 for (insn = bb->head; ; insn = NEXT_INSN (insn))
469 if (GET_CODE (insn) != BARRIER)
470 set_block_for_insn (insn, bb);
476 /* Split a block BB after insn INSN creating a new fallthru edge.
477 Return the new edge. Note that to keep other parts of the compiler happy,
478 this function renumbers all the basic blocks so that the new
479 one has a number one greater than the block split. */
482 rtl_split_block (bb, insnp)
491 /* There is no point splitting the block after its end. */
495 /* Create the new basic block. */
/* The new block takes everything after INSN and inherits BB's profile
   (count, frequency, loop depth).  */
496 new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
497 new_bb->count = bb->count;
498 new_bb->frequency = bb->frequency;
499 new_bb->loop_depth = bb->loop_depth;
502 /* Redirect the outgoing edges. */
503 new_bb->succ = bb->succ;
505 for (e = new_bb->succ; e; e = e->succ_next)
/* BB now falls through into the new block as its single successor.  */
508 new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
510 if (bb->global_live_at_start)
512 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
513 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
514 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
516 /* We now have to calculate which registers are live at the end
517 of the split basic block and at the start of the new basic
518 block. Start with those registers that are known to be live
519 at the end of the original basic block and get
520 propagate_block to determine which registers are live. */
521 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
522 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
523 COPY_REG_SET (bb->global_live_at_end,
524 new_bb->global_live_at_start);
525 #ifdef HAVE_conditional_execution
526 /* In the presence of conditional execution we are not able to update
527 liveness precisely. */
528 if (reload_completed)
530 bb->flags |= BB_DIRTY;
531 new_bb->flags |= BB_DIRTY;
539 /* Blocks A and B are to be merged into a single block A. The insns
540 are already contiguous, hence `nomove'. */
543 merge_blocks_nomove (a, b)
546 rtx b_head = b->head, b_end = b->end, a_end = a->end;
547 rtx del_first = NULL_RTX, del_last = NULL_RTX;
551 /* If there was a CODE_LABEL beginning B, delete it. */
552 if (GET_CODE (b_head) == CODE_LABEL)
554 /* Detect basic blocks with nothing but a label. This can happen
555 in particular at the end of a function. */
/* del_first..del_last accumulates the span of insns (label, bb note,
   trailing jump/barrier) to be deleted after edges are fixed up.  */
559 del_first = del_last = b_head;
560 b_head = NEXT_INSN (b_head);
563 /* Delete the basic block note and handle blocks containing just that
565 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
573 b_head = NEXT_INSN (b_head);
576 /* If there was a jump out of A, delete it. */
577 if (GET_CODE (a_end) == JUMP_INSN)
/* Scan back over notes to find where the jump (and on cc0 targets its
   cc0 setter) begins, so the whole sequence can be removed.  */
581 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
582 if (GET_CODE (prev) != NOTE
583 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
590 /* If this was a conditional jump, we need to also delete
591 the insn that set cc0. */
592 if (only_sets_cc0_p (prev))
596 prev = prev_nonnote_insn (prev);
603 a_end = PREV_INSN (del_first);
605 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
606 del_first = NEXT_INSN (a_end);
608 /* Normally there should only be one successor of A and that is B, but
609 partway though the merge of blocks for conditional_execution we'll
610 be merging a TEST block with THEN and ELSE successors. Free the
611 whole lot of them and hope the caller knows what they're doing. */
613 remove_edge (a->succ);
615 /* Adjust the edges out of B for the new owner. */
616 for (e = b->succ; e; e = e->succ_next)
619 a->flags |= b->flags;
621 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
622 b->pred = b->succ = NULL;
623 a->global_live_at_end = b->global_live_at_end;
627 /* Delete everything marked above as well as crap that might be
628 hanging out between the two blocks. */
629 delete_insn_chain (del_first, del_last);
631 /* Reassociate the insns of B with A. */
636 for (x = a_end; x != b_end; x = NEXT_INSN (x))
637 set_block_for_insn (x, a);
639 set_block_for_insn (b_end, a);
647 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
/* The exit block has no label; for other blocks a fresh CODE_LABEL is
   emitted before the current head if one is not already there.  */
654 if (block == EXIT_BLOCK_PTR)
657 if (GET_CODE (block->head) != CODE_LABEL)
659 block->head = emit_label_before (gen_label_rtx (), block->head);
665 /* Attempt to perform edge redirection by replacing possibly complex jump
666 instruction by unconditional jump or removing jump completely. This can
667 apply only if all edges now point to the same block. The parameters and
668 return values are equivalent to redirect_edge_and_branch. */
671 try_redirect_by_replacing_jump (e, target)
675 basic_block src = e->src;
676 rtx insn = src->end, kill_from;
681 /* Verify that all targets will be TARGET. */
682 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
683 if (tmp->dest != target && tmp != e)
686 if (tmp || !onlyjump_p (insn))
/* Late in compilation (or without optimization) tablejumps are left alone.  */
688 if ((!optimize || flow2_completed) && tablejump_p (insn, NULL, NULL))
691 /* Avoid removing branch with side effects. */
692 set = single_set (insn);
693 if (!set || side_effects_p (set))
696 /* In case we zap a conditional jump, we'll need to kill
697 the cc0 setter too. */
700 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
701 kill_from = PREV_INSN (insn);
704 /* See if we can create the fallthru edge. */
705 if (can_fallthru (src, target))
708 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
711 /* Selectively unlink whole insn chain. */
712 delete_insn_chain (kill_from, PREV_INSN (target->head));
715 /* If this already is simplejump, redirect it. */
716 else if (simplejump_p (insn))
718 if (e->dest == target)
721 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
722 INSN_UID (insn), e->dest->index, target->index);
723 if (!redirect_jump (insn, block_label (target), 0))
725 if (target == EXIT_BLOCK_PTR)
731 /* Cannot do anything for target exit block. */
732 else if (target == EXIT_BLOCK_PTR)
735 /* Or replace possibly complicated jump insn by simple jump insn. */
738 rtx target_label = block_label (target);
739 rtx barrier, label, table;
741 emit_jump_insn_after (gen_jump (target_label), insn);
742 JUMP_LABEL (src->end) = target_label;
743 LABEL_NUSES (target_label)++;
745 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
746 INSN_UID (insn), INSN_UID (src->end));
749 delete_insn_chain (kill_from, insn);
751 /* Recognize a tablejump that we are converting to a
752 simple jump and remove its associated CODE_LABEL
753 and ADDR_VEC or ADDR_DIFF_VEC. */
754 if (tablejump_p (insn, &label, &table))
755 delete_insn_chain (label, table);
/* Ensure the new simple jump is followed by a barrier.  */
757 barrier = next_nonnote_insn (src->end);
758 if (!barrier || GET_CODE (barrier) != BARRIER)
759 emit_barrier_after (src->end);
762 /* Keep only one edge out and set proper flags. */
763 while (src->succ->succ_next)
764 remove_edge (src->succ);
767 e->flags = EDGE_FALLTHRU;
771 e->probability = REG_BR_PROB_BASE;
772 e->count = src->count;
774 /* We don't want a block to end on a line-number note since that has
775 the potential of changing the code between -g and not -g. */
776 while (GET_CODE (e->src->end) == NOTE
777 && NOTE_LINE_NUMBER (e->src->end) >= 0)
778 delete_insn (e->src->end);
780 if (e->dest != target)
781 redirect_edge_succ (e, target);
786 /* Return last loop_beg note appearing after INSN, before start of next
787 basic block. Return INSN if there are no such notes.
789 When emitting jump to redirect a fallthru edge, it should always appear
790 after the LOOP_BEG notes, as loop optimizer expect loop to either start by
791 fallthru edge or jump following the LOOP_BEG note jumping to the loop exit
795 last_loop_beg_note (insn)
/* Scan forward over notes until the next basic-block note, remembering
   the latest LOOP_BEG seen (the remembering statement is elided here).  */
800 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
801 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
802 insn = NEXT_INSN (insn))
803 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
809 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
810 expense of adding new instructions or reordering basic blocks.
812 Function can be also called with edge destination equivalent to the TARGET.
813 Then it should try the simplifications and do nothing if none is possible.
815 Return true if transformation succeeded. We still return false in case E
816 already destinated TARGET and we didn't managed to simplify instruction
820 rtl_redirect_edge_and_branch (e, target)
825 rtx old_label = e->dest->head;
826 basic_block src = e->src;
/* Abnormal-call and EH edges cannot be retargeted by editing a jump.  */
829 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
832 if (try_redirect_by_replacing_jump (e, target))
835 /* Do this fast path late, as we want above code to simplify for cases
836 where called on single edge leaving basic block containing nontrivial
838 else if (e->dest == target)
841 /* We can only redirect non-fallthru edges of jump insn. */
842 if (e->flags & EDGE_FALLTHRU)
844 else if (GET_CODE (insn) != JUMP_INSN)
847 /* Recognize a tablejump and adjust all matching cases. */
848 if (tablejump_p (insn, NULL, &tmp))
852 rtx new_label = block_label (target);
854 if (target == EXIT_BLOCK_PTR)
856 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
857 vec = XVEC (PATTERN (tmp), 0);
859 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry that still points at the old label,
   keeping label use counts balanced.  */
861 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
862 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
864 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
865 --LABEL_NUSES (old_label);
866 ++LABEL_NUSES (new_label);
869 /* Handle casesi dispatch insns */
870 if ((tmp = single_set (insn)) != NULL
871 && SET_DEST (tmp) == pc_rtx
872 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
873 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
874 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
876 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
878 --LABEL_NUSES (old_label);
879 ++LABEL_NUSES (new_label);
884 /* ?? We may play the games with moving the named labels from
885 one basic block to the other in case only one computed_jump is
887 if (computed_jump_p (insn)
888 /* A return instruction can't be redirected. */
889 || returnjump_p (insn))
892 /* If the insn doesn't go where we think, we're confused. */
893 if (JUMP_LABEL (insn) != old_label)
896 /* If the substitution doesn't succeed, die. This can happen
897 if the back end emitted unrecognizable instructions or if
898 target is exit block on some arches. */
899 if (!redirect_jump (insn, block_label (target), 0))
901 if (target == EXIT_BLOCK_PTR)
908 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
909 e->src->index, e->dest->index, target->index);
911 if (e->dest != target)
912 redirect_edge_succ_nodup (e, target);
917 /* Like force_nonfallthru below, but additionally performs redirection
918 Used by redirect_edge_and_branch_force. */
921 force_nonfallthru_and_redirect (e, target)
925 basic_block jump_block, new_bb = NULL, src = e->src;
928 int abnormal_edge_flags = 0;
930 /* In the case the last instruction is conditional jump to the next
931 instruction, first redirect the jump itself and then continue
932 by creating an basic block afterwards to redirect fallthru edge. */
933 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
934 && any_condjump_p (e->src->end)
935 /* When called from cfglayout, fallthru edges do not
936 neccessarily go to the next block. */
937 && e->src->next_bb == e->dest
938 && JUMP_LABEL (e->src->end) == e->dest->head)
941 edge b = unchecked_make_edge (e->src, target, 0);
943 if (!redirect_jump (e->src->end, block_label (target), 0))
/* Split the profile between the redirected branch and the remaining
   fallthru edge according to the branch-probability note, if any.  */
945 note = find_reg_note (e->src->end, REG_BR_PROB, NULL_RTX);
948 int prob = INTVAL (XEXP (note, 0));
950 b->probability = prob;
951 b->count = e->count * prob / REG_BR_PROB_BASE;
952 e->probability -= e->probability;
953 e->count -= b->count;
954 if (e->probability < 0)
961 if (e->flags & EDGE_ABNORMAL)
963 /* Irritating special case - fallthru edge to the same block as abnormal
965 We can't redirect abnormal edge, but we still can split the fallthru
966 one and create separate abnormal edge to original destination.
967 This allows bb-reorder to make such edge non-fallthru. */
968 if (e->dest != target)
970 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
971 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
973 else if (!(e->flags & EDGE_FALLTHRU))
975 else if (e->src == ENTRY_BLOCK_PTR)
977 /* We can't redirect the entry block. Create an empty block at the
978 start of the function which we use to add the new jump. */
980 basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
982 /* Change the existing edge's source to be the new block, and add
983 a new edge from the entry block to the new block. */
985 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
993 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
/* If SRC has other successors (or we must preserve an abnormal edge),
   interpose a fresh jump block on the fallthru path.  */
996 if (e->src->succ->succ_next || abnormal_edge_flags)
998 /* Create the new structures. */
1000 /* Position the new block correctly relative to loop notes. */
1001 note = last_loop_beg_note (e->src->end);
1002 note = NEXT_INSN (note);
1004 /* ... and ADDR_VECs. */
1006 && GET_CODE (note) == CODE_LABEL
1008 && GET_CODE (NEXT_INSN (note)) == JUMP_INSN
1009 && (GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_DIFF_VEC
1010 || GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_VEC))
1011 note = NEXT_INSN (NEXT_INSN (note));
1013 jump_block = create_basic_block (note, NULL, e->src);
1014 jump_block->count = e->count;
1015 jump_block->frequency = EDGE_FREQUENCY (e);
1016 jump_block->loop_depth = target->loop_depth;
1018 if (target->global_live_at_start)
1020 jump_block->global_live_at_start
1021 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1022 jump_block->global_live_at_end
1023 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1024 COPY_REG_SET (jump_block->global_live_at_start,
1025 target->global_live_at_start);
1026 COPY_REG_SET (jump_block->global_live_at_end,
1027 target->global_live_at_start);
1031 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1032 new_edge->probability = e->probability;
1033 new_edge->count = e->count;
1035 /* Redirect old edge. */
1036 redirect_edge_pred (e, jump_block);
1037 e->probability = REG_BR_PROB_BASE;
1039 new_bb = jump_block;
1042 jump_block = e->src;
/* Emit the actual jump: a return insn when targeting the exit block,
   otherwise a jump to TARGET's label, followed by a barrier.  */
1044 e->flags &= ~EDGE_FALLTHRU;
1045 if (target == EXIT_BLOCK_PTR)
1048 emit_jump_insn_after (gen_return (), jump_block->end);
1054 rtx label = block_label (target);
1055 emit_jump_insn_after (gen_jump (label), jump_block->end);
1056 JUMP_LABEL (jump_block->end) = label;
1057 LABEL_NUSES (label)++;
1060 emit_barrier_after (jump_block->end);
1061 redirect_edge_succ_nodup (e, target);
1063 if (abnormal_edge_flags)
1064 make_edge (src, target, abnormal_edge_flags);
1069 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1070 (and possibly create new basic block) to make edge non-fallthru.
1071 Return newly created BB or NULL if none. */
1074 force_nonfallthru (e)
/* Degenerate redirection: keep the same destination, only de-fallthru.  */
1077 return force_nonfallthru_and_redirect (e, e->dest);
1080 /* Redirect edge even at the expense of creating new jump insn or
1081 basic block. Return new basic block if created, NULL otherwise.
1082 Abort if conversion is impossible. */
1085 rtl_redirect_edge_and_branch_force (e, target)
/* Try the cheap in-place redirection first; only fall back to emitting
   a new jump block when that fails.  */
1089 if (redirect_edge_and_branch (e, target)
1090 || e->dest == target)
1093 /* In case the edge redirection failed, try to force it to be non-fallthru
1094 and redirect newly created simplejump. */
1095 return force_nonfallthru_and_redirect (e, target);
1098 /* The given edge should potentially be a fallthru edge. If that is in
1099 fact true, delete the jump and barriers that are in the way. */
1102 tidy_fallthru_edge (e, b, c)
1108 /* ??? In a late-running flow pass, other folks may have deleted basic
1109 blocks by nopping out blocks, leaving multiple BARRIERs between here
1110 and the target label. They ought to be chastized and fixed.
1112 We can also wind up with a sequence of undeletable labels between
1113 one block and the next.
1115 So search through a sequence of barriers, labels, and notes for
1116 the head of block C and assert that we really do fall through. */
1118 for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
1122 /* Remove what will soon cease being the jump insn from the source block.
1123 If block B consisted only of this single jump, turn it into a deleted
1126 if (GET_CODE (q) == JUMP_INSN
1128 && (any_uncondjump_p (q)
1129 || (b->succ == e && e->succ_next == NULL)))
1132 /* If this was a conditional jump, we need to also delete
1133 the insn that set cc0. */
1134 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1140 /* We don't want a block to end on a line-number note since that has
1141 the potential of changing the code between -g and not -g. */
1142 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1146 /* Selectively unlink the sequence. */
1147 if (q != PREV_INSN (c->head))
1148 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
/* B now genuinely falls through to C.  */
1150 e->flags |= EDGE_FALLTHRU;
1153 /* Fix up edges that now fall through, or rather should now fall through
1154 but previously required a jump around now deleted blocks. Simplify
1155 the search by only examining blocks numerically adjacent, since this
1156 is how find_basic_blocks created them. */
1159 tidy_fallthru_edges ()
1163 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1166 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1172 /* We care about simple conditional or unconditional jumps with
1175 If we had a conditional branch to the next instruction when
1176 find_basic_blocks was called, then there will only be one
1177 out edge for the block which ended with the conditional
1178 branch (since we do not create duplicate edges).
1180 Furthermore, the edge will be marked as a fallthru because we
1181 merge the flags for the duplicate edges. So we do not want to
1182 check that the edge is not a FALLTHRU edge. */
/* Candidate: B has a single, non-complex successor edge and its ending
   jump (if any) has no side effects beyond the jump itself.  */
1184 if ((s = b->succ) != NULL
1185 && ! (s->flags & EDGE_COMPLEX)
1186 && s->succ_next == NULL
1188 /* If the jump insn has side effects, we can't tidy the edge. */
1189 && (GET_CODE (b->end) != JUMP_INSN
1190 || onlyjump_p (b->end)))
1191 tidy_fallthru_edge (s, b, c);
1195 /* Helper function for split_edge. Return true in case edge BB2 to BB1
1196 is back edge of syntactic loop. */
1199 back_edge_of_syntactic_loop_p (bb1, bb2)
1200 basic_block bb1, bb2;
1209 /* ??? Could we guarantee that bb indices are monotone, so that we could
1210 just compare them? */
/* First establish that BB1 precedes BB2 in the block chain.  */
1211 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
/* Then count LOOP_BEG/LOOP_END note nesting between the two blocks;
   a positive balance indicates a syntactic loop back edge.  */
1217 for (insn = bb1->end; insn != bb2->head && count >= 0;
1218 insn = NEXT_INSN (insn))
1219 if (GET_CODE (insn) == NOTE)
1221 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1223 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1230 /* Split a (typically critical) edge. Return the new block.
1231 Abort on abnormal edges.
1233 ??? The code generally expects to be called on critical edges.
1234 The case of a block ending in an unconditional jump to a
1235 block with multiple predecessors is not handled optimally. */
1238 rtl_split_edge (edge_in)
1244 /* Abnormal edges cannot be split. */
1245 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1248 /* We are going to place the new block in front of edge destination.
1249 Avoid existence of fallthru predecessors. */
1250 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1254 for (e = edge_in->dest->pred; e; e = e->pred_next)
1255 if (e->flags & EDGE_FALLTHRU)
1259 force_nonfallthru (e);
1262 /* Create the basic block note.
1264 Where we place the note can have a noticeable impact on the generated
1265 code. Consider this cfg:
1275 If we need to insert an insn on the edge from block 0 to block 1,
1276 we want to ensure the instructions we insert are outside of any
1277 loop notes that physically sit between block 0 and block 1. Otherwise
1278 we confuse the loop optimizer into thinking the loop is a phony. */
/* Choose the insertion point: before a LOOP_BEG note preceding the
   destination (when this is not a syntactic loop back edge), at the
   destination's head, or at the end of the chain for the exit block.  */
1280 if (edge_in->dest != EXIT_BLOCK_PTR
1281 && PREV_INSN (edge_in->dest->head)
1282 && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
1283 && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
1284 == NOTE_INSN_LOOP_BEG)
1285 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1286 before = PREV_INSN (edge_in->dest->head);
1287 else if (edge_in->dest != EXIT_BLOCK_PTR)
1288 before = edge_in->dest->head;
1292 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1293 bb->count = edge_in->count;
1294 bb->frequency = EDGE_FREQUENCY (edge_in);
1296 /* ??? This info is likely going to be out of date very soon. */
1297 if (edge_in->dest->global_live_at_start)
1299 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1300 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1301 COPY_REG_SET (bb->global_live_at_start,
1302 edge_in->dest->global_live_at_start);
1303 COPY_REG_SET (bb->global_live_at_end,
1304 edge_in->dest->global_live_at_start);
1307 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1309 /* For non-fallthry edges, we must adjust the predecessor's
1310 jump instruction to target our new block. */
1311 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1313 if (!redirect_edge_and_branch (edge_in, bb))
1317 redirect_edge_succ (edge_in, bb);
1322 /* Queue instructions for insertion on an edge between two basic blocks.
1323 The new instructions and basic blocks (if any) will not appear in the
1324 CFG until commit_edge_insertions is called. */
/* PATTERN is appended to the pending sequence stored in E->insns;
   nothing is emitted into the insn stream here.  */
1327 insert_insn_on_edge (pattern, e)
1331 /* We cannot insert instructions on an abnormal critical edge.
1332 It will be easier to find the culprit if we die now. */
1333 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
/* Start a fresh sequence, or reopen the one already queued on E.  */
1336 if (e->insns == NULL_RTX)
1339 push_to_sequence (e->insns)
1341 emit_insn (pattern);
1343 e->insns = get_insns ();
1347 /* Update the CFG for the instructions queued on edge E. */
/* Decide where the queued insns of E can be placed (destination head,
   source end, or a freshly split block) and emit them there.
   WATCH_CALLS requests the call/return-value special case below.
   NOTE(review): listing is elided; some statements are not visible.  */
1350 commit_one_edge_insertion (e, watch_calls)
1354 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1355 basic_block bb = NULL;
1357 /* Pull the insns off the edge now since the edge might go away. */
1359 e->insns = NULL_RTX;
1361 /* Special case -- avoid inserting code between call and storing
1362 its return value. */
1363 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1364 && e->src != ENTRY_BLOCK_PTR
1365 && GET_CODE (e->src->end) == CALL_INSN)
1367 rtx next = next_nonnote_insn (e->src->end);
1369 after = e->dest->head;
1370 /* The first insn after the call may be a stack pop, skip it. */
1372 && keep_with_call_p (next))
1375 next = next_nonnote_insn (next);
1379 if (!before && !after)
1381 /* Figure out where to put these things. If the destination has
1382 one predecessor, insert there. Except for the exit block. */
1383 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1387 /* Get the location correct wrt a code label, and "nice" wrt
1388 a basic block note, and before everything else. */
1390 if (GET_CODE (tmp) == CODE_LABEL)
1391 tmp = NEXT_INSN (tmp);
1392 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1393 tmp = NEXT_INSN (tmp);
1394 if (tmp == bb->head)
1397 after = PREV_INSN (tmp);
1399 after = get_last_insn ();
1402 /* If the source has one successor and the edge is not abnormal,
1403 insert there. Except for the entry block. */
1404 else if ((e->flags & EDGE_ABNORMAL) == 0
1405 && e->src->succ->succ_next == NULL
1406 && e->src != ENTRY_BLOCK_PTR)
1410 /* It is possible to have a non-simple jump here. Consider a target
1411 where some forms of unconditional jumps clobber a register. This
1412 happens on the fr30 for example.
1414 We know this block has a single successor, so we can just emit
1415 the queued insns before the jump. */
/* Back BEFORE up over any NOTE_INSN_LOOP_BEG notes that precede the
   jump, so the insertion stays outside loop notes.  */
1416 if (GET_CODE (bb->end) == JUMP_INSN)
1417 for (before = bb->end;
1418 GET_CODE (PREV_INSN (before)) == NOTE
1419 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1420 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1424 /* We'd better be fallthru, or we've lost track of what's what. */
1425 if ((e->flags & EDGE_FALLTHRU) == 0)
1431 /* Otherwise we must split the edge. */
1434 bb = split_edge (e);
1439 /* Now that we've found the spot, do the insertion. */
1443 emit_insn_before (insns, before);
1444 last = prev_nonnote_insn (before);
1447 last = emit_insn_after (insns, after);
/* If the inserted sequence ends in a return, the edge now goes to
   EXIT; drop the fallthru flag and terminate with a barrier.  */
1449 if (returnjump_p (last))
1451 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1452 This is not currently a problem because this only happens
1453 for the (single) epilogue, which already has a fallthru edge
1457 if (e->dest != EXIT_BLOCK_PTR
1458 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1461 e->flags &= ~EDGE_FALLTHRU;
1462 emit_barrier_after (last);
1465 delete_insn (before);
1467 else if (GET_CODE (last) == JUMP_INSN)
1470 /* Mark the basic block for find_sub_basic_blocks. */
1474 /* Update the CFG for all queued instructions. */
/* Walk every block between ENTRY and EXIT and commit the insn
   sequences queued on each outgoing edge (watch_calls == false),
   then let find_many_sub_basic_blocks rebuild any blocks that were
   split by the insertions.  */
1477 commit_edge_insertions ()
1481 bool changed = false;
1483 #ifdef ENABLE_CHECKING
1484 verify_flow_info ();
1487 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
/* Cache the successor first: committing may remove or replace E.  */
1491 for (e = bb->succ; e; e = next)
1493 next = e->succ_next;
1497 commit_one_edge_insertion (e, false);
1505 blocks = sbitmap_alloc (last_basic_block);
1506 sbitmap_zero (blocks);
1510 SET_BIT (blocks, bb->index);
1511 /* Check for forgotten bb->aux values before commit_edge_insertions
1513 if (bb->aux != &bb->aux)
1517 find_many_sub_basic_blocks (blocks);
1518 sbitmap_free (blocks);
1521 /* Update the CFG for all queued instructions, taking special care of inserting
1522 code on edges between call and storing its return value. */
/* Identical to commit_edge_insertions except that
   commit_one_edge_insertion is called with watch_calls == true, which
   avoids inserting code between a call and the insns that consume its
   return value.  */
1525 commit_edge_insertions_watch_calls ()
1529 bool changed = false;
1531 #ifdef ENABLE_CHECKING
1532 verify_flow_info ();
1535 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
/* Cache the successor first: committing may remove or replace E.  */
1539 for (e = bb->succ; e; e = next)
1541 next = e->succ_next;
1545 commit_one_edge_insertion (e, true);
1553 blocks = sbitmap_alloc (last_basic_block);
1554 sbitmap_zero (blocks);
1558 SET_BIT (blocks, bb->index);
1559 /* Check for forgotten bb->aux values before commit_edge_insertions
1561 if (bb->aux != &bb->aux)
1565 find_many_sub_basic_blocks (blocks);
1566 sbitmap_free (blocks);
1569 /* Print out one basic block with live information at start and end. */
/* Dump BB to OUTF: live regset at entry, each insn from head to end,
   then the live regset at exit.  */
1572 rtl_dump_bb (bb, outf)
1579 fputs (";; Registers live at start:", outf);
1580 dump_regset (bb->global_live_at_start, outf);
/* LAST is one past bb->end, so the loop prints the whole block body.  */
1583 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1584 insn = NEXT_INSN (insn))
1585 print_rtl_single (outf, insn);
1587 fputs (";; Registers live at end:", outf);
1588 dump_regset (bb->global_live_at_end, outf);
1592 /* Like print_rtl, but also print out live information for the start of each
/* Dump the whole insn chain starting at RTX_FIRST to OUTF, annotating
   basic block boundaries and flagging insns that belong to no block or
   to multiple blocks.  */
1596 print_rtl_with_bb (outf, rtx_first)
1603 fprintf (outf, "(nil)\n");
1606 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1607 int max_uid = get_max_uid ();
/* START[uid] / END[uid] record which block begins/ends at each insn;
   IN_BB_P tracks membership for the per-insn diagnostics below.  */
1609 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1611 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1612 enum bb_state *in_bb_p
1613 = (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));
1617 FOR_EACH_BB_REVERSE (bb)
1621 start[INSN_UID (bb->head)] = bb;
1622 end[INSN_UID (bb->end)] = bb;
1623 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
1625 enum bb_state state = IN_MULTIPLE_BB;
1627 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1629 in_bb_p[INSN_UID (x)] = state;
1636 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1640 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1642 fprintf (outf, ";; Start of basic block %d, registers live:",
1644 dump_regset (bb->global_live_at_start, outf);
/* Notes and barriers legitimately live outside blocks; only warn
   about other insn kinds.  */
1648 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1649 && GET_CODE (tmp_rtx) != NOTE
1650 && GET_CODE (tmp_rtx) != BARRIER)
1651 fprintf (outf, ";; Insn is not within a basic block\n");
1652 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1653 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1655 did_output = print_rtl_single (outf, tmp_rtx);
1657 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1659 fprintf (outf, ";; End of basic block %d, registers live:\n",
1661 dump_regset (bb->global_live_at_end, outf);
1674 if (current_function_epilogue_delay_list != 0)
1676 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1677 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1678 tmp_rtx = XEXP (tmp_rtx, 1))
1679 print_rtl_single (outf, XEXP (tmp_rtx, 0));
/* Resynchronize the REG_BR_PROB note on BB's final jump with the
   probability recorded on the CFG branch edge; no-op when the block
   does not end in a jump, has no note, or the note already agrees.  */
1684 update_br_prob_note (bb)
1688 if (GET_CODE (bb->end) != JUMP_INSN)
1690 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
1691 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1693 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1696 /* Verify the CFG and RTL consistency common for both underlying RTL and
1699 Currently it does following checks:
1701 - test head/end pointers
1702 - overlapping of basic blocks
1703 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1704 - tails of basic blocks (ensure that boundary is necessary)
1705 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1706 and NOTE_INSN_BASIC_BLOCK
1708 In future it can be extended check a lot of other stuff as well
1709 (reachability of basic blocks, life information, etc. etc.). */
/* Returns an error count via the `error' machinery; aborts through
   fatal_insn on structurally impossible insn chains.
   NOTE(review): listing is elided; some statements are not visible.  */
1711 rtl_verify_flow_info_1 ()
1713 const int max_uid = get_max_uid ();
1714 rtx last_head = get_last_insn ();
1715 basic_block *bb_info;
1718 basic_block bb, last_bb_seen;
1720 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1722 /* Check bb chain & numbers. */
1723 last_bb_seen = ENTRY_BLOCK_PTR;
1725 FOR_EACH_BB_REVERSE (bb)
1727 rtx head = bb->head;
1730 /* Verify the end of the basic block is in the INSN chain. */
1731 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1737 error ("end insn %d for block %d not found in the insn stream",
1738 INSN_UID (end), bb->index);
1742 /* Work backwards from the end to the head of the basic block
1743 to verify the head is in the RTL chain. */
1744 for (; x != NULL_RTX; x = PREV_INSN (x))
1746 /* While walking over the insn chain, verify insns appear
1747 in only one basic block and initialize the BB_INFO array
1748 used by other passes. */
1749 if (bb_info[INSN_UID (x)] != NULL)
1751 error ("insn %d is in multiple basic blocks (%d and %d)",
1752 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1756 bb_info[INSN_UID (x)] = bb;
1763 error ("head insn %d for block %d not found in the insn stream",
1764 INSN_UID (head), bb->index);
1771 /* Now check the basic blocks (boundaries etc.) */
1772 FOR_EACH_BB_REVERSE (bb)
/* Count each kind of outgoing edge; the edge mix must then be
   consistent with the kind of insn that ends the block.  */
1774 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1778 if (INSN_P (bb->end)
1779 && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
1780 && bb->succ && bb->succ->succ_next
1781 && any_condjump_p (bb->end))
1783 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1785 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
1786 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1790 for (e = bb->succ; e; e = e->succ_next)
1792 if (e->flags & EDGE_FALLTHRU)
/* A plain branch edge is one with no flags beyond the purely
   informational DFS_BACK / CAN_FALLTHRU / IRREDUCIBLE_LOOP bits.  */
1795 if ((e->flags & ~(EDGE_DFS_BACK | EDGE_CAN_FALLTHRU | EDGE_IRREDUCIBLE_LOOP)) == 0)
1798 if (e->flags & EDGE_ABNORMAL_CALL)
1801 if (e->flags & EDGE_EH)
1803 else if (e->flags & EDGE_ABNORMAL)
1807 if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
1808 && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
1810 error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1814 && (GET_CODE (bb->end) != JUMP_INSN
1815 || (n_branch > 1 && (any_uncondjump_p (bb->end)
1816 || any_condjump_p (bb->end)))))
1818 error ("Too many outgoing branch edges from bb %i", bb->index);
1821 if (n_fallthru && any_uncondjump_p (bb->end))
1823 error ("Fallthru edge after unconditional jump %i", bb->index);
1826 if (n_branch != 1 && any_uncondjump_p (bb->end))
1828 error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
1831 if (n_branch != 1 && any_condjump_p (bb->end)
1832 && JUMP_LABEL (bb->end) != bb->next_bb->head)
1834 error ("Wrong amount of branch edges after conditional jump %i", bb->index);
1837 if (n_call && GET_CODE (bb->end) != CALL_INSN)
1839 error ("Call edges for non-call insn in bb %i", bb->index);
1843 && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
1844 && (GET_CODE (bb->end) != JUMP_INSN
1845 || any_condjump_p (bb->end)
1846 || any_uncondjump_p (bb->end)))
1848 error ("Abnormal edges for no purpose in bb %i", bb->index);
/* Every insn inside the block must have its BLOCK_FOR_INSN set
   back to this block.  */
1852 for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
1853 if (BLOCK_FOR_INSN (x) != bb)
1856 if (! BLOCK_FOR_INSN (x))
1858 ("insn %d inside basic block %d but block_for_insn is NULL",
1859 INSN_UID (x), bb->index);
1862 ("insn %d inside basic block %d but block_for_insn is %i",
1863 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1868 /* OK pointers are correct. Now check the header of basic
1869 block. It ought to contain optional CODE_LABEL followed
1870 by NOTE_BASIC_BLOCK. */
1872 if (GET_CODE (x) == CODE_LABEL)
1876 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1884 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1886 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1892 /* Do checks for empty blocks here.  */
1895 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1897 if (NOTE_INSN_BASIC_BLOCK_P (x))
1899 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1900 INSN_UID (x), bb->index);
/* Control flow insns may appear only at the very end of a block.  */
1907 if (control_flow_insn_p (x))
1909 error ("in basic block %d:", bb->index);
1910 fatal_insn ("flow control insn inside a basic block", x);
1920 /* Verify the CFG and RTL consistency common for both underlying RTL and
1923 Currently it does following checks:
1924 - all checks of rtl_verify_flow_info_1
1925 - check that all insns are in the basic blocks
1926 (except the switch handling code, barriers and notes)
1927 - check that all returns are followed by barriers
1928 - check that all fallthru edge points to the adjacent blocks. */
/* NOTE(review): listing is elided; some statements are not visible.  */
1930 rtl_verify_flow_info ()
1933 int err = rtl_verify_flow_info_1 ();
1936 const rtx rtx_first = get_insns ();
1937 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
1939 FOR_EACH_BB_REVERSE (bb)
1942 for (e = bb->succ; e; e = e->succ_next)
1943 if (e->flags & EDGE_FALLTHRU)
1949 /* Ensure existence of barrier in BB with no fallthru edges. */
1950 for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
1951 insn = NEXT_INSN (insn))
1953 || (GET_CODE (insn) == NOTE
1954 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
1956 error ("missing barrier after block %i", bb->index);
/* A fallthru edge must connect physically adjacent blocks, and only
   harmless insns (no barrier, no real insn except jump-table data)
   may sit in between.  */
1961 else if (e->src != ENTRY_BLOCK_PTR
1962 && e->dest != EXIT_BLOCK_PTR)
1966 if (e->src->next_bb != e->dest)
1969 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
1970 e->src->index, e->dest->index);
1974 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
1975 insn = NEXT_INSN (insn))
1976 if (GET_CODE (insn) == BARRIER
1977 #ifndef CASE_DROPS_THROUGH
1980 || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
1984 error ("verify_flow_info: Incorrect fallthru %i->%i",
1985 e->src->index, e->dest->index);
1986 fatal_insn ("wrong insn in the fallthru edge", insn);
/* Second pass: walk the raw insn chain and match it against the
   block layout recorded in the NOTE_INSN_BASIC_BLOCK notes.  */
1993 last_bb_seen = ENTRY_BLOCK_PTR;
1995 for (x = rtx_first; x; x = NEXT_INSN (x))
1997 if (NOTE_INSN_BASIC_BLOCK_P (x))
1999 bb = NOTE_BASIC_BLOCK (x);
2002 if (bb != last_bb_seen->next_bb)
2003 internal_error ("basic blocks not laid down consecutively");
2005 curr_bb = last_bb_seen = bb;
2010 switch (GET_CODE (x))
2017 /* An addr_vec is placed outside any basic block. */
2019 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
2020 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2021 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC)
2024 /* But in any case, non-deletable labels can appear anywhere. */
2028 fatal_insn ("insn outside basic block", x);
2033 && GET_CODE (x) == JUMP_INSN
2034 && returnjump_p (x) && ! condjump_p (x)
2035 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
2036 fatal_insn ("return not followed by barrier", x);
2037 if (curr_bb && x == curr_bb->end)
/* Finally, the number of bb notes seen must match n_basic_blocks.  */
2041 if (num_bb_notes != n_basic_blocks)
2043 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2044 num_bb_notes, n_basic_blocks);
2049 /* Assume that the preceding pass has possibly eliminated jump instructions
2050 or converted the unconditional jumps. Eliminate the edges from CFG.
2051 Return true if any edges are eliminated. */
/* NOTE(review): listing is elided; some statements are not visible.  */
2054 purge_dead_edges (bb)
2058 rtx insn = bb->end, note;
2059 bool purged = false;
2061 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2062 if (GET_CODE (insn) == INSN
2063 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2067 if (! may_trap_p (PATTERN (insn))
2068 || ((eqnote = find_reg_equal_equiv_note (insn))
2069 && ! may_trap_p (XEXP (eqnote, 0))))
2070 remove_note (insn, note);
2073 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2074 for (e = bb->succ; e; e = next)
2076 next = e->succ_next;
2077 if (e->flags & EDGE_EH)
2079 if (can_throw_internal (bb->end))
2082 else if (e->flags & EDGE_ABNORMAL_CALL)
2084 if (GET_CODE (bb->end) == CALL_INSN
2085 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2086 || INTVAL (XEXP (note, 0)) >= 0))
2093 bb->flags |= BB_DIRTY;
2097 if (GET_CODE (insn) == JUMP_INSN)
2102 /* We do care only about conditional jumps and simplejumps. */
2103 if (!any_condjump_p (insn)
2104 && !returnjump_p (insn)
2105 && !simplejump_p (insn))
2108 /* Branch probability/prediction notes are defined only for
2109 condjumps. We've possibly turned condjump into simplejump. */
2110 if (simplejump_p (insn))
2112 note = find_reg_note (insn, REG_BR_PROB, NULL);
2114 remove_note (insn, note);
2115 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2116 remove_note (insn, note);
2119 for (e = bb->succ; e; e = next)
2121 next = e->succ_next;
2123 /* Avoid abnormal flags to leak from computed jumps turned
2124 into simplejumps. */
2126 e->flags &= ~EDGE_ABNORMAL;
2128 /* See if this edge is one we should keep. */
2129 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2130 /* A conditional jump can fall through into the next
2131 block, so we should keep the edge. */
2133 else if (e->dest != EXIT_BLOCK_PTR
2134 && e->dest->head == JUMP_LABEL (insn))
2135 /* If the destination block is the target of the jump,
2138 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2139 /* If the destination block is the exit block, and this
2140 instruction is a return, then keep the edge. */
2142 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2143 /* Keep the edges that correspond to exceptions thrown by
2144 this instruction. */
2147 /* We do not need this edge. */
2148 bb->flags |= BB_DIRTY;
2153 if (!bb->succ || !purged)
2157 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
2162 /* Redistribute probabilities. */
/* With a single remaining successor all the probability/count goes
   to it; with two (branch + fallthru) re-split them from the
   REG_BR_PROB note if present.  */
2163 if (!bb->succ->succ_next)
2165 bb->succ->probability = REG_BR_PROB_BASE;
2166 bb->succ->count = bb->count;
2170 note = find_reg_note (insn, REG_BR_PROB, NULL);
2174 b = BRANCH_EDGE (bb);
2175 f = FALLTHRU_EDGE (bb);
2176 b->probability = INTVAL (XEXP (note, 0));
2177 f->probability = REG_BR_PROB_BASE - b->probability;
2178 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2179 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2184 else if (GET_CODE (insn) == CALL_INSN && SIBLING_CALL_P (insn))
2186 /* First, there should not be any EH or ABCALL edges resulting
2187 from non-local gotos and the like. If there were, we shouldn't
2188 have created the sibcall in the first place. Second, there
2189 should of course never have been a fallthru edge. */
2190 if (!bb->succ || bb->succ->succ_next)
2192 if (bb->succ->flags != EDGE_SIBCALL)
2198 /* If we don't see a jump insn, we don't know exactly why the block would
2199 have been broken at this point. Look for a simple, non-fallthru edge,
2200 as these are only created by conditional branches. If we find such an
2201 edge we know that there used to be a jump here and can then safely
2202 remove all non-fallthru edges. */
2203 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
2210 for (e = bb->succ; e; e = next)
2212 next = e->succ_next;
2213 if (!(e->flags & EDGE_FALLTHRU))
2215 bb->flags |= BB_DIRTY;
2221 if (!bb->succ || bb->succ->succ_next)
2224 bb->succ->probability = REG_BR_PROB_BASE;
2225 bb->succ->count = bb->count;
2228 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
2233 /* Search all basic blocks for potentially dead edges and purge them. Return
2234 true if some edge has been eliminated. */
/* When UPDATE_LIFE_P is set, re-run global life analysis over just the
   blocks whose edges were purged.  */
2237 purge_all_dead_edges (update_life_p)
2246 blocks = sbitmap_alloc (last_basic_block);
2247 sbitmap_zero (blocks);
2252 bool purged_here = purge_dead_edges (bb);
2254 purged |= purged_here;
2255 if (purged_here && update_life_p)
2256 SET_BIT (blocks, bb->index);
2259 if (update_life_p && purged)
2260 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2261 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2262 | PROP_KILL_DEAD_CODE);
2265 sbitmap_free (blocks);
2269 /* Same as split_block but update cfg_layout structures. */
/* After the plain RTL split, the footer insns recorded in the
   reorder_block_def move from the original block to the new (second)
   half, since they followed the split point.  */
2271 cfg_layout_split_block (bb, insnp)
2277 edge fallthru = rtl_split_block (bb, insn);
2279 alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
2280 RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
2281 RBI (fallthru->src)->footer = NULL;
2286 /* Redirect Edge to DEST. */
/* cfg_layout variant: blocks need not be physically adjacent, so
   fallthru edges are redirected freely and no simplejump may remain
   in the insn stream.
   NOTE(review): listing is elided; some statements are not visible.  */
2288 cfg_layout_redirect_edge_and_branch (e, dest)
2292 basic_block src = e->src;
2293 basic_block old_next_bb = src->next_bb;
2296 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
2297 in the case the basic block appears to be in sequence. Avoid this
/* Temporarily hide the successor link so the helper cannot take the
   "blocks are adjacent" shortcut; restored below.  */
2300 src->next_bb = NULL;
2301 if (e->flags & EDGE_FALLTHRU)
2303 /* Redirect any branch edges unified with the fallthru one. */
2304 if (GET_CODE (src->end) == JUMP_INSN
2305 && JUMP_LABEL (src->end) == e->dest->head)
2307 if (!redirect_jump (src->end, block_label (dest), 0))
2310 /* In case we are redirecting fallthru edge to the branch edge
2311 of conditional jump, remove it. */
2312 if (src->succ->succ_next
2313 && !src->succ->succ_next->succ_next)
2315 edge s = e->succ_next ? e->succ_next : src->succ;
2317 && any_condjump_p (src->end)
2318 && onlyjump_p (src->end))
2319 delete_insn (src->end);
2321 redirect_edge_succ_nodup (e, dest);
2326 ret = rtl_redirect_edge_and_branch (e, dest);
2328 /* We don't want simplejumps in the insn stream during cfglayout. */
2329 if (simplejump_p (src->end))
2331 delete_insn (src->end);
2332 delete_barrier (NEXT_INSN (src->end));
2333 src->succ->flags |= EDGE_FALLTHRU;
2335 src->next_bb = old_next_bb;
2340 /* Simple wrapper as we always can redirect fallthru edges. */
/* In cfg_layout mode redirection cannot fail for fallthru edges, so a
   failed call from the non-force variant is an internal error.  */
2342 cfg_layout_redirect_edge_and_branch_force (e, dest)
2346 if (!cfg_layout_redirect_edge_and_branch (e, dest))
2351 /* Same as flow_delete_block but update cfg_layout structures. */
/* Splice the block's stashed header/footer insn chains back into the
   main insn stream, delete the block via rtl_delete_block, then move
   any remaining insns onto the next block's header (or the function
   footer when BB was the last block).
   NOTE(review): listing is elided; some statements are not visible.  */
2353 cfg_layout_delete_block (bb)
2356 rtx insn, next, prev = PREV_INSN (bb->head), *to, remaints;
2358 if (RBI (bb)->header)
2362 NEXT_INSN (prev) = RBI (bb)->header;
2364 set_first_insn (RBI (bb)->header);
2365 PREV_INSN (RBI (bb)->header) = prev;
2366 insn = RBI (bb)->header;
2367 while (NEXT_INSN (insn))
2368 insn = NEXT_INSN (insn);
2369 NEXT_INSN (insn) = next;
2370 PREV_INSN (next) = insn;
2372 next = NEXT_INSN (bb->end);
2373 if (RBI (bb)->footer)
2376 NEXT_INSN (insn) = RBI (bb)->footer;
2377 PREV_INSN (RBI (bb)->footer) = insn;
2378 while (NEXT_INSN (insn))
2379 insn = NEXT_INSN (insn);
2380 NEXT_INSN (insn) = next;
2382 PREV_INSN (next) = insn;
2384 set_last_insn (insn);
2386 if (bb->next_bb != EXIT_BLOCK_PTR)
2387 to = &RBI(bb->next_bb)->header;
2389 to = &cfg_layout_function_footer;
2390 rtl_delete_block (bb);
2393 prev = NEXT_INSN (prev);
2395 prev = get_insns ();
2397 next = PREV_INSN (next);
2399 next = get_last_insn ();
/* Unlink whatever survived between PREV and NEXT and append it to the
   chain pointed at by TO.  */
2401 if (next && NEXT_INSN (next) != prev)
2403 remaints = unlink_insn_chain (prev, next);
2405 while (NEXT_INSN (insn))
2406 insn = NEXT_INSN (insn);
2407 NEXT_INSN (insn) = *to;
2409 PREV_INSN (*to) = insn;
2414 /* Implementation of CFG manipulation for linearized RTL. */
/* NOTE(review): initializer is elided in this listing -- only some of
   the hook fields are visible; the remaining entries (and the closing
   brace) lie on the missing original lines.  */
2415 struct cfg_hooks rtl_cfg_hooks = {
2416 rtl_verify_flow_info,
2418 rtl_redirect_edge_and_branch,
2419 rtl_redirect_edge_and_branch_force,
2425 /* Implementation of CFG manipulation for cfg layout RTL, where
2426 basic block connected via fallthru edges does not have to be adjacent.
2427 This representation will hopefully become the default one in future
2428 version of the compiler. */
2429 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
2430 rtl_verify_flow_info_1, /* verify_flow_info. */
2432 cfg_layout_redirect_edge_and_branch,
2433 cfg_layout_redirect_edge_and_branch_force,
2434 cfg_layout_delete_block,
2435 cfg_layout_split_block,
2436 NULL /* split_edge. */