1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, flow_delete_block, split_block,
31 - Infrastructure to determine quickly basic block for insn
32 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36 - Edge splitting and committing to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - Dumping and debugging
39 print_rtl_with_bb, dump_bb, debug_bb, debug_bb_n
40 - Consistency checking
42 - CFG updating after constant propagation
43 purge_dead_edges, purge_all_dead_edges */
49 #include "hard-reg-set.h"
50 #include "basic-block.h"
59 #include "insn-config.h"
61 /* Stubs in case we don't have a return insn. */
64 #define gen_return() NULL_RTX
67 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
68 /* ??? Should probably be using LABEL_NUSES instead. It would take a
69 bit of surgery to be able to use or co-opt the routines in jump. */
71 rtx tail_recursion_label_list;
73 static int can_delete_note_p PARAMS ((rtx));
74 static int can_delete_label_p PARAMS ((rtx));
75 static void commit_one_edge_insertion PARAMS ((edge, int));
76 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
77 static rtx last_loop_beg_note PARAMS ((rtx));
78 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
79 static basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
81 /* Return true if NOTE is not one of the ones that must be kept paired,
82 so that we may simply delete it. */
85 can_delete_note_p (note)
88 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
89 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
90 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
93 /* True if a given label can be deleted. */
96 can_delete_label_p (label)
99 return (!LABEL_PRESERVE_P (label)
100 /* User declared labels must be preserved. */
101 && LABEL_NAME (label) == 0
102 && !in_expr_list_p (forced_labels, label)
103 && !in_expr_list_p (label_value_list, label));
106 /* Delete INSN by patching it out. Return the next insn. */
112 rtx next = NEXT_INSN (insn);
114 bool really_delete = true;
116 if (GET_CODE (insn) == CODE_LABEL)
118 /* Some labels can't be directly removed from the INSN chain, as they
119 might be references via variables, constant pool etc.
120 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
121 if (! can_delete_label_p (insn))
123 const char *name = LABEL_NAME (insn);
125 really_delete = false;
126 PUT_CODE (insn, NOTE);
127 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
128 NOTE_SOURCE_FILE (insn) = name;
131 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
136 /* If this insn has already been deleted, something is very wrong. */
137 if (INSN_DELETED_P (insn))
140 INSN_DELETED_P (insn) = 1;
143 /* If deleting a jump, decrement the use count of the label. Deleting
144 the label itself should happen in the normal course of block merging. */
145 if (GET_CODE (insn) == JUMP_INSN
147 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
148 LABEL_NUSES (JUMP_LABEL (insn))--;
150 /* Also if deleting an insn that references a label. */
151 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
152 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
153 LABEL_NUSES (XEXP (note, 0))--;
155 if (GET_CODE (insn) == JUMP_INSN
156 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
157 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
159 rtx pat = PATTERN (insn);
160 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
161 int len = XVECLEN (pat, diff_vec_p);
164 for (i = 0; i < len; i++)
166 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
168 /* When deleting code in bulk (e.g. removing many unreachable
169 blocks) we can delete a label that's a target of the vector
170 before deleting the vector itself. */
171 if (GET_CODE (label) != NOTE)
172 LABEL_NUSES (label)--;
179 /* Like delete_insn but also purge dead edges from BB. */
181 delete_insn_and_edges (insn)
188 && BLOCK_FOR_INSN (insn)
189 && BLOCK_FOR_INSN (insn)->end == insn)
191 x = delete_insn (insn);
193 purge_dead_edges (BLOCK_FOR_INSN (insn));
197 /* Unlink a chain of insns between START and FINISH, leaving notes
198 that must be paired. */
201 delete_insn_chain (start, finish)
206 /* Unchain the insns one by one. It would be quicker to delete all of these
207 with a single unchaining, rather than one at a time, but we need to keep
211 next = NEXT_INSN (start);
212 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
215 next = delete_insn (start);
223 /* Like delete_insn but also purge dead edges from BB. */
225 delete_insn_chain_and_edges (first, last)
231 && BLOCK_FOR_INSN (last)
232 && BLOCK_FOR_INSN (last)->end == last)
234 delete_insn_chain (first, last);
236 purge_dead_edges (BLOCK_FOR_INSN (last));
239 /* Create a new basic block consisting of the instructions between HEAD and END
240 inclusive. This function is designed to allow fast BB construction - reuses
241 the note and basic block struct in BB_NOTE, if any and do not grow
242 BASIC_BLOCK chain and should be used directly only by CFG construction code.
243 END can be NULL in to create new empty basic block before HEAD. Both END
244 and HEAD can be NULL to create basic block at the end of INSN chain.
245 AFTER is the basic block we should be put after. */
248 create_basic_block_structure (head, end, bb_note, after)
249 rtx head, end, bb_note;
255 && ! RTX_INTEGRATED_P (bb_note)
256 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
259 /* If we found an existing note, thread it back onto the chain. */
263 if (GET_CODE (head) == CODE_LABEL)
267 after = PREV_INSN (head);
271 if (after != bb_note && NEXT_INSN (after) != bb_note)
272 reorder_insns_nobb (bb_note, bb_note, after);
276 /* Otherwise we must create a note and a basic block structure. */
282 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
283 else if (GET_CODE (head) == CODE_LABEL && end)
285 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
291 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
297 NOTE_BASIC_BLOCK (bb_note) = bb;
300 /* Always include the bb note in the block. */
301 if (NEXT_INSN (end) == bb_note)
306 bb->index = last_basic_block++;
308 link_block (bb, after);
309 BASIC_BLOCK (bb->index) = bb;
310 update_bb_for_insn (bb);
312 /* Tag the block so that we know it has been used when considering
313 other basic block notes. */
319 /* Create new basic block consisting of instructions in between HEAD and END
320 and place it to the BB chain after block AFTER. END can be NULL in to
321 create new empty basic block before HEAD. Both END and HEAD can be NULL to
322 create basic block at the end of INSN chain. */
325 create_basic_block (head, end, after)
331 /* Place the new block just after the end. */
332 VARRAY_GROW (basic_block_info, last_basic_block+1);
336 bb = create_basic_block_structure (head, end, NULL, after);
341 /* Delete the insns in a (non-live) block. We physically delete every
342 non-deleted-note insn, and update the flow graph appropriately.
344 Return nonzero if we deleted an exception handler. */
346 /* ??? Preserving all such notes strikes me as wrong. It would be nice
347 to post-process the stream to remove empty blocks, loops, ranges, etc. */
350 flow_delete_block_noexpunge (b)
353 int deleted_handler = 0;
356 /* If the head of this block is a CODE_LABEL, then it might be the
357 label for an exception handler which can't be reached.
359 We need to remove the label from the exception_handler_label list
360 and remove the associated NOTE_INSN_EH_REGION_BEG and
361 NOTE_INSN_EH_REGION_END notes. */
363 /* Get rid of all NOTE_INSN_PREDICTIONs hanging before the block. */
365 for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
367 if (GET_CODE (insn) != NOTE)
369 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
370 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
375 never_reached_warning (insn, b->end);
377 if (GET_CODE (insn) == CODE_LABEL)
378 maybe_remove_eh_handler (insn);
380 /* Include any jump table following the basic block. */
382 if (GET_CODE (end) == JUMP_INSN
383 && (tmp = JUMP_LABEL (end)) != NULL_RTX
384 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
385 && GET_CODE (tmp) == JUMP_INSN
386 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
387 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
390 /* Include any barrier that may follow the basic block. */
391 tmp = next_nonnote_insn (end);
392 if (tmp && GET_CODE (tmp) == BARRIER)
395 /* Selectively delete the entire chain. */
397 delete_insn_chain (insn, end);
399 /* Remove the edges into and out of this block. Note that there may
400 indeed be edges in, if we are removing an unreachable loop. */
401 while (b->pred != NULL)
402 remove_edge (b->pred);
403 while (b->succ != NULL)
404 remove_edge (b->succ);
409 return deleted_handler;
413 flow_delete_block (b)
416 int deleted_handler = flow_delete_block_noexpunge (b);
418 /* Remove the basic block from the array. */
421 return deleted_handler;
424 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
427 compute_bb_for_insn ()
436 for (insn = bb->head; ; insn = NEXT_INSN (insn))
438 BLOCK_FOR_INSN (insn) = bb;
445 /* Release the basic_block_for_insn array. */
451 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
452 if (GET_CODE (insn) != BARRIER)
453 BLOCK_FOR_INSN (insn) = NULL;
456 /* Update insns block within BB. */
459 update_bb_for_insn (bb)
464 for (insn = bb->head; ; insn = NEXT_INSN (insn))
466 set_block_for_insn (insn, bb);
472 /* Split a block BB after insn INSN creating a new fallthru edge.
473 Return the new edge. Note that to keep other parts of the compiler happy,
474 this function renumbers all the basic blocks so that the new
475 one has a number one greater than the block split. */
478 split_block (bb, insn)
486 /* There is no point splitting the block after its end. */
490 /* Create the new basic block. */
491 new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
492 new_bb->count = bb->count;
493 new_bb->frequency = bb->frequency;
494 new_bb->loop_depth = bb->loop_depth;
497 /* Redirect the outgoing edges. */
498 new_bb->succ = bb->succ;
500 for (e = new_bb->succ; e; e = e->succ_next)
503 new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
505 if (bb->global_live_at_start)
507 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
508 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
509 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
511 /* We now have to calculate which registers are live at the end
512 of the split basic block and at the start of the new basic
513 block. Start with those registers that are known to be live
514 at the end of the original basic block and get
515 propagate_block to determine which registers are live. */
516 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
517 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
518 COPY_REG_SET (bb->global_live_at_end,
519 new_bb->global_live_at_start);
520 #ifdef HAVE_conditional_execution
521 /* In the presence of conditional execution we are not able to update
522 liveness precisely. */
523 if (reload_completed)
525 bb->flags |= BB_DIRTY;
526 new_bb->flags |= BB_DIRTY;
534 /* Blocks A and B are to be merged into a single block A. The insns
535 are already contiguous, hence `nomove'. */
538 merge_blocks_nomove (a, b)
541 rtx b_head = b->head, b_end = b->end, a_end = a->end;
542 rtx del_first = NULL_RTX, del_last = NULL_RTX;
546 /* If there was a CODE_LABEL beginning B, delete it. */
547 if (GET_CODE (b_head) == CODE_LABEL)
549 /* Detect basic blocks with nothing but a label. This can happen
550 in particular at the end of a function. */
554 del_first = del_last = b_head;
555 b_head = NEXT_INSN (b_head);
558 /* Delete the basic block note and handle blocks containing just that
560 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
568 b_head = NEXT_INSN (b_head);
571 /* If there was a jump out of A, delete it. */
572 if (GET_CODE (a_end) == JUMP_INSN)
576 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
577 if (GET_CODE (prev) != NOTE
578 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
585 /* If this was a conditional jump, we need to also delete
586 the insn that set cc0. */
587 if (only_sets_cc0_p (prev))
591 prev = prev_nonnote_insn (prev);
598 a_end = PREV_INSN (del_first);
600 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
601 del_first = NEXT_INSN (a_end);
603 /* Normally there should only be one successor of A and that is B, but
604 partway though the merge of blocks for conditional_execution we'll
605 be merging a TEST block with THEN and ELSE successors. Free the
606 whole lot of them and hope the caller knows what they're doing. */
608 remove_edge (a->succ);
610 /* Adjust the edges out of B for the new owner. */
611 for (e = b->succ; e; e = e->succ_next)
614 a->flags |= b->flags;
616 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
617 b->pred = b->succ = NULL;
618 a->global_live_at_end = b->global_live_at_end;
622 /* Delete everything marked above as well as crap that might be
623 hanging out between the two blocks. */
624 delete_insn_chain (del_first, del_last);
626 /* Reassociate the insns of B with A. */
631 for (x = a_end; x != b_end; x = NEXT_INSN (x))
632 set_block_for_insn (x, a);
634 set_block_for_insn (b_end, a);
642 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
649 if (block == EXIT_BLOCK_PTR)
652 if (GET_CODE (block->head) != CODE_LABEL)
654 block->head = emit_label_before (gen_label_rtx (), block->head);
660 /* Attempt to perform edge redirection by replacing possibly complex jump
661 instruction by unconditional jump or removing jump completely. This can
662 apply only if all edges now point to the same block. The parameters and
663 return values are equivalent to redirect_edge_and_branch. */
666 try_redirect_by_replacing_jump (e, target)
670 basic_block src = e->src;
671 rtx insn = src->end, kill_from;
676 /* Verify that all targets will be TARGET. */
677 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
678 if (tmp->dest != target && tmp != e)
681 if (tmp || !onlyjump_p (insn))
683 if (reload_completed && JUMP_LABEL (insn)
684 && (table = NEXT_INSN (JUMP_LABEL (insn))) != NULL_RTX
685 && GET_CODE (table) == JUMP_INSN
686 && (GET_CODE (PATTERN (table)) == ADDR_VEC
687 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
690 /* Avoid removing branch with side effects. */
691 set = single_set (insn);
692 if (!set || side_effects_p (set))
695 /* In case we zap a conditional jump, we'll need to kill
696 the cc0 setter too. */
699 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
700 kill_from = PREV_INSN (insn);
703 /* See if we can create the fallthru edge. */
704 if (can_fallthru (src, target))
707 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
710 /* Selectively unlink whole insn chain. */
711 delete_insn_chain (kill_from, PREV_INSN (target->head));
714 /* If this already is simplejump, redirect it. */
715 else if (simplejump_p (insn))
717 if (e->dest == target)
720 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
721 INSN_UID (insn), e->dest->index, target->index);
722 if (!redirect_jump (insn, block_label (target), 0))
724 if (target == EXIT_BLOCK_PTR)
730 /* Cannot do anything for target exit block. */
731 else if (target == EXIT_BLOCK_PTR)
734 /* Or replace possibly complicated jump insn by simple jump insn. */
737 rtx target_label = block_label (target);
740 emit_jump_insn_after (gen_jump (target_label), insn);
741 JUMP_LABEL (src->end) = target_label;
742 LABEL_NUSES (target_label)++;
744 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
745 INSN_UID (insn), INSN_UID (src->end));
748 delete_insn_chain (kill_from, insn);
750 /* Recognize a tablejump that we are converting to a
751 simple jump and remove its associated CODE_LABEL
752 and ADDR_VEC or ADDR_DIFF_VEC. */
753 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
754 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
755 && GET_CODE (tmp) == JUMP_INSN
756 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
757 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
759 delete_insn_chain (JUMP_LABEL (insn), tmp);
762 barrier = next_nonnote_insn (src->end);
763 if (!barrier || GET_CODE (barrier) != BARRIER)
764 emit_barrier_after (src->end);
767 /* Keep only one edge out and set proper flags. */
768 while (src->succ->succ_next)
769 remove_edge (src->succ);
772 e->flags = EDGE_FALLTHRU;
776 e->probability = REG_BR_PROB_BASE;
777 e->count = src->count;
779 /* We don't want a block to end on a line-number note since that has
780 the potential of changing the code between -g and not -g. */
781 while (GET_CODE (e->src->end) == NOTE
782 && NOTE_LINE_NUMBER (e->src->end) >= 0)
783 delete_insn (e->src->end);
785 if (e->dest != target)
786 redirect_edge_succ (e, target);
791 /* Return last loop_beg note appearing after INSN, before start of next
792 basic block. Return INSN if there are no such notes.
794 When emitting jump to redirect an fallthru edge, it should always appear
795 after the LOOP_BEG notes, as loop optimizer expect loop to either start by
796 fallthru edge or jump following the LOOP_BEG note jumping to the loop exit
800 last_loop_beg_note (insn)
805 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
806 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
807 insn = NEXT_INSN (insn))
808 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
814 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
815 expense of adding new instructions or reordering basic blocks.
817 Function can be also called with edge destination equivalent to the TARGET.
818 Then it should try the simplifications and do nothing if none is possible.
820 Return true if transformation succeeded. We still return false in case E
821 already destinated TARGET and we didn't managed to simplify instruction
825 redirect_edge_and_branch (e, target)
830 rtx old_label = e->dest->head;
831 basic_block src = e->src;
834 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
837 if (try_redirect_by_replacing_jump (e, target))
840 /* Do this fast path late, as we want above code to simplify for cases
841 where called on single edge leaving basic block containing nontrivial
843 else if (e->dest == target)
846 /* We can only redirect non-fallthru edges of jump insn. */
847 if (e->flags & EDGE_FALLTHRU)
849 else if (GET_CODE (insn) != JUMP_INSN)
852 /* Recognize a tablejump and adjust all matching cases. */
853 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
854 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
855 && GET_CODE (tmp) == JUMP_INSN
856 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
857 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
861 rtx new_label = block_label (target);
863 if (target == EXIT_BLOCK_PTR)
865 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
866 vec = XVEC (PATTERN (tmp), 0);
868 vec = XVEC (PATTERN (tmp), 1);
870 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
871 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
873 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
874 --LABEL_NUSES (old_label);
875 ++LABEL_NUSES (new_label);
878 /* Handle casesi dispatch insns */
879 if ((tmp = single_set (insn)) != NULL
880 && SET_DEST (tmp) == pc_rtx
881 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
882 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
883 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
885 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
887 --LABEL_NUSES (old_label);
888 ++LABEL_NUSES (new_label);
893 /* ?? We may play the games with moving the named labels from
894 one basic block to the other in case only one computed_jump is
896 if (computed_jump_p (insn)
897 /* A return instruction can't be redirected. */
898 || returnjump_p (insn))
901 /* If the insn doesn't go where we think, we're confused. */
902 if (JUMP_LABEL (insn) != old_label)
905 /* If the substitution doesn't succeed, die. This can happen
906 if the back end emitted unrecognizable instructions or if
907 target is exit block on some arches. */
908 if (!redirect_jump (insn, block_label (target), 0))
910 if (target == EXIT_BLOCK_PTR)
917 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
918 e->src->index, e->dest->index, target->index);
920 if (e->dest != target)
921 redirect_edge_succ_nodup (e, target);
926 /* Like force_nonfallthru below, but additionally performs redirection
927 Used by redirect_edge_and_branch_force. */
930 force_nonfallthru_and_redirect (e, target)
934 basic_block jump_block, new_bb = NULL;
938 if (e->flags & EDGE_ABNORMAL)
940 else if (!(e->flags & EDGE_FALLTHRU))
942 else if (e->src == ENTRY_BLOCK_PTR)
944 /* We can't redirect the entry block. Create an empty block at the
945 start of the function which we use to add the new jump. */
947 basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
949 /* Change the existing edge's source to be the new block, and add
950 a new edge from the entry block to the new block. */
952 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
960 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
963 if (e->src->succ->succ_next)
965 /* Create the new structures. */
966 note = last_loop_beg_note (e->src->end);
968 = create_basic_block (NEXT_INSN (note), NULL, e->src);
969 jump_block->count = e->count;
970 jump_block->frequency = EDGE_FREQUENCY (e);
971 jump_block->loop_depth = target->loop_depth;
973 if (target->global_live_at_start)
975 jump_block->global_live_at_start
976 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
977 jump_block->global_live_at_end
978 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
979 COPY_REG_SET (jump_block->global_live_at_start,
980 target->global_live_at_start);
981 COPY_REG_SET (jump_block->global_live_at_end,
982 target->global_live_at_start);
986 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
987 new_edge->probability = e->probability;
988 new_edge->count = e->count;
990 /* Redirect old edge. */
991 redirect_edge_pred (e, jump_block);
992 e->probability = REG_BR_PROB_BASE;
999 e->flags &= ~EDGE_FALLTHRU;
1000 if (target == EXIT_BLOCK_PTR)
1003 emit_jump_insn_after (gen_return (), jump_block->end);
1009 rtx label = block_label (target);
1010 emit_jump_insn_after (gen_jump (label), jump_block->end);
1011 JUMP_LABEL (jump_block->end) = label;
1012 LABEL_NUSES (label)++;
1015 emit_barrier_after (jump_block->end);
1016 redirect_edge_succ_nodup (e, target);
1021 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1022 (and possibly create new basic block) to make edge non-fallthru.
1023 Return newly created BB or NULL if none. */
1026 force_nonfallthru (e)
1029 return force_nonfallthru_and_redirect (e, e->dest);
1032 /* Redirect edge even at the expense of creating new jump insn or
1033 basic block. Return new basic block if created, NULL otherwise.
1034 Abort if conversion is impossible. */
1037 redirect_edge_and_branch_force (e, target)
1041 if (redirect_edge_and_branch (e, target)
1042 || e->dest == target)
1045 /* In case the edge redirection failed, try to force it to be non-fallthru
1046 and redirect newly created simplejump. */
1047 return force_nonfallthru_and_redirect (e, target);
1050 /* The given edge should potentially be a fallthru edge. If that is in
1051 fact true, delete the jump and barriers that are in the way. */
1054 tidy_fallthru_edge (e, b, c)
1060 /* ??? In a late-running flow pass, other folks may have deleted basic
1061 blocks by nopping out blocks, leaving multiple BARRIERs between here
1062 and the target label. They ought to be chastized and fixed.
1064 We can also wind up with a sequence of undeletable labels between
1065 one block and the next.
1067 So search through a sequence of barriers, labels, and notes for
1068 the head of block C and assert that we really do fall through. */
1070 for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
1074 /* Remove what will soon cease being the jump insn from the source block.
1075 If block B consisted only of this single jump, turn it into a deleted
1078 if (GET_CODE (q) == JUMP_INSN
1080 && (any_uncondjump_p (q)
1081 || (b->succ == e && e->succ_next == NULL)))
1084 /* If this was a conditional jump, we need to also delete
1085 the insn that set cc0. */
1086 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1092 /* We don't want a block to end on a line-number note since that has
1093 the potential of changing the code between -g and not -g. */
1094 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1098 /* Selectively unlink the sequence. */
1099 if (q != PREV_INSN (c->head))
1100 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
1102 e->flags |= EDGE_FALLTHRU;
1105 /* Fix up edges that now fall through, or rather should now fall through
1106 but previously required a jump around now deleted blocks. Simplify
1107 the search by only examining blocks numerically adjacent, since this
1108 is how find_basic_blocks created them. */
1111 tidy_fallthru_edges ()
1115 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1118 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1124 /* We care about simple conditional or unconditional jumps with
1127 If we had a conditional branch to the next instruction when
1128 find_basic_blocks was called, then there will only be one
1129 out edge for the block which ended with the conditional
1130 branch (since we do not create duplicate edges).
1132 Furthermore, the edge will be marked as a fallthru because we
1133 merge the flags for the duplicate edges. So we do not want to
1134 check that the edge is not a FALLTHRU edge. */
1136 if ((s = b->succ) != NULL
1137 && ! (s->flags & EDGE_COMPLEX)
1138 && s->succ_next == NULL
1140 /* If the jump insn has side effects, we can't tidy the edge. */
1141 && (GET_CODE (b->end) != JUMP_INSN
1142 || onlyjump_p (b->end)))
1143 tidy_fallthru_edge (s, b, c);
1147 /* Helper function for split_edge. Return true in case edge BB2 to BB1
1148 is back edge of syntactic loop. */
1151 back_edge_of_syntactic_loop_p (bb1, bb2)
1152 basic_block bb1, bb2;
1161 /* ??? Could we guarantee that bb indices are monotone, so that we could
1162 just compare them? */
1163 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1169 for (insn = bb1->end; insn != bb2->head && count >= 0;
1170 insn = NEXT_INSN (insn))
1171 if (GET_CODE (insn) == NOTE)
1173 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1175 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1182 /* Split a (typically critical) edge. Return the new block.
1183 Abort on abnormal edges.
1185 ??? The code generally expects to be called on critical edges.
1186 The case of a block ending in an unconditional jump to a
1187 block with multiple predecessors is not handled optimally. */
1190 split_edge (edge_in)
1197 /* Abnormal edges cannot be split. */
1198 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1201 /* We are going to place the new block in front of edge destination.
1202 Avoid existence of fallthru predecessors. */
1203 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1207 for (e = edge_in->dest->pred; e; e = e->pred_next)
1208 if (e->flags & EDGE_FALLTHRU)
1212 force_nonfallthru (e);
1215 /* Create the basic block note.
1217 Where we place the note can have a noticeable impact on the generated
1218 code. Consider this cfg:
1228 If we need to insert an insn on the edge from block 0 to block 1,
1229 we want to ensure the instructions we insert are outside of any
1230 loop notes that physically sit between block 0 and block 1. Otherwise
1231 we confuse the loop optimizer into thinking the loop is a phony. */
1233 if (edge_in->dest != EXIT_BLOCK_PTR
1234 && PREV_INSN (edge_in->dest->head)
1235 && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
1236 && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
1237 == NOTE_INSN_LOOP_BEG)
1238 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1239 before = PREV_INSN (edge_in->dest->head);
1240 else if (edge_in->dest != EXIT_BLOCK_PTR)
1241 before = edge_in->dest->head;
1245 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1246 bb->count = edge_in->count;
1247 bb->frequency = EDGE_FREQUENCY (edge_in);
1249 /* ??? This info is likely going to be out of date very soon. */
1250 if (edge_in->dest->global_live_at_start)
1252 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1253 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1254 COPY_REG_SET (bb->global_live_at_start,
1255 edge_in->dest->global_live_at_start);
1256 COPY_REG_SET (bb->global_live_at_end,
1257 edge_in->dest->global_live_at_start);
1260 edge_out = make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1262 /* For non-fallthry edges, we must adjust the predecessor's
1263 jump instruction to target our new block. */
1264 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1266 if (!redirect_edge_and_branch (edge_in, bb))
1270 redirect_edge_succ (edge_in, bb);
1275 /* Queue instructions for insertion on an edge between two basic blocks.
1276 The new instructions and basic blocks (if any) will not appear in the
1277 CFG until commit_edge_insertions is called. */
1280 insert_insn_on_edge (pattern, e)
1284 /* We cannot insert instructions on an abnormal critical edge.
1285 It will be easier to find the culprit if we die now. */
1286 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1289 if (e->insns == NULL_RTX)
1292 push_to_sequence (e->insns);
1294 emit_insn (pattern);
1296 e->insns = get_insns ();
1300 /* Update the CFG for the instructions queued on edge E. */
/* NOTE(review): this excerpt is line-sampled; braces and several statements
   of this function are not visible below.  WATCH_CALLS selects the special
   handling of fallthru edges out of a CALL_INSN.  */
1303 commit_one_edge_insertion (e, watch_calls)
1307 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1308 basic_block bb = NULL;
1310 /* Pull the insns off the edge now since the edge might go away. */
1312 e->insns = NULL_RTX;
1314 /* Special case -- avoid inserting code between call and storing
1315 its return value. */
/* Applies only when WATCH_CALLS is set, the edge falls through from a
   block ending in a CALL_INSN, and the destination has exactly one
   predecessor (pred->pred_next == NULL).  */
1316 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1317 && e->src != ENTRY_BLOCK_PTR
1318 && GET_CODE (e->src->end) == CALL_INSN)
1320 rtx next = next_nonnote_insn (e->src->end);
1322 after = e->dest->head;
1323 /* The first insn after the call may be a stack pop, skip it. */
/* keep_with_call_p identifies insns that must stay glued to the call;
   the insertion point is advanced past them.  */
1325 && keep_with_call_p (next))
1328 next = next_nonnote_insn (next);
/* No insertion point chosen yet -- pick one from the CFG shape.  */
1332 if (!before && !after)
1334 /* Figure out where to put these things. If the destination has
1335 one predecessor, insert there. Except for the exit block. */
1336 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1340 /* Get the location correct wrt a code label, and "nice" wrt
1341 a basic block note, and before everything else. */
1343 if (GET_CODE (tmp) == CODE_LABEL)
1344 tmp = NEXT_INSN (tmp);
1345 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1346 tmp = NEXT_INSN (tmp);
1347 if (tmp == bb->head)
1350 after = PREV_INSN (tmp);
1352 after = get_last_insn ();
1355 /* If the source has one successor and the edge is not abnormal,
1356 insert there. Except for the entry block. */
1357 else if ((e->flags & EDGE_ABNORMAL) == 0
1358 && e->src->succ->succ_next == NULL
1359 && e->src != ENTRY_BLOCK_PTR)
1363 /* It is possible to have a non-simple jump here. Consider a target
1364 where some forms of unconditional jumps clobber a register. This
1365 happens on the fr30 for example.
1367 We know this block has a single successor, so we can just emit
1368 the queued insns before the jump. */
/* Back up over any NOTE_INSN_LOOP_BEG notes immediately preceding
   the jump so the queued insns land before them as well.  */
1369 if (GET_CODE (bb->end) == JUMP_INSN)
1370 for (before = bb->end;
1371 GET_CODE (PREV_INSN (before)) == NOTE
1372 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1373 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1377 /* We'd better be fallthru, or we've lost track of what's what. */
1378 if ((e->flags & EDGE_FALLTHRU) == 0)
1384 /* Otherwise we must split the edge. */
1387 bb = split_edge (e);
1392 /* Now that we've found the spot, do the insertion. */
1396 emit_insn_before (insns, before);
1397 last = prev_nonnote_insn (before);
1400 last = emit_insn_after (insns, after);
/* If the queued code ends in a return, the edge now goes to EXIT:
   clear EDGE_FALLTHRU and terminate the stream with a barrier.  */
1402 if (returnjump_p (last))
1404 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1405 This is not currently a problem because this only happens
1406 for the (single) epilogue, which already has a fallthru edge
1410 if (e->dest != EXIT_BLOCK_PTR
1411 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1414 e->flags &= ~EDGE_FALLTHRU;
1415 emit_barrier_after (last);
1418 delete_insn (before);
/* A jump emitted from the queue may have created new block structure;
   rescan BB for sub-basic-blocks.  */
1420 else if (GET_CODE (last) == JUMP_INSN)
1423 find_sub_basic_blocks (bb);
1426 /* Update the CFG for all queued instructions. */
/* Walks every basic block from entry to exit and commits the insn
   sequence queued on each outgoing edge.  NOTE(review): excerpt is
   line-sampled; local declarations and braces are not visible here.  */
1429 commit_edge_insertions ()
1433 #ifdef ENABLE_CHECKING
1434 verify_flow_info ();
1437 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
/* Fetch the successor link before committing: the edge may be split
   (or otherwise replaced) by commit_one_edge_insertion.  */
1441 for (e = bb->succ; e; e = next)
1443 next = e->succ_next;
1445 commit_one_edge_insertion (e, false);
1450 /* Update the CFG for all queued instructions, taking special care of inserting
1451 code on edges between call and storing its return value. */
/* Identical in structure to commit_edge_insertions, except it passes
   true for WATCH_CALLS, enabling the call/return-value special case in
   commit_one_edge_insertion.  */
1454 commit_edge_insertions_watch_calls ()
1458 #ifdef ENABLE_CHECKING
1459 verify_flow_info ();
1462 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
/* Save the next edge first; committing may split E.  */
1466 for (e = bb->succ; e; e = next)
1468 next = e->succ_next;
1470 commit_one_edge_insertion (e, true);
1475 /* Print out one basic block with live information at start and end. */
/* NOTE(review): the dump_bb function header and several lines are not
   visible in this line-sampled excerpt.  The visible body prints the
   block's index, loop depth and execution count, its predecessor edges,
   live-at-start regset, every insn from head through end, the
   live-at-end regset, and its successor edges.  */
1486 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
1487 bb->index, bb->loop_depth);
1488 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
1491 fputs (";; Predecessors: ", outf);
1492 for (e = bb->pred; e; e = e->pred_next)
1493 dump_edge_info (outf, e, 0);
1496 fputs (";; Registers live at start:", outf);
1497 dump_regset (bb->global_live_at_start, outf);
/* Print each insn of the block; the walk stops one past bb->end.  */
1500 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1501 insn = NEXT_INSN (insn))
1502 print_rtl_single (outf, insn);
1504 fputs (";; Registers live at end:", outf);
1505 dump_regset (bb->global_live_at_end, outf);
1508 fputs (";; Successors: ", outf);
1509 for (e = bb->succ; e; e = e->succ_next)
1510 dump_edge_info (outf, e, 1);
/* debug_bb (fragment): convenience wrapper dumping BB to stderr.  */
1518 dump_bb (bb, stderr);
/* debug_bb_n (fragment): dump basic block number N to stderr.  */
1525 dump_bb (BASIC_BLOCK (n), stderr);
1528 /* Like print_rtl, but also print out live information for the start of each
/* NOTE(review): line-sampled excerpt -- the remainder of this header
   comment, the parameter declarations, and many braces are not visible.  */
1532 print_rtl_with_bb (outf, rtx_first)
1539 fprintf (outf, "(nil)\n");
1542 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1543 int max_uid = get_max_uid ();
/* Per-UID maps: the block starting/ending at each insn, plus a
   membership state used to flag insns claimed by multiple blocks.  */
1545 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1547 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1548 enum bb_state *in_bb_p
1549 = (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));
/* First pass: record, for every insn, which block(s) contain it.  */
1553 FOR_EACH_BB_REVERSE (bb)
1557 start[INSN_UID (bb->head)] = bb;
1558 end[INSN_UID (bb->end)] = bb;
1559 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
1561 enum bb_state state = IN_MULTIPLE_BB;
1563 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1565 in_bb_p[INSN_UID (x)] = state;
/* Second pass: print the whole insn chain, annotating block starts and
   ends, and flagging insns outside (or inside more than one) block.  */
1572 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1576 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1578 fprintf (outf, ";; Start of basic block %d, registers live:",
1580 dump_regset (bb->global_live_at_start, outf);
1584 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1585 && GET_CODE (tmp_rtx) != NOTE
1586 && GET_CODE (tmp_rtx) != BARRIER)
1587 fprintf (outf, ";; Insn is not within a basic block\n");
1588 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1589 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1591 did_output = print_rtl_single (outf, tmp_rtx);
1593 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1595 fprintf (outf, ";; End of basic block %d, registers live:\n",
1597 dump_regset (bb->global_live_at_end, outf);
/* Finally, print any insns held on the epilogue delay list.  */
1610 if (current_function_epilogue_delay_list != 0)
1612 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1613 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1614 tmp_rtx = XEXP (tmp_rtx, 1))
1615 print_rtl_single (outf, XEXP (tmp_rtx, 0));
/* Keep the REG_BR_PROB note on BB's final jump in sync with the
   probability recorded on the CFG's branch edge.  NOTE(review): excerpt
   is line-sampled; the function's header comment, declarations and
   braces are not visible here.  */
1620 update_br_prob_note (bb)
1624 if (GET_CODE (bb->end) != JUMP_INSN)
1626 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
/* Nothing to do when there is no note or it already matches.  */
1627 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1629 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1632 /* Verify the CFG consistency. This function checks some CFG invariants and
1633 aborts when something is wrong. Hope that this function will help to
1634 convert many optimization passes to preserve CFG consistency.
1636 Currently it does the following checks:
1638 - test head/end pointers
1639 - overlapping of basic blocks
1640 - edge list correctness
1641 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1642 - tails of basic blocks (ensure that boundary is necessary)
1643 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1644 and NOTE_INSN_BASIC_BLOCK
1645 - check that all insns are in the basic blocks
1646 (except the switch handling code, barriers and notes)
1647 - check that all returns are followed by barriers
1649 In future it can be extended to check a lot of other stuff as well
1650 (reachability of basic blocks, life information, etc. etc.). */
/* NOTE(review): this excerpt is line-sampled; the function header and a
   number of statements/braces are not visible below.  */
1655 const int max_uid = get_max_uid ();
1656 const rtx rtx_first = get_insns ();
1657 rtx last_head = get_last_insn ();
1658 basic_block *bb_info, *last_visited;
1659 size_t *edge_checksum;
1661 int num_bb_notes, err = 0;
1662 basic_block bb, last_bb_seen;
/* The +2 arrays are indexed by bb->index + 2 so that the ENTRY and EXIT
   pseudo-blocks (negative indices) fit too.  */
1664 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1665 last_visited = (basic_block *) xcalloc (last_basic_block + 2,
1666 sizeof (basic_block));
1667 edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));
1669 /* Check bb chain & numbers. */
1670 last_bb_seen = ENTRY_BLOCK_PTR;
1671 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
1673 if (bb != EXIT_BLOCK_PTR
1674 && bb != BASIC_BLOCK (bb->index))
1676 error ("bb %d on wrong place", bb->index);
1680 if (bb->prev_bb != last_bb_seen)
1682 error ("prev_bb of %d should be %d, not %d",
1683 bb->index, last_bb_seen->index, bb->prev_bb->index);
/* Check that head/end of every block lie on the insn chain and that no
   insn is claimed by more than one block.  */
1690 FOR_EACH_BB_REVERSE (bb)
1692 rtx head = bb->head;
1695 /* Verify the end of the basic block is in the INSN chain. */
1696 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1702 error ("end insn %d for block %d not found in the insn stream",
1703 INSN_UID (end), bb->index);
1707 /* Work backwards from the end to the head of the basic block
1708 to verify the head is in the RTL chain. */
1709 for (; x != NULL_RTX; x = PREV_INSN (x))
1711 /* While walking over the insn chain, verify insns appear
1712 in only one basic block and initialize the BB_INFO array
1713 used by other passes. */
1714 if (bb_info[INSN_UID (x)] != NULL)
1716 error ("insn %d is in multiple basic blocks (%d and %d)",
1717 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1721 bb_info[INSN_UID (x)] = bb;
1728 error ("head insn %d for block %d not found in the insn stream",
1729 INSN_UID (head), bb->index);
1736 /* Now check the basic blocks (boundaries etc.) */
1737 FOR_EACH_BB_REVERSE (bb)
1739 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
/* Cross-check the REG_BR_PROB note against the branch edge's recorded
   probability for conditional jumps.  */
1743 if (INSN_P (bb->end)
1744 && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
1745 && bb->succ && bb->succ->succ_next
1746 && any_condjump_p (bb->end))
1748 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1750 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
1751 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1757 error ("verify_flow_info: Wrong count of block %i %i",
1758 bb->index, (int)bb->count);
1761 if (bb->frequency < 0)
1763 error ("verify_flow_info: Wrong frequency of block %i %i",
1764 bb->index, bb->frequency);
/* Validate each successor edge and tally edge-kind counts.  */
1767 for (e = bb->succ; e; e = e->succ_next)
1769 if (last_visited [e->dest->index + 2] == bb)
1771 error ("verify_flow_info: Duplicate edge %i->%i",
1772 e->src->index, e->dest->index);
1775 if (e->probability < 0 || e->probability > REG_BR_PROB_BASE)
1777 error ("verify_flow_info: Wrong probability of edge %i->%i %i",
1778 e->src->index, e->dest->index, e->probability);
1783 error ("verify_flow_info: Wrong count of edge %i->%i %i",
1784 e->src->index, e->dest->index, (int)e->count);
1788 last_visited [e->dest->index + 2] = bb;
1790 if (e->flags & EDGE_FALLTHRU)
1793 if ((e->flags & ~EDGE_DFS_BACK) == 0)
1796 if (e->flags & EDGE_ABNORMAL_CALL)
1799 if (e->flags & EDGE_EH)
1801 else if (e->flags & EDGE_ABNORMAL)
/* A fallthru edge between two ordinary blocks must connect adjacent
   blocks and cross no barriers or real (non-table) insns.  */
1804 if ((e->flags & EDGE_FALLTHRU)
1805 && e->src != ENTRY_BLOCK_PTR
1806 && e->dest != EXIT_BLOCK_PTR)
1810 if (e->src->next_bb != e->dest)
1813 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
1814 e->src->index, e->dest->index);
1818 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
1819 insn = NEXT_INSN (insn))
1820 if (GET_CODE (insn) == BARRIER
1821 #ifndef CASE_DROPS_THROUGH
1824 || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
1828 error ("verify_flow_info: Incorrect fallthru %i->%i",
1829 e->src->index, e->dest->index);
1830 fatal_insn ("wrong insn in the fallthru edge", insn);
1837 error ("verify_flow_info: Basic block %d succ edge is corrupted",
1839 fprintf (stderr, "Predecessor: ");
1840 dump_edge_info (stderr, e, 0);
1841 fprintf (stderr, "\nSuccessor: ");
1842 dump_edge_info (stderr, e, 1);
1843 fprintf (stderr, "\n");
/* Checksum scheme: each edge is added here (seen from its source's
   succ list) and subtracted when seen from its destination's pred
   list; a nonzero remainder means the two lists disagree.  */
1847 edge_checksum[e->dest->index + 2] += (size_t) e;
1850 if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
1851 && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
1853 error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1857 && (GET_CODE (bb->end) != JUMP_INSN
1858 || (n_branch > 1 && (any_uncondjump_p (bb->end)
1859 || any_condjump_p (bb->end)))))
1861 error ("Too many outgoing branch edges from bb %i", bb->index);
1864 if (n_fallthru && any_uncondjump_p (bb->end))
1866 error ("Fallthru edge after unconditional jump %i", bb->index);
1869 if (n_branch != 1 && any_uncondjump_p (bb->end))
1871 error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
1874 if (n_branch != 1 && any_condjump_p (bb->end)
1875 && JUMP_LABEL (bb->end) != bb->next_bb->head)
1877 error ("Wrong amount of branch edges after conditional jump %i", bb->index);
1880 if (n_call && GET_CODE (bb->end) != CALL_INSN)
1882 error ("Call edges for non-call insn in bb %i", bb->index);
1886 && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
1887 && (GET_CODE (bb->end) != JUMP_INSN
1888 || any_condjump_p (bb->end)
1889 || any_uncondjump_p (bb->end)))
1891 error ("Abnormal edges for no purpose in bb %i", bb->index);
1899 /* Ensure existence of barrier in BB with no fallthru edges. */
1900 for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
1901 insn = NEXT_INSN (insn))
1903 || (GET_CODE (insn) == NOTE
1904 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
1906 error ("missing barrier after block %i", bb->index);
/* Validate each predecessor edge and balance the checksum.  */
1912 for (e = bb->pred; e; e = e->pred_next)
1916 error ("basic block %d pred edge is corrupted", bb->index);
1917 fputs ("Predecessor: ", stderr);
1918 dump_edge_info (stderr, e, 0);
1919 fputs ("\nSuccessor: ", stderr);
1920 dump_edge_info (stderr, e, 1);
1921 fputc ('\n', stderr);
1924 edge_checksum[e->dest->index + 2] -= (size_t) e;
/* Every insn in the block must point back at it via BLOCK_FOR_INSN.  */
1927 for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
1928 if (BLOCK_FOR_INSN (x) != bb)
1931 if (! BLOCK_FOR_INSN (x))
1933 ("insn %d inside basic block %d but block_for_insn is NULL",
1934 INSN_UID (x), bb->index);
1937 ("insn %d inside basic block %d but block_for_insn is %i",
1938 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1943 /* OK pointers are correct. Now check the header of basic
1944 block. It ought to contain optional CODE_LABEL followed
1945 by NOTE_BASIC_BLOCK. */
1947 if (GET_CODE (x) == CODE_LABEL)
1951 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1959 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1961 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1967 /* Do checks for empty blocks here. */
1970 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1972 if (NOTE_INSN_BASIC_BLOCK_P (x))
1974 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1975 INSN_UID (x), bb->index);
/* No control-flow insn may appear in the middle of a block.  */
1982 if (GET_CODE (x) == JUMP_INSN
1983 || GET_CODE (x) == CODE_LABEL
1984 || GET_CODE (x) == BARRIER)
1986 error ("in basic block %d:", bb->index);
1987 fatal_insn ("flow control insn inside a basic block", x);
1992 /* Complete edge checksumming for ENTRY and EXIT. */
1996 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1997 edge_checksum[e->dest->index + 2] += (size_t) e;
1999 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2000 edge_checksum[e->dest->index + 2] -= (size_t) e;
2003 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2004 if (edge_checksum[bb->index + 2])
2006 error ("basic block %i edge lists are corrupted", bb->index);
2011 last_bb_seen = ENTRY_BLOCK_PTR;
/* Walk the whole insn chain: count bb notes, verify consecutive block
   numbering, and check every insn outside a block is a permissible
   kind (note, barrier, addr_vec table, non-deletable label).  */
2013 for (x = rtx_first; x; x = NEXT_INSN (x))
2015 if (NOTE_INSN_BASIC_BLOCK_P (x))
2017 bb = NOTE_BASIC_BLOCK (x);
2020 if (bb != last_bb_seen->next_bb)
2021 internal_error ("basic blocks not numbered consecutively");
2026 if (!bb_info[INSN_UID (x)])
2028 switch (GET_CODE (x))
2035 /* An addr_vec is placed outside any basic block. */
2037 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
2038 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2039 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2042 /* But in any case, non-deletable labels can appear anywhere. */
2046 fatal_insn ("insn outside basic block", x);
2051 && GET_CODE (x) == JUMP_INSN
2052 && returnjump_p (x) && ! condjump_p (x)
2053 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
2054 fatal_insn ("return not followed by barrier", x);
2057 if (num_bb_notes != n_basic_blocks)
2059 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2060 num_bb_notes, n_basic_blocks);
2063 internal_error ("verify_flow_info failed");
2067 free (last_visited);
2068 free (edge_checksum);
2071 /* Assume that the preceding pass has possibly eliminated jump instructions
2072 or converted the unconditional jumps. Eliminate the edges from CFG.
2073 Return true if any edges are eliminated. */
/* NOTE(review): this excerpt is line-sampled; declarations, braces and a
   number of statements are not visible below.  */
2076 purge_dead_edges (bb)
2080 rtx insn = bb->end, note;
2081 bool purged = false;
2083 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2084 if (GET_CODE (insn) == INSN
2085 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
/* The note is stale when neither the pattern nor the associated
   REG_EQUAL/REG_EQUIV expression can trap.  */
2089 if (! may_trap_p (PATTERN (insn))
2090 || ((eqnote = find_reg_equal_equiv_note (insn))
2091 && ! may_trap_p (XEXP (eqnote, 0))))
2092 remove_note (insn, note);
2095 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2096 for (e = bb->succ; e; e = next)
2098 next = e->succ_next;
2099 if (e->flags & EDGE_EH)
2101 if (can_throw_internal (bb->end))
2104 else if (e->flags & EDGE_ABNORMAL_CALL)
2106 if (GET_CODE (bb->end) == CALL_INSN
2107 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2108 || INTVAL (XEXP (note, 0)) >= 0))
2115 bb->flags |= BB_DIRTY;
/* Jump-specific purging: decide which successor edges the block's
   final JUMP_INSN still justifies.  */
2119 if (GET_CODE (insn) == JUMP_INSN)
2124 /* We do care only about conditional jumps and simplejumps. */
2125 if (!any_condjump_p (insn)
2126 && !returnjump_p (insn)
2127 && !simplejump_p (insn))
2130 /* Branch probability/prediction notes are defined only for
2131 condjumps. We've possibly turned condjump into simplejump. */
2132 if (simplejump_p (insn))
2134 note = find_reg_note (insn, REG_BR_PROB, NULL);
2136 remove_note (insn, note);
2137 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2138 remove_note (insn, note);
2141 for (e = bb->succ; e; e = next)
2143 next = e->succ_next;
2145 /* Avoid abnormal flags to leak from computed jumps turned
2146 into simplejumps. */
2148 e->flags &= ~EDGE_ABNORMAL;
2150 /* See if this edge is one we should keep. */
2151 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2152 /* A conditional jump can fall through into the next
2153 block, so we should keep the edge. */
2155 else if (e->dest != EXIT_BLOCK_PTR
2156 && e->dest->head == JUMP_LABEL (insn))
2157 /* If the destination block is the target of the jump,
2160 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2161 /* If the destination block is the exit block, and this
2162 instruction is a return, then keep the edge. */
2164 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2165 /* Keep the edges that correspond to exceptions thrown by
2166 this instruction. */
2169 /* We do not need this edge. */
2170 bb->flags |= BB_DIRTY;
2175 if (!bb->succ || !purged)
2179 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
2184 /* Redistribute probabilities. */
/* A single surviving successor takes the block's entire weight...  */
2185 if (!bb->succ->succ_next)
2187 bb->succ->probability = REG_BR_PROB_BASE;
2188 bb->succ->count = bb->count;
/* ...otherwise split the weight between branch and fallthru edges
   according to the REG_BR_PROB note.  */
2192 note = find_reg_note (insn, REG_BR_PROB, NULL);
2196 b = BRANCH_EDGE (bb);
2197 f = FALLTHRU_EDGE (bb);
2198 b->probability = INTVAL (XEXP (note, 0));
2199 f->probability = REG_BR_PROB_BASE - b->probability;
2200 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2201 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2207 /* If we don't see a jump insn, we don't know exactly why the block would
2208 have been broken at this point. Look for a simple, non-fallthru edge,
2209 as these are only created by conditional branches. If we find such an
2210 edge we know that there used to be a jump here and can then safely
2211 remove all non-fallthru edges. */
2212 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
2219 for (e = bb->succ; e; e = next)
2221 next = e->succ_next;
2222 if (!(e->flags & EDGE_FALLTHRU))
2224 bb->flags |= BB_DIRTY;
2230 if (!bb->succ || bb->succ->succ_next)
2233 bb->succ->probability = REG_BR_PROB_BASE;
2234 bb->succ->count = bb->count;
2237 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
2242 /* Search all basic blocks for potentially dead edges and purge them. Return
2243 true if some edge has been eliminated. */
2246 purge_all_dead_edges (update_life_p)
2255 blocks = sbitmap_alloc (last_basic_block);
2256 sbitmap_zero (blocks);
2261 bool purged_here = purge_dead_edges (bb);
2263 purged |= purged_here;
2264 if (purged_here && update_life_p)
2265 SET_BIT (blocks, bb->index);
2268 if (update_life_p && purged)
2269 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2270 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2271 | PROP_KILL_DEAD_CODE);
2274 sbitmap_free (blocks);