1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
34 Functions not intended for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
42 #include "coretypes.h"
46 #include "hard-reg-set.h"
47 #include "basic-block.h"
56 #include "insn-config.h"
57 #include "cfglayout.h"
62 #include "tree-pass.h"
/* Forward declarations of the static helpers and of the rtl/cfglayout
   cfg-hook implementations defined later in this file.  */
65 static int can_delete_note_p (const_rtx);
66 static int can_delete_label_p (const_rtx);
67 static void commit_one_edge_insertion (edge);
68 static basic_block rtl_split_edge (edge);
69 static bool rtl_move_block_after (basic_block, basic_block);
70 static int rtl_verify_flow_info (void);
71 static basic_block cfg_layout_split_block (basic_block, void *);
72 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
73 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
74 static void cfg_layout_delete_block (basic_block);
75 static void rtl_delete_block (basic_block);
76 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
77 static edge rtl_redirect_edge_and_branch (edge, basic_block);
78 static basic_block rtl_split_block (basic_block, void *);
79 static void rtl_dump_bb (basic_block, FILE *, int);
80 static int rtl_verify_flow_info_1 (void);
81 static void rtl_make_forwarder_block (edge);
83 /* Return true if NOTE is not one of the ones that must be kept paired,
84 so that we may simply delete it. */
/* Only NOTE_INSN_DELETED and NOTE_INSN_BASIC_BLOCK notes are freely
   deletable; all other note kinds must be preserved.  */
87 can_delete_note_p (const_rtx note)
89 return (NOTE_KIND (note) == NOTE_INSN_DELETED
90 || NOTE_KIND (note) == NOTE_INSN_BASIC_BLOCK);
93 /* True if a given label can be deleted. */
96 can_delete_label_p (const_rtx label)
/* A label is deletable only when it is not marked as preserved, carries
   no user-visible name, and is not listed in forced_labels.  */
98 return (!LABEL_PRESERVE_P (label)
99 /* User declared labels must be preserved. */
100 && LABEL_NAME (label) == 0
101 && !in_expr_list_p (forced_labels, label));
104 /* Delete INSN by patching it out. Return the next insn. */
/* NOTE(review): this excerpt is missing interior lines of the body
   (local declarations, braces); the original statement order must be
   confirmed against the full file.  */
107 delete_insn (rtx insn)
109 rtx next = NEXT_INSN (insn);
111 bool really_delete = true;
115 /* Some labels can't be directly removed from the INSN chain, as they
116 might be referenced via variables, constant pool etc.
117 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
118 if (! can_delete_label_p (insn))
120 const char *name = LABEL_NAME (insn);
/* Keep the insn in the chain but demote it to a deleted-label note,
   preserving the label's name for debug output.  */
122 really_delete = false;
123 PUT_CODE (insn, NOTE);
124 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
125 NOTE_DELETED_LABEL_NAME (insn) = name;
128 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
133 /* If this insn has already been deleted, something is very wrong. */
134 gcc_assert (!INSN_DELETED_P (insn));
136 INSN_DELETED_P (insn) = 1;
139 /* If deleting a jump, decrement the use count of the label. Deleting
140 the label itself should happen in the normal course of block merging. */
143 if (JUMP_LABEL (insn)
144 && LABEL_P (JUMP_LABEL (insn)))
145 LABEL_NUSES (JUMP_LABEL (insn))--;
147 /* If there are more targets, remove them too. */
149 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
150 && LABEL_P (XEXP (note, 0)))
152 LABEL_NUSES (XEXP (note, 0))--;
153 remove_note (insn, note);
157 /* Also if deleting any insn that references a label as an operand. */
158 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
159 && LABEL_P (XEXP (note, 0)))
161 LABEL_NUSES (XEXP (note, 0))--;
162 remove_note (insn, note);
/* Jump tables: drop a use count for every label the vector targets.  */
166 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
167 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
169 rtx pat = PATTERN (insn);
170 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
171 int len = XVECLEN (pat, diff_vec_p);
174 for (i = 0; i < len; i++)
176 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
178 /* When deleting code in bulk (e.g. removing many unreachable
179 blocks) we can delete a label that's a target of the vector
180 before deleting the vector itself. */
182 LABEL_NUSES (label)--;
189 /* Like delete_insn but also purge dead edges from BB. */
/* The purge is only needed when INSN was the last insn of its block,
   since only then can its removal invalidate outgoing edges.  */
192 delete_insn_and_edges (rtx insn)
198 && BLOCK_FOR_INSN (insn)
199 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
201 x = delete_insn (insn);
203 purge_dead_edges (BLOCK_FOR_INSN (insn));
207 /* Unlink a chain of insns between START and FINISH, leaving notes
208 that must be paired. If CLEAR_BB is true, we set bb field for
209 insns that cannot be removed to NULL. */
212 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
216 /* Unchain the insns one by one. It would be quicker to delete all of these
217 with a single unchaining, rather than one at a time, but we need to keep
/* Notes that must stay paired are skipped rather than deleted.  */
221 next = NEXT_INSN (start);
222 if (NOTE_P (start) && !can_delete_note_p (start))
225 next = delete_insn (start);
227 if (clear_bb && !INSN_DELETED_P (start))
228 set_block_for_insn (start, NULL);
236 /* Like delete_insn_chain but also purge dead edges from BB. */
/* As with delete_insn_and_edges, edges need purging only when LAST
   ended its basic block.  */
239 delete_insn_chain_and_edges (rtx first, rtx last)
244 && BLOCK_FOR_INSN (last)
245 && BB_END (BLOCK_FOR_INSN (last)) == last)
247 delete_insn_chain (first, last, false);
249 purge_dead_edges (BLOCK_FOR_INSN (last));
252 /* Create a new basic block consisting of the instructions between HEAD and END
253 inclusive. This function is designed to allow fast BB construction - reuses
254 the note and basic block struct in BB_NOTE, if any and do not grow
255 BASIC_BLOCK chain and should be used directly only by CFG construction code.
256 END can be NULL to create new empty basic block before HEAD. Both END
257 and HEAD can be NULL to create basic block at the end of INSN chain.
258 AFTER is the basic block we should be put after. */
261 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
/* Reuse case: BB_NOTE already carries a basic block structure.  */
266 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
269 /* If we found an existing note, thread it back onto the chain. */
277 after = PREV_INSN (head);
281 if (after != bb_note && NEXT_INSN (after) != bb_note)
282 reorder_insns_nobb (bb_note, bb_note, after);
286 /* Otherwise we must create a note and a basic block structure. */
290 init_rtl_bb_info (bb);
/* Placement of the new NOTE_INSN_BASIC_BLOCK depends on whether a head
   insn exists and whether it is a label.  */
293 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
294 else if (LABEL_P (head) && end)
296 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
302 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
308 NOTE_BASIC_BLOCK (bb_note) = bb;
311 /* Always include the bb note in the block. */
312 if (NEXT_INSN (end) == bb_note)
/* Register the block in the global tables and mark it new/RTL.  */
317 bb->index = last_basic_block++;
318 bb->flags = BB_NEW | BB_RTL;
319 link_block (bb, after);
320 SET_BASIC_BLOCK (bb->index, bb);
321 df_bb_refs_record (bb->index, false);
322 update_bb_for_insn (bb);
323 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
325 /* Tag the block so that we know it has been used when considering
326 other basic block notes. */
332 /* Create new basic block consisting of instructions in between HEAD and END
333 and place it to the BB chain after block AFTER. END can be NULL to
334 create new empty basic block before HEAD. Both END and HEAD can be NULL to
335 create basic block at the end of INSN chain. */
338 rtl_create_basic_block (void *headp, void *endp, basic_block after)
340 rtx head = (rtx) headp, end = (rtx) endp;
343 /* Grow the basic block array if needed. */
344 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
/* Grow by 25% plus slack so repeated creation stays amortized O(1).  */
346 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
347 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
352 bb = create_basic_block_structure (head, end, NULL, after);
/* cfglayout-mode wrapper: delegate block creation to the rtl hook.
   NOTE(review): trailing lines of this function are not visible in
   this excerpt.  */
358 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
360 basic_block newbb = rtl_create_basic_block (head, end, after);
365 /* Delete the insns in a (non-live) block. We physically delete every
366 non-deleted-note insn, and update the flow graph appropriately.
368 Return nonzero if we deleted an exception handler. */
370 /* ??? Preserving all such notes strikes me as wrong. It would be nice
371 to post-process the stream to remove empty blocks, loops, ranges, etc. */
374 rtl_delete_block (basic_block b)
378 /* If the head of this block is a CODE_LABEL, then it might be the
379 label for an exception handler which can't be reached. We need
380 to remove the label from the exception_handler_label list. */
383 maybe_remove_eh_handler (insn);
/* get_last_bb_insn also covers any trailing barrier/table.  */
385 end = get_last_bb_insn (b);
387 /* Selectively delete the entire chain. */
389 delete_insn_chain (insn, end, true);
393 fprintf (dump_file, "deleting block %d\n", b->index);
394 df_bb_delete (b->index);
397 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
400 compute_bb_for_insn (void)
/* Walk each block's insn chain from head through end, stamping the
   owning block on every insn.  */
406 rtx end = BB_END (bb);
409 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
411 BLOCK_FOR_INSN (insn) = bb;
418 /* Release the basic_block_for_insn array. */
421 free_bb_for_insn (void)
/* Barriers live between blocks and never have a block pointer, so
   they are skipped.  */
424 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
425 if (!BARRIER_P (insn))
426 BLOCK_FOR_INSN (insn) = NULL;
/* Pass descriptor: running this pass executes free_bb_for_insn and
   records that the CFG property (PROP_cfg) is destroyed.  */
430 struct tree_opt_pass pass_free_cfg =
434 free_bb_for_insn, /* execute */
437 0, /* static_pass_number */
439 0, /* properties_required */
440 0, /* properties_provided */
441 PROP_cfg, /* properties_destroyed */
442 0, /* todo_flags_start */
443 0, /* todo_flags_finish */
447 /* Return RTX to emit after when we want to emit code on the entry of function. */
449 entry_of_function (void)
/* If real blocks exist, use the head of the block following the entry
   block; otherwise fall back to the start of the insn chain.  */
451 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
452 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
455 /* Emit INSN at the entry point of the function, ensuring that it is only
456 executed once per function. */
458 emit_insn_at_entry (rtx insn)
460 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
461 edge e = ei_safe_edge (ei);
/* The entry block's single successor edge must be the fallthru one.  */
462 gcc_assert (e->flags & EDGE_FALLTHRU);
464 insert_insn_on_edge (insn, e);
465 commit_edge_insertions ();
468 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
469 (or BARRIER if found) and notify df of the bb change.
470 The insn chain range is inclusive
471 (i.e. both BEGIN and END will be updated. */
474 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
/* Make the loop bound exclusive by stepping END forward once.  */
478 end = NEXT_INSN (end);
479 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
480 if (!BARRIER_P (insn))
481 df_insn_change_bb (insn, bb);
484 /* Update BLOCK_FOR_INSN of insns in BB to BB,
485 and notify df of the change. */
488 update_bb_for_insn (basic_block bb)
490 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
494 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
495 note associated with the BLOCK. */
498 first_insn_after_basic_block_note (basic_block block)
502 /* Get the first instruction in the block. */
503 insn = BB_HEAD (block);
505 if (insn == NULL_RTX)
/* Skip past the (possible) leading label to reach the bb note, which
   is asserted to be present.  */
508 insn = NEXT_INSN (insn);
509 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
511 return NEXT_INSN (insn);
514 /* Creates a new basic block just after basic block B by splitting
515 everything after specified instruction I. */
518 rtl_split_block (basic_block bb, void *insnp)
521 rtx insn = (rtx) insnp;
/* NOTE(review): the branch structure choosing between these fallback
   split points is partially missing from this excerpt.  */
527 insn = first_insn_after_basic_block_note (bb);
530 insn = PREV_INSN (insn);
532 insn = get_last_insn ();
535 /* We probably should check type of the insn so that we do not create
536 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
538 if (insn == BB_END (bb))
539 emit_note_after (NOTE_INSN_DELETED, insn);
541 /* Create the new basic block. */
542 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
543 BB_COPY_PARTITION (new_bb, bb);
546 /* Redirect the outgoing edges. */
547 new_bb->succs = bb->succs;
549 FOR_EACH_EDGE (e, ei, new_bb->succs)
552 /* The new block starts off being dirty. */
553 df_set_bb_dirty (bb);
557 /* Blocks A and B are to be merged into a single block A. The insns
558 are already contiguous. */
561 rtl_merge_blocks (basic_block a, basic_block b)
563 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
564 rtx del_first = NULL_RTX, del_last = NULL_RTX;
568 fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
570 /* If there was a CODE_LABEL beginning B, delete it. */
571 if (LABEL_P (b_head))
573 /* This might have been an EH label that no longer has incoming
574 EH edges. Update data structures to match. */
575 maybe_remove_eh_handler (b_head);
577 /* Detect basic blocks with nothing but a label. This can happen
578 in particular at the end of a function. */
582 del_first = del_last = b_head;
583 b_head = NEXT_INSN (b_head);
586 /* Delete the basic block note and handle blocks containing just that
588 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
596 b_head = NEXT_INSN (b_head);
599 /* If there was a jump out of A, delete it. */
/* Scan backwards to find A's final jump; stop at a bb note or A's
   head so the scan cannot leave block A.  */
604 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
606 || NOTE_INSN_BASIC_BLOCK_P (prev)
607 || prev == BB_HEAD (a))
613 /* If this was a conditional jump, we need to also delete
614 the insn that set cc0. */
615 if (only_sets_cc0_p (prev))
619 prev = prev_nonnote_insn (prev);
626 a_end = PREV_INSN (del_first);
628 else if (BARRIER_P (NEXT_INSN (a_end)))
629 del_first = NEXT_INSN (a_end);
631 /* Delete everything marked above as well as crap that might be
632 hanging out between the two blocks. */
634 delete_insn_chain (del_first, del_last, true);
636 /* Reassociate the insns of B with A. */
639 update_bb_for_insn_chain (a_end, b_end, a);
644 df_bb_delete (b->index);
649 /* Return true when block A and B can be merged. */
652 rtl_can_merge_blocks (basic_block a, basic_block b)
654 /* If we are partitioning hot/cold basic blocks, we don't want to
655 mess up unconditional or indirect jumps that cross between hot
658 Basic block partitioning may result in some jumps that appear to
659 be optimizable (or blocks that appear to be mergeable), but which really
660 must be left untouched (they are required to make it safely across
661 partition boundaries). See the comments at the top of
662 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
664 if (BB_PARTITION (a) != BB_PARTITION (b))
667 /* There must be exactly one edge in between the blocks. */
668 return (single_succ_p (a)
669 && single_succ (a) == b
672 /* Must be simple edge. */
673 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
675 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
676 /* If the jump insn has side effects,
677 we can't kill the edge. */
678 && (!JUMP_P (BB_END (a))
680 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
683 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
687 block_label (basic_block block)
/* The exit block has no label and never gets one.  */
689 if (block == EXIT_BLOCK_PTR)
692 if (!LABEL_P (BB_HEAD (block)))
694 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
697 return BB_HEAD (block);
700 /* Attempt to perform edge redirection by replacing possibly complex jump
701 instruction by unconditional jump or removing jump completely. This can
702 apply only if all edges now point to the same block. The parameters and
703 return values are equivalent to redirect_edge_and_branch. */
/* NOTE(review): several interior lines (braces, else-arms, early
   returns) are missing in this excerpt; verify control flow against
   the full source.  */
706 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
708 basic_block src = e->src;
709 rtx insn = BB_END (src), kill_from;
713 /* If we are partitioning hot/cold basic blocks, we don't want to
714 mess up unconditional or indirect jumps that cross between hot
717 Basic block partitioning may result in some jumps that appear to
718 be optimizable (or blocks that appear to be mergeable), but which really
719 must be left untouched (they are required to make it safely across
720 partition boundaries). See the comments at the top of
721 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
723 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
724 || BB_PARTITION (src) != BB_PARTITION (target))
727 /* We can replace or remove a complex jump only when we have exactly
728 two edges. Also, if we have exactly one outgoing edge, we can
730 if (EDGE_COUNT (src->succs) >= 3
731 /* Verify that all targets will be TARGET. Specifically, the
732 edge that is not E must also go to TARGET. */
733 || (EDGE_COUNT (src->succs) == 2
734 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
737 if (!onlyjump_p (insn))
739 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
742 /* Avoid removing branch with side effects. */
743 set = single_set (insn);
744 if (!set || side_effects_p (set))
747 /* In case we zap a conditional jump, we'll need to kill
748 the cc0 setter too. */
751 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
752 && only_sets_cc0_p (PREV_INSN (insn)))
753 kill_from = PREV_INSN (insn);
756 /* See if we can create the fallthru edge. */
757 if (in_cfglayout || can_fallthru (src, target))
760 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
763 /* Selectively unlink whole insn chain. */
766 rtx insn = src->il.rtl->footer;
768 delete_insn_chain (kill_from, BB_END (src), false);
770 /* Remove barriers but keep jumptables. */
/* Unlink a barrier from the footer chain by splicing its neighbors
   together in both directions.  */
773 if (BARRIER_P (insn))
775 if (PREV_INSN (insn))
776 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
778 src->il.rtl->footer = NEXT_INSN (insn);
779 if (NEXT_INSN (insn))
780 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
784 insn = NEXT_INSN (insn);
788 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
792 /* If this already is simplejump, redirect it. */
793 else if (simplejump_p (insn))
795 if (e->dest == target)
798 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
799 INSN_UID (insn), e->dest->index, target->index);
800 if (!redirect_jump (insn, block_label (target), 0))
/* Redirecting a simplejump may only fail for the exit block.  */
802 gcc_assert (target == EXIT_BLOCK_PTR);
807 /* Cannot do anything for target exit block. */
808 else if (target == EXIT_BLOCK_PTR)
811 /* Or replace possibly complicated jump insn by simple jump insn. */
814 rtx target_label = block_label (target);
815 rtx barrier, label, table;
817 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
818 JUMP_LABEL (BB_END (src)) = target_label;
819 LABEL_NUSES (target_label)++;
821 fprintf (dump_file, "Replacing insn %i by jump %i\n",
822 INSN_UID (insn), INSN_UID (BB_END (src)));
825 delete_insn_chain (kill_from, insn, false);
827 /* Recognize a tablejump that we are converting to a
828 simple jump and remove its associated CODE_LABEL
829 and ADDR_VEC or ADDR_DIFF_VEC. */
830 if (tablejump_p (insn, &label, &table))
831 delete_insn_chain (label, table, false);
833 barrier = next_nonnote_insn (BB_END (src));
834 if (!barrier || !BARRIER_P (barrier))
835 emit_barrier_after (BB_END (src));
838 if (barrier != NEXT_INSN (BB_END (src)))
840 /* Move the jump before barrier so that the notes
841 which originally were or were created before jump table are
842 inside the basic block. */
843 rtx new_insn = BB_END (src);
845 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
846 PREV_INSN (barrier), src);
/* Unlink new_insn from its current spot, then relink it directly
   before the barrier.  */
848 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
849 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
851 NEXT_INSN (new_insn) = barrier;
852 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
854 PREV_INSN (new_insn) = PREV_INSN (barrier);
855 PREV_INSN (barrier) = new_insn;
860 /* Keep only one edge out and set proper flags. */
861 if (!single_succ_p (src))
863 gcc_assert (single_succ_p (src));
865 e = single_succ_edge (src);
867 e->flags = EDGE_FALLTHRU;
871 e->probability = REG_BR_PROB_BASE;
872 e->count = src->count;
874 if (e->dest != target)
875 redirect_edge_succ (e, target);
879 /* Redirect edge representing branch of (un)conditional jump or tablejump,
882 redirect_branch_edge (edge e, basic_block target)
885 rtx old_label = BB_HEAD (e->dest);
886 basic_block src = e->src;
887 rtx insn = BB_END (src);
889 /* We can only redirect non-fallthru edges of jump insn. */
890 if (e->flags & EDGE_FALLTHRU)
892 else if (!JUMP_P (insn))
895 /* Recognize a tablejump and adjust all matching cases. */
896 if (tablejump_p (insn, NULL, &tmp))
900 rtx new_label = block_label (target);
902 if (target == EXIT_BLOCK_PTR)
904 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC
905 vec = XVEC (PATTERN (tmp), 0);
907 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry that referenced the old label, keeping
   the label use counts in sync.  */
909 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
910 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
912 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
913 --LABEL_NUSES (old_label);
914 ++LABEL_NUSES (new_label);
917 /* Handle casesi dispatch insns. */
918 if ((tmp = single_set (insn)) != NULL
919 && SET_DEST (tmp) == pc_rtx
920 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
921 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
922 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
924 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
926 --LABEL_NUSES (old_label);
927 ++LABEL_NUSES (new_label);
932 /* ?? We may play the games with moving the named labels from
933 one basic block to the other in case only one computed_jump is
935 if (computed_jump_p (insn)
936 /* A return instruction can't be redirected. */
937 || returnjump_p (insn))
940 /* If the insn doesn't go where we think, we're confused. */
941 gcc_assert (JUMP_LABEL (insn) == old_label);
943 /* If the substitution doesn't succeed, die. This can happen
944 if the back end emitted unrecognizable instructions or if
945 target is exit block on some arches. */
946 if (!redirect_jump (insn, block_label (target), 0))
948 gcc_assert (target == EXIT_BLOCK_PTR);
954 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
955 e->src->index, e->dest->index, target->index);
957 if (e->dest != target)
958 e = redirect_edge_succ_nodup (e, target);
963 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
964 expense of adding new instructions or reordering basic blocks.
966 Function can be also called with edge destination equivalent to the TARGET.
967 Then it should try the simplifications and do nothing if none is possible.
969 Return edge representing the branch if transformation succeeded. Return NULL
971 We still return NULL in case E already pointed to TARGET and we didn't
972 manage to simplify the instruction stream. */
975 rtl_redirect_edge_and_branch (edge e, basic_block target)
978 basic_block src = e->src;
/* Abnormal-call and EH edges cannot be redirected by rewriting jumps.  */
980 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
983 if (e->dest == target)
986 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
988 df_set_bb_dirty (src);
992 ret = redirect_branch_edge (e, target);
996 df_set_bb_dirty (src);
1000 /* Like force_nonfallthru below, but additionally performs redirection
1001 Used by redirect_edge_and_branch_force. */
/* NOTE(review): interior lines are missing in this excerpt; the
   original control flow must be checked against the full file.  */
1004 force_nonfallthru_and_redirect (edge e, basic_block target)
1006 basic_block jump_block, new_bb = NULL, src = e->src;
1009 int abnormal_edge_flags = 0;
1011 /* In the case the last instruction is conditional jump to the next
1012 instruction, first redirect the jump itself and then continue
1013 by creating a basic block afterwards to redirect fallthru edge. */
1014 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1015 && any_condjump_p (BB_END (e->src))
1016 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1019 edge b = unchecked_make_edge (e->src, target, 0);
1022 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1023 gcc_assert (redirected);
1025 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
/* Split the branch probability/count between the new edge B and the
   remaining fallthru edge E.  */
1028 int prob = INTVAL (XEXP (note, 0));
1030 b->probability = prob;
1031 b->count = e->count * prob / REG_BR_PROB_BASE;
1032 e->probability -= e->probability;
1033 e->count -= b->count;
1034 if (e->probability < 0)
1041 if (e->flags & EDGE_ABNORMAL)
1043 /* Irritating special case - fallthru edge to the same block as abnormal
1045 We can't redirect abnormal edge, but we still can split the fallthru
1046 one and create separate abnormal edge to original destination.
1047 This allows bb-reorder to make such edge non-fallthru. */
1048 gcc_assert (e->dest == target);
1049 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1050 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1054 gcc_assert (e->flags & EDGE_FALLTHRU);
1055 if (e->src == ENTRY_BLOCK_PTR)
1057 /* We can't redirect the entry block. Create an empty block
1058 at the start of the function which we use to add the new
1064 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1066 /* Change the existing edge's source to be the new block, and add
1067 a new edge from the entry block to the new block. */
1069 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1073 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1083 VEC_safe_push (edge, gc, bb->succs, e);
1084 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1088 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
1090 /* Create the new structures. */
1092 /* If the old block ended with a tablejump, skip its table
1093 by searching forward from there. Otherwise start searching
1094 forward from the last instruction of the old block. */
1095 if (!tablejump_p (BB_END (e->src), NULL, ¬e))
1096 note = BB_END (e->src);
1097 note = NEXT_INSN (note);
1099 jump_block = create_basic_block (note, NULL, e->src);
1100 jump_block->count = e->count;
1101 jump_block->frequency = EDGE_FREQUENCY (e);
1102 jump_block->loop_depth = target->loop_depth;
1104 /* Make sure new block ends up in correct hot/cold section. */
1106 BB_COPY_PARTITION (jump_block, e->src);
1107 if (flag_reorder_blocks_and_partition
1108 && targetm.have_named_sections
1109 && JUMP_P (BB_END (jump_block))
1110 && !any_condjump_p (BB_END (jump_block))
1111 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1112 REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
/* Wire up: e->src falls through to jump_block, which then jumps.  */
1119 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1120 new_edge->probability = e->probability;
1121 new_edge->count = e->count;
1123 /* Redirect old edge. */
1124 redirect_edge_pred (e, jump_block);
1125 e->probability = REG_BR_PROB_BASE;
1127 new_bb = jump_block;
1130 jump_block = e->src;
1132 e->flags &= ~EDGE_FALLTHRU;
1133 if (target == EXIT_BLOCK_PTR)
1136 emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
1143 rtx label = block_label (target);
1144 emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
1145 JUMP_LABEL (BB_END (jump_block)) = label;
1146 LABEL_NUSES (label)++;
1149 emit_barrier_after (BB_END (jump_block));
1150 redirect_edge_succ_nodup (e, target);
1152 if (abnormal_edge_flags)
1153 make_edge (src, target, abnormal_edge_flags);
1155 df_mark_solutions_dirty ();
1159 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1160 (and possibly create new basic block) to make edge non-fallthru.
1161 Return newly created BB or NULL if none. */
1164 force_nonfallthru (edge e)
/* Redirecting to the edge's own destination performs only the
   fallthru-to-jump conversion.  */
1166 return force_nonfallthru_and_redirect (e, e->dest);
1169 /* Redirect edge even at the expense of creating new jump insn or
1170 basic block. Return new basic block if created, NULL otherwise.
1171 Conversion must be possible. */
1174 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
/* Try the cheap redirection first; it suffices when it succeeds or
   when E already reaches TARGET.  */
1176 if (redirect_edge_and_branch (e, target)
1177 || e->dest == target)
1180 /* In case the edge redirection failed, try to force it to be non-fallthru
1181 and redirect newly created simplejump. */
1182 df_set_bb_dirty (e->src);
1183 return force_nonfallthru_and_redirect (e, target);
1186 /* The given edge should potentially be a fallthru edge. If that is in
1187 fact true, delete the jump and barriers that are in the way. */
1190 rtl_tidy_fallthru_edge (edge e)
1193 basic_block b = e->src, c = b->next_bb;
1195 /* ??? In a late-running flow pass, other folks may have deleted basic
1196 blocks by nopping out blocks, leaving multiple BARRIERs between here
1197 and the target label. They ought to be chastised and fixed.
1199 We can also wind up with a sequence of undeletable labels between
1200 one block and the next.
1202 So search through a sequence of barriers, labels, and notes for
1203 the head of block C and assert that we really do fall through. */
1205 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1209 /* Remove what will soon cease being the jump insn from the source block.
1210 If block B consisted only of this single jump, turn it into a deleted
1215 && (any_uncondjump_p (q)
1216 || single_succ_p (b)))
1219 /* If this was a conditional jump, we need to also delete
1220 the insn that set cc0. */
1221 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1228 /* Selectively unlink the sequence. */
1229 if (q != PREV_INSN (BB_HEAD (c)))
1230 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
/* B now falls straight through to C.  */
1232 e->flags |= EDGE_FALLTHRU;
1235 /* Should move basic block BB after basic block AFTER. NIY. */
/* Not implemented for plain rtl mode; the hook exists only so the
   cfghooks table has an entry.  */
1238 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1239 basic_block after ATTRIBUTE_UNUSED)
1244 /* Split a (typically critical) edge. Return the new block.
1245 The edge must not be abnormal.
1247 ??? The code generally expects to be called on critical edges.
1248 The case of a block ending in an unconditional jump to a
1249 block with multiple predecessors is not handled optimally. */
1252 rtl_split_edge (edge edge_in)
1257 /* Abnormal edges cannot be split. */
1258 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1260 /* We are going to place the new block in front of edge destination.
1261 Avoid existence of fallthru predecessors. */
1262 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1267 FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
1268 if (e->flags & EDGE_FALLTHRU)
1272 force_nonfallthru (e);
1275 /* Create the basic block note. */
1276 if (edge_in->dest != EXIT_BLOCK_PTR)
1277 before = BB_HEAD (edge_in->dest);
1281 /* If this is a fall through edge to the exit block, the blocks might be
1282 not adjacent, and the right place is the after the source. */
1283 if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
1285 before = NEXT_INSN (BB_END (edge_in->src));
1286 bb = create_basic_block (before, NULL, edge_in->src);
1287 BB_COPY_PARTITION (bb, edge_in->src);
1291 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1292 /* ??? Why not edge_in->dest->prev_bb here? */
1293 BB_COPY_PARTITION (bb, edge_in->dest);
/* The new block always falls through to the old destination.  */
1296 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1298 /* For non-fallthru edges, we must adjust the predecessor's
1299 jump instruction to target our new block. */
1300 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1302 edge redirected = redirect_edge_and_branch (edge_in, bb);
1303 gcc_assert (redirected);
1306 redirect_edge_succ (edge_in, bb);
1311 /* Queue instructions for insertion on an edge between two basic blocks.
1312 The new instructions and basic blocks (if any) will not appear in the
1313 CFG until commit_edge_insertions is called. */
1316 insert_insn_on_edge (rtx pattern, edge e)
1318 /* We cannot insert instructions on an abnormal critical edge.
1319 It will be easier to find the culprit if we die now. */
1320 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
/* Append PATTERN to the edge's pending-insn sequence, starting a new
   sequence if the edge has none queued yet.  */
1322 if (e->insns.r == NULL_RTX)
1325 push_to_sequence (e->insns.r);
1327 emit_insn (pattern);
1329 e->insns.r = get_insns ();
1333 /* Update the CFG for the instructions queued on edge E. */
1336 commit_one_edge_insertion (edge e)
1338 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1339 basic_block bb = NULL;
1341 /* Pull the insns off the edge now since the edge might go away. */
1343 e->insns.r = NULL_RTX;
1345 if (!before && !after)
1347 /* Figure out where to put these things. If the destination has
1348 one predecessor, insert there. Except for the exit block. */
1349 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1353 /* Get the location correct wrt a code label, and "nice" wrt
1354 a basic block note, and before everything else. */
1357 tmp = NEXT_INSN (tmp);
1358 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1359 tmp = NEXT_INSN (tmp);
1360 if (tmp == BB_HEAD (bb))
1363 after = PREV_INSN (tmp);
1365 after = get_last_insn ();
1368 /* If the source has one successor and the edge is not abnormal,
1369 insert there. Except for the entry block. */
1370 else if ((e->flags & EDGE_ABNORMAL) == 0
1371 && single_succ_p (e->src)
1372 && e->src != ENTRY_BLOCK_PTR)
1376 /* It is possible to have a non-simple jump here. Consider a target
1377 where some forms of unconditional jumps clobber a register. This
1378 happens on the fr30 for example.
1380 We know this block has a single successor, so we can just emit
1381 the queued insns before the jump. */
1382 if (JUMP_P (BB_END (bb)))
1383 before = BB_END (bb);
1386 /* We'd better be fallthru, or we've lost track of
1388 gcc_assert (e->flags & EDGE_FALLTHRU);
1390 after = BB_END (bb);
1393 /* Otherwise we must split the edge. */
1396 bb = split_edge (e);
1397 after = BB_END (bb);
  /* Hot/cold partitioning: if the split block came from the cold
     partition and its jump now crosses sections, it must carry a
     REG_CROSSING_JUMP note (added below).  NOTE(review): some lines
     of this handling are elided in this excerpt -- confirm against
     the full source.  */
1399 if (flag_reorder_blocks_and_partition
1400 && targetm.have_named_sections
1401 && e->src != ENTRY_BLOCK_PTR
1402 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1403 && !(e->flags & EDGE_CROSSING))
1405 rtx bb_note, cur_insn;
1408 for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
1409 cur_insn = NEXT_INSN (cur_insn))
1410 if (NOTE_INSN_BASIC_BLOCK_P (cur_insn))
1416 if (JUMP_P (BB_END (bb))
1417 && !any_condjump_p (BB_END (bb))
1418 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1419 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
1420 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
1425 /* Now that we've found the spot, do the insertion. */
1429 emit_insn_before_noloc (insns, before, bb);
1430 last = prev_nonnote_insn (before);
1433 last = emit_insn_after_noloc (insns, after, bb);
  /* If the inserted sequence ends in a return, the edge to EXIT is
     no longer a fallthru: clear the flag and emit a barrier.  */
1435 if (returnjump_p (last))
1437 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1438 This is not currently a problem because this only happens
1439 for the (single) epilogue, which already has a fallthru edge
1442 e = single_succ_edge (bb);
1443 gcc_assert (e->dest == EXIT_BLOCK_PTR
1444 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1446 e->flags &= ~EDGE_FALLTHRU;
1447 emit_barrier_after (last);
1450 delete_insn (before);
1453 gcc_assert (!JUMP_P (last));
1455 /* Mark the basic block for find_many_sub_basic_blocks. */
1456 if (current_ir_type () != IR_RTL_CFGLAYOUT)
1460 /* Update the CFG for all queued instructions. */
1463 commit_edge_insertions (void)
1467 bool changed = false;
1469 #ifdef ENABLE_CHECKING
1470 verify_flow_info ();
  /* Walk every block between ENTRY and EXIT and commit the insns
     queued on each of its outgoing edges.  */
1473 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1478 FOR_EACH_EDGE (e, ei, bb->succs)
1482 commit_one_edge_insertion (e);
1489 /* In the old rtl CFG API, it was OK to insert control flow on an
1490 edge, apparently? In cfglayout mode, this will *not* work, and
1491 the caller is responsible for making sure that control flow is
1492 valid at all times. */
1493 if (current_ir_type () == IR_RTL_CFGLAYOUT)
  /* Not in cfglayout mode: re-scan the marked blocks, since the
     inserted insns may have introduced new sub-basic-blocks.  */
1496 blocks = sbitmap_alloc (last_basic_block);
1497 sbitmap_zero (blocks);
1501 SET_BIT (blocks, bb->index);
1502 /* Check for forgotten bb->aux values before commit_edge_insertions
1504 gcc_assert (bb->aux == &bb->aux);
1507 find_many_sub_basic_blocks (blocks);
1508 sbitmap_free (blocks);
1512 /* Print out RTL-specific basic block information (live information
1513 at start and end). */
1516 rtl_dump_bb (basic_block bb, FILE *outf, int indent)
  /* Build an INDENT-character prefix of spaces.  */
1522 s_indent = (char *) alloca ((size_t) indent + 1);
1523 memset (s_indent, ' ', (size_t) indent);
1524 s_indent[indent] = '\0';
1528 df_dump_top (bb, outf);
  /* Print every insn of BB, bracketed by the dataflow info above
     and below.  */
1532 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1533 insn = NEXT_INSN (insn))
1534 print_rtl_single (outf, insn);
1538 df_dump_bottom (bb, outf);
1544 /* Like print_rtl, but also print out live information for the start of each
1548 print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
1552 fprintf (outf, "(nil)\n");
1555 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
  /* Per-UID tables: which bb (if any) starts/ends at each insn, and
     whether each insn belongs to zero, one, or multiple blocks.  */
1556 int max_uid = get_max_uid ();
1557 basic_block *start = XCNEWVEC (basic_block, max_uid);
1558 basic_block *end = XCNEWVEC (basic_block, max_uid);
1559 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1564 df_dump_start (outf);
  /* First pass: fill the tables by walking each block's insns.  */
1566 FOR_EACH_BB_REVERSE (bb)
1570 start[INSN_UID (BB_HEAD (bb))] = bb;
1571 end[INSN_UID (BB_END (bb))] = bb;
1572 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1574 enum bb_state state = IN_MULTIPLE_BB;
1576 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1578 in_bb_p[INSN_UID (x)] = state;
1580 if (x == BB_END (bb))
  /* Second pass: print the insn chain, annotating block starts and
     ends with predecessor/successor edge information.  */
1585 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1588 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1593 fprintf (outf, ";; Start of basic block (");
1594 FOR_EACH_EDGE (e, ei, bb->preds)
1595 fprintf (outf, " %d", e->src->index);
1596 fprintf (outf, ") -> %d\n", bb->index);
1600 df_dump_top (bb, outf);
1603 FOR_EACH_EDGE (e, ei, bb->preds)
1605 fputs (";; Pred edge ", outf);
1606 dump_edge_info (outf, e, 0);
1611 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1612 && !NOTE_P (tmp_rtx)
1613 && !BARRIER_P (tmp_rtx))
1614 fprintf (outf, ";; Insn is not within a basic block\n");
1615 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1616 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1618 did_output = print_rtl_single (outf, tmp_rtx);
1620 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1625 fprintf (outf, ";; End of basic block %d -> (", bb->index);
1626 FOR_EACH_EDGE (e, ei, bb->succs)
1627 fprintf (outf, " %d", e->dest->index);
1628 fprintf (outf, ")\n");
1632 df_dump_bottom (bb, outf);
1636 FOR_EACH_EDGE (e, ei, bb->succs)
1638 fputs (";; Succ edge ", outf);
1639 dump_edge_info (outf, e, 1);
  /* Epilogue delay slot insns live outside the normal chain; dump
     them separately.  */
1652 if (current_function_epilogue_delay_list != 0)
1654 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1655 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1656 tmp_rtx = XEXP (tmp_rtx, 1))
1657 print_rtl_single (outf, XEXP (tmp_rtx, 0));
/* Synchronize the REG_BR_PROB note on BB's final jump insn with the
   branch probability recorded in the CFG.  Does nothing if BB does not
   end in a jump, has no such note, or the note already agrees.  */
1662 update_br_prob_note (basic_block bb)
1665 if (!JUMP_P (BB_END (bb)))
1667 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1668 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1670 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1673 /* Get the last insn associated with block BB (that includes barriers and
1674 tablejumps after BB). */
1676 get_last_bb_insn (basic_block bb)
1679 rtx end = BB_END (bb);
1681 /* Include any jump table following the basic block. */
1682 if (tablejump_p (end, NULL, &tmp))
1685 /* Include any barriers that may follow the basic block. */
1686 tmp = next_nonnote_insn (end);
1687 while (tmp && BARRIER_P (tmp))
  /* Keep extending END over each trailing barrier.  */
1690 tmp = next_nonnote_insn (end);
1696 /* Verify the CFG and RTL consistency common for both underlying RTL and
1699 Currently it does following checks:
1701 - overlapping of basic blocks
1702 - insns with wrong BLOCK_FOR_INSN pointers
1703 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1704 - tails of basic blocks (ensure that boundary is necessary)
1705 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1706 and NOTE_INSN_BASIC_BLOCK
1707 - verify that no fall_thru edge crosses hot/cold partition boundaries
1708 - verify that there are no pending RTL branch predictions
1710 In future it can be extended to check a lot of other stuff as well
1711 (reachability of basic blocks, life information, etc. etc.). */
1714 rtl_verify_flow_info_1 (void)
1720 /* Check the general integrity of the basic blocks. */
1721 FOR_EACH_BB_REVERSE (bb)
1725 if (!(bb->flags & BB_RTL))
1727 error ("BB_RTL flag not set for block %d", bb->index);
  /* Every insn inside BB must point back at BB.  */
1731 FOR_BB_INSNS (bb, insn)
1732 if (BLOCK_FOR_INSN (insn) != bb)
1734 error ("insn %d basic block pointer is %d, should be %d",
1736 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
  /* Header/footer insns (cfglayout bookkeeping) must have a NULL
     block pointer, except barriers which carry none at all.  */
1741 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
1742 if (!BARRIER_P (insn)
1743 && BLOCK_FOR_INSN (insn) != NULL)
1745 error ("insn %d in header of bb %d has non-NULL basic block",
1746 INSN_UID (insn), bb->index);
1749 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
1750 if (!BARRIER_P (insn)
1751 && BLOCK_FOR_INSN (insn) != NULL)
1753 error ("insn %d in footer of bb %d has non-NULL basic block",
1754 INSN_UID (insn), bb->index);
1759 /* Now check the basic blocks (boundaries etc.) */
1760 FOR_EACH_BB_REVERSE (bb)
1762 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1763 edge e, fallthru = NULL;
  /* A conditional jump's REG_BR_PROB note must agree with the CFG's
     branch probability (unless the profile is absent).  */
1767 if (JUMP_P (BB_END (bb))
1768 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1769 && EDGE_COUNT (bb->succs) >= 2
1770 && any_condjump_p (BB_END (bb)))
1772 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1773 && profile_status != PROFILE_ABSENT)
1775 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1776 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
  /* Classify and count the outgoing edges.  */
1780 FOR_EACH_EDGE (e, ei, bb->succs)
1782 if (e->flags & EDGE_FALLTHRU)
1784 n_fallthru++, fallthru = e;
1785 if ((e->flags & EDGE_CROSSING)
1786 || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1787 && e->src != ENTRY_BLOCK_PTR
1788 && e->dest != EXIT_BLOCK_PTR))
1790 error ("fallthru edge crosses section boundary (bb %i)",
1796 if ((e->flags & ~(EDGE_DFS_BACK
1798 | EDGE_IRREDUCIBLE_LOOP
1800 | EDGE_CROSSING)) == 0)
1803 if (e->flags & EDGE_ABNORMAL_CALL)
1806 if (e->flags & EDGE_EH)
1808 else if (e->flags & EDGE_ABNORMAL)
  /* The edge counts must now be consistent with the kind of insn
     that ends BB.  */
1812 if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1813 && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1815 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
1819 && (!JUMP_P (BB_END (bb))
1820 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1821 || any_condjump_p (BB_END (bb))))))
1823 error ("too many outgoing branch edges from bb %i", bb->index);
1826 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1828 error ("fallthru edge after unconditional jump %i", bb->index);
1831 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1833 error ("wrong amount of branch edges after unconditional jump %i", bb->index);
1836 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1837 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1839 error ("wrong amount of branch edges after conditional jump %i",
1843 if (n_call && !CALL_P (BB_END (bb)))
1845 error ("call edges for non-call insn in bb %i", bb->index);
1849 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
1850 && (!JUMP_P (BB_END (bb))
1851 || any_condjump_p (BB_END (bb))
1852 || any_uncondjump_p (BB_END (bb))))
1854 error ("abnormal edges for no purpose in bb %i", bb->index);
1858 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1859 /* We may have a barrier inside a basic block before dead code
1860 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
1861 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
1864 if (! BLOCK_FOR_INSN (x))
1866 ("insn %d inside basic block %d but block_for_insn is NULL",
1867 INSN_UID (x), bb->index);
1870 ("insn %d inside basic block %d but block_for_insn is %i",
1871 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1876 /* OK pointers are correct. Now check the header of basic
1877 block. It ought to contain optional CODE_LABEL followed
1878 by NOTE_BASIC_BLOCK. */
1882 if (BB_END (bb) == x)
1884 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1892 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1894 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1899 if (BB_END (bb) == x)
1900 /* Do checks for empty blocks here. */
  /* Scan the rest of the block body: no stray bb notes, and no
     control flow insn before the block end.  */
1903 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1905 if (NOTE_INSN_BASIC_BLOCK_P (x))
1907 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1908 INSN_UID (x), bb->index);
1912 if (x == BB_END (bb))
1915 if (control_flow_insn_p (x))
1917 error ("in basic block %d:", bb->index);
1918 fatal_insn ("flow control insn inside a basic block", x);
1927 /* Verify the CFG and RTL consistency common for both underlying RTL and
1930 Currently it does following checks:
1931 - all checks of rtl_verify_flow_info_1
1932 - test head/end pointers
1933 - check that all insns are in the basic blocks
1934 (except the switch handling code, barriers and notes)
1935 - check that all returns are followed by barriers
1936 - check that all fallthru edge points to the adjacent blocks. */
1939 rtl_verify_flow_info (void)
1942 int err = rtl_verify_flow_info_1 ();
1944 rtx last_head = get_last_insn ();
1945 basic_block *bb_info;
1947 const rtx rtx_first = get_insns ();
1948 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
1949 const int max_uid = get_max_uid ();
1951 bb_info = XCNEWVEC (basic_block, max_uid);
  /* Walk the blocks in reverse layout order, scanning the insn chain
     backwards in lock-step to verify each block's head and end are
     actually present (and in order) in the chain.  */
1953 FOR_EACH_BB_REVERSE (bb)
1957 rtx head = BB_HEAD (bb);
1958 rtx end = BB_END (bb);
  /* Scan backwards from the last not-yet-claimed insn for BB's end.  */
1960 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1962 /* Verify the end of the basic block is in the INSN chain. */
1966 /* And that the code outside of basic blocks has NULL bb field. */
1968 && BLOCK_FOR_INSN (x) != NULL)
1970 error ("insn %d outside of basic blocks has non-NULL bb field",
1978 error ("end insn %d for block %d not found in the insn stream",
1979 INSN_UID (end), bb->index);
1983 /* Work backwards from the end to the head of the basic block
1984 to verify the head is in the RTL chain. */
1985 for (; x != NULL_RTX; x = PREV_INSN (x))
1987 /* While walking over the insn chain, verify insns appear
1988 in only one basic block. */
1989 if (bb_info[INSN_UID (x)] != NULL)
1991 error ("insn %d is in multiple basic blocks (%d and %d)",
1992 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1996 bb_info[INSN_UID (x)] = bb;
2003 error ("head insn %d for block %d not found in the insn stream",
2004 INSN_UID (head), bb->index);
  /* Resume the backward scan just before this block's head.  */
2008 last_head = PREV_INSN (x);
  /* Check the fallthru successor, if any: it must be followed by a
     barrier when absent, and be layout-adjacent when present.  */
2010 FOR_EACH_EDGE (e, ei, bb->succs)
2011 if (e->flags & EDGE_FALLTHRU)
2017 /* Ensure existence of barrier in BB with no fallthru edges. */
2018 for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
2019 insn = NEXT_INSN (insn))
2021 || NOTE_INSN_BASIC_BLOCK_P (insn))
2023 error ("missing barrier after block %i", bb->index);
2028 else if (e->src != ENTRY_BLOCK_PTR
2029 && e->dest != EXIT_BLOCK_PTR)
2033 if (e->src->next_bb != e->dest)
2036 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2037 e->src->index, e->dest->index);
  /* Only notes/labels may sit between a fallthru pair.  */
2041 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2042 insn = NEXT_INSN (insn))
2043 if (BARRIER_P (insn) || INSN_P (insn))
2045 error ("verify_flow_info: Incorrect fallthru %i->%i",
2046 e->src->index, e->dest->index);
2047 fatal_insn ("wrong insn in the fallthru edge", insn);
2053 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2055 /* Check that the code before the first basic block has NULL
2058 && BLOCK_FOR_INSN (x) != NULL)
2060 error ("insn %d outside of basic blocks has non-NULL bb field",
2068 last_bb_seen = ENTRY_BLOCK_PTR;
  /* Forward pass: bb notes must appear in layout order, and every
     insn must lie inside (or legally outside) a block.  */
2070 for (x = rtx_first; x; x = NEXT_INSN (x))
2072 if (NOTE_INSN_BASIC_BLOCK_P (x))
2074 bb = NOTE_BASIC_BLOCK (x);
2077 if (bb != last_bb_seen->next_bb)
2078 internal_error ("basic blocks not laid down consecutively");
2080 curr_bb = last_bb_seen = bb;
2085 switch (GET_CODE (x))
2092 /* An addr_vec is placed outside any basic block. */
2094 && JUMP_P (NEXT_INSN (x))
2095 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2096 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2099 /* But in any case, non-deletable labels can appear anywhere. */
2103 fatal_insn ("insn outside basic block", x);
2108 && returnjump_p (x) && ! condjump_p (x)
2109 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2110 fatal_insn ("return not followed by barrier", x);
2111 if (curr_bb && x == BB_END (curr_bb))
2115 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2117 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2118 num_bb_notes, n_basic_blocks);
2123 /* Assume that the preceding pass has possibly eliminated jump instructions
2124 or converted the unconditional jumps. Eliminate the edges from CFG.
2125 Return true if any edges are eliminated. */
2128 purge_dead_edges (basic_block bb)
2131 rtx insn = BB_END (bb), note;
2132 bool purged = false;
2136 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2137 if (NONJUMP_INSN_P (insn)
2138 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2142 if (! may_trap_p (PATTERN (insn))
2143 || ((eqnote = find_reg_equal_equiv_note (insn))
2144 && ! may_trap_p (XEXP (eqnote, 0))))
2145 remove_note (insn, note);
2148 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2149 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2151 /* There are three types of edges we need to handle correctly here: EH
2152 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2153 latter can appear when nonlocal gotos are used. */
2154 if (e->flags & EDGE_EH)
2156 if (can_throw_internal (BB_END (bb))
2157 /* If this is a call edge, verify that this is a call insn. */
2158 && (! (e->flags & EDGE_ABNORMAL_CALL)
2159 || CALL_P (BB_END (bb))))
2165 else if (e->flags & EDGE_ABNORMAL_CALL)
2167 if (CALL_P (BB_END (bb))
2168 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2169 || INTVAL (XEXP (note, 0)) >= 0))
2182 df_set_bb_dirty (bb);
  /* Block ends in a jump: keep only the edges the jump can take.  */
2192 /* We do care only about conditional jumps and simplejumps. */
2193 if (!any_condjump_p (insn)
2194 && !returnjump_p (insn)
2195 && !simplejump_p (insn))
2198 /* Branch probability/prediction notes are defined only for
2199 condjumps. We've possibly turned condjump into simplejump. */
2200 if (simplejump_p (insn))
2202 note = find_reg_note (insn, REG_BR_PROB, NULL);
2204 remove_note (insn, note);
2205 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2206 remove_note (insn, note);
2209 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2211 /* Avoid abnormal flags to leak from computed jumps turned
2212 into simplejumps. */
2214 e->flags &= ~EDGE_ABNORMAL;
2216 /* See if this edge is one we should keep. */
2217 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2218 /* A conditional jump can fall through into the next
2219 block, so we should keep the edge. */
2224 else if (e->dest != EXIT_BLOCK_PTR
2225 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2226 /* If the destination block is the target of the jump,
2232 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2233 /* If the destination block is the exit block, and this
2234 instruction is a return, then keep the edge. */
2239 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2240 /* Keep the edges that correspond to exceptions thrown by
2241 this instruction and rematerialize the EDGE_ABNORMAL
2242 flag we just cleared above. */
2244 e->flags |= EDGE_ABNORMAL;
2249 /* We do not need this edge. */
2250 df_set_bb_dirty (bb);
2255 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2259 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2264 /* Redistribute probabilities. */
2265 if (single_succ_p (bb))
2267 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2268 single_succ_edge (bb)->count = bb->count;
  /* Two successors remain: split probability/count between the
     branch and fallthru edges per the REG_BR_PROB note.  */
2272 note = find_reg_note (insn, REG_BR_PROB, NULL);
2276 b = BRANCH_EDGE (bb);
2277 f = FALLTHRU_EDGE (bb);
2278 b->probability = INTVAL (XEXP (note, 0));
2279 f->probability = REG_BR_PROB_BASE - b->probability;
2280 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2281 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2286 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2288 /* First, there should not be any EH or ABCALL edges resulting
2289 from non-local gotos and the like. If there were, we shouldn't
2290 have created the sibcall in the first place. Second, there
2291 should of course never have been a fallthru edge. */
2292 gcc_assert (single_succ_p (bb));
2293 gcc_assert (single_succ_edge (bb)->flags
2294 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2299 /* If we don't see a jump insn, we don't know exactly why the block would
2300 have been broken at this point. Look for a simple, non-fallthru edge,
2301 as these are only created by conditional branches. If we find such an
2302 edge we know that there used to be a jump here and can then safely
2303 remove all non-fallthru edges. */
2305 FOR_EACH_EDGE (e, ei, bb->succs)
2306 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2315 /* Remove all but the fake and fallthru edges. The fake edge may be
2316 the only successor for this block in the case of noreturn
2318 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2320 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2322 df_set_bb_dirty (bb);
2330 gcc_assert (single_succ_p (bb));
2332 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2333 single_succ_edge (bb)->count = bb->count;
2336 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2341 /* Search all basic blocks for potentially dead edges and purge them. Return
2342 true if some edge has been eliminated. */
2345 purge_all_dead_edges (void)
  /* Accumulate whether any block lost an edge.  */
2352 bool purged_here = purge_dead_edges (bb);
2354 purged |= purged_here;
2360 /* Same as split_block but update cfg_layout structures. */
2363 cfg_layout_split_block (basic_block bb, void *insnp)
2365 rtx insn = (rtx) insnp;
2366 basic_block new_bb = rtl_split_block (bb, insn);
  /* The footer (trailing barriers/tablejumps) belongs with the new
     second half of the split block.  */
2368 new_bb->il.rtl->footer = bb->il.rtl->footer;
2369 bb->il.rtl->footer = NULL;
2374 /* Redirect Edge to DEST. */
2376 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2378 basic_block src = e->src;
  /* Abnormal-call and EH edges cannot be redirected here.  */
2381 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2384 if (e->dest == dest)
2387 if (e->src != ENTRY_BLOCK_PTR
2388 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2390 df_set_bb_dirty (src);
  /* Simple fallthru edges out of ENTRY can just be retargeted.  */
2394 if (e->src == ENTRY_BLOCK_PTR
2395 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2398 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2399 e->src->index, dest->index);
2401 df_set_bb_dirty (e->src);
2402 redirect_edge_succ (e, dest);
2406 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
2407 in the case the basic block appears to be in sequence. Avoid this
2410 if (e->flags & EDGE_FALLTHRU)
2412 /* Redirect any branch edges unified with the fallthru one. */
2413 if (JUMP_P (BB_END (src))
2414 && label_is_jump_target_p (BB_HEAD (e->dest),
2420 fprintf (dump_file, "Fallthru edge unified with branch "
2421 "%i->%i redirected to %i\n",
2422 e->src->index, e->dest->index, dest->index);
  /* Temporarily clear EDGE_FALLTHRU so redirect_branch_edge
     treats E as a branch edge, then restore it.  */
2423 e->flags &= ~EDGE_FALLTHRU;
2424 redirected = redirect_branch_edge (e, dest);
2425 gcc_assert (redirected);
2426 e->flags |= EDGE_FALLTHRU;
2427 df_set_bb_dirty (e->src);
2430 /* In case we are redirecting fallthru edge to the branch edge
2431 of conditional jump, remove it. */
2432 if (EDGE_COUNT (src->succs) == 2)
2434 /* Find the edge that is different from E. */
2435 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2438 && any_condjump_p (BB_END (src))
2439 && onlyjump_p (BB_END (src)))
2440 delete_insn (BB_END (src));
2442 ret = redirect_edge_succ_nodup (e, dest);
2444 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
2445 e->src->index, e->dest->index, dest->index);
2448 ret = redirect_branch_edge (e, dest);
2450 /* We don't want simplejumps in the insn stream during cfglayout. */
2451 gcc_assert (!simplejump_p (BB_END (src)));
2453 df_set_bb_dirty (src);
2457 /* Simple wrapper as we always can redirect fallthru edges. */
2459 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2461 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
  /* In cfglayout mode the redirection must always succeed.  */
2463 gcc_assert (redirected);
2467 /* Same as delete_basic_block but update cfg_layout structures. */
2470 cfg_layout_delete_block (basic_block bb)
2472 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
  /* Splice BB's cfglayout header insns back into the main insn chain
     just before BB_HEAD so they are not lost when BB is deleted.  */
2474 if (bb->il.rtl->header)
2476 next = BB_HEAD (bb);
2478 NEXT_INSN (prev) = bb->il.rtl->header;
2480 set_first_insn (bb->il.rtl->header);
2481 PREV_INSN (bb->il.rtl->header) = prev;
2482 insn = bb->il.rtl->header;
2483 while (NEXT_INSN (insn))
2484 insn = NEXT_INSN (insn);
2485 NEXT_INSN (insn) = next;
2486 PREV_INSN (next) = insn;
2488 next = NEXT_INSN (BB_END (bb));
  /* Likewise splice the footer after BB_END, dropping any barriers
     from the footer chain first.  */
2489 if (bb->il.rtl->footer)
2491 insn = bb->il.rtl->footer;
2494 if (BARRIER_P (insn))
2496 if (PREV_INSN (insn))
2497 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2499 bb->il.rtl->footer = NEXT_INSN (insn);
2500 if (NEXT_INSN (insn))
2501 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2505 insn = NEXT_INSN (insn);
2507 if (bb->il.rtl->footer)
2510 NEXT_INSN (insn) = bb->il.rtl->footer;
2511 PREV_INSN (bb->il.rtl->footer) = insn;
2512 while (NEXT_INSN (insn))
2513 insn = NEXT_INSN (insn);
2514 NEXT_INSN (insn) = next;
2516 PREV_INSN (next) = insn;
2518 set_last_insn (insn);
  /* Remaining insns between the neighbors are re-attached either to the
     next block's header or to the function footer.  */
2521 if (bb->next_bb != EXIT_BLOCK_PTR)
2522 to = &bb->next_bb->il.rtl->header;
2524 to = &cfg_layout_function_footer;
2526 rtl_delete_block (bb);
2529 prev = NEXT_INSN (prev);
2531 prev = get_insns ();
2533 next = PREV_INSN (next);
2535 next = get_last_insn ();
2537 if (next && NEXT_INSN (next) != prev)
2539 remaints = unlink_insn_chain (prev, next);
2541 while (NEXT_INSN (insn))
2542 insn = NEXT_INSN (insn);
2543 NEXT_INSN (insn) = *to;
2545 PREV_INSN (*to) = insn;
2550 /* Return true when blocks A and B can be safely merged. */
2553 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2555 /* If we are partitioning hot/cold basic blocks, we don't want to
2556 mess up unconditional or indirect jumps that cross between hot
2559 Basic block partitioning may result in some jumps that appear to
2560 be optimizable (or blocks that appear to be mergeable), but which really
2561 must be left untouched (they are required to make it safely across
2562 partition boundaries). See the comments at the top of
2563 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2565 if (BB_PARTITION (a) != BB_PARTITION (b))
2568 /* There must be exactly one edge in between the blocks. */
2569 return (single_succ_p (a)
2570 && single_succ (a) == b
  /* NOTE(review): single_pred_p returns a bool, so "== 1" is
     equivalent to a plain truth test here.  */
2571 && single_pred_p (b) == 1
2573 /* Must be simple edge. */
2574 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2575 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2576 /* If the jump insn has side effects, we can't kill the edge.
2577 When not optimizing, try_redirect_by_replacing_jump will
2578 not allow us to redirect an edge by replacing a table jump. */
2579 && (!JUMP_P (BB_END (a))
2580 || ((!optimize || reload_completed)
2581 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2584 /* Merge block A and B. The blocks must be mergeable. */
2587 cfg_layout_merge_blocks (basic_block a, basic_block b)
2589 #ifdef ENABLE_CHECKING
2590 gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
2594 fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
2596 /* If there was a CODE_LABEL beginning B, delete it. */
2597 if (LABEL_P (BB_HEAD (b)))
2599 /* This might have been an EH label that no longer has incoming
2600 EH edges. Update data structures to match. */
2601 maybe_remove_eh_handler (BB_HEAD (b));
2603 delete_insn (BB_HEAD (b));
2606 /* We should have fallthru edge in a, or we can do dummy redirection to get
2608 if (JUMP_P (BB_END (a)))
2609 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2610 gcc_assert (!JUMP_P (BB_END (a)));
2612 /* Possible line number notes should appear in between. */
2613 if (b->il.rtl->header)
2615 rtx first = BB_END (a), last;
  /* Move B's header insns after A's end, then remove the originals.  */
2617 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
2618 delete_insn_chain (NEXT_INSN (first), last, false);
2619 b->il.rtl->header = NULL;
2622 /* In the case basic blocks are not adjacent, move them around. */
2623 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2625 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2627 emit_insn_after_noloc (first, BB_END (a), a);
2628 /* Skip possible DELETED_LABEL insn. */
2629 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2630 first = NEXT_INSN (first);
2631 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
2634 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
2635 We need to explicitly call. */
2636 update_bb_for_insn_chain (NEXT_INSN (first),
  /* Delete B's now-redundant basic block note.  */
2640 delete_insn (first);
2642 /* Otherwise just re-associate the instructions. */
2647 update_bb_for_insn_chain (BB_HEAD (b), BB_END (b), a);
2650 /* Skip possible DELETED_LABEL insn. */
2651 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2652 insn = NEXT_INSN (insn);
2653 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2655 BB_END (a) = BB_END (b);
2659 df_bb_delete (b->index);
2661 /* Possible tablejumps and barriers should appear after the block. */
2662 if (b->il.rtl->footer)
2664 if (!a->il.rtl->footer)
2665 a->il.rtl->footer = b->il.rtl->footer;
  /* A already has a footer: append B's footer to the end of it.  */
2668 rtx last = a->il.rtl->footer;
2670 while (NEXT_INSN (last))
2671 last = NEXT_INSN (last);
2672 NEXT_INSN (last) = b->il.rtl->footer;
2673 PREV_INSN (b->il.rtl->footer) = last;
2675 b->il.rtl->footer = NULL;
2679 fprintf (dump_file, "Merged blocks %d and %d.\n",
2680 a->index, b->index);
/* Split edge E in cfglayout mode: create a new block on the edge and
   return it (the caller of the hook receives the new block).  */
2686 cfg_layout_split_edge (edge e)
  /* Place the new block right after E's source, or at the very start
     of the insn chain when splitting an edge out of ENTRY.  */
2688 basic_block new_bb =
2689 create_basic_block (e->src != ENTRY_BLOCK_PTR
2690 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
  /* The new block inherits the partition of its non-EXIT neighbor.  */
2693 if (e->dest == EXIT_BLOCK_PTR)
2694 BB_COPY_PARTITION (new_bb, e->src);
2696 BB_COPY_PARTITION (new_bb, e->dest);
2697 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2698 redirect_edge_and_branch_force (e, new_bb);
2703 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
/* FALLTHRU is marked unused: no RTL-specific work is needed here.  */
2706 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2710 /* Return 1 if BB ends with a call, possibly followed by some
2711 instructions that must stay with the call, 0 otherwise. */
2714 rtl_block_ends_with_call_p (basic_block bb)
2716 rtx insn = BB_END (bb);
  /* Walk backwards over insns that must remain glued to a call
     (keep_with_call_p) until we reach a call or the block head.  */
2718 while (!CALL_P (insn)
2719 && insn != BB_HEAD (bb)
2720 && (keep_with_call_p (insn)
2722 insn = PREV_INSN (insn);
2723 return (CALL_P (insn));
2726 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
2729 rtl_block_ends_with_condjump_p (const_basic_block bb)
  /* Delegate entirely to the jump-analysis predicate.  */
2731 return any_condjump_p (BB_END (bb));
2734 /* Return true if we need to add fake edge to exit.
2735 Helper function for rtl_flow_call_edges_add. */
2738 need_fake_edge_p (const_rtx insn)
  /* A call needs a fake edge unless it provably returns: sibling
     calls, REG_NORETURN calls, and const/pure calls are exempt.  */
2744 && !SIBLING_CALL_P (insn)
2745 && !find_reg_note (insn, REG_NORETURN, NULL)
2746 && !CONST_OR_PURE_CALL_P (insn)))
  /* A volatile asm (plain, in a PARALLEL, or ASM_INPUT) may also not
     fall through, so it too needs a fake edge.  */
2749 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2750 && MEM_VOLATILE_P (PATTERN (insn)))
2751 || (GET_CODE (PATTERN (insn)) == PARALLEL
2752 && asm_noperands (insn) != -1
2753 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2754 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2757 /* Add fake edges to the function exit for any non constant and non noreturn
2758 calls, volatile inline assembly in the bitmap of blocks specified by
2759 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
2762 The goal is to expose cases in which entering a basic block does not imply
2763 that all subsequent instructions must be executed. */
/* NOTE(review): several structural lines (braces, declarations, a few
   statements) of this function are not visible in this chunk; the comments
   below annotate only the visible logic.  */
2766 rtl_flow_call_edges_add (sbitmap blocks)
2769 int blocks_split = 0;
2770 int last_bb = last_basic_block;
2771 bool check_last_block = false;
/* An empty function (only the fixed entry/exit blocks) needs no fake
   edges.  */
2773 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2777 check_last_block = true;
/* When a BLOCKS bitmap is given, only treat the last real block
   specially if BLOCKS selects it.  */
2779 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2781 /* In the last basic block, before epilogue generation, there will be
2782 a fallthru edge to EXIT. Special care is required if the last insn
2783 of the last basic block is a call because make_edge folds duplicate
2784 edges, which would result in the fallthru edge also being marked
2785 fake, which would result in the fallthru edge being removed by
2786 remove_fake_edges, which would result in an invalid CFG.
2788 Moreover, we can't elide the outgoing fake edge, since the block
2789 profiler needs to take this into account in order to solve the minimal
2790 spanning tree in the case that the call doesn't return.
2792 Handle this by adding a dummy instruction in a new last basic block. */
2793 if (check_last_block)
2795 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2796 rtx insn = BB_END (bb);
2798 /* Back up past insns that must be kept in the same block as a call. */
2799 while (insn != BB_HEAD (bb)
2800 && keep_with_call_p (insn))
2801 insn = PREV_INSN (insn);
2803 if (need_fake_edge_p (insn))
2807 e = find_edge (bb, EXIT_BLOCK_PTR);
/* Insert a no-op USE on the fallthru edge so the call is no longer the
   block's last insn, avoiding the duplicate-edge folding described
   above.  */
2810 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2811 commit_edge_insertions ();
2816 /* Now add fake edges to the function exit for any non constant
2817 calls since there is no way that we can determine if they will
2820 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
2822 basic_block bb = BASIC_BLOCK (i);
/* Honor the BLOCKS bitmap when one was supplied.  */
2829 if (blocks && !TEST_BIT (blocks, i))
/* Scan the block backwards; PREV_INSN is cached because splitting may
   rewrite the chain around INSN.  */
2832 for (insn = BB_END (bb); ; insn = prev_insn)
2834 prev_insn = PREV_INSN (insn);
2835 if (need_fake_edge_p (insn))
2838 rtx split_at_insn = insn;
2840 /* Don't split the block between a call and an insn that should
2841 remain in the same block as the call. */
2843 while (split_at_insn != BB_END (bb)
2844 && keep_with_call_p (NEXT_INSN (split_at_insn)))
2845 split_at_insn = NEXT_INSN (split_at_insn);
2847 /* The handling above of the final block before the epilogue
2848 should be enough to verify that there is no edge to the exit
2849 block in CFG already. Calling make_edge in such case would
2850 cause us to mark that edge as fake and remove it later. */
2852 #ifdef ENABLE_CHECKING
2853 if (split_at_insn == BB_END (bb))
2855 e = find_edge (bb, EXIT_BLOCK_PTR);
2856 gcc_assert (e == NULL);
2860 /* Note that the following may create a new basic block
2861 and renumber the existing basic blocks. */
2862 if (split_at_insn != BB_END (bb))
2864 e = split_block (bb, split_at_insn);
2869 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
/* Stop once the scan has handled the block's first insn.  */
2872 if (insn == BB_HEAD (bb))
2878 verify_flow_info ();
2880 return blocks_split;
2883 /* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
2884 the conditional branch target, SECOND_HEAD should be the fall-thru
2885 block; the fall-thru need not be handled here because the loop
2886 versioning code handles it.  SECOND_HEAD is kept in the signature
2887 because the tree-level hook needs it, and hook types must match. */
2889 rtl_lv_add_condition_to_bb (basic_block first_head ,
2890 basic_block second_head ATTRIBUTE_UNUSED,
2891 basic_block cond_bb, void *comp_rtx)
2893 rtx label, seq, jump;
/* COMP_RTX is a comparison rtx; pull out its operands and code.  */
2894 rtx op0 = XEXP ((rtx)comp_rtx, 0);
2895 rtx op1 = XEXP ((rtx)comp_rtx, 1);
2896 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
2897 enum machine_mode mode;
/* NOTE(review): the insns below are emitted into a sequence; the
   start_sequence/end_sequence calls are not visible in this chunk.  */
2900 label = block_label (first_head);
/* Use OP0's mode; fall back to OP1's when OP0 has VOIDmode
   (e.g. a constant operand).  */
2901 mode = GET_MODE (op0);
2902 if (mode == VOIDmode)
2903 mode = GET_MODE (op1);
/* Make both operands valid as general operands before comparing.  */
2906 op0 = force_operand (op0, NULL_RTX);
2907 op1 = force_operand (op1, NULL_RTX);
2908 do_compare_rtx_and_jump (op0, op1, comp, 0,
2909 mode, NULL_RTX, NULL_RTX, label);
/* Wire up the jump just emitted to its target label.  */
2910 jump = get_last_insn ();
2911 JUMP_LABEL (jump) = label;
2912 LABEL_NUSES (label)++;
2916 /* Emit the new condition sequence at the end of COND_BB.  */
2917 emit_insn_after(seq, BB_END(cond_bb));
2921 /* Given a block B with a conditional branch at its end, store the
2922 branch edge in *BRANCH_EDGE and the fall-thru edge in
2923 *FALLTHRU_EDGE respectively. */
2925 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
2926 edge *fallthru_edge)
2928 edge e = EDGE_SUCC (b, 0);
/* Whichever of the two successors is the fallthru determines which is
   the branch; the assignments of E itself to the other out-parameter
   are on lines not visible in this chunk.  */
2930 if (e->flags & EDGE_FALLTHRU)
2933 *branch_edge = EDGE_SUCC (b, 1);
2938 *fallthru_edge = EDGE_SUCC (b, 1);
/* Allocate the RTL-specific info structure for BB.  BB must not
   already have one; the new structure is GC-allocated and zeroed.  */
2943 init_rtl_bb_info (basic_block bb)
2945 gcc_assert (!bb->il.rtl);
2946 bb->il.rtl = GGC_CNEW (struct rtl_bb_info);
2950 /* Insert the insn chain PAT at the end of basic block BB, placing it
   before a trailing jump or abnormal call when control flow requires.
   NOTE(review): presumably returns the newly emitted insn -- the return
   statement is not visible in this chunk.  */
2953 insert_insn_end_bb_new (rtx pat, basic_block bb)
2955 rtx insn = BB_END (bb);
/* Advance PAT_END to the last insn of the PAT chain.  */
2959 while (NEXT_INSN (pat_end) != NULL_RTX)
2960 pat_end = NEXT_INSN (pat_end);
2962 /* If the last insn is a jump, insert EXPR in front [taking care to
2963 handle cc0, etc. properly]. Similarly we need to care trapping
2964 instructions in presence of non-call exceptions. */
2967 || (NONJUMP_INSN_P (insn)
2968 && (!single_succ_p (bb)
2969 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
2974 /* If this is a jump table, then we can't insert stuff here. Since
2975 we know the previous real insn must be the tablejump, we insert
2976 the new instruction just before the tablejump. */
2977 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
2978 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
2979 insn = prev_real_insn (insn);
2982 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
2983 if cc0 isn't set. */
2984 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2986 insn = XEXP (note, 0);
/* No REG_CC_SETTER note: look back past notes for the insn that sets
   cc0 and insert before it, so the setter/user pair stays adjacent.  */
2989 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
2990 if (maybe_cc0_setter
2991 && INSN_P (maybe_cc0_setter)
2992 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
2993 insn = maybe_cc0_setter;
2996 /* FIXME: What if something in cc0/jump uses value set in new
2998 new_insn = emit_insn_before_noloc (pat, insn, bb);
3001 /* Likewise if the last insn is a call, as will happen in the presence
3002 of exception handling. */
3003 else if (CALL_P (insn)
3004 && (!single_succ_p (bb)
3005 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
3007 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
3008 we search backward and place the instructions before the first
3009 parameter is loaded. Do this for everyone for consistency and a
3010 presumption that we'll get better code elsewhere as well. */
3012 /* Since different machines initialize their parameter registers
3013 in different orders, assume nothing. Collect the set of all
3014 parameter registers. */
3015 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3017 /* If we found all the parameter loads, then we want to insert
3018 before the first parameter load.
3020 If we did not find all the parameter loads, then we might have
3021 stopped on the head of the block, which could be a CODE_LABEL.
3022 If we inserted before the CODE_LABEL, then we would be putting
3023 the insn in the wrong basic block. In that case, put the insn
3024 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
3025 while (LABEL_P (insn)
3026 || NOTE_INSN_BASIC_BLOCK_P (insn))
3027 insn = NEXT_INSN (insn);
3029 new_insn = emit_insn_before_noloc (pat, insn, bb);
/* Simple case: no trailing jump/call constraints, append at the end.  */
3032 new_insn = emit_insn_after_noloc (pat, insn, bb);
3037 /* Returns true if it is possible to remove edge E by redirecting
3038 it to the destination of the other edge from E->src. */
3041 rtl_can_remove_branch_p (const_edge e)
3043 const_basic_block src = e->src;
/* The "other" successor: index 0 when E is successor 1, else index 1.  */
3044 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3045 const_rtx insn = BB_END (src), set;
3047 /* The conditions are taken from try_redirect_by_replacing_jump. */
/* Redirecting into the exit block is not handled.  */
3048 if (target == EXIT_BLOCK_PTR)
/* Abnormal-call and EH edges cannot be removed by rewriting the jump.  */
3051 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
/* Don't rewrite across a hot/cold partition boundary.  */
3054 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3055 || BB_PARTITION (src) != BB_PARTITION (target))
/* Only a plain jump (no side work, not a tablejump) can be replaced.  */
3058 if (!onlyjump_p (insn)
3059 || tablejump_p (insn, NULL, NULL))
/* The jump must be a single, side-effect-free SET.  */
3062 set = single_set (insn);
3063 if (!set || side_effects_p (set))
3069 /* Implementation of CFG manipulation for linearized RTL. */
/* NOTE(review): several initializer lines of this struct are not visible
   in this chunk; slot comments below follow the style of the entries
   that were already annotated -- confirm slot order against cfghooks.h.  */
3070 struct cfg_hooks rtl_cfg_hooks = {
3072 rtl_verify_flow_info, /* verify_flow_info */
3074 rtl_create_basic_block, /* create_basic_block */
3075 rtl_redirect_edge_and_branch, /* redirect_edge_and_branch */
3076 rtl_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
3077 rtl_can_remove_branch_p, /* can_remove_branch_p */
3080 rtl_move_block_after, /* move_block_after */
3081 rtl_can_merge_blocks, /* can_merge_blocks_p */
3085 NULL, /* can_duplicate_block_p */
3086 NULL, /* duplicate_block */
3088 rtl_make_forwarder_block, /* make_forwarder_block */
3089 rtl_tidy_fallthru_edge, /* tidy_fallthru_edge */
3090 rtl_block_ends_with_call_p, /* block_ends_with_call_p */
3091 rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
3092 rtl_flow_call_edges_add, /* flow_call_edges_add */
3093 NULL, /* execute_on_growing_pred */
3094 NULL, /* execute_on_shrinking_pred */
3095 NULL, /* duplicate loop for trees */
3096 NULL, /* lv_add_condition_to_bb */
3097 NULL, /* lv_adjust_loop_header_phi*/
3098 NULL, /* extract_cond_bb_edges */
3099 NULL /* flush_pending_stmts */
3102 /* Implementation of CFG manipulation for cfg layout RTL, where
3103 basic block connected via fallthru edges does not have to be adjacent.
3104 This representation will hopefully become the default one in future
3105 versions of the compiler. */
3107 /* We do not want to declare these functions in a header file, since they
3108 should only be used through the cfghooks interface, and we do not want to
3109 move them here since it would require also moving quite a lot of related
3110 code. They are in cfglayout.c. */
3111 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
3112 extern basic_block cfg_layout_duplicate_bb (basic_block);
3114 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3116 rtl_verify_flow_info_1,
3118 cfg_layout_create_basic_block,
3119 cfg_layout_redirect_edge_and_branch,
3120 cfg_layout_redirect_edge_and_branch_force,
3121 rtl_can_remove_branch_p,
3122 cfg_layout_delete_block,
3123 cfg_layout_split_block,
3124 rtl_move_block_after,
3125 cfg_layout_can_merge_blocks_p,
3126 cfg_layout_merge_blocks,
3129 cfg_layout_can_duplicate_bb_p,
3130 cfg_layout_duplicate_bb,
3131 cfg_layout_split_edge,
3132 rtl_make_forwarder_block,
3134 rtl_block_ends_with_call_p,
3135 rtl_block_ends_with_condjump_p,
3136 rtl_flow_call_edges_add,
3137 NULL, /* execute_on_growing_pred */
3138 NULL, /* execute_on_shrinking_pred */
3139 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3140 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3141 NULL, /* lv_adjust_loop_header_phi*/
3142 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3143 NULL /* flush_pending_stmts */