1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
34 Functions not supposed for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
42 #include "coretypes.h"
46 #include "hard-reg-set.h"
47 #include "basic-block.h"
56 #include "insn-config.h"
57 #include "cfglayout.h"
62 #include "tree-pass.h"
/* NOTE(review): forward declarations for the file-local CFG/RTL hook
   implementations defined further below.  The leading numeral on each
   line appears to be a line number leaked from a mangled paste of the
   original file -- confirm against the pristine source before building.  */
64 static int can_delete_note_p (rtx);
65 static int can_delete_label_p (rtx);
66 static void commit_one_edge_insertion (edge, int);
67 static basic_block rtl_split_edge (edge);
68 static bool rtl_move_block_after (basic_block, basic_block);
69 static int rtl_verify_flow_info (void);
70 static basic_block cfg_layout_split_block (basic_block, void *);
71 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
72 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
73 static void cfg_layout_delete_block (basic_block);
74 static void rtl_delete_block (basic_block);
75 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
76 static edge rtl_redirect_edge_and_branch (edge, basic_block);
77 static basic_block rtl_split_block (basic_block, void *);
78 static void rtl_dump_bb (basic_block, FILE *, int);
79 static int rtl_verify_flow_info_1 (void);
80 static void rtl_make_forwarder_block (edge);
82 /* Return true if NOTE is not one of the ones that must be kept paired,
83 so that we may simply delete it. */
86 can_delete_note_p (rtx note)
88 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
89 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
92 /* True if a given label can be deleted. */
95 can_delete_label_p (rtx label)
97 return (!LABEL_PRESERVE_P (label)
98 /* User declared labels must be preserved. */
99 && LABEL_NAME (label) == 0
100 && !in_expr_list_p (forced_labels, label));
/* NOTE(review): this fragment of delete_insn is incomplete -- the
   return type line, several braces and conditions are missing, and
   each line is prefixed with a stray numeral from a mangled paste.
   The code is kept byte-for-byte; only comments are added.  Restore
   from the pristine source before compiling.  */
103 /* Delete INSN by patching it out. Return the next insn. */
106 delete_insn (rtx insn)
108 rtx next = NEXT_INSN (insn);
110 bool really_delete = true;
114 /* Some labels can't be directly removed from the INSN chain, as they
115 might be references via variables, constant pool etc.
116 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
117 if (! can_delete_label_p (insn))
119 const char *name = LABEL_NAME (insn);
/* Turn the undeletable label into a NOTE_INSN_DELETED_LABEL in place
   rather than unlinking it, so outstanding references stay valid.  */
121 really_delete = false;
122 PUT_CODE (insn, NOTE);
123 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
124 NOTE_DELETED_LABEL_NAME (insn) = name;
127 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
132 /* If this insn has already been deleted, something is very wrong. */
133 gcc_assert (!INSN_DELETED_P (insn));
135 INSN_DELETED_P (insn) = 1;
138 /* If deleting a jump, decrement the use count of the label. Deleting
139 the label itself should happen in the normal course of block merging. */
142 && LABEL_P (JUMP_LABEL (insn)))
143 LABEL_NUSES (JUMP_LABEL (insn))--;
145 /* Also if deleting an insn that references a label. */
148 while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
149 && LABEL_P (XEXP (note, 0)))
151 LABEL_NUSES (XEXP (note, 0))--;
152 remove_note (insn, note);
/* For jump tables (ADDR_VEC / ADDR_DIFF_VEC) drop one use count per
   referenced label still present in the vector.  */
157 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
158 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
160 rtx pat = PATTERN (insn);
161 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
162 int len = XVECLEN (pat, diff_vec_p);
165 for (i = 0; i < len; i++)
167 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
169 /* When deleting code in bulk (e.g. removing many unreachable
170 blocks) we can delete a label that's a target of the vector
171 before deleting the vector itself. */
173 LABEL_NUSES (label)--;
180 /* Like delete_insn but also purge dead edges from BB. */
182 delete_insn_and_edges (rtx insn)
188 && BLOCK_FOR_INSN (insn)
189 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
191 x = delete_insn (insn);
193 purge_dead_edges (BLOCK_FOR_INSN (insn));
197 /* Unlink a chain of insns between START and FINISH, leaving notes
198 that must be paired. */
201 delete_insn_chain (rtx start, rtx finish)
205 /* Unchain the insns one by one. It would be quicker to delete all of these
206 with a single unchaining, rather than one at a time, but we need to keep
210 next = NEXT_INSN (start);
211 if (NOTE_P (start) && !can_delete_note_p (start))
214 next = delete_insn (start);
222 /* Like delete_insn but also purge dead edges from BB. */
224 delete_insn_chain_and_edges (rtx first, rtx last)
229 && BLOCK_FOR_INSN (last)
230 && BB_END (BLOCK_FOR_INSN (last)) == last)
232 delete_insn_chain (first, last);
234 purge_dead_edges (BLOCK_FOR_INSN (last));
/* NOTE(review): this fragment of create_basic_block_structure is
   incomplete -- the return-type line, local declarations, braces and
   several conditions are missing, and lines carry stray numerals from
   a mangled paste.  Code kept byte-for-byte; comments only added.  */
237 /* Create a new basic block consisting of the instructions between HEAD and END
238 inclusive. This function is designed to allow fast BB construction - reuses
239 the note and basic block struct in BB_NOTE, if any and do not grow
240 BASIC_BLOCK chain and should be used directly only by CFG construction code.
241 END can be NULL in to create new empty basic block before HEAD. Both END
242 and HEAD can be NULL to create basic block at the end of INSN chain.
243 AFTER is the basic block we should be put after. */
246 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
251 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
254 /* If we found an existing note, thread it back onto the chain. */
262 after = PREV_INSN (head);
266 if (after != bb_note && NEXT_INSN (after) != bb_note)
267 reorder_insns_nobb (bb_note, bb_note, after);
271 /* Otherwise we must create a note and a basic block structure. */
275 init_rtl_bb_info (bb);
278 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
279 else if (LABEL_P (head) && end)
/* A label stays the block head; the bb note goes right after it.  */
281 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
287 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
293 NOTE_BASIC_BLOCK (bb_note) = bb;
296 /* Always include the bb note in the block. */
297 if (NEXT_INSN (end) == bb_note)
/* Finalize the new block: assign the next free index, mark as freshly
   created RTL block, link into the chain after AFTER, and register it.  */
302 bb->index = last_basic_block++;
303 bb->flags = BB_NEW | BB_RTL;
304 link_block (bb, after);
305 SET_BASIC_BLOCK (bb->index, bb);
306 update_bb_for_insn (bb);
307 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
309 /* Tag the block so that we know it has been used when considering
310 other basic block notes. */
/* NOTE(review): fragment of rtl_create_basic_block -- missing return
   type, locals, braces and the function's return statement; stray
   numerals from a mangled paste retained.  Comments only added.  */
316 /* Create new basic block consisting of instructions in between HEAD and END
317 and place it to the BB chain after block AFTER. END can be NULL in to
318 create new empty basic block before HEAD. Both END and HEAD can be NULL to
319 create basic block at the end of INSN chain. */
322 rtl_create_basic_block (void *headp, void *endp, basic_block after)
324 rtx head = headp, end = endp;
327 /* Grow the basic block array if needed. */
328 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
330 size_t old_size = VEC_length (basic_block, basic_block_info);
/* Grow by 25% headroom so repeated block creation stays amortized.  */
331 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
333 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
334 p = VEC_address (basic_block, basic_block_info);
/* Newly added slots must start out NULL.  */
335 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
340 bb = create_basic_block_structure (head, end, NULL, after);
346 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
348 basic_block newbb = rtl_create_basic_block (head, end, after);
/* NOTE(review): fragment of rtl_delete_block -- declarator line,
   locals, several conditions and closing braces missing; stray
   numerals from a mangled paste retained.  Comments only added.  */
353 /* Delete the insns in a (non-live) block. We physically delete every
354 non-deleted-note insn, and update the flow graph appropriately.
356 Return nonzero if we deleted an exception handler. */
358 /* ??? Preserving all such notes strikes me as wrong. It would be nice
359 to post-process the stream to remove empty blocks, loops, ranges, etc. */
362 rtl_delete_block (basic_block b)
366 /* If the head of this block is a CODE_LABEL, then it might be the
367 label for an exception handler which can't be reached. We need
368 to remove the label from the exception_handler_label list. */
371 maybe_remove_eh_handler (insn);
373 /* Include any jump table following the basic block. */
375 if (tablejump_p (end, NULL, &tmp))
378 /* Include any barriers that may follow the basic block. */
379 tmp = next_nonnote_insn (end);
380 while (tmp && BARRIER_P (tmp))
383 tmp = next_nonnote_insn (end);
386 /* Selectively delete the entire chain. */
388 delete_insn_chain (insn, end);
/* Release the block's liveness bitmaps, if they were allocated.  */
389 if (b->il.rtl->global_live_at_start)
391 FREE_REG_SET (b->il.rtl->global_live_at_start);
392 FREE_REG_SET (b->il.rtl->global_live_at_end);
393 b->il.rtl->global_live_at_start = NULL;
394 b->il.rtl->global_live_at_end = NULL;
398 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
401 compute_bb_for_insn (void)
407 rtx end = BB_END (bb);
410 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
412 BLOCK_FOR_INSN (insn) = bb;
419 /* Release the basic_block_for_insn array. */
422 free_bb_for_insn (void)
425 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
426 if (!BARRIER_P (insn))
427 BLOCK_FOR_INSN (insn) = NULL;
/* Pass descriptor for releasing per-insn CFG data (free_bb_for_insn).
   NOTE(review): this initializer fragment is missing several fields
   (name, gate, tv_id, letter, etc.) and the closing brace -- restore
   from the pristine source.  Destroys PROP_cfg.  */
431 struct tree_opt_pass pass_free_cfg =
435 free_bb_for_insn, /* execute */
438 0, /* static_pass_number */
440 0, /* properties_required */
441 0, /* properties_provided */
442 PROP_cfg, /* properties_destroyed */
443 0, /* todo_flags_start */
444 0, /* todo_flags_finish */
448 /* Return RTX to emit after when we want to emit code on the entry of function. */
450 entry_of_function (void)
452 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
453 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
456 /* Emit INSN at the entry point of the function, ensuring that it is only
457 executed once per function. */
459 emit_insn_at_entry (rtx insn)
461 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
462 edge e = ei_safe_edge (ei);
463 gcc_assert (e->flags & EDGE_FALLTHRU);
465 insert_insn_on_edge (insn, e);
466 commit_edge_insertions ();
469 /* Update insns block within BB. */
472 update_bb_for_insn (basic_block bb)
476 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
478 if (!BARRIER_P (insn))
479 set_block_for_insn (insn, bb);
480 if (insn == BB_END (bb))
/* NOTE(review): fragment of rtl_split_block -- declarator, locals,
   several conditions and braces missing; stray numerals retained.
   Lines 525/526 contain mojibake: "®_obstack" is almost certainly
   "&reg_obstack" -- confirm against the pristine source.  */
485 /* Creates a new basic block just after basic block B by splitting
486 everything after specified instruction I. */
489 rtl_split_block (basic_block bb, void *insnp)
498 insn = first_insn_after_basic_block_note (bb);
501 insn = PREV_INSN (insn);
503 insn = get_last_insn ();
506 /* We probably should check type of the insn so that we do not create
507 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
509 if (insn == BB_END (bb))
510 emit_note_after (NOTE_INSN_DELETED, insn);
512 /* Create the new basic block. */
513 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
514 BB_COPY_PARTITION (new_bb, bb);
517 /* Redirect the outgoing edges. */
518 new_bb->succs = bb->succs;
520 FOR_EACH_EDGE (e, ei, new_bb->succs)
523 if (bb->il.rtl->global_live_at_start)
525 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (®_obstack);
526 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (®_obstack);
527 COPY_REG_SET (new_bb->il.rtl->global_live_at_end, bb->il.rtl->global_live_at_end);
529 /* We now have to calculate which registers are live at the end
530 of the split basic block and at the start of the new basic
531 block. Start with those registers that are known to be live
532 at the end of the original basic block and get
533 propagate_block to determine which registers are live. */
534 COPY_REG_SET (new_bb->il.rtl->global_live_at_start, bb->il.rtl->global_live_at_end);
535 propagate_block (new_bb, new_bb->il.rtl->global_live_at_start, NULL, NULL, 0);
536 COPY_REG_SET (bb->il.rtl->global_live_at_end,
537 new_bb->il.rtl->global_live_at_start);
538 #ifdef HAVE_conditional_execution
539 /* In the presence of conditional execution we are not able to update
540 liveness precisely. */
541 if (reload_completed)
543 bb->flags |= BB_DIRTY;
544 new_bb->flags |= BB_DIRTY;
/* NOTE(review): fragment of rtl_merge_blocks -- declarator, locals,
   several if-conditions and braces missing; stray numerals retained.
   Comments only added.  */
552 /* Blocks A and B are to be merged into a single block A. The insns
553 are already contiguous. */
556 rtl_merge_blocks (basic_block a, basic_block b)
558 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
559 rtx del_first = NULL_RTX, del_last = NULL_RTX;
562 /* If there was a CODE_LABEL beginning B, delete it. */
563 if (LABEL_P (b_head))
565 /* This might have been an EH label that no longer has incoming
566 EH edges. Update data structures to match. */
567 maybe_remove_eh_handler (b_head);
569 /* Detect basic blocks with nothing but a label. This can happen
570 in particular at the end of a function. */
574 del_first = del_last = b_head;
575 b_head = NEXT_INSN (b_head);
578 /* Delete the basic block note and handle blocks containing just that
580 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
588 b_head = NEXT_INSN (b_head);
591 /* If there was a jump out of A, delete it. */
/* Scan backwards for the jump at the end of A, stopping at the
   basic-block note or the block head.  */
596 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
598 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
599 || prev == BB_HEAD (a))
605 /* If this was a conditional jump, we need to also delete
606 the insn that set cc0. */
607 if (only_sets_cc0_p (prev))
611 prev = prev_nonnote_insn (prev);
618 a_end = PREV_INSN (del_first);
620 else if (BARRIER_P (NEXT_INSN (a_end)))
621 del_first = NEXT_INSN (a_end);
623 /* Delete everything marked above as well as crap that might be
624 hanging out between the two blocks. */
626 delete_insn_chain (del_first, del_last);
628 /* Reassociate the insns of B with A. */
633 for (x = a_end; x != b_end; x = NEXT_INSN (x))
634 set_block_for_insn (x, a);
636 set_block_for_insn (b_end, a);
/* A inherits B's live-at-end set, since A now ends where B ended.  */
642 a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
/* NOTE(review): fragment of rtl_can_merge_blocks -- return-type line,
   opening brace and some predicate terms missing; stray numerals
   retained.  Comments only added.  */
645 /* Return true when block A and B can be merged. */
647 rtl_can_merge_blocks (basic_block a,basic_block b)
649 /* If we are partitioning hot/cold basic blocks, we don't want to
650 mess up unconditional or indirect jumps that cross between hot
653 Basic block partitioning may result in some jumps that appear to
654 be optimizable (or blocks that appear to be mergeable), but which really
655 must be left untouched (they are required to make it safely across
656 partition boundaries). See the comments at the top of
657 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
659 if (BB_PARTITION (a) != BB_PARTITION (b))
662 /* There must be exactly one edge in between the blocks. */
663 return (single_succ_p (a)
664 && single_succ (a) == b
667 /* Must be simple edge. */
668 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
670 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
671 /* If the jump insn has side effects,
672 we can't kill the edge. */
673 && (!JUMP_P (BB_END (a))
675 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
678 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
682 block_label (basic_block block)
684 if (block == EXIT_BLOCK_PTR)
687 if (!LABEL_P (BB_HEAD (block)))
689 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
692 return BB_HEAD (block);
/* NOTE(review): fragment of try_redirect_by_replacing_jump -- the
   return-type line, many braces, early returns and locals are missing;
   stray numerals from a mangled paste retained.  Comments only added;
   restore from the pristine source before compiling.  */
695 /* Attempt to perform edge redirection by replacing possibly complex jump
696 instruction by unconditional jump or removing jump completely. This can
697 apply only if all edges now point to the same block. The parameters and
698 return values are equivalent to redirect_edge_and_branch. */
701 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
703 basic_block src = e->src;
704 rtx insn = BB_END (src), kill_from;
708 /* If we are partitioning hot/cold basic blocks, we don't want to
709 mess up unconditional or indirect jumps that cross between hot
712 Basic block partitioning may result in some jumps that appear to
713 be optimizable (or blocks that appear to be mergeable), but which really
714 must be left untouched (they are required to make it safely across
715 partition boundaries). See the comments at the top of
716 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
718 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
719 || BB_PARTITION (src) != BB_PARTITION (target))
722 /* We can replace or remove a complex jump only when we have exactly
723 two edges. Also, if we have exactly one outgoing edge, we can
725 if (EDGE_COUNT (src->succs) >= 3
726 /* Verify that all targets will be TARGET. Specifically, the
727 edge that is not E must also go to TARGET. */
728 || (EDGE_COUNT (src->succs) == 2
729 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
732 if (!onlyjump_p (insn))
734 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
737 /* Avoid removing branch with side effects. */
738 set = single_set (insn);
739 if (!set || side_effects_p (set))
742 /* In case we zap a conditional jump, we'll need to kill
743 the cc0 setter too. */
746 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
747 kill_from = PREV_INSN (insn);
750 /* See if we can create the fallthru edge. */
751 if (in_cfglayout || can_fallthru (src, target))
754 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
757 /* Selectively unlink whole insn chain. */
760 rtx insn = src->il.rtl->footer;
762 delete_insn_chain (kill_from, BB_END (src));
764 /* Remove barriers but keep jumptables. */
/* Unlink each barrier from the footer chain by splicing its
   neighbours together.  */
767 if (BARRIER_P (insn))
769 if (PREV_INSN (insn))
770 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
772 src->il.rtl->footer = NEXT_INSN (insn);
773 if (NEXT_INSN (insn))
774 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
778 insn = NEXT_INSN (insn);
782 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
785 /* If this already is simplejump, redirect it. */
786 else if (simplejump_p (insn))
788 if (e->dest == target)
791 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
792 INSN_UID (insn), e->dest->index, target->index);
793 if (!redirect_jump (insn, block_label (target), 0))
795 gcc_assert (target == EXIT_BLOCK_PTR);
800 /* Cannot do anything for target exit block. */
801 else if (target == EXIT_BLOCK_PTR)
804 /* Or replace possibly complicated jump insn by simple jump insn. */
807 rtx target_label = block_label (target);
808 rtx barrier, label, table;
810 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
811 JUMP_LABEL (BB_END (src)) = target_label;
812 LABEL_NUSES (target_label)++;
814 fprintf (dump_file, "Replacing insn %i by jump %i\n",
815 INSN_UID (insn), INSN_UID (BB_END (src)));
818 delete_insn_chain (kill_from, insn);
820 /* Recognize a tablejump that we are converting to a
821 simple jump and remove its associated CODE_LABEL
822 and ADDR_VEC or ADDR_DIFF_VEC. */
823 if (tablejump_p (insn, &label, &table))
824 delete_insn_chain (label, table);
826 barrier = next_nonnote_insn (BB_END (src));
827 if (!barrier || !BARRIER_P (barrier))
828 emit_barrier_after (BB_END (src));
831 if (barrier != NEXT_INSN (BB_END (src)))
833 /* Move the jump before barrier so that the notes
834 which originally were or were created before jump table are
835 inside the basic block. */
836 rtx new_insn = BB_END (src);
839 for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
840 tmp = NEXT_INSN (tmp))
841 set_block_for_insn (tmp, src);
/* Splice the jump out of its current position ...  */
843 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
844 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
/* ... and relink it immediately before the barrier.  */
846 NEXT_INSN (new_insn) = barrier;
847 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
849 PREV_INSN (new_insn) = PREV_INSN (barrier);
850 PREV_INSN (barrier) = new_insn;
855 /* Keep only one edge out and set proper flags. */
856 if (!single_succ_p (src))
858 gcc_assert (single_succ_p (src));
860 e = single_succ_edge (src);
862 e->flags = EDGE_FALLTHRU;
866 e->probability = REG_BR_PROB_BASE;
867 e->count = src->count;
869 if (e->dest != target)
870 redirect_edge_succ (e, target);
/* NOTE(review): fragment of redirect_branch_edge -- return-type line,
   early returns, braces and locals missing; stray numerals retained.
   Comments only added.  */
875 /* Redirect edge representing branch of (un)conditional jump or tablejump,
878 redirect_branch_edge (edge e, basic_block target)
881 rtx old_label = BB_HEAD (e->dest);
882 basic_block src = e->src;
883 rtx insn = BB_END (src);
885 /* We can only redirect non-fallthru edges of jump insn. */
886 if (e->flags & EDGE_FALLTHRU)
888 else if (!JUMP_P (insn))
891 /* Recognize a tablejump and adjust all matching cases. */
892 if (tablejump_p (insn, NULL, &tmp))
896 rtx new_label = block_label (target);
898 if (target == EXIT_BLOCK_PTR)
/* ADDR_VEC stores the labels in operand 0, ADDR_DIFF_VEC in 1.  */
900 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
901 vec = XVEC (PATTERN (tmp), 0);
903 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry pointing at the old label, keeping the
   label use counts consistent.  */
905 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
906 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
908 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
909 --LABEL_NUSES (old_label);
910 ++LABEL_NUSES (new_label);
913 /* Handle casesi dispatch insns. */
914 if ((tmp = single_set (insn)) != NULL
915 && SET_DEST (tmp) == pc_rtx
916 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
917 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
918 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
920 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
922 --LABEL_NUSES (old_label);
923 ++LABEL_NUSES (new_label);
928 /* ?? We may play the games with moving the named labels from
929 one basic block to the other in case only one computed_jump is
931 if (computed_jump_p (insn)
932 /* A return instruction can't be redirected. */
933 || returnjump_p (insn))
936 /* If the insn doesn't go where we think, we're confused. */
937 gcc_assert (JUMP_LABEL (insn) == old_label);
939 /* If the substitution doesn't succeed, die. This can happen
940 if the back end emitted unrecognizable instructions or if
941 target is exit block on some arches. */
942 if (!redirect_jump (insn, block_label (target), 0))
944 gcc_assert (target == EXIT_BLOCK_PTR);
950 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
951 e->src->index, e->dest->index, target->index);
953 if (e->dest != target)
954 e = redirect_edge_succ_nodup (e, target);
/* NOTE(review): fragment of rtl_redirect_edge_and_branch -- return
   type, locals, return statements and braces missing; stray numerals
   retained.  Comments only added.  */
958 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
959 expense of adding new instructions or reordering basic blocks.
961 Function can be also called with edge destination equivalent to the TARGET.
962 Then it should try the simplifications and do nothing if none is possible.
964 Return edge representing the branch if transformation succeeded. Return NULL
966 We still return NULL in case E already destinated TARGET and we didn't
967 managed to simplify instruction stream. */
970 rtl_redirect_edge_and_branch (edge e, basic_block target)
973 basic_block src = e->src;
/* Abnormal call/EH edges cannot be redirected by patching jumps.  */
975 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
978 if (e->dest == target)
/* First try the cheap route: replace or remove the jump outright.  */
981 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
983 src->flags |= BB_DIRTY;
987 ret = redirect_branch_edge (e, target);
991 src->flags |= BB_DIRTY;
/* NOTE(review): fragment of force_nonfallthru_and_redirect -- return
   type, many locals, braces and conditions missing; stray numerals
   retained.  Line 1090 contains mojibake: "¬e" is almost certainly
   "&note" -- confirm against the pristine source.  Comments only
   added.  */
995 /* Like force_nonfallthru below, but additionally performs redirection
996 Used by redirect_edge_and_branch_force. */
999 force_nonfallthru_and_redirect (edge e, basic_block target)
1001 basic_block jump_block, new_bb = NULL, src = e->src;
1004 int abnormal_edge_flags = 0;
1006 /* In the case the last instruction is conditional jump to the next
1007 instruction, first redirect the jump itself and then continue
1008 by creating a basic block afterwards to redirect fallthru edge. */
1009 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1010 && any_condjump_p (BB_END (e->src))
1011 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1014 edge b = unchecked_make_edge (e->src, target, 0);
1017 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1018 gcc_assert (redirected);
/* Split the profile data between the old edge E and the new edge B
   according to the branch probability note, if present.  */
1020 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1023 int prob = INTVAL (XEXP (note, 0));
1025 b->probability = prob;
1026 b->count = e->count * prob / REG_BR_PROB_BASE;
1027 e->probability -= e->probability;
1028 e->count -= b->count;
1029 if (e->probability < 0)
1036 if (e->flags & EDGE_ABNORMAL)
1038 /* Irritating special case - fallthru edge to the same block as abnormal
1040 We can't redirect abnormal edge, but we still can split the fallthru
1041 one and create separate abnormal edge to original destination.
1042 This allows bb-reorder to make such edge non-fallthru. */
1043 gcc_assert (e->dest == target);
1044 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1045 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1049 gcc_assert (e->flags & EDGE_FALLTHRU);
1050 if (e->src == ENTRY_BLOCK_PTR)
1052 /* We can't redirect the entry block. Create an empty block
1053 at the start of the function which we use to add the new
1059 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1061 /* Change the existing edge's source to be the new block, and add
1062 a new edge from the entry block to the new block. */
1064 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1068 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1078 VEC_safe_push (edge, gc, bb->succs, e);
1079 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1083 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
1085 /* Create the new structures. */
1087 /* If the old block ended with a tablejump, skip its table
1088 by searching forward from there. Otherwise start searching
1089 forward from the last instruction of the old block. */
1090 if (!tablejump_p (BB_END (e->src), NULL, ¬e))
1091 note = BB_END (e->src);
1092 note = NEXT_INSN (note);
1094 jump_block = create_basic_block (note, NULL, e->src);
1095 jump_block->count = e->count;
1096 jump_block->frequency = EDGE_FREQUENCY (e);
1097 jump_block->loop_depth = target->loop_depth;
1099 if (target->il.rtl->global_live_at_start)
1101 jump_block->il.rtl->global_live_at_start = ALLOC_REG_SET (®_obstack);
1102 jump_block->il.rtl->global_live_at_end = ALLOC_REG_SET (®_obstack);
1103 COPY_REG_SET (jump_block->il.rtl->global_live_at_start,
1104 target->il.rtl->global_live_at_start);
1105 COPY_REG_SET (jump_block->il.rtl->global_live_at_end,
1106 target->il.rtl->global_live_at_start);
1109 /* Make sure new block ends up in correct hot/cold section. */
1111 BB_COPY_PARTITION (jump_block, e->src);
1112 if (flag_reorder_blocks_and_partition
1113 && targetm.have_named_sections
1114 && JUMP_P (BB_END (jump_block))
1115 && !any_condjump_p (BB_END (jump_block))
1116 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1117 REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
1124 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1125 new_edge->probability = e->probability;
1126 new_edge->count = e->count;
1128 /* Redirect old edge. */
1129 redirect_edge_pred (e, jump_block);
1130 e->probability = REG_BR_PROB_BASE;
1132 new_bb = jump_block;
1135 jump_block = e->src;
1137 e->flags &= ~EDGE_FALLTHRU;
/* For the exit block emit a return; otherwise an explicit jump to
   TARGET's (possibly freshly created) label.  */
1138 if (target == EXIT_BLOCK_PTR)
1141 emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
1148 rtx label = block_label (target);
1149 emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
1150 JUMP_LABEL (BB_END (jump_block)) = label;
1151 LABEL_NUSES (label)++;
1154 emit_barrier_after (BB_END (jump_block));
1155 redirect_edge_succ_nodup (e, target);
1157 if (abnormal_edge_flags)
1158 make_edge (src, target, abnormal_edge_flags);
1163 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1164 (and possibly create new basic block) to make edge non-fallthru.
1165 Return newly created BB or NULL if none. */
1168 force_nonfallthru (edge e)
1170 return force_nonfallthru_and_redirect (e, e->dest);
1173 /* Redirect edge even at the expense of creating new jump insn or
1174 basic block. Return new basic block if created, NULL otherwise.
1175 Conversion must be possible. */
1178 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1180 if (redirect_edge_and_branch (e, target)
1181 || e->dest == target)
1184 /* In case the edge redirection failed, try to force it to be non-fallthru
1185 and redirect newly created simplejump. */
1186 e->src->flags |= BB_DIRTY;
1187 return force_nonfallthru_and_redirect (e, target);
/* NOTE(review): fragment of rtl_tidy_fallthru_edge -- return type,
   locals, loop body, braces and part of the jump-removal condition are
   missing; stray numerals retained.  Comments only added.  */
1190 /* The given edge should potentially be a fallthru edge. If that is in
1191 fact true, delete the jump and barriers that are in the way. */
1194 rtl_tidy_fallthru_edge (edge e)
1197 basic_block b = e->src, c = b->next_bb;
1199 /* ??? In a late-running flow pass, other folks may have deleted basic
1200 blocks by nopping out blocks, leaving multiple BARRIERs between here
1201 and the target label. They ought to be chastised and fixed.
1203 We can also wind up with a sequence of undeletable labels between
1204 one block and the next.
1206 So search through a sequence of barriers, labels, and notes for
1207 the head of block C and assert that we really do fall through. */
1209 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1213 /* Remove what will soon cease being the jump insn from the source block.
1214 If block B consisted only of this single jump, turn it into a deleted
1219 && (any_uncondjump_p (q)
1220 || single_succ_p (b)))
1223 /* If this was a conditional jump, we need to also delete
1224 the insn that set cc0. */
1225 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1232 /* Selectively unlink the sequence. */
1233 if (q != PREV_INSN (BB_HEAD (c)))
1234 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));
/* B now falls straight through into C.  */
1236 e->flags |= EDGE_FALLTHRU;
1239 /* Should move basic block BB after basic block AFTER. NIY. */
1242 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1243 basic_block after ATTRIBUTE_UNUSED)
/* NOTE(review): fragment of rtl_split_edge -- return type, locals,
   braces, else branches and the return statement are missing; stray
   numerals retained.  Lines 1303/1304 contain mojibake: "®_obstack"
   is almost certainly "&reg_obstack" -- confirm against the pristine
   source.  Comments only added.  */
1248 /* Split a (typically critical) edge. Return the new block.
1249 The edge must not be abnormal.
1251 ??? The code generally expects to be called on critical edges.
1252 The case of a block ending in an unconditional jump to a
1253 block with multiple predecessors is not handled optimally. */
1256 rtl_split_edge (edge edge_in)
1261 /* Abnormal edges cannot be split. */
1262 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1264 /* We are going to place the new block in front of edge destination.
1265 Avoid existence of fallthru predecessors. */
1266 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1271 FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
1272 if (e->flags & EDGE_FALLTHRU)
1276 force_nonfallthru (e);
1279 /* Create the basic block note. */
1280 if (edge_in->dest != EXIT_BLOCK_PTR)
1281 before = BB_HEAD (edge_in->dest);
1285 /* If this is a fall through edge to the exit block, the blocks might be
1286 not adjacent, and the right place is the after the source. */
1287 if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
1289 before = NEXT_INSN (BB_END (edge_in->src));
1290 bb = create_basic_block (before, NULL, edge_in->src);
1291 BB_COPY_PARTITION (bb, edge_in->src);
1295 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1296 /* ??? Why not edge_in->dest->prev_bb here? */
1297 BB_COPY_PARTITION (bb, edge_in->dest);
1300 /* ??? This info is likely going to be out of date very soon. */
1301 if (edge_in->dest->il.rtl->global_live_at_start)
1303 bb->il.rtl->global_live_at_start = ALLOC_REG_SET (®_obstack);
1304 bb->il.rtl->global_live_at_end = ALLOC_REG_SET (®_obstack);
1305 COPY_REG_SET (bb->il.rtl->global_live_at_start,
1306 edge_in->dest->il.rtl->global_live_at_start);
1307 COPY_REG_SET (bb->il.rtl->global_live_at_end,
1308 edge_in->dest->il.rtl->global_live_at_start);
/* The new block falls through to the original destination.  */
1311 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1313 /* For non-fallthru edges, we must adjust the predecessor's
1314 jump instruction to target our new block. */
1315 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1317 edge redirected = redirect_edge_and_branch (edge_in, bb);
1318 gcc_assert (redirected);
1321 redirect_edge_succ (edge_in, bb);
1326 /* Queue instructions for insertion on an edge between two basic blocks.
1327 The new instructions and basic blocks (if any) will not appear in the
1328 CFG until commit_edge_insertions is called. */
1331 insert_insn_on_edge (rtx pattern, edge e)
1333 /* We cannot insert instructions on an abnormal critical edge.
1334 It will be easier to find the culprit if we die now. */
1335 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
/* Append PATTERN to any insns already queued on this edge (e->insns.r),
   using the emit machinery's sequence stack.  */
1337 if (e->insns.r == NULL_RTX)
1340 push_to_sequence (e->insns.r);
1342 emit_insn (pattern);
1344 e->insns.r = get_insns ();
1348 /* Update the CFG for the instructions queued on edge E. */
/* NOTE(review): lines are elided throughout (non-contiguous numbering);
   code text is byte-identical, comments only added.  WATCH_CALLS requests
   that insns queued on a fallthru edge after a call be placed after the
   insns that must stay with the call (e.g. the return-value copy).  */
1351 commit_one_edge_insertion (edge e, int watch_calls)
1353 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1354 basic_block bb = NULL;
1356 /* Pull the insns off the edge now since the edge might go away. */
1358 e->insns.r = NULL_RTX;
1360 /* Special case -- avoid inserting code between call and storing
1361 its return value. */
1362 if (watch_calls && (e->flags & EDGE_FALLTHRU)
1363 && single_pred_p (e->dest)
1364 && e->src != ENTRY_BLOCK_PTR
1365 && CALL_P (BB_END (e->src)))
1367 rtx next = next_nonnote_insn (BB_END (e->src));
1369 after = BB_HEAD (e->dest);
1370 /* The first insn after the call may be a stack pop, skip it. */
1372 && keep_with_call_p (next))
1375 next = next_nonnote_insn (next);
1379 if (!before && !after)
1381 /* Figure out where to put these things. If the destination has
1382 one predecessor, insert there. Except for the exit block. */
1383 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1387 /* Get the location correct wrt a code label, and "nice" wrt
1388 a basic block note, and before everything else. */
1391 tmp = NEXT_INSN (tmp);
1392 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1393 tmp = NEXT_INSN (tmp);
1394 if (tmp == BB_HEAD (bb))
1397 after = PREV_INSN (tmp);
1399 after = get_last_insn ();
1402 /* If the source has one successor and the edge is not abnormal,
1403 insert there. Except for the entry block. */
1404 else if ((e->flags & EDGE_ABNORMAL) == 0
1405 && single_succ_p (e->src)
1406 && e->src != ENTRY_BLOCK_PTR)
1410 /* It is possible to have a non-simple jump here. Consider a target
1411 where some forms of unconditional jumps clobber a register. This
1412 happens on the fr30 for example.
1414 We know this block has a single successor, so we can just emit
1415 the queued insns before the jump. */
1416 if (JUMP_P (BB_END (bb)))
1417 before = BB_END (bb);
1420 /* We'd better be fallthru, or we've lost track of
1422 gcc_assert (e->flags & EDGE_FALLTHRU);
1424 after = BB_END (bb);
1427 /* Otherwise we must split the edge. */
1430 bb = split_edge (e);
1431 after = BB_END (bb);
/* Hot/cold partitioning fixup: if the new block landed next to a cold
   source but the edge does not cross sections, patch up notes/flags.  */
1433 if (flag_reorder_blocks_and_partition
1434 && targetm.have_named_sections
1435 && e->src != ENTRY_BLOCK_PTR
1436 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1437 && !(e->flags & EDGE_CROSSING))
1439 rtx bb_note, cur_insn;
1442 for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
1443 cur_insn = NEXT_INSN (cur_insn))
1444 if (NOTE_P (cur_insn)
1445 && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_BASIC_BLOCK)
1451 if (JUMP_P (BB_END (bb))
1452 && !any_condjump_p (BB_END (bb))
1453 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1454 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
1455 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
1460 /* Now that we've found the spot, do the insertion. */
1464 emit_insn_before_noloc (insns, before);
1465 last = prev_nonnote_insn (before);
1468 last = emit_insn_after_noloc (insns, after);
/* If the inserted sequence ends in a return jump, the block now exits
   the function: drop the fallthru flag and add a barrier.  */
1470 if (returnjump_p (last))
1472 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1473 This is not currently a problem because this only happens
1474 for the (single) epilogue, which already has a fallthru edge
1477 e = single_succ_edge (bb);
1478 gcc_assert (e->dest == EXIT_BLOCK_PTR
1479 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1481 e->flags &= ~EDGE_FALLTHRU;
1482 emit_barrier_after (last);
1485 delete_insn (before);
1488 gcc_assert (!JUMP_P (last));
1490 /* Mark the basic block for find_many_sub_basic_blocks. */
1494 /* Update the CFG for all queued instructions. */
/* Walks every block between ENTRY and EXIT, commits the insns queued on
   each successor edge, then lets find_many_sub_basic_blocks re-split any
   blocks that grew new control flow.  */
1497 commit_edge_insertions (void)
1501 bool changed = false;
1503 #ifdef ENABLE_CHECKING
1504 verify_flow_info ();
1507 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1512 FOR_EACH_EDGE (e, ei, bb->succs)
1516 commit_one_edge_insertion (e, false);
1523 blocks = sbitmap_alloc (last_basic_block);
1524 sbitmap_zero (blocks);
1528 SET_BIT (blocks, bb->index);
1529 /* Check for forgotten bb->aux values before commit_edge_insertions
1531 gcc_assert (bb->aux == &bb->aux);
1534 find_many_sub_basic_blocks (blocks);
1535 sbitmap_free (blocks);
1538 /* Update the CFG for all queued instructions, taking special care of inserting
1539 code on edges between call and storing its return value. */
/* Identical in structure to commit_edge_insertions above, but passes
   watch_calls = true so insertions never separate a call from the insns
   that must stay with it.  */
1542 commit_edge_insertions_watch_calls (void)
1546 bool changed = false;
1548 #ifdef ENABLE_CHECKING
1549 verify_flow_info ();
1552 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1557 FOR_EACH_EDGE (e, ei, bb->succs)
1561 commit_one_edge_insertion (e, true);
1568 blocks = sbitmap_alloc (last_basic_block);
1569 sbitmap_zero (blocks);
1573 SET_BIT (blocks, bb->index);
1574 /* Check for forgotten bb->aux values before commit_edge_insertions
1576 gcc_assert (bb->aux == &bb->aux);
1579 find_many_sub_basic_blocks (blocks);
1580 sbitmap_free (blocks);
1583 /* Print out RTL-specific basic block information (live information
1584 at start and end). */
1587 rtl_dump_bb (basic_block bb, FILE *outf, int indent)
/* Build an INDENT-wide space prefix on the stack for the ";;" lines.  */
1593 s_indent = alloca ((size_t) indent + 1);
1594 memset (s_indent, ' ', (size_t) indent);
1595 s_indent[indent] = '\0';
1597 fprintf (outf, ";;%s Registers live at start: ", s_indent);
1598 dump_regset (bb->il.rtl->global_live_at_start, outf);
/* Dump every insn of the block, head through end inclusive.  */
1601 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1602 insn = NEXT_INSN (insn))
1603 print_rtl_single (outf, insn);
1605 fprintf (outf, ";;%s Registers live at end: ", s_indent);
1606 dump_regset (bb->il.rtl->global_live_at_end, outf);
1610 /* Like print_rtl, but also print out live information for the start of each
/* Annotates the full insn stream with basic-block boundaries, flagging
   insns outside any block or (erroneously) inside several.  */
1614 print_rtl_with_bb (FILE *outf, rtx rtx_first)
1619 fprintf (outf, "(nil)\n");
1622 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1623 int max_uid = get_max_uid ();
/* Per-insn-UID maps: block starting/ending at this insn, and membership.  */
1624 basic_block *start = XCNEWVEC (basic_block, max_uid);
1625 basic_block *end = XCNEWVEC (basic_block, max_uid);
1626 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1630 FOR_EACH_BB_REVERSE (bb)
1634 start[INSN_UID (BB_HEAD (bb))] = bb;
1635 end[INSN_UID (BB_END (bb))] = bb;
1636 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1638 enum bb_state state = IN_MULTIPLE_BB;
1640 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1642 in_bb_p[INSN_UID (x)] = state;
1644 if (x == BB_END (bb))
/* Second pass: walk the insn chain in order, printing block headers,
   the insns themselves, and block footers with live-register sets.  */
1649 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1653 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1655 fprintf (outf, ";; Start of basic block %d, registers live:",
1657 dump_regset (bb->il.rtl->global_live_at_start, outf);
1661 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1662 && !NOTE_P (tmp_rtx)
1663 && !BARRIER_P (tmp_rtx))
1664 fprintf (outf, ";; Insn is not within a basic block\n");
1665 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1666 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1668 did_output = print_rtl_single (outf, tmp_rtx);
1670 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1672 fprintf (outf, ";; End of basic block %d, registers live:\n",
1674 dump_regset (bb->il.rtl->global_live_at_end, outf);
1687 if (current_function_epilogue_delay_list != 0)
1689 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1690 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1691 tmp_rtx = XEXP (tmp_rtx, 1))
1692 print_rtl_single (outf, XEXP (tmp_rtx, 0));
/* Sync the REG_BR_PROB note on BB's ending jump with the probability
   recorded on the CFG branch edge; no-op when they already agree or the
   block does not end in a jump / has no note.  */
1697 update_br_prob_note (basic_block bb)
1700 if (!JUMP_P (BB_END (bb)))
1702 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1703 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1705 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1708 /* Verify the CFG and RTL consistency common for both underlying RTL and
1711 Currently it does following checks:
1713 - test head/end pointers
1714 - overlapping of basic blocks
1715 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1716 - tails of basic blocks (ensure that boundary is necessary)
1717 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1718 and NOTE_INSN_BASIC_BLOCK
1719 - verify that no fall_thru edge crosses hot/cold partition boundaries
1721 In future it can be extended check a lot of other stuff as well
1722 (reachability of basic blocks, life information, etc. etc.). */
/* NOTE(review): many interior lines are elided (non-contiguous numbering);
   code text below is byte-identical, comments only added.  */
1725 rtl_verify_flow_info_1 (void)
1727 const int max_uid = get_max_uid ();
1728 rtx last_head = get_last_insn ();
1729 basic_block *bb_info;
1734 bb_info = XCNEWVEC (basic_block, max_uid);
/* Pass 1: for each block (in reverse), check head/end membership in the
   insn chain and record per-insn block ownership in bb_info.  */
1736 FOR_EACH_BB_REVERSE (bb)
1738 rtx head = BB_HEAD (bb);
1739 rtx end = BB_END (bb);
1741 /* Verify the end of the basic block is in the INSN chain. */
1742 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1746 if (!(bb->flags & BB_RTL))
1748 error ("BB_RTL flag not set for block %d", bb->index);
1754 error ("end insn %d for block %d not found in the insn stream",
1755 INSN_UID (end), bb->index);
1759 /* Work backwards from the end to the head of the basic block
1760 to verify the head is in the RTL chain. */
1761 for (; x != NULL_RTX; x = PREV_INSN (x))
1763 /* While walking over the insn chain, verify insns appear
1764 in only one basic block and initialize the BB_INFO array
1765 used by other passes. */
1766 if (bb_info[INSN_UID (x)] != NULL)
1768 error ("insn %d is in multiple basic blocks (%d and %d)",
1769 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1773 bb_info[INSN_UID (x)] = bb;
1780 error ("head insn %d for block %d not found in the insn stream",
1781 INSN_UID (head), bb->index);
1788 /* Now check the basic blocks (boundaries etc.) */
/* Pass 2: classify each block's outgoing edges and cross-check them
   against the kind of insn ending the block.  */
1789 FOR_EACH_BB_REVERSE (bb)
1791 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1792 edge e, fallthru = NULL;
1796 if (JUMP_P (BB_END (bb))
1797 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1798 && EDGE_COUNT (bb->succs) >= 2
1799 && any_condjump_p (BB_END (bb)))
1801 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1802 && profile_status != PROFILE_ABSENT)
1804 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1805 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1809 FOR_EACH_EDGE (e, ei, bb->succs)
1811 if (e->flags & EDGE_FALLTHRU)
1813 n_fallthru++, fallthru = e;
/* A fallthru edge must never cross a hot/cold section boundary.  */
1814 if ((e->flags & EDGE_CROSSING)
1815 || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1816 && e->src != ENTRY_BLOCK_PTR
1817 && e->dest != EXIT_BLOCK_PTR))
1819 error ("fallthru edge crosses section boundary (bb %i)",
1825 if ((e->flags & ~(EDGE_DFS_BACK
1827 | EDGE_IRREDUCIBLE_LOOP
1829 | EDGE_CROSSING)) == 0)
1832 if (e->flags & EDGE_ABNORMAL_CALL)
1835 if (e->flags & EDGE_EH)
1837 else if (e->flags & EDGE_ABNORMAL)
1841 if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1842 && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1844 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
1848 && (!JUMP_P (BB_END (bb))
1849 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1850 || any_condjump_p (BB_END (bb))))))
1852 error ("too many outgoing branch edges from bb %i", bb->index);
1855 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1857 error ("fallthru edge after unconditional jump %i", bb->index);
1860 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1862 error ("wrong amount of branch edges after unconditional jump %i", bb->index);
1865 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1866 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1868 error ("wrong amount of branch edges after conditional jump %i",
1872 if (n_call && !CALL_P (BB_END (bb)))
1874 error ("call edges for non-call insn in bb %i", bb->index);
1878 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
1879 && (!JUMP_P (BB_END (bb))
1880 || any_condjump_p (BB_END (bb))
1881 || any_uncondjump_p (BB_END (bb))))
1883 error ("abnormal edges for no purpose in bb %i", bb->index);
/* Pass 3: verify BLOCK_FOR_INSN back-pointers for every insn in the
   block, then check the block header/body layout.  */
1887 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1888 /* We may have a barrier inside a basic block before dead code
1889 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
1890 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
1893 if (! BLOCK_FOR_INSN (x))
1895 ("insn %d inside basic block %d but block_for_insn is NULL",
1896 INSN_UID (x), bb->index);
1899 ("insn %d inside basic block %d but block_for_insn is %i",
1900 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1905 /* OK pointers are correct. Now check the header of basic
1906 block. It ought to contain optional CODE_LABEL followed
1907 by NOTE_BASIC_BLOCK. */
1911 if (BB_END (bb) == x)
1913 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1921 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1923 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1928 if (BB_END (bb) == x)
1929 /* Do checks for empty blocks here. */
/* Body scan: a NOTE_INSN_BASIC_BLOCK or a control-flow insn may only
   appear at the block boundary, never in the middle.  */
1932 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1934 if (NOTE_INSN_BASIC_BLOCK_P (x))
1936 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1937 INSN_UID (x), bb->index);
1941 if (x == BB_END (bb))
1944 if (control_flow_insn_p (x))
1946 error ("in basic block %d:", bb->index);
1947 fatal_insn ("flow control insn inside a basic block", x);
1957 /* Verify the CFG and RTL consistency common for both underlying RTL and
1960 Currently it does following checks:
1961 - all checks of rtl_verify_flow_info_1
1962 - check that all insns are in the basic blocks
1963 (except the switch handling code, barriers and notes)
1964 - check that all returns are followed by barriers
1965 - check that all fallthru edge points to the adjacent blocks. */
1967 rtl_verify_flow_info (void)
1970 int err = rtl_verify_flow_info_1 ();
1973 const rtx rtx_first = get_insns ();
1974 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
1976 FOR_EACH_BB_REVERSE (bb)
/* bb->predictions is a tree-level annotation; it must be gone by the
   time the function is in RTL form.  */
1981 if (bb->predictions)
1983 error ("bb prediction set for block %i, but it is not used in RTL land", bb->index);
1987 FOR_EACH_EDGE (e, ei, bb->succs)
1988 if (e->flags & EDGE_FALLTHRU)
1994 /* Ensure existence of barrier in BB with no fallthru edges. */
1995 for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
1996 insn = NEXT_INSN (insn))
1999 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
2001 error ("missing barrier after block %i", bb->index);
/* A fallthru edge (other than to/from ENTRY/EXIT) must connect
   textually adjacent blocks, with only notes in between.  */
2006 else if (e->src != ENTRY_BLOCK_PTR
2007 && e->dest != EXIT_BLOCK_PTR)
2011 if (e->src->next_bb != e->dest)
2014 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2015 e->src->index, e->dest->index);
2019 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2020 insn = NEXT_INSN (insn))
2021 if (BARRIER_P (insn) || INSN_P (insn))
2023 error ("verify_flow_info: Incorrect fallthru %i->%i",
2024 e->src->index, e->dest->index);
2025 fatal_insn ("wrong insn in the fallthru edge", insn);
/* Walk the whole insn stream, checking block notes appear in layout
   order and that stray insns sit inside some block.  */
2032 last_bb_seen = ENTRY_BLOCK_PTR;
2034 for (x = rtx_first; x; x = NEXT_INSN (x))
2036 if (NOTE_INSN_BASIC_BLOCK_P (x))
2038 bb = NOTE_BASIC_BLOCK (x);
2041 if (bb != last_bb_seen->next_bb)
2042 internal_error ("basic blocks not laid down consecutively");
2044 curr_bb = last_bb_seen = bb;
2049 switch (GET_CODE (x))
2056 /* An addr_vec is placed outside any basic block. */
2058 && JUMP_P (NEXT_INSN (x))
2059 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2060 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC)) 
2063 /* But in any case, non-deletable labels can appear anywhere. */
2067 fatal_insn ("insn outside basic block", x);
2072 && returnjump_p (x) && ! condjump_p (x)
2073 && ! (NEXT_INSN (x) && BARRIER_P (NEXT_INSN (x))))
2074 fatal_insn ("return not followed by barrier", x);
2075 if (curr_bb && x == BB_END (curr_bb))
2079 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2081 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2082 num_bb_notes, n_basic_blocks);
2087 /* Assume that the preceding pass has possibly eliminated jump instructions
2088 or converted the unconditional jumps. Eliminate the edges from CFG.
2089 Return true if any edges are eliminated. */
/* NOTE(review): interior lines are elided (non-contiguous numbering);
   code text is byte-identical, comments only added.  */
2092 purge_dead_edges (basic_block bb)
2095 rtx insn = BB_END (bb), note;
2096 bool purged = false;
2100 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2101 if (NONJUMP_INSN_P (insn)
2102 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2106 if (! may_trap_p (PATTERN (insn))
2107 || ((eqnote = find_reg_equal_equiv_note (insn))
2108 && ! may_trap_p (XEXP (eqnote, 0))))
2109 remove_note (insn, note);
2112 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2113 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2115 /* There are three types of edges we need to handle correctly here: EH
2116 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2117 latter can appear when nonlocal gotos are used. */
2118 if (e->flags & EDGE_EH)
2120 if (can_throw_internal (BB_END (bb))
2121 /* If this is a call edge, verify that this is a call insn. */
2122 && (! (e->flags & EDGE_ABNORMAL_CALL)
2123 || CALL_P (BB_END (bb))))
2129 else if (e->flags & EDGE_ABNORMAL_CALL)
2131 if (CALL_P (BB_END (bb))
2132 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2133 || INTVAL (XEXP (note, 0)) >= 0))
2146 bb->flags |= BB_DIRTY;
/* Block ends in a jump: keep only edges a jump of this shape can
   actually take.  */
2156 /* We do care only about conditional jumps and simplejumps. */
2157 if (!any_condjump_p (insn)
2158 && !returnjump_p (insn)
2159 && !simplejump_p (insn))
2162 /* Branch probability/prediction notes are defined only for
2163 condjumps. We've possibly turned condjump into simplejump. */
2164 if (simplejump_p (insn))
2166 note = find_reg_note (insn, REG_BR_PROB, NULL);
2168 remove_note (insn, note);
2169 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2170 remove_note (insn, note);
2173 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2175 /* Avoid abnormal flags to leak from computed jumps turned
2176 into simplejumps. */
2178 e->flags &= ~EDGE_ABNORMAL;
2180 /* See if this edge is one we should keep. */
2181 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2182 /* A conditional jump can fall through into the next
2183 block, so we should keep the edge. */
2188 else if (e->dest != EXIT_BLOCK_PTR
2189 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2190 /* If the destination block is the target of the jump,
2196 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2197 /* If the destination block is the exit block, and this
2198 instruction is a return, then keep the edge. */
2203 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2204 /* Keep the edges that correspond to exceptions thrown by
2205 this instruction and rematerialize the EDGE_ABNORMAL
2206 flag we just cleared above. */
2208 e->flags |= EDGE_ABNORMAL;
2213 /* We do not need this edge. */
2214 bb->flags |= BB_DIRTY;
2219 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2223 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2228 /* Redistribute probabilities. */
2229 if (single_succ_p (bb))
2231 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2232 single_succ_edge (bb)->count = bb->count;
2236 note = find_reg_note (insn, REG_BR_PROB, NULL);
2240 b = BRANCH_EDGE (bb);
2241 f = FALLTHRU_EDGE (bb);
2242 b->probability = INTVAL (XEXP (note, 0));
2243 f->probability = REG_BR_PROB_BASE - b->probability;
2244 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2245 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
/* Sibling call: exactly one abnormal sibcall edge may remain.  */
2250 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2252 /* First, there should not be any EH or ABCALL edges resulting
2253 from non-local gotos and the like. If there were, we shouldn't
2254 have created the sibcall in the first place. Second, there
2255 should of course never have been a fallthru edge. */
2256 gcc_assert (single_succ_p (bb));
2257 gcc_assert (single_succ_edge (bb)->flags
2258 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2263 /* If we don't see a jump insn, we don't know exactly why the block would
2264 have been broken at this point. Look for a simple, non-fallthru edge,
2265 as these are only created by conditional branches. If we find such an
2266 edge we know that there used to be a jump here and can then safely
2267 remove all non-fallthru edges. */
2269 FOR_EACH_EDGE (e, ei, bb->succs)
2270 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2279 /* Remove all but the fake and fallthru edges. The fake edge may be
2280 the only successor for this block in the case of noreturn
2282 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2284 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2286 bb->flags |= BB_DIRTY;
2294 gcc_assert (single_succ_p (bb));
2296 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2297 single_succ_edge (bb)->count = bb->count;
2300 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2305 /* Search all basic blocks for potentially dead edges and purge them. Return
2306 true if some edge has been eliminated. */
/* NOTE(review): the loop header over all blocks is elided in this view;
   the visible body ORs each per-block result into the return value.  */
2309 purge_all_dead_edges (void)
2316 bool purged_here = purge_dead_edges (bb);
2318 purged |= purged_here;
2324 /* Same as split_block but update cfg_layout structures. */
/* The trailing footer insns (barriers/tablejumps kept out of line during
   cfglayout) move to the new second half; the first half keeps none.  */
2327 cfg_layout_split_block (basic_block bb, void *insnp)
2330 basic_block new_bb = rtl_split_block (bb, insn);
2332 new_bb->il.rtl->footer = bb->il.rtl->footer;
2333 bb->il.rtl->footer = NULL;
2339 /* Redirect Edge to DEST. */
/* cfglayout-mode variant: returns the (possibly replaced) edge on
   success, NULL when the edge cannot be redirected (abnormal/EH).  */
2341 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2343 basic_block src = e->src;
2346 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2349 if (e->dest == dest)
2352 if (e->src != ENTRY_BLOCK_PTR
2353 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2355 src->flags |= BB_DIRTY;
/* The entry fallthru edge carries no jump insn; just retarget it.  */
2359 if (e->src == ENTRY_BLOCK_PTR
2360 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2363 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2364 e->src->index, dest->index);
2366 e->src->flags |= BB_DIRTY;
2367 redirect_edge_succ (e, dest);
2371 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
2372 in the case the basic block appears to be in sequence. Avoid this
2375 if (e->flags & EDGE_FALLTHRU)
2377 /* Redirect any branch edges unified with the fallthru one. */
2378 if (JUMP_P (BB_END (src))
2379 && label_is_jump_target_p (BB_HEAD (e->dest),
2385 fprintf (dump_file, "Fallthru edge unified with branch "
2386 "%i->%i redirected to %i\n",
2387 e->src->index, e->dest->index, dest->index);
/* Temporarily clear EDGE_FALLTHRU so redirect_branch_edge treats
   this as a plain branch edge, then restore it.  */
2388 e->flags &= ~EDGE_FALLTHRU;
2389 redirected = redirect_branch_edge (e, dest);
2390 gcc_assert (redirected);
2391 e->flags |= EDGE_FALLTHRU;
2392 e->src->flags |= BB_DIRTY;
2395 /* In case we are redirecting fallthru edge to the branch edge
2396 of conditional jump, remove it. */
2397 if (EDGE_COUNT (src->succs) == 2)
2399 /* Find the edge that is different from E. */
2400 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2403 && any_condjump_p (BB_END (src))
2404 && onlyjump_p (BB_END (src)))
2405 delete_insn (BB_END (src));
2407 ret = redirect_edge_succ_nodup (e, dest);
2409 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
2410 e->src->index, e->dest->index, dest->index);
2413 ret = redirect_branch_edge (e, dest);
2415 /* We don't want simplejumps in the insn stream during cfglayout. */
2416 gcc_assert (!simplejump_p (BB_END (src)));
2418 src->flags |= BB_DIRTY;
2422 /* Simple wrapper as we always can redirect fallthru edges. */
/* In cfglayout mode redirection cannot fail for the edges this hook is
   given, so the assert documents that invariant.  */
2424 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2426 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2428 gcc_assert (redirected);
2432 /* Same as delete_basic_block but update cfg_layout structures. */
2435 cfg_layout_delete_block (basic_block bb)
2437 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
2439 if (bb->il.rtl->header)
2441 next = BB_HEAD (bb);
2443 NEXT_INSN (prev) = bb->il.rtl->header;
2445 set_first_insn (bb->il.rtl->header);
2446 PREV_INSN (bb->il.rtl->header) = prev;
2447 insn = bb->il.rtl->header;
2448 while (NEXT_INSN (insn))
2449 insn = NEXT_INSN (insn);
2450 NEXT_INSN (insn) = next;
2451 PREV_INSN (next) = insn;
2453 next = NEXT_INSN (BB_END (bb));
2454 if (bb->il.rtl->footer)
2456 insn = bb->il.rtl->footer;
2459 if (BARRIER_P (insn))
2461 if (PREV_INSN (insn))
2462 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2464 bb->il.rtl->footer = NEXT_INSN (insn);
2465 if (NEXT_INSN (insn))
2466 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2470 insn = NEXT_INSN (insn);
2472 if (bb->il.rtl->footer)
2475 NEXT_INSN (insn) = bb->il.rtl->footer;
2476 PREV_INSN (bb->il.rtl->footer) = insn;
2477 while (NEXT_INSN (insn))
2478 insn = NEXT_INSN (insn);
2479 NEXT_INSN (insn) = next;
2481 PREV_INSN (next) = insn;
2483 set_last_insn (insn);
2486 if (bb->next_bb != EXIT_BLOCK_PTR)
2487 to = &bb->next_bb->il.rtl->header;
2489 to = &cfg_layout_function_footer;
2491 rtl_delete_block (bb);
2494 prev = NEXT_INSN (prev);
2496 prev = get_insns ();
2498 next = PREV_INSN (next);
2500 next = get_last_insn ();
2502 if (next && NEXT_INSN (next) != prev)
2504 remaints = unlink_insn_chain (prev, next);
2506 while (NEXT_INSN (insn))
2507 insn = NEXT_INSN (insn);
2508 NEXT_INSN (insn) = *to;
2510 PREV_INSN (*to) = insn;
2515 /* Return true when blocks A and B can be safely merged. */
2517 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2519 /* If we are partitioning hot/cold basic blocks, we don't want to
2520 mess up unconditional or indirect jumps that cross between hot
2523 Basic block partitioning may result in some jumps that appear to
2524 be optimizable (or blocks that appear to be mergeable), but which really
2525 must be left untouched (they are required to make it safely across
2526 partition boundaries). See the comments at the top of
2527 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2529 if (BB_PARTITION (a) != BB_PARTITION (b))
2532 /* There must be exactly one edge in between the blocks. */
/* NOTE(review): the "== 1" on single_pred_p below is redundant (the
   predicate already yields a boolean) but harmless.  */
2533 return (single_succ_p (a)
2534 && single_succ (a) == b
2535 && single_pred_p (b) == 1
2537 /* Must be simple edge. */
2538 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2539 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2540 /* If the jump insn has side effects,
2541 we can't kill the edge. */
2542 && (!JUMP_P (BB_END (a))
2543 || (reload_completed
2544 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2547 /* Merge block A and B. The blocks must be mergeable. */
/* NOTE(review): interior lines are elided (non-contiguous numbering);
   code text is byte-identical, comments only added.  */
2550 cfg_layout_merge_blocks (basic_block a, basic_block b)
2552 #ifdef ENABLE_CHECKING
2553 gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
2556 /* If there was a CODE_LABEL beginning B, delete it. */
2557 if (LABEL_P (BB_HEAD (b)))
2559 /* This might have been an EH label that no longer has incoming
2560 EH edges. Update data structures to match. */
2561 maybe_remove_eh_handler (BB_HEAD (b));
2563 delete_insn (BB_HEAD (b));
2566 /* We should have fallthru edge in a, or we can do dummy redirection to get
2568 if (JUMP_P (BB_END (a)))
2569 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2570 gcc_assert (!JUMP_P (BB_END (a)));
2572 /* Possible line number notes should appear in between. */
2573 if (b->il.rtl->header)
2575 rtx first = BB_END (a), last;
2577 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a));
2578 delete_insn_chain (NEXT_INSN (first), last);
2579 b->il.rtl->header = NULL;
2582 /* In the case basic blocks are not adjacent, move them around. */
2583 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2585 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2587 emit_insn_after_noloc (first, BB_END (a));
2588 /* Skip possible DELETED_LABEL insn. */
2589 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2590 first = NEXT_INSN (first);
2591 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
/* B's basic-block note is redundant once merged into A.  */
2593 delete_insn (first);
2595 /* Otherwise just re-associate the instructions. */
2600 for (insn = BB_HEAD (b);
2601 insn != NEXT_INSN (BB_END (b));
2602 insn = NEXT_INSN (insn))
2603 set_block_for_insn (insn, a);
2605 /* Skip possible DELETED_LABEL insn. */
2606 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2607 insn = NEXT_INSN (insn);
2608 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2610 BB_END (a) = BB_END (b);
2614 /* Possible tablejumps and barriers should appear after the block. */
2615 if (b->il.rtl->footer)
2617 if (!a->il.rtl->footer)
2618 a->il.rtl->footer = b->il.rtl->footer;
/* A already has a footer: append B's footer to its tail.  */
2621 rtx last = a->il.rtl->footer;
2623 while (NEXT_INSN (last))
2624 last = NEXT_INSN (last);
2625 NEXT_INSN (last) = b->il.rtl->footer;
2626 PREV_INSN (b->il.rtl->footer) = last;
2628 b->il.rtl->footer = NULL;
2630 a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
2633 fprintf (dump_file, "Merged blocks %d and %d.\n",
2634 a->index, b->index);
/* cfglayout-mode edge split: create a fresh block after the edge source
   (or at the start of the function for the entry edge), give it a
   fallthru edge to the old destination, and force-redirect E to it.  */
2640 cfg_layout_split_edge (edge e)
2642 basic_block new_bb =
2643 create_basic_block (e->src != ENTRY_BLOCK_PTR
2644 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2647 /* ??? This info is likely going to be out of date very soon, but we must
2648 create it to avoid getting an ICE later. */
/* NOTE(review): "®_obstack" below is an HTML-entity mangling of
   "&reg_obstack" -- the encoding should be repaired file-wide.  */
2649 if (e->dest->il.rtl->global_live_at_start)
2651 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (®_obstack);
2652 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (®_obstack);
2653 COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
2654 e->dest->il.rtl->global_live_at_start);
2655 COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
2656 e->dest->il.rtl->global_live_at_start);
2659 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2660 redirect_edge_and_branch_force (e, new_bb);
2665 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
/* NOTE(review): body elided in this view; the ATTRIBUTE_UNUSED parameter
   suggests the RTL hook needs no extra work here -- TODO confirm.  */
2668 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2672 /* Return 1 if BB ends with a call, possibly followed by some
2673 instructions that must stay with the call, 0 otherwise. */
2676 rtl_block_ends_with_call_p (basic_block bb)
2678 rtx insn = BB_END (bb);
/* Walk backwards over trailing insns that must accompany a call
   (keep_with_call_p), stopping at the block head or the call itself.  */
2680 while (!CALL_P (insn)
2681 && insn != BB_HEAD (bb)
2682 && keep_with_call_p (insn))
2683 insn = PREV_INSN (insn);
2684 return (CALL_P (insn));
2687 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
2690 rtl_block_ends_with_condjump_p (basic_block bb)
2692 return any_condjump_p (BB_END (bb));
2695 /* Return true if we need to add fake edge to exit.
2696 Helper function for rtl_flow_call_edges_add. */
/* True for calls that may not return normally (not sibcall, not marked
   noreturn, not const/pure) and for volatile asm statements; such insns
   mean control can leave the block mid-stream.  NOTE(review): the
   leading CALL_P test and intervening lines are elided in this view.  */
2699 need_fake_edge_p (rtx insn)
2705 && !SIBLING_CALL_P (insn)
2706 && !find_reg_note (insn, REG_NORETURN, NULL)
2707 && !CONST_OR_PURE_CALL_P (insn)))
2710 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2711 && MEM_VOLATILE_P (PATTERN (insn)))
2712 || (GET_CODE (PATTERN (insn)) == PARALLEL
2713 && asm_noperands (insn) != -1
2714 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2715 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2718 /* Add fake edges to the function exit for any non constant and non noreturn
2719 calls, volatile inline assembly in the bitmap of blocks specified by
2720 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
2723 The goal is to expose cases in which entering a basic block does not imply
2724 that all subsequent instructions must be executed. */
2727 rtl_flow_call_edges_add (sbitmap blocks)
/* Count of blocks split by this pass; this is the return value.  */
2730 int blocks_split = 0;
/* Snapshot the block count: split_block below can grow/renumber blocks.  */
2731 int last_bb = last_basic_block;
2732 bool check_last_block = false;
/* An "empty" function (only the fixed entry/exit blocks) needs nothing.  */
2734 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2738 check_last_block = true;
/* With a BLOCKS filter, the last block only needs the special handling
   below when it is in the filter.  */
2740 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2742 /* In the last basic block, before epilogue generation, there will be
2743 a fallthru edge to EXIT. Special care is required if the last insn
2744 of the last basic block is a call because make_edge folds duplicate
2745 edges, which would result in the fallthru edge also being marked
2746 fake, which would result in the fallthru edge being removed by
2747 remove_fake_edges, which would result in an invalid CFG.
2749 Moreover, we can't elide the outgoing fake edge, since the block
2750 profiler needs to take this into account in order to solve the minimal
2751 spanning tree in the case that the call doesn't return.
2753 Handle this by adding a dummy instruction in a new last basic block. */
2754 if (check_last_block)
2756 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2757 rtx insn = BB_END (bb);
2759 /* Back up past insns that must be kept in the same block as a call. */
2760 while (insn != BB_HEAD (bb)
2761 && keep_with_call_p (insn))
2762 insn = PREV_INSN (insn);
2764 if (need_fake_edge_p (insn))
2768 e = find_edge (bb, EXIT_BLOCK_PTR);
/* Splice a dummy USE onto the fallthru edge; committing the insertion
   splits that edge, giving a fresh last block so the fake edge added in
   the main loop cannot collide with the fallthru edge to EXIT.  */
2771 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2772 commit_edge_insertions ();
2777 /* Now add fake edges to the function exit for any non constant
2778 calls since there is no way that we can determine if they will
2781 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
2783 basic_block bb = BASIC_BLOCK (i);
/* Honor the optional BLOCKS filter.  */
2790 if (blocks && !TEST_BIT (blocks, i))
/* Scan the block backwards.  PREV_INSN is sampled before any split so
   the walk is unaffected by the renumbering split_block may cause.  */
2793 for (insn = BB_END (bb); ; insn = prev_insn)
2795 prev_insn = PREV_INSN (insn);
2796 if (need_fake_edge_p (insn))
2799 rtx split_at_insn = insn;
2801 /* Don't split the block between a call and an insn that should
2802 remain in the same block as the call. */
2804 while (split_at_insn != BB_END (bb)
2805 && keep_with_call_p (NEXT_INSN (split_at_insn)))
2806 split_at_insn = NEXT_INSN (split_at_insn);
2808 /* The handling above of the final block before the epilogue
2809 should be enough to verify that there is no edge to the exit
2810 block in CFG already. Calling make_edge in such case would
2811 cause us to mark that edge as fake and remove it later. */
2813 #ifdef ENABLE_CHECKING
2814 if (split_at_insn == BB_END (bb))
2816 e = find_edge (bb, EXIT_BLOCK_PTR);
2817 gcc_assert (e == NULL);
2821 /* Note that the following may create a new basic block
2822 and renumber the existing basic blocks. */
2823 if (split_at_insn != BB_END (bb))
2825 e = split_block (bb, split_at_insn);
/* The fake edge from BB to EXIT models the "call may not return" path.  */
2830 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
/* Stop once the scan reaches the block head.  */
2833 if (insn == BB_HEAD (bb))
2839 verify_flow_info ();
2841 return blocks_split;
2844 /* Add COMP_RTX as a condition at end of COND_BB.  FIRST_HEAD is
2845 the conditional branch target; SECOND_HEAD would be the fall-thru
2846 block, but there is no need to handle it here -- the loop versioning
2847 code takes care of that.  SECOND_HEAD exists because the tree-level
2848 variant of this hook needs it, and both hooks must share a signature. */
2850 rtl_lv_add_condition_to_bb (basic_block first_head ,
2851 basic_block second_head ATTRIBUTE_UNUSED,
2852 basic_block cond_bb, void *comp_rtx)
2854 rtx label, seq, jump;
/* COMP_RTX is a comparison rtx: operands in XEXP 0/1, comparison code
   as its rtx code.  */
2855 rtx op0 = XEXP ((rtx)comp_rtx, 0);
2856 rtx op1 = XEXP ((rtx)comp_rtx, 1);
2857 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
2858 enum machine_mode mode;
2861 label = block_label (first_head);
/* Prefer op0's mode; fall back to op1's when op0 has VOIDmode.  */
2862 mode = GET_MODE (op0);
2863 if (mode == VOIDmode)
2864 mode = GET_MODE (op1);
/* Legitimize both operands, then emit the compare-and-branch to LABEL.  */
2867 op0 = force_operand (op0, NULL_RTX);
2868 op1 = force_operand (op1, NULL_RTX);
2869 do_compare_rtx_and_jump (op0, op1, comp, 0,
2870 mode, NULL_RTX, NULL_RTX, label);
/* Wire the freshly emitted jump to its target and count the label use.  */
2871 jump = get_last_insn ();
2872 JUMP_LABEL (jump) = label;
2873 LABEL_NUSES (label)++;
2877 /* Add the new cond, in the new head. */
2878 emit_insn_after(seq, BB_END(cond_bb));
2882 /* Given a block B that ends in a conditional branch, store the branch
2883 edge in *BRANCH_EDGE and the fall-thru edge in *FALLTHRU_EDGE
2884 respectively. */
2886 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
2887 edge *fallthru_edge)
/* B has two successors; classify them by the EDGE_FALLTHRU flag on the
   first one.  */
2889 edge e = EDGE_SUCC (b, 0);
2891 if (e->flags & EDGE_FALLTHRU)
/* Edge 0 is the fall-thru, so edge 1 must be the branch.  */
2894 *branch_edge = EDGE_SUCC (b, 1);
/* Otherwise (else-branch partly elided here): edge 0 was the branch,
   so edge 1 is the fall-thru.  */
2899 *fallthru_edge = EDGE_SUCC (b, 1);
/* Allocate and zero the RTL-specific aux info for BB (bb->il.rtl) in
   GC-managed memory.  The info must not already be present.  */
2904 init_rtl_bb_info (basic_block bb)
2906 gcc_assert (!bb->il.rtl);
2907 bb->il.rtl = ggc_alloc_cleared (sizeof (struct rtl_bb_info));
2911 /* Add EXPR to the end of basic block BB. */
/* NOTE(review): the parameter is actually named PAT and may be a chain of
   insns (see the NEXT_INSN walk below); the declaration lines giving the
   return type are elided here -- presumably the emitted insn is returned.  */
2914 insert_insn_end_bb_new (rtx pat, basic_block bb)
2916 rtx insn = BB_END (bb);
/* Advance pat_end to the last insn of the PAT chain.  */
2920 while (NEXT_INSN (pat_end) != NULL_RTX)
2921 pat_end = NEXT_INSN (pat_end);
2923 /* If the last insn is a jump, insert EXPR in front [taking care to
2924 handle cc0, etc. properly]. Similarly we need to care trapping
2925 instructions in presence of non-call exceptions. */
2928 || (NONJUMP_INSN_P (insn)
2929 && (!single_succ_p (bb)
2930 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
2935 /* If this is a jump table, then we can't insert stuff here. Since
2936 we know the previous real insn must be the tablejump, we insert
2937 the new instruction just before the tablejump. */
2938 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
2939 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
2940 insn = prev_real_insn (insn);
2943 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
2944 if cc0 isn't set. */
/* On cc0 targets the condition-code setter must remain adjacent to the
   jump, so back up over it: first via the REG_CC_SETTER note, otherwise
   by checking the previous non-note insn with sets_cc0_p.  */
2945 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2947 insn = XEXP (note, 0);
2950 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
2951 if (maybe_cc0_setter
2952 && INSN_P (maybe_cc0_setter)
2953 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
2954 insn = maybe_cc0_setter;
2957 /* FIXME: What if something in cc0/jump uses value set in new
2959 new_insn = emit_insn_before_noloc (pat, insn);
2962 /* Likewise if the last insn is a call, as will happen in the presence
2963 of exception handling. */
2964 else if (CALL_P (insn)
2965 && (!single_succ_p (bb)
2966 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
2968 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
2969 we search backward and place the instructions before the first
2970 parameter is loaded. Do this for everyone for consistency and a
2971 presumption that we'll get better code elsewhere as well. */
2973 /* Since different machines initialize their parameter registers
2974 in different orders, assume nothing. Collect the set of all
2975 parameter registers. */
2976 insn = find_first_parameter_load (insn, BB_HEAD (bb));
2978 /* If we found all the parameter loads, then we want to insert
2979 before the first parameter load.
2981 If we did not find all the parameter loads, then we might have
2982 stopped on the head of the block, which could be a CODE_LABEL.
2983 If we inserted before the CODE_LABEL, then we would be putting
2984 the insn in the wrong basic block. In that case, put the insn
2985 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
2986 while (LABEL_P (insn)
2987 || NOTE_INSN_BASIC_BLOCK_P (insn))
2988 insn = NEXT_INSN (insn);
2990 new_insn = emit_insn_before_noloc (pat, insn);
/* Otherwise the end of the block is a safe insertion point: append.  */
2993 new_insn = emit_insn_after_noloc (pat, insn);
2998 /* Implementation of CFG manipulation for linearized RTL. */
/* Hook-name labels below follow the cfg_hooks field order documented in
   cfghooks.h; entries between the visible ones are elided in this view.  */
2999 struct cfg_hooks rtl_cfg_hooks = {
3001 rtl_verify_flow_info, /* verify_flow_info */
3003 rtl_create_basic_block, /* create_basic_block */
3004 rtl_redirect_edge_and_branch, /* redirect_edge_and_branch */
3005 rtl_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
3008 rtl_move_block_after, /* move_block_after */
3009 rtl_can_merge_blocks, /* can_merge_blocks_p */
3013 NULL, /* can_duplicate_block_p */
3014 NULL, /* duplicate_block */
3016 rtl_make_forwarder_block, /* make_forwarder_block */
3017 rtl_tidy_fallthru_edge, /* tidy_fallthru_edge */
3018 rtl_block_ends_with_call_p, /* block_ends_with_call_p */
3019 rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
3020 rtl_flow_call_edges_add, /* flow_call_edges_add */
3021 NULL, /* execute_on_growing_pred */
3022 NULL, /* execute_on_shrinking_pred */
3023 NULL, /* duplicate loop for trees */
3024 NULL, /* lv_add_condition_to_bb */
3025 NULL, /* lv_adjust_loop_header_phi*/
3026 NULL, /* extract_cond_bb_edges */
3027 NULL /* flush_pending_stmts */
3030 /* Implementation of CFG manipulation for cfg layout RTL, where
3031 basic blocks connected via fallthru edges do not have to be adjacent.
3032 This representation will hopefully become the default one in a future
3033 version of the compiler. */
3035 /* We do not want to declare these functions in a header file, since they
3036 should only be used through the cfghooks interface, and we do not want to
3037 move them here since it would require also moving quite a lot of related
3039 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
3040 extern basic_block cfg_layout_duplicate_bb (basic_block);
3042 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3044 rtl_verify_flow_info_1,
3046 cfg_layout_create_basic_block,
3047 cfg_layout_redirect_edge_and_branch,
3048 cfg_layout_redirect_edge_and_branch_force,
3049 cfg_layout_delete_block,
3050 cfg_layout_split_block,
3051 rtl_move_block_after,
3052 cfg_layout_can_merge_blocks_p,
3053 cfg_layout_merge_blocks,
3056 cfg_layout_can_duplicate_bb_p,
3057 cfg_layout_duplicate_bb,
3058 cfg_layout_split_edge,
3059 rtl_make_forwarder_block,
3061 rtl_block_ends_with_call_p,
3062 rtl_block_ends_with_condjump_p,
3063 rtl_flow_call_edges_add,
3064 NULL, /* execute_on_growing_pred */
3065 NULL, /* execute_on_shrinking_pred */
3066 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3067 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3068 NULL, /* lv_adjust_loop_header_phi*/
3069 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3070 NULL /* flush_pending_stmts */