1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
33 #include "cfglayout.h"
38 /* The contents of the current function definition are allocated
39 in this obstack, and all are freed at the end of the function. */
40 extern struct obstack flow_obstack;
42 /* Holds the interesting trailing notes for the function. */
/* NOTE(review): set by record_effective_endpoints (see below) by unlinking
   the insns after the last basic block; respliced into the chain by
   fixup_reorder_chain.  */
43 rtx cfg_layout_function_footer;
45 static rtx skip_insns_after_block PARAMS ((basic_block));
46 static void record_effective_endpoints PARAMS ((void));
47 static rtx label_for_bb PARAMS ((basic_block));
48 static void fixup_reorder_chain PARAMS ((void));
50 static void set_block_levels PARAMS ((tree, int));
51 static void change_scope PARAMS ((rtx, tree, tree));
53 void verify_insn_chain PARAMS ((void));
54 static void cleanup_unconditional_jumps PARAMS ((struct loops *));
55 static void fixup_fallthru_exit_predecessor PARAMS ((void));
56 static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
57 static void break_superblocks PARAMS ((void));
58 static tree insn_scope PARAMS ((rtx));
61 unlink_insn_chain (first, last)
65 rtx prevfirst = PREV_INSN (first);
66 rtx nextlast = NEXT_INSN (last);
68 PREV_INSN (first) = NULL;
69 NEXT_INSN (last) = NULL;
71 NEXT_INSN (prevfirst) = nextlast;
73 PREV_INSN (nextlast) = prevfirst;
75 set_last_insn (prevfirst);
77 set_first_insn (nextlast);
81 /* Skip over inter-block insns occurring after BB which are typically
82 associated with BB (e.g., barriers). If there are any such insns,
83 we return the last one. Otherwise, we return the end of BB. */
/* NOTE(review): this chunk is an elided extraction -- the function's return
   type, braces and several interior lines are missing here; the code below
   is kept byte-identical to the extraction.  */
86 skip_insns_after_block (bb)
89 rtx insn, last_insn, next_head, prev;
/* Stop the forward scan when we reach the head of the next block.  */
92 if (bb->next_bb != EXIT_BLOCK_PTR)
93 next_head = bb->next_bb->head;
95 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
97 if (insn == next_head)
100 switch (GET_CODE (insn))
107 switch (NOTE_LINE_NUMBER (insn))
109 case NOTE_INSN_LOOP_END:
110 case NOTE_INSN_BLOCK_END:
113 case NOTE_INSN_DELETED:
114 case NOTE_INSN_DELETED_LABEL:
/* Presumably a CODE_LABEL followed by its jump table (ADDR_VEC /
   ADDR_DIFF_VEC) is skipped as one unit -- the guarding condition is
   elided here; verify against the full source.  */
125 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
126 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
127 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
129 insn = NEXT_INSN (insn);
142 /* It is possible to hit contradictory sequence. For instance:
148 Where barrier belongs to jump_insn, but the note does not. This can be
149 created by removing the basic block originally following
150 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
/* Walk backwards from the last skipped insn and move the listed note
   kinds after it, so the notes end up in a consistent order.  */
152 for (insn = last_insn; insn != bb->end; insn = prev)
154 prev = PREV_INSN (insn);
155 if (GET_CODE (insn) == NOTE)
156 switch (NOTE_LINE_NUMBER (insn))
158 case NOTE_INSN_LOOP_END:
159 case NOTE_INSN_BLOCK_END:
160 case NOTE_INSN_DELETED:
161 case NOTE_INSN_DELETED_LABEL:
164 reorder_insns (insn, insn, last_insn);
171 /* Locate or create a label for a given basic block. */
/* NOTE(review): the function signature and return statement are elided in
   this extraction; the visible code falls back to block_label when BB's
   head is not already a CODE_LABEL, logging to rtl_dump_file.  */
177 rtx label = bb->head;
179 if (GET_CODE (label) != CODE_LABEL)
182 fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
184 label = block_label (bb);
190 /* Locate the effective beginning and end of the insn chain for each
191 block, as defined by skip_insns_after_block above. */
/* NOTE(review): elided extraction -- the surrounding FOR_EACH_BB-style loop
   and braces are missing.  Inter-block insns before a block's head become
   RBI(bb)->header; those after its end become RBI(bb)->footer; anything
   after the final block is stashed in cfg_layout_function_footer.  */
194 record_effective_endpoints ()
196 rtx next_insn = get_insns ();
203 if (PREV_INSN (bb->head) && next_insn != bb->head)
204 RBI (bb)->header = unlink_insn_chain (next_insn,
205 PREV_INSN (bb->head));
206 end = skip_insns_after_block (bb);
207 if (NEXT_INSN (bb->end) && bb->end != end)
208 RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
209 next_insn = NEXT_INSN (bb->end);
212 cfg_layout_function_footer = next_insn;
213 if (cfg_layout_function_footer)
214 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
217 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
218 numbers and files. In order to be GGC friendly we need to use separate
219 varrays. This also slightly improves the memory locality in binary search.
220 The _locs arrays contain locators where the given property changes. The
221 block_locators_blocks contains the scope block that is used for all insn
222 locators greater than corresponding block_locators_locs value and smaller
223 than the following one. Similarly for the other properties. */
224 static GTY(()) varray_type block_locators_locs;
225 static GTY(()) varray_type block_locators_blocks;
226 static GTY(()) varray_type line_locators_locs;
227 static GTY(()) varray_type line_locators_lines;
228 static GTY(()) varray_type file_locators_locs;
229 static GTY(()) varray_type file_locators_files;
/* Locators assigned to the function prologue and epilogue insns; 0 until
   insn_locators_initialize runs.  */
230 int prologue_locator;
231 int epilogue_locator;
233 /* During the RTL expansion the lexical blocks and line numbers are
234 represented via INSN_NOTEs. Replace them by representation using
/* NOTE(review): elided extraction -- braces, the `loc` counter management
   and some declarations are missing.  The visible logic walks the insn
   chain once, pushing a new (locator, value) pair into the block/line/file
   varrays whenever the current scope block, line number or file name
   differs from the last recorded one, and tags active insns with
   INSN_LOCATOR.  */
238 insn_locators_initialize ()
241 tree last_block = NULL;
244 int line_number = 0, last_line_number = 0;
245 char *file_name = NULL, *last_file_name = NULL;
247 prologue_locator = epilogue_locator = 0;
249 VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
250 VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
251 VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
252 VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
253 VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
254 VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
256 for (insn = get_insns (); insn; insn = next)
258 next = NEXT_INSN (insn);
/* Jump tables (ADDR_VEC/ADDR_DIFF_VEC) never get locators; the
   `!prologue_locator && file_name` arm records the very first location
   seen as the prologue locator.  */
260 if ((active_insn_p (insn)
261 && GET_CODE (PATTERN (insn)) != ADDR_VEC
262 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
264 || (!prologue_locator && file_name))
266 if (last_block != block)
269 VARRAY_PUSH_INT (block_locators_locs, loc);
270 VARRAY_PUSH_TREE (block_locators_blocks, block);
273 if (last_line_number != line_number)
276 VARRAY_PUSH_INT (line_locators_locs, loc);
277 VARRAY_PUSH_INT (line_locators_lines, line_number);
278 last_line_number = line_number;
/* File names are compared by pointer, not strcmp -- presumably the
   front end interns them; verify against the full source.  */
280 if (last_file_name != file_name)
283 VARRAY_PUSH_INT (file_locators_locs, loc);
284 VARRAY_PUSH_CHAR_PTR (file_locators_files, file_name);
285 last_file_name = file_name;
288 if (!prologue_locator && file_name)
289 prologue_locator = loc;
290 if (!NEXT_INSN (insn))
291 epilogue_locator = loc;
292 if (active_insn_p (insn))
293 INSN_LOCATOR (insn) = loc;
294 else if (GET_CODE (insn) == NOTE)
296 switch (NOTE_LINE_NUMBER (insn))
298 case NOTE_INSN_BLOCK_BEG:
299 block = NOTE_BLOCK (insn);
302 case NOTE_INSN_BLOCK_END:
303 block = BLOCK_SUPERCONTEXT (block);
304 if (block && TREE_CODE (block) == FUNCTION_DECL)
309 if (NOTE_LINE_NUMBER (insn) > 0)
311 line_number = NOTE_LINE_NUMBER (insn);
312 file_name = (char *)NOTE_SOURCE_FILE (insn);
319 /* Tag the blocks with a depth number so that change_scope can find
320 the common parent easily. */
321 set_block_levels (DECL_INITIAL (cfun->decl), 0);
324 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
325 found in the block tree. */
/* NOTE(review): elided extraction -- the enclosing loop condition and braces
   are missing.  Recurses into subblocks with level + 1 and walks siblings
   via BLOCK_CHAIN.  */
328 set_block_levels (block, level)
334 BLOCK_NUMBER (block) = level;
335 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
336 block = BLOCK_CHAIN (block);
340 /* Return scope resulting from combination of S1 and S2. */
/* NOTE(review): only the BLOCK_NUMBER comparison is visible here; the
   return statements are elided.  Presumably the deeper (higher-numbered)
   scope wins -- confirm against the full source.  */
342 choose_inner_scope (s1, s2)
349 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
354 /* Emit lexical block notes needed to change scope from S1 to S2. */
/* NOTE(review): elided extraction -- the loop that finds the common
   ancestor `com` and the loops emitting the notes are partially missing.
   The visible logic climbs ts1/ts2 toward a common supercontext using the
   BLOCK_NUMBER depths set by set_block_levels, then emits BLOCK_END notes
   from s1 up to the common parent and BLOCK_BEG notes down to s2.  */
357 change_scope (orig_insn, s1, s2)
361 rtx insn = orig_insn;
362 tree com = NULL_TREE;
363 tree ts1 = s1, ts2 = s2;
368 if (ts1 == NULL || ts2 == NULL)
/* Walk the deeper block up first so both sides reach equal depth.  */
370 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
371 ts1 = BLOCK_SUPERCONTEXT (ts1);
372 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
373 ts2 = BLOCK_SUPERCONTEXT (ts2);
376 ts1 = BLOCK_SUPERCONTEXT (ts1);
377 ts2 = BLOCK_SUPERCONTEXT (ts2);
/* Close scopes from s1 up to the common ancestor.  */
386 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
387 NOTE_BLOCK (note) = s;
388 s = BLOCK_SUPERCONTEXT (s);
/* Open scopes from the common ancestor down to s2.  */
395 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
396 NOTE_BLOCK (insn) = s;
397 s = BLOCK_SUPERCONTEXT (s);
401 /* Return lexical scope block insn belongs to. */
/* NOTE(review): elided extraction -- the binary-search loop structure and
   early-outs are partially missing.  Searches block_locators_locs for the
   greatest recorded locator <= INSN_LOCATOR (insn) and returns the
   corresponding scope block.  */
406 int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
408 int loc = INSN_LOCATOR (insn);
414 int pos = (min + max) / 2;
415 int tmp = VARRAY_INT (block_locators_locs, pos);
417 if (tmp <= loc && min != pos)
419 else if (tmp > loc && max != pos)
427 return VARRAY_TREE (block_locators_blocks, min);
430 /* Return line number of the statement that produced this insn. */
/* NOTE(review): same elided binary-search shape as insn_scope, but over the
   line_locators varrays.  */
435 int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
437 int loc = INSN_LOCATOR (insn);
443 int pos = (min + max) / 2;
444 int tmp = VARRAY_INT (line_locators_locs, pos);
446 if (tmp <= loc && min != pos)
448 else if (tmp > loc && max != pos)
456 return VARRAY_INT (line_locators_lines, min);
459 /* Return source file of the statement that produced this insn. */
/* NOTE(review): same elided binary-search shape as insn_scope, but over the
   file_locators varrays.  */
464 int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
466 int loc = INSN_LOCATOR (insn);
472 int pos = (min + max) / 2;
473 int tmp = VARRAY_INT (file_locators_locs, pos);
475 if (tmp <= loc && min != pos)
477 else if (tmp > loc && max != pos)
485 return VARRAY_CHAR_PTR (file_locators_files, min);
488 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
489 on the scope tree and the newly reordered instructions. */
/* NOTE(review): elided extraction -- declarations, braces and the final
   statements are missing.  Walks active insns, computes each insn's scope
   via insn_scope (merging scopes across SEQUENCEs with
   choose_inner_scope), and emits BLOCK_BEG/END notes whenever the scope
   changes; finally closes back to the function's outermost block.  */
492 reemit_insn_block_notes ()
494 tree cur_block = DECL_INITIAL (cfun->decl);
498 if (!active_insn_p (insn))
499 insn = next_active_insn (insn);
500 for (; insn; insn = next_active_insn (insn))
504 this_block = insn_scope (insn);
505 /* For sequences compute scope resulting from merging all scopes
506 of instructions nested inside. */
507 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
510 rtx body = PATTERN (insn);
513 for (i = 0; i < XVECLEN (body, 0); i++)
514 this_block = choose_inner_scope (this_block,
515 insn_scope (XVECEXP (body, 0, i)));
520 if (this_block != cur_block)
522 change_scope (insn, cur_block, this_block);
523 cur_block = this_block;
527 /* change_scope emits before the insn, not after. */
528 note = emit_note (NULL, NOTE_INSN_DELETED);
529 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
535 /* Given a reorder chain, rearrange the code to match. */
/* NOTE(review): this is the largest and most heavily elided function in the
   extraction -- many conditions, braces and whole statements are missing,
   so the code below is kept byte-identical.  Visible phases:
   1. Rechain the insn stream following RBI(bb)->next order, splicing each
      block's header/footer and finally cfg_layout_function_footer.
   2. For each block, fix up its ending jump so control flow matches the
      new physical order: keep fallthrus that are still adjacent, invert
      conditional jumps when profitable (REG_BR_PROB below 50%), fabricate
      an edge for degenerate condjumps to the next insn, and emit new
      unconditional jumps via force_nonfallthru where needed (newly created
      compensation blocks get their own RBI and are marked visited).
   3. Rebuild BASIC_BLOCK ordering and prev_bb/next_bb links, dumping the
      reordered sequence to rtl_dump_file.  */
538 fixup_reorder_chain ()
540 basic_block bb, prev_bb;
544 /* First do the bulk reordering -- rechain the blocks without regard to
545 the needed changes to jumps and labels. */
547 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
549 bb = RBI (bb)->next, index++)
551 if (RBI (bb)->header)
554 NEXT_INSN (insn) = RBI (bb)->header;
556 set_first_insn (RBI (bb)->header);
557 PREV_INSN (RBI (bb)->header) = insn;
558 insn = RBI (bb)->header;
559 while (NEXT_INSN (insn))
560 insn = NEXT_INSN (insn);
563 NEXT_INSN (insn) = bb->head;
565 set_first_insn (bb->head);
566 PREV_INSN (bb->head) = insn;
568 if (RBI (bb)->footer)
570 NEXT_INSN (insn) = RBI (bb)->footer;
571 PREV_INSN (RBI (bb)->footer) = insn;
572 while (NEXT_INSN (insn))
573 insn = NEXT_INSN (insn);
577 if (index != n_basic_blocks)
580 NEXT_INSN (insn) = cfg_layout_function_footer;
581 if (cfg_layout_function_footer)
582 PREV_INSN (cfg_layout_function_footer) = insn;
584 while (NEXT_INSN (insn))
585 insn = NEXT_INSN (insn);
587 set_last_insn (insn);
588 #ifdef ENABLE_CHECKING
589 verify_insn_chain ();
592 /* Now add jumps and labels as needed to match the blocks new
595 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
597 edge e_fall, e_taken, e;
601 if (bb->succ == NULL)
604 /* Find the old fallthru edge, and another non-EH edge for
606 e_taken = e_fall = NULL;
607 for (e = bb->succ; e ; e = e->succ_next)
608 if (e->flags & EDGE_FALLTHRU)
610 else if (! (e->flags & EDGE_EH))
613 bb_end_insn = bb->end;
614 if (GET_CODE (bb_end_insn) == JUMP_INSN)
616 if (any_condjump_p (bb_end_insn))
618 /* If the old fallthru is still next, nothing to do. */
619 if (RBI (bb)->next == e_fall->dest
621 && e_fall->dest == EXIT_BLOCK_PTR))
624 /* The degenerated case of conditional jump jumping to the next
625 instruction can happen on target having jumps with side
628 Create temporarily the duplicated edge representing branch.
629 It will get unidentified by force_nonfallthru_and_redirect
630 that would otherwise get confused by fallthru edge not pointing
631 to the next basic block. */
637 e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
639 if (!redirect_jump (bb->end, block_label (bb), 0))
641 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
644 int prob = INTVAL (XEXP (note, 0));
646 e_fake->probability = prob;
647 e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
648 e_fall->probability -= e_fall->probability;
649 e_fall->count -= e_fake->count;
650 if (e_fall->probability < 0)
651 e_fall->probability = 0;
652 if (e_fall->count < 0)
656 /* There is one special case: if *neither* block is next,
657 such as happens at the very end of a function, then we'll
658 need to add a new unconditional jump. Choose the taken
659 edge based on known or assumed probability. */
660 else if (RBI (bb)->next != e_taken->dest)
662 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
665 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
666 && invert_jump (bb_end_insn,
667 label_for_bb (e_fall->dest), 0))
669 e_fall->flags &= ~EDGE_FALLTHRU;
670 e_taken->flags |= EDGE_FALLTHRU;
671 update_br_prob_note (bb);
672 e = e_fall, e_fall = e_taken, e_taken = e;
676 /* Otherwise we can try to invert the jump. This will
677 basically never fail, however, keep up the pretense. */
678 else if (invert_jump (bb_end_insn,
679 label_for_bb (e_fall->dest), 0))
681 e_fall->flags &= ~EDGE_FALLTHRU;
682 e_taken->flags |= EDGE_FALLTHRU;
683 update_br_prob_note (bb);
687 else if (returnjump_p (bb_end_insn))
691 /* Otherwise we have some switch or computed jump. In the
692 99% case, there should not have been a fallthru edge. */
696 #ifdef CASE_DROPS_THROUGH
697 /* Except for VAX. Since we didn't have predication for the
698 tablejump, the fallthru block should not have moved. */
699 if (RBI (bb)->next == e_fall->dest
701 bb_end_insn = skip_insns_after_block (bb);
709 /* No fallthru implies a noreturn function with EH edges, or
710 something similarly bizarre. In any case, we don't need to
715 /* If the fallthru block is still next, nothing to do. */
716 if (RBI (bb)->next == e_fall->dest)
719 /* A fallthru to exit block. */
720 if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
724 /* We got here if we need to add a new jump insn. */
725 nb = force_nonfallthru (e_fall);
728 alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
729 RBI (nb)->visited = 1;
730 RBI (nb)->next = RBI (bb)->next;
732 /* Don't process this new block. */
737 /* Put basic_block_info in the new order. */
741 fprintf (rtl_dump_file, "Reordered sequence:\n");
742 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = RBI (bb)->next, index ++)
744 fprintf (rtl_dump_file, " %i ", index);
745 if (RBI (bb)->original)
746 fprintf (rtl_dump_file, "duplicate of %i ",
747 RBI (bb)->original->index);
748 else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
749 fprintf (rtl_dump_file, "compensation ");
751 fprintf (rtl_dump_file, "bb %i ", bb->index);
752 fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
756 prev_bb = ENTRY_BLOCK_PTR;
757 bb = ENTRY_BLOCK_PTR->next_bb;
760 for (; bb; prev_bb = bb, bb = RBI (bb)->next, index ++)
763 BASIC_BLOCK (index) = bb;
765 bb->prev_bb = prev_bb;
766 prev_bb->next_bb = bb;
768 prev_bb->next_bb = EXIT_BLOCK_PTR;
769 EXIT_BLOCK_PTR->prev_bb = prev_bb;
772 /* Perform sanity checks on the insn chain.
773 1. Check that next/prev pointers are consistent in both the forward and
775 2. Count insns in chain, going both directions, and check if equal.
776 3. Check that get_last_insn () returns the actual end of chain. */
/* NOTE(review): the function signature and the abort/error statements
   inside each check are elided in this extraction.  */
782 int insn_cnt1, insn_cnt2;
784 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
786 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
787 if (PREV_INSN (x) != prevx)
790 if (prevx != get_last_insn ())
793 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
795 nextx = x, insn_cnt2++, x = PREV_INSN (x))
796 if (NEXT_INSN (x) != nextx)
799 if (insn_cnt1 != insn_cnt2)
803 /* Remove any unconditional jumps and forwarder block creating fallthru
804 edges instead. During BB reordering, fallthru edges are not required
805 to target next basic block in the linear CFG layout, so the unconditional
806 jumps are not needed. If LOOPS is not null, also update loop structure &
/* NOTE(review): heavily elided -- the iteration over blocks, several
   braces and the jump-deletion statements are missing.  Two visible cases:
   (a) remove a label-less forwarder block (updating loop headers/latches
   and dominators when LOOPS is given) and redirect its predecessor edge;
   (b) delete a simplejump at a block end, turning its edge into a
   fallthru, and clean trailing barriers/notes.  */
810 cleanup_unconditional_jumps (loops)
819 if (bb->succ->flags & EDGE_FALLTHRU)
821 if (!bb->succ->succ_next)
824 if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
825 && bb->prev_bb != ENTRY_BLOCK_PTR)
827 basic_block prev = bb->prev_bb;
830 fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
835 /* bb cannot be loop header, as it only has one entry
836 edge. It could be a loop latch. */
837 if (bb->loop_father->header == bb)
840 if (bb->loop_father->latch == bb)
841 bb->loop_father->latch = bb->pred->src;
843 if (get_immediate_dominator
844 (loops->cfg.dom, bb->succ->dest) == bb)
845 set_immediate_dominator
846 (loops->cfg.dom, bb->succ->dest, bb->pred->src);
848 remove_bb_from_loops (bb);
849 delete_from_dominance_info (loops->cfg.dom, bb);
852 redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
856 else if (simplejump_p (bb->end))
861 fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
862 INSN_UID (jump), bb->index);
864 bb->succ->flags |= EDGE_FALLTHRU;
869 insn = NEXT_INSN (bb->end);
871 && (GET_CODE (insn) != NOTE
872 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
874 rtx next = NEXT_INSN (insn);
876 if (GET_CODE (insn) == BARRIER)
877 delete_barrier (insn);
885 /* The block falling through to exit must be the last one in the
886 reordered chain. Ensure that this condition is met. */
/* NOTE(review): elided extraction -- braces and the loop advancing `c` are
   partially missing.  Finds the fallthru predecessor of EXIT and, if it is
   not already last in the RBI next-chain, unlinks it and re-appends it at
   the end.  */
888 fixup_fallthru_exit_predecessor ()
891 basic_block bb = NULL;
893 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
894 if (e->flags & EDGE_FALLTHRU)
897 if (bb && RBI (bb)->next)
899 basic_block c = ENTRY_BLOCK_PTR->next_bb;
901 while (RBI (c)->next != bb)
904 RBI (c)->next = RBI (bb)->next;
905 while (RBI (c)->next)
909 RBI (bb)->next = NULL;
913 /* Return true in case it is possible to duplicate the basic block BB. */
/* NOTE(review): the return statements between checks are elided.  Rejects
   ENTRY/EXIT, fallthru-to-exit blocks, tablejumps, and blocks containing
   insns the target declares uncopyable.  */
916 cfg_layout_can_duplicate_bb_p (bb)
921 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
924 /* Duplicating fallthru block to exit would require adding a jump
925 and splitting the real last BB. */
926 for (s = bb->succ; s; s = s->succ_next)
927 if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
930 /* Do not attempt to duplicate tablejumps, as we need to unshare
931 the dispatch table. This is difficult to do, as the instructions
932 computing jump destination may be hoisted outside the basic block. */
933 if (tablejump_p (bb->end, NULL, NULL))
936 /* Do not duplicate blocks containing insns that can't be copied. */
937 if (targetm.cannot_copy_insn_p)
942 if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
946 insn = NEXT_INSN (insn);
/* Duplicate the insns [FROM; TO] at the end of the current insn chain and
   return the first copied insn.
   NOTE(review): elided extraction -- the return type, braces, several case
   labels and the final return are missing; code kept byte-identical.  */
954 duplicate_insn_chain (from, to)
959 /* Avoid updating of boundaries of previous basic block. The
960 note will get removed from insn stream in fixup. */
961 last = emit_note (NULL, NOTE_INSN_DELETED);
963 /* Create copy at the end of INSN chain. The chain will
964 be reordered later. */
965 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
967 switch (GET_CODE (insn))
972 /* Avoid copying of dispatch tables. We never duplicate
973 tablejumps, so this can hit only in case the table got
974 moved far from original jump. */
975 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
976 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
978 emit_copy_of_insn_after (insn, get_last_insn ());
989 switch (NOTE_LINE_NUMBER (insn))
991 /* In case prologue is empty and function contain label
992 in first BB, we may want to copy the block. */
993 case NOTE_INSN_PROLOGUE_END:
995 case NOTE_INSN_LOOP_VTOP:
996 case NOTE_INSN_LOOP_CONT:
997 case NOTE_INSN_LOOP_BEG:
998 case NOTE_INSN_LOOP_END:
999 /* Strip down the loop notes - we don't really want to keep
1000 them consistent in loop copies. */
1001 case NOTE_INSN_DELETED:
1002 case NOTE_INSN_DELETED_LABEL:
1003 /* No problem to strip these. */
1004 case NOTE_INSN_EPILOGUE_BEG:
1005 case NOTE_INSN_FUNCTION_END:
1006 /* Debug code expect these notes to exist just once.
1007 Keep them in the master copy.
1008 ??? It probably makes more sense to duplicate them for each
1010 case NOTE_INSN_FUNCTION_BEG:
1011 /* There is always just single entry to function. */
1012 case NOTE_INSN_BASIC_BLOCK:
1015 /* There is no purpose to duplicate prologue. */
1016 case NOTE_INSN_BLOCK_BEG:
1017 case NOTE_INSN_BLOCK_END:
1018 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
1019 reordering is in the progress. */
1020 case NOTE_INSN_EH_REGION_BEG:
1021 case NOTE_INSN_EH_REGION_END:
1022 /* Should never exist at BB duplication time. */
1025 case NOTE_INSN_REPEATED_LINE_NUMBER:
1026 emit_line_note (NOTE_SOURCE_FILE (insn),
1027 NOTE_LINE_NUMBER (insn));
1031 if (NOTE_LINE_NUMBER (insn) < 0)
1033 /* It is possible that no_line_number is set and the note
1034 won't be emitted. */
1035 emit_line_note (NOTE_SOURCE_FILE (insn),
1036 NOTE_LINE_NUMBER (insn));
/* Skip past the marker note emitted above; the copies start here.  */
1043 insn = NEXT_INSN (last);
1047 /* Create a duplicate of the basic block BB and redirect edge E into it. */
/* NOTE(review): elided extraction -- declarations, braces and the return
   statement are missing.  Copies the insn chain (plus RBI header/footer),
   creates the new block before EXIT, clones liveness sets and outgoing
   edges with scaled profile counts, then redirects E into the copy and
   links original/copy through RBI.  */
1050 cfg_layout_duplicate_bb (bb, e)
1057 gcov_type new_count = e ? e->count : 0;
1059 if (bb->count < new_count)
1060 new_count = bb->count;
1063 #ifdef ENABLE_CHECKING
1064 if (!cfg_layout_can_duplicate_bb_p (bb))
1068 insn = duplicate_insn_chain (bb->head, bb->end);
1069 new_bb = create_basic_block (insn,
1070 insn ? get_last_insn () : NULL,
1071 EXIT_BLOCK_PTR->prev_bb);
1072 alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
1074 if (RBI (bb)->header)
1076 insn = RBI (bb)->header;
1077 while (NEXT_INSN (insn))
1078 insn = NEXT_INSN (insn);
1079 insn = duplicate_insn_chain (RBI (bb)->header, insn);
1081 RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
1084 if (RBI (bb)->footer)
1086 insn = RBI (bb)->footer;
1087 while (NEXT_INSN (insn))
1088 insn = NEXT_INSN (insn);
1089 insn = duplicate_insn_chain (RBI (bb)->footer, insn);
1091 RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
1094 if (bb->global_live_at_start)
1096 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1097 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1098 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
1099 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1102 new_bb->loop_depth = bb->loop_depth;
1103 new_bb->flags = bb->flags;
1104 for (s = bb->succ; s; s = s->succ_next)
1106 /* Since we are creating edges from a new block to successors
1107 of another block (which therefore are known to be disjoint), there
1108 is no need to actually check for duplicated edges. */
1109 n = unchecked_make_edge (new_bb, s->dest, s->flags);
1110 n->probability = s->probability;
1112 /* Take care for overflows! */
1113 n->count = s->count * (new_count * 10000 / bb->count) / 10000;
1116 s->count -= n->count;
1119 new_bb->count = new_count;
1120 bb->count -= new_count;
1124 new_bb->frequency = EDGE_FREQUENCY (e);
1125 bb->frequency -= EDGE_FREQUENCY (e);
1127 redirect_edge_and_branch_force (e, new_bb);
1132 if (bb->frequency < 0)
1135 RBI (new_bb)->original = bb;
1136 RBI (bb)->copy = new_bb;
1140 /* Main entry point to this module - initialize the datastructures for
1141 CFG layout changes. It keeps LOOPS up-to-date if not null. */
/* NOTE(review): braces elided.  Allocates per-block RBI aux data, installs
   the cfglayout cfg hooks, simplifies unconditional jumps, and records the
   effective block endpoints.  */
1144 cfg_layout_initialize (loops)
1145 struct loops *loops;
1147 /* Our algorithm depends on fact that there are now dead jumptables
1149 alloc_aux_for_blocks (sizeof (struct reorder_block_def));
1150 cfg_layout_rtl_register_cfg_hooks ();
1152 cleanup_unconditional_jumps (loops);
1154 record_effective_endpoints ();
1157 /* Splits superblocks. */
/* NOTE(review): elided extraction -- the `need` flag handling, sbitmap_free
   and braces are missing.  Collects blocks flagged BB_SUPERBLOCK into a
   bitmap, clears the flag, and re-discovers their sub-basic-blocks.  */
1159 break_superblocks ()
1161 sbitmap superblocks;
1164 superblocks = sbitmap_alloc (n_basic_blocks);
1165 sbitmap_zero (superblocks);
1169 for (i = 0; i < n_basic_blocks; i++)
1170 if (BASIC_BLOCK(i)->flags & BB_SUPERBLOCK)
1172 BASIC_BLOCK(i)->flags &= ~BB_SUPERBLOCK;
1173 SET_BIT (superblocks, i);
1179 rebuild_jump_labels (get_insns ());
1180 find_many_sub_basic_blocks (superblocks);
1186 /* Finalize the changes: reorder insn list according to the sequence, enter
1187 compensation code, rebuild scope forest. */
/* NOTE(review): braces and #endif lines elided.  Counterpart of
   cfg_layout_initialize: restores the plain RTL cfg hooks, materializes
   the chosen block order (fixing the exit fallthru first), frees the RBI
   aux data and splits any superblocks, with verification passes under
   ENABLE_CHECKING.  */
1190 cfg_layout_finalize ()
1192 #ifdef ENABLE_CHECKING
1193 verify_flow_info ();
1195 rtl_register_cfg_hooks ();
1196 fixup_fallthru_exit_predecessor ();
1197 fixup_reorder_chain ();
1199 #ifdef ENABLE_CHECKING
1200 verify_insn_chain ();
1203 free_aux_for_blocks ();
1205 break_superblocks ();
1207 #ifdef ENABLE_CHECKING
1208 verify_flow_info ();
1212 #include "gt-cfglayout.h"