1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
33 #include "cfglayout.h"
38 /* The contents of the current function definition are allocated
39 in this obstack, and all are freed at the end of the function. */
40 extern struct obstack flow_obstack;
42 /* Holds the interesting trailing notes for the function. */
43 rtx cfg_layout_function_footer;
/* Forward declarations for the file-local helpers defined below.
   NOTE(review): this listing elides some original lines (embedded line
   numbers jump), so a few prototypes may be missing from this extract.  */
45 static rtx skip_insns_after_block (basic_block);
46 static void record_effective_endpoints (void);
47 static rtx label_for_bb (basic_block);
48 static void fixup_reorder_chain (void);
50 static void set_block_levels (tree, int);
51 static void change_scope (rtx, tree, tree);
/* verify_insn_chain is deliberately extern so other passes can call it.  */
53 void verify_insn_chain (void);
54 static void cleanup_unconditional_jumps (struct loops *);
55 static void fixup_fallthru_exit_predecessor (void);
56 static rtx duplicate_insn_chain (rtx, rtx);
57 static void break_superblocks (void);
58 static tree insn_scope (rtx);
/* Detach the insns FIRST..LAST (inclusive) from the global insn chain and
   splice the neighbouring insns together around the removed span.
   NOTE(review): the return-type line and the guards around the boundary
   updates are elided in this extract; presumably prevfirst/nextlast are
   NULL-checked before use and FIRST is returned — confirm against the
   full source.  */
61 unlink_insn_chain (rtx first, rtx last)
63 rtx prevfirst = PREV_INSN (first);
64 rtx nextlast = NEXT_INSN (last);
/* Sever the extracted span at both ends.  */
66 PREV_INSN (first) = NULL;
67 NEXT_INSN (last) = NULL;
/* Re-link the remaining chain across the gap.  */
69 NEXT_INSN (prevfirst) = nextlast;
71 PREV_INSN (nextlast) = prevfirst;
/* Keep the function-wide first/last insn pointers in sync when the
   removed span touched either end of the chain.  */
73 set_last_insn (prevfirst);
75 set_first_insn (nextlast);
79 /* Skip over inter-block insns occurring after BB which are typically
80 associated with BB (e.g., barriers). If there are any such insns,
81 we return the last one. Otherwise, we return the end of BB. */
84 skip_insns_after_block (basic_block bb)
86 rtx insn, last_insn, next_head, prev;
/* Never walk into the head of the following block.  */
89 if (bb->next_bb != EXIT_BLOCK_PTR)
90 next_head = bb->next_bb->head;
/* Forward scan from BB's end, classifying trailing insns.
   NOTE(review): several case bodies are elided in this extract.  */
92 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
94 if (insn == next_head)
97 switch (GET_CODE (insn))
104 switch (NOTE_LINE_NUMBER (insn))
106 case NOTE_INSN_LOOP_END:
107 case NOTE_INSN_BLOCK_END:
110 case NOTE_INSN_DELETED:
111 case NOTE_INSN_DELETED_LABEL:
/* A label immediately followed by a jump table: the table belongs
   with this block, so swallow the jump insn too.  */
122 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
123 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
124 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
126 insn = NEXT_INSN (insn);
139 /* It is possible to hit contradictory sequence. For instance:
145 Where barrier belongs to jump_insn, but the note does not. This can be
146 created by removing the basic block originally following
147 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
/* Backward pass: move stray notes found between bb->end and last_insn
   to after last_insn so the trailing span stays well-formed.  */
149 for (insn = last_insn; insn != bb->end; insn = prev)
151 prev = PREV_INSN (insn);
152 if (GET_CODE (insn) == NOTE)
153 switch (NOTE_LINE_NUMBER (insn))
155 case NOTE_INSN_LOOP_END:
156 case NOTE_INSN_BLOCK_END:
157 case NOTE_INSN_DELETED:
158 case NOTE_INSN_DELETED_LABEL:
161 reorder_insns (insn, insn, last_insn);
168 /* Locate or create a label for a given basic block. */
171 label_for_bb (basic_block bb)
173 rtx label = bb->head;
/* If BB does not already start with a label, emit one (block_label
   creates it); log the event when RTL dumping is enabled.  */
175 if (GET_CODE (label) != CODE_LABEL)
178 fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
180 label = block_label (bb);
186 /* Locate the effective beginning and end of the insn chain for each
187 block, as defined by skip_insns_after_block above. */
190 record_effective_endpoints (void)
192 rtx next_insn = get_insns ();
/* For each block, unlink the insns preceding its head into the block's
   RBI header, and the trailing insns after its end into its footer.
   NOTE(review): the FOR_EACH_BB loop header is elided in this extract.  */
199 if (PREV_INSN (bb->head) && next_insn != bb->head)
200 RBI (bb)->header = unlink_insn_chain (next_insn,
201 PREV_INSN (bb->head));
202 end = skip_insns_after_block (bb);
203 if (NEXT_INSN (bb->end) && bb->end != end)
204 RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
205 next_insn = NEXT_INSN (bb->end);
/* Whatever trails the last block becomes the function footer.  */
208 cfg_layout_function_footer = next_insn;
209 if (cfg_layout_function_footer)
210 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ())";
213 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
214 numbers and files. In order to be GGC friendly we need to use separate
215 varrays. This also slightly improve the memory locality in binary search.
216 The _locs array contains locators where the given property change. The
217 block_locators_blocks contains the scope block that is used for all insn
218 locator greater than corresponding block_locators_locs value and smaller
219 than the following one. Similarly for the other properties. */
220 static GTY(()) varray_type block_locators_locs;
221 static GTY(()) varray_type block_locators_blocks;
222 static GTY(()) varray_type line_locators_locs;
223 static GTY(()) varray_type line_locators_lines;
224 static GTY(()) varray_type file_locators_locs;
225 static GTY(()) varray_type file_locators_files;
/* Locators assigned to the prologue/epilogue; 0 until set by
   insn_locators_initialize below.  */
226 int prologue_locator;
227 int epilogue_locator;
229 /* During the RTL expansion the lexical blocks and line numbers are
230 represented via INSN_NOTEs. Replace them by representation using
234 insn_locators_initialize (void)
237 tree last_block = NULL;
240 int line_number = 0, last_line_number = 0;
241 char *file_name = NULL, *last_file_name = NULL;
243 prologue_locator = epilogue_locator = 0;
/* Allocate the six parallel varrays declared above.  */
245 VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
246 VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
247 VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
248 VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
249 VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
250 VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
/* Walk the whole insn stream, tracking the current scope block, line
   and file, and push a varray entry whenever one of them changes.
   NOTE(review): the lines allocating a fresh locator (loc) are elided
   in this extract.  */
252 for (insn = get_insns (); insn; insn = next)
254 next = NEXT_INSN (insn);
/* Record a change point at real insns (jump tables excluded), or
   before the first real insn so the prologue gets a locator.  */
256 if ((active_insn_p (insn)
257 && GET_CODE (PATTERN (insn)) != ADDR_VEC
258 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
260 || (!prologue_locator && file_name))
262 if (last_block != block)
265 VARRAY_PUSH_INT (block_locators_locs, loc);
266 VARRAY_PUSH_TREE (block_locators_blocks, block);
269 if (last_line_number != line_number)
272 VARRAY_PUSH_INT (line_locators_locs, loc);
273 VARRAY_PUSH_INT (line_locators_lines, line_number);
274 last_line_number = line_number;
276 if (last_file_name != file_name)
279 VARRAY_PUSH_INT (file_locators_locs, loc);
280 VARRAY_PUSH_CHAR_PTR (file_locators_files, file_name);
281 last_file_name = file_name;
284 if (!prologue_locator && file_name)
285 prologue_locator = loc;
286 if (!NEXT_INSN (insn))
287 epilogue_locator = loc;
288 if (active_insn_p (insn))
289 INSN_LOCATOR (insn) = loc;
290 else if (GET_CODE (insn) == NOTE)
/* Scope and line notes update the tracked state (the notes themselves
   are presumably deleted here — the deletion lines are elided).  */
292 switch (NOTE_LINE_NUMBER (insn))
294 case NOTE_INSN_BLOCK_BEG:
295 block = NOTE_BLOCK (insn);
298 case NOTE_INSN_BLOCK_END:
299 block = BLOCK_SUPERCONTEXT (block);
300 if (block && TREE_CODE (block) == FUNCTION_DECL)
305 if (NOTE_LINE_NUMBER (insn) > 0)
307 line_number = NOTE_LINE_NUMBER (insn);
308 file_name = (char *)NOTE_SOURCE_FILE (insn);
315 /* Tag the blocks with a depth number so that change_scope can find
316 the common parent easily. */
317 set_block_levels (DECL_INITIAL (cfun->decl), 0);
320 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
321 found in the block tree. */
324 set_block_levels (tree block, int level)
/* Iterate over siblings via BLOCK_CHAIN, recursing into subblocks with
   depth LEVEL + 1.  (The enclosing while-loop line is elided here.)  */
328 BLOCK_NUMBER (block) = level;
329 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
330 block = BLOCK_CHAIN (block);
334 /* Return sope resulting from combination of S1 and S2. */
336 choose_inner_scope (tree s1, tree s2)
/* Deeper BLOCK_NUMBER wins; the NULL-handling and return lines are
   elided in this extract.  */
342 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
347 /* Emit lexical block notes needed to change scope from S1 to S2. */
350 change_scope (rtx orig_insn, tree s1, tree s2)
352 rtx insn = orig_insn;
353 tree com = NULL_TREE;
354 tree ts1 = s1, ts2 = s2;
/* Walk ts1 and ts2 up the scope tree until they meet; equal-depth
   blocks are raised in lockstep.  The common ancestor is COM.  */
359 if (ts1 == NULL || ts2 == NULL)
361 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
362 ts1 = BLOCK_SUPERCONTEXT (ts1);
363 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
364 ts2 = BLOCK_SUPERCONTEXT (ts2);
367 ts1 = BLOCK_SUPERCONTEXT (ts1);
368 ts2 = BLOCK_SUPERCONTEXT (ts2);
/* Close scopes from S1 up to the common ancestor...  */
377 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
378 NOTE_BLOCK (note) = s;
379 s = BLOCK_SUPERCONTEXT (s);
/* ...then open scopes from the common ancestor down to S2 (presumably
   emitted in reverse order — the surrounding loop lines are elided).  */
386 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
387 NOTE_BLOCK (insn) = s;
388 s = BLOCK_SUPERCONTEXT (s);
392 /* Return lexical scope block insn belong to. */
394 insn_scope (rtx insn)
/* Binary search in block_locators_locs for the greatest change point
   not exceeding this insn's locator.  */
396 int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
398 int loc = INSN_LOCATOR (insn);
404 int pos = (min + max) / 2;
405 int tmp = VARRAY_INT (block_locators_locs, pos);
407 if (tmp <= loc && min != pos)
409 else if (tmp > loc && max != pos)
417 return VARRAY_TREE (block_locators_blocks, min);
420 /* Return line number of the statement that produced this insn. */
/* NOTE(review): the function's signature line is elided here; the same
   binary-search shape as insn_scope above, over the line varrays.  */
424 int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
426 int loc = INSN_LOCATOR (insn);
432 int pos = (min + max) / 2;
433 int tmp = VARRAY_INT (line_locators_locs, pos);
435 if (tmp <= loc && min != pos)
437 else if (tmp > loc && max != pos)
445 return VARRAY_INT (line_locators_lines, min);
448 /* Return source file of the statement that produced this insn. */
/* NOTE(review): signature line elided; identical binary search over the
   file varrays.  */
452 int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
454 int loc = INSN_LOCATOR (insn);
460 int pos = (min + max) / 2;
461 int tmp = VARRAY_INT (file_locators_locs, pos);
463 if (tmp <= loc && min != pos)
465 else if (tmp > loc && max != pos)
473 return VARRAY_CHAR_PTR (file_locators_files, min);
476 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
477 on the scope tree and the newly reordered instructions. */
480 reemit_insn_block_notes (void)
482 tree cur_block = DECL_INITIAL (cfun->decl);
/* Start at the first active insn and walk all active insns.  */
486 if (!active_insn_p (insn))
487 insn = next_active_insn (insn);
488 for (; insn; insn = next_active_insn (insn))
492 this_block = insn_scope (insn);
493 /* For sequences compute scope resulting from merging all scopes
494 of instructions nested inside. */
495 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
498 rtx body = PATTERN (insn);
501 for (i = 0; i < XVECLEN (body, 0); i++)
502 this_block = choose_inner_scope (this_block,
503 insn_scope (XVECEXP (body, 0, i)));
/* Emit the notes needed to move from the current scope to this one.  */
508 if (this_block != cur_block)
510 change_scope (insn, cur_block, this_block);
511 cur_block = this_block;
515 /* change_scope emits before the insn, not after. */
516 note = emit_note (NULL, NOTE_INSN_DELETED);
517 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
523 /* Given a reorder chain, rearrange the code to match. */
526 fixup_reorder_chain (void)
528 basic_block bb, prev_bb;
532 /* First do the bulk reordering -- rechain the blocks without regard to
533 the needed changes to jumps and labels. */
535 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
537 bb = RBI (bb)->next, index++)
/* Splice each block's saved header, then its insns, then its footer
   onto the growing chain.  (Several brace/else lines are elided.)  */
539 if (RBI (bb)->header)
542 NEXT_INSN (insn) = RBI (bb)->header;
544 set_first_insn (RBI (bb)->header);
545 PREV_INSN (RBI (bb)->header) = insn;
546 insn = RBI (bb)->header;
547 while (NEXT_INSN (insn))
548 insn = NEXT_INSN (insn);
551 NEXT_INSN (insn) = bb->head;
553 set_first_insn (bb->head);
554 PREV_INSN (bb->head) = insn;
556 if (RBI (bb)->footer)
558 NEXT_INSN (insn) = RBI (bb)->footer;
559 PREV_INSN (RBI (bb)->footer) = insn;
560 while (NEXT_INSN (insn))
561 insn = NEXT_INSN (insn);
/* Every block must have been visited exactly once.  */
565 if (index != n_basic_blocks)
/* Re-attach the saved function footer at the very end.  */
568 NEXT_INSN (insn) = cfg_layout_function_footer;
569 if (cfg_layout_function_footer)
570 PREV_INSN (cfg_layout_function_footer) = insn;
572 while (NEXT_INSN (insn))
573 insn = NEXT_INSN (insn);
575 set_last_insn (insn);
576 #ifdef ENABLE_CHECKING
577 verify_insn_chain ();
580 /* Now add jumps and labels as needed to match the blocks new
583 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
585 edge e_fall, e_taken, e;
/* Blocks with no successors (e.g. ending in a return) need no fixup.  */
589 if (bb->succ == NULL)
592 /* Find the old fallthru edge, and another non-EH edge for
594 e_taken = e_fall = NULL;
595 for (e = bb->succ; e ; e = e->succ_next)
596 if (e->flags & EDGE_FALLTHRU)
598 else if (! (e->flags & EDGE_EH))
601 bb_end_insn = bb->end;
602 if (GET_CODE (bb_end_insn) == JUMP_INSN)
604 if (any_condjump_p (bb_end_insn))
606 /* If the old fallthru is still next, nothing to do. */
607 if (RBI (bb)->next == e_fall->dest
609 && e_fall->dest == EXIT_BLOCK_PTR))
612 /* The degenerated case of conditional jump jumping to the next
613 instruction can happen on target having jumps with side
616 Create temporarily the duplicated edge representing branch.
617 It will get unidentified by force_nonfallthru_and_redirect
618 that would otherwise get confused by fallthru edge not pointing
619 to the next basic block. */
625 e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
627 if (!redirect_jump (bb->end, block_label (bb), 0))
/* Split the probability/count of the fallthru edge between the
   real edge and the fake branch edge, clamping at zero.  */
629 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
632 int prob = INTVAL (XEXP (note, 0));
634 e_fake->probability = prob;
635 e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
636 e_fall->probability -= e_fall->probability;
637 e_fall->count -= e_fake->count;
638 if (e_fall->probability < 0)
639 e_fall->probability = 0;
640 if (e_fall->count < 0)
644 /* There is one special case: if *neither* block is next,
645 such as happens at the very end of a function, then we'll
646 need to add a new unconditional jump. Choose the taken
647 edge based on known or assumed probability. */
648 else if (RBI (bb)->next != e_taken->dest)
650 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
/* Prefer to fall through on the more probable edge; invert the
   jump when the taken edge is the unlikely one.  */
653 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
654 && invert_jump (bb_end_insn,
655 label_for_bb (e_fall->dest), 0))
657 e_fall->flags &= ~EDGE_FALLTHRU;
658 e_taken->flags |= EDGE_FALLTHRU;
659 update_br_prob_note (bb);
660 e = e_fall, e_fall = e_taken, e_taken = e;
664 /* Otherwise we can try to invert the jump. This will
665 basically never fail, however, keep up the pretense. */
666 else if (invert_jump (bb_end_insn,
667 label_for_bb (e_fall->dest), 0))
669 e_fall->flags &= ~EDGE_FALLTHRU;
670 e_taken->flags |= EDGE_FALLTHRU;
671 update_br_prob_note (bb);
675 else if (returnjump_p (bb_end_insn))
679 /* Otherwise we have some switch or computed jump. In the
680 99% case, there should not have been a fallthru edge. */
684 #ifdef CASE_DROPS_THROUGH
685 /* Except for VAX. Since we didn't have predication for the
686 tablejump, the fallthru block should not have moved. */
687 if (RBI (bb)->next == e_fall->dest)
689 bb_end_insn = skip_insns_after_block (bb);
697 /* No fallthru implies a noreturn function with EH edges, or
698 something similarly bizarre. In any case, we don't need to
703 /* If the fallthru block is still next, nothing to do. */
704 if (RBI (bb)->next == e_fall->dest)
707 /* A fallthru to exit block. */
708 if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
712 /* We got here if we need to add a new jump insn. */
713 nb = force_nonfallthru (e_fall);
/* The compensation block created by force_nonfallthru is spliced into
   the layout chain right after BB and marked visited.  */
716 alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
717 RBI (nb)->visited = 1;
718 RBI (nb)->next = RBI (bb)->next;
720 /* Don't process this new block. */
725 /* Put basic_block_info in the new order. */
/* Optional dump of the final sequence when RTL dumping is enabled.  */
729 fprintf (rtl_dump_file, "Reordered sequence:\n");
730 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = RBI (bb)->next, index ++)
732 fprintf (rtl_dump_file, " %i ", index);
733 if (RBI (bb)->original)
734 fprintf (rtl_dump_file, "duplicate of %i ",
735 RBI (bb)->original->index);
736 else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
737 fprintf (rtl_dump_file, "compensation ");
739 fprintf (rtl_dump_file, "bb %i ", bb->index);
740 fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
/* Finally rewrite BASIC_BLOCK and the prev_bb/next_bb links to match
   the layout order.  */
744 prev_bb = ENTRY_BLOCK_PTR;
745 bb = ENTRY_BLOCK_PTR->next_bb;
748 for (; bb; prev_bb = bb, bb = RBI (bb)->next, index ++)
751 BASIC_BLOCK (index) = bb;
753 bb->prev_bb = prev_bb;
754 prev_bb->next_bb = bb;
756 prev_bb->next_bb = EXIT_BLOCK_PTR;
757 EXIT_BLOCK_PTR->prev_bb = prev_bb;
760 /* Perform sanity checks on the insn chain.
761 1. Check that next/prev pointers are consistent in both the forward and
763 2. Count insns in chain, going both directions, and check if equal.
764 3. Check that get_last_insn () returns the actual end of chain. */
767 verify_insn_chain (void)
770 int insn_cnt1, insn_cnt2;
/* Forward pass: each insn's PREV_INSN must point at its predecessor.  */
772 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
774 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
775 if (PREV_INSN (x) != prevx)
778 if (prevx != get_last_insn ())
/* Backward pass: mirror check via NEXT_INSN.  */
781 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
783 nextx = x, insn_cnt2++, x = PREV_INSN (x))
784 if (NEXT_INSN (x) != nextx)
/* Both traversals must have seen the same number of insns.  */
787 if (insn_cnt1 != insn_cnt2)
791 /* Remove any unconditional jumps and forwarder block creating fallthru
792 edges instead. During BB reordering, fallthru edges are not required
793 to target next basic block in the linear CFG layout, so the unconditional
794 jumps are not needed. If LOOPS is not null, also update loop structure &
798 cleanup_unconditional_jumps (struct loops *loops)
/* (The FOR_EACH_BB loop header is elided in this extract.)  */
806 if (bb->succ->flags & EDGE_FALLTHRU)
808 if (!bb->succ->succ_next)
/* Case 1: a label-less forwarder block with a single entry edge can
   be removed entirely, redirecting its predecessor to its target.  */
811 if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
812 && bb->prev_bb != ENTRY_BLOCK_PTR)
814 basic_block prev = bb->prev_bb;
817 fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
822 /* bb cannot be loop header, as it only has one entry
823 edge. It could be a loop latch. */
824 if (bb->loop_father->header == bb)
827 if (bb->loop_father->latch == bb)
828 bb->loop_father->latch = bb->pred->src;
/* Keep the dominator tree consistent before deleting BB.  */
830 if (get_immediate_dominator
831 (loops->cfg.dom, bb->succ->dest) == bb)
832 set_immediate_dominator
833 (loops->cfg.dom, bb->succ->dest, bb->pred->src);
835 remove_bb_from_loops (bb);
836 delete_from_dominance_info (loops->cfg.dom, bb);
839 redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
/* Case 2: a simple unconditional jump can be deleted, turning its
   edge into a fallthru (layout mode doesn't need the jump).  */
843 else if (simplejump_p (bb->end))
848 fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
849 INSN_UID (jump), bb->index);
851 bb->succ->flags |= EDGE_FALLTHRU;
/* Clean up stray barriers left between this block and the next
   basic-block note.  */
856 insn = NEXT_INSN (bb->end);
858 && (GET_CODE (insn) != NOTE
859 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
861 rtx next = NEXT_INSN (insn);
863 if (GET_CODE (insn) == BARRIER)
864 delete_barrier (insn);
872 /* The block falling through to exit must be the last one in the
873 reordered chain. Ensure that this condition is met. */
875 fixup_fallthru_exit_predecessor (void)
878 basic_block bb = NULL;
/* Find the (unique) fallthru predecessor of the exit block, if any.  */
880 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
881 if (e->flags & EDGE_FALLTHRU)
/* If it isn't already last in the layout chain, unlink it and move it
   to the tail.  */
884 if (bb && RBI (bb)->next)
886 basic_block c = ENTRY_BLOCK_PTR->next_bb;
888 while (RBI (c)->next != bb)
891 RBI (c)->next = RBI (bb)->next;
892 while (RBI (c)->next)
896 RBI (bb)->next = NULL;
900 /* Return true in case it is possible to duplicate the basic block BB. */
903 cfg_layout_can_duplicate_bb_p (basic_block bb)
/* Entry and exit blocks are never duplicable.  */
907 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
910 /* Duplicating fallthru block to exit would require adding a jump
911 and splitting the real last BB. */
912 for (s = bb->succ; s; s = s->succ_next)
913 if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
916 /* Do not attempt to duplicate tablejumps, as we need to unshare
917 the dispatch table. This is difficult to do, as the instructions
918 computing jump destination may be hoisted outside the basic block. */
919 if (tablejump_p (bb->end, NULL, NULL))
922 /* Do not duplicate blocks containing insns that can't be copied. */
923 if (targetm.cannot_copy_insn_p)
928 if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
932 insn = NEXT_INSN (insn);
/* Copy the insns FROM..TO (inclusive) to the end of the insn chain and
   return the first insn of the copy.  Jump tables and most notes are
   deliberately not duplicated.  */
940 duplicate_insn_chain (rtx from, rtx to)
944 /* Avoid updating of boundaries of previous basic block. The
945 note will get removed from insn stream in fixup. */
946 last = emit_note (NULL, NOTE_INSN_DELETED);
948 /* Create copy at the end of INSN chain. The chain will
949 be reordered later. */
950 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
952 switch (GET_CODE (insn))
957 /* Avoid copying of dispatch tables. We never duplicate
958 tablejumps, so this can hit only in case the table got
959 moved far from original jump. */
960 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
961 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
962 emit_copy_of_insn_after (insn, get_last_insn ());
/* NOTE insns: most kinds are intentionally dropped from the copy;
   only line-number notes are re-emitted below.  */
974 switch (NOTE_LINE_NUMBER (insn))
976 /* In case prologue is empty and function contain label
977 in first BB, we may want to copy the block. */
978 case NOTE_INSN_PROLOGUE_END:
980 case NOTE_INSN_LOOP_VTOP:
981 case NOTE_INSN_LOOP_CONT:
982 case NOTE_INSN_LOOP_BEG:
983 case NOTE_INSN_LOOP_END:
984 /* Strip down the loop notes - we don't really want to keep
985 them consistent in loop copies. */
986 case NOTE_INSN_DELETED:
987 case NOTE_INSN_DELETED_LABEL:
988 /* No problem to strip these. */
989 case NOTE_INSN_EPILOGUE_BEG:
990 case NOTE_INSN_FUNCTION_END:
991 /* Debug code expect these notes to exist just once.
992 Keep them in the master copy.
993 ??? It probably makes more sense to duplicate them for each
995 case NOTE_INSN_FUNCTION_BEG:
996 /* There is always just single entry to function. */
997 case NOTE_INSN_BASIC_BLOCK:
1000 /* There is no purpose to duplicate prologue. */
1001 case NOTE_INSN_BLOCK_BEG:
1002 case NOTE_INSN_BLOCK_END:
1003 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
1004 reordering is in the progress. */
1005 case NOTE_INSN_EH_REGION_BEG:
1006 case NOTE_INSN_EH_REGION_END:
1007 /* Should never exist at BB duplication time. */
1010 case NOTE_INSN_REPEATED_LINE_NUMBER:
1011 emit_line_note (NOTE_SOURCE_FILE (insn),
1012 NOTE_LINE_NUMBER (insn));
/* Positive NOTE_LINE_NUMBER means an ordinary line note.  */
1016 if (NOTE_LINE_NUMBER (insn) < 0)
1018 /* It is possible that no_line_number is set and the note
1019 won't be emitted. */
1020 emit_line_note (NOTE_SOURCE_FILE (insn),
1021 NOTE_LINE_NUMBER (insn));
/* Skip the placeholder note emitted above; the copy starts after it.  */
1028 insn = NEXT_INSN (last);
1032 /* Create a duplicate of the basic block BB and redirect edge E into it. */
1035 cfg_layout_duplicate_bb (basic_block bb, edge e)
/* The duplicate's profile count: the count carried by E, clamped to
   BB's own count.  */
1040 gcov_type new_count = e ? e->count : 0;
1042 if (bb->count < new_count)
1043 new_count = bb->count;
1046 #ifdef ENABLE_CHECKING
1047 if (!cfg_layout_can_duplicate_bb_p (bb))
/* Copy the block's insns and build the new block just before exit.  */
1051 insn = duplicate_insn_chain (bb->head, bb->end);
1052 new_bb = create_basic_block (insn,
1053 insn ? get_last_insn () : NULL,
1054 EXIT_BLOCK_PTR->prev_bb);
1055 alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
/* Duplicate the out-of-block header/footer insn chains as well.  */
1057 if (RBI (bb)->header)
1059 insn = RBI (bb)->header;
1060 while (NEXT_INSN (insn))
1061 insn = NEXT_INSN (insn);
1062 insn = duplicate_insn_chain (RBI (bb)->header, insn);
1064 RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
1067 if (RBI (bb)->footer)
1069 insn = RBI (bb)->footer;
1070 while (NEXT_INSN (insn))
1071 insn = NEXT_INSN (insn);
1072 insn = duplicate_insn_chain (RBI (bb)->footer, insn);
1074 RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
/* Copy liveness information when it is available.  */
1077 if (bb->global_live_at_start)
1079 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1080 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1081 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
1082 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1085 new_bb->loop_depth = bb->loop_depth;
1086 new_bb->flags = bb->flags;
1087 for (s = bb->succ; s; s = s->succ_next)
1089 /* Since we are creating edges from a new block to successors
1090 of another block (which therefore are known to be disjoint), there
1091 is no need to actually check for duplicated edges. */
1092 n = unchecked_make_edge (new_bb, s->dest, s->flags);
1093 n->probability = s->probability;
1095 /* Take care for overflows! */
1096 n->count = s->count * (new_count * 10000 / bb->count) / 10000;
1099 s->count -= n->count;
/* Split profile counts/frequencies between original and duplicate.  */
1102 new_bb->count = new_count;
1103 bb->count -= new_count;
1107 new_bb->frequency = EDGE_FREQUENCY (e);
1108 bb->frequency -= EDGE_FREQUENCY (e);
1110 redirect_edge_and_branch_force (e, new_bb);
1115 if (bb->frequency < 0)
/* Cross-link original and copy for later passes (see dump above).  */
1118 RBI (new_bb)->original = bb;
1119 RBI (bb)->copy = new_bb;
1123 /* Main entry point to this module - initialize the datastructures for
1124 CFG layout changes. It keeps LOOPS up-to-date if not null. */
1127 cfg_layout_initialize (struct loops *loops)
1129 /* Our algorithm depends on fact that there are now dead jumptables
/* Allocate per-block reorder_block_def aux data and switch the CFG
   hooks to layout mode before transforming the chain.  */
1131 alloc_aux_for_blocks (sizeof (struct reorder_block_def));
1132 cfg_layout_rtl_register_cfg_hooks ();
1134 cleanup_unconditional_jumps (loops);
1136 record_effective_endpoints ();
1139 /* Splits superblocks. */
1141 break_superblocks (void)
1143 sbitmap superblocks;
/* Collect the set of blocks flagged BB_SUPERBLOCK, clearing the flag.  */
1146 superblocks = sbitmap_alloc (n_basic_blocks);
1147 sbitmap_zero (superblocks);
1151 for (i = 0; i < n_basic_blocks; i++)
1152 if (BASIC_BLOCK(i)->flags & BB_SUPERBLOCK)
1154 BASIC_BLOCK(i)->flags &= ~BB_SUPERBLOCK;
1155 SET_BIT (superblocks, i);
/* Re-derive labels and split the collected superblocks into proper
   basic blocks.  (The guard and sbitmap_free lines are elided.)  */
1161 rebuild_jump_labels (get_insns ());
1162 find_many_sub_basic_blocks (superblocks);
1168 /* Finalize the changes: reorder insn list according to the sequence, enter
1169 compensation code, rebuild scope forest. */
1172 cfg_layout_finalize (void)
1174 #ifdef ENABLE_CHECKING
1175 verify_flow_info ();
/* Restore the normal (non-layout) CFG hooks, then materialize the
   chosen block order in the insn stream.  */
1177 rtl_register_cfg_hooks ();
1178 fixup_fallthru_exit_predecessor ();
1179 fixup_reorder_chain ();
1181 #ifdef ENABLE_CHECKING
1182 verify_insn_chain ();
/* Drop the per-block reorder aux data and split any superblocks the
   reordering produced.  */
1185 free_aux_for_blocks ();
1187 break_superblocks ();
1189 #ifdef ENABLE_CHECKING
1190 verify_flow_info ();
1194 #include "gt-cfglayout.h"