1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "insn-config.h"
33 #include "cfglayout.h"
37 #include "alloc-pool.h"
/* NOTE(review): this listing is sampled -- the embedded original line
   numbers jump, so intermediate source lines are elided throughout
   this file.  Comments below are grounded only in the visible lines.  */
39 /* The contents of the current function definition are allocated
40 in this obstack, and all are freed at the end of the function. */
41 extern struct obstack flow_obstack;
/* Pool from which the per-basic-block reorder_block_def (rbi) structures
   are allocated; created in cfg_layout_initialize and released in
   cfg_layout_finalize.  */
43 alloc_pool cfg_layout_pool;
45 /* Holds the interesting trailing notes for the function. */
46 rtx cfg_layout_function_footer, cfg_layout_function_header;
/* Forward declarations of the static helpers defined below.  */
48 static rtx skip_insns_after_block (basic_block);
49 static void record_effective_endpoints (void);
50 static rtx label_for_bb (basic_block);
51 static void fixup_reorder_chain (void);
53 static void set_block_levels (tree, int);
54 static void change_scope (rtx, tree, tree);
56 void verify_insn_chain (void);
57 static void fixup_fallthru_exit_predecessor (void);
58 static rtx duplicate_insn_chain (rtx, rtx);
59 static void break_superblocks (void);
60 static tree insn_scope (rtx);
/* Detach the insn chain FIRST..LAST from the function's insn stream,
   NULL-terminating the detached chain and splicing its former neighbors
   together.  The set_first_insn/set_last_insn calls below presumably run
   only when FIRST/LAST were at the stream boundaries -- the guarding
   conditions are elided in this listing (original lines 70/72/74/76).  */
63 unlink_insn_chain (rtx first, rtx last)
65 rtx prevfirst = PREV_INSN (first);
66 rtx nextlast = NEXT_INSN (last);
/* Terminate the detached chain at both ends.  */
68 PREV_INSN (first) = NULL;
69 NEXT_INSN (last) = NULL;
/* Re-link the remaining stream around the removed span.  */
71 NEXT_INSN (prevfirst) = nextlast;
73 PREV_INSN (nextlast) = prevfirst;
75 set_last_insn (prevfirst);
77 set_first_insn (nextlast);
81 /* Skip over inter-block insns occurring after BB which are typically
82 associated with BB (e.g., barriers). If there are any such insns,
83 we return the last one. Otherwise, we return the end of BB. */
86 skip_insns_after_block (basic_block bb)
88 rtx insn, last_insn, next_head, prev;
/* Head of the next block bounds the forward scan; the else-branch
   initializing next_head for the EXIT_BLOCK_PTR case is elided here.  */
91 if (bb->next_bb != EXIT_BLOCK_PTR)
92 next_head = bb->next_bb->head;
94 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
96 if (insn == next_head)
/* Classify each trailing insn; most case bodies (BARRIER, CODE_LABEL,
   default) are elided from this listing.  */
99 switch (GET_CODE (insn))
106 switch (NOTE_LINE_NUMBER (insn))
108 case NOTE_INSN_LOOP_END:
109 case NOTE_INSN_BLOCK_END:
112 case NOTE_INSN_DELETED:
113 case NOTE_INSN_DELETED_LABEL:
/* A label immediately followed by a jump table (ADDR_VEC/ADDR_DIFF_VEC)
   belongs with this block; swallow both.  */
124 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
125 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
126 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
128 insn = NEXT_INSN (insn);
141 /* It is possible to hit contradictory sequence. For instance:
147 Where barrier belongs to jump_insn, but the note does not. This can be
148 created by removing the basic block originally following
149 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
151 for (insn = last_insn; insn != bb->end; insn = prev)
153 prev = PREV_INSN (insn);
154 if (GET_CODE (insn) == NOTE)
155 switch (NOTE_LINE_NUMBER (insn))
157 case NOTE_INSN_LOOP_END:
158 case NOTE_INSN_BLOCK_END:
159 case NOTE_INSN_DELETED:
160 case NOTE_INSN_DELETED_LABEL:
/* Move the stray note to the end of the trailing run.  */
163 reorder_insns (insn, insn, last_insn);
170 /* Locate or create a label for a given basic block. */
173 label_for_bb (basic_block bb)
175 rtx label = bb->head;
/* If the block does not already start with a CODE_LABEL, create one
   via block_label; the fprintf is presumably guarded by a check of
   rtl_dump_file that is elided in this listing.  */
177 if (GET_CODE (label) != CODE_LABEL)
180 fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
182 label = block_label (bb);
188 /* Locate the effective beginning and end of the insn chain for each
189 block, as defined by skip_insns_after_block above. */
192 record_effective_endpoints (void)
/* Scan the leading notes; stop at the first basic-block note, remembering
   whether a NOTE_INSN_FUNCTION_BEG was seen (intervening logic elided).  */
198 for (insn = get_insns ();
199 NEXT_INSN (insn) && GET_CODE (insn) == NOTE;
200 insn = NEXT_INSN (insn))
202 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
207 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
/* Detach everything before the first block as the function header.  */
211 cfg_layout_function_header = unlink_insn_chain (get_insns (), insn);
213 cfg_layout_function_header = NULL_RTX;
215 next_insn = get_insns ();
/* Per-block: anything between the previous block's end and bb->head is
   this block's rbi->header; anything after bb->end up to the insn found by
   skip_insns_after_block is its rbi->footer.  The FOR_EACH_BB loop header
   is elided in this listing.  */
220 if (PREV_INSN (bb->head) && next_insn != bb->head)
221 bb->rbi->header = unlink_insn_chain (next_insn,
222 PREV_INSN (bb->head));
223 end = skip_insns_after_block (bb);
224 if (NEXT_INSN (bb->end) && bb->end != end)
225 bb->rbi->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
226 next_insn = NEXT_INSN (bb->end);
/* Whatever trails the last block becomes the function footer.  */
229 cfg_layout_function_footer = next_insn;
230 if (cfg_layout_function_footer)
231 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
234 /* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
235 numbers and files. In order to be GGC friendly we need to use separate
236 varrays. This also slightly improves the memory locality in binary search.
237 The _locs array contains locators where the given property change. The
238 block_locators_blocks contains the scope block that is used for all insn
239 locator greater than corresponding block_locators_locs value and smaller
240 than the following one. Similarly for the other properties. */
241 static GTY(()) varray_type block_locators_locs;
242 static GTY(()) varray_type block_locators_blocks;
243 static GTY(()) varray_type line_locators_locs;
244 static GTY(()) varray_type line_locators_lines;
245 static GTY(()) varray_type file_locators_locs;
246 static GTY(()) varray_type file_locators_files;
/* Locators assigned to the prologue and epilogue, set by
   insn_locators_initialize below.  */
247 int prologue_locator;
248 int epilogue_locator;
250 /* During the RTL expansion the lexical blocks and line numbers are
251 represented via INSN_NOTEs. Replace them by representation using
255 insn_locators_initialize (void)
258 tree last_block = NULL;
261 int line_number = 0, last_line_number = 0;
262 char *file_name = NULL, *last_file_name = NULL;
264 prologue_locator = epilogue_locator = 0;
/* Seed the six parallel locator varrays (see declarations above).  */
266 VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
267 VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
268 VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
269 VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
270 VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
271 VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");
273 for (insn = get_insns (); insn; insn = next)
275 next = NEXT_INSN (insn);
/* Record a new locator entry whenever block, line or file changed since
   the last active insn (dispatch tables excluded).  The increments of
   loc and the enclosing braces are elided in this listing.  */
277 if ((active_insn_p (insn)
278 && GET_CODE (PATTERN (insn)) != ADDR_VEC
279 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
281 || (!prologue_locator && file_name))
283 if (last_block != block)
286 VARRAY_PUSH_INT (block_locators_locs, loc);
287 VARRAY_PUSH_TREE (block_locators_blocks, block);
290 if (last_line_number != line_number)
293 VARRAY_PUSH_INT (line_locators_locs, loc);
294 VARRAY_PUSH_INT (line_locators_lines, line_number);
295 last_line_number = line_number;
297 if (last_file_name != file_name)
300 VARRAY_PUSH_INT (file_locators_locs, loc);
301 VARRAY_PUSH_CHAR_PTR (file_locators_files, file_name);
302 last_file_name = file_name;
/* First/last locators become the prologue/epilogue locators.  */
305 if (!prologue_locator && file_name)
306 prologue_locator = loc;
307 if (!NEXT_INSN (insn))
308 epilogue_locator = loc;
309 if (active_insn_p (insn))
310 INSN_LOCATOR (insn) = loc;
311 else if (GET_CODE (insn) == NOTE)
/* Track the current lexical block and source position from notes.  */
313 switch (NOTE_LINE_NUMBER (insn))
315 case NOTE_INSN_BLOCK_BEG:
316 block = NOTE_BLOCK (insn);
319 case NOTE_INSN_BLOCK_END:
320 block = BLOCK_SUPERCONTEXT (block);
321 if (block && TREE_CODE (block) == FUNCTION_DECL)
326 if (NOTE_LINE_NUMBER (insn) > 0)
328 line_number = NOTE_LINE_NUMBER (insn);
329 file_name = (char *)NOTE_SOURCE_FILE (insn);
336 /* Tag the blocks with a depth number so that change_scope can find
337 the common parent easily. */
338 set_block_levels (DECL_INITIAL (cfun->decl), 0);
341 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
342 found in the block tree. */
345 set_block_levels (tree block, int level)
/* Walks BLOCK_CHAIN siblings iteratively (loop construct elided in this
   listing) and recurses into subblocks with level + 1.  */
349 BLOCK_NUMBER (block) = level;
350 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
351 block = BLOCK_CHAIN (block);
355 /* Return scope resulting from combination of S1 and S2.
   Picks the deeper block by BLOCK_NUMBER; NULL handling and returns
   are elided in this listing.  */
357 choose_inner_scope (tree s1, tree s2)
363 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
368 /* Emit lexical block notes needed to change scope from S1 to S2. */
371 change_scope (rtx orig_insn, tree s1, tree s2)
373 rtx insn = orig_insn;
374 tree com = NULL_TREE;
375 tree ts1 = s1, ts2 = s2;
/* Walk ts1/ts2 up toward their common ancestor, using the BLOCK_NUMBER
   depths assigned by set_block_levels; loop structure and the assignment
   of the common ancestor to COM are elided in this listing.  */
380 if (ts1 == NULL || ts2 == NULL)
382 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
383 ts1 = BLOCK_SUPERCONTEXT (ts1);
384 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
385 ts2 = BLOCK_SUPERCONTEXT (ts2);
388 ts1 = BLOCK_SUPERCONTEXT (ts1);
389 ts2 = BLOCK_SUPERCONTEXT (ts2);
/* Close scopes from S1 up to the common ancestor...  */
398 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
399 NOTE_BLOCK (note) = s;
400 s = BLOCK_SUPERCONTEXT (s);
/* ...then open scopes down to S2.  */
407 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
408 NOTE_BLOCK (insn) = s;
409 s = BLOCK_SUPERCONTEXT (s);
413 /* Return lexical scope block insn belongs to.
   Binary search over block_locators_locs for the greatest locator
   <= INSN_LOCATOR (insn); loop body and min/max updates are elided
   in this listing.  */
415 insn_scope (rtx insn)
417 int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
419 int loc = INSN_LOCATOR (insn);
425 int pos = (min + max) / 2;
426 int tmp = VARRAY_INT (block_locators_locs, pos);
428 if (tmp <= loc && min != pos)
430 else if (tmp > loc && max != pos)
438 return VARRAY_TREE (block_locators_blocks, min);
441 /* Return line number of the statement that produced this insn. */
/* NOTE(review): the function header line is elided in this listing --
   presumably `insn_line (rtx insn)`.  Same binary-search shape as
   insn_scope above, over the line_locators varrays.  */
445 int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
447 int loc = INSN_LOCATOR (insn);
453 int pos = (min + max) / 2;
454 int tmp = VARRAY_INT (line_locators_locs, pos);
456 if (tmp <= loc && min != pos)
458 else if (tmp > loc && max != pos)
466 return VARRAY_INT (line_locators_lines, min);
469 /* Return source file of the statement that produced this insn. */
/* NOTE(review): the function header line is elided in this listing --
   presumably `insn_file (rtx insn)`.  Same binary-search shape as
   insn_scope above, over the file_locators varrays.  */
473 int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
475 int loc = INSN_LOCATOR (insn);
481 int pos = (min + max) / 2;
482 int tmp = VARRAY_INT (file_locators_locs, pos);
484 if (tmp <= loc && min != pos)
486 else if (tmp > loc && max != pos)
494 return VARRAY_CHAR_PTR (file_locators_files, min);
497 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
498 on the scope tree and the newly reordered instructions. */
501 reemit_insn_block_notes (void)
503 tree cur_block = DECL_INITIAL (cfun->decl);
/* Start from the first active insn.  */
507 if (!active_insn_p (insn))
508 insn = next_active_insn (insn);
509 for (; insn; insn = next_active_insn (insn))
513 this_block = insn_scope (insn);
514 /* For sequences compute scope resulting from merging all scopes
515 of instructions nested inside. */
516 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
519 rtx body = PATTERN (insn);
522 for (i = 0; i < XVECLEN (body, 0); i++)
523 this_block = choose_inner_scope (this_block,
524 insn_scope (XVECEXP (body, 0, i)));
/* Emit the BLOCK_END/BLOCK_BEG notes needed to move between scopes.  */
529 if (this_block != cur_block)
531 change_scope (insn, cur_block, this_block);
532 cur_block = this_block;
536 /* change_scope emits before the insn, not after. */
537 note = emit_note (NOTE_INSN_DELETED);
538 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
544 /* Given a reorder chain, rearrange the code to match. */
547 fixup_reorder_chain (void)
549 basic_block bb, prev_bb;
/* Re-attach the function header (detached in record_effective_endpoints)
   at the front of the stream, and advance INSN to its last insn.  */
553 if (cfg_layout_function_header)
555 set_first_insn (cfg_layout_function_header);
556 insn = cfg_layout_function_header;
557 while (NEXT_INSN (insn))
558 insn = NEXT_INSN (insn);
561 /* First do the bulk reordering -- rechain the blocks without regard to
562 the needed changes to jumps and labels. */
564 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
566 bb = bb->rbi->next, index++)
/* Splice in each block's header chain, then its head..end insns, then
   its footer chain, keeping INSN at the running tail.  */
571 NEXT_INSN (insn) = bb->rbi->header;
573 set_first_insn (bb->rbi->header);
574 PREV_INSN (bb->rbi->header) = insn;
575 insn = bb->rbi->header;
576 while (NEXT_INSN (insn))
577 insn = NEXT_INSN (insn);
580 NEXT_INSN (insn) = bb->head;
582 set_first_insn (bb->head);
583 PREV_INSN (bb->head) = insn;
587 NEXT_INSN (insn) = bb->rbi->footer;
588 PREV_INSN (bb->rbi->footer) = insn;
589 while (NEXT_INSN (insn))
590 insn = NEXT_INSN (insn);
/* Sanity check: the rbi->next chain must cover every basic block.  */
594 if (index != n_basic_blocks)
597 NEXT_INSN (insn) = cfg_layout_function_footer;
598 if (cfg_layout_function_footer)
599 PREV_INSN (cfg_layout_function_footer) = insn;
601 while (NEXT_INSN (insn))
602 insn = NEXT_INSN (insn);
604 set_last_insn (insn);
605 #ifdef ENABLE_CHECKING
606 verify_insn_chain ();
609 /* Now add jumps and labels as needed to match the blocks new
612 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
614 edge e_fall, e_taken, e;
618 if (bb->succ == NULL)
621 /* Find the old fallthru edge, and another non-EH edge for
623 e_taken = e_fall = NULL;
624 for (e = bb->succ; e ; e = e->succ_next)
625 if (e->flags & EDGE_FALLTHRU)
627 else if (! (e->flags & EDGE_EH))
630 bb_end_insn = bb->end;
631 if (GET_CODE (bb_end_insn) == JUMP_INSN)
633 if (any_condjump_p (bb_end_insn))
635 /* If the old fallthru is still next, nothing to do. */
636 if (bb->rbi->next == e_fall->dest
638 && e_fall->dest == EXIT_BLOCK_PTR))
641 /* The degenerated case of conditional jump jumping to the next
642 instruction can happen on target having jumps with side
645 Create temporarily the duplicated edge representing branch.
646 It will get unidentified by force_nonfallthru_and_redirect
647 that would otherwise get confused by fallthru edge not pointing
648 to the next basic block. */
654 e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
656 if (!redirect_jump (bb->end, block_label (bb), 0))
/* Split the branch probability between the fake (taken) edge and
   the remaining fallthru edge, clamping at zero.  */
658 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
661 int prob = INTVAL (XEXP (note, 0));
663 e_fake->probability = prob;
664 e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
665 e_fall->probability -= e_fall->probability;
666 e_fall->count -= e_fake->count;
667 if (e_fall->probability < 0)
668 e_fall->probability = 0;
669 if (e_fall->count < 0)
673 /* There is one special case: if *neither* block is next,
674 such as happens at the very end of a function, then we'll
675 need to add a new unconditional jump. Choose the taken
676 edge based on known or assumed probability. */
677 else if (bb->rbi->next != e_taken->dest)
679 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
682 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
683 && invert_jump (bb_end_insn,
684 label_for_bb (e_fall->dest), 0))
686 e_fall->flags &= ~EDGE_FALLTHRU;
687 e_taken->flags |= EDGE_FALLTHRU;
688 update_br_prob_note (bb);
689 e = e_fall, e_fall = e_taken, e_taken = e;
693 /* Otherwise we can try to invert the jump. This will
694 basically never fail, however, keep up the pretense. */
695 else if (invert_jump (bb_end_insn,
696 label_for_bb (e_fall->dest), 0))
698 e_fall->flags &= ~EDGE_FALLTHRU;
699 e_taken->flags |= EDGE_FALLTHRU;
700 update_br_prob_note (bb);
704 else if (returnjump_p (bb_end_insn))
708 /* Otherwise we have some switch or computed jump. In the
709 99% case, there should not have been a fallthru edge. */
713 #ifdef CASE_DROPS_THROUGH
714 /* Except for VAX. Since we didn't have predication for the
715 tablejump, the fallthru block should not have moved. */
716 if (bb->rbi->next == e_fall->dest)
718 bb_end_insn = skip_insns_after_block (bb);
726 /* No fallthru implies a noreturn function with EH edges, or
727 something similarly bizarre. In any case, we don't need to
732 /* If the fallthru block is still next, nothing to do. */
733 if (bb->rbi->next == e_fall->dest)
736 /* A fallthru to exit block. */
737 if (!bb->rbi->next && e_fall->dest == EXIT_BLOCK_PTR)
741 /* We got here if we need to add a new jump insn. */
742 nb = force_nonfallthru (e_fall);
/* The new block gets its own rbi and is linked after BB in the chain.  */
745 cfg_layout_initialize_rbi (nb);
746 nb->rbi->visited = 1;
747 nb->rbi->next = bb->rbi->next;
749 /* Don't process this new block. */
754 /* Put basic_block_info in the new order. */
/* Dump the reordered sequence (presumably guarded by a check of
   rtl_dump_file that is elided in this listing).  */
758 fprintf (rtl_dump_file, "Reordered sequence:\n");
759 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = bb->rbi->next, index ++)
761 fprintf (rtl_dump_file, " %i ", index);
762 if (bb->rbi->original)
763 fprintf (rtl_dump_file, "duplicate of %i ",
764 bb->rbi->original->index);
765 else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
766 fprintf (rtl_dump_file, "compensation ");
768 fprintf (rtl_dump_file, "bb %i ", bb->index);
769 fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
/* Rebuild prev_bb/next_bb links and BASIC_BLOCK numbering to match
   the rbi->next chain.  */
773 prev_bb = ENTRY_BLOCK_PTR;
774 bb = ENTRY_BLOCK_PTR->next_bb;
777 for (; bb; prev_bb = bb, bb = bb->rbi->next, index ++)
780 BASIC_BLOCK (index) = bb;
782 bb->prev_bb = prev_bb;
783 prev_bb->next_bb = bb;
785 prev_bb->next_bb = EXIT_BLOCK_PTR;
786 EXIT_BLOCK_PTR->prev_bb = prev_bb;
788 /* Annoying special case - jump around dead jumptables left in the code. */
792 for (e = bb->succ; e && !(e->flags & EDGE_FALLTHRU); e = e->succ_next)
794 if (e && !can_fallthru (e->src, e->dest))
795 force_nonfallthru (e);
799 /* Perform sanity checks on the insn chain.
800 1. Check that next/prev pointers are consistent in both the forward and
802 2. Count insns in chain, going both directions, and check if equal.
803 3. Check that get_last_insn () returns the actual end of chain. */
806 verify_insn_chain (void)
809 int insn_cnt1, insn_cnt2;
/* Forward pass: each insn's PREV_INSN must point to its predecessor,
   and the walk must end at get_last_insn ().  The abort calls after the
   failed checks are elided in this listing.  */
811 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
813 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
814 if (PREV_INSN (x) != prevx)
817 if (prevx != get_last_insn ())
/* Backward pass: symmetric check via NEXT_INSN.  */
820 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
822 nextx = x, insn_cnt2++, x = PREV_INSN (x))
823 if (NEXT_INSN (x) != nextx)
/* Both directions must have seen the same number of insns.  */
826 if (insn_cnt1 != insn_cnt2)
830 /* The block falling through to exit must be the last one in the
831 reordered chain. Ensure that this condition is met. */
833 fixup_fallthru_exit_predecessor (void)
836 basic_block bb = NULL;
/* Find the (at most one) fallthru predecessor of the exit block.  */
838 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
839 if (e->flags & EDGE_FALLTHRU)
/* If that block is not already last in the rbi->next chain, unlink it
   and move it to the end (re-linking at the tail is elided here).  */
842 if (bb && bb->rbi->next)
844 basic_block c = ENTRY_BLOCK_PTR->next_bb;
846 while (c->rbi->next != bb)
849 c->rbi->next = bb->rbi->next;
854 bb->rbi->next = NULL;
858 /* Return true in case it is possible to duplicate the basic block BB. */
861 cfg_layout_can_duplicate_bb_p (basic_block bb)
/* Entry and exit blocks can never be duplicated.  */
865 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
868 /* Duplicating fallthru block to exit would require adding a jump
869 and splitting the real last BB. */
870 for (s = bb->succ; s; s = s->succ_next)
871 if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
874 /* Do not attempt to duplicate tablejumps, as we need to unshare
875 the dispatch table. This is difficult to do, as the instructions
876 computing jump destination may be hoisted outside the basic block. */
877 if (tablejump_p (bb->end, NULL, NULL))
880 /* Do not duplicate blocks containing insns that can't be copied. */
881 if (targetm.cannot_copy_insn_p)
/* Scan the block's insns; the loop setup and termination are elided
   in this listing.  */
886 if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
890 insn = NEXT_INSN (insn);
/* Duplicate the insn chain FROM..TO at the end of the insn stream and
   return the first insn of the copy.  */
898 duplicate_insn_chain (rtx from, rtx to)
902 /* Avoid updating of boundaries of previous basic block. The
903 note will get removed from insn stream in fixup. */
904 last = emit_note (NOTE_INSN_DELETED);
906 /* Create copy at the end of INSN chain. The chain will
907 be reordered later. */
908 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
910 switch (GET_CODE (insn))
915 /* Avoid copying of dispatch tables. We never duplicate
916 tablejumps, so this can hit only in case the table got
917 moved far from original jump. */
918 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
919 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
921 emit_copy_of_insn_after (insn, get_last_insn ());
/* Notes: most kinds are intentionally not copied; only the cases that
   reach emit_note_copy below are duplicated.  */
932 switch (NOTE_LINE_NUMBER (insn))
934 /* In case prologue is empty and function contain label
935 in first BB, we may want to copy the block. */
936 case NOTE_INSN_PROLOGUE_END:
938 case NOTE_INSN_LOOP_VTOP:
939 case NOTE_INSN_LOOP_CONT:
940 case NOTE_INSN_LOOP_BEG:
941 case NOTE_INSN_LOOP_END:
942 /* Strip down the loop notes - we don't really want to keep
943 them consistent in loop copies. */
944 case NOTE_INSN_DELETED:
945 case NOTE_INSN_DELETED_LABEL:
946 /* No problem to strip these. */
947 case NOTE_INSN_EPILOGUE_BEG:
948 case NOTE_INSN_FUNCTION_END:
949 /* Debug code expects these notes to exist just once.
950 Keep them in the master copy.
951 ??? It probably makes more sense to duplicate them for each
953 case NOTE_INSN_FUNCTION_BEG:
954 /* There is always just single entry to function. */
955 case NOTE_INSN_BASIC_BLOCK:
958 /* There is no purpose to duplicate prologue. */
959 case NOTE_INSN_BLOCK_BEG:
960 case NOTE_INSN_BLOCK_END:
961 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
962 reordering is in the progress. */
963 case NOTE_INSN_EH_REGION_BEG:
964 case NOTE_INSN_EH_REGION_END:
965 /* Should never exist at BB duplication time. */
968 case NOTE_INSN_REPEATED_LINE_NUMBER:
969 emit_note_copy (insn);
973 if (NOTE_LINE_NUMBER (insn) < 0)
975 /* It is possible that no_line_number is set and the note
977 emit_note_copy (insn);
/* Return the first insn after the placeholder NOTE_INSN_DELETED.  */
984 insn = NEXT_INSN (last);
988 /* Create a duplicate of the basic block BB and redirect edge E into it.
989 If E is not specified, BB is just copied, but updating the frequencies
990 etc. is left to the caller. */
993 cfg_layout_duplicate_bb (basic_block bb, edge e)
/* The count moved to the copy is E's count, clamped to BB's count.  */
998 gcov_type new_count = e ? e->count : 0;
1000 if (bb->count < new_count)
1001 new_count = bb->count;
1004 #ifdef ENABLE_CHECKING
1005 if (!cfg_layout_can_duplicate_bb_p (bb))
/* Copy the block's insns and build the new block before the exit block.  */
1009 insn = duplicate_insn_chain (bb->head, bb->end);
1010 new_bb = create_basic_block (insn,
1011 insn ? get_last_insn () : NULL,
1012 EXIT_BLOCK_PTR->prev_bb);
/* Duplicate the detached header and footer chains as well.  */
1014 if (bb->rbi->header)
1016 insn = bb->rbi->header;
1017 while (NEXT_INSN (insn))
1018 insn = NEXT_INSN (insn);
1019 insn = duplicate_insn_chain (bb->rbi->header, insn);
1021 new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
1024 if (bb->rbi->footer)
1026 insn = bb->rbi->footer;
1027 while (NEXT_INSN (insn))
1028 insn = NEXT_INSN (insn);
1029 insn = duplicate_insn_chain (bb->rbi->footer, insn);
1031 new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
/* Copy liveness information if present.  */
1034 if (bb->global_live_at_start)
1036 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1037 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1038 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
1039 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1042 new_bb->loop_depth = bb->loop_depth;
1043 new_bb->flags = bb->flags;
1044 for (s = bb->succ; s; s = s->succ_next)
1046 /* Since we are creating edges from a new block to successors
1047 of another block (which therefore are known to be disjoint), there
1048 is no need to actually check for duplicated edges. */
1049 n = unchecked_make_edge (new_bb, s->dest, s->flags);
1050 n->probability = s->probability;
1053 /* Take care for overflows! */
1054 n->count = s->count * (new_count * 10000 / bb->count) / 10000;
1055 s->count -= n->count;
1058 n->count = s->count;
/* If E was given, move the chosen count/frequency to the copy and
   redirect E there; otherwise (else-branch) mirror BB's numbers.  */
1064 new_bb->count = new_count;
1065 bb->count -= new_count;
1067 new_bb->frequency = EDGE_FREQUENCY (e);
1068 bb->frequency -= EDGE_FREQUENCY (e);
1070 redirect_edge_and_branch_force (e, new_bb);
1074 if (bb->frequency < 0)
1079 new_bb->count = bb->count;
1080 new_bb->frequency = bb->frequency;
/* Record the original/copy relation for later queries.  */
1083 new_bb->rbi->original = bb;
1084 bb->rbi->copy = new_bb;
/* Allocate and zero BB's reorder_block_def (rbi) structure from
   cfg_layout_pool.  Uses an old-style (K&R) parameter list.  */
1090 cfg_layout_initialize_rbi (bb)
1095 bb->rbi = pool_alloc (cfg_layout_pool);
1096 memset (bb->rbi, 0, sizeof (struct reorder_block_def));
1099 /* Main entry point to this module - initialize the datastructures for
1100 CFG layout changes. It keeps LOOPS up-to-date if not null. */
1103 cfg_layout_initialize ()
1107 /* Our algorithm depends on fact that there are now dead jumptables
/* Create the rbi allocation pool and attach an rbi to every block
   (including the entry block, via FOR_BB_BETWEEN).  */
1110 create_alloc_pool ("cfg layout pool", sizeof (struct reorder_block_def),
1111 n_basic_blocks + 2);
1112 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1113 cfg_layout_initialize_rbi (bb);
/* Switch to cfglayout-mode CFG hooks, detach the per-block header and
   footer insns, and clean up the CFG in layout mode.  */
1115 cfg_layout_rtl_register_cfg_hooks ();
1117 record_effective_endpoints ();
1119 cleanup_cfg (CLEANUP_CFGLAYOUT);
1122 /* Splits superblocks. */
1124 break_superblocks (void)
1126 sbitmap superblocks;
/* Collect the blocks marked BB_SUPERBLOCK, clearing the flag as we go.  */
1129 superblocks = sbitmap_alloc (n_basic_blocks);
1130 sbitmap_zero (superblocks);
1134 for (i = 0; i < n_basic_blocks; i++)
1135 if (BASIC_BLOCK(i)->flags & BB_SUPERBLOCK)
1137 BASIC_BLOCK(i)->flags &= ~BB_SUPERBLOCK;
1138 SET_BIT (superblocks, i);
/* Rebuild jump labels and split the collected superblocks into ordinary
   basic blocks (presumably only when some were found -- the guarding
   condition is elided in this listing).  */
1144 rebuild_jump_labels (get_insns ());
1145 find_many_sub_basic_blocks (superblocks);
1151 /* Finalize the changes: reorder insn list according to the sequence, enter
1152 compensation code, rebuild scope forest. */
1155 cfg_layout_finalize (void)
1159 #ifdef ENABLE_CHECKING
1160 verify_flow_info ();
/* Restore the normal RTL CFG hooks, then materialize the layout: move the
   exit-fallthru block last and rewrite the insn chain and jumps.  */
1162 rtl_register_cfg_hooks ();
1163 fixup_fallthru_exit_predecessor ();
1164 fixup_reorder_chain ();
1166 #ifdef ENABLE_CHECKING
1167 verify_insn_chain ();
/* Tear down the cfglayout data: free the rbi pool and clear each block's
   rbi pointer (the loop body clearing bb->rbi is elided in this listing).  */
1170 free_alloc_pool (cfg_layout_pool);
1171 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1174 break_superblocks ();
1176 #ifdef ENABLE_CHECKING
1177 verify_flow_info ();
1181 /* Checks whether all N blocks in BBS array can be copied. */
1183 can_copy_bbs_p (basic_block *bbs, unsigned n)
/* Temporarily mark all candidate blocks so abnormal edges into the set
   can be detected.  */
1189 for (i = 0; i < n; i++)
1190 bbs[i]->rbi->duplicated = 1;
1192 for (i = 0; i < n; i++)
1194 /* In case we should redirect abnormal edge during duplication, fail. */
1195 for (e = bbs[i]->succ; e; e = e->succ_next)
1196 if ((e->flags & EDGE_ABNORMAL)
1197 && e->dest->rbi->duplicated)
/* Each individual block must also be duplicable on its own.  */
1203 if (!cfg_layout_can_duplicate_bb_p (bbs[i]))
/* Clear the temporary marks before returning.  */
1211 for (i = 0; i < n; i++)
1212 bbs[i]->rbi->duplicated = 0;
1217 /* Duplicates N basic blocks stored in array BBS. Newly created basic blocks
1218 are placed into array NEW_BBS in the same order. Edges from basic blocks
1219 in BBS are also duplicated and copies of those of them
1220 that lead into BBS are redirected to appropriate newly created block. The
1221 function assigns bbs into loops (copy of basic block bb is assigned to
1222 bb->loop_father->copy loop, so this must be set up correctly in advance)
1223 and updates dominators locally (LOOPS structure that contains the information
1224 about dominators is passed to enable this).
1226 BASE is the superloop to that basic block belongs; if its header or latch
1227 is copied, we do not set the new blocks as header or latch.
1229 Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
1230 also in the same order. */
1233 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
1234 edge *edges, unsigned n_edges, edge *new_edges,
1235 struct loop *base, struct loops *loops)
1238 basic_block bb, new_bb, dom_bb;
1241 /* Duplicate bbs, update dominators, assign bbs to loops. */
1242 for (i = 0; i < n; i++)
1246 new_bb = new_bbs[i] = cfg_layout_duplicate_bb (bb, NULL);
1247 bb->rbi->duplicated = 1;
1249 add_bb_to_loop (new_bb, bb->loop_father->copy);
1250 add_to_dominance_info (loops->cfg.dom, new_bb);
1251 /* Possibly set header. */
1252 if (bb->loop_father->header == bb && bb->loop_father != base)
1253 new_bb->loop_father->header = new_bb;
/* Possibly set latch (mirrors the header case above).  */
1255 if (bb->loop_father->latch == bb && bb->loop_father != base)
1256 new_bb->loop_father->latch = new_bb;
1259 /* Set dominators. */
1260 for (i = 0; i < n; i++)
1263 new_bb = new_bbs[i];
/* If the dominator was itself copied, dominate the copy by its copy.  */
1265 dom_bb = get_immediate_dominator (loops->cfg.dom, bb);
1266 if (dom_bb->rbi->duplicated)
1268 dom_bb = dom_bb->rbi->copy;
1269 set_immediate_dominator (loops->cfg.dom, new_bb, dom_bb);
1273 /* Redirect edges. */
1274 for (j = 0; j < n_edges; j++)
1275 new_edges[j] = NULL;
1276 for (i = 0; i < n; i++)
1278 new_bb = new_bbs[i];
1281 for (e = new_bb->succ; e; e = e->succ_next)
/* Record copies of the requested edges (the assignment into
   new_edges[j] is elided in this listing) and redirect edges whose
   destination was also duplicated to the duplicate.  */
1283 for (j = 0; j < n_edges; j++)
1284 if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
1287 if (!e->dest->rbi->duplicated)
1289 redirect_edge_and_branch_force (e, e->dest->rbi->copy);
1293 /* Clear information about duplicates. */
1294 for (i = 0; i < n; i++)
1295 bbs[i]->rbi->duplicated = 0;
1298 #include "gt-cfglayout.h"