1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
30 if (p) goto A; // predict taken
33 if (q) goto B; // predict taken
39 We'll currently reorder this as
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
72 (2) Coordinate with shorten_branches to minimize the number of
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
/* Targets without an RTL epilogue pattern get a zero fallback, so
   HAVE_epilogue can be tested unconditionally below. */
101 #ifndef HAVE_epilogue
102 #define HAVE_epilogue 0
106 /* The contents of the current function definition are allocated
107 in this obstack, and all are freed at the end of the function.
108 For top-level functions, this is temporary_obstack.
109 Separate obstacks are made for nested functions. */
/* Defined in flow.c; shared with the CFG code. */
111 extern struct obstack flow_obstack;
114 /* Structure to hold information about lexical scopes. */
115 typedef struct scope_def
/* NOTE(review): the field declarations for the commented slots below
   (note_beg, note_end, bb_beg, bb_end, bbs, num_bbs, level) appear to
   be elided in this listing -- confirm against the full file. */
119 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
122 /* The NOTE_INSN_BLOCK_END that ended this scope. */
125 /* The bb containing note_beg (if any). */
128 /* The bb containing note_end (if any). */
131 /* List of basic blocks contained within this scope. */
134 /* Number of blocks contained within this scope. */
137 /* The outer scope or NULL if outermost scope. */
138 struct scope_def *outer;
140 /* The first inner scope or NULL if innermost scope. */
141 struct scope_def *inner;
143 /* The last inner scope or NULL if innermost scope. */
144 struct scope_def *inner_last;
146 /* Link to the next (sibling) scope. */
147 struct scope_def *next;
151 /* Structure to hold information about the scope forest. */
154 /* Number of trees in forest. */
157 /* List of tree roots. */
161 /* Structure to hold information about the blocks during reordering. */
162 typedef struct reorder_block_def
169 } *reorder_block_def;
/* Typed accessor for the per-block reordering info stashed in BB->aux.  */
171 #define RBI(BB) ((reorder_block_def) (BB)->aux)
173 /* Holds the interesting trailing notes for the function. */
174 static rtx function_tail_eff_head;
177 /* Local function prototypes. */
178 static rtx skip_insns_after_block PARAMS ((basic_block));
179 static void record_effective_endpoints PARAMS ((void));
180 static void make_reorder_chain PARAMS ((void));
181 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
182 static rtx label_for_bb PARAMS ((basic_block));
183 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
184 static void fixup_reorder_chain PARAMS ((void));
185 static void relate_bbs_with_scopes PARAMS ((scope));
186 static scope make_new_scope PARAMS ((int, rtx));
187 static void build_scope_forest PARAMS ((scope_forest_info *));
188 static void remove_scope_notes PARAMS ((void));
189 static void insert_intra_1 PARAMS ((scope, rtx *, basic_block));
190 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
191 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
192 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
193 static void free_scope_forest_1 PARAMS ((scope));
194 static void free_scope_forest PARAMS ((scope_forest_info *));
/* Non-static on purpose: callable from a debugger. */
195 void dump_scope_forest PARAMS ((scope_forest_info *));
196 static void dump_scope_forest_1 PARAMS ((scope, int));
197 static rtx get_next_bb_note PARAMS ((rtx));
198 static rtx get_prev_bb_note PARAMS ((rtx));
/* Non-static on purpose: callable from a debugger. */
200 void verify_insn_chain PARAMS ((void));
202 /* Skip over inter-block insns occurring after BB which are typically
203 associated with BB (e.g., barriers). If there are any such insns,
204 we return the last one. Otherwise, we return the end of BB. */
207 skip_insns_after_block (bb)
210 rtx insn, last_insn, next_head, prev;
/* First insn of the following block, if any; the forward scan below
   must stop there so we never claim insns belonging to the next block. */
212 next_head = NULL_RTX;
213 if (bb->index + 1 != n_basic_blocks)
214 next_head = BASIC_BLOCK (bb->index + 1)->head;
216 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)); )
218 if (insn == next_head)
221 switch (GET_CODE (insn))
228 switch (NOTE_LINE_NUMBER (insn))
230 case NOTE_INSN_LOOP_END:
231 case NOTE_INSN_BLOCK_END:
234 case NOTE_INSN_DELETED:
235 case NOTE_INSN_DELETED_LABEL:
/* A label immediately followed by a jump table (ADDR_VEC or
   ADDR_DIFF_VEC) is kept with BB; step over the table too. */
246 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
247 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
248 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
250 insn = NEXT_INSN (insn);
262 /* It is possible to hit contradicting sequence. For instance:
268 Where barrier belongs to jump_insn, but the note does not.
269 This can be created by removing the basic block originally
270 following NOTE_INSN_LOOP_BEG.
272 In such case reorder the notes. */
/* Walk backwards from the last claimed insn, moving stray notes
   after last_insn so the trailing run stays well-formed. */
273 for (insn = last_insn; insn != bb->end; insn = prev)
275 prev = PREV_INSN (insn);
276 if (GET_CODE (insn) == NOTE)
277 switch (NOTE_LINE_NUMBER (insn))
279 case NOTE_INSN_LOOP_END:
280 case NOTE_INSN_BLOCK_END:
281 case NOTE_INSN_DELETED:
282 case NOTE_INSN_DELETED_LABEL:
285 reorder_insns (insn, insn, last_insn);
293 /* Locate the effective beginning and end of the insn chain for each
294 block, as defined by skip_insns_after_block above. */
297 record_effective_endpoints ()
299 rtx next_insn = get_insns ();
302 for (i = 0; i < n_basic_blocks; ++i)
304 basic_block bb = BASIC_BLOCK (i);
/* eff_head starts right after the previous block's effective end, so
   inter-block insns are attributed to the block that precedes them. */
307 RBI (bb)->eff_head = next_insn;
308 end = skip_insns_after_block (bb);
309 RBI (bb)->eff_end = end;
310 next_insn = NEXT_INSN (end);
/* Whatever follows the last block (trailing notes etc.) is remembered
   so fixup_reorder_chain can reattach it after reordering. */
312 function_tail_eff_head = next_insn;
316 /* Compute an ordering for a subgraph beginning with block BB. Record the
317 ordering in RBI()->index and chained through RBI()->next. */
320 make_reorder_chain ()
322 basic_block last_block = NULL;
323 basic_block prev = NULL;
324 int nbb_m1 = n_basic_blocks - 1;
327 /* If we've not got epilogue in RTL, we must fallthru to the exit.
328 Force the last block to be at the end. */
329 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
330 end of the function for stack unwinding purposes. */
/* Mark it visited now so the greedy placement below never moves it. */
333 last_block = BASIC_BLOCK (nbb_m1);
334 RBI (last_block)->visited = 1;
338 /* Loop until we've placed every block. */
345 /* Find the next unplaced block. */
346 /* ??? Get rid of this loop, and track which blocks are not yet
347 placed more directly, so as to avoid the O(N^2) worst case.
348 Perhaps keep a doubly-linked list of all to-be-placed blocks;
349 remove from the list as we place. The head of that list is
350 what we're looking for here. */
352 for (i = 0; i <= nbb_m1 && !next; ++i)
354 basic_block bb = BASIC_BLOCK (i);
355 if (! RBI (bb)->visited)
359 prev = make_reorder_chain_1 (next, prev);
363 /* Terminate the chain. */
/* last_block is non-NULL only when the final block was pinned above;
   in that case it goes last, otherwise the chain simply ends. */
366 RBI (prev)->next = last_block;
369 RBI (prev)->next = NULL;
372 /* A helper function for make_reorder_chain.
374 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
375 These are assumed to be the error condition and we wish to cluster
376 all of them at the very end of the function for the benefit of cache
377 locality for the rest of the function.
379 ??? We could do slightly better by noticing earlier that some subgraph
380 has all paths leading to noreturn functions, but for there to be more
381 than one block in such a subgraph is rare. */
384 make_reorder_chain_1 (bb, prev)
392 /* Mark this block visited. */
396 RBI (prev)->next = bb;
398 if (rtl_dump_file && prev->index + 1 != bb->index)
399 fprintf (rtl_dump_file, "Reordering block %d after %d\n",
400 bb->index, prev->index);
407 RBI (bb)->visited = 1;
/* No successors: nothing further to place from here. */
410 if (bb->succ == NULL)
413 /* Find the most probable block. */
/* A conditional jump with a REG_BR_PROB note lets us pick the
   predicted successor as the next block in the chain. */
416 if (any_condjump_p (bb->end)
417 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
419 int taken, probability;
420 edge e_taken, e_fall;
422 probability = INTVAL (XEXP (note, 0));
/* Predict taken when probability exceeds half of REG_BR_PROB_BASE. */
423 taken = probability > REG_BR_PROB_BASE / 2;
425 /* Find the normal taken edge and the normal fallthru edge.
427 Note, conditional jumps with other side effects may not
428 be fully optimized. In this case it is possible for
429 the conditional jump to branch to the same location as
432 We should probably work to improve optimization of that
433 case; however, it seems silly not to also deal with such
434 problems here if they happen to occur. */
436 e_taken = e_fall = NULL;
437 for (e = bb->succ; e ; e = e->succ_next)
439 if (e->flags & EDGE_FALLTHRU)
441 else if (! (e->flags & EDGE_EH))
445 next = (taken ? e_taken : e_fall)->dest;
448 /* In the absence of a prediction, disturb things as little as possible
449 by selecting the old "next" block from the list of successors. If
450 there had been a fallthru edge, that will be the one. */
453 for (e = bb->succ; e ; e = e->succ_next)
454 if (e->dest->index == bb->index + 1)
456 if ((e->flags & EDGE_FALLTHRU)
458 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
464 /* Make sure we didn't select a silly next block. */
465 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
468 /* Recurse on the successors. Unroll the last call, as the normal
469 case is exactly one or two edges, and we can tail recurse. */
470 for (e = bb->succ; e; e = e->succ_next)
471 if (e->dest != EXIT_BLOCK_PTR
472 && ! RBI (e->dest)->visited
474 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
/* Place the previously chosen NEXT first, then retarget NEXT at this
   edge's dest unless the recursion already placed it. */
478 prev = make_reorder_chain_1 (next, prev);
479 next = RBI (e->dest)->visited ? NULL : e->dest;
494 /* Locate or create a label for a given basic block. */
500 rtx label = bb->head;
/* No label at the block head: create one, keeping the recorded
   effective head in sync when the label becomes the new first insn. */
502 if (GET_CODE (label) != CODE_LABEL)
505 fprintf (rtl_dump_file, "Emitting label for block %d\n",
508 label = emit_label_before (gen_label_rtx (), label);
509 if (bb->head == RBI (bb)->eff_head)
510 RBI (bb)->eff_head = label;
/* Keep the insn-to-block map current if it exists. */
512 if (basic_block_for_insn)
513 set_block_for_insn (label, bb);
520 /* Emit a jump to BB after insn AFTER. */
523 emit_jump_to_block_after (bb, after)
/* Jumping to the exit block means returning from the function, so the
   two arms emit gen_jump vs. gen_return respectively. */
529 if (bb != EXIT_BLOCK_PTR)
531 rtx label = label_for_bb (bb);
532 jump = emit_jump_insn_after (gen_jump (label), after);
/* Record the jump target and bump the label's use count so later
   passes do not delete the label. */
533 JUMP_LABEL (jump) = label;
534 LABEL_NUSES (label) += 1;
535 if (basic_block_for_insn)
536 set_block_for_new_insns (jump, bb);
539 fprintf (rtl_dump_file, "Emitting jump to block %d\n",
547 jump = emit_jump_insn_after (gen_return (), after);
548 if (basic_block_for_insn)
549 set_block_for_new_insns (jump, bb);
552 fprintf (rtl_dump_file, "Emitting return\n");
562 /* Given a reorder chain, rearrange the code to match. */
565 fixup_reorder_chain ()
567 basic_block bb, last_bb;
/* Remember the original count so compensation blocks added below can
   be distinguished in the dump output. */
570 int old_n_basic_blocks = n_basic_blocks;
572 /* First do the bulk reordering -- rechain the blocks without regard to
573 the needed changes to jumps and labels. */
575 last_bb = BASIC_BLOCK (0);
576 bb = RBI (last_bb)->next;
580 rtx last_e = RBI (last_bb)->eff_end;
581 rtx curr_h = RBI (bb)->eff_head;
/* Splice this block's effective insns right after the previous
   block's, rebuilding the doubly-linked insn chain in chain order. */
583 NEXT_INSN (last_e) = curr_h;
584 PREV_INSN (curr_h) = last_e;
591 if (index != n_basic_blocks)
594 insn = RBI (last_bb)->eff_end;
/* Reattach the function's trailing insns saved by
   record_effective_endpoints, then recompute the chain's last insn. */
596 NEXT_INSN (insn) = function_tail_eff_head;
597 if (function_tail_eff_head)
598 PREV_INSN (function_tail_eff_head) = insn;
600 while (NEXT_INSN (insn))
601 insn = NEXT_INSN (insn);
602 set_last_insn (insn);
603 #ifdef ENABLE_CHECKING
604 verify_insn_chain ();
607 /* Now add jumps and labels as needed to match the blocks new
610 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
612 edge e_fall, e_taken, e;
616 if (bb->succ == NULL)
619 /* Find the old fallthru edge, and another non-EH edge for
621 e_taken = e_fall = NULL;
622 for (e = bb->succ; e ; e = e->succ_next)
623 if (e->flags & EDGE_FALLTHRU)
625 else if (! (e->flags & EDGE_EH))
628 bb_end_insn = bb->end;
629 if (GET_CODE (bb_end_insn) == JUMP_INSN)
631 if (any_condjump_p (bb_end_insn))
633 /* If the old fallthru is still next, nothing to do. */
634 if (RBI (bb)->next == e_fall->dest
636 && e_fall->dest == EXIT_BLOCK_PTR))
639 /* There is one special case: if *neither* block is next,
640 such as happens at the very end of a function, then we'll
641 need to add a new unconditional jump. Choose the taken
642 edge based on known or assumed probability. */
643 if (RBI (bb)->next != e_taken->dest)
645 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
647 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
648 && invert_jump (bb_end_insn,
649 label_for_bb (e_fall->dest), 0))
/* The inversion swapped the edge roles; update the flags and
   swap the local edge pointers to match. */
651 e_fall->flags &= ~EDGE_FALLTHRU;
652 e_taken->flags |= EDGE_FALLTHRU;
653 e = e_fall, e_fall = e_taken, e_taken = e;
657 /* Otherwise we can try to invert the jump. This will
658 basically never fail, however, keep up the pretense. */
659 else if (invert_jump (bb_end_insn,
660 label_for_bb (e_fall->dest), 0))
662 e_fall->flags &= ~EDGE_FALLTHRU;
663 e_taken->flags |= EDGE_FALLTHRU;
667 else if (returnjump_p (bb_end_insn))
671 /* Otherwise we have some switch or computed jump. In the
672 99% case, there should not have been a fallthru edge. */
675 #ifdef CASE_DROPS_THROUGH
676 /* Except for VAX. Since we didn't have predication for the
677 tablejump, the fallthru block should not have moved. */
678 if (RBI (bb)->next == e_fall->dest)
680 bb_end_insn = skip_insns_after_block (bb);
688 /* No fallthru implies a noreturn function with EH edges, or
689 something similarly bizarre. In any case, we don't need to
694 /* If the fallthru block is still next, nothing to do. */
695 if (RBI (bb)->next == e_fall->dest)
698 /* A fallthru to exit block. */
699 if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
703 /* We got here if we need to add a new jump insn. */
705 nb = force_nonfallthru (e_fall);
/* Give the new compensation block its own reordering info so the
   chain walk can traverse it; it inherits BB's scope and successor. */
709 nb->aux = xmalloc (sizeof (struct reorder_block_def));
710 RBI (nb)->eff_head = nb->head;
711 RBI (nb)->eff_end = NEXT_INSN (nb->end);
712 RBI (nb)->scope = RBI (bb)->scope;
713 RBI (nb)->visited = 1;
714 RBI (nb)->next = RBI (bb)->next;
716 /* Don't process this new block. */
721 /* Put basic_block_info in the new order. */
722 bb = BASIC_BLOCK (0);
726 fprintf (rtl_dump_file, "Reordered sequence:\n");
730 fprintf (rtl_dump_file, " %i %sbb %i freq %i\n", index,
731 bb->index >= old_n_basic_blocks ? "compensation " : "",
735 BASIC_BLOCK (index) = bb;
743 /* Perform sanity checks on the insn chain.
744 1. Check that next/prev pointers are consistent in both the forward and
746 2. Count insns in chain, going both directions, and check if equal.
747 3. Check that get_last_insn () returns the actual end of chain. */
/* Forward pass: every insn's PREV_INSN must point at its predecessor. */
760 for (x = get_insns (); x; x = NEXT_INSN (x))
762 if (PREV_INSN (x) != prevx)
764 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
765 fprintf (stderr, "previous insn:\n");
767 fprintf (stderr, "current insn:\n");
775 if (prevx != get_last_insn ())
777 fprintf (stderr, "last_insn corrupt.\n");
/* Backward pass: every insn's NEXT_INSN must point at its successor. */
783 for (x = get_last_insn (); x; x = PREV_INSN (x))
785 if (NEXT_INSN (x) != nextx)
787 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
788 fprintf (stderr, "current insn:\n");
790 fprintf (stderr, "next insn:\n");
/* The two traversals must have seen the same number of insns. */
798 if (insn_cnt1 != insn_cnt2)
800 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
801 insn_cnt1, insn_cnt2);
/* NOTE(review): these two tests presumably belong to the elided
   get_next_bb_note / get_prev_bb_note helpers (scanning for the nearest
   NOTE_INSN_BASIC_BLOCK note) -- confirm against the full file. */
812 if (NOTE_INSN_BASIC_BLOCK_P (x))
826 if (NOTE_INSN_BASIC_BLOCK_P (x))
834 /* Determine and record the relationships between basic blocks and
835 scopes in scope tree S. */
838 relate_bbs_with_scopes (s)
842 int i, bbi1, bbi2, bbs_spanned;
/* Process children first so inner scopes claim their blocks before
   this scope sweeps up the remainder. */
845 for (p = s->inner; p; p = p->next)
846 relate_bbs_with_scopes (p);
851 /* If the begin and end notes are both inside the same basic block,
852 or if they are both outside of basic blocks, then we know immediately
853 how they are related. Otherwise, we need to poke around to make the
855 if (s->bb_beg != s->bb_end)
857 if (s->bb_beg && s->bb_end)
859 /* Both notes are in different bbs. This implies that all the
860 basic blocks spanned by the pair of notes are contained in
862 bbi1 = s->bb_beg->index;
863 bbi2 = s->bb_end->index;
866 else if (! s->bb_beg)
868 /* First note is outside of a bb. If the scope spans more than
869 one basic block, then they all are contained within this
870 scope. Otherwise, this scope is contained within the basic
872 bbnote = get_next_bb_note (s->note_beg);
875 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
878 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
882 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
883 bbi2 = s->bb_end->index;
888 else /* ! s->bb_end */
890 /* Second note is outside of a bb. If the scope spans more than
891 one basic block, then they all are contained within this
892 scope. Otherwise, this scope is contained within the basic
894 bbnote = get_prev_bb_note (s->note_end);
897 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
900 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
904 bbi1 = s->bb_beg->index;
905 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
914 /* Both notes are in the same bb, which implies the block
915 contains this scope. */
920 /* Both notes are outside of any bbs. This implies that all the
921 basic blocks spanned by the pair of notes are contained in
923 There is a degenerate case to consider. If the notes do not
924 span any basic blocks, then it is an empty scope that can
925 safely be deleted or ignored. Mark these with level = -1. */
927 x1 = get_next_bb_note (s->note_beg);
928 x2 = get_prev_bb_note (s->note_end);
936 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
937 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
943 /* If the scope spans one or more basic blocks, we record them. We
944 only record the bbs that are immediately contained within this
945 scope. Note that if a scope is contained within a bb, we can tell
946 by checking that bb_beg = bb_end and that they are non-null. */
/* Two passes: first count the blocks not yet claimed by an inner
   scope, then allocate and fill the bbs array with exactly those. */
952 for (i = bbi1; i <= bbi2; i++)
953 if (! RBI (BASIC_BLOCK (i))->scope)
956 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
957 for (i = bbi1; i <= bbi2; i++)
959 basic_block curr_bb = BASIC_BLOCK (i);
960 if (! RBI (curr_bb)->scope)
962 s->bbs[j++] = curr_bb;
963 RBI (curr_bb)->scope = s;
972 /* Allocate and initialize a new scope structure with scope level LEVEL,
973 and record the NOTE beginning the scope. */
976 make_new_scope (level, note)
/* xcalloc zeroes the remaining fields (children, siblings, bbs, etc.). */
980 scope new_scope = xcalloc (1, sizeof (struct scope_def));
981 new_scope->level = level;
982 new_scope->note_beg = note;
987 /* Build a forest representing the scope structure of the function.
988 Return a pointer to a structure describing the forest. */
991 build_scope_forest (forest)
992 scope_forest_info *forest;
997 scope root, curr_scope = 0;
999 forest->num_trees = 0;
1000 forest->trees = NULL;
/* Single walk over the insn chain, tracking the current basic block
   and the currently open scope as BLOCK_BEG/BLOCK_END notes go by. */
1005 for (x = get_insns (); x; x = NEXT_INSN (x))
1007 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
1008 curr_bb = BASIC_BLOCK (bbi);
1010 if (GET_CODE (x) == NOTE)
1012 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
/* BLOCK_BEG inside an open scope: link the new scope as the
   last child of the current one. */
1020 new_scope = make_new_scope (level, x);
1021 new_scope->outer = curr_scope;
1022 new_scope->next = NULL;
1023 if (! curr_scope->inner)
1025 curr_scope->inner = new_scope;
1026 curr_scope->inner_last = new_scope;
1030 curr_scope->inner_last->next = new_scope;
1031 curr_scope->inner_last = new_scope;
1033 curr_scope = curr_scope->inner_last;
/* BLOCK_BEG at the outermost level starts a new tree in the forest. */
1037 int ntrees = forest->num_trees;
1039 curr_scope = make_new_scope (level, x);
1041 forest->trees = xrealloc (forest->trees,
1042 sizeof (scope) * (ntrees + 1));
1043 forest->trees[forest->num_trees++] = root;
1045 curr_scope->bb_beg = curr_bb;
1047 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
/* BLOCK_END closes the current scope and pops back to its parent. */
1049 curr_scope->bb_end = curr_bb;
1050 curr_scope->note_end = x;
1052 curr_scope = curr_scope->outer;
1058 if (curr_bb && curr_bb->end == x)
/* With the forest built, compute block/scope containment. */
1066 for (i = 0; i < forest->num_trees; i++)
1067 relate_bbs_with_scopes (forest->trees[i]);
1071 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
1075 remove_scope_notes ()
1078 basic_block currbb = NULL;
1080 for (x = get_insns (); x; x = next)
1082 next = NEXT_INSN (x);
/* Track the block we are in so block boundaries can be patched when a
   scope note happens to be a block's head or end insn. */
1083 if (NOTE_INSN_BASIC_BLOCK_P (x))
1084 currbb = NOTE_BASIC_BLOCK (x);
1086 if (GET_CODE (x) == NOTE
1087 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1088 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1090 /* Check if the scope note happens to be the end of a bb. */
1091 if (currbb && x == currbb->end)
1092 currbb->end = PREV_INSN (x);
1093 if (currbb && x == currbb->head)
/* Unlink the note from the doubly-linked insn chain and clear its
   own links. */
1098 NEXT_INSN (PREV_INSN (x)) = next;
1099 PREV_INSN (next) = PREV_INSN (x);
1101 NEXT_INSN (x) = NULL;
1102 PREV_INSN (x) = NULL;
1111 /* Insert scope note pairs for a contained scope tree S after insn IP. */
1114 insert_intra_1 (s, ip, bb)
/* Emit BEG note, recurse into children, then emit the matching END note,
   advancing *IP past each emitted note. */
1121 if (NOTE_BLOCK (s->note_beg))
1123 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1124 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
1125 if (basic_block_for_insn)
1126 set_block_for_insn (*ip, bb);
1129 for (p = s->inner; p; p = p->next)
1130 insert_intra_1 (p, ip, bb);
/* NOTE(review): the END emission is guarded by note_beg's BLOCK as
   well -- presumably the BEG and END notes share one BLOCK tree node;
   confirm this is intentional. */
1132 if (NOTE_BLOCK (s->note_beg))
1134 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1135 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1136 if (basic_block_for_insn)
1137 set_block_for_insn (*ip, bb);
1142 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1143 scopes that are contained within BB. */
1146 insert_intra_bb_scope_notes (bb)
1149 scope s = RBI (bb)->scope;
/* Start inserting after the block head, but never before a leading
   label. */
1157 if (GET_CODE (ip) == CODE_LABEL)
1158 ip = NEXT_INSN (ip);
/* Only children wholly contained in BB (bb_beg == bb_end == BB) get
   their note pairs emitted here. */
1160 for (p = s->inner; p; p = p->next)
1162 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1163 insert_intra_1 (p, &ip, bb);
1168 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1169 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1170 notes before BB2 such that the notes are correctly balanced. If BB1 or
1171 BB2 is NULL, we are inserting scope notes for the first and last basic
1172 blocks, respectively. */
1175 insert_inter_bb_scope_notes (bb1, bb2)
1182 /* It is possible that a basic block is not contained in any scope.
1183 In that case, we either open or close a scope but not both. */
1186 scope s1 = RBI (bb1)->scope;
1187 scope s2 = RBI (bb2)->scope;
1196 /* Find common ancestor scope. */
/* Walk the deeper scope up until both sides sit at the same level,
   then ascend in lockstep to the shared ancestor. */
1199 scope s1 = RBI (bb1)->scope;
1200 scope s2 = RBI (bb2)->scope;
1205 if (s1->level > s2->level)
1207 else if (s2->level > s1->level)
/* Close the scopes open at the end of BB1: emit END notes after its
   effective end, up to (not including) the common ancestor. */
1223 scope s = RBI (bb1)->scope;
1224 ip = RBI (bb1)->eff_end;
1227 if (NOTE_BLOCK (s->note_beg))
1229 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1230 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
1231 if (basic_block_for_insn)
1232 set_block_for_insn (ip, bb1);
/* Open the scopes needed for BB2: emit BEG notes before its head. */
1241 scope s = RBI (bb2)->scope;
1245 if (NOTE_BLOCK (s->note_beg))
1247 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1248 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1249 if (basic_block_for_insn)
1250 set_block_for_insn (ip, bb2);
1258 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1259 on the scope forest and the newly reordered basic blocks. */
1262 rebuild_scope_notes (forest)
1263 scope_forest_info *forest;
/* Nothing to rebuild if the function had no lexical scopes at all. */
1267 if (forest->num_trees == 0)
1270 /* Start by opening the scopes before the first basic block. */
1271 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1273 /* Then, open and close scopes as needed between blocks. */
1274 for (i = 0; i < n_basic_blocks - 1; i++)
1276 basic_block bb1 = BASIC_BLOCK (i);
1277 basic_block bb2 = BASIC_BLOCK (i + 1);
1278 if (RBI (bb1)->scope != RBI (bb2)->scope)
1279 insert_inter_bb_scope_notes (bb1, bb2);
1280 insert_intra_bb_scope_notes (bb1);
1283 /* Finally, close the scopes after the last basic block. */
1284 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1285 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1289 /* Free the storage associated with the scope tree at S. */
1292 free_scope_forest_1 (s)
/* NOTE(review): NEXT must be captured from p->next before P is freed
   by the recursion -- that assignment appears elided in this listing. */
1297 for (p = s->inner; p; p = next)
1300 free_scope_forest_1 (p);
1309 /* Free the storage associated with the scope forest. */
1312 free_scope_forest (forest)
1313 scope_forest_info *forest;
/* Free each tree; the trees array itself is owned by the caller's
   forest structure. */
1316 for (i = 0; i < forest->num_trees; i++)
1317 free_scope_forest_1 (forest->trees[i]);
1321 /* Visualize the scope forest.  Debugging aid; writes to stderr. */
1324 dump_scope_forest (forest)
1325 scope_forest_info *forest;
1327 if (forest->num_trees == 0)
1328 fprintf (stderr, "\n< Empty scope forest >\n");
1332 fprintf (stderr, "\n< Scope forest >\n");
1333 for (i = 0; i < forest->num_trees; i++)
1334 dump_scope_forest_1 (forest->trees[i], 0);
1339 /* Recursive portion of dump_scope_forest.  INDENT is the current
   left margin in spaces. */
1342 dump_scope_forest_1 (s, indent)
/* A scope wholly contained in one block whose parent sits exactly one
   level up is printed as a plain "BB%d:" line instead of braces. */
1349 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1350 && RBI (s->bb_beg)->scope
1351 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1353 fprintf (stderr, "%*s", indent, "");
1354 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1357 fprintf (stderr, "%*s", indent, "");
1358 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1359 (PTR) NOTE_BLOCK (s->note_beg));
1361 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1362 for (i = 0; i < s->num_bbs; i++)
1363 fprintf (stderr, " %d", s->bbs[i]->index);
1364 fprintf (stderr, "\n");
1366 for (p = s->inner; p; p = p->next)
1367 dump_scope_forest_1 (p, indent + 2);
1369 fprintf (stderr, "%*s", indent, "");
1370 fprintf (stderr, "}\n");
1374 /* Reorder basic blocks. The main entry point to this file. */
1377 reorder_basic_blocks ()
1379 scope_forest_info forest;
/* A single-block function cannot be reordered. */
1382 if (n_basic_blocks <= 1)
/* Attach zero-initialized reordering info to every block, and to the
   exit block so RBI() is safe on it too. */
1385 for (i = 0; i < n_basic_blocks; i++)
1386 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1388 EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
/* Capture scope structure, strip the notes so insns can move freely,
   compute the new order, then rewrite the insn chain to match. */
1390 build_scope_forest (&forest);
1391 remove_scope_notes ();
1393 record_effective_endpoints ();
1394 make_reorder_chain ();
1397 dump_flow_info (rtl_dump_file);
1399 fixup_reorder_chain ();
1401 #ifdef ENABLE_CHECKING
1402 verify_insn_chain ();
/* Re-emit the scope notes against the new block order and clean up. */
1405 rebuild_scope_notes (&forest);
1406 free_scope_forest (&forest);
1409 for (i = 0; i < n_basic_blocks; i++)
1410 free (BASIC_BLOCK (i)->aux);
1412 free (EXIT_BLOCK_PTR->aux);
1414 #ifdef ENABLE_CHECKING
1415 verify_flow_info ();