1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
30 if (p) goto A; // predict taken
33 if (q) goto B; // predict taken
39 We'll currently reorder this as
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
72 (2) Coordinate with shorten_branches to minimize the number of
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
101 #ifndef HAVE_epilogue
102 #define HAVE_epilogue 0
106 /* The contents of the current function definition are allocated
107 in this obstack, and all are freed at the end of the function.
108 For top-level functions, this is temporary_obstack.
109 Separate obstacks are made for nested functions. */
111 extern struct obstack flow_obstack;
114 /* Structure to hold information about lexical scopes. */
/* One node of the scope forest: mirrors a NOTE_INSN_BLOCK_BEG/END pair
   and records which basic blocks fall inside it.
   NOTE(review): the declarations for the note/bb/bbs/num_bbs fields are
   elided from this view; only their describing comments remain.  */
115 typedef struct scope_def
119 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
122 /* The NOTE_INSN_BLOCK_END that ended this scope. */
125 /* The bb containing note_beg (if any). */
128 /* The bb containing note_end (if any). */
131 /* List of basic blocks contained within this scope. */
134 /* Number of blocks contained within this scope. */
137 /* The outer scope or NULL if outermost scope. */
138 struct scope_def *outer;
140 /* The first inner scope or NULL if innermost scope. */
141 struct scope_def *inner;
143 /* The last inner scope or NULL if innermost scope. */
144 struct scope_def *inner_last;
146 /* Link to the next (sibling) scope. */
147 struct scope_def *next;
151 /* Structure to hold information about the scope forest. */
154 /* Number of trees in forest. */
157 /* List of tree roots. */
161 /* Structure to hold information about the blocks during reordering. */
/* Per-basic-block bookkeeping, hung off bb->aux during the pass and
   accessed via the RBI() macro below.  Fields used elsewhere in this
   file: eff_head/eff_end (effective insn-chain endpoints), scope,
   visited flag, and next (the reorder chain link).  */
162 typedef struct reorder_block_def
169 } *reorder_block_def;
/* Convenience accessor for the per-block data stored in bb->aux.  */
171 #define RBI(BB) ((reorder_block_def) (BB)->aux)
173 /* Holds the interesting trailing notes for the function. */
174 static rtx function_tail_eff_head;
177 /* Local function prototypes. */
178 static rtx skip_insns_after_block PARAMS ((basic_block));
179 static void record_effective_endpoints PARAMS ((void));
180 static void make_reorder_chain PARAMS ((void));
181 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
182 static rtx label_for_bb PARAMS ((basic_block));
183 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
184 static void fixup_reorder_chain PARAMS ((void));
185 static void relate_bbs_with_scopes PARAMS ((scope));
186 static scope make_new_scope PARAMS ((int, rtx));
187 static void build_scope_forest PARAMS ((scope_forest_info *));
188 static void remove_scope_notes PARAMS ((void));
189 static void insert_intra_1 PARAMS ((scope, rtx *, basic_block));
190 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
191 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
192 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
193 static void free_scope_forest_1 PARAMS ((scope));
194 static void free_scope_forest PARAMS ((scope_forest_info *));
195 void dump_scope_forest PARAMS ((scope_forest_info *));
196 static void dump_scope_forest_1 PARAMS ((scope, int));
197 static rtx get_next_bb_note PARAMS ((rtx));
198 static rtx get_prev_bb_note PARAMS ((rtx));
200 void verify_insn_chain PARAMS ((void));
202 /* Skip over inter-block insns occurring after BB which are typically
203 associated with BB (e.g., barriers). If there are any such insns,
204 we return the last one. Otherwise, we return the end of BB. */
207 skip_insns_after_block (bb)
210 rtx insn, last_insn, next_head, prev;
/* Remember the head of the following block so the forward scan below
   knows where BB's trailing insns must stop.  */
212 next_head = NULL_RTX;
213 if (bb->index + 1 != n_basic_blocks)
214 next_head = BASIC_BLOCK (bb->index + 1)->head;
216 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)); )
218 if (insn == next_head)
221 switch (GET_CODE (insn))
228 switch (NOTE_LINE_NUMBER (insn))
230 case NOTE_INSN_LOOP_END:
231 case NOTE_INSN_BLOCK_END:
234 case NOTE_INSN_DELETED:
235 case NOTE_INSN_DELETED_LABEL:
/* A CODE_LABEL immediately followed by a jump table (ADDR_VEC or
   ADDR_DIFF_VEC) belongs with BB; consume both insns.  */
246 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
247 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
248 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
250 insn = NEXT_INSN (insn);
262 /* It is possible to hit contradicting sequence. For instance:
268 Where barrier belongs to jump_insn, but the note does not.
269 This can be created by removing the basic block originally
270 following NOTE_INSN_LOOP_BEG.
272 In such case reorder the notes. */
/* Walk backwards from the last accepted insn, moving stray end-notes
   after LAST_INSN so the trailing sequence stays well-formed.  */
273 for (insn = last_insn; insn != bb->end; insn = prev)
275 prev = PREV_INSN (insn);
276 if (GET_CODE (insn) == NOTE)
277 switch (NOTE_LINE_NUMBER (insn))
279 case NOTE_INSN_LOOP_END:
280 case NOTE_INSN_BLOCK_END:
281 case NOTE_INSN_DELETED:
282 case NOTE_INSN_DELETED_LABEL:
285 reorder_insns (insn, insn, last_insn);
293 /* Locate the effective beginning and end of the insn chain for each
294 block, as defined by skip_insns_after_block above. */
297 record_effective_endpoints ()
299 rtx next_insn = get_insns ();
302 for (i = 0; i < n_basic_blocks; ++i)
304 basic_block bb = BASIC_BLOCK (i);
/* eff_head starts where the previous block's effective range ended,
   so inter-block insns (notes, barriers) travel with their block.  */
307 RBI (bb)->eff_head = next_insn;
308 end = skip_insns_after_block (bb);
309 RBI (bb)->eff_end = end;
310 next_insn = NEXT_INSN (end);
/* Whatever follows the last block is the function tail; it is
   re-attached after reordering by fixup_reorder_chain.  */
312 function_tail_eff_head = next_insn;
316 /* Compute an ordering for a subgraph beginning with block BB. Record the
317 ordering in RBI()->index and chained through RBI()->next. */
320 make_reorder_chain ()
322 basic_block last_block = NULL;
323 basic_block prev = NULL;
324 int nbb_m1 = n_basic_blocks - 1;
327 /* If we've not got epilogue in RTL, we must fallthru to the exit.
328 Force the last block to be at the end. */
329 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
330 end of the function for stack unwinding purposes. */
/* Pre-mark the last block visited so the greedy walk below never
   places it; it is appended explicitly when the chain is terminated.  */
333 last_block = BASIC_BLOCK (nbb_m1);
334 RBI (last_block)->visited = 1;
338 /* Loop until we've placed every block. */
345 /* Find the next unplaced block. */
346 /* ??? Get rid of this loop, and track which blocks are not yet
347 placed more directly, so as to avoid the O(N^2) worst case.
348 Perhaps keep a doubly-linked list of all to-be-placed blocks;
349 remove from the list as we place. The head of that list is
350 what we're looking for here. */
352 for (i = 0; i <= nbb_m1 && !next; ++i)
354 basic_block bb = BASIC_BLOCK (i);
355 if (! RBI (bb)->visited)
359 prev = make_reorder_chain_1 (next, prev);
363 /* Terminate the chain. */
366 RBI (prev)->next = last_block;
369 RBI (prev)->next = NULL;
372 /* A helper function for make_reorder_chain.
374 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
375 These are assumed to be the error condition and we wish to cluster
376 all of them at the very end of the function for the benefit of cache
377 locality for the rest of the function.
379 ??? We could do slightly better by noticing earlier that some subgraph
380 has all paths leading to noreturn functions, but for there to be more
381 than one block in such a subgraph is rare. */
384 make_reorder_chain_1 (bb, prev)
392 /* Mark this block visited. */
396 RBI (prev)->next = bb;
398 if (rtl_dump_file && prev->index + 1 != bb->index)
399 fprintf (rtl_dump_file, "Reordering block %d after %d\n",
400 bb->index, prev->index);
407 RBI (bb)->visited = 1;
/* A block with no successors (e.g. one ending in a return) ends this
   chain segment.  */
410 if (bb->succ == NULL)
413 /* Find the most probable block. */
416 if (any_condjump_p (bb->end)
417 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
419 int taken, probability;
420 edge e_taken, e_fall;
/* REG_BR_PROB stores the taken probability scaled by
   REG_BR_PROB_BASE; "taken" means more likely than not.  */
422 probability = INTVAL (XEXP (note, 0));
423 taken = probability > REG_BR_PROB_BASE / 2;
425 /* Find the normal taken edge and the normal fallthru edge.
427 Note, conditional jumps with other side effects may not
428 be fully optimized. In this case it is possible for
429 the conditional jump to branch to the same location as
432 We should probably work to improve optimization of that
433 case; however, it seems silly not to also deal with such
434 problems here if they happen to occur. */
436 e_taken = e_fall = NULL;
437 for (e = bb->succ; e ; e = e->succ_next)
439 if (e->flags & EDGE_FALLTHRU)
441 else if (! (e->flags & EDGE_EH))
/* Greedily continue with the predicted direction.  */
445 next = (taken ? e_taken : e_fall)->dest;
448 /* In the absence of a prediction, disturb things as little as possible
449 by selecting the old "next" block from the list of successors. If
450 there had been a fallthru edge, that will be the one. */
453 for (e = bb->succ; e ; e = e->succ_next)
454 if (e->dest->index == bb->index + 1)
456 if ((e->flags & EDGE_FALLTHRU)
458 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
464 /* Make sure we didn't select a silly next block. */
465 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
468 /* Recurse on the successors. Unroll the last call, as the normal
469 case is exactly one or two edges, and we can tail recurse. */
470 for (e = bb->succ; e; e = e->succ_next)
471 if (e->dest != EXIT_BLOCK_PTR
472 && ! RBI (e->dest)->visited
474 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
478 prev = make_reorder_chain_1 (next, prev);
/* The recursion above may have placed e->dest already; re-check
   before adopting it as the tail-recursion target.  */
479 next = RBI (e->dest)->visited ? NULL : e->dest;
494 /* Locate or create a label for a given basic block. */
500 rtx label = bb->head;
/* If the block doesn't already start with a label, synthesize one so
   jumps can target it.  */
502 if (GET_CODE (label) != CODE_LABEL)
505 fprintf (rtl_dump_file, "Emitting label for block %d\n",
508 label = emit_label_before (gen_label_rtx (), label);
/* Keep the recorded effective head in sync when the new label is
   emitted ahead of it.  */
509 if (bb->head == RBI (bb)->eff_head)
510 RBI (bb)->eff_head = label;
512 if (basic_block_for_insn)
513 set_block_for_insn (label, bb);
520 /* Emit a jump to BB after insn AFTER. */
523 emit_jump_to_block_after (bb, after)
/* Jumping to the exit block means emitting a return instead of a
   labeled jump.  */
529 if (bb != EXIT_BLOCK_PTR)
531 rtx label = label_for_bb (bb);
532 jump = emit_jump_insn_after (gen_jump (label), after);
/* Maintain JUMP_LABEL and the label's use count so later jump
   optimization sees a consistent chain.  */
533 JUMP_LABEL (jump) = label;
534 LABEL_NUSES (label) += 1;
535 if (basic_block_for_insn)
536 set_block_for_new_insns (jump, bb);
539 fprintf (rtl_dump_file, "Emitting jump to block %d\n",
547 jump = emit_jump_insn_after (gen_return (), after);
548 if (basic_block_for_insn)
549 set_block_for_new_insns (jump, bb);
552 fprintf (rtl_dump_file, "Emitting return\n");
562 /* Given a reorder chain, rearrange the code to match. */
565 fixup_reorder_chain ()
567 basic_block bb, last_bb;
570 int old_n_basic_blocks = n_basic_blocks;
572 /* First do the bulk reordering -- rechain the blocks without regard to
573 the needed changes to jumps and labels. */
575 last_bb = BASIC_BLOCK (0);
576 bb = RBI (last_bb)->next;
/* Splice each block's effective insn range after its predecessor in
   the new order by patching NEXT_INSN/PREV_INSN directly.  */
580 rtx last_e = RBI (last_bb)->eff_end;
581 rtx curr_h = RBI (bb)->eff_head;
583 NEXT_INSN (last_e) = curr_h;
584 PREV_INSN (curr_h) = last_e;
/* Sanity check: the chain walk must have visited every block.  */
591 if (index != n_basic_blocks)
/* Re-attach the function tail saved by record_effective_endpoints.  */
594 insn = RBI (last_bb)->eff_end;
596 NEXT_INSN (insn) = function_tail_eff_head;
597 if (function_tail_eff_head)
598 PREV_INSN (function_tail_eff_head) = insn;
600 while (NEXT_INSN (insn))
601 insn = NEXT_INSN (insn);
602 set_last_insn (insn);
603 #ifdef ENABLE_CHECKING
604 verify_insn_chain ();
607 /* Now add jumps and labels as needed to match the blocks new
610 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
612 edge e_fall, e_taken, e;
613 rtx jump_insn, barrier_insn, bb_end_insn;
616 if (bb->succ == NULL)
619 /* Find the old fallthru edge, and another non-EH edge for
621 e_taken = e_fall = NULL;
622 for (e = bb->succ; e ; e = e->succ_next)
623 if (e->flags & EDGE_FALLTHRU)
625 else if (! (e->flags & EDGE_EH))
628 bb_end_insn = bb->end;
629 if (GET_CODE (bb_end_insn) == JUMP_INSN)
631 if (any_condjump_p (bb_end_insn))
633 /* If the old fallthru is still next, nothing to do. */
634 if (RBI (bb)->next == e_fall->dest
636 && e_fall->dest == EXIT_BLOCK_PTR))
639 /* There is one special case: if *neither* block is next,
640 such as happens at the very end of a function, then we'll
641 need to add a new unconditional jump. Choose the taken
642 edge based on known or assumed probability. */
643 if (RBI (bb)->next != e_taken->dest)
645 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
/* If the branch is predicted not-taken, invert it so the
   likely path becomes the fallthru, then swap the edges.  */
647 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
648 && invert_jump (bb_end_insn,
649 label_for_bb (e_fall->dest), 0))
651 e_fall->flags &= ~EDGE_FALLTHRU;
652 e_taken->flags |= EDGE_FALLTHRU;
653 e = e_fall, e_fall = e_taken, e_taken = e;
657 /* Otherwise we can try to invert the jump. This will
658 basically never fail, however, keep up the pretense. */
659 else if (invert_jump (bb_end_insn,
660 label_for_bb (e_fall->dest), 0))
662 e_fall->flags &= ~EDGE_FALLTHRU;
663 e_taken->flags |= EDGE_FALLTHRU;
667 else if (returnjump_p (bb_end_insn))
671 /* Otherwise we have some switch or computed jump. In the
672 99% case, there should not have been a fallthru edge. */
675 #ifdef CASE_DROPS_THROUGH
676 /* Except for VAX. Since we didn't have predication for the
677 tablejump, the fallthru block should not have moved. */
678 if (RBI (bb)->next == e_fall->dest
680 bb_end_insn = skip_insns_after_block (bb);
688 /* No fallthru implies a noreturn function with EH edges, or
689 something similarly bizarre. In any case, we don't need to
694 /* If the fallthru block is still next, nothing to do. */
695 if (RBI (bb)->next == e_fall->dest)
698 /* We need a new jump insn. If the block has only one outgoing
699 edge, then we can stuff the new jump insn in directly. */
700 if (bb->succ->succ_next == NULL)
702 e_fall->flags &= ~EDGE_FALLTHRU;
704 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
706 barrier_insn = emit_barrier_after (jump_insn);
707 RBI (bb)->eff_end = barrier_insn;
712 /* We got here if we need to add a new jump insn in a new block
713 across the edge e_fall. */
715 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
716 barrier_insn = emit_barrier_after (jump_insn);
718 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
719 create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
721 nb = BASIC_BLOCK (n_basic_blocks - 1);
723 nb->count = e_fall->count;
724 nb->frequency = EDGE_FREQUENCY (e_fall);
/* The compensation block passes everything straight through, so its
   live-in set doubles as its live-out set.  */
726 nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
727 nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
728 COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
729 COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
731 nb->aux = xmalloc (sizeof (struct reorder_block_def));
732 RBI (nb)->eff_head = nb->head;
733 RBI (nb)->eff_end = barrier_insn;
734 RBI (nb)->scope = RBI (bb)->scope;
735 RBI (nb)->visited = 1;
736 RBI (nb)->next = RBI (bb)->next;
739 /* Link to new block. */
740 make_edge (NULL, nb, e_fall->dest, 0);
741 redirect_edge_succ (e_fall, nb);
/* The new block has exactly one successor, so it is always taken.  */
742 nb->succ->count = e_fall->count;
743 nb->succ->probability = REG_BR_PROB_BASE;
745 /* Don't process this new block. */
749 /* Put basic_block_info in the new order. */
750 bb = BASIC_BLOCK (0);
754 fprintf (rtl_dump_file, "Reordered sequence:\n");
758 fprintf (rtl_dump_file, " %i %sbb %i freq %i\n", index,
759 bb->index >= old_n_basic_blocks ? "compensation " : "",
763 BASIC_BLOCK (index) = bb;
771 /* Perform sanity checks on the insn chain.
772 1. Check that next/prev pointers are consistent in both the forward and
774 2. Count insns in chain, going both directions, and check if equal.
775 3. Check that get_last_insn () returns the actual end of chain. */
/* Forward pass: every insn's PREV_INSN must point at the insn we just
   came from.  */
788 for (x = get_insns (); x; x = NEXT_INSN (x))
790 if (PREV_INSN (x) != prevx)
792 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
793 fprintf (stderr, "previous insn:\n");
795 fprintf (stderr, "current insn:\n");
803 if (prevx != get_last_insn ())
805 fprintf (stderr, "last_insn corrupt.\n");
/* Backward pass: symmetric check on NEXT_INSN links.  */
811 for (x = get_last_insn (); x; x = PREV_INSN (x))
813 if (NEXT_INSN (x) != nextx)
815 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
816 fprintf (stderr, "current insn:\n");
818 fprintf (stderr, "next insn:\n");
826 if (insn_cnt1 != insn_cnt2)
828 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
829 insn_cnt1, insn_cnt2);
840 if (NOTE_INSN_BASIC_BLOCK_P (x))
854 if (NOTE_INSN_BASIC_BLOCK_P (x))
862 /* Determine and record the relationships between basic blocks and
863 scopes in scope tree S. */
866 relate_bbs_with_scopes (s)
870 int i, bbi1, bbi2, bbs_spanned;
/* Process children first so inner scopes claim their blocks before
   this scope records the remainder.  */
873 for (p = s->inner; p; p = p->next)
874 relate_bbs_with_scopes (p);
879 /* If the begin and end notes are both inside the same basic block,
880 or if they are both outside of basic blocks, then we know immediately
881 how they are related. Otherwise, we need to poke around to make the
883 if (s->bb_beg != s->bb_end)
885 if (s->bb_beg && s->bb_end)
887 /* Both notes are in different bbs. This implies that all the
888 basic blocks spanned by the pair of notes are contained in
890 bbi1 = s->bb_beg->index;
891 bbi2 = s->bb_end->index;
894 else if (! s->bb_beg)
896 /* First note is outside of a bb. If the scope spans more than
897 one basic block, then they all are contained within this
898 scope. Otherwise, this scope is contained within the basic
900 bbnote = get_next_bb_note (s->note_beg);
903 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
906 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
910 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
911 bbi2 = s->bb_end->index;
916 else /* ! s->bb_end */
918 /* Second note is outside of a bb. If the scope spans more than
919 one basic block, then they all are contained within this
920 scope. Otherwise, this scope is contained within the basic
922 bbnote = get_prev_bb_note (s->note_end);
925 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
928 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
932 bbi1 = s->bb_beg->index;
933 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
942 /* Both notes are in the same bb, which implies the block
943 contains this scope. */
948 /* Both notes are outside of any bbs. This implies that all the
949 basic blocks spanned by the pair of notes are contained in
951 There is a degenerate case to consider. If the notes do not
952 span any basic blocks, then it is an empty scope that can
953 safely be deleted or ignored. Mark these with level = -1. */
955 x1 = get_next_bb_note (s->note_beg);
956 x2 = get_prev_bb_note (s->note_end);
964 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
965 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
971 /* If the scope spans one or more basic blocks, we record them. We
972 only record the bbs that are immediately contained within this
973 scope. Note that if a scope is contained within a bb, we can tell
974 by checking that bb_beg = bb_end and that they are non-null. */
/* First pass counts unclaimed blocks; second pass fills s->bbs and
   marks each block's owning scope.  */
980 for (i = bbi1; i <= bbi2; i++)
981 if (! RBI (BASIC_BLOCK (i))->scope)
984 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
985 for (i = bbi1; i <= bbi2; i++)
987 basic_block curr_bb = BASIC_BLOCK (i);
988 if (! RBI (curr_bb)->scope)
990 s->bbs[j++] = curr_bb;
991 RBI (curr_bb)->scope = s;
1000 /* Allocate and initialize a new scope structure with scope level LEVEL,
1001 and record the NOTE beginning the scope. */
1004 make_new_scope (level, note)
/* xcalloc zeroes the structure, so all link pointers start NULL.  */
1008 scope new_scope = xcalloc (1, sizeof (struct scope_def));
1009 new_scope->level = level;
1010 new_scope->note_beg = note;
1015 /* Build a forest representing the scope structure of the function.
1016 Return a pointer to a structure describing the forest. */
1019 build_scope_forest (forest)
1020 scope_forest_info *forest;
1024 basic_block curr_bb;
1025 scope root, curr_scope = 0;
1027 forest->num_trees = 0;
1028 forest->trees = NULL;
/* Single linear scan of the insn stream, tracking the current basic
   block and pushing/popping scopes at BLOCK_BEG/BLOCK_END notes.  */
1033 for (x = get_insns (); x; x = NEXT_INSN (x))
1035 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
1036 curr_bb = BASIC_BLOCK (bbi);
1038 if (GET_CODE (x) == NOTE)
1040 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
/* Nested scope: link as the last child of the current scope.  */
1048 new_scope = make_new_scope (level, x);
1049 new_scope->outer = curr_scope;
1050 new_scope->next = NULL;
1051 if (! curr_scope->inner)
1053 curr_scope->inner = new_scope;
1054 curr_scope->inner_last = new_scope;
1058 curr_scope->inner_last->next = new_scope;
1059 curr_scope->inner_last = new_scope;
1061 curr_scope = curr_scope->inner_last;
/* Top-level scope: start a new tree in the forest.  */
1065 int ntrees = forest->num_trees;
1067 curr_scope = make_new_scope (level, x);
1069 forest->trees = xrealloc (forest->trees,
1070 sizeof (scope) * (ntrees + 1));
1071 forest->trees[forest->num_trees++] = root;
1073 curr_scope->bb_beg = curr_bb;
1075 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
1077 curr_scope->bb_end = curr_bb;
1078 curr_scope->note_end = x;
1080 curr_scope = curr_scope->outer;
1086 if (curr_bb && curr_bb->end == x)
1094 for (i = 0; i < forest->num_trees; i++)
1095 relate_bbs_with_scopes (forest->trees[i]);
1099 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
1103 remove_scope_notes ()
1106 basic_block currbb = NULL;
1108 for (x = get_insns (); x; x = next)
1110 next = NEXT_INSN (x);
1111 if (NOTE_INSN_BASIC_BLOCK_P (x))
1112 currbb = NOTE_BASIC_BLOCK (x);
1114 if (GET_CODE (x) == NOTE
1115 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1116 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1118 /* Check if the scope note happens to be the end of a bb. */
1119 if (currbb && x == currbb->end)
1120 currbb->end = PREV_INSN (x);
1121 if (currbb && x == currbb->head)
/* Unlink the note from the insn chain and detach its pointers so it
   cannot be reached afterwards.  */
1126 NEXT_INSN (PREV_INSN (x)) = next;
1127 PREV_INSN (next) = PREV_INSN (x);
1129 NEXT_INSN (x) = NULL;
1130 PREV_INSN (x) = NULL;
1139 /* Insert scope note pairs for a contained scope tree S after insn IP. */
1142 insert_intra_1 (s, ip, bb)
/* Emit the BLOCK_BEG, recurse into children, then the matching
   BLOCK_END, so the notes come out properly nested.  */
1149 if (NOTE_BLOCK (s->note_beg))
1151 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1152 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
1153 if (basic_block_for_insn)
1154 set_block_for_insn (*ip, bb);
1157 for (p = s->inner; p; p = p->next)
1158 insert_intra_1 (p, ip, bb);
1160 if (NOTE_BLOCK (s->note_beg))
1162 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1163 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1164 if (basic_block_for_insn)
1165 set_block_for_insn (*ip, bb);
1170 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1171 scopes that are contained within BB. */
1174 insert_intra_bb_scope_notes (bb)
1177 scope s = RBI (bb)->scope;
/* Notes must go after the block's label, if any.  */
1185 if (GET_CODE (ip) == CODE_LABEL)
1186 ip = NEXT_INSN (ip);
1188 for (p = s->inner; p; p = p->next)
/* A child scope is fully inside BB exactly when both its endpoint
   blocks are BB (see relate_bbs_with_scopes).  */
1190 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1191 insert_intra_1 (p, &ip, bb);
1196 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1197 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1198 notes before BB2 such that the notes are correctly balanced. If BB1 or
1199 BB2 is NULL, we are inserting scope notes for the first and last basic
1200 blocks, respectively. */
1203 insert_inter_bb_scope_notes (bb1, bb2)
1210 /* It is possible that a basic block is not contained in any scope.
1211 In that case, we either open or close a scope but not both. */
1214 scope s1 = RBI (bb1)->scope;
1215 scope s2 = RBI (bb2)->scope;
1224 /* Find common ancestor scope. */
/* Walk both scope chains upward, leveling them first, until they
   meet; scopes below the meeting point must be closed/opened.  */
1227 scope s1 = RBI (bb1)->scope;
1228 scope s2 = RBI (bb2)->scope;
1233 if (s1->level > s2->level)
1235 else if (s2->level > s1->level)
/* Close the scopes open at the end of BB1 down to the common
   ancestor.  */
1251 scope s = RBI (bb1)->scope;
1252 ip = RBI (bb1)->eff_end;
1255 if (NOTE_BLOCK (s->note_beg))
1257 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1258 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
1259 if (basic_block_for_insn)
1260 set_block_for_insn (ip, bb1);
/* Re-open the scopes needed at the start of BB2.  */
1269 scope s = RBI (bb2)->scope;
1273 if (NOTE_BLOCK (s->note_beg))
1275 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1276 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1277 if (basic_block_for_insn)
1278 set_block_for_insn (ip, bb2);
1286 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1287 on the scope forest and the newly reordered basic blocks. */
1290 rebuild_scope_notes (forest)
1291 scope_forest_info *forest;
/* Nothing to do when the function had no lexical scopes at all.  */
1295 if (forest->num_trees == 0)
1298 /* Start by opening the scopes before the first basic block. */
1299 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1301 /* Then, open and close scopes as needed between blocks. */
1302 for (i = 0; i < n_basic_blocks - 1; i++)
1304 basic_block bb1 = BASIC_BLOCK (i);
1305 basic_block bb2 = BASIC_BLOCK (i + 1);
1306 if (RBI (bb1)->scope != RBI (bb2)->scope)
1307 insert_inter_bb_scope_notes (bb1, bb2);
1308 insert_intra_bb_scope_notes (bb1);
1311 /* Finally, close the scopes after the last basic block. */
1312 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1313 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1317 /* Free the storage associated with the scope tree at S. */
1320 free_scope_forest_1 (s)
/* Capture p->next before freeing p's subtree (post-order traversal).  */
1325 for (p = s->inner; p; p = next)
1328 free_scope_forest_1 (p);
1337 /* Free the storage associated with the scope forest. */
1340 free_scope_forest (forest)
1341 scope_forest_info *forest;
1344 for (i = 0; i < forest->num_trees; i++)
1345 free_scope_forest_1 (forest->trees[i]);
1349 /* Visualize the scope forest. */
1352 dump_scope_forest (forest)
1353 scope_forest_info *forest;
1355 if (forest->num_trees == 0)
1356 fprintf (stderr, "\n< Empty scope forest >\n");
1360 fprintf (stderr, "\n< Scope forest >\n");
1361 for (i = 0; i < forest->num_trees; i++)
1362 dump_scope_forest_1 (forest->trees[i], 0);
1367 /* Recursive portion of dump_scope_forest. */
1370 dump_scope_forest_1 (s, indent)
/* A scope wholly contained in one block (and directly under its
   block's recorded scope) prints as a bare "BBn:" line.  */
1377 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1378 && RBI (s->bb_beg)->scope
1379 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1381 fprintf (stderr, "%*s", indent, "");
1382 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1385 fprintf (stderr, "%*s", indent, "");
1386 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1387 (PTR) NOTE_BLOCK (s->note_beg));
1389 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1390 for (i = 0; i < s->num_bbs; i++)
1391 fprintf (stderr, " %d", s->bbs[i]->index);
1392 fprintf (stderr, "\n");
1394 for (p = s->inner; p; p = p->next)
1395 dump_scope_forest_1 (p, indent + 2);
1397 fprintf (stderr, "%*s", indent, "");
1398 fprintf (stderr, "}\n");
1402 /* Reorder basic blocks. The main entry point to this file. */
1405 reorder_basic_blocks ()
1407 scope_forest_info forest;
/* Nothing to reorder with zero or one block.  */
1410 if (n_basic_blocks <= 1)
/* Attach zeroed per-block reorder data (freed below) to every block,
   including the exit block.  */
1413 for (i = 0; i < n_basic_blocks; i++)
1414 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1416 EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
/* Pass pipeline: capture scopes, strip their notes, compute the new
   block order, splice the insn chain, then restore the notes.  */
1418 build_scope_forest (&forest);
1419 remove_scope_notes ();
1421 record_effective_endpoints ();
1422 make_reorder_chain ();
1425 dump_flow_info (rtl_dump_file);
1427 fixup_reorder_chain ();
1429 #ifdef ENABLE_CHECKING
1430 verify_insn_chain ();
1433 rebuild_scope_notes (&forest);
1434 free_scope_forest (&forest);
1437 for (i = 0; i < n_basic_blocks; i++)
1438 free (BASIC_BLOCK (i)->aux)
1440 free (EXIT_BLOCK_PTR->aux);
1442 #ifdef ENABLE_CHECKING
1443 verify_flow_info ();