1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
30 if (p) goto A; // predict taken
33 if (q) goto B; // predict taken
39 We'll currently reorder this as
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
72 (2) Coordinate with shorten_branches to minimize the number of
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
101 #ifndef HAVE_epilogue
102 #define HAVE_epilogue 0
106 /* The contents of the current function definition are allocated
107 in this obstack, and all are freed at the end of the function.
108 For top-level functions, this is temporary_obstack.
109 Separate obstacks are made for nested functions. */
111 extern struct obstack flow_obstack;
114 /* Structure to hold information about lexical scopes. */
/* Scopes form a tree: each node links to its parent (outer), its first
   and last children (inner, inner_last), and its next sibling (next).  */
115 typedef struct scope_def
119 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
122 /* The NOTE_INSN_BLOCK_END that ended this scope. */
125 /* The bb containing note_beg (if any). */
128 /* The bb containing note_end (if any). */
131 /* List of basic blocks contained within this scope. */
134 /* Number of blocks contained within this scope. */
137 /* The outer scope or NULL if outermost scope. */
138 struct scope_def *outer;
140 /* The first inner scope or NULL if innermost scope. */
141 struct scope_def *inner;
143 /* The last inner scope or NULL if innermost scope. */
144 struct scope_def *inner_last;
146 /* Link to the next (sibling) scope. */
147 struct scope_def *next;
151 /* Structure to hold information about the scope forest. */
/* A function may contain several disjoint scope trees; this records
   how many there are and the root of each.  */
154 /* Number of trees in forest. */
157 /* List of tree roots. */
161 /* Structure to hold information about the blocks during reordering. */
162 typedef struct reorder_block_def
170 } *reorder_block_def;
/* Per-block reordering data is hung off basic_block->aux; RBI is the
   accessor used throughout this file.  Valid only while this pass owns
   the aux field (allocated/freed in reorder_basic_blocks).  */
172 #define RBI(BB) ((reorder_block_def) (BB)->aux)
174 /* Holds the interesting trailing notes for the function. */
175 static rtx function_tail_eff_head;
178 /* Local function prototypes. */
179 static rtx skip_insns_after_block PARAMS ((basic_block));
180 static void record_effective_endpoints PARAMS ((void));
181 static void make_reorder_chain PARAMS ((void));
182 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
183 static rtx label_for_bb PARAMS ((basic_block));
184 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
185 static void fixup_reorder_chain PARAMS ((void));
186 static void relate_bbs_with_scopes PARAMS ((scope));
187 static scope make_new_scope PARAMS ((int, rtx));
188 static void build_scope_forest PARAMS ((scope_forest_info *));
189 static void remove_scope_notes PARAMS ((void));
190 static void insert_intra_1 PARAMS ((scope, rtx *));
191 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
192 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
193 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
194 static void free_scope_forest_1 PARAMS ((scope));
195 static void free_scope_forest PARAMS ((scope_forest_info *));
196 void dump_scope_forest PARAMS ((scope_forest_info *));
197 static void dump_scope_forest_1 PARAMS ((scope, int));
198 static rtx get_next_bb_note PARAMS ((rtx));
199 static rtx get_prev_bb_note PARAMS ((rtx));
201 void verify_insn_chain PARAMS ((void));
203 /* Skip over inter-block insns occurring after BB which are typically
204 associated with BB (e.g., barriers). If there are any such insns,
205 we return the last one. Otherwise, we return the end of BB. */
208 skip_insns_after_block (bb)
211 rtx insn, last_insn, next_head, prev;
/* Head of the following basic block (if any) acts as a hard stop so
   the scan never wanders into the next block's insns.  */
213 next_head = NULL_RTX;
214 if (bb->index + 1 != n_basic_blocks)
215 next_head = BASIC_BLOCK (bb->index + 1)->head;
/* Scan forward from BB's end, classifying each insn by code.  */
217 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)); )
219 if (insn == next_head)
222 switch (GET_CODE (insn))
229 switch (NOTE_LINE_NUMBER (insn))
231 case NOTE_INSN_LOOP_END:
232 case NOTE_INSN_BLOCK_END:
235 case NOTE_INSN_DELETED:
236 case NOTE_INSN_DELETED_LABEL:
/* A CODE_LABEL immediately followed by a jump table (ADDR_VEC or
   ADDR_DIFF_VEC pattern) travels with the preceding block; step over
   both the label and the table.  */
247 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
248 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
249 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
251 insn = NEXT_INSN (insn);
263 /* It is possible to hit contradicting sequence. For instance:
269 Where barrier belongs to jump_insn, but the note does not.
270 This can be created by removing the basic block originally
271 following NOTE_INSN_LOOP_BEG.
273 In such case reorder the notes. */
/* Walk backward from the accepted tail, moving stray end-notes after
   LAST_INSN so a barrier stays adjacent to its jump.  PREV is saved
   first because reorder_insns relinks INSN.  */
274 for (insn = last_insn; insn != bb->end; insn = prev)
276 prev = PREV_INSN (insn);
277 if (GET_CODE (insn) == NOTE)
278 switch (NOTE_LINE_NUMBER (insn))
280 case NOTE_INSN_LOOP_END:
281 case NOTE_INSN_BLOCK_END:
282 case NOTE_INSN_DELETED:
283 case NOTE_INSN_DELETED_LABEL:
286 reorder_insns (insn, insn, last_insn);
294 /* Locate the effective beginning and end of the insn chain for each
295 block, as defined by skip_insns_after_block above. */
298 record_effective_endpoints ()
300 rtx next_insn = get_insns ();
303 for (i = 0; i < n_basic_blocks; ++i)
305 basic_block bb = BASIC_BLOCK (i);
/* Each block's effective range starts where the previous block's
   ended, so inter-block insns (notes, barriers) move with the block
   that precedes them.  */
308 RBI (bb)->eff_head = next_insn;
309 end = skip_insns_after_block (bb);
310 RBI (bb)->eff_end = end;
311 next_insn = NEXT_INSN (end);
/* Everything after the last block is the function tail; remembered so
   fixup_reorder_chain can reattach it after rechaining the blocks.  */
313 function_tail_eff_head = next_insn;
317 /* Compute an ordering for a subgraph beginning with block BB. Record the
318 ordering in RBI()->index and chained through RBI()->next. */
321 make_reorder_chain ()
323 basic_block last_block = NULL;
324 basic_block prev = NULL;
325 int nbb_m1 = n_basic_blocks - 1;
327 /* If we've not got epilogue in RTL, we must fallthru to the exit.
328 Force the last block to be at the end. */
329 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
330 end of the function for stack unwinding purposes. */
/* Pre-mark the final block visited so the greedy walk below never
   places it; it is appended explicitly when the chain terminates.  */
333 last_block = BASIC_BLOCK (nbb_m1);
334 RBI (last_block)->visited = 1;
338 /* Loop until we've placed every block. */
342 basic_block next = NULL;
344 /* Find the next unplaced block. */
345 /* ??? Get rid of this loop, and track which blocks are not yet
346 placed more directly, so as to avoid the O(N^2) worst case.
347 Perhaps keep a doubly-linked list of all to-be-placed blocks;
348 remove from the list as we place. The head of that list is
349 what we're looking for here. */
351 for (i = 0; i <= nbb_m1; ++i)
353 basic_block bb = BASIC_BLOCK (i);
354 if (! RBI (bb)->visited)
/* Greedily lay out the chain starting at NEXT; returns the last
   block placed, which seeds the following iteration.  */
363 prev = make_reorder_chain_1 (next, prev);
365 while (RBI (prev)->index < nbb_m1);
367 /* Terminate the chain. */
/* The reserved last block is linked in here with the final index.  */
370 RBI (prev)->next = last_block;
371 RBI (last_block)->index = RBI (prev)->index + 1;
374 RBI (prev)->next = NULL;
377 /* A helper function for make_reorder_chain.
379 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
380 These are assumed to be the error condition and we wish to cluster
381 all of them at the very end of the function for the benefit of cache
382 locality for the rest of the function.
384 ??? We could do slightly better by noticing earlier that some subgraph
385 has all paths leading to noreturn functions, but for there to be more
386 than one block in such a subgraph is rare. */
389 make_reorder_chain_1 (bb, prev)
397 /* Mark this block visited. */
/* Append BB to the chain after PREV and assign its new position.  */
403 RBI (prev)->next = bb;
404 new_index = RBI (prev)->index + 1;
405 RBI (bb)->index = new_index;
407 if (rtl_dump_file && prev->index + 1 != bb->index)
408 fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
409 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
413 RBI (bb)->visited = 1;
/* A block with no successors ends this chain.  */
416 if (bb->succ == NULL)
419 /* Find the most probable block. */
/* With a conditional jump and a REG_BR_PROB note, follow the branch
   predicted more likely so it becomes the fallthrough.  */
422 if (any_condjump_p (bb->end)
423 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
425 int taken, probability;
426 edge e_taken, e_fall;
428 probability = INTVAL (XEXP (note, 0));
429 taken = probability > REG_BR_PROB_BASE / 2;
431 /* Find the normal taken edge and the normal fallthru edge.
433 Note, conditional jumps with other side effects may not
434 be fully optimized. In this case it is possible for
435 the conditional jump to branch to the same location as
438 We should probably work to improve optimization of that
439 case; however, it seems silly not to also deal with such
440 problems here if they happen to occur. */
442 e_taken = e_fall = NULL;
443 for (e = bb->succ; e ; e = e->succ_next)
445 if (e->flags & EDGE_FALLTHRU)
447 else if (! (e->flags & EDGE_EH))
451 next = (taken ? e_taken : e_fall)->dest;
454 /* In the absence of a prediction, disturb things as little as possible
455 by selecting the old "next" block from the list of successors. If
456 there had been a fallthru edge, that will be the one. */
459 for (e = bb->succ; e ; e = e->succ_next)
460 if (e->dest->index == bb->index + 1)
462 if ((e->flags & EDGE_FALLTHRU)
464 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
470 /* Make sure we didn't select a silly next block. */
/* Reject the exit block and anything already placed.  */
471 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
474 /* Recurse on the successors. Unroll the last call, as the normal
475 case is exactly one or two edges, and we can tail recurse. */
476 for (e = bb->succ; e; e = e->succ_next)
477 if (e->dest != EXIT_BLOCK_PTR
478 && ! RBI (e->dest)->visited
480 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
/* Place the preferred chain first, then re-check whether this
   successor was consumed by that recursion.  */
484 prev = make_reorder_chain_1 (next, prev);
485 next = RBI (e->dest)->visited ? NULL : e->dest;
500 /* Locate or create a label for a given basic block. */
506 rtx label = bb->head;
/* If the block doesn't already start with a CODE_LABEL, emit a fresh
   one before its head.  */
508 if (GET_CODE (label) != CODE_LABEL)
511 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
512 bb->index, RBI (bb)->index);
514 label = emit_label_before (gen_label_rtx (), label);
/* Keep the recorded effective head in sync: if the old head was also
   the effective head, the new label now precedes it.  */
515 if (bb->head == RBI (bb)->eff_head)
516 RBI (bb)->eff_head = label;
524 /* Emit a jump to BB after insn AFTER. */
527 emit_jump_to_block_after (bb, after)
/* Jumping to the exit block is expressed as a return insn instead of
   a jump to a label.  */
533 if (bb != EXIT_BLOCK_PTR)
535 rtx label = label_for_bb (bb);
536 jump = emit_jump_insn_after (gen_jump (label), after);
/* Record the jump target and bump the label's use count so later
   passes know the label is live.  */
537 JUMP_LABEL (jump) = label;
538 LABEL_NUSES (label) += 1;
539 if (basic_block_for_insn)
540 set_block_for_new_insns (jump, bb);
543 fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
544 bb->index, RBI (bb)->index);
551 jump = emit_jump_insn_after (gen_return (), after);
554 fprintf (rtl_dump_file, "Emitting return\n");
564 /* Given a reorder chain, rearrange the code to match. */
567 fixup_reorder_chain ()
569 basic_block bb, last_bb;
571 /* First do the bulk reordering -- rechain the blocks without regard to
572 the needed changes to jumps and labels. */
574 last_bb = BASIC_BLOCK (0);
575 bb = RBI (last_bb)->next;
/* Splice each block's effective insn range after its predecessor in
   the new order by rewriting NEXT/PREV pointers directly.  */
578 rtx last_e = RBI (last_bb)->eff_end;
579 rtx curr_h = RBI (bb)->eff_head;
581 NEXT_INSN (last_e) = curr_h;
582 PREV_INSN (curr_h) = last_e;
/* Reattach the saved function tail after the last reordered block,
   then walk to the true end and reset last_insn.  */
589 rtx insn = RBI (last_bb)->eff_end;
591 NEXT_INSN (insn) = function_tail_eff_head;
592 if (function_tail_eff_head)
593 PREV_INSN (function_tail_eff_head) = insn;
595 while (NEXT_INSN (insn))
596 insn = NEXT_INSN (insn);
597 set_last_insn (insn);
600 /* Now add jumps and labels as needed to match the blocks new
603 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
605 edge e_fall, e_taken, e;
606 rtx jump_insn, barrier_insn, bb_end_insn;
609 if (bb->succ == NULL)
612 /* Find the old fallthru edge, and another non-EH edge for
614 e_taken = e_fall = NULL;
615 for (e = bb->succ; e ; e = e->succ_next)
616 if (e->flags & EDGE_FALLTHRU)
618 else if (! (e->flags & EDGE_EH))
621 bb_end_insn = bb->end;
622 if (GET_CODE (bb_end_insn) == JUMP_INSN)
624 if (any_uncondjump_p (bb_end_insn))
626 /* If the destination is still not next, nothing to do. */
627 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
630 /* Otherwise, we can remove the jump and cleanup the edge. */
631 tidy_fallthru_edge (e_taken, bb, e_taken->dest);
/* Removing the jump may change which trailing insns belong to BB,
   so recompute both affected effective endpoints.  */
632 RBI (bb)->eff_end = skip_insns_after_block (bb);
633 RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
636 fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
637 bb->index, RBI (bb)->index);
640 else if (any_condjump_p (bb_end_insn))
642 /* If the old fallthru is still next, nothing to do. */
643 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
644 || (RBI (bb)->index == n_basic_blocks - 1
645 && e_fall->dest == EXIT_BLOCK_PTR))
648 /* There is one special case: if *neither* block is next,
649 such as happens at the very end of a function, then we'll
650 need to add a new unconditional jump. Choose the taken
651 edge based on known or assumed probability. */
652 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
/* If the branch is predicted not-taken (probability below 50%),
   invert it so the likely path becomes the fallthrough.  */
654 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
656 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
657 && invert_jump (bb_end_insn,
658 label_for_bb (e_fall->dest), 0))
660 e_fall->flags &= ~EDGE_FALLTHRU;
661 e_taken->flags |= EDGE_FALLTHRU;
662 e = e_fall, e_fall = e_taken, e_taken = e;
666 /* Otherwise we can try to invert the jump. This will
667 basically never fail, however, keep up the pretense. */
668 else if (invert_jump (bb_end_insn,
669 label_for_bb (e_fall->dest), 0))
671 e_fall->flags &= ~EDGE_FALLTHRU;
672 e_taken->flags |= EDGE_FALLTHRU;
676 else if (returnjump_p (bb_end_insn))
680 /* Otherwise we have some switch or computed jump. In the
681 99% case, there should not have been a fallthru edge. */
684 #ifdef CASE_DROPS_THROUGH
685 /* Except for VAX. Since we didn't have predication for the
686 tablejump, the fallthru block should not have moved. */
687 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
689 bb_end_insn = skip_insns_after_block (bb);
697 /* No fallthru implies a noreturn function with EH edges, or
698 something similarly bizarre. In any case, we don't need to
703 /* If the fallthru block is still next, nothing to do. */
704 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
705 || (RBI (bb)->index == n_basic_blocks - 1
706 && e_fall->dest == EXIT_BLOCK_PTR))
709 /* We need a new jump insn. If the block has only one outgoing
710 edge, then we can stuff the new jump insn in directly. */
711 if (bb->succ->succ_next == NULL)
713 e_fall->flags &= ~EDGE_FALLTHRU;
715 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
717 barrier_insn = emit_barrier_after (jump_insn);
718 RBI (bb)->eff_end = barrier_insn;
723 /* We got here if we need to add a new jump insn in a new block
724 across the edge e_fall. */
726 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
727 barrier_insn = emit_barrier_after (jump_insn);
/* Grow the block array and create a fresh block NB holding only the
   new jump; give it liveness, count and frequency data.  */
729 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
730 create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
732 nb = BASIC_BLOCK (n_basic_blocks - 1);
733 nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
734 nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
736 nb->count = e_fall->count;
737 nb->frequency = EDGE_FREQUENCY (e_fall);
/* NB sits on the fallthru edge, so both its live sets come from BB's
   live-at-start — nothing is born or dies in a bare jump.  */
739 COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
740 COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
/* Give NB its reordering data and splice it into the chain right
   after BB, inheriting BB's scope.  */
742 nb->aux = xmalloc (sizeof (struct reorder_block_def));
743 RBI (nb)->eff_head = nb->head;
744 RBI (nb)->eff_end = barrier_insn;
745 RBI (nb)->scope = RBI (bb)->scope;
746 RBI (nb)->index = RBI (bb)->index + 1;
747 RBI (nb)->visited = 1;
748 RBI (nb)->next = RBI (bb)->next;
751 /* Link to new block. */
752 make_edge (NULL, nb, e_fall->dest, 0);
753 redirect_edge_succ (e_fall, nb);
754 nb->succ->count = e_fall->count;
755 nb->succ->probability = REG_BR_PROB_BASE;
757 /* Don't process this new block. */
760 /* Fix subsequent reorder block indices to reflect new block. */
761 while ((nb = RBI (nb)->next) != NULL)
762 RBI (nb)->index += 1;
765 /* Put basic_block_info in the new order. */
766 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
768 bb->index = RBI (bb)->index;
769 BASIC_BLOCK (bb->index) = bb;
774 /* Perform sanity checks on the insn chain.
775 1. Check that next/prev pointers are consistent in both the forward and
777 2. Count insns in chain, going both directions, and check if equal.
778 3. Check that get_last_insn () returns the actual end of chain. */
/* Forward pass: each insn's PREV must be the insn we just left.  */
791 for (x = get_insns (); x; x = NEXT_INSN (x))
793 if (PREV_INSN (x) != prevx)
795 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
796 fprintf (stderr, "previous insn:\n");
798 fprintf (stderr, "current insn:\n");
/* After the forward walk, PREVX is the final insn reached; it must
   agree with the recorded last insn.  */
806 if (prevx != get_last_insn ())
808 fprintf (stderr, "last_insn corrupt.\n");
/* Reverse pass: mirror check on NEXT pointers.  */
814 for (x = get_last_insn (); x; x = PREV_INSN (x))
816 if (NEXT_INSN (x) != nextx)
818 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
819 fprintf (stderr, "current insn:\n");
821 fprintf (stderr, "next insn:\n");
/* Both directions must have seen the same number of insns.  */
829 if (insn_cnt1 != insn_cnt2)
831 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
832 insn_cnt1, insn_cnt2);
/* NOTE(review): these two tests appear to be the scan bodies of
   get_next_bb_note and get_prev_bb_note respectively (prototyped
   above) — each walks the insn chain looking for the nearest
   NOTE_INSN_BASIC_BLOCK.  Confirm against the surrounding loops.  */
843 if (NOTE_INSN_BASIC_BLOCK_P (x))
857 if (NOTE_INSN_BASIC_BLOCK_P (x))
865 /* Determine and record the relationships between basic blocks and
866 scopes in scope tree S. */
869 relate_bbs_with_scopes (s)
873 int i, bbi1, bbi2, bbs_spanned;
/* Process children first so inner scopes claim their blocks before
   this scope considers what remains.  */
876 for (p = s->inner; p; p = p->next)
877 relate_bbs_with_scopes (p);
882 /* If the begin and end notes are both inside the same basic block,
883 or if they are both outside of basic blocks, then we know immediately
884 how they are related. Otherwise, we need to poke around to make the
886 if (s->bb_beg != s->bb_end)
888 if (s->bb_beg && s->bb_end)
890 /* Both notes are in different bbs. This implies that all the
891 basic blocks spanned by the pair of notes are contained in
893 bbi1 = s->bb_beg->index;
894 bbi2 = s->bb_end->index;
897 else if (! s->bb_beg)
899 /* First note is outside of a bb. If the scope spans more than
900 one basic block, then they all are contained within this
901 scope. Otherwise, this scope is contained within the basic
903 bbnote = get_next_bb_note (s->note_beg);
/* If the first block note after note_beg is already bb_end, the
   scope touches a single block.  */
906 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
909 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
913 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
914 bbi2 = s->bb_end->index;
919 else /* ! s->bb_end */
921 /* Second note is outside of a bb. If the scope spans more than
922 one basic block, then they all are contained within this
923 scope. Otherwise, this scope is contained within the basic
925 bbnote = get_prev_bb_note (s->note_end);
928 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
931 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
935 bbi1 = s->bb_beg->index;
936 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
945 /* Both notes are in the same bb, which implies the block
946 contains this scope. */
951 /* Both notes are outside of any bbs. This implies that all the
952 basic blocks spanned by the pair of notes are contained in
954 There is a degenerate case to consider. If the notes do not
955 span any basic blocks, then it is an empty scope that can
956 safely be deleted or ignored. Mark these with level = -1. */
958 x1 = get_next_bb_note (s->note_beg);
959 x2 = get_prev_bb_note (s->note_end);
967 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
968 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
974 /* If the scope spans one or more basic blocks, we record them. We
975 only record the bbs that are immediately contained within this
976 scope. Note that if a scope is contained within a bb, we can tell
977 by checking that bb_beg = bb_end and that they are non-null. */
/* First count the still-unclaimed blocks in [bbi1, bbi2], then
   allocate and fill the array, marking each block as owned by S.  */
983 for (i = bbi1; i <= bbi2; i++)
984 if (! RBI (BASIC_BLOCK (i))->scope)
987 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
988 for (i = bbi1; i <= bbi2; i++)
990 basic_block curr_bb = BASIC_BLOCK (i);
991 if (! RBI (curr_bb)->scope)
993 s->bbs[j++] = curr_bb;
994 RBI (curr_bb)->scope = s;
1003 /* Allocate and initialize a new scope structure with scope level LEVEL,
1004 and record the NOTE beginning the scope. */
1007 make_new_scope (level, note)
/* xcalloc zero-fills, so all links (outer/inner/next) and the bb
   fields start out NULL.  */
1011 scope new_scope = xcalloc (1, sizeof (struct scope_def));
1012 new_scope->level = level;
1013 new_scope->note_beg = note;
1018 /* Build a forest representing the scope structure of the function.
1019 Return a pointer to a structure describing the forest. */
1022 build_scope_forest (forest)
1023 scope_forest_info *forest;
1027 basic_block curr_bb;
1028 scope root, curr_scope = 0;
1030 forest->num_trees = 0;
1031 forest->trees = NULL;
/* Single pass over the insn chain, tracking the current basic block
   and reacting to BLOCK_BEG/BLOCK_END notes.  */
1036 for (x = get_insns (); x; x = NEXT_INSN (x))
1038 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
1039 curr_bb = BASIC_BLOCK (bbi);
1041 if (GET_CODE (x) == NOTE)
1043 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
/* Inside an existing scope: create a child and append it to the
   current scope's sibling list.  */
1051 new_scope = make_new_scope (level, x);
1052 new_scope->outer = curr_scope;
1053 new_scope->next = NULL;
1054 if (! curr_scope->inner)
1056 curr_scope->inner = new_scope;
1057 curr_scope->inner_last = new_scope;
1061 curr_scope->inner_last->next = new_scope;
1062 curr_scope->inner_last = new_scope;
1064 curr_scope = curr_scope->inner_last;
/* No enclosing scope: this BLOCK_BEG starts a new tree in the
   forest; grow the root array by one.  */
1068 int ntrees = forest->num_trees;
1070 curr_scope = make_new_scope (level, x);
1072 forest->trees = xrealloc (forest->trees,
1073 sizeof (scope) * (ntrees + 1));
1074 forest->trees[forest->num_trees++] = root;
1076 curr_scope->bb_beg = curr_bb;
1078 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
/* Close the current scope and pop back to its parent.  */
1080 curr_scope->bb_end = curr_bb;
1081 curr_scope->note_end = x;
1083 curr_scope = curr_scope->outer;
/* Past the end of CURR_BB, notes are between blocks again.  */
1089 if (curr_bb && curr_bb->end == x)
/* With the forest built, attach basic blocks to their scopes.  */
1097 for (i = 0; i < forest->num_trees; i++)
1098 relate_bbs_with_scopes (forest->trees[i]);
1102 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
1106 remove_scope_notes ()
1109 basic_block currbb = NULL;
/* NEXT is captured up front because X is unlinked inside the loop.  */
1111 for (x = get_insns (); x; x = next)
1113 next = NEXT_INSN (x);
1114 if (NOTE_INSN_BASIC_BLOCK_P (x))
1115 currbb = NOTE_BASIC_BLOCK (x);
1117 if (GET_CODE (x) == NOTE
1118 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1119 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1121 /* Check if the scope note happens to be the end of a bb. */
/* Keep block head/end pointers valid when the note being removed
   was serving as either boundary.  */
1122 if (currbb && x == currbb->end)
1123 currbb->end = PREV_INSN (x);
1124 if (currbb && x == currbb->head)
/* Unlink X from the doubly-linked insn chain and detach its own
   pointers so it cannot be followed accidentally.  */
1129 NEXT_INSN (PREV_INSN (x)) = next;
1130 PREV_INSN (next) = PREV_INSN (x);
1132 NEXT_INSN (x) = NULL;
1133 PREV_INSN (x) = NULL;
1142 /* Insert scope note pairs for a contained scope tree S after insn IP. */
1145 insert_intra_1 (s, ip)
/* Emit the BLOCK_BEG note for S, then recurse into children, then the
   matching BLOCK_END — preorder emission keeps the notes balanced.  */
1151 if (NOTE_BLOCK (s->note_beg))
1153 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1154 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
1157 for (p = s->inner; p; p = p->next)
1158 insert_intra_1 (p, ip);
/* NOTE(review): this guard tests note_beg while the emitted note
   copies note_end's block — presumably both carry the same BLOCK
   tree node, but confirm; testing note_end here would be the more
   obviously symmetric form.  */
1160 if (NOTE_BLOCK (s->note_beg))
1162 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1163 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1168 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1169 scopes that are contained within BB. */
1172 insert_intra_bb_scope_notes (bb)
1175 scope s = RBI (bb)->scope;
/* Insert after the block's label (if any), not before it.  */
1183 if (GET_CODE (ip) == CODE_LABEL)
1184 ip = NEXT_INSN (ip);
/* Only child scopes wholly inside BB (bb_beg == bb_end == bb) get
   their note pairs emitted here.  */
1186 for (p = s->inner; p; p = p->next)
1188 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1189 insert_intra_1 (p, &ip);
1194 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1195 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1196 notes before BB2 such that the notes are correctly balanced. If BB1 or
1197 BB2 is NULL, we are inserting scope notes for the first and last basic
1198 blocks, respectively. */
1201 insert_inter_bb_scope_notes (bb1, bb2)
1208 /* It is possible that a basic block is not contained in any scope.
1209 In that case, we either open or close a scope but not both. */
1212 scope s1 = RBI (bb1)->scope;
1213 scope s2 = RBI (bb2)->scope;
1222 /* Find common ancestor scope. */
/* Walk both scope chains upward, always advancing the deeper one,
   until they meet at the common ancestor.  */
1225 scope s1 = RBI (bb1)->scope;
1226 scope s2 = RBI (bb2)->scope;
1231 if (s1->level > s2->level)
1233 else if (s2->level > s1->level)
/* Close scopes: emit BLOCK_END notes after BB1 for every scope from
   BB1's scope up to (but not including) the common ancestor.  */
1249 scope s = RBI (bb1)->scope;
1250 ip = RBI (bb1)->eff_end;
/* NOTE(review): guard tests note_beg though note_end's block is
   emitted — mirrors insert_intra_1; confirm both notes share the
   same BLOCK.  */
1253 if (NOTE_BLOCK (s->note_beg))
1255 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1256 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
/* Open scopes: emit BLOCK_BEG notes before BB2 for every scope from
   the common ancestor down to BB2's scope.  */
1265 scope s = RBI (bb2)->scope;
1269 if (NOTE_BLOCK (s->note_beg))
1271 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1272 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1280 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1281 on the scope forest and the newly reordered basic blocks. */
1284 rebuild_scope_notes (forest)
1285 scope_forest_info *forest;
/* Nothing to rebuild if the function had no lexical scopes.  */
1289 if (forest->num_trees == 0)
1292 /* Start by opening the scopes before the first basic block. */
1293 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1295 /* Then, open and close scopes as needed between blocks. */
1296 for (i = 0; i < n_basic_blocks - 1; i++)
1298 basic_block bb1 = BASIC_BLOCK (i);
1299 basic_block bb2 = BASIC_BLOCK (i + 1);
/* Inter-block notes are only needed where the scope changes; the
   intra-block notes are emitted for every block.  */
1300 if (RBI (bb1)->scope != RBI (bb2)->scope)
1301 insert_inter_bb_scope_notes (bb1, bb2);
1302 insert_intra_bb_scope_notes (bb1);
1305 /* Finally, close the scopes after the last basic block. */
1306 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1307 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1311 /* Free the storage associated with the scope tree at S. */
1314 free_scope_forest_1 (s)
/* NEXT is read before recursing because the recursion frees P,
   including its sibling link.  */
1319 for (p = s->inner; p; p = next)
1322 free_scope_forest_1 (p);
1331 /* Free the storage associated with the scope forest. */
1334 free_scope_forest (forest)
1335 scope_forest_info *forest;
/* Free each tree in turn; the roots array itself is handled by the
   caller's ownership of FOREST.  */
1338 for (i = 0; i < forest->num_trees; i++)
1339 free_scope_forest_1 (forest->trees[i]);
1343 /* Visualize the scope forest. */
/* Debug aid: prints the forest to stderr; non-static so it can be
   called from a debugger.  */
1346 dump_scope_forest (forest)
1347 scope_forest_info *forest;
1349 if (forest->num_trees == 0)
1350 fprintf (stderr, "\n< Empty scope forest >\n")
1354 fprintf (stderr, "\n< Scope forest >\n");
1355 for (i = 0; i < forest->num_trees; i++)
1356 dump_scope_forest_1 (forest->trees[i], 0);
1361 /* Recursive portion of dump_scope_forest. */
1364 dump_scope_forest_1 (s, indent)
/* A scope wholly inside one block whose owner is the parent scope is
   printed compactly as "BBn:" rather than as a nested level.  */
1371 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1372 && RBI (s->bb_beg)->scope
1373 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1375 fprintf (stderr, "%*s", indent, "");
1376 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1379 fprintf (stderr, "%*s", indent, "");
1380 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1381 (PTR) NOTE_BLOCK (s->note_beg));
1383 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1384 for (i = 0; i < s->num_bbs; i++)
1385 fprintf (stderr, " %d", s->bbs[i]->index);
1386 fprintf (stderr, "\n");
/* Children are indented two extra columns per nesting level.  */
1388 for (p = s->inner; p; p = p->next)
1389 dump_scope_forest_1 (p, indent + 2);
1391 fprintf (stderr, "%*s", indent, "");
1392 fprintf (stderr, "}\n");
1396 /* Reorder basic blocks. The main entry point to this file. */
1399 reorder_basic_blocks ()
1401 scope_forest_info forest;
/* Nothing to reorder with zero or one block.  */
1404 if (n_basic_blocks <= 1)
/* Attach zeroed per-block reordering data (see RBI) to every block,
   including the exit block.  */
1407 for (i = 0; i < n_basic_blocks; i++)
1408 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1410 EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
/* Capture scope structure, strip the scope notes so blocks can move
   freely, then reorder and splice the insn chain.  */
1412 build_scope_forest (&forest);
1413 remove_scope_notes ();
1415 record_effective_endpoints ();
1416 make_reorder_chain ();
1417 fixup_reorder_chain ();
1419 #ifdef ENABLE_CHECKING
1420 verify_insn_chain ();
/* Re-emit balanced scope notes to match the new block order.  */
1423 rebuild_scope_notes (&forest);
1424 free_scope_forest (&forest);
/* Release the per-block aux data now that the pass is done.  */
1427 for (i = 0; i < n_basic_blocks; i++)
1428 free (BASIC_BLOCK (i)->aux);
1430 free (EXIT_BLOCK_PTR->aux);
1432 #ifdef ENABLE_CHECKING
1433 verify_flow_info ();