1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
30 if (p) goto A; // predict taken
33 if (q) goto B; // predict taken
39 We'll currently reorder this as
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
72 (2) Coordinate with shorten_branches to minimize the number of
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
101 #ifndef HAVE_epilogue
102 #define HAVE_epilogue 0
106 /* The contents of the current function definition are allocated
107 in this obstack, and all are freed at the end of the function.
108 For top-level functions, this is temporary_obstack.
109 Separate obstacks are made for nested functions. */
111 extern struct obstack flow_obstack;
114 /* Structure to hold information about lexical scopes. */
/* NOTE(review): the member declarations for note_beg, note_end, bb_beg,
   bb_end, bbs, num_bbs, and level appear to be elided from this view of the
   source; only their field comments and the tree-link pointers remain.
   Those names are used elsewhere in this file (relate_bbs_with_scopes,
   build_scope_forest) — confirm against the full source. */
115 typedef struct scope_def
119 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
122 /* The NOTE_INSN_BLOCK_END that ended this scope. */
125 /* The bb containing note_beg (if any). */
128 /* The bb containing note_end (if any). */
131 /* List of basic blocks contained within this scope. */
134 /* Number of blocks contained within this scope. */
137 /* The outer scope or NULL if outermost scope. */
138 struct scope_def *outer;
140 /* The first inner scope or NULL if innermost scope. */
141 struct scope_def *inner;
143 /* The last inner scope or NULL if innermost scope. */
144 struct scope_def *inner_last;
146 /* Link to the next (sibling) scope. */
147 struct scope_def *next;
151 /* Structure to hold information about the scope forest. */
154 /* Number of trees in forest. */
157 /* List of tree roots. */
161 /* Structure to hold information about the blocks during reordering. */
/* NOTE(review): the member list is elided from this view.  Members
   referenced elsewhere in this file include eff_head, eff_end, scope,
   next, index, and visited — confirm against the full source. */
162 typedef struct reorder_block_def
170 } *reorder_block_def;
/* Access the per-block reordering metadata hung off basic_block->aux
   (allocated in reorder_basic_blocks). */
172 #define RBI(BB) ((reorder_block_def) (BB)->aux)
175 /* Local function prototypes. */
176 static rtx skip_insns_after_block PARAMS ((basic_block));
177 static void record_effective_endpoints PARAMS ((void));
178 static void make_reorder_chain PARAMS ((void));
179 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
180 static rtx label_for_bb PARAMS ((basic_block));
181 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
182 static void fixup_reorder_chain PARAMS ((void));
183 static void relate_bbs_with_scopes PARAMS ((scope));
184 static scope make_new_scope PARAMS ((int, rtx));
185 static void build_scope_forest PARAMS ((scope_forest_info *));
186 static void remove_scope_notes PARAMS ((void));
187 static void insert_intra_1 PARAMS ((scope, rtx *));
188 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
189 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
190 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
191 static void free_scope_forest_1 PARAMS ((scope));
192 static void free_scope_forest PARAMS ((scope_forest_info *));
193 void dump_scope_forest PARAMS ((scope_forest_info *));
194 static void dump_scope_forest_1 PARAMS ((scope, int));
195 static rtx get_next_bb_note PARAMS ((rtx));
196 static rtx get_prev_bb_note PARAMS ((rtx));
198 void verify_insn_chain PARAMS ((void));
200 /* Skip over inter-block insns occurring after BB which are typically
201 associated with BB (e.g., barriers). If there are any such insns,
202 we return the last one. Otherwise, we return the end of BB. */
/* NOTE(review): several lines of this function (parameter declarations,
   braces, case labels, and the return) are elided from this view. */
205 skip_insns_after_block (bb)
208 rtx insn, last_insn, next_head, prev;
/* Stop the forward scan as soon as we reach the head of the next block
   (if BB is not the last block). */
210 next_head = NULL_RTX;
211 if (bb->index + 1 != n_basic_blocks)
212 next_head = BASIC_BLOCK (bb->index + 1)->head;
214 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)); )
216 if (insn == next_head)
219 switch (GET_CODE (insn))
226 switch (NOTE_LINE_NUMBER (insn))
228 case NOTE_INSN_LOOP_END:
229 case NOTE_INSN_BLOCK_END:
232 case NOTE_INSN_DELETED:
233 case NOTE_INSN_DELETED_LABEL:
/* A CODE_LABEL immediately followed by a jump-table body (ADDR_VEC or
   ADDR_DIFF_VEC) belongs with BB; skip over both insns together. */
244 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
245 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
246 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
248 insn = NEXT_INSN (insn);
260 /* It is possible to hit contradicting sequence. For instance:
266 Where barrier belongs to jump_insn, but the note does not.
267 This can be created by removing the basic block originally
268 following NOTE_INSN_LOOP_BEG.
270 In such case reorder the notes. */
/* Walk backwards from the chosen last insn and move any stray
   block/loop-end or deleted notes to the end of the skipped region. */
271 for (insn = last_insn; insn != bb->end; insn = prev)
273 prev = PREV_INSN (insn);
274 if (GET_CODE (insn) == NOTE)
275 switch (NOTE_LINE_NUMBER (insn))
277 case NOTE_INSN_LOOP_END:
278 case NOTE_INSN_BLOCK_END:
279 case NOTE_INSN_DELETED:
280 case NOTE_INSN_DELETED_LABEL:
283 reorder_insns (insn, insn, last_insn);
291 /* Locate the effective beginning and end of the insn chain for each
292 block, as defined by skip_insns_after_block above. */
/* The effective ranges partition the whole insn chain: each block's
   eff_head starts right after the previous block's eff_end, so moving
   blocks by their effective ranges moves all inter-block insns too. */
295 record_effective_endpoints ()
297 rtx next_insn = get_insns ();
300 for (i = 0; i < n_basic_blocks; ++i)
302 basic_block bb = BASIC_BLOCK (i);
305 RBI (bb)->eff_head = next_insn;
306 end = skip_insns_after_block (bb);
307 RBI (bb)->eff_end = end;
308 next_insn = NEXT_INSN (end);
313 /* Compute an ordering for a subgraph beginning with block BB. Record the
314 ordering in RBI()->index and chained through RBI()->next. */
/* NOTE(review): the conditional guarding the last-block pinning (presumably
   testing HAVE_epilogue and/or optimize — TODO confirm) and the do-loop
   braces are elided from this view. */
317 make_reorder_chain ()
319 basic_block last_block = NULL;
320 basic_block prev = NULL;
321 int nbb_m1 = n_basic_blocks - 1;
323 /* If we've not got epilogue in RTL, we must fallthru to the exit.
324 Force the last block to be at the end. */
325 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
326 end of the function for stack unwinding purposes. */
/* Mark the last block visited up front so the greedy placement below
   never picks it; it is re-appended at the very end of the chain. */
329 last_block = BASIC_BLOCK (nbb_m1);
330 RBI (last_block)->visited = 1;
334 /* Loop until we've placed every block. */
338 basic_block next = NULL;
340 /* Find the next unplaced block. */
341 /* ??? Get rid of this loop, and track which blocks are not yet
342 placed more directly, so as to avoid the O(N^2) worst case.
343 Perhaps keep a doubly-linked list of all to-be-placed blocks;
344 remove from the list as we place. The head of that list is
345 what we're looking for here. */
347 for (i = 0; i <= nbb_m1; ++i)
349 basic_block bb = BASIC_BLOCK (i);
350 if (! RBI (bb)->visited)
/* Greedily lay out the chain rooted at NEXT; returns the new tail. */
359 prev = make_reorder_chain_1 (next, prev);
361 while (RBI (prev)->index < nbb_m1);
363 /* Terminate the chain. */
/* If the last block was pinned above, splice it back in as the tail;
   otherwise simply NULL-terminate the chain. */
366 RBI (prev)->next = last_block;
367 RBI (last_block)->index = RBI (prev)->index + 1;
370 RBI (prev)->next = NULL;
373 /* A helper function for make_reorder_chain.
375 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
376 These are assumed to be the error condition and we wish to cluster
377 all of them at the very end of the function for the benefit of cache
378 locality for the rest of the function.
380 ??? We could do slightly better by noticing earlier that some subgraph
381 has all paths leading to noreturn functions, but for there to be more
382 than one block in such a subgraph is rare. */
/* NOTE(review): parameter declarations, local declarations, braces, and
   the restart/loop structure for the unrolled tail recursion are elided
   from this view of the source. */
385 make_reorder_chain_1 (bb, prev)
393 /* Mark this block visited. */
/* Append BB after PREV in the reorder chain and give it the next index. */
399 RBI (prev)->next = bb;
400 new_index = RBI (prev)->index + 1;
401 RBI (bb)->index = new_index;
403 if (rtl_dump_file && prev->index + 1 != bb->index)
404 fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
405 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
409 RBI (bb)->visited = 1;
/* No successors: nothing further to place along this chain. */
412 if (bb->succ == NULL)
415 /* Find the most probable block. */
/* With branch-probability information on a conditional jump, follow the
   predicted (taken if probability > 50%) edge next. */
418 if (any_condjump_p (bb->end)
419 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
421 int taken, probability;
422 edge e_taken, e_fall;
424 probability = INTVAL (XEXP (note, 0));
425 taken = probability > REG_BR_PROB_BASE / 2;
427 /* Find the normal taken edge and the normal fallthru edge.
429 Note, conditional jumps with other side effects may not
430 be fully optimized. In this case it is possible for
431 the conditional jump to branch to the same location as
434 We should probably work to improve optimization of that
435 case; however, it seems silly not to also deal with such
436 problems here if they happen to occur. */
438 e_taken = e_fall = NULL;
439 for (e = bb->succ; e ; e = e->succ_next)
441 if (e->flags & EDGE_FALLTHRU)
443 else if (! (e->flags & EDGE_EH))
447 next = (taken ? e_taken : e_fall)->dest;
450 /* In the absence of a prediction, disturb things as little as possible
451 by selecting the old "next" block from the list of successors. If
452 there had been a fallthru edge, that will be the one. */
455 for (e = bb->succ; e ; e = e->succ_next)
456 if (e->dest->index == bb->index + 1)
458 if ((e->flags & EDGE_FALLTHRU)
460 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
466 /* Make sure we didn't select a silly next block. */
467 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
470 /* Recurse on the successors. Unroll the last call, as the normal
471 case is exactly one or two edges, and we can tail recurse. */
/* Place every other eligible successor subgraph first, then continue
   the chain with the chosen NEXT block. */
472 for (e = bb->succ; e; e = e->succ_next)
473 if (e->dest != EXIT_BLOCK_PTR
474 && ! RBI (e->dest)->visited
476 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
480 prev = make_reorder_chain_1 (next, prev);
481 next = RBI (e->dest)->visited ? NULL : e->dest;
496 /* Locate or create a label for a given basic block. */
/* NOTE(review): the function header line, braces, and the return statement
   are elided from this view; the prototype above declares this as
   static rtx label_for_bb (basic_block). */
502 rtx label = bb->head;
/* If the block does not already start with a CODE_LABEL, emit a fresh
   one before its head so a jump can target it. */
504 if (GET_CODE (label) != CODE_LABEL)
507 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
508 bb->index, RBI (bb)->index);
510 label = emit_label_before (gen_label_rtx (), label);
/* Keep the recorded effective head in sync when the label is inserted
   at the very start of the block's effective range. */
511 if (bb->head == RBI (bb)->eff_head)
512 RBI (bb)->eff_head = label;
520 /* Emit a jump to BB after insn AFTER. */
/* NOTE(review): declarations, braces, and the return of the new jump insn
   are elided from this view (the prototype declares this returning rtx). */
523 emit_jump_to_block_after (bb, after)
/* Normal case: jump to the (possibly freshly created) label of BB and
   account for the new label use. */
529 if (bb != EXIT_BLOCK_PTR)
531 rtx label = label_for_bb (bb);
532 jump = emit_jump_insn_after (gen_jump (label), after);
533 JUMP_LABEL (jump) = label;
534 LABEL_NUSES (label) += 1;
535 if (basic_block_for_insn)
536 set_block_for_new_insns (jump, bb);
539 fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
540 bb->index, RBI (bb)->index);
/* Jumping to the exit block: emit a return insn instead of a jump. */
547 jump = emit_jump_insn_after (gen_return (), after);
550 fprintf (rtl_dump_file, "Emitting return\n");
560 /* Given a reorder chain, rearrange the code to match. */
/* NOTE(review): numerous lines (braces, loop headers, declarations,
   continue/goto statements) are elided from this view of the source. */
563 fixup_reorder_chain ()
565 basic_block bb, last_bb;
567 /* First do the bulk reordering -- rechain the blocks without regard to
568 the needed changes to jumps and labels. */
/* Splice each block's effective insn range after its predecessor in the
   new RBI()->next order, then re-terminate the insn chain. */
570 last_bb = BASIC_BLOCK (0);
571 bb = RBI (last_bb)->next;
574 rtx last_e = RBI (last_bb)->eff_end;
575 rtx curr_h = RBI (bb)->eff_head;
577 NEXT_INSN (last_e) = curr_h;
578 PREV_INSN (curr_h) = last_e;
583 NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
584 set_last_insn (RBI (last_bb)->eff_end);
586 /* Now add jumps and labels as needed to match the blocks new
589 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
591 edge e_fall, e_taken, e;
592 rtx jump_insn, barrier_insn, bb_end_insn;
/* Blocks with no successors need no jump fixup. */
595 if (bb->succ == NULL)
598 /* Find the old fallthru edge, and another non-EH edge for
600 e_taken = e_fall = NULL;
601 for (e = bb->succ; e ; e = e->succ_next)
602 if (e->flags & EDGE_FALLTHRU)
604 else if (! (e->flags & EDGE_EH))
607 bb_end_insn = bb->end;
608 if (GET_CODE (bb_end_insn) == JUMP_INSN)
610 if (any_uncondjump_p (bb_end_insn))
612 /* If the destination is still not next, nothing to do. */
613 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
616 /* Otherwise, we can remove the jump and cleanup the edge. */
617 tidy_fallthru_edge (e_taken, bb, e_taken->dest);
/* Removing the jump changes both blocks' effective boundaries;
   recompute them. */
618 RBI (bb)->eff_end = skip_insns_after_block (bb);
619 RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
622 fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
623 bb->index, RBI (bb)->index);
626 else if (any_condjump_p (bb_end_insn))
628 /* If the old fallthru is still next, nothing to do. */
629 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
630 || (RBI (bb)->index == n_basic_blocks - 1
631 && e_fall->dest == EXIT_BLOCK_PTR))
634 /* There is one special case: if *neither* block is next,
635 such as happens at the very end of a function, then we'll
636 need to add a new unconditional jump. Choose the taken
637 edge based on known or assumed probability. */
638 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
/* Prefer inverting the branch when the recorded probability says
   the fallthru path is the likelier one. */
640 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
642 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
643 && invert_jump (bb_end_insn,
644 label_for_bb (e_fall->dest), 0))
646 e_fall->flags &= ~EDGE_FALLTHRU;
647 e_taken->flags |= EDGE_FALLTHRU;
648 e = e_fall, e_fall = e_taken, e_taken = e;
652 /* Otherwise we can try to invert the jump. This will
653 basically never fail, however, keep up the pretense. */
654 else if (invert_jump (bb_end_insn,
655 label_for_bb (e_fall->dest), 0))
657 e_fall->flags &= ~EDGE_FALLTHRU;
658 e_taken->flags |= EDGE_FALLTHRU;
662 else if (returnjump_p (bb_end_insn))
666 /* Otherwise we have some switch or computed jump. In the
667 99% case, there should not have been a fallthru edge. */
670 #ifdef CASE_DROPS_THROUGH
671 /* Except for VAX. Since we didn't have predication for the
672 tablejump, the fallthru block should not have moved. */
673 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
675 bb_end_insn = skip_insns_after_block (bb)
683 /* No fallthru implies a noreturn function with EH edges, or
684 something similarly bizarre. In any case, we don't need to
689 /* If the fallthru block is still next, nothing to do. */
690 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
691 || (RBI (bb)->index == n_basic_blocks - 1
692 && e_fall->dest == EXIT_BLOCK_PTR))
695 /* We need a new jump insn. If the block has only one outgoing
696 edge, then we can stuff the new jump insn in directly. */
697 if (bb->succ->succ_next == NULL)
699 e_fall->flags &= ~EDGE_FALLTHRU;
701 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
703 barrier_insn = emit_barrier_after (jump_insn);
704 RBI (bb)->eff_end = barrier_insn;
709 /* We got here if we need to add a new jump insn in a new block
710 across the edge e_fall. */
712 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
713 barrier_insn = emit_barrier_after (jump_insn);
/* Grow the block array and create the new block NB holding just the
   new jump; its liveness is copied from BB's entry set since no regs
   die along the pure-jump edge. */
715 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
716 create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
718 nb = BASIC_BLOCK (n_basic_blocks - 1);
719 nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
720 nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
723 COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
724 COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
/* Give NB reorder metadata and splice it into the chain right
   after BB. */
726 nb->aux = xmalloc (sizeof (struct reorder_block_def));
727 RBI (nb)->eff_head = nb->head;
728 RBI (nb)->eff_end = barrier_insn;
729 RBI (nb)->scope = RBI (bb)->scope;
730 RBI (nb)->index = RBI (bb)->index + 1;
731 RBI (nb)->visited = 1;
732 RBI (nb)->next = RBI (bb)->next;
735 /* Link to new block. */
736 make_edge (NULL, nb, e_fall->dest, 0);
737 redirect_edge_succ (e_fall, nb);
739 /* Don't process this new block. */
742 /* Fix subsequent reorder block indices to reflect new block. */
743 while ((nb = RBI (nb)->next) != NULL)
744 RBI (nb)->index += 1;
747 /* Put basic_block_info in the new order. */
748 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
750 bb->index = RBI (bb)->index;
751 BASIC_BLOCK (bb->index) = bb;
756 /* Perform sanity checks on the insn chain.
757 1. Check that next/prev pointers are consistent in both the forward and
759 2. Count insns in chain, going both directions, and check if equal.
760 3. Check that get_last_insn () returns the actual end of chain. */
/* NOTE(review): the function header, local declarations, counting
   statements, and abort calls are elided from this view. */
/* Forward pass: every insn's PREV_INSN must point at the previous insn. */
773 for (x = get_insns (); x; x = NEXT_INSN (x))
775 if (PREV_INSN (x) != prevx)
777 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
778 fprintf (stderr, "previous insn:\n");
780 fprintf (stderr, "current insn:\n");
788 if (prevx != get_last_insn ())
790 fprintf (stderr, "last_insn corrupt.\n");
/* Backward pass: every insn's NEXT_INSN must point at the next insn. */
796 for (x = get_last_insn (); x; x = PREV_INSN (x))
798 if (NEXT_INSN (x) != nextx)
800 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
801 fprintf (stderr, "current insn:\n");
803 fprintf (stderr, "next insn:\n");
/* The two traversals must have visited the same number of insns. */
811 if (insn_cnt1 != insn_cnt2)
813 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
814 insn_cnt1, insn_cnt2);
/* NOTE(review): the two lines below appear to be remnants of the
   get_next_bb_note and get_prev_bb_note helpers (declared in the
   prototype list above); their headers, loops, and returns are elided
   from this view — confirm against the full source. */
825 if (NOTE_INSN_BASIC_BLOCK_P (x))
839 if (NOTE_INSN_BASIC_BLOCK_P (x))
847 /* Determine and record the relationships between basic blocks and
848 scopes in scope tree S. */
/* NOTE(review): braces, several declarations, early returns, and
   assignments (e.g. bbs_spanned settings) are elided from this view. */
851 relate_bbs_with_scopes (s)
855 int i, bbi1, bbi2, bbs_spanned;
/* Process children first so inner scopes claim their blocks before this
   (outer) scope considers them below. */
858 for (p = s->inner; p; p = p->next)
859 relate_bbs_with_scopes (p);
864 /* If the begin and end notes are both inside the same basic block,
865 or if they are both outside of basic blocks, then we know immediately
866 how they are related. Otherwise, we need to poke around to make the
868 if (s->bb_beg != s->bb_end)
870 if (s->bb_beg && s->bb_end)
872 /* Both notes are in different bbs. This implies that all the
873 basic blocks spanned by the pair of notes are contained in
875 bbi1 = s->bb_beg->index;
876 bbi2 = s->bb_end->index;
879 else if (! s->bb_beg)
881 /* First note is outside of a bb. If the scope spans more than
882 one basic block, then they all are contained within this
883 scope. Otherwise, this scope is contained within the basic
885 bbnote = get_next_bb_note (s->note_beg);
888 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
891 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
895 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
896 bbi2 = s->bb_end->index;
901 else /* ! s->bb_end */
903 /* Second note is outside of a bb. If the scope spans more than
904 one basic block, then they all are contained within this
905 scope. Otherwise, this scope is contained within the basic
907 bbnote = get_prev_bb_note (s->note_end);
910 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
913 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
917 bbi1 = s->bb_beg->index;
918 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
927 /* Both notes are in the same bb, which implies the block
928 contains this scope. */
933 /* Both notes are outside of any bbs. This implies that all the
934 basic blocks spanned by the pair of notes are contained in
936 There is a degenerate case to consider. If the notes do not
937 span any basic blocks, then it is an empty scope that can
938 safely be deleted or ignored. Mark these with level = -1. */
940 x1 = get_next_bb_note (s->note_beg);
941 x2 = get_prev_bb_note (s->note_end);
949 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
950 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
956 /* If the scope spans one or more basic blocks, we record them. We
957 only record the bbs that are immediately contained within this
958 scope. Note that if a scope is contained within a bb, we can tell
959 by checking that bb_beg = bb_end and that they are non-null. */
/* First pass counts unclaimed blocks in [bbi1, bbi2]; second pass
   records them and marks each as owned by this scope. */
965 for (i = bbi1; i <= bbi2; i++)
966 if (! RBI (BASIC_BLOCK (i))->scope)
969 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
970 for (i = bbi1; i <= bbi2; i++)
972 basic_block curr_bb = BASIC_BLOCK (i);
973 if (! RBI (curr_bb)->scope)
975 s->bbs[j++] = curr_bb;
976 RBI (curr_bb)->scope = s;
985 /* Allocate and initialize a new scope structure with scope level LEVEL,
986 and record the NOTE beginning the scope. */
/* NOTE(review): the return of new_scope is elided from this view; the
   prototype declares this returning scope. */
989 make_new_scope (level, note)
/* xcalloc zero-fills, so all tree links and bb fields start as NULL. */
993 scope new_scope = xcalloc (1, sizeof (struct scope_def));
994 new_scope->level = level;
995 new_scope->note_beg = note;
1000 /* Build a forest representing the scope structure of the function.
1001 Return a pointer to a structure describing the forest. */
/* NOTE(review): braces, level increments/decrements, bbi advancement, and
   some assignments are elided from this view of the source. */
1004 build_scope_forest (forest)
1005 scope_forest_info *forest;
1009 basic_block curr_bb;
1010 scope root, curr_scope = 0;
1012 forest->num_trees = 0;
1013 forest->trees = NULL;
/* Single pass over the insn chain, tracking which basic block each insn
   belongs to and nesting scopes as BLOCK_BEG/BLOCK_END notes appear. */
1018 for (x = get_insns (); x; x = NEXT_INSN (x))
1020 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
1021 curr_bb = BASIC_BLOCK (bbi);
1023 if (GET_CODE (x) == NOTE)
1025 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
/* Nested scope: link the new scope as the last child of the
   current scope and descend into it. */
1033 new_scope = make_new_scope (level, x);
1034 new_scope->outer = curr_scope;
1035 new_scope->next = NULL;
1036 if (! curr_scope->inner)
1038 curr_scope->inner = new_scope;
1039 curr_scope->inner_last = new_scope;
1043 curr_scope->inner_last->next = new_scope;
1044 curr_scope->inner_last = new_scope;
1046 curr_scope = curr_scope->inner_last;
/* Top-level scope: start a new tree in the forest. */
1050 int ntrees = forest->num_trees;
1052 curr_scope = make_new_scope (level, x);
1054 forest->trees = xrealloc (forest->trees,
1055 sizeof (scope) * (ntrees + 1));
1056 forest->trees[forest->num_trees++] = root;
1058 curr_scope->bb_beg = curr_bb;
1060 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
1062 curr_scope->bb_end = curr_bb;
1063 curr_scope->note_end = x;
1065 curr_scope = curr_scope->outer;
/* Leaving the current block once its end insn has been seen. */
1071 if (curr_bb && curr_bb->end == x)
/* With the forest built, work out which blocks belong to which scope. */
1079 for (i = 0; i < forest->num_trees; i++)
1080 relate_bbs_with_scopes (forest->trees[i]);
1084 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
/* NOTE(review): the function header's closing comment line, local
   declarations, and some brace/assignment lines are elided from
   this view. */
1088 remove_scope_notes ()
1091 basic_block currbb = NULL;
1093 for (x = get_insns (); x; x = next)
1095 next = NEXT_INSN (x);
/* Track the block each insn falls in via the basic-block notes. */
1096 if (NOTE_INSN_BASIC_BLOCK_P (x))
1097 currbb = NOTE_BASIC_BLOCK (x);
1099 if (GET_CODE (x) == NOTE
1100 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1101 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1103 /* Check if the scope note happens to be the end of a bb. */
1104 if (currbb && x == currbb->end)
1105 currbb->end = PREV_INSN (x);
1106 if (currbb && x == currbb->head)
/* Unlink the note from the insn chain and clear its own links. */
1111 NEXT_INSN (PREV_INSN (x)) = next;
1112 PREV_INSN (next) = PREV_INSN (x);
1114 NEXT_INSN (x) = NULL;
1115 PREV_INSN (x) = NULL;
1124 /* Insert scope note pairs for a contained scope tree S after insn IP. */
/* NOTE(review): the second guard tests NOTE_BLOCK (s->note_beg) while the
   emitted END note copies NOTE_BLOCK (s->note_end) — presumably both notes
   of a pair carry the same block, making the guards equivalent; confirm
   against the full source before changing. */
1127 insert_intra_1 (s, ip)
/* Emit the BLOCK_BEG note for this scope (skipped if it has no block). */
1133 if (NOTE_BLOCK (s->note_beg))
1135 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1136 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
/* Recurse into the children so the note pairs nest correctly. */
1139 for (p = s->inner; p; p = p->next)
1140 insert_intra_1 (p, ip);
/* Emit the matching BLOCK_END note after the children. */
1142 if (NOTE_BLOCK (s->note_beg))
1144 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1145 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1150 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1151 scopes that are contained within BB. */
/* NOTE(review): the early-exit for a NULL scope and the initialization of
   IP appear to be elided from this view. */
1154 insert_intra_bb_scope_notes (bb)
1157 scope s = RBI (bb)->scope;
/* Insert after the block's label, if any, so the notes stay inside BB. */
1165 if (GET_CODE (ip) == CODE_LABEL)
1166 ip = NEXT_INSN (ip);
/* Only scopes wholly contained in BB (bb_beg == bb_end == bb) get their
   note pairs emitted inside the block. */
1168 for (p = s->inner; p; p = p->next)
1170 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1171 insert_intra_1 (p, &ip);
1176 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1177 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1178 notes before BB2 such that the notes are correctly balanced. If BB1 or
1179 BB2 is NULL, we are inserting scope notes for the first and last basic
1180 blocks, respectively. */
/* NOTE(review): the common-ancestor variable, loop bodies walking ->outer,
   and several braces are elided from this view of the source. */
1183 insert_inter_bb_scope_notes (bb1, bb2)
1190 /* It is possible that a basic block is not contained in any scope.
1191 In that case, we either open or close a scope but not both. */
1194 scope s1 = RBI (bb1)->scope;
1195 scope s2 = RBI (bb2)->scope;
1204 /* Find common ancestor scope. */
/* Walk the deeper scope up via ->outer until both sides meet. */
1207 scope s1 = RBI (bb1)->scope;
1208 scope s2 = RBI (bb2)->scope;
1213 if (s1->level > s2->level)
1215 else if (s2->level > s1->level)
/* Close scopes after BB1: emit BLOCK_END notes from BB1's scope up to
   (but not including) the common ancestor. */
1231 scope s = RBI (bb1)->scope;
1232 ip = RBI (bb1)->eff_end;
/* NOTE(review): guard tests note_beg while the note copies note_end —
   same asymmetry as insert_intra_1; confirm the pair share one block. */
1235 if (NOTE_BLOCK (s->note_beg))
1237 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1238 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
/* Open scopes before BB2: emit BLOCK_BEG notes down to BB2's scope. */
1247 scope s = RBI (bb2)->scope;
1251 if (NOTE_BLOCK (s->note_beg))
1253 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1254 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1262 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1263 on the scope forest and the newly reordered basic blocks. */
/* Runs after fixup_reorder_chain; with no scopes there is nothing to
   rebuild. */
1266 rebuild_scope_notes (forest)
1267 scope_forest_info *forest;
1271 if (forest->num_trees == 0)
1274 /* Start by opening the scopes before the first basic block. */
1275 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1277 /* Then, open and close scopes as needed between blocks. */
1278 for (i = 0; i < n_basic_blocks - 1; i++)
1280 basic_block bb1 = BASIC_BLOCK (i);
1281 basic_block bb2 = BASIC_BLOCK (i + 1);
/* Inter-block notes are only needed where the owning scope changes;
   intra-block notes are emitted for every block. */
1282 if (RBI (bb1)->scope != RBI (bb2)->scope)
1283 insert_inter_bb_scope_notes (bb1, bb2);
1284 insert_intra_bb_scope_notes (bb1);
1287 /* Finally, close the scopes after the last basic block. */
1288 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1289 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1293 /* Free the storage associated with the scope tree at S. */
/* NOTE(review): the lines freeing S itself (and its bbs array) are elided
   from this view. */
1296 free_scope_forest_1 (s)
/* Save the sibling link before recursing, since the child is freed. */
1301 for (p = s->inner; p; p = next)
1304 free_scope_forest_1 (p);
1313 /* Free the storage associated with the scope forest. */
/* Frees every tree; NOTE(review): whether forest->trees itself is freed
   is not visible in this view — confirm against the full source. */
1316 free_scope_forest (forest)
1317 scope_forest_info *forest;
1320 for (i = 0; i < forest->num_trees; i++)
1321 free_scope_forest_1 (forest->trees[i]);
1325 /* Visualize the scope forest.  Debug helper; prints to stderr. */
1328 dump_scope_forest (forest)
1329 scope_forest_info *forest;
1331 if (forest->num_trees == 0)
1332 fprintf (stderr, "\n< Empty scope forest >\n");
1336 fprintf (stderr, "\n< Scope forest >\n");
/* One recursive dump per top-level scope tree, starting at indent 0. */
1337 for (i = 0; i < forest->num_trees; i++)
1338 dump_scope_forest_1 (forest->trees[i], 0);
1343 /* Recursive portion of dump_scope_forest.  Prints scope S at the given
indentation, then its children indented two further columns. */
1346 dump_scope_forest_1 (s, indent)
/* A scope wholly contained in one bb that is directly owned by the
   enclosing scope is printed compactly as just the block number. */
1353 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1354 && RBI (s->bb_beg)->scope
1355 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1357 fprintf (stderr, "%*s", indent, "");
1358 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1361 fprintf (stderr, "%*s", indent, "");
1362 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1363 (PTR) NOTE_BLOCK (s->note_beg));
1365 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1366 for (i = 0; i < s->num_bbs; i++)
1367 fprintf (stderr, " %d", s->bbs[i]->index);
1368 fprintf (stderr, "\n");
1370 for (p = s->inner; p; p = p->next)
1371 dump_scope_forest_1 (p, indent + 2);
1373 fprintf (stderr, "%*s", indent, "");
1374 fprintf (stderr, "}\n");
1378 /* Reorder basic blocks. The main entry point to this file. */
/* Pipeline: allocate per-block reorder metadata on bb->aux, capture the
   scope forest, strip scope notes, record effective insn ranges, compute
   and apply the new block order, then rebuild scope notes and free all
   temporary state.  NOTE(review): a few closing braces/returns are elided
   from this view. */
1381 reorder_basic_blocks ()
1383 scope_forest_info forest;
/* Nothing to reorder with zero or one block. */
1386 if (n_basic_blocks <= 1)
1389 for (i = 0; i < n_basic_blocks; i++)
1390 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
/* The exit block also gets metadata so RBI() is safe on it. */
1392 EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
1394 build_scope_forest (&forest);
1395 remove_scope_notes ();
1397 record_effective_endpoints ();
1398 make_reorder_chain ();
1399 fixup_reorder_chain ();
1401 #ifdef ENABLE_CHECKING
1402 verify_insn_chain ();
1405 rebuild_scope_notes (&forest);
1406 free_scope_forest (&forest);
/* Release the per-block metadata allocated above. */
1409 for (i = 0; i < n_basic_blocks; i++)
1410 free (BASIC_BLOCK (i)->aux);
1412 free (EXIT_BLOCK_PTR->aux);
1414 #ifdef ENABLE_CHECKING
1415 verify_flow_info ();