1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
30 if (p) goto A; // predict taken
33 if (q) goto B; // predict taken
39 We'll currently reorder this as
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
72 (2) Coordinate with shorten_branches to minimize the number of
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
101 #ifndef HAVE_epilogue
102 #define HAVE_epilogue 0
106 /* The contents of the current function definition are allocated
107 in this obstack, and all are freed at the end of the function.
108 For top-level functions, this is temporary_obstack.
109 Separate obstacks are made for nested functions. */
111 extern struct obstack flow_obstack;
114 /* Structure to hold information about lexical scopes. */
114 /* Structure to hold information about lexical scopes. */
/* NOTE(review): this listing is elided -- the actual field declarations
   for note_beg, note_end, bb_beg, bb_end, bbs, num_bbs, and level are
   missing between the comments below; confirm against the full file.  */
115 typedef struct scope_def
119 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
122 /* The NOTE_INSN_BLOCK_END that ended this scope. */
125 /* The bb containing note_beg (if any). */
128 /* The bb containing note_end (if any). */
131 /* List of basic blocks contained within this scope. */
134 /* Number of blocks contained within this scope. */
137 /* The outer scope or NULL if outermost scope. */
138 struct scope_def *outer;
140 /* The first inner scope or NULL if innermost scope. */
141 struct scope_def *inner;
143 /* The last inner scope or NULL if innermost scope. */
144 struct scope_def *inner_last;
146 /* Link to the next (sibling) scope. */
147 struct scope_def *next;
151 /* Structure to hold information about the scope forest. */
154 /* Number of trees in forest. */
157 /* List of tree roots. */
161 /* Structure to hold information about the blocks during reordering. */
/* NOTE(review): member declarations are elided in this listing; later
   code uses eff_head, eff_end, scope, index, visited, and next.  */
162 typedef struct reorder_block_def
170 } *reorder_block_def;
/* Accessor for the per-block reordering data hung off basic_block->aux.  */
172 #define RBI(BB) ((reorder_block_def) (BB)->aux)
175 /* Local function prototypes. */
/* All helpers are file-static except dump_scope_forest and
   verify_insn_chain, which are exported for debugging.  */
176 static rtx skip_insns_after_block PARAMS ((basic_block));
177 static void record_effective_endpoints PARAMS ((void));
178 static void make_reorder_chain PARAMS ((void));
179 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
180 static rtx label_for_bb PARAMS ((basic_block));
181 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
182 static void fixup_reorder_chain PARAMS ((void));
183 static void relate_bbs_with_scopes PARAMS ((scope));
184 static scope make_new_scope PARAMS ((int, rtx));
185 static void build_scope_forest PARAMS ((scope_forest_info *));
186 static void remove_scope_notes PARAMS ((void));
187 static void insert_intra_1 PARAMS ((scope, rtx *));
188 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
189 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
190 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
191 static void free_scope_forest_1 PARAMS ((scope));
192 static void free_scope_forest PARAMS ((scope_forest_info *));
193 void dump_scope_forest PARAMS ((scope_forest_info *));
194 static void dump_scope_forest_1 PARAMS ((scope, int));
195 static rtx get_next_bb_note PARAMS ((rtx));
196 static rtx get_prev_bb_note PARAMS ((rtx));
198 void verify_insn_chain PARAMS ((void));
200 /* Skip over inter-block insns occurring after BB which are typically
201 associated with BB (e.g., barriers). If there are any such insns,
202 we return the last one. Otherwise, we return the end of BB. */
205 skip_insns_after_block (bb)
208 rtx insn, last_insn, next_head;
/* next_head is the head of the following block, used as a hard stop
   so we never walk into the next block's insns.  */
210 next_head = NULL_RTX;
211 if (bb->index + 1 != n_basic_blocks)
212 next_head = BASIC_BLOCK (bb->index + 1)->head;
214 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)); )
216 if (insn == next_head)
219 switch (GET_CODE (insn))
226 switch (NOTE_LINE_NUMBER (insn))
228 case NOTE_INSN_LOOP_END:
229 case NOTE_INSN_BLOCK_END:
232 case NOTE_INSN_DELETED:
233 case NOTE_INSN_DELETED_LABEL:
237 /* Make line notes attached to the successor block unless they
238 are followed by something attached to the predecessor block.
239 These notes remained after removing code in the predecessor
240 block and thus should be kept together. */
/* Line-number notes have NOTE_LINE_NUMBER >= 0; pass-specific
   notes are negative codes.  */
241 if (NOTE_LINE_NUMBER (insn) >= 0)
/* A CODE_LABEL immediately followed by a jump table (ADDR_VEC /
   ADDR_DIFF_VEC) belongs with BB; swallow both.  */
249 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
250 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
251 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
253 insn = NEXT_INSN (insn);
270 /* Locate the effective beginning and end of the insn chain for each
271 block, as defined by skip_insns_after_block above. */
274 record_effective_endpoints ()
276 rtx next_insn = get_insns ();
279 for (i = 0; i < n_basic_blocks; ++i)
281 basic_block bb = BASIC_BLOCK (i);
/* eff_head starts where the previous block's effective range ended,
   so inter-block insns (notes, barriers) travel with their block.  */
284 RBI (bb)->eff_head = next_insn;
285 end = skip_insns_after_block (bb);
286 RBI (bb)->eff_end = end;
287 next_insn = NEXT_INSN (end);
292 /* Compute an ordering for a subgraph beginning with block BB. Record the
293 ordering in RBI()->index and chained through RBI()->next. */
296 make_reorder_chain ()
298 basic_block last_block = NULL;
299 basic_block prev = NULL;
300 int nbb_m1 = n_basic_blocks - 1;
302 /* If we've not got epilogue in RTL, we must fallthru to the exit.
303 Force the last block to be at the end. */
304 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
305 end of the function for stack unwinding purposes. */
/* Pre-mark the final block visited so the greedy walk below never
   places it; it is appended explicitly when the chain terminates.  */
308 last_block = BASIC_BLOCK (nbb_m1);
309 RBI (last_block)->visited = 1;
313 /* Loop until we've placed every block. */
317 basic_block next = NULL;
319 /* Find the next unplaced block. */
320 /* ??? Get rid of this loop, and track which blocks are not yet
321 placed more directly, so as to avoid the O(N^2) worst case.
322 Perhaps keep a doubly-linked list of all to-be-placed blocks;
323 remove from the list as we place. The head of that list is
324 what we're looking for here. */
326 for (i = 0; i <= nbb_m1; ++i)
328 basic_block bb = BASIC_BLOCK (i);
329 if (! RBI (bb)->visited)
/* Greedily extend the chain from the chosen unplaced block.  */
338 prev = make_reorder_chain_1 (next, prev);
340 while (RBI (prev)->index < nbb_m1);
342 /* Terminate the chain. */
345 RBI (prev)->next = last_block;
346 RBI (last_block)->index = RBI (prev)->index + 1;
349 RBI (prev)->next = NULL;
352 /* A helper function for make_reorder_chain.
354 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
355 These are assumed to be the error condition and we wish to cluster
356 all of them at the very end of the function for the benefit of cache
357 locality for the rest of the function.
359 ??? We could do slightly better by noticing earlier that some subgraph
360 has all paths leading to noreturn functions, but for there to be more
361 than one block in such a subgraph is rare. */
364 make_reorder_chain_1 (bb, prev)
372 /* Mark this block visited. */
/* Link BB after PREV in the output chain and assign its position.  */
378 RBI (prev)->next = bb;
379 new_index = RBI (prev)->index + 1;
380 RBI (bb)->index = new_index;
382 if (rtl_dump_file && prev->index + 1 != bb->index)
383 fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
384 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
388 RBI (bb)->visited = 1;
/* A block with no successors ends the chain extension here.  */
391 if (bb->succ == NULL)
394 /* Find the most probable block. */
397 if (any_condjump_p (bb->end)
398 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
400 int taken, probability;
401 edge e_taken, e_fall;
403 probability = INTVAL (XEXP (note, 0));
/* Predict taken when the recorded probability exceeds 50%.  */
404 taken = probability > REG_BR_PROB_BASE / 2;
406 /* Find the normal taken edge and the normal fallthru edge.
408 Note, conditional jumps with other side effects may not
409 be fully optimized. In this case it is possible for
410 the conditional jump to branch to the same location as
413 We should probably work to improve optimization of that
414 case; however, it seems silly not to also deal with such
415 problems here if they happen to occur. */
417 e_taken = e_fall = NULL;
418 for (e = bb->succ; e ; e = e->succ_next)
420 if (e->flags & EDGE_FALLTHRU)
422 else if (! (e->flags & EDGE_EH))
/* Follow the predicted edge to keep the hot path contiguous.  */
426 next = (taken ? e_taken : e_fall)->dest;
429 /* In the absence of a prediction, disturb things as little as possible
430 by selecting the old "next" block from the list of successors. If
431 there had been a fallthru edge, that will be the one. */
434 for (e = bb->succ; e ; e = e->succ_next)
435 if (e->dest->index == bb->index + 1)
437 if ((e->flags & EDGE_FALLTHRU)
439 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
445 /* Make sure we didn't select a silly next block. */
446 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
449 /* Recurse on the successors. Unroll the last call, as the normal
450 case is exactly one or two edges, and we can tail recurse. */
451 for (e = bb->succ; e; e = e->succ_next)
452 if (e->dest != EXIT_BLOCK_PTR
453 && ! RBI (e->dest)->visited
455 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
459 prev = make_reorder_chain_1 (next, prev);
/* The recursive call may have placed e->dest; re-check before
   choosing it as the new tail-recursion target.  */
460 next = RBI (e->dest)->visited ? NULL : e->dest;
475 /* Locate or create a label for a given basic block. */
481 rtx label = bb->head;
/* If the block does not already begin with a label, synthesize one.  */
483 if (GET_CODE (label) != CODE_LABEL)
486 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
487 bb->index, RBI (bb)->index);
489 label = emit_label_before (gen_label_rtx (), label);
/* Keep the effective head in sync when the new label precedes it.  */
490 if (bb->head == RBI (bb)->eff_head)
491 RBI (bb)->eff_head = label;
499 /* Emit a jump to BB after insn AFTER. */
502 emit_jump_to_block_after (bb, after)
508 if (bb != EXIT_BLOCK_PTR)
/* Normal case: jump to the block's label, keeping JUMP_LABEL and
   the label's use count consistent for later passes.  */
510 rtx label = label_for_bb (bb);
511 jump = emit_jump_insn_after (gen_jump (label), after);
512 JUMP_LABEL (jump) = label;
513 LABEL_NUSES (label) += 1;
514 if (basic_block_for_insn)
515 set_block_for_new_insns (jump, bb);
518 fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
519 bb->index, RBI (bb)->index);
/* Jumping to the exit block means emitting a return insn instead.  */
526 jump = emit_jump_insn_after (gen_return (), after);
529 fprintf (rtl_dump_file, "Emitting return\n");
539 /* Given a reorder chain, rearrange the code to match. */
542 fixup_reorder_chain ()
544 basic_block bb, last_bb;
546 /* First do the bulk reordering -- rechain the blocks without regard to
547 the needed changes to jumps and labels. */
549 last_bb = BASIC_BLOCK (0);
550 bb = RBI (last_bb)->next;
/* Splice each block's effective insn range after its chain
   predecessor by rewriting NEXT/PREV pointers directly.  */
553 rtx last_e = RBI (last_bb)->eff_end;
554 rtx curr_h = RBI (bb)->eff_head;
556 NEXT_INSN (last_e) = curr_h;
557 PREV_INSN (curr_h) = last_e;
562 NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
563 set_last_insn (RBI (last_bb)->eff_end);
565 /* Now add jumps and labels as needed to match the blocks new
568 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
570 edge e_fall, e_taken, e;
571 rtx jump_insn, barrier_insn, bb_end_insn;
574 if (bb->succ == NULL)
577 /* Find the old fallthru edge, and another non-EH edge for
579 e_taken = e_fall = NULL;
580 for (e = bb->succ; e ; e = e->succ_next)
581 if (e->flags & EDGE_FALLTHRU)
583 else if (! (e->flags & EDGE_EH))
586 bb_end_insn = bb->end;
587 if (GET_CODE (bb_end_insn) == JUMP_INSN)
589 if (any_uncondjump_p (bb_end_insn))
591 /* If the destination is still not next, nothing to do. */
592 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
595 /* Otherwise, we can remove the jump and cleanup the edge. */
596 tidy_fallthru_edge (e_taken, bb, e_taken->dest);
/* Removing the jump changed BB's end; recompute the effective
   endpoints on both sides of the seam.  */
597 RBI (bb)->eff_end = skip_insns_after_block (bb);
598 RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
601 fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
602 bb->index, RBI (bb)->index);
605 else if (any_condjump_p (bb_end_insn))
607 /* If the old fallthru is still next, nothing to do. */
608 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
609 || (RBI (bb)->index == n_basic_blocks - 1
610 && e_fall->dest == EXIT_BLOCK_PTR))
613 /* There is one special case: if *neither* block is next,
614 such as happens at the very end of a function, then we'll
615 need to add a new unconditional jump. Choose the taken
616 edge based on known or assumed probability. */
617 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
619 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
/* If the branch is predicted not-taken, invert it so the
   likely path becomes the fallthru.  */
621 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
622 && invert_jump (bb_end_insn,
623 label_for_bb (e_fall->dest), 0))
625 e_fall->flags &= ~EDGE_FALLTHRU;
626 e_taken->flags |= EDGE_FALLTHRU;
627 e = e_fall, e_fall = e_taken, e_taken = e;
631 /* Otherwise we can try to invert the jump. This will
632 basically never fail, however, keep up the pretense. */
633 else if (invert_jump (bb_end_insn,
634 label_for_bb (e_fall->dest), 0))
636 e_fall->flags &= ~EDGE_FALLTHRU;
637 e_taken->flags |= EDGE_FALLTHRU;
641 else if (returnjump_p (bb_end_insn))
645 /* Otherwise we have some switch or computed jump. In the
646 99% case, there should not have been a fallthru edge. */
649 #ifdef CASE_DROPS_THROUGH
650 /* Except for VAX. Since we didn't have predication for the
651 tablejump, the fallthru block should not have moved. */
652 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
654 bb_end_insn = skip_insns_after_block (bb);
662 /* No fallthru implies a noreturn function with EH edges, or
663 something similarly bizarre. In any case, we don't need to
668 /* If the fallthru block is still next, nothing to do. */
669 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
670 || (RBI (bb)->index == n_basic_blocks - 1
671 && e_fall->dest == EXIT_BLOCK_PTR))
674 /* We need a new jump insn. If the block has only one outgoing
675 edge, then we can stuff the new jump insn in directly. */
676 if (bb->succ->succ_next == NULL)
678 e_fall->flags &= ~EDGE_FALLTHRU;
680 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
682 barrier_insn = emit_barrier_after (jump_insn);
683 RBI (bb)->eff_end = barrier_insn;
688 /* We got here if we need to add a new jump insn in a new block
689 across the edge e_fall. */
691 jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn);
692 barrier_insn = emit_barrier_after (jump_insn);
694 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
695 create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
697 nb = BASIC_BLOCK (n_basic_blocks - 1);
698 nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
699 nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* The new block contains only a jump, so nothing becomes live or
   dead across it: both live sets equal BB's live-at-start.
   NOTE(review): using live_at_start for both looks deliberate but
   confirm against the fallthru edge's liveness expectations.  */
702 COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
703 COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
705 nb->aux = xmalloc (sizeof (struct reorder_block_def));
706 RBI (nb)->eff_head = nb->head;
707 RBI (nb)->eff_end = barrier_insn;
708 RBI (nb)->scope = RBI (bb)->scope;
709 RBI (nb)->index = RBI (bb)->index + 1;
710 RBI (nb)->visited = 1;
711 RBI (nb)->next = RBI (bb)->next;
714 /* Link to new block. */
715 make_edge (NULL, nb, e_fall->dest, 0);
716 redirect_edge_succ (e_fall, nb);
718 /* Don't process this new block. */
721 /* Fix subsequent reorder block indices to reflect new block. */
722 while ((nb = RBI (nb)->next) != NULL)
723 RBI (nb)->index += 1;
726 /* Put basic_block_info in the new order. */
727 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
729 bb->index = RBI (bb)->index;
730 BASIC_BLOCK (bb->index) = bb;
735 /* Perform sanity checks on the insn chain.
736 1. Check that next/prev pointers are consistent in both the forward and
738 2. Count insns in chain, going both directions, and check if equal.
739 3. Check that get_last_insn () returns the actual end of chain. */
/* Forward pass: each insn's PREV must point at the insn we just left.  */
752 for (x = get_insns (); x; x = NEXT_INSN (x))
754 if (PREV_INSN (x) != prevx)
756 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
757 fprintf (stderr, "previous insn:\n");
759 fprintf (stderr, "current insn:\n");
767 if (prevx != get_last_insn ())
769 fprintf (stderr, "last_insn corrupt.\n");
/* Backward pass: mirror check using NEXT pointers.  */
775 for (x = get_last_insn (); x; x = PREV_INSN (x))
777 if (NEXT_INSN (x) != nextx)
779 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
780 fprintf (stderr, "current insn:\n");
782 fprintf (stderr, "next insn:\n");
790 if (insn_cnt1 != insn_cnt2)
792 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
793 insn_cnt1, insn_cnt2);
/* NOTE(review): the lines below appear to belong to the elided
   get_next_bb_note / get_prev_bb_note helpers -- confirm.  */
804 if (NOTE_INSN_BASIC_BLOCK_P (x))
818 if (NOTE_INSN_BASIC_BLOCK_P (x))
826 /* Determine and record the relationships between basic blocks and
827 scopes in scope tree S. */
830 relate_bbs_with_scopes (s)
834 int i, bbi1, bbi2, bbs_spanned;
/* Process children first so inner scopes claim their blocks before
   this scope records the blocks immediately contained in it.  */
837 for (p = s->inner; p; p = p->next)
838 relate_bbs_with_scopes (p);
843 /* If the begin and end notes are both inside the same basic block,
844 or if they are both outside of basic blocks, then we know immediately
845 how they are related. Otherwise, we need to poke around to make the
847 if (s->bb_beg != s->bb_end)
849 if (s->bb_beg && s->bb_end)
851 /* Both notes are in different bbs. This implies that all the
852 basic blocks spanned by the pair of notes are contained in
854 bbi1 = s->bb_beg->index;
855 bbi2 = s->bb_end->index;
858 else if (! s->bb_beg)
860 /* First note is outside of a bb. If the scope spans more than
861 one basic block, then they all are contained within this
862 scope. Otherwise, this scope is contained within the basic
864 bbnote = get_next_bb_note (s->note_beg);
867 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
870 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
874 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
875 bbi2 = s->bb_end->index;
880 else /* ! s->bb_end */
882 /* Second note is outside of a bb. If the scope spans more than
883 one basic block, then they all are contained within this
884 scope. Otherwise, this scope is contained within the basic
886 bbnote = get_prev_bb_note (s->note_end);
889 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
892 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
896 bbi1 = s->bb_beg->index;
897 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
906 /* Both notes are in the same bb, which implies the block
907 contains this scope. */
912 /* Both notes are outside of any bbs. This implies that all the
913 basic blocks spanned by the pair of notes are contained in
915 There is a degenerate case to consider. If the notes do not
916 span any basic blocks, then it is an empty scope that can
917 safely be deleted or ignored. Mark these with level = -1. */
919 x1 = get_next_bb_note (s->note_beg);
920 x2 = get_prev_bb_note (s->note_end);
928 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
929 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
935 /* If the scope spans one or more basic blocks, we record them. We
936 only record the bbs that are immediately contained within this
937 scope. Note that if a scope is contained within a bb, we can tell
938 by checking that bb_beg = bb_end and that they are non-null. */
/* First count the unclaimed blocks in [bbi1, bbi2], then allocate
   exactly that many slots and fill them in a second pass.  */
944 for (i = bbi1; i <= bbi2; i++)
945 if (! RBI (BASIC_BLOCK (i))->scope)
948 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
949 for (i = bbi1; i <= bbi2; i++)
951 basic_block curr_bb = BASIC_BLOCK (i);
952 if (! RBI (curr_bb)->scope)
954 s->bbs[j++] = curr_bb;
955 RBI (curr_bb)->scope = s;
964 /* Allocate and initialize a new scope structure with scope level LEVEL,
965 and record the NOTE beginning the scope. */
968 make_new_scope (level, note)
/* xcalloc zeroes the struct, so all links/fields start NULL/0.  */
972 scope new_scope = xcalloc (1, sizeof (struct scope_def));
973 new_scope->level = level;
974 new_scope->note_beg = note;
979 /* Build a forest representing the scope structure of the function.
980 Return a pointer to a structure describing the forest. */
983 build_scope_forest (forest)
984 scope_forest_info *forest;
989 scope root, curr_scope = 0;
991 forest->num_trees = 0;
992 forest->trees = NULL;
/* Single linear walk over the insn chain, tracking the current basic
   block and the current scope nesting as BLOCK_BEG/END notes appear.  */
997 for (x = get_insns (); x; x = NEXT_INSN (x))
999 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
1000 curr_bb = BASIC_BLOCK (bbi);
1002 if (GET_CODE (x) == NOTE)
1004 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
/* Nested scope: link it as the last child of curr_scope.  */
1012 new_scope = make_new_scope (level, x);
1013 new_scope->outer = curr_scope;
1014 new_scope->next = NULL;
1015 if (! curr_scope->inner)
1017 curr_scope->inner = new_scope;
1018 curr_scope->inner_last = new_scope;
1022 curr_scope->inner_last->next = new_scope;
1023 curr_scope->inner_last = new_scope;
1025 curr_scope = curr_scope->inner_last;
/* Top-level scope: start a new tree in the forest.  */
1029 int ntrees = forest->num_trees;
1031 curr_scope = make_new_scope (level, x);
1033 forest->trees = xrealloc (forest->trees,
1034 sizeof (scope) * (ntrees + 1));
1035 forest->trees[forest->num_trees++] = root;
1037 curr_scope->bb_beg = curr_bb;
1039 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
1041 curr_scope->bb_end = curr_bb;
1042 curr_scope->note_end = x;
/* Pop back to the enclosing scope.  */
1044 curr_scope = curr_scope->outer;
1050 if (curr_bb && curr_bb->end == x)
/* With the forest built, map each block to its innermost scope.  */
1058 for (i = 0; i < forest->num_trees; i++)
1059 relate_bbs_with_scopes (forest->trees[i]);
1063 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
1067 remove_scope_notes ()
1070 basic_block currbb = NULL;
1072 for (x = get_insns (); x; x = next)
1074 next = NEXT_INSN (x);
1075 if (NOTE_INSN_BASIC_BLOCK_P (x))
1076 currbb = NOTE_BASIC_BLOCK (x);
1078 if (GET_CODE (x) == NOTE
1079 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1080 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1082 /* Check if the scope note happens to be the end of a bb. */
1083 if (currbb && x == currbb->end)
1084 currbb->end = PREV_INSN (x);
1085 if (currbb && x == currbb->head)
/* Unlink X from the insn chain and clear its own links so no
   stale pointers survive.  */
1090 NEXT_INSN (PREV_INSN (x)) = next;
1091 PREV_INSN (next) = PREV_INSN (x);
1093 NEXT_INSN (x) = NULL;
1094 PREV_INSN (x) = NULL;
1103 /* Insert scope note pairs for a contained scope tree S after insn IP. */
1106 insert_intra_1 (s, ip)
/* Emit BEG note, recurse into children, then emit the matching END
   note, advancing *IP past each emitted note.  */
1112 if (NOTE_BLOCK (s->note_beg))
1114 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1115 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
1118 for (p = s->inner; p; p = p->next)
1119 insert_intra_1 (p, ip);
/* NOTE(review): this guard tests note_beg while emitting the END
   note from note_end; it keeps BEG/END emission paired, but confirm
   note_end is never the intended condition here.  */
1121 if (NOTE_BLOCK (s->note_beg))
1123 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1124 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1129 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1130 scopes that are contained within BB. */
1133 insert_intra_bb_scope_notes (bb)
1136 scope s = RBI (bb)->scope;
/* Insert after the block's label, if any, so notes stay inside BB.  */
1144 if (GET_CODE (ip) == CODE_LABEL)
1145 ip = NEXT_INSN (ip);
1147 for (p = s->inner; p; p = p->next)
/* Only scopes wholly contained in BB (bb_beg == bb_end == bb).  */
1149 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1150 insert_intra_1 (p, &ip);
1155 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1156 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1157 notes before BB2 such that the notes are correctly balanced. If BB1 or
1158 BB2 is NULL, we are inserting scope notes for the first and last basic
1159 blocks, respectively. */
1162 insert_inter_bb_scope_notes (bb1, bb2)
1169 /* It is possible that a basic block is not contained in any scope.
1170 In that case, we either open or close a scope but not both. */
1173 scope s1 = RBI (bb1)->scope;
1174 scope s2 = RBI (bb2)->scope;
1183 /* Find common ancestor scope. */
/* Walk both scope chains outward until they meet; levels are used
   to advance the deeper one first.  */
1186 scope s1 = RBI (bb1)->scope;
1187 scope s2 = RBI (bb2)->scope;
1192 if (s1->level > s2->level)
1194 else if (s2->level > s1->level)
/* Close scopes after BB1 up to (not including) the common ancestor.  */
1210 scope s = RBI (bb1)->scope;
1211 ip = RBI (bb1)->eff_end;
/* NOTE(review): guard tests note_beg while emitting from note_end;
   appears intentional (BEG/END pairing) but worth confirming.  */
1214 if (NOTE_BLOCK (s->note_beg))
1216 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1217 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
/* Open scopes before BB2 down from the common ancestor.  */
1226 scope s = RBI (bb2)->scope;
1230 if (NOTE_BLOCK (s->note_beg))
1232 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1233 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1241 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1242 on the scope forest and the newly reordered basic blocks. */
1245 rebuild_scope_notes (forest)
1246 scope_forest_info *forest;
/* Nothing to rebuild for a function with no lexical scopes.  */
1250 if (forest->num_trees == 0)
1253 /* Start by opening the scopes before the first basic block. */
1254 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1256 /* Then, open and close scopes as needed between blocks. */
1257 for (i = 0; i < n_basic_blocks - 1; i++)
1259 basic_block bb1 = BASIC_BLOCK (i);
1260 basic_block bb2 = BASIC_BLOCK (i + 1);
1261 if (RBI (bb1)->scope != RBI (bb2)->scope)
1262 insert_inter_bb_scope_notes (bb1, bb2);
1263 insert_intra_bb_scope_notes (bb1);
1266 /* Finally, close the scopes after the last basic block. */
1267 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1268 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1272 /* Free the storage associated with the scope tree at S. */
1275 free_scope_forest_1 (s)
/* Capture p->next before freeing, since the recursion frees P.  */
1280 for (p = s->inner; p; p = next)
1283 free_scope_forest_1 (p);
1292 /* Free the storage associated with the scope forest. */
1295 free_scope_forest (forest)
1296 scope_forest_info *forest;
1299 for (i = 0; i < forest->num_trees; i++)
1300 free_scope_forest_1 (forest->trees[i]);
1304 /* Visualize the scope forest. */
1307 dump_scope_forest (forest)
1308 scope_forest_info *forest;
1310 if (forest->num_trees == 0)
1311 fprintf (stderr, "\n< Empty scope forest >\n");
1315 fprintf (stderr, "\n< Scope forest >\n");
/* One indented dump per tree root.  */
1316 for (i = 0; i < forest->num_trees; i++)
1317 dump_scope_forest_1 (forest->trees[i], 0);
1322 /* Recursive portion of dump_scope_forest. */
1325 dump_scope_forest_1 (s, indent)
/* A scope wholly contained in one block whose parent owns the block:
   print just the block id instead of a full scope entry.  */
1332 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1333 && RBI (s->bb_beg)->scope
1334 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1336 fprintf (stderr, "%*s", indent, "");
1337 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1340 fprintf (stderr, "%*s", indent, "");
1341 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1342 (PTR) NOTE_BLOCK (s->note_beg));
1344 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1345 for (i = 0; i < s->num_bbs; i++)
1346 fprintf (stderr, " %d", s->bbs[i]->index);
1347 fprintf (stderr, "\n");
1349 for (p = s->inner; p; p = p->next)
1350 dump_scope_forest_1 (p, indent + 2);
1352 fprintf (stderr, "%*s", indent, "");
1353 fprintf (stderr, "}\n");
1357 /* Reorder basic blocks. The main entry point to this file. */
1360 reorder_basic_blocks ()
1362 scope_forest_info forest;
/* A function with zero or one block has nothing to reorder.  */
1365 if (n_basic_blocks <= 1)
/* Hang zero-initialized reorder data off every block's aux field,
   including the exit block.  */
1368 for (i = 0; i < n_basic_blocks; i++)
1369 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1371 EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));
/* Pipeline: capture scope structure, strip its notes, reorder the
   blocks, then rebuild the notes around the new layout.  */
1373 build_scope_forest (&forest);
1374 remove_scope_notes ();
1376 record_effective_endpoints ();
1377 make_reorder_chain ();
1378 fixup_reorder_chain ();
1380 #ifdef ENABLE_CHECKING
1381 verify_insn_chain ();
1384 rebuild_scope_notes (&forest);
1385 free_scope_forest (&forest);
/* Release the per-block aux data allocated above.  */
1388 for (i = 0; i < n_basic_blocks; i++)
1389 free (BASIC_BLOCK (i)->aux);
1391 free (EXIT_BLOCK_PTR->aux);
1393 #ifdef ENABLE_CHECKING
1394 verify_flow_info ();