1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 "Profile Guided Code Positioning"
24 Pettis and Hanson; PLDI '90.
30 if (p) goto A; // predict taken
33 if (q) goto B; // predict taken
39 We'll currently reorder this as
68 This requires that we be able to duplicate the jump at A, and
69 adjust the graph traversal such that greedy placement doesn't
70 fix D before C is considered.
72 (2) Coordinate with shorten_branches to minimize the number of
75 (3) Invent a method by which sufficiently non-predicted code can
76 be moved to either the end of the section or another section
77 entirely. Some sort of NOTE_INSN note would work fine.
79 This completely scroggs all debugging formats, so the user
80 would have to explicitly ask for it.
88 #include "hard-reg-set.h"
89 #include "basic-block.h"
90 #include "insn-config.h"
98 #include "insn-flags.h"
103 #ifndef HAVE_epilogue
104 #define HAVE_epilogue 0
108 /* The contents of the current function definition are allocated
109 in this obstack, and all are freed at the end of the function.
110 For top-level functions, this is temporary_obstack.
111 Separate obstacks are made for nested functions. */
/* Defined elsewhere (function.c); used below to allocate register sets
   for newly created basic blocks with OBSTACK_ALLOC_REG_SET.  */
113 extern struct obstack *function_obstack;
116 /* Structure to hold information about lexical scopes.  A scope
   corresponds to one NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END note pair;
   scopes form a forest of trees linked by outer/inner/next below. */
117 typedef struct scope_def
121 /* The NOTE_INSN_BLOCK_BEG that started this scope. */
124 /* The NOTE_INSN_BLOCK_END that ended this scope. */
127 /* The bb containing note_beg (if any). */
130 /* The bb containing note_end (if any). */
133 /* List of basic blocks contained within this scope. */
136 /* Number of blocks contained within this scope. */
139 /* The outer scope or NULL if outermost scope. */
140 struct scope_def *outer;
142 /* The first inner scope or NULL if innermost scope. */
143 struct scope_def *inner;
145 /* The last inner scope or NULL if innermost scope. */
146 struct scope_def *inner_last;
148 /* Link to the next (sibling) scope. */
149 struct scope_def *next;
153 /* Structure to hold information about the scope forest: the set of
   scope trees for the whole function, built by build_scope_forest and
   walked/freed by rebuild_scope_notes and free_scope_forest. */
156 /* Number of trees in forest. */
159 /* List of tree roots. */
163 /* Structure to hold information about the blocks during reordering:
   per-block reordering state (effective insn endpoints, chain link,
   new index, visited flag, owning scope) hung off bb->aux. */
164 typedef struct reorder_block_def
172 } *reorder_block_def;
/* Accessor for the per-block reordering data stored in BB->aux. */
174 #define RBI(BB) ((reorder_block_def) (BB)->aux)
177 /* Local function prototypes.  All static except for the two debugging
   entry points dump_scope_forest and verify_insn_chain, which are
   deliberately extern so they can be called from a debugger. */
178 static rtx skip_insns_after_block PARAMS ((basic_block));
179 static void record_effective_endpoints PARAMS ((void));
180 static void make_reorder_chain PARAMS ((void));
181 static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block));
182 static rtx label_for_bb PARAMS ((basic_block));
183 static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx));
184 static void fixup_reorder_chain PARAMS ((void));
185 static void relate_bbs_with_scopes PARAMS ((scope));
186 static scope make_new_scope PARAMS ((int, rtx));
187 static void build_scope_forest PARAMS ((scope_forest_info *));
188 static void remove_scope_notes PARAMS ((void));
189 static void insert_intra_1 PARAMS ((scope, rtx *));
190 static void insert_intra_bb_scope_notes PARAMS ((basic_block));
191 static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block));
192 static void rebuild_scope_notes PARAMS ((scope_forest_info *));
193 static void free_scope_forest_1 PARAMS ((scope));
194 static void free_scope_forest PARAMS ((scope_forest_info *));
195 void dump_scope_forest PARAMS ((scope_forest_info *));
196 static void dump_scope_forest_1 PARAMS ((scope, int));
197 static rtx get_next_bb_note PARAMS ((rtx));
198 static rtx get_prev_bb_note PARAMS ((rtx));
200 void verify_insn_chain PARAMS ((void));
202 /* Skip over inter-block insns occurring after BB which are typically
203 associated with BB (e.g., barriers). If there are any such insns,
204 we return the last one. Otherwise, we return the end of BB. */
207 skip_insns_after_block (bb)
210 rtx insn, last_insn, next_head;
/* Head insn of the following block, so the scan never walks into it.
   NULL_RTX for the last block (no successor block in the chain).  */
212 next_head = NULL_RTX;
213 if (bb->index + 1 != n_basic_blocks)
214 next_head = BASIC_BLOCK (bb->index + 1)->head;
/* Walk forward from bb->end, extending last_insn over skippable insns.  */
216 for (last_insn = bb->end; (insn = NEXT_INSN (last_insn)); last_insn = insn)
218 if (insn == next_head)
221 switch (GET_CODE (insn))
/* For NOTEs, only certain kinds belong to BB; NOTE(review): the cases
   for BARRIER/CODE_LABEL and the loop-exit logic are not visible here
   in this elided listing — confirm against the full source.  */
227 switch (NOTE_LINE_NUMBER (insn))
229 case NOTE_INSN_LOOP_END:
230 case NOTE_INSN_BLOCK_END:
231 case NOTE_INSN_DELETED:
232 case NOTE_INSN_DELETED_LABEL:
/* A code label immediately followed by a jump table (ADDR_VEC or
   ADDR_DIFF_VEC) stays attached to BB; consume both insns.  */
242 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
243 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
244 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
246 insn = NEXT_INSN (insn);
262 /* Locate the effective beginning and end of the insn chain for each
263 block, as defined by skip_insns_after_block above.  The effective
   ranges tile the whole insn chain: each block's eff_head starts right
   after the previous block's eff_end, so inter-block insns (notes,
   barriers) move together with the block they follow. */
266 record_effective_endpoints ()
268 rtx next_insn = get_insns ();
271 for (i = 0; i < n_basic_blocks; ++i)
273 basic_block bb = BASIC_BLOCK (i);
/* eff_head may precede bb->head: it picks up insns left over from the
   previous block's effective range.  */
276 RBI (bb)->eff_head = next_insn;
277 end = skip_insns_after_block (bb);
278 RBI (bb)->eff_end = end;
279 next_insn = NEXT_INSN (end);
284 /* Compute an ordering for a subgraph beginning with block BB. Record the
285 ordering in RBI()->index and chained through RBI()->next. */
288 make_reorder_chain ()
290 basic_block last_block = NULL;
291 basic_block prev = NULL;
292 int nbb_m1 = n_basic_blocks - 1;
294 /* If we've not got epilogue in RTL, we must fallthru to the exit.
295 Force the last block to be at the end. */
296 /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
297 end of the function for stack unwinding purposes. */
/* Pre-mark the final block visited so the greedy walk below never
   places it; it is appended explicitly after the loop.  NOTE(review):
   the HAVE_epilogue condition guarding this is elided here — confirm. */
300 last_block = BASIC_BLOCK (nbb_m1);
301 RBI (last_block)->visited = 1;
305 /* Loop until we've placed every block. */
309 basic_block next = NULL;
311 /* Find the next unplaced block. */
312 /* ??? Get rid of this loop, and track which blocks are not yet
313 placed more directly, so as to avoid the O(N^2) worst case.
314 Perhaps keep a doubly-linked list of all to-be-placed blocks;
315 remove from the list as we place. The head of that list is
316 what we're looking for here. */
318 for (i = 0; i <= nbb_m1; ++i)
320 basic_block bb = BASIC_BLOCK (i);
321 if (! RBI (bb)->visited)
/* Greedily lay out the chain rooted at the unplaced block found.  */
330 prev = make_reorder_chain_1 (next, prev);
/* Stop once every index up to nbb_m1 has been assigned.  */
332 while (RBI (prev)->index < nbb_m1);
334 /* Terminate the chain.  If a reserved last block exists, append it;
   otherwise just end the chain at prev. */
337 RBI (prev)->next = last_block;
338 RBI (last_block)->index = RBI (prev)->index + 1;
341 RBI (prev)->next = NULL;
344 /* A helper function for make_reorder_chain.
346 We do not follow EH edges, or non-fallthru edges to noreturn blocks.
347 These are assumed to be the error condition and we wish to cluster
348 all of them at the very end of the function for the benefit of cache
349 locality for the rest of the function.
351 ??? We could do slightly better by noticing earlier that some subgraph
352 has all paths leading to noreturn functions, but for there to be more
353 than one block in such a subgraph is rare. */
356 make_reorder_chain_1 (bb, prev)
364 /* Mark this block visited.  Also link it after PREV (when PREV is
   non-null) and assign its position in the new ordering. */
370 RBI (prev)->next = bb;
371 new_index = RBI (prev)->index + 1;
372 RBI (bb)->index = new_index;
/* Log only the moves, i.e. when the new neighbor differs from the old.  */
374 if (rtl_dump_file && prev->index + 1 != bb->index)
375 fprintf (rtl_dump_file, "Reordering block %d (%d) after %d (%d)\n",
376 bb->index, RBI (bb)->index, prev->index, RBI (prev)->index);
380 RBI (bb)->visited = 1;
/* A block with no successors ends this chain segment.  */
383 if (bb->succ == NULL)
386 /* Find the most probable block. */
/* If the block ends in a conditional jump with a recorded branch
   probability, follow the predicted direction.  */
389 if (any_condjump_p (bb->end)
390 && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL)
392 int taken, probability;
393 edge e_taken, e_fall;
395 probability = INTVAL (XEXP (note, 0));
396 taken = probability > REG_BR_PROB_BASE / 2;
398 /* Find the normal taken edge and the normal fallthru edge.
399 Note that there may in fact be other edges due to
400 asynchronous_exceptions. */
402 e_taken = e_fall = NULL;
403 for (e = bb->succ; e ; e = e->succ_next)
404 if (e->flags & EDGE_FALLTHRU)
406 else if (! (e->flags & EDGE_EH))
409 next = (taken ? e_taken : e_fall)->dest;
412 /* In the absence of a prediction, disturb things as little as possible
413 by selecting the old "next" block from the list of successors. If
414 there had been a fallthru edge, that will be the one. */
417 for (e = bb->succ; e ; e = e->succ_next)
418 if (e->dest->index == bb->index + 1)
420 if ((e->flags & EDGE_FALLTHRU)
422 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))))
428 /* Make sure we didn't select a silly next block. */
429 if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited)
432 /* Recurse on the successors. Unroll the last call, as the normal
433 case is exactly one or two edges, and we can tail recurse. */
434 for (e = bb->succ; e; e = e->succ_next)
435 if (e->dest != EXIT_BLOCK_PTR
436 && ! RBI (e->dest)->visited
438 && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
/* Place the preferred NEXT first, then consider this edge's dest
   if it is still unvisited afterward (it may have been placed by
   the recursive call).  */
442 prev = make_reorder_chain_1 (next, prev);
443 next = RBI (e->dest)->visited ? NULL : e->dest;
458 /* Locate or create a label for a given basic block: returns BB's head
   insn if it is already a CODE_LABEL, otherwise emits a fresh label
   before the head and returns that. */
464 rtx label = bb->head;
466 if (GET_CODE (label) != CODE_LABEL)
469 fprintf (rtl_dump_file, "Emitting label for block %d (%d)\n",
470 bb->index, RBI (bb)->index);
472 label = emit_label_before (gen_label_rtx (), label);
/* Keep the effective range in sync: if the old head began the
   effective range, the new label becomes the effective head. */
473 if (bb->head == RBI (bb)->eff_head)
474 RBI (bb)->eff_head = label;
482 /* Emit a jump to BB after insn AFTER.  For the exit block a return
   insn is emitted instead of a jump.  Returns the new jump insn. */
485 emit_jump_to_block_after (bb, after)
491 if (bb != EXIT_BLOCK_PTR)
493 rtx label = label_for_bb (bb);
494 jump = emit_jump_insn_after (gen_jump (label), after);
/* Record the jump target and bump the label's use count so later
   passes know the label is referenced.  */
495 JUMP_LABEL (jump) = label;
496 LABEL_NUSES (label) += 1;
499 fprintf (rtl_dump_file, "Emitting jump to block %d (%d)\n",
500 bb->index, RBI (bb)->index);
/* Jump "to" the exit block == return from the function.  NOTE(review):
   presumably guarded by HAVE_return in the elided lines — confirm. */
506 jump = emit_jump_insn_after (gen_return (), after);
509 fprintf (rtl_dump_file, "Emitting return\n");
516 /* Given a reorder chain, rearrange the code to match: splice the insn
   chain into the new block order, then patch up jumps, labels and the
   CFG so control flow is preserved under the new layout. */
519 fixup_reorder_chain ()
521 basic_block bb, last_bb;
523 /* First do the bulk reordering -- rechain the blocks without regard to
524 the needed changes to jumps and labels.  Each block's effective insn
   range [eff_head, eff_end] is moved as a unit. */
526 last_bb = BASIC_BLOCK (0);
527 bb = RBI (last_bb)->next;
530 rtx last_e = RBI (last_bb)->eff_end;
531 rtx curr_h = RBI (bb)->eff_head;
/* Directly rewrite the doubly-linked insn chain pointers.  */
533 NEXT_INSN (last_e) = curr_h;
534 PREV_INSN (curr_h) = last_e;
/* Terminate the chain at the final block's effective end.  */
539 NEXT_INSN (RBI (last_bb)->eff_end) = NULL_RTX;
540 set_last_insn (RBI (last_bb)->eff_end);
542 /* Now add jumps and labels as needed to match the blocks new
545 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
547 edge e_fall, e_taken, e;
548 rtx jump_insn, barrier_insn;
/* Blocks with no successors need no fixup.  */
551 if (bb->succ == NULL)
554 /* Find the old fallthru edge, and another non-EH edge for
556 e_taken = e_fall = NULL;
557 for (e = bb->succ; e ; e = e->succ_next)
558 if (e->flags & EDGE_FALLTHRU)
560 else if (! (e->flags & EDGE_EH))
563 if (GET_CODE (bb->end) == JUMP_INSN)
565 if (any_uncondjump_p (bb->end))
567 /* If the destination is still not next, nothing to do. */
568 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
571 /* Otherwise, we can remove the jump and cleanup the edge. */
572 tidy_fallthru_edge (e_taken, bb, e_taken->dest);
/* Removing the jump changed the trailing insns; recompute the
   effective endpoints on both sides of the seam.  */
573 RBI (bb)->eff_end = skip_insns_after_block (bb);
574 RBI (e_taken->dest)->eff_head = NEXT_INSN (RBI (bb)->eff_end);
577 fprintf (rtl_dump_file, "Removing jump in block %d (%d)\n",
578 bb->index, RBI (bb)->index);
581 else if (any_condjump_p (bb->end))
583 /* If the old fallthru is still next, nothing to do. */
584 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
585 || (RBI (bb)->index == n_basic_blocks - 1
586 && e_fall->dest == EXIT_BLOCK_PTR))
589 /* There is one special case: if *neither* block is next,
590 such as happens at the very end of a function, then we'll
591 need to add a new unconditional jump. Choose the taken
592 edge based on known or assumed probability. */
593 if (RBI (bb)->index + 1 != RBI (e_taken->dest)->index)
595 rtx note = find_reg_note (bb->end, REG_BR_PROB, 0);
/* Predicted not-taken: invert the condition so the likely path
   becomes the fallthru and swap the two edges' roles.  */
597 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
598 && invert_jump (bb->end, label_for_bb (e_fall->dest), 0))
600 e_fall->flags &= ~EDGE_FALLTHRU;
601 e_taken->flags |= EDGE_FALLTHRU;
602 e = e_fall, e_fall = e_taken, e_taken = e;
606 /* Otherwise we can try to invert the jump. This will
607 basically never fail, however, keep up the pretense. */
608 else if (invert_jump (bb->end, label_for_bb (e_fall->dest), 0))
610 e_fall->flags &= ~EDGE_FALLTHRU;
611 e_taken->flags |= EDGE_FALLTHRU;
/* A return insn never falls through; nothing to fix up.  */
615 else if (returnjump_p (bb->end))
619 /* Otherwise we have some switch or computed jump. In the
620 99% case, there should not have been a fallthru edge. */
623 #ifdef CASE_DROPS_THROUGH
624 /* Except for VAX. Since we didn't have predication for the
625 tablejump, the fallthru block should not have moved. */
626 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index)
634 /* No fallthru implies a noreturn function with EH edges, or
635 something similarly bizarre. In any case, we don't need to
640 /* If the fallthru block is still next, nothing to do. */
641 if (RBI (bb)->index + 1 == RBI (e_fall->dest)->index
642 || (RBI (bb)->index == n_basic_blocks - 1
643 && e_fall->dest == EXIT_BLOCK_PTR))
646 /* We need a new jump insn. If the block has only one outgoing
647 edge, then we can stuff the new jump insn in directly. */
648 if (bb->succ->succ_next == NULL)
650 e_fall->flags &= ~EDGE_FALLTHRU;
652 jump_insn = emit_jump_to_block_after (e_fall->dest, bb->end);
/* The barrier marks the end of reachable code after the jump.  */
654 barrier_insn = emit_barrier_after (jump_insn);
655 RBI (bb)->eff_end = barrier_insn;
660 /* We got here if we need to add a new jump insn in a new block
661 across the edge e_fall. */
663 jump_insn = emit_jump_to_block_after (e_fall->dest, bb->end);
664 barrier_insn = emit_barrier_after (jump_insn);
/* Grow the basic block array and create the one-insn jump block.  */
666 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
667 create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL);
669 nb = BASIC_BLOCK (n_basic_blocks - 1);
670 nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
671 nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
/* NOTE(review): both live sets are copied from BB's live-at-START;
   for a block inserted on BB's outgoing edge, live-at-end of BB (or
   live-at-start of e_fall->dest) would seem more precise — confirm
   this conservative choice is intentional.  */
674 COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start);
675 COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start);
/* Give the new block reorder data and splice it after BB in the
   reorder chain, inheriting BB's scope.  */
677 nb->aux = xmalloc (sizeof (struct reorder_block_def));
678 RBI (nb)->eff_head = nb->head;
679 RBI (nb)->eff_end = barrier_insn;
680 RBI (nb)->scope = RBI (bb)->scope;
681 RBI (nb)->index = RBI (bb)->index + 1;
682 RBI (nb)->visited = 1;
683 RBI (nb)->next = RBI (bb)->next;
686 /* Link to new block. */
687 make_edge (NULL, nb, e_fall->dest, 0);
688 redirect_edge_succ (e_fall, nb);
690 /* Don't process this new block. */
693 /* Fix subsequent reorder block indices to reflect new block. */
694 while ((nb = RBI (nb)->next) != NULL)
695 RBI (nb)->index += 1;
698 /* Put basic_block_info in the new order. */
699 for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next)
701 bb->index = RBI (bb)->index;
702 BASIC_BLOCK (bb->index) = bb;
707 /* Perform sanity checks on the insn chain.
708 1. Check that next/prev pointers are consistent in both the forward and
710 2. Count insns in chain, going both directions, and check if equal.
711 3. Check that get_last_insn () returns the actual end of chain. */
/* Forward pass: every insn's PREV_INSN must point at the insn we just
   came from.  */
724 for (x = get_insns (); x; x = NEXT_INSN (x))
726 if (PREV_INSN (x) != prevx)
728 fprintf (stderr, "Forward traversal: insn chain corrupt.\n");
729 fprintf (stderr, "previous insn:\n");
731 fprintf (stderr, "current insn:\n");
/* After the forward pass, prevx is the last insn reached; it must
   agree with get_last_insn ().  */
739 if (prevx != get_last_insn ())
741 fprintf (stderr, "last_insn corrupt.\n");
/* Backward pass: mirror check using NEXT_INSN.  */
747 for (x = get_last_insn (); x; x = PREV_INSN (x))
749 if (NEXT_INSN (x) != nextx)
751 fprintf (stderr, "Reverse traversal: insn chain corrupt.\n");
752 fprintf (stderr, "current insn:\n");
754 fprintf (stderr, "next insn:\n");
/* The two traversals must visit the same number of insns.  */
762 if (insn_cnt1 != insn_cnt2)
764 fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n",
765 insn_cnt1, insn_cnt2);
/* NOTE(review): fragment — presumably the body of get_next_bb_note
   (declared above): scan forward for the next NOTE_INSN_BASIC_BLOCK
   note.  Confirm against the full source. */
776 if (GET_CODE (x) == NOTE
777 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
/* NOTE(review): fragment — presumably the body of get_prev_bb_note
   (declared above): scan backward for the previous
   NOTE_INSN_BASIC_BLOCK note.  Confirm against the full source. */
791 if (GET_CODE (x) == NOTE
792 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
800 /* Determine and record the relationships between basic blocks and
801 scopes in scope tree S: which bbs each scope spans (s->bbs/num_bbs)
   and which scope each bb is immediately contained in (RBI->scope). */
804 relate_bbs_with_scopes (s)
808 int i, bbi1, bbi2, bbs_spanned;
/* Process children first so inner scopes claim their bbs before this
   (outer) scope records the remainder.  */
811 for (p = s->inner; p; p = p->next)
812 relate_bbs_with_scopes (p);
817 /* If the begin and end notes are both inside the same basic block,
818 or if they are both outside of basic blocks, then we know immediately
819 how they are related. Otherwise, we need to poke around to make the
821 if (s->bb_beg != s->bb_end)
823 if (s->bb_beg && s->bb_end)
825 /* Both notes are in different bbs. This implies that all the
826 basic blocks spanned by the pair of notes are contained in
828 bbi1 = s->bb_beg->index;
829 bbi2 = s->bb_end->index;
832 else if (! s->bb_beg)
834 /* First note is outside of a bb. If the scope spans more than
835 one basic block, then they all are contained within this
836 scope. Otherwise, this scope is contained within the basic
838 bbnote = get_next_bb_note (s->note_beg);
841 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
/* Scope is inside a single bb: normalize bb_beg to match bb_end.  */
844 s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
848 bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
849 bbi2 = s->bb_end->index;
854 else /* ! s->bb_end */
856 /* Second note is outside of a bb. If the scope spans more than
857 one basic block, then they all are contained within this
858 scope. Otherwise, this scope is contained within the basic
860 bbnote = get_prev_bb_note (s->note_end);
863 if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
/* Scope is inside a single bb: normalize bb_end to match bb_beg.  */
866 s->bb_end = NOTE_BASIC_BLOCK (bbnote);
870 bbi1 = s->bb_beg->index;
871 bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
880 /* Both notes are in the same bb, which implies the block
881 contains this scope. */
886 /* Both notes are outside of any bbs. This implies that all the
887 basic blocks spanned by the pair of notes are contained in
889 There is a degenerate case to consider. If the notes do not
890 span any basic blocks, then it is an empty scope that can
891 safely be deleted or ignored. Mark these with level = -1. */
893 x1 = get_next_bb_note (s->note_beg);
894 x2 = get_prev_bb_note (s->note_end);
902 bbi1 = NOTE_BASIC_BLOCK (x1)->index;
903 bbi2 = NOTE_BASIC_BLOCK (x2)->index;
909 /* If the scope spans one or more basic blocks, we record them. We
910 only record the bbs that are immediately contained within this
911 scope. Note that if a scope is contained within a bb, we can tell
912 by checking that bb_beg = bb_end and that they are non-null. */
/* First pass counts unclaimed bbs in [bbi1, bbi2] to size the array;
   second pass fills it and marks each bb as owned by this scope.  */
918 for (i = bbi1; i <= bbi2; i++)
919 if (! RBI (BASIC_BLOCK (i))->scope)
922 s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
923 for (i = bbi1; i <= bbi2; i++)
925 basic_block curr_bb = BASIC_BLOCK (i);
926 if (! RBI (curr_bb)->scope)
928 s->bbs[j++] = curr_bb;
929 RBI (curr_bb)->scope = s;
938 /* Allocate and initialize a new scope structure with scope level LEVEL,
939 and record the NOTE beginning the scope.  All other fields start
   zeroed (xcalloc).  Caller owns the result; freed by
   free_scope_forest_1. */
942 make_new_scope (level, note)
946 scope new_scope = xcalloc (1, sizeof (struct scope_def));
947 new_scope->level = level;
948 new_scope->note_beg = note;
953 /* Build a forest representing the scope structure of the function.
954 Return a pointer to a structure describing the forest.  Scans the
   insn chain once, opening a scope at each NOTE_INSN_BLOCK_BEG and
   closing it at the matching NOTE_INSN_BLOCK_END, tracking the
   current bb as it goes. */
957 build_scope_forest (forest)
958 scope_forest_info *forest;
963 scope root, curr_scope;
965 forest->num_trees = 0;
966 forest->trees = NULL;
971 for (x = get_insns (); x; x = NEXT_INSN (x))
/* Track which basic block (if any) the current insn belongs to.  */
973 if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
974 curr_bb = BASIC_BLOCK (bbi);
976 if (GET_CODE (x) == NOTE)
978 if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
/* BLOCK_BEG inside an open scope: push a new child scope.  */
986 new_scope = make_new_scope (level, x);
987 new_scope->outer = curr_scope;
988 new_scope->next = NULL;
989 if (! curr_scope->inner)
991 curr_scope->inner = new_scope;
992 curr_scope->inner_last = new_scope;
996 curr_scope->inner_last->next = new_scope;
997 curr_scope->inner_last = new_scope;
999 curr_scope = curr_scope->inner_last;
/* BLOCK_BEG at top level: start a new tree in the forest.  */
1003 int ntrees = forest->num_trees;
1005 curr_scope = make_new_scope (level, x);
1007 forest->trees = xrealloc (forest->trees,
1008 sizeof (scope) * (ntrees + 1));
1009 forest->trees[forest->num_trees++] = root;
1011 curr_scope->bb_beg = curr_bb;
1013 else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
/* BLOCK_END: record the closing note/bb and pop to the outer scope.  */
1015 curr_scope->bb_end = curr_bb;
1016 curr_scope->note_end = x;
1018 curr_scope = curr_scope->outer;
/* Leaving the current bb: insns until the next head are outside bbs.  */
1024 if (curr_bb && curr_bb->end == x)
/* With the forest built, compute bb/scope containment for each tree.  */
1032 for (i = 0; i < forest->num_trees; i++)
1033 relate_bbs_with_scopes (forest->trees[i]);
1037 /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
   the insn chain; they are re-created after reordering by
   rebuild_scope_notes.  Basic block head/end pointers are adjusted
   when a removed note happened to delimit a block. */
1041 remove_scope_notes ()
1044 basic_block currbb = NULL;
1046 for (x = get_insns (); x; x = next)
1048 next = NEXT_INSN (x);
/* Track the block we are currently inside via its BASIC_BLOCK note.  */
1049 if (GET_CODE (x) == NOTE
1050 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
1051 currbb = NOTE_BASIC_BLOCK (x);
1053 if (GET_CODE (x) == NOTE
1054 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
1055 || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
1057 /* Check if the scope note happens to be the end of a bb. */
1058 if (currbb && x == currbb->end)
1059 currbb->end = PREV_INSN (x);
1060 if (currbb && x == currbb->head)
/* Unlink the note from the insn chain and detach its pointers.  */
1065 NEXT_INSN (PREV_INSN (x)) = next;
1066 PREV_INSN (next) = PREV_INSN (x);
1068 NEXT_INSN (x) = NULL;
1069 PREV_INSN (x) = NULL;
1078 /* Insert scope note pairs for a contained scope tree S after insn IP.
   *IP is advanced past each emitted note so siblings nest correctly.
   Recursive over S's children. */
1081 insert_intra_1 (s, ip)
/* Skip scopes whose BLOCK was discarded (NULL NOTE_BLOCK).  */
1087 if (NOTE_BLOCK (s->note_beg))
1089 *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip);
1090 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg);
1093 for (p = s->inner; p; p = p->next)
1094 insert_intra_1 (p, ip);
/* NOTE(review): the closing note is guarded by note_beg's NOTE_BLOCK
   but copies note_end's — symmetric with the opening guard above,
   presumably because both notes share the same BLOCK; confirm.  */
1096 if (NOTE_BLOCK (s->note_beg))
1098 *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip);
1099 NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end);
1104 /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for
1105 scopes that are contained within BB (i.e. scopes whose begin and end
   both fall inside this single block). */
1108 insert_intra_bb_scope_notes (bb)
1111 scope s = RBI (bb)->scope;
/* Insert after BB's label (if any) so notes land inside the block.  */
1119 if (GET_CODE (ip) == CODE_LABEL)
1120 ip = NEXT_INSN (ip);
1122 for (p = s->inner; p; p = p->next)
/* bb_beg == bb_end == bb identifies a scope wholly inside BB.  */
1124 if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb)
1125 insert_intra_1 (p, &ip);
1130 /* Given two consecutive basic blocks BB1 and BB2 with different scopes,
1131 insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
1132 notes before BB2 such that the notes are correctly balanced. If BB1 or
1133 BB2 is NULL, we are inserting scope notes for the first and last basic
1134 blocks, respectively. */
1137 insert_inter_bb_scope_notes (bb1, bb2)
1144 /* It is possible that a basic block is not contained in any scope.
1145 In that case, we either open or close a scope but not both. */
1148 scope s1 = RBI (bb1)->scope;
1149 scope s2 = RBI (bb2)->scope;
1158 /* Find common ancestor scope: walk the deeper scope up (by level)
   until both chains meet; notes are emitted only below the ancestor. */
1161 scope s1 = RBI (bb1)->scope;
1162 scope s2 = RBI (bb2)->scope;
1167 if (s1->level > s2->level)
1169 else if (s2->level > s1->level)
/* Close scopes open at the end of BB1, innermost first, emitting
   BLOCK_END notes after BB1's effective end.  */
1185 scope s = RBI (bb1)->scope;
1186 ip = RBI (bb1)->eff_end;
1189 if (NOTE_BLOCK (s->note_beg))
1191 ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
1192 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
/* Open scopes needed at the start of BB2, outermost first, emitting
   BLOCK_BEG notes before BB2.  */
1201 scope s = RBI (bb2)->scope;
1205 if (NOTE_BLOCK (s->note_beg))
1207 ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
1208 NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
1216 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1217 on the scope forest and the newly reordered basic blocks.  Inverse of
   remove_scope_notes, for the new block order. */
1220 rebuild_scope_notes (forest)
1221 scope_forest_info *forest;
/* Nothing to rebuild if the function had no scopes at all.  */
1225 if (forest->num_trees == 0)
1228 /* Start by opening the scopes before the first basic block. */
1229 insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0));
1231 /* Then, open and close scopes as needed between blocks. */
1232 for (i = 0; i < n_basic_blocks - 1; i++)
1234 basic_block bb1 = BASIC_BLOCK (i);
1235 basic_block bb2 = BASIC_BLOCK (i + 1);
/* Inter-block notes only where the owning scope actually changes;
   intra-block notes are re-inserted for every block.  */
1236 if (RBI (bb1)->scope != RBI (bb2)->scope)
1237 insert_inter_bb_scope_notes (bb1, bb2);
1238 insert_intra_bb_scope_notes (bb1);
1241 /* Finally, close the scopes after the last basic block. */
1242 insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL);
1243 insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1));
1247 /* Free the storage associated with the scope tree at S, recursing
   into children first (post-order) so no freed node is revisited. */
1250 free_scope_forest_1 (s)
/* NEXT is captured inside the loop (elided here) before P is freed.  */
1255 for (p = s->inner; p; p = next)
1258 free_scope_forest_1 (p);
1267 /* Free the storage associated with the scope forest. */
1270 free_scope_forest (forest)
1271 scope_forest_info *forest;
1274 for (i = 0; i < forest->num_trees; i++)
1275 free_scope_forest_1 (forest->trees[i]);
1279 /* Visualize the scope forest. */
1282 dump_scope_forest (forest)
1283 scope_forest_info *forest;
1285 if (forest->num_trees == 0)
1286 fprintf (stderr, "\n< Empty scope forest >\n");
1290 fprintf (stderr, "\n< Scope forest >\n");
1291 for (i = 0; i < forest->num_trees; i++)
1292 dump_scope_forest_1 (forest->trees[i], 0);
1297 /* Recursive portion of dump_scope_forest: print scope S and its
   children at the given INDENT depth. */
1300 dump_scope_forest_1 (s, indent)
/* A scope wholly inside one bb whose parent is exactly one level up
   prints as a bare "BB<n>:" line instead of a brace pair.  */
1307 if (s->bb_beg != NULL && s->bb_beg == s->bb_end
1308 && RBI (s->bb_beg)->scope
1309 && RBI (s->bb_beg)->scope->level + 1 == s->level)
1311 fprintf (stderr, "%*s", indent, "");
1312 fprintf (stderr, "BB%d:\n", s->bb_beg->index);
1315 fprintf (stderr, "%*s", indent, "");
1316 fprintf (stderr, "{ level %d (block %p)\n", s->level,
1317 NOTE_BLOCK (s->note_beg));
1319 fprintf (stderr, "%*s%s", indent, "", "bbs:");
1320 for (i = 0; i < s->num_bbs; i++)
1321 fprintf (stderr, " %d", s->bbs[i]->index);
1322 fprintf (stderr, "\n");
1324 for (p = s->inner; p; p = p->next)
1325 dump_scope_forest_1 (p, indent + 2);
1327 fprintf (stderr, "%*s", indent, "");
1328 fprintf (stderr, "}\n");
1332 /* Reorder basic blocks. The main entry point to this file.
   Pipeline: save scope structure, strip scope notes, record effective
   insn ranges, compute the new order, rewrite the insn chain, then
   restore scope notes for the new layout. */
1335 reorder_basic_blocks ()
1337 scope_forest_info forest;
/* Nothing to reorder with zero or one block.  */
1340 if (n_basic_blocks <= 1)
1343 /* We do not currently handle correct re-placement of EH notes; bail
   out entirely if any block has an EH edge. */
1344 for (i = 0; i < n_basic_blocks; i++)
1347 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1348 if (e->flags & EDGE_EH)
/* Attach zeroed per-block reorder data to every block's aux field.  */
1352 for (i = 0; i < n_basic_blocks; i++)
1353 BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));
1355 build_scope_forest (&forest);
1356 remove_scope_notes ();
1358 record_effective_endpoints ();
1359 make_reorder_chain ();
1360 fixup_reorder_chain ();
1362 #ifdef ENABLE_CHECKING
1363 verify_insn_chain ();
1366 rebuild_scope_notes (&forest);
1367 free_scope_forest (&forest);
/* Release the per-block reorder data; aux reverts to scratch use.  */
1370 for (i = 0; i < n_basic_blocks; i++)
1371 free (BASIC_BLOCK (i)->aux);
1373 #ifdef ENABLE_CHECKING
1374 verify_flow_info ();