1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "hard-reg-set.h"
26 #include "basic-block.h"
27 #include "insn-config.h"
31 #include "cfglayout.h"
33 /* The contents of the current function definition are allocated
34 in this obstack, and all are freed at the end of the function. */
35 extern struct obstack flow_obstack;
37 /* Holds the interesting trailing notes for the function. */
38 static rtx function_footer;
/* Forward declarations of the file-local helpers defined below.
   PARAMS is GCC's K&R-compatibility prototype macro.  */
40 static rtx skip_insns_after_block PARAMS ((basic_block));
41 static void record_effective_endpoints PARAMS ((void));
42 static rtx label_for_bb PARAMS ((basic_block));
43 static void fixup_reorder_chain PARAMS ((void));
45 static void set_block_levels PARAMS ((tree, int));
46 static void change_scope PARAMS ((rtx, tree, tree));
/* verify_insn_chain is deliberately non-static: it is a debugging
   entry point usable from other files and from the debugger.  */
48 void verify_insn_chain PARAMS ((void));
49 static void cleanup_unconditional_jumps PARAMS ((void));
50 static void fixup_fallthru_exit_predecessor PARAMS ((void));
51 static rtx unlink_insn_chain PARAMS ((rtx, rtx));
52 static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
54 /* Map insn uid to lexical block. */
55 static varray_type insn_scopes;
/* Detach the insn sub-chain FIRST..LAST from the function's doubly
   linked insn list, splicing its neighbors back together and updating
   the recorded first/last insn of the chain.
   NOTE(review): this extract is missing interior lines (presumably the
   NULL checks on prevfirst/nextlast and the return of FIRST) -- verify
   against the complete source before modifying.  */
58 unlink_insn_chain (first, last)
62 rtx prevfirst = PREV_INSN (first);
63 rtx nextlast = NEXT_INSN (last);
/* Sever the sub-chain from the surrounding list.  */
65 PREV_INSN (first) = NULL;
66 NEXT_INSN (last) = NULL;
/* Reconnect the remaining chain across the removed span.  */
68 NEXT_INSN (prevfirst) = nextlast;
70 PREV_INSN (nextlast) = prevfirst;
72 set_last_insn (prevfirst);
74 set_first_insn (nextlast);
78 /* Skip over inter-block insns occurring after BB which are typically
79 associated with BB (e.g., barriers). If there are any such insns,
80 we return the last one. Otherwise, we return the end of BB. */
/* NOTE(review): interior lines (switch cases for BARRIER/JUMP_INSN/
   CODE_LABEL, the loop-exit logic, and the final return) are missing
   from this extract -- consult the full source.  */
83 skip_insns_after_block (bb)
86 rtx insn, last_insn, next_head, prev;
/* Head insn of the next block bounds how far we may scan.  */
89 if (bb->next_bb != EXIT_BLOCK_PTR)
90 next_head = bb->next_bb->head;
/* Walk forward from BB's end, remembering the last insn that still
   "belongs" to BB (barriers, trailing notes, dispatch tables).  */
92 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
94 if (insn == next_head)
97 switch (GET_CODE (insn))
104 switch (NOTE_LINE_NUMBER (insn))
106 case NOTE_INSN_LOOP_END:
107 case NOTE_INSN_BLOCK_END:
110 case NOTE_INSN_DELETED:
111 case NOTE_INSN_DELETED_LABEL:
/* A label immediately followed by a dispatch table belongs to the
   tablejump that references it; keep the pair together.  */
122 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
123 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
124 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
126 insn = NEXT_INSN (insn);
139 /* It is possible to hit a contradictory sequence. For instance:
145 Where barrier belongs to jump_insn, but the note does not. This can be
146 created by removing the basic block originally following
147 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
/* Move any trailing scope/loop-end notes past LAST_INSN so the
   barrier stays adjacent to its jump.  */
149 for (insn = last_insn; insn != bb->end; insn = prev)
151 prev = PREV_INSN (insn);
152 if (GET_CODE (insn) == NOTE)
153 switch (NOTE_LINE_NUMBER (insn))
155 case NOTE_INSN_LOOP_END:
156 case NOTE_INSN_BLOCK_END:
157 case NOTE_INSN_DELETED:
158 case NOTE_INSN_DELETED_LABEL:
161 reorder_insns (insn, insn, last_insn);
168 /* Locate or create a label for a given basic block. */
/* NOTE(review): the function header and the return statement are
   missing from this extract; if BB's head is not already a CODE_LABEL
   a new one is obtained via block_label.  */
174 rtx label = bb->head;
176 if (GET_CODE (label) != CODE_LABEL)
179 fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->sindex);
181 label = block_label (bb);
187 /* Locate the effective beginning and end of the insn chain for each
188 block, as defined by skip_insns_after_block above. */
/* Unlinked header/footer insns are stashed in each block's RBI so the
   reorder pass can re-emit them around the block later; whatever
   trails the last block becomes function_footer.
   NOTE(review): the per-block FOR_EACH loop header is missing from
   this extract.  */
191 record_effective_endpoints ()
193 rtx next_insn = get_insns ();
/* Insns between the previous block's effective end and BB's head are
   BB's "header" (labels, notes).  */
200 if (PREV_INSN (bb->head) && next_insn != bb->head)
201 RBI (bb)->header = unlink_insn_chain (next_insn,
202 PREV_INSN (bb->head));
203 end = skip_insns_after_block (bb);
/* Insns between BB's end and its effective end are BB's "footer"
   (barriers, trailing notes).  */
204 if (NEXT_INSN (bb->end) && bb->end != end)
205 RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
206 next_insn = NEXT_INSN (bb->end);
209 function_footer = next_insn;
211 function_footer = unlink_insn_chain (function_footer, get_last_insn ());
214 /* Build a varray mapping INSN_UID to lexical block. Return it. */
/* Records, for every active insn, the lexical BLOCK it currently sits
   in, by tracking BLOCK_BEG/BLOCK_END notes while walking the chain.
   NOTE(review): the initial value of `block' and the note-deletion
   code after the switch are missing from this extract.  */
217 scope_to_insns_initialize ()
222 VARRAY_TREE_INIT (insn_scopes, get_max_uid (), "insn scopes");
224 for (insn = get_insns (); insn; insn = next)
226 next = NEXT_INSN (insn);
/* Dispatch tables carry no useful scope; skip them like inactive
   insns.  */
228 if (active_insn_p (insn)
229 && GET_CODE (PATTERN (insn)) != ADDR_VEC
230 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
231 VARRAY_TREE (insn_scopes, INSN_UID (insn)) = block;
232 else if (GET_CODE (insn) == NOTE)
234 switch (NOTE_LINE_NUMBER (insn))
236 case NOTE_INSN_BLOCK_BEG:
237 block = NOTE_BLOCK (insn);
240 case NOTE_INSN_BLOCK_END:
241 block = BLOCK_SUPERCONTEXT (block);
251 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
252 found in the block tree. */
/* Recurses into subblocks with LEVEL + 1 and iterates siblings via
   BLOCK_CHAIN.  BLOCK_NUMBER is later used by change_scope to find
   the common ancestor of two scopes quickly.
   NOTE(review): the enclosing while-loop header over `block' is
   missing from this extract.  */
255 set_block_levels (block, level)
261 BLOCK_NUMBER (block) = level;
262 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
263 block = BLOCK_CHAIN (block);
267 /* Emit lexical block notes needed to change scope from S1 to S2. */
/* Finds the common ancestor COM of scopes S1 and S2 (walking up via
   BLOCK_SUPERCONTEXT, using the depth numbers set by
   set_block_levels), then emits BLOCK_END notes closing S1..COM and
   BLOCK_BEG notes opening COM..S2 before ORIG_INSN.
   NOTE(review): the loop headers and the assignment of COM are
   missing from this extract.  */
270 change_scope (orig_insn, s1, s2)
274 rtx insn = orig_insn;
275 tree com = NULL_TREE;
276 tree ts1 = s1, ts2 = s2;
/* Climb the deeper scope first until both walkers are at the same
   depth, then climb both in lock-step until they meet.  */
281 if (ts1 == NULL || ts2 == NULL)
283 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
284 ts1 = BLOCK_SUPERCONTEXT (ts1);
285 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
286 ts2 = BLOCK_SUPERCONTEXT (ts2);
289 ts1 = BLOCK_SUPERCONTEXT (ts1);
290 ts2 = BLOCK_SUPERCONTEXT (ts2);
/* Close scopes from S1 up to (but not including) the common parent.  */
299 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
300 NOTE_BLOCK (note) = s;
301 s = BLOCK_SUPERCONTEXT (s);
/* Open scopes from the common parent down to S2; emitting before
   `insn' and updating it keeps the BEG notes in outermost-first
   order.  */
308 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
309 NOTE_BLOCK (insn) = s;
310 s = BLOCK_SUPERCONTEXT (s);
314 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
315 on the scope tree and the newly reordered instructions. */
/* NOTE(review): several interior lines (variable declarations, the
   skip for out-of-range/NULL scopes, and the final cleanup of the
   temporary note) are missing from this extract.  */
318 scope_to_insns_finalize ()
320 tree cur_block = DECL_INITIAL (cfun->decl);
323 /* Tag the blocks with a depth number so that change_scope can find
324 the common parent easily. */
325 set_block_levels (cur_block, 0);
327 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Insns created after scope_to_insns_initialize have no recorded
   scope; leave the current scope unchanged for them.  */
331 if ((size_t) INSN_UID (insn) >= insn_scopes->num_elements)
333 this_block = VARRAY_TREE (insn_scopes, INSN_UID (insn));
337 if (this_block != cur_block)
339 change_scope (insn, cur_block, this_block);
340 cur_block = this_block;
344 VARRAY_FREE (insn_scopes);
346 /* change_scope emits before the insn, not after. */
347 note = emit_note (NULL, NOTE_INSN_DELETED);
348 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
354 /* Given a reorder chain, rearrange the code to match. */
/* Pass 1 re-links the insn chain following each block's RBI->next
   order (re-attaching the stashed header/footer insns); pass 2 fixes
   jumps/labels so control flow matches the new block order; pass 3
   rebuilds basic_block_info and the prev_bb/next_bb links.
   NOTE(review): many interior lines (declarations, braces, `continue'
   statements, abort calls) are missing from this extract -- do not
   edit without the full source.  */
357 fixup_reorder_chain ()
359 basic_block bb, prev_bb;
363 /* First do the bulk reordering -- rechain the blocks without regard to
364 the needed changes to jumps and labels. */
366 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
368 bb = RBI (bb)->next, index++)
/* Splice in the block's stashed header insns (labels/notes) before
   the block itself.  */
370 if (RBI (bb)->header)
373 NEXT_INSN (insn) = RBI (bb)->header;
375 set_first_insn (RBI (bb)->header);
376 PREV_INSN (RBI (bb)->header) = insn;
377 insn = RBI (bb)->header;
378 while (NEXT_INSN (insn))
379 insn = NEXT_INSN (insn);
382 NEXT_INSN (insn) = bb->head;
384 set_first_insn (bb->head);
385 PREV_INSN (bb->head) = insn;
/* Then the stashed footer insns (barriers/notes) after it.  */
387 if (RBI (bb)->footer)
389 NEXT_INSN (insn) = RBI (bb)->footer;
390 PREV_INSN (RBI (bb)->footer) = insn;
391 while (NEXT_INSN (insn))
392 insn = NEXT_INSN (insn);
/* Every block must have been visited exactly once.  */
396 if (index != num_basic_blocks)
399 NEXT_INSN (insn) = function_footer;
401 PREV_INSN (function_footer) = insn;
403 while (NEXT_INSN (insn))
404 insn = NEXT_INSN (insn);
406 set_last_insn (insn);
407 #ifdef ENABLE_CHECKING
408 verify_insn_chain ();
411 /* Now add jumps and labels as needed to match the blocks new
414 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
416 edge e_fall, e_taken, e;
420 if (bb->succ == NULL)
423 /* Find the old fallthru edge, and another non-EH edge for
425 e_taken = e_fall = NULL;
426 for (e = bb->succ; e ; e = e->succ_next)
427 if (e->flags & EDGE_FALLTHRU)
429 else if (! (e->flags & EDGE_EH))
432 bb_end_insn = bb->end;
433 if (GET_CODE (bb_end_insn) == JUMP_INSN)
435 if (any_condjump_p (bb_end_insn))
437 /* If the old fallthru is still next, nothing to do. */
438 if (RBI (bb)->next == e_fall->dest
440 && e_fall->dest == EXIT_BLOCK_PTR))
443 /* There is one special case: if *neither* block is next,
444 such as happens at the very end of a function, then we'll
445 need to add a new unconditional jump. Choose the taken
446 edge based on known or assumed probability. */
447 if (RBI (bb)->next != e_taken->dest)
449 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
/* Prefer falling through on the more probable edge; if the
   branch is predicted not-taken, invert it so the likely
   successor becomes the fallthru.  */
452 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
453 && invert_jump (bb_end_insn,
454 label_for_bb (e_fall->dest), 0))
456 e_fall->flags &= ~EDGE_FALLTHRU;
457 e_taken->flags |= EDGE_FALLTHRU;
458 update_br_prob_note (bb);
459 e = e_fall, e_fall = e_taken, e_taken = e;
463 /* Otherwise we can try to invert the jump. This will
464 basically never fail, however, keep up the pretense. */
465 else if (invert_jump (bb_end_insn,
466 label_for_bb (e_fall->dest), 0))
468 e_fall->flags &= ~EDGE_FALLTHRU;
469 e_taken->flags |= EDGE_FALLTHRU;
470 update_br_prob_note (bb);
474 else if (returnjump_p (bb_end_insn))
478 /* Otherwise we have some switch or computed jump. In the
479 99% case, there should not have been a fallthru edge. */
483 #ifdef CASE_DROPS_THROUGH
484 /* Except for VAX. Since we didn't have predication for the
485 tablejump, the fallthru block should not have moved. */
486 if (RBI (bb)->next == e_fall->dest)
488 bb_end_insn = skip_insns_after_block (bb);
496 /* No fallthru implies a noreturn function with EH edges, or
497 something similarly bizarre. In any case, we don't need to
502 /* If the fallthru block is still next, nothing to do. */
503 if (RBI (bb)->next == e_fall->dest)
506 /* A fallthru to exit block. */
507 if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
511 /* We got here if we need to add a new jump insn. */
512 nb = force_nonfallthru (e_fall);
/* The compensation block created by force_nonfallthru takes BB's
   place in the reorder chain and is marked visited so the loop
   skips it.  */
515 alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
516 RBI (nb)->visited = 1;
517 RBI (nb)->next = RBI (bb)->next;
519 /* Don't process this new block. */
524 /* Put basic_block_info in the new order. */
527 fprintf (rtl_dump_file, "Reordered sequence:\n");
528 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
530 bb = RBI (bb)->next, index ++)
532 fprintf (rtl_dump_file, " %i ", index);
533 if (RBI (bb)->original)
534 fprintf (rtl_dump_file, "duplicate of %i ",
535 RBI (bb)->original->sindex);
536 else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
537 fprintf (rtl_dump_file, "compensation ");
539 fprintf (rtl_dump_file, "bb %i ", bb->sindex);
540 fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
/* Rebuild the prev_bb/next_bb doubly linked block list and the
   BASIC_BLOCK index array in the new order.  */
544 prev_bb = ENTRY_BLOCK_PTR;
545 bb = ENTRY_BLOCK_PTR->next_bb;
548 for (; bb; prev_bb = bb, bb = RBI (bb)->next, index++)
551 BASIC_BLOCK (index) = bb;
553 bb->prev_bb = prev_bb;
554 prev_bb->next_bb = bb;
556 prev_bb->next_bb = EXIT_BLOCK_PTR;
557 EXIT_BLOCK_PTR->prev_bb = prev_bb;
560 /* Perform sanity checks on the insn chain.
561 1. Check that next/prev pointers are consistent in both the forward and
563 2. Count insns in chain, going both directions, and check if equal.
564 3. Check that get_last_insn () returns the actual end of chain. */
/* NOTE(review): the function header, declarations of x/prevx/nextx,
   and the abort calls on each failed check are missing from this
   extract.  */
570 int insn_cnt1, insn_cnt2;
/* Forward walk: every insn's PREV_INSN must point at its actual
   predecessor.  */
572 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
574 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
575 if (PREV_INSN (x) != prevx)
578 if (prevx != get_last_insn ())
/* Backward walk: symmetric check on NEXT_INSN.  */
581 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
583 nextx = x, insn_cnt2++, x = PREV_INSN (x))
584 if (NEXT_INSN (x) != nextx)
587 if (insn_cnt1 != insn_cnt2)
591 /* Remove any unconditional jumps and forwarder block creating fallthru
592 edges instead. During BB reordering fallthru edges are not required
593 to target next basic block in the linear CFG layout, so the unconditional
594 jumps are not needed. If LOOPS is not null, also update loop structure &
/* NOTE(review): the block-iteration loop header, several braces, and
   the delete_insn call for the jump itself are missing from this
   extract.  */
598 cleanup_unconditional_jumps ()
606 if (bb->succ->flags & EDGE_FALLTHRU)
608 if (!bb->succ->succ_next)
/* A forwarder block (no label, single fallthru in and out) can be
   removed entirely; redirect its predecessor to its successor.  */
611 if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb)
612 && bb->prev_bb != ENTRY_BLOCK_PTR)
614 basic_block prev = bb->prev_bb;
617 fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
620 redirect_edge_succ (bb->pred, bb->succ->dest);
621 flow_delete_block (bb);
/* A simple unconditional jump becomes a fallthru edge; cfglayout
   does not need explicit jumps between consecutive blocks.  */
624 else if (simplejump_p (bb->end))
629 fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
630 INSN_UID (jump), bb->sindex);
632 bb->succ->flags |= EDGE_FALLTHRU;
637 /* Clean up barriers and delete ADDR_VECs, as they belonged
638 to the removed tablejump anyway. */
639 insn = NEXT_INSN (bb->end);
641 && (GET_CODE (insn) != NOTE
642 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
644 rtx next = NEXT_INSN (insn);
646 if (GET_CODE (insn) == BARRIER)
647 delete_barrier (insn);
648 else if (GET_CODE (insn) == JUMP_INSN)
649 delete_insn_chain (PREV_INSN (insn), insn);
650 else if (GET_CODE (insn) == CODE_LABEL)
652 else if (GET_CODE (insn) != NOTE)
661 /* The block falling through to exit must be the last one in the
662 reordered chain. Ensure that this condition is met. */
/* NOTE(review): the declaration of `e', braces, and the c-advance
   statements inside the while loops are missing from this extract.  */
664 fixup_fallthru_exit_predecessor ()
667 basic_block bb = NULL;
/* Find the (at most one) block with a fallthru edge into the exit
   block.  */
669 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
670 if (e->flags & EDGE_FALLTHRU)
/* If it is not already last in the reorder chain, unlink it and
   re-append it at the end.  */
673 if (bb && RBI (bb)->next)
675 basic_block c = ENTRY_BLOCK_PTR->next_bb;
677 while (RBI (c)->next != bb)
680 RBI (c)->next = RBI (bb)->next;
681 while (RBI (c)->next)
685 RBI (bb)->next = NULL;
689 /* Return true in case it is possible to duplicate the basic block BB. */
/* NOTE(review): the declarations of `s'/`next' and the return
   statements for each case are missing from this extract.  */
692 cfg_layout_can_duplicate_bb_p (bb)
/* The entry and exit pseudo-blocks can never be duplicated.  */
698 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
701 /* Duplicating fallthru block to exit would require adding a jump
702 and splitting the real last BB. */
703 for (s = bb->succ; s; s = s->succ_next)
704 if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
707 /* Do not attempt to duplicate tablejumps, as we need to unshare
708 the dispatch table. This is difficult to do, as the instructions
709 computing jump destination may be hoisted outside the basic block. */
710 if (GET_CODE (bb->end) == JUMP_INSN && JUMP_LABEL (bb->end)
711 && (next = next_nonnote_insn (JUMP_LABEL (bb->end)))
712 && GET_CODE (next) == JUMP_INSN
713 && (GET_CODE (PATTERN (next)) == ADDR_VEC
714 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
/* Create a copy of the insn chain FROM..TO at the end of the current
   insn stream and return the first copied insn.
   NOTE(review): the function header comment, declarations, several
   case labels/breaks, abort calls, and the closing delete of the
   marker note are missing from this extract.  */
720 duplicate_insn_chain (from, to)
725 /* Avoid updating of boundaries of previous basic block. The
726 note will get removed from insn stream in fixup. */
727 last = emit_note (NULL, NOTE_INSN_DELETED);
729 /* Create copy at the end of INSN chain. The chain will
730 be reordered later. */
731 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
734 switch (GET_CODE (insn))
739 /* Avoid copying of dispatch tables. We never duplicate
740 tablejumps, so this can hit only in case the table got
741 moved far from original jump. */
742 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
743 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
745 new = emit_copy_of_insn_after (insn, get_last_insn ());
746 /* Record the INSN_SCOPE. */
/* Grow the uid->scope map to cover the new insn's uid, then copy the
   original insn's scope entry.  */
747 VARRAY_GROW (insn_scopes, INSN_UID (new) + 1);
748 VARRAY_TREE (insn_scopes, INSN_UID (new))
749 = VARRAY_TREE (insn_scopes, INSN_UID (insn));
760 switch (NOTE_LINE_NUMBER (insn))
762 /* In case prologue is empty and function contain label
763 in first BB, we may want to copy the block. */
764 case NOTE_INSN_PROLOGUE_END:
766 case NOTE_INSN_LOOP_VTOP:
767 case NOTE_INSN_LOOP_CONT:
768 case NOTE_INSN_LOOP_BEG:
769 case NOTE_INSN_LOOP_END:
770 /* Strip down the loop notes - we don't really want to keep
771 them consistent in loop copies. */
772 case NOTE_INSN_DELETED:
773 case NOTE_INSN_DELETED_LABEL:
774 /* No problem to strip these. */
775 case NOTE_INSN_EPILOGUE_BEG:
776 case NOTE_INSN_FUNCTION_END:
777 /* Debug code expects these notes to exist just once.
778 Keep them in the master copy.
779 ??? It probably makes more sense to duplicate them for each
781 case NOTE_INSN_FUNCTION_BEG:
782 /* There is always just single entry to function. */
783 case NOTE_INSN_BASIC_BLOCK:
786 /* There is no purpose to duplicate prologue. */
787 case NOTE_INSN_BLOCK_BEG:
788 case NOTE_INSN_BLOCK_END:
789 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
790 reordering is in the progress. */
791 case NOTE_INSN_EH_REGION_BEG:
792 case NOTE_INSN_EH_REGION_END:
793 /* Should never exist at BB duplication time. */
794 case NOTE_INSN_RANGE_BEG:
795 case NOTE_INSN_RANGE_END:
798 case NOTE_INSN_REPEATED_LINE_NUMBER:
799 emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
/* Negative note "line numbers" encode note kinds; non-negative ones
   are real source line notes and are duplicated as-is.  */
803 if (NOTE_LINE_NUMBER (insn) < 0)
805 /* It is possible that no_line_number is set and the note
807 emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
/* First insn of the copy is the one right after the marker note.  */
814 insn = NEXT_INSN (last);
819 /* Redirect Edge to DEST. */
/* Like redirect_edge_and_branch, but safe while the CFG is in
   cfglayout mode: fallthru edges may point anywhere, and no
   simplejump insns are kept in the stream.
   NOTE(review): the function header, braces, and parts of the
   conditional around the condjump deletion are missing from this
   extract.  */
821 cfg_layout_redirect_edge (e, dest)
825 basic_block src = e->src;
826 basic_block old_next_bb = src->next_bb;
828 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
829 in the case the basic block appears to be in sequence. Avoid this
833 if (e->flags & EDGE_FALLTHRU)
835 /* In case we are redirecting fallthru edge to the branch edge
836 of conditional jump, remove it. */
837 if (src->succ->succ_next
838 && !src->succ->succ_next->succ_next)
840 edge s = e->succ_next ? e->succ_next : src->succ;
842 && any_condjump_p (src->end)
843 && onlyjump_p (src->end))
844 delete_insn (src->end);
846 redirect_edge_succ_nodup (e, dest);
849 redirect_edge_and_branch (e, dest);
851 /* We don't want simplejumps in the insn stream during cfglayout. */
852 if (simplejump_p (src->end))
854 delete_insn (src->end);
855 delete_barrier (NEXT_INSN (src->end));
856 src->succ->flags |= EDGE_FALLTHRU;
/* Restore next_bb, temporarily changed to fool
   redirect_edge_and_branch out of creating a fallthru.  -- TODO
   confirm against the full source; the assignment that changes it is
   not visible in this extract.  */
858 src->next_bb = old_next_bb;
861 /* Create a duplicate of the basic block BB and redirect edge E into it. */
/* Copies BB's insns (plus stashed header/footer), clones its live
   register sets, edges, counts and frequencies, scales the profile
   between original and copy, and finally redirects E to the new
   block.  Returns the new block.
   NOTE(review): declarations, braces, abort, the edge-count guards
   and the return statement are missing from this extract.  */
864 cfg_layout_duplicate_bb (bb, e)
/* The duplicate inherits at most BB's own execution count.  */
871 gcov_type new_count = e ? e->count : 0;
873 if (bb->count < new_count)
874 new_count = bb->count;
877 #ifdef ENABLE_CHECKING
878 if (!cfg_layout_can_duplicate_bb_p (bb))
/* Copy the body and materialize it as a new block just before the
   exit block.  */
882 insn = duplicate_insn_chain (bb->head, bb->end);
883 new_bb = create_basic_block (insn,
884 insn ? get_last_insn () : NULL,
885 EXIT_BLOCK_PTR->prev_bb);
886 alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
/* Duplicate the stashed header/footer insn chains as well.  */
888 if (RBI (bb)->header)
890 insn = RBI (bb)->header;
891 while (NEXT_INSN (insn))
892 insn = NEXT_INSN (insn);
893 insn = duplicate_insn_chain (RBI (bb)->header, insn);
895 RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
898 if (RBI (bb)->footer)
900 insn = RBI (bb)->footer;
901 while (NEXT_INSN (insn))
902 insn = NEXT_INSN (insn);
903 insn = duplicate_insn_chain (RBI (bb)->footer, insn);
905 RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
908 if (bb->global_live_at_start)
910 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
911 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
912 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
913 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
916 new_bb->loop_depth = bb->loop_depth;
917 new_bb->flags = bb->flags;
918 for (s = bb->succ; s; s = s->succ_next)
920 n = make_edge (new_bb, s->dest, s->flags);
921 n->probability = s->probability;
/* Split each outgoing edge count proportionally to the share of
   BB's count that moves to the duplicate.  */
923 /* Take care for overflows! */
924 n->count = s->count * (new_count * 10000 / bb->count) / 10000;
927 s->count -= n->count;
930 new_bb->count = new_count;
931 bb->count -= new_count;
935 new_bb->frequency = EDGE_FREQUENCY (e);
936 bb->frequency -= EDGE_FREQUENCY (e);
938 cfg_layout_redirect_edge (e, new_bb);
943 if (bb->frequency < 0)
946 RBI (new_bb)->original = bb;
950 /* Main entry point to this module - initialize the datastructures for
951 CFG layout changes. It keeps LOOPS up-to-date if not null. */
954 cfg_layout_initialize ()
/* Our algorithm depends on the fact that there are no dead jumptables
   around the code (cleanup_unconditional_jumps removes them).
   -- NOTE(review): original comment read "now dead jumptables";
   presumed typo for "no", confirm against the full source.  */
958 alloc_aux_for_blocks (sizeof (struct reorder_block_def));
960 cleanup_unconditional_jumps ();
962 scope_to_insns_initialize ();
964 record_effective_endpoints ();
967 /* Finalize the changes: reorder insn list according to the sequence, enter
968 compensation code, rebuild scope forest. */
/* Counterpart of cfg_layout_initialize: materializes the RBI->next
   ordering back into a normal insn stream and releases the per-block
   aux data.  NOTE(review): trailing lines (presumably a CFG
   verification call) are missing from this extract.  */
971 cfg_layout_finalize ()
/* The exit fallthru predecessor must be last before the chain is
   committed.  */
973 fixup_fallthru_exit_predecessor ();
974 fixup_reorder_chain ();
976 #ifdef ENABLE_CHECKING
977 verify_insn_chain ();
980 scope_to_insns_finalize ();
982 free_aux_for_blocks ();
984 #ifdef ENABLE_CHECKING