1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "hard-reg-set.h"
26 #include "basic-block.h"
27 #include "insn-config.h"
31 #include "cfglayout.h"
33 /* The contents of the current function definition are allocated
34 in this obstack, and all are freed at the end of the function. */
35 extern struct obstack flow_obstack;
37 /* Holds the interesting trailing notes for the function. */
38 static rtx function_footer;
/* Forward declarations of the static helpers defined below.  PARAMS is
   the pre-ISO prototype macro used throughout GCC of this era.
   NOTE(review): the listing is truncated -- some prototypes from the
   original file (inner lines 44, 47, 53, 56-57) are not visible here.  */
40 static rtx skip_insns_after_block PARAMS ((basic_block));
41 static void record_effective_endpoints PARAMS ((void));
42 static rtx label_for_bb PARAMS ((basic_block));
43 static void fixup_reorder_chain PARAMS ((void));
45 static void set_block_levels PARAMS ((tree, int));
46 static void change_scope PARAMS ((rtx, tree, tree));
48 void verify_insn_chain PARAMS ((void));
49 static void cleanup_unconditional_jumps PARAMS ((void));
50 static void fixup_fallthru_exit_predecessor PARAMS ((void));
51 static rtx unlink_insn_chain PARAMS ((rtx, rtx));
52 static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
54 /* Map insn uid to lexical block. */
55 static varray_type insn_scopes;
/* Detach the insn sequence FIRST..LAST from the function's insn chain
   and return it (per the static rtx prototype above), splicing the
   chain's neighbours back together around the removed span.
   NOTE(review): listing truncated -- the return type line, argument
   declarations, braces, and the conditionals that must guard the
   prevfirst/nextlast updates (for when FIRST is the first insn or LAST
   is the last insn) are not visible; verify against the full source.  */
58 unlink_insn_chain (first, last)
62 rtx prevfirst = PREV_INSN (first);
63 rtx nextlast = NEXT_INSN (last);
/* Sever the extracted span at both ends.  */
65 PREV_INSN (first) = NULL;
66 NEXT_INSN (last) = NULL;
/* Re-link the surrounding chain; presumably each of the following four
   statements is conditional on prevfirst/nextlast being non-null in the
   full source -- TODO confirm.  */
68 NEXT_INSN (prevfirst) = nextlast;
70 PREV_INSN (nextlast) = prevfirst;
72 set_last_insn (prevfirst);
74 set_first_insn (nextlast);
78 /* Skip over inter-block insns occurring after BB which are typically
79 associated with BB (e.g., barriers). If there are any such insns,
80 we return the last one. Otherwise, we return the end of BB. */
/* NOTE(review): listing truncated -- the return type, braces, switch
   case bodies (BARRIER/CODE_LABEL handling, inner line 97-126 region)
   and the final return are not visible here.  */
83 skip_insns_after_block (bb)
86 rtx insn, last_insn, next_head, prev;
/* next_head bounds the scan: stop before the head of the next block
   (when there is one).  */
89 if (bb->next_bb != EXIT_BLOCK_PTR)
90 next_head = bb->next_bb->head;
/* Walk forward from BB's end, remembering the last insn that still
   "belongs" to BB.  */
92 for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
94 if (insn == next_head)
97 switch (GET_CODE (insn))
104 switch (NOTE_LINE_NUMBER (insn))
106 case NOTE_INSN_LOOP_END:
107 case NOTE_INSN_BLOCK_END:
110 case NOTE_INSN_DELETED:
111 case NOTE_INSN_DELETED_LABEL:
/* A code label immediately followed by a jump table (ADDR_VEC or
   ADDR_DIFF_VEC) is kept with BB -- the table belongs to BB's
   tablejump.  */
122 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
123 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
124 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
126 insn = NEXT_INSN (insn);
139 /* It is possible to hit contradictory sequence. For instance:
145 Where barrier belongs to jump_insn, but the note does not. This can be
146 created by removing the basic block originally following
147 NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
/* Second pass: walk backward from last_insn moving stray notes after
   the barrier so the trailing sequence is consistent.  */
149 for (insn = last_insn; insn != bb->end; insn = prev)
151 prev = PREV_INSN (insn);
152 if (GET_CODE (insn) == NOTE)
153 switch (NOTE_LINE_NUMBER (insn))
155 case NOTE_INSN_LOOP_END:
156 case NOTE_INSN_BLOCK_END:
157 case NOTE_INSN_DELETED:
158 case NOTE_INSN_DELETED_LABEL:
/* Move the note to the end of the trailing sequence.  */
161 reorder_insns (insn, insn, last_insn);
168 /* Locate or create a label for a given basic block. */
/* NOTE(review): listing truncated -- the function signature
   (static rtx label_for_bb (bb)), braces, and the trailing return of
   LABEL are not visible here.  */
174 rtx label = bb->head;
/* If the block does not already start with a label, make one
   (block_label presumably emits/attaches it -- see cfgrtl).  */
176 if (GET_CODE (label) != CODE_LABEL)
/* Dump-file diagnostics; guarded by rtl_dump_file in the full
   source -- TODO confirm.  */
179 fprintf (rtl_dump_file, "Emitting label for block %d\n", bb->index);
181 label = block_label (bb);
187 /* Locate the effective beginning and end of the insn chain for each
188 block, as defined by skip_insns_after_block above. */
/* Unlinks each block's surrounding header/footer insns out of the main
   chain into RBI(bb)->header / RBI(bb)->footer, and stashes whatever
   trails the last block in function_footer.
   NOTE(review): listing truncated -- "static void" line, braces, and
   the declarations of i/end plus the condition guarding the final
   function_footer unlink are not visible.  */
191 record_effective_endpoints ()
193 rtx next_insn = get_insns ();
196 for (i = 0; i < n_basic_blocks; i++)
198 basic_block bb = BASIC_BLOCK (i);
/* Anything between the previous block's effective end and this block's
   head becomes this block's private header chain.  */
201 if (PREV_INSN (bb->head) && next_insn != bb->head)
202 RBI (bb)->header = unlink_insn_chain (next_insn,
203 PREV_INSN (bb->head));
/* Trailing insns associated with BB (barriers, jump tables, notes)
   become its footer chain.  */
204 end = skip_insns_after_block (bb);
205 if (NEXT_INSN (bb->end) && bb->end != end)
206 RBI (bb)->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
207 next_insn = NEXT_INSN (bb->end);
/* Whatever remains after the last block is the function footer.  */
210 function_footer = next_insn;
212 function_footer = unlink_insn_chain (function_footer, get_last_insn ());
215 /* Build a varray mapping INSN_UID to lexical block. Return it. */
/* Records, per active insn, the lexical scope (tree BLOCK) in effect at
   that point, driven by BLOCK_BEG/BLOCK_END notes.
   NOTE(review): listing truncated -- the signature line, declarations of
   insn/next/block, braces and break statements inside the switch are not
   visible here.  */
218 scope_to_insns_initialize ()
223 VARRAY_TREE_INIT (insn_scopes, get_max_uid (), "insn scopes");
225 for (insn = get_insns (); insn; insn = next)
227 next = NEXT_INSN (insn);
/* Record the current scope for real insns; jump tables (ADDR_VEC /
   ADDR_DIFF_VEC) are skipped.  */
229 if (active_insn_p (insn)
230 && GET_CODE (PATTERN (insn)) != ADDR_VEC
231 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
232 VARRAY_TREE (insn_scopes, INSN_UID (insn)) = block;
233 else if (GET_CODE (insn) == NOTE)
235 switch (NOTE_LINE_NUMBER (insn))
/* Track scope nesting: entering a block switches to it, leaving
   climbs to the parent.  */
237 case NOTE_INSN_BLOCK_BEG:
238 block = NOTE_BLOCK (insn);
241 case NOTE_INSN_BLOCK_END:
242 block = BLOCK_SUPERCONTEXT (block);
252 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
253 found in the block tree. */
/* Recurses into subblocks at LEVEL+1 and iterates siblings via
   BLOCK_CHAIN.  NOTE(review): listing truncated -- the "static void"
   line, argument declarations, braces and the enclosing while/for over
   the sibling chain are not visible here.  */
256 set_block_levels (block, level)
262 BLOCK_NUMBER (block) = level;
263 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
264 block = BLOCK_CHAIN (block);
268 /* Emit lexical block notes needed to change scope from S1 to S2. */
/* Walks S1 and S2 up toward their common ancestor (using the depth
   numbers installed by set_block_levels), then emits BLOCK_END notes to
   exit S1's blocks and BLOCK_BEG notes to enter S2's blocks, all before
   ORIG_INSN.  NOTE(review): listing truncated -- the signature/argument
   declarations, the loops around the ancestor search and the two
   note-emission loops are not visible here.  */
271 change_scope (orig_insn, s1, s2)
275 rtx insn = orig_insn;
276 tree com = NULL_TREE;
277 tree ts1 = s1, ts2 = s2;
/* Find the common ancestor: first equalize depths, then climb both in
   lockstep.  */
282 if (ts1 == NULL || ts2 == NULL)
284 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
285 ts1 = BLOCK_SUPERCONTEXT (ts1);
286 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
287 ts2 = BLOCK_SUPERCONTEXT (ts2);
290 ts1 = BLOCK_SUPERCONTEXT (ts1);
291 ts2 = BLOCK_SUPERCONTEXT (ts2);
/* Close the blocks from S1 up to (not including) the common ancestor.  */
300 rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
301 NOTE_BLOCK (note) = s;
302 s = BLOCK_SUPERCONTEXT (s);
/* Open the blocks from the common ancestor down to S2; presumably this
   loop runs in reverse order in the full source -- TODO confirm.  */
309 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
310 NOTE_BLOCK (insn) = s;
311 s = BLOCK_SUPERCONTEXT (s);
315 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
316 on the scope tree and the newly reordered instructions. */
/* Replays the per-insn scopes recorded in insn_scopes over the (now
   reordered) insn stream, emitting scope-change notes wherever adjacent
   insns belong to different lexical blocks.  NOTE(review): listing
   truncated -- the signature line, declarations of insn/note/this_block,
   and several guards are not visible here.  */
319 scope_to_insns_finalize ()
321 tree cur_block = DECL_INITIAL (cfun->decl);
324 /* Tag the blocks with a depth number so that change_scope can find
325 the common parent easily. */
326 set_block_levels (cur_block, 0);
328 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Insns created after the map was built (uid beyond the varray) have
   no recorded scope; presumably they are skipped -- TODO confirm.  */
332 if ((size_t) INSN_UID (insn) >= insn_scopes->num_elements)
334 this_block = VARRAY_TREE (insn_scopes, INSN_UID (insn));
338 if (this_block != cur_block)
340 change_scope (insn, cur_block, this_block);
341 cur_block = this_block;
345 VARRAY_FREE (insn_scopes);
347 /* change_scope emits before the insn, not after. */
348 note = emit_note (NULL, NOTE_INSN_DELETED);
/* Close any scopes still open at the end of the function by changing
   back to the outermost block.  */
349 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
355 /* Given a reorder chain, rearrange the code to match. */
/* The workhorse of cfg_layout_finalize: (1) relinks the insn chain to
   follow the RBI(bb)->next ordering, re-attaching each block's header
   and footer chains; (2) inserts/inverts jumps and adds labels so every
   fallthru edge really falls through; (3) dumps the new sequence; and
   (4) rebuilds basic_block_info and prev_bb/next_bb links.
   NOTE(review): this listing is heavily truncated -- declarations,
   braces, many conditionals, continue/break statements and abort()
   calls are missing throughout; treat the comments below as a map of
   the visible fragments only.  */
358 fixup_reorder_chain ()
360 basic_block bb, prev_bb;
364 /* First do the bulk reordering -- rechain the blocks without regard to
365 the needed changes to jumps and labels. */
367 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
369 bb = RBI (bb)->next, index++)
/* Splice in the block's private header chain (if any) before its head;
   on the very first block this sets the function's first insn.  */
371 if (RBI (bb)->header)
374 NEXT_INSN (insn) = RBI (bb)->header;
376 set_first_insn (RBI (bb)->header);
377 PREV_INSN (RBI (bb)->header) = insn;
378 insn = RBI (bb)->header;
379 while (NEXT_INSN (insn))
380 insn = NEXT_INSN (insn);
/* Link the block body itself.  */
383 NEXT_INSN (insn) = bb->head;
385 set_first_insn (bb->head);
386 PREV_INSN (bb->head) = insn;
/* Then the block's footer chain (barriers, jump tables).  */
388 if (RBI (bb)->footer)
390 NEXT_INSN (insn) = RBI (bb)->footer;
391 PREV_INSN (RBI (bb)->footer) = insn;
392 while (NEXT_INSN (insn))
393 insn = NEXT_INSN (insn);
/* Sanity: every block must appear exactly once in the chain.  */
397 if (index != n_basic_blocks)
/* Re-attach the trailing function_footer saved by
   record_effective_endpoints.  */
400 NEXT_INSN (insn) = function_footer;
402 PREV_INSN (function_footer) = insn;
404 while (NEXT_INSN (insn))
405 insn = NEXT_INSN (insn);
407 set_last_insn (insn);
408 #ifdef ENABLE_CHECKING
409 verify_insn_chain ();
412 /* Now add jumps and labels as needed to match the blocks new
415 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = RBI (bb)->next)
417 edge e_fall, e_taken, e;
/* Blocks with no successors (e.g. those ending in barriers) need no
   jump fixups.  */
421 if (bb->succ == NULL)
424 /* Find the old fallthru edge, and another non-EH edge for
426 e_taken = e_fall = NULL;
427 for (e = bb->succ; e ; e = e->succ_next)
428 if (e->flags & EDGE_FALLTHRU)
430 else if (! (e->flags & EDGE_EH))
433 bb_end_insn = bb->end;
434 if (GET_CODE (bb_end_insn) == JUMP_INSN)
436 if (any_condjump_p (bb_end_insn))
438 /* If the old fallthru is still next, nothing to do. */
439 if (RBI (bb)->next == e_fall->dest
441 && e_fall->dest == EXIT_BLOCK_PTR))
444 /* There is one special case: if *neither* block is next,
445 such as happens at the very end of a function, then we'll
446 need to add a new unconditional jump. Choose the taken
447 edge based on known or assumed probability. */
448 if (RBI (bb)->next != e_taken->dest)
450 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
/* Invert only when the branch is predicted not-taken (probability
   below 1/2) and the inversion succeeds.  */
453 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
454 && invert_jump (bb_end_insn,
455 label_for_bb (e_fall->dest), 0))
457 e_fall->flags &= ~EDGE_FALLTHRU;
458 e_taken->flags |= EDGE_FALLTHRU;
459 update_br_prob_note (bb);
/* Swap roles: the old taken edge is now the fallthru.  */
460 e = e_fall, e_fall = e_taken, e_taken = e;
464 /* Otherwise we can try to invert the jump. This will
465 basically never fail, however, keep up the pretense. */
466 else if (invert_jump (bb_end_insn,
467 label_for_bb (e_fall->dest), 0))
469 e_fall->flags &= ~EDGE_FALLTHRU;
470 e_taken->flags |= EDGE_FALLTHRU;
471 update_br_prob_note (bb);
/* Return jumps never fall through; nothing to do.  */
475 else if (returnjump_p (bb_end_insn))
479 /* Otherwise we have some switch or computed jump. In the
480 99% case, there should not have been a fallthru edge. */
484 #ifdef CASE_DROPS_THROUGH
485 /* Except for VAX. Since we didn't have predication for the
486 tablejump, the fallthru block should not have moved. */
487 if (RBI (bb)->next == e_fall->dest
489 bb_end_insn = skip_insns_after_block (bb);
497 /* No fallthru implies a noreturn function with EH edges, or
498 something similarly bizarre. In any case, we don't need to
503 /* If the fallthru block is still next, nothing to do. */
504 if (RBI (bb)->next == e_fall->dest
507 /* A fallthru to exit block. */
508 if (!RBI (bb)->next && e_fall->dest == EXIT_BLOCK_PTR)
512 /* We got here if we need to add a new jump insn. */
513 nb = force_nonfallthru (e_fall);
/* The freshly created jump block gets its own RBI aux data and is
   spliced into the reorder chain right after BB.  */
516 alloc_aux_for_block (nb, sizeof (struct reorder_block_def));
517 RBI (nb)->visited = 1;
518 RBI (nb)->next = RBI (bb)->next;
520 /* Don't process this new block. */
525 /* Put basic_block_info in the new order. */
/* Optional dump of the final block sequence (guarded by rtl_dump_file
   in the full source -- TODO confirm).  */
529 fprintf (rtl_dump_file, "Reordered sequence:\n");
530 for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0; bb; bb = RBI (bb)->next, index ++)
532 fprintf (rtl_dump_file, " %i ", index);
533 if (RBI (bb)->original)
534 fprintf (rtl_dump_file, "duplicate of %i ",
535 RBI (bb)->original->index);
536 else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
537 fprintf (rtl_dump_file, "compensation ");
539 fprintf (rtl_dump_file, "bb %i ", bb->index);
540 fprintf (rtl_dump_file, " [%i]\n", bb->frequency);
/* Finally, rewrite basic_block_info indices and the doubly linked
   prev_bb/next_bb chain to match the new order.  */
544 prev_bb = ENTRY_BLOCK_PTR;
545 bb = ENTRY_BLOCK_PTR->next_bb;
548 for (; bb; prev_bb = bb, bb = RBI (bb)->next, index ++)
551 BASIC_BLOCK (index) = bb;
553 bb->prev_bb = prev_bb;
554 prev_bb->next_bb = bb;
556 prev_bb->next_bb = EXIT_BLOCK_PTR;
557 EXIT_BLOCK_PTR->prev_bb = prev_bb;
560 /* Perform sanity checks on the insn chain.
561 1. Check that next/prev pointers are consistent in both the forward and
563 2. Count insns in chain, going both directions, and check if equal.
564 3. Check that get_last_insn () returns the actual end of chain. */
/* NOTE(review): listing truncated -- the "void verify_insn_chain ()"
   signature, declarations of x/prevx/nextx, loop termination conditions
   and the abort() calls on each failed check are not visible here.  */
570 int insn_cnt1, insn_cnt2;
/* Forward walk: every insn's PREV_INSN must point at its predecessor.  */
572 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
574 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
575 if (PREV_INSN (x) != prevx)
578 if (prevx != get_last_insn ())
/* Backward walk: every insn's NEXT_INSN must point at its successor.  */
581 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
583 nextx = x, insn_cnt2++, x = PREV_INSN (x))
584 if (NEXT_INSN (x) != nextx)
/* Both directions must see the same number of insns.  */
587 if (insn_cnt1 != insn_cnt2)
591 /* Remove any unconditional jumps and forwarder block creating fallthru
592 edges instead. During BB reordering fallthru edges are not required
593 to target next basic block in the linear CFG layout, so the unconditional
594 jumps are not needed. If LOOPS is not null, also update loop structure &
/* NOTE(review): listing truncated -- this version takes no arguments
   despite the comment mentioning LOOPS; the signature line, declarations
   of i/insn/jump, braces and the loop-index adjustments after block
   deletion are not visible here.  */
598 cleanup_unconditional_jumps ()
601 for (i = 0; i < n_basic_blocks; i++)
603 basic_block bb = BASIC_BLOCK (i);
/* Blocks already ending in a fallthru (single successor) need no
   cleanup.  */
607 if (bb->succ->flags & EDGE_FALLTHRU)
609 if (!bb->succ->succ_next)
/* A label-less forwarder block (other than block 0) can be removed
   entirely: redirect its predecessor straight to its successor.  */
612 if (GET_CODE (bb->head) != CODE_LABEL && forwarder_block_p (bb) && i)
614 basic_block prev = bb->prev_bb;
619 fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
622 redirect_edge_succ (bb->pred, bb->succ->dest);
623 flow_delete_block (bb);
/* A block ending in a simple unconditional jump: delete the jump and
   turn the edge into a fallthru.  */
626 else if (simplejump_p (bb->end))
631 fprintf (rtl_dump_file, "Removing jump %i in BB %i\n",
632 INSN_UID (jump), bb->index);
634 bb->succ->flags |= EDGE_FALLTHRU;
639 /* Cleanup barriers and delete ADDR_VECs in a way as they are belonging
640 to removed tablejump anyway. */
641 insn = NEXT_INSN (bb->end);
/* Scan forward to the next block's NOTE_INSN_BASIC_BLOCK, deleting
   barriers and orphaned jump tables along the way.  */
643 && (GET_CODE (insn) != NOTE
644 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
646 rtx next = NEXT_INSN (insn);
648 if (GET_CODE (insn) == BARRIER)
649 delete_barrier (insn);
650 else if (GET_CODE (insn) == JUMP_INSN)
651 delete_insn_chain (PREV_INSN (insn), insn);
652 else if (GET_CODE (insn) == CODE_LABEL)
654 else if (GET_CODE (insn) != NOTE)
663 /* The block falling through to exit must be the last one in the
664 reordered chain. Ensure that this condition is met. */
/* NOTE(review): listing truncated -- the "static void" line, the edge
   declaration, braces and the loop-advance statements are not visible.  */
666 fixup_fallthru_exit_predecessor ()
669 basic_block bb = NULL;
/* Find the (at most one) predecessor of EXIT reached by fallthru.  */
671 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
672 if (e->flags & EDGE_FALLTHRU)
/* If that block is not already last in the RBI chain, unlink it and
   re-append it at the end.  */
675 if (bb && RBI (bb)->next)
677 basic_block c = ENTRY_BLOCK_PTR->next_bb;
679 while (RBI (c)->next != bb)
682 RBI (c)->next = RBI (bb)->next;
683 while (RBI (c)->next)
687 RBI (bb)->next = NULL;
691 /* Return true in case it is possible to duplicate the basic block BB. */
/* NOTE(review): listing truncated -- the return type ("bool"/"int" --
   confirm), declarations of s/next, braces and the return statements
   after each rejection test are not visible here.  */
694 cfg_layout_can_duplicate_bb_p (bb)
/* The synthetic entry/exit blocks are never duplicable.  */
700 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
703 /* Duplicating fallthru block to exit would require adding an jump
704 and splitting the real last BB. */
705 for (s = bb->succ; s; s = s->succ_next)
706 if (s->dest == EXIT_BLOCK_PTR && s->flags & EDGE_FALLTHRU)
709 /* Do not attempt to duplicate tablejumps, as we need to unshare
710 the dispatch table. This is dificult to do, as the instructions
711 computing jump destination may be hoisted outside the basic block. */
/* Tablejump detection: the block ends in a jump whose label is followed
   (ignoring notes) by an ADDR_VEC/ADDR_DIFF_VEC dispatch table.  */
712 if (GET_CODE (bb->end) == JUMP_INSN && JUMP_LABEL (bb->end)
713 && (next = next_nonnote_insn (JUMP_LABEL (bb->end)))
714 && GET_CODE (next) == JUMP_INSN
715 && (GET_CODE (PATTERN (next)) == ADDR_VEC
716 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
/* Copy the insn sequence FROM..TO to the end of the current insn chain
   and return the first insn of the copy (per the static rtx prototype
   above).  Dispatch tables are skipped, and only "harmless" notes are
   duplicated.  NOTE(review): listing heavily truncated -- the "static
   rtx" line, declarations of insn/last/new, braces, break statements
   inside the switches, and several case labels are not visible here.  */
722 duplicate_insn_chain (from, to)
727 /* Avoid updating of boundaries of previous basic block. The
728 note will get removed from insn stream in fixup. */
729 last = emit_note (NULL, NOTE_INSN_DELETED);
731 /* Create copy at the end of INSN chain. The chain will
732 be reordered later. */
733 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
736 switch (GET_CODE (insn))
741 /* Avoid copying of dispatch tables. We never duplicate
742 tablejumps, so this can hit only in case the table got
743 moved far from original jump. */
744 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
745 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
747 new = emit_copy_of_insn_after (insn, get_last_insn ());
748 /* Record the INSN_SCOPE. */
/* Grow the scope map to cover the new uid and copy the original insn's
   lexical scope to the duplicate.  */
749 VARRAY_GROW (insn_scopes, INSN_UID (new) + 1);
750 VARRAY_TREE (insn_scopes, INSN_UID (new))
751 = VARRAY_TREE (insn_scopes, INSN_UID (insn));
/* NOTE handling: decide per note kind whether to copy or strip.  */
762 switch (NOTE_LINE_NUMBER (insn))
764 /* In case prologue is empty and function contain label
765 in first BB, we may want to copy the block. */
766 case NOTE_INSN_PROLOGUE_END:
768 case NOTE_INSN_LOOP_VTOP:
769 case NOTE_INSN_LOOP_CONT:
770 case NOTE_INSN_LOOP_BEG:
771 case NOTE_INSN_LOOP_END:
772 /* Strip down the loop notes - we don't really want to keep
773 them consistent in loop copies. */
774 case NOTE_INSN_DELETED:
775 case NOTE_INSN_DELETED_LABEL:
776 /* No problem to strip these. */
777 case NOTE_INSN_EPILOGUE_BEG:
778 case NOTE_INSN_FUNCTION_END:
779 /* Debug code expect these notes to exist just once.
780 Keep them in the master copy.
781 ??? It probably makes more sense to duplicate them for each
783 case NOTE_INSN_FUNCTION_BEG:
784 /* There is always just single entry to function. */
785 case NOTE_INSN_BASIC_BLOCK:
788 /* There is no purpose to duplicate prologue. */
789 case NOTE_INSN_BLOCK_BEG:
790 case NOTE_INSN_BLOCK_END:
791 /* The BLOCK_BEG/BLOCK_END notes should be eliminated when BB
792 reordering is in the progress. */
793 case NOTE_INSN_EH_REGION_BEG:
794 case NOTE_INSN_EH_REGION_END:
795 /* Should never exist at BB duplication time. */
796 case NOTE_INSN_RANGE_BEG:
797 case NOTE_INSN_RANGE_END:
800 case NOTE_INSN_REPEATED_LINE_NUMBER:
/* Line-number-ish notes are re-emitted into the copy.  */
801 emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
805 if (NOTE_LINE_NUMBER (insn) < 0)
807 /* It is possible that no_line_number is set and the note
809 emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
/* The copy starts right after the placeholder DELETED note.  */
816 insn = NEXT_INSN (last);
821 /* Redirect Edge to DEST. */
/* Like redirect_edge_and_branch, but safe during cfglayout mode: it
   prevents the redirection from turning a branch into a "fallthru" that
   depends on block ordering, and removes simplejumps afterwards since
   cfglayout represents unconditional control flow as fallthru edges.
   NOTE(review): listing truncated -- the signature/argument declarations,
   braces, an else branch and several guards are not visible here.  */
823 cfg_layout_redirect_edge (e, dest)
827 basic_block src = e->src;
/* Saved so next_bb can be restored after redirect_edge_and_branch,
   which may rely on/alter the linear ordering.  */
828 basic_block old_next_bb = src->next_bb;
830 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
831 in the case the basic block appears to be in sequence. Avoid this
835 if (e->flags & EDGE_FALLTHRU)
837 /* In case we are redirecting fallthru edge to the branch edge
838 of conditional jump, remove it. */
/* Exactly two successors: this block ends in a conditional jump.  */
839 if (src->succ->succ_next
840 && !src->succ->succ_next->succ_next)
842 edge s = e->succ_next ? e->succ_next : src->succ;
/* Delete the now-redundant conditional jump when both edges would
   reach the same destination.  */
844 && any_condjump_p (src->end)
845 && onlyjump_p (src->end))
846 delete_insn (src->end);
848 redirect_edge_succ_nodup (e, dest);
/* Non-fallthru case: do a real branch redirection.  */
851 redirect_edge_and_branch (e, dest);
853 /* We don't want simplejumps in the insn stream during cfglayout. */
854 if (simplejump_p (src->end))
856 delete_insn (src->end);
857 delete_barrier (NEXT_INSN (src->end));
858 src->succ->flags |= EDGE_FALLTHRU;
860 src->next_bb = old_next_bb;
863 /* Create an duplicate of the basic block BB and redirect edge E into it. */
/* Copies BB's insns (plus header/footer chains and liveness sets) into a
   new block placed before EXIT, mirrors BB's outgoing edges onto it, and
   splits BB's count/frequency between original and copy in proportion to
   E.  NOTE(review): listing truncated -- the return type, argument
   declarations, declarations of insn/new_bb/s/n, braces, abort() under
   ENABLE_CHECKING and the final return of new_bb are not visible.  */
866 cfg_layout_duplicate_bb (bb, e)
/* The profile count transferred to the copy comes from E (0 when E is
   null), clamped so the original never goes negative.  */
873 gcov_type new_count = e ? e->count : 0;
875 if (bb->count < new_count)
876 new_count = bb->count;
879 #ifdef ENABLE_CHECKING
880 if (!cfg_layout_can_duplicate_bb_p (bb))
/* Duplicate the body and wrap it in a fresh basic block before EXIT.  */
884 insn = duplicate_insn_chain (bb->head, bb->end);
885 new_bb = create_basic_block (insn,
886 insn ? get_last_insn () : NULL,
887 EXIT_BLOCK_PTR->prev_bb);
888 alloc_aux_for_block (new_bb, sizeof (struct reorder_block_def));
/* Copy the header chain: find its last insn, duplicate, then unlink the
   copy out of the main stream into RBI(new_bb)->header.  */
890 if (RBI (bb)->header)
892 insn = RBI (bb)->header;
893 while (NEXT_INSN (insn))
894 insn = NEXT_INSN (insn);
895 insn = duplicate_insn_chain (RBI (bb)->header, insn);
897 RBI (new_bb)->header = unlink_insn_chain (insn, get_last_insn ());
/* Same for the footer chain.  */
900 if (RBI (bb)->footer)
902 insn = RBI (bb)->footer;
903 while (NEXT_INSN (insn))
904 insn = NEXT_INSN (insn);
905 insn = duplicate_insn_chain (RBI (bb)->footer, insn);
907 RBI (new_bb)->footer = unlink_insn_chain (insn, get_last_insn ());
/* Clone global liveness information when available.  */
910 if (bb->global_live_at_start)
912 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
913 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
914 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
915 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
918 new_bb->loop_depth = bb->loop_depth;
919 new_bb->flags = bb->flags;
/* Mirror every outgoing edge of BB onto the copy, splitting profile
   counts proportionally.  */
920 for (s = bb->succ; s; s = s->succ_next)
922 n = make_edge (new_bb, s->dest, s->flags);
923 n->probability = s->probability;
925 /* Take care for overflows! */
/* Scaled via a 10000-based fixed-point ratio to avoid gcov_type
   overflow on large counts.  */
926 n->count = s->count * (new_count * 10000 / bb->count) / 10000;
929 s->count -= n->count;
932 new_bb->count = new_count;
933 bb->count -= new_count;
/* Move E's frequency from the original onto the copy, then point E at
   the copy.  */
937 new_bb->frequency = EDGE_FREQUENCY (e);
938 bb->frequency -= EDGE_FREQUENCY (e);
940 cfg_layout_redirect_edge (e, new_bb);
/* Guard against frequency underflow from the subtraction above.  */
945 if (bb->frequency < 0)
948 RBI (new_bb)->original = bb;
952 /* Main entry point to this module - initialize the datastructures for
953 CFG layout changes. It keeps LOOPS up-to-date if not null. */
/* NOTE(review): this version takes no arguments despite the comment
   mentioning LOOPS; the "void" return-type line and braces are not
   visible in this truncated listing.  */
956 cfg_layout_initialize ()
958 /* Our algorithm depends on fact that there are now dead jumptables
/* Allocate per-block reorder_block_def aux data (accessed via RBI).  */
960 alloc_aux_for_blocks (sizeof (struct reorder_block_def));
/* Drop unconditional jumps/forwarders, record per-insn scopes, then
   split off each block's header/footer insn chains.  */
962 cleanup_unconditional_jumps ();
964 scope_to_insns_initialize ();
966 record_effective_endpoints ();
969 /* Finalize the changes: reorder insn list according to the sequence, enter
970 compensation code, rebuild scope forest. */
/* Inverse of cfg_layout_initialize: materialize the RBI(bb)->next order
   back into a flat insn chain.  NOTE(review): listing truncated -- the
   "void" line, braces, and whatever follows the final ENABLE_CHECKING
   (inner line 986) are not visible here.  */
973 cfg_layout_finalize ()
/* The EXIT fallthru predecessor must come last before the chain is
   linearized.  */
975 fixup_fallthru_exit_predecessor ();
976 fixup_reorder_chain ();
978 #ifdef ENABLE_CHECKING
979 verify_insn_chain ();
/* Rebuild BLOCK_BEG/BLOCK_END notes and release the per-block aux
   data.  */
982 scope_to_insns_finalize ();
984 free_aux_for_blocks ();
986 #ifdef ENABLE_CHECKING