/* Control flow optimization code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file contains the control flow optimizer.  The main entry point is
   cleanup_cfg.  The following optimizations are performed:

   - Unreachable block removal
   - Edge forwarding (an edge to a forwarder block is forwarded to its
     successor.  Simplification of the branch instruction is performed by
     the underlying infrastructure, so the branch can be converted to a
     simplejump or eliminated).
   - Cross jumping (tail merging)
   - Conditional jump-around-simplejump simplification
   - Basic block merging.  */
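
/* The entry point is typically invoked as, e.g.,
   cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_CROSSJUMP); the mode bits
   (CLEANUP_CROSSJUMP, CLEANUP_PRE_LOOP, CLEANUP_PRE_SIBCALL, ...) tell
   cleanup_cfg which of the transformations above are safe to perform
   at the caller's point in the compilation.  */
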
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "timevar.h"
#include "insn-config.h"
#include "flags.h"
#include "recog.h"
#include "toplev.h"

/* cleanup_cfg maintains the following flags for each basic block.  */

enum bb_flags
{
  /* Set if life info needs to be recomputed for the given BB.  */
  BB_UPDATE_LIFE = 1,

  /* Set if BB is a forwarder block, to avoid too many
     forwarder_block_p calls.  */
  BB_FORWARDER_BLOCK = 2
};
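
/* The flags live in each block's ->aux field, which is otherwise unused
   while cleanup_cfg runs; the macros below read and update that field
   in place, and try_optimize_cfg clears it again before returning.  */
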
#define BB_FLAGS(bb) (enum bb_flags)(bb)->aux
#define BB_SET_FLAG(bb,flag) \
  (bb)->aux = (void *) (long) ((enum bb_flags)(bb)->aux | (flag))
#define BB_CLEAR_FLAG(bb,flag) \
  (bb)->aux = (void *) (long) ((enum bb_flags)(bb)->aux & ~(flag))

#define FORWARDER_BLOCK_P(bb) (BB_FLAGS(bb) & BB_FORWARDER_BLOCK)

static bool try_crossjump_to_edge      PARAMS ((int, edge, edge));
static bool try_crossjump_bb           PARAMS ((int, basic_block));
static bool outgoing_edges_match       PARAMS ((int,
                                                basic_block, basic_block));
static int flow_find_cross_jump        PARAMS ((int, basic_block, basic_block,
                                                rtx *, rtx *));
static bool insns_match_p              PARAMS ((int, rtx, rtx));

static bool delete_unreachable_blocks  PARAMS ((void));
static bool label_is_jump_target_p     PARAMS ((rtx, rtx));
static bool tail_recursion_label_p     PARAMS ((rtx));
static void merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
                                                           basic_block));
static void merge_blocks_move_successor_nojumps PARAMS ((basic_block,
                                                         basic_block));
static bool merge_blocks               PARAMS ((edge, basic_block, basic_block,
                                                int));
static bool try_optimize_cfg           PARAMS ((int));
static bool try_simplify_condjump      PARAMS ((basic_block));
static bool try_forward_edges          PARAMS ((int, basic_block));
static void notice_new_block           PARAMS ((basic_block));
static void update_forwarder_flag      PARAMS ((basic_block));

/* Set flags for a newly created block.  */

static void
notice_new_block (bb)
     basic_block bb;
{
  if (!bb)
    return;

  BB_SET_FLAG (bb, BB_UPDATE_LIFE);
  if (forwarder_block_p (bb))
    BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
}

/* Recompute forwarder flag after the block has been modified.  */

static void
update_forwarder_flag (bb)
     basic_block bb;
{
  if (forwarder_block_p (bb))
    BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
  else
    BB_CLEAR_FLAG (bb, BB_FORWARDER_BLOCK);
}

/* Simplify a conditional jump around an unconditional jump.
   Return true if something changed.  */
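
/* Schematically (a sketch only; the actual transformation operates on
   the CFG edges and keeps the profile data intact):

	if (cond) goto B;		if (! cond) goto C;
	goto C;			==>	B: ...
     B: ...

   after which the forwarder block containing "goto C" is deleted.  */
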
static bool
try_simplify_condjump (cbranch_block)
     basic_block cbranch_block;
{
  basic_block jump_block, jump_dest_block, cbranch_dest_block;
  edge cbranch_jump_edge, cbranch_fallthru_edge;
  rtx cbranch_insn;

  /* Verify that there are exactly two successors.  */
  if (!cbranch_block->succ
      || !cbranch_block->succ->succ_next
      || cbranch_block->succ->succ_next->succ_next)
    return false;

  /* Verify that we've got a normal conditional branch at the end
     of the block.  */
  cbranch_insn = cbranch_block->end;
  if (!any_condjump_p (cbranch_insn))
    return false;

  cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
  cbranch_jump_edge = BRANCH_EDGE (cbranch_block);

  /* The next block must not have multiple predecessors, must not
     be the last block in the function, and must contain just the
     unconditional jump.  */
  jump_block = cbranch_fallthru_edge->dest;
  if (jump_block->pred->pred_next
      || jump_block->index == n_basic_blocks - 1
      || !FORWARDER_BLOCK_P (jump_block))
    return false;
  jump_dest_block = jump_block->succ->dest;

  /* The conditional branch must target the block after the
     unconditional branch.  */
  cbranch_dest_block = cbranch_jump_edge->dest;

  if (!can_fallthru (jump_block, cbranch_dest_block))
    return false;

  /* Invert the conditional branch.  */
  if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 0))
    return false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
             INSN_UID (cbranch_insn), INSN_UID (jump_block->end));

  /* Success.  Update the CFG to match.  Note that after this point
     the edge variable names appear backwards; the redirection is done
     this way to preserve edge profile data.  */
  cbranch_jump_edge = redirect_edge_succ_nodup (cbranch_jump_edge,
                                                cbranch_dest_block);
  cbranch_fallthru_edge = redirect_edge_succ_nodup (cbranch_fallthru_edge,
                                                    jump_dest_block);
  cbranch_jump_edge->flags |= EDGE_FALLTHRU;
  cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;

  /* Delete the block with the unconditional jump, and clean up the mess.  */
  flow_delete_block (jump_block);
  tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);

  return true;
}

/* Attempt to forward edges leaving basic block B.
   Return true if successful.  */
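
/* For instance, given a chain of forwarder blocks

     B -> F1 -> F2 -> TARGET

   where F1 and F2 contain nothing but an unconditional jump, the edge
   out of B is redirected straight to TARGET and the profile counts of
   F1 and F2 are decreased accordingly.  */
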
static bool
try_forward_edges (mode, b)
     basic_block b;
     int mode;
{
  bool changed = false;
  edge e, next;

  for (e = b->succ; e; e = next)
    {
      basic_block target, first;
      int counter;

      next = e->succ_next;

      /* Skip complex edges because we don't know how to update them.

         Still handle fallthru edges, as we can succeed in forwarding a
         fallthru edge to the same place as the branch edge of a
         conditional branch and turn the conditional branch into an
         unconditional branch.  */
      if (e->flags & EDGE_COMPLEX)
        continue;

      target = first = e->dest;
      counter = 0;

      /* Look for the real destination of the jump.
         Avoid looping forever in an infinite empty loop by counting
         up to n_basic_blocks.  */
      while (FORWARDER_BLOCK_P (target)
             && target->succ->dest != EXIT_BLOCK_PTR
             && counter < n_basic_blocks)
        {
          /* Bypass trivial infinite loops.  */
          if (target == target->succ->dest)
            counter = n_basic_blocks;

          /* Avoid killing of loop pre-headers, as it is the place the
             loop optimizer wants to hoist code to.

             For fallthru forwarders, the LOOP_BEG note must appear between
             the header of the block and the CODE_LABEL of the loop; for
             non-forwarders it must appear before the JUMP_INSN.  */
          if (mode & CLEANUP_PRE_LOOP)
            {
              rtx insn = (target->succ->flags & EDGE_FALLTHRU
                          ? target->head : prev_nonnote_insn (target->end));

              if (GET_CODE (insn) != NOTE)
                insn = NEXT_INSN (insn);

              for (; insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
                   insn = NEXT_INSN (insn))
                if (GET_CODE (insn) == NOTE
                    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
                  break;

              if (GET_CODE (insn) == NOTE)
                break;
            }

          target = target->succ->dest, counter++;
        }

      if (counter >= n_basic_blocks)
        {
          if (rtl_dump_file)
            fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
                     target->index);
        }
      else if (target == first)
        ; /* We didn't do anything.  */
      else
        {
          /* Save the values now, as the edge may get removed.  */
          gcov_type edge_count = e->count;
          int edge_probability = e->probability;

          if (redirect_edge_and_branch (e, target))
            {
              /* We successfully forwarded the edge.  Now update profile
                 data: for each edge we traversed in the chain, remove
                 the original edge's execution count.  */
              int edge_frequency = ((edge_probability * b->frequency
                                     + REG_BR_PROB_BASE / 2)
                                    / REG_BR_PROB_BASE);

              if (!FORWARDER_BLOCK_P (b) && forwarder_block_p (b))
                BB_SET_FLAG (b, BB_FORWARDER_BLOCK);
              BB_SET_FLAG (b, BB_UPDATE_LIFE);

              do
                {
                  first->count -= edge_count;
                  first->succ->count -= edge_count;
                  first->frequency -= edge_frequency;
                  first = first->succ->dest;
                }
              while (first != target);

              changed = true;
            }
          else if (rtl_dump_file)
            fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
                     b->index, e->dest->index, target->index);
        }
    }

  return changed;
}

/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */
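
/* Note that for a tablejump, JUMP_LABEL points at the CODE_LABEL that
   heads the ADDR_VEC/ADDR_DIFF_VEC jump table, so the table's entries
   are compared against LABEL individually below.  */
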
static bool
label_is_jump_target_p (label, jump_insn)
     rtx label, jump_insn;
{
  rtx tmp = JUMP_LABEL (jump_insn);

  if (label == tmp)
    return true;

  if (tmp != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (tmp = PATTERN (tmp),
          GET_CODE (tmp) == ADDR_VEC
          || GET_CODE (tmp) == ADDR_DIFF_VEC))
    {
      rtvec vec = XVEC (tmp, GET_CODE (tmp) == ADDR_DIFF_VEC);
      int i, veclen = GET_NUM_ELEM (vec);

      for (i = 0; i < veclen; ++i)
        if (XEXP (RTVEC_ELT (vec, i), 0) == label)
          return true;
    }

  return false;
}

/* Return true if LABEL is used for tail recursion.  */

static bool
tail_recursion_label_p (label)
     rtx label;
{
  rtx x;

  for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return true;

  return false;
}

/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */
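
/* Schematically, the insn stream

     ... | A | ... | B | ...

   becomes

     ... | ... | A B | ...

   after which merge_blocks_nomove combines A and B into one block.  */
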
static void
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx barrier;
  int index;

  barrier = next_nonnote_insn (a->end);
  if (GET_CODE (barrier) != BARRIER)
    abort ();
  delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  if (squeeze_notes (&a->head, &a->end))
    abort ();

  /* Scramble the insn chain.  */
  if (a->end != PREV_INSN (b->head))
    reorder_insns_nobb (a->head, a->end, PREV_INSN (b->head));
  BB_SET_FLAG (a, BB_UPDATE_LIFE);

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
             a->index, b->index);

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK (a->index) = b;
  BASIC_BLOCK (b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);
}

/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static void
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx barrier, real_b_end;

  real_b_end = b->end;
  barrier = NEXT_INSN (b->end);

  /* Recognize a jump table following block B.  */
  if (barrier
      && GET_CODE (barrier) == CODE_LABEL
      && NEXT_INSN (barrier)
      && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
    {
      /* Temporarily add the table jump insn to b, so that it will also
         be moved to the correct location.  */
      b->end = NEXT_INSN (barrier);
      barrier = NEXT_INSN (b->end);
    }

  /* There had better have been a barrier there.  Delete it.  */
  if (barrier && GET_CODE (barrier) == BARRIER)
    delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  if (squeeze_notes (&b->head, &b->end))
    abort ();

  /* Scramble the insn chain.  */
  reorder_insns_nobb (b->head, b->end, a->end);

  /* Restore the real end of b.  */
  b->end = real_b_end;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);
  BB_SET_FLAG (a, BB_UPDATE_LIFE);

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
             b->index, a->index);
}

/* Attempt to merge basic blocks that are potentially non-adjacent.
   Return true iff the attempt succeeded.  */
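
/* Three strategies are tried in turn: if B already falls through to C,
   merge in place; otherwise, under CLEANUP_EXPENSIVE, move C up behind
   B when C has no outgoing fallthru edge, or move B down in front of C
   when B's incoming fallthru edge can be eliminated.  */
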
static bool
merge_blocks (e, b, c, mode)
     edge e;
     basic_block b, c;
     int mode;
{
  /* If C has a tail recursion label, do not merge.  There is no
     edge recorded from the call_placeholder back to this label, as
     that would make optimize_sibling_and_tail_recursive_calls more
     complex for no gain.  */
  if ((mode & CLEANUP_PRE_SIBCALL)
      && GET_CODE (c->head) == CODE_LABEL
      && tail_recursion_label_p (c->head))
    return false;

  /* If B has a fallthru edge to C, no need to move anything.  */
  if (e->flags & EDGE_FALLTHRU)
    {
      /* We need to update liveness in case C already has broken liveness
         or B ends with a conditional jump to the next instructions,
         which will be removed.  */
      if ((BB_FLAGS (c) & BB_UPDATE_LIFE)
          || GET_CODE (b->end) == JUMP_INSN)
        BB_SET_FLAG (b, BB_UPDATE_LIFE);
      merge_blocks_nomove (b, c);
      update_forwarder_flag (b);

      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
                 b->index, c->index);

      return true;
    }

  /* Otherwise we will need to move code around.  Do that only if expensive
     transformations are allowed.  */
  else if (mode & CLEANUP_EXPENSIVE)
    {
      edge tmp_edge, b_fallthru_edge;
      bool c_has_outgoing_fallthru;
      bool b_has_incoming_fallthru;

      /* Avoid overactive code motion, as the forwarder blocks should be
         eliminated by edge redirection instead.  One exception might have
         been if B is a forwarder block and C has no fallthru edge, but
         that should be cleaned up by bb-reorder instead.  */
      if (FORWARDER_BLOCK_P (b) || FORWARDER_BLOCK_P (c))
        return false;

      /* We must make sure not to munge nesting of lexical blocks,
         and loop notes.  This is done by squeezing out all the notes
         and leaving them there to lie.  Not ideal, but functional.  */

      for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
        if (tmp_edge->flags & EDGE_FALLTHRU)
          break;

      c_has_outgoing_fallthru = (tmp_edge != NULL);

      for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
        if (tmp_edge->flags & EDGE_FALLTHRU)
          break;

      b_has_incoming_fallthru = (tmp_edge != NULL);
      b_fallthru_edge = tmp_edge;

      /* Otherwise, we're going to try to move C after B.  If C does
         not have an outgoing fallthru, then it can be moved
         immediately after B without introducing or modifying jumps.  */
      if (! c_has_outgoing_fallthru)
        {
          merge_blocks_move_successor_nojumps (b, c);
          return true;
        }

      /* If B does not have an incoming fallthru, then it can be moved
         immediately before C without introducing or modifying jumps.
         C cannot be the first block, so we do not have to worry about
         accessing a non-existent block.  */

      if (b_has_incoming_fallthru)
        {
          basic_block bb;

          if (b_fallthru_edge->src == ENTRY_BLOCK_PTR)
            return false;
          bb = force_nonfallthru (b_fallthru_edge);
          if (bb)
            notice_new_block (bb);
          else
            BB_SET_FLAG (b_fallthru_edge->src, BB_UPDATE_LIFE);
        }

      merge_blocks_move_predecessor_nojumps (b, c);
      return true;
    }

  return false;
}

/* Return true if I1 and I2 are equivalent and thus can be crossjumped.  */
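
/* Equivalence is structural: the insns must have the same code and
   pattern (modulo register renumbering once reload has completed),
   CALL_INSNs must agree on register usage, and REG_EQUAL notes are
   used as a fallback to prove that differing SET_SRCs compute the
   same value.  */
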
static bool
insns_match_p (mode, i1, i2)
     int mode ATTRIBUTE_UNUSED;
     rtx i1, i2;
{
  rtx p1, p2;

  /* Verify that I1 and I2 are equivalent.  */
  if (GET_CODE (i1) != GET_CODE (i2))
    return false;

  p1 = PATTERN (i1);
  p2 = PATTERN (i2);

  if (GET_CODE (p1) != GET_CODE (p2))
    return false;

  /* If this is a CALL_INSN, compare register usage information.
     If we don't check this on stack register machines, the two
     CALL_INSNs might be merged leaving reg-stack.c with mismatching
     numbers of stack registers in the same basic block.
     If we don't check this on machines with delay slots, a delay slot may
     be filled that clobbers a parameter expected by the subroutine.

     ??? We take the simple route for now and assume that if they're
     equal, they were constructed identically.  */

  if (GET_CODE (i1) == CALL_INSN
      && !rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
                       CALL_INSN_FUNCTION_USAGE (i2)))
    return false;

#ifdef STACK_REGS
  /* If cross_jump_death_matters is not 0, the insn's mode
     indicates whether or not the insn contains any stack-like
     regs.  */

  if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
    {
      /* If register stack conversion has already been done, then
         death notes must also be compared before it is certain that
         the two instruction streams match.  */

      rtx note;
      HARD_REG_SET i1_regset, i2_regset;

      CLEAR_HARD_REG_SET (i1_regset);
      CLEAR_HARD_REG_SET (i2_regset);

      for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
        if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
          SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

      for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
        if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
          SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

      GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

      return false;

    done:
      ;
    }
#endif

  if (reload_completed
      ? ! rtx_renumbered_equal_p (p1, p2) : ! rtx_equal_p (p1, p2))
    {
      /* The following code helps take care of G++ cleanups.  */
      rtx equiv1 = find_reg_equal_equiv_note (i1);
      rtx equiv2 = find_reg_equal_equiv_note (i2);

      if (equiv1 && equiv2
          /* If the equivalences are not to a constant, they may
             reference pseudos that no longer exist, so we can't
             use them.  */
          && (! reload_completed
              || (CONSTANT_P (XEXP (equiv1, 0))
                  && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))))
        {
          rtx s1 = single_set (i1);
          rtx s2 = single_set (i2);
          if (s1 != 0 && s2 != 0
              && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
            {
              validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
              validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
              if (! rtx_renumbered_equal_p (p1, p2))
                cancel_changes (0);
              else if (apply_change_group ())
                return true;
            }
        }

      return false;
    }

  return true;
}

/* Look through the insns at the end of BB1 and BB2 and find the longest
   sequence of equivalent insns.  Store the first insns of that sequence
   in *F1 and *F2 and return the sequence length.

   To simplify callers of this function, if the blocks match exactly,
   store the head of the blocks in *F1 and *F2.  */
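
/* For example, if both blocks end in the sequence

	(set (reg A) (plus B C))
	(set (mem D) (reg A))
	(jump_insn ...)

   the matched length is returned and *F1/*F2 point at the first insn
   of each copy of the sequence, letting the caller keep just one.  */
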
static int
flow_find_cross_jump (mode, bb1, bb2, f1, f2)
     int mode ATTRIBUTE_UNUSED;
     basic_block bb1, bb2;
     rtx *f1, *f2;
{
  rtx i1, i2, last1, last2, afterlast1, afterlast2;
  int ninsns = 0;

  /* Skip simple jumps at the end of the blocks.  Complex jumps still
     need to be compared for equivalence, which we'll do below.  */

  i1 = bb1->end;
  if (onlyjump_p (i1)
      || (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
    i1 = PREV_INSN (i1);
  i2 = bb2->end;
  if (onlyjump_p (i2)
      || (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
    i2 = PREV_INSN (i2);

  last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;

  while (true)
    {
      /* Ignore notes.  */
      while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
        i1 = PREV_INSN (i1);
      while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
        i2 = PREV_INSN (i2);

      if (i1 == bb1->head || i2 == bb2->head)
        break;

      if (!insns_match_p (mode, i1, i2))
        break;

      /* Don't begin a cross-jump with a USE or CLOBBER insn.  */
      if (active_insn_p (i1))
        {
          /* If the merged insns have different REG_EQUAL notes, then
             remove them.  */
          rtx equiv1 = find_reg_equal_equiv_note (i1);
          rtx equiv2 = find_reg_equal_equiv_note (i2);

          if (equiv1 && !equiv2)
            remove_note (i1, equiv1);
          else if (!equiv1 && equiv2)
            remove_note (i2, equiv2);
          else if (equiv1 && equiv2
                   && !rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
            {
              remove_note (i1, equiv1);
              remove_note (i2, equiv2);
            }

          afterlast1 = last1, afterlast2 = last2;
          last1 = i1, last2 = i2;
          ninsns++;
        }

      i1 = PREV_INSN (i1);
      i2 = PREV_INSN (i2);
    }

#ifdef HAVE_cc0
  if (ninsns)
    {
      /* Don't allow the insn after a compare to be shared by
         cross-jumping unless the compare is also shared.  */
      if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
        last1 = afterlast1, last2 = afterlast2, ninsns--;
    }
#endif

  if (ninsns)
    {
      /* Include preceding notes and labels in the cross-jump.  One,
         this may bring us to the head of the blocks as requested above.
         Two, it keeps line number notes as matched as may be.  */

      while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
        last1 = PREV_INSN (last1);
      if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
        last1 = PREV_INSN (last1);
      while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
        last2 = PREV_INSN (last2);
      if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
        last2 = PREV_INSN (last2);

      *f1 = last1;
      *f2 = last2;
    }

  return ninsns;
}

/* Return true iff outgoing edges of BB1 and BB2 match, together with
   the branch instruction.  This means that if we commonize the control
   flow before the end of the basic block, the semantics remain unchanged.

   We may assume that there exists one edge with a common destination.  */
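
/* Conditional branches match either directly - same condition code and
   (possibly renumbered) operands, with branch and fallthru edges paired
   up straight - or reversed, when one condition is the inverse of the
   other and the edge pairing is crossed.  */
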
static bool
outgoing_edges_match (mode, bb1, bb2)
     int mode;
     basic_block bb1;
     basic_block bb2;
{
  int nehedges1 = 0, nehedges2 = 0;
  edge fallthru1 = 0, fallthru2 = 0;
  edge e1, e2;

  /* If BB1 has only one successor, we must be looking at an unconditional
     jump.  Which, by the assumption above, means that we only need to check
     that BB2 has one successor.  */
  if (bb1->succ && !bb1->succ->succ_next)
    return (bb2->succ && !bb2->succ->succ_next);

  /* Match conditional jumps - this may get tricky when fallthru and branch
     edges are crossed.  */
  if (bb1->succ
      && bb1->succ->succ_next
      && !bb1->succ->succ_next->succ_next
      && any_condjump_p (bb1->end))
    {
      edge b1, f1, b2, f2;
      bool reverse, match;
      rtx set1, set2, cond1, cond2;
      enum rtx_code code1, code2;

      if (!bb2->succ
          || !bb2->succ->succ_next
          || bb2->succ->succ_next->succ_next
          || !any_condjump_p (bb2->end))
        return false;

      b1 = BRANCH_EDGE (bb1);
      b2 = BRANCH_EDGE (bb2);
      f1 = FALLTHRU_EDGE (bb1);
      f2 = FALLTHRU_EDGE (bb2);

      /* Get around possible forwarders on fallthru edges.  Other cases
         should be optimized out already.  */
      if (FORWARDER_BLOCK_P (f1->dest))
        f1 = f1->dest->succ;

      if (FORWARDER_BLOCK_P (f2->dest))
        f2 = f2->dest->succ;

      /* To simplify use of this function, return false if there are
         unneeded forwarder blocks.  These will get eliminated later
         during cleanup_cfg.  */
      if (FORWARDER_BLOCK_P (f1->dest)
          || FORWARDER_BLOCK_P (f2->dest)
          || FORWARDER_BLOCK_P (b1->dest)
          || FORWARDER_BLOCK_P (b2->dest))
        return false;

      if (f1->dest == f2->dest && b1->dest == b2->dest)
        reverse = false;
      else if (f1->dest == b2->dest && b1->dest == f2->dest)
        reverse = true;
      else
        return false;

      set1 = pc_set (bb1->end);
      set2 = pc_set (bb2->end);
      if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
          != (XEXP (SET_SRC (set2), 1) == pc_rtx))
        reverse = !reverse;

      cond1 = XEXP (SET_SRC (set1), 0);
      cond2 = XEXP (SET_SRC (set2), 0);
      code1 = GET_CODE (cond1);
      if (reverse)
        code2 = reversed_comparison_code (cond2, bb2->end);
      else
        code2 = GET_CODE (cond2);
      if (code2 == UNKNOWN)
        return false;

      /* Verify codes and operands match.  */
      match = ((code1 == code2
                && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
                && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
               || (code1 == swap_condition (code2)
                   && rtx_renumbered_equal_p (XEXP (cond1, 1),
                                              XEXP (cond2, 0))
                   && rtx_renumbered_equal_p (XEXP (cond1, 0),
                                              XEXP (cond2, 1))));

      /* If we return true, we will join the blocks.  Which means that
         we will only have one branch prediction bit to work with.  Thus
         we require the existing branches to have probabilities that are
         roughly similar.  */
      /* ??? We should use bb->frequency to allow merging in infrequently
         executed blocks, but at the moment it is not available when
         cleanup_cfg is run.  */
      if (match && !optimize_size)
        {
          rtx note1, note2;
          int prob1, prob2;

          note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
          note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);

          if (note1 && note2)
            {
              prob1 = INTVAL (XEXP (note1, 0));
              prob2 = INTVAL (XEXP (note2, 0));
              if (reverse)
                prob2 = REG_BR_PROB_BASE - prob2;

              /* Fail if the difference in probabilities is
                 greater than 5%.  */
              if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
                return false;
            }
          else if (note1 || note2)
            return false;
        }

      if (rtl_dump_file && match)
        fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
                 bb1->index, bb2->index);

      return match;
    }

  /* Generic case - we are seeing a computed jump, table jump or trapping
     instruction.  */

  /* First ensure that the instructions match.  There may be many outgoing
     edges so this test is generally cheaper.
     ??? Currently the tablejumps will never match, as they have
     different tables.  */
  if (!insns_match_p (mode, bb1->end, bb2->end))
    return false;

  /* Search the outgoing edges, ensure that the counts do match, find
     possible fallthru and exception handling edges since these need more
     validation.  */
  for (e1 = bb1->succ, e2 = bb2->succ; e1 && e2;
       e1 = e1->succ_next, e2 = e2->succ_next)
    {
      if (e1->flags & EDGE_EH)
        nehedges1++;

      if (e2->flags & EDGE_EH)
        nehedges2++;

      if (e1->flags & EDGE_FALLTHRU)
        fallthru1 = e1;
      if (e2->flags & EDGE_FALLTHRU)
        fallthru2 = e2;
    }

  /* If the number of edges of various types does not match, fail.  */
  if (e1 || e2)
    return false;
  if (nehedges1 != nehedges2)
    return false;
  if ((fallthru1 != 0) != (fallthru2 != 0))
    return false;

  /* fallthru edges must be forwarded to the same destination.  */
  if (fallthru1)
    {
      basic_block d1 = (forwarder_block_p (fallthru1->dest)
                        ? fallthru1->dest->succ->dest: fallthru1->dest);
      basic_block d2 = (forwarder_block_p (fallthru2->dest)
                        ? fallthru2->dest->succ->dest: fallthru2->dest);

      if (d1 != d2)
        return false;
    }

  /* In case we do have EH edges, ensure we are in the same region.  */
  if (nehedges1)
    {
      rtx n1 = find_reg_note (bb1->end, REG_EH_REGION, 0);
      rtx n2 = find_reg_note (bb2->end, REG_EH_REGION, 0);

      if (XEXP (n1, 0) != XEXP (n2, 0))
        return false;
    }

  /* We don't need to match the rest of the edges as the above checks
     should be enough to ensure that they are equivalent.  */
  return true;
}

/* E1 and E2 are edges with the same destination block.  Search their
   predecessors for common code.  If found, redirect control flow from
   (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC.  */
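
/* In other words, when the tails of E1->SRC and E2->SRC match, E2->SRC
   is split just before its copy of the common tail and E1->SRC is
   redirected to the split point by an unconditional jump, so that the
   shared insns are kept in a single place.  */
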
static bool
try_crossjump_to_edge (mode, e1, e2)
     int mode;
     edge e1, e2;
{
  int nmatch;
  basic_block src1 = e1->src, src2 = e2->src;
  basic_block redirect_to;
  rtx newpos1, newpos2;
  edge s;
  rtx last;
  rtx label;
  rtx note;

  /* Search backward through forwarder blocks.  We don't need to worry
     about multiple entry or chained forwarders, as they will be optimized
     away.  We do this to look past the unconditional jump following a
     conditional jump that is required due to the current CFG shape.  */
  if (src1->pred
      && !src1->pred->pred_next
      && FORWARDER_BLOCK_P (src1))
    {
      e1 = src1->pred;
      src1 = e1->src;
    }

  if (src2->pred
      && !src2->pred->pred_next
      && FORWARDER_BLOCK_P (src2))
    {
      e2 = src2->pred;
      src2 = e2->src;
    }

  /* Nothing to do if we reach ENTRY, or a common source block.  */
  if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
    return false;
  if (src1 == src2)
    return false;

  /* Seeing more than one forwarder block would confuse us later...  */
  if (FORWARDER_BLOCK_P (e1->dest)
      && FORWARDER_BLOCK_P (e1->dest->succ->dest))
    return false;

  if (FORWARDER_BLOCK_P (e2->dest)
      && FORWARDER_BLOCK_P (e2->dest->succ->dest))
    return false;

  /* Likewise with dead code (possibly newly created by the other
     optimizations of cleanup_cfg).  */
  if (!src1->pred || !src2->pred)
    return false;

  /* Look for the common insn sequence, part the first ...  */
  if (!outgoing_edges_match (mode, src1, src2))
    return false;

  /* ... and part the second.  */
  nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
  if (!nmatch)
    return false;

  /* Avoid splitting if possible.  */
  if (newpos2 == src2->head)
    redirect_to = src2;
  else
    {
      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
                 src2->index, nmatch);
      redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
             "Cross jumping from bb %i to bb %i; %i common insns\n",
             src1->index, src2->index, nmatch);

  redirect_to->count += src1->count;
  redirect_to->frequency += src1->frequency;

  /* Recompute the frequencies and counts of outgoing edges.  */
  for (s = redirect_to->succ; s; s = s->succ_next)
    {
      edge s2;
      basic_block d = s->dest;

      if (FORWARDER_BLOCK_P (d))
        d = d->succ->dest;

      for (s2 = src1->succ; ; s2 = s2->succ_next)
        {
          basic_block d2 = s2->dest;
          if (FORWARDER_BLOCK_P (d2))
            d2 = d2->succ->dest;
          if (d == d2)
            break;
        }

      s->count += s2->count;

      /* Take care to update possible forwarder blocks.  We verified
         that there is no more than one in the chain, so we can't run
         into an infinite loop.  */
      if (FORWARDER_BLOCK_P (s->dest))
        {
          s->dest->succ->count += s2->count;
          s->dest->count += s2->count;
          s->dest->frequency += EDGE_FREQUENCY (s);
        }

      if (FORWARDER_BLOCK_P (s2->dest))
        {
          s2->dest->succ->count -= s2->count;
          s2->dest->count -= s2->count;
          s2->dest->frequency -= EDGE_FREQUENCY (s);
        }

      if (!redirect_to->frequency && !src1->frequency)
        s->probability = (s->probability + s2->probability) / 2;
      else
        s->probability
          = ((s->probability * redirect_to->frequency +
              s2->probability * src1->frequency)
             / (redirect_to->frequency + src1->frequency));
    }

  note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
  if (note)
    XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);

  /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1.  */

  /* Skip possible basic block header.  */
  if (GET_CODE (newpos1) == CODE_LABEL)
    newpos1 = NEXT_INSN (newpos1);
  if (GET_CODE (newpos1) == NOTE)
    newpos1 = NEXT_INSN (newpos1);

  last = src1->end;

  /* Emit the jump insn.  */
  label = block_label (redirect_to);
  emit_jump_insn_after (gen_jump (label), src1->end);
  JUMP_LABEL (src1->end) = label;
  LABEL_NUSES (label)++;

  /* Delete the now unreachable instructions.  */
  delete_insn_chain (newpos1, last);

  /* Make sure there is a barrier after the new jump.  */
  last = next_nonnote_insn (src1->end);
  if (!last || GET_CODE (last) != BARRIER)
    emit_barrier_after (src1->end);

  /* Update the CFG.  */
  remove_edge (src1->succ);
  make_single_succ_edge (src1, redirect_to, 0);

  BB_SET_FLAG (src1, BB_UPDATE_LIFE);
  update_forwarder_flag (src1);

  return true;
}

/* Search the predecessors of BB for common insn sequences.  When found,
   share code between them by redirecting control flow.  Return true if
   any changes were made.  */

static bool
try_crossjump_bb (mode, bb)
     int mode;
     basic_block bb;
{
  edge e, e2, nexte2, nexte, fallthru;
  bool changed;

  /* Nothing to do if there are not at least two incoming edges.  */
  if (!bb->pred || !bb->pred->pred_next)
    return false;

  /* It is always cheapest to redirect a block that ends in a branch to
     a block that falls through into BB, as that adds no branches to the
     program.  We'll try that combination first.  */
  for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
    if (fallthru->flags & EDGE_FALLTHRU)
      break;

  changed = false;
  for (e = bb->pred; e; e = nexte)
    {
      nexte = e->pred_next;

      /* As noted above, first try with the fallthru predecessor.  */
      if (fallthru)
        {
          /* Don't combine the fallthru edge into anything else.
             If there is a match, we'll do it the other way around.  */
          if (e == fallthru)
            continue;

          if (try_crossjump_to_edge (mode, e, fallthru))
            {
              changed = true;
              nexte = bb->pred;
              continue;
            }
        }

      /* Non-obvious work limiting check: Recognize that we're going
         to call try_crossjump_bb on every basic block.  So if we have
         two blocks with lots of outgoing edges (a switch) and they
         share lots of common destinations, then we would do the
         cross-jump check once for each common destination.

         Now, if the blocks actually are cross-jump candidates, then
         all of their destinations will be shared.  Which means that
         we only need check them for cross-jump candidacy once.  We
         can eliminate redundant checks of crossjump(A,B) by arbitrarily
         choosing to do the check from the block for which the edge
         in question is the first successor of A.  */
      if (e->src->succ != e)
        continue;

      for (e2 = bb->pred; e2; e2 = nexte2)
        {
          nexte2 = e2->pred_next;

          if (e2 == e)
            continue;

          /* We've already checked the fallthru edge above.  */
          if (e2 == fallthru)
            continue;

          /* The "first successor" check above only prevents multiple
             checks of crossjump(A,B).  In order to prevent redundant
             checks of crossjump(B,A), require that A be the block
             with the lowest index.  */
          if (e->src->index > e2->src->index)
            continue;

          if (try_crossjump_to_edge (mode, e, e2))
            {
              changed = true;
              nexte = bb->pred;
              break;
            }
        }
    }

  return changed;
}

/* Do simple CFG optimizations - basic block merging, simplification of
   jump instructions, etc.  Return nonzero if changes were made.  */
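
/* The transformations are iterated to a fixed point: one optimization
   (say, edge forwarding) may expose opportunities for another (say,
   block merging), so the main loop below repeats until a whole pass
   over the basic blocks makes no further changes.  */
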
static bool
try_optimize_cfg (mode)
     int mode;
{
  int i;
  bool changed_overall = false;
  bool changed;
  int iterations = 0;
  sbitmap blocks;

  if (mode & CLEANUP_CROSSJUMP)
    add_noreturn_fake_exit_edges ();

  for (i = 0; i < n_basic_blocks; i++)
    update_forwarder_flag (BASIC_BLOCK (i));

  /* Attempt to merge blocks as made possible by edge removal.  If a block
     has only one successor, and the successor has only one predecessor,
     they may be combined.  */
  do
    {
      changed = false;
      iterations++;

      if (rtl_dump_file)
        fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
                 iterations);

      for (i = 0; i < n_basic_blocks;)
        {
          basic_block c, b = BASIC_BLOCK (i);
          edge s;
          bool changed_here = false;

          /* Delete trivially dead basic blocks.  */
          while (b->pred == NULL)
            {
              c = BASIC_BLOCK (b->index - 1);
              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);

              flow_delete_block (b);
              changed = true;
              b = c;
            }

          /* Remove code labels no longer used.  Don't do this before
             CALL_PLACEHOLDER is removed, as some branches may be hidden
             within.  */
          if (b->pred->pred_next == NULL
              && (b->pred->flags & EDGE_FALLTHRU)
              && !(b->pred->flags & EDGE_COMPLEX)
              && GET_CODE (b->head) == CODE_LABEL
              && (!(mode & CLEANUP_PRE_SIBCALL)
                  || !tail_recursion_label_p (b->head))
              /* If the previous block ends with a branch to this block,
                 we can't delete the label.  Normally this is a condjump
                 that is yet to be simplified, but if CASE_DROPS_THRU,
                 this can be a tablejump with some element going to the
                 same place as the default (fallthru).  */
              && (b->pred->src == ENTRY_BLOCK_PTR
                  || GET_CODE (b->pred->src->end) != JUMP_INSN
                  || ! label_is_jump_target_p (b->head, b->pred->src->end)))
            {
              rtx label = b->head;
              b->head = NEXT_INSN (b->head);
              delete_insn_chain (label, label);
              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Deleted label in block %i.\n",
                         b->index);
            }

          /* If we fall through an empty block, we can remove it.  */
          if (b->pred->pred_next == NULL
              && (b->pred->flags & EDGE_FALLTHRU)
              && GET_CODE (b->head) != CODE_LABEL
              && FORWARDER_BLOCK_P (b)
              /* Note that forwarder_block_p true ensures that there
                 is a successor for this block.  */
              && (b->succ->flags & EDGE_FALLTHRU)
              && n_basic_blocks > 1)
            {
              if (rtl_dump_file)
                fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
                         b->index);

              c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
              redirect_edge_succ_nodup (b->pred, b->succ->dest);
              flow_delete_block (b);
              changed = true;
              b = c;
            }

          /* Merge blocks.  Loop because chains of blocks might be
             combineable.  */
          while ((s = b->succ) != NULL
                 && s->succ_next == NULL
                 && !(s->flags & EDGE_COMPLEX)
                 && (c = s->dest) != EXIT_BLOCK_PTR
                 && c->pred->pred_next == NULL
                 /* If the jump insn has side effects,
                    we can't kill the edge.  */
                 && (GET_CODE (b->end) != JUMP_INSN
                     || onlyjump_p (b->end))
                 && merge_blocks (s, b, c, mode))
            changed_here = true;

          /* Simplify branch over branch.  */
          if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
            {
              BB_SET_FLAG (b, BB_UPDATE_LIFE);
              changed_here = true;
            }

          /* If B has a single outgoing edge, but uses a non-trivial jump
             instruction without side-effects, we can either delete the
             jump entirely, or replace it with a simple unconditional jump.
             Use redirect_edge_and_branch to do the dirty work.  */
          if (b->succ
              && ! b->succ->succ_next
              && b->succ->dest != EXIT_BLOCK_PTR
              && onlyjump_p (b->end)
              && redirect_edge_and_branch (b->succ, b->succ->dest))
            {
              BB_SET_FLAG (b, BB_UPDATE_LIFE);
              update_forwarder_flag (b);
              changed_here = true;
            }

          /* Simplify branch to branch.  */
          if (try_forward_edges (mode, b))
            changed_here = true;

          /* Look for shared code between blocks.  */
          if ((mode & CLEANUP_CROSSJUMP)
              && try_crossjump_bb (mode, b))
            changed_here = true;

          /* Don't get confused by the index shift caused by deleting
             blocks.  */
          if (!changed_here)
            i = b->index + 1;
          else
            changed = true;
        }

      if ((mode & CLEANUP_CROSSJUMP)
          && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
        changed = true;

#ifdef ENABLE_CHECKING
      if (changed)
        verify_flow_info ();
#endif

      changed_overall |= changed;
    }
  while (changed);

  if (mode & CLEANUP_CROSSJUMP)
    remove_fake_edges ();

  if ((mode & CLEANUP_UPDATE_LIFE) && changed_overall)
    {
      bool found = false;

      blocks = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (blocks);
      for (i = 0; i < n_basic_blocks; i++)
        if (BB_FLAGS (BASIC_BLOCK (i)) & BB_UPDATE_LIFE)
          {
            found = true;
            SET_BIT (blocks, i);
          }

      if (found)
        update_life_info (blocks, UPDATE_LIFE_GLOBAL,
                          PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
                          | PROP_KILL_DEAD_CODE);
      sbitmap_free (blocks);
    }

  for (i = 0; i < n_basic_blocks; i++)
    BASIC_BLOCK (i)->aux = NULL;

  return changed_overall;
}

/* Delete all unreachable basic blocks.  */

static bool
delete_unreachable_blocks ()
{
  int i;
  bool changed = false;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  Count down so that we
     don't interfere with the block renumbering that happens in
     flow_delete_block.  */
  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (!(b->flags & BB_REACHABLE))
        flow_delete_block (b), changed = true;
    }

  if (changed)
    tidy_fallthru_edges ();
  return changed;
}

/* Tidy the CFG by deleting unreachable code and whatnot.  */
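
/* The overall shape: delete unreachable blocks, run the try_optimize_cfg
   fixed-point loop, and if that changed anything, delete the blocks that
   became unreachable in the process.  */
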
bool
cleanup_cfg (mode)
     int mode;
{
  bool changed = false;

  timevar_push (TV_CLEANUP_CFG);
  changed = delete_unreachable_blocks ();
  if (try_optimize_cfg (mode))
    delete_unreachable_blocks (), changed = true;

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
  timevar_pop (TV_CLEANUP_CFG);

  return changed;
}