1 /* If-conversion support.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-config.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
40 #ifndef HAVE_conditional_execution
41 #define HAVE_conditional_execution 0
43 #ifndef HAVE_conditional_move
44 #define HAVE_conditional_move 0
55 #ifndef HAVE_conditional_trap
56 #define HAVE_conditional_trap 0
59 #ifndef MAX_CONDITIONAL_EXECUTE
60 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
/* Typed null constants for CFG edge and basic-block pointers.  */
63 #define NULL_EDGE ((struct edge_def *)NULL)
64 #define NULL_BLOCK ((struct basic_block_def *)NULL)
66 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
67 static int num_possible_if_blocks;
69 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
71 static int num_updated_if_blocks;
73 /* # of basic blocks that were removed. */
74 static int num_removed_blocks;
76 /* True if life data ok at present. */
77 static bool life_data_ok;
79 /* The post-dominator relation on the original block numbers. */
80 static sbitmap *post_dominators;
82 /* Forward references. */
83 static int count_bb_insns PARAMS ((basic_block));
84 static rtx first_active_insn PARAMS ((basic_block));
85 static int last_active_insn_p PARAMS ((basic_block, rtx));
86 static int seq_contains_jump PARAMS ((rtx));
88 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
89 static rtx cond_exec_get_condition PARAMS ((rtx));
90 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
91 basic_block, basic_block));
93 static rtx noce_get_condition PARAMS ((rtx, rtx *));
94 static int noce_operand_ok PARAMS ((rtx));
95 static int noce_process_if_block PARAMS ((basic_block, basic_block,
96 basic_block, basic_block));
98 static int process_if_block PARAMS ((basic_block, basic_block,
99 basic_block, basic_block));
100 static void merge_if_block PARAMS ((basic_block, basic_block,
101 basic_block, basic_block));
103 static int find_if_header PARAMS ((basic_block));
104 static int find_if_block PARAMS ((basic_block, edge, edge));
105 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
106 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
107 static int find_cond_trap PARAMS ((basic_block, edge, edge));
108 static int find_memory PARAMS ((rtx *, void *));
109 static int dead_or_predicable PARAMS ((basic_block, basic_block,
110 basic_block, basic_block, int));
111 static void noce_emit_move_insn PARAMS ((rtx, rtx));
113 /* Abuse the basic_block AUX field to store the original block index,
114 as well as a flag indicating that the block should be rescanned for
/* The original block index is stored in BB->aux shifted left by one;
   bit 0 of the aux value is the "update life info" flag.  Setting the
   index therefore clears the flag as a side effect.  */
117 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
118 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
119 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
120 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
123 /* Count the number of non-jump active insns in BB. */
134 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
139 insn = NEXT_INSN (insn);
145 /* Return the first non-jump active insn in the basic block. */
148 first_active_insn (bb)
153 if (GET_CODE (insn) == CODE_LABEL)
157 insn = NEXT_INSN (insn);
160 while (GET_CODE (insn) == NOTE)
164 insn = NEXT_INSN (insn);
167 if (GET_CODE (insn) == JUMP_INSN)
173 /* Return true if INSN is the last active non-jump insn in BB. */
176 last_active_insn_p (bb, insn)
184 insn = NEXT_INSN (insn);
186 while (GET_CODE (insn) == NOTE);
188 return GET_CODE (insn) == JUMP_INSN;
191 /* It is possible, especially when having dealt with multi-word
192 arithmetic, for the expanders to have emitted jumps. Search
193 through the sequence and return TRUE if a jump exists so that
194 we can abort the conversion. */
197 seq_contains_jump (insn)
202 if (GET_CODE (insn) == JUMP_INSN)
204 insn = NEXT_INSN (insn);
209 /* Go through a bunch of insns, converting them to conditional
210 execution format if possible. Return TRUE if all of the non-note
211 insns were processed. */
214 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
215 rtx start; /* first insn to look at */
216 rtx end; /* last insn to look at */
217 rtx test; /* conditional execution test */
218 rtx prob_val; /* probability of branch taken. */
219 int mod_ok; /* true if modifications ok last insn. */
221 int must_be_last = FALSE;
225 for (insn = start; ; insn = NEXT_INSN (insn))
227 if (GET_CODE (insn) == NOTE)
230 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
233 /* Remove USE insns that get in the way. */
234 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
236 /* ??? Ug. Actually unlinking the thing is problematic,
237 given what we'd have to coordinate with our callers. */
238 PUT_CODE (insn, NOTE);
239 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
240 NOTE_SOURCE_FILE (insn) = 0;
244 /* Last insn wasn't last? */
248 if (modified_in_p (test, insn))
255 /* Now build the conditional form of the instruction. */
256 pattern = PATTERN (insn);
258 /* If the machine needs to modify the insn being conditionally executed,
259 say for example to force a constant integer operand into a temp
260 register, do so here. */
261 #ifdef IFCVT_MODIFY_INSN
262 IFCVT_MODIFY_INSN (pattern, insn);
267 validate_change (insn, &PATTERN (insn),
268 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
271 if (GET_CODE (insn) == CALL_INSN && prob_val)
272 validate_change (insn, ®_NOTES (insn),
273 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
274 REG_NOTES (insn)), 1);
284 /* Return the condition for a jump. Do not do any special processing. */
/* NOTE(review): this numbered listing omits interior lines (the source
   line numbers jump); the visible fragments are annotated as-is.  */
287 cond_exec_get_condition (jump)
/* Only a simple conditional jump has an IF_THEN_ELSE source we can
   pick apart; presumably the elided code returns NULL_RTX otherwise
   -- TODO confirm against the full source.  */
292 if (any_condjump_p (jump))
293 test_if = SET_SRC (pc_set (jump));
296 cond = XEXP (test_if, 0);
298 /* If this branches to JUMP_LABEL when the condition is false,
299 reverse the condition. */
300 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
301 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
/* reversed_comparison_code may return UNKNOWN for unreversible FP
   comparisons; the (elided) code presumably bails out in that case
   -- TODO confirm.  */
303 enum rtx_code rev = reversed_comparison_code (cond, jump);
307 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
314 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
315 to conditional execution. Return TRUE if we were successful at
316 converting the block. */
319 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
320 basic_block test_bb; /* Basic block test is in */
321 basic_block then_bb; /* Basic block for THEN block */
322 basic_block else_bb; /* Basic block for ELSE block */
323 basic_block join_bb; /* Basic block the join label is in */
325 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
326 rtx then_start; /* first insn in THEN block */
327 rtx then_end; /* last insn + 1 in THEN block */
328 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
329 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
330 int max; /* max # of insns to convert. */
331 int then_mod_ok; /* whether conditional mods are ok in THEN */
332 rtx true_expr; /* test for else block insns */
333 rtx false_expr; /* test for then block insns */
334 rtx true_prob_val; /* probability of else block */
335 rtx false_prob_val; /* probability of then block */
337 enum rtx_code false_code;
339 /* Find the conditional jump to the ELSE or JOIN part, and isolate
341 test_expr = cond_exec_get_condition (test_bb->end);
345 /* If the conditional jump is more than just a conditional jump,
346 then we can not do conditional execution conversion on this block. */
347 if (!onlyjump_p (test_bb->end))
350 /* Collect the bounds of where we're to search. */
352 then_start = then_bb->head;
353 then_end = then_bb->end;
355 /* Skip a label heading THEN block. */
356 if (GET_CODE (then_start) == CODE_LABEL)
357 then_start = NEXT_INSN (then_start);
359 /* Skip a (use (const_int 0)) or branch as the final insn. */
360 if (GET_CODE (then_end) == INSN
361 && GET_CODE (PATTERN (then_end)) == USE
362 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
363 then_end = PREV_INSN (then_end);
364 else if (GET_CODE (then_end) == JUMP_INSN)
365 then_end = PREV_INSN (then_end);
369 /* Skip the ELSE block's label. */
370 else_start = NEXT_INSN (else_bb->head);
371 else_end = else_bb->end;
373 /* Skip a (use (const_int 0)) or branch as the final insn. */
374 if (GET_CODE (else_end) == INSN
375 && GET_CODE (PATTERN (else_end)) == USE
376 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
377 else_end = PREV_INSN (else_end);
378 else if (GET_CODE (else_end) == JUMP_INSN)
379 else_end = PREV_INSN (else_end);
382 /* How many instructions should we convert in total? */
386 max = 2 * MAX_CONDITIONAL_EXECUTE;
387 n_insns = count_bb_insns (else_bb);
390 max = MAX_CONDITIONAL_EXECUTE;
391 n_insns += count_bb_insns (then_bb);
395 /* Map test_expr/test_jump into the appropriate MD tests to use on
396 the conditionally executed code. */
398 true_expr = test_expr;
400 false_code = reversed_comparison_code (true_expr, test_bb->end);
401 if (false_code != UNKNOWN)
402 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
403 XEXP (true_expr, 0), XEXP (true_expr, 1));
405 false_expr = NULL_RTX;
407 #ifdef IFCVT_MODIFY_TESTS
408 /* If the machine description needs to modify the tests, such as setting a
409 conditional execution register from a comparison, it can do so here. */
410 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
413 /* See if the conversion failed */
414 if (!true_expr || !false_expr)
418 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
421 true_prob_val = XEXP (true_prob_val, 0);
422 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
425 false_prob_val = NULL_RTX;
427 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
428 in the THEN block. */
429 then_mod_ok = (else_bb == NULL_BLOCK);
431 /* Go through the THEN and ELSE blocks converting the insns if possible
432 to conditional execution. */
436 || ! cond_exec_process_insns (then_start, then_end, false_expr,
437 false_prob_val, then_mod_ok)))
441 && ! cond_exec_process_insns (else_start, else_end,
442 true_expr, true_prob_val, TRUE))
445 if (! apply_change_group ())
448 #ifdef IFCVT_MODIFY_FINAL
449 /* Do any machine dependent final modifications */
450 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
453 /* Conversion succeeded. */
455 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
456 n_insns, (n_insns == 1) ? " was" : "s were");
458 /* Merge the blocks! */
459 merge_if_block (test_bb, then_bb, else_bb, join_bb);
463 #ifdef IFCVT_MODIFY_CANCEL
464 /* Cancel any machine dependent changes. */
465 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
472 /* Used by noce_process_if_block to communicate with its subroutines.
474 The subroutines know that A and B may be evaluated freely. They
475 know that X is a register. They should insert new instructions
476 before cond_earliest. */
483 rtx jump, cond, cond_earliest;
486 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
488 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
489 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
490 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
491 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
492 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
493 rtx, enum rtx_code, rtx,
495 static int noce_try_cmove PARAMS ((struct noce_if_info *));
496 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
497 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
499 static int noce_try_minmax PARAMS ((struct noce_if_info *));
500 static int noce_try_abs PARAMS ((struct noce_if_info *));
502 /* Helper function for noce_try_store_flag*. */
505 noce_emit_store_flag (if_info, x, reversep, normalize)
506 struct noce_if_info *if_info;
508 int reversep, normalize;
510 rtx cond = if_info->cond;
514 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
515 || ! general_operand (XEXP (cond, 1), VOIDmode));
517 /* If earliest == jump, or when the condition is complex, try to
518 build the store_flag insn directly. */
521 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
524 code = reversed_comparison_code (cond, if_info->jump);
526 code = GET_CODE (cond);
528 if ((if_info->cond_earliest == if_info->jump || cond_complex)
529 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
533 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
535 tmp = gen_rtx_SET (VOIDmode, x, tmp);
538 tmp = emit_insn (tmp);
540 if (recog_memoized (tmp) >= 0)
546 if_info->cond_earliest = if_info->jump;
554 /* Don't even try if the comparison operands are weird. */
558 return emit_store_flag (x, code, XEXP (cond, 0),
559 XEXP (cond, 1), VOIDmode,
560 (code == LTU || code == LEU
561 || code == GEU || code == GTU), normalize);
564 /* Emit instruction to move a rtx into STRICT_LOW_PART. */
566 noce_emit_move_insn (x, y)
569 enum machine_mode outmode, inmode;
573 if (GET_CODE (x) != STRICT_LOW_PART)
575 emit_move_insn (x, y);
580 inner = XEXP (outer, 0);
581 outmode = GET_MODE (outer);
582 inmode = GET_MODE (inner);
583 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
584 store_bit_field (inner, GET_MODE_BITSIZE (outmode),
585 bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
586 GET_MODE_BITSIZE (inmode));
589 /* Convert "if (test) x = 1; else x = 0".
591 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
592 tried in noce_try_store_flag_constants after noce_try_cmove has had
593 a go at the conversion. */
596 noce_try_store_flag (if_info)
597 struct noce_if_info *if_info;
602 if (GET_CODE (if_info->b) == CONST_INT
603 && INTVAL (if_info->b) == STORE_FLAG_VALUE
604 && if_info->a == const0_rtx)
606 else if (if_info->b == const0_rtx
607 && GET_CODE (if_info->a) == CONST_INT
608 && INTVAL (if_info->a) == STORE_FLAG_VALUE
609 && (reversed_comparison_code (if_info->cond, if_info->jump)
617 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
620 if (target != if_info->x)
621 noce_emit_move_insn (if_info->x, target);
625 emit_insns_before (seq, if_info->cond_earliest);
636 /* Convert "if (test) x = a; else x = b", for A and B constant. */
639 noce_try_store_flag_constants (if_info)
640 struct noce_if_info *if_info;
644 HOST_WIDE_INT itrue, ifalse, diff, tmp;
645 int normalize, can_reverse;
646 enum machine_mode mode;
649 && GET_CODE (if_info->a) == CONST_INT
650 && GET_CODE (if_info->b) == CONST_INT)
652 mode = GET_MODE (if_info->x);
653 ifalse = INTVAL (if_info->a);
654 itrue = INTVAL (if_info->b);
655 diff = trunc_int_for_mode (itrue - ifalse, mode);
657 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
661 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
663 else if (ifalse == 0 && exact_log2 (itrue) >= 0
664 && (STORE_FLAG_VALUE == 1
665 || BRANCH_COST >= 2))
667 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
668 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
669 normalize = 1, reversep = 1;
671 && (STORE_FLAG_VALUE == -1
672 || BRANCH_COST >= 2))
674 else if (ifalse == -1 && can_reverse
675 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
676 normalize = -1, reversep = 1;
677 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
685 tmp = itrue; itrue = ifalse; ifalse = tmp;
686 diff = trunc_int_for_mode (-diff, mode);
690 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
697 /* if (test) x = 3; else x = 4;
698 => x = 3 + (test == 0); */
699 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
701 target = expand_binop (mode,
702 (diff == STORE_FLAG_VALUE
703 ? add_optab : sub_optab),
704 GEN_INT (ifalse), target, if_info->x, 0,
708 /* if (test) x = 8; else x = 0;
709 => x = (test != 0) << 3; */
710 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
712 target = expand_binop (mode, ashl_optab,
713 target, GEN_INT (tmp), if_info->x, 0,
717 /* if (test) x = -1; else x = b;
718 => x = -(test != 0) | b; */
719 else if (itrue == -1)
721 target = expand_binop (mode, ior_optab,
722 target, GEN_INT (ifalse), if_info->x, 0,
726 /* if (test) x = a; else x = b;
727 => x = (-(test != 0) & (b - a)) + a; */
730 target = expand_binop (mode, and_optab,
731 target, GEN_INT (diff), if_info->x, 0,
734 target = expand_binop (mode, add_optab,
735 target, GEN_INT (ifalse), if_info->x, 0,
745 if (target != if_info->x)
746 noce_emit_move_insn (if_info->x, target);
751 if (seq_contains_jump (seq))
754 emit_insns_before (seq, if_info->cond_earliest);
762 /* Convert "if (test) foo++" into "foo += (test != 0)", and
763 similarly for "foo--". */
766 noce_try_store_flag_inc (if_info)
767 struct noce_if_info *if_info;
770 int subtract, normalize;
776 /* Should be no `else' case to worry about. */
777 && if_info->b == if_info->x
778 && GET_CODE (if_info->a) == PLUS
779 && (XEXP (if_info->a, 1) == const1_rtx
780 || XEXP (if_info->a, 1) == constm1_rtx)
781 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
782 && (reversed_comparison_code (if_info->cond, if_info->jump)
785 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
786 subtract = 0, normalize = 0;
787 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
788 subtract = 1, normalize = 0;
790 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
794 target = noce_emit_store_flag (if_info,
795 gen_reg_rtx (GET_MODE (if_info->x)),
799 target = expand_binop (GET_MODE (if_info->x),
800 subtract ? sub_optab : add_optab,
801 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
804 if (target != if_info->x)
805 noce_emit_move_insn (if_info->x, target);
810 if (seq_contains_jump (seq))
813 emit_insns_before (seq, if_info->cond_earliest);
824 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
827 noce_try_store_flag_mask (if_info)
828 struct noce_if_info *if_info;
836 || STORE_FLAG_VALUE == -1)
837 && ((if_info->a == const0_rtx
838 && rtx_equal_p (if_info->b, if_info->x))
839 || ((reversep = (reversed_comparison_code (if_info->cond,
842 && if_info->b == const0_rtx
843 && rtx_equal_p (if_info->a, if_info->x))))
846 target = noce_emit_store_flag (if_info,
847 gen_reg_rtx (GET_MODE (if_info->x)),
850 target = expand_binop (GET_MODE (if_info->x), and_optab,
851 if_info->x, target, if_info->x, 0,
856 if (target != if_info->x)
857 noce_emit_move_insn (if_info->x, target);
862 if (seq_contains_jump (seq))
865 emit_insns_before (seq, if_info->cond_earliest);
876 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
879 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
880 struct noce_if_info *if_info;
881 rtx x, cmp_a, cmp_b, vfalse, vtrue;
884 /* If earliest == jump, try to build the cmove insn directly.
885 This is helpful when combine has created some complex condition
886 (like for alpha's cmovlbs) that we can't hope to regenerate
887 through the normal interface. */
889 if (if_info->cond_earliest == if_info->jump)
893 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
894 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
895 tmp = gen_rtx_SET (VOIDmode, x, tmp);
898 tmp = emit_insn (tmp);
900 if (recog_memoized (tmp) >= 0)
912 /* Don't even try if the comparison operands are weird. */
913 if (! general_operand (cmp_a, GET_MODE (cmp_a))
914 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
917 #if HAVE_conditional_move
918 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
919 vtrue, vfalse, GET_MODE (x),
920 (code == LTU || code == GEU
921 || code == LEU || code == GTU));
923 /* We'll never get here, as noce_process_if_block doesn't call the
924 functions involved. Ifdef code, however, should be discouraged
925 because it leads to typos in the code not selected. However,
926 emit_conditional_move won't exist either. */
931 /* Try only simple constants and registers here. More complex cases
932 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
933 has had a go at it. */
936 noce_try_cmove (if_info)
937 struct noce_if_info *if_info;
942 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
943 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
947 code = GET_CODE (if_info->cond);
948 target = noce_emit_cmove (if_info, if_info->x, code,
949 XEXP (if_info->cond, 0),
950 XEXP (if_info->cond, 1),
951 if_info->a, if_info->b);
955 if (target != if_info->x)
956 noce_emit_move_insn (if_info->x, target);
960 emit_insns_before (seq, if_info->cond_earliest);
973 /* Try more complex cases involving conditional_move. */
976 noce_try_cmove_arith (if_info)
977 struct noce_if_info *if_info;
987 /* A conditional move from two memory sources is equivalent to a
988 conditional on their addresses followed by a load. Don't do this
989 early because it'll screw alias analysis. Note that we've
990 already checked for no side effects. */
991 if (! no_new_pseudos && cse_not_expected
992 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
997 x = gen_reg_rtx (Pmode);
1001 /* ??? We could handle this if we knew that a load from A or B could
1002 not fault. This is also true if we've already loaded
1003 from the address along the path from ENTRY. */
1004 else if (may_trap_p (a) || may_trap_p (b))
1007 /* if (test) x = a + b; else x = c - d;
1014 code = GET_CODE (if_info->cond);
1015 insn_a = if_info->insn_a;
1016 insn_b = if_info->insn_b;
1018 /* Possibly rearrange operands to make things come out more natural. */
1019 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1022 if (rtx_equal_p (b, x))
1024 else if (general_operand (b, GET_MODE (b)))
1029 code = reversed_comparison_code (if_info->cond, if_info->jump);
1030 tmp = a, a = b, b = tmp;
1031 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1037 /* If either operand is complex, load it into a register first.
1038 The best way to do this is to copy the original insn. In this
1039 way we preserve any clobbers etc that the insn may have had.
1040 This is of course not possible in the IS_MEM case. */
1041 if (! general_operand (a, GET_MODE (a)))
1046 goto end_seq_and_fail;
1050 tmp = gen_reg_rtx (GET_MODE (a));
1051 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1054 goto end_seq_and_fail;
1057 a = gen_reg_rtx (GET_MODE (a));
1058 tmp = copy_rtx (insn_a);
1059 set = single_set (tmp);
1061 tmp = emit_insn (PATTERN (tmp));
1063 if (recog_memoized (tmp) < 0)
1064 goto end_seq_and_fail;
1066 if (! general_operand (b, GET_MODE (b)))
1071 goto end_seq_and_fail;
1075 tmp = gen_reg_rtx (GET_MODE (b));
1076 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1079 goto end_seq_and_fail;
1082 b = gen_reg_rtx (GET_MODE (b));
1083 tmp = copy_rtx (insn_b);
1084 set = single_set (tmp);
1086 tmp = emit_insn (PATTERN (tmp));
1088 if (recog_memoized (tmp) < 0)
1089 goto end_seq_and_fail;
1092 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1093 XEXP (if_info->cond, 1), a, b);
1096 goto end_seq_and_fail;
1098 /* If we're handling a memory for above, emit the load now. */
1101 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1103 /* Copy over flags as appropriate. */
1104 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1105 MEM_VOLATILE_P (tmp) = 1;
1106 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1107 MEM_IN_STRUCT_P (tmp) = 1;
1108 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1109 MEM_SCALAR_P (tmp) = 1;
1110 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1111 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1113 noce_emit_move_insn (if_info->x, tmp);
1115 else if (target != x)
1116 noce_emit_move_insn (x, target);
1120 emit_insns_before (tmp, if_info->cond_earliest);
1128 /* For most cases, the simplified condition we found is the best
1129 choice, but this is not the case for the min/max/abs transforms.
1130 For these we wish to know that it is A or B in the condition. */
1133 noce_get_alt_condition (if_info, target, earliest)
1134 struct noce_if_info *if_info;
1138 rtx cond, set, insn;
1141 /* If target is already mentioned in the known condition, return it. */
1142 if (reg_mentioned_p (target, if_info->cond))
1144 *earliest = if_info->cond_earliest;
1145 return if_info->cond;
1148 set = pc_set (if_info->jump);
1149 cond = XEXP (SET_SRC (set), 0);
1151 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1152 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1154 /* If we're looking for a constant, try to make the conditional
1155 have that constant in it. There are two reasons why it may
1156 not have the constant we want:
1158 1. GCC may have needed to put the constant in a register, because
1159 the target can't compare directly against that constant. For
1160 this case, we look for a SET immediately before the comparison
1161 that puts a constant in that register.
1163 2. GCC may have canonicalized the conditional, for example
1164 replacing "if x < 4" with "if x <= 3". We can undo that (or
1165 make equivalent types of changes) to get the constants we need
1166 if they're off by one in the right direction. */
1168 if (GET_CODE (target) == CONST_INT)
1170 enum rtx_code code = GET_CODE (if_info->cond);
1171 rtx op_a = XEXP (if_info->cond, 0);
1172 rtx op_b = XEXP (if_info->cond, 1);
1175 /* First, look to see if we put a constant in a register. */
1176 prev_insn = PREV_INSN (if_info->cond_earliest);
1178 && INSN_P (prev_insn)
1179 && GET_CODE (PATTERN (prev_insn)) == SET)
1181 rtx src = find_reg_equal_equiv_note (prev_insn);
1183 src = SET_SRC (PATTERN (prev_insn));
1184 if (GET_CODE (src) == CONST_INT)
1186 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1188 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1191 if (GET_CODE (op_a) == CONST_INT)
1196 code = swap_condition (code);
1201 /* Now, look to see if we can get the right constant by
1202 adjusting the conditional. */
1203 if (GET_CODE (op_b) == CONST_INT)
1205 HOST_WIDE_INT desired_val = INTVAL (target);
1206 HOST_WIDE_INT actual_val = INTVAL (op_b);
1211 if (actual_val == desired_val + 1)
1214 op_b = GEN_INT (desired_val);
1218 if (actual_val == desired_val - 1)
1221 op_b = GEN_INT (desired_val);
1225 if (actual_val == desired_val - 1)
1228 op_b = GEN_INT (desired_val);
1232 if (actual_val == desired_val + 1)
1235 op_b = GEN_INT (desired_val);
1243 /* If we made any changes, generate a new conditional that is
1244 equivalent to what we started with, but has the right
1246 if (code != GET_CODE (if_info->cond)
1247 || op_a != XEXP (if_info->cond, 0)
1248 || op_b != XEXP (if_info->cond, 1))
1250 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1251 *earliest = if_info->cond_earliest;
1256 cond = canonicalize_condition (if_info->jump, cond, reverse,
1258 if (! cond || ! reg_mentioned_p (target, cond))
1261 /* We almost certainly searched back to a different place.
1262 Need to re-verify correct lifetimes. */
1264 /* X may not be mentioned in the range (cond_earliest, jump]. */
1265 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1266 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1269 /* A and B may not be modified in the range [cond_earliest, jump). */
1270 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1272 && (modified_in_p (if_info->a, insn)
1273 || modified_in_p (if_info->b, insn)))
1279 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1282 noce_try_minmax (if_info)
1283 struct noce_if_info *if_info;
1285 rtx cond, earliest, target, seq;
1290 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1294 /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
1295 will be resolved with an SMIN/SMAX. It wouldn't be too hard
1296 to get the target to tell us... */
1297 if (FLOAT_MODE_P (GET_MODE (if_info->x))
1298 && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1299 && ! flag_unsafe_math_optimizations)
1302 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1306 /* Verify the condition is of the form we expect, and canonicalize
1307 the comparison code. */
1308 code = GET_CODE (cond);
1309 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1311 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1314 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1316 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1318 code = swap_condition (code);
1323 /* Determine what sort of operation this is. Note that the code is for
1324 a taken branch, so the code->operation mapping appears backwards. */
1357 target = expand_binop (GET_MODE (if_info->x), op, if_info->a, if_info->b,
1358 if_info->x, unsignedp, OPTAB_WIDEN);
1364 if (target != if_info->x)
1365 noce_emit_move_insn (if_info->x, target);
1370 if (seq_contains_jump (seq))
1373 emit_insns_before (seq, earliest);
1374 if_info->cond = cond;
1375 if_info->cond_earliest = earliest;
1380 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1383 noce_try_abs (if_info)
1384 struct noce_if_info *if_info;
1386 rtx cond, earliest, target, seq, a, b, c;
1389 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1393 /* Recognize A and B as constituting an ABS or NABS. */
1396 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1398 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1400 c = a; a = b; b = c;
1406 cond = noce_get_alt_condition (if_info, b, &earliest);
1410 /* Verify the condition is of the form we expect. */
1411 if (rtx_equal_p (XEXP (cond, 0), b))
1413 else if (rtx_equal_p (XEXP (cond, 1), b))
1418 /* Verify that C is zero. Search backward through the block for
1419 a REG_EQUAL note if necessary. */
1422 rtx insn, note = NULL;
1423 for (insn = earliest;
1424 insn != if_info->test_bb->head;
1425 insn = PREV_INSN (insn))
1427 && ((note = find_reg_note (insn, REG_EQUAL, c))
1428 || (note = find_reg_note (insn, REG_EQUIV, c))))
1434 if (GET_CODE (c) == MEM
1435 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1436 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1437 c = get_pool_constant (XEXP (c, 0));
1439 /* Work around funny ideas get_condition has wrt canonicalization.
1440 Note that these rtx constants are known to be CONST_INT, and
1441 therefore imply integer comparisons. */
1442 if (c == constm1_rtx && GET_CODE (cond) == GT)
1444 else if (c == const1_rtx && GET_CODE (cond) == LT)
1446 else if (c != CONST0_RTX (GET_MODE (b)))
1449 /* Determine what sort of operation this is. */
1450 switch (GET_CODE (cond))
1469 target = expand_unop (GET_MODE (if_info->x), abs_optab, b, if_info->x, 0);
1471 /* ??? It's a quandary whether cmove would be better here, especially
1472 for integers. Perhaps combine will clean things up. */
1473 if (target && negate)
1474 target = expand_unop (GET_MODE (target), neg_optab, target, if_info->x, 0);
1482 if (target != if_info->x)
1483 noce_emit_move_insn (if_info->x, target);
1488 if (seq_contains_jump (seq))
1491 emit_insns_before (seq, earliest);
1492 if_info->cond = cond;
1493 if_info->cond_earliest = earliest;
1498 /* Look for the condition for the jump first. We'd prefer to avoid
1499 get_condition if we can -- it tries to look back for the contents
1500 of an original compare. On targets that use normal integers for
1501 comparisons, e.g. alpha, this is wasteful. */
/* NOTE(review): interior lines of this function are missing from the
   listing; comments below annotate only the visible fragments.  */
1504 noce_get_condition (jump, earliest)
1511 /* If the condition variable is a register and is MODE_INT, accept it.
1512 Otherwise, fall back on get_condition. */
/* Reject anything that is not a simple conditional jump.  */
1514 if (! any_condjump_p (jump))
1517 set = pc_set (jump);
1519 cond = XEXP (SET_SRC (set), 0);
/* Fast path: a bare (reg:MODE_INT) comparison operand needs no
   backward search for the originating compare insn.  */
1520 if (GET_CODE (XEXP (cond, 0)) == REG
1521 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1525 /* If this branches to JUMP_LABEL when the condition is false,
1526 reverse the condition. */
1527 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1528 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
/* Unlike cond_exec_get_condition, this uses reverse_condition
   directly; presumably safe because the mode is known to be
   MODE_INT here -- TODO confirm.  */
1529 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1530 GET_MODE (cond), XEXP (cond, 0),
/* Slow path: let get_condition search back and set *EARLIEST.  */
1534 cond = get_condition (jump, earliest);
1539 /* Return true if OP is ok for if-then-else processing. */
1542 noce_operand_ok (op)
/* OP is a SET_SRC or SET_DEST candidate for conversion to a conditional
   move; reject anything with side effects or that may trap, with the
   refinements below for memories and (with -ffast-math-style flags)
   floating-point operations.  */
1545 /* We special-case memories, so handle any of them with
1546 no address side effects. */
1547 if (GET_CODE (op) == MEM)
1548 return ! side_effects_p (XEXP (op, 0));
1550 if (side_effects_p (op))
1553 /* ??? Unfortunately may_trap_p can't look at flag_trapping_math, due to
1554 being linked into the genfoo programs. This is probably a mistake.
1555 With finite operands, most fp operations don't trap. */
1556 if (!flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
1557 switch (GET_CODE (op))
/* NOTE(review): the case labels of this switch are elided in this
   excerpt; judging by the comment below they cover division-like
   codes whose second operand must be a nonzero constant.  */
1563 /* ??? This is kinda lame -- almost every target will have forced
1564 the constant into a register first. But given the expense of
1565 division, this is probably for the best. */
1566 return (CONSTANT_P (XEXP (op, 1))
1567 && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
1568 && ! may_trap_p (XEXP (op, 0)));
/* General case: unary/binary rtx classes check their operands,
   everything else is checked as a whole.  */
1571 switch (GET_RTX_CLASS (GET_CODE (op)))
1574 return ! may_trap_p (XEXP (op, 0));
1577 return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
1582 return ! may_trap_p (op);
1585 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1586 without using conditional execution. Return TRUE if we were
1587 successful at converting the block. */
1590 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1591 basic_block test_bb; /* Basic block test is in */
1592 basic_block then_bb; /* Basic block for THEN block */
1593 basic_block else_bb; /* Basic block for ELSE block */
1594 basic_block join_bb; /* Basic block the join label is in */
1596 /* We're looking for patterns of the form
1598 (1) if (...) x = a; else x = b;
1599 (2) x = b; if (...) x = a;
1600 (3) if (...) x = a; // as if with an initial x = x.
1602 The later patterns require jumps to be more expensive.
1604 ??? For future expansion, look for multiple X in such patterns. */
1606 struct noce_if_info if_info;
1609 rtx orig_x, x, a, b;
1610 rtx jump, cond, insn;
1612 /* If this is not a standard conditional jump, we can't parse it. */
1613 jump = test_bb->end;
1614 cond = noce_get_condition (jump, &if_info.cond_earliest);
1618 /* If the conditional jump is more than just a conditional jump,
1619 then we can not do if-conversion on this block. */
1620 if (! onlyjump_p (jump))
1623 /* We must be comparing objects whose modes imply the size. */
1624 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1627 /* Look for one of the potential sets. */
/* THEN must consist of exactly one active insn that is a single_set;
   that set gives us X (destination) and A (value on the taken path).  */
1628 insn_a = first_active_insn (then_bb);
1630 || ! last_active_insn_p (then_bb, insn_a)
1631 || (set_a = single_set (insn_a)) == NULL_RTX)
1634 x = SET_DEST (set_a);
1635 a = SET_SRC (set_a);
1637 /* Look for the other potential set. Make sure we've got equivalent
1639 /* ??? This is overconservative. Storing to two different mems is
1640 as easy as conditionally computing the address. Storing to a
1641 single mem merely requires a scratch memory to use as one of the
1642 destination addresses; often the memory immediately below the
1643 stack pointer is available for this. */
1647 insn_b = first_active_insn (else_bb);
1649 || ! last_active_insn_p (else_bb, insn_b)
1650 || (set_b = single_set (insn_b)) == NULL_RTX
1651 || ! rtx_equal_p (x, SET_DEST (set_b)))
/* Pattern (2): with no ELSE block, look for "x = b" immediately before
   the condition computation in the TEST block itself.  */
1656 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1658 || GET_CODE (insn_b) != INSN
1659 || (set_b = single_set (insn_b)) == NULL_RTX
1660 || ! rtx_equal_p (x, SET_DEST (set_b))
1661 || reg_mentioned_p (x, cond)
1662 || reg_mentioned_p (x, a)
1663 || reg_mentioned_p (x, SET_SRC (set_b)))
1664 insn_b = set_b = NULL_RTX;
/* No usable second set: pattern (3), treated as an implicit "x = x".  */
1666 b = (set_b ? SET_SRC (set_b) : x);
1668 /* X may not be mentioned in the range (cond_earliest, jump]. */
1669 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1670 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1673 /* A and B may not be modified in the range [cond_earliest, jump). */
1674 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1676 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1679 /* Only operate on register destinations, and even then avoid extending
1680 the lifetime of hard registers on small register class machines. */
1682 if (GET_CODE (x) != REG
1683 || (SMALL_REGISTER_CLASSES
1684 && REGNO (x) < FIRST_PSEUDO_REGISTER))
/* Compute into a fresh pseudo instead; the result is copied back into
   ORIG_X at the end (presumably ORIG_X was saved from X just above --
   the assignment is elided in this excerpt).  */
1688 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1689 ? XEXP (x, 0) : x));
1692 /* Don't operate on sources that may trap or are volatile. */
1693 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1696 /* Set up the info block for our subroutines. */
1697 if_info.test_bb = test_bb;
1698 if_info.cond = cond;
1699 if_info.jump = jump;
1700 if_info.insn_a = insn_a;
1701 if_info.insn_b = insn_b;
1706 /* Try optimizations in some approximation of a useful order. */
1707 /* ??? Should first look to see if X is live incoming at all. If it
1708 isn't, we don't need anything but an unconditional set. */
1710 /* Look and see if A and B are really the same. Avoid creating silly
1711 cmove constructs that no one will fix up later. */
1712 if (rtx_equal_p (a, b))
1714 /* If we have an INSN_B, we don't have to create any new rtl. Just
1715 move the instruction that we already have. If we don't have an
1716 INSN_B, that means that A == X, and we've got a noop move. In
1717 that case don't do anything and let the code below delete INSN_A. */
1718 if (insn_b && else_bb)
1722 if (else_bb && insn_b == else_bb->end)
1723 else_bb->end = PREV_INSN (insn_b);
1724 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1726 /* If there was a REG_EQUAL note, delete it since it may have been
1727 true due to this insn being after a jump. */
1728 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1729 remove_note (insn_b, note);
1733 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1734 x must be executed twice. */
1735 else if (insn_b && side_effects_p (orig_x))
/* The conversion strategies, cheapest first; each returns nonzero and
   emits replacement insns on success.  */
1742 if (noce_try_store_flag (&if_info))
1744 if (noce_try_minmax (&if_info))
1746 if (noce_try_abs (&if_info))
1748 if (HAVE_conditional_move
1749 && noce_try_cmove (&if_info))
1751 if (! HAVE_conditional_execution)
1753 if (noce_try_store_flag_constants (&if_info))
1755 if (noce_try_store_flag_inc (&if_info))
1757 if (noce_try_store_flag_mask (&if_info))
1759 if (HAVE_conditional_move
1760 && noce_try_cmove_arith (&if_info))
1767 /* The original sets may now be killed. */
1768 if (insn_a == then_bb->end)
1769 then_bb->end = PREV_INSN (insn_a);
1770 flow_delete_insn (insn_a);
1772 /* Several special cases here: First, we may have reused insn_b above,
1773 in which case insn_b is now NULL. Second, we want to delete insn_b
1774 if it came from the ELSE block, because it follows the now correct
1775 write that appears in the TEST block. However, if we got insn_b from
1776 the TEST block, it may in fact be loading data needed for the comparison.
1777 We'll let life_analysis remove the insn if it's really dead. */
1778 if (insn_b && else_bb)
1780 if (insn_b == else_bb->end)
1781 else_bb->end = PREV_INSN (insn_b);
1782 flow_delete_insn (insn_b);
1785 /* The new insns will have been inserted before cond_earliest. We should
1786 be able to remove the jump with impunity, but the condition itself may
1787 have been modified by gcse to be shared across basic blocks. */
1788 test_bb->end = PREV_INSN (jump);
1789 flow_delete_insn (jump);
1791 /* If we used a temporary, fix it up now. */
/* Copy the fresh pseudo back into the real destination, then append
   the generated sequence to the TEST block.  */
1795 noce_emit_move_insn (orig_x, x);
1796 insn_b = gen_sequence ();
1799 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1802 /* Merge the blocks! */
1803 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1808 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1809 straight line code. Return true if successful. */
1812 process_if_block (test_bb, then_bb, else_bb, join_bb)
1813 basic_block test_bb; /* Basic block test is in */
1814 basic_block then_bb; /* Basic block for THEN block */
1815 basic_block else_bb; /* Basic block for ELSE block */
1816 basic_block join_bb; /* Basic block the join label is in */
/* Dispatcher: before reload, prefer the branchless (noce = "no
   conditional execution") transformations; otherwise, on targets with
   conditional execution, try predicating the block bodies.  */
1818 if (! reload_completed
1819 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1822 if (HAVE_conditional_execution
1824 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1830 /* Merge the blocks and mark for local life update. */
1833 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1834 basic_block test_bb; /* Basic block test is in */
1835 basic_block then_bb; /* Basic block for THEN block */
1836 basic_block else_bb; /* Basic block for ELSE block */
1837 basic_block join_bb; /* Basic block the join label is in */
1839 basic_block combo_bb;
1841 /* All block merging is done into the lower block numbers. */
1845 /* First merge TEST block into THEN block. This is a no-brainer since
1846 the THEN block did not have a code label to begin with. */
1849 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1850 merge_blocks_nomove (combo_bb, then_bb);
1851 num_removed_blocks++;
1853 /* The ELSE block, if it existed, had a label. That label count
1854 will almost always be zero, but odd things can happen when labels
1855 get their addresses taken. */
1858 merge_blocks_nomove (combo_bb, else_bb);
1859 num_removed_blocks++;
1862 /* If there was no join block reported, that means it was not adjacent
1863 to the others, and so we cannot merge them. */
1867 /* The outgoing edge for the current COMBO block should already
1868 be correct. Verify this. */
1869 if (combo_bb->succ == NULL_EDGE)
1872 /* There should still be a branch at the end of the THEN or ELSE
1873 blocks taking us to our final destination. */
1874 if (GET_CODE (combo_bb->end) != JUMP_INSN)
1878 /* The JOIN block may have had quite a number of other predecessors too.
1879 Since we've already merged the TEST, THEN and ELSE blocks, we should
1880 have only one remaining edge from our if-then-else diamond. If there
1881 is more than one remaining edge, it must come from elsewhere. There
1882 may be zero incoming edges if the THEN block didn't actually join
1883 back up (as with a call to abort). */
1884 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1886 /* We can merge the JOIN. */
1888 COPY_REG_SET (combo_bb->global_live_at_end,
1889 join_bb->global_live_at_end);
1890 merge_blocks_nomove (combo_bb, join_bb);
1891 num_removed_blocks++;
1895 /* We cannot merge the JOIN. */
1897 /* The outgoing edge for the current COMBO block should already
1898 be correct. Verify this. */
1899 if (combo_bb->succ->succ_next != NULL_EDGE
1900 || combo_bb->succ->dest != join_bb)
1903 /* Remove the jump and cruft from the end of the COMBO block. */
1904 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1907 /* Make sure we update life info properly. */
/* The merged block's liveness is now stale; flag it so if_convert's
   final update_life_info pass recomputes it.  */
1908 SET_UPDATE_LIFE (combo_bb);
1910 num_updated_if_blocks++;
1913 /* Find a block ending in a simple IF condition. Return TRUE if
1914 we were able to transform it in some way. */
1917 find_if_header (test_bb)
1918 basic_block test_bb;
1923 /* The kind of block we're looking for has exactly two successors. */
1924 if ((then_edge = test_bb->succ) == NULL_EDGE
1925 || (else_edge = then_edge->succ_next) == NULL_EDGE
1926 || else_edge->succ_next != NULL_EDGE)
1929 /* Neither edge should be abnormal. */
1930 if ((then_edge->flags & EDGE_COMPLEX)
1931 || (else_edge->flags & EDGE_COMPLEX))
1934 /* The THEN edge is canonically the one that falls through. */
/* Swap the two edges if needed so THEN = fallthru, ELSE = branch.  */
1935 if (then_edge->flags & EDGE_FALLTHRU)
1937 else if (else_edge->flags & EDGE_FALLTHRU)
1940 else_edge = then_edge;
1944 /* Otherwise this must be a multiway branch of some sort. */
/* Try each recognizer in turn: full diamond/triangle conversion,
   conditional trap, then the two half-diamond special cases.  */
1947 if (find_if_block (test_bb, then_edge, else_edge))
1949 if (HAVE_trap && HAVE_conditional_trap
1950 && find_cond_trap (test_bb, then_edge, else_edge))
1953 && (! HAVE_conditional_execution || reload_completed))
1955 if (find_if_case_1 (test_bb, then_edge, else_edge))
1957 if (find_if_case_2 (test_bb, then_edge, else_edge))
1965 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1969 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1970 block. If so, we'll try to convert the insns to not require the branch.
1971 Return TRUE if we were successful at converting the block. */
1974 find_if_block (test_bb, then_edge, else_edge)
1975 basic_block test_bb;
1976 edge then_edge, else_edge;
1978 basic_block then_bb = then_edge->dest;
1979 basic_block else_bb = else_edge->dest;
1980 basic_block join_bb = NULL_BLOCK;
1981 edge then_succ = then_bb->succ;
1982 edge else_succ = else_bb->succ;
1985 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1986 if (then_bb->pred->pred_next != NULL_EDGE)
1989 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1990 if (then_succ != NULL_EDGE
1991 && (then_succ->succ_next != NULL_EDGE
1992 || (then_succ->flags & EDGE_COMPLEX)))
1995 /* If the THEN block has no successors, conditional execution can still
1996 make a conditional call. Don't do this unless the ELSE block has
1997 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1998 Check for the last insn of the THEN block being an indirect jump, which
1999 is listed as not having any successors, but confuses the rest of the CE
2000 code processing. XXX we should fix this in the future. */
2001 if (then_succ == NULL)
2003 if (else_bb->pred->pred_next == NULL_EDGE)
2005 rtx last_insn = then_bb->end;
/* Skip trailing notes to find the real last insn of THEN.  */
2008 && GET_CODE (last_insn) == NOTE
2009 && last_insn != then_bb->head
2010 last_insn = PREV_INSN (last_insn);
/* A non-simple jump here is the indirect-jump case described above.  */
2013 && GET_CODE (last_insn) == JUMP_INSN
2014 && ! simplejump_p (last_insn))
2018 else_bb = NULL_BLOCK;
2024 /* If the THEN block's successor is the other edge out of the TEST block,
2025 then we have an IF-THEN combo without an ELSE. */
2026 else if (then_succ->dest == else_bb)
2029 else_bb = NULL_BLOCK;
2032 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2033 has exactly one predecessor and one successor, and the outgoing edge
2034 is not complex, then we have an IF-THEN-ELSE combo. */
2035 else if (else_succ != NULL_EDGE
2036 && then_succ->dest == else_succ->dest
2037 && else_bb->pred->pred_next == NULL_EDGE
2038 && else_succ->succ_next == NULL_EDGE
2039 && ! (else_succ->flags & EDGE_COMPLEX))
2040 join_bb = else_succ->dest;
2042 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2046 num_possible_if_blocks++;
2051 fprintf (rtl_dump_file,
2052 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
2053 test_bb->index, then_bb->index, else_bb->index,
2056 fprintf (rtl_dump_file,
2057 "\nIF-THEN block found, start %d, then %d, join %d\n",
2058 test_bb->index, then_bb->index, join_bb->index);
2061 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
2062 get the first condition for free, since we've already asserted that
2063 there's a fallthru edge from IF to THEN. */
2064 /* ??? As an enhancement, move the ELSE block. Have to deal with
2065 BLOCK notes, if by no other means than aborting the merge if they
2066 exist. Sticky enough I don't want to think about it now. */
2067 next_index = then_bb->index;
2068 if (else_bb && ++next_index != else_bb->index)
2070 if (++next_index != join_bb->index)
2078 /* Do the real work. */
2079 return process_if_block (test_bb, then_bb, else_bb, join_bb);
2082 /* Convert a branch over a trap, or a branch to a trap,
2083 into a conditional trap. */
2086 find_cond_trap (test_bb, then_edge, else_edge)
2087 basic_block test_bb;
2088 edge then_edge, else_edge;
2090 basic_block then_bb, else_bb, join_bb, trap_bb;
2091 rtx trap, jump, cond, cond_earliest, seq;
2094 then_bb = then_edge->dest;
2095 else_bb = else_edge->dest;
2098 /* Locate the block with the trap instruction. */
2099 /* ??? While we look for no successors, we really ought to allow
2100 EH successors. Need to fix merge_if_block for that to work. */
2101 /* ??? We can't currently handle merging the blocks if they are not
2102 already adjacent. Prevent losage in merge_if_block by detecting
/* A trap block never falls through, so it is the arm with no
   successors; the other arm must be the physically next block.  */
2104 if (then_bb->succ == NULL)
2107 if (else_bb->index != then_bb->index + 1)
2112 else if (else_bb->succ == NULL)
2115 if (else_bb->index != then_bb->index + 1)
2117 else if (then_bb->succ
2118 && ! then_bb->succ->succ_next
2119 && ! (then_bb->succ->flags & EDGE_COMPLEX)
2120 && then_bb->succ->dest->index == else_bb->index + 1)
2121 join_bb = then_bb->succ->dest;
2126 /* Don't confuse a conditional return with something we want to
2128 if (trap_bb == EXIT_BLOCK_PTR)
2131 /* The only instruction in the THEN block must be the trap. */
/* The trap must also be unconditional (TRAP_CONDITION == const_true_rtx),
   since we are about to make it conditional on the jump's condition.  */
2132 trap = first_active_insn (trap_bb);
2133 if (! (trap == trap_bb->end
2134 && GET_CODE (PATTERN (trap)) == TRAP_IF
2135 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2140 if (trap_bb == then_bb)
2141 fprintf (rtl_dump_file,
2142 "\nTRAP-IF block found, start %d, trap %d",
2143 test_bb->index, then_bb->index);
2145 fprintf (rtl_dump_file,
2146 "\nTRAP-IF block found, start %d, then %d, trap %d",
2147 test_bb->index, then_bb->index, trap_bb->index);
2149 fprintf (rtl_dump_file, ", join %d\n", join_bb->index);
2151 fputc ('\n', rtl_dump_file);
2154 /* If this is not a standard conditional jump, we can't parse it. */
2155 jump = test_bb->end;
2156 cond = noce_get_condition (jump, &cond_earliest);
2160 /* If the conditional jump is more than just a conditional jump,
2161 then we can not do if-conversion on this block. */
2162 if (! onlyjump_p (jump))
2165 /* We must be comparing objects whose modes imply the size. */
2166 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2169 /* Reverse the comparison code, if necessary. */
/* If the branch jumps OVER the trap (trap is the fallthru arm), the
   trap fires when the branch is NOT taken, so invert the condition.  */
2170 code = GET_CODE (cond);
2171 if (then_bb == trap_bb)
2173 code = reversed_comparison_code (cond, jump);
2174 if (code == UNKNOWN)
2178 /* Attempt to generate the conditional trap. */
2179 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2180 TRAP_CODE (PATTERN (trap)));
2184 /* Emit the new insns before cond_earliest; delete the old jump
2187 emit_insn_before (seq, cond_earliest);
2189 test_bb->end = PREV_INSN (jump);
2190 flow_delete_insn (jump);
2192 trap_bb->end = PREV_INSN (trap);
2193 flow_delete_insn (trap);
2195 /* Merge the blocks! */
/* If the trap lived on the ELSE arm of an IF-THEN (no separate ELSE
   block to merge), just delete the now-empty trap block.  */
2196 if (trap_bb != then_bb && ! else_bb)
2198 flow_delete_block (trap_bb);
2199 num_removed_blocks++;
2201 merge_if_block (test_bb, then_bb, else_bb, join_bb);
2206 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2207 transformable, but not necessarily the other. There need be no
2210 Return TRUE if we were successful at converting the block.
2212 Cases we'd like to look at:
2215 if (test) goto over; // x not live
2223 if (! test) goto label;
2226 if (test) goto E; // x not live
2240 (3) // This one's really only interesting for targets that can do
2241 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2242 // it results in multiple branches on a cache line, which often
2243 // does not sit well with predictors.
2245 if (test1) goto E; // predicted not taken
2261 (A) Don't do (2) if the branch is predicted against the block we're
2262 eliminating. Do it anyway if we can eliminate a branch; this requires
2263 that the sole successor of the eliminated block postdominate the other
2266 (B) With CE, on (3) we can steal from both sides of the if, creating
2275 Again, this is most useful if J postdominates.
2277 (C) CE substitutes for helpful life information.
2279 (D) These heuristics need a lot of work. */
2281 /* Tests for case 1 above. */
2284 find_if_case_1 (test_bb, then_edge, else_edge)
2285 basic_block test_bb;
2286 edge then_edge, else_edge;
/* Half-diamond: THEN is a small side block that does not rejoin via
   fallthru.  If its stores are dead or predicable, hoist its insns
   into TEST and retarget the fallthru edge at ELSE.  */
2288 basic_block then_bb = then_edge->dest;
2289 basic_block else_bb = else_edge->dest, new_bb;
2290 edge then_succ = then_bb->succ;
2292 /* THEN has one successor. */
2293 if (!then_succ || then_succ->succ_next != NULL)
2296 /* THEN does not fall through, but is not strange either. */
2297 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2300 /* THEN has one predecessor. */
2301 if (then_bb->pred->pred_next != NULL)
2304 /* THEN must do something. */
2305 if (forwarder_block_p (then_bb))
2308 num_possible_if_blocks++;
2310 fprintf (rtl_dump_file,
2311 "\nIF-CASE-1 found, start %d, then %d\n",
2312 test_bb->index, then_bb->index);
2314 /* THEN is small. */
2315 if (count_bb_insns (then_bb) > BRANCH_COST)
2318 /* Registers set are dead, or are predicable. */
2319 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2320 then_bb->succ->dest, 1))
2323 /* Conversion went ok, including moving the insns and fixing up the
2324 jump. Adjust the CFG to match. */
/* TEST now reaches both former successors' live sets; union them.  */
2326 SET_UPDATE_LIFE (test_bb);
2327 bitmap_operation (test_bb->global_live_at_end,
2328 else_bb->global_live_at_start,
2329 then_bb->global_live_at_end, BITMAP_IOR);
2331 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2332 /* Make rest of code believe that the newly created block is the THEN_BB
2333 block we are going to remove. */
2336 new_bb->aux = then_bb->aux;
2337 SET_UPDATE_LIFE (then_bb);
2339 flow_delete_block (then_bb);
2340 /* We've possibly created jump to next insn, cleanup_cfg will solve that
2343 num_removed_blocks++;
2344 num_updated_if_blocks++;
2349 /* Test for case 2 above. */
2352 find_if_case_2 (test_bb, then_edge, else_edge)
2353 basic_block test_bb;
2354 edge then_edge, else_edge;
2356 basic_block then_bb = then_edge->dest;
2357 basic_block else_bb = else_edge->dest;
2358 edge else_succ = else_bb->succ;
2361 /* ELSE has one successor. */
2362 if (!else_succ || else_succ->succ_next != NULL)
2365 /* ELSE outgoing edge is not complex. */
2366 if (else_succ->flags & EDGE_COMPLEX)
2369 /* ELSE has one predecessor. */
2370 if (else_bb->pred->pred_next != NULL)
2373 /* THEN is not EXIT. */
2374 if (then_bb->index < 0)
2377 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2378 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2379 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2381 else if (else_succ->dest->index < 0
2382 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2383 ORIG_INDEX (else_succ->dest)))
2388 num_possible_if_blocks++;
2390 fprintf (rtl_dump_file,
2391 "\nIF-CASE-2 found, start %d, else %d\n",
2392 test_bb->index, else_bb->index);
2394 /* ELSE is small. */
2395 if (count_bb_insns (then_bb) > BRANCH_COST)
2398 /* Registers set are dead, or are predicable. */
2399 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2402 /* Conversion went ok, including moving the insns and fixing up the
2403 jump. Adjust the CFG to match. */
2405 SET_UPDATE_LIFE (test_bb);
2406 bitmap_operation (test_bb->global_live_at_end,
2407 then_bb->global_live_at_start,
2408 else_bb->global_live_at_end, BITMAP_IOR);
2410 flow_delete_block (else_bb);
2412 num_removed_blocks++;
2413 num_updated_if_blocks++;
2415 /* ??? We may now fallthru from one of THEN's successors into a join
2416 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2421 /* A subroutine of dead_or_predicable called through for_each_rtx.
2422 Return 1 if a memory is found. */
/* PX points at the sub-rtx being visited; DATA is unused.  Any nonzero
   return makes for_each_rtx stop and report a hit.  */
2425 find_memory (px, data)
2427 void *data ATTRIBUTE_UNUSED;
2429 return GET_CODE (*px) == MEM;
2432 /* Used by the code above to perform the actual rtl transformations.
2433 Return TRUE if successful.
2435 TEST_BB is the block containing the conditional branch. MERGE_BB
2436 is the block containing the code to manipulate. NEW_DEST is the
2437 label TEST_BB should be branching to after the conversion.
2438 REVERSEP is true if the sense of the branch should be reversed. */
2441 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2442 basic_block test_bb, merge_bb, other_bb;
2443 basic_block new_dest;
2446 rtx head, end, jump, earliest, old_dest, new_label;
2448 jump = test_bb->end;
2450 /* Find the extent of the real code in the merge block. */
/* Trim the leading label/note and trailing jump so [head, end] spans
   only the insns to be moved or predicated.  */
2451 head = merge_bb->head;
2452 end = merge_bb->end;
2454 if (GET_CODE (head) == CODE_LABEL)
2455 head = NEXT_INSN (head);
2456 if (GET_CODE (head) == NOTE)
2460 head = end = NULL_RTX;
2463 head = NEXT_INSN (head);
2466 if (GET_CODE (end) == JUMP_INSN)
2470 head = end = NULL_RTX;
2473 end = PREV_INSN (end);
2476 /* Disable handling dead code by conditional execution if the machine needs
2477 to do anything funny with the tests, etc. */
2478 #ifndef IFCVT_MODIFY_TESTS
2479 if (HAVE_conditional_execution)
2481 /* In the conditional execution case, we have things easy. We know
2482 the condition is reversible. We don't have to check life info,
2483 because we're going to conditionally execute the code anyway.
2484 All that's left is making sure the insns involved can actually
2489 cond = cond_exec_get_condition (jump);
2493 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2495 prob_val = XEXP (prob_val, 0);
/* When predicating the not-taken arm, invert both the condition and
   the branch probability.  */
2499 enum rtx_code rev = reversed_comparison_code (cond, jump);
2502 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2505 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2508 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2516 /* In the non-conditional execution case, we have to verify that there
2517 are no trapping operations, no calls, no references to memory, and
2518 that any registers modified are dead at the branch site. */
2520 rtx insn, cond, prev;
2521 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2522 regset merge_set, tmp, test_live, test_set;
2523 struct propagate_block_info *pbi;
2526 /* Check for no calls or trapping operations. */
2527 for (insn = head; ; insn = NEXT_INSN (insn))
2529 if (GET_CODE (insn) == CALL_INSN)
2533 if (may_trap_p (PATTERN (insn)))
2536 /* ??? Even non-trapping memories such as stack frame
2537 references must be avoided. For stores, we collect
2538 no lifetime info; for reads, we'd have to assert
2539 true_dependence false against every store in the
2541 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2548 if (! any_condjump_p (jump))
2551 /* Find the extent of the conditional. */
2552 cond = noce_get_condition (jump, &earliest)
2557 MERGE_SET = set of registers set in MERGE_BB
2558 TEST_LIVE = set of registers live at EARLIEST
2559 TEST_SET = set of registers set between EARLIEST and the
2560 end of the block. */
2562 tmp = INITIALIZE_REG_SET (tmp_head);
2563 merge_set = INITIALIZE_REG_SET (merge_set_head);
2564 test_live = INITIALIZE_REG_SET (test_live_head);
2565 test_set = INITIALIZE_REG_SET (test_set_head);
2567 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2568 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2569 since we've already asserted that MERGE_BB is small. */
2570 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2572 /* For small register class machines, don't lengthen lifetimes of
2573 hard registers before reload. */
2574 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2576 EXECUTE_IF_SET_IN_BITMAP
2579 if (i < FIRST_PSEUDO_REGISTER
2581 && ! global_regs[i])
2586 /* For TEST, we're interested in a range of insns, not a whole block.
2587 Moreover, we're interested in the insns live from OTHER_BB. */
/* Walk backwards from the jump to EARLIEST with propagate_one_insn,
   accumulating TEST_LIVE and TEST_SET over just that insn range.  */
2589 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2590 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2593 for (insn = jump; ; insn = prev)
2595 prev = propagate_one_insn (pbi, insn);
2596 if (insn == earliest)
2600 free_propagate_block_info (pbi);
2602 /* We can perform the transformation if
2603 MERGE_SET & (TEST_SET | TEST_LIVE)
2605 TEST_SET & merge_bb->global_live_at_start
2608 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2609 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2610 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2612 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2614 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2617 FREE_REG_SET (merge_set);
2618 FREE_REG_SET (test_live);
2619 FREE_REG_SET (test_set);
2626 /* We don't want to use normal invert_jump or redirect_jump because
2627 we don't want to delete_insn called. Also, we want to do our own
2628 change group management. */
2630 old_dest = JUMP_LABEL (jump);
2631 new_label = block_label (new_dest);
2633 ? ! invert_jump_1 (jump, new_label)
2634 : ! redirect_jump_1 (jump, new_label))
2637 if (! apply_change_group ())
/* Maintain label use counts and JUMP_LABEL by hand, since we bypassed
   redirect_jump/invert_jump above.  */
2641 LABEL_NUSES (old_dest) -= 1;
2643 LABEL_NUSES (new_label) += 1;
2644 JUMP_LABEL (jump) = new_label;
2647 invert_br_probabilities (jump);
2649 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
/* Swap counts and probabilities between the two outgoing edges to
   match the inverted branch.  */
2652 gcov_type count, probability;
2653 count = BRANCH_EDGE (test_bb)->count;
2654 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
2655 FALLTHRU_EDGE (test_bb)->count = count;
2656 probability = BRANCH_EDGE (test_bb)->probability;
2657 BRANCH_EDGE (test_bb)->probability = FALLTHRU_EDGE (test_bb)->probability;
2658 FALLTHRU_EDGE (test_bb)->probability = probability;
2661 /* Move the insns out of MERGE_BB to before the branch. */
2664 if (end == merge_bb->end)
2665 merge_bb->end = PREV_INSN (head);
/* Don't drag block/loop notes along; shrink [head, end] past them.  */
2667 head = squeeze_notes (head, end);
2668 if (GET_CODE (end) == NOTE
2669 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2670 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2671 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2672 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2673 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2674 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2678 end = PREV_INSN (end);
2681 reorder_insns (head, end, PREV_INSN (earliest));
2690 /* Main entry point for all if-conversion. */
2693 if_convert (x_life_data_ok)
2698 num_possible_if_blocks = 0;
2699 num_updated_if_blocks = 0;
2700 num_removed_blocks = 0;
2701 life_data_ok = (x_life_data_ok != 0);
2703 /* Free up basic_block_for_insn so that we don't have to keep it
2704 up to date, either here or in merge_blocks_nomove. */
2705 free_basic_block_vars (1);
2707 /* Compute postdominators if we think we'll use them. */
2708 post_dominators = NULL;
2709 if (HAVE_conditional_execution || life_data_ok)
2711 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2712 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2715 /* Record initial block numbers. */
2716 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2717 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2719 /* Go through each of the basic blocks looking for things to convert. */
2720 for (block_num = 0; block_num < n_basic_blocks; )
2722 basic_block bb = BASIC_BLOCK (block_num);
2723 if (find_if_header (bb))
2724 block_num = bb->index;
2729 if (post_dominators)
2730 sbitmap_vector_free (post_dominators);
2733 fflush (rtl_dump_file);
2735 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2736 compute_bb_for_insn (get_max_uid ());
2738 /* Rebuild life info for basic blocks that require it. */
2739 if (num_removed_blocks && life_data_ok)
2741 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2742 sbitmap_zero (update_life_blocks);
2744 /* If we allocated new pseudos, we must resize the array for sched1. */
2745 if (max_regno < max_reg_num ())
2747 max_regno = max_reg_num ();
2748 allocate_reg_info (max_regno, FALSE, FALSE);
2751 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2752 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2753 SET_BIT (update_life_blocks, block_num);
2755 count_or_remove_death_notes (update_life_blocks, 1);
2756 /* ??? See about adding a mode that verifies that the initial
2757 set of blocks don't let registers come live. */
2758 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2759 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2760 | PROP_KILL_DEAD_CODE);
2762 sbitmap_free (update_life_blocks);
2765 /* Write the final stats. */
2766 if (rtl_dump_file && num_possible_if_blocks > 0)
2768 fprintf (rtl_dump_file,
2769 "\n%d possible IF blocks searched.\n",
2770 num_possible_if_blocks);
2771 fprintf (rtl_dump_file,
2772 "%d IF blocks converted.\n",
2773 num_updated_if_blocks);
2774 fprintf (rtl_dump_file,
2775 "%d basic blocks deleted.\n\n\n",
2776 num_removed_blocks);
2779 #ifdef ENABLE_CHECKING
2780 verify_flow_info ();