1 /* If-conversion support.
2 Copyright (C) 2000, 2001 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-config.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
39 #ifndef HAVE_conditional_execution
40 #define HAVE_conditional_execution 0
42 #ifndef HAVE_conditional_move
43 #define HAVE_conditional_move 0
54 #ifndef HAVE_conditional_trap
55 #define HAVE_conditional_trap 0
58 #ifndef MAX_CONDITIONAL_EXECUTE
59 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
62 #define NULL_EDGE ((struct edge_def *)NULL)
63 #define NULL_BLOCK ((struct basic_block_def *)NULL)
65 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
66 static int num_possible_if_blocks;
68 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
70 static int num_updated_if_blocks;
72 /* # of basic blocks that were removed. */
73 static int num_removed_blocks;
75 /* True if life data ok at present. */
76 static bool life_data_ok;
78 /* The post-dominator relation on the original block numbers. */
79 static sbitmap *post_dominators;
81 /* Forward references. */
82 static int count_bb_insns PARAMS ((basic_block));
83 static rtx first_active_insn PARAMS ((basic_block));
84 static int last_active_insn_p PARAMS ((basic_block, rtx));
85 static int seq_contains_jump PARAMS ((rtx));
87 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
88 static rtx cond_exec_get_condition PARAMS ((rtx));
89 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
90 basic_block, basic_block));
92 static rtx noce_get_condition PARAMS ((rtx, rtx *));
93 static int noce_operand_ok PARAMS ((rtx));
94 static int noce_process_if_block PARAMS ((basic_block, basic_block,
95 basic_block, basic_block));
97 static int process_if_block PARAMS ((basic_block, basic_block,
98 basic_block, basic_block));
99 static void merge_if_block PARAMS ((basic_block, basic_block,
100 basic_block, basic_block));
102 static int find_if_header PARAMS ((basic_block));
103 static int find_if_block PARAMS ((basic_block, edge, edge));
104 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
105 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
106 static int find_cond_trap PARAMS ((basic_block, edge, edge));
107 static int find_memory PARAMS ((rtx *, void *));
108 static int dead_or_predicable PARAMS ((basic_block, basic_block,
109 basic_block, basic_block, int));
110 static void noce_emit_move_insn PARAMS ((rtx, rtx));
112 /* Abuse the basic_block AUX field to store the original block index,
113 as well as a flag indicating that the block should be rescaned for
116 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
117 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
118 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
119 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
122 /* Count the number of non-jump active insns in BB. */
133 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
138 insn = NEXT_INSN (insn);
144 /* Return the first non-jump active insn in the basic block. */
147 first_active_insn (bb)
152 if (GET_CODE (insn) == CODE_LABEL)
156 insn = NEXT_INSN (insn);
159 while (GET_CODE (insn) == NOTE)
163 insn = NEXT_INSN (insn);
166 if (GET_CODE (insn) == JUMP_INSN)
172 /* Return true if INSN is the last active non-jump insn in BB. */
175 last_active_insn_p (bb, insn)
183 insn = NEXT_INSN (insn);
185 while (GET_CODE (insn) == NOTE);
187 return GET_CODE (insn) == JUMP_INSN;
190 /* It is possible, especially when having dealt with multi-word
191 arithmetic, for the expanders to have emitted jumps. Search
192 through the sequence and return TRUE if a jump exists so that
193 we can abort the conversion. */
196 seq_contains_jump (insn)
201 if (GET_CODE (insn) == JUMP_INSN)
203 insn = NEXT_INSN (insn);
208 /* Go through a bunch of insns, converting them to conditional
209 execution format if possible. Return TRUE if all of the non-note
210 insns were processed. */
213 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
214 rtx start; /* first insn to look at */
215 rtx end; /* last insn to look at */
216 rtx test; /* conditional execution test */
217 rtx prob_val; /* probability of branch taken. */
218 int mod_ok; /* true if modifications ok last insn. */
220 int must_be_last = FALSE;
224 for (insn = start; ; insn = NEXT_INSN (insn))
226 if (GET_CODE (insn) == NOTE)
229 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
232 /* Remove USE insns that get in the way. */
233 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
235 /* ??? Ug. Actually unlinking the thing is problematic,
236 given what we'd have to coordinate with our callers. */
237 PUT_CODE (insn, NOTE);
238 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
239 NOTE_SOURCE_FILE (insn) = 0;
243 /* Last insn wasn't last? */
247 if (modified_in_p (test, insn))
254 /* Now build the conditional form of the instruction. */
255 pattern = PATTERN (insn);
257 /* If the machine needs to modify the insn being conditionally executed,
258 say for example to force a constant integer operand into a temp
259 register, do so here. */
260 #ifdef IFCVT_MODIFY_INSN
261 IFCVT_MODIFY_INSN (pattern, insn);
266 validate_change (insn, &PATTERN (insn),
267 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
270 if (GET_CODE (insn) == CALL_INSN && prob_val)
271 validate_change (insn, ®_NOTES (insn),
272 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
273 REG_NOTES (insn)), 1);
283 /* Return the condition for a jump. Do not do any special processing. */
286 cond_exec_get_condition (jump)
291 if (any_condjump_p (jump))
292 test_if = SET_SRC (pc_set (jump));
295 cond = XEXP (test_if, 0);
297 /* If this branches to JUMP_LABEL when the condition is false,
298 reverse the condition. */
299 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
300 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
302 enum rtx_code rev = reversed_comparison_code (cond, jump);
306 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
313 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
314 to conditional execution. Return TRUE if we were successful at
315 converting the the block. */
318 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
319 basic_block test_bb; /* Basic block test is in */
320 basic_block then_bb; /* Basic block for THEN block */
321 basic_block else_bb; /* Basic block for ELSE block */
322 basic_block join_bb; /* Basic block the join label is in */
324 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
325 rtx then_start; /* first insn in THEN block */
326 rtx then_end; /* last insn + 1 in THEN block */
327 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
328 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
329 int max; /* max # of insns to convert. */
330 int then_mod_ok; /* whether conditional mods are ok in THEN */
331 rtx true_expr; /* test for else block insns */
332 rtx false_expr; /* test for then block insns */
333 rtx true_prob_val; /* probability of else block */
334 rtx false_prob_val; /* probability of then block */
336 enum rtx_code false_code;
338 /* Find the conditional jump to the ELSE or JOIN part, and isolate
340 test_expr = cond_exec_get_condition (test_bb->end);
344 /* If the conditional jump is more than just a conditional jump,
345 then we can not do conditional execution conversion on this block. */
346 if (!onlyjump_p (test_bb->end))
349 /* Collect the bounds of where we're to search. */
351 then_start = then_bb->head;
352 then_end = then_bb->end;
354 /* Skip a label heading THEN block. */
355 if (GET_CODE (then_start) == CODE_LABEL)
356 then_start = NEXT_INSN (then_start);
358 /* Skip a (use (const_int 0)) or branch as the final insn. */
359 if (GET_CODE (then_end) == INSN
360 && GET_CODE (PATTERN (then_end)) == USE
361 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
362 then_end = PREV_INSN (then_end);
363 else if (GET_CODE (then_end) == JUMP_INSN)
364 then_end = PREV_INSN (then_end);
368 /* Skip the ELSE block's label. */
369 else_start = NEXT_INSN (else_bb->head);
370 else_end = else_bb->end;
372 /* Skip a (use (const_int 0)) or branch as the final insn. */
373 if (GET_CODE (else_end) == INSN
374 && GET_CODE (PATTERN (else_end)) == USE
375 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
376 else_end = PREV_INSN (else_end);
377 else if (GET_CODE (else_end) == JUMP_INSN)
378 else_end = PREV_INSN (else_end);
381 /* How many instructions should we convert in total? */
385 max = 2 * MAX_CONDITIONAL_EXECUTE;
386 n_insns = count_bb_insns (else_bb);
389 max = MAX_CONDITIONAL_EXECUTE;
390 n_insns += count_bb_insns (then_bb);
394 /* Map test_expr/test_jump into the appropriate MD tests to use on
395 the conditionally executed code. */
397 true_expr = test_expr;
399 false_code = reversed_comparison_code (true_expr, test_bb->end);
400 if (false_code != UNKNOWN)
401 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
402 XEXP (true_expr, 0), XEXP (true_expr, 1));
404 false_expr = NULL_RTX;
406 #ifdef IFCVT_MODIFY_TESTS
407 /* If the machine description needs to modify the tests, such as setting a
408 conditional execution register from a comparison, it can do so here. */
409 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
412 /* See if the conversion failed */
413 if (!true_expr || !false_expr)
417 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
420 true_prob_val = XEXP (true_prob_val, 0);
421 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
424 false_prob_val = NULL_RTX;
426 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
427 on then THEN block. */
428 then_mod_ok = (else_bb == NULL_BLOCK);
430 /* Go through the THEN and ELSE blocks converting the insns if possible
431 to conditional execution. */
435 || ! cond_exec_process_insns (then_start, then_end, false_expr,
436 false_prob_val, then_mod_ok)))
440 && ! cond_exec_process_insns (else_start, else_end,
441 true_expr, true_prob_val, TRUE))
444 if (! apply_change_group ())
447 #ifdef IFCVT_MODIFY_FINAL
448 /* Do any machine dependent final modifications */
449 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
452 /* Conversion succeeded. */
454 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
455 n_insns, (n_insns == 1) ? " was" : "s were");
457 /* Merge the blocks! */
458 merge_if_block (test_bb, then_bb, else_bb, join_bb);
462 #ifdef IFCVT_MODIFY_CANCEL
463 /* Cancel any machine dependent changes. */
464 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
471 /* Used by noce_process_if_block to communicate with its subroutines.
473 The subroutines know that A and B may be evaluated freely. They
474 know that X is a register. They should insert new instructions
475 before cond_earliest. */
482 rtx jump, cond, cond_earliest;
485 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
487 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
488 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
489 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
490 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
491 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
492 rtx, enum rtx_code, rtx,
494 static int noce_try_cmove PARAMS ((struct noce_if_info *));
495 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
496 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
498 static int noce_try_minmax PARAMS ((struct noce_if_info *));
499 static int noce_try_abs PARAMS ((struct noce_if_info *));
501 /* Helper function for noce_try_store_flag*. */
504 noce_emit_store_flag (if_info, x, reversep, normalize)
505 struct noce_if_info *if_info;
507 int reversep, normalize;
509 rtx cond = if_info->cond;
513 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
514 || ! general_operand (XEXP (cond, 1), VOIDmode));
516 /* If earliest == jump, or when the condition is complex, try to
517 build the store_flag insn directly. */
520 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
523 code = reversed_comparison_code (cond, if_info->jump);
525 code = GET_CODE (cond);
527 if ((if_info->cond_earliest == if_info->jump || cond_complex)
528 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
532 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
534 tmp = gen_rtx_SET (VOIDmode, x, tmp);
537 tmp = emit_insn (tmp);
539 if (recog_memoized (tmp) >= 0)
545 if_info->cond_earliest = if_info->jump;
553 /* Don't even try if the comparison operands are weird. */
557 return emit_store_flag (x, code, XEXP (cond, 0),
558 XEXP (cond, 1), VOIDmode,
559 (code == LTU || code == LEU
560 || code == GEU || code == GTU), normalize);
563 /* Emit instruction to move a rtx into STRICT_LOW_PART. */
565 noce_emit_move_insn (x, y)
568 enum machine_mode outmode, inmode;
572 if (GET_CODE (x) != STRICT_LOW_PART)
574 emit_move_insn (x, y);
579 inner = XEXP (outer, 0);
580 outmode = GET_MODE (outer);
581 inmode = GET_MODE (inner);
582 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
583 store_bit_field (inner, GET_MODE_BITSIZE (outmode),
584 bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
585 GET_MODE_BITSIZE (inmode));
588 /* Convert "if (test) x = 1; else x = 0".
590 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
591 tried in noce_try_store_flag_constants after noce_try_cmove has had
592 a go at the conversion. */
595 noce_try_store_flag (if_info)
596 struct noce_if_info *if_info;
601 if (GET_CODE (if_info->b) == CONST_INT
602 && INTVAL (if_info->b) == STORE_FLAG_VALUE
603 && if_info->a == const0_rtx)
605 else if (if_info->b == const0_rtx
606 && GET_CODE (if_info->a) == CONST_INT
607 && INTVAL (if_info->a) == STORE_FLAG_VALUE
608 && (reversed_comparison_code (if_info->cond, if_info->jump)
616 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
619 if (target != if_info->x)
620 noce_emit_move_insn (if_info->x, target);
624 emit_insns_before (seq, if_info->cond_earliest);
635 /* Convert "if (test) x = a; else x = b", for A and B constant. */
638 noce_try_store_flag_constants (if_info)
639 struct noce_if_info *if_info;
643 HOST_WIDE_INT itrue, ifalse, diff, tmp;
644 int normalize, can_reverse;
645 enum machine_mode mode;
648 && GET_CODE (if_info->a) == CONST_INT
649 && GET_CODE (if_info->b) == CONST_INT)
651 mode = GET_MODE (if_info->x);
652 ifalse = INTVAL (if_info->a);
653 itrue = INTVAL (if_info->b);
654 diff = trunc_int_for_mode (itrue - ifalse, mode);
656 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
660 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
662 else if (ifalse == 0 && exact_log2 (itrue) >= 0
663 && (STORE_FLAG_VALUE == 1
664 || BRANCH_COST >= 2))
666 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
667 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
668 normalize = 1, reversep = 1;
670 && (STORE_FLAG_VALUE == -1
671 || BRANCH_COST >= 2))
673 else if (ifalse == -1 && can_reverse
674 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
675 normalize = -1, reversep = 1;
676 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
684 tmp = itrue; itrue = ifalse; ifalse = tmp;
685 diff = trunc_int_for_mode (-diff, mode);
689 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
696 /* if (test) x = 3; else x = 4;
697 => x = 3 + (test == 0); */
698 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
700 target = expand_binop (mode,
701 (diff == STORE_FLAG_VALUE
702 ? add_optab : sub_optab),
703 GEN_INT (ifalse), target, if_info->x, 0,
707 /* if (test) x = 8; else x = 0;
708 => x = (test != 0) << 3; */
709 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
711 target = expand_binop (mode, ashl_optab,
712 target, GEN_INT (tmp), if_info->x, 0,
716 /* if (test) x = -1; else x = b;
717 => x = -(test != 0) | b; */
718 else if (itrue == -1)
720 target = expand_binop (mode, ior_optab,
721 target, GEN_INT (ifalse), if_info->x, 0,
725 /* if (test) x = a; else x = b;
726 => x = (-(test != 0) & (b - a)) + a; */
729 target = expand_binop (mode, and_optab,
730 target, GEN_INT (diff), if_info->x, 0,
733 target = expand_binop (mode, add_optab,
734 target, GEN_INT (ifalse), if_info->x, 0,
744 if (target != if_info->x)
745 noce_emit_move_insn (if_info->x, target);
750 if (seq_contains_jump (seq))
753 emit_insns_before (seq, if_info->cond_earliest);
761 /* Convert "if (test) foo++" into "foo += (test != 0)", and
762 similarly for "foo--". */
765 noce_try_store_flag_inc (if_info)
766 struct noce_if_info *if_info;
769 int subtract, normalize;
775 /* Should be no `else' case to worry about. */
776 && if_info->b == if_info->x
777 && GET_CODE (if_info->a) == PLUS
778 && (XEXP (if_info->a, 1) == const1_rtx
779 || XEXP (if_info->a, 1) == constm1_rtx)
780 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
781 && (reversed_comparison_code (if_info->cond, if_info->jump)
784 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
785 subtract = 0, normalize = 0;
786 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
787 subtract = 1, normalize = 0;
789 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
793 target = noce_emit_store_flag (if_info,
794 gen_reg_rtx (GET_MODE (if_info->x)),
798 target = expand_binop (GET_MODE (if_info->x),
799 subtract ? sub_optab : add_optab,
800 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
803 if (target != if_info->x)
804 noce_emit_move_insn (if_info->x, target);
809 if (seq_contains_jump (seq))
812 emit_insns_before (seq, if_info->cond_earliest);
823 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
826 noce_try_store_flag_mask (if_info)
827 struct noce_if_info *if_info;
835 || STORE_FLAG_VALUE == -1)
836 && ((if_info->a == const0_rtx
837 && rtx_equal_p (if_info->b, if_info->x))
838 || ((reversep = (reversed_comparison_code (if_info->cond,
841 && if_info->b == const0_rtx
842 && rtx_equal_p (if_info->a, if_info->x))))
845 target = noce_emit_store_flag (if_info,
846 gen_reg_rtx (GET_MODE (if_info->x)),
849 target = expand_binop (GET_MODE (if_info->x), and_optab,
850 if_info->x, target, if_info->x, 0,
855 if (target != if_info->x)
856 noce_emit_move_insn (if_info->x, target);
861 if (seq_contains_jump (seq))
864 emit_insns_before (seq, if_info->cond_earliest);
875 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
878 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
879 struct noce_if_info *if_info;
880 rtx x, cmp_a, cmp_b, vfalse, vtrue;
883 /* If earliest == jump, try to build the cmove insn directly.
884 This is helpful when combine has created some complex condition
885 (like for alpha's cmovlbs) that we can't hope to regenerate
886 through the normal interface. */
888 if (if_info->cond_earliest == if_info->jump)
892 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
893 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
894 tmp = gen_rtx_SET (VOIDmode, x, tmp);
897 tmp = emit_insn (tmp);
899 if (recog_memoized (tmp) >= 0)
911 /* Don't even try if the comparison operands are weird. */
912 if (! general_operand (cmp_a, GET_MODE (cmp_a))
913 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
916 #if HAVE_conditional_move
917 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
918 vtrue, vfalse, GET_MODE (x),
919 (code == LTU || code == GEU
920 || code == LEU || code == GTU));
922 /* We'll never get here, as noce_process_if_block doesn't call the
923 functions involved. Ifdef code, however, should be discouraged
924 because it leads to typos in the code not selected. However,
925 emit_conditional_move won't exist either. */
930 /* Try only simple constants and registers here. More complex cases
931 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
932 has had a go at it. */
935 noce_try_cmove (if_info)
936 struct noce_if_info *if_info;
941 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
942 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
946 code = GET_CODE (if_info->cond);
947 target = noce_emit_cmove (if_info, if_info->x, code,
948 XEXP (if_info->cond, 0),
949 XEXP (if_info->cond, 1),
950 if_info->a, if_info->b);
954 if (target != if_info->x)
955 noce_emit_move_insn (if_info->x, target);
959 emit_insns_before (seq, if_info->cond_earliest);
972 /* Try more complex cases involving conditional_move. */
975 noce_try_cmove_arith (if_info)
976 struct noce_if_info *if_info;
986 /* A conditional move from two memory sources is equivalent to a
987 conditional on their addresses followed by a load. Don't do this
988 early because it'll screw alias analysis. Note that we've
989 already checked for no side effects. */
990 if (! no_new_pseudos && cse_not_expected
991 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
996 x = gen_reg_rtx (Pmode);
1000 /* ??? We could handle this if we knew that a load from A or B could
1001 not fault. This is also true if we've already loaded
1002 from the address along the path from ENTRY. */
1003 else if (may_trap_p (a) || may_trap_p (b))
1006 /* if (test) x = a + b; else x = c - d;
1013 code = GET_CODE (if_info->cond);
1014 insn_a = if_info->insn_a;
1015 insn_b = if_info->insn_b;
1017 /* Possibly rearrange operands to make things come out more natural. */
1018 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1021 if (rtx_equal_p (b, x))
1023 else if (general_operand (b, GET_MODE (b)))
1028 code = reversed_comparison_code (if_info->cond, if_info->jump);
1029 tmp = a, a = b, b = tmp;
1030 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1036 /* If either operand is complex, load it into a register first.
1037 The best way to do this is to copy the original insn. In this
1038 way we preserve any clobbers etc that the insn may have had.
1039 This is of course not possible in the IS_MEM case. */
1040 if (! general_operand (a, GET_MODE (a)))
1045 goto end_seq_and_fail;
1049 tmp = gen_reg_rtx (GET_MODE (a));
1050 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1053 goto end_seq_and_fail;
1056 a = gen_reg_rtx (GET_MODE (a));
1057 tmp = copy_rtx (insn_a);
1058 set = single_set (tmp);
1060 tmp = emit_insn (PATTERN (tmp));
1062 if (recog_memoized (tmp) < 0)
1063 goto end_seq_and_fail;
1065 if (! general_operand (b, GET_MODE (b)))
1070 goto end_seq_and_fail;
1074 tmp = gen_reg_rtx (GET_MODE (b));
1075 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1078 goto end_seq_and_fail;
1081 b = gen_reg_rtx (GET_MODE (b));
1082 tmp = copy_rtx (insn_b);
1083 set = single_set (tmp);
1085 tmp = emit_insn (PATTERN (tmp));
1087 if (recog_memoized (tmp) < 0)
1088 goto end_seq_and_fail;
1091 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1092 XEXP (if_info->cond, 1), a, b);
1095 goto end_seq_and_fail;
1097 /* If we're handling a memory for above, emit the load now. */
1100 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1102 /* Copy over flags as appropriate. */
1103 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1104 MEM_VOLATILE_P (tmp) = 1;
1105 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1106 MEM_IN_STRUCT_P (tmp) = 1;
1107 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1108 MEM_SCALAR_P (tmp) = 1;
1109 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1110 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1112 noce_emit_move_insn (if_info->x, tmp);
1114 else if (target != x)
1115 noce_emit_move_insn (x, target);
1119 emit_insns_before (tmp, if_info->cond_earliest);
1127 /* For most cases, the simplified condition we found is the best
1128 choice, but this is not the case for the min/max/abs transforms.
1129 For these we wish to know that it is A or B in the condition. */
1132 noce_get_alt_condition (if_info, target, earliest)
1133 struct noce_if_info *if_info;
1137 rtx cond, set, insn;
1140 /* If target is already mentioned in the known condition, return it. */
1141 if (reg_mentioned_p (target, if_info->cond))
1143 *earliest = if_info->cond_earliest;
1144 return if_info->cond;
1147 set = pc_set (if_info->jump);
1148 cond = XEXP (SET_SRC (set), 0);
1150 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1151 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1153 /* If we're looking for a constant, try to make the conditional
1154 have that constant in it. There are two reasons why it may
1155 not have the constant we want:
1157 1. GCC may have needed to put the constant in a register, because
1158 the target can't compare directly against that constant. For
1159 this case, we look for a SET immediately before the comparison
1160 that puts a constant in that register.
1162 2. GCC may have canonicalized the conditional, for example
1163 replacing "if x < 4" with "if x <= 3". We can undo that (or
1164 make equivalent types of changes) to get the constants we need
1165 if they're off by one in the right direction. */
1167 if (GET_CODE (target) == CONST_INT)
1169 enum rtx_code code = GET_CODE (if_info->cond);
1170 rtx op_a = XEXP (if_info->cond, 0);
1171 rtx op_b = XEXP (if_info->cond, 1);
1174 /* First, look to see if we put a constant in a register. */
1175 prev_insn = PREV_INSN (if_info->cond_earliest);
1177 && INSN_P (prev_insn)
1178 && GET_CODE (PATTERN (prev_insn)) == SET)
1180 rtx src = find_reg_equal_equiv_note (prev_insn);
1182 src = SET_SRC (PATTERN (prev_insn));
1183 if (GET_CODE (src) == CONST_INT)
1185 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1187 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1190 if (GET_CODE (op_a) == CONST_INT)
1195 code = swap_condition (code);
1200 /* Now, look to see if we can get the right constant by
1201 adjusting the conditional. */
1202 if (GET_CODE (op_b) == CONST_INT)
1204 HOST_WIDE_INT desired_val = INTVAL (target);
1205 HOST_WIDE_INT actual_val = INTVAL (op_b);
1210 if (actual_val == desired_val + 1)
1213 op_b = GEN_INT (desired_val);
1217 if (actual_val == desired_val - 1)
1220 op_b = GEN_INT (desired_val);
1224 if (actual_val == desired_val - 1)
1227 op_b = GEN_INT (desired_val);
1231 if (actual_val == desired_val + 1)
1234 op_b = GEN_INT (desired_val);
1242 /* If we made any changes, generate a new conditional that is
1243 equivalent to what we started with, but has the right
1245 if (code != GET_CODE (if_info->cond)
1246 || op_a != XEXP (if_info->cond, 0)
1247 || op_b != XEXP (if_info->cond, 1))
1249 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1250 *earliest = if_info->cond_earliest;
1255 cond = canonicalize_condition (if_info->jump, cond, reverse,
1257 if (! cond || ! reg_mentioned_p (target, cond))
1260 /* We almost certainly searched back to a different place.
1261 Need to re-verify correct lifetimes. */
1263 /* X may not be mentioned in the range (cond_earliest, jump]. */
1264 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1265 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1268 /* A and B may not be modified in the range [cond_earliest, jump). */
1269 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1271 && (modified_in_p (if_info->a, insn)
1272 || modified_in_p (if_info->b, insn)))
1278 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1281 noce_try_minmax (if_info)
1282 struct noce_if_info *if_info;
1284 rtx cond, earliest, target, seq;
1289 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1293 /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
1294 will be resolved with an SMIN/SMAX. It wouldn't be too hard
1295 to get the target to tell us... */
1296 if (FLOAT_MODE_P (GET_MODE (if_info->x))
1297 && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1298 && ! flag_unsafe_math_optimizations)
1301 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1305 /* Verify the condition is of the form we expect, and canonicalize
1306 the comparison code. */
1307 code = GET_CODE (cond);
1308 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1310 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1313 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1315 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1317 code = swap_condition (code);
1322 /* Determine what sort of operation this is. Note that the code is for
1323 a taken branch, so the code->operation mapping appears backwards. */
1356 target = expand_binop (GET_MODE (if_info->x), op, if_info->a, if_info->b,
1357 if_info->x, unsignedp, OPTAB_WIDEN);
1363 if (target != if_info->x)
1364 noce_emit_move_insn (if_info->x, target);
1369 if (seq_contains_jump (seq))
1372 emit_insns_before (seq, earliest);
1373 if_info->cond = cond;
1374 if_info->cond_earliest = earliest;
1379 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1382 noce_try_abs (if_info)
1383 struct noce_if_info *if_info;
1385 rtx cond, earliest, target, seq, a, b, c;
1388 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1392 /* Recognize A and B as constituting an ABS or NABS. */
1395 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1397 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1399 c = a; a = b; b = c;
1405 cond = noce_get_alt_condition (if_info, b, &earliest);
1409 /* Verify the condition is of the form we expect. */
1410 if (rtx_equal_p (XEXP (cond, 0), b))
1412 else if (rtx_equal_p (XEXP (cond, 1), b))
1417 /* Verify that C is zero. Search backward through the block for
1418 a REG_EQUAL note if necessary. */
1421 rtx insn, note = NULL;
1422 for (insn = earliest;
1423 insn != if_info->test_bb->head;
1424 insn = PREV_INSN (insn))
1426 && ((note = find_reg_note (insn, REG_EQUAL, c))
1427 || (note = find_reg_note (insn, REG_EQUIV, c))))
1433 if (GET_CODE (c) == MEM
1434 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1435 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1436 c = get_pool_constant (XEXP (c, 0));
1438 /* Work around funny ideas get_condition has wrt canonicalization.
1439 Note that these rtx constants are known to be CONST_INT, and
1440 therefore imply integer comparisons. */
1441 if (c == constm1_rtx && GET_CODE (cond) == GT)
1443 else if (c == const1_rtx && GET_CODE (cond) == LT)
1445 else if (c != CONST0_RTX (GET_MODE (b)))
1448 /* Determine what sort of operation this is. */
1449 switch (GET_CODE (cond))
1468 target = expand_unop (GET_MODE (if_info->x), abs_optab, b, if_info->x, 0);
1470 /* ??? It's a quandry whether cmove would be better here, especially
1471 for integers. Perhaps combine will clean things up. */
1472 if (target && negate)
1473 target = expand_unop (GET_MODE (target), neg_optab, target, if_info->x, 0);
1481 if (target != if_info->x)
1482 noce_emit_move_insn (if_info->x, target);
1487 if (seq_contains_jump (seq))
1490 emit_insns_before (seq, earliest);
1491 if_info->cond = cond;
1492 if_info->cond_earliest = earliest;
1497 /* Look for the condition for the jump first. We'd prefer to avoid
1498 get_condition if we can -- it tries to look back for the contents
1499 of an original compare. On targets that use normal integers for
1500 comparisons, e.g. alpha, this is wasteful. */
1503 noce_get_condition (jump, earliest)
1510 /* If the condition variable is a register and is MODE_INT, accept it.
1511 Otherwise, fall back on get_condition. */
1513 if (! any_condjump_p (jump))
1516 set = pc_set (jump);
1518 cond = XEXP (SET_SRC (set), 0);
1519 if (GET_CODE (XEXP (cond, 0)) == REG
1520 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1524 /* If this branches to JUMP_LABEL when the condition is false,
1525 reverse the condition. */
1526 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1527 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1528 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1529 GET_MODE (cond), XEXP (cond, 0),
1533 cond = get_condition (jump, earliest);
1538 /* Return true if OP is ok for if-then-else processing. */
1541 noce_operand_ok (op)
1544 /* We special-case memories, so handle any of them with
1545 no address side effects. */
1546 if (GET_CODE (op) == MEM)
1547 return ! side_effects_p (XEXP (op, 0));
1549 if (side_effects_p (op))
1552 /* ??? Unfortuantely may_trap_p can't look at flag_trapping_math, due to
1553 being linked into the genfoo programs. This is probably a mistake.
1554 With finite operands, most fp operations don't trap. */
1555 if (!flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
1556 switch (GET_CODE (op))
1562 /* ??? This is kinda lame -- almost every target will have forced
1563 the constant into a register first. But given the expense of
1564 division, this is probably for the best. */
1565 return (CONSTANT_P (XEXP (op, 1))
1566 && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
1567 && ! may_trap_p (XEXP (op, 0)));
1570 switch (GET_RTX_CLASS (GET_CODE (op)))
1573 return ! may_trap_p (XEXP (op, 0));
1576 return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
1581 return ! may_trap_p (op);
1584 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1585 without using conditional execution. Return TRUE if we were
1586 successful at converting the the block. */
1589 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1590 basic_block test_bb; /* Basic block test is in */
1591 basic_block then_bb; /* Basic block for THEN block */
1592 basic_block else_bb; /* Basic block for ELSE block */
1593 basic_block join_bb; /* Basic block the join label is in */
1595 /* We're looking for patterns of the form
1597 (1) if (...) x = a; else x = b;
1598 (2) x = b; if (...) x = a;
1599 (3) if (...) x = a; // as if with an initial x = x.
1601 The later patterns require jumps to be more expensive.
1603 ??? For future expansion, look for multiple X in such patterns. */
1605 struct noce_if_info if_info;
1608 rtx orig_x, x, a, b;
1609 rtx jump, cond, insn;
1611 /* If this is not a standard conditional jump, we can't parse it. */
1612 jump = test_bb->end;
1613 cond = noce_get_condition (jump, &if_info.cond_earliest);
1617 /* If the conditional jump is more than just a conditional jump,
1618 then we can not do if-conversion on this block. */
1619 if (! onlyjump_p (jump))
1622 /* We must be comparing objects whose modes imply the size. */
1623 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1626 /* Look for one of the potential sets. */
1627 insn_a = first_active_insn (then_bb);
1629 || ! last_active_insn_p (then_bb, insn_a)
1630 || (set_a = single_set (insn_a)) == NULL_RTX)
1633 x = SET_DEST (set_a);
1634 a = SET_SRC (set_a);
1636 /* Look for the other potential set. Make sure we've got equivalent
1638 /* ??? This is overconservative. Storing to two different mems is
1639 as easy as conditionally computing the address. Storing to a
1640 single mem merely requires a scratch memory to use as one of the
1641 destination addresses; often the memory immediately below the
1642 stack pointer is available for this. */
1646 insn_b = first_active_insn (else_bb);
1648 || ! last_active_insn_p (else_bb, insn_b)
1649 || (set_b = single_set (insn_b)) == NULL_RTX
1650 || ! rtx_equal_p (x, SET_DEST (set_b)))
1655 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1657 || GET_CODE (insn_b) != INSN
1658 || (set_b = single_set (insn_b)) == NULL_RTX
1659 || ! rtx_equal_p (x, SET_DEST (set_b))
1660 || reg_mentioned_p (x, cond)
1661 || reg_mentioned_p (x, a)
1662 || reg_mentioned_p (x, SET_SRC (set_b)))
1663 insn_b = set_b = NULL_RTX;
1665 b = (set_b ? SET_SRC (set_b) : x);
1667 /* X may not be mentioned in the range (cond_earliest, jump]. */
1668 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1669 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1672 /* A and B may not be modified in the range [cond_earliest, jump). */
1673 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1675 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1678 /* Only operate on register destinations, and even then avoid extending
1679 the lifetime of hard registers on small register class machines. */
1681 if (GET_CODE (x) != REG
1682 || (SMALL_REGISTER_CLASSES
1683 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1687 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1688 ? XEXP (x, 0) : x));
1691 /* Don't operate on sources that may trap or are volatile. */
1692 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1695 /* Set up the info block for our subroutines. */
1696 if_info.test_bb = test_bb;
1697 if_info.cond = cond;
1698 if_info.jump = jump;
1699 if_info.insn_a = insn_a;
1700 if_info.insn_b = insn_b;
1705 /* Try optimizations in some approximation of a useful order. */
1706 /* ??? Should first look to see if X is live incoming at all. If it
1707 isn't, we don't need anything but an unconditional set. */
1709 /* Look and see if A and B are really the same. Avoid creating silly
1710 cmove constructs that no one will fix up later. */
1711 if (rtx_equal_p (a, b))
1713 /* If we have an INSN_B, we don't have to create any new rtl. Just
1714 move the instruction that we already have. If we don't have an
1715 INSN_B, that means that A == X, and we've got a noop move. In
1716 that case don't do anything and let the code below delete INSN_A. */
1717 if (insn_b && else_bb)
1721 if (else_bb && insn_b == else_bb->end)
1722 else_bb->end = PREV_INSN (insn_b);
1723 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1725 /* If there was a REG_EQUAL note, delete it since it may have been
1726 true due to this insn being after a jump. */
1727 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1728 remove_note (insn_b, note);
1732 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1733 x must be executed twice. */
1734 else if (insn_b && side_effects_p (orig_x))
1741 if (noce_try_store_flag (&if_info))
1743 if (noce_try_minmax (&if_info))
1745 if (noce_try_abs (&if_info))
1747 if (HAVE_conditional_move
1748 && noce_try_cmove (&if_info))
1750 if (! HAVE_conditional_execution)
1752 if (noce_try_store_flag_constants (&if_info))
1754 if (noce_try_store_flag_inc (&if_info))
1756 if (noce_try_store_flag_mask (&if_info))
1758 if (HAVE_conditional_move
1759 && noce_try_cmove_arith (&if_info))
1766 /* The original sets may now be killed. */
1767 if (insn_a == then_bb->end)
1768 then_bb->end = PREV_INSN (insn_a);
1769 flow_delete_insn (insn_a);
1771 /* Several special cases here: First, we may have reused insn_b above,
1772 in which case insn_b is now NULL. Second, we want to delete insn_b
1773 if it came from the ELSE block, because follows the now correct
1774 write that appears in the TEST block. However, if we got insn_b from
1775 the TEST block, it may in fact be loading data needed for the comparison.
1776 We'll let life_analysis remove the insn if it's really dead. */
1777 if (insn_b && else_bb)
1779 if (insn_b == else_bb->end)
1780 else_bb->end = PREV_INSN (insn_b);
1781 flow_delete_insn (insn_b);
1784 /* The new insns will have been inserted before cond_earliest. We should
1785 be able to remove the jump with impunity, but the condition itself may
1786 have been modified by gcse to be shared across basic blocks. */
1787 test_bb->end = PREV_INSN (jump);
1788 flow_delete_insn (jump);
1790 /* If we used a temporary, fix it up now. */
1794 noce_emit_move_insn (orig_x, x);
1795 insn_b = gen_sequence ();
1798 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1801 /* Merge the blocks! */
1802 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1807 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1808 straight line code. Return true if successful. */
1811 process_if_block (test_bb, then_bb, else_bb, join_bb)
1812 basic_block test_bb; /* Basic block test is in */
1813 basic_block then_bb; /* Basic block for THEN block */
1814 basic_block else_bb; /* Basic block for ELSE block */
1815 basic_block join_bb; /* Basic block the join label is in */
1817 if (! reload_completed
1818 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1821 if (HAVE_conditional_execution
1823 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1829 /* Merge the blocks and mark for local life update. */
1832 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1833 basic_block test_bb; /* Basic block test is in */
1834 basic_block then_bb; /* Basic block for THEN block */
1835 basic_block else_bb; /* Basic block for ELSE block */
1836 basic_block join_bb; /* Basic block the join label is in */
1838 basic_block combo_bb;
1840 /* All block merging is done into the lower block numbers. */
1844 /* First merge TEST block into THEN block. This is a no-brainer since
1845 the THEN block did not have a code label to begin with. */
1848 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1849 merge_blocks_nomove (combo_bb, then_bb);
1850 num_removed_blocks++;
1852 /* The ELSE block, if it existed, had a label. That label count
1853 will almost always be zero, but odd things can happen when labels
1854 get their addresses taken. */
1857 merge_blocks_nomove (combo_bb, else_bb);
1858 num_removed_blocks++;
1861 /* If there was no join block reported, that means it was not adjacent
1862 to the others, and so we cannot merge them. */
1866 /* The outgoing edge for the current COMBO block should already
1867 be correct. Verify this. */
1868 if (combo_bb->succ == NULL_EDGE)
1871 /* There should still be a branch at the end of the THEN or ELSE
1872 blocks taking us to our final destination. */
1873 if (GET_CODE (combo_bb->end) != JUMP_INSN)
1877 /* The JOIN block may have had quite a number of other predecessors too.
1878 Since we've already merged the TEST, THEN and ELSE blocks, we should
1879 have only one remaining edge from our if-then-else diamond. If there
1880 is more than one remaining edge, it must come from elsewhere. There
1881 may be zero incoming edges if the THEN block didn't actually join
1882 back up (as with a call to abort). */
1883 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1885 /* We can merge the JOIN. */
1887 COPY_REG_SET (combo_bb->global_live_at_end,
1888 join_bb->global_live_at_end);
1889 merge_blocks_nomove (combo_bb, join_bb);
1890 num_removed_blocks++;
1894 /* We cannot merge the JOIN. */
1896 /* The outgoing edge for the current COMBO block should already
1897 be correct. Verify this. */
1898 if (combo_bb->succ->succ_next != NULL_EDGE
1899 || combo_bb->succ->dest != join_bb)
1902 /* Remove the jump and cruft from the end of the COMBO block. */
1903 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1906 /* Make sure we update life info properly. */
1907 SET_UPDATE_LIFE (combo_bb);
1909 num_updated_if_blocks++;
1912 /* Find a block ending in a simple IF condition. Return TRUE if
1913 we were able to transform it in some way. */
1916 find_if_header (test_bb)
1917 basic_block test_bb;
1922 /* The kind of block we're looking for has exactly two successors. */
1923 if ((then_edge = test_bb->succ) == NULL_EDGE
1924 || (else_edge = then_edge->succ_next) == NULL_EDGE
1925 || else_edge->succ_next != NULL_EDGE)
1928 /* Neither edge should be abnormal. */
1929 if ((then_edge->flags & EDGE_COMPLEX)
1930 || (else_edge->flags & EDGE_COMPLEX))
1933 /* The THEN edge is canonically the one that falls through. */
1934 if (then_edge->flags & EDGE_FALLTHRU)
1936 else if (else_edge->flags & EDGE_FALLTHRU)
1939 else_edge = then_edge;
1943 /* Otherwise this must be a multiway branch of some sort. */
1946 if (find_if_block (test_bb, then_edge, else_edge))
1948 if (HAVE_trap && HAVE_conditional_trap
1949 && find_cond_trap (test_bb, then_edge, else_edge))
1952 && (! HAVE_conditional_execution || reload_completed))
1954 if (find_if_case_1 (test_bb, then_edge, else_edge))
1956 if (find_if_case_2 (test_bb, then_edge, else_edge))
1964 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1968 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1969 block. If so, we'll try to convert the insns to not require the branch.
1970 Return TRUE if we were successful at converting the the block. */
1973 find_if_block (test_bb, then_edge, else_edge)
1974 basic_block test_bb;
1975 edge then_edge, else_edge;
1977 basic_block then_bb = then_edge->dest;
1978 basic_block else_bb = else_edge->dest;
1979 basic_block join_bb = NULL_BLOCK;
1980 edge then_succ = then_bb->succ;
1981 edge else_succ = else_bb->succ;
1984 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1985 if (then_bb->pred->pred_next != NULL_EDGE)
1988 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1989 if (then_succ != NULL_EDGE
1990 && (then_succ->succ_next != NULL_EDGE
1991 || (then_succ->flags & EDGE_COMPLEX)))
1994 /* If the THEN block has no successors, conditional execution can still
1995 make a conditional call. Don't do this unless the ELSE block has
1996 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1997 Check for the last insn of the THEN block being an indirect jump, which
1998 is listed as not having any successors, but confuses the rest of the CE
1999 code processing. XXX we should fix this in the future. */
2000 if (then_succ == NULL)
2002 if (else_bb->pred->pred_next == NULL_EDGE)
2004 rtx last_insn = then_bb->end;
2007 && GET_CODE (last_insn) == NOTE
2008 && last_insn != then_bb->head)
2009 last_insn = PREV_INSN (last_insn);
2012 && GET_CODE (last_insn) == JUMP_INSN
2013 && ! simplejump_p (last_insn))
2017 else_bb = NULL_BLOCK;
2023 /* If the THEN block's successor is the other edge out of the TEST block,
2024 then we have an IF-THEN combo without an ELSE. */
2025 else if (then_succ->dest == else_bb)
2028 else_bb = NULL_BLOCK;
2031 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2032 has exactly one predecessor and one successor, and the outgoing edge
2033 is not complex, then we have an IF-THEN-ELSE combo. */
2034 else if (else_succ != NULL_EDGE
2035 && then_succ->dest == else_succ->dest
2036 && else_bb->pred->pred_next == NULL_EDGE
2037 && else_succ->succ_next == NULL_EDGE
2038 && ! (else_succ->flags & EDGE_COMPLEX))
2039 join_bb = else_succ->dest;
2041 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2045 num_possible_if_blocks++;
2050 fprintf (rtl_dump_file,
2051 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
2052 test_bb->index, then_bb->index, else_bb->index,
2055 fprintf (rtl_dump_file,
2056 "\nIF-THEN block found, start %d, then %d, join %d\n",
2057 test_bb->index, then_bb->index, join_bb->index);
2060 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
2061 get the first condition for free, since we've already asserted that
2062 there's a fallthru edge from IF to THEN. */
2063 /* ??? As an enhancement, move the ELSE block. Have to deal with
2064 BLOCK notes, if by no other means than aborting the merge if they
2065 exist. Sticky enough I don't want to think about it now. */
2066 next_index = then_bb->index;
2067 if (else_bb && ++next_index != else_bb->index)
2069 if (++next_index != join_bb->index)
2077 /* Do the real work. */
2078 return process_if_block (test_bb, then_bb, else_bb, join_bb);
2081 /* Convert a branch over a trap, or a branch to a trap,
2082 into a conditional trap. */
2085 find_cond_trap (test_bb, then_edge, else_edge)
2086 basic_block test_bb;
2087 edge then_edge, else_edge;
2089 basic_block then_bb, else_bb, join_bb, trap_bb;
2090 rtx trap, jump, cond, cond_earliest, seq;
2093 then_bb = then_edge->dest;
2094 else_bb = else_edge->dest;
2097 /* Locate the block with the trap instruction. */
2098 /* ??? While we look for no successors, we really ought to allow
2099 EH successors. Need to fix merge_if_block for that to work. */
2100 /* ??? We can't currently handle merging the blocks if they are not
2101 already adjacent. Prevent losage in merge_if_block by detecting
2103 if (then_bb->succ == NULL)
2106 if (else_bb->index != then_bb->index + 1)
2111 else if (else_bb->succ == NULL)
2114 if (else_bb->index != then_bb->index + 1)
2116 else if (then_bb->succ
2117 && ! then_bb->succ->succ_next
2118 && ! (then_bb->succ->flags & EDGE_COMPLEX)
2119 && then_bb->succ->dest->index == else_bb->index + 1)
2120 join_bb = then_bb->succ->dest;
2125 /* Don't confuse a conditional return with something we want to
2127 if (trap_bb == EXIT_BLOCK_PTR)
2130 /* The only instruction in the THEN block must be the trap. */
2131 trap = first_active_insn (trap_bb);
2132 if (! (trap == trap_bb->end
2133 && GET_CODE (PATTERN (trap)) == TRAP_IF
2134 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2139 if (trap_bb == then_bb)
2140 fprintf (rtl_dump_file,
2141 "\nTRAP-IF block found, start %d, trap %d",
2142 test_bb->index, then_bb->index);
2144 fprintf (rtl_dump_file,
2145 "\nTRAP-IF block found, start %d, then %d, trap %d",
2146 test_bb->index, then_bb->index, trap_bb->index);
2148 fprintf (rtl_dump_file, ", join %d\n", join_bb->index);
2150 fputc ('\n', rtl_dump_file);
2153 /* If this is not a standard conditional jump, we can't parse it. */
2154 jump = test_bb->end;
2155 cond = noce_get_condition (jump, &cond_earliest);
2159 /* If the conditional jump is more than just a conditional jump,
2160 then we can not do if-conversion on this block. */
2161 if (! onlyjump_p (jump))
2164 /* We must be comparing objects whose modes imply the size. */
2165 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2168 /* Reverse the comparison code, if necessary. */
2169 code = GET_CODE (cond);
2170 if (then_bb == trap_bb)
2172 code = reversed_comparison_code (cond, jump);
2173 if (code == UNKNOWN)
2177 /* Attempt to generate the conditional trap. */
2178 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2179 TRAP_CODE (PATTERN (trap)));
2183 /* Emit the new insns before cond_earliest; delete the old jump
2186 emit_insn_before (seq, cond_earliest);
2188 test_bb->end = PREV_INSN (jump);
2189 flow_delete_insn (jump);
2191 trap_bb->end = PREV_INSN (trap);
2192 flow_delete_insn (trap);
2194 /* Merge the blocks! */
2195 if (trap_bb != then_bb && ! else_bb)
2197 flow_delete_block (trap_bb);
2198 num_removed_blocks++;
2200 merge_if_block (test_bb, then_bb, else_bb, join_bb);
2205 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2206 transformable, but not necessarily the other. There need be no
2209 Return TRUE if we were successful at converting the the block.
2211 Cases we'd like to look at:
2214 if (test) goto over; // x not live
2222 if (! test) goto label;
2225 if (test) goto E; // x not live
2239 (3) // This one's really only interesting for targets that can do
2240 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2241 // it results in multiple branches on a cache line, which often
2242 // does not sit well with predictors.
2244 if (test1) goto E; // predicted not taken
2260 (A) Don't do (2) if the branch is predicted against the block we're
2261 eliminating. Do it anyway if we can eliminate a branch; this requires
2262 that the sole successor of the eliminated block postdominate the other
2265 (B) With CE, on (3) we can steal from both sides of the if, creating
2274 Again, this is most useful if J postdominates.
2276 (C) CE substitutes for helpful life information.
2278 (D) These heuristics need a lot of work. */
2280 /* Tests for case 1 above. */
2283 find_if_case_1 (test_bb, then_edge, else_edge)
2284 basic_block test_bb;
2285 edge then_edge, else_edge;
2287 basic_block then_bb = then_edge->dest;
2288 basic_block else_bb = else_edge->dest, new_bb;
2289 edge then_succ = then_bb->succ;
2291 /* THEN has one successor. */
2292 if (!then_succ || then_succ->succ_next != NULL)
2295 /* THEN does not fall through, but is not strange either. */
2296 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2299 /* THEN has one predecessor. */
2300 if (then_bb->pred->pred_next != NULL)
2303 /* THEN must do something. */
2304 if (forwarder_block_p (then_bb))
2307 num_possible_if_blocks++;
2309 fprintf (rtl_dump_file,
2310 "\nIF-CASE-1 found, start %d, then %d\n",
2311 test_bb->index, then_bb->index);
2313 /* THEN is small. */
2314 if (count_bb_insns (then_bb) > BRANCH_COST)
2317 /* Registers set are dead, or are predicable. */
2318 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2319 then_bb->succ->dest, 1))
2322 /* Conversion went ok, including moving the insns and fixing up the
2323 jump. Adjust the CFG to match. */
2325 SET_UPDATE_LIFE (test_bb);
2326 bitmap_operation (test_bb->global_live_at_end,
2327 else_bb->global_live_at_start,
2328 then_bb->global_live_at_end, BITMAP_IOR);
2330 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2331 /* Make rest of code believe that the newly created block is the THEN_BB
2332 block we are going to remove. */
2335 new_bb->aux = then_bb->aux;
2336 SET_UPDATE_LIFE (then_bb);
2338 flow_delete_block (then_bb);
2339 /* We've possibly created jump to next insn, cleanup_cfg will solve that
2342 num_removed_blocks++;
2343 num_updated_if_blocks++;
2348 /* Test for case 2 above. */
2351 find_if_case_2 (test_bb, then_edge, else_edge)
2352 basic_block test_bb;
2353 edge then_edge, else_edge;
2355 basic_block then_bb = then_edge->dest;
2356 basic_block else_bb = else_edge->dest;
2357 edge else_succ = else_bb->succ;
2360 /* ELSE has one successor. */
2361 if (!else_succ || else_succ->succ_next != NULL)
2364 /* ELSE outgoing edge is not complex. */
2365 if (else_succ->flags & EDGE_COMPLEX)
2368 /* ELSE has one predecessor. */
2369 if (else_bb->pred->pred_next != NULL)
2372 /* THEN is not EXIT. */
2373 if (then_bb->index < 0)
2376 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2377 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2378 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2380 else if (else_succ->dest->index < 0
2381 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2382 ORIG_INDEX (else_succ->dest)))
2387 num_possible_if_blocks++;
2389 fprintf (rtl_dump_file,
2390 "\nIF-CASE-2 found, start %d, else %d\n",
2391 test_bb->index, else_bb->index);
2393 /* ELSE is small. */
2394 if (count_bb_insns (then_bb) > BRANCH_COST)
2397 /* Registers set are dead, or are predicable. */
2398 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2401 /* Conversion went ok, including moving the insns and fixing up the
2402 jump. Adjust the CFG to match. */
2404 SET_UPDATE_LIFE (test_bb);
2405 bitmap_operation (test_bb->global_live_at_end,
2406 then_bb->global_live_at_start,
2407 else_bb->global_live_at_end, BITMAP_IOR);
2409 flow_delete_block (else_bb);
2411 num_removed_blocks++;
2412 num_updated_if_blocks++;
2414 /* ??? We may now fallthru from one of THEN's successors into a join
2415 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2420 /* A subroutine of dead_or_predicable called through for_each_rtx.
2421 Return 1 if a memory is found. */
2424 find_memory (px, data)
2426 void *data ATTRIBUTE_UNUSED;
2428 return GET_CODE (*px) == MEM;
2431 /* Used by the code above to perform the actual rtl transformations.
2432 Return TRUE if successful.
2434 TEST_BB is the block containing the conditional branch. MERGE_BB
2435 is the block containing the code to manipulate. NEW_DEST is the
2436 label TEST_BB should be branching to after the conversion.
2437 REVERSEP is true if the sense of the branch should be reversed. */
2440 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2441 basic_block test_bb, merge_bb, other_bb;
2442 basic_block new_dest;
2445 rtx head, end, jump, earliest, old_dest, new_label;
2447 jump = test_bb->end;
2449 /* Find the extent of the real code in the merge block. */
2450 head = merge_bb->head;
2451 end = merge_bb->end;
2453 if (GET_CODE (head) == CODE_LABEL)
2454 head = NEXT_INSN (head);
2455 if (GET_CODE (head) == NOTE)
2459 head = end = NULL_RTX;
2462 head = NEXT_INSN (head);
2465 if (GET_CODE (end) == JUMP_INSN)
2469 head = end = NULL_RTX;
2472 end = PREV_INSN (end);
2475 /* Disable handling dead code by conditional execution if the machine needs
2476 to do anything funny with the tests, etc. */
2477 #ifndef IFCVT_MODIFY_TESTS
2478 if (HAVE_conditional_execution)
2480 /* In the conditional execution case, we have things easy. We know
2481 the condition is reversable. We don't have to check life info,
2482 becase we're going to conditionally execute the code anyway.
2483 All that's left is making sure the insns involved can actually
2488 cond = cond_exec_get_condition (jump);
2492 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2494 prob_val = XEXP (prob_val, 0);
2498 enum rtx_code rev = reversed_comparison_code (cond, jump);
2501 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2504 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2507 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2515 /* In the non-conditional execution case, we have to verify that there
2516 are no trapping operations, no calls, no references to memory, and
2517 that any registers modified are dead at the branch site. */
2519 rtx insn, cond, prev;
2520 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2521 regset merge_set, tmp, test_live, test_set;
2522 struct propagate_block_info *pbi;
2525 /* Check for no calls or trapping operations. */
2526 for (insn = head; ; insn = NEXT_INSN (insn))
2528 if (GET_CODE (insn) == CALL_INSN)
2532 if (may_trap_p (PATTERN (insn)))
2535 /* ??? Even non-trapping memories such as stack frame
2536 references must be avoided. For stores, we collect
2537 no lifetime info; for reads, we'd have to assert
2538 true_dependance false against every store in the
2540 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2547 if (! any_condjump_p (jump))
2550 /* Find the extent of the conditional. */
2551 cond = noce_get_condition (jump, &earliest);
2556 MERGE_SET = set of registers set in MERGE_BB
2557 TEST_LIVE = set of registers live at EARLIEST
2558 TEST_SET = set of registers set between EARLIEST and the
2559 end of the block. */
2561 tmp = INITIALIZE_REG_SET (tmp_head);
2562 merge_set = INITIALIZE_REG_SET (merge_set_head);
2563 test_live = INITIALIZE_REG_SET (test_live_head);
2564 test_set = INITIALIZE_REG_SET (test_set_head);
2566 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2567 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2568 since we've already asserted that MERGE_BB is small. */
2569 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2571 /* For small register class machines, don't lengthen lifetimes of
2572 hard registers before reload. */
2573 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2575 EXECUTE_IF_SET_IN_BITMAP
2578 if (i < FIRST_PSEUDO_REGISTER
2580 && ! global_regs[i])
2585 /* For TEST, we're interested in a range of insns, not a whole block.
2586 Moreover, we're interested in the insns live from OTHER_BB. */
2588 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2589 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2592 for (insn = jump; ; insn = prev)
2594 prev = propagate_one_insn (pbi, insn);
2595 if (insn == earliest)
2599 free_propagate_block_info (pbi);
2601 /* We can perform the transformation if
2602 MERGE_SET & (TEST_SET | TEST_LIVE)
2604 TEST_SET & merge_bb->global_live_at_start
2607 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2608 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2609 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2611 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2613 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2616 FREE_REG_SET (merge_set);
2617 FREE_REG_SET (test_live);
2618 FREE_REG_SET (test_set);
2625 /* We don't want to use normal invert_jump or redirect_jump because
2626 we don't want to delete_insn called. Also, we want to do our own
2627 change group management. */
2629 old_dest = JUMP_LABEL (jump);
2630 new_label = block_label (new_dest);
2632 ? ! invert_jump_1 (jump, new_label)
2633 : ! redirect_jump_1 (jump, new_label))
2636 if (! apply_change_group ())
2640 LABEL_NUSES (old_dest) -= 1;
2642 LABEL_NUSES (new_label) += 1;
2643 JUMP_LABEL (jump) = new_label;
2646 invert_br_probabilities (jump);
2648 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
2651 gcov_type count, probability;
2652 count = BRANCH_EDGE (test_bb)->count;
2653 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
2654 FALLTHRU_EDGE (test_bb)->count = count;
2655 probability = BRANCH_EDGE (test_bb)->probability;
2656 BRANCH_EDGE (test_bb)->probability = FALLTHRU_EDGE (test_bb)->probability;
2657 FALLTHRU_EDGE (test_bb)->probability = probability;
2660 /* Move the insns out of MERGE_BB to before the branch. */
2663 if (end == merge_bb->end)
2664 merge_bb->end = PREV_INSN (head);
2666 head = squeeze_notes (head, end);
2667 if (GET_CODE (end) == NOTE
2668 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2669 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2670 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2671 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2672 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2673 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2677 end = PREV_INSN (end);
2680 reorder_insns (head, end, PREV_INSN (earliest));
2689 /* Main entry point for all if-conversion. */
2692 if_convert (x_life_data_ok)
2697 num_possible_if_blocks = 0;
2698 num_updated_if_blocks = 0;
2699 num_removed_blocks = 0;
2700 life_data_ok = (x_life_data_ok != 0);
2702 /* Free up basic_block_for_insn so that we don't have to keep it
2703 up to date, either here or in merge_blocks_nomove. */
2704 free_basic_block_vars (1);
2706 /* Compute postdominators if we think we'll use them. */
2707 post_dominators = NULL;
2708 if (HAVE_conditional_execution || life_data_ok)
2710 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2711 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2714 /* Record initial block numbers. */
2715 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2716 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2718 /* Go through each of the basic blocks looking for things to convert. */
2719 for (block_num = 0; block_num < n_basic_blocks; )
2721 basic_block bb = BASIC_BLOCK (block_num);
2722 if (find_if_header (bb))
2723 block_num = bb->index;
2728 if (post_dominators)
2729 sbitmap_vector_free (post_dominators);
2732 fflush (rtl_dump_file);
2734 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2735 compute_bb_for_insn (get_max_uid ());
2737 /* Rebuild life info for basic blocks that require it. */
2738 if (num_removed_blocks && life_data_ok)
2740 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2741 sbitmap_zero (update_life_blocks);
2743 /* If we allocated new pseudos, we must resize the array for sched1. */
2744 if (max_regno < max_reg_num ())
2746 max_regno = max_reg_num ();
2747 allocate_reg_info (max_regno, FALSE, FALSE);
2750 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2751 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2752 SET_BIT (update_life_blocks, block_num);
2754 count_or_remove_death_notes (update_life_blocks, 1);
2755 /* ??? See about adding a mode that verifies that the initial
2756 set of blocks don't let registers come live. */
2757 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2758 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2759 | PROP_KILL_DEAD_CODE);
2761 sbitmap_free (update_life_blocks);
2764 /* Write the final stats. */
2765 if (rtl_dump_file && num_possible_if_blocks > 0)
2767 fprintf (rtl_dump_file,
2768 "\n%d possible IF blocks searched.\n",
2769 num_possible_if_blocks);
2770 fprintf (rtl_dump_file,
2771 "%d IF blocks converted.\n",
2772 num_updated_if_blocks);
2773 fprintf (rtl_dump_file,
2774 "%d basic blocks deleted.\n\n\n",
2775 num_removed_blocks);
2778 #ifdef ENABLE_CHECKING
2779 verify_flow_info ();