1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-config.h"
30 #include "basic-block.h"
33 #include "hard-reg-set.h"
37 #ifndef HAVE_conditional_execution
38 #define HAVE_conditional_execution 0
40 #ifndef HAVE_conditional_move
41 #define HAVE_conditional_move 0
50 #ifndef MAX_CONDITIONAL_EXECUTE
51 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
54 #define EDGE_COMPLEX (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_EH)
56 #define NULL_EDGE ((struct edge_def *)NULL)
57 #define NULL_BLOCK ((struct basic_block_def *)NULL)
/* NOTE(review): this excerpt is a line-sampled listing (the leading numbers
   are original source line numbers and skip); verify against the complete
   ifcvt.c before relying on it.  */
59 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
60 static int num_possible_if_blocks;
62 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional execution.  */
64 static int num_updated_if_blocks;
66 /* # of basic blocks that were removed. */
67 static int num_removed_blocks;
69 /* The post-dominator relation on the original block numbers. */
70 static sbitmap *post_dominators;
72 /* Forward references. */
73 static int count_bb_insns PARAMS ((basic_block));
74 static rtx first_active_insn PARAMS ((basic_block));
75 static int last_active_insn_p PARAMS ((basic_block, rtx));
77 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
78 static rtx cond_exec_get_condition PARAMS ((rtx));
79 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
80 basic_block, basic_block));
82 static rtx noce_get_condition PARAMS ((rtx, rtx *));
83 static int noce_process_if_block PARAMS ((basic_block, basic_block,
84 basic_block, basic_block));
86 static int process_if_block PARAMS ((basic_block, basic_block,
87 basic_block, basic_block));
88 static void merge_if_block PARAMS ((basic_block, basic_block,
89 basic_block, basic_block));
91 static int find_if_header PARAMS ((basic_block));
92 static int find_if_block PARAMS ((basic_block, edge, edge));
93 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
94 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
95 static int find_memory PARAMS ((rtx *, void *));
96 static int dead_or_predicable PARAMS ((basic_block, basic_block,
97 basic_block, rtx, int));
99 /* Abuse the basic_block AUX field to store the original block index,
100 as well as a flag indicating that the block should be rescanned for
   life data updates (see SET_UPDATE_LIFE / UPDATE_LIFE below).  */
103 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
104 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
105 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
106 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
109 /* Count the number of non-jump active insns in BB. */
/* NOTE(review): the function header, locals, loop structure and return of
   count_bb_insns are missing from this sampled listing; only the counting
   condition and the iteration step are visible.  */
120 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
125 insn = NEXT_INSN (insn);
131 /* Return the first non-jump active insn in the basic block. */
134 first_active_insn (bb)
/* Skip the block's leading CODE_LABEL, if present.  */
139 if (GET_CODE (insn) == CODE_LABEL)
143 insn = NEXT_INSN (insn);
/* Skip over NOTE insns.  */
146 while (GET_CODE (insn) == NOTE)
150 insn = NEXT_INSN (insn);
/* A JUMP_INSN is not considered "active" here; the branch handling on the
   path below is missing from this sampled listing.  */
153 if (GET_CODE (insn) == JUMP_INSN)
159 /* Return true if INSN is the last active non-jump insn in BB. */
162 last_active_insn_p (bb, insn)
/* Advance past any NOTE insns that follow INSN.  */
170 insn = NEXT_INSN (insn);
172 while (GET_CODE (insn) == NOTE);
/* INSN was the last active insn iff the next non-note insn is the
   block-ending jump.  */
174 return GET_CODE (insn) == JUMP_INSN;
177 /* Go through a bunch of insns, converting them to conditional
178 execution format if possible. Return TRUE if all of the non-note
179 insns were processed. */
182 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
183 rtx start; /* first insn to look at */
184 rtx end; /* last insn to look at */
185 rtx test; /* conditional execution test */
186 rtx prob_val; /* probability of branch taken. */
187 int mod_ok; /* true if modifications ok last insn. */
189 int must_be_last = FALSE;
192 for (insn = start; ; insn = NEXT_INSN (insn))
194 if (GET_CODE (insn) == NOTE)
197 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
200 /* Remove USE and CLOBBER insns that get in the way. */
202 && (GET_CODE (PATTERN (insn)) == USE
203 || GET_CODE (PATTERN (insn)) == CLOBBER))
205 /* ??? Ug. Actually unlinking the thing is problematic,
206 given what we'd have to coordinate with our callers. */
207 PUT_CODE (insn, NOTE);
208 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
209 NOTE_SOURCE_FILE (insn) = 0;
213 /* Last insn wasn't last? */
217 if (modified_in_p (test, insn))
224 /* Now build the conditional form of the instruction. */
225 validate_change (insn, &PATTERN (insn),
226 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
/* Attach a REG_BR_PROB note to predicated calls so the branch probability
   survives the conversion.  (Fixed: "&REG_NOTES" had been corrupted to the
   mis-encoded character sequence "(R)_NOTES".)  */
229 if (GET_CODE (insn) == CALL_INSN && prob_val)
230 validate_change (insn, &REG_NOTES (insn),
231 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
232 REG_NOTES (insn)), 1);
242 /* Return the condition for a jump. Do not do any special processing. */
245 cond_exec_get_condition (jump)
/* Extract the IF_THEN_ELSE from either a plain conditional jump or one
   wrapped in a PARALLEL.  */
250 if (condjump_p (jump))
251 test_if = SET_SRC (PATTERN (jump));
252 else if (condjump_in_parallel_p (jump))
253 test_if = SET_SRC (XVECEXP (PATTERN (jump), 0, 0));
256 cond = XEXP (test_if, 0);
258 /* If this branches to JUMP_LABEL when the condition is false,
259 reverse the condition. */
260 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
261 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
262 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
263 GET_MODE (cond), XEXP (cond, 0),
269 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
270 to conditional execution. Return TRUE if we were successful at
271 converting the block. */
274 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
275 basic_block test_bb; /* Basic block test is in */
276 basic_block then_bb; /* Basic block for THEN block */
277 basic_block else_bb; /* Basic block for ELSE block */
278 basic_block join_bb; /* Basic block the join label is in */
280 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
281 rtx then_start; /* first insn in THEN block */
282 rtx then_end; /* last insn + 1 in THEN block */
283 rtx else_start; /* first insn in ELSE block or NULL */
284 rtx else_end; /* last insn + 1 in ELSE block */
285 int max; /* max # of insns to convert. */
286 int then_mod_ok; /* whether conditional mods are ok in THEN */
287 rtx true_expr; /* test for else block insns */
288 rtx false_expr; /* test for then block insns */
289 rtx true_prob_val; /* probability of else block */
290 rtx false_prob_val; /* probability of then block */
293 /* Find the conditional jump to the ELSE or JOIN part, and isolate
   the test.  */
295 test_expr = cond_exec_get_condition (test_bb->end);
299 /* Collect the bounds of where we're to search. */
301 then_start = then_bb->head;
302 then_end = then_bb->end;
304 /* Skip a label heading THEN block. */
305 if (GET_CODE (then_start) == CODE_LABEL)
306 then_start = NEXT_INSN (then_start);
308 /* Skip a (use (const_int 0)) or branch as the final insn. */
309 if (GET_CODE (then_end) == INSN
310 && GET_CODE (PATTERN (then_end)) == USE
311 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
312 then_end = PREV_INSN (then_end);
313 else if (GET_CODE (then_end) == JUMP_INSN)
314 then_end = PREV_INSN (then_end);
318 /* Skip the ELSE block's label. */
319 else_start = NEXT_INSN (else_bb->head);
320 else_end = else_bb->end;
322 /* Skip a (use (const_int 0)) or branch as the final insn. */
323 if (GET_CODE (else_end) == INSN
324 && GET_CODE (PATTERN (else_end)) == USE
325 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
326 else_end = PREV_INSN (else_end);
327 else if (GET_CODE (else_end) == JUMP_INSN)
328 else_end = PREV_INSN (else_end);
331 /* How many instructions should we convert in total? */
335 max = 2 * MAX_CONDITIONAL_EXECUTE;
336 n_insns = count_bb_insns (else_bb);
339 max = MAX_CONDITIONAL_EXECUTE;
340 n_insns += count_bb_insns (then_bb);
344 /* Map test_expr/test_jump into the appropriate MD tests to use on
345 the conditionally executed code. */
347 true_expr = test_expr;
348 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
349 GET_MODE (true_expr), XEXP (true_expr, 0),
350 XEXP (true_expr, 1));
/* Fetch the branch probability from the jump, and derive the probability
   of the complementary (fall-through) path from it.  */
352 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
355 true_prob_val = XEXP (true_prob_val, 0);
356 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
359 false_prob_val = NULL_RTX;
361 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
362 in the THEN block. */
363 then_mod_ok = (else_bb == NULL_BLOCK);
365 /* Go through the THEN and ELSE blocks converting the insns if possible
366 to conditional execution. */
369 && ! cond_exec_process_insns (then_start, then_end,
370 false_expr, false_prob_val, then_mod_ok))
374 && ! cond_exec_process_insns (else_start, else_end,
375 true_expr, true_prob_val, TRUE))
378 if (! apply_change_group ())
381 /* Conversion succeeded. */
383 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
384 n_insns, (n_insns == 1) ? " was" : "s were");
386 /* Merge the blocks! */
387 merge_if_block (test_bb, then_bb, else_bb, join_bb);
395 /* Used by noce_process_if_block to communicate with its subroutines.
397 The subroutines know that A and B may be evaluated freely. They
398 know that X is a register. They should insert new instructions
399 before cond_earliest. */
/* NOTE(review): additional struct members (x, a, b, insn_a, insn_b per the
   uses elsewhere in this file) are missing from this sampled listing.  */
405 rtx jump, cond, cond_earliest;
408 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
410 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
411 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
412 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
413 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
414 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
415 rtx, enum rtx_code, rtx,
417 static int noce_try_cmove PARAMS ((struct noce_if_info *));
418 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
420 /* Helper function for noce_try_store_flag*.  Emit a store-flag of the
   condition in IF_INFO into X, optionally reversed (REVERSEP) and/or
   normalized to NORMALIZE.  Returns the result rtx, or presumably NULL
   on failure -- the failure paths are missing from this sampled listing.  */
423 noce_emit_store_flag (if_info, x, reversep, normalize)
424 struct noce_if_info *if_info;
426 int reversep, normalize;
428 rtx cond = if_info->cond;
/* A condition is "complex" when either comparison operand is not a
   general operand (e.g. not a register or constant).  */
432 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
433 || ! general_operand (XEXP (cond, 1), VOIDmode));
435 /* If earliest == jump, or when the condition is complex, try to
436 build the store_flag insn directly. */
439 cond = XEXP (SET_SRC (PATTERN (if_info->jump)), 0);
441 if ((if_info->cond_earliest == if_info->jump || cond_complex)
442 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
446 code = GET_CODE (cond);
448 code = reverse_condition (code);
450 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
452 tmp = gen_rtx_SET (VOIDmode, x, tmp);
455 tmp = emit_insn (tmp);
/* Accept the hand-built insn only if the target recognizes it.  */
457 if (recog_memoized (tmp) >= 0)
463 if_info->cond_earliest = if_info->jump;
471 /* Don't even try if the comparison operands are weird. */
475 code = GET_CODE (cond);
477 code = reverse_condition (code);
/* Fall back to the generic expander; the last argument flags unsigned
   comparisons.  */
479 return emit_store_flag (x, code, XEXP (cond, 0),
480 XEXP (cond, 1), VOIDmode,
481 (code == LTU || code == LEU
482 || code == GEU || code == GTU), normalize);
485 /* Convert "if (test) x = 1; else x = 0".
487 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
488 tried in noce_try_store_flag_constants after noce_try_cmove has had
489 a go at the conversion. */
492 noce_try_store_flag (if_info)
493 struct noce_if_info *if_info;
/* Case: B == STORE_FLAG_VALUE, A == 0 -- store the flag directly.  */
498 if (GET_CODE (if_info->b) == CONST_INT
499 && INTVAL (if_info->b) == STORE_FLAG_VALUE
500 && if_info->a == const0_rtx)
/* Case: A == STORE_FLAG_VALUE, B == 0 -- store the reversed flag.  */
502 else if (if_info->b == const0_rtx
503 && GET_CODE (if_info->a) == CONST_INT
504 && INTVAL (if_info->a) == STORE_FLAG_VALUE
505 && can_reverse_comparison_p (if_info->cond, if_info->jump))
512 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
515 if (target != if_info->x)
516 emit_move_insn (if_info->x, target);
520 emit_insns_before (seq, if_info->cond_earliest);
531 /* Convert "if (test) x = a; else x = b", for A and B constant.
   Decides on a store-flag normalization, emits the flag, then combines
   it with the constants via add/shift/or/and-add as appropriate.  */
534 noce_try_store_flag_constants (if_info)
535 struct noce_if_info *if_info;
539 HOST_WIDE_INT itrue, ifalse, diff, tmp;
540 int normalize, can_reverse;
543 && GET_CODE (if_info->a) == CONST_INT
544 && GET_CODE (if_info->b) == CONST_INT)
546 ifalse = INTVAL (if_info->a);
547 itrue = INTVAL (if_info->b);
548 diff = itrue - ifalse;
550 can_reverse = can_reverse_comparison_p (if_info->cond, if_info->jump);
/* Select the normalization/reversal that lets the cheapest combining
   operation be used below.  */
553 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
555 else if (ifalse == 0 && exact_log2 (itrue) >= 0
556 && (STORE_FLAG_VALUE == 1
557 || BRANCH_COST >= 2))
559 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
560 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
561 normalize = 1, reversep = 1;
563 && (STORE_FLAG_VALUE == -1
564 || BRANCH_COST >= 2))
566 else if (ifalse == -1 && can_reverse
567 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
568 normalize = -1, reversep = 1;
569 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
/* When the comparison is reversed, the roles of the two constants swap.  */
577 tmp = itrue; itrue = ifalse; ifalse = tmp;
582 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
589 /* if (test) x = 3; else x = 4;
590 => x = 3 + (test == 0); */
591 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
593 target = expand_binop (GET_MODE (if_info->x),
594 (diff == STORE_FLAG_VALUE
595 ? add_optab : sub_optab),
596 GEN_INT (ifalse), target, if_info->x, 0,
600 /* if (test) x = 8; else x = 0;
601 => x = (test != 0) << 3; */
602 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
604 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
605 target, GEN_INT (tmp), if_info->x, 0,
609 /* if (test) x = -1; else x = b;
610 => x = -(test != 0) | b; */
611 else if (itrue == -1)
613 target = expand_binop (GET_MODE (if_info->x), ior_optab,
614 target, GEN_INT (ifalse), if_info->x, 0,
618 /* if (test) x = a; else x = b;
619 => x = (-(test != 0) & (b - a)) + a; */
622 target = expand_binop (GET_MODE (if_info->x), and_optab,
623 target, GEN_INT (diff), if_info->x, 0,
626 target = expand_binop (GET_MODE (if_info->x), add_optab,
627 target, GEN_INT (ifalse), if_info->x, 0,
637 if (target != if_info->x)
638 emit_move_insn (if_info->x, target);
642 emit_insns_before (seq, if_info->cond_earliest);
650 /* Convert "if (test) foo++" into "foo += (test != 0)", and
651 similarly for "foo--". */
654 noce_try_store_flag_inc (if_info)
655 struct noce_if_info *if_info;
658 int subtract, normalize;
664 /* Should be no `else' case to worry about. */
665 && if_info->b == if_info->x
666 && GET_CODE (if_info->a) == PLUS
667 && (XEXP (if_info->a, 1) == const1_rtx
668 || XEXP (if_info->a, 1) == constm1_rtx)
669 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
670 && can_reverse_comparison_p (if_info->cond, if_info->jump))
/* Choose whether to add or subtract the flag, and how to normalize it,
   so the increment matches +/-1.  */
672 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
673 subtract = 0, normalize = 0;
674 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
675 subtract = 1, normalize = 0;
677 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
681 target = noce_emit_store_flag (if_info,
682 gen_reg_rtx (GET_MODE (if_info->x)),
686 target = expand_binop (GET_MODE (if_info->x),
687 subtract ? sub_optab : add_optab,
688 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
691 if (target != if_info->x)
692 emit_move_insn (if_info->x, target);
696 emit_insns_before (seq, if_info->cond_earliest);
707 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
710 noce_try_store_flag_mask (if_info)
711 struct noce_if_info *if_info;
/* Applicable when one arm is the constant zero and the other leaves X
   unchanged; REVERSEP picks which arm zeroes X.  */
719 || STORE_FLAG_VALUE == -1)
720 && ((if_info->a == const0_rtx
721 && rtx_equal_p (if_info->b, if_info->x))
722 || ((reversep = can_reverse_comparison_p (if_info->cond,
724 && if_info->b == const0_rtx
725 && rtx_equal_p (if_info->a, if_info->x))))
728 target = noce_emit_store_flag (if_info,
729 gen_reg_rtx (GET_MODE (if_info->x)),
732 target = expand_binop (GET_MODE (if_info->x), and_optab,
733 if_info->x, target, if_info->x, 0,
738 if (target != if_info->x)
739 emit_move_insn (if_info->x, target);
743 emit_insns_before (seq, if_info->cond_earliest);
754 /* Helper function for noce_try_cmove and noce_try_cmove_arith.
   Emit "X = (CMP_A code CMP_B) ? VTRUE : VFALSE" and return the result.  */
757 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
758 struct noce_if_info *if_info;
759 rtx x, cmp_a, cmp_b, vfalse, vtrue;
762 /* If earliest == jump, try to build the cmove insn directly.
763 This is helpful when combine has created some complex condition
764 (like for alpha's cmovlbs) that we can't hope to regenerate
765 through the normal interface. */
767 if (if_info->cond_earliest == if_info->jump)
771 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
772 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
773 tmp = gen_rtx_SET (VOIDmode, x, tmp);
776 tmp = emit_insn (tmp);
/* Keep the hand-built cmove only if the target recognizes it.  */
778 if (recog_memoized (tmp) >= 0)
790 /* Don't even try if the comparison operands are weird. */
791 if (! general_operand (cmp_a, GET_MODE (cmp_a))
792 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
795 #if HAVE_conditional_move
/* Last argument flags unsigned comparisons.  */
796 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
797 vtrue, vfalse, GET_MODE (x),
798 (code == LTU || code == GEU
799 || code == LEU || code == GTU));
801 /* We'll never get here, as noce_process_if_block doesn't call the
802 functions involved. Ifdef code, however, should be discouraged
803 because it leads to typos in the code not selected. However,
804 emit_conditional_move won't exist either. */
809 /* Try only simple constants and registers here. More complex cases
810 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
811 has had a go at it. */
814 noce_try_cmove (if_info)
815 struct noce_if_info *if_info;
/* Both arms must be constants or registers for the simple case.  */
820 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
821 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
825 code = GET_CODE (if_info->cond);
826 target = noce_emit_cmove (if_info, if_info->x, code,
827 XEXP (if_info->cond, 0),
828 XEXP (if_info->cond, 1),
829 if_info->a, if_info->b);
833 if (target != if_info->x)
834 emit_move_insn (if_info->x, target);
838 emit_insns_before (seq, if_info->cond_earliest);
851 /* Try more complex cases involving conditional_move. */
854 noce_try_cmove_arith (if_info)
855 struct noce_if_info *if_info;
865 /* A conditional move from two memory sources is equivalent to a
866 conditional on their addresses followed by a load. Don't do this
867 early because it'll screw alias analysis. Note that we've
868 already checked for no side effects. */
869 if (! no_new_pseudos && cse_not_expected
870 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
875 x = gen_reg_rtx (Pmode);
879 /* ??? We could handle this if we knew that a load from A or B could
880 not fault. This is also true if we've already loaded
881 from the address along the path from ENTRY. */
882 else if (may_trap_p (a) || may_trap_p (b))
885 /* if (test) x = a + b; else x = c - d;
   (continuation of this example is missing from this sampled listing)  */
892 code = GET_CODE (if_info->cond);
893 insn_a = if_info->insn_a;
894 insn_b = if_info->insn_b;
896 /* Possibly rearrange operands to make things come out more natural. */
897 if (can_reverse_comparison_p (if_info->cond, if_info->jump))
900 if (rtx_equal_p (b, x))
902 else if (general_operand (b, GET_MODE (b)))
/* Swap the arms and reverse the condition.  */
907 code = reverse_condition (code);
908 tmp = a, a = b, b = tmp;
909 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
915 /* If either operand is complex, load it into a register first.
916 The best way to do this is to copy the original insn. In this
917 way we preserve any clobbers etc that the insn may have had.
918 This is of course not possible in the IS_MEM case. */
919 if (! general_operand (a, GET_MODE (a)))
924 goto end_seq_and_fail;
928 tmp = gen_reg_rtx (GET_MODE (a));
929 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
932 goto end_seq_and_fail;
935 a = gen_reg_rtx (GET_MODE (a));
936 tmp = copy_rtx (insn_a);
937 set = single_set (tmp);
939 tmp = emit_insn (PATTERN (tmp));
941 if (recog_memoized (tmp) < 0)
942 goto end_seq_and_fail;
/* Same treatment for the B arm.  */
944 if (! general_operand (b, GET_MODE (b)))
949 goto end_seq_and_fail;
953 tmp = gen_reg_rtx (GET_MODE (b));
954 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
957 goto end_seq_and_fail;
960 b = gen_reg_rtx (GET_MODE (b));
961 tmp = copy_rtx (insn_b);
962 set = single_set (tmp);
964 tmp = emit_insn (PATTERN (tmp));
966 if (recog_memoized (tmp) < 0)
967 goto end_seq_and_fail;
970 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
971 XEXP (if_info->cond, 1), a, b);
974 goto end_seq_and_fail;
976 /* If we're handling a memory for above, emit the load now. */
979 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
981 /* Copy over flags as appropriate. */
982 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
983 MEM_VOLATILE_P (tmp) = 1;
984 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
985 MEM_IN_STRUCT_P (tmp) = 1;
986 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
987 MEM_SCALAR_P (tmp) = 1;
988 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
989 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
991 emit_move_insn (if_info->x, tmp);
993 else if (target != x)
994 emit_move_insn (x, target);
998 emit_insns_before (tmp, if_info->cond_earliest);
1006 /* Look for the condition for the jump first. We'd prefer to avoid
1007 get_condition if we can -- it tries to look back for the contents
1008 of an original compare. On targets that use normal integers for
1009 comparisons, e.g. alpha, this is wasteful. */
1012 noce_get_condition (jump, earliest)
1018 /* If the condition variable is a register and is MODE_INT, accept it.
1019 Otherwise, fall back on get_condition. */
1021 if (! condjump_p (jump))
1024 cond = XEXP (SET_SRC (PATTERN (jump)), 0);
1025 if (GET_CODE (XEXP (cond, 0)) == REG
1026 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1030 /* If this branches to JUMP_LABEL when the condition is false,
1031 reverse the condition. */
1032 if (GET_CODE (XEXP (SET_SRC (PATTERN (jump)), 2)) == LABEL_REF
1033 && XEXP (XEXP (SET_SRC (PATTERN (jump)), 2), 0) == JUMP_LABEL (jump))
1034 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1035 GET_MODE (cond), XEXP (cond, 0),
/* Fallback: let get_condition dig out the comparison.  */
1039 cond = get_condition (jump, earliest);
1044 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1045 without using conditional execution. Return TRUE if we were
1046 successful at converting the block. */
1049 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1050 basic_block test_bb; /* Basic block test is in */
1051 basic_block then_bb; /* Basic block for THEN block */
1052 basic_block else_bb; /* Basic block for ELSE block */
1053 basic_block join_bb; /* Basic block the join label is in */
1055 /* We're looking for patterns of the form
1057 (1) if (...) x = a; else x = b;
1058 (2) x = b; if (...) x = a;
1059 (3) if (...) x = a; // as if with an initial x = x.
1061 The latter patterns require jumps to be more expensive.
1063 ??? For future expansion, look for multiple X in such patterns. */
1065 struct noce_if_info if_info;
1068 rtx orig_x, x, a, b;
1069 rtx jump, cond, insn;
1071 /* If this is not a standard conditional jump, we can't parse it. */
1072 jump = test_bb->end;
1073 cond = noce_get_condition (jump, &if_info.cond_earliest);
1077 /* We must be comparing objects whose modes imply the size. */
1078 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1081 /* Look for one of the potential sets. */
1082 insn_a = first_active_insn (then_bb);
1084 || ! last_active_insn_p (then_bb, insn_a)
1085 || (set_a = single_set (insn_a)) == NULL_RTX)
1088 x = SET_DEST (set_a);
1089 a = SET_SRC (set_a);
1091 /* Look for the other potential set. Make sure we've got equivalent
   destinations.  */
1093 /* ??? This is overconservative. Storing to two different mems is
1094 as easy as conditionally computing the address. Storing to a
1095 single mem merely requires a scratch memory to use as one of the
1096 destination addresses; often the memory immediately below the
1097 stack pointer is available for this. */
1101 insn_b = first_active_insn (else_bb);
1103 || ! last_active_insn_p (else_bb, insn_b)
1104 || (set_b = single_set (insn_b)) == NULL_RTX
1105 || ! rtx_equal_p (x, SET_DEST (set_b)))
/* No ELSE block: look for pattern (2), "x = b" just before the test.  */
1110 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1112 || GET_CODE (insn_b) != INSN
1113 || (set_b = single_set (insn_b)) == NULL_RTX
1114 || ! rtx_equal_p (x, SET_DEST (set_b))
1115 || reg_mentioned_p (x, cond)
1116 || reg_mentioned_p (x, a)
1117 || reg_mentioned_p (x, SET_SRC (set_b)))
1118 insn_b = set_b = NULL_RTX;
1120 b = (set_b ? SET_SRC (set_b) : x);
1122 /* X may not be mentioned in the range (cond_earliest, jump]. */
1123 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1124 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1127 /* A and B may not be modified in the range [cond_earliest, jump). */
1128 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1130 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1133 /* Only operate on register destinations, and even then avoid extending
1134 the lifetime of hard registers on small register class machines. */
1136 if (GET_CODE (x) != REG
1137 || (SMALL_REGISTER_CLASSES
1138 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1142 x = gen_reg_rtx (GET_MODE (x));
1145 /* Don't operate on sources that may trap or are volatile. */
1146 if (side_effects_p (a) || side_effects_p (b)
1147 || (GET_CODE (a) != MEM && may_trap_p (a))
1148 || (GET_CODE (b) != MEM && may_trap_p (b)))
1151 /* Set up the info block for our subroutines. */
1152 if_info.cond = cond;
1153 if_info.jump = jump;
1154 if_info.insn_a = insn_a;
1155 if_info.insn_b = insn_b;
1160 /* Try optimizations in some approximation of a useful order. */
1161 /* ??? Should first look to see if X is live incoming at all. If it
1162 isn't, we don't need anything but an unconditional set. */
1164 /* Look and see if A and B are really the same. Avoid creating silly
1165 cmove constructs that no one will fix up later. */
1166 if (rtx_equal_p (a, b))
1168 /* If we have an INSN_B, we don't have to create any new rtl. Just
1169 move the instruction that we already have. If we don't have an
1170 INSN_B, that means that A == X, and we've got a noop move. In
1171 that case don't do anything and let the code below delete INSN_A. */
1172 if (insn_b && else_bb)
1174 if (else_bb && insn_b == else_bb->end)
1175 else_bb->end = PREV_INSN (insn_b);
1176 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
/* Transformation attempts, cheapest first.  */
1183 if (noce_try_store_flag (&if_info))
1185 if (HAVE_conditional_move
1186 && noce_try_cmove (&if_info))
1188 if (! HAVE_conditional_execution)
1190 if (noce_try_store_flag_constants (&if_info))
1192 if (noce_try_store_flag_inc (&if_info))
1194 if (noce_try_store_flag_mask (&if_info))
1196 if (HAVE_conditional_move
1197 && noce_try_cmove_arith (&if_info))
1204 /* The original sets may now be killed. */
1205 if (insn_a == then_bb->end)
1206 then_bb->end = PREV_INSN (insn_a);
1207 flow_delete_insn (insn_a);
1209 /* Several special cases here: First, we may have reused insn_b above,
1210 in which case insn_b is now NULL. Second, we want to delete insn_b
1211 if it came from the ELSE block, because it follows the now correct
1212 write that appears in the TEST block. However, if we got insn_b from
1213 the TEST block, it may in fact be loading data needed for the comparison.
1214 We'll let life_analysis remove the insn if it's really dead. */
1215 if (insn_b && else_bb)
1217 if (insn_b == else_bb->end)
1218 else_bb->end = PREV_INSN (insn_b);
1219 flow_delete_insn (insn_b);
1222 /* The new insns will have been inserted before cond_earliest. We should
1223 be able to remove the jump with impunity, but the condition itself may
1224 have been modified by gcse to be shared across basic blocks. */
1225 test_bb->end = PREV_INSN (jump);
1226 flow_delete_insn (jump);
1228 /* If we used a temporary, fix it up now. */
1232 emit_move_insn (orig_x, x);
1233 insn_b = gen_sequence ();
1236 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1239 /* Merge the blocks! */
1240 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1245 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1246 straight line code. Return true if successful.  Tries the
   non-conditional-execution (noce) path before reload, then the
   conditional-execution path where the target supports it.  */
1249 process_if_block (test_bb, then_bb, else_bb, join_bb)
1250 basic_block test_bb; /* Basic block test is in */
1251 basic_block then_bb; /* Basic block for THEN block */
1252 basic_block else_bb; /* Basic block for ELSE block */
1253 basic_block join_bb; /* Basic block the join label is in */
1255 if (! reload_completed
1256 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1259 if (HAVE_conditional_execution
1261 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1267 /* Merge the blocks and mark for local life update. */
1270 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1271 basic_block test_bb; /* Basic block test is in */
1272 basic_block then_bb; /* Basic block for THEN block */
1273 basic_block else_bb; /* Basic block for ELSE block */
1274 basic_block join_bb; /* Basic block the join label is in */
1276 basic_block combo_bb;
1278 /* All block merging is done into the lower block numbers. */
1282 /* First merge TEST block into THEN block. This is a no-brainer since
1283 the THEN block did not have a code label to begin with. */
1285 if (combo_bb->global_live_at_end)
1286 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1287 merge_blocks_nomove (combo_bb, then_bb);
1288 num_removed_blocks++;
1290 /* The ELSE block, if it existed, had a label. That label count
1291 will almost always be zero, but odd things can happen when labels
1292 get their addresses taken. */
1295 merge_blocks_nomove (combo_bb, else_bb);
1296 num_removed_blocks++;
1299 /* If there was no join block reported, that means it was not adjacent
1300 to the others, and so we cannot merge them. */
1304 /* The outgoing edge for the current COMBO block should already
1305 be correct. Verify this. */
1306 if (combo_bb->succ == NULL_EDGE)
1309 /* There should still be a branch at the end of the THEN or ELSE
1310 blocks taking us to our final destination. */
1311 if (! simplejump_p (combo_bb->end)
1312 && ! returnjump_p (combo_bb->end))
1316 /* The JOIN block may have had quite a number of other predecessors too.
1317 Since we've already merged the TEST, THEN and ELSE blocks, we should
1318 have only one remaining edge from our if-then-else diamond. If there
1319 is more than one remaining edge, it must come from elsewhere. */
1320 else if (join_bb->pred->pred_next == NULL)
1322 /* We can merge the JOIN. */
1323 if (combo_bb->global_live_at_end)
1324 COPY_REG_SET (combo_bb->global_live_at_end,
1325 join_bb->global_live_at_end)
1326 merge_blocks_nomove (combo_bb, join_bb);
1327 num_removed_blocks++;
1331 /* We cannot merge the JOIN. */
1333 /* The outgoing edge for the current COMBO block should already
1334 be correct. Verify this. */
1335 if (combo_bb->succ->succ_next != NULL_EDGE
1336 || combo_bb->succ->dest != join_bb)
1339 /* Remove the jump and cruft from the end of the COMBO block. */
1340 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1343 /* Make sure we update life info properly. */
1344 SET_UPDATE_LIFE (combo_bb);
1346 num_updated_if_blocks++;
1349 /* Find a block ending in a simple IF condition. Return TRUE if
1350 we were able to transform it in some way. */
1353 find_if_header (test_bb)
1354 basic_block test_bb;
1359 /* The kind of block we're looking for has exactly two successors. */
1360 if ((then_edge = test_bb->succ) == NULL_EDGE
1361 || (else_edge = then_edge->succ_next) == NULL_EDGE
1362 || else_edge->succ_next != NULL_EDGE)
1365 /* Neither edge should be abnormal. */
1366 if ((then_edge->flags & EDGE_COMPLEX)
1367 || (else_edge->flags & EDGE_COMPLEX))
1370 /* The THEN edge is canonically the one that falls through. */
1371 if (then_edge->flags & EDGE_FALLTHRU)
1373 else if (else_edge->flags & EDGE_FALLTHRU)
1376 else_edge = then_edge;
1380 /* Otherwise this must be a multiway branch of some sort. */
/* Try the structured IF-THEN(-ELSE) shape first, then the half-block
   cases (only valid pre-conditional-execution or after reload).  */
1383 if (find_if_block (test_bb, then_edge, else_edge))
1386 && (! HAVE_conditional_execution || reload_completed))
1388 if (find_if_case_1 (test_bb, then_edge, else_edge))
1390 if (find_if_case_2 (test_bb, then_edge, else_edge))
1398 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1402 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1403 block. If so, we'll try to convert the insns to not require the branch.
1404 Return TRUE if we were successful at converting the block. */
/* Recognize the shape of a simple IF-THEN or IF-THEN-ELSE region rooted at
   TEST_BB, with THEN_EDGE/ELSE_EDGE its two outgoing edges.  On a match,
   hand the region to process_if_block for the actual conversion; the return
   value is that of process_if_block.  NOTE(review): gapped excerpt — the
   `return FALSE` statements after each failed check are not visible.  */
1407 find_if_block (test_bb, then_edge, else_edge)
1408 basic_block test_bb;
1409 edge then_edge, else_edge;
1411 basic_block then_bb = then_edge->dest;
1412 basic_block else_bb = else_edge->dest;
1413 basic_block join_bb = NULL_BLOCK;
1414 edge then_succ = then_bb->succ;
1415 edge else_succ = else_bb->succ;
1418 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1419 if (then_bb->pred->pred_next != NULL_EDGE)
1422 /* The THEN block of an IF-THEN combo must have exactly one successor. */
1423 if (then_succ == NULL_EDGE
1424 || then_succ->succ_next != NULL_EDGE
1425 || (then_succ->flags & EDGE_COMPLEX))
1428 /* If the THEN block's successor is the other edge out of the TEST block,
1429 then we have an IF-THEN combo without an ELSE. */
1430 if (then_succ->dest == else_bb)
/* What looked like the ELSE block is really the join block.  */
1433 else_bb = NULL_BLOCK;
1436 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1437 has exactly one predecessor and one successor, and the outgoing edge
1438 is not complex, then we have an IF-THEN-ELSE combo. */
1439 else if (else_succ != NULL_EDGE
1440 && then_succ->dest == else_succ->dest
1441 && else_bb->pred->pred_next == NULL_EDGE
1442 && else_succ->succ_next == NULL_EDGE
1443 && ! (else_succ->flags & EDGE_COMPLEX))
1444 join_bb = else_succ->dest;
1446 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1450 num_possible_if_blocks++;
1455 fprintf (rtl_dump_file,
1456 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1457 test_bb->index, then_bb->index, else_bb->index,
1460 fprintf (rtl_dump_file,
1461 "\nIF-THEN block found, start %d, then %d, join %d\n",
1462 test_bb->index, then_bb->index, join_bb->index);
1465 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
1466 get the first condition for free, since we've already asserted that
1467 there's a fallthru edge from IF to THEN. */
1468 /* ??? As an enhancement, move the ELSE block. Have to deal with EH and
1469 BLOCK notes, if by no other means than aborting the merge if they
1470 exist. Sticky enough I don't want to think about it now. */
/* Adjacency is checked by consecutive block indices: THEN, then ELSE (if
   present), then JOIN must follow one another in layout order.  */
1471 next_index = then_bb->index;
1472 if (else_bb && ++next_index != else_bb->index)
1474 if (++next_index != join_bb->index)
1482 /* Do the real work. */
1483 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1486 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1487 transformable, but not necessarily the other. There need be no
1490 Return TRUE if we were successful at converting the block.
1492 Cases we'd like to look at:
1495 if (test) goto over; // x not live
1503 if (! test) goto label;
1506 if (test) goto E; // x not live
1520 (3) // This one's really only interesting for targets that can do
1521 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1522 // it results in multiple branches on a cache line, which often
1523 // does not sit well with predictors.
1525 if (test1) goto E; // predicted not taken
1541 (A) Don't do (2) if the branch is predicted against the block we're
1542 eliminating. Do it anyway if we can eliminate a branch; this requires
1543 that the sole successor of the eliminated block postdominate the other
1546 (B) With CE, on (3) we can steal from both sides of the if, creating
1555 Again, this is most useful if J postdominates.
1557 (C) CE substitutes for helpful life information.
1559 (D) These heuristics need a lot of work. */
1561 /* Tests for case 1 above. */
/* Attempt transformation (1) above: THEN is a small side block that does
   not fall through; if its register sets are dead or predicable at the
   branch, hoist its insns into TEST_BB (with the branch sense reversed)
   and delete the THEN block.  Returns nonzero on success.
   NOTE(review): gapped excerpt — the `return FALSE` paths and local
   declarations are not visible.  */
1564 find_if_case_1 (test_bb, then_edge, else_edge)
1565 basic_block test_bb;
1566 edge then_edge, else_edge;
1568 basic_block then_bb = then_edge->dest;
1569 basic_block else_bb = else_edge->dest;
1570 edge then_succ = then_bb->succ;
1573 /* THEN has one successor. */
1574 if (!then_succ || then_succ->succ_next != NULL)
1577 /* THEN does not fall through, but is not strange either. */
1578 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
1581 /* THEN has one predecessor. */
1582 if (then_bb->pred->pred_next != NULL)
1585 /* ELSE follows THEN. (??? could be moved) */
1586 if (else_bb->index != then_bb->index + 1)
1589 num_possible_if_blocks++;
1591 fprintf (rtl_dump_file,
1592 "\nIF-CASE-1 found, start %d, then %d\n",
1593 test_bb->index, then_bb->index);
/* Limit the hoisted code to roughly the cost of the branch we remove.  */
1595 /* THEN is small. */
1596 if (count_bb_insns (then_bb) > BRANCH_COST)
1599 /* Find the label for THEN's destination. */
1600 if (then_succ->dest == EXIT_BLOCK_PTR)
1604 new_lab = JUMP_LABEL (then_bb->end);
/* REVERSEP == 1: the branch in TEST_BB is inverted to jump where the
   THEN block used to.  */
1609 /* Registers set are dead, or are predicable. */
1610 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
1613 /* Conversion went ok, including moving the insns and fixing up the
1614 jump. Adjust the CFG to match. */
1616 SET_UPDATE_LIFE (test_bb);
/* TEST_BB's live-at-end is now the union of what ELSE needs and what was
   live out of the deleted THEN block.  */
1617 bitmap_operation (test_bb->global_live_at_end,
1618 else_bb->global_live_at_start,
1619 then_bb->global_live_at_end, BITMAP_IOR);
1621 make_edge (NULL, test_bb, then_succ->dest, 0);
1622 flow_delete_block (then_bb);
1623 tidy_fallthru_edge (else_edge, test_bb, else_bb);
1625 num_removed_blocks++;
1626 num_updated_if_blocks++;
1631 /* Test for case 2 above. */
/* Attempt transformation (2) above: ELSE is a small side block whose insns
   can be hoisted into TEST_BB (keeping the branch sense), after which the
   ELSE block is deleted.  Mirror image of find_if_case_1.  Returns nonzero
   on success.  NOTE(review): gapped excerpt — `return FALSE` paths and
   local declarations are not visible.  */
1634 find_if_case_2 (test_bb, then_edge, else_edge)
1635 basic_block test_bb;
1636 edge then_edge, else_edge;
1638 basic_block then_bb = then_edge->dest;
1639 basic_block else_bb = else_edge->dest;
1640 edge else_succ = else_bb->succ;
1643 /* ELSE has one successor. */
1644 if (!else_succ || else_succ->succ_next != NULL)
1647 /* ELSE outgoing edge is not complex. */
1648 if (else_succ->flags & EDGE_COMPLEX)
1651 /* ELSE has one predecessor. */
1652 if (else_bb->pred->pred_next != NULL)
/* Heuristic (A): only proceed if the branch is predicted toward ELSE, or
   if ELSE's successor postdominates THEN (index < 0 means EXIT).  */
1655 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
1656 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
1657 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
1659 else if (else_succ->dest->index < 0
1660 || (then_bb->index >= 0
1661 && TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
1662 ORIG_INDEX (else_succ->dest))))
1667 num_possible_if_blocks++;
1669 fprintf (rtl_dump_file,
1670 "\nIF-CASE-2 found, start %d, else %d\n",
1671 test_bb->index, else_bb->index);
/* Limit the hoisted code to roughly the cost of the branch we remove.
   Fixed: this is the ELSE block being merged (see dead_or_predicable call
   below), so measure ELSE_BB — the old code counted THEN_BB here, which is
   the block we keep; compare the symmetric check in find_if_case_1.  */
1673 /* ELSE is small. */
1674 if (count_bb_insns (else_bb) > BRANCH_COST)
1677 /* Find the label for ELSE's destination. */
1678 if (else_succ->dest == EXIT_BLOCK_PTR)
1682 if (else_succ->flags & EDGE_FALLTHRU)
1684 new_lab = else_succ->dest->head;
1685 if (GET_CODE (new_lab) != CODE_LABEL)
1690 new_lab = JUMP_LABEL (else_bb->end);
/* REVERSEP == 0: the branch keeps its sense but is redirected past the
   merged ELSE code.  */
1696 /* Registers set are dead, or are predicable. */
1697 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
1700 /* Conversion went ok, including moving the insns and fixing up the
1701 jump. Adjust the CFG to match. */
1703 SET_UPDATE_LIFE (test_bb);
/* TEST_BB's live-at-end is the union of what THEN needs and what was live
   out of the deleted ELSE block.  */
1704 bitmap_operation (test_bb->global_live_at_end,
1705 then_bb->global_live_at_start,
1706 else_bb->global_live_at_end, BITMAP_IOR);
1708 remove_edge (else_edge);
1709 make_edge (NULL, test_bb, else_succ->dest, 0);
1710 flow_delete_block (else_bb);
1712 num_removed_blocks++;
1713 num_updated_if_blocks++;
1715 /* ??? We may now fallthru from one of THEN's successors into a join
1716 block. Rerun cleanup_cfg? Examine things manually? Wait? */
1721 /* A subroutine of dead_or_predicable called through for_each_rtx.
1722 Return 1 if a memory is found. */
/* PX points at the sub-rtx being visited; DATA is unused.  Any nonzero
   return stops the for_each_rtx walk.  */
1725 find_memory (px, data)
1727 void *data ATTRIBUTE_UNUSED;
1729 return GET_CODE (*px) == MEM;
1732 /* Used by the code above to perform the actual rtl transformations.
1733 Return TRUE if successful.
1735 TEST_BB is the block containing the conditional branch. MERGE_BB
1736 is the block containing the code to manipulate. NEW_DEST is the
1737 label TEST_BB should be branching to after the conversion.
1738 REVERSEP is true if the sense of the branch should be reversed. */
/* NOTE(review): gapped excerpt — cancel/early-exit paths, the `fail`
   bookkeeping, and several declarations fall between the visible lines.  */
1741 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
1742 basic_block test_bb, merge_bb, other_bb;
1746 rtx head, end, jump, earliest, old_dest;
1748 jump = test_bb->end;
1750 /* Find the extent of the real code in the merge block. */
1751 head = merge_bb->head;
1752 end = merge_bb->end;
/* Skip a leading label and note; an empty block leaves HEAD = END = 0.  */
1754 if (GET_CODE (head) == CODE_LABEL)
1755 head = NEXT_INSN (head);
1756 if (GET_CODE (head) == NOTE)
1760 head = end = NULL_RTX;
1763 head = NEXT_INSN (head);
/* Likewise trim a trailing jump off the range to be moved.  */
1766 if (GET_CODE (end) == JUMP_INSN)
1770 head = end = NULL_RTX;
1773 end = PREV_INSN (end);
1776 if (HAVE_conditional_execution)
1778 /* In the conditional execution case, we have things easy. We know
1779 the condition is reversible. We don't have to check life info,
1780 because we're going to conditionally execute the code anyway.
1781 All that's left is making sure the insns involved can actually
1786 cond = cond_exec_get_condition (jump);
1788 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
1790 prob_val = XEXP (prob_val, 0);
/* Build the reversed condition (and complementary probability) when the
   branch sense is to be inverted.  */
1794 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1795 GET_MODE (cond), XEXP (cond, 0),
1798 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
1801 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
1808 /* In the non-conditional execution case, we have to verify that there
1809 are no trapping operations, no calls, no references to memory, and
1810 that any registers modified are dead at the branch site. */
1812 rtx insn, cond, prev;
1813 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
1814 regset merge_set, tmp, test_live, test_set;
1815 struct propagate_block_info *pbi;
1818 /* Check for no calls or trapping operations. */
1819 for (insn = head; ; insn = NEXT_INSN (insn))
1821 if (GET_CODE (insn) == CALL_INSN)
1825 if (may_trap_p (PATTERN (insn)))
1828 /* ??? Even non-trapping memories such as stack frame
1829 references must be avoided. For stores, we collect
1830 no lifetime info; for reads, we'd have to assert
1831 true_dependence false against every store in the
1833 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
1840 if (! condjump_p (jump))
1843 /* Find the extent of the conditional. */
1844 cond = noce_get_condition (jump, &earliest);
/* Collect the register sets used by the safety check below:
1849 MERGE_SET = set of registers set in MERGE_BB
1850 TEST_LIVE = set of registers live at EARLIEST
1851 TEST_SET = set of registers set between EARLIEST and the
1852 end of the block. */
1854 tmp = INITIALIZE_REG_SET (tmp_head);
1855 merge_set = INITIALIZE_REG_SET (merge_set_head);
1856 test_live = INITIALIZE_REG_SET (test_live_head);
1857 test_set = INITIALIZE_REG_SET (test_set_head);
1859 /* ??? bb->local_set is only valid during calculate_global_regs_live,
1860 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
1861 since we've already asserted that MERGE_BB is small. */
1862 propagate_block (merge_bb, tmp, merge_set, 0);
1864 /* For small register class machines, don't lengthen lifetimes of
1865 hard registers before reload. */
1866 if (SMALL_REGISTER_CLASSES && ! reload_completed)
1868 EXECUTE_IF_SET_IN_BITMAP
1871 if (i < FIRST_PSEUDO_REGISTER
1873 && ! global_regs[i])
1878 /* For TEST, we're interested in a range of insns, not a whole block.
1879 Moreover, we're interested in the insns live from OTHER_BB. */
1881 COPY_REG_SET (test_live, other_bb->global_live_at_start);
1882 pbi = init_propagate_block_info (test_bb, test_live, test_set, 0);
/* Walk backward from the jump to EARLIEST, accumulating liveness.  */
1884 for (insn = jump; ; insn = prev)
1886 prev = propagate_one_insn (pbi, insn);
1887 if (insn == earliest)
1891 free_propagate_block_info (pbi);
1893 /* We can perform the transformation if
1894 MERGE_SET & (TEST_SET | TEST_LIVE)
1896 TEST_SET & merge_bb->global_live_at_start
1899 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
1900 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
1901 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
1903 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
1905 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
1908 FREE_REG_SET (merge_set);
1909 FREE_REG_SET (test_live);
1910 FREE_REG_SET (test_set);
1917 /* We don't want to use normal invert_jump or redirect_jump because
1918 we don't want to delete_insn called. Also, we want to do our own
1919 change group management. */
1921 old_dest = JUMP_LABEL (jump);
1923 ? ! invert_jump_1 (jump, new_dest)
1924 : ! redirect_jump_1 (jump, new_dest))
1927 if (! apply_change_group ())
/* Changes committed: fix up label reference counts by hand.  */
1931 LABEL_NUSES (old_dest) -= 1;
1933 LABEL_NUSES (new_dest) += 1;
1934 JUMP_LABEL (jump) = new_dest;
/* If the branch sense was inverted, invert the probability note too.  */
1938 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
1940 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
1943 /* Move the insns out of MERGE_BB to before the branch. */
1946 if (end == merge_bb->end)
1947 merge_bb->end = PREV_INSN (head);
/* Drop block/loop notes at the tail of the moved range so they are not
   relocated into the middle of the test block.  */
1949 head = squeeze_notes (head, end);
1950 if (GET_CODE (end) == NOTE
1951 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
1952 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
1953 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
1954 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
1955 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
1956 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
1960 end = PREV_INSN (end);
1963 reorder_insns (head, end, PREV_INSN (earliest));
1972 /* Main entry point for all if-conversion. */
/* LIFE_DATA_OK is nonzero when global life information is valid and may be
   recomputed for the blocks the pass changed.  NOTE(review): gapped
   excerpt — loop braces and some statements fall between visible lines.  */
1975 if_convert (life_data_ok)
1980 num_possible_if_blocks = 0;
1981 num_updated_if_blocks = 0;
1982 num_removed_blocks = 0;
1984 /* Free up basic_block_for_insn so that we don't have to keep it
1985 up to date, either here or in merge_blocks_nomove. */
1986 free_basic_block_vars (1);
1988 /* Compute postdominators if we think we'll use them. */
1989 post_dominators = NULL;
1990 if (HAVE_conditional_execution || life_data_ok)
1992 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1993 compute_flow_dominators (NULL, post_dominators);
/* Remember each block's pre-pass index so post_dominators, which was
   computed on the original numbering, stays valid after blocks merge.  */
1996 /* Record initial block numbers. */
1997 for (block_num = 0; block_num < n_basic_blocks; block_num++)
1998 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2000 /* Go through each of the basic blocks looking for things to convert. */
2001 for (block_num = 0; block_num < n_basic_blocks; )
2003 basic_block bb = BASIC_BLOCK (block_num);
/* On success, re-examine from the (possibly merged) block's new index.  */
2004 if (find_if_header (bb))
2005 block_num = bb->index;
2010 if (post_dominators)
2011 sbitmap_vector_free (post_dominators);
2014 fflush (rtl_dump_file);
2016 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2017 compute_bb_for_insn (get_max_uid ());
2019 /* Rebuild life info for basic blocks that require it. */
2020 if (num_removed_blocks && life_data_ok)
2022 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2023 sbitmap_zero (update_life_blocks);
2025 /* If we allocated new pseudos, we must resize the array for sched1. */
2026 if (max_regno < max_reg_num ())
2028 max_regno = max_reg_num ();
2029 allocate_reg_info (max_regno, FALSE, FALSE);
/* Only blocks flagged by SET_UPDATE_LIFE during conversion are redone.  */
2032 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2033 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2034 SET_BIT (update_life_blocks, block_num);
2036 count_or_remove_death_notes (update_life_blocks, 1);
2037 update_life_info (update_life_blocks, UPDATE_LIFE_LOCAL,
2040 sbitmap_free (update_life_blocks);
2043 /* Write the final stats. */
2044 if (rtl_dump_file && num_possible_if_blocks > 0)
2046 fprintf (rtl_dump_file,
2047 "\n%d possible IF blocks searched.\n",
2048 num_possible_if_blocks);
2049 fprintf (rtl_dump_file,
2050 "%d IF blocks converted.\n",
2051 num_updated_if_blocks);
2052 fprintf (rtl_dump_file,
2053 "%d basic blocks deleted.\n\n\n",
2054 num_removed_blocks);
2057 #ifdef ENABLE_CHECKING
2058 verify_flow_info ();