1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-config.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
/* Provide zero defaults for targets whose machine description does not
   define conditional-execution or conditional-move support, so the
   HAVE_* symbols can be tested unconditionally below.
   NOTE(review): this listing has gaps in the original line numbering;
   the matching #endif lines are not visible here.  */
37 #ifndef HAVE_conditional_execution
38 #define HAVE_conditional_execution 0
40 #ifndef HAVE_conditional_move
41 #define HAVE_conditional_move 0
/* Default budget of insns worth predicating in place of one branch;
   scales with how expensive the target says branches are.  */
50 #ifndef MAX_CONDITIONAL_EXECUTE
51 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
/* Edge flags that mark edges we must never try to convert across.  */
54 #define EDGE_COMPLEX (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_EH)
56 #define NULL_EDGE ((struct edge_def *)NULL)
57 #define NULL_BLOCK ((struct basic_block_def *)NULL)
/* Per-run statistics for the if-conversion pass (dumped elsewhere).  */
59 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
60 static int num_possible_if_blocks;
62 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
   execution or straight-line code.  */
64 static int num_updated_if_blocks;
66 /* # of basic blocks that were removed. */
67 static int num_removed_blocks;
69 /* The post-dominator relation on the original block numbers. */
70 static sbitmap *post_dominators;
72 /* Forward references. */
/* Utility walkers over a basic block's insn chain.  */
73 static int count_bb_insns PARAMS ((basic_block));
74 static rtx first_active_insn PARAMS ((basic_block));
75 static int last_active_insn_p PARAMS ((basic_block, rtx));
76 static int seq_contains_jump PARAMS ((rtx));
/* The conditional-execution (COND_EXEC) conversion path.  */
78 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
79 static rtx cond_exec_get_condition PARAMS ((rtx));
80 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
81 basic_block, basic_block));
/* The "noce" (no conditional execution) path: store-flag and cmove.  */
83 static rtx noce_get_condition PARAMS ((rtx, rtx *));
84 static int noce_process_if_block PARAMS ((basic_block, basic_block,
85 basic_block, basic_block));
/* Drivers that recognize if-blocks in the CFG and dispatch to the
   converters above, then merge the surviving blocks.  */
87 static int process_if_block PARAMS ((basic_block, basic_block,
88 basic_block, basic_block));
89 static void merge_if_block PARAMS ((basic_block, basic_block,
90 basic_block, basic_block));
92 static int find_if_header PARAMS ((basic_block));
93 static int find_if_block PARAMS ((basic_block, edge, edge));
94 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
95 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
96 static int find_memory PARAMS ((rtx *, void *));
97 static int dead_or_predicable PARAMS ((basic_block, basic_block,
98 basic_block, rtx, int));
100 /* Abuse the basic_block AUX field to store the original block index,
101 as well as a flag indicating that the block should be rescanned for
   life-info update.  The index occupies the high bits; bit 0 is the
   "update life" flag.  */
104 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
105 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
106 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
107 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
110 /* Count the number of non-jump active insns in BB. */
/* NOTE(review): this listing is missing the function header, locals and
   loop structure (original lines 111-120, 122-125 absent); only the
   classification test and the chain walk are visible.  */
121 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
126 insn = NEXT_INSN (insn);
132 /* Return the first non-jump active insn in the basic block. */
135 first_active_insn (bb)
/* Skip a leading CODE_LABEL and any NOTE insns; if what remains is a
   JUMP_INSN the block has no active insn.
   NOTE(review): function body is fragmentary in this listing — the
   return statements and loop braces are not visible.  */
140 if (GET_CODE (insn) == CODE_LABEL)
144 insn = NEXT_INSN (insn);
147 while (GET_CODE (insn) == NOTE)
151 insn = NEXT_INSN (insn);
154 if (GET_CODE (insn) == JUMP_INSN)
160 /* Return true if INSN is the last active non-jump insn in BB. */
163 last_active_insn_p (bb, insn)
/* Advance past trailing NOTEs; INSN was last iff the next active insn
   is the block's jump.  (Fragmentary: loop header not visible.)  */
171 insn = NEXT_INSN (insn);
173 while (GET_CODE (insn) == NOTE);
175 return GET_CODE (insn) == JUMP_INSN;
178 /* It is possible, especially when having dealt with multi-word
179 arithmetic, for the expanders to have emitted jumps. Search
180 through the sequence and return TRUE if a jump exists so that
181 we can abort the conversion. */
184 seq_contains_jump (insn)
/* Linear scan of the emitted sequence; the loop structure and return
   statements are not visible in this listing.  */
189 if (GET_CODE (insn) == JUMP_INSN)
191 insn = NEXT_INSN (insn);
196 /* Go through a bunch of insns, converting them to conditional
197 execution format if possible. Return TRUE if all of the non-note
198 insns were processed. */
201 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
202 rtx start; /* first insn to look at */
203 rtx end; /* last insn to look at */
204 rtx test; /* conditional execution test */
205 rtx prob_val; /* probability of branch taken. */
206 int mod_ok; /* true if modifications to the test ok in the last insn. */
208 int must_be_last = FALSE;
/* Walk [start, end] predicating each real insn with TEST.
   NOTE(review): the loop body below is fragmentary in this listing;
   exit conditions and several statements are not visible.  */
211 for (insn = start; ; insn = NEXT_INSN (insn))
213 if (GET_CODE (insn) == NOTE)
216 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
219 /* Remove USE insns that get in the way. */
220 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
222 /* ??? Ug. Actually unlinking the thing is problematic,
223 given what we'd have to coordinate with our callers. */
224 PUT_CODE (insn, NOTE);
225 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
226 NOTE_SOURCE_FILE (insn) = 0;
230 /* Last insn wasn't last? */
/* An insn that writes the tested condition is only tolerable as the
   final insn (and only when MOD_OK).  */
234 if (modified_in_p (test, insn))
241 /* Now build the conditional form of the instruction. */
242 validate_change (insn, &PATTERN (insn),
243 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
/* Propagate the branch probability onto predicated calls.  */
246 if (GET_CODE (insn) == CALL_INSN && prob_val)
247 validate_change (insn, &REG_NOTES (insn),
248 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
249 REG_NOTES (insn)), 1);
259 /* Return the condition for a jump. Do not do any special processing. */
262 cond_exec_get_condition (jump)
/* Extract the comparison from the IF_THEN_ELSE of a simple condjump;
   return statements are not visible in this listing.  */
267 if (any_condjump_p (jump))
268 test_if = SET_SRC (pc_set (jump));
271 cond = XEXP (test_if, 0);
273 /* If this branches to JUMP_LABEL when the condition is false,
274 reverse the condition. */
275 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
276 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
277 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
278 GET_MODE (cond), XEXP (cond, 0),
284 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
285 to conditional execution. Return TRUE if we were successful at
286 converting the block. */
289 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
290 basic_block test_bb; /* Basic block test is in */
291 basic_block then_bb; /* Basic block for THEN block */
292 basic_block else_bb; /* Basic block for ELSE block */
293 basic_block join_bb; /* Basic block the join label is in */
295 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
296 rtx then_start; /* first insn in THEN block */
297 rtx then_end; /* last insn + 1 in THEN block */
298 rtx else_start; /* first insn in ELSE block or NULL */
299 rtx else_end; /* last insn + 1 in ELSE block */
300 int max; /* max # of insns to convert. */
301 int then_mod_ok; /* whether conditional mods are ok in THEN */
302 rtx true_expr; /* test for else block insns */
303 rtx false_expr; /* test for then block insns */
304 rtx true_prob_val; /* probability of else block */
305 rtx false_prob_val; /* probability of then block */
/* NOTE(review): several statements of this function (early-return
   checks, braces, the ELSE-block guard) are missing from this listing;
   comments below describe only the visible code.  */
308 /* Find the conditional jump to the ELSE or JOIN part, and isolate
310 test_expr = cond_exec_get_condition (test_bb->end);
314 /* If the conditional jump is more than just a conditional jump,
315 then we can not do conditional execution conversion on this block. */
316 if (!onlyjump_p (test_bb->end))
319 /* Collect the bounds of where we're to search. */
321 then_start = then_bb->head;
322 then_end = then_bb->end;
324 /* Skip a label heading THEN block. */
325 if (GET_CODE (then_start) == CODE_LABEL)
326 then_start = NEXT_INSN (then_start);
328 /* Skip a (use (const_int 0)) or branch as the final insn. */
329 if (GET_CODE (then_end) == INSN
330 && GET_CODE (PATTERN (then_end)) == USE
331 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
332 then_end = PREV_INSN (then_end);
333 else if (GET_CODE (then_end) == JUMP_INSN)
334 then_end = PREV_INSN (then_end);
338 /* Skip the ELSE block's label. */
339 else_start = NEXT_INSN (else_bb->head);
340 else_end = else_bb->end;
342 /* Skip a (use (const_int 0)) or branch as the final insn. */
343 if (GET_CODE (else_end) == INSN
344 && GET_CODE (PATTERN (else_end)) == USE
345 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
346 else_end = PREV_INSN (else_end);
347 else if (GET_CODE (else_end) == JUMP_INSN)
348 else_end = PREV_INSN (else_end);
351 /* How many instructions should we convert in total? */
/* With an ELSE block two blocks get predicated, so the budget doubles.  */
355 max = 2 * MAX_CONDITIONAL_EXECUTE;
356 n_insns = count_bb_insns (else_bb);
359 max = MAX_CONDITIONAL_EXECUTE;
360 n_insns += count_bb_insns (then_bb);
364 /* Map test_expr/test_jump into the appropriate MD tests to use on
365 the conditionally executed code. */
367 true_expr = test_expr;
368 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
369 GET_MODE (true_expr), XEXP (true_expr, 0),
370 XEXP (true_expr, 1));
372 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
375 true_prob_val = XEXP (true_prob_val, 0);
376 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
379 false_prob_val = NULL_RTX;
381 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
382 on the THEN block. */
383 then_mod_ok = (else_bb == NULL_BLOCK);
385 /* Go through the THEN and ELSE blocks converting the insns if possible
386 to conditional execution. */
389 && ! cond_exec_process_insns (then_start, then_end,
390 false_expr, false_prob_val, then_mod_ok))
394 && ! cond_exec_process_insns (else_start, else_end,
395 true_expr, true_prob_val, TRUE))
398 if (! apply_change_group ())
401 /* Conversion succeeded. */
403 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
404 n_insns, (n_insns == 1) ? " was" : "s were");
406 /* Merge the blocks! */
407 merge_if_block (test_bb, then_bb, else_bb, join_bb);
415 /* Used by noce_process_if_block to communicate with its subroutines.
417 The subroutines know that A and B may be evaluated freely. They
418 know that X is a register. They should insert new instructions
419 before cond_earliest. */
/* NOTE(review): the struct's opening line and the A/B/X/insn fields are
   not visible in this listing; only the jump/condition fields appear.  */
425 rtx jump, cond, cond_earliest;
/* Forward declarations for the noce_* transformation helpers; each
   returns TRUE on a successful conversion.  */
428 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
430 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
431 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
432 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
433 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
434 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
435 rtx, enum rtx_code, rtx,
437 static int noce_try_cmove PARAMS ((struct noce_if_info *));
438 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
440 /* Helper function for noce_try_store_flag*. */
443 noce_emit_store_flag (if_info, x, reversep, normalize)
444 struct noce_if_info *if_info;
446 int reversep, normalize;
448 rtx cond = if_info->cond;
/* A condition is "complex" if either operand is not a general operand;
   then emit_store_flag cannot be used and we must recognize directly.  */
452 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
453 || ! general_operand (XEXP (cond, 1), VOIDmode));
455 /* If earliest == jump, or when the condition is complex, try to
456 build the store_flag insn directly. */
459 cond = XEXP (SET_SRC (PATTERN (if_info->jump)), 0);
461 if ((if_info->cond_earliest == if_info->jump || cond_complex)
462 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
466 code = GET_CODE (cond);
468 code = reverse_condition (code);
/* Hand-build (set x (code op0 op1)) and keep it only if the target
   recognizes it as a valid insn.  */
470 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
472 tmp = gen_rtx_SET (VOIDmode, x, tmp);
475 tmp = emit_insn (tmp);
477 if (recog_memoized (tmp) >= 0)
483 if_info->cond_earliest = if_info->jump;
491 /* Don't even try if the comparison operands are weird. */
495 code = GET_CODE (cond);
497 code = reverse_condition (code);
/* Fall back to the generic expander; the final argument flags unsigned
   comparisons.  */
499 return emit_store_flag (x, code, XEXP (cond, 0),
500 XEXP (cond, 1), VOIDmode,
501 (code == LTU || code == LEU
502 || code == GEU || code == GTU), normalize);
505 /* Convert "if (test) x = 1; else x = 0".
507 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
508 tried in noce_try_store_flag_constants after noce_try_cmove has had
509 a go at the conversion. */
512 noce_try_store_flag (if_info)
513 struct noce_if_info *if_info;
/* Accept either orientation: (a=0, b=STORE_FLAG_VALUE) as-is, or the
   reverse when the comparison can be safely inverted.  */
518 if (GET_CODE (if_info->b) == CONST_INT
519 && INTVAL (if_info->b) == STORE_FLAG_VALUE
520 && if_info->a == const0_rtx)
522 else if (if_info->b == const0_rtx
523 && GET_CODE (if_info->a) == CONST_INT
524 && INTVAL (if_info->a) == STORE_FLAG_VALUE
525 && can_reverse_comparison_p (if_info->cond, if_info->jump))
532 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
535 if (target != if_info->x)
536 emit_move_insn (if_info->x, target);
540 emit_insns_before (seq, if_info->cond_earliest);
551 /* Convert "if (test) x = a; else x = b", for A and B constant. */
554 noce_try_store_flag_constants (if_info)
555 struct noce_if_info *if_info;
559 HOST_WIDE_INT itrue, ifalse, diff, tmp;
560 int normalize, can_reverse;
/* NOTE(review): the enclosing condition and several braces are missing
   from this listing; the strategy-selection chain below is partial.  */
563 && GET_CODE (if_info->a) == CONST_INT
564 && GET_CODE (if_info->b) == CONST_INT)
566 ifalse = INTVAL (if_info->a);
567 itrue = INTVAL (if_info->b);
568 diff = itrue - ifalse;
570 can_reverse = can_reverse_comparison_p (if_info->cond, if_info->jump);
/* Pick a normalization for the store-flag result that lets the final
   value be produced by one cheap follow-up op (add/shift/or/and).  */
573 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
575 else if (ifalse == 0 && exact_log2 (itrue) >= 0
576 && (STORE_FLAG_VALUE == 1
577 || BRANCH_COST >= 2))
579 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
580 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
581 normalize = 1, reversep = 1;
583 && (STORE_FLAG_VALUE == -1
584 || BRANCH_COST >= 2))
586 else if (ifalse == -1 && can_reverse
587 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
588 normalize = -1, reversep = 1;
589 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
/* If the comparison was reversed, swap the two constants to match.  */
597 tmp = itrue; itrue = ifalse; ifalse = tmp;
602 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
609 /* if (test) x = 3; else x = 4;
610 => x = 3 + (test == 0); */
611 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
613 target = expand_binop (GET_MODE (if_info->x),
614 (diff == STORE_FLAG_VALUE
615 ? add_optab : sub_optab),
616 GEN_INT (ifalse), target, if_info->x, 0,
620 /* if (test) x = 8; else x = 0;
621 => x = (test != 0) << 3; */
622 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
624 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
625 target, GEN_INT (tmp), if_info->x, 0,
629 /* if (test) x = -1; else x = b;
630 => x = -(test != 0) | b; */
631 else if (itrue == -1)
633 target = expand_binop (GET_MODE (if_info->x), ior_optab,
634 target, GEN_INT (ifalse), if_info->x, 0,
638 /* if (test) x = a; else x = b;
639 => x = (-(test != 0) & (b - a)) + a; */
642 target = expand_binop (GET_MODE (if_info->x), and_optab,
643 target, GEN_INT (diff), if_info->x, 0,
646 target = expand_binop (GET_MODE (if_info->x), add_optab,
647 target, GEN_INT (ifalse), if_info->x, 0,
657 if (target != if_info->x)
658 emit_move_insn (if_info->x, target);
/* Abort if the expanders emitted any jump; the caller expects a
   straight-line replacement sequence.  */
663 if (seq_contains_jump (seq))
666 emit_insns_before (seq, if_info->cond_earliest);
674 /* Convert "if (test) foo++" into "foo += (test != 0)", and
675 similarly for "foo--". */
678 noce_try_store_flag_inc (if_info)
679 struct noce_if_info *if_info;
682 int subtract, normalize;
/* Match the pattern: b == x and a == (plus x +/-1), with a reversible
   comparison.  (The leading condition of this && chain is not visible
   in this listing.)  */
688 /* Should be no `else' case to worry about. */
689 && if_info->b == if_info->x
690 && GET_CODE (if_info->a) == PLUS
691 && (XEXP (if_info->a, 1) == const1_rtx
692 || XEXP (if_info->a, 1) == constm1_rtx)
693 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
694 && can_reverse_comparison_p (if_info->cond, if_info->jump))
/* Decide whether the store-flag value should be added or subtracted,
   or whether it must be normalized to the exact +/-1 increment.  */
696 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
697 subtract = 0, normalize = 0;
698 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
699 subtract = 1, normalize = 0;
701 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
705 target = noce_emit_store_flag (if_info,
706 gen_reg_rtx (GET_MODE (if_info->x)),
710 target = expand_binop (GET_MODE (if_info->x),
711 subtract ? sub_optab : add_optab,
712 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
715 if (target != if_info->x)
716 emit_move_insn (if_info->x, target);
721 if (seq_contains_jump (seq))
724 emit_insns_before (seq, if_info->cond_earliest);
735 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
738 noce_try_store_flag_mask (if_info)
739 struct noce_if_info *if_info;
/* Requires STORE_FLAG_VALUE of +/-1 so -(flag) is an all-ones/zero
   mask.  Either arm may be the zero constant if the comparison can be
   reversed.  (Leading condition lines missing from this listing.)  */
747 || STORE_FLAG_VALUE == -1)
748 && ((if_info->a == const0_rtx
749 && rtx_equal_p (if_info->b, if_info->x))
750 || ((reversep = can_reverse_comparison_p (if_info->cond,
752 && if_info->b == const0_rtx
753 && rtx_equal_p (if_info->a, if_info->x))))
756 target = noce_emit_store_flag (if_info,
757 gen_reg_rtx (GET_MODE (if_info->x)),
760 target = expand_binop (GET_MODE (if_info->x), and_optab,
761 if_info->x, target, if_info->x, 0,
766 if (target != if_info->x)
767 emit_move_insn (if_info->x, target);
772 if (seq_contains_jump (seq))
775 emit_insns_before (seq, if_info->cond_earliest);
786 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
789 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
790 struct noce_if_info *if_info;
791 rtx x, cmp_a, cmp_b, vfalse, vtrue;
794 /* If earliest == jump, try to build the cmove insn directly.
795 This is helpful when combine has created some complex condition
796 (like for alpha's cmovlbs) that we can't hope to regenerate
797 through the normal interface. */
799 if (if_info->cond_earliest == if_info->jump)
/* Hand-build (set x (if_then_else cond vtrue vfalse)) and keep it only
   if the target recognizes it.  */
803 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
804 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
805 tmp = gen_rtx_SET (VOIDmode, x, tmp);
808 tmp = emit_insn (tmp);
810 if (recog_memoized (tmp) >= 0)
822 /* Don't even try if the comparison operands are weird. */
823 if (! general_operand (cmp_a, GET_MODE (cmp_a))
824 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
827 #if HAVE_conditional_move
/* Last argument flags unsigned comparison codes for the expander.  */
828 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
829 vtrue, vfalse, GET_MODE (x),
830 (code == LTU || code == GEU
831 || code == LEU || code == GTU));
833 /* We'll never get here, as noce_process_if_block doesn't call the
834 functions involved. Ifdef code, however, should be discouraged
835 because it leads to typos in the code not selected. However,
836 emit_conditional_move won't exist either. */
841 /* Try only simple constants and registers here. More complex cases
842 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
843 has had a go at it. */
846 noce_try_cmove (if_info)
847 struct noce_if_info *if_info;
852 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
853 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
857 code = GET_CODE (if_info->cond);
858 target = noce_emit_cmove (if_info, if_info->x, code,
859 XEXP (if_info->cond, 0),
860 XEXP (if_info->cond, 1),
861 if_info->a, if_info->b);
865 if (target != if_info->x)
866 emit_move_insn (if_info->x, target);
870 emit_insns_before (seq, if_info->cond_earliest);
883 /* Try more complex cases involving conditional_move. */
886 noce_try_cmove_arith (if_info)
887 struct noce_if_info *if_info;
/* NOTE(review): locals, sequence start/end calls and the
   end_seq_and_fail label are not visible in this listing.  */
897 /* A conditional move from two memory sources is equivalent to a
898 conditional on their addresses followed by a load. Don't do this
899 early because it'll screw alias analysis. Note that we've
900 already checked for no side effects. */
901 if (! no_new_pseudos && cse_not_expected
902 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
/* In the two-MEM case, select between the ADDRESSES in X, then load.  */
907 x = gen_reg_rtx (Pmode);
911 /* ??? We could handle this if we knew that a load from A or B could
912 not fault. This is also true if we've already loaded
913 from the address along the path from ENTRY. */
914 else if (may_trap_p (a) || may_trap_p (b))
917 /* if (test) x = a + b; else x = c - d;
924 code = GET_CODE (if_info->cond);
925 insn_a = if_info->insn_a;
926 insn_b = if_info->insn_b;
928 /* Possibly rearrange operands to make things come out more natural. */
929 if (can_reverse_comparison_p (if_info->cond, if_info->jump))
932 if (rtx_equal_p (b, x))
934 else if (general_operand (b, GET_MODE (b)))
/* Swap the two arms (and their source insns) and invert the test.  */
939 code = reverse_condition (code);
940 tmp = a, a = b, b = tmp;
941 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
947 /* If either operand is complex, load it into a register first.
948 The best way to do this is to copy the original insn. In this
949 way we preserve any clobbers etc that the insn may have had.
950 This is of course not possible in the IS_MEM case. */
951 if (! general_operand (a, GET_MODE (a)))
956 goto end_seq_and_fail;
960 tmp = gen_reg_rtx (GET_MODE (a));
961 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
964 goto end_seq_and_fail;
967 a = gen_reg_rtx (GET_MODE (a));
968 tmp = copy_rtx (insn_a);
969 set = single_set (tmp);
971 tmp = emit_insn (PATTERN (tmp));
973 if (recog_memoized (tmp) < 0)
974 goto end_seq_and_fail;
/* Same treatment for the B arm.  */
976 if (! general_operand (b, GET_MODE (b)))
981 goto end_seq_and_fail;
985 tmp = gen_reg_rtx (GET_MODE (b));
986 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
989 goto end_seq_and_fail;
992 b = gen_reg_rtx (GET_MODE (b));
993 tmp = copy_rtx (insn_b);
994 set = single_set (tmp);
996 tmp = emit_insn (PATTERN (tmp));
998 if (recog_memoized (tmp) < 0)
999 goto end_seq_and_fail;
1002 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1003 XEXP (if_info->cond, 1), a, b);
1006 goto end_seq_and_fail;
1008 /* If we're handling a memory for above, emit the load now. */
1011 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1013 /* Copy over flags as appropriate. */
1014 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1015 MEM_VOLATILE_P (tmp) = 1;
1016 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1017 MEM_IN_STRUCT_P (tmp) = 1;
1018 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1019 MEM_SCALAR_P (tmp) = 1;
1020 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1021 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
1023 emit_move_insn (if_info->x, tmp);
1025 else if (target != x)
1026 emit_move_insn (x, target);
1030 emit_insns_before (tmp, if_info->cond_earliest);
1038 /* Look for the condition for the jump first. We'd prefer to avoid
1039 get_condition if we can -- it tries to look back for the contents
1040 of an original compare. On targets that use normal integers for
1041 comparisons, e.g. alpha, this is wasteful. */
1044 noce_get_condition (jump, earliest)
1051 /* If the condition variable is a register and is MODE_INT, accept it.
1052 Otherwise, fall back on get_condition. */
1054 if (! any_condjump_p (jump))
1057 set = pc_set (jump);
1059 cond = XEXP (SET_SRC (set), 0);
1060 if (GET_CODE (XEXP (cond, 0)) == REG
1061 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1065 /* If this branches to JUMP_LABEL when the condition is false,
1066 reverse the condition. */
1067 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1068 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1069 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1070 GET_MODE (cond), XEXP (cond, 0),
1074 cond = get_condition (jump, earliest);
1079 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1080 without using conditional execution. Return TRUE if we were
1081 successful at converting the block. */
1084 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1085 basic_block test_bb; /* Basic block test is in */
1086 basic_block then_bb; /* Basic block for THEN block */
1087 basic_block else_bb; /* Basic block for ELSE block */
1088 basic_block join_bb; /* Basic block the join label is in */
1090 /* We're looking for patterns of the form
1092 (1) if (...) x = a; else x = b;
1093 (2) x = b; if (...) x = a;
1094 (3) if (...) x = a; // as if with an initial x = x.
1096 The latter patterns require jumps to be more expensive.
1098 ??? For future expansion, look for multiple X in such patterns. */
1100 struct noce_if_info if_info;
1103 rtx orig_x, x, a, b;
1104 rtx jump, cond, insn;
/* NOTE(review): a number of early-return statements and brace lines of
   this function are missing from this listing.  */
1106 /* If this is not a standard conditional jump, we can't parse it. */
1107 jump = test_bb->end;
1108 cond = noce_get_condition (jump, &if_info.cond_earliest);
1112 /* If the conditional jump is more than just a conditional jump,
1113 then we can not do if-conversion on this block. */
1114 if (! onlyjump_p (jump))
1117 /* We must be comparing objects whose modes imply the size. */
1118 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1121 /* Look for one of the potential sets. */
1122 insn_a = first_active_insn (then_bb);
1124 || ! last_active_insn_p (then_bb, insn_a)
1125 || (set_a = single_set (insn_a)) == NULL_RTX)
1128 x = SET_DEST (set_a);
1129 a = SET_SRC (set_a);
1131 /* Look for the other potential set. Make sure we've got equivalent
1133 /* ??? This is overconservative. Storing to two different mems is
1134 as easy as conditionally computing the address. Storing to a
1135 single mem merely requires a scratch memory to use as one of the
1136 destination addresses; often the memory immediately below the
1137 stack pointer is available for this. */
1141 insn_b = first_active_insn (else_bb);
1143 || ! last_active_insn_p (else_bb, insn_b)
1144 || (set_b = single_set (insn_b)) == NULL_RTX
1145 || ! rtx_equal_p (x, SET_DEST (set_b)))
/* No ELSE block: look for pattern (2), a prior set of X just before
   the condition, usable only if it doesn't feed the condition or A.  */
1150 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1152 || GET_CODE (insn_b) != INSN
1153 || (set_b = single_set (insn_b)) == NULL_RTX
1154 || ! rtx_equal_p (x, SET_DEST (set_b))
1155 || reg_mentioned_p (x, cond)
1156 || reg_mentioned_p (x, a)
1157 || reg_mentioned_p (x, SET_SRC (set_b)))
1158 insn_b = set_b = NULL_RTX;
1160 b = (set_b ? SET_SRC (set_b) : x);
1162 /* X may not be mentioned in the range (cond_earliest, jump]. */
1163 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1164 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1167 /* A and B may not be modified in the range [cond_earliest, jump). */
1168 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1170 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1173 /* Only operate on register destinations, and even then avoid extending
1174 the lifetime of hard registers on small register class machines. */
1176 if (GET_CODE (x) != REG
1177 || (SMALL_REGISTER_CLASSES
1178 && REGNO (x) < FIRST_PSEUDO_REGISTER))
/* Work in a fresh pseudo and copy back to ORIG_X at the end.  */
1182 x = gen_reg_rtx (GET_MODE (x));
1185 /* Don't operate on sources that may trap or are volatile. */
1186 if (side_effects_p (a) || side_effects_p (b)
1187 || (GET_CODE (a) != MEM && may_trap_p (a))
1188 || (GET_CODE (b) != MEM && may_trap_p (b)))
1191 /* Set up the info block for our subroutines. */
1192 if_info.cond = cond;
1193 if_info.jump = jump;
1194 if_info.insn_a = insn_a;
1195 if_info.insn_b = insn_b;
1200 /* Try optimizations in some approximation of a useful order. */
1201 /* ??? Should first look to see if X is live incoming at all. If it
1202 isn't, we don't need anything but an unconditional set. */
1204 /* Look and see if A and B are really the same. Avoid creating silly
1205 cmove constructs that no one will fix up later. */
1206 if (rtx_equal_p (a, b))
1208 /* If we have an INSN_B, we don't have to create any new rtl. Just
1209 move the instruction that we already have. If we don't have an
1210 INSN_B, that means that A == X, and we've got a noop move. In
1211 that case don't do anything and let the code below delete INSN_A. */
1212 if (insn_b && else_bb)
1214 if (else_bb && insn_b == else_bb->end)
1215 else_bb->end = PREV_INSN (insn_b);
1216 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
/* Attempt the transformations; each returns TRUE on success.  */
1223 if (noce_try_store_flag (&if_info))
1225 if (HAVE_conditional_move
1226 && noce_try_cmove (&if_info))
1228 if (! HAVE_conditional_execution)
1230 if (noce_try_store_flag_constants (&if_info))
1232 if (noce_try_store_flag_inc (&if_info))
1234 if (noce_try_store_flag_mask (&if_info))
1236 if (HAVE_conditional_move
1237 && noce_try_cmove_arith (&if_info))
1244 /* The original sets may now be killed. */
1245 if (insn_a == then_bb->end)
1246 then_bb->end = PREV_INSN (insn_a);
1247 flow_delete_insn (insn_a);
1249 /* Several special cases here: First, we may have reused insn_b above,
1250 in which case insn_b is now NULL. Second, we want to delete insn_b
1251 if it came from the ELSE block, because it follows the now correct
1252 write that appears in the TEST block. However, if we got insn_b from
1253 the TEST block, it may in fact be loading data needed for the comparison.
1254 We'll let life_analysis remove the insn if it's really dead. */
1255 if (insn_b && else_bb)
1257 if (insn_b == else_bb->end)
1258 else_bb->end = PREV_INSN (insn_b);
1259 flow_delete_insn (insn_b);
1262 /* The new insns will have been inserted before cond_earliest. We should
1263 be able to remove the jump with impunity, but the condition itself may
1264 have been modified by gcse to be shared across basic blocks. */
1265 test_bb->end = PREV_INSN (jump);
1266 flow_delete_insn (jump);
1268 /* If we used a temporary, fix it up now. */
1272 emit_move_insn (orig_x, x);
1273 insn_b = gen_sequence ();
1276 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1279 /* Merge the blocks! */
1280 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1285 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1286 straight line code. Return true if successful. */
1289 process_if_block (test_bb, then_bb, else_bb, join_bb)
1290 basic_block test_bb; /* Basic block test is in */
1291 basic_block then_bb; /* Basic block for THEN block */
1292 basic_block else_bb; /* Basic block for ELSE block */
1293 basic_block join_bb; /* Basic block the join label is in */
/* Before reload, prefer the "noce" path; otherwise fall back to
   conditional execution when the target supports it.  */
1295 if (! reload_completed
1296 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1299 if (HAVE_conditional_execution
1301 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1307 /* Merge the blocks and mark for local life update. */
1310 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1311 basic_block test_bb; /* Basic block test is in */
1312 basic_block then_bb; /* Basic block for THEN block */
1313 basic_block else_bb; /* Basic block for ELSE block */
1314 basic_block join_bb; /* Basic block the join label is in */
1316 basic_block combo_bb;
1318 /* All block merging is done into the lower block numbers. */
1322 /* First merge TEST block into THEN block. This is a no-brainer since
1323 the THEN block did not have a code label to begin with. */
1325 if (combo_bb->global_live_at_end)
1326 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1327 merge_blocks_nomove (combo_bb, then_bb);
1328 num_removed_blocks++;
1330 /* The ELSE block, if it existed, had a label. That label count
1331 will almost always be zero, but odd things can happen when labels
1332 get their addresses taken. */
1335 merge_blocks_nomove (combo_bb, else_bb);
1336 num_removed_blocks++;
1339 /* If there was no join block reported, that means it was not adjacent
1340 to the others, and so we cannot merge them. */
1344 /* The outgoing edge for the current COMBO block should already
1345 be correct. Verify this. */
1346 if (combo_bb->succ == NULL_EDGE)
1349 /* There should still be a branch at the end of the THEN or ELSE
1350 blocks taking us to our final destination. */
1351 if (! simplejump_p (combo_bb->end)
1352 && ! returnjump_p (combo_bb->end))
1356 /* The JOIN block may have had quite a number of other predecessors too.
1357 Since we've already merged the TEST, THEN and ELSE blocks, we should
1358 have only one remaining edge from our if-then-else diamond. If there
1359 is more than one remaining edge, it must come from elsewhere. There
1360 may be zero incoming edges if the THEN block didn't actually join
1361 back up (as with a call to abort). */
1362 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1364 /* We can merge the JOIN. */
1365 if (combo_bb->global_live_at_end)
1366 COPY_REG_SET (combo_bb->global_live_at_end,
1367 join_bb->global_live_at_end)
1368 merge_blocks_nomove (combo_bb, join_bb);
1369 num_removed_blocks++;
1373 /* We cannot merge the JOIN. */
1375 /* The outgoing edge for the current COMBO block should already
1376 be correct. Verify this. */
1377 if (combo_bb->succ->succ_next != NULL_EDGE
1378 || combo_bb->succ->dest != join_bb)
1381 /* Remove the jump and cruft from the end of the COMBO block. */
1382 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1385 /* Make sure we update life info properly. */
1386 SET_UPDATE_LIFE (combo_bb);
1388 num_updated_if_blocks++;
1391 /* Find a block ending in a simple IF condition.  Return TRUE if
1392    we were able to transform it in some way.  */
/* NOTE(review): several interior lines (return type, braces, and the
   early-exit "return FALSE" statements between the guard tests) are
   elided in this view -- confirm against the full file before editing.  */
1395 find_if_header (test_bb)
1396      basic_block test_bb;
1401   /* The kind of block we're looking for has exactly two successors.  */
1402   if ((then_edge = test_bb->succ) == NULL_EDGE
1403       || (else_edge = then_edge->succ_next) == NULL_EDGE
1404       || else_edge->succ_next != NULL_EDGE)
1407   /* Neither edge should be abnormal.  */
1408   if ((then_edge->flags & EDGE_COMPLEX)
1409       || (else_edge->flags & EDGE_COMPLEX))
1412   /* The THEN edge is canonically the one that falls through.  */
1413   if (then_edge->flags & EDGE_FALLTHRU)
1415   else if (else_edge->flags & EDGE_FALLTHRU)
       /* Fallthru is on the ELSE side; swap so THEN is the fallthru edge.  */
1418       else_edge = then_edge;
1422   /* Otherwise this must be a multiway branch of some sort.  */
   /* Try the diamond/triangle recognizer first, then the two special
      cases that delete or move one arm of the branch.  */
1425   if (find_if_block (test_bb, then_edge, else_edge))
1428       && (! HAVE_conditional_execution || reload_completed))
1430   if (find_if_case_1 (test_bb, then_edge, else_edge))
1432   if (find_if_case_2 (test_bb, then_edge, else_edge))
1440     fprintf (rtl_dump_file, "Conversion succeeded.\n");
1444 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1445 block. If so, we'll try to convert the insns to not require the branch.
1446 Return TRUE if we were successful at converting the block. */
/* NOTE(review): interior lines (braces, "return FALSE" statements, the
   else_bb dump-index expression, etc.) are elided in this view.  */
1449 find_if_block (test_bb, then_edge, else_edge)
1450 basic_block test_bb;
1451 edge then_edge, else_edge;
1453 basic_block then_bb = then_edge->dest;
1454 basic_block else_bb = else_edge->dest;
1455 basic_block join_bb = NULL_BLOCK;
1456 edge then_succ = then_bb->succ;
1457 edge else_succ = else_bb->succ;
1460 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1461 if (then_bb->pred->pred_next != NULL_EDGE)
1464 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1465 if (then_succ != NULL_EDGE
1466 && (then_succ->succ_next != NULL_EDGE
1467 || (then_succ->flags & EDGE_COMPLEX)))
1470 /* If the THEN block has no successors, conditional execution can still
1471 make a conditional call. Don't do this unless the ELSE block has
1472 only one incoming edge -- the CFG manipulation is too ugly otherwise. */
1473 if (then_succ == NULL)
1475 if (else_bb->pred->pred_next == NULL_EDGE)
1478 else_bb = NULL_BLOCK;
1484 /* If the THEN block's successor is the other edge out of the TEST block,
1485 then we have an IF-THEN combo without an ELSE. */
1486 else if (then_succ->dest == else_bb)
1489 else_bb = NULL_BLOCK;
1492 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1493 has exactly one predecessor and one successor, and the outgoing edge
1494 is not complex, then we have an IF-THEN-ELSE combo. */
1495 else if (else_succ != NULL_EDGE
1496 && then_succ->dest == else_succ->dest
1497 && else_bb->pred->pred_next == NULL_EDGE
1498 && else_succ->succ_next == NULL_EDGE
1499 && ! (else_succ->flags & EDGE_COMPLEX))
1500 join_bb = else_succ->dest;
1502 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1506 num_possible_if_blocks++;
1511 fprintf (rtl_dump_file,
1512 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1513 test_bb->index, then_bb->index, else_bb->index,
1516 fprintf (rtl_dump_file,
1517 "\nIF-THEN block found, start %d, then %d, join %d\n",
1518 test_bb->index, then_bb->index, join_bb->index);
1521 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
1522 get the first condition for free, since we've already asserted that
1523 there's a fallthru edge from IF to THEN. */
1524 /* ??? As an enhancement, move the ELSE block. Have to deal with EH and
1525 BLOCK notes, if by no other means than aborting the merge if they
1526 exist. Sticky enough I don't want to think about it now. */
/* Adjacency check: blocks must be consecutive by index so that the
   process_if_block machinery can merge them with merge_blocks_nomove.  */
1527 next_index = then_bb->index;
1528 if (else_bb && ++next_index != else_bb->index)
1530 if (++next_index != join_bb->index)
1538 /* Do the real work. */
1539 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1542 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1543 transformable, but not necessarily the other. There need be no
1546 Return TRUE if we were successful at converting the block.
1548 Cases we'd like to look at:
1551 if (test) goto over; // x not live
1559 if (! test) goto label;
1562 if (test) goto E; // x not live
1576 (3) // This one's really only interesting for targets that can do
1577 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1578 // it results in multiple branches on a cache line, which often
1579 // does not sit well with predictors.
1581 if (test1) goto E; // predicted not taken
1597 (A) Don't do (2) if the branch is predicted against the block we're
1598 eliminating. Do it anyway if we can eliminate a branch; this requires
1599 that the sole successor of the eliminated block postdominate the other
1602 (B) With CE, on (3) we can steal from both sides of the if, creating
1611 Again, this is most useful if J postdominates.
1613 (C) CE substitutes for helpful life information.
1615 (D) These heuristics need a lot of work. */
1617 /* Tests for case 1 above. */
/* Case 1: the THEN block does not fall through (it jumps elsewhere and
   never rejoins), its registers are dead or predicable at the branch, so
   the branch can be redirected and the THEN block deleted outright.
   NOTE(review): interior lines (return type, braces, early "return FALSE"
   statements, the new_lab declaration) are elided in this view.  */
1620 find_if_case_1 (test_bb, then_edge, else_edge)
1621 basic_block test_bb;
1622 edge then_edge, else_edge;
1624 basic_block then_bb = then_edge->dest;
1625 basic_block else_bb = else_edge->dest;
1626 edge then_succ = then_bb->succ;
1629 /* THEN has one successor. */
1630 if (!then_succ || then_succ->succ_next != NULL)
1633 /* THEN does not fall through, but is not strange either. */
1634 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
1637 /* THEN has one predecessor. */
1638 if (then_bb->pred->pred_next != NULL)
1641 /* ELSE follows THEN. (??? could be moved) */
1642 if (else_bb->index != then_bb->index + 1)
1645 num_possible_if_blocks++;
1647 fprintf (rtl_dump_file,
1648 "\nIF-CASE-1 found, start %d, then %d\n",
1649 test_bb->index, then_bb->index);
1651 /* THEN is small. */
1652 if (count_bb_insns (then_bb) > BRANCH_COST)
1655 /* Find the label for THEN's destination. */
1656 if (then_succ->dest == EXIT_BLOCK_PTR)
1660 new_lab = JUMP_LABEL (then_bb->end);
1665 /* Registers set are dead, or are predicable. */
1666 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
1669 /* Conversion went ok, including moving the insns and fixing up the
1670 jump. Adjust the CFG to match. */
/* Liveness at the end of TEST is now the union of what flowed into
   ELSE and what flowed out of the (moved) THEN block.  */
1672 SET_UPDATE_LIFE (test_bb);
1673 bitmap_operation (test_bb->global_live_at_end,
1674 else_bb->global_live_at_start,
1675 then_bb->global_live_at_end, BITMAP_IOR);
1677 make_edge (NULL, test_bb, then_succ->dest, 0);
1678 flow_delete_block (then_bb);
1679 tidy_fallthru_edge (else_edge, test_bb, else_bb);
1681 num_removed_blocks++;
1682 num_updated_if_blocks++;
1687 /* Test for case 2 above. */
/* Case 2: the ELSE block can be eliminated when the branch is predicted
   taken (toward ELSE) or ELSE's successor postdominates THEN; its insns
   are moved before the branch via dead_or_predicable and the block is
   deleted.  Return TRUE on success.
   NOTE(review): interior lines (return type, braces, early "return FALSE"
   statements, the note/new_lab declarations) are elided in this view.  */
1690 find_if_case_2 (test_bb, then_edge, else_edge)
1691 basic_block test_bb;
1692 edge then_edge, else_edge;
1694 basic_block then_bb = then_edge->dest;
1695 basic_block else_bb = else_edge->dest;
1696 edge else_succ = else_bb->succ;
1699 /* ELSE has one successor. */
1700 if (!else_succ || else_succ->succ_next != NULL)
1703 /* ELSE outgoing edge is not complex. */
1704 if (else_succ->flags & EDGE_COMPLEX)
1707 /* ELSE has one predecessor. */
1708 if (else_bb->pred->pred_next != NULL)
1711 /* THEN is not EXIT. */
1712 if (then_bb->index < 0)
1715 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
1716 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
1717 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
1719 else if (else_succ->dest->index < 0
1720 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
1721 ORIG_INDEX (else_succ->dest)))
1726 num_possible_if_blocks++;
1728 fprintf (rtl_dump_file,
1729 "\nIF-CASE-2 found, start %d, else %d\n",
1730 test_bb->index, else_bb->index);
1732 /* ELSE is small. */
/* Fixed: the size gate must measure ELSE, the block being moved and
   deleted below (see dead_or_predicable / flow_delete_block on else_bb);
   the original erroneously counted THEN's insns here, in contradiction
   with the comment above and with the parallel check in find_if_case_1.  */
1733 if (count_bb_insns (else_bb) > BRANCH_COST)
1736 /* Find the label for ELSE's destination. */
1737 if (else_succ->dest == EXIT_BLOCK_PTR)
1741 if (else_succ->flags & EDGE_FALLTHRU)
1743 new_lab = else_succ->dest->head;
1744 if (GET_CODE (new_lab) != CODE_LABEL)
1749 new_lab = JUMP_LABEL (else_bb->end);
1755 /* Registers set are dead, or are predicable. */
1756 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
1759 /* Conversion went ok, including moving the insns and fixing up the
1760 jump. Adjust the CFG to match. */
/* Liveness at the end of TEST is now the union of what flowed into
   THEN and what flowed out of the (moved) ELSE block.  */
1762 SET_UPDATE_LIFE (test_bb);
1763 bitmap_operation (test_bb->global_live_at_end,
1764 then_bb->global_live_at_start,
1765 else_bb->global_live_at_end, BITMAP_IOR);
1767 remove_edge (else_edge);
1768 make_edge (NULL, test_bb, else_succ->dest, 0);
1769 flow_delete_block (else_bb);
1771 num_removed_blocks++;
1772 num_updated_if_blocks++;
1774 /* ??? We may now fallthru from one of THEN's successors into a join
1775 block. Rerun cleanup_cfg? Examine things manually? Wait? */
1780 /* A subroutine of dead_or_predicable called through for_each_rtx.
1781 Return 1 if a memory is found. */
/* NOTE(review): the "rtx *px;" parameter declaration line is elided in
   this view; PX is the subexpression pointer supplied by for_each_rtx.  */
1784 find_memory (px, data)
1786 void *data ATTRIBUTE_UNUSED;
1788 return GET_CODE (*px) == MEM;
1791 /* Used by the code above to perform the actual rtl transformations.
1792 Return TRUE if successful.
1794 TEST_BB is the block containing the conditional branch. MERGE_BB
1795 is the block containing the code to manipulate. NEW_DEST is the
1796 label TEST_BB should be branching to after the conversion.
1797 REVERSEP is true if the sense of the branch should be reversed. */
/* NOTE(review): many interior lines (braces, "return FALSE"/"goto cancel"
   paths, the cancel_changes cleanup, remaining declarations) are elided in
   this view -- confirm against the full file before editing.  */
1800 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
1801 basic_block test_bb, merge_bb, other_bb;
1805 rtx head, end, jump, earliest, old_dest;
1807 jump = test_bb->end;
1809 /* Find the extent of the real code in the merge block. */
/* Skip the leading label and note, and a trailing jump, so HEAD..END
   brackets only the movable insns; HEAD = END = NULL means "empty".  */
1810 head = merge_bb->head;
1811 end = merge_bb->end;
1813 if (GET_CODE (head) == CODE_LABEL)
1814 head = NEXT_INSN (head);
1815 if (GET_CODE (head) == NOTE)
1819 head = end = NULL_RTX;
1822 head = NEXT_INSN (head);
1825 if (GET_CODE (end) == JUMP_INSN)
1829 head = end = NULL_RTX;
1832 end = PREV_INSN (end);
1835 if (HAVE_conditional_execution)
1837 /* In the conditional execution case, we have things easy. We know
1838 the condition is reversible. We don't have to check life info,
1839 because we're going to conditionally execute the code anyway.
1840 All that's left is making sure the insns involved can actually
1845 cond = cond_exec_get_condition (jump);
1847 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
1849 prob_val = XEXP (prob_val, 0);
1853 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1854 GET_MODE (cond), XEXP (cond, 0),
/* Reversing the branch also flips the taken probability.  */
1857 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
1860 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
1867 /* In the non-conditional execution case, we have to verify that there
1868 are no trapping operations, no calls, no references to memory, and
1869 that any registers modified are dead at the branch site. */
1871 rtx insn, cond, prev;
1872 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
1873 regset merge_set, tmp, test_live, test_set;
1874 struct propagate_block_info *pbi;
1877 /* Check for no calls or trapping operations. */
1878 for (insn = head; ; insn = NEXT_INSN (insn))
1880 if (GET_CODE (insn) == CALL_INSN)
1884 if (may_trap_p (PATTERN (insn)))
1887 /* ??? Even non-trapping memories such as stack frame
1888 references must be avoided. For stores, we collect
1889 no lifetime info; for reads, we'd have to assert
1890 true_dependence false against every store in the
1892 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
1899 if (! any_condjump_p (jump))
1902 /* Find the extent of the conditional. */
1903 cond = noce_get_condition (jump, &earliest);
1908 MERGE_SET = set of registers set in MERGE_BB
1909 TEST_LIVE = set of registers live at EARLIEST
1910 TEST_SET = set of registers set between EARLIEST and the
1911 end of the block. */
1913 tmp = INITIALIZE_REG_SET (tmp_head);
1914 merge_set = INITIALIZE_REG_SET (merge_set_head);
1915 test_live = INITIALIZE_REG_SET (test_live_head);
1916 test_set = INITIALIZE_REG_SET (test_set_head);
1918 /* ??? bb->local_set is only valid during calculate_global_regs_live,
1919 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
1920 since we've already asserted that MERGE_BB is small. */
1921 propagate_block (merge_bb, tmp, merge_set, 0);
1923 /* For small register class machines, don't lengthen lifetimes of
1924 hard registers before reload. */
1925 if (SMALL_REGISTER_CLASSES && ! reload_completed)
1927 EXECUTE_IF_SET_IN_BITMAP
1930 if (i < FIRST_PSEUDO_REGISTER
1932 && ! global_regs[i])
1937 /* For TEST, we're interested in a range of insns, not a whole block.
1938 Moreover, we're interested in the insns live from OTHER_BB. */
/* Walk backwards from JUMP to EARLIEST, accumulating liveness/sets.  */
1940 COPY_REG_SET (test_live, other_bb->global_live_at_start);
1941 pbi = init_propagate_block_info (test_bb, test_live, test_set, 0);
1943 for (insn = jump; ; insn = prev)
1945 prev = propagate_one_insn (pbi, insn);
1946 if (insn == earliest)
1950 free_propagate_block_info (pbi);
1952 /* We can perform the transformation if
1953 MERGE_SET & (TEST_SET | TEST_LIVE)
1955 TEST_SET & merge_bb->global_live_at_start
/* ... are both empty; any overlap means a register conflict, so fail.  */
1958 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
1959 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
1960 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
1962 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
1964 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
1967 FREE_REG_SET (merge_set);
1968 FREE_REG_SET (test_live);
1969 FREE_REG_SET (test_set);
1976 /* We don't want to use normal invert_jump or redirect_jump because
1977 we don't want to delete_insn called. Also, we want to do our own
1978 change group management. */
1980 old_dest = JUMP_LABEL (jump);
1982 ? ! invert_jump_1 (jump, new_dest)
1983 : ! redirect_jump_1 (jump, new_dest))
1986 if (! apply_change_group ())
/* Keep label reference counts and JUMP_LABEL consistent by hand,
   since we bypassed the normal redirect path above.  */
1990 LABEL_NUSES (old_dest) -= 1;
1992 LABEL_NUSES (new_dest) += 1;
1993 JUMP_LABEL (jump) = new_dest;
1997 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
1999 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
2002 /* Move the insns out of MERGE_BB to before the branch. */
2005 if (end == merge_bb->end)
2006 merge_bb->end = PREV_INSN (head);
/* Don't drag block-structure or loop notes along with the moved code.  */
2008 head = squeeze_notes (head, end);
2009 if (GET_CODE (end) == NOTE
2010 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2011 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2012 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2013 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2014 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2015 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2019 end = PREV_INSN (end);
2022 reorder_insns (head, end, PREV_INSN (earliest));
2031 /* Main entry point for all if-conversion. */
2034 if_convert (life_data_ok)
2039 num_possible_if_blocks = 0;
2040 num_updated_if_blocks = 0;
2041 num_removed_blocks = 0;
2043 /* Free up basic_block_for_insn so that we don't have to keep it
2044 up to date, either here or in merge_blocks_nomove. */
2045 free_basic_block_vars (1);
2047 /* Compute postdominators if we think we'll use them. */
2048 post_dominators = NULL;
2049 if (HAVE_conditional_execution || life_data_ok)
2051 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2052 compute_flow_dominators (NULL, post_dominators);
2055 /* Record initial block numbers. */
2056 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2057 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2059 /* Go through each of the basic blocks looking for things to convert. */
2060 for (block_num = 0; block_num < n_basic_blocks; )
2062 basic_block bb = BASIC_BLOCK (block_num);
2063 if (find_if_header (bb))
2064 block_num = bb->index;
2069 if (post_dominators)
2070 sbitmap_vector_free (post_dominators);
2073 fflush (rtl_dump_file);
2075 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2076 compute_bb_for_insn (get_max_uid ());
2078 /* Rebuild life info for basic blocks that require it. */
2079 if (num_removed_blocks && life_data_ok)
2081 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2082 sbitmap_zero (update_life_blocks);
2084 /* If we allocated new pseudos, we must resize the array for sched1. */
2085 if (max_regno < max_reg_num ())
2087 max_regno = max_reg_num ();
2088 allocate_reg_info (max_regno, FALSE, FALSE);
2091 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2092 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2093 SET_BIT (update_life_blocks, block_num);
2095 count_or_remove_death_notes (update_life_blocks, 1);
2096 /* ??? See about adding a mode that verifies that the initial
2097 set of blocks don't let registers come live. */
2098 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2099 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2100 | PROP_KILL_DEAD_CODE);
2102 sbitmap_free (update_life_blocks);
2105 /* Write the final stats. */
2106 if (rtl_dump_file && num_possible_if_blocks > 0)
2108 fprintf (rtl_dump_file,
2109 "\n%d possible IF blocks searched.\n",
2110 num_possible_if_blocks);
2111 fprintf (rtl_dump_file,
2112 "%d IF blocks converted.\n",
2113 num_updated_if_blocks);
2114 fprintf (rtl_dump_file,
2115 "%d basic blocks deleted.\n\n\n",
2116 num_removed_blocks);
2119 #ifdef ENABLE_CHECKING
2120 verify_flow_info ();