1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-config.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
38 #ifndef HAVE_conditional_execution
39 #define HAVE_conditional_execution 0
41 #ifndef HAVE_conditional_move
42 #define HAVE_conditional_move 0
51 #ifndef MAX_CONDITIONAL_EXECUTE
52 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
55 #define NULL_EDGE ((struct edge_def *)NULL)
56 #define NULL_BLOCK ((struct basic_block_def *)NULL)
58 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
59 static int num_possible_if_blocks;
61 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
63 static int num_updated_if_blocks;
65 /* # of basic blocks that were removed. */
66 static int num_removed_blocks;
68 /* The post-dominator relation on the original block numbers. */
69 static sbitmap *post_dominators;
71 /* Forward references. */
72 static int count_bb_insns PARAMS ((basic_block));
73 static rtx first_active_insn PARAMS ((basic_block));
74 static int last_active_insn_p PARAMS ((basic_block, rtx));
75 static int seq_contains_jump PARAMS ((rtx));
77 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
78 static rtx cond_exec_get_condition PARAMS ((rtx));
79 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
80 basic_block, basic_block));
82 static rtx noce_get_condition PARAMS ((rtx, rtx *));
83 static int noce_operand_ok PARAMS ((rtx));
84 static int noce_process_if_block PARAMS ((basic_block, basic_block,
85 basic_block, basic_block));
87 static int process_if_block PARAMS ((basic_block, basic_block,
88 basic_block, basic_block));
89 static void merge_if_block PARAMS ((basic_block, basic_block,
90 basic_block, basic_block));
92 static int find_if_header PARAMS ((basic_block));
93 static int find_if_block PARAMS ((basic_block, edge, edge));
94 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
95 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
96 static int find_memory PARAMS ((rtx *, void *));
97 static int dead_or_predicable PARAMS ((basic_block, basic_block,
98 basic_block, rtx, int));
100 /* Abuse the basic_block AUX field to store the original block index,
101 as well as a flag indicating that the block should be rescanned for
104 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
105 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
106 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
107 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
110 /* Count the number of non-jump active insns in BB. */
/* NOTE(review): decimated excerpt -- the function header and most of the body
   are missing here (embedded line numbers skip from 110 to 121); only the
   insn-classification test and loop advance are visible.  Verify against the
   complete ifcvt.c. */
121 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
126 insn = NEXT_INSN (insn);
132 /* Return the first non-jump active insn in the basic block. */
/* NOTE(review): decimated excerpt -- interior lines are missing (embedded
   line numbers skip), so declarations, braces and the return path are not
   shown.  What is visible: skip a leading CODE_LABEL, then skip NOTE insns,
   then test for a JUMP_INSN.  Verify against the complete ifcvt.c. */
135 first_active_insn (bb)
140 if (GET_CODE (insn) == CODE_LABEL)
144 insn = NEXT_INSN (insn);
147 while (GET_CODE (insn) == NOTE)
151 insn = NEXT_INSN (insn);
154 if (GET_CODE (insn) == JUMP_INSN)
160 /* Return true if INSN is the last active non-jump insn in BB. */
/* NOTE(review): decimated excerpt -- the parameter declarations, braces and
   loop head are missing.  Visible logic: advance past NOTE insns after INSN;
   INSN was last iff the next active insn is the block-ending jump. */
163 last_active_insn_p (bb, insn)
171 insn = NEXT_INSN (insn);
173 while (GET_CODE (insn) == NOTE);
175 return GET_CODE (insn) == JUMP_INSN;
178 /* It is possible, especially when having dealt with multi-word
179 arithmetic, for the expanders to have emitted jumps. Search
180 through the sequence and return TRUE if a jump exists so that
181 we can abort the conversion. */
/* NOTE(review): decimated excerpt -- the loop structure and return
   statements are missing between the lines shown. */
184 seq_contains_jump (insn)
189 if (GET_CODE (insn) == JUMP_INSN)
191 insn = NEXT_INSN (insn);
196 /* Go through a bunch of insns, converting them to conditional
197 execution format if possible. Return TRUE if all of the non-note
198 insns were processed. */
/* NOTE(review): decimated excerpt -- braces, declarations (e.g. `insn',
   `pattern') and several statements are missing (embedded line numbers
   skip).  Visible behavior: iterate [start, end], skip NOTEs, delete
   reload-era USE insns by turning them into deleted notes, refuse insns
   that modify the test unless allowed, then wrap each PATTERN in a
   COND_EXEC of the test via validate_change. */
201 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
202 rtx start; /* first insn to look at */
203 rtx end; /* last insn to look at */
204 rtx test; /* conditional execution test */
205 rtx prob_val; /* probability of branch taken. */
206 int mod_ok; /* true if modifications ok last insn. */
208 int must_be_last = FALSE;
212 for (insn = start; ; insn = NEXT_INSN (insn))
214 if (GET_CODE (insn) == NOTE)
217 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
220 /* Remove USE insns that get in the way. */
221 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
223 /* ??? Ug. Actually unlinking the thing is problematic,
224 given what we'd have to coordinate with our callers. */
225 PUT_CODE (insn, NOTE);
226 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
227 NOTE_SOURCE_FILE (insn) = 0;
231 /* Last insn wasn't last? */
235 if (modified_in_p (test, insn))
242 /* Now build the conditional form of the instruction. */
243 pattern = PATTERN (insn);
245 /* If the machine needs to modify the insn being conditionally executed,
246 say for example to force a constant integer operand into a temp
247 register, do so here. */
248 #ifdef IFCVT_MODIFY_INSN
249 IFCVT_MODIFY_INSN (pattern, insn);
254 validate_change (insn, &PATTERN (insn),
255 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
258 if (GET_CODE (insn) == CALL_INSN && prob_val)
/* NOTE(review): "®_NOTES" on the next line looks like mojibake for
   "&REG_NOTES" (an HTML &reg; entity corruption) -- confirm against the
   original source before compiling. */
259 validate_change (insn, ®_NOTES (insn),
260 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
261 REG_NOTES (insn)), 1);
271 /* Return the condition for a jump. Do not do any special processing. */
/* NOTE(review): decimated excerpt -- declarations, braces and the return
   statement are missing.  Visible behavior: extract the IF_THEN_ELSE from a
   condjump's pc-set and reverse the comparison when the branch target is the
   false arm. */
274 cond_exec_get_condition (jump)
279 if (any_condjump_p (jump))
280 test_if = SET_SRC (pc_set (jump));
283 cond = XEXP (test_if, 0);
285 /* If this branches to JUMP_LABEL when the condition is false,
286 reverse the condition. */
287 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
288 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
289 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
290 GET_MODE (cond), XEXP (cond, 0),
296 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
297 to conditional execution. Return TRUE if we were successful at
298 converting the block. */
/* NOTE(review): decimated excerpt -- braces, some declarations (e.g.
   `n_insns'), early-return failure paths and the final return are missing
   (embedded line numbers skip).  Visible flow: get the branch condition,
   bound the THEN/ELSE insn ranges, cap the insn count, build true/false
   test expressions (with optional MD hooks), predicate both arms with
   cond_exec_process_insns, commit via apply_change_group, and merge the
   blocks on success. */
301 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
302 basic_block test_bb; /* Basic block test is in */
303 basic_block then_bb; /* Basic block for THEN block */
304 basic_block else_bb; /* Basic block for ELSE block */
305 basic_block join_bb; /* Basic block the join label is in */
307 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
308 rtx then_start; /* first insn in THEN block */
309 rtx then_end; /* last insn + 1 in THEN block */
310 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
311 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
312 int max; /* max # of insns to convert. */
313 int then_mod_ok; /* whether conditional mods are ok in THEN */
314 rtx true_expr; /* test for else block insns */
315 rtx false_expr; /* test for then block insns */
316 rtx true_prob_val; /* probability of else block */
317 rtx false_prob_val; /* probability of then block */
320 /* Find the conditional jump to the ELSE or JOIN part, and isolate
322 test_expr = cond_exec_get_condition (test_bb->end);
326 /* If the conditional jump is more than just a conditional jump,
327 then we can not do conditional execution conversion on this block. */
328 if (!onlyjump_p (test_bb->end))
331 /* Collect the bounds of where we're to search. */
333 then_start = then_bb->head;
334 then_end = then_bb->end;
336 /* Skip a label heading THEN block. */
337 if (GET_CODE (then_start) == CODE_LABEL)
338 then_start = NEXT_INSN (then_start);
340 /* Skip a (use (const_int 0)) or branch as the final insn. */
341 if (GET_CODE (then_end) == INSN
342 && GET_CODE (PATTERN (then_end)) == USE
343 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
344 then_end = PREV_INSN (then_end);
345 else if (GET_CODE (then_end) == JUMP_INSN)
346 then_end = PREV_INSN (then_end);
350 /* Skip the ELSE block's label. */
351 else_start = NEXT_INSN (else_bb->head);
352 else_end = else_bb->end;
354 /* Skip a (use (const_int 0)) or branch as the final insn. */
355 if (GET_CODE (else_end) == INSN
356 && GET_CODE (PATTERN (else_end)) == USE
357 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
358 else_end = PREV_INSN (else_end);
359 else if (GET_CODE (else_end) == JUMP_INSN)
360 else_end = PREV_INSN (else_end);
363 /* How many instructions should we convert in total? */
/* Two arms to convert: allow twice the single-arm budget.  */
367 max = 2 * MAX_CONDITIONAL_EXECUTE;
368 n_insns = count_bb_insns (else_bb);
371 max = MAX_CONDITIONAL_EXECUTE;
372 n_insns += count_bb_insns (then_bb);
376 /* Map test_expr/test_jump into the appropriate MD tests to use on
377 the conditionally executed code. */
379 true_expr = test_expr;
380 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
381 GET_MODE (true_expr), XEXP (true_expr, 0),
382 XEXP (true_expr, 1));
384 #ifdef IFCVT_MODIFY_TESTS
385 /* If the machine description needs to modify the tests, such as setting a
386 conditional execution register from a comparison, it can do so here. */
387 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
390 /* See if the conversion failed */
391 if (!true_expr || !false_expr)
/* Derive branch-taken/not-taken probabilities from the REG_BR_PROB note,
   if present, for annotating conditionalized CALL_INSNs.  */
395 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
398 true_prob_val = XEXP (true_prob_val, 0);
399 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
402 false_prob_val = NULL_RTX;
404 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
405 on the THEN block. */
406 then_mod_ok = (else_bb == NULL_BLOCK);
408 /* Go through the THEN and ELSE blocks converting the insns if possible
409 to conditional execution. */
412 && ! cond_exec_process_insns (then_start, then_end,
413 false_expr, false_prob_val, then_mod_ok))
417 && ! cond_exec_process_insns (else_start, else_end,
418 true_expr, true_prob_val, TRUE))
421 if (! apply_change_group ())
424 #ifdef IFCVT_MODIFY_FINAL
425 /* Do any machine dependent final modifications */
426 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
429 /* Conversion succeeded. */
431 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
432 n_insns, (n_insns == 1) ? " was" : "s were");
434 /* Merge the blocks! */
435 merge_if_block (test_bb, then_bb, else_bb, join_bb);
439 #ifdef IFCVT_MODIFY_CANCEL
440 /* Cancel any machine dependent changes. */
441 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
448 /* Used by noce_process_if_block to communicate with its subroutines.
450 The subroutines know that A and B may be evaluated freely. They
451 know that X is a register. They should insert new instructions
452 before cond_earliest. */
459 rtx jump, cond, cond_earliest;
462 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
464 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
465 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
466 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
467 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
468 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
469 rtx, enum rtx_code, rtx,
471 static int noce_try_cmove PARAMS ((struct noce_if_info *));
472 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
473 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
475 static int noce_try_minmax PARAMS ((struct noce_if_info *));
476 static int noce_try_abs PARAMS ((struct noce_if_info *));
478 /* Helper function for noce_try_store_flag*. */
/* NOTE(review): decimated excerpt -- declarations (`code', `tmp'), braces,
   and the failure/cancel paths are missing.  Visible behavior: when the
   condition is complex or already adjacent to the jump, try to recognize a
   direct store-flag SET; otherwise fall back to emit_store_flag with an
   unsignedness flag derived from the comparison code. */
481 noce_emit_store_flag (if_info, x, reversep, normalize)
482 struct noce_if_info *if_info;
484 int reversep, normalize;
486 rtx cond = if_info->cond;
490 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
491 || ! general_operand (XEXP (cond, 1), VOIDmode));
493 /* If earliest == jump, or when the condition is complex, try to
494 build the store_flag insn directly. */
497 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
500 code = reversed_comparison_code (cond, if_info->jump);
502 code = GET_CODE (cond);
504 if ((if_info->cond_earliest == if_info->jump || cond_complex)
505 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
509 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
511 tmp = gen_rtx_SET (VOIDmode, x, tmp);
514 tmp = emit_insn (tmp);
516 if (recog_memoized (tmp) >= 0)
522 if_info->cond_earliest = if_info->jump;
530 /* Don't even try if the comparison operands are weird. */
534 return emit_store_flag (x, code, XEXP (cond, 0),
535 XEXP (cond, 1), VOIDmode,
536 (code == LTU || code == LEU
537 || code == GEU || code == GTU), normalize);
540 /* Convert "if (test) x = 1; else x = 0".
542 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
543 tried in noce_try_store_flag_constants after noce_try_cmove has had
544 a go at the conversion. */
/* NOTE(review): decimated excerpt -- `reversep'/`seq' declarations, the
   start_sequence/get_insns bracketing, and the return statements are
   missing.  Visible behavior: match the (0, STORE_FLAG_VALUE) constant
   pair in either orientation, emit the store-flag, copy to X if needed,
   and insert the sequence before cond_earliest. */
547 noce_try_store_flag (if_info)
548 struct noce_if_info *if_info;
553 if (GET_CODE (if_info->b) == CONST_INT
554 && INTVAL (if_info->b) == STORE_FLAG_VALUE
555 && if_info->a == const0_rtx)
557 else if (if_info->b == const0_rtx
558 && GET_CODE (if_info->a) == CONST_INT
559 && INTVAL (if_info->a) == STORE_FLAG_VALUE
560 && (reversed_comparison_code (if_info->cond, if_info->jump)
568 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
571 if (target != if_info->x)
572 emit_move_insn (if_info->x, target);
576 emit_insns_before (seq, if_info->cond_earliest);
587 /* Convert "if (test) x = a; else x = b", for A and B constant. */
/* NOTE(review): decimated excerpt -- the `target'/`seq'/`reversep'
   declarations, the guard conditions between cases, sequence bracketing and
   return paths are missing (embedded line numbers skip).  Visible behavior:
   classify the constant pair (difference of STORE_FLAG_VALUE, power of two,
   -1, or general) choosing a normalization and possible reversal, emit the
   store-flag, then synthesize X with add/sub, shift, ior, or and+add. */
590 noce_try_store_flag_constants (if_info)
591 struct noce_if_info *if_info;
595 HOST_WIDE_INT itrue, ifalse, diff, tmp;
596 int normalize, can_reverse;
599 && GET_CODE (if_info->a) == CONST_INT
600 && GET_CODE (if_info->b) == CONST_INT)
602 ifalse = INTVAL (if_info->a);
603 itrue = INTVAL (if_info->b);
604 diff = itrue - ifalse;
606 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
610 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
612 else if (ifalse == 0 && exact_log2 (itrue) >= 0
613 && (STORE_FLAG_VALUE == 1
614 || BRANCH_COST >= 2))
616 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
617 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
618 normalize = 1, reversep = 1;
620 && (STORE_FLAG_VALUE == -1
621 || BRANCH_COST >= 2))
623 else if (ifalse == -1 && can_reverse
624 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
625 normalize = -1, reversep = 1;
626 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
/* Reversing the comparison swaps which constant is "true".  */
634 tmp = itrue; itrue = ifalse; ifalse = tmp;
639 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
646 /* if (test) x = 3; else x = 4;
647 => x = 3 + (test == 0); */
648 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
650 target = expand_binop (GET_MODE (if_info->x),
651 (diff == STORE_FLAG_VALUE
652 ? add_optab : sub_optab),
653 GEN_INT (ifalse), target, if_info->x, 0,
657 /* if (test) x = 8; else x = 0;
658 => x = (test != 0) << 3; */
659 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
661 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
662 target, GEN_INT (tmp), if_info->x, 0,
666 /* if (test) x = -1; else x = b;
667 => x = -(test != 0) | b; */
668 else if (itrue == -1)
670 target = expand_binop (GET_MODE (if_info->x), ior_optab,
671 target, GEN_INT (ifalse), if_info->x, 0,
675 /* if (test) x = a; else x = b;
676 => x = (-(test != 0) & (b - a)) + a; */
679 target = expand_binop (GET_MODE (if_info->x), and_optab,
680 target, GEN_INT (diff), if_info->x, 0,
683 target = expand_binop (GET_MODE (if_info->x), add_optab,
684 target, GEN_INT (ifalse), if_info->x, 0,
694 if (target != if_info->x)
695 emit_move_insn (if_info->x, target);
/* Expanders may have emitted jumps; bail out if so.  */
700 if (seq_contains_jump (seq))
703 emit_insns_before (seq, if_info->cond_earliest);
711 /* Convert "if (test) foo++" into "foo += (test != 0)", and
712 similarly for "foo--". */
/* NOTE(review): decimated excerpt -- `target'/`seq' declarations, the
   leading guard condition (BRANCH_COST test, presumably), sequence
   bracketing and returns are missing.  Visible behavior: match
   x = x +/- 1 with no else-arm, pick add vs. subtract and a normalization
   relative to STORE_FLAG_VALUE, then fold the store-flag into X. */
715 noce_try_store_flag_inc (if_info)
716 struct noce_if_info *if_info;
719 int subtract, normalize;
725 /* Should be no `else' case to worry about. */
726 && if_info->b == if_info->x
727 && GET_CODE (if_info->a) == PLUS
728 && (XEXP (if_info->a, 1) == const1_rtx
729 || XEXP (if_info->a, 1) == constm1_rtx)
730 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
731 && (reversed_comparison_code (if_info->cond, if_info->jump)
734 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
735 subtract = 0, normalize = 0;
736 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
737 subtract = 1, normalize = 0;
739 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
743 target = noce_emit_store_flag (if_info,
744 gen_reg_rtx (GET_MODE (if_info->x)),
748 target = expand_binop (GET_MODE (if_info->x),
749 subtract ? sub_optab : add_optab,
750 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
753 if (target != if_info->x)
754 emit_move_insn (if_info->x, target);
759 if (seq_contains_jump (seq))
762 emit_insns_before (seq, if_info->cond_earliest);
773 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
/* NOTE(review): decimated excerpt -- declarations, the leading
   STORE_FLAG_VALUE == 1 half of the guard, sequence bracketing and return
   paths are missing.  Visible behavior: match "x = 0" on one arm with x
   unchanged on the other (reversing the condition if needed), then AND x
   with the negated store-flag. */
776 noce_try_store_flag_mask (if_info)
777 struct noce_if_info *if_info;
785 || STORE_FLAG_VALUE == -1)
786 && ((if_info->a == const0_rtx
787 && rtx_equal_p (if_info->b, if_info->x))
788 || ((reversep = (reversed_comparison_code (if_info->cond,
791 && if_info->b == const0_rtx
792 && rtx_equal_p (if_info->a, if_info->x))))
795 target = noce_emit_store_flag (if_info,
796 gen_reg_rtx (GET_MODE (if_info->x)),
799 target = expand_binop (GET_MODE (if_info->x), and_optab,
800 if_info->x, target, if_info->x, 0,
805 if (target != if_info->x)
806 emit_move_insn (if_info->x, target);
811 if (seq_contains_jump (seq))
814 emit_insns_before (seq, if_info->cond_earliest);
825 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
/* NOTE(review): decimated excerpt -- the `code' parameter declaration,
   `tmp' declaration, sequence bracketing, the cancel path after a failed
   recog, and the #else/#endif of the HAVE_conditional_move conditional are
   missing.  Visible behavior: if the condition sits right at the jump, try
   to recognize a raw SET of (if_then_else ...) directly; otherwise validate
   the operands and defer to emit_conditional_move. */
828 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
829 struct noce_if_info *if_info;
830 rtx x, cmp_a, cmp_b, vfalse, vtrue;
833 /* If earliest == jump, try to build the cmove insn directly.
834 This is helpful when combine has created some complex condition
835 (like for alpha's cmovlbs) that we can't hope to regenerate
836 through the normal interface. */
838 if (if_info->cond_earliest == if_info->jump)
842 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
843 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
844 tmp = gen_rtx_SET (VOIDmode, x, tmp);
847 tmp = emit_insn (tmp);
849 if (recog_memoized (tmp) >= 0)
861 /* Don't even try if the comparison operands are weird. */
862 if (! general_operand (cmp_a, GET_MODE (cmp_a))
863 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
866 #if HAVE_conditional_move
867 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
868 vtrue, vfalse, GET_MODE (x),
869 (code == LTU || code == GEU
870 || code == LEU || code == GTU));
872 /* We'll never get here, as noce_process_if_block doesn't call the
873 functions involved. Ifdef code, however, should be discouraged
874 because it leads to typos in the code not selected. However,
875 emit_conditional_move won't exist either. */
880 /* Try only simple constants and registers here. More complex cases
881 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
882 has had a go at it. */
/* NOTE(review): decimated excerpt -- `code'/`target'/`seq' declarations,
   sequence bracketing, the failure path when noce_emit_cmove returns NULL,
   and the return statements are missing.  Visible behavior: when both arms
   are constants or registers, emit a single conditional move of A/B into X
   and insert it before cond_earliest. */
885 noce_try_cmove (if_info)
886 struct noce_if_info *if_info;
891 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
892 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
896 code = GET_CODE (if_info->cond);
897 target = noce_emit_cmove (if_info, if_info->x, code,
898 XEXP (if_info->cond, 0),
899 XEXP (if_info->cond, 1),
900 if_info->a, if_info->b);
904 if (target != if_info->x)
905 emit_move_insn (if_info->x, target);
909 emit_insns_before (seq, if_info->cond_earliest);
922 /* Try more complex cases involving conditional_move. */
/* NOTE(review): decimated excerpt -- local declarations (`a', `b', `x',
   `tmp', `set', `target', `is_mem', presumably), the cost guard, sequence
   bracketing, the end_seq_and_fail label, and the return paths are all
   missing (embedded line numbers skip).  Visible behavior: handle the
   two-MEM case by cmoving addresses and loading afterwards; reject trapping
   operands; possibly swap arms under a reversed comparison; force complex
   operands into registers by copying their defining insns; emit the cmove;
   for the MEM case rebuild a MEM with merged flags/alias set. */
925 noce_try_cmove_arith (if_info)
926 struct noce_if_info *if_info;
936 /* A conditional move from two memory sources is equivalent to a
937 conditional on their addresses followed by a load. Don't do this
938 early because it'll screw alias analysis. Note that we've
939 already checked for no side effects. */
940 if (! no_new_pseudos && cse_not_expected
941 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
946 x = gen_reg_rtx (Pmode);
950 /* ??? We could handle this if we knew that a load from A or B could
951 not fault. This is also true if we've already loaded
952 from the address along the path from ENTRY. */
953 else if (may_trap_p (a) || may_trap_p (b))
956 /* if (test) x = a + b; else x = c - d;
963 code = GET_CODE (if_info->cond);
964 insn_a = if_info->insn_a;
965 insn_b = if_info->insn_b;
967 /* Possibly rearrange operands to make things come out more natural. */
968 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
971 if (rtx_equal_p (b, x))
973 else if (general_operand (b, GET_MODE (b)))
978 code = reversed_comparison_code (if_info->cond, if_info->jump);
979 tmp = a, a = b, b = tmp;
980 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
986 /* If either operand is complex, load it into a register first.
987 The best way to do this is to copy the original insn. In this
988 way we preserve any clobbers etc that the insn may have had.
989 This is of course not possible in the IS_MEM case. */
990 if (! general_operand (a, GET_MODE (a)))
995 goto end_seq_and_fail;
999 tmp = gen_reg_rtx (GET_MODE (a));
1000 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1003 goto end_seq_and_fail;
1006 a = gen_reg_rtx (GET_MODE (a));
1007 tmp = copy_rtx (insn_a);
1008 set = single_set (tmp);
1010 tmp = emit_insn (PATTERN (tmp));
1012 if (recog_memoized (tmp) < 0)
1013 goto end_seq_and_fail;
1015 if (! general_operand (b, GET_MODE (b)))
1020 goto end_seq_and_fail;
1024 tmp = gen_reg_rtx (GET_MODE (b));
1025 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1028 goto end_seq_and_fail;
1031 b = gen_reg_rtx (GET_MODE (b));
1032 tmp = copy_rtx (insn_b);
1033 set = single_set (tmp);
1035 tmp = emit_insn (PATTERN (tmp));
1037 if (recog_memoized (tmp) < 0)
1038 goto end_seq_and_fail;
1041 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1042 XEXP (if_info->cond, 1), a, b);
1045 goto end_seq_and_fail;
1047 /* If we're handling a memory for above, emit the load now. */
1050 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1052 /* Copy over flags as appropriate. */
1053 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1054 MEM_VOLATILE_P (tmp) = 1;
1055 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1056 MEM_IN_STRUCT_P (tmp) = 1;
1057 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1058 MEM_SCALAR_P (tmp) = 1;
1059 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1060 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
1062 emit_move_insn (if_info->x, tmp);
1064 else if (target != x)
1065 emit_move_insn (x, target);
1069 emit_insns_before (tmp, if_info->cond_earliest);
1077 /* For most cases, the simplified condition we found is the best
1078 choice, but this is not the case for the min/max/abs transforms.
1079 For these we wish to know that it is A or B in the condition. */
/* NOTE(review): decimated excerpt -- the `target'/`earliest' parameter
   declarations, the `reverse' declaration, braces, the extra
   canonicalize_condition arguments, and the return statements are missing.
   Visible behavior: return the cached condition when TARGET already appears
   in it; otherwise re-canonicalize from the jump and re-verify that X, A
   and B have safe lifetimes across the searched range. */
1082 noce_get_alt_condition (if_info, target, earliest)
1083 struct noce_if_info *if_info;
1087 rtx cond, set, insn;
1090 /* If target is already mentioned in the known condition, return it. */
1091 if (reg_mentioned_p (target, if_info->cond))
1093 *earliest = if_info->cond_earliest;
1094 return if_info->cond;
1097 set = pc_set (if_info->jump);
1098 cond = XEXP (SET_SRC (set), 0);
1100 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1101 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1103 cond = canonicalize_condition (if_info->jump, cond, reverse,
1105 if (! cond || ! reg_mentioned_p (target, cond))
1108 /* We almost certainly searched back to a different place.
1109 Need to re-verify correct lifetimes. */
1111 /* X may not be mentioned in the range (cond_earliest, jump]. */
1112 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1113 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1116 /* A and B may not be modified in the range [cond_earliest, jump). */
1117 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1119 && (modified_in_p (if_info->a, insn)
1120 || modified_in_p (if_info->b, insn)))
1126 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
/* NOTE(review): decimated excerpt -- the `code'/`op'/`unsignedp'
   declarations, the no_new_pseudos guard body, the code->optab mapping
   switch (original lines ~1172-1203), sequence bracketing and returns are
   missing.  Visible behavior: reject IEEE FP without fast-math, get a
   condition mentioning A, canonicalize operand order, expand the chosen
   min/max optab, and record the (possibly different) condition/earliest. */
1129 noce_try_minmax (if_info)
1130 struct noce_if_info *if_info;
1132 rtx cond, earliest, target, seq;
1137 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1141 /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
1142 will be resolved with an SMIN/SMAX. It wouldn't be too hard
1143 to get the target to tell us... */
1144 if (FLOAT_MODE_P (GET_MODE (if_info->x))
1145 && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1146 && ! flag_fast_math)
1149 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1153 /* Verify the condition is of the form we expect, and canonicalize
1154 the comparison code. */
1155 code = GET_CODE (cond);
1156 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1158 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1161 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1163 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1165 code = swap_condition (code);
1170 /* Determine what sort of operation this is. Note that the code is for
1171 a taken branch, so the code->operation mapping appears backwards. */
1204 target = expand_binop (GET_MODE (if_info->x), op, if_info->a, if_info->b,
1205 if_info->x, unsignedp, OPTAB_WIDEN);
1211 if (target != if_info->x)
1212 emit_move_insn (if_info->x, target);
1217 if (seq_contains_jump (seq))
1220 emit_insns_before (seq, earliest);
1221 if_info->cond = cond;
1222 if_info->cond_earliest = earliest;
1227 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
/* NOTE(review): decimated excerpt -- the `negate' declaration, the
   no_new_pseudos guard body, several failure returns, the switch cases
   assigning `negate', sequence bracketing and returns are missing.
   Visible behavior: recognize a NEG/operand pair (swapping so B is the
   plain operand), get a condition mentioning B, verify the comparison is
   against zero (chasing REG_EQUAL/REG_EQUIV notes and constant-pool
   references), then expand abs_optab and optionally negate for NABS. */
1230 noce_try_abs (if_info)
1231 struct noce_if_info *if_info;
1233 rtx cond, earliest, target, seq, a, b, c;
1236 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1240 /* Recognize A and B as constituting an ABS or NABS. */
1243 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1245 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1247 c = a; a = b; b = c;
1253 cond = noce_get_alt_condition (if_info, b, &earliest);
1257 /* Verify the condition is of the form we expect. */
1258 if (rtx_equal_p (XEXP (cond, 0), b))
1260 else if (rtx_equal_p (XEXP (cond, 1), b))
1265 /* Verify that C is zero. Search backward through the block for
1266 a REG_EQUAL note if necessary. */
1269 rtx insn, note = NULL;
1270 for (insn = earliest;
1271 insn != if_info->test_bb->head;
1272 insn = PREV_INSN (insn))
1274 && ((note = find_reg_note (insn, REG_EQUAL, c))
1275 || (note = find_reg_note (insn, REG_EQUIV, c))))
1281 if (GET_CODE (c) == MEM
1282 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1283 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1284 c = get_pool_constant (XEXP (c, 0));
1286 /* Work around funny ideas get_condition has wrt canonicalization.
1287 Note that these rtx constants are known to be CONST_INT, and
1288 therefore imply integer comparisons. */
1289 if (c == constm1_rtx && GET_CODE (cond) == GT)
1291 else if (c == const1_rtx && GET_CODE (cond) == LT)
1293 else if (c != CONST0_RTX (GET_MODE (b)))
1296 /* Determine what sort of operation this is. */
1297 switch (GET_CODE (cond))
1316 target = expand_unop (GET_MODE (if_info->x), abs_optab, b, if_info->x, 0);
1318 /* ??? It's a quandary whether cmove would be better here, especially
1319 for integers. Perhaps combine will clean things up. */
1320 if (target && negate)
1321 target = expand_unop (GET_MODE (target), neg_optab, target, if_info->x, 0);
1329 if (target != if_info->x)
1330 emit_move_insn (if_info->x, target);
1335 if (seq_contains_jump (seq))
1338 emit_insns_before (seq, earliest);
1339 if_info->cond = cond;
1340 if_info->cond_earliest = earliest;
1345 /* Look for the condition for the jump first. We'd prefer to avoid
1346 get_condition if we can -- it tries to look back for the contents
1347 of an original compare. On targets that use normal integers for
1348 comparisons, e.g. alpha, this is wasteful. */
/* NOTE(review): decimated excerpt -- the parameter declarations,
   `set'/`cond' declarations, the *earliest assignment for the fast path,
   and the return statements are missing.  Visible behavior: for a register
   condition of MODE_INT accept the jump's own comparison (reversed if the
   branch targets the false arm); otherwise fall back to get_condition. */
1351 noce_get_condition (jump, earliest)
1358 /* If the condition variable is a register and is MODE_INT, accept it.
1359 Otherwise, fall back on get_condition. */
1361 if (! any_condjump_p (jump))
1364 set = pc_set (jump);
1366 cond = XEXP (SET_SRC (set), 0);
1367 if (GET_CODE (XEXP (cond, 0)) == REG
1368 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1372 /* If this branches to JUMP_LABEL when the condition is false,
1373 reverse the condition. */
1374 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1375 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1376 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1377 GET_MODE (cond), XEXP (cond, 0),
1381 cond = get_condition (jump, earliest);
1386 /* Return true if OP is ok for if-then-else processing. */
/* NOTE(review): decimated excerpt -- the parameter declaration, braces,
   several switch cases/labels and intermediate returns are missing.
   Visible behavior: MEMs are OK if their address has no side effects;
   anything with side effects is rejected; under fast-math certain FP
   operations are allowed despite may_trap_p (division-like codes only with
   a nonzero constant divisor); otherwise defer to may_trap_p. */
1389 noce_operand_ok (op)
1392 /* We special-case memories, so handle any of them with
1393 no address side effects. */
1394 if (GET_CODE (op) == MEM)
1395 return ! side_effects_p (XEXP (op, 0));
1397 if (side_effects_p (op))
1400 /* ??? Unfortunately may_trap_p can't look at flag_fast_math, due to
1401 being linked into the genfoo programs. This is probably a mistake.
1402 With finite operands, most fp operations don't trap. */
1403 if (flag_fast_math && FLOAT_MODE_P (GET_MODE (op)))
1404 switch (GET_CODE (op))
1410 /* ??? This is kinda lame -- almost every target will have forced
1411 the constant into a register first. But given the expense of
1412 division, this is probably for the best. */
1413 return (CONSTANT_P (XEXP (op, 1))
1414 && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
1415 && ! may_trap_p (XEXP (op, 0)));
1418 switch (GET_RTX_CLASS (GET_CODE (op)))
1423 return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
1428 return ! may_trap_p (op);
1431 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1432 without using conditional execution. Return TRUE if we were
1433 successful at converting the block. */
1436 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1437 basic_block test_bb; /* Basic block test is in */
1438 basic_block then_bb; /* Basic block for THEN block */
1439 basic_block else_bb; /* Basic block for ELSE block */
1440 basic_block join_bb; /* Basic block the join label is in */
1442 /* We're looking for patterns of the form
1444 (1) if (...) x = a; else x = b;
1445 (2) x = b; if (...) x = a;
1446 (3) if (...) x = a; // as if with an initial x = x.
1448 The later patterns require jumps to be more expensive.
1450 ??? For future expansion, look for multiple X in such patterns. */
1452 struct noce_if_info if_info;
1455 rtx orig_x, x, a, b;
1456 rtx jump, cond, insn;
1458 /* If this is not a standard conditional jump, we can't parse it. */
1459 jump = test_bb->end;
1460 cond = noce_get_condition (jump, &if_info.cond_earliest);
1464 /* If the conditional jump is more than just a conditional jump,
1465 then we can not do if-conversion on this block. */
1466 if (! onlyjump_p (jump))
1469 /* We must be comparing objects whose modes imply the size. */
1470 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1473 /* Look for one of the potential sets. */
1474 insn_a = first_active_insn (then_bb);
1476 || ! last_active_insn_p (then_bb, insn_a)
1477 || (set_a = single_set (insn_a)) == NULL_RTX)
1480 x = SET_DEST (set_a);
1481 a = SET_SRC (set_a);
1483 /* Look for the other potential set. Make sure we've got equivalent
1485 /* ??? This is overconservative. Storing to two different mems is
1486 as easy as conditionally computing the address. Storing to a
1487 single mem merely requires a scratch memory to use as one of the
1488 destination addresses; often the memory immediately below the
1489 stack pointer is available for this. */
1493 insn_b = first_active_insn (else_bb);
1495 || ! last_active_insn_p (else_bb, insn_b)
1496 || (set_b = single_set (insn_b)) == NULL_RTX
1497 || ! rtx_equal_p (x, SET_DEST (set_b)))
1502 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1504 || GET_CODE (insn_b) != INSN
1505 || (set_b = single_set (insn_b)) == NULL_RTX
1506 || ! rtx_equal_p (x, SET_DEST (set_b))
1507 || reg_mentioned_p (x, cond)
1508 || reg_mentioned_p (x, a)
1509 || reg_mentioned_p (x, SET_SRC (set_b)))
1510 insn_b = set_b = NULL_RTX;
1512 b = (set_b ? SET_SRC (set_b) : x);
1514 /* X may not be mentioned in the range (cond_earliest, jump]. */
1515 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1516 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1519 /* A and B may not be modified in the range [cond_earliest, jump). */
1520 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1522 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1525 /* Only operate on register destinations, and even then avoid extending
1526 the lifetime of hard registers on small register class machines. */
1528 if (GET_CODE (x) != REG
1529 || (SMALL_REGISTER_CLASSES
1530 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1534 x = gen_reg_rtx (GET_MODE (x));
1537 /* Don't operate on sources that may trap or are volatile. */
1538 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1541 /* Set up the info block for our subroutines. */
1542 if_info.test_bb = test_bb;
1543 if_info.cond = cond;
1544 if_info.jump = jump;
1545 if_info.insn_a = insn_a;
1546 if_info.insn_b = insn_b;
1551 /* Try optimizations in some approximation of a useful order. */
1552 /* ??? Should first look to see if X is live incoming at all. If it
1553 isn't, we don't need anything but an unconditional set. */
1555 /* Look and see if A and B are really the same. Avoid creating silly
1556 cmove constructs that no one will fix up later. */
1557 if (rtx_equal_p (a, b))
1559 /* If we have an INSN_B, we don't have to create any new rtl. Just
1560 move the instruction that we already have. If we don't have an
1561 INSN_B, that means that A == X, and we've got a noop move. In
1562 that case don't do anything and let the code below delete INSN_A. */
1563 if (insn_b && else_bb)
1565 if (else_bb && insn_b == else_bb->end)
1566 else_bb->end = PREV_INSN (insn_b);
1567 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1570 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1571 x must be executed twice. */
1572 else if (insn_b && side_effects_p (orig_x))
1579 if (noce_try_store_flag (&if_info))
1581 if (noce_try_minmax (&if_info))
1583 if (noce_try_abs (&if_info))
1585 if (HAVE_conditional_move
1586 && noce_try_cmove (&if_info))
1588 if (! HAVE_conditional_execution)
1590 if (noce_try_store_flag_constants (&if_info))
1592 if (noce_try_store_flag_inc (&if_info))
1594 if (noce_try_store_flag_mask (&if_info))
1596 if (HAVE_conditional_move
1597 && noce_try_cmove_arith (&if_info))
1604 /* The original sets may now be killed. */
1605 if (insn_a == then_bb->end)
1606 then_bb->end = PREV_INSN (insn_a);
1607 flow_delete_insn (insn_a);
1609 /* Several special cases here: First, we may have reused insn_b above,
1610 in which case insn_b is now NULL. Second, we want to delete insn_b
1611 if it came from the ELSE block, because follows the now correct
1612 write that appears in the TEST block. However, if we got insn_b from
1613 the TEST block, it may in fact be loading data needed for the comparison.
1614 We'll let life_analysis remove the insn if it's really dead. */
1615 if (insn_b && else_bb)
1617 if (insn_b == else_bb->end)
1618 else_bb->end = PREV_INSN (insn_b);
1619 flow_delete_insn (insn_b);
1622 /* The new insns will have been inserted before cond_earliest. We should
1623 be able to remove the jump with impunity, but the condition itself may
1624 have been modified by gcse to be shared across basic blocks. */
1625 test_bb->end = PREV_INSN (jump);
1626 flow_delete_insn (jump);
1628 /* If we used a temporary, fix it up now. */
1632 emit_move_insn (orig_x, x);
1633 insn_b = gen_sequence ();
1636 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1639 /* Merge the blocks! */
1640 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1645 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1646 straight line code. Return true if successful. */
1649 process_if_block (test_bb, then_bb, else_bb, join_bb)
1650 basic_block test_bb; /* Basic block test is in */
1651 basic_block then_bb; /* Basic block for THEN block */
1652 basic_block else_bb; /* Basic block for ELSE block */
1653 basic_block join_bb; /* Basic block the join label is in */
/* Before reload, try the transformations that do not rely on
   conditional execution (store-flag, cmove, min/max, abs).  */
1655 if (! reload_completed
1656 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
/* Otherwise, fall back to predicating the insns, if the target
   supports conditional execution.  */
1659 if (HAVE_conditional_execution
1661 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1667 /* Merge the blocks and mark for local life update. */
1670 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1671 basic_block test_bb; /* Basic block test is in */
1672 basic_block then_bb; /* Basic block for THEN block */
1673 basic_block else_bb; /* Basic block for ELSE block */
1674 basic_block join_bb; /* Basic block the join label is in */
1676 basic_block combo_bb;
1678 /* All block merging is done into the lower block numbers. */
1682 /* First merge TEST block into THEN block. This is a no-brainer since
1683 the THEN block did not have a code label to begin with. */
1685 if (combo_bb->global_live_at_end)
1686 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1687 merge_blocks_nomove (combo_bb, then_bb);
1688 num_removed_blocks++;
1690 /* The ELSE block, if it existed, had a label. That label count
1691 will almost always be zero, but odd things can happen when labels
1692 get their addresses taken. */
1695 merge_blocks_nomove (combo_bb, else_bb);
1696 num_removed_blocks++;
1699 /* If there was no join block reported, that means it was not adjacent
1700 to the others, and so we cannot merge them. */
1704 /* The outgoing edge for the current COMBO block should already
1705 be correct. Verify this. */
1706 if (combo_bb->succ == NULL_EDGE)
1709 /* There should still be a branch at the end of the THEN or ELSE
1710 blocks taking us to our final destination. */
1711 if (! simplejump_p (combo_bb->end)
1712 && ! returnjump_p (combo_bb->end))
1716 /* The JOIN block may have had quite a number of other predecessors too.
1717 Since we've already merged the TEST, THEN and ELSE blocks, we should
1718 have only one remaining edge from our if-then-else diamond. If there
1719 is more than one remaining edge, it must come from elsewhere. There
1720 may be zero incoming edges if the THEN block didn't actually join
1721 back up (as with a call to abort). */
1722 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1724 /* We can merge the JOIN. */
1725 if (combo_bb->global_live_at_end)
1726 COPY_REG_SET (combo_bb->global_live_at_end,
1727 join_bb->global_live_at_end);
1728 merge_blocks_nomove (combo_bb, join_bb);
1729 num_removed_blocks++;
1733 /* We cannot merge the JOIN. */
1735 /* The outgoing edge for the current COMBO block should already
1736 be correct. Verify this. */
1737 if (combo_bb->succ->succ_next != NULL_EDGE
1738 || combo_bb->succ->dest != join_bb)
1741 /* Remove the jump and cruft from the end of the COMBO block. */
1742 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1745 /* Make sure we update life info properly. */
1746 SET_UPDATE_LIFE (combo_bb);
1748 num_updated_if_blocks++;
1751 /* Find a block ending in a simple IF condition. Return TRUE if
1752 we were able to transform it in some way. */
1755 find_if_header (test_bb)
1756 basic_block test_bb;
1761 /* The kind of block we're looking for has exactly two successors. */
1762 if ((then_edge = test_bb->succ) == NULL_EDGE
1763 || (else_edge = then_edge->succ_next) == NULL_EDGE
1764 || else_edge->succ_next != NULL_EDGE)
1767 /* Neither edge should be abnormal. */
1768 if ((then_edge->flags & EDGE_COMPLEX)
1769 || (else_edge->flags & EDGE_COMPLEX))
1772 /* The THEN edge is canonically the one that falls through. */
1773 if (then_edge->flags & EDGE_FALLTHRU)
1775 else if (else_edge->flags & EDGE_FALLTHRU)
/* The fallthru edge came second; swap so THEN is the fallthru.  */
1778 else_edge = then_edge;
1782 /* Otherwise this must be a multiway branch of some sort. */
/* Try the full IF-THEN[-ELSE] diamond first, then the half-diamond
   cases 1 and 2.  */
1785 if (find_if_block (test_bb, then_edge, else_edge))
1788 && (! HAVE_conditional_execution || reload_completed))
1790 if (find_if_case_1 (test_bb, then_edge, else_edge))
1792 if (find_if_case_2 (test_bb, then_edge, else_edge))
1800 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1804 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1805 block. If so, we'll try to convert the insns to not require the branch.
1806 Return TRUE if we were successful at converting the block. */
1809 find_if_block (test_bb, then_edge, else_edge)
1810 basic_block test_bb;
1811 edge then_edge, else_edge;
1813 basic_block then_bb = then_edge->dest;
1814 basic_block else_bb = else_edge->dest;
1815 basic_block join_bb = NULL_BLOCK;
1816 edge then_succ = then_bb->succ;
1817 edge else_succ = else_bb->succ;
1820 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1821 if (then_bb->pred->pred_next != NULL_EDGE)
1824 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1825 if (then_succ != NULL_EDGE
1826 && (then_succ->succ_next != NULL_EDGE
1827 || (then_succ->flags & EDGE_COMPLEX)))
1830 /* If the THEN block has no successors, conditional execution can still
1831 make a conditional call. Don't do this unless the ELSE block has
1832 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1833 Check for the last insn of the THEN block being an indirect jump, which
1834 is listed as not having any successors, but confuses the rest of the CE
1835 code processing. XXX we should fix this in the future. */
1836 if (then_succ == NULL)
1838 if (else_bb->pred->pred_next == NULL_EDGE)
1840 rtx last_insn = then_bb->end;
/* Skip trailing notes to find the real last insn of THEN.  */
1843 && GET_CODE (last_insn) == NOTE
1844 && last_insn != then_bb->head
1845 last_insn = PREV_INSN (last_insn);
/* Reject indirect jumps (JUMP_INSN that is not a simple jump).  */
1848 && GET_CODE (last_insn) == JUMP_INSN
1849 && ! simplejump_p (last_insn))
1853 else_bb = NULL_BLOCK;
1859 /* If the THEN block's successor is the other edge out of the TEST block,
1860 then we have an IF-THEN combo without an ELSE. */
1861 else if (then_succ->dest == else_bb)
1864 else_bb = NULL_BLOCK;
1867 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1868 has exactly one predecessor and one successor, and the outgoing edge
1869 is not complex, then we have an IF-THEN-ELSE combo. */
1870 else if (else_succ != NULL_EDGE
1871 && then_succ->dest == else_succ->dest
1872 && else_bb->pred->pred_next == NULL_EDGE
1873 && else_succ->succ_next == NULL_EDGE
1874 && ! (else_succ->flags & EDGE_COMPLEX))
1875 join_bb = else_succ->dest;
1877 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1881 num_possible_if_blocks++;
1886 fprintf (rtl_dump_file,
1887 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1888 test_bb->index, then_bb->index, else_bb->index,
1891 fprintf (rtl_dump_file,
1892 "\nIF-THEN block found, start %d, then %d, join %d\n",
1893 test_bb->index, then_bb->index, join_bb->index);
1896 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
1897 get the first condition for free, since we've already asserted that
1898 there's a fallthru edge from IF to THEN. */
1899 /* ??? As an enhancement, move the ELSE block. Have to deal with EH and
1900 BLOCK notes, if by no other means than aborting the merge if they
1901 exist. Sticky enough I don't want to think about it now. */
1902 next_index = then_bb->index;
1903 if (else_bb && ++next_index != else_bb->index)
1905 if (++next_index != join_bb->index)
1913 /* Do the real work. */
1914 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1917 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1918 transformable, but not necessarily the other. There need be no
1921 Return TRUE if we were successful at converting the block.
1923 Cases we'd like to look at:
1926 if (test) goto over; // x not live
1934 if (! test) goto label;
1937 if (test) goto E; // x not live
1951 (3) // This one's really only interesting for targets that can do
1952 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1953 // it results in multiple branches on a cache line, which often
1954 // does not sit well with predictors.
1956 if (test1) goto E; // predicted not taken
1972 (A) Don't do (2) if the branch is predicted against the block we're
1973 eliminating. Do it anyway if we can eliminate a branch; this requires
1974 that the sole successor of the eliminated block postdominate the other
1977 (B) With CE, on (3) we can steal from both sides of the if, creating
1986 Again, this is most useful if J postdominates.
1988 (C) CE substitutes for helpful life information.
1990 (D) These heuristics need a lot of work. */
1992 /* Tests for case 1 above. */
1995 find_if_case_1 (test_bb, then_edge, else_edge)
1996 basic_block test_bb;
1997 edge then_edge, else_edge;
1999 basic_block then_bb = then_edge->dest;
2000 basic_block else_bb = else_edge->dest;
2001 edge then_succ = then_bb->succ;
2004 /* THEN has one successor. */
2005 if (!then_succ || then_succ->succ_next != NULL)
2008 /* THEN does not fall through, but is not strange either. */
2009 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2012 /* THEN has one predecessor. */
2013 if (then_bb->pred->pred_next != NULL)
2016 /* ELSE follows THEN. (??? could be moved) */
2017 if (else_bb->index != then_bb->index + 1)
2020 num_possible_if_blocks++;
2022 fprintf (rtl_dump_file,
2023 "\nIF-CASE-1 found, start %d, then %d\n",
2024 test_bb->index, then_bb->index);
2026 /* THEN is small. */
2027 if (count_bb_insns (then_bb) > BRANCH_COST)
2030 /* Find the label for THEN's destination. */
2031 if (then_succ->dest == EXIT_BLOCK_PTR)
2035 new_lab = JUMP_LABEL (then_bb->end);
2040 /* Registers set are dead, or are predicable. */
2041 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
2044 /* Conversion went ok, including moving the insns and fixing up the
2045 jump. Adjust the CFG to match. */
/* Live-at-end of TEST is now the union of what flowed into ELSE and
   what flowed out of the (removed) THEN block.  */
2047 SET_UPDATE_LIFE (test_bb);
2048 bitmap_operation (test_bb->global_live_at_end,
2049 else_bb->global_live_at_start,
2050 then_bb->global_live_at_end, BITMAP_IOR);
/* Retarget TEST directly at THEN's destination and delete THEN.  */
2052 make_edge (NULL, test_bb, then_succ->dest, 0);
2053 flow_delete_block (then_bb);
2054 tidy_fallthru_edge (else_edge, test_bb, else_bb);
2056 num_removed_blocks++;
2057 num_updated_if_blocks++;
2062 /* Test for case 2 above. */
2065 find_if_case_2 (test_bb, then_edge, else_edge)
2066 basic_block test_bb;
2067 edge then_edge, else_edge;
2069 basic_block then_bb = then_edge->dest;
2070 basic_block else_bb = else_edge->dest;
2071 edge else_succ = else_bb->succ;
2074 /* ELSE has one successor. */
2075 if (!else_succ || else_succ->succ_next != NULL)
2078 /* ELSE outgoing edge is not complex. */
2079 if (else_succ->flags & EDGE_COMPLEX)
2082 /* ELSE has one predecessor. */
2083 if (else_bb->pred->pred_next != NULL)
2086 /* THEN is not EXIT. */
2087 if (then_bb->index < 0)
2090 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2091 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX)
2092 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2094 else if (else_succ->dest->index < 0
2095 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2096 ORIG_INDEX (else_succ->dest)))
2101 num_possible_if_blocks++;
2103 fprintf (rtl_dump_file,
2104 "\nIF-CASE-2 found, start %d, else %d\n",
2105 test_bb->index, else_bb->index);
2107 /* ELSE is small.  It is the ELSE block that is being eliminated here
   (it is passed to dead_or_predicable below), so count its insns, not
   THEN's -- the old count_bb_insns (then_bb) was a copy-paste slip
   from find_if_case_1.  */
2108 if (count_bb_insns (else_bb) > BRANCH_COST)
2111 /* Find the label for ELSE's destination. */
2112 if (else_succ->dest == EXIT_BLOCK_PTR)
2116 if (else_succ->flags & EDGE_FALLTHRU)
2118 new_lab = else_succ->dest->head;
2119 if (GET_CODE (new_lab) != CODE_LABEL)
2124 new_lab = JUMP_LABEL (else_bb->end);
2130 /* Registers set are dead, or are predicable. */
2131 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
2134 /* Conversion went ok, including moving the insns and fixing up the
2135 jump. Adjust the CFG to match. */
/* Live-at-end of TEST is now the union of what flowed into THEN and
   what flowed out of the (removed) ELSE block.  */
2137 SET_UPDATE_LIFE (test_bb);
2138 bitmap_operation (test_bb->global_live_at_end,
2139 then_bb->global_live_at_start,
2140 else_bb->global_live_at_end, BITMAP_IOR);
2142 remove_edge (else_edge);
2143 make_edge (NULL, test_bb, else_succ->dest, 0);
2144 flow_delete_block (else_bb);
2146 num_removed_blocks++;
2147 num_updated_if_blocks++;
2149 /* ??? We may now fallthru from one of THEN's successors into a join
2150 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2155 /* A subroutine of dead_or_predicable called through for_each_rtx.
2156 Return 1 if a memory is found. */
2159 find_memory (px, data)
/* PX points at the current sub-rtx being walked; DATA is unused.  */
2161 void *data ATTRIBUTE_UNUSED;
2163 return GET_CODE (*px) == MEM;
2166 /* Used by the code above to perform the actual rtl transformations.
2167 Return TRUE if successful.
2169 TEST_BB is the block containing the conditional branch. MERGE_BB
2170 is the block containing the code to manipulate. NEW_DEST is the
2171 label TEST_BB should be branching to after the conversion.
2172 REVERSEP is true if the sense of the branch should be reversed. */
2175 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2176 basic_block test_bb, merge_bb, other_bb;
2180 rtx head, end, jump, earliest, old_dest;
2182 /* No code movement can occur if we'd be scrogging EH regions.
2183 Within MERGE_BB, ensure that we've not got stray EH_BEG or EH_END
2184 notes within the block. Between the blocks, checking that the end
2185 region numbers match ensures that we won't disrupt the nesting
2187 if (merge_bb->eh_beg != merge_bb->eh_end
2188 || merge_bb->eh_end != test_bb->eh_end)
2191 jump = test_bb->end;
2193 /* Find the extent of the real code in the merge block. */
2194 head = merge_bb->head;
2195 end = merge_bb->end;
2197 if (GET_CODE (head) == CODE_LABEL)
2198 head = NEXT_INSN (head);
2199 if (GET_CODE (head) == NOTE)
2203 head = end = NULL_RTX;
2206 head = NEXT_INSN (head);
2209 if (GET_CODE (end) == JUMP_INSN)
2213 head = end = NULL_RTX;
2216 end = PREV_INSN (end);
2219 /* Disable handling dead code by conditional execution if the machine needs
2220 to do anything funny with the tests, etc. */
2221 #ifndef IFCVT_MODIFY_TESTS
2222 if (HAVE_conditional_execution)
2224 /* In the conditional execution case, we have things easy. We know
2225 the condition is reversible. We don't have to check life info,
2226 because we're going to conditionally execute the code anyway.
2227 All that's left is making sure the insns involved can actually
2232 cond = cond_exec_get_condition (jump);
2234 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2236 prob_val = XEXP (prob_val, 0);
/* Invert both the condition and the branch probability note when the
   branch sense is being reversed.  */
2240 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2241 GET_MODE (cond), XEXP (cond, 0),
2244 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2247 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2255 /* In the non-conditional execution case, we have to verify that there
2256 are no trapping operations, no calls, no references to memory, and
2257 that any registers modified are dead at the branch site. */
2259 rtx insn, cond, prev;
2260 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2261 regset merge_set, tmp, test_live, test_set;
2262 struct propagate_block_info *pbi;
2265 /* Check for no calls or trapping operations. */
2266 for (insn = head; ; insn = NEXT_INSN (insn))
2268 if (GET_CODE (insn) == CALL_INSN)
2272 if (may_trap_p (PATTERN (insn)))
2275 /* ??? Even non-trapping memories such as stack frame
2276 references must be avoided. For stores, we collect
2277 no lifetime info; for reads, we'd have to assert
2278 true_dependence false against every store in the
2280 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2287 if (! any_condjump_p (jump))
2290 /* Find the extent of the conditional. */
2291 cond = noce_get_condition (jump, &earliest);
2296 MERGE_SET = set of registers set in MERGE_BB
2297 TEST_LIVE = set of registers live at EARLIEST
2298 TEST_SET = set of registers set between EARLIEST and the
2299 end of the block. */
2301 tmp = INITIALIZE_REG_SET (tmp_head);
2302 merge_set = INITIALIZE_REG_SET (merge_set_head);
2303 test_live = INITIALIZE_REG_SET (test_live_head);
2304 test_set = INITIALIZE_REG_SET (test_set_head);
2306 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2307 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2308 since we've already asserted that MERGE_BB is small. */
2309 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2311 /* For small register class machines, don't lengthen lifetimes of
2312 hard registers before reload. */
2313 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2315 EXECUTE_IF_SET_IN_BITMAP
2318 if (i < FIRST_PSEUDO_REGISTER
2320 && ! global_regs[i])
2325 /* For TEST, we're interested in a range of insns, not a whole block.
2326 Moreover, we're interested in the insns live from OTHER_BB. */
2328 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2329 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
/* Walk backwards from the jump to EARLIEST, accumulating liveness.  */
2332 for (insn = jump; ; insn = prev)
2334 prev = propagate_one_insn (pbi, insn);
2335 if (insn == earliest)
2339 free_propagate_block_info (pbi);
2341 /* We can perform the transformation if
2342 MERGE_SET & (TEST_SET | TEST_LIVE)
2344 TEST_SET & merge_bb->global_live_at_start
2347 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2348 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2349 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2351 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2353 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2356 FREE_REG_SET (merge_set);
2357 FREE_REG_SET (test_live);
2358 FREE_REG_SET (test_set);
2365 /* We don't want to use normal invert_jump or redirect_jump because
2366 we don't want to delete_insn called. Also, we want to do our own
2367 change group management. */
2369 old_dest = JUMP_LABEL (jump);
2371 ? ! invert_jump_1 (jump, new_dest)
2372 : ! redirect_jump_1 (jump, new_dest))
2375 if (! apply_change_group ())
/* Keep label reference counts and JUMP_LABEL consistent by hand,
   since we bypassed the normal redirect/invert entry points.  */
2379 LABEL_NUSES (old_dest) -= 1;
2381 LABEL_NUSES (new_dest) += 1;
2382 JUMP_LABEL (jump) = new_dest;
2386 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2388 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
2391 /* Move the insns out of MERGE_BB to before the branch. */
2394 if (end == merge_bb->end)
2395 merge_bb->end = PREV_INSN (head);
/* Don't move block-structure or loop notes along with the code.  */
2397 head = squeeze_notes (head, end);
2398 if (GET_CODE (end) == NOTE
2399 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2400 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2401 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2402 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2403 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2404 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2408 end = PREV_INSN (end);
2411 reorder_insns (head, end, PREV_INSN (earliest));
2420 /* Main entry point for all if-conversion. */
2423 if_convert (life_data_ok)
2428 num_possible_if_blocks = 0;
2429 num_updated_if_blocks = 0;
2430 num_removed_blocks = 0;
2432 /* Free up basic_block_for_insn so that we don't have to keep it
2433 up to date, either here or in merge_blocks_nomove. */
2434 free_basic_block_vars (1);
2436 /* Compute postdominators if we think we'll use them. */
2437 post_dominators = NULL;
2438 if (HAVE_conditional_execution || life_data_ok)
2440 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2441 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2444 /* Record initial block numbers. */
2445 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2446 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2448 /* Go through each of the basic blocks looking for things to convert. */
2449 for (block_num = 0; block_num < n_basic_blocks; )
2451 basic_block bb = BASIC_BLOCK (block_num);
2452 if (find_if_header (bb))
2453 block_num = bb->index;
2458 if (post_dominators)
2459 sbitmap_vector_free (post_dominators);
2462 fflush (rtl_dump_file);
2464 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2465 compute_bb_for_insn (get_max_uid ());
2467 /* Rebuild life info for basic blocks that require it. */
2468 if (num_removed_blocks && life_data_ok)
2470 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2471 sbitmap_zero (update_life_blocks);
2473 /* If we allocated new pseudos, we must resize the array for sched1. */
2474 if (max_regno < max_reg_num ())
2476 max_regno = max_reg_num ();
2477 allocate_reg_info (max_regno, FALSE, FALSE);
2480 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2481 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2482 SET_BIT (update_life_blocks, block_num);
2484 count_or_remove_death_notes (update_life_blocks, 1);
2485 /* ??? See about adding a mode that verifies that the initial
2486 set of blocks don't let registers come live. */
2487 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2488 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2489 | PROP_KILL_DEAD_CODE);
2491 sbitmap_free (update_life_blocks);
2494 /* Write the final stats. */
2495 if (rtl_dump_file && num_possible_if_blocks > 0)
2497 fprintf (rtl_dump_file,
2498 "\n%d possible IF blocks searched.\n",
2499 num_possible_if_blocks);
2500 fprintf (rtl_dump_file,
2501 "%d IF blocks converted.\n",
2502 num_updated_if_blocks);
2503 fprintf (rtl_dump_file,
2504 "%d basic blocks deleted.\n\n\n",
2505 num_removed_blocks);
2508 #ifdef ENABLE_CHECKING
2509 verify_flow_info ();