1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-config.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
/* Default the target capability macros to 0 when the backend's
   insn-config.h does not define them, so the pass compiles everywhere.
   NOTE(review): the matching #endif lines are not visible in this
   excerpt -- confirm against the full source.  */
37 #ifndef HAVE_conditional_execution
38 #define HAVE_conditional_execution 0
40 #ifndef HAVE_conditional_move
41 #define HAVE_conditional_move 0
/* Maximum number of insns to convert to conditional execution in
   place of a branch, unless the target overrides it.  */
50 #ifndef MAX_CONDITIONAL_EXECUTE
51 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
/* Typed null constants for CFG edge and basic-block pointers.  */
54 #define NULL_EDGE ((struct edge_def *)NULL)
55 #define NULL_BLOCK ((struct basic_block_def *)NULL)
/* Pass-wide statistics (reported at the end of the pass) and the
   dominator data used while analyzing block structure.  */
57 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
58 static int num_possible_if_blocks;
60 /* # of IF-THEN or IF-THEN-ELSE blocks that were converted to
   conditional execution or straight-line form.  */
62 static int num_updated_if_blocks;
64 /* # of basic blocks that were removed. */
65 static int num_removed_blocks;
67 /* The post-dominator relation on the original block numbers. */
68 static sbitmap *post_dominators;
70 /* Forward references. */
/* PARAMS is the pre-C99 prototype macro used throughout old GCC
   sources for K&R-compiler compatibility.  */
71 static int count_bb_insns PARAMS ((basic_block));
72 static rtx first_active_insn PARAMS ((basic_block));
73 static int last_active_insn_p PARAMS ((basic_block, rtx));
74 static int seq_contains_jump PARAMS ((rtx));
76 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
77 static rtx cond_exec_get_condition PARAMS ((rtx));
78 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
79 basic_block, basic_block));
81 static rtx noce_get_condition PARAMS ((rtx, rtx *));
82 static int noce_process_if_block PARAMS ((basic_block, basic_block,
83 basic_block, basic_block));
85 static int process_if_block PARAMS ((basic_block, basic_block,
86 basic_block, basic_block));
87 static void merge_if_block PARAMS ((basic_block, basic_block,
88 basic_block, basic_block));
90 static int find_if_header PARAMS ((basic_block));
91 static int find_if_block PARAMS ((basic_block, edge, edge));
92 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
93 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
94 static int find_memory PARAMS ((rtx *, void *));
95 static int dead_or_predicable PARAMS ((basic_block, basic_block,
96 basic_block, rtx, int));
98 /* Abuse the basic_block AUX field to store the original block index,
99 as well as a flag indicating that the block should be rescanned for
   life-data updates.  The index lives in the upper bits; the
   update-life flag is bit 0.  */
102 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
103 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
104 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
105 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
108 /* Count the number of non-jump active insns in BB. */
/* NOTE(review): only a fragment of count_bb_insns is visible in this
   excerpt -- the signature, loop header, counter and return are
   missing.  Only CALL_INSN and plain INSN codes are counted.  */
119 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
124 insn = NEXT_INSN (insn);
130 /* Return the first non-jump active insn in the basic block. */
133 first_active_insn (bb)
/* Skip a leading CODE_LABEL and any NOTE insns; a jump encountered
   first means the block has no active insn.
   NOTE(review): interior lines are missing from this excerpt.  */
138 if (GET_CODE (insn) == CODE_LABEL)
142 insn = NEXT_INSN (insn);
145 while (GET_CODE (insn) == NOTE)
149 insn = NEXT_INSN (insn);
152 if (GET_CODE (insn) == JUMP_INSN)
158 /* Return true if INSN is the last active non-jump insn in BB. */
161 last_active_insn_p (bb, insn)
/* Advance past NOTE insns after INSN; INSN is last iff the next
   active insn is the block-ending jump.  */
169 insn = NEXT_INSN (insn);
171 while (GET_CODE (insn) == NOTE);
173 return GET_CODE (insn) == JUMP_INSN;
176 /* It is possible, especially when having dealt with multi-word
177 arithmetic, for the expanders to have emitted jumps. Search
178 through the sequence and return TRUE if a jump exists so that
179 we can abort the conversion. */
182 seq_contains_jump (insn)
/* Walk the emitted sequence looking for any JUMP_INSN.
   NOTE(review): the loop structure and returns are not visible in
   this excerpt.  */
187 if (GET_CODE (insn) == JUMP_INSN)
189 insn = NEXT_INSN (insn);
194 /* Go through a bunch of insns, converting them to conditional
195 execution format if possible. Return TRUE if all of the non-note
196 insns were processed. */
199 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
200 rtx start; /* first insn to look at */
201 rtx end; /* last insn to look at */
202 rtx test; /* conditional execution test */
203 rtx prob_val; /* probability of branch taken. */
204 int mod_ok; /* true if modifications ok last insn. */
206 int must_be_last = FALSE;
210 for (insn = start; ; insn = NEXT_INSN (insn))
/* NOTES pass through unchanged; anything that is not a plain INSN
   or CALL_INSN cannot be predicated.  */
212 if (GET_CODE (insn) == NOTE)
215 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
218 /* Remove USE insns that get in the way. */
219 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
221 /* ??? Ug. Actually unlinking the thing is problematic,
222 given what we'd have to coordinate with our callers. */
223 PUT_CODE (insn, NOTE);
224 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
225 NOTE_SOURCE_FILE (insn) = 0;
229 /* Last insn wasn't last? */
/* If this insn modifies the test condition, it may only be the final
   insn in the range (and only when MOD_OK allows it).  */
233 if (modified_in_p (test, insn))
240 /* Now build the conditional form of the instruction. */
241 pattern = PATTERN (insn);
243 /* If the machine needs to modify the insn being conditionally executed,
244 say for example to force a constant integer operand into a temp
245 register, do so here. */
246 #ifdef IFCVT_MODIFY_INSN
247 IFCVT_MODIFY_INSN (pattern, insn);
/* Wrap the pattern in (cond_exec TEST PATTERN); the change is queued
   and later committed by apply_change_group in the caller.  */
252 validate_change (insn, &PATTERN (insn),
253 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
/* For a conditional call, attach the branch-probability note so
   later passes see its likelihood.  (Fixed mis-encoded "&REG_NOTES"
   below -- the '&' + 'R' had been mangled into a (R) glyph.)  */
256 if (GET_CODE (insn) == CALL_INSN && prob_val)
257 validate_change (insn, &REG_NOTES (insn),
258 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
259 REG_NOTES (insn)), 1);
269 /* Return the condition for a jump. Do not do any special processing. */
272 cond_exec_get_condition (jump)
/* Only simple conditional jumps are handled; the condition is the
   first operand of the IF_THEN_ELSE in the jump's SET_SRC.  */
277 if (any_condjump_p (jump))
278 test_if = SET_SRC (pc_set (jump));
281 cond = XEXP (test_if, 0);
283 /* If this branches to JUMP_LABEL when the condition is false,
284 reverse the condition. */
285 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
286 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
287 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
288 GET_MODE (cond), XEXP (cond, 0),
294 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
295 to conditional execution. Return TRUE if we were successful at
296 converting the block. */
299 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
300 basic_block test_bb; /* Basic block test is in */
301 basic_block then_bb; /* Basic block for THEN block */
302 basic_block else_bb; /* Basic block for ELSE block */
303 basic_block join_bb; /* Basic block the join label is in */
305 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
306 rtx then_start; /* first insn in THEN block */
307 rtx then_end; /* last insn + 1 in THEN block */
308 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
309 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
310 int max; /* max # of insns to convert. */
311 int then_mod_ok; /* whether conditional mods are ok in THEN */
312 rtx true_expr; /* test for else block insns */
313 rtx false_expr; /* test for then block insns */
314 rtx true_prob_val; /* probability of else block */
315 rtx false_prob_val; /* probability of then block */
318 /* Find the conditional jump to the ELSE or JOIN part, and isolate
320 test_expr = cond_exec_get_condition (test_bb->end);
324 /* If the conditional jump is more than just a conditional jump,
325 then we can not do conditional execution conversion on this block. */
326 if (!onlyjump_p (test_bb->end))
329 /* Collect the bounds of where we're to search. */
331 then_start = then_bb->head;
332 then_end = then_bb->end;
334 /* Skip a label heading THEN block. */
335 if (GET_CODE (then_start) == CODE_LABEL)
336 then_start = NEXT_INSN (then_start);
338 /* Skip a (use (const_int 0)) or branch as the final insn. */
339 if (GET_CODE (then_end) == INSN
340 && GET_CODE (PATTERN (then_end)) == USE
341 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
342 then_end = PREV_INSN (then_end);
343 else if (GET_CODE (then_end) == JUMP_INSN)
344 then_end = PREV_INSN (then_end);
348 /* Skip the ELSE block's label. */
349 else_start = NEXT_INSN (else_bb->head);
350 else_end = else_bb->end;
352 /* Skip a (use (const_int 0)) or branch as the final insn. */
353 if (GET_CODE (else_end) == INSN
354 && GET_CODE (PATTERN (else_end)) == USE
355 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
356 else_end = PREV_INSN (else_end);
357 else if (GET_CODE (else_end) == JUMP_INSN)
358 else_end = PREV_INSN (else_end);
361 /* How many instructions should we convert in total? */
/* With an ELSE block both arms count, so allow twice the budget.  */
365 max = 2 * MAX_CONDITIONAL_EXECUTE;
366 n_insns = count_bb_insns (else_bb);
369 max = MAX_CONDITIONAL_EXECUTE;
370 n_insns += count_bb_insns (then_bb);
374 /* Map test_expr/test_jump into the appropriate MD tests to use on
375 the conditionally executed code. */
377 true_expr = test_expr;
378 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
379 GET_MODE (true_expr), XEXP (true_expr, 0),
380 XEXP (true_expr, 1));
382 #ifdef IFCVT_MODIFY_TESTS
383 /* If the machine description needs to modify the tests, such as setting a
384 conditional execution register from a comparison, it can do so here. */
385 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
388 /* See if the conversion failed */
389 if (!true_expr || !false_expr)
/* Derive the complementary probabilities from the REG_BR_PROB note
   on the test block's jump, if present.  */
393 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
396 true_prob_val = XEXP (true_prob_val, 0);
397 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
400 false_prob_val = NULL_RTX;
402 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
403 in the THEN block. */
404 then_mod_ok = (else_bb == NULL_BLOCK);
406 /* Go through the THEN and ELSE blocks converting the insns if possible
407 to conditional execution. */
410 && ! cond_exec_process_insns (then_start, then_end,
411 false_expr, false_prob_val, then_mod_ok))
415 && ! cond_exec_process_insns (else_start, else_end,
416 true_expr, true_prob_val, TRUE))
/* Commit all the queued validate_change requests atomically.  */
419 if (! apply_change_group ())
422 #ifdef IFCVT_MODIFY_FINAL
423 /* Do any machine dependent final modifications */
424 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
427 /* Conversion succeeded. */
429 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
430 n_insns, (n_insns == 1) ? " was" : "s were");
432 /* Merge the blocks! */
433 merge_if_block (test_bb, then_bb, else_bb, join_bb);
437 #ifdef IFCVT_MODIFY_CANCEL
438 /* Cancel any machine dependent changes. */
439 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
446 /* Used by noce_process_if_block to communicate with its subroutines.
448 The subroutines know that A and B may be evaluated freely. They
449 know that X is a register. They should insert new instructions
450 before cond_earliest. */
/* NOTE(review): the struct's opening line and the a/b/x/insn_a/insn_b
   members are not visible in this excerpt.  */
456 rtx jump, cond, cond_earliest;
459 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
461 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
462 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
463 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
464 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
465 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
466 rtx, enum rtx_code, rtx,
468 static int noce_try_cmove PARAMS ((struct noce_if_info *));
469 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
471 /* Helper function for noce_try_store_flag*. */
474 noce_emit_store_flag (if_info, x, reversep, normalize)
475 struct noce_if_info *if_info;
477 int reversep, normalize;
479 rtx cond = if_info->cond;
/* A condition is "complex" when either comparison operand is not a
   general operand; then emit_store_flag cannot be used directly.  */
483 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
484 || ! general_operand (XEXP (cond, 1), VOIDmode));
486 /* If earliest == jump, or when the condition is complex, try to
487 build the store_flag insn directly. */
490 cond = XEXP (SET_SRC (PATTERN (if_info->jump)), 0);
492 if ((if_info->cond_earliest == if_info->jump || cond_complex)
493 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
497 code = GET_CODE (cond);
499 code = reverse_condition (code);
/* Build (set X (CODE op0 op1)) and see if the target recognizes it.  */
501 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
503 tmp = gen_rtx_SET (VOIDmode, x, tmp);
506 tmp = emit_insn (tmp);
508 if (recog_memoized (tmp) >= 0)
514 if_info->cond_earliest = if_info->jump;
522 /* Don't even try if the comparison operands are weird. */
526 code = GET_CODE (cond);
528 code = reverse_condition (code);
/* Fall back to the generic expander; the last flag marks unsigned
   comparisons.  */
530 return emit_store_flag (x, code, XEXP (cond, 0),
531 XEXP (cond, 1), VOIDmode,
532 (code == LTU || code == LEU
533 || code == GEU || code == GTU), normalize);
536 /* Convert "if (test) x = 1; else x = 0".
538 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
539 tried in noce_try_store_flag_constants after noce_try_cmove has had
540 a go at the conversion. */
543 noce_try_store_flag (if_info)
544 struct noce_if_info *if_info;
/* Accept either orientation of the 0 / STORE_FLAG_VALUE pair; the
   reversed form additionally requires a reversible comparison.  */
549 if (GET_CODE (if_info->b) == CONST_INT
550 && INTVAL (if_info->b) == STORE_FLAG_VALUE
551 && if_info->a == const0_rtx
553 else if (if_info->b == const0_rtx
554 && GET_CODE (if_info->a) == CONST_INT
555 && INTVAL (if_info->a) == STORE_FLAG_VALUE
556 && can_reverse_comparison_p (if_info->cond, if_info->jump))
563 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
566 if (target != if_info->x)
567 emit_move_insn (if_info->x, target);
/* Place the new sequence where the condition was computed.  */
571 emit_insns_before (seq, if_info->cond_earliest);
582 /* Convert "if (test) x = a; else x = b", for A and B constant. */
585 noce_try_store_flag_constants (if_info)
586 struct noce_if_info *if_info;
590 HOST_WIDE_INT itrue, ifalse, diff, tmp;
591 int normalize, can_reverse;
594 && GET_CODE (if_info->a) == CONST_INT
595 && GET_CODE (if_info->b) == CONST_INT)
597 ifalse = INTVAL (if_info->a);
598 itrue = INTVAL (if_info->b);
599 diff = itrue - ifalse;
601 can_reverse = can_reverse_comparison_p (if_info->cond, if_info->jump);
/* Choose how to normalize the store-flag result (0, 1, or -1) and
   whether to reverse the comparison, based on the constant pair.  */
604 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
606 else if (ifalse == 0 && exact_log2 (itrue) >= 0
607 && (STORE_FLAG_VALUE == 1
608 || BRANCH_COST >= 2))
610 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
611 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
612 normalize = 1, reversep = 1;
614 && (STORE_FLAG_VALUE == -1
615 || BRANCH_COST >= 2))
617 else if (ifalse == -1 && can_reverse
618 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
619 normalize = -1, reversep = 1;
620 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
/* When reversing, the roles of the two constants swap too.  */
628 tmp = itrue; itrue = ifalse; ifalse = tmp;
633 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
640 /* if (test) x = 3; else x = 4;
641 => x = 3 + (test == 0); */
642 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
644 target = expand_binop (GET_MODE (if_info->x),
645 (diff == STORE_FLAG_VALUE
646 ? add_optab : sub_optab),
647 GEN_INT (ifalse), target, if_info->x, 0,
651 /* if (test) x = 8; else x = 0;
652 => x = (test != 0) << 3; */
653 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
655 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
656 target, GEN_INT (tmp), if_info->x, 0,
660 /* if (test) x = -1; else x = b;
661 => x = -(test != 0) | b; */
662 else if (itrue == -1)
664 target = expand_binop (GET_MODE (if_info->x), ior_optab,
665 target, GEN_INT (ifalse), if_info->x, 0,
669 /* if (test) x = a; else x = b;
670 => x = (-(test != 0) & (b - a)) + a; */
673 target = expand_binop (GET_MODE (if_info->x), and_optab,
674 target, GEN_INT (diff), if_info->x, 0,
677 target = expand_binop (GET_MODE (if_info->x), add_optab,
678 target, GEN_INT (ifalse), if_info->x, 0,
688 if (target != if_info->x)
689 emit_move_insn (if_info->x, target);
/* The expanders may have emitted jumps (e.g. for multi-word
   arithmetic); bail out if so.  */
694 if (seq_contains_jump (seq))
697 emit_insns_before (seq, if_info->cond_earliest);
705 /* Convert "if (test) foo++" into "foo += (test != 0)", and
706 similarly for "foo--". */
709 noce_try_store_flag_inc (if_info)
710 struct noce_if_info *if_info;
713 int subtract, normalize;
719 /* Should be no `else' case to worry about. */
720 && if_info->b == if_info->x
721 && GET_CODE (if_info->a) == PLUS
722 && (XEXP (if_info->a, 1) == const1_rtx
723 || XEXP (if_info->a, 1) == constm1_rtx)
724 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
725 && can_reverse_comparison_p (if_info->cond, if_info->jump)
/* Decide whether the store-flag value can be added/subtracted as-is
   or must be normalized to the +/-1 increment.  */
727 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
728 subtract = 0, normalize = 0;
729 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
730 subtract = 1, normalize = 0;
732 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
736 target = noce_emit_store_flag (if_info,
737 gen_reg_rtx (GET_MODE (if_info->x)),
741 target = expand_binop (GET_MODE (if_info->x),
742 subtract ? sub_optab : add_optab,
743 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
746 if (target != if_info->x)
747 emit_move_insn (if_info->x, target);
/* Abort if the expanders emitted a jump inside the sequence.  */
752 if (seq_contains_jump (seq))
755 emit_insns_before (seq, if_info->cond_earliest);
766 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
769 noce_try_store_flag_mask (if_info)
770 struct noce_if_info *if_info;
/* Requires STORE_FLAG_VALUE == -1 (or +1 per the invisible lines) so
   the flag negated forms an all-ones / all-zeros mask.  */
778 || STORE_FLAG_VALUE == -1)
779 && ((if_info->a == const0_rtx
780 && rtx_equal_p (if_info->b, if_info->x))
781 || ((reversep = can_reverse_comparison_p (if_info->cond,
783 && if_info->b == const0_rtx
784 && rtx_equal_p (if_info->a, if_info->x))))
787 target = noce_emit_store_flag (if_info,
788 gen_reg_rtx (GET_MODE (if_info->x)),
791 target = expand_binop (GET_MODE (if_info->x), and_optab,
792 if_info->x, target, if_info->x, 0,
797 if (target != if_info->x)
798 emit_move_insn (if_info->x, target);
/* Abort if the expanders emitted a jump inside the sequence.  */
803 if (seq_contains_jump (seq))
806 emit_insns_before (seq, if_info->cond_earliest);
817 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
820 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
821 struct noce_if_info *if_info;
822 rtx x, cmp_a, cmp_b, vfalse, vtrue;
825 /* If earliest == jump, try to build the cmove insn directly.
826 This is helpful when combine has created some complex condition
827 (like for alpha's cmovlbs) that we can't hope to regenerate
828 through the normal interface. */
830 if (if_info->cond_earliest == if_info->jump)
/* Construct (set X (if_then_else (CODE cmp_a cmp_b) vtrue vfalse))
   and test whether the target recognizes it as-is.  */
834 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
835 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
836 tmp = gen_rtx_SET (VOIDmode, x, tmp);
839 tmp = emit_insn (tmp);
841 if (recog_memoized (tmp) >= 0)
853 /* Don't even try if the comparison operands are weird. */
854 if (! general_operand (cmp_a, GET_MODE (cmp_a))
855 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
858 #if HAVE_conditional_move
/* Generic path: let the middle-end expander build the cmove.  The
   last argument marks unsigned comparison codes.  */
859 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
860 vtrue, vfalse, GET_MODE (x),
861 (code == LTU || code == GEU
862 || code == LEU || code == GTU));
864 /* We'll never get here, as noce_process_if_block doesn't call the
865 functions involved. Ifdef code, however, should be discouraged
866 because it leads to typos in the code not selected. However,
867 emit_conditional_move won't exist either. */
872 /* Try only simple constants and registers here. More complex cases
873 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
874 has had a go at it. */
877 noce_try_cmove (if_info)
878 struct noce_if_info *if_info;
/* Both arms must already be constants or registers -- no expression
   evaluation is done on this path.  */
883 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
884 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
888 code = GET_CODE (if_info->cond);
889 target = noce_emit_cmove (if_info, if_info->x, code,
890 XEXP (if_info->cond, 0),
891 XEXP (if_info->cond, 1),
892 if_info->a, if_info->b);
896 if (target != if_info->x)
897 emit_move_insn (if_info->x, target);
901 emit_insns_before (seq, if_info->cond_earliest);
914 /* Try more complex cases involving conditional_move. */
917 noce_try_cmove_arith (if_info)
918 struct noce_if_info *if_info;
928 /* A conditional move from two memory sources is equivalent to a
929 conditional on their addresses followed by a load. Don't do this
930 early because it'll screw alias analysis. Note that we've
931 already checked for no side effects. */
932 if (! no_new_pseudos && cse_not_expected
933 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
/* X becomes a pseudo holding the selected address.  */
938 x = gen_reg_rtx (Pmode);
942 /* ??? We could handle this if we knew that a load from A or B could
943 not fault. This is also true if we've already loaded
944 from the address along the path from ENTRY. */
945 else if (may_trap_p (a) || may_trap_p (b))
948 /* if (test) x = a + b; else x = c - d;
955 code = GET_CODE (if_info->cond);
956 insn_a = if_info->insn_a;
957 insn_b = if_info->insn_b;
959 /* Possibly rearrange operands to make things come out more natural. */
960 if (can_reverse_comparison_p (if_info->cond, if_info->jump))
963 if (rtx_equal_p (b, x))
965 else if (general_operand (b, GET_MODE (b)))
/* Reverse the condition and swap the A/B arms (and their insns).  */
970 code = reverse_condition (code);
971 tmp = a, a = b, b = tmp;
972 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
978 /* If either operand is complex, load it into a register first.
979 The best way to do this is to copy the original insn. In this
980 way we preserve any clobbers etc that the insn may have had.
981 This is of course not possible in the IS_MEM case. */
982 if (! general_operand (a, GET_MODE (a)))
987 goto end_seq_and_fail;
991 tmp = gen_reg_rtx (GET_MODE (a));
992 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
995 goto end_seq_and_fail;
998 a = gen_reg_rtx (GET_MODE (a));
999 tmp = copy_rtx (insn_a);
1000 set = single_set (tmp);
1002 tmp = emit_insn (PATTERN (tmp));
1004 if (recog_memoized (tmp) < 0)
1005 goto end_seq_and_fail;
1007 if (! general_operand (b, GET_MODE (b)))
1012 goto end_seq_and_fail;
1016 tmp = gen_reg_rtx (GET_MODE (b));
1017 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1020 goto end_seq_and_fail;
1023 b = gen_reg_rtx (GET_MODE (b));
1024 tmp = copy_rtx (insn_b);
1025 set = single_set (tmp);
1027 tmp = emit_insn (PATTERN (tmp));
1029 if (recog_memoized (tmp) < 0)
1030 goto end_seq_and_fail;
1033 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1034 XEXP (if_info->cond, 1), a, b);
1037 goto end_seq_and_fail;
1039 /* If we're handling a memory for above, emit the load now. */
1042 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1044 /* Copy over flags as appropriate. */
1045 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1046 MEM_VOLATILE_P (tmp) = 1;
1047 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1048 MEM_IN_STRUCT_P (tmp) = 1;
1049 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1050 MEM_SCALAR_P (tmp) = 1;
1051 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1052 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
1054 emit_move_insn (if_info->x, tmp);
1056 else if (target != x)
1057 emit_move_insn (x, target);
1061 emit_insns_before (tmp, if_info->cond_earliest);
1069 /* Look for the condition for the jump first. We'd prefer to avoid
1070 get_condition if we can -- it tries to look back for the contents
1071 of an original compare. On targets that use normal integers for
1072 comparisons, e.g. alpha, this is wasteful. */
1075 noce_get_condition (jump, earliest)
1082 /* If the condition variable is a register and is MODE_INT, accept it.
1083 Otherwise, fall back on get_condition. */
1085 if (! any_condjump_p (jump))
1088 set = pc_set (jump);
1090 cond = XEXP (SET_SRC (set), 0);
1091 if (GET_CODE (XEXP (cond, 0)) == REG
1092 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1096 /* If this branches to JUMP_LABEL when the condition is false,
1097 reverse the condition. */
1098 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1099 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1100 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1101 GET_MODE (cond), XEXP (cond, 0),
/* Fallback: let get_condition trace back to the original compare.  */
1105 cond = get_condition (jump, earliest);
1110 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1111 without using conditional execution. Return TRUE if we were
1112 successful at converting the block. */
1115 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1116 basic_block test_bb; /* Basic block test is in */
1117 basic_block then_bb; /* Basic block for THEN block */
1118 basic_block else_bb; /* Basic block for ELSE block */
1119 basic_block join_bb; /* Basic block the join label is in */
1121 /* We're looking for patterns of the form
1123 (1) if (...) x = a; else x = b;
1124 (2) x = b; if (...) x = a;
1125 (3) if (...) x = a; // as if with an initial x = x.
1127 The later patterns require jumps to be more expensive.
1129 ??? For future expansion, look for multiple X in such patterns. */
1131 struct noce_if_info if_info;
1134 rtx orig_x, x, a, b;
1135 rtx jump, cond, insn;
1137 /* If this is not a standard conditional jump, we can't parse it. */
1138 jump = test_bb->end;
1139 cond = noce_get_condition (jump, &if_info.cond_earliest);
1143 /* If the conditional jump is more than just a conditional jump,
1144 then we can not do if-conversion on this block. */
1145 if (! onlyjump_p (jump))
1148 /* We must be comparing objects whose modes imply the size. */
1149 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1152 /* Look for one of the potential sets. */
1153 insn_a = first_active_insn (then_bb);
1155 || ! last_active_insn_p (then_bb, insn_a)
1156 || (set_a = single_set (insn_a)) == NULL_RTX)
1159 x = SET_DEST (set_a);
1160 a = SET_SRC (set_a);
1162 /* Look for the other potential set. Make sure we've got equivalent
1164 /* ??? This is overconservative. Storing to two different mems is
1165 as easy as conditionally computing the address. Storing to a
1166 single mem merely requires a scratch memory to use as one of the
1167 destination addresses; often the memory immediately below the
1168 stack pointer is available for this. */
1172 insn_b = first_active_insn (else_bb);
1174 || ! last_active_insn_p (else_bb, insn_b)
1175 || (set_b = single_set (insn_b)) == NULL_RTX
1176 || ! rtx_equal_p (x, SET_DEST (set_b))
/* No ELSE block: look above the test for an "x = b" feeding
   pattern (2).  It must not touch X, the condition or A.  */
1181 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1183 || GET_CODE (insn_b) != INSN
1184 || (set_b = single_set (insn_b)) == NULL_RTX
1185 || ! rtx_equal_p (x, SET_DEST (set_b))
1186 || reg_mentioned_p (x, cond)
1187 || reg_mentioned_p (x, a)
1188 || reg_mentioned_p (x, SET_SRC (set_b)))
1189 insn_b = set_b = NULL_RTX;
1191 b = (set_b ? SET_SRC (set_b) : x);
1193 /* X may not be mentioned in the range (cond_earliest, jump]. */
1194 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1195 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1198 /* A and B may not be modified in the range [cond_earliest, jump). */
1199 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1201 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1204 /* Only operate on register destinations, and even then avoid extending
1205 the lifetime of hard registers on small register class machines. */
1207 if (GET_CODE (x) != REG
1208 || (SMALL_REGISTER_CLASSES
1209 && REGNO (x) < FIRST_PSEUDO_REGISTER))
/* Work in a fresh pseudo; orig_x gets the result copied back below.  */
1213 x = gen_reg_rtx (GET_MODE (x));
1216 /* Don't operate on sources that may trap or are volatile. */
1217 if (side_effects_p (a) || side_effects_p (b)
1218 || (GET_CODE (a) != MEM && may_trap_p (a))
1219 || (GET_CODE (b) != MEM && may_trap_p (b)))
1222 /* Set up the info block for our subroutines. */
1223 if_info.cond = cond;
1224 if_info.jump = jump;
1225 if_info.insn_a = insn_a;
1226 if_info.insn_b = insn_b;
1231 /* Try optimizations in some approximation of a useful order. */
1232 /* ??? Should first look to see if X is live incoming at all. If it
1233 isn't, we don't need anything but an unconditional set. */
1235 /* Look and see if A and B are really the same. Avoid creating silly
1236 cmove constructs that no one will fix up later. */
1237 if (rtx_equal_p (a, b))
1239 /* If we have an INSN_B, we don't have to create any new rtl. Just
1240 move the instruction that we already have. If we don't have an
1241 INSN_B, that means that A == X, and we've got a noop move. In
1242 that case don't do anything and let the code below delete INSN_A. */
1243 if (insn_b && else_bb)
1245 if (else_bb && insn_b == else_bb->end)
1246 else_bb->end = PREV_INSN (insn_b);
1247 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
/* Each noce_try_* either succeeds (jumping to the success path) or
   leaves state unchanged for the next attempt.  */
1254 if (noce_try_store_flag (&if_info))
1256 if (HAVE_conditional_move
1257 && noce_try_cmove (&if_info))
1259 if (! HAVE_conditional_execution)
1261 if (noce_try_store_flag_constants (&if_info))
1263 if (noce_try_store_flag_inc (&if_info))
1265 if (noce_try_store_flag_mask (&if_info))
1267 if (HAVE_conditional_move
1268 && noce_try_cmove_arith (&if_info))
1275 /* The original sets may now be killed. */
1276 if (insn_a == then_bb->end)
1277 then_bb->end = PREV_INSN (insn_a);
1278 flow_delete_insn (insn_a);
1280 /* Several special cases here: First, we may have reused insn_b above,
1281 in which case insn_b is now NULL. Second, we want to delete insn_b
1282 if it came from the ELSE block, because it follows the now correct
1283 write that appears in the TEST block. However, if we got insn_b from
1284 the TEST block, it may in fact be loading data needed for the comparison.
1285 We'll let life_analysis remove the insn if it's really dead. */
1286 if (insn_b && else_bb)
1288 if (insn_b == else_bb->end)
1289 else_bb->end = PREV_INSN (insn_b);
1290 flow_delete_insn (insn_b);
1293 /* The new insns will have been inserted before cond_earliest. We should
1294 be able to remove the jump with impunity, but the condition itself may
1295 have been modified by gcse to be shared across basic blocks. */
1296 test_bb->end = PREV_INSN (jump);
1297 flow_delete_insn (jump);
1299 /* If we used a temporary, fix it up now. */
1303 emit_move_insn (orig_x, x)
1304 insn_b = gen_sequence ();
1307 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1310 /* Merge the blocks! */
1311 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1316 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1317 straight line code. Return true if successful. */
1320 process_if_block (test_bb, then_bb, else_bb, join_bb)
1321 basic_block test_bb; /* Basic block test is in */
1322 basic_block then_bb; /* Basic block for THEN block */
1323 basic_block else_bb; /* Basic block for ELSE block */
1324 basic_block join_bb; /* Basic block the join label is in */
/* Before reload, prefer the non-conditional-execution (noce)
   strategies; otherwise fall back to predication if the target
   supports it.  */
1326 if (! reload_completed
1327 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1330 if (HAVE_conditional_execution
1332 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1338 /* Merge the blocks and mark for local life update. */
1341 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1342 basic_block test_bb; /* Basic block test is in */
1343 basic_block then_bb; /* Basic block for THEN block */
1344 basic_block else_bb; /* Basic block for ELSE block */
1345 basic_block join_bb; /* Basic block the join label is in */
1347 basic_block combo_bb;
1349 /* All block merging is done into the lower block numbers. */
1353 /* First merge TEST block into THEN block. This is a no-brainer since
1354 the THEN block did not have a code label to begin with. */
1356 if (combo_bb->global_live_at_end)
1357 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1358 merge_blocks_nomove (combo_bb, then_bb);
1359 num_removed_blocks++;
1361 /* The ELSE block, if it existed, had a label. That label count
1362 will almost always be zero, but odd things can happen when labels
1363 get their addresses taken. */
1366 merge_blocks_nomove (combo_bb, else_bb);
1367 num_removed_blocks++;
1370 /* If there was no join block reported, that means it was not adjacent
1371 to the others, and so we cannot merge them. */
1375 /* The outgoing edge for the current COMBO block should already
1376 be correct. Verify this. */
1377 if (combo_bb->succ == NULL_EDGE)
1380 /* There should still be a branch at the end of the THEN or ELSE
1381 blocks taking us to our final destination. */
1382 if (! simplejump_p (combo_bb->end)
1383 && ! returnjump_p (combo_bb->end))
1387 /* The JOIN block may have had quite a number of other predecessors too.
1388 Since we've already merged the TEST, THEN and ELSE blocks, we should
1389 have only one remaining edge from our if-then-else diamond. If there
1390 is more than one remaining edge, it must come from elsewhere. There
1391 may be zero incoming edges if the THEN block didn't actually join
1392 back up (as with a call to abort). */
1393 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1395 /* We can merge the JOIN. */
1396 if (combo_bb->global_live_at_end)
1397 COPY_REG_SET (combo_bb->global_live_at_end,
1398 join_bb->global_live_at_end);
1399 merge_blocks_nomove (combo_bb, join_bb);
1400 num_removed_blocks++;
1404 /* We cannot merge the JOIN. */
1406 /* The outgoing edge for the current COMBO block should already
1407 be correct. Verify this. */
1408 if (combo_bb->succ->succ_next != NULL_EDGE
1409 || combo_bb->succ->dest != join_bb)
1412 /* Remove the jump and cruft from the end of the COMBO block. */
1413 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1416 /* Make sure we update life info properly. */
1417 SET_UPDATE_LIFE (combo_bb);
1419 num_updated_if_blocks++;
1422 /* Find a block ending in a simple IF condition. Return TRUE if
1423 we were able to transform it in some way. */
/* NOTE(review): numeric prefixes are original line numbers; gaps mean
   elided lines.  Only comments were added below.  */
/* Entry point per basic block: validate the two-successor shape,
   canonicalize THEN as the fallthru edge, then try the full diamond
   (find_if_block) and the two half-diamond special cases.  */
1426 find_if_header (test_bb)
1427 basic_block test_bb;
1432 /* The kind of block we're looking for has exactly two successors. */
1433 if ((then_edge = test_bb->succ) == NULL_EDGE
1434 || (else_edge = then_edge->succ_next) == NULL_EDGE
1435 || else_edge->succ_next != NULL_EDGE)
1438 /* Neither edge should be abnormal. */
1439 if ((then_edge->flags & EDGE_COMPLEX)
1440 || (else_edge->flags & EDGE_COMPLEX))
1443 /* The THEN edge is canonically the one that falls through. */
1444 if (then_edge->flags & EDGE_FALLTHRU)
1446 else if (else_edge->flags & EDGE_FALLTHRU)
/* Edges arrived swapped: exchange them so THEN is the fallthru.  */
1449 else_edge = then_edge;
1453 /* Otherwise this must be a multiway branch of some sort. */
1456 if (find_if_block (test_bb, then_edge, else_edge))
/* Cases 1 and 2 below are only tried when conditional execution is
   unavailable or after reload (see the guard fragment).  */
1459 && (! HAVE_conditional_execution || reload_completed))
1461 if (find_if_case_1 (test_bb, then_edge, else_edge))
1463 if (find_if_case_2 (test_bb, then_edge, else_edge))
1471 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1475 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1476 block. If so, we'll try to convert the insns to not require the branch.
1477 Return TRUE if we were successful at converting the block. */
/* NOTE(review): numeric prefixes are original line numbers; gaps mean
   elided lines.  Only comments were added/corrected below.  */
1480 find_if_block (test_bb, then_edge, else_edge)
1481 basic_block test_bb;
1482 edge then_edge, else_edge;
1484 basic_block then_bb = then_edge->dest;
1485 basic_block else_bb = else_edge->dest;
1486 basic_block join_bb = NULL_BLOCK;
1487 edge then_succ = then_bb->succ;
1488 edge else_succ = else_bb->succ;
1491 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1492 if (then_bb->pred->pred_next != NULL_EDGE)
1495 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1496 if (then_succ != NULL_EDGE
1497 && (then_succ->succ_next != NULL_EDGE
1498 || (then_succ->flags & EDGE_COMPLEX)))
1501 /* If the THEN block has no successors, conditional execution can still
1502 make a conditional call. Don't do this unless the ELSE block has
1503 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1504 Check for the last insn of the THEN block being an indirect jump, which
1505 is listed as not having any successors, but confuses the rest of the CE
1506 code processing. XXX we should fix this in the future. */
1507 if (then_succ == NULL)
1509 if (else_bb->pred->pred_next == NULL_EDGE)
1511 rtx last_insn = then_bb->end;
/* Skip trailing notes when looking for the final real insn.  */
1514 && GET_CODE (last_insn) == NOTE
1515 && last_insn != then_bb->head)
1516 last_insn = PREV_INSN (last_insn);
/* Reject the indirect-jump case described above.  */
1519 && GET_CODE (last_insn) == JUMP_INSN
1520 && ! simplejump_p (last_insn))
/* THEN never rejoins: treat as IF-THEN with ELSE acting as the join.  */
1524 else_bb = NULL_BLOCK;
1530 /* If the THEN block's successor is the other edge out of the TEST block,
1531 then we have an IF-THEN combo without an ELSE. */
1532 else if (then_succ->dest == else_bb)
1535 else_bb = NULL_BLOCK;
1538 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1539 has exactly one predecessor and one successor, and the outgoing edge
1540 is not complex, then we have an IF-THEN-ELSE combo. */
1541 else if (else_succ != NULL_EDGE
1542 && then_succ->dest == else_succ->dest
1543 && else_bb->pred->pred_next == NULL_EDGE
1544 && else_succ->succ_next == NULL_EDGE
1545 && ! (else_succ->flags & EDGE_COMPLEX))
1546 join_bb = else_succ->dest;
1548 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1552 num_possible_if_blocks++;
1557 fprintf (rtl_dump_file,
1558 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1559 test_bb->index, then_bb->index, else_bb->index,
1562 fprintf (rtl_dump_file,
1563 "\nIF-THEN block found, start %d, then %d, join %d\n",
1564 test_bb->index, then_bb->index, join_bb->index);
1567 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
1568 get the first condition for free, since we've already asserted that
1569 there's a fallthru edge from IF to THEN. */
1570 /* ??? As an enhancement, move the ELSE block. Have to deal with EH and
1571 BLOCK notes, if by no other means than aborting the merge if they
1572 exist. Sticky enough I don't want to think about it now. */
1573 next_index = then_bb->index;
1574 if (else_bb && ++next_index != else_bb->index)
1576 if (++next_index != join_bb->index)
1584 /* Do the real work. */
1585 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1588 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1589 transformable, but not necessarily the other. There need be no
1592 Return TRUE if we were successful at converting the block.
1594 Cases we'd like to look at:
1597 if (test) goto over; // x not live
1605 if (! test) goto label;
1608 if (test) goto E; // x not live
1622 (3) // This one's really only interesting for targets that can do
1623 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1624 // it results in multiple branches on a cache line, which often
1625 // does not sit well with predictors.
1627 if (test1) goto E; // predicted not taken
1643 (A) Don't do (2) if the branch is predicted against the block we're
1644 eliminating. Do it anyway if we can eliminate a branch; this requires
1645 that the sole successor of the eliminated block postdominate the other
1648 (B) With CE, on (3) we can steal from both sides of the if, creating
1657 Again, this is most useful if J postdominates.
1659 (C) CE substitutes for helpful life information.
1661 (D) These heuristics need a lot of work. */
1663 /* Tests for case 1 above. */
/* NOTE(review): numeric prefixes are original line numbers; gaps mean
   elided lines.  Only comments were added below.  */
/* Case 1: THEN is a small single-pred/single-succ block ending in a
   jump (no fallthru).  If its register sets are dead or predicable
   (dead_or_predicable with reversep=1), redirect TEST past it and
   delete THEN entirely.  Return TRUE on success.  */
1666 find_if_case_1 (test_bb, then_edge, else_edge)
1667 basic_block test_bb;
1668 edge then_edge, else_edge;
1670 basic_block then_bb = then_edge->dest;
1671 basic_block else_bb = else_edge->dest;
1672 edge then_succ = then_bb->succ;
1675 /* THEN has one successor. */
1676 if (!then_succ || then_succ->succ_next != NULL)
1679 /* THEN does not fall through, but is not strange either. */
1680 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
1683 /* THEN has one predecessor. */
1684 if (then_bb->pred->pred_next != NULL)
1687 /* ELSE follows THEN. (??? could be moved) */
1688 if (else_bb->index != then_bb->index + 1)
1691 num_possible_if_blocks++;
1693 fprintf (rtl_dump_file,
1694 "\nIF-CASE-1 found, start %d, then %d\n",
1695 test_bb->index, then_bb->index);
1697 /* THEN is small. */
1698 if (count_bb_insns (then_bb) > BRANCH_COST)
1701 /* Find the label for THEN's destination. */
1702 if (then_succ->dest == EXIT_BLOCK_PTR)
1706 new_lab = JUMP_LABEL (then_bb->end);
1711 /* Registers set are dead, or are predicable. */
1712 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
1715 /* Conversion went ok, including moving the insns and fixing up the
1716 jump. Adjust the CFG to match. */
1718 SET_UPDATE_LIFE (test_bb);
/* TEST's live-at-end becomes the union of ELSE's live-at-start and
   THEN's live-at-end, since both paths now emanate from TEST.  */
1719 bitmap_operation (test_bb->global_live_at_end,
1720 else_bb->global_live_at_start,
1721 then_bb->global_live_at_end, BITMAP_IOR);
1723 make_edge (NULL, test_bb, then_succ->dest, 0);
1724 flow_delete_block (then_bb);
1725 tidy_fallthru_edge (else_edge, test_bb, else_bb);
1727 num_removed_blocks++;
1728 num_updated_if_blocks++;
1733 /* Test for case 2 above. */
/* NOTE(review): numeric prefixes are original line numbers; gaps mean
   elided lines.  */
/* Case 2: ELSE is a small single-pred/single-succ block.  If the
   branch prediction or postdomination heuristic allows it, and ELSE's
   register sets are dead or predicable (dead_or_predicable with
   reversep=0), redirect TEST past ELSE and delete it.  Return TRUE
   on success.  */
1736 find_if_case_2 (test_bb, then_edge, else_edge)
1737 basic_block test_bb;
1738 edge then_edge, else_edge;
1740 basic_block then_bb = then_edge->dest;
1741 basic_block else_bb = else_edge->dest;
1742 edge else_succ = else_bb->succ;
1745 /* ELSE has one successor. */
1746 if (!else_succ || else_succ->succ_next != NULL)
1749 /* ELSE outgoing edge is not complex. */
1750 if (else_succ->flags & EDGE_COMPLEX)
1753 /* ELSE has one predecessor. */
1754 if (else_bb->pred->pred_next != NULL)
1757 /* THEN is not EXIT. */
1758 if (then_bb->index < 0)
1761 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
1762 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
1763 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
1765 else if (else_succ->dest->index < 0
1766 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
1767 ORIG_INDEX (else_succ->dest)))
1772 num_possible_if_blocks++;
1774 fprintf (rtl_dump_file,
1775 "\nIF-CASE-2 found, start %d, else %d\n",
1776 test_bb->index, else_bb->index);
1778 /* ELSE is small. */
/* FIX(review): the size test must measure ELSE -- the block being
   eliminated here -- not THEN.  The original read count_bb_insns
   (then_bb), mirroring case 1 by mistake; later GCC revisions use
   else_bb.  */
1779 if (count_bb_insns (else_bb) > BRANCH_COST)
1782 /* Find the label for ELSE's destination. */
1783 if (else_succ->dest == EXIT_BLOCK_PTR)
1787 if (else_succ->flags & EDGE_FALLTHRU)
1789 new_lab = else_succ->dest->head;
1790 if (GET_CODE (new_lab) != CODE_LABEL)
1795 new_lab = JUMP_LABEL (else_bb->end);
1801 /* Registers set are dead, or are predicable. */
1802 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
1805 /* Conversion went ok, including moving the insns and fixing up the
1806 jump. Adjust the CFG to match. */
1808 SET_UPDATE_LIFE (test_bb);
/* TEST's live-at-end becomes the union of THEN's live-at-start and
   ELSE's live-at-end, since both paths now emanate from TEST.  */
1809 bitmap_operation (test_bb->global_live_at_end,
1810 then_bb->global_live_at_start,
1811 else_bb->global_live_at_end, BITMAP_IOR);
1813 remove_edge (else_edge);
1814 make_edge (NULL, test_bb, else_succ->dest, 0);
1815 flow_delete_block (else_bb);
1817 num_removed_blocks++;
1818 num_updated_if_blocks++;
1820 /* ??? We may now fallthru from one of THEN's successors into a join
1821 block. Rerun cleanup_cfg? Examine things manually? Wait? */
1826 /* A subroutine of dead_or_predicable called through for_each_rtx.
1827 Return 1 if a memory is found. */
/* NOTE(review): numeric prefixes are original line numbers; gaps mean
   elided lines.  Presumably a nonzero return halts the for_each_rtx
   walk at the first MEM -- confirm against the for_each_rtx contract.  */
1830 find_memory (px, data)
1832 void *data ATTRIBUTE_UNUSED;
1834 return GET_CODE (*px) == MEM;
1837 /* Used by the code above to perform the actual rtl transformations.
1838 Return TRUE if successful.
1840 TEST_BB is the block containing the conditional branch. MERGE_BB
1841 is the block containing the code to manipulate. NEW_DEST is the
1842 label TEST_BB should be branching to after the conversion.
1843 REVERSEP is true if the sense of the branch should be reversed. */
/* NOTE(review): numeric prefixes are original line numbers; gaps mean
   elided lines (returns, braces, else arms).  Only comments were
   added/corrected below.  */
1846 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
1847 basic_block test_bb, merge_bb, other_bb;
1851 rtx head, end, jump, earliest, old_dest;
1853 jump = test_bb->end;
1855 /* Find the extent of the real code in the merge block. */
1856 head = merge_bb->head;
1857 end = merge_bb->end;
/* Strip the leading label and notes, and the trailing jump, so
   HEAD..END covers only movable/predicable insns.  */
1859 if (GET_CODE (head) == CODE_LABEL)
1860 head = NEXT_INSN (head);
1861 if (GET_CODE (head) == NOTE)
1865 head = end = NULL_RTX;
1868 head = NEXT_INSN (head);
1871 if (GET_CODE (end) == JUMP_INSN)
1875 head = end = NULL_RTX;
1878 end = PREV_INSN (end);
1881 /* Disable handling dead code by conditional execution if the machine needs
1882 to do anything funny with the tests, etc. */
1883 #ifndef IFCVT_MODIFY_TESTS
1884 if (HAVE_conditional_execution)
1886 /* In the conditional execution case, we have things easy. We know
1887 the condition is reversible. We don't have to check life info,
1888 because we're going to conditionally execute the code anyway.
1889 All that's left is making sure the insns involved can actually
1894 cond = cond_exec_get_condition (jump);
1896 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
1898 prob_val = XEXP (prob_val, 0);
/* When reversing, invert both the condition code and the recorded
   branch probability.  */
1902 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1903 GET_MODE (cond), XEXP (cond, 0),
1906 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
1909 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
1917 /* In the non-conditional execution case, we have to verify that there
1918 are no trapping operations, no calls, no references to memory, and
1919 that any registers modified are dead at the branch site. */
1921 rtx insn, cond, prev;
1922 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
1923 regset merge_set, tmp, test_live, test_set;
1924 struct propagate_block_info *pbi;
1927 /* Check for no calls or trapping operations. */
1928 for (insn = head; ; insn = NEXT_INSN (insn))
1930 if (GET_CODE (insn) == CALL_INSN)
1934 if (may_trap_p (PATTERN (insn)))
1937 /* ??? Even non-trapping memories such as stack frame
1938 references must be avoided. For stores, we collect
1939 no lifetime info; for reads, we'd have to assert
1940 true_dependence false against every store in the
1942 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
1949 if (! any_condjump_p (jump))
1952 /* Find the extent of the conditional. */
1953 cond = noce_get_condition (jump, &earliest);
/* Compute three register sets (definitions below):
1958 MERGE_SET = set of registers set in MERGE_BB
1959 TEST_LIVE = set of registers live at EARLIEST
1960 TEST_SET = set of registers set between EARLIEST and the
1961 end of the block. */
1963 tmp = INITIALIZE_REG_SET (tmp_head);
1964 merge_set = INITIALIZE_REG_SET (merge_set_head);
1965 test_live = INITIALIZE_REG_SET (test_live_head);
1966 test_set = INITIALIZE_REG_SET (test_set_head);
1968 /* ??? bb->local_set is only valid during calculate_global_regs_live,
1969 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
1970 since we've already asserted that MERGE_BB is small. */
1971 propagate_block (merge_bb, tmp, merge_set, 0);
1973 /* For small register class machines, don't lengthen lifetimes of
1974 hard registers before reload. */
1975 if (SMALL_REGISTER_CLASSES && ! reload_completed)
1977 EXECUTE_IF_SET_IN_BITMAP
1980 if (i < FIRST_PSEUDO_REGISTER
1982 && ! global_regs[i])
1987 /* For TEST, we're interested in a range of insns, not a whole block.
1988 Moreover, we're interested in the insns live from OTHER_BB. */
1990 COPY_REG_SET (test_live, other_bb->global_live_at_start);
1991 pbi = init_propagate_block_info (test_bb, test_live, test_set, 0);
/* Walk backwards from the jump to EARLIEST, accumulating liveness.  */
1993 for (insn = jump; ; insn = prev)
1995 prev = propagate_one_insn (pbi, insn);
1996 if (insn == earliest)
2000 free_propagate_block_info (pbi);
2002 /* We can perform the transformation if
2003 MERGE_SET & (TEST_SET | TEST_LIVE)
2005 TEST_SET & merge_bb->global_live_at_start
/* ... are both empty (the elided lines spell this out): the moved
   insns must not clobber anything the test range sets or needs, and
   the test range must not clobber MERGE_BB's inputs.  */
2008 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2009 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2010 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2012 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2014 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2017 FREE_REG_SET (merge_set);
2018 FREE_REG_SET (test_live);
2019 FREE_REG_SET (test_set);
2026 /* We don't want to use normal invert_jump or redirect_jump because
2027 we don't want to delete_insn called. Also, we want to do our own
2028 change group management. */
2030 old_dest = JUMP_LABEL (jump);
2032 ? ! invert_jump_1 (jump, new_dest)
2033 : ! redirect_jump_1 (jump, new_dest))
2036 if (! apply_change_group ())
/* Maintain label reference counts by hand, matching the manual
   redirection above.  */
2040 LABEL_NUSES (old_dest) -= 1;
2042 LABEL_NUSES (new_dest) += 1;
2043 JUMP_LABEL (jump) = new_dest;
2047 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2049 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
2052 /* Move the insns out of MERGE_BB to before the branch. */
2055 if (end == merge_bb->end)
2056 merge_bb->end = PREV_INSN (head);
/* Keep block-structure and loop notes out of the moved range; back
   END off over any such trailing note.  */
2058 head = squeeze_notes (head, end);
2059 if (GET_CODE (end) == NOTE
2060 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2061 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2062 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2063 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2064 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2065 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2069 end = PREV_INSN (end);
2072 reorder_insns (head, end, PREV_INSN (earliest));
2081 /* Main entry point for all if-conversion. */
2084 if_convert (life_data_ok)
2089 num_possible_if_blocks = 0;
2090 num_updated_if_blocks = 0;
2091 num_removed_blocks = 0;
2093 /* Free up basic_block_for_insn so that we don't have to keep it
2094 up to date, either here or in merge_blocks_nomove. */
2095 free_basic_block_vars (1);
2097 /* Compute postdominators if we think we'll use them. */
2098 post_dominators = NULL;
2099 if (HAVE_conditional_execution || life_data_ok)
2101 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2102 compute_flow_dominators (NULL, post_dominators);
2105 /* Record initial block numbers. */
2106 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2107 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2109 /* Go through each of the basic blocks looking for things to convert. */
2110 for (block_num = 0; block_num < n_basic_blocks; )
2112 basic_block bb = BASIC_BLOCK (block_num);
2113 if (find_if_header (bb))
2114 block_num = bb->index;
2119 if (post_dominators)
2120 sbitmap_vector_free (post_dominators);
2123 fflush (rtl_dump_file);
2125 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2126 compute_bb_for_insn (get_max_uid ());
2128 /* Rebuild life info for basic blocks that require it. */
2129 if (num_removed_blocks && life_data_ok)
2131 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2132 sbitmap_zero (update_life_blocks);
2134 /* If we allocated new pseudos, we must resize the array for sched1. */
2135 if (max_regno < max_reg_num ())
2137 max_regno = max_reg_num ();
2138 allocate_reg_info (max_regno, FALSE, FALSE);
2141 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2142 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2143 SET_BIT (update_life_blocks, block_num);
2145 count_or_remove_death_notes (update_life_blocks, 1);
2146 /* ??? See about adding a mode that verifies that the initial
2147 set of blocks don't let registers come live. */
2148 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2149 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2150 | PROP_KILL_DEAD_CODE);
2152 sbitmap_free (update_life_blocks);
2155 /* Write the final stats. */
2156 if (rtl_dump_file && num_possible_if_blocks > 0)
2158 fprintf (rtl_dump_file,
2159 "\n%d possible IF blocks searched.\n",
2160 num_possible_if_blocks);
2161 fprintf (rtl_dump_file,
2162 "%d IF blocks converted.\n",
2163 num_updated_if_blocks);
2164 fprintf (rtl_dump_file,
2165 "%d basic blocks deleted.\n\n\n",
2166 num_removed_blocks);
2169 #ifdef ENABLE_CHECKING
2170 verify_flow_info ();