1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-config.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
39 #ifndef HAVE_conditional_execution
40 #define HAVE_conditional_execution 0
42 #ifndef HAVE_conditional_move
43 #define HAVE_conditional_move 0
52 #ifndef MAX_CONDITIONAL_EXECUTE
53 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
56 #define NULL_EDGE ((struct edge_def *)NULL)
57 #define NULL_BLOCK ((struct basic_block_def *)NULL)
/* Pass-wide state for the if-conversion pass: conversion statistics,
   a flag tracking whether register-life data is currently valid, and
   the post-dominator sets computed over the original CFG.  */
59 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
60 static int num_possible_if_blocks;
62 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
   execution.  */
64 static int num_updated_if_blocks;
66 /* # of basic blocks that were removed. */
67 static int num_removed_blocks;
69 /* True if life data ok at present. */
70 static bool life_data_ok;
72 /* The post-dominator relation on the original block numbers. */
73 static sbitmap *post_dominators;
75 /* Forward references. */
/* Prototypes use the pre-ISO PARAMS macro so the file still builds with
   K&R compilers; the definitions below use old-style parameter lists.  */
76 static int count_bb_insns PARAMS ((basic_block));
77 static rtx first_active_insn PARAMS ((basic_block));
78 static int last_active_insn_p PARAMS ((basic_block, rtx));
79 static int seq_contains_jump PARAMS ((rtx));
81 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
82 static rtx cond_exec_get_condition PARAMS ((rtx));
83 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
84 basic_block, basic_block));
86 static rtx noce_get_condition PARAMS ((rtx, rtx *));
87 static int noce_operand_ok PARAMS ((rtx));
88 static int noce_process_if_block PARAMS ((basic_block, basic_block,
89 basic_block, basic_block));
91 static int process_if_block PARAMS ((basic_block, basic_block,
92 basic_block, basic_block));
93 static void merge_if_block PARAMS ((basic_block, basic_block,
94 basic_block, basic_block));
96 static int find_if_header PARAMS ((basic_block));
97 static int find_if_block PARAMS ((basic_block, edge, edge));
98 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
99 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
100 static int find_memory PARAMS ((rtx *, void *));
101 static int dead_or_predicable PARAMS ((basic_block, basic_block,
102 basic_block, rtx, int));
103 static void noce_emit_move_insn PARAMS ((rtx, rtx));
105 /* Abuse the basic_block AUX field to store the original block index,
106 as well as a flag indicating that the block should be rescanned for
/* The index is kept in the upper bits (shifted left by one) and the
   update-life flag in bit 0, so both fit in the single aux pointer.  */
109 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
110 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
111 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
112 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
115 /* Count the number of non-jump active insns in BB. */
/* NOTE(review): this listing is a sampled extract -- the function header,
   loop framing and return are not visible here.  Only real INSNs and
   CALL_INSNs are counted (notes, labels and jumps are excluded).  */
126 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
131 insn = NEXT_INSN (insn);
137 /* Return the first non-jump active insn in the basic block. */
140 first_active_insn (bb)
/* Skip the block's label and any notes; a leading JUMP_INSN means the
   block has no active insn to return.  */
145 if (GET_CODE (insn) == CODE_LABEL)
149 insn = NEXT_INSN (insn);
152 while (GET_CODE (insn) == NOTE)
156 insn = NEXT_INSN (insn);
159 if (GET_CODE (insn) == JUMP_INSN)
165 /* Return true if INSN is the last active non-jump insn in BB. */
168 last_active_insn_p (bb, insn)
/* Advance past trailing notes; INSN was last iff the next active insn
   is the block-ending jump.  */
176 insn = NEXT_INSN (insn);
178 while (GET_CODE (insn) == NOTE);
180 return GET_CODE (insn) == JUMP_INSN;
183 /* It is possible, especially when having dealt with multi-word
184 arithmetic, for the expanders to have emitted jumps. Search
185 through the sequence and return TRUE if a jump exists so that
186 we can abort the conversion. */
189 seq_contains_jump (insn)
/* Linear scan over the emitted sequence looking for any JUMP_INSN.  */
194 if (GET_CODE (insn) == JUMP_INSN)
196 insn = NEXT_INSN (insn);
201 /* Go through a bunch of insns, converting them to conditional
202 execution format if possible. Return TRUE if all of the non-note
203 insns were processed. */
206 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
207 rtx start; /* first insn to look at */
208 rtx end; /* last insn to look at */
209 rtx test; /* conditional execution test */
210 rtx prob_val; /* probability of branch taken. */
211 int mod_ok; /* true if modifications ok last insn. */
213 int must_be_last = FALSE;
217 for (insn = start; ; insn = NEXT_INSN (insn))
219 if (GET_CODE (insn) == NOTE)
222 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
225 /* Remove USE insns that get in the way. */
226 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
228 /* ??? Ug. Actually unlinking the thing is problematic,
229 given what we'd have to coordinate with our callers. */
230 PUT_CODE (insn, NOTE);
231 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
232 NOTE_SOURCE_FILE (insn) = 0;
236 /* Last insn wasn't last? */
240 if (modified_in_p (test, insn))
247 /* Now build the conditional form of the instruction. */
248 pattern = PATTERN (insn);
250 /* If the machine needs to modify the insn being conditionally executed,
251 say for example to force a constant integer operand into a temp
252 register, do so here. */
253 #ifdef IFCVT_MODIFY_INSN
254 IFCVT_MODIFY_INSN (pattern, insn);
/* Wrap the pattern in a COND_EXEC of a fresh copy of TEST; the change is
   queued via validate_change and committed later by apply_change_group.  */
259 validate_change (insn, &PATTERN (insn),
260 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
/* For calls, also record the branch probability as a REG_BR_PROB note.
   (Fixed here: "&REG_NOTES" had been mis-decoded to the mojibake
   "(R)_NOTES" by an HTML-entity mangling of "&REG".)  */
263 if (GET_CODE (insn) == CALL_INSN && prob_val)
264 validate_change (insn, &REG_NOTES (insn),
265 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
266 REG_NOTES (insn)), 1);
276 /* Return the condition for a jump. Do not do any special processing. */
279 cond_exec_get_condition (jump)
/* Only simple conditional jumps are handled; the condition is the first
   operand of the IF_THEN_ELSE in the jump's SET_SRC.  */
284 if (any_condjump_p (jump))
285 test_if = SET_SRC (pc_set (jump));
288 cond = XEXP (test_if, 0);
290 /* If this branches to JUMP_LABEL when the condition is false,
291 reverse the condition. */
292 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
293 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
294 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
295 GET_MODE (cond), XEXP (cond, 0),
301 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
302 to conditional execution. Return TRUE if we were successful at
303 converting the block. */
306 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
307 basic_block test_bb; /* Basic block test is in */
308 basic_block then_bb; /* Basic block for THEN block */
309 basic_block else_bb; /* Basic block for ELSE block */
310 basic_block join_bb; /* Basic block the join label is in */
312 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
313 rtx then_start; /* first insn in THEN block */
314 rtx then_end; /* last insn + 1 in THEN block */
315 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
316 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
317 int max; /* max # of insns to convert. */
318 int then_mod_ok; /* whether conditional mods are ok in THEN */
319 rtx true_expr; /* test for else block insns */
320 rtx false_expr; /* test for then block insns */
321 rtx true_prob_val; /* probability of else block */
322 rtx false_prob_val; /* probability of then block */
325 /* Find the conditional jump to the ELSE or JOIN part, and isolate
327 test_expr = cond_exec_get_condition (test_bb->end);
331 /* If the conditional jump is more than just a conditional jump,
332 then we can not do conditional execution conversion on this block. */
333 if (!onlyjump_p (test_bb->end))
336 /* Collect the bounds of where we're to search. */
338 then_start = then_bb->head;
339 then_end = then_bb->end;
341 /* Skip a label heading THEN block. */
342 if (GET_CODE (then_start) == CODE_LABEL)
343 then_start = NEXT_INSN (then_start);
345 /* Skip a (use (const_int 0)) or branch as the final insn. */
346 if (GET_CODE (then_end) == INSN
347 && GET_CODE (PATTERN (then_end)) == USE
348 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
349 then_end = PREV_INSN (then_end);
350 else if (GET_CODE (then_end) == JUMP_INSN)
351 then_end = PREV_INSN (then_end);
355 /* Skip the ELSE block's label. */
356 else_start = NEXT_INSN (else_bb->head);
357 else_end = else_bb->end;
359 /* Skip a (use (const_int 0)) or branch as the final insn. */
360 if (GET_CODE (else_end) == INSN
361 && GET_CODE (PATTERN (else_end)) == USE
362 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
363 else_end = PREV_INSN (else_end);
364 else if (GET_CODE (else_end) == JUMP_INSN)
365 else_end = PREV_INSN (else_end);
368 /* How many instructions should we convert in total? */
/* An IF-THEN-ELSE gets twice the budget of a plain IF-THEN since two
   blocks are being predicated.  */
372 max = 2 * MAX_CONDITIONAL_EXECUTE;
373 n_insns = count_bb_insns (else_bb);
376 max = MAX_CONDITIONAL_EXECUTE;
377 n_insns += count_bb_insns (then_bb);
381 /* Map test_expr/test_jump into the appropriate MD tests to use on
382 the conditionally executed code. */
384 true_expr = test_expr;
385 false_expr = gen_rtx_fmt_ee (reverse_condition (GET_CODE (true_expr)),
386 GET_MODE (true_expr), XEXP (true_expr, 0),
387 XEXP (true_expr, 1));
389 #ifdef IFCVT_MODIFY_TESTS
390 /* If the machine description needs to modify the tests, such as setting a
391 conditional execution register from a comparison, it can do so here. */
392 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
395 /* See if the conversion failed */
396 if (!true_expr || !false_expr)
/* Derive the THEN-side probability as the complement of the branch-taken
   probability recorded on the test block's jump, if any.  */
400 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
403 true_prob_val = XEXP (true_prob_val, 0);
404 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
407 false_prob_val = NULL_RTX;
409 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
410 on the THEN block. */
411 then_mod_ok = (else_bb == NULL_BLOCK);
413 /* Go through the THEN and ELSE blocks converting the insns if possible
414 to conditional execution. */
417 && ! cond_exec_process_insns (then_start, then_end,
418 false_expr, false_prob_val, then_mod_ok))
422 && ! cond_exec_process_insns (else_start, else_end,
423 true_expr, true_prob_val, TRUE))
426 if (! apply_change_group ())
429 #ifdef IFCVT_MODIFY_FINAL
430 /* Do any machine dependent final modifications */
431 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
434 /* Conversion succeeded. */
436 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
437 n_insns, (n_insns == 1) ? " was" : "s were");
439 /* Merge the blocks! */
440 merge_if_block (test_bb, then_bb, else_bb, join_bb);
444 #ifdef IFCVT_MODIFY_CANCEL
445 /* Cancel any machine dependent changes. */
446 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
453 /* Used by noce_process_if_block to communicate with its subroutines.
455 The subroutines know that A and B may be evaluated freely. They
456 know that X is a register. They should insert new instructions
457 before cond_earliest. */
464 rtx jump, cond, cond_earliest;
/* Prototypes for the "no conditional execution" (noce) transformations;
   each tries one rewrite of "if (test) x = a; else x = b" and returns
   nonzero on success.  */
467 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
469 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
470 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
471 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
472 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
473 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
474 rtx, enum rtx_code, rtx,
476 static int noce_try_cmove PARAMS ((struct noce_if_info *));
477 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
478 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
480 static int noce_try_minmax PARAMS ((struct noce_if_info *));
481 static int noce_try_abs PARAMS ((struct noce_if_info *));
483 /* Helper function for noce_try_store_flag*. */
486 noce_emit_store_flag (if_info, x, reversep, normalize)
487 struct noce_if_info *if_info;
489 int reversep, normalize;
491 rtx cond = if_info->cond;
/* A condition is "complex" if either comparison operand is not a
   general operand; then emit_store_flag cannot be used directly.  */
495 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
496 || ! general_operand (XEXP (cond, 1), VOIDmode));
498 /* If earliest == jump, or when the condition is complex, try to
499 build the store_flag insn directly. */
502 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
505 code = reversed_comparison_code (cond, if_info->jump);
507 code = GET_CODE (cond);
509 if ((if_info->cond_earliest == if_info->jump || cond_complex)
510 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
/* Hand-build (set X (code op0 op1)) and keep it only if the target
   recognizes the pattern.  */
514 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
516 tmp = gen_rtx_SET (VOIDmode, x, tmp);
519 tmp = emit_insn (tmp);
521 if (recog_memoized (tmp) >= 0)
527 if_info->cond_earliest = if_info->jump;
535 /* Don't even try if the comparison operands are weird. */
539 return emit_store_flag (x, code, XEXP (cond, 0),
540 XEXP (cond, 1), VOIDmode,
541 (code == LTU || code == LEU
542 || code == GEU || code == GTU), normalize);
545 /* Emit instruction to move a rtx into STRICT_LOW_PART. */
547 noce_emit_move_insn (x, y)
550 enum machine_mode outmode, inmode;
/* Plain destinations take the ordinary move path; only a
   STRICT_LOW_PART destination needs the bit-field store below.  */
554 if (GET_CODE (x) != STRICT_LOW_PART)
556 emit_move_insn (x, y);
561 inner = XEXP (outer, 0);
562 outmode = GET_MODE (outer);
563 inmode = GET_MODE (inner);
564 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
565 store_bit_field (inner, GET_MODE_BITSIZE (outmode),
566 bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
567 GET_MODE_BITSIZE (inmode));
570 /* Convert "if (test) x = 1; else x = 0".
572 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
573 tried in noce_try_store_flag_constants after noce_try_cmove has had
574 a go at the conversion. */
577 noce_try_store_flag (if_info)
578 struct noce_if_info *if_info;
/* Accept either orientation of the 0/STORE_FLAG_VALUE pair; the
   reversed orientation additionally requires a reversible condition.  */
583 if (GET_CODE (if_info->b) == CONST_INT
584 && INTVAL (if_info->b) == STORE_FLAG_VALUE
585 && if_info->a == const0_rtx
587 else if (if_info->b == const0_rtx
588 && GET_CODE (if_info->a) == CONST_INT
589 && INTVAL (if_info->a) == STORE_FLAG_VALUE
590 && (reversed_comparison_code (if_info->cond, if_info->jump)
598 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
601 if (target != if_info->x)
602 noce_emit_move_insn (if_info->x, target);
606 emit_insns_before (seq, if_info->cond_earliest);
617 /* Convert "if (test) x = a; else x = b", for A and B constant. */
620 noce_try_store_flag_constants (if_info)
621 struct noce_if_info *if_info;
625 HOST_WIDE_INT itrue, ifalse, diff, tmp;
626 int normalize, can_reverse;
629 && GET_CODE (if_info->a) == CONST_INT
630 && GET_CODE (if_info->b) == CONST_INT)
632 ifalse = INTVAL (if_info->a);
633 itrue = INTVAL (if_info->b);
634 diff = itrue - ifalse;
636 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
/* Choose a store-flag normalization (0, 1 or -1) and whether to reverse
   the condition, based on which constant pattern the pair matches.  */
640 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
642 else if (ifalse == 0 && exact_log2 (itrue) >= 0
643 && (STORE_FLAG_VALUE == 1
644 || BRANCH_COST >= 2))
646 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
647 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
648 normalize = 1, reversep = 1;
650 && (STORE_FLAG_VALUE == -1
651 || BRANCH_COST >= 2))
653 else if (ifalse == -1 && can_reverse
654 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
655 normalize = -1, reversep = 1;
656 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
/* When reversing, swap the roles of the two constants too.  */
664 tmp = itrue; itrue = ifalse; ifalse = tmp;
669 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
676 /* if (test) x = 3; else x = 4;
677 => x = 3 + (test == 0); */
678 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
680 target = expand_binop (GET_MODE (if_info->x),
681 (diff == STORE_FLAG_VALUE
682 ? add_optab : sub_optab),
683 GEN_INT (ifalse), target, if_info->x, 0,
687 /* if (test) x = 8; else x = 0;
688 => x = (test != 0) << 3; */
689 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
691 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
692 target, GEN_INT (tmp), if_info->x, 0,
696 /* if (test) x = -1; else x = b;
697 => x = -(test != 0) | b; */
698 else if (itrue == -1)
700 target = expand_binop (GET_MODE (if_info->x), ior_optab,
701 target, GEN_INT (ifalse), if_info->x, 0,
705 /* if (test) x = a; else x = b;
706 => x = (-(test != 0) & (b - a)) + a; */
709 target = expand_binop (GET_MODE (if_info->x), and_optab,
710 target, GEN_INT (diff), if_info->x, 0,
713 target = expand_binop (GET_MODE (if_info->x), add_optab,
714 target, GEN_INT (ifalse), if_info->x, 0,
724 if (target != if_info->x)
725 noce_emit_move_insn (if_info->x, target);
/* Abort if the expanders emitted a jump; otherwise splice the new
   sequence in before the condition's earliest insn.  */
730 if (seq_contains_jump (seq))
733 emit_insns_before (seq, if_info->cond_earliest);
741 /* Convert "if (test) foo++" into "foo += (test != 0)", and
742 similarly for "foo--". */
745 noce_try_store_flag_inc (if_info)
746 struct noce_if_info *if_info;
749 int subtract, normalize;
/* Pattern match: B must be X itself (no else arm) and A must be
   X plus or minus one, with a reversible condition.  */
755 /* Should be no `else' case to worry about. */
756 && if_info->b == if_info->x
757 && GET_CODE (if_info->a) == PLUS
758 && (XEXP (if_info->a, 1) == const1_rtx
759 || XEXP (if_info->a, 1) == constm1_rtx)
760 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
761 && (reversed_comparison_code (if_info->cond, if_info->jump)
764 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
765 subtract = 0, normalize = 0;
766 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
767 subtract = 1, normalize = 0;
769 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
773 target = noce_emit_store_flag (if_info,
774 gen_reg_rtx (GET_MODE (if_info->x)),
778 target = expand_binop (GET_MODE (if_info->x),
779 subtract ? sub_optab : add_optab,
780 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
783 if (target != if_info->x)
784 noce_emit_move_insn (if_info->x, target);
789 if (seq_contains_jump (seq))
792 emit_insns_before (seq, if_info->cond_earliest);
803 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
806 noce_try_store_flag_mask (if_info)
807 struct noce_if_info *if_info;
/* Requires STORE_FLAG_VALUE of +/-1 so the store-flag result can act as
   an all-ones/all-zeros mask; accept either arm being const0_rtx.  */
815 || STORE_FLAG_VALUE == -1)
816 && ((if_info->a == const0_rtx
817 && rtx_equal_p (if_info->b, if_info->x))
818 || ((reversep = (reversed_comparison_code (if_info->cond,
821 && if_info->b == const0_rtx
822 && rtx_equal_p (if_info->a, if_info->x))))
825 target = noce_emit_store_flag (if_info,
826 gen_reg_rtx (GET_MODE (if_info->x)),
829 target = expand_binop (GET_MODE (if_info->x), and_optab,
830 if_info->x, target, if_info->x, 0,
835 if (target != if_info->x)
836 noce_emit_move_insn (if_info->x, target);
841 if (seq_contains_jump (seq))
844 emit_insns_before (seq, if_info->cond_earliest);
855 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
858 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
859 struct noce_if_info *if_info;
860 rtx x, cmp_a, cmp_b, vfalse, vtrue;
863 /* If earliest == jump, try to build the cmove insn directly.
864 This is helpful when combine has created some complex condition
865 (like for alpha's cmovlbs) that we can't hope to regenerate
866 through the normal interface. */
868 if (if_info->cond_earliest == if_info->jump)
/* Hand-build (set X (if_then_else cond vtrue vfalse)) and keep it only
   if the target recognizes the pattern.  */
872 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
873 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
874 tmp = gen_rtx_SET (VOIDmode, x, tmp);
877 tmp = emit_insn (tmp);
879 if (recog_memoized (tmp) >= 0)
891 /* Don't even try if the comparison operands are weird. */
892 if (! general_operand (cmp_a, GET_MODE (cmp_a))
893 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
896 #if HAVE_conditional_move
897 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
898 vtrue, vfalse, GET_MODE (x),
899 (code == LTU || code == GEU
900 || code == LEU || code == GTU));
902 /* We'll never get here, as noce_process_if_block doesn't call the
903 functions involved. Ifdef code, however, should be discouraged
904 because it leads to typos in the code not selected. However,
905 emit_conditional_move won't exist either. */
910 /* Try only simple constants and registers here. More complex cases
911 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
912 has had a go at it. */
915 noce_try_cmove (if_info)
916 struct noce_if_info *if_info;
/* Both arms must be constants or registers; anything else is left to
   noce_try_cmove_arith.  */
921 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
922 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
926 code = GET_CODE (if_info->cond);
927 target = noce_emit_cmove (if_info, if_info->x, code,
928 XEXP (if_info->cond, 0),
929 XEXP (if_info->cond, 1),
930 if_info->a, if_info->b);
934 if (target != if_info->x)
935 noce_emit_move_insn (if_info->x, target);
939 emit_insns_before (seq, if_info->cond_earliest);
952 /* Try more complex cases involving conditional_move. */
955 noce_try_cmove_arith (if_info)
956 struct noce_if_info *if_info;
966 /* A conditional move from two memory sources is equivalent to a
967 conditional on their addresses followed by a load. Don't do this
968 early because it'll screw alias analysis. Note that we've
969 already checked for no side effects. */
970 if (! no_new_pseudos && cse_not_expected
971 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
976 x = gen_reg_rtx (Pmode);
980 /* ??? We could handle this if we knew that a load from A or B could
981 not fault. This is also true if we've already loaded
982 from the address along the path from ENTRY. */
983 else if (may_trap_p (a) || may_trap_p (b))
986 /* if (test) x = a + b; else x = c - d;
993 code = GET_CODE (if_info->cond);
994 insn_a = if_info->insn_a;
995 insn_b = if_info->insn_b;
997 /* Possibly rearrange operands to make things come out more natural. */
998 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1001 if (rtx_equal_p (b, x))
1003 else if (general_operand (b, GET_MODE (b)))
/* Reverse the condition and swap the A/B arms (and their insns) so the
   simpler operand lands on the false side.  */
1008 code = reversed_comparison_code (if_info->cond, if_info->jump);
1009 tmp = a, a = b, b = tmp;
1010 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1016 /* If either operand is complex, load it into a register first.
1017 The best way to do this is to copy the original insn. In this
1018 way we preserve any clobbers etc that the insn may have had.
1019 This is of course not possible in the IS_MEM case. */
1020 if (! general_operand (a, GET_MODE (a)))
1025 goto end_seq_and_fail;
1029 tmp = gen_reg_rtx (GET_MODE (a));
1030 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1033 goto end_seq_and_fail;
1036 a = gen_reg_rtx (GET_MODE (a));
1037 tmp = copy_rtx (insn_a);
1038 set = single_set (tmp);
1040 tmp = emit_insn (PATTERN (tmp));
1042 if (recog_memoized (tmp) < 0)
1043 goto end_seq_and_fail;
/* Same treatment for the B arm.  */
1045 if (! general_operand (b, GET_MODE (b)))
1050 goto end_seq_and_fail;
1054 tmp = gen_reg_rtx (GET_MODE (b));
1055 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1058 goto end_seq_and_fail;
1061 b = gen_reg_rtx (GET_MODE (b));
1062 tmp = copy_rtx (insn_b);
1063 set = single_set (tmp);
1065 tmp = emit_insn (PATTERN (tmp));
1067 if (recog_memoized (tmp) < 0)
1068 goto end_seq_and_fail;
1071 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1072 XEXP (if_info->cond, 1), a, b);
1075 goto end_seq_and_fail;
1077 /* If we're handling a memory for above, emit the load now. */
1080 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1082 /* Copy over flags as appropriate. */
/* Volatility propagates if either source had it; struct/scalar and
   alias-set info only when both sources agree.  */
1083 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1084 MEM_VOLATILE_P (tmp) = 1;
1085 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1086 MEM_IN_STRUCT_P (tmp) = 1;
1087 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1088 MEM_SCALAR_P (tmp) = 1;
1089 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1090 MEM_ALIAS_SET (tmp) = MEM_ALIAS_SET (if_info->a);
1092 noce_emit_move_insn (if_info->x, tmp);
1094 else if (target != x)
1095 noce_emit_move_insn (x, target);
1099 emit_insns_before (tmp, if_info->cond_earliest);
1107 /* For most cases, the simplified condition we found is the best
1108 choice, but this is not the case for the min/max/abs transforms.
1109 For these we wish to know that it is A or B in the condition. */
1112 noce_get_alt_condition (if_info, target, earliest)
1113 struct noce_if_info *if_info;
1117 rtx cond, set, insn;
1120 /* If target is already mentioned in the known condition, return it. */
1121 if (reg_mentioned_p (target, if_info->cond))
1123 *earliest = if_info->cond_earliest;
1124 return if_info->cond;
/* Otherwise re-canonicalize the jump's condition from scratch, noting
   whether the branch is taken on the false arm.  */
1127 set = pc_set (if_info->jump);
1128 cond = XEXP (SET_SRC (set), 0);
1130 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1131 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1133 cond = canonicalize_condition (if_info->jump, cond, reverse,
1135 if (! cond || ! reg_mentioned_p (target, cond))
1138 /* We almost certainly searched back to a different place.
1139 Need to re-verify correct lifetimes. */
1141 /* X may not be mentioned in the range (cond_earliest, jump]. */
1142 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1143 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1146 /* A and B may not be modified in the range [cond_earliest, jump). */
1147 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1149 && (modified_in_p (if_info->a, insn)
1150 || modified_in_p (if_info->b, insn)))
1156 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1159 noce_try_minmax (if_info)
1160 struct noce_if_info *if_info;
1162 rtx cond, earliest, target, seq;
1167 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1171 /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
1172 will be resolved with an SMIN/SMAX. It wouldn't be too hard
1173 to get the target to tell us... */
1174 if (FLOAT_MODE_P (GET_MODE (if_info->x))
1175 && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1176 && ! flag_unsafe_math_optimizations)
1179 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1183 /* Verify the condition is of the form we expect, and canonicalize
1184 the comparison code. */
/* Accept (cond a b) as-is, or (cond b a) with the comparison swapped.  */
1185 code = GET_CODE (cond);
1186 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1188 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1191 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1193 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1195 code = swap_condition (code);
1200 /* Determine what sort of operation this is. Note that the code is for
1201 a taken branch, so the code->operation mapping appears backwards. */
1234 target = expand_binop (GET_MODE (if_info->x), op, if_info->a, if_info->b,
1235 if_info->x, unsignedp, OPTAB_WIDEN);
1241 if (target != if_info->x)
1242 noce_emit_move_insn (if_info->x, target);
1247 if (seq_contains_jump (seq))
1250 emit_insns_before (seq, earliest);
1251 if_info->cond = cond;
1252 if_info->cond_earliest = earliest;
1257 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1260 noce_try_abs (if_info)
1261 struct noce_if_info *if_info;
1263 rtx cond, earliest, target, seq, a, b, c;
1266 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1270 /* Recognize A and B as constituting an ABS or NABS. */
/* After this, B is always the plain operand and A its negation.  */
1273 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1275 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1277 c = a; a = b; b = c;
1283 cond = noce_get_alt_condition (if_info, b, &earliest);
1287 /* Verify the condition is of the form we expect. */
1288 if (rtx_equal_p (XEXP (cond, 0), b))
1290 else if (rtx_equal_p (XEXP (cond, 1), b))
1295 /* Verify that C is zero. Search backward through the block for
1296 a REG_EQUAL note if necessary. */
1299 rtx insn, note = NULL;
1300 for (insn = earliest;
1301 insn != if_info->test_bb->head;
1302 insn = PREV_INSN (insn))
1304 && ((note = find_reg_note (insn, REG_EQUAL, c))
1305 || (note = find_reg_note (insn, REG_EQUIV, c))))
/* C may be a constant-pool reference; pull out the actual constant.  */
1311 if (GET_CODE (c) == MEM
1312 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1313 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1314 c = get_pool_constant (XEXP (c, 0));
1316 /* Work around funny ideas get_condition has wrt canonicalization.
1317 Note that these rtx constants are known to be CONST_INT, and
1318 therefore imply integer comparisons. */
1319 if (c == constm1_rtx && GET_CODE (cond) == GT)
1321 else if (c == const1_rtx && GET_CODE (cond) == LT)
1323 else if (c != CONST0_RTX (GET_MODE (b)))
1326 /* Determine what sort of operation this is. */
1327 switch (GET_CODE (cond))
1346 target = expand_unop (GET_MODE (if_info->x), abs_optab, b, if_info->x, 0);
1348 /* ??? It's a quandary whether cmove would be better here, especially
1349 for integers. Perhaps combine will clean things up. */
1350 if (target && negate)
1351 target = expand_unop (GET_MODE (target), neg_optab, target, if_info->x, 0);
1359 if (target != if_info->x)
1360 noce_emit_move_insn (if_info->x, target);
1365 if (seq_contains_jump (seq))
1368 emit_insns_before (seq, earliest);
1369 if_info->cond = cond;
1370 if_info->cond_earliest = earliest;
1375 /* Look for the condition for the jump first. We'd prefer to avoid
1376 get_condition if we can -- it tries to look back for the contents
1377 of an original compare. On targets that use normal integers for
1378 comparisons, e.g. alpha, this is wasteful. */
1381 noce_get_condition (jump, earliest)
1388 /* If the condition variable is a register and is MODE_INT, accept it.
1389 Otherwise, fall back on get_condition. */
1391 if (! any_condjump_p (jump))
1394 set = pc_set (jump);
1396 cond = XEXP (SET_SRC (set), 0);
1397 if (GET_CODE (XEXP (cond, 0)) == REG
1398 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1402 /* If this branches to JUMP_LABEL when the condition is false,
1403 reverse the condition. */
1404 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1405 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1406 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1407 GET_MODE (cond), XEXP (cond, 0),
/* Fallback: let get_condition search back for the original compare.  */
1411 cond = get_condition (jump, earliest);
1416 /* Return true if OP is ok for if-then-else processing. */
1419 noce_operand_ok (op)
1422 /* We special-case memories, so handle any of them with
1423 no address side effects. */
1424 if (GET_CODE (op) == MEM)
1425 return ! side_effects_p (XEXP (op, 0));
1427 if (side_effects_p (op))
1430 /* ??? Unfortunately may_trap_p can't look at flag_trapping_math, due to
1431 being linked into the genfoo programs. This is probably a mistake.
1432 With finite operands, most fp operations don't trap. */
1433 if (!flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
1434 switch (GET_CODE (op))
1440 /* ??? This is kinda lame -- almost every target will have forced
1441 the constant into a register first. But given the expense of
1442 division, this is probably for the best. */
1443 return (CONSTANT_P (XEXP (op, 1))
1444 && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
1445 && ! may_trap_p (XEXP (op, 0)));
/* For other FP codes, check trapping per-operand by arity.  */
1448 switch (GET_RTX_CLASS (GET_CODE (op)))
1451 return ! may_trap_p (XEXP (op, 0));
1454 return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
1459 return ! may_trap_p (op);
1462 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1463 without using conditional execution. Return TRUE if we were
1464 successful at converting the block. */
1467 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1468 basic_block test_bb; /* Basic block test is in */
1469 basic_block then_bb; /* Basic block for THEN block */
1470 basic_block else_bb; /* Basic block for ELSE block */
1471 basic_block join_bb; /* Basic block the join label is in */
1473 /* We're looking for patterns of the form
1475 (1) if (...) x = a; else x = b;
1476 (2) x = b; if (...) x = a;
1477 (3) if (...) x = a; // as if with an initial x = x.
1479 The later patterns require jumps to be more expensive.
1481 ??? For future expansion, look for multiple X in such patterns. */
1483 struct noce_if_info if_info;
1486 rtx orig_x, x, a, b;
1487 rtx jump, cond, insn;
1489 /* If this is not a standard conditional jump, we can't parse it. */
1490 jump = test_bb->end;
1491 cond = noce_get_condition (jump, &if_info.cond_earliest);
1495 /* If the conditional jump is more than just a conditional jump,
1496 then we can not do if-conversion on this block. */
1497 if (! onlyjump_p (jump))
1500 /* We must be comparing objects whose modes imply the size. */
1501 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1504 /* Look for one of the potential sets. */
1505 insn_a = first_active_insn (then_bb);
1507 || ! last_active_insn_p (then_bb, insn_a)
1508 || (set_a = single_set (insn_a)) == NULL_RTX)
1511 x = SET_DEST (set_a);
1512 a = SET_SRC (set_a);
1514 /* Look for the other potential set. Make sure we've got equivalent
1516 /* ??? This is overconservative. Storing to two different mems is
1517 as easy as conditionally computing the address. Storing to a
1518 single mem merely requires a scratch memory to use as one of the
1519 destination addresses; often the memory immediately below the
1520 stack pointer is available for this. */
1524 insn_b = first_active_insn (else_bb);
1526 || ! last_active_insn_p (else_bb, insn_b)
1527 || (set_b = single_set (insn_b)) == NULL_RTX
1528 || ! rtx_equal_p (x, SET_DEST (set_b)))
1533 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1535 || GET_CODE (insn_b) != INSN
1536 || (set_b = single_set (insn_b)) == NULL_RTX
1537 || ! rtx_equal_p (x, SET_DEST (set_b))
1538 || reg_mentioned_p (x, cond)
1539 || reg_mentioned_p (x, a)
1540 || reg_mentioned_p (x, SET_SRC (set_b)))
1541 insn_b = set_b = NULL_RTX;
1543 b = (set_b ? SET_SRC (set_b) : x);
1545 /* X may not be mentioned in the range (cond_earliest, jump]. */
1546 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1547 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1550 /* A and B may not be modified in the range [cond_earliest, jump). */
1551 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1553 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1556 /* Only operate on register destinations, and even then avoid extending
1557 the lifetime of hard registers on small register class machines. */
1559 if (GET_CODE (x) != REG
1560 || (SMALL_REGISTER_CLASSES
1561 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1565 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1566 ? XEXP (x, 0) : x));
1569 /* Don't operate on sources that may trap or are volatile. */
1570 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1573 /* Set up the info block for our subroutines. */
1574 if_info.test_bb = test_bb;
1575 if_info.cond = cond;
1576 if_info.jump = jump;
1577 if_info.insn_a = insn_a;
1578 if_info.insn_b = insn_b;
1583 /* Try optimizations in some approximation of a useful order. */
1584 /* ??? Should first look to see if X is live incoming at all. If it
1585 isn't, we don't need anything but an unconditional set. */
1587 /* Look and see if A and B are really the same. Avoid creating silly
1588 cmove constructs that no one will fix up later. */
1589 if (rtx_equal_p (a, b))
1591 /* If we have an INSN_B, we don't have to create any new rtl. Just
1592 move the instruction that we already have. If we don't have an
1593 INSN_B, that means that A == X, and we've got a noop move. In
1594 that case don't do anything and let the code below delete INSN_A. */
1595 if (insn_b && else_bb)
1599 if (else_bb && insn_b == else_bb->end)
1600 else_bb->end = PREV_INSN (insn_b);
1601 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1603 /* If there was a REG_EQUAL note, delete it since it may have been
1604 true due to this insn being after a jump. */
1605 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1606 remove_note (insn_b, note);
1610 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1611 x must be executed twice. */
1612 else if (insn_b && side_effects_p (orig_x))
1619 if (noce_try_store_flag (&if_info))
1621 if (noce_try_minmax (&if_info))
1623 if (noce_try_abs (&if_info))
1625 if (HAVE_conditional_move
1626 && noce_try_cmove (&if_info))
1628 if (! HAVE_conditional_execution)
1630 if (noce_try_store_flag_constants (&if_info))
1632 if (noce_try_store_flag_inc (&if_info))
1634 if (noce_try_store_flag_mask (&if_info))
1636 if (HAVE_conditional_move
1637 && noce_try_cmove_arith (&if_info))
1644 /* The original sets may now be killed. */
1645 if (insn_a == then_bb->end)
1646 then_bb->end = PREV_INSN (insn_a);
1647 flow_delete_insn (insn_a);
1649 /* Several special cases here: First, we may have reused insn_b above,
1650 in which case insn_b is now NULL. Second, we want to delete insn_b
1651 if it came from the ELSE block, because follows the now correct
1652 write that appears in the TEST block. However, if we got insn_b from
1653 the TEST block, it may in fact be loading data needed for the comparison.
1654 We'll let life_analysis remove the insn if it's really dead. */
1655 if (insn_b && else_bb)
1657 if (insn_b == else_bb->end)
1658 else_bb->end = PREV_INSN (insn_b);
1659 flow_delete_insn (insn_b);
1662 /* The new insns will have been inserted before cond_earliest. We should
1663 be able to remove the jump with impunity, but the condition itself may
1664 have been modified by gcse to be shared across basic blocks. */
1665 test_bb->end = PREV_INSN (jump);
1666 flow_delete_insn (jump);
1668 /* If we used a temporary, fix it up now. */
1672 noce_emit_move_insn (orig_x, x);
1673 insn_b = gen_sequence ();
1676 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1679 /* Merge the blocks! */
1680 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1685 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1686 straight line code. Return true if successful. */
1689 process_if_block (test_bb, then_bb, else_bb, join_bb)
1690 basic_block test_bb; /* Basic block test is in */
1691 basic_block then_bb; /* Basic block for THEN block */
1692 basic_block else_bb; /* Basic block for ELSE block */
1693 basic_block join_bb; /* Basic block the join label is in */
/* First try the transformations that need no conditional execution
   support; these are only attempted before reload.  */
1695 if (! reload_completed
1696 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
/* Otherwise fall back to predicating the insns, on targets that
   support conditional execution.  */
1699 if (HAVE_conditional_execution
1701 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1707 /* Merge the blocks and mark for local life update. */
1710 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1711 basic_block test_bb; /* Basic block test is in */
1712 basic_block then_bb; /* Basic block for THEN block */
1713 basic_block else_bb; /* Basic block for ELSE block */
1714 basic_block join_bb; /* Basic block the join label is in */
1716 basic_block combo_bb;
1718 /* All block merging is done into the lower block numbers. */
1722 /* First merge TEST block into THEN block. This is a no-brainer since
1723 the THEN block did not have a code label to begin with. */
1726 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1727 merge_blocks_nomove (combo_bb, then_bb);
1728 num_removed_blocks++;
1730 /* The ELSE block, if it existed, had a label. That label count
1731 will almost always be zero, but odd things can happen when labels
1732 get their addresses taken. */
1735 merge_blocks_nomove (combo_bb, else_bb);
1736 num_removed_blocks++;
1739 /* If there was no join block reported, that means it was not adjacent
1740 to the others, and so we cannot merge them. */
1744 /* The outgoing edge for the current COMBO block should already
1745 be correct. Verify this. */
1746 if (combo_bb->succ == NULL_EDGE)
1749 /* There should still be a branch at the end of the THEN or ELSE
1750 blocks taking us to our final destination. */
1751 if (! any_uncondjump_p (combo_bb->end)
1752 && ! returnjump_p (combo_bb->end))
1756 /* The JOIN block may have had quite a number of other predecessors too.
1757 Since we've already merged the TEST, THEN and ELSE blocks, we should
1758 have only one remaining edge from our if-then-else diamond. If there
1759 is more than one remaining edge, it must come from elsewhere. There
1760 may be zero incoming edges if the THEN block didn't actually join
1761 back up (as with a call to abort). */
1762 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1764 /* We can merge the JOIN. */
1766 COPY_REG_SET (combo_bb->global_live_at_end,
1767 join_bb->global_live_at_end);
1768 merge_blocks_nomove (combo_bb, join_bb);
1769 num_removed_blocks++;
1773 /* We cannot merge the JOIN. */
1775 /* The outgoing edge for the current COMBO block should already
1776 be correct. Verify this. */
1777 if (combo_bb->succ->succ_next != NULL_EDGE
1778 || combo_bb->succ->dest != join_bb)
1781 /* Remove the jump and cruft from the end of the COMBO block. */
1782 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1785 /* Make sure we update life info properly. */
1786 SET_UPDATE_LIFE (combo_bb);
1788 num_updated_if_blocks++;
1791 /* Find a block ending in a simple IF condition. Return TRUE if
1792 we were able to transform it in some way. */
1795 find_if_header (test_bb)
1796 basic_block test_bb;
1801 /* The kind of block we're looking for has exactly two successors. */
1802 if ((then_edge = test_bb->succ) == NULL_EDGE
1803 || (else_edge = then_edge->succ_next) == NULL_EDGE
1804 || else_edge->succ_next != NULL_EDGE)
1807 /* Neither edge should be abnormal. */
1808 if ((then_edge->flags & EDGE_COMPLEX)
1809 || (else_edge->flags & EDGE_COMPLEX))
1812 /* The THEN edge is canonically the one that falls through. */
1813 if (then_edge->flags & EDGE_FALLTHRU)
/* If it is ELSE that falls through, swap the two edges so the
   canonical form holds.  */
1815 else if (else_edge->flags & EDGE_FALLTHRU)
1818 else_edge = then_edge;
1822 /* Otherwise this must be a multiway branch of some sort. */
/* Try the full-diamond conversion first, then the single-sided
   case-1/case-2 transformations (the latter only when predication is
   unavailable or after reload).  */
1825 if (find_if_block (test_bb, then_edge, else_edge))
1828 && (! HAVE_conditional_execution || reload_completed))
1830 if (find_if_case_1 (test_bb, then_edge, else_edge))
1832 if (find_if_case_2 (test_bb, then_edge, else_edge))
1840 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1844 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1845 block. If so, we'll try to convert the insns to not require the branch.
1846 Return TRUE if we were successful at converting the block. */
1849 find_if_block (test_bb, then_edge, else_edge)
1850 basic_block test_bb;
1851 edge then_edge, else_edge;
1853 basic_block then_bb = then_edge->dest;
1854 basic_block else_bb = else_edge->dest;
1855 basic_block join_bb = NULL_BLOCK;
1856 edge then_succ = then_bb->succ;
1857 edge else_succ = else_bb->succ;
1860 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1861 if (then_bb->pred->pred_next != NULL_EDGE)
1864 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1865 if (then_succ != NULL_EDGE
1866 && (then_succ->succ_next != NULL_EDGE
1867 || (then_succ->flags & EDGE_COMPLEX)))
1870 /* If the THEN block has no successors, conditional execution can still
1871 make a conditional call. Don't do this unless the ELSE block has
1872 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1873 Check for the last insn of the THEN block being an indirect jump, which
1874 is listed as not having any successors, but confuses the rest of the CE
1875 code processing. XXX we should fix this in the future. */
1876 if (then_succ == NULL)
1878 if (else_bb->pred->pred_next == NULL_EDGE)
1880 rtx last_insn = then_bb->end;
/* Skip trailing notes to find the real last insn.  */
1883 && GET_CODE (last_insn) == NOTE
1884 && last_insn != then_bb->head)
1885 last_insn = PREV_INSN (last_insn);
/* Reject indirect jumps (JUMP_INSN that is not a simplejump).  */
1888 && GET_CODE (last_insn) == JUMP_INSN
1889 && ! simplejump_p (last_insn)
1893 else_bb = NULL_BLOCK;
1899 /* If the THEN block's successor is the other edge out of the TEST block,
1900 then we have an IF-THEN combo without an ELSE. */
1901 else if (then_succ->dest == else_bb)
1904 else_bb = NULL_BLOCK;
1907 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1908 has exactly one predecessor and one successor, and the outgoing edge
1909 is not complex, then we have an IF-THEN-ELSE combo. */
1910 else if (else_succ != NULL_EDGE
1911 && then_succ->dest == else_succ->dest
1912 && else_bb->pred->pred_next == NULL_EDGE
1913 && else_succ->succ_next == NULL_EDGE
1914 && ! (else_succ->flags & EDGE_COMPLEX))
1915 join_bb = else_succ->dest;
1917 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1921 num_possible_if_blocks++;
1926 fprintf (rtl_dump_file,
1927 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1928 test_bb->index, then_bb->index, else_bb->index,
1931 fprintf (rtl_dump_file,
1932 "\nIF-THEN block found, start %d, then %d, join %d\n",
1933 test_bb->index, then_bb->index, join_bb->index);
1936 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
1937 get the first condition for free, since we've already asserted that
1938 there's a fallthru edge from IF to THEN. */
1939 /* ??? As an enhancement, move the ELSE block. Have to deal with
1940 BLOCK notes, if by no other means than aborting the merge if they
1941 exist. Sticky enough I don't want to think about it now. */
1942 next_index = then_bb->index;
1943 if (else_bb && ++next_index != else_bb->index)
1945 if (++next_index != join_bb->index)
1953 /* Do the real work. */
1954 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1957 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1958 transformable, but not necessarily the other. There need be no
1961 Return TRUE if we were successful at converting the block.
1963 Cases we'd like to look at:
1966 if (test) goto over; // x not live
1974 if (! test) goto label;
1977 if (test) goto E; // x not live
1991 (3) // This one's really only interesting for targets that can do
1992 // multiway branching, e.g. IA-64 BBB bundles. For other targets
1993 // it results in multiple branches on a cache line, which often
1994 // does not sit well with predictors.
1996 if (test1) goto E; // predicted not taken
2012 (A) Don't do (2) if the branch is predicted against the block we're
2013 eliminating. Do it anyway if we can eliminate a branch; this requires
2014 that the sole successor of the eliminated block postdominate the other
2017 (B) With CE, on (3) we can steal from both sides of the if, creating
2026 Again, this is most useful if J postdominates.
2028 (C) CE substitutes for helpful life information.
2030 (D) These heuristics need a lot of work. */
2032 /* Tests for case 1 above. */
2035 find_if_case_1 (test_bb, then_edge, else_edge)
2036 basic_block test_bb;
2037 edge then_edge, else_edge;
2039 basic_block then_bb = then_edge->dest;
2040 basic_block else_bb = else_edge->dest;
2041 edge then_succ = then_bb->succ;
2044 /* THEN has one successor. */
2045 if (!then_succ || then_succ->succ_next != NULL)
2048 /* THEN does not fall through, but is not strange either. */
2049 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2052 /* THEN has one predecessor. */
2053 if (then_bb->pred->pred_next != NULL)
2056 /* ELSE follows THEN. (??? could be moved) */
2057 if (else_bb->index != then_bb->index + 1)
2060 num_possible_if_blocks++;
2062 fprintf (rtl_dump_file,
2063 "\nIF-CASE-1 found, start %d, then %d\n",
2064 test_bb->index, then_bb->index);
2066 /* THEN is small. */
2067 if (count_bb_insns (then_bb) > BRANCH_COST)
2070 /* Find the label for THEN's destination. */
2071 if (then_succ->dest == EXIT_BLOCK_PTR)
2075 new_lab = JUMP_LABEL (then_bb->end);
2080 /* Registers set are dead, or are predicable. */
2081 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
2084 /* Conversion went ok, including moving the insns and fixing up the
2085 jump. Adjust the CFG to match. */
/* TEST now reaches both ELSE and THEN's old destination, so its
   live-at-end set is the union of the two.  */
2087 SET_UPDATE_LIFE (test_bb);
2088 bitmap_operation (test_bb->global_live_at_end,
2089 else_bb->global_live_at_start,
2090 then_bb->global_live_at_end, BITMAP_IOR);
2092 make_edge (NULL, test_bb, then_succ->dest, 0);
2093 flow_delete_block (then_bb);
2094 tidy_fallthru_edge (else_edge, test_bb, else_bb);
2096 num_removed_blocks++;
2097 num_updated_if_blocks++;
2102 /* Test for case 2 above. */
2105 find_if_case_2 (test_bb, then_edge, else_edge)
2106 basic_block test_bb;
2107 edge then_edge, else_edge;
2109 basic_block then_bb = then_edge->dest;
2110 basic_block else_bb = else_edge->dest;
2111 edge else_succ = else_bb->succ;
2114 /* ELSE has one successor. */
2115 if (!else_succ || else_succ->succ_next != NULL)
2118 /* ELSE outgoing edge is not complex. */
2119 if (else_succ->flags & EDGE_COMPLEX)
2122 /* ELSE has one predecessor. */
2123 if (else_bb->pred->pred_next != NULL)
2126 /* THEN is not EXIT. */
2127 if (then_bb->index < 0)
2130 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2131 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2132 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2134 else if (else_succ->dest->index < 0
2135 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2136 ORIG_INDEX (else_succ->dest)))
2141 num_possible_if_blocks++;
2143 fprintf (rtl_dump_file,
2144 "\nIF-CASE-2 found, start %d, else %d\n",
2145 test_bb->index, else_bb->index);
2147 /* ELSE is small. */
/* Fixed: it is the ELSE block that will be predicated/removed here, so
   size it -- not THEN (the previous code measured the wrong block).  */
2148 if (count_bb_insns (else_bb) > BRANCH_COST)
2151 /* Find the label for ELSE's destination. */
2152 if (else_succ->dest == EXIT_BLOCK_PTR)
2156 if (else_succ->flags & EDGE_FALLTHRU)
2158 new_lab = else_succ->dest->head;
2159 if (GET_CODE (new_lab) != CODE_LABEL)
2164 new_lab = JUMP_LABEL (else_bb->end);
2170 /* Registers set are dead, or are predicable. */
2171 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
2174 /* Conversion went ok, including moving the insns and fixing up the
2175 jump. Adjust the CFG to match. */
/* TEST now reaches both THEN and ELSE's old destination, so its
   live-at-end set is the union of the two.  */
2177 SET_UPDATE_LIFE (test_bb);
2178 bitmap_operation (test_bb->global_live_at_end,
2179 then_bb->global_live_at_start,
2180 else_bb->global_live_at_end, BITMAP_IOR);
2182 remove_edge (else_edge);
2183 make_edge (NULL, test_bb, else_succ->dest, 0);
2184 flow_delete_block (else_bb);
2186 num_removed_blocks++;
2187 num_updated_if_blocks++;
2189 /* ??? We may now fallthru from one of THEN's successors into a join
2190 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2195 /* A subroutine of dead_or_predicable called through for_each_rtx.
2196 Return 1 if a memory is found. */
2199 find_memory (px, data)
2201 void *data ATTRIBUTE_UNUSED;
/* Nonzero iff *PX is a MEM rtx; DATA is unused.  */
2203 return GET_CODE (*px) == MEM;
2206 /* Used by the code above to perform the actual rtl transformations.
2207 Return TRUE if successful.
2209 TEST_BB is the block containing the conditional branch. MERGE_BB
2210 is the block containing the code to manipulate. NEW_DEST is the
2211 label TEST_BB should be branching to after the conversion.
2212 REVERSEP is true if the sense of the branch should be reversed. */
2215 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2216 basic_block test_bb, merge_bb, other_bb;
2220 rtx head, end, jump, earliest, old_dest;
2222 jump = test_bb->end;
2224 /* Find the extent of the real code in the merge block. */
2225 head = merge_bb->head;
2226 end = merge_bb->end;
2228 if (GET_CODE (head) == CODE_LABEL)
2229 head = NEXT_INSN (head);
2230 if (GET_CODE (head) == NOTE)
2234 head = end = NULL_RTX;
2237 head = NEXT_INSN (head);
2240 if (GET_CODE (end) == JUMP_INSN)
2244 head = end = NULL_RTX;
2247 end = PREV_INSN (end);
2250 /* Disable handling dead code by conditional execution if the machine needs
2251 to do anything funny with the tests, etc. */
2252 #ifndef IFCVT_MODIFY_TESTS
2253 if (HAVE_conditional_execution)
2255 /* In the conditional execution case, we have things easy. We know
2256 the condition is reversible. We don't have to check life info,
2257 because we're going to conditionally execute the code anyway.
2258 All that's left is making sure the insns involved can actually
2263 cond = cond_exec_get_condition (jump);
2265 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2267 prob_val = XEXP (prob_val, 0);
/* If the branch sense is reversed, reverse the condition and the
   branch-probability note to match.  */
2271 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2272 GET_MODE (cond), XEXP (cond, 0),
2275 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2278 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2286 /* In the non-conditional execution case, we have to verify that there
2287 are no trapping operations, no calls, no references to memory, and
2288 that any registers modified are dead at the branch site. */
2290 rtx insn, cond, prev;
2291 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2292 regset merge_set, tmp, test_live, test_set;
2293 struct propagate_block_info *pbi;
2296 /* Check for no calls or trapping operations. */
2297 for (insn = head; ; insn = NEXT_INSN (insn))
2299 if (GET_CODE (insn) == CALL_INSN)
2303 if (may_trap_p (PATTERN (insn)))
2306 /* ??? Even non-trapping memories such as stack frame
2307 references must be avoided. For stores, we collect
2308 no lifetime info; for reads, we'd have to assert
2309 true_dependence false against every store in the
2311 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2318 if (! any_condjump_p (jump))
2321 /* Find the extent of the conditional. */
2322 cond = noce_get_condition (jump, &earliest);
2327 MERGE_SET = set of registers set in MERGE_BB
2328 TEST_LIVE = set of registers live at EARLIEST
2329 TEST_SET = set of registers set between EARLIEST and the
2330 end of the block. */
2332 tmp = INITIALIZE_REG_SET (tmp_head);
2333 merge_set = INITIALIZE_REG_SET (merge_set_head);
2334 test_live = INITIALIZE_REG_SET (test_live_head);
2335 test_set = INITIALIZE_REG_SET (test_set_head);
2337 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2338 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2339 since we've already asserted that MERGE_BB is small. */
2340 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2342 /* For small register class machines, don't lengthen lifetimes of
2343 hard registers before reload. */
2344 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2346 EXECUTE_IF_SET_IN_BITMAP
2349 if (i < FIRST_PSEUDO_REGISTER
2351 && ! global_regs[i])
2356 /* For TEST, we're interested in a range of insns, not a whole block.
2357 Moreover, we're interested in the insns live from OTHER_BB. */
2359 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2360 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
/* Walk backward from the jump to EARLIEST, accumulating liveness.  */
2363 for (insn = jump; ; insn = prev)
2365 prev = propagate_one_insn (pbi, insn);
2366 if (insn == earliest)
2370 free_propagate_block_info (pbi);
2372 /* We can perform the transformation if
2373 MERGE_SET & (TEST_SET | TEST_LIVE)
2375 TEST_SET & merge_bb->global_live_at_start
2378 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2379 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2380 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2382 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2384 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2387 FREE_REG_SET (merge_set);
2388 FREE_REG_SET (test_live);
2389 FREE_REG_SET (test_set);
2396 /* We don't want to use normal invert_jump or redirect_jump because
2397 we don't want to delete_insn called. Also, we want to do our own
2398 change group management. */
2400 old_dest = JUMP_LABEL (jump);
2402 ? ! invert_jump_1 (jump, new_dest)
2403 : ! redirect_jump_1 (jump, new_dest))
2406 if (! apply_change_group ())
/* Keep the label reference counts and JUMP_LABEL consistent with the
   redirected jump.  */
2410 LABEL_NUSES (old_dest) -= 1;
2412 LABEL_NUSES (new_dest) += 1;
2413 JUMP_LABEL (jump) = new_dest;
2416 invert_br_probabilities (jump);
2418 /* Move the insns out of MERGE_BB to before the branch. */
2421 if (end == merge_bb->end)
2422 merge_bb->end = PREV_INSN (head);
2424 head = squeeze_notes (head, end);
/* Don't move block/loop boundary notes out of their region.  */
2425 if (GET_CODE (end) == NOTE
2426 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2427 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2428 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2429 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2430 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2431 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2435 end = PREV_INSN (end);
2438 reorder_insns (head, end, PREV_INSN (earliest));
2447 /* Main entry point for all if-conversion. */
2450 if_convert (x_life_data_ok)
2455 num_possible_if_blocks = 0;
2456 num_updated_if_blocks = 0;
2457 num_removed_blocks = 0;
2458 life_data_ok = (x_life_data_ok != 0);
2460 /* Free up basic_block_for_insn so that we don't have to keep it
2461 up to date, either here or in merge_blocks_nomove. */
2462 free_basic_block_vars (1);
2464 /* Compute postdominators if we think we'll use them. */
2465 post_dominators = NULL;
2466 if (HAVE_conditional_execution || life_data_ok)
2468 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2469 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2472 /* Record initial block numbers. */
2473 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2474 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2476 /* Go through each of the basic blocks looking for things to convert. */
2477 for (block_num = 0; block_num < n_basic_blocks; )
2479 basic_block bb = BASIC_BLOCK (block_num);
2480 if (find_if_header (bb))
2481 block_num = bb->index;
2486 if (post_dominators)
2487 sbitmap_vector_free (post_dominators);
2490 fflush (rtl_dump_file);
2492 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2493 compute_bb_for_insn (get_max_uid ());
2495 /* Rebuild life info for basic blocks that require it. */
2496 if (num_removed_blocks && life_data_ok)
2498 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2499 sbitmap_zero (update_life_blocks);
2501 /* If we allocated new pseudos, we must resize the array for sched1. */
2502 if (max_regno < max_reg_num ())
2504 max_regno = max_reg_num ();
2505 allocate_reg_info (max_regno, FALSE, FALSE);
2508 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2509 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2510 SET_BIT (update_life_blocks, block_num);
2512 count_or_remove_death_notes (update_life_blocks, 1);
2513 /* ??? See about adding a mode that verifies that the initial
2514 set of blocks don't let registers come live. */
2515 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2516 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2517 | PROP_KILL_DEAD_CODE);
2519 sbitmap_free (update_life_blocks);
2522 /* Write the final stats. */
2523 if (rtl_dump_file && num_possible_if_blocks > 0)
2525 fprintf (rtl_dump_file,
2526 "\n%d possible IF blocks searched.\n",
2527 num_possible_if_blocks);
2528 fprintf (rtl_dump_file,
2529 "%d IF blocks converted.\n",
2530 num_updated_if_blocks);
2531 fprintf (rtl_dump_file,
2532 "%d basic blocks deleted.\n\n\n",
2533 num_removed_blocks);
2536 #ifdef ENABLE_CHECKING
2537 verify_flow_info ();