1 /* If-conversion support.
2 Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
28 #include "insn-config.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
40 #ifndef HAVE_conditional_execution
41 #define HAVE_conditional_execution 0
43 #ifndef HAVE_conditional_move
44 #define HAVE_conditional_move 0
55 #ifndef HAVE_conditional_trap
56 #define HAVE_conditional_trap 0
59 #ifndef MAX_CONDITIONAL_EXECUTE
60 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
63 #define NULL_EDGE ((struct edge_def *)NULL)
64 #define NULL_BLOCK ((struct basic_block_def *)NULL)
/* File-scope statistics and state for the if-conversion pass.
   NOTE(review): this is a fragmentary excerpt -- the embedded line
   numbers jump, so some original lines are elided here.  */
66 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
67 static int num_possible_if_blocks;
69 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
   execution (comment terminator restored; original line elided).  */
71 static int num_updated_if_blocks;
73 /* # of basic blocks that were removed. */
74 static int num_removed_blocks;
76 /* True if life data ok at present. */
77 static bool life_data_ok;
79 /* The post-dominator relation on the original block numbers. */
80 static sbitmap *post_dominators;
82 /* Forward references. */
/* NOTE(review): PARAMS is the old GCC prototype-compatibility macro
   (pre-C89 support); some declaration lines are elided in this excerpt,
   which is why several prototypes below appear truncated.  */
83 static int count_bb_insns PARAMS ((basic_block));
84 static rtx first_active_insn PARAMS ((basic_block));
85 static int last_active_insn_p PARAMS ((basic_block, rtx));
86 static int seq_contains_jump PARAMS ((rtx));
88 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
89 static rtx cond_exec_get_condition PARAMS ((rtx));
90 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
91 basic_block, basic_block));
93 static rtx noce_get_condition PARAMS ((rtx, rtx *));
94 static int noce_operand_ok PARAMS ((rtx));
95 static int noce_process_if_block PARAMS ((basic_block, basic_block,
96 basic_block, basic_block));
98 static int process_if_block PARAMS ((basic_block, basic_block,
99 basic_block, basic_block));
100 static void merge_if_block PARAMS ((basic_block, basic_block,
101 basic_block, basic_block));
103 static int find_if_header PARAMS ((basic_block));
104 static int find_if_block PARAMS ((basic_block, edge, edge));
105 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
106 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
107 static int find_cond_trap PARAMS ((basic_block, edge, edge));
108 static rtx block_has_only_trap PARAMS ((basic_block));
109 static int find_memory PARAMS ((rtx *, void *));
110 static int dead_or_predicable PARAMS ((basic_block, basic_block,
111 basic_block, basic_block, int));
112 static void noce_emit_move_insn PARAMS ((rtx, rtx));
114 /* Count the number of non-jump active insns in BB. */
/* NOTE(review): the function header, declarations, and loop structure
   are elided in this excerpt; only the insn-classification test and the
   iteration step survive.  */
125 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
130 insn = NEXT_INSN (insn);
136 /* Return the first non-jump active insn in the basic block. */
/* NOTE(review): fragmentary body -- it visibly skips a leading
   CODE_LABEL and NOTE insns; the return statements are elided.  */
139 first_active_insn (bb)
144 if (GET_CODE (insn) == CODE_LABEL)
148 insn = NEXT_INSN (insn);
151 while (GET_CODE (insn) == NOTE)
155 insn = NEXT_INSN (insn);
158 if (GET_CODE (insn) == JUMP_INSN)
164 /* Return true if INSN is the last active non-jump insn in BB. */
/* NOTE(review): fragmentary body -- it advances past NOTE insns and
   reports whether the next real insn is the block-ending jump.  */
167 last_active_insn_p (bb, insn)
175 insn = NEXT_INSN (insn);
177 while (GET_CODE (insn) == NOTE);
179 return GET_CODE (insn) == JUMP_INSN;
182 /* It is possible, especially when having dealt with multi-word
183 arithmetic, for the expanders to have emitted jumps. Search
184 through the sequence and return TRUE if a jump exists so that
185 we can abort the conversion. */
/* NOTE(review): loop header and returns are elided in this excerpt.  */
188 seq_contains_jump (insn)
193 if (GET_CODE (insn) == JUMP_INSN)
195 insn = NEXT_INSN (insn);
200 /* Go through a bunch of insns, converting them to conditional
201 execution format if possible. Return TRUE if all of the non-note
202 insns were processed. */
/* NOTE(review): fragmentary excerpt -- many interior lines (loop exit,
   failure paths, closing braces) are elided.  */
205 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
206 rtx start; /* first insn to look at */
207 rtx end; /* last insn to look at */
208 rtx test; /* conditional execution test */
209 rtx prob_val; /* probability of branch taken. */
210 int mod_ok; /* true if modifications ok last insn. */
212 int must_be_last = FALSE;
216 for (insn = start; ; insn = NEXT_INSN (insn))
218 if (GET_CODE (insn) == NOTE)
221 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
224 /* Remove USE insns that get in the way. */
225 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
227 /* ??? Ug. Actually unlinking the thing is problematic,
228 given what we'd have to coordinate with our callers. */
229 PUT_CODE (insn, NOTE);
230 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
231 NOTE_SOURCE_FILE (insn) = 0;
235 /* Last insn wasn't last? */
239 if (modified_in_p (test, insn))
246 /* Now build the conditional form of the instruction. */
247 pattern = PATTERN (insn);
249 /* If the machine needs to modify the insn being conditionally executed,
250 say for example to force a constant integer operand into a temp
251 register, do so here. */
252 #ifdef IFCVT_MODIFY_INSN
253 IFCVT_MODIFY_INSN (pattern, insn);
258 validate_change (insn, &PATTERN (insn),
259 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
262 if (GET_CODE (insn) == CALL_INSN && prob_val)
/* NOTE(review): "®_NOTES" on the next line is mojibake for
   "&REG_NOTES" (an HTML &reg; entity) -- restore when editing.  */
263 validate_change (insn, &#174;_NOTES (insn),
264 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
265 REG_NOTES (insn)), 1);
275 /* Return the condition for a jump. Do not do any special processing. */
/* NOTE(review): fragmentary body -- the non-condjump failure return and
   the final return are elided in this excerpt.  */
278 cond_exec_get_condition (jump)
283 if (any_condjump_p (jump))
284 test_if = SET_SRC (pc_set (jump));
287 cond = XEXP (test_if, 0);
289 /* If this branches to JUMP_LABEL when the condition is false,
290 reverse the condition. */
291 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
292 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
294 enum rtx_code rev = reversed_comparison_code (cond, jump);
298 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
305 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
306 to conditional execution. Return TRUE if we were successful at
307 converting the block. */
/* NOTE(review): fragmentary excerpt -- several interior lines (early
   returns, braces, some conditions) are elided; embedded numbers jump.  */
310 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
311 basic_block test_bb; /* Basic block test is in */
312 basic_block then_bb; /* Basic block for THEN block */
313 basic_block else_bb; /* Basic block for ELSE block */
314 basic_block join_bb; /* Basic block the join label is in */
316 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
317 rtx then_start; /* first insn in THEN block */
318 rtx then_end; /* last insn + 1 in THEN block */
319 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
320 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
321 int max; /* max # of insns to convert. */
322 int then_mod_ok; /* whether conditional mods are ok in THEN */
323 rtx true_expr; /* test for else block insns */
324 rtx false_expr; /* test for then block insns */
325 rtx true_prob_val; /* probability of else block */
326 rtx false_prob_val; /* probability of then block */
328 enum rtx_code false_code;
330 /* Find the conditional jump to the ELSE or JOIN part, and isolate
    the test (comment terminator restored; original line elided).  */
332 test_expr = cond_exec_get_condition (test_bb->end);
336 /* If the conditional jump is more than just a conditional jump,
337 then we can not do conditional execution conversion on this block. */
338 if (!onlyjump_p (test_bb->end))
341 /* Collect the bounds of where we're to search. */
343 then_start = then_bb->head;
344 then_end = then_bb->end;
346 /* Skip a label heading THEN block. */
347 if (GET_CODE (then_start) == CODE_LABEL)
348 then_start = NEXT_INSN (then_start);
350 /* Skip a (use (const_int 0)) or branch as the final insn. */
351 if (GET_CODE (then_end) == INSN
352 && GET_CODE (PATTERN (then_end)) == USE
353 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
354 then_end = PREV_INSN (then_end);
355 else if (GET_CODE (then_end) == JUMP_INSN)
356 then_end = PREV_INSN (then_end);
360 /* Skip the ELSE block's label. */
361 else_start = NEXT_INSN (else_bb->head);
362 else_end = else_bb->end;
364 /* Skip a (use (const_int 0)) or branch as the final insn. */
365 if (GET_CODE (else_end) == INSN
366 && GET_CODE (PATTERN (else_end)) == USE
367 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
368 else_end = PREV_INSN (else_end);
369 else if (GET_CODE (else_end) == JUMP_INSN)
370 else_end = PREV_INSN (else_end);
373 /* How many instructions should we convert in total? */
377 max = 2 * MAX_CONDITIONAL_EXECUTE;
378 n_insns = count_bb_insns (else_bb);
381 max = MAX_CONDITIONAL_EXECUTE;
382 n_insns += count_bb_insns (then_bb);
386 /* Map test_expr/test_jump into the appropriate MD tests to use on
387 the conditionally executed code. */
389 true_expr = test_expr;
391 false_code = reversed_comparison_code (true_expr, test_bb->end);
392 if (false_code != UNKNOWN)
393 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
394 XEXP (true_expr, 0), XEXP (true_expr, 1));
396 false_expr = NULL_RTX;
398 #ifdef IFCVT_MODIFY_TESTS
399 /* If the machine description needs to modify the tests, such as setting a
400 conditional execution register from a comparison, it can do so here. */
401 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
404 /* See if the conversion failed */
405 if (!true_expr || !false_expr)
409 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
412 true_prob_val = XEXP (true_prob_val, 0);
413 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
416 false_prob_val = NULL_RTX;
418 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
419 in the THEN block. */
420 then_mod_ok = (else_bb == NULL_BLOCK);
422 /* Go through the THEN and ELSE blocks converting the insns if possible
423 to conditional execution. */
427 || ! cond_exec_process_insns (then_start, then_end, false_expr,
428 false_prob_val, then_mod_ok)))
432 && ! cond_exec_process_insns (else_start, else_end,
433 true_expr, true_prob_val, TRUE))
436 if (! apply_change_group ())
439 #ifdef IFCVT_MODIFY_FINAL
440 /* Do any machine dependent final modifications */
441 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
444 /* Conversion succeeded. */
446 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
447 n_insns, (n_insns == 1) ? " was" : "s were");
449 /* Merge the blocks! */
450 merge_if_block (test_bb, then_bb, else_bb, join_bb);
454 #ifdef IFCVT_MODIFY_CANCEL
455 /* Cancel any machine dependent changes. */
456 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
463 /* Used by noce_process_if_block to communicate with its subroutines.
465 The subroutines know that A and B may be evaluated freely. They
466 know that X is a register. They should insert new instructions
467 before cond_earliest. */
/* NOTE(review): the struct header, brace lines, and several members of
   struct noce_if_info are elided in this excerpt; only one member line
   survives below, followed by the noce_* forward declarations (some of
   which are also truncated by elided continuation lines).  */
474 rtx jump, cond, cond_earliest;
477 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
479 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
480 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
481 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
482 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
483 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
484 rtx, enum rtx_code, rtx,
486 static int noce_try_cmove PARAMS ((struct noce_if_info *));
487 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
488 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
490 static int noce_try_minmax PARAMS ((struct noce_if_info *));
491 static int noce_try_abs PARAMS ((struct noce_if_info *));
493 /* Helper function for noce_try_store_flag*. */
/* NOTE(review): fragmentary excerpt -- branches between the direct
   store-flag attempt and the emit_store_flag fallback are elided.  */
496 noce_emit_store_flag (if_info, x, reversep, normalize)
497 struct noce_if_info *if_info;
499 int reversep, normalize;
501 rtx cond = if_info->cond;
505 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
506 || ! general_operand (XEXP (cond, 1), VOIDmode));
508 /* If earliest == jump, or when the condition is complex, try to
509 build the store_flag insn directly. */
512 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
515 code = reversed_comparison_code (cond, if_info->jump);
517 code = GET_CODE (cond);
519 if ((if_info->cond_earliest == if_info->jump || cond_complex)
520 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
524 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
526 tmp = gen_rtx_SET (VOIDmode, x, tmp);
529 tmp = emit_insn (tmp);
531 if (recog_memoized (tmp) >= 0)
537 if_info->cond_earliest = if_info->jump;
545 /* Don't even try if the comparison operands are weird. */
549 return emit_store_flag (x, code, XEXP (cond, 0),
550 XEXP (cond, 1), VOIDmode,
551 (code == LTU || code == LEU
552 || code == GEU || code == GTU), normalize);
555 /* Emit instruction to move an rtx into STRICT_LOW_PART. */
/* NOTE(review): fragmentary -- the early-return after the plain
   emit_move_insn and the 'outer' assignment are elided.  */
557 noce_emit_move_insn (x, y)
560 enum machine_mode outmode, inmode;
564 if (GET_CODE (x) != STRICT_LOW_PART)
566 emit_move_insn (x, y);
571 inner = XEXP (outer, 0);
572 outmode = GET_MODE (outer);
573 inmode = GET_MODE (inner);
574 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
575 store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
576 GET_MODE_BITSIZE (inmode));
579 /* Convert "if (test) x = 1; else x = 0".
581 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
582 tried in noce_try_store_flag_constants after noce_try_cmove has had
583 a go at the conversion. */
/* NOTE(review): fragmentary -- reversep setup, sequence start/end, and
   the FALSE return path are elided in this excerpt.  */
586 noce_try_store_flag (if_info)
587 struct noce_if_info *if_info;
592 if (GET_CODE (if_info->b) == CONST_INT
593 && INTVAL (if_info->b) == STORE_FLAG_VALUE
594 && if_info->a == const0_rtx)
596 else if (if_info->b == const0_rtx
597 && GET_CODE (if_info->a) == CONST_INT
598 && INTVAL (if_info->a) == STORE_FLAG_VALUE
599 && (reversed_comparison_code (if_info->cond, if_info->jump)
607 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
610 if (target != if_info->x)
611 noce_emit_move_insn (if_info->x, target);
615 emit_insns_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
626 /* Convert "if (test) x = a; else x = b", for A and B constant. */
/* NOTE(review): fragmentary excerpt -- the condition of the outer if,
   several normalize assignments, and the failure paths are elided.  */
629 noce_try_store_flag_constants (if_info)
630 struct noce_if_info *if_info;
634 HOST_WIDE_INT itrue, ifalse, diff, tmp;
635 int normalize, can_reverse;
636 enum machine_mode mode;
639 && GET_CODE (if_info->a) == CONST_INT
640 && GET_CODE (if_info->b) == CONST_INT)
642 mode = GET_MODE (if_info->x);
643 ifalse = INTVAL (if_info->a);
644 itrue = INTVAL (if_info->b);
646 /* Make sure we can represent the difference between the two values. */
647 if ((itrue - ifalse > 0)
648 != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
651 diff = trunc_int_for_mode (itrue - ifalse, mode);
653 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
657 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
659 else if (ifalse == 0 && exact_log2 (itrue) >= 0
660 && (STORE_FLAG_VALUE == 1
661 || BRANCH_COST >= 2))
663 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
664 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
665 normalize = 1, reversep = 1;
667 && (STORE_FLAG_VALUE == -1
668 || BRANCH_COST >= 2))
670 else if (ifalse == -1 && can_reverse
671 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
672 normalize = -1, reversep = 1;
673 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
681 tmp = itrue; itrue = ifalse; ifalse = tmp;
682 diff = trunc_int_for_mode (-diff, mode);
686 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
693 /* if (test) x = 3; else x = 4;
694 => x = 3 + (test == 0); */
695 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
697 target = expand_simple_binop (mode,
698 (diff == STORE_FLAG_VALUE
700 GEN_INT (ifalse), target, if_info->x, 0,
704 /* if (test) x = 8; else x = 0;
705 => x = (test != 0) << 3; */
706 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
708 target = expand_simple_binop (mode, ASHIFT,
709 target, GEN_INT (tmp), if_info->x, 0,
713 /* if (test) x = -1; else x = b;
714 => x = -(test != 0) | b; */
715 else if (itrue == -1)
717 target = expand_simple_binop (mode, IOR,
718 target, GEN_INT (ifalse), if_info->x, 0,
722 /* if (test) x = a; else x = b;
723 => x = (-(test != 0) & (b - a)) + a; */
726 target = expand_simple_binop (mode, AND,
727 target, GEN_INT (diff), if_info->x, 0,
730 target = expand_simple_binop (mode, PLUS,
731 target, GEN_INT (ifalse),
732 if_info->x, 0, OPTAB_WIDEN);
741 if (target != if_info->x)
742 noce_emit_move_insn (if_info->x, target);
747 if (seq_contains_jump (seq))
750 emit_insns_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
758 /* Convert "if (test) foo++" into "foo += (test != 0)", and
759 similarly for "foo--". */
/* NOTE(review): fragmentary -- the opening condition of the big if,
   sequence bookkeeping, and the FALSE return are elided.  */
762 noce_try_store_flag_inc (if_info)
763 struct noce_if_info *if_info;
766 int subtract, normalize;
772 /* Should be no `else' case to worry about. */
773 && if_info->b == if_info->x
774 && GET_CODE (if_info->a) == PLUS
775 && (XEXP (if_info->a, 1) == const1_rtx
776 || XEXP (if_info->a, 1) == constm1_rtx)
777 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
778 && (reversed_comparison_code (if_info->cond, if_info->jump)
781 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
782 subtract = 0, normalize = 0;
783 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
784 subtract = 1, normalize = 0;
786 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
790 target = noce_emit_store_flag (if_info,
791 gen_reg_rtx (GET_MODE (if_info->x)),
795 target = expand_simple_binop (GET_MODE (if_info->x),
796 subtract ? MINUS : PLUS,
797 if_info->x, target, if_info->x,
801 if (target != if_info->x)
802 noce_emit_move_insn (if_info->x, target);
807 if (seq_contains_jump (seq))
810 emit_insns_before_scope (seq, if_info->jump,
811 INSN_SCOPE (if_info->insn_a));
822 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
/* NOTE(review): fragmentary -- the opening of the condition (including
   the STORE_FLAG_VALUE == 1 alternative) and return paths are elided.  */
825 noce_try_store_flag_mask (if_info)
826 struct noce_if_info *if_info;
834 || STORE_FLAG_VALUE == -1)
835 && ((if_info->a == const0_rtx
836 && rtx_equal_p (if_info->b, if_info->x))
837 || ((reversep = (reversed_comparison_code (if_info->cond,
840 && if_info->b == const0_rtx
841 && rtx_equal_p (if_info->a, if_info->x))))
844 target = noce_emit_store_flag (if_info,
845 gen_reg_rtx (GET_MODE (if_info->x)),
848 target = expand_simple_binop (GET_MODE (if_info->x), AND,
849 if_info->x, target, if_info->x, 0,
854 if (target != if_info->x)
855 noce_emit_move_insn (if_info->x, target);
860 if (seq_contains_jump (seq))
863 emit_insns_before_scope (seq, if_info->jump,
864 INSN_SCOPE (if_info->insn_a));
875 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
/* NOTE(review): fragmentary -- sequence start/end around the direct
   recog attempt and the non-HAVE_conditional_move #else arm are elided.  */
878 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
879 struct noce_if_info *if_info;
880 rtx x, cmp_a, cmp_b, vfalse, vtrue;
883 /* If earliest == jump, try to build the cmove insn directly.
884 This is helpful when combine has created some complex condition
885 (like for alpha's cmovlbs) that we can't hope to regenerate
886 through the normal interface. */
888 if (if_info->cond_earliest == if_info->jump)
892 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
893 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
894 tmp = gen_rtx_SET (VOIDmode, x, tmp);
897 tmp = emit_insn (tmp);
899 if (recog_memoized (tmp) >= 0)
911 /* Don't even try if the comparison operands are weird. */
912 if (! general_operand (cmp_a, GET_MODE (cmp_a))
913 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
916 #if HAVE_conditional_move
917 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
918 vtrue, vfalse, GET_MODE (x),
919 (code == LTU || code == GEU
920 || code == LEU || code == GTU));
922 /* We'll never get here, as noce_process_if_block doesn't call the
923 functions involved. Ifdef code, however, should be discouraged
924 because it leads to typos in the code not selected. However,
925 emit_conditional_move won't exist either. */
930 /* Try only simple constants and registers here. More complex cases
931 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
932 has had a go at it. */
/* NOTE(review): fragmentary -- sequence start/end and both return
   statements are elided in this excerpt.  */
935 noce_try_cmove (if_info)
936 struct noce_if_info *if_info;
941 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
942 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
946 code = GET_CODE (if_info->cond);
947 target = noce_emit_cmove (if_info, if_info->x, code,
948 XEXP (if_info->cond, 0),
949 XEXP (if_info->cond, 1),
950 if_info->a, if_info->b);
954 if (target != if_info->x)
955 noce_emit_move_insn (if_info->x, target);
959 emit_insns_before_scope (seq, if_info->jump,
960 INSN_SCOPE (if_info->insn_a));
973 /* Try more complex cases involving conditional_move. */
/* NOTE(review): fragmentary excerpt -- declarations, the is_mem setup,
   the end_seq_and_fail label, and several braces are elided.  */
976 noce_try_cmove_arith (if_info)
977 struct noce_if_info *if_info;
987 /* A conditional move from two memory sources is equivalent to a
988 conditional on their addresses followed by a load. Don't do this
989 early because it'll screw alias analysis. Note that we've
990 already checked for no side effects. */
991 if (! no_new_pseudos && cse_not_expected
992 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
997 x = gen_reg_rtx (Pmode);
1001 /* ??? We could handle this if we knew that a load from A or B could
1002 not fault. This is also true if we've already loaded
1003 from the address along the path from ENTRY. */
1004 else if (may_trap_p (a) || may_trap_p (b))
1007 /* if (test) x = a + b; else x = c - d;
     (rest of example elided in this excerpt; terminator restored)  */
1014 code = GET_CODE (if_info->cond);
1015 insn_a = if_info->insn_a;
1016 insn_b = if_info->insn_b;
1018 /* Possibly rearrange operands to make things come out more natural. */
1019 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1022 if (rtx_equal_p (b, x))
1024 else if (general_operand (b, GET_MODE (b)))
1029 code = reversed_comparison_code (if_info->cond, if_info->jump);
1030 tmp = a, a = b, b = tmp;
1031 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1037 /* If either operand is complex, load it into a register first.
1038 The best way to do this is to copy the original insn. In this
1039 way we preserve any clobbers etc that the insn may have had.
1040 This is of course not possible in the IS_MEM case. */
1041 if (! general_operand (a, GET_MODE (a)))
1046 goto end_seq_and_fail;
1050 tmp = gen_reg_rtx (GET_MODE (a));
1051 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1054 goto end_seq_and_fail;
1057 a = gen_reg_rtx (GET_MODE (a));
1058 tmp = copy_rtx (insn_a);
1059 set = single_set (tmp);
1061 tmp = emit_insn (PATTERN (tmp));
1063 if (recog_memoized (tmp) < 0)
1064 goto end_seq_and_fail;
1066 if (! general_operand (b, GET_MODE (b)))
1071 goto end_seq_and_fail;
1075 tmp = gen_reg_rtx (GET_MODE (b));
1076 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1079 goto end_seq_and_fail;
1082 b = gen_reg_rtx (GET_MODE (b));
1083 tmp = copy_rtx (insn_b);
1084 set = single_set (tmp);
1086 tmp = emit_insn (PATTERN (tmp));
1088 if (recog_memoized (tmp) < 0)
1089 goto end_seq_and_fail;
1092 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1093 XEXP (if_info->cond, 1), a, b);
1096 goto end_seq_and_fail;
1098 /* If we're handling a memory for above, emit the load now. */
1101 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1103 /* Copy over flags as appropriate. */
1104 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1105 MEM_VOLATILE_P (tmp) = 1;
1106 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1107 MEM_IN_STRUCT_P (tmp) = 1;
1108 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1109 MEM_SCALAR_P (tmp) = 1;
1110 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1111 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1113 MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1115 noce_emit_move_insn (if_info->x, tmp);
1117 else if (target != x)
1118 noce_emit_move_insn (x, target);
1122 emit_insns_before_scope (tmp, if_info->jump, INSN_SCOPE (if_info->insn_a));
1130 /* For most cases, the simplified condition we found is the best
1131 choice, but this is not the case for the min/max/abs transforms.
1132 For these we wish to know that it is A or B in the condition. */
/* NOTE(review): fragmentary excerpt -- the reverse flag declaration,
   several switch arms and braces, and the final return are elided.  */
1135 noce_get_alt_condition (if_info, target, earliest)
1136 struct noce_if_info *if_info;
1140 rtx cond, set, insn;
1143 /* If target is already mentioned in the known condition, return it. */
1144 if (reg_mentioned_p (target, if_info->cond))
1146 *earliest = if_info->cond_earliest;
1147 return if_info->cond;
1150 set = pc_set (if_info->jump);
1151 cond = XEXP (SET_SRC (set), 0);
1153 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1154 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1156 /* If we're looking for a constant, try to make the conditional
1157 have that constant in it. There are two reasons why it may
1158 not have the constant we want:
1160 1. GCC may have needed to put the constant in a register, because
1161 the target can't compare directly against that constant. For
1162 this case, we look for a SET immediately before the comparison
1163 that puts a constant in that register.
1165 2. GCC may have canonicalized the conditional, for example
1166 replacing "if x < 4" with "if x <= 3". We can undo that (or
1167 make equivalent types of changes) to get the constants we need
1168 if they're off by one in the right direction. */
1170 if (GET_CODE (target) == CONST_INT)
1172 enum rtx_code code = GET_CODE (if_info->cond);
1173 rtx op_a = XEXP (if_info->cond, 0);
1174 rtx op_b = XEXP (if_info->cond, 1);
1177 /* First, look to see if we put a constant in a register. */
1178 prev_insn = PREV_INSN (if_info->cond_earliest);
1180 && INSN_P (prev_insn)
1181 && GET_CODE (PATTERN (prev_insn)) == SET)
1183 rtx src = find_reg_equal_equiv_note (prev_insn);
1185 src = SET_SRC (PATTERN (prev_insn));
1186 if (GET_CODE (src) == CONST_INT)
1188 if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1190 else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1193 if (GET_CODE (op_a) == CONST_INT)
1198 code = swap_condition (code);
1203 /* Now, look to see if we can get the right constant by
1204 adjusting the conditional. */
1205 if (GET_CODE (op_b) == CONST_INT)
1207 HOST_WIDE_INT desired_val = INTVAL (target);
1208 HOST_WIDE_INT actual_val = INTVAL (op_b);
1213 if (actual_val == desired_val + 1)
1216 op_b = GEN_INT (desired_val);
1220 if (actual_val == desired_val - 1)
1223 op_b = GEN_INT (desired_val);
1227 if (actual_val == desired_val - 1)
1230 op_b = GEN_INT (desired_val);
1234 if (actual_val == desired_val + 1)
1237 op_b = GEN_INT (desired_val);
1245 /* If we made any changes, generate a new conditional that is
1246 equivalent to what we started with, but has the right
     comparison code (terminator restored; original line elided).  */
1248 if (code != GET_CODE (if_info->cond)
1249 || op_a != XEXP (if_info->cond, 0)
1250 || op_b != XEXP (if_info->cond, 1))
1252 cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1253 *earliest = if_info->cond_earliest;
1258 cond = canonicalize_condition (if_info->jump, cond, reverse,
1260 if (! cond || ! reg_mentioned_p (target, cond))
1263 /* We almost certainly searched back to a different place.
1264 Need to re-verify correct lifetimes. */
1266 /* X may not be mentioned in the range (cond_earliest, jump]. */
1267 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1268 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1271 /* A and B may not be modified in the range [cond_earliest, jump). */
1272 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1274 && (modified_in_p (if_info->a, insn)
1275 || modified_in_p (if_info->b, insn)))
1281 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
/* NOTE(review): fragmentary -- the code->operation switch that picks
   SMIN/SMAX/UMIN/UMAX (lines ~1325-1356) is elided entirely here.  */
1284 noce_try_minmax (if_info)
1285 struct noce_if_info *if_info;
1287 rtx cond, earliest, target, seq;
1288 enum rtx_code code, op;
1291 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1295 /* ??? Reject modes with NaNs or signed zeros since we don't know how
1296 they will be resolved with an SMIN/SMAX. It wouldn't be too hard
1297 to get the target to tell us... */
1298 if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1299 || HONOR_NANS (GET_MODE (if_info->x)))
1302 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1306 /* Verify the condition is of the form we expect, and canonicalize
1307 the comparison code. */
1308 code = GET_CODE (cond);
1309 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1311 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1314 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1316 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1318 code = swap_condition (code);
1323 /* Determine what sort of operation this is. Note that the code is for
1324 a taken branch, so the code->operation mapping appears backwards. */
1357 target = expand_simple_binop (GET_MODE (if_info->x), op,
1358 if_info->a, if_info->b,
1359 if_info->x, unsignedp, OPTAB_WIDEN);
1365 if (target != if_info->x)
1366 noce_emit_move_insn (if_info->x, target);
1371 if (seq_contains_jump (seq))
1374 emit_insns_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1375 if_info->cond = cond;
1376 if_info->cond_earliest = earliest;
1381 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
/* NOTE(review): fragmentary -- the negate flag, switch arms choosing
   negate, and the return paths are elided in this excerpt.  */
1384 noce_try_abs (if_info)
1385 struct noce_if_info *if_info;
1387 rtx cond, earliest, target, seq, a, b, c;
1390 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1394 /* Recognize A and B as constituting an ABS or NABS. */
1397 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1399 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1401 c = a; a = b; b = c;
1407 cond = noce_get_alt_condition (if_info, b, &earliest);
1411 /* Verify the condition is of the form we expect. */
1412 if (rtx_equal_p (XEXP (cond, 0), b))
1414 else if (rtx_equal_p (XEXP (cond, 1), b))
1419 /* Verify that C is zero. Search backward through the block for
1420 a REG_EQUAL note if necessary. */
1423 rtx insn, note = NULL;
1424 for (insn = earliest;
1425 insn != if_info->test_bb->head;
1426 insn = PREV_INSN (insn))
1428 && ((note = find_reg_note (insn, REG_EQUAL, c))
1429 || (note = find_reg_note (insn, REG_EQUIV, c))))
1435 if (GET_CODE (c) == MEM
1436 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1437 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1438 c = get_pool_constant (XEXP (c, 0));
1440 /* Work around funny ideas get_condition has wrt canonicalization.
1441 Note that these rtx constants are known to be CONST_INT, and
1442 therefore imply integer comparisons. */
1443 if (c == constm1_rtx && GET_CODE (cond) == GT)
1445 else if (c == const1_rtx && GET_CODE (cond) == LT)
1447 else if (c != CONST0_RTX (GET_MODE (b)))
1450 /* Determine what sort of operation this is. */
1451 switch (GET_CODE (cond))
1470 target = expand_simple_unop (GET_MODE (if_info->x), ABS, b, if_info->x, 0);
1472 /* ??? It's a quandary whether cmove would be better here, especially
1473 for integers. Perhaps combine will clean things up. */
1474 if (target && negate)
1475 target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1483 if (target != if_info->x)
1484 noce_emit_move_insn (if_info->x, target);
1489 if (seq_contains_jump (seq))
1492 emit_insns_before_scope (seq, if_info->jump, INSN_SCOPE (if_info->insn_a));
1493 if_info->cond = cond;
1494 if_info->cond_earliest = earliest;
1499 /* Look for the condition for the jump first. We'd prefer to avoid
1500 get_condition if we can -- it tries to look back for the contents
1501 of an original compare. On targets that use normal integers for
1502 comparisons, e.g. alpha, this is wasteful. */
/* NOTE(review): fragmentary -- return statements and some braces are
   elided; the final line falls through to get_condition.  */
1505 noce_get_condition (jump, earliest)
1512 /* If the condition variable is a register and is MODE_INT, accept it.
1513 Otherwise, fall back on get_condition. */
1515 if (! any_condjump_p (jump))
1518 set = pc_set (jump);
1520 cond = XEXP (SET_SRC (set), 0);
1521 if (GET_CODE (XEXP (cond, 0)) == REG
1522 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1526 /* If this branches to JUMP_LABEL when the condition is false,
1527 reverse the condition. */
1528 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1529 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1530 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1531 GET_MODE (cond), XEXP (cond, 0),
1535 cond = get_condition (jump, earliest);
1540 /* Return true if OP is ok for if-then-else processing. */
/* An operand qualifies when it has no side effects and cannot trap.
   A MEM is special-cased: only its *address* must be side-effect
   free, since the callers handle the memory access itself.  */
1543 noce_operand_ok (op)
1546 /* We special-case memories, so handle any of them with
1547 no address side effects. */
1548 if (GET_CODE (op) == MEM)
1549 return ! side_effects_p (XEXP (op, 0));
1551 if (side_effects_p (op))
1554 return ! may_trap_p (op);
1557 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1558 without using conditional execution. Return TRUE if we were
1559 successful at converting the block. */
/* NOTE(review): this excerpt elides a number of lines (early returns,
   some declarations); comments below describe only what is visible.  */
1562 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1563 basic_block test_bb; /* Basic block test is in */
1564 basic_block then_bb; /* Basic block for THEN block */
1565 basic_block else_bb; /* Basic block for ELSE block */
1566 basic_block join_bb; /* Basic block the join label is in */
1568 /* We're looking for patterns of the form
1570 (1) if (...) x = a; else x = b;
1571 (2) x = b; if (...) x = a;
1572 (3) if (...) x = a; // as if with an initial x = x.
1574 The latter patterns require jumps to be more expensive.
1576 ??? For future expansion, look for multiple X in such patterns. */
1578 struct noce_if_info if_info;
1581 rtx orig_x, x, a, b;
1582 rtx jump, cond, insn;
1584 /* If this is not a standard conditional jump, we can't parse it. */
1585 jump = test_bb->end;
1586 cond = noce_get_condition (jump, &if_info.cond_earliest);
1590 /* If the conditional jump is more than just a conditional jump,
1591 then we can not do if-conversion on this block. */
1592 if (! onlyjump_p (jump))
1595 /* We must be comparing objects whose modes imply the size. */
1596 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1599 /* Look for one of the potential sets. */
1600 insn_a = first_active_insn (then_bb);
1602 || ! last_active_insn_p (then_bb, insn_a)
1603 || (set_a = single_set (insn_a)) == NULL_RTX)
1606 x = SET_DEST (set_a);
1607 a = SET_SRC (set_a);
1609 /* Look for the other potential set. Make sure we've got equivalent
1611 /* ??? This is overconservative. Storing to two different mems is
1612 as easy as conditionally computing the address. Storing to a
1613 single mem merely requires a scratch memory to use as one of the
1614 destination addresses; often the memory immediately below the
1615 stack pointer is available for this. */
1619 insn_b = first_active_insn (else_bb);
1621 || ! last_active_insn_p (else_bb, insn_b)
1622 || (set_b = single_set (insn_b)) == NULL_RTX
1623 || ! rtx_equal_p (x, SET_DEST (set_b)))
/* No ELSE block: look for "x = b" just before the test (pattern 2).  */
1628 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1630 || GET_CODE (insn_b) != INSN
1631 || (set_b = single_set (insn_b)) == NULL_RTX
1632 || ! rtx_equal_p (x, SET_DEST (set_b))
1633 || reg_mentioned_p (x, cond)
1634 || reg_mentioned_p (x, a)
1635 || reg_mentioned_p (x, SET_SRC (set_b)))
1636 insn_b = set_b = NULL_RTX;
/* With no "else" value at all, the transform degenerates to x = x.  */
1638 b = (set_b ? SET_SRC (set_b) : x);
1640 /* X may not be mentioned in the range (cond_earliest, jump]. */
1641 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1642 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1645 /* A and B may not be modified in the range [cond_earliest, jump). */
1646 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1648 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1651 /* Only operate on register destinations, and even then avoid extending
1652 the lifetime of hard registers on small register class machines. */
1654 if (GET_CODE (x) != REG
1655 || (SMALL_REGISTER_CLASSES
1656 && REGNO (x) < FIRST_PSEUDO_REGISTER))
/* Substitute a fresh pseudo for X; the original destination is
   patched back in near the end of the function (see orig_x below).  */
1660 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1661 ? XEXP (x, 0) : x));
1664 /* Don't operate on sources that may trap or are volatile. */
1665 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1668 /* Set up the info block for our subroutines. */
1669 if_info.test_bb = test_bb;
1670 if_info.cond = cond;
1671 if_info.jump = jump;
1672 if_info.insn_a = insn_a;
1673 if_info.insn_b = insn_b;
1678 /* Try optimizations in some approximation of a useful order. */
1679 /* ??? Should first look to see if X is live incoming at all. If it
1680 isn't, we don't need anything but an unconditional set. */
1682 /* Look and see if A and B are really the same. Avoid creating silly
1683 cmove constructs that no one will fix up later. */
1684 if (rtx_equal_p (a, b))
1686 /* If we have an INSN_B, we don't have to create any new rtl. Just
1687 move the instruction that we already have. If we don't have an
1688 INSN_B, that means that A == X, and we've got a noop move. In
1689 that case don't do anything and let the code below delete INSN_A. */
1690 if (insn_b && else_bb)
1694 if (else_bb && insn_b == else_bb->end)
1695 else_bb->end = PREV_INSN (insn_b);
1696 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1698 /* If there was a REG_EQUAL note, delete it since it may have been
1699 true due to this insn being after a jump. */
1700 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1701 remove_note (insn_b, note);
1705 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1706 x must be executed twice. */
1707 else if (insn_b && side_effects_p (orig_x))
/* Each noce_try_* routine emits the replacement insns itself and
   returns nonzero on success; visible control falls through to the
   cleanup below on success (elided gotos not shown here).  */
1714 if (noce_try_store_flag (&if_info))
1716 if (noce_try_minmax (&if_info))
1718 if (noce_try_abs (&if_info))
1720 if (HAVE_conditional_move
1721 && noce_try_cmove (&if_info))
1723 if (! HAVE_conditional_execution)
1725 if (noce_try_store_flag_constants (&if_info))
1727 if (noce_try_store_flag_inc (&if_info))
1729 if (noce_try_store_flag_mask (&if_info))
1731 if (HAVE_conditional_move
1732 && noce_try_cmove_arith (&if_info))
1739 /* The original sets may now be killed. */
1740 delete_insn (insn_a);
1742 /* Several special cases here: First, we may have reused insn_b above,
1743 in which case insn_b is now NULL. Second, we want to delete insn_b
1744 if it came from the ELSE block, because it follows the now correct
1745 write that appears in the TEST block. However, if we got insn_b from
1746 the TEST block, it may in fact be loading data needed for the comparison.
1747 We'll let life_analysis remove the insn if it's really dead. */
1748 if (insn_b && else_bb)
1749 delete_insn (insn_b);
1751 /* The new insns will have been inserted just before the jump. We should
1752 be able to remove the jump with impunity, but the condition itself may
1753 have been modified by gcse to be shared across basic blocks. */
1756 /* If we used a temporary, fix it up now. */
1760 noce_emit_move_insn (copy_rtx (orig_x), x);
1761 insn_b = gen_sequence ();
1764 emit_insn_after_scope (insn_b, test_bb->end, INSN_SCOPE (insn_a));
1767 /* Merge the blocks! */
1768 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1773 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1774 straight line code. Return true if successful. */
/* Dispatcher: before reload, first try the transform that needs no
   conditional execution (noce); otherwise, on targets with
   conditional execution, try predicating the block bodies.  */
1777 process_if_block (test_bb, then_bb, else_bb, join_bb)
1778 basic_block test_bb; /* Basic block test is in */
1779 basic_block then_bb; /* Basic block for THEN block */
1780 basic_block else_bb; /* Basic block for ELSE block */
1781 basic_block join_bb; /* Basic block the join label is in */
1783 if (! reload_completed
1784 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1787 if (HAVE_conditional_execution
1789 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1795 /* Merge the blocks and mark for local life update. */
/* Collapse TEST/THEN/ELSE (and, when adjacent, JOIN) into a single
   COMBO block after a successful conversion, updating the removed /
   updated block counters.  NOTE(review): some lines (aborts, else
   arms) are elided from this excerpt.  */
1798 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1799 basic_block test_bb; /* Basic block test is in */
1800 basic_block then_bb; /* Basic block for THEN block */
1801 basic_block else_bb; /* Basic block for ELSE block */
1802 basic_block join_bb; /* Basic block the join label is in */
1804 basic_block combo_bb;
1806 /* All block merging is done into the lower block numbers. */
1810 /* First merge TEST block into THEN block. This is a no-brainer since
1811 the THEN block did not have a code label to begin with. */
1814 if (combo_bb->global_live_at_end)
1815 COPY_REG_SET (combo_bb->global_live_at_end,
1816 then_bb->global_live_at_end);
1817 merge_blocks_nomove (combo_bb, then_bb);
1818 num_removed_blocks++;
1821 /* The ELSE block, if it existed, had a label. That label count
1822 will almost always be zero, but odd things can happen when labels
1823 get their addresses taken. */
1826 merge_blocks_nomove (combo_bb, else_bb);
1827 num_removed_blocks++;
1830 /* If there was no join block reported, that means it was not adjacent
1831 to the others, and so we cannot merge them. */
1835 rtx last = combo_bb->end;
1837 /* The outgoing edge for the current COMBO block should already
1838 be correct. Verify this. */
1839 if (combo_bb->succ == NULL_EDGE)
/* No successor is legitimate only for noreturn calls and
   unconditional traps.  */
1841 if (find_reg_note (last, REG_NORETURN, NULL))
1843 else if (GET_CODE (last) == INSN
1844 && GET_CODE (PATTERN (last)) == TRAP_IF
1845 && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
1851 /* There should still be something at the end of the THEN or ELSE
1852 blocks taking us to our final destination. */
1853 else if (GET_CODE (last) == JUMP_INSN)
1855 else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
1856 && GET_CODE (last) == CALL_INSN
1857 && SIBLING_CALL_P (last))
1859 else if ((combo_bb->succ->flags & EDGE_EH)
1860 && can_throw_internal (last))
1866 /* The JOIN block may have had quite a number of other predecessors too.
1867 Since we've already merged the TEST, THEN and ELSE blocks, we should
1868 have only one remaining edge from our if-then-else diamond. If there
1869 is more than one remaining edge, it must come from elsewhere. There
1870 may be zero incoming edges if the THEN block didn't actually join
1871 back up (as with a call to abort). */
1872 else if ((join_bb->pred == NULL
1873 || join_bb->pred->pred_next == NULL)
1874 && join_bb != EXIT_BLOCK_PTR)
1876 /* We can merge the JOIN. */
1877 if (combo_bb->global_live_at_end)
1878 COPY_REG_SET (combo_bb->global_live_at_end,
1879 join_bb->global_live_at_end);
1880 merge_blocks_nomove (combo_bb, join_bb);
1881 num_removed_blocks++;
1885 /* We cannot merge the JOIN. */
1887 /* The outgoing edge for the current COMBO block should already
1888 be correct. Verify this. */
1889 if (combo_bb->succ->succ_next != NULL_EDGE
1890 || combo_bb->succ->dest != join_bb)
1893 /* Remove the jump and cruft from the end of the COMBO block. */
1894 if (join_bb != EXIT_BLOCK_PTR)
1895 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1898 num_updated_if_blocks++;
1901 /* Find a block ending in a simple IF condition. Return TRUE if
1902 we were able to transform it in some way. */
/* Driver for a single candidate TEST_BB: validate the two-successor
   shape, canonicalize THEN as the fallthrough edge, then try each
   recognizer in turn (diamond, cond-trap, cases 1 and 2).  */
1905 find_if_header (test_bb)
1906 basic_block test_bb;
1911 /* The kind of block we're looking for has exactly two successors. */
1912 if ((then_edge = test_bb->succ) == NULL_EDGE
1913 || (else_edge = then_edge->succ_next) == NULL_EDGE
1914 || else_edge->succ_next != NULL_EDGE)
1917 /* Neither edge should be abnormal. */
1918 if ((then_edge->flags & EDGE_COMPLEX)
1919 || (else_edge->flags & EDGE_COMPLEX))
1922 /* The THEN edge is canonically the one that falls through. */
1923 if (then_edge->flags & EDGE_FALLTHRU)
1925 else if (else_edge->flags & EDGE_FALLTHRU)
/* Swap so THEN is the fallthrough edge (the swap's other half is
   elided from this excerpt).  */
1928 else_edge = then_edge;
1932 /* Otherwise this must be a multiway branch of some sort. */
1935 if (find_if_block (test_bb, then_edge, else_edge))
1937 if (HAVE_trap && HAVE_conditional_trap
1938 && find_cond_trap (test_bb, then_edge, else_edge))
1941 && (! HAVE_conditional_execution || reload_completed))
1943 if (find_if_case_1 (test_bb, then_edge, else_edge))
1945 if (find_if_case_2 (test_bb, then_edge, else_edge))
1953 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1957 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1958 block. If so, we'll try to convert the insns to not require the branch.
1959 Return TRUE if we were successful at converting the block. */
1962 find_if_block (test_bb, then_edge, else_edge)
1963 basic_block test_bb;
1964 edge then_edge, else_edge;
1966 basic_block then_bb = then_edge->dest;
1967 basic_block else_bb = else_edge->dest;
1968 basic_block join_bb = NULL_BLOCK;
1969 edge then_succ = then_bb->succ;
1970 edge else_succ = else_bb->succ;
1973 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1974 if (then_bb->pred->pred_next != NULL_EDGE)
1977 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1978 if (then_succ != NULL_EDGE
1979 && (then_succ->succ_next != NULL_EDGE
1980 || (then_succ->flags & EDGE_COMPLEX)))
1983 /* If the THEN block has no successors, conditional execution can still
1984 make a conditional call. Don't do this unless the ELSE block has
1985 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1986 Check for the last insn of the THEN block being an indirect jump, which
1987 is listed as not having any successors, but confuses the rest of the CE
1988 code processing. XXX we should fix this in the future. */
1989 if (then_succ == NULL)
1991 if (else_bb->pred->pred_next == NULL_EDGE)
1993 rtx last_insn = then_bb->end;
/* Skip trailing notes to find the real last insn.  */
1996 && GET_CODE (last_insn) == NOTE
1997 && last_insn != then_bb->head)
1998 last_insn = PREV_INSN (last_insn);
/* Reject an indirect jump at the end of THEN (see XXX above).  */
2001 && GET_CODE (last_insn) == JUMP_INSN
2002 && ! simplejump_p (last_insn))
2006 else_bb = NULL_BLOCK;
2012 /* If the THEN block's successor is the other edge out of the TEST block,
2013 then we have an IF-THEN combo without an ELSE. */
2014 else if (then_succ->dest == else_bb)
2017 else_bb = NULL_BLOCK;
2020 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
2021 has exactly one predecessor and one successor, and the outgoing edge
2022 is not complex, then we have an IF-THEN-ELSE combo. */
2023 else if (else_succ != NULL_EDGE
2024 && then_succ->dest == else_succ->dest
2025 && else_bb->pred->pred_next == NULL_EDGE
2026 && else_succ->succ_next == NULL_EDGE
2027 && ! (else_succ->flags & EDGE_COMPLEX))
2028 join_bb = else_succ->dest;
2030 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
2034 num_possible_if_blocks++;
2039 fprintf (rtl_dump_file,
2040 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
2041 test_bb->index, then_bb->index, else_bb->index,
2044 fprintf (rtl_dump_file,
2045 "\nIF-THEN block found, start %d, then %d, join %d\n",
2046 test_bb->index, then_bb->index, join_bb->index);
2049 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
2050 get the first condition for free, since we've already asserted that
2051 there's a fallthru edge from IF to THEN. */
2052 /* ??? As an enhancement, move the ELSE block. Have to deal with
2053 BLOCK notes, if by no other means than aborting the merge if they
2054 exist. Sticky enough I don't want to think about it now. */
2056 if (else_bb && (next = next->next_bb) != else_bb)
2058 if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
2066 /* Do the real work. */
2067 return process_if_block (test_bb, then_bb, else_bb, join_bb);
2070 /* Convert a branch over a trap, or a branch to a trap,
2071 into a conditional trap. */
2074 find_cond_trap (test_bb, then_edge, else_edge)
2075 basic_block test_bb;
2076 edge then_edge, else_edge;
2078 basic_block then_bb, else_bb, trap_bb, other_bb;
2079 rtx trap, jump, cond, cond_earliest, seq;
2082 then_bb = then_edge->dest;
2083 else_bb = else_edge->dest;
2085 /* Locate the block with the trap instruction. */
2086 /* ??? While we look for no successors, we really ought to allow
2087 EH successors. Need to fix merge_if_block for that to work. */
2088 if ((trap = block_has_only_trap (then_bb)) != NULL)
2089 trap_bb = then_bb, other_bb = else_bb;
2090 else if ((trap = block_has_only_trap (else_bb)) != NULL)
2091 trap_bb = else_bb, other_bb = then_bb;
2097 fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
2098 test_bb->index, trap_bb->index);
2101 /* If this is not a standard conditional jump, we can't parse it. */
2102 jump = test_bb->end;
2103 cond = noce_get_condition (jump, &cond_earliest);
2107 /* If the conditional jump is more than just a conditional jump,
2108 then we can not do if-conversion on this block. */
2109 if (! onlyjump_p (jump))
2112 /* We must be comparing objects whose modes imply the size. */
2113 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2116 /* Reverse the comparison code, if necessary. */
/* The branch is taken when the trap is NOT executed if THEN is the
   trap block, so the trap condition is the reversed jump condition.  */
2117 code = GET_CODE (cond);
2118 if (then_bb == trap_bb)
2120 code = reversed_comparison_code (cond, jump);
2121 if (code == UNKNOWN)
2125 /* Attempt to generate the conditional trap. */
2126 seq = gen_cond_trap (code, XEXP (cond, 0), XEXP (cond, 1),
2127 TRAP_CODE (PATTERN (trap)));
2131 /* Emit the new insns before cond_earliest. */
2132 emit_insn_before_scope (seq, cond_earliest, INSN_SCOPE (trap));
2134 /* Delete the trap block if possible. */
2135 remove_edge (trap_bb == then_bb ? then_edge : else_edge);
2136 if (trap_bb->pred == NULL)
2138 flow_delete_block (trap_bb);
2139 num_removed_blocks++;
2142 /* If the non-trap block and the test are now adjacent, merge them.
2143 Otherwise we must insert a direct branch. */
2144 if (test_bb->next_bb == other_bb)
2147 merge_if_block (test_bb, NULL, NULL, other_bb);
/* Not adjacent: replace the conditional jump with an unconditional
   jump to the old destination.  */
2153 lab = JUMP_LABEL (jump);
2154 newjump = emit_jump_insn_after (gen_jump (lab), jump);
2155 LABEL_NUSES (lab) += 1;
2156 JUMP_LABEL (newjump) = lab;
2157 emit_barrier_after (newjump);
2165 /* Subroutine of find_cond_trap: if BB contains only a trap insn,
/* ... return that trap insn, else NULL (tail of this comment and the
   return statements are elided from this excerpt).  */
2169 block_has_only_trap (bb)
2174 /* We're not the exit block. */
2175 if (bb == EXIT_BLOCK_PTR)
2178 /* The block must have no successors. */
2182 /* The only instruction in the THEN block must be the trap. */
2183 trap = first_active_insn (bb);
2184 if (! (trap == bb->end
2185 && GET_CODE (PATTERN (trap)) == TRAP_IF
2186 && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
2192 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
2193 transformable, but not necessarily the other. There need be no
2196 Return TRUE if we were successful at converting the block.
2198 Cases we'd like to look at:
2201 if (test) goto over; // x not live
2209 if (! test) goto label;
2212 if (test) goto E; // x not live
2226 (3) // This one's really only interesting for targets that can do
2227 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2228 // it results in multiple branches on a cache line, which often
2229 // does not sit well with predictors.
2231 if (test1) goto E; // predicted not taken
2247 (A) Don't do (2) if the branch is predicted against the block we're
2248 eliminating. Do it anyway if we can eliminate a branch; this requires
2249 that the sole successor of the eliminated block postdominate the other
2252 (B) With CE, on (3) we can steal from both sides of the if, creating
2261 Again, this is most useful if J postdominates.
2263 (C) CE substitutes for helpful life information.
2265 (D) These heuristics need a lot of work. */
2267 /* Tests for case 1 above. */
/* Case 1: "if (test) goto over; x = a; over:" where THEN is small,
   has a single non-fallthru successor and single predecessor, and its
   insns are dead at the branch site or predicable.  On success the
   THEN block is removed and the CFG is patched up.  */
2270 find_if_case_1 (test_bb, then_edge, else_edge)
2271 basic_block test_bb;
2272 edge then_edge, else_edge;
2274 basic_block then_bb = then_edge->dest;
2275 basic_block else_bb = else_edge->dest, new_bb;
2276 edge then_succ = then_bb->succ;
2279 /* THEN has one successor. */
2280 if (!then_succ || then_succ->succ_next != NULL)
2283 /* THEN does not fall through, but is not strange either. */
2284 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2287 /* THEN has one predecessor. */
2288 if (then_bb->pred->pred_next != NULL)
2291 /* THEN must do something. */
2292 if (forwarder_block_p (then_bb))
2295 num_possible_if_blocks++;
2297 fprintf (rtl_dump_file,
2298 "\nIF-CASE-1 found, start %d, then %d\n",
2299 test_bb->index, then_bb->index);
2301 /* THEN is small. */
2302 if (count_bb_insns (then_bb) > BRANCH_COST)
2305 /* Registers set are dead, or are predicable. */
2306 if (! dead_or_predicable (test_bb, then_bb, else_bb,
2307 then_bb->succ->dest, 1))
2310 /* Conversion went ok, including moving the insns and fixing up the
2311 jump. Adjust the CFG to match. */
2313 bitmap_operation (test_bb->global_live_at_end,
2314 else_bb->global_live_at_start,
2315 then_bb->global_live_at_end, BITMAP_IOR);
2317 new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), else_bb);
2318 then_bb_index = then_bb->index;
2319 flow_delete_block (then_bb);
2320 /* Make rest of code believe that the newly created block is the THEN_BB
2321 block we removed. */
2324 new_bb->index = then_bb_index;
2325 BASIC_BLOCK (then_bb_index) = new_bb;
2327 /* We've possibly created jump to next insn, cleanup_cfg will solve that
2330 num_removed_blocks++;
2331 num_updated_if_blocks++;
2336 /* Test for case 2 above. */
/* Case 2: "if (test) goto E; x = big(); E: ..." where ELSE is small,
   mispredicted or postdominated, and its insns are dead at the branch
   site or predicable.  On success the ELSE block is removed.  */
2339 find_if_case_2 (test_bb, then_edge, else_edge)
2340 basic_block test_bb;
2341 edge then_edge, else_edge;
2343 basic_block then_bb = then_edge->dest;
2344 basic_block else_bb = else_edge->dest;
2345 edge else_succ = else_bb->succ;
2348 /* ELSE has one successor. */
2349 if (!else_succ || else_succ->succ_next != NULL)
2352 /* ELSE outgoing edge is not complex. */
2353 if (else_succ->flags & EDGE_COMPLEX)
2356 /* ELSE has one predecessor. */
2357 if (else_bb->pred->pred_next != NULL)
2360 /* THEN is not EXIT. */
2361 if (then_bb->index < 0)
2364 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2365 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2366 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2368 else if (else_succ->dest->index < 0
2369 || TEST_BIT (post_dominators[then_bb->index],
2370 else_succ->dest->index))
2375 num_possible_if_blocks++;
2377 fprintf (rtl_dump_file,
2378 "\nIF-CASE-2 found, start %d, else %d\n",
2379 test_bb->index, else_bb->index);
2381 /* ELSE is small. */
/* Fixed: this used to count THEN_BB (copied from find_if_case_1),
   but it is the ELSE block whose insns we are about to convert and
   delete, so its size is what must be bounded by BRANCH_COST.  */
2382 if (count_bb_insns (else_bb) > BRANCH_COST)
2385 /* Registers set are dead, or are predicable. */
2386 if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
2389 /* Conversion went ok, including moving the insns and fixing up the
2390 jump. Adjust the CFG to match. */
2392 bitmap_operation (test_bb->global_live_at_end,
2393 then_bb->global_live_at_start,
2394 else_bb->global_live_at_end, BITMAP_IOR);
2396 flow_delete_block (else_bb);
2398 num_removed_blocks++;
2399 num_updated_if_blocks++;
2401 /* ??? We may now fallthru from one of THEN's successors into a join
2402 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2407 /* A subroutine of dead_or_predicable called through for_each_rtx.
2408 Return 1 if a memory is found. */
/* PX points at the sub-rtx under inspection; DATA is unused.  */
2411 find_memory (px, data)
2413 void *data ATTRIBUTE_UNUSED;
2415 return GET_CODE (*px) == MEM;
2418 /* Used by the code above to perform the actual rtl transformations.
2419 Return TRUE if successful.
2421 TEST_BB is the block containing the conditional branch. MERGE_BB
2422 is the block containing the code to manipulate. NEW_DEST is the
2423 label TEST_BB should be branching to after the conversion.
2424 REVERSEP is true if the sense of the branch should be reversed. */
/* NOTE(review): various lines (gotos, labels, some declarations) are
   elided from this excerpt; comments below describe only what is
   visible.  */
2427 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2428 basic_block test_bb, merge_bb, other_bb;
2429 basic_block new_dest;
2432 rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
2434 jump = test_bb->end;
2436 /* Find the extent of the real code in the merge block. */
2437 head = merge_bb->head;
2438 end = merge_bb->end;
2440 if (GET_CODE (head) == CODE_LABEL)
2441 head = NEXT_INSN (head);
2442 if (GET_CODE (head) == NOTE)
2446 head = end = NULL_RTX;
2449 head = NEXT_INSN (head);
2452 if (GET_CODE (end) == JUMP_INSN)
2456 head = end = NULL_RTX;
2459 end = PREV_INSN (end);
2462 /* Disable handling dead code by conditional execution if the machine needs
2463 to do anything funny with the tests, etc. */
2464 #ifndef IFCVT_MODIFY_TESTS
2465 if (HAVE_conditional_execution)
2467 /* In the conditional execution case, we have things easy. We know
2468 the condition is reversible. We don't have to check life info,
2469 because we're going to conditionally execute the code anyway.
2470 All that's left is making sure the insns involved can actually
2475 cond = cond_exec_get_condition (jump);
2479 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2481 prob_val = XEXP (prob_val, 0);
2485 enum rtx_code rev = reversed_comparison_code (cond, jump);
2488 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
/* Probability of the reversed branch is the complement.  */
2491 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2494 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2502 /* In the non-conditional execution case, we have to verify that there
2503 are no trapping operations, no calls, no references to memory, and
2504 that any registers modified are dead at the branch site. */
2506 rtx insn, cond, prev;
2507 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2508 regset merge_set, tmp, test_live, test_set;
2509 struct propagate_block_info *pbi;
2512 /* Check for no calls or trapping operations. */
2513 for (insn = head; ; insn = NEXT_INSN (insn))
2515 if (GET_CODE (insn) == CALL_INSN)
2519 if (may_trap_p (PATTERN (insn)))
2522 /* ??? Even non-trapping memories such as stack frame
2523 references must be avoided. For stores, we collect
2524 no lifetime info; for reads, we'd have to assert
2525 true_dependence false against every store in the
2527 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2534 if (! any_condjump_p (jump))
2537 /* Find the extent of the conditional. */
2538 cond = noce_get_condition (jump, &earliest);
2543 MERGE_SET = set of registers set in MERGE_BB
2544 TEST_LIVE = set of registers live at EARLIEST
2545 TEST_SET = set of registers set between EARLIEST and the
2546 end of the block. */
2548 tmp = INITIALIZE_REG_SET (tmp_head);
2549 merge_set = INITIALIZE_REG_SET (merge_set_head);
2550 test_live = INITIALIZE_REG_SET (test_live_head);
2551 test_set = INITIALIZE_REG_SET (test_set_head);
2553 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2554 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2555 since we've already asserted that MERGE_BB is small. */
2556 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2558 /* For small register class machines, don't lengthen lifetimes of
2559 hard registers before reload. */
2560 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2562 EXECUTE_IF_SET_IN_BITMAP
2565 if (i < FIRST_PSEUDO_REGISTER
2567 && ! global_regs[i])
2572 /* For TEST, we're interested in a range of insns, not a whole block.
2573 Moreover, we're interested in the insns live from OTHER_BB. */
2575 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2576 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
2579 for (insn = jump; ; insn = prev)
2581 prev = propagate_one_insn (pbi, insn);
2582 if (insn == earliest)
2586 free_propagate_block_info (pbi);
2588 /* We can perform the transformation if
2589 MERGE_SET & (TEST_SET | TEST_LIVE)
2591 TEST_SET & merge_bb->global_live_at_start
2594 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2595 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2596 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2598 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2600 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2603 FREE_REG_SET (merge_set);
2604 FREE_REG_SET (test_live);
2605 FREE_REG_SET (test_set);
2612 /* We don't want to use normal invert_jump or redirect_jump because
2613 we don't want to delete_insn called. Also, we want to do our own
2614 change group management. */
2616 old_dest = JUMP_LABEL (jump);
2617 if (other_bb != new_dest)
2619 new_label = block_label (new_dest);
2621 ? ! invert_jump_1 (jump, new_label)
2622 : ! redirect_jump_1 (jump, new_label)
2626 if (! apply_change_group ())
2629 if (other_bb != new_dest)
2632 LABEL_NUSES (old_dest) -= 1;
2634 LABEL_NUSES (new_label) += 1;
2635 JUMP_LABEL (jump) = new_label;
2637 invert_br_probabilities (jump);
2639 redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
/* Swap the counts/probabilities of the two outgoing edges to match
   the inverted branch.  */
2642 gcov_type count, probability;
2643 count = BRANCH_EDGE (test_bb)->count;
2644 BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
2645 FALLTHRU_EDGE (test_bb)->count = count;
2646 probability = BRANCH_EDGE (test_bb)->probability;
2647 BRANCH_EDGE (test_bb)->probability
2648 = FALLTHRU_EDGE (test_bb)->probability;
2649 FALLTHRU_EDGE (test_bb)->probability = probability;
2650 update_br_prob_note (test_bb);
2654 /* Move the insns out of MERGE_BB to before the branch. */
2657 if (end == merge_bb->end)
2658 merge_bb->end = PREV_INSN (head);
2660 if (squeeze_notes (&head, &end))
2663 reorder_insns (head, end, PREV_INSN (earliest));
2666 /* Remove the jump and edge if we can. */
2667 if (other_bb == new_dest)
2670 remove_edge (BRANCH_EDGE (test_bb));
2671 /* ??? Can't merge blocks here, as then_bb is still in use.
2672 At minimum, the merge will get done just before bb-reorder. */
2682 /* Main entry point for all if-conversion. */
2685 if_convert (x_life_data_ok)
2690 num_possible_if_blocks = 0;
2691 num_updated_if_blocks = 0;
2692 num_removed_blocks = 0;
2693 life_data_ok = (x_life_data_ok != 0);
2695 /* Free up basic_block_for_insn so that we don't have to keep it
2696 up to date, either here or in merge_blocks_nomove. */
2697 free_basic_block_vars (1);
2699 /* Compute postdominators if we think we'll use them. */
2700 post_dominators = NULL;
2701 if (HAVE_conditional_execution || life_data_ok)
2703 post_dominators = sbitmap_vector_alloc (last_basic_block, last_basic_block);
2704 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2709 /* Go through each of the basic blocks looking for things to convert. */
2711 while (find_if_header (bb))
2714 if (post_dominators)
2715 sbitmap_vector_free (post_dominators);
2718 fflush (rtl_dump_file);
2720 clear_aux_for_blocks ();
2722 /* Rebuild life info for basic blocks that require it. */
2723 if (num_removed_blocks && life_data_ok)
2725 /* If we allocated new pseudos, we must resize the array for sched1. */
2726 if (max_regno < max_reg_num ())
2728 max_regno = max_reg_num ();
2729 allocate_reg_info (max_regno, FALSE, FALSE);
2731 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
2732 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2733 | PROP_KILL_DEAD_CODE);
2736 /* Write the final stats. */
2737 if (rtl_dump_file && num_possible_if_blocks > 0)
2739 fprintf (rtl_dump_file,
2740 "\n%d possible IF blocks searched.\n",
2741 num_possible_if_blocks);
2742 fprintf (rtl_dump_file,
2743 "%d IF blocks converted.\n",
2744 num_updated_if_blocks);
2745 fprintf (rtl_dump_file,
2746 "%d basic blocks deleted.\n\n\n",
2747 num_removed_blocks);
2750 #ifdef ENABLE_CHECKING
2751 verify_flow_info ();