/* If-conversion support.
- Copyright (C) 2000, 2001 Free Software Foundation, Inc.
+ Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
This file is part of GCC.
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
+#include "except.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "expr.h"
static int find_if_case_1 PARAMS ((basic_block, edge, edge));
static int find_if_case_2 PARAMS ((basic_block, edge, edge));
static int find_cond_trap PARAMS ((basic_block, edge, edge));
+static rtx block_has_only_trap PARAMS ((basic_block));
static int find_memory PARAMS ((rtx *, void *));
static int dead_or_predicable PARAMS ((basic_block, basic_block,
basic_block, basic_block, int));
as well as a flag indicating that the block should be rescaned for
life analysis. */
-#define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
-#define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
-#define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
-#define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
+#define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I)))
+#define ORIG_INDEX(BB) ((size_t)(BB)->aux)
\f
/* Count the number of non-jump active insns in BB. */
if (no_new_pseudos)
return FALSE;
- /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
- will be resolved with an SMIN/SMAX. It wouldn't be too hard
+ /* ??? Reject modes with NaNs or signed zeros since we don't know how
+ they will be resolved with an SMIN/SMAX. It wouldn't be too hard
to get the target to tell us... */
- if (FLOAT_MODE_P (GET_MODE (if_info->x))
- && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
- && ! flag_unsafe_math_optimizations)
+ if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
+ || HONOR_NANS (GET_MODE (if_info->x)))
return FALSE;
cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
if (side_effects_p (op))
return FALSE;
- /* ??? Unfortuantely may_trap_p can't look at flag_trapping_math, due to
- being linked into the genfoo programs. This is probably a mistake.
- With finite operands, most fp operations don't trap. */
- if (!flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
- switch (GET_CODE (op))
- {
- case DIV:
- case MOD:
- case UDIV:
- case UMOD:
- /* ??? This is kinda lame -- almost every target will have forced
- the constant into a register first. But given the expense of
- division, this is probably for the best. */
- return (CONSTANT_P (XEXP (op, 1))
- && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
- && ! may_trap_p (XEXP (op, 0)));
-
- default:
- switch (GET_RTX_CLASS (GET_CODE (op)))
- {
- case '1':
- return ! may_trap_p (XEXP (op, 0));
- case 'c':
- case '2':
- return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
- }
- break;
- }
-
return ! may_trap_p (op);
}
if (orig_x != x)
{
start_sequence ();
- noce_emit_move_insn (orig_x, x);
+ noce_emit_move_insn (copy_rtx (orig_x), x);
insn_b = gen_sequence ();
end_sequence ();
/* First merge TEST block into THEN block. This is a no-brainer since
the THEN block did not have a code label to begin with. */
-
- if (life_data_ok)
- COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
- merge_blocks_nomove (combo_bb, then_bb);
- num_removed_blocks++;
+ if (then_bb)
+ {
+ if (combo_bb->global_live_at_end)
+ COPY_REG_SET (combo_bb->global_live_at_end,
+ then_bb->global_live_at_end);
+ merge_blocks_nomove (combo_bb, then_bb);
+ num_removed_blocks++;
+ }
/* The ELSE block, if it existed, had a label. That label count
will almost always be zero, but odd things can happen when labels
if (! join_bb)
{
+ rtx last = combo_bb->end;
+
/* The outgoing edge for the current COMBO block should already
be correct. Verify this. */
if (combo_bb->succ == NULL_EDGE)
- abort ();
+ {
+ if (find_reg_note (last, REG_NORETURN, NULL))
+ ;
+ else if (GET_CODE (last) == INSN
+ && GET_CODE (PATTERN (last)) == TRAP_IF
+ && TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
+ ;
+ else
+ abort ();
+ }
- /* There should still be a branch at the end of the THEN or ELSE
+ /* There should still be something at the end of the THEN or ELSE
blocks taking us to our final destination. */
- if (GET_CODE (combo_bb->end) != JUMP_INSN)
+ else if (GET_CODE (last) == JUMP_INSN)
+ ;
+ else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
+ && GET_CODE (last) == CALL_INSN
+ && SIBLING_CALL_P (last))
+ ;
+ else if ((combo_bb->succ->flags & EDGE_EH)
+ && can_throw_internal (last))
+ ;
+ else
abort ();
}
&& join_bb != EXIT_BLOCK_PTR)
{
/* We can merge the JOIN. */
- if (life_data_ok)
+ if (combo_bb->global_live_at_end)
COPY_REG_SET (combo_bb->global_live_at_end,
join_bb->global_live_at_end);
merge_blocks_nomove (combo_bb, join_bb);
tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
}
- /* Make sure we update life info properly. */
- SET_UPDATE_LIFE (combo_bb);
-
num_updated_if_blocks++;
}
\f
basic_block test_bb;
edge then_edge, else_edge;
{
- basic_block then_bb, else_bb, join_bb, trap_bb;
+ basic_block then_bb, else_bb, trap_bb, other_bb;
rtx trap, jump, cond, cond_earliest, seq;
enum rtx_code code;
then_bb = then_edge->dest;
else_bb = else_edge->dest;
- join_bb = NULL;
/* Locate the block with the trap instruction. */
/* ??? While we look for no successors, we really ought to allow
EH successors. Need to fix merge_if_block for that to work. */
- /* ??? We can't currently handle merging the blocks if they are not
- already adjacent. Prevent losage in merge_if_block by detecting
- this now. */
- if (then_bb->succ == NULL)
- {
- trap_bb = then_bb;
- if (else_bb->index != then_bb->index + 1)
- return FALSE;
- join_bb = else_bb;
- else_bb = NULL;
- }
- else if (else_bb->succ == NULL)
- {
- trap_bb = else_bb;
- if (else_bb->index != then_bb->index + 1)
- else_bb = NULL;
- else if (then_bb->succ
- && ! then_bb->succ->succ_next
- && ! (then_bb->succ->flags & EDGE_COMPLEX)
- && then_bb->succ->dest->index == else_bb->index + 1)
- join_bb = then_bb->succ->dest;
- }
+ if ((trap = block_has_only_trap (then_bb)) != NULL)
+ trap_bb = then_bb, other_bb = else_bb;
+ else if ((trap = block_has_only_trap (else_bb)) != NULL)
+ trap_bb = else_bb, other_bb = then_bb;
else
return FALSE;
- /* Don't confuse a conditional return with something we want to
- optimize here. */
- if (trap_bb == EXIT_BLOCK_PTR)
- return FALSE;
-
- /* The only instruction in the THEN block must be the trap. */
- trap = first_active_insn (trap_bb);
- if (! (trap == trap_bb->end
- && GET_CODE (PATTERN (trap)) == TRAP_IF
- && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
- return FALSE;
-
if (rtl_dump_file)
{
- if (trap_bb == then_bb)
- fprintf (rtl_dump_file,
- "\nTRAP-IF block found, start %d, trap %d",
- test_bb->index, then_bb->index);
- else
- fprintf (rtl_dump_file,
- "\nTRAP-IF block found, start %d, then %d, trap %d",
- test_bb->index, then_bb->index, trap_bb->index);
- if (join_bb)
- fprintf (rtl_dump_file, ", join %d\n", join_bb->index);
- else
- fputc ('\n', rtl_dump_file);
+ fprintf (rtl_dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
+ test_bb->index, trap_bb->index);
}
/* If this is not a standard conditional jump, we can't parse it. */
if (seq == NULL)
return FALSE;
- /* Emit the new insns before cond_earliest; delete the old jump
- and trap insns. */
-
+ /* Emit the new insns before cond_earliest. */
emit_insn_before (seq, cond_earliest);
- delete_insn (jump);
-
- delete_insn (trap);
-
- /* Merge the blocks! */
- if (trap_bb != then_bb && ! else_bb)
+ /* Delete the trap block if possible. */
+ remove_edge (trap_bb == then_bb ? then_edge : else_edge);
+ if (trap_bb->pred == NULL)
{
flow_delete_block (trap_bb);
num_removed_blocks++;
}
- merge_if_block (test_bb, then_bb, else_bb, join_bb);
+
+ /* If the non-trap block and the test are now adjacent, merge them.
+ Otherwise we must insert a direct branch. */
+ if (test_bb->index + 1 == other_bb->index)
+ {
+ delete_insn (jump);
+ merge_if_block (test_bb, NULL, NULL, other_bb);
+ }
+ else
+ {
+ rtx lab, newjump;
+
+ lab = JUMP_LABEL (jump);
+ newjump = emit_jump_insn_after (gen_jump (lab), jump);
+ LABEL_NUSES (lab) += 1;
+ JUMP_LABEL (newjump) = lab;
+ emit_barrier_after (newjump);
+
+ delete_insn (jump);
+ }
return TRUE;
}
+/* Subroutine of find_cond_trap: if BB contains nothing but an
+   unconditional trap (a single active TRAP_IF insn whose condition is
+   const_true_rtx), return that trap insn; otherwise return NULL_RTX.
+   Called on both the THEN and the ELSE arm of the conditional.  */
+
+static rtx
+block_has_only_trap (bb)
+ basic_block bb;
+{
+ rtx trap;
+
+ /* The exit block is never a trap block.  */
+ if (bb == EXIT_BLOCK_PTR)
+ return NULL_RTX;
+
+ /* An unconditional trap falls through to nothing, so the block must
+    have no successor edges.  */
+ if (bb->succ)
+ return NULL_RTX;
+
+ /* The only active instruction in the block must be the trap itself,
+    and it must be the last insn of the block.  */
+ trap = first_active_insn (bb);
+ if (! (trap == bb->end
+ && GET_CODE (PATTERN (trap)) == TRAP_IF
+ && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
+ return NULL_RTX;
+
+ return trap;
+}
+
/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
transformable, but not necessarily the other. There need be no
JOIN block.
/* Conversion went ok, including moving the insns and fixing up the
jump. Adjust the CFG to match. */
- SET_UPDATE_LIFE (test_bb);
bitmap_operation (test_bb->global_live_at_end,
else_bb->global_live_at_start,
then_bb->global_live_at_end, BITMAP_IOR);
/* Make rest of code believe that the newly created block is the THEN_BB
block we are going to remove. */
if (new_bb)
- {
- new_bb->aux = then_bb->aux;
- SET_UPDATE_LIFE (then_bb);
- }
+ new_bb->aux = then_bb->aux;
flow_delete_block (then_bb);
/* We've possibly created jump to next insn, cleanup_cfg will solve that
later. */
/* Conversion went ok, including moving the insns and fixing up the
jump. Adjust the CFG to match. */
- SET_UPDATE_LIFE (test_bb);
bitmap_operation (test_bb->global_live_at_end,
then_bb->global_live_at_start,
else_bb->global_live_at_end, BITMAP_IOR);
basic_block new_dest;
int reversep;
{
- rtx head, end, jump, earliest, old_dest, new_label;
+ rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
jump = test_bb->end;
change group management. */
old_dest = JUMP_LABEL (jump);
- new_label = block_label (new_dest);
- if (reversep
- ? ! invert_jump_1 (jump, new_label)
- : ! redirect_jump_1 (jump, new_label))
- goto cancel;
+ if (other_bb != new_dest)
+ {
+ new_label = block_label (new_dest);
+ if (reversep
+ ? ! invert_jump_1 (jump, new_label)
+ : ! redirect_jump_1 (jump, new_label))
+ goto cancel;
+ }
if (! apply_change_group ())
return FALSE;
- if (old_dest)
- LABEL_NUSES (old_dest) -= 1;
- if (new_label)
- LABEL_NUSES (new_label) += 1;
- JUMP_LABEL (jump) = new_label;
-
- if (reversep)
- invert_br_probabilities (jump);
-
- redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
- if (reversep)
+ if (other_bb != new_dest)
{
- gcov_type count, probability;
- count = BRANCH_EDGE (test_bb)->count;
- BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
- FALLTHRU_EDGE (test_bb)->count = count;
- probability = BRANCH_EDGE (test_bb)->probability;
- BRANCH_EDGE (test_bb)->probability = FALLTHRU_EDGE (test_bb)->probability;
- FALLTHRU_EDGE (test_bb)->probability = probability;
- update_br_prob_note (test_bb);
+ if (old_dest)
+ LABEL_NUSES (old_dest) -= 1;
+ if (new_label)
+ LABEL_NUSES (new_label) += 1;
+ JUMP_LABEL (jump) = new_label;
+ if (reversep)
+ invert_br_probabilities (jump);
+
+ redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
+ if (reversep)
+ {
+ gcov_type count, probability;
+ count = BRANCH_EDGE (test_bb)->count;
+ BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
+ FALLTHRU_EDGE (test_bb)->count = count;
+ probability = BRANCH_EDGE (test_bb)->probability;
+ BRANCH_EDGE (test_bb)->probability
+ = FALLTHRU_EDGE (test_bb)->probability;
+ FALLTHRU_EDGE (test_bb)->probability = probability;
+ update_br_prob_note (test_bb);
+ }
}
/* Move the insns out of MERGE_BB to before the branch. */
reorder_insns (head, end, PREV_INSN (earliest));
}
+
+ /* Remove the jump and edge if we can. */
+ if (other_bb == new_dest)
+ {
+ delete_insn (jump);
+ remove_edge (BRANCH_EDGE (test_bb));
+ /* ??? Can't merge blocks here, as then_bb is still in use.
+ At minimum, the merge will get done just before bb-reorder. */
+ }
+
return TRUE;
cancel:
post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
}
+ if (life_data_ok)
+ clear_bb_flags ();
/* Record initial block numbers. */
for (block_num = 0; block_num < n_basic_blocks; block_num++)
if (rtl_dump_file)
fflush (rtl_dump_file);
+ clear_aux_for_blocks ();
+
/* Rebuild life info for basic blocks that require it. */
if (num_removed_blocks && life_data_ok)
{
- sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
- sbitmap_zero (update_life_blocks);
-
/* If we allocated new pseudos, we must resize the array for sched1. */
if (max_regno < max_reg_num ())
{
max_regno = max_reg_num ();
allocate_reg_info (max_regno, FALSE, FALSE);
}
-
- for (block_num = 0; block_num < n_basic_blocks; block_num++)
- if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
- SET_BIT (update_life_blocks, block_num);
-
- count_or_remove_death_notes (update_life_blocks, 1);
- /* ??? See about adding a mode that verifies that the initial
- set of blocks don't let registers come live. */
- update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
- PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
- | PROP_KILL_DEAD_CODE);
-
- sbitmap_free (update_life_blocks);
+ update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
+ PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
+ | PROP_KILL_DEAD_CODE);
}
- clear_aux_for_blocks ();
/* Write the final stats. */
if (rtl_dump_file && num_possible_if_blocks > 0)