/* Data flow analysis for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
/* TODO:
Split out from life_analysis:
- - local property discovery (bb->local_live, bb->local_set)
+ - local property discovery
- global property computation
- log links creation
- pre/post modify transformation
#endif
#endif
+/* This is the maximum number of times we process any given block if the
+ latest loop depth count is smaller than this number. Only used for the
+ failure strategy to avoid infinite loops in calculate_global_regs_live. */
+#define MAX_LIVENESS_ROUNDS 20
+
/* Nonzero if the second flow pass has completed. */
int flow2_completed;
varray_type reg_n_info;
-/* Size of a regset for the current function,
- in (1) bytes and (2) elements. */
-
-int regset_bytes;
-int regset_size;
-
/* Regset of regs live when calls to `setjmp'-like functions happen. */
/* ??? Does this exist only for the setjmp-clobbered warning message? */
-regset regs_live_at_setjmp;
+static regset regs_live_at_setjmp;
/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
that have to go in the same hard reg.
static void invalidate_mems_from_set (struct propagate_block_info *, rtx);
static void clear_log_links (sbitmap);
static int count_or_remove_death_notes_bb (basic_block, int);
+static void allocate_bb_life_data (void);
\f
/* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
note associated with the BLOCK. */
}
else
{
- int i;
+ unsigned i;
reg_set_iterator rsi;
/* Find the set of changed registers. */
unless the caller resets it to zero. */
int
-update_life_info (sbitmap blocks, enum update_life_extent extent, int prop_flags)
+update_life_info (sbitmap blocks, enum update_life_extent extent,
+ int prop_flags)
{
regset tmp;
- regset_head tmp_head;
- int i;
+ unsigned i;
int stabilized_prop_flags = prop_flags;
basic_block bb;
- tmp = INITIALIZE_REG_SET (tmp_head);
+ tmp = ALLOC_REG_SET (®_obstack);
ndead = 0;
if ((prop_flags & PROP_REG_INFO) && !reg_deaths)
sbitmap_zero (update_life_blocks);
FOR_EACH_BB (bb)
{
- if (extent == UPDATE_LIFE_LOCAL)
- {
- if (bb->flags & BB_DIRTY)
- {
- SET_BIT (update_life_blocks, bb->index);
- n++;
- }
- }
- else
+ if (bb->flags & BB_DIRTY)
{
- /* ??? Bootstrap with -march=pentium4 fails to terminate
- with only a partial life update. */
SET_BIT (update_life_blocks, bb->index);
- if (bb->flags & BB_DIRTY)
- n++;
+ n++;
}
}
void
delete_dead_jumptables (void)
{
- rtx insn, next;
- for (insn = get_insns (); insn; insn = next)
+ basic_block bb;
+
+ /* A dead jump table does not belong to any basic block. Scan insns
+ between two adjacent basic blocks. */
+ FOR_EACH_BB (bb)
{
- next = NEXT_INSN (insn);
- if (LABEL_P (insn)
- && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
- && JUMP_P (next)
- && (GET_CODE (PATTERN (next)) == ADDR_VEC
- || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
+ rtx insn, next;
+
+ for (insn = NEXT_INSN (BB_END (bb));
+ insn && !NOTE_INSN_BASIC_BLOCK_P (insn);
+ insn = next)
{
- if (dump_file)
- fprintf (dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
- delete_insn (NEXT_INSN (insn));
- delete_insn (insn);
- next = NEXT_INSN (next);
+ next = NEXT_INSN (insn);
+ if (LABEL_P (insn)
+ && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
+ && JUMP_P (next)
+ && (GET_CODE (PATTERN (next)) == ADDR_VEC
+ || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
+ {
+ rtx label = insn, jump = next;
+
+ if (dump_file)
+ fprintf (dump_file, "Dead jumptable %i removed\n",
+ INSN_UID (insn));
+
+ next = NEXT_INSN (next);
+ delete_insn (jump);
+ delete_insn (label);
+ }
}
}
}
{
basic_block *queue, *qhead, *qtail, *qend, bb;
regset tmp, new_live_at_end, invalidated_by_call;
- regset_head tmp_head, invalidated_by_call_head;
- regset_head new_live_at_end_head;
+ regset registers_made_dead;
+ bool failure_strategy_required = false;
+ int *block_accesses;
+
+  /* The registers that are modified within this block. */
+ regset *local_sets;
+
+ /* The registers that are conditionally modified within this block.
+ In other words, regs that are set only as part of a COND_EXEC. */
+ regset *cond_local_sets;
+
int i;
/* Some passes used to forget clear aux field of basic block causing
gcc_assert (!bb->aux);
#endif
- tmp = INITIALIZE_REG_SET (tmp_head);
- new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
- invalidated_by_call = INITIALIZE_REG_SET (invalidated_by_call_head);
+ tmp = ALLOC_REG_SET (®_obstack);
+ new_live_at_end = ALLOC_REG_SET (®_obstack);
+ invalidated_by_call = ALLOC_REG_SET (®_obstack);
+ registers_made_dead = ALLOC_REG_SET (®_obstack);
/* Inconveniently, this is only readily available in hard reg set form. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
SET_REGNO_REG_SET (invalidated_by_call, i);
+ /* Allocate space for the sets of local properties. */
+ local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
+ sizeof (regset));
+ cond_local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
+ sizeof (regset));
+
/* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
because the `head == tail' style test for an empty queue doesn't
work with a full queue. */
- queue = xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
+ queue = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1)) * sizeof (*queue));
qtail = queue;
- qhead = qend = queue + n_basic_blocks + 2;
+ qhead = qend = queue + n_basic_blocks - (INVALID_BLOCK + 1);
/* Queue the blocks set in the initial mask. Do this in reverse block
number order so that we are more likely for the first round to do
}
}
+ block_accesses = xcalloc (last_basic_block, sizeof (int));
+
/* We clean aux when we remove the initially-enqueued bbs, but we
don't enqueue ENTRY and EXIT initially, so clean them upfront and
unconditionally. */
from GLOBAL_LIVE_AT_START. In the former case, the register
could go away only if it disappeared from GLOBAL_LIVE_AT_START
for one of the successor blocks. By induction, that cannot
- occur. */
+ occur.
+
+ ??? This reasoning doesn't work if we start from non-empty initial
+ GLOBAL_LIVE_AT_START sets. And there are actually two problems:
+ 1) Updating may not terminate (endless oscillation).
+ 2) Even if it does (and it usually does), the resulting information
+ may be inaccurate. Consider for example the following case:
+
+ a = ...;
+ while (...) {...} -- 'a' not mentioned at all
+ ... = a;
+
+ If the use of 'a' is deleted between two calculations of liveness
+ information and the initial sets are not cleared, the information
+ about a's liveness will get stuck inside the loop and the set will
+ appear not to be dead.
+
+ We do not attempt to solve 2) -- the information is conservatively
+ correct (i.e. we never claim that something live is dead) and the
+ amount of optimization opportunities missed due to this problem is
+ not significant.
+
+ 1) is more serious. In order to fix it, we monitor the number of times
+ each block is processed. Once one of the blocks has been processed more
+ times than the maximum number of rounds, we use the following strategy:
+  When a register disappears from one of the sets, we add it to a MADE_DEAD
+ set, remove all registers in this set from all GLOBAL_LIVE_AT_* sets and
+ add the blocks with changed sets into the queue. Thus we are guaranteed
+ to terminate (the worst case corresponds to all registers in MADE_DEAD,
+ in which case the original reasoning above is valid), but in general we
+ only fix up a few offending registers.
+
+ The maximum number of rounds for computing liveness is the largest of
+ MAX_LIVENESS_ROUNDS and the latest loop depth count for this function. */
+
while (qhead != qtail)
{
int rescan, changed;
qhead = queue;
bb->aux = NULL;
+ /* Should we start using the failure strategy? */
+ if (bb != ENTRY_BLOCK_PTR)
+ {
+ int max_liveness_rounds =
+ MAX (MAX_LIVENESS_ROUNDS, cfun->max_loop_depth);
+
+ block_accesses[bb->index]++;
+ if (block_accesses[bb->index] > max_liveness_rounds)
+ failure_strategy_required = true;
+ }
+
/* Begin by propagating live_at_start from the successor blocks. */
CLEAR_REG_SET (new_live_at_end);
}
/* On our first pass through this block, we'll go ahead and continue.
- Recognize first pass by local_set NULL. On subsequent passes, we
- get to skip out early if live_at_end wouldn't have changed. */
+ Recognize first pass by checking if local_set is NULL for this
+ basic block. On subsequent passes, we get to skip out early if
+ live_at_end wouldn't have changed. */
- if (bb->local_set == NULL)
+ if (local_sets[bb->index - (INVALID_BLOCK + 1)] == NULL)
{
- bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
- bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ local_sets[bb->index - (INVALID_BLOCK + 1)]
+ = ALLOC_REG_SET (®_obstack);
+ cond_local_sets[bb->index - (INVALID_BLOCK + 1)]
+ = ALLOC_REG_SET (®_obstack);
rescan = 1;
}
else
new_live_at_end);
if (!rescan)
- /* If any of the registers in the new live_at_end set are
- conditionally set in this basic block, we must rescan.
- This is because conditional lifetimes at the end of the
- block do not just take the live_at_end set into
- account, but also the liveness at the start of each
- successor block. We can miss changes in those sets if
- we only compare the new live_at_end against the
- previous one. */
- rescan = bitmap_intersect_p (new_live_at_end,
- bb->cond_local_set);
+ {
+ regset cond_local_set;
+
+ /* If any of the registers in the new live_at_end set are
+ conditionally set in this basic block, we must rescan.
+ This is because conditional lifetimes at the end of the
+ block do not just take the live_at_end set into
+ account, but also the liveness at the start of each
+ successor block. We can miss changes in those sets if
+ we only compare the new live_at_end against the
+ previous one. */
+ cond_local_set = cond_local_sets[bb->index - (INVALID_BLOCK + 1)];
+ rescan = bitmap_intersect_p (new_live_at_end, cond_local_set);
+ }
if (!rescan)
{
+ regset local_set;
+
/* Find the set of changed bits. Take this opportunity
to notice that this set is empty and early out. */
bitmap_xor (tmp, bb->global_live_at_end, new_live_at_end);
if (bitmap_empty_p (tmp))
continue;
- /* If any of the changed bits overlap with local_set,
+ /* If any of the changed bits overlap with local_sets[bb],
we'll have to rescan the block. */
- rescan = bitmap_intersect_p (tmp, bb->local_set);
+ local_set = local_sets[bb->index - (INVALID_BLOCK + 1)];
+ rescan = bitmap_intersect_p (tmp, local_set);
}
}
/* Rescan the block insn by insn to turn (a copy of) live_at_end
into live_at_start. */
- propagate_block (bb, new_live_at_end, bb->local_set,
- bb->cond_local_set, flags);
+ propagate_block (bb, new_live_at_end,
+ local_sets[bb->index - (INVALID_BLOCK + 1)],
+ cond_local_sets[bb->index - (INVALID_BLOCK + 1)],
+ flags);
/* If live_at start didn't change, no need to go farther. */
if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
continue;
+ if (failure_strategy_required)
+ {
+ /* Get the list of registers that were removed from the
+ bb->global_live_at_start set. */
+ bitmap_and_compl (tmp, bb->global_live_at_start,
+ new_live_at_end);
+ if (!bitmap_empty_p (tmp))
+ {
+ bool pbb_changed;
+ basic_block pbb;
+
+                 /* It should not happen that one of the registers we have
+                    removed last time disappears again before any other
+                    register does. */
+ pbb_changed = bitmap_ior_into (registers_made_dead, tmp);
+ gcc_assert (pbb_changed);
+
+ /* Now remove the registers from all sets. */
+ FOR_EACH_BB (pbb)
+ {
+ pbb_changed = false;
+
+ pbb_changed
+ |= bitmap_and_compl_into (pbb->global_live_at_start,
+ registers_made_dead);
+ pbb_changed
+ |= bitmap_and_compl_into (pbb->global_live_at_end,
+ registers_made_dead);
+ if (!pbb_changed)
+ continue;
+
+ /* Note the (possible) change. */
+ if (blocks_out)
+ SET_BIT (blocks_out, pbb->index);
+
+                         /* Make sure to really rescan the block. */
+ if (local_sets[pbb->index - (INVALID_BLOCK + 1)])
+ {
+ FREE_REG_SET (local_sets[pbb->index - (INVALID_BLOCK + 1)]);
+ FREE_REG_SET (cond_local_sets[pbb->index - (INVALID_BLOCK + 1)]);
+ local_sets[pbb->index - (INVALID_BLOCK + 1)] = 0;
+ }
+
+ /* Add it to the queue. */
+ if (pbb->aux == NULL)
+ {
+ *qtail++ = pbb;
+ if (qtail == qend)
+ qtail = queue;
+ pbb->aux = pbb;
+ }
+ }
+ continue;
+ }
+ } /* end of failure_strategy_required */
+
COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
}
FREE_REG_SET (tmp);
FREE_REG_SET (new_live_at_end);
FREE_REG_SET (invalidated_by_call);
+ FREE_REG_SET (registers_made_dead);
if (blocks_out)
{
EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
{
basic_block bb = BASIC_BLOCK (i);
- FREE_REG_SET (bb->local_set);
- FREE_REG_SET (bb->cond_local_set);
+ FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
+ FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
});
}
else
{
FOR_EACH_BB (bb)
{
- FREE_REG_SET (bb->local_set);
- FREE_REG_SET (bb->cond_local_set);
+ FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
+ FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
}
}
+ free (block_accesses);
free (queue);
+ free (cond_local_sets);
+ free (local_sets);
}
\f
{
rtx insn;
edge e;
- int reg, did_something = 0;
+ unsigned reg, did_something = 0;
find_regno_partial_param param;
edge_iterator ei;
/* Subroutines of life analysis. */
/* Allocate the permanent data structures that represent the results
- of life analysis. Not static since used also for stupid life analysis. */
+ of life analysis. */
-void
+static void
allocate_bb_life_data (void)
{
basic_block bb;
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
{
- bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
- bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ bb->global_live_at_start = ALLOC_REG_SET (®_obstack);
+ bb->global_live_at_end = ALLOC_REG_SET (®_obstack);
}
- regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
+ regs_live_at_setjmp = ALLOC_REG_SET (®_obstack);
}
void
int insn_is_dead = 0;
int libcall_is_dead = 0;
rtx note;
- int i;
+ unsigned i;
if (! INSN_P (insn))
return prev;
pbi->cc0_live = 0;
if (libcall_is_dead)
- prev = propagate_block_delete_libcall ( insn, note);
+ prev = propagate_block_delete_libcall (insn, note);
else
{
else
pbi->reg_next_use = NULL;
- pbi->new_set = BITMAP_XMALLOC ();
+ pbi->new_set = BITMAP_ALLOC (NULL);
#ifdef HAVE_conditional_execution
pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
free_reg_cond_life_info);
- pbi->reg_cond_reg = BITMAP_XMALLOC ();
+ pbi->reg_cond_reg = BITMAP_ALLOC (NULL);
/* If this block ends in a conditional branch, for each register
live from one side of the branch and not the other, record the
if (JUMP_P (BB_END (bb))
&& any_condjump_p (BB_END (bb)))
{
- regset_head diff_head;
- regset diff = INITIALIZE_REG_SET (diff_head);
+ regset diff = ALLOC_REG_SET (®_obstack);
basic_block bb_true, bb_false;
- int i;
+ unsigned i;
/* Identify the successor blocks. */
bb_true = EDGE_SUCC (bb, 0)->dest;
- if (EDGE_COUNT (bb->succs) > 1)
+ if (!single_succ_p (bb))
{
bb_false = EDGE_SUCC (bb, 1)->dest;
}
/* Compute which register lead different lives in the successors. */
- if (bitmap_xor (diff, bb_true->global_live_at_start,
- bb_false->global_live_at_start))
- {
+ bitmap_xor (diff, bb_true->global_live_at_start,
+ bb_false->global_live_at_start);
+
+ if (!bitmap_empty_p (diff))
+ {
/* Extract the condition from the branch. */
rtx set_src = SET_SRC (pc_set (BB_END (bb)));
rtx cond_true = XEXP (set_src, 0);
(TREE_TYPE (current_function_decl))))
&& (flags & PROP_SCAN_DEAD_STORES)
&& (EDGE_COUNT (bb->succs) == 0
- || (EDGE_COUNT (bb->succs) == 1
- && EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR
+ || (single_succ_p (bb)
+ && single_succ (bb) == EXIT_BLOCK_PTR
&& ! current_function_calls_eh_return)))
{
rtx insn, set;
{
free_EXPR_LIST_list (&pbi->mem_set_list);
- BITMAP_XFREE (pbi->new_set);
+ BITMAP_FREE (pbi->new_set);
#ifdef HAVE_conditional_execution
splay_tree_delete (pbi->reg_cond_dead);
- BITMAP_XFREE (pbi->reg_cond_reg);
+ BITMAP_FREE (pbi->reg_cond_reg);
#endif
if (pbi->flags & PROP_REG_INFO)
{
int num = pbi->insn_num;
- int i;
+ unsigned i;
reg_set_iterator rsi;
EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i, rsi)
if (flags & PROP_REG_INFO)
{
- int i;
+ unsigned i;
reg_set_iterator rsi;
/* Process the regs live at the end of the block.
{
rtx r = SET_SRC (x);
- if (REG_P (r))
+ if (REG_P (r) || GET_CODE (r) == SUBREG)
{
rtx call = XEXP (note, 0);
rtx call_pat;
call_pat = XVECEXP (call_pat, 0, i);
}
- return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
+ if (! insn_dead_p (pbi, call_pat, 1, REG_NOTES (call)))
+ return 0;
+
+ while ((insn = PREV_INSN (insn)) != call)
+ {
+ if (! INSN_P (insn))
+ continue;
+ if (! insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn)))
+ return 0;
+ }
+ return 1;
}
}
- return 1;
+ return 0;
}
/* 1 if register REGNO was alive at a place where `setjmp' was called
flags);
return;
- case ZERO_EXTRACT:
case SIGN_EXTRACT:
+ /* SIGN_EXTRACT cannot be an lvalue. */
+ gcc_unreachable ();
+
+ case ZERO_EXTRACT:
case STRICT_LOW_PART:
/* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
do
reg = XEXP (reg, 0);
while (GET_CODE (reg) == SUBREG
|| GET_CODE (reg) == ZERO_EXTRACT
- || GET_CODE (reg) == SIGN_EXTRACT
|| GET_CODE (reg) == STRICT_LOW_PART);
if (MEM_P (reg))
break;
invalidate_mems_from_set (pbi, reg);
/* If the memory reference had embedded side effects (autoincrement
- address modes. Then we may need to kill some entries on the
+ address modes) then we may need to kill some entries on the
memory set list. */
if (insn && MEM_P (reg))
for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
/* Otherwise this is a conditional set. Record that fact.
It may have been conditionally used, or there may be a
- subsequent set with a complimentary condition. */
+ subsequent set with a complementary condition. */
node = splay_tree_lookup (pbi->reg_cond_dead, regno);
if (node == NULL)
then this SET is not needed. */
while (GET_CODE (testreg) == STRICT_LOW_PART
|| GET_CODE (testreg) == ZERO_EXTRACT
- || GET_CODE (testreg) == SIGN_EXTRACT
|| GET_CODE (testreg) == SUBREG)
{
#ifdef CANNOT_CHANGE_MODE_CLASS
void
dump_regset (regset r, FILE *outf)
{
- int i;
+ unsigned i;
reg_set_iterator rsi;
if (r == NULL)
register allocators to prioritize pseudos for allocation to hard regs.
More accurate reference counts generally lead to better register allocation.
- F is the first insn to be scanned.
-
- LOOP_STEP denotes how much loop_depth should be incremented per
- loop nesting level in order to increase the ref count more for
- references in a loop.
-
It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
possibly other information which is used by the register allocators. */
void
-recompute_reg_usage (rtx f ATTRIBUTE_UNUSED, int loop_step ATTRIBUTE_UNUSED)
+recompute_reg_usage (void)
{
allocate_reg_life_data ();
/* distribute_notes in combiner fails to convert some of the REG_UNUSED notes
void
reg_set_to_hard_reg_set (HARD_REG_SET *to, bitmap from)
{
- int i;
+ unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)