/* Data flow analysis for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
+ Inc.
This file is part of GCC.
/* Indexed by n, giving various register information */
-varray_type reg_n_info;
+VEC(reg_info_p,heap) *reg_n_info;
/* Regset of regs live when calls to `setjmp'-like functions happen. */
/* ??? Does this exist only for the setjmp-clobbered warning message? */
FLAGS is a set of PROP_* flags to be used in accumulating flow info. */
void
-life_analysis (FILE *file, int flags)
+life_analysis (int flags)
{
#ifdef ELIMINABLE_REGS
int i;
if (optimize && (flags & PROP_SCAN_DEAD_STORES))
end_alias_analysis ();
- if (file)
- dump_flow_info (file);
+ if (dump_file)
+ dump_flow_info (dump_file, dump_flags);
/* Removing dead insns should have made jumptables really dead. */
delete_dead_jumptables ();
ndead = 0;
if ((prop_flags & PROP_REG_INFO) && !reg_deaths)
- reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
+ reg_deaths = XCNEWVEC (int, max_regno);
timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
? TV_LIFE_UPDATE : TV_LIFE);
/* If asked, remove notes from the blocks we'll update. */
if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
- count_or_remove_death_notes (blocks, 1);
+ count_or_remove_death_notes (blocks,
+ prop_flags & PROP_POST_REGSTACK ? -1 : 1);
}
/* Clear log links in case we are asked to (re)compute them. */
EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
{
bb = BASIC_BLOCK (i);
-
- COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
- propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
-
- if (extent == UPDATE_LIFE_LOCAL)
- verify_local_live_at_start (tmp, bb);
+ if (bb)
+ {
+ /* The bitmap may be flawed in that one of the basic
+ blocks may have been deleted before you get here. */
+ COPY_REG_SET (tmp, bb->il.rtl->global_live_at_end);
+ propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);
+
+ if (extent == UPDATE_LIFE_LOCAL)
+ verify_local_live_at_start (tmp, bb);
+ }
};
}
else
}
}
}
+
if (nnoops && dump_file)
- fprintf (dump_file, "deleted %i noop moves", nnoops);
+ fprintf (dump_file, "deleted %i noop moves\n", nnoops);
+
return nnoops;
}
SET_REGNO_REG_SET (invalidated_by_call, i);
/* Allocate space for the sets of local properties. */
- local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
- sizeof (regset));
- cond_local_sets = xcalloc (last_basic_block - (INVALID_BLOCK + 1),
- sizeof (regset));
-
- /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
- because the `head == tail' style test for an empty queue doesn't
- work with a full queue. */
- queue = xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1)) * sizeof (*queue));
+ local_sets = XCNEWVEC (bitmap, last_basic_block);
+ cond_local_sets = XCNEWVEC (bitmap, last_basic_block);
+
+ /* Create a worklist. Allocate an extra slot because the `head == tail'
+ style test for an empty queue doesn't work with a full queue. */
+ queue = XNEWVEC (basic_block, n_basic_blocks + 1);
qtail = queue;
- qhead = qend = queue + n_basic_blocks - (INVALID_BLOCK + 1);
+ qhead = qend = queue + n_basic_blocks;
/* Queue the blocks set in the initial mask. Do this in reverse block
number order so that we are more likely for the first round to do
}
}
- block_accesses = xcalloc (last_basic_block, sizeof (int));
+ block_accesses = XCNEWVEC (int, last_basic_block);
/* We clean aux when we remove the initially-enqueued bbs, but we
don't enqueue ENTRY and EXIT initially, so clean them upfront and
basic block. On subsequent passes, we get to skip out early if
live_at_end wouldn't have changed. */
- if (local_sets[bb->index - (INVALID_BLOCK + 1)] == NULL)
+ if (local_sets[bb->index] == NULL)
{
- local_sets[bb->index - (INVALID_BLOCK + 1)]
- = ALLOC_REG_SET (®_obstack);
- cond_local_sets[bb->index - (INVALID_BLOCK + 1)]
- = ALLOC_REG_SET (®_obstack);
+ local_sets[bb->index] = ALLOC_REG_SET (®_obstack);
+ cond_local_sets[bb->index] = ALLOC_REG_SET (®_obstack);
rescan = 1;
}
else
successor block. We can miss changes in those sets if
we only compare the new live_at_end against the
previous one. */
- cond_local_set = cond_local_sets[bb->index - (INVALID_BLOCK + 1)];
+ cond_local_set = cond_local_sets[bb->index];
rescan = bitmap_intersect_p (new_live_at_end, cond_local_set);
}
/* If any of the changed bits overlap with local_sets[bb],
we'll have to rescan the block. */
- local_set = local_sets[bb->index - (INVALID_BLOCK + 1)];
+ local_set = local_sets[bb->index];
rescan = bitmap_intersect_p (tmp, local_set);
}
}
/* Rescan the block insn by insn to turn (a copy of) live_at_end
into live_at_start. */
propagate_block (bb, new_live_at_end,
- local_sets[bb->index - (INVALID_BLOCK + 1)],
- cond_local_sets[bb->index - (INVALID_BLOCK + 1)],
+ local_sets[bb->index],
+ cond_local_sets[bb->index],
flags);
/* If live_at start didn't change, no need to go farther. */
SET_BIT (blocks_out, pbb->index);
/* Makes sure to really rescan the block. */
- if (local_sets[pbb->index - (INVALID_BLOCK + 1)])
+ if (local_sets[pbb->index])
{
- FREE_REG_SET (local_sets[pbb->index - (INVALID_BLOCK + 1)]);
- FREE_REG_SET (cond_local_sets[pbb->index - (INVALID_BLOCK + 1)]);
- local_sets[pbb->index - (INVALID_BLOCK + 1)] = 0;
+ FREE_REG_SET (local_sets[pbb->index]);
+ FREE_REG_SET (cond_local_sets[pbb->index]);
+ local_sets[pbb->index] = 0;
}
/* Add it to the queue. */
FOR_EACH_EDGE (e, ei, bb->preds)
{
basic_block pb = e->src;
+
+ gcc_assert ((e->flags & EDGE_FAKE) == 0);
+
if (pb->aux == NULL)
{
*qtail++ = pb;
EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i, sbi)
{
basic_block bb = BASIC_BLOCK (i);
- FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
- FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
+ FREE_REG_SET (local_sets[bb->index]);
+ FREE_REG_SET (cond_local_sets[bb->index]);
};
}
else
{
FOR_EACH_BB (bb)
{
- FREE_REG_SET (local_sets[bb->index - (INVALID_BLOCK + 1)]);
- FREE_REG_SET (cond_local_sets[bb->index - (INVALID_BLOCK + 1)]);
+ FREE_REG_SET (local_sets[bb->index]);
+ FREE_REG_SET (cond_local_sets[bb->index]);
}
}
registers whose value is unknown, and may contain some kind of sticky
bits we don't want. */
-int
+static int
initialize_uninitialized_subregs (void)
{
rtx insn;
max_regno = max_reg_num ();
gcc_assert (!reg_deaths);
- reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
+ reg_deaths = XCNEWVEC (int, max_regno);
/* Recalculate the register space, in case it has grown. Old style
vector oriented regsets would set regset_{size,bytes} here also. */
init_propagate_block_info (basic_block bb, regset live, regset local_set,
regset cond_local_set, int flags)
{
- struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
+ struct propagate_block_info *pbi = XNEW (struct propagate_block_info);
pbi->bb = bb;
pbi->reg_live = live;
pbi->insn_num = 0;
if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
- pbi->reg_next_use = xcalloc (max_reg_num (), sizeof (rtx));
+ pbi->reg_next_use = XCNEWVEC (rtx, max_reg_num ());
else
pbi->reg_next_use = NULL;
struct reg_cond_life_info *rcli;
rtx cond;
- rcli = xmalloc (sizeof (*rcli));
+ rcli = XNEW (struct reg_cond_life_info);
if (REGNO_REG_SET_P (bb_true->il.rtl->global_live_at_start,
i))
int
regno_clobbered_at_setjmp (int regno)
{
- if (n_basic_blocks == 0)
+ if (n_basic_blocks == NUM_FIXED_BLOCKS)
return 0;
return ((REG_N_SETS (regno) > 1
if (flags & PROP_REG_INFO)
REG_N_DEATHS (regno_first) += 1;
- if (flags & PROP_DEATH_NOTES)
+ if (flags & PROP_DEATH_NOTES
+#ifdef STACK_REGS
+ && (!(flags & PROP_POST_REGSTACK)
+ || !IN_RANGE (REGNO (reg), FIRST_STACK_REG,
+ LAST_STACK_REG))
+#endif
+ )
{
/* Note that dead stores have already been deleted
when possible. If we get here, we have found a
}
else
{
- if (flags & PROP_DEATH_NOTES)
+ if (flags & PROP_DEATH_NOTES
+#ifdef STACK_REGS
+ && (!(flags & PROP_POST_REGSTACK)
+ || !IN_RANGE (REGNO (reg), FIRST_STACK_REG,
+ LAST_STACK_REG))
+#endif
+ )
{
/* This is a case where we have a multi-word hard register
and some, but not all, of the words of the register are
here and count it. */
else if (GET_CODE (reg) == SCRATCH)
{
- if (flags & PROP_DEATH_NOTES)
+ if (flags & PROP_DEATH_NOTES
+#ifdef STACK_REGS
+ && (!(flags & PROP_POST_REGSTACK)
+ || !IN_RANGE (REGNO (reg), FIRST_STACK_REG, LAST_STACK_REG))
+#endif
+ )
REG_NOTES (insn)
= alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
}
/* The register was unconditionally live previously.
Record the current condition as the condition under
which it is dead. */
- rcli = xmalloc (sizeof (*rcli));
+ rcli = XNEW (struct reg_cond_life_info);
rcli->condition = cond;
rcli->stores = cond;
rcli->orig_condition = const0_rtx;
if (! some_was_live)
{
if ((pbi->flags & PROP_DEATH_NOTES)
+#ifdef STACK_REGS
+ && (!(pbi->flags & PROP_POST_REGSTACK)
+ || !IN_RANGE (REGNO (reg), FIRST_STACK_REG, LAST_STACK_REG))
+#endif
&& ! find_regno_note (insn, REG_DEAD, regno_first))
REG_NOTES (insn)
= alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
{
/* The register was not previously live at all. Record
the condition under which it is still dead. */
- rcli = xmalloc (sizeof (*rcli));
+ rcli = XNEW (struct reg_cond_life_info);
rcli->condition = not_reg_cond (cond);
rcli->stores = const0_rtx;
rcli->orig_condition = const0_rtx;
}
}
+/* Scan expression X for registers which have to be marked used in PBI.
+ X is considered to be the SET_DEST rtx of SET. TRUE is returned if
+ X could be handled by this function; if FALSE is returned, the caller
+ is expected to fall back to its generic handling of X. */
+
+static bool
+mark_used_dest_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
+{
+ int regno;
+ bool mark_dest = false;
+ /* Remember the original destination; X itself is stripped of
+ wrappers below. */
+ rtx dest = x;
+
+ /* On some platforms calls return values spread over several
+ locations. These locations are wrapped in an EXPR_LIST rtx
+ together with a CONST_INT offset. */
+ if (GET_CODE (x) == EXPR_LIST
+ && GET_CODE (XEXP (x, 1)) == CONST_INT)
+ x = XEXP (x, 0);
+
+ /* An empty slot (e.g. in such a return-value EXPR_LIST) needs no
+ marking. */
+ if (x == NULL_RTX)
+ return false;
+
+ /* If storing into MEM, don't show it as being used. But do
+ show the address as being used. */
+ if (MEM_P (x))
+ {
+#ifdef AUTO_INC_DEC
+ if (pbi->flags & PROP_AUTOINC)
+ find_auto_inc (pbi, x, insn);
+#endif
+ mark_used_regs (pbi, XEXP (x, 0), cond, insn);
+ return true;
+ }
+
+ /* Storing in STRICT_LOW_PART is like storing in a reg
+ in that this SET might be dead, so ignore it in X,
+ but in some other ways it is like using the reg.
+
+ Storing in a SUBREG or a bit field is like storing the entire
+ register in that if the register's value is not used
+ then this SET is not needed. */
+ while (GET_CODE (x) == STRICT_LOW_PART
+ || GET_CODE (x) == ZERO_EXTRACT
+ || GET_CODE (x) == SUBREG)
+ {
+#ifdef CANNOT_CHANGE_MODE_CLASS
+ if ((pbi->flags & PROP_REG_INFO) && GET_CODE (x) == SUBREG)
+ record_subregs_of_mode (x);
+#endif
+
+ /* Modifying a single register in an alternate mode
+ does not use any of the old value. But these other
+ ways of storing in a register do use the old value. */
+ if (GET_CODE (x) == SUBREG
+ && !((REG_BYTES (SUBREG_REG (x))
+ + UNITS_PER_WORD - 1) / UNITS_PER_WORD
+ > (REG_BYTES (x)
+ + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
+ ;
+ else
+ mark_dest = true;
+
+ x = XEXP (x, 0);
+ }
+
+ /* If this is a store into a register or group of registers,
+ recursively scan the value being stored. Stores to a frame or
+ argument pointer that is still in use are left to the caller's
+ generic handling (FALSE below). */
+ if (REG_P (x)
+ && (regno = REGNO (x),
+ !(regno == FRAME_POINTER_REGNUM
+ && (!reload_completed || frame_pointer_needed)))
+#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ && !(regno == HARD_FRAME_POINTER_REGNUM
+ && (!reload_completed || frame_pointer_needed))
+#endif
+#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
+ && !(regno == ARG_POINTER_REGNUM && fixed_regs[regno])
+#endif
+ )
+ {
+ /* A SUBREG / STRICT_LOW_PART / ZERO_EXTRACT store also uses
+ part of the old value, so mark the original destination
+ (including any wrappers) as used. */
+ if (mark_dest)
+ mark_used_regs (pbi, dest, cond, insn);
+ return true;
+ }
+ return false;
+}
+
/* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
This is done assuming the registers needed from X are those that
have 1-bits in PBI->REG_LIVE.
mark_used_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
{
RTX_CODE code;
- int regno;
int flags = pbi->flags;
retry:
case SET:
{
- rtx testreg = SET_DEST (x);
- int mark_dest = 0;
-
- /* If storing into MEM, don't show it as being used. But do
- show the address as being used. */
- if (MEM_P (testreg))
- {
-#ifdef AUTO_INC_DEC
- if (flags & PROP_AUTOINC)
- find_auto_inc (pbi, testreg, insn);
-#endif
- mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
- mark_used_regs (pbi, SET_SRC (x), cond, insn);
- return;
- }
-
- /* Storing in STRICT_LOW_PART is like storing in a reg
- in that this SET might be dead, so ignore it in TESTREG.
- but in some other ways it is like using the reg.
-
- Storing in a SUBREG or a bit field is like storing the entire
- register in that if the register's value is not used
- then this SET is not needed. */
- while (GET_CODE (testreg) == STRICT_LOW_PART
- || GET_CODE (testreg) == ZERO_EXTRACT
- || GET_CODE (testreg) == SUBREG)
- {
-#ifdef CANNOT_CHANGE_MODE_CLASS
- if ((flags & PROP_REG_INFO) && GET_CODE (testreg) == SUBREG)
- record_subregs_of_mode (testreg);
-#endif
-
- /* Modifying a single register in an alternate mode
- does not use any of the old value. But these other
- ways of storing in a register do use the old value. */
- if (GET_CODE (testreg) == SUBREG
- && !((REG_BYTES (SUBREG_REG (testreg))
- + UNITS_PER_WORD - 1) / UNITS_PER_WORD
- > (REG_BYTES (testreg)
- + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
- ;
- else
- mark_dest = 1;
-
- testreg = XEXP (testreg, 0);
- }
-
- /* If this is a store into a register or group of registers,
- recursively scan the value being stored. */
-
- if ((GET_CODE (testreg) == PARALLEL
- && GET_MODE (testreg) == BLKmode)
- || (REG_P (testreg)
- && (regno = REGNO (testreg),
- ! (regno == FRAME_POINTER_REGNUM
- && (! reload_completed || frame_pointer_needed)))
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
- && ! (regno == HARD_FRAME_POINTER_REGNUM
- && (! reload_completed || frame_pointer_needed))
-#endif
-#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
- && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
-#endif
- ))
+ rtx dest = SET_DEST (x);
+ int i;
+ bool ret = false;
+
+ if (GET_CODE (dest) == PARALLEL)
+ for (i = 0; i < XVECLEN (dest, 0); i++)
+ ret |= mark_used_dest_regs (pbi, XVECEXP (dest, 0, i), cond, insn);
+ else
+ ret = mark_used_dest_regs (pbi, dest, cond, insn);
+
+ if (ret)
{
- if (mark_dest)
- mark_used_regs (pbi, SET_DEST (x), cond, insn);
mark_used_regs (pbi, SET_SRC (x), cond, insn);
return;
}
It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
possibly other information which is used by the register allocators. */
-void
+static unsigned int
recompute_reg_usage (void)
{
allocate_reg_life_data ();
update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO | PROP_DEATH_NOTES);
if (dump_file)
- dump_flow_info (dump_file);
+ dump_flow_info (dump_file, dump_flags);
+ return 0;
}
struct tree_opt_pass pass_recompute_reg_usage =
/* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
blocks. If BLOCKS is NULL, assume the universal set. Returns a count
- of the number of registers that died. */
+ of the number of registers that died.
+ If KILL is 1, remove old REG_DEAD / REG_UNUSED notes. If it is 0, don't.
+ If it is -1, remove them unless they pertain to a stack reg. */
int
count_or_remove_death_notes (sbitmap blocks, int kill)
EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i, sbi)
{
- count += count_or_remove_death_notes_bb (BASIC_BLOCK (i), kill);
+ basic_block bb = BASIC_BLOCK (i);
+ /* The bitmap may be flawed in that one of the basic blocks
+ may have been deleted before you get here. */
+ if (bb)
+ count += count_or_remove_death_notes_bb (bb, kill);
};
}
else
/* Fall through. */
case REG_UNUSED:
- if (kill)
+ if (kill > 0
+ || (kill
+#ifdef STACK_REGS
+ && (!REG_P (XEXP (link, 0))
+ || !IN_RANGE (REGNO (XEXP (link, 0)),
+ FIRST_STACK_REG, LAST_STACK_REG))
+#endif
+ ))
{
rtx next = XEXP (link, 1);
free_EXPR_LIST_node (link);
return flag_profile_values;
}
-static void
+static unsigned int
rest_of_handle_remove_death_notes (void)
{
count_or_remove_death_notes (NULL, 1);
+ return 0;
}
struct tree_opt_pass pass_remove_death_notes =
};
/* Perform life analysis. */
-static void
+static unsigned int
rest_of_handle_life (void)
{
regclass_init ();
- life_analysis (dump_file, PROP_FINAL);
+ life_analysis (PROP_FINAL);
if (optimize)
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE | CLEANUP_LOG_LINKS
| (flag_thread_jumps ? CLEANUP_THREADING : 0));
}
no_new_pseudos = 1;
+ return 0;
}
struct tree_opt_pass pass_life =
'f' /* letter */
};
-static void
+static unsigned int
rest_of_handle_flow2 (void)
{
/* If optimizing, then go ahead and split insns now. */
thread_prologue_and_epilogue_insns (get_insns ());
epilogue_completed = 1;
flow2_completed = 1;
+ return 0;
}
struct tree_opt_pass pass_flow2 =