/* Standard problems for dataflow support routines.
- Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
- Free Software Foundation, Inc.
+ Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
+ 2008, 2009 Free Software Foundation, Inc.
Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
{
gcc_assert (df_lr);
- if (df_urec)
- return DF_RA_LIVE_OUT (bb);
- else if (df_live)
+ if (df_live)
return DF_LIVE_OUT (bb);
else
return DF_LR_OUT (bb);
{
gcc_assert (df_lr);
- if (df_urec)
- return DF_RA_LIVE_IN (bb);
- else if (df_live)
+ if (df_live)
return DF_LIVE_IN (bb);
else
return DF_LR_IN (bb);
}
-/* Get the live at top set for BB no matter what problem happens to be
- defined. This function is used by the register allocators who
- choose different dataflow problems depending on the optimization
- level. */
-
-bitmap
-df_get_live_top (basic_block bb)
-{
- gcc_assert (df_lr);
-
- if (df_urec)
- return DF_RA_LIVE_TOP (bb);
- else
- return DF_LR_TOP (bb);
-}
-
-
/*----------------------------------------------------------------------------
Utility functions.
----------------------------------------------------------------------------*/
if (dflow->block_info_size < new_size)
{
new_size += new_size / 4;
- dflow->block_info = xrealloc (dflow->block_info,
- new_size *sizeof (void*));
+ dflow->block_info = XRESIZEVEC (void *, dflow->block_info, new_size);
memset (dflow->block_info + dflow->block_info_size, 0,
(new_size - dflow->block_info_size) *sizeof (void *));
dflow->block_info_size = new_size;
DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
DF_REF_ID (link->ref),
DF_REF_BBNO (link->ref),
- DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
+ DF_REF_IS_ARTIFICIAL (link->ref) ? -1 : DF_REF_INSN_UID (link->ref));
}
fprintf (file, "}");
}
\f
/*----------------------------------------------------------------------------
- REACHING USES
+ REACHING DEFINITIONS
- Find the locations in the function where each use site for a pseudo
- can reach backwards. In and out bitvectors are built for each basic
+ Find the locations in the function where each definition site for a
+ pseudo reaches. In and out bitvectors are built for each basic
block. The id field in the ref is used to index into these sets.
See df.h for details.
-
-----------------------------------------------------------------------------*/
+ ----------------------------------------------------------------------------*/
/* This problem plays a large number of games for the sake of
efficiency.
1) The order of the bits in the bitvectors. After the scanning
- phase, all of the uses are sorted. All of the uses for the reg 0
- are first, followed by all uses for reg 1 and so on.
+ phase, all of the defs are sorted. All of the defs for reg 0
+ are first, followed by all defs for reg 1 and so on.
- 2) There are two kill sets, one if the number of uses is less or
- equal to DF_SPARSE_THRESHOLD and another if it is greater.
+ 2) There are two kill sets, one if the number of defs is less
+ than or equal to DF_SPARSE_THRESHOLD and another if the number
+ of defs is greater.
<= : Data is built directly in the kill set.
/* Private data used to compute the solution for this problem. These
data structures are not accessible outside of this module. */
-struct df_ru_problem_data
+struct df_rd_problem_data
{
/* The set of defs to regs invalidated by call. */
bitmap sparse_invalidated_by_call;
- /* The set of defs to regs invalidated by call for ru. */
+ /* The set of defs to regs invalidated by call for rd. */
bitmap dense_invalidated_by_call;
/* An obstack for the bitmaps we need for this problem. */
- bitmap_obstack ru_bitmaps;
+ bitmap_obstack rd_bitmaps;
};
/* Set basic block info. */
static void
-df_ru_set_bb_info (unsigned int index, struct df_ru_bb_info *bb_info)
+df_rd_set_bb_info (unsigned int index,
+ struct df_rd_bb_info *bb_info)
{
- gcc_assert (df_ru);
- gcc_assert (index < df_ru->block_info_size);
- df_ru->block_info[index] = bb_info;
+ gcc_assert (df_rd);
+ gcc_assert (index < df_rd->block_info_size);
+ df_rd->block_info[index] = bb_info;
}
/* Free basic block info. */
static void
-df_ru_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
+df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
- struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
+ struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
if (bb_info)
{
BITMAP_FREE (bb_info->kill);
BITMAP_FREE (bb_info->gen);
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
- pool_free (df_ru->block_pool, bb_info);
+ pool_free (df_rd->block_pool, bb_info);
}
}
-/* Allocate or reset bitmaps for DF_RU blocks. The solution bits are
+/* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
not touched unless the block is new. */
static void
-df_ru_alloc (bitmap all_blocks)
+df_rd_alloc (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
- struct df_ru_problem_data *problem_data;
+ struct df_rd_problem_data *problem_data;
- if (!df_ru->block_pool)
- df_ru->block_pool = create_alloc_pool ("df_ru_block pool",
- sizeof (struct df_ru_bb_info), 50);
+ if (!df_rd->block_pool)
+ df_rd->block_pool = create_alloc_pool ("df_rd_block pool",
+ sizeof (struct df_rd_bb_info), 50);
- if (df_ru->problem_data)
+ if (df_rd->problem_data)
{
- problem_data = (struct df_ru_problem_data *) df_ru->problem_data;
+ problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
bitmap_clear (problem_data->sparse_invalidated_by_call);
bitmap_clear (problem_data->dense_invalidated_by_call);
}
else
{
- problem_data = XNEW (struct df_ru_problem_data);
- df_ru->problem_data = problem_data;
+ problem_data = XNEW (struct df_rd_problem_data);
+ df_rd->problem_data = problem_data;
- bitmap_obstack_initialize (&problem_data->ru_bitmaps);
+ bitmap_obstack_initialize (&problem_data->rd_bitmaps);
problem_data->sparse_invalidated_by_call
- = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+ = BITMAP_ALLOC (&problem_data->rd_bitmaps);
problem_data->dense_invalidated_by_call
- = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+ = BITMAP_ALLOC (&problem_data->rd_bitmaps);
}
- df_grow_bb_info (df_ru);
+ df_grow_bb_info (df_rd);
- /* Because of the clustering of all def sites for the same pseudo,
+ /* Because of the clustering of all use sites for the same pseudo,
we have to process all of the blocks before doing the
analysis. */
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
if (bb_info)
{
bitmap_clear (bb_info->kill);
}
else
{
- bb_info = (struct df_ru_bb_info *) pool_alloc (df_ru->block_pool);
- df_ru_set_bb_info (bb_index, bb_info);
- bb_info->kill = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- bb_info->sparse_kill = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- bb_info->gen = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- bb_info->in = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- bb_info->out = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+ bb_info = (struct df_rd_bb_info *) pool_alloc (df_rd->block_pool);
+ df_rd_set_bb_info (bb_index, bb_info);
+ bb_info->kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ bb_info->sparse_kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ bb_info->gen = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ bb_info->in = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ bb_info->out = BITMAP_ALLOC (&problem_data->rd_bitmaps);
}
}
- df_ru->optional_p = true;
+ df_rd->optional_p = true;
}
-/* Process a list of DEFs for df_ru_bb_local_compute. */
+/* Process a list of DEFs for df_rd_bb_local_compute. */
static void
-df_ru_bb_local_compute_process_def (struct df_ru_bb_info *bb_info,
- struct df_ref **def_rec,
+df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
+ df_ref *def_rec,
enum df_ref_flags top_flag)
{
while (*def_rec)
{
- struct df_ref *def = *def_rec;
- if ((top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
- /* If the def is to only part of the reg, it is as if it did
- not happen, since some of the bits may get thru. */
- && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
+ df_ref def = *def_rec;
+ if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
{
unsigned int regno = DF_REF_REGNO (def);
- unsigned int begin = DF_USES_BEGIN (regno);
- unsigned int n_uses = DF_USES_COUNT (regno);
-
- if (!bitmap_bit_p (seen_in_block, regno))
+ unsigned int begin = DF_DEFS_BEGIN (regno);
+ unsigned int n_defs = DF_DEFS_COUNT (regno);
+
+ if ((!(df->changeable_flags & DF_NO_HARD_REGS))
+ || (regno >= FIRST_PSEUDO_REGISTER))
{
- /* The first def for regno in the insn, causes the kill
- info to be generated. Do not modify the gen set
- because the only values in it are the uses from here
- to the top of the block and this def does not effect
- them. */
- if (!bitmap_bit_p (seen_in_insn, regno))
+ /* Only the last def(s) for a regno in the block have any
+ effect. */
+ if (!bitmap_bit_p (seen_in_block, regno))
{
- if (n_uses > DF_SPARSE_THRESHOLD)
- bitmap_set_bit (bb_info->sparse_kill, regno);
- else
- bitmap_set_range (bb_info->kill, begin, n_uses);
+ /* The first def for regno in insn gets to knock out the
+ defs from other instructions. */
+ if ((!bitmap_bit_p (seen_in_insn, regno))
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ && (!(DF_REF_FLAGS (def) &
+ (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
+ {
+ if (n_defs > DF_SPARSE_THRESHOLD)
+ {
+ bitmap_set_bit (bb_info->sparse_kill, regno);
+ bitmap_clear_range(bb_info->gen, begin, n_defs);
+ }
+ else
+ {
+ bitmap_set_range (bb_info->kill, begin, n_defs);
+ bitmap_clear_range (bb_info->gen, begin, n_defs);
+ }
+ }
+
+ bitmap_set_bit (seen_in_insn, regno);
+ /* All defs for regno in the instruction may be put into
+ the gen set. */
+ if (!(DF_REF_FLAGS (def)
+ & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
+ bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
}
- bitmap_set_bit (seen_in_insn, regno);
}
}
def_rec++;
}
}
+/* Compute local reaching def info for basic block BB. */
-/* Process a list of USEs for df_ru_bb_local_compute. */
-
-static void
-df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
- struct df_ref **use_rec,
- enum df_ref_flags top_flag)
-{
- while (*use_rec)
- {
- struct df_ref *use = *use_rec;
- if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
- {
- /* Add use to set of gens in this BB unless we have seen a
- def in a previous instruction. */
- unsigned int regno = DF_REF_REGNO (use);
- if (!bitmap_bit_p (seen_in_block, regno))
- bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
- }
- use_rec++;
- }
-}
-
-/* Compute local reaching use (upward exposed use) info for basic
- block BB. USE_INFO->REGS[R] caches the set of uses for register R. */
static void
-df_ru_bb_local_compute (unsigned int bb_index)
+df_rd_bb_local_compute (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK (bb_index);
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
- /* Set when a def for regno is seen. */
bitmap_clear (seen_in_block);
bitmap_clear (seen_in_insn);
-#ifdef EH_USES
- /* Variables defined in the prolog that are used by the exception
- handler. */
- df_ru_bb_local_compute_process_use (bb_info,
- df_get_artificial_uses (bb_index),
- DF_REF_AT_TOP);
-#endif
- df_ru_bb_local_compute_process_def (bb_info,
- df_get_artificial_defs (bb_index),
- DF_REF_AT_TOP);
+ /* Artificials are only hard regs. */
+ if (!(df->changeable_flags & DF_NO_HARD_REGS))
+ df_rd_bb_local_compute_process_def (bb_info,
+ df_get_artificial_defs (bb_index),
+ 0);
- FOR_BB_INSNS (bb, insn)
+ FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
+
if (!INSN_P (insn))
continue;
- df_ru_bb_local_compute_process_use (bb_info,
- DF_INSN_UID_USES (uid), 0);
-
- if (df->changeable_flags & DF_EQ_NOTES)
- df_ru_bb_local_compute_process_use (bb_info,
- DF_INSN_UID_EQ_USES (uid), 0);
-
- df_ru_bb_local_compute_process_def (bb_info,
+ df_rd_bb_local_compute_process_def (bb_info,
DF_INSN_UID_DEFS (uid), 0);
+ /* This complex dance with the two bitmaps is required because
+ instructions can assign twice to the same pseudo. This
+ generally happens with calls that will have one def for the
+ result and another def for the clobber. If only one vector
+ is used and the clobber goes first, the result will be
+ lost. */
bitmap_ior_into (seen_in_block, seen_in_insn);
bitmap_clear (seen_in_insn);
}
- /* Process the hardware registers that are always live. */
- df_ru_bb_local_compute_process_use (bb_info,
- df_get_artificial_uses (bb_index), 0);
-
- df_ru_bb_local_compute_process_def (bb_info,
- df_get_artificial_defs (bb_index), 0);
+ /* Process the artificial defs at the top of the block last since we
+ are going backwards through the block and these are logically at
+ the start. */
+ if (!(df->changeable_flags & DF_NO_HARD_REGS))
+ df_rd_bb_local_compute_process_def (bb_info,
+ df_get_artificial_defs (bb_index),
+ DF_REF_AT_TOP);
}
-/* Compute local reaching use (upward exposed use) info for each basic
- block within BLOCKS. */
+/* Compute local reaching def info for each basic block within BLOCKS. */
+
static void
-df_ru_local_compute (bitmap all_blocks)
+df_rd_local_compute (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
unsigned int regno;
- struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) df_ru->problem_data;
+ struct df_rd_problem_data *problem_data
+ = (struct df_rd_problem_data *) df_rd->problem_data;
bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
df_set_seen ();
- df_maybe_reorganize_use_refs (df->changeable_flags & DF_EQ_NOTES ?
- DF_REF_ORDER_BY_REG_WITH_NOTES : DF_REF_ORDER_BY_REG);
+ df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- df_ru_bb_local_compute (bb_index);
+ df_rd_bb_local_compute (bb_index);
}
/* Set up the knockout bit vectors to be applied across EH_EDGES. */
- EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
+ EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
{
- if (DF_USES_COUNT (regno) > DF_SPARSE_THRESHOLD)
+ if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
bitmap_set_bit (sparse_invalidated, regno);
else
- bitmap_set_range (dense_invalidated,
- DF_USES_BEGIN (regno),
- DF_USES_COUNT (regno));
+ bitmap_set_range (dense_invalidated,
+ DF_DEFS_BEGIN (regno),
+ DF_DEFS_COUNT (regno));
}
-
df_unset_seen ();
}
/* Initialize the solution bit vectors for problem. */
static void
-df_ru_init_solution (bitmap all_blocks)
+df_rd_init_solution (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
- bitmap_copy (bb_info->in, bb_info->gen);
- bitmap_clear (bb_info->out);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
+
+ bitmap_copy (bb_info->out, bb_info->gen);
+ bitmap_clear (bb_info->in);
}
}
-
-/* Out of target gets or of in of source. */
+/* In of target gets or of out of source. */
static void
-df_ru_confluence_n (edge e)
+df_rd_confluence_n (edge e)
{
- bitmap op1 = df_ru_get_bb_info (e->src->index)->out;
- bitmap op2 = df_ru_get_bb_info (e->dest->index)->in;
+ bitmap op1 = df_rd_get_bb_info (e->dest->index)->in;
+ bitmap op2 = df_rd_get_bb_info (e->src->index)->out;
+
+ if (e->flags & EDGE_FAKE)
+ return;
if (e->flags & EDGE_EH)
{
- struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) df_ru->problem_data;
+ struct df_rd_problem_data *problem_data
+ = (struct df_rd_problem_data *) df_rd->problem_data;
bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
bitmap_iterator bi;
bitmap_and_compl_into (tmp, dense_invalidated);
EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
- {
+ {
bitmap_clear_range (tmp,
- DF_USES_BEGIN (regno),
- DF_USES_COUNT (regno));
+ DF_DEFS_BEGIN (regno),
+ DF_DEFS_COUNT (regno));
}
bitmap_ior_into (op1, tmp);
BITMAP_FREE (tmp);
/* Transfer function. */
static bool
-df_ru_transfer_function (int bb_index)
+df_rd_transfer_function (int bb_index)
{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
unsigned int regno;
bitmap_iterator bi;
bitmap in = bb_info->in;
bitmap sparse_kill = bb_info->sparse_kill;
if (bitmap_empty_p (sparse_kill))
- return bitmap_ior_and_compl (in, gen, out, kill);
+ return bitmap_ior_and_compl (out, gen, in, kill);
else
{
- struct df_ru_problem_data *problem_data;
- bitmap tmp;
+ struct df_rd_problem_data *problem_data;
bool changed = false;
+ bitmap tmp;
/* Note that TMP is _not_ a temporary bitmap if we end up replacing
- IN with TMP. Therefore, allocate TMP in the RU bitmaps obstack. */
- problem_data = (struct df_ru_problem_data *) df_ru->problem_data;
- tmp = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+ OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
+ problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
+ tmp = BITMAP_ALLOC (&problem_data->rd_bitmaps);
- bitmap_copy (tmp, out);
+ bitmap_copy (tmp, in);
EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
{
bitmap_clear_range (tmp,
- DF_USES_BEGIN (regno),
- DF_USES_COUNT (regno));
+ DF_DEFS_BEGIN (regno),
+ DF_DEFS_COUNT (regno));
}
bitmap_and_compl_into (tmp, kill);
bitmap_ior_into (tmp, gen);
- changed = !bitmap_equal_p (tmp, in);
+ changed = !bitmap_equal_p (tmp, out);
if (changed)
{
- BITMAP_FREE (in);
- bb_info->in = tmp;
+ BITMAP_FREE (out);
+ bb_info->out = tmp;
}
else
- BITMAP_FREE (tmp);
+ BITMAP_FREE (tmp);
return changed;
}
}
/* Free all storage associated with the problem. */
static void
-df_ru_free (void)
+df_rd_free (void)
{
- unsigned int i;
- struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) df_ru->problem_data;
+ struct df_rd_problem_data *problem_data
+ = (struct df_rd_problem_data *) df_rd->problem_data;
if (problem_data)
{
- for (i = 0; i < df_ru->block_info_size; i++)
- {
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (i);
- if (bb_info)
- {
- BITMAP_FREE (bb_info->kill);
- BITMAP_FREE (bb_info->sparse_kill);
- BITMAP_FREE (bb_info->gen);
- BITMAP_FREE (bb_info->in);
- BITMAP_FREE (bb_info->out);
- }
- }
-
- free_alloc_pool (df_ru->block_pool);
- BITMAP_FREE (problem_data->sparse_invalidated_by_call);
- BITMAP_FREE (problem_data->dense_invalidated_by_call);
- bitmap_obstack_release (&problem_data->ru_bitmaps);
+ free_alloc_pool (df_rd->block_pool);
+ bitmap_obstack_release (&problem_data->rd_bitmaps);
- df_ru->block_info_size = 0;
- free (df_ru->block_info);
- free (df_ru->problem_data);
+ df_rd->block_info_size = 0;
+ free (df_rd->block_info);
+ free (df_rd->problem_data);
}
- free (df_ru);
+ free (df_rd);
}
/* Debugging info. */
static void
-df_ru_start_dump (FILE *file)
+df_rd_start_dump (FILE *file)
{
- struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) df_ru->problem_data;
+ struct df_rd_problem_data *problem_data
+ = (struct df_rd_problem_data *) df_rd->problem_data;
unsigned int m = DF_REG_SIZE(df);
unsigned int regno;
- if (!df_ru->block_info)
+ if (!df_rd->block_info)
return;
- fprintf (file, ";; Reaching uses:\n");
+ fprintf (file, ";; Reaching defs:\n\n");
- fprintf (file, ";; sparse invalidated \t");
+ fprintf (file, " sparse invalidated \t");
dump_bitmap (file, problem_data->sparse_invalidated_by_call);
- fprintf (file, " dense invalidated \t");
+ fprintf (file, " dense invalidated \t");
dump_bitmap (file, problem_data->dense_invalidated_by_call);
-
+
for (regno = 0; regno < m; regno++)
- if (DF_USES_COUNT (regno))
+ if (DF_DEFS_COUNT (regno))
fprintf (file, "%d[%d,%d] ", regno,
- DF_USES_BEGIN (regno),
- DF_USES_COUNT (regno));
+ DF_DEFS_BEGIN (regno),
+ DF_DEFS_COUNT (regno));
fprintf (file, "\n");
+
}
/* Debugging info at top of bb. */
static void
-df_ru_top_dump (basic_block bb, FILE *file)
+df_rd_top_dump (basic_block bb, FILE *file)
{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb->index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
if (!bb_info || !bb_info->in)
return;
- fprintf (file, ";; ru in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
+ fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
dump_bitmap (file, bb_info->in);
- fprintf (file, ";; ru gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
+ fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
dump_bitmap (file, bb_info->gen);
- fprintf (file, ";; ru kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
+ fprintf (file, ";; rd kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
dump_bitmap (file, bb_info->kill);
-}
+}
-/* Debugging info at bottom of bb. */
+/* Debugging info at bottom of bb. */
static void
-df_ru_bottom_dump (basic_block bb, FILE *file)
+df_rd_bottom_dump (basic_block bb, FILE *file)
{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb->index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
if (!bb_info || !bb_info->out)
return;
- fprintf (file, ";; ru out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
+ fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
dump_bitmap (file, bb_info->out);
-}
-
+}
/* All of the information associated with every instance of the problem. */
-static struct df_problem problem_RU =
+static struct df_problem problem_RD =
{
- DF_RU, /* Problem id. */
- DF_BACKWARD, /* Direction. */
- df_ru_alloc, /* Allocate the problem specific data. */
+ DF_RD, /* Problem id. */
+ DF_FORWARD, /* Direction. */
+ df_rd_alloc, /* Allocate the problem specific data. */
NULL, /* Reset global information. */
- df_ru_free_bb_info, /* Free basic block info. */
- df_ru_local_compute, /* Local compute function. */
- df_ru_init_solution, /* Init the solution specific data. */
+ df_rd_free_bb_info, /* Free basic block info. */
+ df_rd_local_compute, /* Local compute function. */
+ df_rd_init_solution, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
NULL, /* Confluence operator 0. */
- df_ru_confluence_n, /* Confluence operator n. */
- df_ru_transfer_function, /* Transfer function. */
+ df_rd_confluence_n, /* Confluence operator n. */
+ df_rd_transfer_function, /* Transfer function. */
NULL, /* Finalize function. */
- df_ru_free, /* Free all of the problem information. */
- df_ru_free, /* Remove this problem from the stack of dataflow problems. */
- df_ru_start_dump, /* Debugging. */
- df_ru_top_dump, /* Debugging start block. */
- df_ru_bottom_dump, /* Debugging end block. */
+ df_rd_free, /* Free all of the problem information. */
+ df_rd_free, /* Remove this problem from the stack of dataflow problems. */
+ df_rd_start_dump, /* Debugging. */
+ df_rd_top_dump, /* Debugging start block. */
+ df_rd_bottom_dump, /* Debugging end block. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
- TV_DF_RU, /* Timing variable. */
+ TV_DF_RD, /* Timing variable. */
true /* Reset blocks on dropping out of blocks_to_analyze. */
};
solution. */
void
-df_ru_add_problem (void)
+df_rd_add_problem (void)
{
- df_add_problem (&problem_RU);
+ df_add_problem (&problem_RD);
}
+
\f
/*----------------------------------------------------------------------------
- REACHING DEFINITIONS
+ LIVE REGISTERS
- Find the locations in the function where each definition site for a
- pseudo reaches. In and out bitvectors are built for each basic
- block. The id field in the ref is used to index into these sets.
+ Find the locations in the function where any use of a pseudo can
+ reach in the backwards direction. In and out bitvectors are built
+ for each basic block. The regno is used to index into these sets.
See df.h for details.
----------------------------------------------------------------------------*/
-/* See the comment at the top of the Reaching Uses problem for how the
- uses are represented in the kill sets. The same games are played
- here for the defs. */
-
-/* Private data used to compute the solution for this problem. These
- data structures are not accessible outside of this module. */
-struct df_rd_problem_data
+/* Private data used to verify the solution for this problem. */
+struct df_lr_problem_data
{
- /* The set of defs to regs invalidated by call. */
- bitmap sparse_invalidated_by_call;
- /* The set of defs to regs invalidate by call for rd. */
- bitmap dense_invalidated_by_call;
- /* An obstack for the bitmaps we need for this problem. */
- bitmap_obstack rd_bitmaps;
+ bitmap *in;
+ bitmap *out;
};
+
/* Set basic block info. */
static void
-df_rd_set_bb_info (unsigned int index,
- struct df_rd_bb_info *bb_info)
+df_lr_set_bb_info (unsigned int index,
+ struct df_lr_bb_info *bb_info)
{
- gcc_assert (df_rd);
- gcc_assert (index < df_rd->block_info_size);
- df_rd->block_info[index] = bb_info;
+ gcc_assert (df_lr);
+ gcc_assert (index < df_lr->block_info_size);
+ df_lr->block_info[index] = bb_info;
}
-
+
/* Free basic block info. */
static void
-df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
+df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
- struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
+ struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
if (bb_info)
{
- BITMAP_FREE (bb_info->kill);
- BITMAP_FREE (bb_info->sparse_kill);
- BITMAP_FREE (bb_info->gen);
+ BITMAP_FREE (bb_info->use);
+ BITMAP_FREE (bb_info->def);
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
- pool_free (df_rd->block_pool, bb_info);
+ pool_free (df_lr->block_pool, bb_info);
}
}
-/* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
+/* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
not touched unless the block is new. */
static void
-df_rd_alloc (bitmap all_blocks)
+df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
- struct df_rd_problem_data *problem_data;
-
- if (!df_rd->block_pool)
- df_rd->block_pool = create_alloc_pool ("df_rd_block pool",
- sizeof (struct df_rd_bb_info), 50);
-
- if (df_rd->problem_data)
- {
- problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
- bitmap_clear (problem_data->sparse_invalidated_by_call);
- bitmap_clear (problem_data->dense_invalidated_by_call);
- }
- else
- {
- problem_data = XNEW (struct df_rd_problem_data);
- df_rd->problem_data = problem_data;
-
- bitmap_obstack_initialize (&problem_data->rd_bitmaps);
- problem_data->sparse_invalidated_by_call
- = BITMAP_ALLOC (&problem_data->rd_bitmaps);
- problem_data->dense_invalidated_by_call
- = BITMAP_ALLOC (&problem_data->rd_bitmaps);
- }
- df_grow_bb_info (df_rd);
+ if (!df_lr->block_pool)
+ df_lr->block_pool = create_alloc_pool ("df_lr_block pool",
+ sizeof (struct df_lr_bb_info), 50);
- /* Because of the clustering of all use sites for the same pseudo,
- we have to process all of the blocks before doing the
- analysis. */
+ df_grow_bb_info (df_lr);
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
if (bb_info)
{
- bitmap_clear (bb_info->kill);
- bitmap_clear (bb_info->sparse_kill);
- bitmap_clear (bb_info->gen);
+ bitmap_clear (bb_info->def);
+ bitmap_clear (bb_info->use);
}
else
{
- bb_info = (struct df_rd_bb_info *) pool_alloc (df_rd->block_pool);
- df_rd_set_bb_info (bb_index, bb_info);
- bb_info->kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
- bb_info->sparse_kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
- bb_info->gen = BITMAP_ALLOC (&problem_data->rd_bitmaps);
- bb_info->in = BITMAP_ALLOC (&problem_data->rd_bitmaps);
- bb_info->out = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ bb_info = (struct df_lr_bb_info *) pool_alloc (df_lr->block_pool);
+ df_lr_set_bb_info (bb_index, bb_info);
+ bb_info->use = BITMAP_ALLOC (NULL);
+ bb_info->def = BITMAP_ALLOC (NULL);
+ bb_info->in = BITMAP_ALLOC (NULL);
+ bb_info->out = BITMAP_ALLOC (NULL);
}
}
- df_rd->optional_p = true;
+
+ df_lr->optional_p = false;
}
-/* Process a list of DEFs for df_rd_bb_local_compute. */
+/* Reset the global solution for recalculation. */
-static void
-df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
- struct df_ref **def_rec,
- enum df_ref_flags top_flag)
+static void
+df_lr_reset (bitmap all_blocks)
{
- while (*def_rec)
+ unsigned int bb_index;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_ref *def = *def_rec;
- if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
- {
- unsigned int regno = DF_REF_REGNO (def);
- unsigned int begin = DF_DEFS_BEGIN (regno);
- unsigned int n_defs = DF_DEFS_COUNT (regno);
-
- if ((!(df->changeable_flags & DF_NO_HARD_REGS))
- || (regno >= FIRST_PSEUDO_REGISTER))
- {
- /* Only the last def(s) for a regno in the block has any
- effect. */
- if (!bitmap_bit_p (seen_in_block, regno))
- {
- /* The first def for regno in insn gets to knock out the
- defs from other instructions. */
- if ((!bitmap_bit_p (seen_in_insn, regno))
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- && (!(DF_REF_FLAGS (def) &
- (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
- {
- if (n_defs > DF_SPARSE_THRESHOLD)
- {
- bitmap_set_bit (bb_info->sparse_kill, regno);
- bitmap_clear_range(bb_info->gen, begin, n_defs);
- }
- else
- {
- bitmap_set_range (bb_info->kill, begin, n_defs);
- bitmap_clear_range (bb_info->gen, begin, n_defs);
- }
- }
-
- bitmap_set_bit (seen_in_insn, regno);
- /* All defs for regno in the instruction may be put into
- the gen set. */
- if (!(DF_REF_FLAGS (def)
- & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
- }
- }
- }
- def_rec++;
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
+ gcc_assert (bb_info);
+ bitmap_clear (bb_info->in);
+ bitmap_clear (bb_info->out);
}
}
-/* Compute local reaching def info for basic block BB. */
+
+/* Compute local live register info for basic block BB. */
static void
-df_rd_bb_local_compute (unsigned int bb_index)
+df_lr_bb_local_compute (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK (bb_index);
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
rtx insn;
+ df_ref *def_rec;
+ df_ref *use_rec;
- bitmap_clear (seen_in_block);
- bitmap_clear (seen_in_insn);
+ /* Process the registers set in an exception handler. */
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ bitmap_set_bit (bb_info->def, dregno);
+ bitmap_clear_bit (bb_info->use, dregno);
+ }
+ }
- /* Artificials are only hard regs. */
- if (!(df->changeable_flags & DF_NO_HARD_REGS))
- df_rd_bb_local_compute_process_def (bb_info,
- df_get_artificial_defs (bb_index),
- 0);
+ /* Process the hardware registers that are always live. */
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ df_ref use = *use_rec;
+ /* Add use to set of uses in this BB. */
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
+ }
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (!INSN_P (insn))
- continue;
+ continue;
- df_rd_bb_local_compute_process_def (bb_info,
- DF_INSN_UID_DEFS (uid), 0);
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ bitmap_set_bit (bb_info->def, dregno);
+ bitmap_clear_bit (bb_info->use, dregno);
+ }
+ }
- /* This complex dance with the two bitmaps is required because
- instructions can assign twice to the same pseudo. This
- generally happens with calls that will have one def for the
- result and another def for the clobber. If only one vector
- is used and the clobber goes first, the result will be
- lost. */
- bitmap_ior_into (seen_in_block, seen_in_insn);
- bitmap_clear (seen_in_insn);
+ for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ {
+ df_ref use = *use_rec;
+ /* Add use to set of uses in this BB. */
+ bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
+ }
}
- /* Process the artificial defs at the top of the block last since we
- are going backwards through the block and these are logically at
- the start. */
- if (!(df->changeable_flags & DF_NO_HARD_REGS))
- df_rd_bb_local_compute_process_def (bb_info,
- df_get_artificial_defs (bb_index),
- DF_REF_AT_TOP);
+ /* Process the registers set in an exception handler or the hard
+ frame pointer if this block is the target of a non local
+ goto. */
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ bitmap_set_bit (bb_info->def, dregno);
+ bitmap_clear_bit (bb_info->use, dregno);
+ }
+ }
+
+#ifdef EH_USES
+ /* Process the uses that are live into an exception handler. */
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ df_ref use = *use_rec;
+ /* Add use to set of uses in this BB. */
+ if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
+ bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
+ }
+#endif
+
+ /* If the df_live problem is not defined, such as at -O0 and -O1, we
+ still need to keep the luids up to date. This is normally done
+ in the df_live problem since this problem has a forwards
+ scan. */
+ if (!df_live)
+ df_recompute_luids (bb);
}
-/* Compute local reaching def info for each basic block within BLOCKS. */
+/* Compute local live register info for each basic block within BLOCKS. */
static void
-df_rd_local_compute (bitmap all_blocks)
+df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
- unsigned int regno;
- struct df_rd_problem_data *problem_data
- = (struct df_rd_problem_data *) df_rd->problem_data;
- bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
- bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
-
- df_set_seen ();
-
- df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
+
+ bitmap_clear (df->hardware_regs_used);
+
+ /* The all-important stack pointer must always be live. */
+ bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
+
+ /* Before reload, there are a few registers that must be forced
+ live everywhere -- which might not already be the case for
+ blocks within infinite loops. */
+ if (!reload_completed)
{
- df_rd_bb_local_compute (bb_index);
+ /* Any reference to any pseudo before reload is a potential
+ reference of the frame pointer. */
+ bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
+
+#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
+ /* Pseudos with argument area equivalences may require
+ reloading via the argument pointer. */
+ if (fixed_regs[ARG_POINTER_REGNUM])
+ bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
+#endif
+
+ /* Any constant, or pseudo with constant equivalences, may
+ require reloading from memory using the pic register. */
+ if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
+ && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
+ bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
}
- /* Set up the knockout bit vectors to be applied across EH_EDGES. */
- EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
+ EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
- if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
- bitmap_set_bit (sparse_invalidated, regno);
+ if (bb_index == EXIT_BLOCK)
+ {
+ /* The exit block is special for this problem and its bits are
+ computed from thin air. */
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
+ bitmap_copy (bb_info->use, df->exit_block_uses);
+ }
else
- bitmap_set_range (dense_invalidated,
- DF_DEFS_BEGIN (regno),
- DF_DEFS_COUNT (regno));
+ df_lr_bb_local_compute (bb_index);
}
- df_unset_seen ();
+
+ bitmap_clear (df_lr->out_of_date_transfer_functions);
}
-/* Initialize the solution bit vectors for problem. */
+/* Initialize the solution vectors. */
static void
-df_rd_init_solution (bitmap all_blocks)
+df_lr_init (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
-
- bitmap_copy (bb_info->out, bb_info->gen);
- bitmap_clear (bb_info->in);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
+ bitmap_copy (bb_info->in, bb_info->use);
+ bitmap_clear (bb_info->out);
}
}
-/* In of target gets or of out of source. */
+/* Confluence function that processes infinite loops. This might be a
+ noreturn function that throws. And even if it isn't, getting the
+ unwind info right helps debugging. */
static void
-df_rd_confluence_n (edge e)
+df_lr_confluence_0 (basic_block bb)
{
- bitmap op1 = df_rd_get_bb_info (e->dest->index)->in;
- bitmap op2 = df_rd_get_bb_info (e->src->index)->out;
+ bitmap op1 = df_lr_get_bb_info (bb->index)->out;
+ if (bb != EXIT_BLOCK_PTR)
+ bitmap_copy (op1, df->hardware_regs_used);
+}
- if (e->flags & EDGE_EH)
- {
- struct df_rd_problem_data *problem_data
- = (struct df_rd_problem_data *) df_rd->problem_data;
- bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
- bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
- bitmap_iterator bi;
- unsigned int regno;
- bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
- bitmap_copy (tmp, op2);
- bitmap_and_compl_into (tmp, dense_invalidated);
+/* Confluence function that ignores fake edges. */
- EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
- {
- bitmap_clear_range (tmp,
- DF_DEFS_BEGIN (regno),
- DF_DEFS_COUNT (regno));
- }
- bitmap_ior_into (op1, tmp);
- BITMAP_FREE (tmp);
- }
+static void
+df_lr_confluence_n (edge e)
+{
+ bitmap op1 = df_lr_get_bb_info (e->src->index)->out;
+ bitmap op2 = df_lr_get_bb_info (e->dest->index)->in;
+
+ /* Call-clobbered registers die across exception and call edges. */
+ /* ??? Abnormal call edges ignored for the moment, as this gets
+ confused by sibling call edges, which crashes reg-stack. */
+ if (e->flags & EDGE_EH)
+ bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
else
bitmap_ior_into (op1, op2);
-}
+
+ bitmap_ior_into (op1, df->hardware_regs_used);
+}
/* Transfer function. */
static bool
-df_rd_transfer_function (int bb_index)
+df_lr_transfer_function (int bb_index)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
- unsigned int regno;
- bitmap_iterator bi;
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
bitmap in = bb_info->in;
bitmap out = bb_info->out;
- bitmap gen = bb_info->gen;
- bitmap kill = bb_info->kill;
- bitmap sparse_kill = bb_info->sparse_kill;
+ bitmap use = bb_info->use;
+ bitmap def = bb_info->def;
- if (bitmap_empty_p (sparse_kill))
- return bitmap_ior_and_compl (out, gen, in, kill);
- else
- {
- struct df_rd_problem_data *problem_data;
- bool changed = false;
- bitmap tmp;
+ return bitmap_ior_and_compl (in, use, out, def);
+}
- /* Note that TMP is _not_ a temporary bitmap if we end up replacing
- OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
- problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
- tmp = BITMAP_ALLOC (&problem_data->rd_bitmaps);
- bitmap_copy (tmp, in);
- EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
- {
- bitmap_clear_range (tmp,
- DF_DEFS_BEGIN (regno),
- DF_DEFS_COUNT (regno));
- }
- bitmap_and_compl_into (tmp, kill);
- bitmap_ior_into (tmp, gen);
- changed = !bitmap_equal_p (tmp, out);
- if (changed)
+/* Run the fast dce as a side effect of building LR. */
+
+static void
+df_lr_finalize (bitmap all_blocks)
+{
+ df_lr->solutions_dirty = false;
+ if (df->changeable_flags & DF_LR_RUN_DCE)
+ {
+ run_fast_df_dce ();
+
+ /* If dce deletes some instructions, we need to recompute the lr
+ solution before proceeding further. The problem is that fast
+ dce is a pessimistic dataflow algorithm. In the case where
+ it deletes a statement S inside of a loop, the uses inside of
+ S may not be deleted from the dataflow solution because they
+ were carried around the loop. While it is conservatively
+ correct to leave these extra bits, the standards of df
+ require that we maintain the best possible (least fixed
+ point) solution. The only way to do that is to redo the
+ iteration from the beginning. See PR35805 for an
+ example. */
+ if (df_lr->solutions_dirty)
{
- BITMAP_FREE (out);
- bb_info->out = tmp;
+ df_clear_flags (DF_LR_RUN_DCE);
+ df_lr_alloc (all_blocks);
+ df_lr_local_compute (all_blocks);
+ df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
+ df_lr_finalize (all_blocks);
+ df_set_flags (DF_LR_RUN_DCE);
}
- else
- BITMAP_FREE (tmp);
- return changed;
}
}
/* Free all storage associated with the problem. */
static void
-df_rd_free (void)
+df_lr_free (void)
{
- unsigned int i;
- struct df_rd_problem_data *problem_data
- = (struct df_rd_problem_data *) df_rd->problem_data;
-
- if (problem_data)
+ if (df_lr->block_info)
{
- for (i = 0; i < df_rd->block_info_size; i++)
+ unsigned int i;
+ for (i = 0; i < df_lr->block_info_size; i++)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (i);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (i);
if (bb_info)
{
- BITMAP_FREE (bb_info->kill);
- BITMAP_FREE (bb_info->sparse_kill);
- BITMAP_FREE (bb_info->gen);
+ BITMAP_FREE (bb_info->use);
+ BITMAP_FREE (bb_info->def);
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
}
}
+ free_alloc_pool (df_lr->block_pool);
- free_alloc_pool (df_rd->block_pool);
- BITMAP_FREE (problem_data->sparse_invalidated_by_call);
- BITMAP_FREE (problem_data->dense_invalidated_by_call);
- bitmap_obstack_release (&problem_data->rd_bitmaps);
-
- df_rd->block_info_size = 0;
- free (df_rd->block_info);
- free (df_rd->problem_data);
+ df_lr->block_info_size = 0;
+ free (df_lr->block_info);
}
- free (df_rd);
-}
-
-
-/* Debugging info. */
-
-static void
-df_rd_start_dump (FILE *file)
-{
- struct df_rd_problem_data *problem_data
- = (struct df_rd_problem_data *) df_rd->problem_data;
- unsigned int m = DF_REG_SIZE(df);
- unsigned int regno;
-
- if (!df_rd->block_info)
- return;
-
- fprintf (file, ";; Reaching defs:\n\n");
-
- fprintf (file, " sparse invalidated \t");
- dump_bitmap (file, problem_data->sparse_invalidated_by_call);
- fprintf (file, " dense invalidated \t");
- dump_bitmap (file, problem_data->dense_invalidated_by_call);
-
- for (regno = 0; regno < m; regno++)
- if (DF_DEFS_COUNT (regno))
- fprintf (file, "%d[%d,%d] ", regno,
- DF_DEFS_BEGIN (regno),
- DF_DEFS_COUNT (regno));
- fprintf (file, "\n");
+ BITMAP_FREE (df_lr->out_of_date_transfer_functions);
+ free (df_lr);
}
/* Debugging info at top of bb. */
static void
-df_rd_top_dump (basic_block bb, FILE *file)
+df_lr_top_dump (basic_block bb, FILE *file)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
+ struct df_lr_problem_data *problem_data;
if (!bb_info || !bb_info->in)
return;
-
- fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
- dump_bitmap (file, bb_info->in);
- fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
- dump_bitmap (file, bb_info->gen);
- fprintf (file, ";; rd kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
- dump_bitmap (file, bb_info->kill);
-}
+
+ fprintf (file, ";; lr in \t");
+ df_print_regset (file, bb_info->in);
+ if (df_lr->problem_data)
+ {
+ problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
+ fprintf (file, ";; old in \t");
+ df_print_regset (file, problem_data->in[bb->index]);
+ }
+ fprintf (file, ";; lr use \t");
+ df_print_regset (file, bb_info->use);
+ fprintf (file, ";; lr def \t");
+ df_print_regset (file, bb_info->def);
+}
-/* Debugging info at top of bb. */
+/* Debugging info at bottom of bb. */
static void
-df_rd_bottom_dump (basic_block bb, FILE *file)
+df_lr_bottom_dump (basic_block bb, FILE *file)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
- if (!bb_info || !bb_info->out)
- return;
-
- fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
- dump_bitmap (file, bb_info->out);
-}
-
-/* All of the information associated with every instance of the problem. */
-
-static struct df_problem problem_RD =
-{
- DF_RD, /* Problem id. */
- DF_FORWARD, /* Direction. */
- df_rd_alloc, /* Allocate the problem specific data. */
- NULL, /* Reset global information. */
- df_rd_free_bb_info, /* Free basic block info. */
- df_rd_local_compute, /* Local compute function. */
- df_rd_init_solution, /* Init the solution specific data. */
- df_worklist_dataflow, /* Worklist solver. */
- NULL, /* Confluence operator 0. */
- df_rd_confluence_n, /* Confluence operator n. */
- df_rd_transfer_function, /* Transfer function. */
- NULL, /* Finalize function. */
- df_rd_free, /* Free all of the problem information. */
- df_rd_free, /* Remove this problem from the stack of dataflow problems. */
- df_rd_start_dump, /* Debugging. */
- df_rd_top_dump, /* Debugging start block. */
- df_rd_bottom_dump, /* Debugging end block. */
- NULL, /* Incremental solution verify start. */
- NULL, /* Incremental solution verify end. */
- NULL, /* Dependent problem. */
- TV_DF_RD, /* Timing variable. */
- true /* Reset blocks on dropping out of blocks_to_analyze. */
-};
-
-
-
-/* Create a new DATAFLOW instance and add it to an existing instance
- of DF. The returned structure is what is used to get at the
- solution. */
-
-void
-df_rd_add_problem (void)
-{
- df_add_problem (&problem_RD);
-}
-
-
-\f
-/*----------------------------------------------------------------------------
- LIVE REGISTERS
-
- Find the locations in the function where any use of a pseudo can
- reach in the backwards direction. In and out bitvectors are built
- for each basic block. The regnum is used to index into these sets.
- See df.h for details.
- ----------------------------------------------------------------------------*/
-
-/* Private data used to verify the solution for this problem. */
-struct df_lr_problem_data
-{
- bitmap *in;
- bitmap *out;
-};
-
-
-/* Set basic block info. */
-
-static void
-df_lr_set_bb_info (unsigned int index,
- struct df_lr_bb_info *bb_info)
-{
- gcc_assert (df_lr);
- gcc_assert (index < df_lr->block_info_size);
- df_lr->block_info[index] = bb_info;
-}
-
-
-/* Free basic block info. */
-
-static void
-df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
- void *vbb_info)
-{
- struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
- if (bb_info)
- {
- BITMAP_FREE (bb_info->use);
- BITMAP_FREE (bb_info->def);
- if (bb_info->in == bb_info->top)
- bb_info->top = NULL;
- else
- {
- BITMAP_FREE (bb_info->top);
- BITMAP_FREE (bb_info->ause);
- BITMAP_FREE (bb_info->adef);
- }
- BITMAP_FREE (bb_info->in);
- BITMAP_FREE (bb_info->out);
- pool_free (df_lr->block_pool, bb_info);
- }
-}
-
-
-/* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
- not touched unless the block is new. */
-
-static void
-df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
-{
- unsigned int bb_index;
- bitmap_iterator bi;
-
- if (!df_lr->block_pool)
- df_lr->block_pool = create_alloc_pool ("df_lr_block pool",
- sizeof (struct df_lr_bb_info), 50);
-
- df_grow_bb_info (df_lr);
-
- EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
- {
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
- if (bb_info)
- {
- bitmap_clear (bb_info->def);
- bitmap_clear (bb_info->use);
- if (bb_info->adef)
- {
- bitmap_clear (bb_info->adef);
- bitmap_clear (bb_info->ause);
- }
- }
- else
- {
- bb_info = (struct df_lr_bb_info *) pool_alloc (df_lr->block_pool);
- df_lr_set_bb_info (bb_index, bb_info);
- bb_info->use = BITMAP_ALLOC (NULL);
- bb_info->def = BITMAP_ALLOC (NULL);
- bb_info->in = BITMAP_ALLOC (NULL);
- bb_info->out = BITMAP_ALLOC (NULL);
- bb_info->top = bb_info->in;
- bb_info->adef = NULL;
- bb_info->ause = NULL;
- }
- }
-
- df_lr->optional_p = false;
-}
-
-
-/* Reset the global solution for recalculation. */
-
-static void
-df_lr_reset (bitmap all_blocks)
-{
- unsigned int bb_index;
- bitmap_iterator bi;
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
- gcc_assert (bb_info);
- bitmap_clear (bb_info->in);
- bitmap_clear (bb_info->out);
- bitmap_clear (bb_info->top);
- }
-}
-
-
-/* Compute local live register info for basic block BB. */
-
-static void
-df_lr_bb_local_compute (unsigned int bb_index)
-{
- basic_block bb = BASIC_BLOCK (bb_index);
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
- rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
-
- /* Process the registers set in an exception handler. */
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- {
- unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
- }
- }
-
- /* Process the hardware registers that are always live. */
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- struct df_ref *use = *use_rec;
- /* Add use to set of uses in this BB. */
- if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
- }
-
- FOR_BB_INSNS_REVERSE (bb, insn)
- {
- unsigned int uid = INSN_UID (insn);
-
- if (!INSN_P (insn))
- continue;
-
- if (CALL_P (insn))
- {
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- unsigned int dregno = DF_REF_REGNO (def);
-
- if (DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
- {
- if (dregno >= FIRST_PSEUDO_REGISTER
- || !(SIBLING_CALL_P (insn)
- && bitmap_bit_p (df->exit_block_uses, dregno)
- && !refers_to_regno_p (dregno, dregno+1,
- current_function_return_rtx,
- (rtx *)0)))
- {
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- {
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
- }
- }
- }
- else
- /* This is the return value. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- {
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
- }
- }
- }
- else
- {
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- {
- unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
- }
- }
- }
-
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- {
- struct df_ref *use = *use_rec;
- /* Add use to set of uses in this BB. */
- bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
- }
- }
- /* Process the registers set in an exception handler. */
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
- {
- unsigned int dregno = DF_REF_REGNO (def);
- if (bb_info->adef == NULL)
- {
- gcc_assert (bb_info->ause == NULL);
- gcc_assert (bb_info->top == bb_info->in);
- bb_info->adef = BITMAP_ALLOC (NULL);
- bb_info->ause = BITMAP_ALLOC (NULL);
- bb_info->top = BITMAP_ALLOC (NULL);
- }
- bitmap_set_bit (bb_info->adef, dregno);
- }
- }
-
-#ifdef EH_USES
- /* Process the uses that are live into an exception handler. */
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- struct df_ref *use = *use_rec;
- /* Add use to set of uses in this BB. */
- if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
- {
- if (bb_info->adef == NULL)
- {
- gcc_assert (bb_info->ause == NULL);
- gcc_assert (bb_info->top == bb_info->in);
- bb_info->adef = BITMAP_ALLOC (NULL);
- bb_info->ause = BITMAP_ALLOC (NULL);
- bb_info->top = BITMAP_ALLOC (NULL);
- }
- bitmap_set_bit (bb_info->ause, DF_REF_REGNO (use));
- }
- }
-#endif
-
- /* If the df_live problem is not defined, such as at -O0 and -O1, we
- still need to keep the luids up to date. This is normally done
- in the df_live problem since this problem has a forwards
- scan. */
- if (!df_live)
- df_recompute_luids (bb);
-}
-
-
-/* Compute local live register info for each basic block within BLOCKS. */
-
-static void
-df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
-{
- unsigned int bb_index;
- bitmap_iterator bi;
-
- bitmap_clear (df->hardware_regs_used);
-
- /* The all-important stack pointer must always be live. */
- bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
-
- /* Before reload, there are a few registers that must be forced
- live everywhere -- which might not already be the case for
- blocks within infinite loops. */
- if (!reload_completed)
- {
- /* Any reference to any pseudo before reload is a potential
- reference of the frame pointer. */
- bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
-
-#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
- /* Pseudos with argument area equivalences may require
- reloading via the argument pointer. */
- if (fixed_regs[ARG_POINTER_REGNUM])
- bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
-#endif
-
- /* Any constant, or pseudo with constant equivalences, may
- require reloading from memory using the pic register. */
- if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
- && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
- bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
- }
-
- EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
- {
- if (bb_index == EXIT_BLOCK)
- {
- /* The exit block is special for this problem and its bits are
- computed from thin air. */
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
- bitmap_copy (bb_info->use, df->exit_block_uses);
- }
- else
- df_lr_bb_local_compute (bb_index);
- }
-
- bitmap_clear (df_lr->out_of_date_transfer_functions);
-}
-
-
-/* Initialize the solution vectors. */
-
-static void
-df_lr_init (bitmap all_blocks)
-{
- unsigned int bb_index;
- bitmap_iterator bi;
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
- bitmap_copy (bb_info->in, bb_info->use);
- bitmap_clear (bb_info->out);
- }
-}
-
-
-/* Confluence function that processes infinite loops. This might be a
- noreturn function that throws. And even if it isn't, getting the
- unwind info right helps debugging. */
-static void
-df_lr_confluence_0 (basic_block bb)
-{
- bitmap op1 = df_lr_get_bb_info (bb->index)->out;
- if (bb != EXIT_BLOCK_PTR)
- bitmap_copy (op1, df->hardware_regs_used);
-}
-
-
-/* Confluence function that ignores fake edges. */
-
-static void
-df_lr_confluence_n (edge e)
-{
- bitmap op1 = df_lr_get_bb_info (e->src->index)->out;
- bitmap op2 = df_lr_get_bb_info (e->dest->index)->in;
-
- /* Call-clobbered registers die across exception and call edges. */
- /* ??? Abnormal call edges ignored for the moment, as this gets
- confused by sibling call edges, which crashes reg-stack. */
- if (e->flags & EDGE_EH)
- bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
- else
- bitmap_ior_into (op1, op2);
-
- bitmap_ior_into (op1, df->hardware_regs_used);
-}
-
-
-/* Transfer function. */
-
-static bool
-df_lr_transfer_function (int bb_index)
-{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
- bitmap in = bb_info->in;
- bitmap out = bb_info->out;
- bitmap use = bb_info->use;
- bitmap def = bb_info->def;
- bitmap top = bb_info->top;
- bitmap ause = bb_info->ause;
- bitmap adef = bb_info->adef;
- bool changed;
-
- changed = bitmap_ior_and_compl (top, use, out, def);
- if (in != top)
- {
- gcc_assert (ause && adef);
- changed |= bitmap_ior_and_compl (in, ause, top, adef);
- }
-
- return changed;
-}
-
-
-/* Run the fast dce as a side effect of building LR. */
-
-static void
-df_lr_local_finalize (bitmap all_blocks ATTRIBUTE_UNUSED)
-{
- if (df->changeable_flags & DF_LR_RUN_DCE)
- {
- run_fast_df_dce ();
- if (df_lr->problem_data && df_lr->solutions_dirty)
- {
- /* If we are here, then it is because we are both verifying
- the solution and the dce changed the function. In that case
- the verification info built will be wrong. So we leave the
- dirty flag true so that the verifier will skip the checking
- part and just clean up.*/
- df_lr->solutions_dirty = true;
- }
- else
- df_lr->solutions_dirty = false;
- }
- else
- df_lr->solutions_dirty = false;
-}
-
-
-/* Free all storage associated with the problem. */
-
-static void
-df_lr_free (void)
-{
- if (df_lr->block_info)
- {
- unsigned int i;
- for (i = 0; i < df_lr->block_info_size; i++)
- {
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (i);
- if (bb_info)
- {
- BITMAP_FREE (bb_info->use);
- BITMAP_FREE (bb_info->def);
- if (bb_info->in == bb_info->top)
- bb_info->top = NULL;
- else
- {
- BITMAP_FREE (bb_info->top);
- BITMAP_FREE (bb_info->ause);
- BITMAP_FREE (bb_info->adef);
- }
- BITMAP_FREE (bb_info->in);
- BITMAP_FREE (bb_info->out);
- }
- }
- free_alloc_pool (df_lr->block_pool);
-
- df_lr->block_info_size = 0;
- free (df_lr->block_info);
- }
-
- BITMAP_FREE (df_lr->out_of_date_transfer_functions);
- free (df_lr);
-}
-
-
-/* Debugging info at top of bb. */
-
-static void
-df_lr_top_dump (basic_block bb, FILE *file)
-{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
- struct df_lr_problem_data *problem_data;
- if (!bb_info || !bb_info->in)
- return;
-
- fprintf (file, ";; lr in \t");
- df_print_regset (file, bb_info->in);
- if (df_lr->problem_data)
- {
- problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
- fprintf (file, ";; old in \t");
- df_print_regset (file, problem_data->in[bb->index]);
- }
- fprintf (file, ";; lr use \t");
- df_print_regset (file, bb_info->use);
- fprintf (file, ";; lr def \t");
- df_print_regset (file, bb_info->def);
-}
-
-
-/* Debugging info at bottom of bb. */
-
-static void
-df_lr_bottom_dump (basic_block bb, FILE *file)
-{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
- struct df_lr_problem_data *problem_data;
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
+ struct df_lr_problem_data *problem_data;
if (!bb_info || !bb_info->out)
return;
if (df_lr->solutions_dirty)
/* Do not check if the solution is still dirty. See the comment
- in df_lr_local_finalize for details. */
+ in df_lr_finalize for details. */
df_lr->solutions_dirty = false;
else
FOR_ALL_BB (bb)
df_lr_confluence_0, /* Confluence operator 0. */
df_lr_confluence_n, /* Confluence operator n. */
df_lr_transfer_function, /* Transfer function. */
- df_lr_local_finalize, /* Finalize function. */
+ df_lr_finalize, /* Finalize function. */
df_lr_free, /* Free all of the problem information. */
NULL, /* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
bitmap saved_adef;
bitmap saved_ause;
bitmap all_blocks;
- bool need_as;
if (!df)
return;
bitmap_clear (bb_info->def);
bitmap_clear (bb_info->use);
- if (bb_info->adef)
- {
- need_as = true;
- bitmap_copy (saved_adef, bb_info->adef);
- bitmap_copy (saved_ause, bb_info->ause);
- bitmap_clear (bb_info->adef);
- bitmap_clear (bb_info->ause);
- }
- else
- need_as = false;
-
df_lr_bb_local_compute (bb->index);
gcc_assert (bitmap_equal_p (saved_def, bb_info->def));
gcc_assert (bitmap_equal_p (saved_use, bb_info->use));
-
- if (need_as)
- {
- gcc_assert (bb_info->adef);
- gcc_assert (bb_info->ause);
- gcc_assert (bitmap_equal_p (saved_adef, bb_info->adef));
- gcc_assert (bitmap_equal_p (saved_ause, bb_info->ause));
- }
- else
- {
- gcc_assert (!bb_info->adef);
- gcc_assert (!bb_info->ause);
- }
}
}
else
\f
/*----------------------------------------------------------------------------
- COMBINED LIVE REGISTERS AND UNINITIALIZED REGISTERS.
-
- First find the set of uses for registers that are reachable from
- the entry block without passing thru a definition. In and out
- bitvectors are built for each basic block. The regnum is used to
- index into these sets. See df.h for details.
-
- Then the in and out sets here are the anded results of the in and
- out sets from the lr and ur
- problems.
+ LIVE AND MUST-INITIALIZED REGISTERS.
+
+ This problem first computes the IN and OUT bitvectors for the
+ must-initialized registers problems, which is a forward problem.
+ It gives the set of registers for which we MUST have an available
+ definition on any path from the entry block to the entry/exit of
+ a basic block. Sets generate a definition, while clobbers kill
+ a definition.
+
+ In and out bitvectors are built for each basic block and are indexed by
+ regnum (see df.h for details). In and out bitvectors in struct
+ df_live_bb_info actually refers to the must-initialized problem.
+
+ Then, the in and out sets for the LIVE problem itself are computed.
+ These are the logical AND of the IN and OUT sets from the LR problem
+ and the must-initialized problem.
----------------------------------------------------------------------------*/
/* Private data used to verify the solution for this problem. */
bitmap *out;
};
+/* Scratch var used by transfer functions. This is used to implement
+ an optimization to reduce the amount of space used to compute the
+ combined lr and live analysis. */
+static bitmap df_live_scratch;
/* Set basic block info. */
if (!df_live->block_pool)
df_live->block_pool = create_alloc_pool ("df_live_block pool",
sizeof (struct df_live_bb_info), 100);
+ if (!df_live_scratch)
+ df_live_scratch = BITMAP_ALLOC (NULL);
df_grow_bb_info (df_live);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
gcc_assert (bb_info);
bitmap_clear (bb_info->in);
bitmap_clear (bb_info->out);
basic_block bb = BASIC_BLOCK (bb_index);
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
rtx insn;
- struct df_ref **def_rec;
+ df_ref *def_rec;
int luid = 0;
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
- }
-
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (!insn_info)
{
gcc_assert (!INSN_P (insn));
- df_insn_create_insn_record (insn);
+ insn_info = df_insn_create_insn_record (insn);
}
- DF_INSN_LUID (insn) = luid;
+ DF_INSN_INFO_LUID (insn_info) = luid;
if (!INSN_P (insn))
continue;
luid++;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if (DF_REF_FLAGS_IS_SET (def,
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
+ df_ref def = *def_rec;
+ bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
}
}
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
+ struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
- bitmap_copy (bb_info->out, bb_info->gen);
+ /* No register may reach a location where it is not used. Thus
+ we trim the rr result to the places where it is used. */
+ bitmap_and (bb_info->out, bb_info->gen, bb_lr_info->out);
bitmap_clear (bb_info->in);
}
}
-/* Confluence function that ignores fake edges. */
+/* Forward confluence function that ignores fake edges. */
static void
df_live_confluence_n (edge e)
}
-/* Transfer function. */
+/* Transfer function for the forwards must-initialized problem. */
static bool
df_live_transfer_function (int bb_index)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
+ struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
bitmap in = bb_info->in;
bitmap out = bb_info->out;
bitmap gen = bb_info->gen;
bitmap kill = bb_info->kill;
- return bitmap_ior_and_compl (out, gen, in, kill);
+ /* We need to use a scratch set here so that the value returned from
+ this function invocation properly reflects if the sets changed in
+ a significant way; i.e. not just because the lr set was anded
+ in. */
+ bitmap_and (df_live_scratch, gen, bb_lr_info->out);
+ /* No register may reach a location where it is not used. Thus
+ we trim the rr result to the places where it is used. */
+ bitmap_and_into (in, bb_lr_info->in);
+
+ return bitmap_ior_and_compl (out, df_live_scratch, in, kill);
}
-/* And the LR and UR info to produce the LIVE info. */
+/* And the LR info with the must-initialized registers, to produce the LIVE info. */
static void
-df_live_local_finalize (bitmap all_blocks)
+df_live_finalize (bitmap all_blocks)
{
if (df_live->solutions_dirty)
{
struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
-
+
/* No register may reach a location where it is not used. Thus
we trim the rr result to the places where it is used. */
bitmap_and_into (bb_live_info->in, bb_lr_info->in);
free_alloc_pool (df_live->block_pool);
df_live->block_info_size = 0;
free (df_live->block_info);
+
+ if (df_live_scratch)
+ BITMAP_FREE (df_live_scratch);
}
BITMAP_FREE (df_live->out_of_date_transfer_functions);
free (df_live);
NULL, /* Confluence operator 0. */
df_live_confluence_n, /* Confluence operator n. */
df_live_transfer_function, /* Transfer function. */
- df_live_local_finalize, /* Finalize function. */
+ df_live_finalize, /* Finalize function. */
df_live_free, /* Free all of the problem information. */
df_live_free, /* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
BITMAP_FREE (saved_kill);
BITMAP_FREE (all_blocks);
}
-
-
\f
/*----------------------------------------------------------------------------
- UNINITIALIZED REGISTERS WITH EARLYCLOBBER
-
- Find the set of uses for registers that are reachable from the entry
- block without passing thru a definition. In and out bitvectors are built
- for each basic block. The regnum is used to index into these sets.
- See df.h for details.
-
- This is a variant of the UR problem above that has a lot of special
- features just for the register allocation phase. This problem
- should go away if someone would fix the interference graph.
+ CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
- ----------------------------------------------------------------------------*/
+ Link either the defs to the uses and / or the uses to the defs.
-/* Private data used to compute the solution for this problem. These
- data structures are not accessible outside of this module. */
-struct df_urec_problem_data
-{
- bool earlyclobbers_found; /* True if any instruction contains an
- earlyclobber. */
-#ifdef STACK_REGS
- bitmap stack_regs; /* Registers that may be allocated to a STACK_REGS. */
-#endif
-};
+ These problems are set up like the other dataflow problems so that
+ they nicely fit into the framework. They are much simpler and only
+ involve a single traversal of instructions and an examination of
+ the reaching defs information (the dependent problem).
+----------------------------------------------------------------------------*/
+#define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
-/* Set basic block info. */
+/* Create a du or ud chain from SRC to DST and link it into SRC. */
-static void
-df_urec_set_bb_info (unsigned int index,
- struct df_urec_bb_info *bb_info)
+struct df_link *
+df_chain_create (df_ref src, df_ref dst)
{
- gcc_assert (df_urec);
- gcc_assert (index < df_urec->block_info_size);
- df_urec->block_info[index] = bb_info;
+ struct df_link *head = DF_REF_CHAIN (src);
+ struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
+
+ DF_REF_CHAIN (src) = link;
+ link->next = head;
+ link->ref = dst;
+ return link;
}
-/* Free basic block info. */
-
+/* Delete any du or ud chains that start at REF and point to
+ TARGET. */
static void
-df_urec_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
- void *vbb_info)
+df_chain_unlink_1 (df_ref ref, df_ref target)
{
- struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
- if (bb_info)
+ struct df_link *chain = DF_REF_CHAIN (ref);
+ struct df_link *prev = NULL;
+
+ while (chain)
{
- BITMAP_FREE (bb_info->gen);
- BITMAP_FREE (bb_info->kill);
- BITMAP_FREE (bb_info->in);
- BITMAP_FREE (bb_info->out);
- BITMAP_FREE (bb_info->earlyclobber);
- pool_free (df_urec->block_pool, bb_info);
+ if (chain->ref == target)
+ {
+ if (prev)
+ prev->next = chain->next;
+ else
+ DF_REF_CHAIN (ref) = chain->next;
+ pool_free (df_chain->block_pool, chain);
+ return;
+ }
+ prev = chain;
+ chain = chain->next;
}
}
-/* Allocate or reset bitmaps for DF_UREC blocks. The solution bits are
- not touched unless the block is new. */
-
-static void
-df_urec_alloc (bitmap all_blocks)
+/* Delete any du or ud chains that leave from or point to REF.  */
+void
+df_chain_unlink (df_ref ref)
{
- unsigned int bb_index;
- bitmap_iterator bi;
- struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) df_urec->problem_data;
-
- if (!df_urec->block_pool)
- df_urec->block_pool = create_alloc_pool ("df_urec_block pool",
- sizeof (struct df_urec_bb_info), 50);
-
- if (!df_urec->problem_data)
+ struct df_link *chain = DF_REF_CHAIN (ref);
+ while (chain)
{
- problem_data = XNEW (struct df_urec_problem_data);
- df_urec->problem_data = problem_data;
+ struct df_link *next = chain->next;
+ /* Delete the other side if it exists. */
+ df_chain_unlink_1 (chain->ref, ref);
+ pool_free (df_chain->block_pool, chain);
+ chain = next;
}
- problem_data->earlyclobbers_found = false;
+ DF_REF_CHAIN (ref) = NULL;
+}
- df_grow_bb_info (df_urec);
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
+/* Copy the du or ud chain starting at FROM_REF and attach it to
+ TO_REF. */
+
+void
+df_chain_copy (df_ref to_ref,
+ struct df_link *from_ref)
+{
+ while (from_ref)
{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
- if (bb_info)
- {
- bitmap_clear (bb_info->kill);
- bitmap_clear (bb_info->gen);
- bitmap_clear (bb_info->earlyclobber);
- }
- else
- {
- bb_info = (struct df_urec_bb_info *) pool_alloc (df_urec->block_pool);
- df_urec_set_bb_info (bb_index, bb_info);
- bb_info->kill = BITMAP_ALLOC (NULL);
- bb_info->gen = BITMAP_ALLOC (NULL);
- bb_info->in = BITMAP_ALLOC (NULL);
- bb_info->out = BITMAP_ALLOC (NULL);
- bb_info->top = BITMAP_ALLOC (NULL);
- bb_info->earlyclobber = BITMAP_ALLOC (NULL);
- }
+ df_chain_create (to_ref, from_ref->ref);
+ from_ref = from_ref->next;
}
- df_urec->optional_p = true;
}
-/* The function modifies local info for register REG being changed in
- SETTER. DATA is used to pass the current basic block info. */
+/* Remove this problem from the stack of dataflow problems. */
static void
-df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
+df_chain_remove_problem (void)
{
- int regno;
- int endregno;
- int i;
- struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
+ bitmap_iterator bi;
+ unsigned int bb_index;
- if (GET_CODE (reg) == SUBREG)
- reg = SUBREG_REG (reg);
+ /* Wholesale destruction of the old chains. */
+ if (df_chain->block_pool)
+ free_alloc_pool (df_chain->block_pool);
- if (!REG_P (reg))
- return;
-
- regno = REGNO (reg);
- if (regno < FIRST_PSEUDO_REGISTER)
+ EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
{
- endregno = END_HARD_REGNO (reg);
- for (i = regno; i < endregno; i++)
+ rtx insn;
+ df_ref *def_rec;
+ df_ref *use_rec;
+ basic_block bb = BASIC_BLOCK (bb_index);
+
+ if (df_chain_problem_p (DF_DU_CHAIN))
+ for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
+ DF_REF_CHAIN (*def_rec) = NULL;
+ if (df_chain_problem_p (DF_UD_CHAIN))
+ for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
+ DF_REF_CHAIN (*use_rec) = NULL;
+
+ FOR_BB_INSNS (bb, insn)
{
- bitmap_set_bit (bb_info->kill, i);
+ unsigned int uid = INSN_UID (insn);
- if (GET_CODE (setter) != CLOBBER)
- bitmap_set_bit (bb_info->gen, i);
- else
- bitmap_clear_bit (bb_info->gen, i);
+ if (INSN_P (insn))
+ {
+ if (df_chain_problem_p (DF_DU_CHAIN))
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ DF_REF_CHAIN (*def_rec) = NULL;
+ if (df_chain_problem_p (DF_UD_CHAIN))
+ {
+ for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ DF_REF_CHAIN (*use_rec) = NULL;
+ for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
+ DF_REF_CHAIN (*use_rec) = NULL;
+ }
+ }
}
}
- else
- {
- bitmap_set_bit (bb_info->kill, regno);
-
- if (GET_CODE (setter) != CLOBBER)
- bitmap_set_bit (bb_info->gen, regno);
- else
- bitmap_clear_bit (bb_info->gen, regno);
- }
+
+ bitmap_clear (df_chain->out_of_date_transfer_functions);
+ df_chain->block_pool = NULL;
}
-/* Classes of registers which could be early clobbered in the current
- insn. */
-static VEC(int,heap) *earlyclobber_regclass;
-/* This function finds and stores register classes that could be early
- clobbered in INSN. If any earlyclobber classes are found, the function
- returns TRUE, in all other cases it returns FALSE. */
+/* Remove the chain problem completely. */
-static bool
-df_urec_check_earlyclobber (rtx insn)
+static void
+df_chain_fully_remove_problem (void)
{
- int opno;
- bool found = false;
+ df_chain_remove_problem ();
+ BITMAP_FREE (df_chain->out_of_date_transfer_functions);
+ free (df_chain);
+}
- extract_insn (insn);
- VEC_truncate (int, earlyclobber_regclass, 0);
- for (opno = 0; opno < recog_data.n_operands; opno++)
- {
- char c;
- bool amp_p;
- int i;
- enum reg_class class;
- const char *p = recog_data.constraints[opno];
-
- class = NO_REGS;
- amp_p = false;
- for (;;)
- {
- c = *p;
- switch (c)
- {
- case '=': case '+': case '?':
- case '#': case '!':
- case '*': case '%':
- case 'm': case '<': case '>': case 'V': case 'o':
- case 'E': case 'F': case 'G': case 'H':
- case 's': case 'i': case 'n':
- case 'I': case 'J': case 'K': case 'L':
- case 'M': case 'N': case 'O': case 'P':
- case 'X':
- case '0': case '1': case '2': case '3': case '4':
- case '5': case '6': case '7': case '8': case '9':
- /* These don't say anything we care about. */
- break;
-
- case '&':
- amp_p = true;
- break;
- case '\0':
- case ',':
- if (amp_p && class != NO_REGS)
- {
- int rc;
+/* Create def-use or use-def chains. */
- found = true;
- for (i = 0;
- VEC_iterate (int, earlyclobber_regclass, i, rc);
- i++)
- {
- if (rc == (int) class)
- goto found_rc;
- }
+static void
+df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
+{
+ df_chain_remove_problem ();
+ df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
+ sizeof (struct df_link), 50);
+ df_chain->optional_p = true;
+}
- /* We use VEC_quick_push here because
- earlyclobber_regclass holds no more than
- N_REG_CLASSES elements. */
- VEC_quick_push (int, earlyclobber_regclass, (int) class);
- found_rc:
- ;
- }
-
- amp_p = false;
- class = NO_REGS;
- break;
-
- case 'r':
- class = GENERAL_REGS;
- break;
-
- default:
- class = REG_CLASS_FROM_CONSTRAINT (c, p);
- break;
- }
- if (c == '\0')
- break;
- p += CONSTRAINT_LEN (c, p);
- }
- }
- return found;
-}
+/* Reset all of the chains when the set of basic blocks changes. */
-/* The function checks that pseudo-register *X has a class
- intersecting with the class of pseudo-register could be early
- clobbered in the same insn.
+static void
+df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
+{
+ df_chain_remove_problem ();
+}
- This function is a no-op if earlyclobber_regclass is empty.
- Reload can assign the same hard register to uninitialized
- pseudo-register and early clobbered pseudo-register in an insn if
- the pseudo-register is used first time in given BB and not lived at
- the BB start. To prevent this we don't change life information for
- such pseudo-registers. */
+/* Create the chains for a list of USEs. */
-static int
-df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
+static void
+df_chain_create_bb_process_use (bitmap local_rd,
+ df_ref *use_rec,
+ enum df_ref_flags top_flag)
{
- enum reg_class pref_class, alt_class;
- int i, regno;
- struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
-
- if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
+ bitmap_iterator bi;
+ unsigned int def_index;
+
+ while (*use_rec)
{
- int rc;
-
- regno = REGNO (*x);
- if (bitmap_bit_p (bb_info->kill, regno)
- || bitmap_bit_p (bb_info->gen, regno))
- return 0;
- pref_class = reg_preferred_class (regno);
- alt_class = reg_alternate_class (regno);
- for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
+ df_ref use = *use_rec;
+ unsigned int uregno = DF_REF_REGNO (use);
+ if ((!(df->changeable_flags & DF_NO_HARD_REGS))
+ || (uregno >= FIRST_PSEUDO_REGISTER))
{
- if (reg_classes_intersect_p (rc, pref_class)
- || (rc != NO_REGS
- && reg_classes_intersect_p (rc, alt_class)))
+ /* Do not want to go through this for an uninitialized var. */
+ int count = DF_DEFS_COUNT (uregno);
+ if (count)
{
- bitmap_set_bit (bb_info->earlyclobber, regno);
- break;
+ if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
+ {
+ unsigned int first_index = DF_DEFS_BEGIN (uregno);
+ unsigned int last_index = first_index + count - 1;
+
+ EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
+ {
+ df_ref def;
+ if (def_index > last_index)
+ break;
+
+ def = DF_DEFS_GET (def_index);
+ if (df_chain_problem_p (DF_DU_CHAIN))
+ df_chain_create (def, use);
+ if (df_chain_problem_p (DF_UD_CHAIN))
+ df_chain_create (use, def);
+ }
+ }
}
}
- }
- return 0;
-}
-
-/* The function processes all pseudo-registers in *X with the aid of
- previous function. */
-static void
-df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
-{
- for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
+ use_rec++;
+ }
}
-/* Compute local uninitialized register info for basic block BB. */
+/* Create chains from reaching defs bitmaps for basic block BB. */
static void
-df_urec_bb_local_compute (unsigned int bb_index)
+df_chain_create_bb (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK (bb_index);
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
- struct df_ref **def_rec;
+ bitmap cpy = BITMAP_ALLOC (NULL);
+ df_ref *def_rec;
+
+ bitmap_copy (cpy, bb_info->in);
+ bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
+
+ /* Since we are going forwards, process the artificial uses first
+ then the artificial defs second. */
+
+#ifdef EH_USES
+ /* Create the chains for the artificial uses from the EH_USES at the
+ beginning of the block. */
+
+ /* Artificials are only hard regs. */
+ if (!(df->changeable_flags & DF_NO_HARD_REGS))
+ df_chain_create_bb_process_use (cpy,
+ df_get_artificial_uses (bb->index),
+ DF_REF_AT_TOP);
+#endif
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
- unsigned int regno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->gen, regno);
+ unsigned int dregno = DF_REF_REGNO (def);
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_range (cpy,
+ DF_DEFS_BEGIN (dregno),
+ DF_DEFS_COUNT (dregno));
+ bitmap_set_bit (cpy, DF_REF_ID (def));
}
}
+ /* Process the regular instructions next. */
FOR_BB_INSNS (bb, insn)
{
- if (INSN_P (insn))
+ df_ref *def_rec;
+ unsigned int uid = INSN_UID (insn);
+
+ if (!INSN_P (insn))
+ continue;
+
+ /* Now scan the uses and link them up with the defs that remain
+ in the cpy vector. */
+
+ df_chain_create_bb_process_use (cpy, DF_INSN_UID_USES (uid), 0);
+
+ if (df->changeable_flags & DF_EQ_NOTES)
+ df_chain_create_bb_process_use (cpy, DF_INSN_UID_EQ_USES (uid), 0);
+
+
+ /* Since we are going forwards, process the defs second. This
+ pass only changes the bits in cpy. */
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
- if (df_urec_check_earlyclobber (insn))
+ df_ref def = *def_rec;
+ unsigned int dregno = DF_REF_REGNO (def);
+ if ((!(df->changeable_flags & DF_NO_HARD_REGS))
+ || (dregno >= FIRST_PSEUDO_REGISTER))
{
- struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) df_urec->problem_data;
- problem_data->earlyclobbers_found = true;
- note_uses (&PATTERN (insn),
- df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_range (cpy,
+ DF_DEFS_BEGIN (dregno),
+ DF_DEFS_COUNT (dregno));
+ if (!(DF_REF_FLAGS (def)
+ & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
+ bitmap_set_bit (cpy, DF_REF_ID (def));
}
}
}
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- {
- unsigned int regno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->gen, regno);
- }
- }
-}
+ /* Create the chains for the artificial uses of the hard registers
+ at the end of the block. */
+ if (!(df->changeable_flags & DF_NO_HARD_REGS))
+ df_chain_create_bb_process_use (cpy,
+ df_get_artificial_uses (bb->index),
+ 0);
+ BITMAP_FREE (cpy);
+}
-/* Compute local uninitialized register info. */
+/* Create def-use or use-def chains from the reaching-def bitmaps for the
+   basic blocks in ALL_BLOCKS.  */
static void
-df_urec_local_compute (bitmap all_blocks)
+df_chain_finalize (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
-#ifdef STACK_REGS
- int i;
- HARD_REG_SET stack_hard_regs, used;
- struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) df_urec->problem_data;
- /* Any register that MAY be allocated to a register stack (like the
- 387) is treated poorly. Each such register is marked as being
- live everywhere. This keeps the register allocator and the
- subsequent passes from doing anything useful with these values.
-
- FIXME: This seems like an incredibly poor idea. */
-
- CLEAR_HARD_REG_SET (stack_hard_regs);
- for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
- SET_HARD_REG_BIT (stack_hard_regs, i);
- problem_data->stack_regs = BITMAP_ALLOC (NULL);
- for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
- {
- COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
- IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
- AND_HARD_REG_SET (used, stack_hard_regs);
- if (!hard_reg_set_empty_p (used))
- bitmap_set_bit (problem_data->stack_regs, i);
- }
-#endif
-
- /* We know that earlyclobber_regclass holds no more than
- N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
- earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
-
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- df_urec_bb_local_compute (bb_index);
+ df_chain_create_bb (bb_index);
}
-
- VEC_free (int, heap, earlyclobber_regclass);
}
-/* Initialize the solution vectors. */
+/* Free all storage associated with the problem. */
-static void
-df_urec_init (bitmap all_blocks)
+static void
+df_chain_free (void)
{
- unsigned int bb_index;
- bitmap_iterator bi;
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
-
- bitmap_copy (bb_info->out, bb_info->gen);
- bitmap_clear (bb_info->in);
- }
+ free_alloc_pool (df_chain->block_pool);
+ BITMAP_FREE (df_chain->out_of_date_transfer_functions);
+ free (df_chain);
}
-/* Or in the stack regs, hard regs and early clobber regs into the
- urec_in sets of all of the blocks. */
-
+/* Debugging info. */
static void
-df_urec_local_finalize (bitmap all_blocks)
+df_chain_top_dump (basic_block bb, FILE *file)
{
- bitmap tmp = BITMAP_ALLOC (NULL);
- bitmap_iterator bi;
- unsigned int bb_index;
- struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) df_urec->problem_data;
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
+ if (df_chain_problem_p (DF_DU_CHAIN))
{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
- struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
+ rtx insn;
+ df_ref *def_rec = df_get_artificial_defs (bb->index);
+ if (*def_rec)
+ {
+
+ fprintf (file, ";; DU chains for artificial defs\n");
+ while (*def_rec)
+ {
+ df_ref def = *def_rec;
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
+ df_chain_dump (DF_REF_CHAIN (def), file);
+ fprintf (file, "\n");
+ def_rec++;
+ }
+ }
- if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
+ FOR_BB_INSNS (bb, insn)
{
- if (problem_data->earlyclobbers_found)
- bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
-
-#ifdef STACK_REGS
- /* We can not use the same stack register for uninitialized
- pseudo-register and another living pseudo-register
- because if the uninitialized pseudo-register dies,
- subsequent pass reg-stack will be confused (it will
- believe that the other register dies). */
- bitmap_ior_into (bb_info->in, problem_data->stack_regs);
- bitmap_ior_into (bb_info->out, problem_data->stack_regs);
-#endif
+ if (INSN_P (insn))
+ {
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ def_rec = DF_INSN_INFO_DEFS (insn_info);
+ if (*def_rec)
+ {
+ fprintf (file, ";; DU chains for insn luid %d uid %d\n",
+ DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
+
+ while (*def_rec)
+ {
+ df_ref def = *def_rec;
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
+ if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
+ fprintf (file, "read/write ");
+ df_chain_dump (DF_REF_CHAIN (def), file);
+ fprintf (file, "\n");
+ def_rec++;
+ }
+ }
+ }
}
-
- /* No register may reach a location where it is not used. Thus
- we trim the rr result to the places where it is used. */
- bitmap_and_into (bb_info->in, bb_lr_info->in);
- bitmap_and_into (bb_info->out, bb_lr_info->out);
- bitmap_copy (bb_info->top, bb_info->in);
- if (bb_lr_info->adef)
- bitmap_ior_into (bb_info->top, bb_lr_info->adef);
- bitmap_and_into (bb_info->top, bb_lr_info->top);
-#if 0
- /* Hard registers may still stick in the ur_out set, but not
- be in the ur_in set, if their only mention was in a call
- in this block. This is because a call kills in the lr
- problem but does not kill in the rr problem. To clean
- this up, we execute the transfer function on the lr_in
- set and then use that to knock bits out of ur_out. */
- bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
- bb_info->kill);
- bitmap_and_into (bb_info->out, tmp);
-#endif
}
-
-#ifdef STACK_REGS
- BITMAP_FREE (problem_data->stack_regs);
-#endif
- BITMAP_FREE (tmp);
}
-/* Confluence function that ignores fake edges. */
-
static void
-df_urec_confluence_n (edge e)
-{
- bitmap op1 = df_urec_get_bb_info (e->dest->index)->in;
- bitmap op2 = df_urec_get_bb_info (e->src->index)->out;
-
- if (e->flags & EDGE_FAKE)
- return;
-
- bitmap_ior_into (op1, op2);
-}
-
-
-/* Transfer function. */
-
-static bool
-df_urec_transfer_function (int bb_index)
+df_chain_bottom_dump (basic_block bb, FILE *file)
{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
- bitmap in = bb_info->in;
- bitmap out = bb_info->out;
- bitmap gen = bb_info->gen;
- bitmap kill = bb_info->kill;
-
- return bitmap_ior_and_compl (out, gen, in, kill);
-}
-
+ if (df_chain_problem_p (DF_UD_CHAIN))
+ {
+ rtx insn;
+ df_ref *use_rec = df_get_artificial_uses (bb->index);
-/* Free all storage associated with the problem. */
+ if (*use_rec)
+ {
+ fprintf (file, ";; UD chains for artificial uses\n");
+ while (*use_rec)
+ {
+ df_ref use = *use_rec;
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ use_rec++;
+ }
+ }
-static void
-df_urec_free (void)
-{
- if (df_urec->block_info)
- {
- unsigned int i;
-
- for (i = 0; i < df_urec->block_info_size; i++)
+ FOR_BB_INSNS (bb, insn)
{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (i);
- if (bb_info)
+ if (INSN_P (insn))
{
- BITMAP_FREE (bb_info->gen);
- BITMAP_FREE (bb_info->kill);
- BITMAP_FREE (bb_info->in);
- BITMAP_FREE (bb_info->out);
- BITMAP_FREE (bb_info->earlyclobber);
- BITMAP_FREE (bb_info->top);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
+ use_rec = DF_INSN_INFO_USES (insn_info);
+ if (*use_rec || *eq_use_rec)
+ {
+ fprintf (file, ";; UD chains for insn luid %d uid %d\n",
+ DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
+
+ while (*use_rec)
+ {
+ df_ref use = *use_rec;
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
+ if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
+ fprintf (file, "read/write ");
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ use_rec++;
+ }
+ while (*eq_use_rec)
+ {
+ df_ref use = *eq_use_rec;
+ fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ eq_use_rec++;
+ }
+ }
}
}
-
- free_alloc_pool (df_urec->block_pool);
-
- df_urec->block_info_size = 0;
- free (df_urec->block_info);
- free (df_urec->problem_data);
}
- free (df_urec);
-}
-
-
-/* Debugging info at top of bb. */
-
-static void
-df_urec_top_dump (basic_block bb, FILE *file)
-{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb->index);
- if (!bb_info || !bb_info->in)
- return;
-
- fprintf (file, ";; urec in \t");
- df_print_regset (file, bb_info->in);
- fprintf (file, ";; urec gen \t");
- df_print_regset (file, bb_info->gen);
- fprintf (file, ";; urec kill\t");
- df_print_regset (file, bb_info->kill);
- fprintf (file, ";; urec ec\t");
- df_print_regset (file, bb_info->earlyclobber);
-}
-
-
-/* Debugging info at bottom of bb. */
-
-static void
-df_urec_bottom_dump (basic_block bb, FILE *file)
-{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb->index);
- if (!bb_info || !bb_info->out)
- return;
- fprintf (file, ";; urec out \t");
- df_print_regset (file, bb_info->out);
}
-/* All of the information associated with every instance of the problem. */
-
-static struct df_problem problem_UREC =
+static struct df_problem problem_CHAIN =
{
- DF_UREC, /* Problem id. */
- DF_FORWARD, /* Direction. */
- df_urec_alloc, /* Allocate the problem specific data. */
- NULL, /* Reset global information. */
- df_urec_free_bb_info, /* Free basic block info. */
- df_urec_local_compute, /* Local compute function. */
- df_urec_init, /* Init the solution specific data. */
- df_worklist_dataflow, /* Worklist solver. */
+ DF_CHAIN, /* Problem id. */
+ DF_NONE, /* Direction. */
+ df_chain_alloc, /* Allocate the problem specific data. */
+ df_chain_reset, /* Reset global information. */
+ NULL, /* Free basic block info. */
+ NULL, /* Local compute function. */
+ NULL, /* Init the solution specific data. */
+ NULL, /* Iterative solver. */
NULL, /* Confluence operator 0. */
- df_urec_confluence_n, /* Confluence operator n. */
- df_urec_transfer_function, /* Transfer function. */
- df_urec_local_finalize, /* Finalize function. */
- df_urec_free, /* Free all of the problem information. */
- df_urec_free, /* Remove this problem from the stack of dataflow problems. */
+ NULL, /* Confluence operator n. */
+ NULL, /* Transfer function. */
+ df_chain_finalize, /* Finalize function. */
+ df_chain_free, /* Free all of the problem information. */
+ df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
- df_urec_top_dump, /* Debugging start block. */
- df_urec_bottom_dump, /* Debugging end block. */
+ df_chain_top_dump, /* Debugging start block. */
+ df_chain_bottom_dump, /* Debugging end block. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
- &problem_LR, /* Dependent problem. */
- TV_DF_UREC, /* Timing variable. */
+ &problem_RD, /* Dependent problem. */
+ TV_DF_CHAIN, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
solution. */
void
-df_urec_add_problem (void)
+df_chain_add_problem (enum df_chain_flags chain_flags)
{
- df_add_problem (&problem_UREC);
+ df_add_problem (&problem_CHAIN);
+ df_chain->local_flags = (unsigned int)chain_flags;
+ df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
}
+#undef df_chain_problem_p
\f
/*----------------------------------------------------------------------------
- CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
+ BYTE LEVEL LIVE REGISTERS
- Link either the defs to the uses and / or the uses to the defs.
+ Find the locations in the function where any use of a pseudo can
+ reach in the backwards direction. In and out bitvectors are built
+ for each basic block. There are two mapping functions,
+ df_byte_lr_get_regno_start and df_byte_lr_get_regno_len that are
+ used to map regnos into bit vector positions.
+
+ This problem differs from the regular df_lr function in the way
+ that subregs, *_extracts and strict_low_parts are handled. In lr
+ these are considered partial kills; here, the exact set of bytes is
+ modeled. Note that any reg that has none of these operations is
+ only modeled with a single bit since all operations access the
+ entire register.
+
+ This problem is more brittle than the regular lr. It currently can
+ be used in dce incrementally, but cannot be used in an environment
+ where insns are created or modified. The problem is that the
+ mapping of regnos to bitmap positions is relatively compact, in
+ that if a pseudo does not do any of the byte wise operations, only
+ one slot is allocated, rather than a slot for each byte. If insns
+ are created, where a subreg is used for a reg that had no subregs,
+ the mapping would be wrong. Likewise, there are no checks to see
+ that new pseudos have been added. These issues could be addressed
+ by adding a problem specific flag to not use the compact mapping,
+ if there was a need to do so.
- These problems are set up like the other dataflow problems so that
- they nicely fit into the framework. They are much simpler and only
- involve a single traversal of instructions and an examination of
- the reaching defs information (the dependent problem).
-----------------------------------------------------------------------------*/
+ ----------------------------------------------------------------------------*/
-#define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
+/* Private data used to verify the solution for this problem. */
+struct df_byte_lr_problem_data
+{
+ /* Expanded versions of bitvectors used in lr. */
+ bitmap invalidated_by_call;
+ bitmap hardware_regs_used;
-/* Create a du or ud chain from SRC to DST and link it into SRC. */
+ /* Indexed by regno, this is true if there are subregs, extracts or
+ strict_low_parts for this regno. */
+ bitmap needs_expansion;
-struct df_link *
-df_chain_create (struct df_ref *src, struct df_ref *dst)
+ /* The start position and len for each regno in the various bit
+ vectors. */
+ unsigned int* regno_start;
+ unsigned int* regno_len;
+ /* An obstack for the bitmaps we need for this problem. */
+ bitmap_obstack byte_lr_bitmaps;
+};
+
+
+/* Get the starting location for REGNO in the df_byte_lr bitmaps. */
+
+int
+df_byte_lr_get_regno_start (unsigned int regno)
{
- struct df_link *head = DF_REF_CHAIN (src);
- struct df_link *link = pool_alloc (df_chain->block_pool);;
-
- DF_REF_CHAIN (src) = link;
- link->next = head;
- link->ref = dst;
- return link;
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;;
+ return problem_data->regno_start[regno];
}
-/* Delete any du or ud chains that start at REF and point to
- TARGET. */
+/* Get the len for REGNO in the df_byte_lr bitmaps. */
+
+int
+df_byte_lr_get_regno_len (unsigned int regno)
+{
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;;
+ return problem_data->regno_len[regno];
+}
+
+
+/* Set basic block info. */
+
static void
-df_chain_unlink_1 (struct df_ref *ref, struct df_ref *target)
+df_byte_lr_set_bb_info (unsigned int index,
+ struct df_byte_lr_bb_info *bb_info)
{
- struct df_link *chain = DF_REF_CHAIN (ref);
- struct df_link *prev = NULL;
+ gcc_assert (df_byte_lr);
+ gcc_assert (index < df_byte_lr->block_info_size);
+ df_byte_lr->block_info[index] = bb_info;
+}
- while (chain)
+
+/* Free basic block info. */
+
+static void
+df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
+ void *vbb_info)
+{
+ struct df_byte_lr_bb_info *bb_info = (struct df_byte_lr_bb_info *) vbb_info;
+ if (bb_info)
{
- if (chain->ref == target)
- {
- if (prev)
- prev->next = chain->next;
- else
- DF_REF_CHAIN (ref) = chain->next;
- pool_free (df_chain->block_pool, chain);
- return;
- }
- prev = chain;
- chain = chain->next;
+ BITMAP_FREE (bb_info->use);
+ BITMAP_FREE (bb_info->def);
+ BITMAP_FREE (bb_info->in);
+ BITMAP_FREE (bb_info->out);
+ pool_free (df_byte_lr->block_pool, bb_info);
}
}
-/* Delete a du or ud chain that leave or point to REF. */
+/* Check all of the refs in REF_REC to see if any of them are
+ extracts, subregs or strict_low_parts. */
-void
-df_chain_unlink (struct df_ref *ref)
+static void
+df_byte_lr_check_regs (df_ref *ref_rec)
{
- struct df_link *chain = DF_REF_CHAIN (ref);
- while (chain)
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
+
+ for (; *ref_rec; ref_rec++)
{
- struct df_link *next = chain->next;
- /* Delete the other side if it exists. */
- df_chain_unlink_1 (chain->ref, ref);
- pool_free (df_chain->block_pool, chain);
- chain = next;
+ df_ref ref = *ref_rec;
+ if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT
+ | DF_REF_ZERO_EXTRACT
+ | DF_REF_STRICT_LOW_PART)
+ || GET_CODE (DF_REF_REG (ref)) == SUBREG)
+ bitmap_set_bit (problem_data->needs_expansion, DF_REF_REGNO (ref));
}
- DF_REF_CHAIN (ref) = NULL;
}
-/* Copy the du or ud chain starting at FROM_REF and attach it to
- TO_REF. */
+/* Expand bitmap SRC which is indexed by regno to DEST which is indexed by
+ regno_start and regno_len. */
-void
-df_chain_copy (struct df_ref *to_ref,
- struct df_link *from_ref)
+static void
+df_byte_lr_expand_bitmap (bitmap dest, bitmap src)
{
- while (from_ref)
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
+ bitmap_iterator bi;
+ unsigned int i;
+
+ bitmap_clear (dest);
+ EXECUTE_IF_SET_IN_BITMAP (src, 0, i, bi)
{
- df_chain_create (to_ref, from_ref->ref);
- from_ref = from_ref->next;
+ bitmap_set_range (dest, problem_data->regno_start[i],
+ problem_data->regno_len[i]);
}
}
-/* Remove this problem from the stack of dataflow problems. */
+/* Allocate or reset bitmaps for DF_BYTE_LR blocks. The solution bits are
+ not touched unless the block is new. */
-static void
-df_chain_remove_problem (void)
+static void
+df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
+ unsigned int bb_index;
bitmap_iterator bi;
+ basic_block bb;
+ unsigned int regno;
+ unsigned int index = 0;
+ unsigned int max_reg = max_reg_num ();
+ struct df_byte_lr_problem_data *problem_data
+ = XNEW (struct df_byte_lr_problem_data);
+
+ df_byte_lr->problem_data = problem_data;
+
+ if (!df_byte_lr->block_pool)
+ df_byte_lr->block_pool = create_alloc_pool ("df_byte_lr_block pool",
+ sizeof (struct df_byte_lr_bb_info), 50);
+
+ df_grow_bb_info (df_byte_lr);
+
+ /* Create the mapping from regnos to slots. This does not change
+ unless the problem is destroyed and recreated. In particular, if
+ we end up deleting the only insn that used a subreg, we do not
+ want to redo the mapping because this would invalidate everything
+ else. */
+
+ bitmap_obstack_initialize (&problem_data->byte_lr_bitmaps);
+ problem_data->regno_start = XNEWVEC (unsigned int, max_reg);
+ problem_data->regno_len = XNEWVEC (unsigned int, max_reg);
+ problem_data->hardware_regs_used = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
+ problem_data->invalidated_by_call = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
+ problem_data->needs_expansion = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
+
+ /* Discover which regnos use subregs, extracts or
+ strict_low_parts. */
+ FOR_EACH_BB (bb)
+ {
+ rtx insn;
+ FOR_BB_INSNS (bb, insn)
+ {
+ if (INSN_P (insn))
+ {
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_byte_lr_check_regs (DF_INSN_INFO_DEFS (insn_info));
+ df_byte_lr_check_regs (DF_INSN_INFO_USES (insn_info));
+ }
+ }
+ bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, bb->index);
+ }
+
+ bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
+ bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, EXIT_BLOCK);
+
+ /* Allocate the slots for each regno. */
+ for (regno = 0; regno < max_reg; regno++)
+ {
+ int len;
+ problem_data->regno_start[regno] = index;
+ if (bitmap_bit_p (problem_data->needs_expansion, regno))
+ len = GET_MODE_SIZE (GET_MODE (regno_reg_rtx[regno]));
+ else
+ len = 1;
+
+ problem_data->regno_len[regno] = len;
+ index += len;
+ }
+
+ df_byte_lr_expand_bitmap (problem_data->hardware_regs_used,
+ df->hardware_regs_used);
+ df_byte_lr_expand_bitmap (problem_data->invalidated_by_call,
+ regs_invalidated_by_call_regset);
+
+ EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi)
+ {
+ struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
+ if (bb_info)
+ {
+ bitmap_clear (bb_info->def);
+ bitmap_clear (bb_info->use);
+ }
+ else
+ {
+ bb_info = (struct df_byte_lr_bb_info *) pool_alloc (df_byte_lr->block_pool);
+ df_byte_lr_set_bb_info (bb_index, bb_info);
+ bb_info->use = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
+ bb_info->def = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
+ bb_info->in = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
+ bb_info->out = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
+ }
+ }
+
+ df_byte_lr->optional_p = true;
+}
+
+
+/* Reset the global solution for recalculation. */
+
+static void
+df_byte_lr_reset (bitmap all_blocks)
+{
unsigned int bb_index;
+ bitmap_iterator bi;
- /* Wholesale destruction of the old chains. */
- if (df_chain->block_pool)
- free_alloc_pool (df_chain->block_pool);
+ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
+ {
+ struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
+ gcc_assert (bb_info);
+ bitmap_clear (bb_info->in);
+ bitmap_clear (bb_info->out);
+ }
+}
- EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
+
+/* Compute local live register info for basic block BB. */
+
+static void
+df_byte_lr_bb_local_compute (unsigned int bb_index)
+{
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
+ basic_block bb = BASIC_BLOCK (bb_index);
+ struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
+ rtx insn;
+ df_ref *def_rec;
+ df_ref *use_rec;
+
+ /* Process the registers set in an exception handler. */
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
- basic_block bb = BASIC_BLOCK (bb_index);
+ df_ref def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ unsigned int start = problem_data->regno_start[dregno];
+ unsigned int len = problem_data->regno_len[dregno];
+ bitmap_set_range (bb_info->def, start, len);
+ bitmap_clear_range (bb_info->use, start, len);
+ }
+ }
- if (df_chain_problem_p (DF_DU_CHAIN))
- for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
- DF_REF_CHAIN (*def_rec) = NULL;
- if (df_chain_problem_p (DF_UD_CHAIN))
- for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
- DF_REF_CHAIN (*use_rec) = NULL;
-
- FOR_BB_INSNS (bb, insn)
+ /* Process the hardware registers that are always live. */
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ df_ref use = *use_rec;
+ /* Add use to set of uses in this BB. */
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
- unsigned int uid = INSN_UID (insn);
-
- if (INSN_P (insn))
+ unsigned int uregno = DF_REF_REGNO (use);
+ unsigned int start = problem_data->regno_start[uregno];
+ unsigned int len = problem_data->regno_len[uregno];
+ bitmap_set_range (bb_info->use, start, len);
+ }
+ }
+
+ FOR_BB_INSNS_REVERSE (bb, insn)
+ {
+ unsigned int uid = INSN_UID (insn);
+
+ if (!INSN_P (insn))
+ continue;
+
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
{
- if (df_chain_problem_p (DF_DU_CHAIN))
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- DF_REF_CHAIN (*def_rec) = NULL;
- if (df_chain_problem_p (DF_UD_CHAIN))
+ unsigned int dregno = DF_REF_REGNO (def);
+ unsigned int start = problem_data->regno_start[dregno];
+ unsigned int len = problem_data->regno_len[dregno];
+ unsigned int sb;
+ unsigned int lb;
+ if (!df_compute_accessed_bytes (def, DF_MM_MUST, &sb, &lb))
{
- for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
- DF_REF_CHAIN (*use_rec) = NULL;
- for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
- DF_REF_CHAIN (*use_rec) = NULL;
+ start += sb;
+ len = lb - sb;
+ }
+ if (len)
+ {
+ bitmap_set_range (bb_info->def, start, len);
+ bitmap_clear_range (bb_info->use, start, len);
}
}
}
+
+ for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ {
+ df_ref use = *use_rec;
+ unsigned int uregno = DF_REF_REGNO (use);
+ unsigned int start = problem_data->regno_start[uregno];
+ unsigned int len = problem_data->regno_len[uregno];
+ unsigned int sb;
+ unsigned int lb;
+ if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb))
+ {
+ start += sb;
+ len = lb - sb;
+ }
+ /* Add use to set of uses in this BB. */
+ if (len)
+ bitmap_set_range (bb_info->use, start, len);
+ }
+ }
+
+ /* Process the registers set in an exception handler or the hard
+ frame pointer if this block is the target of a non local
+ goto. */
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ unsigned int start = problem_data->regno_start[dregno];
+ unsigned int len = problem_data->regno_len[dregno];
+ bitmap_set_range (bb_info->def, start, len);
+ bitmap_clear_range (bb_info->use, start, len);
+ }
+ }
+
+#ifdef EH_USES
+ /* Process the uses that are live into an exception handler. */
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ df_ref use = *use_rec;
+ /* Add use to set of uses in this BB. */
+ if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
+ {
+ unsigned int uregno = DF_REF_REGNO (use);
+ unsigned int start = problem_data->regno_start[uregno];
+ unsigned int len = problem_data->regno_len[uregno];
+ bitmap_set_range (bb_info->use, start, len);
+ }
+ }
+#endif
+}
+
+
+/* Compute local live register info for each basic block within BLOCKS. */
+
+static void
+df_byte_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
+{
+ unsigned int bb_index;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi)
+ {
+ if (bb_index == EXIT_BLOCK)
+ {
+ /* The exit block is special for this problem and its bits are
+ computed from thin air. */
+ struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (EXIT_BLOCK);
+ df_byte_lr_expand_bitmap (bb_info->use, df->exit_block_uses);
+ }
+ else
+ df_byte_lr_bb_local_compute (bb_index);
}
- bitmap_clear (df_chain->out_of_date_transfer_functions);
- df_chain->block_pool = NULL;
+ bitmap_clear (df_byte_lr->out_of_date_transfer_functions);
}
-/* Remove the chain problem completely. */
+/* Initialize the solution vectors. */
-static void
-df_chain_fully_remove_problem (void)
+static void
+df_byte_lr_init (bitmap all_blocks)
{
- df_chain_remove_problem ();
- BITMAP_FREE (df_chain->out_of_date_transfer_functions);
- free (df_chain);
-}
+ unsigned int bb_index;
+ bitmap_iterator bi;
+ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
+ {
+ struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
+ bitmap_copy (bb_info->in, bb_info->use);
+ bitmap_clear (bb_info->out);
+ }
+}
-/* Create def-use or use-def chains. */
-static void
-df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
+/* Confluence function that processes infinite loops. This might be a
+ noreturn function that throws. And even if it isn't, getting the
+ unwind info right helps debugging. */
+static void
+df_byte_lr_confluence_0 (basic_block bb)
{
- df_chain_remove_problem ();
- df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
- sizeof (struct df_link), 50);
- df_chain->optional_p = true;
-}
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
+ bitmap op1 = df_byte_lr_get_bb_info (bb->index)->out;
+ if (bb != EXIT_BLOCK_PTR)
+ bitmap_copy (op1, problem_data->hardware_regs_used);
+}
-/* Reset all of the chains when the set of basic blocks changes. */
+/* Confluence function that ignores fake edges. */
static void
-df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
+df_byte_lr_confluence_n (edge e)
{
- df_chain_remove_problem ();
-}
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
+ bitmap op1 = df_byte_lr_get_bb_info (e->src->index)->out;
+ bitmap op2 = df_byte_lr_get_bb_info (e->dest->index)->in;
+
+ /* Call-clobbered registers die across exception and call edges. */
+ /* ??? Abnormal call edges ignored for the moment, as this gets
+ confused by sibling call edges, which crashes reg-stack. */
+ if (e->flags & EDGE_EH)
+ bitmap_ior_and_compl_into (op1, op2, problem_data->invalidated_by_call);
+ else
+ bitmap_ior_into (op1, op2);
+ bitmap_ior_into (op1, problem_data->hardware_regs_used);
+}
-/* Create the chains for a list of USEs. */
-static void
-df_chain_create_bb_process_use (bitmap local_rd,
- struct df_ref **use_rec,
- enum df_ref_flags top_flag)
+/* Transfer function. */
+
+static bool
+df_byte_lr_transfer_function (int bb_index)
{
- bitmap_iterator bi;
- unsigned int def_index;
-
- while (*use_rec)
- {
- struct df_ref *use = *use_rec;
- unsigned int uregno = DF_REF_REGNO (use);
- if ((!(df->changeable_flags & DF_NO_HARD_REGS))
- || (uregno >= FIRST_PSEUDO_REGISTER))
- {
- /* Do not want to go through this for an uninitialized var. */
- int count = DF_DEFS_COUNT (uregno);
- if (count)
- {
- if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
- {
- unsigned int first_index = DF_DEFS_BEGIN (uregno);
- unsigned int last_index = first_index + count - 1;
-
- EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
- {
- struct df_ref *def;
- if (def_index > last_index)
- break;
-
- def = DF_DEFS_GET (def_index);
- if (df_chain_problem_p (DF_DU_CHAIN))
- df_chain_create (def, use);
- if (df_chain_problem_p (DF_UD_CHAIN))
- df_chain_create (use, def);
- }
- }
- }
- }
+ struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
+ bitmap in = bb_info->in;
+ bitmap out = bb_info->out;
+ bitmap use = bb_info->use;
+ bitmap def = bb_info->def;
- use_rec++;
- }
+ return bitmap_ior_and_compl (in, use, out, def);
}
-/* Create chains from reaching defs bitmaps for basic block BB. */
+/* Free all storage associated with the problem. */
static void
-df_chain_create_bb (unsigned int bb_index)
+df_byte_lr_free (void)
{
- basic_block bb = BASIC_BLOCK (bb_index);
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
- rtx insn;
- bitmap cpy = BITMAP_ALLOC (NULL);
- struct df_ref **def_rec;
-
- bitmap_copy (cpy, bb_info->in);
- bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
- /* Since we are going forwards, process the artificial uses first
- then the artificial defs second. */
-
-#ifdef EH_USES
- /* Create the chains for the artificial uses from the EH_USES at the
- beginning of the block. */
-
- /* Artificials are only hard regs. */
- if (!(df->changeable_flags & DF_NO_HARD_REGS))
- df_chain_create_bb_process_use (cpy,
- df_get_artificial_uses (bb->index),
- DF_REF_AT_TOP);
-#endif
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ if (df_byte_lr->block_info)
{
- struct df_ref *def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- {
- unsigned int dregno = DF_REF_REGNO (def);
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_range (cpy,
- DF_DEFS_BEGIN (dregno),
- DF_DEFS_COUNT (dregno));
- bitmap_set_bit (cpy, DF_REF_ID (def));
- }
+ free_alloc_pool (df_byte_lr->block_pool);
+ df_byte_lr->block_info_size = 0;
+ free (df_byte_lr->block_info);
}
-
- /* Process the regular instructions next. */
- FOR_BB_INSNS (bb, insn)
- {
- struct df_ref **def_rec;
- unsigned int uid = INSN_UID (insn);
-
- if (!INSN_P (insn))
- continue;
-
- /* Now scan the uses and link them up with the defs that remain
- in the cpy vector. */
-
- df_chain_create_bb_process_use (cpy, DF_INSN_UID_USES (uid), 0);
- if (df->changeable_flags & DF_EQ_NOTES)
- df_chain_create_bb_process_use (cpy, DF_INSN_UID_EQ_USES (uid), 0);
+ BITMAP_FREE (df_byte_lr->out_of_date_transfer_functions);
+ bitmap_obstack_release (&problem_data->byte_lr_bitmaps);
+ free (problem_data->regno_start);
+ free (problem_data->regno_len);
+ free (problem_data);
+ free (df_byte_lr);
+}
- /* Since we are going forwards, process the defs second. This
- pass only changes the bits in cpy. */
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- unsigned int dregno = DF_REF_REGNO (def);
- if ((!(df->changeable_flags & DF_NO_HARD_REGS))
- || (dregno >= FIRST_PSEUDO_REGISTER))
- {
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_range (cpy,
- DF_DEFS_BEGIN (dregno),
- DF_DEFS_COUNT (dregno));
- if (!(DF_REF_FLAGS (def)
- & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (cpy, DF_REF_ID (def));
- }
- }
- }
+/* Debugging info at top of bb. */
- /* Create the chains for the artificial uses of the hard registers
- at the end of the block. */
- if (!(df->changeable_flags & DF_NO_HARD_REGS))
- df_chain_create_bb_process_use (cpy,
- df_get_artificial_uses (bb->index),
- 0);
+static void
+df_byte_lr_top_dump (basic_block bb, FILE *file)
+{
+ struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index);
+ if (!bb_info || !bb_info->in)
+ return;
+
+ fprintf (file, ";; blr in \t");
+ df_print_byte_regset (file, bb_info->in);
+ fprintf (file, ";; blr use \t");
+ df_print_byte_regset (file, bb_info->use);
+ fprintf (file, ";; blr def \t");
+ df_print_byte_regset (file, bb_info->def);
+}
- BITMAP_FREE (cpy);
-}
-/* Create def-use chains from reaching use bitmaps for basic blocks
- in BLOCKS. */
+/* Debugging info at bottom of bb. */
static void
-df_chain_finalize (bitmap all_blocks)
+df_byte_lr_bottom_dump (basic_block bb, FILE *file)
{
- unsigned int bb_index;
- bitmap_iterator bi;
+ struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index);
+ if (!bb_info || !bb_info->out)
+ return;
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- df_chain_create_bb (bb_index);
- }
-}
+ fprintf (file, ";; blr out \t");
+ df_print_byte_regset (file, bb_info->out);
+}
-/* Free all storage associated with the problem. */
+/* All of the information associated with every instance of the problem. */
-static void
-df_chain_free (void)
+static struct df_problem problem_BYTE_LR =
+{
+ DF_BYTE_LR, /* Problem id. */
+ DF_BACKWARD, /* Direction. */
+ df_byte_lr_alloc, /* Allocate the problem specific data. */
+ df_byte_lr_reset, /* Reset global information. */
+ df_byte_lr_free_bb_info, /* Free basic block info. */
+ df_byte_lr_local_compute, /* Local compute function. */
+ df_byte_lr_init, /* Init the solution specific data. */
+ df_worklist_dataflow, /* Worklist solver. */
+ df_byte_lr_confluence_0, /* Confluence operator 0. */
+ df_byte_lr_confluence_n, /* Confluence operator n. */
+ df_byte_lr_transfer_function, /* Transfer function. */
+ NULL, /* Finalize function. */
+ df_byte_lr_free, /* Free all of the problem information. */
+ df_byte_lr_free, /* Remove this problem from the stack of dataflow problems. */
+ NULL, /* Debugging. */
+ df_byte_lr_top_dump, /* Debugging start block. */
+ df_byte_lr_bottom_dump, /* Debugging end block. */
+ NULL, /* Incremental solution verify start. */
+ NULL, /* Incremental solution verify end. */
+ NULL, /* Dependent problem. */
+ TV_DF_BYTE_LR, /* Timing variable. */
+ false /* Reset blocks on dropping out of blocks_to_analyze. */
+};
+
+
+/* Create a new DATAFLOW instance and add it to an existing instance
+ of DF. The returned structure is what is used to get at the
+ solution. */
+
+void
+df_byte_lr_add_problem (void)
{
- free_alloc_pool (df_chain->block_pool);
- BITMAP_FREE (df_chain->out_of_date_transfer_functions);
- free (df_chain);
+ df_add_problem (&problem_BYTE_LR);
+ /* These will be initialized when df_scan_blocks processes each
+ block. */
+ df_byte_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
}
-/* Debugging info. */
+/* Simulate the effects of the defs of INSN on LIVE. */
-static void
-df_chain_top_dump (basic_block bb, FILE *file)
+void
+df_byte_lr_simulate_defs (rtx insn, bitmap live)
{
- if (df_chain_problem_p (DF_DU_CHAIN))
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
+ df_ref *def_rec;
+ unsigned int uid = INSN_UID (insn);
+
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- rtx insn;
- struct df_ref **def_rec = df_get_artificial_defs (bb->index);
- if (*def_rec)
- {
-
- fprintf (file, ";; DU chains for artificial defs\n");
- while (*def_rec)
- {
- struct df_ref *def = *def_rec;
- fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
- df_chain_dump (DF_REF_CHAIN (def), file);
- fprintf (file, "\n");
- def_rec++;
- }
- }
+ df_ref def = *def_rec;
- FOR_BB_INSNS (bb, insn)
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & DF_REF_CONDITIONAL))
{
- unsigned int uid = INSN_UID (insn);
- if (INSN_P (insn))
+ unsigned int dregno = DF_REF_REGNO (def);
+ unsigned int start = problem_data->regno_start[dregno];
+ unsigned int len = problem_data->regno_len[dregno];
+ unsigned int sb;
+ unsigned int lb;
+ if (!df_compute_accessed_bytes (def, DF_MM_MUST, &sb, &lb))
{
- def_rec = DF_INSN_UID_DEFS (uid);
- if (*def_rec)
- {
- fprintf (file, ";; DU chains for insn luid %d uid %d\n",
- DF_INSN_LUID (insn), uid);
-
- while (*def_rec)
- {
- struct df_ref *def = *def_rec;
- fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
- if (def->flags & DF_REF_READ_WRITE)
- fprintf (file, "read/write ");
- df_chain_dump (DF_REF_CHAIN (def), file);
- fprintf (file, "\n");
- def_rec++;
- }
- }
+ start += sb;
+ len = lb - sb;
}
+
+ if (len)
+ bitmap_clear_range (live, start, len);
+ }
+ }
+}
+
+
+/* Simulate the effects of the uses of INSN on LIVE. */
+
+void
+df_byte_lr_simulate_uses (rtx insn, bitmap live)
+{
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
+ df_ref *use_rec;
+ unsigned int uid = INSN_UID (insn);
+
+ for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ {
+ df_ref use = *use_rec;
+ unsigned int uregno = DF_REF_REGNO (use);
+ unsigned int start = problem_data->regno_start[uregno];
+ unsigned int len = problem_data->regno_len[uregno];
+ unsigned int sb;
+ unsigned int lb;
+
+ if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb))
+ {
+ start += sb;
+ len = lb - sb;
}
+
+ /* Add use to set of uses in this BB. */
+ if (len)
+ bitmap_set_range (live, start, len);
}
}
-static void
-df_chain_bottom_dump (basic_block bb, FILE *file)
+/* Apply the artificial uses and defs at the top of BB in a forwards
+ direction. */
+
+void
+df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
{
- if (df_chain_problem_p (DF_UD_CHAIN))
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
+ df_ref *def_rec;
+#ifdef EH_USES
+ df_ref *use_rec;
+#endif
+ int bb_index = bb->index;
+
+#ifdef EH_USES
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- rtx insn;
- struct df_ref **use_rec = df_get_artificial_uses (bb->index);
-
- if (*use_rec)
+ df_ref use = *use_rec;
+ if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
{
- fprintf (file, ";; UD chains for artificial uses\n");
- while (*use_rec)
- {
- struct df_ref *use = *use_rec;
- fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
- df_chain_dump (DF_REF_CHAIN (use), file);
- fprintf (file, "\n");
- use_rec++;
- }
- }
+ unsigned int uregno = DF_REF_REGNO (use);
+ unsigned int start = problem_data->regno_start[uregno];
+ unsigned int len = problem_data->regno_len[uregno];
+ bitmap_set_range (live, start, len);
+ }
+ }
+#endif
- FOR_BB_INSNS (bb, insn)
- {
- unsigned int uid = INSN_UID (insn);
- if (INSN_P (insn))
- {
- struct df_ref **eq_use_rec = DF_INSN_UID_EQ_USES (uid);
- use_rec = DF_INSN_UID_USES (uid);
- if (*use_rec || *eq_use_rec)
- {
- fprintf (file, ";; UD chains for insn luid %d uid %d\n",
- DF_INSN_LUID (insn), uid);
-
- while (*use_rec)
- {
- struct df_ref *use = *use_rec;
- fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
- if (use->flags & DF_REF_READ_WRITE)
- fprintf (file, "read/write ");
- df_chain_dump (DF_REF_CHAIN (use), file);
- fprintf (file, "\n");
- use_rec++;
- }
- while (*eq_use_rec)
- {
- struct df_ref *use = *eq_use_rec;
- fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
- df_chain_dump (DF_REF_CHAIN (use), file);
- fprintf (file, "\n");
- eq_use_rec++;
- }
- }
- }
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ unsigned int start = problem_data->regno_start[dregno];
+ unsigned int len = problem_data->regno_len[dregno];
+ bitmap_clear_range (live, start, len);
}
}
}
-static struct df_problem problem_CHAIN =
-{
- DF_CHAIN, /* Problem id. */
- DF_NONE, /* Direction. */
- df_chain_alloc, /* Allocate the problem specific data. */
- df_chain_reset, /* Reset global information. */
- NULL, /* Free basic block info. */
- NULL, /* Local compute function. */
- NULL, /* Init the solution specific data. */
- NULL, /* Iterative solver. */
- NULL, /* Confluence operator 0. */
- NULL, /* Confluence operator n. */
- NULL, /* Transfer function. */
- df_chain_finalize, /* Finalize function. */
- df_chain_free, /* Free all of the problem information. */
- df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
- NULL, /* Debugging. */
- df_chain_top_dump, /* Debugging start block. */
- df_chain_bottom_dump, /* Debugging end block. */
- NULL, /* Incremental solution verify start. */
- NULL, /* Incremental solution verify end. */
- &problem_RD, /* Dependent problem. */
- TV_DF_CHAIN, /* Timing variable. */
- false /* Reset blocks on dropping out of blocks_to_analyze. */
-};
-
-
-/* Create a new DATAFLOW instance and add it to an existing instance
- of DF. The returned structure is what is used to get at the
- solution. */
+/* Apply the artificial uses and defs at the end of BB in a backwards
+ direction. */
-void
-df_chain_add_problem (enum df_chain_flags chain_flags)
+void
+df_byte_lr_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
{
- df_add_problem (&problem_CHAIN);
- df_chain->local_flags = (unsigned int)chain_flags;
- df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
+ struct df_byte_lr_problem_data *problem_data
+ = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
+ df_ref *def_rec;
+ df_ref *use_rec;
+ int bb_index = bb->index;
+
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ unsigned int start = problem_data->regno_start[dregno];
+ unsigned int len = problem_data->regno_len[dregno];
+ bitmap_clear_range (live, start, len);
+ }
+ }
+
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ df_ref use = *use_rec;
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ {
+ unsigned int uregno = DF_REF_REGNO (use);
+ unsigned int start = problem_data->regno_start[uregno];
+ unsigned int len = problem_data->regno_len[uregno];
+ bitmap_set_range (live, start, len);
+ }
+ }
}
-#undef df_chain_problem_p
\f
/*----------------------------------------------------------------------------
- This pass computes REG_DEAD and REG_UNUSED notes.
+ This problem computes REG_DEAD and REG_UNUSED notes.
----------------------------------------------------------------------------*/
static void
static inline rtx
df_set_note (enum reg_note note_type, rtx insn, rtx old, rtx reg)
{
- rtx this = old;
+ rtx curr = old;
rtx prev = NULL;
- while (this)
- if (XEXP (this, 0) == reg)
+ while (curr)
+ if (XEXP (curr, 0) == reg)
{
if (prev)
- XEXP (prev, 1) = XEXP (this, 1);
+ XEXP (prev, 1) = XEXP (curr, 1);
else
- old = XEXP (this, 1);
- XEXP (this, 1) = REG_NOTES (insn);
- REG_NOTES (insn) = this;
+ old = XEXP (curr, 1);
+ XEXP (curr, 1) = REG_NOTES (insn);
+ REG_NOTES (insn) = curr;
return old;
}
else
{
- prev = this;
- this = XEXP (this, 1);
+ prev = curr;
+ curr = XEXP (curr, 1);
}
/* Did not find the note. */
- REG_NOTES (insn) = alloc_EXPR_LIST (note_type, reg, REG_NOTES (insn));
+ add_reg_note (insn, note_type, reg);
return old;
}
}
-/* Create a REG_UNUSED note if necessary for DEF in INSN updating LIVE
- and DO_NOT_GEN. Do not generate notes for registers in artificial
- uses. */
+/* Create a REG_UNUSED note if necessary for DEF in INSN updating
+ LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
static rtx
-df_create_unused_note (rtx insn, rtx old, struct df_ref *def,
- bitmap live, bitmap do_not_gen, bitmap artificial_uses)
+df_create_unused_note (rtx insn, rtx old, df_ref def,
+ bitmap live, bitmap artificial_uses)
{
unsigned int dregno = DF_REF_REGNO (def);
#endif
}
- if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER + DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (do_not_gen, dregno);
-
- /* Kill this register if it is not a subreg store or conditional store. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, dregno);
return old;
}
static void
df_note_bb_compute (unsigned int bb_index,
- bitmap live, bitmap do_not_gen, bitmap artificial_uses)
+ bitmap live, bitmap do_not_gen, bitmap artificial_uses)
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
bitmap_copy (live, df_get_live_out (bb));
bitmap_clear (artificial_uses);
to begin processing. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
#ifdef REG_DEAD_DEBUGGING
if (dump_file)
fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
unsigned int regno = DF_REF_REGNO (use);
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
- if ((mws->type == DF_REF_REG_DEF)
- && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
+ if ((DF_MWS_REG_DEF_P (mws))
+ && !df_ignore_stack_reg (mws->start_regno))
old_unused_notes
= df_set_unused_notes_for_mw (insn, old_unused_notes,
mws, live, do_not_gen,
clobber. This code is for the return. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
- if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- old_unused_notes
- = df_create_unused_note (insn, old_unused_notes,
- def, live, do_not_gen,
- artificial_uses);
-
- /* However a may or must clobber still needs to kill the
- reg so that REG_DEAD notes are later placed
- appropriately. */
- else
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ df_ref def = *def_rec;
+ unsigned int dregno = DF_REF_REGNO (def);
+ if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
+ {
+ old_unused_notes
+ = df_create_unused_note (insn, old_unused_notes,
+ def, live, artificial_uses);
+ bitmap_set_bit (do_not_gen, dregno);
+ }
+
+ if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
+ bitmap_clear_bit (live, dregno);
}
}
else
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
- if (mws->type == DF_REF_REG_DEF)
+ if (DF_MWS_REG_DEF_P (mws))
old_unused_notes
= df_set_unused_notes_for_mw (insn, old_unused_notes,
mws, live, do_not_gen,
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
+ unsigned int dregno = DF_REF_REGNO (def);
old_unused_notes
= df_create_unused_note (insn, old_unused_notes,
- def, live, do_not_gen,
- artificial_uses);
+ def, live, artificial_uses);
+
+ if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
+ bitmap_set_bit (do_not_gen, dregno);
+
+ if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
+ bitmap_clear_bit (live, dregno);
}
}
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
- if ((mws->type != DF_REF_REG_DEF)
- && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
+ if ((!DF_MWS_REG_DEF_P (mws))
+ && !df_ignore_stack_reg (mws->start_regno))
old_dead_notes
= df_set_dead_notes_for_mw (insn, old_dead_notes,
mws, live, do_not_gen,
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
#ifdef REG_DEAD_DEBUGGING
NULL, /* Debugging end block. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
-
- /* Technically this is only dependent on the live registers problem
- but it will produce information if built one of uninitialized
- register problems (UR, UREC) is also run. */
&problem_LR, /* Dependent problem. */
TV_DF_NOTE, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
DF_LR_IN. If you start at the bottom of the block use one of
DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
THEY WILL BE DESTROYED.
-
----------------------------------------------------------------------------*/
void
df_simulate_find_defs (rtx insn, bitmap defs)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
- if (CALL_P (insn))
- {
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- unsigned int dregno = DF_REF_REGNO (def);
-
- if (DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
- {
- if (dregno >= FIRST_PSEUDO_REGISTER
- || !(SIBLING_CALL_P (insn)
- && bitmap_bit_p (df->exit_block_uses, dregno)
- && !refers_to_regno_p (dregno, dregno+1,
- current_function_return_rtx,
- (rtx *)0)))
- {
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_set_bit (defs, dregno);
- }
- }
- else
- /* This is the return value. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_set_bit (defs, dregno);
- }
- }
- else
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_set_bit (defs, DF_REF_REGNO (def));
- }
+ df_ref def = *def_rec;
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_set_bit (defs, DF_REF_REGNO (def));
}
}
void
df_simulate_defs (rtx insn, bitmap live)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
- if (CALL_P (insn))
- {
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- unsigned int dregno = DF_REF_REGNO (def);
-
- if (DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
- {
- if (dregno >= FIRST_PSEUDO_REGISTER
- || !(SIBLING_CALL_P (insn)
- && bitmap_bit_p (df->exit_block_uses, dregno)
- && !refers_to_regno_p (dregno, dregno+1,
- current_function_return_rtx,
- (rtx *)0)))
- {
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, dregno);
- }
- }
- else
- /* This is the return value. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, dregno);
- }
- }
- else
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- unsigned int dregno = DF_REF_REGNO (def);
-
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, dregno);
- }
+ df_ref def = *def_rec;
+ unsigned int dregno = DF_REF_REGNO (def);
+
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_bit (live, dregno);
}
}
void
df_simulate_uses (rtx insn, bitmap live)
{
- struct df_ref **use_rec;
+ df_ref *use_rec;
unsigned int uid = INSN_UID (insn);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
bitmap_set_bit (live, DF_REF_REGNO (use));
}
{
/* These regs are considered always live so if they end up dying
because of some def, we need to bring the back again. */
- if (df_has_eh_preds (bb))
+ if (bb_has_eh_pred (bb))
bitmap_ior_into (live, df->eh_block_artificial_uses);
else
bitmap_ior_into (live, df->regular_block_artificial_uses);
}
-/* Apply the artificial uses and defs at the top of BB in a forwards
+/*----------------------------------------------------------------------------
+ The following three functions are used only for BACKWARDS scanning:
+ i.e. they process the defs before the uses.
+
+ df_simulate_initialize_backwards should be called first with a
+ bitvector copied from the DF_LIVE_OUT or DF_LR_OUT. Then
+ df_simulate_one_insn_backwards should be called for each insn in
+ the block, starting with the last one. Finally,
+ df_simulate_finalize_backwards can be called to get a new value
+ of the sets at the top of the block (this is rarely used).
+ ----------------------------------------------------------------------------*/
+
+/* Apply the artificial uses and defs at the end of BB in a backwards
direction. */
void
-df_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
+df_simulate_initialize_backwards (basic_block bb, bitmap live)
{
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
int bb_index = bb->index;
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *use = *use_rec;
- if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
- bitmap_set_bit (live, DF_REF_REGNO (use));
+ df_ref def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
}
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ df_ref use = *use_rec;
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ bitmap_set_bit (live, DF_REF_REGNO (use));
}
}
-/* Simulate the forwards effects of INSN on the bitmap LIVE. */
+/* Simulate the backwards effects of INSN on the bitmap LIVE. */
void
-df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
+df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
{
if (! INSN_P (insn))
return;
- df_simulate_uses (insn, live);
df_simulate_defs (insn, live);
+ df_simulate_uses (insn, live);
df_simulate_fixup_sets (bb, live);
}
-/* Apply the artificial uses and defs at the end of BB in a backwards
+/* Apply the artificial uses and defs at the top of BB in a backwards
direction. */
void
-df_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
+df_simulate_finalize_backwards (basic_block bb, bitmap live)
{
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+#ifdef EH_USES
+ df_ref *use_rec;
+#endif
int bb_index = bb->index;
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ df_ref def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
bitmap_clear_bit (live, DF_REF_REGNO (def));
}
+#ifdef EH_USES
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
- if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ df_ref use = *use_rec;
+ if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
bitmap_set_bit (live, DF_REF_REGNO (use));
}
+#endif
}
+/*----------------------------------------------------------------------------
+ The following three functions are used only for FORWARDS scanning:
+ i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
+ Thus it is important to add the DF_NOTES problem to the stack of
+ problems computed before using these functions.
+
+ df_simulate_initialize_forwards should be called first with a
+ bitvector copied from the DF_LIVE_IN or DF_LR_IN. Then
+ df_simulate_one_insn_forwards should be called for each insn in
+ the block, starting with the first one. Finally,
+ df_simulate_finalize_forwards can be called to get a new value
+ of the sets at the bottom of the block (this is rarely used).
+ ----------------------------------------------------------------------------*/
+
+/* Apply the artificial uses and defs at the top of BB in a forwards
+ direction. */
+void
+df_simulate_initialize_forwards (basic_block bb, bitmap live)
+{
+ df_ref *def_rec;
+ int bb_index = bb->index;
+
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
+ }
+}
-/* Simulate the backwards effects of INSN on the bitmap LIVE. */
+/* Simulate the forwards effects of INSN on the bitmap LIVE. */
void
-df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
+df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
{
+ rtx link;
if (! INSN_P (insn))
return;
-
+
+ /* Make sure that the DF_NOTES really is an active df problem. */
+ gcc_assert (df_note);
+
df_simulate_defs (insn, live);
- df_simulate_uses (insn, live);
+
+ /* Clear all of the registers that go dead. */
+ for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
+ {
+ switch (REG_NOTE_KIND (link))
+ case REG_DEAD:
+ case REG_UNUSED:
+ {
+ rtx reg = XEXP (link, 0);
+ int regno = REGNO (reg);
+ if (regno < FIRST_PSEUDO_REGISTER)
+ {
+ int n = hard_regno_nregs[regno][GET_MODE (reg)];
+ while (--n >= 0)
+ bitmap_clear_bit (live, regno + n);
+ }
+ else
+ bitmap_clear_bit (live, regno);
+ break;
+ default:
+ break;
+ }
+ }
df_simulate_fixup_sets (bb, live);
}
+/* Apply the artificial uses and defs at the end of BB in a forwards
+ direction. */
+
+void
+df_simulate_finalize_forwards (basic_block bb, bitmap live)
+{
+ df_ref *def_rec;
+ int bb_index = bb->index;
+
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
+ }
+}
+
+