/* Standard problems for dataflow support routines.
- Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
-
- Free Software Foundation, Inc.
+ Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
+ 2008, 2009 Free Software Foundation, Inc.
Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
if (dflow->block_info_size < new_size)
{
new_size += new_size / 4;
- dflow->block_info = xrealloc (dflow->block_info,
- new_size *sizeof (void*));
+ dflow->block_info = XRESIZEVEC (void *, dflow->block_info, new_size);
memset (dflow->block_info + dflow->block_info_size, 0,
(new_size - dflow->block_info_size) *sizeof (void *));
dflow->block_info_size = new_size;
DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
DF_REF_ID (link->ref),
DF_REF_BBNO (link->ref),
- DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
+ DF_REF_IS_ARTIFICIAL (link->ref) ? -1 : DF_REF_INSN_UID (link->ref));
}
fprintf (file, "}");
}
static void
df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
- struct df_ref **def_rec,
+ df_ref *def_rec,
enum df_ref_flags top_flag)
{
while (*def_rec)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
{
unsigned int regno = DF_REF_REGNO (def);
}
/* Set up the knockout bit vectors to be applied across EH_EDGES. */
- EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
+ EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
{
if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
bitmap_set_bit (sparse_invalidated, regno);
basic_block bb = BASIC_BLOCK (bb_index);
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
/* Process the registers set in an exception handler. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the hardware registers that are always live. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
}
goto. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the uses that are live into an exception handler. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
/* ??? Abnormal call edges ignored for the moment, as this gets
confused by sibling call edges, which crashes reg-stack. */
if (e->flags & EDGE_EH)
- bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
+ bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
else
bitmap_ior_into (op1, op2);
/* Run the fast dce as a side effect of building LR. */
static void
-df_lr_finalize (bitmap all_blocks ATTRIBUTE_UNUSED)
+df_lr_finalize (bitmap all_blocks)
{
+ df_lr->solutions_dirty = false;
if (df->changeable_flags & DF_LR_RUN_DCE)
{
run_fast_df_dce ();
- if (df_lr->problem_data && df_lr->solutions_dirty)
+
+ /* If dce deletes some instructions, we need to recompute the lr
+ solution before proceeding further. The problem is that fast
+ dce is a pessimistic dataflow algorithm. In the case where
+ it deletes a statement S inside of a loop, the uses inside of
+ S may not be deleted from the dataflow solution because they
+ were carried around the loop. While it is conservatively
+ correct to leave these extra bits, the standards of df
+ require that we maintain the best possible (least fixed
+ point) solution. The only way to do that is to redo the
+ iteration from the beginning. See PR35805 for an
+ example. */
+ if (df_lr->solutions_dirty)
{
- /* If we are here, then it is because we are both verifying
- the solution and the dce changed the function. In that case
- the verification info built will be wrong. So we leave the
- dirty flag true so that the verifier will skip the checking
- part and just clean up.*/
- df_lr->solutions_dirty = true;
+ df_clear_flags (DF_LR_RUN_DCE);
+ df_lr_alloc (all_blocks);
+ df_lr_local_compute (all_blocks);
+ df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
+ df_lr_finalize (all_blocks);
+ df_set_flags (DF_LR_RUN_DCE);
}
- else
- df_lr->solutions_dirty = false;
}
- else
- df_lr->solutions_dirty = false;
}
basic_block bb = BASIC_BLOCK (bb_index);
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
rtx insn;
- struct df_ref **def_rec;
+ df_ref *def_rec;
int luid = 0;
FOR_BB_INSNS (bb, insn)
if (!insn_info)
{
gcc_assert (!INSN_P (insn));
- df_insn_create_insn_record (insn);
+ insn_info = df_insn_create_insn_record (insn);
}
- DF_INSN_LUID (insn) = luid;
+ DF_INSN_INFO_LUID (insn_info) = luid;
if (!INSN_P (insn))
continue;
luid++;
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if (DF_REF_FLAGS_IS_SET (def,
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
}
}
/* Create a du or ud chain from SRC to DST and link it into SRC. */
struct df_link *
-df_chain_create (struct df_ref *src, struct df_ref *dst)
+df_chain_create (df_ref src, df_ref dst)
{
struct df_link *head = DF_REF_CHAIN (src);
- struct df_link *link = pool_alloc (df_chain->block_pool);
+ struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
DF_REF_CHAIN (src) = link;
link->next = head;
/* Delete any du or ud chains that start at REF and point to
TARGET. */
static void
-df_chain_unlink_1 (struct df_ref *ref, struct df_ref *target)
+df_chain_unlink_1 (df_ref ref, df_ref target)
{
struct df_link *chain = DF_REF_CHAIN (ref);
struct df_link *prev = NULL;
/* Delete a du or ud chain that leave or point to REF. */
void
-df_chain_unlink (struct df_ref *ref)
+df_chain_unlink (df_ref ref)
{
struct df_link *chain = DF_REF_CHAIN (ref);
while (chain)
TO_REF. */
void
-df_chain_copy (struct df_ref *to_ref,
+df_chain_copy (df_ref to_ref,
struct df_link *from_ref)
{
while (from_ref)
EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
{
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
basic_block bb = BASIC_BLOCK (bb_index);
if (df_chain_problem_p (DF_DU_CHAIN))
static void
df_chain_create_bb_process_use (bitmap local_rd,
- struct df_ref **use_rec,
+ df_ref *use_rec,
enum df_ref_flags top_flag)
{
bitmap_iterator bi;
while (*use_rec)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (uregno >= FIRST_PSEUDO_REGISTER))
EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
{
- struct df_ref *def;
+ df_ref def;
if (def_index > last_index)
break;
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
bitmap cpy = BITMAP_ALLOC (NULL);
- struct df_ref **def_rec;
+ df_ref *def_rec;
bitmap_copy (cpy, bb_info->in);
bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the regular instructions next. */
FOR_BB_INSNS (bb, insn)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
if (!INSN_P (insn))
pass only changes the bits in cpy. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
if (df_chain_problem_p (DF_DU_CHAIN))
{
rtx insn;
- struct df_ref **def_rec = df_get_artificial_defs (bb->index);
+ df_ref *def_rec = df_get_artificial_defs (bb->index);
if (*def_rec)
{
fprintf (file, ";; DU chains for artificial defs\n");
while (*def_rec)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");
FOR_BB_INSNS (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
if (INSN_P (insn))
{
- def_rec = DF_INSN_UID_DEFS (uid);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ def_rec = DF_INSN_INFO_DEFS (insn_info);
if (*def_rec)
{
fprintf (file, ";; DU chains for insn luid %d uid %d\n",
- DF_INSN_LUID (insn), uid);
+ DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
while (*def_rec)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
- if (def->flags & DF_REF_READ_WRITE)
+ if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");
if (df_chain_problem_p (DF_UD_CHAIN))
{
rtx insn;
- struct df_ref **use_rec = df_get_artificial_uses (bb->index);
+ df_ref *use_rec = df_get_artificial_uses (bb->index);
if (*use_rec)
{
fprintf (file, ";; UD chains for artificial uses\n");
while (*use_rec)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
FOR_BB_INSNS (bb, insn)
{
- unsigned int uid = INSN_UID (insn);
if (INSN_P (insn))
{
- struct df_ref **eq_use_rec = DF_INSN_UID_EQ_USES (uid);
- use_rec = DF_INSN_UID_USES (uid);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
+ use_rec = DF_INSN_INFO_USES (insn_info);
if (*use_rec || *eq_use_rec)
{
fprintf (file, ";; UD chains for insn luid %d uid %d\n",
- DF_INSN_LUID (insn), uid);
+ DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
while (*use_rec)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
- if (use->flags & DF_REF_READ_WRITE)
+ if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
}
while (*eq_use_rec)
{
- struct df_ref *use = *eq_use_rec;
+ df_ref use = *eq_use_rec;
fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
reach in the backwards direction. In and out bitvectors are built
for each basic block. There are two mapping functions,
df_byte_lr_get_regno_start and df_byte_lr_get_regno_len that are
- used to map regnos into bit vector postions.
+ used to map regnos into bit vector positions.
This problem differs from the regular df_lr function in the way
that subregs, *_extracts and strict_low_parts are handled. In lr
extracts, subregs or strict_low_parts. */
static void
-df_byte_lr_check_regs (struct df_ref **ref_rec)
+df_byte_lr_check_regs (df_ref *ref_rec)
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
for (; *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT
| DF_REF_ZERO_EXTRACT
| DF_REF_STRICT_LOW_PART)
{
if (INSN_P (insn))
{
- df_byte_lr_check_regs (DF_INSN_DEFS (insn));
- df_byte_lr_check_regs (DF_INSN_USES (insn));
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
+ df_byte_lr_check_regs (DF_INSN_INFO_DEFS (insn_info));
+ df_byte_lr_check_regs (DF_INSN_INFO_USES (insn_info));
}
}
bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, bb->index);
df_byte_lr_expand_bitmap (problem_data->hardware_regs_used,
df->hardware_regs_used);
df_byte_lr_expand_bitmap (problem_data->invalidated_by_call,
- df_invalidated_by_call);
+ regs_invalidated_by_call_regset);
EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
basic_block bb = BASIC_BLOCK (bb_index);
struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
/* Process the registers set in an exception handler. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the hardware registers that are always live. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
unsigned int start = problem_data->regno_start[uregno];
unsigned int len = problem_data->regno_len[uregno];
goto. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
/* Process the uses that are live into an exception handler. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
{
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
- struct df_ref **use_rec;
+ df_ref *use_rec;
unsigned int uid = INSN_UID (insn);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
unsigned int start = problem_data->regno_start[uregno];
unsigned int len = problem_data->regno_len[uregno];
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
- struct df_ref **def_rec;
+ df_ref *def_rec;
#ifdef EH_USES
- struct df_ref **use_rec;
+ df_ref *use_rec;
#endif
int bb_index = bb->index;
#ifdef EH_USES
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
{
unsigned int uregno = DF_REF_REGNO (use);
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
{
struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
int bb_index = bb->index;
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
unsigned int uregno = DF_REF_REGNO (use);
static inline rtx
df_set_note (enum reg_note note_type, rtx insn, rtx old, rtx reg)
{
- rtx this = old;
+ rtx curr = old;
rtx prev = NULL;
- while (this)
- if (XEXP (this, 0) == reg)
+ while (curr)
+ if (XEXP (curr, 0) == reg)
{
if (prev)
- XEXP (prev, 1) = XEXP (this, 1);
+ XEXP (prev, 1) = XEXP (curr, 1);
else
- old = XEXP (this, 1);
- XEXP (this, 1) = REG_NOTES (insn);
- REG_NOTES (insn) = this;
+ old = XEXP (curr, 1);
+ XEXP (curr, 1) = REG_NOTES (insn);
+ REG_NOTES (insn) = curr;
return old;
}
else
{
- prev = this;
- this = XEXP (this, 1);
+ prev = curr;
+ curr = XEXP (curr, 1);
}
/* Did not find the note. */
- REG_NOTES (insn) = alloc_EXPR_LIST (note_type, reg, REG_NOTES (insn));
+ add_reg_note (insn, note_type, reg);
return old;
}
LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
static rtx
-df_create_unused_note (rtx insn, rtx old, struct df_ref *def,
+df_create_unused_note (rtx insn, rtx old, df_ref def,
bitmap live, bitmap artificial_uses)
{
unsigned int dregno = DF_REF_REGNO (def);
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
bitmap_copy (live, df_get_live_out (bb));
bitmap_clear (artificial_uses);
to begin processing. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
#ifdef REG_DEAD_DEBUGGING
if (dump_file)
fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
unsigned int regno = DF_REF_REGNO (use);
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
- if ((mws->type == DF_REF_REG_DEF)
+ if ((DF_MWS_REG_DEF_P (mws))
&& !df_ignore_stack_reg (mws->start_regno))
old_unused_notes
= df_set_unused_notes_for_mw (insn, old_unused_notes,
clobber. This code is for the return. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
{
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
- if (mws->type == DF_REF_REG_DEF)
+ if (DF_MWS_REG_DEF_P (mws))
old_unused_notes
= df_set_unused_notes_for_mw (insn, old_unused_notes,
mws, live, do_not_gen,
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
old_unused_notes
= df_create_unused_note (insn, old_unused_notes,
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
- if ((mws->type != DF_REF_REG_DEF)
+ if ((DF_MWS_REG_DEF_P (mws))
&& !df_ignore_stack_reg (mws->start_regno))
old_dead_notes
= df_set_dead_notes_for_mw (insn, old_dead_notes,
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
#ifdef REG_DEAD_DEBUGGING
void
df_simulate_find_defs (rtx insn, bitmap defs)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
void
df_simulate_defs (rtx insn, bitmap live)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
/* If the def is to only part of the reg, it does
void
df_simulate_uses (rtx insn, bitmap live)
{
- struct df_ref **use_rec;
+ df_ref *use_rec;
unsigned int uid = INSN_UID (insn);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
bitmap_set_bit (live, DF_REF_REGNO (use));
}
The following three functions are used only for BACKWARDS scanning:
i.e. they process the defs before the uses.
- df_simulate_artificial_refs_at_end should be called first with a
+ df_simulate_initialize_backwards should be called first with a
bitvector copyied from the DF_LIVE_OUT or DF_LR_OUT. Then
- df_simulate_one_insn should be called for each insn in the block,
- starting with the last on. Finally,
- df_simulate_artificial_refs_at_top can be called to get a new value
+ df_simulate_one_insn_backwards should be called for each insn in
+ the block, starting with the last one. Finally,
+ df_simulate_finalize_backwards can be called to get a new value
of the sets at the top of the block (this is rarely used).
-
- It would be not be difficult to define a similar set of functions
- that work in the forwards direction. In that case the functions
- would ignore the use sets and look for the REG_DEAD and REG_UNUSED
- notes.
-----------------------------------------------------------------------------*/
+ ----------------------------------------------------------------------------*/
/* Apply the artificial uses and defs at the end of BB in a backwards
direction. */
void
-df_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
+df_simulate_initialize_backwards (basic_block bb, bitmap live)
{
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
int bb_index = bb->index;
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
bitmap_clear_bit (live, DF_REF_REGNO (def));
}
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
bitmap_set_bit (live, DF_REF_REGNO (use));
}
/* Simulate the backwards effects of INSN on the bitmap LIVE. */
void
-df_simulate_one_insn (basic_block bb, rtx insn, bitmap live)
+df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
{
if (! INSN_P (insn))
return;
direction. */
void
-df_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
+df_simulate_finalize_backwards (basic_block bb, bitmap live)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
#ifdef EH_USES
- struct df_ref **use_rec;
+ df_ref *use_rec;
#endif
int bb_index = bb->index;
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
bitmap_clear_bit (live, DF_REF_REGNO (def));
}
#ifdef EH_USES
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
bitmap_set_bit (live, DF_REF_REGNO (use));
}
#endif
}
+/*----------------------------------------------------------------------------
+ The following three functions are used only for FORWARDS scanning:
+ i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
+ Thus it is important to add the DF_NOTES problem to the stack of
+ problems computed before using these functions.
+
+ df_simulate_initialize_forwards should be called first with a
+ bitvector copied from the DF_LIVE_IN or DF_LR_IN. Then
+ df_simulate_one_insn_forwards should be called for each insn in
+ the block, starting with the first one. Finally,
+ df_simulate_finalize_forwards can be called to get a new value
+ of the sets at the bottom of the block (this is rarely used).
+ ----------------------------------------------------------------------------*/
+
+/* Apply the artificial uses and defs at the top of BB in a backwards
+ direction. */
+
+void
+df_simulate_initialize_forwards (basic_block bb, bitmap live)
+{
+ df_ref *def_rec;
+ int bb_index = bb->index;
+
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
+ }
+}
+
+/* Simulate the backwards effects of INSN on the bitmap LIVE. */
+
+void
+df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
+{
+ rtx link;
+ if (! INSN_P (insn))
+ return;
+
+ /* Make sure that the DF_NOTES really is an active df problem. */
+ gcc_assert (df_note);
+
+ df_simulate_defs (insn, live);
+
+ /* Clear all of the registers that go dead. */
+ for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
+ {
+ switch (REG_NOTE_KIND (link))
+ case REG_DEAD:
+ case REG_UNUSED:
+ {
+ rtx reg = XEXP (link, 0);
+ int regno = REGNO (reg);
+ if (regno < FIRST_PSEUDO_REGISTER)
+ {
+ int n = hard_regno_nregs[regno][GET_MODE (reg)];
+ while (--n >= 0)
+ bitmap_clear_bit (live, regno + n);
+ }
+ else
+ bitmap_clear_bit (live, regno);
+ break;
+ default:
+ break;
+ }
+ }
+ df_simulate_fixup_sets (bb, live);
+}
+
+
+/* Apply the artificial uses and defs at the end of BB in a backwards
+ direction. */
+
+void
+df_simulate_finalize_forwards (basic_block bb, bitmap live)
+{
+ df_ref *def_rec;
+ int bb_index = bb->index;
+
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
+ }
+}
+
+