/* Check the size of the invariant table and realloc if necessary. */
-static void
+static void
check_invariant_table_size (void)
{
if (invariant_table_size < DF_DEFS_TABLE_SIZE())
{
unsigned int new_size = DF_DEFS_TABLE_SIZE () + (DF_DEFS_TABLE_SIZE () / 4);
invariant_table = XRESIZEVEC (struct invariant *, invariant_table, new_size);
- memset (&invariant_table[invariant_table_size], 0,
+ memset (&invariant_table[invariant_table_size], 0,
(new_size - invariant_table_size) * sizeof (struct rtx_iv *));
invariant_table_size = new_size;
}
struct df_link *defs;
struct def *def_data;
struct invariant *inv;
-
+
if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
return false;
-
+
defs = DF_REF_CHAIN (use);
if (!defs)
return true;
-
+
if (defs->next)
return false;
-
+
def = defs->ref;
check_invariant_table_size ();
inv = invariant_table[DF_REF_ID(def)];
if (!inv)
return false;
-
+
def_data = inv->def;
gcc_assert (def_data != NULL);
-
+
def_bb = DF_REF_BB (def);
/* Note that in case bb == def_bb, we know that the definition
   dominates insn, because def has invariant_table[DF_REF_ID(def)]
   defined and we process the insns in the basic block bb
   sequentially.  */
if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
return false;
-
+
bitmap_set_bit (depends_on, def_data->invno);
return true;
}
for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
if (!check_dependency (bb, *use_rec, depends_on))
return false;
-
+
return true;
}
rtx reg;
enum reg_class cover_class;
rtx set = single_set (insn);
-
+
/* Considered invariant insns have only one set. */
gcc_assert (set != NULL_RTX);
reg = SET_DEST (set);
return;
if (flag_ira_loop_pressure)
- /* REGS_USED is actually never used when the flag is on. */
+ /* REGS_USED is actually never used when the flag is on. */
regs_used = 0;
else
/* We do not really do a good job in estimating number of
unsigned int n_regs = DF_REG_SIZE (df);
regs_used = 2;
-
+
for (i = 0; i < n_regs; i++)
{
if (!DF_REGNO_FIRST_DEF (i) && DF_REGNO_LAST_USE (i))
emit_insn_after (gen_move_insn (dest, reg), inv->insn);
reorder_insns (inv->insn, inv->insn, BB_END (preheader));
- /* If there is a REG_EQUAL note on the insn we just moved, and
- insn is in a basic block that is not always executed, the note
- may no longer be valid after we move the insn.
- Note that uses in REG_EQUAL notes are taken into account in
- the computation of invariants. Hence it is safe to retain the
- note even if the note contains register references. */
- if (! inv->always_executed
- && (note = find_reg_note (inv->insn, REG_EQUAL, NULL_RTX)))
+ /* If there is a REG_EQUAL note on the insn we just moved, and the
+ insn is in a basic block that is not always executed or the note
+ contains something for which we don't know the invariant status,
+ the note may no longer be valid after we move the insn. Note that
+ uses in REG_EQUAL notes are taken into account in the computation
+ of invariants, so it is safe to retain the note even if it contains
+ register references for which we know the invariant status. */
+ if ((note = find_reg_note (inv->insn, REG_EQUAL, NULL_RTX))
+ && (!inv->always_executed
+ || !check_maybe_invariant (XEXP (note, 0))))
remove_note (inv->insn, note);
}
else
{
*use->pos = reg;
df_insn_rescan (use->insn);
- }
+ }
}
return true;
{
def = inv->def;
gcc_assert (def != NULL);
-
+
free_use_list (def->uses);
free (def);
invariant_table[i] = NULL;
static int n_regs_set;
/* Return cover class and number of needed hard registers (through
- *NREGS) of register REGNO. */
+ *NREGS) of register REGNO. */
static enum reg_class
get_regno_cover_class (int regno, int *nregs)
{
if (code == REG)
{
struct loop *loop;
-
+
for (loop = curr_loop;
loop != current_loops->tree_root;
loop = loop_outer (loop))
else if (fmt[i] == 'E')
{
int j;
-
+
for (j = 0; j < XVECLEN (x, i); j++)
mark_ref_regs (XVECEXP (x, i, j));
}
mark_ref_regs (PATTERN (insn));
n_regs_set = 0;
note_stores (PATTERN (insn), mark_reg_clobber, NULL);
-
+
/* Mark any registers dead after INSN as dead now. */
-
+
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_DEAD)
mark_reg_death (XEXP (link, 0));
-
+
/* Mark any registers set in INSN as live,
and mark them as conflicting with all other live regs.
Clobbers are processed again, so they conflict with
the registers that are set. */
-
+
note_stores (PATTERN (insn), mark_reg_store, NULL);
-
+
#ifdef AUTO_INC_DEC
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_INC)
REGNO (regs_set[n_regs_set]));
if (! note)
continue;
-
+
mark_reg_death (XEXP (note, 0));
}
}
for (i = 0; (int) i < ira_reg_class_cover_size; i++)
{
enum reg_class cover_class;
-
+
cover_class = ira_reg_class_cover[i];
if (LOOP_DATA (loop)->max_reg_pressure[cover_class] == 0)
continue;