rtx insn;
FOR_BB_INSNS_REVERSE (bb, insn)
{
- struct df_ref **def_rec;
+ df_ref *def_rec;
if (insn_contains_asm (insn))
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (dregno < FIRST_PSEUDO_REGISTER)
{
/* All registers that can be eliminated. */
-static HARD_REG_SET eliminable_regset;
+HARD_REG_SET eliminable_regset;
static int regno_compare (const void *, const void *);
static int allocno_compare (const void *, const void *);
static void set_preferences (void);
static void find_reg (int, HARD_REG_SET, int, int, int);
static void dump_conflicts (FILE *);
-static void build_insn_chain (void);
\f
/* Look through the list of eliminable registers. Set ELIM_SET to the
= (! flag_omit_frame_pointer
|| (cfun->calls_alloca && EXIT_IGNORE_STACK)
|| crtl->accesses_prior_frames
+ || crtl->stack_realign_needed
|| FRAME_POINTER_REQUIRED);
frame_pointer_needed = need_fp;
{
bool cannot_elim
= (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
- || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));
+ || (eliminables[i].to == STACK_POINTER_REGNUM
+ && need_fp
+ && (! SUPPORTS_STACK_ALIGNMENT
+ || ! stack_realign_fp)));
if (!regs_asm_clobbered[eliminables[i].from])
{
int i, best_reg, pass;
HARD_REG_SET used, used1, used2;
- enum reg_class class = (alt_regs_p
+ enum reg_class rclass = (alt_regs_p
? reg_alternate_class (allocno[num].reg)
: reg_preferred_class (allocno[num].reg));
enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);
if (losers)
IOR_HARD_REG_SET (used1, losers);
- IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
+ IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) rclass]);
#ifdef EH_RETURN_DATA_REGNO
if (allocno[num].no_eh_reg)
if (! accept_call_clobbered
&& allocno[num].calls_crossed != 0
&& allocno[num].throwing_calls_crossed == 0
- && CALLER_SAVE_PROFITABLE (optimize_size ? allocno[num].n_refs : allocno[num].freq,
- optimize_size ? allocno[num].calls_crossed
+ && CALLER_SAVE_PROFITABLE (optimize_function_for_size_p (cfun) ? allocno[num].n_refs : allocno[num].freq,
+ optimize_function_for_size_p (cfun) ? allocno[num].calls_crossed
: allocno[num].freq_calls_crossed))
{
HARD_REG_SET new_losers;
FOR_EACH_BB (bb)
{
- regset r = DF_LIVE_IN (bb);
+ /* We don't use LIVE info in IRA. */
+ regset r = (flag_ira ? DF_LR_IN (bb) : DF_LIVE_IN (bb));
if (REGNO_REG_SET_P (r, from))
{
CLEAR_REGNO_REG_SET (r, from);
print_insn_chain (file, c);
}
+/* Return true if pseudo REGNO should be added to set live_throughout
+ or dead_or_set of the insn chains for reload consideration. */
+
+static bool
+pseudo_for_reload_consideration_p (int regno)
+{
+ /* Consider spilled pseudos too for IRA because they still have a
+ chance to get hard-registers in the reload when IRA is used. */
+ /* First disjunct is the pre-IRA condition (see the reg_renumber[i] >= 0
+ test this predicate replaces at the call sites). */
+ return (reg_renumber[regno] >= 0
+ || (flag_ira && optimize && flag_ira_share_spill_slots));
+}
/* Walk the insns of the current function and build reload_insn_chain,
and record register life information. */
-static void
+void
build_insn_chain (void)
{
unsigned int i;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (eliminable_regset, i))
bitmap_set_bit (elim_regset, i);
-
FOR_EACH_BB_REVERSE (bb)
{
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (df_get_live_out (bb), FIRST_PSEUDO_REGISTER, i, bi)
{
- if (reg_renumber[i] >= 0)
+ if (pseudo_for_reload_consideration_p (i))
bitmap_set_bit (live_relevant_regs, i);
}
if (!NOTE_P (insn) && !BARRIER_P (insn))
{
unsigned int uid = INSN_UID (insn);
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
c = new_insn_chain ();
c->next = next;
if (INSN_P (insn))
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
/* Ignore may clobbers because these are generated
if (!fixed_regs[regno])
bitmap_set_bit (&c->dead_or_set, regno);
}
- else if (reg_renumber[regno] >= 0)
+ else if (pseudo_for_reload_consideration_p (regno))
bitmap_set_bit (&c->dead_or_set, regno);
}
- if ((regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
+ if ((regno < FIRST_PSEUDO_REGISTER
+ || reg_renumber[regno] >= 0
+ || (flag_ira && optimize))
&& (!DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL)))
{
rtx reg = DF_REF_REG (def);
if (INSN_P (insn))
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int regno = DF_REF_REGNO (use);
rtx reg = DF_REF_REG (use);
if (!fixed_regs[regno])
bitmap_set_bit (&c->dead_or_set, regno);
}
- else if (reg_renumber[regno] >= 0)
+ else if (pseudo_for_reload_consideration_p (regno))
bitmap_set_bit (&c->dead_or_set, regno);
}
- if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
+ if (regno < FIRST_PSEUDO_REGISTER
+ || pseudo_for_reload_consideration_p (regno))
{
if (GET_CODE (reg) == SUBREG
&& !DF_REF_FLAGS_IS_SET (use,
fprintf (file, "\n\n");
}
+
+/* Gate for the old global register allocator pass: run it only when
+ IRA is not enabled. */
+static bool
+gate_handle_global_alloc (void)
+{
+ return ! flag_ira;
+}
+
/* Run old register allocator. Return TRUE if we must exit
rest_of_compilation upon return. */
static unsigned int
{
RTL_PASS,
"greg", /* name */
- NULL, /* gate */
+ gate_handle_global_alloc, /* gate */
rest_of_handle_global_alloc, /* execute */
NULL, /* sub */
NULL, /* next */