/* Allocate registers for pseudo-registers that span basic blocks.
- Copyright (C) 2007 Free Software Foundation, Inc.
+ Copyright (C) 2007, 2008 Free Software Foundation, Inc.
Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
This file is part of GCC.
alloc_pool adjacency_pool;
adjacency_t **adjacency;
-typedef struct df_ref * df_ref_t;
+typedef df_ref df_ref_t;
DEF_VEC_P(df_ref_t);
DEF_VEC_ALLOC_P(df_ref_t,heap);
static void
mark_reg_store (sparseset allocnos_live,
HARD_REG_SET *hard_regs_live,
- struct df_ref *ref)
+ df_ref ref)
{
rtx reg = DF_REF_REG (ref);
unsigned int regno = DF_REF_REGNO (ref);
{
unsigned int start = regno;
unsigned int last = end_hard_regno (mode, regno);
- if ((GET_CODE (reg) == SUBREG) && !DF_REF_FLAGS_IS_SET (ref, DF_REF_EXTRACT))
+ if ((GET_CODE (reg) == SUBREG) && !DF_REF_FLAGS_IS_SET (ref, DF_REF_ZERO_EXTRACT))
{
start += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
SUBREG_BYTE (reg), GET_MODE (reg));
/* Init LIVE_SUBREGS[ALLOCNUM] and LIVE_SUBREGS_USED[ALLOCNUM] using
- REG to the the number of nregs, and INIT_VALUE to get the
+ REG to the number of nregs, and INIT_VALUE to get the
initialization. ALLOCNUM need not be the regno of REG. */
void
/* Set REG to be not live in the sets ALLOCNOS_LIVE, LIVE_SUBREGS,
- HARD_REGS_LIVE. If EXTRACT is false, assume that the entire reg is
- set not live even if REG is a subreg. */
+ HARD_REGS_LIVE. DEF is the definition of the register. */
inline static void
clear_reg_in_live (sparseset allocnos_live,
sbitmap *live_subregs,
int *live_subregs_used,
HARD_REG_SET *hard_regs_live,
- rtx reg,
- bool extract)
+ rtx reg, df_ref def)
{
unsigned int regno = (GET_CODE (reg) == SUBREG)
? REGNO (SUBREG_REG (reg)): REGNO (reg);
if (allocnum >= 0)
{
- if ((GET_CODE (reg) == SUBREG) && !extract)
-
+ if (GET_CODE (reg) == SUBREG
+ && !DF_REF_FLAGS_IS_SET (def, DF_REF_ZERO_EXTRACT))
{
unsigned int start = SUBREG_BYTE (reg);
unsigned int last = start + GET_MODE_SIZE (GET_MODE (reg));
ra_init_live_subregs (sparseset_bit_p (allocnos_live, allocnum),
live_subregs, live_subregs_used, allocnum, reg);
+ if (!DF_REF_FLAGS_IS_SET (def, DF_REF_STRICT_LOW_PART))
+ {
+ /* Expand the range to cover entire words.
+ Bytes added here are "don't care". */
+ start = start / UNITS_PER_WORD * UNITS_PER_WORD;
+ last = ((last + UNITS_PER_WORD - 1)
+ / UNITS_PER_WORD * UNITS_PER_WORD);
+ }
+
/* Ignore the paradoxical bits. */
if ((int)last > live_subregs_used[allocnum])
last = live_subregs_used[allocnum];
if (! fixed_regs[regno])
{
unsigned int start = regno;
- if ((GET_CODE (reg) == SUBREG) && !extract)
+ if (GET_CODE (reg) == SUBREG
+ && !DF_REF_FLAGS_IS_SET (def, DF_REF_ZERO_EXTRACT))
{
unsigned int last;
start += SUBREG_BYTE (reg);
bitmap_iterator bi;
bitmap_copy (live, DF_LIVE_OUT (bb));
- df_simulate_artificial_refs_at_end (bb, live);
+ df_simulate_initialize_backwards (bb, live);
sparseset_clear (allocnos_live);
memset (live_subregs_used, 0, max_allocno * sizeof (int));
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
- struct df_ref **def_rec;
- struct df_ref **use_rec;
+ df_ref *def_rec;
+ df_ref *use_rec;
if (!INSN_P (insn))
continue;
later. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
/* FIXME: Ignoring may clobbers is technically the wrong
thing to do. However, the old version of this
rtx reg = DF_REF_REG (def);
set_reg_in_live (allocnos_live, live_subregs, live_subregs_used,
&hard_regs_live, reg,
- DF_REF_FLAGS_IS_SET (def, DF_REF_EXTRACT));
+ DF_REF_FLAGS_IS_SET (def, DF_REF_ZERO_EXTRACT));
if (dump_file)
dump_ref (dump_file, " adding def", "\n",
reg, DF_REF_REGNO (def), live_subregs, live_subregs_used);
/* Add the interferences for the defs. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
mark_reg_store (allocnos_live, &renumbers_live, def);
}
VEC_truncate (df_ref_t, clobbers, 0);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- struct df_ref *def = *def_rec;
+ df_ref def = *def_rec;
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
{
rtx reg = DF_REF_REG (def);
clear_reg_in_live (allocnos_live, live_subregs, live_subregs_used,
- &hard_regs_live, reg,
- DF_REF_FLAGS_IS_SET (def, DF_REF_EXTRACT));
+ &hard_regs_live, reg, def);
if (dump_file)
dump_ref (dump_file, " clearing def", "\n",
reg, DF_REF_REGNO (def), live_subregs, live_subregs_used);
VEC_truncate (df_ref_t, dying_regs, 0);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
- struct df_ref *use = *use_rec;
+ df_ref use = *use_rec;
unsigned int regno = DF_REF_REGNO (use);
bool added = false;
int renumber = reg_renumber[regno];
use unless that set also happens to be wrapped in a
ZERO_EXTRACT. */
if (DF_REF_FLAGS_IS_SET (use, DF_REF_READ_WRITE)
- && (!DF_REF_FLAGS_IS_SET (use, DF_REF_EXTRACT))
+ && (!DF_REF_FLAGS_IS_SET (use, DF_REF_ZERO_EXTRACT))
&& DF_REF_FLAGS_IS_SET (use, DF_REF_SUBREG))
continue;
if (allocnum >= 0)
{
if (GET_CODE (reg) == SUBREG
- && !DF_REF_FLAGS_IS_SET (use, DF_REF_EXTRACT))
+ && !DF_REF_FLAGS_IS_SET (use, DF_REF_ZERO_EXTRACT))
{
unsigned int start = SUBREG_BYTE (reg);
unsigned int last = start + GET_MODE_SIZE (GET_MODE (reg));
fprintf (dump_file, " clobber conflicts\n");
for (k = VEC_length (df_ref_t, clobbers) - 1; k >= 0; k--)
{
- struct df_ref *def = VEC_index (df_ref_t, clobbers, k);
+ df_ref def = VEC_index (df_ref_t, clobbers, k);
int j;
for (j = VEC_length (df_ref_t, dying_regs) - 1; j >= 0; j--)
{
- struct df_ref *use = VEC_index (df_ref_t, dying_regs, j);
+ df_ref use = VEC_index (df_ref_t, dying_regs, j);
record_one_conflict_between_regnos (GET_MODE (DF_REF_REG (def)),
DF_REF_REGNO (def),
GET_MODE (DF_REF_REG (use)),
}
/* Early clobbers, by definition, need to not only
- clobber the registers that are live accross the insn
+ clobber the registers that are live across the insn
but need to clobber the registers that die within the
insn. The clobbering for registers live across the
insn is handled above. */
for (j = VEC_length (df_ref_t, dying_regs) - 1; j >= 0; j--)
{
int used_in_output = 0;
- struct df_ref *use = VEC_index (df_ref_t, dying_regs, j);
+ df_ref use = VEC_index (df_ref_t, dying_regs, j);
rtx reg = DF_REF_REG (use);
int uregno = DF_REF_REGNO (use);
enum machine_mode umode = GET_MODE (DF_REF_REG (use));
break;
record_one_conflict (allocnos_live, &hard_regs_live, regno);
}
+
+ EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
+ {
+ allocno[i].no_eh_reg = 1;
+ }
}
#endif
/* No need to record conflicts for call clobbered regs if we have
nonlocal labels around, as we don't ever try to allocate such
regs in this case. */
- if (! current_function_has_nonlocal_label)
+ if (! cfun->has_nonlocal_label)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (call_used_regs [i])
record_one_conflict (allocnos_live, &hard_regs_live, i);