#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
+#include "tree.h"
+#include "target.h"
+#include "target-def.h"
#include "df.h"
#ifndef HAVE_epilogue
/* Initialize ur_in and ur_out as if all hard registers were partially
available. */
-bitmap df_all_hard_regs = NULL;
-
static void df_ref_record (struct dataflow *, rtx, rtx *,
basic_block, rtx, enum df_ref_type,
enum df_ref_flags, bool record_live);
static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
basic_block, rtx, enum df_ref_type,
enum df_ref_flags);
+static void df_record_entry_block_defs (struct dataflow *);
static void df_record_exit_block_uses (struct dataflow *);
static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
dflow->block_info_size = 0;
BITMAP_FREE (df->hardware_regs_used);
+ BITMAP_FREE (df->entry_block_defs);
BITMAP_FREE (df->exit_block_uses);
free_alloc_pool (dflow->block_pool);
/* Free the scanning info for one basic block.  VBB_INFO is the
   df_scan_bb_info for block BB; the refs recorded for BB are deleted
   first so no df_ref chains dangle, then the structure itself is
   returned to DFLOW's block pool.  */
static void
-df_scan_free_bb_info (struct dataflow *dflow, void *vbb_info)
+df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
{
struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
if (bb_info)
- pool_free (dflow->block_pool, bb_info);
+ {
+ df_bb_refs_delete (dflow, bb->index);
+ pool_free (dflow->block_pool, bb_info);
+ }
}
sizeof (struct df_scan_bb_info),
block_size);
- problem_data = xmalloc (sizeof (struct df_scan_problem_data));
+ problem_data = XNEW (struct df_scan_problem_data);
dflow->problem_data = problem_data;
problem_data->ref_pool
problem_data->insn_pool
= create_alloc_pool ("df_scan_insn pool",
sizeof (struct df_insn_info), block_size);
-
problem_data->reg_pool
= create_alloc_pool ("df_scan_reg pool",
sizeof (struct df_reg_info), block_size);
}
df->hardware_regs_used = BITMAP_ALLOC (NULL);
+ df->entry_block_defs = BITMAP_ALLOC (NULL);
df->exit_block_uses = BITMAP_ALLOC (NULL);
}
{
struct df *df = dflow->df;
- df_scan_free_internal (dflow);
+ if (dflow->problem_data)
+ {
+ df_scan_free_internal (dflow);
+ free (dflow->problem_data);
+ }
+
if (df->blocks_to_scan)
BITMAP_FREE (df->blocks_to_scan);
if (df->blocks_to_analyze)
BITMAP_FREE (df->blocks_to_analyze);
- free (dflow->problem_data);
free (dflow);
}
struct df *df = dflow->df;
int i;
- fprintf (file, " all hard regs \t");
- dump_bitmap (file, df_all_hard_regs);
fprintf (file, " invalidated by call \t");
dump_bitmap (file, df_invalidated_by_call);
fprintf (file, " hardware regs used \t");
dump_bitmap (file, df->hardware_regs_used);
+ fprintf (file, " entry block defs \t");
+ dump_bitmap (file, df->entry_block_defs);
fprintf (file, " exit block uses \t");
dump_bitmap (file, df->exit_block_uses);
fprintf (file, " regs ever live \t");
DF_SCAN, /* Problem id. */
DF_NONE, /* Direction. */
df_scan_alloc, /* Allocate the problem specific data. */
+ NULL, /* Reset global information. */
df_scan_free_bb_info, /* Free basic block info. */
NULL, /* Local compute function. */
NULL, /* Init the solution specific data. */
{
bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);
- struct dataflow *dflow = df->problems_by_index [DF_SCAN];
+ struct dataflow *dflow = df->problems_by_index[DF_SCAN];
basic_block bb;
df->def_info.refs_organized = false;
if (blocks)
{
+ int i;
+
/* Need to assure that there are space in all of the tables. */
unsigned int insn_num = get_max_uid () + 1;
insn_num += insn_num / 4;
df->def_info.add_refs_inline = true;
df->use_info.add_refs_inline = true;
+ for (i = df->num_problems_defined; i; i--)
+ {
+ bitmap blocks_to_reset = NULL;
+ if (dflow->problem->reset_fun)
+ {
+ if (!blocks_to_reset)
+ {
+ blocks_to_reset = BITMAP_ALLOC (NULL);
+ bitmap_copy (blocks_to_reset, local_blocks_to_scan);
+ if (df->blocks_to_scan)
+ bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
+ }
+ dflow->problem->reset_fun (dflow, blocks_to_reset);
+ }
+ if (blocks_to_reset)
+ BITMAP_FREE (blocks_to_reset);
+ }
+
df_refs_delete (dflow, local_blocks_to_scan);
/* This may be a mistake, but if an explicit blocks is passed in
----------------------------------------------------------------------------*/
-/* Get the artifical uses for a basic block. */
+/* Get the artificial uses for a basic block. */
struct df_ref *
df_get_artificial_defs (struct df *df, unsigned int bb_index)
}
-/* Get the artifical uses for a basic block. */
+/* Get the artificial uses for a basic block. */
struct df_ref *
df_get_artificial_uses (struct df *df, unsigned int bb_index)
void
df_ref_remove (struct df *df, struct df_ref *ref)
{
- struct dataflow *dflow = df->problems_by_index [DF_SCAN];
+ struct dataflow *dflow = df->problems_by_index[DF_SCAN];
if (DF_REF_REG_DEF_P (ref))
{
if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
return insn_rec;
}
-/* Delete all of the refs information from BLOCKS. */
+
+/* Delete all of the refs information from INSN. */
void
df_insn_refs_delete (struct dataflow *dflow, rtx insn)
{
struct df *df = dflow->df;
unsigned int uid = INSN_UID (insn);
- struct df_insn_info *insn_info = DF_INSN_UID_GET (df, uid);
+ struct df_insn_info *insn_info = NULL;
struct df_ref *ref;
struct df_scan_problem_data *problem_data =
(struct df_scan_problem_data *) dflow->problem_data;
+ if (uid < df->insns_size)
+ insn_info = DF_INSN_UID_GET (df, uid);
+
if (insn_info)
{
ref = insn_info->defs;
}
+/* Delete all of the refs information from the basic block with index
+   BB_INDEX: the insn refs for every insn in the block, plus the
+   block's artificial defs and uses.  */
+
+void
+df_bb_refs_delete (struct dataflow *dflow, int bb_index)
+{
+ struct df_ref *def;
+ struct df_ref *use;
+
+ struct df_scan_bb_info *bb_info
+ = df_scan_get_bb_info (dflow, bb_index);
+ rtx insn;
+ basic_block bb = BASIC_BLOCK (bb_index);
+ FOR_BB_INSNS (bb, insn)
+ {
+ if (INSN_P (insn))
+ {
+ /* Delete the refs recorded for INSN.  */
+ df_insn_refs_delete (dflow, insn);
+ }
+ }
+
+ /* Get rid of any artificial uses or defs.  bb_info may be NULL if
+ the block was never scanned.  */
+ if (bb_info)
+ {
+ def = bb_info->artificial_defs;
+ while (def)
+ def = df_reg_chain_unlink (dflow, def);
+ bb_info->artificial_defs = NULL;
+ use = bb_info->artificial_uses;
+ while (use)
+ use = df_reg_chain_unlink (dflow, use);
+ bb_info->artificial_uses = NULL;
+ }
+}
+
+
/* Delete all of the refs information from BLOCKS. */
void
{
bitmap_iterator bi;
unsigned int bb_index;
- struct df_ref *def;
- struct df_ref *use;
EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
{
- struct df_scan_bb_info *bb_info
- = df_scan_get_bb_info (dflow, bb_index);
- rtx insn;
- basic_block bb = BASIC_BLOCK (bb_index);
- FOR_BB_INSNS (bb, insn)
- {
- if (INSN_P (insn))
- {
- /* Record defs within INSN. */
- df_insn_refs_delete (dflow, insn);
- }
- }
-
- /* Get rid of any artifical uses. */
- if (bb_info)
- {
- def = bb_info->artificial_defs;
- while (def)
- def = df_reg_chain_unlink (dflow, def);
- bb_info->artificial_defs = NULL;
- use = bb_info->artificial_uses;
- while (use)
- use = df_reg_chain_unlink (dflow, use);
- bb_info->artificial_uses = NULL;
- }
+ df_bb_refs_delete (dflow, bb_index);
}
}
loc = &SET_DEST (x);
dst = *loc;
- /* Some targets place small structures in registers for
- return values of functions. */
- if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
+ /* It is legal to have a set destination be a parallel. */
+ if (GET_CODE (dst) == PARALLEL)
{
int i;
{
rtx dst = SET_DEST (x);
gcc_assert (!(flags & DF_REF_IN_NOTE));
- df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);
+ df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);
switch (GET_CODE (dst))
{
{
df_uses_record (dflow, &SUBREG_REG (dst),
DF_REF_REG_USE, bb,
- insn, DF_REF_READ_WRITE);
+ insn, flags | DF_REF_READ_WRITE);
break;
}
/* Fall through. */
case MEM:
df_uses_record (dflow, &XEXP (dst, 0),
DF_REF_REG_MEM_STORE,
- bb, insn, 0);
+ bb, insn, flags);
break;
case STRICT_LOW_PART:
{
DF_REF_REG_USE, bb, insn,
DF_REF_READ_WRITE);
df_uses_record (dflow, &XEXP (dst, 1),
- DF_REF_REG_USE, bb, insn, 0);
+ DF_REF_REG_USE, bb, insn, flags);
df_uses_record (dflow, &XEXP (dst, 2),
- DF_REF_REG_USE, bb, insn, 0);
+ DF_REF_REG_USE, bb, insn, flags);
dst = XEXP (dst, 0);
break;
default:
for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
- DF_REF_REG_USE, bb, insn, 0);
+ DF_REF_REG_USE, bb, insn, flags);
return;
}
break;
case PRE_MODIFY:
case POST_MODIFY:
/* Catch the def of the register being modified. */
+ flags |= DF_REF_READ_WRITE;
df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
- DF_REF_REG_DEF, DF_REF_READ_WRITE, true);
+ DF_REF_REG_DEF, flags, true);
/* ... Fall through to handle uses ... */
{
unsigned int i;
/* Mark the registers that will contain data for the handler. */
- if (current_function_calls_eh_return)
- for (i = 0; ; ++i)
- {
- unsigned regno = EH_RETURN_DATA_REGNO (i);
- if (regno == INVALID_REGNUM)
- break;
- df_ref_record (dflow, regno_reg_rtx[i], ®no_reg_rtx[i], bb, NULL,
- DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP, false);
- }
+ for (i = 0; ; ++i)
+ {
+ unsigned regno = EH_RETURN_DATA_REGNO (i);
+ if (regno == INVALID_REGNUM)
+ break;
+ df_ref_record (dflow, regno_reg_rtx[regno], ®no_reg_rtx[regno],
+ bb, NULL,
+ DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
+ false);
+ }
}
#endif
-#ifdef EH_USES
- /* This code is putting in a artificial ref for the use at the TOP
- of the block that receives the exception. It is too cumbersome
- to actually put the ref on the edge. We could either model this
- at the top of the receiver block or the bottom of the sender
- block.
-
- The bottom of the sender block is problematic because not all
- out-edges of the a block are eh-edges. However, it is true that
- all edges into a block are either eh-edges or none of them are
- eh-edges. Thus, we can model this at the top of the eh-receiver
- for all of the edges at once. */
+
if ((df->flags & DF_HARD_REGS)
&& df_has_eh_preds (bb))
{
+#ifdef EH_USES
unsigned int i;
+ /* This code is putting in an artificial ref for the use at the
+ TOP of the block that receives the exception. It is too
+ cumbersome to actually put the ref on the edge. We could
+ either model this at the top of the receiver block or the
+ bottom of the sender block.
+
+ The bottom of the sender block is problematic because not all
+ out-edges of a block are eh-edges. However, it is true
+ that all edges into a block are either eh-edges or none of
+ them are eh-edges. Thus, we can model this at the top of the
+ eh-receiver for all of the edges at once. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (EH_USES (i))
df_uses_record (dflow, ®no_reg_rtx[i],
- DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
- DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
- }
+ DF_REF_REG_USE, bb, NULL,
+ DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
+#endif
+
+ /* The following code (down through the arg_pointer setting)
+ appears to be necessary because there is nothing that
+ actually describes what the exception handling code may
+ need to keep alive. */
+ if (reload_completed)
+ {
+ if (frame_pointer_needed)
+ {
+ df_uses_record (dflow, ®no_reg_rtx[FRAME_POINTER_REGNUM],
+ DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
+#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ df_uses_record (dflow, ®no_reg_rtx[HARD_FRAME_POINTER_REGNUM],
+ DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
#endif
+ }
+#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
+ if (fixed_regs[ARG_POINTER_REGNUM])
+ df_uses_record (dflow, ®no_reg_rtx[ARG_POINTER_REGNUM],
+ DF_REF_REG_USE, bb, NULL,
+ DF_REF_ARTIFICIAL);
+#endif
+ }
+ }
if ((df->flags & DF_HARD_REGS)
&& bb->index >= NUM_FIXED_BLOCKS)
/* Any reference to any pseudo before reload is a potential
reference of the frame pointer. */
- df_uses_record (dflow, ®no_reg_rtx [FRAME_POINTER_REGNUM],
+ df_uses_record (dflow, ®no_reg_rtx[FRAME_POINTER_REGNUM],
DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
if (bitmap_bit_p (blocks, EXIT_BLOCK))
df_record_exit_block_uses (dflow);
+
+ if (bitmap_bit_p (blocks, ENTRY_BLOCK))
+ df_record_entry_block_defs (dflow);
}
}
}
+
+/* Record the (conservative) set of hard registers that are defined on
+   entry to the function, collecting them in df->entry_block_defs and
+   then emitting an artificial DF_REF_REG_DEF in the entry block for
+   each one.  Does nothing unless DF_HARD_REGS scanning is enabled.  */
+
+static void
+df_record_entry_block_defs (struct dataflow * dflow)
+{
+ unsigned int i;
+ bitmap_iterator bi;
+ rtx r;
+ struct df * df = dflow->df;
+
+ bitmap_clear (df->entry_block_defs);
+
+ if (! (df->flags & DF_HARD_REGS))
+ return;
+
+ /* Incoming argument registers; map through INCOMING_REGNO where the
+ target distinguishes caller-side from callee-side argument regs.  */
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ {
+ if (FUNCTION_ARG_REGNO_P (i))
+#ifdef INCOMING_REGNO
+ bitmap_set_bit (df->entry_block_defs, INCOMING_REGNO (i));
+#else
+ bitmap_set_bit (df->entry_block_defs, i);
+#endif
+ }
+
+ /* Once the prologue has been generated, all of these registers
+ should just show up in the first regular block.
+ NOTE(review): the guard tests epilogue_completed, not
+ prologue_epilogue generation directly — confirm that is the
+ intended flag.  */
+ if (HAVE_prologue && epilogue_completed)
+ {
+ /* Defs for the callee saved registers are inserted so that the
+ pushes have some defining location. */
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if ((call_used_regs[i] == 0) && (regs_ever_live[i]))
+ bitmap_set_bit (df->entry_block_defs, i);
+ }
+ else
+ {
+#ifdef INCOMING_RETURN_ADDR_RTX
+ if (REG_P (INCOMING_RETURN_ADDR_RTX))
+ bitmap_set_bit (df->entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
+#endif
+
+ /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
+ only STATIC_CHAIN_REGNUM is defined. If they are different,
+ we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
+#ifdef STATIC_CHAIN_INCOMING_REGNUM
+ bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
+#else
+#ifdef STATIC_CHAIN_REGNUM
+ bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_REGNUM);
+#endif
+#endif
+
+ /* The register holding the address of a returned structure, if
+ the target passes it in a register.  */
+ r = TARGET_STRUCT_VALUE_RTX (current_function_decl, true);
+ if (r && REG_P (r))
+ bitmap_set_bit (df->entry_block_defs, REGNO (r));
+ }
+
+ /* These registers are treated as defined on entry everywhere before
+ reload, since references to them may still be materialized.  */
+ if (!reload_completed)
+ {
+ /* Any reference to any pseudo before reload is a potential
+ reference of the frame pointer. */
+ bitmap_set_bit (df->entry_block_defs, FRAME_POINTER_REGNUM);
+
+#ifdef EH_USES
+ /* The ia-64, the only machine that uses this, does not define these
+ until after reload. */
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (EH_USES (i))
+ {
+ bitmap_set_bit (df->entry_block_defs, i);
+ }
+#endif
+
+#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
+ /* Pseudos with argument area equivalences may require
+ reloading via the argument pointer. */
+ if (fixed_regs[ARG_POINTER_REGNUM])
+ bitmap_set_bit (df->entry_block_defs, ARG_POINTER_REGNUM);
+#endif
+
+#ifdef PIC_OFFSET_TABLE_REGNUM
+ /* Any constant, or pseudo with constant equivalences, may
+ require reloading from memory using the pic register. */
+ if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
+ && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
+ bitmap_set_bit (df->entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
+#endif
+ }
+
+ /* Let the target add anything else it considers live on entry.  */
+ targetm.live_on_entry (df->entry_block_defs);
+
+ /* Materialize an artificial def in the entry block for each
+ collected register.  */
+ EXECUTE_IF_SET_IN_BITMAP (df->entry_block_defs, 0, i, bi)
+ {
+ df_ref_record (dflow, regno_reg_rtx[i], &regno_reg_rtx[i],
+ ENTRY_BLOCK_PTR, NULL,
+ DF_REF_REG_DEF, DF_REF_ARTIFICIAL , false);
+ }
+}
+
+
/* Record the set of hard registers that are used in the exit block. */
static void
void
df_hard_reg_init (void)
{
-#ifdef ELIMINABLE_REGS
int i;
+#ifdef ELIMINABLE_REGS
static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
/* After reload, some ports add certain bits to regs_ever_live so
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
bitmap_set_bit (df_invalidated_by_call, i);
- df_all_hard_regs = BITMAP_ALLOC (&persistent_obstack);
-
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- bitmap_set_bit (df_all_hard_regs, i);
-
initialized = true;
}