/* Scanning of rtl for dataflow analysis.
- Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
- Free Software Foundation, Inc.
+ Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
+ 2008, 2009 Free Software Foundation, Inc.
Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA.
-*/
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#define EPILOGUE_USES(REGNO) 0
#endif
-/* The bitmap_obstack is used to hold some static variables that
- should not be reset after each function is compiled. */
-
-static bitmap_obstack persistent_obstack;
+/* The following two macros free the vecs that hold either the refs or
+ the mw refs. They are a little tricky because a vec that has 0
+ elements is special and is not to be freed. */
+#define df_scan_free_ref_vec(V) \
+ do { \
+ if (V && *V) \
+ free (V); \
+ } while (0)
+
+#define df_scan_free_mws_vec(V) \
+ do { \
+ if (V && *V) \
+ free (V); \
+ } while (0)
/* The set of hard registers in eliminables[i].from. */
static HARD_REG_SET elim_reg_set;
-/* This is a bitmap copy of regs_invalidated_by_call so that we can
- easily add it into bitmaps, etc. */
-
-bitmap df_invalidated_by_call = NULL;
-
/* Initialize ur_in and ur_out as if all hard registers were partially
available. */
struct df_collection_rec
{
- struct df_ref ** def_vec;
+ df_ref * def_vec;
+ df_ref * use_vec;
unsigned int next_def;
- struct df_ref ** use_vec;
unsigned int next_use;
- struct df_ref ** eq_use_vec;
- unsigned int next_eq_use;
+ df_ref * eq_use_vec;
struct df_mw_hardreg **mw_vec;
+ unsigned int next_eq_use;
unsigned int next_mw;
};
-static struct df_ref * df_null_ref_rec[1];
+static df_ref df_null_ref_rec[1];
static struct df_mw_hardreg * df_null_mw_rec[1];
-static void df_ref_record (struct df_collection_rec *,
+static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
rtx, rtx *,
- basic_block, rtx, enum df_ref_type,
- enum df_ref_flags);
-static void df_def_record_1 (struct df_collection_rec *,
- rtx, basic_block, rtx,
- enum df_ref_flags);
-static void df_defs_record (struct df_collection_rec *,
- rtx, basic_block, rtx,
- enum df_ref_flags);
-static void df_uses_record (struct df_collection_rec *,
+ basic_block, struct df_insn_info *,
+ enum df_ref_type, int ref_flags,
+ int, int, enum machine_mode);
+static void df_def_record_1 (struct df_collection_rec *, rtx,
+ basic_block, struct df_insn_info *,
+ int ref_flags);
+static void df_defs_record (struct df_collection_rec *, rtx,
+ basic_block, struct df_insn_info *,
+ int ref_flags);
+static void df_uses_record (enum df_ref_class, struct df_collection_rec *,
rtx *, enum df_ref_type,
- basic_block, rtx, enum df_ref_flags);
+ basic_block, struct df_insn_info *,
+ int ref_flags,
+ int, int, enum machine_mode);
-static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
- basic_block, rtx, enum df_ref_type,
- enum df_ref_flags);
+static df_ref df_ref_create_structure (enum df_ref_class,
+ struct df_collection_rec *, rtx, rtx *,
+ basic_block, struct df_insn_info *,
+ enum df_ref_type, int ref_flags,
+ int, int, enum machine_mode);
static void df_insn_refs_collect (struct df_collection_rec*,
- basic_block, rtx);
+ basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (struct df_collection_rec *);
static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
-static void df_ref_chain_delete_du_chain (struct df_ref **);
-static void df_ref_chain_delete (struct df_ref **);
+static void df_ref_chain_delete_du_chain (df_ref *);
+static void df_ref_chain_delete (df_ref *);
static void df_refs_add_to_chains (struct df_collection_rec *,
basic_block, rtx);
static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
-static void df_install_ref (struct df_ref *, struct df_reg_info *,
+static void df_install_ref (df_ref, struct df_reg_info *,
struct df_ref_info *, bool);
static int df_ref_compare (const void *, const void *);
/* Problem data for the scanning dataflow function. */
struct df_scan_problem_data
{
- alloc_pool ref_pool;
+ alloc_pool ref_base_pool;
+ alloc_pool ref_artificial_pool;
+ alloc_pool ref_regular_pool;
+ alloc_pool ref_extract_pool;
alloc_pool insn_pool;
alloc_pool reg_pool;
alloc_pool mw_reg_pool;
- alloc_pool mw_link_pool;
bitmap_obstack reg_bitmaps;
bitmap_obstack insn_bitmaps;
};
typedef struct df_scan_bb_info *df_scan_bb_info_t;
+
+/* Internal function to shut down the scanning problem. */
static void
df_scan_free_internal (void)
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
+ unsigned int i;
+ basic_block bb;
+
+ /* The vectors that hold the refs are not pool allocated because
+ they come in many sizes. This makes them impossible to delete
+ all at once. */
+ for (i = 0; i < DF_INSN_SIZE(); i++)
+ {
+ struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
+ /* Skip the insns that have no insn_info or have been
+ deleted. */
+ if (insn_info)
+ {
+ df_scan_free_ref_vec (insn_info->defs);
+ df_scan_free_ref_vec (insn_info->uses);
+ df_scan_free_ref_vec (insn_info->eq_uses);
+ df_scan_free_mws_vec (insn_info->mw_hardregs);
+ }
+ }
+
+ FOR_ALL_BB (bb)
+ {
+ unsigned int bb_index = bb->index;
+ struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
+ if (bb_info)
+ {
+ df_scan_free_ref_vec (bb_info->artificial_defs);
+ df_scan_free_ref_vec (bb_info->artificial_uses);
+ }
+ }
free (df->def_info.refs);
free (df->def_info.begin);
BITMAP_FREE (df->insns_to_notes_rescan);
free_alloc_pool (df_scan->block_pool);
- free_alloc_pool (problem_data->ref_pool);
+ free_alloc_pool (problem_data->ref_base_pool);
+ free_alloc_pool (problem_data->ref_artificial_pool);
+ free_alloc_pool (problem_data->ref_regular_pool);
+ free_alloc_pool (problem_data->ref_extract_pool);
free_alloc_pool (problem_data->insn_pool);
free_alloc_pool (problem_data->reg_pool);
free_alloc_pool (problem_data->mw_reg_pool);
- free_alloc_pool (problem_data->mw_link_pool);
bitmap_obstack_release (&problem_data->reg_bitmaps);
bitmap_obstack_release (&problem_data->insn_bitmaps);
free (df_scan->problem_data);
df_scan->problem_data = problem_data;
df_scan->computed = true;
- problem_data->ref_pool
- = create_alloc_pool ("df_scan_ref pool",
- sizeof (struct df_ref), block_size);
+ problem_data->ref_base_pool
+ = create_alloc_pool ("df_scan ref base",
+ sizeof (struct df_base_ref), block_size);
+ problem_data->ref_artificial_pool
+ = create_alloc_pool ("df_scan ref artificial",
+ sizeof (struct df_artificial_ref), block_size);
+ problem_data->ref_regular_pool
+ = create_alloc_pool ("df_scan ref regular",
+ sizeof (struct df_regular_ref), block_size);
+ problem_data->ref_extract_pool
+ = create_alloc_pool ("df_scan ref extract",
+ sizeof (struct df_extract_ref), block_size);
problem_data->insn_pool
- = create_alloc_pool ("df_scan_insn pool",
+ = create_alloc_pool ("df_scan insn",
sizeof (struct df_insn_info), block_size);
problem_data->reg_pool
- = create_alloc_pool ("df_scan_reg pool",
+ = create_alloc_pool ("df_scan reg",
sizeof (struct df_reg_info), block_size);
problem_data->mw_reg_pool
- = create_alloc_pool ("df_scan_mw_reg pool",
+ = create_alloc_pool ("df_scan mw_reg",
sizeof (struct df_mw_hardreg), block_size);
- problem_data->mw_link_pool
- = create_alloc_pool ("df_scan_mw_link pool",
- sizeof (struct df_link), block_size);
bitmap_obstack_initialize (&problem_data->reg_bitmaps);
bitmap_obstack_initialize (&problem_data->insn_bitmaps);
df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
+ df_scan->optional_p = false;
}
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
int i;
+ int dcount = 0;
+ int ucount = 0;
+ int ecount = 0;
+ int icount = 0;
+ int ccount = 0;
+ basic_block bb;
+ rtx insn;
fprintf (file, ";; invalidated by call \t");
- df_print_regset (file, df_invalidated_by_call);
+ df_print_regset (file, regs_invalidated_by_call_regset);
fprintf (file, ";; hardware regs used \t");
df_print_regset (file, df->hardware_regs_used);
fprintf (file, ";; regular block artificial uses \t");
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (df_regs_ever_live_p (i))
fprintf (file, " %d[%s]", i, reg_names[i]);
+ fprintf (file, "\n;; ref usage \t");
+
+ for (i = 0; i < (int)df->regs_inited; i++)
+ if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
+ {
+ const char * sep = "";
+
+ fprintf (file, "r%d={", i);
+ if (DF_REG_DEF_COUNT (i))
+ {
+ fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
+ sep = ",";
+ dcount += DF_REG_DEF_COUNT (i);
+ }
+ if (DF_REG_USE_COUNT (i))
+ {
+ fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
+ sep = ",";
+ ucount += DF_REG_USE_COUNT (i);
+ }
+ if (DF_REG_EQ_USE_COUNT (i))
+ {
+ fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
+ ecount += DF_REG_EQ_USE_COUNT (i);
+ }
+ fprintf (file, "} ");
+ }
- fprintf (file, "\n");
+ FOR_EACH_BB (bb)
+ FOR_BB_INSNS (bb, insn)
+ if (INSN_P (insn))
+ {
+ if (CALL_P (insn))
+ ccount++;
+ else
+ icount++;
+ }
+
+ fprintf (file, "\n;; total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
+ dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
}
/* Dump the bb_info for a given basic block. */
df_scan_start_block, /* Debugging start block. */
NULL, /* Debugging end block. */
NULL, /* Incremental solution verify start. */
- NULL, /* Incremental solution verfiy end. */
+ NULL, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
- TV_DF_SCAN /* Timing variable. */
+ TV_DF_SCAN, /* Timing variable. */
+ false /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* First, grow the reg_info information. If the current size is less than
- the number of psuedos, grow to 25% more than the number of
+ the number of pseudos, grow to 25% more than the number of
pseudos.
Second, assure that all of the slots up to max_reg_num have been
if (df->regs_size < new_size)
{
new_size += new_size / 4;
- df->def_regs = xrealloc (df->def_regs,
- new_size *sizeof (struct df_reg_info*));
- df->use_regs = xrealloc (df->use_regs,
- new_size *sizeof (struct df_reg_info*));
- df->eq_use_regs = xrealloc (df->eq_use_regs,
- new_size *sizeof (struct df_reg_info*));
- df->def_info.begin = xrealloc (df->def_info.begin,
- new_size *sizeof (int));
- df->def_info.count = xrealloc (df->def_info.count,
- new_size *sizeof (int));
- df->use_info.begin = xrealloc (df->use_info.begin,
- new_size *sizeof (int));
- df->use_info.count = xrealloc (df->use_info.count,
- new_size *sizeof (int));
+ df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
+ df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
+ df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
+ new_size);
+ df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
+ df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
+ df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
+ df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
df->regs_size = new_size;
}
{
struct df_reg_info *reg_info;
- reg_info = pool_alloc (problem_data->reg_pool);
+ reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
memset (reg_info, 0, sizeof (struct df_reg_info));
df->def_regs[i] = reg_info;
- reg_info = pool_alloc (problem_data->reg_pool);
+ reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
memset (reg_info, 0, sizeof (struct df_reg_info));
df->use_regs[i] = reg_info;
- reg_info = pool_alloc (problem_data->reg_pool);
+ reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
memset (reg_info, 0, sizeof (struct df_reg_info));
df->eq_use_regs[i] = reg_info;
df->def_info.begin[i] = 0;
{
if (ref_info->refs_size < new_size)
{
- ref_info->refs = xrealloc (ref_info->refs,
- new_size *sizeof (struct df_ref *));
+ ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
memset (ref_info->refs + ref_info->refs_size, 0,
- (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
+ (new_size - ref_info->refs_size) *sizeof (df_ref));
ref_info->refs_size = new_size;
}
}
if (DF_INSN_SIZE () < new_size)
{
new_size += new_size / 4;
- df->insns = xrealloc (df->insns,
- new_size *sizeof (struct df_insn_info *));
+ df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
memset (df->insns + df->insns_size, 0,
(new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
DF_INSN_SIZE () = new_size;
/* Create a new ref of type DF_REF_TYPE for register REG at address
- LOC within INSN of BB. */
+ LOC within INSN of BB. This function is only used externally.
+
+ If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
+ DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE are used to pass the
+ extraction fields if they were constants. Otherwise they should be
+ -1 if those flags were set. */
-struct df_ref *
+df_ref
df_ref_create (rtx reg, rtx *loc, rtx insn,
basic_block bb,
enum df_ref_type ref_type,
- enum df_ref_flags ref_flags)
+ int ref_flags,
+ int width, int offset, enum machine_mode mode)
{
- struct df_ref *ref;
+ df_ref ref;
struct df_reg_info **reg_info;
struct df_ref_info *ref_info;
- struct df_ref **ref_rec;
- struct df_ref ***ref_rec_ptr;
+ df_ref *ref_rec;
+ df_ref **ref_rec_ptr;
unsigned int count = 0;
bool add_to_table;
+ enum df_ref_class cl;
df_grow_reg_info ();
/* You cannot hack artificial refs. */
gcc_assert (insn);
- ref = df_ref_create_structure (NULL, reg, loc, bb, insn,
- ref_type, ref_flags);
- if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
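+ /* Pick the ref class from the information supplied: extraction
+ bounds imply an extract ref, a location a regular ref, and
+ neither a base ref. */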
+ if (width != -1 || offset != -1)
+ cl = DF_REF_EXTRACT;
+ else if (loc)
+ cl = DF_REF_REGULAR;
+ else
+ cl = DF_REF_BASE;
+ ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
+ ref_type, ref_flags,
+ width, offset, mode);
+
+ if (DF_REF_REG_DEF_P (ref))
{
reg_info = df->def_regs;
ref_info = &df->def_info;
ref_rec = *ref_rec_ptr;
if (count)
{
- ref_rec = xrealloc (ref_rec, (count+2) * sizeof (struct df_ref*));
+ ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
*ref_rec_ptr = ref_rec;
ref_rec[count] = ref;
ref_rec[count+1] = NULL;
- qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
+ qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
}
else
{
- struct df_ref **ref_rec = XNEWVEC (struct df_ref*, 2);
+ df_ref *ref_rec = XNEWVEC (df_ref, 2);
ref_rec[0] = ref;
ref_rec[1] = NULL;
*ref_rec_ptr = ref_rec;
UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/
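+
+/* Free REF by returning it to the alloc pool that matches its class. */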
+static void
+df_free_ref (df_ref ref)
+{
+ struct df_scan_problem_data *problem_data
+ = (struct df_scan_problem_data *) df_scan->problem_data;
+
+ switch (DF_REF_CLASS (ref))
+ {
+ case DF_REF_BASE:
+ pool_free (problem_data->ref_base_pool, ref);
+ break;
+
+ case DF_REF_ARTIFICIAL:
+ pool_free (problem_data->ref_artificial_pool, ref);
+ break;
+
+ case DF_REF_REGULAR:
+ pool_free (problem_data->ref_regular_pool, ref);
+ break;
+
+ case DF_REF_EXTRACT:
+ pool_free (problem_data->ref_extract_pool, ref);
+ break;
+ }
+}
+
/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
Also delete the def-use or use-def chain if it exists. */
static void
-df_reg_chain_unlink (struct df_ref *ref)
+df_reg_chain_unlink (df_ref ref)
{
- struct df_ref *next = DF_REF_NEXT_REG (ref);
- struct df_ref *prev = DF_REF_PREV_REG (ref);
- struct df_scan_problem_data *problem_data
- = (struct df_scan_problem_data *) df_scan->problem_data;
+ df_ref next = DF_REF_NEXT_REG (ref);
+ df_ref prev = DF_REF_PREV_REG (ref);
int id = DF_REF_ID (ref);
struct df_reg_info *reg_info;
- struct df_ref **refs = NULL;
+ df_ref *refs = NULL;
- if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (ref))
{
- reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
+ int regno = DF_REF_REGNO (ref);
+ reg_info = DF_REG_DEF_GET (regno);
refs = df->def_info.refs;
}
else
{
if (df->analyze_subset)
{
- if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
+ if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
refs[id] = NULL;
}
else
if (next)
DF_REF_PREV_REG (next) = prev;
- pool_free (problem_data->ref_pool, ref);
+ df_free_ref (ref);
}
/* Remove REF from VEC. */
static void
-df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
+df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
{
- struct df_ref **vec = *vec_ptr;
+ df_ref *vec = *vec_ptr;
if (vec[1])
{
/* Unlink REF from all def-use/use-def chains, etc. */
void
-df_ref_remove (struct df_ref *ref)
+df_ref_remove (df_ref ref)
{
#if 0
if (dump_file)
if (DF_REF_IS_ARTIFICIAL (ref))
{
struct df_scan_bb_info *bb_info
- = df_scan_get_bb_info (DF_REF_BB (ref)->index);
+ = df_scan_get_bb_info (DF_REF_BBNO (ref));
df_ref_compress_rec (&bb_info->artificial_defs, ref);
}
else
if (DF_REF_IS_ARTIFICIAL (ref))
{
struct df_scan_bb_info *bb_info
- = df_scan_get_bb_info (DF_REF_BB (ref)->index);
+ = df_scan_get_bb_info (DF_REF_BBNO (ref));
df_ref_compress_rec (&bb_info->artificial_uses, ref);
}
else
struct df_insn_info *insn_rec;
df_grow_insn_info ();
- insn_rec = DF_INSN_GET (insn);
+ insn_rec = DF_INSN_INFO_GET (insn);
if (!insn_rec)
{
- insn_rec = pool_alloc (problem_data->insn_pool);
- DF_INSN_SET (insn, insn_rec);
+ insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
+ DF_INSN_INFO_SET (insn, insn_rec);
}
memset (insn_rec, 0, sizeof (struct df_insn_info));
insn_rec->insn = insn;
/* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
static void
-df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
+df_ref_chain_delete_du_chain (df_ref *ref_rec)
{
while (*ref_rec)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
/* CHAIN is allocated by DF_CHAIN. So make sure to
pass df_scan instance for the problem. */
if (DF_REF_CHAIN (ref))
/* Delete all refs in the ref chain. */
static void
-df_ref_chain_delete (struct df_ref **ref_rec)
+df_ref_chain_delete (df_ref *ref_rec)
{
- struct df_ref **start = ref_rec;
+ df_ref *start = ref_rec;
while (*ref_rec)
{
df_reg_chain_unlink (*ref_rec);
insn_info = DF_INSN_UID_SAFE_GET (uid);
- /* The client has defered rescanning. */
+ /* The client has deferred rescanning. */
if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
{
if (insn_info)
bitmap_set_bit (df->insns_to_delete, uid);
}
if (dump_file)
- fprintf (dump_file, "defering deletion of insn with uid = %d.\n", uid);
+ fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
return;
}
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
- struct df_ref **ref;
+ df_ref *ref;
struct df_mw_hardreg **mw;
if (collection_rec->def_vec)
for (ref = collection_rec->def_vec; *ref; ref++)
- pool_free (problem_data->ref_pool, *ref);
+ df_free_ref (*ref);
if (collection_rec->use_vec)
for (ref = collection_rec->use_vec; *ref; ref++)
- pool_free (problem_data->ref_pool, *ref);
+ df_free_ref (*ref);
if (collection_rec->eq_use_vec)
for (ref = collection_rec->eq_use_vec; *ref; ref++)
- pool_free (problem_data->ref_pool, *ref);
+ df_free_ref (*ref);
if (collection_rec->mw_vec)
for (mw = collection_rec->mw_vec; *mw; mw++)
pool_free (problem_data->mw_reg_pool, *mw);
struct df_insn_info *insn_info = NULL;
basic_block bb = BLOCK_FOR_INSN (insn);
struct df_collection_rec collection_rec;
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
+ collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
if ((!df) || (!INSN_P (insn)))
return false;
insn_info = DF_INSN_UID_SAFE_GET (uid);
- /* The client has defered rescanning. */
+ /* The client has deferred rescanning. */
if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
{
if (!insn_info)
insn_info->mw_hardregs = df_null_mw_rec;
}
if (dump_file)
- fprintf (dump_file, "defering rescan insn with uid = %d.\n", uid);
+ fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
bitmap_clear_bit (df->insns_to_delete, uid);
bitmap_clear_bit (df->insns_to_notes_rescan, uid);
bitmap_clear_bit (df->insns_to_notes_rescan, uid);
if (insn_info)
{
+ int luid;
bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
/* If there's no change, return false. */
if (the_same)
if (dump_file)
fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
- /* There's change - we need to delete the existing info. */
+ /* There's change - we need to delete the existing info.
+ Since the insn isn't moved, we can salvage its LUID. */
+ luid = DF_INSN_LUID (insn);
df_insn_delete (NULL, uid);
df_insn_create_insn_record (insn);
+ DF_INSN_LUID (insn) = luid;
}
else
{
- df_insn_create_insn_record (insn);
- df_insn_refs_collect (&collection_rec, bb, insn);
+ struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
+ df_insn_refs_collect (&collection_rec, bb, insn_info);
if (dump_file)
fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
}
}
-/* Process all of the defered rescans or deletions. */
+/* Process all of the deferred rescans or deletions. */
void
df_process_deferred_rescans (void)
}
if (dump_file)
- fprintf (dump_file, "starting the processing of defered insns\n");
+ fprintf (dump_file, "starting the processing of deferred insns\n");
bitmap_copy (tmp, df->insns_to_delete);
EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
}
if (dump_file)
- fprintf (dump_file, "ending the processing of defered insns\n");
+ fprintf (dump_file, "ending the processing of deferred insns\n");
BITMAP_FREE (tmp);
bitmap_clear (df->insns_to_delete);
ref_info->begin[regno] = offset;
if (include_defs)
{
- struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
+ df_ref ref = DF_REG_DEF_CHAIN (regno);
while (ref)
{
ref_info->refs[offset] = ref;
}
if (include_uses)
{
- struct df_ref *ref = DF_REG_USE_CHAIN (regno);
+ df_ref ref = DF_REG_USE_CHAIN (regno);
while (ref)
{
ref_info->refs[offset] = ref;
}
if (include_eq_uses)
{
- struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
+ df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
while (ref)
{
ref_info->refs[offset] = ref;
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **ref_rec;
+ df_ref *ref_rec;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref **ref_rec;
+ df_ref *ref_rec;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_uses)
for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_defs)
for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_uses)
for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
if (include_eq_uses)
for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
{
- struct df_ref *ref = *ref_rec;
+ df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
if (regno >= start)
{
static unsigned int
df_add_refs_to_table (unsigned int offset,
struct df_ref_info *ref_info,
- struct df_ref **ref_vec)
+ df_ref *ref_vec)
{
while (*ref_vec)
{
- struct df_ref *ref = *ref_vec;
+ df_ref ref = *ref_vec;
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
{
}
-/* Organinze the refs by insn into the table in REF_INFO. If
+/* Organize the refs by insn into the table in REF_INFO. If
blocks_to_analyze is defined, use that set, otherwise the entire
program. Include the defs if INCLUDE_DEFS. Include the uses if
INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
}
-/* Change the BB of all refs in the ref chain to NEW_BB.
- Assumes that all refs in the chain have the same BB.
- If changed, return the original bb the chain belonged to
- (or .
- If no change, return NEW_BB.
- If something's wrong, it will return NULL. */
-
-static basic_block
-df_ref_chain_change_bb (struct df_ref **ref_rec,
- basic_block old_bb,
- basic_block new_bb)
-{
- while (*ref_rec)
- {
- struct df_ref *ref = *ref_rec;
-
- if (DF_REF_BB (ref) == new_bb)
- return new_bb;
- else
- {
- gcc_assert (old_bb == NULL || DF_REF_BB (ref) == old_bb);
- old_bb = DF_REF_BB (ref);
- DF_REF_BB (ref) = new_bb;
- }
- ref_rec++;
- }
-
- return old_bb;
-}
-
-
/* Change all of the basic block references in INSN to use the insn's
current basic block. This function is called from routines that move
instructions from one block to another. */
void
-df_insn_change_bb (rtx insn)
+df_insn_change_bb (rtx insn, basic_block new_bb)
{
- basic_block new_bb = BLOCK_FOR_INSN (insn);
- basic_block old_bb = NULL;
+ basic_block old_bb = BLOCK_FOR_INSN (insn);
struct df_insn_info *insn_info;
unsigned int uid = INSN_UID (insn);
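+ /* If the insn is already in NEW_BB there is nothing to do; otherwise
+ move it to NEW_BB before updating any dataflow information. */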
+ if (old_bb == new_bb)
+ return;
+
+ set_block_for_insn (insn, new_bb);
+
if (!df)
return;
if (!INSN_P (insn))
return;
- old_bb = df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
- if (old_bb == new_bb)
- return;
-
- old_bb = df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
- if (old_bb == new_bb)
- return;
-
- old_bb = df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
- if (old_bb == new_bb)
- return;
-
df_set_bb_dirty (new_bb);
if (old_bb)
{
/* Helper function for df_ref_change_reg_with_loc. */
static void
-df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
+df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
+ struct df_reg_info *new_df,
int new_regno, rtx loc)
{
- struct df_ref *the_ref = old->reg_chain;
+ df_ref the_ref = old_df->reg_chain;
while (the_ref)
{
- if (DF_REF_LOC(the_ref) && (*DF_REF_LOC(the_ref) == loc))
+ if ((!DF_REF_IS_ARTIFICIAL (the_ref))
+ && (DF_REF_LOC (the_ref))
+ && (*DF_REF_LOC (the_ref) == loc))
{
- struct df_ref *next_ref = the_ref->next_reg;
- struct df_ref *prev_ref = the_ref->prev_reg;
- struct df_ref **ref_vec, **ref_vec_t;
+ df_ref next_ref = DF_REF_NEXT_REG (the_ref);
+ df_ref prev_ref = DF_REF_PREV_REG (the_ref);
+ df_ref *ref_vec, *ref_vec_t;
+ struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
unsigned int count = 0;
DF_REF_REGNO (the_ref) = new_regno;
/* Pull the_ref out of the old regno chain. */
if (prev_ref)
- prev_ref->next_reg = next_ref;
+ DF_REF_NEXT_REG (prev_ref) = next_ref;
else
- old->reg_chain = next_ref;
+ old_df->reg_chain = next_ref;
if (next_ref)
- next_ref->prev_reg = prev_ref;
- old->n_refs--;
+ DF_REF_PREV_REG (next_ref) = prev_ref;
+ old_df->n_refs--;
/* Put the ref into the new regno chain. */
- the_ref->prev_reg = NULL;
- the_ref->next_reg = new->reg_chain;
- if (new->reg_chain)
- new->reg_chain->prev_reg = the_ref;
- new->reg_chain = the_ref;
- new->n_refs++;
- df_set_bb_dirty (DF_REF_BB (the_ref));
-
- /* Need to resort the record that the ref was in because the
- regno is a sorting key. First, find the right record. */
- if (DF_REF_IS_ARTIFICIAL (the_ref))
- {
- unsigned int bb_index = DF_REF_BB (the_ref)->index;
- if (DF_REF_REG_DEF_P (the_ref))
- ref_vec = df_get_artificial_defs (bb_index);
- else
- ref_vec = df_get_artificial_uses (bb_index);
- }
+ DF_REF_PREV_REG (the_ref) = NULL;
+ DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
+ if (new_df->reg_chain)
+ DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
+ new_df->reg_chain = the_ref;
+ new_df->n_refs++;
+ if (DF_REF_BB (the_ref))
+ df_set_bb_dirty (DF_REF_BB (the_ref));
+
+ /* Need to re-sort the record that the ref was in because the
+ regno is a sorting key. First, find the right record. */
+ if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
+ ref_vec = insn_info->eq_uses;
else
- {
- struct df_insn_info *insn_info
- = DF_INSN_GET (DF_REF_INSN (the_ref));
- if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
- ref_vec = insn_info->eq_uses;
- else
- ref_vec = insn_info->uses;
- if (dump_file)
- fprintf (dump_file, "changing reg in insn %d\n",
- INSN_UID (DF_REF_INSN (the_ref)));
- }
+ ref_vec = insn_info->uses;
+ if (dump_file)
+ fprintf (dump_file, "changing reg in insn %d\n",
+ DF_REF_INSN_UID (the_ref));
+
ref_vec_t = ref_vec;
-
+
/* Find the length. */
while (*ref_vec_t)
{
count++;
ref_vec_t++;
}
- qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
+ qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
the_ref = next_ref;
}
else
- the_ref = the_ref->next_reg;
+ the_ref = DF_REF_NEXT_REG (the_ref);
}
}
/* Change the regno of all refs that contained LOC from OLD_REGNO to
- NEW_REGNO. Refs that do not match LOC are not changed. This call
- is to support the SET_REGNO macro. */
+ NEW_REGNO. Refs that do not match LOC are not changed, which means
+ that artificial refs are not changed since they have no loc. This
+ call is to support the SET_REGNO macro. */
void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
if (count == 0)
{
- free (insn_info->mw_hardregs);
+ df_scan_free_mws_vec (insn_info->mw_hardregs);
insn_info->mw_hardregs = df_null_mw_rec;
return 0;
}
if (df->changeable_flags & DF_NO_INSN_RESCAN)
return;
+ /* Do nothing if the insn hasn't been emitted yet. */
+ if (!BLOCK_FOR_INSN (insn))
+ return;
+
df_grow_bb_info (df_scan);
df_grow_reg_info ();
insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));
- /* The client has defered rescanning. */
+ /* The client has deferred rescanning. */
if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
{
if (!insn_info)
unsigned int num_deleted;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 1000);
num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
df_ref_chain_delete (insn_info->eq_uses);
{
case REG_EQUIV:
case REG_EQUAL:
- df_uses_record (&collection_rec,
+ df_uses_record (DF_REF_REGULAR, &collection_rec,
&XEXP (note, 0), DF_REF_REG_USE,
- bb, insn, DF_REF_IN_NOTE);
+ bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
default:
break;
}
if (collection_rec.next_mw > num_deleted)
{
insn_info->mw_hardregs =
- xrealloc (insn_info->mw_hardregs,
- (count + 1 + collection_rec.next_mw)
- * sizeof (struct df_ref*));
+ XRESIZEVEC (struct df_mw_hardreg *,
+ insn_info->mw_hardregs,
+ count + 1 + collection_rec.next_mw);
}
memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
(collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
It ignores DF_REF_MARKER. */
static bool
-df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
+df_ref_equal_p (df_ref ref1, df_ref ref2)
{
if (!ref2)
return false;
- return (ref1 == ref2) ||
- (DF_REF_REG (ref1) == DF_REF_REG (ref2)
- && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
- && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
- && DF_REF_INSN (ref1) == DF_REF_INSN (ref2)
- && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
- && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
- == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
- && DF_REF_BB (ref1) == DF_REF_BB (ref2));
+
+ if (ref1 == ref2)
+ return true;
+
+ if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
+ || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
+ || DF_REF_REG (ref1) != DF_REF_REG (ref2)
+ || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
+ || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
+ != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
+ || DF_REF_BB (ref1) != DF_REF_BB (ref2)
+ || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
+ return false;
+
+ switch (DF_REF_CLASS (ref1))
+ {
+ case DF_REF_ARTIFICIAL:
+ case DF_REF_BASE:
+ return true;
+
+ case DF_REF_EXTRACT:
+ if ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
+ || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
+ || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2)))
+ return false;
+ /* fallthru. */
+
+ case DF_REF_REGULAR:
+ return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
+
+ default:
+ gcc_unreachable ();
+ }
+ return false;
}
static int
df_ref_compare (const void *r1, const void *r2)
{
- const struct df_ref *ref1 = *(struct df_ref **)r1;
- const struct df_ref *ref2 = *(struct df_ref **)r2;
+ const df_ref ref1 = *(const df_ref *)r1;
+ const df_ref ref2 = *(const df_ref *)r2;
if (ref1 == ref2)
return 0;
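+ /* Refs of different classes are never equal, so order by class
+ first. */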
+ if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
+ return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
+
if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
- if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
- || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
+ if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
+ return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
+
+ /* Cannot look at the LOC field on artificial refs. */
+ if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
+ && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
else
return 1;
}
+
+ /* The classes are the same at this point so it is safe to only look
+ at ref1. */
+ if (DF_REF_CLASS (ref1) == DF_REF_EXTRACT)
+ {
+ if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
+ return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
+ if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
+ return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
+ if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
+ return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
+ }
return 0;
}
static void
-df_swap_refs (struct df_ref **ref_vec, int i, int j)
+df_swap_refs (df_ref *ref_vec, int i, int j)
{
- struct df_ref *tmp = ref_vec[i];
+ df_ref tmp = ref_vec[i];
ref_vec[i] = ref_vec[j];
ref_vec[j] = tmp;
}
/* Sort and compress a set of refs. */
static unsigned int
-df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
+df_sort_and_compress_refs (df_ref *ref_vec, unsigned int count)
{
- struct df_scan_problem_data *problem_data
- = (struct df_scan_problem_data *) df_scan->problem_data;
unsigned int i;
unsigned int dist = 0;
of DF_REF_COMPARE. */
if (i == count - 1)
return count;
- qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
+ qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
}
for (i=0; i<count-dist; i++)
/* Find the next ref that is not equal to the current ref. */
while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
{
- pool_free (problem_data->ref_pool, ref_vec[i + dist + 1]);
+ df_free_ref (ref_vec[i + dist + 1]);
dist++;
}
/* Copy it down to the next position. */
static int
df_mw_compare (const void *m1, const void *m2)
{
- const struct df_mw_hardreg *mw1 = *(struct df_mw_hardreg **)m1;
- const struct df_mw_hardreg *mw2 = *(struct df_mw_hardreg **)m2;
+ const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
+ const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
if (mw1 == mw2)
return 0;
/* Add the new df_ref to appropriate reg_info/ref_info chains. */
static void
-df_install_ref (struct df_ref *this_ref,
+df_install_ref (df_ref this_ref,
struct df_reg_info *reg_info,
struct df_ref_info *ref_info,
bool add_to_table)
{
unsigned int regno = DF_REF_REGNO (this_ref);
/* Add the ref to the reg_{def,use,eq_use} chain. */
- struct df_ref *head = reg_info->reg_chain;
+ df_ref head = reg_info->reg_chain;
reg_info->reg_chain = this_ref;
reg_info->n_refs++;
eq_uses) and installs the entire group into the insn. It also adds
each of these refs into the appropriate chains. */
-static struct df_ref **
+static df_ref *
df_install_refs (basic_block bb,
- struct df_ref **old_vec, unsigned int count,
+ df_ref *old_vec, unsigned int count,
struct df_reg_info **reg_info,
struct df_ref_info *ref_info,
bool is_notes)
if (count)
{
unsigned int i;
- struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
+ df_ref *new_vec = XNEWVEC (df_ref, count + 1);
bool add_to_table;
switch (ref_info->ref_order)
for (i = 0; i < count; i++)
{
- struct df_ref *this_ref = old_vec[i];
+ df_ref this_ref = old_vec[i];
new_vec[i] = this_ref;
df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
ref_info, add_to_table);
{
if (insn)
{
- struct df_insn_info *insn_rec = DF_INSN_GET (insn);
+ struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
/* If there is a vector in the collection rec, add it to the
insn. A null rec is a signal that the caller will handle the
chain specially. */
if (collection_rec->def_vec)
{
- if (insn_rec->defs && *insn_rec->defs)
- free (insn_rec->defs);
+ df_scan_free_ref_vec (insn_rec->defs);
insn_rec->defs
= df_install_refs (bb, collection_rec->def_vec,
collection_rec->next_def,
}
if (collection_rec->use_vec)
{
- if (insn_rec->uses && *insn_rec->uses)
- free (insn_rec->uses);
+ df_scan_free_ref_vec (insn_rec->uses);
insn_rec->uses
= df_install_refs (bb, collection_rec->use_vec,
collection_rec->next_use,
}
if (collection_rec->eq_use_vec)
{
- if (insn_rec->eq_uses && *insn_rec->eq_uses)
- free (insn_rec->eq_uses);
+ df_scan_free_ref_vec (insn_rec->eq_uses);
insn_rec->eq_uses
= df_install_refs (bb, collection_rec->eq_use_vec,
collection_rec->next_eq_use,
}
if (collection_rec->mw_vec)
{
- if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
- free (insn_rec->mw_hardregs);
+ df_scan_free_mws_vec (insn_rec->mw_hardregs);
insn_rec->mw_hardregs
= df_install_mws (collection_rec->mw_vec,
collection_rec->next_mw);
{
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
- if (bb_info->artificial_defs && *bb_info->artificial_defs)
- free (bb_info->artificial_defs);
+ df_scan_free_ref_vec (bb_info->artificial_defs);
bb_info->artificial_defs
= df_install_refs (bb, collection_rec->def_vec,
collection_rec->next_def,
df->def_regs,
&df->def_info, false);
- if (bb_info->artificial_uses && *bb_info->artificial_uses)
- free (bb_info->artificial_uses);
+ df_scan_free_ref_vec (bb_info->artificial_uses);
bb_info->artificial_uses
= df_install_refs (bb, collection_rec->use_vec,
collection_rec->next_use,
}
-/* Allocate a ref and initialize its fields. */
+/* Allocate a ref and initialize its fields.
+
+ If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
+ DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE are used to pass the fields
+ if they were constants. Otherwise they should be -1 if those flags
+ were set. */
-static struct df_ref *
-df_ref_create_structure (struct df_collection_rec *collection_rec,
+static df_ref
+df_ref_create_structure (enum df_ref_class cl,
+ struct df_collection_rec *collection_rec,
rtx reg, rtx *loc,
- basic_block bb, rtx insn,
+ basic_block bb, struct df_insn_info *info,
enum df_ref_type ref_type,
- enum df_ref_flags ref_flags)
+ int ref_flags,
+ int width, int offset, enum machine_mode mode)
{
- struct df_ref *this_ref;
+ df_ref this_ref = NULL;
int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
- this_ref = pool_alloc (problem_data->ref_pool);
+ switch (cl)
+ {
+ case DF_REF_BASE:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
+ gcc_assert (loc == NULL);
+ break;
+
+ case DF_REF_ARTIFICIAL:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
+ this_ref->artificial_ref.bb = bb;
+ gcc_assert (loc == NULL);
+ break;
+
+ case DF_REF_REGULAR:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
+ this_ref->regular_ref.loc = loc;
+ gcc_assert (loc);
+ break;
+
+ case DF_REF_EXTRACT:
+ this_ref = (df_ref) pool_alloc (problem_data->ref_extract_pool);
+ DF_REF_EXTRACT_WIDTH (this_ref) = width;
+ DF_REF_EXTRACT_OFFSET (this_ref) = offset;
+ DF_REF_EXTRACT_MODE (this_ref) = mode;
+ this_ref->regular_ref.loc = loc;
+ gcc_assert (loc);
+ break;
+ }
+
+ DF_REF_CLASS (this_ref) = cl;
DF_REF_ID (this_ref) = -1;
DF_REF_REG (this_ref) = reg;
DF_REF_REGNO (this_ref) = regno;
- DF_REF_LOC (this_ref) = loc;
- DF_REF_INSN (this_ref) = insn;
- DF_REF_CHAIN (this_ref) = NULL;
DF_REF_TYPE (this_ref) = ref_type;
+ DF_REF_INSN_INFO (this_ref) = info;
+ DF_REF_CHAIN (this_ref) = NULL;
DF_REF_FLAGS (this_ref) = ref_flags;
- DF_REF_BB (this_ref) = bb;
DF_REF_NEXT_REG (this_ref) = NULL;
DF_REF_PREV_REG (this_ref) = NULL;
DF_REF_ORDER (this_ref) = df->ref_order++;
if ((regno < FIRST_PSEUDO_REGISTER)
&& (!DF_REF_IS_ARTIFICIAL (this_ref)))
{
- if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (this_ref))
{
if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
if (collection_rec)
{
- if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
+ if (DF_REF_REG_DEF_P (this_ref))
collection_rec->def_vec[collection_rec->next_def++] = this_ref;
else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
/* Create new references of type DF_REF_TYPE for each part of register REG
- at address LOC within INSN of BB. */
+ at address LOC within INSN of BB.
+
+ If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
+ DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE are used to pass the
+ extraction fields if they were constants. Otherwise they should be
+ -1 if those flags were set. */
+
static void
-df_ref_record (struct df_collection_rec *collection_rec,
+df_ref_record (enum df_ref_class cl,
+ struct df_collection_rec *collection_rec,
rtx reg, rtx *loc,
- basic_block bb, rtx insn,
+ basic_block bb, struct df_insn_info *insn_info,
enum df_ref_type ref_type,
- enum df_ref_flags ref_flags)
+ int ref_flags,
+ int width, int offset, enum machine_mode mode)
{
- rtx oldreg = reg;
unsigned int regno;
gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
= (struct df_scan_problem_data *) df_scan->problem_data;
unsigned int i;
unsigned int endregno;
- struct df_ref *ref;
+ df_ref ref;
if (GET_CODE (reg) == SUBREG)
{
/* If this is a multiword hardreg, we create some extra
datastructures that will enable us to easily build REG_DEAD
and REG_UNUSED notes. */
- if ((endregno != regno + 1) && insn)
+ if ((endregno != regno + 1) && insn_info)
{
/* Sets to a subreg of a multiword register are partial.
Sets to a non-subreg of a multiword register are not. */
- if (GET_CODE (oldreg) == SUBREG)
+ if (GET_CODE (reg) == SUBREG)
ref_flags |= DF_REF_PARTIAL;
ref_flags |= DF_REF_MW_HARDREG;
- hardreg = pool_alloc (problem_data->mw_reg_pool);
+ hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
hardreg->type = ref_type;
hardreg->flags = ref_flags;
hardreg->mw_reg = reg;
- hardreg->loc = loc;
hardreg->start_regno = regno;
hardreg->end_regno = endregno - 1;
hardreg->mw_order = df->ref_order++;
for (i = regno; i < endregno; i++)
{
- ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
- bb, insn, ref_type, ref_flags);
+ ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
+ bb, insn_info, ref_type, ref_flags,
+ width, offset, mode);
gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
}
}
else
{
- struct df_ref *ref;
- ref = df_ref_create_structure (collection_rec, reg, loc, bb, insn,
- ref_type, ref_flags);
+ df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
+ ref_type, ref_flags, width, offset, mode);
}
}
return false;
isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
osize = GET_MODE_SIZE (GET_MODE (x));
- return (isize > osize && isize > UNITS_PER_WORD);
+ return isize > osize
+ && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
}
static void
df_def_record_1 (struct df_collection_rec *collection_rec,
- rtx x, basic_block bb, rtx insn,
- enum df_ref_flags flags)
+ rtx x, basic_block bb, struct df_insn_info *insn_info,
+ int flags)
{
rtx *loc;
rtx dst;
- bool dst_in_strict_lowpart = false;
+ int offset = -1;
+ int width = -1;
+ enum machine_mode mode = VOIDmode;
+ enum df_ref_class cl = DF_REF_REGULAR;
/* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
construct. */
if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
|| GET_CODE (temp) == SET)
df_def_record_1 (collection_rec,
- temp, bb, insn,
+ temp, bb, insn_info,
GET_CODE (temp) == CLOBBER
? flags | DF_REF_MUST_CLOBBER : flags);
}
return;
}
- /* Maybe, we should flag the use of STRICT_LOW_PART somehow. It might
- be handy for the reg allocator. */
- while (GET_CODE (dst) == STRICT_LOW_PART
- || GET_CODE (dst) == ZERO_EXTRACT
- || df_read_modify_subreg_p (dst))
+ if (GET_CODE (dst) == STRICT_LOW_PART)
{
-#if 0
- /* Strict low part always contains SUBREG, but we do not want to make
- it appear outside, as whole register is always considered. */
- if (GET_CODE (dst) == STRICT_LOW_PART)
+ flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
+
+ loc = &XEXP (dst, 0);
+ dst = *loc;
+ }
+
+ if (GET_CODE (dst) == ZERO_EXTRACT)
+ {
+ flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
+
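+ /* Record the extraction bounds only when both are compile-time
+ constants; otherwise the def is created as a regular ref. */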
+ if (GET_CODE (XEXP (dst, 1)) == CONST_INT
+ && GET_CODE (XEXP (dst, 2)) == CONST_INT)
{
- loc = &XEXP (dst, 0);
- dst = *loc;
+ width = INTVAL (XEXP (dst, 1));
+ offset = INTVAL (XEXP (dst, 2));
+ mode = GET_MODE (dst);
+ cl = DF_REF_EXTRACT;
}
-#endif
+
loc = &XEXP (dst, 0);
- if (GET_CODE (dst) == STRICT_LOW_PART)
- dst_in_strict_lowpart = true;
dst = *loc;
- flags |= DF_REF_READ_WRITE;
+ }
+ /* At this point if we do not have a reg or a subreg, just return. */
+ if (REG_P (dst))
+ {
+ df_ref_record (cl, collection_rec,
+ dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
+ width, offset, mode);
+
+ /* We want to keep sp alive everywhere - by making all
+ writes to sp also uses of sp. */
+ if (REGNO (dst) == STACK_POINTER_REGNUM)
+ df_ref_record (DF_REF_BASE, collection_rec,
+ dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
+ width, offset, mode);
}
+ else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
+ {
+ if (df_read_modify_subreg_p (dst))
+ flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
- /* Sets to a subreg of a single word register are partial sets if
- they are wrapped in a strict lowpart, and not partial otherwise.
- */
- if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))
- && dst_in_strict_lowpart)
- flags |= DF_REF_PARTIAL;
-
- if (REG_P (dst)
- || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
- df_ref_record (collection_rec,
- dst, loc, bb, insn, DF_REF_REG_DEF, flags);
+ flags |= DF_REF_SUBREG;
+
+ df_ref_record (cl, collection_rec,
+ dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
+ width, offset, mode);
+ }
}
static void
df_defs_record (struct df_collection_rec *collection_rec,
- rtx x, basic_block bb, rtx insn, enum df_ref_flags flags)
+ rtx x, basic_block bb, struct df_insn_info *insn_info,
+ int flags)
{
RTX_CODE code = GET_CODE (x);
if (code == SET || code == CLOBBER)
{
/* Mark the single def within the pattern. */
- enum df_ref_flags clobber_flags = flags;
+ int clobber_flags = flags;
clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
- df_def_record_1 (collection_rec, x, bb, insn, clobber_flags);
+ df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
}
else if (code == COND_EXEC)
{
df_defs_record (collection_rec, COND_EXEC_CODE (x),
- bb, insn, DF_REF_CONDITIONAL);
+ bb, insn_info, DF_REF_CONDITIONAL);
}
else if (code == PARALLEL)
{
/* Mark the multiple defs within the pattern. */
for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
- df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn, flags);
+ df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
}
}
-/* Process all the registers used in the rtx at address LOC. */
+/* Process all the registers used in the rtx at address LOC.
+
+ If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
+ DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE are used to pass the
+ extraction fields if they were constants. Otherwise they should be
+ -1 if those flags were set. */
static void
-df_uses_record (struct df_collection_rec *collection_rec,
+df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
rtx *loc, enum df_ref_type ref_type,
- basic_block bb, rtx insn, enum df_ref_flags flags)
+ basic_block bb, struct df_insn_info *insn_info,
+ int flags,
+ int width, int offset, enum machine_mode mode)
{
RTX_CODE code;
rtx x;
case CONST_INT:
case CONST:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case PC:
case CC0:
/* If we are clobbering a MEM, mark any registers inside the address
as being used. */
if (MEM_P (XEXP (x, 0)))
- df_uses_record (collection_rec,
+ df_uses_record (cl, collection_rec,
&XEXP (XEXP (x, 0), 0),
- DF_REF_REG_MEM_STORE, bb, insn, flags);
+ DF_REF_REG_MEM_STORE,
+ bb, insn_info,
+ flags, width, offset, mode);
/* If we're clobbering a REG then we have a def so ignore. */
return;
case MEM:
- df_uses_record (collection_rec,
+ df_uses_record (cl, collection_rec,
&XEXP (x, 0), DF_REF_REG_MEM_LOAD,
- bb, insn, flags & DF_REF_IN_NOTE);
+ bb, insn_info, flags & DF_REF_IN_NOTE,
+ width, offset, mode);
return;
case SUBREG:
if (!REG_P (SUBREG_REG (x)))
{
loc = &SUBREG_REG (x);
- df_uses_record (collection_rec, loc, ref_type, bb, insn, flags);
+ df_uses_record (cl, collection_rec, loc, ref_type, bb, insn_info, flags,
+ width, offset, mode);
return;
}
/* ... Fall through ... */
case REG:
- df_ref_record (collection_rec,
- x, loc, bb, insn, ref_type, flags);
+ df_ref_record (cl, collection_rec,
+ x, loc, bb, insn_info,
+ ref_type, flags,
+ width, offset, mode);
return;
+ case SIGN_EXTRACT:
+ case ZERO_EXTRACT:
+ {
+ /* If the parameters to the zero or sign extract are
+ constants, strip them off and recurse; otherwise there is
+ no information that we can gain from this operation. */
+ if (GET_CODE (XEXP (x, 1)) == CONST_INT
+ && GET_CODE (XEXP (x, 2)) == CONST_INT)
+ {
+ width = INTVAL (XEXP (x, 1));
+ offset = INTVAL (XEXP (x, 2));
+ mode = GET_MODE (x);
+
+ if (code == ZERO_EXTRACT)
+ flags |= DF_REF_ZERO_EXTRACT;
+ else
+ flags |= DF_REF_SIGN_EXTRACT;
+
+ df_uses_record (DF_REF_EXTRACT, collection_rec,
+ &XEXP (x, 0), ref_type, bb, insn_info, flags,
+ width, offset, mode);
+ return;
+ }
+ }
+ break;
+
case SET:
{
rtx dst = SET_DEST (x);
gcc_assert (!(flags & DF_REF_IN_NOTE));
- df_uses_record (collection_rec,
- &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);
+ df_uses_record (cl, collection_rec,
+ &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
+ width, offset, mode);
switch (GET_CODE (dst))
{
case SUBREG:
if (df_read_modify_subreg_p (dst))
{
- df_uses_record (collection_rec, &SUBREG_REG (dst),
- DF_REF_REG_USE, bb, insn, flags | DF_REF_READ_WRITE);
+ df_uses_record (cl, collection_rec, &SUBREG_REG (dst),
+ DF_REF_REG_USE, bb, insn_info,
+ flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
+ width, offset, mode);
break;
}
/* Fall through. */
case CC0:
break;
case MEM:
- df_uses_record (collection_rec, &XEXP (dst, 0),
- DF_REF_REG_MEM_STORE, bb, insn, flags);
+ df_uses_record (cl, collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_MEM_STORE, bb, insn_info, flags,
+ width, offset, mode);
break;
case STRICT_LOW_PART:
{
/* A strict_low_part uses the whole REG and not just the
SUBREG. */
dst = XEXP (dst, 0);
- df_uses_record (collection_rec,
+ df_uses_record (cl, collection_rec,
(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
- DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
+ DF_REF_REG_USE, bb, insn_info,
+ DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
+ width, offset, mode);
}
break;
case ZERO_EXTRACT:
- case SIGN_EXTRACT:
- df_uses_record (collection_rec, &XEXP (dst, 0),
- DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
- df_uses_record (collection_rec, &XEXP (dst, 1),
- DF_REF_REG_USE, bb, insn, flags);
- df_uses_record (collection_rec, &XEXP (dst, 2),
- DF_REF_REG_USE, bb, insn, flags);
- dst = XEXP (dst, 0);
+ {
+ if (GET_CODE (XEXP (dst, 1)) == CONST_INT
+ && GET_CODE (XEXP (dst, 2)) == CONST_INT)
+ {
+ width = INTVAL (XEXP (dst, 1));
+ offset = INTVAL (XEXP (dst, 2));
+ mode = GET_MODE (dst);
+ df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_USE, bb, insn_info,
+ DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
+ width, offset, mode);
+ }
+ else
+ {
+ df_uses_record (cl, collection_rec, &XEXP (dst, 1),
+ DF_REF_REG_USE, bb, insn_info, flags,
+ width, offset, mode);
+ df_uses_record (cl, collection_rec, &XEXP (dst, 2),
+ DF_REF_REG_USE, bb, insn_info, flags,
+ width, offset, mode);
+ df_uses_record (cl, collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_USE, bb, insn_info,
+ DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
+ width, offset, mode);
+ }
+
+ }
break;
+
default:
gcc_unreachable ();
}
int j;
for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
- df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
- DF_REF_REG_USE, bb, insn, flags);
+ df_uses_record (cl, collection_rec, &ASM_OPERANDS_INPUT (x, j),
+ DF_REF_REG_USE, bb, insn_info, flags,
+ width, offset, mode);
return;
}
break;
case PRE_MODIFY:
case POST_MODIFY:
/* Catch the def of the register being modified. */
- flags |= DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY;
- df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0), bb, insn,
- DF_REF_REG_DEF, flags);
+ df_ref_record (cl, collection_rec, XEXP (x, 0), &XEXP (x, 0),
+ bb, insn_info,
+ DF_REF_REG_DEF,
+ flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
+ width, offset, mode);
/* ... Fall through to handle uses ... */
loc = &XEXP (x, 0);
goto retry;
}
- df_uses_record (collection_rec, &XEXP (x, i), ref_type, bb, insn, flags);
+ df_uses_record (cl, collection_rec, &XEXP (x, i), ref_type,
+ bb, insn_info, flags,
+ width, offset, mode);
}
else if (fmt[i] == 'E')
{
int j;
for (j = 0; j < XVECLEN (x, i); j++)
- df_uses_record (collection_rec,
- &XVECEXP (x, i, j), ref_type, bb, insn, flags);
+ df_uses_record (cl, collection_rec,
+ &XVECEXP (x, i, j), ref_type,
+ bb, insn_info, flags,
+ width, offset, mode);
}
}
}
unsigned int i;
for (i = 0; i < collection_rec->next_def; i++)
{
- struct df_ref *ref = collection_rec->def_vec[i];
+ df_ref ref = collection_rec->def_vec[i];
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
{
- struct df_ref *use
- = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
- DF_REF_LOC (ref), DF_REF_BB (ref),
- DF_REF_INSN (ref), DF_REF_REG_USE,
- DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
+ int width = -1;
+ int offset = -1;
+ enum machine_mode mode = VOIDmode;
+ df_ref use;
+
+ if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
+ {
+ width = DF_REF_EXTRACT_WIDTH (ref);
+ offset = DF_REF_EXTRACT_OFFSET (ref);
+ mode = DF_REF_EXTRACT_MODE (ref);
+ }
+
+ use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
+ DF_REF_LOC (ref), DF_REF_BB (ref),
+ DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
+ DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
+ width, offset, mode);
DF_REF_REGNO (use) = DF_REF_REGNO (ref);
}
}
static void
df_get_call_refs (struct df_collection_rec * collection_rec,
basic_block bb,
- rtx insn,
- enum df_ref_flags flags)
+ struct df_insn_info *insn_info,
+ int flags)
{
rtx note;
bitmap_iterator bi;
depending on which def is seen first. */
for (i=0; i<collection_rec->next_def; i++)
{
- struct df_ref *def = collection_rec->def_vec[i];
+ df_ref def = collection_rec->def_vec[i];
bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
}
/* Record the registers used to pass arguments, and explicitly
noted as clobbered. */
- for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
+ for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
note = XEXP (note, 1))
{
if (GET_CODE (XEXP (note, 0)) == USE)
- df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
- DF_REF_REG_USE, bb, insn, flags);
+ df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (XEXP (note, 0), 0),
+ DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
+ VOIDmode);
else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
{
- unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
- if (!bitmap_bit_p (defs_generated, regno))
- df_defs_record (collection_rec, XEXP (note, 0), bb, insn, flags);
+ if (REG_P (XEXP (XEXP (note, 0), 0)))
+ {
+ unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
+ if (!bitmap_bit_p (defs_generated, regno))
+ df_defs_record (collection_rec, XEXP (note, 0), bb,
+ insn_info, flags);
+ }
+ else
+ df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (note, 0),
+ DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
+ VOIDmode);
}
}
/* The stack ptr is used (honorarily) by a CALL insn. */
- df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
- NULL, bb, insn, DF_REF_REG_USE, DF_REF_CALL_STACK_USAGE | flags);
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
+ NULL, bb, insn_info, DF_REF_REG_USE,
+ DF_REF_CALL_STACK_USAGE | flags,
+ -1, -1, VOIDmode);
/* Calls may also reference any of the global registers,
so they are recorded as used. */
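+  /* Since a call may also set any global register, a def is recorded
+     for each one in addition to the use.  */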
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
- df_ref_record (collection_rec, regno_reg_rtx[i],
- NULL, bb, insn, DF_REF_REG_USE, flags);
+ {
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1,
+ VOIDmode);
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1,
+ VOIDmode);
+ }
- is_sibling_call = SIBLING_CALL_P (insn);
- EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
+ is_sibling_call = SIBLING_CALL_P (insn_info->insn);
+ EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
{
- if ((!bitmap_bit_p (defs_generated, ui))
+ if (!global_regs[ui]
+ && (!bitmap_bit_p (defs_generated, ui))
&& (!is_sibling_call
|| !bitmap_bit_p (df->exit_block_uses, ui)
|| refers_to_regno_p (ui, ui+1,
- current_function_return_rtx, NULL)))
-
- df_ref_record (collection_rec, regno_reg_rtx[ui],
- NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags);
+ crtl->return_rtx, NULL)))
+ df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
+ NULL, bb, insn_info, DF_REF_REG_DEF,
+ DF_REF_MAY_CLOBBER | flags,
+ -1, -1, VOIDmode);
}
BITMAP_FREE (defs_generated);
static void
df_insn_refs_collect (struct df_collection_rec* collection_rec,
- basic_block bb, rtx insn)
+ basic_block bb, struct df_insn_info *insn_info)
{
rtx note;
- bool is_cond_exec = (GET_CODE (PATTERN (insn)) == COND_EXEC);
+ bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
/* Clear out the collection record. */
collection_rec->next_def = 0;
collection_rec->next_mw = 0;
/* Record register defs. */
- df_defs_record (collection_rec, PATTERN (insn), bb, insn, 0);
+ df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
- /* Process REG_EQUIV/REG_EQUAL notes */
- for (note = REG_NOTES (insn); note;
+ /* Process REG_EQUIV/REG_EQUAL notes. */
+ for (note = REG_NOTES (insn_info->insn); note;
note = XEXP (note, 1))
{
switch (REG_NOTE_KIND (note))
{
case REG_EQUIV:
case REG_EQUAL:
- df_uses_record (collection_rec,
+ df_uses_record (DF_REF_REGULAR, collection_rec,
&XEXP (note, 0), DF_REF_REG_USE,
- bb, insn, DF_REF_IN_NOTE);
+ bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
break;
case REG_NON_LOCAL_GOTO:
/* The frame ptr is used by a non-local goto. */
- df_ref_record (collection_rec,
+ df_ref_record (DF_REF_BASE, collection_rec,
regno_reg_rtx[FRAME_POINTER_REGNUM],
- NULL,
- bb, insn,
- DF_REF_REG_USE, 0);
+ NULL, bb, insn_info,
+ DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
- df_ref_record (collection_rec,
+ df_ref_record (DF_REF_BASE, collection_rec,
regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
- NULL,
- bb, insn,
- DF_REF_REG_USE, 0);
+ NULL, bb, insn_info,
+ DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#endif
break;
default:
}
}
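+  /* For a conditionally executed (COND_EXEC) call, the refs collected
+     by df_get_call_refs may or may not happen at run time, so they are
+     flagged DF_REF_CONDITIONAL.  */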
- if (CALL_P (insn))
- df_get_call_refs (collection_rec, bb, insn,
+ if (CALL_P (insn_info->insn))
+ df_get_call_refs (collection_rec, bb, insn_info,
(is_cond_exec) ? DF_REF_CONDITIONAL : 0);
/* Record the register uses. */
- df_uses_record (collection_rec,
- &PATTERN (insn), DF_REF_REG_USE, bb, insn, 0);
+ df_uses_record (DF_REF_REGULAR, collection_rec,
+ &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
+ -1, -1, VOIDmode);
/* DF_REF_CONDITIONAL needs corresponding USES. */
if (is_cond_exec)
df_canonize_collection_rec (collection_rec);
}
-/* Return true if any pred of BB is an eh. */
-
-bool
-df_has_eh_preds (basic_block bb)
-{
- edge e;
- edge_iterator ei;
-
- FOR_EACH_EDGE (e, ei, bb->preds)
- {
- if (e->flags & EDGE_EH)
- return true;
- }
- return false;
-}
-
-
/* Recompute the luids for the insns in BB. */
void
/* Scan the block an insn at a time from beginning to end. */
FOR_BB_INSNS (bb, insn)
{
- struct df_insn_info *insn_info = DF_INSN_GET (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
/* Inserting labels does not always trigger the incremental
rescanning. */
if (!insn_info)
{
gcc_assert (!INSN_P (insn));
- df_insn_create_insn_record (insn);
+ insn_info = df_insn_create_insn_record (insn);
}
- DF_INSN_LUID (insn) = luid;
+ DF_INSN_INFO_LUID (insn_info) = luid;
if (INSN_P (insn))
luid++;
}
}
#ifdef EH_RETURN_DATA_REGNO
- if (df_has_eh_preds (bb))
+ if (bb_has_eh_pred (bb))
{
unsigned int i;
/* Mark the registers that will contain data for the handler. */
unsigned regno = EH_RETURN_DATA_REGNO (i);
if (regno == INVALID_REGNUM)
break;
- df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
- bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
+ bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1,
+ VOIDmode);
}
}
#endif
-
-#ifdef EH_USES
- if (df_has_eh_preds (bb))
- {
- unsigned int i;
- /* This code is putting in an artificial ref for the use at the
- TOP of the block that receives the exception. It is too
- cumbersome to actually put the ref on the edge. We could
- either model this at the top of the receiver block or the
- bottom of the sender block.
-
- The bottom of the sender block is problematic because not all
- out-edges of the a block are eh-edges. However, it is true
- that all edges into a block are either eh-edges or none of
- them are eh-edges. Thus, we can model this at the top of the
- eh-receiver for all of the edges at once. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (EH_USES (i))
- df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
- bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP);
- }
-#endif
-
/* Add the hard_frame_pointer if this block is the target of a
non-local goto. */
if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
- df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
- bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
+ bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, VOIDmode);
/* Add the artificial uses. */
if (bb->index >= NUM_FIXED_BLOCKS)
{
bitmap_iterator bi;
unsigned int regno;
- bitmap au = df_has_eh_preds (bb)
+ bitmap au = bb_has_eh_pred (bb)
? df->eh_block_artificial_uses
: df->regular_block_artificial_uses;
EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
{
- df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
- bb, NULL, DF_REF_REG_USE, 0);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
+ bb, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
}
}
int luid = 0;
struct df_scan_bb_info *bb_info;
struct df_collection_rec collection_rec;
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
+ collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
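+  /* The fixed-size vectors above are stack allocated with XALLOCAVEC,
+     a typed wrapper around alloca, so they need no explicit free.  */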
if (!df)
return;
/* Scan the block an insn at a time from beginning to end. */
FOR_BB_INSNS (bb, insn)
{
- struct df_insn_info *insn_info = DF_INSN_GET (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
gcc_assert (!insn_info);
- df_insn_create_insn_record (insn);
+ insn_info = df_insn_create_insn_record (insn);
if (INSN_P (insn))
{
/* Record refs within INSN. */
- DF_INSN_LUID (insn) = luid++;
- df_insn_refs_collect (&collection_rec, bb, insn);
+ DF_INSN_INFO_LUID (insn_info) = luid++;
+ df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
df_refs_add_to_chains (&collection_rec, bb, insn);
}
- DF_INSN_LUID (insn) = luid;
+ DF_INSN_INFO_LUID (insn_info) = luid;
}
/* Other block level artificial refs */
df_refs_add_to_chains (&collection_rec, bb, NULL);
/* Now that the block has been processed, set the block as dirty so
- lr and ur will get it processed. */
+ LR and LIVE will get it processed. */
df_set_bb_dirty (bb);
}
static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
+#ifdef EH_USES
+ unsigned int i;
+#endif
+
bitmap_clear (regular_block_artificial_uses);
if (reload_completed)
}
/* The all-important stack pointer must always be live. */
bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
+
+#ifdef EH_USES
+  /* EH_USES registers are used:
+     1) at all insns that might throw (calls, or trapping insns when
+	-fnon-call-exceptions is enabled)
+     2) in all EH edges
+     3) to support backtraces and/or debugging, anywhere between their
+	initialization and the point where the saved registers are
+	restored from them, including the cases where we don't reach
+	the epilogue (noreturn call or infinite loop).  */
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (EH_USES (i))
+ bitmap_set_bit (regular_block_artificial_uses, i);
+#endif
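+
+  /* Purely for illustration (not taken from any real target): a port
+     that keeps an unwind register live for exception handling could
+     define something along the lines of
+
+	 #define EH_USES(REGNO) ((REGNO) == UNWIND_REGNUM)
+
+     where UNWIND_REGNUM is a hypothetical hard register number; every
+     register for which EH_USES returns nonzero is added to the regular
+     block artificial uses above.  */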
}
{
bitmap_clear (eh_block_artificial_uses);
- /* The following code (down thru the arg_pointer seting APPEARS
+  /* The following code (down through the arg_pointer setting) appears
to be necessary because there is nothing that actually
describes what the exception handling code may actually need
to keep alive. */
}
-
-
/* Set the bit for regs that are considered being defined at the entry. */
static void
#endif
}
+ /* The always important stack pointer. */
+ bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
+
/* Once the prologue has been generated, all of these registers
should just show up in the first regular block. */
if (HAVE_prologue && epilogue_completed)
}
else
{
- /* The always important stack pointer. */
- bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
-
/* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
only STATIC_CHAIN_REGNUM is defined. If they are different,
we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
#endif
#endif
-
- r = targetm.calls.struct_value_rtx (current_function_decl, true);
- if (r && REG_P (r))
- bitmap_set_bit (entry_block_defs, REGNO (r));
}
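+  /* If the address where a returned aggregate is to be stored comes in
+     a register, that register is defined on entry.  */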
+ r = targetm.calls.struct_value_rtx (current_function_decl, true);
+ if (r && REG_P (r))
+ bitmap_set_bit (entry_block_defs, REGNO (r));
+
if ((!reload_completed) || frame_pointer_needed)
{
/* Any reference to any pseudo before reload is a potential
/* These registers are live everywhere. */
if (!reload_completed)
{
-#ifdef EH_USES
- /* The ia-64, the only machine that uses this, does not define these
- until after reload. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (EH_USES (i))
- {
- bitmap_set_bit (entry_block_defs, i);
- }
-#endif
-
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* Pseudos with argument area equivalences may require
reloading via the argument pointer. */
it has to show up in the entry def set. */
if (df_need_static_chain_reg (cfun))
{
-#if !defined (STATIC_CHAIN_INCOMING_REGNUM) \
- || STATIC_CHAIN_REGNUM == STATIC_CHAIN_INCOMING_REGNUM
- bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
-#else
+#ifdef STATIC_CHAIN_INCOMING_REGNUM
bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
+#else
+#ifdef STATIC_CHAIN_REGNUM
+ bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
+#endif
#endif
}
}
/* Return the (conservative) set of hard registers that are defined on
entry to the function.
- It uses df->entry_block_defs to determine which regster
+ It uses df->entry_block_defs to determine which register
reference to include. */
static void
EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
{
- df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
- ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
+ ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1,
+ VOIDmode);
}
df_canonize_collection_rec (collection_rec);
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
+ collection_rec.def_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);
df_entry_block_defs_collect (&collection_rec, entry_block_defs);
}
-/* Update the defs in the entry bolck. */
+/* Update the defs in the entry block. */
void
df_update_entry_block_defs (void)
unsigned int i;
bitmap_clear (exit_block_uses);
-
- /* If exiting needs the right stack value, consider the stack
- pointer live at the end of the function. */
- if ((HAVE_epilogue && epilogue_completed)
- || !EXIT_IGNORE_STACK
- || (!FRAME_POINTER_REQUIRED
- && !current_function_calls_alloca
- && flag_omit_frame_pointer)
- || current_function_sp_is_unchanging)
- {
- bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
- }
+
+ /* Stack pointer is always live at the exit. */
+ bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
/* Mark the frame pointer if needed at the end of the function.
If we end up eliminating it, it will be removed from the live
#ifdef EH_RETURN_DATA_REGNO
/* Mark the registers that will contain data for the handler. */
- if (reload_completed && current_function_calls_eh_return)
+ if (reload_completed && crtl->calls_eh_return)
for (i = 0; ; ++i)
{
unsigned regno = EH_RETURN_DATA_REGNO (i);
#ifdef EH_RETURN_STACKADJ_RTX
if ((!HAVE_epilogue || ! epilogue_completed)
- && current_function_calls_eh_return)
+ && crtl->calls_eh_return)
{
rtx tmp = EH_RETURN_STACKADJ_RTX;
if (tmp && REG_P (tmp))
#ifdef EH_RETURN_HANDLER_RTX
if ((!HAVE_epilogue || ! epilogue_completed)
- && current_function_calls_eh_return)
+ && crtl->calls_eh_return)
{
rtx tmp = EH_RETURN_HANDLER_RTX;
if (tmp && REG_P (tmp))
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
- df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
- EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
+ EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* It is deliberate that this is not put in the exit block uses but
I do not know why. */
if (reload_completed
&& !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
- && df_has_eh_preds (EXIT_BLOCK_PTR)
+ && bb_has_eh_pred (EXIT_BLOCK_PTR)
&& fixed_regs[ARG_POINTER_REGNUM])
- df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
- EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
+ df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
+ EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
#endif
df_canonize_collection_rec (collection_rec);
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
+ collection_rec.use_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);
df_exit_block_uses_collect (&collection_rec, exit_block_uses);
void
df_hard_reg_init (void)
{
- int i;
#ifdef ELIMINABLE_REGS
+ int i;
static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
if (initialized)
return;
- bitmap_obstack_initialize (&persistent_obstack);
-
/* Record which registers will be eliminated. We use this in
mark_used_regs. */
CLEAR_HARD_REG_SET (elim_reg_set);
SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif
- df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
-
- /* Inconveniently, this is only readily available in hard reg set
- form. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
- if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
- bitmap_set_bit (df_invalidated_by_call, i);
-
initialized = true;
}
are in the correct chain. */
static unsigned int
-df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
+df_reg_chain_mark (df_ref refs, unsigned int regno,
bool is_def, bool is_eq_use)
{
unsigned int count = 0;
- struct df_ref *ref;
+ df_ref ref;
for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
{
gcc_assert (!DF_REF_IS_REG_MARKED (ref));
/* Check to make sure the ref is in the correct chain. */
gcc_assert (DF_REF_REGNO (ref) == regno);
if (is_def)
- gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
+ gcc_assert (DF_REF_REG_DEF_P (ref));
else
- gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);
+ gcc_assert (!DF_REF_REG_DEF_P (ref));
if (is_eq_use)
gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
else
gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
- if (ref->next_reg)
- gcc_assert (ref->next_reg->prev_reg == ref);
+ if (DF_REF_NEXT_REG (ref))
+ gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
count++;
DF_REF_REG_MARK (ref);
}
/* Verify that all of the registers in the chain are unmarked. */
static void
-df_reg_chain_verify_unmarked (struct df_ref *refs)
+df_reg_chain_verify_unmarked (df_ref refs)
{
- struct df_ref *ref;
+ df_ref ref;
for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
gcc_assert (!DF_REF_IS_REG_MARKED (ref));
}
/* Verify that NEW_REC and OLD_REC have exactly the same members. */
static bool
-df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
+df_refs_verify (df_ref *new_rec, df_ref *old_rec,
bool abort_if_fail)
{
while ((*new_rec) && (*old_rec))
{
bool ret1, ret2, ret3, ret4;
unsigned int uid = INSN_UID (insn);
+ struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
- df_insn_refs_collect (collection_rec, bb, insn);
+ df_insn_refs_collect (collection_rec, bb, insn_info);
if (!DF_INSN_UID_DEFS (uid))
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
- collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
+ collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
+ collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
gcc_assert (bb_info);
- /* Scan the block an insn at a time from beginning to end. */
+ /* Scan the block, one insn at a time, from beginning to end. */
FOR_BB_INSNS_REVERSE (bb, insn)
{
if (!INSN_P (insn))
}
-/* Return true if df_ref information for all insns in all BLOCKS are
- correct and complete. If BLOCKS is null, all blocks are
- checked. */
+/* Verify that df_ref information for all insns in all blocks is
+   correct and complete.  */
void
df_scan_verify (void)
if (!df)
return;
- /* This is a hack, but a necessary one. If you do not do this,
- insn_attrtab can never be compiled in a bootstrap. This
- verification is just too expensive. */
- if (n_basic_blocks > 250)
- return;
-
/* Verification is a 4 step process. */
/* (1) All of the refs are marked by going thru the reg chains. */