/* Scanning of rtl for dataflow analysis.
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
- 2008, 2009 Free Software Foundation, Inc.
+ 2008, 2009, 2010 Free Software Foundation, Inc.
Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
#include "target-def.h"
#include "df.h"
#include "tree-pass.h"
+#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
DEF_VEC_P(df_ref);
DEF_VEC_ALLOC_P_STACK(df_ref);
static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
rtx, rtx *,
basic_block, struct df_insn_info *,
- enum df_ref_type, int ref_flags,
- int, int, enum machine_mode);
+ enum df_ref_type, int ref_flags);
static void df_def_record_1 (struct df_collection_rec *, rtx,
basic_block, struct df_insn_info *,
int ref_flags);
static void df_defs_record (struct df_collection_rec *, rtx,
basic_block, struct df_insn_info *,
int ref_flags);
-static void df_uses_record (enum df_ref_class, struct df_collection_rec *,
+static void df_uses_record (struct df_collection_rec *,
rtx *, enum df_ref_type,
basic_block, struct df_insn_info *,
- int ref_flags,
- int, int, enum machine_mode);
+ int ref_flags);
static df_ref df_ref_create_structure (enum df_ref_class,
struct df_collection_rec *, rtx, rtx *,
basic_block, struct df_insn_info *,
- enum df_ref_type, int ref_flags,
- int, int, enum machine_mode);
-
+ enum df_ref_type, int ref_flags);
static void df_insn_refs_collect (struct df_collection_rec*,
basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (struct df_collection_rec *);
alloc_pool ref_base_pool;
alloc_pool ref_artificial_pool;
alloc_pool ref_regular_pool;
- alloc_pool ref_extract_pool;
alloc_pool insn_pool;
alloc_pool reg_pool;
alloc_pool mw_reg_pool;
df_scan->block_info = NULL;
df_scan->block_info_size = 0;
- BITMAP_FREE (df->hardware_regs_used);
- BITMAP_FREE (df->regular_block_artificial_uses);
- BITMAP_FREE (df->eh_block_artificial_uses);
+ bitmap_clear (&df->hardware_regs_used);
+ bitmap_clear (&df->regular_block_artificial_uses);
+ bitmap_clear (&df->eh_block_artificial_uses);
BITMAP_FREE (df->entry_block_defs);
BITMAP_FREE (df->exit_block_uses);
- BITMAP_FREE (df->insns_to_delete);
- BITMAP_FREE (df->insns_to_rescan);
- BITMAP_FREE (df->insns_to_notes_rescan);
+ bitmap_clear (&df->insns_to_delete);
+ bitmap_clear (&df->insns_to_rescan);
+ bitmap_clear (&df->insns_to_notes_rescan);
- free_alloc_pool (df_scan->block_pool);
free_alloc_pool (problem_data->ref_base_pool);
free_alloc_pool (problem_data->ref_artificial_pool);
free_alloc_pool (problem_data->ref_regular_pool);
- free_alloc_pool (problem_data->ref_extract_pool);
free_alloc_pool (problem_data->insn_pool);
free_alloc_pool (problem_data->reg_pool);
free_alloc_pool (problem_data->mw_reg_pool);
}
-/* Set basic block info. */
-
-static void
-df_scan_set_bb_info (unsigned int index,
- struct df_scan_bb_info *bb_info)
-{
- gcc_assert (df_scan);
- df_grow_bb_info (df_scan);
- df_scan->block_info[index] = (void *) bb_info;
-}
-
-
/* Free basic block info. */
static void
{
struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
unsigned int bb_index = bb->index;
- if (bb_info)
+
+ /* See if bb_info is initialized. */
+ if (bb_info->artificial_defs)
{
rtx insn;
FOR_BB_INSNS (bb, insn)
bb_info = df_scan_get_bb_info (bb_index);
/* Get rid of any artificial uses or defs. */
- df_ref_chain_delete_du_chain (bb_info->artificial_defs);
- df_ref_chain_delete_du_chain (bb_info->artificial_uses);
- df_ref_chain_delete (bb_info->artificial_defs);
- df_ref_chain_delete (bb_info->artificial_uses);
- bb_info->artificial_defs = NULL;
- bb_info->artificial_uses = NULL;
- pool_free (df_scan->block_pool, bb_info);
+ if (bb_info->artificial_defs)
+ {
+ df_ref_chain_delete_du_chain (bb_info->artificial_defs);
+ df_ref_chain_delete_du_chain (bb_info->artificial_uses);
+ df_ref_chain_delete (bb_info->artificial_defs);
+ df_ref_chain_delete (bb_info->artificial_uses);
+ bb_info->artificial_defs = NULL;
+ bb_info->artificial_uses = NULL;
+ }
}
}
if (df_scan->problem_data)
df_scan_free_internal ();
- df_scan->block_pool
- = create_alloc_pool ("df_scan_block pool",
- sizeof (struct df_scan_bb_info),
- block_size);
-
problem_data = XNEW (struct df_scan_problem_data);
df_scan->problem_data = problem_data;
df_scan->computed = true;
problem_data->ref_regular_pool
= create_alloc_pool ("df_scan ref regular",
sizeof (struct df_regular_ref), block_size);
- problem_data->ref_extract_pool
- = create_alloc_pool ("df_scan ref extract",
- sizeof (struct df_extract_ref), block_size);
problem_data->insn_pool
= create_alloc_pool ("df_scan insn",
sizeof (struct df_insn_info), block_size);
{
unsigned int bb_index = bb->index;
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
- if (!bb_info)
- {
- bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
- df_scan_set_bb_info (bb_index, bb_info);
- }
bb_info->artificial_defs = NULL;
bb_info->artificial_uses = NULL;
}
- df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
- df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
- df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
+ bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
+ bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
+ bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
- df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
- df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
- df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
+ bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
+ bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
+ bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
df_scan->optional_p = false;
}
fprintf (file, ";; invalidated by call \t");
df_print_regset (file, regs_invalidated_by_call_regset);
fprintf (file, ";; hardware regs used \t");
- df_print_regset (file, df->hardware_regs_used);
+ df_print_regset (file, &df->hardware_regs_used);
fprintf (file, ";; regular block artificial uses \t");
- df_print_regset (file, df->regular_block_artificial_uses);
+ df_print_regset (file, &df->regular_block_artificial_uses);
fprintf (file, ";; eh block artificial uses \t");
- df_print_regset (file, df->eh_block_artificial_uses);
+ df_print_regset (file, &df->eh_block_artificial_uses);
fprintf (file, ";; entry block defs \t");
df_print_regset (file, df->entry_block_defs);
fprintf (file, ";; exit block uses \t");
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
+ sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
TV_DF_SCAN, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
- df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
- df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
+ df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
+ df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);
- bitmap_ior_into (df->eh_block_artificial_uses,
- df->regular_block_artificial_uses);
+ bitmap_ior_into (&df->eh_block_artificial_uses,
+ &df->regular_block_artificial_uses);
/* ENTRY and EXIT blocks have special defs/uses. */
df_get_entry_block_def_set (df->entry_block_defs);
/* Create a new ref of type DF_REF_TYPE for register REG at address
- LOC within INSN of BB. This function is only used externally.
-
- If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
- DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
- fields if they were constants. Otherwise they should be -1 if
- those flags were set. */
+ LOC within INSN of BB. This function is only used externally. */
df_ref
df_ref_create (rtx reg, rtx *loc, rtx insn,
basic_block bb,
enum df_ref_type ref_type,
- int ref_flags,
- int width, int offset, enum machine_mode mode)
+ int ref_flags)
{
df_ref ref;
struct df_reg_info **reg_info;
/* You cannot hack artificial refs. */
gcc_assert (insn);
- if (width != -1 || offset != -1)
- cl = DF_REF_EXTRACT;
- else if (loc)
+ if (loc)
cl = DF_REF_REGULAR;
else
cl = DF_REF_BASE;
ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
- ref_type, ref_flags,
- width, offset, mode);
+ ref_type, ref_flags);
if (DF_REF_REG_DEF_P (ref))
{
case DF_REF_REGULAR:
pool_free (problem_data->ref_regular_pool, ref);
break;
-
- case DF_REF_EXTRACT:
- pool_free (problem_data->ref_extract_pool, ref);
- break;
}
}
{
if (insn_info)
{
- bitmap_clear_bit (df->insns_to_rescan, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
- bitmap_set_bit (df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
+ bitmap_set_bit (&df->insns_to_delete, uid);
}
if (dump_file)
fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
if (dump_file)
fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
- bitmap_clear_bit (df->insns_to_delete, uid);
- bitmap_clear_bit (df->insns_to_rescan, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
if (insn_info)
{
struct df_scan_problem_data *problem_data
df_ref ref;
struct df_mw_hardreg *mw;
- for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
df_free_ref (ref);
- for (ix = 0; VEC_iterate (df_ref, collection_rec->use_vec, ix, ref); ++ix)
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->use_vec, ix, ref)
df_free_ref (ref);
- for (ix = 0; VEC_iterate (df_ref, collection_rec->eq_use_vec, ix, ref); ++ix)
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->eq_use_vec, ix, ref)
df_free_ref (ref);
- for (ix = 0;
- VEC_iterate (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw);
- ++ix)
+ FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw)
pool_free (problem_data->mw_reg_pool, mw);
VEC_free (df_ref, stack, collection_rec->def_vec);
if (dump_file)
fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
- bitmap_clear_bit (df->insns_to_delete, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
- bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
+ bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
return false;
}
collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
- bitmap_clear_bit (df->insns_to_delete, uid);
- bitmap_clear_bit (df->insns_to_rescan, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
if (insn_info)
{
int luid;
}
df_refs_add_to_chains (&collection_rec, bb, insn);
- df_set_bb_dirty (bb);
+ if (!DEBUG_INSN_P (insn))
+ df_set_bb_dirty (bb);
VEC_free (df_ref, stack, collection_rec.def_vec);
VEC_free (df_ref, stack, collection_rec.use_vec);
unsigned int uid = INSN_UID (insn);
struct df_insn_info *insn_info;
- gcc_assert (DEBUG_INSN_P (insn));
- gcc_assert (VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));
+ gcc_assert (DEBUG_INSN_P (insn)
+ && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));
if (!df)
return false;
if (dump_file)
fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);
- bitmap_clear_bit (df->insns_to_delete, uid);
- bitmap_clear_bit (df->insns_to_rescan, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
if (!insn_info->defs)
return false;
basic_block bb;
bitmap_iterator bi;
unsigned int uid;
- bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head tmp;
+
+ bitmap_initialize (&tmp, &df_bitmap_obstack);
if (df->changeable_flags & DF_NO_INSN_RESCAN)
{
defer_insn_rescan = true;
}
- bitmap_copy (tmp, df->insns_to_delete);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
+ bitmap_copy (&tmp, &df->insns_to_delete);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
{
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
if (insn_info)
df_insn_delete (NULL, uid);
}
- BITMAP_FREE (tmp);
- bitmap_clear (df->insns_to_delete);
- bitmap_clear (df->insns_to_rescan);
- bitmap_clear (df->insns_to_notes_rescan);
+ bitmap_clear (&tmp);
+ bitmap_clear (&df->insns_to_delete);
+ bitmap_clear (&df->insns_to_rescan);
+ bitmap_clear (&df->insns_to_notes_rescan);
FOR_EACH_BB (bb)
{
bool defer_insn_rescan = false;
bitmap_iterator bi;
unsigned int uid;
- bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head tmp;
+
+ bitmap_initialize (&tmp, &df_bitmap_obstack);
if (df->changeable_flags & DF_NO_INSN_RESCAN)
{
if (dump_file)
fprintf (dump_file, "starting the processing of deferred insns\n");
- bitmap_copy (tmp, df->insns_to_delete);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
+ bitmap_copy (&tmp, &df->insns_to_delete);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
{
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
if (insn_info)
df_insn_delete (NULL, uid);
}
- bitmap_copy (tmp, df->insns_to_rescan);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
+ bitmap_copy (&tmp, &df->insns_to_rescan);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
{
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
if (insn_info)
df_insn_rescan (insn_info->insn);
}
- bitmap_copy (tmp, df->insns_to_notes_rescan);
- EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
+ bitmap_copy (&tmp, &df->insns_to_notes_rescan);
+ EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
{
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
if (insn_info)
if (dump_file)
fprintf (dump_file, "ending the processing of deferred insns\n");
- BITMAP_FREE (tmp);
- bitmap_clear (df->insns_to_delete);
- bitmap_clear (df->insns_to_rescan);
- bitmap_clear (df->insns_to_notes_rescan);
+ bitmap_clear (&tmp);
+ bitmap_clear (&df->insns_to_delete);
+ bitmap_clear (&df->insns_to_rescan);
+ bitmap_clear (&df->insns_to_notes_rescan);
if (no_insn_rescan)
df_set_flags (DF_NO_INSN_RESCAN);
DF_REF_ID (ref) = offset++;
count++;
ref = DF_REF_NEXT_REG (ref);
- gcc_assert (offset < ref_info->refs_size);
+ gcc_checking_assert (offset < ref_info->refs_size);
}
}
if (include_uses)
DF_REF_ID (ref) = offset++;
count++;
ref = DF_REF_NEXT_REG (ref);
- gcc_assert (offset < ref_info->refs_size);
+ gcc_checking_assert (offset < ref_info->refs_size);
}
}
if (include_eq_uses)
DF_REF_ID (ref) = offset++;
count++;
ref = DF_REF_NEXT_REG (ref);
- gcc_assert (offset < ref_info->refs_size);
+ gcc_checking_assert (offset < ref_info->refs_size);
}
}
ref_info->count[regno] = count;
while (the_ref)
{
if ((!DF_REF_IS_ARTIFICIAL (the_ref))
- && (DF_REF_LOC (the_ref))
+ && DF_REF_LOC (the_ref)
&& (*DF_REF_LOC (the_ref) == loc))
{
df_ref next_ref = DF_REF_NEXT_REG (the_ref);
insn_info->mw_hardregs = df_null_mw_rec;
}
- bitmap_clear_bit (df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_delete, uid);
/* If the insn is set to be rescanned, it does not need to also
be notes rescanned. */
- if (!bitmap_bit_p (df->insns_to_rescan, uid))
- bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
+ if (!bitmap_bit_p (&df->insns_to_rescan, uid))
+ bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
return;
}
- bitmap_clear_bit (df->insns_to_delete, uid);
- bitmap_clear_bit (df->insns_to_notes_rescan, uid);
+ bitmap_clear_bit (&df->insns_to_delete, uid);
+ bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
if (insn_info)
{
{
case REG_EQUIV:
case REG_EQUAL:
- df_uses_record (DF_REF_REGULAR, &collection_rec,
+ df_uses_record (&collection_rec,
&XEXP (note, 0), DF_REF_REG_USE,
- bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
+ bb, insn_info, DF_REF_IN_NOTE);
default:
break;
}
case DF_REF_BASE:
return true;
- case DF_REF_EXTRACT:
- if ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
- || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
- || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2)))
- return false;
- /* fallthru. */
-
case DF_REF_REGULAR:
return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
return 1;
}
- /* The classes are the same at this point so it is safe to only look
- at ref1. */
- if (DF_REF_CLASS (ref1) == DF_REF_EXTRACT)
- {
- if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
- return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
- if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
- return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
- if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
- return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
- }
- return 0;
+ return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
static void
of DF_REF_COMPARE. */
if (i == count - 1)
return;
- qsort (VEC_address (df_ref, *ref_vec), count, sizeof (df_ref),
- df_ref_compare);
+ VEC_qsort (df_ref, *ref_vec, df_ref_compare);
}
for (i=0; i<count-dist; i++)
}
}
else
- qsort (VEC_address (df_mw_hardreg_ptr, *mw_vec), count,
- sizeof (struct df_mw_hardreg *), df_mw_compare);
+ VEC_qsort (df_mw_hardreg_ptr, *mw_vec, df_mw_compare);
for (i=0; i<count-dist; i++)
{
df->hard_regs_live_count[regno]++;
}
- gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
- gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
+ gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
+ && DF_REF_PREV_REG (this_ref) == NULL);
DF_REF_NEXT_REG (this_ref) = head;
if (add_to_table && df->analyze_subset)
add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
- for (ix = 0; VEC_iterate (df_ref, old_vec, ix, this_ref); ++ix)
+ FOR_EACH_VEC_ELT (df_ref, old_vec, ix, this_ref)
{
new_vec[ix] = this_ref;
df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
}
-/* Allocate a ref and initialize its fields.
-
- If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
- DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the fields
- if they were constants. Otherwise they should be -1 if those flags
- were set. */
+/* Allocate a ref and initialize its fields. */
static df_ref
df_ref_create_structure (enum df_ref_class cl,
rtx reg, rtx *loc,
basic_block bb, struct df_insn_info *info,
enum df_ref_type ref_type,
- int ref_flags,
- int width, int offset, enum machine_mode mode)
+ int ref_flags)
{
df_ref this_ref = NULL;
int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
{
case DF_REF_BASE:
this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
- gcc_assert (loc == NULL);
+ gcc_checking_assert (loc == NULL);
break;
case DF_REF_ARTIFICIAL:
this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
this_ref->artificial_ref.bb = bb;
- gcc_assert (loc == NULL);
+ gcc_checking_assert (loc == NULL);
break;
case DF_REF_REGULAR:
this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
this_ref->regular_ref.loc = loc;
- gcc_assert (loc);
- break;
-
- case DF_REF_EXTRACT:
- this_ref = (df_ref) pool_alloc (problem_data->ref_extract_pool);
- DF_REF_EXTRACT_WIDTH (this_ref) = width;
- DF_REF_EXTRACT_OFFSET (this_ref) = offset;
- DF_REF_EXTRACT_MODE (this_ref) = mode;
- this_ref->regular_ref.loc = loc;
- gcc_assert (loc);
+ gcc_checking_assert (loc);
break;
}
/* Create new references of type DF_REF_TYPE for each part of register REG
- at address LOC within INSN of BB.
-
- If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
- DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
- fields if they were constants. Otherwise they should be -1 if
- those flags were set. */
+ at address LOC within INSN of BB. */
static void
rtx reg, rtx *loc,
basic_block bb, struct df_insn_info *insn_info,
enum df_ref_type ref_type,
- int ref_flags,
- int width, int offset, enum machine_mode mode)
+ int ref_flags)
{
unsigned int regno;
- gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
+ gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
if (regno < FIRST_PSEUDO_REGISTER)
for (i = regno; i < endregno; i++)
{
ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
- bb, insn_info, ref_type, ref_flags,
- width, offset, mode);
+ bb, insn_info, ref_type, ref_flags);
gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
}
else
{
df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
- ref_type, ref_flags, width, offset, mode);
+ ref_type, ref_flags);
}
}
{
rtx *loc;
rtx dst;
- int offset = -1;
- int width = -1;
- enum machine_mode mode = VOIDmode;
- enum df_ref_class cl = DF_REF_REGULAR;
/* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
construct. */
{
flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
- if (CONST_INT_P (XEXP (dst, 1))
- && CONST_INT_P (XEXP (dst, 2)))
- {
- width = INTVAL (XEXP (dst, 1));
- offset = INTVAL (XEXP (dst, 2));
- mode = GET_MODE (dst);
- cl = DF_REF_EXTRACT;
- }
-
loc = &XEXP (dst, 0);
dst = *loc;
}
/* At this point if we do not have a reg or a subreg, just return. */
if (REG_P (dst))
{
- df_ref_record (cl, collection_rec,
- dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
- width, offset, mode);
+ df_ref_record (DF_REF_REGULAR, collection_rec,
+ dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
/* We want to keep sp alive everywhere - by making all
writes to sp also use of sp. */
if (REGNO (dst) == STACK_POINTER_REGNUM)
df_ref_record (DF_REF_BASE, collection_rec,
- dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
- width, offset, mode);
+ dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
}
else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
{
flags |= DF_REF_SUBREG;
- df_ref_record (cl, collection_rec,
- dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
- width, offset, mode);
+ df_ref_record (DF_REF_REGULAR, collection_rec,
+ dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
}
}
}
-/* Process all the registers used in the rtx at address LOC.
-
- If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
- DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
- fields if they were constants. Otherwise they should be -1 if
- those flags were set. */
+/* Process all the registers used in the rtx at address LOC. */
static void
-df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
+df_uses_record (struct df_collection_rec *collection_rec,
rtx *loc, enum df_ref_type ref_type,
basic_block bb, struct df_insn_info *insn_info,
- int flags,
- int width, int offset, enum machine_mode mode)
+ int flags)
{
RTX_CODE code;
rtx x;
/* If we are clobbering a MEM, mark any registers inside the address
as being used. */
if (MEM_P (XEXP (x, 0)))
- df_uses_record (cl, collection_rec,
+ df_uses_record (collection_rec,
&XEXP (XEXP (x, 0), 0),
DF_REF_REG_MEM_STORE,
bb, insn_info,
- flags, width, offset, mode);
+ flags);
/* If we're clobbering a REG then we have a def so ignore. */
return;
case MEM:
- df_uses_record (cl, collection_rec,
+ df_uses_record (collection_rec,
&XEXP (x, 0), DF_REF_REG_MEM_LOAD,
- bb, insn_info, flags & DF_REF_IN_NOTE,
- width, offset, mode);
+ bb, insn_info, flags & DF_REF_IN_NOTE);
return;
case SUBREG:
if (!REG_P (SUBREG_REG (x)))
{
loc = &SUBREG_REG (x);
- df_uses_record (cl, collection_rec, loc, ref_type, bb, insn_info, flags,
- width, offset, mode);
+ df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
return;
}
/* ... Fall through ... */
case REG:
- df_ref_record (cl, collection_rec,
+ df_ref_record (DF_REF_REGULAR, collection_rec,
x, loc, bb, insn_info,
- ref_type, flags,
- width, offset, mode);
+ ref_type, flags);
return;
case SIGN_EXTRACT:
case ZERO_EXTRACT:
{
- /* If the parameters to the zero or sign extract are
- constants, strip them off and recurse, otherwise there is
- no information that we can gain from this operation. */
- if (CONST_INT_P (XEXP (x, 1))
- && CONST_INT_P (XEXP (x, 2)))
- {
- width = INTVAL (XEXP (x, 1));
- offset = INTVAL (XEXP (x, 2));
- mode = GET_MODE (x);
-
- if (code == ZERO_EXTRACT)
- flags |= DF_REF_ZERO_EXTRACT;
- else
- flags |= DF_REF_SIGN_EXTRACT;
-
- df_uses_record (DF_REF_EXTRACT, collection_rec,
- &XEXP (x, 0), ref_type, bb, insn_info, flags,
- width, offset, mode);
- return;
- }
+ df_uses_record (collection_rec,
+ &XEXP (x, 1), ref_type, bb, insn_info, flags);
+ df_uses_record (collection_rec,
+ &XEXP (x, 2), ref_type, bb, insn_info, flags);
+
+ /* If the parameters to the zero or sign extract are
+ constants, strip them off and recurse, otherwise there is
+ no information that we can gain from this operation. */
+ if (code == ZERO_EXTRACT)
+ flags |= DF_REF_ZERO_EXTRACT;
+ else
+ flags |= DF_REF_SIGN_EXTRACT;
+
+ df_uses_record (collection_rec,
+ &XEXP (x, 0), ref_type, bb, insn_info, flags);
+ return;
}
break;
{
rtx dst = SET_DEST (x);
gcc_assert (!(flags & DF_REF_IN_NOTE));
- df_uses_record (cl, collection_rec,
- &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
- width, offset, mode);
+ df_uses_record (collection_rec,
+ &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);
switch (GET_CODE (dst))
{
case SUBREG:
if (df_read_modify_subreg_p (dst))
{
- df_uses_record (cl, collection_rec, &SUBREG_REG (dst),
+ df_uses_record (collection_rec, &SUBREG_REG (dst),
DF_REF_REG_USE, bb, insn_info,
- flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
- width, offset, mode);
+ flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
break;
}
/* Fall through. */
case CC0:
break;
case MEM:
- df_uses_record (cl, collection_rec, &XEXP (dst, 0),
- DF_REF_REG_MEM_STORE, bb, insn_info, flags,
- width, offset, mode);
+ df_uses_record (collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_MEM_STORE, bb, insn_info, flags);
break;
case STRICT_LOW_PART:
{
/* A strict_low_part uses the whole REG and not just the
SUBREG. */
dst = XEXP (dst, 0);
- df_uses_record (cl, collection_rec,
+ df_uses_record (collection_rec,
(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
DF_REF_REG_USE, bb, insn_info,
- DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
- width, offset, mode);
+ DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
}
break;
case ZERO_EXTRACT:
{
- if (CONST_INT_P (XEXP (dst, 1))
- && CONST_INT_P (XEXP (dst, 2)))
- {
- width = INTVAL (XEXP (dst, 1));
- offset = INTVAL (XEXP (dst, 2));
- mode = GET_MODE (dst);
- if (GET_CODE (XEXP (dst,0)) == MEM)
- {
- /* Handle the case of zero_extract(mem(...)) in the set dest.
- This special case is allowed only if the mem is a single byte and
- is useful to set a bitfield in memory. */
- df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (XEXP (dst,0), 0),
- DF_REF_REG_MEM_STORE, bb, insn_info,
- DF_REF_ZERO_EXTRACT,
- width, offset, mode);
- }
- else
- {
- df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (dst, 0),
- DF_REF_REG_USE, bb, insn_info,
- DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
- width, offset, mode);
- }
- }
- else
- {
- df_uses_record (cl, collection_rec, &XEXP (dst, 1),
- DF_REF_REG_USE, bb, insn_info, flags,
- width, offset, mode);
- df_uses_record (cl, collection_rec, &XEXP (dst, 2),
- DF_REF_REG_USE, bb, insn_info, flags,
- width, offset, mode);
- df_uses_record (cl, collection_rec, &XEXP (dst, 0),
- DF_REF_REG_USE, bb, insn_info,
- DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
- width, offset, mode);
- }
-
+ df_uses_record (collection_rec, &XEXP (dst, 1),
+ DF_REF_REG_USE, bb, insn_info, flags);
+ df_uses_record (collection_rec, &XEXP (dst, 2),
+ DF_REF_REG_USE, bb, insn_info, flags);
+ if (GET_CODE (XEXP (dst,0)) == MEM)
+ df_uses_record (collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_USE, bb, insn_info,
+ flags);
+ else
+ df_uses_record (collection_rec, &XEXP (dst, 0),
+ DF_REF_REG_USE, bb, insn_info,
+ DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
}
break;
int j;
for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
- df_uses_record (cl, collection_rec, &ASM_OPERANDS_INPUT (x, j),
- DF_REF_REG_USE, bb, insn_info, flags,
- width, offset, mode);
+ df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
+ DF_REF_REG_USE, bb, insn_info, flags);
return;
}
break;
}
case VAR_LOCATION:
- df_uses_record (cl, collection_rec,
+ df_uses_record (collection_rec,
&PAT_VAR_LOCATION_LOC (x),
- DF_REF_REG_USE, bb, insn_info,
- flags, width, offset, mode);
+ DF_REF_REG_USE, bb, insn_info, flags);
return;
case PRE_DEC:
case POST_MODIFY:
gcc_assert (!DEBUG_INSN_P (insn_info->insn));
/* Catch the def of the register being modified. */
- df_ref_record (cl, collection_rec, XEXP (x, 0), &XEXP (x, 0),
+ df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
bb, insn_info,
DF_REF_REG_DEF,
- flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
- width, offset, mode);
+ flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
/* ... Fall through to handle uses ... */
loc = &XEXP (x, 0);
goto retry;
}
- df_uses_record (cl, collection_rec, &XEXP (x, i), ref_type,
- bb, insn_info, flags,
- width, offset, mode);
+ df_uses_record (collection_rec, &XEXP (x, i), ref_type,
+ bb, insn_info, flags);
}
else if (fmt[i] == 'E')
{
int j;
for (j = 0; j < XVECLEN (x, i); j++)
- df_uses_record (cl, collection_rec,
+ df_uses_record (collection_rec,
&XVECEXP (x, i, j), ref_type,
- bb, insn_info, flags,
- width, offset, mode);
+ bb, insn_info, flags);
}
}
}
unsigned int ix;
df_ref ref;
- for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
{
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
{
- int width = -1;
- int offset = -1;
- enum machine_mode mode = VOIDmode;
df_ref use;
- if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
- {
- width = DF_REF_EXTRACT_WIDTH (ref);
- offset = DF_REF_EXTRACT_OFFSET (ref);
- mode = DF_REF_EXTRACT_MODE (ref);
- }
-
use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
DF_REF_LOC (ref), DF_REF_BB (ref),
DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
- DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
- width, offset, mode);
+ DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
DF_REF_REGNO (use) = DF_REF_REGNO (ref);
}
}
bool is_sibling_call;
unsigned int i;
df_ref def;
- bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head defs_generated;
+
+ bitmap_initialize (&defs_generated, &df_bitmap_obstack);
/* Do not generate clobbers for registers that are the result of the
call. This causes ordering problems in the chain building code
depending on which def is seen first. */
- for (i = 0; VEC_iterate (df_ref, collection_rec->def_vec, i, def); ++i)
- bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
+ FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, i, def)
+ bitmap_set_bit (&defs_generated, DF_REF_REGNO (def));
/* Record the registers used to pass arguments, and explicitly
noted as clobbered. */
note = XEXP (note, 1))
{
if (GET_CODE (XEXP (note, 0)) == USE)
- df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (XEXP (note, 0), 0),
- DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
- VOIDmode);
+ df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
+ DF_REF_REG_USE, bb, insn_info, flags);
else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
{
if (REG_P (XEXP (XEXP (note, 0), 0)))
{
unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
- if (!bitmap_bit_p (defs_generated, regno))
+ if (!bitmap_bit_p (&defs_generated, regno))
df_defs_record (collection_rec, XEXP (note, 0), bb,
insn_info, flags);
}
else
- df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (note, 0),
- DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
- VOIDmode);
+ df_uses_record (collection_rec, &XEXP (note, 0),
+ DF_REF_REG_USE, bb, insn_info, flags);
}
}
/* The stack ptr is used (honorarily) by a CALL insn. */
df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
NULL, bb, insn_info, DF_REF_REG_USE,
- DF_REF_CALL_STACK_USAGE | flags,
- -1, -1, VOIDmode);
+ DF_REF_CALL_STACK_USAGE | flags);
/* Calls may also reference any of the global registers,
so they are recorded as used. */
if (global_regs[i])
{
df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
- NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1,
- VOIDmode);
+ NULL, bb, insn_info, DF_REF_REG_USE, flags);
df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
- NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1,
- VOIDmode);
+ NULL, bb, insn_info, DF_REF_REG_DEF, flags);
}
is_sibling_call = SIBLING_CALL_P (insn_info->insn);
EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
{
if (!global_regs[ui]
- && (!bitmap_bit_p (defs_generated, ui))
+ && (!bitmap_bit_p (&defs_generated, ui))
&& (!is_sibling_call
|| !bitmap_bit_p (df->exit_block_uses, ui)
|| refers_to_regno_p (ui, ui+1,
crtl->return_rtx, NULL)))
df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
NULL, bb, insn_info, DF_REF_REG_DEF,
- DF_REF_MAY_CLOBBER | flags,
- -1, -1, VOIDmode);
+ DF_REF_MAY_CLOBBER | flags);
}
- BITMAP_FREE (defs_generated);
+ bitmap_clear (&defs_generated);
return;
}
{
case REG_EQUIV:
case REG_EQUAL:
- df_uses_record (DF_REF_REGULAR, collection_rec,
+ df_uses_record (collection_rec,
&XEXP (note, 0), DF_REF_REG_USE,
- bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
+ bb, insn_info, DF_REF_IN_NOTE);
break;
case REG_NON_LOCAL_GOTO:
/* The frame ptr is used by a non-local goto. */
df_ref_record (DF_REF_BASE, collection_rec,
regno_reg_rtx[FRAME_POINTER_REGNUM],
NULL, bb, insn_info,
- DF_REF_REG_USE, 0, -1, -1, VOIDmode);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ DF_REF_REG_USE, 0);
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
df_ref_record (DF_REF_BASE, collection_rec,
regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
NULL, bb, insn_info,
- DF_REF_REG_USE, 0, -1, -1, VOIDmode);
+ DF_REF_REG_USE, 0);
#endif
break;
default:
(is_cond_exec) ? DF_REF_CONDITIONAL : 0);
/* Record the register uses. */
- df_uses_record (DF_REF_REGULAR, collection_rec,
- &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
- -1, -1, VOIDmode);
+ df_uses_record (collection_rec,
+ &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
/* DF_REF_CONDITIONAL needs corresponding USES. */
if (is_cond_exec)
if (regno == INVALID_REGNUM)
break;
df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
- bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1,
- VOIDmode);
+ bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
}
}
#endif
non-local goto. */
if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
- bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, VOIDmode);
+ bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
/* Add the artificial uses. */
if (bb->index >= NUM_FIXED_BLOCKS)
bitmap_iterator bi;
unsigned int regno;
bitmap au = bb_has_eh_pred (bb)
- ? df->eh_block_artificial_uses
- : df->regular_block_artificial_uses;
+ ? &df->eh_block_artificial_uses
+ : &df->regular_block_artificial_uses;
EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
{
df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
- bb, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
+ bb, NULL, DF_REF_REG_USE, 0);
}
}
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
int luid = 0;
- struct df_scan_bb_info *bb_info;
struct df_collection_rec collection_rec;
if (!df)
return;
- bb_info = df_scan_get_bb_info (bb_index);
-
- /* Need to make sure that there is a record in the basic block info. */
- if (!bb_info)
- {
- bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
- df_scan_set_bb_info (bb_index, bb_info);
- bb_info->artificial_defs = NULL;
- bb_info->artificial_uses = NULL;
- }
-
+ df_grow_bb_info (df_scan);
collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
reference of the frame pointer. */
bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif
if (frame_pointer_needed)
{
bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif
}
gcc_assert (GET_MODE (reg) != BLKmode);
- bitmap_set_bit (set, regno);
if (regno < FIRST_PSEUDO_REGISTER)
{
int n = hard_regno_nregs[regno][GET_MODE (reg)];
- while (--n > 0)
- bitmap_set_bit (set, regno + n);
+ bitmap_set_range (set, regno, n);
}
+ else
+ bitmap_set_bit (set, regno);
}
/* Any reference to any pseudo before reload is a potential
reference of the frame pointer. */
bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
/* If they are different, also mark the hard frame pointer as live. */
if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif
- targetm.live_on_entry (entry_block_defs);
+ targetm.extra_live_on_entry (entry_block_defs);
}
EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
{
df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
- ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1,
- VOIDmode);
+ ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
}
df_canonize_collection_rec (collection_rec);
void
df_update_entry_block_defs (void)
{
- bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head refs;
bool changed = false;
- df_get_entry_block_def_set (refs);
+ bitmap_initialize (&refs, &df_bitmap_obstack);
+ df_get_entry_block_def_set (&refs);
if (df->entry_block_defs)
{
- if (!bitmap_equal_p (df->entry_block_defs, refs))
+ if (!bitmap_equal_p (df->entry_block_defs, &refs))
{
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
df_ref_chain_delete_du_chain (bb_info->artificial_defs);
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
+ gcc_unreachable ();
df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
changed = true;
}
if (changed)
{
- df_record_entry_block_defs (refs);
- bitmap_copy (df->entry_block_defs, refs);
+ df_record_entry_block_defs (&refs);
+ bitmap_copy (df->entry_block_defs, &refs);
df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
}
- BITMAP_FREE (refs);
+ bitmap_clear (&refs);
}
if ((!reload_completed) || frame_pointer_needed)
{
bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
/* If they are different, also mark the hard frame pointer as live. */
if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
}
-#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
/* Many architectures have a GP register even without flag_pic.
Assume the pic register is not in use, or will be handled by
other means, if it is not fixed. */
- if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
+ if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
+ && (unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
&& fixed_regs[PIC_OFFSET_TABLE_REGNUM])
bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
-#endif
/* Mark all global registers, and all registers used by the
epilogue as being live at the end of the function since they
EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
- EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
+ EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* It is deliberate that this is not put in the exit block uses but
&& bb_has_eh_pred (EXIT_BLOCK_PTR)
&& fixed_regs[ARG_POINTER_REGNUM])
df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
- EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
+ EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
#endif
df_canonize_collection_rec (collection_rec);
void
df_update_exit_block_uses (void)
{
- bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head refs;
bool changed = false;
- df_get_exit_block_use_set (refs);
+ bitmap_initialize (&refs, &df_bitmap_obstack);
+ df_get_exit_block_use_set (&refs);
if (df->exit_block_uses)
{
- if (!bitmap_equal_p (df->exit_block_uses, refs))
+ if (!bitmap_equal_p (df->exit_block_uses, &refs))
{
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
df_ref_chain_delete_du_chain (bb_info->artificial_uses);
{
struct df_scan_problem_data *problem_data
= (struct df_scan_problem_data *) df_scan->problem_data;
+ gcc_unreachable ();
df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
changed = true;
}
if (changed)
{
- df_record_exit_block_uses (refs);
- bitmap_copy (df->exit_block_uses, refs);
+ df_record_exit_block_uses (&refs);
+ bitmap_copy (df->exit_block_uses, &refs);
df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
}
- BITMAP_FREE (refs);
+ bitmap_clear (&refs);
}
static bool initialized = false;
bool
df_hard_reg_used_p (unsigned int reg)
{
- gcc_assert (df);
return df->hard_regs_live_count[reg] != 0;
}
unsigned int
df_hard_reg_used_count (unsigned int reg)
{
- gcc_assert (df);
return df->hard_regs_live_count[reg];
}
unsigned int ix;
df_ref new_ref;
- for (ix = 0; VEC_iterate (df_ref, new_rec, ix, new_ref); ++ix)
+ FOR_EACH_VEC_ELT (df_ref, new_rec, ix, new_ref)
{
if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
{
unsigned int ix;
struct df_mw_hardreg *new_reg;
- for (ix = 0; VEC_iterate (df_mw_hardreg_ptr, new_rec, ix, new_reg); ++ix)
+ FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, new_rec, ix, new_reg)
{
if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
{
static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
- bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head entry_block_defs;
bool is_eq;
- df_get_entry_block_def_set (entry_block_defs);
+ bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
+ df_get_entry_block_def_set (&entry_block_defs);
- is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
+ is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
if (!is_eq && abort_if_fail)
{
print_current_pass (stderr);
fprintf (stderr, "entry_block_defs = ");
- df_print_regset (stderr, entry_block_defs);
+ df_print_regset (stderr, &entry_block_defs);
fprintf (stderr, "df->entry_block_defs = ");
df_print_regset (stderr, df->entry_block_defs);
gcc_assert (0);
}
- BITMAP_FREE (entry_block_defs);
+ bitmap_clear (&entry_block_defs);
return is_eq;
}
static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
- bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_head exit_block_uses;
bool is_eq;
- df_get_exit_block_use_set (exit_block_uses);
+ bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
+ df_get_exit_block_use_set (&exit_block_uses);
- is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
+ is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
if (!is_eq && abort_if_fail)
{
print_current_pass (stderr);
fprintf (stderr, "exit_block_uses = ");
- df_print_regset (stderr, exit_block_uses);
+ df_print_regset (stderr, &exit_block_uses);
fprintf (stderr, "df->exit_block_uses = ");
df_print_regset (stderr, df->exit_block_uses);
gcc_assert (0);
}
- BITMAP_FREE (exit_block_uses);
+ bitmap_clear (&exit_block_uses);
return is_eq;
}
{
unsigned int i;
basic_block bb;
- bitmap regular_block_artificial_uses;
- bitmap eh_block_artificial_uses;
+ bitmap_head regular_block_artificial_uses;
+ bitmap_head eh_block_artificial_uses;
if (!df)
return;
/* (2) There are various bitmaps whose value may change over the
course of the compilation. This step recomputes them to make
sure that they have not slipped out of date. */
- regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
- eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
+ bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
- df_get_regular_block_artificial_uses (regular_block_artificial_uses);
- df_get_eh_block_artificial_uses (eh_block_artificial_uses);
+ df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
+ df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
- bitmap_ior_into (eh_block_artificial_uses,
- regular_block_artificial_uses);
+ bitmap_ior_into (&eh_block_artificial_uses,
+ &regular_block_artificial_uses);
/* Check artificial_uses bitmaps didn't change. */
- gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
- df->regular_block_artificial_uses));
- gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
- df->eh_block_artificial_uses));
+ gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
+ &df->regular_block_artificial_uses));
+ gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
+ &df->eh_block_artificial_uses));
- BITMAP_FREE (regular_block_artificial_uses);
- BITMAP_FREE (eh_block_artificial_uses);
+ bitmap_clear (&regular_block_artificial_uses);
+ bitmap_clear (&eh_block_artificial_uses);
/* Verify entry block and exit block. These only verify the bitmaps,
the refs are verified in df_bb_verify. */