bitmap gen = bb_info->gen;
bitmap kill = bb_info->kill;
- /* We need to use a scratch set here so that the value returned from
- this function invocation properly reflects if the sets changed in
- a significant way; i.e. not just because the lr set was anded
- in. */
+ /* We need to use a scratch set here so that the value returned from this
+ function invocation properly reflects whether the sets changed in a
+ significant way; i.e. not just because the lr set was anded in. */
bitmap_and (df_live_scratch, gen, bb_lr_info->out);
/* No register may reach a location where it is not used. Thus
we trim the rr result to the places where it is used. */
return old;
}
+/* Node of a linked list of uses of dead REGs in debug insns. */
+struct dead_debug_use
+{
+ df_ref use; /* A use of a dead REG within a debug insn. */
+ struct dead_debug_use *next; /* Next node in the list. */
+};
+
+/* Linked list of the above, with a bitmap of the REGs in the
+ list. */
+struct dead_debug
+{
+ struct dead_debug_use *head; /* Head of the use list. */
+ bitmap used; /* REGNOs referenced by entries in the list;
+ may be NULL until dead_debug_add allocates it. */
+};
+
+/* Initialize DEBUG to an empty list, and clear USED, if given. If
+ USED is NULL, dead_debug_add will allocate a bitmap lazily the
+ first time an entry is recorded. */
+static inline void
+dead_debug_init (struct dead_debug *debug, bitmap used)
+{
+ debug->head = NULL;
+ debug->used = used;
+ if (used)
+ bitmap_clear (used);
+}
+
+/* Reset all debug insns with pending uses. Release the bitmap in it,
+ unless it is USED. USED must be the same bitmap passed to
+ dead_debug_init. */
+static inline void
+dead_debug_finish (struct dead_debug *debug, bitmap used)
+{
+ struct dead_debug_use *head;
+ rtx insn = NULL;
+
+ /* Free the bitmap only if dead_debug_add allocated it internally,
+ i.e. it is not the caller-supplied one. */
+ if (debug->used != used)
+ BITMAP_FREE (debug->used);
+
+ while ((head = debug->head))
+ {
+ insn = DF_REF_INSN (head->use);
+ /* Reset and rescan each insn only once: entries for the same
+ insn are assumed adjacent in the list (TODO confirm against
+ the insertion order in dead_debug_add), so act on the last
+ consecutive entry for INSN. */
+ if (!head->next || DF_REF_INSN (head->next->use) != insn)
+ {
+ INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
+ df_insn_rescan_debug_internal (insn);
+ }
+ debug->head = head->next;
+ XDELETE (head);
+ }
+}
+
+/* Add USE to DEBUG. It must be a dead reference to UREGNO in a debug
+ insn. Create a bitmap for DEBUG as needed. */
+static inline void
+dead_debug_add (struct dead_debug *debug, df_ref use, unsigned int uregno)
+{
+ struct dead_debug_use *newddu = XNEW (struct dead_debug_use);
+
+ /* Prepend the new entry to the list. */
+ newddu->use = use;
+ newddu->next = debug->head;
+ debug->head = newddu;
+
+ /* Allocate the REGNO-tracking bitmap lazily on first use. */
+ if (!debug->used)
+ debug->used = BITMAP_ALLOC (NULL);
+
+ bitmap_set_bit (debug->used, uregno);
+}
+
+/* If UREGNO is referenced by any entry in DEBUG, emit a debug insn
+ before INSN that binds the REG to a debug temp, and replace all
+ uses of UREGNO in DEBUG with uses of the debug temp. INSN must be
+ the insn where UREGNO dies. */
+static inline void
+dead_debug_insert_before (struct dead_debug *debug, unsigned int uregno,
+ rtx insn)
+{
+ struct dead_debug_use **tailp = &debug->head;
+ struct dead_debug_use *cur;
+ struct dead_debug_use *uses = NULL;
+ struct dead_debug_use **usesp = &uses;
+ rtx reg = NULL;
+ rtx dval;
+ rtx bind;
+
+ /* Nothing to do unless UREGNO is in the bitmap; clearing the bit
+ also ensures we handle each dead REGNO at most once. */
+ if (!debug->used || !bitmap_clear_bit (debug->used, uregno))
+ return;
+
+ /* Move all uses of uregno from debug->head to uses, setting mode to
+ the widest referenced mode. */
+ while ((cur = *tailp))
+ {
+ if (DF_REF_REGNO (cur->use) == uregno)
+ {
+ *usesp = cur;
+ usesp = &cur->next;
+ *tailp = cur->next;
+ cur->next = NULL;
+ if (!reg
+ || (GET_MODE_BITSIZE (GET_MODE (reg))
+ < GET_MODE_BITSIZE (GET_MODE (*DF_REF_REAL_LOC (cur->use)))))
+ reg = *DF_REF_REAL_LOC (cur->use);
+ }
+ else
+ tailp = &(*tailp)->next;
+ }
+
+ /* The bit was set in debug->used, so at least one queued use must
+ have referenced UREGNO. */
+ gcc_assert (reg);
+
+ /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
+ dval = make_debug_expr_from_rtl (reg);
+
+ /* Emit a debug bind insn before the insn in which reg dies. */
+ bind = gen_rtx_VAR_LOCATION (GET_MODE (reg),
+ DEBUG_EXPR_TREE_DECL (dval), reg,
+ VAR_INIT_STATUS_INITIALIZED);
+
+ bind = emit_debug_insn_before (bind, insn);
+ df_insn_rescan (bind);
+
+ /* Adjust all uses. */
+ while ((cur = uses))
+ {
+ /* Uses narrower than the debug temp's mode get a lowpart SUBREG
+ of it instead of the temp itself. */
+ if (GET_MODE (*DF_REF_REAL_LOC (cur->use)) == GET_MODE (reg))
+ *DF_REF_REAL_LOC (cur->use) = dval;
+ else
+ *DF_REF_REAL_LOC (cur->use)
+ = gen_lowpart_SUBREG (GET_MODE (*DF_REF_REAL_LOC (cur->use)), dval);
+ /* ??? Should we simplify subreg of subreg? */
+ df_insn_rescan (DF_REF_INSN (cur->use));
+ uses = cur->next;
+ XDELETE (cur);
+ }
+}
/* Recompute the REG_DEAD and REG_UNUSED notes and compute register
info: lifetime, bb, and number of defs and uses for basic block
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
+ struct dead_debug debug;
+
+ dead_debug_init (&debug, NULL);
bitmap_copy (live, df_get_live_out (bb));
bitmap_clear (artificial_uses);
{
if (debug_insn)
{
- debug_insn = -1;
+ if (debug_insn > 0)
+ dead_debug_add (&debug, use, uregno);
break;
}
+ else
+ dead_debug_insert_before (&debug, uregno, insn);
if ( (!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG))
&& (!bitmap_bit_p (do_not_gen, uregno))
df_insn_rescan_debug_internal (insn);
}
}
+
+ dead_debug_finish (&debug, NULL);
}
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_set_bit (defs, DF_REF_REGNO (def));
+ }
+}
+
+/* Find the set of real DEFs, which are not clobbers, for INSN: set
+ the regno of each such def in DEFS. */
+
+void
+df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
+{
+ df_ref *def_rec;
+ unsigned int uid = INSN_UID (insn);
+
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ {
+ df_ref def = *def_rec;
+ /* Skip both must- and may-clobber defs. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
bitmap_set_bit (defs, DF_REF_REGNO (def));
}
}
the block, starting with the first one.
----------------------------------------------------------------------------*/
-/* Apply the artificial uses and defs at the top of BB in a forwards
- direction. ??? This is wrong; defs mark the point where a pseudo
- becomes live when scanning forwards (unless a def is unused). Since
- there are no REG_UNUSED notes for artificial defs, passes that
- require artificial defs probably should not call this function
- unless (as is the case for fwprop) they are correct when liveness
- bitmaps are *under*estimated. */
+/* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
+ DF_LR_IN for basic block BB, for forward scanning by marking artificial
+ defs live. */
void
df_simulate_initialize_forwards (basic_block bb, bitmap live)
{
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ bitmap_set_bit (live, DF_REF_REGNO (def));
}
}
while here the scan is performed forwards! So, first assume that the
def is live, and if this is not true REG_UNUSED notes will rectify the
situation. */
- df_simulate_find_defs (insn, live);
+ df_simulate_find_noclobber_defs (insn, live);
/* Clear all of the registers that go dead. */
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
MULTIPLE DEFINITIONS
Find the locations in the function reached by multiple definition sites
- for a pseudo. In and out bitvectors are built for each basic
- block.
+ for a live pseudo. In and out bitvectors are built for each basic
+ block. They are restricted for efficiency to live registers.
The gen and kill sets for the problem are obvious. Together they
include all defined registers in a basic block; the gen set includes
propagating the information to BB3's successors.
---------------------------------------------------------------------------*/
+/* Scratch var used by transfer functions. This is used to do md analysis
+ only for live registers. */
+static bitmap df_md_scratch;
+
/* Set basic block info. */
static void
sizeof (struct df_md_bb_info), 50);
df_grow_bb_info (df_md);
+ df_md_scratch = BITMAP_ALLOC (NULL);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
bitmap kill = df_md_get_bb_info (bb_index)->kill;
EXECUTE_IF_SET_IN_BITMAP (frontiers[bb_index], 0, df_bb_index, bi2)
{
+ basic_block bb = BASIC_BLOCK (df_bb_index);
if (bitmap_bit_p (all_blocks, df_bb_index))
- bitmap_ior_into (df_md_get_bb_info (df_bb_index)->init, kill);
+ bitmap_ior_and_into (df_md_get_bb_info (df_bb_index)->init, kill,
+ df_get_live_in (bb));
}
}
static bool
df_md_transfer_function (int bb_index)
{
+ basic_block bb = BASIC_BLOCK (bb_index);
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
bitmap in = bb_info->in;
bitmap out = bb_info->out;
bitmap gen = bb_info->gen;
bitmap kill = bb_info->kill;
- return bitmap_ior_and_compl (out, gen, in, kill);
+ /* We need to use a scratch set here so that the value returned from this
+ function invocation properly reflects whether the sets changed in a
+ significant way; i.e. not just because the live set was anded in. */
+ bitmap_and (df_md_scratch, gen, df_get_live_out (bb));
+
+ /* Multiple definitions of a register are not relevant if it is not
+ live. Thus we trim the result to the places where it is live. */
+ bitmap_and_into (in, df_get_live_in (bb));
+
+ /* OUT = (GEN & live-out) | (IN & ~KILL); return whether OUT changed. */
+ return bitmap_ior_and_compl (out, df_md_scratch, in, kill);
}
/* Initialize the solution bit vectors for problem. */
}
}
+ BITMAP_FREE (df_md_scratch);
free_alloc_pool (df_md->block_pool);
df_md->block_info_size = 0;