/* Standard problems for dataflow support routines.
Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
- 2008, 2009 Free Software Foundation, Inc.
- Originally contributed by Michael P. Hayes
+ 2008, 2009, 2010 Free Software Foundation, Inc.
+ Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
and Kenneth Zadeck (zadeck@naturalbridge.com).
/* Note that turning REG_DEAD_DEBUGGING on will cause
gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
- addresses in the dumps. */
+ addresses in the dumps. */
#if 0
#define REG_DEAD_DEBUGGING
#endif
if (df_live)
return DF_LIVE_OUT (bb);
- else
+ else
return DF_LR_OUT (bb);
}
if (df_live)
return DF_LIVE_IN (bb);
- else
+ else
return DF_LR_IN (bb);
}
/* Print some basic block info as part of df_dump. */
-void
+void
df_print_bb_index (basic_block bb, FILE *file)
{
edge e;
{
basic_block pred = e->src;
fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
- }
+ }
fprintf (file, ")->[%d]->( ", bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
{
basic_block succ = e->dest;
fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
- }
+ }
fprintf (file, ")\n");
}
----------------------------------------------------------------------------*/
/* This problem plays a large number of games for the sake of
- efficiency.
-
+ efficiency.
+
1) The order of the bits in the bitvectors. After the scanning
phase, all of the defs are sorted. All of the defs for the reg 0
are first, followed by all defs for reg 1 and so on.
-
+
2) There are two kill sets, one if the number of defs is less or
equal to DF_SPARSE_THRESHOLD and another if the number of defs is
greater.
struct df_rd_problem_data
{
/* The set of defs to regs invalidated by call. */
- bitmap sparse_invalidated_by_call;
- /* The set of defs to regs invalidate by call for rd. */
+ bitmap sparse_invalidated_by_call;
+ /* The set of defs to regs invalidated by call for rd. */
bitmap dense_invalidated_by_call;
/* An obstack for the bitmaps we need for this problem. */
bitmap_obstack rd_bitmaps;
/* Set basic block info. */
static void
-df_rd_set_bb_info (unsigned int index,
+df_rd_set_bb_info (unsigned int index,
struct df_rd_bb_info *bb_info)
{
gcc_assert (df_rd);
/* Free basic block info. */
static void
-df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
+df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
/* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
not touched unless the block is new. */
-static void
+static void
df_rd_alloc (bitmap all_blocks)
{
unsigned int bb_index;
struct df_rd_problem_data *problem_data;
if (!df_rd->block_pool)
- df_rd->block_pool = create_alloc_pool ("df_rd_block pool",
+ df_rd->block_pool = create_alloc_pool ("df_rd_block pool",
sizeof (struct df_rd_bb_info), 50);
if (df_rd->problem_data)
bitmap_clear (problem_data->sparse_invalidated_by_call);
bitmap_clear (problem_data->dense_invalidated_by_call);
}
- else
+ else
{
problem_data = XNEW (struct df_rd_problem_data);
df_rd->problem_data = problem_data;
{
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
if (bb_info)
- {
+ {
bitmap_clear (bb_info->kill);
bitmap_clear (bb_info->sparse_kill);
bitmap_clear (bb_info->gen);
}
else
- {
+ {
bb_info = (struct df_rd_bb_info *) pool_alloc (df_rd->block_pool);
df_rd_set_bb_info (bb_index, bb_info);
bb_info->kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
{
unsigned int dregno = DF_REF_REGNO (def);
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_range (local_rd,
- DF_DEFS_BEGIN (dregno),
+ bitmap_clear_range (local_rd,
+ DF_DEFS_BEGIN (dregno),
DF_DEFS_COUNT (dregno));
bitmap_set_bit (local_rd, DF_REF_ID (def));
}
|| (dregno >= FIRST_PSEUDO_REGISTER))
{
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_range (local_rd,
- DF_DEFS_BEGIN (dregno),
+ bitmap_clear_range (local_rd,
+ DF_DEFS_BEGIN (dregno),
DF_DEFS_COUNT (dregno));
- if (!(DF_REF_FLAGS (def)
+ if (!(DF_REF_FLAGS (def)
& (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
bitmap_set_bit (local_rd, DF_REF_ID (def));
}
of kill sets. */
static void
-df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
+df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
df_ref *def_rec,
int top_flag)
{
unsigned int regno = DF_REF_REGNO (def);
unsigned int begin = DF_DEFS_BEGIN (regno);
unsigned int n_defs = DF_DEFS_COUNT (regno);
-
+
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (regno >= FIRST_PSEUDO_REGISTER))
{
/* Only the last def(s) for a regno in the block has any
- effect. */
+ effect. */
if (!bitmap_bit_p (seen_in_block, regno))
{
/* The first def for regno in insn gets to knock out the
if ((!bitmap_bit_p (seen_in_insn, regno))
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
- && (!(DF_REF_FLAGS (def) &
+ && (!(DF_REF_FLAGS (def) &
(DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
{
if (n_defs > DF_SPARSE_THRESHOLD)
bitmap_clear_range (bb_info->gen, begin, n_defs);
}
}
-
+
bitmap_set_bit (seen_in_insn, regno);
/* All defs for regno in the instruction may be put into
the gen set. */
- if (!(DF_REF_FLAGS (def)
+ if (!(DF_REF_FLAGS (def)
& (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
}
/* Artificials are only hard regs. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
- df_rd_bb_local_compute_process_def (bb_info,
+ df_rd_bb_local_compute_process_def (bb_info,
df_get_artificial_defs (bb_index),
0);
if (!INSN_P (insn))
continue;
- df_rd_bb_local_compute_process_def (bb_info,
+ df_rd_bb_local_compute_process_def (bb_info,
DF_INSN_UID_DEFS (uid), 0);
/* This complex dance with the two bitmaps is required because
are going backwards through the block and these are logically at
the start. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
- df_rd_bb_local_compute_process_def (bb_info,
+ df_rd_bb_local_compute_process_def (bb_info,
df_get_artificial_defs (bb_index),
DF_REF_AT_TOP);
}
{
df_rd_bb_local_compute (bb_index);
}
-
+
/* Set up the knockout bit vectors to be applied across EH_EDGES. */
EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
{
if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
bitmap_set_bit (sparse_invalidated, regno);
else
- bitmap_set_range (dense_invalidated,
- DF_DEFS_BEGIN (regno),
+ bitmap_set_range (dense_invalidated,
+ DF_DEFS_BEGIN (regno),
DF_DEFS_COUNT (regno));
}
/* Initialize the solution bit vectors for problem. */
-static void
+static void
df_rd_init_solution (bitmap all_blocks)
{
unsigned int bb_index;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
-
+
bitmap_copy (bb_info->out, bb_info->gen);
bitmap_clear (bb_info->in);
}
bitmap op1 = df_rd_get_bb_info (e->dest->index)->in;
bitmap op2 = df_rd_get_bb_info (e->src->index)->out;
- if (e->flags & EDGE_FAKE)
+ if (e->flags & EDGE_FAKE)
return;
if (e->flags & EDGE_EH)
EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
{
- bitmap_clear_range (tmp,
- DF_DEFS_BEGIN (regno),
+ bitmap_clear_range (tmp,
+ DF_DEFS_BEGIN (regno),
DF_DEFS_COUNT (regno));
}
bitmap_ior_into (op1, tmp);
if (bitmap_empty_p (sparse_kill))
return bitmap_ior_and_compl (out, gen, in, kill);
- else
+ else
{
struct df_rd_problem_data *problem_data;
bool changed = false;
bitmap_copy (tmp, in);
EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
{
- bitmap_clear_range (tmp,
- DF_DEFS_BEGIN (regno),
+ bitmap_clear_range (tmp,
+ DF_DEFS_BEGIN (regno),
DF_DEFS_COUNT (regno));
}
bitmap_and_compl_into (tmp, kill);
BITMAP_FREE (out);
bb_info->out = tmp;
}
- else
+ else
BITMAP_FREE (tmp);
return changed;
}
{
free_alloc_pool (df_rd->block_pool);
bitmap_obstack_release (&problem_data->rd_bitmaps);
-
+
df_rd->block_info_size = 0;
free (df_rd->block_info);
free (df_rd->problem_data);
= (struct df_rd_problem_data *) df_rd->problem_data;
unsigned int m = DF_REG_SIZE(df);
unsigned int regno;
-
- if (!df_rd->block_info)
+
+ if (!df_rd->block_info)
return;
fprintf (file, ";; Reaching defs:\n\n");
for (regno = 0; regno < m; regno++)
if (DF_DEFS_COUNT (regno))
- fprintf (file, "%d[%d,%d] ", regno,
- DF_DEFS_BEGIN (regno),
+ fprintf (file, "%d[%d,%d] ", regno,
+ DF_DEFS_BEGIN (regno),
DF_DEFS_COUNT (regno));
fprintf (file, "\n");
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
if (!bb_info || !bb_info->in)
return;
-
+
fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
dump_bitmap (file, bb_info->in);
fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
if (!bb_info || !bb_info->out)
return;
-
+
fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
dump_bitmap (file, bb_info->out);
}
df_rd_local_compute, /* Local compute function. */
df_rd_init_solution, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
- NULL, /* Confluence operator 0. */
- df_rd_confluence_n, /* Confluence operator n. */
+ NULL, /* Confluence operator 0. */
+ df_rd_confluence_n, /* Confluence operator n. */
df_rd_transfer_function, /* Transfer function. */
NULL, /* Finalize function. */
df_rd_free, /* Free all of the problem information. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
- TV_DF_RD, /* Timing variable. */
+ TV_DF_RD, /* Timing variable. */
true /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* Set basic block info. */
static void
-df_lr_set_bb_info (unsigned int index,
+df_lr_set_bb_info (unsigned int index,
struct df_lr_bb_info *bb_info)
{
gcc_assert (df_lr);
df_lr->block_info[index] = bb_info;
}
-
+
/* Free basic block info. */
static void
-df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
+df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
/* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
not touched unless the block is new. */
-static void
+static void
df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
if (!df_lr->block_pool)
- df_lr->block_pool = create_alloc_pool ("df_lr_block pool",
+ df_lr->block_pool = create_alloc_pool ("df_lr_block pool",
sizeof (struct df_lr_bb_info), 50);
df_grow_bb_info (df_lr);
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
if (bb_info)
- {
+ {
bitmap_clear (bb_info->def);
bitmap_clear (bb_info->use);
}
else
- {
+ {
bb_info = (struct df_lr_bb_info *) pool_alloc (df_lr->block_pool);
df_lr_set_bb_info (bb_index, bb_info);
bb_info->use = BITMAP_ALLOC (NULL);
/* Reset the global solution for recalculation. */
-static void
+static void
df_lr_reset (bitmap all_blocks)
{
unsigned int bb_index;
{
unsigned int uid = INSN_UID (insn);
- if (!INSN_P (insn))
- continue;
+ if (!NONDEBUG_INSN_P (insn))
+ continue;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
bitmap_clear_bit (bb_info->use, dregno);
}
}
-
+
#ifdef EH_USES
/* Process the uses that are live into an exception handler. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
unsigned int bb_index;
bitmap_iterator bi;
-
+
bitmap_clear (df->hardware_regs_used);
-
+
/* The all-important stack pointer must always be live. */
bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
-
+
/* Before reload, there are a few registers that must be forced
live everywhere -- which might not already be the case for
blocks within infinite loops. */
/* Any reference to any pseudo before reload is a potential
reference of the frame pointer. */
bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
-
+
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* Pseudos with argument area equivalences may require
reloading via the argument pointer. */
if (fixed_regs[ARG_POINTER_REGNUM])
bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
#endif
-
+
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
&& fixed_regs[PIC_OFFSET_TABLE_REGNUM])
bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
}
-
+
EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
if (bb_index == EXIT_BLOCK)
/* Initialize the solution vectors. */
-static void
+static void
df_lr_init (bitmap all_blocks)
{
unsigned int bb_index;
bitmap op1 = df_lr_get_bb_info (bb->index)->out;
if (bb != EXIT_BLOCK_PTR)
bitmap_copy (op1, df->hardware_regs_used);
-}
+}
/* Confluence function that ignores fake edges. */
{
bitmap op1 = df_lr_get_bb_info (e->src->index)->out;
bitmap op2 = df_lr_get_bb_info (e->dest->index)->in;
-
+
/* Call-clobbered registers die across exception and call edges. */
/* ??? Abnormal call edges ignored for the moment, as this gets
confused by sibling call edges, which crashes reg-stack. */
bitmap_ior_into (op1, op2);
bitmap_ior_into (op1, df->hardware_regs_used);
-}
+}
/* Transfer function. */
}
}
free_alloc_pool (df_lr->block_pool);
-
+
df_lr->block_info_size = 0;
free (df_lr->block_info);
}
struct df_lr_problem_data *problem_data;
if (!bb_info || !bb_info->in)
return;
-
+
fprintf (file, ";; lr in \t");
df_print_regset (file, bb_info->in);
if (df_lr->problem_data)
df_print_regset (file, bb_info->use);
fprintf (file, ";; lr def \t");
df_print_regset (file, bb_info->def);
-}
+}
/* Debugging info at bottom of bb. */
struct df_lr_problem_data *problem_data;
if (!bb_info || !bb_info->out)
return;
-
+
fprintf (file, ";; lr out \t");
df_print_regset (file, bb_info->out);
if (df_lr->problem_data)
fprintf (file, ";; old out \t");
df_print_regset (file, problem_data->out[bb->index]);
}
-}
+}
/* Build the datastructure to verify that the solution to the dataflow
return;
}
- /* Set it true so that the solution is recomputed. */
+ /* Set it true so that the solution is recomputed. */
df_lr->solutions_dirty = true;
problem_data = XNEW (struct df_lr_problem_data);
df_lr_local_compute, /* Local compute function. */
df_lr_init, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
- df_lr_confluence_0, /* Confluence operator 0. */
- df_lr_confluence_n, /* Confluence operator n. */
+ df_lr_confluence_0, /* Confluence operator 0. */
+ df_lr_confluence_n, /* Confluence operator n. */
df_lr_transfer_function, /* Transfer function. */
df_lr_finalize, /* Finalize function. */
df_lr_free, /* Free all of the problem information. */
df_lr_verify_solution_start,/* Incremental solution verify start. */
df_lr_verify_solution_end, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
- TV_DF_LR, /* Timing variable. */
+ TV_DF_LR, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* Make a copy of the transfer functions and then compute
new ones to see if the transfer functions have
changed. */
- if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
+ if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
bb->index))
{
bitmap_copy (saved_def, bb_info->def);
/* If we do not have basic block info, the block must be in
the list of dirty blocks or else some one has added a
block behind our backs. */
- gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
+ gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
bb->index));
}
/* Make sure no one created a block without following
}
/* Make sure there are no dirty bits in blocks that have been deleted. */
- gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
- all_blocks));
+ gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
+ all_blocks));
BITMAP_FREE (saved_def);
BITMAP_FREE (saved_use);
Then, the in and out sets for the LIVE problem itself are computed.
These are the logical AND of the IN and OUT sets from the LR problem
- and the must-initialized problem.
+ and the must-initialized problem.
----------------------------------------------------------------------------*/
/* Private data used to verify the solution for this problem. */
/* Set basic block info. */
static void
-df_live_set_bb_info (unsigned int index,
+df_live_set_bb_info (unsigned int index,
struct df_live_bb_info *bb_info)
{
gcc_assert (df_live);
/* Free basic block info. */
static void
-df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
+df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
/* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
not touched unless the block is new. */
-static void
+static void
df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
if (!df_live->block_pool)
- df_live->block_pool = create_alloc_pool ("df_live_block pool",
+ df_live->block_pool = create_alloc_pool ("df_live_block pool",
sizeof (struct df_live_bb_info), 100);
if (!df_live_scratch)
df_live_scratch = BITMAP_ALLOC (NULL);
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
if (bb_info)
- {
+ {
bitmap_clear (bb_info->kill);
bitmap_clear (bb_info->gen);
}
else
- {
+ {
bb_info = (struct df_live_bb_info *) pool_alloc (df_live->block_pool);
df_live_set_bb_info (bb_index, bb_info);
bb_info->kill = BITMAP_ALLOC (NULL);
/* Reset the global solution for recalculation. */
-static void
+static void
df_live_reset (bitmap all_blocks)
{
unsigned int bb_index;
df_grow_insn_info ();
- EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
+ EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
0, bb_index, bi)
{
df_live_bb_local_compute (bb_index);
/* Initialize the solution vectors. */
-static void
+static void
df_live_init (bitmap all_blocks)
{
unsigned int bb_index;
{
bitmap op1 = df_live_get_bb_info (e->dest->index)->in;
bitmap op2 = df_live_get_bb_info (e->src->index)->out;
-
- if (e->flags & EDGE_FAKE)
+
+ if (e->flags & EDGE_FAKE)
return;
bitmap_ior_into (op1, op2);
-}
+}
/* Transfer function for the forwards must-initialized problem. */
bitmap gen = bb_info->gen;
bitmap kill = bb_info->kill;
- /* We need to use a scratch set here so that the value returned from
- this function invocation properly reflects if the sets changed in
- a significant way; i.e. not just because the lr set was anded
- in. */
+ /* We need to use a scratch set here so that the value returned from this
+ function invocation properly reflects whether the sets changed in a
+ significant way; i.e. not just because the lr set was anded in. */
bitmap_and (df_live_scratch, gen, bb_lr_info->out);
/* No register may reach a location where it is not used. Thus
we trim the rr result to the places where it is used. */
{
struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
-
+
/* No register may reach a location where it is not used. Thus
we trim the rr result to the places where it is used. */
bitmap_and_into (bb_live_info->in, bb_lr_info->in);
bitmap_and_into (bb_live_info->out, bb_lr_info->out);
}
-
+
df_live->solutions_dirty = false;
}
}
if (df_live->block_info)
{
unsigned int i;
-
+
for (i = 0; i < df_live->block_info_size; i++)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (i);
BITMAP_FREE (bb_info->out);
}
}
-
+
free_alloc_pool (df_live->block_pool);
df_live->block_info_size = 0;
free (df_live->block_info);
if (!bb_info || !bb_info->in)
return;
-
+
fprintf (file, ";; live in \t");
df_print_regset (file, bb_info->in);
if (df_live->problem_data)
if (!bb_info || !bb_info->out)
return;
-
+
fprintf (file, ";; live out \t");
df_print_regset (file, bb_info->out);
if (df_live->problem_data)
return;
}
- /* Set it true so that the solution is recomputed. */
+ /* Set it true so that the solution is recomputed. */
df_live->solutions_dirty = true;
problem_data = XNEW (struct df_live_problem_data);
df_live_local_compute, /* Local compute function. */
df_live_init, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
- NULL, /* Confluence operator 0. */
- df_live_confluence_n, /* Confluence operator n. */
+ NULL, /* Confluence operator 0. */
+ df_live_confluence_n, /* Confluence operator n. */
df_live_transfer_function, /* Transfer function. */
df_live_finalize, /* Finalize function. */
df_live_free, /* Free all of the problem information. */
{
basic_block bb;
FOR_ALL_BB (bb)
- bitmap_set_bit (df_live->out_of_date_transfer_functions,
+ bitmap_set_bit (df_live->out_of_date_transfer_functions,
bb->index);
}
/* Make a copy of the transfer functions and then compute
new ones to see if the transfer functions have
changed. */
- if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
+ if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
bb->index))
{
bitmap_copy (saved_gen, bb_info->gen);
/* If we do not have basic block info, the block must be in
the list of dirty blocks or else some one has added a
block behind our backs. */
- gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
+ gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
bb->index));
}
/* Make sure no one created a block without following
}
/* Make sure there are no dirty bits in blocks that have been deleted. */
- gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
- all_blocks));
+ gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
+ all_blocks));
BITMAP_FREE (saved_gen);
BITMAP_FREE (saved_kill);
BITMAP_FREE (all_blocks);
{
struct df_link *head = DF_REF_CHAIN (src);
struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
-
+
DF_REF_CHAIN (src) = link;
link->next = head;
link->ref = dst;
/* Delete any du or ud chains that start at REF and point to
- TARGET. */
+ TARGET. */
static void
df_chain_unlink_1 (df_ref ref, df_ref target)
{
/* Copy the du or ud chain starting at FROM_REF and attach it to
- TO_REF. */
+ TO_REF. */
-void
-df_chain_copy (df_ref to_ref,
+void
+df_chain_copy (df_ref to_ref,
struct df_link *from_ref)
{
while (from_ref)
bitmap_iterator bi;
unsigned int bb_index;
- /* Wholesale destruction of the old chains. */
+ /* Wholesale destruction of the old chains. */
if (df_chain->block_pool)
free_alloc_pool (df_chain->block_pool);
if (df_chain_problem_p (DF_UD_CHAIN))
for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
DF_REF_CHAIN (*use_rec) = NULL;
-
+
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
-
+
if (INSN_P (insn))
{
if (df_chain_problem_p (DF_DU_CHAIN))
/* Create def-use or use-def chains. */
-static void
+static void
df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
df_chain_remove_problem ();
- df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
+ df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
sizeof (struct df_link), 50);
df_chain->optional_p = true;
}
{
bitmap_iterator bi;
unsigned int def_index;
-
+
while (*use_rec)
{
df_ref use = *use_rec;
{
unsigned int first_index = DF_DEFS_BEGIN (uregno);
unsigned int last_index = first_index + count - 1;
-
+
EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
{
df_ref def;
- if (def_index > last_index)
+ if (def_index > last_index)
break;
-
+
def = DF_DEFS_GET (def_index);
if (df_chain_problem_p (DF_DU_CHAIN))
df_chain_create (def, use);
#ifdef EH_USES
/* Create the chains for the artificial uses from the EH_USES at the
beginning of the block. */
-
+
/* Artificials are only hard regs. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
df_chain_create_bb_process_use (cpy,
- df_get_artificial_uses (bb->index),
+ df_get_artificial_uses (bb->index),
DF_REF_AT_TOP);
#endif
df_rd_simulate_artificial_defs_at_top (bb, cpy);
-
+
/* Process the regular instructions next. */
FOR_BB_INSNS (bb, insn)
if (INSN_P (insn))
at the end of the block. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
df_chain_create_bb_process_use (cpy,
- df_get_artificial_uses (bb->index),
+ df_get_artificial_uses (bb->index),
0);
BITMAP_FREE (cpy);
{
unsigned int bb_index;
bitmap_iterator bi;
-
+
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
df_chain_create_bb (bb_index);
df_ref *def_rec = df_get_artificial_defs (bb->index);
if (*def_rec)
{
-
+
fprintf (file, ";; DU chains for artificial defs\n");
while (*def_rec)
{
fprintf (file, "\n");
def_rec++;
}
- }
+ }
FOR_BB_INSNS (bb, insn)
{
def_rec = DF_INSN_INFO_DEFS (insn_info);
if (*def_rec)
{
- fprintf (file, ";; DU chains for insn luid %d uid %d\n",
+ fprintf (file, ";; DU chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
-
+
while (*def_rec)
{
df_ref def = *def_rec;
fprintf (file, "\n");
use_rec++;
}
- }
+ }
FOR_BB_INSNS (bb, insn)
{
use_rec = DF_INSN_INFO_USES (insn_info);
if (*use_rec || *eq_use_rec)
{
- fprintf (file, ";; UD chains for insn luid %d uid %d\n",
+ fprintf (file, ";; UD chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
-
+
while (*use_rec)
{
df_ref use = *use_rec;
NULL, /* Local compute function. */
NULL, /* Init the solution specific data. */
NULL, /* Iterative solver. */
- NULL, /* Confluence operator 0. */
- NULL, /* Confluence operator n. */
+ NULL, /* Confluence operator 0. */
+ NULL, /* Confluence operator n. */
NULL, /* Transfer function. */
df_chain_finalize, /* Finalize function. */
df_chain_free, /* Free all of the problem information. */
bitmap needs_expansion;
/* The start position and len for each regno in the various bit
- vectors. */
- unsigned int* regno_start;
+ vectors. */
+ unsigned int* regno_start;
unsigned int* regno_len;
/* An obstack for the bitmaps we need for this problem. */
bitmap_obstack byte_lr_bitmaps;
/* Get the starting location for REGNO in the df_byte_lr bitmaps. */
-int
+int
df_byte_lr_get_regno_start (unsigned int regno)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;;
return problem_data->regno_start[regno];
}
/* Get the len for REGNO in the df_byte_lr bitmaps. */
-int
+int
df_byte_lr_get_regno_len (unsigned int regno)
-{
- struct df_byte_lr_problem_data *problem_data
+{
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;;
return problem_data->regno_len[regno];
}
/* Set basic block info. */
static void
-df_byte_lr_set_bb_info (unsigned int index,
+df_byte_lr_set_bb_info (unsigned int index,
struct df_byte_lr_bb_info *bb_info)
{
gcc_assert (df_byte_lr);
df_byte_lr->block_info[index] = bb_info;
}
-
+
/* Free basic block info. */
static void
-df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
+df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_byte_lr_bb_info *bb_info = (struct df_byte_lr_bb_info *) vbb_info;
static void
df_byte_lr_check_regs (df_ref *ref_rec)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
for (; *ref_rec; ref_rec++)
{
df_ref ref = *ref_rec;
- if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT
- | DF_REF_ZERO_EXTRACT
+ if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT
+ | DF_REF_ZERO_EXTRACT
| DF_REF_STRICT_LOW_PART)
|| GET_CODE (DF_REF_REG (ref)) == SUBREG)
bitmap_set_bit (problem_data->needs_expansion, DF_REF_REGNO (ref));
}
-/* Expand bitmap SRC which is indexed by regno to DEST which is indexed by
+/* Expand bitmap SRC which is indexed by regno to DEST which is indexed by
regno_start and regno_len. */
static void
df_byte_lr_expand_bitmap (bitmap dest, bitmap src)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
bitmap_iterator bi;
unsigned int i;
bitmap_clear (dest);
EXECUTE_IF_SET_IN_BITMAP (src, 0, i, bi)
{
- bitmap_set_range (dest, problem_data->regno_start[i],
+ bitmap_set_range (dest, problem_data->regno_start[i],
problem_data->regno_len[i]);
}
}
/* Allocate or reset bitmaps for DF_BYTE_LR blocks. The solution bits are
not touched unless the block is new. */
-static void
+static void
df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
df_byte_lr->problem_data = problem_data;
if (!df_byte_lr->block_pool)
- df_byte_lr->block_pool = create_alloc_pool ("df_byte_lr_block pool",
+ df_byte_lr->block_pool = create_alloc_pool ("df_byte_lr_block pool",
sizeof (struct df_byte_lr_bb_info), 50);
df_grow_bb_info (df_byte_lr);
problem_data->hardware_regs_used = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
problem_data->invalidated_by_call = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
problem_data->needs_expansion = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
-
+
/* Discover which regno's use subregs, extracts or
strict_low_parts. */
FOR_EACH_BB (bb)
bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, EXIT_BLOCK);
-
+
/* Allocate the slots for each regno. */
for (regno = 0; regno < max_reg; regno++)
{
problem_data->regno_start[regno] = index;
if (bitmap_bit_p (problem_data->needs_expansion, regno))
len = GET_MODE_SIZE (GET_MODE (regno_reg_rtx[regno]));
- else
+ else
len = 1;
-
+
problem_data->regno_len[regno] = len;
index += len;
}
- df_byte_lr_expand_bitmap (problem_data->hardware_regs_used,
+ df_byte_lr_expand_bitmap (problem_data->hardware_regs_used,
df->hardware_regs_used);
- df_byte_lr_expand_bitmap (problem_data->invalidated_by_call,
+ df_byte_lr_expand_bitmap (problem_data->invalidated_by_call,
regs_invalidated_by_call_regset);
EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
if (bb_info)
- {
+ {
bitmap_clear (bb_info->def);
bitmap_clear (bb_info->use);
}
else
- {
+ {
bb_info = (struct df_byte_lr_bb_info *) pool_alloc (df_byte_lr->block_pool);
df_byte_lr_set_bb_info (bb_index, bb_info);
bb_info->use = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
bb_info->out = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
}
}
-
+
df_byte_lr->optional_p = true;
}
/* Reset the global solution for recalculation. */
-static void
+static void
df_byte_lr_reset (bitmap all_blocks)
{
unsigned int bb_index;
static void
df_byte_lr_bb_local_compute (unsigned int bb_index)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
basic_block bb = BASIC_BLOCK (bb_index);
struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
unsigned int uid = INSN_UID (insn);
if (!INSN_P (insn))
- continue;
+ continue;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
bitmap_clear_range (bb_info->use, start, len);
}
}
-
+
#ifdef EH_USES
/* Process the uses that are live into an exception handler. */
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
/* Initialize the solution vectors. */
-static void
+static void
df_byte_lr_init (bitmap all_blocks)
{
unsigned int bb_index;
static void
df_byte_lr_confluence_0 (basic_block bb)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
bitmap op1 = df_byte_lr_get_bb_info (bb->index)->out;
if (bb != EXIT_BLOCK_PTR)
bitmap_copy (op1, problem_data->hardware_regs_used);
-}
+}
/* Confluence function that ignores fake edges. */
static void
df_byte_lr_confluence_n (edge e)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
bitmap op1 = df_byte_lr_get_bb_info (e->src->index)->out;
bitmap op2 = df_byte_lr_get_bb_info (e->dest->index)->in;
-
+
/* Call-clobbered registers die across exception and call edges. */
/* ??? Abnormal call edges ignored for the moment, as this gets
confused by sibling call edges, which crashes reg-stack. */
bitmap_ior_into (op1, op2);
bitmap_ior_into (op1, problem_data->hardware_regs_used);
-}
+}
/* Transfer function. */
struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index);
if (!bb_info || !bb_info->in)
return;
-
+
fprintf (file, ";; blr in \t");
df_print_byte_regset (file, bb_info->in);
fprintf (file, ";; blr use \t");
df_print_byte_regset (file, bb_info->use);
fprintf (file, ";; blr def \t");
df_print_byte_regset (file, bb_info->def);
-}
+}
/* Debugging info at bottom of bb. */
struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index);
if (!bb_info || !bb_info->out)
return;
-
+
fprintf (file, ";; blr out \t");
df_print_byte_regset (file, bb_info->out);
-}
+}
/* All of the information associated with every instance of the problem. */
df_byte_lr_local_compute, /* Local compute function. */
df_byte_lr_init, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
- df_byte_lr_confluence_0, /* Confluence operator 0. */
- df_byte_lr_confluence_n, /* Confluence operator n. */
+ df_byte_lr_confluence_0, /* Confluence operator 0. */
+ df_byte_lr_confluence_n, /* Confluence operator n. */
df_byte_lr_transfer_function, /* Transfer function. */
NULL, /* Finalize function. */
df_byte_lr_free, /* Free all of the problem information. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
- TV_DF_BYTE_LR, /* Timing variable. */
+ TV_DF_BYTE_LR, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
void
df_byte_lr_simulate_defs (rtx insn, bitmap live)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
bitmap_clear_range (live, start, len);
}
}
-}
+}
/* Simulate the effects of the uses of INSN on LIVE. */
-void
+void
df_byte_lr_simulate_uses (rtx insn, bitmap live)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
df_ref *use_rec;
unsigned int uid = INSN_UID (insn);
unsigned int len = problem_data->regno_len[uregno];
unsigned int sb;
unsigned int lb;
-
+
if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb))
{
start += sb;
len = lb - sb;
}
-
+
/* Add use to set of uses in this BB. */
if (len)
bitmap_set_range (live, start, len);
/* Apply the artificial uses and defs at the top of BB in a forwards
direction. */
-void
+void
df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
df_ref *def_rec;
#ifdef EH_USES
df_ref *use_rec;
#endif
int bb_index = bb->index;
-
+
#ifdef EH_USES
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
{
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- {
+ {
unsigned int dregno = DF_REF_REGNO (def);
unsigned int start = problem_data->regno_start[dregno];
unsigned int len = problem_data->regno_len[dregno];
/* Apply the artificial uses and defs at the end of BB in a backwards
direction. */
-void
+void
df_byte_lr_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
{
- struct df_byte_lr_problem_data *problem_data
+ struct df_byte_lr_problem_data *problem_data
= (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
df_ref *def_rec;
df_ref *use_rec;
int bb_index = bb->index;
-
+
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
df_ref def = *def_rec;
This problem computes REG_DEAD and REG_UNUSED notes.
----------------------------------------------------------------------------*/
-static void
+static void
df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
df_note->optional_p = true;
}
#ifdef REG_DEAD_DEBUGGING
-static void
+static void
df_print_note (const char *prefix, rtx insn, rtx note)
{
if (dump_file)
just leave the notes alone. */
#ifdef STACK_REGS
-static inline bool
+static inline bool
df_ignore_stack_reg (int regno)
{
return regstack_completed
&& IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
}
#else
-static inline bool
+static inline bool
df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
{
return false;
switch (REG_NOTE_KIND (link))
{
case REG_DEAD:
- /* After reg-stack, we need to ignore any unused notes
+ /* After reg-stack, we need to ignore any unused notes
for the stack registers. */
if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
{
break;
case REG_UNUSED:
- /* After reg-stack, we need to ignore any unused notes
+ /* After reg-stack, we need to ignore any unused notes
for the stack registers. */
if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
{
*pprev = link = next;
}
break;
-
+
default:
pprev = &XEXP (link, 1);
link = *pprev;
rtx curr = old;
rtx prev = NULL;
+ gcc_assert (!DEBUG_INSN_P (insn));
+
while (curr)
if (XEXP (curr, 0) == reg)
{
prev = curr;
curr = XEXP (curr, 1);
}
-
+
/* Did not find the note. */
add_reg_note (insn, note_type, reg);
return old;
static rtx
df_set_unused_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws,
- bitmap live, bitmap do_not_gen,
+ bitmap live, bitmap do_not_gen,
bitmap artificial_uses)
{
unsigned int r;
-
+
#ifdef REG_DEAD_DEBUGGING
if (dump_file)
- fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
+ fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
mws->start_regno, mws->end_regno);
#endif
static rtx
df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws,
bitmap live, bitmap do_not_gen,
- bitmap artificial_uses)
+ bitmap artificial_uses, bool *added_notes_p)
{
unsigned int r;
-
+ bool is_debug = *added_notes_p;
+
+ *added_notes_p = false;
+
#ifdef REG_DEAD_DEBUGGING
if (dump_file)
{
- fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
+ fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
mws->start_regno, mws->end_regno);
df_print_regset (dump_file, do_not_gen);
fprintf (dump_file, " live =");
if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen))
{
/* Add a dead note for the entire multi word register. */
+ if (is_debug)
+ {
+ *added_notes_p = true;
+ return old;
+ }
old = df_set_note (REG_DEAD, insn, old, mws->mw_reg);
#ifdef REG_DEAD_DEBUGGING
df_print_note ("adding 1: ", insn, REG_NOTES (insn));
&& !bitmap_bit_p (artificial_uses, r)
&& !bitmap_bit_p (do_not_gen, r))
{
+ if (is_debug)
+ {
+ *added_notes_p = true;
+ return old;
+ }
old = df_set_note (REG_DEAD, insn, old, regno_reg_rtx[r]);
#ifdef REG_DEAD_DEBUGGING
df_print_note ("adding 2: ", insn, REG_NOTES (insn));
LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
static rtx
-df_create_unused_note (rtx insn, rtx old, df_ref def,
+df_create_unused_note (rtx insn, rtx old, df_ref def,
bitmap live, bitmap artificial_uses)
{
unsigned int dregno = DF_REF_REGNO (def);
-
+
#ifdef REG_DEAD_DEBUGGING
if (dump_file)
{
|| bitmap_bit_p (artificial_uses, dregno)
|| df_ignore_stack_reg (dregno)))
{
- rtx reg = (DF_REF_LOC (def))
+ rtx reg = (DF_REF_LOC (def))
? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
old = df_set_note (REG_UNUSED, insn, old, reg);
#ifdef REG_DEAD_DEBUGGING
df_print_note ("adding 3: ", insn, REG_NOTES (insn));
#endif
}
-
+
return old;
}
+/* Node of a linked list of uses of dead REGs in debug insns.  */
+struct dead_debug_use
+{
+  /* The debug-insn use of a dead register.  */
+  df_ref use;
+  /* Next node in the list.  NOTE(review): dead_debug_finish appears to
+     rely on uses belonging to the same insn being adjacent here, which
+     holds because dead_debug_add pushes uses insn by insn.  */
+  struct dead_debug_use *next;
+};
+
+/* Linked list of the above, with a bitmap of the REGs in the
+   list.  */
+struct dead_debug
+{
+  /* Head of the list of pending debug uses.  */
+  struct dead_debug_use *head;
+  /* Bitmap of the REGnos present in the list; allocated lazily by
+     dead_debug_add unless one was supplied to dead_debug_init.  */
+  bitmap used;
+  /* UIDs of insns whose uses were rewritten to debug temps and that
+     still need a df rescan; allocated lazily, freed by
+     dead_debug_finish.  */
+  bitmap to_rescan;
+};
+
+/* Initialize DEBUG to an empty list, and clear USED, if given.  */
+static inline void
+dead_debug_init (struct dead_debug *debug, bitmap used)
+{
+  debug->head = NULL;
+  /* USED may be NULL; dead_debug_add allocates a private bitmap on
+     first use in that case, and dead_debug_finish frees it.  */
+  debug->used = used;
+  debug->to_rescan = NULL;
+  if (used)
+    bitmap_clear (used);
+}
+
+/* Reset all debug insns with pending uses.  Release the bitmap in it,
+   unless it is USED.  USED must be the same bitmap passed to
+   dead_debug_init.  */
+static inline void
+dead_debug_finish (struct dead_debug *debug, bitmap used)
+{
+  struct dead_debug_use *head;
+  rtx insn = NULL;
+
+  /* Free the regno bitmap only if we allocated it ourselves; a
+     caller-supplied bitmap is the caller's to release.  */
+  if (debug->used != used)
+    BITMAP_FREE (debug->used);
+
+  /* Uses still queued here never had a death insn reach
+     dead_debug_insert_before, so their debug insns still refer to
+     dead registers: reset those insns to an unknown location.  */
+  while ((head = debug->head))
+    {
+      insn = DF_REF_INSN (head->use);
+      /* Reset each insn only once, at its last queued use.
+	 NOTE(review): this assumes uses of the same insn are adjacent
+	 in the list — confirm against dead_debug_add's push order.  */
+      if (!head->next || DF_REF_INSN (head->next->use) != insn)
+	{
+	  INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
+	  df_insn_rescan_debug_internal (insn);
+	  /* The reset already rescanned it; drop any pending rescan.  */
+	  if (debug->to_rescan)
+	    bitmap_clear_bit (debug->to_rescan, INSN_UID (insn));
+	}
+      debug->head = head->next;
+      XDELETE (head);
+    }
+
+  /* Rescan the insns whose uses were rewritten to debug temps by
+     dead_debug_insert_before.  */
+  if (debug->to_rescan)
+    {
+      bitmap_iterator bi;
+      unsigned int uid;
+
+      EXECUTE_IF_SET_IN_BITMAP (debug->to_rescan, 0, uid, bi)
+	{
+	  struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
+	  /* The insn may have been deleted in the meantime, in which
+	     case there is no insn_info for its UID.  */
+	  if (insn_info)
+	    df_insn_rescan (insn_info->insn);
+	}
+      BITMAP_FREE (debug->to_rescan);
+    }
+}
+
+/* Add USE to DEBUG.  It must be a dead reference to UREGNO in a debug
+   insn.  Create a bitmap for DEBUG as needed.  */
+static inline void
+dead_debug_add (struct dead_debug *debug, df_ref use, unsigned int uregno)
+{
+  struct dead_debug_use *newddu = XNEW (struct dead_debug_use);
+
+  /* Push onto the head of the list; since insns are processed one at
+     a time, uses of the same insn end up adjacent.  */
+  newddu->use = use;
+  newddu->next = debug->head;
+  debug->head = newddu;
+
+  /* Lazily allocate the regno bitmap if dead_debug_init was not
+     handed one.  */
+  if (!debug->used)
+    debug->used = BITMAP_ALLOC (NULL);
+
+  /* Record UREGNO so dead_debug_insert_before can cheaply test
+     whether any debug uses of it are pending.  */
+  bitmap_set_bit (debug->used, uregno);
+}
+
+/* If UREGNO is referenced by any entry in DEBUG, emit a debug insn
+   before INSN that binds the REG to a debug temp, and replace all
+   uses of UREGNO in DEBUG with uses of the debug temp.  INSN must be
+   the insn where UREGNO dies.  */
+static inline void
+dead_debug_insert_before (struct dead_debug *debug, unsigned int uregno,
+			  rtx insn)
+{
+  struct dead_debug_use **tailp = &debug->head;
+  struct dead_debug_use *cur;
+  struct dead_debug_use *uses = NULL;
+  struct dead_debug_use **usesp = &uses;
+  rtx reg = NULL;
+  rtx dval;
+  rtx bind;
+
+  /* Nothing to do unless UREGNO has pending debug uses; the clear
+     also marks it as handled (bitmap_clear_bit returns whether the
+     bit was previously set).  */
+  if (!debug->used || !bitmap_clear_bit (debug->used, uregno))
+    return;
+
+  /* Move all uses of uregno from debug->head to uses, setting mode to
+     the widest referenced mode.  */
+  while ((cur = *tailp))
+    {
+      if (DF_REF_REGNO (cur->use) == uregno)
+	{
+	  /* Unlink CUR from the pending list and append it to USES.  */
+	  *usesp = cur;
+	  usesp = &cur->next;
+	  *tailp = cur->next;
+	  cur->next = NULL;
+	  /* Track the widest-mode reference so the debug temp can
+	     satisfy every use (narrower uses get a lowpart SUBREG
+	     below).  */
+	  if (!reg
+	      || (GET_MODE_BITSIZE (GET_MODE (reg))
+		  < GET_MODE_BITSIZE (GET_MODE (*DF_REF_REAL_LOC (cur->use)))))
+	    reg = *DF_REF_REAL_LOC (cur->use);
+	}
+      else
+	tailp = &(*tailp)->next;
+    }
+
+  /* debug->used said there was at least one use, so we must have
+     collected one.  */
+  gcc_assert (reg);
+
+  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
+  dval = make_debug_expr_from_rtl (reg);
+
+  /* Emit a debug bind insn before the insn in which reg dies.  */
+  bind = gen_rtx_VAR_LOCATION (GET_MODE (reg),
+			       DEBUG_EXPR_TREE_DECL (dval), reg,
+			       VAR_INIT_STATUS_INITIALIZED);
+
+  bind = emit_debug_insn_before (bind, insn);
+  df_insn_rescan (bind);
+
+  /* Adjust all uses.  */
+  while ((cur = uses))
+    {
+      if (GET_MODE (*DF_REF_REAL_LOC (cur->use)) == GET_MODE (reg))
+	*DF_REF_REAL_LOC (cur->use) = dval;
+      else
+	*DF_REF_REAL_LOC (cur->use)
+	  = gen_lowpart_SUBREG (GET_MODE (*DF_REF_REAL_LOC (cur->use)), dval);
+      /* ??? Should we simplify subreg of subreg?  */
+      /* Queue the rewritten insn; the rescans are batched in
+	 dead_debug_finish.  */
+      if (debug->to_rescan == NULL)
+	debug->to_rescan = BITMAP_ALLOC (NULL);
+      bitmap_set_bit (debug->to_rescan, INSN_UID (DF_REF_INSN (cur->use)));
+      uses = cur->next;
+      XDELETE (cur);
+    }
+}
/* Recompute the REG_DEAD and REG_UNUSED notes and compute register
info: lifetime, bb, and number of defs and uses for basic block
BB. The three bitvectors are scratch regs used here. */
static void
-df_note_bb_compute (unsigned int bb_index,
+df_note_bb_compute (unsigned int bb_index,
bitmap live, bitmap do_not_gen, bitmap artificial_uses)
{
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
+ struct dead_debug debug;
+
+ dead_debug_init (&debug, NULL);
bitmap_copy (live, df_get_live_out (bb));
bitmap_clear (artificial_uses);
{
unsigned int regno = DF_REF_REGNO (use);
bitmap_set_bit (live, regno);
-
+
/* Notes are not generated for any of the artificial registers
at the bottom of the block. */
bitmap_set_bit (artificial_uses, regno);
}
}
-
+
#ifdef REG_DEAD_DEBUGGING
if (dump_file)
{
struct df_mw_hardreg **mws_rec;
rtx old_dead_notes;
rtx old_unused_notes;
-
+ int debug_insn;
+
if (!INSN_P (insn))
continue;
+ debug_insn = DEBUG_INSN_P (insn);
+
bitmap_clear (do_not_gen);
df_kill_notes (insn, &old_dead_notes, &old_unused_notes);
mws_rec = DF_INSN_UID_MWS (uid);
while (*mws_rec)
{
- struct df_mw_hardreg *mws = *mws_rec;
- if ((DF_MWS_REG_DEF_P (mws))
+ struct df_mw_hardreg *mws = *mws_rec;
+ if ((DF_MWS_REG_DEF_P (mws))
&& !df_ignore_stack_reg (mws->start_regno))
- old_unused_notes
- = df_set_unused_notes_for_mw (insn, old_unused_notes,
- mws, live, do_not_gen,
+ old_unused_notes
+ = df_set_unused_notes_for_mw (insn, old_unused_notes,
+ mws, live, do_not_gen,
artificial_uses);
mws_rec++;
}
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
{
old_unused_notes
- = df_create_unused_note (insn, old_unused_notes,
+ = df_create_unused_note (insn, old_unused_notes,
def, live, artificial_uses);
bitmap_set_bit (do_not_gen, dregno);
}
mws_rec = DF_INSN_UID_MWS (uid);
while (*mws_rec)
{
- struct df_mw_hardreg *mws = *mws_rec;
+ struct df_mw_hardreg *mws = *mws_rec;
if (DF_MWS_REG_DEF_P (mws))
old_unused_notes
- = df_set_unused_notes_for_mw (insn, old_unused_notes,
- mws, live, do_not_gen,
+ = df_set_unused_notes_for_mw (insn, old_unused_notes,
+ mws, live, do_not_gen,
artificial_uses);
mws_rec++;
}
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
old_unused_notes
- = df_create_unused_note (insn, old_unused_notes,
+ = df_create_unused_note (insn, old_unused_notes,
def, live, artificial_uses);
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
bitmap_clear_bit (live, dregno);
}
}
-
+
/* Process the uses. */
mws_rec = DF_INSN_UID_MWS (uid);
while (*mws_rec)
{
- struct df_mw_hardreg *mws = *mws_rec;
- if ((DF_MWS_REG_DEF_P (mws))
+ struct df_mw_hardreg *mws = *mws_rec;
+ if ((DF_MWS_REG_DEF_P (mws))
&& !df_ignore_stack_reg (mws->start_regno))
- old_dead_notes
- = df_set_dead_notes_for_mw (insn, old_dead_notes,
- mws, live, do_not_gen,
- artificial_uses);
+ {
+ bool really_add_notes = debug_insn != 0;
+
+ old_dead_notes
+ = df_set_dead_notes_for_mw (insn, old_dead_notes,
+ mws, live, do_not_gen,
+ artificial_uses,
+ &really_add_notes);
+
+ if (really_add_notes)
+ debug_insn = -1;
+ }
mws_rec++;
}
unsigned int uregno = DF_REF_REGNO (use);
#ifdef REG_DEAD_DEBUGGING
- if (dump_file)
+ if (dump_file && !debug_insn)
{
fprintf (dump_file, " regular looking at use ");
df_ref_debug (use, dump_file);
#endif
if (!bitmap_bit_p (live, uregno))
{
+ if (debug_insn)
+ {
+ if (debug_insn > 0)
+ {
+ dead_debug_add (&debug, use, uregno);
+ continue;
+ }
+ break;
+ }
+ else
+ dead_debug_insert_before (&debug, uregno, insn);
+
if ( (!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG))
&& (!bitmap_bit_p (do_not_gen, uregno))
&& (!bitmap_bit_p (artificial_uses, uregno))
&& (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE))
&& (!df_ignore_stack_reg (uregno)))
{
- rtx reg = (DF_REF_LOC (use))
+ rtx reg = (DF_REF_LOC (use))
? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
old_dead_notes = df_set_note (REG_DEAD, insn, old_dead_notes, reg);
free_EXPR_LIST_node (old_dead_notes);
old_dead_notes = next;
}
+
+ if (debug_insn == -1)
+ {
+ /* ??? We could probably do better here, replacing dead
+ registers with their definitions. */
+ INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
+ df_insn_rescan_debug_internal (insn);
+ }
}
+
+ dead_debug_finish (&debug, NULL);
}
df_note_compute, /* Local compute function. */
NULL, /* Init the solution specific data. */
NULL, /* Iterative solver. */
- NULL, /* Confluence operator 0. */
- NULL, /* Confluence operator n. */
+ NULL, /* Confluence operator 0. */
+ NULL, /* Confluence operator n. */
NULL, /* Transfer function. */
NULL, /* Finalize function. */
df_note_free, /* Free all of the problem information. */
\f
/*----------------------------------------------------------------------------
- Functions for simulating the effects of single insns.
+ Functions for simulating the effects of single insns.
You can either simulate in the forwards direction, starting from
the top of a block or the backwards direction from the end of the
- block. The main difference is that if you go forwards, the uses
- are examined first then the defs, and if you go backwards, the defs
- are examined first then the uses.
+ block. If you go backwards, defs are examined first to clear bits,
+ then uses are examined to set bits. If you go forwards, defs are
+ examined first to set bits, then REG_DEAD and REG_UNUSED notes
+ are examined to clear bits. In either case, the result of examining
+ a def can be undone (respectively by a use or a REG_UNUSED note).
If you start at the top of the block, use one of DF_LIVE_IN or
DF_LR_IN. If you start at the bottom of the block use one of
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_set_bit (defs, DF_REF_REGNO (def));
+ }
+}
+
+/* Find the set of real DEFs, which are not clobbers, for INSN.  Set
+   in DEFS the regno of every def of INSN that is not flagged
+   DF_REF_MUST_CLOBBER or DF_REF_MAY_CLOBBER.  DEFS is not cleared
+   first; bits are only added.  */
+
+void
+df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
+{
+  df_ref *def_rec;
+  unsigned int uid = INSN_UID (insn);
+
+  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+    {
+      df_ref def = *def_rec;
+      if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
+	bitmap_set_bit (defs, DF_REF_REGNO (def));
+    }
+}
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
bitmap_clear_bit (live, dregno);
}
-}
+}
/* Simulate the effects of the uses of INSN on LIVE. */
-void
+void
df_simulate_uses (rtx insn, bitmap live)
{
df_ref *use_rec;
unsigned int uid = INSN_UID (insn);
+ if (DEBUG_INSN_P (insn))
+ return;
+
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
df_simulate_initialize_backwards should be called first with a
bitvector copyied from the DF_LIVE_OUT or DF_LR_OUT. Then
df_simulate_one_insn_backwards should be called for each insn in
- the block, starting with the last on. Finally,
+ the block, starting with the last one. Finally,
df_simulate_finalize_backwards can be called to get a new value
of the sets at the top of the block (this is rarely used).
----------------------------------------------------------------------------*/
/* Apply the artificial uses and defs at the end of BB in a backwards
direction. */
-void
+void
df_simulate_initialize_backwards (basic_block bb, bitmap live)
{
df_ref *def_rec;
df_ref *use_rec;
int bb_index = bb->index;
-
+
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
df_ref def = *def_rec;
/* Simulate the backwards effects of INSN on the bitmap LIVE. */
-void
+void
df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
{
- if (! INSN_P (insn))
- return;
-
+ if (!NONDEBUG_INSN_P (insn))
+ return;
+
df_simulate_defs (insn, live);
df_simulate_uses (insn, live);
df_simulate_fixup_sets (bb, live);
/* Apply the artificial uses and defs at the top of BB in a backwards
direction. */
-void
+void
df_simulate_finalize_backwards (basic_block bb, bitmap live)
{
df_ref *def_rec;
df_ref *use_rec;
#endif
int bb_index = bb->index;
-
+
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
df_ref def = *def_rec;
/*----------------------------------------------------------------------------
The following three functions are used only for FORWARDS scanning:
i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
- Thus it is important to add the DF_NOTES problem to the stack of
+ Thus it is important to add the DF_NOTES problem to the stack of
problems computed before using these functions.
df_simulate_initialize_forwards should be called first with a
bitvector copyied from the DF_LIVE_IN or DF_LR_IN. Then
df_simulate_one_insn_forwards should be called for each insn in
- the block, starting with the last on. Finally,
- df_simulate_finalize_forwards can be called to get a new value
- of the sets at the bottom of the block (this is rarely used).
+ the block, starting with the first one.
----------------------------------------------------------------------------*/
-/* Apply the artificial uses and defs at the top of BB in a backwards
- direction. */
+/* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
+ DF_LR_IN for basic block BB, for forward scanning by marking artificial
+ defs live. */
-void
+void
df_simulate_initialize_forwards (basic_block bb, bitmap live)
{
df_ref *def_rec;
int bb_index = bb->index;
-
+
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ bitmap_set_bit (live, DF_REF_REGNO (def));
}
}
-/* Simulate the backwards effects of INSN on the bitmap LIVE. */
+/* Simulate the forwards effects of INSN on the bitmap LIVE. */
-void
+void
df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
{
rtx link;
if (! INSN_P (insn))
- return;
+ return;
- /* Make sure that the DF_NOTES really is an active df problem. */
+ /* Make sure that DF_NOTE really is an active df problem. */
gcc_assert (df_note);
- df_simulate_defs (insn, live);
+ /* Note that this is the opposite as how the problem is defined, because
+ in the LR problem defs _kill_ liveness. However, they do so backwards,
+ while here the scan is performed forwards! So, first assume that the
+ def is live, and if this is not true REG_UNUSED notes will rectify the
+ situation. */
+ df_simulate_find_noclobber_defs (insn, live);
/* Clear all of the registers that go dead. */
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
while (--n >= 0)
bitmap_clear_bit (live, regno + n);
}
- else
+ else
bitmap_clear_bit (live, regno);
}
break;
}
-/* Apply the artificial uses and defs at the end of BB in a backwards
- direction. */
-
-void
-df_simulate_finalize_forwards (basic_block bb, bitmap live)
-{
- df_ref *def_rec;
- int bb_index = bb->index;
-
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- df_ref def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
- }
-}
-
-
\f
/*----------------------------------------------------------------------------
MULTIPLE DEFINITIONS
Find the locations in the function reached by multiple definition sites
- for a pseudo. In and out bitvectors are built for each basic
- block.
+ for a live pseudo. In and out bitvectors are built for each basic
+ block. They are restricted for efficiency to live registers.
The gen and kill sets for the problem are obvious. Together they
include all defined registers in a basic block; the gen set includes
init-set of BB3 includes r10 and r12, but not r11. Note that we do
not need to iterate the dominance frontier, because we do not insert
anything like PHI functions there! Instead, dataflow will take care of
- propagating the information to BB3's successors.
+ propagating the information to BB3's successors.
---------------------------------------------------------------------------*/
/* Scratch var used by transfer functions. This is used to do md analysis
/* Set basic block info. */
static void
-df_md_set_bb_info (unsigned int index,
+df_md_set_bb_info (unsigned int index,
struct df_md_bb_info *bb_info)
{
gcc_assert (df_md);
static void
-df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
+df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
/* Allocate or reset bitmaps for DF_MD. The solution bits are
not touched unless the block is new. */
-static void
+static void
df_md_alloc (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
if (!df_md->block_pool)
- df_md->block_pool = create_alloc_pool ("df_md_block pool",
+ df_md->block_pool = create_alloc_pool ("df_md_block pool",
sizeof (struct df_md_bb_info), 50);
df_grow_bb_info (df_md);
{
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
if (bb_info)
- {
+ {
bitmap_clear (bb_info->init);
bitmap_clear (bb_info->gen);
bitmap_clear (bb_info->kill);
bitmap_clear (bb_info->out);
}
else
- {
+ {
bb_info = (struct df_md_bb_info *) pool_alloc (df_md->block_pool);
df_md_set_bb_info (bb_index, bb_info);
bb_info->init = BITMAP_ALLOC (NULL);
}
static void
-df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
+df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
df_ref *def_rec,
int top_flag)
{
/* Artificials are only hard regs. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
- df_md_bb_local_compute_process_def (bb_info,
+ df_md_bb_local_compute_process_def (bb_info,
df_get_artificial_defs (bb_index),
DF_REF_AT_TOP);
}
if (!(df->changeable_flags & DF_NO_HARD_REGS))
- df_md_bb_local_compute_process_def (bb_info,
+ df_md_bb_local_compute_process_def (bb_info,
df_get_artificial_defs (bb_index),
0);
}
{
df_md_bb_local_compute (bb_index);
}
-
+
BITMAP_FREE (seen_in_insn);
frontiers = XNEWVEC (bitmap, last_basic_block);
/* Reset the global solution for recalculation. */
-static void
+static void
df_md_reset (bitmap all_blocks)
{
unsigned int bb_index;
bitmap gen = bb_info->gen;
bitmap kill = bb_info->kill;
- /* We need to use a scratch set here so that the value returned from
- this function invocation properly reflects if the sets changed in
- a significant way; i.e. not just because the live set was anded
- in. */
+ /* We need to use a scratch set here so that the value returned from this
+ function invocation properly reflects whether the sets changed in a
+ significant way; i.e. not just because the live set was anded in. */
bitmap_and (df_md_scratch, gen, df_get_live_out (bb));
/* Multiple definitions of a register are not relevant if it is not
- used. Thus we trim the result to the places where it is live. */
+ live. Thus we trim the result to the places where it is live. */
bitmap_and_into (in, df_get_live_in (bb));
return bitmap_ior_and_compl (out, df_md_scratch, in, kill);
/* Initialize the solution bit vectors for problem. */
-static void
+static void
df_md_init (bitmap all_blocks)
{
unsigned int bb_index;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
-
+
bitmap_copy (bb_info->in, bb_info->init);
df_md_transfer_function (bb_index);
}
{
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
bitmap_copy (bb_info->in, bb_info->init);
-}
+}
/* In of target gets or of out of source. */
bitmap op1 = df_md_get_bb_info (e->dest->index)->in;
bitmap op2 = df_md_get_bb_info (e->src->index)->out;
- if (e->flags & EDGE_FAKE)
+ if (e->flags & EDGE_FAKE)
return;
if (e->flags & EDGE_EH)
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
if (!bb_info || !bb_info->in)
return;
-
+
fprintf (file, ";; md in \t");
df_print_regset (file, bb_info->in);
fprintf (file, ";; md init \t");
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
if (!bb_info || !bb_info->out)
return;
-
+
fprintf (file, ";; md out \t");
df_print_regset (file, bb_info->out);
-}
+}
static struct df_problem problem_MD =
{
df_md_local_compute, /* Local compute function. */
df_md_init, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
- df_md_confluence_0, /* Confluence operator 0. */
- df_md_confluence_n, /* Confluence operator n. */
+ df_md_confluence_0, /* Confluence operator 0. */
+ df_md_confluence_n, /* Confluence operator n. */
df_md_transfer_function, /* Transfer function. */
NULL, /* Finalize function. */
df_md_free, /* Free all of the problem information. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
- TV_DF_MD, /* Timing variable. */
+ TV_DF_MD, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};