PR rtl-optimization/30907
* fwprop.c (forward_propagate_into): Never propagate inside a loop.
(fwprop_init): Always call loop_optimizer_initialize.
(fwprop_done): Always call loop_optimizer_finalize.
(fwprop): We always have loop info now.
(gate_fwprop_addr): Remove.
(pass_fwprop_addr): Use gate_fwprop as gate.
PR rtl-optimization/30841
* df-problems.c (df_ru_local_compute, df_rd_local_compute,
df_chain_alloc): Call df_reorganize_refs unconditionally.
* df-scan.c (df_rescan_blocks, df_reorganize_refs): Change
refs_organized to refs_organized_size.
(df_ref_create_structure): Use refs_organized_size instead of
bitmap_size if refs had been organized, and keep refs_organized_size
up-to-date.
* df.h (struct df_ref_info): Change refs_organized to
refs_organized_size.
(DF_DEFS_SIZE, DF_USES_SIZE): Use refs_organized_size instead of
bitmap_size.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@123084 138bc75d-0d04-0410-961f-82ee72b054a4
+2007-03-19 Paolo Bonzini <bonzini@gnu.org>
+
+ PR rtl-optimization/30907
+ * fwprop.c (forward_propagate_into): Never propagate inside a loop.
+ (fwprop_init): Always call loop_optimizer_initialize.
+ (fwprop_done): Always call loop_optimizer_finalize.
+ (fwprop): We always have loop info now.
+ (gate_fwprop_addr): Remove.
+ (pass_fwprop_addr): Use gate_fwprop as gate.
+
+ PR rtl-optimization/30841
+ * df-problems.c (df_ru_local_compute, df_rd_local_compute,
+ df_chain_alloc): Call df_reorganize_refs unconditionally.
+ * df-scan.c (df_rescan_blocks, df_reorganize_refs): Change
+ refs_organized to refs_organized_size.
+ (df_ref_create_structure): Use refs_organized_size instead of
+ bitmap_size if refs had been organized, and keep refs_organized_size
+ up-to-date.
+ * df.h (struct df_ref_info): Change refs_organized to
+ refs_organized_size.
+ (DF_DEFS_SIZE, DF_USES_SIZE): Use refs_organized_size instead of
+ bitmap_size.
+
2007-03-19 Mark Mitchell <mark@codesourcery.com>
* except.c (output_function_exception_table): Do not reference the
bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
df_set_seen ();
-
- if (!df->use_info.refs_organized)
- df_reorganize_refs (&df->use_info);
+ df_reorganize_refs (&df->use_info);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
df_set_seen ();
-
- if (!df->def_info.refs_organized)
- df_reorganize_refs (&df->def_info);
+ df_reorganize_refs (&df->def_info);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
if (dflow->flags & DF_DU_CHAIN)
{
- if (!df->def_info.refs_organized)
- df_reorganize_refs (&df->def_info);
+ df_reorganize_refs (&df->def_info);
/* Clear out the pointers from the refs. */
for (i = 0; i < DF_DEFS_SIZE (df); i++)
if (dflow->flags & DF_UD_CHAIN)
{
- if (!df->use_info.refs_organized)
- df_reorganize_refs (&df->use_info);
+ df_reorganize_refs (&df->use_info);
for (i = 0; i < DF_USES_SIZE (df); i++)
{
struct df_ref *ref = df->use_info.refs[i];
struct dataflow *dflow = df->problems_by_index[DF_SCAN];
basic_block bb;
- df->def_info.refs_organized = false;
- df->use_info.refs_organized = false;
+ df->def_info.refs_organized_size = 0;
+ df->use_info.refs_organized_size = 0;
if (blocks)
{
unsigned int offset = 0;
unsigned int size = 0;
- if (ref_info->refs_organized)
+ if (ref_info->refs_organized_size)
return;
if (ref_info->refs_size < ref_info->bitmap_size)
reset it now that we have squished out all of the empty
slots. */
ref_info->bitmap_size = size;
- ref_info->refs_organized = true;
+ ref_info->refs_organized_size = size;
ref_info->add_refs_inline = true;
}
case DF_REF_REG_DEF:
{
struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
- reg_info->n_refs++;
+ unsigned int size = df->def_info.refs_organized_size
+ ? df->def_info.refs_organized_size
+ : df->def_info.bitmap_size;
/* Add the ref to the reg_def chain. */
+ reg_info->n_refs++;
df_reg_chain_create (reg_info, this_ref);
- DF_REF_ID (this_ref) = df->def_info.bitmap_size;
+ DF_REF_ID (this_ref) = size;
if (df->def_info.add_refs_inline)
{
- if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
+ if (size >= df->def_info.refs_size)
{
- int new_size = df->def_info.bitmap_size
- + df->def_info.bitmap_size / 4;
+ int new_size = size + size / 4;
df_grow_ref_info (&df->def_info, new_size);
}
/* Add the ref to the big array of defs. */
- DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
- df->def_info.refs_organized = false;
+ DF_DEFS_SET (df, size, this_ref);
+ if (df->def_info.refs_organized_size)
+ df->def_info.refs_organized_size++;
}
df->def_info.bitmap_size++;
case DF_REF_REG_USE:
{
struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
- reg_info->n_refs++;
+ unsigned int size = df->use_info.refs_organized_size
+ ? df->use_info.refs_organized_size
+ : df->use_info.bitmap_size;
/* Add the ref to the reg_use chain. */
+ reg_info->n_refs++;
df_reg_chain_create (reg_info, this_ref);
- DF_REF_ID (this_ref) = df->use_info.bitmap_size;
+ DF_REF_ID (this_ref) = size;
if (df->use_info.add_refs_inline)
{
- if (DF_USES_SIZE (df) >= df->use_info.refs_size)
+ if (size >= df->use_info.refs_size)
{
- int new_size = df->use_info.bitmap_size
- + df->use_info.bitmap_size / 4;
+ int new_size = size + size / 4;
df_grow_ref_info (&df->use_info, new_size);
}
/* Add the ref to the big array of defs. */
- DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
- df->use_info.refs_organized = false;
+ DF_USES_SET (df, size, this_ref);
+	  if (df->use_info.refs_organized_size)
+	    df->use_info.refs_organized_size++;
}
df->use_info.bitmap_size++;
unsigned int refs_size; /* Size of currently allocated refs table. */
unsigned int bitmap_size; /* Number of refs seen. */
- /* True if refs table is organized so that every reference for a
+ /* >0 if refs table is organized so that every reference for a
pseudo is contiguous. */
- bool refs_organized;
+ unsigned int refs_organized_size;
/* True if the next refs should be added immediately or false to
defer to later to reorganize the table. */
bool add_refs_inline;
|| DF_REF_REG_MEM_LOAD_P (REF))
/* Macros to get the refs out of def_info or use_info refs table. */
-#define DF_DEFS_SIZE(DF) ((DF)->def_info.bitmap_size)
+#define DF_DEFS_SIZE(DF) ((DF)->def_info.refs_organized_size)
#define DF_DEFS_GET(DF,ID) ((DF)->def_info.refs[(ID)])
#define DF_DEFS_SET(DF,ID,VAL) ((DF)->def_info.refs[(ID)]=(VAL))
-#define DF_USES_SIZE(DF) ((DF)->use_info.bitmap_size)
+#define DF_USES_SIZE(DF) ((DF)->use_info.refs_organized_size)
#define DF_USES_GET(DF,ID) ((DF)->use_info.refs[(ID)])
#define DF_USES_SET(DF,ID,VAL) ((DF)->use_info.refs[(ID)]=(VAL))
if (DF_REF_FLAGS (def) & DF_REF_ARTIFICIAL)
return;
- /* Do not propagate loop invariant definitions inside the loop if
- we are going to unroll. */
- if (current_loops
- && DF_REF_BB (def)->loop_father != DF_REF_BB (use)->loop_father)
+ /* Do not propagate loop invariant definitions inside the loop. */
+ if (DF_REF_BB (def)->loop_father != DF_REF_BB (use)->loop_father)
return;
/* Check if the use is still present in the insn! */
loops and be careful about them. But we have to call flow_loops_find
before df_analyze, because flow_loops_find may introduce new jump
insns (sadly) if we are not working in cfglayout mode. */
- if (flag_rerun_cse_after_loop && (flag_unroll_loops || flag_peel_loops))
- loop_optimizer_init (0);
+ loop_optimizer_init (0);
/* Now set up the dataflow problem (we only want use-def chains) and
put the dataflow solver to work. */
fwprop_done (void)
{
df_finish (df);
-
- if (flag_rerun_cse_after_loop && (flag_unroll_loops || flag_peel_loops))
- loop_optimizer_finalize ();
-
+ loop_optimizer_finalize ();
free_dominance_info (CDI_DOMINATORS);
cleanup_cfg (0);
delete_trivially_dead_insns (get_insns (), max_reg_num ());
{
struct df_ref *use = DF_USES_GET (df, i);
if (use)
- if (!current_loops
- || DF_REF_TYPE (use) == DF_REF_REG_USE
+ if (DF_REF_TYPE (use) == DF_REF_REG_USE
|| DF_REF_BB (use)->loop_father == NULL)
forward_propagate_into (use);
}
0 /* letter */
};
-static bool
-gate_fwprop_addr (void)
-{
- return optimize > 0 && flag_forward_propagate && flag_rerun_cse_after_loop
- && (flag_unroll_loops || flag_peel_loops);
-}
-
static unsigned int
fwprop_addr (void)
{
struct tree_opt_pass pass_rtl_fwprop_addr =
{
"fwprop2", /* name */
- gate_fwprop_addr, /* gate */
+ gate_fwprop, /* gate */
fwprop_addr, /* execute */
NULL, /* sub */
NULL, /* next */