/* Global common subexpression elimination/Partial redundancy elimination
and global constant/copy propagation for GNU compiler.
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
- 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+ 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
This file is part of GCC.
modified. Here we want to search from INSN+1 on, but
oprs_available_p searches from INSN on. */
&& (insn == BB_END (BLOCK_FOR_INSN (insn))
- || (tmp = next_nonnote_insn (insn)) == NULL_RTX
+ || (tmp = next_nonnote_nondebug_insn (insn)) == NULL_RTX
|| BLOCK_FOR_INSN (tmp) != BLOCK_FOR_INSN (insn)
|| oprs_available_p (pat, tmp)))
insert_set_in_table (pat, insn, table);
determine when registers and memory are first and last set. */
FOR_BB_INSNS (current_bb, insn)
{
- if (! INSN_P (insn))
+ if (!NONDEBUG_INSN_P (insn))
continue;
if (CALL_P (insn))
/* The next pass builds the hash table. */
FOR_BB_INSNS (current_bb, insn)
- if (INSN_P (insn))
+ if (NONDEBUG_INSN_P (insn))
hash_scan_insn (insn, table);
}
&& validate_change (insn, &SET_SRC (set), src, 0))
success = 1;
- /* If we've failed to do replacement, have a single SET, don't already
- have a note, and have no special SET, add a REG_EQUAL note to not
- lose information. */
- if (!success && note == 0 && set != 0
- && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
- && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
+ /* If we've failed to perform the replacement, have a single SET to
+ a REG destination and don't yet have a note, add a REG_EQUAL note
+ to not lose information. */
+ if (!success && note == 0 && set != 0 && REG_P (SET_DEST (set)))
note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
}
sbitmap_free (prune_exprs);
}
+/* It may be necessary to insert a large number of insns on edges to
+ make the existing occurrences of expressions fully redundant. This
+ routine examines the set of insertions and deletions and if the ratio
+ of insertions to deletions is too high for a particular expression, then
+ the expression is removed from the insertion/deletion sets.
+
+ N_ELEMS is the number of elements in the hash table. */
+
+static void
+prune_insertions_deletions (int n_elems)
+{
+ sbitmap_iterator sbi;
+ sbitmap prune_exprs;
+
+ /* We always use I to iterate over blocks/edges and J to iterate over
+ expressions. */
+ unsigned int i, j;
+
+ /* Counts for the number of times an expression needs to be inserted and
+ number of times an expression can be removed as a result.
+ These are transient scratch vectors released with free () below, so
+ they must come from the xmalloc heap (XCNEWVEC), not the garbage
+ collector (GCNEWVEC) -- free () on GGC-managed memory is invalid. */
+ int *insertions = XCNEWVEC (int, n_elems);
+ int *deletions = XCNEWVEC (int, n_elems);
+
+ /* Set of expressions which require too many insertions relative to
+ the number of deletions achieved. We will prune these out of the
+ insertion/deletion sets. */
+ prune_exprs = sbitmap_alloc (n_elems);
+ sbitmap_zero (prune_exprs);
+
+ /* Iterate over the edges counting the number of times each expression
+ needs to be inserted. */
+ for (i = 0; i < (unsigned) n_edges; i++)
+ {
+ EXECUTE_IF_SET_IN_SBITMAP (pre_insert_map[i], 0, j, sbi)
+ insertions[j]++;
+ }
+
+ /* Similarly for deletions, but those occur in blocks rather than on
+ edges. */
+ for (i = 0; i < (unsigned) last_basic_block; i++)
+ {
+ EXECUTE_IF_SET_IN_SBITMAP (pre_delete_map[i], 0, j, sbi)
+ deletions[j]++;
+ }
+
+ /* Now that we have accurate counts, iterate over the elements in the
+ hash table and see if any need too many insertions relative to the
+ number of evaluations that can be removed. If so, mark them in
+ PRUNE_EXPRS. */
+ for (j = 0; j < (unsigned) n_elems; j++)
+ if (deletions[j]
+ && ((unsigned) insertions[j] / deletions[j]) > MAX_GCSE_INSERTION_RATIO)
+ SET_BIT (prune_exprs, j);
+
+ /* Now prune PRE_INSERT_MAP and PRE_DELETE_MAP based on PRUNE_EXPRS. */
+ EXECUTE_IF_SET_IN_SBITMAP (prune_exprs, 0, j, sbi)
+ {
+ for (i = 0; i < (unsigned) n_edges; i++)
+ RESET_BIT (pre_insert_map[i], j);
+
+ for (i = 0; i < (unsigned) last_basic_block; i++)
+ RESET_BIT (pre_delete_map[i], j);
+ }
+
+ sbitmap_free (prune_exprs);
+ free (insertions);
+ free (deletions);
+}
+
+
/* Top level routine to do the dataflow analysis needed by PRE. */
static void
antloc = NULL;
sbitmap_vector_free (ae_kill);
ae_kill = NULL;
+
+ prune_insertions_deletions (expr_hash_table.n_elems);
}
\f
/* PRE utilities */
the new instruction just before the tablejump. */
if (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
- insn = prev_real_insn (insn);
+ insn = prev_active_insn (insn);
#ifdef HAVE_cc0
/* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
static unsigned int
execute_rtl_cprop (void)
{
+ int changed;
delete_unreachable_blocks ();
df_set_flags (DF_LR_RUN_DCE);
df_analyze ();
- flag_rerun_cse_after_global_opts |= one_cprop_pass ();
+ changed = one_cprop_pass ();
+ flag_rerun_cse_after_global_opts |= changed;
+ if (changed)
+ cleanup_cfg (0);
return 0;
}
static unsigned int
execute_rtl_pre (void)
{
+ int changed;
delete_unreachable_blocks ();
df_analyze ();
- flag_rerun_cse_after_global_opts |= one_pre_gcse_pass ();
+ changed = one_pre_gcse_pass ();
+ flag_rerun_cse_after_global_opts |= changed;
+ if (changed)
+ cleanup_cfg (0);
return 0;
}
static unsigned int
execute_rtl_hoist (void)
{
+ int changed;
delete_unreachable_blocks ();
df_analyze ();
- flag_rerun_cse_after_global_opts |= one_code_hoisting_pass ();
+ changed = one_code_hoisting_pass ();
+ flag_rerun_cse_after_global_opts |= changed;
+ if (changed)
+ cleanup_cfg (0);
return 0;
}
};
#include "gt-gcse.h"
+