You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "errors.h"
#include "ggc.h"
#include "tree.h"
#include "rtl.h"
bitmap stores;
};
+/* Basic blocks of the potentially dead store and the following
+ store, for memory_address_same. */
+struct address_walk_data
+{
+ basic_block store1_bb, store2_bb;
+};
+
static bool gate_dse (void);
static void tree_ssa_dse (void);
static void dse_initialize_block_local_data (struct dom_walk_data *,
return stmt_ann (stmt)->uid;
}
-/* Function indicating whether we ought to include information for 'var'
- when calculating immediate uses. For this pass we only want use
- information for virtual variables. */
-
-static bool
-need_imm_uses_for (tree var)
-{
- return !is_gimple_reg (var);
-}
-
-
/* Set bit UID in bitmaps GLOBAL and *LOCAL, creating *LOCAL as needed. */
+
static void
record_voperand_set (bitmap global, bitmap *local, unsigned int uid)
{
bitmap_set_bit (*local, uid);
bitmap_set_bit (global, uid);
}
+
/* Initialize block local data structures. */
static void
bool recycled)
{
struct dse_block_local_data *bd
- = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
+ = VEC_last (void_p, walk_data->block_data_stack);
/* If we are given a recycled block local data structure, ensure any
bitmap associated with the block is cleared. */
}
}
+/* Helper function for memory_address_same via walk_tree. Returns
+ non-NULL if it finds an SSA_NAME which is part of the address,
+ such that the definition of the SSA_NAME post-dominates the store
+ we want to delete but not the store that we believe makes it
+ redundant. This indicates that the address may change between
+ the two stores. */
+
+static tree
+memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
+ void *data)
+{
+ struct address_walk_data *walk_data = data;
+ tree expr = *expr_p;
+ tree def_stmt;
+ basic_block def_bb;
+
+ if (TREE_CODE (expr) != SSA_NAME)
+ return NULL_TREE;
+
+ /* If we've found a default definition, then there's no problem. Both
+ stores will post-dominate it. And def_bb will be NULL. */
+ if (expr == default_def (SSA_NAME_VAR (expr)))
+ return NULL_TREE;
+
+ def_stmt = SSA_NAME_DEF_STMT (expr);
+ def_bb = bb_for_stmt (def_stmt);
+
+ /* DEF_STMT must dominate both stores. So if it is in the same
+ basic block as one, it does not post-dominate that store. */
+ if (walk_data->store1_bb != def_bb
+ && dominated_by_p (CDI_POST_DOMINATORS, walk_data->store1_bb, def_bb))
+ {
+ if (walk_data->store2_bb == def_bb
+ || !dominated_by_p (CDI_POST_DOMINATORS, walk_data->store2_bb,
+ def_bb))
+ /* Return non-NULL to stop the walk. */
+ return def_stmt;
+ }
+
+ return NULL_TREE;
+}
+
+/* Return TRUE if the destination memory address in STORE1 and STORE2
+ might be modified after STORE1, before control reaches STORE2. */
+
+static bool
+memory_address_same (tree store1, tree store2)
+{
+ struct address_walk_data walk_data;
+
+ walk_data.store1_bb = bb_for_stmt (store1);
+ walk_data.store2_bb = bb_for_stmt (store2);
+
+ return (walk_tree (&TREE_OPERAND (store1, 0), memory_ssa_name_same,
+ &walk_data, NULL)
+ == NULL);
+}
+
/* Attempt to eliminate dead stores in the statement referenced by BSI.
A dead store is a store into a memory location which will later be
block_stmt_iterator bsi)
{
struct dse_block_local_data *bd
- = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
+ = VEC_last (void_p, walk_data->block_data_stack);
struct dse_global_data *dse_gd = walk_data->global_data;
tree stmt = bsi_stmt (bsi);
stmt_ann_t ann = stmt_ann (stmt);
- v_may_def_optype v_may_defs;
- get_stmt_operands (stmt);
- v_may_defs = V_MAY_DEF_OPS (ann);
-
- /* If this statement has no virtual uses, then there is nothing
+ /* If this statement has no virtual defs, then there is nothing
to do. */
- if (NUM_V_MAY_DEFS (v_may_defs) == 0)
+ if (ZERO_SSA_OPERANDS (stmt, (SSA_OP_VMAYDEF|SSA_OP_VMUSTDEF)))
return;
/* We know we have virtual definitions. If this is a MODIFY_EXPR that's
if (TREE_CODE (stmt) == MODIFY_EXPR)
{
- unsigned int num_uses = 0, count = 0;
use_operand_p first_use_p = NULL_USE_OPERAND_P;
- use_operand_p use_p;
- tree use, use_stmt;
+ use_operand_p use_p = NULL;
+ tree use, use_stmt, temp;
tree defvar = NULL_TREE, usevar = NULL_TREE;
+ bool fail = false;
use_operand_p var2;
def_operand_p var1;
ssa_op_iter op_iter;
- FOR_EACH_SSA_MAYDEF_OPERAND (var1, var2, stmt, op_iter)
- {
+ /* We want to verify that each virtual definition in STMT has
+ precisely one use and that all the virtual definitions are
+ used by the same single statement. When complete, we
+ want USE_STMT to refer to the one statement which uses
+ all of the virtual definitions from STMT. */
+ use_stmt = NULL;
+ FOR_EACH_SSA_MUST_AND_MAY_DEF_OPERAND (var1, var2, stmt, op_iter)
+ {
defvar = DEF_FROM_PTR (var1);
usevar = USE_FROM_PTR (var2);
- num_uses += num_imm_uses (defvar);
- count++;
- if (num_uses > 1 || count > 1)
- break;
- }
- if (count == 1 && num_uses == 1)
- {
- single_imm_use (defvar, &use_p, &use_stmt);
+ /* If this virtual def does not have precisely one use, then
+ we will not be able to eliminate STMT. */
+ if (num_imm_uses (defvar) != 1)
+ {
+ fail = true;
+ break;
+ }
+
+ /* Get the one and only immediate use of DEFVAR. */
+ single_imm_use (defvar, &use_p, &temp);
gcc_assert (use_p != NULL_USE_OPERAND_P);
first_use_p = use_p;
use = USE_FROM_PTR (use_p);
+
+      /* If the immediate use of DEFVAR is not the same as the
+	 previously found immediate use, then we will not be able
+	 to eliminate STMT.  */
+ if (use_stmt == NULL)
+ use_stmt = temp;
+ else if (temp != use_stmt)
+ {
+ fail = true;
+ break;
+ }
}
- else
+
+ if (fail)
{
record_voperand_set (dse_gd->stores, &bd->stores, ann->uid);
return;
represents the only use of this store.
Note this does not handle the case where the store has
- multiple V_MAY_DEFs which all reach a set of PHI nodes in the
+ multiple V_{MAY,MUST}_DEFs which all reach a set of PHI nodes in the
same block. */
while (use_p != NULL_USE_OPERAND_P
&& TREE_CODE (use_stmt) == PHI_NODE
&& bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt)))
{
+ /* A PHI node can both define and use the same SSA_NAME if
+ the PHI is at the top of a loop and the PHI_RESULT is
+ a loop invariant and copies have not been fully propagated.
+
+ The safe thing to do is exit assuming no optimization is
+ possible. */
+ if (SSA_NAME_DEF_STMT (PHI_RESULT (use_stmt)) == use_stmt)
+ return;
+
/* Skip past this PHI and loop again in case we had a PHI
chain. */
if (single_imm_use (PHI_RESULT (use_stmt), &use_p, &use_stmt))
}
/* If we have precisely one immediate use at this point, then we may
- have found redundant store. */
+     have found a redundant store.  Make sure that the stores are to
+ the same memory location. This includes checking that any
+ SSA-form variables in the address will have the same values. */
if (use_p != NULL_USE_OPERAND_P
&& bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
&& operand_equal_p (TREE_OPERAND (stmt, 0),
- TREE_OPERAND (use_stmt, 0), 0))
+ TREE_OPERAND (use_stmt, 0), 0)
+ && memory_address_same (stmt, use_stmt))
{
/* Make sure we propagate the ABNORMAL bit setting. */
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (first_use_p)))
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
- /* Then we need to fix the operand of the consuming stmt. */
- SET_USE (first_use_p, usevar);
if (dump_file && (dump_flags & TDF_DETAILS))
{
print_generic_expr (dump_file, bsi_stmt (bsi), dump_flags);
fprintf (dump_file, "'\n");
}
-
+ /* Then we need to fix the operand of the consuming stmt. */
+ FOR_EACH_SSA_MUST_AND_MAY_DEF_OPERAND (var1, var2, stmt, op_iter)
+ {
+ single_imm_use (DEF_FROM_PTR (var1), &use_p, &temp);
+ SET_USE (use_p, USE_FROM_PTR (var2));
+ }
/* Remove the dead store. */
bsi_remove (&bsi);
dse_record_phis (struct dom_walk_data *walk_data, basic_block bb)
{
struct dse_block_local_data *bd
- = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
+ = VEC_last (void_p, walk_data->block_data_stack);
struct dse_global_data *dse_gd = walk_data->global_data;
tree phi;
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- if (need_imm_uses_for (PHI_RESULT (phi)))
+ if (!is_gimple_reg (PHI_RESULT (phi)))
record_voperand_set (dse_gd->stores,
&bd->stores,
get_stmt_uid (phi));
basic_block bb ATTRIBUTE_UNUSED)
{
struct dse_block_local_data *bd
- = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);
+ = VEC_last (void_p, walk_data->block_data_stack);
struct dse_global_data *dse_gd = walk_data->global_data;
bitmap stores = dse_gd->stores;
unsigned int i;
walk_data.after_dom_children_before_stmts = NULL;
walk_data.after_dom_children_walk_stmts = NULL;
walk_data.after_dom_children_after_stmts = dse_finalize_block;
+ walk_data.interesting_blocks = NULL;
walk_data.block_local_data_size = sizeof (struct dse_block_local_data);
NULL, /* next */
0, /* static_pass_number */
TV_TREE_DSE, /* tv_id */
- PROP_cfg | PROP_ssa
+ PROP_cfg
+ | PROP_ssa
| PROP_alias, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_ggc_collect /* todo_flags_finish */
- | TODO_verify_ssa,
- 0 /* letter */
+ TODO_dump_func
+ | TODO_ggc_collect
+ | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
};