+/* Helper function for memory_address_same via walk_tree. Returns
+ non-NULL if it finds an SSA_NAME which is part of the address,
+ such that the definition of the SSA_NAME post-dominates the store
+ we want to delete but not the store that we believe makes it
+ redundant. This indicates that the address may change between
+ the two stores.
+
+ EXPR_P points at the subtree being visited; DATA is a
+ struct address_walk_data holding the basic blocks of the two
+ stores (store1 = the candidate for deletion, store2 = the later
+ store believed to make it redundant). A non-NULL return value
+ terminates the walk, which memory_address_same treats as
+ "address may differ". */
+
+static tree
+memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
+ void *data)
+{
+ struct address_walk_data *walk_data = (struct address_walk_data *) data;
+ tree expr = *expr_p;
+ gimple def_stmt;
+ basic_block def_bb;
+
+ /* Only SSA_NAMEs contributing to the address are of interest;
+ keep walking past every other kind of tree node. */
+ if (TREE_CODE (expr) != SSA_NAME)
+ return NULL_TREE;
+
+ /* If we've found a default definition, then there's no problem. Both
+ stores will post-dominate it. And def_bb will be NULL. */
+ if (SSA_NAME_IS_DEFAULT_DEF (expr))
+ return NULL_TREE;
+
+ def_stmt = SSA_NAME_DEF_STMT (expr);
+ def_bb = gimple_bb (def_stmt);
+
+ /* DEF_STMT must dominate both stores. So if it is in the same
+ basic block as one, it does not post-dominate that store. */
+ if (walk_data->store1_bb != def_bb
+ && dominated_by_p (CDI_POST_DOMINATORS, walk_data->store1_bb, def_bb))
+ {
+ /* The definition post-dominates store1. If it does not also
+ post-dominate store2 (or shares store2's block), the address
+ may be redefined between the two stores. */
+ if (walk_data->store2_bb == def_bb
+ || !dominated_by_p (CDI_POST_DOMINATORS, walk_data->store2_bb,
+ def_bb))
+ /* Return non-NULL to stop the walk. */
+ return *expr_p;
+ }
+
+ return NULL_TREE;
+}
+
+/* Return TRUE if the destination memory address in STORE1 and STORE2
+ might be modified after STORE1, before control reaches STORE2.
+
+ Walks the address expression of STORE1's LHS looking for an
+ SSA_NAME whose definition could fall between the two stores
+ (see memory_ssa_name_same); a NULL walk result means the address
+ is provably unchanged. */
+
+static bool
+memory_address_same (gimple store1, gimple store2)
+{
+ struct address_walk_data walk_data;
+ tree offending_name;
+
+ walk_data.store1_bb = gimple_bb (store1);
+ walk_data.store2_bb = gimple_bb (store2);
+
+ /* A non-NULL result names an SSA_NAME in the address whose
+ definition does not safely separate the two stores. */
+ offending_name = walk_tree (gimple_assign_lhs_ptr (store1),
+ memory_ssa_name_same, &walk_data, NULL);
+
+ return offending_name == NULL;
+}
+
+/* Return true if there is a stmt that kills the lhs of STMT and is in the
+ virtual def-use chain of STMT without a use in between the kill and STMT.
+ Returns false if no such stmt is found.
+ *FIRST_USE_P is set to the first use of the single virtual def of
+ STMT. *USE_P is set to the vop killed by *USE_STMT. */
+
+static bool
+get_kill_of_stmt_lhs (gimple stmt,
+ use_operand_p * first_use_p,
+ use_operand_p * use_p, gimple * use_stmt)
+{
+ tree lhs;
+
+ gcc_assert (is_gimple_assign (stmt));
+
+ lhs = gimple_assign_lhs (stmt);
+
+ /* We now walk the chain of single uses of the single VDEFs.
+ We succeeded finding a kill if the lhs of the use stmt is
+ equal to the original lhs. We can keep walking to the next
+ use if there are no possible uses of the original lhs in
+ the stmt. */
+ do
+ {
+ tree use_lhs;
+ def_operand_p def_p;
+
+ /* The stmt must have a single VDEF. */
+ def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_VDEF);
+ if (def_p == NULL_DEF_OPERAND_P)
+ return false;
+
+ /* Get the single immediate use of the def. This also advances
+ STMT to the using statement for the next loop iteration. */
+ if (!single_imm_use (DEF_FROM_PTR (def_p), first_use_p, &stmt))
+ return false;
+ /* Redirect FIRST_USE_P at the caller's USE_P slot: the very
+ first use has already been stored through the original
+ FIRST_USE_P above, and every later single_imm_use call must
+ record its use in *USE_P instead, so that on success *USE_P
+ holds the vop killed by *USE_STMT. */
+ first_use_p = use_p;
+
+ /* If there are possible hidden uses, give up. Anything other
+ than a plain single assign from an SSA_NAME or an invariant
+ might read the original lhs. */
+ if (!gimple_assign_single_p (stmt)
+ || (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME
+ && !is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
+ return false;
+
+ /* If the use stmts lhs matches the original lhs we have
+ found the kill, otherwise continue walking. */
+ use_lhs = gimple_assign_lhs (stmt);
+ if (operand_equal_p (use_lhs, lhs, 0))
+ {
+ *use_stmt = stmt;
+ return true;
+ }
+ }
+ while (1);
+}
+
+/* A helper of dse_optimize_stmt.
+ Given a GIMPLE_ASSIGN in STMT, check that each VDEF has one
+ use, and that one use is another VDEF clobbering the first one.
+
+ On success, *USE_STMT is the single statement using all of STMT's
+ virtual definitions, and *FIRST_USE_P / *USE_P refer to the last
+ use operand examined. On failure, STMT's uid is recorded in the
+ DSE_GD/BD store bitmaps so later passes over the block know it
+ was visited.
+
+ Return TRUE if the above conditions are met, otherwise FALSE. */
+
+static bool
+dse_possible_dead_store_p (gimple stmt,
+ use_operand_p *first_use_p,
+ use_operand_p *use_p,
+ gimple *use_stmt,
+ struct dse_global_data *dse_gd,
+ struct dse_block_local_data *bd)
+{
+ ssa_op_iter op_iter;
+ bool fail = false;
+ def_operand_p var1;
+ vuse_vec_p vv;
+ tree defvar = NULL_TREE;
+ /* NOTE(review): prev_defvar is assigned below but never read in
+ this function — apparently a leftover; candidate for removal. */
+ tree prev_defvar = NULL_TREE;
+ gimple temp;
+
+ /* We want to verify that each virtual definition in STMT has
+ precisely one use and that all the virtual definitions are
+ used by the same single statement. When complete, we
+ want USE_STMT to refer to the one statement which uses
+ all of the virtual definitions from STMT. */
+ *use_stmt = NULL;
+ FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
+ {
+ defvar = DEF_FROM_PTR (var1);
+
+ /* If this virtual def does not have precisely one use, then
+ we will not be able to eliminate STMT. */
+ if (!has_single_use (defvar))
+ {
+ fail = true;
+ break;
+ }
+
+ /* Get the one and only immediate use of DEFVAR. The return
+ value is not checked because has_single_use above guarantees
+ a use exists; the assert double-checks the operand. */
+ single_imm_use (defvar, use_p, &temp);
+ gcc_assert (*use_p != NULL_USE_OPERAND_P);
+ *first_use_p = *use_p;
+
+ /* ??? If we hit a GIMPLE_PHI we could skip to the PHI_RESULT uses.
+ Don't bother to do that for now. */
+ if (gimple_code (temp) == GIMPLE_PHI)
+ {
+ fail = true;
+ break;
+ }
+
+ /* In the case of memory partitions, we may get:
+
+ # MPT.764_162 = VDEF <MPT.764_161(D)>
+ x = {};
+ # MPT.764_167 = VDEF <MPT.764_162>
+ y = {};
+
+ So we must make sure we're talking about the same LHS.
+ */
+ if (is_gimple_assign (temp))
+ {
+ tree base1 = get_base_address (gimple_assign_lhs (stmt));
+ tree base2 = get_base_address (gimple_assign_lhs (temp));
+
+ /* Strip indirections so the underlying pointers (or decls)
+ can be compared directly. */
+ while (base1 && INDIRECT_REF_P (base1))
+ base1 = TREE_OPERAND (base1, 0);
+ while (base2 && INDIRECT_REF_P (base2))
+ base2 = TREE_OPERAND (base2, 0);
+
+ if (base1 != base2)
+ {
+ fail = true;
+ break;
+ }
+ }
+
+ /* If the immediate use of DEF_VAR is not the same as the
+ previously found immediate uses, then we will not be able
+ to eliminate STMT. */
+ if (*use_stmt == NULL)
+ {
+ *use_stmt = temp;
+ prev_defvar = defvar;
+ }
+ else if (temp != *use_stmt)
+ {
+ fail = true;
+ break;
+ }
+ }
+
+ if (fail)
+ {
+ /* Mark STMT as visited so the dominator walk does not retry it. */
+ record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
+ return false;
+ }
+
+ return true;
+}
+
+