tree type_d = TREE_TYPE (dest);
tree type_o = TREE_TYPE (orig);
+ /* For memory partitions, copies are OK as long as the memory symbol
+ belongs to the partition. */
+ if (TREE_CODE (dest) == SSA_NAME
+ && TREE_CODE (SSA_NAME_VAR (dest)) == MEMORY_PARTITION_TAG)
+ return (TREE_CODE (orig) == SSA_NAME
+ && !is_gimple_reg (orig)
+ && (bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (dest)),
+ DECL_UID (SSA_NAME_VAR (orig)))
+ || SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)));
+
+ if (TREE_CODE (orig) == SSA_NAME
+ && TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG)
+ return (TREE_CODE (dest) == SSA_NAME
+ && !is_gimple_reg (dest)
+ && (bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (orig)),
+ DECL_UID (SSA_NAME_VAR (dest)))
+ || SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)));
+
/* Do not copy between types for which we *do* need a conversion. */
if (!tree_ssa_useless_type_conversion_1 (type_d, type_o))
return false;
&& POINTER_TYPE_P (type_d)
&& POINTER_TYPE_P (type_o))
{
- tree mt_dest = var_ann (SSA_NAME_VAR (dest))->type_mem_tag;
- tree mt_orig = var_ann (SSA_NAME_VAR (orig))->type_mem_tag;
+ tree mt_dest = symbol_mem_tag (SSA_NAME_VAR (dest));
+ tree mt_orig = symbol_mem_tag (SSA_NAME_VAR (orig));
if (mt_dest && mt_orig && mt_dest != mt_orig)
return false;
else if (!lang_hooks.types_compatible_p (type_d, type_o))
else if (get_alias_set (TREE_TYPE (type_d)) !=
get_alias_set (TREE_TYPE (type_o)))
return false;
+
+ /* Also verify flow-sensitive information is compatible. */
+ if (SSA_NAME_PTR_INFO (orig) && SSA_NAME_PTR_INFO (dest))
+ {
+ struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig);
+ struct ptr_info_def *dest_ptr_info = SSA_NAME_PTR_INFO (dest);
+
+ if (orig_ptr_info->name_mem_tag
+ && dest_ptr_info->name_mem_tag
+ && orig_ptr_info->pt_vars
+ && dest_ptr_info->pt_vars
+ && !bitmap_intersect_p (dest_ptr_info->pt_vars,
+ orig_ptr_info->pt_vars))
+ return false;
+ }
}
/* If the destination is a SSA_NAME for a virtual operand, then we have
var_ann_t new_ann = var_ann (new_sym);
var_ann_t orig_ann = var_ann (orig_sym);
+ /* No merging necessary when memory partitions are involved. */
+ if (factoring_name_p (new))
+ {
+ gcc_assert (!is_gimple_reg (orig_sym));
+ return;
+ }
+ else if (factoring_name_p (orig))
+ {
+ gcc_assert (!is_gimple_reg (new_sym));
+ return;
+ }
+
gcc_assert (POINTER_TYPE_P (TREE_TYPE (orig)));
gcc_assert (POINTER_TYPE_P (TREE_TYPE (new)));
== get_alias_set (TREE_TYPE (TREE_TYPE (orig_sym))));
#endif
- /* Synchronize the type tags. If both pointers had a tag and they
- are different, then something has gone wrong. Type tags can
+ /* Synchronize the symbol tags. If both pointers had a tag and they
+ are different, then something has gone wrong. Symbol tags can
always be merged because they are flow insensitive; all the SSA
- names of the same base DECL share the same type tag. */
- if (new_ann->type_mem_tag == NULL_TREE)
- new_ann->type_mem_tag = orig_ann->type_mem_tag;
- else if (orig_ann->type_mem_tag == NULL_TREE)
- orig_ann->type_mem_tag = new_ann->type_mem_tag;
+ names of the same base DECL share the same symbol tag. */
+ if (new_ann->symbol_mem_tag == NULL_TREE)
+ new_ann->symbol_mem_tag = orig_ann->symbol_mem_tag;
+ else if (orig_ann->symbol_mem_tag == NULL_TREE)
+ orig_ann->symbol_mem_tag = new_ann->symbol_mem_tag;
else
- gcc_assert (new_ann->type_mem_tag == orig_ann->type_mem_tag);
+ gcc_assert (new_ann->symbol_mem_tag == orig_ann->symbol_mem_tag);
/* Check that flow-sensitive information is compatible. Notice that
we may not merge flow-sensitive information here. This function
if (TREE_CODE (stmt) == PHI_NODE)
return !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (stmt));
- if (TREE_CODE (stmt) != MODIFY_EXPR)
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
return false;
- lhs = TREE_OPERAND (stmt, 0);
- rhs = TREE_OPERAND (stmt, 1);
+ lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+ rhs = GIMPLE_STMT_OPERAND (stmt, 1);
ann = stmt_ann (stmt);
/* If the statement has volatile operands, it won't generate a
/* Otherwise, the only statements that generate useful copies are
assignments whose RHS is just an SSA name that doesn't flow
through abnormal edges. */
- return TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs);
+ return (do_store_copy_prop
+ && TREE_CODE (lhs) == SSA_NAME)
+ || (TREE_CODE (rhs) == SSA_NAME
+ && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs));
}
}
-/* Dump the copy-of value for variable VAR to DUMP_FILE. */
+/* Dump the copy-of value for variable VAR to FILE. */
static void
-dump_copy_of (FILE *dump_file, tree var)
+dump_copy_of (FILE *file, tree var)
{
tree val;
sbitmap visited;
- print_generic_expr (dump_file, var, dump_flags);
+ print_generic_expr (file, var, dump_flags);
if (TREE_CODE (var) != SSA_NAME)
return;
sbitmap_zero (visited);
SET_BIT (visited, SSA_NAME_VERSION (var));
- fprintf (dump_file, " copy-of chain: ");
+ fprintf (file, " copy-of chain: ");
val = var;
- print_generic_expr (dump_file, val, 0);
- fprintf (dump_file, " ");
+ print_generic_expr (file, val, 0);
+ fprintf (file, " ");
while (copy_of[SSA_NAME_VERSION (val)].value)
{
- fprintf (dump_file, "-> ");
+ fprintf (file, "-> ");
val = copy_of[SSA_NAME_VERSION (val)].value;
- print_generic_expr (dump_file, val, 0);
- fprintf (dump_file, " ");
+ print_generic_expr (file, val, 0);
+ fprintf (file, " ");
if (TEST_BIT (visited, SSA_NAME_VERSION (val)))
break;
SET_BIT (visited, SSA_NAME_VERSION (val));
val = get_copy_of_val (var)->value;
if (val == NULL_TREE)
- fprintf (dump_file, "[UNDEFINED]");
+ fprintf (file, "[UNDEFINED]");
else if (val != var)
- fprintf (dump_file, "[COPY]");
+ fprintf (file, "[COPY]");
else
- fprintf (dump_file, "[NOT A COPY]");
+ fprintf (file, "[NOT A COPY]");
sbitmap_free (visited);
}
/* Evaluate the RHS of STMT. If it produces a valid copy, set the LHS
value and store the LHS into *RESULT_P. If STMT generates more
than one name (i.e., STMT is an aliased store), it is enough to
- store the first name in the V_MAY_DEF list into *RESULT_P. After
+ store the first name in the VDEF list into *RESULT_P. After
all, the names generated will be VUSEd in the same statements. */
static enum ssa_prop_result
tree lhs, rhs;
prop_value_t *rhs_val;
- lhs = TREE_OPERAND (stmt, 0);
- rhs = TREE_OPERAND (stmt, 1);
+ lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+ rhs = GIMPLE_STMT_OPERAND (stmt, 1);
gcc_assert (TREE_CODE (rhs) == SSA_NAME);
if (!may_propagate_copy (lhs, rhs))
return SSA_PROP_VARYING;
- /* Avoid copy propagation from an inner into an outer loop.
- Otherwise, this may move loop variant variables outside of
- their loops and prevent coalescing opportunities. If the
- value was loop invariant, it will be hoisted by LICM and
- exposed for copy propagation. */
- if (loop_depth_of_name (rhs) > loop_depth_of_name (lhs))
- return SSA_PROP_VARYING;
-
/* Notice that in the case of assignments, we make the LHS be a
copy of RHS's value, not of RHS itself. This avoids keeping
unnecessary copy-of chains (assignments cannot be in a cycle
}
else if (stmt_makes_single_store (stmt))
{
- /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
- to be a copy of RHS. */
+ /* Otherwise, set the names in VDEF operands to be a copy
+ of RHS. */
ssa_op_iter i;
tree vdef;
bool changed;
static enum ssa_prop_result
copy_prop_visit_stmt (tree stmt, edge *taken_edge_p, tree *result_p)
{
- stmt_ann_t ann;
enum ssa_prop_result retval;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\n");
}
- ann = stmt_ann (stmt);
-
- if (TREE_CODE (stmt) == MODIFY_EXPR
- && TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == SSA_NAME
&& (do_store_copy_prop
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
+ || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME))
{
/* If the statement is a copy assignment, evaluate its RHS to
see if the lattice value of its output has changed. */
retval = copy_prop_visit_assignment (stmt, result_p);
}
+ else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME
+ && do_store_copy_prop
+ && stmt_makes_single_load (stmt))
+ {
+ /* If the statement is a copy assignment with a memory load
+ on the RHS, see if we know the value of this load and
+ update the lattice accordingly. */
+ prop_value_t *val = get_value_loaded_by (stmt, copy_of);
+ if (val
+ && val->mem_ref
+ && is_gimple_reg (val->value)
+ && operand_equal_p (val->mem_ref, GIMPLE_STMT_OPERAND (stmt, 1), 0))
+ {
+ bool changed;
+ changed = set_copy_of_val (GIMPLE_STMT_OPERAND (stmt, 0),
+ val->value, val->mem_ref);
+ if (changed)
+ {
+ *result_p = GIMPLE_STMT_OPERAND (stmt, 0);
+ retval = SSA_PROP_INTERESTING;
+ }
+ else
+ retval = SSA_PROP_NOT_INTERESTING;
+ }
+ else
+ retval = SSA_PROP_VARYING;
+ }
else if (TREE_CODE (stmt) == COND_EXPR)
{
/* See if we can determine which edge goes out of a conditional
opportunities. */
static void
-init_copy_prop (bool phis_only)
+init_copy_prop (void)
{
basic_block bb;
- copy_of = XNEWVEC (prop_value_t, num_ssa_names);
- memset (copy_of, 0, num_ssa_names * sizeof (*copy_of));
+ copy_of = XCNEWVEC (prop_value_t, num_ssa_names);
- cached_last_copy_of = XNEWVEC (tree, num_ssa_names);
- memset (cached_last_copy_of, 0, num_ssa_names * sizeof (*cached_last_copy_of));
+ cached_last_copy_of = XCNEWVEC (tree, num_ssa_names);
FOR_EACH_BB (bb)
{
block_stmt_iterator si;
- tree phi;
+ tree phi, def;
+ int depth = bb->loop_depth;
for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
{
tree stmt = bsi_stmt (si);
+ ssa_op_iter iter;
/* The only statements that we care about are those that may
generate useful copies. We also need to mark conditional
jumps so that their outgoing edges are added to the work
- lists of the propagator. */
+ lists of the propagator.
+
+ Avoid copy propagation from an inner into an outer loop.
+ Otherwise, this may move loop-variant variables outside of
+ their loops and prevent coalescing opportunities. If the
+ value was loop invariant, it will be hoisted by LICM and
+ exposed for copy propagation. */
if (stmt_ends_bb_p (stmt))
DONT_SIMULATE_AGAIN (stmt) = false;
- else if (!phis_only && stmt_may_generate_copy (stmt))
+ else if (stmt_may_generate_copy (stmt)
+ && loop_depth_of_name (GIMPLE_STMT_OPERAND (stmt, 1)) <= depth)
DONT_SIMULATE_AGAIN (stmt) = false;
else
- {
- tree def;
- ssa_op_iter iter;
-
- /* No need to simulate this statement anymore. */
- DONT_SIMULATE_AGAIN (stmt) = true;
-
- /* Mark all the outputs of this statement as not being
- the copy of anything. */
- FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
- set_copy_of_val (def, def, NULL_TREE);
- }
+ DONT_SIMULATE_AGAIN (stmt) = true;
+
+ /* Mark all the outputs of this statement as not being
+ the copy of anything. */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
+ if (DONT_SIMULATE_AGAIN (stmt))
+ set_copy_of_val (def, def, NULL_TREE);
+ else
+ cached_last_copy_of[SSA_NAME_VERSION (def)] = def;
}
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- DONT_SIMULATE_AGAIN (phi) = false;
+ {
+ def = PHI_RESULT (phi);
+ if (!do_store_copy_prop && !is_gimple_reg (def))
+ DONT_SIMULATE_AGAIN (phi) = true;
+ else
+ DONT_SIMULATE_AGAIN (phi) = false;
+
+ if (DONT_SIMULATE_AGAIN (phi))
+ set_copy_of_val (def, def, NULL_TREE);
+ else
+ cached_last_copy_of[SSA_NAME_VERSION (def)] = def;
+ }
}
}
/* Set the final copy-of value for each variable by traversing the
copy-of chains. */
- tmp = XNEWVEC (prop_value_t, num_ssa_names);
- memset (tmp, 0, num_ssa_names * sizeof (*tmp));
+ tmp = XCNEWVEC (prop_value_t, num_ssa_names);
for (i = 1; i < num_ssa_names; i++)
{
tree var = ssa_name (i);
x_53 and x_54 are both copies of x_898. */
static void
-execute_copy_prop (bool store_copy_prop, bool phis_only)
+execute_copy_prop (bool store_copy_prop)
{
do_store_copy_prop = store_copy_prop;
- init_copy_prop (phis_only);
+ init_copy_prop ();
ssa_propagate (copy_prop_visit_stmt, copy_prop_visit_phi_node);
fini_copy_prop ();
}
return flag_tree_copy_prop != 0;
}
-static void
+static unsigned int
do_copy_prop (void)
{
- execute_copy_prop (false, false);
+ execute_copy_prop (false);
+ return 0;
}
struct tree_opt_pass pass_copy_prop =
NULL, /* next */
0, /* static_pass_number */
TV_TREE_COPY_PROP, /* tv_id */
- PROP_ssa | PROP_alias | PROP_cfg, /* properties_required */
+ PROP_ssa | PROP_cfg, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
0 /* letter */
};
-
-static void
-do_phi_only_copy_prop (void)
-{
- execute_copy_prop (false, true);
-}
-
-struct tree_opt_pass pass_phi_only_copy_prop =
-{
- "phionlycopyprop", /* name */
- gate_copy_prop, /* gate */
- do_phi_only_copy_prop, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_TREE_COPY_PROP, /* tv_id */
- PROP_ssa | PROP_alias | PROP_cfg, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- TODO_cleanup_cfg
- | TODO_dump_func
- | TODO_ggc_collect
- | TODO_verify_ssa
- | TODO_update_ssa, /* todo_flags_finish */
- 0 /* letter */
-};
-
-
static bool
gate_store_copy_prop (void)
{
return flag_tree_store_copy_prop != 0 || flag_tree_copy_prop != 0;
}
-static void
+static unsigned int
store_copy_prop (void)
{
/* If STORE-COPY-PROP is not enabled, we just run regular COPY-PROP. */
- execute_copy_prop (flag_tree_store_copy_prop != 0, false);
+ execute_copy_prop (flag_tree_store_copy_prop != 0);
+ return 0;
}
struct tree_opt_pass pass_store_copy_prop =