/* Coalesce SSA_NAMES together for the out-of-ssa pass.
- Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation,
- Inc.
+ Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
+ Free Software Foundation, Inc.
Contributed by Andrew MacLeod <amacleod@redhat.com>
This file is part of GCC.
#define MUST_COALESCE_COST INT_MAX
-/* Return cost of execution of copy instruction with FREQUENCY
- possibly on CRITICAL edge and in HOT basic block. */
+/* Return cost of execution of copy instruction with FREQUENCY. */
static inline int
-coalesce_cost (int frequency, bool optimize_for_size, bool critical)
+coalesce_cost (int frequency, bool optimize_for_size)
{
/* Base costs on BB frequencies bounded by 1. */
int cost = frequency;
if (optimize_for_size)
cost = 1;
- /* Inserting copy on critical edge costs more than inserting it elsewhere. */
- if (critical)
- cost *= 2;
return cost;
}
static inline int
coalesce_cost_bb (basic_block bb)
{
- return coalesce_cost (bb->frequency, optimize_bb_for_size_p (bb), false);
+ return coalesce_cost (bb->frequency, optimize_bb_for_size_p (bb));
}
static inline int
coalesce_cost_edge (edge e)
{
+ int mult = 1;
+
+ /* Inserting a copy on a critical edge costs more than inserting it elsewhere. */
+ if (EDGE_CRITICAL_P (e))
+ mult = 2;
if (e->flags & EDGE_ABNORMAL)
return MUST_COALESCE_COST;
+ if (e->flags & EDGE_EH)
+ {
+ edge e2;
+ edge_iterator ei;
+ FOR_EACH_EDGE (e2, ei, e->dest->preds)
+ if (e2 != e)
+ {
+ /* Putting code on an EH edge that leads to a BB
+ with multiple predecessors implies splitting the
+ edge too. */
+ if (mult < 2)
+ mult = 2;
+ /* If there are multiple EH predecessors, we
+ also copy EH regions and produce a separate
+ landing pad. This is expensive. */
+ if (e2->flags & EDGE_EH)
+ {
+ mult = 5;
+ break;
+ }
+ }
+ }
return coalesce_cost (EDGE_FREQUENCY (e),
- optimize_edge_for_size_p (e),
- EDGE_CRITICAL_P (e));
+ optimize_edge_for_size_p (e)) * mult;
}
const_coalesce_pair_p const *const pp2 = (const_coalesce_pair_p const *) p2;
int result;
- result = (* pp2)->cost - (* pp1)->cost;
+ result = (* pp1)->cost - (* pp2)->cost;
/* Since qsort does not guarantee stability we use the elements
as a secondary key. This provides us with independence from
the host's implementation of the sorting algorithm. */
&& TREE_CODE (rhs1) == SSA_NAME)
live_track_clear_var (live, rhs1);
}
+ else if (is_gimple_debug (stmt))
+ continue;
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
live_track_process_def (live, var, graph);
used_in_virtual_ops = BITMAP_ALLOC (NULL);
#endif
- map = init_var_map (num_ssa_names + 1);
+ map = init_var_map (num_ssa_names);
FOR_EACH_BB (bb)
{
{
stmt = gsi_stmt (gsi);
+ if (is_gimple_debug (stmt))
+ continue;
+
/* Register USE and DEF operands in each statement. */
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, (SSA_OP_DEF|SSA_OP_USE))
register_ssa_partition (map, var);
if (SSA_NAME_VAR (outputs[match]) == SSA_NAME_VAR (input))
{
cost = coalesce_cost (REG_BR_PROB_BASE,
- optimize_bb_for_size_p (bb),
- false);
+ optimize_bb_for_size_p (bb));
add_coalesce (cl, v1, v2, cost);
bitmap_set_bit (used_in_copy, v1);
bitmap_set_bit (used_in_copy, v2);
bitmap_set_bit (used_in_real_ops, DECL_UID (SSA_NAME_VAR (var)));
/* Validate that virtual ops don't get used in funny ways. */
- FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_VIRTUALS)
- {
- bitmap_set_bit (used_in_virtual_ops,
- DECL_UID (SSA_NAME_VAR (var)));
- }
-
+ if (gimple_vuse (stmt))
+ bitmap_set_bit (used_in_virtual_ops,
+ DECL_UID (SSA_NAME_VAR (gimple_vuse (stmt))));
#endif /* ENABLE_CHECKING */
}
}
first = NULL_TREE;
for (i = 1; i < num_ssa_names; i++)
{
- var = map->partition_to_var[i];
- if (var != NULL_TREE)
+ var = ssa_name (i);
+ if (var != NULL_TREE && is_gimple_reg (var))
{
/* Add coalesces between all the result decls. */
if (TREE_CODE (SSA_NAME_VAR (var)) == RESULT_DECL)
/* Mark any default_def variables as being in the coalesce list
since they will have to be coalesced with the base variable. If
not marked as present, they won't be in the coalesce view. */
- if (gimple_default_def (cfun, SSA_NAME_VAR (var)) == var)
+ if (gimple_default_def (cfun, SSA_NAME_VAR (var)) == var
+ && !has_zero_uses (var))
bitmap_set_bit (used_in_copy, SSA_NAME_VERSION (var));
}
}
extern var_map
coalesce_ssa_name (void)
{
- unsigned num, x;
tree_live_info_p liveinfo;
ssa_conflicts_p graph;
coalesce_list_p cl;
{
tree a = ssa_name (i);
- if (a && SSA_NAME_VAR (a) && !DECL_ARTIFICIAL (SSA_NAME_VAR (a)))
+ if (a
+ && SSA_NAME_VAR (a)
+ && !DECL_ARTIFICIAL (SSA_NAME_VAR (a))
+ && (!has_zero_uses (a) || !SSA_NAME_IS_DEFAULT_DEF (a)))
{
tree *slot = (tree *) htab_find_slot (ssa_name_hash, a, INSERT);
/* First, coalesce all live on entry variables to their base variable.
This will ensure the first use is coming from the correct location. */
- num = num_var_partitions (map);
- for (x = 0 ; x < num; x++)
- {
- tree var = partition_to_var (map, x);
- tree root;
-
- if (TREE_CODE (var) != SSA_NAME)
- continue;
-
- root = SSA_NAME_VAR (var);
- if (gimple_default_def (cfun, root) == var)
- {
- /* This root variable should have not already been assigned
- to another partition which is not coalesced with this one. */
- gcc_assert (!var_ann (root)->out_of_ssa_tag);
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- print_exprs (dump_file, "Must coalesce ", var,
- " with the root variable ", root, ".\n");
- }
- change_partition_var (map, root, x);
- }
- }
-
if (dump_file && (dump_flags & TDF_DETAILS))
dump_var_map (dump_file, map);