/* Coalesce SSA_NAMES together for the out-of-ssa pass.
- Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation,
+ Inc.
Contributed by Andrew MacLeod <amacleod@redhat.com>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 51 Franklin Street, Fifth Floor,
-Boston, MA 02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
int second_element;
int cost;
} * coalesce_pair_p;
+typedef const struct coalesce_pair *const_coalesce_pair_p;
typedef struct cost_one_pair_d
{
possibly on CRITICAL edge and in HOT basic block. */
static inline int
-coalesce_cost (int frequency, bool hot, bool critical)
+coalesce_cost (int frequency, bool optimize_for_size, bool critical)
{
/* Base costs on BB frequencies bounded by 1. */
int cost = frequency;
if (!cost)
cost = 1;
- if (optimize_size)
+ if (optimize_for_size)
cost = 1;
- else
- /* It is more important to coalesce in HOT blocks. */
- if (hot)
- cost *= 2;
/* Inserting copy on critical edge costs more than inserting it elsewhere. */
if (critical)
static inline int
coalesce_cost_bb (basic_block bb)
{
-  return coalesce_cost (bb->frequency, maybe_hot_bb_p (bb), false);
+  /* coalesce_cost's second argument is now OPTIMIZE_FOR_SIZE (see its
+     updated signature above): the per-BB size-optimization predicate
+     replaces the old hotness test.  */
+  return coalesce_cost (bb->frequency, optimize_bb_for_size_p (bb), false);
}
return MUST_COALESCE_COST;
return coalesce_cost (EDGE_FREQUENCY (e),
- maybe_hot_bb_p (e->src),
+ optimize_edge_for_size_p (e),
EDGE_CRITICAL_P (e));
}
static unsigned int
coalesce_pair_map_hash (const void *pair)
{
-  hashval_t a = (hashval_t)(((coalesce_pair_p)pair)->first_element);
-  hashval_t b = (hashval_t)(((coalesce_pair_p)pair)->second_element);
+  /* PAIR is only read here, so cast through the const-qualified pointer
+     typedef; the two element numbers are then mixed by COALESCE_HASH_FN.  */
+  hashval_t a = (hashval_t)(((const_coalesce_pair_p)pair)->first_element);
+  hashval_t b = (hashval_t)(((const_coalesce_pair_p)pair)->second_element);
  return COALESCE_HASH_FN (a,b);
}
static int
coalesce_pair_map_eq (const void *pair1, const void *pair2)
{
- coalesce_pair_p p1 = (coalesce_pair_p) pair1;
- coalesce_pair_p p2 = (coalesce_pair_p) pair2;
+ const_coalesce_pair_p const p1 = (const_coalesce_pair_p) pair1;
+ const_coalesce_pair_p const p2 = (const_coalesce_pair_p) pair2;
return (p1->first_element == p2->first_element
&& p1->second_element == p2->second_element);
static int
compare_pairs (const void *p1, const void *p2)
{
-  return (*(coalesce_pair_p *)p1)->cost - (*(coalesce_pair_p *)p2)->cost;
+  const_coalesce_pair_p const *const pp1 = (const_coalesce_pair_p const *) p1;
+  const_coalesce_pair_p const *const pp2 = (const_coalesce_pair_p const *) p2;
+  int result;
+
+  /* Note the operand order: a higher cost compares "less", so qsort
+     produces the pairs in decreasing order of cost.  */
+  result = (* pp2)->cost - (* pp1)->cost;
+  /* Since qsort does not guarantee stability we use the elements
+     as a secondary key.  This provides us with independence from
+     the host's implementation of the sorting algorithm.  */
+  if (result == 0)
+    {
+      result = (* pp2)->first_element - (* pp1)->first_element;
+      if (result == 0)
+        result = (* pp2)->second_element - (* pp1)->second_element;
+    }
+
+  return result;
}
return;
/* Add a conflict between X and every one Y has. If the bitmap doesn't
- exist, then it has already been coalesced, and we dont need to add a
+ exist, then it has already been coalesced, and we don't need to add a
conflict. */
EXECUTE_IF_SET_IN_BITMAP (ptr->conflicts[y], 0, z, bi)
if (ptr->conflicts[z])
}
+/* Dump the conflict graph PTR to FILE.  Only partitions that still own
+   a conflict bitmap are printed; a NULL bitmap means that partition has
+   already been coalesced (see the comment in ssa_conflicts_merge).  */
+
+static void
+ssa_conflicts_dump (FILE *file, ssa_conflicts_p ptr)
+{
+  unsigned x;
+
+  fprintf (file, "\nConflict graph:\n");
+
+  for (x = 0; x < ptr->size; x++)
+    if (ptr->conflicts[x])
+      {
+        /* Write to FILE, not the global dump_file, so the caller controls
+           the destination; X is unsigned, so use %u.  */
+        fprintf (file, "%u: ", x);
+        dump_bitmap (file, ptr->conflicts[x]);
+      }
+}
+
+
/* This structure is used to efficiently record the current status of live
SSA_NAMES when building a conflict graph.
LIVE_BASE_VAR has a bit set for each base variable which has at least one
FOR_EACH_BB (bb)
{
- block_stmt_iterator bsi;
- tree phi;
+ gimple_stmt_iterator gsi;
/* Start with live on exit temporaries. */
live_track_init (live, live_on_exit (liveinfo, bb));
- for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
+ for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
{
tree var;
- tree stmt = bsi_stmt (bsi);
+ gimple stmt = gsi_stmt (gsi);
/* A copy between 2 partitions does not introduce an interference
by itself. If they did, you would never be able to coalesce
This is handled by simply removing the SRC of the copy from the
live list, and processing the stmt normally. */
- if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+ if (is_gimple_assign (stmt))
{
- tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
- tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
- if (TREE_CODE (lhs) == SSA_NAME && TREE_CODE (rhs) == SSA_NAME)
- live_track_clear_var (live, rhs);
+ tree lhs = gimple_assign_lhs (stmt);
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+ if (gimple_assign_copy_p (stmt)
+ && TREE_CODE (lhs) == SSA_NAME
+ && TREE_CODE (rhs1) == SSA_NAME)
+ live_track_clear_var (live, rhs1);
}
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
There must be a conflict recorded between the result of the PHI and
any variables that are live. Otherwise the out-of-ssa translation
may create incorrect code. */
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
+ gimple phi = gsi_stmt (gsi);
tree result = PHI_RESULT (phi);
if (live_track_live_p (live, result))
live_track_process_def (live, result, graph);
printed and compilation is then terminated. */
static inline void
-abnormal_corrupt (tree phi, int i)
+abnormal_corrupt (gimple phi, int i)
{
- edge e = PHI_ARG_EDGE (phi, i);
- tree res = PHI_RESULT (phi);
- tree arg = PHI_ARG_DEF (phi, i);
+ edge e = gimple_phi_arg_edge (phi, i);
+ tree res = gimple_phi_result (phi);
+ tree arg = gimple_phi_arg_def (phi, i);
fprintf (stderr, " Corrupt SSA across abnormal edge BB%d->BB%d\n",
e->src->index, e->dest->index);
static var_map
create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
{
- block_stmt_iterator bsi;
+ gimple_stmt_iterator gsi;
basic_block bb;
tree var;
- tree stmt;
+ gimple stmt;
tree first;
var_map map;
ssa_op_iter iter;
FOR_EACH_BB (bb)
{
- tree phi, arg;
+ tree arg;
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- int i;
+ gimple phi = gsi_stmt (gsi);
+ size_t i;
int ver;
tree res;
bool saw_copy = false;
- res = PHI_RESULT (phi);
+ res = gimple_phi_result (phi);
ver = SSA_NAME_VERSION (res);
register_ssa_partition (map, res);
/* Register ssa_names and coalesces between the args and the result
of all PHI. */
- for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+ for (i = 0; i < gimple_phi_num_args (phi); i++)
{
- edge e = PHI_ARG_EDGE (phi, i);
+ edge e = gimple_phi_arg_edge (phi, i);
arg = PHI_ARG_DEF (phi, i);
if (TREE_CODE (arg) == SSA_NAME)
register_ssa_partition (map, arg);
bitmap_set_bit (used_in_copy, ver);
}
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- stmt = bsi_stmt (bsi);
+ stmt = gsi_stmt (gsi);
/* Register USE and DEF operands in each statement. */
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, (SSA_OP_DEF|SSA_OP_USE))
register_ssa_partition (map, var);
/* Check for copy coalesces. */
- switch (TREE_CODE (stmt))
+ switch (gimple_code (stmt))
{
- case GIMPLE_MODIFY_STMT:
+ case GIMPLE_ASSIGN:
{
- tree op1 = GIMPLE_STMT_OPERAND (stmt, 0);
- tree op2 = GIMPLE_STMT_OPERAND (stmt, 1);
- if (TREE_CODE (op1) == SSA_NAME
- && TREE_CODE (op2) == SSA_NAME
- && SSA_NAME_VAR (op1) == SSA_NAME_VAR (op2))
+ tree lhs = gimple_assign_lhs (stmt);
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+
+ if (gimple_assign_copy_p (stmt)
+ && TREE_CODE (lhs) == SSA_NAME
+ && TREE_CODE (rhs1) == SSA_NAME
+ && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs1))
{
- v1 = SSA_NAME_VERSION (op1);
- v2 = SSA_NAME_VERSION (op2);
+ v1 = SSA_NAME_VERSION (lhs);
+ v2 = SSA_NAME_VERSION (rhs1);
cost = coalesce_cost_bb (bb);
add_coalesce (cl, v1, v2, cost);
bitmap_set_bit (used_in_copy, v1);
}
break;
- case ASM_EXPR:
+ case GIMPLE_ASM:
{
unsigned long noutputs, i;
+ unsigned long ninputs;
tree *outputs, link;
- noutputs = list_length (ASM_OUTPUTS (stmt));
+ noutputs = gimple_asm_noutputs (stmt);
+ ninputs = gimple_asm_ninputs (stmt);
outputs = (tree *) alloca (noutputs * sizeof (tree));
- for (i = 0, link = ASM_OUTPUTS (stmt); link;
- ++i, link = TREE_CHAIN (link))
+ for (i = 0; i < noutputs; ++i) {
+ link = gimple_asm_output_op (stmt, i);
outputs[i] = TREE_VALUE (link);
+ }
- for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+ for (i = 0; i < ninputs; ++i)
{
- const char *constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- tree input = TREE_VALUE (link);
+ const char *constraint;
+ tree input;
char *end;
unsigned long match;
+ link = gimple_asm_input_op (stmt, i);
+ constraint
+ = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ input = TREE_VALUE (link);
+
if (TREE_CODE (input) != SSA_NAME)
continue;
if (SSA_NAME_VAR (outputs[match]) == SSA_NAME_VAR (input))
{
cost = coalesce_cost (REG_BR_PROB_BASE,
- maybe_hot_bb_p (bb),
+ optimize_bb_for_size_p (bb),
false);
add_coalesce (cl, v1, v2, cost);
bitmap_set_bit (used_in_copy, v1);
FILE *debug)
{
int x = 0, y = 0;
- tree var1, var2, phi;
+ tree var1, var2;
int cost;
basic_block bb;
edge e;
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->flags & EDGE_ABNORMAL)
{
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ gimple_stmt_iterator gsi;
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
+ gimple phi = gsi_stmt (gsi);
tree res = PHI_RESULT (phi);
tree arg = PHI_ARG_DEF (phi, e->dest_idx);
int v1 = SSA_NAME_VERSION (res);
}
}
+/* Hashtable hash function: hash the SSA_NAME P by the pointer identity
+   of its underlying base variable, so all names of one variable collide.  */
+
+static hashval_t
+hash_ssa_name_by_var (const void *p)
+{
+  return (hashval_t) htab_hash_pointer (SSA_NAME_VAR ((const_tree) p));
+}
+
+/* Hashtable equality function: SSA_NAMEs P1 and P2 compare equal when
+   they share the same underlying base variable.  */
+
+static int
+eq_ssa_name_by_var (const void *p1, const void *p2)
+{
+  return SSA_NAME_VAR ((const_tree) p1) == SSA_NAME_VAR ((const_tree) p2);
+}
/* Reduce the number of copies by coalescing variables in the function. Return
a partition map with the resulting coalesces. */
coalesce_list_p cl;
bitmap used_in_copies = BITMAP_ALLOC (NULL);
var_map map;
+ unsigned int i;
+ static htab_t ssa_name_hash;
cl = create_coalesce_list ();
map = create_outofssa_var_map (cl, used_in_copies);
+ /* We need to coalesce all names originating same SSA_NAME_VAR
+ so debug info remains undisturbed. */
+ if (!optimize)
+ {
+ ssa_name_hash = htab_create (10, hash_ssa_name_by_var,
+ eq_ssa_name_by_var, NULL);
+ for (i = 1; i < num_ssa_names; i++)
+ {
+ tree a = ssa_name (i);
+
+ if (a && SSA_NAME_VAR (a) && !DECL_ARTIFICIAL (SSA_NAME_VAR (a)))
+ {
+ tree *slot = (tree *) htab_find_slot (ssa_name_hash, a, INSERT);
+
+ if (!*slot)
+ *slot = a;
+ else
+ {
+ add_coalesce (cl, SSA_NAME_VERSION (a), SSA_NAME_VERSION (*slot),
+ MUST_COALESCE_COST - 1);
+ bitmap_set_bit (used_in_copies, SSA_NAME_VERSION (a));
+ bitmap_set_bit (used_in_copies, SSA_NAME_VERSION (*slot));
+ }
+ }
+ }
+ htab_delete (ssa_name_hash);
+ }
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ dump_var_map (dump_file, map);
+
/* Don't calculate live ranges for variables not in the coalesce list. */
partition_view_bitmap (map, used_in_copies, true);
BITMAP_FREE (used_in_copies);
/* Build a conflict graph. */
graph = build_ssa_conflict_graph (liveinfo);
delete_tree_live_info (liveinfo);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ ssa_conflicts_dump (dump_file, graph);
sort_coalesce_list (cl);
return map;
}
-