You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "errors.h"
#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
useful only for debugging, since we don't do identity lookups. */
+static bool in_fre = false;
+
/* A value set element. Basically a single linked list of
expressions/values. */
typedef struct value_set_node
static alloc_pool binary_node_pool;
static alloc_pool unary_node_pool;
static alloc_pool reference_node_pool;
+static alloc_pool comparison_node_pool;
+static alloc_pool expression_node_pool;
+static alloc_pool list_node_pool;
static bitmap_obstack grand_bitmap_obstack;
/* Set of blocks with statements that have had its EH information
print_value_set (stderr, set, setname, blockindex);
}
+/* Return the folded version of T if T, when folded, is a gimple
+ min_invariant. Otherwise, return T. */
+
+static tree
+fully_constant_expression (tree t)
+{
+  tree folded;
+  /* fold may simplify T to a constant; only accept the result when it
+     is a valid GIMPLE minimal invariant, otherwise keep T unchanged.  */
+  folded = fold (t);
+  if (folded && is_gimple_min_invariant (folded))
+    return folded;
+  return t;
+}
+
+/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
+ For example, this can copy a list made of TREE_LIST nodes.
+   Allocates the nodes in list_node_pool.  */
+
+static tree
+pool_copy_list (tree list)
+{
+  tree head;
+  tree prev, next;
+
+  if (list == 0)
+    return 0;
+  /* Clone the head node out of the pool, then walk the chain cloning
+     each successor and re-linking it off the previous copy.  */
+  head = pool_alloc (list_node_pool);
+
+  memcpy (head, list, tree_size (list));
+  prev = head;
+
+  next = TREE_CHAIN (list);
+  while (next)
+    {
+      TREE_CHAIN (prev) = pool_alloc (list_node_pool);
+      memcpy (TREE_CHAIN (prev), next, tree_size (next));
+      prev = TREE_CHAIN (prev);
+      next = TREE_CHAIN (next);
+    }
+  return head;
+}
+
+
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
the phis in PRED. Return NULL if we can't find a leader for each
part of the translated expression. */
switch (TREE_CODE_CLASS (TREE_CODE (expr)))
{
+ case tcc_expression:
+ {
+ if (TREE_CODE (expr) != CALL_EXPR)
+ return NULL;
+ else
+ {
+ tree oldop0 = TREE_OPERAND (expr, 0);
+ tree oldarglist = TREE_OPERAND (expr, 1);
+ tree oldop2 = TREE_OPERAND (expr, 2);
+ tree newop0;
+ tree newarglist;
+ tree newop2 = NULL;
+ tree oldwalker;
+ tree newwalker;
+ tree newexpr;
+ bool listchanged = false;
+
+ /* Call expressions are kind of weird because they have an
+ argument list. We don't want to value number the list
+ as one value number, because that doesn't make much
+ sense, and just breaks the support functions we call,
+ which expect TREE_OPERAND (call_expr, 2) to be a
+ TREE_LIST. */
+
+ newop0 = phi_translate (find_leader (set, oldop0),
+ set, pred, phiblock);
+ if (newop0 == NULL)
+ return NULL;
+ if (oldop2)
+ {
+ newop2 = phi_translate (find_leader (set, oldop2),
+ set, pred, phiblock);
+ if (newop2 == NULL)
+ return NULL;
+ }
+
+ /* phi translate the argument list piece by piece.
+
+ We could actually build the list piece by piece here,
+ but it's likely to not be worth the memory we will save,
+ unless you have millions of call arguments. */
+
+ newarglist = pool_copy_list (oldarglist);
+ for (oldwalker = oldarglist, newwalker = newarglist;
+ oldwalker && newwalker;
+ oldwalker = TREE_CHAIN (oldwalker),
+ newwalker = TREE_CHAIN (newwalker))
+ {
+
+ tree oldval = TREE_VALUE (oldwalker);
+ tree newval;
+ if (oldval)
+ {
+ newval = phi_translate (find_leader (set, oldval),
+ set, pred, phiblock);
+ if (newval == NULL)
+ return NULL;
+ if (newval != oldval)
+ {
+ listchanged = true;
+ TREE_VALUE (newwalker) = get_value_handle (newval);
+ }
+ }
+ }
+ if (listchanged)
+ vn_lookup_or_add (newarglist, NULL);
+
+ if (listchanged || (newop0 != oldop0) || (oldop2 != newop2))
+ {
+ newexpr = pool_alloc (expression_node_pool);
+ memcpy (newexpr, expr, tree_size (expr));
+ TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldop0 : get_value_handle (newop0);
+ TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
+ TREE_OPERAND (newexpr, 2) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
+ create_tree_ann (newexpr);
+ vn_lookup_or_add (newexpr, NULL);
+ expr = newexpr;
+ phi_trans_add (oldexpr, newexpr, pred);
+ }
+ }
+ }
+ return expr;
+
case tcc_reference:
/* XXX: Until we have PRE of loads working, none will be ANTIC. */
return NULL;
return NULL;
if (newop1 != oldop1 || newop2 != oldop2)
{
+ tree t;
newexpr = pool_alloc (binary_node_pool);
memcpy (newexpr, expr, tree_size (expr));
- create_tree_ann (newexpr);
TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldop1 : get_value_handle (newop1);
TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
- vn_lookup_or_add (newexpr, NULL);
+ t = fully_constant_expression (newexpr);
+ if (t != newexpr)
+ {
+ pool_free (binary_node_pool, newexpr);
+ newexpr = t;
+ }
+ else
+ {
+ create_tree_ann (newexpr);
+ vn_lookup_or_add (newexpr, NULL);
+ }
expr = newexpr;
phi_trans_add (oldexpr, newexpr, pred);
}
return NULL;
if (newop1 != oldop1)
{
+ tree t;
newexpr = pool_alloc (unary_node_pool);
memcpy (newexpr, expr, tree_size (expr));
- create_tree_ann (newexpr);
TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
- vn_lookup_or_add (newexpr, NULL);
+ t = fully_constant_expression (newexpr);
+ if (t != newexpr)
+ {
+ pool_free (unary_node_pool, newexpr);
+ newexpr = t;
+ }
+ else
+ {
+ create_tree_ann (newexpr);
+ vn_lookup_or_add (newexpr, NULL);
+ }
expr = newexpr;
phi_trans_add (oldexpr, newexpr, pred);
}
we have a leader for each part of the expression (if it consists of
values), or the expression is an SSA_NAME.
- NB: We never should run into a case where we have SSA_NAME +
+ NB: We never should run into a case where we have SSA_NAME +
SSA_NAME or SSA_NAME + value. The sets valid_in_set is called on,
- the ANTIC sets, will only ever have SSA_NAME's or binary value
- expression (IE VALUE1 + VALUE2) */
+ the ANTIC sets, will only ever have SSA_NAME's or value expressions
+ (IE VALUE1 + VALUE2, *VALUE1, VALUE1 < VALUE2) */
static bool
valid_in_set (value_set_t set, tree expr)
tree op1 = TREE_OPERAND (expr, 0);
return set_contains_value (set, op1);
}
+
+ case tcc_expression:
+ {
+ if (TREE_CODE (expr) == CALL_EXPR)
+ {
+ tree op0 = TREE_OPERAND (expr, 0);
+ tree arglist = TREE_OPERAND (expr, 1);
+ tree op2 = TREE_OPERAND (expr, 2);
+
+ /* Check the non-list operands first. */
+ if (!set_contains_value (set, op0)
+ || (op2 && !set_contains_value (set, op2)))
+ return false;
+	    /* Now check the argument list.  */
+ for (; arglist; arglist = TREE_CHAIN (arglist))
+ {
+ if (!set_contains_value (set, TREE_VALUE (arglist)))
+ return false;
+ }
+ return true;
+ }
+ return false;
+ }
+
case tcc_reference:
/* XXX: Until PRE of loads works, no reference nodes are ANTIC. */
return false;
translate through. */
else if (single_succ_p (block))
{
- phi_translate_set (ANTIC_OUT, ANTIC_IN(single_succ (block)),
+ phi_translate_set (ANTIC_OUT, ANTIC_IN (single_succ (block)),
block, single_succ (block));
}
/* If we have multiple successors, we take the intersection of all of
gcc_assert (UNARY_CLASS_P (genop)
|| BINARY_CLASS_P (genop)
|| COMPARISON_CLASS_P (genop)
- || REFERENCE_CLASS_P (genop));
+ || REFERENCE_CLASS_P (genop)
+ || TREE_CODE (genop) == CALL_EXPR);
genop = create_expression_by_pieces (block, genop, stmts);
}
return genop;
static tree
create_expression_by_pieces (basic_block block, tree expr, tree stmts)
{
- tree name = NULL_TREE;
- tree newexpr = NULL_TREE;
+ tree temp, name;
+ tree folded, forced_stmts, newexpr;
tree v;
-
+ tree_stmt_iterator tsi;
+
switch (TREE_CODE_CLASS (TREE_CODE (expr)))
{
+ case tcc_expression:
+ {
+ tree op0, op2;
+ tree arglist;
+ tree genop0, genop2;
+ tree genarglist;
+ tree walker, genwalker;
+
+ gcc_assert (TREE_CODE (expr) == CALL_EXPR);
+ genop2 = NULL;
+
+ op0 = TREE_OPERAND (expr, 0);
+ arglist = TREE_OPERAND (expr, 1);
+ op2 = TREE_OPERAND (expr, 2);
+
+ genop0 = find_or_generate_expression (block, op0, stmts);
+ genarglist = copy_list (arglist);
+ for (walker = arglist, genwalker = genarglist;
+ genwalker && walker;
+ genwalker = TREE_CHAIN (genwalker), walker = TREE_CHAIN (walker))
+ {
+ TREE_VALUE (genwalker) = find_or_generate_expression (block,
+ TREE_VALUE (walker),
+ stmts);
+ }
+
+ if (op2)
+ genop2 = find_or_generate_expression (block, op2, stmts);
+ folded = fold_build3 (TREE_CODE (expr), TREE_TYPE (expr),
+ genop0, genarglist, genop2);
+ break;
+
+
+ }
+ break;
+
case tcc_binary:
case tcc_comparison:
{
- tree_stmt_iterator tsi;
- tree forced_stmts;
- tree genop1, genop2;
- tree temp;
- tree folded;
tree op1 = TREE_OPERAND (expr, 0);
tree op2 = TREE_OPERAND (expr, 1);
- genop1 = find_or_generate_expression (block, op1, stmts);
- genop2 = find_or_generate_expression (block, op2, stmts);
- temp = create_tmp_var (TREE_TYPE (expr), "pretmp");
- add_referenced_tmp_var (temp);
-
- folded = fold (build (TREE_CODE (expr), TREE_TYPE (expr),
- genop1, genop2));
- newexpr = force_gimple_operand (folded, &forced_stmts, false, NULL);
- if (forced_stmts)
- {
- tsi = tsi_start (forced_stmts);
- for (; !tsi_end_p (tsi); tsi_next (&tsi))
- {
- tree stmt = tsi_stmt (tsi);
- tree forcedname = TREE_OPERAND (stmt, 0);
- tree forcedexpr = TREE_OPERAND (stmt, 1);
- tree val = vn_lookup_or_add (forcedexpr, NULL);
- vn_add (forcedname, val, NULL);
- bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
- bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
- }
-
- tsi = tsi_last (stmts);
- tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
- }
- newexpr = build (MODIFY_EXPR, TREE_TYPE (expr),
- temp, newexpr);
- NECESSARY (newexpr) = 0;
- name = make_ssa_name (temp, newexpr);
- TREE_OPERAND (newexpr, 0) = name;
- tsi = tsi_last (stmts);
- tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
- VEC_safe_push (tree, heap, inserted_exprs, newexpr);
- pre_stats.insertions++;
+ tree genop1 = find_or_generate_expression (block, op1, stmts);
+ tree genop2 = find_or_generate_expression (block, op2, stmts);
+ folded = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr),
+ genop1, genop2);
break;
}
+
case tcc_unary:
{
- tree_stmt_iterator tsi;
- tree forced_stmts = NULL;
- tree genop1;
- tree temp;
- tree folded;
tree op1 = TREE_OPERAND (expr, 0);
- genop1 = find_or_generate_expression (block, op1, stmts);
- temp = create_tmp_var (TREE_TYPE (expr), "pretmp");
- add_referenced_tmp_var (temp);
- folded = fold (build (TREE_CODE (expr), TREE_TYPE (expr),
- genop1));
- /* If the generated operand is already GIMPLE min_invariant
- just use it instead of calling force_gimple_operand on it,
- since that may make it not invariant by copying it into an
- assignment. */
- if (!is_gimple_min_invariant (genop1))
- newexpr = force_gimple_operand (folded, &forced_stmts, false, NULL);
- else
- newexpr = genop1;
- if (forced_stmts)
- {
- tsi = tsi_start (forced_stmts);
- for (; !tsi_end_p (tsi); tsi_next (&tsi))
- {
- tree stmt = tsi_stmt (tsi);
- tree forcedname = TREE_OPERAND (stmt, 0);
- tree forcedexpr = TREE_OPERAND (stmt, 1);
- tree val = vn_lookup_or_add (forcedexpr, NULL);
- vn_add (forcedname, val, NULL);
- bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
- bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
- }
- tsi = tsi_last (stmts);
- tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
- }
- newexpr = build (MODIFY_EXPR, TREE_TYPE (expr),
- temp, newexpr);
- name = make_ssa_name (temp, newexpr);
- TREE_OPERAND (newexpr, 0) = name;
- NECESSARY (newexpr) = 0;
- tsi = tsi_last (stmts);
- tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
- VEC_safe_push (tree, heap, inserted_exprs, newexpr);
- pre_stats.insertions++;
-
+ tree genop1 = find_or_generate_expression (block, op1, stmts);
+ folded = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr),
+ genop1);
break;
}
+
default:
gcc_unreachable ();
-
}
- v = get_value_handle (expr);
- vn_add (name, v, NULL);
- /* The value may already exist in either NEW_SETS, or AVAIL_OUT, because
+ /* Force the generated expression to be a sequence of GIMPLE
+ statements.
+ We have to call unshare_expr because force_gimple_operand may
+ modify the tree we pass to it. */
+ newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
+ false, NULL);
+
+ /* If we have any intermediate expressions to the value sets, add them
+ to the value sets and chain them on in the instruction stream. */
+ if (forced_stmts)
+ {
+ tsi = tsi_start (forced_stmts);
+ for (; !tsi_end_p (tsi); tsi_next (&tsi))
+ {
+ tree stmt = tsi_stmt (tsi);
+ tree forcedname = TREE_OPERAND (stmt, 0);
+ tree forcedexpr = TREE_OPERAND (stmt, 1);
+ tree val = vn_lookup_or_add (forcedexpr, NULL);
+
+ VEC_safe_push (tree, heap, inserted_exprs, stmt);
+ vn_add (forcedname, val, NULL);
+ bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
+ bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
+ }
+ tsi = tsi_last (stmts);
+ tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
+ }
+
+ /* Build and insert the assignment of the end result to the temporary
+ that we will return. */
+ temp = create_tmp_var (TREE_TYPE (expr), "pretmp");
+ add_referenced_tmp_var (temp);
+ if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
+ DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
+ newexpr = build (MODIFY_EXPR, TREE_TYPE (expr), temp, newexpr);
+ name = make_ssa_name (temp, newexpr);
+ TREE_OPERAND (newexpr, 0) = name;
+ NECESSARY (newexpr) = 0;
+ tsi = tsi_last (stmts);
+ tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
+ VEC_safe_push (tree, heap, inserted_exprs, newexpr);
+
+ /* Add a value handle to the temporary.
+ The value may already exist in either NEW_SETS, or AVAIL_OUT, because
we are creating the expression by pieces, and this particular piece of
the expression may have been represented. There is no harm in replacing
here. */
+ v = get_value_handle (expr);
+ vn_add (name, v, NULL);
bitmap_value_replace_in_set (NEW_SETS (block), name);
bitmap_value_replace_in_set (AVAIL_OUT (block), name);
+
+ pre_stats.insertions++;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Inserted ");
print_generic_expr (dump_file, newexpr, 0);
fprintf (dump_file, " in predecessor %d\n", block->index);
}
- return name;
-}
-
-/* Return the folded version of T if T, when folded, is a gimple
- min_invariant. Otherwise, return T. */
-static tree
-fully_constant_expression (tree t)
-{
- tree folded;
- folded = fold (t);
- if (folded && is_gimple_min_invariant (folded))
- return folded;
- return t;
+ return name;
}
/* Insert the to-be-made-available values of NODE for each predecessor, stored
eprime = avail[bprime->index];
if (BINARY_CLASS_P (eprime)
|| COMPARISON_CLASS_P (eprime)
- || UNARY_CLASS_P (eprime))
+ || UNARY_CLASS_P (eprime)
+ || TREE_CODE (eprime) == CALL_EXPR)
{
builtexpr = create_expression_by_pieces (bprime,
eprime,
/* Now build a phi for the new variable. */
temp = create_tmp_var (type, tmpname);
add_referenced_tmp_var (temp);
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
temp = create_phi_node (temp, block);
NECESSARY (temp) = 0;
VEC_safe_push (tree, heap, inserted_exprs, temp);
{
if (BINARY_CLASS_P (node->expr)
|| COMPARISON_CLASS_P (node->expr)
- || UNARY_CLASS_P (node->expr))
+ || UNARY_CLASS_P (node->expr)
+ || TREE_CODE (node->expr) == CALL_EXPR)
{
tree *avail;
tree val;
any). They are used when computing the hash value for EXPR. */
static inline void
-add_to_sets (tree var, tree expr, vuse_optype vuses, bitmap_set_t s1,
+add_to_sets (tree var, tree expr, tree stmt, bitmap_set_t s1,
bitmap_set_t s2)
{
- tree val = vn_lookup_or_add (expr, vuses);
+ tree val = vn_lookup_or_add (expr, stmt);
/* VAR and EXPR may be the same when processing statements for which
we are not computing value numbers (e.g., non-assignments, or
statements that make aliased stores). In those cases, we are
only interested in making VAR available as its own value. */
if (var != expr)
- vn_add (var, val, NULL);
+ vn_add (var, val, NULL_TREE);
if (s1)
bitmap_insert_into_set (s1, var);
Insert EXPR's operands into the EXP_GEN set for BLOCK. */
static inline tree
-create_value_expr_from (tree expr, basic_block block,
- vuse_optype vuses)
-
+create_value_expr_from (tree expr, basic_block block, tree stmt)
{
int i;
enum tree_code code = TREE_CODE (expr);
gcc_assert (TREE_CODE_CLASS (code) == tcc_unary
|| TREE_CODE_CLASS (code) == tcc_binary
|| TREE_CODE_CLASS (code) == tcc_comparison
- || TREE_CODE_CLASS (code) == tcc_reference);
+ || TREE_CODE_CLASS (code) == tcc_reference
+ || TREE_CODE_CLASS (code) == tcc_expression
+ || TREE_CODE_CLASS (code) == tcc_exceptional);
if (TREE_CODE_CLASS (code) == tcc_unary)
pool = unary_node_pool;
else if (TREE_CODE_CLASS (code) == tcc_reference)
pool = reference_node_pool;
- else
+ else if (TREE_CODE_CLASS (code) == tcc_binary)
pool = binary_node_pool;
+ else if (TREE_CODE_CLASS (code) == tcc_comparison)
+ pool = comparison_node_pool;
+ else if (TREE_CODE_CLASS (code) == tcc_exceptional)
+ {
+ gcc_assert (code == TREE_LIST);
+ pool = list_node_pool;
+ }
+ else
+ {
+ gcc_assert (code == CALL_EXPR);
+ pool = expression_node_pool;
+ }
vexpr = pool_alloc (pool);
memcpy (vexpr, expr, tree_size (expr));
+
+ /* This case is only for TREE_LIST's that appear as part of
+ CALL_EXPR's. Anything else is a bug, but we can't easily verify
+ this, hence this comment. TREE_LIST is not handled by the
+     general case below because they don't have a fixed length, or
+ operands, so you can't access purpose/value/chain through
+ TREE_OPERAND macros. */
+
+ if (code == TREE_LIST)
+ {
+ tree op = NULL_TREE;
+ tree temp = NULL_TREE;
+ if (TREE_CHAIN (vexpr))
+ temp = create_value_expr_from (TREE_CHAIN (vexpr), block, stmt);
+ TREE_CHAIN (vexpr) = temp ? temp : TREE_CHAIN (vexpr);
+
+
+ /* Recursively value-numberize reference ops. */
+ if (REFERENCE_CLASS_P (TREE_VALUE (vexpr)))
+ {
+ tree tempop;
+ op = TREE_VALUE (vexpr);
+ tempop = create_value_expr_from (op, block, stmt);
+ op = tempop ? tempop : op;
+
+ TREE_VALUE (vexpr) = vn_lookup_or_add (op, stmt);
+ }
+ else
+ {
+ op = TREE_VALUE (vexpr);
+ TREE_VALUE (vexpr) = vn_lookup_or_add (TREE_VALUE (vexpr), NULL);
+ }
+ /* This is the equivalent of inserting op into EXP_GEN like we
+ do below */
+ if (!is_undefined_value (op))
+ value_insert_into_set (EXP_GEN (block), op);
+
+ return vexpr;
+ }
for (i = 0; i < TREE_CODE_LENGTH (code); i++)
{
return NULL;
}
- /* Recursively value-numberize reference ops */
+ /* Recursively value-numberize reference ops and tree lists. */
if (REFERENCE_CLASS_P (op))
{
- tree tempop = create_value_expr_from (op, block, vuses);
+ tree tempop = create_value_expr_from (op, block, stmt);
+ op = tempop ? tempop : op;
+ val = vn_lookup_or_add (op, stmt);
+ }
+ else if (TREE_CODE (op) == TREE_LIST)
+ {
+ tree tempop;
+
+ gcc_assert (TREE_CODE (expr) == CALL_EXPR);
+ tempop = create_value_expr_from (op, block, stmt);
+
op = tempop ? tempop : op;
- val = vn_lookup_or_add (op, vuses);
+ vn_lookup_or_add (op, NULL);
+ /* Unlike everywhere else, we do *not* want to replace the
+ TREE_LIST itself with a value number, because support
+ functions we call will blow up. */
+ val = op;
}
else
/* Create a value handle for OP and add it to VEXPR. */
val = vn_lookup_or_add (op, NULL);
- if (!is_undefined_value (op))
+ if (!is_undefined_value (op) && TREE_CODE (op) != TREE_LIST)
value_insert_into_set (EXP_GEN (block), op);
if (TREE_CODE (val) == VALUE_HANDLE)
}
+/* Return true if we can value number a call. This is true if we have
+ a pure or constant call. */
+static bool
+can_value_number_call (tree stmt)
+{
+  tree call = get_call_expr_in (stmt);
+
+  /* This is a temporary restriction until we translate vuses through
+     phi nodes.  This is only needed for PRE, of course.  */
+  if (!in_fre && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
+    return false;
+  /* Only calls without side effects have a stable value to number.  */
+  if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
+    return true;
+  return false;
+}
+
+/* Given a statement STMT and its right hand side which is a load, try
+ to look for the expression stored in the location for the load, and
+ return true if a useful equivalence was recorded for LHS. */
+
+static bool
+try_look_through_load (tree lhs, tree mem_ref, tree stmt, basic_block block)
+{
+  tree store_stmt = NULL;
+  tree rhs;
+  ssa_op_iter i;
+  tree vuse;
+
+  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
+    {
+      tree def_stmt;
+
+      gcc_assert (TREE_CODE (vuse) == SSA_NAME);
+      def_stmt = SSA_NAME_DEF_STMT (vuse);
+
+      /* If there is no useful statement for this VUSE, we'll not find a
+         useful expression to return either.  Likewise, if there is a
+         statement but it is not a simple assignment or it has virtual
+         uses, we can stop right here.  Note that this means we do
+         not look through PHI nodes, which is intentional.  */
+      if (!def_stmt
+          || TREE_CODE (def_stmt) != MODIFY_EXPR
+          || !ZERO_SSA_OPERANDS (def_stmt, SSA_OP_VIRTUAL_USES))
+        return false;
+
+      /* If this is not the same statement as one we have looked at for
+         another VUSE of STMT already, we have two statements producing
+         something that reaches our STMT.  */
+      if (store_stmt && store_stmt != def_stmt)
+        return false;
+      else
+        {
+          /* Is this a store to the exact same location as the one we are
+             loading from in STMT?  */
+          if (!operand_equal_p (TREE_OPERAND (def_stmt, 0), mem_ref, 0))
+            return false;
+
+          /* Otherwise remember this statement and see if all other VUSEs
+             come from the same statement.  */
+          store_stmt = def_stmt;
+        }
+    }
+
+  /* Alright then, we have visited all VUSEs of STMT and we've determined
+     that all of them come from the same statement STORE_STMT.  See if there
+     is a useful expression we can deduce from STORE_STMT.
+     NOTE(review): assumes STMT has at least one VUSE (the caller checks
+     this), so STORE_STMT is non-NULL here — confirm for any new caller.  */
+  rhs = TREE_OPERAND (store_stmt, 1);
+  if ((TREE_CODE (rhs) == SSA_NAME
+       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
+      || is_gimple_min_invariant (rhs)
+      || TREE_CODE (rhs) == ADDR_EXPR
+      || TREE_INVARIANT (rhs))
+    {
+
+      /* Yay!  Compute a value number for the RHS of the statement and
+         add its value to the AVAIL_OUT set for the block.  Add the LHS
+         to TMP_GEN.  */
+      add_to_sets (lhs, rhs, store_stmt, TMP_GEN (block), AVAIL_OUT (block));
+      if (TREE_CODE (rhs) == SSA_NAME
+          && !is_undefined_value (rhs))
+        value_insert_into_set (EXP_GEN (block), rhs);
+      return true;
+    }
+
+  return false;
+}
+
/* Compute the AVAIL set for all basic blocks.
This function performs value numbering of the statements in each basic
for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
{
stmt_ann_t ann;
- size_t j;
+ ssa_op_iter iter;
+ tree op;
stmt = bsi_stmt (bsi);
ann = stmt_ann (stmt);
{
tree lhs = TREE_OPERAND (stmt, 0);
tree rhs = TREE_OPERAND (stmt, 1);
- vuse_optype vuses = STMT_VUSE_OPS (stmt);
+
+ /* Try to look through loads. */
+ if (TREE_CODE (lhs) == SSA_NAME
+ && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES)
+ && try_look_through_load (lhs, rhs, stmt, block))
+ continue;
STRIP_USELESS_TYPE_CONVERSION (rhs);
if (UNARY_CLASS_P (rhs)
|| BINARY_CLASS_P (rhs)
|| COMPARISON_CLASS_P (rhs)
- || REFERENCE_CLASS_P (rhs))
+ || REFERENCE_CLASS_P (rhs)
+ || (TREE_CODE (rhs) == CALL_EXPR
+ && can_value_number_call (stmt)))
{
/* For binary, unary, and reference expressions,
create a duplicate expression with the operands
replaced with the value handles of the original
RHS. */
- tree newt = create_value_expr_from (rhs, block, vuses);
+ tree newt = create_value_expr_from (rhs, block, stmt);
if (newt)
{
- add_to_sets (lhs, newt, vuses, TMP_GEN (block),
+ add_to_sets (lhs, newt, stmt, TMP_GEN (block),
AVAIL_OUT (block));
value_insert_into_set (EXP_GEN (block), newt);
continue;
}
}
- else if (TREE_CODE (rhs) == SSA_NAME
+ else if ((TREE_CODE (rhs) == SSA_NAME
+ && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
|| is_gimple_min_invariant (rhs)
|| TREE_CODE (rhs) == ADDR_EXPR
|| TREE_INVARIANT (rhs)
/* Compute a value number for the RHS of the statement
and add its value to the AVAIL_OUT set for the block.
Add the LHS to TMP_GEN. */
- add_to_sets (lhs, rhs, vuses, TMP_GEN (block),
+ add_to_sets (lhs, rhs, stmt, TMP_GEN (block),
AVAIL_OUT (block));
if (TREE_CODE (rhs) == SSA_NAME
/* For any other statement that we don't recognize, simply
make the names generated by the statement available in
AVAIL_OUT and TMP_GEN. */
- for (j = 0; j < NUM_DEFS (STMT_DEF_OPS (stmt)); j++)
- {
- tree def = DEF_OP (STMT_DEF_OPS (stmt), j);
- add_to_sets (def, def, NULL, TMP_GEN (block),
- AVAIL_OUT (block));
- }
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
+ add_to_sets (op, op, NULL, TMP_GEN (block), AVAIL_OUT (block));
- for (j = 0; j < NUM_USES (STMT_USE_OPS (stmt)); j++)
- {
- tree use = USE_OP (STMT_USE_OPS (stmt), j);
- add_to_sets (use, use, NULL, NULL, AVAIL_OUT (block));
- }
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
+ add_to_sets (op, op, NULL, NULL , AVAIL_OUT (block));
}
/* Put the dominator children of BLOCK on the worklist of blocks
fprintf (dump_file, " in ");
print_generic_stmt (dump_file, stmt, 0);
}
+
if (TREE_CODE (sprime) == SSA_NAME)
NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
+ /* We need to make sure the new and old types actually match,
+ which may require adding a simple cast, which fold_convert
+ will do for us. */
+ if (TREE_CODE (*rhs_p) != SSA_NAME
+ && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*rhs_p),
+ TREE_TYPE (sprime)))
+ sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);
+
pre_stats.eliminations++;
propagate_tree_value (rhs_p, sprime);
update_stmt (stmt);
/* If we removed EH side effects from the statement, clean
its EH information. */
- if (maybe_clean_eh_stmt (stmt))
+ if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
bb_for_stmt (stmt)->index);
init_pre (bool do_fre)
{
basic_block bb;
+
+ in_fre = do_fre;
inserted_exprs = NULL;
vn_init ();
tree_code_size (NEGATE_EXPR), 30);
reference_node_pool = create_alloc_pool ("Reference tree nodes",
tree_code_size (ARRAY_REF), 30);
+ expression_node_pool = create_alloc_pool ("Expression tree nodes",
+ tree_code_size (CALL_EXPR), 30);
+ list_node_pool = create_alloc_pool ("List tree nodes",
+ tree_code_size (TREE_LIST), 30);
+ comparison_node_pool = create_alloc_pool ("Comparison tree nodes",
+ tree_code_size (EQ_EXPR), 30);
FOR_ALL_BB (bb)
{
EXP_GEN (bb) = set_new (true);
free_alloc_pool (binary_node_pool);
free_alloc_pool (reference_node_pool);
free_alloc_pool (unary_node_pool);
+ free_alloc_pool (list_node_pool);
+ free_alloc_pool (expression_node_pool);
+ free_alloc_pool (comparison_node_pool);
htab_delete (phi_translate_table);
remove_fake_exit_edges ();
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ TODO_update_ssa | TODO_dump_func | TODO_ggc_collect
+ | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
+
+/* Return true if T is a copy statement between two ssa names. */
+
+static bool
+is_copy_stmt (tree t)
+{
+  if (!t || TREE_CODE (t) != MODIFY_EXPR)
+    return false;
+  /* A copy is an assignment of the form SSA_NAME = SSA_NAME.  */
+  if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
+      && TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME)
+    return true;
+  return false;
+}
+
+/* Starting from START, walk copy statements till we hit a statement with a
+ VUSE or a non-copy statement. */
+
+static tree
+follow_copies_till_vuse (tree start)
+{
+  /* Only walk through pure SSA-name copies that have no virtual uses;
+     any other statement (or one touching memory) terminates the walk.  */
+  if (is_copy_stmt (start) && ZERO_SSA_OPERANDS (start, SSA_OP_VIRTUAL_USES))
+    {
+      tree rhs, defstmt;
+
+      rhs = TREE_OPERAND (start, 1);
+      defstmt = SSA_NAME_DEF_STMT (rhs);
+      return follow_copies_till_vuse (defstmt);
+    }
+  return start;
+}
+
+/* Gate and execute functions for eliminate useless stores.
+ The goal here is to recognize the pattern *x = ... *x, and eliminate the
+ store because the value hasn't changed. Store copy/const prop won't
+ do this because making *more* loads (IE propagating *x) is not a win, so it
+ ignores them.
+ This pass is currently geared completely towards static variable store
+ elimination. */
+
+static void
+do_eustores (void)
+{
+  basic_block bb;
+  /* For each basic block
+       For each statement (STMT) in the block
+         if STMT is a store of the pattern *x = y
+           follow the chain of definitions for y, until we hit a non-copy
+           statement or a statement with a vuse.
+           if the statement we arrive at is a vuse of the operand we killed,
+           accessed through the same memory operation, then we have a
+           useless store (because it is *x = ... = *x).  */
+
+  FOR_EACH_BB (bb)
+    {
+      block_stmt_iterator bsi;
+
+      for (bsi = bsi_start (bb);
+           !bsi_end_p (bsi);)
+        {
+          tree stmt = bsi_stmt (bsi);
+          tree startat;
+          tree kill;
+          tree found;
+
+          /* Only consider stores of an SSA name with exactly one
+             V_MUST_DEF (a killing definition of a known symbol).  */
+          if (NUM_SSA_OPERANDS (stmt, SSA_OP_VMUSTDEF) != 1
+              || TREE_CODE (stmt) != MODIFY_EXPR
+              || TREE_CODE (TREE_OPERAND (stmt, 1)) != SSA_NAME)
+            {
+              bsi_next (&bsi);
+              continue;
+            }
+
+          kill = MUSTDEF_KILL (MUSTDEF_OPS (stmt));
+          startat = TREE_OPERAND (stmt, 1);
+          startat = SSA_NAME_DEF_STMT (startat);
+          found = follow_copies_till_vuse (startat);
+
+          if (found && TREE_CODE (found) == MODIFY_EXPR)
+            {
+
+              /* We want exactly one virtual use, and it should match up with
+                 the use being killed.  */
+
+              if (NUM_SSA_OPERANDS (found, SSA_OP_VUSE) != 1
+                  || VUSE_OP (VUSE_OPS (found)) != kill
+                  || !DECL_P (TREE_OPERAND (stmt, 0))
+                  || !operand_equal_p (TREE_OPERAND (found, 1),
+                                       TREE_OPERAND (stmt, 0), 0))
+                {
+                  bsi_next (&bsi);
+                  continue;
+                }
+
+              if (dump_file)
+                {
+                  fprintf (dump_file, "Eliminating useless store ");
+                  print_generic_stmt (dump_file, stmt, 0);
+                }
+              /* The stored symbol's SSA web changes; schedule it for
+                 renaming before removing the store.  */
+              mark_sym_for_renaming (TREE_OPERAND (stmt, 0));
+              bsi_remove (&bsi);
+            }
+          else
+            {
+              bsi_next (&bsi);
+              continue;
+            }
+        }
+    }
+}
+
+/* Gate: the pass is only worthwhile in unit-at-a-time mode.  */
+static bool
+gate_eustores(void)
+{
+  return flag_unit_at_a_time != 0;
+}
+
+/* Pass descriptor for useless-store elimination (see do_eustores).  */
+struct tree_opt_pass pass_eliminate_useless_stores =
+{
+  "eustores",				/* name */
+  gate_eustores,			/* gate */
+  do_eustores,				/* execute */
+  NULL,					/* sub */
+  NULL,					/* next */
+  0,					/* static_pass_number */
+  0,					/* tv_id */
+  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
+  0,					/* properties_provided */
+  0,					/* properties_destroyed */
+  0,					/* todo_flags_start */
+  TODO_update_ssa | TODO_dump_func
+    | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+  0					/* letter */
+};