/* SSA-PRE for trees.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
Free Software Foundation, Inc.
Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
<stevenb@suse.de>
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
+#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"
*/
/* For ease of terminology, "expression node" in the below refers to
- every expression node but GIMPLE_MODIFY_STMT, because GIMPLE_MODIFY_STMT's
+ every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
represent the actual statement containing the expressions we care about,
and we cache the value number by putting it in the expression. */
/* Representation of expressions on value numbers:
- Expressions consisting of value numbers are represented the same
+ Expressions consisting of value numbers are represented the same
way as our VN internally represents them, with an additional
"pre_expr" wrapping around them in order to facilitate storing all
of the expressions in the same sets. */
switch (e1->kind)
{
case CONSTANT:
- return expressions_equal_p (PRE_EXPR_CONSTANT (e1),
- PRE_EXPR_CONSTANT (e2));
+ return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
+ PRE_EXPR_CONSTANT (e2));
case NAME:
return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
case NARY:
switch (e->kind)
{
case CONSTANT:
- return iterative_hash_expr (PRE_EXPR_CONSTANT (e), 0);
+ return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
case NAME:
- return iterative_hash_expr (PRE_EXPR_NAME (e), 0);
+ return iterative_hash_hashval_t (SSA_NAME_VERSION (PRE_EXPR_NAME (e)), 0);
case NARY:
- return vn_nary_op_compute_hash (PRE_EXPR_NARY (e));
+ return PRE_EXPR_NARY (e)->hashcode;
case REFERENCE:
- return vn_reference_compute_hash (PRE_EXPR_REFERENCE (e));
+ return PRE_EXPR_REFERENCE (e)->hashcode;
default:
abort ();
}
#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi))
+#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
+ EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi))
+
/* Mapping from value id to expressions with that value_id. */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
the current iteration. */
bitmap_set_t new_sets;
+ /* A cache for value_dies_in_block_x. */
+ bitmap expr_dies;
+
/* True if we have visited this block during ANTIC calculation. */
unsigned int visited:1;
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
-#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
+#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
+#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
-/* Maximal set of values, used to initialize the ANTIC problem, which
- is an intersection problem. */
-static bitmap_set_t maximal_set;
-
/* Basic block list in postorder. */
static int *postorder;
} pre_stats;
static bool do_partial_partial;
-static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int , tree);
+static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr, bool);
static bitmap_set_t bitmap_set_new (void);
-static tree create_expression_by_pieces (basic_block, pre_expr, tree, tree,
- tree);
-static tree find_or_generate_expression (basic_block, pre_expr, tree, tree);
+static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
+ gimple, tree);
+static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
+ gimple);
+static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
and hash tables, so we use alloc pools for them. */
{
bitmap_set_t set;
+ gcc_assert (get_expr_value_id (e) == v);
+
if (v >= VEC_length (bitmap_set_t, value_expressions))
{
VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
switch (expr->kind)
{
case CONSTANT:
- return get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
+ {
+ unsigned int id;
+ id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
+ if (id == 0)
+ {
+ id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
+ add_to_value (id, expr);
+ }
+ return id;
+ }
case NAME:
return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
case NARY:
}
-/* A comparison function for use in qsort to top sort a bitmap set. Simply
- subtracts value ids, since they are created with leaves before
- their parent users (IE topological order). */
-
-static int
-value_id_compare (const void *pa, const void *pb)
-{
- const unsigned int vha = get_expr_value_id (*((const pre_expr *)pa));
- const unsigned int vhb = get_expr_value_id (*((const pre_expr *)pb));
-
- return vha - vhb;
-}
-
/* Generate an topological-ordered array of bitmap set SET. */
static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
- unsigned int i;
- bitmap_iterator bi;
+ unsigned int i, j;
+ bitmap_iterator bi, bj;
VEC(pre_expr, heap) *result = NULL;
- FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
- VEC_safe_push (pre_expr, heap, result, expression_for_id (i));
+ FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
+ {
+ /* The number of expressions having a given value is usually
+ relatively small. Thus, rather than making a vector of all
+ the expressions and sorting it by value-id, we walk the values
+ and check in the reverse mapping that tells us what expressions
+ have a given value, to filter those in our set. As a result,
+ the expressions are inserted in value-id order, which means
+ topological order.
- qsort (VEC_address (pre_expr, result), VEC_length (pre_expr, result),
- sizeof (pre_expr), value_id_compare);
+	 If this is somehow a significant loss for some cases, we can
+ choose which set to walk based on the set size. */
+ bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
+ FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
+ {
+ if (bitmap_bit_p (set->expressions, j))
+ VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
+ }
+ }
return result;
}
VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
i++)
{
+ bool closebrace = false;
if (vro->opcode != SSA_NAME
&& TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
- fprintf (outfile, "%s ", tree_code_name [vro->opcode]);
+ {
+ fprintf (outfile, "%s", tree_code_name [vro->opcode]);
+ if (vro->op0)
+ {
+ fprintf (outfile, "<");
+ closebrace = true;
+ }
+ }
if (vro->op0)
{
- if (vro->op1)
- fprintf (outfile, "<");
print_generic_expr (outfile, vro->op0, 0);
if (vro->op1)
{
fprintf (outfile, ",");
print_generic_expr (outfile, vro->op1, 0);
}
- if (vro->op1)
- fprintf (outfile, ">");
+ if (vro->op2)
+ {
+ fprintf (outfile, ",");
+ print_generic_expr (outfile, vro->op2, 0);
+ }
}
+ if (closebrace)
+ fprintf (outfile, ">");
if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
fprintf (outfile, ",");
}
fprintf (outfile, "}");
+ if (ref->vuse)
+ {
+ fprintf (outfile, "@");
+ print_generic_expr (outfile, ref->vuse, 0);
+ }
}
break;
}
return get_or_alloc_expr_for_name (t);
else if (is_gimple_min_invariant (t))
return get_or_alloc_expr_for_constant (t);
+ else
+ {
+ /* More complex expressions can result from SCCVN expression
+ simplification that inserts values for them. As they all
+	 do not have VOPs they get handled by the nary ops struct.  */
+ vn_nary_op_t result;
+ unsigned int result_id;
+ vn_nary_op_lookup (t, &result);
+ if (result != NULL)
+ {
+ pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
+ e->kind = NARY;
+ PRE_EXPR_NARY (e) = result;
+ result_id = lookup_expression_id (e);
+ if (result_id != 0)
+ {
+ pool_free (pre_expr_pool, e);
+ e = expression_for_id (result_id);
+ return e;
+ }
+ alloc_expression_id (e);
+ return e;
+ }
+ }
return NULL;
}
vn_nary_op_t nary = PRE_EXPR_NARY (e);
switch (TREE_CODE_CLASS (nary->opcode))
{
+ case tcc_expression:
+ if (nary->opcode == TRUTH_NOT_EXPR)
+ goto do_unary;
+ if (nary->opcode != TRUTH_AND_EXPR
+ && nary->opcode != TRUTH_OR_EXPR
+ && nary->opcode != TRUTH_XOR_EXPR)
+ return e;
+ /* Fallthrough. */
case tcc_binary:
+ case tcc_comparison:
{
/* We have to go from trees to pre exprs to value ids to
constants. */
tree naryop0 = nary->op[0];
tree naryop1 = nary->op[1];
- pre_expr rep0 = get_or_alloc_expr_for (naryop0);
- pre_expr rep1 = get_or_alloc_expr_for (naryop1);
- unsigned int vrep0 = get_expr_value_id (rep0);
- unsigned int vrep1 = get_expr_value_id (rep1);
- tree const0 = get_constant_for_value_id (vrep0);
- tree const1 = get_constant_for_value_id (vrep1);
- tree result = NULL;
- if (const0 && const1)
- result = fold_binary (nary->opcode, nary->type, const0,
- const1);
+ tree result;
+ if (!is_gimple_min_invariant (naryop0))
+ {
+ pre_expr rep0 = get_or_alloc_expr_for (naryop0);
+ unsigned int vrep0 = get_expr_value_id (rep0);
+ tree const0 = get_constant_for_value_id (vrep0);
+ if (const0)
+ naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
+ }
+ if (!is_gimple_min_invariant (naryop1))
+ {
+ pre_expr rep1 = get_or_alloc_expr_for (naryop1);
+ unsigned int vrep1 = get_expr_value_id (rep1);
+ tree const1 = get_constant_for_value_id (vrep1);
+ if (const1)
+ naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
+ }
+ result = fold_binary (nary->opcode, nary->type,
+ naryop0, naryop1);
if (result && is_gimple_min_invariant (result))
return get_or_alloc_expr_for_constant (result);
+ /* We might have simplified the expression to a
+ SSA_NAME for example from x_1 * 1. But we cannot
+ insert a PHI for x_1 unconditionally as x_1 might
+ not be available readily. */
return e;
}
+ case tcc_reference:
+ if (nary->opcode != REALPART_EXPR
+ && nary->opcode != IMAGPART_EXPR
+ && nary->opcode != VIEW_CONVERT_EXPR)
+ return e;
+ /* Fallthrough. */
case tcc_unary:
+do_unary:
{
- /* We have to go from trees to pre exprs to value ids to
- constants. */
+ /* We have to go from trees to pre exprs to value ids to
+ constants. */
tree naryop0 = nary->op[0];
- pre_expr rep0 = get_or_alloc_expr_for (naryop0);
- unsigned int vrep0 = get_expr_value_id (rep0);
- tree const0 = get_constant_for_value_id (vrep0);
- tree result = NULL;
+ tree const0, result;
+ if (is_gimple_min_invariant (naryop0))
+ const0 = naryop0;
+ else
+ {
+ pre_expr rep0 = get_or_alloc_expr_for (naryop0);
+ unsigned int vrep0 = get_expr_value_id (rep0);
+ const0 = get_constant_for_value_id (vrep0);
+ }
+ result = NULL;
if (const0)
- result = fold_unary (nary->opcode, nary->type, const0);
+ {
+ tree type1 = TREE_TYPE (nary->op[0]);
+ const0 = fold_convert (type1, const0);
+ result = fold_unary (nary->opcode, nary->type, const0);
+ }
if (result && is_gimple_min_invariant (result))
return get_or_alloc_expr_for_constant (result);
return e;
return e;
}
}
+ case REFERENCE:
+ {
+ vn_reference_t ref = PRE_EXPR_REFERENCE (e);
+ VEC (vn_reference_op_s, heap) *operands = ref->operands;
+ vn_reference_op_t op;
+
+ /* Try to simplify the translated expression if it is
+ a call to a builtin function with at most two arguments. */
+ op = VEC_index (vn_reference_op_s, operands, 0);
+ if (op->opcode == CALL_EXPR
+ && TREE_CODE (op->op0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
+ && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
+ && VEC_length (vn_reference_op_s, operands) >= 2
+ && VEC_length (vn_reference_op_s, operands) <= 3)
+ {
+ vn_reference_op_t arg0, arg1 = NULL;
+ bool anyconst = false;
+ arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ if (VEC_length (vn_reference_op_s, operands) > 2)
+ arg1 = VEC_index (vn_reference_op_s, operands, 2);
+ if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
+ || (arg0->opcode == ADDR_EXPR
+ && is_gimple_min_invariant (arg0->op0)))
+ anyconst = true;
+ if (arg1
+ && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
+ || (arg1->opcode == ADDR_EXPR
+ && is_gimple_min_invariant (arg1->op0))))
+ anyconst = true;
+ if (anyconst)
+ {
+ tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
+ arg1 ? 2 : 1,
+ arg0->op0,
+ arg1 ? arg1->op0 : NULL);
+ if (folded
+ && TREE_CODE (folded) == NOP_EXPR)
+ folded = TREE_OPERAND (folded, 0);
+ if (folded
+ && is_gimple_min_invariant (folded))
+ return get_or_alloc_expr_for_constant (folded);
+ }
+ }
+ return e;
+ }
default:
return e;
}
return e;
}
-/* Translate the vuses in the VUSES vector backwards through phi nodes
- in PHIBLOCK, so that they have the value they would have in
- BLOCK. */
+/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
+ it has the value it would have in BLOCK. Set *SAME_VALID to true
+ in case the new vuse doesn't change the value id of the OPERANDS. */
-static VEC(tree, gc) *
-translate_vuses_through_block (VEC (tree, gc) *vuses,
- basic_block phiblock,
- basic_block block)
+static tree
+translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
+ alias_set_type set, tree type, tree vuse,
+ basic_block phiblock,
+ basic_block block, bool *same_valid)
{
- tree oldvuse;
- VEC(tree, gc) *result = NULL;
- int i;
+ gimple phi = SSA_NAME_DEF_STMT (vuse);
+ ao_ref ref;
+ edge e = NULL;
+ bool use_oracle;
+
+ *same_valid = true;
+
+ if (gimple_bb (phi) != phiblock)
+ return vuse;
+
+ use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
+
+  /* Use the alias-oracle to find either the PHI node in this block,
+     the first VUSE used in this block that is equivalent to vuse or
+     the first VUSE whose definition in this block kills the value.  */
+ if (gimple_code (phi) == GIMPLE_PHI)
+ e = find_edge (block, phiblock);
+ else if (use_oracle)
+ while (!stmt_may_clobber_ref_p_1 (phi, &ref))
+ {
+ vuse = gimple_vuse (phi);
+ phi = SSA_NAME_DEF_STMT (vuse);
+ if (gimple_bb (phi) != phiblock)
+ return vuse;
+ if (gimple_code (phi) == GIMPLE_PHI)
+ {
+ e = find_edge (block, phiblock);
+ break;
+ }
+ }
+ else
+ return NULL_TREE;
- for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
+ if (e)
{
- tree phi = SSA_NAME_DEF_STMT (oldvuse);
- if (TREE_CODE (phi) == PHI_NODE
- && bb_for_stmt (phi) == phiblock)
+ if (use_oracle)
{
- edge e = find_edge (block, bb_for_stmt (phi));
- if (e)
- {
- tree def = PHI_ARG_DEF (phi, e->dest_idx);
- if (def != oldvuse)
- {
- if (!result)
- result = VEC_copy (tree, gc, vuses);
- VEC_replace (tree, result, i, def);
- }
- }
+ bitmap visited = NULL;
+ /* Try to find a vuse that dominates this phi node by skipping
+ non-clobbering statements. */
+ vuse = get_continuation_for_phi (phi, &ref, &visited);
+ if (visited)
+ BITMAP_FREE (visited);
}
+ else
+ vuse = NULL_TREE;
+ if (!vuse)
+ {
+ /* If we didn't find any, the value ID can't stay the same,
+ but return the translated vuse. */
+ *same_valid = false;
+ vuse = PHI_ARG_DEF (phi, e->dest_idx);
+ }
+ /* ??? We would like to return vuse here as this is the canonical
+ upmost vdef that this reference is associated with. But during
+ insertion of the references into the hash tables we only ever
+ directly insert with their direct gimple_vuse, hence returning
+ something else would make us not find the other expression. */
+ return PHI_ARG_DEF (phi, e->dest_idx);
}
- /* We avoid creating a new copy of the vuses unless something
- actually changed, so result can be NULL. */
- if (result)
- {
- sort_vuses (result);
- return result;
- }
- return vuses;
-
+ return NULL_TREE;
}
-/* Like find_leader, but checks for the value existing in SET1 *or*
+/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
SET2. This is used to avoid making a set consisting of the union
of PA_IN and ANTIC_IN during insert. */
{
pre_expr result;
- result = bitmap_find_leader (set1, val, NULL_TREE);
+ result = bitmap_find_leader (set1, val, NULL);
if (!result && set2)
- result = bitmap_find_leader (set2, val, NULL_TREE);
+ result = bitmap_find_leader (set2, val, NULL);
return result;
}
case CONSTANT:
return TREE_TYPE (PRE_EXPR_CONSTANT (e));
case REFERENCE:
- {
- vn_reference_op_t vro;
-
- gcc_assert (PRE_EXPR_REFERENCE (e)->operands);
- vro = VEC_index (vn_reference_op_s,
- PRE_EXPR_REFERENCE (e)->operands,
- 0);
- /* We don't store type along with COMPONENT_REF because it is
- always the same as FIELD_DECL's type. */
- if (!vro->type)
- {
- gcc_assert (vro->opcode == COMPONENT_REF);
- return TREE_TYPE (vro->op0);
- }
- return vro->type;
- }
-
+ return PRE_EXPR_REFERENCE (e)->type;
case NARY:
return PRE_EXPR_NARY (e)->type;
}
case NAME:
return PRE_EXPR_NAME (e);
case CONSTANT:
+ return PRE_EXPR_CONSTANT (e);
case NARY:
case REFERENCE:
{
get_var_ann (pretemp);
}
- name = make_ssa_name (pretemp, build_empty_stmt ());
+ name = make_ssa_name (pretemp, gimple_build_nop ());
VN_INFO_GET (name)->value_id = value_id;
if (e->kind == CONSTANT)
VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
if (constant != expr)
return constant;
get_or_alloc_expression_id (expr);
- add_to_value (new_val_id, expr);
}
+ add_to_value (new_val_id, expr);
}
phi_trans_add (oldexpr, expr, pred);
return expr;
}
break;
+
case REFERENCE:
{
vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
VEC (vn_reference_op_s, heap) *operands = ref->operands;
- VEC (tree, gc) *vuses = ref->vuses;
- VEC (tree, gc) *newvuses = vuses;
+ tree vuse = ref->vuse;
+ tree newvuse = vuse;
VEC (vn_reference_op_s, heap) *newoperands = NULL;
- bool changed = false;
- unsigned int i;
+ bool changed = false, same_valid = true;
+ unsigned int i, j;
vn_reference_op_t operand;
vn_reference_t newref;
- for (i = 0; VEC_iterate (vn_reference_op_s, operands, i, operand); i++)
+ for (i = 0, j = 0;
+ VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
{
pre_expr opresult;
pre_expr leader;
{
tree name = get_representative_for (opresult);
if (!name)
- return NULL;
+ break;
op0 = name;
}
else if (!opresult)
- return NULL;
+ break;
}
changed |= op0 != oldop0;
{
tree name = get_representative_for (opresult);
if (!name)
- return NULL;
+ break;
op1 = name;
}
else if (!opresult)
- return NULL;
+ break;
}
+ /* We can't possibly insert these. */
+ else if (op1 && !is_gimple_min_invariant (op1))
+ break;
changed |= op1 != oldop1;
if (op2 && TREE_CODE (op2) == SSA_NAME)
{
{
tree name = get_representative_for (opresult);
if (!name)
- return NULL;
+ break;
op2 = name;
}
else if (!opresult)
- return NULL;
+ break;
}
+ /* We can't possibly insert these. */
+ else if (op2 && !is_gimple_min_invariant (op2))
+ break;
changed |= op2 != oldop2;
if (!newoperands)
newop.op0 = op0;
newop.op1 = op1;
newop.op2 = op2;
- VEC_replace (vn_reference_op_s, newoperands, i, &newop);
+ VEC_replace (vn_reference_op_s, newoperands, j, &newop);
+ /* If it transforms from an SSA_NAME to an address, fold with
+ a preceding indirect reference. */
+ if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
+ && VEC_index (vn_reference_op_s,
+ newoperands, j - 1)->opcode == INDIRECT_REF)
+ vn_reference_fold_indirect (&newoperands, &j);
+ }
+ if (i != VEC_length (vn_reference_op_s, operands))
+ {
+ if (newoperands)
+ VEC_free (vn_reference_op_s, heap, newoperands);
+ return NULL;
}
- newvuses = translate_vuses_through_block (vuses, phiblock, pred);
- changed |= newvuses != vuses;
+ if (vuse)
+ {
+ newvuse = translate_vuse_through_block (newoperands,
+ ref->set, ref->type,
+ vuse, phiblock, pred,
+ &same_valid);
+ if (newvuse == NULL_TREE)
+ {
+ VEC_free (vn_reference_op_s, heap, newoperands);
+ return NULL;
+ }
+ }
- if (changed)
+ if (changed || newvuse != vuse)
{
- tree result = vn_reference_lookup_pieces (newvuses,
- newoperands,
- &newref);
unsigned int new_val_id;
+ pre_expr constant;
+ tree result = vn_reference_lookup_pieces (newvuse, ref->set,
+ ref->type,
+ newoperands,
+ &newref, true);
if (newref)
VEC_free (vn_reference_op_s, heap, newoperands);
if (result && is_gimple_min_invariant (result))
- return get_or_alloc_expr_for_constant (result);
+ {
+ gcc_assert (!newoperands);
+ return get_or_alloc_expr_for_constant (result);
+ }
expr = (pre_expr) pool_alloc (pre_expr_pool);
expr->kind = REFERENCE;
if (newref)
{
PRE_EXPR_REFERENCE (expr) = newref;
+ constant = fully_constant_expression (expr);
+ if (constant != expr)
+ return constant;
+
new_val_id = newref->value_id;
get_or_alloc_expression_id (expr);
}
else
{
- new_val_id = get_next_value_id ();
- VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
- get_max_value_id() + 1);
- newref = vn_reference_insert_pieces (newvuses,
+ if (changed || !same_valid)
+ {
+ new_val_id = get_next_value_id ();
+ VEC_safe_grow_cleared (bitmap_set_t, heap,
+ value_expressions,
+ get_max_value_id() + 1);
+ }
+ else
+ new_val_id = ref->value_id;
+ newref = vn_reference_insert_pieces (newvuse, ref->set,
+ ref->type,
newoperands,
result, new_val_id);
+ newoperands = NULL;
PRE_EXPR_REFERENCE (expr) = newref;
+ constant = fully_constant_expression (expr);
+ if (constant != expr)
+ return constant;
get_or_alloc_expression_id (expr);
- add_to_value (new_val_id, expr);
}
+ add_to_value (new_val_id, expr);
}
+ VEC_free (vn_reference_op_s, heap, newoperands);
phi_trans_add (oldexpr, expr, pred);
return expr;
}
break;
+
case NAME:
{
- tree phi = NULL;
+ gimple phi = NULL;
edge e;
- tree def_stmt;
+ gimple def_stmt;
tree name = PRE_EXPR_NAME (expr);
def_stmt = SSA_NAME_DEF_STMT (name);
- if (TREE_CODE (def_stmt) == PHI_NODE
- && bb_for_stmt (def_stmt) == phiblock)
+ if (gimple_code (def_stmt) == GIMPLE_PHI
+ && gimple_bb (def_stmt) == phiblock)
phi = def_stmt;
else
return expr;
- e = find_edge (pred, bb_for_stmt (phi));
+ e = find_edge (pred, gimple_bb (phi));
if (e)
{
tree def = PHI_ARG_DEF (phi, e->dest_idx);
pre_expr newexpr;
+ if (TREE_CODE (def) == SSA_NAME)
+ def = VN_INFO (def)->valnum;
+
/* Handle constant. */
if (is_gimple_min_invariant (def))
return get_or_alloc_expr_for_constant (def);
pre_expr translated;
translated = phi_translate (expr, set, NULL, pred, phiblock);
- /* Don't add constants or empty translations to the cache, since
- we won't look them up that way, or use the result, anyway. */
- if (translated && !value_id_constant_p (get_expr_value_id (translated)))
+ /* Don't add empty translations to the cache */
+ if (translated)
phi_trans_add (expr, translated, pred);
if (translated != NULL)
Return NULL if no leader is found. */
static pre_expr
-bitmap_find_leader (bitmap_set_t set, unsigned int val, tree stmt)
+bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
if (value_id_constant_p (val))
{
be an SSA_NAME first in the list of expressions. */
if (stmt)
{
- tree def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
- if (TREE_CODE (def_stmt) != PHI_NODE
- && bb_for_stmt (def_stmt) == bb_for_stmt (stmt)
- && stmt_ann (def_stmt)->uid >= stmt_ann (stmt)->uid)
+ gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
+ if (gimple_code (def_stmt) != GIMPLE_PHI
+ && gimple_bb (def_stmt) == gimple_bb (stmt)
+ && gimple_uid (def_stmt) >= gimple_uid (stmt))
continue;
}
return val;
static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
- int i;
- tree vuse;
- VEC (tree, gc) *vuses = PRE_EXPR_REFERENCE (expr)->vuses;
+ tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
+ vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
+ gimple def;
+ gimple_stmt_iterator gsi;
+ unsigned id = get_expression_id (expr);
+ bool res = false;
+ ao_ref ref;
+
+ if (!vuse)
+ return false;
- /* Conservatively, a value dies if it's vuses are defined in this
- block, unless they come from phi nodes (which are merge operations,
- rather than stores. */
- for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
+ /* Lookup a previously calculated result. */
+ if (EXPR_DIES (block)
+ && bitmap_bit_p (EXPR_DIES (block), id * 2))
+ return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
+
+ /* A memory expression {e, VUSE} dies in the block if there is a
+ statement that may clobber e. If, starting statement walk from the
+ top of the basic block, a statement uses VUSE there can be no kill
+ inbetween that use and the original statement that loaded {e, VUSE},
+ so we can stop walking. */
+ ref.base = NULL_TREE;
+ for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
{
- tree def = SSA_NAME_DEF_STMT (vuse);
+ tree def_vuse, def_vdef;
+ def = gsi_stmt (gsi);
+ def_vuse = gimple_vuse (def);
+ def_vdef = gimple_vdef (def);
- if (bb_for_stmt (def) != block)
- continue;
- if (TREE_CODE (def) == PHI_NODE)
+ /* Not a memory statement. */
+ if (!def_vuse)
continue;
- return true;
+
+ /* Not a may-def. */
+ if (!def_vdef)
+ {
+ /* A load with the same VUSE, we're done. */
+ if (def_vuse == vuse)
+ break;
+
+ continue;
+ }
+
+ /* Init ref only if we really need it. */
+ if (ref.base == NULL_TREE
+ && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
+ refx->operands))
+ {
+ res = true;
+ break;
+ }
+ /* If the statement may clobber expr, it dies. */
+ if (stmt_may_clobber_ref_p_1 (def, &ref))
+ {
+ res = true;
+ break;
+ }
}
- return false;
+
+ /* Remember the result. */
+ if (!EXPR_DIES (block))
+ EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
+ bitmap_set_bit (EXPR_DIES (block), id * 2);
+ if (res)
+ bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
+
+ return res;
}
ONLY SET2 CAN BE NULL.
This means that we have a leader for each part of the expression
(if it consists of values), or the expression is an SSA_NAME.
- For loads/calls, we also see if the vuses are killed in this block.
-*/
+ For loads/calls, we also see if the vuse is killed in this block. */
static bool
valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
if (!vro_valid_in_sets (set1, set2, vro))
return false;
}
+ if (ref->vuse)
+ {
+ gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
+ if (!gimple_nop_p (def_stmt)
+ && gimple_bb (def_stmt) != block
+ && !dominated_by_p (CDI_DOMINATORS,
+ block, gimple_bb (def_stmt)))
+ return false;
+ }
return !value_dies_in_block_x (expr, block);
}
default:
{
VEC(basic_block, heap) * worklist;
size_t i;
- basic_block bprime, first;
+ basic_block bprime, first = NULL;
worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
FOR_EACH_EDGE (e, ei, block->succs)
- VEC_quick_push (basic_block, worklist, e->dest);
- first = VEC_index (basic_block, worklist, 0);
-
- if (phi_nodes (first))
{
- bitmap_set_t from = ANTIC_IN (first);
-
- if (!BB_VISITED (first))
- from = maximal_set;
- phi_translate_set (ANTIC_OUT, from, block, first);
+ if (!first
+ && BB_VISITED (e->dest))
+ first = e->dest;
+ else if (BB_VISITED (e->dest))
+ VEC_quick_push (basic_block, worklist, e->dest);
}
- else
+
+ /* Of multiple successors we have to have visited one already. */
+ if (!first)
{
- if (!BB_VISITED (first))
- bitmap_set_copy (ANTIC_OUT, maximal_set);
- else
- bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
+ SET_BIT (changed_blocks, block->index);
+ BB_VISITED (block) = 0;
+ BB_DEFERRED (block) = 1;
+ changed = true;
+ VEC_free (basic_block, heap, worklist);
+ goto maybe_dump_sets;
}
- for (i = 1; VEC_iterate (basic_block, worklist, i, bprime); i++)
+ if (phi_nodes (first))
+ phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
+ else
+ bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
+
+ for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
{
if (phi_nodes (bprime))
{
bitmap_set_t tmp = bitmap_set_new ();
- bitmap_set_t from = ANTIC_IN (bprime);
-
- if (!BB_VISITED (bprime))
- from = maximal_set;
- phi_translate_set (tmp, from, block, bprime);
+ phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
bitmap_set_and (ANTIC_OUT, tmp);
bitmap_set_free (tmp);
}
else
- {
- if (!BB_VISITED (bprime))
- bitmap_set_and (ANTIC_OUT, maximal_set);
- else
- bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
- }
+ bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
}
VEC_free (basic_block, heap, worklist);
}
fprintf (dump_file, "Starting iteration %d\n", num_iterations);
num_iterations++;
changed = false;
- for (i = 0; i < last_basic_block - NUM_FIXED_BLOCKS; i++)
+ for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
{
if (TEST_BIT (changed_blocks, postorder[i]))
{
fprintf (dump_file, "Starting iteration %d\n", num_iterations);
num_iterations++;
changed = false;
- for (i = 0; i < last_basic_block - NUM_FIXED_BLOCKS; i++)
+ for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
{
if (TEST_BIT (changed_blocks, postorder[i]))
{
if we have a pure or constant call. */
static bool
-can_value_number_call (tree stmt)
+can_value_number_call (gimple stmt)
{
- tree call = get_call_expr_in (stmt);
-
- if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
+ if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
return true;
return false;
}
-/* Return true if OP is an exception handler related operation, such as
- FILTER_EXPR or EXC_PTR_EXPR. */
-
-static bool
-is_exception_related (tree op)
-{
- return TREE_CODE (op) == FILTER_EXPR || TREE_CODE (op) == EXC_PTR_EXPR;
-}
-
-/* Return true if OP is a tree which we can perform PRE on
- on. This may not match the operations we can value number, but in
+/* Return true if OP is a tree which we can perform PRE on.
+ This may not match the operations we can value number, but in
a perfect world would. */
static bool
/* Inserted expressions are placed onto this worklist, which is used
for performing quick dead code elimination of insertions we made
that didn't turn out to be necessary. */
-static VEC(tree,heap) *inserted_exprs;
+static VEC(gimple,heap) *inserted_exprs;
+static bitmap inserted_phi_names;
/* Pool allocated fake store expressions are placed onto this
worklist, which, after performing dead code elimination, is walked
to see which expressions need to be put into GC'able memory */
-static VEC(tree, heap) *need_creation;
+static VEC(gimple, heap) *need_creation;
-/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
- COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
- trying to rename aggregates into ssa form directly, which is a no
- no.
+/* The actual worker for create_component_ref_by_pieces. */
- Thus, this routine doesn't create temporaries, it just builds a
- single access expression for the array, calling
- find_or_generate_expression to build the innermost pieces.
-
- This function is a subroutine of create_expression_by_pieces, and
- should not be called on it's own unless you really know what you
- are doing.
-*/
static tree
-create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
- unsigned int operand,
- tree stmts,
- tree domstmt,
- bool in_call)
+create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
+ unsigned int *operand, gimple_seq *stmts,
+ gimple domstmt)
{
vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
- operand);
+ *operand);
tree genop;
+ ++*operand;
switch (currop->opcode)
{
case CALL_EXPR:
{
- tree folded;
- unsigned int i;
- vn_reference_op_t declop = VEC_index (vn_reference_op_s,
- ref->operands, 1);
- unsigned int nargs = VEC_length (vn_reference_op_s, ref->operands) - 2;
- tree *args = XNEWVEC (tree, nargs);
-
- for (i = 0; i < nargs; i++)
+ tree folded, sc = currop->op1;
+ unsigned int nargs = 0;
+ tree *args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
+ ref->operands) - 1);
+ while (*operand < VEC_length (vn_reference_op_s, ref->operands))
{
- args[i] = create_component_ref_by_pieces (block, ref,
- operand + 2 + i, stmts,
- domstmt, true);
+ args[nargs] = create_component_ref_by_pieces_1 (block, ref,
+ operand, stmts,
+ domstmt);
+ nargs++;
}
- folded = build_call_array (currop->type, declop->op0, nargs, args);
+ folded = build_call_array (currop->type,
+ TREE_CODE (currop->op0) == FUNCTION_DECL
+ ? build_fold_addr_expr (currop->op0)
+ : currop->op0,
+ nargs, args);
free (args);
+ if (sc)
+ {
+ pre_expr scexpr = get_or_alloc_expr_for (sc);
+ sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
+ if (!sc)
+ return NULL_TREE;
+ CALL_EXPR_STATIC_CHAIN (folded) = sc;
+ }
return folded;
}
break;
+ case TARGET_MEM_REF:
+ {
+ vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
+ *operand);
+ pre_expr op0expr;
+ tree genop0 = NULL_TREE;
+ tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
+ stmts, domstmt);
+ if (!baseop)
+ return NULL_TREE;
+ if (currop->op0)
+ {
+ op0expr = get_or_alloc_expr_for (currop->op0);
+ genop0 = find_or_generate_expression (block, op0expr,
+ stmts, domstmt);
+ if (!genop0)
+ return NULL_TREE;
+ }
+ if (DECL_P (baseop))
+ return build6 (TARGET_MEM_REF, currop->type,
+ baseop, NULL_TREE,
+ genop0, currop->op1, currop->op2,
+ unshare_expr (nextop->op1));
+ else
+ return build6 (TARGET_MEM_REF, currop->type,
+ NULL_TREE, baseop,
+ genop0, currop->op1, currop->op2,
+ unshare_expr (nextop->op1));
+ }
+ break;
+ case ADDR_EXPR:
+ if (currop->op0)
+ {
+ gcc_assert (is_gimple_min_invariant (currop->op0));
+ return currop->op0;
+ }
+ /* Fallthrough. */
case REALPART_EXPR:
case IMAGPART_EXPR:
case VIEW_CONVERT_EXPR:
{
tree folded;
- tree genop0 = create_component_ref_by_pieces (block, ref,
- operand + 1,
- stmts, domstmt,
- in_call);
+ tree genop0 = create_component_ref_by_pieces_1 (block, ref,
+ operand,
+ stmts, domstmt);
if (!genop0)
return NULL_TREE;
folded = fold_build1 (currop->opcode, currop->type,
case MISALIGNED_INDIRECT_REF:
case INDIRECT_REF:
{
- /* Inside a CALL_EXPR op0 is the actual indirect_ref. */
- if (in_call)
- {
- tree folded;
- tree op0 = TREE_OPERAND (currop->op0, 0);
- pre_expr op0expr = get_or_alloc_expr_for (op0);
- tree genop0 = find_or_generate_expression (block, op0expr, stmts,
- domstmt);
- if (!genop0)
- return NULL_TREE;
- folded = fold_build1 (currop->opcode, currop->type,
- genop0);
- return folded;
- }
- else
- {
-
- tree folded;
- tree genop1 = create_component_ref_by_pieces (block, ref,
- operand + 1,
- stmts, domstmt,
- in_call);
- if (!genop1)
- return NULL_TREE;
- genop1 = fold_convert (build_pointer_type (currop->type),
- genop1);
+ tree folded;
+ tree genop1 = create_component_ref_by_pieces_1 (block, ref,
+ operand,
+ stmts, domstmt);
+ if (!genop1)
+ return NULL_TREE;
+ genop1 = fold_convert (build_pointer_type (currop->type),
+ genop1);
- folded = fold_build1 (currop->opcode, currop->type,
- genop1);
- return folded;
- }
+ if (currop->opcode == MISALIGNED_INDIRECT_REF)
+ folded = fold_build2 (currop->opcode, currop->type,
+ genop1, currop->op1);
+ else
+ folded = fold_build1 (currop->opcode, currop->type,
+ genop1);
+ return folded;
}
break;
case BIT_FIELD_REF:
{
tree folded;
- tree genop0 = create_component_ref_by_pieces (block, ref, operand + 1,
- stmts, domstmt,
- in_call);
+ tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
+ stmts, domstmt);
pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
tree genop1;
case ARRAY_RANGE_REF:
case ARRAY_REF:
{
- vn_reference_op_t op0expr;
tree genop0;
tree genop1 = currop->op0;
pre_expr op1expr;
tree genop2 = currop->op1;
pre_expr op2expr;
- tree genop3;
- op0expr = VEC_index (vn_reference_op_s, ref->operands, operand + 1);
- genop0 = create_component_ref_by_pieces (block, ref, operand + 1,
- stmts, domstmt,
- in_call);
+ tree genop3 = currop->op2;
+ pre_expr op3expr;
+ genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
+ stmts, domstmt);
if (!genop0)
return NULL_TREE;
op1expr = get_or_alloc_expr_for (genop1);
if (!genop2)
return NULL_TREE;
}
-
- genop3 = currop->op2;
+ if (genop3)
+ {
+ tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
+ genop3 = size_binop (EXACT_DIV_EXPR, genop3,
+ size_int (TYPE_ALIGN_UNIT (elmt_type)));
+ op3expr = get_or_alloc_expr_for (genop3);
+ genop3 = find_or_generate_expression (block, op3expr, stmts,
+ domstmt);
+ if (!genop3)
+ return NULL_TREE;
+ }
return build4 (currop->opcode, currop->type, genop0, genop1,
genop2, genop3);
}
tree op1;
tree genop2 = currop->op1;
pre_expr op2expr;
- op0 = create_component_ref_by_pieces (block, ref, operand + 1,
- stmts, domstmt, in_call);
+ op0 = create_component_ref_by_pieces_1 (block, ref, operand,
+ stmts, domstmt);
if (!op0)
return NULL_TREE;
/* op1 should be a FIELD_DECL, which are represented by
case CONST_DECL:
case RESULT_DECL:
case FUNCTION_DECL:
- /* For ADDR_EXPR in a CALL_EXPR, op0 is actually the entire
- ADDR_EXPR, not just it's operand. */
- case ADDR_EXPR:
- if (currop->opcode == ADDR_EXPR)
- gcc_assert (currop->op0 != NULL);
return currop->op0;
default:
}
}
+/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
+ COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
+ trying to rename aggregates into ssa form directly, which is a no-no.
+
+ Thus, this routine doesn't create temporaries, it just builds a
+ single access expression for the array, calling
+ find_or_generate_expression to build the innermost pieces.
+
+ This function is a subroutine of create_expression_by_pieces, and
+ should not be called on its own unless you really know what you
+ are doing. */
+
+static tree
+create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
+ gimple_seq *stmts, gimple domstmt)
+{
+ unsigned int op = 0;
+ return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
+}
+
/* Find a leader for an expression, or generate one using
create_expression_by_pieces if it's ANTIC but
complex.
on failure. */
static tree
-find_or_generate_expression (basic_block block, pre_expr expr, tree stmts,
- tree domstmt)
+find_or_generate_expression (basic_block block, pre_expr expr,
+ gimple_seq *stmts, gimple domstmt)
{
pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
get_expr_value_id (expr), domstmt);
}
/* If it's still NULL, it must be a complex expression, so generate
- it recursively. */
- if (genop == NULL)
+ it recursively. Not so for FRE though. */
+ if (genop == NULL
+ && !in_fre)
{
bitmap_set_t exprset;
unsigned int lookfor = get_expr_value_id (expr);
return genop;
}
-#define NECESSARY(stmt) stmt->base.asm_written_flag
+#define NECESSARY GF_PLF_1
/* Create an expression in pieces, so that we can handle very complex
expressions that may be ANTIC, but not necessary GIMPLE.
can return NULL_TREE to signal failure. */
static tree
-create_expression_by_pieces (basic_block block, pre_expr expr, tree stmts,
- tree domstmt,
- tree type)
+create_expression_by_pieces (basic_block block, pre_expr expr,
+ gimple_seq *stmts, gimple domstmt, tree type)
{
tree temp, name;
- tree folded, forced_stmts, newexpr;
+ tree folded;
+ gimple_seq forced_stmts = NULL;
unsigned int value_id;
- tree_stmt_iterator tsi;
+ gimple_stmt_iterator gsi;
tree exprtype = type ? type : get_expr_type (expr);
pre_expr nameexpr;
+ gimple newstmt;
switch (expr->kind)
{
case REFERENCE:
{
vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
- folded = create_component_ref_by_pieces (block, ref, 0, stmts,
- domstmt, false);
+ folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
}
break;
case NARY:
stmts, domstmt);
if (!genop1 || !genop2)
return NULL_TREE;
+ genop1 = fold_convert (TREE_TYPE (nary->op[0]),
+ genop1);
/* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
may be a constant with the wrong type. */
if (nary->opcode == POINTER_PLUS_EXPR)
genop2 = fold_convert (sizetype, genop2);
+ else
+ genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
+
folded = fold_build2 (nary->opcode, nary->type,
genop1, genop2);
}
stmts, domstmt);
if (!genop1)
return NULL_TREE;
+ genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
+
folded = fold_build1 (nary->opcode, nary->type,
genop1);
}
default:
return NULL_TREE;
}
- folded = fold_convert (exprtype, folded);
+
+ if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
+ folded = fold_convert (exprtype, folded);
+
/* Force the generated expression to be a sequence of GIMPLE
statements.
We have to call unshare_expr because force_gimple_operand may
modify the tree we pass to it. */
- newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts,
- false, NULL);
+ folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
+ false, NULL);
/* If we have any intermediate expressions to the value sets, add them
to the value sets and chain them in the instruction stream. */
if (forced_stmts)
{
- tsi = tsi_start (forced_stmts);
- for (; !tsi_end_p (tsi); tsi_next (&tsi))
+ gsi = gsi_start (forced_stmts);
+ for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- tree stmt = tsi_stmt (tsi);
- tree forcedname = GIMPLE_STMT_OPERAND (stmt, 0);
+ gimple stmt = gsi_stmt (gsi);
+ tree forcedname = gimple_get_lhs (stmt);
pre_expr nameexpr;
- VEC_safe_push (tree, heap, inserted_exprs, stmt);
+ VEC_safe_push (gimple, heap, inserted_exprs, stmt);
if (TREE_CODE (forcedname) == SSA_NAME)
{
VN_INFO_GET (forcedname)->valnum = forcedname;
VN_INFO (forcedname)->value_id = get_next_value_id ();
nameexpr = get_or_alloc_expr_for_name (forcedname);
add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
- bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
+ if (!in_fre)
+ bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
}
mark_symbols_for_renaming (stmt);
}
- tsi = tsi_last (stmts);
- tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
+ gimple_seq_add_seq (stmts, forced_stmts);
}
/* Build and insert the assignment of the end result to the temporary
|| TREE_CODE (exprtype) == VECTOR_TYPE)
DECL_GIMPLE_REG_P (temp) = 1;
- newexpr = build_gimple_modify_stmt (temp, newexpr);
- name = make_ssa_name (temp, newexpr);
- GIMPLE_STMT_OPERAND (newexpr, 0) = name;
- NECESSARY (newexpr) = 0;
+ newstmt = gimple_build_assign (temp, folded);
+ name = make_ssa_name (temp, newstmt);
+ gimple_assign_set_lhs (newstmt, name);
+ gimple_set_plf (newstmt, NECESSARY, false);
- tsi = tsi_last (stmts);
- tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
- VEC_safe_push (tree, heap, inserted_exprs, newexpr);
+ gimple_seq_add_stmt (stmts, newstmt);
+ VEC_safe_push (gimple, heap, inserted_exprs, newstmt);
/* All the symbols in NEWEXPR should be put into SSA form. */
- mark_symbols_for_renaming (newexpr);
+ mark_symbols_for_renaming (newstmt);
/* Add a value number to the temporary.
The value may already exist in either NEW_SETS, or AVAIL_OUT, because
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Inserted ");
- print_generic_expr (dump_file, newexpr, 0);
+ print_gimple_stmt (dump_file, newstmt, 0, 0);
fprintf (dump_file, " in predecessor %d\n", block->index);
}
}
+/* Returns true if we want to inhibit the insertions of PHI nodes
+ for the given EXPR for basic block BB (a member of a loop).
+ We want to do this when we fear that the induction variable we
+ create might inhibit vectorization. */
+
+static bool
+inhibit_phi_insertion (basic_block bb, pre_expr expr)
+{
+ vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
+ VEC (vn_reference_op_s, heap) *ops = vr->operands;
+ vn_reference_op_t op;
+ unsigned i;
+
+ /* If we aren't going to vectorize we don't inhibit anything. */
+ if (!flag_tree_vectorize)
+ return false;
+
+ /* Otherwise we inhibit the insertion when the address of the
+ memory reference is a simple induction variable. In other
+ cases the vectorizer won't do anything anyway (either it's
+ loop invariant or a complicated expression). */
+ for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
+ {
+ switch (op->opcode)
+ {
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ if (TREE_CODE (op->op0) != SSA_NAME)
+ break;
+ /* Fallthru. */
+ case SSA_NAME:
+ {
+ basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
+ affine_iv iv;
+ /* Default defs are loop invariant. */
+ if (!defbb)
+ break;
+ /* Defined outside this loop, also loop invariant. */
+ if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
+ break;
+ /* If it's a simple induction variable inhibit insertion,
+ the vectorizer might be interested in this one. */
+ if (simple_iv (bb->loop_father, bb->loop_father,
+ op->op0, &iv, true))
+ return true;
+ /* No simple IV, vectorizer can't do anything, hence no
+ reason to inhibit the transformation for this operand. */
+ break;
+ }
+ default:
+ break;
+ }
+ }
+ return false;
+}
+
/* Insert the to-be-made-available values of expression EXPRNUM for each
predecessor, stored in AVAIL, into the predecessors of BLOCK, and
merge the result with a phi node, given the same value number as
edge_iterator ei;
tree type = get_expr_type (expr);
tree temp;
+ gimple phi;
if (dump_file && (dump_flags & TDF_DETAILS))
{
}
/* Make sure we aren't creating an induction variable. */
- if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2
- && expr->kind != REFERENCE)
+ if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
{
bool firstinsideloop = false;
bool secondinsideloop = false;
secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
EDGE_PRED (block, 1)->src);
/* Induction variables only have one edge inside the loop. */
- if (firstinsideloop ^ secondinsideloop)
+ if ((firstinsideloop ^ secondinsideloop)
+ && (expr->kind != REFERENCE
+ || inhibit_phi_insertion (block, expr)))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
}
}
+ /* Make sure we are not inserting trapping expressions. */
+ FOR_EACH_EDGE (pred, ei, block->preds)
+ {
+ bprime = pred->src;
+ eprime = avail[bprime->index];
+ if (eprime->kind == NARY
+ && vn_nary_may_trap (PRE_EXPR_NARY (eprime)))
+ return false;
+ }
/* Make the necessary insertions. */
FOR_EACH_EDGE (pred, ei, block->preds)
{
- tree stmts = alloc_stmt_list ();
+ gimple_seq stmts = NULL;
tree builtexpr;
bprime = pred->src;
eprime = avail[bprime->index];
{
builtexpr = create_expression_by_pieces (bprime,
eprime,
- stmts, NULL_TREE,
+ &stmts, NULL,
type);
gcc_assert (!(pred->flags & EDGE_ABNORMAL));
- bsi_insert_on_edge (pred, stmts);
+ gsi_insert_seq_on_edge (pred, stmts);
avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
insertions = true;
}
should give us back a constant with the right type.
*/
tree constant = PRE_EXPR_CONSTANT (eprime);
- if (TREE_TYPE (constant) != type)
+ if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
{
tree builtexpr = fold_convert (type, constant);
- if (is_gimple_min_invariant (builtexpr))
- {
- PRE_EXPR_CONSTANT (eprime) = builtexpr;
- }
- else
+ if (!is_gimple_min_invariant (builtexpr))
{
tree forcedexpr = force_gimple_operand (builtexpr,
&stmts, true,
NULL);
- if (is_gimple_min_invariant (forcedexpr))
- {
- PRE_EXPR_CONSTANT (eprime) = forcedexpr;
- }
- else
+ if (!is_gimple_min_invariant (forcedexpr))
{
if (forcedexpr != builtexpr)
{
}
if (stmts)
{
- tree_stmt_iterator tsi;
- tsi = tsi_start (stmts);
- for (; !tsi_end_p (tsi); tsi_next (&tsi))
+ gimple_stmt_iterator gsi;
+ gsi = gsi_start (stmts);
+ for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- tree stmt = tsi_stmt (tsi);
- tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
- VEC_safe_push (tree, heap, inserted_exprs, stmt);
- NECESSARY (lhs) = 0;
+ gimple stmt = gsi_stmt (gsi);
+ VEC_safe_push (gimple, heap, inserted_exprs, stmt);
+ gimple_set_plf (stmt, NECESSARY, false);
}
- bsi_insert_on_edge (pred, stmts);
+ gsi_insert_seq_on_edge (pred, stmts);
}
- NECESSARY (forcedexpr) = 0;
avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
}
}
our IL requires all operands of a phi node have the same
type. */
tree name = PRE_EXPR_NAME (eprime);
- if (TREE_TYPE (name) != type)
+ if (!useless_type_conversion_p (type, TREE_TYPE (name)))
{
tree builtexpr;
tree forcedexpr;
- /* When eliminating casts through unions,
- we sometimes want to convert a real to an integer,
- which fold_convert will ICE on */
- if (fold_convertible_p (type, name))
- builtexpr = fold_convert (type, name);
- else
- builtexpr = convert (type, name);
-
+ builtexpr = fold_convert (type, name);
forcedexpr = force_gimple_operand (builtexpr,
&stmts, true,
NULL);
if (stmts)
{
- tree_stmt_iterator tsi;
- tsi = tsi_start (stmts);
- for (; !tsi_end_p (tsi); tsi_next (&tsi))
+ gimple_stmt_iterator gsi;
+ gsi = gsi_start (stmts);
+ for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- tree stmt = tsi_stmt (tsi);
- tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
- VEC_safe_push (tree, heap, inserted_exprs, stmt);
- NECESSARY (lhs) = 0;
+ gimple stmt = gsi_stmt (gsi);
+ VEC_safe_push (gimple, heap, inserted_exprs, stmt);
+ gimple_set_plf (stmt, NECESSARY, false);
}
- bsi_insert_on_edge (pred, stmts);
+ gsi_insert_seq_on_edge (pred, stmts);
}
- NECESSARY (forcedexpr) = 0;
avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
}
}
if (TREE_CODE (type) == COMPLEX_TYPE
|| TREE_CODE (type) == VECTOR_TYPE)
DECL_GIMPLE_REG_P (temp) = 1;
- temp = create_phi_node (temp, block);
-
- NECESSARY (temp) = 0;
- VN_INFO_GET (PHI_RESULT (temp))->valnum = PHI_RESULT (temp);
- VN_INFO (PHI_RESULT (temp))->value_id = val;
- VEC_safe_push (tree, heap, inserted_exprs, temp);
+ phi = create_phi_node (temp, block);
+
+ gimple_set_plf (phi, NECESSARY, false);
+ VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
+ VN_INFO (gimple_phi_result (phi))->value_id = val;
+ VEC_safe_push (gimple, heap, inserted_exprs, phi);
+ bitmap_set_bit (inserted_phi_names,
+ SSA_NAME_VERSION (gimple_phi_result (phi)));
FOR_EACH_EDGE (pred, ei, block->preds)
{
pre_expr ae = avail[pred->src->index];
gcc_assert (get_expr_type (ae) == type
|| useless_type_conversion_p (type, get_expr_type (ae)));
if (ae->kind == CONSTANT)
- add_phi_arg (temp, PRE_EXPR_CONSTANT (ae), pred);
+ add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
else
- add_phi_arg (temp, PRE_EXPR_NAME (avail[pred->src->index]), pred);
+ add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
+ UNKNOWN_LOCATION);
}
- newphi = get_or_alloc_expr_for_name (PHI_RESULT (temp));
+ newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
add_to_value (val, newphi);
/* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Created phi ");
- print_generic_expr (dump_file, temp, 0);
+ print_gimple_stmt (dump_file, phi, 0, 0);
fprintf (dump_file, " in block %d\n", block->index);
}
pre_stats.phis++;
basic_block bprime;
pre_expr eprime = NULL;
edge_iterator ei;
+ pre_expr edoubleprime = NULL;
+ bool do_insertion = false;
val = get_expr_value_id (expr);
if (bitmap_set_contains_value (PHI_GEN (block), val))
FOR_EACH_EDGE (pred, ei, block->preds)
{
unsigned int vprime;
- pre_expr edoubleprime;
- /* This can happen in the very weird case
- that our fake infinite loop edges have caused a
- critical edge to appear. */
- if (EDGE_CRITICAL_P (pred))
- {
- cant_insert = true;
- break;
- }
+ /* We should never run insertion for the exit block
+ and so not come across fake pred edges. */
+ gcc_assert (!(pred->flags & EDGE_FAKE));
bprime = pred->src;
eprime = phi_translate (expr, ANTIC_IN (block), NULL,
bprime, block);
eprime = fully_constant_expression (eprime);
vprime = get_expr_value_id (eprime);
edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
- vprime, NULL_TREE);
+ vprime, NULL);
if (edoubleprime == NULL)
{
avail[bprime->index] = eprime;
{
avail[bprime->index] = edoubleprime;
by_some = true;
+ /* We want to perform insertions to remove a redundancy on
+ a path in the CFG we want to optimize for speed. */
+ if (optimize_edge_for_speed_p (pred))
+ do_insertion = true;
if (first_s == NULL)
first_s = edoubleprime;
else if (!pre_expr_eq (first_s, edoubleprime))
already existing along every predecessor, and
it's defined by some predecessor, it is
partially redundant. */
- if (!cant_insert && !all_same && by_some)
+ if (!cant_insert && !all_same && by_some && do_insertion
+ && dbg_cnt (treepre_insert))
{
if (insert_into_preds_of_block (block, get_expression_id (expr),
avail))
an invariant, then the PHI has the same value on all
edges. Note this. */
else if (!cant_insert && all_same && eprime
- && eprime->kind == CONSTANT
+ && (edoubleprime->kind == CONSTANT
+ || edoubleprime->kind == NAME)
&& !value_id_constant_p (val))
{
unsigned int j;
bitmap_set_t exprset = VEC_index (bitmap_set_t,
value_expressions, val);
- unsigned int new_val = get_expr_value_id (eprime);
+ unsigned int new_val = get_expr_value_id (edoubleprime);
FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
{
pre_expr expr = expression_for_id (j);
vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr));
/* Just reset the value id and valnum so it is
the same as the constant we have discovered. */
- info->valnum = PRE_EXPR_CONSTANT (eprime);
+ if (edoubleprime->kind == CONSTANT)
+ {
+ info->valnum = PRE_EXPR_CONSTANT (edoubleprime);
+ pre_stats.constified++;
+ }
+ else
+ info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum;
info->value_id = new_val;
- pre_stats.constified++;
}
}
}
unsigned int vprime;
pre_expr edoubleprime;
- /* This can happen in the very weird case
- that our fake infinite loop edges have caused a
- critical edge to appear. */
- if (EDGE_CRITICAL_P (pred))
- {
- cant_insert = true;
- break;
- }
+ /* We should never run insertion for the exit block
+ and so not come across fake pred edges. */
+ gcc_assert (!(pred->flags & EDGE_FAKE));
bprime = pred->src;
eprime = phi_translate (expr, ANTIC_IN (block),
PA_IN (block),
eprime = fully_constant_expression (eprime);
vprime = get_expr_value_id (eprime);
edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
- vprime, NULL_TREE);
+ vprime, NULL);
if (edoubleprime == NULL)
{
by_all = false;
}
-/* Add OP to EXP_GEN (block), and possibly to the maximal set if it is
- not defined by a phi node.
- PHI nodes can't go in the maximal sets because they are not in
- TMP_GEN, so it is possible to get into non-monotonic situations
- during ANTIC calculation, because it will *add* bits. */
+/* Add OP to EXP_GEN (block), and possibly to the maximal set. */
static void
add_to_exp_gen (basic_block block, tree op)
return;
result = get_or_alloc_expr_for_name (op);
bitmap_value_insert_into_set (EXP_GEN (block), result);
- if (TREE_CODE (op) != SSA_NAME
- || TREE_CODE (SSA_NAME_DEF_STMT (op)) != PHI_NODE)
- bitmap_value_insert_into_set (maximal_set, result);
- }
-}
-
-/* For each real store operation of the form
- *a = <value> that we see, create a corresponding fake store of the
- form storetmp_<version> = *a.
-
- This enables AVAIL computation to mark the results of stores as
- available. Without this, you'd need to do some computation to
- mark the result of stores as ANTIC and AVAIL at all the right
- points.
- To save memory, we keep the store
- statements pool allocated until we decide whether they are
- necessary or not. */
-
-static void
-insert_fake_stores (void)
-{
- basic_block block;
-
- FOR_ALL_BB (block)
- {
- block_stmt_iterator bsi;
- for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- tree stmt = bsi_stmt (bsi);
-
- /* We can't generate SSA names for stores that are complex
- or aggregate. We also want to ignore things whose
- virtual uses occur in abnormal phis. */
-
- if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
- && (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == INDIRECT_REF
- || handled_component_p (GIMPLE_STMT_OPERAND (stmt, 0)))
- && !AGGREGATE_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0))))
- {
- ssa_op_iter iter;
- def_operand_p defp;
- tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
- tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
- tree new_tree, new_lhs;
- bool notokay = false;
-
- FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
- {
- tree defvar = DEF_FROM_PTR (defp);
- if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defvar))
- {
- notokay = true;
- break;
- }
- }
-
- if (notokay)
- continue;
-
- if (!storetemp || TREE_TYPE (rhs) != TREE_TYPE (storetemp))
- {
- storetemp = create_tmp_var (TREE_TYPE (rhs), "storetmp");
- if (TREE_CODE (TREE_TYPE (storetemp)) == VECTOR_TYPE
- || TREE_CODE (TREE_TYPE (storetemp)) == COMPLEX_TYPE)
- DECL_GIMPLE_REG_P (storetemp) = 1;
- get_var_ann (storetemp);
- }
-
- new_tree = build_gimple_modify_stmt (NULL_TREE, lhs);
- new_lhs = make_ssa_name (storetemp, new_tree);
- GIMPLE_STMT_OPERAND (new_tree, 0) = new_lhs;
- create_ssa_artificial_load_stmt (new_tree, stmt, false);
-
- NECESSARY (new_tree) = 0;
- VEC_safe_push (tree, heap, inserted_exprs, new_tree);
- VEC_safe_push (tree, heap, need_creation, new_tree);
- bsi_insert_after (&bsi, new_tree, BSI_NEW_STMT);
- }
- }
- }
-}
-
-/* Turn the pool allocated fake stores that we created back into real
- GC allocated ones if they turned out to be necessary to PRE some
- expressions. */
-
-static void
-realify_fake_stores (void)
-{
- unsigned int i;
- tree stmt;
-
- for (i = 0; VEC_iterate (tree, need_creation, i, stmt); i++)
- {
- if (NECESSARY (stmt))
- {
- block_stmt_iterator bsi, bsi2;
- tree rhs;
-
- /* Mark the temp variable as referenced */
- add_referenced_var (SSA_NAME_VAR (GIMPLE_STMT_OPERAND (stmt, 0)));
-
- /* Put the statement before the store in the IR stream
- as a plain ssa name copy. */
- bsi = bsi_for_stmt (stmt);
- bsi_prev (&bsi);
- rhs = GIMPLE_STMT_OPERAND (bsi_stmt (bsi), 1);
- GIMPLE_STMT_OPERAND (stmt, 1) = rhs;
- bsi2 = bsi_for_stmt (stmt);
- bsi_remove (&bsi2, true);
- bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
- }
- else
- release_defs (stmt);
}
}
/* Create value ids for PHI in BLOCK. */
static void
-make_values_for_phi (tree phi, basic_block block)
+make_values_for_phi (gimple phi, basic_block block)
{
- tree result = PHI_RESULT (phi);
+ tree result = gimple_phi_result (phi);
+
/* We have no need for virtual phis, as they don't represent
actual computations. */
if (is_gimple_reg (result))
add_to_value (get_expr_value_id (e), e);
bitmap_insert_into_set (PHI_GEN (block), e);
bitmap_value_insert_into_set (AVAIL_OUT (block), e);
+ if (!in_fre)
+ {
+ unsigned i;
+ for (i = 0; i < gimple_phi_num_args (phi); ++i)
+ {
+ tree arg = gimple_phi_arg_def (phi, i);
+ if (TREE_CODE (arg) == SSA_NAME)
+ {
+ e = get_or_alloc_expr_for_name (arg);
+ add_to_value (get_expr_value_id (e), e);
+ }
+ }
+ }
}
}
basic_block block, son;
basic_block *worklist;
size_t sp = 0;
- tree param;
+ unsigned i;
- /* For arguments with default definitions, we pretend they are
- defined in the entry block. */
- for (param = DECL_ARGUMENTS (current_function_decl);
- param;
- param = TREE_CHAIN (param))
+ /* We pretend that default definitions are defined in the entry block.
+ This includes function arguments and the static chain decl. */
+ for (i = 1; i < num_ssa_names; ++i)
{
- if (gimple_default_def (cfun, param) != NULL)
- {
- tree def = gimple_default_def (cfun, param);
- pre_expr e = get_or_alloc_expr_for_name (def);
-
- add_to_value (get_expr_value_id (e), e);
- if (!in_fre)
- {
- bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
- bitmap_value_insert_into_set (maximal_set, e);
- }
- bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
- }
- }
-
- /* Likewise for the static chain decl. */
- if (cfun->static_chain_decl)
- {
- param = cfun->static_chain_decl;
- if (gimple_default_def (cfun, param) != NULL)
- {
- tree def = gimple_default_def (cfun, param);
- pre_expr e = get_or_alloc_expr_for_name (def);
+ tree name = ssa_name (i);
+ pre_expr e;
+ if (!name
+ || !SSA_NAME_IS_DEFAULT_DEF (name)
+ || has_zero_uses (name)
+ || !is_gimple_reg (name))
+ continue;
- add_to_value (get_expr_value_id (e), e);
- if (!in_fre)
- {
- bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
- bitmap_value_insert_into_set (maximal_set, e);
- }
- bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
- }
+ e = get_or_alloc_expr_for_name (name);
+ add_to_value (get_expr_value_id (e), e);
+ if (!in_fre)
+ bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
+ bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
}
/* Allocate the worklist. */
/* Loop until the worklist is empty. */
while (sp)
{
- block_stmt_iterator bsi;
- tree stmt, phi;
+ gimple_stmt_iterator gsi;
+ gimple stmt;
basic_block dom;
unsigned int stmt_uid = 1;
bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
/* Generate values for PHI nodes. */
- for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
- make_values_for_phi (phi, block);
+ for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
+ make_values_for_phi (gsi_stmt (gsi), block);
/* Now compute value numbers and populate value sets with all
the expressions computed in BLOCK. */
- for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
+ for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
{
- stmt_ann_t ann;
ssa_op_iter iter;
tree op;
- stmt = bsi_stmt (bsi);
- ann = stmt_ann (stmt);
-
- set_gimple_stmt_uid (stmt, stmt_uid++);
+ stmt = gsi_stmt (gsi);
+ gimple_set_uid (stmt, stmt_uid++);
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
{
add_to_value (get_expr_value_id (e), e);
if (!in_fre)
- {
- bitmap_insert_into_set (TMP_GEN (block), e);
- bitmap_value_insert_into_set (maximal_set, e);
- }
+ bitmap_insert_into_set (TMP_GEN (block), e);
bitmap_value_insert_into_set (AVAIL_OUT (block), e);
}
- switch (TREE_CODE (stmt))
+ if (gimple_has_volatile_ops (stmt)
+ || stmt_could_throw_p (stmt))
+ continue;
+
+ switch (gimple_code (stmt))
{
- case RETURN_EXPR:
- if (!ann->has_volatile_ops)
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
- add_to_exp_gen (block, op);
+ case GIMPLE_RETURN:
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
+ add_to_exp_gen (block, op);
continue;
- case GIMPLE_MODIFY_STMT:
+
+ case GIMPLE_CALL:
{
- tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
- if (!ann->has_volatile_ops
- && !tree_could_throw_p (stmt))
- {
- pre_expr result = NULL;
- switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
- {
- case tcc_unary:
- if (is_exception_related (rhs))
- continue;
- case tcc_binary:
- {
- vn_nary_op_t nary;
- unsigned int i;
+ vn_reference_t ref;
+ unsigned int i;
+ vn_reference_op_t vro;
+ pre_expr result = NULL;
+ VEC(vn_reference_op_s, heap) *ops = NULL;
- vn_nary_op_lookup (rhs, &nary);
+ if (!can_value_number_call (stmt))
+ continue;
- if (!nary)
- continue;
+ copy_reference_ops_from_call (stmt, &ops);
+ vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
+ gimple_expr_type (stmt),
+ ops, &ref, false);
+ VEC_free (vn_reference_op_s, heap, ops);
+ if (!ref)
+ continue;
- for (i = 0; i < nary->length; i++)
- if (TREE_CODE (nary->op[i]) == SSA_NAME)
- add_to_exp_gen (block, nary->op[i]);
+ for (i = 0; VEC_iterate (vn_reference_op_s,
+ ref->operands, i,
+ vro); i++)
+ {
+ if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
+ add_to_exp_gen (block, vro->op0);
+ if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
+ add_to_exp_gen (block, vro->op1);
+ if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
+ add_to_exp_gen (block, vro->op2);
+ }
+ result = (pre_expr) pool_alloc (pre_expr_pool);
+ result->kind = REFERENCE;
+ result->id = 0;
+ PRE_EXPR_REFERENCE (result) = ref;
+
+ get_or_alloc_expression_id (result);
+ add_to_value (get_expr_value_id (result), result);
+ if (!in_fre)
+ bitmap_value_insert_into_set (EXP_GEN (block), result);
+ continue;
+ }
- result = (pre_expr) pool_alloc (pre_expr_pool);
- result->kind = NARY;
- result->id = 0;
- PRE_EXPR_NARY (result) = nary;
- }
- break;
- case tcc_vl_exp:
- if (!can_value_number_call (rhs))
- continue;
+ case GIMPLE_ASSIGN:
+ {
+ pre_expr result = NULL;
+ switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
+ {
+ case tcc_unary:
+ case tcc_binary:
+ case tcc_comparison:
+ {
+ vn_nary_op_t nary;
+ unsigned int i;
+
+ vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
+ gimple_assign_rhs_code (stmt),
+ gimple_expr_type (stmt),
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt),
+ NULL_TREE, NULL_TREE, &nary);
+
+ if (!nary)
+ continue;
+
+ for (i = 0; i < nary->length; i++)
+ if (TREE_CODE (nary->op[i]) == SSA_NAME)
+ add_to_exp_gen (block, nary->op[i]);
+
+ result = (pre_expr) pool_alloc (pre_expr_pool);
+ result->kind = NARY;
+ result->id = 0;
+ PRE_EXPR_NARY (result) = nary;
+ break;
+ }
- case tcc_declaration:
- case tcc_reference:
- {
- vn_reference_t ref;
- unsigned int i;
- vn_reference_op_t vro;
-
- vn_reference_lookup (rhs,
- shared_vuses_from_stmt (stmt),
- true, &ref);
- if (!ref)
- continue;
-
- for (i = 0; VEC_iterate (vn_reference_op_s,
- ref->operands, i,
- vro); i++)
- {
- if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
- add_to_exp_gen (block, vro->op0);
- if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
- add_to_exp_gen (block, vro->op1);
- }
- result = (pre_expr) pool_alloc (pre_expr_pool);
- result->kind = REFERENCE;
- result->id = 0;
- PRE_EXPR_REFERENCE (result) = ref;
- }
- break;
- default:
+ case tcc_declaration:
+ case tcc_reference:
+ {
+ vn_reference_t ref;
+ unsigned int i;
+ vn_reference_op_t vro;
+
+ vn_reference_lookup (gimple_assign_rhs1 (stmt),
+ gimple_vuse (stmt),
+ true, &ref);
+ if (!ref)
+ continue;
+
+ for (i = 0; VEC_iterate (vn_reference_op_s,
+ ref->operands, i,
+ vro); i++)
{
- /* For any other statement that we don't
- recognize, simply add all referenced
- SSA_NAMEs to EXP_GEN. */
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
- add_to_exp_gen (block, op);
- continue;
+ if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
+ add_to_exp_gen (block, vro->op0);
+ if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
+ add_to_exp_gen (block, vro->op1);
+ if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
+ add_to_exp_gen (block, vro->op2);
}
- }
- get_or_alloc_expression_id (result);
- add_to_value (get_expr_value_id (result), result);
- if (!in_fre)
- {
- bitmap_value_insert_into_set (EXP_GEN (block),
- result);
- bitmap_value_insert_into_set (maximal_set, result);
- }
+ result = (pre_expr) pool_alloc (pre_expr_pool);
+ result->kind = REFERENCE;
+ result->id = 0;
+ PRE_EXPR_REFERENCE (result) = ref;
+ break;
+ }
+ default:
+ /* For any other statement that we don't
+ recognize, simply add all referenced
+ SSA_NAMEs to EXP_GEN. */
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
+ add_to_exp_gen (block, op);
+ continue;
}
+
+ get_or_alloc_expression_id (result);
+ add_to_value (get_expr_value_id (result), result);
+ if (!in_fre)
+ bitmap_value_insert_into_set (EXP_GEN (block), result);
+
continue;
}
default:
break;
-
}
-
-
}
+
/* Put the dominator children of BLOCK on the worklist of blocks
to compute available sets for. */
for (son = first_dom_son (CDI_DOMINATORS, block);
be used for replacement. */
static tree
-do_SCCVN_insertion (tree stmt, tree ssa_vn)
+do_SCCVN_insertion (gimple stmt, tree ssa_vn)
{
- basic_block bb = bb_for_stmt (stmt);
- block_stmt_iterator bsi;
- tree expr, stmts;
+ basic_block bb = gimple_bb (stmt);
+ gimple_stmt_iterator gsi;
+ gimple_seq stmts = NULL;
+ tree expr;
pre_expr e;
/* First create a value expression from the expression we want
to insert and associate it with the value handle for SSA_VN. */
-
- /* TODO: Handle complex expressions. */
- e = get_or_alloc_expr_for (VN_INFO (ssa_vn)->expr);
+ e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
if (e == NULL)
return NULL_TREE;
-/* Then use create_expression_by_pieces to generate a valid
+ /* Then use create_expression_by_pieces to generate a valid
expression to insert at this point of the IL stream. */
- stmts = alloc_stmt_list ();
- expr = create_expression_by_pieces (bb, e, stmts, stmt,
- NULL);
+ expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
if (expr == NULL_TREE)
return NULL_TREE;
- bsi = bsi_for_stmt (stmt);
- bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
+ gsi = gsi_for_stmt (stmt);
+ gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
return expr;
}
static unsigned int
eliminate (void)
{
+ VEC (gimple, heap) *to_remove = NULL;
basic_block b;
unsigned int todo = 0;
+ gimple_stmt_iterator gsi;
+ gimple stmt;
+ unsigned i;
FOR_EACH_BB (b)
{
- block_stmt_iterator i;
-
- for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
+ for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
{
- tree stmt = bsi_stmt (i);
+ stmt = gsi_stmt (gsi);
/* Lookup the RHS of the expression, see if we have an
available computation for it. If so, replace the RHS with
the available computation. */
- if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
- && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME
- && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) != SSA_NAME
- && !is_gimple_min_invariant (GIMPLE_STMT_OPERAND (stmt, 1))
- && !stmt_ann (stmt)->has_volatile_ops)
+ if (gimple_has_lhs (stmt)
+ && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
+ && !gimple_assign_ssa_name_copy_p (stmt)
+ && (!gimple_assign_single_p (stmt)
+ || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+ && !gimple_has_volatile_ops (stmt)
+ && !has_zero_uses (gimple_get_lhs (stmt)))
{
- tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
- tree *rhs_p = &GIMPLE_STMT_OPERAND (stmt, 1);
+ tree lhs = gimple_get_lhs (stmt);
+ tree rhs = NULL_TREE;
tree sprime = NULL;
pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
pre_expr sprimeexpr;
+ if (gimple_assign_single_p (stmt))
+ rhs = gimple_assign_rhs1 (stmt);
+
sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
get_expr_value_id (lhsexpr),
- NULL_TREE);
+ NULL);
if (sprimeexpr)
{
else
gcc_unreachable ();
}
+
/* If there is no existing leader but SCCVN knows this
value is constant, use that constant. */
if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
{
sprime = VN_INFO (lhs)->valnum;
+ if (!useless_type_conversion_p (TREE_TYPE (lhs),
+ TREE_TYPE (sprime)))
+ sprime = fold_convert (TREE_TYPE (lhs), sprime);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replaced ");
- print_generic_expr (dump_file, *rhs_p, 0);
+ print_gimple_expr (dump_file, stmt, 0, 0);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime, 0);
fprintf (dump_file, " in ");
- print_generic_stmt (dump_file, stmt, 0);
+ print_gimple_stmt (dump_file, stmt, 0, 0);
}
pre_stats.eliminations++;
- propagate_tree_value (rhs_p, sprime);
+ propagate_tree_value_into_stmt (&gsi, sprime);
+ stmt = gsi_stmt (gsi);
update_stmt (stmt);
continue;
}
if (val != VN_TOP
&& TREE_CODE (val) == SSA_NAME
&& VN_INFO (val)->needs_insertion
- && can_PRE_operation (VN_INFO (val)->expr))
+ && can_PRE_operation (vn_get_expr_for (val)))
sprime = do_SCCVN_insertion (stmt, val);
}
if (sprime
&& sprime != lhs
- && (TREE_CODE (*rhs_p) != SSA_NAME
- || may_propagate_copy (*rhs_p, sprime)))
+ && (rhs == NULL_TREE
+ || TREE_CODE (rhs) != SSA_NAME
+ || may_propagate_copy (rhs, sprime)))
{
- gcc_assert (sprime != *rhs_p);
+ gcc_assert (sprime != rhs);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replaced ");
- print_generic_expr (dump_file, *rhs_p, 0);
+ print_gimple_expr (dump_file, stmt, 0, 0);
fprintf (dump_file, " with ");
print_generic_expr (dump_file, sprime, 0);
fprintf (dump_file, " in ");
- print_generic_stmt (dump_file, stmt, 0);
+ print_gimple_stmt (dump_file, stmt, 0, 0);
}
if (TREE_CODE (sprime) == SSA_NAME)
- NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
+ gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
+ NECESSARY, true);
/* We need to make sure the new and old types actually match,
which may require adding a simple cast, which fold_convert
will do for us. */
- if (TREE_CODE (*rhs_p) != SSA_NAME
- && !useless_type_conversion_p (TREE_TYPE (*rhs_p),
- TREE_TYPE (sprime)))
- sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);
+ if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
+ && !useless_type_conversion_p (gimple_expr_type (stmt),
+ TREE_TYPE (sprime)))
+ sprime = fold_convert (gimple_expr_type (stmt), sprime);
pre_stats.eliminations++;
- propagate_tree_value (rhs_p, sprime);
+ propagate_tree_value_into_stmt (&gsi, sprime);
+ stmt = gsi_stmt (gsi);
update_stmt (stmt);
/* If we removed EH side effects from the statement, clean
if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
- bb_for_stmt (stmt)->index);
+ gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed EH side effects.\n");
}
}
}
+ /* If the statement is a scalar store, see if the expression
+ has the same value number as its rhs. If so, the store is
+ dead. */
+ else if (gimple_assign_single_p (stmt)
+ && !is_gimple_reg (gimple_assign_lhs (stmt))
+ && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+ || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
+ {
+ tree rhs = gimple_assign_rhs1 (stmt);
+ tree val;
+ val = vn_reference_lookup (gimple_assign_lhs (stmt),
+ gimple_vuse (stmt), true, NULL);
+ if (TREE_CODE (rhs) == SSA_NAME)
+ rhs = VN_INFO (rhs)->valnum;
+ if (val
+ && operand_equal_p (val, rhs, 0))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Deleted redundant store ");
+ print_gimple_stmt (dump_file, stmt, 0, 0);
+ }
+
+ /* Queue stmt for removal. */
+ VEC_safe_push (gimple, heap, to_remove, stmt);
+ }
+ }
/* Visit COND_EXPRs and fold the comparison with the
available value-numbers. */
- else if (TREE_CODE (stmt) == COND_EXPR
- && COMPARISON_CLASS_P (COND_EXPR_COND (stmt)))
+ else if (gimple_code (stmt) == GIMPLE_COND)
{
- tree cond = COND_EXPR_COND (stmt);
- tree op0 = TREE_OPERAND (cond, 0);
- tree op1 = TREE_OPERAND (cond, 1);
+ tree op0 = gimple_cond_lhs (stmt);
+ tree op1 = gimple_cond_rhs (stmt);
tree result;
if (TREE_CODE (op0) == SSA_NAME)
op0 = VN_INFO (op0)->valnum;
if (TREE_CODE (op1) == SSA_NAME)
op1 = VN_INFO (op1)->valnum;
- result = fold_binary (TREE_CODE (cond), TREE_TYPE (cond),
+ result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
op0, op1);
if (result && TREE_CODE (result) == INTEGER_CST)
{
- COND_EXPR_COND (stmt) = result;
+ if (integer_zerop (result))
+ gimple_cond_make_false (stmt);
+ else
+ gimple_cond_make_true (stmt);
update_stmt (stmt);
todo = TODO_cleanup_cfg;
}
}
- else if (TREE_CODE (stmt) == COND_EXPR
- && TREE_CODE (COND_EXPR_COND (stmt)) == SSA_NAME)
+ /* Visit indirect calls and turn them into direct calls if
+ possible. */
+ if (gimple_code (stmt) == GIMPLE_CALL
+ && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
{
- tree op = COND_EXPR_COND (stmt);
- op = VN_INFO (op)->valnum;
- if (TREE_CODE (op) == INTEGER_CST)
+ tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
+ if (TREE_CODE (fn) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
{
- COND_EXPR_COND (stmt) = integer_zerop (op)
- ? boolean_false_node : boolean_true_node;
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Replacing call target with ");
+ print_generic_expr (dump_file, fn, 0);
+ fprintf (dump_file, " in ");
+ print_gimple_stmt (dump_file, stmt, 0, 0);
+ }
+
+ gimple_call_set_fn (stmt, fn);
update_stmt (stmt);
- todo = TODO_cleanup_cfg;
+ if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
+ {
+ bitmap_set_bit (need_eh_cleanup,
+ gimple_bb (stmt)->index);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Removed EH side effects.\n");
+ }
+
+ /* Changing an indirect call to a direct call may
+ have exposed different semantics. This may
+ require an SSA update. */
+ todo |= TODO_update_ssa_only_virtuals;
}
}
}
+
+ for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
+ {
+ gimple stmt, phi = gsi_stmt (gsi);
+ tree sprime = NULL_TREE, res = PHI_RESULT (phi);
+ pre_expr sprimeexpr, resexpr;
+ gimple_stmt_iterator gsi2;
+
+ /* We want to perform redundant PHI elimination. Do so by
+ replacing the PHI with a single copy if possible.
+ Do not touch inserted, single-argument or virtual PHIs. */
+ if (gimple_phi_num_args (phi) == 1
+ || !is_gimple_reg (res)
+ || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res)))
+ {
+ gsi_next (&gsi);
+ continue;
+ }
+
+ resexpr = get_or_alloc_expr_for_name (res);
+ sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
+ get_expr_value_id (resexpr), NULL);
+ if (sprimeexpr)
+ {
+ if (sprimeexpr->kind == CONSTANT)
+ sprime = PRE_EXPR_CONSTANT (sprimeexpr);
+ else if (sprimeexpr->kind == NAME)
+ sprime = PRE_EXPR_NAME (sprimeexpr);
+ else
+ gcc_unreachable ();
+ }
+ if (!sprimeexpr
+ || sprime == res)
+ {
+ gsi_next (&gsi);
+ continue;
+ }
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Replaced redundant PHI node defining ");
+ print_generic_expr (dump_file, res, 0);
+ fprintf (dump_file, " with ");
+ print_generic_expr (dump_file, sprime, 0);
+ fprintf (dump_file, "\n");
+ }
+
+ remove_phi_node (&gsi, false);
+
+ if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
+ sprime = fold_convert (TREE_TYPE (res), sprime);
+ stmt = gimple_build_assign (res, sprime);
+ SSA_NAME_DEF_STMT (res) = stmt;
+ if (TREE_CODE (sprime) == SSA_NAME)
+ gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
+ NECESSARY, true);
+ gsi2 = gsi_after_labels (b);
+ gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
+ /* Queue the copy for eventual removal. */
+ VEC_safe_push (gimple, heap, to_remove, stmt);
+ pre_stats.eliminations++;
+ }
+ }
+
+ /* We cannot remove stmts during BB walk, especially not release SSA
+ names there as this confuses the VN machinery. The stmts ending
+ up in to_remove are either stores or simple copies. */
+ for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+ use_operand_p use_p;
+ gimple use_stmt;
+
+ /* If there is a single use only, propagate the equivalency
+ instead of keeping the copy. */
+ if (TREE_CODE (lhs) == SSA_NAME
+ && single_imm_use (lhs, &use_p, &use_stmt)
+ && may_propagate_copy (USE_FROM_PTR (use_p),
+ gimple_assign_rhs1 (stmt)))
+ {
+ SET_USE (use_p, gimple_assign_rhs1 (stmt));
+ update_stmt (use_stmt);
+ }
+
+ /* If this is a store or a now unused copy, remove it. */
+ if (TREE_CODE (lhs) != SSA_NAME
+ || has_zero_uses (lhs))
+ {
+ gsi = gsi_for_stmt (stmt);
+ unlink_stmt_vdef (stmt);
+ gsi_remove (&gsi, true);
+ release_defs (stmt);
+ }
}
+ VEC_free (gimple, heap, to_remove);
return todo;
}
mark that statement necessary. Return the stmt, if it is newly
necessary. */
-static inline tree
+static inline gimple
mark_operand_necessary (tree op)
{
- tree stmt;
+ gimple stmt;
gcc_assert (op);
stmt = SSA_NAME_DEF_STMT (op);
gcc_assert (stmt);
- if (NECESSARY (stmt)
- || IS_EMPTY_STMT (stmt))
+ if (gimple_plf (stmt, NECESSARY)
+ || gimple_nop_p (stmt))
return NULL;
- NECESSARY (stmt) = 1;
+ gimple_set_plf (stmt, NECESSARY, true);
return stmt;
}
static void
remove_dead_inserted_code (void)
{
- VEC(tree,heap) *worklist = NULL;
+ VEC(gimple,heap) *worklist = NULL;
int i;
- tree t;
+ gimple t;
- worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
- for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
+ worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
+ for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
{
- if (NECESSARY (t))
- VEC_quick_push (tree, worklist, t);
+ if (gimple_plf (t, NECESSARY))
+ VEC_quick_push (gimple, worklist, t);
}
- while (VEC_length (tree, worklist) > 0)
+ while (VEC_length (gimple, worklist) > 0)
{
- t = VEC_pop (tree, worklist);
+ t = VEC_pop (gimple, worklist);
/* PHI nodes are somewhat special in that each PHI alternative has
data and control dependencies. All the statements feeding the
PHI node's arguments are always necessary. */
- if (TREE_CODE (t) == PHI_NODE)
+ if (gimple_code (t) == GIMPLE_PHI)
{
- int k;
+ unsigned k;
- VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
- for (k = 0; k < PHI_NUM_ARGS (t); k++)
+ VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
+ for (k = 0; k < gimple_phi_num_args (t); k++)
{
tree arg = PHI_ARG_DEF (t, k);
if (TREE_CODE (arg) == SSA_NAME)
{
- arg = mark_operand_necessary (arg);
- if (arg)
- VEC_quick_push (tree, worklist, arg);
+ gimple n = mark_operand_necessary (arg);
+ if (n)
+ VEC_quick_push (gimple, worklist, n);
}
}
}
FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
{
- tree n = mark_operand_necessary (use);
+ gimple n = mark_operand_necessary (use);
if (n)
- VEC_safe_push (tree, heap, worklist, n);
+ VEC_safe_push (gimple, heap, worklist, n);
}
}
}
- for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
+ for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
{
- if (!NECESSARY (t))
+ if (!gimple_plf (t, NECESSARY))
{
- block_stmt_iterator bsi;
+ gimple_stmt_iterator gsi;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Removing unnecessary insertion:");
- print_generic_stmt (dump_file, t, 0);
+ print_gimple_stmt (dump_file, t, 0, 0);
}
- if (TREE_CODE (t) == PHI_NODE)
- {
- remove_phi_node (t, NULL_TREE, true);
- }
+ gsi = gsi_for_stmt (t);
+ if (gimple_code (t) == GIMPLE_PHI)
+ remove_phi_node (&gsi, true);
else
{
- bsi = bsi_for_stmt (t);
- bsi_remove (&bsi, true);
+ gsi_remove (&gsi, true);
release_defs (t);
}
}
}
- VEC_free (tree, heap, worklist);
+ VEC_free (gimple, heap, worklist);
}
/* Initialize data structures used by PRE. */
calculate_dominance_info (CDI_DOMINATORS);
bitmap_obstack_initialize (&grand_bitmap_obstack);
+ inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack);
phi_translate_table = htab_create (5110, expr_pred_trans_hash,
expr_pred_trans_eq, free);
expression_to_id = htab_create (num_ssa_names * 3,
TMP_GEN (bb) = bitmap_set_new ();
AVAIL_OUT (bb) = bitmap_set_new ();
}
- maximal_set = in_fre ? NULL : bitmap_set_new ();
need_eh_cleanup = BITMAP_ALLOC (NULL);
}
/* Deallocate data structures used by PRE. */
static void
-fini_pre (void)
+fini_pre (bool do_fre)
{
basic_block bb;
free (postorder);
VEC_free (bitmap_set_t, heap, value_expressions);
- VEC_free (tree, heap, inserted_exprs);
- VEC_free (tree, heap, need_creation);
+ VEC_free (gimple, heap, inserted_exprs);
+ VEC_free (gimple, heap, need_creation);
bitmap_obstack_release (&grand_bitmap_obstack);
free_alloc_pool (bitmap_set_pool);
free_alloc_pool (pre_expr_pool);
htab_delete (phi_translate_table);
htab_delete (expression_to_id);
- remove_fake_exit_edges ();
FOR_ALL_BB (bb)
{
if (!bitmap_empty_p (need_eh_cleanup))
{
- tree_purge_all_dead_eh_edges (need_eh_cleanup);
+ gimple_purge_all_dead_eh_edges (need_eh_cleanup);
cleanup_tree_cfg ();
}
BITMAP_FREE (need_eh_cleanup);
- if (current_loops != NULL)
+ if (!do_fre)
loop_optimizer_finalize ();
}
{
unsigned int todo = 0;
- do_partial_partial = optimize > 2;
+ do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);
/* This has to happen before SCCVN runs because
loop_optimizer_init may create new phis, etc. */
if (!do_fre)
loop_optimizer_init (LOOPS_NORMAL);
- if (0 && !do_fre)
- insert_fake_stores ();
if (!run_scc_vn (do_fre))
{
if (!do_fre)
- remove_dead_inserted_code ();
+ {
+ remove_dead_inserted_code ();
+ loop_optimizer_finalize ();
+ }
+
return 0;
}
init_pre (do_fre);
+ scev_initialize ();
/* Collect and value number expressions computed in each basic block. */
FOR_ALL_BB (bb)
{
print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
- print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen",
- bb->index);
- print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out",
- bb->index);
+ print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
+ print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
+ print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
}
}
statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
statistics_counter_event (cfun, "Constified", pre_stats.constified);
- bsi_commit_edge_inserts ();
+
+ /* Make sure to remove fake edges before committing our inserts.
+ This makes sure we don't end up with extra critical edges that
+ we would need to split. */
+ remove_fake_exit_edges ();
+ gsi_commit_edge_inserts ();
clear_expression_ids ();
free_scc_vn ();
if (!do_fre)
- {
- remove_dead_inserted_code ();
- if (0)
- realify_fake_stores ();
- }
+ remove_dead_inserted_code ();
- fini_pre ();
+ scev_finalize ();
+ fini_pre (do_fre);
return todo;
}
static unsigned int
do_pre (void)
{
- return TODO_rebuild_alias | execute_pre (false);
+ return execute_pre (false);
}
static bool
0, /* static_pass_number */
TV_TREE_PRE, /* tv_id */
PROP_no_crit_edges | PROP_cfg
- | PROP_ssa | PROP_alias, /* properties_required */
+ | PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
- 0, /* todo_flags_start */
+ TODO_rebuild_alias, /* todo_flags_start */
TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
| TODO_verify_ssa /* todo_flags_finish */
}
NULL, /* next */
0, /* static_pass_number */
TV_TREE_FRE, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ PROP_cfg | PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */