/* SSA-PRE for trees.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
<stevenb@suse.de>
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
-#include "real.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
PRE_EXPR_REFERENCE (e2));
default:
- abort();
+ gcc_unreachable ();
}
}
case CONSTANT:
return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
case NAME:
- return iterative_hash_hashval_t (SSA_NAME_VERSION (PRE_EXPR_NAME (e)), 0);
+ return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
case NARY:
return PRE_EXPR_NARY (e)->hashcode;
case REFERENCE:
return PRE_EXPR_REFERENCE (e)->hashcode;
default:
- abort ();
+ gcc_unreachable ();
}
}
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
+static VEC(unsigned, heap) *name_to_id;
/* Allocate an expression id for EXPR. */
gcc_assert (next_expression_id + 1 > next_expression_id);
expr->id = next_expression_id++;
VEC_safe_push (pre_expr, heap, expressions, expr);
- slot = htab_find_slot (expression_to_id, expr, INSERT);
- gcc_assert (!*slot);
- *slot = expr;
+ if (expr->kind == NAME)
+ {
+ unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
+ /* VEC_safe_grow_cleared allocates no headroom. Avoid frequent
+ re-allocations by using VEC_reserve upfront. There is no
+ VEC_quick_grow_cleared unfortunately. */
+ VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
+ VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
+ gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
+ VEC_replace (unsigned, name_to_id, version, expr->id);
+ }
+ else
+ {
+ slot = htab_find_slot (expression_to_id, expr, INSERT);
+ gcc_assert (!*slot);
+ *slot = expr;
+ }
return next_expression_id - 1;
}
{
void **slot;
- slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
- if (!slot)
- return 0;
- return ((pre_expr)*slot)->id;
+ if (expr->kind == NAME)
+ {
+ unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
+ if (VEC_length (unsigned, name_to_id) <= version)
+ return 0;
+ return VEC_index (unsigned, name_to_id, version);
+ }
+ else
+ {
+ slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
+ if (!slot)
+ return 0;
+ return ((pre_expr)*slot)->id;
+ }
}
/* Return the existing expression id for EXPR, or create one if one
static pre_expr
get_or_alloc_expr_for_name (tree name)
{
- pre_expr result = (pre_expr) pool_alloc (pre_expr_pool);
+ struct pre_expr_d expr;
+ pre_expr result;
unsigned int result_id;
+ expr.kind = NAME;
+ expr.id = 0;
+ PRE_EXPR_NAME (&expr) = name;
+ result_id = lookup_expression_id (&expr);
+ if (result_id != 0)
+ return expression_for_id (result_id);
+
+ result = (pre_expr) pool_alloc (pre_expr_pool);
result->kind = NAME;
- result->id = 0;
PRE_EXPR_NAME (result) = name;
- result_id = lookup_expression_id (result);
- if (result_id != 0)
- {
- pool_free (pre_expr_pool, result);
- result = expression_for_id (result_id);
- return result;
- }
- get_or_alloc_expression_id (result);
+ alloc_expression_id (result);
return result;
}
bitmap expr_dies;
/* True if we have visited this block during ANTIC calculation. */
- unsigned int visited:1;
+ unsigned int visited : 1;
/* True we have deferred processing this block during ANTIC
calculation until its successor is processed. */
unsigned int deferred : 1;
+
+ /* True when the block contains a call that might not return. */
+ unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;
#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
+#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder. */
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
-static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr, bool);
+static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
+ unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
gimple, tree);
cleaned up. */
static bitmap need_eh_cleanup;
-/* Which expressions have been seen during a given phi translation. */
-static bitmap seen_during_translate;
-
/* The phi_translate_table caches phi translations for a given
expression and predecessor. */
VEC_replace (bitmap_set_t, value_expressions, v, set);
}
- bitmap_insert_into_set_1 (set, e, true);
+ bitmap_insert_into_set_1 (set, e, v, true);
}
/* Create a new bitmap set and return it. */
static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
- bool allow_constants)
+ unsigned int val, bool allow_constants)
{
- unsigned int val = get_expr_value_id (expr);
if (allow_constants || !value_id_constant_p (val))
{
/* We specifically expect this and only this function to be able to
static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
- bitmap_insert_into_set_1 (set, expr, false);
+ bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST. */
{
unsigned int i, j;
bitmap_iterator bi, bj;
- VEC(pre_expr, heap) *result = NULL;
+ VEC(pre_expr, heap) *result;
+
+ /* Pre-allocate roughly enough space for the array. */
+ result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));
FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
{
{
unsigned int val = get_expr_value_id (expr);
+#ifdef ENABLE_CHECKING
+ gcc_assert (expr->id == get_or_alloc_expression_id (expr));
+#endif
+
+ /* Constant values are always considered to be part of the set. */
if (value_id_constant_p (val))
return;
- if (!bitmap_set_contains_value (set, val))
- bitmap_insert_into_set (set, expr);
+ /* If the value membership changed, add the expression. */
+ if (bitmap_set_bit (set->values, val))
+ bitmap_set_bit (set->expressions, expr->id);
}
/* Print out EXPR to outfile. */
{
unsigned int result_id;
unsigned int value_id;
- pre_expr newexpr = (pre_expr) pool_alloc (pre_expr_pool);
+ struct pre_expr_d expr;
+ pre_expr newexpr;
+
+ expr.kind = CONSTANT;
+ PRE_EXPR_CONSTANT (&expr) = constant;
+ result_id = lookup_expression_id (&expr);
+ if (result_id != 0)
+ return expression_for_id (result_id);
+
+ newexpr = (pre_expr) pool_alloc (pre_expr_pool);
newexpr->kind = CONSTANT;
PRE_EXPR_CONSTANT (newexpr) = constant;
- result_id = lookup_expression_id (newexpr);
- if (result_id != 0)
- {
- pool_free (pre_expr_pool, newexpr);
- newexpr = expression_for_id (result_id);
- return newexpr;
- }
+ alloc_expression_id (newexpr);
value_id = get_or_alloc_constant_value_id (constant);
- get_or_alloc_expression_id (newexpr);
add_to_value (value_id, newexpr);
return newexpr;
}
}
case tcc_reference:
if (nary->opcode != REALPART_EXPR
- && nary->opcode != IMAGPART_EXPR
+ && nary->opcode != IMAGPART_EXPR
&& nary->opcode != VIEW_CONVERT_EXPR)
return e;
/* Fallthrough. */
case REFERENCE:
{
vn_reference_t ref = PRE_EXPR_REFERENCE (e);
- VEC (vn_reference_op_s, heap) *operands = ref->operands;
- vn_reference_op_t op;
-
- /* Try to simplify the translated expression if it is
- a call to a builtin function with at most two arguments. */
- op = VEC_index (vn_reference_op_s, operands, 0);
- if (op->opcode == CALL_EXPR
- && TREE_CODE (op->op0) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
- && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
- && VEC_length (vn_reference_op_s, operands) >= 2
- && VEC_length (vn_reference_op_s, operands) <= 3)
- {
- vn_reference_op_t arg0, arg1 = NULL;
- bool anyconst = false;
- arg0 = VEC_index (vn_reference_op_s, operands, 1);
- if (VEC_length (vn_reference_op_s, operands) > 2)
- arg1 = VEC_index (vn_reference_op_s, operands, 2);
- if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
- || (arg0->opcode == ADDR_EXPR
- && is_gimple_min_invariant (arg0->op0)))
- anyconst = true;
- if (arg1
- && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
- || (arg1->opcode == ADDR_EXPR
- && is_gimple_min_invariant (arg1->op0))))
- anyconst = true;
- if (anyconst)
- {
- tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
- arg1 ? 2 : 1,
- arg0->op0,
- arg1 ? arg1->op0 : NULL);
- if (folded
- && TREE_CODE (folded) == NOP_EXPR)
- folded = TREE_OPERAND (folded, 0);
- if (folded
- && is_gimple_min_invariant (folded))
- return get_or_alloc_expr_for_constant (folded);
- }
- }
- return e;
- }
+ tree folded;
+ if ((folded = fully_constant_vn_reference_p (ref)))
+ return get_or_alloc_expr_for_constant (folded);
+ return e;
+ }
default:
return e;
}
}
/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
- it has the value it would have in BLOCK. */
+ it has the value it would have in BLOCK. Set *SAME_VALID to true
+ in case the new vuse doesn't change the value id of the OPERANDS. */
static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
alias_set_type set, tree type, tree vuse,
basic_block phiblock,
- basic_block block)
+ basic_block block, bool *same_valid)
{
gimple phi = SSA_NAME_DEF_STMT (vuse);
ao_ref ref;
+ edge e = NULL;
+ bool use_oracle;
+
+ *same_valid = true;
if (gimple_bb (phi) != phiblock)
return vuse;
- if (gimple_code (phi) == GIMPLE_PHI)
- {
- edge e = find_edge (block, phiblock);
- return PHI_ARG_DEF (phi, e->dest_idx);
- }
-
- if (!ao_ref_init_from_vn_reference (&ref, set, type, operands))
- return NULL_TREE;
+ use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
/* Use the alias-oracle to find either the PHI node in this block,
the first VUSE used in this block that is equivalent to vuse or
the first VUSE which definition in this block kills the value. */
- while (!stmt_may_clobber_ref_p_1 (phi, &ref))
+ if (gimple_code (phi) == GIMPLE_PHI)
+ e = find_edge (block, phiblock);
+ else if (use_oracle)
+ while (!stmt_may_clobber_ref_p_1 (phi, &ref))
+ {
+ vuse = gimple_vuse (phi);
+ phi = SSA_NAME_DEF_STMT (vuse);
+ if (gimple_bb (phi) != phiblock)
+ return vuse;
+ if (gimple_code (phi) == GIMPLE_PHI)
+ {
+ e = find_edge (block, phiblock);
+ break;
+ }
+ }
+ else
+ return NULL_TREE;
+
+ if (e)
{
- vuse = gimple_vuse (phi);
- phi = SSA_NAME_DEF_STMT (vuse);
- if (gimple_bb (phi) != phiblock)
- return vuse;
- if (gimple_code (phi) == GIMPLE_PHI)
+ if (use_oracle)
{
- edge e = find_edge (block, phiblock);
- return PHI_ARG_DEF (phi, e->dest_idx);
+ bitmap visited = NULL;
+ /* Try to find a vuse that dominates this phi node by skipping
+ non-clobbering statements. */
+ vuse = get_continuation_for_phi (phi, &ref, &visited);
+ if (visited)
+ BITMAP_FREE (visited);
}
+ else
+ vuse = NULL_TREE;
+ if (!vuse)
+ {
+ /* If we didn't find any, the value ID can't stay the same,
+ but return the translated vuse. */
+ *same_valid = false;
+ vuse = PHI_ARG_DEF (phi, e->dest_idx);
+ }
+ /* ??? We would like to return vuse here as this is the canonical
+ upmost vdef that this reference is associated with. But during
+ insertion of the references into the hash tables we only ever
+ directly insert with their direct gimple_vuse, hence returning
+ something else would make us not find the other expression. */
+ return PHI_ARG_DEF (phi, e->dest_idx);
}
return NULL_TREE;
that we will return. */
if (!pretemp || exprtype != TREE_TYPE (pretemp))
{
- pretemp = create_tmp_var (exprtype, "pretmp");
+ pretemp = create_tmp_reg (exprtype, "pretmp");
get_var_ann (pretemp);
}
+static pre_expr
+phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
+ basic_block pred, basic_block phiblock);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
- the phis in PRED. SEEN is a bitmap saying which expression we have
- translated since we started translation of the toplevel expression.
- Return NULL if we can't find a leader for each part of the
- translated expression. */
+ the phis in PRED. Return NULL if we can't find a leader for each part
+ of the translated expression. */
static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
- basic_block pred, basic_block phiblock, bitmap seen)
+ basic_block pred, basic_block phiblock)
{
- pre_expr oldexpr = expr;
- pre_expr phitrans;
-
- if (!expr)
- return NULL;
-
- if (value_id_constant_p (get_expr_value_id (expr)))
- return expr;
-
- phitrans = phi_trans_lookup (expr, pred);
- if (phitrans)
- return phitrans;
-
- /* Prevent cycles when we have recursively dependent leaders. This
- can only happen when phi translating the maximal set. */
- if (seen)
- {
- unsigned int expr_id = get_expression_id (expr);
- if (bitmap_bit_p (seen, expr_id))
- return NULL;
- bitmap_set_bit (seen, expr_id);
- }
-
switch (expr->kind)
{
- /* Constants contain no values that need translation. */
- case CONSTANT:
- return expr;
-
case NARY:
{
unsigned int i;
continue;
else
{
+ pre_expr leader, result;
unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
- pre_expr leader = find_leader_in_sets (op_val_id, set1, set2);
- pre_expr result = phi_translate_1 (leader, set1, set2,
- pred, phiblock, seen);
+ leader = find_leader_in_sets (op_val_id, set1, set2);
+ result = phi_translate (leader, set1, set2, pred, phiblock);
if (result && result != leader)
{
tree name = get_representative_for (result);
if (changed)
{
pre_expr constant;
+ unsigned int new_val_id;
tree result = vn_nary_op_lookup_pieces (newnary.length,
newnary.opcode,
newnary.op[2],
newnary.op[3],
&nary);
- unsigned int new_val_id;
+ if (result && is_gimple_min_invariant (result))
+ return get_or_alloc_expr_for_constant (result);
expr = (pre_expr) pool_alloc (pre_expr_pool);
expr->kind = NARY;
expr->id = 0;
- if (result && is_gimple_min_invariant (result))
- return get_or_alloc_expr_for_constant (result);
-
-
if (nary)
{
PRE_EXPR_NARY (expr) = nary;
}
add_to_value (new_val_id, expr);
}
- phi_trans_add (oldexpr, expr, pred);
return expr;
}
break;
tree vuse = ref->vuse;
tree newvuse = vuse;
VEC (vn_reference_op_s, heap) *newoperands = NULL;
- bool changed = false;
+ bool changed = false, same_valid = true;
unsigned int i, j;
vn_reference_op_t operand;
vn_reference_t newref;
{
unsigned int op_val_id = VN_INFO (op0)->value_id;
leader = find_leader_in_sets (op_val_id, set1, set2);
- opresult = phi_translate_1 (leader, set1, set2,
- pred, phiblock, seen);
+ opresult = phi_translate (leader, set1, set2, pred, phiblock);
if (opresult && opresult != leader)
{
tree name = get_representative_for (opresult);
{
unsigned int op_val_id = VN_INFO (op1)->value_id;
leader = find_leader_in_sets (op_val_id, set1, set2);
- opresult = phi_translate_1 (leader, set1, set2,
- pred, phiblock, seen);
+ opresult = phi_translate (leader, set1, set2, pred, phiblock);
if (opresult && opresult != leader)
{
tree name = get_representative_for (opresult);
{
unsigned int op_val_id = VN_INFO (op2)->value_id;
leader = find_leader_in_sets (op_val_id, set1, set2);
- opresult = phi_translate_1 (leader, set1, set2,
- pred, phiblock, seen);
+ opresult = phi_translate (leader, set1, set2, pred, phiblock);
if (opresult && opresult != leader)
{
tree name = get_representative_for (opresult);
{
newvuse = translate_vuse_through_block (newoperands,
ref->set, ref->type,
- vuse, phiblock, pred);
+ vuse, phiblock, pred,
+ &same_valid);
if (newvuse == NULL_TREE)
{
VEC_free (vn_reference_op_s, heap, newoperands);
return NULL;
}
}
- changed |= newvuse != vuse;
- if (changed)
+ if (changed || newvuse != vuse)
{
unsigned int new_val_id;
pre_expr constant;
ref->type,
newoperands,
&newref, true);
- if (newref)
+ if (result)
VEC_free (vn_reference_op_s, heap, newoperands);
if (result && is_gimple_min_invariant (result))
}
else
{
- new_val_id = get_next_value_id ();
- VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
- get_max_value_id() + 1);
+ if (changed || !same_valid)
+ {
+ new_val_id = get_next_value_id ();
+ VEC_safe_grow_cleared (bitmap_set_t, heap,
+ value_expressions,
+ get_max_value_id() + 1);
+ }
+ else
+ new_val_id = ref->value_id;
newref = vn_reference_insert_pieces (newvuse, ref->set,
ref->type,
newoperands,
add_to_value (new_val_id, expr);
}
VEC_free (vn_reference_op_s, heap, newoperands);
- phi_trans_add (oldexpr, expr, pred);
return expr;
}
break;
}
}
-/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
- the phis in PRED.
- Return NULL if we can't find a leader for each part of the
- translated expression. */
+/* Wrapper around phi_translate_1 providing caching functionality. */
static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
basic_block pred, basic_block phiblock)
{
- bitmap_clear (seen_during_translate);
- return phi_translate_1 (expr, set1, set2, pred, phiblock,
- seen_during_translate);
+ pre_expr phitrans;
+
+ if (!expr)
+ return NULL;
+
+ /* Constants contain no values that need translation. */
+ if (expr->kind == CONSTANT)
+ return expr;
+
+ if (value_id_constant_p (get_expr_value_id (expr)))
+ return expr;
+
+ if (expr->kind != NAME)
+ {
+ phitrans = phi_trans_lookup (expr, pred);
+ if (phitrans)
+ return phitrans;
+ }
+
+ /* Translate. */
+ phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
+
+ /* Don't add empty translations to the cache. Neither add
+ translations of NAMEs as those are cheap to translate. */
+ if (phitrans
+ && expr->kind != NAME)
+ phi_trans_add (expr, phitrans, pred);
+
+ return phitrans;
}
+
/* For each expression in SET, translate the values through phi nodes
in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
expressions in DEST. */
pre_expr expr;
int i;
- if (!phi_nodes (phiblock))
+ if (gimple_seq_empty_p (phi_nodes (phiblock)))
{
bitmap_set_copy (dest, set);
return;
{
pre_expr translated;
translated = phi_translate (expr, set, NULL, pred, phiblock);
+ if (!translated)
+ continue;
- /* Don't add empty translations to the cache */
- if (translated)
- phi_trans_add (expr, translated, pred);
-
- if (translated != NULL)
+ /* We might end up with multiple expressions from SET being
+ translated to the same value. In this case we do not want
+ to retain the NARY or REFERENCE expression but prefer a NAME
+ which would be the leader. */
+ if (translated->kind == NAME)
+ bitmap_value_replace_in_set (dest, translated);
+ else
bitmap_value_insert_into_set (dest, translated);
}
VEC_free (pre_expr, heap, exprs);
return false;
}
}
+ /* If the NARY may trap make sure the block does not contain
+ a possible exit point.
+ ??? This is overly conservative if we translate AVAIL_OUT
+ as the available expression might be after the exit point. */
+ if (BB_MAY_NOTRETURN (block)
+ && vn_nary_may_trap (nary))
+ return false;
return true;
}
break;
goto maybe_dump_sets;
}
- if (phi_nodes (first))
+ if (!gimple_seq_empty_p (phi_nodes (first)))
phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
else
bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
{
- if (phi_nodes (bprime))
+ if (!gimple_seq_empty_p (phi_nodes (bprime)))
{
bitmap_set_t tmp = bitmap_set_new ();
phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
bitmap_value_insert_into_set (PA_OUT,
expression_for_id (i));
- if (phi_nodes (bprime))
+ if (!gimple_seq_empty_p (phi_nodes (bprime)))
{
bitmap_set_t pa_in = bitmap_set_new ();
phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
BB_VISITED (block) = 0;
BB_DEFERRED (block) = 0;
+
/* While we are here, give empty ANTIC_IN sets to each block. */
ANTIC_IN (block) = bitmap_set_new ();
PA_IN (block) = bitmap_set_new ();
fprintf (dump_file, "Starting iteration %d\n", num_iterations);
num_iterations++;
changed = false;
- for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
+ for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
{
if (TEST_BIT (changed_blocks, postorder[i]))
{
fprintf (dump_file, "Starting iteration %d\n", num_iterations);
num_iterations++;
changed = false;
- for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
+ for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
{
if (TEST_BIT (changed_blocks, postorder[i]))
{
/* Inserted expressions are placed onto this worklist, which is used
for performing quick dead code elimination of insertions we made
that didn't turn out to be necessary. */
-static VEC(gimple,heap) *inserted_exprs;
-static bitmap inserted_phi_names;
+static bitmap inserted_exprs;
/* Pool allocated fake store expressions are placed onto this
worklist, which, after performing dead code elimination, is walked
{
case CALL_EXPR:
{
- tree folded, sc = currop->op1;
+ tree folded, sc = NULL_TREE;
unsigned int nargs = 0;
- tree *args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
- ref->operands) - 1);
+ tree fn, *args;
+ if (TREE_CODE (currop->op0) == FUNCTION_DECL)
+ fn = currop->op0;
+ else
+ {
+ pre_expr op0 = get_or_alloc_expr_for (currop->op0);
+ fn = find_or_generate_expression (block, op0, stmts, domstmt);
+ if (!fn)
+ return NULL_TREE;
+ }
+ if (currop->op1)
+ {
+ pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
+ sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
+ if (!sc)
+ return NULL_TREE;
+ }
+ args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
+ ref->operands) - 1);
while (*operand < VEC_length (vn_reference_op_s, ref->operands))
{
args[nargs] = create_component_ref_by_pieces_1 (block, ref,
operand, stmts,
domstmt);
+ if (!args[nargs])
+ {
+ free (args);
+ return NULL_TREE;
+ }
nargs++;
}
folded = build_call_array (currop->type,
- TREE_CODE (currop->op0) == FUNCTION_DECL
- ? build_fold_addr_expr (currop->op0)
- : currop->op0,
+ (TREE_CODE (fn) == FUNCTION_DECL
+ ? build_fold_addr_expr (fn) : fn),
nargs, args);
free (args);
if (sc)
- {
- pre_expr scexpr = get_or_alloc_expr_for (sc);
- sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
- if (!sc)
- return NULL_TREE;
- CALL_EXPR_STATIC_CHAIN (folded) = sc;
- }
+ CALL_EXPR_STATIC_CHAIN (folded) = sc;
return folded;
}
break;
return NULL_TREE;
if (genop2)
{
- op2expr = get_or_alloc_expr_for (genop2);
- genop2 = find_or_generate_expression (block, op2expr, stmts,
- domstmt);
- if (!genop2)
- return NULL_TREE;
+ /* Drop zero minimum index. */
+ if (tree_int_cst_equal (genop2, integer_zero_node))
+ genop2 = NULL_TREE;
+ else
+ {
+ op2expr = get_or_alloc_expr_for (genop2);
+ genop2 = find_or_generate_expression (block, op2expr, stmts,
+ domstmt);
+ if (!genop2)
+ return NULL_TREE;
+ }
}
if (genop3)
{
tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
- genop3 = size_binop (EXACT_DIV_EXPR, genop3,
- size_int (TYPE_ALIGN_UNIT (elmt_type)));
- op3expr = get_or_alloc_expr_for (genop3);
- genop3 = find_or_generate_expression (block, op3expr, stmts,
- domstmt);
- if (!genop3)
- return NULL_TREE;
+ /* We can't always put a size in units of the element alignment
+ here as the element alignment may be not visible. See
+ PR43783. Simply drop the element size for constant
+ sizes. */
+ if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
+ genop3 = NULL_TREE;
+ else
+ {
+ genop3 = size_binop (EXACT_DIV_EXPR, genop3,
+ size_int (TYPE_ALIGN_UNIT (elmt_type)));
+ op3expr = get_or_alloc_expr_for (genop3);
+ genop3 = find_or_generate_expression (block, op3expr, stmts,
+ domstmt);
+ if (!genop3)
+ return NULL_TREE;
+ }
}
return build4 (currop->opcode, currop->type, genop0, genop1,
genop2, genop3);
stmts, domstmt);
if (!genop1 || !genop2)
return NULL_TREE;
- genop1 = fold_convert (TREE_TYPE (nary->op[0]),
- genop1);
/* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
may be a constant with the wrong type. */
if (nary->opcode == POINTER_PLUS_EXPR)
- genop2 = fold_convert (sizetype, genop2);
+ {
+ genop1 = fold_convert (nary->type, genop1);
+ genop2 = fold_convert (sizetype, genop2);
+ }
else
- genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
-
+ {
+ genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
+ genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
+ }
+
folded = fold_build2 (nary->opcode, nary->type,
genop1, genop2);
}
tree forcedname = gimple_get_lhs (stmt);
pre_expr nameexpr;
- VEC_safe_push (gimple, heap, inserted_exprs, stmt);
if (TREE_CODE (forcedname) == SSA_NAME)
{
+ bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
VN_INFO_GET (forcedname)->valnum = forcedname;
VN_INFO (forcedname)->value_id = get_next_value_id ();
nameexpr = get_or_alloc_expr_for_name (forcedname);
that we will return. */
if (!pretemp || exprtype != TREE_TYPE (pretemp))
{
- pretemp = create_tmp_var (exprtype, "pretmp");
+ pretemp = create_tmp_reg (exprtype, "pretmp");
get_var_ann (pretemp);
}
temp = pretemp;
add_referenced_var (temp);
- if (TREE_CODE (exprtype) == COMPLEX_TYPE
- || TREE_CODE (exprtype) == VECTOR_TYPE)
- DECL_GIMPLE_REG_P (temp) = 1;
-
newstmt = gimple_build_assign (temp, folded);
name = make_ssa_name (temp, newstmt);
gimple_assign_set_lhs (newstmt, name);
gimple_set_plf (newstmt, NECESSARY, false);
gimple_seq_add_stmt (stmts, newstmt);
- VEC_safe_push (gimple, heap, inserted_exprs, newstmt);
+ bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
/* All the symbols in NEWEXPR should be put into SSA form. */
mark_symbols_for_renaming (newstmt);
}
}
- /* Make sure we are not inserting trapping expressions. */
- FOR_EACH_EDGE (pred, ei, block->preds)
- {
- bprime = pred->src;
- eprime = avail[bprime->index];
- if (eprime->kind == NARY
- && vn_nary_may_trap (PRE_EXPR_NARY (eprime)))
- return false;
- }
-
/* Make the necessary insertions. */
FOR_EACH_EDGE (pred, ei, block->preds)
{
if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
{
tree builtexpr = fold_convert (type, constant);
- if (!is_gimple_min_invariant (builtexpr))
+ if (!is_gimple_min_invariant (builtexpr))
{
tree forcedexpr = force_gimple_operand (builtexpr,
&stmts, true,
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- VEC_safe_push (gimple, heap, inserted_exprs, stmt);
+ tree lhs = gimple_get_lhs (stmt);
+ if (TREE_CODE (lhs) == SSA_NAME)
+ bitmap_set_bit (inserted_exprs,
+ SSA_NAME_VERSION (lhs));
gimple_set_plf (stmt, NECESSARY, false);
}
gsi_insert_seq_on_edge (pred, stmts);
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- VEC_safe_push (gimple, heap, inserted_exprs, stmt);
+ tree lhs = gimple_get_lhs (stmt);
+ if (TREE_CODE (lhs) == SSA_NAME)
+ bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
gimple_set_plf (stmt, NECESSARY, false);
}
gsi_insert_seq_on_edge (pred, stmts);
gimple_set_plf (phi, NECESSARY, false);
VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
VN_INFO (gimple_phi_result (phi))->value_id = val;
- VEC_safe_push (gimple, heap, inserted_exprs, phi);
- bitmap_set_bit (inserted_phi_names,
- SSA_NAME_VERSION (gimple_phi_result (phi)));
+ bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
FOR_EACH_EDGE (pred, ei, block->preds)
{
pre_expr ae = avail[pred->src->index];
for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
make_values_for_phi (gsi_stmt (gsi), block);
+ BB_MAY_NOTRETURN (block) = 0;
+
/* Now compute value numbers and populate value sets with all
the expressions computed in BLOCK. */
for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, stmt_uid++);
+ /* Cache whether the basic-block has any non-visible side-effect
+ or control flow.
+ If this isn't a call or it is the last stmt in the
+ basic-block then the CFG represents things correctly. */
+ if (is_gimple_call (stmt)
+ && !stmt_ends_bb_p (stmt))
+ {
+ /* Non-looping const functions always return normally.
+ Otherwise the call might not return or have side-effects
+ that forbids hoisting possibly trapping expressions
+ before it. */
+ int flags = gimple_call_flags (stmt);
+ if (!(flags & ECF_CONST)
+ || (flags & ECF_LOOPING_CONST_OR_PURE))
+ BB_MAY_NOTRETURN (block) = 1;
+ }
+
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
{
pre_expr e = get_or_alloc_expr_for_name (op);
vn_reference_lookup (gimple_assign_rhs1 (stmt),
gimple_vuse (stmt),
- false, &ref);
+ true, &ref);
if (!ref)
continue;
replacing the PHI with a single copy if possible.
Do not touch inserted, single-argument or virtual PHIs. */
if (gimple_phi_num_args (phi) == 1
- || !is_gimple_reg (res)
- || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res)))
+ || !is_gimple_reg (res))
{
gsi_next (&gsi);
continue;
remove_phi_node (&gsi, false);
+ if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
+ && TREE_CODE (sprime) == SSA_NAME)
+ gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
+
if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (res), sprime);
stmt = gimple_build_assign (res, sprime);
SSA_NAME_DEF_STMT (res) = stmt;
- if (TREE_CODE (sprime) == SSA_NAME)
- gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
- NECESSARY, true);
+ gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
+
gsi2 = gsi_after_labels (b);
gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
/* Queue the copy for eventual removal. */
VEC_safe_push (gimple, heap, to_remove, stmt);
- pre_stats.eliminations++;
+ /* If we inserted this PHI node ourself, it's not an elimination. */
+ if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
+ pre_stats.phis--;
+ else
+ pre_stats.eliminations++;
}
}
for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
{
tree lhs = gimple_assign_lhs (stmt);
+ tree rhs = gimple_assign_rhs1 (stmt);
use_operand_p use_p;
gimple use_stmt;
/* If there is a single use only, propagate the equivalency
instead of keeping the copy. */
if (TREE_CODE (lhs) == SSA_NAME
+ && TREE_CODE (rhs) == SSA_NAME
&& single_imm_use (lhs, &use_p, &use_stmt)
- && may_propagate_copy (USE_FROM_PTR (use_p),
- gimple_assign_rhs1 (stmt)))
+ && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
{
- SET_USE (use_p, gimple_assign_rhs1 (stmt));
+ SET_USE (use_p, rhs);
update_stmt (use_stmt);
+ if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
+ && TREE_CODE (rhs) == SSA_NAME)
+ gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
}
/* If this is a store or a now unused copy, remove it. */
gsi = gsi_for_stmt (stmt);
unlink_stmt_vdef (stmt);
gsi_remove (&gsi, true);
+ if (TREE_CODE (lhs) == SSA_NAME)
+ bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
release_defs (stmt);
}
}
+/* Remove statements and PHI nodes we inserted during PRE that did not
+   end up marked NECESSARY.  INSERTED_EXPRS is a bitmap of SSA name
+   versions of inserted definitions; we seed a worklist with the
+   NECESSARY ones, propagate NECESSARY backwards through their operands
+   (and PHI arguments) via mark_operand_necessary, and finally delete
+   every inserted definition that is still not NECESSARY.  */
static void
remove_dead_inserted_code (void)
{
- VEC(gimple,heap) *worklist = NULL;
- int i;
+ bitmap worklist;
+ unsigned i;
+ bitmap_iterator bi;
gimple t;
- worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
- for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
+ /* The worklist holds SSA name versions (matching the representation
+ of INSERTED_EXPRS) whose defining statements are NECESSARY but
+ whose operands have not been processed yet.  */
+ worklist = BITMAP_ALLOC (NULL);
+ EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
{
+ t = SSA_NAME_DEF_STMT (ssa_name (i));
if (gimple_plf (t, NECESSARY))
- VEC_quick_push (gimple, worklist, t);
+ bitmap_set_bit (worklist, i);
}
- while (VEC_length (gimple, worklist) > 0)
+ while (!bitmap_empty_p (worklist))
{
- t = VEC_pop (gimple, worklist);
+ i = bitmap_first_set_bit (worklist);
+ bitmap_clear_bit (worklist, i);
+ t = SSA_NAME_DEF_STMT (ssa_name (i));
/* PHI nodes are somewhat special in that each PHI alternative has
data and control dependencies. All the statements feeding the
{
unsigned k;
- VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
for (k = 0; k < gimple_phi_num_args (t); k++)
{
tree arg = PHI_ARG_DEF (t, k);
{
gimple n = mark_operand_necessary (arg);
if (n)
- VEC_quick_push (gimple, worklist, n);
+ bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
}
}
}
{
gimple n = mark_operand_necessary (use);
if (n)
- VEC_safe_push (gimple, heap, worklist, n);
+ bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
}
}
}
- for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
+ EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
{
+ t = SSA_NAME_DEF_STMT (ssa_name (i));
if (!gimple_plf (t, NECESSARY))
{
gimple_stmt_iterator gsi;
if (gimple_code (t) == GIMPLE_PHI)
remove_phi_node (&gsi, true);
else
- gsi_remove (&gsi, true);
- release_defs (t);
+ {
+ gsi_remove (&gsi, true);
+ release_defs (t);
+ }
}
}
- VEC_free (gimple, heap, worklist);
+ BITMAP_FREE (worklist);
+}
+
+/* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
+ true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
+ the number of visited blocks.
+
+ Unlike post_order_compute, this walks the CFG backwards: the DFS
+ starts at EXIT_BLOCK_PTR and follows predecessor edges, so the
+ orderings it produces are with respect to the reverse CFG.  */
+
+static int
+my_rev_post_order_compute (int *post_order, bool include_entry_exit)
+{
+ edge_iterator *stack;
+ int sp;
+ int post_order_num = 0;
+ sbitmap visited;
+
+ if (include_entry_exit)
+ post_order[post_order_num++] = EXIT_BLOCK;
+
+ /* Allocate stack for back-tracking up CFG. */
+ stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
+ sp = 0;
+
+ /* Allocate bitmap to track nodes that have been visited. */
+ visited = sbitmap_alloc (last_basic_block);
+
+ /* None of the nodes in the CFG have been visited yet. */
+ sbitmap_zero (visited);
+
+ /* Start the walk from the predecessors of the exit block. */
+ stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
+
+ while (sp)
+ {
+ edge_iterator ei;
+ basic_block src;
+ basic_block dest;
+
+ /* Look at the edge on the top of the stack. */
+ ei = stack[sp - 1];
+ src = ei_edge (ei)->src;
+ dest = ei_edge (ei)->dest;
+
+ /* Since we walk predecessor edges, SRC is the node being
+ discovered.  Check whether it has been visited yet. */
+ if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
+ {
+ /* Mark that we have visited SRC. */
+ SET_BIT (visited, src->index);
+
+ if (EDGE_COUNT (src->preds) > 0)
+ /* Since SRC has been visited for the first time,
+ descend into its predecessors. */
+ stack[sp++] = ei_start (src->preds);
+ else
+ /* SRC has no predecessors, so it is finished immediately. */
+ post_order[post_order_num++] = src->index;
+ }
+ else
+ {
+ /* Once all of DEST's predecessor edges are processed,
+ DEST itself is finished and gets numbered. */
+ if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
+ post_order[post_order_num++] = dest->index;
+
+ if (!ei_one_before_end_p (ei))
+ ei_next (&stack[sp - 1]);
+ else
+ sp--;
+ }
+ }
+
+ if (include_entry_exit)
+ post_order[post_order_num++] = ENTRY_BLOCK;
+
+ free (stack);
+ sbitmap_free (visited);
+ return post_order_num;
}
+
/* Initialize data structures used by PRE. */
static void
value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
get_max_value_id() + 1);
+ name_to_id = NULL;
in_fre = do_fre;
- inserted_exprs = NULL;
+ inserted_exprs = BITMAP_ALLOC (NULL);
need_creation = NULL;
pretemp = NULL_TREE;
storetemp = NULL_TREE;
postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
- post_order_compute (postorder, false, false);
+ my_rev_post_order_compute (postorder, false);
FOR_ALL_BB (bb)
bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);
calculate_dominance_info (CDI_DOMINATORS);
bitmap_obstack_initialize (&grand_bitmap_obstack);
- inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack);
phi_translate_table = htab_create (5110, expr_pred_trans_hash,
expr_pred_trans_eq, free);
expression_to_id = htab_create (num_ssa_names * 3,
pre_expr_hash,
pre_expr_eq, NULL);
- seen_during_translate = BITMAP_ALLOC (&grand_bitmap_obstack);
bitmap_set_pool = create_alloc_pool ("Bitmap sets",
sizeof (struct bitmap_set), 30);
pre_expr_pool = create_alloc_pool ("pre_expr nodes",
free (postorder);
VEC_free (bitmap_set_t, heap, value_expressions);
- VEC_free (gimple, heap, inserted_exprs);
+ BITMAP_FREE (inserted_exprs);
VEC_free (gimple, heap, need_creation);
bitmap_obstack_release (&grand_bitmap_obstack);
free_alloc_pool (bitmap_set_pool);
free_alloc_pool (pre_expr_pool);
htab_delete (phi_translate_table);
htab_delete (expression_to_id);
+ VEC_free (unsigned, heap, name_to_id);
FOR_ALL_BB (bb)
{
if (!run_scc_vn (do_fre))
{
if (!do_fre)
- {
- remove_dead_inserted_code ();
- loop_optimizer_finalize ();
- }
-
+ loop_optimizer_finalize ();
+
return 0;
}
+
init_pre (do_fre);
scev_initialize ();
-
/* Collect and value number expressions computed in each basic block. */
compute_avail ();
insert ();
}
+ /* Make sure to remove fake edges before committing our inserts.
+ This makes sure we don't end up with extra critical edges that
+ we would need to split. */
+ remove_fake_exit_edges ();
+ gsi_commit_edge_inserts ();
+
/* Remove all the redundant expressions. */
todo |= eliminate ();
statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
statistics_counter_event (cfun, "Constified", pre_stats.constified);
- /* Make sure to remove fake edges before committing our inserts.
- This makes sure we don't end up with extra critical edges that
- we would need to split. */
- remove_fake_exit_edges ();
- gsi_commit_edge_inserts ();
-
clear_expression_ids ();
free_scc_vn ();
if (!do_fre)