return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
PRE_EXPR_REFERENCE (e2));
default:
- abort();
+ gcc_unreachable ();
}
}
case CONSTANT:
return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
case NAME:
- return iterative_hash_hashval_t (SSA_NAME_VERSION (PRE_EXPR_NAME (e)), 0);
+ return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
case NARY:
return PRE_EXPR_NARY (e)->hashcode;
case REFERENCE:
return PRE_EXPR_REFERENCE (e)->hashcode;
default:
- abort ();
+ gcc_unreachable ();
}
}
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
+static VEC(unsigned, heap) *name_to_id;
/* Allocate an expression id for EXPR. */
gcc_assert (next_expression_id + 1 > next_expression_id);
expr->id = next_expression_id++;
VEC_safe_push (pre_expr, heap, expressions, expr);
- slot = htab_find_slot (expression_to_id, expr, INSERT);
- gcc_assert (!*slot);
- *slot = expr;
+ if (expr->kind == NAME)
+ {
+ unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
+ /* VEC_safe_grow_cleared allocates no headroom. Avoid frequent
+ re-allocations by using VEC_reserve upfront. There is no
+ VEC_quick_grow_cleared unfortunately. */
+ VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
+ VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
+ gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
+ VEC_replace (unsigned, name_to_id, version, expr->id);
+ }
+ else
+ {
+ slot = htab_find_slot (expression_to_id, expr, INSERT);
+ gcc_assert (!*slot);
+ *slot = expr;
+ }
return next_expression_id - 1;
}
{
void **slot;
- slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
- if (!slot)
- return 0;
- return ((pre_expr)*slot)->id;
+ if (expr->kind == NAME)
+ {
+ unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
+ if (VEC_length (unsigned, name_to_id) <= version)
+ return 0;
+ return VEC_index (unsigned, name_to_id, version);
+ }
+ else
+ {
+ slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
+ if (!slot)
+ return 0;
+ return ((pre_expr)*slot)->id;
+ }
}
/* Return the existing expression id for EXPR, or create one if one
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.

   Look the name up first using a stack-allocated probe expression so
   that the common hit case performs no pool allocation at all; only
   allocate from PRE_EXPR_POOL when NAME has not been seen before.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  /* Probe the expression table with a temporary on the stack.  */
  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  /* Not present yet: allocate a persistent expression and register it.  */
  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
bitmap expr_dies;
/* True if we have visited this block during ANTIC calculation. */
- unsigned int visited:1;
+ unsigned int visited : 1;
/* True we have deferred processing this block during ANTIC
calculation until its successor is processed. */
unsigned int deferred : 1;
+
+ /* True when the block contains a call that might not return. */
+ unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;
#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
+#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder. */
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
-static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr, bool);
+static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
+ unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
gimple, tree);
VEC_replace (bitmap_set_t, value_expressions, v, set);
}
- bitmap_insert_into_set_1 (set, e, true);
+ bitmap_insert_into_set_1 (set, e, v, true);
}
/* Create a new bitmap set and return it. */
static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
- bool allow_constants)
+ unsigned int val, bool allow_constants)
{
- unsigned int val = get_expr_value_id (expr);
if (allow_constants || !value_id_constant_p (val))
{
/* We specifically expect this and only this function to be able to
/* Insert EXPR into SET, computing its value id once and forwarding it
   to bitmap_insert_into_set_1.  Constant values are rejected there
   (ALLOW_CONSTANTS is false).  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST. */
{
unsigned int i, j;
bitmap_iterator bi, bj;
- VEC(pre_expr, heap) *result = NULL;
+ VEC(pre_expr, heap) *result;
+
+ /* Pre-allocate roughly enough space for the array. */
+ result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));
FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
{
{
unsigned int val = get_expr_value_id (expr);
+#ifdef ENABLE_CHECKING
+ gcc_assert (expr->id == get_or_alloc_expression_id (expr));
+#endif
+
+ /* Constant values are always considered to be part of the set. */
if (value_id_constant_p (val))
return;
- if (!bitmap_set_contains_value (set, val))
- bitmap_insert_into_set (set, expr);
+ /* If the value membership changed, add the expression. */
+ if (bitmap_set_bit (set->values, val))
+ bitmap_set_bit (set->expressions, expr->id);
}
/* Print out EXPR to outfile. */
{
unsigned int result_id;
unsigned int value_id;
- pre_expr newexpr = (pre_expr) pool_alloc (pre_expr_pool);
+ struct pre_expr_d expr;
+ pre_expr newexpr;
+
+ expr.kind = CONSTANT;
+ PRE_EXPR_CONSTANT (&expr) = constant;
+ result_id = lookup_expression_id (&expr);
+ if (result_id != 0)
+ return expression_for_id (result_id);
+
+ newexpr = (pre_expr) pool_alloc (pre_expr_pool);
newexpr->kind = CONSTANT;
PRE_EXPR_CONSTANT (newexpr) = constant;
- result_id = lookup_expression_id (newexpr);
- if (result_id != 0)
- {
- pool_free (pre_expr_pool, newexpr);
- newexpr = expression_for_id (result_id);
- return newexpr;
- }
+ alloc_expression_id (newexpr);
value_id = get_or_alloc_constant_value_id (constant);
- get_or_alloc_expression_id (newexpr);
add_to_value (value_id, newexpr);
return newexpr;
}
if (!expr)
return NULL;
+ /* Constants contain no values that need translation. */
+ if (expr->kind == CONSTANT)
+ return expr;
+
if (value_id_constant_p (get_expr_value_id (expr)))
return expr;
switch (expr->kind)
{
- /* Constants contain no values that need translation. */
- case CONSTANT:
- return expr;
-
case NARY:
{
unsigned int i;
if (changed)
{
pre_expr constant;
+ unsigned int new_val_id;
tree result = vn_nary_op_lookup_pieces (newnary.length,
newnary.opcode,
newnary.op[2],
newnary.op[3],
&nary);
- unsigned int new_val_id;
+ if (result && is_gimple_min_invariant (result))
+ return get_or_alloc_expr_for_constant (result);
expr = (pre_expr) pool_alloc (pre_expr_pool);
expr->kind = NARY;
expr->id = 0;
- if (result && is_gimple_min_invariant (result))
- return get_or_alloc_expr_for_constant (result);
-
-
if (nary)
{
PRE_EXPR_NARY (expr) = nary;
pre_expr expr;
int i;
- if (!phi_nodes (phiblock))
+ if (gimple_seq_empty_p (phi_nodes (phiblock)))
{
bitmap_set_copy (dest, set);
return;
translated = phi_translate (expr, set, NULL, pred, phiblock);
/* Don't add empty translations to the cache */
- if (translated)
- phi_trans_add (expr, translated, pred);
+ if (!translated)
+ continue;
- if (translated != NULL)
+ phi_trans_add (expr, translated, pred);
+
+ /* We might end up with multiple expressions from SET being
+ translated to the same value. In this case we do not want
+ to retain the NARY or REFERENCE expression but prefer a NAME
+ which would be the leader. */
+ if (translated->kind == NAME)
+ bitmap_value_replace_in_set (dest, translated);
+ else
bitmap_value_insert_into_set (dest, translated);
}
VEC_free (pre_expr, heap, exprs);
return false;
}
}
+ /* If the NARY may trap make sure the block does not contain
+ a possible exit point.
+ ??? This is overly conservative if we translate AVAIL_OUT
+ as the available expression might be after the exit point. */
+ if (BB_MAY_NOTRETURN (block)
+ && vn_nary_may_trap (nary))
+ return false;
return true;
}
break;
goto maybe_dump_sets;
}
- if (phi_nodes (first))
+ if (!gimple_seq_empty_p (phi_nodes (first)))
phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
else
bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
{
- if (phi_nodes (bprime))
+ if (!gimple_seq_empty_p (phi_nodes (bprime)))
{
bitmap_set_t tmp = bitmap_set_new ();
phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
bitmap_value_insert_into_set (PA_OUT,
expression_for_id (i));
- if (phi_nodes (bprime))
+ if (!gimple_seq_empty_p (phi_nodes (bprime)))
{
bitmap_set_t pa_in = bitmap_set_new ();
phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
BB_VISITED (block) = 0;
BB_DEFERRED (block) = 0;
+
/* While we are here, give empty ANTIC_IN sets to each block. */
ANTIC_IN (block) = bitmap_set_new ();
PA_IN (block) = bitmap_set_new ();
stmts, domstmt);
if (!genop1 || !genop2)
return NULL_TREE;
- genop1 = fold_convert (TREE_TYPE (nary->op[0]),
- genop1);
/* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
may be a constant with the wrong type. */
if (nary->opcode == POINTER_PLUS_EXPR)
- genop2 = fold_convert (sizetype, genop2);
+ {
+ genop1 = fold_convert (nary->type, genop1);
+ genop2 = fold_convert (sizetype, genop2);
+ }
else
- genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
+ {
+ genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
+ genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
+ }
folded = fold_build2 (nary->opcode, nary->type,
genop1, genop2);
}
}
- /* Make sure we are not inserting trapping expressions. */
- FOR_EACH_EDGE (pred, ei, block->preds)
- {
- bprime = pred->src;
- eprime = avail[bprime->index];
- if (eprime->kind == NARY
- && vn_nary_may_trap (PRE_EXPR_NARY (eprime)))
- return false;
- }
-
/* Make the necessary insertions. */
FOR_EACH_EDGE (pred, ei, block->preds)
{
for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
make_values_for_phi (gsi_stmt (gsi), block);
+ BB_MAY_NOTRETURN (block) = 0;
+
/* Now compute value numbers and populate value sets with all
the expressions computed in BLOCK. */
for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, stmt_uid++);
+ /* Cache whether the basic-block has any non-visible side-effect
+ or control flow.
+ If this isn't a call or it is the last stmt in the
+ basic-block then the CFG represents things correctly. */
+ if (is_gimple_call (stmt)
+ && !stmt_ends_bb_p (stmt))
+ {
+ /* Non-looping const functions always return normally.
+ Otherwise the call might not return or have side-effects
+ that forbids hoisting possibly trapping expressions
+ before it. */
+ int flags = gimple_call_flags (stmt);
+ if (!(flags & ECF_CONST)
+ || (flags & ECF_LOOPING_CONST_OR_PURE))
+ BB_MAY_NOTRETURN (block) = 1;
+ }
+
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
{
pre_expr e = get_or_alloc_expr_for_name (op);
value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
get_max_value_id() + 1);
+ name_to_id = NULL;
in_fre = do_fre;
free_alloc_pool (pre_expr_pool);
htab_delete (phi_translate_table);
htab_delete (expression_to_id);
+ VEC_free (unsigned, heap, name_to_id);
FOR_ALL_BB (bb)
{