static tree storetemp;
static tree prephitemp;
-/* Set of blocks with statements that have had its EH information
- cleaned up. */
+/* Set of blocks with statements that have had their EH properties changed. */
static bitmap need_eh_cleanup;
+/* Set of blocks with statements that have had their AB (abnormal edge) properties changed. */
+static bitmap need_ab_cleanup;
+
/* The phi_translate_table caches phi translations for a given
expression and predecessor. */
slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
new_pair->hashcode, INSERT);
- if (*slot)
- free (*slot);
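+ /* free (NULL) is a no-op, so any previous translation for this
+    (expr, pred) pair is simply released before being replaced.  */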
+ free (*slot);
*slot = (void *) new_pair;
}
exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
{
- if (bitmap_bit_p (&set->expressions, i))
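+ /* bitmap_clear_bit returns true iff the bit was set, so this
+    combines the membership test with the clear.  */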
+ if (bitmap_clear_bit (&set->expressions, i))
{
- bitmap_clear_bit (&set->expressions, i);
bitmap_set_bit (&set->expressions, get_expression_id (expr));
return;
}
{
unsigned int val = get_expr_value_id (expr);
-#ifdef ENABLE_CHECKING
- gcc_assert (expr->id == get_or_alloc_expression_id (expr));
-#endif
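+ /* gcc_checking_assert compiles to a no-op unless GCC itself is
+    built with checking enabled.  */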
+ gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
/* Constant values are always considered to be part of the set. */
if (value_id_constant_p (val))
vn_nary_op_t nary = PRE_EXPR_NARY (e);
switch (TREE_CODE_CLASS (nary->opcode))
{
- case tcc_expression:
- if (nary->opcode == TRUTH_NOT_EXPR)
- goto do_unary;
- if (nary->opcode != TRUTH_AND_EXPR
- && nary->opcode != TRUTH_OR_EXPR
- && nary->opcode != TRUTH_XOR_EXPR)
- return e;
- /* Fallthrough. */
case tcc_binary:
case tcc_comparison:
{
return e;
/* Fallthrough. */
case tcc_unary:
-do_unary:
{
/* We have to go from trees to pre exprs to value ids to
constants. */
if (!pretemp || exprtype != TREE_TYPE (pretemp))
{
pretemp = create_tmp_reg (exprtype, "pretmp");
- get_var_ann (pretemp);
+ add_referenced_var (pretemp);
}
name = make_ssa_name (pretemp, gimple_build_nop ());
unsigned int i;
bool changed = false;
vn_nary_op_t nary = PRE_EXPR_NARY (expr);
- struct vn_nary_op_s newnary;
- /* The NARY structure is only guaranteed to have been
- allocated to the nary->length operands. */
- memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
- - sizeof (tree) * (4 - nary->length)));
+ vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
+ sizeof_vn_nary_op (nary->length));
+ memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
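+ /* newnary is a stack copy of NARY; sizeof_vn_nary_op includes the
+    operand array that trails struct vn_nary_op_s.  */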
- for (i = 0; i < newnary.length; i++)
+ for (i = 0; i < newnary->length; i++)
{
- if (TREE_CODE (newnary.op[i]) != SSA_NAME)
+ if (TREE_CODE (newnary->op[i]) != SSA_NAME)
continue;
else
{
pre_expr leader, result;
- unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
+ unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
leader = find_leader_in_sets (op_val_id, set1, set2);
result = phi_translate (leader, set1, set2, pred, phiblock);
if (result && result != leader)
tree name = get_representative_for (result);
if (!name)
return NULL;
- newnary.op[i] = name;
+ newnary->op[i] = name;
}
else if (!result)
return NULL;
- changed |= newnary.op[i] != nary->op[i];
+ changed |= newnary->op[i] != nary->op[i];
}
}
if (changed)
pre_expr constant;
unsigned int new_val_id;
- tree result = vn_nary_op_lookup_pieces (newnary.length,
- newnary.opcode,
- newnary.type,
- newnary.op[0],
- newnary.op[1],
- newnary.op[2],
- newnary.op[3],
+ tree result = vn_nary_op_lookup_pieces (newnary->length,
+ newnary->opcode,
+ newnary->type,
+ &newnary->op[0],
&nary);
if (result && is_gimple_min_invariant (result))
return get_or_alloc_expr_for_constant (result);
VEC_safe_grow_cleared (bitmap_set_t, heap,
value_expressions,
get_max_value_id () + 1);
- nary = vn_nary_op_insert_pieces (newnary.length,
- newnary.opcode,
- newnary.type,
- newnary.op[0],
- newnary.op[1],
- newnary.op[2],
- newnary.op[3],
+ nary = vn_nary_op_insert_pieces (newnary->length,
+ newnary->opcode,
+ newnary->type,
+ &newnary->op[0],
result, new_val_id);
PRE_EXPR_NARY (expr) = nary;
constant = fully_constant_expression (expr);
tree newvuse = vuse;
VEC (vn_reference_op_s, heap) *newoperands = NULL;
bool changed = false, same_valid = true;
- unsigned int i, j;
+ unsigned int i, j, n;
vn_reference_op_t operand;
vn_reference_t newref;
{
pre_expr opresult;
pre_expr leader;
- tree oldop0 = operand->op0;
- tree oldop1 = operand->op1;
- tree oldop2 = operand->op2;
- tree op0 = oldop0;
- tree op1 = oldop1;
- tree op2 = oldop2;
+ tree op[3];
tree type = operand->type;
vn_reference_op_s newop = *operand;
-
- if (op0 && TREE_CODE (op0) == SSA_NAME)
+ op[0] = operand->op0;
+ op[1] = operand->op1;
+ op[2] = operand->op2;
+ for (n = 0; n < 3; ++n)
{
- unsigned int op_val_id = VN_INFO (op0)->value_id;
- leader = find_leader_in_sets (op_val_id, set1, set2);
- opresult = phi_translate (leader, set1, set2, pred, phiblock);
- if (opresult && opresult != leader)
+ unsigned int op_val_id;
+ if (!op[n])
+ continue;
+ if (TREE_CODE (op[n]) != SSA_NAME)
{
- tree name = get_representative_for (opresult);
- if (!name)
+ /* We can't possibly insert these. */
+ if (n != 0
+ && !is_gimple_min_invariant (op[n]))
break;
- op0 = name;
+ continue;
}
- else if (!opresult)
- break;
- }
- changed |= op0 != oldop0;
-
- if (op1 && TREE_CODE (op1) == SSA_NAME)
- {
- unsigned int op_val_id = VN_INFO (op1)->value_id;
+ op_val_id = VN_INFO (op[n])->value_id;
leader = find_leader_in_sets (op_val_id, set1, set2);
- opresult = phi_translate (leader, set1, set2, pred, phiblock);
- if (opresult && opresult != leader)
+ if (!leader)
+ break;
+ /* Make sure we do not recursively translate ourselves
+ like for translating a[n_1] with the leader for
+ n_1 being a[n_1]. */
+ if (get_expression_id (leader) != get_expression_id (expr))
{
- tree name = get_representative_for (opresult);
- if (!name)
+ opresult = phi_translate (leader, set1, set2,
+ pred, phiblock);
+ if (!opresult)
break;
- op1 = name;
+ if (opresult != leader)
+ {
+ tree name = get_representative_for (opresult);
+ if (!name)
+ break;
+ changed |= name != op[n];
+ op[n] = name;
+ }
}
- else if (!opresult)
- break;
}
- /* We can't possibly insert these. */
- else if (op1 && !is_gimple_min_invariant (op1))
- break;
- changed |= op1 != oldop1;
- if (op2 && TREE_CODE (op2) == SSA_NAME)
+ if (n != 3)
{
- unsigned int op_val_id = VN_INFO (op2)->value_id;
- leader = find_leader_in_sets (op_val_id, set1, set2);
- opresult = phi_translate (leader, set1, set2, pred, phiblock);
- if (opresult && opresult != leader)
- {
- tree name = get_representative_for (opresult);
- if (!name)
- break;
- op2 = name;
- }
- else if (!opresult)
- break;
+ if (newoperands)
+ VEC_free (vn_reference_op_s, heap, newoperands);
+ return NULL;
}
- /* We can't possibly insert these. */
- else if (op2 && !is_gimple_min_invariant (op2))
- break;
- changed |= op2 != oldop2;
-
if (!newoperands)
newoperands = VEC_copy (vn_reference_op_s, heap, operands);
/* We may have changed from an SSA_NAME to a constant.  */
- if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
- newop.opcode = TREE_CODE (op0);
+ if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
+ newop.opcode = TREE_CODE (op[0]);
newop.type = type;
- newop.op0 = op0;
- newop.op1 = op1;
- newop.op2 = op2;
+ newop.op0 = op[0];
+ newop.op1 = op[1];
+ newop.op2 = op[2];
/* If it transforms a non-constant ARRAY_REF into a constant
one, adjust the constant offset. */
if (newop.opcode == ARRAY_REF
&& newop.off == -1
- && TREE_CODE (op0) == INTEGER_CST
- && TREE_CODE (op1) == INTEGER_CST
- && TREE_CODE (op2) == INTEGER_CST)
+ && TREE_CODE (op[0]) == INTEGER_CST
+ && TREE_CODE (op[1]) == INTEGER_CST
+ && TREE_CODE (op[2]) == INTEGER_CST)
{
- double_int off = tree_to_double_int (op0);
+ double_int off = tree_to_double_int (op[0]);
off = double_int_add (off,
double_int_neg
- (tree_to_double_int (op1)));
- off = double_int_mul (off, tree_to_double_int (op2));
+ (tree_to_double_int (op[1])));
+ off = double_int_mul (off, tree_to_double_int (op[2]));
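+ /* E.g. with index op[0] == 7, low bound op[1] == 4 and element
+    size op[2] == 4 this yields (7 - 4) * 4 == 12.  */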
if (double_int_fits_in_shwi_p (off))
newop.off = off.low;
}
VEC_replace (vn_reference_op_s, newoperands, j, &newop);
/* If it transforms from an SSA_NAME to an address, fold with
a preceding indirect reference. */
- if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
+ if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
&& VEC_index (vn_reference_op_s,
newoperands, j - 1)->opcode == MEM_REF)
vn_reference_fold_indirect (&newoperands, &j);
tree result = vn_reference_lookup_pieces (newvuse, ref->set,
ref->type,
newoperands,
- &newref, true);
+ &newref, VN_WALK);
if (result)
VEC_free (vn_reference_op_s, heap, newoperands);
result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
converted = true;
}
+ else if (!result && newref
+ && !useless_type_conversion_p (ref->type, newref->type))
+ {
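+ /* The translated reference would require a type conversion we
+    cannot express here, so punt.  */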
+ VEC_free (vn_reference_op_s, heap, newoperands);
+ return NULL;
+ }
if (result && is_gimple_min_invariant (result))
{
nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
TREE_TYPE (result),
- TREE_OPERAND (result, 0),
- NULL_TREE, NULL_TREE,
- NULL_TREE,
+ &TREE_OPERAND (result, 0),
&nary);
if (nresult && is_gimple_min_invariant (nresult))
return get_or_alloc_expr_for_constant (nresult);
get_max_value_id () + 1);
nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
TREE_TYPE (result),
- TREE_OPERAND (result, 0),
- NULL_TREE, NULL_TREE,
- NULL_TREE, NULL_TREE,
+ &TREE_OPERAND (result, 0),
+ NULL_TREE,
new_val_id);
PRE_EXPR_NARY (expr) = nary;
constant = fully_constant_expression (expr);
}
exprs = sorted_array_from_bitmap_set (set);
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
pre_expr translated;
translated = phi_translate (expr, set, NULL, pred, phiblock);
vn_reference_op_t vro;
unsigned int i;
- for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++)
+ FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
{
if (!vro_valid_in_sets (set1, set2, vro))
return false;
pre_expr expr;
int i;
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
if (!valid_in_sets (set1, set2, expr, block))
bitmap_remove_from_set (set1, expr);
pre_expr expr;
int i;
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
if (!valid_in_sets (set, NULL, expr, block))
bitmap_remove_from_set (set, expr);
else
bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
- for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
+ FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
{
if (!gimple_seq_empty_p (phi_nodes (bprime)))
{
}
if (VEC_length (basic_block, worklist) > 0)
{
- for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
+ FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
{
unsigned int i;
bitmap_iterator bi;
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Starting iteration %d\n", num_iterations);
+ /* ??? We need to clear our PHI translation cache here as the
+ ANTIC sets shrink and we restrict valid translations to
+ those having operands with leaders in ANTIC. Same below
+ for PA ANTIC computation. */
num_iterations++;
changed = false;
for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
block->index));
}
}
-#ifdef ENABLE_CHECKING
/* Theoretically possible, but *highly* unlikely. */
- gcc_assert (num_iterations < 500);
-#endif
+ gcc_checking_assert (num_iterations < 500);
}
statistics_histogram_event (cfun, "compute_antic iterations",
block->index));
}
}
-#ifdef ENABLE_CHECKING
/* Theoretically possible, but *highly* unlikely. */
- gcc_assert (num_iterations < 500);
-#endif
+ gcc_checking_assert (num_iterations < 500);
}
statistics_histogram_event (cfun, "compute_partial_antic iterations",
num_iterations);
sbitmap_free (changed_blocks);
}
-/* Return true if we can value number the call in STMT. This is true
- if we have a pure or constant call. */
-
-static bool
-can_value_number_call (gimple stmt)
-{
- if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
- return true;
- return false;
-}
-
/* Return true if OP is a tree which we can perform PRE on.
This may not match the operations we can value number, but in
a perfect world would. */
gcc_assert (base);
offset = int_const_binop (PLUS_EXPR, offset,
build_int_cst (TREE_TYPE (offset),
- off), 0);
+ off));
baseop = build_fold_addr_expr (base);
}
return fold_build2 (MEM_REF, currop->type, baseop, offset);
break;
case TARGET_MEM_REF:
{
+ pre_expr op0expr, op1expr;
+ tree genop0 = NULL_TREE, genop1 = NULL_TREE;
vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
- *operand);
- pre_expr op0expr;
- tree genop0 = NULL_TREE;
+ ++*operand);
tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
stmts, domstmt);
if (!baseop)
if (!genop0)
return NULL_TREE;
}
- if (DECL_P (baseop))
- return build6 (TARGET_MEM_REF, currop->type,
- baseop, NULL_TREE,
- genop0, currop->op1, currop->op2,
- unshare_expr (nextop->op1));
- else
- return build6 (TARGET_MEM_REF, currop->type,
- NULL_TREE, baseop,
- genop0, currop->op1, currop->op2,
- unshare_expr (nextop->op1));
+ if (nextop->op0)
+ {
+ op1expr = get_or_alloc_expr_for (nextop->op0);
+ genop1 = find_or_generate_expression (block, op1expr,
+ stmts, domstmt);
+ if (!genop1)
+ return NULL_TREE;
+ }
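+ /* A rewritten TARGET_MEM_REF has five operands: base, offset,
+    index, step and index2; it addresses roughly
+    base + offset + index * step + index2.  */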
+ return build5 (TARGET_MEM_REF, currop->type,
+ baseop, currop->op2, genop0, currop->op1, genop1);
}
break;
case ADDR_EXPR:
return folded;
}
break;
- case MISALIGNED_INDIRECT_REF:
+ case WITH_SIZE_EXPR:
{
- tree folded;
- tree genop1 = create_component_ref_by_pieces_1 (block, ref,
- operand,
+ tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
stmts, domstmt);
+ pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
+ tree genop1;
+
+ if (!genop0)
+ return NULL_TREE;
+
+ genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
if (!genop1)
return NULL_TREE;
- genop1 = fold_convert (build_pointer_type (currop->type),
- genop1);
- if (currop->opcode == MISALIGNED_INDIRECT_REF)
- folded = fold_build2 (currop->opcode, currop->type,
- genop1, currop->op1);
- else
- folded = fold_build1 (currop->opcode, currop->type,
- genop1);
- return folded;
+ return fold_build2 (currop->opcode, currop->type, genop0, genop1);
}
break;
case BIT_FIELD_REF:
return NULL_TREE;
if (genop2)
{
- /* Drop zero minimum index. */
- if (tree_int_cst_equal (genop2, integer_zero_node))
+ tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
+ /* Drop zero minimum index if redundant. */
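+ /* A zero low bound is redundant only when the array domain itself
+    starts at zero; e.g. a Fortran- or Ada-style array with bounds
+    1..N must keep it.  */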
+ if (integer_zerop (genop2)
+ && (!domain_type
+ || integer_zerop (TYPE_MIN_VALUE (domain_type))))
genop2 = NULL_TREE;
else
{
case NARY:
{
vn_nary_op_t nary = PRE_EXPR_NARY (expr);
- switch (nary->length)
+ tree genop[4];
+ unsigned i;
+ for (i = 0; i < nary->length; ++i)
{
- case 2:
- {
- pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
- pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
- tree genop1 = find_or_generate_expression (block, op1,
- stmts, domstmt);
- tree genop2 = find_or_generate_expression (block, op2,
- stmts, domstmt);
- if (!genop1 || !genop2)
- return NULL_TREE;
- /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
- may be a constant with the wrong type. */
- if (nary->opcode == POINTER_PLUS_EXPR)
- {
- genop1 = fold_convert (nary->type, genop1);
- genop2 = fold_convert (sizetype, genop2);
- }
- else
- {
- genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
- genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
- }
-
- folded = fold_build2 (nary->opcode, nary->type,
- genop1, genop2);
- }
- break;
- case 1:
- {
- pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
- tree genop1 = find_or_generate_expression (block, op1,
- stmts, domstmt);
- if (!genop1)
- return NULL_TREE;
- genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
-
- folded = fold_build1 (nary->opcode, nary->type,
- genop1);
- }
- break;
- default:
- return NULL_TREE;
+ pre_expr op = get_or_alloc_expr_for (nary->op[i]);
+ genop[i] = find_or_generate_expression (block, op,
+ stmts, domstmt);
+ if (!genop[i])
+ return NULL_TREE;
+ /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
+ may have conversions stripped. */
+ if (nary->opcode == POINTER_PLUS_EXPR)
+ {
+ if (i == 0)
+ genop[i] = fold_convert (nary->type, genop[i]);
+ else if (i == 1)
+ genop[i] = convert_to_ptrofftype (genop[i]);
+ }
+ else
+ genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
+ }
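+ /* CONSTRUCTOR nodes (e.g. vector constructors) can carry any
+    number of elements, so they are rebuilt directly; other NARYs
+    go through fold_buildN below.  */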
+ if (nary->opcode == CONSTRUCTOR)
+ {
+ VEC(constructor_elt,gc) *elts = NULL;
+ for (i = 0; i < nary->length; ++i)
+ CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
+ folded = build_constructor (nary->type, elts);
+ }
+ else
+ {
+ switch (nary->length)
+ {
+ case 1:
+ folded = fold_build1 (nary->opcode, nary->type,
+ genop[0]);
+ break;
+ case 2:
+ folded = fold_build2 (nary->opcode, nary->type,
+ genop[0], genop[1]);
+ break;
+ case 3:
+ folded = fold_build3 (nary->opcode, nary->type,
+ genop[0], genop[1], genop[2]);
+ break;
+ default:
+ gcc_unreachable ();
+ }
}
}
break;
/* Build and insert the assignment of the end result to the temporary
that we will return. */
if (!pretemp || exprtype != TREE_TYPE (pretemp))
- {
- pretemp = create_tmp_reg (exprtype, "pretmp");
- get_var_ann (pretemp);
- }
+ pretemp = create_tmp_reg (exprtype, "pretmp");
temp = pretemp;
add_referenced_var (temp);
/* All the symbols in NEWEXPR should be put into SSA form. */
mark_symbols_for_renaming (newstmt);
+ /* Fold the last statement. */
+ gsi = gsi_last (*stmts);
+ if (fold_stmt_inplace (&gsi))
+ update_stmt (gsi_stmt (gsi));
+
/* Add a value number to the temporary.
The value may already exist in either NEW_SETS, or AVAIL_OUT, because
we are creating the expression by pieces, and this particular piece of
memory reference is a simple induction variable. In other
cases the vectorizer won't do anything anyway (either it's
loop invariant or a complicated expression). */
- for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
+ FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
{
switch (op->opcode)
{
/* Now build a phi for the new variable. */
if (!prephitemp || TREE_TYPE (prephitemp) != type)
- {
- prephitemp = create_tmp_var (type, "prephitmp");
- get_var_ann (prephitemp);
- }
+ prephitemp = create_tmp_var (type, "prephitmp");
temp = prephitemp;
add_referenced_var (temp);
pre_expr expr;
int i;
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
if (expr->kind != NAME)
{
already existing along every predecessor, and
it's defined by some predecessor, it is
partially redundant. */
- if (!cant_insert && !all_same && by_some && do_insertion
- && dbg_cnt (treepre_insert))
+ if (!cant_insert && !all_same && by_some)
{
- if (insert_into_preds_of_block (block, get_expression_id (expr),
- avail))
+ if (!do_insertion)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Skipping partial redundancy for "
+ "expression ");
+ print_pre_expr (dump_file, expr);
+ fprintf (dump_file, " (%04d), no redundancy on an edge "
+ "to be optimized for speed\n", val);
+ }
+ }
+ else if (dbg_cnt (treepre_insert)
+ && insert_into_preds_of_block (block,
+ get_expression_id (expr),
+ avail))
new_stuff = true;
}
/* If all edges produce the same value and that value is
pre_expr expr;
int i;
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
if (expr->kind != NAME)
{
or control flow.
If this isn't a call or it is the last stmt in the
basic-block then the CFG represents things correctly. */
- if (is_gimple_call (stmt)
- && !stmt_ends_bb_p (stmt))
+ if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
{
/* Non-looping const functions always return normally.
Otherwise the call might not return or have side-effects
bitmap_value_insert_into_set (AVAIL_OUT (block), e);
}
- if (gimple_has_volatile_ops (stmt)
- || stmt_could_throw_p (stmt))
+ if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
continue;
switch (gimple_code (stmt))
pre_expr result = NULL;
VEC(vn_reference_op_s, heap) *ops = NULL;
- if (!can_value_number_call (stmt))
+ /* We can value number only calls to real functions. */
+ if (gimple_call_internal_p (stmt))
continue;
copy_reference_ops_from_call (stmt, &ops);
vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
gimple_expr_type (stmt),
- ops, &ref, false);
+ ops, &ref, VN_NOWALK);
VEC_free (vn_reference_op_s, heap, ops);
if (!ref)
continue;
vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
gimple_assign_rhs_code (stmt),
gimple_expr_type (stmt),
- gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt),
- NULL_TREE, NULL_TREE, &nary);
+ gimple_assign_rhs1_ptr (stmt),
+ &nary);
if (!nary)
continue;
vn_reference_lookup (gimple_assign_rhs1 (stmt),
gimple_vuse (stmt),
- true, &ref);
+ VN_WALK, &ref);
if (!ref)
continue;
eliminate (void)
{
VEC (gimple, heap) *to_remove = NULL;
+ VEC (gimple, heap) *to_update = NULL;
basic_block b;
unsigned int todo = 0;
gimple_stmt_iterator gsi;
{
for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
{
+ tree lhs = NULL_TREE;
+ tree rhs = NULL_TREE;
+
stmt = gsi_stmt (gsi);
+ if (gimple_has_lhs (stmt))
+ lhs = gimple_get_lhs (stmt);
+
+ if (gimple_assign_single_p (stmt))
+ rhs = gimple_assign_rhs1 (stmt);
+
/* Lookup the RHS of the expression, see if we have an
available computation for it. If so, replace the RHS with
- the available computation. */
+ the available computation.
+
+ See PR43491.
+ We don't replace a global register variable when it is the RHS of
+ a single assign.  We do replace a local register variable since GCC
+ does not guarantee that a local variable will be allocated in a
+ register.  */
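+ /* For instance, a load from "register int g asm ("ebx");" must be
+    retained, since its contents may change outside the compiler's
+    view.  */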
if (gimple_has_lhs (stmt)
- && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
+ && TREE_CODE (lhs) == SSA_NAME
&& !gimple_assign_ssa_name_copy_p (stmt)
&& (!gimple_assign_single_p (stmt)
- || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+ || (!is_gimple_min_invariant (rhs)
+ && (gimple_assign_rhs_code (stmt) != VAR_DECL
+ || !is_global_var (rhs)
+ || !DECL_HARD_REGISTER (rhs))))
&& !gimple_has_volatile_ops (stmt)
- && !has_zero_uses (gimple_get_lhs (stmt)))
+ && !has_zero_uses (lhs))
{
- tree lhs = gimple_get_lhs (stmt);
- tree rhs = NULL_TREE;
tree sprime = NULL;
pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
pre_expr sprimeexpr;
-
- if (gimple_assign_single_p (stmt))
- rhs = gimple_assign_rhs1 (stmt);
+ gimple orig_stmt = stmt;
sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
get_expr_value_id (lhsexpr),
propagate_tree_value_into_stmt (&gsi, sprime);
stmt = gsi_stmt (gsi);
update_stmt (stmt);
+
+ /* If we removed EH side-effects from the statement, clean
+ its EH information. */
+ if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
+ {
+ bitmap_set_bit (need_eh_cleanup,
+ gimple_bb (stmt)->index);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Removed EH side-effects.\n");
+ }
continue;
}
|| TREE_CODE (rhs) != SSA_NAME
|| may_propagate_copy (rhs, sprime)))
{
+ bool can_make_abnormal_goto
+ = is_gimple_call (stmt)
+ && stmt_can_make_abnormal_goto (stmt);
+
gcc_assert (sprime != rhs);
if (dump_file && (dump_flags & TDF_DETAILS))
stmt = gsi_stmt (gsi);
update_stmt (stmt);
- /* If we removed EH side effects from the statement, clean
+ /* If we removed EH side-effects from the statement, clean
its EH information. */
- if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
+ if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " Removed EH side effects.\n");
+ fprintf (dump_file, " Removed EH side-effects.\n");
+ }
+
+ /* Likewise for AB side-effects. */
+ if (can_make_abnormal_goto
+ && !stmt_can_make_abnormal_goto (stmt))
+ {
+ bitmap_set_bit (need_ab_cleanup,
+ gimple_bb (stmt)->index);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Removed AB side-effects.\n");
}
}
}
has the same value number as its rhs. If so, the store is
dead. */
else if (gimple_assign_single_p (stmt)
+ && !gimple_has_volatile_ops (stmt)
&& !is_gimple_reg (gimple_assign_lhs (stmt))
- && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
- || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
+ && (TREE_CODE (rhs) == SSA_NAME
+ || is_gimple_min_invariant (rhs)))
{
- tree rhs = gimple_assign_rhs1 (stmt);
tree val;
val = vn_reference_lookup (gimple_assign_lhs (stmt),
- gimple_vuse (stmt), true, NULL);
+ gimple_vuse (stmt), VN_WALK, NULL);
if (TREE_CODE (rhs) == SSA_NAME)
rhs = VN_INFO (rhs)->valnum;
if (val
}
/* Visit indirect calls and turn them into direct calls if
possible. */
- if (gimple_code (stmt) == GIMPLE_CALL
- && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
+ if (is_gimple_call (stmt))
{
- tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
- if (TREE_CODE (fn) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
+ tree orig_fn = gimple_call_fn (stmt);
+ tree fn;
+ if (!orig_fn)
+ continue;
+ if (TREE_CODE (orig_fn) == SSA_NAME)
+ fn = VN_INFO (orig_fn)->valnum;
+ else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
+ && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
+ fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
+ else
+ continue;
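+ /* E.g. if fn_3 is known to have the value &foo, the indirect call
+    fn_3 (args) can become the direct call foo (args).  */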
+ if (gimple_call_addr_fndecl (fn) != NULL_TREE
+ && useless_type_conversion_p (TREE_TYPE (orig_fn),
+ TREE_TYPE (fn)))
{
+ bool can_make_abnormal_goto
+ = stmt_can_make_abnormal_goto (stmt);
+ bool was_noreturn = gimple_call_noreturn_p (stmt);
+
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replacing call target with ");
}
gimple_call_set_fn (stmt, fn);
- update_stmt (stmt);
+ VEC_safe_push (gimple, heap, to_update, stmt);
+
+ /* When changing a call into a noreturn call, cfg cleanup
+ is needed to fix up the control flow afterwards.  */
+ if (!was_noreturn && gimple_call_noreturn_p (stmt))
+ todo |= TODO_cleanup_cfg;
+
+ /* If we removed EH side-effects from the statement, clean
+ its EH information. */
if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " Removed EH side effects.\n");
+ fprintf (dump_file, " Removed EH side-effects.\n");
+ }
+
+ /* Likewise for AB side-effects. */
+ if (can_make_abnormal_goto
+ && !stmt_can_make_abnormal_goto (stmt))
+ {
+ bitmap_set_bit (need_ab_cleanup,
+ gimple_bb (stmt)->index);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Removed AB side-effects.\n");
}
/* Changing an indirect call to a direct call may
/* We cannot remove stmts during BB walk, especially not release SSA
names there as this confuses the VN machinery. The stmts ending
up in to_remove are either stores or simple copies. */
- for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
+ FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
{
tree lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
if (TREE_CODE (lhs) != SSA_NAME
|| has_zero_uses (lhs))
{
+ basic_block bb = gimple_bb (stmt);
gsi = gsi_for_stmt (stmt);
unlink_stmt_vdef (stmt);
gsi_remove (&gsi, true);
+ /* ??? gsi_remove doesn't tell us whether the stmt was
+ in EH tables and thus whether we need to purge EH edges.
+ Simply schedule the block for a cleanup. */
+ bitmap_set_bit (need_eh_cleanup, bb->index);
if (TREE_CODE (lhs) == SSA_NAME)
bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
release_defs (stmt);
}
VEC_free (gimple, heap, to_remove);
+ /* We cannot update call statements with virtual operands during
+ the SSA walk.  Updating them might remove a statement, which in
+ turn would make our VN lattice invalid.  */
+ FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
+ update_stmt (stmt);
+ VEC_free (gimple, heap, to_update);
+
return todo;
}
}
need_eh_cleanup = BITMAP_ALLOC (NULL);
+ need_ab_cleanup = BITMAP_ALLOC (NULL);
}
static void
fini_pre (bool do_fre)
{
+ bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
+ bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
+
free (postorder);
VEC_free (bitmap_set_t, heap, value_expressions);
BITMAP_FREE (inserted_exprs);
free_dominance_info (CDI_POST_DOMINATORS);
- if (!bitmap_empty_p (need_eh_cleanup))
- {
- gimple_purge_all_dead_eh_edges (need_eh_cleanup);
- cleanup_tree_cfg ();
- }
+ if (do_eh_cleanup)
+ gimple_purge_all_dead_eh_edges (need_eh_cleanup);
+
+ if (do_ab_cleanup)
+ gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
BITMAP_FREE (need_eh_cleanup);
+ BITMAP_FREE (need_ab_cleanup);
+
+ if (do_eh_cleanup || do_ab_cleanup)
+ cleanup_tree_cfg ();
if (!do_fre)
loop_optimizer_finalize ();
if (!do_fre)
loop_optimizer_init (LOOPS_NORMAL);
- if (!run_scc_vn ())
+ if (!run_scc_vn (do_fre ? VN_WALKREWRITE : VN_WALK))
{
if (!do_fre)
loop_optimizer_finalize ();
statistics_counter_event (cfun, "Constified", pre_stats.constified);
clear_expression_ids ();
- free_scc_vn ();
if (!do_fre)
- remove_dead_inserted_code ();
+ {
+ remove_dead_inserted_code ();
+ todo |= TODO_verify_flow;
+ }
scev_finalize ();
fini_pre (do_fre);
+ if (!do_fre)
+ /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
+ case we can merge the block with the remaining predecessor of the block.
+ It should either:
+ - call merge_blocks after each tail merge iteration
+ - call merge_blocks after all tail merge iterations
+ - mark TODO_cleanup_cfg when necessary
+ - share the cfg cleanup with fini_pre. */
+ todo |= tail_merge_optimize (todo);
+ free_scc_vn ();
+
return todo;
}
0, /* properties_provided */
0, /* properties_destroyed */
TODO_rebuild_alias, /* todo_flags_start */
- TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
+ TODO_update_ssa_only_virtuals | TODO_ggc_collect
| TODO_verify_ssa /* todo_flags_finish */
}
};
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
+ TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
}
};