sbitmap_free (changed_blocks);
}
-/* Return true if we can value number the call in STMT. This is true
- if we have a pure or constant call to a real function. */
-
-static bool
-can_value_number_call (gimple stmt)
-{
- if (gimple_call_internal_p (stmt))
- return false;
- if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
- return true;
- return false;
-}
-
/* Return true if OP is a tree which we can perform PRE on.
This may not match the operations we can value number, but in
a perfect world would. */
return folded;
}
break;
+ case WITH_SIZE_EXPR:
+ {
+ tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
+ stmts, domstmt);
+ pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
+ tree genop1;
+
+ if (!genop0)
+ return NULL_TREE;
+
+ genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
+ if (!genop1)
+ return NULL_TREE;
+
+ return fold_build2 (currop->opcode, currop->type, genop0, genop1);
+ }
+ break;
case BIT_FIELD_REF:
{
tree folded;
or control flow.
If this isn't a call or it is the last stmt in the
basic-block then the CFG represents things correctly. */
- if (is_gimple_call (stmt)
- && !stmt_ends_bb_p (stmt))
+ if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
{
/* Non-looping const functions always return normally.
Otherwise the call might not return or have side-effects
bitmap_value_insert_into_set (AVAIL_OUT (block), e);
}
- if (gimple_has_volatile_ops (stmt)
- || stmt_could_throw_p (stmt))
+ if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
continue;
switch (gimple_code (stmt))
pre_expr result = NULL;
VEC(vn_reference_op_s, heap) *ops = NULL;
- if (!can_value_number_call (stmt))
+ /* We can value number only calls to real functions. */
+ if (gimple_call_internal_p (stmt))
continue;
copy_reference_ops_from_call (stmt, &ops);
{
for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
{
+ tree lhs = NULL_TREE;
+ tree rhs = NULL_TREE;
+
stmt = gsi_stmt (gsi);
+ if (gimple_has_lhs (stmt))
+ lhs = gimple_get_lhs (stmt);
+
+ if (gimple_assign_single_p (stmt))
+ rhs = gimple_assign_rhs1 (stmt);
+
/* Lookup the RHS of the expression, see if we have an
available computation for it. If so, replace the RHS with
- the available computation. */
+ the available computation.
+
+ See PR43491.
+ We don't replace a global register variable when it is the RHS of
+ a single assign. We do replace a local register variable since gcc
+ does not guarantee that a local variable will be allocated in a register. */
if (gimple_has_lhs (stmt)
- && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
+ && TREE_CODE (lhs) == SSA_NAME
&& !gimple_assign_ssa_name_copy_p (stmt)
&& (!gimple_assign_single_p (stmt)
- || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+ || (!is_gimple_min_invariant (rhs)
+ && (gimple_assign_rhs_code (stmt) != VAR_DECL
+ || !is_global_var (rhs)
+ || !DECL_HARD_REGISTER (rhs))))
&& !gimple_has_volatile_ops (stmt)
- && !has_zero_uses (gimple_get_lhs (stmt)))
+ && !has_zero_uses (lhs))
{
- tree lhs = gimple_get_lhs (stmt);
- tree rhs = NULL_TREE;
tree sprime = NULL;
pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
pre_expr sprimeexpr;
-
- if (gimple_assign_single_p (stmt))
- rhs = gimple_assign_rhs1 (stmt);
+ gimple orig_stmt = stmt;
sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
get_expr_value_id (lhsexpr),
propagate_tree_value_into_stmt (&gsi, sprime);
stmt = gsi_stmt (gsi);
update_stmt (stmt);
+
+ /* If we removed EH side-effects from the statement, clean
+ its EH information. */
+ if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
+ {
+ bitmap_set_bit (need_eh_cleanup,
+ gimple_bb (stmt)->index);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Removed EH side-effects.\n");
+ }
continue;
}
/* If we removed EH side-effects from the statement, clean
its EH information. */
- if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
+ if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
gimple_bb (stmt)->index);
has the same value number as its rhs. If so, the store is
dead. */
else if (gimple_assign_single_p (stmt)
+ && !gimple_has_volatile_ops (stmt)
&& !is_gimple_reg (gimple_assign_lhs (stmt))
- && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
- || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
+ && (TREE_CODE (rhs) == SSA_NAME
+ || is_gimple_min_invariant (rhs)))
{
- tree rhs = gimple_assign_rhs1 (stmt);
tree val;
val = vn_reference_lookup (gimple_assign_lhs (stmt),
gimple_vuse (stmt), VN_WALK, NULL);
gsi = gsi_for_stmt (stmt);
unlink_stmt_vdef (stmt);
gsi_remove (&gsi, true);
- if (gimple_purge_dead_eh_edges (bb))
- todo |= TODO_cleanup_cfg;
+ /* ??? gsi_remove doesn't tell us whether the stmt was
+ in EH tables and thus whether we need to purge EH edges.
+ Simply schedule the block for a cleanup. */
+ bitmap_set_bit (need_eh_cleanup, bb->index);
if (TREE_CODE (lhs) == SSA_NAME)
bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
release_defs (stmt);
static void
fini_pre (bool do_fre)
{
+ bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
+ bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
+
free (postorder);
VEC_free (bitmap_set_t, heap, value_expressions);
BITMAP_FREE (inserted_exprs);
free_dominance_info (CDI_POST_DOMINATORS);
- if (!bitmap_empty_p (need_eh_cleanup))
- {
- gimple_purge_all_dead_eh_edges (need_eh_cleanup);
- cleanup_tree_cfg ();
- }
+ if (do_eh_cleanup)
+ gimple_purge_all_dead_eh_edges (need_eh_cleanup);
- BITMAP_FREE (need_eh_cleanup);
-
- if (!bitmap_empty_p (need_ab_cleanup))
- {
- gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
- cleanup_tree_cfg ();
- }
+ if (do_ab_cleanup)
+ gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
+ BITMAP_FREE (need_eh_cleanup);
BITMAP_FREE (need_ab_cleanup);
+ if (do_eh_cleanup || do_ab_cleanup)
+ cleanup_tree_cfg ();
+
if (!do_fre)
loop_optimizer_finalize ();
}