/* Conditional constant propagation pass for the GNU compiler.
- Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006
+ Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
We should be able to deduce that the predicate 'a.a != B' is always
false. To achieve this, we associate constant values to the SSA
- names in the V_MAY_DEF and V_MUST_DEF operands for each store.
- Additionally, since we also glob partial loads/stores with the base
- symbol, we also keep track of the memory reference where the
- constant value was stored (in the MEM_REF field of PROP_VALUE_T).
- For instance,
+ names in the VDEF operands for each store. Additionally,
+ since we also glob partial loads/stores with the base symbol, we
+ also keep track of the memory reference where the constant value
+ was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
- # a_5 = V_MAY_DEF <a_4>
+ # a_5 = VDEF <a_4>
a.a = 2;
# VUSE <a_5>
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "target.h"
+#include "toplev.h"
/* Possible lattice values. */
/* Array of propagated constant values. After propagation,
CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
the constant is held in an SSA name representing a memory store
- (i.e., a V_MAY_DEF or V_MUST_DEF), CONST_VAL[I].MEM_REF will
- contain the actual memory reference used to store (i.e., the LHS of
- the assignment doing the store). */
+ (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
+ memory reference used to store (i.e., the LHS of the assignment
+ doing the store). */
static prop_value_t *const_val;
/* True if we are also propagating constants in stores and loads. */
return true;
}
+/* If SYM is a constant variable with known value, return the value.
+   NULL_TREE is returned otherwise.  */
+
+static tree
+get_symbol_constant_value (tree sym)
+{
+  /* Only static, read-only declarations can carry a usable compile-time
+     value; memory tags (MTAG_P) are aliasing artifacts, not real
+     variables, so they never do.  */
+  if (TREE_STATIC (sym)
+      && TREE_READONLY (sym)
+      && !MTAG_P (sym))
+    {
+      tree val = DECL_INITIAL (sym);
+      /* The initializer itself must be a minimal invariant for CCP to
+	 propagate it safely.  */
+      if (val
+	  && ccp_decl_initial_min_invariant (val))
+	return val;
+    }
+
+  return NULL_TREE;
+}
/* Compute a default value for variable VAR and store it in the
CONST_VAL array. The following rules are used to get default
{
tree sym = SSA_NAME_VAR (var);
prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
+ tree cst_val;
if (!do_store_ccp && !is_gimple_reg (var))
{
val.lattice_val = CONSTANT;
val.value = SSA_NAME_VALUE (var);
}
- else if (TREE_STATIC (sym)
- && TREE_READONLY (sym)
- && !MTAG_P (sym)
- && DECL_INITIAL (sym)
- && ccp_decl_initial_min_invariant (DECL_INITIAL (sym)))
+ else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
{
/* Globals and static variables declared 'const' take their
initial value. */
val.lattice_val = CONSTANT;
- val.value = DECL_INITIAL (sym);
+ val.value = cst_val;
val.mem_ref = sym;
}
else
else
val.lattice_val = VARYING;
}
- else if (TREE_CODE (stmt) == MODIFY_EXPR
+ else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
|| TREE_CODE (stmt) == PHI_NODE)
{
/* Any other variable defined by an assignment or a PHI node
val->mem_ref = NULL_TREE;
}
+/* For float types, modify the value of VAL to make ccp work correctly
+   for non-standard values (-0, NaN):
+
+   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
+   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
+   This is to fix the following problem (see PR 29921): Suppose we have
+
+   x = 0.0 * y
+
+   and we set value of y to NaN.  This causes value of x to be set to NaN.
+   When we later determine that y is in fact VARYING, fold uses the fact
+   that HONOR_NANS is false, and we try to change the value of x to 0,
+   causing an ICE.  With HONOR_NANS being false, the real appearance of
+   NaN would cause undefined behavior, though, so claiming that y (and x)
+   are UNDEFINED initially is correct.  */
+
+static void
+canonicalize_float_value (prop_value_t *val)
+{
+  enum machine_mode mode;
+  tree type;
+  REAL_VALUE_TYPE d;
+
+  /* Only CONSTANT lattice values holding a REAL_CST need canonicalizing.  */
+  if (val->lattice_val != CONSTANT
+      || TREE_CODE (val->value) != REAL_CST)
+    return;
+
+  d = TREE_REAL_CST (val->value);
+  type = TREE_TYPE (val->value);
+  mode = TYPE_MODE (type);
+
+  /* Without signed zeros, -0.0 is indistinguishable from 0.0; replace it
+     with the canonical positive zero.  */
+  if (!HONOR_SIGNED_ZEROS (mode)
+      && REAL_VALUE_MINUS_ZERO (d))
+    {
+      val->value = build_real (type, dconst0);
+      return;
+    }
+
+  /* Without NaNs, an actual NaN would be undefined behavior anyway, so
+     demote the lattice value to UNDEFINED (see PR 29921 above).  */
+  if (!HONOR_NANS (mode)
+      && REAL_VALUE_ISNAN (d))
+    {
+      val->lattice_val = UNDEFINED;
+      val->value = NULL;
+      val->mem_ref = NULL;
+      return;
+    }
+}
+
/* Set the value for variable VAR to NEW_VAL. Return true if the new
value is different from VAR's previous value. */
{
prop_value_t *old_val = get_value (var);
+ canonicalize_float_value (&new_val);
+
/* Lattice transitions must always be monotonically increasing in
value. If *OLD_VAL and NEW_VAL are the same, return false to
inform the caller that this was a non-transition. */
- gcc_assert (old_val->lattice_val <= new_val.lattice_val
+ gcc_assert (old_val->lattice_val < new_val.lattice_val
|| (old_val->lattice_val == new_val.lattice_val
- && old_val->value == new_val.value
+ && ((!old_val->value && !new_val.value)
+ || operand_equal_p (old_val->value, new_val.value, 0))
&& old_val->mem_ref == new_val.mem_ref));
if (old_val->lattice_val != new_val.lattice_val)
/* Anything other than assignments and conditional jumps are not
interesting for CCP. */
- if (TREE_CODE (stmt) != MODIFY_EXPR
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
&& !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
&& TREE_CODE (stmt) != COND_EXPR
&& TREE_CODE (stmt) != SWITCH_EXPR)
/* Anything other than assignments and conditional jumps are not
interesting for CCP. */
- if (TREE_CODE (stmt) != MODIFY_EXPR
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
&& !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
&& TREE_CODE (stmt) != COND_EXPR
&& TREE_CODE (stmt) != SWITCH_EXPR)
{
basic_block bb;
- const_val = XNEWVEC (prop_value_t, num_ssa_names);
- memset (const_val, 0, num_ssa_names * sizeof (*const_val));
+ const_val = XCNEWVEC (prop_value_t, num_ssa_names);
/* Initialize simulation flags for PHI nodes and statements. */
FOR_EACH_BB (bb)
/* Do final substitution of propagated values, cleanup the flowgraph and
- free allocated storage. */
+ free allocated storage.
-static void
+ Return TRUE when something was optimized. */
+
+static bool
ccp_finalize (void)
{
/* Perform substitutions based on the known constant values. */
- substitute_and_fold (const_val, false);
+ bool something_changed = substitute_and_fold (const_val, false);
free (const_val);
+ return something_changed;;
}
/* We may be able to fold away calls to builtin functions if their
arguments are constants. */
else if (code == CALL_EXPR
- && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
- == FUNCTION_DECL)
- && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
+ && TREE_CODE (CALL_EXPR_FN (rhs)) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)) == FUNCTION_DECL
+ && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)))
{
if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
{
tree *orig, var;
- tree fndecl, arglist;
size_t i = 0;
ssa_op_iter iter;
use_operand_p var_p;
/* Substitute operands with their values and try to fold. */
replace_uses_in (stmt, NULL, const_val);
- fndecl = get_callee_fndecl (rhs);
- arglist = TREE_OPERAND (rhs, 1);
- retval = fold_builtin (fndecl, arglist, false);
+ retval = fold_call_expr (rhs, false);
/* Restore operands to their original form. */
i = 0;
prop_value_t val;
tree simplified = NULL_TREE;
ccp_lattice_t likelyvalue = likely_value (stmt);
+ bool is_constant;
val.mem_ref = NULL_TREE;
+ fold_defer_overflow_warnings ();
+
/* If the statement is likely to have a CONSTANT result, then try
to fold the statement to determine the constant value. */
if (likelyvalue == CONSTANT)
else if (!simplified)
simplified = fold_const_aggregate_ref (get_rhs (stmt));
- if (simplified && is_gimple_min_invariant (simplified))
+ is_constant = simplified && is_gimple_min_invariant (simplified);
+
+ fold_undefer_overflow_warnings (is_constant, stmt, 0);
+
+ if (is_constant)
{
/* The statement produced a constant value. */
val.lattice_val = CONSTANT;
tree lhs, rhs;
enum ssa_prop_result retval;
- lhs = TREE_OPERAND (stmt, 0);
- rhs = TREE_OPERAND (stmt, 1);
+ lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+ rhs = GIMPLE_STMT_OPERAND (stmt, 1);
if (TREE_CODE (rhs) == SSA_NAME)
{
the constant value into the type of the destination variable. This
should not be necessary if GCC represented bitfields properly. */
{
- tree orig_lhs = TREE_OPERAND (stmt, 0);
+ tree orig_lhs = GIMPLE_STMT_OPERAND (stmt, 0);
if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
&& val.lattice_val == CONSTANT)
}
else if (do_store_ccp && stmt_makes_single_store (stmt))
{
- /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
- to the new constant value and mark the LHS as the memory
- reference associated with VAL. */
+ /* Otherwise, set the names in VDEF operands to the new
+ constant value and mark the LHS as the memory reference
+ associated with VAL. */
ssa_op_iter i;
tree vdef;
bool changed;
/* Set the value of every VDEF to VAL. */
changed = false;
FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
- changed |= set_lattice_value (vdef, val);
+ {
+ /* See PR 29801. We may have VDEFs for read-only variables
+ (see the handling of unmodifiable variables in
+ add_virtual_operand); do not attempt to change their value. */
+ if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
+ continue;
+
+ changed |= set_lattice_value (vdef, val);
+ }
/* Note that for propagation purposes, we are only interested in
visiting statements that load the exact same memory reference
fprintf (dump_file, "\n");
}
- if (TREE_CODE (stmt) == MODIFY_EXPR)
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
{
/* If the statement is an assignment that produces a single
output value, evaluate its RHS to see if the lattice value of
 /* Main entry point for SSA Conditional Constant Propagation.  */
-static void
+static unsigned int
 execute_ssa_ccp (bool store_ccp)
 {
   do_store_ccp = store_ccp;
   ccp_initialize ();
   ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
-  ccp_finalize ();
+  /* If finalization changed the IL, request the follow-up cleanups
+     from the pass manager via TODO flags instead of doing them here.  */
+  if (ccp_finalize ())
+    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_update_smt_usage
+	    | TODO_remove_unused_locals);
+  else
+    return 0;
 }
+/* Pass gate for regular CCP: run the propagator without store
+   propagation and return its TODO flags.  */
+
 static unsigned int
 do_ssa_ccp (void)
 {
-  execute_ssa_ccp (false);
-  return 0;
+  return execute_ssa_ccp (false);
 }
NULL, /* next */
0, /* static_pass_number */
TV_TREE_CCP, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ PROP_cfg | PROP_ssa, /* properties_required */
0, /* properties_provided */
- PROP_smt_usage, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_cleanup_cfg | TODO_dump_func | TODO_update_ssa
- | TODO_ggc_collect | TODO_verify_ssa
- | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_ssa
+ | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
0 /* letter */
};
do_ssa_store_ccp (void)
{
/* If STORE-CCP is not enabled, we just run regular CCP. */
- execute_ssa_ccp (flag_tree_store_ccp != 0);
- return 0;
+ return execute_ssa_ccp (flag_tree_store_ccp != 0);
}
static bool
TV_TREE_STORE_CCP, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- PROP_smt_usage, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_update_ssa
- | TODO_ggc_collect | TODO_verify_ssa
- | TODO_cleanup_cfg
- | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_ssa
+ | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
0 /* letter */
};
|| lrem || hrem)
return NULL_TREE;
- idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
+ idx = build_int_cst_wide (TREE_TYPE (offset), lquo, hquo);
}
/* Assume the low bound is zero. If there is a domain type, get the
struct fold_stmt_r_data
{
- bool *changed_p;
- bool *inside_addr_expr_p;
+ tree stmt;
+ bool *changed_p;
+ bool *inside_addr_expr_p;
};
/* Subroutine of fold_stmt called via walk_tree. We perform several
if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
{
tree op0 = TREE_OPERAND (expr, 0);
- tree tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
- TREE_OPERAND (op0, 0),
- TREE_OPERAND (op0, 1));
- if (tem && set_rhs (expr_p, tem))
+ tree tem;
+ bool set;
+
+ fold_defer_overflow_warnings ();
+ tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
+ TREE_OPERAND (op0, 0),
+ TREE_OPERAND (op0, 1));
+ set = tem && set_rhs (expr_p, tem);
+ fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
+ if (set)
{
t = *expr_p;
break;
if (TREE_CODE (arg) != SSA_NAME)
{
+ if (TREE_CODE (arg) == COND_EXPR)
+ return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
+ && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
+
if (type == 2)
{
val = arg;
switch (TREE_CODE (def_stmt))
{
- case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
{
tree rhs;
/* The RHS of the statement defining VAR must either have a
constant length or come from another SSA_NAME with a constant
length. */
- rhs = TREE_OPERAND (def_stmt, 1);
+ rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
STRIP_NOPS (rhs);
return get_maxval_strlen (rhs, length, visited, type);
}
ccp_fold_builtin (tree stmt, tree fn)
{
tree result, val[3];
- tree callee, arglist, a;
+ tree callee, a;
int arg_mask, i, type;
bitmap visited;
bool ignore;
+ call_expr_arg_iterator iter;
+ int nargs;
- ignore = TREE_CODE (stmt) != MODIFY_EXPR;
+ ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT;
/* First try the generic builtin folder. If that succeeds, return the
result directly. */
- callee = get_callee_fndecl (fn);
- arglist = TREE_OPERAND (fn, 1);
- result = fold_builtin (callee, arglist, ignore);
+ result = fold_call_expr (fn, ignore);
if (result)
{
if (ignore)
}
/* Ignore MD builtins. */
+ callee = get_callee_fndecl (fn);
if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
return NULL_TREE;
/* If the builtin could not be folded, and it has no argument list,
we're done. */
- if (!arglist)
+ nargs = call_expr_nargs (fn);
+ if (nargs == 0)
return NULL_TREE;
/* Limit the work only for builtins we know how to simplify. */
visited = BITMAP_ALLOC (NULL);
memset (val, 0, sizeof (val));
- for (i = 0, a = arglist;
- arg_mask;
- i++, arg_mask >>= 1, a = TREE_CHAIN (a))
- if (arg_mask & 1)
- {
- bitmap_clear (visited);
- if (!get_maxval_strlen (TREE_VALUE (a), &val[i], visited, type))
- val[i] = NULL_TREE;
- }
+ init_call_expr_arg_iterator (fn, &iter);
+ for (i = 0; arg_mask; i++, arg_mask >>= 1)
+ {
+ a = next_call_expr_arg (&iter);
+ if (arg_mask & 1)
+ {
+ bitmap_clear (visited);
+ if (!get_maxval_strlen (a, &val[i], visited, type))
+ val[i] = NULL_TREE;
+ }
+ }
BITMAP_FREE (visited);
break;
case BUILT_IN_STRCPY:
- if (val[1] && is_gimple_val (val[1]))
- result = fold_builtin_strcpy (callee, arglist, val[1]);
+ if (val[1] && is_gimple_val (val[1]) && nargs == 2)
+ result = fold_builtin_strcpy (callee,
+ CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ val[1]);
break;
case BUILT_IN_STRNCPY:
- if (val[1] && is_gimple_val (val[1]))
- result = fold_builtin_strncpy (callee, arglist, val[1]);
+ if (val[1] && is_gimple_val (val[1]) && nargs == 3)
+ result = fold_builtin_strncpy (callee,
+ CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ CALL_EXPR_ARG (fn, 2),
+ val[1]);
break;
case BUILT_IN_FPUTS:
- result = fold_builtin_fputs (arglist,
- TREE_CODE (stmt) != MODIFY_EXPR, 0,
+ result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0,
val[0]);
break;
case BUILT_IN_FPUTS_UNLOCKED:
- result = fold_builtin_fputs (arglist,
- TREE_CODE (stmt) != MODIFY_EXPR, 1,
+ result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1,
val[0]);
break;
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMSET_CHK:
if (val[2] && is_gimple_val (val[2]))
- result = fold_builtin_memory_chk (callee, arglist, val[2], ignore,
+ result = fold_builtin_memory_chk (callee,
+ CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ CALL_EXPR_ARG (fn, 2),
+ CALL_EXPR_ARG (fn, 3),
+ val[2], ignore,
DECL_FUNCTION_CODE (callee));
break;
case BUILT_IN_STRCPY_CHK:
case BUILT_IN_STPCPY_CHK:
if (val[1] && is_gimple_val (val[1]))
- result = fold_builtin_stxcpy_chk (callee, arglist, val[1], ignore,
+ result = fold_builtin_stxcpy_chk (callee,
+ CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ CALL_EXPR_ARG (fn, 2),
+ val[1], ignore,
DECL_FUNCTION_CODE (callee));
break;
case BUILT_IN_STRNCPY_CHK:
if (val[2] && is_gimple_val (val[2]))
- result = fold_builtin_strncpy_chk (arglist, val[2]);
+ result = fold_builtin_strncpy_chk (CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ CALL_EXPR_ARG (fn, 2),
+ CALL_EXPR_ARG (fn, 3),
+ val[2]);
break;
case BUILT_IN_SNPRINTF_CHK:
case BUILT_IN_VSNPRINTF_CHK:
if (val[1] && is_gimple_val (val[1]))
- result = fold_builtin_snprintf_chk (arglist, val[1],
+ result = fold_builtin_snprintf_chk (fn, val[1],
DECL_FUNCTION_CODE (callee));
break;
bool changed = false;
bool inside_addr_expr = false;
+ stmt = *stmt_p;
+
+ fold_stmt_r_data.stmt = stmt;
fold_stmt_r_data.changed_p = &changed;
fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
- stmt = *stmt_p;
-
/* If we replaced constants and the statement makes pointer dereferences,
then we may need to fold instances of *&VAR into VAR, etc. */
if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
{
- *stmt_p
- = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
- NULL);
+ *stmt_p = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
return true;
}
copying EH region info to the new node. Easier to just do it
here where we can just smash the call operand. Also
CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
- copied, fold_ternary does not have not information. */
- callee = TREE_OPERAND (rhs, 0);
+   copied, fold_call_expr does not have that information.  */
+ callee = CALL_EXPR_FN (rhs);
if (TREE_CODE (callee) == OBJ_TYPE_REF
&& lang_hooks.fold_obj_type_ref
&& TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
t = lang_hooks.fold_obj_type_ref (callee, t);
if (t)
{
- TREE_OPERAND (rhs, 0) = t;
+ CALL_EXPR_FN (rhs) = t;
changed = true;
}
}
}
}
+ else if (TREE_CODE (rhs) == COND_EXPR)
+ {
+ tree temp = fold (COND_EXPR_COND (rhs));
+ if (temp != COND_EXPR_COND (rhs))
+ result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
+ COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
+ }
/* If we couldn't fold the RHS, hand over to the generic fold routines. */
if (result == NULL_TREE)
bool changed = false;
bool inside_addr_expr = false;
+ fold_stmt_r_data.stmt = stmt;
fold_stmt_r_data.changed_p = &changed;
fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
tree new_stmt = tsi_stmt (ti);
find_new_referenced_vars (tsi_stmt_ptr (ti));
bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
- mark_new_vars_to_rename (bsi_stmt (*si_p));
+ mark_symbols_for_renaming (new_stmt);
bsi_next (si_p);
}
print_generic_stmt (dump_file, *stmtp, dump_flags);
}
+ push_stmt_changes (stmtp);
+
if (!set_rhs (stmtp, result))
{
result = convert_to_gimple_builtin (&i, result,
TREE_CODE (old_stmt)
- != MODIFY_EXPR);
+ != GIMPLE_MODIFY_STMT);
if (result)
{
bool ok = set_rhs (stmtp, result);
-
gcc_assert (ok);
}
}
- mark_new_vars_to_rename (*stmtp);
+
+ pop_stmt_changes (stmtp);
+
if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
&& tree_purge_dead_eh_edges (bb))
cfg_changed = true;
}
/* Delete unreachable blocks. */
- if (cfg_changed)
- cleanup_tree_cfg ();
- return 0;
+ return cfg_changed ? TODO_cleanup_cfg : 0;
}
NULL, /* next */
0, /* static_pass_number */
0, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ PROP_cfg | PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */