/* Conditional constant propagation pass for the GNU compiler.
- Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
- Free Software Foundation, Inc.
+ Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+ 2010 Free Software Foundation, Inc.
Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
-
+
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.
-
+
GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
-
+
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
mark the outgoing edges as executable or not executable
depending on the predicate's value. This is then used when
visiting PHI nodes to know when a PHI argument can be ignored.
-
+
2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
same constant C, then the LHS of the PHI is set to C. This
static prop_value_t *const_val;
static void canonicalize_float_value (prop_value_t *);
+static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
get_symbol_constant_value (tree sym)
{
if (TREE_STATIC (sym)
- && TREE_READONLY (sym))
+ && (TREE_READONLY (sym)
+ || TREE_CODE (sym) == CONST_DECL))
{
tree val = DECL_INITIAL (sym);
if (val)
{
- STRIP_USELESS_TYPE_CONVERSION (val);
+ STRIP_NOPS (val);
if (is_gimple_min_invariant (val))
{
if (TREE_CODE (val) == ADDR_EXPR)
{
tree base = get_base_address (TREE_OPERAND (val, 0));
if (base && TREE_CODE (base) == VAR_DECL)
- add_referenced_var (base);
+ {
+ TREE_ADDRESSABLE (base) = 1;
+ if (gimple_referenced_vars (cfun))
+ add_referenced_var (base);
+ }
}
return val;
}
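Reviewer note (illustration only, not part of the patch): a minimal C sketch of the kind of load the extended get_symbol_constant_value lets CCP fold; the symbol below is static, read-only, and has a constant initializer, so the load of `limit' can be replaced by 64.

    static const int limit = 64;

    int
    clamp (int n)
    {
      /* `limit' is TREE_STATIC and TREE_READONLY with a constant
         DECL_INITIAL, so get_symbol_constant_value returns 64 and CCP
         can fold both the comparison and the selected value.  */
      return n > limit ? limit : n;
    }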
has_constant_operand = true;
}
+ if (has_constant_operand)
+ all_undefined_operands = false;
+
/* If the operation combines operands like COMPLEX_EXPR make sure to
not mark the result UNDEFINED if only one part of the result is
undefined. */
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
gimple stmt = gsi_stmt (i);
- bool is_varying = surely_varying_stmt_p (stmt);
+ bool is_varying;
+
+ /* If the statement is a control insn, then we do not
+ want to skip simulating it even once; failing to do so
+ means that the edges it controls will never get added. */
+ if (stmt_ends_bb_p (stmt))
+ is_varying = false;
+ else
+ is_varying = surely_varying_stmt_p (stmt);
if (is_varying)
{
/* Do final substitution of propagated values, cleanup the flowgraph and
- free allocated storage.
+ free allocated storage.
Return TRUE when something was optimized. */
do_dbg_cnt ();
/* Perform substitutions based on the known constant values. */
- something_changed = substitute_and_fold (const_val, false);
+ something_changed = substitute_and_fold (const_val, ccp_fold_stmt);
free (const_val);
const_val = NULL;
return SSA_PROP_NOT_INTERESTING;
}
-/* Return true if we may propagate the address expression ADDR into the
+/* Return true if we may propagate the address expression ADDR into the
dereference DEREF and cancel them. */
bool
return get_symbol_constant_value (rhs);
return rhs;
}
-
+
case GIMPLE_UNARY_RHS:
{
/* Handle unary operators that can appear in GIMPLE form.
return op0;
}
- return
+ return
fold_unary_ignore_overflow_loc (loc, subcode,
gimple_expr_type (stmt), op0);
}
&& TREE_CODE (op0) == ADDR_EXPR
&& TREE_CODE (op1) == INTEGER_CST)
{
- tree lhs = gimple_assign_lhs (stmt);
tree tem = maybe_fold_offset_to_address
- (loc, op0, op1, TREE_TYPE (lhs));
+ (loc, op0, op1, TREE_TYPE (op0));
if (tem != NULL_TREE)
return tem;
}
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
if (tree_int_cst_equal (cfield, idx))
{
- STRIP_USELESS_TYPE_CONVERSION (cval);
+ STRIP_NOPS (cval);
if (TREE_CODE (cval) == ADDR_EXPR)
{
tree base = get_base_address (TREE_OPERAND (cval, 0));
/* FIXME: Handle bit-fields. */
&& ! DECL_BIT_FIELD (cfield))
{
- STRIP_USELESS_TYPE_CONVERSION (cval);
+ STRIP_NOPS (cval);
if (TREE_CODE (cval) == ADDR_EXPR)
{
tree base = get_base_address (TREE_OPERAND (cval, 0));
if (code == GIMPLE_ASSIGN)
{
enum tree_code subcode = gimple_assign_rhs_code (stmt);
-
+
/* Other cases cannot satisfy is_gimple_min_invariant
without folding. */
if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
else if (code == GIMPLE_SWITCH)
simplified = gimple_switch_index (stmt);
else
- /* These cannot satisfy is_gimple_min_invariant without folding. */
- gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
+ /* These cannot satisfy is_gimple_min_invariant without folding. */
+ gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
}
is_constant = simplified && is_gimple_min_invariant (simplified);
return val;
}
+/* Fold the stmt at *GSI with CCP specific information that propagating
+ and regular folding does not catch. */
+
+static bool
+ccp_fold_stmt (gimple_stmt_iterator *gsi)
+{
+ gimple stmt = gsi_stmt (*gsi);
+
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_COND:
+ {
+ prop_value_t val;
+ /* Statement evaluation will handle type mismatches in constants
+ more gracefully than the final propagation. This allows us to
+ fold more conditionals here. */
+ val = evaluate_stmt (stmt);
+ if (val.lattice_val != CONSTANT
+ || TREE_CODE (val.value) != INTEGER_CST)
+ return false;
+
+ if (integer_zerop (val.value))
+ gimple_cond_make_false (stmt);
+ else
+ gimple_cond_make_true (stmt);
+
+ return true;
+ }
+
+ case GIMPLE_CALL:
+ {
+ tree lhs = gimple_call_lhs (stmt);
+ prop_value_t *val;
+ tree argt;
+ bool changed = false;
+ unsigned i;
+
+ /* If the call was folded into a constant, make sure it goes
+ away even if we cannot propagate into all uses because of
+ type issues. */
+ if (lhs
+ && TREE_CODE (lhs) == SSA_NAME
+ && (val = get_value (lhs))
+ && val->lattice_val == CONSTANT)
+ {
+ tree new_rhs = unshare_expr (val->value);
+ bool res;
+ if (!useless_type_conversion_p (TREE_TYPE (lhs),
+ TREE_TYPE (new_rhs)))
+ new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
+ res = update_call_from_tree (gsi, new_rhs);
+ gcc_assert (res);
+ return true;
+ }
+
+ /* Propagate into the call arguments. Compared to replace_uses_in,
+ this can use the argument slot types for type verification
+ instead of the current argument type. We can also safely
+ drop qualifiers here as we are dealing with constants anyway. */
+ argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
+ for (i = 0; i < gimple_call_num_args (stmt) && argt;
+ ++i, argt = TREE_CHAIN (argt))
+ {
+ tree arg = gimple_call_arg (stmt, i);
+ if (TREE_CODE (arg) == SSA_NAME
+ && (val = get_value (arg))
+ && val->lattice_val == CONSTANT
+ && useless_type_conversion_p
+ (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
+ TYPE_MAIN_VARIANT (TREE_TYPE (val->value))))
+ {
+ gimple_call_set_arg (stmt, i, unshare_expr (val->value));
+ changed = true;
+ }
+ }
+
+ return changed;
+ }
+
+ case GIMPLE_ASSIGN:
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+ prop_value_t *val;
+
+ /* If we have a load that turned out to be constant, replace it,
+ as we cannot propagate into all uses in all cases. */
+ if (gimple_assign_single_p (stmt)
+ && TREE_CODE (lhs) == SSA_NAME
+ && (val = get_value (lhs))
+ && val->lattice_val == CONSTANT)
+ {
+ tree rhs = unshare_expr (val->value);
+ if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
+ rhs = fold_convert (TREE_TYPE (lhs), rhs);
+ gimple_assign_set_rhs_from_tree (gsi, rhs);
+ return true;
+ }
+
+ return false;
+ }
+
+ default:
+ return false;
+ }
+}
+
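Reviewer note (sketch, assumptions marked in the comments): what the new GIMPLE_COND case in ccp_fold_stmt buys. Once CCP has a constant lattice value for the controlling SSA name, the condition is rewritten in place even when plain propagation would balk at a type mismatch between the constant and the comparison operand, because evaluate_stmt tolerates such mismatches.

    int
    f (void)
    {
      long n = 0;          /* CCP records the lattice value n_1 == 0     */
      if (n)               /* the GIMPLE_COND is folded to `if (0)' here */
        return 1;          /* ... making this arm unreachable            */
      return 0;
    }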
/* Visit the assignment statement STMT. Set the value of its LHS to the
value computed by the RHS and store LHS in *OUTPUT_P. If STMT
creates virtual definitions, set the value of each new name to that
its evaluation changes the lattice value of its output, return
SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
output value.
-
+
If STMT is a conditional branch and we can determine its truth
value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
value, return SSA_PROP_VARYING. */
}
-struct gimple_opt_pass pass_ccp =
+struct gimple_opt_pass pass_ccp =
{
{
GIMPLE_PASS,
(char *)a - 4;
which should not be folded to &a->d[-8]. */
if (domain_type
- && TYPE_MAX_VALUE (domain_type)
+ && TYPE_MAX_VALUE (domain_type)
&& TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
{
tree up_bound = TYPE_MAX_VALUE (domain_type);
static tree
maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
- tree base, tree offset,
- tree orig_type, bool base_is_ptr)
+ tree base, tree offset, tree orig_type)
{
tree f, t, field_type, tail_array_field, field_offset;
tree ret;
if (cmp == 0
&& useless_type_conversion_p (orig_type, field_type))
{
- if (base_is_ptr)
- base = build1 (INDIRECT_REF, record_type, base);
t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
return t;
}
-
+
/* Don't care about offsets into the middle of scalars. */
if (!AGGREGATE_TYPE_P (field_type))
continue;
/* If we matched, then set offset to the displacement into
this field. */
- if (base_is_ptr)
- new_base = build1 (INDIRECT_REF, record_type, base);
- else
- new_base = base;
- protected_set_expr_location (new_base, loc);
- new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
- protected_set_expr_location (new_base, loc);
+ new_base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
+ SET_EXPR_LOCATION (new_base, loc);
/* Recurse to possibly find the match. */
ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
if (ret)
return ret;
ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
- orig_type, false);
+ orig_type);
if (ret)
return ret;
}
field_type = TREE_TYPE (f);
offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
- /* If we get here, we've got an aggregate field, and a possibly
+ /* If we get here, we've got an aggregate field, and a possibly
nonzero offset into them. Recurse and hope for a valid match. */
- if (base_is_ptr)
- {
- base = build1 (INDIRECT_REF, record_type, base);
- SET_EXPR_LOCATION (base, loc);
- }
base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
SET_EXPR_LOCATION (base, loc);
if (t)
return t;
return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
- orig_type, false);
+ orig_type);
}
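Reviewer note (hypothetical example; field layout assumed): with the pointer-base handling gone, maybe_fold_offset_to_component_ref only ever sees object bases taken from an ADDR_EXPR. The typical payoff is rewriting a byte-offset dereference of &object into a COMPONENT_REF, as in this sketch.

    struct S { int a; int b; };
    static struct S s;

    int
    get_b (void)
    {
      /* *(int *) ((char *) &s + offsetof (struct S, b)) can be
         re-expressed as the COMPONENT_REF s.b once the offset is known
         to land exactly on the field `b'.  */
      return *(int *) ((char *) &s + __builtin_offsetof (struct S, b));
    }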
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
{
tree ret;
tree type;
- bool base_is_ptr = true;
STRIP_NOPS (base);
- if (TREE_CODE (base) == ADDR_EXPR)
- {
- base_is_ptr = false;
-
- base = TREE_OPERAND (base, 0);
+ if (TREE_CODE (base) != ADDR_EXPR)
+ return NULL_TREE;
- /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
- so it needs to be removed and new COMPONENT_REF constructed.
- The wrong COMPONENT_REF are often constructed by folding the
- (type *)&object within the expression (type *)&object+offset */
- if (handled_component_p (base))
+ base = TREE_OPERAND (base, 0);
+
+ /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong
+ field of a union, so it needs to be removed and a new COMPONENT_REF
+ constructed. Such wrong COMPONENT_REFs are often constructed by folding
+ the (type *)&object within the expression (type *)&object+offset. */
+ if (handled_component_p (base))
+ {
+ HOST_WIDE_INT sub_offset, size, maxsize;
+ tree newbase;
+ newbase = get_ref_base_and_extent (base, &sub_offset,
+ &size, &maxsize);
+ gcc_assert (newbase);
+ if (size == maxsize
+ && size != -1
+ && !(sub_offset & (BITS_PER_UNIT - 1)))
{
- HOST_WIDE_INT sub_offset, size, maxsize;
- tree newbase;
- newbase = get_ref_base_and_extent (base, &sub_offset,
- &size, &maxsize);
- gcc_assert (newbase);
- if (size == maxsize
- && size != -1
- && !(sub_offset & (BITS_PER_UNIT - 1)))
- {
- base = newbase;
- if (sub_offset)
- offset = int_const_binop (PLUS_EXPR, offset,
- build_int_cst (TREE_TYPE (offset),
- sub_offset / BITS_PER_UNIT), 1);
- }
+ base = newbase;
+ if (sub_offset)
+ offset = int_const_binop (PLUS_EXPR, offset,
+ build_int_cst (TREE_TYPE (offset),
+ sub_offset / BITS_PER_UNIT), 1);
}
- if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
- && integer_zerop (offset))
- return base;
- type = TREE_TYPE (base);
}
- else
- {
- base_is_ptr = true;
- if (!POINTER_TYPE_P (TREE_TYPE (base)))
- return NULL_TREE;
- type = TREE_TYPE (TREE_TYPE (base));
- }
- ret = maybe_fold_offset_to_component_ref (loc, type, base, offset,
- orig_type, base_is_ptr);
+ if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
+ && integer_zerop (offset))
+ return base;
+ type = TREE_TYPE (base);
+
+ ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
if (!ret)
- {
- if (base_is_ptr)
- {
- base = build1 (INDIRECT_REF, type, base);
- SET_EXPR_LOCATION (base, loc);
- }
- ret = maybe_fold_offset_to_array_ref (loc,
- base, offset, orig_type, true);
- }
+ ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);
+
return ret;
}
&& is_gimple_min_invariant (DECL_INITIAL (base)))
return DECL_INITIAL (base);
+ /* If there is no offset involved, simply return the folded base. */
+ if (integer_zerop (offset))
+ return base;
+
/* Try folding *(&B+O) to B.X. */
t = maybe_fold_offset_to_reference (loc, base_addr, offset,
TREE_TYPE (expr));
}
else
{
- /* We can get here for out-of-range string constant accesses,
+ /* We can get here for out-of-range string constant accesses,
such as "_"[3]. Bail out of the entire substitution search
and arrange for the entire statement to be replaced by a
call to __builtin_trap. In all likelihood this will all be
&& TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
{
/* FIXME: Except that this causes problems elsewhere with dead
- code not being deleted, and we die in the rtl expanders
+ code not being deleted, and we die in the rtl expanders
because we failed to remove some ssa_name. In the meantime,
just return zero. */
/* FIXME2: This condition should be signaled by
- fold_read_from_constant_string directly, rather than
+ fold_read_from_constant_string directly, rather than
re-checking for it here. */
return integer_zero_node;
}
&& TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
&& tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
TYPE_SIZE_UNIT (TREE_TYPE (op0))))
- return build1 (ADDR_EXPR, res_type,
- build4 (ARRAY_REF, TREE_TYPE (op0),
+ return build_fold_addr_expr
+ (build4 (ARRAY_REF, TREE_TYPE (op0),
TREE_OPERAND (op0, 0),
gimple_assign_rhs1 (offset_def),
TREE_OPERAND (op0, 2),
TREE_OPERAND (op0, 3)));
else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
&& gimple_assign_rhs_code (offset_def) != MULT_EXPR)
- return build1 (ADDR_EXPR, res_type,
- build4 (ARRAY_REF, TREE_TYPE (op0),
+ return build_fold_addr_expr
+ (build4 (ARRAY_REF, TREE_TYPE (op0),
TREE_OPERAND (op0, 0),
op1,
TREE_OPERAND (op0, 2),
t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
if (!t)
t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
- ptd_type, false);
+ ptd_type);
if (t)
{
t = build1 (ADDR_EXPR, res_type, t);
return expr;
}
}
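Reviewer note (sketch; the SSA names and the element size of 4 are assumed): the build_fold_addr_expr change above affects the case where a pointer-plus of &array[0] with a scaled index is folded back into the address of an ARRAY_REF.

    static int a[16];

    int *
    nth (unsigned long j)
    {
      /* GIMPLE sees roughly  D.1 = j_2 * 4;  q_3 = &a[0] + D.1;  and
         maybe_fold_stmt_addition can rewrite q_3 as &a[j_2].  Building
         the address with build_fold_addr_expr gives the ADDR_EXPR the
         proper pointer type instead of reusing the LHS type blindly.  */
      return &a[0] + j;
    }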
+ else if (!is_lhs
+ && DECL_P (*t))
+ {
+ tree tem = get_symbol_constant_value (*t);
+ if (tem
+ && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
+ {
+ *t = unshare_expr (tem);
+ tem = maybe_fold_reference (expr, is_lhs);
+ if (tem)
+ return tem;
+ return expr;
+ }
+ }
return NULL_TREE;
}
{
tree var, val;
gimple def_stmt;
-
+
if (TREE_CODE (arg) != SSA_NAME)
{
if (TREE_CODE (arg) == COND_EXPR)
return false;
}
}
- return true;
+ return true;
default:
return false;
CONSTRUCTOR_ELTS (rhs));
}
+ else if (DECL_P (rhs))
+ return unshare_expr (get_symbol_constant_value (rhs));
+
/* If we couldn't fold the RHS, hand over to the generic
fold routines. */
if (result == NULL_TREE)
result = fold (rhs);
/* Strip away useless type conversions. Both the NON_LVALUE_EXPR
- that may have been added by fold, and "useless" type
+ that may have been added by fold, and "useless" type
conversions that might now be apparent due to propagation. */
STRIP_USELESS_TYPE_CONVERSION (result);
return false;
}
+static void gimplify_and_update_call_from_tree (gimple_stmt_iterator *, tree);
/* Attempt to fold a call statement referenced by the statement iterator GSI.
The statement may be replaced by another statement, e.g., if the call
tree result = ccp_fold_builtin (stmt);
if (result)
- return update_call_from_tree (gsi, result);
+ {
+ if (!update_call_from_tree (gsi, result))
+ gimplify_and_update_call_from_tree (gsi, result);
+ return true;
+ }
}
else
{
{
unsigned old_num_ops = gimple_num_ops (stmt);
tree new_rhs = fold_gimple_assign (gsi);
- if (new_rhs != NULL_TREE
+ tree lhs = gimple_assign_lhs (stmt);
+ if (new_rhs
+ && !useless_type_conversion_p (TREE_TYPE (lhs),
+ TREE_TYPE (new_rhs)))
+ new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
+ if (new_rhs
&& (!inplace
|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
{
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
- tree callee, rhs;
- gimple stmt, stack_save;
- gimple_stmt_iterator stack_save_gsi;
+ tree callee;
+ gimple stmt;
basic_block bb = gsi_bb (i);
gimple call = gsi_stmt (i);
continue;
callee = gimple_call_fndecl (stmt);
- if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
+ if (!callee
+ || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
+ /* All regular builtins are ok, just obviously not alloca. */
+ || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
return NULL_TREE;
if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
- break;
+ goto second_stack_restore;
}
- if (gsi_end_p (i)
- && (! single_succ_p (bb)
- || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
+ if (!gsi_end_p (i))
return NULL_TREE;
- stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
- if (gimple_code (stack_save) != GIMPLE_CALL
- || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
- || stmt_could_throw_p (stack_save)
- || !has_single_use (gimple_call_arg (call, 0)))
- return NULL_TREE;
+ /* Allow either no successors, or a single successor that is the exit block. */
+ switch (EDGE_COUNT (bb->succs))
+ {
+ case 0:
+ break;
+ case 1:
+ if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
+ return NULL_TREE;
+ break;
+ default:
+ return NULL_TREE;
+ }
+ second_stack_restore:
- callee = gimple_call_fndecl (stack_save);
- if (!callee
- || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
- || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
- || gimple_call_num_args (stack_save) != 0)
- return NULL_TREE;
+ /* If there's exactly one use, then zap the call to __builtin_stack_save.
+ If there are multiple uses, then the last one should remove the call.
+ In any case, whether the call to __builtin_stack_save can be removed
+ or not is irrelevant to removing the call to __builtin_stack_restore. */
+ if (has_single_use (gimple_call_arg (call, 0)))
+ {
+ gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
+ if (is_gimple_call (stack_save))
+ {
+ callee = gimple_call_fndecl (stack_save);
+ if (callee
+ && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
+ && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
+ {
+ gimple_stmt_iterator stack_save_gsi;
+ tree rhs;
- stack_save_gsi = gsi_for_stmt (stack_save);
- rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
- if (!update_call_from_tree (&stack_save_gsi, rhs))
- return NULL_TREE;
+ stack_save_gsi = gsi_for_stmt (stack_save);
+ rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
+ update_call_from_tree (&stack_save_gsi, rhs);
+ }
+ }
+ }
/* No effect, so the statement will be deleted. */
return integer_zero_node;
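Reviewer note (hypothetical source, not part of the patch): the restructured optimize_stack_restore still targets the classic VLA pattern; the sketch below marks where the save/restore pair is emitted and why the restore becomes removable.

    void use (char *);

    void
    f (int n)
    {
      {                       /* __builtin_stack_save emitted here      */
        char buf[n];          /* variable-length array                  */
        use (buf);
      }                       /* __builtin_stack_restore emitted here   */
      /* No later alloca/VLA follows and the block reaches the function
         exit, so the restore is a no-op; if the saved pointer has a
         single use, the paired __builtin_stack_save is zapped as well.  */
    }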
|| TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
!= TYPE_MAIN_VARIANT (cfun_va_list))
return NULL_TREE;
-
+
lhs = build_fold_indirect_ref_loc (loc, lhs);
rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
1, integer_zero_node);
gimple_stmt_iterator i;
gimple_seq stmts = gimple_seq_alloc();
struct gimplify_ctx gctx;
+ gimple last = NULL;
stmt = gsi_stmt (*si_p);
if (lhs == NULL_TREE)
gimplify_and_add (expr, &stmts);
- else
+ else
tmp = get_initialized_tmp_var (expr, &stmts, NULL);
pop_gimplify_context (NULL);
/* The replacement can expose previously unreferenced variables. */
for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
- {
- new_stmt = gsi_stmt (i);
- find_new_referenced_vars (new_stmt);
- gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
- mark_symbols_for_renaming (new_stmt);
- gsi_next (si_p);
- }
+ {
+ if (last)
+ {
+ gsi_insert_before (si_p, last, GSI_NEW_STMT);
+ gsi_next (si_p);
+ }
+ new_stmt = gsi_stmt (i);
+ find_new_referenced_vars (new_stmt);
+ mark_symbols_for_renaming (new_stmt);
+ last = new_stmt;
+ }
if (lhs == NULL_TREE)
{
- new_stmt = gimple_build_nop ();
unlink_stmt_vdef (stmt);
release_defs (stmt);
+ new_stmt = last;
}
else
{
+ if (last)
+ {
+ gsi_insert_before (si_p, last, GSI_NEW_STMT);
+ gsi_next (si_p);
+ }
new_stmt = gimple_build_assign (lhs, tmp);
gimple_set_vuse (new_stmt, gimple_vuse (stmt));
gimple_set_vdef (new_stmt, gimple_vdef (stmt));
bool cfg_changed = false;
basic_block bb;
unsigned int todoflags = 0;
-
+
FOR_EACH_BB (bb)
{
gimple_stmt_iterator i;
gsi_next (&i);
}
}
-
+
/* Delete unreachable blocks. */
if (cfg_changed)
todoflags |= TODO_cleanup_cfg;
-
+
return todoflags;
}
-struct gimple_opt_pass pass_fold_builtins =
+struct gimple_opt_pass pass_fold_builtins =
{
{
GIMPLE_PASS,