/* Conditional constant propagation pass for the GNU compiler.
- Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
+#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"
}
-/* The regular is_gimple_min_invariant does a shallow test of the object.
- It assumes that full gimplification has happened, or will happen on the
- object. For a value coming from DECL_INITIAL, this is not true, so we
- have to be more strict ourselves. */
-
-static bool
-ccp_decl_initial_min_invariant (tree t)
-{
- if (!is_gimple_min_invariant (t))
- return false;
- if (TREE_CODE (t) == ADDR_EXPR)
- {
- /* Inline and unroll is_gimple_addressable. */
- while (1)
- {
- t = TREE_OPERAND (t, 0);
- if (is_gimple_id (t))
- return true;
- if (!handled_component_p (t))
- return false;
- }
- }
- return true;
-}
-
/* If SYM is a constant variable with a known value, return that value.
   Otherwise return NULL_TREE.  */
-static tree
+tree
get_symbol_constant_value (tree sym)
{
if (TREE_STATIC (sym)
&& !MTAG_P (sym))
{
tree val = DECL_INITIAL (sym);
- if (val
- && ccp_decl_initial_min_invariant (val))
- return val;
+ if (val)
+ {
+ STRIP_USELESS_TYPE_CONVERSION (val);
+ if (is_gimple_min_invariant (val))
+ return val;
+ }
+      /* Variables declared 'const' without an initializer have an
+	 implicit zero initializer, provided they cannot be overridden
+	 at link or run time.  */
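+      /* For example (illustrative), a file-scope
+	   static const int n;
+	 has no DECL_INITIAL, yet every read of 'n' must yield zero, so
+	 the zero constant can be substituted for SYM.  */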
+ if (!val
+ && targetm.binds_local_p (sym)
+ && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
+ || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
+ return fold_convert (TREE_TYPE (sym), integer_zero_node);
}
return NULL_TREE;
static inline prop_value_t *
get_value (tree var)
{
- prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
+ prop_value_t *val;
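+
+  /* The lattice may already have been freed, for example when
+     fold_const_aggregate_ref is called from outside the propagator;
+     there is no value to return in that case.  */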
+ if (const_val == NULL)
+ return NULL;
+
+ val = &const_val[SSA_NAME_VERSION (var)];
if (val->lattice_val == UNINITIALIZED)
*val = get_default_value (var);
If STMT has no operands, then return CONSTANT.
- Else if any operands of STMT are undefined, then return UNDEFINED.
+   Else if the undefinedness of STMT's operands causes its value to be
+   undefined, then return UNDEFINED.
Else if any operands of STMT are constants, then return CONSTANT.
static ccp_lattice_t
likely_value (tree stmt)
{
- bool has_constant_operand;
+ bool has_constant_operand, has_undefined_operand, all_undefined_operands;
stmt_ann_t ann;
tree use;
ssa_op_iter iter;
return CONSTANT;
has_constant_operand = false;
+ has_undefined_operand = false;
+ all_undefined_operands = true;
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
{
prop_value_t *val = get_value (use);
if (val->lattice_val == UNDEFINED)
- return UNDEFINED;
+ has_undefined_operand = true;
+ else
+ all_undefined_operands = false;
if (val->lattice_val == CONSTANT)
has_constant_operand = true;
}
+  /* If the operation combines operands like COMPLEX_EXPR, make sure not
+     to mark the result UNDEFINED if only one part of the result is
+     undefined.  */
+ if (has_undefined_operand
+ && all_undefined_operands)
+ return UNDEFINED;
+ else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && has_undefined_operand)
+ {
+ switch (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)))
+ {
+ /* Unary operators are handled with all_undefined_operands. */
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case POINTER_PLUS_EXPR:
+	  /* Not MIN_EXPR or MAX_EXPR: one VARYING operand may be selected.
+	     Not bitwise operators: one VARYING operand may determine the
+	     result completely.  Not logical operators, for the same reason.
+	     Not COMPLEX_EXPR, as one VARYING operand makes the result
+	     partly not UNDEFINED.  Not *DIV_EXPR, comparisons or shifts,
+	     because the undefined operand may be promoted.  */
+ return UNDEFINED;
+
+ default:
+ ;
+ }
+ }
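+  /* For example, 'x & 0' and 'x * 0' are 0 even when x is UNDEFINED,
+     so a single undefined operand must not be allowed to mark such
+     results UNDEFINED.  */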
+  /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
+     fall back to VARYING even if there were CONSTANT operands.  */
+ if (has_undefined_operand)
+ return VARYING;
+
if (has_constant_operand
/* We do not consider virtual operands here -- load from read-only
memory may have only VARYING virtual operands, but still be
bool something_changed = substitute_and_fold (const_val, false);
free (const_val);
+ const_val = NULL;
return something_changed;
}
op0 = get_value (op0)->value;
}
+ /* Conversions are useless for CCP purposes if they are
+ value-preserving. Thus the restrictions that
+ useless_type_conversion_p places for pointer type conversions do
+ not apply here. Substitution later will only substitute to
+ allowed places. */
if ((code == NOP_EXPR || code == CONVERT_EXPR)
- && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0)))
+ && ((POINTER_TYPE_P (TREE_TYPE (rhs))
+ && POINTER_TYPE_P (TREE_TYPE (op0)))
+ || useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0))))
return op0;
return fold_unary (code, TREE_TYPE (rhs), op0);
}
return fold_binary (code, TREE_TYPE (rhs), op0, op1);
}
+ else if (kind == tcc_declaration)
+ return get_symbol_constant_value (rhs);
+
+ else if (kind == tcc_reference)
+ return fold_const_aggregate_ref (rhs);
+
+ /* Handle propagating invariant addresses into address operations.
+ The folding we do here matches that in tree-ssa-forwprop.c. */
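+  /* For example, if ptr_1 is known to be &a, the expression
+     &(*ptr_1)[i] can be rewritten as &a[i] (SSA names illustrative).  */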
+ else if (code == ADDR_EXPR)
+ {
+ tree *base;
+ base = &TREE_OPERAND (rhs, 0);
+ while (handled_component_p (*base))
+ base = &TREE_OPERAND (*base, 0);
+ if (TREE_CODE (*base) == INDIRECT_REF
+ && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
+ {
+ prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
+ if (val->lattice_val == CONSTANT
+ && TREE_CODE (val->value) == ADDR_EXPR
+ && useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (*base, 0)),
+ TREE_TYPE (val->value))
+ && useless_type_conversion_p (TREE_TYPE (*base),
+ TREE_TYPE (TREE_OPERAND (val->value, 0))))
+ {
+ /* We need to return a new tree, not modify the IL or share
+ parts of it. So play some tricks to avoid manually
+ building it. */
+ tree ret, save = *base;
+ *base = TREE_OPERAND (val->value, 0);
+ ret = unshare_expr (rhs);
+ recompute_tree_invariant_for_addr_expr (ret);
+ *base = save;
+ return ret;
+ }
+ }
+ }
+
/* We may be able to fold away calls to builtin functions if their
arguments are constants. */
else if (code == CALL_EXPR
ARRAY_REF or COMPONENT_REF into constant aggregates. Return
NULL_TREE otherwise. */
-static tree
+tree
fold_const_aggregate_ref (tree t)
{
prop_value_t *value;
ctor = fold_const_aggregate_ref (base);
break;
+ case STRING_CST:
+ case CONSTRUCTOR:
+ ctor = base;
+ break;
+
default:
return NULL_TREE;
}
== MODE_INT)
&& GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
&& compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
- return fold_convert (TREE_TYPE (t),
- build_int_cst (NULL,
- (TREE_STRING_POINTER (ctor)
- [TREE_INT_CST_LOW (idx)])));
+ return build_int_cst_type (TREE_TYPE (t),
+ (TREE_STRING_POINTER (ctor)
+ [TREE_INT_CST_LOW (idx)]));
return NULL_TREE;
}
/* Whoo-hoo! I'll fold ya baby. Yeah! */
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
if (tree_int_cst_equal (cfield, idx))
- return cval;
+ {
+ STRIP_USELESS_TYPE_CONVERSION (cval);
+ return cval;
+ }
break;
case COMPONENT_REF:
if (cfield == field
/* FIXME: Handle bit-fields. */
&& ! DECL_BIT_FIELD (cfield))
- return cval;
+ {
+ STRIP_USELESS_TYPE_CONVERSION (cval);
+ return cval;
+ }
break;
case REALPART_EXPR:
return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
break;
}
-
+
+ case INDIRECT_REF:
+ {
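+	/* A dereference through a pointer with a known constant address,
+	   e.g. *ptr_1 where ptr_1 is &a, reduces to folding the
+	   referenced object 'a' itself.  */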
+ tree base = TREE_OPERAND (t, 0);
+ if (TREE_CODE (base) == SSA_NAME
+ && (value = get_value (base))
+ && value->lattice_val == CONSTANT
+ && TREE_CODE (value->value) == ADDR_EXPR)
+ return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
+ break;
+ }
+
default:
break;
}
simplified = ccp_fold (stmt);
/* If the statement is likely to have a VARYING result, then do not
bother folding the statement. */
- if (likelyvalue == VARYING)
+ else if (likelyvalue == VARYING)
simplified = get_rhs (stmt);
- /* If the statement is an ARRAY_REF or COMPONENT_REF into constant
- aggregates, extract the referenced constant. Otherwise the
- statement is likely to have an UNDEFINED value, and there will be
- nothing to do. Note that fold_const_aggregate_ref returns
- NULL_TREE if the first case does not match. */
- else if (!simplified)
- simplified = fold_const_aggregate_ref (get_rhs (stmt));
is_constant = simplified && is_gimple_min_invariant (simplified);
fold_undefer_overflow_warnings (is_constant, stmt, 0);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "which is likely ");
+ switch (likelyvalue)
+ {
+ case CONSTANT:
+ fprintf (dump_file, "CONSTANT");
+ break;
+ case UNDEFINED:
+ fprintf (dump_file, "UNDEFINED");
+ break;
+ case VARYING:
+ fprintf (dump_file, "VARYING");
+ break;
+ default:;
+ }
+ fprintf (dump_file, "\n");
+ }
+
if (is_constant)
{
/* The statement produced a constant value. */
}
else
/* Evaluate the statement. */
- val = evaluate_stmt (stmt);
-
- /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
- value to be a VIEW_CONVERT_EXPR of the old constant value.
-
- ??? Also, if this was a definition of a bitfield, we need to widen
- the constant value into the type of the destination variable. This
- should not be necessary if GCC represented bitfields properly. */
- {
- tree orig_lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-
- if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
- && val.lattice_val == CONSTANT)
- {
- tree w = fold_unary (VIEW_CONVERT_EXPR,
- TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
- val.value);
-
- orig_lhs = TREE_OPERAND (orig_lhs, 0);
- if (w && is_gimple_min_invariant (w))
- val.value = w;
- else
- {
- val.lattice_val = VARYING;
- val.value = NULL;
- }
- }
-
- if (val.lattice_val == CONSTANT
- && TREE_CODE (orig_lhs) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
- {
- tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1),
- orig_lhs);
-
- if (w && is_gimple_min_invariant (w))
- val.value = w;
- else
- {
- val.lattice_val = VARYING;
- val.value = NULL_TREE;
- val.mem_ref = NULL_TREE;
- }
- }
- }
+ val = evaluate_stmt (stmt);
retval = SSA_PROP_NOT_INTERESTING;
{
fprintf (dump_file, "\nVisiting statement:\n");
print_generic_stmt (dump_file, stmt, dump_flags);
- fprintf (dump_file, "\n");
}
if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
}
-struct tree_opt_pass pass_ccp =
+struct gimple_opt_pass pass_ccp =
{
+ {
+ GIMPLE_PASS,
"ccp", /* name */
gate_ccp, /* gate */
do_ssa_ccp, /* execute */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_verify_ssa
- | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
- 0 /* letter */
+ | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
+ }
};
}
-struct tree_opt_pass pass_store_ccp =
+struct gimple_opt_pass pass_store_ccp =
{
+ {
+ GIMPLE_PASS,
"store_ccp", /* name */
gate_store_ccp, /* gate */
do_ssa_store_ccp, /* execute */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_verify_ssa
- | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
- 0 /* letter */
+ | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
+ }
};
-/* Given a constant value VAL for bitfield FIELD, and a destination
- variable VAR, return VAL appropriately widened to fit into VAR. If
- FIELD is wider than HOST_WIDE_INT, NULL is returned. */
-
-tree
-widen_bitfield (tree val, tree field, tree var)
-{
- unsigned HOST_WIDE_INT var_size, field_size;
- tree wide_val;
- unsigned HOST_WIDE_INT mask;
- unsigned int i;
-
- /* We can only do this if the size of the type and field and VAL are
- all constants representable in HOST_WIDE_INT. */
- if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
- || !host_integerp (DECL_SIZE (field), 1)
- || !host_integerp (val, 0))
- return NULL_TREE;
-
- var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
- field_size = tree_low_cst (DECL_SIZE (field), 1);
-
- /* Give up if either the bitfield or the variable are too wide. */
- if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
- return NULL_TREE;
-
- gcc_assert (var_size >= field_size);
-
- /* If the sign bit of the value is not set or the field's type is unsigned,
- just mask off the high order bits of the value. */
- if (DECL_UNSIGNED (field)
- || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
- {
- /* Zero extension. Build a mask with the lower 'field_size' bits
- set and a BIT_AND_EXPR node to clear the high order bits of
- the value. */
- for (i = 0, mask = 0; i < field_size; i++)
- mask |= ((HOST_WIDE_INT) 1) << i;
-
- wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
- build_int_cst (TREE_TYPE (var), mask));
- }
- else
- {
- /* Sign extension. Create a mask with the upper 'field_size'
- bits set and a BIT_IOR_EXPR to set the high order bits of the
- value. */
- for (i = 0, mask = 0; i < (var_size - field_size); i++)
- mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);
-
- wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
- build_int_cst (TREE_TYPE (var), mask));
- }
-
- return wide_val;
-}
-
-
/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
is the desired result type. */
static tree
-maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
+maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
+ bool allow_negative_idx)
{
tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
tree array_type, elt_type, elt_size;
+ tree domain_type;
/* If BASE is an ARRAY_REF, we can pick up another offset (this time
measured in units of the size of elements type) from that ARRAY_REF).
low bound, if any, convert the index into that type, and add the
low bound. */
min_idx = build_int_cst (idx_type, 0);
- if (TYPE_DOMAIN (array_type))
+ domain_type = TYPE_DOMAIN (array_type);
+ if (domain_type)
{
- idx_type = TYPE_DOMAIN (array_type);
+ idx_type = domain_type;
if (TYPE_MIN_VALUE (idx_type))
min_idx = TYPE_MIN_VALUE (idx_type);
else
/* Convert, and possibly truncate, the index only after offsetting. */
idx = fold_convert (idx_type, idx);
- return build4 (ARRAY_REF, orig_type, base, idx, NULL_TREE, NULL_TREE);
+  /* We don't want to construct an access past the array bounds.  For
+     example,
+	char *(c[4]);
+	c[3][2];
+     should not be simplified into (*c)[14] or tree-vrp will
+     give false warnings.  The same is true for
+	struct A { long x; char d[0]; } *a;
+	(char *)a - 4;
+     which should not be folded to &a->d[-8].  */
+ if (domain_type
+ && TYPE_MAX_VALUE (domain_type)
+ && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
+ {
+ tree up_bound = TYPE_MAX_VALUE (domain_type);
+
+ if (tree_int_cst_lt (up_bound, idx)
+ /* Accesses after the end of arrays of size 0 (gcc
+ extension) and 1 are likely intentional ("struct
+ hack"). */
+ && compare_tree_int (up_bound, 1) > 0)
+ return NULL_TREE;
+ }
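+  /* Likewise refuse to construct an access below the lower bound unless
+     the caller explicitly allows negative indices (callers allow this
+     only for the first field of a record or when folding a direct
+     dereference).  */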
+ if (domain_type
+ && TYPE_MIN_VALUE (domain_type))
+ {
+ if (!allow_negative_idx
+ && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
+ && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
+ return NULL_TREE;
+ }
+ else if (!allow_negative_idx
+ && compare_tree_int (idx, 0) < 0)
+ return NULL_TREE;
+
+ return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
/* Recurse to possibly find the match. */
- ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type);
+ ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
+ f == TYPE_FIELDS (record_type));
if (ret)
return ret;
ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
base = build1 (INDIRECT_REF, record_type, base);
base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
- t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
+ t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
+ f == TYPE_FIELDS (record_type));
if (t)
return t;
return maybe_fold_offset_to_component_ref (field_type, base, offset,
{
if (base_is_ptr)
base = build1 (INDIRECT_REF, type, base);
- ret = maybe_fold_offset_to_array_ref (base, offset, orig_type);
+ ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
}
return ret;
}
/* Fold away CONST_DECL to its value, if the type is scalar. */
if (TREE_CODE (base) == CONST_DECL
- && ccp_decl_initial_min_invariant (DECL_INITIAL (base)))
+ && is_gimple_min_invariant (DECL_INITIAL (base)))
return DECL_INITIAL (base);
/* Try folding *(&B+O) to B.X. */
}
ptd_type = TREE_TYPE (ptr_type);
+ /* If we want a pointer to void, reconstruct the reference from the
+ array element type. A pointer to that can be trivially converted
+ to void *. This happens as we fold (void *)(ptr p+ off). */
+ if (VOID_TYPE_P (ptd_type)
+ && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
+ ptd_type = TREE_TYPE (TREE_TYPE (op0));
/* At which point we can try some of the same things as for indirects. */
- t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
+ t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
if (!t)
t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
ptd_type, false);
bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
bool *changed_p = fold_stmt_r_data->changed_p;
tree expr = *expr_p, t;
+ bool volatile_p = TREE_THIS_VOLATILE (expr);
/* ??? It'd be nice if walk_tree had a pre-order option. */
switch (TREE_CODE (expr))
t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
integer_zero_node);
+ if (!t
+ && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
+ /* If we had a good reason for propagating the address here,
+ make sure we end up with valid gimple. See PR34989. */
+ t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
break;
case NOP_EXPR:
(TREE_OPERAND (expr, 0),
integer_zero_node,
TREE_TYPE (TREE_TYPE (expr)))))
- t = build_fold_addr_expr_with_type (t, TREE_TYPE (expr));
+ {
+ tree ptr_type = build_pointer_type (TREE_TYPE (t));
+ if (!useless_type_conversion_p (TREE_TYPE (expr), ptr_type))
+ return NULL_TREE;
+ t = build_fold_addr_expr_with_type (t, ptr_type);
+ }
break;
/* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
return t;
*walk_subtrees = 0;
- /* Set TREE_INVARIANT properly so that the value is properly
- considered constant, and so gets propagated as expected. */
+ /* Make sure the value is properly considered constant, and so gets
+ propagated as expected. */
if (*changed_p)
recompute_tree_invariant_for_addr_expr (expr);
return NULL_TREE;
if (t)
{
+      /* Preserve the volatility of the original expression.  */
+ TREE_THIS_VOLATILE (t) = volatile_p;
*expr_p = t;
*changed_p = true;
}
if (TREE_CODE (arg) == COND_EXPR)
return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
&& get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
+ /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
+ else if (TREE_CODE (arg) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
+ && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
+ {
+ tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ if (TREE_CODE (aop0) == INDIRECT_REF
+ && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
+ return get_maxval_strlen (TREE_OPERAND (aop0, 0),
+ length, visited, type);
+ }
if (type == 2)
{
return changed;
}
\f
+/* Try to optimize out __builtin_stack_restore. Optimize it out
+ if there is another __builtin_stack_restore in the same basic
+ block and no calls or ASM_EXPRs are in between, or if this block's
+ only outgoing edge is to EXIT_BLOCK and there are no calls or
+ ASM_EXPRs after this __builtin_stack_restore. */
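+
+/* A sketch of the removable pattern (SSA names illustrative):
+
+     t_1 = __builtin_stack_save ();
+     ... no calls or ASM_EXPRs ...
+     __builtin_stack_restore (t_1);    <-- removed, with its paired save
+     __builtin_stack_restore (t_2);  */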
+
+static tree
+optimize_stack_restore (basic_block bb, tree call, block_stmt_iterator i)
+{
+ tree stack_save, stmt, callee;
+
+ if (TREE_CODE (call) != CALL_EXPR
+ || call_expr_nargs (call) != 1
+ || TREE_CODE (CALL_EXPR_ARG (call, 0)) != SSA_NAME
+ || !POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (call, 0))))
+ return NULL_TREE;
+
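+  /* Scan the remainder of the block for another __builtin_stack_restore;
+     an ASM_EXPR, or a call to anything other than a normal builtin,
+     makes us give up.  */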
+ for (bsi_next (&i); !bsi_end_p (i); bsi_next (&i))
+ {
+ tree call;
+
+ stmt = bsi_stmt (i);
+ if (TREE_CODE (stmt) == ASM_EXPR)
+ return NULL_TREE;
+ call = get_call_expr_in (stmt);
+ if (call == NULL)
+ continue;
+
+ callee = get_callee_fndecl (call);
+ if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
+ return NULL_TREE;
+
+ if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
+ break;
+ }
+
+ if (bsi_end_p (i)
+ && (! single_succ_p (bb)
+ || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
+ return NULL_TREE;
+
+ stack_save = SSA_NAME_DEF_STMT (CALL_EXPR_ARG (call, 0));
+ if (TREE_CODE (stack_save) != GIMPLE_MODIFY_STMT
+ || GIMPLE_STMT_OPERAND (stack_save, 0) != CALL_EXPR_ARG (call, 0)
+ || TREE_CODE (GIMPLE_STMT_OPERAND (stack_save, 1)) != CALL_EXPR
+ || tree_could_throw_p (stack_save)
+ || !has_single_use (CALL_EXPR_ARG (call, 0)))
+ return NULL_TREE;
+
+ callee = get_callee_fndecl (GIMPLE_STMT_OPERAND (stack_save, 1));
+ if (!callee
+ || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
+ || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
+ || call_expr_nargs (GIMPLE_STMT_OPERAND (stack_save, 1)) != 0)
+ return NULL_TREE;
+
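+  /* Eliminate the paired __builtin_stack_save by rewriting its RHS to
+     a plain zero constant; the caller will likewise replace the
+     restore call with the integer_zero_node returned below.  */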
+ stmt = stack_save;
+ push_stmt_changes (&stmt);
+ if (!set_rhs (&stmt,
+ build_int_cst (TREE_TYPE (CALL_EXPR_ARG (call, 0)), 0)))
+ {
+ discard_stmt_changes (&stmt);
+ return NULL_TREE;
+ }
+ gcc_assert (stmt == stack_save);
+ pop_stmt_changes (&stmt);
+
+ return integer_zero_node;
+}
+\f
+/* If the va_list type is a simple pointer and nothing special is needed,
+   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
+   optimize __builtin_va_end (&ap) away as a no-op, and turn
+   __builtin_va_copy into a simple pointer assignment.  */
+
+static tree
+optimize_stdarg_builtin (tree call)
+{
+ tree callee, lhs, rhs;
+ bool va_list_simple_ptr;
+
+ if (TREE_CODE (call) != CALL_EXPR)
+ return NULL_TREE;
+
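+  /* These transformations are only valid when va_list is a plain
+     char * or void * pointer on the target; otherwise va_start and
+     va_copy may need target-specific handling.  */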
+ va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
+ && (TREE_TYPE (va_list_type_node) == void_type_node
+ || TREE_TYPE (va_list_type_node) == char_type_node);
+
+ callee = get_callee_fndecl (call);
+ switch (DECL_FUNCTION_CODE (callee))
+ {
+ case BUILT_IN_VA_START:
+ if (!va_list_simple_ptr
+ || targetm.expand_builtin_va_start != NULL
+ || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
+ return NULL_TREE;
+
+ if (call_expr_nargs (call) != 2)
+ return NULL_TREE;
+
+ lhs = CALL_EXPR_ARG (call, 0);
+ if (!POINTER_TYPE_P (TREE_TYPE (lhs))
+ || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
+ != TYPE_MAIN_VARIANT (va_list_type_node))
+ return NULL_TREE;
+
+ lhs = build_fold_indirect_ref (lhs);
+ rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
+ 1, integer_zero_node);
+ rhs = fold_convert (TREE_TYPE (lhs), rhs);
+ return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
+
+ case BUILT_IN_VA_COPY:
+ if (!va_list_simple_ptr)
+ return NULL_TREE;
+
+ if (call_expr_nargs (call) != 2)
+ return NULL_TREE;
+
+ lhs = CALL_EXPR_ARG (call, 0);
+ if (!POINTER_TYPE_P (TREE_TYPE (lhs))
+ || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
+ != TYPE_MAIN_VARIANT (va_list_type_node))
+ return NULL_TREE;
+
+ lhs = build_fold_indirect_ref (lhs);
+ rhs = CALL_EXPR_ARG (call, 1);
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
+ != TYPE_MAIN_VARIANT (va_list_type_node))
+ return NULL_TREE;
+
+ rhs = fold_convert (TREE_TYPE (lhs), rhs);
+ return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
+
+ case BUILT_IN_VA_END:
+ return integer_zero_node;
+
+ default:
+ gcc_unreachable ();
+ }
+}
+\f
/* Convert EXPR into a GIMPLE value suitable for substitution on the
RHS of an assignment. Insert the necessary statements before
iterator *SI_P.
{
bool cfg_changed = false;
basic_block bb;
+ unsigned int todoflags = 0;
+
FOR_EACH_BB (bb)
{
block_stmt_iterator i;
fcode = DECL_FUNCTION_CODE (callee);
result = ccp_fold_builtin (*stmtp, call);
+
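+	      /* The call is about to be replaced by a constant; drop any
+		 value-profiling histograms attached to it.  */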
+ if (result)
+ gimple_remove_stmt_histograms (cfun, *stmtp);
+
if (!result)
switch (DECL_FUNCTION_CODE (callee))
{
result = integer_zero_node;
break;
+ case BUILT_IN_STACK_RESTORE:
+ result = optimize_stack_restore (bb, *stmtp, i);
+ if (result)
+ break;
+ bsi_next (&i);
+ continue;
+
+ case BUILT_IN_VA_START:
+ case BUILT_IN_VA_END:
+ case BUILT_IN_VA_COPY:
+ /* These shouldn't be folded before pass_stdarg. */
+ result = optimize_stdarg_builtin (*stmtp);
+ if (result)
+ break;
+ /* FALLTHRU */
+
default:
bsi_next (&i);
continue;
{
bool ok = set_rhs (stmtp, result);
gcc_assert (ok);
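+	      /* Folding may introduce new pointer assignments (for
+		 example from optimize_stdarg_builtin), so request that
+		 alias information be recomputed.  */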
+ todoflags |= TODO_rebuild_alias;
}
}
bsi_next (&i);
}
}
-
+
/* Delete unreachable blocks. */
- return cfg_changed ? TODO_cleanup_cfg : 0;
+ if (cfg_changed)
+ todoflags |= TODO_cleanup_cfg;
+
+ return todoflags;
}
-struct tree_opt_pass pass_fold_builtins =
+struct gimple_opt_pass pass_fold_builtins =
{
+ {
+ GIMPLE_PASS,
"fab", /* name */
NULL, /* gate */
execute_fold_all_builtins, /* execute */
0, /* todo_flags_start */
TODO_dump_func
| TODO_verify_ssa
- | TODO_update_ssa, /* todo_flags_finish */
- 0 /* letter */
+ | TODO_update_ssa /* todo_flags_finish */
+ }
};