GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
-Free Software Foundation; either version 2, or (at your option) any
+Free Software Foundation; either version 3, or (at your option) any
later version.
GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3.  If not, see
+<http://www.gnu.org/licenses/>. */
/* Conditional constant propagation (CCP) is based on the SSA
propagation engine (tree-ssa-propagate.c). Constant assignments of
If STMT has no operands, then return CONSTANT.
- Else if any operands of STMT are undefined, then return UNDEFINED.
+ Else if the undefinedness of STMT's operands causes its value to be
+ undefined, then return UNDEFINED.
Else if any operands of STMT are constants, then return CONSTANT.
static ccp_lattice_t
likely_value (tree stmt)
{
- bool has_constant_operand;
+ bool has_constant_operand, has_undefined_operand, all_undefined_operands;
stmt_ann_t ann;
tree use;
ssa_op_iter iter;
return CONSTANT;
has_constant_operand = false;
+ has_undefined_operand = false;
+ all_undefined_operands = true;
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
{
prop_value_t *val = get_value (use);
if (val->lattice_val == UNDEFINED)
- return UNDEFINED;
+ has_undefined_operand = true;
+ else
+ all_undefined_operands = false;
if (val->lattice_val == CONSTANT)
has_constant_operand = true;
}
+ /* If the operation combines operands the way COMPLEX_EXPR does, make
+    sure not to mark the result UNDEFINED when only one part of the
+    result is undefined. */
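+ /* A hedged illustration (GIMPLE sketch, SSA names invented): in
+
+	re_1 = 1.0;
+	x_3 = COMPLEX_EXPR <re_1, im_2(D)>;
+
+    where im_2(D) is the default definition of an uninitialized variable,
+    only the imaginary half of x_3 is undefined, so x_3 itself must not
+    be treated as UNDEFINED. */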
+ if (has_undefined_operand
+ && all_undefined_operands)
+ return UNDEFINED;
+ else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && has_undefined_operand)
+ {
+ switch (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)))
+ {
+ /* Unary operators are handled with all_undefined_operands. */
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case POINTER_PLUS_EXPR:
+ /* Not MIN_EXPR or MAX_EXPR, because one VARYING operand may be
+    selected.  Not bitwise operators, because one VARYING operand may
+    specify the result completely.  Not logical operators, for the
+    same reason.  Not COMPLEX_EXPR, because one VARYING operand makes
+    the result partly not UNDEFINED.  Not *DIV_EXPR, comparisons or
+    shifts, because the undefined operand may be promoted. */
+ return UNDEFINED;
+
+ default:
+ ;
+ }
+ }
+ /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
+    fall back to VARYING even if there were CONSTANT operands. */
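+ /* For instance (hypothetical GIMPLE, names invented):
+
+	y_2 = u_1(D) & 0;
+
+    has an UNDEFINED operand yet the well-defined value 0; returning
+    UNDEFINED here would let the propagator substitute an arbitrary
+    constant for y_2. */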
+ if (has_undefined_operand)
+ return VARYING;
+
if (has_constant_operand
/* We do not consider virtual operands here -- load from read-only
memory may have only VARYING virtual operands, but still be
}
if ((code == NOP_EXPR || code == CONVERT_EXPR)
- && tree_ssa_useless_type_conversion_1 (TREE_TYPE (rhs),
- TREE_TYPE (op0)))
+ && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0)))
return op0;
return fold_unary (code, TREE_TYPE (rhs), op0);
}
== MODE_INT)
&& GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
&& compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
- return build_int_cst (TREE_TYPE (t), (TREE_STRING_POINTER (ctor)
- [TREE_INT_CST_LOW (idx)]));
+ return build_int_cst_type (TREE_TYPE (t),
+ (TREE_STRING_POINTER (ctor)
+ [TREE_INT_CST_LOW (idx)]));
return NULL_TREE;
}
{
tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
tree array_type, elt_type, elt_size;
+ tree domain_type;
/* If BASE is an ARRAY_REF, we can pick up another offset (this time
measured in units of the size of elements type) from that ARRAY_REF).
if (TREE_CODE (array_type) != ARRAY_TYPE)
return NULL_TREE;
elt_type = TREE_TYPE (array_type);
- if (!lang_hooks.types_compatible_p (orig_type, elt_type))
+ if (!useless_type_conversion_p (orig_type, elt_type))
return NULL_TREE;
/* Use signed size type for intermediate computation on the index. */
low bound, if any, convert the index into that type, and add the
low bound. */
min_idx = build_int_cst (idx_type, 0);
- if (TYPE_DOMAIN (array_type))
+ domain_type = TYPE_DOMAIN (array_type);
+ if (domain_type)
{
- idx_type = TYPE_DOMAIN (array_type);
+ idx_type = domain_type;
if (TYPE_MIN_VALUE (idx_type))
min_idx = TYPE_MIN_VALUE (idx_type);
else
/* Truncate the index only after the offset has been applied. */
idx = fold_convert (idx_type, idx);
- return build4 (ARRAY_REF, orig_type, base, idx, NULL_TREE, NULL_TREE);
+ /* We don't want to construct an access past the array bounds.  For example
+	char *(c[4]);
+	c[3][2];
+    should not be simplified into (*c)[14], or tree-vrp will give a false
+    warning. */
+ if (domain_type && TYPE_MAX_VALUE (domain_type)
+ && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
+ {
+ tree up_bound = TYPE_MAX_VALUE (domain_type);
+
+ if (tree_int_cst_lt (up_bound, idx)
+ /* Accesses after the end of arrays of size 0 (gcc
+ extension) and 1 are likely intentional ("struct
+ hack"). */
+ && compare_tree_int (up_bound, 1) > 0)
+ return NULL_TREE;
+ }
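+
+ /* The size-0/1 exception above keeps the "struct hack" working, e.g.
+    (hypothetical source):
+
+	struct str { int len; char s[1]; };
+	... p->s[4] ...
+
+    where s deliberately overruns its declared domain [0, 0]. */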
+
+ return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
return NULL_TREE;
/* Short-circuit silly cases. */
- if (lang_hooks.types_compatible_p (record_type, orig_type))
+ if (useless_type_conversion_p (record_type, orig_type))
return NULL_TREE;
tail_array_field = NULL_TREE;
/* Here we exactly match the offset being checked. If the types match,
then we can return that field. */
if (cmp == 0
- && lang_hooks.types_compatible_p (orig_type, field_type))
+ && useless_type_conversion_p (orig_type, field_type))
{
if (base_is_ptr)
base = build1 (INDIRECT_REF, record_type, base);
sub_offset / BITS_PER_UNIT), 1);
}
}
- if (lang_hooks.types_compatible_p (orig_type, TREE_TYPE (base))
+ if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
&& integer_zerop (offset))
return base;
type = TREE_TYPE (base);
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
tree t;
+ bool volatile_p = TREE_THIS_VOLATILE (expr);
/* We may well have constructed a double-nested PLUS_EXPR via multiple
substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
if (t)
return t;
- /* Add in any offset from a PLUS_EXPR. */
- if (TREE_CODE (base) == PLUS_EXPR)
+ /* Add in any offset from a POINTER_PLUS_EXPR. */
+ if (TREE_CODE (base) == POINTER_PLUS_EXPR)
{
tree offset2;
return NULL_TREE;
base = TREE_OPERAND (base, 0);
- offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
+ offset = fold_convert (sizetype,
+ int_const_binop (PLUS_EXPR, offset, offset2, 1));
}
if (TREE_CODE (base) == ADDR_EXPR)
t = maybe_fold_offset_to_reference (base_addr, offset,
TREE_TYPE (expr));
if (t)
- return t;
+ {
+ TREE_THIS_VOLATILE (t) = volatile_p;
+ return t;
+ }
}
else
{
}
-/* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR.
+/* A subroutine of fold_stmt_r. EXPR is a POINTER_PLUS_EXPR.
A quaint feature extant in our address arithmetic is that there
can be hidden type changes here. The type of the result need
What we're after here is an expression of the form
(T *)(&array + const)
where the cast doesn't actually exist, but is implicit in the
- type of the PLUS_EXPR. We'd like to turn this into
+ type of the POINTER_PLUS_EXPR. We'd like to turn this into
&array[x]
which may be able to propagate further. */
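/* A hedged concrete instance: at the GIMPLE level the constant is a byte
   offset, so given int a[10] with 4-byte int, &a + 4 becomes &a[1], which
   later passes can look through. */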
tree ptr_type = TREE_TYPE (expr);
tree ptd_type;
tree t;
- bool subtract = (TREE_CODE (expr) == MINUS_EXPR);
- /* We're only interested in pointer arithmetic. */
- if (!POINTER_TYPE_P (ptr_type))
- return NULL_TREE;
- /* Canonicalize the integral operand to op1. */
- if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
- {
- if (subtract)
- return NULL_TREE;
- t = op0, op0 = op1, op1 = t;
- }
+ gcc_assert (TREE_CODE (expr) == POINTER_PLUS_EXPR);
+
/* It had better be a constant. */
if (TREE_CODE (op1) != INTEGER_CST)
return NULL_TREE;
array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
/* Update the operands for the next round, or for folding. */
- /* If we're manipulating unsigned types, then folding into negative
- values can produce incorrect results. Particularly if the type
- is smaller than the width of the pointer. */
- if (subtract
- && TYPE_UNSIGNED (TREE_TYPE (op1))
- && tree_int_cst_lt (array_idx, op1))
- return NULL;
- op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
+ op1 = int_const_binop (PLUS_EXPR,
array_idx, op1, 0);
- subtract = false;
op0 = array_obj;
}
- /* If we weren't able to fold the subtraction into another array reference,
- canonicalize the integer for passing to the array and component ref
- simplification functions. */
- if (subtract)
- {
- if (TYPE_UNSIGNED (TREE_TYPE (op1)))
- return NULL;
- op1 = fold_unary (NEGATE_EXPR, TREE_TYPE (op1), op1);
- /* ??? In theory fold should always produce another integer. */
- if (op1 == NULL || TREE_CODE (op1) != INTEGER_CST)
- return NULL;
- }
-
ptd_type = TREE_TYPE (ptr_type);
/* At which point we can try some of the same things as for indirects. */
bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
bool *changed_p = fold_stmt_r_data->changed_p;
tree expr = *expr_p, t;
+ bool volatile_p = TREE_THIS_VOLATILE (expr);
/* ??? It'd be nice if walk_tree had a pre-order option. */
switch (TREE_CODE (expr))
(TREE_OPERAND (expr, 0),
integer_zero_node,
TREE_TYPE (TREE_TYPE (expr)))))
- t = build_fold_addr_expr_with_type (t, TREE_TYPE (expr));
+ {
+ tree ptr_type = build_pointer_type (TREE_TYPE (t));
+ if (!useless_type_conversion_p (TREE_TYPE (expr), ptr_type))
+ return NULL_TREE;
+ t = build_fold_addr_expr_with_type (t, ptr_type);
+ }
break;
/* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
recompute_tree_invariant_for_addr_expr (expr);
return NULL_TREE;
- case PLUS_EXPR:
- case MINUS_EXPR:
+ case POINTER_PLUS_EXPR:
t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
if (t)
return t;
if (t)
{
+ /* Preserve the volatility of the original expression. */
+ TREE_THIS_VOLATILE (t) = volatile_p;
*expr_p = t;
*changed_p = true;
}
case BUILT_IN_STRLEN:
if (val[0])
{
- tree new = fold_convert (TREE_TYPE (fn), val[0]);
+ tree new_val = fold_convert (TREE_TYPE (fn), val[0]);
/* If the result is not a valid gimple value, or not a cast
of a valid gimple value, then we can not use the result. */
- if (is_gimple_val (new)
- || (is_gimple_cast (new)
- && is_gimple_val (TREE_OPERAND (new, 0))))
- return new;
+ if (is_gimple_val (new_val)
+ || (is_gimple_cast (new_val)
+ && is_gimple_val (TREE_OPERAND (new_val, 0))))
+ return new_val;
}
break;
return changed;
}
\f
+/* Try to optimize out __builtin_stack_restore.  It can be removed if
+   there is another __builtin_stack_restore in the same basic block
+   and no calls or ASM_EXPRs are in between, or if this block's only
+   outgoing edge is to EXIT_BLOCK and there are no calls or ASM_EXPRs
+   after this __builtin_stack_restore. */
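+
+/* A hedged sketch of code this catches (source names invented):
+
+	void f (int n)
+	{
+	  { int a[n]; a[0] = n; }
+	}
+
+   Here the __builtin_stack_restore emitted when a's scope exits is
+   followed only by the function exit, with no call or ASM_EXPR after
+   it, so it can be removed. */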
+
+static tree
+optimize_stack_restore (basic_block bb, tree call, block_stmt_iterator i)
+{
+ tree stack_save, stmt, callee;
+
+ if (TREE_CODE (call) != CALL_EXPR
+ || call_expr_nargs (call) != 1
+ || TREE_CODE (CALL_EXPR_ARG (call, 0)) != SSA_NAME
+ || !POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (call, 0))))
+ return NULL_TREE;
+
+ for (bsi_next (&i); !bsi_end_p (i); bsi_next (&i))
+ {
+ tree call;
+
+ stmt = bsi_stmt (i);
+ if (TREE_CODE (stmt) == ASM_EXPR)
+ return NULL_TREE;
+ call = get_call_expr_in (stmt);
+ if (call == NULL)
+ continue;
+
+ callee = get_callee_fndecl (call);
+ if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
+ return NULL_TREE;
+
+ if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
+ break;
+ }
+
+ if (bsi_end_p (i)
+ && (! single_succ_p (bb)
+ || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
+ return NULL_TREE;
+
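+ /* The restored pointer must come directly from a matching
+    __builtin_stack_save with no other uses. */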
+ stack_save = SSA_NAME_DEF_STMT (CALL_EXPR_ARG (call, 0));
+ if (TREE_CODE (stack_save) != GIMPLE_MODIFY_STMT
+ || GIMPLE_STMT_OPERAND (stack_save, 0) != CALL_EXPR_ARG (call, 0)
+ || TREE_CODE (GIMPLE_STMT_OPERAND (stack_save, 1)) != CALL_EXPR
+ || tree_could_throw_p (stack_save)
+ || !has_single_use (CALL_EXPR_ARG (call, 0)))
+ return NULL_TREE;
+
+ callee = get_callee_fndecl (GIMPLE_STMT_OPERAND (stack_save, 1));
+ if (!callee
+ || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
+ || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
+ || call_expr_nargs (GIMPLE_STMT_OPERAND (stack_save, 1)) != 0)
+ return NULL_TREE;
+
+ stmt = stack_save;
+ push_stmt_changes (&stmt);
+ if (!set_rhs (&stmt,
+ build_int_cst (TREE_TYPE (CALL_EXPR_ARG (call, 0)), 0)))
+ {
+ discard_stmt_changes (&stmt);
+ return NULL_TREE;
+ }
+ gcc_assert (stmt == stack_save);
+ pop_stmt_changes (&stmt);
+
+ return integer_zero_node;
+}
+\f
+/* If the va_list type is a simple pointer and nothing special is needed,
+   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
+   optimize __builtin_va_end (&ap) away as a NOP, and optimize
+   __builtin_va_copy into a simple pointer assignment. */
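+
+/* A hedged sketch (assuming a target whose va_list is a plain pointer):
+
+	va_start (ap, last);	becomes   ap = __builtin_next_arg (0);
+	va_copy (aq, ap);	becomes   aq = ap;
+	va_end (ap);		is removed as a NOP.  */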
+
+static tree
+optimize_stdarg_builtin (tree call)
+{
+ tree callee, lhs, rhs;
+ bool va_list_simple_ptr;
+
+ if (TREE_CODE (call) != CALL_EXPR)
+ return NULL_TREE;
+
+ va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
+ && (TREE_TYPE (va_list_type_node) == void_type_node
+ || TREE_TYPE (va_list_type_node) == char_type_node);
+
+ callee = get_callee_fndecl (call);
+ switch (DECL_FUNCTION_CODE (callee))
+ {
+ case BUILT_IN_VA_START:
+ if (!va_list_simple_ptr
+ || targetm.expand_builtin_va_start != NULL
+ || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
+ return NULL_TREE;
+
+ if (call_expr_nargs (call) != 2)
+ return NULL_TREE;
+
+ lhs = CALL_EXPR_ARG (call, 0);
+ if (!POINTER_TYPE_P (TREE_TYPE (lhs))
+ || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
+ != TYPE_MAIN_VARIANT (va_list_type_node))
+ return NULL_TREE;
+
+ lhs = build_fold_indirect_ref (lhs);
+ rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
+ 1, integer_zero_node);
+ rhs = fold_convert (TREE_TYPE (lhs), rhs);
+ return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
+
+ case BUILT_IN_VA_COPY:
+ if (!va_list_simple_ptr)
+ return NULL_TREE;
+
+ if (call_expr_nargs (call) != 2)
+ return NULL_TREE;
+
+ lhs = CALL_EXPR_ARG (call, 0);
+ if (!POINTER_TYPE_P (TREE_TYPE (lhs))
+ || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
+ != TYPE_MAIN_VARIANT (va_list_type_node))
+ return NULL_TREE;
+
+ lhs = build_fold_indirect_ref (lhs);
+ rhs = CALL_EXPR_ARG (call, 1);
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
+ != TYPE_MAIN_VARIANT (va_list_type_node))
+ return NULL_TREE;
+
+ rhs = fold_convert (TREE_TYPE (lhs), rhs);
+ return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
+
+ case BUILT_IN_VA_END:
+ return integer_zero_node;
+
+ default:
+ gcc_unreachable ();
+ }
+}
+\f
/* Convert EXPR into a GIMPLE value suitable for substitution on the
RHS of an assignment. Insert the necessary statements before
iterator *SI_P.
{
bool cfg_changed = false;
basic_block bb;
+ unsigned int todoflags = 0;
+
FOR_EACH_BB (bb)
{
block_stmt_iterator i;
result = integer_zero_node;
break;
+ case BUILT_IN_STACK_RESTORE:
+ result = optimize_stack_restore (bb, *stmtp, i);
+ if (result)
+ break;
+ bsi_next (&i);
+ continue;
+
+ case BUILT_IN_VA_START:
+ case BUILT_IN_VA_END:
+ case BUILT_IN_VA_COPY:
+ /* These shouldn't be folded before pass_stdarg. */
+ result = optimize_stdarg_builtin (*stmtp);
+ if (result)
+ break;
+ /* FALLTHRU */
+
default:
bsi_next (&i);
continue;
{
bool ok = set_rhs (stmtp, result);
gcc_assert (ok);
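+ /* Folding can introduce new pointer assignments (e.g. via
+    optimize_stdarg_builtin), so presumably alias information must be
+    recomputed. */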
+ todoflags |= TODO_rebuild_alias;
}
}
bsi_next (&i);
}
}
-
+
/* Delete unreachable blocks. */
- return cfg_changed ? TODO_cleanup_cfg : 0;
+ if (cfg_changed)
+ todoflags |= TODO_cleanup_cfg;
+
+ return todoflags;
}