#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
+#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
-static tree make_range (tree, int *, tree *, tree *);
+static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
-static tree extract_muldiv (tree, tree, enum tree_code, tree);
-static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
-static int multiple_of_p (tree, tree, tree);
+static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
+static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
tree, tree,
tree, tree, int);
return build_int_cst_wide (type, quol, quoh);
}
\f
+/* This is non-zero if we should defer warnings about undefined
+ overflow. This facility exists because these warnings are a
+ special case. The code to estimate loop iterations does not want
+ to issue any warnings, since it works with expressions which do not
+ occur in user code. Various bits of cleanup code call fold(), but
+ only use the result if it has certain characteristics (e.g., is a
+ constant); that code only wants to issue a warning if the result is
+ used. This is a nesting count: it is incremented by
+ fold_defer_overflow_warnings and decremented by
+ fold_undefer_overflow_warnings, so deferral stays active while any
+ caller in the chain has deferred. */
+
+static int fold_deferring_overflow_warnings;
+
+/* If a warning about undefined overflow is deferred, this is the
+ warning. Note that this may cause us to turn two warnings into
+ one, but that is fine since it is sufficient to only give one
+ warning per expression. NULL when no warning is pending. */
+
+static const char* fold_deferred_overflow_warning;
+
+/* If a warning about undefined overflow is deferred, this is the
+ level at which the warning should be emitted. Only meaningful
+ while fold_deferred_overflow_warning is non-NULL. */
+
+static enum warn_strict_overflow_code fold_deferred_overflow_code;
+
+/* Start deferring overflow warnings. We could use a stack here to
+ permit nested calls, but at present it is not necessary. Because
+ this is a counter rather than a flag, nested defer/undefer pairs
+ are tolerated: the warning is only considered for issuing when the
+ outermost caller undefers. */
+
+void
+fold_defer_overflow_warnings (void)
+{
+ ++fold_deferring_overflow_warnings;
+}
+
+/* Stop deferring overflow warnings. If there is a pending warning,
+ and ISSUE is true, then issue the warning if appropriate. STMT is
+ the statement with which the warning should be associated (used for
+ location information); STMT may be NULL. CODE is the level of the
+ warning--a warn_strict_overflow_code value. This function will use
+ the smaller of CODE and the deferred code when deciding whether to
+ issue the warning. CODE may be zero to mean to always use the
+ deferred code. */
+
+void
+fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
+{
+ const char *warnmsg;
+ location_t locus;
+
+ gcc_assert (fold_deferring_overflow_warnings > 0);
+ --fold_deferring_overflow_warnings;
+ if (fold_deferring_overflow_warnings > 0)
+ {
+ /* An outer caller is still deferring: just merge CODE into the
+ pending warning's level (keeping the smaller value, since a
+ smaller code is issued at lower -Wstrict-overflow levels) and
+ leave the warning pending for the outermost undefer. */
+ if (fold_deferred_overflow_warning != NULL
+ && code != 0
+ && code < (int) fold_deferred_overflow_code)
+ fold_deferred_overflow_code = code;
+ return;
+ }
+
+ /* This was the outermost deferral; take ownership of the pending
+ warning and clear it so it cannot be issued twice. */
+ warnmsg = fold_deferred_overflow_warning;
+ fold_deferred_overflow_warning = NULL;
+
+ if (!issue || warnmsg == NULL)
+ return;
+
+ /* Use the smallest code level when deciding to issue the
+ warning. */
+ if (code == 0 || code > (int) fold_deferred_overflow_code)
+ code = fold_deferred_overflow_code;
+
+ if (!issue_strict_overflow_warning (code))
+ return;
+
+ /* Attach the warning to STMT's location when one is available;
+ otherwise fall back to the global input_location. */
+ if (stmt == NULL_TREE || !expr_has_location (stmt))
+ locus = input_location;
+ else
+ locus = expr_location (stmt);
+ warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
+}
+
+/* Stop deferring overflow warnings, ignoring any deferred
+ warnings. Convenience wrapper: ISSUE == false discards whatever
+ warning may be pending. */
+
+void
+fold_undefer_and_ignore_overflow_warnings (void)
+{
+ fold_undefer_overflow_warnings (false, NULL_TREE, 0);
+}
+
+/* Whether we are deferring overflow warnings, i.e. whether at least
+ one fold_defer_overflow_warnings call is outstanding. */
+
+bool
+fold_deferring_overflow_warnings_p (void)
+{
+ return fold_deferring_overflow_warnings > 0;
+}
+
+/* This is called when we fold something based on the fact that signed
+ overflow is undefined. GMSGID is the warning text; WC is its
+ severity level (a warn_strict_overflow_code). If warnings are
+ currently deferred, record the warning instead of issuing it;
+ otherwise issue it now when -Wstrict-overflow covers WC. */
+
+static void
+fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
+{
+ /* Folding on the assumption of undefined overflow is only valid
+ when overflow neither wraps (-fwrapv) nor traps (-ftrapv). */
+ gcc_assert (!flag_wrapv && !flag_trapv);
+ if (fold_deferring_overflow_warnings > 0)
+ {
+ /* Keep only the warning with the smallest code — one warning
+ per expression is sufficient, and the smallest code is the one
+ issued at the lowest -Wstrict-overflow level. */
+ if (fold_deferred_overflow_warning == NULL
+ || wc < fold_deferred_overflow_code)
+ {
+ fold_deferred_overflow_warning = gmsgid;
+ fold_deferred_overflow_code = wc;
+ }
+ }
+ else if (issue_strict_overflow_warning (wc))
+ warning (OPT_Wstrict_overflow, gmsgid);
+}
+\f
/* Return true if the built-in mathematical function specified by CODE
is odd, i.e. -f(x) == f(-x). */
switch (TREE_CODE (t))
{
case INTEGER_CST:
- if (TYPE_UNSIGNED (type)
- || (flag_wrapv && ! flag_trapv))
+ if (TYPE_OVERFLOW_WRAPS (type))
return true;
/* Check that -CST will not overflow type. */
return may_negate_without_overflow_p (t);
case BIT_NOT_EXPR:
- return INTEGRAL_TYPE_P (type)
- && (TYPE_UNSIGNED (type)
- || (flag_wrapv && !flag_trapv));
+ return (INTEGRAL_TYPE_P (type)
+ && TYPE_OVERFLOW_WRAPS (type));
case REAL_CST:
case NEGATE_EXPR:
return negate_expr_p (TREE_REALPART (t))
&& negate_expr_p (TREE_IMAGPART (t));
+ case COMPLEX_EXPR:
+ return negate_expr_p (TREE_OPERAND (t, 0))
+ && negate_expr_p (TREE_OPERAND (t, 1));
+
+ case CONJ_EXPR:
+ return negate_expr_p (TREE_OPERAND (t, 0));
+
case PLUS_EXPR:
if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
|| HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
- if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
+ /* In general we can't negate A / B, because if A is INT_MIN and
+ B is 1, we may turn this into INT_MIN / -1 which is undefined
+ and actually traps on some architectures. But if overflow is
+ undefined, we can negate, because - (INT_MIN / 1) is an
+ overflow. */
+ if (INTEGRAL_TYPE_P (TREE_TYPE (t))
+ && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
break;
return negate_expr_p (TREE_OPERAND (t, 1))
|| negate_expr_p (TREE_OPERAND (t, 0));
case CALL_EXPR:
/* Negate -f(x) as f(-x). */
if (negate_mathfn_p (builtin_mathfn_code (t)))
- return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
+ return negate_expr_p (CALL_EXPR_ARG (t, 0));
break;
case RSHIFT_EXPR:
case INTEGER_CST:
tem = fold_negate_const (t, type);
- if (!TREE_OVERFLOW (tem)
- || TYPE_UNSIGNED (type)
- || !flag_trapv)
+ if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
+ || !TYPE_OVERFLOW_TRAPS (type))
return tem;
break;
}
break;
+ case COMPLEX_EXPR:
+ if (negate_expr_p (t))
+ return fold_build2 (COMPLEX_EXPR, type,
+ fold_negate_expr (TREE_OPERAND (t, 0)),
+ fold_negate_expr (TREE_OPERAND (t, 1)));
+ break;
+
+ case CONJ_EXPR:
+ if (negate_expr_p (t))
+ return fold_build1 (CONJ_EXPR, type,
+ fold_negate_expr (TREE_OPERAND (t, 0)));
+ break;
+
case NEGATE_EXPR:
return TREE_OPERAND (t, 0);
case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
- if (!TYPE_UNSIGNED (type) && !flag_wrapv)
+ /* In general we can't negate A / B, because if A is INT_MIN and
+ B is 1, we may turn this into INT_MIN / -1 which is undefined
+ and actually traps on some architectures. But if overflow is
+ undefined, we can negate, because - (INT_MIN / 1) is an
+ overflow. */
+ if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
{
+ const char * const warnmsg = G_("assuming signed overflow does not "
+ "occur when negating a division");
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
- return fold_build2 (TREE_CODE (t), type,
- TREE_OPERAND (t, 0), negate_expr (tem));
+ {
+ if (INTEGRAL_TYPE_P (type)
+ && (TREE_CODE (tem) != INTEGER_CST
+ || integer_onep (tem)))
+ fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
+ return fold_build2 (TREE_CODE (t), type,
+ TREE_OPERAND (t, 0), negate_expr (tem));
+ }
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
- return fold_build2 (TREE_CODE (t), type,
- negate_expr (tem), TREE_OPERAND (t, 1));
+ {
+ if (INTEGRAL_TYPE_P (type)
+ && (TREE_CODE (tem) != INTEGER_CST
+ || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
+ fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
+ return fold_build2 (TREE_CODE (t), type,
+ negate_expr (tem), TREE_OPERAND (t, 1));
+ }
}
break;
case CALL_EXPR:
/* Negate -f(x) as f(-x). */
if (negate_mathfn_p (builtin_mathfn_code (t))
- && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
+ && negate_expr_p (CALL_EXPR_ARG (t, 0)))
{
- tree fndecl, arg, arglist;
+ tree fndecl, arg;
fndecl = get_callee_fndecl (t);
- arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
- arglist = build_tree_list (NULL_TREE, arg);
- return build_function_call_expr (fndecl, arglist);
+ arg = negate_expr (CALL_EXPR_ARG (t, 0));
+ return build_call_expr (fndecl, 1, arg);
}
break;
&& operand_equal_p (TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg1, 0), flags));
+ default:
+ return 0;
+ }
+
+ case tcc_vl_exp:
+ switch (TREE_CODE (arg0))
+ {
case CALL_EXPR:
/* If the CALL_EXPRs call different functions, then they
clearly can not be equal. */
- if (!OP_SAME (0))
+ if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
+ flags))
return 0;
{
return 0;
}
- /* Now see if all the arguments are the same. operand_equal_p
- does not handle TREE_LIST, so we walk the operands here
- feeding them to operand_equal_p. */
- arg0 = TREE_OPERAND (arg0, 1);
- arg1 = TREE_OPERAND (arg1, 1);
- while (arg0 && arg1)
- {
- if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
- flags))
+ /* Now see if all the arguments are the same. */
+ {
+ call_expr_arg_iterator iter0, iter1;
+ tree a0, a1;
+ for (a0 = first_call_expr_arg (arg0, &iter0),
+ a1 = first_call_expr_arg (arg1, &iter1);
+ a0 && a1;
+ a0 = next_call_expr_arg (&iter0),
+ a1 = next_call_expr_arg (&iter1))
+ if (! operand_equal_p (a0, a1, flags))
return 0;
- arg0 = TREE_CHAIN (arg0);
- arg1 = TREE_CHAIN (arg1);
- }
-
- /* If we get here and both argument lists are exhausted
- then the CALL_EXPRs are equal. */
- return ! (arg0 || arg1);
-
+ /* If we get here and both argument lists are exhausted
+ then the CALL_EXPRs are equal. */
+ return ! (a0 || a1);
+ }
default:
return 0;
}
/* Make sure shorter operand is extended the right way
to match the longer operand. */
- primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
+ primarg1 = fold_convert (get_signed_or_unsigned_type
(unsignedp1, TREE_TYPE (primarg1)), primarg1);
if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
\f
/* Given EXP, a logical expression, set the range it is testing into
variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
- actually being tested. *PLOW and *PHIGH will be made of the same type
- as the returned expression. If EXP is not a comparison, we will most
- likely not be returning a useful value and range. */
+ actually being tested. *PLOW and *PHIGH will be made of the same
+ type as the returned expression. If EXP is not a comparison, we
+ will most likely not be returning a useful value and range. Set
+ *STRICT_OVERFLOW_P to true if the return value is only valid
+ because signed overflow is undefined; otherwise, do not change
+ *STRICT_OVERFLOW_P. */
static tree
-make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
+make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
+ bool *strict_overflow_p)
{
enum tree_code code;
tree arg0 = NULL_TREE, arg1 = NULL_TREE;
if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
{
- if (TREE_CODE_LENGTH (code) > 0)
+ if (TREE_OPERAND_LENGTH (exp) > 0)
arg0 = TREE_OPERAND (exp, 0);
if (TREE_CODE_CLASS (code) == tcc_comparison
|| TREE_CODE_CLASS (code) == tcc_unary
if (TREE_CODE_CLASS (code) == tcc_binary
|| TREE_CODE_CLASS (code) == tcc_comparison
|| (TREE_CODE_CLASS (code) == tcc_expression
- && TREE_CODE_LENGTH (code) > 1))
+ && TREE_OPERAND_LENGTH (exp) > 1))
arg1 = TREE_OPERAND (exp, 1);
}
/* If flag_wrapv and ARG0_TYPE is signed, then we cannot
move a constant to the other side. */
- if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
+ if (!TYPE_UNSIGNED (arg0_type)
+ && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
break;
/* If EXP is signed, any overflow in the computation is undefined,
|| (n_high != 0 && TREE_OVERFLOW (n_high)))
break;
+ if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
+ *strict_overflow_p = true;
+
/* Check for an unsigned range which has wrapped around the maximum
value thus making n_high < n_low, and normalize it. */
if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
/* If we don't have wrap-around arithmetics upfront, try to force it. */
if (TREE_CODE (etype) == INTEGER_TYPE
- && !TYPE_UNSIGNED (etype) && !flag_wrapv)
+ && !TYPE_OVERFLOW_WRAPS (etype))
{
tree utype, minv, maxv;
|| code == TRUTH_OR_EXPR);
int in0_p, in1_p, in_p;
tree low0, low1, low, high0, high1, high;
- tree lhs = make_range (op0, &in0_p, &low0, &high0);
- tree rhs = make_range (op1, &in1_p, &low1, &high1);
+ bool strict_overflow_p = false;
+ tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
+ tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
tree tem;
+ const char * const warnmsg = G_("assuming signed overflow does not occur "
+ "when simplifying range test");
/* If this is an OR operation, invert both sides; we will invert
again at the end. */
lhs != 0 ? lhs
: rhs != 0 ? rhs : integer_zero_node,
in_p, low, high))))
- return or_op ? invert_truthvalue (tem) : tem;
+ {
+ if (strict_overflow_p)
+ fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
+ return or_op ? invert_truthvalue (tem) : tem;
+ }
/* On machines where the branch cost is expensive, if this is a
short-circuited branch and the underlying object on both sides
&& (0 != (rhs = build_range_check (type, common,
or_op ? ! in1_p : in1_p,
low1, high1))))
- return build2 (code == TRUTH_ANDIF_EXPR
- ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
- type, lhs, rhs);
+ {
+ if (strict_overflow_p)
+ fold_overflow_warning (warnmsg,
+ WARN_STRICT_OVERFLOW_COMPARISON);
+ return build2 (code == TRUTH_ANDIF_EXPR
+ ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
+ type, lhs, rhs);
+ }
}
}
addressing calculation.
If we return a non-null expression, it is an equivalent form of the
- original computation, but need not be in the original type. */
+ original computation, but need not be in the original type.
+
+ We set *STRICT_OVERFLOW_P to true if the return values depends on
+ signed overflow being undefined. Otherwise we do not change
+ *STRICT_OVERFLOW_P. */
static tree
-extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
+extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
+ bool *strict_overflow_p)
{
/* To avoid exponential search depth, refuse to allow recursion past
three levels. Beyond that (1) it's highly unlikely that we'll find
return NULL;
depth++;
- ret = extract_muldiv_1 (t, c, code, wide_type);
+ ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
depth--;
return ret;
}
static tree
-extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
+extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
+ bool *strict_overflow_p)
{
tree type = TREE_TYPE (t);
enum tree_code tcode = TREE_CODE (t);
tree t1, t2;
int same_p = tcode == code;
tree op0 = NULL_TREE, op1 = NULL_TREE;
+ bool sub_strict_overflow_p;
/* Don't deal with constants of zero here; they confuse the code below. */
if (integer_zerop (c))
if ((COMPARISON_CLASS_P (op0)
|| UNARY_CLASS_P (op0)
|| BINARY_CLASS_P (op0)
+ || VL_EXP_CLASS_P (op0)
|| EXPRESSION_CLASS_P (op0))
/* ... and is unsigned, and its type is smaller than ctype,
then we cannot pass through as widening. */
&& !TREE_OVERFLOW (t2)
&& (0 != (t1 = extract_muldiv (op0, t2, code,
code == MULT_EXPR
- ? ctype : NULL_TREE))))
+ ? ctype : NULL_TREE,
+ strict_overflow_p))))
return t1;
break;
if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
{
tree cstype = (*lang_hooks.types.signed_type) (ctype);
- if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
+ if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
+ != 0)
{
t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
return fold_convert (ctype, t1);
}
/* FALLTHROUGH */
case NEGATE_EXPR:
- if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
+ if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
+ != 0)
return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
break;
break;
/* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
- if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
- && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
+ sub_strict_overflow_p = false;
+ if ((t1 = extract_muldiv (op0, c, code, wide_type,
+ &sub_strict_overflow_p)) != 0
+ && (t2 = extract_muldiv (op1, c, code, wide_type,
+ &sub_strict_overflow_p)) != 0)
{
if (tree_int_cst_sgn (c) < 0)
tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
-
+ if (sub_strict_overflow_p)
+ *strict_overflow_p = true;
return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
fold_convert (ctype, t2));
}
return extract_muldiv (build2 (tcode == LSHIFT_EXPR
? MULT_EXPR : FLOOR_DIV_EXPR,
ctype, fold_convert (ctype, op0), t1),
- c, code, wide_type);
+ c, code, wide_type, strict_overflow_p);
break;
case PLUS_EXPR: case MINUS_EXPR:
can return a new PLUS or MINUS. If we can't, the only remaining
cases where we can do anything are if the second operand is a
constant. */
- t1 = extract_muldiv (op0, c, code, wide_type);
- t2 = extract_muldiv (op1, c, code, wide_type);
+ sub_strict_overflow_p = false;
+ t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
+ t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
if (t1 != 0 && t2 != 0
&& (code == MULT_EXPR
/* If not multiplication, we can only do this if both operands
are divisible by c. */
|| (multiple_of_p (ctype, op0, c)
&& multiple_of_p (ctype, op1, c))))
- return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
- fold_convert (ctype, t2));
+ {
+ if (sub_strict_overflow_p)
+ *strict_overflow_p = true;
+ return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
+ fold_convert (ctype, t2));
+ }
/* If this was a subtraction, negate OP1 and set it to be an addition.
This simplifies the logic below. */
fold_convert (ctype, c), 0);
/* We allow the constant to overflow with wrapping semantics. */
if (op1 == 0
- || (TREE_OVERFLOW (op1) && ! flag_wrapv))
+ || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
break;
}
else
new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
do something only if the second operand is a constant. */
if (same_p
- && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
+ && (t1 = extract_muldiv (op0, c, code, wide_type,
+ strict_overflow_p)) != 0)
return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
fold_convert (ctype, op1));
else if (tcode == MULT_EXPR && code == MULT_EXPR
- && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
+ && (t1 = extract_muldiv (op1, c, code, wide_type,
+ strict_overflow_p)) != 0)
return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
fold_convert (ctype, t1));
else if (TREE_CODE (op1) != INTEGER_CST)
If we have an unsigned type that is not a sizetype, we cannot do
this since it will change the result if the original computation
overflowed. */
- if ((! TYPE_UNSIGNED (ctype)
+ if ((TYPE_OVERFLOW_UNDEFINED (ctype)
|| (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
- && ! flag_wrapv
&& ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
|| (tcode == MULT_EXPR
&& code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
&& code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
{
if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
- return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
- fold_convert (ctype,
- const_binop (TRUNC_DIV_EXPR,
- op1, c, 0)));
+ {
+ if (TYPE_OVERFLOW_UNDEFINED (ctype))
+ *strict_overflow_p = true;
+ return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
+ fold_convert (ctype,
+ const_binop (TRUNC_DIV_EXPR,
+ op1, c, 0)));
+ }
else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
- return fold_build2 (code, ctype, fold_convert (ctype, op0),
- fold_convert (ctype,
- const_binop (TRUNC_DIV_EXPR,
- c, op1, 0)));
+ {
+ if (TYPE_OVERFLOW_UNDEFINED (ctype))
+ *strict_overflow_p = true;
+ return fold_build2 (code, ctype, fold_convert (ctype, op0),
+ fold_convert (ctype,
+ const_binop (TRUNC_DIV_EXPR,
+ c, op1, 0)));
+ }
}
break;
if (BUILTIN_SQRT_P (fcode))
{
- tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
+ tree arg = CALL_EXPR_ARG (arg0, 0);
enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
c = TREE_REAL_CST (arg1);
&& (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
return 0;
- if (DECL_P (arg1))
- return 0;
- if (DECL_P (arg0))
- return 1;
-
/* It is preferable to swap two SSA_NAME to ensure a canonical form
for commutative and comparison operators. Ensuring a canonical
form allows the optimizers to find additional redundancies without
&& SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
return 1;
+ /* Put SSA_NAMEs last. */
+ if (TREE_CODE (arg1) == SSA_NAME)
+ return 0;
+ if (TREE_CODE (arg0) == SSA_NAME)
+ return 1;
+
+ /* Put variables last. */
+ if (DECL_P (arg1))
+ return 0;
+ if (DECL_P (arg0))
+ return 1;
+
return 0;
}
tree ref = TREE_OPERAND (addr, 0), pref;
tree ret, pos;
tree itype;
+ bool mdim = false;
/* Canonicalize op1 into a possibly non-constant delta
and an INTEGER_CST s. */
{
if (TREE_CODE (ref) == ARRAY_REF)
{
+ /* Remember if this was a multi-dimensional array. */
+ if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
+ mdim = true;
+
itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
if (! itype)
continue;
delta = tmp;
}
+ /* Only fold here if we can verify we do not overflow one
+ dimension of a multi-dimensional array. */
+ if (mdim)
+ {
+ tree tmp;
+
+ if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
+ || !INTEGRAL_TYPE_P (itype)
+ || !TYPE_MAX_VALUE (itype)
+ || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
+ continue;
+
+ tmp = fold_binary (code, itype,
+ fold_convert (itype,
+ TREE_OPERAND (ref, 1)),
+ fold_convert (itype, delta));
+ if (!tmp
+ || TREE_CODE (tmp) != INTEGER_CST
+ || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
+ continue;
+ }
+
break;
}
+ else
+ mdim = false;
if (!handled_component_p (ref))
return NULL_TREE;
{
tree type = TREE_TYPE (expr);
int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
- int byte, offset, word, words;
+ int byte, offset, word, words, bitpos;
unsigned char value;
/* There are always 32 bits in each long, no matter the size of
if (total_bytes > len)
return 0;
- words = total_bytes / UNITS_PER_WORD;
+ words = 32 / UNITS_PER_WORD;
real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
- for (byte = 0; byte < total_bytes; byte++)
+ for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
+ bitpos += BITS_PER_UNIT)
{
- int bitpos = byte * BITS_PER_UNIT;
+ byte = (bitpos / BITS_PER_UNIT) & 3;
value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
- if (total_bytes > UNITS_PER_WORD)
+ if (UNITS_PER_WORD < 4)
{
word = byte / UNITS_PER_WORD;
- if (FLOAT_WORDS_BIG_ENDIAN)
+ if (WORDS_BIG_ENDIAN)
word = (words - 1) - word;
offset = word * UNITS_PER_WORD;
if (BYTES_BIG_ENDIAN)
offset += byte % UNITS_PER_WORD;
}
else
- offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
- ptr[offset] = value;
+ offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
+ ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
}
return total_bytes;
}
{
enum machine_mode mode = TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
- int byte, offset, word, words;
+ int byte, offset, word, words, bitpos;
unsigned char value;
/* There are always 32 bits in each long, no matter the size of
the hosts long. We handle floating point representations with
total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
if (total_bytes > len || total_bytes > 24)
return NULL_TREE;
- words = total_bytes / UNITS_PER_WORD;
+ words = 32 / UNITS_PER_WORD;
memset (tmp, 0, sizeof (tmp));
- for (byte = 0; byte < total_bytes; byte++)
+ for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
+ bitpos += BITS_PER_UNIT)
{
- int bitpos = byte * BITS_PER_UNIT;
- if (total_bytes > UNITS_PER_WORD)
+ byte = (bitpos / BITS_PER_UNIT) & 3;
+ if (UNITS_PER_WORD < 4)
{
word = byte / UNITS_PER_WORD;
- if (FLOAT_WORDS_BIG_ENDIAN)
+ if (WORDS_BIG_ENDIAN)
word = (words - 1) - word;
offset = word * UNITS_PER_WORD;
if (BYTES_BIG_ENDIAN)
offset += byte % UNITS_PER_WORD;
}
else
- offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
- value = ptr[offset];
+ offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
+ value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
}
return fold_convert (type, build_fold_addr_expr (base));
}
+ /* Convert (type *)&A into &A->field_of_type_and_offset_0. */
+ if (TREE_CODE (op0) == ADDR_EXPR && POINTER_TYPE_P (type)
+ && (tem = maybe_fold_offset_to_component_ref
+ (TREE_TYPE (TREE_OPERAND (op0, 0)), TREE_OPERAND (op0, 0),
+ integer_zero_node, TREE_TYPE (type), false)))
+ return build_fold_addr_expr_with_type (tem, type);
+
if ((TREE_CODE (op0) == MODIFY_EXPR
|| TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
&& TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
targ0));
}
/* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
- else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
+ else if (TREE_CODE (arg0) == ABS_EXPR)
+ return arg0;
+ else if (tree_expr_nonnegative_p (arg0))
return arg0;
/* Strip sign ops from argument. */
{
CASE_FLT_FN (BUILT_IN_CEXPI):
fn = mathfn_built_in (type, BUILT_IN_COS);
- return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
+ if (fn)
+ return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
+ break;
- default:;
+ default:
+ break;
}
}
return NULL_TREE;
{
CASE_FLT_FN (BUILT_IN_CEXPI):
fn = mathfn_built_in (type, BUILT_IN_SIN);
- return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
+ if (fn)
+ return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
+ break;
- default:;
+ default:
+ break;
}
}
return NULL_TREE;
by changing CODE to reduce the magnitude of constants involved in
ARG0 of the comparison.
Returns a canonicalized comparison tree if a simplification was
- possible, otherwise returns NULL_TREE. */
+ possible, otherwise returns NULL_TREE.
+ Set *STRICT_OVERFLOW_P to true if the canonicalization is only
+ valid if signed overflow is undefined. */
static tree
maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
- tree arg0, tree arg1)
+ tree arg0, tree arg1,
+ bool *strict_overflow_p)
{
enum tree_code code0 = TREE_CODE (arg0);
tree t, cst0 = NULL_TREE;
code = GT_EXPR;
else
return NULL_TREE;
+ *strict_overflow_p = true;
}
/* Now build the constant reduced in magnitude. */
tree arg0, tree arg1)
{
tree t;
+ bool strict_overflow_p;
+ const char * const warnmsg = G_("assuming signed overflow does not occur "
+ "when reducing constant in comparison");
/* In principle pointers also have undefined overflow behavior,
but that causes problems elsewhere. */
- if ((flag_wrapv || flag_trapv)
- || (TYPE_UNSIGNED (TREE_TYPE (arg0))
- || POINTER_TYPE_P (TREE_TYPE (arg0))))
+ if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
+ || POINTER_TYPE_P (TREE_TYPE (arg0)))
return NULL_TREE;
/* Try canonicalization by simplifying arg0. */
- t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
+ strict_overflow_p = false;
+ t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
+ &strict_overflow_p);
if (t)
- return t;
+ {
+ if (strict_overflow_p)
+ fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
+ return t;
+ }
/* Try canonicalization by simplifying arg1 using the swapped
comparison. */
code = swap_tree_comparison (code);
- return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
+ strict_overflow_p = false;
+ t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
+ &strict_overflow_p);
+ if (t && strict_overflow_p)
+ fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
+ return t;
}
/* Subroutine of fold_binary. This routine performs all of the
if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
- && !TYPE_UNSIGNED (TREE_TYPE (arg1))
- && !(flag_wrapv || flag_trapv))
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
&& (TREE_CODE (arg1) == INTEGER_CST
&& !TREE_OVERFLOW (arg1)))
{
lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
TREE_TYPE (arg1), const2, const1);
+
+ /* If the constant operation overflowed this can be
+ simplified as a comparison against INT_MAX/INT_MIN. */
+ if (TREE_CODE (lhs) == INTEGER_CST
+ && TREE_OVERFLOW (lhs))
+ {
+ int const1_sgn = tree_int_cst_sgn (const1);
+ enum tree_code code2 = code;
+
+ /* Get the sign of the constant on the lhs if the
+ operation were VARIABLE + CONST1. */
+ if (TREE_CODE (arg0) == MINUS_EXPR)
+ const1_sgn = -const1_sgn;
+
+ /* The sign of the constant determines if we overflowed
+ INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
+ Canonicalize to the INT_MIN overflow by swapping the comparison
+ if necessary. */
+ if (const1_sgn == -1)
+ code2 = swap_tree_comparison (code);
+
+ /* We now can look at the canonicalized case
+ VARIABLE + 1 CODE2 INT_MIN
+ and decide on the result. */
+ if (code2 == LT_EXPR
+ || code2 == LE_EXPR
+ || code2 == EQ_EXPR)
+ return omit_one_operand (type, boolean_false_node, variable);
+ else if (code2 == NE_EXPR
+ || code2 == GE_EXPR
+ || code2 == GT_EXPR)
+ return omit_one_operand (type, boolean_true_node, variable);
+ }
+
if (TREE_CODE (lhs) == TREE_CODE (arg1)
&& (TREE_CODE (lhs) != INTEGER_CST
|| !TREE_OVERFLOW (lhs)))
- return fold_build2 (code, type, variable, lhs);
+ {
+ fold_overflow_warning (("assuming signed overflow does not occur "
+ "when changing X +- C1 cmp C2 to "
+ "X cmp C1 +- C2"),
+ WARN_STRICT_OVERFLOW_COMPARISON);
+ return fold_build2 (code, type, variable, lhs);
+ }
}
/* For comparisons of pointers we can decompose it to a compile time
same object, then we can fold this to a comparison of the two offsets in
signed size type. This is possible because pointer arithmetic is
restricted to retain within an object and overflow on pointer differences
- is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
+ is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
+
+ We check flag_wrapv directly because pointers types are unsigned,
+ and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
+ normally what we want to avoid certain odd overflow cases, but
+ not here. */
if (POINTER_TYPE_P (TREE_TYPE (arg0))
- && !flag_wrapv && !flag_trapv)
+ && !flag_wrapv
+ && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
{
tree base0, offset0, base1, offset1;
X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
the resulting offset is smaller in absolute value than the
original one. */
- if (!(flag_wrapv || flag_trapv)
- && !TYPE_UNSIGNED (TREE_TYPE (arg0))
+ if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
&& (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
tree variable1 = TREE_OPERAND (arg0, 0);
tree variable2 = TREE_OPERAND (arg1, 0);
tree cst;
+ const char * const warnmsg = G_("assuming signed overflow does not "
+ "occur when combining constants around "
+ "a comparison");
/* Put the constant on the side where it doesn't overflow and is
of lower absolute value than before. */
const2, const1, 0);
if (!TREE_OVERFLOW (cst)
&& tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
- return fold_build2 (code, type,
- variable1,
- fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
- variable2, cst));
+ {
+ fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
+ return fold_build2 (code, type,
+ variable1,
+ fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
+ variable2, cst));
+ }
cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
? MINUS_EXPR : PLUS_EXPR,
const1, const2, 0);
if (!TREE_OVERFLOW (cst)
&& tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
- return fold_build2 (code, type,
- fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
- variable1, cst),
- variable2);
+ {
+ fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
+ return fold_build2 (code, type,
+ fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
+ variable1, cst),
+ variable2);
+ }
}
/* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
signed arithmetic case. That form is created by the compiler
often enough for folding it to be of value. One example is in
computing loop trip counts after Operator Strength Reduction. */
- if (!(flag_wrapv || flag_trapv)
- && !TYPE_UNSIGNED (TREE_TYPE (arg0))
+ if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
&& TREE_CODE (arg0) == MULT_EXPR
&& (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
gcc_assert (!integer_zerop (const1));
+ fold_overflow_warning (("assuming signed overflow does not occur when "
+ "eliminating multiplication in comparison "
+ "with zero"),
+ WARN_STRICT_OVERFLOW_COMPARISON);
+
/* If const1 is negative we swap the sense of the comparison. */
if (tree_int_cst_sgn (const1) < 0)
cmp_code = swap_tree_comparison (cmp_code);
/* Fold ~X op ~Y as Y op X. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == BIT_NOT_EXPR)
- return fold_build2 (code, type,
- TREE_OPERAND (arg1, 0),
- TREE_OPERAND (arg0, 0));
+ {
+ tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
+ return fold_build2 (code, type,
+ fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
+ TREE_OPERAND (arg0, 0));
+ }
/* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == INTEGER_CST)
- return fold_build2 (swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 0),
- fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
+ {
+ tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
+ return fold_build2 (swap_tree_comparison (code), type,
+ TREE_OPERAND (arg0, 0),
+ fold_build1 (BIT_NOT_EXPR, cmp_type,
+ fold_convert (cmp_type, arg1)));
+ }
return NULL_TREE;
}
enum tree_code_class kind = TREE_CODE_CLASS (code);
tree arg0, arg1, tem;
tree t1 = NULL_TREE;
+ bool strict_overflow_p;
gcc_assert ((IS_EXPR_CODE_CLASS (kind)
|| IS_GIMPLE_STMT_CODE_CLASS (kind))
/* ~X + X is -1. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && !TYPE_TRAP_SIGNED (type))
+ && !TYPE_OVERFLOW_TRAPS (type))
{
t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
/* X + ~X is -1. */
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
- && !TYPE_TRAP_SIGNED (type))
+ && !TYPE_OVERFLOW_TRAPS (type))
{
t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
/* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
to __complex__ ( x, y ). This is not the same for SNaNs or
- if singed zeros are involved. */
+ if signed zeros are involved. */
if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
&& COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree var0, con0, lit0, minus_lit0;
tree var1, con1, lit1, minus_lit1;
+ bool ok = true;
/* Split both trees into variables, constants, and literals. Then
associate each group together, the constants with literals,
var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
code == MINUS_EXPR);
+ /* With undefined overflow we can only associate constants
+ with one variable. */
+ if ((POINTER_TYPE_P (type)
+ || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
+ && var0 && var1)
+ {
+ tree tmp0 = var0;
+ tree tmp1 = var1;
+
+ if (TREE_CODE (tmp0) == NEGATE_EXPR)
+ tmp0 = TREE_OPERAND (tmp0, 0);
+ if (TREE_CODE (tmp1) == NEGATE_EXPR)
+ tmp1 = TREE_OPERAND (tmp1, 0);
+ /* The only case we can still associate with two variables
+ is if they are the same, modulo negation. */
+ if (!operand_equal_p (tmp0, tmp1, 0))
+ ok = false;
+ }
+
/* Only do something if we found more than two objects. Otherwise,
nothing has changed and we risk infinite recursion. */
- if (2 < ((var0 != 0) + (var1 != 0)
- + (con0 != 0) + (con1 != 0)
- + (lit0 != 0) + (lit1 != 0)
- + (minus_lit0 != 0) + (minus_lit1 != 0)))
+ if (ok
+ && (2 < ((var0 != 0) + (var1 != 0)
+ + (con0 != 0) + (con1 != 0)
+ + (lit0 != 0) + (lit1 != 0)
+ + (minus_lit0 != 0) + (minus_lit1 != 0))))
{
/* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
if (code == MINUS_EXPR)
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (arg0) == NEGATE_EXPR
&& integer_onep (arg1)
- && !TYPE_TRAP_SIGNED (type))
+ && !TYPE_OVERFLOW_TRAPS (type))
return fold_build1 (BIT_NOT_EXPR, type,
fold_convert (type, TREE_OPERAND (arg0, 0)));
else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
return negate_expr (fold_convert (type, arg1));
+ /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
+ __complex__ ( x, -y ). This is not the same for SNaNs or if
+ signed zeros are involved. */
+ if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
+ && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+ && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
+ {
+ tree rtype = TREE_TYPE (TREE_TYPE (arg0));
+ tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
+ tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
+ bool arg0rz = false, arg0iz = false;
+ if ((arg0r && (arg0rz = real_zerop (arg0r)))
+ || (arg0i && (arg0iz = real_zerop (arg0i))))
+ {
+ tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
+ tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
+ if (arg0rz && arg1i && real_zerop (arg1i))
+ {
+ tree rp = fold_build1 (NEGATE_EXPR, rtype,
+ arg1r ? arg1r
+ : build1 (REALPART_EXPR, rtype, arg1));
+ tree ip = arg0i ? arg0i
+ : build1 (IMAGPART_EXPR, rtype, arg0);
+ return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ }
+ else if (arg0iz && arg1r && real_zerop (arg1r))
+ {
+ tree rp = arg0r ? arg0r
+ : build1 (REALPART_EXPR, rtype, arg0);
+ tree ip = fold_build1 (NEGATE_EXPR, rtype,
+ arg1i ? arg1i
+ : build1 (IMAGPART_EXPR, rtype, arg1));
+ return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ }
+ }
+ }
+
/* Fold &x - &x. This can happen from &x.foo - &x.
This is unsafe for certain floats even in non-IEEE formats.
In IEEE, it is unsafe because it does wrong for NaNs.
return fold_build2 (LSHIFT_EXPR, type, arg1,
TREE_OPERAND (arg0, 1));
+ strict_overflow_p = false;
if (TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = extract_muldiv (op0,
fold_convert (type, arg1),
- code, NULL_TREE)))
- return fold_convert (type, tem);
+ code, NULL_TREE,
+ &strict_overflow_p)))
+ {
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not "
+ "occur when simplifying "
+ "multiplication"),
+ WARN_STRICT_OVERFLOW_MISC);
+ return fold_convert (type, tem);
+ }
/* Optimize z * conj(z) for integer complex numbers. */
if (TREE_CODE (arg0) == CONJ_EXPR
}
/* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
- This is not the same for NaNs or if singed zeros are
+ This is not the same for NaNs or if signed zeros are
involved. */
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
/* Optimizations of root(...)*root(...). */
if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
{
- tree rootfn, arg, arglist;
- tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
- tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
+ tree rootfn, arg;
+ tree arg00 = CALL_EXPR_ARG (arg0, 0);
+ tree arg10 = CALL_EXPR_ARG (arg1, 0);
/* Optimize sqrt(x)*sqrt(x) as x. */
if (BUILTIN_SQRT_P (fcode0)
return arg00;
/* Optimize root(x)*root(y) as root(x*y). */
- rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
- arglist = build_tree_list (NULL_TREE, arg);
- return build_function_call_expr (rootfn, arglist);
+ return build_call_expr (rootfn, 1, arg);
}
/* Optimize expN(x)*expN(y) as expN(x+y). */
if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
{
- tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
tree arg = fold_build2 (PLUS_EXPR, type,
- TREE_VALUE (TREE_OPERAND (arg0, 1)),
- TREE_VALUE (TREE_OPERAND (arg1, 1)));
- tree arglist = build_tree_list (NULL_TREE, arg);
- return build_function_call_expr (expfn, arglist);
+ CALL_EXPR_ARG (arg0, 0),
+ CALL_EXPR_ARG (arg1, 0));
+ return build_call_expr (expfn, 1, arg);
}
/* Optimizations of pow(...)*pow(...). */
|| (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
|| (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
{
- tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
- tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
- 1)));
- tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
- tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
- 1)));
+ tree arg00 = CALL_EXPR_ARG (arg0, 0);
+ tree arg01 = CALL_EXPR_ARG (arg0, 1);
+ tree arg10 = CALL_EXPR_ARG (arg1, 0);
+ tree arg11 = CALL_EXPR_ARG (arg1, 1);
/* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
if (operand_equal_p (arg01, arg11, 0))
{
- tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
- tree arglist = tree_cons (NULL_TREE, arg,
- build_tree_list (NULL_TREE,
- arg01));
- return build_function_call_expr (powfn, arglist);
+ return build_call_expr (powfn, 2, arg, arg01);
}
/* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
if (operand_equal_p (arg00, arg10, 0))
{
- tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
- tree arglist = tree_cons (NULL_TREE, arg00,
- build_tree_list (NULL_TREE,
- arg));
- return build_function_call_expr (powfn, arglist);
+ return build_call_expr (powfn, 2, arg00, arg);
}
}
|| (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
|| (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
|| (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
- && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
- TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
+ && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
+ CALL_EXPR_ARG (arg1, 0), 0))
{
tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
if (sinfn != NULL_TREE)
- return build_function_call_expr (sinfn,
- TREE_OPERAND (arg0, 1));
+ return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
}
/* Optimize x*pow(x,c) as pow(x,c+1). */
|| fcode1 == BUILT_IN_POWF
|| fcode1 == BUILT_IN_POWL)
{
- tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
- tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
- 1)));
+ tree arg10 = CALL_EXPR_ARG (arg1, 0);
+ tree arg11 = CALL_EXPR_ARG (arg1, 1);
if (TREE_CODE (arg11) == REAL_CST
&& !TREE_OVERFLOW (arg11)
&& operand_equal_p (arg0, arg10, 0))
{
- tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
+ tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
REAL_VALUE_TYPE c;
- tree arg, arglist;
+ tree arg;
c = TREE_REAL_CST (arg11);
real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
arg = build_real (type, c);
- arglist = build_tree_list (NULL_TREE, arg);
- arglist = tree_cons (NULL_TREE, arg0, arglist);
- return build_function_call_expr (powfn, arglist);
+ return build_call_expr (powfn, 2, arg0, arg);
}
}
|| fcode0 == BUILT_IN_POWF
|| fcode0 == BUILT_IN_POWL)
{
- tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
- tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
- 1)));
+ tree arg00 = CALL_EXPR_ARG (arg0, 0);
+ tree arg01 = CALL_EXPR_ARG (arg0, 1);
if (TREE_CODE (arg01) == REAL_CST
&& !TREE_OVERFLOW (arg01)
&& operand_equal_p (arg1, arg00, 0))
{
- tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
REAL_VALUE_TYPE c;
- tree arg, arglist;
+ tree arg;
c = TREE_REAL_CST (arg01);
real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
arg = build_real (type, c);
- arglist = build_tree_list (NULL_TREE, arg);
- arglist = tree_cons (NULL_TREE, arg1, arglist);
- return build_function_call_expr (powfn, arglist);
+ return build_call_expr (powfn, 2, arg1, arg);
}
}
if (powfn)
{
tree arg = build_real (type, dconst2);
- tree arglist = build_tree_list (NULL_TREE, arg);
- arglist = tree_cons (NULL_TREE, arg0, arglist);
- return build_function_call_expr (powfn, arglist);
+ return build_call_expr (powfn, 2, arg0, arg);
}
}
}
if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
|| (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
|| (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
- && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
- TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
+ && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
+ CALL_EXPR_ARG (arg1, 0), 0))
{
tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
if (tanfn != NULL_TREE)
- return build_function_call_expr (tanfn,
- TREE_OPERAND (arg0, 1));
+ return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
}
/* Optimize cos(x)/sin(x) as 1.0/tan(x). */
if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
|| (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
|| (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
- && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
- TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
+ && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
+ CALL_EXPR_ARG (arg1, 0), 0))
{
tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
if (tanfn != NULL_TREE)
{
- tree tmp = TREE_OPERAND (arg0, 1);
- tmp = build_function_call_expr (tanfn, tmp);
+ tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
return fold_build2 (RDIV_EXPR, type,
build_real (type, dconst1), tmp);
}
|| (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
|| (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
{
- tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
- tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
+ tree arg00 = CALL_EXPR_ARG (arg0, 0);
+ tree arg01 = CALL_EXPR_ARG (arg1, 0);
if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
&& ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
if (cosfn != NULL_TREE)
- return build_function_call_expr (cosfn,
- TREE_OPERAND (arg0, 1));
+ return build_call_expr (cosfn, 1, arg00);
}
}
|| (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
|| (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
{
- tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
- tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
+ tree arg00 = CALL_EXPR_ARG (arg0, 0);
+ tree arg01 = CALL_EXPR_ARG (arg1, 0);
if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
&& ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
if (cosfn != NULL_TREE)
{
- tree tmp = TREE_OPERAND (arg0, 1);
- tmp = build_function_call_expr (cosfn, tmp);
+ tree tmp = build_call_expr (cosfn, 1, arg00);
return fold_build2 (RDIV_EXPR, type,
build_real (type, dconst1),
tmp);
|| fcode0 == BUILT_IN_POWF
|| fcode0 == BUILT_IN_POWL)
{
- tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
- tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
+ tree arg00 = CALL_EXPR_ARG (arg0, 0);
+ tree arg01 = CALL_EXPR_ARG (arg0, 1);
if (TREE_CODE (arg01) == REAL_CST
&& !TREE_OVERFLOW (arg01)
&& operand_equal_p (arg1, arg00, 0))
{
- tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
REAL_VALUE_TYPE c;
- tree arg, arglist;
+ tree arg;
c = TREE_REAL_CST (arg01);
real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
arg = build_real (type, c);
- arglist = build_tree_list (NULL_TREE, arg);
- arglist = tree_cons (NULL_TREE, arg1, arglist);
- return build_function_call_expr (powfn, arglist);
+ return build_call_expr (powfn, 2, arg1, arg);
}
}
/* Optimize x/expN(y) into x*expN(-y). */
if (BUILTIN_EXPONENT_P (fcode1))
{
- tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
- tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
- tree arglist = build_tree_list (NULL_TREE,
- fold_convert (type, arg));
- arg1 = build_function_call_expr (expfn, arglist);
+ tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
+ tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
+ arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
return fold_build2 (MULT_EXPR, type, arg0, arg1);
}
|| fcode1 == BUILT_IN_POWF
|| fcode1 == BUILT_IN_POWL)
{
- tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
- tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
- tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
+ tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
+ tree arg10 = CALL_EXPR_ARG (arg1, 0);
+ tree arg11 = CALL_EXPR_ARG (arg1, 1);
tree neg11 = fold_convert (type, negate_expr (arg11));
- tree arglist = tree_cons(NULL_TREE, arg10,
- build_tree_list (NULL_TREE, neg11));
- arg1 = build_function_call_expr (powfn, arglist);
+ arg1 = build_call_expr (powfn, 2, arg10, neg11);
return fold_build2 (MULT_EXPR, type, arg0, arg1);
}
}
case FLOOR_DIV_EXPR:
/* Simplify A / (B << N) where A and B are positive and B is
a power of 2, to A >> (N + log2(B)). */
+ strict_overflow_p = false;
if (TREE_CODE (arg1) == LSHIFT_EXPR
- && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
+ && (TYPE_UNSIGNED (type)
+ || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
{
tree sval = TREE_OPERAND (arg1, 0);
if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
tree sh_cnt = TREE_OPERAND (arg1, 1);
unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not "
+ "occur when simplifying A / (B << N)"),
+ WARN_STRICT_OVERFLOW_MISC);
+
sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
sh_cnt, build_int_cst (NULL_TREE, pow2));
return fold_build2 (RSHIFT_EXPR, type,
/* Convert -A / -B to A / B when the type is signed and overflow is
undefined. */
- if (!TYPE_UNSIGNED (type) && !flag_wrapv
+ if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
&& TREE_CODE (arg0) == NEGATE_EXPR
&& negate_expr_p (arg1))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
- negate_expr (arg1));
- if (!TYPE_UNSIGNED (type) && !flag_wrapv
+ {
+ if (INTEGRAL_TYPE_P (type))
+ fold_overflow_warning (("assuming signed overflow does not occur "
+ "when distributing negation across "
+ "division"),
+ WARN_STRICT_OVERFLOW_MISC);
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ negate_expr (arg1));
+ }
+ if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
&& TREE_CODE (arg1) == NEGATE_EXPR
&& negate_expr_p (arg0))
- return fold_build2 (code, type, negate_expr (arg0),
- TREE_OPERAND (arg1, 0));
+ {
+ if (INTEGRAL_TYPE_P (type))
+ fold_overflow_warning (("assuming signed overflow does not occur "
+ "when distributing negation across "
+ "division"),
+ WARN_STRICT_OVERFLOW_MISC);
+ return fold_build2 (code, type, negate_expr (arg0),
+ TREE_OPERAND (arg1, 0));
+ }
/* If arg0 is a multiple of arg1, then rewrite to the fastest div
operation, EXACT_DIV_EXPR.
&& multiple_of_p (type, arg0, arg1))
return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
+ strict_overflow_p = false;
if (TREE_CODE (arg1) == INTEGER_CST
- && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
- return fold_convert (type, tem);
+ && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
+ &strict_overflow_p)))
+ {
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not occur "
+ "when simplifying division"),
+ WARN_STRICT_OVERFLOW_MISC);
+ return fold_convert (type, tem);
+ }
return NULL_TREE;
/* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
+ strict_overflow_p = false;
if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
- && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
+ && (TYPE_UNSIGNED (type)
+ || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
{
tree c = arg1;
/* Also optimize A % (C << N) where C is a power of 2,
{
tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
build_int_cst (TREE_TYPE (arg1), 1));
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not "
+ "occur when simplifying "
+ "X % (power of two)"),
+ WARN_STRICT_OVERFLOW_MISC);
return fold_build2 (BIT_AND_EXPR, type,
fold_convert (type, arg0),
fold_convert (type, mask));
&& TREE_CODE (arg1) == INTEGER_CST
&& !TREE_OVERFLOW (arg1)
&& TREE_INT_CST_HIGH (arg1) < 0
- && !flag_trapv
+ && !TYPE_OVERFLOW_TRAPS (type)
/* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
&& !sign_bit_p (arg1, arg1))
return fold_build2 (code, type, fold_convert (type, arg0),
if (code == TRUNC_MOD_EXPR
&& !TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == NEGATE_EXPR
- && !flag_trapv)
+ && !TYPE_OVERFLOW_TRAPS (type))
return fold_build2 (code, type, fold_convert (type, arg0),
fold_convert (type, TREE_OPERAND (arg1, 0)));
if (TREE_CODE (arg1) == INTEGER_CST
- && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
- return fold_convert (type, tem);
+ && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
+ &strict_overflow_p)))
+ {
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not occur "
+ "when simplifying modulos"),
+ WARN_STRICT_OVERFLOW_MISC);
+ return fold_convert (type, tem);
+ }
return NULL_TREE;
&& code == EQ_EXPR)
return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
- /* If this is an equality comparison of the address of a non-weak
- object against zero, then we know the result. */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
- && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
- && integer_zerop (arg1))
- return constant_boolean_node (code != EQ_EXPR, type);
-
/* If this is an equality comparison of the address of two non-weak,
unaliased symbols neither of which are extern (since we do not
have access to attributes for externs), then we know the result. */
fold_convert (TREE_TYPE (arg0), arg1),
TREE_OPERAND (arg0, 1)));
+ /* Transform comparisons of the form X +- C CMP X. */
+ if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+ || POINTER_TYPE_P (TREE_TYPE (arg0))))
+ {
+ tree cst = TREE_OPERAND (arg0, 1);
+
+ if (code == EQ_EXPR
+ && !integer_zerop (cst))
+ return omit_two_operands (type, boolean_false_node,
+ TREE_OPERAND (arg0, 0), arg1);
+ else
+ return omit_two_operands (type, boolean_true_node,
+ TREE_OPERAND (arg0, 0), arg1);
+ }
+
/* If we have X - Y == 0, we can convert that to X == Y and similarly
for !=. Don't do this for ordered comparisons due to overflow. */
if (TREE_CODE (arg0) == MINUS_EXPR
&& integer_zerop (arg1))
{
tree fndecl = get_callee_fndecl (arg0);
- tree arglist;
if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
- && (arglist = TREE_OPERAND (arg0, 1))
- && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
- && ! TREE_CHAIN (arglist))
+ && call_expr_nargs (arg0) == 1
+ && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
{
- tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
+ tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
return fold_build2 (code, type, iref,
build_int_cst (TREE_TYPE (iref), 0));
}
arg01, arg11)),
arg10);
}
+
+ /* Attempt to simplify equality/inequality comparisons of complex
+ values. Only lower the comparison if the result is known or
+ can be simplified to a single scalar comparison. */
+ if ((TREE_CODE (arg0) == COMPLEX_EXPR
+ || TREE_CODE (arg0) == COMPLEX_CST)
+ && (TREE_CODE (arg1) == COMPLEX_EXPR
+ || TREE_CODE (arg1) == COMPLEX_CST))
+ {
+ tree real0, imag0, real1, imag1;
+ tree rcond, icond;
+
+ if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ {
+ real0 = TREE_OPERAND (arg0, 0);
+ imag0 = TREE_OPERAND (arg0, 1);
+ }
+ else
+ {
+ real0 = TREE_REALPART (arg0);
+ imag0 = TREE_IMAGPART (arg0);
+ }
+
+ if (TREE_CODE (arg1) == COMPLEX_EXPR)
+ {
+ real1 = TREE_OPERAND (arg1, 0);
+ imag1 = TREE_OPERAND (arg1, 1);
+ }
+ else
+ {
+ real1 = TREE_REALPART (arg1);
+ imag1 = TREE_IMAGPART (arg1);
+ }
+
+ rcond = fold_binary (code, type, real0, real1);
+ if (rcond && TREE_CODE (rcond) == INTEGER_CST)
+ {
+ if (integer_zerop (rcond))
+ {
+ if (code == EQ_EXPR)
+ return omit_two_operands (type, boolean_false_node,
+ imag0, imag1);
+ return fold_build2 (NE_EXPR, type, imag0, imag1);
+ }
+ else
+ {
+ if (code == NE_EXPR)
+ return omit_two_operands (type, boolean_true_node,
+ imag0, imag1);
+ return fold_build2 (EQ_EXPR, type, imag0, imag1);
+ }
+ }
+
+ icond = fold_binary (code, type, imag0, imag1);
+ if (icond && TREE_CODE (icond) == INTEGER_CST)
+ {
+ if (integer_zerop (icond))
+ {
+ if (code == EQ_EXPR)
+ return omit_two_operands (type, boolean_false_node,
+ real0, real1);
+ return fold_build2 (NE_EXPR, type, real0, real1);
+ }
+ else
+ {
+ if (code == NE_EXPR)
+ return omit_two_operands (type, boolean_true_node,
+ real0, real1);
+ return fold_build2 (EQ_EXPR, type, real0, real1);
+ }
+ }
+ }
+
return NULL_TREE;
case LT_EXPR:
&& ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
&& !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
|| (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && !TYPE_UNSIGNED (TREE_TYPE (arg1))
- && !(flag_wrapv || flag_trapv))))
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
{
tree arg01 = TREE_OPERAND (arg0, 1);
enum tree_code code0 = TREE_CODE (arg0);
if (code == GT_EXPR
&& ((code0 == MINUS_EXPR && is_positive >= 0)
|| (code0 == PLUS_EXPR && is_positive <= 0)))
- return constant_boolean_node (0, type);
+ {
+ if (TREE_CODE (arg01) == INTEGER_CST
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
+ fold_overflow_warning (("assuming signed overflow does not "
+ "occur when assuming that (X - c) > X "
+ "is always false"),
+ WARN_STRICT_OVERFLOW_ALL);
+ return constant_boolean_node (0, type);
+ }
/* Likewise (X + c) < X becomes false. */
if (code == LT_EXPR
&& ((code0 == PLUS_EXPR && is_positive >= 0)
|| (code0 == MINUS_EXPR && is_positive <= 0)))
- return constant_boolean_node (0, type);
+ {
+ if (TREE_CODE (arg01) == INTEGER_CST
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
+ fold_overflow_warning (("assuming signed overflow does not "
+ "occur when assuming that "
+ "(X + c) < X is always false"),
+ WARN_STRICT_OVERFLOW_ALL);
+ return constant_boolean_node (0, type);
+ }
/* Convert (X - c) <= X to true. */
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
&& code == LE_EXPR
&& ((code0 == MINUS_EXPR && is_positive >= 0)
|| (code0 == PLUS_EXPR && is_positive <= 0)))
- return constant_boolean_node (1, type);
+ {
+ if (TREE_CODE (arg01) == INTEGER_CST
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
+ fold_overflow_warning (("assuming signed overflow does not "
+ "occur when assuming that "
+ "(X - c) <= X is always true"),
+ WARN_STRICT_OVERFLOW_ALL);
+ return constant_boolean_node (1, type);
+ }
/* Convert (X + c) >= X to true. */
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
&& code == GE_EXPR
&& ((code0 == PLUS_EXPR && is_positive >= 0)
|| (code0 == MINUS_EXPR && is_positive <= 0)))
- return constant_boolean_node (1, type);
+ {
+ if (TREE_CODE (arg01) == INTEGER_CST
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
+ fold_overflow_warning (("assuming signed overflow does not "
+ "occur when assuming that "
+ "(X + c) >= X is always true"),
+ WARN_STRICT_OVERFLOW_ALL);
+ return constant_boolean_node (1, type);
+ }
if (TREE_CODE (arg01) == INTEGER_CST)
{
if (code == GT_EXPR
&& ((code0 == PLUS_EXPR && is_positive > 0)
|| (code0 == MINUS_EXPR && is_positive < 0)))
- return constant_boolean_node (1, type);
+ {
+ if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
+ fold_overflow_warning (("assuming signed overflow does "
+ "not occur when assuming that "
+ "(X + c) > X is always true"),
+ WARN_STRICT_OVERFLOW_ALL);
+ return constant_boolean_node (1, type);
+ }
if (code == LT_EXPR
&& ((code0 == MINUS_EXPR && is_positive > 0)
|| (code0 == PLUS_EXPR && is_positive < 0)))
- return constant_boolean_node (1, type);
+ {
+ if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
+ fold_overflow_warning (("assuming signed overflow does "
+ "not occur when assuming that "
+ "(X - c) < X is always true"),
+ WARN_STRICT_OVERFLOW_ALL);
+ return constant_boolean_node (1, type);
+ }
/* Convert X + c <= X and X - c >= X to false for integers. */
if (code == LE_EXPR
&& ((code0 == PLUS_EXPR && is_positive > 0)
|| (code0 == MINUS_EXPR && is_positive < 0)))
- return constant_boolean_node (0, type);
+ {
+ if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
+ fold_overflow_warning (("assuming signed overflow does "
+ "not occur when assuming that "
+ "(X + c) <= X is always false"),
+ WARN_STRICT_OVERFLOW_ALL);
+ return constant_boolean_node (0, type);
+ }
if (code == GE_EXPR
&& ((code0 == MINUS_EXPR && is_positive > 0)
|| (code0 == PLUS_EXPR && is_positive < 0)))
- return constant_boolean_node (0, type);
+ {
+ if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
+ fold_overflow_warning (("assuming signed overflow does "
+ "not occur when assuming that "
+ "(X - c) >= X is always false"),
+ WARN_STRICT_OVERFLOW_ALL);
+ return constant_boolean_node (0, type);
+ }
}
}
TREE_OPERAND (arg0, 0), arg1));
/* Convert ABS_EXPR<x> >= 0 to true. */
+ strict_overflow_p = false;
if (code == GE_EXPR
- && tree_expr_nonnegative_p (arg0)
&& (integer_zerop (arg1)
|| (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
- && real_zerop (arg1))))
- return omit_one_operand (type, integer_one_node, arg0);
+ && real_zerop (arg1)))
+ && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
+ {
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not occur "
+ "when simplifying comparison of "
+ "absolute value and zero"),
+ WARN_STRICT_OVERFLOW_CONDITIONAL);
+ return omit_one_operand (type, integer_one_node, arg0);
+ }
/* Convert ABS_EXPR<x> < 0 to false. */
+ strict_overflow_p = false;
if (code == LT_EXPR
- && tree_expr_nonnegative_p (arg0)
- && (integer_zerop (arg1) || real_zerop (arg1)))
- return omit_one_operand (type, integer_zero_node, arg0);
+ && (integer_zerop (arg1) || real_zerop (arg1))
+ && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
+ {
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not occur "
+ "when simplifying comparison of "
+ "absolute value and zero"),
+ WARN_STRICT_OVERFLOW_CONDITIONAL);
+ return omit_one_operand (type, integer_zero_node, arg0);
+ }
/* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
and similarly for >= into !=. */
return NULL_TREE;
case CALL_EXPR:
- /* Check for a built-in function. */
- if (TREE_CODE (op0) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
- && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
- return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
- return NULL_TREE;
+ /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
+ of fold_ternary on them. */
+ gcc_unreachable ();
case BIT_FIELD_REF:
- if (TREE_CODE (arg0) == VECTOR_CST
+ if ((TREE_CODE (arg0) == VECTOR_CST
+ || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
&& type == TREE_TYPE (TREE_TYPE (arg0))
&& host_integerp (arg1, 1)
&& host_integerp (op2, 1))
&& (idx = idx / width)
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
{
- tree elements = TREE_VECTOR_CST_ELTS (arg0);
+ tree elements = NULL_TREE;
+
+ if (TREE_CODE (arg0) == VECTOR_CST)
+ elements = TREE_VECTOR_CST_ELTS (arg0);
+ else
+ {
+ unsigned HOST_WIDE_INT idx;
+ tree value;
+
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
+ elements = tree_cons (NULL_TREE, value, elements);
+ }
while (idx-- > 0 && elements)
elements = TREE_CHAIN (elements);
if (elements)
if (kind == tcc_constant)
return t;
+ /* CALL_EXPR-like objects with variable numbers of operands are
+ treated specially. */
+ if (kind == tcc_vl_exp)
+ {
+ if (code == CALL_EXPR)
+ {
+ tem = fold_call_expr (expr, false);
+ return tem ? tem : expr;
+ }
+ return expr;
+ }
+
if (IS_EXPR_CODE_CLASS (kind)
|| IS_GIMPLE_STMT_CODE_CLASS (kind))
{
case tcc_unary:
case tcc_binary:
case tcc_statement:
- len = TREE_CODE_LENGTH (code);
+ case tcc_vl_exp:
+ len = TREE_OPERAND_LENGTH (expr);
for (i = 0; i < len; ++i)
fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
break;
md5_finish_ctx (&ctx, checksum_before_op2);
htab_empty (ht);
#endif
-
+
+ gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
tem = fold_ternary (code, type, op0, op1, op2);
if (!tem)
tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
return tem;
}
+/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
+ arguments in ARGARRAY, and a null static chain.
+ Return a folded expression if successful. Otherwise, return a CALL_EXPR
+ of type TYPE from the given operands as constructed by build_call_array. */
+
+tree
+fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
+{
+ tree tem;
+#ifdef ENABLE_FOLD_CHECKING
+ /* With fold checking enabled, checksum FN and each argument before
+ and after folding. Folding must build new trees rather than
+ modify its operands in place; fold_check_failed aborts if a
+ checksum changes. */
+ unsigned char checksum_before_fn[16],
+ checksum_before_arglist[16],
+ checksum_after_fn[16],
+ checksum_after_arglist[16];
+ struct md5_ctx ctx;
+ htab_t ht;
+ int i;
+
+ /* The hash table tracks already-visited trees so shared subtrees
+ are only checksummed once; empty it between passes. */
+ ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (fn, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_before_fn);
+ htab_empty (ht);
+
+ md5_init_ctx (&ctx);
+ for (i = 0; i < nargs; i++)
+ fold_checksum_tree (argarray[i], &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_before_arglist);
+ htab_empty (ht);
+#endif
+
+ /* Do the actual folding; returns the CALL_EXPR built by
+ build_call_array if no simplification applies. */
+ tem = fold_builtin_call_array (type, fn, nargs, argarray);
+
+#ifdef ENABLE_FOLD_CHECKING
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (fn, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_after_fn);
+ htab_empty (ht);
+
+ if (memcmp (checksum_before_fn, checksum_after_fn, 16))
+ fold_check_failed (fn, tem);
+
+ md5_init_ctx (&ctx);
+ for (i = 0; i < nargs; i++)
+ fold_checksum_tree (argarray[i], &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_after_arglist);
+ htab_delete (ht);
+
+ if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
+ fold_check_failed (NULL_TREE, tem);
+#endif
+ return tem;
+}
+
/* Perform constant folding and related simplification of initializer
expression EXPR. These behave identically to "fold_buildN" but ignore
potential run-time traps and exceptions that fold must preserve. */
return result;
}
+/* Like fold_build_call_array, but fold in "initializer" context:
+ START_FOLD_INIT/END_FOLD_INIT temporarily suppress the run-time
+ trap and exception preservation that fold normally performs, since
+ initializer expressions are evaluated at translation time. */
+
+tree
+fold_build_call_array_initializer (tree type, tree fn,
+ int nargs, tree *argarray)
+{
+ tree result;
+ START_FOLD_INIT;
+
+ result = fold_build_call_array (type, fn, nargs, argarray);
+
+ END_FOLD_INIT;
+ return result;
+}
+
#undef START_FOLD_INIT
#undef END_FOLD_INIT
(where the same SAVE_EXPR (J) is used in the original and the
transformed version). */
-static int
+int
multiple_of_p (tree type, tree top, tree bottom)
{
if (operand_equal_p (top, bottom, 0))
}
}
-/* Return true if `t' is known to be non-negative. */
+/* Return true if `t' is known to be non-negative. If the return
+ value is based on the assumption that signed overflow is undefined,
+ set *STRICT_OVERFLOW_P to true; otherwise, don't change
+ *STRICT_OVERFLOW_P. */
bool
-tree_expr_nonnegative_p (tree t)
+tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
if (t == error_mark_node)
return false;
case ABS_EXPR:
/* We can't return 1 if flag_wrapv is set because
ABS_EXPR<INT_MIN> = INT_MIN. */
- if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
- return true;
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
+ return true;
+ if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
+ {
+ *strict_overflow_p = true;
+ return true;
+ }
break;
case INTEGER_CST:
case PLUS_EXPR:
if (FLOAT_TYPE_P (TREE_TYPE (t)))
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
- && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p)
+ && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p));
/* zero_extend(x) + zero_extend(y) is non-negative if x and y are
both unsigned and at least 2 bits shorter than the result. */
/* x * x for floating point x is always non-negative. */
if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
return true;
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
- && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p)
+ && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p));
}
/* zero_extend(x) * zero_extend(y) is non-negative if x and y are
case BIT_AND_EXPR:
case MAX_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
- || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p)
+ || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p));
case BIT_IOR_EXPR:
case BIT_XOR_EXPR:
case CEIL_DIV_EXPR:
case FLOOR_DIV_EXPR:
case ROUND_DIV_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
- && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p)
+ && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p));
case TRUNC_MOD_EXPR:
case CEIL_MOD_EXPR:
case SAVE_EXPR:
case NON_LVALUE_EXPR:
case FLOAT_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
+ case FIX_TRUNC_EXPR:
+ return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p);
case COMPOUND_EXPR:
case MODIFY_EXPR:
case GIMPLE_MODIFY_STMT:
- return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
+ return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+ strict_overflow_p);
case BIND_EXPR:
- return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
+ return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
+ strict_overflow_p);
case COND_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
- && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
+ return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p)
+ && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
+ strict_overflow_p));
case NOP_EXPR:
{
if (TREE_CODE (outer_type) == REAL_TYPE)
{
if (TREE_CODE (inner_type) == REAL_TYPE)
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
+ return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p);
if (TREE_CODE (inner_type) == INTEGER_TYPE)
{
if (TYPE_UNSIGNED (inner_type))
return true;
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
+ return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p);
}
}
else if (TREE_CODE (outer_type) == INTEGER_TYPE)
{
if (TREE_CODE (inner_type) == REAL_TYPE)
- return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
+ return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
+ strict_overflow_p);
if (TREE_CODE (inner_type) == INTEGER_TYPE)
return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
&& TYPE_UNSIGNED (inner_type);
/* If the initializer is non-void, then it's a normal expression
that will be assigned to the slot. */
if (!VOID_TYPE_P (t))
- return tree_expr_nonnegative_p (t);
+ return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
/* Otherwise, the initializer sets the slot in some way. One common
way is an assignment statement at the end of the initializer. */
if ((TREE_CODE (t) == MODIFY_EXPR
|| TREE_CODE (t) == GIMPLE_MODIFY_STMT)
&& GENERIC_TREE_OPERAND (t, 0) == temp)
- return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
+ return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+ strict_overflow_p);
return false;
}
case CALL_EXPR:
{
tree fndecl = get_callee_fndecl (t);
- tree arglist = TREE_OPERAND (t, 1);
if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (fndecl))
{
/* sqrt(-0.0) is -0.0. */
if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
return true;
- return tree_expr_nonnegative_p (TREE_VALUE (arglist));
+ return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
+ strict_overflow_p);
CASE_FLT_FN (BUILT_IN_ASINH):
CASE_FLT_FN (BUILT_IN_ATAN):
CASE_FLT_FN (BUILT_IN_NEARBYINT):
CASE_FLT_FN (BUILT_IN_RINT):
CASE_FLT_FN (BUILT_IN_ROUND):
+ CASE_FLT_FN (BUILT_IN_SCALB):
+ CASE_FLT_FN (BUILT_IN_SCALBLN):
+ CASE_FLT_FN (BUILT_IN_SCALBN):
CASE_FLT_FN (BUILT_IN_SIGNBIT):
+ CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
CASE_FLT_FN (BUILT_IN_SINH):
CASE_FLT_FN (BUILT_IN_TANH):
CASE_FLT_FN (BUILT_IN_TRUNC):
/* True if the 1st argument is nonnegative. */
- return tree_expr_nonnegative_p (TREE_VALUE (arglist));
+ return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
+ strict_overflow_p);
CASE_FLT_FN (BUILT_IN_FMAX):
/* True if the 1st OR 2nd arguments are nonnegative. */
- return tree_expr_nonnegative_p (TREE_VALUE (arglist))
- || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
+ return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
+ strict_overflow_p)
+ || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
+ strict_overflow_p)));
CASE_FLT_FN (BUILT_IN_FMIN):
/* True if the 1st AND 2nd arguments are nonnegative. */
- return tree_expr_nonnegative_p (TREE_VALUE (arglist))
- && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
+ return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
+ strict_overflow_p)
+ && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
+ strict_overflow_p)));
CASE_FLT_FN (BUILT_IN_COPYSIGN):
/* True if the 2nd argument is nonnegative. */
- return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
+ return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
+ strict_overflow_p);
CASE_FLT_FN (BUILT_IN_POWI):
/* True if the 1st argument is nonnegative or the second
argument is an even integer. */
- if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
+ if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
{
- tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+ tree arg1 = CALL_EXPR_ARG (t, 1);
if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
return true;
}
- return tree_expr_nonnegative_p (TREE_VALUE (arglist));
+ return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
+ strict_overflow_p);
CASE_FLT_FN (BUILT_IN_POW):
/* True if the 1st argument is nonnegative or the second
argument is an even integer valued real. */
- if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
+ if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
{
REAL_VALUE_TYPE c;
HOST_WIDE_INT n;
- c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
+ c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
n = real_to_integer (&c);
if ((n & 1) == 0)
{
return true;
}
}
- return tree_expr_nonnegative_p (TREE_VALUE (arglist));
+ return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
+ strict_overflow_p);
default:
break;
return false;
}
+/* Return true if `t' is known to be non-negative. Handle warnings
+ about undefined signed overflow: if the answer relied on the
+ assumption that signed overflow is undefined, issue (or, when
+ warnings are being deferred, record) a strict-overflow warning
+ via fold_overflow_warning. */
+
+bool
+tree_expr_nonnegative_p (tree t)
+{
+ bool ret, strict_overflow_p;
+
+ strict_overflow_p = false;
+ ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not occur when "
+ "determining that expression is always "
+ "non-negative"),
+ WARN_STRICT_OVERFLOW_MISC);
+ return ret;
+}
+
/* Return true when T is an address and is known to be nonzero.
For floating point we further ensure that T is not denormal.
- Similar logic is present in nonzero_address in rtlanal.h. */
+ Similar logic is present in nonzero_address in rtlanal.h.
+
+ If the return value is based on the assumption that signed overflow
+ is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
+ change *STRICT_OVERFLOW_P. */
bool
-tree_expr_nonzero_p (tree t)
+tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
tree type = TREE_TYPE (t);
+ bool sub_strict_overflow_p;
/* Doing something useful for floating point would need more work. */
if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
return ssa_name_nonzero_p (t);
case ABS_EXPR:
- return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
+ return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p);
case INTEGER_CST:
return !integer_zerop (t);
case PLUS_EXPR:
- if (!TYPE_UNSIGNED (type) && !flag_wrapv)
+ if (TYPE_OVERFLOW_UNDEFINED (type))
{
/* With the presence of negative values it is hard
to say something. */
- if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
- || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
+ sub_strict_overflow_p = false;
+ if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+ &sub_strict_overflow_p)
+ || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+ &sub_strict_overflow_p))
return false;
/* One of operands must be positive and the other non-negative. */
- return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
- || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
+ /* We don't set *STRICT_OVERFLOW_P here: even if this value
+ overflows, on a twos-complement machine the sum of two
+ nonnegative numbers can never be zero. */
+ return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p)
+ || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p));
}
break;
case MULT_EXPR:
- if (!TYPE_UNSIGNED (type) && !flag_wrapv)
+ if (TYPE_OVERFLOW_UNDEFINED (type))
{
- return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
- && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
+ if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p)
+ && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p))
+ {
+ *strict_overflow_p = true;
+ return true;
+ }
}
break;
tree outer_type = TREE_TYPE (t);
return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
- && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
+ && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p));
}
break;
}
case COND_EXPR:
- return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
- && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
+ sub_strict_overflow_p = false;
+ if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+ &sub_strict_overflow_p)
+ && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
+ &sub_strict_overflow_p))
+ {
+ if (sub_strict_overflow_p)
+ *strict_overflow_p = true;
+ return true;
+ }
+ break;
case MIN_EXPR:
- return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
- && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
+ sub_strict_overflow_p = false;
+ if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+ &sub_strict_overflow_p)
+ && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+ &sub_strict_overflow_p))
+ {
+ if (sub_strict_overflow_p)
+ *strict_overflow_p = true;
+ }
+ break;
case MAX_EXPR:
- if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
+ sub_strict_overflow_p = false;
+ if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+ &sub_strict_overflow_p))
{
+ if (sub_strict_overflow_p)
+ *strict_overflow_p = true;
+
/* When both operands are nonzero, then MAX must be too. */
- if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
+ if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p))
return true;
/* MAX where operand 0 is positive is positive. */
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
+ return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p);
}
/* MAX where operand 1 is positive is positive. */
- else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
- && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
- return true;
+ else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+ &sub_strict_overflow_p)
+ && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+ &sub_strict_overflow_p))
+ {
+ if (sub_strict_overflow_p)
+ *strict_overflow_p = true;
+ return true;
+ }
break;
case COMPOUND_EXPR:
case MODIFY_EXPR:
case GIMPLE_MODIFY_STMT:
case BIND_EXPR:
- return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
+ return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+ strict_overflow_p);
case SAVE_EXPR:
case NON_LVALUE_EXPR:
- return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
+ return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p);
case BIT_IOR_EXPR:
- return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
- || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
+ return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p)
+ || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p));
case CALL_EXPR:
return alloca_call_p (t);
return false;
}
+/* Return true when T is an address and is known to be nonzero.
+ Handle warnings about undefined signed overflow: if the answer
+ relied on the assumption that signed overflow is undefined, issue
+ (or, when warnings are being deferred, record) a strict-overflow
+ warning via fold_overflow_warning. This is the warning-emitting
+ wrapper around tree_expr_nonzero_warnv_p. */
+
+bool
+tree_expr_nonzero_p (tree t)
+{
+ bool ret, strict_overflow_p;
+
+ strict_overflow_p = false;
+ ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not occur when "
+ "determining that expression is always "
+ "non-zero"),
+ WARN_STRICT_OVERFLOW_MISC);
+ return ret;
+}
+
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
attempt to fold the expression to a constant without modifying TYPE,
OP0 or OP1.
return constant_boolean_node (real_compare (code, c0, c1), type);
}
+ /* Handle equality/inequality of complex constants. */
+ if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
+ {
+ tree rcond = fold_relational_const (code, type,
+ TREE_REALPART (op0),
+ TREE_REALPART (op1));
+ tree icond = fold_relational_const (code, type,
+ TREE_IMAGPART (op0),
+ TREE_IMAGPART (op1));
+ if (code == EQ_EXPR)
+ return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
+ else if (code == NE_EXPR)
+ return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
+ else
+ return NULL_TREE;
+ }
+
/* From here on we only handle LT, LE, GT, GE, EQ and NE.
To compute GT, swap the arguments and do LT.
/* If divisor is a power of two, simplify this to bit manipulation. */
if (divisor == (divisor & -divisor))
{
- tree t;
-
if (TREE_CODE (value) == INTEGER_CST)
{
unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
- HOST_WIDE_INT high;
+ unsigned HOST_WIDE_INT high;
+ bool overflow_p;
if ((low & (divisor - 1)) == 0)
return value;
+ overflow_p = TREE_OVERFLOW (value);
high = TREE_INT_CST_HIGH (value);
low &= ~(divisor - 1);
low += divisor;
if (low == 0)
- high++;
-
- t = build_int_cst_wide_type (TREE_TYPE (value), low, high);
- if ((TREE_OVERFLOW (value) || integer_zerop (t))
- && !TREE_OVERFLOW (t))
{
- t = copy_node (t);
- TREE_OVERFLOW (t) = 1;
+ high++;
+ if (high == 0)
+ overflow_p = true;
}
- return t;
+
+ return force_fit_type_double (TREE_TYPE (value), low, high,
+ -1, overflow_p);
}
else
{
+ tree t;
+
t = build_int_cst (TREE_TYPE (value), divisor - 1);
value = size_binop (PLUS_EXPR, value, t);
t = build_int_cst (TREE_TYPE (value), -divisor);
{
CASE_FLT_FN (BUILT_IN_COPYSIGN):
/* Strip copysign function call, return the 1st argument. */
- arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
- arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
+ arg0 = CALL_EXPR_ARG (exp, 0);
+ arg1 = CALL_EXPR_ARG (exp, 1);
return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
default:
/* Strip sign ops from the argument of "odd" math functions. */
if (negate_mathfn_p (fcode))
{
- arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
+ arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
if (arg0)
- return build_function_call_expr (get_callee_fndecl (exp),
- build_tree_list (NULL_TREE,
- arg0));
+ return build_call_expr (get_callee_fndecl (exp), 1, arg0);
}
break;
}