/* Fold a constant sub-tree into a single node for C-compiler
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
This file is part of GCC.
#include "langhooks.h"
#include "md5.h"
+/* Non-zero if we are folding constants inside an initializer; zero
+ otherwise. */
+int folding_initializer = 0;
+
/* The following constants represent a bit based encoding of GCC's
comparison operators. This encoding simplifies transformations
on relational comparison operators, such as AND and OR. */
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
+static tree range_predecessor (tree);
+static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
+static int native_encode_expr (tree, unsigned char *, int);
+static tree native_interpret_expr (tree, unsigned char *, int);
+
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
}
\f
/* Add two doubleword integers with doubleword result.
+ Return nonzero if the operation overflows according to UNSIGNED_P.
Each argument is given as two `HOST_WIDE_INT' pieces.
One argument is L1 and H1; the other, L2 and H2.
The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
-add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
- unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
- unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
+add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
+ bool unsigned_p)
{
unsigned HOST_WIDE_INT l;
HOST_WIDE_INT h;
*lv = l;
*hv = h;
- return OVERFLOW_SUM_SIGN (h1, h2, h);
+
+ if (unsigned_p)
+ return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
+ else
+ return OVERFLOW_SUM_SIGN (h1, h2, h);
}
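/* A minimal standalone sketch of the unsigned overflow test used in
   add_double_with_sign above: an unsigned addition wraps exactly when
   the truncated sum compares smaller than one of the operands.  The
   helper name and the single 64-bit type (in place of the two
   HOST_WIDE_INT pieces) are illustrative assumptions, not part of
   fold-const.c.  */
#include <stdint.h>

static int
sketch_uadd_overflows (uint64_t a, uint64_t b, uint64_t *sum)
{
  *sum = a + b;			/* Wraps modulo 2**64.  */
  return *sum < a;		/* Overflow iff the sum wrapped past A.  */
}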
/* Negate a doubleword integer with doubleword result.
}
\f
/* Multiply two doubleword integers with doubleword result.
- Return nonzero if the operation overflows, assuming it's signed.
+ Return nonzero if the operation overflows according to UNSIGNED_P.
Each argument is given as two `HOST_WIDE_INT' pieces.
One argument is L1 and H1; the other, L2 and H2.
The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
-mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
- unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
- unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
+mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
+ bool unsigned_p)
{
HOST_WIDE_INT arg1[4];
HOST_WIDE_INT arg2[4];
prod[i + 4] = carry;
}
- decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
-
- /* Check for overflow by calculating the top half of the answer in full;
- it should agree with the low half's sign bit. */
+ decode (prod, lv, hv);
decode (prod + 4, &toplow, &tophigh);
+
+ /* Unsigned overflow is immediate. */
+ if (unsigned_p)
+ return (toplow | tophigh) != 0;
+
+ /* Check for signed overflow by calculating the signed representation of the
+ top half of the result; it should agree with the low half's sign bit. */
if (h1 < 0)
{
neg_double (l2, h2, &neglow, &neghigh);
}
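/* A standalone analogue of the overflow checks in mul_double_with_sign,
   using a 32x32->64 bit multiply so the "top half" of the product is
   directly visible.  The names and narrow types are illustrative
   assumptions only.  */
#include <stdint.h>

static int
sketch_umul32_overflows (uint32_t a, uint32_t b, uint32_t *lo)
{
  uint64_t prod = (uint64_t) a * b;
  *lo = (uint32_t) prod;
  /* Unsigned overflow is immediate: any bit set in the top half.  */
  return (uint32_t) (prod >> 32) != 0;
}

static int
sketch_smul32_overflows (int32_t a, int32_t b, int32_t *lo)
{
  int64_t prod = (int64_t) a * b;
  *lo = (int32_t) prod;	/* Assumes the usual wrapping conversion.  */
  /* Signed overflow: the full product must round-trip through the
     narrow type, i.e. the discarded top half is just the sign
     extension of the low half.  */
  return prod != (int64_t) *lo;
}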
/* Determine whether an expression T can be cheaply negated using
- the function negate_expr. */
+ the function negate_expr without introducing undefined overflow. */
static bool
negate_expr_p (tree t)
switch (TREE_CODE (t))
{
case INTEGER_CST:
- if (TYPE_UNSIGNED (type) || ! flag_trapv)
+ if (TYPE_UNSIGNED (type)
+ || (flag_wrapv && ! flag_trapv))
return true;
/* Check that -CST will not overflow type. */
return may_negate_without_overflow_p (t);
case BIT_NOT_EXPR:
- return INTEGRAL_TYPE_P (type);
+ return INTEGRAL_TYPE_P (type)
+ && (TYPE_UNSIGNED (type)
+ || (flag_wrapv && !flag_trapv));
case REAL_CST:
case NEGATE_EXPR:
return false;
}
-/* Given T, an expression, return the negation of T. Allow for T to be
- null, in which case return null. */
+/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
+ simplification is possible.
+ If negate_expr_p would return true for T, NULL_TREE will never be
+ returned. */
static tree
-negate_expr (tree t)
+fold_negate_expr (tree t)
{
- tree type;
+ tree type = TREE_TYPE (t);
tree tem;
- if (t == 0)
- return 0;
-
- type = TREE_TYPE (t);
- STRIP_SIGN_NOPS (t);
-
switch (TREE_CODE (t))
{
/* Convert - (~A) to A + 1. */
tem = fold_negate_const (t, type);
/* Two's complement FP formats, such as c4x, may overflow. */
if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
- return fold_convert (type, tem);
+ return tem;
break;
case COMPLEX_CST:
break;
case NEGATE_EXPR:
- return fold_convert (type, TREE_OPERAND (t, 0));
+ return TREE_OPERAND (t, 0);
case PLUS_EXPR:
if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
TREE_OPERAND (t, 1)))
{
tem = negate_expr (TREE_OPERAND (t, 1));
- tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
- tem, TREE_OPERAND (t, 0));
- return fold_convert (type, tem);
+ return fold_build2 (MINUS_EXPR, type,
+ tem, TREE_OPERAND (t, 0));
}
/* -(A + B) -> (-A) - B. */
if (negate_expr_p (TREE_OPERAND (t, 0)))
{
tem = negate_expr (TREE_OPERAND (t, 0));
- tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
- tem, TREE_OPERAND (t, 1));
- return fold_convert (type, tem);
+ return fold_build2 (MINUS_EXPR, type,
+ tem, TREE_OPERAND (t, 1));
}
}
break;
/* - (A - B) -> B - A */
if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
&& reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
- return fold_convert (type,
- fold_build2 (MINUS_EXPR, TREE_TYPE (t),
- TREE_OPERAND (t, 1),
- TREE_OPERAND (t, 0)));
+ return fold_build2 (MINUS_EXPR, type,
+ TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
break;
case MULT_EXPR:
- if (TYPE_UNSIGNED (TREE_TYPE (t)))
+ if (TYPE_UNSIGNED (type))
break;
/* Fall through. */
case RDIV_EXPR:
- if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
+ if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
{
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- TREE_OPERAND (t, 0),
- negate_expr (tem)));
+ return fold_build2 (TREE_CODE (t), type,
+ TREE_OPERAND (t, 0), negate_expr (tem));
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- negate_expr (tem),
- TREE_OPERAND (t, 1)));
+ return fold_build2 (TREE_CODE (t), type,
+ negate_expr (tem), TREE_OPERAND (t, 1));
}
break;
case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
- if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
+ if (!TYPE_UNSIGNED (type) && !flag_wrapv)
{
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- TREE_OPERAND (t, 0),
- negate_expr (tem)));
+ return fold_build2 (TREE_CODE (t), type,
+ TREE_OPERAND (t, 0), negate_expr (tem));
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- negate_expr (tem),
- TREE_OPERAND (t, 1)));
+ return fold_build2 (TREE_CODE (t), type,
+ negate_expr (tem), TREE_OPERAND (t, 1));
}
break;
{
tem = strip_float_extensions (t);
if (tem != t && negate_expr_p (tem))
- return fold_convert (type, negate_expr (tem));
+ return negate_expr (tem);
}
break;
break;
}
- tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
+ return NULL_TREE;
+}
+
+/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
+   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
+   return NULL_TREE. */
+
+static tree
+negate_expr (tree t)
+{
+ tree type, tem;
+
+ if (t == NULL_TREE)
+ return NULL_TREE;
+
+ type = TREE_TYPE (t);
+ STRIP_SIGN_NOPS (t);
+
+ tem = fold_negate_expr (t);
+ if (!tem)
+ tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
return fold_convert (type, tem);
}
\f
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
constant. We assume ARG1 and ARG2 have the same data type, or at least
- are the same kind of constant and the same machine mode.
+ are the same kind of constant and the same machine mode. Return zero if
+ combining the constants is not allowed in the current operating mode.
If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
+ /* Sanity check for the recursive cases. */
+ if (!arg1 || !arg2)
+ return NULL_TREE;
+
STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
/* Don't constant fold this floating point operation if
the result has overflowed and flag_trapping_math. */
-
if (flag_trapping_math
&& MODE_HAS_INFINITIES (mode)
&& REAL_VALUE_ISINF (result)
     result may depend upon the run-time rounding mode and
flag_rounding_math is set, or if GCC's software emulation
is unable to accurately represent the result. */
-
if ((flag_rounding_math
|| (REAL_MODE_FORMAT_COMPOSITE_P (mode)
&& !flag_unsafe_math_optimizations))
tree i1 = TREE_IMAGPART (arg1);
tree r2 = TREE_REALPART (arg2);
tree i2 = TREE_IMAGPART (arg2);
- tree t;
+ tree real, imag;
switch (code)
{
case PLUS_EXPR:
- t = build_complex (type,
- const_binop (PLUS_EXPR, r1, r2, notrunc),
- const_binop (PLUS_EXPR, i1, i2, notrunc));
- break;
-
case MINUS_EXPR:
- t = build_complex (type,
- const_binop (MINUS_EXPR, r1, r2, notrunc),
- const_binop (MINUS_EXPR, i1, i2, notrunc));
+ real = const_binop (code, r1, r2, notrunc);
+ imag = const_binop (code, i1, i2, notrunc);
break;
case MULT_EXPR:
- t = build_complex (type,
- const_binop (MINUS_EXPR,
- const_binop (MULT_EXPR,
- r1, r2, notrunc),
- const_binop (MULT_EXPR,
- i1, i2, notrunc),
- notrunc),
- const_binop (PLUS_EXPR,
- const_binop (MULT_EXPR,
- r1, i2, notrunc),
- const_binop (MULT_EXPR,
- i1, r2, notrunc),
- notrunc));
+ real = const_binop (MINUS_EXPR,
+ const_binop (MULT_EXPR, r1, r2, notrunc),
+ const_binop (MULT_EXPR, i1, i2, notrunc),
+ notrunc);
+ imag = const_binop (PLUS_EXPR,
+ const_binop (MULT_EXPR, r1, i2, notrunc),
+ const_binop (MULT_EXPR, i1, r2, notrunc),
+ notrunc);
break;
case RDIV_EXPR:
{
- tree t1, t2, real, imag;
tree magsquared
= const_binop (PLUS_EXPR,
const_binop (MULT_EXPR, r2, r2, notrunc),
const_binop (MULT_EXPR, i2, i2, notrunc),
notrunc);
-
- t1 = const_binop (PLUS_EXPR,
- const_binop (MULT_EXPR, r1, r2, notrunc),
- const_binop (MULT_EXPR, i1, i2, notrunc),
- notrunc);
- t2 = const_binop (MINUS_EXPR,
- const_binop (MULT_EXPR, i1, r2, notrunc),
- const_binop (MULT_EXPR, r1, i2, notrunc),
- notrunc);
+ tree t1
+ = const_binop (PLUS_EXPR,
+ const_binop (MULT_EXPR, r1, r2, notrunc),
+ const_binop (MULT_EXPR, i1, i2, notrunc),
+ notrunc);
+ tree t2
+ = const_binop (MINUS_EXPR,
+ const_binop (MULT_EXPR, i1, r2, notrunc),
+ const_binop (MULT_EXPR, r1, i2, notrunc),
+ notrunc);
if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
- {
- real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
- imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
- }
- else
- {
- real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
- imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
- if (!real || !imag)
- return NULL_TREE;
- }
+ code = TRUNC_DIV_EXPR;
- t = build_complex (type, real, imag);
+ real = const_binop (code, t1, magsquared, notrunc);
+ imag = const_binop (code, t2, magsquared, notrunc);
}
break;
default:
return NULL_TREE;
}
- return t;
+
+ if (real && imag)
+ return build_complex (type, real, imag);
}
+
return NULL_TREE;
}
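/* A standalone, floating-point version of the complex arithmetic folded
   above: (r1 + i1*i) * (r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i,
   and division multiplies by the conjugate and divides both parts by
   |z2|^2 = r2*r2 + i2*i2, mirroring t1, t2 and magsquared.  The doubles
   stand in for the folded constant trees; the helpers are hypothetical.  */
static void
sketch_complex_mul (double r1, double i1, double r2, double i2,
		    double *re, double *im)
{
  *re = r1 * r2 - i1 * i2;
  *im = r1 * i2 + i1 * r2;
}

static void
sketch_complex_div (double r1, double i1, double r2, double i2,
		    double *re, double *im)
{
  double magsq = r2 * r2 + i2 * i2;
  *re = (r1 * r2 + i1 * i2) / magsq;	/* t1 / magsquared  */
  *im = (i1 * r2 - r1 * i2) / magsq;	/* t2 / magsquared  */
}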
{
tree type = TREE_TYPE (arg0);
+ if (arg0 == error_mark_node || arg1 == error_mark_node)
+ return error_mark_node;
+
gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
&& type == TREE_TYPE (arg1));
return int_const_binop (code, arg0, arg1, 0);
}
- if (arg0 == error_mark_node || arg1 == error_mark_node)
- return error_mark_node;
-
return fold_build2 (code, type, arg0, arg1);
}
switch (TREE_CODE (type))
{
- case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
+ case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
case POINTER_TYPE: case REFERENCE_TYPE:
case OFFSET_TYPE:
if (TREE_CODE (arg) == INTEGER_CST)
switch (TREE_CODE (orig))
{
- case INTEGER_TYPE: case CHAR_TYPE:
+ case INTEGER_TYPE:
case BOOLEAN_TYPE: case ENUMERAL_TYPE:
case POINTER_TYPE: case REFERENCE_TYPE:
return fold_build1 (FLOAT_EXPR, type, arg);
case COMPLEX_TYPE:
switch (TREE_CODE (orig))
{
- case INTEGER_TYPE: case CHAR_TYPE:
+ case INTEGER_TYPE:
case BOOLEAN_TYPE: case ENUMERAL_TYPE:
case POINTER_TYPE: case REFERENCE_TYPE:
case REAL_TYPE:
if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
return 0;
+  /* If the two types don't have the same precision, then it is not safe
+     to strip NOPs.  */
+ if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
+ return 0;
+
STRIP_NOPS (arg0);
STRIP_NOPS (arg1);
+ /* In case both args are comparisons but with different comparison
+ code, try to swap the comparison operands of one arg to produce
+ a match and compare that variant. */
+ if (TREE_CODE (arg0) != TREE_CODE (arg1)
+ && COMPARISON_CLASS_P (arg0)
+ && COMPARISON_CLASS_P (arg1))
+ {
+ enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
+
+ if (TREE_CODE (arg0) == swap_code)
+ return operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 1), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 0), flags);
+ }
+
if (TREE_CODE (arg0) != TREE_CODE (arg1)
/* This is needed for conversions and for COMPONENT_REF.
Might as well play it safe and always test this. */
FIXME: one would think we would fold the result, but it causes
problems with the dominator optimizer. */
+
tree
-invert_truthvalue (tree arg)
+fold_truth_not_expr (tree arg)
{
tree type = TREE_TYPE (arg);
enum tree_code code = TREE_CODE (arg);
- if (code == ERROR_MARK)
- return arg;
-
/* If this is a comparison, we can simply invert it, except for
floating-point non-equality comparisons, in which case we just
enclose a TRUTH_NOT_EXPR around what we have. */
&& flag_trapping_math
&& code != ORDERED_EXPR && code != UNORDERED_EXPR
&& code != NE_EXPR && code != EQ_EXPR)
- return build1 (TRUTH_NOT_EXPR, type, arg);
+ return NULL_TREE;
else
{
code = invert_tree_comparison (code,
HONOR_NANS (TYPE_MODE (op_type)));
if (code == ERROR_MARK)
- return build1 (TRUTH_NOT_EXPR, type, arg);
+ return NULL_TREE;
else
return build2 (code, type,
TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
case NOP_EXPR:
if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
- break;
+ return build1 (TRUTH_NOT_EXPR, type, arg);
case CONVERT_EXPR:
case FLOAT_EXPR:
default:
break;
}
- gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
- return build1 (TRUTH_NOT_EXPR, type, arg);
+
+ return NULL_TREE;
+}
+
+/* Return a simplified tree node for the truth-negation of ARG. This
+ never alters ARG itself. We assume that ARG is an operation that
+ returns a truth value (0 or 1).
+
+ FIXME: one would think we would fold the result, but it causes
+ problems with the dominator optimizer. */
+
+tree
+invert_truthvalue (tree arg)
+{
+ tree tem;
+
+ if (TREE_CODE (arg) == ERROR_MARK)
+ return arg;
+
+ tem = fold_truth_not_expr (arg);
+ if (!tem)
+ tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
+
+ return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
}
}
- value = const_binop (MINUS_EXPR, high, low, 0);
- if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
- && ! TYPE_UNSIGNED (etype))
+ /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
+     This requires wrap-around arithmetic for the type of the expression. */
+ switch (TREE_CODE (etype))
+ {
+ case INTEGER_TYPE:
+ /* There is no requirement that LOW be within the range of ETYPE
+ if the latter is a subtype. It must, however, be within the base
+ type of ETYPE. So be sure we do the subtraction in that type. */
+ if (TREE_TYPE (etype))
+ etype = TREE_TYPE (etype);
+ break;
+
+ case ENUMERAL_TYPE:
+ case BOOLEAN_TYPE:
+ etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
+ TYPE_UNSIGNED (etype));
+ break;
+
+ default:
+ break;
+ }
+
+  /* If we don't have wrap-around arithmetic up front, try to force it. */
+ if (TREE_CODE (etype) == INTEGER_TYPE
+ && !TYPE_UNSIGNED (etype) && !flag_wrapv)
{
tree utype, minv, maxv;
/* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
for the type in question, as we rely on this here. */
- switch (TREE_CODE (etype))
- {
- case INTEGER_TYPE:
- case ENUMERAL_TYPE:
- case CHAR_TYPE:
- /* There is no requirement that LOW be within the range of ETYPE
- if the latter is a subtype. It must, however, be within the base
- type of ETYPE. So be sure we do the subtraction in that type. */
- if (TREE_TYPE (etype))
- etype = TREE_TYPE (etype);
- utype = lang_hooks.types.unsigned_type (etype);
- maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
- maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
- integer_one_node, 1);
- minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
- if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
- minv, 1, maxv, 1)))
- {
- etype = utype;
- high = fold_convert (etype, high);
- low = fold_convert (etype, low);
- exp = fold_convert (etype, exp);
- value = const_binop (MINUS_EXPR, high, low, 0);
- }
- break;
- default:
- break;
- }
+ utype = lang_hooks.types.unsigned_type (etype);
+ maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
+ maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
+ integer_one_node, 1);
+ minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
+
+ if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
+ minv, 1, maxv, 1)))
+ etype = utype;
+ else
+ return 0;
}
- if (value != 0 && ! TREE_OVERFLOW (value))
- {
- /* There is no requirement that LOW be within the range of ETYPE
- if the latter is a subtype. It must, however, be within the base
- type of ETYPE. So be sure we do the subtraction in that type. */
- if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
- {
- etype = TREE_TYPE (etype);
- exp = fold_convert (etype, exp);
- low = fold_convert (etype, low);
- value = fold_convert (etype, value);
- }
+ high = fold_convert (etype, high);
+ low = fold_convert (etype, low);
+ exp = fold_convert (etype, exp);
- return build_range_check (type,
- fold_build2 (MINUS_EXPR, etype, exp, low),
- 1, build_int_cst (etype, 0), value);
- }
+ value = const_binop (MINUS_EXPR, high, low, 0);
+
+ if (value != 0 && !TREE_OVERFLOW (value))
+ return build_range_check (type,
+ fold_build2 (MINUS_EXPR, etype, exp, low),
+ 1, build_int_cst (etype, 0), value);
return 0;
}
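/* A standalone illustration of the range-check idiom built above: with
   wrap-around (unsigned) arithmetic, LOW <= C && C <= HIGH collapses to
   the single comparison C - LOW <= HIGH - LOW.  The fixed uint32_t type
   and the helper name are illustrative assumptions only.  */
#include <stdint.h>

static int
sketch_in_range (uint32_t c, uint32_t low, uint32_t high)
{
  /* If C < LOW the subtraction wraps to a large value and the test
     fails, exactly as the original two-sided check would.  */
  return c - low <= high - low;
}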
\f
+/* Return the predecessor of VAL in its type, handling the infinite case. */
+
+static tree
+range_predecessor (tree val)
+{
+ tree type = TREE_TYPE (val);
+
+ if (INTEGRAL_TYPE_P (type)
+ && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
+ return 0;
+ else
+ return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
+}
+
+/* Return the successor of VAL in its type, handling the infinite case. */
+
+static tree
+range_successor (tree val)
+{
+ tree type = TREE_TYPE (val);
+
+ if (INTEGRAL_TYPE_P (type)
+ && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
+ return 0;
+ else
+ return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
+}
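/* Standalone sketch of the convention used by range_predecessor and
   range_successor above: stepping past the type's extreme value yields
   a null result, which callers then read as "unbounded".  uint8_t and
   the helper name are stand-ins, not fold-const.c interfaces.  */
#include <stdint.h>

static int
sketch_range_successor (uint8_t val, uint8_t *succ)
{
  if (val == UINT8_MAX)
    return 0;			/* No successor: the range is open-ended.  */
  *succ = val + 1;
  return 1;
}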
+
/* Given two ranges, see if we can merge them into one. Return 1 if we
can, 0 if we can't. Set the output range into the specified parameters. */
/* If they don't overlap, the result is the first range. If they are
equal, the result is false. If the second range is a subset of the
first, and the ranges begin at the same place, we go from just after
- the end of the first range to the end of the second. If the second
+ the end of the second range to the end of the first. If the second
range is not a subset of the first, or if it is a subset and both
ranges end at the same place, the range starts at the start of the
first range and ends just before the second range.
in_p = 0, low = high = 0;
else if (subset && lowequal)
{
- in_p = 1, high = high0;
- low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
- integer_one_node, 0);
+ low = range_successor (high1);
+ high = high0;
+ in_p = (low != 0);
}
else if (! subset || highequal)
{
- in_p = 1, low = low0;
- high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
- integer_one_node, 0);
+ low = low0;
+ high = range_predecessor (low1);
+ in_p = (high != 0);
}
else
return 0;
in_p = 0, low = high = 0;
else
{
- in_p = 1, high = high1;
- low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
- integer_one_node, 0);
+ low = range_successor (high0);
+ high = high1;
+ in_p = (low != 0);
}
}
if (no_overlap)
{
if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
- range_binop (PLUS_EXPR, NULL_TREE,
- high0, 1,
- integer_one_node, 1),
+ range_successor (high0),
1, low1, 0)))
in_p = 0, low = low0, high = high1;
else
break;
/* FALLTHROUGH */
case INTEGER_TYPE:
- case CHAR_TYPE:
if (tree_int_cst_equal (low0,
TYPE_MIN_VALUE (TREE_TYPE (low0))))
low0 = 0;
break;
/* FALLTHROUGH */
case INTEGER_TYPE:
- case CHAR_TYPE:
if (tree_int_cst_equal (high1,
TYPE_MAX_VALUE (TREE_TYPE (high1))))
high1 = 0;
return + [x + 1, y - 1]. */
if (low0 == 0 && high1 == 0)
{
- low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
- integer_one_node, 1);
- high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
- integer_one_node, 0);
+ low = range_successor (high0);
+ high = range_predecessor (low1);
if (low == 0 || high == 0)
return 0;
/* Avoid these transformations if the COND_EXPR may be used
as an lvalue in the C++ front-end. PR c++/19199. */
&& (in_gimple_form
- || strcmp (lang_hooks.name, "GNU C++") != 0
+ || (strcmp (lang_hooks.name, "GNU C++") != 0
+ && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
|| ! maybe_lvalue_p (arg1)
|| ! maybe_lvalue_p (arg2)))
{
tree lntype, rntype, result;
int first_bit, end_bit;
int volatilep;
+ tree orig_lhs = lhs, orig_rhs = rhs;
+ enum tree_code orig_code = code;
/* Start by getting the comparison codes. Fail if anything is volatile.
If one operand is a BIT_AND_EXPR with the constant one, treat it as if
build_int_cst (TREE_TYPE (ll_arg), 0));
if (LOGICAL_OP_NON_SHORT_CIRCUIT)
- return build2 (code, truth_type, lhs, rhs);
+ {
+ if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
+ return build2 (code, truth_type, lhs, rhs);
+ return NULL_TREE;
+ }
}
/* See if the comparisons can be merged. Then get all the parameters for
{
case NE_EXPR: case LT_EXPR: case LE_EXPR:
{
- /* FIXME: We should be able to invert code without building a
- scratch tree node, but doing so would require us to
- duplicate a part of invert_truthvalue here. */
- tree tem = invert_truthvalue (build2 (code, type, op0, op1));
- tem = optimize_minmax_comparison (TREE_CODE (tem),
- TREE_TYPE (tem),
- TREE_OPERAND (tem, 0),
- TREE_OPERAND (tem, 1));
- return invert_truthvalue (tem);
+ tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
+ type, op0, op1);
+ if (tem)
+ return invert_truthvalue (tem);
+ return NULL_TREE;
}
case GE_EXPR:
tree arg01 = TREE_OPERAND (arg0, 1);
unsigned HOST_WIDE_INT lpart;
HOST_WIDE_INT hpart;
+ bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
+ bool neg_overflow;
int overflow;
/* We have to do this the hard way to detect unsigned overflow.
prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
- overflow = mul_double (TREE_INT_CST_LOW (arg01),
- TREE_INT_CST_HIGH (arg01),
- TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
+ overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
+ TREE_INT_CST_HIGH (arg01),
+ TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1),
+ &lpart, &hpart, unsigned_p);
prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
prod = force_fit_type (prod, -1, overflow, false);
+ neg_overflow = false;
- if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
+ if (unsigned_p)
{
tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
lo = prod;
/* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
- overflow = add_double (TREE_INT_CST_LOW (prod),
- TREE_INT_CST_HIGH (prod),
- TREE_INT_CST_LOW (tmp),
- TREE_INT_CST_HIGH (tmp),
- &lpart, &hpart);
+ overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
+ TREE_INT_CST_HIGH (prod),
+ TREE_INT_CST_LOW (tmp),
+ TREE_INT_CST_HIGH (tmp),
+ &lpart, &hpart, unsigned_p);
hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
TREE_CONSTANT_OVERFLOW (prod));
switch (tree_int_cst_sgn (arg1))
{
case -1:
+ neg_overflow = true;
lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
hi = prod;
break;
break;
case 1:
- lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
+ neg_overflow = true;
+ lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
hi = prod;
break;
case LT_EXPR:
if (TREE_OVERFLOW (lo))
- return omit_one_operand (type, integer_zero_node, arg00);
+ {
+ tmp = neg_overflow ? integer_zero_node : integer_one_node;
+ return omit_one_operand (type, tmp, arg00);
+ }
return fold_build2 (LT_EXPR, type, arg00, lo);
case LE_EXPR:
if (TREE_OVERFLOW (hi))
- return omit_one_operand (type, integer_one_node, arg00);
+ {
+ tmp = neg_overflow ? integer_zero_node : integer_one_node;
+ return omit_one_operand (type, tmp, arg00);
+ }
return fold_build2 (LE_EXPR, type, arg00, hi);
case GT_EXPR:
if (TREE_OVERFLOW (hi))
- return omit_one_operand (type, integer_zero_node, arg00);
+ {
+ tmp = neg_overflow ? integer_one_node : integer_zero_node;
+ return omit_one_operand (type, tmp, arg00);
+ }
return fold_build2 (GT_EXPR, type, arg00, hi);
case GE_EXPR:
if (TREE_OVERFLOW (lo))
- return omit_one_operand (type, integer_one_node, arg00);
+ {
+ tmp = neg_overflow ? integer_one_node : integer_zero_node;
+ return omit_one_operand (type, tmp, arg00);
+ }
return fold_build2 (GE_EXPR, type, arg00, lo);
default:
else
{
arg00 = arg0;
- if (!FLOAT_TYPE_P (type))
- arg01 = build_int_cst (type, 1);
- else
- arg01 = build_real (type, dconst1);
+ arg01 = build_one_cst (type);
}
if (TREE_CODE (arg1) == MULT_EXPR)
{
else
{
arg10 = arg1;
- if (!FLOAT_TYPE_P (type))
- arg11 = build_int_cst (type, 1);
- else
- arg11 = build_real (type, dconst1);
+ arg11 = build_one_cst (type);
}
same = NULL_TREE;
return NULL_TREE;
}
-/* Fold a unary expression of code CODE and type TYPE with operand
- OP0. Return the folded expression if folding is successful.
- Otherwise, return NULL_TREE. */
+/* Subroutine of native_encode_expr. Encode the INTEGER_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
-tree
-fold_unary (enum tree_code code, tree type, tree op0)
+static int
+native_encode_int (tree expr, unsigned char *ptr, int len)
{
- tree tem;
- tree arg0;
- enum tree_code_class kind = TREE_CODE_CLASS (code);
+ tree type = TREE_TYPE (expr);
+ int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int byte, offset, word, words;
+ unsigned char value;
- gcc_assert (IS_EXPR_CODE_CLASS (kind)
- && TREE_CODE_LENGTH (code) == 1);
+ if (total_bytes > len)
+ return 0;
+ words = total_bytes / UNITS_PER_WORD;
- arg0 = op0;
- if (arg0)
+ for (byte = 0; byte < total_bytes; byte++)
{
- if (code == NOP_EXPR || code == CONVERT_EXPR
- || code == FLOAT_EXPR || code == ABS_EXPR)
+ int bitpos = byte * BITS_PER_UNIT;
+ if (bitpos < HOST_BITS_PER_WIDE_INT)
+ value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
+ else
+ value = (unsigned char) (TREE_INT_CST_HIGH (expr)
+ >> (bitpos - HOST_BITS_PER_WIDE_INT));
+
+ if (total_bytes > UNITS_PER_WORD)
{
- /* Don't use STRIP_NOPS, because signedness of argument type
- matters. */
- STRIP_SIGN_NOPS (arg0);
+ word = byte / UNITS_PER_WORD;
+ if (WORDS_BIG_ENDIAN)
+ word = (words - 1) - word;
+ offset = word * UNITS_PER_WORD;
+ if (BYTES_BIG_ENDIAN)
+ offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
+ else
+ offset += byte % UNITS_PER_WORD;
}
else
- {
- /* Strip any conversions that don't change the mode. This
- is safe for every expression, except for a comparison
- expression because its signedness is derived from its
- operands.
+ offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
+ ptr[offset] = value;
+ }
+ return total_bytes;
+}
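/* A standalone sketch of the byte-by-byte encoding above for the common
   little-endian host/target case: byte N of the buffer receives bits
   [8*N, 8*N+7] of the constant.  A single uint64_t replaces the
   low/high HOST_WIDE_INT pair and the word-order juggling; the helper
   is illustrative only.  */
#include <stdint.h>

static int
sketch_encode_u64_le (uint64_t value, unsigned char *ptr, int len)
{
  int byte;

  if (len < 8)
    return 0;			/* Mirror the "buffer too small" failure.  */
  for (byte = 0; byte < 8; byte++)
    ptr[byte] = (unsigned char) (value >> (byte * 8));
  return 8;
}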
- Note that this is done as an internal manipulation within
- the constant folder, in order to find the simplest
- representation of the arguments so that their form can be
- studied. In any cases, the appropriate type conversions
- should be put back in the tree that will get out of the
- constant folder. */
- STRIP_NOPS (arg0);
+
+/* Subroutine of native_encode_expr. Encode the REAL_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
+
+static int
+native_encode_real (tree expr, unsigned char *ptr, int len)
+{
+ tree type = TREE_TYPE (expr);
+ int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int byte, offset, word, words;
+ unsigned char value;
+
+ /* There are always 32 bits in each long, no matter the size of
+     the host's long.  We handle floating point representations with
+ up to 192 bits. */
+ long tmp[6];
+
+ if (total_bytes > len)
+ return 0;
+ words = total_bytes / UNITS_PER_WORD;
+
+ real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
+
+ for (byte = 0; byte < total_bytes; byte++)
+ {
+ int bitpos = byte * BITS_PER_UNIT;
+ value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
+
+ if (total_bytes > UNITS_PER_WORD)
+ {
+ word = byte / UNITS_PER_WORD;
+ if (FLOAT_WORDS_BIG_ENDIAN)
+ word = (words - 1) - word;
+ offset = word * UNITS_PER_WORD;
+ if (BYTES_BIG_ENDIAN)
+ offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
+ else
+ offset += byte % UNITS_PER_WORD;
}
+ else
+ offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
+ ptr[offset] = value;
}
+ return total_bytes;
+}
- if (TREE_CODE_CLASS (code) == tcc_unary)
+/* Subroutine of native_encode_expr. Encode the COMPLEX_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
+
+static int
+native_encode_complex (tree expr, unsigned char *ptr, int len)
+{
+ int rsize, isize;
+ tree part;
+
+ part = TREE_REALPART (expr);
+ rsize = native_encode_expr (part, ptr, len);
+ if (rsize == 0)
+ return 0;
+ part = TREE_IMAGPART (expr);
+ isize = native_encode_expr (part, ptr+rsize, len-rsize);
+ if (isize != rsize)
+ return 0;
+ return rsize + isize;
+}
+
+
+/* Subroutine of native_encode_expr. Encode the VECTOR_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
+
+static int
+native_encode_vector (tree expr, unsigned char *ptr, int len)
+{
+ int i, size, offset, count;
+ tree itype, elem, elements;
+
+ offset = 0;
+ elements = TREE_VECTOR_CST_ELTS (expr);
+ count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
+ itype = TREE_TYPE (TREE_TYPE (expr));
+ size = GET_MODE_SIZE (TYPE_MODE (itype));
+ for (i = 0; i < count; i++)
{
- if (TREE_CODE (arg0) == COMPOUND_EXPR)
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
- else if (TREE_CODE (arg0) == COND_EXPR)
+ if (elements)
+ {
+ elem = TREE_VALUE (elements);
+ elements = TREE_CHAIN (elements);
+ }
+ else
+ elem = NULL_TREE;
+
+ if (elem)
+ {
+ if (native_encode_expr (elem, ptr+offset, len-offset) != size)
+ return 0;
+ }
+ else
+ {
+ if (offset + size > len)
+ return 0;
+ memset (ptr+offset, 0, size);
+ }
+ offset += size;
+ }
+ return offset;
+}
+
+
+/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
+ REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
+ buffer PTR of length LEN bytes. Return the number of bytes
+ placed in the buffer, or zero upon failure. */
+
+static int
+native_encode_expr (tree expr, unsigned char *ptr, int len)
+{
+ switch (TREE_CODE (expr))
+ {
+ case INTEGER_CST:
+ return native_encode_int (expr, ptr, len);
+
+ case REAL_CST:
+ return native_encode_real (expr, ptr, len);
+
+ case COMPLEX_CST:
+ return native_encode_complex (expr, ptr, len);
+
+ case VECTOR_CST:
+ return native_encode_vector (expr, ptr, len);
+
+ default:
+ return 0;
+ }
+}
+
+
+/* Subroutine of native_interpret_expr. Interpret the contents of
+ the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
+ If the buffer cannot be interpreted, return NULL_TREE. */
+
+static tree
+native_interpret_int (tree type, unsigned char *ptr, int len)
+{
+ int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int byte, offset, word, words;
+ unsigned char value;
+  unsigned HOST_WIDE_INT lo = 0;
+ HOST_WIDE_INT hi = 0;
+
+ if (total_bytes > len)
+ return NULL_TREE;
+ if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
+ return NULL_TREE;
+ words = total_bytes / UNITS_PER_WORD;
+
+ for (byte = 0; byte < total_bytes; byte++)
+ {
+ int bitpos = byte * BITS_PER_UNIT;
+ if (total_bytes > UNITS_PER_WORD)
+ {
+ word = byte / UNITS_PER_WORD;
+ if (WORDS_BIG_ENDIAN)
+ word = (words - 1) - word;
+ offset = word * UNITS_PER_WORD;
+ if (BYTES_BIG_ENDIAN)
+ offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
+ else
+ offset += byte % UNITS_PER_WORD;
+ }
+ else
+ offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
+ value = ptr[offset];
+
+ if (bitpos < HOST_BITS_PER_WIDE_INT)
+ lo |= (unsigned HOST_WIDE_INT) value << bitpos;
+ else
+ hi |= (unsigned HOST_WIDE_INT) value
+ << (bitpos - HOST_BITS_PER_WIDE_INT);
+ }
+
+ return force_fit_type (build_int_cst_wide (type, lo, hi),
+ 0, false, false);
+}
+
+
+/* Subroutine of native_interpret_expr. Interpret the contents of
+ the buffer PTR of length LEN as a REAL_CST of type TYPE.
+ If the buffer cannot be interpreted, return NULL_TREE. */
+
+static tree
+native_interpret_real (tree type, unsigned char *ptr, int len)
+{
+ enum machine_mode mode = TYPE_MODE (type);
+ int total_bytes = GET_MODE_SIZE (mode);
+ int byte, offset, word, words;
+ unsigned char value;
+ /* There are always 32 bits in each long, no matter the size of
+     the host's long.  We handle floating point representations with
+ up to 192 bits. */
+ REAL_VALUE_TYPE r;
+ long tmp[6];
+
+ total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ if (total_bytes > len || total_bytes > 24)
+ return NULL_TREE;
+ words = total_bytes / UNITS_PER_WORD;
+
+ memset (tmp, 0, sizeof (tmp));
+ for (byte = 0; byte < total_bytes; byte++)
+ {
+ int bitpos = byte * BITS_PER_UNIT;
+ if (total_bytes > UNITS_PER_WORD)
+ {
+ word = byte / UNITS_PER_WORD;
+ if (FLOAT_WORDS_BIG_ENDIAN)
+ word = (words - 1) - word;
+ offset = word * UNITS_PER_WORD;
+ if (BYTES_BIG_ENDIAN)
+ offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
+ else
+ offset += byte % UNITS_PER_WORD;
+ }
+ else
+ offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
+ value = ptr[offset];
+
+ tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
+ }
+
+ real_from_target (&r, tmp, mode);
+ return build_real (type, r);
+}
+
+
+/* Subroutine of native_interpret_expr. Interpret the contents of
+ the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
+ If the buffer cannot be interpreted, return NULL_TREE. */
+
+static tree
+native_interpret_complex (tree type, unsigned char *ptr, int len)
+{
+ tree etype, rpart, ipart;
+ int size;
+
+ etype = TREE_TYPE (type);
+ size = GET_MODE_SIZE (TYPE_MODE (etype));
+ if (size * 2 > len)
+ return NULL_TREE;
+ rpart = native_interpret_expr (etype, ptr, size);
+ if (!rpart)
+ return NULL_TREE;
+ ipart = native_interpret_expr (etype, ptr+size, size);
+ if (!ipart)
+ return NULL_TREE;
+ return build_complex (type, rpart, ipart);
+}
+
+
+/* Subroutine of native_interpret_expr. Interpret the contents of
+ the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
+ If the buffer cannot be interpreted, return NULL_TREE. */
+
+static tree
+native_interpret_vector (tree type, unsigned char *ptr, int len)
+{
+ tree etype, elem, elements;
+ int i, size, count;
+
+ etype = TREE_TYPE (type);
+ size = GET_MODE_SIZE (TYPE_MODE (etype));
+ count = TYPE_VECTOR_SUBPARTS (type);
+ if (size * count > len)
+ return NULL_TREE;
+
+ elements = NULL_TREE;
+ for (i = count - 1; i >= 0; i--)
+ {
+ elem = native_interpret_expr (etype, ptr+(i*size), size);
+ if (!elem)
+ return NULL_TREE;
+ elements = tree_cons (NULL_TREE, elem, elements);
+ }
+ return build_vector (type, elements);
+}
+
+
+/* Subroutine of fold_view_convert_expr. Interpret the contents of
+ the buffer PTR of length LEN as a constant of type TYPE. For
+ INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
+ we return a REAL_CST, etc... If the buffer cannot be interpreted,
+ return NULL_TREE. */
+
+static tree
+native_interpret_expr (tree type, unsigned char *ptr, int len)
+{
+ switch (TREE_CODE (type))
+ {
+ case INTEGER_TYPE:
+ case ENUMERAL_TYPE:
+ case BOOLEAN_TYPE:
+ return native_interpret_int (type, ptr, len);
+
+ case REAL_TYPE:
+ return native_interpret_real (type, ptr, len);
+
+ case COMPLEX_TYPE:
+ return native_interpret_complex (type, ptr, len);
+
+ case VECTOR_TYPE:
+ return native_interpret_vector (type, ptr, len);
+
+ default:
+ return NULL_TREE;
+ }
+}
+
+
+/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
+ TYPE at compile-time. If we're unable to perform the conversion
+ return NULL_TREE. */
+
+static tree
+fold_view_convert_expr (tree type, tree expr)
+{
+ /* We support up to 512-bit values (for V8DFmode). */
+ unsigned char buffer[64];
+ int len;
+
+ /* Check that the host and target are sane. */
+ if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
+ return NULL_TREE;
+
+ len = native_encode_expr (expr, buffer, sizeof (buffer));
+ if (len == 0)
+ return NULL_TREE;
+
+ return native_interpret_expr (type, buffer, len);
+}
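/* At the byte level, the VIEW_CONVERT_EXPR folding above performs the
   same reinterpretation one gets by copying a value's object
   representation into a different type.  A hypothetical host-side
   analogue for float -> uint32_t, assuming both are 4 bytes wide:  */
#include <stdint.h>
#include <string.h>

static uint32_t
sketch_view_convert_float_bits (float f)
{
  uint32_t bits;
  memcpy (&bits, &f, sizeof bits);	/* Reinterpret, don't convert.  */
  return bits;
}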
+
+
+/* Fold a unary expression of code CODE and type TYPE with operand
+ OP0. Return the folded expression if folding is successful.
+ Otherwise, return NULL_TREE. */
+
+tree
+fold_unary (enum tree_code code, tree type, tree op0)
+{
+ tree tem;
+ tree arg0;
+ enum tree_code_class kind = TREE_CODE_CLASS (code);
+
+ gcc_assert (IS_EXPR_CODE_CLASS (kind)
+ && TREE_CODE_LENGTH (code) == 1);
+
+ arg0 = op0;
+ if (arg0)
+ {
+ if (code == NOP_EXPR || code == CONVERT_EXPR
+ || code == FLOAT_EXPR || code == ABS_EXPR)
+ {
+ /* Don't use STRIP_NOPS, because signedness of argument type
+ matters. */
+ STRIP_SIGN_NOPS (arg0);
+ }
+ else
+ {
+ /* Strip any conversions that don't change the mode. This
+ is safe for every expression, except for a comparison
+ expression because its signedness is derived from its
+ operands.
+
+ Note that this is done as an internal manipulation within
+ the constant folder, in order to find the simplest
+ representation of the arguments so that their form can be
+ studied. In any cases, the appropriate type conversions
+ should be put back in the tree that will get out of the
+ constant folder. */
+ STRIP_NOPS (arg0);
+ }
+ }
+
+ if (TREE_CODE_CLASS (code) == tcc_unary)
+ {
+ if (TREE_CODE (arg0) == COMPOUND_EXPR)
+ return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
+ fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
+ else if (TREE_CODE (arg0) == COND_EXPR)
{
tree arg01 = TREE_OPERAND (arg0, 1);
tree arg02 = TREE_OPERAND (arg0, 2);
type via an object of identical or wider precision, neither
conversion is needed. */
if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
- && ((inter_int && final_int) || (inter_float && final_float))
+ && (((inter_int || inter_ptr) && final_int)
+ || (inter_float && final_float))
&& inter_prec >= final_prec)
return fold_build1 (code, type, TREE_OPERAND (op0, 0));
- the initial type is a pointer type and the precisions of the
intermediate and final types differ, or
- the final type is a pointer type and the precisions of the
- initial and intermediate types differ. */
+ initial and intermediate types differ, or
+ - the final type is a pointer type and the initial type is not, or
+ - the initial type is a pointer to an array and the final type
+ is not. */
if (! inside_float && ! inter_float && ! final_float
&& ! inside_vec && ! inter_vec && ! final_vec
- && (inter_prec > inside_prec || inter_prec > final_prec)
+ && (inter_prec >= inside_prec || inter_prec >= final_prec)
&& ! (inside_int && inter_int
&& inter_unsignedp != inside_unsignedp
&& inter_prec < final_prec)
&& ! (final_ptr && inside_prec != inter_prec)
&& ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
&& TYPE_MODE (type) == TYPE_MODE (inter_type))
- && ! final_ptr)
+ && final_ptr == inside_ptr
+ && ! (inside_ptr
+ && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
+ && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
return fold_build1 (code, type, TREE_OPERAND (op0, 0));
}
TREE_OPERAND (arg0, 1));
}
+ /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
+ of the same precision, and X is an integer type not narrower than
+ types T1 or T2, i.e. the cast (T2)X isn't an extension. */
+ if (INTEGRAL_TYPE_P (type)
+ && TREE_CODE (op0) == BIT_NOT_EXPR
+ && INTEGRAL_TYPE_P (TREE_TYPE (op0))
+ && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
+ || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
+ && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
+ {
+ tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
+ if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
+ && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
+ return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
+ }
+
tem = fold_convert_const (code, type, arg0);
return tem ? tem : NULL_TREE;
case VIEW_CONVERT_EXPR:
if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
- return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
- return NULL_TREE;
+ return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
+ return fold_view_convert_expr (type, op0);
case NEGATE_EXPR:
- if (negate_expr_p (arg0))
- return fold_convert (type, negate_expr (arg0));
+ tem = fold_negate_expr (arg0);
+ if (tem)
+ return fold_convert (type, tem);
return NULL_TREE;
case ABS_EXPR:
case CONJ_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
return fold_convert (type, arg0);
- else if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return build2 (COMPLEX_EXPR, type,
- TREE_OPERAND (arg0, 0),
- negate_expr (TREE_OPERAND (arg0, 1)));
- else if (TREE_CODE (arg0) == COMPLEX_CST)
- return build_complex (type, TREE_REALPART (arg0),
- negate_expr (TREE_IMAGPART (arg0)));
- else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold_build2 (TREE_CODE (arg0), type,
- fold_build1 (CONJ_EXPR, type,
- TREE_OPERAND (arg0, 0)),
- fold_build1 (CONJ_EXPR, type,
- TREE_OPERAND (arg0, 1)));
- else if (TREE_CODE (arg0) == CONJ_EXPR)
- return TREE_OPERAND (arg0, 0);
+ if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ {
+ tree itype = TREE_TYPE (type);
+ tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
+ tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
+ return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
+ }
+ if (TREE_CODE (arg0) == COMPLEX_CST)
+ {
+ tree itype = TREE_TYPE (type);
+ tree rpart = fold_convert (itype, TREE_REALPART (arg0));
+ tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
+ return build_complex (type, rpart, negate_expr (ipart));
+ }
+ if (TREE_CODE (arg0) == CONJ_EXPR)
+ return fold_convert (type, TREE_OPERAND (arg0, 0));
return NULL_TREE;
case BIT_NOT_EXPR:
and its values must be 0 or 1.
("true" is a fixed value perhaps depending on the language,
but we don't handle values other than 1 correctly yet.) */
- tem = invert_truthvalue (arg0);
- /* Avoid infinite recursion. */
- if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
+ tem = fold_truth_not_expr (arg0);
+ if (!tem)
return NULL_TREE;
return fold_convert (type, tem);
case REALPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
- return NULL_TREE;
- else if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ return fold_convert (type, arg0);
+ if (TREE_CODE (arg0) == COMPLEX_EXPR)
return omit_one_operand (type, TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
- else if (TREE_CODE (arg0) == COMPLEX_CST)
- return TREE_REALPART (arg0);
- else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold_build2 (TREE_CODE (arg0), type,
- fold_build1 (REALPART_EXPR, type,
- TREE_OPERAND (arg0, 0)),
- fold_build1 (REALPART_EXPR, type,
- TREE_OPERAND (arg0, 1)));
+ if (TREE_CODE (arg0) == COMPLEX_CST)
+ return fold_convert (type, TREE_REALPART (arg0));
+ if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ {
+ tree itype = TREE_TYPE (TREE_TYPE (arg0));
+ tem = fold_build2 (TREE_CODE (arg0), itype,
+ fold_build1 (REALPART_EXPR, itype,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (REALPART_EXPR, itype,
+ TREE_OPERAND (arg0, 1)));
+ return fold_convert (type, tem);
+ }
+ if (TREE_CODE (arg0) == CONJ_EXPR)
+ {
+ tree itype = TREE_TYPE (TREE_TYPE (arg0));
+ tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
+ return fold_convert (type, tem);
+ }
return NULL_TREE;
case IMAGPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
return fold_convert (type, integer_zero_node);
- else if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ if (TREE_CODE (arg0) == COMPLEX_EXPR)
return omit_one_operand (type, TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg0, 0));
- else if (TREE_CODE (arg0) == COMPLEX_CST)
- return TREE_IMAGPART (arg0);
- else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold_build2 (TREE_CODE (arg0), type,
- fold_build1 (IMAGPART_EXPR, type,
- TREE_OPERAND (arg0, 0)),
- fold_build1 (IMAGPART_EXPR, type,
- TREE_OPERAND (arg0, 1)));
+ if (TREE_CODE (arg0) == COMPLEX_CST)
+ return fold_convert (type, TREE_IMAGPART (arg0));
+ if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ {
+ tree itype = TREE_TYPE (TREE_TYPE (arg0));
+ tem = fold_build2 (TREE_CODE (arg0), itype,
+ fold_build1 (IMAGPART_EXPR, itype,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (IMAGPART_EXPR, itype,
+ TREE_OPERAND (arg0, 1)));
+ return fold_convert (type, tem);
+ }
+ if (TREE_CODE (arg0) == CONJ_EXPR)
+ {
+ tree itype = TREE_TYPE (TREE_TYPE (arg0));
+ tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
+ return fold_convert (type, negate_expr (tem));
+ }
return NULL_TREE;
default:
return NULL_TREE;
}
-/* Fold a binary expression of code CODE and type TYPE with operands
- OP0 and OP1. Return the folded expression if folding is
- successful. Otherwise, return NULL_TREE. */
+/* Subroutine of fold_binary. This routine performs all of the
+ transformations that are common to the equality/inequality
+ operators (EQ_EXPR and NE_EXPR) and the ordering operators
+ (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
+ fold_binary should go through fold_binary instead. Fold a comparison with
+ tree code CODE and type TYPE with operands OP0 and OP1. Return
+ the folded comparison or NULL_TREE. */
-tree
-fold_binary (enum tree_code code, tree type, tree op0, tree op1)
+static tree
+fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
- tree t1 = NULL_TREE;
- tree tem;
- tree arg0 = NULL_TREE, arg1 = NULL_TREE;
- enum tree_code_class kind = TREE_CODE_CLASS (code);
-
- gcc_assert (IS_EXPR_CODE_CLASS (kind)
- && TREE_CODE_LENGTH (code) == 2
- && op0 != NULL_TREE
- && op1 != NULL_TREE);
+ tree arg0, arg1, tem;
arg0 = op0;
arg1 = op1;
- /* Strip any conversions that don't change the mode. This is
- safe for every expression, except for a comparison expression
- because its signedness is derived from its operands. So, in
- the latter case, only strip conversions that don't change the
- signedness.
+ STRIP_SIGN_NOPS (arg0);
+ STRIP_SIGN_NOPS (arg1);
- Note that this is done as an internal manipulation within the
- constant folder, in order to find the simplest representation
- of the arguments so that their form can be studied. In any
- cases, the appropriate type conversions should be put back in
- the tree that will get out of the constant folder. */
+ tem = fold_relational_const (code, type, arg0, arg1);
+ if (tem != NULL_TREE)
+ return tem;
- if (kind == tcc_comparison)
- {
- STRIP_SIGN_NOPS (arg0);
- STRIP_SIGN_NOPS (arg1);
- }
- else
- {
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
+ /* If one arg is a real or integer constant, put it last. */
+ if (tree_swap_operands_p (arg0, arg1, true))
+ return fold_build2 (swap_tree_comparison (code), type, op1, op0);
+
+  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
+ if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
+ && !TYPE_UNSIGNED (TREE_TYPE (arg1))
+ && !(flag_wrapv || flag_trapv))
+ && (TREE_CODE (arg1) == INTEGER_CST
+ && !TREE_OVERFLOW (arg1)))
+ {
+ tree const1 = TREE_OPERAND (arg0, 1);
+ tree const2 = arg1;
+ tree variable = TREE_OPERAND (arg0, 0);
+ tree lhs;
+ int lhs_add;
+ lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
+
+ lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
+ TREE_TYPE (arg1), const2, const1);
+ if (TREE_CODE (lhs) == TREE_CODE (arg1)
+ && (TREE_CODE (lhs) != INTEGER_CST
+ || !TREE_OVERFLOW (lhs)))
+ return fold_build2 (code, type, variable, lhs);
+ }
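/* A numeric illustration of the transform above, under the assumption
   that signed overflow is undefined: X + 20 < 30 may be rewritten as
   X < 10 because the folded constant 30 - 20 does not overflow.  The
   two helpers below are hypothetical and only demonstrate the
   equivalence.  */
static int
sketch_cmp_before (int x)
{
  return x + 20 < 30;		/* Valid only if x + 20 cannot overflow.  */
}

static int
sketch_cmp_after (int x)
{
  return x < 10;		/* Equivalent folded form.  */
}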
+
+ /* If this is a comparison of two exprs that look like an ARRAY_REF of the
+ same object, then we can fold this to a comparison of the two offsets in
+ signed size type. This is possible because pointer arithmetic is
+ restricted to remain within an object and overflow on pointer differences
+ is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
+ if (POINTER_TYPE_P (TREE_TYPE (arg0))
+ && !flag_wrapv && !flag_trapv)
+ {
+ tree base0, offset0, base1, offset1;
+
+ if (extract_array_ref (arg0, &base0, &offset0)
+ && extract_array_ref (arg1, &base1, &offset1)
+ && operand_equal_p (base0, base1, 0))
+ {
+ tree signed_size_type_node;
+ signed_size_type_node = signed_type_for (size_type_node);
+
+ /* By converting to the signed size type we cover middle-end pointer
+ arithmetic, which operates on unsigned pointer types of size-type
+ width, and ARRAY_REF offsets, which are properly sign- or
+ zero-extended from their type in case it is narrower than
+ the size type. */
+ if (offset0 == NULL_TREE)
+ offset0 = build_int_cst (signed_size_type_node, 0);
+ else
+ offset0 = fold_convert (signed_size_type_node, offset0);
+ if (offset1 == NULL_TREE)
+ offset1 = build_int_cst (signed_size_type_node, 0);
+ else
+ offset1 = fold_convert (signed_size_type_node, offset1);
+
+ return fold_build2 (code, type, offset0, offset1);
+ }
+ }
+
+ /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
+ X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
+ the resulting offset is smaller in absolute value than the
+ original one. */
+ if (!(flag_wrapv || flag_trapv)
+ && !TYPE_UNSIGNED (TREE_TYPE (arg0))
+ && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
+ && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
+ && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
+ && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
+ {
+ tree const1 = TREE_OPERAND (arg0, 1);
+ tree const2 = TREE_OPERAND (arg1, 1);
+ tree variable1 = TREE_OPERAND (arg0, 0);
+ tree variable2 = TREE_OPERAND (arg1, 0);
+ tree cst;
+
+ /* Put the constant on the side where it doesn't overflow and is
+ of lower absolute value than before. */
+ cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
+ ? MINUS_EXPR : PLUS_EXPR,
+ const2, const1, 0);
+ if (!TREE_OVERFLOW (cst)
+ && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
+ return fold_build2 (code, type,
+ variable1,
+ fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
+ variable2, cst));
+
+ cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
+ ? MINUS_EXPR : PLUS_EXPR,
+ const1, const2, 0);
+ if (!TREE_OVERFLOW (cst)
+ && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
+ return fold_build2 (code, type,
+ fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
+ variable1, cst),
+ variable2);
}
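/* Worked example of the two-sided transform above: X + 100 < Y + 103
   becomes X < Y + 3, because the combined constant 103 - 100 = 3 has
   the same sign as and smaller absolute value than 103, so the rewrite
   cannot introduce an overflow the original did not already have.
   Hypothetical demonstration only.  */
static int
sketch_two_sided_before (int x, int y)
{
  return x + 100 < y + 103;	/* Assumes neither addition overflows.  */
}

static int
sketch_two_sided_after (int x, int y)
{
  return x < y + 3;		/* Smaller constant, same result.  */
}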
- /* Note that TREE_CONSTANT isn't enough: static var addresses are
- constant but we can't do arithmetic on them. */
- if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
- || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
- || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
- || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
+ if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
- if (kind == tcc_binary)
- tem = const_binop (code, arg0, arg1, 0);
- else if (kind == tcc_comparison)
- tem = fold_relational_const (code, type, arg0, arg1);
- else
- tem = NULL_TREE;
+ tree targ0 = strip_float_extensions (arg0);
+ tree targ1 = strip_float_extensions (arg1);
+ tree newtype = TREE_TYPE (targ0);
- if (tem != NULL_TREE)
- {
- if (TREE_TYPE (tem) != type)
- tem = fold_convert (type, tem);
- return tem;
+ if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
+ newtype = TREE_TYPE (targ1);
+
+ /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
+ if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
+ return fold_build2 (code, type, fold_convert (newtype, targ0),
+ fold_convert (newtype, targ1));
+
+ /* (-a) CMP (-b) -> b CMP a */
+ if (TREE_CODE (arg0) == NEGATE_EXPR
+ && TREE_CODE (arg1) == NEGATE_EXPR)
+ return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
+ TREE_OPERAND (arg0, 0));
+
+ if (TREE_CODE (arg1) == REAL_CST)
+ {
+ REAL_VALUE_TYPE cst;
+ cst = TREE_REAL_CST (arg1);
+
+ /* (-a) CMP CST -> a swap(CMP) (-CST) */
+ if (TREE_CODE (arg0) == NEGATE_EXPR)
+ return fold_build2 (swap_tree_comparison (code), type,
+ TREE_OPERAND (arg0, 0),
+ build_real (TREE_TYPE (arg1),
+ REAL_VALUE_NEGATE (cst)));
+
+ /* IEEE doesn't distinguish +0 and -0 in comparisons. */
+ /* a CMP (-0) -> a CMP 0 */
+ if (REAL_VALUE_MINUS_ZERO (cst))
+ return fold_build2 (code, type, arg0,
+ build_real (TREE_TYPE (arg1), dconst0));
+
+ /* x != NaN is always true, other ops are always false. */
+ if (REAL_VALUE_ISNAN (cst)
+ && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
+ {
+ tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
+ return omit_one_operand (type, tem, arg0);
+ }
+
+ /* Fold comparisons against infinity. */
+ if (REAL_VALUE_ISINF (cst))
+ {
+ tem = fold_inf_compare (code, type, arg0, arg1);
+ if (tem != NULL_TREE)
+ return tem;
+ }
+ }
+
+ /* If this is a comparison of a real constant with a PLUS_EXPR
+ or a MINUS_EXPR of a real constant, we can convert it into a
+ comparison with a revised real constant as long as no overflow
+ occurs when unsafe_math_optimizations are enabled. */
+ if (flag_unsafe_math_optimizations
+ && TREE_CODE (arg1) == REAL_CST
+ && (TREE_CODE (arg0) == PLUS_EXPR
+ || TREE_CODE (arg0) == MINUS_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+ && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
+ ? MINUS_EXPR : PLUS_EXPR,
+ arg1, TREE_OPERAND (arg0, 1), 0))
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+
+ /* Likewise, we can simplify a comparison of a real constant with
+ a MINUS_EXPR whose first operand is also a real constant, i.e.
+ (c1 - x) < c2 becomes x > c1-c2. */
+ if (flag_unsafe_math_optimizations
+ && TREE_CODE (arg1) == REAL_CST
+ && TREE_CODE (arg0) == MINUS_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
+ && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
+ arg1, 0))
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold_build2 (swap_tree_comparison (code), type,
+ TREE_OPERAND (arg0, 1), tem);
+
+ /* Fold comparisons against built-in math functions. */
+ if (TREE_CODE (arg1) == REAL_CST
+ && flag_unsafe_math_optimizations
+ && ! flag_errno_math)
+ {
+ enum built_in_function fcode = builtin_mathfn_code (arg0);
+
+ if (fcode != END_BUILTINS)
+ {
+ tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
+ if (tem != NULL_TREE)
+ return tem;
+ }
+ }
+ }
+
+ /* Convert foo++ == CONST into ++foo == CONST + INCR. */
+ if (TREE_CONSTANT (arg1)
+ && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
+ || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
+ /* This optimization is invalid for ordered comparisons
+ if CONST+INCR overflows or if foo+incr might overflow.
+ This optimization is invalid for floating point due to rounding.
+ For pointer types we assume overflow doesn't happen. */
+ && (POINTER_TYPE_P (TREE_TYPE (arg0))
+ || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+ && (code == EQ_EXPR || code == NE_EXPR))))
+ {
+ tree varop, newconst;
+
+ if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
+ {
+ newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
+ arg1, TREE_OPERAND (arg0, 1));
+ varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
+ TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg0, 1));
+ }
+ else
+ {
+ newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
+ arg1, TREE_OPERAND (arg0, 1));
+ varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
+ TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg0, 1));
+ }
+
+
+ /* If VAROP is a reference to a bitfield, we must mask
+ the constant by the width of the field. */
+ if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
+ && host_integerp (DECL_SIZE (TREE_OPERAND
+ (TREE_OPERAND (varop, 0), 1)), 1))
+ {
+ tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
+ HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
+ tree folded_compare, shift;
+
+ /* First check whether the comparison would come out
+ always the same. If we don't do that we would
+ change the meaning with the masking. */
+ folded_compare = fold_build2 (code, type,
+ TREE_OPERAND (varop, 0), arg1);
+ if (TREE_CODE (folded_compare) == INTEGER_CST)
+ return omit_one_operand (type, folded_compare, varop);
+
+ shift = build_int_cst (NULL_TREE,
+ TYPE_PRECISION (TREE_TYPE (varop)) - size);
+ shift = fold_convert (TREE_TYPE (varop), shift);
+ newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
+ newconst, shift);
+ newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
+ newconst, shift);
+ }
+
+ return fold_build2 (code, type, varop, newconst);
+ }
+
+ if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
+ && (TREE_CODE (arg0) == NOP_EXPR
+ || TREE_CODE (arg0) == CONVERT_EXPR))
+ {
+ /* If we are widening one operand of an integer comparison,
+ see if the other operand is similarly being widened. Perhaps we
+ can do the comparison in the narrower type. */
+ tem = fold_widened_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+
+ /* Or if we are changing signedness. */
+ tem = fold_sign_changed_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+ }
+
+ /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
+ constant, we can simplify it. */
+ if (TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_CODE (arg0) == MIN_EXPR
+ || TREE_CODE (arg0) == MAX_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ {
+ tem = optimize_minmax_comparison (code, type, op0, op1);
+ if (tem)
+ return tem;
+ }
+
+ /* Simplify comparison of something with itself. (For IEEE
+ floating-point, we can only do some of these simplifications.) */
+ if (operand_equal_p (arg0, arg1, 0))
+ {
+ switch (code)
+ {
+ case EQ_EXPR:
+ if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
+ || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
+ return constant_boolean_node (1, type);
+ break;
+
+ case GE_EXPR:
+ case LE_EXPR:
+ if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
+ || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
+ return constant_boolean_node (1, type);
+ return fold_build2 (EQ_EXPR, type, arg0, arg1);
+
+ case NE_EXPR:
+ /* For NE, we can only do this simplification if integer
+ or we don't honor IEEE floating point NaNs. */
+ if (FLOAT_TYPE_P (TREE_TYPE (arg0))
+ && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
+ break;
+ /* ... fall through ... */
+ case GT_EXPR:
+ case LT_EXPR:
+ return constant_boolean_node (0, type);
+ default:
+ gcc_unreachable ();
+ }
+ }
+
+ /* If we are comparing an expression that just has comparisons
+ of two integer values, arithmetic expressions of those comparisons,
+ and constants, we can simplify it. There are only three cases
+ to check: the two values can either be equal, the first can be
+ greater, or the second can be greater. Fold the expression for
+ those three values. Since each value must be 0 or 1, we have
+ eight possibilities, each of which corresponds to the constant 0
+ or 1 or one of the six possible comparisons.
+
+ This handles common cases like (a > b) == 0 but also handles
+ expressions like ((x > y) - (y > x)) > 0, which supposedly
+ occur in macroized code. */
+
+ if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
+ {
+ tree cval1 = 0, cval2 = 0;
+ int save_p = 0;
+
+ if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
+ /* Don't handle degenerate cases here; they should already
+ have been handled anyway. */
+ && cval1 != 0 && cval2 != 0
+ && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
+ && TREE_TYPE (cval1) == TREE_TYPE (cval2)
+ && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
+ && TYPE_MAX_VALUE (TREE_TYPE (cval1))
+ && TYPE_MAX_VALUE (TREE_TYPE (cval2))
+ && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
+ TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
+ {
+ tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
+ tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
+
+ /* We can't just pass T to eval_subst in case cval1 or cval2
+ was the same as ARG1. */
+
+ tree high_result
+ = fold_build2 (code, type,
+ eval_subst (arg0, cval1, maxval,
+ cval2, minval),
+ arg1);
+ tree equal_result
+ = fold_build2 (code, type,
+ eval_subst (arg0, cval1, maxval,
+ cval2, maxval),
+ arg1);
+ tree low_result
+ = fold_build2 (code, type,
+ eval_subst (arg0, cval1, minval,
+ cval2, maxval),
+ arg1);
+
+ /* All three of these results should be 0 or 1. Confirm they are.
+ Then use those values to select the proper code to use. */
+
+ if (TREE_CODE (high_result) == INTEGER_CST
+ && TREE_CODE (equal_result) == INTEGER_CST
+ && TREE_CODE (low_result) == INTEGER_CST)
+ {
+ /* Make a 3-bit mask with the high-order bit being the
+ value for `>', the next for '=', and the low for '<'. */
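+ For example, mask value 5 (0b101) means the result is 1 exactly
+ when the two values differ, so the comparison folds to NE_EXPR. */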
+ switch ((integer_onep (high_result) * 4)
+ + (integer_onep (equal_result) * 2)
+ + integer_onep (low_result))
+ {
+ case 0:
+ /* Always false. */
+ return omit_one_operand (type, integer_zero_node, arg0);
+ case 1:
+ code = LT_EXPR;
+ break;
+ case 2:
+ code = EQ_EXPR;
+ break;
+ case 3:
+ code = LE_EXPR;
+ break;
+ case 4:
+ code = GT_EXPR;
+ break;
+ case 5:
+ code = NE_EXPR;
+ break;
+ case 6:
+ code = GE_EXPR;
+ break;
+ case 7:
+ /* Always true. */
+ return omit_one_operand (type, integer_one_node, arg0);
+ }
+
+ if (save_p)
+ return save_expr (build2 (code, type, cval1, cval2));
+ return fold_build2 (code, type, cval1, cval2);
+ }
+ }
+ }
+
+ /* Fold a comparison of the address of COMPONENT_REFs with the same
+ type and component to a comparison of the address of the base
+ object. In short, &x->a OP &y->a to x OP y and
+ &x->a OP &y.a to x OP &y */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
+ && TREE_CODE (arg1) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
+ {
+ tree cref0 = TREE_OPERAND (arg0, 0);
+ tree cref1 = TREE_OPERAND (arg1, 0);
+ if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
+ {
+ tree op0 = TREE_OPERAND (cref0, 0);
+ tree op1 = TREE_OPERAND (cref1, 0);
+ return fold_build2 (code, type,
+ build_fold_addr_expr (op0),
+ build_fold_addr_expr (op1));
+ }
+ }
+
+ /* We can fold X/C1 op C2 where C1 and C2 are integer constants
+ into a single range test. */
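+ /* For instance, with unsigned X, X/4 == 2 holds exactly when
+ 8 <= X && X <= 11, which fold_div_compare expresses as a range test. */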
+ if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
+ || TREE_CODE (arg0) == EXACT_DIV_EXPR)
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !integer_zerop (TREE_OPERAND (arg0, 1))
+ && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
+ && !TREE_OVERFLOW (arg1))
+ {
+ tem = fold_div_compare (code, type, arg0, arg1);
+ if (tem != NULL_TREE)
+ return tem;
+ }
+
+ return NULL_TREE;
+}
+
+
+/* Subroutine of fold_binary. Optimize complex multiplications of the
+ form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
+ argument EXPR represents the expression "z" of type TYPE. */
+
+static tree
+fold_mult_zconjz (tree type, tree expr)
+{
+ tree itype = TREE_TYPE (type);
+ tree rpart, ipart, tem;
+
+ if (TREE_CODE (expr) == COMPLEX_EXPR)
+ {
+ rpart = TREE_OPERAND (expr, 0);
+ ipart = TREE_OPERAND (expr, 1);
+ }
+ else if (TREE_CODE (expr) == COMPLEX_CST)
+ {
+ rpart = TREE_REALPART (expr);
+ ipart = TREE_IMAGPART (expr);
+ }
+ else
+ {
+ expr = save_expr (expr);
+ rpart = fold_build1 (REALPART_EXPR, itype, expr);
+ ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
+ }
+
+ rpart = save_expr (rpart);
+ ipart = save_expr (ipart);
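+ /* z * conj(z) = (a + bi)(a - bi) = a*a + b*b, so the result has a
+ zero imaginary part. */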
+ tem = fold_build2 (PLUS_EXPR, itype,
+ fold_build2 (MULT_EXPR, itype, rpart, rpart),
+ fold_build2 (MULT_EXPR, itype, ipart, ipart));
+ return fold_build2 (COMPLEX_EXPR, type, tem,
+ fold_convert (itype, integer_zero_node));
+}
+
+
+/* Fold a binary expression of code CODE and type TYPE with operands
+ OP0 and OP1. Return the folded expression if folding is
+ successful. Otherwise, return NULL_TREE. */
+
+tree
+fold_binary (enum tree_code code, tree type, tree op0, tree op1)
+{
+ enum tree_code_class kind = TREE_CODE_CLASS (code);
+ tree arg0, arg1, tem;
+ tree t1 = NULL_TREE;
+
+ gcc_assert (IS_EXPR_CODE_CLASS (kind)
+ && TREE_CODE_LENGTH (code) == 2
+ && op0 != NULL_TREE
+ && op1 != NULL_TREE);
+
+ arg0 = op0;
+ arg1 = op1;
+
+ /* Strip any conversions that don't change the mode. This is
+ safe for every expression, except for a comparison expression
+ because its signedness is derived from its operands. So, in
+ the latter case, only strip conversions that don't change the
+ signedness.
+
+ Note that this is done as an internal manipulation within the
+ constant folder, in order to find the simplest representation
+ of the arguments so that their form can be studied. In any
+ case, the appropriate type conversions should be put back in
+ the tree that will get out of the constant folder. */
+
+ if (kind == tcc_comparison)
+ {
+ STRIP_SIGN_NOPS (arg0);
+ STRIP_SIGN_NOPS (arg1);
+ }
+ else
+ {
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+ }
+
+ /* Note that TREE_CONSTANT isn't enough: static var addresses are
+ constant but we can't do arithmetic on them. */
+ if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+ || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
+ || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
+ || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
+ {
+ if (kind == tcc_binary)
+ tem = const_binop (code, arg0, arg1, 0);
+ else if (kind == tcc_comparison)
+ tem = fold_relational_const (code, type, arg0, arg1);
+ else
+ tem = NULL_TREE;
+
+ if (tem != NULL_TREE)
+ {
+ if (TREE_TYPE (tem) != type)
+ tem = fold_convert (type, tem);
+ return tem;
}
}
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (arg0) == NEGATE_EXPR
&& integer_onep (arg1))
- return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
+ return fold_build1 (BIT_NOT_EXPR, type,
+ fold_convert (type, TREE_OPERAND (arg0, 0)));
/* Convert -1 - A to ~A. */
if (INTEGRAL_TYPE_P (type)
/* (-A) * (-B) -> A * B */
if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
return fold_build2 (MULT_EXPR, type,
- TREE_OPERAND (arg0, 0),
- negate_expr (arg1));
+ fold_convert (type, TREE_OPERAND (arg0, 0)),
+ fold_convert (type, negate_expr (arg1)));
if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
return fold_build2 (MULT_EXPR, type,
- negate_expr (arg0),
- TREE_OPERAND (arg1, 0));
+ fold_convert (type, negate_expr (arg0)),
+ fold_convert (type, TREE_OPERAND (arg1, 0)));
if (! FLOAT_TYPE_P (type))
{
code, NULL_TREE)))
return fold_convert (type, tem);
+ /* Optimize z * conj(z) for integer complex numbers. */
+ if (TREE_CODE (arg0) == CONJ_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return fold_mult_zconjz (type, arg1);
+ if (TREE_CODE (arg1) == CONJ_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return fold_mult_zconjz (type, arg0);
}
else
{
}
}
+ /* Optimize z * conj(z) for floating point complex numbers.
+ Guarded by flag_unsafe_math_optimizations as non-finite
+ imaginary components don't produce scalar results. */
+ if (flag_unsafe_math_optimizations
+ && TREE_CODE (arg0) == CONJ_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return fold_mult_zconjz (type, arg1);
+ if (flag_unsafe_math_optimizations
+ && TREE_CODE (arg1) == CONJ_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return fold_mult_zconjz (type, arg0);
+
if (flag_unsafe_math_optimizations)
{
enum built_in_function fcode0 = builtin_mathfn_code (arg0);
return omit_one_operand (type, t1, arg0);
}
+ /* Canonicalize (X & C1) | C2. */
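+ /* For example, (X & 0x3c) | 0x30 becomes (X & 0x0c) | 0x30, since bits
+ of C1 already covered by C2 are redundant; (C1&C2) == C1 yields just C2,
+ and (C1|C2) == ~0 yields X | C2. */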
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ {
+ unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
+ int width = TYPE_PRECISION (type);
+ hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
+ lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
+ hi2 = TREE_INT_CST_HIGH (arg1);
+ lo2 = TREE_INT_CST_LOW (arg1);
+
+ /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
+ if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
+ return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
+
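+ /* Build a mask of the type's precision in (mlo, mhi) so that bits
+ above the precision are ignored in the tests below. */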
+ if (width > HOST_BITS_PER_WIDE_INT)
+ {
+ mhi = (unsigned HOST_WIDE_INT) -1
+ >> (2 * HOST_BITS_PER_WIDE_INT - width);
+ mlo = -1;
+ }
+ else
+ {
+ mhi = 0;
+ mlo = (unsigned HOST_WIDE_INT) -1
+ >> (HOST_BITS_PER_WIDE_INT - width);
+ }
+
+ /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
+ if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
+ return fold_build2 (BIT_IOR_EXPR, type,
+ TREE_OPERAND (arg0, 0), arg1);
+
+ /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
+ hi1 &= mhi;
+ lo1 &= mlo;
+ if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
+ return fold_build2 (BIT_IOR_EXPR, type,
+ fold_build2 (BIT_AND_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ build_int_cst_wide (type,
+ lo1 & ~lo2,
+ hi1 & ~hi2)),
+ arg1);
+ }
+
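+ /* In the following comments, (A, B) denotes evaluating A for its
+ side effects and yielding B, as built by omit_one_operand. */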
+ /* (X & Y) | Y is (X, Y). */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
+ return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
+ /* (X & Y) | X is (Y, X). */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
+ return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
+ /* X | (X & Y) is (Y, X). */
+ if (TREE_CODE (arg1) == BIT_AND_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
+ && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
+ return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
+ /* X | (Y & X) is (Y, X). */
+ if (TREE_CODE (arg1) == BIT_AND_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
+ && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
+ return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
+
t1 = distribute_bit_expr (code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
fold_convert (type, TREE_OPERAND (arg0, 0)),
fold_convert (type, TREE_OPERAND (arg1, 0)));
- /* See if this can be simplified into a rotate first. If that
- is unsuccessful continue in the association code. */
- goto bit_rotate;
+ /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && integer_onep (TREE_OPERAND (arg0, 1))
+ && integer_onep (arg1))
+ return fold_build2 (EQ_EXPR, type, arg0,
+ build_int_cst (TREE_TYPE (arg0), 0));
- case BIT_AND_EXPR:
- if (integer_all_onesp (arg1))
+ /* Fold (X & Y) ^ Y as ~X & Y. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
+ {
+ tem = fold_convert (type, TREE_OPERAND (arg0, 0));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_build1 (BIT_NOT_EXPR, type, tem),
+ fold_convert (type, arg1));
+ }
+ /* Fold (X & Y) ^ X as ~Y & X. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
+ {
+ tem = fold_convert (type, TREE_OPERAND (arg0, 1));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_build1 (BIT_NOT_EXPR, type, tem),
+ fold_convert (type, arg1));
+ }
+ /* Fold X ^ (X & Y) as X & ~Y. */
+ if (TREE_CODE (arg1) == BIT_AND_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ {
+ tem = fold_convert (type, TREE_OPERAND (arg1, 1));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_convert (type, arg0),
+ fold_build1 (BIT_NOT_EXPR, type, tem));
+ }
+ /* Fold X ^ (Y & X) as ~Y & X. */
+ if (TREE_CODE (arg1) == BIT_AND_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
+ && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
+ {
+ tem = fold_convert (type, TREE_OPERAND (arg1, 0));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_build1 (BIT_NOT_EXPR, type, tem),
+ fold_convert (type, arg0));
+ }
+
+ /* See if this can be simplified into a rotate first. If that
+ is unsuccessful continue in the association code. */
+ goto bit_rotate;
+
+ case BIT_AND_EXPR:
+ if (integer_all_onesp (arg1))
return non_lvalue (fold_convert (type, arg0));
if (integer_zerop (arg1))
return omit_one_operand (type, arg1, arg0);
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand (type, integer_zero_node, arg0);
+ /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
+ if (TREE_CODE (arg0) == BIT_IOR_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ return fold_build2 (BIT_IOR_EXPR, type,
+ fold_build2 (BIT_AND_EXPR, type,
+ TREE_OPERAND (arg0, 0), arg1),
+ fold_build2 (BIT_AND_EXPR, type,
+ TREE_OPERAND (arg0, 1), arg1));
+
+ /* (X | Y) & Y is (X, Y). */
+ if (TREE_CODE (arg0) == BIT_IOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
+ return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
+ /* (X | Y) & X is (Y, X). */
+ if (TREE_CODE (arg0) == BIT_IOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
+ return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
+ /* X & (X | Y) is (Y, X). */
+ if (TREE_CODE (arg1) == BIT_IOR_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
+ && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
+ return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
+ /* X & (Y | X) is (Y, X). */
+ if (TREE_CODE (arg1) == BIT_IOR_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
+ && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
+ return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
+
+ /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && integer_onep (TREE_OPERAND (arg0, 1))
+ && integer_onep (arg1))
+ {
+ tem = TREE_OPERAND (arg0, 0);
+ return fold_build2 (EQ_EXPR, type,
+ fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
+ build_int_cst (TREE_TYPE (tem), 1)),
+ build_int_cst (TREE_TYPE (tem), 0));
+ }
+ /* Fold ~X & 1 as (X & 1) == 0. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && integer_onep (arg1))
+ {
+ tem = TREE_OPERAND (arg0, 0);
+ return fold_build2 (EQ_EXPR, type,
+ fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
+ build_int_cst (TREE_TYPE (tem), 1)),
+ build_int_cst (TREE_TYPE (tem), 0));
+ }
+
+ /* Fold (X ^ Y) & Y as ~X & Y. */
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
+ {
+ tem = fold_convert (type, TREE_OPERAND (arg0, 0));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_build1 (BIT_NOT_EXPR, type, tem),
+ fold_convert (type, arg1));
+ }
+ /* Fold (X ^ Y) & X as ~Y & X. */
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
+ {
+ tem = fold_convert (type, TREE_OPERAND (arg0, 1));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_build1 (BIT_NOT_EXPR, type, tem),
+ fold_convert (type, arg1));
+ }
+ /* Fold X & (X ^ Y) as X & ~Y. */
+ if (TREE_CODE (arg1) == BIT_XOR_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ {
+ tem = fold_convert (type, TREE_OPERAND (arg1, 1));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_convert (type, arg0),
+ fold_build1 (BIT_NOT_EXPR, type, tem));
+ }
+ /* Fold X & (Y ^ X) as ~Y & X. */
+ if (TREE_CODE (arg1) == BIT_XOR_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
+ && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
+ {
+ tem = fold_convert (type, TREE_OPERAND (arg1, 0));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_build1 (BIT_NOT_EXPR, type, tem),
+ fold_convert (type, arg0));
+ }
+
t1 = distribute_bit_expr (code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
return NULL_TREE;
/* Optimize A / A to 1.0 if we don't care about
- NaNs or Infinities. */
- if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
+ NaNs or Infinities. Skip the transformation
+ for non-real operands. */
+ if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
+ && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
&& ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
&& operand_equal_p (arg0, arg1, 0))
{
return omit_two_operands (type, r, arg0, arg1);
}
+ /* The complex version of the above A / A optimization. */
+ if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
+ && operand_equal_p (arg0, arg1, 0))
+ {
+ tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
+ if (! HONOR_NANS (TYPE_MODE (elem_type))
+ && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
+ {
+ tree r = build_real (elem_type, dconst1);
+ /* omit_two_operands will call fold_convert for us. */
+ return omit_two_operands (type, r, arg0, arg1);
+ }
+ }
+
/* (-A) / (-B) -> A / B */
if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
return fold_build2 (RDIV_EXPR, type,
return NULL_TREE;
case TRUNC_DIV_EXPR:
- case ROUND_DIV_EXPR:
case FLOOR_DIV_EXPR:
+ /* Simplify A / (B << N) where A and B are positive and B is
+ a power of 2, to A >> (N + log2(B)). */
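+ /* For example, A / (4 << N) becomes A >> (N + 2) when A is known
+ to be non-negative. */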
+ if (TREE_CODE (arg1) == LSHIFT_EXPR
+ && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
+ {
+ tree sval = TREE_OPERAND (arg1, 0);
+ if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
+ {
+ tree sh_cnt = TREE_OPERAND (arg1, 1);
+ unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
+
+ sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
+ sh_cnt, build_int_cst (NULL_TREE, pow2));
+ return fold_build2 (RSHIFT_EXPR, type,
+ fold_convert (type, arg0), sh_cnt);
+ }
+ }
+ /* Fall thru */
+
+ case ROUND_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
if (integer_onep (arg1))
return omit_one_operand (type, integer_zero_node, arg0);
/* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
- i.e. "X % C" into "X & C2", if X and C are positive. */
+ i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
- && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
- && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
+ && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
{
- unsigned HOST_WIDE_INT high, low;
- tree mask;
- int l;
+ tree c = arg1;
+ /* Also optimize A % (C << N) where C is a power of 2,
+ to A & ((C << N) - 1). */
+ if (TREE_CODE (arg1) == LSHIFT_EXPR)
+ c = TREE_OPERAND (arg1, 0);
- l = tree_log2 (arg1);
- if (l >= HOST_BITS_PER_WIDE_INT)
+ if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
{
- high = ((unsigned HOST_WIDE_INT) 1
- << (l - HOST_BITS_PER_WIDE_INT)) - 1;
- low = -1;
- }
- else
- {
- high = 0;
- low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
+ tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
+ arg1, integer_one_node);
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, mask));
}
-
- mask = build_int_cst_wide (type, low, high);
- return fold_build2 (BIT_AND_EXPR, type,
- fold_convert (type, arg0), mask);
}
/* X % -C is the same as X % C. */
return NULL_TREE;
/* Turn (a OP c1) OP c2 into a OP (c1+c2). */
- if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
+ if (TREE_CODE (op0) == code && host_integerp (arg1, false)
&& TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
&& host_integerp (TREE_OPERAND (arg0, 1), false)
&& TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
case EQ_EXPR:
case NE_EXPR:
- case LT_EXPR:
- case GT_EXPR:
- case LE_EXPR:
- case GE_EXPR:
- /* If one arg is a real or integer constant, put it last. */
- if (tree_swap_operands_p (arg0, arg1, true))
- return fold_build2 (swap_tree_comparison (code), type, op1, op0);
+ tem = fold_comparison (code, type, op0, op1);
+ if (tem != NULL_TREE)
+ return tem;
- /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
- && (code == NE_EXPR || code == EQ_EXPR))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
- fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
- arg1));
-
/* bool_var != 0 becomes bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
&& code == NE_EXPR)
return non_lvalue (fold_convert (type, arg0));
-
+
/* bool_var == 1 becomes bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
&& code == EQ_EXPR)
&& code == EQ_EXPR)
return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
+ /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST)
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
+ arg1));
+
/* If this is an equality comparison of the address of a non-weak
object against zero, then we know the result. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == ADDR_EXPR
+ if (TREE_CODE (arg0) == ADDR_EXPR
&& VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
&& ! DECL_WEAK (TREE_OPERAND (arg0, 0))
&& integer_zerop (arg1))
/* If this is an equality comparison of the address of two non-weak,
unaliased symbols neither of which are extern (since we do not
have access to attributes for externs), then we know the result. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == ADDR_EXPR
+ if (TREE_CODE (arg0) == ADDR_EXPR
&& VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
&& ! DECL_WEAK (TREE_OPERAND (arg0, 0))
&& ! lookup_attribute ("alias",
type);
}
- /* If this is a comparison of two exprs that look like an
- ARRAY_REF of the same object, then we can fold this to a
- comparison of the two offsets. */
- if (TREE_CODE_CLASS (code) == tcc_comparison)
+ /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
+ a MINUS_EXPR of a constant, we can convert it into a comparison with
+ a revised constant as long as no overflow occurs. */
+ if (TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_CODE (arg0) == PLUS_EXPR
+ || TREE_CODE (arg0) == MINUS_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
+ ? MINUS_EXPR : PLUS_EXPR,
+ fold_convert (TREE_TYPE (arg0), arg1),
+ TREE_OPERAND (arg0, 1), 0))
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+
+ /* Similarly for a NEGATE_EXPR. */
+ if (TREE_CODE (arg0) == NEGATE_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && 0 != (tem = negate_expr (arg1))
+ && TREE_CODE (tem) == INTEGER_CST
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+
+ /* If we have X - Y == 0, we can convert that to X == Y and similarly
+ for !=. Don't do this for ordered comparisons due to overflow. */
+ if (TREE_CODE (arg0) == MINUS_EXPR
+ && integer_zerop (arg1))
+ return fold_build2 (code, type,
+ TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
+
+ /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
+ if (TREE_CODE (arg0) == ABS_EXPR
+ && (integer_zerop (arg1) || real_zerop (arg1)))
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
+
+ /* If this is an EQ or NE comparison with zero and ARG0 is
+ (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
+ two operations, but the latter can be done in one less insn
+ on machines that have only two-operand insns or on which a
+ constant cannot be the first operand. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && integer_zerop (arg1))
+ {
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ if (TREE_CODE (arg00) == LSHIFT_EXPR
+ && integer_onep (TREE_OPERAND (arg00, 0)))
+ return
+ fold_build2 (code, type,
+ build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
+ arg01, TREE_OPERAND (arg00, 1)),
+ fold_convert (TREE_TYPE (arg0),
+ integer_one_node)),
+ arg1);
+ else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
+ && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
+ return
+ fold_build2 (code, type,
+ build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
+ arg00, TREE_OPERAND (arg01, 1)),
+ fold_convert (TREE_TYPE (arg0),
+ integer_one_node)),
+ arg1);
+ }
+
+ /* If this is an NE or EQ comparison of zero against the result of a
+ signed MOD operation whose second operand is a power of 2, make
+ the MOD operation unsigned since it is simpler and equivalent. */
+ if (integer_zerop (arg1)
+ && !TYPE_UNSIGNED (TREE_TYPE (arg0))
+ && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
+ || TREE_CODE (arg0) == CEIL_MOD_EXPR
+ || TREE_CODE (arg0) == FLOOR_MOD_EXPR
+ || TREE_CODE (arg0) == ROUND_MOD_EXPR)
+ && integer_pow2p (TREE_OPERAND (arg0, 1)))
+ {
+ tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
+ tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
+ fold_convert (newtype,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert (newtype,
+ TREE_OPERAND (arg0, 1)));
+
+ return fold_build2 (code, type, newmod,
+ fold_convert (newtype, arg1));
+ }
+
+ /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
+ C1 is a valid shift constant, and C2 is a power of two, i.e.
+ a single bit. */
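+ /* For example, ((X >> 3) & 4) != 0 becomes (X & 32) != 0 as long as
+ the shifted mask still fits in the operand's precision. */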
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
+ && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
+ == INTEGER_CST
+ && integer_pow2p (TREE_OPERAND (arg0, 1))
+ && integer_zerop (arg1))
+ {
+ tree itype = TREE_TYPE (arg0);
+ unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
+ tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
+
+ /* Check for a valid shift count. */
+ if (TREE_INT_CST_HIGH (arg001) == 0
+ && TREE_INT_CST_LOW (arg001) < prec)
+ {
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
+ /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
+ can be rewritten as (X & (C2 << C1)) != 0. */
+ if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
+ {
+ tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
+ tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
+ return fold_build2 (code, type, tem, arg1);
+ }
+ /* Otherwise, for signed (arithmetic) shifts,
+ ((X >> C1) & C2) != 0 is rewritten as X < 0, and
+ ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
+ else if (!TYPE_UNSIGNED (itype))
+ return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
+ arg000, build_int_cst (itype, 0));
+ /* Otherwise, for unsigned (logical) shifts,
+ ((X >> C1) & C2) != 0 is rewritten as (X,false), and
+ ((X >> C1) & C2) == 0 is rewritten as (X,true). */
+ else
+ return omit_one_operand (type,
+ code == EQ_EXPR ? integer_one_node
+ : integer_zero_node,
+ arg000);
+ }
+ }
+
+ /* If this is an NE comparison of zero with an AND of one, remove the
+ comparison since the AND will give the correct value. */
+ if (code == NE_EXPR
+ && integer_zerop (arg1)
+ && TREE_CODE (arg0) == BIT_AND_EXPR
+ && integer_onep (TREE_OPERAND (arg0, 1)))
+ return fold_convert (type, arg0);
+
+ /* If we have (A & C) == C where C is a power of 2, convert this into
+ (A & C) != 0. Similarly for NE_EXPR. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && integer_pow2p (TREE_OPERAND (arg0, 1))
+ && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
+ return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
+ arg0, fold_convert (TREE_TYPE (arg0),
+ integer_zero_node));
+
+ /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
+ bit, then fold the expression into A < 0 or A >= 0. */
+ tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
+ if (tem)
+ return tem;
+
+ /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
+ Similarly for NE_EXPR. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ {
+ tree notc = fold_build1 (BIT_NOT_EXPR,
+ TREE_TYPE (TREE_OPERAND (arg0, 1)),
+ TREE_OPERAND (arg0, 1));
+ tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ arg1, notc);
+ tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
+ if (integer_nonzerop (dandnotc))
+ return omit_one_operand (type, rslt, arg0);
+ }
+
+ /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
+ Similarly for NE_EXPR. */
+ if (TREE_CODE (arg0) == BIT_IOR_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ {
+ tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
+ tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ TREE_OPERAND (arg0, 1), notd);
+ tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
+ if (integer_nonzerop (candnotd))
+ return omit_one_operand (type, rslt, arg0);
+ }
+
+ /* If this is a comparison of a field, we may be able to simplify it. */
+ if (((TREE_CODE (arg0) == COMPONENT_REF
+ && lang_hooks.can_use_bit_fields_p ())
+ || TREE_CODE (arg0) == BIT_FIELD_REF)
+ /* Handle the constant case even without -O
+ to make sure the warnings are given. */
+ && (optimize || TREE_CODE (arg1) == INTEGER_CST))
+ {
+ t1 = optimize_bit_field_compare (code, type, arg0, arg1);
+ if (t1)
+ return t1;
+ }
+
+ /* Optimize comparisons of strlen vs zero to a compare of the
+ first character of the string vs zero. To wit,
+ strlen(ptr) == 0 => *ptr == 0
+ strlen(ptr) != 0 => *ptr != 0
+ Other cases should reduce to one of these two (or a constant)
+ due to the return value of strlen being unsigned. */
+ if (TREE_CODE (arg0) == CALL_EXPR
+ && integer_zerop (arg1))
{
- tree base0, offset0, base1, offset1;
+ tree fndecl = get_callee_fndecl (arg0);
+ tree arglist;
- if (extract_array_ref (arg0, &base0, &offset0)
- && extract_array_ref (arg1, &base1, &offset1)
- && operand_equal_p (base0, base1, 0))
+ if (fndecl
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+ && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
+ && (arglist = TREE_OPERAND (arg0, 1))
+ && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
+ && ! TREE_CHAIN (arglist))
{
- /* Handle no offsets on both sides specially. */
- if (offset0 == NULL_TREE
- && offset1 == NULL_TREE)
- return fold_build2 (code, type, integer_zero_node,
- integer_zero_node);
-
- if (!offset0 || !offset1
- || TREE_TYPE (offset0) == TREE_TYPE (offset1))
+ tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
+ return fold_build2 (code, type, iref,
+ build_int_cst (TREE_TYPE (iref), 0));
+ }
+ }
+
+ /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
+ of X. Similarly fold (X >> C) == 0 into X >= 0. */
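+ /* For example, for 32-bit signed X, (X >> 31) != 0 becomes X < 0. */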
+ if (TREE_CODE (arg0) == RSHIFT_EXPR
+ && integer_zerop (arg1)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ {
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ tree itype = TREE_TYPE (arg00);
+ if (TREE_INT_CST_HIGH (arg01) == 0
+ && TREE_INT_CST_LOW (arg01)
+ == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
+ {
+ if (TYPE_UNSIGNED (itype))
{
- if (offset0 == NULL_TREE)
- offset0 = build_int_cst (TREE_TYPE (offset1), 0);
- if (offset1 == NULL_TREE)
- offset1 = build_int_cst (TREE_TYPE (offset0), 0);
- return fold_build2 (code, type, offset0, offset1);
+ itype = lang_hooks.types.signed_type (itype);
+ arg00 = fold_convert (itype, arg00);
}
+ return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
+ type, arg00, build_int_cst (itype, 0));
}
}
+ /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
+ if (integer_zerop (arg1)
+ && TREE_CODE (arg0) == BIT_XOR_EXPR)
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg0, 1));
+
+ /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ build_int_cst (TREE_TYPE (arg1), 0));
+ /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
+ build_int_cst (TREE_TYPE (arg1), 0));
+
+ /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
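+ /* For instance, (X ^ 3) == 5 becomes X == 6. */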
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
+ TREE_OPERAND (arg0, 1), arg1));
+
+ /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
+ (X & C) == 0 when C is a single bit. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
+ && integer_zerop (arg1)
+ && integer_pow2p (TREE_OPERAND (arg0, 1)))
+ {
+ tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
+ TREE_OPERAND (arg0, 1));
+ return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
+ type, tem, arg1);
+ }
+
+ /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
+ constant C is a power of two, i.e. a single bit. */
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
+ && integer_zerop (arg1)
+ && integer_pow2p (TREE_OPERAND (arg0, 1))
+ && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
+ TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
+ {
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
+ arg00, build_int_cst (TREE_TYPE (arg00), 0));
+ }
+
+ /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
+ when C is a power of two, i.e. a single bit. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
+ && integer_zerop (arg1)
+ && integer_pow2p (TREE_OPERAND (arg0, 1))
+ && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
+ TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
+ {
+ tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
+ arg000, TREE_OPERAND (arg0, 1));
+ return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
+ tem, build_int_cst (TREE_TYPE (tem), 0));
+ }
+
+ if (integer_zerop (arg1)
+ && tree_expr_nonzero_p (arg0))
+ {
+ tree res = constant_boolean_node (code == NE_EXPR, type);
+ return omit_one_operand (type, res, arg0);
+ }
+ return NULL_TREE;
+
+ case LT_EXPR:
+ case GT_EXPR:
+ case LE_EXPR:
+ case GE_EXPR:
+ tem = fold_comparison (code, type, op0, op1);
+ if (tem != NULL_TREE)
+ return tem;
+
/* Transform comparisons of the form X +- C CMP X. */
- if ((code != EQ_EXPR && code != NE_EXPR)
- && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
&& !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
}
}
- /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
- if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
- && !TYPE_UNSIGNED (TREE_TYPE (arg1))
- && !(flag_wrapv || flag_trapv))
- && (TREE_CODE (arg1) == INTEGER_CST
- && !TREE_OVERFLOW (arg1)))
- {
- tree const1 = TREE_OPERAND (arg0, 1);
- tree const2 = arg1;
- tree variable = TREE_OPERAND (arg0, 0);
- tree lhs;
- int lhs_add;
- lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
-
- lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
- TREE_TYPE (arg1), const2, const1);
- if (TREE_CODE (lhs) == TREE_CODE (arg1)
- && (TREE_CODE (lhs) != INTEGER_CST
- || !TREE_OVERFLOW (lhs)))
- return fold_build2 (code, type, variable, lhs);
- }
-
- if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
- {
- tree targ0 = strip_float_extensions (arg0);
- tree targ1 = strip_float_extensions (arg1);
- tree newtype = TREE_TYPE (targ0);
-
- if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
- newtype = TREE_TYPE (targ1);
-
- /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
- if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
- return fold_build2 (code, type, fold_convert (newtype, targ0),
- fold_convert (newtype, targ1));
-
- /* (-a) CMP (-b) -> b CMP a */
- if (TREE_CODE (arg0) == NEGATE_EXPR
- && TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
- TREE_OPERAND (arg0, 0));
-
- if (TREE_CODE (arg1) == REAL_CST)
- {
- REAL_VALUE_TYPE cst;
- cst = TREE_REAL_CST (arg1);
-
- /* (-a) CMP CST -> a swap(CMP) (-CST) */
- if (TREE_CODE (arg0) == NEGATE_EXPR)
- return
- fold_build2 (swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 0),
- build_real (TREE_TYPE (arg1),
- REAL_VALUE_NEGATE (cst)));
-
- /* IEEE doesn't distinguish +0 and -0 in comparisons. */
- /* a CMP (-0) -> a CMP 0 */
- if (REAL_VALUE_MINUS_ZERO (cst))
- return fold_build2 (code, type, arg0,
- build_real (TREE_TYPE (arg1), dconst0));
-
- /* x != NaN is always true, other ops are always false. */
- if (REAL_VALUE_ISNAN (cst)
- && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
- {
- tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
- return omit_one_operand (type, tem, arg0);
- }
-
- /* Fold comparisons against infinity. */
- if (REAL_VALUE_ISINF (cst))
- {
- tem = fold_inf_compare (code, type, arg0, arg1);
- if (tem != NULL_TREE)
- return tem;
- }
- }
-
- /* If this is a comparison of a real constant with a PLUS_EXPR
- or a MINUS_EXPR of a real constant, we can convert it into a
- comparison with a revised real constant as long as no overflow
- occurs when unsafe_math_optimizations are enabled. */
- if (flag_unsafe_math_optimizations
- && TREE_CODE (arg1) == REAL_CST
- && (TREE_CODE (arg0) == PLUS_EXPR
- || TREE_CODE (arg0) == MINUS_EXPR)
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
- && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
- ? MINUS_EXPR : PLUS_EXPR,
- arg1, TREE_OPERAND (arg0, 1), 0))
- && ! TREE_CONSTANT_OVERFLOW (tem))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
-
- /* Likewise, we can simplify a comparison of a real constant with
- a MINUS_EXPR whose first operand is also a real constant, i.e.
- (c1 - x) < c2 becomes x > c1-c2. */
- if (flag_unsafe_math_optimizations
- && TREE_CODE (arg1) == REAL_CST
- && TREE_CODE (arg0) == MINUS_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
- && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
- arg1, 0))
- && ! TREE_CONSTANT_OVERFLOW (tem))
- return fold_build2 (swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 1), tem);
-
- /* Fold comparisons against built-in math functions. */
- if (TREE_CODE (arg1) == REAL_CST
- && flag_unsafe_math_optimizations
- && ! flag_errno_math)
- {
- enum built_in_function fcode = builtin_mathfn_code (arg0);
-
- if (fcode != END_BUILTINS)
- {
- tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
- if (tem != NULL_TREE)
- return tem;
- }
- }
- }
-
- /* Convert foo++ == CONST into ++foo == CONST + INCR. */
- if (TREE_CONSTANT (arg1)
- && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
- || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
- /* This optimization is invalid for ordered comparisons
- if CONST+INCR overflows or if foo+incr might overflow.
- This optimization is invalid for floating point due to rounding.
- For pointer types we assume overflow doesn't happen. */
- && (POINTER_TYPE_P (TREE_TYPE (arg0))
- || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
- && (code == EQ_EXPR || code == NE_EXPR))))
- {
- tree varop, newconst;
-
- if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
- {
- newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
- arg1, TREE_OPERAND (arg0, 1));
- varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- }
- else
- {
- newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
- arg1, TREE_OPERAND (arg0, 1));
- varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- }
-
-
- /* If VAROP is a reference to a bitfield, we must mask
- the constant by the width of the field. */
- if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
- && host_integerp (DECL_SIZE (TREE_OPERAND
- (TREE_OPERAND (varop, 0), 1)), 1))
- {
- tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
- HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
- tree folded_compare, shift;
-
- /* First check whether the comparison would come out
- always the same. If we don't do that we would
- change the meaning with the masking. */
- folded_compare = fold_build2 (code, type,
- TREE_OPERAND (varop, 0), arg1);
- if (integer_zerop (folded_compare)
- || integer_onep (folded_compare))
- return omit_one_operand (type, folded_compare, varop);
-
- shift = build_int_cst (NULL_TREE,
- TYPE_PRECISION (TREE_TYPE (varop)) - size);
- shift = fold_convert (TREE_TYPE (varop), shift);
- newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
- newconst, shift);
- newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
- newconst, shift);
- }
-
- return fold_build2 (code, type, varop, newconst);
- }
-
/* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
This transformation affects the cases which are handled in later
optimizations involving comparisons with non-negative constants. */
&& TREE_CODE (arg0) != INTEGER_CST
&& tree_int_cst_sgn (arg1) > 0)
{
- switch (code)
+ if (code == GE_EXPR)
{
- case GE_EXPR:
arg1 = const_binop (MINUS_EXPR, arg1,
build_int_cst (TREE_TYPE (arg1), 1), 0);
return fold_build2 (GT_EXPR, type, arg0,
fold_convert (TREE_TYPE (arg0), arg1));
-
- case LT_EXPR:
+ }
+ if (code == LT_EXPR)
+ {
arg1 = const_binop (MINUS_EXPR, arg1,
build_int_cst (TREE_TYPE (arg1), 1), 0);
return fold_build2 (LE_EXPR, type, arg0,
fold_convert (TREE_TYPE (arg0), arg1));
-
- default:
- break;
}
}
/* signed_type does not work on pointer types. */
&& INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
{
- /* The following case also applies to X < signed_max+1
- and X >= signed_max+1 because previous transformations. */
- if (code == LE_EXPR || code == GT_EXPR)
- {
- tree st0, st1;
- st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
- st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
- return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
- type, fold_convert (st0, arg0),
- build_int_cst (st1, 0));
- }
- }
- }
- }
-
- /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
- a MINUS_EXPR of a constant, we can convert it into a comparison with
- a revised constant as long as no overflow occurs. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg1) == INTEGER_CST
- && (TREE_CODE (arg0) == PLUS_EXPR
- || TREE_CODE (arg0) == MINUS_EXPR)
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
- ? MINUS_EXPR : PLUS_EXPR,
- arg1, TREE_OPERAND (arg0, 1), 0))
- && ! TREE_CONSTANT_OVERFLOW (tem))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
-
- /* Similarly for a NEGATE_EXPR. */
- else if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == NEGATE_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && 0 != (tem = negate_expr (arg1))
- && TREE_CODE (tem) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (tem))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
-
- /* If we have X - Y == 0, we can convert that to X == Y and similarly
- for !=. Don't do this for ordered comparisons due to overflow. */
- else if ((code == NE_EXPR || code == EQ_EXPR)
- && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
- return fold_build2 (code, type,
- TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
-
- else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
- && (TREE_CODE (arg0) == NOP_EXPR
- || TREE_CODE (arg0) == CONVERT_EXPR))
- {
- /* If we are widening one operand of an integer comparison,
- see if the other operand is similarly being widened. Perhaps we
- can do the comparison in the narrower type. */
- tem = fold_widened_comparison (code, type, arg0, arg1);
- if (tem)
- return tem;
-
- /* Or if we are changing signedness. */
- tem = fold_sign_changed_comparison (code, type, arg0, arg1);
- if (tem)
- return tem;
- }
-
- /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
- constant, we can simplify it. */
- else if (TREE_CODE (arg1) == INTEGER_CST
- && (TREE_CODE (arg0) == MIN_EXPR
- || TREE_CODE (arg0) == MAX_EXPR)
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- {
- tem = optimize_minmax_comparison (code, type, op0, op1);
- if (tem)
- return tem;
-
- return NULL_TREE;
- }
+ /* The following case also applies to X < signed_max+1
+ and X >= signed_max+1 because of previous transformations. */
+ if (code == LE_EXPR || code == GT_EXPR)
+ {
+ tree st0, st1;
+ st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
+ st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
+ return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
+ type, fold_convert (st0, arg0),
+ build_int_cst (st1, 0));
+ }
+ }
+ }
+ }
/* If we are comparing an ABS_EXPR with a constant, we can
convert all the cases into explicit comparisons, but they may
well not be faster than doing the ABS and one comparison.
But ABS (X) <= C is a range comparison, which becomes a subtraction
and a comparison, and is probably faster. */
- else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (arg0) == ABS_EXPR
- && ! TREE_SIDE_EFFECTS (arg0)
- && (0 != (tem = negate_expr (arg1)))
- && TREE_CODE (tem) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ if (code == LE_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (arg0) == ABS_EXPR
+ && ! TREE_SIDE_EFFECTS (arg0)
+ && (0 != (tem = negate_expr (arg1)))
+ && TREE_CODE (tem) == INTEGER_CST
+ && ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (TRUTH_ANDIF_EXPR, type,
build2 (GE_EXPR, type,
TREE_OPERAND (arg0, 0), tem),
TREE_OPERAND (arg0, 0), arg1));
/* Convert ABS_EXPR<x> >= 0 to true. */
- else if (code == GE_EXPR
- && tree_expr_nonnegative_p (arg0)
- && (integer_zerop (arg1)
- || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
- && real_zerop (arg1))))
+ if (code == GE_EXPR
+ && tree_expr_nonnegative_p (arg0)
+ && (integer_zerop (arg1)
+ || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
+ && real_zerop (arg1))))
return omit_one_operand (type, integer_one_node, arg0);
/* Convert ABS_EXPR<x> < 0 to false. */
- else if (code == LT_EXPR
- && tree_expr_nonnegative_p (arg0)
- && (integer_zerop (arg1) || real_zerop (arg1)))
+ if (code == LT_EXPR
+ && tree_expr_nonnegative_p (arg0)
+ && (integer_zerop (arg1) || real_zerop (arg1)))
return omit_one_operand (type, integer_zero_node, arg0);
- /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
- else if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == ABS_EXPR
- && (integer_zerop (arg1) || real_zerop (arg1)))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
-
- /* If this is an EQ or NE comparison with zero and ARG0 is
- (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
- two operations, but the latter can be done in one less insn
- on machines that have only two-operand insns or on which a
- constant cannot be the first operand. */
- if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == BIT_AND_EXPR)
- {
- tree arg00 = TREE_OPERAND (arg0, 0);
- tree arg01 = TREE_OPERAND (arg0, 1);
- if (TREE_CODE (arg00) == LSHIFT_EXPR
- && integer_onep (TREE_OPERAND (arg00, 0)))
- return
- fold_build2 (code, type,
- build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
- arg01, TREE_OPERAND (arg00, 1)),
- fold_convert (TREE_TYPE (arg0),
- integer_one_node)),
- arg1);
- else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
- && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
- return
- fold_build2 (code, type,
- build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
- arg00, TREE_OPERAND (arg01, 1)),
- fold_convert (TREE_TYPE (arg0),
- integer_one_node)),
- arg1);
- }
-
- /* If this is an NE or EQ comparison of zero against the result of a
- signed MOD operation whose second operand is a power of 2, make
- the MOD operation unsigned since it is simpler and equivalent. */
- if ((code == NE_EXPR || code == EQ_EXPR)
- && integer_zerop (arg1)
- && !TYPE_UNSIGNED (TREE_TYPE (arg0))
- && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
- || TREE_CODE (arg0) == CEIL_MOD_EXPR
- || TREE_CODE (arg0) == FLOOR_MOD_EXPR
- || TREE_CODE (arg0) == ROUND_MOD_EXPR)
- && integer_pow2p (TREE_OPERAND (arg0, 1)))
- {
- tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
- tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
- fold_convert (newtype,
- TREE_OPERAND (arg0, 0)),
- fold_convert (newtype,
- TREE_OPERAND (arg0, 1)));
-
- return fold_build2 (code, type, newmod,
- fold_convert (newtype, arg1));
- }
-
- /* If this is an NE comparison of zero with an AND of one, remove the
- comparison since the AND will give the correct value. */
- if (code == NE_EXPR && integer_zerop (arg1)
- && TREE_CODE (arg0) == BIT_AND_EXPR
- && integer_onep (TREE_OPERAND (arg0, 1)))
- return fold_convert (type, arg0);
-
- /* If we have (A & C) == C where C is a power of 2, convert this into
- (A & C) != 0. Similarly for NE_EXPR. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == BIT_AND_EXPR
- && integer_pow2p (TREE_OPERAND (arg0, 1))
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
- arg0, fold_convert (TREE_TYPE (arg0),
- integer_zero_node));
-
- /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
- bit, then fold the expression into A < 0 or A >= 0. */
- tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
- if (tem)
- return tem;
-
- /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
- Similarly for NE_EXPR. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == BIT_AND_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- {
- tree notc = fold_build1 (BIT_NOT_EXPR,
- TREE_TYPE (TREE_OPERAND (arg0, 1)),
- TREE_OPERAND (arg0, 1));
- tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- arg1, notc);
- tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
- if (integer_nonzerop (dandnotc))
- return omit_one_operand (type, rslt, arg0);
- }
-
- /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
- Similarly for NE_EXPR. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == BIT_IOR_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- {
- tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
- tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 1), notd);
- tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
- if (integer_nonzerop (candnotd))
- return omit_one_operand (type, rslt, arg0);
- }
-
/* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
and similarly for >= into !=. */
if ((code == LT_EXPR || code == GE_EXPR)
TREE_OPERAND (arg1, 1)),
build_int_cst (TREE_TYPE (arg0), 0));
- else if ((code == LT_EXPR || code == GE_EXPR)
- && TYPE_UNSIGNED (TREE_TYPE (arg0))
- && (TREE_CODE (arg1) == NOP_EXPR
- || TREE_CODE (arg1) == CONVERT_EXPR)
- && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
- && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
+ if ((code == LT_EXPR || code == GE_EXPR)
+ && TYPE_UNSIGNED (TREE_TYPE (arg0))
+ && (TREE_CODE (arg1) == NOP_EXPR
+ || TREE_CODE (arg1) == CONVERT_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
+ && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
return
build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
fold_convert (TREE_TYPE (arg0),
1))),
build_int_cst (TREE_TYPE (arg0), 0));
- /* Simplify comparison of something with itself. (For IEEE
- floating-point, we can only do some of these simplifications.) */
- if (operand_equal_p (arg0, arg1, 0))
- {
- switch (code)
- {
- case EQ_EXPR:
- if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
- || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
- return constant_boolean_node (1, type);
- break;
-
- case GE_EXPR:
- case LE_EXPR:
- if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
- || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
- return constant_boolean_node (1, type);
- return fold_build2 (EQ_EXPR, type, arg0, arg1);
-
- case NE_EXPR:
- /* For NE, we can only do this simplification if integer
- or we don't honor IEEE floating point NaNs. */
- if (FLOAT_TYPE_P (TREE_TYPE (arg0))
- && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
- break;
- /* ... fall through ... */
- case GT_EXPR:
- case LT_EXPR:
- return constant_boolean_node (0, type);
- default:
- gcc_unreachable ();
- }
- }
-
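The self-comparison block is the classic x OP x simplification; a small sketch of why floating point is excluded for the == and != cases (hypothetical names, not part of the patch):

   /* Illustrative only: x == x folds to 1 for integers, but an IEEE NaN
      compares unequal to itself, so the float form is left alone when
      NaNs are honored.  */
   static int self_eq_int (int x)    { return x == x; }   /* always 1 */
   static int self_eq_dbl (double x) { return x == x; }   /* 0 when x is NaN */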
- /* If we are comparing an expression that just has comparisons
- of two integer values, arithmetic expressions of those comparisons,
- and constants, we can simplify it. There are only three cases
- to check: the two values can either be equal, the first can be
- greater, or the second can be greater. Fold the expression for
- those three values. Since each value must be 0 or 1, we have
- eight possibilities, each of which corresponds to the constant 0
- or 1 or one of the six possible comparisons.
-
- This handles common cases like (a > b) == 0 but also handles
- expressions like ((x > y) - (y > x)) > 0, which supposedly
- occur in macroized code. */
-
- if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
- {
- tree cval1 = 0, cval2 = 0;
- int save_p = 0;
-
- if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
- /* Don't handle degenerate cases here; they should already
- have been handled anyway. */
- && cval1 != 0 && cval2 != 0
- && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
- && TREE_TYPE (cval1) == TREE_TYPE (cval2)
- && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
- && TYPE_MAX_VALUE (TREE_TYPE (cval1))
- && TYPE_MAX_VALUE (TREE_TYPE (cval2))
- && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
- TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
- {
- tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
- tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
-
- /* We can't just pass T to eval_subst in case cval1 or cval2
- was the same as ARG1. */
-
- tree high_result
- = fold_build2 (code, type,
- eval_subst (arg0, cval1, maxval,
- cval2, minval),
- arg1);
- tree equal_result
- = fold_build2 (code, type,
- eval_subst (arg0, cval1, maxval,
- cval2, maxval),
- arg1);
- tree low_result
- = fold_build2 (code, type,
- eval_subst (arg0, cval1, minval,
- cval2, maxval),
- arg1);
-
- /* All three of these results should be 0 or 1. Confirm they
- are. Then use those values to select the proper code
- to use. */
-
- if ((integer_zerop (high_result)
- || integer_onep (high_result))
- && (integer_zerop (equal_result)
- || integer_onep (equal_result))
- && (integer_zerop (low_result)
- || integer_onep (low_result)))
- {
- /* Make a 3-bit mask with the high-order bit being the
- value for `>', the next for '=', and the low for '<'. */
- switch ((integer_onep (high_result) * 4)
- + (integer_onep (equal_result) * 2)
- + integer_onep (low_result))
- {
- case 0:
- /* Always false. */
- return omit_one_operand (type, integer_zero_node, arg0);
- case 1:
- code = LT_EXPR;
- break;
- case 2:
- code = EQ_EXPR;
- break;
- case 3:
- code = LE_EXPR;
- break;
- case 4:
- code = GT_EXPR;
- break;
- case 5:
- code = NE_EXPR;
- break;
- case 6:
- code = GE_EXPR;
- break;
- case 7:
- /* Always true. */
- return omit_one_operand (type, integer_one_node, arg0);
- }
-
- if (save_p)
- return save_expr (build2 (code, type, cval1, cval2));
- else
- return fold_build2 (code, type, cval1, cval2);
- }
- }
- }
-
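The twoval machinery evaluates the expression at the three possible orderings of the two values and picks the comparison encoded by the resulting 3-bit mask; the net effect at the source level looks like this (a sketch, hypothetical names):

   /* Illustrative only: ((x > y) - (y > x)) is 1, 0 or -1, so comparing
      it against 0 collapses to a single comparison of x and y.  */
   static int cmp3_gt (int x, int y)        { return ((x > y) - (y > x)) > 0; }
   static int cmp3_gt_folded (int x, int y) { return x > y; }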
- /* If this is a comparison of a field, we may be able to simplify it. */
- if (((TREE_CODE (arg0) == COMPONENT_REF
- && lang_hooks.can_use_bit_fields_p ())
- || TREE_CODE (arg0) == BIT_FIELD_REF)
- && (code == EQ_EXPR || code == NE_EXPR)
- /* Handle the constant case even without -O
- to make sure the warnings are given. */
- && (optimize || TREE_CODE (arg1) == INTEGER_CST))
- {
- t1 = optimize_bit_field_compare (code, type, arg0, arg1);
- if (t1)
- return t1;
- }
-
- /* Fold a comparison of the address of COMPONENT_REFs with the same
- type and component to a comparison of the address of the base
- object. In short, &x->a OP &y->a to x OP y and
- &x->a OP &y.a to x OP &y */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
- && TREE_CODE (arg1) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
- {
- tree cref0 = TREE_OPERAND (arg0, 0);
- tree cref1 = TREE_OPERAND (arg1, 0);
- if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
- {
- tree op0 = TREE_OPERAND (cref0, 0);
- tree op1 = TREE_OPERAND (cref1, 0);
- return fold_build2 (code, type,
- build_fold_addr_expr (op0),
- build_fold_addr_expr (op1));
- }
- }
-
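The COMPONENT_REF address fold compares the enclosing objects instead of the member addresses; a sketch with a hypothetical struct (not part of the patch):

   struct pair { int a; int b; };
   /* Illustrative only: &x->b == &y->b asks the same question as x == y,
      since both sides add the same constant offset to the object pointers.  */
   static int same_member (struct pair *x, struct pair *y) { return &x->b == &y->b; }
   static int same_object (struct pair *x, struct pair *y) { return x == y; }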
- /* Optimize comparisons of strlen vs zero to a compare of the
- first character of the string vs zero. To wit,
- strlen(ptr) == 0 => *ptr == 0
- strlen(ptr) != 0 => *ptr != 0
- Other cases should reduce to one of these two (or a constant)
- due to the return value of strlen being unsigned. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && integer_zerop (arg1)
- && TREE_CODE (arg0) == CALL_EXPR)
- {
- tree fndecl = get_callee_fndecl (arg0);
- tree arglist;
-
- if (fndecl
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
- && (arglist = TREE_OPERAND (arg0, 1))
- && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
- && ! TREE_CHAIN (arglist))
- {
- tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
- return fold_build2 (code, type, iref,
- build_int_cst (TREE_TYPE (iref), 0));
- }
- }
-
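The strlen fold relies on strlen returning 0 exactly when the first character is the terminating NUL; a sketch (hypothetical names, not part of the patch):

   #include <string.h>
   /* Illustrative only: strlen (p) == 0 iff *p == 0.  */
   static int is_empty (const char *p)        { return strlen (p) == 0; }
   static int is_empty_folded (const char *p) { return *p == 0; }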
- /* We can fold X/C1 op C2 where C1 and C2 are integer constants
- into a single range test. */
- if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
- || TREE_CODE (arg0) == EXACT_DIV_EXPR)
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && !integer_zerop (TREE_OPERAND (arg0, 1))
- && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
- && !TREE_OVERFLOW (arg1))
- {
- t1 = fold_div_compare (code, type, arg0, arg1);
- if (t1 != NULL_TREE)
- return t1;
- }
-
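fold_div_compare turns a quotient comparison into a range test on the dividend; for one concrete constant pair the equivalence looks like this (sketch, hypothetical names, unsigned to sidestep the negative-quotient cases):

   /* Illustrative only: x / 10 == 3 holds exactly for x in [30, 39].  */
   static int div_eq (unsigned int x)        { return x / 10 == 3; }
   static int div_eq_folded (unsigned int x) { return x >= 30 && x <= 39; }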
- if ((code == EQ_EXPR || code == NE_EXPR)
- && integer_zerop (arg1)
- && tree_expr_nonzero_p (arg0))
- {
- tree res = constant_boolean_node (code==NE_EXPR, type);
- return omit_one_operand (type, res, arg0);
- }
-
- t1 = fold_relational_const (code, type, arg0, arg1);
- return t1 == NULL_TREE ? NULL_TREE : t1;
+ return NULL_TREE;
case UNORDERED_EXPR:
case ORDERED_EXPR:
TREE_OPERAND (arg0, 1))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
{
- tem = invert_truthvalue (arg0);
- if (COMPARISON_CLASS_P (tem))
+ tem = fold_truth_not_expr (arg0);
+ if (tem && COMPARISON_CLASS_P (tem))
{
tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
if (tem)
/* See if this can be inverted. If it can't, possibly because
it was a floating-point inequality comparison, don't do
anything. */
- tem = invert_truthvalue (arg0);
-
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
+ tem = fold_truth_not_expr (arg0);
+ if (tem)
return fold_build3 (code, type, tem, op2, op1);
}
&& integer_zerop (TREE_OPERAND (arg0, 1))
&& integer_zerop (op2)
&& (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
- return fold_convert (type, fold_build2 (BIT_AND_EXPR,
- TREE_TYPE (tem), tem, arg1));
+ return fold_convert (type,
+ fold_build2 (BIT_AND_EXPR,
+ TREE_TYPE (tem), tem,
+ fold_convert (TREE_TYPE (tem), arg1)));
/* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
already handled above. */
if (integer_zerop (op2)
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (arg1)))
- return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
+ return fold_build2 (TRUTH_ANDIF_EXPR, type,
+ fold_convert (type, arg0),
+ arg1);
/* Convert A ? B : 1 into !A || B if A and B are truth values. */
if (integer_onep (op2)
&& truth_value_p (TREE_CODE (arg1)))
{
/* Only perform transformation if ARG0 is easily inverted. */
- tem = invert_truthvalue (arg0);
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
+ tem = fold_truth_not_expr (arg0);
+ if (tem)
+ return fold_build2 (TRUTH_ORIF_EXPR, type,
+ fold_convert (type, tem),
+ arg1);
}
/* Convert A ? 0 : B into !A && B if A and B are truth values. */
&& truth_value_p (TREE_CODE (op2)))
{
/* Only perform transformation if ARG0 is easily inverted. */
- tem = invert_truthvalue (arg0);
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
+ tem = fold_truth_not_expr (arg0);
+ if (tem)
+ return fold_build2 (TRUTH_ANDIF_EXPR, type,
+ fold_convert (type, tem),
+ op2);
}
/* Convert A ? 1 : B into A || B if A and B are truth values. */
if (integer_onep (arg1)
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (op2)))
- return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
+ return fold_build2 (TRUTH_ORIF_EXPR, type,
+ fold_convert (type, arg0),
+ op2);
return NULL_TREE;
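The COND_EXPR rewrites above all follow the same pattern: with truth-valued (0/1) operands, a conditional collapses into a logical operator. One representative sketch (hypothetical names, not part of the patch):

   /* Illustrative only, for a and b restricted to 0 or 1:
      a ? 1 : b  has the same value as  a || b,
      a ? b : 1  as  !a || b,  and  a ? 0 : b  as  !a && b.  */
   static int cond_or (int a, int b)        { return a ? 1 : b; }
   static int cond_or_folded (int a, int b) { return a || b; }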
&& TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
&& DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
- /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
- here are when we've propagated the address of a decl into the
- object slot. */
- if (TREE_CODE (op0) == OBJ_TYPE_REF
- && lang_hooks.fold_obj_type_ref
- && TREE_CODE (OBJ_TYPE_REF_OBJECT (op0)) == ADDR_EXPR
- && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (op0), 0)))
- {
- tree t;
-
- /* ??? Caution: Broken ADDR_EXPR semantics means that
- looking at the type of the operand of the addr_expr
- can yield an array type. See silly exception in
- check_pointer_types_r. */
-
- t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (op0)));
- t = lang_hooks.fold_obj_type_ref (op0, t);
- if (t)
- return fold_build3 (code, type, t, op1, op2);
- }
return NULL_TREE;
case BIT_FIELD_REF:
{
void **slot;
enum tree_code code;
- char buf[sizeof (struct tree_function_decl)];
+ struct tree_function_decl buf;
int i, len;
recursive_label:
&& DECL_ASSEMBLER_NAME_SET_P (expr))
{
/* Allow DECL_ASSEMBLER_NAME to be modified. */
- memcpy (buf, expr, tree_size (expr));
- expr = (tree) buf;
+ memcpy ((char *) &buf, expr, tree_size (expr));
+ expr = (tree) &buf;
SET_DECL_ASSEMBLER_NAME (expr, NULL);
}
else if (TREE_CODE_CLASS (code) == tcc_type
|| TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
{
/* Allow these fields to be modified. */
- memcpy (buf, expr, tree_size (expr));
- expr = (tree) buf;
+ memcpy ((char *) &buf, expr, tree_size (expr));
+ expr = (tree) &buf;
TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
TYPE_POINTER_TO (expr) = NULL;
TYPE_REFERENCE_TO (expr) = NULL;
int saved_trapping_math = flag_trapping_math;\
int saved_rounding_math = flag_rounding_math;\
int saved_trapv = flag_trapv;\
+ int saved_folding_initializer = folding_initializer;\
flag_signaling_nans = 0;\
flag_trapping_math = 0;\
flag_rounding_math = 0;\
- flag_trapv = 0
+ flag_trapv = 0;\
+ folding_initializer = 1;
#define END_FOLD_INIT \
flag_signaling_nans = saved_signaling_nans;\
flag_trapping_math = saved_trapping_math;\
flag_rounding_math = saved_rounding_math;\
- flag_trapv = saved_trapv
+ flag_trapv = saved_trapv;\
+ folding_initializer = saved_folding_initializer;
tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
int
tree_expr_nonnegative_p (tree t)
{
+ if (t == error_mark_node)
+ return 0;
+
if (TYPE_UNSIGNED (TREE_TYPE (t)))
return 1;
switch (TREE_CODE (t))
{
+ case SSA_NAME:
+ /* Query VRP to see if it has recorded any information about
+ the range of this object. */
+ return ssa_name_nonnegative_p (t);
+
case ABS_EXPR:
/* We can't return 1 if flag_wrapv is set because
ABS_EXPR<INT_MIN> = INT_MIN. */
switch (TREE_CODE (t))
{
+ case SSA_NAME:
+ /* Query VRP to see if it has recorded any information about
+ the range of this object. */
+ return ssa_name_nonzero_p (t);
+
case ABS_EXPR:
return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
tree outer_type = TREE_TYPE (t);
- return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
+ return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
}
break;
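The operand-order fix above matters because narrowing can destroy nonzero-ness; only a conversion that keeps at least as many bits as the source preserves it. A sketch of the failure mode (hypothetical name):

   /* Illustrative only: 256 is nonzero, yet (unsigned char) 256 == 0;
      the dropped high bits were the only nonzero ones, which is why the
      test must require the outer precision to be >= the inner one.  */
   static unsigned char narrow (unsigned int x) { return (unsigned char) x; }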
}
if (string
- && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
+      && TYPE_MODE (TREE_TYPE (exp))
+	 == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
&& TREE_CODE (string) == STRING_CST
&& TREE_CODE (index) == INTEGER_CST
&& compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
min_val = TYPE_MIN_VALUE (type_domain);
return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
}
+ /* *(foo *)&complexfoo => __real__ complexfoo */
+ else if (TREE_CODE (optype) == COMPLEX_TYPE
+ && type == TREE_TYPE (optype))
+ return fold_build1 (REALPART_EXPR, type, op);
}
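The new COMPLEX_TYPE case matches the comment literally; in source terms the equivalence is roughly the following (a sketch with hypothetical names, relying on the layout guarantee that the real part is stored first):

   /* Illustrative only: reading the first element through a pointer cast
      yields the real part.  */
   static double real_by_cast (_Complex double z) { return *(double *) &z; }
   static double real_folded  (_Complex double z) { return __real__ z; }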
+ /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
+ if (TREE_CODE (sub) == PLUS_EXPR
+ && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
+ {
+ tree op00 = TREE_OPERAND (sub, 0);
+ tree op01 = TREE_OPERAND (sub, 1);
+ tree op00type;
+
+ STRIP_NOPS (op00);
+ op00type = TREE_TYPE (op00);
+ if (TREE_CODE (op00) == ADDR_EXPR
+ && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
+ && type == TREE_TYPE (TREE_TYPE (op00type)))
+ {
+ tree size = TYPE_SIZE_UNIT (type);
+ if (tree_int_cst_equal (size, op01))
+ return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
+ }
+ }
+
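The PLUS_EXPR hunk handles the companion access one element further on, where the constant offset equals the element size (sketch, hypothetical names):

   /* Illustrative only: the second element of the underlying pair is the
      imaginary part.  */
   static double imag_by_cast (_Complex double z) { return ((double *) &z)[1]; }
   static double imag_folded  (_Complex double z) { return __imag__ z; }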
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
&& type == TREE_TYPE (TREE_TYPE (subtype)))