force_fit_type takes a constant, an overflowable flag and prior
overflow indicators. It forces the value to fit the type and sets
- TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
+ TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.
+
+ Note: Since the folders get called on non-gimple code as well as
+ gimple code, we need to handle GIMPLE tuples as well as their
+ corresponding tree equivalents. */
#include "config.h"
#include "system.h"
#include "langhooks.h"
#include "md5.h"
+/* Non-zero if we are folding constants inside an initializer; zero
+ otherwise. */
+int folding_initializer = 0;
+
/* The following constants represent a bit based encoding of GCC's
comparison operators. This encoding simplifies transformations
on relational comparison operators, such as AND and OR. */
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
+static int native_encode_expr (tree, unsigned char *, int);
+static tree native_interpret_expr (tree, unsigned char *, int);
+
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
*hi = words[2] + words[3] * BASE;
}
\f
-/* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
- in overflow of the value, when >0 we are only interested in signed
- overflow, for <0 we are interested in any overflow. OVERFLOWED
- indicates whether overflow has already occurred. CONST_OVERFLOWED
- indicates whether constant overflow has already occurred. We force
- T's value to be within range of T's type (by setting to 0 or 1 all
- the bits outside the type's range). We set TREE_OVERFLOWED if,
- OVERFLOWED is nonzero,
- or OVERFLOWABLE is >0 and signed overflow occurs
- or OVERFLOWABLE is <0 and any overflow occurs
- We set TREE_CONSTANT_OVERFLOWED if,
- CONST_OVERFLOWED is nonzero
- or we set TREE_OVERFLOWED.
- We return either the original T, or a copy. */
+/* Force the double-word integer L1, H1 to be within the range of the
+ integer type TYPE. Stores the properly truncated and sign-extended
+ double-word integer in *LV, *HV. Returns true if the operation
+ overflows, that is, argument and result are different. */
-tree
-force_fit_type (tree t, int overflowable,
- bool overflowed, bool overflowed_const)
+int
+fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
- unsigned HOST_WIDE_INT low;
- HOST_WIDE_INT high;
+ unsigned HOST_WIDE_INT low0 = l1;
+ HOST_WIDE_INT high0 = h1;
unsigned int prec;
int sign_extended_type;
- gcc_assert (TREE_CODE (t) == INTEGER_CST);
-
- low = TREE_INT_CST_LOW (t);
- high = TREE_INT_CST_HIGH (t);
-
- if (POINTER_TYPE_P (TREE_TYPE (t))
- || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
+ if (POINTER_TYPE_P (type)
+ || TREE_CODE (type) == OFFSET_TYPE)
prec = POINTER_SIZE;
else
- prec = TYPE_PRECISION (TREE_TYPE (t));
+ prec = TYPE_PRECISION (type);
+
/* Size types *are* sign extended. */
- sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
- || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
- && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
+ sign_extended_type = (!TYPE_UNSIGNED (type)
+ || (TREE_CODE (type) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (type)));
/* First clear all bits that are beyond the type's precision. */
-
if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
;
else if (prec > HOST_BITS_PER_WIDE_INT)
- high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
+ h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
else
{
- high = 0;
+ h1 = 0;
if (prec < HOST_BITS_PER_WIDE_INT)
- low &= ~((HOST_WIDE_INT) (-1) << prec);
+ l1 &= ~((HOST_WIDE_INT) (-1) << prec);
}
+ /* Then do sign extension if necessary. */
if (!sign_extended_type)
/* No sign extension */;
else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
  ; /* Correct width already.  */
else if (prec > HOST_BITS_PER_WIDE_INT)
{
/* Sign extend top half? */
- if (high & ((unsigned HOST_WIDE_INT)1
- << (prec - HOST_BITS_PER_WIDE_INT - 1)))
- high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
+ if (h1 & ((unsigned HOST_WIDE_INT)1
+ << (prec - HOST_BITS_PER_WIDE_INT - 1)))
+ h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
}
else if (prec == HOST_BITS_PER_WIDE_INT)
{
- if ((HOST_WIDE_INT)low < 0)
- high = -1;
+ if ((HOST_WIDE_INT)l1 < 0)
+ h1 = -1;
}
else
{
/* Sign extend bottom half? */
- if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
+ if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
{
- high = -1;
- low |= (HOST_WIDE_INT)(-1) << prec;
+ h1 = -1;
+ l1 |= (HOST_WIDE_INT)(-1) << prec;
}
}
- /* If the value changed, return a new node. */
- if (overflowed || overflowed_const
- || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
- {
- t = build_int_cst_wide (TREE_TYPE (t), low, high);
+ *lv = l1;
+ *hv = h1;
+
+ /* If the value didn't fit, signal overflow. */
+ return l1 != low0 || h1 != high0;
+}
+
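
As a standalone illustration of the truncation and sign extension that
fit_double_type performs, here is a minimal single-word sketch in plain
C99.  Everything in it (fit_signed, the fixed 64-bit host word, the
assumption 1 <= prec <= 64) belongs to the sketch, not to fold-const.c:

    #include <stdio.h>
    #include <stdint.h>

    /* Force VAL to fit a signed type of PREC bits (1 <= PREC <= 64);
       report whether the value changed, i.e. overflowed.  */
    static int
    fit_signed (uint64_t val, unsigned prec, int64_t *out)
    {
      uint64_t mask = prec >= 64 ? ~0ULL : (1ULL << prec) - 1;
      uint64_t low = val & mask;                 /* clear bits beyond PREC */
      if (prec < 64 && (low & (1ULL << (prec - 1))))
        low |= ~mask;                            /* sign extend */
      *out = (int64_t) low;
      return (uint64_t) *out != val;             /* overflowed if changed */
    }

    int
    main (void)
    {
      int64_t r;
      int ovf = fit_signed (0x1234, 8, &r);      /* truncates to 0x34 == 52 */
      printf ("%lld overflow=%d\n", (long long) r, ovf);
      return 0;
    }

fit_double_type applies the same idea across a two-word LOW/HIGH pair,
which is why it needs separate branches for precisions below, at, and
above HOST_BITS_PER_WIDE_INT.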
+/* We force the double-int HIGH:LOW to the range of the type TYPE by
+ sign or zero extending it.
+ OVERFLOWABLE indicates if we are interested
+ in overflow of the value, when >0 we are only interested in signed
+ overflow, for <0 we are interested in any overflow. OVERFLOWED
+ indicates whether overflow has already occurred. CONST_OVERFLOWED
+   indicates whether constant overflow has already occurred.  We set
+   TREE_OVERFLOW if
+	OVERFLOWED is nonzero,
+	or OVERFLOWABLE is >0 and signed overflow occurs,
+	or OVERFLOWABLE is <0 and any overflow occurs.
+   We set TREE_CONSTANT_OVERFLOW if
+	CONST_OVERFLOWED is nonzero,
+	or we set TREE_OVERFLOW.
+ We return a new tree node for the extended double-int. The node
+ is shared if no overflow flags are set. */
+tree
+force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
+ HOST_WIDE_INT high, int overflowable,
+ bool overflowed, bool overflowed_const)
+{
+ int sign_extended_type;
+ bool overflow;
+
+ /* Size types *are* sign extended. */
+ sign_extended_type = (!TYPE_UNSIGNED (type)
+ || (TREE_CODE (type) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (type)));
+
+ overflow = fit_double_type (low, high, &low, &high, type);
+
+ /* If we need to set overflow flags, return a new unshared node. */
+ if (overflowed || overflowed_const || overflow)
+ {
if (overflowed
|| overflowable < 0
|| (overflowable > 0 && sign_extended_type))
{
- t = copy_node (t);
+ tree t = make_node (INTEGER_CST);
+ TREE_INT_CST_LOW (t) = low;
+ TREE_INT_CST_HIGH (t) = high;
+ TREE_TYPE (t) = type;
TREE_OVERFLOW (t) = 1;
TREE_CONSTANT_OVERFLOW (t) = 1;
+
+ return t;
}
else if (overflowed_const)
{
- t = copy_node (t);
+ tree t = make_node (INTEGER_CST);
+ TREE_INT_CST_LOW (t) = low;
+ TREE_INT_CST_HIGH (t) = high;
+ TREE_TYPE (t) = type;
TREE_CONSTANT_OVERFLOW (t) = 1;
+
+ return t;
}
}
- return t;
+ /* Else build a shared node. */
+ return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
+ Return nonzero if the operation overflows according to UNSIGNED_P.
Each argument is given as two `HOST_WIDE_INT' pieces.
One argument is L1 and H1; the other, L2 and H2.
The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
-add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
- unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
- unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
+add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
+ bool unsigned_p)
{
unsigned HOST_WIDE_INT l;
HOST_WIDE_INT h;
*lv = l;
*hv = h;
- return OVERFLOW_SUM_SIGN (h1, h2, h);
+
+ if (unsigned_p)
+ return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
+ else
+ return OVERFLOW_SUM_SIGN (h1, h2, h);
}
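
The unsigned case above relies on the standard wrap-around test: an
unsigned addition overflowed iff the wrapped sum compares below one of
the addends.  A single-word sketch of the same test (add_overflows_u64
is a name invented here, not a GCC routine):

    #include <assert.h>
    #include <stdint.h>

    /* Unsigned addition wraps modulo 2^64; overflow occurred iff the
       wrapped sum is below the first addend.  */
    static int
    add_overflows_u64 (uint64_t a, uint64_t b, uint64_t *sum)
    {
      *sum = a + b;
      return *sum < a;
    }

    int
    main (void)
    {
      uint64_t s;
      assert (!add_overflows_u64 (1, 2, &s) && s == 3);
      assert (add_overflows_u64 (~0ULL, 1, &s) && s == 0);
      return 0;
    }

In add_double_with_sign the comparison is made on the high words only,
because any carry out of the low word has already been folded into h.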
/* Negate a doubleword integer with doubleword result.
}
\f
/* Multiply two doubleword integers with doubleword result.
- Return nonzero if the operation overflows, assuming it's signed.
+ Return nonzero if the operation overflows according to UNSIGNED_P.
Each argument is given as two `HOST_WIDE_INT' pieces.
One argument is L1 and H1; the other, L2 and H2.
The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
-mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
- unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
- unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
+mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
+ bool unsigned_p)
{
HOST_WIDE_INT arg1[4];
HOST_WIDE_INT arg2[4];
prod[i + 4] = carry;
}
- decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
-
- /* Check for overflow by calculating the top half of the answer in full;
- it should agree with the low half's sign bit. */
+ decode (prod, lv, hv);
decode (prod + 4, &toplow, &tophigh);
+
+ /* Unsigned overflow is immediate. */
+ if (unsigned_p)
+ return (toplow | tophigh) != 0;
+
+ /* Check for signed overflow by calculating the signed representation of the
+ top half of the result; it should agree with the low half's sign bit. */
if (h1 < 0)
{
neg_double (l2, h2, &neglow, &neghigh);
CASE_FLT_FN (BUILT_IN_ATAN):
CASE_FLT_FN (BUILT_IN_ATANH):
CASE_FLT_FN (BUILT_IN_CBRT):
+ CASE_FLT_FN (BUILT_IN_ERF):
+ CASE_FLT_FN (BUILT_IN_LLROUND):
+ CASE_FLT_FN (BUILT_IN_LROUND):
+ CASE_FLT_FN (BUILT_IN_ROUND):
CASE_FLT_FN (BUILT_IN_SIN):
CASE_FLT_FN (BUILT_IN_SINH):
CASE_FLT_FN (BUILT_IN_TAN):
CASE_FLT_FN (BUILT_IN_TANH):
+ CASE_FLT_FN (BUILT_IN_TRUNC):
return true;
+ CASE_FLT_FN (BUILT_IN_LLRINT):
+ CASE_FLT_FN (BUILT_IN_LRINT):
+ CASE_FLT_FN (BUILT_IN_NEARBYINT):
+ CASE_FLT_FN (BUILT_IN_RINT):
+ return !flag_rounding_math;
+
default:
break;
}
}
/* Determine whether an expression T can be cheaply negated using
- the function negate_expr. */
+ the function negate_expr without introducing undefined overflow. */
static bool
negate_expr_p (tree t)
switch (TREE_CODE (t))
{
case INTEGER_CST:
- if (TYPE_UNSIGNED (type) || ! flag_trapv)
+ if (TYPE_UNSIGNED (type)
+ || (flag_wrapv && ! flag_trapv))
return true;
/* Check that -CST will not overflow type. */
return may_negate_without_overflow_p (t);
case BIT_NOT_EXPR:
- return INTEGRAL_TYPE_P (type);
+ return INTEGRAL_TYPE_P (type)
+ && (TYPE_UNSIGNED (type)
+ || (flag_wrapv && !flag_trapv));
case REAL_CST:
case NEGATE_EXPR:
&& negate_expr_p (TREE_IMAGPART (t));
case PLUS_EXPR:
- if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
+ if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
+ || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
return false;
/* -(A + B) -> (-B) - A. */
if (negate_expr_p (TREE_OPERAND (t, 1))
case MINUS_EXPR:
/* We can't turn -(A-B) into B-A when we honor signed zeros. */
- return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
+ return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
+ && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
&& reorder_operands_p (TREE_OPERAND (t, 0),
TREE_OPERAND (t, 1));
return false;
}
-/* Given T, an expression, return the negation of T. Allow for T to be
- null, in which case return null. */
+/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
+   no simplification is possible.
+ If negate_expr_p would return true for T, NULL_TREE will never be
+ returned. */
static tree
-negate_expr (tree t)
+fold_negate_expr (tree t)
{
- tree type;
+ tree type = TREE_TYPE (t);
tree tem;
- if (t == 0)
- return 0;
-
- type = TREE_TYPE (t);
- STRIP_SIGN_NOPS (t);
-
switch (TREE_CODE (t))
{
/* Convert - (~A) to A + 1. */
tem = fold_negate_const (t, type);
/* Two's complement FP formats, such as c4x, may overflow. */
if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
- return fold_convert (type, tem);
+ return tem;
break;
case COMPLEX_CST:
break;
case NEGATE_EXPR:
- return fold_convert (type, TREE_OPERAND (t, 0));
+ return TREE_OPERAND (t, 0);
case PLUS_EXPR:
- if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
+ if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
+ && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
{
/* -(A + B) -> (-B) - A. */
if (negate_expr_p (TREE_OPERAND (t, 1))
TREE_OPERAND (t, 1)))
{
tem = negate_expr (TREE_OPERAND (t, 1));
- tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
- tem, TREE_OPERAND (t, 0));
- return fold_convert (type, tem);
+ return fold_build2 (MINUS_EXPR, type,
+ tem, TREE_OPERAND (t, 0));
}
/* -(A + B) -> (-A) - B. */
if (negate_expr_p (TREE_OPERAND (t, 0)))
{
tem = negate_expr (TREE_OPERAND (t, 0));
- tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
- tem, TREE_OPERAND (t, 1));
- return fold_convert (type, tem);
+ return fold_build2 (MINUS_EXPR, type,
+ tem, TREE_OPERAND (t, 1));
}
}
break;
case MINUS_EXPR:
/* - (A - B) -> B - A */
- if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
+ if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
+ && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
&& reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
- return fold_convert (type,
- fold_build2 (MINUS_EXPR, TREE_TYPE (t),
- TREE_OPERAND (t, 1),
- TREE_OPERAND (t, 0)));
+ return fold_build2 (MINUS_EXPR, type,
+ TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
break;
case MULT_EXPR:
- if (TYPE_UNSIGNED (TREE_TYPE (t)))
+ if (TYPE_UNSIGNED (type))
break;
/* Fall through. */
case RDIV_EXPR:
- if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
+ if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
{
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- TREE_OPERAND (t, 0),
- negate_expr (tem)));
+ return fold_build2 (TREE_CODE (t), type,
+ TREE_OPERAND (t, 0), negate_expr (tem));
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- negate_expr (tem),
- TREE_OPERAND (t, 1)));
+ return fold_build2 (TREE_CODE (t), type,
+ negate_expr (tem), TREE_OPERAND (t, 1));
}
break;
case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
- if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
+ if (!TYPE_UNSIGNED (type) && !flag_wrapv)
{
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- TREE_OPERAND (t, 0),
- negate_expr (tem)));
+ return fold_build2 (TREE_CODE (t), type,
+ TREE_OPERAND (t, 0), negate_expr (tem));
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- negate_expr (tem),
- TREE_OPERAND (t, 1)));
+ return fold_build2 (TREE_CODE (t), type,
+ negate_expr (tem), TREE_OPERAND (t, 1));
}
break;
{
tem = strip_float_extensions (t);
if (tem != t && negate_expr_p (tem))
- return fold_convert (type, negate_expr (tem));
+ return negate_expr (tem);
}
break;
break;
}
- tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
+ return NULL_TREE;
+}
+
+/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
+   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
+ return NULL_TREE. */
+
+static tree
+negate_expr (tree t)
+{
+ tree type, tem;
+
+ if (t == NULL_TREE)
+ return NULL_TREE;
+
+ type = TREE_TYPE (t);
+ STRIP_SIGN_NOPS (t);
+
+ tem = fold_negate_expr (t);
+ if (!tem)
+ tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
return fold_convert (type, tem);
}
\f
fold_convert (type, t2));
}
\f
+/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
+ for use in int_const_binop, size_binop and size_diffop. */
+
+static bool
+int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
+{
+ if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
+ return false;
+ if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
+ return false;
+
+ switch (code)
+ {
+ case LSHIFT_EXPR:
+ case RSHIFT_EXPR:
+ case LROTATE_EXPR:
+ case RROTATE_EXPR:
+ return true;
+
+ default:
+ break;
+ }
+
+ return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
+ && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
+ && TYPE_MODE (type1) == TYPE_MODE (type2);
+}
+
+
/* Combine two integer constants ARG1 and ARG2 under operation CODE
to produce a new constant. Return NULL_TREE if we don't know how
to evaluate CODE at compile-time.
return NULL_TREE;
}
- t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
-
if (notrunc)
{
+ t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
+
/* Propagate overflow flags ourselves. */
if (((!uns || is_sizetype) && overflow)
| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
}
}
else
- t = force_fit_type (t, 1,
- ((!uns || is_sizetype) && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
- TREE_CONSTANT_OVERFLOW (arg1)
- | TREE_CONSTANT_OVERFLOW (arg2));
+ t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
+ ((!uns || is_sizetype) && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
+ TREE_CONSTANT_OVERFLOW (arg1)
+ | TREE_CONSTANT_OVERFLOW (arg2));
return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
constant. We assume ARG1 and ARG2 have the same data type, or at least
- are the same kind of constant and the same machine mode.
+ are the same kind of constant and the same machine mode. Return zero if
+ combining the constants is not allowed in the current operating mode.
If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
+ /* Sanity check for the recursive cases. */
+ if (!arg1 || !arg2)
+ return NULL_TREE;
+
STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
/* Don't constant fold this floating point operation if
the result has overflowed and flag_trapping_math. */
-
if (flag_trapping_math
&& MODE_HAS_INFINITIES (mode)
&& REAL_VALUE_ISINF (result)
result may depend upon the run-time rounding mode and
flag_rounding_math is set, or if GCC's software emulation
is unable to accurately represent the result. */
-
if ((flag_rounding_math
|| (REAL_MODE_FORMAT_COMPOSITE_P (mode)
&& !flag_unsafe_math_optimizations))
tree i1 = TREE_IMAGPART (arg1);
tree r2 = TREE_REALPART (arg2);
tree i2 = TREE_IMAGPART (arg2);
- tree t;
+ tree real, imag;
switch (code)
{
case PLUS_EXPR:
- t = build_complex (type,
- const_binop (PLUS_EXPR, r1, r2, notrunc),
- const_binop (PLUS_EXPR, i1, i2, notrunc));
- break;
-
case MINUS_EXPR:
- t = build_complex (type,
- const_binop (MINUS_EXPR, r1, r2, notrunc),
- const_binop (MINUS_EXPR, i1, i2, notrunc));
+ real = const_binop (code, r1, r2, notrunc);
+ imag = const_binop (code, i1, i2, notrunc);
break;
case MULT_EXPR:
- t = build_complex (type,
- const_binop (MINUS_EXPR,
- const_binop (MULT_EXPR,
- r1, r2, notrunc),
- const_binop (MULT_EXPR,
- i1, i2, notrunc),
- notrunc),
- const_binop (PLUS_EXPR,
- const_binop (MULT_EXPR,
- r1, i2, notrunc),
- const_binop (MULT_EXPR,
- i1, r2, notrunc),
- notrunc));
+ real = const_binop (MINUS_EXPR,
+ const_binop (MULT_EXPR, r1, r2, notrunc),
+ const_binop (MULT_EXPR, i1, i2, notrunc),
+ notrunc);
+ imag = const_binop (PLUS_EXPR,
+ const_binop (MULT_EXPR, r1, i2, notrunc),
+ const_binop (MULT_EXPR, i1, r2, notrunc),
+ notrunc);
break;
case RDIV_EXPR:
{
- tree t1, t2, real, imag;
tree magsquared
= const_binop (PLUS_EXPR,
const_binop (MULT_EXPR, r2, r2, notrunc),
const_binop (MULT_EXPR, i2, i2, notrunc),
notrunc);
-
- t1 = const_binop (PLUS_EXPR,
- const_binop (MULT_EXPR, r1, r2, notrunc),
- const_binop (MULT_EXPR, i1, i2, notrunc),
- notrunc);
- t2 = const_binop (MINUS_EXPR,
- const_binop (MULT_EXPR, i1, r2, notrunc),
- const_binop (MULT_EXPR, r1, i2, notrunc),
- notrunc);
+ tree t1
+ = const_binop (PLUS_EXPR,
+ const_binop (MULT_EXPR, r1, r2, notrunc),
+ const_binop (MULT_EXPR, i1, i2, notrunc),
+ notrunc);
+ tree t2
+ = const_binop (MINUS_EXPR,
+ const_binop (MULT_EXPR, i1, r2, notrunc),
+ const_binop (MULT_EXPR, r1, i2, notrunc),
+ notrunc);
if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
- {
- real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
- imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
- }
- else
- {
- real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
- imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
- if (!real || !imag)
- return NULL_TREE;
- }
+ code = TRUNC_DIV_EXPR;
- t = build_complex (type, real, imag);
+ real = const_binop (code, t1, magsquared, notrunc);
+ imag = const_binop (code, t2, magsquared, notrunc);
}
break;
default:
return NULL_TREE;
}
- return t;
+
+ if (real && imag)
+ return build_complex (type, real, imag);
}
+
return NULL_TREE;
}
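
The RDIV_EXPR case implements the textbook rule for complex division:
multiply numerator and denominator by the conjugate of the denominator.
A standalone double-precision sketch (cdiv is illustrative and omits the
zero-denominator check a real implementation needs):

    #include <stdio.h>

    /* (r1 + i1*I) / (r2 + i2*I)
         = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*I) / (r2*r2 + i2*i2)  */
    static void
    cdiv (double r1, double i1, double r2, double i2,
          double *re, double *im)
    {
      double mag = r2 * r2 + i2 * i2;   /* |denominator|^2 */
      *re = (r1 * r2 + i1 * i2) / mag;
      *im = (i1 * r2 - r1 * i2) / mag;
    }

    int
    main (void)
    {
      double re, im;
      cdiv (1.0, 0.0, 0.0, 1.0, &re, &im);   /* 1 / I == -I */
      printf ("%g %g\n", re, im);            /* prints 0 -1 */
      return 0;
    }

The t1 and t2 temporaries in the code above are exactly the two
numerators of this rule, and magsquared is the shared denominator.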
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
is a tree code. The type of the result is taken from the operands.
- Both must be the same type integer type and it must be a size type.
+   Both must be equivalent integer types, as defined by int_binop_types_match_p.
If the operands are constant, so is the result. */
tree
{
tree type = TREE_TYPE (arg0);
- gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
- && type == TREE_TYPE (arg1));
+ if (arg0 == error_mark_node || arg1 == error_mark_node)
+ return error_mark_node;
+
+ gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
+ TREE_TYPE (arg1)));
/* Handle the special case of two integer constants faster. */
if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
return int_const_binop (code, arg0, arg1, 0);
}
- if (arg0 == error_mark_node || arg1 == error_mark_node)
- return error_mark_node;
-
return fold_build2 (code, type, arg0, arg1);
}
tree type = TREE_TYPE (arg0);
tree ctype;
- gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
- && type == TREE_TYPE (arg1));
+ gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
+ TREE_TYPE (arg1)));
/* If the type is already signed, just do the simple thing. */
if (!TYPE_UNSIGNED (type))
return size_binop (MINUS_EXPR, arg0, arg1);
- ctype = type == bitsizetype ? sbitsizetype : ssizetype;
+ if (type == sizetype)
+ ctype = ssizetype;
+ else if (type == bitsizetype)
+ ctype = sbitsizetype;
+ else
+ ctype = lang_hooks.types.signed_type (type);
/* If either operand is not a constant, do the conversions to the signed
type and subtract. The hardware will do the right thing with any
/* Given an integer constant, make new constant with new type,
appropriately sign-extended or truncated. */
- t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
-
- t = force_fit_type (t,
- /* Don't set the overflow when
- converting a pointer */
- !POINTER_TYPE_P (TREE_TYPE (arg1)),
- (TREE_INT_CST_HIGH (arg1) < 0
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1))))
- | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
+ t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1),
+ /* Don't set the overflow when
+ converting a pointer */
+ !POINTER_TYPE_P (TREE_TYPE (arg1)),
+ (TREE_INT_CST_HIGH (arg1) < 0
+ && (TYPE_UNSIGNED (type)
+ < TYPE_UNSIGNED (TREE_TYPE (arg1))))
+ | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
return t;
}
real_trunc (&r, VOIDmode, &x);
break;
- case FIX_CEIL_EXPR:
- real_ceil (&r, VOIDmode, &x);
- break;
-
- case FIX_FLOOR_EXPR:
- real_floor (&r, VOIDmode, &x);
- break;
-
- case FIX_ROUND_EXPR:
- real_round (&r, VOIDmode, &x);
- break;
-
default:
gcc_unreachable ();
}
if (! overflow)
REAL_VALUE_TO_INT (&low, &high, r);
- t = build_int_cst_wide (type, low, high);
-
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
+ t = force_fit_type_double (type, low, high, -1,
+ overflow | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
return t;
}
return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
case VOID_TYPE:
- return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
+ tem = fold_ignored_result (arg);
+ if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
+ return tem;
+ return fold_build1 (NOP_EXPR, type, tem);
default:
gcc_unreachable ();
case WITH_CLEANUP_EXPR:
case COMPOUND_EXPR:
case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
case TARGET_EXPR:
case COND_EXPR:
case BIND_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
return 0;
+ /* If both types don't have the same precision, then it is not safe
+ to strip NOPs. */
+ if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
+ return 0;
+
STRIP_NOPS (arg0);
STRIP_NOPS (arg1);
+ /* In case both args are comparisons but with different comparison
+ code, try to swap the comparison operands of one arg to produce
+ a match and compare that variant. */
+ if (TREE_CODE (arg0) != TREE_CODE (arg1)
+ && COMPARISON_CLASS_P (arg0)
+ && COMPARISON_CLASS_P (arg1))
+ {
+ enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
+
+ if (TREE_CODE (arg0) == swap_code)
+ return operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 1), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 0), flags);
+ }
+
if (TREE_CODE (arg0) != TREE_CODE (arg1)
/* This is needed for conversions and for COMPONENT_REF.
Might as well play it safe and always test this. */
switch (TREE_CODE (arg0))
{
case INTEGER_CST:
- return (! TREE_CONSTANT_OVERFLOW (arg0)
- && ! TREE_CONSTANT_OVERFLOW (arg1)
- && tree_int_cst_equal (arg0, arg1));
+ return tree_int_cst_equal (arg0, arg1);
case REAL_CST:
- return (! TREE_CONSTANT_OVERFLOW (arg0)
- && ! TREE_CONSTANT_OVERFLOW (arg1)
- && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
- TREE_REAL_CST (arg1)));
+ if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
+ TREE_REAL_CST (arg1)))
+ return 1;
+
+ if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
+ {
+ /* If we do not distinguish between signed and unsigned zero,
+ consider them equal. */
+ if (real_zerop (arg0) && real_zerop (arg1))
+ return 1;
+ }
+ return 0;
case VECTOR_CST:
{
tree v1, v2;
- if (TREE_CONSTANT_OVERFLOW (arg0)
- || TREE_CONSTANT_OVERFLOW (arg1))
- return 0;
-
v1 = TREE_VECTOR_CST_ELTS (arg0);
v2 = TREE_VECTOR_CST_ELTS (arg1);
while (v1 && v2)
{
case NOP_EXPR:
case CONVERT_EXPR:
- case FIX_CEIL_EXPR:
case FIX_TRUNC_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_ROUND_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg0))
!= TYPE_UNSIGNED (TREE_TYPE (arg1)))
return 0;
FIXME: one would think we would fold the result, but it causes
problems with the dominator optimizer. */
+
tree
-invert_truthvalue (tree arg)
+fold_truth_not_expr (tree arg)
{
tree type = TREE_TYPE (arg);
enum tree_code code = TREE_CODE (arg);
- if (code == ERROR_MARK)
- return arg;
-
/* If this is a comparison, we can simply invert it, except for
floating-point non-equality comparisons, in which case we just
enclose a TRUTH_NOT_EXPR around what we have. */
&& flag_trapping_math
&& code != ORDERED_EXPR && code != UNORDERED_EXPR
&& code != NE_EXPR && code != EQ_EXPR)
- return build1 (TRUTH_NOT_EXPR, type, arg);
+ return NULL_TREE;
else
{
code = invert_tree_comparison (code,
HONOR_NANS (TYPE_MODE (op_type)));
if (code == ERROR_MARK)
- return build1 (TRUTH_NOT_EXPR, type, arg);
+ return NULL_TREE;
else
return build2 (code, type,
TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
case NOP_EXPR:
if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
- break;
+ return build1 (TRUTH_NOT_EXPR, type, arg);
case CONVERT_EXPR:
case FLOAT_EXPR:
default:
break;
}
- gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
- return build1 (TRUTH_NOT_EXPR, type, arg);
+
+ return NULL_TREE;
+}
+
+/* Return a simplified tree node for the truth-negation of ARG. This
+ never alters ARG itself. We assume that ARG is an operation that
+ returns a truth value (0 or 1).
+
+ FIXME: one would think we would fold the result, but it causes
+ problems with the dominator optimizer. */
+
+tree
+invert_truthvalue (tree arg)
+{
+ tree tem;
+
+ if (TREE_CODE (arg) == ERROR_MARK)
+ return arg;
+
+ tem = fold_truth_not_expr (arg);
+ if (!tem)
+ tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
+
+ return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
lbitpos = nbitsize - lbitsize - lbitpos;
/* Make the mask to be used against the extracted field. */
- mask = build_int_cst (unsigned_type, -1);
- mask = force_fit_type (mask, 0, false, false);
- mask = fold_convert (unsigned_type, mask);
+ mask = build_int_cst_type (unsigned_type, -1);
mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask,
size_int (nbitsize - lbitsize - lbitpos), 0);
if (! const_p)
/* If not comparing with constant, just rework the comparison
and return. */
- return build2 (code, compare_type,
- build2 (BIT_AND_EXPR, unsigned_type,
- make_bit_field_ref (linner, unsigned_type,
- nbitsize, nbitpos, 1),
- mask),
- build2 (BIT_AND_EXPR, unsigned_type,
- make_bit_field_ref (rinner, unsigned_type,
- nbitsize, nbitpos, 1),
- mask));
+ return fold_build2 (code, compare_type,
+ fold_build2 (BIT_AND_EXPR, unsigned_type,
+ make_bit_field_ref (linner,
+ unsigned_type,
+ nbitsize, nbitpos,
+ 1),
+ mask),
+ fold_build2 (BIT_AND_EXPR, unsigned_type,
+ make_bit_field_ref (rinner,
+ unsigned_type,
+ nbitsize, nbitpos,
+ 1),
+ mask));
/* Otherwise, we are handling the constant case. See if the constant is too
big for the field. Warn and return a tree for 0 (false) if so. We do
unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
precision = TYPE_PRECISION (unsigned_type);
- mask = build_int_cst (unsigned_type, -1);
- mask = force_fit_type (mask, 0, false, false);
+ mask = build_int_cst_type (unsigned_type, -1);
mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
unsigned int precision = TYPE_PRECISION (type);
tree tmask;
- tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
- tmask = force_fit_type (tmask, 0, false, false);
+ tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
return
tree_int_cst_equal (mask,
high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
fold_convert (arg0_type,
high_positive),
- fold_convert (arg0_type,
- integer_one_node));
+ build_int_cst (arg0_type, 1));
/* If the low bound is specified, "and" the range with the
range for which the original unsigned value will be
{
tree type = TREE_TYPE (val);
- if (INTEGRAL_TYPE_P (type) && val == TYPE_MIN_VALUE (type))
+ if (INTEGRAL_TYPE_P (type)
+ && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
return 0;
else
return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
{
tree type = TREE_TYPE (val);
- if (INTEGRAL_TYPE_P (type) && val == TYPE_MAX_VALUE (type))
+ if (INTEGRAL_TYPE_P (type)
+ && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
return 0;
else
return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
/* Avoid these transformations if the COND_EXPR may be used
as an lvalue in the C++ front-end. PR c++/19199. */
&& (in_gimple_form
- || strcmp (lang_hooks.name, "GNU C++") != 0
+ || (strcmp (lang_hooks.name, "GNU C++") != 0
+ && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
|| ! maybe_lvalue_p (arg1)
|| ! maybe_lvalue_p (arg2)))
{
OEP_ONLY_CONST)
&& operand_equal_p (arg01,
const_binop (PLUS_EXPR, arg2,
- integer_one_node, 0),
+ build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
type, arg1, arg2));
OEP_ONLY_CONST)
&& operand_equal_p (arg01,
const_binop (MINUS_EXPR, arg2,
- integer_one_node, 0),
+ build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
type, arg1, arg2));
OEP_ONLY_CONST)
&& operand_equal_p (arg01,
const_binop (MINUS_EXPR, arg2,
- integer_one_node, 0),
+ build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
type, arg1, arg2));
OEP_ONLY_CONST)
&& operand_equal_p (arg01,
const_binop (PLUS_EXPR, arg2,
- integer_one_node, 0),
+ build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
type, arg1, arg2));
tree lntype, rntype, result;
int first_bit, end_bit;
int volatilep;
+ tree orig_lhs = lhs, orig_rhs = rhs;
+ enum tree_code orig_code = code;
/* Start by getting the comparison codes. Fail if anything is volatile.
If one operand is a BIT_AND_EXPR with the constant one, treat it as if
build_int_cst (TREE_TYPE (ll_arg), 0));
if (LOGICAL_OP_NON_SHORT_CIRCUIT)
- return build2 (code, truth_type, lhs, rhs);
+ {
+ if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
+ return build2 (code, truth_type, lhs, rhs);
+ return NULL_TREE;
+ }
}
/* See if the comparisons can be merged. Then get all the parameters for
{
case NE_EXPR: case LT_EXPR: case LE_EXPR:
{
- /* FIXME: We should be able to invert code without building a
- scratch tree node, but doing so would require us to
- duplicate a part of invert_truthvalue here. */
- tree tem = invert_truthvalue (build2 (code, type, op0, op1));
- tem = optimize_minmax_comparison (TREE_CODE (tem),
- TREE_TYPE (tem),
- TREE_OPERAND (tem, 0),
- TREE_OPERAND (tem, 1));
- return invert_truthvalue (tem);
+ tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
+ type, op0, op1);
+ if (tem)
+ return invert_truthvalue (tem);
+ return NULL_TREE;
}
case GE_EXPR:
return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
arg0, build_real (TREE_TYPE (arg0), max));
- /* The transformation below creates non-gimple code and thus is
- not appropriate if we are in gimple form. */
- if (in_gimple_form)
- return NULL_TREE;
-
temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
arg0, build_real (TREE_TYPE (arg0), max));
return fold_build1 (TRUTH_NOT_EXPR, type, temp);
tree arg01 = TREE_OPERAND (arg0, 1);
unsigned HOST_WIDE_INT lpart;
HOST_WIDE_INT hpart;
+ bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
bool neg_overflow;
int overflow;
/* We have to do this the hard way to detect unsigned overflow.
prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
- overflow = mul_double (TREE_INT_CST_LOW (arg01),
- TREE_INT_CST_HIGH (arg01),
- TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
- prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
- prod = force_fit_type (prod, -1, overflow, false);
+ overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
+ TREE_INT_CST_HIGH (arg01),
+ TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1),
+ &lpart, &hpart, unsigned_p);
+ prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
+ -1, overflow, false);
neg_overflow = false;
- if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
+ if (unsigned_p)
{
- tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
+ tmp = int_const_binop (MINUS_EXPR, arg01,
+ build_int_cst (TREE_TYPE (arg01), 1), 0);
lo = prod;
/* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
- overflow = add_double (TREE_INT_CST_LOW (prod),
- TREE_INT_CST_HIGH (prod),
- TREE_INT_CST_LOW (tmp),
- TREE_INT_CST_HIGH (tmp),
- &lpart, &hpart);
- hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
- hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
- TREE_CONSTANT_OVERFLOW (prod));
+ overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
+ TREE_INT_CST_HIGH (prod),
+ TREE_INT_CST_LOW (tmp),
+ TREE_INT_CST_HIGH (tmp),
+ &lpart, &hpart, unsigned_p);
+ hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
+ -1, overflow | TREE_OVERFLOW (prod),
+ TREE_CONSTANT_OVERFLOW (prod));
}
else if (tree_int_cst_sgn (arg01) >= 0)
{
- tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
+ tmp = int_const_binop (MINUS_EXPR, arg01,
+ build_int_cst (TREE_TYPE (arg01), 1), 0);
switch (tree_int_cst_sgn (arg1))
{
case -1:
/* A negative divisor reverses the relational operators. */
code = swap_tree_comparison (code);
- tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
+ tmp = int_const_binop (PLUS_EXPR, arg01,
+ build_int_cst (TREE_TYPE (arg01), 1), 0);
switch (tree_int_cst_sgn (arg1))
{
case -1:
enum machine_mode operand_mode = TYPE_MODE (type);
int ops_unsigned;
tree signed_type, unsigned_type, intermediate_type;
- tree tem;
+ tree tem, one;
/* First, see if we can fold the single bit test into a sign-bit
test. */
inner = build2 (RSHIFT_EXPR, intermediate_type,
inner, size_int (bitnum));
+ one = build_int_cst (intermediate_type, 1);
+
if (code == EQ_EXPR)
- inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
- inner, integer_one_node);
+ inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
/* Put the AND last so it can combine with more things. */
- inner = build2 (BIT_AND_EXPR, intermediate_type,
- inner, integer_one_node);
+ inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
/* Make sure to return the proper type. */
inner = fold_convert (result_type, inner);
fold_sign_changed_comparison (enum tree_code code, tree type,
tree arg0, tree arg1)
{
- tree arg0_inner, tmp;
+ tree arg0_inner;
tree inner_type, outer_type;
if (TREE_CODE (arg0) != NOP_EXPR
return NULL_TREE;
if (TREE_CODE (arg1) == INTEGER_CST)
- {
- tmp = build_int_cst_wide (inner_type,
- TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
- arg1 = force_fit_type (tmp, 0,
- TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- }
+ arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1), 0,
+ TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
else
arg1 = fold_convert (inner_type, arg1);
else
{
arg00 = arg0;
- if (!FLOAT_TYPE_P (type))
- arg01 = build_int_cst (type, 1);
- else
- arg01 = build_real (type, dconst1);
+ arg01 = build_one_cst (type);
}
if (TREE_CODE (arg1) == MULT_EXPR)
{
else
{
arg10 = arg1;
- if (!FLOAT_TYPE_P (type))
- arg11 = build_int_cst (type, 1);
- else
- arg11 = build_real (type, dconst1);
+ arg11 = build_one_cst (type);
}
same = NULL_TREE;
else
maybe_same = arg11;
- if (exact_log2 (int11) > 0 && int01 % int11 == 0)
+ if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
{
alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
build_int_cst (TREE_TYPE (arg00),
return NULL_TREE;
}
-/* Fold a unary expression of code CODE and type TYPE with operand
- OP0. Return the folded expression if folding is successful.
- Otherwise, return NULL_TREE. */
+/* Subroutine of native_encode_expr. Encode the INTEGER_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
-tree
-fold_unary (enum tree_code code, tree type, tree op0)
+static int
+native_encode_int (tree expr, unsigned char *ptr, int len)
{
- tree tem;
- tree arg0;
- enum tree_code_class kind = TREE_CODE_CLASS (code);
+ tree type = TREE_TYPE (expr);
+ int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int byte, offset, word, words;
+ unsigned char value;
- gcc_assert (IS_EXPR_CODE_CLASS (kind)
- && TREE_CODE_LENGTH (code) == 1);
+ if (total_bytes > len)
+ return 0;
+ words = total_bytes / UNITS_PER_WORD;
- arg0 = op0;
- if (arg0)
+ for (byte = 0; byte < total_bytes; byte++)
{
- if (code == NOP_EXPR || code == CONVERT_EXPR
- || code == FLOAT_EXPR || code == ABS_EXPR)
+ int bitpos = byte * BITS_PER_UNIT;
+ if (bitpos < HOST_BITS_PER_WIDE_INT)
+ value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
+ else
+ value = (unsigned char) (TREE_INT_CST_HIGH (expr)
+ >> (bitpos - HOST_BITS_PER_WIDE_INT));
+
+ if (total_bytes > UNITS_PER_WORD)
{
- /* Don't use STRIP_NOPS, because signedness of argument type
- matters. */
- STRIP_SIGN_NOPS (arg0);
+ word = byte / UNITS_PER_WORD;
+ if (WORDS_BIG_ENDIAN)
+ word = (words - 1) - word;
+ offset = word * UNITS_PER_WORD;
+ if (BYTES_BIG_ENDIAN)
+ offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
+ else
+ offset += byte % UNITS_PER_WORD;
}
else
- {
- /* Strip any conversions that don't change the mode. This
- is safe for every expression, except for a comparison
- expression because its signedness is derived from its
- operands.
+ offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
+ ptr[offset] = value;
+ }
+ return total_bytes;
+}
+
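
Stripped of the word shuffling needed for big-endian hosts, the encoding
loop above is plain little-endian byte extraction.  A self-contained
sketch (encode_le and the fixed 8-byte width are simplifications made
for this sketch):

    #include <stdio.h>
    #include <stdint.h>

    /* Emit VALUE one byte at a time, least significant byte first.
       Return the number of bytes written, or 0 if LEN is too small.  */
    static int
    encode_le (uint64_t value, unsigned char *ptr, int len)
    {
      int total_bytes = 8;
      int byte;
      if (total_bytes > len)
        return 0;
      for (byte = 0; byte < total_bytes; byte++)
        ptr[byte] = (unsigned char) (value >> (byte * 8));
      return total_bytes;
    }

    int
    main (void)
    {
      unsigned char buf[8];
      encode_le (0x0102030405060708ULL, buf, sizeof buf);
      printf ("%02x %02x\n", buf[0], buf[7]);   /* prints 08 01 */
      return 0;
    }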
+
+/* Subroutine of native_encode_expr. Encode the REAL_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
+
+static int
+native_encode_real (tree expr, unsigned char *ptr, int len)
+{
+ tree type = TREE_TYPE (expr);
+ int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int byte, offset, word, words;
+ unsigned char value;
+
+ /* There are always 32 bits in each long, no matter the size of
+     the host's long.  We handle floating point representations with
+ up to 192 bits. */
+ long tmp[6];
+
+ if (total_bytes > len)
+ return 0;
+ words = total_bytes / UNITS_PER_WORD;
+
+ real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
+
+ for (byte = 0; byte < total_bytes; byte++)
+ {
+ int bitpos = byte * BITS_PER_UNIT;
+ value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
+
+ if (total_bytes > UNITS_PER_WORD)
+ {
+ word = byte / UNITS_PER_WORD;
+ if (FLOAT_WORDS_BIG_ENDIAN)
+ word = (words - 1) - word;
+ offset = word * UNITS_PER_WORD;
+ if (BYTES_BIG_ENDIAN)
+ offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
+ else
+ offset += byte % UNITS_PER_WORD;
+ }
+ else
+ offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
+ ptr[offset] = value;
+ }
+ return total_bytes;
+}
+
+/* Subroutine of native_encode_expr. Encode the COMPLEX_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
+
+static int
+native_encode_complex (tree expr, unsigned char *ptr, int len)
+{
+ int rsize, isize;
+ tree part;
+
+ part = TREE_REALPART (expr);
+ rsize = native_encode_expr (part, ptr, len);
+ if (rsize == 0)
+ return 0;
+ part = TREE_IMAGPART (expr);
+ isize = native_encode_expr (part, ptr+rsize, len-rsize);
+ if (isize != rsize)
+ return 0;
+ return rsize + isize;
+}
+
+
+/* Subroutine of native_encode_expr. Encode the VECTOR_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
+
+static int
+native_encode_vector (tree expr, unsigned char *ptr, int len)
+{
+ int i, size, offset, count;
+ tree itype, elem, elements;
+
+ offset = 0;
+ elements = TREE_VECTOR_CST_ELTS (expr);
+ count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
+ itype = TREE_TYPE (TREE_TYPE (expr));
+ size = GET_MODE_SIZE (TYPE_MODE (itype));
+ for (i = 0; i < count; i++)
+ {
+ if (elements)
+ {
+ elem = TREE_VALUE (elements);
+ elements = TREE_CHAIN (elements);
+ }
+ else
+ elem = NULL_TREE;
+
+ if (elem)
+ {
+ if (native_encode_expr (elem, ptr+offset, len-offset) != size)
+ return 0;
+ }
+ else
+ {
+ if (offset + size > len)
+ return 0;
+ memset (ptr+offset, 0, size);
+ }
+ offset += size;
+ }
+ return offset;
+}
+
+
+/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
+ REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
+ buffer PTR of length LEN bytes. Return the number of bytes
+ placed in the buffer, or zero upon failure. */
+
+static int
+native_encode_expr (tree expr, unsigned char *ptr, int len)
+{
+ switch (TREE_CODE (expr))
+ {
+ case INTEGER_CST:
+ return native_encode_int (expr, ptr, len);
+
+ case REAL_CST:
+ return native_encode_real (expr, ptr, len);
+
+ case COMPLEX_CST:
+ return native_encode_complex (expr, ptr, len);
+
+ case VECTOR_CST:
+ return native_encode_vector (expr, ptr, len);
+
+ default:
+ return 0;
+ }
+}
+
+
+/* Subroutine of native_interpret_expr. Interpret the contents of
+ the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
+ If the buffer cannot be interpreted, return NULL_TREE. */
+
+static tree
+native_interpret_int (tree type, unsigned char *ptr, int len)
+{
+ int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int byte, offset, word, words;
+ unsigned char value;
+  unsigned HOST_WIDE_INT lo = 0;
+ HOST_WIDE_INT hi = 0;
+
+ if (total_bytes > len)
+ return NULL_TREE;
+ if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
+ return NULL_TREE;
+ words = total_bytes / UNITS_PER_WORD;
+
+ for (byte = 0; byte < total_bytes; byte++)
+ {
+ int bitpos = byte * BITS_PER_UNIT;
+ if (total_bytes > UNITS_PER_WORD)
+ {
+ word = byte / UNITS_PER_WORD;
+ if (WORDS_BIG_ENDIAN)
+ word = (words - 1) - word;
+ offset = word * UNITS_PER_WORD;
+ if (BYTES_BIG_ENDIAN)
+ offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
+ else
+ offset += byte % UNITS_PER_WORD;
+ }
+ else
+ offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
+ value = ptr[offset];
+
+ if (bitpos < HOST_BITS_PER_WIDE_INT)
+ lo |= (unsigned HOST_WIDE_INT) value << bitpos;
+ else
+ hi |= (unsigned HOST_WIDE_INT) value
+ << (bitpos - HOST_BITS_PER_WIDE_INT);
+ }
+
+ return build_int_cst_wide_type (type, lo, hi);
+}
+
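
native_interpret_int is the inverse of the encoding loop: each byte is
located according to host byte order and ORed into the low or high word.
The little-endian core, as a sketch mirroring the encode_le sketch shown
after native_encode_int (decode_le is likewise an invented name):

    #include <stdio.h>
    #include <stdint.h>

    /* Rebuild a value from up to 8 little-endian bytes.  */
    static uint64_t
    decode_le (const unsigned char *ptr, int len)
    {
      uint64_t value = 0;
      int byte;
      for (byte = 0; byte < len && byte < 8; byte++)
        value |= (uint64_t) ptr[byte] << (byte * 8);
      return value;
    }

    int
    main (void)
    {
      unsigned char buf[8] = { 0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01 };
      printf ("0x%016llx\n", (unsigned long long) decode_le (buf, 8));
      return 0;
    }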
+
+/* Subroutine of native_interpret_expr. Interpret the contents of
+ the buffer PTR of length LEN as a REAL_CST of type TYPE.
+ If the buffer cannot be interpreted, return NULL_TREE. */
+
+static tree
+native_interpret_real (tree type, unsigned char *ptr, int len)
+{
+ enum machine_mode mode = TYPE_MODE (type);
+ int total_bytes = GET_MODE_SIZE (mode);
+ int byte, offset, word, words;
+ unsigned char value;
+ /* There are always 32 bits in each long, no matter the size of
+     the host's long.  We handle floating point representations with
+ up to 192 bits. */
+ REAL_VALUE_TYPE r;
+ long tmp[6];
+
+ if (total_bytes > len || total_bytes > 24)
+ return NULL_TREE;
+ words = total_bytes / UNITS_PER_WORD;
+
+ memset (tmp, 0, sizeof (tmp));
+ for (byte = 0; byte < total_bytes; byte++)
+ {
+ int bitpos = byte * BITS_PER_UNIT;
+ if (total_bytes > UNITS_PER_WORD)
+ {
+ word = byte / UNITS_PER_WORD;
+ if (FLOAT_WORDS_BIG_ENDIAN)
+ word = (words - 1) - word;
+ offset = word * UNITS_PER_WORD;
+ if (BYTES_BIG_ENDIAN)
+ offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
+ else
+ offset += byte % UNITS_PER_WORD;
+ }
+ else
+ offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
+ value = ptr[offset];
+
+ tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
+ }
+
+ real_from_target (&r, tmp, mode);
+ return build_real (type, r);
+}
+
+
+/* Subroutine of native_interpret_expr. Interpret the contents of
+ the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
+ If the buffer cannot be interpreted, return NULL_TREE. */
+
+static tree
+native_interpret_complex (tree type, unsigned char *ptr, int len)
+{
+ tree etype, rpart, ipart;
+ int size;
+
+ etype = TREE_TYPE (type);
+ size = GET_MODE_SIZE (TYPE_MODE (etype));
+ if (size * 2 > len)
+ return NULL_TREE;
+ rpart = native_interpret_expr (etype, ptr, size);
+ if (!rpart)
+ return NULL_TREE;
+ ipart = native_interpret_expr (etype, ptr+size, size);
+ if (!ipart)
+ return NULL_TREE;
+ return build_complex (type, rpart, ipart);
+}
+
+
+/* Subroutine of native_interpret_expr. Interpret the contents of
+ the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
+ If the buffer cannot be interpreted, return NULL_TREE. */
+
+static tree
+native_interpret_vector (tree type, unsigned char *ptr, int len)
+{
+ tree etype, elem, elements;
+ int i, size, count;
+
+ etype = TREE_TYPE (type);
+ size = GET_MODE_SIZE (TYPE_MODE (etype));
+ count = TYPE_VECTOR_SUBPARTS (type);
+ if (size * count > len)
+ return NULL_TREE;
+
+ elements = NULL_TREE;
+ for (i = count - 1; i >= 0; i--)
+ {
+ elem = native_interpret_expr (etype, ptr+(i*size), size);
+ if (!elem)
+ return NULL_TREE;
+ elements = tree_cons (NULL_TREE, elem, elements);
+ }
+ return build_vector (type, elements);
+}
+
+
+/* Subroutine of fold_view_convert_expr. Interpret the contents of
+ the buffer PTR of length LEN as a constant of type TYPE. For
+ INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
+ we return a REAL_CST, etc... If the buffer cannot be interpreted,
+ return NULL_TREE. */
+
+static tree
+native_interpret_expr (tree type, unsigned char *ptr, int len)
+{
+ switch (TREE_CODE (type))
+ {
+ case INTEGER_TYPE:
+ case ENUMERAL_TYPE:
+ case BOOLEAN_TYPE:
+ return native_interpret_int (type, ptr, len);
+
+ case REAL_TYPE:
+ return native_interpret_real (type, ptr, len);
+
+ case COMPLEX_TYPE:
+ return native_interpret_complex (type, ptr, len);
+
+ case VECTOR_TYPE:
+ return native_interpret_vector (type, ptr, len);
+
+ default:
+ return NULL_TREE;
+ }
+}
+
+
+/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
+ TYPE at compile-time. If we're unable to perform the conversion
+ return NULL_TREE. */
+
+static tree
+fold_view_convert_expr (tree type, tree expr)
+{
+ /* We support up to 512-bit values (for V8DFmode). */
+ unsigned char buffer[64];
+ int len;
+
+ /* Check that the host and target are sane. */
+ if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
+ return NULL_TREE;
+
+ len = native_encode_expr (expr, buffer, sizeof (buffer));
+ if (len == 0)
+ return NULL_TREE;
+
+ return native_interpret_expr (type, buffer, len);
+}
+
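
At the source level, fold_view_convert_expr computes at compile time
what a type pun through memory computes at run time.  A sketch of the
run-time equivalent (assumes an IEEE 754 host with 32-bit float and
8-bit bytes; this is plain C, not GCC API):

    #include <stdio.h>
    #include <string.h>
    #include <stdint.h>

    int
    main (void)
    {
      float f = 1.0f;
      uint32_t bits;
      /* A VIEW_CONVERT_EXPR reinterprets the bytes of a value in a new
         same-sized type, like a memcpy between two objects.  */
      memcpy (&bits, &f, sizeof bits);
      printf ("0x%08x\n", (unsigned) bits);   /* 0x3f800000 for IEEE 754 */
      return 0;
    }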
+
+/* Fold a unary expression of code CODE and type TYPE with operand
+ OP0. Return the folded expression if folding is successful.
+ Otherwise, return NULL_TREE. */
+
+tree
+fold_unary (enum tree_code code, tree type, tree op0)
+{
+ tree tem;
+ tree arg0;
+ enum tree_code_class kind = TREE_CODE_CLASS (code);
+
+ gcc_assert (IS_EXPR_CODE_CLASS (kind)
+ && TREE_CODE_LENGTH (code) == 1);
+
+ arg0 = op0;
+ if (arg0)
+ {
+ if (code == NOP_EXPR || code == CONVERT_EXPR
+ || code == FLOAT_EXPR || code == ABS_EXPR)
+ {
+ /* Don't use STRIP_NOPS, because signedness of argument type
+ matters. */
+ STRIP_SIGN_NOPS (arg0);
+ }
+ else
+ {
+ /* Strip any conversions that don't change the mode. This
+ is safe for every expression, except for a comparison
+ expression because its signedness is derived from its
+ operands.
Note that this is done as an internal manipulation within
the constant folder, in order to find the simplest
case FLOAT_EXPR:
case CONVERT_EXPR:
case FIX_TRUNC_EXPR:
- case FIX_CEIL_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_ROUND_EXPR:
if (TREE_TYPE (op0) == type)
return op0;
type via an object of identical or wider precision, neither
conversion is needed. */
if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
- && ((inter_int && final_int) || (inter_float && final_float))
+ && (((inter_int || inter_ptr) && final_int)
+ || (inter_float && final_float))
&& inter_prec >= final_prec)
return fold_build1 (code, type, TREE_OPERAND (op0, 0));
- the initial type is a pointer type and the precisions of the
intermediate and final types differ, or
- the final type is a pointer type and the precisions of the
- initial and intermediate types differ. */
+	 initial and intermediate types differ, or
+      - the final type is a pointer type and the initial type is not, or
+      - the initial type is a pointer to an array and the final type
+	 is not.  */
if (! inside_float && ! inter_float && ! final_float
&& ! inside_vec && ! inter_vec && ! final_vec
- && (inter_prec > inside_prec || inter_prec > final_prec)
+ && (inter_prec >= inside_prec || inter_prec >= final_prec)
&& ! (inside_int && inter_int
&& inter_unsignedp != inside_unsignedp
&& inter_prec < final_prec)
&& ! (final_ptr && inside_prec != inter_prec)
&& ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
&& TYPE_MODE (type) == TYPE_MODE (inter_type))
- && ! final_ptr)
+ && final_ptr == inside_ptr
+ && ! (inside_ptr
+ && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
+ && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
return fold_build1 (code, type, TREE_OPERAND (op0, 0));
}
return fold_convert (type, build_fold_addr_expr (base));
}
- if (TREE_CODE (op0) == MODIFY_EXPR
- && TREE_CONSTANT (TREE_OPERAND (op0, 1))
+ if ((TREE_CODE (op0) == MODIFY_EXPR
+ || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
+ && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
/* Detect assigning a bitfield. */
- && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
+ && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
+ && DECL_BIT_FIELD
+ (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
{
/* Don't leave an assignment inside a conversion
unless assigning a bitfield. */
- tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
+ tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
/* First do the assignment, then return converted constant. */
tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
TREE_NO_WARNING (tem) = 1;
}
if (change)
{
- tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
- TREE_INT_CST_HIGH (and1));
- tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
- TREE_CONSTANT_OVERFLOW (and1));
+ tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
+ TREE_INT_CST_HIGH (and1), 0,
+ TREE_OVERFLOW (and1),
+ TREE_CONSTANT_OVERFLOW (and1));
return fold_build2 (BIT_AND_EXPR, type,
fold_convert (type, and0), tem);
}
case VIEW_CONVERT_EXPR:
if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
- return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
- return NULL_TREE;
+ return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
+ return fold_view_convert_expr (type, op0);
case NEGATE_EXPR:
- if (negate_expr_p (arg0))
- return fold_convert (type, negate_expr (arg0));
+ tem = fold_negate_expr (arg0);
+ if (tem)
+ return fold_convert (type, tem);
return NULL_TREE;
case ABS_EXPR:
case CONJ_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
return fold_convert (type, arg0);
- else if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return build2 (COMPLEX_EXPR, type,
- TREE_OPERAND (arg0, 0),
- negate_expr (TREE_OPERAND (arg0, 1)));
- else if (TREE_CODE (arg0) == COMPLEX_CST)
- return build_complex (type, TREE_REALPART (arg0),
- negate_expr (TREE_IMAGPART (arg0)));
- else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold_build2 (TREE_CODE (arg0), type,
- fold_build1 (CONJ_EXPR, type,
- TREE_OPERAND (arg0, 0)),
- fold_build1 (CONJ_EXPR, type,
- TREE_OPERAND (arg0, 1)));
- else if (TREE_CODE (arg0) == CONJ_EXPR)
- return TREE_OPERAND (arg0, 0);
+ if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ {
+ tree itype = TREE_TYPE (type);
+ tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
+ tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
+ return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
+ }
+ if (TREE_CODE (arg0) == COMPLEX_CST)
+ {
+ tree itype = TREE_TYPE (type);
+ tree rpart = fold_convert (itype, TREE_REALPART (arg0));
+ tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
+ return build_complex (type, rpart, negate_expr (ipart));
+ }
+ if (TREE_CODE (arg0) == CONJ_EXPR)
+ return fold_convert (type, TREE_OPERAND (arg0, 0));
return NULL_TREE;
case BIT_NOT_EXPR:
and its values must be 0 or 1.
("true" is a fixed value perhaps depending on the language,
but we don't handle values other than 1 correctly yet.) */
- tem = invert_truthvalue (arg0);
- /* Avoid infinite recursion. */
- if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
+ tem = fold_truth_not_expr (arg0);
+ if (!tem)
return NULL_TREE;
return fold_convert (type, tem);
case REALPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
- return NULL_TREE;
- else if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ return fold_convert (type, arg0);
+ if (TREE_CODE (arg0) == COMPLEX_EXPR)
return omit_one_operand (type, TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
- else if (TREE_CODE (arg0) == COMPLEX_CST)
- return TREE_REALPART (arg0);
- else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold_build2 (TREE_CODE (arg0), type,
- fold_build1 (REALPART_EXPR, type,
- TREE_OPERAND (arg0, 0)),
- fold_build1 (REALPART_EXPR, type,
- TREE_OPERAND (arg0, 1)));
+ if (TREE_CODE (arg0) == COMPLEX_CST)
+ return fold_convert (type, TREE_REALPART (arg0));
+ if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ {
+ tree itype = TREE_TYPE (TREE_TYPE (arg0));
+ tem = fold_build2 (TREE_CODE (arg0), itype,
+ fold_build1 (REALPART_EXPR, itype,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (REALPART_EXPR, itype,
+ TREE_OPERAND (arg0, 1)));
+ return fold_convert (type, tem);
+ }
+ if (TREE_CODE (arg0) == CONJ_EXPR)
+ {
+ tree itype = TREE_TYPE (TREE_TYPE (arg0));
+ tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
+ return fold_convert (type, tem);
+ }
+ if (TREE_CODE (arg0) == CALL_EXPR)
+ {
+ tree fn = get_callee_fndecl (arg0);
+	  /* get_callee_fndecl can return NULL_TREE for indirect calls.  */
+	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (fn))
+ {
+ CASE_FLT_FN (BUILT_IN_CEXPI):
+ fn = mathfn_built_in (type, BUILT_IN_COS);
+ return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
+
+ default:;
+ }
+ }
return NULL_TREE;
case IMAGPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
return fold_convert (type, integer_zero_node);
- else if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ if (TREE_CODE (arg0) == COMPLEX_EXPR)
return omit_one_operand (type, TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg0, 0));
- else if (TREE_CODE (arg0) == COMPLEX_CST)
- return TREE_IMAGPART (arg0);
- else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold_build2 (TREE_CODE (arg0), type,
- fold_build1 (IMAGPART_EXPR, type,
- TREE_OPERAND (arg0, 0)),
- fold_build1 (IMAGPART_EXPR, type,
- TREE_OPERAND (arg0, 1)));
+ if (TREE_CODE (arg0) == COMPLEX_CST)
+ return fold_convert (type, TREE_IMAGPART (arg0));
+ if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ {
+ tree itype = TREE_TYPE (TREE_TYPE (arg0));
+ tem = fold_build2 (TREE_CODE (arg0), itype,
+ fold_build1 (IMAGPART_EXPR, itype,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1 (IMAGPART_EXPR, itype,
+ TREE_OPERAND (arg0, 1)));
+ return fold_convert (type, tem);
+ }
+ if (TREE_CODE (arg0) == CONJ_EXPR)
+ {
+ tree itype = TREE_TYPE (TREE_TYPE (arg0));
+ tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
+ return fold_convert (type, negate_expr (tem));
+ }
+ if (TREE_CODE (arg0) == CALL_EXPR)
+ {
+ tree fn = get_callee_fndecl (arg0);
+ if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (fn))
+ {
+ CASE_FLT_FN (BUILT_IN_CEXPI):
+ fn = mathfn_built_in (type, BUILT_IN_SIN);
+ return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
+
+ default:;
+ }
+ }
return NULL_TREE;
default:
else
gcc_unreachable ();
- /* MIN (MAX (a, b), b) == b. Â */
+ /* MIN (MAX (a, b), b) == b. */
if (TREE_CODE (op0) == compl_code
&& operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
- /* MIN (MAX (b, a), b) == b. Â */
+ /* MIN (MAX (b, a), b) == b. */
if (TREE_CODE (op0) == compl_code
&& operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
&& reorder_operands_p (TREE_OPERAND (op0, 1), op1))
return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
- /* MIN (a, MAX (a, b)) == a. Â */
+ /* MIN (a, MAX (a, b)) == a. */
if (TREE_CODE (op1) == compl_code
&& operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
&& reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
- /* MIN (a, MAX (b, a)) == a. Â */
+ /* MIN (a, MAX (b, a)) == a. */
if (TREE_CODE (op1) == compl_code
&& operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
&& reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
return NULL_TREE;
}
+/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
+ by changing CODE to reduce the magnitude of constants involved in
+ ARG0 of the comparison.
+ Returns a canonicalized comparison tree if a simplification was
+ possible, otherwise returns NULL_TREE. */
+
+static tree
+maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
+ tree arg0, tree arg1)
+{
+ enum tree_code code0 = TREE_CODE (arg0);
+ tree t, cst0 = NULL_TREE;
+ int sgn0;
+ bool swap = false;
+
+ /* Match A +- CST code arg1 and CST code arg1. */
+ if (!(((code0 == MINUS_EXPR
+ || code0 == PLUS_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ || code0 == INTEGER_CST))
+ return NULL_TREE;
+
+ /* Identify the constant in arg0 and its sign. */
+ if (code0 == INTEGER_CST)
+ cst0 = arg0;
+ else
+ cst0 = TREE_OPERAND (arg0, 1);
+ sgn0 = tree_int_cst_sgn (cst0);
+
+ /* Overflowed constants and zero will cause problems. */
+ if (integer_zerop (cst0)
+ || TREE_OVERFLOW (cst0))
+ return NULL_TREE;
+
+ /* See if we can reduce the magnitude of the constant in
+ arg0 by changing the comparison code. */
+ if (code0 == INTEGER_CST)
+ {
+ /* CST <= arg1 -> CST-1 < arg1. */
+ if (code == LE_EXPR && sgn0 == 1)
+ code = LT_EXPR;
+ /* -CST < arg1 -> -CST-1 <= arg1. */
+ else if (code == LT_EXPR && sgn0 == -1)
+ code = LE_EXPR;
+ /* CST > arg1 -> CST-1 >= arg1. */
+ else if (code == GT_EXPR && sgn0 == 1)
+ code = GE_EXPR;
+ /* -CST >= arg1 -> -CST-1 > arg1. */
+ else if (code == GE_EXPR && sgn0 == -1)
+ code = GT_EXPR;
+ else
+ return NULL_TREE;
+ /* arg1 code' CST' might be more canonical. */
+ swap = true;
+ }
+ else
+ {
+ /* A - CST < arg1 -> A - CST-1 <= arg1. */
+ if (code == LT_EXPR
+ && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
+ code = LE_EXPR;
+ /* A + CST > arg1 -> A + CST-1 >= arg1. */
+ else if (code == GT_EXPR
+ && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
+ code = GE_EXPR;
+ /* A + CST <= arg1 -> A + CST-1 < arg1. */
+ else if (code == LE_EXPR
+ && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
+ code = LT_EXPR;
+ /* A - CST >= arg1 -> A - CST-1 > arg1. */
+ else if (code == GE_EXPR
+ && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
+ code = GT_EXPR;
+ else
+ return NULL_TREE;
+ }
+
+ /* Now build the constant reduced in magnitude. */
+ t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
+ cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
+ if (code0 != INTEGER_CST)
+ t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
+
+ /* If swapping might yield a more canonical form, do so. */
+ if (swap)
+ return fold_build2 (swap_tree_comparison (code), type, arg1, t);
+ else
+ return fold_build2 (code, type, t, arg1);
+}
+
+/* Canonicalize the comparison ARG0 CODE ARG1 of type TYPE, where the
+ operands have undefined overflow. Try to decrease the magnitude of
+ the constants involved by changing LE_EXPR and GE_EXPR to LT_EXPR
+ and GT_EXPR or vice versa, and put sole constants at the second
+ argument position.
+ Returns the canonicalized tree if changed, otherwise NULL_TREE. */
+
+static tree
+maybe_canonicalize_comparison (enum tree_code code, tree type,
+ tree arg0, tree arg1)
+{
+ tree t;
+
+ /* In principle pointers also have undefined overflow behavior,
+ but that causes problems elsewhere. */
+ if ((flag_wrapv || flag_trapv)
+ || (TYPE_UNSIGNED (TREE_TYPE (arg0))
+ || POINTER_TYPE_P (TREE_TYPE (arg0))))
+ return NULL_TREE;
+
+ /* Try canonicalization by simplifying arg0. */
+ t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
+ if (t)
+ return t;
+
+ /* Try canonicalization by simplifying arg1 using the swapped
+ comparison. */
+ code = swap_tree_comparison (code);
+ return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
+}
+
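To make the canonicalization concrete, here is a minimal source-level sketch (the function name is hypothetical; the fold only fires for types whose signed overflow is undefined):

    /* With signed x, the comparison below may be canonicalized by the
       new code: the constant's magnitude shrinks and LE becomes LT.  */
    int canon_example (int x, int y)
    {
      return x + 2 <= y;   /* may become x + 1 < y */
    }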
/* Subroutine of fold_binary. This routine performs all of the
transformations that are common to the equality/inequality
operators (EQ_EXPR and NE_EXPR) and the ordering operators
if (tree_swap_operands_p (arg0, arg1, true))
return fold_build2 (swap_tree_comparison (code), type, op1, op0);
- /* If this is a comparison of two exprs that look like an
- ARRAY_REF of the same object, then we can fold this to a
- comparison of the two offsets. */
- {
- tree base0, offset0, base1, offset1;
-
- if (extract_array_ref (arg0, &base0, &offset0)
- && extract_array_ref (arg1, &base1, &offset1)
- && operand_equal_p (base0, base1, 0))
- {
- /* Handle no offsets on both sides specially. */
- if (offset0 == NULL_TREE && offset1 == NULL_TREE)
- return fold_build2 (code, type, integer_zero_node,
- integer_zero_node);
-
- if (!offset0 || !offset1
- || TREE_TYPE (offset0) == TREE_TYPE (offset1))
- {
- if (offset0 == NULL_TREE)
- offset0 = build_int_cst (TREE_TYPE (offset1), 0);
- if (offset1 == NULL_TREE)
- offset1 = build_int_cst (TREE_TYPE (offset0), 0);
- return fold_build2 (code, type, offset0, offset1);
- }
- }
- }
-
/* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
return fold_build2 (code, type, variable, lhs);
}
+ /* For comparisons of pointers we can decompose the comparison into
+ a compile-time comparison of the base objects and of the offsets
+ into the objects. Doing more than the operand_equal_p test below
+ requires at least one operand to be an ADDR_EXPR. */
+ if (POINTER_TYPE_P (TREE_TYPE (arg0))
+ && (TREE_CODE (arg0) == ADDR_EXPR
+ || TREE_CODE (arg1) == ADDR_EXPR))
+ {
+ tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
+ HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
+ enum machine_mode mode;
+ int volatilep, unsignedp;
+ bool indirect_base0 = false;
+
+ /* Get base and offset for the access. Strip ADDR_EXPR for
+ get_inner_reference, but put it back by stripping INDIRECT_REF
+ off the base object if possible. */
+ base0 = arg0;
+ if (TREE_CODE (arg0) == ADDR_EXPR)
+ {
+ base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
+ &bitsize, &bitpos0, &offset0, &mode,
+ &unsignedp, &volatilep, false);
+ if (TREE_CODE (base0) == INDIRECT_REF)
+ base0 = TREE_OPERAND (base0, 0);
+ else
+ indirect_base0 = true;
+ }
+
+ base1 = arg1;
+ if (TREE_CODE (arg1) == ADDR_EXPR)
+ {
+ base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
+ &bitsize, &bitpos1, &offset1, &mode,
+ &unsignedp, &volatilep, false);
+ /* Make base1 indirect/non-indirect, matching what we
+ did for base0. */
+ if (TREE_CODE (base1) == INDIRECT_REF
+ && !indirect_base0)
+ base1 = TREE_OPERAND (base1, 0);
+ else if (!indirect_base0)
+ base1 = NULL_TREE;
+ }
+ else if (indirect_base0)
+ base1 = NULL_TREE;
+
+ /* If we have equivalent bases we might be able to simplify. */
+ if (base0 && base1
+ && operand_equal_p (base0, base1, 0))
+ {
+ /* We can fold this expression to a constant if the non-constant
+ offset parts are equal. */
+ if (offset0 == offset1
+ || (offset0 && offset1
+ && operand_equal_p (offset0, offset1, 0)))
+ {
+ switch (code)
+ {
+ case EQ_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
+ case NE_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
+ case LT_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
+ case LE_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
+ case GE_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
+ case GT_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
+ default:;
+ }
+ }
+ /* We can simplify the comparison to a comparison of the variable
+ offset parts if the constant offset parts are equal.
+ Be careful to use signed size type here because otherwise we
+ mess with array offsets in the wrong way. This is possible
+ because pointer arithmetic is restricted to remain within an
+ object, and overflow on pointer differences is undefined per
+ C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
+ else if (bitpos0 == bitpos1)
+ {
+ tree signed_size_type_node;
+ signed_size_type_node = signed_type_for (size_type_node);
+
+ /* By converting to signed size type we cover middle-end pointer
+ arithmetic, which operates on unsigned pointer types of size-type
+ width, and ARRAY_REF offsets, which are properly sign- or
+ zero-extended from their type in case it is narrower than
+ size type. */
+ if (offset0 == NULL_TREE)
+ offset0 = build_int_cst (signed_size_type_node, 0);
+ else
+ offset0 = fold_convert (signed_size_type_node, offset0);
+ if (offset1 == NULL_TREE)
+ offset1 = build_int_cst (signed_size_type_node, 0);
+ else
+ offset1 = fold_convert (signed_size_type_node, offset1);
+
+ return fold_build2 (code, type, offset0, offset1);
+ }
+ }
+ }
+
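As an illustration of what the new decomposition enables (a sketch, not part of the patch): when both operands are ADDR_EXPRs over the same base object, the comparison reduces to comparing the constant bit positions.

    struct S { int f0; int f1; } s;
    int addr_cmp_example (void)
    {
      /* Same base object `s`, so the fold compares the field bit
         positions, e.g. 0 < 32 for 32-bit int.  */
      return &s.f0 < &s.f1;   /* folds to 1 at compile time */
    }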
+ /* If this is a comparison of two exprs that look like an ARRAY_REF of the
+ same object, then we can fold this to a comparison of the two offsets in
+ signed size type. This is possible because pointer arithmetic is
+ restricted to remain within an object, and overflow on pointer differences
+ is undefined per C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
+ if (POINTER_TYPE_P (TREE_TYPE (arg0))
+ && !flag_wrapv && !flag_trapv)
+ {
+ tree base0, offset0, base1, offset1;
+
+ if (extract_array_ref (arg0, &base0, &offset0)
+ && extract_array_ref (arg1, &base1, &offset1)
+ && operand_equal_p (base0, base1, 0))
+ {
+ tree signed_size_type_node;
+ signed_size_type_node = signed_type_for (size_type_node);
+
+ /* By converting to signed size type we cover middle-end pointer
+ arithmetic, which operates on unsigned pointer types of size-type
+ width, and ARRAY_REF offsets, which are properly sign- or
+ zero-extended from their type in case it is narrower than
+ size type. */
+ if (offset0 == NULL_TREE)
+ offset0 = build_int_cst (signed_size_type_node, 0);
+ else
+ offset0 = fold_convert (signed_size_type_node, offset0);
+ if (offset1 == NULL_TREE)
+ offset1 = build_int_cst (signed_size_type_node, 0);
+ else
+ offset1 = fold_convert (signed_size_type_node, offset1);
+
+ return fold_build2 (code, type, offset0, offset1);
+ }
+ }
+
+ /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
+ X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
+ the resulting offset is smaller in absolute value than the
+ original one. */
+ if (!(flag_wrapv || flag_trapv)
+ && !TYPE_UNSIGNED (TREE_TYPE (arg0))
+ && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
+ && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
+ && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
+ && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
+ {
+ tree const1 = TREE_OPERAND (arg0, 1);
+ tree const2 = TREE_OPERAND (arg1, 1);
+ tree variable1 = TREE_OPERAND (arg0, 0);
+ tree variable2 = TREE_OPERAND (arg1, 0);
+ tree cst;
+
+ /* Put the constant on the side where it doesn't overflow and is
+ of lower absolute value than before. */
+ cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
+ ? MINUS_EXPR : PLUS_EXPR,
+ const2, const1, 0);
+ if (!TREE_OVERFLOW (cst)
+ && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
+ return fold_build2 (code, type,
+ variable1,
+ fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
+ variable2, cst));
+
+ cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
+ ? MINUS_EXPR : PLUS_EXPR,
+ const1, const2, 0);
+ if (!TREE_OVERFLOW (cst)
+ && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
+ return fold_build2 (code, type,
+ fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
+ variable1, cst),
+ variable2);
+ }
+
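A sketch of the transformation above, with hypothetical signed operands; it is valid precisely because the resulting constant is smaller in absolute value than the original ones:

    int offset_cmp_example (int x, int y)
    {
      return x + 10 < y + 8;   /* may become x < y + (-2) */
    }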
+ tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree targ0 = strip_float_extensions (arg0);
return tem;
}
+ /* Fold ~X op ~Y as Y op X. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && TREE_CODE (arg1) == BIT_NOT_EXPR)
+ return fold_build2 (code, type,
+ TREE_OPERAND (arg1, 0),
+ TREE_OPERAND (arg0, 0));
+
+ /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST)
+ return fold_build2 (swap_tree_comparison (code), type,
+ TREE_OPERAND (arg0, 0),
+ fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
+
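For illustration (hypothetical function, signed int assumed), the ~X op C rule swaps the comparison and complements the constant:

    int not_cmp_example (int x)
    {
      /* ~x == -x - 1, so -x - 1 < 5 is equivalent to x > -6.  */
      return ~x < 5;   /* may become x > ~5, i.e. x > -6 */
    }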
return NULL_TREE;
}
+
+/* Subroutine of fold_binary. Optimize complex multiplications of the
+ form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
+ argument EXPR represents the expression "z" of type TYPE. */
+
+static tree
+fold_mult_zconjz (tree type, tree expr)
+{
+ tree itype = TREE_TYPE (type);
+ tree rpart, ipart, tem;
+
+ if (TREE_CODE (expr) == COMPLEX_EXPR)
+ {
+ rpart = TREE_OPERAND (expr, 0);
+ ipart = TREE_OPERAND (expr, 1);
+ }
+ else if (TREE_CODE (expr) == COMPLEX_CST)
+ {
+ rpart = TREE_REALPART (expr);
+ ipart = TREE_IMAGPART (expr);
+ }
+ else
+ {
+ expr = save_expr (expr);
+ rpart = fold_build1 (REALPART_EXPR, itype, expr);
+ ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
+ }
+
+ rpart = save_expr (rpart);
+ ipart = save_expr (ipart);
+ tem = fold_build2 (PLUS_EXPR, itype,
+ fold_build2 (MULT_EXPR, itype, rpart, rpart),
+ fold_build2 (MULT_EXPR, itype, ipart, ipart));
+ return fold_build2 (COMPLEX_EXPR, type, tem,
+ fold_convert (itype, integer_zero_node));
+}
+
+
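A scalar picture of what fold_mult_zconjz produces, as a minimal sketch using C99 complex arithmetic (identifiers are illustrative):

    #include <complex.h>
    double zconjz_example (double _Complex z)
    {
      /* For z = a + b*i, z * conj (z) == (a*a + b*b) + 0*i, so the
         folder emits a COMPLEX_EXPR with a zero imaginary part.  */
      double a = creal (z), b = cimag (z);
      return a * a + b * b;
    }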
/* Fold a binary expression of code CODE and type TYPE with operands
OP0 and OP1. Return the folded expression if folding is
successful. Otherwise, return NULL_TREE. */
tree arg0, arg1, tem;
tree t1 = NULL_TREE;
- gcc_assert (IS_EXPR_CODE_CLASS (kind)
+ gcc_assert ((IS_EXPR_CODE_CLASS (kind)
+ || IS_GIMPLE_STMT_CODE_CLASS (kind))
&& TREE_CODE_LENGTH (code) == 2
&& op0 != NULL_TREE
&& op1 != NULL_TREE);
if (integer_zerop (arg1))
return non_lvalue (fold_convert (type, arg0));
+ /* ~X + X is -1. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && !TYPE_TRAP_SIGNED (type))
+ {
+ t1 = build_int_cst_type (type, -1);
+ return omit_one_operand (type, t1, arg1);
+ }
+
+ /* X + ~X is -1. */
+ if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
+ && !TYPE_TRAP_SIGNED (type))
+ {
+ t1 = build_int_cst_type (type, -1);
+ return omit_one_operand (type, t1, arg0);
+ }
+
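The ~X + X identity relies on two's complement: ~x == -x - 1, hence ~x + x == -1. A trivial sketch (the patch deliberately skips trapping signed types):

    int not_plus_example (int x)
    {
      return ~x + x;   /* folds to -1 */
    }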
/* If we are adding two BIT_AND_EXPR's, both of which are and'ing
with a constant, and the two constants have no bits in common,
we should treat this as a BIT_IOR_EXPR since this may produce more
fold_convert (type, tem));
}
+ /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
+ to __complex__ ( x, y ). This is not the same for SNaNs or
+ if signed zeros are involved. */
+ if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
+ && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+ && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
+ {
+ tree rtype = TREE_TYPE (TREE_TYPE (arg0));
+ tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
+ tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
+ bool arg0rz = false, arg0iz = false;
+ if ((arg0r && (arg0rz = real_zerop (arg0r)))
+ || (arg0i && (arg0iz = real_zerop (arg0i))))
+ {
+ tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
+ tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
+ if (arg0rz && arg1i && real_zerop (arg1i))
+ {
+ tree rp = arg1r ? arg1r
+ : build1 (REALPART_EXPR, rtype, arg1);
+ tree ip = arg0i ? arg0i
+ : build1 (IMAGPART_EXPR, rtype, arg0);
+ return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ }
+ else if (arg0iz && arg1r && real_zerop (arg1r))
+ {
+ tree rp = arg0r ? arg0r
+ : build1 (REALPART_EXPR, rtype, arg0);
+ tree ip = arg1i ? arg1i
+ : build1 (IMAGPART_EXPR, rtype, arg1);
+ return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ }
+ }
+ }
+
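A source-level sketch of this fold, using C99 complex syntax (illustrative only; whether it reduces this far depends on earlier folds and on SNaNs and signed zeros not being honored):

    #include <complex.h>
    double _Complex cplx_add_example (double x, double y)
    {
      return x + y * I;   /* may fold directly to __complex__ (x, y) */
    }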
if (flag_unsafe_math_optimizations
&& (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
&& (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
/* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& (FLOAT_TYPE_P (type)
- || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
+ || INTEGRAL_TYPE_P (type))
&& negate_expr_p (arg1)
&& reorder_operands_p (arg0, arg1))
return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
/* Convert -A - 1 to ~A. */
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (arg0) == NEGATE_EXPR
- && integer_onep (arg1))
+ && integer_onep (arg1)
+ && !TYPE_TRAP_SIGNED (type))
return fold_build1 (BIT_NOT_EXPR, type,
fold_convert (type, TREE_OPERAND (arg0, 0)));
/* Convert -1 - A to ~A. */
if (INTEGRAL_TYPE_P (type)
&& integer_all_onesp (arg0))
- return fold_build1 (BIT_NOT_EXPR, type, arg1);
+ return fold_build1 (BIT_NOT_EXPR, type, op1);
if (! FLOAT_TYPE_P (type))
{
/* Avoid this transformation if B is a positive REAL_CST. */
&& (TREE_CODE (arg1) != REAL_CST
|| REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
- || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
+ || INTEGRAL_TYPE_P (type)))
return fold_build2 (PLUS_EXPR, type,
fold_convert (type, arg0),
fold_convert (type, negate_expr (arg1)));
/* (-A) * (-B) -> A * B */
if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
return fold_build2 (MULT_EXPR, type,
- TREE_OPERAND (arg0, 0),
- negate_expr (arg1));
+ fold_convert (type, TREE_OPERAND (arg0, 0)),
+ fold_convert (type, negate_expr (arg1)));
if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
return fold_build2 (MULT_EXPR, type,
- negate_expr (arg0),
- TREE_OPERAND (arg1, 0));
+ fold_convert (type, negate_expr (arg0)),
+ fold_convert (type, TREE_OPERAND (arg1, 0)));
if (! FLOAT_TYPE_P (type))
{
/* Transform x * -1 into -x. */
if (integer_all_onesp (arg1))
return fold_convert (type, negate_expr (arg0));
+ /* Transform x * -C into -x * C if x is easily negatable. */
+ if (TREE_CODE (arg1) == INTEGER_CST
+ && tree_int_cst_sgn (arg1) == -1
+ && negate_expr_p (arg0)
+ && (tem = negate_expr (arg1)) != arg1
+ && !TREE_OVERFLOW (tem))
+ return fold_build2 (MULT_EXPR, type,
+ negate_expr (arg0), tem);
/* (a * (1 << b)) is (a << b) */
if (TREE_CODE (arg1) == LSHIFT_EXPR
code, NULL_TREE)))
return fold_convert (type, tem);
+ /* Optimize z * conj(z) for integer complex numbers. */
+ if (TREE_CODE (arg0) == CONJ_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return fold_mult_zconjz (type, arg1);
+ if (TREE_CODE (arg1) == CONJ_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return fold_mult_zconjz (type, arg0);
}
else
{
}
}
+ /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
+ This is not the same for NaNs or if signed zeros are
+ involved. */
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
+ && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+ && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
+ && TREE_CODE (arg1) == COMPLEX_CST
+ && real_zerop (TREE_REALPART (arg1)))
+ {
+ tree rtype = TREE_TYPE (TREE_TYPE (arg0));
+ if (real_onep (TREE_IMAGPART (arg1)))
+ return fold_build2 (COMPLEX_EXPR, type,
+ negate_expr (fold_build1 (IMAGPART_EXPR,
+ rtype, arg0)),
+ fold_build1 (REALPART_EXPR, rtype, arg0));
+ else if (real_minus_onep (TREE_IMAGPART (arg1)))
+ return fold_build2 (COMPLEX_EXPR, type,
+ fold_build1 (IMAGPART_EXPR, rtype, arg0),
+ negate_expr (fold_build1 (REALPART_EXPR,
+ rtype, arg0)));
+ }
+
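For z = a + b*i, z * I == -b + a*i, which is exactly the COMPLEX_EXPR built above; a hedged scalar sketch:

    #include <complex.h>
    double _Complex mul_i_example (double _Complex z)
    {
      /* Equivalent scalar form the folder produces for z * I.  */
      return -cimag (z) + creal (z) * I;
    }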
+ /* Optimize z * conj(z) for floating point complex numbers.
+ Guarded by flag_unsafe_math_optimizations as non-finite
+ imaginary components don't produce scalar results. */
+ if (flag_unsafe_math_optimizations
+ && TREE_CODE (arg0) == CONJ_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return fold_mult_zconjz (type, arg1);
+ if (flag_unsafe_math_optimizations
+ && TREE_CODE (arg1) == CONJ_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return fold_mult_zconjz (type, arg0);
+
if (flag_unsafe_math_optimizations)
{
enum built_in_function fcode0 = builtin_mathfn_code (arg0);
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
fold_convert (type, TREE_OPERAND (arg0, 0)),
fold_convert (type, TREE_OPERAND (arg1, 0)));
+ /* Convert ~X ^ C to X ^ ~C. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST)
+ return fold_build2 (code, type,
+ fold_convert (type, TREE_OPERAND (arg0, 0)),
+ fold_build1 (BIT_NOT_EXPR, type, arg1));
+
/* Fold (X & 1) ^ 1 as (X & 1) == 0. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (arg0, 1))
return NULL_TREE;
case TRUNC_DIV_EXPR:
- case ROUND_DIV_EXPR:
case FLOOR_DIV_EXPR:
+ /* Simplify A / (B << N) where A and B are positive and B is
+ a power of 2, to A >> (N + log2(B)). */
+ if (TREE_CODE (arg1) == LSHIFT_EXPR
+ && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
+ {
+ tree sval = TREE_OPERAND (arg1, 0);
+ if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
+ {
+ tree sh_cnt = TREE_OPERAND (arg1, 1);
+ unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
+
+ sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
+ sh_cnt, build_int_cst (NULL_TREE, pow2));
+ return fold_build2 (RSHIFT_EXPR, type,
+ fold_convert (type, arg0), sh_cnt);
+ }
+ }
+ /* Fall thru */
+
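A sketch of the new A / (B << N) rule with unsigned operands (identifiers hypothetical):

    unsigned div_shift_example (unsigned x, unsigned n)
    {
      return x / (4u << n);   /* may become x >> (n + 2), as log2(4) == 2 */
    }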
+ case ROUND_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
if (integer_onep (arg1))
return omit_one_operand (type, integer_zero_node, arg0);
/* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
- i.e. "X % C" into "X & C2", if X and C are positive. */
+ i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
- && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
- && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
+ && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
{
- unsigned HOST_WIDE_INT high, low;
- tree mask;
- int l;
+ tree c = arg1;
+ /* Also optimize A % (C << N) where C is a power of 2,
+ to A & ((C << N) - 1). */
+ if (TREE_CODE (arg1) == LSHIFT_EXPR)
+ c = TREE_OPERAND (arg1, 0);
- l = tree_log2 (arg1);
- if (l >= HOST_BITS_PER_WIDE_INT)
- {
- high = ((unsigned HOST_WIDE_INT) 1
- << (l - HOST_BITS_PER_WIDE_INT)) - 1;
- low = -1;
- }
- else
+ if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
{
- high = 0;
- low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
+ tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
+ build_int_cst (TREE_TYPE (arg1), 1));
+ return fold_build2 (BIT_AND_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, mask));
}
-
- mask = build_int_cst_wide (type, low, high);
- return fold_build2 (BIT_AND_EXPR, type,
- fold_convert (type, arg0), mask);
}
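
The extended modulo rule at the source level, as a sketch for unsigned operands:

    unsigned mod_pow2_example (unsigned x, unsigned n)
    {
      unsigned a = x % 8u;          /* becomes x & 7u */
      unsigned b = x % (2u << n);   /* becomes x & ((2u << n) - 1) */
      return a + b;
    }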
/* X % -C is the same as X % C. */
return NULL_TREE;
/* Turn (a OP c1) OP c2 into a OP (c1+c2). */
- if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
+ if (TREE_CODE (op0) == code && host_integerp (arg1, false)
&& TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
&& host_integerp (TREE_OPERAND (arg0, 1), false)
&& TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
RROTATE_EXPR by a new constant. */
if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
{
- tree tem = build_int_cst (NULL_TREE,
+ tree tem = build_int_cst (TREE_TYPE (arg1),
GET_MODE_BITSIZE (TYPE_MODE (type)));
- tem = fold_convert (TREE_TYPE (arg1), tem);
tem = const_binop (MINUS_EXPR, tem, arg1, 0);
return fold_build2 (RROTATE_EXPR, type, arg0, tem);
}
&& code == EQ_EXPR)
return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
- /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && TREE_CODE (arg1) == INTEGER_CST)
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
- fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
- arg1));
-
/* If this is an equality comparison of the address of a non-weak
object against zero, then we know the result. */
if (TREE_CODE (arg0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
? MINUS_EXPR : PLUS_EXPR,
- arg1, TREE_OPERAND (arg0, 1), 0))
+ fold_convert (TREE_TYPE (arg0), arg1),
+ TREE_OPERAND (arg0, 1), 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+ /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
+ fold_convert (TREE_TYPE (arg0), arg1),
+ TREE_OPERAND (arg0, 1)));
+
/* If we have X - Y == 0, we can convert that to X == Y and similarly
for !=. Don't do this for ordered comparisons due to overflow. */
if (TREE_CODE (arg0) == MINUS_EXPR
unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
/* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
can be rewritten as (X & (C2 << C1)) != 0. */
- if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
+ if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
{
tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
tree res = constant_boolean_node (code==NE_EXPR, type);
return omit_one_operand (type, res, arg0);
}
+
+ /* Fold -X op -Y as X op Y, where op is eq/ne. */
+ if (TREE_CODE (arg0) == NEGATE_EXPR
+ && TREE_CODE (arg1) == NEGATE_EXPR)
+ return fold_build2 (code, type,
+ TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0));
+
+ /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && TREE_CODE (arg1) == BIT_AND_EXPR)
+ {
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ tree arg10 = TREE_OPERAND (arg1, 0);
+ tree arg11 = TREE_OPERAND (arg1, 1);
+ tree itype = TREE_TYPE (arg0);
+
+ if (operand_equal_p (arg01, arg11, 0))
+ return fold_build2 (code, type,
+ fold_build2 (BIT_AND_EXPR, itype,
+ fold_build2 (BIT_XOR_EXPR, itype,
+ arg00, arg10),
+ arg01),
+ build_int_cst (itype, 0));
+
+ if (operand_equal_p (arg01, arg10, 0))
+ return fold_build2 (code, type,
+ fold_build2 (BIT_AND_EXPR, itype,
+ fold_build2 (BIT_XOR_EXPR, itype,
+ arg00, arg11),
+ arg01),
+ build_int_cst (itype, 0));
+
+ if (operand_equal_p (arg00, arg11, 0))
+ return fold_build2 (code, type,
+ fold_build2 (BIT_AND_EXPR, itype,
+ fold_build2 (BIT_XOR_EXPR, itype,
+ arg01, arg10),
+ arg00),
+ build_int_cst (itype, 0));
+
+ if (operand_equal_p (arg00, arg10, 0))
+ return fold_build2 (code, type,
+ fold_build2 (BIT_AND_EXPR, itype,
+ fold_build2 (BIT_XOR_EXPR, itype,
+ arg01, arg11),
+ arg00),
+ build_int_cst (itype, 0));
+ }
+
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && TREE_CODE (arg1) == BIT_XOR_EXPR)
+ {
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ tree arg10 = TREE_OPERAND (arg1, 0);
+ tree arg11 = TREE_OPERAND (arg1, 1);
+ tree itype = TREE_TYPE (arg0);
+
+ /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
+ operand_equal_p guarantees no side-effects so we don't need
+ to use omit_one_operand on Z. */
+ if (operand_equal_p (arg01, arg11, 0))
+ return fold_build2 (code, type, arg00, arg10);
+ if (operand_equal_p (arg01, arg10, 0))
+ return fold_build2 (code, type, arg00, arg11);
+ if (operand_equal_p (arg00, arg11, 0))
+ return fold_build2 (code, type, arg01, arg10);
+ if (operand_equal_p (arg00, arg10, 0))
+ return fold_build2 (code, type, arg01, arg11);
+
+ /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
+ if (TREE_CODE (arg01) == INTEGER_CST
+ && TREE_CODE (arg11) == INTEGER_CST)
+ return fold_build2 (code, type,
+ fold_build2 (BIT_XOR_EXPR, itype, arg00,
+ fold_build2 (BIT_XOR_EXPR, itype,
+ arg01, arg11)),
+ arg10);
+ }
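Illustration of the (X ^ Z) op (Y ^ Z) simplification (a sketch; operand_equal_p already guarantees Z has no side effects):

    int xor_cmp_example (int x, int y, int z)
    {
      return (x ^ z) == (y ^ z);   /* may become x == y */
    }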
return NULL_TREE;
case LT_EXPR:
}
/* Comparisons with the highest or lowest possible integer of
- the specified size will have known values. */
+ the specified precision will have known values. */
{
- int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
+ tree arg1_type = TREE_TYPE (arg1);
+ unsigned int width = TYPE_PRECISION (arg1_type);
if (TREE_CODE (arg1) == INTEGER_CST
&& ! TREE_CONSTANT_OVERFLOW (arg1)
&& width <= 2 * HOST_BITS_PER_WIDE_INT
- && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
- || POINTER_TYPE_P (TREE_TYPE (arg1))))
+ && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
{
HOST_WIDE_INT signed_max_hi;
unsigned HOST_WIDE_INT signed_max_lo;
signed_max_hi = 0;
max_hi = 0;
- if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ if (TYPE_UNSIGNED (arg1_type))
{
max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
min_lo = 0;
max_lo = -1;
min_lo = 0;
- if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ if (TYPE_UNSIGNED (arg1_type))
{
max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
min_hi = 0;
switch (code)
{
case GT_EXPR:
- arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
+ arg1 = const_binop (PLUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1), 0);
return fold_build2 (EQ_EXPR, type, arg0, arg1);
case LE_EXPR:
- arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
+ arg1 = const_binop (PLUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1), 0);
return fold_build2 (NE_EXPR, type, arg0, arg1);
default:
break;
break;
}
- else if (!in_gimple_form
- && TREE_INT_CST_HIGH (arg1) == signed_max_hi
+ else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
&& TREE_INT_CST_LOW (arg1) == signed_max_lo
- && TYPE_UNSIGNED (TREE_TYPE (arg1))
+ && TYPE_UNSIGNED (arg1_type)
+ /* We will flip the signedness of the comparison operator
+ associated with the mode of arg1, so the sign bit is
+ specified by this mode. Check that arg1 is the signed
+ max associated with this sign bit. */
+ && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
/* signed_type does not work on pointer types. */
- && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
+ && INTEGRAL_TYPE_P (arg1_type))
{
/* The following case also applies to X < signed_max+1
and X >= signed_max+1 because of previous transformations. */
st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
- type, fold_convert (st0, arg0),
- build_int_cst (st1, 0));
+ type, fold_convert (st0, arg0),
+ build_int_cst (st1, 0));
}
}
}
TREE_OPERAND (arg0, 1))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
{
- tem = invert_truthvalue (arg0);
- if (COMPARISON_CLASS_P (tem))
+ tem = fold_truth_not_expr (arg0);
+ if (tem && COMPARISON_CLASS_P (tem))
{
tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
if (tem)
/* See if this can be inverted. If it can't, possibly because
it was a floating-point inequality comparison, don't do
anything. */
- tem = invert_truthvalue (arg0);
-
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
+ tem = fold_truth_not_expr (arg0);
+ if (tem)
return fold_build3 (code, type, tem, op2, op1);
}
/* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
if (TREE_CODE (arg0) == LT_EXPR
- && integer_zerop (TREE_OPERAND (arg0, 1))
- && integer_zerop (op2)
- && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
- return fold_convert (type, fold_build2 (BIT_AND_EXPR,
- TREE_TYPE (tem), tem, arg1));
+ && integer_zerop (TREE_OPERAND (arg0, 1))
+ && integer_zerop (op2)
+ && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
+ {
+ /* sign_bit_p only checks ARG1 bits within A's precision.
+ If <sign bit of A> has a wider type than A, bits outside
+ of A's precision in <sign bit of A> need to be checked.
+ If they are all 0, this optimization needs to be done
+ in unsigned A's type; if they are all 1, in signed A's
+ type; otherwise this can't be done. */
+ if (TYPE_PRECISION (TREE_TYPE (tem))
+ < TYPE_PRECISION (TREE_TYPE (arg1))
+ && TYPE_PRECISION (TREE_TYPE (tem))
+ < TYPE_PRECISION (type))
+ {
+ unsigned HOST_WIDE_INT mask_lo;
+ HOST_WIDE_INT mask_hi;
+ int inner_width, outer_width;
+ tree tem_type;
+
+ inner_width = TYPE_PRECISION (TREE_TYPE (tem));
+ outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
+ if (outer_width > TYPE_PRECISION (type))
+ outer_width = TYPE_PRECISION (type);
+
+ if (outer_width > HOST_BITS_PER_WIDE_INT)
+ {
+ mask_hi = ((unsigned HOST_WIDE_INT) -1
+ >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
+ mask_lo = -1;
+ }
+ else
+ {
+ mask_hi = 0;
+ mask_lo = ((unsigned HOST_WIDE_INT) -1
+ >> (HOST_BITS_PER_WIDE_INT - outer_width));
+ }
+ if (inner_width > HOST_BITS_PER_WIDE_INT)
+ {
+ mask_hi &= ~((unsigned HOST_WIDE_INT) -1
+ >> (HOST_BITS_PER_WIDE_INT - inner_width));
+ mask_lo = 0;
+ }
+ else
+ mask_lo &= ~((unsigned HOST_WIDE_INT) -1
+ >> (HOST_BITS_PER_WIDE_INT - inner_width));
+
+ if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
+ && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
+ {
+ tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
+ tem = fold_convert (tem_type, tem);
+ }
+ else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
+ && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
+ {
+ tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
+ tem = fold_convert (tem_type, tem);
+ }
+ else
+ tem = NULL_TREE;
+ }
+
+ if (tem)
+ return fold_convert (type,
+ fold_build2 (BIT_AND_EXPR,
+ TREE_TYPE (tem), tem,
+ fold_convert (TREE_TYPE (tem),
+ arg1)));
+ }
/* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
already handled above. */
if (integer_zerop (op2)
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (arg1)))
- return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
+ return fold_build2 (TRUTH_ANDIF_EXPR, type,
+ fold_convert (type, arg0),
+ arg1);
/* Convert A ? B : 1 into !A || B if A and B are truth values. */
if (integer_onep (op2)
&& truth_value_p (TREE_CODE (arg1)))
{
/* Only perform transformation if ARG0 is easily inverted. */
- tem = invert_truthvalue (arg0);
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
+ tem = fold_truth_not_expr (arg0);
+ if (tem)
+ return fold_build2 (TRUTH_ORIF_EXPR, type,
+ fold_convert (type, tem),
+ arg1);
}
/* Convert A ? 0 : B into !A && B if A and B are truth values. */
&& truth_value_p (TREE_CODE (op2)))
{
/* Only perform transformation if ARG0 is easily inverted. */
- tem = invert_truthvalue (arg0);
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
+ tem = fold_truth_not_expr (arg0);
+ if (tem)
+ return fold_build2 (TRUTH_ANDIF_EXPR, type,
+ fold_convert (type, tem),
+ op2);
}
/* Convert A ? 1 : B into A || B if A and B are truth values. */
if (integer_onep (arg1)
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (op2)))
- return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
+ return fold_build2 (TRUTH_ORIF_EXPR, type,
+ fold_convert (type, arg0),
+ op2);
return NULL_TREE;
if (kind == tcc_constant)
return t;
- if (IS_EXPR_CODE_CLASS (kind))
+ if (IS_EXPR_CODE_CLASS (kind)
+ || IS_GIMPLE_STMT_CODE_CLASS (kind))
{
tree type = TREE_TYPE (t);
tree op0, op1, op2;
int saved_trapping_math = flag_trapping_math;\
int saved_rounding_math = flag_rounding_math;\
int saved_trapv = flag_trapv;\
+ int saved_folding_initializer = folding_initializer;\
flag_signaling_nans = 0;\
flag_trapping_math = 0;\
flag_rounding_math = 0;\
- flag_trapv = 0
+ flag_trapv = 0;\
+ folding_initializer = 1;
#define END_FOLD_INIT \
flag_signaling_nans = saved_signaling_nans;\
flag_trapping_math = saved_trapping_math;\
flag_rounding_math = saved_rounding_math;\
- flag_trapv = saved_trapv
+ flag_trapv = saved_trapv;\
+ folding_initializer = saved_folding_initializer;
tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
/* Return true if `t' is known to be non-negative. */
-int
+bool
tree_expr_nonnegative_p (tree t)
{
+ if (t == error_mark_node)
+ return false;
+
if (TYPE_UNSIGNED (TREE_TYPE (t)))
- return 1;
+ return true;
switch (TREE_CODE (t))
{
/* We can't return 1 if flag_wrapv is set because
ABS_EXPR<INT_MIN> = INT_MIN. */
if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
- return 1;
+ return true;
break;
case INTEGER_CST:
{
/* x * x for floating point x is always non-negative. */
if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
- return 1;
+ return true;
return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
}
return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
< TYPE_PRECISION (TREE_TYPE (t));
}
- return 0;
+ return false;
case BIT_AND_EXPR:
case MAX_EXPR:
case COMPOUND_EXPR:
case MODIFY_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ case GIMPLE_MODIFY_STMT:
+ return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
case BIND_EXPR:
return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
if (TREE_CODE (inner_type) == INTEGER_TYPE)
{
if (TYPE_UNSIGNED (inner_type))
- return 1;
+ return true;
return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
}
}
else
break;
}
- if (TREE_CODE (t) == MODIFY_EXPR
- && TREE_OPERAND (t, 0) == temp)
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ if ((TREE_CODE (t) == MODIFY_EXPR
+ || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
+ && GENERIC_TREE_OPERAND (t, 0) == temp)
+ return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
- return 0;
+ return false;
}
case CALL_EXPR:
CASE_INT_FN (BUILT_IN_FFS):
CASE_INT_FN (BUILT_IN_PARITY):
CASE_INT_FN (BUILT_IN_POPCOUNT):
+ case BUILT_IN_BSWAP32:
+ case BUILT_IN_BSWAP64:
/* Always true. */
- return 1;
+ return true;
CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt(-0.0) is -0.0. */
if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
- return 1;
+ return true;
return tree_expr_nonnegative_p (TREE_VALUE (arglist));
CASE_FLT_FN (BUILT_IN_ASINH):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_MODF):
CASE_FLT_FN (BUILT_IN_NEARBYINT):
- CASE_FLT_FN (BUILT_IN_POW):
CASE_FLT_FN (BUILT_IN_RINT):
CASE_FLT_FN (BUILT_IN_ROUND):
CASE_FLT_FN (BUILT_IN_SIGNBIT):
/* True if the 2nd argument is nonnegative. */
return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
+ CASE_FLT_FN (BUILT_IN_POWI):
+ /* True if the 1st argument is nonnegative or the second
+ argument is an even integer. */
+ if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
+ {
+ tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+ if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
+ return true;
+ }
+ return tree_expr_nonnegative_p (TREE_VALUE (arglist));
+
+ CASE_FLT_FN (BUILT_IN_POW):
+ /* True if the 1st argument is nonnegative or the second
+ argument is an even integer-valued real. */
+ if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
+ {
+ REAL_VALUE_TYPE c;
+ HOST_WIDE_INT n;
+
+ c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
+ n = real_to_integer (&c);
+ if ((n & 1) == 0)
+ {
+ REAL_VALUE_TYPE cint;
+ real_from_integer (&cint, VOIDmode, n,
+ n < 0 ? -1 : 0, 0);
+ if (real_identical (&c, &cint))
+ return true;
+ }
+ }
+ return tree_expr_nonnegative_p (TREE_VALUE (arglist));
+
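Consequence of the new pow case, as a sketch: with an even integer-valued constant exponent, the sign of the result is known regardless of the first argument:

    double pow_even_example (double x)
    {
      return __builtin_pow (x, 2.0);   /* tree_expr_nonnegative_p: true */
    }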
default:
break;
}
default:
if (truth_value_p (TREE_CODE (t)))
/* Truth values evaluate to 0 or 1, which is nonnegative. */
- return 1;
+ return true;
}
/* We don't know sign of `t', so be conservative and return false. */
- return 0;
+ return false;
}
/* Return true when T is an address and is known to be nonzero.
case COMPOUND_EXPR:
case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
case BIND_EXPR:
- return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
+ return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
case SAVE_EXPR:
case NON_LVALUE_EXPR:
tree
fold_read_from_constant_string (tree exp)
{
- if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
+ if ((TREE_CODE (exp) == INDIRECT_REF
+ || TREE_CODE (exp) == ARRAY_REF)
+ && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
{
tree exp1 = TREE_OPERAND (exp, 0);
tree index;
}
if (string
- && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
+ && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
&& TREE_CODE (string) == STRING_CST
&& TREE_CODE (index) == INTEGER_CST
&& compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_cst_wide (type, low, high);
- t = force_fit_type (t, 1,
- (overflow | TREE_OVERFLOW (arg0))
- && !TYPE_UNSIGNED (type),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, low, high, 1,
+ (overflow | TREE_OVERFLOW (arg0))
+ && !TYPE_UNSIGNED (type),
+ TREE_CONSTANT_OVERFLOW (arg0));
break;
}
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_cst_wide (type, low, high);
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, low, high, -1,
+ overflow | TREE_OVERFLOW (arg0),
+ TREE_CONSTANT_OVERFLOW (arg0));
}
break;
gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
- t = build_int_cst_wide (type,
- ~ TREE_INT_CST_LOW (arg0),
- ~ TREE_INT_CST_HIGH (arg0));
- t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
+ ~TREE_INT_CST_HIGH (arg0), 0,
+ TREE_OVERFLOW (arg0),
+ TREE_CONSTANT_OVERFLOW (arg0));
return t;
}
{
tree op = TREE_OPERAND (sub, 0);
tree optype = TREE_TYPE (op);
+ /* *&CONST_DECL -> the value of the const decl. */
+ if (TREE_CODE (op) == CONST_DECL)
+ return DECL_INITIAL (op);
/* *&p => p; make sure to handle *&"str"[cst] here. */
if (type == optype)
{
else if (TREE_CODE (optype) == COMPLEX_TYPE
&& type == TREE_TYPE (optype))
return fold_build1 (REALPART_EXPR, type, op);
+ /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
+ else if (TREE_CODE (optype) == VECTOR_TYPE
+ && type == TREE_TYPE (optype))
+ {
+ tree part_width = TYPE_SIZE (type);
+ tree index = bitsize_int (0);
+ return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
+ }
}
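
Illustrative source for the new *(foo *)&vectorfoo rule, using the GCC vector extension (a sketch):

    typedef int v4si __attribute__ ((vector_size (16)));
    int vec_lane_example (v4si v)
    {
      return *(int *) &v;   /* may fold to BIT_FIELD_REF <v, 32, 0> */
    }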
/* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
arg1 ? arg1 : TREE_OPERAND (exp, 1));
break;
+ case COMPOUND_EXPR:
+ arg0 = TREE_OPERAND (exp, 0);
+ arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
+ if (arg1)
+ return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
+ break;
+
+ case COND_EXPR:
+ arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
+ arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
+ if (arg0 || arg1)
+ return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
+ arg0 ? arg0 : TREE_OPERAND (exp, 1),
+ arg1 ? arg1 : TREE_OPERAND (exp, 2));
+ break;
+
+ case CALL_EXPR:
+ {
+ const enum built_in_function fcode = builtin_mathfn_code (exp);
+ switch (fcode)
+ {
+ CASE_FLT_FN (BUILT_IN_COPYSIGN):
+ /* Strip copysign function call, return the 1st argument. */
+ arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
+ arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
+ return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
+
+ default:
+ /* Strip sign ops from the argument of "odd" math functions. */
+ if (negate_mathfn_p (fcode))
+ {
+ arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
+ if (arg0)
+ return build_function_call_expr (get_callee_fndecl (exp),
+ build_tree_list (NULL_TREE,
+ arg0));
+ }
+ break;
+ }
+ }
+ break;
+
default:
break;
}
return NULL_TREE;
}
-