/* Fold a constant sub-tree into a single node for C-compiler
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
-static tree build_zero_vector (tree);
-static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
/* First clear all bits that are beyond the type's precision. */
- if (prec == 2 * HOST_BITS_PER_WIDE_INT)
+ if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
;
else if (prec > HOST_BITS_PER_WIDE_INT)
high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
if (!sign_extended_type)
/* No sign extension */;
- else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
+ else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
/* Correct width already. */;
else if (prec > HOST_BITS_PER_WIDE_INT)
{
else if (TREE_CODE (t2) == NEGATE_EXPR)
return build2 (MINUS_EXPR, type, fold_convert (type, t1),
fold_convert (type, TREE_OPERAND (t2, 0)));
+ else if (integer_zerop (t2))
+ return fold_convert (type, t1);
+ }
+ else if (code == MINUS_EXPR)
+ {
+ if (integer_zerop (t2))
+ return fold_convert (type, t1);
}
+
return build2 (code, type, fold_convert (type, t1),
fold_convert (type, t2));
}
REAL_VALUE_TYPE d1;
REAL_VALUE_TYPE d2;
REAL_VALUE_TYPE value;
+ REAL_VALUE_TYPE result;
+ bool inexact;
tree t, type;
d1 = TREE_REAL_CST (arg1);
else if (REAL_VALUE_ISNAN (d2))
return arg2;
- REAL_ARITHMETIC (value, code, d1, d2);
+ inexact = real_arithmetic (&value, code, &d1, &d2);
+ real_convert (&result, mode, &value);
+
+ /* Don't constant fold this floating point operation if the
+ result may depend upon the run-time rounding mode and
+ flag_rounding_math is set. */
+
+ if (flag_rounding_math
+ && (inexact || !real_identical (&result, &value)))
+ return NULL_TREE;
- t = build_real (type, real_value_truncate (mode, value));
+ t = build_real (type, result);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
TREE_CONSTANT_OVERFLOW (t)
arg1, arg0)));
}
\f
-/* Construct a vector of zero elements of vector type TYPE. */
+/* A subroutine of fold_convert_const handling conversions of an
+ INTEGER_CST to another integer type. */
static tree
-build_zero_vector (tree type)
+fold_convert_const_int_from_int (tree type, tree arg1)
{
- tree elem, list;
- int i, units;
+ tree t;
- elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
- units = TYPE_VECTOR_SUBPARTS (type);
+ /* Given an integer constant, make new constant with new type,
+ appropriately sign-extended or truncated. */
+ t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1));
+
+ t = force_fit_type (t,
+ /* Don't set the overflow when
+ converting a pointer */
+ !POINTER_TYPE_P (TREE_TYPE (arg1)),
+ (TREE_INT_CST_HIGH (arg1) < 0
+ && (TYPE_UNSIGNED (type)
+ < TYPE_UNSIGNED (TREE_TYPE (arg1))))
+ | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
- list = NULL_TREE;
- for (i = 0; i < units; i++)
- list = tree_cons (NULL_TREE, elem, list);
- return build_vector (type, list);
+ return t;
}
-
-/* Attempt to fold type conversion operation CODE of expression ARG1 to
- type TYPE. If no simplification can be done return NULL_TREE. */
+/* A subroutine of fold_convert_const handling conversions a REAL_CST
+ to an integer type. */
static tree
-fold_convert_const (enum tree_code code, tree type, tree arg1)
+fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
int overflow = 0;
tree t;
- if (TREE_TYPE (arg1) == type)
- return arg1;
+ /* The following code implements the floating point to integer
+ conversion rules required by the Java Language Specification,
+ that IEEE NaNs are mapped to zero and values that overflow
+ the target precision saturate, i.e. values greater than
+ INT_MAX are mapped to INT_MAX, and values less than INT_MIN
+ are mapped to INT_MIN. These semantics are allowed by the
+ C and C++ standards that simply state that the behavior of
+ FP-to-integer conversion is unspecified upon overflow. */
- if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ HOST_WIDE_INT high, low;
+ REAL_VALUE_TYPE r;
+ REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
+
+ switch (code)
{
- if (TREE_CODE (arg1) == INTEGER_CST)
- {
- /* If we would build a constant wider than GCC supports,
- leave the conversion unfolded. */
- if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
- return NULL_TREE;
-
- /* Given an integer constant, make new constant with new type,
- appropriately sign-extended or truncated. */
- t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
-
- t = force_fit_type (t,
- /* Don't set the overflow when
- converting a pointer */
- !POINTER_TYPE_P (TREE_TYPE (arg1)),
- (TREE_INT_CST_HIGH (arg1) < 0
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1))))
- | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- return t;
- }
- else if (TREE_CODE (arg1) == REAL_CST)
- {
- /* The following code implements the floating point to integer
- conversion rules required by the Java Language Specification,
- that IEEE NaNs are mapped to zero and values that overflow
- the target precision saturate, i.e. values greater than
- INT_MAX are mapped to INT_MAX, and values less than INT_MIN
- are mapped to INT_MIN. These semantics are allowed by the
- C and C++ standards that simply state that the behavior of
- FP-to-integer conversion is unspecified upon overflow. */
+ case FIX_TRUNC_EXPR:
+ real_trunc (&r, VOIDmode, &x);
+ break;
- HOST_WIDE_INT high, low;
- REAL_VALUE_TYPE r;
- REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
+ case FIX_CEIL_EXPR:
+ real_ceil (&r, VOIDmode, &x);
+ break;
- switch (code)
- {
- case FIX_TRUNC_EXPR:
- real_trunc (&r, VOIDmode, &x);
- break;
+ case FIX_FLOOR_EXPR:
+ real_floor (&r, VOIDmode, &x);
+ break;
- case FIX_CEIL_EXPR:
- real_ceil (&r, VOIDmode, &x);
- break;
+ case FIX_ROUND_EXPR:
+ real_round (&r, VOIDmode, &x);
+ break;
- case FIX_FLOOR_EXPR:
- real_floor (&r, VOIDmode, &x);
- break;
+ default:
+ gcc_unreachable ();
+ }
- case FIX_ROUND_EXPR:
- real_round (&r, VOIDmode, &x);
- break;
+ /* If R is NaN, return zero and show we have an overflow. */
+ if (REAL_VALUE_ISNAN (r))
+ {
+ overflow = 1;
+ high = 0;
+ low = 0;
+ }
- default:
- gcc_unreachable ();
- }
+ /* See if R is less than the lower bound or greater than the
+ upper bound. */
+
+ if (! overflow)
+ {
+ tree lt = TYPE_MIN_VALUE (type);
+ REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
+ if (REAL_VALUES_LESS (r, l))
+ {
+ overflow = 1;
+ high = TREE_INT_CST_HIGH (lt);
+ low = TREE_INT_CST_LOW (lt);
+ }
+ }
- /* If R is NaN, return zero and show we have an overflow. */
- if (REAL_VALUE_ISNAN (r))
+ if (! overflow)
+ {
+ tree ut = TYPE_MAX_VALUE (type);
+ if (ut)
+ {
+ REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
+ if (REAL_VALUES_LESS (u, r))
{
overflow = 1;
- high = 0;
- low = 0;
+ high = TREE_INT_CST_HIGH (ut);
+ low = TREE_INT_CST_LOW (ut);
}
+ }
+ }
- /* See if R is less than the lower bound or greater than the
- upper bound. */
+ if (! overflow)
+ REAL_VALUE_TO_INT (&low, &high, r);
- if (! overflow)
- {
- tree lt = TYPE_MIN_VALUE (type);
- REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
- if (REAL_VALUES_LESS (r, l))
- {
- overflow = 1;
- high = TREE_INT_CST_HIGH (lt);
- low = TREE_INT_CST_LOW (lt);
- }
- }
+ t = build_int_cst_wide (type, low, high);
- if (! overflow)
- {
- tree ut = TYPE_MAX_VALUE (type);
- if (ut)
- {
- REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
- if (REAL_VALUES_LESS (u, r))
- {
- overflow = 1;
- high = TREE_INT_CST_HIGH (ut);
- low = TREE_INT_CST_LOW (ut);
- }
- }
- }
+ t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
+ return t;
+}
- if (! overflow)
- REAL_VALUE_TO_INT (&low, &high, r);
+/* A subroutine of fold_convert_const handling conversions a REAL_CST
+ to another floating point type. */
- t = build_int_cst_wide (type, low, high);
+static tree
+fold_convert_const_real_from_real (tree type, tree arg1)
+{
+ REAL_VALUE_TYPE value;
+ tree t;
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- return t;
- }
+ real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
+ t = build_real (type, value);
+
+ TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
+ TREE_CONSTANT_OVERFLOW (t)
+ = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
+ return t;
+}
+
+/* Attempt to fold type conversion operation CODE of expression ARG1 to
+ type TYPE. If no simplification can be done return NULL_TREE. */
+
+static tree
+fold_convert_const (enum tree_code code, tree type, tree arg1)
+{
+ if (TREE_TYPE (arg1) == type)
+ return arg1;
+
+ if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ {
+ if (TREE_CODE (arg1) == INTEGER_CST)
+ return fold_convert_const_int_from_int (type, arg1);
+ else if (TREE_CODE (arg1) == REAL_CST)
+ return fold_convert_const_int_from_real (code, type, arg1);
}
else if (TREE_CODE (type) == REAL_TYPE)
{
if (TREE_CODE (arg1) == INTEGER_CST)
return build_real_from_int_cst (type, arg1);
if (TREE_CODE (arg1) == REAL_CST)
- {
- if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
- {
- /* We make a copy of ARG1 so that we don't modify an
- existing constant tree. */
- t = copy_node (arg1);
- TREE_TYPE (t) = type;
- return t;
- }
-
- t = build_real (type,
- real_value_truncate (TYPE_MODE (type),
- TREE_REAL_CST (arg1)));
-
- TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
- return t;
- }
+ return fold_convert_const_real_from_real (type, arg1);
}
return NULL_TREE;
}
+/* Construct a vector of zero elements of vector type TYPE. */
+
+static tree
+build_zero_vector (tree type)
+{
+ tree zero, vals;
+ int nelts;
+
+ /* Fold a zero constant to the vector's element type, then chain
+ one copy of it per element of the vector. */
+ zero = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
+ vals = NULL_TREE;
+ for (nelts = TYPE_VECTOR_SUBPARTS (type); nelts > 0; nelts--)
+ vals = tree_cons (NULL_TREE, zero, vals);
+ return build_vector (type, vals);
+}
+
/* Convert expression ARG to type TYPE. Used by the middle-end for
simple conversions in preference to calling the front-end's convert. */
tree
non_lvalue (tree x)
{
+ /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
+ us. */
+ if (in_gimple_form)
+ return x;
+
/* We only need to wrap lvalue tree codes. */
switch (TREE_CODE (x))
{
/* When pedantic, return an expr equal to X but certainly not valid as a
pedantic lvalue. Otherwise, return X. */
-tree
+static tree
pedantic_non_lvalue (tree x)
{
if (pedantic_lvalues)
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
- /* If one is specified and the other isn't, they aren't equal and if
- neither is specified, they are.
-
- ??? This is temporary and is meant only to handle the cases of the
- optional operands for COMPONENT_REF and ARRAY_REF. */
- if ((arg0 && !arg1) || (!arg0 && arg1))
- return 0;
- else if (!arg0 && !arg1)
- return 1;
/* If either is ERROR_MARK, they aren't equal. */
- else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
+ if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
return 0;
/* If both types don't have the same signedness, then we can't consider
if (flags & OEP_ONLY_CONST)
return 0;
+/* Define macros to test an operand from arg0 and arg1 for equality and a
+ variant that allows null and views null as being different from any
+ non-null value. In the latter case, if either is null, then both
+ must be null; otherwise, do the normal comparison. */
+#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
+ TREE_OPERAND (arg1, N), flags)
+
+#define OP_SAME_WITH_NULL(N) \
+ ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
+ ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
+
switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
{
case tcc_unary:
break;
}
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
+
case tcc_comparison:
case tcc_binary:
- if (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags))
+ if (OP_SAME (0) && OP_SAME (1))
return 1;
/* For commutative ops, allow the other order. */
case MISALIGNED_INDIRECT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
case ARRAY_REF:
case ARRAY_RANGE_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 3),
- TREE_OPERAND (arg1, 3), flags));
-
+ /* Operands 2 and 3 may be null. */
+ return (OP_SAME (0)
+ && OP_SAME (1)
+ && OP_SAME_WITH_NULL (2)
+ && OP_SAME_WITH_NULL (3));
case COMPONENT_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags));
-
+ /* Handle operand 2 the same as for ARRAY_REF. */
+ return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
case BIT_FIELD_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags));
+ return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
+
default:
return 0;
}
{
case ADDR_EXPR:
case TRUTH_NOT_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags);
+ return OP_SAME (0) && OP_SAME (1);
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
+ if (OP_SAME (0) && OP_SAME (1))
+ return 1;
+
+ /* Otherwise take into account this is a commutative operation. */
return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
+ TREE_OPERAND (arg1, 1), flags)
&& operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags))
- || (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 0), flags));
+ TREE_OPERAND (arg1, 0), flags));
case CALL_EXPR:
/* If the CALL_EXPRs call different functions, then they
clearly can not be equal. */
- if (! operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags))
+ if (!OP_SAME (0))
return 0;
{
default:
return 0;
}
+
+#undef OP_SAME
+#undef OP_SAME_WITH_NULL
}
\f
/* Similar to operand_equal_p, but see if ARG0 might have been made by
do anything if the inner expression is a PLACEHOLDER_EXPR since we
then will no longer be able to replace it. */
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
- &lunsignedp, &lvolatilep);
+ &lunsignedp, &lvolatilep, false);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
return 0;
/* If this is not a constant, we can only do something if bit positions,
sizes, and signedness are the same. */
rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
- &runsignedp, &rvolatilep);
+ &runsignedp, &rvolatilep, false);
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|| lunsignedp != runsignedp || offset != 0
}
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
- punsignedp, pvolatilep);
+ punsignedp, pvolatilep, false);
if ((inner == exp && and_mask == 0)
|| *pbitsize < 0 || offset != 0
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
simple_operand_p (tree exp)
{
/* Strip any conversions that don't change the machine mode. */
- while ((TREE_CODE (exp) == NOP_EXPR
- || TREE_CODE (exp) == CONVERT_EXPR)
- && (TYPE_MODE (TREE_TYPE (exp))
- == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- exp = TREE_OPERAND (exp, 0);
+ STRIP_NOPS (exp);
return (CONSTANT_CLASS_P (exp)
+ || TREE_CODE (exp) == SSA_NAME
|| (DECL_P (exp)
&& ! TREE_ADDRESSABLE (exp)
&& ! TREE_THIS_VOLATILE (exp)
if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
{
- if (first_rtl_op (code) > 0)
+ if (TREE_CODE_LENGTH (code) > 0)
arg0 = TREE_OPERAND (exp, 0);
if (TREE_CODE_CLASS (code) == tcc_comparison
|| TREE_CODE_CLASS (code) == tcc_unary
\f
-#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
-#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
+#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
+#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
/* EXP is some logical combination of boolean tests. See if we can
/* On machines where the branch cost is expensive, if this is a
short-circuited branch and the underlying object on both sides
is the same, make a non-short-circuit operation. */
- else if (RANGE_TEST_NON_SHORT_CIRCUIT
+ else if (LOGICAL_OP_NON_SHORT_CIRCUIT
&& lhs != 0 && rhs != 0
&& (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
|| TREE_CODE (exp) == TRUTH_ORIF_EXPR)
ll_arg, rl_arg),
fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
- return build2 (code, truth_type, lhs, rhs);
+ if (LOGICAL_OP_NON_SHORT_CIRCUIT)
+ return build2 (code, truth_type, lhs, rhs);
}
/* See if the comparisons can be merged. Then get all the parameters for
return t1;
break;
- case NEGATE_EXPR: case ABS_EXPR:
+ case ABS_EXPR:
+ /* If widening the type changes it from signed to unsigned, then we
+ must avoid building ABS_EXPR itself as unsigned. */
+ if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
+ {
+ tree cstype = (*lang_hooks.types.signed_type) (ctype);
+ if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
+ {
+ t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
+ return fold_convert (ctype, t1);
+ }
+ break;
+ }
+ /* FALLTHROUGH */
+ case NEGATE_EXPR:
if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
break;
return build_int_cst (type, value);
}
+
+/* Return true if expr looks like an ARRAY_REF and set base and
+ offset to the appropriate trees. If there is no offset,
+ offset is set to NULL_TREE. */
+
+static bool
+extract_array_ref (tree expr, tree *base, tree *offset)
+{
+ /* We have to be careful with stripping nops as with the
+ base type the meaning of the offset can change. */
+ tree inner_expr = expr;
+ STRIP_NOPS (inner_expr);
+ /* One canonical form is a PLUS_EXPR with the first
+ argument being an ADDR_EXPR with a possible NOP_EXPR
+ attached. */
+ if (TREE_CODE (expr) == PLUS_EXPR)
+ {
+ tree op0 = TREE_OPERAND (expr, 0);
+ STRIP_NOPS (op0);
+ if (TREE_CODE (op0) == ADDR_EXPR)
+ {
+ /* Note that the original, unstripped operand is returned
+ as the base here, so the caller sees the ADDR_EXPR with
+ any conversion still attached. */
+ *base = TREE_OPERAND (expr, 0);
+ *offset = TREE_OPERAND (expr, 1);
+ return true;
+ }
+ }
+ /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
+ which we transform into an ADDR_EXPR with appropriate
+ offset. For other arguments to the ADDR_EXPR we assume
+ zero offset and as such do not care about the ADDR_EXPR
+ type and strip possible nops from it. */
+ else if (TREE_CODE (inner_expr) == ADDR_EXPR)
+ {
+ tree op0 = TREE_OPERAND (inner_expr, 0);
+ if (TREE_CODE (op0) == ARRAY_REF)
+ {
+ /* &a[i] decomposes into base &a and offset i. */
+ *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
+ *offset = TREE_OPERAND (op0, 1);
+ }
+ else
+ {
+ /* Any other address: treat it as the base with no offset. */
+ *base = inner_expr;
+ *offset = NULL_TREE;
+ }
+ return true;
+ }
+
+ return false;
+}
+
+
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
tree rhs = NULL_TREE;
/* This transformation is only worthwhile if we don't have to wrap
- arg in a SAVE_EXPR, and the operation can be simplified on atleast
+ arg in a SAVE_EXPR, and the operation can be simplified on at least
one of the branches once its pushed inside the COND_EXPR. */
if (!TREE_CONSTANT (arg))
return NULL_TREE;
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
tree result_type)
{
- /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
- operand 0. */
- if (code == TRUTH_NOT_EXPR)
- {
- code = TREE_CODE (arg0);
- if (code != NE_EXPR && code != EQ_EXPR)
- return NULL_TREE;
-
- /* Extract the arguments of the EQ/NE. */
- arg1 = TREE_OPERAND (arg0, 1);
- arg0 = TREE_OPERAND (arg0, 0);
-
- /* This requires us to invert the code. */
- code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
- }
-
/* If this is testing a single bit, we can optimize the test. */
if ((code == NE_EXPR || code == EQ_EXPR)
&& TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
operations as unsigned. If we must use the AND, we have a choice.
Normally unsigned is faster, but for some machines signed is. */
#ifdef LOAD_EXTEND_OP
- ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
+ ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
+ && !flag_syntax_only) ? 0 : 1;
#else
ops_unsigned = 1;
#endif
return 0;
}
+/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
+ ARG0 is extended to a wider type. Returns the folded comparison
+ or NULL_TREE if no simplification is possible. */
+
+static tree
+fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
+{
+ tree arg0_unw = get_unwidened (arg0, NULL_TREE);
+ tree arg1_unw;
+ tree shorter_type, outer_type;
+ tree min, max;
+ bool above, below;
+
+ /* If ARG0 is not actually a widening conversion, there is nothing
+ to do. */
+ if (arg0_unw == arg0)
+ return NULL_TREE;
+ shorter_type = TREE_TYPE (arg0_unw);
+
+ if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
+ return NULL_TREE;
+
+ arg1_unw = get_unwidened (arg1, shorter_type);
+ if (!arg1_unw)
+ return NULL_TREE;
+
+ /* If possible, express the comparison in the shorter mode. */
+ if ((code == EQ_EXPR || code == NE_EXPR
+ || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
+ && (TREE_TYPE (arg1_unw) == shorter_type
+ || (TREE_CODE (arg1_unw) == INTEGER_CST
+ && TREE_CODE (shorter_type) == INTEGER_TYPE
+ && int_fits_type_p (arg1_unw, shorter_type))))
+ return fold (build (code, type, arg0_unw,
+ fold_convert (shorter_type, arg1_unw)));
+
+ if (TREE_CODE (arg1_unw) != INTEGER_CST)
+ return NULL_TREE;
+
+ /* If we are comparing with the integer that does not fit into the range
+ of the shorter type, the result is known. */
+ outer_type = TREE_TYPE (arg1_unw);
+ min = lower_bound_in_type (outer_type, shorter_type);
+ max = upper_bound_in_type (outer_type, shorter_type);
+
+ above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
+ max, arg1_unw));
+ below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
+ arg1_unw, min));
+
+ switch (code)
+ {
+ case EQ_EXPR:
+ if (above || below)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ break;
+
+ case NE_EXPR:
+ if (above || below)
+ return omit_one_operand (type, integer_one_node, arg0);
+ break;
+
+ case LT_EXPR:
+ case LE_EXPR:
+ if (above)
+ return omit_one_operand (type, integer_one_node, arg0);
+ else if (below)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ /* Explicit break: previously this case fell through into the
+ GT/GE case, which was harmless only because ABOVE and BELOW
+ had already been consumed. Make the control flow explicit. */
+ break;
+
+ case GT_EXPR:
+ case GE_EXPR:
+ if (above)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ else if (below)
+ return omit_one_operand (type, integer_one_node, arg0);
+ break;
+
+ default:
+ break;
+ }
+
+ return NULL_TREE;
+}
+
+/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
+ ARG0 just the signedness is changed. */
+
+static tree
+fold_sign_changed_comparison (enum tree_code code, tree type,
+ tree arg0, tree arg1)
+{
+ tree arg0_inner, tmp;
+ tree inner_type, outer_type;
+
+ /* ARG0 must be a conversion for this transformation to apply. */
+ if (TREE_CODE (arg0) != NOP_EXPR)
+ return NULL_TREE;
+
+ outer_type = TREE_TYPE (arg0);
+ arg0_inner = TREE_OPERAND (arg0, 0);
+ inner_type = TREE_TYPE (arg0_inner);
+
+ /* The conversion must not change the precision; only the
+ signedness may differ. */
+ if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
+ return NULL_TREE;
+
+ /* ARG1 must either be a constant we can re-express in the inner
+ type, or itself a conversion from the same inner type. */
+ if (TREE_CODE (arg1) != INTEGER_CST
+ && !(TREE_CODE (arg1) == NOP_EXPR
+ && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
+ return NULL_TREE;
+
+ /* If the signedness actually differs, only equality comparisons
+ are unaffected by the change of sign. */
+ if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
+ && code != NE_EXPR
+ && code != EQ_EXPR)
+ return NULL_TREE;
+
+ if (TREE_CODE (arg1) == INTEGER_CST)
+ {
+ /* Rebuild the constant in the inner type, carrying over the
+ overflow flags from the original constant. */
+ tmp = build_int_cst_wide (inner_type,
+ TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1));
+ arg1 = force_fit_type (tmp, 0,
+ TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
+ }
+ else
+ arg1 = fold_convert (inner_type, arg1);
+
+ /* Emit the comparison on the narrower operands. */
+ return fold (build (code, type, arg0_inner, arg1));
+}
+
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
step of the array. TYPE is the type of the expression. ADDR is the address.
MULT is the multiplicative expression. If the function succeeds, the new
return build1 (ADDR_EXPR, type, ret);
}
+
+/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
+ means A >= Y && A != MAX, but in this case we know that
+ A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
+
+static tree
+fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
+{
+ tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
+
+ /* BOUND must have the shape A < X or X > A; extract A. */
+ if (TREE_CODE (bound) == LT_EXPR)
+ a = TREE_OPERAND (bound, 0);
+ else if (TREE_CODE (bound) == GT_EXPR)
+ a = TREE_OPERAND (bound, 1);
+ else
+ return NULL_TREE;
+
+ /* The transformation is only meaningful for integral or pointer
+ quantities. */
+ typea = TREE_TYPE (a);
+ if (!INTEGRAL_TYPE_P (typea)
+ && !POINTER_TYPE_P (typea))
+ return NULL_TREE;
+
+ /* INEQ must have the shape Y < A1 or A1 > Y; extract A1 and Y. */
+ if (TREE_CODE (ineq) == LT_EXPR)
+ {
+ a1 = TREE_OPERAND (ineq, 1);
+ y = TREE_OPERAND (ineq, 0);
+ }
+ else if (TREE_CODE (ineq) == GT_EXPR)
+ {
+ a1 = TREE_OPERAND (ineq, 0);
+ y = TREE_OPERAND (ineq, 1);
+ }
+ else
+ return NULL_TREE;
+
+ /* Both operands must have the same type for the subtraction below
+ to be meaningful. */
+ if (TREE_TYPE (a1) != typea)
+ return NULL_TREE;
+
+ /* The rewrite is valid only when A1 is exactly A + 1. */
+ diff = fold (build2 (MINUS_EXPR, typea, a1, a));
+ if (!integer_onep (diff))
+ return NULL_TREE;
+
+ return fold (build2 (GE_EXPR, type, a, y));
+}
+
/* Perform constant folding and related simplification of EXPR.
The related simplifications include x*1 => x, x*0 => 0, etc.,
and application of the associative law.
}
else if (IS_EXPR_CODE_CLASS (kind))
{
- int len = first_rtl_op (code);
+ int len = TREE_CODE_LENGTH (code);
int i;
for (i = 0; i < len; i++)
{
&& ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
&& (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
== TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
- && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
- && (INTEGRAL_TYPE_P
- (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
- && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
+ && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
+ && (INTEGRAL_TYPE_P
+ (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
+ && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
+ || flag_syntax_only))
tem = build1 (code, type,
build3 (COND_EXPR,
TREE_TYPE (TREE_OPERAND
change = (cst == 0);
#ifdef LOAD_EXTEND_OP
if (change
+ && !flag_syntax_only
&& (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
== ZERO_EXTEND))
{
/* Reassociate (plus (plus (mult) (foo)) (mult)) as
(plus (plus (mult) (mult)) (foo)) so that we can
take advantage of the factoring cases below. */
- if ((TREE_CODE (arg0) == PLUS_EXPR
+ if (((TREE_CODE (arg0) == PLUS_EXPR
+ || TREE_CODE (arg0) == MINUS_EXPR)
&& TREE_CODE (arg1) == MULT_EXPR)
- || (TREE_CODE (arg1) == PLUS_EXPR
+ || ((TREE_CODE (arg1) == PLUS_EXPR
+ || TREE_CODE (arg1) == MINUS_EXPR)
&& TREE_CODE (arg0) == MULT_EXPR))
{
tree parg0, parg1, parg, marg;
+ enum tree_code pcode;
- if (TREE_CODE (arg0) == PLUS_EXPR)
+ if (TREE_CODE (arg1) == MULT_EXPR)
parg = arg0, marg = arg1;
else
parg = arg1, marg = arg0;
+ pcode = TREE_CODE (parg);
parg0 = TREE_OPERAND (parg, 0);
parg1 = TREE_OPERAND (parg, 1);
STRIP_NOPS (parg0);
if (TREE_CODE (parg0) == MULT_EXPR
&& TREE_CODE (parg1) != MULT_EXPR)
- return fold (build2 (PLUS_EXPR, type,
+ return fold (build2 (pcode, type,
fold (build2 (PLUS_EXPR, type,
fold_convert (type, parg0),
fold_convert (type, marg))),
if (TREE_CODE (parg0) != MULT_EXPR
&& TREE_CODE (parg1) == MULT_EXPR)
return fold (build2 (PLUS_EXPR, type,
- fold (build2 (PLUS_EXPR, type,
- fold_convert (type, parg1),
- fold_convert (type, marg))),
- fold_convert (type, parg0)));
+ fold_convert (type, parg0),
+ fold (build2 (pcode, type,
+ fold_convert (type, marg),
+ fold_convert (type,
+ parg1)))));
}
if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
if (same)
return fold (build2 (MULT_EXPR, type,
fold (build2 (PLUS_EXPR, type,
- alt0, alt1)),
+ fold_convert (type, alt0),
+ fold_convert (type, alt1))),
same));
}
{
HOST_WIDE_INT diff;
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (arg1) == ADDR_EXPR
- && ptr_difference_const (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0),
- &diff))
+ if ((TREE_CODE (arg0) == ADDR_EXPR
+ || TREE_CODE (arg1) == ADDR_EXPR)
+ && ptr_difference_const (arg0, arg1, &diff))
return build_int_cst_type (type, diff);
}
if (TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (arg1) == MULT_EXPR
- && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
+ && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
{
/* (A * C) - (B * C) -> (A-B) * C. */
if (operand_equal_p (TREE_OPERAND (arg0, 1),
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
- if (integer_onep (arg1))
+ /* 0 % X is always zero as is X % 1. */
+ if (integer_zerop (arg0) || integer_onep (arg1))
return omit_one_operand (type, integer_zero_node, arg0);
if (integer_zerop (arg1))
return t;
tem = invert_truthvalue (arg0);
/* Avoid infinite recursion. */
if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
- {
- tem = fold_single_bit_test (code, arg0, arg1, type);
- if (tem)
- return tem;
- return t;
- }
+ return t;
return fold_convert (type, tem);
case TRUTH_ANDIF_EXPR:
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand (type, integer_zero_node, arg0);
+ /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
+ means A >= Y && A != MAX, but in this case we know that
+ A < X <= MAX. */
+
+ if (!TREE_SIDE_EFFECTS (arg0)
+ && !TREE_SIDE_EFFECTS (arg1))
+ {
+ tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
+ if (tem)
+ return fold (build2 (code, type, tem, arg1));
+
+ tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
+ if (tem)
+ return fold (build2 (code, type, arg0, tem));
+ }
+
truth_andor:
/* We only do these simplifications if we are optimizing. */
if (!optimize)
? code == EQ_EXPR : code != EQ_EXPR,
type);
+ /* If this is a comparison of two exprs that look like an
+ ARRAY_REF of the same object, then we can fold this to a
+ comparison of the two offsets. */
+ if (COMPARISON_CLASS_P (t))
+ {
+ tree base0, offset0, base1, offset1;
+
+ if (extract_array_ref (arg0, &base0, &offset0)
+ && extract_array_ref (arg1, &base1, &offset1)
+ && operand_equal_p (base0, base1, 0))
+ {
+ if (offset0 == NULL_TREE
+ && offset1 == NULL_TREE)
+ {
+ offset0 = integer_zero_node;
+ offset1 = integer_zero_node;
+ }
+ else if (offset0 == NULL_TREE)
+ offset0 = build_int_cst (TREE_TYPE (offset1), 0);
+ else if (offset1 == NULL_TREE)
+ offset1 = build_int_cst (TREE_TYPE (offset0), 0);
+
+ if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
+ return fold (build2 (code, type, offset0, offset1));
+ }
+ }
+
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree targ0 = strip_float_extensions (arg0);
/* Comparisons with the highest or lowest possible integer of
the specified size will have known values.
- This is quite similar to fold_relational_hi_lo; however, my
- attempts to share the code have been nothing but trouble.
- I give up for now. */
+ This is quite similar to fold_relational_hi_lo, however,
+ attempts to share the code have been nothing but trouble. */
{
int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
if (TREE_CODE (arg1) == INTEGER_CST
&& ! TREE_CONSTANT_OVERFLOW (arg1)
- && width <= HOST_BITS_PER_WIDE_INT
+ && width <= 2 * HOST_BITS_PER_WIDE_INT
&& (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
|| POINTER_TYPE_P (TREE_TYPE (arg1))))
{
- unsigned HOST_WIDE_INT signed_max;
- unsigned HOST_WIDE_INT max, min;
+ HOST_WIDE_INT signed_max_hi;
+ unsigned HOST_WIDE_INT signed_max_lo;
+ unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
- signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
-
- if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ if (width <= HOST_BITS_PER_WIDE_INT)
{
- max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
- min = 0;
+ signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
+ - 1;
+ signed_max_hi = 0;
+ max_hi = 0;
+
+ if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ {
+ max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
+ min_lo = 0;
+ min_hi = 0;
+ }
+ else
+ {
+ max_lo = signed_max_lo;
+ min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
+ min_hi = -1;
+ }
}
else
{
- max = signed_max;
- min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
+ width -= HOST_BITS_PER_WIDE_INT;
+ signed_max_lo = -1;
+ signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
+ - 1;
+ max_lo = -1;
+ min_lo = 0;
+
+ if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ {
+ max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
+ min_hi = 0;
+ }
+ else
+ {
+ max_hi = signed_max_hi;
+ min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
+ }
}
- if (TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_LOW (arg1) == max)
+ if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
+ && TREE_INT_CST_LOW (arg1) == max_lo)
switch (code)
{
case GT_EXPR:
default:
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_LOW (arg1) == max - 1)
+ else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
+ == max_hi
+ && TREE_INT_CST_LOW (arg1) == max_lo - 1)
switch (code)
{
case GT_EXPR:
default:
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (arg1) == min)
+ else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
+ == min_hi
+ && TREE_INT_CST_LOW (arg1) == min_lo)
switch (code)
{
case LT_EXPR:
default:
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (arg1) == min + 1)
+ else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
+ == min_hi
+ && TREE_INT_CST_LOW (arg1) == min_lo + 1)
switch (code)
{
case GE_EXPR:
}
else if (!in_gimple_form
- && TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_LOW (arg1) == signed_max
+ && TREE_INT_CST_HIGH (arg1) == signed_max_hi
+ && TREE_INT_CST_LOW (arg1) == signed_max_lo
&& TYPE_UNSIGNED (TREE_TYPE (arg1))
/* signed_type does not work on pointer types. */
&& INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
return fold (build2 (code, type,
TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
- /* If we are widening one operand of an integer comparison,
- see if the other operand is similarly being widened. Perhaps we
- can do the comparison in the narrower type. */
else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
- && TREE_CODE (arg0) == NOP_EXPR
- && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
- && (code == EQ_EXPR || code == NE_EXPR
- || TYPE_UNSIGNED (TREE_TYPE (arg0))
- == TYPE_UNSIGNED (TREE_TYPE (tem)))
- && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
- && (TREE_TYPE (t1) == TREE_TYPE (tem)
- || (TREE_CODE (t1) == INTEGER_CST
- && TREE_CODE (TREE_TYPE (tem)) == INTEGER_TYPE
- && int_fits_type_p (t1, TREE_TYPE (tem)))))
- return fold (build2 (code, type, tem,
- fold_convert (TREE_TYPE (tem), t1)));
+ && TREE_CODE (arg0) == NOP_EXPR)
+ {
+ /* If we are widening one operand of an integer comparison,
+ see if the other operand is similarly being widened. Perhaps we
+ can do the comparison in the narrower type. */
+ tem = fold_widened_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+
+ /* Or if we are changing signedness. */
+ tem = fold_sign_changed_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+ }
/* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
constant, we can simplify it. */
tree arglist;
if (fndecl
- && DECL_BUILT_IN (fndecl)
- && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
&& (arglist = TREE_OPERAND (arg0, 1))
&& TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
/* We can fold X/C1 op C2 where C1 and C2 are integer constants
into a single range test. */
- if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
+ if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
+ || TREE_CODE (arg0) == EXACT_DIV_EXPR)
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !integer_zerop (TREE_OPERAND (arg0, 1))
case tcc_unary:
case tcc_binary:
case tcc_statement:
- len = first_rtl_op (code);
+ len = TREE_CODE_LENGTH (code);
for (i = 0; i < len; ++i)
fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
break;
{
int saved_signaling_nans = flag_signaling_nans;
int saved_trapping_math = flag_trapping_math;
+ int saved_rounding_math = flag_rounding_math;
int saved_trapv = flag_trapv;
tree result;
flag_signaling_nans = 0;
flag_trapping_math = 0;
+ flag_rounding_math = 0;
flag_trapv = 0;
result = fold (expr);
flag_signaling_nans = saved_signaling_nans;
flag_trapping_math = saved_trapping_math;
+ flag_rounding_math = saved_rounding_math;
flag_trapv = saved_trapv;
return result;
switch (TREE_CODE (top))
{
+ case BIT_AND_EXPR:
+ /* Bitwise and provides a power of two multiple. If the mask is
+ a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
+ if (!integer_pow2p (bottom))
+ return 0;
+ /* FALLTHRU */
+
case MULT_EXPR:
return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
|| multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
{
tree fndecl = get_callee_fndecl (t);
tree arglist = TREE_OPERAND (t, 1);
- if (fndecl
- && DECL_BUILT_IN (fndecl)
- && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
+ if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (fndecl))
{
#define CASE_BUILTIN_F(BUILT_IN_FN) \
fold_convert (st0, op0),
fold_convert (st1, integer_zero_node));
- retval
- = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
- TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- TREE_OPERAND (exp, 1));
+ retval = fold_binary_to_constant (TREE_CODE (exp),
+ TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ TREE_OPERAND (exp, 1));
/* If we are in gimple form, then returning EXP would create
non-gimple expressions. Clearing it is safe and insures
simpler than the generic fold routine. */
tree
-nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
- tree op0, tree op1)
+fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
int wins = 1;
tree subop0;
the generic fold routine. */
tree
-nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
- tree op0)
+fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
/* Make sure we have a suitable constant argument. */
if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
it with a cleanup point expression. */
if (!TREE_SIDE_EFFECTS (expr))
return expr;
+
+  /* If the expression is a return, check whether the expression inside
+     the return, or the right-hand side of the MODIFY_EXPR inside the
+     return, has no side effects.  If either is free of side effects, we
+     do not need to wrap the expression in a cleanup point expression.
+     Note we do not check the left-hand side of the MODIFY_EXPR because
+     it should always be a return decl.  */
+ if (TREE_CODE (expr) == RETURN_EXPR)
+ {
+ tree op = TREE_OPERAND (expr, 0);
+ if (!op || !TREE_SIDE_EFFECTS (op))
+ return expr;
+ op = TREE_OPERAND (op, 1);
+ if (!TREE_SIDE_EFFECTS (op))
+ return expr;
+ }
return build1 (CLEANUP_POINT_EXPR, type, expr);
}
{
tree base = t;
- while (handled_component_p (base)
- || TREE_CODE (base) == REALPART_EXPR
- || TREE_CODE (base) == IMAGPART_EXPR)
+ while (handled_component_p (base))
base = TREE_OPERAND (base, 0);
if (DECL_P (base))
TREE_ADDRESSABLE (base) = 1;
return value;
}
+/* Returns the pointer to the base of the object addressed by EXP and
+ extracts the information about the offset of the access, storing it
+ to PBITPOS and POFFSET. */
+
+static tree
+split_address_to_core_and_offset (tree exp,
+                                  HOST_WIDE_INT *pbitpos, tree *poffset)
+{
+  tree core;
+  enum machine_mode mode;
+  int unsignedp, volatilep;
+  HOST_WIDE_INT bitsize;
+
+  if (TREE_CODE (exp) == ADDR_EXPR)
+    {
+      /* Decompose the object whose address is taken into a base object
+	 plus bit position and (possibly variable) offset.  The mode,
+	 signedness and volatility outputs are not needed by callers.  */
+      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
+                                  poffset, &mode, &unsignedp, &volatilep,
+                                  false);
+
+      /* For &*p the core is *p; strip the INDIRECT_REF so the result
+	 is the pointer p itself.  */
+      if (TREE_CODE (core) == INDIRECT_REF)
+        core = TREE_OPERAND (core, 0);
+    }
+  else
+    {
+      /* EXP is not an address-of expression; treat EXP itself as the
+	 base, with no bit position or extra offset.  */
+      core = exp;
+      *pbitpos = 0;
+      *poffset = NULL_TREE;
+    }
+
+  return core;
+}
+
/* Returns true if addresses of E1 and E2 differ by a constant, false
- otherwise. If they do, &E1 - &E2 is stored in *DIFF. */
+ otherwise. If they do, E1 - E2 is stored in *DIFF. */
bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
tree core1, core2;
- HOST_WIDE_INT bitsize1, bitsize2;
HOST_WIDE_INT bitpos1, bitpos2;
tree toffset1, toffset2, tdiff, type;
- enum machine_mode mode1, mode2;
- int unsignedp1, unsignedp2, volatilep1, volatilep2;
- core1 = get_inner_reference (e1, &bitsize1, &bitpos1, &toffset1, &mode1,
- &unsignedp1, &volatilep1);
- core2 = get_inner_reference (e2, &bitsize2, &bitpos2, &toffset2, &mode2,
- &unsignedp2, &volatilep2);
+ core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
+ core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
if (bitpos1 % BITS_PER_UNIT != 0
|| bitpos2 % BITS_PER_UNIT != 0