/* Fold a constant sub-tree into a single node for C-compiler
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
-static tree build_zero_vector (tree);
-static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
indicates whether constant overflow has already occurred. We force
T's value to be within range of T's type (by setting to 0 or 1 all
the bits outside the type's range). We set TREE_OVERFLOWED if,
- OVERFLOWED is non-zero,
+ OVERFLOWED is nonzero,
or OVERFLOWABLE is >0 and signed overflow occurs
or OVERFLOWABLE is <0 and any overflow occurs
We set TREE_CONSTANT_OVERFLOWED if,
- CONST_OVERFLOWED is non-zero
+ CONST_OVERFLOWED is nonzero
or we set TREE_OVERFLOWED.
We return either the original T, or a copy. */
int sign_extended_type;
gcc_assert (TREE_CODE (t) == INTEGER_CST);
-
+
low = TREE_INT_CST_LOW (t);
high = TREE_INT_CST_HIGH (t);
/* First clear all bits that are beyond the type's precision. */
- if (prec == 2 * HOST_BITS_PER_WIDE_INT)
+ if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
;
else if (prec > HOST_BITS_PER_WIDE_INT)
high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
if (!sign_extended_type)
/* No sign extension */;
- else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
+ else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
/* Correct width already. */;
else if (prec > HOST_BITS_PER_WIDE_INT)
{
|| low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
{
t = build_int_cst_wide (TREE_TYPE (t), low, high);
-
+
if (overflowed
|| overflowable < 0
|| (overflowable > 0 && sign_extended_type))
TREE_CONSTANT_OVERFLOW (t) = 1;
}
}
-
+
return t;
}
\f
else if (TREE_CODE (t2) == NEGATE_EXPR)
return build2 (MINUS_EXPR, type, fold_convert (type, t1),
fold_convert (type, TREE_OPERAND (t2, 0)));
+ else if (integer_zerop (t2))
+ return fold_convert (type, t1);
}
+ else if (code == MINUS_EXPR)
+ {
+ if (integer_zerop (t2))
+ return fold_convert (type, t1);
+ }
+
return build2 (code, type, fold_convert (type, t1),
fold_convert (type, t2));
}
| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
TREE_CONSTANT_OVERFLOW (arg1)
| TREE_CONSTANT_OVERFLOW (arg2));
-
+
return t;
}
REAL_VALUE_TYPE d1;
REAL_VALUE_TYPE d2;
REAL_VALUE_TYPE value;
+ REAL_VALUE_TYPE result;
+ bool inexact;
tree t, type;
d1 = TREE_REAL_CST (arg1);
else if (REAL_VALUE_ISNAN (d2))
return arg2;
- REAL_ARITHMETIC (value, code, d1, d2);
+ inexact = real_arithmetic (&value, code, &d1, &d2);
+ real_convert (&result, mode, &value);
+
+ /* Don't constant fold this floating point operation if the
+ result may depend upon the run-time rounding mode and
+ flag_rounding_math is set. */
+
+ if (flag_rounding_math
+ && (inexact || !real_identical (&result, &value)))
+ return NULL_TREE;
- t = build_real (type, real_value_truncate (mode, value));
+ t = build_real (type, result);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
TREE_CONSTANT_OVERFLOW (t)
arg1, arg0)));
}
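The flag_rounding_math guard above exists because an inexact result is only meaningful relative to a rounding mode. Below is a minimal stand-alone sketch of the effect (illustrative only, not part of the patch), assuming a C99 <fenv.h> environment that defines FE_DOWNWARD and FE_UPWARD:

#include <fenv.h>
#include <stdio.h>
#pragma STDC FENV_ACCESS ON

int
main (void)
{
  volatile double x = 1.0, y = 3.0;
  double down, up;

  fesetround (FE_DOWNWARD);
  down = x / y;                 /* 1/3 rounded toward -inf */
  fesetround (FE_UPWARD);
  up = x / y;                   /* 1/3 rounded toward +inf */

  /* Prints 1: the inexact quotient depends on the run-time rounding
     mode, so folding it at compile time would fix one answer.  */
  printf ("%d\n", down != up);
  return 0;
}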
\f
-/* Construct a vector of zero elements of vector type TYPE. */
+/* A subroutine of fold_convert_const handling conversions of an
+ INTEGER_CST to another integer type. */
static tree
-build_zero_vector (tree type)
+fold_convert_const_int_from_int (tree type, tree arg1)
{
- tree elem, list;
- int i, units;
+ tree t;
- elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
- units = TYPE_VECTOR_SUBPARTS (type);
+ /* Given an integer constant, make new constant with new type,
+ appropriately sign-extended or truncated. */
+ t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1));
+
+ t = force_fit_type (t,
+ /* Don't set the overflow when
+ converting a pointer */
+ !POINTER_TYPE_P (TREE_TYPE (arg1)),
+ (TREE_INT_CST_HIGH (arg1) < 0
+ && (TYPE_UNSIGNED (type)
+ < TYPE_UNSIGNED (TREE_TYPE (arg1))))
+ | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
- list = NULL_TREE;
- for (i = 0; i < units; i++)
- list = tree_cons (NULL_TREE, elem, list);
- return build_vector (type, list);
+ return t;
}
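As a plain-C illustration of the fold above (not part of the patch), narrowing an integer constant keeps only the low-order bits and then reads them in the destination type; the values shown assume GCC's usual two's-complement behavior for out-of-range signed conversions:

#include <stdio.h>

int
main (void)
{
  signed char sc = (signed char) 0x1ff;     /* low 8 bits 0xff -> -1 */
  unsigned char uc = (unsigned char) 0x1ff; /* low 8 bits 0xff -> 255 */

  printf ("%d %d\n", sc, uc);               /* prints "-1 255" */
  return 0;
}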
-
-/* Attempt to fold type conversion operation CODE of expression ARG1 to
- type TYPE. If no simplification can be done return NULL_TREE. */
+/* A subroutine of fold_convert_const handling conversions of a REAL_CST
+ to an integer type. */
static tree
-fold_convert_const (enum tree_code code, tree type, tree arg1)
+fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
int overflow = 0;
tree t;
- if (TREE_TYPE (arg1) == type)
- return arg1;
+ /* The following code implements the floating point to integer
+ conversion rules required by the Java Language Specification,
+ that IEEE NaNs are mapped to zero and values that overflow
+ the target precision saturate, i.e. values greater than
+ INT_MAX are mapped to INT_MAX, and values less than INT_MIN
+ are mapped to INT_MIN. These semantics are allowed by the
+ C and C++ standards that simply state that the behavior of
+ FP-to-integer conversion is unspecified upon overflow. */
- if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ HOST_WIDE_INT high, low;
+ REAL_VALUE_TYPE r;
+ REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
+
+ switch (code)
{
- if (TREE_CODE (arg1) == INTEGER_CST)
- {
- /* If we would build a constant wider than GCC supports,
- leave the conversion unfolded. */
- if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
- return NULL_TREE;
-
- /* Given an integer constant, make new constant with new type,
- appropriately sign-extended or truncated. */
- t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
-
- t = force_fit_type (t,
- /* Don't set the overflow when
- converting a pointer */
- !POINTER_TYPE_P (TREE_TYPE (arg1)),
- (TREE_INT_CST_HIGH (arg1) < 0
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1))))
- | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- return t;
- }
- else if (TREE_CODE (arg1) == REAL_CST)
- {
- /* The following code implements the floating point to integer
- conversion rules required by the Java Language Specification,
- that IEEE NaNs are mapped to zero and values that overflow
- the target precision saturate, i.e. values greater than
- INT_MAX are mapped to INT_MAX, and values less than INT_MIN
- are mapped to INT_MIN. These semantics are allowed by the
- C and C++ standards that simply state that the behavior of
- FP-to-integer conversion is unspecified upon overflow. */
+ case FIX_TRUNC_EXPR:
+ real_trunc (&r, VOIDmode, &x);
+ break;
- HOST_WIDE_INT high, low;
- REAL_VALUE_TYPE r;
- REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
+ case FIX_CEIL_EXPR:
+ real_ceil (&r, VOIDmode, &x);
+ break;
- switch (code)
- {
- case FIX_TRUNC_EXPR:
- real_trunc (&r, VOIDmode, &x);
- break;
+ case FIX_FLOOR_EXPR:
+ real_floor (&r, VOIDmode, &x);
+ break;
- case FIX_CEIL_EXPR:
- real_ceil (&r, VOIDmode, &x);
- break;
+ case FIX_ROUND_EXPR:
+ real_round (&r, VOIDmode, &x);
+ break;
- case FIX_FLOOR_EXPR:
- real_floor (&r, VOIDmode, &x);
- break;
+ default:
+ gcc_unreachable ();
+ }
- case FIX_ROUND_EXPR:
- real_round (&r, VOIDmode, &x);
- break;
+ /* If R is NaN, return zero and show we have an overflow. */
+ if (REAL_VALUE_ISNAN (r))
+ {
+ overflow = 1;
+ high = 0;
+ low = 0;
+ }
- default:
- gcc_unreachable ();
- }
+ /* See if R is less than the lower bound or greater than the
+ upper bound. */
- /* If R is NaN, return zero and show we have an overflow. */
- if (REAL_VALUE_ISNAN (r))
+ if (! overflow)
+ {
+ tree lt = TYPE_MIN_VALUE (type);
+ REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
+ if (REAL_VALUES_LESS (r, l))
+ {
+ overflow = 1;
+ high = TREE_INT_CST_HIGH (lt);
+ low = TREE_INT_CST_LOW (lt);
+ }
+ }
+
+ if (! overflow)
+ {
+ tree ut = TYPE_MAX_VALUE (type);
+ if (ut)
+ {
+ REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
+ if (REAL_VALUES_LESS (u, r))
{
overflow = 1;
- high = 0;
- low = 0;
+ high = TREE_INT_CST_HIGH (ut);
+ low = TREE_INT_CST_LOW (ut);
}
+ }
+ }
- /* See if R is less than the lower bound or greater than the
- upper bound. */
+ if (! overflow)
+ REAL_VALUE_TO_INT (&low, &high, r);
- if (! overflow)
- {
- tree lt = TYPE_MIN_VALUE (type);
- REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
- if (REAL_VALUES_LESS (r, l))
- {
- overflow = 1;
- high = TREE_INT_CST_HIGH (lt);
- low = TREE_INT_CST_LOW (lt);
- }
- }
+ t = build_int_cst_wide (type, low, high);
- if (! overflow)
- {
- tree ut = TYPE_MAX_VALUE (type);
- if (ut)
- {
- REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
- if (REAL_VALUES_LESS (u, r))
- {
- overflow = 1;
- high = TREE_INT_CST_HIGH (ut);
- low = TREE_INT_CST_LOW (ut);
- }
- }
- }
+ t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
+ return t;
+}
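The saturating behavior described in the comment above can be written out as ordinary C. This is only an illustrative sketch of the semantics (NaN to zero, clamping at the bounds), not the folder's implementation:

#include <limits.h>
#include <math.h>
#include <stdio.h>

/* Convert D to int with the Java-style semantics described above:
   NaN maps to zero, out-of-range values clamp to INT_MIN/INT_MAX,
   everything else truncates toward zero.  */
static int
saturating_double_to_int (double d)
{
  if (isnan (d))
    return 0;
  if (d >= (double) INT_MAX)
    return INT_MAX;
  if (d <= (double) INT_MIN)
    return INT_MIN;
  return (int) d;
}

int
main (void)
{
  printf ("%d %d %d\n",
          saturating_double_to_int (0.0 / 0.0),   /* NaN  -> 0 */
          saturating_double_to_int (1e30),        /* huge -> INT_MAX */
          saturating_double_to_int (-1e30));      /* tiny -> INT_MIN */
  return 0;
}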
- if (! overflow)
- REAL_VALUE_TO_INT (&low, &high, r);
+/* A subroutine of fold_convert_const handling conversions of a REAL_CST
+ to another floating point type. */
- t = build_int_cst_wide (type, low, high);
+static tree
+fold_convert_const_real_from_real (tree type, tree arg1)
+{
+ REAL_VALUE_TYPE value;
+ tree t;
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- return t;
- }
+ real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
+ t = build_real (type, value);
+
+ TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
+ TREE_CONSTANT_OVERFLOW (t)
+ = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
+ return t;
+}
+
+/* Attempt to fold type conversion operation CODE of expression ARG1 to
+ type TYPE. If no simplification can be done return NULL_TREE. */
+
+static tree
+fold_convert_const (enum tree_code code, tree type, tree arg1)
+{
+ if (TREE_TYPE (arg1) == type)
+ return arg1;
+
+ if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ {
+ if (TREE_CODE (arg1) == INTEGER_CST)
+ return fold_convert_const_int_from_int (type, arg1);
+ else if (TREE_CODE (arg1) == REAL_CST)
+ return fold_convert_const_int_from_real (code, type, arg1);
}
else if (TREE_CODE (type) == REAL_TYPE)
{
if (TREE_CODE (arg1) == INTEGER_CST)
return build_real_from_int_cst (type, arg1);
if (TREE_CODE (arg1) == REAL_CST)
- {
- if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
- {
- /* We make a copy of ARG1 so that we don't modify an
- existing constant tree. */
- t = copy_node (arg1);
- TREE_TYPE (t) = type;
- return t;
- }
-
- t = build_real (type,
- real_value_truncate (TYPE_MODE (type),
- TREE_REAL_CST (arg1)));
-
- TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
- return t;
- }
+ return fold_convert_const_real_from_real (type, arg1);
}
return NULL_TREE;
}
+/* Construct a vector of zero elements of vector type TYPE. */
+
+static tree
+build_zero_vector (tree type)
+{
+ tree elem, list;
+ int i, units;
+
+ elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
+ units = TYPE_VECTOR_SUBPARTS (type);
+
+ list = NULL_TREE;
+ for (i = 0; i < units; i++)
+ list = tree_cons (NULL_TREE, elem, list);
+ return build_vector (type, list);
+}
+
/* Convert expression ARG to type TYPE. Used by the middle-end for
simple conversions in preference to calling the front-end's convert. */
gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
&& tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
return fold (build1 (NOP_EXPR, type, arg));
-
+
case REAL_TYPE:
if (TREE_CODE (arg) == INTEGER_CST)
{
case BOOLEAN_TYPE: case ENUMERAL_TYPE:
case POINTER_TYPE: case REFERENCE_TYPE:
return fold (build1 (FLOAT_EXPR, type, arg));
-
+
case REAL_TYPE:
return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
type, arg));
-
+
case COMPLEX_TYPE:
tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
return fold_convert (type, tem);
-
+
default:
gcc_unreachable ();
}
-
+
case COMPLEX_TYPE:
switch (TREE_CODE (orig))
{
case COMPLEX_TYPE:
{
tree rpart, ipart;
-
+
if (TREE_CODE (arg) == COMPLEX_EXPR)
{
rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
}
-
+
arg = save_expr (arg);
rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
ipart = fold_convert (TREE_TYPE (type), ipart);
return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
}
-
+
default:
gcc_unreachable ();
}
-
+
case VECTOR_TYPE:
if (integer_zerop (arg))
return build_zero_vector (type);
tree
non_lvalue (tree x)
{
+ /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
+ us. */
+ if (in_gimple_form)
+ return x;
+
/* We only need to wrap lvalue tree codes. */
switch (TREE_CODE (x))
{
case COMPONENT_REF:
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
case ARRAY_REF:
case ARRAY_RANGE_REF:
case BIT_FIELD_REF:
/* When pedantic, return an expr equal to X but certainly not valid as a
pedantic lvalue. Otherwise, return X. */
-tree
+static tree
pedantic_non_lvalue (tree x)
{
if (pedantic_lvalues)
static int
truth_value_p (enum tree_code code)
{
- return (TREE_CODE_CLASS (code) == '<'
+ return (TREE_CODE_CLASS (code) == tcc_comparison
|| code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
|| code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
|| code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
- /* If one is specified and the other isn't, they aren't equal and if
- neither is specified, they are.
-
- ??? This is temporary and is meant only to handle the cases of the
- optional operands for COMPONENT_REF and ARRAY_REF. */
- if ((arg0 && !arg1) || (!arg0 && arg1))
- return 0;
- else if (!arg0 && !arg1)
- return 1;
/* If either is ERROR_MARK, they aren't equal. */
- else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
+ if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
return 0;
/* If both types don't have the same signedness, then we can't consider
if (flags & OEP_ONLY_CONST)
return 0;
+/* Define macros to test an operand from arg0 and arg1 for equality and a
+ variant that allows null and views null as being different from any
+ non-null value. In the latter case, if either is null, then both
+ must be; otherwise, do the normal comparison. */
+#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
+ TREE_OPERAND (arg1, N), flags)
+
+#define OP_SAME_WITH_NULL(N) \
+ ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
+ ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
+
switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
{
- case '1':
+ case tcc_unary:
/* Two conversions are equal only if signedness and modes match. */
switch (TREE_CODE (arg0))
{
break;
}
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
+
- case '<':
- case '2':
- if (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags))
+ case tcc_comparison:
+ case tcc_binary:
+ if (OP_SAME (0) && OP_SAME (1))
return 1;
/* For commutative ops, allow the other order. */
&& operand_equal_p (TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg1, 0), flags));
- case 'r':
+ case tcc_reference:
/* If either of the pointer (or reference) expressions we are
dereferencing contain a side effect, these cannot be equal. */
if (TREE_SIDE_EFFECTS (arg0)
switch (TREE_CODE (arg0))
{
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
case ARRAY_REF:
case ARRAY_RANGE_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 3),
- TREE_OPERAND (arg1, 3), flags));
-
+ /* Operands 2 and 3 may be null. */
+ return (OP_SAME (0)
+ && OP_SAME (1)
+ && OP_SAME_WITH_NULL (2)
+ && OP_SAME_WITH_NULL (3));
case COMPONENT_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags));
-
+ /* Handle operand 2 the same as for ARRAY_REF. */
+ return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
case BIT_FIELD_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags));
+ return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
+
default:
return 0;
}
- case 'e':
+ case tcc_expression:
switch (TREE_CODE (arg0))
{
case ADDR_EXPR:
case TRUTH_NOT_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags);
+ return OP_SAME (0) && OP_SAME (1);
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
+ if (OP_SAME (0) && OP_SAME (1))
+ return 1;
+
+ /* Otherwise take into account this is a commutative operation. */
return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
+ TREE_OPERAND (arg1, 1), flags)
&& operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags))
- || (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 0), flags));
+ TREE_OPERAND (arg1, 0), flags));
case CALL_EXPR:
/* If the CALL_EXPRs call different functions, then they
clearly can not be equal. */
- if (! operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags))
+ if (!OP_SAME (0))
return 0;
{
return 0;
}
- case 'd':
+ case tcc_declaration:
/* Consider __builtin_sqrt equal to sqrt. */
return (TREE_CODE (arg0) == FUNCTION_DECL
&& DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
default:
return 0;
}
+
+#undef OP_SAME
+#undef OP_SAME_WITH_NULL
}
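The OP_SAME_WITH_NULL macro introduced above encodes a small reusable pattern: optional operands compare equal only when both are absent, or both are present and compare equal. A stand-alone sketch of the same idea on C strings (illustrative only):

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

/* Equality for optional values: if either is missing, both must be;
   otherwise fall back to the normal comparison.  */
static bool
optional_equal (const char *a, const char *b)
{
  if (a == NULL || b == NULL)
    return a == b;
  return strcmp (a, b) == 0;
}

int
main (void)
{
  printf ("%d %d %d\n",
          optional_equal (NULL, NULL),   /* 1: both absent */
          optional_equal ("x", NULL),    /* 0: only one absent */
          optional_equal ("x", "x"));    /* 1: both present and equal */
  return 0;
}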
\f
/* Similar to operand_equal_p, but see if ARG0 might have been made by
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
enum tree_code code = TREE_CODE (arg);
- char class = TREE_CODE_CLASS (code);
+ enum tree_code_class class = TREE_CODE_CLASS (code);
- /* We can handle some of the 'e' cases here. */
- if (class == 'e' && code == TRUTH_NOT_EXPR)
- class = '1';
- else if (class == 'e'
+ /* We can handle some of the tcc_expression cases here. */
+ if (class == tcc_expression && code == TRUTH_NOT_EXPR)
+ class = tcc_unary;
+ else if (class == tcc_expression
&& (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
|| code == COMPOUND_EXPR))
- class = '2';
+ class = tcc_binary;
- else if (class == 'e' && code == SAVE_EXPR
+ else if (class == tcc_expression && code == SAVE_EXPR
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
{
/* If we've already found a CVAL1 or CVAL2, this expression is
if (*cval1 || *cval2)
return 0;
- class = '1';
+ class = tcc_unary;
*save_p = 1;
}
switch (class)
{
- case '1':
+ case tcc_unary:
return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
- case '2':
+ case tcc_binary:
return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
&& twoval_comparison_p (TREE_OPERAND (arg, 1),
cval1, cval2, save_p));
- case 'c':
+ case tcc_constant:
return 1;
- case 'e':
+ case tcc_expression:
if (code == COND_EXPR)
return (twoval_comparison_p (TREE_OPERAND (arg, 0),
cval1, cval2, save_p)
cval1, cval2, save_p));
return 0;
- case '<':
+ case tcc_comparison:
/* First see if we can handle the first operand, then the second. For
the second operand, we know *CVAL1 can't be zero. It must be that
one side of the comparison is each of the values; test for the
{
tree type = TREE_TYPE (arg);
enum tree_code code = TREE_CODE (arg);
- char class = TREE_CODE_CLASS (code);
+ enum tree_code_class class = TREE_CODE_CLASS (code);
- /* We can handle some of the 'e' cases here. */
- if (class == 'e' && code == TRUTH_NOT_EXPR)
- class = '1';
- else if (class == 'e'
+ /* We can handle some of the tcc_expression cases here. */
+ if (class == tcc_expression && code == TRUTH_NOT_EXPR)
+ class = tcc_unary;
+ else if (class == tcc_expression
&& (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
- class = '2';
+ class = tcc_binary;
switch (class)
{
- case '1':
+ case tcc_unary:
return fold (build1 (code, type,
eval_subst (TREE_OPERAND (arg, 0),
old0, new0, old1, new1)));
- case '2':
+ case tcc_binary:
return fold (build2 (code, type,
eval_subst (TREE_OPERAND (arg, 0),
old0, new0, old1, new1),
eval_subst (TREE_OPERAND (arg, 1),
old0, new0, old1, new1)));
- case 'e':
+ case tcc_expression:
switch (code)
{
case SAVE_EXPR:
}
/* Fall through - ??? */
- case '<':
+ case tcc_comparison:
{
tree arg0 = TREE_OPERAND (arg, 0);
tree arg1 = TREE_OPERAND (arg, 1);
floating-point non-equality comparisons, in which case we just
enclose a TRUTH_NOT_EXPR around what we have. */
- if (TREE_CODE_CLASS (code) == '<')
+ if (TREE_CODE_CLASS (code) == tcc_comparison)
{
tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
if (FLOAT_TYPE_P (op_type)
do anything if the inner expression is a PLACEHOLDER_EXPR since we
then will no longer be able to replace it. */
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
- &lunsignedp, &lvolatilep);
+ &lunsignedp, &lvolatilep, false);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
return 0;
/* If this is not a constant, we can only do something if bit positions,
sizes, and signedness are the same. */
rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
- &runsignedp, &rvolatilep);
+ &runsignedp, &rvolatilep, false);
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|| lunsignedp != runsignedp || offset != 0
}
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
- punsignedp, pvolatilep);
+ punsignedp, pvolatilep, false);
if ((inner == exp && and_mask == 0)
|| *pbitsize < 0 || offset != 0
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
mask = build_int_cst (unsigned_type, -1);
mask = force_fit_type (mask, 0, false, false);
-
+
mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
tmask = force_fit_type (tmask, 0, false, false);
-
+
return
tree_int_cst_equal (mask,
const_binop (RSHIFT_EXPR,
simple_operand_p (tree exp)
{
/* Strip any conversions that don't change the machine mode. */
- while ((TREE_CODE (exp) == NOP_EXPR
- || TREE_CODE (exp) == CONVERT_EXPR)
- && (TYPE_MODE (TREE_TYPE (exp))
- == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- exp = TREE_OPERAND (exp, 0);
+ STRIP_NOPS (exp);
- return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
+ return (CONSTANT_CLASS_P (exp)
+ || TREE_CODE (exp) == SSA_NAME
|| (DECL_P (exp)
&& ! TREE_ADDRESSABLE (exp)
&& ! TREE_THIS_VOLATILE (exp)
return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
}
- if (TREE_CODE_CLASS (code) != '<')
+ if (TREE_CODE_CLASS (code) != tcc_comparison)
return 0;
/* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
{
- if (first_rtl_op (code) > 0)
+ if (TREE_CODE_LENGTH (code) > 0)
arg0 = TREE_OPERAND (exp, 0);
- if (TREE_CODE_CLASS (code) == '<'
- || TREE_CODE_CLASS (code) == '1'
- || TREE_CODE_CLASS (code) == '2')
+ if (TREE_CODE_CLASS (code) == tcc_comparison
+ || TREE_CODE_CLASS (code) == tcc_unary
+ || TREE_CODE_CLASS (code) == tcc_binary)
arg0_type = TREE_TYPE (arg0);
- if (TREE_CODE_CLASS (code) == '2'
- || TREE_CODE_CLASS (code) == '<'
- || (TREE_CODE_CLASS (code) == 'e'
+ if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison
+ || (TREE_CODE_CLASS (code) == tcc_expression
&& TREE_CODE_LENGTH (code) > 1))
arg1 = TREE_OPERAND (exp, 1);
}
tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
return negate_expr (fold_convert (type, tem));
default:
- gcc_assert (TREE_CODE_CLASS (comp_code) == '<');
+ gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
break;
}
return pedantic_non_lvalue (fold_convert (type, arg1));
break;
default:
- gcc_assert (TREE_CODE_CLASS (comp_code) == '<');
+ gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
break;
}
}
\f
-#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
-#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
+#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
+#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
/* EXP is some logical combination of boolean tests. See if we can
/* On machines where the branch cost is expensive, if this is a
short-circuited branch and the underlying object on both sides
is the same, make a non-short-circuit operation. */
- else if (RANGE_TEST_NON_SHORT_CIRCUIT
+ else if (LOGICAL_OP_NON_SHORT_CIRCUIT
&& lhs != 0 && rhs != 0
&& (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
|| TREE_CODE (exp) == TRUTH_ORIF_EXPR)
rcode = NE_EXPR;
}
- if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
+ if (TREE_CODE_CLASS (lcode) != tcc_comparison
+ || TREE_CODE_CLASS (rcode) != tcc_comparison)
return 0;
ll_arg = TREE_OPERAND (lhs, 0);
ll_arg, rl_arg),
fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
- return build2 (code, truth_type, lhs, rhs);
+ if (LOGICAL_OP_NON_SHORT_CIRCUIT)
+ return build2 (code, truth_type, lhs, rhs);
}
/* See if the comparisons can be merged. Then get all the parameters for
{
if (wanted_code == NE_EXPR)
{
- warning ("`or' of unmatched not-equal tests is always 1");
+ warning ("%<or%> of unmatched not-equal tests is always 1");
return constant_boolean_node (true, truth_type);
}
else
{
- warning ("`and' of mutually exclusive equal-tests is always 0");
+ warning ("%<and%> of mutually exclusive equal-tests is always 0");
return constant_boolean_node (false, truth_type);
}
}
if (integer_zerop (c))
return NULL_TREE;
- if (TREE_CODE_CLASS (tcode) == '1')
+ if (TREE_CODE_CLASS (tcode) == tcc_unary)
op0 = TREE_OPERAND (t, 0);
- if (TREE_CODE_CLASS (tcode) == '2')
+ if (TREE_CODE_CLASS (tcode) == tcc_binary)
op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
/* Note that we need not handle conditional operations here since fold
case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
/* If op0 is an expression ... */
- if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
- || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
- || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
- || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
+ if ((COMPARISON_CLASS_P (op0)
+ || UNARY_CLASS_P (op0)
+ || BINARY_CLASS_P (op0)
+ || EXPRESSION_CLASS_P (op0))
/* ... and is unsigned, and its type is smaller than ctype,
then we cannot pass through as widening. */
&& ((TYPE_UNSIGNED (TREE_TYPE (op0))
return t1;
break;
- case NEGATE_EXPR: case ABS_EXPR:
+ case ABS_EXPR:
+ /* If widening the type changes it from signed to unsigned, then we
+ must avoid building ABS_EXPR itself as unsigned. */
+ if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
+ {
+ tree cstype = (*lang_hooks.types.signed_type) (ctype);
+ if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
+ {
+ t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
+ return fold_convert (ctype, t1);
+ }
+ break;
+ }
+ /* FALLTHROUGH */
+ case NEGATE_EXPR:
if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
break;
return build_int_cst (type, value);
}
+
+/* Return true if expr looks like an ARRAY_REF and set base and
+ offset to the appropriate trees. If there is no offset,
+ offset is set to NULL_TREE. */
+
+static bool
+extract_array_ref (tree expr, tree *base, tree *offset)
+{
+ /* We have to be careful with stripping nops, as changing the
+ base type can change the meaning of the offset. */
+ tree inner_expr = expr;
+ STRIP_NOPS (inner_expr);
+ /* One canonical form is a PLUS_EXPR with the first
+ argument being an ADDR_EXPR with a possible NOP_EXPR
+ attached. */
+ if (TREE_CODE (expr) == PLUS_EXPR)
+ {
+ tree op0 = TREE_OPERAND (expr, 0);
+ STRIP_NOPS (op0);
+ if (TREE_CODE (op0) == ADDR_EXPR)
+ {
+ *base = TREE_OPERAND (expr, 0);
+ *offset = TREE_OPERAND (expr, 1);
+ return true;
+ }
+ }
+ /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
+ which we transform into an ADDR_EXPR with appropriate
+ offset. For other arguments to the ADDR_EXPR we assume
+ zero offset and as such do not care about the ADDR_EXPR
+ type and strip possible nops from it. */
+ else if (TREE_CODE (inner_expr) == ADDR_EXPR)
+ {
+ tree op0 = TREE_OPERAND (inner_expr, 0);
+ if (TREE_CODE (op0) == ARRAY_REF)
+ {
+ *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
+ *offset = TREE_OPERAND (op0, 1);
+ }
+ else
+ {
+ *base = inner_expr;
+ *offset = NULL_TREE;
+ }
+ return true;
+ }
+
+ return false;
+}
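extract_array_ref feeds the new comparison folding further down, which reduces a comparison of two addresses into the same array to a comparison of the offsets. A small sketch of why that is sound (illustrative, not part of the patch):

#include <stdio.h>

int
main (void)
{
  int a[8];
  int i = 3;

  /* The two canonical source forms denote the same address ...  */
  printf ("%d\n", &a[i] == a + i);                 /* prints 1 */

  /* ... so comparing two addresses with a common base is the same
     as comparing their indices.  */
  printf ("%d\n", (&a[1] < &a[4]) == (1 < 4));     /* prints 1 */
  return 0;
}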
+
+
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
tree rhs = NULL_TREE;
/* This transformation is only worthwhile if we don't have to wrap
- arg in a SAVE_EXPR, and the operation can be simplified on atleast
+ arg in a SAVE_EXPR, and the operation can be simplified on at least
one of the branches once its pushed inside the COND_EXPR. */
if (!TREE_CONSTANT (arg))
return NULL_TREE;
}
else
{
+ /* A negative divisor reverses the relational operators. */
+ code = swap_tree_comparison (code);
+
tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
switch (tree_int_cst_sgn (arg1))
{
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
tree result_type)
{
- /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
- operand 0. */
- if (code == TRUTH_NOT_EXPR)
- {
- code = TREE_CODE (arg0);
- if (code != NE_EXPR && code != EQ_EXPR)
- return NULL_TREE;
-
- /* Extract the arguments of the EQ/NE. */
- arg1 = TREE_OPERAND (arg0, 1);
- arg0 = TREE_OPERAND (arg0, 0);
-
- /* This requires us to invert the code. */
- code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
- }
-
/* If this is testing a single bit, we can optimize the test. */
if ((code == NE_EXPR || code == EQ_EXPR)
&& TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
operations as unsigned. If we must use the AND, we have a choice.
Normally unsigned is faster, but for some machines signed is. */
#ifdef LOAD_EXTEND_OP
- ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
+ ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
+ && !flag_syntax_only) ? 0 : 1;
#else
ops_unsigned = 1;
#endif
reorder_operands_p (tree arg0, tree arg1)
{
if (! flag_evaluation_order)
- return true;
+ return true;
if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
return true;
return ! TREE_SIDE_EFFECTS (arg0)
if (DECL_P (arg0))
return 1;
- if (reorder && flag_evaluation_order
- && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
- return 0;
-
- if (DECL_P (arg1))
- return 0;
- if (DECL_P (arg0))
- return 1;
-
/* It is preferable to swap two SSA_NAME to ensure a canonical form
for commutative and comparison operators. Ensuring a canonical
form allows the optimizers to find additional redundancies without
return 0;
}
+/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
+ ARG0 is extended to a wider type. */
+
+static tree
+fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
+{
+ tree arg0_unw = get_unwidened (arg0, NULL_TREE);
+ tree arg1_unw;
+ tree shorter_type, outer_type;
+ tree min, max;
+ bool above, below;
+
+ if (arg0_unw == arg0)
+ return NULL_TREE;
+ shorter_type = TREE_TYPE (arg0_unw);
+
+ if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
+ return NULL_TREE;
+
+ arg1_unw = get_unwidened (arg1, shorter_type);
+ if (!arg1_unw)
+ return NULL_TREE;
+
+ /* If possible, express the comparison in the shorter mode. */
+ if ((code == EQ_EXPR || code == NE_EXPR
+ || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
+ && (TREE_TYPE (arg1_unw) == shorter_type
+ || (TREE_CODE (arg1_unw) == INTEGER_CST
+ && TREE_CODE (shorter_type) == INTEGER_TYPE
+ && int_fits_type_p (arg1_unw, shorter_type))))
+ return fold (build (code, type, arg0_unw,
+ fold_convert (shorter_type, arg1_unw)));
+
+ if (TREE_CODE (arg1_unw) != INTEGER_CST)
+ return NULL_TREE;
+
+ /* If we are comparing with the integer that does not fit into the range
+ of the shorter type, the result is known. */
+ outer_type = TREE_TYPE (arg1_unw);
+ min = lower_bound_in_type (outer_type, shorter_type);
+ max = upper_bound_in_type (outer_type, shorter_type);
+
+ above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
+ max, arg1_unw));
+ below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
+ arg1_unw, min));
+
+ switch (code)
+ {
+ case EQ_EXPR:
+ if (above || below)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ break;
+
+ case NE_EXPR:
+ if (above || below)
+ return omit_one_operand (type, integer_one_node, arg0);
+ break;
+
+ case LT_EXPR:
+ case LE_EXPR:
+ if (above)
+ return omit_one_operand (type, integer_one_node, arg0);
+ else if (below)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ break;
+
+ case GT_EXPR:
+ case GE_EXPR:
+ if (above)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ else if (below)
+ return omit_one_operand (type, integer_one_node, arg0);
+ break;
+
+ default:
+ break;
+ }
+
+ return NULL_TREE;
+}
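A concrete case of what fold_widened_comparison can decide, assuming a target with 16-bit short and 32-bit int (illustrative sketch only):

#include <stdio.h>

int
main (void)
{
  short s = 32767;   /* any short value gives the same answers */

  /* 100000 is above SHRT_MAX, so the widened comparison is always true.  */
  printf ("%d\n", (int) s < 100000);    /* prints 1 */

  /* 70000 cannot be the value of any short, so equality is always false.  */
  printf ("%d\n", (int) s == 70000);    /* prints 0 */
  return 0;
}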
+
+/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
+ ARG0 just the signedness is changed. */
+
+static tree
+fold_sign_changed_comparison (enum tree_code code, tree type,
+ tree arg0, tree arg1)
+{
+ tree arg0_inner, tmp;
+ tree inner_type, outer_type;
+
+ if (TREE_CODE (arg0) != NOP_EXPR)
+ return NULL_TREE;
+
+ outer_type = TREE_TYPE (arg0);
+ arg0_inner = TREE_OPERAND (arg0, 0);
+ inner_type = TREE_TYPE (arg0_inner);
+
+ if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
+ return NULL_TREE;
+
+ if (TREE_CODE (arg1) != INTEGER_CST
+ && !(TREE_CODE (arg1) == NOP_EXPR
+ && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
+ return NULL_TREE;
+
+ if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
+ && code != NE_EXPR
+ && code != EQ_EXPR)
+ return NULL_TREE;
+
+ if (TREE_CODE (arg1) == INTEGER_CST)
+ {
+ tmp = build_int_cst_wide (inner_type,
+ TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1));
+ arg1 = force_fit_type (tmp, 0,
+ TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
+ }
+ else
+ arg1 = fold_convert (inner_type, arg1);
+
+ return fold (build (code, type, arg0_inner, arg1));
+}
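For fold_sign_changed_comparison the underlying identity is that an equality test is insensitive to a signedness-only cast, provided the constant is re-expressed in the inner type. A sketch assuming 32-bit int (illustrative only):

#include <stdio.h>

int
main (void)
{
  int i = -1;

  /* The cast changes only the signedness, so for == and != the two
     spellings agree once the constant is rewritten in the inner type.  */
  printf ("%d\n", (unsigned int) i == 0xffffffffu);  /* prints 1 */
  printf ("%d\n", i == -1);                          /* prints 1 */
  return 0;
}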
+
+/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
+ the step of the array. TYPE is the type of the expression. ADDR is the address.
+ MULT is the multiplicative expression. If the function succeeds, the new
+ address expression is returned. Otherwise NULL_TREE is returned. */
+
+static tree
+try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
+{
+ tree s, delta, step;
+ tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
+ tree ref = TREE_OPERAND (addr, 0), pref;
+ tree ret, pos;
+ tree itype;
+
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+
+ if (TREE_CODE (arg0) == INTEGER_CST)
+ {
+ s = arg0;
+ delta = arg1;
+ }
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ {
+ s = arg1;
+ delta = arg0;
+ }
+ else
+ return NULL_TREE;
+
+ for (;; ref = TREE_OPERAND (ref, 0))
+ {
+ if (TREE_CODE (ref) == ARRAY_REF)
+ {
+ step = array_ref_element_size (ref);
+
+ if (TREE_CODE (step) != INTEGER_CST)
+ continue;
+
+ itype = TREE_TYPE (step);
+
+ /* If the type sizes do not match, we might run into problems
+ when one of them would overflow. */
+ if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
+ continue;
+
+ if (!operand_equal_p (step, fold_convert (itype, s), 0))
+ continue;
+
+ delta = fold_convert (itype, delta);
+ break;
+ }
+
+ if (!handled_component_p (ref))
+ return NULL_TREE;
+ }
+
+ /* We found a suitable array reference. So copy everything up to it,
+ and replace the index. */
+
+ pref = TREE_OPERAND (addr, 0);
+ ret = copy_node (pref);
+ pos = ret;
+
+ while (pref != ref)
+ {
+ pref = TREE_OPERAND (pref, 0);
+ TREE_OPERAND (pos, 0) = copy_node (pref);
+ pos = TREE_OPERAND (pos, 0);
+ }
+
+ TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
+ TREE_OPERAND (pos, 1),
+ delta));
+
+ return build1 (ADDR_EXPR, type, ret);
+}
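try_move_mult_to_index relies on the fact that adding s * delta bytes to &a[idx], where s is the element size, lands on &a[idx + delta]. A quick address-identity check (illustrative sketch, not part of the patch):

#include <stdio.h>

int
main (void)
{
  int a[16];
  int i = 2, j = 5;

  /* Scaled byte arithmetic on &a[i] reaches the same element as
     folding the multiplier into the index.  */
  int *p = (int *) ((char *) &a[i] + sizeof (int) * j);
  printf ("%d\n", p == &a[i + j]);   /* prints 1 */
  return 0;
}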
+
+
+/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
+ means A >= Y && A != MAX, but in this case we know that
+ A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
+
+static tree
+fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
+{
+ tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
+
+ if (TREE_CODE (bound) == LT_EXPR)
+ a = TREE_OPERAND (bound, 0);
+ else if (TREE_CODE (bound) == GT_EXPR)
+ a = TREE_OPERAND (bound, 1);
+ else
+ return NULL_TREE;
+
+ typea = TREE_TYPE (a);
+ if (!INTEGRAL_TYPE_P (typea)
+ && !POINTER_TYPE_P (typea))
+ return NULL_TREE;
+
+ if (TREE_CODE (ineq) == LT_EXPR)
+ {
+ a1 = TREE_OPERAND (ineq, 1);
+ y = TREE_OPERAND (ineq, 0);
+ }
+ else if (TREE_CODE (ineq) == GT_EXPR)
+ {
+ a1 = TREE_OPERAND (ineq, 0);
+ y = TREE_OPERAND (ineq, 1);
+ }
+ else
+ return NULL_TREE;
+
+ if (TREE_TYPE (a1) != typea)
+ return NULL_TREE;
+
+ diff = fold (build2 (MINUS_EXPR, typea, a1, a));
+ if (!integer_onep (diff))
+ return NULL_TREE;
+
+ return fold (build2 (GE_EXPR, type, a, y));
+}
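The identity behind fold_to_nonsharp_ineq_using_bound is that, for integers that cannot wrap, A + 1 > Y is the same predicate as A >= Y; the bound A < X only rules out the A == MAX wrap-around case. A brute-force check over a small range (illustrative only):

#include <stdio.h>

int
main (void)
{
  int mismatches = 0;

  for (int a = -8; a <= 8; a++)
    for (int x = -8; x <= 8; x++)
      for (int y = -8; y <= 8; y++)
        if (a < x && ((a + 1 > y) != (a >= y)))
          mismatches++;

  printf ("%d\n", mismatches);   /* prints 0 */
  return 0;
}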
+
/* Perform constant folding and related simplification of EXPR.
The related simplifications include x*1 => x, x*0 => 0, etc.,
and application of the associative law.
tree tem;
tree arg0 = NULL_TREE, arg1 = NULL_TREE;
enum tree_code code = TREE_CODE (t);
- int kind = TREE_CODE_CLASS (code);
+ enum tree_code_class kind = TREE_CODE_CLASS (code);
/* WINS will be nonzero when the switch is done
if all operands are constant. */
int wins = 1;
/* Return right away if a constant. */
- if (kind == 'c')
+ if (kind == tcc_constant)
return t;
if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
}
else if (IS_EXPR_CODE_CLASS (kind))
{
- int len = first_rtl_op (code);
+ int len = TREE_CODE_LENGTH (code);
int i;
for (i = 0; i < len; i++)
{
of the arguments so that their form can be studied. In any
cases, the appropriate type conversions should be put back in
the tree that will get out of the constant folder. */
- if (kind == '<')
+ if (kind == tcc_comparison)
STRIP_SIGN_NOPS (op);
else
STRIP_NOPS (op);
return tem;
}
- if (TREE_CODE_CLASS (code) == '1')
+ if (TREE_CODE_CLASS (code) == tcc_unary)
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
&& ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
&& (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
== TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
- && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
- && (INTEGRAL_TYPE_P
- (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
- && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
+ && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
+ && (INTEGRAL_TYPE_P
+ (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
+ && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
+ || flag_syntax_only))
tem = build1 (code, type,
build3 (COND_EXPR,
TREE_TYPE (TREE_OPERAND
TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
return tem;
}
- else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
+ else if (COMPARISON_CLASS_P (arg0))
{
if (TREE_CODE (type) == BOOLEAN_TYPE)
{
integer_zero_node))));
}
}
- else if (TREE_CODE_CLASS (code) == '<'
+ else if (TREE_CODE_CLASS (code) == tcc_comparison
&& TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
- else if (TREE_CODE_CLASS (code) == '<'
+ else if (TREE_CODE_CLASS (code) == tcc_comparison
&& TREE_CODE (arg1) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
- else if (TREE_CODE_CLASS (code) == '2'
- || TREE_CODE_CLASS (code) == '<')
+ else if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison)
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
fold (build2 (code, type,
arg0, TREE_OPERAND (arg1, 1))));
- if (TREE_CODE (arg0) == COND_EXPR
- || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
+ if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
{
tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
/*cond_first_p=*/1);
return tem;
}
- if (TREE_CODE (arg1) == COND_EXPR
- || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
+ if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
{
tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
/*cond_first_p=*/0);
change = (cst == 0);
#ifdef LOAD_EXTEND_OP
if (change
+ && !flag_syntax_only
&& (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
== ZERO_EXTEND))
{
/* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
T2 being pointers to types of the same size. */
if (POINTER_TYPE_P (TREE_TYPE (t))
- && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
+ && BINARY_CLASS_P (arg0)
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
{
/* Reassociate (plus (plus (mult) (foo)) (mult)) as
(plus (plus (mult) (mult)) (foo)) so that we can
take advantage of the factoring cases below. */
- if ((TREE_CODE (arg0) == PLUS_EXPR
+ if (((TREE_CODE (arg0) == PLUS_EXPR
+ || TREE_CODE (arg0) == MINUS_EXPR)
&& TREE_CODE (arg1) == MULT_EXPR)
- || (TREE_CODE (arg1) == PLUS_EXPR
+ || ((TREE_CODE (arg1) == PLUS_EXPR
+ || TREE_CODE (arg1) == MINUS_EXPR)
&& TREE_CODE (arg0) == MULT_EXPR))
{
tree parg0, parg1, parg, marg;
+ enum tree_code pcode;
- if (TREE_CODE (arg0) == PLUS_EXPR)
+ if (TREE_CODE (arg1) == MULT_EXPR)
parg = arg0, marg = arg1;
else
parg = arg1, marg = arg0;
+ pcode = TREE_CODE (parg);
parg0 = TREE_OPERAND (parg, 0);
parg1 = TREE_OPERAND (parg, 1);
STRIP_NOPS (parg0);
if (TREE_CODE (parg0) == MULT_EXPR
&& TREE_CODE (parg1) != MULT_EXPR)
- return fold (build2 (PLUS_EXPR, type,
+ return fold (build2 (pcode, type,
fold (build2 (PLUS_EXPR, type,
fold_convert (type, parg0),
fold_convert (type, marg))),
if (TREE_CODE (parg0) != MULT_EXPR
&& TREE_CODE (parg1) == MULT_EXPR)
return fold (build2 (PLUS_EXPR, type,
- fold (build2 (PLUS_EXPR, type,
- fold_convert (type, parg1),
- fold_convert (type, marg))),
- fold_convert (type, parg0)));
+ fold_convert (type, parg0),
+ fold (build2 (pcode, type,
+ fold_convert (type, marg),
+ fold_convert (type,
+ parg1)))));
}
if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
if (same)
return fold (build2 (MULT_EXPR, type,
fold (build2 (PLUS_EXPR, type,
- alt0, alt1)),
+ fold_convert (type, alt0),
+ fold_convert (type, alt1))),
same));
}
+
+ /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
+ of the array. The loop optimizer sometimes produces this type of
+ expression. */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (arg1) == MULT_EXPR)
+ {
+ tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
+ if (tem)
+ return fold (tem);
+ }
+ else if (TREE_CODE (arg1) == ADDR_EXPR
+ && TREE_CODE (arg0) == MULT_EXPR)
+ {
+ tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
+ if (tem)
+ return fold (tem);
+ }
}
else
{
TREE_OPERAND (arg0, 0),
build_real (type, c1)));
}
- /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
+ /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg1) == PLUS_EXPR
&& TREE_CODE (arg0) != MULT_EXPR)
return fold (build2 (PLUS_EXPR, type, tree0, tree11));
}
}
- /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
+ /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg0) == PLUS_EXPR
&& TREE_CODE (arg1) != MULT_EXPR)
|| (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
+ /* Try folding difference of addresses. */
+ {
+ HOST_WIDE_INT diff;
+
+ if ((TREE_CODE (arg0) == ADDR_EXPR
+ || TREE_CODE (arg1) == ADDR_EXPR)
+ && ptr_difference_const (arg0, arg1, &diff))
+ return build_int_cst_type (type, diff);
+ }
+
+ /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
+ of the array. The loop optimizer sometimes produces this type of
+ expression. */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (arg1) == MULT_EXPR)
+ {
+ tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
+ if (tem)
+ return fold (tem);
+ }
+
if (TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (arg1) == MULT_EXPR
- && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
+ && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
{
/* (A * C) - (B * C) -> (A-B) * C. */
if (operand_equal_p (TREE_OPERAND (arg0, 1),
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
- if (integer_onep (arg1))
+ /* 0 % X is always zero as is X % 1. */
+ if (integer_zerop (arg0) || integer_onep (arg1))
return omit_one_operand (type, integer_zero_node, arg0);
if (integer_zerop (arg1))
return t;
tem = invert_truthvalue (arg0);
/* Avoid infinite recursion. */
if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
- {
- tem = fold_single_bit_test (code, arg0, arg1, type);
- if (tem)
- return tem;
- return t;
- }
+ return t;
return fold_convert (type, tem);
case TRUTH_ANDIF_EXPR:
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand (type, integer_zero_node, arg0);
+ /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
+ means A >= Y && A != MAX, but in this case we know that
+ A < X <= MAX. */
+
+ if (!TREE_SIDE_EFFECTS (arg0)
+ && !TREE_SIDE_EFFECTS (arg1))
+ {
+ tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
+ if (tem)
+ return fold (build2 (code, type, tem, arg1));
+
+ tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
+ if (tem)
+ return fold (build2 (code, type, arg0, tem));
+ }
+
truth_andor:
/* We only do these simplifications if we are optimizing. */
if (!optimize)
? code == EQ_EXPR : code != EQ_EXPR,
type);
+ /* If this is a comparison of two exprs that look like an
+ ARRAY_REF of the same object, then we can fold this to a
+ comparison of the two offsets. */
+ if (COMPARISON_CLASS_P (t))
+ {
+ tree base0, offset0, base1, offset1;
+
+ if (extract_array_ref (arg0, &base0, &offset0)
+ && extract_array_ref (arg1, &base1, &offset1)
+ && operand_equal_p (base0, base1, 0))
+ {
+ if (offset0 == NULL_TREE
+ && offset1 == NULL_TREE)
+ {
+ offset0 = integer_zero_node;
+ offset1 = integer_zero_node;
+ }
+ else if (offset0 == NULL_TREE)
+ offset0 = build_int_cst (TREE_TYPE (offset1), 0);
+ else if (offset1 == NULL_TREE)
+ offset1 = build_int_cst (TREE_TYPE (offset0), 0);
+
+ if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
+ return fold (build2 (code, type, offset0, offset1));
+ }
+ }
+
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree targ0 = strip_float_extensions (arg0);
/* Comparisons with the highest or lowest possible integer of
the specified size will have known values.
- This is quite similar to fold_relational_hi_lo; however, my
- attempts to share the code have been nothing but trouble.
- I give up for now. */
+ This is quite similar to fold_relational_hi_lo; however,
+ attempts to share the code have been nothing but trouble. */
{
int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
if (TREE_CODE (arg1) == INTEGER_CST
&& ! TREE_CONSTANT_OVERFLOW (arg1)
- && width <= HOST_BITS_PER_WIDE_INT
+ && width <= 2 * HOST_BITS_PER_WIDE_INT
&& (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
|| POINTER_TYPE_P (TREE_TYPE (arg1))))
{
- unsigned HOST_WIDE_INT signed_max;
- unsigned HOST_WIDE_INT max, min;
-
- signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
+ HOST_WIDE_INT signed_max_hi;
+ unsigned HOST_WIDE_INT signed_max_lo;
+ unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
- if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ if (width <= HOST_BITS_PER_WIDE_INT)
{
- max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
- min = 0;
+ signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
+ - 1;
+ signed_max_hi = 0;
+ max_hi = 0;
+
+ if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ {
+ max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
+ min_lo = 0;
+ min_hi = 0;
+ }
+ else
+ {
+ max_lo = signed_max_lo;
+ min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
+ min_hi = -1;
+ }
}
else
{
- max = signed_max;
- min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
+ width -= HOST_BITS_PER_WIDE_INT;
+ signed_max_lo = -1;
+ signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
+ - 1;
+ max_lo = -1;
+ min_lo = 0;
+
+ if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ {
+ max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
+ min_hi = 0;
+ }
+ else
+ {
+ max_hi = signed_max_hi;
+ min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
+ }
}
- if (TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_LOW (arg1) == max)
+ if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
+ && TREE_INT_CST_LOW (arg1) == max_lo)
switch (code)
{
case GT_EXPR:
default:
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_LOW (arg1) == max - 1)
+ else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
+ == max_hi
+ && TREE_INT_CST_LOW (arg1) == max_lo - 1)
switch (code)
{
case GT_EXPR:
default:
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (arg1) == min)
+ else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
+ == min_hi
+ && TREE_INT_CST_LOW (arg1) == min_lo)
switch (code)
{
case LT_EXPR:
default:
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (arg1) == min + 1)
+ else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
+ == min_hi
+ && TREE_INT_CST_LOW (arg1) == min_lo + 1)
switch (code)
{
case GE_EXPR:
}
else if (!in_gimple_form
- && TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_LOW (arg1) == signed_max
+ && TREE_INT_CST_HIGH (arg1) == signed_max_hi
+ && TREE_INT_CST_LOW (arg1) == signed_max_lo
&& TYPE_UNSIGNED (TREE_TYPE (arg1))
/* signed_type does not work on pointer types. */
&& INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
return fold (build2 (code, type,
TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
- /* If we are widening one operand of an integer comparison,
- see if the other operand is similarly being widened. Perhaps we
- can do the comparison in the narrower type. */
else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
- && TREE_CODE (arg0) == NOP_EXPR
- && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
- && (code == EQ_EXPR || code == NE_EXPR
- || TYPE_UNSIGNED (TREE_TYPE (arg0))
- == TYPE_UNSIGNED (TREE_TYPE (tem)))
- && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
- && (TREE_TYPE (t1) == TREE_TYPE (tem)
- || (TREE_CODE (t1) == INTEGER_CST
- && int_fits_type_p (t1, TREE_TYPE (tem)))))
- return fold (build2 (code, type, tem,
- fold_convert (TREE_TYPE (tem), t1)));
+ && TREE_CODE (arg0) == NOP_EXPR)
+ {
+ /* If we are widening one operand of an integer comparison,
+ see if the other operand is similarly being widened. Perhaps we
+ can do the comparison in the narrower type. */
+ tem = fold_widened_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+
+ /* Or if we are changing signedness. */
+ tem = fold_sign_changed_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+ }
/* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
constant, we can simplify it. */
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree dandnotc
- = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- arg1, build1 (BIT_NOT_EXPR,
- TREE_TYPE (TREE_OPERAND (arg0, 1)),
- TREE_OPERAND (arg0, 1))));
+ tree notc = fold (build1 (BIT_NOT_EXPR,
+ TREE_TYPE (TREE_OPERAND (arg0, 1)),
+ TREE_OPERAND (arg0, 1)));
+ tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ arg1, notc));
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (dandnotc))
return omit_one_operand (type, rslt, arg0);
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree candnotd
- = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 1),
- build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
+ tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
+ tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ TREE_OPERAND (arg0, 1), notd));
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (candnotd))
return omit_one_operand (type, rslt, arg0);
tree arglist;
if (fndecl
- && DECL_BUILT_IN (fndecl)
- && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
&& (arglist = TREE_OPERAND (arg0, 1))
&& TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
/* We can fold X/C1 op C2 where C1 and C2 are integer constants
into a single range test. */
- if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
+ if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
+ || TREE_CODE (arg0) == EXACT_DIV_EXPR)
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !integer_zerop (TREE_OPERAND (arg0, 1))
for reasons given above each one.
Also try swapping the arguments and inverting the conditional. */
- if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
+ if (COMPARISON_CLASS_P (arg0)
&& operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
arg1, TREE_OPERAND (arg0, 1))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
return tem;
}
- if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
+ if (COMPARISON_CLASS_P (arg0)
&& operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
TREE_OPERAND (t, 2),
TREE_OPERAND (arg0, 1))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
{
tem = invert_truthvalue (arg0);
- if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
+ if (COMPARISON_CLASS_P (tem))
{
tem = fold_cond_expr_with_comparison (type, tem,
TREE_OPERAND (t, 2),
return;
*slot = expr;
code = TREE_CODE (expr);
- if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
+ if (TREE_CODE_CLASS (code) == tcc_declaration
+ && DECL_ASSEMBLER_NAME_SET_P (expr))
{
/* Allow DECL_ASSEMBLER_NAME to be modified. */
memcpy (buf, expr, tree_size (expr));
expr = (tree) buf;
SET_DECL_ASSEMBLER_NAME (expr, NULL);
}
- else if (TREE_CODE_CLASS (code) == 't'
+ else if (TREE_CODE_CLASS (code) == tcc_type
&& (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
|| TYPE_CACHED_VALUES_P (expr)))
{
}
md5_process_bytes (expr, tree_size (expr), ctx);
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
- if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
+ if (TREE_CODE_CLASS (code) != tcc_type
+ && TREE_CODE_CLASS (code) != tcc_declaration)
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
switch (TREE_CODE_CLASS (code))
{
- case 'c':
+ case tcc_constant:
switch (code)
{
case STRING_CST:
break;
}
break;
- case 'x':
+ case tcc_exceptional:
switch (code)
{
case TREE_LIST:
break;
}
break;
- case 'e':
- case 'r':
- case '<':
- case '1':
- case '2':
- case 's':
- len = first_rtl_op (code);
+ case tcc_expression:
+ case tcc_reference:
+ case tcc_comparison:
+ case tcc_unary:
+ case tcc_binary:
+ case tcc_statement:
+ len = TREE_CODE_LENGTH (code);
for (i = 0; i < len; ++i)
fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
break;
- case 'd':
+ case tcc_declaration:
fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
fold_checksum_tree (DECL_NAME (expr), ctx, ht);
fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
break;
- case 't':
+ case tcc_type:
if (TREE_CODE (expr) == ENUMERAL_TYPE)
fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
{
int saved_signaling_nans = flag_signaling_nans;
int saved_trapping_math = flag_trapping_math;
+ int saved_rounding_math = flag_rounding_math;
int saved_trapv = flag_trapv;
tree result;
flag_signaling_nans = 0;
flag_trapping_math = 0;
+ flag_rounding_math = 0;
flag_trapv = 0;
result = fold (expr);
flag_signaling_nans = saved_signaling_nans;
flag_trapping_math = saved_trapping_math;
+ flag_rounding_math = saved_rounding_math;
flag_trapv = saved_trapv;
return result;
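/* Illustrative note, not part of the patch: the flags are cleared around
   the call to fold so that expressions in static initializers are still
   folded when options such as -frounding-math or -ftrapping-math are in
   effect; e.g.

     static double d = 1.0 / 3.0;

   must be evaluated at compile time regardless of the run-time rounding
   mode.  */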
switch (TREE_CODE (top))
{
+ case BIT_AND_EXPR:
+ /* Bitwise and provides a power of two multiple. If the mask is
+ a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
+ if (!integer_pow2p (bottom))
+ return 0;
+ /* FALLTHRU */
+
case MULT_EXPR:
return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
|| multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
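/* Illustrative example, not part of the patch: with BOTTOM == 8 and
   TOP == (n & 24), BOTTOM is a power of two and the mask 24 is itself a
   multiple of 8, so every possible value of TOP (0, 8, 16 or 24) is a
   multiple of BOTTOM; the fall-through into the MULT_EXPR test above
   detects exactly this.  */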
{
tree fndecl = get_callee_fndecl (t);
tree arglist = TREE_OPERAND (t, 1);
- if (fndecl
- && DECL_BUILT_IN (fndecl)
- && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
+ if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (fndecl))
{
#define CASE_BUILTIN_F(BUILT_IN_FN) \
/* Return true when T is an address and is known to be nonzero.
For floating point we further ensure that T is not denormal.
- Similar logic is present in nonzero_address in rtlanal.h */
+ Similar logic is present in nonzero_address in rtlanal.h. */
static bool
tree_expr_nonzero_p (tree t)
return !DECL_WEAK (base);
/* Constants are never weak. */
- if (TREE_CODE_CLASS (TREE_CODE (base)) == 'c')
+ if (CONSTANT_CLASS_P (base))
return true;
return false;
fold_convert (st0, op0),
fold_convert (st1, integer_zero_node));
- retval
- = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
- TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- TREE_OPERAND (exp, 1));
+ retval = fold_binary_to_constant (TREE_CODE (exp),
+ TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ TREE_OPERAND (exp, 1));
/* If we are in gimple form, then returning EXP would create
non-gimple expressions. Clearing it is safe and insures
simpler than the generic fold routine. */
tree
-nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
- tree op0, tree op1)
+fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
int wins = 1;
tree subop0;
the generic fold routine. */
tree
-nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
- tree op0)
+fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
/* Make sure we have a suitable constant argument. */
if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
TREE_CONSTANT_OVERFLOW (arg0));
break;
}
-
+
case REAL_CST:
t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
break;
default:
gcc_unreachable ();
}
-
+
return t;
}
TREE_CONSTANT_OVERFLOW (arg0));
}
break;
-
+
case REAL_CST:
if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
else
t = arg0;
break;
-
+
default:
gcc_unreachable ();
}
-
+
return t;
}
tree t = NULL_TREE;
gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
-
+
t = build_int_cst_wide (type,
~ TREE_INT_CST_LOW (arg0),
~ TREE_INT_CST_HIGH (arg0));
t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
TREE_CONSTANT_OVERFLOW (arg0));
-
+
return t;
}
return constant_boolean_node (result, type);
}
+/* Build an expression for a cleanup point containing EXPR with type TYPE.
+ Don't build a cleanup point expression for an EXPR which doesn't have
+ side effects. */
+
+tree
+fold_build_cleanup_point_expr (tree type, tree expr)
+{
+ /* If the expression does not have side effects then we don't have to wrap
+ it with a cleanup point expression. */
+ if (!TREE_SIDE_EFFECTS (expr))
+ return expr;
+
+ /* If the expression is a return, check whether the expression inside the
+ return, or the right-hand side of the MODIFY_EXPR inside the return, has
+ no side effects. If either has none, we don't need to wrap the expression
+ in a cleanup point expression. Note we don't check the left-hand side of
+ the MODIFY_EXPR because it should always be the return decl. */
+ if (TREE_CODE (expr) == RETURN_EXPR)
+ {
+ tree op = TREE_OPERAND (expr, 0);
+ if (!op || !TREE_SIDE_EFFECTS (op))
+ return expr;
+ op = TREE_OPERAND (op, 1);
+ if (!TREE_SIDE_EFFECTS (op))
+ return expr;
+ }
+
+ return build1 (CLEANUP_POINT_EXPR, type, expr);
+}
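/* Illustrative sketch, not part of the patch: a hypothetical caller wrapping
   a full statement.  The helper name is made up for illustration;
   void_type_node is the usual type for a statement-level expression.  */

static tree
wrap_stmt_in_cleanup_point (tree stmt)
{
  /* Side-effect-free statements and returns of side-effect-free values are
     passed through unwrapped by the early exits above.  */
  return fold_build_cleanup_point_expr (void_type_node, stmt);
}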
+
/* Build an expression for the address of T. Folds away INDIRECT_REF to
avoid confusing the gimplify process. */
if (TREE_CODE (t) == WITH_SIZE_EXPR)
t = TREE_OPERAND (t, 0);
- if (TREE_CODE (t) == INDIRECT_REF)
+ /* Note: this doesn't apply to ALIGN_INDIRECT_REF. */
+ if (TREE_CODE (t) == INDIRECT_REF
+ || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
{
t = TREE_OPERAND (t, 0);
if (TREE_TYPE (t) != ptrtype)
{
tree base = t;
- while (handled_component_p (base)
- || TREE_CODE (base) == REALPART_EXPR
- || TREE_CODE (base) == IMAGPART_EXPR)
+ while (handled_component_p (base))
base = TREE_OPERAND (base, 0);
if (DECL_P (base))
TREE_ADDRESSABLE (base) = 1;
for (;;)
switch (TREE_CODE_CLASS (TREE_CODE (t)))
{
- case '1':
+ case tcc_unary:
t = TREE_OPERAND (t, 0);
break;
- case '2':
- case '<':
+ case tcc_binary:
+ case tcc_comparison:
if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
t = TREE_OPERAND (t, 0);
else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
return t;
break;
- case 'e':
+ case tcc_expression:
switch (TREE_CODE (t))
{
case COMPOUND_EXPR:
if (divisor == (divisor & -divisor))
{
tree t;
-
+
t = build_int_cst (TREE_TYPE (value), divisor - 1);
value = size_binop (PLUS_EXPR, value, t);
t = build_int_cst (TREE_TYPE (value), -divisor);
if (divisor == (divisor & -divisor))
{
tree t;
-
+
t = build_int_cst (TREE_TYPE (value), -divisor);
value = size_binop (BIT_AND_EXPR, value, t);
}
return value;
}
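/* Illustrative note, not part of the patch: for a power-of-two DIVISOR the
   two size_binop sequences above compute the familiar mask-based rounding,
   e.g. with VALUE == 21 and DIVISOR == 8:

     round up:    (21 + 7) & -8  ==  24
     round down:   21      & -8  ==  16

   The non-power-of-two path (not shown in this excerpt) falls back to an
   explicit division and multiplication.  */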
+
+/* Returns the pointer to the base of the object addressed by EXP and
+ extracts the information about the offset of the access, storing it
+ in *PBITPOS and *POFFSET. */
+
+static tree
+split_address_to_core_and_offset (tree exp,
+ HOST_WIDE_INT *pbitpos, tree *poffset)
+{
+ tree core;
+ enum machine_mode mode;
+ int unsignedp, volatilep;
+ HOST_WIDE_INT bitsize;
+
+ if (TREE_CODE (exp) == ADDR_EXPR)
+ {
+ core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
+ poffset, &mode, &unsignedp, &volatilep,
+ false);
+
+ if (TREE_CODE (core) == INDIRECT_REF)
+ core = TREE_OPERAND (core, 0);
+ }
+ else
+ {
+ core = exp;
+ *pbitpos = 0;
+ *poffset = NULL_TREE;
+ }
+
+ return core;
+}
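/* Illustrative example, not part of the patch: for

     struct { int a; int b; } s;

   and EXP == &s.b, the core returned is `s', *PBITPOS is the bit offset of
   `b' (32 with 4-byte int) and *POFFSET is NULL_TREE.  When EXP is not an
   ADDR_EXPR, EXP itself is returned and both offsets are cleared.  */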
+
+/* Returns true if addresses of E1 and E2 differ by a constant, false
+ otherwise. If they do, E1 - E2 is stored in *DIFF. */
+
+bool
+ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
+{
+ tree core1, core2;
+ HOST_WIDE_INT bitpos1, bitpos2;
+ tree toffset1, toffset2, tdiff, type;
+
+ core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
+ core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
+
+ if (bitpos1 % BITS_PER_UNIT != 0
+ || bitpos2 % BITS_PER_UNIT != 0
+ || !operand_equal_p (core1, core2, 0))
+ return false;
+
+ if (toffset1 && toffset2)
+ {
+ type = TREE_TYPE (toffset1);
+ if (type != TREE_TYPE (toffset2))
+ toffset2 = fold_convert (type, toffset2);
+
+ tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
+ if (!host_integerp (tdiff, 0))
+ return false;
+
+ *diff = tree_low_cst (tdiff, 0);
+ }
+ else if (toffset1 || toffset2)
+ {
+ /* If only one of the offsets is non-constant, the difference cannot
+ be a constant. */
+ return false;
+ }
+ else
+ *diff = 0;
+
+ *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
+ return true;
+}
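/* Illustrative sketch, not part of the patch: a hypothetical use of
   ptr_difference_const.  ADDR1 and ADDR2 are assumed to be ADDR_EXPR trees;
   for `int a[10]' and the addresses &a[5] and &a[2], *DELTA would be set to
   12 on a target with 4-byte int.  */

static bool
addresses_differ_by_constant_p (tree addr1, tree addr2, HOST_WIDE_INT *delta)
{
  /* False is returned when the two addresses are not based on the same
     object or when their offsets do not fold to a compile-time constant.  */
  return ptr_difference_const (addr1, addr2, delta);
}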