/* Fold a constant sub-tree into a single node for C-compiler
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
-static hashval_t size_htab_hash (const void *);
-static int size_htab_eq (const void *, const void *);
-static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
-static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
- tree, int);
+static tree fold_binary_op_with_conditional_arg (tree, enum tree_code,
+ tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
tree, tree, tree);
indicates whether constant overflow has already occurred. We force
T's value to be within range of T's type (by setting to 0 or 1 all
the bits outside the type's range). We set TREE_OVERFLOWED if,
- OVERFLOWED is non-zero,
+ OVERFLOWED is nonzero,
or OVERFLOWABLE is >0 and signed overflow occurs
or OVERFLOWABLE is <0 and any overflow occurs
We set TREE_CONSTANT_OVERFLOWED if,
- CONST_OVERFLOWED is non-zero
+ CONST_OVERFLOWED is nonzero
or we set TREE_OVERFLOWED.
We return either the original T, or a copy. */
tree
-force_fit_type (tree t, int overflowable, bool overflowed, bool overflowed_const)
+force_fit_type (tree t, int overflowable,
+ bool overflowed, bool overflowed_const)
{
unsigned HOST_WIDE_INT low;
HOST_WIDE_INT high;
unsigned int prec;
int sign_extended_type;
- if (TREE_CODE (t) != INTEGER_CST)
- abort ();
-
+ gcc_assert (TREE_CODE (t) == INTEGER_CST);
+
low = TREE_INT_CST_LOW (t);
high = TREE_INT_CST_HIGH (t);
/* First clear all bits that are beyond the type's precision. */
- if (prec == 2 * HOST_BITS_PER_WIDE_INT)
+ if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
;
else if (prec > HOST_BITS_PER_WIDE_INT)
high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
if (!sign_extended_type)
/* No sign extension */;
- else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
+ else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
/* Correct width already. */;
else if (prec > HOST_BITS_PER_WIDE_INT)
{
if (overflowed || overflowed_const
|| low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
{
+ t = build_int_cst_wide (TREE_TYPE (t), low, high);
+
if (overflowed
|| overflowable < 0
|| (overflowable > 0 && sign_extended_type))
{
+ t = copy_node (t);
TREE_OVERFLOW (t) = 1;
TREE_CONSTANT_OVERFLOW (t) = 1;
}
else if (overflowed_const)
- TREE_CONSTANT_OVERFLOW (t) = 1;
-
- TREE_INT_CST_LOW (t) = low;
- TREE_INT_CST_HIGH (t) = high;
+ {
+ t = copy_node (t);
+ TREE_CONSTANT_OVERFLOW (t) = 1;
+ }
}
-
+
return t;
}
\f
break;
default:
- abort ();
+ gcc_unreachable ();
}
/* Compute true remainder: rem = num - (quo * den) */
return false;
}
+/* Check whether we may negate an integer constant T without causing
+   overflow.  */
+
+bool
+may_negate_without_overflow_p (tree t)
+{
+ unsigned HOST_WIDE_INT val;
+ unsigned int prec;
+ tree type;
+
+ gcc_assert (TREE_CODE (t) == INTEGER_CST);
+
+ /* Unsigned constants are never reported as safely negatable.  */
+ type = TREE_TYPE (t);
+ if (TYPE_UNSIGNED (type))
+ return false;
+
+ /* Reduce to the single HOST_WIDE_INT word that holds the sign bit.  */
+ prec = TYPE_PRECISION (type);
+ if (prec > HOST_BITS_PER_WIDE_INT)
+ {
+ /* A nonzero low word means T cannot be the most negative value,
+ so its negation is representable.  */
+ if (TREE_INT_CST_LOW (t) != 0)
+ return true;
+ prec -= HOST_BITS_PER_WIDE_INT;
+ val = TREE_INT_CST_HIGH (t);
+ }
+ else
+ val = TREE_INT_CST_LOW (t);
+ /* Mask off any bits beyond the type's precision.  */
+ if (prec < HOST_BITS_PER_WIDE_INT)
+ val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
+ /* Negation overflows only for the most negative value, whose
+ representation within the precision is just the sign bit.  */
+ return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
+}
+
+
/* Determine whether an expression T can be cheaply negated using
the function negate_expr. */
static bool
negate_expr_p (tree t)
{
- unsigned HOST_WIDE_INT val;
- unsigned int prec;
tree type;
if (t == 0)
return true;
/* Check that -CST will not overflow type. */
- prec = TYPE_PRECISION (type);
- if (prec > HOST_BITS_PER_WIDE_INT)
- {
- if (TREE_INT_CST_LOW (t) != 0)
- return true;
- prec -= HOST_BITS_PER_WIDE_INT;
- val = TREE_INT_CST_HIGH (t);
- }
- else
- val = TREE_INT_CST_LOW (t);
- if (prec < HOST_BITS_PER_WIDE_INT)
- val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
- return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
+ return may_negate_without_overflow_p (t);
case REAL_CST:
case NEGATE_EXPR:
else if (TREE_CODE (t2) == NEGATE_EXPR)
return build2 (MINUS_EXPR, type, fold_convert (type, t1),
fold_convert (type, TREE_OPERAND (t2, 0)));
+ else if (integer_zerop (t2))
+ return fold_convert (type, t1);
}
+ else if (code == MINUS_EXPR)
+ {
+ if (integer_zerop (t2))
+ return fold_convert (type, t1);
+ }
+
return build2 (code, type, fold_convert (type, t1),
fold_convert (type, t2));
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
- /* If this is for a sizetype, can be represented as one (signed)
- HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
- constants. */
- if (is_sizetype
- && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
- || (hi == -1 && (HOST_WIDE_INT) low < 0))
- && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
- return size_int_type (low, type);
- else
- {
- t = build_int_2 (low, hi);
- TREE_TYPE (t) = TREE_TYPE (arg1);
- }
+ t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
if (notrunc)
{
/* Propagate overflow flags ourselves. */
if (((!uns || is_sizetype) && overflow)
| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
- TREE_OVERFLOW (t) = 1;
-
- if (TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1)
- | TREE_CONSTANT_OVERFLOW (arg2))
- TREE_CONSTANT_OVERFLOW (t) = 1;
+ {
+ t = copy_node (t);
+ TREE_OVERFLOW (t) = 1;
+ TREE_CONSTANT_OVERFLOW (t) = 1;
+ }
+ else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
+ {
+ t = copy_node (t);
+ TREE_CONSTANT_OVERFLOW (t) = 1;
+ }
}
else
t = force_fit_type (t, 1,
| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
TREE_CONSTANT_OVERFLOW (arg1)
| TREE_CONSTANT_OVERFLOW (arg2));
-
+
return t;
}
REAL_VALUE_TYPE d1;
REAL_VALUE_TYPE d2;
REAL_VALUE_TYPE value;
+ REAL_VALUE_TYPE result;
+ bool inexact;
tree t, type;
d1 = TREE_REAL_CST (arg1);
else if (REAL_VALUE_ISNAN (d2))
return arg2;
- REAL_ARITHMETIC (value, code, d1, d2);
+ inexact = real_arithmetic (&value, code, &d1, &d2);
+ real_convert (&result, mode, &value);
- t = build_real (type, real_value_truncate (mode, value));
+ /* Don't constant fold this floating point operation if the
+ result may depend upon the run-time rounding mode and
+ flag_rounding_math is set, or if GCC's software emulation
+ is unable to accurately represent the result. */
+
+ if ((flag_rounding_math
+ || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
+ && !flag_unsafe_math_optimizations))
+ && (inexact || !real_identical (&result, &value)))
+ return NULL_TREE;
+
+ t = build_real (type, result);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
TREE_CONSTANT_OVERFLOW (t)
break;
default:
- abort ();
+ gcc_unreachable ();
}
return t;
}
return 0;
}
-/* These are the hash table functions for the hash table of INTEGER_CST
- nodes of a sizetype. */
-
-/* Return the hash code code X, an INTEGER_CST. */
-
-static hashval_t
-size_htab_hash (const void *x)
-{
- tree t = (tree) x;
-
- return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
- ^ htab_hash_pointer (TREE_TYPE (t))
- ^ (TREE_OVERFLOW (t) << 20));
-}
-
-/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
- is the same as that given by *Y, which is the same. */
-
-static int
-size_htab_eq (const void *x, const void *y)
-{
- tree xt = (tree) x;
- tree yt = (tree) y;
-
- return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
- && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
- && TREE_TYPE (xt) == TREE_TYPE (yt)
- && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
-}
-\f
-/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
- bits are given by NUMBER and of the sizetype represented by KIND. */
+/* Create a size type INT_CST node with NUMBER sign extended. KIND
+ indicates which particular sizetype to create. */
tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
- return size_int_type (number, sizetype_tab[(int) kind]);
-}
-
-/* Likewise, but the desired type is specified explicitly. */
-
-static GTY (()) tree new_const;
-static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
- htab_t size_htab;
-
-tree
-size_int_type (HOST_WIDE_INT number, tree type)
-{
- void **slot;
- unsigned int prec;
- HOST_WIDE_INT high;
- unsigned HOST_WIDE_INT low;
-
- if (size_htab == 0)
- {
- size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
- new_const = make_node (INTEGER_CST);
- }
-
- /* Adjust NEW_CONST to be the constant we want. If it's already in the
- hash table, we return the value from the hash table. Otherwise, we
- place that in the hash table and make a new node for the next time. */
- prec = TYPE_PRECISION (type);
- TREE_TYPE (new_const) = type;
- TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const) = 0;
- low = number;
- if (number >= 0)
- high = 0;
- else
- {
- /* Sizetype IS sign extended. */
- high = -1;
- if (prec <= HOST_BITS_PER_WIDE_INT)
- low |= (HOST_WIDE_INT)(-1) << (prec - 1);
- }
- TREE_INT_CST_LOW (new_const) = low;
- TREE_INT_CST_HIGH (new_const) = high;
-
- if (low != (unsigned HOST_WIDE_INT)number
- || high != (number < 0 ? -1 : 0))
- TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const) = 1;
-
- slot = htab_find_slot (size_htab, new_const, INSERT);
- if (*slot == 0)
- {
- tree t = new_const;
-
- *slot = new_const;
- new_const = make_node (INTEGER_CST);
- return t;
- }
- else
- return (tree) *slot;
+ return build_int_cst (sizetype_tab[(int) kind], number);
}
-
+\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
is a tree code. The type of the result is taken from the operands.
Both must be the same type integer type and it must be a size type.
{
tree type = TREE_TYPE (arg0);
- if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
- || type != TREE_TYPE (arg1))
- abort ();
+ gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
+ && type == TREE_TYPE (arg1));
/* Handle the special case of two integer constants faster. */
if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
tree type = TREE_TYPE (arg0);
tree ctype;
- if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
- || type != TREE_TYPE (arg1))
- abort ();
+ gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
+ && type == TREE_TYPE (arg1));
/* If the type is already signed, just do the simple thing. */
if (!TYPE_UNSIGNED (type))
return size_binop (MINUS_EXPR, arg0, arg1);
- ctype = (type == bitsizetype || type == ubitsizetype
- ? sbitsizetype : ssizetype);
+ ctype = type == bitsizetype ? sbitsizetype : ssizetype;
/* If either operand is not a constant, do the conversions to the signed
type and subtract. The hardware will do the right thing with any
arg1, arg0)));
}
\f
+/* A subroutine of fold_convert_const handling conversions of an
+ INTEGER_CST to another integer type. */
-/* Attempt to fold type conversion operation CODE of expression ARG1 to
- type TYPE. If no simplification can be done return NULL_TREE. */
+static tree
+fold_convert_const_int_from_int (tree type, tree arg1)
+{
+ tree t;
+
+ /* Given an integer constant, make new constant with new type,
+ appropriately sign-extended or truncated.  */
+ t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1));
+
+ /* The OVERFLOWED argument below flags the case where a value with
+ the high bit set in an unsigned source type is converted to a
+ signed target type, which cannot represent it.  */
+ t = force_fit_type (t,
+ /* Don't set the overflow when
+ converting a pointer */
+ !POINTER_TYPE_P (TREE_TYPE (arg1)),
+ (TREE_INT_CST_HIGH (arg1) < 0
+ && (TYPE_UNSIGNED (type)
+ < TYPE_UNSIGNED (TREE_TYPE (arg1))))
+ | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
+
+ return t;
+}
+
+
+/* A subroutine of fold_convert_const handling conversions a REAL_CST
+ to an integer type. */
static tree
-fold_convert_const (enum tree_code code, tree type, tree arg1)
+fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
int overflow = 0;
tree t;
- if (TREE_TYPE (arg1) == type)
- return arg1;
+ /* The following code implements the floating point to integer
+ conversion rules required by the Java Language Specification,
+ that IEEE NaNs are mapped to zero and values that overflow
+ the target precision saturate, i.e. values greater than
+ INT_MAX are mapped to INT_MAX, and values less than INT_MIN
+ are mapped to INT_MIN. These semantics are allowed by the
+ C and C++ standards that simply state that the behavior of
+ FP-to-integer conversion is unspecified upon overflow. */
- if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ HOST_WIDE_INT high, low;
+ REAL_VALUE_TYPE r;
+ REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
+
+ switch (code)
{
- if (TREE_CODE (arg1) == INTEGER_CST)
- {
- /* If we would build a constant wider than GCC supports,
- leave the conversion unfolded. */
- if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
- return NULL_TREE;
-
- /* If we are trying to make a sizetype for a small integer, use
- size_int to pick up cached types to reduce duplicate nodes. */
- if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
- && !TREE_CONSTANT_OVERFLOW (arg1)
- && compare_tree_int (arg1, 10000) < 0)
- return size_int_type (TREE_INT_CST_LOW (arg1), type);
-
- /* Given an integer constant, make new constant with new type,
- appropriately sign-extended or truncated. */
- t = build_int_2 (TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
- TREE_TYPE (t) = type;
-
- t = force_fit_type (t,
- /* Don't set the overflow when
- converting a pointer */
- !POINTER_TYPE_P (TREE_TYPE (arg1)),
- (TREE_INT_CST_HIGH (arg1) < 0
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1))))
- | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- return t;
- }
- else if (TREE_CODE (arg1) == REAL_CST)
- {
- /* The following code implements the floating point to integer
- conversion rules required by the Java Language Specification,
- that IEEE NaNs are mapped to zero and values that overflow
- the target precision saturate, i.e. values greater than
- INT_MAX are mapped to INT_MAX, and values less than INT_MIN
- are mapped to INT_MIN. These semantics are allowed by the
- C and C++ standards that simply state that the behavior of
- FP-to-integer conversion is unspecified upon overflow. */
+ case FIX_TRUNC_EXPR:
+ real_trunc (&r, VOIDmode, &x);
+ break;
- HOST_WIDE_INT high, low;
- REAL_VALUE_TYPE r;
- REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
+ case FIX_CEIL_EXPR:
+ real_ceil (&r, VOIDmode, &x);
+ break;
- switch (code)
- {
- case FIX_TRUNC_EXPR:
- real_trunc (&r, VOIDmode, &x);
- break;
+ case FIX_FLOOR_EXPR:
+ real_floor (&r, VOIDmode, &x);
+ break;
- case FIX_CEIL_EXPR:
- real_ceil (&r, VOIDmode, &x);
- break;
+ case FIX_ROUND_EXPR:
+ real_round (&r, VOIDmode, &x);
+ break;
- case FIX_FLOOR_EXPR:
- real_floor (&r, VOIDmode, &x);
- break;
+ default:
+ gcc_unreachable ();
+ }
- case FIX_ROUND_EXPR:
- real_round (&r, VOIDmode, &x);
- break;
+ /* If R is NaN, return zero and show we have an overflow. */
+ if (REAL_VALUE_ISNAN (r))
+ {
+ overflow = 1;
+ high = 0;
+ low = 0;
+ }
- default:
- abort ();
- }
+ /* See if R is less than the lower bound or greater than the
+ upper bound. */
+
+ if (! overflow)
+ {
+ tree lt = TYPE_MIN_VALUE (type);
+ REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
+ if (REAL_VALUES_LESS (r, l))
+ {
+ overflow = 1;
+ high = TREE_INT_CST_HIGH (lt);
+ low = TREE_INT_CST_LOW (lt);
+ }
+ }
- /* If R is NaN, return zero and show we have an overflow. */
- if (REAL_VALUE_ISNAN (r))
+ if (! overflow)
+ {
+ tree ut = TYPE_MAX_VALUE (type);
+ if (ut)
+ {
+ REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
+ if (REAL_VALUES_LESS (u, r))
{
overflow = 1;
- high = 0;
- low = 0;
+ high = TREE_INT_CST_HIGH (ut);
+ low = TREE_INT_CST_LOW (ut);
}
+ }
+ }
- /* See if R is less than the lower bound or greater than the
- upper bound. */
+ if (! overflow)
+ REAL_VALUE_TO_INT (&low, &high, r);
- if (! overflow)
- {
- tree lt = TYPE_MIN_VALUE (type);
- REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
- if (REAL_VALUES_LESS (r, l))
- {
- overflow = 1;
- high = TREE_INT_CST_HIGH (lt);
- low = TREE_INT_CST_LOW (lt);
- }
- }
+ t = build_int_cst_wide (type, low, high);
- if (! overflow)
- {
- tree ut = TYPE_MAX_VALUE (type);
- if (ut)
- {
- REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
- if (REAL_VALUES_LESS (u, r))
- {
- overflow = 1;
- high = TREE_INT_CST_HIGH (ut);
- low = TREE_INT_CST_LOW (ut);
- }
- }
- }
+ t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
+ return t;
+}
- if (! overflow)
- REAL_VALUE_TO_INT (&low, &high, r);
+/* A subroutine of fold_convert_const handling conversions a REAL_CST
+ to another floating point type. */
- t = build_int_2 (low, high);
- TREE_TYPE (t) = type;
+static tree
+fold_convert_const_real_from_real (tree type, tree arg1)
+{
+ REAL_VALUE_TYPE value;
+ tree t;
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- return t;
- }
+ real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
+ t = build_real (type, value);
+
+ TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
+ TREE_CONSTANT_OVERFLOW (t)
+ = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
+ return t;
+}
+
+/* Attempt to fold type conversion operation CODE of expression ARG1 to
+ type TYPE. If no simplification can be done return NULL_TREE. */
+
+static tree
+fold_convert_const (enum tree_code code, tree type, tree arg1)
+{
+ if (TREE_TYPE (arg1) == type)
+ return arg1;
+
+ if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ {
+ if (TREE_CODE (arg1) == INTEGER_CST)
+ return fold_convert_const_int_from_int (type, arg1);
+ else if (TREE_CODE (arg1) == REAL_CST)
+ return fold_convert_const_int_from_real (code, type, arg1);
}
else if (TREE_CODE (type) == REAL_TYPE)
{
if (TREE_CODE (arg1) == INTEGER_CST)
return build_real_from_int_cst (type, arg1);
if (TREE_CODE (arg1) == REAL_CST)
- {
- if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
- {
- /* We make a copy of ARG1 so that we don't modify an
- existing constant tree. */
- t = copy_node (arg1);
- TREE_TYPE (t) = type;
- return t;
- }
-
- t = build_real (type,
- real_value_truncate (TYPE_MODE (type),
- TREE_REAL_CST (arg1)));
-
- TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
- return t;
- }
+ return fold_convert_const_real_from_real (type, arg1);
}
return NULL_TREE;
}
+/* Construct a vector of zero elements of vector type TYPE.  */
+
+static tree
+build_zero_vector (tree type)
+{
+ tree elem, list;
+ int i, units;
+
+ /* Fold integer zero to the vector's element type so every lane
+ carries a constant of the correct type.  */
+ elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
+ units = TYPE_VECTOR_SUBPARTS (type);
+
+ /* Build one TREE_LIST node per vector lane, all sharing ELEM.  */
+ list = NULL_TREE;
+ for (i = 0; i < units; i++)
+ list = tree_cons (NULL_TREE, elem, list);
+ return build_vector (type, list);
+}
+
+
/* Convert expression ARG to type TYPE. Used by the middle-end for
simple conversions in preference to calling the front-end's convert. */
TYPE_MAIN_VARIANT (orig)))
return fold (build1 (NOP_EXPR, type, arg));
- if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
- || TREE_CODE (type) == OFFSET_TYPE)
+ switch (TREE_CODE (type))
{
+ case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
+ case POINTER_TYPE: case REFERENCE_TYPE:
+ case OFFSET_TYPE:
if (TREE_CODE (arg) == INTEGER_CST)
{
tem = fold_convert_const (NOP_EXPR, type, arg);
tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
return fold_convert (type, tem);
}
- if (TREE_CODE (orig) == VECTOR_TYPE
- && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
- return fold (build1 (NOP_EXPR, type, arg));
- }
- else if (TREE_CODE (type) == REAL_TYPE)
- {
+ gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
+ && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
+ return fold (build1 (NOP_EXPR, type, arg));
+
+ case REAL_TYPE:
if (TREE_CODE (arg) == INTEGER_CST)
{
tem = fold_convert_const (FLOAT_EXPR, type, arg);
return tem;
}
- if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
- return fold (build1 (FLOAT_EXPR, type, arg));
- if (TREE_CODE (orig) == REAL_TYPE)
- return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
- type, arg));
- if (TREE_CODE (orig) == COMPLEX_TYPE)
+ switch (TREE_CODE (orig))
{
+ case INTEGER_TYPE: case CHAR_TYPE:
+ case BOOLEAN_TYPE: case ENUMERAL_TYPE:
+ case POINTER_TYPE: case REFERENCE_TYPE:
+ return fold (build1 (FLOAT_EXPR, type, arg));
+
+ case REAL_TYPE:
+ return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
+ type, arg));
+
+ case COMPLEX_TYPE:
tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
return fold_convert (type, tem);
+
+ default:
+ gcc_unreachable ();
}
- }
- else if (TREE_CODE (type) == COMPLEX_TYPE)
- {
- if (INTEGRAL_TYPE_P (orig)
- || POINTER_TYPE_P (orig)
- || TREE_CODE (orig) == REAL_TYPE)
- return build2 (COMPLEX_EXPR, type,
- fold_convert (TREE_TYPE (type), arg),
- fold_convert (TREE_TYPE (type), integer_zero_node));
- if (TREE_CODE (orig) == COMPLEX_TYPE)
+
+ case COMPLEX_TYPE:
+ switch (TREE_CODE (orig))
{
- tree rpart, ipart;
+ case INTEGER_TYPE: case CHAR_TYPE:
+ case BOOLEAN_TYPE: case ENUMERAL_TYPE:
+ case POINTER_TYPE: case REFERENCE_TYPE:
+ case REAL_TYPE:
+ return build2 (COMPLEX_EXPR, type,
+ fold_convert (TREE_TYPE (type), arg),
+ fold_convert (TREE_TYPE (type), integer_zero_node));
+ case COMPLEX_TYPE:
+ {
+ tree rpart, ipart;
- if (TREE_CODE (arg) == COMPLEX_EXPR)
- {
- rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
- ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
- return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
- }
+ if (TREE_CODE (arg) == COMPLEX_EXPR)
+ {
+ rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
+ ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
+ return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
+ }
- arg = save_expr (arg);
- rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
- ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
- rpart = fold_convert (TREE_TYPE (type), rpart);
- ipart = fold_convert (TREE_TYPE (type), ipart);
- return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
+ arg = save_expr (arg);
+ rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
+ ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
+ rpart = fold_convert (TREE_TYPE (type), rpart);
+ ipart = fold_convert (TREE_TYPE (type), ipart);
+ return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
+ }
+
+ default:
+ gcc_unreachable ();
}
+
+ case VECTOR_TYPE:
+ if (integer_zerop (arg))
+ return build_zero_vector (type);
+ gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
+ gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
+ || TREE_CODE (orig) == VECTOR_TYPE);
+ return fold (build1 (NOP_EXPR, type, arg));
+
+ case VOID_TYPE:
+ return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
+
+ default:
+ gcc_unreachable ();
}
- else if (TREE_CODE (type) == VECTOR_TYPE)
- {
- if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
- && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
- return fold (build1 (NOP_EXPR, type, arg));
- if (TREE_CODE (orig) == VECTOR_TYPE
- && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
- return fold (build1 (NOP_EXPR, type, arg));
- }
- else if (VOID_TYPE_P (type))
- return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
- abort ();
}
\f
/* Return an expr equal to X but certainly not valid as an lvalue. */
tree
non_lvalue (tree x)
{
+ /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
+ us. */
+ if (in_gimple_form)
+ return x;
+
/* We only need to wrap lvalue tree codes. */
switch (TREE_CODE (x))
{
case COMPONENT_REF:
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
case ARRAY_REF:
case ARRAY_RANGE_REF:
case BIT_FIELD_REF:
/* When pedantic, return an expr equal to X but certainly not valid as a
pedantic lvalue. Otherwise, return X. */
-tree
+static tree
pedantic_non_lvalue (tree x)
{
if (pedantic_lvalues)
case UNORDERED_EXPR:
return ORDERED_EXPR;
default:
- abort ();
+ gcc_unreachable ();
}
}
case LE_EXPR:
return GE_EXPR;
default:
- abort ();
+ gcc_unreachable ();
}
}
case UNGE_EXPR:
return COMPCODE_UNGE;
default:
- abort ();
+ gcc_unreachable ();
}
}
case COMPCODE_UNGE:
return UNGE_EXPR;
default:
- abort ();
+ gcc_unreachable ();
}
}
static int
truth_value_p (enum tree_code code)
{
- return (TREE_CODE_CLASS (code) == '<'
+ return (TREE_CODE_CLASS (code) == tcc_comparison
|| code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
|| code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
|| code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
- /* If one is specified and the other isn't, they aren't equal and if
- neither is specified, they are.
-
- ??? This is temporary and is meant only to handle the cases of the
- optional operands for COMPONENT_REF and ARRAY_REF. */
- if ((arg0 && !arg1) || (!arg0 && arg1))
- return 0;
- else if (!arg0 && !arg1)
- return 1;
/* If either is ERROR_MARK, they aren't equal. */
- else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
+ if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
return 0;
/* If both types don't have the same signedness, then we can't consider
if (flags & OEP_ONLY_CONST)
return 0;
+/* Define macros to test an operand from arg0 and arg1 for equality and a
+ variant that allows null and views null as being different from any
+ non-null value. In the latter case, if either is null, then both
+ must be; otherwise, do the normal comparison. */
+#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
+ TREE_OPERAND (arg1, N), flags)
+
+#define OP_SAME_WITH_NULL(N) \
+ ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
+ ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
+
switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
{
- case '1':
+ case tcc_unary:
/* Two conversions are equal only if signedness and modes match. */
- if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
- && (TYPE_UNSIGNED (TREE_TYPE (arg0))
- != TYPE_UNSIGNED (TREE_TYPE (arg1))))
- return 0;
+ switch (TREE_CODE (arg0))
+ {
+ case NOP_EXPR:
+ case CONVERT_EXPR:
+ case FIX_CEIL_EXPR:
+ case FIX_TRUNC_EXPR:
+ case FIX_FLOOR_EXPR:
+ case FIX_ROUND_EXPR:
+ if (TYPE_UNSIGNED (TREE_TYPE (arg0))
+ != TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ return 0;
+ break;
+ default:
+ break;
+ }
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
- case '<':
- case '2':
- if (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags))
+
+ case tcc_comparison:
+ case tcc_binary:
+ if (OP_SAME (0) && OP_SAME (1))
return 1;
/* For commutative ops, allow the other order. */
&& operand_equal_p (TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg1, 0), flags));
- case 'r':
+ case tcc_reference:
/* If either of the pointer (or reference) expressions we are
dereferencing contain a side effect, these cannot be equal. */
if (TREE_SIDE_EFFECTS (arg0)
switch (TREE_CODE (arg0))
{
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
case ARRAY_REF:
case ARRAY_RANGE_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 3),
- TREE_OPERAND (arg1, 3), flags));
-
+ /* Operands 2 and 3 may be null. */
+ return (OP_SAME (0)
+ && OP_SAME (1)
+ && OP_SAME_WITH_NULL (2)
+ && OP_SAME_WITH_NULL (3));
case COMPONENT_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags));
-
+ /* Handle operand 2 the same as for ARRAY_REF. */
+ return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
case BIT_FIELD_REF:
- return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 2),
- TREE_OPERAND (arg1, 2), flags));
+ return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
+
default:
return 0;
}
- case 'e':
+ case tcc_expression:
switch (TREE_CODE (arg0))
{
case ADDR_EXPR:
case TRUTH_NOT_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags);
+ return OP_SAME (0);
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
- return operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags);
+ return OP_SAME (0) && OP_SAME (1);
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
+ if (OP_SAME (0) && OP_SAME (1))
+ return 1;
+
+ /* Otherwise take into account this is a commutative operation. */
return (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags)
+ TREE_OPERAND (arg1, 1), flags)
&& operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags))
- || (operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 1), flags)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 0), flags));
+ TREE_OPERAND (arg1, 0), flags));
case CALL_EXPR:
/* If the CALL_EXPRs call different functions, then they
clearly can not be equal. */
- if (! operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), flags))
+ if (!OP_SAME (0))
return 0;
{
return 0;
}
- case 'd':
+ case tcc_declaration:
/* Consider __builtin_sqrt equal to sqrt. */
return (TREE_CODE (arg0) == FUNCTION_DECL
&& DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
default:
return 0;
}
+
+#undef OP_SAME
+#undef OP_SAME_WITH_NULL
}
\f
/* Similar to operand_equal_p, but see if ARG0 might have been made by
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
enum tree_code code = TREE_CODE (arg);
- char class = TREE_CODE_CLASS (code);
+ enum tree_code_class class = TREE_CODE_CLASS (code);
- /* We can handle some of the 'e' cases here. */
- if (class == 'e' && code == TRUTH_NOT_EXPR)
- class = '1';
- else if (class == 'e'
+ /* We can handle some of the tcc_expression cases here. */
+ if (class == tcc_expression && code == TRUTH_NOT_EXPR)
+ class = tcc_unary;
+ else if (class == tcc_expression
&& (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
|| code == COMPOUND_EXPR))
- class = '2';
+ class = tcc_binary;
- else if (class == 'e' && code == SAVE_EXPR
+ else if (class == tcc_expression && code == SAVE_EXPR
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
{
/* If we've already found a CVAL1 or CVAL2, this expression is
if (*cval1 || *cval2)
return 0;
- class = '1';
+ class = tcc_unary;
*save_p = 1;
}
switch (class)
{
- case '1':
+ case tcc_unary:
return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
- case '2':
+ case tcc_binary:
return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
&& twoval_comparison_p (TREE_OPERAND (arg, 1),
cval1, cval2, save_p));
- case 'c':
+ case tcc_constant:
return 1;
- case 'e':
+ case tcc_expression:
if (code == COND_EXPR)
return (twoval_comparison_p (TREE_OPERAND (arg, 0),
cval1, cval2, save_p)
cval1, cval2, save_p));
return 0;
- case '<':
+ case tcc_comparison:
/* First see if we can handle the first operand, then the second. For
the second operand, we know *CVAL1 can't be zero. It must be that
one side of the comparison is each of the values; test for the
{
tree type = TREE_TYPE (arg);
enum tree_code code = TREE_CODE (arg);
- char class = TREE_CODE_CLASS (code);
+ enum tree_code_class class = TREE_CODE_CLASS (code);
- /* We can handle some of the 'e' cases here. */
- if (class == 'e' && code == TRUTH_NOT_EXPR)
- class = '1';
- else if (class == 'e'
+ /* We can handle some of the tcc_expression cases here. */
+ if (class == tcc_expression && code == TRUTH_NOT_EXPR)
+ class = tcc_unary;
+ else if (class == tcc_expression
&& (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
- class = '2';
+ class = tcc_binary;
switch (class)
{
- case '1':
+ case tcc_unary:
return fold (build1 (code, type,
eval_subst (TREE_OPERAND (arg, 0),
old0, new0, old1, new1)));
- case '2':
+ case tcc_binary:
return fold (build2 (code, type,
eval_subst (TREE_OPERAND (arg, 0),
old0, new0, old1, new1),
eval_subst (TREE_OPERAND (arg, 1),
old0, new0, old1, new1)));
- case 'e':
+ case tcc_expression:
switch (code)
{
case SAVE_EXPR:
}
/* Fall through - ??? */
- case '<':
+ case tcc_comparison:
{
tree arg0 = TREE_OPERAND (arg, 0);
tree arg1 = TREE_OPERAND (arg, 1);
floating-point non-equality comparisons, in which case we just
enclose a TRUTH_NOT_EXPR around what we have. */
- if (TREE_CODE_CLASS (code) == '<')
+ if (TREE_CODE_CLASS (code) == tcc_comparison)
{
tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
if (FLOAT_TYPE_P (op_type)
switch (code)
{
case INTEGER_CST:
- return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
+ return constant_boolean_node (integer_zerop (arg), type);
case TRUTH_AND_EXPR:
return build2 (TRUTH_OR_EXPR, type,
default:
break;
}
- if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
- abort ();
+ gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
return build1 (TRUTH_NOT_EXPR, type, arg);
}
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
int unsignedp)
{
- tree result = build3 (BIT_FIELD_REF, type, inner,
- size_int (bitsize), bitsize_int (bitpos));
+ tree result;
+
+ if (bitpos == 0)
+ {
+ tree size = TYPE_SIZE (TREE_TYPE (inner));
+ if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
+ || POINTER_TYPE_P (TREE_TYPE (inner)))
+ && host_integerp (size, 0)
+ && tree_low_cst (size, 0) == bitsize)
+ return fold_convert (type, inner);
+ }
+
+ result = build3 (BIT_FIELD_REF, type, inner,
+ size_int (bitsize), bitsize_int (bitpos));
BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
do anything if the inner expression is a PLACEHOLDER_EXPR since we
then will no longer be able to replace it. */
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
- &lunsignedp, &lvolatilep);
+ &lunsignedp, &lvolatilep, false);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
return 0;
/* If this is not a constant, we can only do something if bit positions,
sizes, and signedness are the same. */
rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
- &runsignedp, &rvolatilep);
+ &runsignedp, &rvolatilep, false);
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|| lunsignedp != runsignedp || offset != 0
lbitpos = nbitsize - lbitsize - lbitpos;
/* Make the mask to be used against the extracted field. */
- mask = build_int_2 (~0, ~0);
- TREE_TYPE (mask) = unsigned_type;
+ mask = build_int_cst (unsigned_type, -1);
mask = force_fit_type (mask, 0, false, false);
mask = fold_convert (unsigned_type, mask);
mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
}
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
- punsignedp, pvolatilep);
+ punsignedp, pvolatilep, false);
if ((inner == exp && and_mask == 0)
|| *pbitsize < 0 || offset != 0
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
precision = TYPE_PRECISION (unsigned_type);
- mask = build_int_2 (~0, ~0);
- TREE_TYPE (mask) = unsigned_type;
+ mask = build_int_cst (unsigned_type, -1);
mask = force_fit_type (mask, 0, false, false);
-
+
mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
unsigned int precision = TYPE_PRECISION (type);
tree tmask;
- tmask = build_int_2 (~0, ~0);
- TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
+ tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
tmask = force_fit_type (tmask, 0, false, false);
-
+
return
tree_int_cst_equal (mask,
const_binop (RSHIFT_EXPR,
simple_operand_p (tree exp)
{
/* Strip any conversions that don't change the machine mode. */
- while ((TREE_CODE (exp) == NOP_EXPR
- || TREE_CODE (exp) == CONVERT_EXPR)
- && (TYPE_MODE (TREE_TYPE (exp))
- == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- exp = TREE_OPERAND (exp, 0);
+ STRIP_NOPS (exp);
- return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
+ return (CONSTANT_CLASS_P (exp)
+ || TREE_CODE (exp) == SSA_NAME
|| (DECL_P (exp)
&& ! TREE_ADDRESSABLE (exp)
&& ! TREE_THIS_VOLATILE (exp)
return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
}
- if (TREE_CODE_CLASS (code) != '<')
+ if (TREE_CODE_CLASS (code) != tcc_comparison)
return 0;
/* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
result = sgn0 >= sgn1;
break;
default:
- abort ();
+ gcc_unreachable ();
}
return constant_boolean_node (result, type);
if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
{
- if (first_rtl_op (code) > 0)
+ if (TREE_CODE_LENGTH (code) > 0)
arg0 = TREE_OPERAND (exp, 0);
- if (TREE_CODE_CLASS (code) == '<'
- || TREE_CODE_CLASS (code) == '1'
- || TREE_CODE_CLASS (code) == '2')
+ if (TREE_CODE_CLASS (code) == tcc_comparison
+ || TREE_CODE_CLASS (code) == tcc_unary
+ || TREE_CODE_CLASS (code) == tcc_binary)
arg0_type = TREE_TYPE (arg0);
- if (TREE_CODE_CLASS (code) == '2'
- || TREE_CODE_CLASS (code) == '<'
- || (TREE_CODE_CLASS (code) == 'e'
+ if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison
+ || (TREE_CODE_CLASS (code) == tcc_expression
&& TREE_CODE_LENGTH (code) > 1))
arg1 = TREE_OPERAND (exp, 1);
}
in_p = ! in_p, low = 0, high = arg1;
break;
default:
- abort ();
+ gcc_unreachable ();
}
/* If this is an unsigned comparison, we also know that EXP is
switch (comp_code)
{
case EQ_EXPR:
+ case UNEQ_EXPR:
tem = fold_convert (arg1_type, arg1);
return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
case NE_EXPR:
+ case LTGT_EXPR:
return pedantic_non_lvalue (fold_convert (type, arg1));
+ case UNGE_EXPR:
+ case UNGT_EXPR:
+ if (flag_trapping_math)
+ break;
+ /* Fall through. */
case GE_EXPR:
case GT_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
(TREE_TYPE (arg1)), arg1);
tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
return pedantic_non_lvalue (fold_convert (type, tem));
+ case UNLE_EXPR:
+ case UNLT_EXPR:
+ if (flag_trapping_math)
+ break;
case LE_EXPR:
case LT_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
return negate_expr (fold_convert (type, tem));
default:
- abort ();
+ gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
+ break;
}
/* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
return pedantic_non_lvalue (fold_convert (type, arg1));
case LE_EXPR:
case LT_EXPR:
+ case UNLE_EXPR:
+ case UNLT_EXPR:
/* In C++ a ?: expression can be an lvalue, so put the
operand which will be used if they are equal first
so that we can convert this back to the
{
comp_op0 = fold_convert (comp_type, comp_op0);
comp_op1 = fold_convert (comp_type, comp_op1);
- tem = fold (build2 (MIN_EXPR, comp_type,
- (comp_code == LE_EXPR
- ? comp_op0 : comp_op1),
- (comp_code == LE_EXPR
- ? comp_op1 : comp_op0)));
+ tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
+ ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
+ : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
return pedantic_non_lvalue (fold_convert (type, tem));
}
break;
case GE_EXPR:
case GT_EXPR:
+ case UNGE_EXPR:
+ case UNGT_EXPR:
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
{
comp_op0 = fold_convert (comp_type, comp_op0);
comp_op1 = fold_convert (comp_type, comp_op1);
- tem = fold (build2 (MAX_EXPR, comp_type,
- (comp_code == GE_EXPR
- ? comp_op0 : comp_op1),
- (comp_code == GE_EXPR
- ? comp_op1 : comp_op0)));
- tem = fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1));
+ tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
+ ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
+ : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
return pedantic_non_lvalue (fold_convert (type, tem));
}
break;
+ case UNEQ_EXPR:
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+ return pedantic_non_lvalue (fold_convert (type, arg2));
+ break;
+ case LTGT_EXPR:
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+ return pedantic_non_lvalue (fold_convert (type, arg1));
+ break;
default:
- abort ();
+ gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
+ break;
}
}
case NE_EXPR:
break;
default:
- abort ();
+ gcc_unreachable ();
}
return NULL_TREE;
\f
-#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
-#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
+#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
+#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
/* EXP is some logical combination of boolean tests. See if we can
/* On machines where the branch cost is expensive, if this is a
short-circuited branch and the underlying object on both sides
is the same, make a non-short-circuit operation. */
- else if (RANGE_TEST_NON_SHORT_CIRCUIT
+ else if (LOGICAL_OP_NON_SHORT_CIRCUIT
&& lhs != 0 && rhs != 0
&& (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
|| TREE_CODE (exp) == TRUTH_ORIF_EXPR)
rcode = NE_EXPR;
}
- if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
+ if (TREE_CODE_CLASS (lcode) != tcc_comparison
+ || TREE_CODE_CLASS (rcode) != tcc_comparison)
return 0;
ll_arg = TREE_OPERAND (lhs, 0);
ll_arg, rl_arg),
fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
- return build2 (code, truth_type, lhs, rhs);
+ if (LOGICAL_OP_NON_SHORT_CIRCUIT)
+ return build2 (code, truth_type, lhs, rhs);
}
/* See if the comparisons can be merged. Then get all the parameters for
{
if (wanted_code == NE_EXPR)
{
- warning ("`or' of unmatched not-equal tests is always 1");
+ warning ("%<or%> of unmatched not-equal tests is always 1");
return constant_boolean_node (true, truth_type);
}
else
{
- warning ("`and' of mutually exclusive equal-tests is always 0");
+ warning ("%<and%> of mutually exclusive equal-tests is always 0");
return constant_boolean_node (false, truth_type);
}
}
if (integer_zerop (c))
return NULL_TREE;
- if (TREE_CODE_CLASS (tcode) == '1')
+ if (TREE_CODE_CLASS (tcode) == tcc_unary)
op0 = TREE_OPERAND (t, 0);
- if (TREE_CODE_CLASS (tcode) == '2')
+ if (TREE_CODE_CLASS (tcode) == tcc_binary)
op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
/* Note that we need not handle conditional operations here since fold
case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
/* If op0 is an expression ... */
- if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
- || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
- || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
- || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
+ if ((COMPARISON_CLASS_P (op0)
+ || UNARY_CLASS_P (op0)
+ || BINARY_CLASS_P (op0)
+ || EXPRESSION_CLASS_P (op0))
/* ... and is unsigned, and its type is smaller than ctype,
then we cannot pass through as widening. */
&& ((TYPE_UNSIGNED (TREE_TYPE (op0))
return t1;
break;
- case NEGATE_EXPR: case ABS_EXPR:
+ case ABS_EXPR:
+ /* If widening the type changes it from signed to unsigned, then we
+ must avoid building ABS_EXPR itself as unsigned. */
+ if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
+ {
+ tree cstype = (*lang_hooks.types.signed_type) (ctype);
+ if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
+ {
+ t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
+ return fold_convert (ctype, t1);
+ }
+ break;
+ }
+ /* FALLTHROUGH */
+ case NEGATE_EXPR:
if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
break;
return value ? integer_one_node : integer_zero_node;
else if (type == boolean_type_node)
return value ? boolean_true_node : boolean_false_node;
- else if (TREE_CODE (type) == BOOLEAN_TYPE)
- return lang_hooks.truthvalue_conversion (value ? integer_one_node
- : integer_zero_node);
else
- {
- tree t = build_int_2 (value, 0);
+ return build_int_cst (type, value);
+}
- TREE_TYPE (t) = type;
- return t;
+
+/* Return true if expr looks like an ARRAY_REF and set base and
+ offset to the appropriate trees. If there is no offset,
+ offset is set to NULL_TREE. */
+
+static bool
+extract_array_ref (tree expr, tree *base, tree *offset)
+{
+ /* We have to be careful with stripping nops as with the
+ base type the meaning of the offset can change. */
+ tree inner_expr = expr;
+ STRIP_NOPS (inner_expr);
+ /* One canonical form is a PLUS_EXPR with the first
+ argument being an ADDR_EXPR with a possible NOP_EXPR
+ attached. */
+ if (TREE_CODE (expr) == PLUS_EXPR)
+ {
+ tree op0 = TREE_OPERAND (expr, 0);
+ STRIP_NOPS (op0);
+ if (TREE_CODE (op0) == ADDR_EXPR)
+ {
+ /* Return the original (unstripped) operands, not OP0, so
+ the caller sees the conversions that were on them. */
+ *base = TREE_OPERAND (expr, 0);
+ *offset = TREE_OPERAND (expr, 1);
+ return true;
+ }
 }
+ /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
+ which we transform into an ADDR_EXPR with appropriate
+ offset. For other arguments to the ADDR_EXPR we assume
+ zero offset and as such do not care about the ADDR_EXPR
+ type and strip possible nops from it. */
+ else if (TREE_CODE (inner_expr) == ADDR_EXPR)
+ {
+ tree op0 = TREE_OPERAND (inner_expr, 0);
+ if (TREE_CODE (op0) == ARRAY_REF)
+ {
+ *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
+ *offset = TREE_OPERAND (op0, 1);
+ }
+ else
+ {
+ /* Any other ADDR_EXPR is treated as base with zero offset. */
+ *base = inner_expr;
+ *offset = NULL_TREE;
+ }
+ return true;
+ }
+
+ return false;
 }
+
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
possible. */
static tree
-fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
- tree cond, tree arg, int cond_first_p)
+fold_binary_op_with_conditional_arg (tree t, enum tree_code code, tree cond,
+ tree arg, int cond_first_p)
{
+ const tree type = TREE_TYPE (t);
+ tree cond_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 0))
+ : TREE_TYPE (TREE_OPERAND (t, 1));
+ tree arg_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 1))
+ : TREE_TYPE (TREE_OPERAND (t, 0));
tree test, true_value, false_value;
tree lhs = NULL_TREE;
tree rhs = NULL_TREE;
/* This transformation is only worthwhile if we don't have to wrap
- arg in a SAVE_EXPR, and the operation can be simplified on atleast
+ arg in a SAVE_EXPR, and the operation can be simplified on at least
one of the branches once its pushed inside the COND_EXPR. */
if (!TREE_CONSTANT (arg))
return NULL_TREE;
false_value = constant_boolean_node (false, testtype);
}
+ arg = fold_convert (arg_type, arg);
if (lhs == 0)
- lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
+ {
+ true_value = fold_convert (cond_type, true_value);
+ lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
: build2 (code, type, arg, true_value));
+ }
if (rhs == 0)
- rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
+ {
+ false_value = fold_convert (cond_type, false_value);
+ rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
: build2 (code, type, arg, false_value));
+ }
test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
return fold_convert (type, test);
TREE_INT_CST_HIGH (arg01),
TREE_INT_CST_LOW (arg1),
TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
- prod = build_int_2 (lpart, hpart);
- TREE_TYPE (prod) = TREE_TYPE (arg00);
+ prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
prod = force_fit_type (prod, -1, overflow, false);
if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
TREE_INT_CST_LOW (tmp),
TREE_INT_CST_HIGH (tmp),
&lpart, &hpart);
- hi = build_int_2 (lpart, hpart);
- TREE_TYPE (hi) = TREE_TYPE (arg00);
+ hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
TREE_CONSTANT_OVERFLOW (prod));
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
else
{
+ /* A negative divisor reverses the relational operators. */
+ code = swap_tree_comparison (code);
+
tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
switch (tree_int_cst_sgn (arg1))
{
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
tree result_type)
{
- /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
- operand 0. */
- if (code == TRUTH_NOT_EXPR)
- {
- code = TREE_CODE (arg0);
- if (code != NE_EXPR && code != EQ_EXPR)
- return NULL_TREE;
-
- /* Extract the arguments of the EQ/NE. */
- arg1 = TREE_OPERAND (arg0, 1);
- arg0 = TREE_OPERAND (arg0, 0);
-
- /* This requires us to invert the code. */
- code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
- }
-
/* If this is testing a single bit, we can optimize the test. */
if ((code == NE_EXPR || code == EQ_EXPR)
&& TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
operations as unsigned. If we must use the AND, we have a choice.
Normally unsigned is faster, but for some machines signed is. */
#ifdef LOAD_EXTEND_OP
- ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
+ ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
+ && !flag_syntax_only) ? 0 : 1;
#else
ops_unsigned = 1;
#endif
reorder_operands_p (tree arg0, tree arg1)
{
if (! flag_evaluation_order)
- return true;
+ return true;
if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
return true;
return ! TREE_SIDE_EFFECTS (arg0)
if (DECL_P (arg0))
return 1;
- if (reorder && flag_evaluation_order
- && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
- return 0;
-
- if (DECL_P (arg1))
- return 0;
- if (DECL_P (arg0))
- return 1;
-
/* It is preferable to swap two SSA_NAME to ensure a canonical form
for commutative and comparison operators. Ensuring a canonical
form allows the optimizers to find additional redundancies without
return 0;
}
-/* Perform constant folding and related simplification of EXPR.
- The related simplifications include x*1 => x, x*0 => 0, etc.,
- and application of the associative law.
- NOP_EXPR conversions may be removed freely (as long as we
- are careful not to change the type of the overall expression).
- We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
- but we can constant-fold them if they have constant operands. */
+/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
+ ARG0 is extended to a wider type. */
-#ifdef ENABLE_FOLD_CHECKING
-# define fold(x) fold_1 (x)
-static tree fold_1 (tree);
-static
-#endif
-tree
-fold (tree expr)
+static tree
+fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
- const tree t = expr;
- const tree type = TREE_TYPE (expr);
- tree t1 = NULL_TREE;
- tree tem;
- tree arg0 = NULL_TREE, arg1 = NULL_TREE;
- enum tree_code code = TREE_CODE (t);
- int kind = TREE_CODE_CLASS (code);
+ tree arg0_unw = get_unwidened (arg0, NULL_TREE);
+ tree arg1_unw;
+ tree shorter_type, outer_type;
+ tree min, max;
+ bool above, below;
- /* WINS will be nonzero when the switch is done
- if all operands are constant. */
- int wins = 1;
+ if (arg0_unw == arg0)
+ return NULL_TREE;
+ shorter_type = TREE_TYPE (arg0_unw);
- /* Return right away if a constant. */
- if (kind == 'c')
- return t;
+ if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
+ return NULL_TREE;
- if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
+ arg1_unw = get_unwidened (arg1, shorter_type);
+ if (!arg1_unw)
+ return NULL_TREE;
+
+ /* If possible, express the comparison in the shorter mode. */
+ if ((code == EQ_EXPR || code == NE_EXPR
+ || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
+ && (TREE_TYPE (arg1_unw) == shorter_type
+ || (TREE_CODE (arg1_unw) == INTEGER_CST
+ && TREE_CODE (shorter_type) == INTEGER_TYPE
+ && int_fits_type_p (arg1_unw, shorter_type))))
+ return fold (build (code, type, arg0_unw,
+ fold_convert (shorter_type, arg1_unw)));
+
+ if (TREE_CODE (arg1_unw) != INTEGER_CST)
+ return NULL_TREE;
+
+ /* If we are comparing with the integer that does not fit into the range
+ of the shorter type, the result is known. */
+ outer_type = TREE_TYPE (arg1_unw);
+ min = lower_bound_in_type (outer_type, shorter_type);
+ max = upper_bound_in_type (outer_type, shorter_type);
+
+ above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
+ max, arg1_unw));
+ below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
+ arg1_unw, min));
+
+ switch (code)
{
- tree subop;
+ case EQ_EXPR:
+ if (above || below)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ break;
- /* Special case for conversion ops that can have fixed point args. */
- arg0 = TREE_OPERAND (t, 0);
+ case NE_EXPR:
+ if (above || below)
+ return omit_one_operand (type, integer_one_node, arg0);
+ break;
- /* Don't use STRIP_NOPS, because signedness of argument type matters. */
- if (arg0 != 0)
- STRIP_SIGN_NOPS (arg0);
+ case LT_EXPR:
+ case LE_EXPR:
+ if (above)
+ return omit_one_operand (type, integer_one_node, arg0);
+ else if (below)
+ return omit_one_operand (type, integer_zero_node, arg0);
- if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
- subop = TREE_REALPART (arg0);
- else
- subop = arg0;
+ case GT_EXPR:
+ case GE_EXPR:
+ if (above)
+ return omit_one_operand (type, integer_zero_node, arg0);
+ else if (below)
+ return omit_one_operand (type, integer_one_node, arg0);
- if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
- && TREE_CODE (subop) != REAL_CST)
- /* Note that TREE_CONSTANT isn't enough:
- static var addresses are constant but we can't
- do arithmetic on them. */
- wins = 0;
+ default:
+ break;
}
- else if (IS_EXPR_CODE_CLASS (kind))
- {
- int len = first_rtl_op (code);
- int i;
- for (i = 0; i < len; i++)
- {
- tree op = TREE_OPERAND (t, i);
- tree subop;
- if (op == 0)
- continue; /* Valid for CALL_EXPR, at least. */
+ return NULL_TREE;
+}
- /* Strip any conversions that don't change the mode. This is
- safe for every expression, except for a comparison expression
- because its signedness is derived from its operands. So, in
- the latter case, only strip conversions that don't change the
- signedness.
+/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
+ ARG0 just the signedness is changed. */
- Note that this is done as an internal manipulation within the
- constant folder, in order to find the simplest representation
- of the arguments so that their form can be studied. In any
- cases, the appropriate type conversions should be put back in
- the tree that will get out of the constant folder. */
- if (kind == '<')
- STRIP_SIGN_NOPS (op);
- else
- STRIP_NOPS (op);
+static tree
+fold_sign_changed_comparison (enum tree_code code, tree type,
+ tree arg0, tree arg1)
+{
+ tree arg0_inner, tmp;
+ tree inner_type, outer_type;
- if (TREE_CODE (op) == COMPLEX_CST)
- subop = TREE_REALPART (op);
- else
- subop = op;
+ if (TREE_CODE (arg0) != NOP_EXPR)
+ return NULL_TREE;
- if (TREE_CODE (subop) != INTEGER_CST
- && TREE_CODE (subop) != REAL_CST)
- /* Note that TREE_CONSTANT isn't enough:
- static var addresses are constant but we can't
- do arithmetic on them. */
- wins = 0;
+ outer_type = TREE_TYPE (arg0);
+ arg0_inner = TREE_OPERAND (arg0, 0);
+ inner_type = TREE_TYPE (arg0_inner);
- if (i == 0)
- arg0 = op;
- else if (i == 1)
- arg1 = op;
- }
+ if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
+ return NULL_TREE;
+
+ if (TREE_CODE (arg1) != INTEGER_CST
+ && !(TREE_CODE (arg1) == NOP_EXPR
+ && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
+ return NULL_TREE;
+
+ if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
+ && code != NE_EXPR
+ && code != EQ_EXPR)
+ return NULL_TREE;
+
+ if (TREE_CODE (arg1) == INTEGER_CST)
+ {
+ tmp = build_int_cst_wide (inner_type,
+ TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1));
+ arg1 = force_fit_type (tmp, 0,
+ TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
}
+ else
+ arg1 = fold_convert (inner_type, arg1);
- /* If this is a commutative operation, and ARG0 is a constant, move it
- to ARG1 to reduce the number of tests below. */
- if (commutative_tree_code (code)
- && tree_swap_operands_p (arg0, arg1, true))
- return fold (build2 (code, type, TREE_OPERAND (t, 1),
- TREE_OPERAND (t, 0)));
+ return fold (build (code, type, arg0_inner, arg1));
+}
- /* Now WINS is set as described above,
- ARG0 is the first operand of EXPR,
- and ARG1 is the second operand (if it has more than one operand).
+/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
+ step of the array. ADDR is the address. MULT is the multiplicative expression.
+ If the function succeeds, the new address expression is returned. Otherwise
+ NULL_TREE is returned. */
- First check for cases where an arithmetic operation is applied to a
- compound, conditional, or comparison operation. Push the arithmetic
- operation inside the compound or conditional to see if any folding
- can then be done. Convert comparison to conditional for this purpose.
- The also optimizes non-constant cases that used to be done in
- expand_expr.
+static tree
+try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
+{
+ tree s, delta, step;
+ tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
+ tree ref = TREE_OPERAND (addr, 0), pref;
+ tree ret, pos;
+ tree itype;
- Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
- one of the operands is a comparison and the other is a comparison, a
- BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
- code below would make the expression more complex. Change it to a
- TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
- TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+
+ if (TREE_CODE (arg0) == INTEGER_CST)
+ {
+ s = arg0;
+ delta = arg1;
+ }
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ {
+ s = arg1;
+ delta = arg0;
+ }
+ else
+ return NULL_TREE;
- if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
- || code == EQ_EXPR || code == NE_EXPR)
- && ((truth_value_p (TREE_CODE (arg0))
- && (truth_value_p (TREE_CODE (arg1))
- || (TREE_CODE (arg1) == BIT_AND_EXPR
- && integer_onep (TREE_OPERAND (arg1, 1)))))
- || (truth_value_p (TREE_CODE (arg1))
- && (truth_value_p (TREE_CODE (arg0))
- || (TREE_CODE (arg0) == BIT_AND_EXPR
- && integer_onep (TREE_OPERAND (arg0, 1)))))))
+ for (;; ref = TREE_OPERAND (ref, 0))
{
- tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
- : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
- : TRUTH_XOR_EXPR,
- type, fold_convert (boolean_type_node, arg0),
- fold_convert (boolean_type_node, arg1)));
+ if (TREE_CODE (ref) == ARRAY_REF)
+ {
+ step = array_ref_element_size (ref);
- if (code == EQ_EXPR)
- tem = invert_truthvalue (tem);
+ if (TREE_CODE (step) != INTEGER_CST)
+ continue;
- return tem;
+ itype = TREE_TYPE (step);
+
+ /* If the type sizes do not match, we might run into problems
+ when one of them would overflow. */
+ if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
+ continue;
+
+ if (!operand_equal_p (step, fold_convert (itype, s), 0))
+ continue;
+
+ delta = fold_convert (itype, delta);
+ break;
+ }
+
+ if (!handled_component_p (ref))
+ return NULL_TREE;
}
- if (TREE_CODE_CLASS (code) == '1')
+ /* We found the suitable array reference. So copy everything up to it,
+ and replace the index. */
+
+ pref = TREE_OPERAND (addr, 0);
+ ret = copy_node (pref);
+ pos = ret;
+
+ while (pref != ref)
{
- if (TREE_CODE (arg0) == COMPOUND_EXPR)
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
- else if (TREE_CODE (arg0) == COND_EXPR)
- {
- tree arg01 = TREE_OPERAND (arg0, 1);
- tree arg02 = TREE_OPERAND (arg0, 2);
- if (! VOID_TYPE_P (TREE_TYPE (arg01)))
- arg01 = fold (build1 (code, type, arg01));
- if (! VOID_TYPE_P (TREE_TYPE (arg02)))
- arg02 = fold (build1 (code, type, arg02));
- tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
- arg01, arg02));
+ pref = TREE_OPERAND (pref, 0);
+ TREE_OPERAND (pos, 0) = copy_node (pref);
+ pos = TREE_OPERAND (pos, 0);
+ }
- /* If this was a conversion, and all we did was to move into
- inside the COND_EXPR, bring it back out. But leave it if
- it is a conversion from integer to integer and the
- result precision is no wider than a word since such a
- conversion is cheap and may be optimized away by combine,
- while it couldn't if it were outside the COND_EXPR. Then return
- so we don't get into an infinite recursion loop taking the
- conversion out and then back in. */
+ TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
+ TREE_OPERAND (pos, 1),
+ delta));
- if ((code == NOP_EXPR || code == CONVERT_EXPR
- || code == NON_LVALUE_EXPR)
+ return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
+}
+
+
+/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
+ means A >= Y && A != MAX, but in this case we know that
+ A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
+
+static tree
+fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
+{
+ tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
+
+ /* Extract A from BOUND, which must have the shape A < X or X > A. */
+ if (TREE_CODE (bound) == LT_EXPR)
+ a = TREE_OPERAND (bound, 0);
+ else if (TREE_CODE (bound) == GT_EXPR)
+ a = TREE_OPERAND (bound, 1);
+ else
+ return NULL_TREE;
+
+ /* The transformation only applies to integral or pointer A. */
+ typea = TREE_TYPE (a);
+ if (!INTEGRAL_TYPE_P (typea)
+ && !POINTER_TYPE_P (typea))
+ return NULL_TREE;
+
+ /* Extract A1 (supposedly A + 1) and Y from INEQ, which must have
+ the shape Y < A1 or A1 > Y. */
+ if (TREE_CODE (ineq) == LT_EXPR)
+ {
+ a1 = TREE_OPERAND (ineq, 1);
+ y = TREE_OPERAND (ineq, 0);
+ }
+ else if (TREE_CODE (ineq) == GT_EXPR)
+ {
+ a1 = TREE_OPERAND (ineq, 0);
+ y = TREE_OPERAND (ineq, 1);
+ }
+ else
+ return NULL_TREE;
+
+ if (TREE_TYPE (a1) != typea)
+ return NULL_TREE;
+
+ /* Verify that A1 really is A + 1 by folding A1 - A and checking
+ the difference is the constant one. */
+ diff = fold (build2 (MINUS_EXPR, typea, a1, a));
+ if (!integer_onep (diff))
+ return NULL_TREE;
+
+ /* A + 1 > Y, given A < X (so A != MAX), simplifies to A >= Y. */
+ return fold (build2 (GE_EXPR, type, a, y));
+}
+
+/* Fold complex addition when both components are accessible by parts.
+ Return non-null if successful. CODE should be PLUS_EXPR for addition,
+ or MINUS_EXPR for subtraction. */
+
+static tree
+fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
+{
+ tree ar, ai, br, bi, rr, ri, inner_type;
+
+ /* Split AC into real (AR) and imaginary (AI) parts; give up
+ unless both parts are directly accessible. */
+ if (TREE_CODE (ac) == COMPLEX_EXPR)
+ ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
+ else if (TREE_CODE (ac) == COMPLEX_CST)
+ ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
+ else
+ return NULL;
+
+ /* Likewise split BC into BR and BI. */
+ if (TREE_CODE (bc) == COMPLEX_EXPR)
+ br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
+ else if (TREE_CODE (bc) == COMPLEX_CST)
+ br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
+ else
+ return NULL;
+
+ inner_type = TREE_TYPE (type);
+
+ /* Operate componentwise: result = (ar CODE br, ai CODE bi). */
+ rr = fold (build2 (code, inner_type, ar, br));
+ ri = fold (build2 (code, inner_type, ai, bi));
+
+ return fold (build2 (COMPLEX_EXPR, type, rr, ri));
+}
+
+/* Perform some simplifications of complex multiplication when one or more
+ of the components are constants or zeros. Return non-null if successful. */
+
+tree
+fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
+{
+ tree rr, ri, inner_type, zero;
+ bool ar0, ai0, br0, bi0, bi1;
+
+ inner_type = TREE_TYPE (type);
+ zero = NULL;
+
+ if (SCALAR_FLOAT_TYPE_P (inner_type))
+ {
+ ar0 = ai0 = br0 = bi0 = bi1 = false;
+
+ /* We're only interested in +0.0 here, thus we don't use real_zerop. */
+
+ if (TREE_CODE (ar) == REAL_CST
+ && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
+ ar0 = true, zero = ar;
+
+ if (TREE_CODE (ai) == REAL_CST
+ && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
+ ai0 = true, zero = ai;
+
+ if (TREE_CODE (br) == REAL_CST
+ && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
+ br0 = true, zero = br;
+
+ if (TREE_CODE (bi) == REAL_CST)
+ {
+ if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
+ bi0 = true, zero = bi;
+ else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
+ bi1 = true;
+ }
+ }
+ else
+ {
+ /* For non-float (integer) components exact zero tests are safe. */
+ ar0 = integer_zerop (ar);
+ if (ar0)
+ zero = ar;
+ ai0 = integer_zerop (ai);
+ if (ai0)
+ zero = ai;
+ br0 = integer_zerop (br);
+ if (br0)
+ zero = br;
+ bi0 = integer_zerop (bi);
+ if (bi0)
+ {
+ zero = bi;
+ bi1 = false;
+ }
+ else
+ bi1 = integer_onep (bi);
+ }
+
+ /* We won't optimize anything below unless something is zero. */
+ if (zero == NULL)
+ return NULL;
+
+ /* A real times i: ar * i = 0 + ar*i. */
+ if (ai0 && br0 && bi1)
+ {
+ rr = zero;
+ ri = ar;
+ }
+ /* Both operands real: ar * br + 0*i. */
+ else if (ai0 && bi0)
+ {
+ rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
+ ri = zero;
+ }
+ /* A real, B imaginary: ar * (bi*i) = 0 + (ar*bi)*i. */
+ else if (ai0 && br0)
+ {
+ rr = zero;
+ ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
+ }
+ /* A imaginary, B real: (ai*i) * br = 0 + (ai*br)*i. */
+ else if (ar0 && bi0)
+ {
+ rr = zero;
+ ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
+ }
+ /* Both imaginary: (ai*i) * (bi*i) = -(ai*bi) + 0*i. */
+ else if (ar0 && br0)
+ {
+ rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
+ rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
+ ri = zero;
+ }
+ /* B real: (ar + ai*i) * br = ar*br + (ai*br)*i. */
+ else if (bi0)
+ {
+ rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
+ ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
+ }
+ /* A real: ar * (br + bi*i) = ar*br + (ar*bi)*i. */
+ else if (ai0)
+ {
+ rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
+ ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
+ }
+ /* B imaginary: (ar + ai*i) * (bi*i) = -(ai*bi) + (ar*bi)*i. */
+ else if (br0)
+ {
+ rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
+ rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
+ ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
+ }
+ /* A imaginary: (ai*i) * (br + bi*i) = -(ai*bi) + (ai*br)*i. */
+ else if (ar0)
+ {
+ rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
+ rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
+ ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
+ }
+ else
+ return NULL;
+
+ return fold (build2 (COMPLEX_EXPR, type, rr, ri));
+}
+
+/* Try to fold the complex multiplication AC * BC of type TYPE.  Only
+   succeeds when both operands expose their real and imaginary parts
+   (COMPLEX_EXPR or COMPLEX_CST); otherwise return NULL.  The actual
+   simplification is delegated to fold_complex_mult_parts.  */
+
+static tree
+fold_complex_mult (tree type, tree ac, tree bc)
+{
+  tree ar, ai, br, bi;
+
+  /* Extract the components of AC, if accessible.  */
+  if (TREE_CODE (ac) == COMPLEX_EXPR)
+    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
+  else if (TREE_CODE (ac) == COMPLEX_CST)
+    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
+  else
+    return NULL;
+
+  /* Likewise for BC.  */
+  if (TREE_CODE (bc) == COMPLEX_EXPR)
+    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
+  else if (TREE_CODE (bc) == COMPLEX_CST)
+    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
+  else
+    return NULL;
+
+  return fold_complex_mult_parts (type, ar, ai, br, bi);
+}
+
+/* Perform some simplifications of complex division when one or more of
+   the components are constants or zeros.  Return non-null if successful.
+   AR/AI and BR/BI are the real and imaginary parts of the dividend and
+   divisor; CODE is the scalar division code to use (e.g. RDIV_EXPR).  */
+
+tree
+fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
+                        enum tree_code code)
+{
+  tree rr, ri, inner_type, zero;
+  bool ar0, ai0, br0, bi0, bi1;
+
+  /* INNER_TYPE is the component type of the complex TYPE.  */
+  inner_type = TREE_TYPE (type);
+
+  /* ZERO records one of the components known to be zero so that the
+     result can reuse that exact node; for floats this preserves the
+     sign of the +0.0 actually seen.  */
+  zero = NULL;
+
+  if (SCALAR_FLOAT_TYPE_P (inner_type))
+    {
+      ar0 = ai0 = br0 = bi0 = bi1 = false;
+
+      /* We're only interested in +0.0 here, thus we don't use real_zerop.  */
+
+      if (TREE_CODE (ar) == REAL_CST
+          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
+        ar0 = true, zero = ar;
+
+      if (TREE_CODE (ai) == REAL_CST
+          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
+        ai0 = true, zero = ai;
+
+      if (TREE_CODE (br) == REAL_CST
+          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
+        br0 = true, zero = br;
+
+      if (TREE_CODE (bi) == REAL_CST)
+        {
+          if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
+            bi0 = true, zero = bi;
+          else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
+            bi1 = true;
+        }
+    }
+  else
+    {
+      /* For non-float component types plain zero/one tests are safe.  */
+      ar0 = integer_zerop (ar);
+      if (ar0)
+        zero = ar;
+      ai0 = integer_zerop (ai);
+      if (ai0)
+        zero = ai;
+      br0 = integer_zerop (br);
+      if (br0)
+        zero = br;
+      bi0 = integer_zerop (bi);
+      if (bi0)
+        {
+          zero = bi;
+          bi1 = false;
+        }
+      else
+        bi1 = integer_onep (bi);
+    }
+
+  /* We won't optimize anything below unless something is zero.  */
+  if (zero == NULL)
+    return NULL;
+
+  if (ai0 && bi0)
+    {
+      /* Real / real: (AR/BR, 0).  */
+      rr = fold (build2 (code, inner_type, ar, br));
+      ri = zero;
+    }
+  else if (ai0 && br0)
+    {
+      /* Real / pure imaginary: (0, -(AR/BI)).  */
+      rr = zero;
+      ri = fold (build2 (code, inner_type, ar, bi));
+      ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
+    }
+  else if (ar0 && bi0)
+    {
+      /* Pure imaginary / real: (0, AI/BR).  */
+      rr = zero;
+      ri = fold (build2 (code, inner_type, ai, br));
+    }
+  else if (ar0 && br0)
+    {
+      /* Pure imaginary / pure imaginary: (AI/BI, 0).  */
+      rr = fold (build2 (code, inner_type, ai, bi));
+      ri = zero;
+    }
+  else if (bi0)
+    {
+      /* Complex / real: (AR/BR, AI/BR).  */
+      rr = fold (build2 (code, inner_type, ar, br));
+      ri = fold (build2 (code, inner_type, ai, br));
+    }
+  else if (br0)
+    {
+      /* Complex / pure imaginary: (AI/BI, -(AR/BI)).  */
+      rr = fold (build2 (code, inner_type, ai, bi));
+      ri = fold (build2 (code, inner_type, ar, bi));
+      ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
+    }
+  else
+    return NULL;
+
+  return fold (build2 (COMPLEX_EXPR, type, rr, ri));
+}
+
+/* Try to fold the complex division AC / BC of type TYPE, using scalar
+   division code CODE for the components.  Only succeeds when both
+   operands expose their real and imaginary parts (COMPLEX_EXPR or
+   COMPLEX_CST); otherwise return NULL.  The actual simplification is
+   delegated to fold_complex_div_parts.  */
+
+static tree
+fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
+{
+  tree ar, ai, br, bi;
+
+  /* Extract the components of AC, if accessible.  */
+  if (TREE_CODE (ac) == COMPLEX_EXPR)
+    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
+  else if (TREE_CODE (ac) == COMPLEX_CST)
+    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
+  else
+    return NULL;
+
+  /* Likewise for BC.  */
+  if (TREE_CODE (bc) == COMPLEX_EXPR)
+    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
+  else if (TREE_CODE (bc) == COMPLEX_CST)
+    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
+  else
+    return NULL;
+
+  return fold_complex_div_parts (type, ar, ai, br, bi, code);
+}
+
+/* Fold a unary expression EXPR. Return the folded expression if
+ folding is successful. Otherwise, return the original
+ expression. */
+
+static tree
+fold_unary (tree expr)
+{
+ const tree t = expr;
+ const tree type = TREE_TYPE (expr);
+ tree tem;
+ tree op0, arg0;
+ enum tree_code code = TREE_CODE (t);
+ enum tree_code_class kind = TREE_CODE_CLASS (code);
+
+ gcc_assert (IS_EXPR_CODE_CLASS (kind)
+ && TREE_CODE_LENGTH (code) == 1);
+
+
+ arg0 = op0 = TREE_OPERAND (t, 0);
+ if (arg0)
+ {
+ if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
+ {
+ /* Don't use STRIP_NOPS, because signedness of argument type matters. */
+ STRIP_SIGN_NOPS (arg0);
+ }
+ else
+ {
+ /* Strip any conversions that don't change the mode. This
+ is safe for every expression, except for a comparison
+ expression because its signedness is derived from its
+ operands.
+
+ Note that this is done as an internal manipulation within
+ the constant folder, in order to find the simplest
+ representation of the arguments so that their form can be
+ studied. In any cases, the appropriate type conversions
+ should be put back in the tree that will get out of the
+ constant folder. */
+ STRIP_NOPS (arg0);
+ }
+ }
+
+ if (TREE_CODE_CLASS (code) == tcc_unary)
+ {
+ if (TREE_CODE (arg0) == COMPOUND_EXPR)
+ return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
+ fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
+ else if (TREE_CODE (arg0) == COND_EXPR)
+ {
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ tree arg02 = TREE_OPERAND (arg0, 2);
+ if (! VOID_TYPE_P (TREE_TYPE (arg01)))
+ arg01 = fold (build1 (code, type, arg01));
+ if (! VOID_TYPE_P (TREE_TYPE (arg02)))
+ arg02 = fold (build1 (code, type, arg02));
+ tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
+ arg01, arg02));
+
+ /* If this was a conversion, and all we did was to move into
+ inside the COND_EXPR, bring it back out. But leave it if
+ it is a conversion from integer to integer and the
+ result precision is no wider than a word since such a
+ conversion is cheap and may be optimized away by combine,
+ while it couldn't if it were outside the COND_EXPR. Then return
+ so we don't get into an infinite recursion loop taking the
+ conversion out and then back in. */
+
+ if ((code == NOP_EXPR || code == CONVERT_EXPR
+ || code == NON_LVALUE_EXPR)
&& TREE_CODE (tem) == COND_EXPR
&& TREE_CODE (TREE_OPERAND (tem, 1)) == code
&& TREE_CODE (TREE_OPERAND (tem, 2)) == code
&& ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
&& (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
== TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
- && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
- && (INTEGRAL_TYPE_P
- (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
- && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
+ && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
+ && (INTEGRAL_TYPE_P
+ (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
+ && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
+ || flag_syntax_only))
tem = build1 (code, type,
build3 (COND_EXPR,
TREE_TYPE (TREE_OPERAND
TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
return tem;
}
- else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
+ else if (COMPARISON_CLASS_P (arg0))
{
if (TREE_CODE (type) == BOOLEAN_TYPE)
{
integer_zero_node))));
}
}
- else if (TREE_CODE_CLASS (code) == '<'
- && TREE_CODE (arg0) == COMPOUND_EXPR)
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
- else if (TREE_CODE_CLASS (code) == '<'
- && TREE_CODE (arg1) == COMPOUND_EXPR)
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
- fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
- else if (TREE_CODE_CLASS (code) == '2'
- || TREE_CODE_CLASS (code) == '<')
- {
- if (TREE_CODE (arg0) == COMPOUND_EXPR)
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold (build2 (code, type, TREE_OPERAND (arg0, 1),
- arg1)));
- if (TREE_CODE (arg1) == COMPOUND_EXPR
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
- fold (build2 (code, type,
- arg0, TREE_OPERAND (arg1, 1))));
-
- if (TREE_CODE (arg0) == COND_EXPR
- || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
- {
- tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
- /*cond_first_p=*/1);
- if (tem != NULL_TREE)
- return tem;
- }
-
- if (TREE_CODE (arg1) == COND_EXPR
- || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
- {
- tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
- /*cond_first_p=*/0);
- if (tem != NULL_TREE)
- return tem;
- }
- }
switch (code)
{
- case CONST_DECL:
- return fold (DECL_INITIAL (t));
-
case NOP_EXPR:
case FLOAT_EXPR:
case CONVERT_EXPR:
case FIX_CEIL_EXPR:
case FIX_FLOOR_EXPR:
case FIX_ROUND_EXPR:
- if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
- return TREE_OPERAND (t, 0);
+ if (TREE_TYPE (op0) == type)
+ return op0;
/* Handle cases of two conversions in a row. */
- if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
- || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
+ if (TREE_CODE (op0) == NOP_EXPR
+ || TREE_CODE (op0) == CONVERT_EXPR)
{
- tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
- tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
+ tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
+ tree inter_type = TREE_TYPE (op0);
int inside_int = INTEGRAL_TYPE_P (inside_type);
int inside_ptr = POINTER_TYPE_P (inside_type);
int inside_float = FLOAT_TYPE_P (inside_type);
if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
&& ((inter_int && final_int) || (inter_float && final_float))
&& inter_prec >= final_prec)
- return fold (build1 (code, type,
- TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
+ return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
/* Likewise, if the intermediate and final types are either both
float or both integer, we don't need the middle conversion if
&& ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
&& TYPE_MODE (type) == TYPE_MODE (inter_type))
&& ! final_ptr)
- return fold (build1 (code, type,
- TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
+ return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
/* If we have a sign-extension of a zero-extended value, we can
replace that by a single zero-extension. */
if (inside_int && inter_int && final_int
&& inside_prec < inter_prec && inter_prec < final_prec
&& inside_unsignedp && !inter_unsignedp)
- return fold (build1 (code, type,
- TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
+ return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
/* Two conversions in a row are not needed unless:
- some conversion is floating-point (overstrict for now), or
&& ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
&& TYPE_MODE (type) == TYPE_MODE (inter_type))
&& ! final_ptr)
- return fold (build1 (code, type,
- TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
+ return fold (build1 (code, type, TREE_OPERAND (op0, 0)));
}
- if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
- && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
+ if (TREE_CODE (op0) == MODIFY_EXPR
+ && TREE_CONSTANT (TREE_OPERAND (op0, 1))
/* Detect assigning a bitfield. */
- && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
+ && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
{
/* Don't leave an assignment inside a conversion
unless assigning a bitfield. */
- tree prev = TREE_OPERAND (t, 0);
tem = copy_node (t);
- TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
+ TREE_OPERAND (tem, 0) = TREE_OPERAND (op0, 1);
/* First do the assignment, then return converted constant. */
- tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
+ tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, fold (tem));
TREE_NO_WARNING (tem) = 1;
TREE_USED (tem) = 1;
return tem;
in c). This folds extension into the BIT_AND_EXPR. */
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (type) != BOOLEAN_TYPE
- && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
- && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
+ && TREE_CODE (op0) == BIT_AND_EXPR
+ && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
{
- tree and = TREE_OPERAND (t, 0);
+ tree and = op0;
tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
int change = 0;
change = (cst == 0);
#ifdef LOAD_EXTEND_OP
if (change
+ && !flag_syntax_only
&& (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
== ZERO_EXTEND))
{
#endif
}
if (change)
- return fold (build2 (BIT_AND_EXPR, type,
- fold_convert (type, and0),
- fold_convert (type, and1)));
+ {
+ tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
+ TREE_INT_CST_HIGH (and1));
+ tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
+ TREE_CONSTANT_OVERFLOW (and1));
+ return fold (build2 (BIT_AND_EXPR, type,
+ fold_convert (type, and0), tem));
+ }
}
/* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
T2 being pointers to types of the same size. */
- if (POINTER_TYPE_P (TREE_TYPE (t))
- && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
+ if (POINTER_TYPE_P (type)
+ && BINARY_CLASS_P (arg0)
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
{
tree arg00 = TREE_OPERAND (arg0, 0);
- tree t0 = TREE_TYPE (t);
+ tree t0 = type;
tree t1 = TREE_TYPE (arg00);
tree tt0 = TREE_TYPE (t0);
tree tt1 = TREE_TYPE (t1);
return tem ? tem : t;
case VIEW_CONVERT_EXPR:
- if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
- return build1 (VIEW_CONVERT_EXPR, type,
- TREE_OPERAND (TREE_OPERAND (t, 0), 0));
- return t;
-
- case COMPONENT_REF:
- if (TREE_CODE (arg0) == CONSTRUCTOR
- && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
- {
- tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
- if (m)
- return TREE_VALUE (m);
- }
- return t;
-
- case RANGE_EXPR:
- if (TREE_CONSTANT (t) != wins)
- {
- tem = copy_node (t);
- TREE_CONSTANT (tem) = wins;
- TREE_INVARIANT (tem) = wins;
- return tem;
- }
+ if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
+ return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
return t;
case NEGATE_EXPR:
if (negate_expr_p (arg0))
return fold_convert (type, negate_expr (arg0));
+ /* Convert - (~A) to A + 1. */
+ if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
+ return fold (build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
+ build_int_cst (type, 1)));
return t;
case ABS_EXPR:
}
else if (tree_expr_nonnegative_p (arg0))
return arg0;
+
+ /* Strip sign ops from argument. */
+ if (TREE_CODE (type) == REAL_TYPE)
+ {
+ tem = fold_strip_sign_ops (arg0);
+ if (tem)
+ return fold (build1 (ABS_EXPR, type, fold_convert (type, tem)));
+ }
return t;
case CONJ_EXPR:
return fold_not_const (arg0, type);
else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
return TREE_OPERAND (arg0, 0);
+ /* Convert ~ (-A) to A - 1. */
+ else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
+ return fold (build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
+ build_int_cst (type, 1)));
+ /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
+ else if (INTEGRAL_TYPE_P (type)
+ && ((TREE_CODE (arg0) == MINUS_EXPR
+ && integer_onep (TREE_OPERAND (arg0, 1)))
+ || (TREE_CODE (arg0) == PLUS_EXPR
+ && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
+ return fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)));
return t;
- case PLUS_EXPR:
- /* A + (-B) -> A - B */
- if (TREE_CODE (arg1) == NEGATE_EXPR)
- return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
- /* (-A) + B -> B - A */
- if (TREE_CODE (arg0) == NEGATE_EXPR
+ case TRUTH_NOT_EXPR:
+ /* The argument to invert_truthvalue must have Boolean type. */
+ if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
+ arg0 = fold_convert (boolean_type_node, arg0);
+
+ /* Note that the operand of this must be an int
+ and its values must be 0 or 1.
+ ("true" is a fixed value perhaps depending on the language,
+ but we don't handle values other than 1 correctly yet.) */
+ tem = invert_truthvalue (arg0);
+ /* Avoid infinite recursion. */
+ if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
+ return t;
+ return fold_convert (type, tem);
+
+ case REALPART_EXPR:
+ if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
+ return t;
+ else if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ return omit_one_operand (type, TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg0, 1));
+ else if (TREE_CODE (arg0) == COMPLEX_CST)
+ return TREE_REALPART (arg0);
+ else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ return fold (build2 (TREE_CODE (arg0), type,
+ fold (build1 (REALPART_EXPR, type,
+ TREE_OPERAND (arg0, 0))),
+ fold (build1 (REALPART_EXPR, type,
+ TREE_OPERAND (arg0, 1)))));
+ return t;
+
+ case IMAGPART_EXPR:
+ if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
+ return fold_convert (type, integer_zero_node);
+ else if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ return omit_one_operand (type, TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg0, 0));
+ else if (TREE_CODE (arg0) == COMPLEX_CST)
+ return TREE_IMAGPART (arg0);
+ else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ return fold (build2 (TREE_CODE (arg0), type,
+ fold (build1 (IMAGPART_EXPR, type,
+ TREE_OPERAND (arg0, 0))),
+ fold (build1 (IMAGPART_EXPR, type,
+ TREE_OPERAND (arg0, 1)))));
+ return t;
+
+ default:
+ return t;
+ } /* switch (code) */
+}
+
+/* Fold a ternary expression EXPR.  Return the folded expression if
+   folding is successful.  Otherwise, return the original
+   expression.  */
+
+static tree
+fold_ternary (tree expr)
+{
+  const tree t = expr;
+  const tree type = TREE_TYPE (expr);
+  tree tem;
+  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
+  enum tree_code code = TREE_CODE (t);
+  enum tree_code_class kind = TREE_CODE_CLASS (code);
+  int i;
+
+  gcc_assert (IS_EXPR_CODE_CLASS (kind)
+              && TREE_CODE_LENGTH (code) == 3);
+
+  /* For now, we iterate only twice even though we are handling
+     ternary expressions.  This is because we haven't defined arg2
+     yet.  */
+  for (i = 0; i < 2; i++)
+    {
+      tree op = TREE_OPERAND (t, i);
+
+      if (op == 0)
+        continue;       /* Valid for CALL_EXPR, at least.  */
+
+      /* Strip any conversions that don't change the mode.  This is
+         safe for every expression, except for a comparison expression
+         because its signedness is derived from its operands.  So, in
+         the latter case, only strip conversions that don't change the
+         signedness.
+
+         Note that this is done as an internal manipulation within the
+         constant folder, in order to find the simplest representation
+         of the arguments so that their form can be studied.  In any
+         cases, the appropriate type conversions should be put back in
+         the tree that will get out of the constant folder.  */
+      STRIP_NOPS (op);
+
+      if (i == 0)
+        arg0 = op;
+      else if (i == 1)
+        arg1 = op;
+    }
+
+  switch (code)
+    {
+    case COMPONENT_REF:
+      /* Fold a reference to a field of a CONSTRUCTOR directly to the
+         matching element's value, when the type has no placeholders.  */
+      if (TREE_CODE (arg0) == CONSTRUCTOR
+          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
+        {
+          tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
+          if (m)
+            return TREE_VALUE (m);
+        }
+      return t;
+
+    case COND_EXPR:
+      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
+         so all simple results must be passed through pedantic_non_lvalue.  */
+      if (TREE_CODE (arg0) == INTEGER_CST)
+        {
+          tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
+          /* Only optimize constant conditions when the selected branch
+             has the same type as the COND_EXPR.  This avoids optimizing
+             away "c ? x : throw", where the throw has a void type.  */
+          if (! VOID_TYPE_P (TREE_TYPE (tem))
+              || VOID_TYPE_P (type))
+            return pedantic_non_lvalue (tem);
+          return t;
+        }
+      /* If both arms are identical, the condition matters only for its
+         side effects: A ? B : B -> B.  */
+      if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
+        return pedantic_omit_one_operand (type, arg1, arg0);
+
+      /* If we have A op B ? A : C, we may be able to convert this to a
+         simpler expression, depending on the operation and the values
+         of B and C.  Signed zeros prevent all of these transformations,
+         for reasons given above each one.
+
+         Also try swapping the arguments and inverting the conditional.  */
+      if (COMPARISON_CLASS_P (arg0)
+          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
+                                             arg1, TREE_OPERAND (arg0, 1))
+          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
+        {
+          tem = fold_cond_expr_with_comparison (type, arg0,
+                                                TREE_OPERAND (t, 1),
+                                                TREE_OPERAND (t, 2));
+          if (tem)
+            return tem;
+        }
+
+      if (COMPARISON_CLASS_P (arg0)
+          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
+                                             TREE_OPERAND (t, 2),
+                                             TREE_OPERAND (arg0, 1))
+          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
+        {
+          tem = invert_truthvalue (arg0);
+          if (COMPARISON_CLASS_P (tem))
+            {
+              tem = fold_cond_expr_with_comparison (type, tem,
+                                                    TREE_OPERAND (t, 2),
+                                                    TREE_OPERAND (t, 1));
+              if (tem)
+                return tem;
+            }
+        }
+
+      /* If the second operand is simpler than the third, swap them
+         since that produces better jump optimization results.  */
+      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
+                                TREE_OPERAND (t, 2), false))
+        {
+          /* See if this can be inverted.  If it can't, possibly because
+             it was a floating-point inequality comparison, don't do
+             anything.  */
+          tem = invert_truthvalue (arg0);
+
+          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
+            return fold (build3 (code, type, tem,
+                                 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
+        }
+
+      /* Convert A ? 1 : 0 to simply A.  */
+      if (integer_onep (TREE_OPERAND (t, 1))
+          && integer_zerop (TREE_OPERAND (t, 2))
+          /* If we try to convert TREE_OPERAND (t, 0) to our type, the
+             call to fold will try to move the conversion inside
+             a COND, which will recurse.  In that case, the COND_EXPR
+             is probably the best choice, so leave it alone.  */
+          && type == TREE_TYPE (arg0))
+        return pedantic_non_lvalue (arg0);
+
+      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
+         over COND_EXPR in cases such as floating point comparisons.  */
+      if (integer_zerop (TREE_OPERAND (t, 1))
+          && integer_onep (TREE_OPERAND (t, 2))
+          && truth_value_p (TREE_CODE (arg0)))
+        return pedantic_non_lvalue (fold_convert (type,
+                                                  invert_truthvalue (arg0)));
+
+      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
+      if (TREE_CODE (arg0) == LT_EXPR
+          && integer_zerop (TREE_OPERAND (arg0, 1))
+          && integer_zerop (TREE_OPERAND (t, 2))
+          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
+        return fold_convert (type, fold (build2 (BIT_AND_EXPR,
+                                                 TREE_TYPE (tem), tem, arg1)));
+
+      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
+         already handled above.  */
+      if (TREE_CODE (arg0) == BIT_AND_EXPR
+          && integer_onep (TREE_OPERAND (arg0, 1))
+          && integer_zerop (TREE_OPERAND (t, 2))
+          && integer_pow2p (arg1))
+        {
+          /* NOTE: this TEM intentionally shadows the outer one.  */
+          tree tem = TREE_OPERAND (arg0, 0);
+          STRIP_NOPS (tem);
+          if (TREE_CODE (tem) == RSHIFT_EXPR
+              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
+              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
+                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
+            return fold (build2 (BIT_AND_EXPR, type,
+                                 TREE_OPERAND (tem, 0), arg1));
+        }
+
+      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
+         is probably obsolete because the first operand should be a
+         truth value (that's why we have the two cases above), but let's
+         leave it in until we can confirm this for all front-ends.  */
+      if (integer_zerop (TREE_OPERAND (t, 2))
+          && TREE_CODE (arg0) == NE_EXPR
+          && integer_zerop (TREE_OPERAND (arg0, 1))
+          && integer_pow2p (arg1)
+          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
+          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
+                              arg1, OEP_ONLY_CONST))
+        return pedantic_non_lvalue (fold_convert (type,
+                                                  TREE_OPERAND (arg0, 0)));
+
+      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
+      if (integer_zerop (TREE_OPERAND (t, 2))
+          && truth_value_p (TREE_CODE (arg0))
+          && truth_value_p (TREE_CODE (arg1)))
+        return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
+
+      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
+      if (integer_onep (TREE_OPERAND (t, 2))
+          && truth_value_p (TREE_CODE (arg0))
+          && truth_value_p (TREE_CODE (arg1)))
+        {
+          /* Only perform transformation if ARG0 is easily inverted.  */
+          tem = invert_truthvalue (arg0);
+          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
+            return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
+        }
+
+      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
+      if (integer_zerop (arg1)
+          && truth_value_p (TREE_CODE (arg0))
+          && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
+        {
+          /* Only perform transformation if ARG0 is easily inverted.  */
+          tem = invert_truthvalue (arg0);
+          if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
+            return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
+                                 TREE_OPERAND (t, 2)));
+        }
+
+      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
+      if (integer_onep (arg1)
+          && truth_value_p (TREE_CODE (arg0))
+          && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
+        return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
+                             TREE_OPERAND (t, 2)));
+
+      return t;
+
+    case CALL_EXPR:
+      /* Check for a built-in function.  */
+      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
+          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
+              == FUNCTION_DECL)
+          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
+        {
+          tree tmp = fold_builtin (t, false);
+          if (tmp)
+            return tmp;
+        }
+      return t;
+
+    default:
+      return t;
+    } /* switch (code) */
+}
+
+/* Perform constant folding and related simplification of EXPR.
+ The related simplifications include x*1 => x, x*0 => 0, etc.,
+ and application of the associative law.
+ NOP_EXPR conversions may be removed freely (as long as we
+ are careful not to change the type of the overall expression).
+ We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
+ but we can constant-fold them if they have constant operands. */
+
+#ifdef ENABLE_FOLD_CHECKING
+# define fold(x) fold_1 (x)
+static tree fold_1 (tree);
+static
+#endif
+tree
+fold (tree expr)
+{
+ const tree t = expr;
+ const tree type = TREE_TYPE (expr);
+ tree t1 = NULL_TREE;
+ tree tem;
+ tree arg0 = NULL_TREE, arg1 = NULL_TREE;
+ enum tree_code code = TREE_CODE (t);
+ enum tree_code_class kind = TREE_CODE_CLASS (code);
+
+ /* WINS will be nonzero when the switch is done
+ if all operands are constant. */
+ int wins = 1;
+
+ /* Return right away if a constant. */
+ if (kind == tcc_constant)
+ return t;
+
+ if (IS_EXPR_CODE_CLASS (kind))
+ {
+ switch (TREE_CODE_LENGTH (code))
+ {
+ case 1:
+ return fold_unary (expr);
+ case 3:
+ return fold_ternary (expr);
+ default:
+ break;
+ }
+ }
+
+ if (IS_EXPR_CODE_CLASS (kind))
+ {
+ int len = TREE_CODE_LENGTH (code);
+ int i;
+ for (i = 0; i < len; i++)
+ {
+ tree op = TREE_OPERAND (t, i);
+ tree subop;
+
+ if (op == 0)
+ continue; /* Valid for CALL_EXPR, at least. */
+
+ /* Strip any conversions that don't change the mode. This is
+ safe for every expression, except for a comparison expression
+ because its signedness is derived from its operands. So, in
+ the latter case, only strip conversions that don't change the
+ signedness.
+
+ Note that this is done as an internal manipulation within the
+ constant folder, in order to find the simplest representation
+ of the arguments so that their form can be studied. In any
+ cases, the appropriate type conversions should be put back in
+ the tree that will get out of the constant folder. */
+ if (kind == tcc_comparison)
+ STRIP_SIGN_NOPS (op);
+ else
+ STRIP_NOPS (op);
+
+ if (TREE_CODE (op) == COMPLEX_CST)
+ subop = TREE_REALPART (op);
+ else
+ subop = op;
+
+ if (TREE_CODE (subop) != INTEGER_CST
+ && TREE_CODE (subop) != REAL_CST)
+ /* Note that TREE_CONSTANT isn't enough:
+ static var addresses are constant but we can't
+ do arithmetic on them. */
+ wins = 0;
+
+ if (i == 0)
+ arg0 = op;
+ else if (i == 1)
+ arg1 = op;
+ }
+ }
+
+ /* If this is a commutative operation, and ARG0 is a constant, move it
+ to ARG1 to reduce the number of tests below. */
+ if (commutative_tree_code (code)
+ && tree_swap_operands_p (arg0, arg1, true))
+ return fold (build2 (code, type, TREE_OPERAND (t, 1),
+ TREE_OPERAND (t, 0)));
+
+ /* Now WINS is set as described above,
+ ARG0 is the first operand of EXPR,
+ and ARG1 is the second operand (if it has more than one operand).
+
+ First check for cases where an arithmetic operation is applied to a
+ compound, conditional, or comparison operation. Push the arithmetic
+ operation inside the compound or conditional to see if any folding
+ can then be done. Convert comparison to conditional for this purpose.
+   This also optimizes non-constant cases that used to be done in
+ expand_expr.
+
+ Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
+ one of the operands is a comparison and the other is a comparison, a
+ BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
+ code below would make the expression more complex. Change it to a
+ TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
+ TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
+
+ if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
+ || code == EQ_EXPR || code == NE_EXPR)
+ && ((truth_value_p (TREE_CODE (arg0))
+ && (truth_value_p (TREE_CODE (arg1))
+ || (TREE_CODE (arg1) == BIT_AND_EXPR
+ && integer_onep (TREE_OPERAND (arg1, 1)))))
+ || (truth_value_p (TREE_CODE (arg1))
+ && (truth_value_p (TREE_CODE (arg0))
+ || (TREE_CODE (arg0) == BIT_AND_EXPR
+ && integer_onep (TREE_OPERAND (arg0, 1)))))))
+ {
+ tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
+ : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
+ : TRUTH_XOR_EXPR,
+ type, fold_convert (boolean_type_node, arg0),
+ fold_convert (boolean_type_node, arg1)));
+
+ if (code == EQ_EXPR)
+ tem = invert_truthvalue (tem);
+
+ return tem;
+ }
+
+ if (TREE_CODE_CLASS (code) == tcc_comparison
+ && TREE_CODE (arg0) == COMPOUND_EXPR)
+ return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
+ fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
+ else if (TREE_CODE_CLASS (code) == tcc_comparison
+ && TREE_CODE (arg1) == COMPOUND_EXPR)
+ return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
+ fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
+ else if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison)
+ {
+ if (TREE_CODE (arg0) == COMPOUND_EXPR)
+ return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
+ fold (build2 (code, type, TREE_OPERAND (arg0, 1),
+ arg1)));
+ if (TREE_CODE (arg1) == COMPOUND_EXPR
+ && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
+ return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
+ fold (build2 (code, type,
+ arg0, TREE_OPERAND (arg1, 1))));
+
+ if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
+ {
+ tem = fold_binary_op_with_conditional_arg (t, code, arg0, arg1,
+ /*cond_first_p=*/1);
+ if (tem != NULL_TREE)
+ return tem;
+ }
+
+ if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
+ {
+ tem = fold_binary_op_with_conditional_arg (t, code, arg1, arg0,
+ /*cond_first_p=*/0);
+ if (tem != NULL_TREE)
+ return tem;
+ }
+ }
+
+ switch (code)
+ {
+ case CONST_DECL:
+ return fold (DECL_INITIAL (t));
+
+ case RANGE_EXPR:
+ if (TREE_CONSTANT (t) != wins)
+ {
+ tem = copy_node (t);
+ TREE_CONSTANT (tem) = wins;
+ TREE_INVARIANT (tem) = wins;
+ return tem;
+ }
+ return t;
+
+ case PLUS_EXPR:
+ /* A + (-B) -> A - B */
+ if (TREE_CODE (arg1) == NEGATE_EXPR)
+ return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
+ /* (-A) + B -> B - A */
+ if (TREE_CODE (arg0) == NEGATE_EXPR
&& reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
+
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
+ if (tem)
+ return tem;
+ }
+
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
/* Reassociate (plus (plus (mult) (foo)) (mult)) as
(plus (plus (mult) (mult)) (foo)) so that we can
take advantage of the factoring cases below. */
- if ((TREE_CODE (arg0) == PLUS_EXPR
+ if (((TREE_CODE (arg0) == PLUS_EXPR
+ || TREE_CODE (arg0) == MINUS_EXPR)
&& TREE_CODE (arg1) == MULT_EXPR)
- || (TREE_CODE (arg1) == PLUS_EXPR
+ || ((TREE_CODE (arg1) == PLUS_EXPR
+ || TREE_CODE (arg1) == MINUS_EXPR)
&& TREE_CODE (arg0) == MULT_EXPR))
{
tree parg0, parg1, parg, marg;
+ enum tree_code pcode;
- if (TREE_CODE (arg0) == PLUS_EXPR)
+ if (TREE_CODE (arg1) == MULT_EXPR)
parg = arg0, marg = arg1;
else
parg = arg1, marg = arg0;
+ pcode = TREE_CODE (parg);
parg0 = TREE_OPERAND (parg, 0);
parg1 = TREE_OPERAND (parg, 1);
STRIP_NOPS (parg0);
if (TREE_CODE (parg0) == MULT_EXPR
&& TREE_CODE (parg1) != MULT_EXPR)
- return fold (build2 (PLUS_EXPR, type,
+ return fold (build2 (pcode, type,
fold (build2 (PLUS_EXPR, type,
fold_convert (type, parg0),
fold_convert (type, marg))),
if (TREE_CODE (parg0) != MULT_EXPR
&& TREE_CODE (parg1) == MULT_EXPR)
return fold (build2 (PLUS_EXPR, type,
- fold (build2 (PLUS_EXPR, type,
- fold_convert (type, parg1),
- fold_convert (type, marg))),
- fold_convert (type, parg0)));
+ fold_convert (type, parg0),
+ fold (build2 (pcode, type,
+ fold_convert (type, marg),
+ fold_convert (type,
+ parg1)))));
}
if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
if (exact_log2 (int11) > 0 && int01 % int11 == 0)
{
alt0 = fold (build2 (MULT_EXPR, type, arg00,
- build_int_2 (int01 / int11, 0)));
+ build_int_cst (NULL_TREE,
+ int01 / int11)));
alt1 = arg10;
same = arg11;
}
if (same)
return fold (build2 (MULT_EXPR, type,
fold (build2 (PLUS_EXPR, type,
- alt0, alt1)),
+ fold_convert (type, alt0),
+ fold_convert (type, alt1))),
same));
}
+
+ /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
+ of the array.  The loop optimizer sometimes produces this type of
+ expression.  */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (arg1) == MULT_EXPR)
+ {
+ tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
+ if (tem)
+ return fold_convert (type, fold (tem));
+ }
+ else if (TREE_CODE (arg1) == ADDR_EXPR
+ && TREE_CODE (arg0) == MULT_EXPR)
+ {
+ tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
+ if (tem)
+ return fold_convert (type, fold (tem));
+ }
}
else
{
TREE_OPERAND (arg0, 0),
build_real (type, c1)));
}
- /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
+ /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg1) == PLUS_EXPR
&& TREE_CODE (arg0) != MULT_EXPR)
return fold (build2 (PLUS_EXPR, type, tree0, tree11));
}
}
- /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
+ /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg0) == PLUS_EXPR
&& TREE_CODE (arg1) != MULT_EXPR)
return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
TREE_OPERAND (arg0, 0)));
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
+ if (tem)
+ return tem;
+ }
+
if (! FLOAT_TYPE_P (type))
{
if (! wins && integer_zerop (arg0))
|| (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
+ /* Try folding difference of addresses. */
+ {
+ HOST_WIDE_INT diff;
+
+ if ((TREE_CODE (arg0) == ADDR_EXPR
+ || TREE_CODE (arg1) == ADDR_EXPR)
+ && ptr_difference_const (arg0, arg1, &diff))
+ return build_int_cst_type (type, diff);
+ }
+
+ /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
+ of the array.  The loop optimizer sometimes produces this type of
+ expression.  */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (arg1) == MULT_EXPR)
+ {
+ tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
+ if (tem)
+ return fold_convert (type, fold (tem));
+ }
+
if (TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (arg1) == MULT_EXPR
- && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
+ && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
{
/* (A * C) - (B * C) -> (A-B) * C. */
if (operand_equal_p (TREE_OPERAND (arg0, 1),
negate_expr (arg0),
TREE_OPERAND (arg1, 0)));
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_mult (type, arg0, arg1);
+ if (tem)
+ return tem;
+ }
+
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
TREE_OPERAND (arg0, 1)));
}
+ /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
+ if (operand_equal_p (arg0, arg1, 0))
+ {
+ tree tem = fold_strip_sign_ops (arg0);
+ if (tem != NULL_TREE)
+ {
+ tem = fold_convert (type, tem);
+ return fold (build2 (MULT_EXPR, type, tem, tem));
+ }
+ }
+
if (flag_unsafe_math_optimizations)
{
enum built_in_function fcode0 = builtin_mathfn_code (arg0);
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_2 (-1, -1);
- TREE_TYPE (t1) = type;
+ t1 = build_int_cst (type, -1);
t1 = force_fit_type (t1, 0, false, false);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_2 (-1, -1);
- TREE_TYPE (t1) = type;
+ t1 = build_int_cst (type, -1);
t1 = force_fit_type (t1, 0, false, false);
return omit_one_operand (type, t1, arg0);
}
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_2 (-1, -1);
- TREE_TYPE (t1) = type;
+ t1 = build_int_cst (type, -1);
t1 = force_fit_type (t1, 0, false, false);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_2 (-1, -1);
- TREE_TYPE (t1) = type;
+ t1 = build_int_cst (type, -1);
t1 = force_fit_type (t1, 0, false, false);
return omit_one_operand (type, t1, arg0);
}
TREE_OPERAND (arg1, 0)));
}
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_div (type, arg0, arg1, code);
+ if (tem)
+ return tem;
+ }
+
if (flag_unsafe_math_optimizations)
{
enum built_in_function fcode = builtin_mathfn_code (arg1);
code, NULL_TREE)))
return fold_convert (type, tem);
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_div (type, arg0, arg1, code);
+ if (tem)
+ return tem;
+ }
goto binary;
case CEIL_MOD_EXPR:
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
+ /* X % 1 is always zero, but be sure to preserve any side
+ effects in X. */
if (integer_onep (arg1))
return omit_one_operand (type, integer_zero_node, arg0);
+
+ /* For X % 0, return X % 0 unchanged so that we can get the
+ proper warnings and errors.  */
if (integer_zerop (arg1))
return t;
+ /* 0 % X is always zero, but be sure to preserve any side
+ effects in X. Place this after checking for X == 0. */
+ if (integer_zerop (arg0))
+ return omit_one_operand (type, integer_zero_node, arg1);
+
/* X % -1 is zero. */
if (!TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
}
- mask = build_int_2 (low, high);
- TREE_TYPE (mask) = type;
+ mask = build_int_cst_wide (type, low, high);
return fold (build2 (BIT_AND_EXPR, type,
fold_convert (type, arg0), mask));
}
RROTATE_EXPR by a new constant. */
if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
{
- tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
+ tree tem = build_int_cst (NULL_TREE,
+ GET_MODE_BITSIZE (TYPE_MODE (type)));
tem = fold_convert (TREE_TYPE (arg1), tem);
tem = const_binop (MINUS_EXPR, tem, arg1, 0);
return fold (build2 (RROTATE_EXPR, type, arg0, tem));
return omit_one_operand (type, arg1, arg0);
goto associate;
- case TRUTH_NOT_EXPR:
- /* The argument to invert_truthvalue must have Boolean type. */
- if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
- arg0 = fold_convert (boolean_type_node, arg0);
-
- /* Note that the operand of this must be an int
- and its values must be 0 or 1.
- ("true" is a fixed value perhaps depending on the language,
- but we don't handle values other than 1 correctly yet.) */
- tem = invert_truthvalue (arg0);
- /* Avoid infinite recursion. */
- if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
- {
- tem = fold_single_bit_test (code, arg0, arg1, type);
- if (tem)
- return tem;
- return t;
- }
- return fold_convert (type, tem);
-
case TRUTH_ANDIF_EXPR:
/* Note that the operands of this must be ints
and their values must be 0 or 1.
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand (type, integer_zero_node, arg0);
+ /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
+ means A >= Y && A != MAX, but in this case we know that
+ A < X <= MAX. */
+
+ if (!TREE_SIDE_EFFECTS (arg0)
+ && !TREE_SIDE_EFFECTS (arg1))
+ {
+ tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
+ if (tem)
+ return fold (build2 (code, type, tem, arg1));
+
+ tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
+ if (tem)
+ return fold (build2 (code, type, arg0, tem));
+ }
+
truth_andor:
/* We only do these simplifications if we are optimizing. */
if (!optimize)
? code == EQ_EXPR : code != EQ_EXPR,
type);
+ /* If this is a comparison of two exprs that look like an
+ ARRAY_REF of the same object, then we can fold this to a
+ comparison of the two offsets. */
+ if (COMPARISON_CLASS_P (t))
+ {
+ tree base0, offset0, base1, offset1;
+
+ if (extract_array_ref (arg0, &base0, &offset0)
+ && extract_array_ref (arg1, &base1, &offset1)
+ && operand_equal_p (base0, base1, 0))
+ {
+ if (offset0 == NULL_TREE
+ && offset1 == NULL_TREE)
+ {
+ offset0 = integer_zero_node;
+ offset1 = integer_zero_node;
+ }
+ else if (offset0 == NULL_TREE)
+ offset0 = build_int_cst (TREE_TYPE (offset1), 0);
+ else if (offset1 == NULL_TREE)
+ offset1 = build_int_cst (TREE_TYPE (offset0), 0);
+
+ if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
+ return fold (build2 (code, type, offset0, offset1));
+ }
+ }
+
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree targ0 = strip_float_extensions (arg0);
|| integer_onep (folded_compare))
return omit_one_operand (type, folded_compare, varop);
- shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
- 0);
+ shift = build_int_cst (NULL_TREE,
+ TYPE_PRECISION (TREE_TYPE (varop)) - size);
shift = fold_convert (TREE_TYPE (varop), shift);
newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
newconst, shift));
/* Comparisons with the highest or lowest possible integer of
the specified size will have known values.
- This is quite similar to fold_relational_hi_lo; however, my
- attempts to share the code have been nothing but trouble.
- I give up for now. */
+ This is quite similar to fold_relational_hi_lo; however,
+ attempts to share the code have been nothing but trouble.  */
{
int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
if (TREE_CODE (arg1) == INTEGER_CST
&& ! TREE_CONSTANT_OVERFLOW (arg1)
- && width <= HOST_BITS_PER_WIDE_INT
+ && width <= 2 * HOST_BITS_PER_WIDE_INT
&& (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
|| POINTER_TYPE_P (TREE_TYPE (arg1))))
{
- unsigned HOST_WIDE_INT signed_max;
- unsigned HOST_WIDE_INT max, min;
+ HOST_WIDE_INT signed_max_hi;
+ unsigned HOST_WIDE_INT signed_max_lo;
+ unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
- signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
-
- if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ if (width <= HOST_BITS_PER_WIDE_INT)
{
- max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
- min = 0;
+ signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
+ - 1;
+ signed_max_hi = 0;
+ max_hi = 0;
+
+ if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ {
+ max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
+ min_lo = 0;
+ min_hi = 0;
+ }
+ else
+ {
+ max_lo = signed_max_lo;
+ min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
+ min_hi = -1;
+ }
}
else
{
- max = signed_max;
- min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
+ width -= HOST_BITS_PER_WIDE_INT;
+ signed_max_lo = -1;
+ signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
+ - 1;
+ max_lo = -1;
+ min_lo = 0;
+
+ if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ {
+ max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
+ min_hi = 0;
+ }
+ else
+ {
+ max_hi = signed_max_hi;
+ min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
+ }
}
- if (TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_LOW (arg1) == max)
+ if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
+ && TREE_INT_CST_LOW (arg1) == max_lo)
switch (code)
{
case GT_EXPR:
default:
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_LOW (arg1) == max - 1)
+ else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
+ == max_hi
+ && TREE_INT_CST_LOW (arg1) == max_lo - 1)
switch (code)
{
case GT_EXPR:
default:
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (arg1) == min)
+ else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
+ == min_hi
+ && TREE_INT_CST_LOW (arg1) == min_lo)
switch (code)
{
case LT_EXPR:
default:
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (arg1) == min + 1)
+ else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
+ == min_hi
+ && TREE_INT_CST_LOW (arg1) == min_lo + 1)
switch (code)
{
case GE_EXPR:
}
else if (!in_gimple_form
- && TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_LOW (arg1) == signed_max
+ && TREE_INT_CST_HIGH (arg1) == signed_max_hi
+ && TREE_INT_CST_LOW (arg1) == signed_max_lo
&& TYPE_UNSIGNED (TREE_TYPE (arg1))
/* signed_type does not work on pointer types. */
&& INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
return fold (build2 (code, type,
TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
- /* If we are widening one operand of an integer comparison,
- see if the other operand is similarly being widened. Perhaps we
- can do the comparison in the narrower type. */
else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
- && TREE_CODE (arg0) == NOP_EXPR
- && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
- && (code == EQ_EXPR || code == NE_EXPR
- || TYPE_UNSIGNED (TREE_TYPE (arg0))
- == TYPE_UNSIGNED (TREE_TYPE (tem)))
- && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
- && (TREE_TYPE (t1) == TREE_TYPE (tem)
- || (TREE_CODE (t1) == INTEGER_CST
- && int_fits_type_p (t1, TREE_TYPE (tem)))))
- return fold (build2 (code, type, tem,
- fold_convert (TREE_TYPE (tem), t1)));
+ && TREE_CODE (arg0) == NOP_EXPR)
+ {
+ /* If we are widening one operand of an integer comparison,
+ see if the other operand is similarly being widened. Perhaps we
+ can do the comparison in the narrower type. */
+ tem = fold_widened_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+
+ /* Or if we are changing signedness. */
+ tem = fold_sign_changed_comparison (code, type, arg0, arg1);
+ if (tem)
+ return tem;
+ }
/* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
constant, we can simplify it. */
build2 (LE_EXPR, type,
TREE_OPERAND (arg0, 0), arg1)));
+ /* Convert ABS_EXPR<x> >= 0 to true. */
+ else if (code == GE_EXPR
+ && tree_expr_nonnegative_p (arg0)
+ && (integer_zerop (arg1)
+ || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
+ && real_zerop (arg1))))
+ return omit_one_operand (type, integer_one_node, arg0);
+
+ /* Convert ABS_EXPR<x> < 0 to false. */
+ else if (code == LT_EXPR
+ && tree_expr_nonnegative_p (arg0)
+ && (integer_zerop (arg1) || real_zerop (arg1)))
+ return omit_one_operand (type, integer_zero_node, arg0);
+
+ /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
+ else if ((code == EQ_EXPR || code == NE_EXPR)
+ && TREE_CODE (arg0) == ABS_EXPR
+ && (integer_zerop (arg1) || real_zerop (arg1)))
+ return fold (build2 (code, type, TREE_OPERAND (arg0, 0), arg1));
+
/* If this is an EQ or NE comparison with zero and ARG0 is
(1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
two operations, but the latter can be done in one less insn
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree dandnotc
- = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- arg1, build1 (BIT_NOT_EXPR,
- TREE_TYPE (TREE_OPERAND (arg0, 1)),
- TREE_OPERAND (arg0, 1))));
+ tree notc = fold (build1 (BIT_NOT_EXPR,
+ TREE_TYPE (TREE_OPERAND (arg0, 1)),
+ TREE_OPERAND (arg0, 1)));
+ tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ arg1, notc));
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (dandnotc))
return omit_one_operand (type, rslt, arg0);
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree candnotd
- = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 1),
- build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
+ tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
+ tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ TREE_OPERAND (arg0, 1), notd));
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (candnotd))
return omit_one_operand (type, rslt, arg0);
case LT_EXPR:
return constant_boolean_node (0, type);
default:
- abort ();
+ gcc_unreachable ();
}
}
tree arglist;
if (fndecl
- && DECL_BUILT_IN (fndecl)
- && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
&& (arglist = TREE_OPERAND (arg0, 1))
&& TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
/* We can fold X/C1 op C2 where C1 and C2 are integer constants
into a single range test. */
- if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
+ if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
+ || TREE_CODE (arg0) == EXACT_DIV_EXPR)
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !integer_zerop (TREE_OPERAND (arg0, 1))
if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
newtype = TREE_TYPE (targ1);
- if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
- return fold (build2 (code, type, fold_convert (newtype, targ0),
- fold_convert (newtype, targ1)));
- }
-
- return t;
-
- case COND_EXPR:
- /* Pedantic ANSI C says that a conditional expression is never an lvalue,
- so all simple results must be passed through pedantic_non_lvalue. */
- if (TREE_CODE (arg0) == INTEGER_CST)
- {
- tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
- /* Only optimize constant conditions when the selected branch
- has the same type as the COND_EXPR. This avoids optimizing
- away "c ? x : throw", where the throw has a void type. */
- if (! VOID_TYPE_P (TREE_TYPE (tem))
- || VOID_TYPE_P (type))
- return pedantic_non_lvalue (tem);
- return t;
- }
- if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
- return pedantic_omit_one_operand (type, arg1, arg0);
-
- /* If we have A op B ? A : C, we may be able to convert this to a
- simpler expression, depending on the operation and the values
- of B and C. Signed zeros prevent all of these transformations,
- for reasons given above each one.
-
- Also try swapping the arguments and inverting the conditional. */
- if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
- && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
- arg1, TREE_OPERAND (arg0, 1))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
- {
- tem = fold_cond_expr_with_comparison (type, arg0,
- TREE_OPERAND (t, 1),
- TREE_OPERAND (t, 2));
- if (tem)
- return tem;
- }
-
- if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
- && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (t, 2),
- TREE_OPERAND (arg0, 1))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
- {
- tem = invert_truthvalue (arg0);
- if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
- {
- tem = fold_cond_expr_with_comparison (type, tem,
- TREE_OPERAND (t, 2),
- TREE_OPERAND (t, 1));
- if (tem)
- return tem;
- }
- }
-
- /* If the second operand is simpler than the third, swap them
- since that produces better jump optimization results. */
- if (tree_swap_operands_p (TREE_OPERAND (t, 1),
- TREE_OPERAND (t, 2), false))
- {
- /* See if this can be inverted. If it can't, possibly because
- it was a floating-point inequality comparison, don't do
- anything. */
- tem = invert_truthvalue (arg0);
-
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold (build3 (code, type, tem,
- TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
- }
-
- /* Convert A ? 1 : 0 to simply A. */
- if (integer_onep (TREE_OPERAND (t, 1))
- && integer_zerop (TREE_OPERAND (t, 2))
- /* If we try to convert TREE_OPERAND (t, 0) to our type, the
- call to fold will try to move the conversion inside
- a COND, which will recurse. In that case, the COND_EXPR
- is probably the best choice, so leave it alone. */
- && type == TREE_TYPE (arg0))
- return pedantic_non_lvalue (arg0);
-
- /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
- over COND_EXPR in cases such as floating point comparisons. */
- if (integer_zerop (TREE_OPERAND (t, 1))
- && integer_onep (TREE_OPERAND (t, 2))
- && truth_value_p (TREE_CODE (arg0)))
- return pedantic_non_lvalue (fold_convert (type,
- invert_truthvalue (arg0)));
-
- /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
- if (TREE_CODE (arg0) == LT_EXPR
- && integer_zerop (TREE_OPERAND (arg0, 1))
- && integer_zerop (TREE_OPERAND (t, 2))
- && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
- return fold_convert (type, fold (build2 (BIT_AND_EXPR,
- TREE_TYPE (tem), tem, arg1)));
-
- /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
- already handled above. */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && integer_onep (TREE_OPERAND (arg0, 1))
- && integer_zerop (TREE_OPERAND (t, 2))
- && integer_pow2p (arg1))
- {
- tree tem = TREE_OPERAND (arg0, 0);
- STRIP_NOPS (tem);
- if (TREE_CODE (tem) == RSHIFT_EXPR
- && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
- TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
- return fold (build2 (BIT_AND_EXPR, type,
- TREE_OPERAND (tem, 0), arg1));
- }
-
- /* A & N ? N : 0 is simply A & N if N is a power of two. This
- is probably obsolete because the first operand should be a
- truth value (that's why we have the two cases above), but let's
- leave it in until we can confirm this for all front-ends. */
- if (integer_zerop (TREE_OPERAND (t, 2))
- && TREE_CODE (arg0) == NE_EXPR
- && integer_zerop (TREE_OPERAND (arg0, 1))
- && integer_pow2p (arg1)
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
- && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
- arg1, OEP_ONLY_CONST))
- return pedantic_non_lvalue (fold_convert (type,
- TREE_OPERAND (arg0, 0)));
-
- /* Convert A ? B : 0 into A && B if A and B are truth values. */
- if (integer_zerop (TREE_OPERAND (t, 2))
- && truth_value_p (TREE_CODE (arg0))
- && truth_value_p (TREE_CODE (arg1)))
- return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
-
- /* Convert A ? B : 1 into !A || B if A and B are truth values. */
- if (integer_onep (TREE_OPERAND (t, 2))
- && truth_value_p (TREE_CODE (arg0))
- && truth_value_p (TREE_CODE (arg1)))
- {
- /* Only perform transformation if ARG0 is easily inverted. */
- tem = invert_truthvalue (arg0);
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
- }
-
- /* Convert A ? 0 : B into !A && B if A and B are truth values. */
- if (integer_zerop (arg1)
- && truth_value_p (TREE_CODE (arg0))
- && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
- {
- /* Only perform transformation if ARG0 is easily inverted. */
- tem = invert_truthvalue (arg0);
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
- TREE_OPERAND (t, 2)));
- }
-
- /* Convert A ? 1 : B into A || B if A and B are truth values. */
- if (integer_onep (arg1)
- && truth_value_p (TREE_CODE (arg0))
- && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
- return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
- TREE_OPERAND (t, 2)));
+ if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
+ return fold (build2 (code, type, fold_convert (newtype, targ0),
+ fold_convert (newtype, targ1)));
+ }
return t;
return build_complex (type, arg0, arg1);
return t;
- case REALPART_EXPR:
- if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
- return t;
- else if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return omit_one_operand (type, TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- else if (TREE_CODE (arg0) == COMPLEX_CST)
- return TREE_REALPART (arg0);
- else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold (build2 (TREE_CODE (arg0), type,
- fold (build1 (REALPART_EXPR, type,
- TREE_OPERAND (arg0, 0))),
- fold (build1 (REALPART_EXPR, type,
- TREE_OPERAND (arg0, 1)))));
- return t;
-
- case IMAGPART_EXPR:
- if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
- return fold_convert (type, integer_zero_node);
- else if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return omit_one_operand (type, TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg0, 0));
- else if (TREE_CODE (arg0) == COMPLEX_CST)
- return TREE_IMAGPART (arg0);
- else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- return fold (build2 (TREE_CODE (arg0), type,
- fold (build1 (IMAGPART_EXPR, type,
- TREE_OPERAND (arg0, 0))),
- fold (build1 (IMAGPART_EXPR, type,
- TREE_OPERAND (arg0, 1)))));
- return t;
-
- /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
- appropriate. */
- case CLEANUP_POINT_EXPR:
- if (! has_cleanups (arg0))
- return TREE_OPERAND (t, 0);
-
- {
- enum tree_code code0 = TREE_CODE (arg0);
- int kind0 = TREE_CODE_CLASS (code0);
- tree arg00 = TREE_OPERAND (arg0, 0);
- tree arg01;
-
- if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
- return fold (build1 (code0, type,
- fold (build1 (CLEANUP_POINT_EXPR,
- TREE_TYPE (arg00), arg00))));
-
- if (kind0 == '<' || kind0 == '2'
- || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
- || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
- || code0 == TRUTH_XOR_EXPR)
- {
- arg01 = TREE_OPERAND (arg0, 1);
-
- if (TREE_CONSTANT (arg00)
- || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
- && ! has_cleanups (arg00)))
- return fold (build2 (code0, type, arg00,
- fold (build1 (CLEANUP_POINT_EXPR,
- TREE_TYPE (arg01), arg01))));
-
- if (TREE_CONSTANT (arg01))
- return fold (build2 (code0, type,
- fold (build1 (CLEANUP_POINT_EXPR,
- TREE_TYPE (arg00), arg00)),
- arg01));
- }
-
- return t;
- }
-
- case CALL_EXPR:
- /* Check for a built-in function. */
- if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
- == FUNCTION_DECL)
- && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
- {
- tree tmp = fold_builtin (t, false);
- if (tmp)
- return tmp;
- }
- return t;
-
default:
return t;
} /* switch (code) */
char buf[sizeof (struct tree_decl)];
int i, len;
- if (sizeof (struct tree_exp) + 5 * sizeof (tree)
- > sizeof (struct tree_decl)
- || sizeof (struct tree_type) > sizeof (struct tree_decl))
- abort ();
+ gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
+ <= sizeof (struct tree_decl))
+ && sizeof (struct tree_type) <= sizeof (struct tree_decl));
if (expr == NULL)
return;
slot = htab_find_slot (ht, expr, INSERT);
return;
*slot = expr;
code = TREE_CODE (expr);
- if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
+ if (TREE_CODE_CLASS (code) == tcc_declaration
+ && DECL_ASSEMBLER_NAME_SET_P (expr))
{
/* Allow DECL_ASSEMBLER_NAME to be modified. */
memcpy (buf, expr, tree_size (expr));
expr = (tree) buf;
SET_DECL_ASSEMBLER_NAME (expr, NULL);
}
- else if (TREE_CODE_CLASS (code) == 't'
- && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
+ else if (TREE_CODE_CLASS (code) == tcc_type
+ && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
+ || TYPE_CACHED_VALUES_P (expr)))
{
- /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
+ /* Allow these fields to be modified. */
memcpy (buf, expr, tree_size (expr));
expr = (tree) buf;
TYPE_POINTER_TO (expr) = NULL;
TYPE_REFERENCE_TO (expr) = NULL;
+ TYPE_CACHED_VALUES_P (expr) = 0;
+ TYPE_CACHED_VALUES (expr) = NULL;
}
md5_process_bytes (expr, tree_size (expr), ctx);
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
- if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
+ if (TREE_CODE_CLASS (code) != tcc_type
+ && TREE_CODE_CLASS (code) != tcc_declaration)
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
switch (TREE_CODE_CLASS (code))
{
- case 'c':
+ case tcc_constant:
switch (code)
{
case STRING_CST:
break;
}
break;
- case 'x':
+ case tcc_exceptional:
switch (code)
{
case TREE_LIST:
break;
}
break;
- case 'e':
- case 'r':
- case '<':
- case '1':
- case '2':
- case 's':
- len = first_rtl_op (code);
+ case tcc_expression:
+ case tcc_reference:
+ case tcc_comparison:
+ case tcc_unary:
+ case tcc_binary:
+ case tcc_statement:
+ len = TREE_CODE_LENGTH (code);
for (i = 0; i < len; ++i)
fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
break;
- case 'd':
+ case tcc_declaration:
fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
fold_checksum_tree (DECL_NAME (expr), ctx, ht);
fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
break;
- case 't':
+ case tcc_type:
if (TREE_CODE (expr) == ENUMERAL_TYPE)
fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
}
fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
- fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
+ if (TREE_CODE (expr) == RECORD_TYPE
+ || TREE_CODE (expr) == UNION_TYPE
+ || TREE_CODE (expr) == QUAL_UNION_TYPE)
+ fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
break;
default:
{
int saved_signaling_nans = flag_signaling_nans;
int saved_trapping_math = flag_trapping_math;
+ int saved_rounding_math = flag_rounding_math;
int saved_trapv = flag_trapv;
tree result;
flag_signaling_nans = 0;
flag_trapping_math = 0;
+ flag_rounding_math = 0;
flag_trapv = 0;
result = fold (expr);
flag_signaling_nans = saved_signaling_nans;
flag_trapping_math = saved_trapping_math;
+ flag_rounding_math = saved_rounding_math;
flag_trapv = saved_trapv;
return result;
switch (TREE_CODE (top))
{
+ case BIT_AND_EXPR:
+ /* Bitwise and provides a power of two multiple. If the mask is
+ a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
+ if (!integer_pow2p (bottom))
+ return 0;
+ /* FALLTHRU */
+
case MULT_EXPR:
return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
|| multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
{
tree fndecl = get_callee_fndecl (t);
tree arglist = TREE_OPERAND (t, 1);
- if (fndecl
- && DECL_BUILT_IN (fndecl)
- && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
+ if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (fndecl))
{
#define CASE_BUILTIN_F(BUILT_IN_FN) \
/* Return true when T is an address and is known to be nonzero.
For floating point we further ensure that T is not denormal.
- Similar logic is present in nonzero_address in rtlanal.h */
+ Similar logic is present in nonzero_address in rtlanal.h. */
static bool
tree_expr_nonzero_p (tree t)
return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
case INTEGER_CST:
- return !integer_zerop (t);
+ /* We used to test for !integer_zerop here. This does not work correctly
+ if TREE_CONSTANT_OVERFLOW (t). */
+ return (TREE_INT_CST_LOW (t) != 0
+ || TREE_INT_CST_HIGH (t) != 0);
case PLUS_EXPR:
if (!TYPE_UNSIGNED (type) && !flag_wrapv)
break;
case ADDR_EXPR:
- /* Weak declarations may link to NULL. */
- if (DECL_P (TREE_OPERAND (t, 0)))
- return !DECL_WEAK (TREE_OPERAND (t, 0));
- /* Constants and all other cases are never weak. */
- return true;
+ {
+ tree base = get_base_address (TREE_OPERAND (t, 0));
+
+ if (!base)
+ return false;
+
+ /* Weak declarations may link to NULL. */
+ if (DECL_P (base))
+ return !DECL_WEAK (base);
+
+ /* Constants are never weak. */
+ if (CONSTANT_CLASS_P (base))
+ return true;
+
+ return false;
+ }
case COND_EXPR:
return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
return false;
}
-/* Return true if `r' is known to be non-negative.
- Only handles constants at the moment. */
-
-int
-rtl_expr_nonnegative_p (rtx r)
-{
- switch (GET_CODE (r))
- {
- case CONST_INT:
- return INTVAL (r) >= 0;
-
- case CONST_DOUBLE:
- if (GET_MODE (r) == VOIDmode)
- return CONST_DOUBLE_HIGH (r) >= 0;
- return 0;
-
- case CONST_VECTOR:
- {
- int units, i;
- rtx elt;
-
- units = CONST_VECTOR_NUNITS (r);
-
- for (i = 0; i < units; ++i)
- {
- elt = CONST_VECTOR_ELT (r, i);
- if (!rtl_expr_nonnegative_p (elt))
- return 0;
- }
-
- return 1;
- }
-
- case SYMBOL_REF:
- case LABEL_REF:
- /* These are always nonnegative. */
- return 1;
-
- default:
- return 0;
- }
-}
-
-
/* See if we are applying CODE, a relational to the highest or lowest
possible integer of TYPE. If so, then the result is a compile
time constant. */
fold_convert (st0, op0),
fold_convert (st1, integer_zero_node));
- retval
- = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
- TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- TREE_OPERAND (exp, 1));
+ retval = fold_binary_to_constant (TREE_CODE (exp),
+ TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ TREE_OPERAND (exp, 1));
/* If we are in gimple form, then returning EXP would create
non-gimple expressions. Clearing it is safe and insures
simpler than the generic fold routine. */
tree
-nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
- tree op0, tree op1)
+fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
int wins = 1;
tree subop0;
the generic fold routine. */
tree
-nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
- tree op0)
+fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
/* Make sure we have a suitable constant argument. */
if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
== MODE_INT)
&& (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
return fold_convert (TREE_TYPE (exp),
- build_int_2 ((TREE_STRING_POINTER (string)
- [TREE_INT_CST_LOW (index)]), 0));
+ build_int_cst (NULL_TREE,
+ (TREE_STRING_POINTER (string)
+ [TREE_INT_CST_LOW (index)])));
}
return NULL;
}
{
tree t = NULL_TREE;
- if (TREE_CODE (arg0) == INTEGER_CST)
+ switch (TREE_CODE (arg0))
{
- unsigned HOST_WIDE_INT low;
- HOST_WIDE_INT high;
- int overflow = neg_double (TREE_INT_CST_LOW (arg0),
- TREE_INT_CST_HIGH (arg0),
- &low, &high);
- t = build_int_2 (low, high);
- TREE_TYPE (t) = type;
- t = force_fit_type (t, 1,
- (overflow | TREE_OVERFLOW (arg0))
- && !TYPE_UNSIGNED (type),
- TREE_CONSTANT_OVERFLOW (arg0));
- }
- else if (TREE_CODE (arg0) == REAL_CST)
- t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
-#ifdef ENABLE_CHECKING
- else
- abort ();
-#endif
+ case INTEGER_CST:
+ {
+ unsigned HOST_WIDE_INT low;
+ HOST_WIDE_INT high;
+ int overflow = neg_double (TREE_INT_CST_LOW (arg0),
+ TREE_INT_CST_HIGH (arg0),
+ &low, &high);
+ t = build_int_cst_wide (type, low, high);
+ t = force_fit_type (t, 1,
+ (overflow | TREE_OVERFLOW (arg0))
+ && !TYPE_UNSIGNED (type),
+ TREE_CONSTANT_OVERFLOW (arg0));
+ break;
+ }
+
+ case REAL_CST:
+ t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
return t;
}
{
tree t = NULL_TREE;
- if (TREE_CODE (arg0) == INTEGER_CST)
+ switch (TREE_CODE (arg0))
{
+ case INTEGER_CST:
/* If the value is unsigned, then the absolute value is
the same as the ordinary value. */
if (TYPE_UNSIGNED (type))
- return arg0;
+ t = arg0;
/* Similarly, if the value is non-negative. */
else if (INT_CST_LT (integer_minus_one_node, arg0))
- return arg0;
+ t = arg0;
/* If the value is negative, then the absolute value is
its negation. */
else
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_2 (low, high);
- TREE_TYPE (t) = type;
+ t = build_int_cst_wide (type, low, high);
t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
TREE_CONSTANT_OVERFLOW (arg0));
- return t;
}
- }
- else if (TREE_CODE (arg0) == REAL_CST)
- {
+ break;
+
+ case REAL_CST:
if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
- return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
+ t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
else
- return arg0;
+ t = arg0;
+ break;
+
+ default:
+ gcc_unreachable ();
}
-#ifdef ENABLE_CHECKING
- else
- abort ();
-#endif
return t;
}
{
tree t = NULL_TREE;
- if (TREE_CODE (arg0) == INTEGER_CST)
- {
- t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
- ~ TREE_INT_CST_HIGH (arg0));
- TREE_TYPE (t) = type;
- t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
- TREE_CONSTANT_OVERFLOW (arg0));
- }
-#ifdef ENABLE_CHECKING
- else
- abort ();
-#endif
+ gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
+
+ t = build_int_cst_wide (type,
+ ~ TREE_INT_CST_LOW (arg0),
+ ~ TREE_INT_CST_HIGH (arg0));
+ t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
+ TREE_CONSTANT_OVERFLOW (arg0));
return t;
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
return constant_boolean_node (result, type);
return constant_boolean_node (result, type);
}
+/* Build an expression for a cleanup point containing EXPR with type TYPE.
+ Don't build a cleanup point expression for an EXPR that doesn't have
+ side effects. */
+
+tree
+fold_build_cleanup_point_expr (tree type, tree expr)
+{
+ /* If the expression does not have side effects then we don't have to wrap
+ it with a cleanup point expression. */
+ if (!TREE_SIDE_EFFECTS (expr))
+ return expr;
+
+ /* If the expression is a return, check to see if the expression inside the
+ return has no side effects or the right hand side of the modify expression
+ inside the return. If either don't have side effects set we don't need to
+ wrap the expression in a cleanup point expression. Note we don't check the
+ left hand side of the modify because it should always be a return decl. */
+ if (TREE_CODE (expr) == RETURN_EXPR)
+ {
+ tree op = TREE_OPERAND (expr, 0);
+ if (!op || !TREE_SIDE_EFFECTS (op))
+ return expr;
+ /* NOTE(review): assumes the RETURN_EXPR operand with side effects is a
+ MODIFY_EXPR whose operand 1 is its right-hand side — confirm no other
+ operand shape can reach here. */
+ op = TREE_OPERAND (op, 1);
+ if (!TREE_SIDE_EFFECTS (op))
+ return expr;
+ }
+
+ return build1 (CLEANUP_POINT_EXPR, type, expr);
+}
+
/* Build an expression for the address of T. Folds away INDIRECT_REF to
avoid confusing the gimplify process. */
tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
- if (TREE_CODE (t) == INDIRECT_REF)
+ /* The size of the object is not relevant when talking about its address. */
+ if (TREE_CODE (t) == WITH_SIZE_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ /* Note: doesn't apply to ALIGN_INDIRECT_REF */
+ if (TREE_CODE (t) == INDIRECT_REF
+ || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
{
t = TREE_OPERAND (t, 0);
if (TREE_TYPE (t) != ptrtype)
{
tree base = t;
- while (handled_component_p (base)
- || TREE_CODE (base) == REALPART_EXPR
- || TREE_CODE (base) == IMAGPART_EXPR)
+ while (handled_component_p (base))
base = TREE_OPERAND (base, 0);
if (DECL_P (base))
TREE_ADDRESSABLE (base) = 1;
return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
-/* Builds an expression for an indirection through T, simplifying some
- cases. */
+/* Given a pointer value T, return a simplified version of an indirection
+ through T, or NULL_TREE if no simplification is possible. */
-tree
-build_fold_indirect_ref (tree t)
+static tree
+fold_indirect_ref_1 (tree t)
{
tree type = TREE_TYPE (TREE_TYPE (t));
tree sub = t;
tree subtype;
STRIP_NOPS (sub);
+ subtype = TREE_TYPE (sub);
+ if (!POINTER_TYPE_P (subtype))
+ return NULL_TREE;
+
if (TREE_CODE (sub) == ADDR_EXPR)
{
tree op = TREE_OPERAND (sub, 0);
/* *(foo *)&fooarray => fooarray[0] */
else if (TREE_CODE (optype) == ARRAY_TYPE
&& lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
- return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
+ {
+ tree type_domain = TYPE_DOMAIN (optype);
+ tree min_val = size_zero_node;
+ if (type_domain && TYPE_MIN_VALUE (type_domain))
+ min_val = TYPE_MIN_VALUE (type_domain);
+ return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
+ }
}
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
- subtype = TREE_TYPE (sub);
if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
&& lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
{
+ tree type_domain;
+ tree min_val = size_zero_node;
sub = build_fold_indirect_ref (sub);
- return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
+ type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
+ if (type_domain && TYPE_MIN_VALUE (type_domain))
+ min_val = TYPE_MIN_VALUE (type_domain);
+ return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
}
- return build1 (INDIRECT_REF, type, t);
+ return NULL_TREE;
+}
+
+/* Builds an expression for an indirection through T, simplifying some
+ cases. If no simplification applies, an explicit INDIRECT_REF node
+ of the pointed-to type is built. */
+
+tree
+build_fold_indirect_ref (tree t)
+{
+ tree sub = fold_indirect_ref_1 (t);
+
+ if (sub)
+ return sub;
+ else
+ /* TREE_TYPE (TREE_TYPE (t)) is the type T points to; T must be a
+ pointer-valued expression. */
+ return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
+}
+
+/* Given an INDIRECT_REF T, return either T or a simplified version.
+ Unlike build_fold_indirect_ref, T is already an indirection node,
+ so we fold its pointer operand and fall back to T unchanged. */
+
+tree
+fold_indirect_ref (tree t)
+{
+ tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
+
+ if (sub)
+ return sub;
+ else
+ return t;
+}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
for (;;)
switch (TREE_CODE_CLASS (TREE_CODE (t)))
{
- case '1':
+ case tcc_unary:
t = TREE_OPERAND (t, 0);
break;
- case '2':
- case '<':
+ case tcc_binary:
+ case tcc_comparison:
if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
t = TREE_OPERAND (t, 0);
else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
return t;
break;
- case 'e':
+ case tcc_expression:
switch (TREE_CODE (t))
{
case COMPOUND_EXPR:
}
}
-#include "gt-fold-const.h"
+/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
+ This can only be applied to objects of a sizetype. DIVISOR must be
+ strictly positive; a divisor of 1 returns VALUE unchanged. */
+
+tree
+round_up (tree value, int divisor)
+{
+ tree div = NULL_TREE;
+
+ gcc_assert (divisor > 0);
+ if (divisor == 1)
+ return value;
+
+ /* See if VALUE is already a multiple of DIVISOR. If so, we don't
+ have to do anything. Only do this when we are not given a const,
+ because in that case, this check is more expensive than just
+ doing it. */
+ if (TREE_CODE (value) != INTEGER_CST)
+ {
+ div = build_int_cst (TREE_TYPE (value), divisor);
+
+ if (multiple_of_p (TREE_TYPE (value), value, div))
+ return value;
+ }
+
+ /* If divisor is a power of two, simplify this to bit manipulation:
+ (VALUE + DIVISOR - 1) & -DIVISOR. */
+ if (divisor == (divisor & -divisor))
+ {
+ tree t;
+
+ t = build_int_cst (TREE_TYPE (value), divisor - 1);
+ value = size_binop (PLUS_EXPR, value, t);
+ t = build_int_cst (TREE_TYPE (value), -divisor);
+ value = size_binop (BIT_AND_EXPR, value, t);
+ }
+ else
+ {
+ /* General case: ceiling-divide then multiply back. DIV may already
+ have been built by the multiple_of_p check above. */
+ if (!div)
+ div = build_int_cst (TREE_TYPE (value), divisor);
+ value = size_binop (CEIL_DIV_EXPR, value, div);
+ value = size_binop (MULT_EXPR, value, div);
+ }
+
+ return value;
+}
+
+/* Likewise, but round down. Return the value of VALUE, rounded down to
+ a multiple of DIVISOR; same sizetype and DIVISOR > 0 requirements as
+ round_up above. */
+
+tree
+round_down (tree value, int divisor)
+{
+ tree div = NULL_TREE;
+
+ gcc_assert (divisor > 0);
+ if (divisor == 1)
+ return value;
+
+ /* See if VALUE is already a multiple of DIVISOR. If so, we don't
+ have to do anything. Only do this when we are not given a const,
+ because in that case, this check is more expensive than just
+ doing it. */
+ if (TREE_CODE (value) != INTEGER_CST)
+ {
+ div = build_int_cst (TREE_TYPE (value), divisor);
+
+ if (multiple_of_p (TREE_TYPE (value), value, div))
+ return value;
+ }
+
+ /* If divisor is a power of two, simplify this to bit manipulation:
+ VALUE & -DIVISOR clears the low bits, rounding toward zero. */
+ if (divisor == (divisor & -divisor))
+ {
+ tree t;
+
+ t = build_int_cst (TREE_TYPE (value), -divisor);
+ value = size_binop (BIT_AND_EXPR, value, t);
+ }
+ else
+ {
+ /* General case: floor-divide then multiply back. */
+ if (!div)
+ div = build_int_cst (TREE_TYPE (value), divisor);
+ value = size_binop (FLOOR_DIV_EXPR, value, div);
+ value = size_binop (MULT_EXPR, value, div);
+ }
+
+ return value;
+}
+
+/* Returns the pointer to the base of the object addressed by EXP and
+ extracts the information about the offset of the access, storing it
+ to PBITPOS and POFFSET. For a non-ADDR_EXPR EXP, the expression
+ itself is the core and both offsets are zero/NULL. */
+
+static tree
+split_address_to_core_and_offset (tree exp,
+ HOST_WIDE_INT *pbitpos, tree *poffset)
+{
+ tree core;
+ enum machine_mode mode;
+ int unsignedp, volatilep;
+ HOST_WIDE_INT bitsize;
+
+ if (TREE_CODE (exp) == ADDR_EXPR)
+ {
+ core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
+ poffset, &mode, &unsignedp, &volatilep,
+ false);
+
+ /* &*p is just p; strip the indirection so callers can compare the
+ underlying pointer values directly. */
+ if (TREE_CODE (core) == INDIRECT_REF)
+ core = TREE_OPERAND (core, 0);
+ }
+ else
+ {
+ core = exp;
+ *pbitpos = 0;
+ *poffset = NULL_TREE;
+ }
+
+ return core;
+}
+
+/* Returns true if addresses of E1 and E2 differ by a constant, false
+ otherwise. If they do, E1 - E2 is stored in *DIFF, measured in bytes.
+ Fails when either address is not byte-aligned or the bases differ. */
+
+bool
+ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
+{
+ tree core1, core2;
+ HOST_WIDE_INT bitpos1, bitpos2;
+ tree toffset1, toffset2, tdiff, type;
+
+ core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
+ core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
+
+ /* Bit-field offsets that are not whole bytes cannot be expressed as a
+ byte difference; distinct bases make the difference non-constant. */
+ if (bitpos1 % BITS_PER_UNIT != 0
+ || bitpos2 % BITS_PER_UNIT != 0
+ || !operand_equal_p (core1, core2, 0))
+ return false;
+
+ if (toffset1 && toffset2)
+ {
+ type = TREE_TYPE (toffset1);
+ if (type != TREE_TYPE (toffset2))
+ toffset2 = fold_convert (type, toffset2);
+
+ tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
+ if (!host_integerp (tdiff, 0))
+ return false;
+
+ *diff = tree_low_cst (tdiff, 0);
+ }
+ else if (toffset1 || toffset2)
+ {
+ /* If only one of the offsets is non-constant, the difference cannot
+ be a constant. */
+ return false;
+ }
+ else
+ *diff = 0;
+
+ /* Fold the byte portion of the bit offsets into the result. */
+ *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
+ return true;
+}
+
+/* Simplify the floating point expression EXP when the sign of the
+ result is not significant. Return NULL_TREE if no simplification
+ is possible, i.e. no sign-only operation could be stripped. */
+
+tree
+fold_strip_sign_ops (tree exp)
+{
+ tree arg0, arg1;
+
+ switch (TREE_CODE (exp))
+ {
+ case ABS_EXPR:
+ case NEGATE_EXPR:
+ /* ABS and NEGATE only affect the sign; drop them and recurse into
+ the operand, keeping the operand itself if nothing more strips. */
+ arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
+ return arg0 ? arg0 : TREE_OPERAND (exp, 0);
+
+ case MULT_EXPR:
+ case RDIV_EXPR:
+ /* Stripping signs from a factor can change rounding when the FP mode
+ honors sign-dependent rounding, so bail out in that case. */
+ if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
+ return NULL_TREE;
+ arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
+ arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
+ /* Rebuild only if at least one operand actually simplified. */
+ if (arg0 != NULL_TREE || arg1 != NULL_TREE)
+ return fold (build2 (TREE_CODE (exp), TREE_TYPE (exp),
+ arg0 ? arg0 : TREE_OPERAND (exp, 0),
+ arg1 ? arg1 : TREE_OPERAND (exp, 1)));
+ break;
+
+ default:
+ break;
+ }
+ return NULL_TREE;
+}