size_int takes an integer value, and creates a tree constant
with type from `sizetype'.
- force_fit_type takes a constant and prior overflow indicator, and
- forces the value to fit the type. It returns an overflow indicator. */
+ force_fit_type takes a constant, an overflowable flag and prior
+ overflow indicators. It forces the value to fit the type and sets
+ TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
#include "config.h"
#include "system.h"
#include "langhooks.h"
#include "md5.h"
+/* The following constants represent a bit based encoding of GCC's
+ comparison operators. This encoding simplifies transformations
+ on relational comparison operators, such as AND and OR.
+ Bit 0 (value 1) means the operands may compare less-than, bit 1
+ (value 2) that they may compare equal, bit 2 (value 4) that the
+ first may compare greater-than, and bit 3 (value 8) that they may
+ be unordered (at least one is a NaN). ANDing or ORing two codes
+ therefore yields the code of the combined comparison, e.g.
+ COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ. */
+enum comparison_code {
+ COMPCODE_FALSE = 0,
+ COMPCODE_LT = 1,
+ COMPCODE_EQ = 2,
+ COMPCODE_LE = 3,
+ COMPCODE_GT = 4,
+ COMPCODE_LTGT = 5,
+ COMPCODE_GE = 6,
+ COMPCODE_ORD = 7,
+ COMPCODE_UNORD = 8,
+ COMPCODE_UNLT = 9,
+ COMPCODE_UNEQ = 10,
+ COMPCODE_UNLE = 11,
+ COMPCODE_UNGT = 12,
+ COMPCODE_NE = 13,
+ COMPCODE_UNGE = 14,
+ COMPCODE_TRUE = 15
+};
+
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
-static hashval_t size_htab_hash (const void *);
-static int size_htab_eq (const void *, const void *);
+static tree build_zero_vector (tree);
static tree fold_convert_const (enum tree_code, tree, tree);
-static enum tree_code invert_tree_comparison (enum tree_code);
-static enum tree_code swap_tree_comparison (enum tree_code);
-static int comparison_to_compcode (enum tree_code);
-static enum tree_code compcode_to_comparison (int);
+static enum tree_code invert_tree_comparison (enum tree_code, bool);
+static enum comparison_code comparison_to_compcode (enum tree_code);
+static enum tree_code compcode_to_comparison (enum comparison_code);
+static tree combine_comparisons (enum tree_code, enum tree_code,
+ enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
tree);
static tree fold_range_test (tree);
+static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
-static tree constant_boolean_node (int, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
-static bool tree_swap_operands_p (tree, tree, bool);
-
static tree fold_negate_const (tree, tree);
-static tree fold_abs_const (tree, tree);
+static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
-static tree fold_relational_hi_lo (enum tree_code *, const tree, tree *, tree *);
-
-/* The following constants represent a bit based encoding of GCC's
- comparison operators. This encoding simplifies transformations
- on relational comparison operators, such as AND and OR. */
-#define COMPCODE_FALSE 0
-#define COMPCODE_LT 1
-#define COMPCODE_EQ 2
-#define COMPCODE_LE 3
-#define COMPCODE_GT 4
-#define COMPCODE_NE 5
-#define COMPCODE_GE 6
-#define COMPCODE_TRUE 7
+static tree fold_relational_hi_lo (enum tree_code *, const tree,
+ tree *, tree *);
+static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
*hi = words[2] + words[3] * BASE;
}
\f
-/* Make the integer constant T valid for its type by setting to 0 or 1 all
- the bits in the constant that don't belong in the type.
-
- Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
- nonzero, a signed overflow has already occurred in calculating T, so
- propagate it. */
+/* T is an INTEGER_CST node. OVERFLOWABLE indicates if we are interested
+ in overflow of the value, when >0 we are only interested in signed
+ overflow, for <0 we are interested in any overflow. OVERFLOWED
+ indicates whether overflow has already occurred. CONST_OVERFLOWED
+ indicates whether constant overflow has already occurred. We force
+ T's value to be within range of T's type (by setting to 0 or 1 all
+ the bits outside the type's range). We set TREE_OVERFLOW if
+ OVERFLOWED is nonzero,
+ or OVERFLOWABLE is >0 and signed overflow occurs,
+ or OVERFLOWABLE is <0 and any overflow occurs.
+ We set TREE_CONSTANT_OVERFLOW if
+ CONST_OVERFLOWED is nonzero,
+ or we set TREE_OVERFLOW.
+ We return either the original T, or a copy. */
-int
-force_fit_type (tree t, int overflow)
+tree
+force_fit_type (tree t, int overflowable,
+ bool overflowed, bool overflowed_const)
{
unsigned HOST_WIDE_INT low;
HOST_WIDE_INT high;
unsigned int prec;
+ int sign_extended_type;
- if (TREE_CODE (t) == REAL_CST)
- {
- /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
- Consider doing it via real_convert now. */
- return overflow;
- }
-
- else if (TREE_CODE (t) != INTEGER_CST)
- return overflow;
+ gcc_assert (TREE_CODE (t) == INTEGER_CST);
low = TREE_INT_CST_LOW (t);
high = TREE_INT_CST_HIGH (t);
prec = POINTER_SIZE;
else
prec = TYPE_PRECISION (TREE_TYPE (t));
+ /* Size types *are* sign extended. */
+ sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
+ || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
/* First clear all bits that are beyond the type's precision. */
if (prec == 2 * HOST_BITS_PER_WIDE_INT)
;
else if (prec > HOST_BITS_PER_WIDE_INT)
- TREE_INT_CST_HIGH (t)
- &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
+ high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
else
{
- TREE_INT_CST_HIGH (t) = 0;
+ high = 0;
if (prec < HOST_BITS_PER_WIDE_INT)
- TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
- }
-
- /* Unsigned types do not suffer sign extension or overflow unless they
- are a sizetype. */
- if (TYPE_UNSIGNED (TREE_TYPE (t))
- && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
- && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
- return overflow;
-
- /* If the value's sign bit is set, extend the sign. */
- if (prec != 2 * HOST_BITS_PER_WIDE_INT
- && (prec > HOST_BITS_PER_WIDE_INT
- ? 0 != (TREE_INT_CST_HIGH (t)
- & ((HOST_WIDE_INT) 1
- << (prec - HOST_BITS_PER_WIDE_INT - 1)))
- : 0 != (TREE_INT_CST_LOW (t)
- & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
- {
- /* Value is negative:
- set to 1 all the bits that are outside this type's precision. */
- if (prec > HOST_BITS_PER_WIDE_INT)
- TREE_INT_CST_HIGH (t)
- |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
- else
+ low &= ~((HOST_WIDE_INT) (-1) << prec);
+ }
+
+ if (!sign_extended_type)
+ /* No sign extension */;
+ else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
+ /* Correct width already. */;
+ else if (prec > HOST_BITS_PER_WIDE_INT)
+ {
+ /* Sign extend top half? */
+ if (high & ((unsigned HOST_WIDE_INT)1
+ << (prec - HOST_BITS_PER_WIDE_INT - 1)))
+ high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
+ }
+ else if (prec == HOST_BITS_PER_WIDE_INT)
+ {
+ if ((HOST_WIDE_INT)low < 0)
+ high = -1;
+ }
+ else
+ {
+ /* Sign extend bottom half? */
+ if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
{
- TREE_INT_CST_HIGH (t) = -1;
- if (prec < HOST_BITS_PER_WIDE_INT)
- TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
+ high = -1;
+ low |= (HOST_WIDE_INT)(-1) << prec;
}
}
- /* Return nonzero if signed overflow occurred. */
- return
- ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
- != 0);
+ /* If the value changed, return a new node. */
+ if (overflowed || overflowed_const
+ || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
+ {
+ t = build_int_cst_wide (TREE_TYPE (t), low, high);
+
+ if (overflowed
+ || overflowable < 0
+ || (overflowable > 0 && sign_extended_type))
+ {
+ t = copy_node (t);
+ TREE_OVERFLOW (t) = 1;
+ TREE_CONSTANT_OVERFLOW (t) = 1;
+ }
+ else if (overflowed_const)
+ {
+ t = copy_node (t);
+ TREE_CONSTANT_OVERFLOW (t) = 1;
+ }
+ }
+
+ return t;
}
\f
/* Add two doubleword integers with doubleword result.
break;
default:
- abort ();
+ gcc_unreachable ();
}
/* Compute true remainder: rem = num - (quo * den) */
return false;
}
+/* Check whether we may negate an integer constant T without causing
+ overflow. Negation of a signed type overflows only for the most
+ negative value, which is the single value whose representation has
+ just the sign bit set. */
+
+bool
+may_negate_without_overflow_p (tree t)
+{
+ unsigned HOST_WIDE_INT val;
+ unsigned int prec;
+ tree type;
+
+ gcc_assert (TREE_CODE (t) == INTEGER_CST);
+
+ type = TREE_TYPE (t);
+ /* Unsigned constants are never considered safely negatable here. */
+ if (TYPE_UNSIGNED (type))
+ return false;
+
+ prec = TYPE_PRECISION (type);
+ if (prec > HOST_BITS_PER_WIDE_INT)
+ {
+ /* Any nonzero low-order bit means T is not the minimum value,
+ so negation is safe; otherwise examine the high word. */
+ if (TREE_INT_CST_LOW (t) != 0)
+ return true;
+ prec -= HOST_BITS_PER_WIDE_INT;
+ val = TREE_INT_CST_HIGH (t);
+ }
+ else
+ val = TREE_INT_CST_LOW (t);
+ /* Mask off bits beyond the type's precision. */
+ if (prec < HOST_BITS_PER_WIDE_INT)
+ val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
+ /* Safe unless only the sign bit of the type is set. */
+ return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
+}
+
/* Determine whether an expression T can be cheaply negated using
the function negate_expr. */
static bool
negate_expr_p (tree t)
{
- unsigned HOST_WIDE_INT val;
- unsigned int prec;
tree type;
if (t == 0)
return true;
/* Check that -CST will not overflow type. */
- prec = TYPE_PRECISION (type);
- if (prec > HOST_BITS_PER_WIDE_INT)
- {
- if (TREE_INT_CST_LOW (t) != 0)
- return true;
- prec -= HOST_BITS_PER_WIDE_INT;
- val = TREE_INT_CST_HIGH (t);
- }
- else
- val = TREE_INT_CST_LOW (t);
- if (prec < HOST_BITS_PER_WIDE_INT)
- val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
- return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
+ return may_negate_without_overflow_p (t);
case REAL_CST:
case NEGATE_EXPR:
break;
default:
- abort ();
+ gcc_unreachable ();
}
- /* If this is for a sizetype, can be represented as one (signed)
- HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
- constants. */
- if (is_sizetype
- && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
- || (hi == -1 && (HOST_WIDE_INT) low < 0))
- && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
- return size_int_type_wide (low, type);
- else
+ t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
+
+ if (notrunc)
{
- t = build_int_2 (low, hi);
- TREE_TYPE (t) = TREE_TYPE (arg1);
- }
-
- TREE_OVERFLOW (t)
- = ((notrunc
- ? (!uns || is_sizetype) && overflow
- : (force_fit_type (t, (!uns || is_sizetype) && overflow)
- && ! no_overflow))
- | TREE_OVERFLOW (arg1)
- | TREE_OVERFLOW (arg2));
-
- /* If we're doing a size calculation, unsigned arithmetic does overflow.
- So check if force_fit_type truncated the value. */
- if (is_sizetype
- && ! TREE_OVERFLOW (t)
- && (TREE_INT_CST_HIGH (t) != hi
- || TREE_INT_CST_LOW (t) != low))
- TREE_OVERFLOW (t) = 1;
-
- TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
- | TREE_CONSTANT_OVERFLOW (arg1)
- | TREE_CONSTANT_OVERFLOW (arg2));
+ /* Propagate overflow flags ourselves. */
+ if (((!uns || is_sizetype) && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
+ {
+ t = copy_node (t);
+ TREE_OVERFLOW (t) = 1;
+ TREE_CONSTANT_OVERFLOW (t) = 1;
+ }
+ else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
+ {
+ t = copy_node (t);
+ TREE_CONSTANT_OVERFLOW (t) = 1;
+ }
+ }
+ else
+ t = force_fit_type (t, 1,
+ ((!uns || is_sizetype) && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
+ TREE_CONSTANT_OVERFLOW (arg1)
+ | TREE_CONSTANT_OVERFLOW (arg2));
+
return t;
}
t = build_real (type, real_value_truncate (mode, value));
- TREE_OVERFLOW (t)
- = (force_fit_type (t, 0)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
+ TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
TREE_CONSTANT_OVERFLOW (t)
= TREE_OVERFLOW (t)
| TREE_CONSTANT_OVERFLOW (arg1)
break;
default:
- abort ();
+ gcc_unreachable ();
}
return t;
}
return 0;
}
-/* These are the hash table functions for the hash table of INTEGER_CST
- nodes of a sizetype. */
-
-/* Return the hash code code X, an INTEGER_CST. */
-
-static hashval_t
-size_htab_hash (const void *x)
-{
- tree t = (tree) x;
-
- return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
- ^ htab_hash_pointer (TREE_TYPE (t))
- ^ (TREE_OVERFLOW (t) << 20));
-}
-
-/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
- is the same as that given by *Y, which is the same. */
-
-static int
-size_htab_eq (const void *x, const void *y)
-{
- tree xt = (tree) x;
- tree yt = (tree) y;
-
- return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
- && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
- && TREE_TYPE (xt) == TREE_TYPE (yt)
- && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
-}
-\f
-/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
- bits are given by NUMBER and of the sizetype represented by KIND. */
-
-tree
-size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
-{
- return size_int_type_wide (number, sizetype_tab[(int) kind]);
-}
-
-/* Likewise, but the desired type is specified explicitly. */
-
-static GTY (()) tree new_const;
-static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
- htab_t size_htab;
+/* Create a size type INTEGER_CST node with NUMBER sign extended. KIND
+ indicates which particular sizetype to create. */
tree
-size_int_type_wide (HOST_WIDE_INT number, tree type)
+size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
- void **slot;
-
- if (size_htab == 0)
- {
- size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
- new_const = make_node (INTEGER_CST);
- }
-
- /* Adjust NEW_CONST to be the constant we want. If it's already in the
- hash table, we return the value from the hash table. Otherwise, we
- place that in the hash table and make a new node for the next time. */
- TREE_INT_CST_LOW (new_const) = number;
- TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
- TREE_TYPE (new_const) = type;
- TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
- = force_fit_type (new_const, 0);
-
- slot = htab_find_slot (size_htab, new_const, INSERT);
- if (*slot == 0)
- {
- tree t = new_const;
-
- *slot = new_const;
- new_const = make_node (INTEGER_CST);
- return t;
- }
- else
- return (tree) *slot;
+ return build_int_cst (sizetype_tab[(int) kind], number);
}
-
+\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
is a tree code. The type of the result is taken from the operands.
Both must be the same type integer type and it must be a size type.
{
tree type = TREE_TYPE (arg0);
- if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
- || type != TREE_TYPE (arg1))
- abort ();
+ gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
+ && type == TREE_TYPE (arg1));
/* Handle the special case of two integer constants faster. */
if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
tree type = TREE_TYPE (arg0);
tree ctype;
- if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
- || type != TREE_TYPE (arg1))
- abort ();
+ gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
+ && type == TREE_TYPE (arg1));
/* If the type is already signed, just do the simple thing. */
if (!TYPE_UNSIGNED (type))
return size_binop (MINUS_EXPR, arg0, arg1);
- ctype = (type == bitsizetype || type == ubitsizetype
- ? sbitsizetype : ssizetype);
+ ctype = type == bitsizetype ? sbitsizetype : ssizetype;
/* If either operand is not a constant, do the conversions to the signed
type and subtract. The hardware will do the right thing with any
arg1, arg0)));
}
\f
+/* Construct a vector of zero elements of vector type TYPE. */
+
+static tree
+build_zero_vector (tree type)
+{
+ tree elem, list;
+ int i, units;
+
+ /* Build a single zero constant of the vector's element type. */
+ elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
+ units = TYPE_VECTOR_SUBPARTS (type);
+
+ /* Chain UNITS copies of the zero element into a TREE_LIST and
+ build the vector constant from it. */
+ list = NULL_TREE;
+ for (i = 0; i < units; i++)
+ list = tree_cons (NULL_TREE, elem, list);
+ return build_vector (type, list);
+}
+
/* Attempt to fold type conversion operation CODE of expression ARG1 to
type TYPE. If no simplification can be done return NULL_TREE. */
if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
return NULL_TREE;
- /* If we are trying to make a sizetype for a small integer, use
- size_int to pick up cached types to reduce duplicate nodes. */
- if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
- && !TREE_CONSTANT_OVERFLOW (arg1)
- && compare_tree_int (arg1, 10000) < 0)
- return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
-
/* Given an integer constant, make new constant with new type,
appropriately sign-extended or truncated. */
- t = build_int_2 (TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
- TREE_TYPE (t) = type;
- /* Indicate an overflow if (1) ARG1 already overflowed,
- or (2) force_fit_type indicates an overflow.
- Tell force_fit_type that an overflow has already occurred
- if ARG1 is a too-large unsigned value and T is signed.
- But don't indicate an overflow if converting a pointer. */
- TREE_OVERFLOW (t)
- = ((force_fit_type (t,
- (TREE_INT_CST_HIGH (arg1) < 0
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
- && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
- || TREE_OVERFLOW (arg1));
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
+ t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1));
+
+ t = force_fit_type (t,
+ /* Don't set the overflow when
+ converting a pointer */
+ !POINTER_TYPE_P (TREE_TYPE (arg1)),
+ (TREE_INT_CST_HIGH (arg1) < 0
+ && (TYPE_UNSIGNED (type)
+ < TYPE_UNSIGNED (TREE_TYPE (arg1))))
+ | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
return t;
}
else if (TREE_CODE (arg1) == REAL_CST)
FP-to-integer conversion is unspecified upon overflow. */
HOST_WIDE_INT high, low;
-
REAL_VALUE_TYPE r;
REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
break;
default:
- abort ();
+ gcc_unreachable ();
}
/* If R is NaN, return zero and show we have an overflow. */
if (! overflow)
REAL_VALUE_TO_INT (&low, &high, r);
- t = build_int_2 (low, high);
- TREE_TYPE (t) = type;
- TREE_OVERFLOW (t)
- = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
+ t = build_int_cst_wide (type, low, high);
+
+ t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
return t;
}
}
real_value_truncate (TYPE_MODE (type),
TREE_REAL_CST (arg1)));
- TREE_OVERFLOW (t)
- = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
+ TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
TREE_CONSTANT_OVERFLOW (t)
= TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
return t;
|| TREE_CODE (orig) == ERROR_MARK)
return error_mark_node;
- if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
+ if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
+ || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
+ TYPE_MAIN_VARIANT (orig)))
return fold (build1 (NOP_EXPR, type, arg));
- if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
+ switch (TREE_CODE (type))
{
+ case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
+ case POINTER_TYPE: case REFERENCE_TYPE:
+ case OFFSET_TYPE:
if (TREE_CODE (arg) == INTEGER_CST)
{
tem = fold_convert_const (NOP_EXPR, type, arg);
if (tem != NULL_TREE)
return tem;
}
- if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
+ if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
+ || TREE_CODE (orig) == OFFSET_TYPE)
return fold (build1 (NOP_EXPR, type, arg));
if (TREE_CODE (orig) == COMPLEX_TYPE)
{
tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
return fold_convert (type, tem);
}
- if (TREE_CODE (orig) == VECTOR_TYPE
- && GET_MODE_SIZE (TYPE_MODE (type))
- == GET_MODE_SIZE (TYPE_MODE (orig)))
- return fold (build1 (NOP_EXPR, type, arg));
- }
- else if (TREE_CODE (type) == REAL_TYPE)
- {
+ gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
+ && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
+ return fold (build1 (NOP_EXPR, type, arg));
+
+ case REAL_TYPE:
if (TREE_CODE (arg) == INTEGER_CST)
{
tem = fold_convert_const (FLOAT_EXPR, type, arg);
return tem;
}
- if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
- return fold (build1 (FLOAT_EXPR, type, arg));
- if (TREE_CODE (orig) == REAL_TYPE)
- return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
- type, arg));
- if (TREE_CODE (orig) == COMPLEX_TYPE)
+ switch (TREE_CODE (orig))
{
+ case INTEGER_TYPE: case CHAR_TYPE:
+ case BOOLEAN_TYPE: case ENUMERAL_TYPE:
+ case POINTER_TYPE: case REFERENCE_TYPE:
+ return fold (build1 (FLOAT_EXPR, type, arg));
+
+ case REAL_TYPE:
+ return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
+ type, arg));
+
+ case COMPLEX_TYPE:
tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
return fold_convert (type, tem);
+
+ default:
+ gcc_unreachable ();
}
- }
- else if (TREE_CODE (type) == COMPLEX_TYPE)
- {
- if (INTEGRAL_TYPE_P (orig)
- || POINTER_TYPE_P (orig)
- || TREE_CODE (orig) == REAL_TYPE)
- return build2 (COMPLEX_EXPR, type,
- fold_convert (TREE_TYPE (type), arg),
- fold_convert (TREE_TYPE (type), integer_zero_node));
- if (TREE_CODE (orig) == COMPLEX_TYPE)
+
+ case COMPLEX_TYPE:
+ switch (TREE_CODE (orig))
{
- tree rpart, ipart;
+ case INTEGER_TYPE: case CHAR_TYPE:
+ case BOOLEAN_TYPE: case ENUMERAL_TYPE:
+ case POINTER_TYPE: case REFERENCE_TYPE:
+ case REAL_TYPE:
+ return build2 (COMPLEX_EXPR, type,
+ fold_convert (TREE_TYPE (type), arg),
+ fold_convert (TREE_TYPE (type), integer_zero_node));
+ case COMPLEX_TYPE:
+ {
+ tree rpart, ipart;
- if (TREE_CODE (arg) == COMPLEX_EXPR)
- {
- rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
- ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
- return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
- }
+ if (TREE_CODE (arg) == COMPLEX_EXPR)
+ {
+ rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
+ ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
+ return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
+ }
+
+ arg = save_expr (arg);
+ rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
+ ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
+ rpart = fold_convert (TREE_TYPE (type), rpart);
+ ipart = fold_convert (TREE_TYPE (type), ipart);
+ return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
+ }
- arg = save_expr (arg);
- rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
- ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
- rpart = fold_convert (TREE_TYPE (type), rpart);
- ipart = fold_convert (TREE_TYPE (type), ipart);
- return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
+ default:
+ gcc_unreachable ();
}
+
+ case VECTOR_TYPE:
+ if (integer_zerop (arg))
+ return build_zero_vector (type);
+ gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
+ gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
+ || TREE_CODE (orig) == VECTOR_TYPE);
+ return fold (build1 (NOP_EXPR, type, arg));
+
+ case VOID_TYPE:
+ return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
+
+ default:
+ gcc_unreachable ();
}
- else if (TREE_CODE (type) == VECTOR_TYPE)
- {
- if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
- && GET_MODE_SIZE (TYPE_MODE (type))
- == GET_MODE_SIZE (TYPE_MODE (orig)))
- return fold (build1 (NOP_EXPR, type, arg));
- if (TREE_CODE (orig) == VECTOR_TYPE
- && GET_MODE_SIZE (TYPE_MODE (type))
- == GET_MODE_SIZE (TYPE_MODE (orig)))
- return fold (build1 (NOP_EXPR, type, arg));
- }
- else if (VOID_TYPE_P (type))
- return fold (build1 (CONVERT_EXPR, type, arg));
- abort ();
}
\f
/* Return an expr equal to X but certainly not valid as an lvalue. */
case COMPONENT_REF:
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
case ARRAY_REF:
- case BIT_FIELD_REF:
- case BUFFER_REF:
case ARRAY_RANGE_REF:
- case VTABLE_REF:
+ case BIT_FIELD_REF:
+ case OBJ_TYPE_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
case PREINCREMENT_EXPR:
case PREDECREMENT_EXPR:
case SAVE_EXPR:
- case UNSAVE_EXPR:
case TRY_CATCH_EXPR:
case WITH_CLEANUP_EXPR:
case COMPOUND_EXPR:
case BIND_EXPR:
case MIN_EXPR:
case MAX_EXPR:
- case RTL_EXPR:
break;
default:
\f
/* Given a tree comparison code, return the code that is the logical inverse
of the given code. It is not safe to do this for floating-point
- comparisons, except for NE_EXPR and EQ_EXPR. */
+ comparisons, except for NE_EXPR and EQ_EXPR, so we receive a flag
+ saying whether NaNs must be honored: if reversing the comparison is
+ unsafe, return ERROR_MARK. */
static enum tree_code
-invert_tree_comparison (enum tree_code code)
+invert_tree_comparison (enum tree_code code, bool honor_nans)
{
+ /* With -ftrapping-math, inverting a floating-point comparison can
+ change which operand pairs raise an exception, so refuse to invert
+ whenever NaNs are possible. */
+ if (honor_nans && flag_trapping_math)
+ return ERROR_MARK;
+
switch (code)
{
case EQ_EXPR:
case NE_EXPR:
return EQ_EXPR;
case GT_EXPR:
- return LE_EXPR;
+ return honor_nans ? UNLE_EXPR : LE_EXPR;
case GE_EXPR:
- return LT_EXPR;
+ return honor_nans ? UNLT_EXPR : LT_EXPR;
case LT_EXPR:
- return GE_EXPR;
+ return honor_nans ? UNGE_EXPR : GE_EXPR;
case LE_EXPR:
+ return honor_nans ? UNGT_EXPR : GT_EXPR;
+ case LTGT_EXPR:
+ return UNEQ_EXPR;
+ case UNEQ_EXPR:
+ return LTGT_EXPR;
+ case UNGT_EXPR:
+ return LE_EXPR;
+ case UNGE_EXPR:
+ return LT_EXPR;
+ case UNLT_EXPR:
+ return GE_EXPR;
+ case UNLE_EXPR:
return GT_EXPR;
+ case ORDERED_EXPR:
+ return UNORDERED_EXPR;
+ case UNORDERED_EXPR:
+ return ORDERED_EXPR;
default:
- abort ();
+ gcc_unreachable ();
}
}
/* Similar, but return the comparison that results if the operands are
swapped. This is safe for floating-point. */
-static enum tree_code
+enum tree_code
swap_tree_comparison (enum tree_code code)
{
switch (code)
case LE_EXPR:
return GE_EXPR;
default:
- abort ();
+ gcc_unreachable ();
}
}
into a compcode bit-based encoding. This function is the inverse of
compcode_to_comparison. */
-static int
+static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
switch (code)
return COMPCODE_NE;
case GE_EXPR:
return COMPCODE_GE;
+ case ORDERED_EXPR:
+ return COMPCODE_ORD;
+ case UNORDERED_EXPR:
+ return COMPCODE_UNORD;
+ case UNLT_EXPR:
+ return COMPCODE_UNLT;
+ case UNEQ_EXPR:
+ return COMPCODE_UNEQ;
+ case UNLE_EXPR:
+ return COMPCODE_UNLE;
+ case UNGT_EXPR:
+ return COMPCODE_UNGT;
+ case LTGT_EXPR:
+ return COMPCODE_LTGT;
+ case UNGE_EXPR:
+ return COMPCODE_UNGE;
default:
- abort ();
+ gcc_unreachable ();
}
}
inverse of comparison_to_compcode. */
static enum tree_code
-compcode_to_comparison (int code)
+compcode_to_comparison (enum comparison_code code)
{
switch (code)
{
return NE_EXPR;
case COMPCODE_GE:
return GE_EXPR;
+ case COMPCODE_ORD:
+ return ORDERED_EXPR;
+ case COMPCODE_UNORD:
+ return UNORDERED_EXPR;
+ case COMPCODE_UNLT:
+ return UNLT_EXPR;
+ case COMPCODE_UNEQ:
+ return UNEQ_EXPR;
+ case COMPCODE_UNLE:
+ return UNLE_EXPR;
+ case COMPCODE_UNGT:
+ return UNGT_EXPR;
+ case COMPCODE_LTGT:
+ return LTGT_EXPR;
+ case COMPCODE_UNGE:
+ return UNGE_EXPR;
+ default:
+ gcc_unreachable ();
+ }
+}
+
+/* Return a tree for the comparison which is the combination of
+ doing the AND or OR (depending on CODE) of the two operations LCODE
+ and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
+ the possibility of trapping if the mode has NaNs, and return NULL_TREE
+ if this makes the transformation invalid. */
+
+tree
+combine_comparisons (enum tree_code code, enum tree_code lcode,
+ enum tree_code rcode, tree truth_type,
+ tree ll_arg, tree lr_arg)
+{
+ bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
+ enum comparison_code lcompcode = comparison_to_compcode (lcode);
+ enum comparison_code rcompcode = comparison_to_compcode (rcode);
+ enum comparison_code compcode;
+
+ /* Combine the bit encodings: AND intersects the conditions under
+ which each comparison is true, OR unions them. */
+ switch (code)
+ {
+ case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
+ compcode = lcompcode & rcompcode;
+ break;
+
+ case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
+ compcode = lcompcode | rcompcode;
+ break;
+
 default:
- abort ();
+ return NULL_TREE;
 }
+
+ if (!honor_nans)
+ {
+ /* Eliminate unordered comparisons, as well as LTGT and ORD
+ which are not used unless the mode has NaNs. */
+ compcode &= ~COMPCODE_UNORD;
+ if (compcode == COMPCODE_LTGT)
+ compcode = COMPCODE_NE;
+ else if (compcode == COMPCODE_ORD)
+ compcode = COMPCODE_TRUE;
+ }
+ else if (flag_trapping_math)
+ {
+ /* Check that the original operation and the optimized ones will trap
+ under the same condition. */
+ bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
+ && (lcompcode != COMPCODE_EQ)
+ && (lcompcode != COMPCODE_ORD);
+ bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
+ && (rcompcode != COMPCODE_EQ)
+ && (rcompcode != COMPCODE_ORD);
+ bool trap = (compcode & COMPCODE_UNORD) == 0
+ && (compcode != COMPCODE_EQ)
+ && (compcode != COMPCODE_ORD);
+
+ /* In a short-circuited boolean expression the LHS might be
+ such that the RHS, if evaluated, will never trap. For
+ example, in ORD (x, y) && (x < y), we evaluate the RHS only
+ if neither x nor y is NaN. (This is a mixed blessing: for
+ example, the expression above will never trap, hence
+ optimizing it to x < y would be invalid). */
+ if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
+ || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
+ rtrap = false;
+
+ /* If the comparison was short-circuited, and only the RHS
+ trapped, we may now generate a spurious trap. */
+ if (rtrap && !ltrap
+ && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
+ return NULL_TREE;
+
+ /* If we changed the conditions that cause a trap, we lose. */
+ if ((ltrap || rtrap) != trap)
+ return NULL_TREE;
+ }
+
+ /* The combined comparison may have reduced to a known truth value;
+ otherwise build the folded comparison it encodes. */
+ if (compcode == COMPCODE_TRUE)
+ return constant_boolean_node (true, truth_type);
+ else if (compcode == COMPCODE_FALSE)
+ return constant_boolean_node (false, truth_type);
+ else
+ return fold (build2 (compcode_to_comparison (compcode),
+ truth_type, ll_arg, lr_arg));
}
/* Return nonzero if CODE is a tree code that represents a truth value. */
static int
truth_value_p (enum tree_code code)
{
- return (TREE_CODE_CLASS (code) == '<'
+ return (TREE_CODE_CLASS (code) == tcc_comparison
|| code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
|| code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
|| code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
\f
/* Return nonzero if two operands (typically of the same tree node)
are necessarily equal. If either argument has side-effects this
- function returns zero. FLAGS modifies behaviour as follows:
+ function returns zero. FLAGS modifies behavior as follows:
If OEP_ONLY_CONST is set, only return nonzero for constants.
This function tests whether the operands are indistinguishable;
executed between the evaluation of its "operands" (which may often
be evaluated in arbitrary order). Hence if the operands themselves
don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
- same value in each operand/subexpression. Hence a zero value for
- ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
- If comparing arbitrary expression trees, such as from different
- statements, ONLY_CONST must usually be nonzero.
+ same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
+ unset means assuming isochronic (or instantaneous) tree equivalence.
+ Unless comparing arbitrary expression trees, such as from different
+ statements, this flag can usually be left unset.
If OEP_PURE_SAME is set, then pure functions with identical arguments
are considered the same. It is used when the caller has other ways
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
+ /* If one is specified and the other isn't, they aren't equal and if
+ neither is specified, they are.
+
+ ??? This is temporary and is meant only to handle the cases of the
+ optional operands for COMPONENT_REF and ARRAY_REF. */
+ if ((arg0 && !arg1) || (!arg0 && arg1))
+ return 0;
+ else if (!arg0 && !arg1)
+ return 1;
/* If either is ERROR_MARK, they aren't equal. */
- if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
+ else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
return 0;
/* If both types don't have the same signedness, then we can't consider
switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
{
- case '1':
+ case tcc_unary:
/* Two conversions are equal only if signedness and modes match. */
- if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
- && (TYPE_UNSIGNED (TREE_TYPE (arg0))
- != TYPE_UNSIGNED (TREE_TYPE (arg1))))
- return 0;
+ switch (TREE_CODE (arg0))
+ {
+ case NOP_EXPR:
+ case CONVERT_EXPR:
+ case FIX_CEIL_EXPR:
+ case FIX_TRUNC_EXPR:
+ case FIX_FLOOR_EXPR:
+ case FIX_ROUND_EXPR:
+ if (TYPE_UNSIGNED (TREE_TYPE (arg0))
+ != TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ return 0;
+ break;
+ default:
+ break;
+ }
return operand_equal_p (TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg1, 0), flags);
- case '<':
- case '2':
- if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
- && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
- 0))
+ case tcc_comparison:
+ case tcc_binary:
+ if (operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 1), flags))
return 1;
/* For commutative ops, allow the other order. */
&& operand_equal_p (TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg1, 0), flags));
- case 'r':
+ case tcc_reference:
/* If either of the pointer (or reference) expressions we are
dereferencing contain a side effect, these cannot be equal. */
if (TREE_SIDE_EFFECTS (arg0)
switch (TREE_CODE (arg0))
{
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
return operand_equal_p (TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg1, 0), flags);
- case COMPONENT_REF:
case ARRAY_REF:
case ARRAY_RANGE_REF:
return (operand_equal_p (TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg1, 0), flags)
&& operand_equal_p (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1), flags));
+ TREE_OPERAND (arg1, 1), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 2),
+ TREE_OPERAND (arg1, 2), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 3),
+ TREE_OPERAND (arg1, 3), flags));
+
+
+ case COMPONENT_REF:
+ return (operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 1), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 2),
+ TREE_OPERAND (arg1, 2), flags));
+
case BIT_FIELD_REF:
return (operand_equal_p (TREE_OPERAND (arg0, 0),
return 0;
}
- case 'e':
+ case tcc_expression:
switch (TREE_CODE (arg0))
{
case ADDR_EXPR:
return operand_equal_p (TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg1, 0), flags);
- case RTL_EXPR:
- return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_ORIF_EXPR:
+ return operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 1), flags);
+
+ case TRUTH_AND_EXPR:
+ case TRUTH_OR_EXPR:
+ case TRUTH_XOR_EXPR:
+ return (operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 1), flags))
+ || (operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 1), flags)
+ && operand_equal_p (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 0), flags));
case CALL_EXPR:
/* If the CALL_EXPRs call different functions, then they
return 0;
}
- case 'd':
+ case tcc_declaration:
/* Consider __builtin_sqrt equal to sqrt. */
return (TREE_CODE (arg0) == FUNCTION_DECL
&& DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
enum tree_code code = TREE_CODE (arg);
- char class = TREE_CODE_CLASS (code);
+ enum tree_code_class class = TREE_CODE_CLASS (code);
- /* We can handle some of the 'e' cases here. */
- if (class == 'e' && code == TRUTH_NOT_EXPR)
- class = '1';
- else if (class == 'e'
+ /* We can handle some of the tcc_expression cases here. */
+ if (class == tcc_expression && code == TRUTH_NOT_EXPR)
+ class = tcc_unary;
+ else if (class == tcc_expression
&& (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
|| code == COMPOUND_EXPR))
- class = '2';
+ class = tcc_binary;
- else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
+ else if (class == tcc_expression && code == SAVE_EXPR
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
{
/* If we've already found a CVAL1 or CVAL2, this expression is
if (*cval1 || *cval2)
return 0;
- class = '1';
+ class = tcc_unary;
*save_p = 1;
}
switch (class)
{
- case '1':
+ case tcc_unary:
return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
- case '2':
+ case tcc_binary:
return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
&& twoval_comparison_p (TREE_OPERAND (arg, 1),
cval1, cval2, save_p));
- case 'c':
+ case tcc_constant:
return 1;
- case 'e':
+ case tcc_expression:
if (code == COND_EXPR)
return (twoval_comparison_p (TREE_OPERAND (arg, 0),
cval1, cval2, save_p)
cval1, cval2, save_p));
return 0;
- case '<':
+ case tcc_comparison:
/* First see if we can handle the first operand, then the second. For
the second operand, we know *CVAL1 can't be zero. It must be that
one side of the comparison is each of the values; test for the
{
tree type = TREE_TYPE (arg);
enum tree_code code = TREE_CODE (arg);
- char class = TREE_CODE_CLASS (code);
+ enum tree_code_class class = TREE_CODE_CLASS (code);
- /* We can handle some of the 'e' cases here. */
- if (class == 'e' && code == TRUTH_NOT_EXPR)
- class = '1';
- else if (class == 'e'
+ /* We can handle some of the tcc_expression cases here. */
+ if (class == tcc_expression && code == TRUTH_NOT_EXPR)
+ class = tcc_unary;
+ else if (class == tcc_expression
&& (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
- class = '2';
+ class = tcc_binary;
switch (class)
{
- case '1':
+ case tcc_unary:
return fold (build1 (code, type,
eval_subst (TREE_OPERAND (arg, 0),
old0, new0, old1, new1)));
- case '2':
+ case tcc_binary:
return fold (build2 (code, type,
eval_subst (TREE_OPERAND (arg, 0),
old0, new0, old1, new1),
eval_subst (TREE_OPERAND (arg, 1),
old0, new0, old1, new1)));
- case 'e':
+ case tcc_expression:
switch (code)
{
case SAVE_EXPR:
}
/* Fall through - ??? */
- case '<':
+ case tcc_comparison:
{
tree arg0 = TREE_OPERAND (arg, 0);
tree arg1 = TREE_OPERAND (arg, 1);
tree t = fold_convert (type, result);
if (TREE_SIDE_EFFECTS (omitted))
- return build2 (COMPOUND_EXPR, type, omitted, t);
+ return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
return non_lvalue (t);
}
tree t = fold_convert (type, result);
if (TREE_SIDE_EFFECTS (omitted))
- return build2 (COMPOUND_EXPR, type, omitted, t);
+ return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
return pedantic_non_lvalue (t);
}
+
+/* Return a tree for the case when the result of an expression is RESULT
+ converted to TYPE and OMITTED1 and OMITTED2 were previously operands
+ of the expression but are now not needed.
+
+ If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
+ If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
+ evaluated before OMITTED2. Otherwise, if neither has side effects,
+ just do the conversion of RESULT to TYPE. */
+
+tree
+omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
+{
+  tree t = fold_convert (type, result);
+
+  /* Wrap innermost-last: building the OMITTED2 COMPOUND_EXPR first and
+     the OMITTED1 one around it yields evaluation order OMITTED1,
+     OMITTED2, then RESULT, as the contract above requires.  */
+  if (TREE_SIDE_EFFECTS (omitted2))
+    t = build2 (COMPOUND_EXPR, type, omitted2, t);
+  if (TREE_SIDE_EFFECTS (omitted1))
+    t = build2 (COMPOUND_EXPR, type, omitted1, t);
+
+  /* Only strip lvalue-ness when neither operand had side effects; a
+     COMPOUND_EXPR result must be returned as built so the retained
+     evaluations are not disturbed.  */
+  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
+}
+
\f
/* Return a simplified tree node for the truth-negation of ARG. This
never alters ARG itself. We assume that ARG is an operation that
- returns a truth value (0 or 1). */
+ returns a truth value (0 or 1).
+ FIXME: one would think we would fold the result, but it causes
+ problems with the dominator optimizer. */
tree
invert_truthvalue (tree arg)
{
floating-point non-equality comparisons, in which case we just
enclose a TRUTH_NOT_EXPR around what we have. */
- if (TREE_CODE_CLASS (code) == '<')
+ if (TREE_CODE_CLASS (code) == tcc_comparison)
{
- if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
- && !flag_unsafe_math_optimizations
- && code != NE_EXPR
- && code != EQ_EXPR)
- return build1 (TRUTH_NOT_EXPR, type, arg);
- else if (code == UNORDERED_EXPR
- || code == ORDERED_EXPR
- || code == UNEQ_EXPR
- || code == UNLT_EXPR
- || code == UNLE_EXPR
- || code == UNGT_EXPR
- || code == UNGE_EXPR)
+ tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
+ if (FLOAT_TYPE_P (op_type)
+ && flag_trapping_math
+ && code != ORDERED_EXPR && code != UNORDERED_EXPR
+ && code != NE_EXPR && code != EQ_EXPR)
return build1 (TRUTH_NOT_EXPR, type, arg);
else
- return build2 (invert_tree_comparison (code), type,
- TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
+ {
+ code = invert_tree_comparison (code,
+ HONOR_NANS (TYPE_MODE (op_type)));
+ if (code == ERROR_MARK)
+ return build1 (TRUTH_NOT_EXPR, type, arg);
+ else
+ return build2 (code, type,
+ TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
+ }
}
switch (code)
{
case INTEGER_CST:
- return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
+ return fold_convert (type,
+ build_int_cst (NULL_TREE, integer_zerop (arg)));
case TRUTH_AND_EXPR:
return build2 (TRUTH_OR_EXPR, type,
default:
break;
}
- if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
- abort ();
+ gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
return build1 (TRUTH_NOT_EXPR, type, arg);
}
lbitpos = nbitsize - lbitsize - lbitpos;
/* Make the mask to be used against the extracted field. */
- mask = build_int_2 (~0, ~0);
- TREE_TYPE (mask) = unsigned_type;
- force_fit_type (mask, 0);
+ mask = build_int_cst (unsigned_type, -1);
+ mask = force_fit_type (mask, 0, false, false);
mask = fold_convert (unsigned_type, mask);
mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask,
{
warning ("comparison is always %d due to width of bit-field",
code == NE_EXPR);
- return fold_convert (compare_type,
- (code == NE_EXPR
- ? integer_one_node : integer_zero_node));
+ return constant_boolean_node (code == NE_EXPR, compare_type);
}
}
else
{
warning ("comparison is always %d due to width of bit-field",
code == NE_EXPR);
- return fold_convert (compare_type,
- (code == NE_EXPR
- ? integer_one_node : integer_zero_node));
+ return constant_boolean_node (code == NE_EXPR, compare_type);
}
}
/* If the number of bits in the reference is the same as the bitsize of
the outer type, then the outer type gives the signedness. Otherwise
(in case of a small bitfield) the signedness is unchanged. */
- if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
+ if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
*punsignedp = TYPE_UNSIGNED (outer_type);
/* Compute the mask to access the bitfield. */
unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
precision = TYPE_PRECISION (unsigned_type);
- mask = build_int_2 (~0, ~0);
- TREE_TYPE (mask) = unsigned_type;
- force_fit_type (mask, 0);
+ mask = build_int_cst (unsigned_type, -1);
+ mask = force_fit_type (mask, 0, false, false);
+
mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
unsigned int precision = TYPE_PRECISION (type);
tree tmask;
- tmask = build_int_2 (~0, ~0);
- TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
- force_fit_type (tmask, 0);
+ tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
+ tmask = force_fit_type (tmask, 0, false, false);
+
return
tree_int_cst_equal (mask,
const_binop (RSHIFT_EXPR,
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
exp = TREE_OPERAND (exp, 0);
- return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
+ return (CONSTANT_CLASS_P (exp)
|| (DECL_P (exp)
&& ! TREE_ADDRESSABLE (exp)
&& ! TREE_THIS_VOLATILE (exp)
return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
}
- if (TREE_CODE_CLASS (code) != '<')
+ if (TREE_CODE_CLASS (code) != tcc_comparison)
return 0;
/* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
result = sgn0 >= sgn1;
break;
default:
- abort ();
+ gcc_unreachable ();
}
- return fold_convert (type, result ? integer_one_node : integer_zero_node);
+ return constant_boolean_node (result, type);
}
\f
/* Given EXP, a logical expression, set the range it is testing into
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
enum tree_code code;
- tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
- tree orig_type = NULL_TREE;
+ tree arg0 = NULL_TREE, arg1 = NULL_TREE;
+ tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
int in_p, n_in_p;
tree low, high, n_low, n_high;
while (1)
{
code = TREE_CODE (exp);
+ exp_type = TREE_TYPE (exp);
if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
{
if (first_rtl_op (code) > 0)
arg0 = TREE_OPERAND (exp, 0);
- if (TREE_CODE_CLASS (code) == '<'
- || TREE_CODE_CLASS (code) == '1'
- || TREE_CODE_CLASS (code) == '2')
- type = TREE_TYPE (arg0);
- if (TREE_CODE_CLASS (code) == '2'
- || TREE_CODE_CLASS (code) == '<'
- || (TREE_CODE_CLASS (code) == 'e'
+ if (TREE_CODE_CLASS (code) == tcc_comparison
+ || TREE_CODE_CLASS (code) == tcc_unary
+ || TREE_CODE_CLASS (code) == tcc_binary)
+ arg0_type = TREE_TYPE (arg0);
+ if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison
+ || (TREE_CODE_CLASS (code) == tcc_expression
&& TREE_CODE_LENGTH (code) > 1))
arg1 = TREE_OPERAND (exp, 1);
}
- /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
- lose a cast by accident. */
- if (type != NULL_TREE && orig_type == NULL_TREE)
- orig_type = type;
-
switch (code)
{
case TRUTH_NOT_EXPR:
in_p = ! in_p, low = 0, high = arg1;
break;
default:
- abort ();
+ gcc_unreachable ();
}
- exp = arg0;
-
/* If this is an unsigned comparison, we also know that EXP is
greater than or equal to zero. We base the range tests we make
on that fact, so we record it here so we can parse existing
- range tests. */
- if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
+ range tests. We test arg0_type since often the return type
+ of, e.g. EQ_EXPR, is boolean. */
+ if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
{
- if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
- 1, fold_convert (type, integer_zero_node),
+ if (! merge_ranges (&n_in_p, &n_low, &n_high,
+ in_p, low, high, 1,
+ fold_convert (arg0_type, integer_zero_node),
NULL_TREE))
break;
in_p = ! in_p;
high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
integer_one_node, 0);
- low = fold_convert (type, integer_zero_node);
+ low = fold_convert (arg0_type, integer_zero_node);
}
}
+
+ exp = arg0;
continue;
case NEGATE_EXPR:
/* (-x) IN [a,b] -> x in [-b, -a] */
- n_low = range_binop (MINUS_EXPR, type,
- fold_convert (type, integer_zero_node),
+ n_low = range_binop (MINUS_EXPR, exp_type,
+ fold_convert (exp_type, integer_zero_node),
0, high, 1);
- n_high = range_binop (MINUS_EXPR, type,
- fold_convert (type, integer_zero_node),
+ n_high = range_binop (MINUS_EXPR, exp_type,
+ fold_convert (exp_type, integer_zero_node),
0, low, 0);
low = n_low, high = n_high;
exp = arg0;
case BIT_NOT_EXPR:
/* ~ X -> -X - 1 */
- exp = build2 (MINUS_EXPR, type, negate_expr (arg0),
- fold_convert (type, integer_one_node));
+ exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
+ fold_convert (exp_type, integer_one_node));
continue;
case PLUS_EXPR: case MINUS_EXPR:
the bounds don't overflow. For unsigned, overflow is defined
and this is exactly the right thing. */
n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
- type, low, 0, arg1, 0);
+ arg0_type, low, 0, arg1, 0);
n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
- type, high, 1, arg1, 0);
+ arg0_type, high, 1, arg1, 0);
if ((n_low != 0 && TREE_OVERFLOW (n_low))
|| (n_high != 0 && TREE_OVERFLOW (n_high)))
break;
value thus making n_high < n_low, and normalize it. */
if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
{
- low = range_binop (PLUS_EXPR, type, n_high, 0,
+ low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
integer_one_node, 0);
- high = range_binop (MINUS_EXPR, type, n_low, 0,
+ high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
integer_one_node, 0);
/* If the range is of the form +/- [ x+1, x ], we won't
continue;
case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
- if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
+ if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
break;
- if (! INTEGRAL_TYPE_P (type)
- || (low != 0 && ! int_fits_type_p (low, type))
- || (high != 0 && ! int_fits_type_p (high, type)))
+ if (! INTEGRAL_TYPE_P (arg0_type)
+ || (low != 0 && ! int_fits_type_p (low, arg0_type))
+ || (high != 0 && ! int_fits_type_p (high, arg0_type)))
break;
n_low = low, n_high = high;
if (n_low != 0)
- n_low = fold_convert (type, n_low);
+ n_low = fold_convert (arg0_type, n_low);
if (n_high != 0)
- n_high = fold_convert (type, n_high);
+ n_high = fold_convert (arg0_type, n_high);
+
- /* If we're converting from an unsigned to a signed type,
- we will be doing the comparison as unsigned. The tests above
- have already verified that LOW and HIGH are both positive.
+ /* If we're converting arg0 from an unsigned type, to exp,
+ a signed type, we will be doing the comparison as unsigned.
+ The tests above have already verified that LOW and HIGH
+ are both positive.
- So we have to make sure that the original unsigned value will
- be interpreted as positive. */
- if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
+ So we have to ensure that we will handle large unsigned
+ values the same way that the current signed bounds treat
+ negative values. */
+
+ if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
{
- tree equiv_type = lang_hooks.types.type_for_mode
- (TYPE_MODE (type), 1);
tree high_positive;
+ tree equiv_type = lang_hooks.types.type_for_mode
+ (TYPE_MODE (arg0_type), 1);
/* A range without an upper bound is, naturally, unbounded.
Since convert would have cropped a very large value, use
the max value for the destination type. */
high_positive
= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
- : TYPE_MAX_VALUE (type);
+ : TYPE_MAX_VALUE (arg0_type);
- if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
- high_positive = fold (build2 (RSHIFT_EXPR, type,
- fold_convert (type,
+ if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
+ high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
+ fold_convert (arg0_type,
high_positive),
- fold_convert (type,
+ fold_convert (arg0_type,
integer_one_node)));
/* If the low bound is specified, "and" the range with the
{
if (! merge_ranges (&n_in_p, &n_low, &n_high,
1, n_low, n_high, 1,
- fold_convert (type, integer_zero_node),
+ fold_convert (arg0_type,
+ integer_zero_node),
high_positive))
break;
that will be interpreted as negative. */
if (! merge_ranges (&n_in_p, &n_low, &n_high,
0, n_low, n_high, 1,
- fold_convert (type, integer_zero_node),
+ fold_convert (arg0_type,
+ integer_zero_node),
high_positive))
break;
\f
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
type, TYPE, return an expression to test if EXP is in (or out of, depending
- on IN_P) the range. */
+ on IN_P) the range. Return 0 if the test couldn't be created. */
static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
tree etype = TREE_TYPE (exp);
tree value;
- if (! in_p
- && (0 != (value = build_range_check (type, exp, 1, low, high))))
- return invert_truthvalue (value);
+ if (! in_p)
+ {
+ value = build_range_check (type, exp, 1, low, high);
+ if (value != 0)
+ return invert_truthvalue (value);
+
+ return 0;
+ }
if (low == 0 && high == 0)
return fold_convert (type, integer_one_node);
}
}
- if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
- && ! TREE_OVERFLOW (value))
+ value = const_binop (MINUS_EXPR, high, low, 0);
+ if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
+ {
+ tree utype, minv, maxv;
+
+ /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
+ for the type in question, as we rely on this here. */
+ switch (TREE_CODE (etype))
+ {
+ case INTEGER_TYPE:
+ case ENUMERAL_TYPE:
+ case CHAR_TYPE:
+ utype = lang_hooks.types.unsigned_type (etype);
+ maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
+ maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
+ integer_one_node, 1);
+ minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
+ if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
+ minv, 1, maxv, 1)))
+ {
+ etype = utype;
+ high = fold_convert (etype, high);
+ low = fold_convert (etype, low);
+ exp = fold_convert (etype, exp);
+ value = const_binop (MINUS_EXPR, high, low, 0);
+ }
+ break;
+ default:
+ break;
+ }
+ }
+
+ if (value != 0 && ! TREE_OVERFLOW (value))
return build_range_check (type,
fold (build2 (MINUS_EXPR, etype, exp, low)),
1, fold_convert (etype, integer_zero_node),
1, low1, 0)))
in_p = 0, low = low0, high = high1;
else
- return 0;
- }
- else if (subset)
- in_p = 0, low = low0, high = high0;
- else
- in_p = 0, low = low0, high = high1;
- }
-
- *pin_p = in_p, *plow = low, *phigh = high;
- return 1;
-}
+ {
+ /* Canonicalize - [min, x] into - [-, x]. */
+ if (low0 && TREE_CODE (low0) == INTEGER_CST)
+ switch (TREE_CODE (TREE_TYPE (low0)))
+ {
+ case ENUMERAL_TYPE:
+ if (TYPE_PRECISION (TREE_TYPE (low0))
+ != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
+ break;
+ /* FALLTHROUGH */
+ case INTEGER_TYPE:
+ case CHAR_TYPE:
+ if (tree_int_cst_equal (low0,
+ TYPE_MIN_VALUE (TREE_TYPE (low0))))
+ low0 = 0;
+ break;
+ case POINTER_TYPE:
+ if (TYPE_UNSIGNED (TREE_TYPE (low0))
+ && integer_zerop (low0))
+ low0 = 0;
+ break;
+ default:
+ break;
+ }
+
+ /* Canonicalize - [x, max] into - [x, -]. */
+ if (high1 && TREE_CODE (high1) == INTEGER_CST)
+ switch (TREE_CODE (TREE_TYPE (high1)))
+ {
+ case ENUMERAL_TYPE:
+ if (TYPE_PRECISION (TREE_TYPE (high1))
+ != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
+ break;
+ /* FALLTHROUGH */
+ case INTEGER_TYPE:
+ case CHAR_TYPE:
+ if (tree_int_cst_equal (high1,
+ TYPE_MAX_VALUE (TREE_TYPE (high1))))
+ high1 = 0;
+ break;
+ case POINTER_TYPE:
+ if (TYPE_UNSIGNED (TREE_TYPE (high1))
+ && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
+ high1, 1,
+ integer_one_node, 1)))
+ high1 = 0;
+ break;
+ default:
+ break;
+ }
+
+ /* The ranges might be also adjacent between the maximum and
+ minimum values of the given type. For
+ - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
+ return + [x + 1, y - 1]. */
+ if (low0 == 0 && high1 == 0)
+ {
+ low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
+ integer_one_node, 1);
+ high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
+ integer_one_node, 0);
+ if (low == 0 || high == 0)
+ return 0;
+
+ in_p = 1;
+ }
+ else
+ return 0;
+ }
+ }
+ else if (subset)
+ in_p = 0, low = low0, high = high0;
+ else
+ in_p = 0, low = low0, high = high1;
+ }
+
+ *pin_p = in_p, *plow = low, *phigh = high;
+ return 1;
+}
+\f
+
+/* Subroutine of fold, looking inside expressions of the form
+ A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
+ of the COND_EXPR. This function is being used also to optimize
+ A op B ? C : A, by reversing the comparison first.
+
+ Return a folded expression whose code is not a COND_EXPR
+ anymore, or NULL_TREE if no folding opportunity is found. */
+
+static tree
+fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
+{
+  enum tree_code comp_code = TREE_CODE (arg0);
+  tree arg00 = TREE_OPERAND (arg0, 0);
+  tree arg01 = TREE_OPERAND (arg0, 1);
+  tree arg1_type = TREE_TYPE (arg1);
+  tree tem;
+
+  /* Record ARG1's type before stripping conversions, so the A op 0 ? A : -A
+     transformations below can convert back to it.  */
+  STRIP_NOPS (arg1);
+  STRIP_NOPS (arg2);
+
+  /* If we have A op 0 ? A : -A, consider applying the following
+     transformations:
+
+     A == 0? A : -A    same as -A
+     A != 0? A : -A    same as A
+     A >= 0? A : -A    same as abs (A)
+     A > 0?  A : -A    same as abs (A)
+     A <= 0? A : -A    same as -abs (A)
+     A < 0?  A : -A    same as -abs (A)
+
+     None of these transformations work for modes with signed
+     zeros.  If A is +/-0, the first two transformations will
+     change the sign of the result (from +0 to -0, or vice
+     versa).  The last four will fix the sign of the result,
+     even though the original expressions could be positive or
+     negative, depending on the sign of A.
+
+     Note that all these transformations are correct if A is
+     NaN, since the two alternatives (A and -A) are also NaNs.  */
+  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
+       ? real_zerop (arg01)
+       : integer_zerop (arg01))
+      && TREE_CODE (arg2) == NEGATE_EXPR
+      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
+    switch (comp_code)
+      {
+      case EQ_EXPR:
+      case UNEQ_EXPR:
+	tem = fold_convert (arg1_type, arg1);
+	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
+      case NE_EXPR:
+      case LTGT_EXPR:
+	return pedantic_non_lvalue (fold_convert (type, arg1));
+      case UNGE_EXPR:
+      case UNGT_EXPR:
+	/* With -ftrapping-math, UNGE/UNGT must not be folded to ABS_EXPR:
+	   the original comparison could trap on a signaling NaN while the
+	   abs would not.  */
+	if (flag_trapping_math)
+	  break;
+	/* Fall through.  */
+      case GE_EXPR:
+      case GT_EXPR:
+	/* ABS_EXPR is only meaningful on signed types, so compute the
+	   absolute value in the corresponding signed type.  */
+	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+	  arg1 = fold_convert (lang_hooks.types.signed_type
+			       (TREE_TYPE (arg1)), arg1);
+	tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
+	return pedantic_non_lvalue (fold_convert (type, tem));
+      case UNLE_EXPR:
+      case UNLT_EXPR:
+	if (flag_trapping_math)
+	  break;
+	/* Fall through.  */
+      case LE_EXPR:
+      case LT_EXPR:
+	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+	  arg1 = fold_convert (lang_hooks.types.signed_type
+			       (TREE_TYPE (arg1)), arg1);
+	tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
+	/* NOTE(review): unlike the GE/GT case above, this return is not
+	   wrapped in pedantic_non_lvalue -- confirm that is intentional
+	   (negate_expr already yields a non-lvalue here).  */
+	return negate_expr (fold_convert (type, tem));
+      default:
+	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
+	break;
+      }
+
+  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
+     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
+     both transformations are correct when A is NaN: A != 0
+     is then true, and A == 0 is false.  */
+
+  if (integer_zerop (arg01) && integer_zerop (arg2))
+    {
+      if (comp_code == NE_EXPR)
+	return pedantic_non_lvalue (fold_convert (type, arg1));
+      else if (comp_code == EQ_EXPR)
+	/* NOTE(review): this return skips pedantic_non_lvalue, unlike the
+	   NE_EXPR branch just above -- verify a constant zero can never be
+	   mistaken for an lvalue here.  */
+	return fold_convert (type, integer_zero_node);
+    }
+
+  /* Try some transformations of A op B ? A : B.
+
+     A == B? A : B    same as B
+     A != B? A : B    same as A
+     A >= B? A : B    same as max (A, B)
+     A > B?  A : B    same as max (B, A)
+     A <= B? A : B    same as min (A, B)
+     A < B?  A : B    same as min (B, A)
+
+     As above, these transformations don't work in the presence
+     of signed zeros.  For example, if A and B are zeros of
+     opposite sign, the first two transformations will change
+     the sign of the result.  In the last four, the original
+     expressions give different results for (A=+0, B=-0) and
+     (A=-0, B=+0), but the transformed expressions do not.
+
+     The first two transformations are correct if either A or B
+     is a NaN.  In the first transformation, the condition will
+     be false, and B will indeed be chosen.  In the case of the
+     second transformation, the condition A != B will be true,
+     and A will be chosen.
+
+     The conversions to max() and min() are not correct if B is
+     a number and A is not.  The conditions in the original
+     expressions will be false, so all four give B.  The min()
+     and max() versions would give a NaN instead.  */
+  if (operand_equal_for_comparison_p (arg01, arg2, arg00))
+    {
+      tree comp_op0 = arg00;
+      tree comp_op1 = arg01;
+      tree comp_type = TREE_TYPE (comp_op0);
+
+      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
+      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
+	{
+	  comp_type = type;
+	  comp_op0 = arg1;
+	  comp_op1 = arg2;
+	}
+
+      switch (comp_code)
+	{
+	case EQ_EXPR:
+	  return pedantic_non_lvalue (fold_convert (type, arg2));
+	case NE_EXPR:
+	  return pedantic_non_lvalue (fold_convert (type, arg1));
+	case LE_EXPR:
+	case LT_EXPR:
+	case UNLE_EXPR:
+	case UNLT_EXPR:
+	  /* In C++ a ?: expression can be an lvalue, so put the
+	     operand which will be used if they are equal first
+	     so that we can convert this back to the
+	     corresponding COND_EXPR.  */
+	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+	    {
+	      comp_op0 = fold_convert (comp_type, comp_op0);
+	      comp_op1 = fold_convert (comp_type, comp_op1);
+	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
+		    ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
+		    : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
+	      return pedantic_non_lvalue (fold_convert (type, tem));
+	    }
+	  break;
+	case GE_EXPR:
+	case GT_EXPR:
+	case UNGE_EXPR:
+	case UNGT_EXPR:
+	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+	    {
+	      comp_op0 = fold_convert (comp_type, comp_op0);
+	      comp_op1 = fold_convert (comp_type, comp_op1);
+	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
+		    ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
+		    : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
+	      return pedantic_non_lvalue (fold_convert (type, tem));
+	    }
+	  break;
+	case UNEQ_EXPR:
+	  /* A UNEQ B selects B when neither operand is a NaN, and with
+	     NaNs excluded the condition A UNEQ B degenerates to A == B.  */
+	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+	    return pedantic_non_lvalue (fold_convert (type, arg2));
+	  break;
+	case LTGT_EXPR:
+	  /* Likewise LTGT degenerates to != when NaNs are excluded.  */
+	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+	    return pedantic_non_lvalue (fold_convert (type, arg1));
+	  break;
+	default:
+	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
+	  break;
+	}
+    }
+
+  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
+     we might still be able to simplify this.  For example,
+     if C1 is one less or one more than C2, this might have started
+     out as a MIN or MAX and been transformed by this function.
+     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
+
+  if (INTEGRAL_TYPE_P (type)
+      && TREE_CODE (arg01) == INTEGER_CST
+      && TREE_CODE (arg2) == INTEGER_CST)
+    switch (comp_code)
+      {
+      case EQ_EXPR:
+	/* We can replace A with C1 in this case.  */
+	arg1 = fold_convert (type, arg01);
+	return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
+
+      case LT_EXPR:
+	/* If C1 is C2 + 1, this is min(A, C2).  The TYPE_MAX_VALUE check
+	   guards against C2 + 1 wrapping around.  */
+	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
+			       OEP_ONLY_CONST)
+	    && operand_equal_p (arg01,
+				const_binop (PLUS_EXPR, arg2,
+					     integer_one_node, 0),
+				OEP_ONLY_CONST))
+	  return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
+						    type, arg1, arg2)));
+	break;
+
+      case LE_EXPR:
+	/* If C1 is C2 - 1, this is min(A, C2); guard against C2 - 1
+	   wrapping below TYPE_MIN_VALUE.  */
+	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
+			       OEP_ONLY_CONST)
+	    && operand_equal_p (arg01,
+				const_binop (MINUS_EXPR, arg2,
+					     integer_one_node, 0),
+				OEP_ONLY_CONST))
+	  return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
+						    type, arg1, arg2)));
+	break;
+
+      case GT_EXPR:
+	/* If C1 is C2 - 1, this is max(A, C2).  */
+	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
+			       OEP_ONLY_CONST)
+	    && operand_equal_p (arg01,
+				const_binop (MINUS_EXPR, arg2,
+					     integer_one_node, 0),
+				OEP_ONLY_CONST))
+	  return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
+						    type, arg1, arg2)));
+	break;
+
+      case GE_EXPR:
+	/* If C1 is C2 + 1, this is max(A, C2).  */
+	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
+			       OEP_ONLY_CONST)
+	    && operand_equal_p (arg01,
+				const_binop (PLUS_EXPR, arg2,
+					     integer_one_node, 0),
+				OEP_ONLY_CONST))
+	  return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
+						    type, arg1, arg2)));
+	break;
+      case NE_EXPR:
+	break;
+      default:
+	gcc_unreachable ();
+      }
+
+  return NULL_TREE;
+}
+
+
\f
#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
{
- lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
+ lhs = build2 (NE_EXPR, truth_type, lhs,
+ fold_convert (TREE_TYPE (lhs), integer_zero_node));
lcode = NE_EXPR;
}
if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
{
- rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
+ rhs = build2 (NE_EXPR, truth_type, rhs,
+ fold_convert (TREE_TYPE (rhs), integer_zero_node));
rcode = NE_EXPR;
}
- if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
+ if (TREE_CODE_CLASS (lcode) != tcc_comparison
+ || TREE_CODE_CLASS (rcode) != tcc_comparison)
return 0;
- code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
- ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
-
ll_arg = TREE_OPERAND (lhs, 0);
lr_arg = TREE_OPERAND (lhs, 1);
rl_arg = TREE_OPERAND (rhs, 0);
/* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
if (simple_operand_p (ll_arg)
- && simple_operand_p (lr_arg)
- && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
+ && simple_operand_p (lr_arg))
{
- int compcode;
-
+ tree result;
if (operand_equal_p (ll_arg, rl_arg, 0)
&& operand_equal_p (lr_arg, rr_arg, 0))
- {
- int lcompcode, rcompcode;
-
- lcompcode = comparison_to_compcode (lcode);
- rcompcode = comparison_to_compcode (rcode);
- compcode = (code == TRUTH_AND_EXPR)
- ? lcompcode & rcompcode
- : lcompcode | rcompcode;
- }
+ {
+ result = combine_comparisons (code, lcode, rcode,
+ truth_type, ll_arg, lr_arg);
+ if (result)
+ return result;
+ }
else if (operand_equal_p (ll_arg, rr_arg, 0)
&& operand_equal_p (lr_arg, rl_arg, 0))
- {
- int lcompcode, rcompcode;
-
- rcode = swap_tree_comparison (rcode);
- lcompcode = comparison_to_compcode (lcode);
- rcompcode = comparison_to_compcode (rcode);
- compcode = (code == TRUTH_AND_EXPR)
- ? lcompcode & rcompcode
- : lcompcode | rcompcode;
- }
- else
- compcode = -1;
-
- if (compcode == COMPCODE_TRUE)
- return fold_convert (truth_type, integer_one_node);
- else if (compcode == COMPCODE_FALSE)
- return fold_convert (truth_type, integer_zero_node);
- else if (compcode != -1)
- return build2 (compcode_to_comparison (compcode),
- truth_type, ll_arg, lr_arg);
+ {
+ result = combine_comparisons (code, lcode,
+ swap_tree_comparison (rcode),
+ truth_type, ll_arg, lr_arg);
+ if (result)
+ return result;
+ }
}
+ code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
+ ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
+
/* If the RHS can be evaluated unconditionally and its operands are
simple, it wins to evaluate the RHS unconditionally on machines
with expensive branches. In this case, this isn't a comparison
return build2 (NE_EXPR, truth_type,
build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
ll_arg, rl_arg),
- integer_zero_node);
+ fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
/* Convert (a == 0) && (b == 0) into (a | b) == 0. */
if (code == TRUTH_AND_EXPR
return build2 (EQ_EXPR, truth_type,
build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
ll_arg, rl_arg),
- integer_zero_node);
+ fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
return build2 (code, truth_type, lhs, rhs);
}
{
warning ("comparison is always %d", wanted_code == NE_EXPR);
- return fold_convert (truth_type,
- wanted_code == NE_EXPR
- ? integer_one_node : integer_zero_node);
+ return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
}
}
if (r_const)
{
warning ("comparison is always %d", wanted_code == NE_EXPR);
- return fold_convert (truth_type,
- wanted_code == NE_EXPR
- ? integer_one_node : integer_zero_node);
+ return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
}
}
{
if (wanted_code == NE_EXPR)
{
- warning ("`or' of unmatched not-equal tests is always 1");
- return fold_convert (truth_type, integer_one_node);
+ warning ("%<or%> of unmatched not-equal tests is always 1");
+ return constant_boolean_node (true, truth_type);
}
else
{
- warning ("`and' of mutually exclusive equal-tests is always 0");
- return fold_convert (truth_type, integer_zero_node);
+ warning ("%<and%> of mutually exclusive equal-tests is always 0");
+ return constant_boolean_node (false, truth_type);
}
}
if (integer_zerop (c))
return NULL_TREE;
- if (TREE_CODE_CLASS (tcode) == '1')
+ if (TREE_CODE_CLASS (tcode) == tcc_unary)
op0 = TREE_OPERAND (t, 0);
- if (TREE_CODE_CLASS (tcode) == '2')
+ if (TREE_CODE_CLASS (tcode) == tcc_binary)
op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
/* Note that we need not handle conditional operations here since fold
case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
/* If op0 is an expression ... */
- if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
- || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
- || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
- || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
+ if ((COMPARISON_CLASS_P (op0)
+ || UNARY_CLASS_P (op0)
+ || BINARY_CLASS_P (op0)
+ || EXPRESSION_CLASS_P (op0))
/* ... and is unsigned, and its type is smaller than ctype,
then we cannot pass through as widening. */
&& ((TYPE_UNSIGNED (TREE_TYPE (op0))
&& TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
&& (GET_MODE_SIZE (TYPE_MODE (ctype))
> GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
- /* ... or its type is larger than ctype,
- then we cannot pass through this truncation. */
- || (GET_MODE_SIZE (TYPE_MODE (ctype))
+ /* ... or this is a truncation (t is narrower than op0),
+ then we cannot pass through this narrowing. */
+ || (GET_MODE_SIZE (TYPE_MODE (type))
< GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
/* ... or signedness changes for division or modulus,
then we cannot pass through this conversion. */
case LSHIFT_EXPR: case RSHIFT_EXPR:
/* If the second operand is constant, this is a multiplication
or floor division, by a power of two, so we can treat it that
- way unless the multiplier or divisor overflows. */
+ way unless the multiplier or divisor overflows. Signed
+ left-shift overflow is implementation-defined rather than
+ undefined in C90, so do not convert signed left shift into
+ multiplication. */
if (TREE_CODE (op1) == INTEGER_CST
+ && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
/* const_binop may not detect overflow correctly,
so check for it explicitly here. */
&& TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
/* Return a node which has the indicated constant VALUE (either 0 or
1), and is of the indicated TYPE. */
-static tree
+tree
constant_boolean_node (int value, tree type)
{
if (type == integer_type_node)
return value ? integer_one_node : integer_zero_node;
+ else if (type == boolean_type_node)
+ return value ? boolean_true_node : boolean_false_node;
else if (TREE_CODE (type) == BOOLEAN_TYPE)
return lang_hooks.truthvalue_conversion (value ? integer_one_node
: integer_zero_node);
else
- {
- tree t = build_int_2 (value, 0);
-
- TREE_TYPE (t) = type;
- return t;
- }
+ return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
{
tree testtype = TREE_TYPE (cond);
test = cond;
- true_value = fold_convert (testtype, integer_one_node);
- false_value = fold_convert (testtype, integer_zero_node);
+ true_value = constant_boolean_node (true, testtype);
+ false_value = constant_boolean_node (false, testtype);
}
if (lhs == 0)
{
/* sqrt(x) < y is always false, if y is negative. */
if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
- return omit_one_operand (type,
- fold_convert (type, integer_zero_node),
- arg);
+ return omit_one_operand (type, integer_zero_node, arg);
/* sqrt(x) > y is always true, if y is negative and we
don't care about NaNs, i.e. negative values of x. */
if (code == NE_EXPR || !HONOR_NANS (mode))
- return omit_one_operand (type,
- fold_convert (type, integer_one_node),
- arg);
+ return omit_one_operand (type, integer_one_node, arg);
/* sqrt(x) > y is the same as x >= 0, if y is negative. */
return fold (build2 (GE_EXPR, type, arg,
/* sqrt(x) > y is always false, when y is very large
and we don't care about infinities. */
- return omit_one_operand (type,
- fold_convert (type, integer_zero_node),
- arg);
+ return omit_one_operand (type, integer_zero_node, arg);
}
/* sqrt(x) > c is the same as x > c*c. */
/* sqrt(x) < y is always true, when y is a very large
value and we don't care about NaNs or Infinities. */
if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
- return omit_one_operand (type,
- fold_convert (type, integer_one_node),
- arg);
+ return omit_one_operand (type, integer_one_node, arg);
/* sqrt(x) < y is x != +Inf when y is very large and we
don't care about NaNs. */
	  /* x > +Inf is always false, if we ignore sNaNs.  */
if (HONOR_SNANS (mode))
return NULL_TREE;
- return omit_one_operand (type,
- fold_convert (type, integer_zero_node),
- arg0);
+ return omit_one_operand (type, integer_zero_node, arg0);
case LE_EXPR:
	  /* x <= +Inf is always true, if we don't care about NaNs.  */
if (! HONOR_NANS (mode))
- return omit_one_operand (type,
- fold_convert (type, integer_one_node),
- arg0);
+ return omit_one_operand (type, integer_one_node, arg0);
/* x <= +Inf is the same as x == x, i.e. isfinite(x). */
if (lang_hooks.decls.global_bindings_p () == 0
if (! HONOR_NANS (mode))
return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
arg0, build_real (TREE_TYPE (arg0), max)));
+
+ /* The transformation below creates non-gimple code and thus is
+ not appropriate if we are in gimple form. */
+ if (in_gimple_form)
+ return NULL_TREE;
+
temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
arg0, build_real (TREE_TYPE (arg0), max)));
return fold (build1 (TRUTH_NOT_EXPR, type, temp));
}
/* Subroutine of fold() that optimizes comparisons of a division by
- a non-zero integer constant against an integer constant, i.e.
+ a nonzero integer constant against an integer constant, i.e.
X/C1 op C2.
CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
TREE_INT_CST_HIGH (arg01),
TREE_INT_CST_LOW (arg1),
TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
- prod = build_int_2 (lpart, hpart);
- TREE_TYPE (prod) = TREE_TYPE (arg00);
- TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
- || TREE_INT_CST_HIGH (prod) != hpart
- || TREE_INT_CST_LOW (prod) != lpart;
- TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
+ prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
+ prod = force_fit_type (prod, -1, overflow, false);
if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
{
TREE_INT_CST_LOW (tmp),
TREE_INT_CST_HIGH (tmp),
&lpart, &hpart);
- hi = build_int_2 (lpart, hpart);
- TREE_TYPE (hi) = TREE_TYPE (arg00);
- TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
- || TREE_INT_CST_HIGH (hi) != hpart
- || TREE_INT_CST_LOW (hi) != lpart
- || TREE_OVERFLOW (prod);
- TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
+ hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
+ hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
+ TREE_CONSTANT_OVERFLOW (prod));
}
else if (tree_int_cst_sgn (arg01) >= 0)
{
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
else
{
+ /* A negative divisor reverses the relational operators. */
+ code = swap_tree_comparison (code);
+
tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
switch (tree_int_cst_sgn (arg1))
{
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
equality/inequality test, then return a simplified form of
the test using shifts and logical operations. Otherwise return
NULL. TYPE is the desired result type. */
-
+
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
tree result_type)
arg1 = TREE_OPERAND (arg0, 1);
arg0 = TREE_OPERAND (arg0, 0);
- /* This requires us to invert the code. */
+ /* This requires us to invert the code. */
code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
}
int ops_unsigned;
tree signed_type, unsigned_type, intermediate_type;
tree arg00;
-
+
/* If we have (A & C) != 0 where C is the sign bit of A, convert
this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
- if (arg00 != NULL_TREE)
+ if (arg00 != NULL_TREE
+ /* This is only a win if casting to a signed type is cheap,
+ i.e. when arg00's type is not a partial mode. */
+ && TYPE_PRECISION (TREE_TYPE (arg00))
+ == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
{
tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
fold_convert (stype, integer_zero_node)));
}
- /* At this point, we know that arg0 is not testing the sign bit. */
- if (TYPE_PRECISION (type) - 1 == bitnum)
- abort ();
-
- /* Otherwise we have (A & C) != 0 where C is a single bit,
+ /* Otherwise we have (A & C) != 0 where C is a single bit,
convert that into ((A >> C2) & 1). Where C2 = log2(C).
Similarly for (A & C) == 0. */
inner, size_int (bitnum));
if (code == EQ_EXPR)
- inner = build2 (BIT_XOR_EXPR, intermediate_type,
- inner, integer_one_node);
+ inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
+ inner, integer_one_node));
/* Put the AND last so it can combine with more things. */
inner = build2 (BIT_AND_EXPR, intermediate_type,
reorder_operands_p (tree arg0, tree arg1)
{
if (! flag_evaluation_order)
- return true;
+ return true;
if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
return true;
return ! TREE_SIDE_EFFECTS (arg0)
isn't. If REORDER is true, only recommend swapping if we can
evaluate the operands in reverse order. */
-static bool
+bool
tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
{
STRIP_SIGN_NOPS (arg0);
return 0;
if (TREE_CONSTANT (arg0))
return 1;
-
+
if (optimize_size)
return 0;
if (DECL_P (arg0))
return 1;
- if (reorder && flag_evaluation_order
- && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
- return 0;
-
- if (DECL_P (arg1))
- return 0;
- if (DECL_P (arg0))
+ /* It is preferable to swap two SSA_NAME to ensure a canonical form
+ for commutative and comparison operators. Ensuring a canonical
+ form allows the optimizers to find additional redundancies without
+ having to explicitly check for both orderings. */
+ if (TREE_CODE (arg0) == SSA_NAME
+ && TREE_CODE (arg1) == SSA_NAME
+ && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
return 1;
return 0;
tree tem;
tree arg0 = NULL_TREE, arg1 = NULL_TREE;
enum tree_code code = TREE_CODE (t);
- int kind = TREE_CODE_CLASS (code);
+ enum tree_code_class kind = TREE_CODE_CLASS (code);
/* WINS will be nonzero when the switch is done
if all operands are constant. */
int wins = 1;
- /* Don't try to process an RTL_EXPR since its operands aren't trees.
- Likewise for a SAVE_EXPR that's already been evaluated. */
- if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
- return t;
-
/* Return right away if a constant. */
- if (kind == 'c')
+ if (kind == tcc_constant)
return t;
if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
of the arguments so that their form can be studied. In any
cases, the appropriate type conversions should be put back in
the tree that will get out of the constant folder. */
- if (kind == '<')
+ if (kind == tcc_comparison)
STRIP_SIGN_NOPS (op);
else
STRIP_NOPS (op);
return tem;
}
- if (TREE_CODE_CLASS (code) == '1')
+ if (TREE_CODE_CLASS (code) == tcc_unary)
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
return tem;
}
- else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
+ else if (COMPARISON_CLASS_P (arg0))
{
if (TREE_CODE (type) == BOOLEAN_TYPE)
{
integer_zero_node))));
}
}
- else if (TREE_CODE_CLASS (code) == '<'
+ else if (TREE_CODE_CLASS (code) == tcc_comparison
&& TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
- else if (TREE_CODE_CLASS (code) == '<'
+ else if (TREE_CODE_CLASS (code) == tcc_comparison
&& TREE_CODE (arg1) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
- else if (TREE_CODE_CLASS (code) == '2'
- || TREE_CODE_CLASS (code) == '<')
+ else if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison)
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
fold (build2 (code, type,
arg0, TREE_OPERAND (arg1, 1))));
- if (TREE_CODE (arg0) == COND_EXPR
- || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
+ if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
{
tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
/*cond_first_p=*/1);
return tem;
}
- if (TREE_CODE (arg1) == COND_EXPR
- || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
+ if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
{
tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
/*cond_first_p=*/0);
/* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
T2 being pointers to types of the same size. */
if (POINTER_TYPE_P (TREE_TYPE (t))
- && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
+ && BINARY_CLASS_P (arg0)
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
{
return t;
case ABS_EXPR:
- if (wins
- && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
+ if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
return fold_abs_const (arg0, type);
else if (TREE_CODE (arg0) == NEGATE_EXPR)
return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
return t;
case BIT_NOT_EXPR:
- if (wins)
- {
- tem = build_int_2 (~ TREE_INT_CST_LOW (arg0),
- ~ TREE_INT_CST_HIGH (arg0));
- TREE_TYPE (tem) = type;
- force_fit_type (tem, 0);
- TREE_OVERFLOW (tem) = TREE_OVERFLOW (arg0);
- TREE_CONSTANT_OVERFLOW (tem) = TREE_CONSTANT_OVERFLOW (arg0);
- return tem;
- }
+ if (TREE_CODE (arg0) == INTEGER_CST)
+ return fold_not_const (arg0, type);
else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
return TREE_OPERAND (arg0, 0);
return t;
if (exact_log2 (int11) > 0 && int01 % int11 == 0)
{
alt0 = fold (build2 (MULT_EXPR, type, arg00,
- build_int_2 (int01 / int11, 0)));
+ build_int_cst (NULL_TREE,
+ int01 / int11)));
alt1 = arg10;
same = arg11;
}
if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
return non_lvalue (fold_convert (type, arg1));
+ /* Convert X + -C into X - C. */
+ if (TREE_CODE (arg1) == REAL_CST
+ && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
+ {
+ tem = fold_negate_const (arg1, type);
+ if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
+ return fold (build2 (MINUS_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, tem)));
+ }
+
/* Convert x+x into x*2.0. */
if (operand_equal_p (arg0, arg1, 0)
&& SCALAR_FLOAT_TYPE_P (type))
TREE_OPERAND (arg0, 0),
build_real (type, c1)));
}
- /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
+ /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg1) == PLUS_EXPR
&& TREE_CODE (arg0) != MULT_EXPR)
return fold (build2 (PLUS_EXPR, type, tree0, tree11));
}
}
- /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
+ /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg0) == PLUS_EXPR
&& TREE_CODE (arg1) != MULT_EXPR)
tree mask0 = TREE_OPERAND (arg0, 1);
tree mask1 = TREE_OPERAND (arg1, 1);
tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
-
+
if (operand_equal_p (tem, mask1, 0))
{
tem = fold (build2 (BIT_XOR_EXPR, type,
/* A - B -> A + (-B) if B is easily negatable. */
if (!wins && negate_expr_p (arg1)
- && (FLOAT_TYPE_P (type)
+ && ((FLOAT_TYPE_P (type)
+ /* Avoid this transformation if B is a positive REAL_CST. */
+ && (TREE_CODE (arg1) != REAL_CST
+ || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
|| (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
+ /* Try folding difference of addresses. */
+ {
+ HOST_WIDE_INT diff;
+
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (arg1) == ADDR_EXPR
+ && ptr_difference_const (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0),
+ &diff))
+ return build_int_cst_type (type, diff);
+ }
+
if (TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (arg1) == MULT_EXPR
&& (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
return non_lvalue (fold_convert (type, arg0));
if (operand_equal_p (arg0, arg1, 0))
return non_lvalue (fold_convert (type, arg0));
+
+ /* ~X | X is -1. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ {
+ t1 = build_int_cst (type, -1);
+ t1 = force_fit_type (t1, 0, false, false);
+ return omit_one_operand (type, t1, arg1);
+ }
+
+ /* X | ~X is -1. */
+ if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ {
+ t1 = build_int_cst (type, -1);
+ t1 = force_fit_type (t1, 0, false, false);
+ return omit_one_operand (type, t1, arg0);
+ }
+
t1 = distribute_bit_expr (code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
if (operand_equal_p (arg0, arg1, 0))
return omit_one_operand (type, integer_zero_node, arg0);
+ /* ~X ^ X is -1. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ {
+ t1 = build_int_cst (type, -1);
+ t1 = force_fit_type (t1, 0, false, false);
+ return omit_one_operand (type, t1, arg1);
+ }
+
+ /* X ^ ~X is -1. */
+ if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ {
+ t1 = build_int_cst (type, -1);
+ t1 = force_fit_type (t1, 0, false, false);
+ return omit_one_operand (type, t1, arg0);
+ }
+
/* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
with a constant, and the two constants have no bits in common,
we should treat this as a BIT_IOR_EXPR since this may produce more
return omit_one_operand (type, arg1, arg0);
if (operand_equal_p (arg0, arg1, 0))
return non_lvalue (fold_convert (type, arg0));
+
+ /* ~X & X is always zero. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return omit_one_operand (type, integer_zero_node, arg1);
+
+ /* X & ~X is always zero. */
+ if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return omit_one_operand (type, integer_zero_node, arg0);
+
t1 = distribute_bit_expr (code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
return omit_one_operand (type, integer_zero_node, arg0);
if (integer_zerop (arg1))
return t;
+
/* X % -1 is zero. */
if (!TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_INT_CST_HIGH (arg1) == -1)
return omit_one_operand (type, integer_zero_node, arg0);
+ /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
+ BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
+ if (code == TRUNC_MOD_EXPR
+ && TYPE_UNSIGNED (type)
+ && integer_pow2p (arg1))
+ {
+ unsigned HOST_WIDE_INT high, low;
+ tree mask;
+ int l;
+
+ l = tree_log2 (arg1);
+ if (l >= HOST_BITS_PER_WIDE_INT)
+ {
+ high = ((unsigned HOST_WIDE_INT) 1
+ << (l - HOST_BITS_PER_WIDE_INT)) - 1;
+ low = -1;
+ }
+ else
+ {
+ high = 0;
+ low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
+ }
+
+ mask = build_int_cst_wide (type, low, high);
+ return fold (build2 (BIT_AND_EXPR, type,
+ fold_convert (type, arg0), mask));
+ }
+
+ /* X % -C is the same as X % C. */
+ if (code == TRUNC_MOD_EXPR
+ && !TYPE_UNSIGNED (type)
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_INT_CST_HIGH (arg1) < 0
+ && !flag_trapv
+ /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
+ && !sign_bit_p (arg1, arg1))
+ return fold (build2 (code, type, fold_convert (type, arg0),
+ fold_convert (type, negate_expr (arg1))));
+
+ /* X % -Y is the same as X % Y. */
+ if (code == TRUNC_MOD_EXPR
+ && !TYPE_UNSIGNED (type)
+ && TREE_CODE (arg1) == NEGATE_EXPR
+ && !flag_trapv)
+ return fold (build2 (code, type, fold_convert (type, arg0),
+ fold_convert (type, TREE_OPERAND (arg1, 0))));
+
if (TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
code, NULL_TREE)))
RROTATE_EXPR by a new constant. */
if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
{
- tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
+ tree tem = build_int_cst (NULL_TREE,
+ GET_MODE_BITSIZE (TYPE_MODE (type)));
tem = fold_convert (TREE_TYPE (arg1), tem);
tem = const_binop (MINUS_EXPR, tem, arg1, 0);
return fold (build2 (RROTATE_EXPR, type, arg0, tem));
goto associate;
case TRUTH_NOT_EXPR:
+ /* The argument to invert_truthvalue must have Boolean type. */
+ if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
+ arg0 = fold_convert (boolean_type_node, arg0);
+
/* Note that the operand of this must be an int
and its values must be 0 or 1.
("true" is a fixed value perhaps depending on the language,
if (integer_zerop (arg0))
return omit_one_operand (type, arg0, arg1);
+ /* !X && X is always false. */
+ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return omit_one_operand (type, integer_zero_node, arg1);
+ /* X && !X is always false. */
+ if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return omit_one_operand (type, integer_zero_node, arg0);
+
truth_andor:
/* We only do these simplifications if we are optimizing. */
if (!optimize)
TRUTH_OR_EXPR. */
if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
return omit_one_operand (type, arg0, arg1);
+
+ /* !X || X is always true. */
+ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return omit_one_operand (type, integer_one_node, arg1);
+ /* X || !X is always true. */
+ if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return omit_one_operand (type, integer_one_node, arg0);
+
goto truth_andor;
case TRUTH_XOR_EXPR:
- /* If either arg is constant zero, drop it. */
- if (integer_zerop (arg0))
- return non_lvalue (fold_convert (type, arg1));
+ /* If the second arg is constant zero, drop it. */
if (integer_zerop (arg1))
return non_lvalue (fold_convert (type, arg0));
- /* If either arg is constant true, this is a logical inversion. */
- if (integer_onep (arg0))
- return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
+ /* If the second arg is constant true, this is a logical inversion. */
if (integer_onep (arg1))
return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
/* Identical arguments cancel to zero. */
if (operand_equal_p (arg0, arg1, 0))
return omit_one_operand (type, integer_zero_node, arg0);
+
+ /* !X ^ X is always true. */
+ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ return omit_one_operand (type, integer_one_node, arg1);
+
+ /* X ^ !X is always true. */
+ if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+ return omit_one_operand (type, integer_one_node, arg0);
+
return t;
case EQ_EXPR:
&& DECL_P (TREE_OPERAND (arg0, 0))
&& ! DECL_WEAK (TREE_OPERAND (arg0, 0))
&& integer_zerop (arg1))
- {
- if (code == EQ_EXPR)
- return fold_convert (type, integer_zero_node);
- else
- return fold_convert (type, integer_one_node);
- }
+ return constant_boolean_node (code != EQ_EXPR, type);
/* If this is an equality comparison of the address of two non-weak,
unaliased symbols neither of which are extern (since we do not
&& ! lookup_attribute ("alias",
DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
&& ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
- {
- if (code == EQ_EXPR)
- return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
- ? integer_one_node : integer_zero_node));
- else
- return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
- ? integer_zero_node : integer_one_node));
- }
+ return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
+ ? code == EQ_EXPR : code != EQ_EXPR,
+ type);
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
&& ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
{
tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
- return omit_one_operand (type, fold_convert (type, tem), arg0);
+ return omit_one_operand (type, tem, arg0);
}
/* Fold comparisons against infinity. */
/* If VAROP is a reference to a bitfield, we must mask
the constant by the width of the field. */
if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
+ && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
+ && host_integerp (DECL_SIZE (TREE_OPERAND
+ (TREE_OPERAND (varop, 0), 1)), 1))
{
tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
- int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
+ HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
tree folded_compare, shift;
/* First check whether the comparison would come out
always the same. If we don't do that we would
change the meaning with the masking. */
folded_compare = fold (build2 (code, type,
- TREE_OPERAND (varop, 0),
- arg1));
+ TREE_OPERAND (varop, 0), arg1));
if (integer_zerop (folded_compare)
|| integer_onep (folded_compare))
return omit_one_operand (type, folded_compare, varop);
- shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
- 0);
+ shift = build_int_cst (NULL_TREE,
+ TYPE_PRECISION (TREE_TYPE (varop)) - size);
+ shift = fold_convert (TREE_TYPE (varop), shift);
newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
newconst, shift));
newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
}
/* Comparisons with the highest or lowest possible integer of
- the specified size will have known values.
+ the specified size will have known values.
This is quite similar to fold_relational_hi_lo; however, my
attempts to share the code have been nothing but trouble.
switch (code)
{
case GT_EXPR:
- return omit_one_operand (type,
- fold_convert (type,
- integer_zero_node),
- arg0);
+ return omit_one_operand (type, integer_zero_node, arg0);
+
case GE_EXPR:
return fold (build2 (EQ_EXPR, type, arg0, arg1));
case LE_EXPR:
- return omit_one_operand (type,
- fold_convert (type,
- integer_one_node),
- arg0);
+ return omit_one_operand (type, integer_one_node, arg0);
+
case LT_EXPR:
return fold (build2 (NE_EXPR, type, arg0, arg1));
switch (code)
{
case LT_EXPR:
- return omit_one_operand (type,
- fold_convert (type,
- integer_zero_node),
- arg0);
+ return omit_one_operand (type, integer_zero_node, arg0);
+
case LE_EXPR:
return fold (build2 (EQ_EXPR, type, arg0, arg1));
case GE_EXPR:
- return omit_one_operand (type,
- fold_convert (type,
- integer_one_node),
- arg0);
+ return omit_one_operand (type, integer_one_node, arg0);
+
case GT_EXPR:
return fold (build2 (NE_EXPR, type, arg0, arg1));
&& (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
&& (TREE_TYPE (t1) == TREE_TYPE (tem)
|| (TREE_CODE (t1) == INTEGER_CST
+ && TREE_CODE (TREE_TYPE (tem)) == INTEGER_TYPE
&& int_fits_type_p (t1, TREE_TYPE (tem)))))
return fold (build2 (code, type, tem,
fold_convert (TREE_TYPE (tem), t1)));
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
- tree newmod = build2 (TREE_CODE (arg0), newtype,
- fold_convert (newtype,
- TREE_OPERAND (arg0, 0)),
- fold_convert (newtype,
- TREE_OPERAND (arg0, 1)));
+ tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
+ fold_convert (newtype,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert (newtype,
+ TREE_OPERAND (arg0, 1))));
- return build2 (code, type, newmod, fold_convert (newtype, arg1));
+ return fold (build2 (code, type, newmod,
+ fold_convert (newtype, arg1)));
}
/* If this is an NE comparison of zero with an AND of one, remove the
&& integer_pow2p (TREE_OPERAND (arg0, 1))
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
- arg0, integer_zero_node));
+ arg0, fold_convert (TREE_TYPE (arg0),
+ integer_zero_node)));
/* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
2, then fold the expression into shifts and logical operations. */
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree dandnotc
- = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- arg1, build1 (BIT_NOT_EXPR,
- TREE_TYPE (TREE_OPERAND (arg0, 1)),
- TREE_OPERAND (arg0, 1))));
+ tree notc = fold (build1 (BIT_NOT_EXPR,
+ TREE_TYPE (TREE_OPERAND (arg0, 1)),
+ TREE_OPERAND (arg0, 1)));
+ tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ arg1, notc));
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (dandnotc))
return omit_one_operand (type, rslt, arg0);
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree candnotd
- = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 1),
- build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
+ tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
+ tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ TREE_OPERAND (arg0, 1), notd));
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (candnotd))
return omit_one_operand (type, rslt, arg0);
case LT_EXPR:
return constant_boolean_node (0, type);
default:
- abort ();
+ gcc_unreachable ();
}
}
&& ! TREE_CHAIN (arglist))
return fold (build2 (code, type,
build1 (INDIRECT_REF, char_type_node,
- TREE_VALUE(arglist)),
- integer_zero_node));
+ TREE_VALUE (arglist)),
+ fold_convert (char_type_node,
+ integer_zero_node)));
}
/* We can fold X/C1 op C2 where C1 and C2 are integer constants
return t1;
}
- /* Both ARG0 and ARG1 are known to be constants at this point. */
+ if ((code == EQ_EXPR || code == NE_EXPR)
+ && !TREE_SIDE_EFFECTS (arg0)
+ && integer_zerop (arg1)
+ && tree_expr_nonzero_p (arg0))
+ return constant_boolean_node (code==NE_EXPR, type);
+
t1 = fold_relational_const (code, type, arg0, arg1);
- return (t1 == NULL_TREE ? t : t1);
+ return t1 == NULL_TREE ? t : t1;
+
+ case UNORDERED_EXPR:
+ case ORDERED_EXPR:
+ case UNLT_EXPR:
+ case UNLE_EXPR:
+ case UNGT_EXPR:
+ case UNGE_EXPR:
+ case UNEQ_EXPR:
+ case LTGT_EXPR:
+ if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
+ {
+ t1 = fold_relational_const (code, type, arg0, arg1);
+ if (t1 != NULL_TREE)
+ return t1;
+ }
+
+ /* If the first operand is NaN, the result is constant. */
+ if (TREE_CODE (arg0) == REAL_CST
+ && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
+ && (code != LTGT_EXPR || ! flag_trapping_math))
+ {
+ t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
+ ? integer_zero_node
+ : integer_one_node;
+ return omit_one_operand (type, t1, arg1);
+ }
+
+ /* If the second operand is NaN, the result is constant. */
+ if (TREE_CODE (arg1) == REAL_CST
+ && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
+ && (code != LTGT_EXPR || ! flag_trapping_math))
+ {
+ t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
+ ? integer_zero_node
+ : integer_one_node;
+ return omit_one_operand (type, t1, arg0);
+ }
+
+ /* Simplify unordered comparison of something with itself. */
+ if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
+ && operand_equal_p (arg0, arg1, 0))
+ return constant_boolean_node (1, type);
+
+ if (code == LTGT_EXPR
+ && !flag_trapping_math
+ && operand_equal_p (arg0, arg1, 0))
+ return constant_boolean_node (0, type);
+
+ /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
+ {
+ tree targ0 = strip_float_extensions (arg0);
+ tree targ1 = strip_float_extensions (arg1);
+ tree newtype = TREE_TYPE (targ0);
+
+ if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
+ newtype = TREE_TYPE (targ1);
+
+ if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
+ return fold (build2 (code, type, fold_convert (newtype, targ0),
+ fold_convert (newtype, targ1)));
+ }
+
+ return t;
case COND_EXPR:
/* Pedantic ANSI C says that a conditional expression is never an lvalue,
/* If we have A op B ? A : C, we may be able to convert this to a
simpler expression, depending on the operation and the values
of B and C. Signed zeros prevent all of these transformations,
- for reasons given above each one. */
+ for reasons given above each one.
- if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
+ Also try swapping the arguments and inverting the conditional. */
+ if (COMPARISON_CLASS_P (arg0)
&& operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
arg1, TREE_OPERAND (arg0, 1))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
{
- tree arg2 = TREE_OPERAND (t, 2);
- enum tree_code comp_code = TREE_CODE (arg0);
-
- STRIP_NOPS (arg2);
-
- /* If we have A op 0 ? A : -A, consider applying the following
- transformations:
-
- A == 0? A : -A same as -A
- A != 0? A : -A same as A
- A >= 0? A : -A same as abs (A)
- A > 0? A : -A same as abs (A)
- A <= 0? A : -A same as -abs (A)
- A < 0? A : -A same as -abs (A)
-
- None of these transformations work for modes with signed
- zeros. If A is +/-0, the first two transformations will
- change the sign of the result (from +0 to -0, or vice
- versa). The last four will fix the sign of the result,
- even though the original expressions could be positive or
- negative, depending on the sign of A.
-
- Note that all these transformations are correct if A is
- NaN, since the two alternatives (A and -A) are also NaNs. */
- if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
- ? real_zerop (TREE_OPERAND (arg0, 1))
- : integer_zerop (TREE_OPERAND (arg0, 1)))
- && TREE_CODE (arg2) == NEGATE_EXPR
- && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
- switch (comp_code)
- {
- case EQ_EXPR:
- tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
- tem = fold_convert (type, negate_expr (tem));
- return pedantic_non_lvalue (tem);
- case NE_EXPR:
- return pedantic_non_lvalue (fold_convert (type, arg1));
- case GE_EXPR:
- case GT_EXPR:
- if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
- arg1 = fold_convert (lang_hooks.types.signed_type
- (TREE_TYPE (arg1)), arg1);
- arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
- return pedantic_non_lvalue (fold_convert (type, arg1));
- case LE_EXPR:
- case LT_EXPR:
- if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
- arg1 = fold_convert (lang_hooks.types.signed_type
- (TREE_TYPE (arg1)), arg1);
- arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
- arg1 = negate_expr (fold_convert (type, arg1));
- return pedantic_non_lvalue (arg1);
- default:
- abort ();
- }
-
- /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
- A == 0 ? A : 0 is always 0 unless A is -0. Note that
- both transformations are correct when A is NaN: A != 0
- is then true, and A == 0 is false. */
+ tem = fold_cond_expr_with_comparison (type, arg0,
+ TREE_OPERAND (t, 1),
+ TREE_OPERAND (t, 2));
+ if (tem)
+ return tem;
+ }
- if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
+ if (COMPARISON_CLASS_P (arg0)
+ && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (t, 2),
+ TREE_OPERAND (arg0, 1))
+ && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
+ {
+ tem = invert_truthvalue (arg0);
+ if (COMPARISON_CLASS_P (tem))
{
- if (comp_code == NE_EXPR)
- return pedantic_non_lvalue (fold_convert (type, arg1));
- else if (comp_code == EQ_EXPR)
- return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
+ tem = fold_cond_expr_with_comparison (type, tem,
+ TREE_OPERAND (t, 2),
+ TREE_OPERAND (t, 1));
+ if (tem)
+ return tem;
}
+ }
- /* Try some transformations of A op B ? A : B.
-
- A == B? A : B same as B
- A != B? A : B same as A
- A >= B? A : B same as max (A, B)
- A > B? A : B same as max (B, A)
- A <= B? A : B same as min (A, B)
- A < B? A : B same as min (B, A)
-
- As above, these transformations don't work in the presence
- of signed zeros. For example, if A and B are zeros of
- opposite sign, the first two transformations will change
- the sign of the result. In the last four, the original
- expressions give different results for (A=+0, B=-0) and
- (A=-0, B=+0), but the transformed expressions do not.
-
- The first two transformations are correct if either A or B
- is a NaN. In the first transformation, the condition will
- be false, and B will indeed be chosen. In the case of the
- second transformation, the condition A != B will be true,
- and A will be chosen.
-
- The conversions to max() and min() are not correct if B is
- a number and A is not. The conditions in the original
- expressions will be false, so all four give B. The min()
- and max() versions would give a NaN instead. */
- if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
- arg2, TREE_OPERAND (arg0, 0)))
- {
- tree comp_op0 = TREE_OPERAND (arg0, 0);
- tree comp_op1 = TREE_OPERAND (arg0, 1);
- tree comp_type = TREE_TYPE (comp_op0);
+ /* If the second operand is simpler than the third, swap them
+ since that produces better jump optimization results. */
+ if (tree_swap_operands_p (TREE_OPERAND (t, 1),
+ TREE_OPERAND (t, 2), false))
+ {
+ /* See if this can be inverted. If it can't, possibly because
+ it was a floating-point inequality comparison, don't do
+ anything. */
+ tem = invert_truthvalue (arg0);
- /* Avoid adding NOP_EXPRs in case this is an lvalue. */
- if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
- {
- comp_type = type;
- comp_op0 = arg1;
- comp_op1 = arg2;
- }
-
- switch (comp_code)
- {
- case EQ_EXPR:
- return pedantic_non_lvalue (fold_convert (type, arg2));
- case NE_EXPR:
- return pedantic_non_lvalue (fold_convert (type, arg1));
- case LE_EXPR:
- case LT_EXPR:
- /* In C++ a ?: expression can be an lvalue, so put the
- operand which will be used if they are equal first
- so that we can convert this back to the
- corresponding COND_EXPR. */
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
- return pedantic_non_lvalue (fold_convert
- (type, fold (build2 (MIN_EXPR, comp_type,
- (comp_code == LE_EXPR
- ? comp_op0 : comp_op1),
- (comp_code == LE_EXPR
- ? comp_op1 : comp_op0)))));
- break;
- case GE_EXPR:
- case GT_EXPR:
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
- return pedantic_non_lvalue (fold_convert
- (type, fold (build2 (MAX_EXPR, comp_type,
- (comp_code == GE_EXPR
- ? comp_op0 : comp_op1),
- (comp_code == GE_EXPR
- ? comp_op1 : comp_op0)))));
- break;
- default:
- abort ();
- }
- }
-
- /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
- we might still be able to simplify this. For example,
- if C1 is one less or one more than C2, this might have started
- out as a MIN or MAX and been transformed by this function.
- Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
-
- if (INTEGRAL_TYPE_P (type)
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && TREE_CODE (arg2) == INTEGER_CST)
- switch (comp_code)
- {
- case EQ_EXPR:
- /* We can replace A with C1 in this case. */
- arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
- return fold (build3 (code, type, TREE_OPERAND (t, 0), arg1,
- TREE_OPERAND (t, 2)));
-
- case LT_EXPR:
- /* If C1 is C2 + 1, this is min(A, C2). */
- if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
- OEP_ONLY_CONST)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- const_binop (PLUS_EXPR, arg2,
- integer_one_node, 0),
- OEP_ONLY_CONST))
- return pedantic_non_lvalue
- (fold (build2 (MIN_EXPR, type, arg1, arg2)));
- break;
-
- case LE_EXPR:
- /* If C1 is C2 - 1, this is min(A, C2). */
- if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
- OEP_ONLY_CONST)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- const_binop (MINUS_EXPR, arg2,
- integer_one_node, 0),
- OEP_ONLY_CONST))
- return pedantic_non_lvalue
- (fold (build2 (MIN_EXPR, type, arg1, arg2)));
- break;
-
- case GT_EXPR:
- /* If C1 is C2 - 1, this is max(A, C2). */
- if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
- OEP_ONLY_CONST)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- const_binop (MINUS_EXPR, arg2,
- integer_one_node, 0),
- OEP_ONLY_CONST))
- return pedantic_non_lvalue
- (fold (build2 (MAX_EXPR, type, arg1, arg2)));
- break;
-
- case GE_EXPR:
- /* If C1 is C2 + 1, this is max(A, C2). */
- if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
- OEP_ONLY_CONST)
- && operand_equal_p (TREE_OPERAND (arg0, 1),
- const_binop (PLUS_EXPR, arg2,
- integer_one_node, 0),
- OEP_ONLY_CONST))
- return pedantic_non_lvalue
- (fold (build2 (MAX_EXPR, type, arg1, arg2)));
- break;
- case NE_EXPR:
- break;
- default:
- abort ();
- }
- }
-
- /* If the second operand is simpler than the third, swap them
- since that produces better jump optimization results. */
- if (tree_swap_operands_p (TREE_OPERAND (t, 1),
- TREE_OPERAND (t, 2), false))
- {
- /* See if this can be inverted. If it can't, possibly because
- it was a floating-point inequality comparison, don't do
- anything. */
- tem = invert_truthvalue (arg0);
-
- if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return fold (build3 (code, type, tem,
- TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
- }
+ if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
+ return fold (build3 (code, type, tem,
+ TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
+ }
/* Convert A ? 1 : 0 to simply A. */
if (integer_onep (TREE_OPERAND (t, 1))
return pedantic_non_lvalue (fold_convert (type,
invert_truthvalue (arg0)));
- /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
- operation is simply A & 2. */
+ /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
+ if (TREE_CODE (arg0) == LT_EXPR
+ && integer_zerop (TREE_OPERAND (arg0, 1))
+ && integer_zerop (TREE_OPERAND (t, 2))
+ && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
+ return fold_convert (type, fold (build2 (BIT_AND_EXPR,
+ TREE_TYPE (tem), tem, arg1)));
+ /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
+ already handled above. */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && integer_onep (TREE_OPERAND (arg0, 1))
+ && integer_zerop (TREE_OPERAND (t, 2))
+ && integer_pow2p (arg1))
+ {
+ tree tem = TREE_OPERAND (arg0, 0);
+ STRIP_NOPS (tem);
+ if (TREE_CODE (tem) == RSHIFT_EXPR
+ && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
+ && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
+ TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
+ return fold (build2 (BIT_AND_EXPR, type,
+ TREE_OPERAND (tem, 0), arg1));
+ }
+
+ /* A & N ? N : 0 is simply A & N if N is a power of two. This
+ is probably obsolete because the first operand should be a
+ truth value (that's why we have the two cases above), but let's
+ leave it in until we can confirm this for all front-ends. */
if (integer_zerop (TREE_OPERAND (t, 2))
&& TREE_CODE (arg0) == NE_EXPR
&& integer_zerop (TREE_OPERAND (arg0, 1))
if (integer_zerop (TREE_OPERAND (t, 2))
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (arg1)))
- return pedantic_non_lvalue (fold (build2 (TRUTH_ANDIF_EXPR, type,
- arg0, arg1)));
+ return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
/* Convert A ? B : 1 into !A || B if A and B are truth values. */
if (integer_onep (TREE_OPERAND (t, 2))
/* Only perform transformation if ARG0 is easily inverted. */
tem = invert_truthvalue (arg0);
if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
- return pedantic_non_lvalue (fold (build2 (TRUTH_ORIF_EXPR, type,
- tem, arg1)));
+ return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
}
+ /* Convert A ? 0 : B into !A && B if A and B are truth values. */
+ if (integer_zerop (arg1)
+ && truth_value_p (TREE_CODE (arg0))
+ && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
+ {
+ /* Only perform transformation if ARG0 is easily inverted. */
+ tem = invert_truthvalue (arg0);
+ if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
+ return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
+ TREE_OPERAND (t, 2)));
+ }
+
+ /* Convert A ? 1 : B into A || B if A and B are truth values. */
+ if (integer_onep (arg1)
+ && truth_value_p (TREE_CODE (arg0))
+ && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
+ return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
+ TREE_OPERAND (t, 2)));
+
return t;
case COMPOUND_EXPR:
TREE_OPERAND (arg0, 1)))));
return t;
- /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
- appropriate. */
- case CLEANUP_POINT_EXPR:
- if (! has_cleanups (arg0))
- return TREE_OPERAND (t, 0);
-
- {
- enum tree_code code0 = TREE_CODE (arg0);
- int kind0 = TREE_CODE_CLASS (code0);
- tree arg00 = TREE_OPERAND (arg0, 0);
- tree arg01;
-
- if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
- return fold (build1 (code0, type,
- fold (build1 (CLEANUP_POINT_EXPR,
- TREE_TYPE (arg00), arg00))));
-
- if (kind0 == '<' || kind0 == '2'
- || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
- || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
- || code0 == TRUTH_XOR_EXPR)
- {
- arg01 = TREE_OPERAND (arg0, 1);
-
- if (TREE_CONSTANT (arg00)
- || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
- && ! has_cleanups (arg00)))
- return fold (build2 (code0, type, arg00,
- fold (build1 (CLEANUP_POINT_EXPR,
- TREE_TYPE (arg01), arg01))));
-
- if (TREE_CONSTANT (arg01))
- return fold (build2 (code0, type,
- fold (build1 (CLEANUP_POINT_EXPR,
- TREE_TYPE (arg00), arg00)),
- arg01));
- }
-
- return t;
- }
-
case CALL_EXPR:
/* Check for a built-in function. */
if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
== FUNCTION_DECL)
&& DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
{
- tree tmp = fold_builtin (t);
+ tree tmp = fold_builtin (t, false);
if (tmp)
return tmp;
}
char buf[sizeof (struct tree_decl)];
int i, len;
- if (sizeof (struct tree_exp) + 5 * sizeof (tree)
- > sizeof (struct tree_decl)
- || sizeof (struct tree_type) > sizeof (struct tree_decl))
- abort ();
+ gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
+ <= sizeof (struct tree_decl))
+ && sizeof (struct tree_type) <= sizeof (struct tree_decl));
if (expr == NULL)
return;
slot = htab_find_slot (ht, expr, INSERT);
return;
*slot = expr;
code = TREE_CODE (expr);
- if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
- {
- /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
- memcpy (buf, expr, tree_size (expr));
- expr = (tree) buf;
- SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
- }
- else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
+ if (TREE_CODE_CLASS (code) == tcc_declaration
+ && DECL_ASSEMBLER_NAME_SET_P (expr))
{
/* Allow DECL_ASSEMBLER_NAME to be modified. */
memcpy (buf, expr, tree_size (expr));
expr = (tree) buf;
SET_DECL_ASSEMBLER_NAME (expr, NULL);
}
- else if (TREE_CODE_CLASS (code) == 't'
- && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
+ else if (TREE_CODE_CLASS (code) == tcc_type
+ && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
+ || TYPE_CACHED_VALUES_P (expr)))
{
- /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
+ /* Allow these fields to be modified. */
memcpy (buf, expr, tree_size (expr));
expr = (tree) buf;
TYPE_POINTER_TO (expr) = NULL;
TYPE_REFERENCE_TO (expr) = NULL;
+ TYPE_CACHED_VALUES_P (expr) = 0;
+ TYPE_CACHED_VALUES (expr) = NULL;
}
md5_process_bytes (expr, tree_size (expr), ctx);
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
- if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
+ if (TREE_CODE_CLASS (code) != tcc_type
+ && TREE_CODE_CLASS (code) != tcc_declaration)
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
- len = TREE_CODE_LENGTH (code);
switch (TREE_CODE_CLASS (code))
{
- case 'c':
+ case tcc_constant:
switch (code)
{
case STRING_CST:
break;
}
break;
- case 'x':
+ case tcc_exceptional:
switch (code)
{
case TREE_LIST:
break;
}
break;
- case 'e':
- switch (code)
- {
- case SAVE_EXPR: len = 2; break;
- case GOTO_SUBROUTINE_EXPR: len = 0; break;
- case RTL_EXPR: len = 0; break;
- case WITH_CLEANUP_EXPR: len = 2; break;
- default: break;
- }
- /* Fall through. */
- case 'r':
- case '<':
- case '1':
- case '2':
- case 's':
+ case tcc_expression:
+ case tcc_reference:
+ case tcc_comparison:
+ case tcc_unary:
+ case tcc_binary:
+ case tcc_statement:
+ len = first_rtl_op (code);
for (i = 0; i < len; ++i)
fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
break;
- case 'd':
+ case tcc_declaration:
fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
fold_checksum_tree (DECL_NAME (expr), ctx, ht);
fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
break;
- case 't':
- fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
+ case tcc_type:
+ if (TREE_CODE (expr) == ENUMERAL_TYPE)
+ fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
- fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
- fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
+ if (INTEGRAL_TYPE_P (expr)
+ || SCALAR_FLOAT_TYPE_P (expr))
+ {
+ fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
+ fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
+ }
fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
- fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
+ if (TREE_CODE (expr) == RECORD_TYPE
+ || TREE_CODE (expr) == UNION_TYPE
+ || TREE_CODE (expr) == QUAL_UNION_TYPE)
+ fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
break;
default:
case MODIFY_EXPR:
return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
case BIND_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
case SAVE_EXPR:
return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
case NON_LVALUE_EXPR:
return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
case FLOAT_EXPR:
return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
- case RTL_EXPR:
- return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
+
+ case TARGET_EXPR:
+ {
+ tree temp = TARGET_EXPR_SLOT (t);
+ t = TARGET_EXPR_INITIAL (t);
+
+ /* If the initializer is non-void, then it's a normal expression
+ that will be assigned to the slot. */
+ if (!VOID_TYPE_P (t))
+ return tree_expr_nonnegative_p (t);
+
+ /* Otherwise, the initializer sets the slot in some way. One common
+ way is an assignment statement at the end of the initializer. */
+ while (1)
+ {
+ if (TREE_CODE (t) == BIND_EXPR)
+ t = expr_last (BIND_EXPR_BODY (t));
+ else if (TREE_CODE (t) == TRY_FINALLY_EXPR
+ || TREE_CODE (t) == TRY_CATCH_EXPR)
+ t = expr_last (TREE_OPERAND (t, 0));
+ else if (TREE_CODE (t) == STATEMENT_LIST)
+ t = expr_last (t);
+ else
+ break;
+ }
+ if (TREE_CODE (t) == MODIFY_EXPR
+ && TREE_OPERAND (t, 0) == temp)
+ return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+
+ return 0;
+ }
case CALL_EXPR:
{
CASE_BUILTIN_F (BUILT_IN_FREXP)
CASE_BUILTIN_F (BUILT_IN_HYPOT)
CASE_BUILTIN_F (BUILT_IN_POW10)
- CASE_BUILTIN_F (BUILT_IN_SQRT)
CASE_BUILTIN_I (BUILT_IN_FFS)
CASE_BUILTIN_I (BUILT_IN_PARITY)
CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
/* Always true. */
return 1;
+ CASE_BUILTIN_F (BUILT_IN_SQRT)
+ /* sqrt(-0.0) is -0.0. */
+ if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
+ return 1;
+ return tree_expr_nonnegative_p (TREE_VALUE (arglist));
+
CASE_BUILTIN_F (BUILT_IN_ASINH)
CASE_BUILTIN_F (BUILT_IN_ATAN)
CASE_BUILTIN_F (BUILT_IN_ATANH)
/* True if the 1st argument is nonnegative. */
return tree_expr_nonnegative_p (TREE_VALUE (arglist));
- CASE_BUILTIN_F(BUILT_IN_FMAX)
+ CASE_BUILTIN_F (BUILT_IN_FMAX)
/* True if the 1st OR 2nd arguments are nonnegative. */
return tree_expr_nonnegative_p (TREE_VALUE (arglist))
|| tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
- CASE_BUILTIN_F(BUILT_IN_FMIN)
+ CASE_BUILTIN_F (BUILT_IN_FMIN)
/* True if the 1st AND 2nd arguments are nonnegative. */
return tree_expr_nonnegative_p (TREE_VALUE (arglist))
&& tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
- CASE_BUILTIN_F(BUILT_IN_COPYSIGN)
+ CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
/* True if the 2nd argument is nonnegative. */
return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
/* Return true when T is an address and is known to be nonzero.
For floating point we further ensure that T is not denormal.
- Similar logic is present in nonzero_address in rtlanal.h */
+ Similar logic is present in nonzero_address in rtlanal.h. */
static bool
tree_expr_nonzero_p (tree t)
return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
case INTEGER_CST:
- return !integer_zerop (t);
+ /* We used to test for !integer_zerop here. This does not work correctly
+ if TREE_CONSTANT_OVERFLOW (t). */
+ return (TREE_INT_CST_LOW (t) != 0
+ || TREE_INT_CST_HIGH (t) != 0);
case PLUS_EXPR:
if (!TYPE_UNSIGNED (type) && !flag_wrapv)
break;
case ADDR_EXPR:
- /* Weak declarations may link to NULL. */
- if (DECL_P (TREE_OPERAND (t, 0)))
- return !DECL_WEAK (TREE_OPERAND (t, 0));
- /* Constants and all other cases are never weak. */
- return true;
+ {
+ tree base = get_base_address (TREE_OPERAND (t, 0));
+
+ if (!base)
+ return false;
+
+ /* Weak declarations may link to NULL. */
+ if (DECL_P (base))
+ return !DECL_WEAK (base);
+
+ /* Constants are never weak. */
+ if (CONSTANT_CLASS_P (base))
+ return true;
+
+ return false;
+ }
case COND_EXPR:
return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
return false;
}
-/* Return true if `r' is known to be non-negative.
- Only handles constants at the moment. */
-
-int
-rtl_expr_nonnegative_p (rtx r)
-{
- switch (GET_CODE (r))
- {
- case CONST_INT:
- return INTVAL (r) >= 0;
-
- case CONST_DOUBLE:
- if (GET_MODE (r) == VOIDmode)
- return CONST_DOUBLE_HIGH (r) >= 0;
- return 0;
-
- case CONST_VECTOR:
- {
- int units, i;
- rtx elt;
-
- units = CONST_VECTOR_NUNITS (r);
-
- for (i = 0; i < units; ++i)
- {
- elt = CONST_VECTOR_ELT (r, i);
- if (!rtl_expr_nonnegative_p (elt))
- return 0;
- }
-
- return 1;
- }
-
- case SYMBOL_REF:
- case LABEL_REF:
- /* These are always nonnegative. */
- return 1;
-
- default:
- return 0;
- }
-}
-
-
/* See if we are applying CODE, a relational to the highest or lowest
possible integer of TYPE. If so, then the result is a compile
time constant. */
switch (code)
{
case GT_EXPR:
- return omit_one_operand (type,
- fold_convert (type, integer_zero_node),
- op0);
+ return omit_one_operand (type, integer_zero_node, op0);
+
case GE_EXPR:
*code_p = EQ_EXPR;
break;
case LE_EXPR:
- return omit_one_operand (type,
- fold_convert (type, integer_one_node),
- op0);
+ return omit_one_operand (type, integer_one_node, op0);
+
case LT_EXPR:
*code_p = NE_EXPR;
break;
switch (code)
{
case LT_EXPR:
- return omit_one_operand (type,
- fold_convert (type, integer_zero_node),
- op0);
+ return omit_one_operand (type, integer_zero_node, op0);
+
case LE_EXPR:
*code_p = EQ_EXPR;
break;
case GE_EXPR:
- return omit_one_operand (type,
- fold_convert (type, integer_one_node),
- op0);
+ return omit_one_operand (type, integer_one_node, op0);
+
case GT_EXPR:
*code_p = NE_EXPR;
break;
return tem;
}
return NULL_TREE;
-
+
case MINUS_EXPR:
/* Fold &x - &x. This can happen from &x.foo - &x.
This is unsafe for certain floats even in non-IEEE formats.
return omit_one_operand (type, op1, op0);
goto binary;
-
+
case TRUNC_DIV_EXPR:
case ROUND_DIV_EXPR:
case FLOOR_DIV_EXPR:
if (tem)
return tem;
+ /* Fall through. */
+
+ case ORDERED_EXPR:
+ case UNORDERED_EXPR:
+ case UNLT_EXPR:
+ case UNLE_EXPR:
+ case UNGT_EXPR:
+ case UNGE_EXPR:
+ case UNEQ_EXPR:
+ case LTGT_EXPR:
if (!wins)
return NULL_TREE;
if (integer_zerop (op0))
return omit_one_operand (type, op0, op1);
if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
- {
- int x1 = ! integer_zerop (op0);
- int x2 = ! integer_zerop (op1);
-
- return ((x1 & x2) ? integer_one_node : integer_zero_node);
- }
+ return constant_boolean_node (true, type);
return NULL_TREE;
case TRUTH_OR_EXPR:
if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
return omit_one_operand (type, op0, op1);
if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
- {
- int x1 = ! integer_zerop (op0);
- int x2 = ! integer_zerop (op1);
-
- return ((x1 | x2) ? integer_one_node : integer_zero_node);
- }
+ return constant_boolean_node (false, type);
return NULL_TREE;
case TRUTH_XOR_EXPR:
if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
{
- int x1 = ! integer_zerop (op0);
- int x2 = ! integer_zerop (op1);
-
- return ((x1 ^ x2) ? integer_one_node : integer_zero_node);
+ int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
+ return constant_boolean_node (x, type);
}
return NULL_TREE;
/* Given the components of a unary expression CODE, TYPE and OP0,
attempt to fold the expression to a constant without modifying
- TYPE or OP0.
+ TYPE or OP0.
If the expression could be simplified to a constant, then return
the constant. If the expression would not be simplified to a
nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
tree op0)
{
- tree t;
-
/* Make sure we have a suitable constant argument. */
if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
{
return NULL_TREE;
case BIT_NOT_EXPR:
- if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
- {
- t = build_int_2 (~ TREE_INT_CST_LOW (op0), ~ TREE_INT_CST_HIGH (op0));
- TREE_TYPE (t) = type;
- force_fit_type (t, 0);
- TREE_OVERFLOW (t) = TREE_OVERFLOW (op0);
- TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (op0);
- return t;
- }
+ if (TREE_CODE (op0) == INTEGER_CST)
+ return fold_not_const (op0, type);
else
return NULL_TREE;
tree string;
if (TREE_CODE (exp) == INDIRECT_REF)
- {
- string = string_constant (exp1, &index);
- }
+ string = string_constant (exp1, &index);
else
{
- tree domain = TYPE_DOMAIN (TREE_TYPE (exp1));
- tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
+ tree low_bound = array_ref_low_bound (exp);
index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
-
+
/* Optimize the special-case of a zero lower bound.
We convert the low_bound to sizetype to avoid some problems
}
if (string
+ && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
&& TREE_CODE (string) == STRING_CST
&& TREE_CODE (index) == INTEGER_CST
&& compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
== MODE_INT)
&& (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
return fold_convert (TREE_TYPE (exp),
- build_int_2 ((TREE_STRING_POINTER (string)
- [TREE_INT_CST_LOW (index)]), 0));
+ build_int_cst (NULL_TREE,
+ (TREE_STRING_POINTER (string)
+ [TREE_INT_CST_LOW (index)])));
}
return NULL;
}
{
tree t = NULL_TREE;
- if (TREE_CODE (arg0) == INTEGER_CST)
+ switch (TREE_CODE (arg0))
{
- unsigned HOST_WIDE_INT low;
- HOST_WIDE_INT high;
- int overflow = neg_double (TREE_INT_CST_LOW (arg0),
- TREE_INT_CST_HIGH (arg0),
- &low, &high);
- t = build_int_2 (low, high);
- TREE_TYPE (t) = type;
- TREE_OVERFLOW (t)
- = (TREE_OVERFLOW (arg0)
- | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
+ case INTEGER_CST:
+ {
+ unsigned HOST_WIDE_INT low;
+ HOST_WIDE_INT high;
+ int overflow = neg_double (TREE_INT_CST_LOW (arg0),
+ TREE_INT_CST_HIGH (arg0),
+ &low, &high);
+ t = build_int_cst_wide (type, low, high);
+ t = force_fit_type (t, 1,
+ (overflow | TREE_OVERFLOW (arg0))
+ && !TYPE_UNSIGNED (type),
+ TREE_CONSTANT_OVERFLOW (arg0));
+ break;
+ }
+
+ case REAL_CST:
+ t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
+ break;
+
+ default:
+ gcc_unreachable ();
}
- else if (TREE_CODE (arg0) == REAL_CST)
- t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
-#ifdef ENABLE_CHECKING
- else
- abort ();
-#endif
-
+
return t;
}
TYPE is the type of the result. */
-static tree
+tree
fold_abs_const (tree arg0, tree type)
{
tree t = NULL_TREE;
- if (TREE_CODE (arg0) == INTEGER_CST)
+ switch (TREE_CODE (arg0))
{
+ case INTEGER_CST:
/* If the value is unsigned, then the absolute value is
the same as the ordinary value. */
if (TYPE_UNSIGNED (type))
- return arg0;
+ t = arg0;
/* Similarly, if the value is non-negative. */
else if (INT_CST_LT (integer_minus_one_node, arg0))
- return arg0;
+ t = arg0;
/* If the value is negative, then the absolute value is
its negation. */
else
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_2 (low, high);
- TREE_TYPE (t) = type;
- TREE_OVERFLOW (t)
- = (TREE_OVERFLOW (arg0)
- | force_fit_type (t, overflow));
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
- return t;
+ /* NOTE(review): the overflowable flag is -1 here but 1 in the
+ corresponding fold_negate_const change above -- confirm the
+ differing TREE_OVERFLOW behavior is intentional. */
+ t = build_int_cst_wide (type, low, high);
+ t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
+ TREE_CONSTANT_OVERFLOW (arg0));
}
- }
- else if (TREE_CODE (arg0) == REAL_CST)
- {
+ break;
+
+ case REAL_CST:
if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
- return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
+ t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
else
- return arg0;
+ t = arg0;
+ break;
+
+ default:
+ gcc_unreachable ();
}
-#ifdef ENABLE_CHECKING
- else
- abort ();
-#endif
-
+
+ return t;
+}
+
+/* Return the tree for not (ARG0) when ARG0 is known to be an integer
+ constant. TYPE is the type of the result. */
+
+static tree
+fold_not_const (tree arg0, tree type)
+{
+ tree t = NULL_TREE;
+
+ gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
+
+ t = build_int_cst_wide (type,
+ ~ TREE_INT_CST_LOW (arg0),
+ ~ TREE_INT_CST_HIGH (arg0));
+ t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
+ TREE_CONSTANT_OVERFLOW (arg0));
+
return t;
}
static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
- tree tem;
- int invert;
+ int result, invert;
/* From here on, the only cases we handle are when the result is
- known to be a constant.
+ known to be a constant. */
+
+ if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
+ {
+ const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
+ const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
+
+ /* Handle the cases where either operand is a NaN. */
+ if (real_isnan (c0) || real_isnan (c1))
+ {
+ switch (code)
+ {
+ case EQ_EXPR:
+ case ORDERED_EXPR:
+ result = 0;
+ break;
+
+ case NE_EXPR:
+ case UNORDERED_EXPR:
+ case UNLT_EXPR:
+ case UNLE_EXPR:
+ case UNGT_EXPR:
+ case UNGE_EXPR:
+ case UNEQ_EXPR:
+ result = 1;
+ break;
+
+ case LT_EXPR:
+ case LE_EXPR:
+ case GT_EXPR:
+ case GE_EXPR:
+ case LTGT_EXPR:
+ if (flag_trapping_math)
+ return NULL_TREE;
+ result = 0;
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+
+ return constant_boolean_node (result, type);
+ }
+
+ return constant_boolean_node (real_compare (code, c0, c1), type);
+ }
+
+ /* From here on we only handle LT, LE, GT, GE, EQ and NE.
To compute GT, swap the arguments and do LT.
To compute GE, do LT and invert the result.
if (code == LE_EXPR || code == GT_EXPR)
{
- tem = op0, op0 = op1, op1 = tem;
+ tree tem = op0;
+ op0 = op1;
+ op1 = tem;
code = swap_tree_comparison (code);
}
/* Note that it is safe to invert for real values here because we
- will check below in the one case that it matters. */
+ have already handled the one case where it matters. */
- tem = NULL_TREE;
invert = 0;
if (code == NE_EXPR || code == GE_EXPR)
{
invert = 1;
- code = invert_tree_comparison (code);
+ code = invert_tree_comparison (code, false);
}
/* Compute a result for LT or EQ if args permit;
if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
{
if (code == EQ_EXPR)
- tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
- else
- tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
- ? INT_CST_LT_UNSIGNED (op0, op1)
- : INT_CST_LT (op0, op1)),
- 0);
- }
-
- else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
- && integer_zerop (op1) && tree_expr_nonzero_p (op0))
- tem = build_int_2 (0, 0);
-
- /* Two real constants can be compared explicitly. */
- else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
- {
- /* If either operand is a NaN, the result is false with two
- exceptions: First, an NE_EXPR is true on NaNs, but that case
- is already handled correctly since we will be inverting the
- result for NE_EXPR. Second, if we had inverted a LE_EXPR
- or a GE_EXPR into a LT_EXPR, we must return true so that it
- will be inverted into false. */
-
- if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
- || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
- tem = build_int_2 (invert && code == LT_EXPR, 0);
-
- else if (code == EQ_EXPR)
- tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
- TREE_REAL_CST (op1)),
- 0);
+ result = tree_int_cst_equal (op0, op1);
+ else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
+ result = INT_CST_LT_UNSIGNED (op0, op1);
else
- tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
- TREE_REAL_CST (op1)),
- 0);
+ result = INT_CST_LT (op0, op1);
}
-
- if (tem == NULL_TREE)
+ else
return NULL_TREE;
if (invert)
- TREE_INT_CST_LOW (tem) ^= 1;
+ result ^= 1;
+ return constant_boolean_node (result, type);
+}
- TREE_TYPE (tem) = type;
- if (TREE_CODE (type) == BOOLEAN_TYPE)
- return lang_hooks.truthvalue_conversion (tem);
- return tem;
+/* Build an expression for the a clean point containing EXPR with type TYPE.
+ Don't build a cleanup point expression for EXPR which don't have side
+ effects. */
+
+tree
+fold_build_cleanup_point_expr (tree type, tree expr)
+{
+ /* If the expression does not have side effects then we don't have to wrap
+ it with a cleanup point expression. */
+ if (!TREE_SIDE_EFFECTS (expr))
+ return expr;
+
+ return build1 (CLEANUP_POINT_EXPR, type, expr);
}
-#include "gt-fold-const.h"
+/* Build an expression for the address of T. Folds away INDIRECT_REF to
+ avoid confusing the gimplify process. */
+
+tree
+build_fold_addr_expr_with_type (tree t, tree ptrtype)
+{
+ /* The size of the object is not relevant when talking about its address. */
+ if (TREE_CODE (t) == WITH_SIZE_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ /* Note: doesn't apply to ALIGN_INDIRECT_REF */
+ if (TREE_CODE (t) == INDIRECT_REF
+ || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
+ {
+ t = TREE_OPERAND (t, 0);
+ if (TREE_TYPE (t) != ptrtype)
+ t = build1 (NOP_EXPR, ptrtype, t);
+ }
+ else
+ {
+ tree base = t;
+
+ while (handled_component_p (base)
+ || TREE_CODE (base) == REALPART_EXPR
+ || TREE_CODE (base) == IMAGPART_EXPR)
+ base = TREE_OPERAND (base, 0);
+ if (DECL_P (base))
+ TREE_ADDRESSABLE (base) = 1;
+
+ t = build1 (ADDR_EXPR, ptrtype, t);
+ }
+
+ return t;
+}
+
+tree
+build_fold_addr_expr (tree t)
+{
+ return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
+}
+
+/* Builds an expression for an indirection through T, simplifying some
+ cases. */
+
+tree
+build_fold_indirect_ref (tree t)
+{
+ tree type = TREE_TYPE (TREE_TYPE (t));
+ tree sub = t;
+ tree subtype;
+
+ STRIP_NOPS (sub);
+ if (TREE_CODE (sub) == ADDR_EXPR)
+ {
+ tree op = TREE_OPERAND (sub, 0);
+ tree optype = TREE_TYPE (op);
+ /* *&p => p */
+ if (lang_hooks.types_compatible_p (type, optype))
+ return op;
+ /* *(foo *)&fooarray => fooarray[0] */
+ else if (TREE_CODE (optype) == ARRAY_TYPE
+ && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
+ return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
+ }
+
+ /* *(foo *)fooarrptr => (*fooarrptr)[0] */
+ subtype = TREE_TYPE (sub);
+ if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
+ && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
+ {
+ sub = build_fold_indirect_ref (sub);
+ return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
+ }
+
+ return build1 (INDIRECT_REF, type, t);
+}
+
+/* Strip non-trapping, non-side-effecting tree nodes from an expression
+ whose result is ignored. The type of the returned tree need not be
+ the same as the original expression. */
+
+tree
+fold_ignored_result (tree t)
+{
+ if (!TREE_SIDE_EFFECTS (t))
+ return integer_zero_node;
+
+ for (;;)
+ switch (TREE_CODE_CLASS (TREE_CODE (t)))
+ {
+ case tcc_unary:
+ t = TREE_OPERAND (t, 0);
+ break;
+
+ case tcc_binary:
+ case tcc_comparison:
+ if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
+ t = TREE_OPERAND (t, 0);
+ else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
+ t = TREE_OPERAND (t, 1);
+ else
+ return t;
+ break;
+
+ case tcc_expression:
+ switch (TREE_CODE (t))
+ {
+ case COMPOUND_EXPR:
+ if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
+ return t;
+ t = TREE_OPERAND (t, 0);
+ break;
+
+ case COND_EXPR:
+ if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
+ || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
+ return t;
+ t = TREE_OPERAND (t, 0);
+ break;
+
+ default:
+ return t;
+ }
+ break;
+
+ default:
+ return t;
+ }
+}
+
+/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
+ This can only be applied to objects of a sizetype. */
+
+tree
+round_up (tree value, int divisor)
+{
+ tree div = NULL_TREE;
+
+ gcc_assert (divisor > 0);
+ if (divisor == 1)
+ return value;
+
+ /* See if VALUE is already a multiple of DIVISOR. If so, we don't
+ have to do anything. Only do this when we are not given a const,
+ because in that case, this check is more expensive than just
+ doing it. */
+ if (TREE_CODE (value) != INTEGER_CST)
+ {
+ div = build_int_cst (TREE_TYPE (value), divisor);
+
+ if (multiple_of_p (TREE_TYPE (value), value, div))
+ return value;
+ }
+
+ /* If divisor is a power of two, simplify this to bit manipulation. */
+ if (divisor == (divisor & -divisor))
+ {
+ tree t;
+
+ t = build_int_cst (TREE_TYPE (value), divisor - 1);
+ value = size_binop (PLUS_EXPR, value, t);
+ t = build_int_cst (TREE_TYPE (value), -divisor);
+ value = size_binop (BIT_AND_EXPR, value, t);
+ }
+ else
+ {
+ if (!div)
+ div = build_int_cst (TREE_TYPE (value), divisor);
+ value = size_binop (CEIL_DIV_EXPR, value, div);
+ value = size_binop (MULT_EXPR, value, div);
+ }
+
+ return value;
+}
+
+/* Likewise, but round down. */
+
+tree
+round_down (tree value, int divisor)
+{
+ tree div = NULL_TREE;
+
+ gcc_assert (divisor > 0);
+ if (divisor == 1)
+ return value;
+
+ /* See if VALUE is already a multiple of DIVISOR. If so, we don't
+ have to do anything. Only do this when we are not given a const,
+ because in that case, this check is more expensive than just
+ doing it. */
+ if (TREE_CODE (value) != INTEGER_CST)
+ {
+ div = build_int_cst (TREE_TYPE (value), divisor);
+
+ if (multiple_of_p (TREE_TYPE (value), value, div))
+ return value;
+ }
+
+ /* If divisor is a power of two, simplify this to bit manipulation. */
+ if (divisor == (divisor & -divisor))
+ {
+ tree t;
+
+ t = build_int_cst (TREE_TYPE (value), -divisor);
+ value = size_binop (BIT_AND_EXPR, value, t);
+ }
+ else
+ {
+ if (!div)
+ div = build_int_cst (TREE_TYPE (value), divisor);
+ value = size_binop (FLOOR_DIV_EXPR, value, div);
+ value = size_binop (MULT_EXPR, value, div);
+ }
+
+ return value;
+}
+
+/* Returns true if addresses of E1 and E2 differ by a constant, false
+ otherwise. If they do, &E1 - &E2 is stored in *DIFF. */
+
+bool
+ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
+{
+ tree core1, core2;
+ HOST_WIDE_INT bitsize1, bitsize2;
+ HOST_WIDE_INT bitpos1, bitpos2;
+ tree toffset1, toffset2, tdiff, type;
+ enum machine_mode mode1, mode2;
+ int unsignedp1, unsignedp2, volatilep1, volatilep2;
+
+ core1 = get_inner_reference (e1, &bitsize1, &bitpos1, &toffset1, &mode1,
+ &unsignedp1, &volatilep1);
+ core2 = get_inner_reference (e2, &bitsize2, &bitpos2, &toffset2, &mode2,
+ &unsignedp2, &volatilep2);
+
+ if (bitpos1 % BITS_PER_UNIT != 0
+ || bitpos2 % BITS_PER_UNIT != 0
+ || !operand_equal_p (core1, core2, 0))
+ return false;
+
+ if (toffset1 && toffset2)
+ {
+ type = TREE_TYPE (toffset1);
+ if (type != TREE_TYPE (toffset2))
+ toffset2 = fold_convert (type, toffset2);
+
+ tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
+ if (!host_integerp (tdiff, 0))
+ return false;
+
+ *diff = tree_low_cst (tdiff, 0);
+ }
+ else if (toffset1 || toffset2)
+ {
+ /* If only one of the offsets is non-constant, the difference cannot
+ be a constant. */
+ return false;
+ }
+ else
+ *diff = 0;
+
+ *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
+ return true;
+}