/* Fold a constant sub-tree into a single node for C-compiler
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
force_fit_type takes a constant, an overflowable flag and prior
overflow indicators. It forces the value to fit the type and sets
- TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
+ TREE_OVERFLOW as appropriate.
+
+ Note: Since the folders get called on non-gimple code as well as
+ gimple code, we need to handle GIMPLE tuples as well as their
+ corresponding tree equivalents. */
#include "config.h"
#include "system.h"
*hi = words[2] + words[3] * BASE;
}
\f
-/* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
- in overflow of the value, when >0 we are only interested in signed
- overflow, for <0 we are interested in any overflow. OVERFLOWED
- indicates whether overflow has already occurred. CONST_OVERFLOWED
- indicates whether constant overflow has already occurred. We force
- T's value to be within range of T's type (by setting to 0 or 1 all
- the bits outside the type's range). We set TREE_OVERFLOWED if,
- OVERFLOWED is nonzero,
- or OVERFLOWABLE is >0 and signed overflow occurs
- or OVERFLOWABLE is <0 and any overflow occurs
- We set TREE_CONSTANT_OVERFLOWED if,
- CONST_OVERFLOWED is nonzero
- or we set TREE_OVERFLOWED.
- We return either the original T, or a copy. */
+/* Force the double-word integer L1, H1 to be within the range of the
+ integer type TYPE. Stores the properly truncated and sign-extended
+ double-word integer in *LV, *HV. Returns true if the operation
+ overflows, that is, argument and result are different. */
-tree
-force_fit_type (tree t, int overflowable,
- bool overflowed, bool overflowed_const)
+int
+fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
- unsigned HOST_WIDE_INT low;
- HOST_WIDE_INT high;
+ unsigned HOST_WIDE_INT low0 = l1;
+ HOST_WIDE_INT high0 = h1;
unsigned int prec;
int sign_extended_type;
- gcc_assert (TREE_CODE (t) == INTEGER_CST);
-
- low = TREE_INT_CST_LOW (t);
- high = TREE_INT_CST_HIGH (t);
-
- if (POINTER_TYPE_P (TREE_TYPE (t))
- || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
+ if (POINTER_TYPE_P (type)
+ || TREE_CODE (type) == OFFSET_TYPE)
prec = POINTER_SIZE;
else
- prec = TYPE_PRECISION (TREE_TYPE (t));
+ prec = TYPE_PRECISION (type);
+
/* Size types *are* sign extended. */
- sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
- || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
- && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
+ sign_extended_type = (!TYPE_UNSIGNED (type)
+ || (TREE_CODE (type) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (type)));
/* First clear all bits that are beyond the type's precision. */
-
if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
;
else if (prec > HOST_BITS_PER_WIDE_INT)
- high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
+ h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
else
{
- high = 0;
+ h1 = 0;
if (prec < HOST_BITS_PER_WIDE_INT)
- low &= ~((HOST_WIDE_INT) (-1) << prec);
+ l1 &= ~((HOST_WIDE_INT) (-1) << prec);
}
+ /* Then do sign extension if necessary. */
if (!sign_extended_type)
/* No sign extension */;
else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
else if (prec > HOST_BITS_PER_WIDE_INT)
{
/* Sign extend top half? */
- if (high & ((unsigned HOST_WIDE_INT)1
- << (prec - HOST_BITS_PER_WIDE_INT - 1)))
- high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
+ if (h1 & ((unsigned HOST_WIDE_INT)1
+ << (prec - HOST_BITS_PER_WIDE_INT - 1)))
+ h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
}
else if (prec == HOST_BITS_PER_WIDE_INT)
{
- if ((HOST_WIDE_INT)low < 0)
- high = -1;
+ if ((HOST_WIDE_INT)l1 < 0)
+ h1 = -1;
}
else
{
/* Sign extend bottom half? */
- if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
+ if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
{
- high = -1;
- low |= (HOST_WIDE_INT)(-1) << prec;
+ h1 = -1;
+ l1 |= (HOST_WIDE_INT)(-1) << prec;
}
}
- /* If the value changed, return a new node. */
- if (overflowed || overflowed_const
- || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
- {
- t = build_int_cst_wide (TREE_TYPE (t), low, high);
+ *lv = l1;
+ *hv = h1;
+
+ /* If the value didn't fit, signal overflow. */
+ return l1 != low0 || h1 != high0;
+}
+/* We force the double-int HIGH:LOW to the range of the type TYPE by
+ sign or zero extending it.
+ OVERFLOWABLE indicates if we are interested
+ in overflow of the value, when >0 we are only interested in signed
+ overflow, for <0 we are interested in any overflow. OVERFLOWED
+ indicates whether overflow has already occurred. OVERFLOWED_CONST
+ indicates whether constant overflow has already occurred. We force
+ the value to be within range of TYPE (by setting to 0 or 1 all
+ the bits outside the type's range). We set TREE_OVERFLOW if,
+ OVERFLOWED is nonzero,
+ or OVERFLOWABLE is >0 and signed overflow occurs
+ or OVERFLOWABLE is <0 and any overflow occurs
+ We return a new tree node for the extended double-int. The node
+ is shared if no overflow flags are set. */
+
+tree
+force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
+ HOST_WIDE_INT high, int overflowable,
+ bool overflowed, bool overflowed_const)
+{
+ int sign_extended_type;
+ bool overflow;
+
+ /* Size types *are* sign extended. */
+ sign_extended_type = (!TYPE_UNSIGNED (type)
+ || (TREE_CODE (type) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (type)));
+
+ overflow = fit_double_type (low, high, &low, &high, type);
+
+ /* If we need to set overflow flags, return a new unshared node. */
+ if (overflowed || overflowed_const || overflow)
+ {
if (overflowed
|| overflowable < 0
|| (overflowable > 0 && sign_extended_type))
{
- t = copy_node (t);
+ tree t = make_node (INTEGER_CST);
+ TREE_INT_CST_LOW (t) = low;
+ TREE_INT_CST_HIGH (t) = high;
+ TREE_TYPE (t) = type;
TREE_OVERFLOW (t) = 1;
- TREE_CONSTANT_OVERFLOW (t) = 1;
- }
- else if (overflowed_const)
- {
- t = copy_node (t);
- TREE_CONSTANT_OVERFLOW (t) = 1;
+ return t;
}
}
- return t;
+ /* Else build a shared node. */
+ return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
case INTEGER_CST:
tem = fold_negate_const (t, type);
- if (! TREE_OVERFLOW (tem)
+ if (!TREE_OVERFLOW (tem)
|| TYPE_UNSIGNED (type)
- || ! flag_trapv)
+ || !flag_trapv)
return tem;
break;
case REAL_CST:
tem = fold_negate_const (t, type);
/* Two's complement FP formats, such as c4x, may overflow. */
- if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
+ if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
return tem;
break;
case EXACT_DIV_EXPR:
/* This is a shortcut for a common special case. */
if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
- && ! TREE_CONSTANT_OVERFLOW (arg1)
- && ! TREE_CONSTANT_OVERFLOW (arg2)
+ && !TREE_OVERFLOW (arg1)
+ && !TREE_OVERFLOW (arg2)
&& int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
{
if (code == CEIL_DIV_EXPR)
case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
/* This is a shortcut for a common special case. */
if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
- && ! TREE_CONSTANT_OVERFLOW (arg1)
- && ! TREE_CONSTANT_OVERFLOW (arg2)
+ && !TREE_OVERFLOW (arg1)
+ && !TREE_OVERFLOW (arg2)
&& int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
{
if (code == CEIL_MOD_EXPR)
return NULL_TREE;
}
- t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
-
if (notrunc)
{
+ t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
+
/* Propagate overflow flags ourselves. */
if (((!uns || is_sizetype) && overflow)
| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
{
t = copy_node (t);
TREE_OVERFLOW (t) = 1;
- TREE_CONSTANT_OVERFLOW (t) = 1;
- }
- else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
- {
- t = copy_node (t);
- TREE_CONSTANT_OVERFLOW (t) = 1;
}
}
else
- t = force_fit_type (t, 1,
- ((!uns || is_sizetype) && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
- TREE_CONSTANT_OVERFLOW (arg1)
- | TREE_CONSTANT_OVERFLOW (arg2));
+ t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
+ ((!uns || is_sizetype) && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
+ false);
return t;
}
t = build_real (type, result);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t)
- | TREE_CONSTANT_OVERFLOW (arg1)
- | TREE_CONSTANT_OVERFLOW (arg2);
return t;
}
/* Given an integer constant, make new constant with new type,
appropriately sign-extended or truncated. */
- t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
-
- t = force_fit_type (t,
- /* Don't set the overflow when
- converting a pointer */
- !POINTER_TYPE_P (TREE_TYPE (arg1)),
- (TREE_INT_CST_HIGH (arg1) < 0
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1))))
- | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
+ t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1),
+ /* Don't set the overflow when
+ converting a pointer */
+ !POINTER_TYPE_P (TREE_TYPE (arg1)),
+ (TREE_INT_CST_HIGH (arg1) < 0
+ && (TYPE_UNSIGNED (type)
+ < TYPE_UNSIGNED (TREE_TYPE (arg1))))
+ | TREE_OVERFLOW (arg1),
+ false);
return t;
}
if (! overflow)
REAL_VALUE_TO_INT (&low, &high, r);
- t = build_int_cst_wide (type, low, high);
-
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
+ t = force_fit_type_double (type, low, high, -1,
+ overflow | TREE_OVERFLOW (arg1),
+ false);
return t;
}
t = build_real (type, value);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
return t;
}
return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
case VOID_TYPE:
- return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
+ tem = fold_ignored_result (arg);
+ if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
+ return tem;
+ return fold_build1 (NOP_EXPR, type, tem);
default:
gcc_unreachable ();
case WITH_CLEANUP_EXPR:
case COMPOUND_EXPR:
case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
case TARGET_EXPR:
case COND_EXPR:
case BIND_EXPR:
return tree_int_cst_equal (arg0, arg1);
case REAL_CST:
- return REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
- TREE_REAL_CST (arg1));
+ if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
+ TREE_REAL_CST (arg1)))
+ return 1;
+
+
+ if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
+ {
+ /* If we do not distinguish between signed and unsigned zero,
+ consider them equal. */
+ if (real_zerop (arg0) && real_zerop (arg1))
+ return 1;
+ }
+ return 0;
case VECTOR_CST:
{
lbitpos = nbitsize - lbitsize - lbitpos;
/* Make the mask to be used against the extracted field. */
- mask = build_int_cst (unsigned_type, -1);
- mask = force_fit_type (mask, 0, false, false);
- mask = fold_convert (unsigned_type, mask);
+ mask = build_int_cst_type (unsigned_type, -1);
mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask,
size_int (nbitsize - lbitsize - lbitpos), 0);
unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
precision = TYPE_PRECISION (unsigned_type);
- mask = build_int_cst (unsigned_type, -1);
- mask = force_fit_type (mask, 0, false, false);
+ mask = build_int_cst_type (unsigned_type, -1);
mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
unsigned int precision = TYPE_PRECISION (type);
tree tmask;
- tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
- tmask = force_fit_type (tmask, 0, false, false);
+ tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
return
tree_int_cst_equal (mask,
/* Tree VAL must be an integer constant. */
if (TREE_CODE (val) != INTEGER_CST
- || TREE_CONSTANT_OVERFLOW (val))
+ || TREE_OVERFLOW (val))
return NULL_TREE;
width = TYPE_PRECISION (t);
return 0;
}
- /* After this point all optimizations will generate bit-field
- references, which we might not want. */
- if (! lang_hooks.can_use_bit_fields_p ())
- return 0;
-
/* See if we can find a mode that contains both fields being compared on
the left. If we can't, fail. Otherwise, update all constants and masks
to be relative to a field of that size. */
/* If something does not permit us to optimize, return the original tree. */
if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
|| TREE_CODE (comp_const) != INTEGER_CST
- || TREE_CONSTANT_OVERFLOW (comp_const)
+ || TREE_OVERFLOW (comp_const)
|| TREE_CODE (minmax_const) != INTEGER_CST
- || TREE_CONSTANT_OVERFLOW (minmax_const))
+ || TREE_OVERFLOW (minmax_const))
return NULL_TREE;
/* Now handle all the various comparison codes. We only handle EQ_EXPR
possible later conversion to our or some other type. */
if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
&& TREE_CODE (t2) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (t2)
+ && !TREE_OVERFLOW (t2)
&& (0 != (t1 = extract_muldiv (op0, t2, code,
code == MULT_EXPR
? ctype : NULL_TREE))))
const_binop (LSHIFT_EXPR,
size_one_node,
op1, 0)))
- && ! TREE_OVERFLOW (t1))
+ && !TREE_OVERFLOW (t1))
return extract_muldiv (build2 (tcode == LSHIFT_EXPR
? MULT_EXPR : FLOOR_DIV_EXPR,
ctype, fold_convert (ctype, op0), t1),
if (tcode == code
&& 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
fold_convert (ctype, c), 0))
- && ! TREE_OVERFLOW (t1))
+ && !TREE_OVERFLOW (t1))
return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
/* If these operations "cancel" each other, we have the main
TREE_INT_CST_LOW (arg1),
TREE_INT_CST_HIGH (arg1),
&lpart, &hpart, unsigned_p);
- prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
- prod = force_fit_type (prod, -1, overflow, false);
+ prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
+ -1, overflow, false);
neg_overflow = false;
if (unsigned_p)
TREE_INT_CST_LOW (tmp),
TREE_INT_CST_HIGH (tmp),
&lpart, &hpart, unsigned_p);
- hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
- hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
- TREE_CONSTANT_OVERFLOW (prod));
+ hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
+ -1, overflow | TREE_OVERFLOW (prod),
+ false);
}
else if (tree_int_cst_sgn (arg01) >= 0)
{
fold_sign_changed_comparison (enum tree_code code, tree type,
tree arg0, tree arg1)
{
- tree arg0_inner, tmp;
+ tree arg0_inner;
tree inner_type, outer_type;
if (TREE_CODE (arg0) != NOP_EXPR
return NULL_TREE;
if (TREE_CODE (arg1) == INTEGER_CST)
- {
- tmp = build_int_cst_wide (inner_type,
- TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
- arg1 = force_fit_type (tmp, 0,
- TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- }
+ arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1), 0,
+ TREE_OVERFLOW (arg1),
+ false);
else
arg1 = fold_convert (inner_type, arg1);
<< (bitpos - HOST_BITS_PER_WIDE_INT);
}
- return force_fit_type (build_int_cst_wide (type, lo, hi),
- 0, false, false);
+ return build_int_cst_wide_type (type, lo, hi);
}
return fold_convert (type, build_fold_addr_expr (base));
}
- if (TREE_CODE (op0) == MODIFY_EXPR
- && TREE_CONSTANT (TREE_OPERAND (op0, 1))
+ if ((TREE_CODE (op0) == MODIFY_EXPR
+ || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
+ && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
/* Detect assigning a bitfield. */
- && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
+ && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
+ && DECL_BIT_FIELD
+ (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
{
/* Don't leave an assignment inside a conversion
unless assigning a bitfield. */
- tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
+ tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
/* First do the assignment, then return converted constant. */
tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
TREE_NO_WARNING (tem) = 1;
}
if (change)
{
- tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
- TREE_INT_CST_HIGH (and1));
- tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
- TREE_CONSTANT_OVERFLOW (and1));
+ tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
+ TREE_INT_CST_HIGH (and1), 0,
+ TREE_OVERFLOW (and1),
+ false);
return fold_build2 (BIT_AND_EXPR, type,
fold_convert (type, and0), tem);
}
tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
return fold_convert (type, tem);
}
+ if (TREE_CODE (arg0) == CALL_EXPR)
+ {
+ tree fn = get_callee_fndecl (arg0);
+ if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (fn))
+ {
+ CASE_FLT_FN (BUILT_IN_CEXPI):
+ fn = mathfn_built_in (type, BUILT_IN_COS);
+ return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
+
+ default:;
+ }
+ }
return NULL_TREE;
case IMAGPART_EXPR:
tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
return fold_convert (type, negate_expr (tem));
}
+ if (TREE_CODE (arg0) == CALL_EXPR)
+ {
+ tree fn = get_callee_fndecl (arg0);
+ if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (fn))
+ {
+ CASE_FLT_FN (BUILT_IN_CEXPI):
+ fn = mathfn_built_in (type, BUILT_IN_SIN);
+ return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
+
+ default:;
+ }
+ }
return NULL_TREE;
default:
else
gcc_unreachable ();
- /* MIN (MAX (a, b), b) == b. Â */
+ /* MIN (MAX (a, b), b) == b. */
if (TREE_CODE (op0) == compl_code
&& operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
- /* MIN (MAX (b, a), b) == b. Â */
+ /* MIN (MAX (b, a), b) == b. */
if (TREE_CODE (op0) == compl_code
&& operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
&& reorder_operands_p (TREE_OPERAND (op0, 1), op1))
return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
- /* MIN (a, MAX (a, b)) == a. Â */
+ /* MIN (a, MAX (a, b)) == a. */
if (TREE_CODE (op1) == compl_code
&& operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
&& reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
- /* MIN (a, MAX (b, a)) == a. Â */
+ /* MIN (a, MAX (b, a)) == a. */
if (TREE_CODE (op1) == compl_code
&& operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
&& reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
|| TREE_OVERFLOW (cst0))
return NULL_TREE;
- /* See if we can reduce the mangitude of the constant in
+ /* See if we can reduce the magnitude of the constant in
arg0 by changing the comparison code. */
if (code0 == INTEGER_CST)
{
return t;
/* Try canonicalization by simplifying arg1 using the swapped
- comparsion. */
+ comparison. */
code = swap_tree_comparison (code);
return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
}
return fold_build2 (code, type, variable, lhs);
}
+ /* For comparisons of pointers we can decompose it to a compile time
+ comparison of the base objects and the offsets into the object.
+ This requires at least one operand being an ADDR_EXPR to do more
+ than the operand_equal_p test below. */
+ if (POINTER_TYPE_P (TREE_TYPE (arg0))
+ && (TREE_CODE (arg0) == ADDR_EXPR
+ || TREE_CODE (arg1) == ADDR_EXPR))
+ {
+ tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
+ HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
+ enum machine_mode mode;
+ int volatilep, unsignedp;
+ bool indirect_base0 = false;
+
+ /* Get base and offset for the access. Strip ADDR_EXPR for
+ get_inner_reference, but put it back by stripping INDIRECT_REF
+ off the base object if possible. */
+ base0 = arg0;
+ if (TREE_CODE (arg0) == ADDR_EXPR)
+ {
+ base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
+ &bitsize, &bitpos0, &offset0, &mode,
+ &unsignedp, &volatilep, false);
+ if (TREE_CODE (base0) == INDIRECT_REF)
+ base0 = TREE_OPERAND (base0, 0);
+ else
+ indirect_base0 = true;
+ }
+
+ base1 = arg1;
+ if (TREE_CODE (arg1) == ADDR_EXPR)
+ {
+ base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
+ &bitsize, &bitpos1, &offset1, &mode,
+ &unsignedp, &volatilep, false);
+ /* We have to make sure to have an indirect/non-indirect base1
+ just the same as we did for base0. */
+ if (TREE_CODE (base1) == INDIRECT_REF
+ && !indirect_base0)
+ base1 = TREE_OPERAND (base1, 0);
+ else if (!indirect_base0)
+ base1 = NULL_TREE;
+ }
+ else if (indirect_base0)
+ base1 = NULL_TREE;
+
+ /* If we have equivalent bases we might be able to simplify. */
+ if (base0 && base1
+ && operand_equal_p (base0, base1, 0))
+ {
+ /* We can fold this expression to a constant if the non-constant
+ offset parts are equal. */
+ if (offset0 == offset1
+ || (offset0 && offset1
+ && operand_equal_p (offset0, offset1, 0)))
+ {
+ switch (code)
+ {
+ case EQ_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
+ case NE_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
+ case LT_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
+ case LE_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
+ case GE_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
+ case GT_EXPR:
+ return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
+ default:;
+ }
+ }
+ /* We can simplify the comparison to a comparison of the variable
+ offset parts if the constant offset parts are equal.
+ Be careful to use signed size type here because otherwise we
+ mess with array offsets in the wrong way. This is possible
+ because pointer arithmetic is restricted to remain within an
+ object and overflow on pointer differences is undefined as of
+ 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
+ else if (bitpos0 == bitpos1)
+ {
+ tree signed_size_type_node;
+ signed_size_type_node = signed_type_for (size_type_node);
+
+ /* By converting to signed size type we cover middle-end pointer
+ arithmetic which operates on unsigned pointer types of size
+ type size and ARRAY_REF offsets which are properly sign or
+ zero extended from their type in case it is narrower than
+ size type. */
+ if (offset0 == NULL_TREE)
+ offset0 = build_int_cst (signed_size_type_node, 0);
+ else
+ offset0 = fold_convert (signed_size_type_node, offset0);
+ if (offset1 == NULL_TREE)
+ offset1 = build_int_cst (signed_size_type_node, 0);
+ else
+ offset1 = fold_convert (signed_size_type_node, offset1);
+
+ return fold_build2 (code, type, offset0, offset1);
+ }
+ }
+ }
+
/* If this is a comparison of two exprs that look like an ARRAY_REF of the
same object, then we can fold this to a comparison of the two offsets in
signed size type. This is possible because pointer arithmetic is
variable2);
}
+ /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
+ signed arithmetic case. That form is created by the compiler
+ often enough for folding it to be of value. One example is in
+ computing loop trip counts after Operator Strength Reduction. */
+ if (!(flag_wrapv || flag_trapv)
+ && !TYPE_UNSIGNED (TREE_TYPE (arg0))
+ && TREE_CODE (arg0) == MULT_EXPR
+ && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
+ && integer_zerop (arg1))
+ {
+ tree const1 = TREE_OPERAND (arg0, 1);
+ tree const2 = arg1; /* zero */
+ tree variable1 = TREE_OPERAND (arg0, 0);
+ enum tree_code cmp_code = code;
+
+ gcc_assert (!integer_zerop (const1));
+
+ /* If const1 is negative we swap the sense of the comparison. */
+ if (tree_int_cst_sgn (const1) < 0)
+ cmp_code = swap_tree_comparison (cmp_code);
+
+ return fold_build2 (cmp_code, type, variable1, const2);
+ }
+
tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
if (tem)
return tem;
&& 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
? MINUS_EXPR : PLUS_EXPR,
arg1, TREE_OPERAND (arg0, 1), 0))
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Likewise, we can simplify a comparison of a real constant with
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
&& 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
arg1, 0))
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (swap_tree_comparison (code), type,
TREE_OPERAND (arg0, 1), tem);
tree arg0, arg1, tem;
tree t1 = NULL_TREE;
- gcc_assert (IS_EXPR_CODE_CLASS (kind)
+ gcc_assert ((IS_EXPR_CODE_CLASS (kind)
+ || IS_GIMPLE_STMT_CODE_CLASS (kind))
&& TREE_CODE_LENGTH (code) == 2
&& op0 != NULL_TREE
&& op1 != NULL_TREE);
if (integer_zerop (arg1))
return non_lvalue (fold_convert (type, arg0));
+ /* ~X + X is -1. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && !TYPE_TRAP_SIGNED (type))
+ {
+ t1 = build_int_cst_type (type, -1);
+ return omit_one_operand (type, t1, arg1);
+ }
+
+ /* X + ~X is -1. */
+ if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
+ && !TYPE_TRAP_SIGNED (type))
+ {
+ t1 = build_int_cst_type (type, -1);
+ return omit_one_operand (type, t1, arg0);
+ }
+
/* If we are adding two BIT_AND_EXPR's, both of which are and'ing
with a constant, and the two constants have no bits in common,
we should treat this as a BIT_IOR_EXPR since this may produce more
fold_convert (type, tem));
}
+ /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
+ to __complex__ ( x, y ). This is not the same for SNaNs or
+ if signed zeros are involved. */
+ if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
+ && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+ && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
+ {
+ tree rtype = TREE_TYPE (TREE_TYPE (arg0));
+ tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
+ tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
+ bool arg0rz = false, arg0iz = false;
+ if ((arg0r && (arg0rz = real_zerop (arg0r)))
+ || (arg0i && (arg0iz = real_zerop (arg0i))))
+ {
+ tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
+ tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
+ if (arg0rz && arg1i && real_zerop (arg1i))
+ {
+ tree rp = arg1r ? arg1r
+ : build1 (REALPART_EXPR, rtype, arg1);
+ tree ip = arg0i ? arg0i
+ : build1 (IMAGPART_EXPR, rtype, arg0);
+ return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ }
+ else if (arg0iz && arg1r && real_zerop (arg1r))
+ {
+ tree rp = arg0r ? arg0r
+ : build1 (REALPART_EXPR, rtype, arg0);
+ tree ip = arg1i ? arg1i
+ : build1 (IMAGPART_EXPR, rtype, arg1);
+ return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ }
+ }
+ }
+
if (flag_unsafe_math_optimizations
&& (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
&& (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
/* Convert -A - 1 to ~A. */
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (arg0) == NEGATE_EXPR
- && integer_onep (arg1))
+ && integer_onep (arg1)
+ && !TYPE_TRAP_SIGNED (type))
return fold_build1 (BIT_NOT_EXPR, type,
fold_convert (type, TREE_OPERAND (arg0, 0)));
/* Convert -1 - A to ~A. */
if (INTEGRAL_TYPE_P (type)
&& integer_all_onesp (arg0))
- return fold_build1 (BIT_NOT_EXPR, type, arg1);
+ return fold_build1 (BIT_NOT_EXPR, type, op1);
if (! FLOAT_TYPE_P (type))
{
}
}
+ /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
+ This is not the same for NaNs or if signed zeros are
+ involved. */
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
+ && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+ && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
+ && TREE_CODE (arg1) == COMPLEX_CST
+ && real_zerop (TREE_REALPART (arg1)))
+ {
+ tree rtype = TREE_TYPE (TREE_TYPE (arg0));
+ if (real_onep (TREE_IMAGPART (arg1)))
+ return fold_build2 (COMPLEX_EXPR, type,
+ negate_expr (fold_build1 (IMAGPART_EXPR,
+ rtype, arg0)),
+ fold_build1 (REALPART_EXPR, rtype, arg0));
+ else if (real_minus_onep (TREE_IMAGPART (arg1)))
+ return fold_build2 (COMPLEX_EXPR, type,
+ fold_build1 (IMAGPART_EXPR, rtype, arg0),
+ negate_expr (fold_build1 (REALPART_EXPR,
+ rtype, arg0)));
+ }
+
/* Optimize z * conj(z) for floating point complex numbers.
Guarded by flag_unsafe_math_optimizations as non-finite
imaginary components don't produce scalar results. */
tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
1)));
if (TREE_CODE (arg11) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (arg11)
+ && !TREE_OVERFLOW (arg11)
&& operand_equal_p (arg0, arg10, 0))
{
tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
1)));
if (TREE_CODE (arg01) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (arg01)
+ && !TREE_OVERFLOW (arg01)
&& operand_equal_p (arg1, arg00, 0))
{
tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
if (TREE_CODE (arg01) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (arg01)
+ && !TREE_OVERFLOW (arg01)
&& operand_equal_p (arg1, arg00, 0))
{
tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
if (code == TRUNC_MOD_EXPR
&& !TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
- && !TREE_CONSTANT_OVERFLOW (arg1)
+ && !TREE_OVERFLOW (arg1)
&& TREE_INT_CST_HIGH (arg1) < 0
&& !flag_trapv
/* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
? MINUS_EXPR : PLUS_EXPR,
fold_convert (TREE_TYPE (arg0), arg1),
TREE_OPERAND (arg0, 1), 0))
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Similarly for a NEGATE_EXPR. */
&& TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = negate_expr (arg1))
&& TREE_CODE (tem) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+ /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
+ fold_convert (TREE_TYPE (arg0), arg1),
+ TREE_OPERAND (arg0, 1)));
+
/* If we have X - Y == 0, we can convert that to X == Y and similarly
for !=. Don't do this for ordered comparisons due to overflow. */
if (TREE_CODE (arg0) == MINUS_EXPR
}
/* If this is a comparison of a field, we may be able to simplify it. */
- if (((TREE_CODE (arg0) == COMPONENT_REF
- && lang_hooks.can_use_bit_fields_p ())
+ if ((TREE_CODE (arg0) == COMPONENT_REF
|| TREE_CODE (arg0) == BIT_FIELD_REF)
/* Handle the constant case even without -O
to make sure the warnings are given. */
build_int_cst (itype, 0));
}
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && TREE_CODE (arg1) == BIT_XOR_EXPR)
+ {
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ tree arg10 = TREE_OPERAND (arg1, 0);
+ tree arg11 = TREE_OPERAND (arg1, 1);
+ tree itype = TREE_TYPE (arg0);
+
+ /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
+ operand_equal_p guarantees no side-effects so we don't need
+ to use omit_one_operand on Z. */
+ if (operand_equal_p (arg01, arg11, 0))
+ return fold_build2 (code, type, arg00, arg10);
+ if (operand_equal_p (arg01, arg10, 0))
+ return fold_build2 (code, type, arg00, arg11);
+ if (operand_equal_p (arg00, arg11, 0))
+ return fold_build2 (code, type, arg01, arg10);
+ if (operand_equal_p (arg00, arg10, 0))
+ return fold_build2 (code, type, arg01, arg11);
+
+ /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
+ if (TREE_CODE (arg01) == INTEGER_CST
+ && TREE_CODE (arg11) == INTEGER_CST)
+ return fold_build2 (code, type,
+ fold_build2 (BIT_XOR_EXPR, itype, arg00,
+ fold_build2 (BIT_XOR_EXPR, itype,
+ arg01, arg11)),
+ arg10);
+ }
return NULL_TREE;
case LT_EXPR:
}
/* Comparisons with the highest or lowest possible integer of
- the specified size will have known values. */
+ the specified precision will have known values. */
{
- int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
+ tree arg1_type = TREE_TYPE (arg1);
+ unsigned int width = TYPE_PRECISION (arg1_type);
if (TREE_CODE (arg1) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (arg1)
+ && !TREE_OVERFLOW (arg1)
&& width <= 2 * HOST_BITS_PER_WIDE_INT
- && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
- || POINTER_TYPE_P (TREE_TYPE (arg1))))
+ && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
{
HOST_WIDE_INT signed_max_hi;
unsigned HOST_WIDE_INT signed_max_lo;
signed_max_hi = 0;
max_hi = 0;
- if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ if (TYPE_UNSIGNED (arg1_type))
{
max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
min_lo = 0;
max_lo = -1;
min_lo = 0;
- if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ if (TYPE_UNSIGNED (arg1_type))
{
max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
min_hi = 0;
else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
&& TREE_INT_CST_LOW (arg1) == signed_max_lo
- && TYPE_UNSIGNED (TREE_TYPE (arg1))
+ && TYPE_UNSIGNED (arg1_type)
+ /* We will flip the signedness of the comparison operator
+ associated with the mode of arg1, so the sign bit is
+ specified by this mode. Check that arg1 is the signed
+ max associated with this sign bit. */
+ && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
/* signed_type does not work on pointer types. */
- && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
+ && INTEGRAL_TYPE_P (arg1_type))
{
/* The following case also applies to X < signed_max+1
and X >= signed_max+1 because previous transformations. */
st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
- type, fold_convert (st0, arg0),
- build_int_cst (st1, 0));
+ type, fold_convert (st0, arg0),
+ build_int_cst (st1, 0));
}
}
}
&& ! TREE_SIDE_EFFECTS (arg0)
&& (0 != (tem = negate_expr (arg1)))
&& TREE_CODE (tem) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (TRUTH_ANDIF_EXPR, type,
build2 (GE_EXPR, type,
TREE_OPERAND (arg0, 0), tem),
if (kind == tcc_constant)
return t;
- if (IS_EXPR_CODE_CLASS (kind))
+ if (IS_EXPR_CODE_CLASS (kind)
+ || IS_GIMPLE_STMT_CODE_CLASS (kind))
{
tree type = TREE_TYPE (t);
tree op0, op1, op2;
const_binop (LSHIFT_EXPR,
size_one_node,
op1, 0)))
- && ! TREE_OVERFLOW (t1))
+ && !TREE_OVERFLOW (t1))
return multiple_of_p (type, t1, bottom);
}
return 0;
case COMPOUND_EXPR:
case MODIFY_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ case GIMPLE_MODIFY_STMT:
+ return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
case BIND_EXPR:
return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
else
break;
}
- if (TREE_CODE (t) == MODIFY_EXPR
- && TREE_OPERAND (t, 0) == temp)
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ if ((TREE_CODE (t) == MODIFY_EXPR
+ || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
+ && GENERIC_TREE_OPERAND (t, 0) == temp)
+ return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
return false;
}
return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
case INTEGER_CST:
- /* We used to test for !integer_zerop here. This does not work correctly
- if TREE_CONSTANT_OVERFLOW (t). */
- return (TREE_INT_CST_LOW (t) != 0
- || TREE_INT_CST_HIGH (t) != 0);
+ return !integer_zerop (t);
case PLUS_EXPR:
if (!TYPE_UNSIGNED (type) && !flag_wrapv)
case COMPOUND_EXPR:
case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
case BIND_EXPR:
- return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
+ return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
case SAVE_EXPR:
case NON_LVALUE_EXPR:
tree
fold_read_from_constant_string (tree exp)
{
- if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
+ if ((TREE_CODE (exp) == INDIRECT_REF
+ || TREE_CODE (exp) == ARRAY_REF)
+ && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
{
tree exp1 = TREE_OPERAND (exp, 0);
tree index;
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_cst_wide (type, low, high);
- t = force_fit_type (t, 1,
- (overflow | TREE_OVERFLOW (arg0))
- && !TYPE_UNSIGNED (type),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, low, high, 1,
+ (overflow | TREE_OVERFLOW (arg0))
+ && !TYPE_UNSIGNED (type),
+ false);
break;
}
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_cst_wide (type, low, high);
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, low, high, -1,
+ overflow | TREE_OVERFLOW (arg0),
+ false);
}
break;
gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
- t = build_int_cst_wide (type,
- ~ TREE_INT_CST_LOW (arg0),
- ~ TREE_INT_CST_HIGH (arg0));
- t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
+ ~TREE_INT_CST_HIGH (arg0), 0,
+ TREE_OVERFLOW (arg0),
+ false);
return t;
}
else if (TREE_CODE (optype) == COMPLEX_TYPE
&& type == TREE_TYPE (optype))
return fold_build1 (REALPART_EXPR, type, op);
+ /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
+ else if (TREE_CODE (optype) == VECTOR_TYPE
+ && type == TREE_TYPE (optype))
+ {
+ tree part_width = TYPE_SIZE (type);
+ tree index = bitsize_int (0);
+ return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
+ }
}
/* ((foo*)&complexfoo)[1] => __imag__ complexfoo */