*hi = words[2] + words[3] * BASE;
}
\f
-/* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
- in overflow of the value, when >0 we are only interested in signed
- overflow, for <0 we are interested in any overflow. OVERFLOWED
- indicates whether overflow has already occurred. CONST_OVERFLOWED
- indicates whether constant overflow has already occurred. We force
- T's value to be within range of T's type (by setting to 0 or 1 all
- the bits outside the type's range). We set TREE_OVERFLOWED if,
- OVERFLOWED is nonzero,
- or OVERFLOWABLE is >0 and signed overflow occurs
- or OVERFLOWABLE is <0 and any overflow occurs
- We set TREE_CONSTANT_OVERFLOWED if,
- CONST_OVERFLOWED is nonzero
- or we set TREE_OVERFLOWED.
- We return either the original T, or a copy. */
+/* Force the double-word integer L1, H1 to be within the range of the
+ integer type TYPE. Stores the properly truncated and sign-extended
+ double-word integer in *LV, *HV. Returns true if the operation
+ overflows, that is, argument and result are different. */
-tree
-force_fit_type (tree t, int overflowable,
- bool overflowed, bool overflowed_const)
+int
+fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
- unsigned HOST_WIDE_INT low;
- HOST_WIDE_INT high;
+ unsigned HOST_WIDE_INT low0 = l1;
+ HOST_WIDE_INT high0 = h1;
unsigned int prec;
int sign_extended_type;
- gcc_assert (TREE_CODE (t) == INTEGER_CST);
-
- low = TREE_INT_CST_LOW (t);
- high = TREE_INT_CST_HIGH (t);
-
- if (POINTER_TYPE_P (TREE_TYPE (t))
- || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
+ if (POINTER_TYPE_P (type)
+ || TREE_CODE (type) == OFFSET_TYPE)
prec = POINTER_SIZE;
else
- prec = TYPE_PRECISION (TREE_TYPE (t));
+ prec = TYPE_PRECISION (type);
+
/* Size types *are* sign extended. */
- sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
- || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
- && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
+ sign_extended_type = (!TYPE_UNSIGNED (type)
+ || (TREE_CODE (type) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (type)));
/* First clear all bits that are beyond the type's precision. */
-
if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
;
else if (prec > HOST_BITS_PER_WIDE_INT)
- high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
+ h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
else
{
- high = 0;
+ h1 = 0;
if (prec < HOST_BITS_PER_WIDE_INT)
- low &= ~((HOST_WIDE_INT) (-1) << prec);
+ l1 &= ~((HOST_WIDE_INT) (-1) << prec);
}
+ /* Then do sign extension if necessary. */
if (!sign_extended_type)
/* No sign extension */;
else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;				/* All bits are significant.  */
else if (prec > HOST_BITS_PER_WIDE_INT)
{
/* Sign extend top half? */
- if (high & ((unsigned HOST_WIDE_INT)1
- << (prec - HOST_BITS_PER_WIDE_INT - 1)))
- high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
+ if (h1 & ((unsigned HOST_WIDE_INT)1
+ << (prec - HOST_BITS_PER_WIDE_INT - 1)))
+ h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
}
else if (prec == HOST_BITS_PER_WIDE_INT)
{
- if ((HOST_WIDE_INT)low < 0)
- high = -1;
+ if ((HOST_WIDE_INT)l1 < 0)
+ h1 = -1;
}
else
{
/* Sign extend bottom half? */
- if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
+ if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
{
- high = -1;
- low |= (HOST_WIDE_INT)(-1) << prec;
+ h1 = -1;
+ l1 |= (HOST_WIDE_INT)(-1) << prec;
}
}
- /* If the value changed, return a new node. */
- if (overflowed || overflowed_const
- || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
- {
- t = build_int_cst_wide (TREE_TYPE (t), low, high);
+ *lv = l1;
+ *hv = h1;
+
+ /* If the value didn't fit, signal overflow. */
+ return l1 != low0 || h1 != high0;
+}
+
+/* We force the double-int HIGH:LOW to the range of the type TYPE by
+ sign or zero extending it.
+ OVERFLOWABLE indicates if we are interested
+ in overflow of the value, when >0 we are only interested in signed
+ overflow, for <0 we are interested in any overflow. OVERFLOWED
+ indicates whether overflow has already occurred. CONST_OVERFLOWED
+ indicates whether constant overflow has already occurred. We force
+ the value to be within range of TYPE (by setting to 0 or 1 all
+ the bits outside the type's range). We set TREE_OVERFLOW if,
+ OVERFLOWED is nonzero,
+ or OVERFLOWABLE is >0 and signed overflow occurs
+ or OVERFLOWABLE is <0 and any overflow occurs
+ We set TREE_CONSTANT_OVERFLOW if,
+ CONST_OVERFLOWED is nonzero
+ or we set TREE_OVERFLOW.
+ We return a new tree node for the extended double-int. The node
+ is shared if no overflow flags are set. */
+
+tree
+force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
+ HOST_WIDE_INT high, int overflowable,
+ bool overflowed, bool overflowed_const)
+{
+ int sign_extended_type;
+ bool overflow;
+ /* Size types *are* sign extended. */
+ sign_extended_type = (!TYPE_UNSIGNED (type)
+ || (TREE_CODE (type) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (type)));
+
+ overflow = fit_double_type (low, high, &low, &high, type);
+
+ /* If we need to set overflow flags, return a new unshared node. */
+ if (overflowed || overflowed_const || overflow)
+ {
if (overflowed
|| overflowable < 0
|| (overflowable > 0 && sign_extended_type))
{
- t = copy_node (t);
+ tree t = make_node (INTEGER_CST);
+ TREE_INT_CST_LOW (t) = low;
+ TREE_INT_CST_HIGH (t) = high;
+ TREE_TYPE (t) = type;
TREE_OVERFLOW (t) = 1;
TREE_CONSTANT_OVERFLOW (t) = 1;
+
+ return t;
}
else if (overflowed_const)
{
- t = copy_node (t);
+ tree t = make_node (INTEGER_CST);
+ TREE_INT_CST_LOW (t) = low;
+ TREE_INT_CST_HIGH (t) = high;
+ TREE_TYPE (t) = type;
TREE_CONSTANT_OVERFLOW (t) = 1;
+
+ return t;
}
}
- return t;
+ /* Else build a shared node. */
+ return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
return NULL_TREE;
}
- t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
-
if (notrunc)
{
+ t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
+
/* Propagate overflow flags ourselves. */
if (((!uns || is_sizetype) && overflow)
| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
}
}
else
- t = force_fit_type (t, 1,
- ((!uns || is_sizetype) && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
- TREE_CONSTANT_OVERFLOW (arg1)
- | TREE_CONSTANT_OVERFLOW (arg2));
+ t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
+ ((!uns || is_sizetype) && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
+ TREE_CONSTANT_OVERFLOW (arg1)
+ | TREE_CONSTANT_OVERFLOW (arg2));
return t;
}
/* Given an integer constant, make new constant with new type,
appropriately sign-extended or truncated. */
- t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
-
- t = force_fit_type (t,
- /* Don't set the overflow when
- converting a pointer */
- !POINTER_TYPE_P (TREE_TYPE (arg1)),
- (TREE_INT_CST_HIGH (arg1) < 0
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1))))
- | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
+ t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1),
+ /* Don't set the overflow when
+ converting a pointer */
+ !POINTER_TYPE_P (TREE_TYPE (arg1)),
+ (TREE_INT_CST_HIGH (arg1) < 0
+ && (TYPE_UNSIGNED (type)
+ < TYPE_UNSIGNED (TREE_TYPE (arg1))))
+ | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
return t;
}
if (! overflow)
REAL_VALUE_TO_INT (&low, &high, r);
- t = build_int_cst_wide (type, low, high);
-
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
+ t = force_fit_type_double (type, low, high, -1,
+ overflow | TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
return t;
}
return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
case VOID_TYPE:
- return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
+ tem = fold_ignored_result (arg);
+ if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
+ return tem;
+ return fold_build1 (NOP_EXPR, type, tem);
default:
gcc_unreachable ();
lbitpos = nbitsize - lbitsize - lbitpos;
/* Make the mask to be used against the extracted field. */
- mask = build_int_cst (unsigned_type, -1);
- mask = force_fit_type (mask, 0, false, false);
- mask = fold_convert (unsigned_type, mask);
+ mask = build_int_cst_type (unsigned_type, -1);
mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask,
size_int (nbitsize - lbitsize - lbitpos), 0);
unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
precision = TYPE_PRECISION (unsigned_type);
- mask = build_int_cst (unsigned_type, -1);
- mask = force_fit_type (mask, 0, false, false);
+ mask = build_int_cst_type (unsigned_type, -1);
mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
unsigned int precision = TYPE_PRECISION (type);
tree tmask;
- tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
- tmask = force_fit_type (tmask, 0, false, false);
+ tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
return
tree_int_cst_equal (mask,
TREE_INT_CST_LOW (arg1),
TREE_INT_CST_HIGH (arg1),
&lpart, &hpart, unsigned_p);
- prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
- prod = force_fit_type (prod, -1, overflow, false);
+ prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
+ -1, overflow, false);
neg_overflow = false;
if (unsigned_p)
TREE_INT_CST_LOW (tmp),
TREE_INT_CST_HIGH (tmp),
&lpart, &hpart, unsigned_p);
- hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
- hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
- TREE_CONSTANT_OVERFLOW (prod));
+ hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
+ -1, overflow | TREE_OVERFLOW (prod),
+ TREE_CONSTANT_OVERFLOW (prod));
}
else if (tree_int_cst_sgn (arg01) >= 0)
{
fold_sign_changed_comparison (enum tree_code code, tree type,
tree arg0, tree arg1)
{
- tree arg0_inner, tmp;
+ tree arg0_inner;
tree inner_type, outer_type;
if (TREE_CODE (arg0) != NOP_EXPR
return NULL_TREE;
if (TREE_CODE (arg1) == INTEGER_CST)
- {
- tmp = build_int_cst_wide (inner_type,
- TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
- arg1 = force_fit_type (tmp, 0,
- TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- }
+ arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1), 0,
+ TREE_OVERFLOW (arg1),
+ TREE_CONSTANT_OVERFLOW (arg1));
else
arg1 = fold_convert (inner_type, arg1);
<< (bitpos - HOST_BITS_PER_WIDE_INT);
}
- return force_fit_type (build_int_cst_wide (type, lo, hi),
- 0, false, false);
+ return build_int_cst_wide_type (type, lo, hi);
}
}
if (change)
{
- tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
- TREE_INT_CST_HIGH (and1));
- tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
- TREE_CONSTANT_OVERFLOW (and1));
+ tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
+ TREE_INT_CST_HIGH (and1), 0,
+ TREE_OVERFLOW (and1),
+ TREE_CONSTANT_OVERFLOW (and1));
return fold_build2 (BIT_AND_EXPR, type,
fold_convert (type, and0), tem);
}
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& !TYPE_TRAP_SIGNED (type))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
&& !TYPE_TRAP_SIGNED (type))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+ /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
+ fold_convert (TREE_TYPE (arg0), arg1),
+ TREE_OPERAND (arg0, 1)));
+
/* If we have X - Y == 0, we can convert that to X == Y and similarly
for !=. Don't do this for ordered comparisons due to overflow. */
if (TREE_CODE (arg0) == MINUS_EXPR
build_int_cst (itype, 0));
}
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && TREE_CODE (arg1) == BIT_XOR_EXPR)
+ {
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ tree arg10 = TREE_OPERAND (arg1, 0);
+ tree arg11 = TREE_OPERAND (arg1, 1);
+ tree itype = TREE_TYPE (arg0);
+
+ /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
+ operand_equal_p guarantees no side-effects so we don't need
+ to use omit_one_operand on Z. */
+ if (operand_equal_p (arg01, arg11, 0))
+ return fold_build2 (code, type, arg00, arg10);
+ if (operand_equal_p (arg01, arg10, 0))
+ return fold_build2 (code, type, arg00, arg11);
+ if (operand_equal_p (arg00, arg11, 0))
+ return fold_build2 (code, type, arg01, arg10);
+ if (operand_equal_p (arg00, arg10, 0))
+ return fold_build2 (code, type, arg01, arg11);
+
+ /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
+ if (TREE_CODE (arg01) == INTEGER_CST
+ && TREE_CODE (arg11) == INTEGER_CST)
+ return fold_build2 (code, type,
+ fold_build2 (BIT_XOR_EXPR, itype, arg00,
+ fold_build2 (BIT_XOR_EXPR, itype,
+ arg01, arg11)),
+ arg10);
+ }
return NULL_TREE;
case LT_EXPR:
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_cst_wide (type, low, high);
- t = force_fit_type (t, 1,
- (overflow | TREE_OVERFLOW (arg0))
- && !TYPE_UNSIGNED (type),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, low, high, 1,
+ (overflow | TREE_OVERFLOW (arg0))
+ && !TYPE_UNSIGNED (type),
+ TREE_CONSTANT_OVERFLOW (arg0));
break;
}
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_cst_wide (type, low, high);
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, low, high, -1,
+ overflow | TREE_OVERFLOW (arg0),
+ TREE_CONSTANT_OVERFLOW (arg0));
}
break;
gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
- t = build_int_cst_wide (type,
- ~ TREE_INT_CST_LOW (arg0),
- ~ TREE_INT_CST_HIGH (arg0));
- t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
+ ~TREE_INT_CST_HIGH (arg0), 0,
+ TREE_OVERFLOW (arg0),
+ TREE_CONSTANT_OVERFLOW (arg0));
return t;
}