/* Fold a constant sub-tree into a single node for C-compiler
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
This file is part of GCC.
@@ for cross-compilers. */
/* The entry points in this file are fold, size_int_wide, size_binop
- and force_fit_type.
+ and force_fit_type_double.
fold takes a tree as argument and returns a simplified tree.
size_int takes an integer value, and creates a tree constant
with type from `sizetype'.
- force_fit_type takes a constant, an overflowable flag and prior
- overflow indicators. It forces the value to fit the type and sets
- TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.
-
+ force_fit_type_double takes a constant, an overflowable flag and a
+ prior overflow indicator. It forces the value to fit the type and
+ sets TREE_OVERFLOW.
+
Note: Since the folders get called on non-gimple code as well as
gimple code, we need to handle GIMPLE tuples as well as their
corresponding tree equivalents. */
return l1 != low0 || h1 != high0;
}
-/* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
+/* We force the double-int HIGH:LOW to the range of the type TYPE by
+ sign or zero extending it.
+ OVERFLOWABLE indicates if we are interested
in overflow of the value, when >0 we are only interested in signed
overflow, for <0 we are interested in any overflow. OVERFLOWED
-   indicates whether overflow has already occurred.  CONST_OVERFLOWED
+   indicates whether overflow has already occurred.
OVERFLOWED is nonzero,
or OVERFLOWABLE is >0 and signed overflow occurs
or OVERFLOWABLE is <0 and any overflow occurs
- We set TREE_CONSTANT_OVERFLOWED if,
- CONST_OVERFLOWED is nonzero
- or we set TREE_OVERFLOWED.
- We return either the original T, or a copy. */
+ We return a new tree node for the extended double-int. The node
+ is shared if no overflow flags are set. */
tree
-force_fit_type (tree t, int overflowable,
- bool overflowed, bool overflowed_const)
+force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
+ HOST_WIDE_INT high, int overflowable,
+ bool overflowed)
{
- unsigned HOST_WIDE_INT low;
- HOST_WIDE_INT high;
int sign_extended_type;
bool overflow;
- gcc_assert (TREE_CODE (t) == INTEGER_CST);
-
/* Size types *are* sign extended. */
- sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
- || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
- && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
-
- low = TREE_INT_CST_LOW (t);
- high = TREE_INT_CST_HIGH (t);
+ sign_extended_type = (!TYPE_UNSIGNED (type)
+ || (TREE_CODE (type) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (type)));
- overflow = fit_double_type (low, high, &low, &high, TREE_TYPE (t));
+ overflow = fit_double_type (low, high, &low, &high, type);
- /* If the value changed, return a new node. */
- if (overflowed || overflowed_const || overflow)
+ /* If we need to set overflow flags, return a new unshared node. */
+ if (overflowed || overflow)
{
- t = build_int_cst_wide (TREE_TYPE (t), low, high);
-
if (overflowed
|| overflowable < 0
|| (overflowable > 0 && sign_extended_type))
{
- t = copy_node (t);
+ tree t = make_node (INTEGER_CST);
+ TREE_INT_CST_LOW (t) = low;
+ TREE_INT_CST_HIGH (t) = high;
+ TREE_TYPE (t) = type;
TREE_OVERFLOW (t) = 1;
- TREE_CONSTANT_OVERFLOW (t) = 1;
- }
- else if (overflowed_const)
- {
- t = copy_node (t);
- TREE_CONSTANT_OVERFLOW (t) = 1;
+ return t;
}
}
- return t;
+ /* Else build a shared node. */
+ return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
case INTEGER_CST:
tem = fold_negate_const (t, type);
- if (! TREE_OVERFLOW (tem)
+ if (!TREE_OVERFLOW (tem)
|| TYPE_UNSIGNED (type)
- || ! flag_trapv)
+ || !flag_trapv)
return tem;
break;
case REAL_CST:
tem = fold_negate_const (t, type);
/* Two's complement FP formats, such as c4x, may overflow. */
- if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
+ if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
return tem;
break;
case EXACT_DIV_EXPR:
/* This is a shortcut for a common special case. */
if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
- && ! TREE_CONSTANT_OVERFLOW (arg1)
- && ! TREE_CONSTANT_OVERFLOW (arg2)
+ && !TREE_OVERFLOW (arg1)
+ && !TREE_OVERFLOW (arg2)
&& int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
{
if (code == CEIL_DIV_EXPR)
case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
/* This is a shortcut for a common special case. */
if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
- && ! TREE_CONSTANT_OVERFLOW (arg1)
- && ! TREE_CONSTANT_OVERFLOW (arg2)
+ && !TREE_OVERFLOW (arg1)
+ && !TREE_OVERFLOW (arg2)
&& int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
{
if (code == CEIL_MOD_EXPR)
return NULL_TREE;
}
- t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
-
if (notrunc)
{
+ t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
+
/* Propagate overflow flags ourselves. */
if (((!uns || is_sizetype) && overflow)
| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
{
t = copy_node (t);
TREE_OVERFLOW (t) = 1;
- TREE_CONSTANT_OVERFLOW (t) = 1;
- }
- else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
- {
- t = copy_node (t);
- TREE_CONSTANT_OVERFLOW (t) = 1;
}
}
else
- t = force_fit_type (t, 1,
- ((!uns || is_sizetype) && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
- TREE_CONSTANT_OVERFLOW (arg1)
- | TREE_CONSTANT_OVERFLOW (arg2));
+ t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
+ ((!uns || is_sizetype) && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
return t;
}
t = build_real (type, result);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t)
- | TREE_CONSTANT_OVERFLOW (arg1)
- | TREE_CONSTANT_OVERFLOW (arg2);
return t;
}
/* Given an integer constant, make new constant with new type,
appropriately sign-extended or truncated. */
- t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
-
- t = force_fit_type (t,
- /* Don't set the overflow when
- converting a pointer */
- !POINTER_TYPE_P (TREE_TYPE (arg1)),
- (TREE_INT_CST_HIGH (arg1) < 0
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1))))
- | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
+ t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1),
+			     /* Don't set the overflow when
+				converting a pointer.  */
+ !POINTER_TYPE_P (TREE_TYPE (arg1)),
+ (TREE_INT_CST_HIGH (arg1) < 0
+ && (TYPE_UNSIGNED (type)
+ < TYPE_UNSIGNED (TREE_TYPE (arg1))))
+ | TREE_OVERFLOW (arg1));
return t;
}
if (! overflow)
REAL_VALUE_TO_INT (&low, &high, r);
- t = build_int_cst_wide (type, low, high);
-
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
+ t = force_fit_type_double (type, low, high, -1,
+ overflow | TREE_OVERFLOW (arg1));
return t;
}
t = build_real (type, value);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
return t;
}
lbitpos = nbitsize - lbitsize - lbitpos;
/* Make the mask to be used against the extracted field. */
- mask = build_int_cst (unsigned_type, -1);
- mask = force_fit_type (mask, 0, false, false);
- mask = fold_convert (unsigned_type, mask);
+ mask = build_int_cst_type (unsigned_type, -1);
mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask,
size_int (nbitsize - lbitsize - lbitpos), 0);
unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
precision = TYPE_PRECISION (unsigned_type);
- mask = build_int_cst (unsigned_type, -1);
- mask = force_fit_type (mask, 0, false, false);
+ mask = build_int_cst_type (unsigned_type, -1);
mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
unsigned int precision = TYPE_PRECISION (type);
tree tmask;
- tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
- tmask = force_fit_type (tmask, 0, false, false);
+ tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
return
tree_int_cst_equal (mask,
/* Tree VAL must be an integer constant. */
if (TREE_CODE (val) != INTEGER_CST
- || TREE_CONSTANT_OVERFLOW (val))
+ || TREE_OVERFLOW (val))
return NULL_TREE;
width = TYPE_PRECISION (t);
return 0;
}
- /* After this point all optimizations will generate bit-field
- references, which we might not want. */
- if (! lang_hooks.can_use_bit_fields_p ())
- return 0;
-
/* See if we can find a mode that contains both fields being compared on
the left. If we can't, fail. Otherwise, update all constants and masks
to be relative to a field of that size. */
/* If something does not permit us to optimize, return the original tree. */
if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
|| TREE_CODE (comp_const) != INTEGER_CST
- || TREE_CONSTANT_OVERFLOW (comp_const)
+ || TREE_OVERFLOW (comp_const)
|| TREE_CODE (minmax_const) != INTEGER_CST
- || TREE_CONSTANT_OVERFLOW (minmax_const))
+ || TREE_OVERFLOW (minmax_const))
return NULL_TREE;
/* Now handle all the various comparison codes. We only handle EQ_EXPR
possible later conversion to our or some other type. */
if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
&& TREE_CODE (t2) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (t2)
+ && !TREE_OVERFLOW (t2)
&& (0 != (t1 = extract_muldiv (op0, t2, code,
code == MULT_EXPR
? ctype : NULL_TREE))))
const_binop (LSHIFT_EXPR,
size_one_node,
op1, 0)))
- && ! TREE_OVERFLOW (t1))
+ && !TREE_OVERFLOW (t1))
return extract_muldiv (build2 (tcode == LSHIFT_EXPR
? MULT_EXPR : FLOOR_DIV_EXPR,
ctype, fold_convert (ctype, op0), t1),
if (tcode == code
&& 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
fold_convert (ctype, c), 0))
- && ! TREE_OVERFLOW (t1))
+ && !TREE_OVERFLOW (t1))
return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
/* If these operations "cancel" each other, we have the main
TREE_INT_CST_LOW (arg1),
TREE_INT_CST_HIGH (arg1),
&lpart, &hpart, unsigned_p);
- prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
- prod = force_fit_type (prod, -1, overflow, false);
+ prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
+ -1, overflow);
neg_overflow = false;
if (unsigned_p)
TREE_INT_CST_LOW (tmp),
TREE_INT_CST_HIGH (tmp),
&lpart, &hpart, unsigned_p);
- hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
- hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
- TREE_CONSTANT_OVERFLOW (prod));
+ hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
+ -1, overflow | TREE_OVERFLOW (prod));
}
else if (tree_int_cst_sgn (arg01) >= 0)
{
fold_sign_changed_comparison (enum tree_code code, tree type,
tree arg0, tree arg1)
{
- tree arg0_inner, tmp;
+ tree arg0_inner;
tree inner_type, outer_type;
if (TREE_CODE (arg0) != NOP_EXPR
return NULL_TREE;
if (TREE_CODE (arg1) == INTEGER_CST)
- {
- tmp = build_int_cst_wide (inner_type,
- TREE_INT_CST_LOW (arg1),
- TREE_INT_CST_HIGH (arg1));
- arg1 = force_fit_type (tmp, 0,
- TREE_OVERFLOW (arg1),
- TREE_CONSTANT_OVERFLOW (arg1));
- }
+ arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
+ TREE_INT_CST_HIGH (arg1), 0,
+ TREE_OVERFLOW (arg1));
else
arg1 = fold_convert (inner_type, arg1);
<< (bitpos - HOST_BITS_PER_WIDE_INT);
}
- return force_fit_type (build_int_cst_wide (type, lo, hi),
- 0, false, false);
+ return build_int_cst_wide_type (type, lo, hi);
}
}
if (change)
{
- tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
- TREE_INT_CST_HIGH (and1));
- tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
- TREE_CONSTANT_OVERFLOW (and1));
+ tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
+ TREE_INT_CST_HIGH (and1), 0,
+ TREE_OVERFLOW (and1));
return fold_build2 (BIT_AND_EXPR, type,
fold_convert (type, and0), tem);
}
variable2);
}
+ /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
+ signed arithmetic case. That form is created by the compiler
+ often enough for folding it to be of value. One example is in
+ computing loop trip counts after Operator Strength Reduction. */
+ if (!(flag_wrapv || flag_trapv)
+ && !TYPE_UNSIGNED (TREE_TYPE (arg0))
+ && TREE_CODE (arg0) == MULT_EXPR
+ && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
+ && integer_zerop (arg1))
+ {
+ tree const1 = TREE_OPERAND (arg0, 1);
+ tree const2 = arg1; /* zero */
+ tree variable1 = TREE_OPERAND (arg0, 0);
+ enum tree_code cmp_code = code;
+
+ gcc_assert (!integer_zerop (const1));
+
+ /* If const1 is negative we swap the sense of the comparison. */
+ if (tree_int_cst_sgn (const1) < 0)
+ cmp_code = swap_tree_comparison (cmp_code);
+
+ return fold_build2 (cmp_code, type, variable1, const2);
+ }
+
tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
if (tem)
return tem;
&& 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
? MINUS_EXPR : PLUS_EXPR,
arg1, TREE_OPERAND (arg0, 1), 0))
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Likewise, we can simplify a comparison of a real constant with
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
&& 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
arg1, 0))
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (swap_tree_comparison (code), type,
TREE_OPERAND (arg0, 1), tem);
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& !TYPE_TRAP_SIGNED (type))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
&& !TYPE_TRAP_SIGNED (type))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
1)));
if (TREE_CODE (arg11) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (arg11)
+ && !TREE_OVERFLOW (arg11)
&& operand_equal_p (arg0, arg10, 0))
{
tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
1)));
if (TREE_CODE (arg01) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (arg01)
+ && !TREE_OVERFLOW (arg01)
&& operand_equal_p (arg1, arg00, 0))
{
tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = build_int_cst (type, -1);
- t1 = force_fit_type (t1, 0, false, false);
+ t1 = build_int_cst_type (type, -1);
return omit_one_operand (type, t1, arg0);
}
tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
if (TREE_CODE (arg01) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (arg01)
+ && !TREE_OVERFLOW (arg01)
&& operand_equal_p (arg1, arg00, 0))
{
tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
if (code == TRUNC_MOD_EXPR
&& !TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
- && !TREE_CONSTANT_OVERFLOW (arg1)
+ && !TREE_OVERFLOW (arg1)
&& TREE_INT_CST_HIGH (arg1) < 0
&& !flag_trapv
/* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
? MINUS_EXPR : PLUS_EXPR,
fold_convert (TREE_TYPE (arg0), arg1),
TREE_OPERAND (arg0, 1), 0))
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Similarly for a NEGATE_EXPR. */
&& TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = negate_expr (arg1))
&& TREE_CODE (tem) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
/* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
}
/* If this is a comparison of a field, we may be able to simplify it. */
- if (((TREE_CODE (arg0) == COMPONENT_REF
- && lang_hooks.can_use_bit_fields_p ())
+ if ((TREE_CODE (arg0) == COMPONENT_REF
|| TREE_CODE (arg0) == BIT_FIELD_REF)
/* Handle the constant case even without -O
to make sure the warnings are given. */
unsigned int width = TYPE_PRECISION (arg1_type);
if (TREE_CODE (arg1) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (arg1)
+ && !TREE_OVERFLOW (arg1)
&& width <= 2 * HOST_BITS_PER_WIDE_INT
&& (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
{
&& ! TREE_SIDE_EFFECTS (arg0)
&& (0 != (tem = negate_expr (arg1)))
&& TREE_CODE (tem) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (tem))
+ && !TREE_OVERFLOW (tem))
return fold_build2 (TRUTH_ANDIF_EXPR, type,
build2 (GE_EXPR, type,
TREE_OPERAND (arg0, 0), tem),
const_binop (LSHIFT_EXPR,
size_one_node,
op1, 0)))
- && ! TREE_OVERFLOW (t1))
+ && !TREE_OVERFLOW (t1))
return multiple_of_p (type, t1, bottom);
}
return 0;
return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
case INTEGER_CST:
- /* We used to test for !integer_zerop here. This does not work correctly
- if TREE_CONSTANT_OVERFLOW (t). */
- return (TREE_INT_CST_LOW (t) != 0
- || TREE_INT_CST_HIGH (t) != 0);
+ return !integer_zerop (t);
case PLUS_EXPR:
if (!TYPE_UNSIGNED (type) && !flag_wrapv)
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_cst_wide (type, low, high);
- t = force_fit_type (t, 1,
- (overflow | TREE_OVERFLOW (arg0))
- && !TYPE_UNSIGNED (type),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, low, high, 1,
+ (overflow | TREE_OVERFLOW (arg0))
+ && !TYPE_UNSIGNED (type));
break;
}
int overflow = neg_double (TREE_INT_CST_LOW (arg0),
TREE_INT_CST_HIGH (arg0),
&low, &high);
- t = build_int_cst_wide (type, low, high);
- t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, low, high, -1,
+ overflow | TREE_OVERFLOW (arg0));
}
break;
gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
- t = build_int_cst_wide (type,
- ~ TREE_INT_CST_LOW (arg0),
- ~ TREE_INT_CST_HIGH (arg0));
- t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
- TREE_CONSTANT_OVERFLOW (arg0));
+ t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
+ ~TREE_INT_CST_HIGH (arg0), 0,
+ TREE_OVERFLOW (arg0));
return t;
}