/* Fold a constant sub-tree into a single node for C-compiler
- Copyright (C) 1987, 1988, 1992, 1993, 1994 Free Software Foundation, Inc.
+ Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
This file is part of GNU CC.
You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
+the Free Software Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA. */
/*@@ This file should be rewritten to use an arbitrary precision
@@ representation for "struct tree_int_cst" and "struct tree_real_cst".
static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
static tree eval_subst PROTO((tree, tree, tree, tree, tree));
static tree omit_one_operand PROTO((tree, tree, tree));
+static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
tree, tree));
static tree decode_field_reference PROTO((tree, int *, int *,
enum machine_mode *, int *,
- int *, tree *));
+ int *, tree *, tree *));
static int all_ones_mask_p PROTO((tree, int));
static int simple_operand_p PROTO((tree));
static tree range_test PROTO((enum tree_code, tree, enum tree_code,
enum tree_code, tree, tree, tree));
+static tree unextend PROTO((tree, int, int, tree));
static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
static tree strip_compound_expr PROTO((tree, tree));
#define BRANCH_COST 1
#endif
-/* Yield nonzero if a signed left shift of A by B bits overflows. */
-#define left_shift_overflows(a, b) ((a) != ((a) << (b)) >> (b))
-
/* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
Then this yields nonzero if overflow occurred during the addition.
/* Unsigned types do not suffer sign extension or overflow. */
if (TREE_UNSIGNED (TREE_TYPE (t)))
- return 0;
+ return overflow;
/* If the value's sign bit is set, extend the sign. */
if (prec != 2 * HOST_BITS_PER_WIDE_INT
return;
}
- if (count >= prec)
- count = (unsigned HOST_WIDE_INT) count & prec;
+#ifdef SHIFT_COUNT_TRUNCATED
+ if (SHIFT_COUNT_TRUNCATED)
+ count %= prec;
+#endif
if (count >= HOST_BITS_PER_WIDE_INT)
{
? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
: 0);
- if (count >= prec)
- count = (unsigned HOST_WIDE_INT) count % prec;
+#ifdef SHIFT_COUNT_TRUNCATED
+ if (SHIFT_COUNT_TRUNCATED)
+ count %= prec;
+#endif
if (count >= HOST_BITS_PER_WIDE_INT)
{
int prec;
HOST_WIDE_INT *lv, *hv;
{
- HOST_WIDE_INT arg1[4];
- register int i;
- register int carry;
+ HOST_WIDE_INT s1l, s1h, s2l, s2h;
+ count %= prec;
if (count < 0)
- {
- rrotate_double (l1, h1, - count, prec, lv, hv);
- return;
- }
-
- encode (arg1, l1, h1);
-
- if (count > prec)
- count = prec;
-
- carry = arg1[4 - 1] >> 16 - 1;
- while (count > 0)
- {
- for (i = 0; i < 4; i++)
- {
- carry += arg1[i] << 1;
- arg1[i] = LOWPART (carry);
- carry = HIGHPART (carry);
- }
- count--;
- }
+ count += prec;
- decode (arg1, lv, hv);
+ lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
+ rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
+ *lv = s1l | s2l;
+ *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
int prec;
HOST_WIDE_INT *lv, *hv;
{
- HOST_WIDE_INT arg1[4];
- register int i;
- register int carry;
-
- encode (arg1, l1, h1);
-
- if (count > prec)
- count = prec;
+ HOST_WIDE_INT s1l, s1h, s2l, s2h;
- carry = arg1[0] & 1;
- while (count > 0)
- {
- for (i = 4 - 1; i >= 0; i--)
- {
- carry *= BASE;
- carry += arg1[i];
- arg1[i] = LOWPART (carry >> 1);
- }
- count--;
- }
+ count %= prec;
+ if (count < 0)
+ count += prec;
- decode (arg1, lv, hv);
+ rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
+ lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
+ *lv = s1l | s2l;
+ *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
HOST_WIDE_INT den[4], quo[4];
register int i, j;
unsigned HOST_WIDE_INT work;
- register int carry = 0;
+ register unsigned HOST_WIDE_INT carry = 0;
HOST_WIDE_INT lnum = lnum_orig;
HOST_WIDE_INT hnum = hnum_orig;
HOST_WIDE_INT lden = lden_orig;
{
/* Full double precision division,
with thanks to Don Knuth's "Seminumerical Algorithms". */
- int quo_est, scale, num_hi_sig, den_hi_sig;
+ int num_hi_sig, den_hi_sig;
+ unsigned HOST_WIDE_INT quo_est, scale;
/* Find the highest non-zero divisor digit. */
for (i = 4 - 1; ; i--)
register tree arg1, arg2;
int notrunc;
{
+ STRIP_NOPS (arg1); STRIP_NOPS (arg2);
+
if (TREE_CODE (arg1) == INTEGER_CST)
{
register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
got_it:
TREE_TYPE (t) = TREE_TYPE (arg1);
TREE_OVERFLOW (t)
- = ((notrunc ? !uns && overflow : force_fit_type (t, overflow))
+ = ((notrunc ? !uns && overflow : force_fit_type (t, overflow && !uns))
| TREE_OVERFLOW (arg1)
| TREE_OVERFLOW (arg2));
TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
tree
size_int (number)
- unsigned int number;
+ unsigned HOST_WIDE_INT number;
{
register tree t;
/* Type-size nodes already made for small sizes. */
&& TREE_INT_CST_HIGH (arg0) == 0)
return arg1;
/* Handle general case of two integer constants. */
- return const_binop (code, arg0, arg1, 1);
+ return const_binop (code, arg0, arg1, 0);
}
if (arg0 == error_mark_node || arg1 == error_mark_node)
{
if (TREE_CODE (arg1) == INTEGER_CST)
{
+ /* If we would build a constant wider than GCC supports,
+ leave the conversion unfolded. */
+ if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
+ return t;
+
/* Given an integer constant, make new constant with new type,
appropriately sign-extended or truncated. */
t = build_int_2 (TREE_INT_CST_LOW (arg1),
REAL_VALUE_TYPE x;
REAL_VALUE_TYPE l;
REAL_VALUE_TYPE u;
+ tree type1 = TREE_TYPE (arg1);
x = TREE_REAL_CST (arg1);
- l = real_value_from_int_cst (TYPE_MIN_VALUE (type));
- u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
+ l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
+ u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
/* See if X will be in range after truncation towards 0.
To compensate for truncation, move the bounds away from 0,
but reject if X exactly equals the adjusted bounds. */
if (TREE_CODE (arg1) == REAL_CST)
{
if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
- return arg1;
+ {
+ t = arg1;
+ TREE_TYPE (arg1) = type;
+ return t;
+ }
else if (setjmp (float_error))
{
overflow = 1;
return result;
}
+/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
+ Zero means allow extended lvalues. */
+
+int pedantic_lvalues;
+
/* When pedantic, return an expr equal to X but certainly not valid as a
pedantic lvalue. Otherwise, return X. */
pedantic_non_lvalue (x)
tree x;
{
- if (pedantic)
+ if (pedantic_lvalues)
return non_lvalue (x);
else
return x;
if (operand_equal_p (arg0, arg1, 0))
return 1;
- if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
+ if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+ || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
return 0;
/* Duplicate what shorten_compare does to ARG1 and see if that gives the
return non_lvalue (t);
}
+
+/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
+
+static tree
+pedantic_omit_one_operand (type, result, omitted)
+ tree type, result, omitted;
+{
+ tree t = convert (type, result);
+
+ if (TREE_SIDE_EFFECTS (omitted))
+ return build (COMPOUND_EXPR, type, omitted, t);
+
+ return pedantic_non_lvalue (t);
+}
+
+
\f
/* Return a simplified tree node for the truth-negation of ARG. This
never alters ARG itself. We assume that ARG is an operation that
case SAVE_EXPR:
return build1 (TRUTH_NOT_EXPR, type, arg);
+
+ case CLEANUP_POINT_EXPR:
+ return build1 (CLEANUP_POINT_EXPR, type,
+ invert_truthvalue (TREE_OPERAND (arg, 0)));
}
if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
abort ();
return 0;
}
-#if BYTES_BIG_ENDIAN
- lbitpos = lnbitsize - lbitsize - lbitpos;
-#endif
+ if (BYTES_BIG_ENDIAN)
+ lbitpos = lnbitsize - lbitsize - lbitpos;
/* Make the mask to be used against the extracted field. */
mask = build_int_2 (~0, ~0);
*PMASK is set to the mask used. This is either contained in a
BIT_AND_EXPR or derived from the width of the field.
+ *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
+
Return 0 if this is not a component reference or is one that we can't
do anything with. */
static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
- pvolatilep, pmask)
+ pvolatilep, pmask, pand_mask)
tree exp;
int *pbitsize, *pbitpos;
enum machine_mode *pmode;
int *punsignedp, *pvolatilep;
tree *pmask;
+ tree *pand_mask;
{
tree and_mask = 0;
tree mask, inner, offset;
return 0;
}
- if (TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != ARRAY_REF
- && TREE_CODE (exp) != BIT_FIELD_REF)
- return 0;
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
punsignedp, pvolatilep);
- if (inner == exp || *pbitsize < 0 || offset != 0)
+ if ((inner == exp && and_mask == 0)
+ || *pbitsize < 0 || offset != 0)
return 0;
/* Compute the mask to access the bitfield. */
convert (unsigned_type, and_mask), mask));
*pmask = mask;
+ *pand_mask = and_mask;
return inner;
}
TREE_TYPE (tmask) = signed_type (type);
force_fit_type (tmask, 0);
return
- operand_equal_p (mask,
- const_binop (RSHIFT_EXPR,
- const_binop (LSHIFT_EXPR, tmask,
- size_int (precision - size), 0),
- size_int (precision - size), 0),
- 0);
+ tree_int_cst_equal (mask,
+ const_binop (RSHIFT_EXPR,
+ const_binop (LSHIFT_EXPR, tmask,
+ size_int (precision - size),
+ 0),
+ size_int (precision - size), 0));
}
/* Subroutine for fold_truthop: determine if an operand is simple enough
const_binop (MINUS_EXPR, hi_cst, lo_cst, 0))));
}
\f
+/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
+ bit value. Arrange things so the extra bits will be set to zero if and
+ only if C is sign-extended to its full width. If MASK is nonzero,
+ it is an INTEGER_CST that should be AND'ed with the extra bits. */
+
+static tree
+unextend (c, p, unsignedp, mask)
+ tree c;
+ int p;
+ int unsignedp;
+ tree mask;
+{
+ tree type = TREE_TYPE (c);
+ int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
+ tree temp;
+
+ if (p == modesize || unsignedp)
+ return c;
+
+ if (TREE_UNSIGNED (type))
+ c = convert (signed_type (type), c);
+
+ /* We work by getting just the sign bit into the low-order bit, then
+ into the high-order bit, then sign-extend. We then XOR that value
+ with C. */
+ temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
+ temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
+ temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
+ temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
+ if (mask != 0)
+ temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
+
+ return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
+}
+\f
/* Find ways of folding logical expressions of LHS and RHS:
Try to merge two comparisons to the same innermost item.
Look for range tests like "ch >= '0' && ch <= '9'".
enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
enum machine_mode lnmode, rnmode;
tree ll_mask, lr_mask, rl_mask, rr_mask;
+ tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
tree l_const, r_const;
tree type, result;
int first_bit, end_bit;
volatilep = 0;
ll_inner = decode_field_reference (ll_arg,
&ll_bitsize, &ll_bitpos, &ll_mode,
- &ll_unsignedp, &volatilep, &ll_mask);
+ &ll_unsignedp, &volatilep, &ll_mask,
+ &ll_and_mask);
lr_inner = decode_field_reference (lr_arg,
&lr_bitsize, &lr_bitpos, &lr_mode,
- &lr_unsignedp, &volatilep, &lr_mask);
+ &lr_unsignedp, &volatilep, &lr_mask,
+ &lr_and_mask);
rl_inner = decode_field_reference (rl_arg,
&rl_bitsize, &rl_bitpos, &rl_mode,
- &rl_unsignedp, &volatilep, &rl_mask);
+ &rl_unsignedp, &volatilep, &rl_mask,
+ &rl_and_mask);
rr_inner = decode_field_reference (rr_arg,
&rr_bitsize, &rr_bitpos, &rr_mode,
- &rr_unsignedp, &volatilep, &rr_mask);
+ &rr_unsignedp, &volatilep, &rr_mask,
+ &rr_and_mask);
/* It must be true that the inner operation on the lhs of each
comparison must be the same if we are to be able to do anything.
type = type_for_size (lnbitsize, 1);
xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
-#if BYTES_BIG_ENDIAN
- xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
- xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
-#endif
+ if (BYTES_BIG_ENDIAN)
+ {
+ xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
+ xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
+ }
ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
size_int (xll_bitpos), 0);
rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
size_int (xrl_bitpos), 0);
- /* Make sure the constants are interpreted as unsigned, so we
- don't have sign bits outside the range of their type. */
-
if (l_const)
{
- l_const = convert (unsigned_type (TREE_TYPE (l_const)), l_const);
- l_const = const_binop (LSHIFT_EXPR, convert (type, l_const),
- size_int (xll_bitpos), 0);
- l_const = const_binop (BIT_AND_EXPR, l_const, ll_mask, 0);
+ l_const = convert (type, l_const);
+ l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
+ l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
+ if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
+ fold (build1 (BIT_NOT_EXPR,
+ type, ll_mask)),
+ 0)))
+ {
+ warning ("comparison is always %s",
+ wanted_code == NE_EXPR ? "one" : "zero");
+
+ return convert (truth_type,
+ wanted_code == NE_EXPR
+ ? integer_one_node : integer_zero_node);
+ }
}
if (r_const)
{
- r_const = convert (unsigned_type (TREE_TYPE (r_const)), r_const);
- r_const = const_binop (LSHIFT_EXPR, convert (type, r_const),
- size_int (xrl_bitpos), 0);
- r_const = const_binop (BIT_AND_EXPR, r_const, rl_mask, 0);
+ r_const = convert (type, r_const);
+ r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
+ r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
+ if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
+ fold (build1 (BIT_NOT_EXPR,
+ type, rl_mask)),
+ 0)))
+ {
+ warning ("comparison is always %s",
+ wanted_code == NE_EXPR ? "one" : "zero");
+
+ return convert (truth_type,
+ wanted_code == NE_EXPR
+ ? integer_one_node : integer_zero_node);
+ }
}
/* If the right sides are not constant, do the same for it. Also,
rnbitpos = first_bit & ~ (rnbitsize - 1);
xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
-#if BYTES_BIG_ENDIAN
- xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
- xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
-#endif
+ if (BYTES_BIG_ENDIAN)
+ {
+ xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
+ xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
+ }
lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
size_int (xlr_bitpos), 0);
}
else
{
+ tree testtype = TREE_TYPE (arg1);
test = arg1;
- true_value = integer_one_node;
- false_value = integer_zero_node;
+ true_value = convert (testtype, integer_one_node);
+ false_value = convert (testtype, integer_zero_node);
}
/* If ARG0 is complex we want to make sure we only evaluate
succeed in folding one part to a constant, we do not need
to make this SAVE_EXPR. Since we do this optimization
primarily to see if we do end up with constant and this
- SAVE_EXPR interfers with later optimizations, suppressing
+ SAVE_EXPR interferes with later optimizations, suppressing
it when we can is important. */
if (TREE_CODE (arg0) != SAVE_EXPR
}
else
{
+ tree testtype = TREE_TYPE (arg0);
test = arg0;
- true_value = integer_one_node;
- false_value = integer_zero_node;
+ true_value = convert (testtype, integer_one_node);
+ false_value = convert (testtype, integer_zero_node);
}
if (TREE_CODE (arg1) != SAVE_EXPR
if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
return TREE_OPERAND (t, 0);
- /* In addition to the cases of two conversions in a row
- handled below, if we are converting something to its own
- type via an object of identical or wider precision, neither
- conversion is needed. */
- if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
- || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
- && TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == TREE_TYPE (t)
- && ((INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
- && INTEGRAL_TYPE_P (TREE_TYPE (t)))
- || (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
- && FLOAT_TYPE_P (TREE_TYPE (t))))
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
- >= TYPE_PRECISION (TREE_TYPE (t))))
- return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
-
- /* Two conversions in a row are not needed unless:
- - the intermediate type is narrower than both initial and final, or
- - the intermediate type and innermost type differ in signedness,
- and the outermost type is wider than the intermediate, or
- - the initial type is a pointer type and the precisions of the
- intermediate and final types differ, or
- - the final type is a pointer type and the precisions of the
- initial and intermediate types differ. */
- if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
- || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
- > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
- ||
- TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
- > TYPE_PRECISION (TREE_TYPE (t)))
- && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
- == INTEGER_TYPE)
- && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
- == INTEGER_TYPE)
- && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
- != TREE_UNSIGNED (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
- < TYPE_PRECISION (TREE_TYPE (t))))
- && ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
- > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))))
- ==
- (TREE_UNSIGNED (TREE_TYPE (t))
- && (TYPE_PRECISION (TREE_TYPE (t))
- > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
- && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
- == POINTER_TYPE)
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
- != TYPE_PRECISION (TREE_TYPE (t))))
- && ! (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
- != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
- return convert (TREE_TYPE (t), TREE_OPERAND (TREE_OPERAND (t, 0), 0));
+ /* Handle cases of two conversions in a row. */
+ if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
+ || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
+ {
+ tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
+ tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
+ tree final_type = TREE_TYPE (t);
+ int inside_int = INTEGRAL_TYPE_P (inside_type);
+ int inside_ptr = POINTER_TYPE_P (inside_type);
+ int inside_float = FLOAT_TYPE_P (inside_type);
+ int inside_prec = TYPE_PRECISION (inside_type);
+ int inside_unsignedp = TREE_UNSIGNED (inside_type);
+ int inter_int = INTEGRAL_TYPE_P (inter_type);
+ int inter_ptr = POINTER_TYPE_P (inter_type);
+ int inter_float = FLOAT_TYPE_P (inter_type);
+ int inter_prec = TYPE_PRECISION (inter_type);
+ int inter_unsignedp = TREE_UNSIGNED (inter_type);
+ int final_int = INTEGRAL_TYPE_P (final_type);
+ int final_ptr = POINTER_TYPE_P (final_type);
+ int final_float = FLOAT_TYPE_P (final_type);
+ int final_prec = TYPE_PRECISION (final_type);
+ int final_unsignedp = TREE_UNSIGNED (final_type);
+
+ /* In addition to the cases of two conversions in a row
+ handled below, if we are converting something to its own
+ type via an object of identical or wider precision, neither
+ conversion is needed. */
+ if (inside_type == final_type
+ && ((inter_int && final_int) || (inter_float && final_float))
+ && inter_prec >= final_prec)
+ return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
+
+ /* Likewise, if the intermediate and final types are either both
+ float or both integer, we don't need the middle conversion if
+ it is wider than the final type and doesn't change the signedness
+ (for integers). Avoid this if the final type is a pointer
+ since then we sometimes need the inner conversion. Likewise if
+ the outer has a precision not equal to the size of its mode. */
+ if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
+ || (inter_float && inside_float))
+ && inter_prec >= inside_prec
+ && (inter_float || inter_unsignedp == inside_unsignedp)
+ && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
+ && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
+ && ! final_ptr)
+ return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
+
+ /* Two conversions in a row are not needed unless:
+ - some conversion is floating-point (overstrict for now), or
+ - the intermediate type is narrower than both initial and
+ final, or
+ - the intermediate type and innermost type differ in signedness,
+ and the outermost type is wider than the intermediate, or
+ - the initial type is a pointer type and the precisions of the
+ intermediate and final types differ, or
+ - the final type is a pointer type and the precisions of the
+ initial and intermediate types differ. */
+ if (! inside_float && ! inter_float && ! final_float
+ && (inter_prec > inside_prec || inter_prec > final_prec)
+ && ! (inside_int && inter_int
+ && inter_unsignedp != inside_unsignedp
+ && inter_prec < final_prec)
+ && ((inter_unsignedp && inter_prec > inside_prec)
+ == (final_unsignedp && final_prec > inter_prec))
+ && ! (inside_ptr && inter_prec != final_prec)
+ && ! (final_ptr && inside_prec != inter_prec)
+ && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
+ && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
+ && ! final_ptr)
+ return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
+ }
if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
&& TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
TREE_TYPE (t) = type;
TREE_OVERFLOW (t)
= (TREE_OVERFLOW (arg0)
- | force_fit_type (t, overflow));
+ | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
TREE_CONSTANT_OVERFLOW (t)
= TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
}
return t;
/* Otherwise return (CON +- ARG1) - VAR. */
- TREE_SET_CODE (t, MINUS_EXPR);
- TREE_OPERAND (t, 1) = var;
- TREE_OPERAND (t, 0)
- = fold (build (code, TREE_TYPE (t), con, arg1));
+ t = build (MINUS_EXPR, type,
+ fold (build (code, type, con, arg1)), var);
}
else
{
return t;
/* Otherwise return VAR +- (ARG1 +- CON). */
- TREE_OPERAND (t, 1) = tem
- = fold (build (code, TREE_TYPE (t), arg1, con));
- TREE_OPERAND (t, 0) = var;
+ tem = fold (build (code, type, arg1, con));
+ t = build (code, type, var, tem);
+
if (integer_zerop (tem)
&& (code == PLUS_EXPR || code == MINUS_EXPR))
return convert (type, var);
convert (TREE_TYPE (t), con)));
}
- TREE_OPERAND (t, 0)
- = fold (build (code, TREE_TYPE (t), arg0, con));
- TREE_OPERAND (t, 1) = var;
+ t = build (TREE_CODE (t), type,
+ fold (build (code, TREE_TYPE (t), arg0, con)), var);
+
if (integer_zerop (TREE_OPERAND (t, 0))
&& TREE_CODE (t) == PLUS_EXPR)
return convert (TREE_TYPE (t), var);
case BIT_IOR_EXPR:
bit_ior:
+ {
+ register enum tree_code code0, code1;
+
if (integer_all_onesp (arg1))
return omit_one_operand (type, arg1, arg0);
if (integer_zerop (arg1))
if (t1 != NULL_TREE)
return t1;
- /* (a << C1) | (a >> C2) if A is unsigned and C1+C2 is the size of A
+ /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
is a rotate of A by C1 bits. */
+ /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
+ is a rotate of A by B bits. */
- if ((TREE_CODE (arg0) == RSHIFT_EXPR
- || TREE_CODE (arg0) == LSHIFT_EXPR)
- && (TREE_CODE (arg1) == RSHIFT_EXPR
- || TREE_CODE (arg1) == LSHIFT_EXPR)
- && TREE_CODE (arg0) != TREE_CODE (arg1)
+ code0 = TREE_CODE (arg0);
+ code1 = TREE_CODE (arg1);
+ if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
+ || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
&& operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
- && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
- && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
- && TREE_INT_CST_HIGH (TREE_OPERAND (arg1, 1)) == 0
- && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
- + TREE_INT_CST_LOW (TREE_OPERAND (arg1, 1)))
+ && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
+ {
+ register tree tree01, tree11;
+ register enum tree_code code01, code11;
+
+ tree01 = TREE_OPERAND (arg0, 1);
+ tree11 = TREE_OPERAND (arg1, 1);
+ code01 = TREE_CODE (tree01);
+ code11 = TREE_CODE (tree11);
+ if (code01 == INTEGER_CST
+ && code11 == INTEGER_CST
+ && TREE_INT_CST_HIGH (tree01) == 0
+ && TREE_INT_CST_HIGH (tree11) == 0
+ && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
- return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
- TREE_CODE (arg0) == LSHIFT_EXPR
- ? TREE_OPERAND (arg0, 1) : TREE_OPERAND (arg1, 1));
+ return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
+ code0 == LSHIFT_EXPR ? tree01 : tree11);
+ else if (code11 == MINUS_EXPR
+ && TREE_CODE (TREE_OPERAND (tree11, 0)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (TREE_OPERAND (tree11, 0)) == 0
+ && TREE_INT_CST_LOW (TREE_OPERAND (tree11, 0))
+ == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
+ && operand_equal_p (tree01, TREE_OPERAND (tree11, 1), 0))
+ return build (code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
+ type, TREE_OPERAND (arg0, 0), tree01);
+ else if (code01 == MINUS_EXPR
+ && TREE_CODE (TREE_OPERAND (tree01, 0)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (TREE_OPERAND (tree01, 0)) == 0
+ && TREE_INT_CST_LOW (TREE_OPERAND (tree01, 0))
+ == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
+ && operand_equal_p (tree11, TREE_OPERAND (tree01, 1), 0))
+ return build (code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
+ type, TREE_OPERAND (arg0, 0), tree11);
+ }
goto associate;
+ }
case BIT_XOR_EXPR:
if (integer_zerop (arg1))
tree c2 = integer_zero_node;
tree xarg0 = arg0;
- if (TREE_CODE (xarg0) == SAVE_EXPR)
+ if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
STRIP_NOPS (xarg0);
xarg0 = TREE_OPERAND (xarg0, 0);
}
- if (TREE_CODE (xarg0) == SAVE_EXPR)
+ if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
STRIP_NOPS (xarg0);
return non_lvalue (convert (type, arg0));
/* Since negative shift count is not well-defined,
don't try to compute it in the compiler. */
- if (tree_int_cst_sgn (arg1) < 0)
+ if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
return t;
+ /* Rewrite an LROTATE_EXPR by a constant into an
+ RROTATE_EXPR by a new constant. */
+ if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
+ {
+ TREE_SET_CODE (t, RROTATE_EXPR);
+ code = RROTATE_EXPR;
+ TREE_OPERAND (t, 1) = arg1
+ = const_binop
+ (MINUS_EXPR,
+ convert (TREE_TYPE (arg1),
+ build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
+ arg1, 0);
+ if (tree_int_cst_sgn (arg1) < 0)
+ return t;
+ }
+
+ /* If we have a rotate of a bit operation with the rotate count and
+ the second operand of the bit operation both constant,
+ permute the two operations. */
+ if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_CODE (arg0) == BIT_AND_EXPR
+ || TREE_CODE (arg0) == BIT_ANDTC_EXPR
+ || TREE_CODE (arg0) == BIT_IOR_EXPR
+ || TREE_CODE (arg0) == BIT_XOR_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ return fold (build (TREE_CODE (arg0), type,
+ fold (build (code, type,
+ TREE_OPERAND (arg0, 0), arg1)),
+ fold (build (code, type,
+ TREE_OPERAND (arg0, 1), arg1))));
+
+ /* Two consecutive rotates adding up to the width of the mode can
+ be ignored. */
+ if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (arg0) == RROTATE_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (arg1) == 0
+ && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
+ && ((TREE_INT_CST_LOW (arg1)
+ + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
+ == GET_MODE_BITSIZE (TYPE_MODE (type))))
+ return TREE_OPERAND (arg0, 0);
+
goto binary;
case MIN_EXPR:
and its values must be 0 or 1.
("true" is a fixed value perhaps depending on the language,
but we don't handle values other than 1 correctly yet.) */
- return invert_truthvalue (arg0);
+ tem = invert_truthvalue (arg0);
+ /* Avoid infinite recursion. */
+ if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
+ return t;
+ return convert (type, tem);
case TRUTH_ANDIF_EXPR:
/* Note that the operands of this must be ints
and the other one. */
{
tree constop = 0, varop;
- tree *constoploc;
+ int constopnum = -1;
if (TREE_CONSTANT (arg1))
- constoploc = &TREE_OPERAND (t, 1), constop = arg1, varop = arg0;
+ constopnum = 1, constop = arg1, varop = arg0;
if (TREE_CONSTANT (arg0))
- constoploc = &TREE_OPERAND (t, 0), constop = arg0, varop = arg1;
+ constopnum = 0, constop = arg0, varop = arg1;
if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
{
= fold (build (PLUS_EXPR, TREE_TYPE (varop),
constop, TREE_OPERAND (varop, 1)));
TREE_SET_CODE (varop, PREINCREMENT_EXPR);
- *constoploc = newconst;
+
+ t = build (code, type, TREE_OPERAND (t, 0),
+ TREE_OPERAND (t, 1));
+ TREE_OPERAND (t, constopnum) = newconst;
return t;
}
}
= fold (build (MINUS_EXPR, TREE_TYPE (varop),
constop, TREE_OPERAND (varop, 1)));
TREE_SET_CODE (varop, PREDECREMENT_EXPR);
- *constoploc = newconst;
+ t = build (code, type, TREE_OPERAND (t, 0),
+ TREE_OPERAND (t, 1));
+ TREE_OPERAND (t, constopnum) = newconst;
return t;
}
}
{
case GE_EXPR:
code = GT_EXPR;
- TREE_SET_CODE (t, code);
arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- TREE_OPERAND (t, 1) = arg1;
+ t = build (code, type, TREE_OPERAND (t, 0), arg1);
break;
case LT_EXPR:
code = LE_EXPR;
- TREE_SET_CODE (t, code);
arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- TREE_OPERAND (t, 1) = arg1;
+ t = build (code, type, TREE_OPERAND (t, 0), arg1);
+ break;
}
}
return pedantic_non_lvalue
(TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
- return pedantic_non_lvalue (omit_one_operand (type, arg1, arg0));
+ return pedantic_omit_one_operand (type, arg1, arg0);
/* If the second operand is zero, invert the comparison and swap
the second and third operands. Likewise if the second operand
if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
{
- arg0 = TREE_OPERAND (t, 0) = tem;
- TREE_OPERAND (t, 1) = TREE_OPERAND (t, 2);
- TREE_OPERAND (t, 2) = arg1;
- arg1 = TREE_OPERAND (t, 1);
+ t = build (code, type, tem,
+ TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
+ arg0 = tem;
+ arg1 = TREE_OPERAND (t, 2);
+ STRIP_NOPS (arg1);
}
}
tree arg2 = TREE_OPERAND (t, 2);
enum tree_code comp_code = TREE_CODE (arg0);
+ STRIP_NOPS (arg2);
+
/* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
depending on the comparison operation. */
- if (integer_zerop (TREE_OPERAND (arg0, 1))
+ if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
+ ? real_zerop (TREE_OPERAND (arg0, 1))
+ : integer_zerop (TREE_OPERAND (arg0, 1)))
&& TREE_CODE (arg2) == NEGATE_EXPR
&& operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
switch (comp_code)
case GE_EXPR:
case GT_EXPR:
return pedantic_non_lvalue
- (fold (build1 (ABS_EXPR, type, arg1)));
+ (convert (type, fold (build1 (ABS_EXPR,
+ TREE_TYPE (arg1), arg1))));
case LE_EXPR:
case LT_EXPR:
return pedantic_non_lvalue
(fold (build1 (NEGATE_EXPR, type,
- fold (build1 (ABS_EXPR, type, arg1)))));
+ convert (type,
+ fold (build1 (ABS_EXPR,
+ TREE_TYPE (arg1),
+ arg1))))));
}
/* If this is A != 0 ? A : 0, this is simply A. For ==, it is
if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
arg2, TREE_OPERAND (arg0, 0)))
- switch (comp_code)
- {
- case EQ_EXPR:
- return pedantic_non_lvalue (convert (type, arg2));
- case NE_EXPR:
- return pedantic_non_lvalue (convert (type, arg1));
- case LE_EXPR:
- case LT_EXPR:
- return pedantic_non_lvalue
- (fold (build (MIN_EXPR, type, arg1, arg2)));
- case GE_EXPR:
- case GT_EXPR:
- return pedantic_non_lvalue
- (fold (build (MAX_EXPR, type, arg1, arg2)));
- }
+ {
+ tree comp_op0 = TREE_OPERAND (arg0, 0);
+ tree comp_op1 = TREE_OPERAND (arg0, 1);
+ tree comp_type = TREE_TYPE (comp_op0);
+
+ switch (comp_code)
+ {
+ case EQ_EXPR:
+ return pedantic_non_lvalue (convert (type, arg2));
+ case NE_EXPR:
+ return pedantic_non_lvalue (convert (type, arg1));
+ case LE_EXPR:
+ case LT_EXPR:
+ return pedantic_non_lvalue
+ (convert (type, (fold (build (MIN_EXPR, comp_type,
+ comp_op0, comp_op1)))));
+ case GE_EXPR:
+ case GT_EXPR:
+ return pedantic_non_lvalue
+ (convert (type, fold (build (MAX_EXPR, comp_type,
+ comp_op0, comp_op1))));
+ }
+ }
/* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
we might still be able to simplify this. For example,
{
case EQ_EXPR:
/* We can replace A with C1 in this case. */
- arg1 = TREE_OPERAND (t, 1)
- = convert (type, TREE_OPERAND (arg0, 1));
+ arg1 = convert (type, TREE_OPERAND (arg0, 1));
+ t = build (code, type, TREE_OPERAND (t, 0), arg1,
+ TREE_OPERAND (t, 2));
break;
case LT_EXPR:
if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
{
- arg0 = TREE_OPERAND (t, 0) = tem;
- TREE_OPERAND (t, 1) = TREE_OPERAND (t, 2);
- TREE_OPERAND (t, 2) = arg1;
- arg1 = TREE_OPERAND (t, 1);
+ t = build (code, type, tem,
+ TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
+ arg0 = tem;
+ arg1 = TREE_OPERAND (t, 2);
+ STRIP_NOPS (arg1);
}
}
TREE_OPERAND (arg0, 1)))));
return t;
+ /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
+ appropriate. */
+ case CLEANUP_POINT_EXPR:
+ if (! TREE_SIDE_EFFECTS (arg0))
+ return convert (type, arg0);
+
+ {
+ enum tree_code code0 = TREE_CODE (arg0);
+ int kind0 = TREE_CODE_CLASS (code0);
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ tree arg01;
+
+ if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
+ return fold (build1 (code0, type,
+ fold (build1 (CLEANUP_POINT_EXPR,
+ TREE_TYPE (arg00), arg00))));
+
+ if (kind0 == '<' || kind0 == '2'
+ || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
+ || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
+ || code0 == TRUTH_XOR_EXPR)
+ {
+ arg01 = TREE_OPERAND (arg0, 1);
+
+ if (! TREE_SIDE_EFFECTS (arg00))
+ return fold (build (code0, type, arg00,
+ fold (build1 (CLEANUP_POINT_EXPR,
+ TREE_TYPE (arg01), arg01))));
+
+ if (! TREE_SIDE_EFFECTS (arg01))
+ return fold (build (code0, type,
+ fold (build1 (CLEANUP_POINT_EXPR,
+ TREE_TYPE (arg00), arg00)),
+ arg01));
+ }
+
+ return t;
+ }
+
default:
return t;
} /* switch (code) */