#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
+#include "md5.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
static void
-decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low, HOST_WIDE_INT *hi)
+decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
+ HOST_WIDE_INT *hi)
{
*low = words[0] + words[1] * BASE;
*hi = words[2] + words[3] * BASE;
low = TREE_INT_CST_LOW (t);
high = TREE_INT_CST_HIGH (t);
- if (POINTER_TYPE_P (TREE_TYPE (t)))
+ if (POINTER_TYPE_P (TREE_TYPE (t))
+ || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
prec = POINTER_SIZE;
else
prec = TYPE_PRECISION (TREE_TYPE (t));
The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
-add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, unsigned HOST_WIDE_INT l2,
- HOST_WIDE_INT h2, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
+add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
unsigned HOST_WIDE_INT l;
HOST_WIDE_INT h;
The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
-neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, unsigned HOST_WIDE_INT *lv,
- HOST_WIDE_INT *hv)
+neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
if (l1 == 0)
{
The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
-mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, unsigned HOST_WIDE_INT l2,
- HOST_WIDE_INT h2, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
+mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
HOST_WIDE_INT arg1[4];
HOST_WIDE_INT arg2[4];
encode (arg1, l1, h1);
encode (arg2, l2, h2);
- memset ((char *) prod, 0, sizeof prod);
+ memset (prod, 0, sizeof prod);
for (i = 0; i < 4; i++)
{
Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
void
-lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count,
- unsigned int prec, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
- int arith)
+lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ HOST_WIDE_INT count, unsigned int prec,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
unsigned HOST_WIDE_INT signmask;
Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
void
-rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count,
- unsigned int prec, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
+rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ HOST_WIDE_INT count, unsigned int prec,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
int arith)
{
unsigned HOST_WIDE_INT signmask;
Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
void
-lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count,
- unsigned int prec, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
+lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ HOST_WIDE_INT count, unsigned int prec,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
unsigned HOST_WIDE_INT s1l, s2l;
HOST_WIDE_INT s1h, s2h;
Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
void
-rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count,
- unsigned int prec, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
+rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
+ HOST_WIDE_INT count, unsigned int prec,
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
unsigned HOST_WIDE_INT s1l, s2l;
HOST_WIDE_INT s1h, s2h;
unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
HOST_WIDE_INT hnum_orig,
unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
- HOST_WIDE_INT hden_orig, unsigned HOST_WIDE_INT *lquo,
+ HOST_WIDE_INT hden_orig,
+ unsigned HOST_WIDE_INT *lquo,
HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
HOST_WIDE_INT *hrem)
{
goto finish_up;
}
- memset ((char *) quo, 0, sizeof quo);
+ memset (quo, 0, sizeof quo);
- memset ((char *) num, 0, sizeof num); /* to zero 9th element */
- memset ((char *) den, 0, sizeof den);
+ memset (num, 0, sizeof num); /* to zero 9th element */
+ memset (den, 0, sizeof den);
encode (num, lnum, hnum);
encode (den, lden, hden);
decode (quo, lquo, hquo);
finish_up:
- /* if result is negative, make it so. */
+ /* If result is negative, make it so. */
if (quo_neg)
neg_double (*lquo, *hquo, lquo, hquo);
same type as IN, but they will have the same signedness and mode. */
static tree
-split_tree (tree in, enum tree_code code, tree *conp, tree *litp, tree *minus_litp, int negate_p)
+split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
+ tree *minus_litp, int negate_p)
{
tree var = 0;
if (TREE_CODE (arg1) == REAL_CST)
{
+ enum machine_mode mode;
REAL_VALUE_TYPE d1;
REAL_VALUE_TYPE d2;
REAL_VALUE_TYPE value;
- tree t;
+ tree t, type;
d1 = TREE_REAL_CST (arg1);
d2 = TREE_REAL_CST (arg2);
+ type = TREE_TYPE (arg1);
+ mode = TYPE_MODE (type);
+
+ /* Don't perform operation if we honor signaling NaNs and
+ either operand is a NaN. */
+ if (HONOR_SNANS (mode)
+ && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
+ return NULL_TREE;
+
+ /* Don't perform operation if it would raise a division
+ by zero exception. */
+ if (code == RDIV_EXPR
+ && REAL_VALUES_EQUAL (d2, dconst0)
+ && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
+ return NULL_TREE;
+
/* If either operand is a NaN, just return it. Otherwise, set up
for floating-point trap; we return an overflow. */
if (REAL_VALUE_ISNAN (d1))
REAL_ARITHMETIC (value, code, d1, d2);
- t = build_real (TREE_TYPE (arg1),
- real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)),
- value));
+ t = build_real (type, real_value_truncate (mode, value));
TREE_OVERFLOW (t)
= (force_fit_type (t, 0)
default:
break;
}
- /* fall through - ??? */
+ /* Fall through - ??? */
case '<':
{
starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
static tree
-make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos, int unsignedp)
+make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
+ int unsignedp)
{
tree result = build (BIT_FIELD_REF, type, inner,
size_int (bitsize), bitsize_int (bitpos));
tree. Otherwise we return zero. */
static tree
-optimize_bit_field_compare (enum tree_code code, tree compare_type, tree lhs, tree rhs)
+optimize_bit_field_compare (enum tree_code code, tree compare_type,
+ tree lhs, tree rhs)
{
HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
tree type = TREE_TYPE (lhs);
do anything with. */
static tree
-decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize, HOST_WIDE_INT *pbitpos,
- enum machine_mode *pmode, int *punsignedp, int *pvolatilep,
+decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
+ HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
+ int *punsignedp, int *pvolatilep,
tree *pmask, tree *pand_mask)
{
tree outer_type = 0;
static tree
sign_bit_p (tree exp, tree val)
{
- unsigned HOST_WIDE_INT lo;
- HOST_WIDE_INT hi;
+ unsigned HOST_WIDE_INT mask_lo, lo;
+ HOST_WIDE_INT mask_hi, hi;
int width;
tree t;
{
hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
lo = 0;
+
+ mask_hi = ((unsigned HOST_WIDE_INT) -1
+ >> (2 * HOST_BITS_PER_WIDE_INT - width));
+ mask_lo = -1;
}
else
{
hi = 0;
lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
+
+ mask_hi = 0;
+ mask_lo = ((unsigned HOST_WIDE_INT) -1
+ >> (HOST_BITS_PER_WIDE_INT - width));
}
- if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
+ /* We mask off those bits beyond TREE_TYPE (exp) so that we can
+ treat VAL as if it were unsigned. */
+ if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
+ && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
return exp;
/* Handle extension from a narrower type. */
type if both are specified. */
static tree
-range_binop (enum tree_code code, tree type, tree arg0, int upper0_p, tree arg1,
- int upper1_p)
+range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
+ tree arg1, int upper1_p)
{
tree tem;
int result;
can, 0 if we can't. Set the output range into the specified parameters. */
static int
-merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0, tree high0,
- int in1_p, tree low1, tree high1)
+merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
+ tree high0, int in1_p, tree low1, tree high1)
{
int no_overlap;
int subset;
original expression. */
static tree
-fold_binary_op_with_conditional_arg (enum tree_code code, tree type, tree cond, tree arg, int cond_first_p)
+fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
+ tree cond, tree arg, int cond_first_p)
{
tree test, true_value, false_value;
tree lhs = NULL_TREE;
can be made, and NULL_TREE otherwise. */
static tree
-fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, tree type, tree arg0, tree arg1)
+fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
+ tree type, tree arg0, tree arg1)
{
REAL_VALUE_TYPE c;
convert (stype, arg00),
convert (stype, integer_zero_node)));
}
+
+ /* At this point, we know that arg0 is not testing the sign bit. */
+ if (TYPE_PRECISION (type) - 1 == bitnum)
+ abort ();
/* Otherwise we have (A & C) != 0 where C is a single bit,
convert that into ((A >> C2) & 1). Where C2 = log2(C).
/* If we are going to be able to omit the AND below, we must do our
operations as unsigned. If we must use the AND, we have a choice.
Normally unsigned is faster, but for some machines signed is. */
- ops_unsigned = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
- : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
+ ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
#else
- : 1
+ ops_unsigned = 1;
#endif
- );
signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
inner, integer_one_node);
/* Put the AND last so it can combine with more things. */
- if (bitnum != TYPE_PRECISION (type) - 1)
- inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
- inner, integer_one_node);
+ inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
+ inner, integer_one_node);
/* Make sure to return the proper type. */
if (TREE_TYPE (inner) != result_type)
}
return NULL_TREE;
}
-
+
/* Perform constant folding and related simplification of EXPR.
The related simplifications include x*1 => x, x*0 => 0, etc.,
and application of the associative law.
We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
but we can constant-fold them if they have constant operands. */
+#ifdef ENABLE_FOLD_CHECKING
+# define fold(x) fold_1 (x)
+static tree fold_1 (tree);
+static
+#endif
tree
fold (tree expr)
{
- tree t = expr;
+ tree t = expr, orig_t;
tree t1 = NULL_TREE;
tree tem;
tree type = TREE_TYPE (expr);
#ifdef MAX_INTEGER_COMPUTATION_MODE
check_max_integer_computation_mode (expr);
#endif
+ orig_t = t;
if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
{
if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
|| code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
|| code == BIT_AND_EXPR)
- && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
+ && ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) != INTEGER_CST)
+ || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) != REAL_CST)))
{
tem = arg0; arg0 = arg1; arg1 = tem;
- tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
- TREE_OPERAND (t, 1) = tem;
+ if (t == orig_t)
+ t = copy_node (t);
+ TREE_OPERAND (t, 0) = arg0;
+ TREE_OPERAND (t, 1) = arg1;
}
/* Now WINS is set as described above,
/* Don't leave an assignment inside a conversion
unless assigning a bitfield. */
tree prev = TREE_OPERAND (t, 0);
+ if (t == orig_t)
+ t = copy_node (t);
TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
/* First do the assignment, then return converted constant. */
t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
if (!wins)
{
- TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
+ if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
+ {
+ if (t == orig_t)
+ t = copy_node (t);
+ TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
+ }
return t;
}
return fold_convert (t, arg0);
return t;
case RANGE_EXPR:
- TREE_CONSTANT (t) = wins;
+ if (TREE_CONSTANT (t) != wins)
+ {
+ if (t == orig_t)
+ t = copy_node (t);
+ TREE_CONSTANT (t) = wins;
+ }
return t;
case NEGATE_EXPR:
RROTATE_EXPR by a new constant. */
if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
{
+ if (t == orig_t)
+ t = copy_node (t);
TREE_SET_CODE (t, RROTATE_EXPR);
code = RROTATE_EXPR;
TREE_OPERAND (t, 1) = arg1
|| (TREE_CODE (arg0) == REAL_CST
&& TREE_CODE (arg0) != REAL_CST))
{
+ if (t == orig_t)
+ t = copy_node (t);
TREE_OPERAND (t, 0) = arg1;
TREE_OPERAND (t, 1) = arg0;
arg0 = TREE_OPERAND (t, 0);
arg0);
case GE_EXPR:
code = EQ_EXPR;
+ if (t == orig_t)
+ t = copy_node (t);
TREE_SET_CODE (t, EQ_EXPR);
break;
case LE_EXPR:
arg0);
case LT_EXPR:
code = NE_EXPR;
+ if (t == orig_t)
+ t = copy_node (t);
TREE_SET_CODE (t, NE_EXPR);
break;
arg0);
case LE_EXPR:
code = EQ_EXPR;
+ if (t == orig_t)
+ t = copy_node (t);
TREE_SET_CODE (t, EQ_EXPR);
break;
arg0);
case GT_EXPR:
code = NE_EXPR;
+ if (t == orig_t)
+ t = copy_node (t);
TREE_SET_CODE (t, NE_EXPR);
break;
|| ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
return constant_boolean_node (1, type);
code = EQ_EXPR;
+ if (t == orig_t)
+ t = copy_node (t);
TREE_SET_CODE (t, code);
break;
} /* switch (code) */
}
+#ifdef ENABLE_FOLD_CHECKING
+#undef fold
+
+static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
+static void fold_check_failed (tree, tree);
+void print_fold_checksum (tree);
+
+/* When --enable-checking=fold, compute a digest of expr before
+ and after actual fold call to see if fold did not accidentally
+ change original expr. */
+
+tree
+fold (tree expr)
+{
+ tree ret;
+ struct md5_ctx ctx;
+ unsigned char checksum_before[16], checksum_after[16];
+ htab_t ht;
+
+ /* Hash EXPR before folding.  The hash table is emptied (not freed)
+    so the second walk below can reuse it.  */
+ ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (expr, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_before);
+ htab_empty (ht);
+
+ ret = fold_1 (expr);
+
+ /* Hash EXPR again after the real fold has run.  */
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (expr, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_after);
+ htab_delete (ht);
+
+ /* Any difference means fold_1 mutated its argument in place.  */
+ if (memcmp (checksum_before, checksum_after, 16))
+ fold_check_failed (expr, ret);
+
+ return ret;
+}
+
+/* Print the MD5 checksum of tree EXPR to stderr as 32 hex digits
+   followed by a newline.  External (non-static), presumably so it can
+   be called by hand from a debugger when tracking down a fold-check
+   failure -- TODO confirm; it has no callers visible here.  */
+
+void
+print_fold_checksum (tree expr)
+{
+ struct md5_ctx ctx;
+ unsigned char checksum[16], cnt;
+ htab_t ht;
+
+ ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (expr, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum);
+ htab_delete (ht);
+ for (cnt = 0; cnt < 16; ++cnt)
+ fprintf (stderr, "%02x", checksum[cnt]);
+ putc ('\n', stderr);
+}
+
+/* Report that the before/after checksums of EXPR differ, i.e. that
+   fold modified its input tree in place instead of returning a new
+   tree RET.  Aborts compilation via internal_error.  */
+
+static void
+fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
+{
+ internal_error ("fold check: original tree changed by fold");
+}
+
+/* Recursively feed the bytes of tree EXPR, and of every sub-tree
+   reachable from it, into the MD5 context CTX.  HT records pointers
+   already visited so shared nodes and cycles are hashed only once.
+   Fields that fold is legitimately allowed to update in place
+   (SAVE_EXPR_NOPLACEHOLDER, DECL_ASSEMBLER_NAME, TYPE_POINTER_TO and
+   TYPE_REFERENCE_TO) are cleared in a stack copy of the node before
+   hashing, so changing them does not trip the fold checksum.  */
+
+static void
+fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
+{
+ void **slot;
+ enum tree_code code;
+ char buf[sizeof (struct tree_decl)];
+ int i, len;
+
+ /* BUF must be able to hold a copy of any node mutated below; this
+    assumes tree_decl is the largest such node (the extra
+    5 * sizeof (tree) term accounts for SAVE_EXPR-style operand
+    storage past tree_exp -- TODO confirm against tree.h layout).  */
+ if (sizeof (struct tree_exp) + 5 * sizeof (tree)
+ > sizeof (struct tree_decl)
+ || sizeof (struct tree_type) > sizeof (struct tree_decl))
+ abort ();
+ if (expr == NULL)
+ return;
+ /* Hash each distinct node only once.  */
+ slot = htab_find_slot (ht, expr, INSERT);
+ if (*slot != NULL)
+ return;
+ *slot = expr;
+ code = TREE_CODE (expr);
+ if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
+ {
+ /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
+ memcpy (buf, expr, tree_size (expr));
+ expr = (tree) buf;
+ SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
+ }
+ else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
+ {
+ /* Allow DECL_ASSEMBLER_NAME to be modified.  */
+ memcpy (buf, expr, tree_size (expr));
+ expr = (tree) buf;
+ SET_DECL_ASSEMBLER_NAME (expr, NULL);
+ }
+ else if (TREE_CODE_CLASS (code) == 't'
+ && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
+ {
+ /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
+ memcpy (buf, expr, tree_size (expr));
+ expr = (tree) buf;
+ TYPE_POINTER_TO (expr) = NULL;
+ TYPE_REFERENCE_TO (expr) = NULL;
+ }
+ /* Hash the node's raw bytes, then recurse into the fields that
+    point at other trees.  */
+ md5_process_bytes (expr, tree_size (expr), ctx);
+ fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
+ if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
+ fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
+ len = TREE_CODE_LENGTH (code);
+ switch (TREE_CODE_CLASS (code))
+ {
+ case 'c':
+ switch (code)
+ {
+ case STRING_CST:
+ md5_process_bytes (TREE_STRING_POINTER (expr),
+ TREE_STRING_LENGTH (expr), ctx);
+ break;
+ case COMPLEX_CST:
+ fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
+ fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
+ break;
+ case VECTOR_CST:
+ fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
+ break;
+ default:
+ break;
+ }
+ break;
+ case 'x':
+ switch (code)
+ {
+ case TREE_LIST:
+ fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
+ fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
+ break;
+ case TREE_VEC:
+ for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
+ fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
+ break;
+ default:
+ break;
+ }
+ break;
+ case 'e':
+ /* A few expression codes carry fewer hashable operands than
+    TREE_CODE_LENGTH reports; override LEN for those.  */
+ switch (code)
+ {
+ case SAVE_EXPR: len = 2; break;
+ case GOTO_SUBROUTINE_EXPR: len = 0; break;
+ case RTL_EXPR: len = 0; break;
+ case WITH_CLEANUP_EXPR: len = 2; break;
+ default: break;
+ }
+ /* FALLTHROUGH */
+ case 'r':
+ case '<':
+ case '1':
+ case '2':
+ case 's':
+ for (i = 0; i < len; ++i)
+ fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
+ break;
+ case 'd':
+ fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
+ fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
+ fold_checksum_tree (DECL_NAME (expr), ctx, ht);
+ fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
+ fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
+ fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
+ fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
+ fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
+ fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
+ fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
+ fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
+ break;
+ case 't':
+ fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
+ fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
+ fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
+ fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
+ fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
+ fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
+ fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
+ fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
+ fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
+ fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
+ break;
+ default:
+ break;
+ }
+}
+
+#endif
+
+/* Perform constant folding and related simplification of initializer
+   expression EXPR.  This behaves identically to "fold" but ignores
+   potential run-time traps and exceptions that fold must preserve.  */
+
+tree
+fold_initializer (tree expr)
+{
+ int saved_signaling_nans = flag_signaling_nans;
+ int saved_trapping_math = flag_trapping_math;
+ int saved_trapv = flag_trapv;
+ tree result;
+
+ /* Temporarily disable the flags that make fold preserve run-time
+    traps (signaling NaNs, trapping math, -ftrapv); initializers are
+    evaluated at translation time so those traps cannot occur.  */
+ flag_signaling_nans = 0;
+ flag_trapping_math = 0;
+ flag_trapv = 0;
+
+ result = fold (expr);
+
+ /* Restore the caller's flag settings before returning.  */
+ flag_signaling_nans = saved_signaling_nans;
+ flag_trapping_math = saved_trapping_math;
+ flag_trapv = saved_trapv;
+
+ return result;
+}
+
/* Determine if first argument is a multiple of second argument. Return 0 if
it is not, or we cannot easily determined it to be.