#include "langhooks.h"
#include "md5.h"
-/* Non-zero if we are folding constants inside an initializer; zero
+/* Nonzero if we are folding constants inside an initializer; zero
otherwise. */
int folding_initializer = 0;
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
-static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
tree, tree,
tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
-static int native_encode_expr (tree, unsigned char *, int);
-static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
int1l = TREE_INT_CST_LOW (arg1);
int1h = TREE_INT_CST_HIGH (arg1);
+ /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
+ &obj[some_exotic_number]. */
+ if (POINTER_TYPE_P (type))
+ {
+ uns = false;
+ type = signed_type_for (type);
+ fit_double_type (int1l, int1h, &int1l, &int1h,
+ type);
+ }
+ else
+ fit_double_type (int1l, int1h, &int1l, &int1h, type);
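/* A minimal illustrative sketch (not from the patch) of the comment above:
   with 16-byte elements, a byte offset of -128 means index -8, which is
   only recovered if the constant offset is folded as a signed quantity.
   The type and function names here are hypothetical.  */
typedef struct { char bytes[16]; } elt;

static elt *
back_eight (elt *p)
{
  return (elt *) ((char *) p - 128);	/* should act like &p[-8] */
}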
int2l = TREE_INT_CST_LOW (arg2);
int2h = TREE_INT_CST_HIGH (arg2);
return build_int_cst_wide (type, quol, quoh);
}
\f
-/* This is non-zero if we should defer warnings about undefined
+/* This is nonzero if we should defer warnings about undefined
overflow. This facility exists because these warnings are a
special case. The code to estimate loop iterations does not want
to issue any warnings, since it works with expressions which do not
== TREE_INT_CST_LOW (op1))
{
tree ntype = TYPE_UNSIGNED (type)
- ? lang_hooks.types.signed_type (type)
- : lang_hooks.types.unsigned_type (type);
+ ? signed_type_for (type)
+ : unsigned_type_for (type);
tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
return fold_convert (type, temp);
else if (type == bitsizetype)
ctype = sbitsizetype;
else
- ctype = lang_hooks.types.signed_type (type);
+ ctype = signed_type_for (type);
/* If either operand is not a constant, do the conversions to the signed
type and subtract. The hardware will do the right thing with any
case ARRAY_REF:
case ARRAY_RANGE_REF:
- /* Operands 2 and 3 may be null. */
+ /* Operands 2 and 3 may be null.
+ Compare the array index by value first if it is constant, as the
+ indices may have different types but the same value here. */
return (OP_SAME (0)
- && OP_SAME (1)
+ && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 1))
+ || OP_SAME (1))
&& OP_SAME_WITH_NULL (2)
&& OP_SAME_WITH_NULL (3));
/* Make sure shorter operand is extended the right way
to match the longer operand. */
- primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
+ primarg1 = fold_convert (signed_or_unsigned_type_for
(unsignedp1, TREE_TYPE (primarg1)), primarg1);
if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
unsigned int precision = TYPE_PRECISION (type);
tree tmask;
- tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
+ tmask = build_int_cst_type (signed_type_for (type), -1);
return
tree_int_cst_equal (mask,
{
if (! TYPE_UNSIGNED (etype))
{
- etype = lang_hooks.types.unsigned_type (etype);
+ etype = unsigned_type_for (etype);
high = fold_convert (etype, high);
exp = fold_convert (etype, exp);
}
{
if (TYPE_UNSIGNED (etype))
{
- etype = lang_hooks.types.signed_type (etype);
+ etype = signed_type_for (etype);
exp = fold_convert (etype, exp);
}
return fold_build2 (GT_EXPR, type, exp,
/* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
for the type in question, as we rely on this here. */
- utype = lang_hooks.types.unsigned_type (etype);
+ utype = unsigned_type_for (etype);
maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
integer_one_node, 1);
value = const_binop (MINUS_EXPR, high, low, 0);
+
+ if (POINTER_TYPE_P (etype))
+ {
+ if (value != 0 && !TREE_OVERFLOW (value))
+ {
+ low = fold_convert (sizetype, low);
+ low = fold_build1 (NEGATE_EXPR, sizetype, low);
+ return build_range_check (type,
+ fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
+ 1, build_int_cst (etype, 0), value);
+ }
+ return 0;
+ }
+
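/* A minimal illustrative sketch (not from the patch) of the pointer range
   check just added: a two-sided pointer range test collapses to a single
   unsigned comparison against the range length, assuming the range does not
   wrap.  Function and parameter names are hypothetical.  */
#include <stddef.h>

static int
in_range (const char *p, const char *low, const char *high)
{
  /* Equivalent to (low <= p && p <= high) when high >= low and all three
     pointers refer to the same object.  */
  return (size_t) (p - low) <= (size_t) (high - low);
}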
if (value != 0 && !TREE_OVERFLOW (value))
return build_range_check (type,
fold_build2 (MINUS_EXPR, etype, exp, low),
{
low = range_successor (high1);
high = high0;
- in_p = (low != 0);
+ in_p = 1;
+ if (low == 0)
+ {
+ /* We are in the weird situation where high0 > high1 but
+ high1 has no successor. Punt. */
+ return 0;
+ }
}
else if (! subset || highequal)
{
low = low0;
high = range_predecessor (low1);
- in_p = (high != 0);
+ in_p = 1;
+ if (high == 0)
+ {
+ /* low0 < low1 but low1 has no predecessor. Punt. */
+ return 0;
+ }
}
else
return 0;
{
low = range_successor (high0);
high = high1;
- in_p = (low != 0);
+ in_p = 1;
+ if (low == 0)
+ {
+ /* high1 > high0 but high0 has no successor. Punt. */
+ return 0;
+ }
}
}
case GE_EXPR:
case GT_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
- arg1 = fold_convert (lang_hooks.types.signed_type
+ arg1 = fold_convert (signed_type_for
(TREE_TYPE (arg1)), arg1);
tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
return pedantic_non_lvalue (fold_convert (type, tem));
case LE_EXPR:
case LT_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
- arg1 = fold_convert (lang_hooks.types.signed_type
+ arg1 = fold_convert (signed_type_for
(TREE_TYPE (arg1)), arg1);
tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
return negate_expr (fold_convert (type, tem));
zero or one, and the conversion to a signed type can never overflow.
We could get an overflow if this conversion is done anywhere else. */
if (TYPE_UNSIGNED (type))
- temp = fold_convert (lang_hooks.types.signed_type (type), temp);
+ temp = fold_convert (signed_type_for (type), temp);
temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
must avoid building ABS_EXPR itself as unsigned. */
if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
{
- tree cstype = (*lang_hooks.types.signed_type) (ctype);
+ tree cstype = (*signed_type_for) (ctype);
if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
!= 0)
{
offset is set to NULL_TREE. Base will be canonicalized to
something you can get the element type from using
TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
- in bytes to the base. */
+ in bytes to the base in sizetype. */
static bool
extract_array_ref (tree expr, tree *base, tree *offset)
/* One canonical form is a PLUS_EXPR with the first
argument being an ADDR_EXPR with a possible NOP_EXPR
attached. */
- if (TREE_CODE (expr) == PLUS_EXPR)
+ if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
{
tree op0 = TREE_OPERAND (expr, 0);
tree inner_base, dummy1;
/* Strip NOP_EXPRs here because the C frontends and/or
- folders present us (int *)&x.a + 4B possibly. */
+ folders present us (int *)&x.a p+ 4 possibly. */
STRIP_NOPS (op0);
if (extract_array_ref (op0, &inner_base, &dummy1))
{
*base = inner_base;
- if (dummy1 == NULL_TREE)
- *offset = TREE_OPERAND (expr, 1);
- else
- *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
- dummy1, TREE_OPERAND (expr, 1));
+ *offset = fold_convert (sizetype, TREE_OPERAND (expr, 1));
+ if (dummy1 != NULL_TREE)
+ *offset = fold_build2 (PLUS_EXPR, sizetype,
+ dummy1, *offset);
return true;
}
}
*base = TREE_OPERAND (op0, 0);
*offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
array_ref_element_size (op0));
+ *offset = fold_convert (sizetype, *offset);
}
else
{
&& TYPE_PRECISION (TREE_TYPE (arg00))
== GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
{
- tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
+ tree stype = signed_type_for (TREE_TYPE (arg00));
return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
result_type, fold_convert (stype, arg00),
build_int_cst (stype, 0));
return fold_build2 (code, type, arg0_inner, arg1);
}
-/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
+/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
step of the array. Reconstructs s and delta in the case of s * delta
being an integer constant (and thus already folded).
ADDR is the address. MULT is the multiplicative expression.
NULL_TREE is returned. */
static tree
-try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
+try_move_mult_to_index (tree addr, tree op1)
{
tree s, delta, step;
tree ref = TREE_OPERAND (addr, 0), pref;
tree itype;
bool mdim = false;
+ /* Strip the nops that might be added when converting op1 to sizetype. */
+ STRIP_NOPS (op1);
+
/* Canonicalize op1 into a possibly non-constant delta
and an INTEGER_CST s. */
if (TREE_CODE (op1) == MULT_EXPR)
|| TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
continue;
- tmp = fold_binary (code, itype,
+ tmp = fold_binary (PLUS_EXPR, itype,
fold_convert (itype,
TREE_OPERAND (ref, 1)),
fold_convert (itype, delta));
pos = TREE_OPERAND (pos, 0);
}
- TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
+ TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
fold_convert (itype,
TREE_OPERAND (pos, 1)),
fold_convert (itype, delta));
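/* A minimal illustrative sketch (not from the patch) of what
   try_move_mult_to_index now folds: when a byte offset is a multiple of the
   element size, the addition can move into the array index.  Names below
   are hypothetical.  */
static int a[100];

static int *
move_by (long i, long delta)
{
  return (int *) ((char *) &a[i] + delta * (long) sizeof (int));
  /* folds to &a[i + delta] */
}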
if (TREE_TYPE (a1) != typea)
return NULL_TREE;
- diff = fold_build2 (MINUS_EXPR, typea, a1, a);
- if (!integer_onep (diff))
- return NULL_TREE;
+ if (POINTER_TYPE_P (typea))
+ {
+ /* Convert the pointers to signed integers (ssizetype) before
+ taking the difference. */
+ tree ta = fold_convert (ssizetype, a);
+ tree ta1 = fold_convert (ssizetype, a1);
+ diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
+ }
+ else
+ diff = fold_binary (MINUS_EXPR, typea, a1, a);
+
+ if (!diff || !integer_onep (diff))
+ return NULL_TREE;
return fold_build2 (GE_EXPR, type, a, y);
}
{
tree type = TREE_TYPE (expr);
int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
- int byte, offset, word, words;
+ int byte, offset, word, words, bitpos;
unsigned char value;
/* There are always 32 bits in each long, no matter the size of
if (total_bytes > len)
return 0;
- words = total_bytes / UNITS_PER_WORD;
+ words = 32 / UNITS_PER_WORD;
real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
- for (byte = 0; byte < total_bytes; byte++)
+ for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
+ bitpos += BITS_PER_UNIT)
{
- int bitpos = byte * BITS_PER_UNIT;
+ byte = (bitpos / BITS_PER_UNIT) & 3;
value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
- if (total_bytes > UNITS_PER_WORD)
+ if (UNITS_PER_WORD < 4)
{
word = byte / UNITS_PER_WORD;
- if (FLOAT_WORDS_BIG_ENDIAN)
+ if (WORDS_BIG_ENDIAN)
word = (words - 1) - word;
offset = word * UNITS_PER_WORD;
if (BYTES_BIG_ENDIAN)
offset += byte % UNITS_PER_WORD;
}
else
- offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
- ptr[offset] = value;
+ offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
+ ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
}
return total_bytes;
}
buffer PTR of length LEN bytes. Return the number of bytes
placed in the buffer, or zero upon failure. */
-static int
+int
native_encode_expr (tree expr, unsigned char *ptr, int len)
{
switch (TREE_CODE (expr))
{
enum machine_mode mode = TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
- int byte, offset, word, words;
+ int byte, offset, word, words, bitpos;
unsigned char value;
/* There are always 32 bits in each long, no matter the size of
the hosts long. We handle floating point representations with
total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
if (total_bytes > len || total_bytes > 24)
return NULL_TREE;
- words = total_bytes / UNITS_PER_WORD;
+ words = 32 / UNITS_PER_WORD;
memset (tmp, 0, sizeof (tmp));
- for (byte = 0; byte < total_bytes; byte++)
+ for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
+ bitpos += BITS_PER_UNIT)
{
- int bitpos = byte * BITS_PER_UNIT;
- if (total_bytes > UNITS_PER_WORD)
+ byte = (bitpos / BITS_PER_UNIT) & 3;
+ if (UNITS_PER_WORD < 4)
{
word = byte / UNITS_PER_WORD;
- if (FLOAT_WORDS_BIG_ENDIAN)
+ if (WORDS_BIG_ENDIAN)
word = (words - 1) - word;
offset = word * UNITS_PER_WORD;
if (BYTES_BIG_ENDIAN)
offset += byte % UNITS_PER_WORD;
}
else
- offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
- value = ptr[offset];
+ offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
+ value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
}
we return a REAL_CST, etc... If the buffer cannot be interpreted,
return NULL_TREE. */
-static tree
+tree
native_interpret_expr (tree type, unsigned char *ptr, int len)
{
switch (TREE_CODE (type))
return native_interpret_expr (type, buffer, len);
}
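/* A hedged usage sketch (not from the patch) for the two helpers exported
   above, native_encode_expr and native_interpret_expr: round-trip a constant
   through its target byte representation.  The helper name is
   hypothetical.  */
static bool
native_roundtrip_ok (tree cst)
{
  unsigned char buf[24];
  int len = native_encode_expr (cst, buf, sizeof buf);
  tree back;

  if (len == 0)
    return false;
  back = native_interpret_expr (TREE_TYPE (cst), buf, len);
  return back && operand_equal_p (cst, back, OEP_ONLY_CONST);
}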
+/* Build an expression for the address of T. Folds away INDIRECT_REF
+ to avoid confusing the gimplify process. When IN_FOLD is true,
+ avoid modifying T. */
+
+static tree
+build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
+{
+ /* The size of the object is not relevant when talking about its address. */
+ if (TREE_CODE (t) == WITH_SIZE_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ /* Note: doesn't apply to ALIGN_INDIRECT_REF */
+ if (TREE_CODE (t) == INDIRECT_REF
+ || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
+ {
+ t = TREE_OPERAND (t, 0);
+
+ if (TREE_TYPE (t) != ptrtype)
+ t = build1 (NOP_EXPR, ptrtype, t);
+ }
+ else if (!in_fold)
+ {
+ tree base = t;
+
+ while (handled_component_p (base))
+ base = TREE_OPERAND (base, 0);
+
+ if (DECL_P (base))
+ TREE_ADDRESSABLE (base) = 1;
+
+ t = build1 (ADDR_EXPR, ptrtype, t);
+ }
+ else
+ t = build1 (ADDR_EXPR, ptrtype, t);
+
+ return t;
+}
+
+/* Build an expression for the address of T with type PTRTYPE. This
+ function modifies the input parameter 'T' by sometimes setting the
+ TREE_ADDRESSABLE flag. */
+
+tree
+build_fold_addr_expr_with_type (tree t, tree ptrtype)
+{
+ return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
+}
+
+/* Build an expression for the address of T. This function modifies
+ the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
+ flag. When called from fold functions, use fold_addr_expr instead. */
+
+tree
+build_fold_addr_expr (tree t)
+{
+ return build_fold_addr_expr_with_type_1 (t,
+ build_pointer_type (TREE_TYPE (t)),
+ false);
+}
+
+/* Same as build_fold_addr_expr, builds an expression for the address
+ of T, but avoids touching the input node 't'. Fold functions
+ should use this version. */
+
+static tree
+fold_addr_expr (tree t)
+{
+ tree ptrtype = build_pointer_type (TREE_TYPE (t));
+
+ return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
+}
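/* A minimal illustrative sketch (not from the patch), assuming the
   fold-const.c context above: fold-time callers must not mark a decl
   addressable just to reason about its address, so they go through
   fold_addr_expr, while gimplification-time callers use
   build_fold_addr_expr.  The helper name is hypothetical.  */
static tree
fold_deref_of_addr (tree x)
{
  tree addr = fold_addr_expr (x);		/* X is left untouched */
  return build_fold_indirect_ref (addr);	/* folds back to X */
}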
/* Fold a unary expression of code CODE and type TYPE with operand
OP0. Return the folded expression if folding is successful.
if (! offset && bitpos == 0
&& TYPE_MAIN_VARIANT (TREE_TYPE (type))
== TYPE_MAIN_VARIANT (TREE_TYPE (base)))
- return fold_convert (type, build_fold_addr_expr (base));
+ return fold_convert (type, fold_addr_expr (base));
}
if ((TREE_CODE (op0) == MODIFY_EXPR
&& (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
== ZERO_EXTEND))
{
- tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
+ tree uns = unsigned_type_for (TREE_TYPE (and0));
and0 = fold_convert (uns, and0);
and1 = fold_convert (uns, and1);
}
}
}
- /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
- T2 being pointers to types of the same size. */
+ /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
+ when one of the new casts will fold away. Conservatively we assume
+ that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
if (POINTER_TYPE_P (type)
- && BINARY_CLASS_P (arg0)
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
- && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
+ && TREE_CODE (arg0) == POINTER_PLUS_EXPR
+ && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
+ || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
{
tree arg00 = TREE_OPERAND (arg0, 0);
- tree t0 = type;
- tree t1 = TREE_TYPE (arg00);
- tree tt0 = TREE_TYPE (t0);
- tree tt1 = TREE_TYPE (t1);
- tree s0 = TYPE_SIZE (tt0);
- tree s1 = TYPE_SIZE (tt1);
+ tree arg01 = TREE_OPERAND (arg0, 1);
- if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
- return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
- TREE_OPERAND (arg0, 1));
+ return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
+ fold_convert (sizetype, arg01));
}
/* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
- of the same precision, and X is a integer type not narrower than
+ of the same precision, and X is an integer type not narrower than
types T1 or T2, i.e. the cast (T2)X isn't an extension. */
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (op0) == BIT_NOT_EXPR
return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
}
- tem = fold_convert_const (code, type, arg0);
+ tem = fold_convert_const (code, type, op0);
return tem ? tem : NULL_TREE;
case VIEW_CONVERT_EXPR:
return fold_build2 (cmp_code, type, variable1, const2);
}
- tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
+ tem = maybe_canonicalize_comparison (code, type, op0, op1);
if (tem)
return tem;
}
}
- /* Convert foo++ == CONST into ++foo == CONST + INCR. */
- if (TREE_CONSTANT (arg1)
- && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
- || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
- /* This optimization is invalid for ordered comparisons
- if CONST+INCR overflows or if foo+incr might overflow.
- This optimization is invalid for floating point due to rounding.
- For pointer types we assume overflow doesn't happen. */
- && (POINTER_TYPE_P (TREE_TYPE (arg0))
- || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
- && (code == EQ_EXPR || code == NE_EXPR))))
- {
- tree varop, newconst;
-
- if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
- {
- newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
- arg1, TREE_OPERAND (arg0, 1));
- varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- }
- else
- {
- newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
- arg1, TREE_OPERAND (arg0, 1));
- varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- }
-
-
- /* If VAROP is a reference to a bitfield, we must mask
- the constant by the width of the field. */
- if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
- && host_integerp (DECL_SIZE (TREE_OPERAND
- (TREE_OPERAND (varop, 0), 1)), 1))
- {
- tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
- HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
- tree folded_compare, shift;
-
- /* First check whether the comparison would come out
- always the same. If we don't do that we would
- change the meaning with the masking. */
- folded_compare = fold_build2 (code, type,
- TREE_OPERAND (varop, 0), arg1);
- if (TREE_CODE (folded_compare) == INTEGER_CST)
- return omit_one_operand (type, folded_compare, varop);
-
- shift = build_int_cst (NULL_TREE,
- TYPE_PRECISION (TREE_TYPE (varop)) - size);
- shift = fold_convert (TREE_TYPE (varop), shift);
- newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
- newconst, shift);
- newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
- newconst, shift);
- }
-
- return fold_build2 (code, type, varop, newconst);
- }
-
if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
&& (TREE_CODE (arg0) == NOP_EXPR
|| TREE_CODE (arg0) == CONVERT_EXPR))
}
}
- /* If this is a comparison of complex values and both sides
- are COMPLEX_CST, do the comparision by parts to fold the
- comparision. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
- && TREE_CODE (arg0) == COMPLEX_CST
- && TREE_CODE (arg1) == COMPLEX_CST)
- {
- tree real0, imag0, real1, imag1;
- enum tree_code outercode;
-
- real0 = TREE_REALPART (arg0);
- imag0 = TREE_IMAGPART (arg0);
- real1 = TREE_REALPART (arg1);
- imag1 = TREE_IMAGPART (arg1);
- outercode = code == EQ_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
-
- return fold_build2 (outercode, type,
- fold_build2 (code, type, real0, real1),
- fold_build2 (code, type, imag0, imag1));
- }
-
-
/* Fold a comparison of the address of COMPONENT_REFs with the same
type and component to a comparison of the address of the base
object. In short, &x->a OP &y->a to x OP y and
tree op0 = TREE_OPERAND (cref0, 0);
tree op1 = TREE_OPERAND (cref1, 0);
return fold_build2 (code, type,
- build_fold_addr_expr (op0),
- build_fold_addr_expr (op1));
+ fold_addr_expr (op0),
+ fold_addr_expr (op1));
}
}
/* Fold ~X op ~Y as Y op X. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == BIT_NOT_EXPR)
- return fold_build2 (code, type,
- TREE_OPERAND (arg1, 0),
- TREE_OPERAND (arg0, 0));
+ {
+ tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
+ return fold_build2 (code, type,
+ fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
+ TREE_OPERAND (arg0, 0));
+ }
/* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == INTEGER_CST)
- return fold_build2 (swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 0),
- fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
+ {
+ tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
+ return fold_build2 (swap_tree_comparison (code), type,
+ TREE_OPERAND (arg0, 0),
+ fold_build1 (BIT_NOT_EXPR, cmp_type,
+ fold_convert (cmp_type, arg1)));
+ }
return NULL_TREE;
}
switch (code)
{
+ case POINTER_PLUS_EXPR:
+ /* 0 +p index -> (type)index */
+ if (integer_zerop (arg0))
+ return non_lvalue (fold_convert (type, arg1));
+
+ /* PTR +p 0 -> PTR */
+ if (integer_zerop (arg1))
+ return non_lvalue (fold_convert (type, arg0));
+
+ /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
+ if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
+ return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
+ fold_convert (sizetype, arg1),
+ fold_convert (sizetype, arg0)));
+
+ /* index +p PTR -> PTR +p index */
+ if (POINTER_TYPE_P (TREE_TYPE (arg1))
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
+ return fold_build2 (POINTER_PLUS_EXPR, type,
+ fold_convert (type, arg1), fold_convert (sizetype, arg0));
+
+ /* (PTR +p B) +p A -> PTR +p (B + A) */
+ if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
+ {
+ tree inner;
+ tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ inner = fold_build2 (PLUS_EXPR, sizetype, arg01,
+ fold_convert (sizetype, arg1));
+ return fold_build2 (POINTER_PLUS_EXPR, type, arg00, inner);
+ }
+
+ /* PTR_CST +p CST -> CST1 */
+ if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+ return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
+
+ /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
+ of the array. The loop optimizer sometimes produces this kind of
+ expression. */
+ if (TREE_CODE (arg0) == ADDR_EXPR)
+ {
+ tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
+ if (tem)
+ return fold_convert (type, tem);
+ }
+
+ return NULL_TREE;
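/* A minimal source-level sketch (not from the patch) of one of the
   POINTER_PLUS_EXPR folds above: successive offsets accumulate into a
   single p+.  Names are hypothetical.  */
static char *
add_twice (char *p, unsigned long a, unsigned long b)
{
  return (p + b) + a;		/* (PTR +p B) +p A -> PTR +p (B + A) */
}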
case PLUS_EXPR:
+ /* PTR + INT -> (INT)(PTR p+ INT) */
+ if (POINTER_TYPE_P (TREE_TYPE (arg0))
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
+ return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
+ TREE_TYPE (arg0),
+ arg0,
+ fold_convert (sizetype, arg1)));
+ /* INT + PTR -> (INT)(PTR p+ INT) */
+ if (POINTER_TYPE_P (TREE_TYPE (arg1))
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
+ return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
+ TREE_TYPE (arg1),
+ arg1,
+ fold_convert (sizetype, arg0)));
/* A + (-B) -> A - B */
if (TREE_CODE (arg1) == NEGATE_EXPR)
return fold_build2 (MINUS_EXPR, type,
fold_convert (type,
parg1)));
}
-
- /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
- of the array. Loop optimizer sometimes produce this type of
- expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR)
- {
- tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
- if (tem)
- return fold_convert (type, tem);
- }
- else if (TREE_CODE (arg1) == ADDR_EXPR)
- {
- tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
- if (tem)
- return fold_convert (type, tem);
- }
}
else
{
{
tree var0, con0, lit0, minus_lit0;
tree var1, con1, lit1, minus_lit1;
+ bool ok = true;
/* Split both trees into variables, constants, and literals. Then
associate each group together, the constants with literals,
var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
code == MINUS_EXPR);
+ /* With undefined overflow we can only associate constants
+ with one variable. */
+ if ((POINTER_TYPE_P (type)
+ || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
+ && var0 && var1)
+ {
+ tree tmp0 = var0;
+ tree tmp1 = var1;
+
+ if (TREE_CODE (tmp0) == NEGATE_EXPR)
+ tmp0 = TREE_OPERAND (tmp0, 0);
+ if (TREE_CODE (tmp1) == NEGATE_EXPR)
+ tmp1 = TREE_OPERAND (tmp1, 0);
+ /* The only case we can still associate with two variables
+ is if they are the same, modulo negation. */
+ if (!operand_equal_p (tmp0, tmp1, 0))
+ ok = false;
+ }
+
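/* A minimal illustrative sketch (not from the patch) of why the guard above
   is needed when signed overflow is undefined: with a == INT_MAX and
   b == 1 the expression below is well defined and yields INT_MAX, but the
   reassociated form (a + b) + (-1) would overflow in the intermediate
   a + b.  Names are hypothetical.  */
#include <limits.h>

static int
reassoc_hazard (int a, int b)
{
  return (a - 2) + (b + 1);
}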
/* Only do something if we found more than two objects. Otherwise,
nothing has changed and we risk infinite recursion. */
- if (2 < ((var0 != 0) + (var1 != 0)
- + (con0 != 0) + (con1 != 0)
- + (lit0 != 0) + (lit1 != 0)
- + (minus_lit0 != 0) + (minus_lit1 != 0)))
+ if (ok
+ && (2 < ((var0 != 0) + (var1 != 0)
+ + (con0 != 0) + (con1 != 0)
+ + (lit0 != 0) + (lit1 != 0)
+ + (minus_lit0 != 0) + (minus_lit1 != 0))))
{
/* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
if (code == MINUS_EXPR)
return NULL_TREE;
case MINUS_EXPR:
+ /* Pointer simplifications for subtraction, simple reassociations. */
+ if (POINTER_TYPE_P (TREE_TYPE (arg1))
+ && POINTER_TYPE_P (TREE_TYPE (arg0)))
+ {
+ /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
+ if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
+ && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
+ {
+ tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
+ tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
+ tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
+ tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
+ return fold_build2 (PLUS_EXPR, type,
+ fold_build2 (MINUS_EXPR, type, arg00, arg10),
+ fold_build2 (MINUS_EXPR, type, arg01, arg11));
+ }
+ /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
+ else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
+ {
+ tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
+ tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
+ tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
+ if (tmp)
+ return fold_build2 (PLUS_EXPR, type, tmp, arg01);
+ }
+ }
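/* A minimal source-level sketch (not from the patch) of the new pointer
   subtraction fold: the byte offsets distribute over the pointer
   difference.  Names are hypothetical.  */
#include <stddef.h>

static ptrdiff_t
offset_diff (char *p, char *q, ptrdiff_t a, ptrdiff_t b)
{
  return (p + a) - (q + b);	/* -> (p - q) + (a - b) */
}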
/* A - (-B) -> A + B */
if (TREE_CODE (arg1) == NEGATE_EXPR)
return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
Also note that operand_equal_p is always false if an operand
is volatile. */
- if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
+ if ((! FLOAT_TYPE_P (type)
+ || (flag_unsafe_math_optimizations
+ && !HONOR_NANS (TYPE_MODE (type))
+ && !HONOR_INFINITIES (TYPE_MODE (type))))
&& operand_equal_p (arg0, arg1, 0))
return fold_convert (type, integer_zero_node);
}
}
- /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
- of the array. Loop optimizer sometimes produce this type of
- expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR)
- {
- tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
- if (tem)
- return fold_convert (type, tem);
- }
-
if (flag_unsafe_math_optimizations
&& (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
&& (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
}
}
+ /* Optimize a/root(b/c) into a*root(c/b). */
+ if (BUILTIN_ROOT_P (fcode1))
+ {
+ tree rootarg = CALL_EXPR_ARG (arg1, 0);
+
+ if (TREE_CODE (rootarg) == RDIV_EXPR)
+ {
+ tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
+ tree b = TREE_OPERAND (rootarg, 0);
+ tree c = TREE_OPERAND (rootarg, 1);
+
+ tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
+
+ tmp = build_call_expr (rootfn, 1, tmp);
+ return fold_build2 (MULT_EXPR, type, arg0, tmp);
+ }
+ }
+
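/* A minimal source-level sketch (not from the patch) of the new
   a/root(b/c) fold, valid only under -funsafe-math-optimizations.
   Names are hypothetical.  */
#include <math.h>

static double
div_by_root (double a, double b, double c)
{
  return a / sqrt (b / c);	/* -> a * sqrt (c / b) */
}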
/* Optimize x/expN(y) into x*expN(-y). */
if (BUILTIN_EXPONENT_P (fcode1))
{
&& code == EQ_EXPR)
return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
- /* If this is an equality comparison of the address of a non-weak
- object against zero, then we know the result. */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
- && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
- && integer_zerop (arg1))
- return constant_boolean_node (code != EQ_EXPR, type);
-
/* If this is an equality comparison of the address of two non-weak,
unaliased symbols neither of which are extern (since we do not
have access to attributes for externs), then we know the result. */
fold_convert (TREE_TYPE (arg0), arg1),
TREE_OPERAND (arg0, 1)));
+ /* Transform comparisons of the form X +- C CMP X. */
+ if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+ || POINTER_TYPE_P (TREE_TYPE (arg0))))
+ {
+ tree cst = TREE_OPERAND (arg0, 1);
+
+ if (code == EQ_EXPR
+ && !integer_zerop (cst))
+ return omit_two_operands (type, boolean_false_node,
+ TREE_OPERAND (arg0, 0), arg1);
+ else
+ return omit_two_operands (type, boolean_true_node,
+ TREE_OPERAND (arg0, 0), arg1);
+ }
+
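/* A minimal source-level sketch (not from the patch) of the X +- C CMP X
   fold above: for an integer X and a nonzero constant C, X + C can never
   compare equal to X, whether overflow wraps or is undefined.  Names are
   hypothetical.  */
static int
never_equal (int x)
{
  return (x + 4) == x;		/* folds to 0 */
}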
/* If we have X - Y == 0, we can convert that to X == Y and similarly
for !=. Don't do this for ordered comparisons due to overflow. */
if (TREE_CODE (arg0) == MINUS_EXPR
|| TREE_CODE (arg0) == ROUND_MOD_EXPR)
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
- tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
+ tree newtype = unsigned_type_for (TREE_TYPE (arg0));
tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
fold_convert (newtype,
TREE_OPERAND (arg0, 0)),
{
if (TYPE_UNSIGNED (itype))
{
- itype = lang_hooks.types.signed_type (itype);
+ itype = signed_type_for (itype);
arg00 = fold_convert (itype, arg00);
}
return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
arg01, arg11)),
arg10);
}
+
+ /* Attempt to simplify equality/inequality comparisons of complex
+ values. Only lower the comparison if the result is known or
+ can be simplified to a single scalar comparison. */
+ if ((TREE_CODE (arg0) == COMPLEX_EXPR
+ || TREE_CODE (arg0) == COMPLEX_CST)
+ && (TREE_CODE (arg1) == COMPLEX_EXPR
+ || TREE_CODE (arg1) == COMPLEX_CST))
+ {
+ tree real0, imag0, real1, imag1;
+ tree rcond, icond;
+
+ if (TREE_CODE (arg0) == COMPLEX_EXPR)
+ {
+ real0 = TREE_OPERAND (arg0, 0);
+ imag0 = TREE_OPERAND (arg0, 1);
+ }
+ else
+ {
+ real0 = TREE_REALPART (arg0);
+ imag0 = TREE_IMAGPART (arg0);
+ }
+
+ if (TREE_CODE (arg1) == COMPLEX_EXPR)
+ {
+ real1 = TREE_OPERAND (arg1, 0);
+ imag1 = TREE_OPERAND (arg1, 1);
+ }
+ else
+ {
+ real1 = TREE_REALPART (arg1);
+ imag1 = TREE_IMAGPART (arg1);
+ }
+
+ rcond = fold_binary (code, type, real0, real1);
+ if (rcond && TREE_CODE (rcond) == INTEGER_CST)
+ {
+ if (integer_zerop (rcond))
+ {
+ if (code == EQ_EXPR)
+ return omit_two_operands (type, boolean_false_node,
+ imag0, imag1);
+ return fold_build2 (NE_EXPR, type, imag0, imag1);
+ }
+ else
+ {
+ if (code == NE_EXPR)
+ return omit_two_operands (type, boolean_true_node,
+ imag0, imag1);
+ return fold_build2 (EQ_EXPR, type, imag0, imag1);
+ }
+ }
+
+ icond = fold_binary (code, type, imag0, imag1);
+ if (icond && TREE_CODE (icond) == INTEGER_CST)
+ {
+ if (integer_zerop (icond))
+ {
+ if (code == EQ_EXPR)
+ return omit_two_operands (type, boolean_false_node,
+ real0, real1);
+ return fold_build2 (NE_EXPR, type, real0, real1);
+ }
+ else
+ {
+ if (code == NE_EXPR)
+ return omit_two_operands (type, boolean_true_node,
+ real0, real1);
+ return fold_build2 (EQ_EXPR, type, real0, real1);
+ }
+ }
+ }
+
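/* A minimal source-level sketch (not from the patch) of the complex
   equality lowering above: when one component comparison already folds to a
   constant, the whole test collapses to the other component or to a
   constant.  Names are hypothetical.  */
#include <complex.h>

static int
known_imag_part (double re)
{
  /* The imaginary parts (1.0 vs. 0.0) are constants that differ, so the
     equality folds to 0 regardless of RE.  */
  return (re + 1.0 * I) == 3.0;
}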
return NULL_TREE;
case LT_EXPR:
if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
fold_overflow_warning (("assuming signed overflow does "
"not occur when assuming that "
- "(X - c) >= X is always true"),
+ "(X - c) >= X is always false"),
WARN_STRICT_OVERFLOW_ALL);
return constant_boolean_node (0, type);
}
return omit_one_operand (type, integer_zero_node, arg0);
case GE_EXPR:
- return fold_build2 (EQ_EXPR, type, arg0, arg1);
+ return fold_build2 (EQ_EXPR, type, op0, op1);
case LE_EXPR:
return omit_one_operand (type, integer_one_node, arg0);
case LT_EXPR:
- return fold_build2 (NE_EXPR, type, arg0, arg1);
+ return fold_build2 (NE_EXPR, type, op0, op1);
/* The GE_EXPR and LT_EXPR cases above are not normally
reached because of previous transformations. */
case GT_EXPR:
arg1 = const_binop (PLUS_EXPR, arg1,
build_int_cst (TREE_TYPE (arg1), 1), 0);
- return fold_build2 (EQ_EXPR, type, arg0, arg1);
+ return fold_build2 (EQ_EXPR, type,
+ fold_convert (TREE_TYPE (arg1), arg0),
+ arg1);
case LE_EXPR:
arg1 = const_binop (PLUS_EXPR, arg1,
build_int_cst (TREE_TYPE (arg1), 1), 0);
- return fold_build2 (NE_EXPR, type, arg0, arg1);
+ return fold_build2 (NE_EXPR, type,
+ fold_convert (TREE_TYPE (arg1), arg0),
+ arg1);
default:
break;
}
return omit_one_operand (type, integer_zero_node, arg0);
case LE_EXPR:
- return fold_build2 (EQ_EXPR, type, arg0, arg1);
+ return fold_build2 (EQ_EXPR, type, op0, op1);
case GE_EXPR:
return omit_one_operand (type, integer_one_node, arg0);
{
case GE_EXPR:
arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold_build2 (NE_EXPR, type, arg0, arg1);
+ return fold_build2 (NE_EXPR, type,
+ fold_convert (TREE_TYPE (arg1), arg0),
+ arg1);
case LT_EXPR:
arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold_build2 (EQ_EXPR, type, arg0, arg1);
+ return fold_build2 (EQ_EXPR, type,
+ fold_convert (TREE_TYPE (arg1), arg0),
+ arg1);
default:
break;
}
and X >= signed_max+1 because previous transformations. */
if (code == LE_EXPR || code == GT_EXPR)
{
- tree st0, st1;
- st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
- st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
- return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
- type, fold_convert (st0, arg0),
- build_int_cst (st1, 0));
+ tree st;
+ st = signed_type_for (TREE_TYPE (arg1));
+ return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
+ type, fold_convert (st, arg0),
+ build_int_cst (st, 0));
}
}
}
if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
&& (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
{
- tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
+ tem_type = signed_type_for (TREE_TYPE (tem));
tem = fold_convert (tem_type, tem);
}
else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
&& (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
{
- tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
+ tem_type = unsigned_type_for (TREE_TYPE (tem));
tem = fold_convert (tem_type, tem);
}
else
gcc_unreachable ();
case BIT_FIELD_REF:
- if (TREE_CODE (arg0) == VECTOR_CST
+ if ((TREE_CODE (arg0) == VECTOR_CST
+ || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
&& type == TREE_TYPE (TREE_TYPE (arg0))
&& host_integerp (arg1, 1)
&& host_integerp (op2, 1))
&& (idx = idx / width)
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
{
- tree elements = TREE_VECTOR_CST_ELTS (arg0);
+ tree elements = NULL_TREE;
+
+ if (TREE_CODE (arg0) == VECTOR_CST)
+ elements = TREE_VECTOR_CST_ELTS (arg0);
+ else
+ {
+ unsigned HOST_WIDE_INT idx;
+ tree value;
+
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
+ elements = tree_cons (NULL_TREE, value, elements);
+ }
while (idx-- > 0 && elements)
elements = TREE_CHAIN (elements);
if (elements)
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
if (TREE_CODE_CLASS (code) != tcc_type
&& TREE_CODE_CLASS (code) != tcc_declaration
- && code != TREE_LIST)
+ && code != TREE_LIST
+ && code != SSA_NAME)
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
switch (TREE_CODE_CLASS (code))
{
return tem;
}
+/* Helper function for outputting the checksum of a tree T. When
+ debugging with gdb, you can "define mynext" to be "next" followed
+ by "call debug_fold_checksum (op0)", then just trace down till the
+ outputs differ. */
+
+void
+debug_fold_checksum (tree t)
+{
+ int i;
+ unsigned char checksum[16];
+ struct md5_ctx ctx;
+ htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
+
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (t, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum);
+ htab_empty (ht);
+
+ for (i = 0; i < 16; i++)
+ fprintf (stderr, "%d ", checksum[i]);
+
+ fprintf (stderr, "\n");
+}
+
/* Fold a binary tree expression with code CODE of type TYPE with
operands OP0 and OP1. Return a folded expression if successful.
Otherwise, return a tree expression with code CODE of type TYPE
return tem;
}
-/* Fold a CALL_EXPR expression of type TYPE with operands FN and ARGLIST
- and a null static chain.
+/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
+ arguments in ARGARRAY, and a null static chain.
Return a folded expression if successful. Otherwise, return a CALL_EXPR
- of type TYPE from the given operands as constructed by build_call_list. */
+ of type TYPE from the given operands as constructed by build_call_array. */
tree
-fold_build_call_list (tree type, tree fn, tree arglist)
+fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
{
tree tem;
#ifdef ENABLE_FOLD_CHECKING
checksum_after_arglist[16];
struct md5_ctx ctx;
htab_t ht;
+ int i;
ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
md5_init_ctx (&ctx);
htab_empty (ht);
md5_init_ctx (&ctx);
- fold_checksum_tree (arglist, &ctx, ht);
+ for (i = 0; i < nargs; i++)
+ fold_checksum_tree (argarray[i], &ctx, ht);
md5_finish_ctx (&ctx, checksum_before_arglist);
htab_empty (ht);
#endif
- tem = fold_builtin_call_list (type, fn, arglist);
+ tem = fold_builtin_call_array (type, fn, nargs, argarray);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
fold_check_failed (fn, tem);
md5_init_ctx (&ctx);
- fold_checksum_tree (arglist, &ctx, ht);
+ for (i = 0; i < nargs; i++)
+ fold_checksum_tree (argarray[i], &ctx, ht);
md5_finish_ctx (&ctx, checksum_after_arglist);
htab_delete (ht);
if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
- fold_check_failed (arglist, tem);
+ fold_check_failed (NULL_TREE, tem);
#endif
return tem;
}
}
tree
-fold_build_call_list_initializer (tree type, tree fn, tree arglist)
+fold_build_call_array_initializer (tree type, tree fn,
+ int nargs, tree *argarray)
{
tree result;
START_FOLD_INIT;
- result = fold_build_call_list (type, fn, arglist);
+ result = fold_build_call_array (type, fn, nargs, argarray);
END_FOLD_INIT;
return result;
(where the same SAVE_EXPR (J) is used in the original and the
transformed version). */
-static int
+int
multiple_of_p (tree type, tree top, tree bottom)
{
if (operand_equal_p (top, bottom, 0))
case INTEGER_CST:
if (TREE_CODE (bottom) != INTEGER_CST
+ || integer_zerop (bottom)
|| (TYPE_UNSIGNED (type)
&& (tree_int_cst_sgn (top) < 0
|| tree_int_cst_sgn (bottom) < 0)))
case REAL_CST:
return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (FLOAT_TYPE_P (TREE_TYPE (t)))
return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
case SAVE_EXPR:
case NON_LVALUE_EXPR:
case FLOAT_EXPR:
+ case FIX_TRUNC_EXPR:
return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
strict_overflow_p);
CASE_FLT_FN (BUILT_IN_NEARBYINT):
CASE_FLT_FN (BUILT_IN_RINT):
CASE_FLT_FN (BUILT_IN_ROUND):
+ CASE_FLT_FN (BUILT_IN_SCALB):
+ CASE_FLT_FN (BUILT_IN_SCALBLN):
+ CASE_FLT_FN (BUILT_IN_SCALBN):
CASE_FLT_FN (BUILT_IN_SIGNBIT):
+ CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
CASE_FLT_FN (BUILT_IN_SINH):
CASE_FLT_FN (BUILT_IN_TANH):
CASE_FLT_FN (BUILT_IN_TRUNC):
/* ... fall through ... */
default:
- if (truth_value_p (TREE_CODE (t)))
- /* Truth values evaluate to 0 or 1, which is nonnegative. */
- return true;
+ {
+ tree type = TREE_TYPE (t);
+ if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
+ && truth_value_p (TREE_CODE (t)))
+ /* Truth values evaluate to 0 or 1, which is nonnegative unless we
+ have a signed:1 type (where the values are -1 and 0). */
+ return true;
+ }
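/* A minimal illustrative sketch (not from the patch) of the signed:1 corner
   case guarded above: a signed one-bit value takes the values 0 and -1, so
   "truth-valued" does not imply nonnegative.  Names are hypothetical.  */
struct onebit { signed int b : 1; };

static int
maybe_negative (struct onebit s)
{
  return s.b;			/* either 0 or -1 */
}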
}
/* We don't know sign of `t', so be conservative and return false. */
case INTEGER_CST:
return !integer_zerop (t);
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (TYPE_OVERFLOW_UNDEFINED (type))
{
return constant_boolean_node (real_compare (code, c0, c1), type);
}
+ /* Handle equality/inequality of complex constants. */
+ if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
+ {
+ tree rcond = fold_relational_const (code, type,
+ TREE_REALPART (op0),
+ TREE_REALPART (op1));
+ tree icond = fold_relational_const (code, type,
+ TREE_IMAGPART (op0),
+ TREE_IMAGPART (op1));
+ if (code == EQ_EXPR)
+ return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
+ else if (code == NE_EXPR)
+ return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
+ else
+ return NULL_TREE;
+ }
+
/* From here on we only handle LT, LE, GT, GE, EQ and NE.
To compute GT, swap the arguments and do LT.
return build1 (CLEANUP_POINT_EXPR, type, expr);
}
-/* Build an expression for the address of T. Folds away INDIRECT_REF to
- avoid confusing the gimplify process. */
-
-tree
-build_fold_addr_expr_with_type (tree t, tree ptrtype)
-{
- /* The size of the object is not relevant when talking about its address. */
- if (TREE_CODE (t) == WITH_SIZE_EXPR)
- t = TREE_OPERAND (t, 0);
-
- /* Note: doesn't apply to ALIGN_INDIRECT_REF */
- if (TREE_CODE (t) == INDIRECT_REF
- || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
- {
- t = TREE_OPERAND (t, 0);
- if (TREE_TYPE (t) != ptrtype)
- t = build1 (NOP_EXPR, ptrtype, t);
- }
- else
- {
- tree base = t;
-
- while (handled_component_p (base))
- base = TREE_OPERAND (base, 0);
- if (DECL_P (base))
- TREE_ADDRESSABLE (base) = 1;
-
- t = build1 (ADDR_EXPR, ptrtype, t);
- }
-
- return t;
-}
-
-tree
-build_fold_addr_expr (tree t)
-{
- return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
-}
-
/* Given a pointer value OP0 and a type TYPE, return a simplified version
of an indirection through OP0, or NULL_TREE if no simplification is
possible. */
}
/* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
- if (TREE_CODE (sub) == PLUS_EXPR
+ if (TREE_CODE (sub) == POINTER_PLUS_EXPR
&& TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
{
tree op00 = TREE_OPERAND (sub, 0);
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
poffset, &mode, &unsignedp, &volatilep,
false);
- core = build_fold_addr_expr (core);
+ core = fold_addr_expr (core);
}
else
{