You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/*@@ This file should be rewritten to use an arbitrary precision
@@ representation for "struct tree_int_cst" and "struct tree_real_cst".
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
-static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
-static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
return overflow;
}
+
+/* If ARG2 divides ARG1 with zero remainder, carries out the division
+ of type CODE and returns the quotient.
+ Otherwise returns NULL_TREE. */
+
+static tree
+div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
+{
+ unsigned HOST_WIDE_INT int1l, int2l;
+ HOST_WIDE_INT int1h, int2h;
+ unsigned HOST_WIDE_INT quol, reml;
+ HOST_WIDE_INT quoh, remh;
+ tree type = TREE_TYPE (arg1);
+ int uns = TYPE_UNSIGNED (type);
+
+ int1l = TREE_INT_CST_LOW (arg1);
+ int1h = TREE_INT_CST_HIGH (arg1);
+ int2l = TREE_INT_CST_LOW (arg2);
+ int2h = TREE_INT_CST_HIGH (arg2);
+
+ div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
+ &quol, &quoh, &reml, &remh);
+ if (remh != 0 || reml != 0)
+ return NULL_TREE;
+
+ return build_int_cst_wide (type, quol, quoh);
+}
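+
+/* Illustrative values (not part of the change itself): with integer
+   constants, div_if_zero_remainder (EXACT_DIV_EXPR, 12, 4) yields 3,
+   while div_if_zero_remainder (EXACT_DIV_EXPR, 12, 5) yields
+   NULL_TREE because the remainder is nonzero.  */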
\f
/* Return true if built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x). */
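
/* For example, this holds for odd functions such as sin and cbrt,
   where sin(-x) == -sin(x); it does not hold for cos.  */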
case RDIV_EXPR:
{
+ tree t1, t2, real, imag;
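+	  /* A sketch of the algebra being implemented:
+	     (r1 + i1*i) / (r2 + i2*i)
+	       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2).  */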
tree magsquared
= const_binop (PLUS_EXPR,
const_binop (MULT_EXPR, r2, r2, notrunc),
const_binop (MULT_EXPR, i2, i2, notrunc),
notrunc);
- t = build_complex (type,
- const_binop
- (INTEGRAL_TYPE_P (TREE_TYPE (r1))
- ? TRUNC_DIV_EXPR : RDIV_EXPR,
- const_binop (PLUS_EXPR,
- const_binop (MULT_EXPR, r1, r2,
- notrunc),
- const_binop (MULT_EXPR, i1, i2,
- notrunc),
- notrunc),
- magsquared, notrunc),
- const_binop
- (INTEGRAL_TYPE_P (TREE_TYPE (r1))
- ? TRUNC_DIV_EXPR : RDIV_EXPR,
- const_binop (MINUS_EXPR,
- const_binop (MULT_EXPR, i1, r2,
- notrunc),
- const_binop (MULT_EXPR, r1, i2,
- notrunc),
- notrunc),
- magsquared, notrunc));
+ t1 = const_binop (PLUS_EXPR,
+ const_binop (MULT_EXPR, r1, r2, notrunc),
+ const_binop (MULT_EXPR, i1, i2, notrunc),
+ notrunc);
+ t2 = const_binop (MINUS_EXPR,
+ const_binop (MULT_EXPR, i1, r2, notrunc),
+ const_binop (MULT_EXPR, r1, i2, notrunc),
+ notrunc);
+
+ if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
+ {
+ real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
+ imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
+ }
+ else
+ {
+ real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
+ imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
+ if (!real || !imag)
+ return NULL_TREE;
+ }
+
+ t = build_complex (type, real, imag);
}
break;
gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
|| TREE_CODE (orig) == VECTOR_TYPE);
- return fold_build1 (NOP_EXPR, type, arg);
+ return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
case VOID_TYPE:
return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
}
}
\f
-/* Return false if expr can be assumed not to be an value, true
+/* Return false if expr can be assumed not to be an lvalue, true
otherwise. */
static bool
comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
as well: if reversing the comparison is unsafe, return ERROR_MARK. */
-static enum tree_code
+enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
if (honor_nans && flag_trapping_math)
{
case EQ_EXPR:
case NE_EXPR:
+ case ORDERED_EXPR:
+ case UNORDERED_EXPR:
+ case LTGT_EXPR:
+ case UNEQ_EXPR:
return code;
case GT_EXPR:
return LT_EXPR;
return GT_EXPR;
case LE_EXPR:
return GE_EXPR;
+ case UNGT_EXPR:
+ return UNLT_EXPR;
+ case UNGE_EXPR:
+ return UNLE_EXPR;
+ case UNLT_EXPR:
+ return UNGT_EXPR;
+ case UNLE_EXPR:
+ return UNGE_EXPR;
default:
gcc_unreachable ();
}
v2 = TREE_CHAIN (v2);
}
- return 1;
+ return v1 == v2;
}
case COMPLEX_CST:
return TREE_OPERAND (arg, 0);
case COND_EXPR:
- return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
- invert_truthvalue (TREE_OPERAND (arg, 1)),
- invert_truthvalue (TREE_OPERAND (arg, 2)));
+ {
+ tree arg1 = TREE_OPERAND (arg, 1);
+ tree arg2 = TREE_OPERAND (arg, 2);
+ /* A COND_EXPR may have a throw as one operand, which
+ then has void type. Just leave void operands
+ as they are. */
+ return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
+ VOID_TYPE_P (TREE_TYPE (arg1))
+ ? arg1 : invert_truthvalue (arg1),
+ VOID_TYPE_P (TREE_TYPE (arg2))
+ ? arg2 : invert_truthvalue (arg2));
+ }
case COMPOUND_EXPR:
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
return fold_build2 (TREE_CODE (arg0), type, common,
fold_build2 (code, type, left, right));
}
+
+/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
+ with code CODE. This optimization is unsafe. */
+static tree
+distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
+{
+ bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
+ bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
+
+ /* (A / C) +- (B / C) -> (A +- B) / C. */
+ if (mul0 == mul1
+ && operand_equal_p (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 1), 0))
+ return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
+ fold_build2 (code, type,
+ TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0)),
+ TREE_OPERAND (arg0, 1));
+
+ /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
+ if (operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0), 0)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+ && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
+ {
+ REAL_VALUE_TYPE r0, r1;
+ r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
+ r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
+ if (!mul0)
+ real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
+ if (!mul1)
+ real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
+ real_arithmetic (&r0, code, &r0, &r1);
+ return fold_build2 (MULT_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ build_real (type, r0));
+ }
+
+ return NULL_TREE;
+}
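+
+/* Illustrative cases (not part of the change itself): x/3.0 + y/3.0
+   becomes (x + y) / 3.0 by the first pattern, and x/2.0 + x/4.0
+   becomes x * 0.75 by the second.  */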
\f
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
TREE_THIS_VOLATILE (lhs) = 1;
}
- rhs = fold (const_binop (BIT_AND_EXPR,
- const_binop (LSHIFT_EXPR,
- fold_convert (unsigned_type, rhs),
- size_int (lbitpos), 0),
- mask, 0));
+ rhs = const_binop (BIT_AND_EXPR,
+ const_binop (LSHIFT_EXPR,
+ fold_convert (unsigned_type, rhs),
+ size_int (lbitpos), 0),
+ mask, 0);
return build2 (code, compare_type,
build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
tree etype = TREE_TYPE (exp);
tree value;
+#ifdef HAVE_canonicalize_funcptr_for_compare
+ /* Disable this optimization for function pointer expressions
+ on targets that require function pointer canonicalization. */
+ if (HAVE_canonicalize_funcptr_for_compare
+ && TREE_CODE (etype) == POINTER_TYPE
+ && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
+ return NULL_TREE;
+#endif
+
if (! in_p)
{
value = build_range_check (type, exp, 1, low, high);
return fold_convert (type, integer_one_node);
if (low == 0)
- return fold_build2 (LE_EXPR, type, exp, high);
+ return fold_build2 (LE_EXPR, type, exp,
+ fold_convert (etype, high));
if (high == 0)
- return fold_build2 (GE_EXPR, type, exp, low);
+ return fold_build2 (GE_EXPR, type, exp,
+ fold_convert (etype, low));
if (operand_equal_p (low, high, 0))
- return fold_build2 (EQ_EXPR, type, exp, low);
+ return fold_build2 (EQ_EXPR, type, exp,
+ fold_convert (etype, low));
if (integer_zerop (low))
{
}
value = const_binop (MINUS_EXPR, high, low, 0);
- if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
+ if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
+ && ! TYPE_UNSIGNED (etype))
{
tree utype, minv, maxv;
case INTEGER_TYPE:
case ENUMERAL_TYPE:
case CHAR_TYPE:
+ /* There is no requirement that LOW be within the range of ETYPE
+ if the latter is a subtype. It must, however, be within the base
+ type of ETYPE. So be sure we do the subtraction in that type. */
+ if (TREE_TYPE (etype))
+ etype = TREE_TYPE (etype);
utype = lang_hooks.types.unsigned_type (etype);
maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
/* Return true if expr looks like an ARRAY_REF and set base and
offset to the appropriate trees. If there is no offset,
- offset is set to NULL_TREE. */
+ offset is set to NULL_TREE. Base will be canonicalized to
+ something you can get the element type from using
+ TREE_TYPE (TREE_TYPE (base)). */
static bool
extract_array_ref (tree expr, tree *base, tree *offset)
{
- /* We have to be careful with stripping nops as with the
- base type the meaning of the offset can change. */
- tree inner_expr = expr;
- STRIP_NOPS (inner_expr);
/* One canonical form is a PLUS_EXPR with the first
argument being an ADDR_EXPR with a possible NOP_EXPR
attached. */
if (TREE_CODE (expr) == PLUS_EXPR)
{
tree op0 = TREE_OPERAND (expr, 0);
+ tree inner_base, dummy1;
+      /* Strip NOP_EXPRs here because the C frontends and/or folders
+	 may present us with expressions like (int *)&x.a + 4B.  */
STRIP_NOPS (op0);
- if (TREE_CODE (op0) == ADDR_EXPR)
+ if (extract_array_ref (op0, &inner_base, &dummy1))
{
- *base = TREE_OPERAND (expr, 0);
- *offset = TREE_OPERAND (expr, 1);
+ *base = inner_base;
+ if (dummy1 == NULL_TREE)
+ *offset = TREE_OPERAND (expr, 1);
+ else
+ *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
+ dummy1, TREE_OPERAND (expr, 1));
return true;
}
}
offset. For other arguments to the ADDR_EXPR we assume
zero offset and as such do not care about the ADDR_EXPR
type and strip possible nops from it. */
- else if (TREE_CODE (inner_expr) == ADDR_EXPR)
+ else if (TREE_CODE (expr) == ADDR_EXPR)
{
- tree op0 = TREE_OPERAND (inner_expr, 0);
+ tree op0 = TREE_OPERAND (expr, 0);
if (TREE_CODE (op0) == ARRAY_REF)
{
- *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
+ *base = TREE_OPERAND (op0, 0);
*offset = TREE_OPERAND (op0, 1);
}
else
{
- *base = inner_expr;
+ /* Handle array-to-pointer decay as &a. */
+ if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
+ *base = TREE_OPERAND (expr, 0);
+ else
+ *base = expr;
*offset = NULL_TREE;
}
return true;
}
+ /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
+ else if (SSA_VAR_P (expr)
+ && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
+ {
+ *base = expr;
+ *offset = NULL_TREE;
+ return true;
+ }
return false;
}
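
/* As an illustration (hypothetical trees): for &a[i] this sets *base
   to "a" and *offset to "i"; for a plain pointer variable p it sets
   *base to "p" and *offset to NULL_TREE.  */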
return NULL_TREE;
arg1_unw = get_unwidened (arg1, shorter_type);
- if (!arg1_unw)
- return NULL_TREE;
/* If possible, express the comparison in the shorter mode. */
if ((code == EQ_EXPR || code == NE_EXPR
return fold_build2 (code, type, arg0_unw,
fold_convert (shorter_type, arg1_unw));
- if (TREE_CODE (arg1_unw) != INTEGER_CST)
+ if (TREE_CODE (arg1_unw) != INTEGER_CST
+ || TREE_CODE (shorter_type) != INTEGER_TYPE
+ || !int_fits_type_p (arg1_unw, shorter_type))
return NULL_TREE;
/* If we are comparing with the integer that does not fit into the range
}
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
- step of the array. ADDR is the address. MULT is the multiplicative expression.
+ step of the array. Reconstructs s and delta in the case of s * delta
+ being an integer constant (and thus already folded).
+ ADDR is the address. MULT is the multiplicative expression.
If the function succeeds, the new address expression is returned. Otherwise
NULL_TREE is returned. */
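/* For example (a sketch): with 4-byte array elements, &a[i] + 4 * j
   can become &a[i + j].  */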
static tree
-try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
+try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
tree s, delta, step;
- tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
tree ref = TREE_OPERAND (addr, 0), pref;
tree ret, pos;
tree itype;
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
-
- if (TREE_CODE (arg0) == INTEGER_CST)
+ /* Canonicalize op1 into a possibly non-constant delta
+ and an INTEGER_CST s. */
+ if (TREE_CODE (op1) == MULT_EXPR)
{
- s = arg0;
- delta = arg1;
+ tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
+
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+
+ if (TREE_CODE (arg0) == INTEGER_CST)
+ {
+ s = arg0;
+ delta = arg1;
+ }
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ {
+ s = arg1;
+ delta = arg0;
+ }
+ else
+ return NULL_TREE;
}
- else if (TREE_CODE (arg1) == INTEGER_CST)
+ else if (TREE_CODE (op1) == INTEGER_CST)
{
- s = arg1;
- delta = arg0;
+ delta = op1;
+ s = NULL_TREE;
}
else
- return NULL_TREE;
+ {
+      /* Otherwise, treat op1 as delta * 1.  */
+ delta = op1;
+ s = integer_one_node;
+ }
for (;; ref = TREE_OPERAND (ref, 0))
{
if (TREE_CODE (ref) == ARRAY_REF)
{
- step = array_ref_element_size (ref);
-
- if (TREE_CODE (step) != INTEGER_CST)
+ itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
+ if (! itype)
continue;
- itype = TREE_TYPE (step);
-
- /* If the type sizes do not match, we might run into problems
- when one of them would overflow. */
- if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
+ step = array_ref_element_size (ref);
+ if (TREE_CODE (step) != INTEGER_CST)
continue;
- if (!operand_equal_p (step, fold_convert (itype, s), 0))
- continue;
+ if (s)
+ {
+ if (! tree_int_cst_equal (step, s))
+ continue;
+ }
+ else
+ {
+	  /* Check whether delta is a multiple of step.  */
+ tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
+ if (! tmp)
+ continue;
+ delta = tmp;
+ }
- delta = fold_convert (itype, delta);
break;
}
}
TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
- TREE_OPERAND (pos, 1),
- delta);
+ fold_convert (itype,
+ TREE_OPERAND (pos, 1)),
+ fold_convert (itype, delta));
- return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
+ return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
return fold_build2 (GE_EXPR, type, a, y);
}
-/* Fold complex addition when both components are accessible by parts.
- Return non-null if successful. CODE should be PLUS_EXPR for addition,
- or MINUS_EXPR for subtraction. */
-
-static tree
-fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
-{
- tree ar, ai, br, bi, rr, ri, inner_type;
-
- if (TREE_CODE (ac) == COMPLEX_EXPR)
- ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
- else if (TREE_CODE (ac) == COMPLEX_CST)
- ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
- else
- return NULL;
-
- if (TREE_CODE (bc) == COMPLEX_EXPR)
- br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
- else if (TREE_CODE (bc) == COMPLEX_CST)
- br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
- else
- return NULL;
-
- inner_type = TREE_TYPE (type);
-
- rr = fold_build2 (code, inner_type, ar, br);
- ri = fold_build2 (code, inner_type, ai, bi);
-
- return fold_build2 (COMPLEX_EXPR, type, rr, ri);
-}
-
-/* Perform some simplifications of complex multiplication when one or more
- of the components are constants or zeros. Return non-null if successful. */
-
-tree
-fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
-{
- tree rr, ri, inner_type, zero;
- bool ar0, ai0, br0, bi0, bi1;
-
- inner_type = TREE_TYPE (type);
- zero = NULL;
-
- if (SCALAR_FLOAT_TYPE_P (inner_type))
- {
- ar0 = ai0 = br0 = bi0 = bi1 = false;
-
- /* We're only interested in +0.0 here, thus we don't use real_zerop. */
-
- if (TREE_CODE (ar) == REAL_CST
- && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
- ar0 = true, zero = ar;
-
- if (TREE_CODE (ai) == REAL_CST
- && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
- ai0 = true, zero = ai;
-
- if (TREE_CODE (br) == REAL_CST
- && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
- br0 = true, zero = br;
-
- if (TREE_CODE (bi) == REAL_CST)
- {
- if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
- bi0 = true, zero = bi;
- else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
- bi1 = true;
- }
- }
- else
- {
- ar0 = integer_zerop (ar);
- if (ar0)
- zero = ar;
- ai0 = integer_zerop (ai);
- if (ai0)
- zero = ai;
- br0 = integer_zerop (br);
- if (br0)
- zero = br;
- bi0 = integer_zerop (bi);
- if (bi0)
- {
- zero = bi;
- bi1 = false;
- }
- else
- bi1 = integer_onep (bi);
- }
-
- /* We won't optimize anything below unless something is zero. */
- if (zero == NULL)
- return NULL;
-
- if (ai0 && br0 && bi1)
- {
- rr = zero;
- ri = ar;
- }
- else if (ai0 && bi0)
- {
- rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
- ri = zero;
- }
- else if (ai0 && br0)
- {
- rr = zero;
- ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
- }
- else if (ar0 && bi0)
- {
- rr = zero;
- ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
- }
- else if (ar0 && br0)
- {
- rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
- rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
- ri = zero;
- }
- else if (bi0)
- {
- rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
- ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
- }
- else if (ai0)
- {
- rr = fold_build2 (MULT_EXPR, inner_type, ar, br);
- ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
- }
- else if (br0)
- {
- rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
- rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
- ri = fold_build2 (MULT_EXPR, inner_type, ar, bi);
- }
- else if (ar0)
- {
- rr = fold_build2 (MULT_EXPR, inner_type, ai, bi);
- rr = fold_build1 (NEGATE_EXPR, inner_type, rr);
- ri = fold_build2 (MULT_EXPR, inner_type, ai, br);
- }
- else
- return NULL;
-
- return fold_build2 (COMPLEX_EXPR, type, rr, ri);
-}
-
-static tree
-fold_complex_mult (tree type, tree ac, tree bc)
-{
- tree ar, ai, br, bi;
-
- if (TREE_CODE (ac) == COMPLEX_EXPR)
- ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
- else if (TREE_CODE (ac) == COMPLEX_CST)
- ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
- else
- return NULL;
-
- if (TREE_CODE (bc) == COMPLEX_EXPR)
- br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
- else if (TREE_CODE (bc) == COMPLEX_CST)
- br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
- else
- return NULL;
-
- return fold_complex_mult_parts (type, ar, ai, br, bi);
-}
-
-/* Perform some simplifications of complex division when one or more of
- the components are constants or zeros. Return non-null if successful. */
-
-tree
-fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
- enum tree_code code)
-{
- tree rr, ri, inner_type, zero;
- bool ar0, ai0, br0, bi0, bi1;
-
- inner_type = TREE_TYPE (type);
- zero = NULL;
-
- if (SCALAR_FLOAT_TYPE_P (inner_type))
- {
- ar0 = ai0 = br0 = bi0 = bi1 = false;
-
- /* We're only interested in +0.0 here, thus we don't use real_zerop. */
-
- if (TREE_CODE (ar) == REAL_CST
- && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
- ar0 = true, zero = ar;
-
- if (TREE_CODE (ai) == REAL_CST
- && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
- ai0 = true, zero = ai;
-
- if (TREE_CODE (br) == REAL_CST
- && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
- br0 = true, zero = br;
-
- if (TREE_CODE (bi) == REAL_CST)
- {
- if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
- bi0 = true, zero = bi;
- else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
- bi1 = true;
- }
- }
- else
- {
- ar0 = integer_zerop (ar);
- if (ar0)
- zero = ar;
- ai0 = integer_zerop (ai);
- if (ai0)
- zero = ai;
- br0 = integer_zerop (br);
- if (br0)
- zero = br;
- bi0 = integer_zerop (bi);
- if (bi0)
- {
- zero = bi;
- bi1 = false;
- }
- else
- bi1 = integer_onep (bi);
- }
-
- /* We won't optimize anything below unless something is zero. */
- if (zero == NULL)
- return NULL;
-
- if (ai0 && bi0)
- {
- rr = fold_build2 (code, inner_type, ar, br);
- ri = zero;
- }
- else if (ai0 && br0)
- {
- rr = zero;
- ri = fold_build2 (code, inner_type, ar, bi);
- ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
- }
- else if (ar0 && bi0)
- {
- rr = zero;
- ri = fold_build2 (code, inner_type, ai, br);
- }
- else if (ar0 && br0)
- {
- rr = fold_build2 (code, inner_type, ai, bi);
- ri = zero;
- }
- else if (bi0)
- {
- rr = fold_build2 (code, inner_type, ar, br);
- ri = fold_build2 (code, inner_type, ai, br);
- }
- else if (br0)
- {
- rr = fold_build2 (code, inner_type, ai, bi);
- ri = fold_build2 (code, inner_type, ar, bi);
- ri = fold_build1 (NEGATE_EXPR, inner_type, ri);
- }
- else
- return NULL;
-
- return fold_build2 (COMPLEX_EXPR, type, rr, ri);
-}
-
-static tree
-fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
-{
- tree ar, ai, br, bi;
-
- if (TREE_CODE (ac) == COMPLEX_EXPR)
- ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
- else if (TREE_CODE (ac) == COMPLEX_CST)
- ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
- else
- return NULL;
-
- if (TREE_CODE (bc) == COMPLEX_EXPR)
- br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
- else if (TREE_CODE (bc) == COMPLEX_CST)
- br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
- else
- return NULL;
-
- return fold_complex_div_parts (type, ar, ai, br, bi, code);
-}
-
/* Fold a unary expression of code CODE and type TYPE with operand
OP0. Return the folded expression if folding is successful.
Otherwise, return NULL_TREE. */
arg0 = op0;
if (arg0)
{
- if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
+ if (code == NOP_EXPR || code == CONVERT_EXPR
+ || code == FLOAT_EXPR || code == ABS_EXPR)
{
- /* Don't use STRIP_NOPS, because signedness of argument type matters. */
+ /* Don't use STRIP_NOPS, because signedness of argument type
+ matters. */
STRIP_SIGN_NOPS (arg0);
}
else
return fold_build1 (code, type, TREE_OPERAND (op0, 0));
}
+ /* Handle (T *)&A.B.C for A being of type T and B and C
+ living at offset zero. This occurs frequently in
+ C++ upcasting and then accessing the base. */
+ if (TREE_CODE (op0) == ADDR_EXPR
+ && POINTER_TYPE_P (type)
+ && handled_component_p (TREE_OPERAND (op0, 0)))
+ {
+ HOST_WIDE_INT bitsize, bitpos;
+ tree offset;
+ enum machine_mode mode;
+ int unsignedp, volatilep;
+ tree base = TREE_OPERAND (op0, 0);
+ base = get_inner_reference (base, &bitsize, &bitpos, &offset,
+ &mode, &unsignedp, &volatilep, false);
+ /* If the reference was to a (constant) zero offset, we can use
+ the address of the base if it has the same base type
+ as the result type. */
+ if (! offset && bitpos == 0
+ && TYPE_MAIN_VARIANT (TREE_TYPE (type))
+ == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
+ return fold_convert (type, build_fold_addr_expr (base));
+ }
+
if (TREE_CODE (op0) == MODIFY_EXPR
&& TREE_CONSTANT (TREE_OPERAND (op0, 1))
/* Detect assigning a bitfield. */
TREE_TYPE (targ0),
targ0));
}
- else if (tree_expr_nonnegative_p (arg0))
+ /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
+ else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
return arg0;
/* Strip sign ops from argument. */
return fold_convert (type, tem);
}
- if (TREE_CODE_CLASS (code) == tcc_comparison
- && TREE_CODE (arg0) == COMPOUND_EXPR)
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
- else if (TREE_CODE_CLASS (code) == tcc_comparison
- && TREE_CODE (arg1) == COMPOUND_EXPR)
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
- fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
- else if (TREE_CODE_CLASS (code) == tcc_binary
- || TREE_CODE_CLASS (code) == tcc_comparison)
+ if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison)
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold_build2 (code, type, TREE_OPERAND (arg0, 1),
- arg1));
+ fold_build2 (code, type,
+ TREE_OPERAND (arg0, 1), op1));
if (TREE_CODE (arg1) == COMPOUND_EXPR
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
fold_build2 (code, type,
- arg0, TREE_OPERAND (arg1, 1)));
+ op0, TREE_OPERAND (arg1, 1)));
if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
{
&& integer_onep (arg1))
return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
- if (TREE_CODE (type) == COMPLEX_TYPE)
- {
- tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
- if (tem)
- return tem;
- }
-
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
/* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
of the array. Loop optimizer sometimes produce this type of
expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (arg1) == MULT_EXPR)
+ if (TREE_CODE (arg0) == ADDR_EXPR)
{
tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
if (tem)
- return fold_convert (type, fold (tem));
+ return fold_convert (type, tem);
}
- else if (TREE_CODE (arg1) == ADDR_EXPR
- && TREE_CODE (arg0) == MULT_EXPR)
+ else if (TREE_CODE (arg1) == ADDR_EXPR)
{
tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
if (tem)
- return fold_convert (type, fold (tem));
+ return fold_convert (type, tem);
}
}
else
fold_convert (type, tem));
}
+ if (flag_unsafe_math_optimizations
+ && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
+ && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
+ && (tem = distribute_real_division (code, type, arg0, arg1)))
+ return tem;
+
/* Convert x+x into x*2.0. */
if (operand_equal_p (arg0, arg1, 0)
&& SCALAR_FLOAT_TYPE_P (type))
&& integer_all_onesp (arg0))
return fold_build1 (BIT_NOT_EXPR, type, arg1);
- if (TREE_CODE (type) == COMPLEX_TYPE)
- {
- tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
- if (tem)
- return tem;
- }
-
if (! FLOAT_TYPE_P (type))
{
if (! wins && integer_zerop (arg0))
&& (TREE_CODE (arg1) != REAL_CST
|| REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
|| (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
- return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1));
+ return fold_build2 (PLUS_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, negate_expr (arg1)));
/* Try folding difference of addresses. */
{
/* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
of the array. Loop optimizer sometimes produce this type of
expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (arg1) == MULT_EXPR)
+ if (TREE_CODE (arg0) == ADDR_EXPR)
{
tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
if (tem)
- return fold_convert (type, fold (tem));
+ return fold_convert (type, tem);
}
+ if (flag_unsafe_math_optimizations
+ && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
+ && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
+ && (tem = distribute_real_division (code, type, arg0, arg1)))
+ return tem;
+
if (TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (arg1) == MULT_EXPR
&& (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
negate_expr (arg0),
TREE_OPERAND (arg1, 0));
- if (TREE_CODE (type) == COMPLEX_TYPE)
- {
- tem = fold_complex_mult (type, arg0, arg1);
- if (tem)
- return tem;
- }
-
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
goto bit_ior;
}
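+
+      /* The next four patterns are all instances of the bitwise
+	 identity (X | Y) ^ X == Y & ~X: bits set in X are cancelled
+	 by the XOR, while bits of Y outside X pass through.  */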
+      /* (X | Y) ^ X -> Y & ~X.  */
+ if (TREE_CODE (arg0) == BIT_IOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+ {
+ tree t2 = TREE_OPERAND (arg0, 1);
+ t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
+ arg1);
+ t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
+ fold_convert (type, t1));
+ return t1;
+ }
+
+      /* (Y | X) ^ X -> Y & ~X.  */
+ if (TREE_CODE (arg0) == BIT_IOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
+ {
+ tree t2 = TREE_OPERAND (arg0, 0);
+ t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
+ arg1);
+ t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
+ fold_convert (type, t1));
+ return t1;
+ }
+
+      /* X ^ (X | Y) -> Y & ~X.  */
+ if (TREE_CODE (arg1) == BIT_IOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
+ {
+ tree t2 = TREE_OPERAND (arg1, 1);
+ t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
+ arg0);
+ t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
+ fold_convert (type, t1));
+ return t1;
+ }
+
+      /* X ^ (Y | X) -> Y & ~X.  */
+ if (TREE_CODE (arg1) == BIT_IOR_EXPR
+ && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
+ {
+ tree t2 = TREE_OPERAND (arg1, 0);
+ t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
+ arg0);
+ t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
+ fold_convert (type, t1));
+ return t1;
+ }
+
/* Convert ~X ^ ~Y to X ^ Y. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == BIT_NOT_EXPR)
if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
{
tem = build_real (type, r);
- return fold_build2 (MULT_EXPR, type, arg0, tem);
+ return fold_build2 (MULT_EXPR, type,
+ fold_convert (type, arg0), tem);
}
}
}
TREE_OPERAND (arg1, 0));
}
- if (TREE_CODE (type) == COMPLEX_TYPE)
- {
- tem = fold_complex_div (type, arg0, arg1, code);
- if (tem)
- return tem;
- }
-
if (flag_unsafe_math_optimizations)
{
enum built_in_function fcode = builtin_mathfn_code (arg1);
&& 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
return fold_convert (type, tem);
- if (TREE_CODE (type) == COMPLEX_TYPE)
- {
- tem = fold_complex_div (type, arg0, arg1, code);
- if (tem)
- return tem;
- }
goto binary;
case CEIL_MOD_EXPR:
&& TREE_INT_CST_HIGH (arg1) == -1)
return omit_one_operand (type, integer_zero_node, arg0);
- /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
- BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
- if (code == TRUNC_MOD_EXPR
- && TYPE_UNSIGNED (type)
- && integer_pow2p (arg1))
+ /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
+ i.e. "X % C" into "X & C2", if X and C are positive. */
+ if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
+ && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
+ && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
{
unsigned HOST_WIDE_INT high, low;
tree mask;
don't try to compute it in the compiler. */
if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
return NULL_TREE;
+
+ /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
+ if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
+ && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
+ && host_integerp (TREE_OPERAND (arg0, 1), false)
+ && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
+ {
+ HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
+ + TREE_INT_CST_LOW (arg1));
+
+ /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
+ being well defined. */
+ if (low >= TYPE_PRECISION (type))
+ {
+ if (code == LROTATE_EXPR || code == RROTATE_EXPR)
+ low = low % TYPE_PRECISION (type);
+ else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
+ return build_int_cst (type, 0);
+ else
+ low = TYPE_PRECISION (type) - 1;
+ }
+
+ return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ build_int_cst (type, low));
+ }
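+
+      /* E.g. (x << 3) << 5 becomes x << 8; an oversized rotate count
+	 is reduced modulo the precision, and an oversized logical
+	 shift folds to zero, as handled above.  */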
+
+ /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
+ into x & ((unsigned)-1 >> c) for unsigned types. */
+ if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
+ || (TYPE_UNSIGNED (type)
+ && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
+ && host_integerp (arg1, false)
+ && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
+ && host_integerp (TREE_OPERAND (arg0, 1), false)
+ && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
+ {
+ HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
+ HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
+ tree lshift;
+ tree arg00;
+
+ if (low0 == low1)
+ {
+ arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
+
+ lshift = build_int_cst (type, -1);
+ lshift = int_const_binop (code, lshift, arg1, 0);
+
+ return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
+ }
+ }
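+
+      /* For instance, with 32-bit unsigned x, (x >> 4) << 4 becomes
+	 x & 0xfffffff0, the mask being -1 << 4.  */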
+
/* Rewrite an LROTATE_EXPR by a constant into an
RROTATE_EXPR by a new constant. */
if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
&& !TREE_SIDE_EFFECTS (arg1))
{
tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
- if (tem)
+ if (tem && !operand_equal_p (tem, arg0, 0))
return fold_build2 (code, type, tem, arg1);
tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
- if (tem)
+ if (tem && !operand_equal_p (tem, arg1, 0))
return fold_build2 (code, type, arg0, tem);
}
object against zero, then we know the result. */
if ((code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg0) == ADDR_EXPR
- && DECL_P (TREE_OPERAND (arg0, 0))
+ && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
&& ! DECL_WEAK (TREE_OPERAND (arg0, 0))
&& integer_zerop (arg1))
return constant_boolean_node (code != EQ_EXPR, type);
have access to attributes for externs), then we know the result. */
if ((code == EQ_EXPR || code == NE_EXPR)
&& TREE_CODE (arg0) == ADDR_EXPR
- && DECL_P (TREE_OPERAND (arg0, 0))
+ && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
&& ! DECL_WEAK (TREE_OPERAND (arg0, 0))
&& ! lookup_attribute ("alias",
DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
&& ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
&& TREE_CODE (arg1) == ADDR_EXPR
- && DECL_P (TREE_OPERAND (arg1, 0))
+ && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
&& ! DECL_WEAK (TREE_OPERAND (arg1, 0))
&& ! lookup_attribute ("alias",
DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
&& ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
- return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
- ? code == EQ_EXPR : code != EQ_EXPR,
- type);
+ {
+ /* We know that we're looking at the address of two
+ non-weak, unaliased, static _DECL nodes.
+
+ It is both wasteful and incorrect to call operand_equal_p
+ to compare the two ADDR_EXPR nodes. It is wasteful in that
+ all we need to do is test pointer equality for the arguments
+ to the two ADDR_EXPR nodes. It is incorrect to use
+ operand_equal_p as that function is NOT equivalent to a
+ C equality test. It can in fact return false for two
+ objects which would test as equal using the C equality
+ operator. */
+ bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
+ return constant_boolean_node (equal
+ ? code == EQ_EXPR : code != EQ_EXPR,
+ type);
+ }
/* If this is a comparison of two exprs that look like an
ARRAY_REF of the same object, then we can fold this to a
&& extract_array_ref (arg1, &base1, &offset1)
&& operand_equal_p (base0, base1, 0))
{
+ if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
+ && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
+ offset0 = NULL_TREE;
+ if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
+ && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
+ offset1 = NULL_TREE;
if (offset0 == NULL_TREE
&& offset1 == NULL_TREE)
{
}
}
+ /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
+ if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
+ && !TYPE_UNSIGNED (TREE_TYPE (arg1))
+ && !(flag_wrapv || flag_trapv))
+ && (TREE_CODE (arg1) == INTEGER_CST
+ && !TREE_OVERFLOW (arg1)))
+ {
+ tree const1 = TREE_OPERAND (arg0, 1);
+ tree const2 = arg1;
+ tree variable = TREE_OPERAND (arg0, 0);
+ tree lhs;
+ int lhs_add;
+ lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
+
+ lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
+ TREE_TYPE (arg1), const2, const1);
+ if (TREE_CODE (lhs) == TREE_CODE (arg1)
+ && (TREE_CODE (lhs) != INTEGER_CST
+ || !TREE_OVERFLOW (lhs)))
+ return fold_build2 (code, type, variable, lhs);
+ }
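+
+      /* A worked case (a sketch): "x + 5 < 10" becomes "x < 5",
+	 since 10 - 5 is representable without overflow.  */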
+
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree targ0 = strip_float_extensions (arg0);
switch (code)
{
case GE_EXPR:
- arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold_build2 (GT_EXPR, type, arg0, arg1);
+ arg1 = const_binop (MINUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1), 0);
+ return fold_build2 (GT_EXPR, type, arg0,
+ fold_convert (TREE_TYPE (arg0), arg1));
case LT_EXPR:
- arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold_build2 (LE_EXPR, type, arg0, arg1);
+ arg1 = const_binop (MINUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1), 0);
+ return fold_build2 (LE_EXPR, type, arg0,
+ fold_convert (TREE_TYPE (arg0), arg1));
default:
break;
return omit_one_operand (type, integer_one_node, arg0);
case GT_EXPR:
- return fold_build2 (NE_EXPR, type, arg0, arg1);
+ return fold_build2 (NE_EXPR, type, op0, op1);
default:
break;
tree st0, st1;
st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
- return fold
- (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
- type, fold_convert (st0, arg0),
- fold_convert (st1, integer_zero_node)));
+ return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
+ type, fold_convert (st0, arg0),
+ build_int_cst (st1, 0));
}
}
}
}
}
- /* If this is a comparison of complex values and either or both sides
- are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
- comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
- This may prevent needless evaluations. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
- && (TREE_CODE (arg0) == COMPLEX_EXPR
- || TREE_CODE (arg1) == COMPLEX_EXPR
- || TREE_CODE (arg0) == COMPLEX_CST
- || TREE_CODE (arg1) == COMPLEX_CST))
- {
- tree subtype = TREE_TYPE (TREE_TYPE (arg0));
- tree real0, imag0, real1, imag1;
-
- arg0 = save_expr (arg0);
- arg1 = save_expr (arg1);
- real0 = fold_build1 (REALPART_EXPR, subtype, arg0);
- imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0);
- real1 = fold_build1 (REALPART_EXPR, subtype, arg1);
- imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1);
-
- return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
- : TRUTH_ORIF_EXPR),
- type,
- fold_build2 (code, type, real0, real1),
- fold_build2 (code, type, imag0, imag1));
- }
-
/* Optimize comparisons of strlen vs zero to a compare of the
first character of the string vs zero. To wit,
strlen(ptr) == 0 => *ptr == 0
&& (arglist = TREE_OPERAND (arg0, 1))
&& TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
&& ! TREE_CHAIN (arglist))
- return fold_build2 (code, type,
- build1 (INDIRECT_REF, char_type_node,
- TREE_VALUE (arglist)),
- fold_convert (char_type_node,
- integer_zero_node));
+ {
+ tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
+ return fold_build2 (code, type, iref,
+ build_int_cst (TREE_TYPE (iref), 0));
+ }
}
/* We can fold X/C1 op C2 where C1 and C2 are integer constants
}
if ((code == EQ_EXPR || code == NE_EXPR)
- && !TREE_SIDE_EFFECTS (arg0)
&& integer_zerop (arg1)
&& tree_expr_nonzero_p (arg0))
- return constant_boolean_node (code==NE_EXPR, type);
+ {
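+	/* Wrap the result in omit_one_operand so any side effects of
+	   arg0 are preserved now that the TREE_SIDE_EFFECTS check is
+	   gone.  */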
+ tree res = constant_boolean_node (code==NE_EXPR, type);
+ return omit_one_operand (type, res, arg0);
+ }
t1 = fold_relational_const (code, type, arg0, arg1);
return t1 == NULL_TREE ? NULL_TREE : t1;
}
}
-/* Checks wheter the sub-tree ST contains a label LABEL_EXPR which is
+/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
accessible from outside the sub-tree. Returns NULL_TREE if no
addressable label is found. */
if (TREE_CODE (arg0) == CONSTRUCTOR
&& ! type_contains_placeholder_p (TREE_TYPE (arg0)))
{
- tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
- if (m)
- return TREE_VALUE (m);
+ unsigned HOST_WIDE_INT idx;
+ tree field, value;
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
+ if (field == arg1)
+ return value;
}
return NULL_TREE;
/* If the second operand is simpler than the third, swap them
since that produces better jump optimization results. */
- if (tree_swap_operands_p (op1, op2, false))
+ if (truth_value_p (TREE_CODE (arg0))
+ && tree_swap_operands_p (op1, op2, false))
{
/* See if this can be inverted. If it can't, possibly because
it was a floating-point inequality comparison, don't do
if (TREE_CODE (op0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
&& DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
+ return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
+ return NULL_TREE;
+
+ case BIT_FIELD_REF:
+ if (TREE_CODE (arg0) == VECTOR_CST
+ && type == TREE_TYPE (TREE_TYPE (arg0))
+ && host_integerp (arg1, 1)
+ && host_integerp (op2, 1))
{
- tree fndecl = TREE_OPERAND (op0, 0);
- tree arglist = op1;
- tree tmp = fold_builtin (fndecl, arglist, false);
- if (tmp)
- return tmp;
+ unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
+ unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
+
+ if (width != 0
+ && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
+ && (idx % width) == 0
+ && (idx = idx / width)
+ < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
+ {
+ tree elements = TREE_VECTOR_CST_ELTS (arg0);
+ while (idx-- > 0 && elements)
+ elements = TREE_CHAIN (elements);
+ if (elements)
+ return TREE_VALUE (elements);
+ else
+ return fold_convert (type, integer_zero_node);
+ }
}
return NULL_TREE;
{
void **slot;
enum tree_code code;
- char buf[sizeof (struct tree_decl)];
+ char buf[sizeof (struct tree_function_decl)];
int i, len;
+
+recursive_label:
gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
- <= sizeof (struct tree_decl))
- && sizeof (struct tree_type) <= sizeof (struct tree_decl));
+ <= sizeof (struct tree_function_decl))
+ && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
if (expr == NULL)
return;
slot = htab_find_slot (ht, expr, INSERT);
}
else if (TREE_CODE_CLASS (code) == tcc_type
&& (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
- || TYPE_CACHED_VALUES_P (expr)))
+ || TYPE_CACHED_VALUES_P (expr)
+ || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
{
/* Allow these fields to be modified. */
memcpy (buf, expr, tree_size (expr));
expr = (tree) buf;
+ TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
TYPE_POINTER_TO (expr) = NULL;
TYPE_REFERENCE_TO (expr) = NULL;
if (TYPE_CACHED_VALUES_P (expr))
md5_process_bytes (expr, tree_size (expr), ctx);
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
if (TREE_CODE_CLASS (code) != tcc_type
- && TREE_CODE_CLASS (code) != tcc_declaration)
+ && TREE_CODE_CLASS (code) != tcc_declaration
+ && code != TREE_LIST)
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
switch (TREE_CODE_CLASS (code))
{
case TREE_LIST:
fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
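+      /* Iterate down the chain via recursive_label instead of
+	 recursing, so very long TREE_LISTs do not exhaust the stack.  */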
+ expr = TREE_CHAIN (expr);
+ goto recursive_label;
break;
case TREE_VEC:
for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
fold_checksum_tree (DECL_NAME (expr), ctx, ht);
fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
- fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
- fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
- fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
- fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
+ if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
+ fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
+
+ if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
+ {
+ fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
+ fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
+ fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
+ }
break;
case tcc_type:
if (TREE_CODE (expr) == ENUMERAL_TYPE)
operand OP0. */
tree
-fold_build1 (enum tree_code code, tree type, tree op0)
+fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
- tree tem = fold_unary (code, type, op0);
- if (tem)
- return tem;
+ tree tem;
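+
+  /* With ENABLE_FOLD_CHECKING, checksum the operand before and after
+     folding so that fold_check_failed can flag any in-place
+     modification of OP0.  */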
+#ifdef ENABLE_FOLD_CHECKING
+ unsigned char checksum_before[16], checksum_after[16];
+ struct md5_ctx ctx;
+ htab_t ht;
- return build1 (code, type, op0);
+ ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op0, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_before);
+ htab_empty (ht);
+#endif
+
+ tem = fold_unary (code, type, op0);
+ if (!tem)
+ tem = build1_stat (code, type, op0 PASS_MEM_STAT);
+
+#ifdef ENABLE_FOLD_CHECKING
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op0, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_after);
+ htab_delete (ht);
+
+ if (memcmp (checksum_before, checksum_after, 16))
+ fold_check_failed (op0, tem);
+#endif
+ return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
with operands OP0 and OP1. */
tree
-fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
+fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
+ MEM_STAT_DECL)
{
- tree tem = fold_binary (code, type, op0, op1);
- if (tem)
- return tem;
+ tree tem;
+#ifdef ENABLE_FOLD_CHECKING
+ unsigned char checksum_before_op0[16],
+ checksum_before_op1[16],
+ checksum_after_op0[16],
+ checksum_after_op1[16];
+ struct md5_ctx ctx;
+ htab_t ht;
+
+ ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op0, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_before_op0);
+ htab_empty (ht);
+
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op1, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_before_op1);
+ htab_empty (ht);
+#endif
+
+ tem = fold_binary (code, type, op0, op1);
+ if (!tem)
+ tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
+
+#ifdef ENABLE_FOLD_CHECKING
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op0, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_after_op0);
+ htab_empty (ht);
+
+ if (memcmp (checksum_before_op0, checksum_after_op0, 16))
+ fold_check_failed (op0, tem);
+
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op1, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_after_op1);
+ htab_delete (ht);
- return build2 (code, type, op0, op1);
+ if (memcmp (checksum_before_op1, checksum_after_op1, 16))
+ fold_check_failed (op1, tem);
+#endif
+ return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
type TYPE with operands OP0, OP1, and OP2. */
tree
-fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
+fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
+ MEM_STAT_DECL)
{
- tree tem = fold_ternary (code, type, op0, op1, op2);
- if (tem)
- return tem;
+ tree tem;
+#ifdef ENABLE_FOLD_CHECKING
+ unsigned char checksum_before_op0[16],
+ checksum_before_op1[16],
+ checksum_before_op2[16],
+ checksum_after_op0[16],
+ checksum_after_op1[16],
+ checksum_after_op2[16];
+ struct md5_ctx ctx;
+ htab_t ht;
+
+ ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op0, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_before_op0);
+ htab_empty (ht);
+
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op1, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_before_op1);
+ htab_empty (ht);
+
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op2, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_before_op2);
+ htab_empty (ht);
+#endif
+
+ tem = fold_ternary (code, type, op0, op1, op2);
+ if (!tem)
+ tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
+
+#ifdef ENABLE_FOLD_CHECKING
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op0, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_after_op0);
+ htab_empty (ht);
+
+ if (memcmp (checksum_before_op0, checksum_after_op0, 16))
+ fold_check_failed (op0, tem);
+
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op1, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_after_op1);
+ htab_empty (ht);
+
+ if (memcmp (checksum_before_op1, checksum_after_op1, 16))
+ fold_check_failed (op1, tem);
+
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (op2, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum_after_op2);
+ htab_delete (ht);
- return build3 (code, type, op0, op1, op2);
+ if (memcmp (checksum_before_op2, checksum_after_op2, 16))
+ fold_check_failed (op2, tem);
+#endif
+ return tem;
}
/* Perform constant folding and related simplification of initializer
- expression EXPR. This behaves identically to "fold" but ignores
+ expression EXPR. These behave identically to "fold_buildN" but ignore
potential run-time traps and exceptions that fold must preserve. */
+#define START_FOLD_INIT \
+ int saved_signaling_nans = flag_signaling_nans;\
+ int saved_trapping_math = flag_trapping_math;\
+ int saved_rounding_math = flag_rounding_math;\
+ int saved_trapv = flag_trapv;\
+ flag_signaling_nans = 0;\
+ flag_trapping_math = 0;\
+ flag_rounding_math = 0;\
+ flag_trapv = 0
+
+#define END_FOLD_INIT \
+ flag_signaling_nans = saved_signaling_nans;\
+ flag_trapping_math = saved_trapping_math;\
+ flag_rounding_math = saved_rounding_math;\
+ flag_trapv = saved_trapv
+
tree
-fold_initializer (tree expr)
+fold_build1_initializer (enum tree_code code, tree type, tree op)
{
- int saved_signaling_nans = flag_signaling_nans;
- int saved_trapping_math = flag_trapping_math;
- int saved_rounding_math = flag_rounding_math;
- int saved_trapv = flag_trapv;
tree result;
+ START_FOLD_INIT;
+
+ result = fold_build1 (code, type, op);
- flag_signaling_nans = 0;
- flag_trapping_math = 0;
- flag_rounding_math = 0;
- flag_trapv = 0;
+ END_FOLD_INIT;
+ return result;
+}
- result = fold (expr);
+tree
+fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
+{
+ tree result;
+ START_FOLD_INIT;
- flag_signaling_nans = saved_signaling_nans;
- flag_trapping_math = saved_trapping_math;
- flag_rounding_math = saved_rounding_math;
- flag_trapv = saved_trapv;
+ result = fold_build2 (code, type, op0, op1);
+ END_FOLD_INIT;
return result;
}
+tree
+fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
+ tree op2)
+{
+ tree result;
+ START_FOLD_INIT;
+
+ result = fold_build3 (code, type, op0, op1, op2);
+
+ END_FOLD_INIT;
+ return result;
+}
+
+#undef START_FOLD_INIT
+#undef END_FOLD_INIT
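+
+/* Example use (hypothetical caller): fold_build2_initializer
+   (RDIV_EXPR, double_type_node, ...) lets a front end fold a constant
+   division in a static initializer even when -ftrapping-math would
+   otherwise inhibit the fold.  */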
+
/* Determine if first argument is a multiple of second argument. Return 0 if
   it is not, or we cannot easily determine it to be.
int
tree_expr_nonnegative_p (tree t)
{
+ if (TYPE_UNSIGNED (TREE_TYPE (t)))
+ return 1;
+
switch (TREE_CODE (t))
{
case ABS_EXPR:
- return 1;
+ /* We can't return 1 if flag_wrapv is set because
+ ABS_EXPR<INT_MIN> = INT_MIN. */
+ if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
+ return 1;
+ break;
case INTEGER_CST:
return tree_int_cst_sgn (t) >= 0;
}
return 0;
+ case BIT_AND_EXPR:
+ case MAX_EXPR:
+ return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
+ || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ case MIN_EXPR:
+ case RDIV_EXPR:
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
case FLOOR_DIV_EXPR:
case CEIL_MOD_EXPR:
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
+ case SAVE_EXPR:
+ case NON_LVALUE_EXPR:
+ case FLOAT_EXPR:
return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
- case RDIV_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
- && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ case COMPOUND_EXPR:
+ case MODIFY_EXPR:
+ return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
- case BIT_AND_EXPR:
+ case BIND_EXPR:
+ return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
+
+ case COND_EXPR:
return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
- || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
- case BIT_IOR_EXPR:
- case BIT_XOR_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
- && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
+ && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
case NOP_EXPR:
{
}
break;
- case COND_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
- && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
- case COMPOUND_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
- case MIN_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
- && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
- case MAX_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
- || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
- case MODIFY_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
- case BIND_EXPR:
- return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
- case SAVE_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
- case NON_LVALUE_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
- case FLOAT_EXPR:
- return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
-
case TARGET_EXPR:
{
tree temp = TARGET_EXPR_SLOT (t);
CASE_BUILTIN_F (BUILT_IN_EXP2)
CASE_BUILTIN_F (BUILT_IN_FABS)
CASE_BUILTIN_F (BUILT_IN_FDIM)
- CASE_BUILTIN_F (BUILT_IN_FREXP)
CASE_BUILTIN_F (BUILT_IN_HYPOT)
CASE_BUILTIN_F (BUILT_IN_POW10)
CASE_BUILTIN_I (BUILT_IN_FFS)
CASE_BUILTIN_F (BUILT_IN_EXPM1)
CASE_BUILTIN_F (BUILT_IN_FLOOR)
CASE_BUILTIN_F (BUILT_IN_FMOD)
+ CASE_BUILTIN_F (BUILT_IN_FREXP)
CASE_BUILTIN_F (BUILT_IN_LCEIL)
CASE_BUILTIN_F (BUILT_IN_LDEXP)
CASE_BUILTIN_F (BUILT_IN_LFLOOR)
For floating point we further ensure that T is not denormal.
Similar logic is present in nonzero_address in rtlanal.h. */
-static bool
+bool
tree_expr_nonzero_p (tree t)
{
tree type = TREE_TYPE (t);
switch (TREE_CODE (t))
{
case ABS_EXPR:
- if (!TYPE_UNSIGNED (type) && !flag_wrapv)
- return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
+ return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
case INTEGER_CST:
/* We used to test for !integer_zerop here. This does not work correctly
return false;
/* Weak declarations may link to NULL. */
- if (DECL_P (base))
+ if (VAR_OR_FUNCTION_DECL_P (base))
return !DECL_WEAK (base);
/* Constants are never weak. */
return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
|| tree_expr_nonzero_p (TREE_OPERAND (t, 0));
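+
+      /* GCC assumes the result of alloca is never a null pointer,
+	 so such calls fold as nonzero.  */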
+ case CALL_EXPR:
+ return alloca_call_p (t);
+
default:
break;
}
return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
-/* Given a pointer value T, return a simplified version of an indirection
- through T, or NULL_TREE if no simplification is possible. */
+/* Given a pointer value OP0 and a type TYPE, return a simplified version
+ of an indirection through OP0, or NULL_TREE if no simplification is
+ possible. */
-static tree
-fold_indirect_ref_1 (tree t)
+tree
+fold_indirect_ref_1 (tree type, tree op0)
{
- tree type = TREE_TYPE (TREE_TYPE (t));
- tree sub = t;
+ tree sub = op0;
tree subtype;
STRIP_NOPS (sub);
tree op = TREE_OPERAND (sub, 0);
tree optype = TREE_TYPE (op);
/* *&p => p */
- if (lang_hooks.types_compatible_p (type, optype))
+ if (type == optype)
return op;
/* *(foo *)&fooarray => fooarray[0] */
else if (TREE_CODE (optype) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
+ && type == TREE_TYPE (optype))
{
tree type_domain = TYPE_DOMAIN (optype);
tree min_val = size_zero_node;
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
+ && type == TREE_TYPE (TREE_TYPE (subtype)))
{
tree type_domain;
tree min_val = size_zero_node;
tree
build_fold_indirect_ref (tree t)
{
- tree sub = fold_indirect_ref_1 (t);
+ tree type = TREE_TYPE (TREE_TYPE (t));
+ tree sub = fold_indirect_ref_1 (type, t);
if (sub)
return sub;
else
- return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
+ return build1 (INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version. */
tree
fold_indirect_ref (tree t)
{
- tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
+ tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
if (sub)
return sub;
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
poffset, &mode, &unsignedp, &volatilep,
false);
-
- if (TREE_CODE (core) == INDIRECT_REF)
- core = TREE_OPERAND (core, 0);
+ core = build_fold_addr_expr (core);
}
else
{
toffset2 = fold_convert (type, toffset2);
tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
- if (!host_integerp (tdiff, 0))
+ if (!cst_and_fits_in_hwi (tdiff))
return false;
- *diff = tree_low_cst (tdiff, 0);
+ *diff = int_cst_value (tdiff);
}
else if (toffset1 || toffset2)
{