static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
-static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
-static tree fold_relational_hi_lo (enum tree_code *, const tree,
- tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
return overflow;
}
+
+/* If ARG2 divides ARG1 with zero remainder, carries out the division
+   indicated by CODE (e.g. EXACT_DIV_EXPR) and returns the quotient as
+   an INTEGER_CST of the same type as ARG1.
+   Otherwise returns NULL_TREE.  */
+
+static tree
+div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
+{
+  unsigned HOST_WIDE_INT int1l, int2l;
+  HOST_WIDE_INT int1h, int2h;
+  unsigned HOST_WIDE_INT quol, reml;
+  HOST_WIDE_INT quoh, remh;
+  tree type = TREE_TYPE (arg1);
+  int uns = TYPE_UNSIGNED (type);
+
+  /* Split both double-word integer constants into their low and
+     high HOST_WIDE_INT halves.  */
+  int1l = TREE_INT_CST_LOW (arg1);
+  int1h = TREE_INT_CST_HIGH (arg1);
+  int2l = TREE_INT_CST_LOW (arg2);
+  int2h = TREE_INT_CST_HIGH (arg2);
+
+  /* A nonzero remainder means ARG2 does not evenly divide ARG1.  */
+  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
+			&quol, &quoh, &reml, &remh);
+  if (remh != 0 || reml != 0)
+    return NULL_TREE;
+
+  return build_int_cst_wide (type, quol, quoh);
+}
\f
/* Return true if built-in mathematical function specified by CODE
preserves the sign of it argument, i.e. -f(x) == f(-x). */
case RDIV_EXPR:
{
+ tree t1, t2, real, imag;
tree magsquared
= const_binop (PLUS_EXPR,
const_binop (MULT_EXPR, r2, r2, notrunc),
const_binop (MULT_EXPR, i2, i2, notrunc),
notrunc);
- t = build_complex (type,
- const_binop
- (INTEGRAL_TYPE_P (TREE_TYPE (r1))
- ? TRUNC_DIV_EXPR : RDIV_EXPR,
- const_binop (PLUS_EXPR,
- const_binop (MULT_EXPR, r1, r2,
- notrunc),
- const_binop (MULT_EXPR, i1, i2,
- notrunc),
- notrunc),
- magsquared, notrunc),
- const_binop
- (INTEGRAL_TYPE_P (TREE_TYPE (r1))
- ? TRUNC_DIV_EXPR : RDIV_EXPR,
- const_binop (MINUS_EXPR,
- const_binop (MULT_EXPR, i1, r2,
- notrunc),
- const_binop (MULT_EXPR, r1, i2,
- notrunc),
- notrunc),
- magsquared, notrunc));
+ t1 = const_binop (PLUS_EXPR,
+ const_binop (MULT_EXPR, r1, r2, notrunc),
+ const_binop (MULT_EXPR, i1, i2, notrunc),
+ notrunc);
+ t2 = const_binop (MINUS_EXPR,
+ const_binop (MULT_EXPR, i1, r2, notrunc),
+ const_binop (MULT_EXPR, r1, i2, notrunc),
+ notrunc);
+
+ if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
+ {
+ real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
+ imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
+ }
+ else
+ {
+ real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
+ imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
+ if (!real || !imag)
+ return NULL_TREE;
+ }
+
+ t = build_complex (type, real, imag);
}
break;
comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
as well: if reversing the comparison is unsafe, return ERROR_MARK. */
-static enum tree_code
+enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
if (honor_nans && flag_trapping_math)
v2 = TREE_CHAIN (v2);
}
- return 1;
+ return v1 == v2;
}
case COMPLEX_CST:
return fold_build2 (TREE_CODE (arg0), type, common,
fold_build2 (code, type, left, right));
}
+
+/* Knowing that ARG0 and ARG1 are each either an RDIV_EXPR or a MULT_EXPR,
+   simplify the binary operation CODE applied to them by distributing the
+   division.  This optimization is unsafe (it may change floating-point
+   results), so callers guard it with flag_unsafe_math_optimizations.
+   Returns the simplified tree, or NULL_TREE if nothing applies.  */
+static tree
+distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
+{
+  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
+  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
+
+  /* (A / C) +- (B / C) -> (A +- B) / C.  */
+  if (mul0 == mul1
+      && operand_equal_p (TREE_OPERAND (arg0, 1),
+			  TREE_OPERAND (arg1, 1), 0))
+    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
+			fold_build2 (code, type,
+				     TREE_OPERAND (arg0, 0),
+				     TREE_OPERAND (arg1, 0)),
+			TREE_OPERAND (arg0, 1));
+
+  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
+  if (operand_equal_p (TREE_OPERAND (arg0, 0),
+		       TREE_OPERAND (arg1, 0), 0)
+      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
+    {
+      REAL_VALUE_TYPE r0, r1;
+      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
+      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
+      /* For a division, replace the constant by its reciprocal so both
+	 operands are in A * C form before combining the constants.  */
+      if (!mul0)
+	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
+      if (!mul1)
+	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
+      real_arithmetic (&r0, code, &r0, &r1);
+      return fold_build2 (MULT_EXPR, type,
+			  TREE_OPERAND (arg0, 0),
+			  build_real (type, r0));
+    }
+
+  return NULL_TREE;
+}
\f
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
fold_convert (unsigned_type, rhs),
size_int (lbitsize), 0)))
{
- warning ("comparison is always %d due to width of bit-field",
+ warning (0, "comparison is always %d due to width of bit-field",
code == NE_EXPR);
return constant_boolean_node (code == NE_EXPR, compare_type);
}
size_int (lbitsize - 1), 0);
if (! integer_zerop (tem) && ! integer_all_onesp (tem))
{
- warning ("comparison is always %d due to width of bit-field",
+ warning (0, "comparison is always %d due to width of bit-field",
code == NE_EXPR);
return constant_boolean_node (code == NE_EXPR, compare_type);
}
lntype, ll_mask),
0)))
{
- warning ("comparison is always %d", wanted_code == NE_EXPR);
+ warning (0, "comparison is always %d", wanted_code == NE_EXPR);
return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
}
lntype, rl_mask),
0)))
{
- warning ("comparison is always %d", wanted_code == NE_EXPR);
+ warning (0, "comparison is always %d", wanted_code == NE_EXPR);
return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
}
{
if (wanted_code == NE_EXPR)
{
- warning ("%<or%> of unmatched not-equal tests is always 1");
+ warning (0, "%<or%> of unmatched not-equal tests is always 1");
return constant_boolean_node (true, truth_type);
}
else
{
- warning ("%<and%> of mutually exclusive equal-tests is always 0");
+ warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
return constant_boolean_node (false, truth_type);
}
}
 /* Return true if expr looks like an ARRAY_REF and set base and
    offset to the appropriate trees.  If there is no offset,
-   offset is set to NULL_TREE.  */
+   offset is set to NULL_TREE.  Base will be canonicalized to
+   something you can get the element type from using
+   TREE_TYPE (TREE_TYPE (base)).  */
 static bool
 extract_array_ref (tree expr, tree *base, tree *offset)
 {
-  /* We have to be careful with stripping nops as with the
-     base type the meaning of the offset can change.  */
-  tree inner_expr = expr;
-  STRIP_NOPS (inner_expr);
   /* One canonical form is a PLUS_EXPR with the first
      argument being an ADDR_EXPR with a possible NOP_EXPR
      attached.  */
   if (TREE_CODE (expr) == PLUS_EXPR)
     {
       tree op0 = TREE_OPERAND (expr, 0);
+      tree inner_base, dummy1;
+      /* Strip NOP_EXPRs here because the C frontends and/or
+	 folders present us (int *)&x.a + 4B possibly.  */
       STRIP_NOPS (op0);
-      if (TREE_CODE (op0) == ADDR_EXPR)
+      if (extract_array_ref (op0, &inner_base, &dummy1))
 	{
-	  *base = TREE_OPERAND (expr, 0);
-	  *offset = TREE_OPERAND (expr, 1);
+	  *base = inner_base;
+	  /* If the inner reference had no offset, ours is the only
+	     offset; otherwise accumulate the two offsets.  */
+	  if (dummy1 == NULL_TREE)
+	    *offset = TREE_OPERAND (expr, 1);
+	  else
+	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
+				   dummy1, TREE_OPERAND (expr, 1));
 	  return true;
 	}
     }
      offset.  For other arguments to the ADDR_EXPR we assume
      zero offset and as such do not care about the ADDR_EXPR
      type and strip possible nops from it.  */
-  else if (TREE_CODE (inner_expr) == ADDR_EXPR)
+  else if (TREE_CODE (expr) == ADDR_EXPR)
     {
-      tree op0 = TREE_OPERAND (inner_expr, 0);
+      tree op0 = TREE_OPERAND (expr, 0);
       if (TREE_CODE (op0) == ARRAY_REF)
 	{
-	  *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
+	  *base = TREE_OPERAND (op0, 0);
 	  *offset = TREE_OPERAND (op0, 1);
 	}
       else
 	{
-	  *base = inner_expr;
+	  /* Handle array-to-pointer decay as &a.  */
+	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
+	    *base = TREE_OPERAND (expr, 0);
+	  else
+	    *base = expr;
 	  *offset = NULL_TREE;
 	}
       return true;
     }
+  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
+  else if (SSA_VAR_P (expr)
+	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
+    {
+      *base = expr;
+      *offset = NULL_TREE;
+      return true;
+    }
   return false;
 }
if (lhs == 0)
{
true_value = fold_convert (cond_type, true_value);
- lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
- : build2 (code, type, arg, true_value));
+ if (cond_first_p)
+ lhs = fold_build2 (code, type, true_value, arg);
+ else
+ lhs = fold_build2 (code, type, arg, true_value);
}
if (rhs == 0)
{
false_value = fold_convert (cond_type, false_value);
- rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
- : build2 (code, type, arg, false_value));
+ if (cond_first_p)
+ rhs = fold_build2 (code, type, false_value, arg);
+ else
+ rhs = fold_build2 (code, type, arg, false_value);
}
test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
/* If CODE with arguments ARG0 and ARG1 represents a single bit
- equality/inequality test, then return a simplified form of
- the test using shifts and logical operations. Otherwise return
- NULL. TYPE is the desired result type. */
+ equality/inequality test, then return a simplified form of the test
+ using a sign testing. Otherwise return NULL. TYPE is the desired
+ result type. */
-tree
-fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
- tree result_type)
+static tree
+fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
+ tree result_type)
{
/* If this is testing a single bit, we can optimize the test. */
if ((code == NE_EXPR || code == EQ_EXPR)
&& TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
- tree inner = TREE_OPERAND (arg0, 0);
- tree type = TREE_TYPE (arg0);
- int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
- enum machine_mode operand_mode = TYPE_MODE (type);
- int ops_unsigned;
- tree signed_type, unsigned_type, intermediate_type;
- tree arg00;
-
/* If we have (A & C) != 0 where C is the sign bit of A, convert
this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
- arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
+ tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
+
if (arg00 != NULL_TREE
/* This is only a win if casting to a signed type is cheap,
i.e. when arg00's type is not a partial mode. */
result_type, fold_convert (stype, arg00),
fold_convert (stype, integer_zero_node));
}
+ }
+
+ return NULL_TREE;
+}
+
+/* If CODE with arguments ARG0 and ARG1 represents a single bit
+ equality/inequality test, then return a simplified form of
+ the test using shifts and logical operations. Otherwise return
+ NULL. TYPE is the desired result type. */
+
+tree
+fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
+ tree result_type)
+{
+ /* If this is testing a single bit, we can optimize the test. */
+ if ((code == NE_EXPR || code == EQ_EXPR)
+ && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
+ && integer_pow2p (TREE_OPERAND (arg0, 1)))
+ {
+ tree inner = TREE_OPERAND (arg0, 0);
+ tree type = TREE_TYPE (arg0);
+ int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
+ enum machine_mode operand_mode = TYPE_MODE (type);
+ int ops_unsigned;
+ tree signed_type, unsigned_type, intermediate_type;
+ tree tem;
+
+ /* First, see if we can fold the single bit test into a sign-bit
+ test. */
+ tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
+ result_type);
+ if (tem)
+ return tem;
/* Otherwise we have (A & C) != 0 where C is a single bit,
convert that into ((A >> C2) & 1). Where C2 = log2(C).
|| TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
&& (TREE_TYPE (arg1_unw) == shorter_type
|| (TREE_CODE (arg1_unw) == INTEGER_CST
- && TREE_CODE (shorter_type) == INTEGER_TYPE
+ && (TREE_CODE (shorter_type) == INTEGER_TYPE
+ || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
&& int_fits_type_p (arg1_unw, shorter_type))))
return fold_build2 (code, type, arg0_unw,
fold_convert (shorter_type, arg1_unw));
}
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
- step of the array. ADDR is the address. MULT is the multiplicative expression.
+ step of the array. Reconstructs s and delta in the case of s * delta
+ being an integer constant (and thus already folded).
+ ADDR is the address. MULT is the multiplicative expression.
If the function succeeds, the new address expression is returned. Otherwise
NULL_TREE is returned. */
static tree
-try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
+try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
tree s, delta, step;
- tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
tree ref = TREE_OPERAND (addr, 0), pref;
tree ret, pos;
tree itype;
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
-
- if (TREE_CODE (arg0) == INTEGER_CST)
+ /* Canonicalize op1 into a possibly non-constant delta
+ and an INTEGER_CST s. */
+ if (TREE_CODE (op1) == MULT_EXPR)
{
- s = arg0;
- delta = arg1;
+ tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
+
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+
+ if (TREE_CODE (arg0) == INTEGER_CST)
+ {
+ s = arg0;
+ delta = arg1;
+ }
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ {
+ s = arg1;
+ delta = arg0;
+ }
+ else
+ return NULL_TREE;
}
- else if (TREE_CODE (arg1) == INTEGER_CST)
+ else if (TREE_CODE (op1) == INTEGER_CST)
{
- s = arg1;
- delta = arg0;
+ delta = op1;
+ s = NULL_TREE;
}
else
- return NULL_TREE;
+ {
+ /* Simulate we are delta * 1. */
+ delta = op1;
+ s = integer_one_node;
+ }
for (;; ref = TREE_OPERAND (ref, 0))
{
if (TREE_CODE (ref) == ARRAY_REF)
{
- step = array_ref_element_size (ref);
-
- if (TREE_CODE (step) != INTEGER_CST)
+ itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
+ if (! itype)
continue;
- itype = TREE_TYPE (step);
-
- /* If the type sizes do not match, we might run into problems
- when one of them would overflow. */
- if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
+ step = array_ref_element_size (ref);
+ if (TREE_CODE (step) != INTEGER_CST)
continue;
- if (!operand_equal_p (step, fold_convert (itype, s), 0))
- continue;
+ if (s)
+ {
+ if (! tree_int_cst_equal (step, s))
+ continue;
+ }
+ else
+ {
+ /* Try if delta is a multiple of step. */
+ tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
+ if (! tmp)
+ continue;
+ delta = tmp;
+ }
- delta = fold_convert (itype, delta);
break;
}
}
TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
- TREE_OPERAND (pos, 1),
- delta);
+ fold_convert (itype,
+ TREE_OPERAND (pos, 1)),
+ fold_convert (itype, delta));
return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
OP0. Return the folded expression if folding is successful.
Otherwise, return NULL_TREE. */
-static tree
+tree
fold_unary (enum tree_code code, tree type, tree op0)
{
tree tem;
{
/* Don't leave an assignment inside a conversion
unless assigning a bitfield. */
- tem = build1 (code, type, TREE_OPERAND (op0, 1));
+ tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
/* First do the assignment, then return converted constant. */
- tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, fold (tem));
+ tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
TREE_NO_WARNING (tem) = 1;
TREE_USED (tem) = 1;
return tem;
|| (TREE_CODE (arg0) == PLUS_EXPR
&& integer_all_onesp (TREE_OPERAND (arg0, 1)))))
return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
+ /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
+ else if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && (tem = fold_unary (BIT_NOT_EXPR, type,
+ fold_convert (type,
+ TREE_OPERAND (arg0, 0)))))
+ return fold_build2 (BIT_XOR_EXPR, type, tem,
+ fold_convert (type, TREE_OPERAND (arg0, 1)));
+ else if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && (tem = fold_unary (BIT_NOT_EXPR, type,
+ fold_convert (type,
+ TREE_OPERAND (arg0, 1)))))
+ return fold_build2 (BIT_XOR_EXPR, type,
+ fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
+
return NULL_TREE;
case TRUTH_NOT_EXPR:
OP0 and OP1. Return the folded expression if folding is
successful. Otherwise, return NULL_TREE. */
-static tree
+tree
fold_binary (enum tree_code code, tree type, tree op0, tree op1)
{
tree t1 = NULL_TREE;
case PLUS_EXPR:
/* A + (-B) -> A - B */
if (TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
+ return fold_build2 (MINUS_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, TREE_OPERAND (arg1, 0)));
/* (-A) + B -> B - A */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
- return fold_build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0));
+ return fold_build2 (MINUS_EXPR, type,
+ fold_convert (type, arg1),
+ fold_convert (type, TREE_OPERAND (arg0, 0)));
/* Convert ~A + 1 to -A. */
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (arg0) == BIT_NOT_EXPR
fold_build2 (PLUS_EXPR, type,
fold_convert (type, alt0),
fold_convert (type, alt1)),
- same);
+ fold_convert (type, same));
}
/* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
of the array. Loop optimizer sometimes produce this type of
expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (arg1) == MULT_EXPR)
+ if (TREE_CODE (arg0) == ADDR_EXPR)
{
tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
if (tem)
return fold_convert (type, fold (tem));
}
- else if (TREE_CODE (arg1) == ADDR_EXPR
- && TREE_CODE (arg0) == MULT_EXPR)
+ else if (TREE_CODE (arg1) == ADDR_EXPR)
{
tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
if (tem)
fold_convert (type, tem));
}
+ if (flag_unsafe_math_optimizations
+ && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
+ && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
+ && (tem = distribute_real_division (code, type, arg0, arg1)))
+ return tem;
+
/* Convert x+x into x*2.0. */
if (operand_equal_p (arg0, arg1, 0)
&& SCALAR_FLOAT_TYPE_P (type))
&& (TREE_CODE (arg1) != REAL_CST
|| REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
|| (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
- return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1));
+ return fold_build2 (PLUS_EXPR, type,
+ fold_convert (type, arg0),
+ fold_convert (type, negate_expr (arg1)));
/* Try folding difference of addresses. */
{
&& ptr_difference_const (arg0, arg1, &diff))
return build_int_cst_type (type, diff);
}
-
+
+ /* Fold &a[i] - &a[j] to i-j. */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
+ && TREE_CODE (arg1) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
+ {
+ tree aref0 = TREE_OPERAND (arg0, 0);
+ tree aref1 = TREE_OPERAND (arg1, 0);
+ if (operand_equal_p (TREE_OPERAND (aref0, 0),
+ TREE_OPERAND (aref1, 0), 0))
+ {
+ tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
+ tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
+ tree esz = array_ref_element_size (aref0);
+ tree diff = build2 (MINUS_EXPR, type, op0, op1);
+ return fold_build2 (MULT_EXPR, type, diff,
+ fold_convert (type, esz));
+
+ }
+ }
+
/* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
of the array. Loop optimizer sometimes produce this type of
expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (arg1) == MULT_EXPR)
+ if (TREE_CODE (arg0) == ADDR_EXPR)
{
tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
if (tem)
return fold_convert (type, fold (tem));
}
+ if (flag_unsafe_math_optimizations
+ && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
+ && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
+ && (tem = distribute_real_division (code, type, arg0, arg1)))
+ return tem;
+
if (TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (arg1) == MULT_EXPR
&& (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
{
tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
- tree arg = build2 (PLUS_EXPR, type,
- TREE_VALUE (TREE_OPERAND (arg0, 1)),
- TREE_VALUE (TREE_OPERAND (arg1, 1)));
- tree arglist = build_tree_list (NULL_TREE, fold (arg));
+ tree arg = fold_build2 (PLUS_EXPR, type,
+ TREE_VALUE (TREE_OPERAND (arg0, 1)),
+ TREE_VALUE (TREE_OPERAND (arg1, 1)));
+ tree arglist = build_tree_list (NULL_TREE, arg);
return build_function_call_expr (expfn, arglist);
}
if (operand_equal_p (arg01, arg11, 0))
{
tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
- tree arg = build2 (MULT_EXPR, type, arg00, arg10);
- tree arglist = tree_cons (NULL_TREE, fold (arg),
+ tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
+ tree arglist = tree_cons (NULL_TREE, arg,
build_tree_list (NULL_TREE,
arg01));
return build_function_call_expr (powfn, arglist);
goto bit_ior;
}
+ /* Convert ~X ^ ~Y to X ^ Y. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && TREE_CODE (arg1) == BIT_NOT_EXPR)
+ return fold_build2 (code, type,
+ fold_convert (type, TREE_OPERAND (arg0, 0)),
+ fold_convert (type, TREE_OPERAND (arg1, 0)));
+
/* See if this can be simplified into a rotate first. If that
is unsuccessful continue in the association code. */
goto bit_rotate;
if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
{
tem = build_real (type, r);
- return fold_build2 (MULT_EXPR, type, arg0, tem);
+ return fold_build2 (MULT_EXPR, type,
+ fold_convert (type, arg0), tem);
}
}
}
if (code == TRUNC_MOD_EXPR
&& !TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
+ && !TREE_CONSTANT_OVERFLOW (arg1)
&& TREE_INT_CST_HIGH (arg1) < 0
&& !flag_trapv
/* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
case LT_EXPR:
case GT_EXPR:
case LE_EXPR:
- case GE_EXPR:
+ case GE_EXPR:
/* If one arg is a real or integer constant, put it last. */
if (tree_swap_operands_p (arg0, arg1, true))
- return fold_build2 (swap_tree_comparison (code), type, arg1, arg0);
+ return fold_build2 (swap_tree_comparison (code), type, op1, op0);
+
+ /* bool_var != 0 becomes bool_var. */
+ if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
+ && code == NE_EXPR)
+ return non_lvalue (fold_convert (type, arg0));
+
+ /* bool_var == 1 becomes bool_var. */
+ if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
+ && code == EQ_EXPR)
+ return non_lvalue (fold_convert (type, arg0));
/* If this is an equality comparison of the address of a non-weak
object against zero, then we know the result. */
&& extract_array_ref (arg1, &base1, &offset1)
&& operand_equal_p (base0, base1, 0))
{
+ if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
+ && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
+ offset0 = NULL_TREE;
+ if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
+ && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
+ offset1 = NULL_TREE;
if (offset0 == NULL_TREE
&& offset1 == NULL_TREE)
{
}
}
+ /* Transform comparisons of the form X +- C CMP X. */
+ if ((code != EQ_EXPR && code != NE_EXPR)
+ && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
+ && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+ && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
+ || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !TYPE_UNSIGNED (TREE_TYPE (arg1))
+ && !(flag_wrapv || flag_trapv))))
+ {
+ tree arg01 = TREE_OPERAND (arg0, 1);
+ enum tree_code code0 = TREE_CODE (arg0);
+ int is_positive;
+
+ if (TREE_CODE (arg01) == REAL_CST)
+ is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
+ else
+ is_positive = tree_int_cst_sgn (arg01);
+
+ /* (X - c) > X becomes false. */
+ if (code == GT_EXPR
+ && ((code0 == MINUS_EXPR && is_positive >= 0)
+ || (code0 == PLUS_EXPR && is_positive <= 0)))
+ return constant_boolean_node (0, type);
+
+ /* Likewise (X + c) < X becomes false. */
+ if (code == LT_EXPR
+ && ((code0 == PLUS_EXPR && is_positive >= 0)
+ || (code0 == MINUS_EXPR && is_positive <= 0)))
+ return constant_boolean_node (0, type);
+
+ /* Convert (X - c) <= X to true. */
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
+ && code == LE_EXPR
+ && ((code0 == MINUS_EXPR && is_positive >= 0)
+ || (code0 == PLUS_EXPR && is_positive <= 0)))
+ return constant_boolean_node (1, type);
+
+ /* Convert (X + c) >= X to true. */
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
+ && code == GE_EXPR
+ && ((code0 == PLUS_EXPR && is_positive >= 0)
+ || (code0 == MINUS_EXPR && is_positive <= 0)))
+ return constant_boolean_node (1, type);
+
+ if (TREE_CODE (arg01) == INTEGER_CST)
+ {
+ /* Convert X + c > X and X - c < X to true for integers. */
+ if (code == GT_EXPR
+ && ((code0 == PLUS_EXPR && is_positive > 0)
+ || (code0 == MINUS_EXPR && is_positive < 0)))
+ return constant_boolean_node (1, type);
+
+ if (code == LT_EXPR
+ && ((code0 == MINUS_EXPR && is_positive > 0)
+ || (code0 == PLUS_EXPR && is_positive < 0)))
+ return constant_boolean_node (1, type);
+
+ /* Convert X + c <= X and X - c >= X to false for integers. */
+ if (code == LE_EXPR
+ && ((code0 == PLUS_EXPR && is_positive > 0)
+ || (code0 == MINUS_EXPR && is_positive < 0)))
+ return constant_boolean_node (0, type);
+
+ if (code == GE_EXPR
+ && ((code0 == MINUS_EXPR && is_positive > 0)
+ || (code0 == PLUS_EXPR && is_positive < 0)))
+ return constant_boolean_node (0, type);
+ }
+ }
+
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree targ0 = strip_float_extensions (arg0);
switch (code)
{
case GE_EXPR:
- arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold_build2 (GT_EXPR, type, arg0, arg1);
+ arg1 = const_binop (MINUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1), 0);
+ return fold_build2 (GT_EXPR, type, arg0,
+ fold_convert (TREE_TYPE (arg0), arg1));
case LT_EXPR:
- arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold_build2 (LE_EXPR, type, arg0, arg1);
+ arg1 = const_binop (MINUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1), 0);
+ return fold_build2 (LE_EXPR, type, arg0,
+ fold_convert (TREE_TYPE (arg0), arg1));
default:
break;
}
/* Comparisons with the highest or lowest possible integer of
- the specified size will have known values.
-
- This is quite similar to fold_relational_hi_lo, however,
- attempts to share the code have been nothing but trouble. */
+ the specified size will have known values. */
{
int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
arg0, fold_convert (TREE_TYPE (arg0),
integer_zero_node));
- /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
- 2, then fold the expression into shifts and logical operations. */
- tem = fold_single_bit_test (code, arg0, arg1, type);
+ /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
+ bit, then fold the expression into A < 0 or A >= 0. */
+ tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
if (tem)
return tem;
return omit_one_operand (type, integer_one_node, arg0);
}
- tem = build2 (code, type, cval1, cval2);
if (save_p)
- return save_expr (tem);
+ return save_expr (build2 (code, type, cval1, cval2));
else
- return fold (tem);
+ return fold_build2 (code, type, cval1, cval2);
}
}
}
return t1;
}
+ /* Fold a comparison of the address of COMPONENT_REFs with the same
+ type and component to a comparison of the address of the base
+ object. In short, &x->a OP &y->a to x OP y and
+ &x->a OP &y.a to x OP &y */
+ if (TREE_CODE (arg0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
+ && TREE_CODE (arg1) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
+ {
+ tree cref0 = TREE_OPERAND (arg0, 0);
+ tree cref1 = TREE_OPERAND (arg1, 0);
+ if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
+ {
+ tree op0 = TREE_OPERAND (cref0, 0);
+ tree op1 = TREE_OPERAND (cref1, 0);
+ return fold_build2 (code, type,
+ build_fold_addr_expr (op0),
+ build_fold_addr_expr (op1));
+ }
+ }
+
/* If this is a comparison of complex values and either or both sides
are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
&& (arglist = TREE_OPERAND (arg0, 1))
&& TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
&& ! TREE_CHAIN (arglist))
- return fold_build2 (code, type,
- build1 (INDIRECT_REF, char_type_node,
- TREE_VALUE (arglist)),
- fold_convert (char_type_node,
- integer_zero_node));
+ {
+ tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
+ return fold_build2 (code, type, iref,
+ build_int_cst (TREE_TYPE (iref), 0));
+ }
}
/* We can fold X/C1 op C2 where C1 and C2 are integer constants
return build_complex (type, arg0, arg1);
return NULL_TREE;
+ case ASSERT_EXPR:
+ /* An ASSERT_EXPR should never be passed to fold_binary. */
+ gcc_unreachable ();
+
default:
return NULL_TREE;
} /* switch (code) */
}
+/* Callback for walk_tree, looking for LABEL_EXPR.  Returns *TP if it
+   is a LABEL_EXPR, otherwise NULL_TREE.  The sub-tree of a GOTO_EXPR
+   is not walked, so a label merely referenced by a goto does not
+   count as contained.  */
+
+static tree
+contains_label_1 (tree *tp,
+		  int *walk_subtrees,
+		  void *data ATTRIBUTE_UNUSED)
+{
+  switch (TREE_CODE (*tp))
+    {
+    case LABEL_EXPR:
+      return *tp;
+    case GOTO_EXPR:
+      *walk_subtrees = 0;
+      /* Fall through: a GOTO_EXPR itself is not a label.  */
+    default:
+      return NULL_TREE;
+    }
+}
+
+/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
+   accessible from outside the sub-tree.  Returns true if such a label
+   is found, false otherwise.  */
+
+static bool
+contains_label_p (tree st)
+{
+  return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
+}
+
/* Fold a ternary expression of code CODE and type TYPE with operands
OP0, OP1, and OP2. Return the folded expression if folding is
successful. Otherwise, return NULL_TREE. */
-static tree
+tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
tree tem;
so all simple results must be passed through pedantic_non_lvalue. */
if (TREE_CODE (arg0) == INTEGER_CST)
{
+ tree unused_op = integer_zerop (arg0) ? op1 : op2;
tem = integer_zerop (arg0) ? op2 : op1;
/* Only optimize constant conditions when the selected branch
has the same type as the COND_EXPR. This avoids optimizing
- away "c ? x : throw", where the throw has a void type. */
- if (! VOID_TYPE_P (TREE_TYPE (tem))
- || VOID_TYPE_P (type))
+ away "c ? x : throw", where the throw has a void type.
+ Avoid throwing away that operand which contains label. */
+ if ((!TREE_SIDE_EFFECTS (unused_op)
+ || !contains_label_p (unused_op))
+ && (! VOID_TYPE_P (TREE_TYPE (tem))
+ || VOID_TYPE_P (type)))
return pedantic_non_lvalue (tem);
return NULL_TREE;
}
}
return NULL_TREE;
+ case BIT_FIELD_REF:
+ if (TREE_CODE (arg0) == VECTOR_CST
+ && type == TREE_TYPE (TREE_TYPE (arg0))
+ && host_integerp (arg1, 1)
+ && host_integerp (op2, 1))
+ {
+ unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
+ unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
+
+ if (width != 0
+ && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
+ && (idx % width) == 0
+ && (idx = idx / width)
+ < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
+ {
+ tree elements = TREE_VECTOR_CST_ELTS (arg0);
+ while (idx-- > 0 && elements)
+ elements = TREE_CHAIN (elements);
+ if (elements)
+ return TREE_VALUE (elements);
+ else
+ return fold_convert (type, integer_zero_node);
+ }
+ }
+ return NULL_TREE;
+
default:
return NULL_TREE;
} /* switch (code) */
case CONST_DECL:
return fold (DECL_INITIAL (t));
- case ASSERT_EXPR:
- {
- /* Given ASSERT_EXPR <Y, COND>, return Y if COND can be folded
- to boolean_true_node. If COND folds to boolean_false_node,
- return ASSERT_EXPR <Y, 0>. Otherwise, return the original
- expression. */
- tree c = fold (ASSERT_EXPR_COND (t));
- if (c == boolean_true_node)
- return ASSERT_EXPR_VAR (t);
- else if (c == boolean_false_node)
- return build (ASSERT_EXPR, TREE_TYPE (t), ASSERT_EXPR_VAR (t), c);
- else
- return t;
- }
-
default:
return t;
} /* switch (code) */
enum tree_code code;
char buf[sizeof (struct tree_decl)];
int i, len;
+
+recursive_label:
gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
<= sizeof (struct tree_decl))
}
else if (TREE_CODE_CLASS (code) == tcc_type
&& (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
- || TYPE_CACHED_VALUES_P (expr)))
+ || TYPE_CACHED_VALUES_P (expr)
+ || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
{
/* Allow these fields to be modified. */
memcpy (buf, expr, tree_size (expr));
expr = (tree) buf;
+ TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
TYPE_POINTER_TO (expr) = NULL;
TYPE_REFERENCE_TO (expr) = NULL;
if (TYPE_CACHED_VALUES_P (expr))
md5_process_bytes (expr, tree_size (expr), ctx);
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
if (TREE_CODE_CLASS (code) != tcc_type
- && TREE_CODE_CLASS (code) != tcc_declaration)
+ && TREE_CODE_CLASS (code) != tcc_declaration
+ && code != TREE_LIST)
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
switch (TREE_CODE_CLASS (code))
{
case TREE_LIST:
fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
+ expr = TREE_CHAIN (expr);
+ goto recursive_label;
break;
case TREE_VEC:
for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
CASE_BUILTIN_F (BUILT_IN_EXPM1)
CASE_BUILTIN_F (BUILT_IN_FLOOR)
CASE_BUILTIN_F (BUILT_IN_FMOD)
+ CASE_BUILTIN_F (BUILT_IN_LCEIL)
CASE_BUILTIN_F (BUILT_IN_LDEXP)
+ CASE_BUILTIN_F (BUILT_IN_LFLOOR)
+ CASE_BUILTIN_F (BUILT_IN_LLCEIL)
+ CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
CASE_BUILTIN_F (BUILT_IN_LLRINT)
CASE_BUILTIN_F (BUILT_IN_LLROUND)
CASE_BUILTIN_F (BUILT_IN_LRINT)
return false;
}
-/* See if we are applying CODE, a relational to the highest or lowest
- possible integer of TYPE. If so, then the result is a compile
- time constant. */
-
-static tree
-fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
- tree *op1_p)
-{
- tree op0 = *op0_p;
- tree op1 = *op1_p;
- enum tree_code code = *code_p;
- int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
-
- if (TREE_CODE (op1) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (op1)
- && width <= HOST_BITS_PER_WIDE_INT
- && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
- || POINTER_TYPE_P (TREE_TYPE (op1))))
- {
- unsigned HOST_WIDE_INT signed_max;
- unsigned HOST_WIDE_INT max, min;
-
- signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
-
- if (TYPE_UNSIGNED (TREE_TYPE (op1)))
- {
- max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
- min = 0;
- }
- else
- {
- max = signed_max;
- min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
- }
-
- if (TREE_INT_CST_HIGH (op1) == 0
- && TREE_INT_CST_LOW (op1) == max)
- switch (code)
- {
- case GT_EXPR:
- return omit_one_operand (type, integer_zero_node, op0);
-
- case GE_EXPR:
- *code_p = EQ_EXPR;
- break;
- case LE_EXPR:
- return omit_one_operand (type, integer_one_node, op0);
-
- case LT_EXPR:
- *code_p = NE_EXPR;
- break;
-
- /* The GE_EXPR and LT_EXPR cases above are not normally
- reached because of previous transformations. */
-
- default:
- break;
- }
- else if (TREE_INT_CST_HIGH (op1) == 0
- && TREE_INT_CST_LOW (op1) == max - 1)
- switch (code)
- {
- case GT_EXPR:
- *code_p = EQ_EXPR;
- *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
- break;
- case LE_EXPR:
- *code_p = NE_EXPR;
- *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
- break;
- default:
- break;
- }
- else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (op1) == min)
- switch (code)
- {
- case LT_EXPR:
- return omit_one_operand (type, integer_zero_node, op0);
-
- case LE_EXPR:
- *code_p = EQ_EXPR;
- break;
-
- case GE_EXPR:
- return omit_one_operand (type, integer_one_node, op0);
-
- case GT_EXPR:
- *code_p = NE_EXPR;
- break;
-
- default:
- break;
- }
- else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
- && TREE_INT_CST_LOW (op1) == min + 1)
- switch (code)
- {
- case GE_EXPR:
- *code_p = NE_EXPR;
- *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
- break;
- case LT_EXPR:
- *code_p = EQ_EXPR;
- *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
- break;
- default:
- break;
- }
-
- else if (TREE_INT_CST_HIGH (op1) == 0
- && TREE_INT_CST_LOW (op1) == signed_max
- && TYPE_UNSIGNED (TREE_TYPE (op1))
- /* signed_type does not work on pointer types. */
- && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
- {
- /* The following case also applies to X < signed_max+1
- and X >= signed_max+1 because previous transformations. */
- if (code == LE_EXPR || code == GT_EXPR)
- {
- tree st0, st1, exp, retval;
- st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
- st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
-
- exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
- type,
- fold_convert (st0, op0),
- fold_convert (st1, integer_zero_node));
-
- retval = fold_binary_to_constant (TREE_CODE (exp),
- TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- TREE_OPERAND (exp, 1));
-
- /* If we are in gimple form, then returning EXP would create
- non-gimple expressions. Clearing it is safe and insures
- we do not allow a non-gimple expression to escape. */
- if (in_gimple_form)
- exp = NULL;
-
- return (retval ? retval : exp);
- }
- }
- }
-
- return NULL_TREE;
-}
-
-
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
attempt to fold the expression to a constant without modifying TYPE,
OP0 or OP1.
If the expression could be simplified to a constant, then return
the constant. If the expression would not be simplified to a
- constant, then return NULL_TREE.
-
- Note this is primarily designed to be called after gimplification
- of the tree structures and when at least one operand is a constant.
- As a result of those simplifying assumptions this routine is far
- simpler than the generic fold routine. */
+ constant, then return NULL_TREE. */
tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
- int wins = 1;
- tree subop0;
- tree subop1;
- tree tem;
-
- /* If this is a commutative operation, and ARG0 is a constant, move it
- to ARG1 to reduce the number of tests below. */
- if (commutative_tree_code (code)
- && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
- {
- tem = op0;
- op0 = op1;
- op1 = tem;
- }
-
- /* If either operand is a complex type, extract its real component. */
- if (TREE_CODE (op0) == COMPLEX_CST)
- subop0 = TREE_REALPART (op0);
- else
- subop0 = op0;
-
- if (TREE_CODE (op1) == COMPLEX_CST)
- subop1 = TREE_REALPART (op1);
- else
- subop1 = op1;
-
- /* Note if either argument is not a real or integer constant.
- With a few exceptions, simplification is limited to cases
- where both arguments are constants. */
- if ((TREE_CODE (subop0) != INTEGER_CST
- && TREE_CODE (subop0) != REAL_CST)
- || (TREE_CODE (subop1) != INTEGER_CST
- && TREE_CODE (subop1) != REAL_CST))
- wins = 0;
-
- switch (code)
- {
- case PLUS_EXPR:
- /* (plus (address) (const_int)) is a constant. */
- if (TREE_CODE (op0) == PLUS_EXPR
- && TREE_CODE (op1) == INTEGER_CST
- && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
- || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
- == ADDR_EXPR)))
- && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
- {
- return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
- const_binop (PLUS_EXPR, op1,
- TREE_OPERAND (op0, 1), 0));
- }
- case BIT_XOR_EXPR:
-
- binary:
- if (!wins)
- return NULL_TREE;
-
- /* Both arguments are constants. Simplify. */
- tem = const_binop (code, op0, op1, 0);
- if (tem != NULL_TREE)
- {
- /* The return value should always have the same type as
- the original expression. */
- if (TREE_TYPE (tem) != type)
- tem = fold_convert (type, tem);
-
- return tem;
- }
- return NULL_TREE;
-
- case MINUS_EXPR:
- /* Fold &x - &x. This can happen from &x.foo - &x.
- This is unsafe for certain floats even in non-IEEE formats.
- In IEEE, it is unsafe because it does wrong for NaNs.
- Also note that operand_equal_p is always false if an
- operand is volatile. */
- if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
- return fold_convert (type, integer_zero_node);
-
- goto binary;
-
- case MULT_EXPR:
- case BIT_AND_EXPR:
- /* Special case multiplication or bitwise AND where one argument
- is zero. */
- if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
- return omit_one_operand (type, op1, op0);
- else
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
- && real_zerop (op1))
- return omit_one_operand (type, op1, op0);
-
- goto binary;
-
- case BIT_IOR_EXPR:
- /* Special case when we know the result will be all ones. */
- if (integer_all_onesp (op1))
- return omit_one_operand (type, op1, op0);
-
- goto binary;
-
- case TRUNC_DIV_EXPR:
- case ROUND_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case CEIL_DIV_EXPR:
- case EXACT_DIV_EXPR:
- case TRUNC_MOD_EXPR:
- case ROUND_MOD_EXPR:
- case FLOOR_MOD_EXPR:
- case CEIL_MOD_EXPR:
- case RDIV_EXPR:
- /* Division by zero is undefined. */
- if (integer_zerop (op1))
- return NULL_TREE;
-
- if (TREE_CODE (op1) == REAL_CST
- && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
- && real_zerop (op1))
- return NULL_TREE;
-
- goto binary;
-
- case MIN_EXPR:
- if (INTEGRAL_TYPE_P (type)
- && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
- return omit_one_operand (type, op1, op0);
-
- goto binary;
-
- case MAX_EXPR:
- if (INTEGRAL_TYPE_P (type)
- && TYPE_MAX_VALUE (type)
- && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
- return omit_one_operand (type, op1, op0);
-
- goto binary;
-
- case RSHIFT_EXPR:
- /* Optimize -1 >> x for arithmetic right shifts. */
- if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
- return omit_one_operand (type, op0, op1);
- /* ... fall through ... */
-
- case LSHIFT_EXPR:
- if (integer_zerop (op0))
- return omit_one_operand (type, op0, op1);
-
- /* Since negative shift count is not well-defined, don't
- try to compute it in the compiler. */
- if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
- return NULL_TREE;
-
- goto binary;
-
- case LROTATE_EXPR:
- case RROTATE_EXPR:
- /* -1 rotated either direction by any amount is still -1. */
- if (integer_all_onesp (op0))
- return omit_one_operand (type, op0, op1);
-
- /* 0 rotated either direction by any amount is still zero. */
- if (integer_zerop (op0))
- return omit_one_operand (type, op0, op1);
-
- goto binary;
-
- case COMPLEX_EXPR:
- if (wins)
- return build_complex (type, op0, op1);
- return NULL_TREE;
-
- case LT_EXPR:
- case LE_EXPR:
- case GT_EXPR:
- case GE_EXPR:
- case EQ_EXPR:
- case NE_EXPR:
- /* If one arg is a real or integer constant, put it last. */
- if ((TREE_CODE (op0) == INTEGER_CST
- && TREE_CODE (op1) != INTEGER_CST)
- || (TREE_CODE (op0) == REAL_CST
- && TREE_CODE (op0) != REAL_CST))
- {
- tree temp;
-
- temp = op0;
- op0 = op1;
- op1 = temp;
- code = swap_tree_comparison (code);
- }
-
- /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
- This transformation affects the cases which are handled in later
- optimizations involving comparisons with non-negative constants. */
- if (TREE_CODE (op1) == INTEGER_CST
- && TREE_CODE (op0) != INTEGER_CST
- && tree_int_cst_sgn (op1) > 0)
- {
- switch (code)
- {
- case GE_EXPR:
- code = GT_EXPR;
- op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
- break;
-
- case LT_EXPR:
- code = LE_EXPR;
- op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
- break;
-
- default:
- break;
- }
- }
-
- tem = fold_relational_hi_lo (&code, type, &op0, &op1);
- if (tem)
- return tem;
-
- /* Fall through. */
-
- case ORDERED_EXPR:
- case UNORDERED_EXPR:
- case UNLT_EXPR:
- case UNLE_EXPR:
- case UNGT_EXPR:
- case UNGE_EXPR:
- case UNEQ_EXPR:
- case LTGT_EXPR:
- if (!wins)
- return NULL_TREE;
-
- return fold_relational_const (code, type, op0, op1);
-
- case RANGE_EXPR:
- /* This could probably be handled. */
- return NULL_TREE;
-
- case TRUTH_AND_EXPR:
- /* If second arg is constant zero, result is zero, but first arg
- must be evaluated. */
- if (integer_zerop (op1))
- return omit_one_operand (type, op1, op0);
- /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
- case will be handled here. */
- if (integer_zerop (op0))
- return omit_one_operand (type, op0, op1);
- if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
- return constant_boolean_node (true, type);
- return NULL_TREE;
-
- case TRUTH_OR_EXPR:
- /* If second arg is constant true, result is true, but we must
- evaluate first arg. */
- if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
- return omit_one_operand (type, op1, op0);
- /* Likewise for first arg, but note this only occurs here for
- TRUTH_OR_EXPR. */
- if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
- return omit_one_operand (type, op0, op1);
- if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
- return constant_boolean_node (false, type);
- return NULL_TREE;
-
- case TRUTH_XOR_EXPR:
- if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
- {
- int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
- return constant_boolean_node (x, type);
- }
- return NULL_TREE;
-
- default:
- return NULL_TREE;
- }
+ tree tem = fold_binary (code, type, op0, op1);
+ return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
If the expression could be simplified to a constant, then return
the constant. If the expression would not be simplified to a
- constant, then return NULL_TREE.
-
- Note this is primarily designed to be called after gimplification
- of the tree structures and when op0 is a constant. As a result
- of those simplifying assumptions this routine is far simpler than
- the generic fold routine. */
+ constant, then return NULL_TREE. */
tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
- /* Make sure we have a suitable constant argument. */
- if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
- {
- tree subop;
-
- if (TREE_CODE (op0) == COMPLEX_CST)
- subop = TREE_REALPART (op0);
- else
- subop = op0;
-
- if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
- return NULL_TREE;
- }
-
- switch (code)
- {
- case NOP_EXPR:
- case FLOAT_EXPR:
- case CONVERT_EXPR:
- case FIX_TRUNC_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_CEIL_EXPR:
- case FIX_ROUND_EXPR:
- return fold_convert_const (code, type, op0);
-
- case NEGATE_EXPR:
- if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
- return fold_negate_const (op0, type);
- else
- return NULL_TREE;
-
- case ABS_EXPR:
- if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
- return fold_abs_const (op0, type);
- else
- return NULL_TREE;
-
- case BIT_NOT_EXPR:
- if (TREE_CODE (op0) == INTEGER_CST)
- return fold_not_const (op0, type);
- else
- return NULL_TREE;
-
- case REALPART_EXPR:
- if (TREE_CODE (op0) == COMPLEX_CST)
- return TREE_REALPART (op0);
- else
- return NULL_TREE;
-
- case IMAGPART_EXPR:
- if (TREE_CODE (op0) == COMPLEX_CST)
- return TREE_IMAGPART (op0);
- else
- return NULL_TREE;
-
- case CONJ_EXPR:
- if (TREE_CODE (op0) == COMPLEX_CST
- && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
- return build_complex (type, TREE_REALPART (op0),
- negate_expr (TREE_IMAGPART (op0)));
- return NULL_TREE;
-
- default:
- return NULL_TREE;
- }
+ tree tem = fold_unary (code, type, op0);
+ return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
-/* Given a pointer value T, return a simplified version of an indirection
- through T, or NULL_TREE if no simplification is possible. */
+/* Given a pointer value OP0 and a type TYPE, return a simplified version
+ of an indirection through OP0, or NULL_TREE if no simplification is
+ possible. */
-static tree
-fold_indirect_ref_1 (tree t)
+tree
+fold_indirect_ref_1 (tree type, tree op0)
{
- tree type = TREE_TYPE (TREE_TYPE (t));
- tree sub = t;
+ tree sub = op0;
tree subtype;
STRIP_NOPS (sub);
tree op = TREE_OPERAND (sub, 0);
tree optype = TREE_TYPE (op);
/* *&p => p */
- if (lang_hooks.types_compatible_p (type, optype))
+ if (type == optype)
return op;
/* *(foo *)&fooarray => fooarray[0] */
else if (TREE_CODE (optype) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
+ && type == TREE_TYPE (optype))
{
tree type_domain = TYPE_DOMAIN (optype);
tree min_val = size_zero_node;
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
+ && type == TREE_TYPE (TREE_TYPE (subtype)))
{
tree type_domain;
tree min_val = size_zero_node;
tree
build_fold_indirect_ref (tree t)
{
- tree sub = fold_indirect_ref_1 (t);
+ tree type = TREE_TYPE (TREE_TYPE (t));
+ tree sub = fold_indirect_ref_1 (type, t);
if (sub)
return sub;
else
- return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
+ return build1 (INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version. */
tree
fold_indirect_ref (tree t)
{
- tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
+ tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
if (sub)
return sub;